Query and download data from OB DAAC
#!/usr/bin/env bash
# Prerequisite:
#
# (1) Create a `~/.netrc` file (if it doesn't exist); e.g., `touch ~/.netrc`
#
# (2) Open the `~/.netrc` file in a plain text editor and add a single line like the following:
#
#   machine urs.earthdata.nasa.gov login YourEDLUserName password YourEDLPassword123
#
# Alternatively, if you want a configuration just for this script, you can put
# the netrc information in a local file (e.g., `./my_netrc`). Then, when
# calling `curl`, remove the `-n` argument and replace it with `--netrc-file ./my_netrc`.
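#
# For example, one way to set this up from the shell (the username and password
# below are placeholders; substitute your own Earthdata Login credentials):
#
#   touch ~/.netrc
#   chmod 600 ~/.netrc  # keep credentials readable only by you
#   echo 'machine urs.earthdata.nasa.gov login YourEDLUserName password YourEDLPassword123' >> ~/.netrc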

COOKIEFILE=".urs_cookies"
LISTFILE="filelist"
STARTDATE="2023-05-01 00:00:00"
CURDATE=$(date +"%Y-%m-%d %H:%M:%S")

# Use system default netrc file
CURL="curl -nL -b $COOKIEFILE -c $COOKIEFILE"
# Use netrc file at a specific path
# CURL="curl --netrc-file ./my_netrc -L -b $COOKIEFILE -c $COOKIEFILE"

# Remove old credentials. They'll be reset in the next curl call.
# Authentication should happen through the `.netrc` file.
rm -f "$COOKIEFILE"

# Download list of files matching query
# `-n` means to use authentication from the `.netrc` file.
# `-L` means to follow redirects
# `-w "\n"` at the end adds a trailing newline, to make it easier to loop over the file.
$CURL \
  -d "results_as_file=1" \
  -d "sensor_id=14" \
  -d "dtid=1019" \
  -d "sdate=$STARTDATE" \
  -d "edate=$CURDATE" \
  -d "subType=1" \
  -d "addurl=1" \
  -d "prod_id=chlor_a" \
  -d "resolution_id=4km" \
  -d "period=R32" "https://oceandata.sci.gsfc.nasa.gov/api/file_search" \
  -w "\n" > "$LISTFILE"

echo "Found $(wc -l < "$LISTFILE") matching files."
while read -r url; do
  fname=$(basename "$url")
  if [ -f "$fname" ]; then
    echo "File $fname already exists. Skipping..."
    continue
  fi
  echo "Downloading URL: $url"
  $CURL -O "$url"
done < "$LISTFILE"
Note that the -d "dtid=1019" parameter can (and probably should) be dropped from the query without affecting the results.
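
For reference, a sketch of the same search request with that parameter removed; all other parameters and the netrc/cookie handling are unchanged from the script above:

    curl -nL -b .urs_cookies -c .urs_cookies \
      -d "results_as_file=1" \
      -d "sensor_id=14" \
      -d "sdate=2023-05-01 00:00:00" \
      -d "edate=$(date +"%Y-%m-%d %H:%M:%S")" \
      -d "subType=1" \
      -d "addurl=1" \
      -d "prod_id=chlor_a" \
      -d "resolution_id=4km" \
      -d "period=R32" \
      -w "\n" \
      "https://oceandata.sci.gsfc.nasa.gov/api/file_search" > filelist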