Last active
March 30, 2026 17:38
-
-
Save allanjust/ccddcf3d98af5290cd321de78b00bce1 to your computer and use it in GitHub Desktop.
Use harmony to subset TEMPO data from R
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Demo: download a single spatially subset granule from TEMPO
# using the NASA Harmony API.
library(earthdatalogin)  # NASA EarthData authentication helpers
library(httr2)           # request building / performing

# Space-time bounds, pre-formatted as Harmony API query strings.
# https://harmony.earthdata.nasa.gov/docs
lat_string <- "lat(18.1:20.6)"
lon_string <- "lon(-100.4:-97.4)"
temporal_string <- "2026-03-09T17:00:00Z,2026-03-09T19:00:00Z"
# Example shows the Mexico City region on 2026-03-09:
# https://projects.cosmicds.cfa.harvard.edu/tempo-lite/?lat=19.2541&lon=-99.2473&zoom=8&t=1741525740000&extendedRange=true
# 1. Auth: get your token.
# earthdatalogin stores an "Authorization: Bearer <token>" header in the file
# named by GDAL_HTTP_HEADER_FILE; we read the token back out for use with httr2.
# https://boettiger-lab.github.io/earthdatalogin/
edl_set_token(prompt_for_netrc = FALSE)

header_file <- Sys.getenv("GDAL_HTTP_HEADER_FILE")
if (!nzchar(header_file) || !file.exists(header_file)) {
  stop("GDAL_HTTP_HEADER_FILE is not set or missing; did edl_set_token() succeed?",
       call. = FALSE)
}
# Pick out the Authorization line explicitly: gsub() over every line of the
# header file would return a character vector if other headers are present.
header_lines <- readLines(header_file, warn = FALSE)
auth_line <- grep("^Authorization: Bearer ", header_lines, value = TRUE)
if (length(auth_line) == 0) {
  stop("No 'Authorization: Bearer' header found in ", header_file, call. = FALSE)
}
my_token <- sub("Authorization: Bearer ", "", auth_line[[1]], fixed = TRUE)

# Check your token by seeing if you can access a file listed in the
# earthdatalogin README.
url <- "https://data.lpdaac.earthdatacloud.nasa.gov/lp-prod-protected/HLSL30.020/HLS.L30.T56JKT.2023246T235950.v2.0/HLS.L30.T56JKT.2023246T235950.v2.0.SAA.tif"
terra::rast(url, vsi = TRUE)
# If earthdatalogin isn't set up, instead of giving SpatRaster info this will
# return "Error: [rast] file does not exist"...
# 2. STEP ONE: find the granule ID using CMR (NASA's search engine).
# This confirms the data exists before we even talk to Harmony.

# TEMPO_NO2_L3_V04
concept_id <- "C3685896708-LARC_CLOUD"
# https://doi.org/10.5067/IS-40e/TEMPO/NO2_L3.004
# TEMPO_O3PROF_L3_V04
# concept_id <- "C3685896402-LARC_CLOUD"
# https://doi.org/10.5067/IS-40e/TEMPO/O3PROF_L3.004

cmr_url <- "https://cmr.earthdata.nasa.gov/search/granules.json"
cmr_resp <- request(cmr_url) %>%
  req_url_query(
    concept_id = concept_id,
    temporal = temporal_string,
    page_size = 1
  ) %>%
  req_perform() %>%
  resp_body_json()

# Extract the Granule UR (Unique Resource name); fail with a clear message
# rather than an opaque subscript error if the search returned nothing.
entries <- cmr_resp$feed$entry
if (length(entries) == 0) {
  stop("No granules found for ", concept_id, " within ", temporal_string,
       call. = FALSE)
}
granule_id <- entries[[1]]$title
cat("Found Granule:", granule_id, "\n")
# 3. STEP TWO: send that specific granule to Harmony for subsetting.
# Note the URL structure: collection ID + /ogc-api-coverages/...
harmony_url <- paste0("https://harmony.earthdata.nasa.gov/", concept_id, "/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset")
req <- request(harmony_url) %>%
  req_url_query(
    granuleName = granule_id,
    # Two repeated 'subset' parameters are intentional: Harmony expects one
    # per dimension (lat and lon), and httr2 passes both through as given.
    subset = lat_string,
    subset = lon_string,
    format = "application/x-netcdf4"
  ) %>%
  req_auth_bearer_token(my_token) %>%  # token extracted from the GDAL header file above
  req_headers(Accept = "application/json")
# 1. Define your output path.
out_file <- "Mexico_City_TEMPO_V04.nc"

# 2. Run the request, streaming the body straight to 'path'.
# In this case - when there is a single expected output the API returns it
# directly as a netcdf -- so this wouldn't work for a large set of
# resulting files.
resp <- req_perform(req, path = out_file)
cat("Success! File saved to:", out_file, "\n")
# 3. Verify the downloaded subset with terra.
# md = TRUE works around an issue finding the CRS for TEMPO NetCDF:
# https://github.com/rspatial/terra/issues/1795#issuecomment-2918037100
library(terra)
if (file.exists(out_file)) {
  # Use TRUE, never the reassignable shorthand T.
  r <- rast(out_file, subds = "/product/vertical_column_troposphere", md = TRUE)
  print(r)
  plot(r, main = "TEMPO V04 - Central Mexico")
} else {
  # Previously a silent no-op; surface the failure explicitly.
  warning("Expected output file not found: ", out_file, call. = FALSE)
}
# end of script
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment