## Read a raster `x`, optionally cropped to extent `ex`, forcing unsigned
## (anonymous) S3 access for the duration of the call.
## NOTE(review): this definition is truncated/garbled in this chunk — the
## `else` branch opened below never closes; the rest of the body appears to
## have been lost in a paste. Recover it before running.
pq <- function(x, ex = NULL) {
## Save the current value so on.exit() can restore it — avoids clobbering
## the caller's AWS_NO_SIGN_REQUEST setting.
env <- Sys.getenv("AWS_NO_SIGN_REQUEST")
Sys.setenv("AWS_NO_SIGN_REQUEST" = "YES")
on.exit(Sys.setenv("AWS_NO_SIGN_REQUEST" = env))
if (!is.null(ex)) {
## Crop to the requested extent before any further processing.
x <- terra::crop(x, ex)
} else {## python takes the same amount of time about 2 seconds
## Commented-out Python equivalent kept for timing comparison:
#from osgeo import gdal
#gdal.UseExceptions()
#import time
#t0 = time.time()
#ds = gdal.Open("TCI.tif")
#d = ds.ReadRaster()
#t1 = time.time()
## code to do a small extract in R
## https://rstats.me/@mdsumner/114729203537529604
## Global Ensemble Digital Terrain Model 30m (GEDTM30)
## https://zenodo.org/records/15490367
rproj <- function() sprintf("+proj=laea +lon_0=%f +lat_0=%f", runif(1, -180, 180), runif(1, -90, 90))
pts <- geosphere::randomCoordinates(1e5)
ll <- function() "EPSG:4326"
library(terra)
library(sf)
## Benchmark helper: reproject `x` from lon/lat (ll()) to a freshly drawn
## random local equal-area projection (rproj()) using terra.
terra_ <- function(x) {
  terra::project(x, to = rproj(), from = ll())
}
## first clone the repo (later we'll read remotely without this)
# git clone https://opensource.unicc.org/open-source-united-initiative/un-tech-over/challenge-1/ahead-of-the-storm-challenge1-datasets
# cd ahead-of-the-storm-challenge1-datasets
## Now move into the repo and read in R
## we can get that WKT by using Oblique Mercator params
prj <- "+proj=omerc +lonc=-122.6 +lat_0=40 +gamma=21 +x_0=1000000"
shp <- "/vsicurl/https://github.com/noaa-nwfsc/VMS-pipeline/raw/refs/heads/main/spatial_data/map_rotation/fivekm_grid_extent_rect_21.shp"
library(terra)
#> terra 1.8.52
v <- vect(shp)
## Expand the PROJ string to full WKT via terra.
wkt0 <- crs(prj)

## Build a table of data sources from a virtual directory listing.
## NOTE(review): `vsidir` and `vars` are defined elsewhere (not visible in
## this chunk) — confirm before running.
files <- tibble::tibble(filename = gdalraster::vsi_read_dir(vsidir)) |>
  ## presumably the template was "{vsidir}/{filename}" before the paste
  ## garbled it to "(unknown)" — TODO confirm
  dplyr::mutate(source = glue::glue("{vsidir}/{filename}")) |>
  ## vrt: subdataset connection strings for the lon/lat arrays.
  dplyr::mutate(longitude = glue::glue("vrt://{source}?sd_name=longitude"),
                latitude = glue::glue("vrt://{source}?sd_name=latitude"))
## Keep only files whose names start with one of `vars`, in `vars` order.
files <- do.call(rbind, lapply(vars, \(.x) dplyr::filter(files, stringr::str_detect(filename, glue::glue("^{.x}_")))))
i <- 1
## we can probably save this for subsequent runs
pkgurl <- "https://cran.r-project.org/src/contrib/terra_1.8-50.tar.gz"
## Chain GDAL virtual filesystems: read inside a remote tarball without
## downloading it (/vsitar/ over /vsicurl/).
dsn0 <- sprintf("/vsitar//vsicurl/%s", pkgurl)
files <- gdalraster::vsi_read_dir(dsn0, recursive = TRUE)
## First GeoTIFF shipped inside the package sources.
tif0 <- grep("\\.tif$", files, value = TRUE)[1]
tif <- sprintf("%s/%s", dsn0, tif0)
## Open it directly with gdalraster's GDALRaster class.
new(gdalraster::GDALRaster, tif)

library(bowerbird)
my_source <- bb_source(
name = "Chlorophyll-a concentration in seawater (not log-transformed), generated by as a blended combination of OCI, OCI2, OC2 and OCx algorithms, depending on water class memberships",
id = "ESACCI-OC-L3S-CHLOR_A-MERGED",
description = "European Space Agency Climate Change Initiative composites of merged sensor (MERIS, MODIS Aqua, SeaWiFS LAC & GAC, VIIRS, OLCI) products.",
doc_url = "http://esa-oceancolour-cci.org",
source_url =
c("https://www.oceancolour.org/thredds/catalog/cci/v6.0-release/geographic/monthly/chlor_a/catalog.html",
"https://www.oceancolour.org/thredds/catalog/cci/v6.0-release/geographic/daily/chlor_a/catalog.html",