## docker run --rm -ti ghcr.io/mdsumner/gdal-builds:rocker-gdal-dev-python bash
## Point reticulate at the container's Python BEFORE the package initializes
## a different interpreter (namespaced call, so library() order is safe).
reticulate::use_python("/workenv/bin/python3")
library(reticulate)
## Declare the Python-side dependency for this session.
py_require("virtualizarr")
## Grab the multi-file virtual-dataset opener from the virtualizarr module.
open_virtual_mfdataset <- import("virtualizarr")$open_virtual_mfdataset
## Open one remote NetCDF (BRAN2023 daily atmospheric flux diagnostics) as a
## virtual dataset; the function takes a list of paths/URLs, hence list().
ds <- open_virtual_mfdataset(list("https://thredds.nci.org.au/thredds/fileServer/gb6/BRAN/BRAN2023/daily/atm_flux_diag_2024_06.nc"))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
alg/data/2by2.tif | |
alg/data/3by3_average.tif | |
alg/data/3by3_sum.tif | |
alg/data/4by4.tif | |
alg/data/bug_6526_input.tif | |
alg/data/contour_in.tif | |
alg/data/geoloc/latitude_including_pole.tif | |
alg/data/geoloc/longitude_including_pole.tif | |
alg/data/nodata_precision_issue_float32.tif | |
alg/data/pat.tif |
Spider WorldClim for URLs of usable data
library(bowerbird)
## Local mirror root for bowerbird downloads; tempdir() is per-session scratch.
my_directory <- tempdir()
cf <- bb_config(local_file_root = my_directory)
##https://geodata.ucdavis.edu/climate/worldclim/2_1/base/wc2.1_10m_tmin.zip
## NOTE(review): snippet is truncated here -- the right-hand side of this
## assignment (presumably a bb_source() definition) is missing from the paste.
src <-
## Use the container's Python and make the numcodecs package available.
reticulate::use_python("/workenv/bin/python3")
reticulate::py_require("numcodecs")
numcodecs <- reticulate::import("numcodecs")
## Compression level 4 was just found by eye (not systematically tuned).
## 4L: the Python constructor expects an integer, so pass an R integer literal.
zlib <- numcodecs$zlib$Zlib(4L)
https://github.com/openlandmap/GEDTM30?tab=readme-ov-file
gdalinfo /vsicurl/https://s3.opengeohub.org/global/edtm/legendtm_rf_30m_m_s_20000101_20231231_go_epsg.4326_v20250130.tif
Driver: GTiff/GeoTIFF
Files: /vsicurl/https://s3.opengeohub.org/global/edtm/legendtm_rf_30m_m_s_20000101_20231231_go_epsg.4326_v20250130.tif
Size is 1440010, 600010
Coordinate System is:
## Map (1-based) column indices to x coordinates of cell centres.
##
## dimension: c(ncol, nrow) of the grid.
## bbox: c(xmin, ymin, xmax, ymax) extent of the grid.
## col: numeric vector of column indices; out-of-range values become NA.
## Returns a numeric vector of x centre coordinates, same length as col.
x_from_col <- function(dimension, bbox, col) {
  n_cols <- dimension[1]
  ## Invalidate indices outside [1, ncol] in one pass.
  col[col < 1 | col > n_cols] <- NA
  pixel_width <- (bbox[3] - bbox[1]) / n_cols
  ## Centre of column k sits half a pixel in from its left edge.
  bbox[1] + (col - 0.5) * pixel_width
}
## Map (1-based) row indices to y coordinates; mirror of x_from_col above.
y_from_row <- function(dimension, bbox, row) {
  ## Invalidate rows outside [1, nrow] (dimension[2] is the row count).
  row[row < 1] <- NA
  row[row > dimension[2]] <- NA
  ## NOTE(review): the paste is truncated here -- the y computation and the
  ## closing brace are missing from this snippet.
Extract REMA (Reference Elevation Model of Antarctica) at points
library(xml2)
library(gdalraster)
## GDAL data source: overview VRT of the REMA 2m DEM, read over HTTP.
dsn <- "/vsicurl/https://raw.githubusercontent.com/mdsumner/rema-ovr/main/REMA-2m_dem_ovr.vrt"
## Strip GDAL's /vsicurl/ prefix to get a plain URL that xml2 can fetch.
url <- gsub("/vsicurl/", "", dsn)
xml <- read_xml(url)
## Collect every DstRect node -- presumably the destination rectangles of the
## VRT's component sources; verify against the GDAL VRT spec.
dst <- xml |> xml_find_all(".//DstRect")
## https://developmentseed.org/obstore/latest/examples/fastapi/
# Example large Parquet file hosted in AWS open data
#store = S3Store("ookla-open-data", region="us-west-2", skip_signature=True)
#path = "parquet/performance/type=fixed/year=2024/quarter=1/2024-01-01_performance_fixed_tiles.parquet"
## The bucket above lives in us-west-2; S3 readers need the region set.
Sys.setenv("AWS_REGION" = "us-west-2")
Timing note: 128 CPUs, 158 seconds
## Allow forked (multicore) futures and silence RNG-misuse warnings.
options(parallelly.fork.enable = TRUE, future.rng.onMisuse = "ignore")
library(furrr); plan(multicore)
## Parquet index of the GHRSST MUR v2 collection (one row per item).
d <- arrow::read_parquet("https://data.source.coop/ausantarctic/ghrsst-mur-v2/ghrsst-mur-v2.parquet")
## Build GDAL virtual-filesystem DSNs from each analysed_sst asset URL.
dsn <- sprintf("/vsicurl/%s", d$assets$analysed_sst$href)
#(cell <- terra::cellFromXY(terra::rast(dsn[1]), cbind(147, -48)))
# 496796700
Open a virtual dataset; how can I get the dict() of references from .lat?
import virtualizarr
# Open the remote OISST NetCDF as a virtual (reference-backed) dataset.
oisst = virtualizarr.open_virtual_dataset("https://www.ncei.noaa.gov/data/sea-surface-temperature-optimum-interpolation/v2.1/access/avhrr/198109/oisst-avhrr-v02r01.19810901.nc")
# Inspect the latitude coordinate; its data is a ManifestArray (see the
# REPL output reproduced below), not an in-memory numpy array.
oisst.lat
<xarray.DataArray 'lat' (lat: 720)> Size: 3kB
ManifestArray
NewerOlder