parcels <- function(address, distance = 200) {
  ## Hobart cadastral parcels from the LIST open data, a zipped shapefile read via GDAL's virtual filesystems
  parcel_dsn <- "/vsizip//vsicurl/https://listdata.thelist.tas.gov.au/opendata/data/LIST_PARCELS_HOBART.zip/list_parcels_hobart.shp"
  ## geocode the address (longlat)
  pt <- tidygeocoder::geo(address, quiet = TRUE)
  parcel_ds <- new(gdalraster::GDALVector, parcel_dsn)
  on.exit(parcel_ds$close(), add = TRUE)
  prj <- parcel_ds$getSpatialRef()
  ## transform the geocoded point into the parcel layer's CRS
  pp <- gdalraster::transform_xy(cbind(pt$long, pt$lat), srs_to = prj, srs_from = "EPSG:4326")
  ## expand by 'distance' (layer units) to an extent c(xmin, xmax, ymin, ymax)
  ex <- rep(pp, each = 2) + c(-1, 1, -1, 1) * distance
  ## bbox_to_wkt() expects c(xmin, ymin, xmax, ymax)
  wkt <- gdalraster::bbox_to_wkt(ex[c(1, 3, 2, 4)])
  ## assumed completion: spatial-filter the layer and return the matching features
  parcel_ds$setSpatialFilter(wkt)
  parcel_ds$fetch(-1)
}
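Usage might look like this (the address is illustrative only):

pc <- parcels("Salamanca Place, Hobart, Tasmania", distance = 250)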
In this thread it's actually not well defined: essentially, if a vertex is used twice it shouldn't be in the output. (But should we normalize on edges, on vertices, or on full shared boundaries?) Should islands stay in the set? (I don't think so.)
With silicate, the simplest case is:
library(silicate)
sc <- SC(polygon)  ## 'polygon' is your sf (or sp) polygon layer
library(dplyr)
sc$object_link_edge <- sc$object_link_edge |>
  group_by(edge_) |>
  filter(n() == 1) |>  ## assumed completion: keep edges used by only one object, i.e. drop shared boundaries
  ungroup()
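As a concrete check, a small sketch using silicate's built-in minimal_mesh (two polygons sharing one boundary); counting edge usage shows which edges are shared:

library(silicate)
library(dplyr)
x <- SC(minimal_mesh)
x$object_link_edge |>
  count(edge_) |>
  filter(n > 1)  ## the edge(s) used by both polygons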
Do you need 500 slightly overlapping Zarr datasets?
src <- "/vsicurl/https://projects.pawsey.org.au/idea-gebco-tif/GEBCO_2024.tif"
src <- "https://projects.pawsey.org.au/idea-gebco-tif/GEBCO_2024.tif"
library(purrr) ## purrr CRAN
library(mirai) ## mirai CRAN
if (!file.exists(basename(src))) {
curl::curl_download(src, basename(src)) ## curl CRAN
}
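One global GEBCO COG can stand in for all of those subsets; here is a minimal sketch of a windowed read straight from the streaming source (the window offsets and sizes are arbitrary):

ds <- new(gdalraster::GDALRaster, "/vsicurl/https://projects.pawsey.org.au/idea-gebco-tif/GEBCO_2024.tif")
## read a 1024x1024 window at offset (0, 0), decimated to 256x256 on output
v <- ds$read(band = 1, xoff = 0, yoff = 0, xsize = 1024, ysize = 1024,
             out_xsize = 256, out_ysize = 256)
ds$close()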
docker run --rm -ti ubuntu
apt update
apt install -y curl
curl -fsSL https://pixi.sh/install.sh | sh
## the installer puts pixi in ~/.pixi/bin; start a new shell or put it on PATH
export PATH="$HOME/.pixi/bin:$PATH"
pixi init example && cd example
#pixi add pkg-config
#pixi add gdal
## pixi add ... r r-sf r-terra  ## etc; note that GDAL here is very recent, which works for terra but not for sf (and there's no sign of gdalraster)
## note the %s embedded in the URL below; replace it with your signed query string ('AWSAccessKeyId=AKI...')
"/vsizip/{/vsicurl/https://prod-is-usgs-sb-prod-content.s3.amazonaws.com/6810c1a4d4be022940554075/Annual_NLCD_LndCov_2024_CU_C1V1.zip?%s}/Annual_NLCD_LndCov_2024_CU_C1V1.tif"
https://bsky.app/profile/mdsumner.bsky.social/post/3luj4y4apos2k
library(sooty) ## remotes::install_github("mdsumner/sooty") ## for (copies of) the NetCDF sources
library(sds) ## remotes::install_github("hypertidy/sds") ## for palettized image urls
library(stringr)
library(dplyr)
https://bsky.app/profile/mdsumner.bsky.social/post/3lt4lhylxhs2v
info <- vapour::vapour_raster_info(dsn <- "/vsicurl/https://projects.pawsey.org.au/idea-gebco-tif/GEBCO_2024.tif")
## remotes::install_github("hypertidy/grout")
g <- grout::grout(info$dimension, info$extent, blocksize = info$block)
idx <- grout::tile_index(g)
options(parallelly.fork.enable = TRUE, future.rng.onMisuse = "ignore")
library(furrr); plan(multicore)
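A hedged sketch of the parallel tile reads this sets up; the idx column names (xmin, xmax, ymin, ymax, ncol, nrow) are assumed from grout's tile index:

tiles <- furrr::future_map(seq_len(nrow(idx)), function(i) {
  tl <- idx[i, ]
  vapour::gdal_raster_data(dsn,
                           target_ext = c(tl$xmin, tl$xmax, tl$ymin, tl$ymax),
                           target_dim = c(tl$ncol, tl$nrow))
})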
ERDDAP's /files/ service doesn't support range downloading, so GDAL can't stream from it:
export GDAL_DISABLE_READDIR_ON_OPEN=EMPTY_DIR
gdalinfo /vsicurl/https://coastwatch.pfeg.noaa.gov/erddap/files/jplMURSST41/20250701090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc
ERROR 1: 416: Range downloading not supported by this server: Error {
code=416;
message="Requested Range Not Satisfiable: REQUESTED_RANGE_NOT_SATISFIABLE: Don't try to connect to .nc or .hdf files on ERDDAP's /files/ system as if they were local files. It is horribly inefficient and often causes other problems. Instead: a) Use (OPeN)DAP client software to connect to ERDDAP's DAP services for this dataset (which have /griddap/ or /tabledap/ in the URL). That's what DAP is for. b) Or, use the dataset's Data Access Form to request a subset of data. c) Or, if you need the entire file or repeated access over a long period of time, use curl, wget, or your browser to download the entire file, then access the data from your local copy of the file.";
}
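A sketch of the workaround the message itself suggests: download the whole file and open it locally (the GHRSST MUR variable name 'analysed_sst' is assumed here).

u <- "https://coastwatch.pfeg.noaa.gov/erddap/files/jplMURSST41/20250701090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc"
f <- basename(u)
if (!file.exists(f)) curl::curl_download(u, f)
## open the temperature variable via GDAL's NetCDF subdataset syntax
vapour::vapour_raster_info(sprintf("NETCDF:\"%s\":analysed_sst", f))$dimension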
This works to load, but we can't sel() it sensibly. Any ideas?
import virtualizarr
#virtualizarr.__version__
#'1.3.3.dev81+ga5d04d7'
from obstore.store import HTTPStore
from virtualizarr.parsers import HDFParser