# from https://thredds.nci.org.au/thredds/catalog/gb6/BRAN/BRAN2020/daily/catalog.html?dataset=gb6/BRAN/BRAN2020/daily/ocean_temp_1995_06.nc
library(tidyverse)
library(terra)
nci_url <- "https://thredds.nci.org.au/thredds/fileServer/gb6/BRAN/BRAN2020/daily/ocean_temp_1995_06.nc"
vsi <- function(url) glue::glue("/vsicurl/{url}")
sf::gdal_utils("info", vsi(nci_url))
# ncdf4::nc_open()  # incomplete: nc_open() needs a file path (download the file first, or use an OPeNDAP endpoint)
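## Sketch: read one variable straight from the remote file with terra via GDAL's
## /vsicurl and the NetCDF subdataset syntax. The variable name "temp" is an
## assumption here -- check the gdalinfo output above for the real subdataset names.
bran_temp <- rast(glue::glue('NetCDF:"{vsi(nci_url)}":temp'))
bran_temp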
# SST data of Antarctica
# can we start at a station
# can we scale that up
# SST from now (ish) until 1st Sept 1981 every day
# template the year, month, actual day
# always same format
sst_url <- "https://www.ncei.noaa.gov/data/sea-surface-temperature-optimum-interpolation/v2.1/access/avhrr/202408/oisst-avhrr-v02r01.20240801.nc"
curl::curl_download(sst_url, basename(sst_url))
sst_file <- "oisst-avhrr-v02r01.20240801.nc"
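## Sketch: template the OISST URL from a date (year-month directory plus yyyymmdd
## in the file name), following the pattern of sst_url above.
oisst_url <- function(date) {
  glue::glue(
    "https://www.ncei.noaa.gov/data/sea-surface-temperature-optimum-interpolation/",
    "v2.1/access/avhrr/{format(date, '%Y%m')}/oisst-avhrr-v02r01.{format(date, '%Y%m%d')}.nc"
  )
}
oisst_url(as.Date("2024-08-01"))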
subdata <- function(filename,
                    varname = NULL,
                    driver = "NetCDF") {
  stopifnot(!is.null(varname))
  glue::glue('{driver}:"{filename}":{varname}')
}
subdata(sst_file, "sst")
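## subdata() builds a GDAL subdataset name, e.g.
## NetCDF:"oisst-avhrr-v02r01.20240801.nc":sst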
sst_aug <- subdata(sst_file, "sst") |> rast()
plot(sst_aug)
sst_aug <- rast(subdata(vsi(sst_url), "sst"))  # subdata() requires the varname
date <- as.Date("2024-01-01")
aug <- rast("~/Downloads/oisst-avhrr-v02r01.20240801.nc")
aug
"20240801"  # the date component of the file name
underway_url <- "https://github.com/mdsumner/nuyina.underway/raw/main/data-raw/nuyina_underway.parquet"
curl::curl_download(underway_url, basename(underway_url))
data_underway <- nanoparquet::read_parquet(
  basename(underway_url)
)
data_underway$datetime - 1  # POSIXct arithmetic is in seconds
library(tidyverse)
past_week_of_data <- data_underway |>
  filter(between(as.Date(datetime), date - 7, date + 7))
past_week_of_data
data_underway$verysouth <- data_underway$latitude < -60
data_underway$segment <- c(0, cumsum(abs(diff(data_underway$verysouth))))
data_underway$segment[!data_underway$verysouth] <- NA
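## cumsum(abs(diff(x))) numbers each run of consecutive equal values, so each
## southerly excursion gets its own segment id; e.g. for x = c(FALSE, TRUE, TRUE, FALSE, TRUE),
## c(0, cumsum(abs(diff(x)))) is 0 1 1 2 3 (and non-south rows are then set to NA)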
plot(data_underway$datetime, data_underway$latitude, pch = ".", col = sample(palr::d_pal(data_underway$segment)))
plot(data_underway$datetime, data_underway$sea_water_temperature, pch = ".")
library(duckdb)
## dbConnect() takes a database path, not a parquet file: connect in-memory and
## query the parquet file directly (see below)
con <- dbConnect(duckdb())
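## Sketch: query the parquet lazily with duckdb's read_parquet() instead of
## reading it all into R (column names assume the same schema as data_underway above).
dbGetQuery(
  con,
  "SELECT datetime, longitude, latitude, sea_water_temperature
   FROM read_parquet('nuyina_underway.parquet')
   LIMIT 10"
)
dbDisconnect(con, shutdown = TRUE)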
Code as a start to comparing satellite to underway data (it will be interesting to look at how the pitch/roll and temperature seen by the ship compare, minute by minute, to the daily remote sensing data).
underway_url <- "https://github.com/mdsumner/nuyina.underway/raw/main/data-raw/nuyina_underway.parquet"
nuy <- arrow::read_parquet(underway_url) |> dplyr::filter(as.Date(datetime) > "2022-09-01")
tst <- nuy$latitude < -55
nuy$seg <- c(0, (cumsum(abs(diff(tst)))))
nuy$seg[!tst] <- NA
nuy$seg <- as.integer(factor(nuy$seg))
library(ggplot2)
ggplot(nuy |> dplyr::filter(!is.na(seg)), aes(datetime, latitude, col = factor(seg))) + geom_point(pch = ".")
(dt <- as.Date(range(nuy$datetime[nuy$seg == 1], na.rm = T)) + c(0, 1))
library(dplyr)
voyage <- nuy |> dplyr::filter(between(as.Date(datetime), dt[1], dt[2]))
## raadtools lookup (here Mike should cache this lookup for every Nuyina point)
library(raadtools)
voyage$iceconc <- extract(readice, voyage |> dplyr::select(longitude, latitude, datetime), setNA = T)
voyage$oisst <- extract(readsst, voyage |> dplyr::select(longitude, latitude, datetime))
voyage$ghrsst <- extract(readghrsst, voyage |> dplyr::select(longitude, latitude, datetime))
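## raadtools extract(<read function>, <data.frame of lon, lat, datetime>) does a
## space-time lookup, matching each underway point to the grid at its date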
par(mfrow = c(2, 1))
plot(voyage$datetime, voyage$ghrsst)
lines(voyage$datetime, voyage$sea_water_temperature)
plot(voyage$datetime, voyage$oisst)
lines(voyage$datetime, voyage$sea_water_temperature)
ggplot(voyage, aes(datetime, iceconc)) + geom_path()
dateice <- voyage$datetime[min(which(voyage$iceconc > 0))]
dateice
sst_url <- glue::glue("https://www.ncei.noaa.gov/data/sea-surface-temperature-optimum-interpolation/v2.1/access/avhrr/{format(dateice, '%Y%m')}/oisst-avhrr-v02r01.{format(dateice, '%Y%m%d')}.nc")
sst_file <- basename(sst_url)
if (!file.exists(sst_file)) curl::curl_download(sst_url, sst_file)
subdata <- function(filename,
varname = NULL,
driver = "NetCDF") {
stopifnot(!is.null(varname))
glue::glue('{driver}:"{filename}":{varname}')
}
#vsi <- function(url) glue::glue("/vsicurl/{url}")
library(terra)
sstraster <- subdata(sst_file, "sst") |> rast()
iceraster <- subdata(sst_file, "ice") |> rast()
plot(voyage$longitude, voyage$latitude, asp = 1/cos(mean(voyage$latitude) * pi/180), type = "n")
plot(iceraster, add = TRUE)
lines(voyage$longitude, voyage$latitude)
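## mark the ship's position at the time of the first non-zero ice concentration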
points(cbind(voyage$longitude, voyage$latitude)[findInterval(as.POSIXct(dateice, tz = "UTC"), voyage$datetime),, drop = F ], pch = "X", col = "hotpink")
plot(voyage$datetime, voyage$sea_water_temperature, pch = ".")
abline(v = dateice, h = -1.8)
plot(voyage$datetime, pmax(voyage$platform_pitch_fore_up, voyage$platform_heave_down, voyage$platform_roll_starboard_down), pch = ".")
abline(v = (dateice) + c(-1, 0, 1) * 24 * 3600)
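## Sketch: a rough minute-by-minute ship-vs-satellite comparison using the columns
## extracted above (check units first -- e.g. ghrsst may be reported in Kelvin).
voyage |>
  dplyr::mutate(diff_oisst = sea_water_temperature - oisst,
                diff_ghrsst = sea_water_temperature - ghrsst) |>
  dplyr::summarise(dplyr::across(c(diff_oisst, diff_ghrsst),
                                 list(mean = ~ mean(.x, na.rm = TRUE),
                                      sd   = ~ sd(.x, na.rm = TRUE))))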
Thanks, @mdsumner!
I'm getting this error for raadtools, do I need to install something? Or is this perhaps a GDAL thing?
> library(raadtools)
Loading required package: raster
Loading required package: sp
Attaching package: ‘raster’
The following object is masked from ‘package:dplyr’:
select
> voyage$iceconc <- extract(readice, voyage |> dplyr::select(longitude, latitude, datetime), setNA = T)
no files found in the 'raadfiles.filename.database'
and no root directories found.
Error in .find_files_generic(pattern) : no files found
Oh sorry, you can't install raadtools - you have to be on our system(s) for now, and I meant to set that up for you. I'm just keeping notes here, but I'll make you an account and email the details :)
So this works (on my docker image)
and this (I don't know how to do this lazily; open_dataset() does not work on the URL, but it does on the downloaded parquet)
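A hedged sketch of one lazy option (not the snippet referred to above), assuming duckdb's httpfs extension can follow the GitHub redirect:
library(duckdb)
con <- dbConnect(duckdb())
dbExecute(con, "INSTALL httpfs")
dbExecute(con, "LOAD httpfs")
dbGetQuery(con, "SELECT count(*) FROM read_parquet('https://github.com/mdsumner/nuyina.underway/raw/main/data-raw/nuyina_underway.parquet')")
dbDisconnect(con, shutdown = TRUE)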
This also works, so I think it's down to the GDAL version.
also this
a very complex model extract
looks like this to GDAL
pick one variable (subdataset)