@rabernat
Last active January 27, 2017 08:55
Aggregation routines for coarse graining
# implement my own aggregation (coarse-graining) downsampling function
from functools import reduce
from itertools import product

import numpy as np
import xarray as xr


def aggregate(data, factor=2, mean=True):
    ndim = data.ndim
    shape = data.shape

    # promote a single value to a per-dimension list of factors
    if isinstance(factor, int):
        factors = ndim * [factor]
    else:
        factors = list(factor)
    # check we have the right number of dimensions
    assert len(factors) == ndim
    # make sure each dimension divides evenly into blocks
    for s, fac in zip(shape, factors):
        assert s % fac == 0

    # build one strided view per offset within a coarse-graining block;
    # summing over these views sums within each block
    slices = []
    for start_indices in product(*[range(f) for f in factors]):
        slices.append(
            tuple(slice(sidx, s, fac)
                  for sidx, s, fac in zip(start_indices, shape, factors))
        )

    # how would we generalize to other reduce functions?
    result = reduce(np.add, [data[sl] for sl in slices])
    if mean:
        result = result / len(slices)
    return result
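

# The comment above asks how to generalize to other reduce functions. One
# possible sketch (not part of the original gist; the name `aggregate_with`
# is hypothetical): stack the strided block views along a new leading axis
# and apply any numpy reduction that accepts an `axis` argument
# (np.mean, np.max, np.min, ...).
def aggregate_with(data, func, factor=2):
    factors = data.ndim * [factor] if isinstance(factor, int) else list(factor)
    assert all(s % f == 0 for s, f in zip(data.shape, factors))
    views = [
        data[tuple(slice(o, s, f)
                   for o, s, f in zip(offsets, data.shape, factors))]
        for offsets in product(*[range(f) for f in factors])
    ]
    # e.g. aggregate_with(a, np.max) returns the per-block maximum
    return func(np.stack(views), axis=0)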
# wrap in xarray, coarsening the dimension coordinates the same way
def xr_aggregate(data, **kwargs):
    assert isinstance(data, xr.DataArray)
    newdata = aggregate(data.values, **kwargs)
    # note: this assumes a scalar factor, so each 1-D coordinate can be
    # aggregated with the same call
    coords = {k: aggregate(data[k].values, **kwargs) for k in data.dims}
    return xr.DataArray(newdata, dims=data.dims, coords=coords, attrs=data.attrs)
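

# A quick smoke test (not part of the original gist), assuming a factor
# that evenly divides every dimension:
if __name__ == "__main__":
    a = np.arange(16, dtype=float).reshape(4, 4)
    # 2x2 block means of 0..15: [[2.5, 4.5], [10.5, 12.5]]
    print(aggregate(a, factor=2))

    da = xr.DataArray(
        a,
        dims=["y", "x"],
        coords={"y": np.arange(4.0), "x": np.arange(4.0)},
    )
    # dimension coordinates are block-averaged too: y -> [0.5, 2.5]
    print(xr_aggregate(da, factor=2))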