# Assumed imports for this test section (names match xclim's sdba utilities;
# adjust if the surrounding module already provides them).
import numpy as np
import pandas as pd
import xarray as xr

from xclim.sdba import nbutils as nbu
from xclim.sdba import utils as u
from xclim.sdba.base import Grouper


def test_interp_on_quantiles_monthly():
    t = xr.cftime_range("2000-01-01", "2030-12-31", freq="D", calendar="noleap")
    ref = xr.DataArray(
        (
            -20 * np.cos(2 * np.pi * t.dayofyear / 365)
            + 2 * np.random.random_sample((t.size,))
            + 273.15
            + 0.1 * (t - t[0]).days / 365
        ),  # "warming" of 1 K per decade
        dims=("time",),
        coords={"time": t},
        attrs={"units": "K"},
    )
    sim = xr.DataArray(
        (
            -18 * np.cos(2 * np.pi * t.dayofyear / 365)
            + 2 * np.random.random_sample((t.size,))
            + 273.15
            + 0.11 * (t - t[0]).days / 365
        ),  # "warming" of 1.1 K per decade
        dims=("time",),
        coords={"time": t},
        attrs={"units": "K"},
    )
    ref = ref.sel(time=slice(None, "2015-01-01"))
    hist = sim.sel(time=slice(None, "2015-01-01"))

    group = Grouper("time.month")
    quantiles = u.equally_spaced_nodes(15, eps=1e-6)
    ref_q = group.apply(nbu.quantile, ref, main_only=True, q=quantiles)
    hist_q = group.apply(nbu.quantile, hist, main_only=True, q=quantiles)
    af = u.get_correction(hist_q, ref_q, "+")

    for interp in ["nearest", "linear", "cubic"]:
        afi = u.interp_on_quantiles(
            sim, hist_q, af, group="time.month", method=interp, extrapolation="constant"
        )
        assert afi.isnull().sum("time") == 0, interp


def test_interp_on_quantiles(shape, group, method):
    group = Grouper(group)
    raw = np.random.random_sample(shape)  # [0, 1]
    t = pd.date_range("2000-01-01", periods=shape[0], freq="D")
    # obs : [9, 11]
    obs = xr.DataArray(
        raw * 2 + 9, dims=("time", "lat", "lon")[: len(shape)], coords={"time": t}
    )
    # sim : [9, 11.4] (x1.2 + 0.2)
    sim = xr.DataArray(
        raw * 2.4 + 9, dims=("time", "lat", "lon")[: len(shape)], coords={"time": t}
    )
    # fut : [9.02, 11.38] (x1.18 + 0.2), so that every point of fut is inside the range of sim
    fut_raw = raw * 2.36 + 9.02
    fut_raw[np.array([100, 300, 500, 700])] = 1000  # Points outside the sim range will be NaN
    fut = xr.DataArray(
        fut_raw, dims=("time", "lat", "lon")[: len(shape)], coords={"time": t}
    )

    q = np.linspace(0, 1, 11)
    xq = group.apply("quantile", sim, q=q).rename(quantile="quantiles")
    yq = group.apply("quantile", obs, q=q).rename(quantile="quantiles")

    fut_corr = u.interp_on_quantiles(fut, xq, yq, group=group, method=method).transpose(
        *("time", "lat", "lon")[: len(shape)]
    )

    if method == "nearest":
        np.testing.assert_allclose(fut_corr.values, obs.values, rtol=0.3)
        assert fut_corr.isnull().sum() == 0
    else:
        np.testing.assert_allclose(
            fut_corr.values, obs.where(fut != 1000).values, rtol=2e-3
        )
        xr.testing.assert_equal(fut_corr.isnull(), fut == 1000)


def test_grouper_apply(tas_series, use_dask, group, n):
    tas1 = tas_series(np.arange(366), start="2000-01-01")
    tas0 = tas_series(np.zeros(366), start="2000-01-01")
    tas = xr.concat((tas1, tas0), dim="lat")

    grouper = Grouper(group)
    if not group.startswith("time"):
        tas = tas.rename(time=grouper.dim)
        tas1 = tas1.rename(time=grouper.dim)
        tas0 = tas0.rename(time=grouper.dim)

    if use_dask:
        tas = tas.chunk({"lat": 1, grouper.dim: -1})
        tas0 = tas0.chunk({grouper.dim: -1})
        tas1 = tas1.chunk({grouper.dim: -1})

    # Normal monthly mean
    out_mean = grouper.apply("mean", tas)
    if grouper.prop:
        exp = tas.groupby(group).mean()
    else:
        exp = tas.mean(dim=grouper.dim)
    np.testing.assert_array_equal(out_mean, exp)

    # With an additional dimension included
    grouper = Grouper(group, add_dims=["lat"])
    out = grouper.apply("mean", tas)
    assert out.ndim == int(grouper.prop is not None)
    np.testing.assert_array_equal(out, exp.mean("lat"))
    assert out.attrs["group"] == group
    assert out.attrs["group_compute_dims"] == [grouper.dim, "lat"]
    assert out.attrs["group_window"] == 1

    # Additional dims, but main_only
    out = grouper.apply("mean", tas, main_only=True)
    np.testing.assert_array_equal(out, out_mean)

    # With window
    win_grouper = Grouper(group, window=5)
    out = win_grouper.apply("mean", tas)
    rolld = tas.rolling({win_grouper.dim: 5}, center=True).construct(window_dim="window")
    if grouper.prop:
        exp = rolld.groupby(group).mean(dim=[win_grouper.dim, "window"])
    else:
        exp = rolld.mean(dim=[grouper.dim, "window"])
    np.testing.assert_array_equal(out, exp)

    # With a function that does not reduce the grouped dim
    grouper = Grouper(group)

    def normalize(grp, dim):
        return grp / grp.mean(dim=dim)

    normed = grouper.apply(normalize, tas)
    assert normed.shape == tas.shape
    if use_dask:
        assert normed.chunks == ((1, 1), (366,))

    # With window + non-reducing function
    out = win_grouper.apply(normalize, tas)
    assert out.shape == tas.shape

    # Mixed output
    def mixed_reduce(grdds, dim=None):
        tas1 = grdds.tas1.mean(dim=dim)
        tas0 = grdds.tas0 / grdds.tas0.mean(dim=dim)
        tas1.attrs["_group_apply_reshape"] = True
        return xr.Dataset(data_vars={"tas1_mean": tas1, "norm_tas0": tas0})

    out = grouper.apply(mixed_reduce, {"tas1": tas1, "tas0": tas0})
    if grouper.prop:
        assert grouper.prop not in out.norm_tas0.dims
        assert grouper.prop in out.tas1_mean.dims
    if use_dask:
        assert out.tas1_mean.chunks == (((n,),) if grouper.prop else tuple())
        assert out.norm_tas0.chunks == ((366,),)

    # Mixed input
    if grouper.prop:

        def normalize_from_precomputed(grpds, dim=None):
            return (grpds.tas / grpds.tas1_mean).mean(dim=dim)

        out = grouper.apply(
            normalize_from_precomputed, {"tas": tas, "tas1_mean": out.tas1_mean}
        ).isel(lat=0)
        exp = normed.groupby(group).mean().isel(lat=0)
        assert grouper.prop in out.dims
        np.testing.assert_array_equal(out, exp)