def test_equally_spaced_nodes():
    # n=5 interior nodes plus the two eps endpoints -> 7 nodes in total.
    x = u.equally_spaced_nodes(5, eps=1e-4)
    assert len(x) == 7
    d = np.diff(x)
    # The eps endpoint sits half an interior spacing away from the first node.
    np.testing.assert_almost_equal(d[0], d[1] / 2, 3)

    # A single node with no eps endpoints lands on the median.
    x = u.equally_spaced_nodes(1)
    np.testing.assert_almost_equal(x[0], 0.5)
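# Illustrative sketch (not from the original suite): spells out the node
# layout the test above relies on, assuming `u` is xclim.sdba.utils as used
# throughout this file. The expected values are inferred from the assertions
# in test_equally_spaced_nodes.
def example_equally_spaced_nodes():
    nodes = u.equally_spaced_nodes(5, eps=1e-4)
    # Expected layout: five mid-quantile nodes bracketed by the eps endpoints.
    np.testing.assert_allclose(
        nodes, [1e-4, 0.1, 0.3, 0.5, 0.7, 0.9, 1 - 1e-4], atol=1e-8
    )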
def train_quantiledeltamapping(
    reference, historical, variable, kind, quantiles_n=100, window_n=31
):
    """Train quantile delta mapping.

    Parameters
    ----------
    reference : xr.Dataset
        Dataset to use as model reference. Target variable must have a units
        attribute.
    historical : xr.Dataset
        Dataset to use as historical simulation. Target variable must have a
        units attribute.
    variable : str
        Name of target variable to extract from `historical` and `reference`.
    kind : {"+", "*"}
        Kind of variable. Used for QDM scaling.
    quantiles_n : int, optional
        Number of quantiles for QDM.
    window_n : int, optional
        Centered window size for day-of-year grouping.

    Returns
    -------
    xclim.sdba.adjustment.QuantileDeltaMapping
    """
    qdm = sdba.adjustment.QuantileDeltaMapping.train(
        ref=reference[variable],
        hist=historical[variable],
        kind=str(kind),
        group=sdba.Grouper("time.dayofyear", window=int(window_n)),
        nquantiles=equally_spaced_nodes(int(quantiles_n), eps=None),
    )
    return qdm
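# Illustrative usage sketch (not part of the original module): builds two
# minimal precipitation-like Datasets and trains a multiplicative QDM on
# them. The synthetic data and the variable name "pr" are assumptions made
# for the example; only the call signature comes from
# train_quantiledeltamapping above.
def example_train_quantiledeltamapping():
    import numpy as np
    import pandas as pd
    import xarray as xr

    t = pd.date_range("1990-01-01", "2009-12-31", freq="D")
    pr = xr.DataArray(
        np.random.gamma(2.0, 1.0, t.size),
        dims=("time",),
        coords={"time": t},
        attrs={"units": "mm d-1"},  # a units attribute is required
    )
    reference = xr.Dataset({"pr": pr})
    # Arithmetic drops attrs by default, so re-attach the required units.
    historical = xr.Dataset({"pr": (pr * 1.1).assign_attrs(units="mm d-1")})
    # "*" is the usual kind for ratio variables such as precipitation.
    return train_quantiledeltamapping(reference, historical, "pr", "*")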
def cannon_2015_rvs(n, random=True):
    # Frozen distributions from Cannon et al. (2015).
    fd = cannon_2015_dist()
    if random:
        # Random variates drawn from each distribution.
        r = [d.rvs(n) for d in fd]
    else:
        # Deterministic variates: evaluate each ppf on equally spaced quantiles.
        u = equally_spaced_nodes(n, None)
        r = [d.ppf(u) for d in fd]
    return map(lambda x: series(x, "pr"), r)
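# Illustrative sketch (not part of the original tests): how the generator
# above is typically consumed. The three-way unpacking assumes
# cannon_2015_dist returns the (ref, hist, sim) distribution triple from
# Cannon et al. (2015); adjust if the fixture differs.
def example_cannon_2015_rvs():
    ref, hist, sim = cannon_2015_rvs(10000, random=False)
    # Each element is a "pr" series built from one distribution's ppf.
    return ref, hist, sim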
def _make_qm(a, *, group="time.month"): a = np.atleast_2d(a) n, m = a.shape mo = range(1, m + 1) if group.prop: q = equally_spaced_nodes(n, None) dims = ("quantiles", group.prop) coords = {"quantiles": q, "month": mo} else: q = equally_spaced_nodes(m, None) dims = ("quantiles", ) coords = {"quantiles": q} a = a[0] return xr.DataArray( a, dims=dims, coords=coords, attrs={ "group": group, "window": 1 }, )
def test_interp_on_quantiles_monthly():
    t = xr.cftime_range("2000-01-01", "2030-12-31", freq="D", calendar="noleap")
    ref = xr.DataArray(
        (
            -20 * np.cos(2 * np.pi * t.dayofyear / 365)
            + 2 * np.random.random_sample((t.size,))
            + 273.15
            + 0.1 * (t - t[0]).days / 365  # "warming" of 1 K per decade
        ),
        dims=("time",),
        coords={"time": t},
        attrs={"units": "K"},
    )
    sim = xr.DataArray(
        (
            -18 * np.cos(2 * np.pi * t.dayofyear / 365)
            + 2 * np.random.random_sample((t.size,))
            + 273.15
            + 0.11 * (t - t[0]).days / 365  # "warming" of 1.1 K per decade
        ),
        dims=("time",),
        coords={"time": t},
        attrs={"units": "K"},
    )
    ref = ref.sel(time=slice(None, "2015-01-01"))
    hist = sim.sel(time=slice(None, "2015-01-01"))

    group = Grouper("time.month")
    quantiles = u.equally_spaced_nodes(15, eps=1e-6)
    ref_q = group.apply(nbu.quantile, ref, main_only=True, q=quantiles)
    hist_q = group.apply(nbu.quantile, hist, main_only=True, q=quantiles)
    af = u.get_correction(hist_q, ref_q, "+")

    for interp in ["nearest", "linear", "cubic"]:
        afi = u.interp_on_quantiles(
            sim,
            hist_q,
            af,
            group="time.month",
            method=interp,
            extrapolation="constant",
        )
        assert afi.isnull().sum("time") == 0, interp
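# Illustrative sketch (not part of the original tests): a minimal numeric
# check of the additive correction used above, assuming u.get_correction
# computes ref_q - hist_q for kind "+".
def example_additive_correction():
    hist_q = xr.DataArray(
        [1.0, 2.0, 3.0],
        dims=("quantiles",),
        coords={"quantiles": [0.25, 0.5, 0.75]},
    )
    ref_q = hist_q + 0.5
    af = u.get_correction(hist_q, ref_q, "+")
    # The adjustment factor should be ~0.5 at every quantile node.
    np.testing.assert_allclose(af, 0.5)
    return af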