def test_adapt_freq_add_dims(use_dask):
    """Check adapt_freq with a grouper that pools extra dims, then with a windowed dayofyear grouper."""
    time = pd.date_range("1990-01-01", "2020-12-31", freq="D")
    pr = xr.DataArray(
        np.random.randint(0, 100, size=(time.size, 3)),
        coords={"time": time, "lat": [0, 1, 2]},
        dims=("time", "lat"),
        attrs={"units": "mm d-1"},
    )
    if use_dask:
        pr = pr.chunk()

    # Grouper pooling "lat" together with the monthly grouping.
    group = Grouper("time.month", add_dims=["lat"])
    with xr.set_options(keep_attrs=True):
        sim = xr.where(pr < 20, pr / 20, pr)
        ref = xr.where(pr < 10, pr / 20, pr)

    sim_ad, pth, dP0 = adapt_freq(ref, sim, thresh="1 mm d-1", group=group)
    assert set(sim_ad.dims) == set(sim.dims)
    # "lat" was pooled by the grouper, so the threshold must not keep it.
    assert "lat" not in pth.dims

    # Same check with a day-of-year grouper using a 5-day window.
    group = Grouper("time.dayofyear", window=5)
    with xr.set_options(keep_attrs=True):
        sim = xr.where(pr < 20, pr / 20, pr)
        ref = xr.where(pr < 10, pr / 20, pr)

    sim_ad, pth, dP0 = adapt_freq(ref, sim, thresh="1 mm d-1", group=group)
    assert set(sim_ad.dims) == set(sim.dims)
def test_adapt_freq(use_dask):
    """Frequency adaptation with a monthly grouper: check corrected values, pth and dP0."""
    time = pd.date_range("1990-01-01", "2020-12-31", freq="D")
    pr = xr.DataArray(
        np.random.randint(0, 100, size=(time.size, 3)),
        coords={"time": time, "lat": [0, 1, 2]},
        dims=("time", "lat"),
        attrs={"units": "mm d-1"},
    )
    if use_dask:
        pr = pr.chunk({"lat": 1})

    group = Grouper("time.month")
    with xr.set_options(keep_attrs=True):
        prsim = xr.where(pr < 20, pr / 20, pr)
        prref = xr.where(pr < 10, pr / 20, pr)
    sim_ad, pth, dP0 = adapt_freq(prref, prsim, thresh="1 mm d-1", group=group)

    # Where the input is considered zero
    input_zeros = sim_ad.where(prsim <= 1)

    # The proportion of corrected values (time.size * 3 * 0.2 is the
    # theoretical number of values under 1 in prsim).
    dP0_out = (input_zeros > 1).sum() / (time.size * 3 * 0.2)
    np.testing.assert_allclose(dP0_out, 0.5, atol=0.1)

    # Corrected values must have been generated in the range ]1, 20 + tol[.
    corrected = (
        input_zeros.where(input_zeros > 1)
        .stack(flat=["lat", "time"])
        .reset_index("flat")
        .dropna("flat")
    )
    assert ((corrected < 20.1) & (corrected > 1)).all()

    # Non-corrected values are untouched.
    # A 0.5 tolerance is added because of randomness.
    xr.testing.assert_equal(
        sim_ad.where(prsim > 20.1),
        prsim.where(prsim > 20.5).transpose("lat", "time"),
    )

    # pth and dP0 should be close to the values used to build the data.
    np.testing.assert_allclose(pth, 20, rtol=0.05)
    np.testing.assert_allclose(dP0, 0.5, atol=0.14)
    assert sim_ad.units == "mm d-1"
    assert sim_ad.attrs["references"].startswith("Themeßl")
    assert pth.units == "mm d-1"
def adapt_freq_graph():
    """
    Create a graphic with the additive adjustment factors estimated after
    applying the adapt_freq method.
    """
    n = 10000
    x = tu.series(synth_rainfall(2, 2, wet_freq=0.25, size=n), "pr")  # sim
    y = tu.series(synth_rainfall(2, 2, wet_freq=0.5, size=n), "pr")  # ref
    xp = adapt_freq(x, y, thresh=0).sim_ad

    fig, (ax1, ax2) = plt.subplots(2, 1)

    # Sorted views of each series (quantile-like curves).
    sx = x.sortby(x)
    sy = y.sortby(y)
    sxp = xp.sortby(xp)

    # Original and corrected series
    ax1.plot(sx.values, color="blue", lw=1.5, label="x : sim")
    ax1.plot(sxp.values, color="pink", label="xp : sim corrected")
    ax1.plot(sy.values, color="k", label="y : ref")
    ax1.legend()

    # Compute qm factors, with and without frequency adaptation.
    qm_add = QuantileDeltaMapping(kind="+", group="time").train(y, x).ds
    qm_mul = QuantileDeltaMapping(kind="*", group="time").train(y, x).ds
    qm_add_p = QuantileDeltaMapping(kind="+", group="time").train(y, xp).ds
    qm_mul_p = QuantileDeltaMapping(kind="*", group="time").train(y, xp).ds

    qm_add.cf.plot(ax=ax2, color="cyan", ls="--", label="+: y-x")
    qm_add_p.cf.plot(ax=ax2, color="cyan", label="+: y-xp")
    qm_mul.cf.plot(ax=ax2, color="brown", ls="--", label="*: y/x")
    qm_mul_p.cf.plot(ax=ax2, color="brown", label="*: y/xp")
    ax2.legend(loc="upper left", frameon=False)

    return fig