def test_array_coverage(empty_ds):
    """Coverage of a DataArray counts non-NaN cells along two dimensions."""
    arr = empty_ds["CO2"]
    arr[:] = np.nan
    arr.name = None
    # Fill exactly two cells, so only COL/2001 and COL/2002 count as covered.
    arr.pr.loc[{"time": "2001", "area": "COL"}] = 12.0 * ureg("Gg CO2 / year")
    arr.pr.loc[{"time": "2002", "area": "COL"}] = 13.0 * ureg("Gg CO2 / year")

    areas = arr["area (ISO3)"]
    times = arr["time"]
    want = pd.DataFrame(
        data=np.zeros((len(areas), len(times)), dtype=np.int32),
        index=areas.values,
        columns=times.to_index(),
    )
    want.index.name = "area (ISO3)"
    want.columns.name = "time"
    want.loc["COL", "2001"] = 1
    want.loc["COL", "2002"] = 1

    pd.testing.assert_frame_equal(
        want.astype(np.int32), arr.pr.coverage("area", "time").astype(np.int32)
    )
    # Swapping the dimension order transposes the coverage table.
    pd.testing.assert_frame_equal(
        want.T.astype(np.int32),
        arr.pr.coverage("time", "area (ISO3)").astype(np.int32),
    )
def partly_nan_ds(self, empty_ds):
    """Dataset with CO2, SF6 and CH4 filled with ones, except CH4 is NaN for COL."""
    for gas in ("CO2", "SF6", "CH4"):
        empty_ds[gas][:] = 1 * ureg(f"Gg {gas} / year")
    empty_ds["CH4"].loc[{"area (ISO3)": "COL"}] = np.nan * ureg("Gg CH4 / year")
    return empty_ds
def partly_filled_ds(self, partly_nan_ds):
    """partly_nan_ds plus a KYOTOGHG basket, NaN for all of COL and for BOL in 2020."""
    basket = partly_nan_ds["KYOTOGHG (AR4GWP100)"]
    basket[:] = 1 * ureg("Gg CO2 / year")
    missing = np.nan * ureg("Gg CO2 / year")
    basket.loc[{"area (ISO3)": "COL"}] = missing
    basket.loc[{"area (ISO3)": "BOL", "time": "2020"}] = missing
    return partly_nan_ds
def test_contents_sum_skipna(self, partly_nan_ds):
    """With skipna=False, a NaN in any basket content makes the basket sum NaN."""
    result = partly_nan_ds.pr.gas_basket_contents_sum(
        basket="KYOTOGHG (AR4GWP100)",
        basket_contents=["CO2", "SF6", "CH4"],
        skipna=False,
    )
    want = partly_nan_ds["KYOTOGHG (AR4GWP100)"].copy()
    want[:] = (1 + self.sf6 + self.ch4) * ureg("Gg CO2 / year")
    # NaNs not skipped: CH4 is NaN for COL, so the whole sum is NaN there.
    want.loc[{"area (ISO3)": "COL"}] = np.nan * ureg("Gg CO2 / year")
    assert_equal(result, want, equal_nan=True)
def test_contents_sum_default(self, partly_nan_ds):
    """By default, NaN contents are treated as zero when summing the basket."""
    result = partly_nan_ds.pr.gas_basket_contents_sum(
        basket="KYOTOGHG (AR4GWP100)",
        basket_contents=["CO2", "SF6", "CH4"],
    )
    want = partly_nan_ds["KYOTOGHG (AR4GWP100)"].copy()
    want[:] = (1 + self.sf6 + self.ch4) * ureg("Gg CO2 / year")
    # NaN counted as 0: for COL the NaN CH4 contribution simply drops out.
    want.loc[{"area (ISO3)": "COL"}] = (1 + self.sf6) * ureg("Gg CO2 / year")
    assert_equal(result, want)
def test_fill_na_from_contents_skipna(self, partly_filled_ds):
    """With skipna=False, basket NaNs are only filled where all contents are non-NaN."""
    result = partly_filled_ds.pr.fill_na_gas_basket_from_contents(
        basket="KYOTOGHG (AR4GWP100)",
        basket_contents=["CO2", "SF6", "CH4"],
        skipna=False,
    )
    want = partly_filled_ds["KYOTOGHG (AR4GWP100)"].copy()
    # COL stays NaN because CH4 is NaN there, so the contents sum is NaN.
    want.loc[{"area (ISO3)": "COL"}] = np.nan * ureg("Gg CO2 / year")
    # BOL/2020 is filled from the fully-available contents.
    want.loc[{"area (ISO3)": "BOL", "time": "2020"}] = (
        1 + self.sf6 + self.ch4
    ) * ureg("Gg CO2 / year")
    assert_equal(result, want, equal_nan=True)
def test_fill_na_from_contents_sel(self, partly_filled_ds):
    """A sel restricts filling to a subset; non-list selections are rejected."""
    result = partly_filled_ds.pr.fill_na_gas_basket_from_contents(
        basket="KYOTOGHG (AR4GWP100)",
        basket_contents=["CO2", "SF6", "CH4"],
        sel={"area (ISO3)": ["BOL"]},
        skipna_evaluation_dims=("time",),
    )
    want = partly_filled_ds["KYOTOGHG (AR4GWP100)"].copy()
    # Only BOL is selected, so only the BOL/2020 hole gets filled; COL stays NaN.
    want.loc[{"area (ISO3)": "BOL", "time": "2020"}] = (
        1 + self.sf6 + self.ch4
    ) * ureg("Gg CO2 / year")
    assert_equal(result, want, equal_nan=True)

    # A scalar selection (dropping the dimension) is an error.
    with pytest.raises(
        ValueError,
        match="The dimension of the selection doesn't match the dimension",
    ):
        partly_filled_ds.pr.fill_na_gas_basket_from_contents(
            basket="KYOTOGHG (AR4GWP100)",
            basket_contents=["CO2", "SF6", "CH4"],
            sel={"area (ISO3)": "BOL"},
            skipna_evaluation_dims=("time",),
        )
def test_automatic_unit_conversion(
    self, da: xr.DataArray, ts: np.ndarray, co2, new
):
    """Values given in Mg are converted to the array's Gg-based unit on set."""
    result = da.pr.set(
        "area", "COL", ts * ureg("Mg CO2 / year"), existing="overwrite", **new
    )
    want = da
    # Mg -> Gg is a factor of 1e-3; broadcast ts over the trailing axis.
    want.loc[{"area (ISO3)": "COL"}] = 1e-3 * ts[..., np.newaxis] * co2
    assert_aligned_equal(result, want)
def test_contents_sum_skipna_evaluation_dims(self, partly_nan_ds):
    """NaNs are only skipped for series that are NaN along the whole evaluation dim."""
    # Add an isolated NaN (ARG, 2012) on top of the all-NaN COL CH4 series.
    partly_nan_ds["CH4"].loc[{"area (ISO3)": "ARG", "time": "2012"}] = (
        np.nan * ureg("Gg CH4 / year")
    )
    result = partly_nan_ds.pr.gas_basket_contents_sum(
        basket="KYOTOGHG (AR4GWP100)",
        basket_contents=["CO2", "SF6", "CH4"],
        skipna_evaluation_dims=("time",),
    )
    want = partly_nan_ds["KYOTOGHG (AR4GWP100)"].copy()
    want[:] = (1 + self.sf6 + self.ch4) * ureg("Gg CO2 / year")
    # NaN only skipped where all time points NaN: COL's CH4 is entirely NaN,
    # so it drops out of the sum there ...
    want.loc[{"area (ISO3)": "COL"}] = (1 + self.sf6) * ureg("Gg CO2 / year")
    # ... while the isolated ARG/2012 NaN propagates into the result.
    want.loc[{"area (ISO3)": "ARG", "time": "2012"}] = np.nan * ureg("Gg CO2 / year")
    assert_equal(result, want, equal_nan=True)
def test_downscale_timeseries(empty_ds):
    # Tests DataArray/Dataset .pr.downscale_timeseries: distribute the basket
    # timeseries "CAMB" onto its contents (COL, ARG, MEX, BOL) using the shares
    # from the years where the contents are known, interpolating in between.
    for key in empty_ds:
        empty_ds[key][:] = np.nan
    # Build a "CAMB" basket area by duplicating the BOL slice under a new label.
    t = empty_ds.loc[{"area (ISO3)": "BOL"}].copy()
    t["area (ISO3)"] = ["CAMB"]
    # here, the sum of COL, ARG, MEX, and BOL
    ds = xr.concat([empty_ds, t], dim="area (ISO3)")
    da: xr.DataArray = ds["CO2"]
    # Known content values: shares 1/1/1/3 (sum 6) in 2002, equal shares in 2012.
    da.loc[{
        "area (ISO3)": ["COL", "ARG", "MEX"],
        "time": "2002"
    }] = 1 * ureg("Gg CO2 / year")
    da.loc[{"area (ISO3)": "BOL", "time": "2002"}] = 3 * ureg("Gg CO2 / year")
    da.loc[{"area (ISO3)": "CAMB", "time": "2002"}] = 6 * ureg("Gg CO2 / year")
    da.loc[{
        "area (ISO3)": ["COL", "ARG", "MEX", "BOL"],
        "time": "2012"
    }] = 2 * ureg("Gg CO2 / year")
    # Basket timeseries over all 21 time points: 11 values of 6, two of 8,
    # then a linear ramp 8 -> 10 over the last 8 points.
    da.loc[{
        "area (ISO3)": "CAMB",
        "source": "RAND2020"
    }] = np.concatenate(
        [np.array([6] * 11), np.stack([8, 8]), np.linspace(8, 10, 8)]) * ureg("Gg CO2 / year")
    downscaled = da.pr.downscale_timeseries(
        dim="area (ISO3)",
        basket="CAMB",
        basket_contents=["COL", "ARG", "MEX", "BOL"])
    # Expected: shares interpolated between 2002 (1/6 resp. 3/6) and 2012 (2/8),
    # constant extrapolation of shares outside the known range.
    expected = da.copy()
    expected.loc[{
        "area (ISO3)": ["COL", "ARG", "MEX"],
        "source": "RAND2020"
    }] = np.broadcast_to(
        np.concatenate([
            np.array([1, 1]),
            np.linspace(1 / 6, 2 / 8, 11) * np.array([6] * 9 + [8] * 2),
            np.linspace(2, 2 * 10 / 8, 8),
        ]),
        (3, 21),
    ).T * ureg("Gg CO2 / year")
    expected.loc[{
        "area (ISO3)": "BOL",
        "source": "RAND2020"
    }] = np.concatenate([
        np.array([3, 3]),
        np.linspace(3 / 6, 2 / 8, 11) * np.array([6] * 9 + [8] * 2),
        np.linspace(2, 2 * 10 / 8, 8),
    ]) * ureg("Gg CO2 / year")
    # we need a higher atol, because downscale_timeseries actually does the
    # downscaling using a proper calendar while here we use a calendar where all years
    # have the same length.
    assert_equal(downscaled, expected, equal_nan=True, atol=0.01)
    # Sanity check: the downscaled contents sum back to the basket.
    allclose(
        downscaled.loc[{
            "area (ISO3)": "CAMB"
        }],
        downscaled.loc[{
            "area (ISO3)": ["COL", "ARG", "MEX", "BOL"]
        }].sum(dim="area (ISO3)"),
    )
    # The Dataset-level API gives the same result for the CO2 variable.
    downscaled_ds = ds.pr.downscale_timeseries(
        dim="area (ISO3)",
        basket="CAMB",
        basket_contents=["COL", "ARG", "MEX", "BOL"])
    assert_equal(downscaled_ds["CO2"], expected, equal_nan=True, atol=0.01)
    # Make 2002 inconsistent (contents sum to 5, basket says 6): both APIs
    # must refuse unless check_consistency=False is given.
    da.loc[{"area (ISO3)": "BOL", "time": "2002"}] = 2 * ureg("Gg CO2 / year")
    with pytest.raises(
            ValueError,
            match="To continue regardless, set check_consistency=False"):
        da.pr.downscale_timeseries(
            dim="area (ISO3)",
            basket="CAMB",
            basket_contents=["COL", "ARG", "MEX", "BOL"],
        )
    with pytest.raises(
            ValueError,
            match="To continue regardless, set check_consistency=False"):
        ds.pr.downscale_timeseries(
            dim="area (ISO3)",
            basket="CAMB",
            basket_contents=["COL", "ARG", "MEX", "BOL"],
        )
    # With check_consistency=False the inconsistent 2002 shares (out of 5) are used.
    downscaled = da.pr.downscale_timeseries(
        dim="area (ISO3)",
        basket="CAMB",
        basket_contents=["COL", "ARG", "MEX", "BOL"],
        check_consistency=False,
    )
    expected = da.copy()
    expected.loc[{
        "area (ISO3)": ["COL", "ARG", "MEX"],
        "source": "RAND2020"
    }] = np.broadcast_to(
        np.concatenate([
            np.array([1.2, 1.2, 1]),
            (np.linspace(1 / 5, 2 / 8, 11) * np.array([6] * 9 + [8] * 2))[1:],
            np.linspace(2, 2 * 10 / 8, 8),
        ]),
        (3, 21),
    ).T * ureg("Gg CO2 / year")
    expected.loc[{
        "area (ISO3)": "BOL",
        "source": "RAND2020"
    }] = np.concatenate([
        np.array([2.4, 2.4, 2]),
        (np.linspace(2 / 5, 2 / 8, 11) * np.array([6] * 9 + [8] * 2))[1:],
        np.linspace(2, 2 * 10 / 8, 8),
    ]) * ureg("Gg CO2 / year")
    assert_equal(downscaled, expected, equal_nan=True, atol=0.01)
    # A sel restricts downscaling to a time window; outside it values stay NaN
    # (apart from the explicitly set input points).
    downscaled = da.pr.downscale_timeseries(
        dim="area (ISO3)",
        basket="CAMB",
        basket_contents=["COL", "ARG", "MEX", "BOL"],
        check_consistency=False,
        sel={"time": slice("2005", "2020")},
    )
    expected = da.copy()
    expected.loc[{
        "area (ISO3)": ["COL", "ARG", "MEX", "BOL"],
        "source": "RAND2020"
    }] = np.broadcast_to(
        np.concatenate([
            np.array([
                np.nan,
                np.nan,
                1,
                np.nan,
                np.nan,
                6 / 4,
                6 / 4,
                6 / 4,
                6 / 4,
                6 / 4,
                6 / 4,
                2,
                2,
            ]),
            np.linspace(2, 2 * 10 / 8, 8),
        ]),
        (4, 21),
    ).T * ureg("Gg CO2 / year")
    expected.loc[{
        "area (ISO3)": "BOL",
        "time": "2002"
    }] = 2 * ureg("Gg CO2 / year")
    assert_equal(downscaled, expected, equal_nan=True, atol=0.01)
def test_downscale_gas_timeseries(empty_ds):
    """Downscale a GWP-weighted gas basket onto its content gases."""
    for key in empty_ds:
        empty_ds[key][:] = np.nan
    # One known year for every content gas.
    for gas in ("CO2", "SF6", "CH4"):
        empty_ds[gas].loc[{"time": "2002"}] = 1 * ureg(f"Gg {gas} / year")
    # AR4 GWP100 factors used to build the basket total.
    sf6 = 22_800
    ch4 = 25
    basket = "KYOTOGHG (AR4GWP100)"
    empty_ds[basket][:] = (1 + sf6 + ch4) * ureg("Gg CO2 / year")
    empty_ds[basket].loc[{"time": "2020"}] = (
        2 * (1 + sf6 + ch4) * ureg("Gg CO2 / year")
    )
    downscaled = empty_ds.pr.downscale_gas_timeseries(
        basket=basket, basket_contents=["CO2", "SF6", "CH4"]
    )
    # The 2002 shares carry over: 1 everywhere, doubled where the basket doubles.
    expected = empty_ds.copy()
    for gas in ("CO2", "SF6", "CH4"):
        expected[gas][:] = 1 * ureg(f"Gg {gas} / year")
        expected[gas].loc[{"time": "2020"}] = 2 * ureg(f"Gg {gas} / year")
    xr.testing.assert_identical(downscaled, expected)
    # An inconsistent basket sum in 2002 must trigger the consistency check.
    empty_ds["SF6"].loc[{"time": "2002"}] = 2 * ureg("Gg SF6 / year")
    with pytest.raises(
        ValueError, match="To continue regardless, set check_consistency=False"
    ):
        empty_ds.pr.downscale_gas_timeseries(
            basket=basket, basket_contents=["CO2", "SF6", "CH4"]
        )
def test_incompatible_units(self, da: xr.DataArray, ts: np.ndarray, new):
    """Setting values with a dimensionally incompatible unit (mass, no time) raises."""
    bad_values = ts * ureg("kg")
    with pytest.raises(pint.errors.DimensionalityError, match="Cannot convert"):
        da.pr.set("area", "COL", bad_values, existing="overwrite", **new)
def co2() -> pint.Quantity:
    """Return 1 Gg CO2 / year as a pint Quantity, used as a scaling factor in tests.

    Fixed return annotation: calling a pint unit registry with a string parses
    the expression and returns a ``Quantity`` (magnitude 1 with units), not a
    ``pint.Unit`` as previously annotated.
    """
    return ureg("Gg CO2 / year")