def simple_df(request):
    """Yield an IamDataFrame built from the full-feature test data.

    When parametrized with "datetime", the year columns are renamed to
    datetime values via DTS_MAPPING before constructing the frame.
    """
    data = FULL_FEATURE_DF.copy()
    if request.param == "datetime":
        # switch the time dimension from years to datetime columns
        data = data.rename(DTS_MAPPING, axis="columns")
    frame = IamDataFrame(model="model_a", scenario="scen_a", data=data)
    frame.set_meta("foo", "string")
    yield frame
def simple_df(request):
    """Yield an IamDataFrame built from the full-feature test data.

    When parametrized with 'datetime', the year columns are renamed to
    datetime values via DTS_MAPPING before constructing the frame.
    """
    data = FULL_FEATURE_DF.copy()
    if request.param == 'datetime':
        # switch the time dimension from years to datetime columns
        data = data.rename(DTS_MAPPING, axis="columns")
    frame = IamDataFrame(model='model_a', scenario='scen_a', data=data)
    frame.set_meta('foo', 'string')
    yield frame
def test_df(request):
    """Yield an IamDataFrame whose time columns are taken from the fixture parameter."""
    time_map = {2005: request.param[0], 2010: request.param[1]}
    frame = IamDataFrame(data=TEST_DF.rename(time_map, axis="columns"))
    # attach every test meta indicator to the frame
    for col in META_COLS:
        frame.set_meta(META_DF[col])
    yield frame
def _validate_meta(df: pyam.IamDataFrame, allowed_meta: dict) -> pyam.IamDataFrame:
    """Validation function for meta indicators

    Drops meta columns not listed in `allowed_meta` (the built-in "exclude"
    column is always kept), then for each allowed indicator either replaces
    values outside the allowed set with the first allowed value, or — if the
    indicator column is missing entirely — initializes it to that default.
    """
    # remove any meta column that is neither allowed nor "exclude"
    permitted = list(allowed_meta) + ["exclude"]
    extra = [col for col in df.meta.columns if col not in permitted]
    if extra:
        logger.warning(
            f"Removing unexpected meta indicators: {extra}")
        df.meta.drop(extra, axis=1, inplace=True)

    # check each allowed indicator, coercing to the default where necessary
    for key, allowed_values in allowed_meta.items():
        if key not in df.meta.columns:
            # indicator was not provided at all -> assign the default value
            logger.info(f"Setting `{key}` to default `{allowed_values[0]}`")
            df.set_meta(name=key, meta=allowed_values[0])
            continue
        # the column exists: find any values outside the allowed set
        unknown = [v for v in df.meta[key].unique() if v not in allowed_values]
        if unknown:
            logger.warning(f"Unknown values {unknown} for `{key}`, "
                           f"setting to default `{allowed_values[0]}`")
            df.meta[key] = [
                v if v in allowed_values else allowed_values[0]
                for v in df.meta[key]
            ]
    return df
def test_df_time():
    """Yield an IamDataFrame with the year columns replaced by datetime values."""
    time_map = {2005: TEST_DTS[0], 2010: TEST_DTS[1]}
    frame = IamDataFrame(data=TEST_DF.rename(time_map, axis="columns"))
    # attach every test meta indicator to the frame
    for col in META_COLS:
        frame.set_meta(META_DF[col])
    yield frame
def test_query_with_meta_arg(conn, test_pd_df, kwargs):
    """Check that querying with `meta=['string']` returns the expected frame."""
    # build the expected result, including the appended subannual data
    expected = IamDataFrame(test_pd_df, subannual='Year').append(
        MODEL_B_DF, model='model_b', scenario='scen_a', region='World'
    )
    for col in ['version', 'string']:
        expected.set_meta(META_DF.iloc[[0, 1, 3]][col])

    # querying through an existing Connection instance
    observed = conn.query(meta=['string'], **kwargs)
    assert_iamframe_equal(observed, expected.filter(**kwargs))

    # querying through the top-level convenience function
    observed = read_iiasa(TEST_API, meta=['string'], **kwargs)
    assert_iamframe_equal(observed, expected.filter(**kwargs))
def subannual_df():
    """Yield an IamDataFrame with the feature data split into sub-annual timeslices."""
    base = FULL_FEATURE_DF.iloc[0:6].copy()

    def _timeslice(data, label, factor):
        # tag the slice and scale its yearly values by the timeslice share
        data["subannual"] = label
        data[TEST_YEARS] = data[TEST_YEARS] * factor
        return data

    # primary energy is a direct sum across sub-annual timeslices
    pieces = []
    for label, factor in [("year", 1), ("winter", 0.7), ("summer", 0.3)]:
        pieces.append(_timeslice(base.copy(), label, factor))
    frame = IamDataFrame(model="model_a", scenario="scen_a", data=pd.concat(pieces))
    frame.set_meta("foo", "string")
    yield frame
def subannual_df():
    """Yield an IamDataFrame with the feature data split into sub-annual timeslices."""
    base = FULL_FEATURE_DF.iloc[0:6].copy()

    def _timeslice(data, label, factor):
        # tag the slice and scale its yearly values by the timeslice share
        data['subannual'] = label
        data[TEST_YEARS] = data[TEST_YEARS] * factor
        return data

    # primary energy is a direct sum across sub-annual timeslices
    pieces = []
    for label, factor in [('year', 1), ('winter', 0.7), ('summer', 0.3)]:
        pieces.append(_timeslice(base.copy(), label, factor))
    frame = IamDataFrame(model='model_a', scenario='scen_a', data=pd.concat(pieces))
    frame.set_meta('foo', 'string')
    yield frame
def test_query_with_meta_arg(conn, test_pd_df, meta, kwargs):
    """Check that querying with the given `meta` argument returns the expected frame."""
    # build the expected result, including the appended subannual data
    expected = IamDataFrame(test_pd_df, subannual="Year").append(
        MODEL_B_DF, model="model_b", scenario="scen_a", region="World"
    )
    for col in ["version", "string"]:
        expected.set_meta(META_DF.iloc[[0, 1, 3]][col])

    # querying through an existing Connection instance
    observed = conn.query(meta=meta, **kwargs)
    assert_iamframe_equal(observed, expected.filter(**kwargs))

    # querying through the top-level convenience function
    observed = read_iiasa(TEST_API, meta=meta, **kwargs)
    assert_iamframe_equal(observed, expected.filter(**kwargs))
def test_df_year():
    """Yield an IamDataFrame built from the year-based test data."""
    frame = IamDataFrame(data=TEST_DF)
    # attach every test meta indicator to the frame
    for col in META_COLS:
        frame.set_meta(META_DF[col])
    yield frame
def test_df(request):
    """Yield an IamDataFrame with its time columns renamed per the fixture parameter."""
    renamed = TEST_DF.rename(request.param, axis="columns")
    frame = IamDataFrame(data=renamed)
    # attach every test meta indicator to the frame
    for col in META_COLS:
        frame.set_meta(META_DF[col])
    yield frame
def test_df_mixed():
    """Yield an IamDataFrame mixing year and datetime time columns.

    The year columns of TEST_DF are renamed pairwise to the values in
    TEST_TIME_MIXED, then the test meta indicators are attached.
    """
    # dict(zip(...)) builds the pairwise rename map directly; the original
    # dict([(i, j) for i, j in zip(...)]) wrapped it in a redundant list
    # comprehension (ruff C404) with identical semantics.
    mapping = dict(zip(TEST_YEARS, TEST_TIME_MIXED))
    df = IamDataFrame(data=TEST_DF.rename(mapping, axis="columns"))
    # attach every test meta indicator to the frame
    for i in META_COLS:
        df.set_meta(META_DF[i])
    yield df