def test_opsd2reegis():
    """Convert OPSD power plants to the reegis format and check totals.

    Runs the full chain: prepare the OPSD file, convert it to the reegis
    format, map the plants to federal states and coastdat2 regions, then
    verify known capacity values for Berlin ('BE') and the wind-zone
    fraction for Lower Saxony ('NI').
    """
    path = os.path.join(os.path.dirname(__file__), 'data')
    cfg.tmp_set('paths', 'opsd', path)
    cfg.tmp_set('paths', 'powerplants', path)
    fn_opsd = opsd.opsd_power_plants()
    fn_reegis = powerplants.pp_opsd2reegis()
    os.remove(fn_opsd)
    filename = str(fn_reegis.split(os.sep)[-1])
    # The geometry base path is the same for both geometry files, so
    # fetch it once (the original fetched it twice).
    geo_path = cfg.get('paths', 'geometry')
    geo_file = cfg.get('geometry', 'federalstates_polygon')
    gdf = geo.load(path=geo_path, filename=geo_file)
    powerplants.add_regions_to_powerplants(
        gdf, 'fed_states', filename=filename, path=path, dump=True)
    geo_file = cfg.get('coastdat', 'coastdatgrid_polygon')
    gdf = geo.load(path=geo_path, filename=geo_file)
    pp = powerplants.add_regions_to_powerplants(
        gdf, 'coastdat2', filename=filename, path=path, dump=False)
    os.remove(fn_reegis)
    eq_(int(pp.groupby('fed_states').sum().loc['BE', 'capacity']), 2427)
    year = 2000
    pp = powerplants.get_reegis_powerplants(year, pp=pp)
    eq_(int(pp.groupby('fed_states').sum().loc['BE', 'capacity_2000']), 2391)
    # 24 % of the 2000 capacity of Lower Saxony is expected in wind zone 3.
    eq_(coastdat.windzone_region_fraction(
        pp, name='fed_states', year=year).round(2).loc['NI', 3], 0.24)
def plot_figure(number, save=False, path=None, show=False, **kwargs):
    """Plot (and optionally save) the figure with the given number.

    Parameters
    ----------
    number : str
        Number of the figure; must be a key of ``get_number_name()``.
    save : bool
        Store the figure in the 'figures' sub-directory of ``path``.
    path : str or None
        Base path for the output; if None the 'local_root' path from the
        configuration is used.
    show : bool
        Show the figure on screen. A figure function may override this
        via its second return value.
    kwargs : dict
        Passed through to the figure function.

    Raises
    ------
    ValueError
        If ``number`` is not a known figure number.
    """
    logging.info("***** PLOT FIGURE {0} ************".format(number))
    if path is None:
        path = cfg.get("paths", "local_root")
    fpath = os.path.join(path, "figures")
    cfg.tmp_set("paths", "figures", fpath)
    os.makedirs(fpath, exist_ok=True)
    # Fetch the number->function map once instead of three times.
    number_name = get_number_name()
    if number not in number_name:
        msg = (
            "Figure {0} not found. Please choose from the following list: {1}"
        )
        raise ValueError(msg.format(number, list(number_name.keys())))
    elif float(number) > 6.1:
        # Figures above 6.1 need the downloaded scenario results
        # (assumes `number` is a string that parses as a float).
        ppath = download_scenario_results(path)
        cfg.tmp_set("paths", "phd", ppath)
    filename, fig_show = number_name[number](**kwargs)
    if fig_show is not None:
        # The figure function may force the show behaviour.
        show = fig_show
    if "." not in filename:
        filename = filename + ".svg"
    if save is True:
        fn = os.path.join(fpath, filename)
        logging.info("Save figure as {0}".format(fn))
        plt.savefig(fn)
    logging.info("Plot")
    if show is True or save is not True:
        plt.show()
def test_set_temp_value():
    """A temporary option is readable and type-cast after ``tmp_set``."""
    ini_file = os.path.join(
        os.path.dirname(__file__), "data", "config_test.ini")
    config.init(files=[ini_file])
    # The option does not exist before it is set temporarily.
    with assert_raises_regexp(
            NoOptionError, "No option 'blubb' in section: 'type_tester'"):
        config.get("type_tester", "blubb")
    # Temporary string values are cast on read ("None" -> None, "5.5" -> 5.5).
    for raw_value, expected in (("None", None), ("5.5", 5.5)):
        config.tmp_set("type_tester", "blubb", raw_value)
        eq_(config.get("type_tester", "blubb"), expected)
def test_state_balance():
    """Check one known value of the federal states energy balance (2000)."""
    static_dir = os.path.join(
        os.path.dirname(__file__), os.pardir, "reegis", "data", "static")
    fn = os.path.join(static_dir, "energy_balance_federal_states.csv")
    config.tmp_set("energy_balance", "energy_balance_states", fn)
    balance = energy_balance.get_states_energy_balance(2000)
    # Raw lignite extraction in Brandenburg ('BB') in the year 2000.
    eq_(int(balance.loc[("BB", "extraction"), "lignite (raw)"]), 356092)
def read_bmwi_sheet_7_test():
    """Check known values from the BMWi energy tables."""
    tmp_dir = os.path.join(os.path.dirname(__file__), 'data', 'temp')
    os.makedirs(tmp_dir, exist_ok=True)
    cfg.tmp_set('paths', 'general', tmp_dir)
    eq_(bmwi.bmwi_re_energy_capacity().loc[2016, ('water', 'capacity')],
        5601)
    eq_(bmwi.get_annual_electricity_demand_bmwi(2014), 523.988)
    # Years without data return None.
    eq_(bmwi.get_annual_electricity_demand_bmwi(1900), None)
    # Sheet 7a -> industry total, sheet 7b -> private households total.
    checks = (('a', 'Industrie', 2545), ('b', 'private Haushalte', 2188))
    for sheet, sector, expected in checks:
        table = bmwi.read_bmwi_sheet_7(sheet).sort_index()
        eq_(int(float(table.loc[(sector, 'gesamt'), 2014])), expected)
def test_read_conv_pp():
    """Download the conventional OPSD file and check the overall capacity."""
    opsd_dir = os.path.join(os.path.expanduser("~"), "reegis_opsd_test")
    os.makedirs(opsd_dir, exist_ok=True)
    cfg.tmp_set("paths_pattern", "opsd", opsd_dir)
    cfg.tmp_set("paths", "powerplants", opsd_dir)
    # 'conv' is not a valid category name.
    with assert_raises_regexp(ValueError, "Category 'conv' is not valid."):
        opsd.load_original_opsd_file("conv", True)
    df = opsd.load_original_opsd_file("conventional", True)
    expected_files = (
        "conventional_readme.md",
        "conventional_datapackage.json",
        "conventional_power_plants_DE.csv",
    )
    for name in expected_files:
        ok_(os.path.isfile(os.path.join(opsd_dir, name)))
    rmtree(opsd_dir)
    eq_(int(df["capacity_net_bnetza"].sum()), 118684)
def setup_class(cls):
    """Prepare the power plant tables once for all tests of this class.

    Builds the reegis power plant file from the OPSD test data, maps the
    plants to federal states and coastdat2 regions and stores the results
    on the class. All intermediate files are removed afterwards.
    """
    path = os.path.join(os.path.dirname(__file__), "data")
    cfg.tmp_set("paths_pattern", "opsd", path)
    cfg.tmp_set("paths", "powerplants", path)
    fn_opsd = opsd.opsd_power_plants()
    os.remove(fn_opsd)
    # Replace the prepared OPSD file with the smaller "_test" fixture.
    fn_opsd = os.path.join(cfg.get("paths_pattern", "opsd"),
                           cfg.get("opsd", "opsd_prepared"))
    fn_test = fn_opsd.replace(".h5", "_test.h5")
    copyfile(fn_test, fn_opsd)
    fn_reegis = powerplants.pp_opsd2reegis()
    os.remove(fn_opsd)
    filename = str(fn_reegis.split(os.sep)[-1])
    cls.gdf1 = geo.get_federal_states_polygon()
    # First pass maps federal states (dumped to file), second pass maps
    # coastdat2 cells (kept in memory only).
    powerplants.add_regions_to_powerplants(cls.gdf1, "fed_states",
                                           filename=filename, path=path,
                                           dump=True)
    geo_path = cfg.get("paths", "geometry")
    geo_file = cfg.get("coastdat", "coastdatgrid_polygon")
    gdf2 = geo.load(path=geo_path, filename=geo_file)
    cls.pp = powerplants.add_regions_to_powerplants(gdf2, "coastdat2",
                                                    filename=filename,
                                                    path=path, dump=False)
    year = 2014
    cls.pp2 = powerplants.get_powerplants_by_region(
        cls.gdf1, year, "my_states")
    # Efficiency = electrical capacity / fuel input capacity for the year.
    cls.pp2["efficiency_{0}".format(year)] = cls.pp2["capacity_{0}".format(
        year)].div(cls.pp2["capacity_in_{0}".format(year)])
    cls.pp2.drop(
        ["capacity", "capacity_in", "thermal_capacity"],
        axis=1,
        inplace=True,
    )
    # Clean up every file created above.
    fn_reegis2 = fn_reegis.replace(".h5", "_my_states.h5")
    os.remove(fn_reegis2)
    os.remove(fn_reegis)
    rmtree(os.path.join(path, "messages"))
def get_file_name_doctests():
    """Return the de21 result file paths for 2014 and 2013 (for doctests)."""
    cfg.tmp_set("results", "dir", "results_cbc")
    scenario_path = cfg.get("paths", "scenario")
    # Both paths differ only in the year.
    fn1, fn2 = (
        os.path.join(scenario_path, "deflex", str(year), "results_cbc",
                     "deflex_{0}_de21.esys".format(year))
        for year in (2014, 2013)
    )
    return fn1, fn2
def setUpClass(cls):
    """Fetch the openego test data once and dump it to an HDF5 file."""
    # First call: restricted database query against the db test file.
    cfg.tmp_set("open_ego", "ego_load_areas", "ego_load_areas_db_test.csv")
    openego.get_ego_data(osf=False, query="?where=un_id<10")
    # Second call: use the osf test files instead.
    cfg.tmp_set("open_ego", "ego_load_areas", "ego_load_areas_test.csv")
    cfg.tmp_set("open_ego", "osf_url", "https://osf.io/w9pv6/download")
    cfg.tmp_set("open_ego", "ego_file", "oep_ego_demand_combined_test1.h5")
    cls.load = openego.get_ego_data()
    cls.geo = geometries.get_federal_states_polygon()
    cls.fn = os.path.join(cfg.get("paths", "demand"),
                          "oep_ego_demand_combined_test.h5")
    cls.load.to_hdf(cls.fn, "demand")
def create_basic_scenario(year, rmap=None, path=None, csv_dir=None,
                          xls_name=None, round_values=None):
    """Create a basic deflex scenario and store it as excel file and csv
    collection.

    Returns a namedtuple with the fields ``xls`` and ``csv`` holding the
    paths of the created files.
    """
    paths = namedtuple('paths', 'xls, csv')
    if rmap is not None:
        cfg.tmp_set('init', 'map', rmap)
    table_collection = clean_time_series(
        deflex.basic_scenario.create_scenario(year, round_values))
    name = '{0}_{1}_{2}'.format('deflex', year, cfg.get('init', 'map'))
    sce = deflex.scenario_tools.Scenario(
        table_collection=table_collection, name=name, year=year)
    if path is None:
        path = os.path.join(cfg.get('paths', 'scenario'), 'deflex', str(year))
    # Fall back to default names if no explicit targets are given.
    csv_path = os.path.join(
        path, '{0}_csv'.format(name) if csv_dir is None else csv_dir)
    xls_path = os.path.join(
        path, name + '.xls' if xls_name is None else xls_name)
    os.makedirs(path, exist_ok=True)
    os.makedirs(csv_path, exist_ok=True)
    fullpath = paths(xls=xls_path, csv=csv_path)
    sce.to_excel(fullpath.xls)
    sce.to_csv(fullpath.csv)
    return fullpath
def create_weather_variation_scenario(year, start=1998, rmap=None,
                                      round_values=None):
    """Create scenarios for one year with all weather years since ``start``.

    Parameters
    ----------
    year : int
        Year of the scenario (demand, capacities etc.).
    start : int
        First weather year; scenarios are created for every weather year
        from ``start`` up to 2014.
    rmap : str or None
        Region map (e.g. 'de21'); overrides the 'init'/'map' config value.
    round_values
        Passed through to ``deflex.basic_scenario.create_scenario``.
    """
    # The region map is loop-invariant, so set it once instead of once
    # per weather year (as done in create_basic_scenario).
    if rmap is not None:
        cfg.tmp_set('init', 'map', rmap)
    weather_years = range(start, 2015)
    for weather_year in weather_years:
        logging.info("{2} Create weather variation {0} for {1} {2}".format(
            weather_year, year, '**********************'))
        table_collection = deflex.basic_scenario.create_scenario(
            year, round_values, weather_year=weather_year)
        table_collection = clean_time_series(table_collection)
        name = '{0}_{1}_{2}_weather_{3}'.format('deflex', year,
                                                cfg.get('init', 'map'),
                                                weather_year)
        sce = deflex.scenario_tools.Scenario(table_collection=table_collection,
                                             name=name, year=year)
        path = os.path.join(cfg.get('paths', 'scenario'), 'deflex',
                            str(year) + '_var_entsoe')
        sce.to_excel(os.path.join(path, name + '.xls'))
        sce.to_csv(os.path.join(path, '{0}_csv'.format(name)))
def inhabitant_tests():
    """Check the total number of inhabitants of Germany in 2014."""
    data_dir = os.path.join(os.path.dirname(__file__), "data", "temp")
    os.makedirs(data_dir, exist_ok=True)
    cfg.tmp_set("paths", "inhabitants", data_dir)
    population = inhabitants.get_ew_by_federal_states(2014)
    eq_(int(population.sum()), 81197537)
def setup_class(cls):
    """Build the de22 heat/transmission scenario once for all tests.

    Reads the csv fixture, sets all creator options, replaces the
    expensive data sources with MagicMock objects returning the fixture
    tables and finally runs ``scenario_creator.create_scenario``.
    """
    path = os.path.join(TEST_PATH, "de22_heat_transmission_csv")
    sc = st.DeflexScenario()
    sc.read_csv(path)
    cls.tables = sc.input_data
    # NOTE(review): tmp_tables is filled below but never attached to cls
    # and not used afterwards in this method — appears unused; confirm.
    tmp_tables = {}
    # Full creator parameter set for the de22 heat/transmission run.
    parameter = {
        "costs_source": "ewi",
        "downtime_bioenergy": 0.1,
        "limited_transformer": "bioenergy",
        "local_fuels": "district heating",
        "map": "de22",
        "mobility_other": "petrol",
        "round": 1,
        "separate_heat_regions": "de22",
        "copperplate": False,
        "default_transmission_efficiency": 0.9,
        "group_transformer": False,
        "heat": True,
        "use_CO2_costs": True,
        "use_downtime_factor": True,
        "use_variable_costs": False,
        "year": 2014,
    }
    config.init(paths=[os.path.dirname(dfile)])
    for option, value in parameter.items():
        # NOTE(review): both cfg.tmp_set and config.tmp_set are called
        # with identical arguments — presumably the same module imported
        # twice; confirm.
        cfg.tmp_set("creator", option, str(value))
        config.tmp_set("creator", option, str(value))
    name = "heat_demand_deflex"
    fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv")
    tmp_tables[name] = pd.read_csv(fn, index_col=[0], header=[0, 1])
    name = "transformer_balance"
    fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv")
    tmp_tables[name] = pd.read_csv(fn, index_col=[0, 1, 2], header=[0])
    # Patch the data sources so the creator uses the fixture tables
    # instead of computing/downloading real data.
    powerplants.scenario_powerplants = MagicMock(
        return_value={
            "volatile plants": cls.tables["volatile plants"],
            "power plants": cls.tables["power plants"],
        })
    powerplants.scenario_chp = MagicMock(
        return_value={
            "heat-chp plants": cls.tables["heat-chp plants"],
            "power plants": cls.tables["power plants"],
        })
    feedin.scenario_feedin = MagicMock(
        return_value=cls.tables["volatile series"])
    demand_table = {
        "electricity demand series":
            cls.tables["electricity demand series"],
        "heat demand series": cls.tables["heat demand series"],
    }
    demand.scenario_demand = MagicMock(return_value=demand_table)
    name = "deflex_2014_de22_heat_transmission"
    polygons = deflex_regions(rmap=parameter["map"], rtype="polygons")
    lines = deflex_power_lines(parameter["map"]).index
    cls.input_data = scenario_creator.create_scenario(
        polygons, 2014, name, lines)
cap1, dist1 = get_grid_capacity(grid, int(a), int(b)) cap2, dist2 = get_grid_capacity(grid, int(b), int(a)) if cap1 == 0 and cap2 == 0: pwr_lines.loc[l, 'capacity'] = 0 pwr_lines.loc[l, 'distance'] = 0 elif cap1 == 0 and cap2 != 0: pwr_lines.loc[l, 'capacity'] = cap2 pwr_lines.loc[l, 'distance'] = dist2 elif cap1 != 0 and cap2 == 0: pwr_lines.loc[l, 'capacity'] = cap1 pwr_lines.loc[l, 'distance'] = dist1 else: print("Error in {0}".format(l)) # plot_grid(pwr_lines) df = pwr_lines[['capacity', 'distance']] return df def get_grid(): return pd.read_csv(os.path.join('data', 'grid', 'de21_transmission.csv'), index_col='Unnamed: 0') if __name__ == "__main__": cfg.tmp_set('init', 'map', 'de17') lines = get_electrical_transmission_deflex(duplicate=False) print(lines) print(len(lines))
int(demand_region.sum().sum()), int(demand_state.sum().sum()))) return demand_region if __name__ == "__main__": logger.define_logging(screen_level=logging.ERROR, file_level=logging.ERROR) get_heat_profiles_deflex(2014) # egofile_deflex = os.path.join( # cfg.get('paths', 'demand'), # cfg.get('demand', 'ego_file_deflex')).format(map='de22') # ego_demand_deflex = pd.read_hdf(egofile_deflex, 'demand') # print(ego_demand_deflex['de22_region'].unique()) # exit(0) cfg.tmp_set('init', 'map', 'de22') net = reegis.bmwi.get_annual_electricity_demand_bmwi(2014) dem22 = get_deflex_profile(2014, 'openego_entsoe', annual_demand=net).sum() cfg.tmp_set('init', 'map', 'de21') dem21 = get_deflex_profile(2014, 'openego_entsoe', annual_demand=net).sum() print(round(dem21 - dem22, 2)) print(round(dem21, 2)) print(round(dem22, 2)) # print(get_deflex_profile(2014, 'renpass', annual_demand=net).sum()) exit(0) # print(openego_demand_share()) # exit(0) # elec_demand_tester(2013) # prepare_ego_demand() # exit(0) for y in [2014]:
def create_basic_reegis_scenario( name, regions, parameter, lines=None, csv_path=None, excel_path=None, ): """ Create a basic scenario for a given year and region-set. Parameters ---------- name : str Name of the scenario regions : geopandas.geoDataFrame Set of region polygons. lines : geopandas.geoDataFrame Set of transmission lines. parameter : dict Parameter set for the creation process. Some parameters will have a default value. For the default values see below. csv_path : str A directory to store the scenario as csv collection. If None no csv collection will be created. Either csv_path or excel_path must not be 'None'. excel_path : str A file to store the scenario as an excel map. If None no excel file will be created. Both suffixes 'xls' or 'xlsx' are possible. The excel format can be used in most spreadsheet programs such as LibreOffice or Gnumeric. Either csv_path or excel_path must not be 'None'. Returns ------- namedtuple : Path Notes ----- List of default values: * copperplate: True * default_transmission_efficiency: 0.9 * costs_source: "ewi" * downtime_bioenergy: 0.1 * group_transformer: False * heat: False * limited_transformer: "bioenergy", * local_fuels: "district heating", * map: "de02", * mobility_other: "petrol", * round: 1, * separate_heat_regions: "de22", * use_CO2_costs: False, * use_downtime_factor: True, * use_variable_costs: False, * year: 2014 Examples -------- >>> from oemof.tools import logger >>> from deflex.geometries import deflex_power_lines >>> from deflex.geometries import deflex_regions >>> >>> logger.define_logging(screen_level=logging.DEBUG) # doctest: +SKIP >>> >>> my_parameter = { ... "year": 2014, ... "map": "de02", ... "copperplate": True, ... "heat": True, ... } >>> >>> my_name = "deflex" >>> for k, v in my_parameter.items(): ... 
my_name += "_" + str(k) + "-" + str(v) >>> >>> polygons = deflex_regions(rmap=my_parameter["map"], rtype="polygons") >>> my_lines = deflex_power_lines(my_parameter["map"]).index >>> path = "/my/path/creator/{0}{1}".format(my_name, "{0}") >>> >>> create_basic_reegis_scenario( ... name=my_name, ... regions=polygons, ... lines=my_lines, ... parameter=my_parameter, ... excel_path=path.format(".xlsx"), ... csv_path=path.format("_csv"), ... ) # doctest: +SKIP """ # The default parameter can be found in "creator.ini". config.init(paths=[os.path.dirname(dfile)]) for option, value in parameter.items(): cfg.tmp_set("creator", option, str(value)) config.tmp_set("creator", option, str(value)) year = cfg.get("creator", "year") configuration = json.dumps(cfg.get_dict("creator"), indent=4, sort_keys=True) logging.info( "The following configuration is used to build the scenario:" " %s", configuration, ) paths = namedtuple("paths", "xls, csv") table_collection = create_scenario(regions, year, name, lines) table_collection = clean_time_series(table_collection) name = table_collection["general"].get("name") sce = scenario.Scenario(input_data=table_collection, name=name, year=year) if csv_path is not None: os.makedirs(csv_path, exist_ok=True) sce.to_csv(csv_path) if excel_path is not None: os.makedirs(os.path.dirname(excel_path), exist_ok=True) sce.to_xlsx(excel_path) return paths(xls=excel_path, csv=csv_path)
def test_set_temp_without_init():
    """``tmp_set`` must not fail if ``config.init()`` was not called."""
    config.tmp_set("type_tester", "blubb", "None")
sc.create_model() logging.info("Solve the optimisation model: {0}".format(stopwatch())) sc.solve() logging.info("Solved. Dump results: {0}".format(stopwatch())) res_path = os.path.join(path, 'results_{0}'.format(cfg.get('general', 'solver'))) os.makedirs(res_path, exist_ok=True) out_file = os.path.join(res_path, name + '.esys') logging.info("Dump file to {0}".format(out_file)) sc.meta['end_time'] = datetime.now() sc.dump_es(out_file) logging.info("All done. deflex finished without errors: {0}".format( stopwatch())) if __name__ == "__main__": logger.define_logging() for y in [2014, 2013, 2012]: for my_rmap in ['de21', 'de22']: cfg.tmp_set('init', 'map', my_rmap) try: main(y) except Exception as e: logging.error(traceback.format_exc()) time.sleep(0.5) logging.error(e) time.sleep(0.5)