def pp_reegis2deflex(regions, name, filename_in=None, filename_out=None):
    """
    Add federal states and deflex regions to the reegis power plant table.

    As the process takes a while the result is stored for further usage.

    Returns
    -------
    str : The full path where the result file is stored.

    """
    if filename_out is None:
        filename_out = os.path.join(
            cfg.get("paths", "powerplants"),
            cfg.get("powerplants", "deflex_pp"),
        ).format(map=cfg.get("init", "map"))

    # Attach the deflex regions to the reegis power plant table.
    table = powerplants.add_regions_to_powerplants(
        regions, name, dump=False, filename=filename_in
    )

    # Attach the federal states as a second region set.
    states = reegis_geometries.get_federal_states_polygon()
    table = powerplants.add_regions_to_powerplants(
        states, "federal_states", pp=table, dump=False
    )

    # Store the result so deflex can reuse it later.
    table.to_hdf(filename_out, "pp")
    return filename_out
def test_02_create_deflex_powerplants():
    """Build the deflex power plant file from the downloaded reegis file."""
    regions = geometries.deflex_regions("de21")
    pp_path = cfg.get("paths", "powerplants")
    source = os.path.join(pp_path, "reegis_pp_test.h5")
    target = os.path.join(pp_path, "deflex_pp_test.h5")
    powerplants.pp_reegis2deflex(
        regions, "de21", filename_in=source, filename_out=target
    )
def scenario_transmission(table_collection, regions, name, copperplate=False):
    """Get the transmission lines (capacity, distance, efficiency) for the
    scenario.

    Parameters
    ----------
    table_collection : dict
        Scenario tables; the "volatile_source" table is used to size the
        offshore connections.
    regions : GeoDataFrame
        Deflex regions with the region id in the index.
    name : str
        Name of the deflex map (e.g. 'de21').
    copperplate : bool
        If True the default (unrestricted) transmission table is used.

    Returns
    -------
    pandas.DataFrame
        Lines with a ('electrical', <attribute>) column MultiIndex.

    Examples
    --------
    >>> regions=geometries.deflex_regions(rmap='de21')  # doctest: +SKIP
    >>> pp=scenario_powerplants(dict(), regions, 2014, 'de21', 1
    ...     )  # doctest: +SKIP
    >>> lines=scenario_transmission(pp, regions, 'de21')  # doctest: +SKIP
    >>> int(lines.loc['DE07-DE05', ('electrical', 'capacity')]
    ...     )  # doctest: +SKIP
    1978
    >>> int(lines.loc['DE07-DE05', ('electrical', 'distance')]
    ...     )  # doctest: +SKIP
    199
    >>> float(lines.loc['DE07-DE05', ('electrical', 'efficiency')]
    ...     )  # doctest: +SKIP
    0.9
    >>> lines=scenario_transmission(pp, regions, 'de21', copperplate=True
    ...     )  # doctest: +SKIP
    >>> float(lines.loc['DE07-DE05', ('electrical', 'capacity')]
    ...     )  # doctest: +SKIP
    inf
    >>> float(lines.loc['DE07-DE05', ('electrical', 'distance')]
    ...     )  # doctest: +SKIP
    nan
    >>> float(lines.loc['DE07-DE05', ('electrical', 'efficiency')]
    ...     )  # doctest: +SKIP
    1.0
    """
    vs = table_collection["volatile_source"]

    # This should be done automatic e.g. if representative point outside the
    # landmass polygon.
    offshore_regions = geometries.divide_off_and_onshore(regions).offshore

    if name in ["de21", "de22"] and not copperplate:
        elec_trans = transmission.get_electrical_transmission_renpass()
        general_efficiency = cfg.get("transmission", "general_efficiency")
        if general_efficiency is not None:
            elec_trans["efficiency"] = general_efficiency
        else:
            msg = (
                "The calculation of the efficiency by distance is not yet "
                "implemented"
            )
            raise NotImplementedError(msg)
    else:
        elec_trans = transmission.get_electrical_transmission_default()

    # Set transmission capacity of offshore power lines to installed capacity
    # Multiply the installed capacity with 1.1 to get a buffer of 10%.
    for offreg in offshore_regions:
        elec_trans.loc[elec_trans.index.str.contains(offreg), "capacity"] = (
            vs[offreg].sum().sum() * 1.1
        )

    # Keyword axis: the positional axis argument of sort_index is deprecated
    # in newer pandas releases.
    elec_trans = pd.concat(
        [elec_trans], axis=1, keys=["electrical"]
    ).sort_index(axis=1)

    # Fix: compare against the ``name`` parameter instead of re-reading the
    # map from the config, so an explicitly passed map is honoured (the
    # branch above already uses ``name``).
    if name == "de22" and not copperplate:
        elec_trans.loc["DE22-DE01", ("electrical", "efficiency")] = 0.9999
        elec_trans.loc["DE22-DE01", ("electrical", "capacity")] = 9999999
    return elec_trans
def scenario_transmission(regions, lines, rmap=None, copperplate=None):
    """Get the transmission lines (capacity, distance, efficiency) for the
    scenario.

    Parameters
    ----------
    regions : GeoDataFrame
        Deflex regions with the region id in the index.
    lines : iterable[str]
        Names of the power lines ('FromRegion-ToRegion').
    rmap : str or None
        Name of the deflex map; read from the config if None.
    copperplate : bool or None
        Unrestricted transmission if True; read from the config if None.

    Returns
    -------
    pandas.DataFrame

    Examples
    --------
    >>> my_regions=geometries.deflex_regions(rmap="de21")
    >>> my_lines = geometries.deflex_power_lines(rmap="de21").index
    >>> lines=scenario_transmission(my_regions, my_lines, "de21", False)
    >>> int(lines.loc["DE07-DE05", "capacity"])
    1978
    >>> int(lines.loc["DE07-DE05", "distance"])
    199
    >>> float(lines.loc["DE07-DE05", "efficiency"])
    0.9
    >>> lines=scenario_transmission(my_regions, my_lines, copperplate=True)
    >>> float(lines.loc["DE07-DE05", "capacity"])
    inf
    >>> float(lines.loc["DE07-DE05", "distance"])
    nan
    >>> float(lines.loc["DE07-DE05", "efficiency"])
    1.0
    """
    # This should be done automatic e.g. if representative point outside the
    # landmass polygon.
    offshore_regions = geometries.divide_off_and_onshore(regions).offshore

    if rmap is None:
        rmap = cfg.get("creator", "map")
    if copperplate is None:
        copperplate = cfg.get("creator", "copperplate")

    if rmap in ["de21", "de22"] and not copperplate:
        elec_trans = get_electrical_transmission_renpass()
        general_efficiency = cfg.get(
            "creator", "default_transmission_efficiency"
        )
        if general_efficiency is not None:
            elec_trans["efficiency"] = general_efficiency
        else:
            msg = (
                "The calculation of the efficiency by distance is not yet "
                "implemented"
            )
            raise NotImplementedError(msg)
    else:
        elec_trans = get_electrical_transmission_default(power_lines=lines)

    # Offshore lines are not restricted; the production is limited by the
    # volatile series instead.
    # Fix: assign a numeric infinity instead of the string "inf", which
    # would turn the capacity column into object dtype.
    for offreg in offshore_regions:
        elec_trans.loc[
            elec_trans.index.str.contains(offreg), "capacity"
        ] = float("inf")

    # Fix: use the resolved ``rmap``/``copperplate`` values instead of
    # re-reading the config, so explicitly passed parameters are honoured.
    if rmap == "de22" and not copperplate:
        elec_trans.loc["DE22-DE01", "efficiency"] = 0.999999
        elec_trans.loc["DE22-DE01", "capacity"] = 9999999
        elec_trans.loc["DE22-DE01", "distance"] = 0
    return elec_trans
def scenario_decentralised_heat():
    """
    Read the decentralised heat table from the deflex data directory.

    Returns
    -------
    pandas.DataFrame
        Table with a two-level column header and the first column as index.
    """
    fn = os.path.join(
        cfg.get("paths", "data_deflex"), cfg.get("heating", "table")
    )
    return pd.read_csv(fn, header=[0, 1], index_col=[0])
def get_deflex_pp_by_year(regions, year, name, overwrite_capacity=False):
    """
    Get the deflex power plant table with capacities filtered for one year.

    Parameters
    ----------
    regions : GeoDataFrame
        Deflex regions; used to (re)create the file if it does not exist.
    year : int
        Year for which the capacities are computed.
    name : str
        Name of the deflex map (e.g. 'de21').
    overwrite_capacity : bool
        By default (False) a new column "capacity_<year>" is created. If set
        to True the old capacity column will be overwritten.

    Returns
    -------
    pandas.DataFrame
    """
    filename = os.path.join(
        cfg.get("paths", "powerplants"),
        cfg.get("powerplants", "deflex_pp"),
    ).format(map=name)
    logging.info("Get deflex power plants for {0}.".format(year))
    if not os.path.isfile(filename):
        msg = "File '{0}' does not exist. Will create it from reegis file."
        logging.debug(msg.format(filename))
        filename = pp_reegis2deflex(regions, name)
    pp = pd.DataFrame(pd.read_hdf(filename, "pp", mode="r"))

    # Remove unwanted data sets
    pp = process_pp_table(pp)

    # Templates for the year-specific columns derived from "capacity" and
    # "capacity_in".
    filter_columns = ["capacity_{0}", "capacity_in_{0}"]

    # Get all powerplants for the given year.
    # If com_month exist the power plants will be considered month-wise.
    # Otherwise the commission/decommission within the given year is not
    # considered.
    for fcol in filter_columns:
        filter_column = fcol.format(year)
        # Strip the trailing "_{0}" placeholder to get the source column.
        orig_column = fcol[:-4]
        # Fully available: commissioned before and decommissioned after year.
        c1 = (pp["com_year"] < year) & (pp["decom_year"] > year)
        pp.loc[c1, filter_column] = pp.loc[c1, orig_column]
        # Commissioned within the year: weight with the remaining months.
        c2 = pp["com_year"] == year
        pp.loc[c2, filter_column] = (
            pp.loc[c2, orig_column] * (12 - pp.loc[c2, "com_month"]) / 12
        )
        # Decommissioned within the year: weight with the months in service.
        # NOTE(review): this weights by "com_month"; presumably
        # "decom_month" was intended here — verify against the reegis table.
        c3 = pp["decom_year"] == year
        pp.loc[c3, filter_column] = (
            pp.loc[c3, orig_column] * pp.loc[c3, "com_month"] / 12
        )

        if overwrite_capacity:
            # Replace the original column with the year-specific values and
            # drop the temporary column.
            pp[orig_column] = 0
            pp[orig_column] = pp[filter_column]
            del pp[filter_column]
    return pp
def test_missing_value():
    """Missing options/sections raise the matching configparser errors."""
    config.init(
        files=[
            os.path.join(os.path.dirname(__file__), "data", "config_test.ini")
        ]
    )
    with pytest.raises(
        NoOptionError, match="No option 'blubb' in section: 'type_tester'"
    ):
        config.get("type_tester", "blubb")
    with pytest.raises(NoSectionError, match="No section: 'typetester'"):
        config.get("typetester", "blubb")
def test_set_temp_value():
    """Temporarily set values and read them back with type conversion."""
    config.init(
        files=[
            os.path.join(os.path.dirname(__file__), "data", "config_test.ini")
        ]
    )
    with pytest.raises(
        NoOptionError, match="No option 'blubb' in section: 'type_tester'"
    ):
        config.get("type_tester", "blubb")
    config.tmp_set("type_tester", "blubb", "None")
    assert config.get("type_tester", "blubb") is None
    config.tmp_set("type_tester", "blubb", "5.5")
    assert config.get("type_tester", "blubb") == 5.5
def main(year, rmap, csv=True, es=None, plot_graph=False, extra_regions=None):
    """
    Model an existing basic scenario.

    Parameters
    ----------
    year : int
        Year of an existing basic scenario.
    rmap : str
        A valid deflex map id (de02, de17, de21, de22) of an existing
        scenario.
    csv : bool
        Use csv collection. If set to False the xls-file is used.
    es : oemof.solph.EnergySystem
        A valid energy system if needed.
    plot_graph : bool
        Set to True to plot the energy system.
    extra_regions : list
        Use separate resource buses for these regions.

    Returns
    -------

    Examples
    --------
    >>> main(2014, 'de21')  # doctest: +SKIP
    """
    stopwatch()
    cfg.tmp_set("init", "map", rmap)

    name = "{0}_{1}_{2}".format("deflex", year, cfg.get("init", "map"))
    base_path = os.path.join(
        cfg.get("paths", "scenario"), "deflex", str(year)
    )

    # Exactly one of the two inputs (csv collection or xls file) is used.
    if csv is True:
        csv_path = os.path.join(base_path, name + "_csv")
        excel_path = None
    else:
        csv_path = None
        excel_path = os.path.join(base_path, name + ".xls")

    model_scenario(
        xls_file=excel_path,
        csv_path=csv_path,
        res_path=base_path,
        name=name,
        rmap=rmap,
        year=year,
        es=es,
        plot_graph=plot_graph,
        extra_regions=extra_regions,
    )
def test_scenario_transmisson_error(self):
    """A missing efficiency value raises NotImplementedError."""
    backup = cfg.get("transmission", "general_efficiency")
    cfg.tmp_set("transmission", "general_efficiency", "None")
    expected = "The calculation of the efficiency by distance is not yet"
    with assert_raises_regexp(NotImplementedError, expected):
        basic_scenario.scenario_transmission(
            self.pp, self.regions, "de22"
        )
    cfg.tmp_set("transmission", "general_efficiency", backup)
def test_01_download_reegis_power_plants():
    """Fetch the reegis power plant test file from osf."""
    target = os.path.join(
        cfg.get("paths", "powerplants"), "reegis_pp_test.h5"
    )
    download_file(target, "https://osf.io/ude5c/download")
def test_init_own_file_list():
    """Initialise the config with an explicit file list."""
    config.init(
        files=[
            os.path.join(os.path.dirname(__file__), "data", "config_test.ini")
        ]
    )
    basenames = sorted(f.split(os.sep)[-1] for f in config.FILES)
    assert basenames == ["config_test.ini"]
    assert config.get("tester", "my_test") == "my_value"
def deflex_regions(rmap=None, rtype='polygons'):
    """
    Load the polygons (or labels) of a deflex map.

    Parameters
    ----------
    rmap : str
        Name of the deflex map.
    rtype : str
        Type of the deflex map ('polygon', 'labels').

    Returns
    -------
    GeoDataFrame

    Examples
    --------
    >>> regions=deflex_regions('de17')
    >>> len(regions)
    17
    >>> regions.geometry.iloc[0].geom_type
    'MultiPolygon'
    >>> l=deflex_regions('de21', 'labels').loc['DE04', 'geometry']
    >>> l.geom_type
    'Point'
    >>> l.x
    13.2
    >>> l.y
    51.1
    >>> cfg.tmp_set('init', 'map', 'de22')
    >>> deflex_regions().name
    'de22'
    >>> list(deflex_regions('de02').index)
    ['DE01', 'DE02']
    """
    if rmap is None:
        rmap = cfg.get("init", "map")

    basename = cfg.get("geometry", "deflex_polygon").format(
        suffix=".geojson", map=rmap, type=rtype
    )
    fullname = os.path.join(cfg.get("paths", "geo_deflex"), basename)

    regions = geo.load(fullname=fullname)
    regions.set_index("region", inplace=True)
    regions.name = rmap
    return regions
def general_data(year, input_data):
    """
    Collect general scenario information in a one-column table.

    The scenario name encodes the year, the map and the heat/merit-order
    switches from the config.
    """
    general = pd.DataFrame(columns=["value"])
    general.loc["year"] = year
    general.loc["number of time steps"] = len(
        input_data["electricity demand series"]
    )

    # Compose the scenario name from the config switches.
    heat = "heat" if cfg.get("creator", "heat") else "no-heat"
    if cfg.get("creator", "group_transformer"):
        merit = "no-reg-merit"
    else:
        merit = "reg-merit"
    general.loc["name"] = "_".join(
        ["deflex", str(year), cfg.get("creator", "map"), heat, merit]
    )
    return general
def test_03_not_existing_file():
    """A missing deflex pp-file raises an exception naming the file."""
    backup = cfg.get("paths", "powerplants")
    cfg.tmp_set("paths", "powerplants", "/home/pet/")
    regions = geometries.deflex_regions("de22")
    powerplants.pp_reegis2deflex = MagicMock(return_value="/home/pet/pp.h5")
    with assert_raises_regexp(
        Exception, "File /home/pet/pp.h5 does not exist"
    ):
        powerplants.get_deflex_pp_by_year(regions, 2012, "de22")
    cfg.tmp_set("paths", "powerplants", backup)
def test_download_pp_from_osf():
    """Download pp-file from osf."""
    filename = os.path.join(cfg.get("paths", "powerplants"), "de21_pp.h5")
    if os.path.isfile(filename):
        return
    response = requests.get("https://osf.io/qtc56/download")
    with open(filename, "wb") as fout:
        fout.write(response.content)
def process_pp_table(pp):
    """Filter the power plant table (currently: drop pumped storages)."""
    # Removing plants outside Germany and onshore technology in offshore
    # regions is currently disabled:
    # for state in cfg.get_list('powerplants', 'remove_states'):
    #     pp=pp.loc[pp.state != state]
    # if clean_offshore:
    #     pp=remove_onshore_technology_from_offshore_regions(pp)

    # Remove PHES (storages)
    if cfg.get("powerplants", "remove_phes"):
        pp = pp.loc[pp.technology != "Pumped storage"]
    return pp
def setup_func():
    """Download pp-file from osf."""
    downloads = [
        ("n7ahr", "geothermal"),
        ("5n7t3", "hydro"),
        ("2qwv7", "solar"),
        ("9dvpf", "wind"),
    ]
    path = os.path.join(cfg.get("paths", "feedin"), "de21", "2014")
    os.makedirs(path, exist_ok=True)
    for key, source_type in downloads:
        url = "https://osf.io/{0}/download".format(key)
        file = "2014_feedin_de21_normalised_{0}.csv".format(source_type)
        download_file(os.path.join(path, file), url)

    src = os.path.join(
        os.path.dirname(__file__), "data", "windzone_de21.csv"
    )
    trg = os.path.join(cfg.get("paths", "powerplants"), "windzone_de21.csv")
    copyfile(src, trg)
def divide_off_and_onshore(regions):
    """
    Sort regions into onshore and offshore regions. A namedtuple with two
    list of regions ids will be returned. Fetch the `onshore` and `offshore`
    attribute of the named tuple to get the list.

    Parameters
    ----------
    regions : GeoDataFrame
        A region set with the region id in the index.

    Returns
    -------
    named tuple

    Examples
    --------
    >>> reg=deflex_regions('de02')
    >>> divide_off_and_onshore(reg).onshore
    ['DE01']
    >>> reg=deflex_regions('de21')
    >>> divide_off_and_onshore(reg).offshore
    ['DE19', 'DE20', 'DE21']
    """
    region_type = namedtuple("RegionType", "offshore onshore")

    # Represent every region by its centroid.
    centroids = regions.copy()
    centroids.geometry = centroids.centroid

    germany_onshore = geo.load(
        cfg.get("paths", "geometry"), cfg.get("geometry", "germany_polygon")
    )

    gdf = geo.spatial_join_with_buffer(
        centroids, germany_onshore, "onshore", limit=0
    )

    # Centroids that matched the polygon get the value 0, the rest stays
    # "unknown" and is treated as offshore.
    onshore_ids = list(gdf.loc[gdf.onshore == 0].index)
    offshore_ids = list(gdf.loc[gdf.onshore == "unknown"].index)
    return region_type(offshore=offshore_ids, onshore=onshore_ids)
def setup_func():
    """Download demand input files from osf."""
    path = cfg.get("paths", "demand")
    sources = [
        (
            "https://osf.io/m435r/download",
            "heat_profile_state_2014_weather_2014.csv",
        ),
        ("https://osf.io/6vmdh/download", "oep_ego_demand_combined.h5"),
    ]
    for url, file in sources:
        download_file(os.path.join(path, file), url)
def deflex_power_lines(rmap=None, rtype="lines"):
    """
    Load the power lines (or labels) of a deflex map.

    Parameters
    ----------
    rmap : str
        Name of the deflex powerline map.
    rtype : str
        Type of the deflex powerline map ('lines', 'labels').

    Returns
    -------

    Examples
    --------
    >>> lines=deflex_power_lines('de17')
    >>> lines.geometry.iloc[0].geom_type
    'LineString'
    >>> len(lines)
    31
    >>> deflex_power_lines('de02').index[0]
    'DE01-DE02'
    >>> cfg.tmp_set('init', 'map', 'de21')
    >>> deflex_power_lines().name
    'de21'
    """
    if rmap is None:
        rmap = cfg.get("init", "map")

    basename = cfg.get("geometry", "powerlines").format(
        map=rmap, type=rtype, suffix=".geojson"
    )
    fullname = os.path.join(cfg.get("paths", "geo_deflex"), basename)

    lines = geo.load(fullname=fullname)
    lines.set_index("name", inplace=True)
    lines.name = rmap
    return lines
def deflex_regions(rmap=None, rtype="polygons"):
    """
    Load the polygons (or labels) of a deflex map from the package data.

    Parameters
    ----------
    rmap : str
        Name of the deflex map.
    rtype : str
        Type of the deflex map ('polygon', 'labels').

    Returns
    -------
    GeoDataFrame

    Examples
    --------
    >>> my_regions=deflex_regions('de17')
    >>> len(my_regions)
    17
    >>> my_regions.geometry.iloc[0].geom_type
    'MultiPolygon'
    >>> l=deflex_regions('de21', 'labels').loc['DE04', 'geometry']
    >>> l.geom_type
    'Point'
    >>> l.x
    13.2
    >>> l.y
    51.1
    >>> deflex_regions(rmap="de22").name
    'de22'
    >>> list(deflex_regions('de02').index)
    ['DE01', 'DE02']
    """
    basename = cfg.get("geometry", "deflex_polygon").format(
        suffix=".geojson", map=rmap, type=rtype
    )
    fullname = os.path.join(
        os.path.dirname(__file__), "data", "geometries", basename
    )

    regions = gpd.read_file(fullname)
    regions.set_index("region", inplace=True)
    regions.name = rmap
    return regions
def test_get_function():
    """Read config file."""
    config.init(
        files=[
            os.path.join(os.path.dirname(__file__), "data", "config_test.ini")
        ]
    )
    assert config.get("type_tester", "my_bool")
    assert isinstance(config.get("type_tester", "my_int"), int)
    assert isinstance(config.get("type_tester", "my_float"), float)
    assert isinstance(config.get("type_tester", "my_string"), str)
    assert config.get("type_tester", "my_None") is None
    # A list option is returned as a plain string by get()...
    assert isinstance(config.get("type_tester", "my_list"), str)
    # ...and split into items by get_list().
    assert int(config.get_list("type_tester", "my_list")[2]) == 7
def divide_off_and_onshore(regions):
    """
    Sort regions into onshore and offshore regions. A namedtuple with two
    list of regions ids will be returned. Fetch the `onshore` and `offshore`
    attribute of the named tuple to get the list.

    Parameters
    ----------
    regions : GeoDataFrame
        A region set with the region id in the index.

    Returns
    -------
    named tuple

    Examples
    --------
    >>> reg=deflex_regions('de02')
    >>> divide_off_and_onshore(reg).onshore
    ['DE01']
    >>> reg=deflex_regions('de21')
    >>> divide_off_and_onshore(reg).offshore
    ['DE19', 'DE20', 'DE21']
    """
    region_type = namedtuple("RegionType", "offshore onshore")

    # Compute the centroids in a metric CRS and convert back to lat/lon.
    centroids = regions.copy()
    centroids.geometry = centroids.to_crs(epsg=25832).centroid.to_crs(
        epsg="4326"
    )

    germany_onshore = gpd.read_file(
        os.path.join(
            os.path.dirname(__file__),
            "data",
            "geometries",
            cfg.get("geometry", "germany_polygon"),
        )
    )

    joined = gpd.sjoin(centroids, germany_onshore, how="left", op="within")

    # Centroids that did not fall into the onshore polygon have no 'gid'.
    offshore_ids = list(joined.loc[joined.gid.isnull()].index)
    onshore_ids = list(joined.loc[~joined.gid.isnull()].index)
    return region_type(offshore=offshore_ids, onshore=onshore_ids)
def scenario_chp(table_collection, regions, year, name, weather_year=None):
    """
    Create the chp tables for the scenario.

    Parameters
    ----------
    table_collection
    regions
    year
    name
    weather_year

    Returns
    -------

    Examples
    --------
    >>> regions=geometries.deflex_regions(rmap='de21')  # doctest: +SKIP
    >>> pp=scenario_powerplants(dict(), regions, 2014, 'de21', 1
    ...     )  # doctest: +SKIP
    >>> int(pp['transformer'].loc['capacity', ('DE01', 'hard coal')]
    ...     )  # doctest: +SKIP
    1291
    >>> transf=scenario_chp(pp, regions, 2014, 'de21')  # doctest: +SKIP
    >>> transf=transf['transformer']  # doctest: +SKIP
    >>> int(transf.loc['capacity', ('DE01', 'hard coal')])  # doctest: +SKIP
    485
    >>> int(transf.loc['capacity_elec_chp', ('DE01', 'hard coal')]
    ...     )  # doctest: +SKIP
    806
    """
    # values from heat balance
    cb = energy_balance.get_transformation_balance_by_region(
        regions, year, name
    )
    cb.rename(
        columns={"re": cfg.get("chp", "renewable_source")}, inplace=True
    )
    heat_balance = reegis_powerplants.calculate_chp_share_and_efficiency(cb)
    heat_demand = demand.get_heat_profiles_deflex(
        regions, year, weather_year=weather_year
    )
    return chp_table(heat_balance, heat_demand, table_collection)
def deflex_power_lines(rmap=None, rtype="lines"):
    """
    Load the power lines (or labels) of a deflex map from the package data.

    Parameters
    ----------
    rmap : str
        Name of the deflex powerline map.
    rtype : str
        Type of the deflex powerline map ('lines', 'labels').

    Returns
    -------

    Examples
    --------
    >>> my_lines=deflex_power_lines('de17')
    >>> my_lines.geometry.iloc[0].geom_type
    'LineString'
    >>> len(my_lines)
    31
    >>> deflex_power_lines('de02').index[0]
    'DE01-DE02'
    >>> deflex_power_lines(rmap="de21").name
    'de21'
    """
    basename = cfg.get("geometry", "powerlines").format(
        map=rmap, type=rtype, suffix=".geojson"
    )
    fullname = os.path.join(
        os.path.dirname(__file__), "data", "geometries", basename
    )

    lines = gpd.read_file(fullname)
    lines.set_index("name", inplace=True)
    lines.name = rmap
    return lines
def create_basic_scenario(
    year,
    rmap=None,
    path=None,
    csv_dir=None,
    xls_name=None,
    round_values=None,
    only_out=None,
):
    """
    Create a basic scenario for a given year and region-set and store it
    into an excel-file or csv-collection.

    Parameters
    ----------
    year : int
    rmap : str
    path : str
    csv_dir : str
    xls_name : str
    round_values : bool
    only_out : str

    Returns
    -------
    namedtuple : Path

    Examples
    --------
    >>> year=2014  # doctest: +SKIP
    >>> my_rmap='de21'  # doctest: +SKIP
    >>> p=create_basic_scenario(year, rmap=my_rmap)  # doctest: +SKIP
    >>> print("Xls path: {0}".format(p.xls))  # doctest: +SKIP
    >>> print("Csv path: {0}".format(p.csv))  # doctest: +SKIP
    """
    paths = namedtuple("paths", "xls, csv")

    if rmap is not None:
        cfg.tmp_set("init", "map", rmap)
    name = cfg.get("init", "map")
    regions = geometries.deflex_regions(rmap=cfg.get("init", "map"))

    table_collection = create_scenario(regions, year, name, round_values)
    table_collection = clean_time_series(table_collection)

    name = "{0}_{1}_{2}".format("deflex", year, cfg.get("init", "map"))
    scenario = scenario_tools.Scenario(
        table_collection=table_collection, name=name, year=year
    )

    if path is None:
        path = os.path.join(cfg.get("paths", "scenario"), "deflex", str(year))

    # Resolve the csv output path (None suppresses the csv output).
    if only_out == "xls":
        csv_path = None
    elif csv_dir is None:
        csv_path = os.path.join(path, "{0}_csv".format(name))
    else:
        csv_path = os.path.join(path, csv_dir)

    # Resolve the xls output path (None suppresses the xls output).
    if only_out == "csv":
        xls_path = None
    elif xls_name is None:
        xls_path = os.path.join(path, name + ".xls")
    else:
        xls_path = os.path.join(path, xls_name)

    result = paths(xls=xls_path, csv=csv_path)
    if only_out != "csv":
        os.makedirs(path, exist_ok=True)
        scenario.to_excel(result.xls)
    if only_out != "xls":
        os.makedirs(csv_path, exist_ok=True)
        scenario.to_csv(result.csv)
    return result
def create_scenario(regions, year, name, lines, opsd_version=None):
    """
    Assemble all tables of a basic deflex scenario.

    Parameters
    ----------
    regions : GeoDataFrame
        Deflex regions with the region id in the index.
    year : int
        Year of the scenario.
    name : str
        Name of the deflex map (e.g. 'de21').
    lines : iterable[str]
        A list of names of transmission lines. All name must contain a dash
        between the id of the regions (FromRegion-ToRegion).
    opsd_version : str or None
        Version of the OPSD data set; chosen automatically for years before
        2015 if None.

    Returns
    -------
    dict
        Mapping of table names to pandas DataFrames.
    """
    if opsd_version is None:
        # Years before 2015 need the fixed 2019-06-05 OPSD release.
        if year < 2015:
            opsd_version = "2019-06-05"

    table_collection = {"general": pd.DataFrame()}

    logging.info("BASIC SCENARIO - STORAGES")
    table_collection["electricity storages"] = storages.scenario_storages(
        regions, year, name)

    logging.info("BASIC SCENARIO - POWER PLANTS")
    pp = powerplants.scenario_powerplants(table_collection, regions, year,
                                          name)
    table_collection["volatile plants"] = pp["volatile plants"]
    table_collection["power plants"] = pp["power plants"]

    logging.info("BASIC SCENARIO - TRANSMISSION")
    # A single-region scenario has no transmission lines.
    if len(regions) > 1:
        table_collection["power lines"] = transmission.scenario_transmission(
            regions, lines)
    else:
        logging.info("...skipped")

    logging.info("BASIC SCENARIO - CHP PLANTS")
    if cfg.get("creator", "heat"):
        # scenario_chp also updates the power plants table (capacity split
        # between chp and condensing plants).
        chp = powerplants.scenario_chp(table_collection, regions, year, name)
        table_collection["heat-chp plants"] = chp["heat-chp plants"]
        table_collection["power plants"] = chp["power plants"]
    else:
        logging.info("...skipped")

    logging.info("BASIC SCENARIO - DECENTRALISED HEAT")
    if cfg.get("creator", "heat"):
        table_collection[
            "decentralised heat"] = scenario_default_decentralised_heat()
    else:
        logging.info("...skipped")

    logging.info("BASIC SCENARIO - SOURCES")
    cs = commodity.scenario_commodity_sources(year)
    # The co2 price is moved from the commodity table into the general table.
    table_collection["general"].loc["co2 price", "value"] = cs.pop(
        "co2_price").iloc[0]
    table_collection["commodity sources"] = cs
    table_collection["volatile series"] = feedin.scenario_feedin(
        regions, year, name)

    logging.info("BASIC SCENARIO - DEMAND")
    table_collection.update(
        demand.scenario_demand(
            regions,
            year,
            name,
            opsd_version=opsd_version,
        ))

    logging.info("BASIC SCENARIO - MOBILITY")
    if cfg.get("creator", "mobility"):
        table_collection = mobility.scenario_mobility(year, table_collection)
    else:
        logging.info("...skipped")

    logging.info("ADD GENERAL DATA")
    # general_data() reads the demand series added above, so it must run last.
    table_collection["general"] = pd.concat(
        [table_collection["general"], general_data(year, table_collection)])

    table_collection["info"] = meta_data()
    logging.info("ADD META DATA")
    return table_collection
def get_electrical_transmission_renpass(both_directions=False):
    """
    Prepare the transmission capacity and distance between de21 regions from
    the renpass database. The original table of the reegis database is
    transferred to a csv file, which is part of the reegis package. As
    renpass is deprecated it will not change in the future.

    The index uses the format 'region1-region2'. The distance is taken from
    centroid to centroid. By default every region pair exists only once. It
    is possible to get an entry in both directions if the parameter
    `both_directions` is set True.

    The capacity calculation is taken from the description of the renpass
    package [1]_. The data is taken from the renpass database [2]_.

    This function is only valid for the original renpass region set.

    Parameters
    ----------
    both_directions : bool
        If True any line will be replicated in the reverse direction.

    Returns
    -------
    pd.DataFrame
        Transmission capacity and distance between regions

    References
    ----------
    .. [1] Wiese, Frauke (2015). „Renewable Energy Pathways Simulation
        System – Open Source as an approach to meet challenges in energy
        modeling“. Diss. University of Flensburg. URL :
        https://www.reiner-lemoine-stiftung.de/pdf/dissertationen/Dissertation_Frauke_Wiese.pdf. (page 49)
    .. [2] Wiese, F.: Renpass - Renewable Energy Pathways Simulation System,
        https://github.com/fraukewiese/renpass

    Examples
    --------
    >>> df=get_electrical_transmission_renpass()
    >>> int(df.loc['DE11-DE17', 'capacity'])
    2506
    >>> int(df.loc['DE18-DE17', 'distance'])
    119
    >>> df.loc['DE08-DE06']
    capacity    7519.040402
    distance     257.000000
    Name: DE08-DE06, dtype: float64
    >>> df=get_electrical_transmission_renpass(both_directions=True)
    >>> int(df.loc['DE11-DE17', 'capacity'])
    2506
    >>> int(df.loc['DE17-DE11', 'capacity'])
    2506
    """
    f_security = cfg.get("transmission", "security_factor")
    current_max = cfg.get("transmission", "current_max")

    grid = pd.read_csv(
        os.path.join(
            cfg.get("paths", "data_deflex"),
            cfg.get("transmission", "transmission_renpass"),
        )
    )

    # Thermal capacity of a three-phase line from circuits, current and
    # voltage (see [1], page 49).
    grid["capacity_calc"] = (
        grid.circuits
        * current_max
        * grid.voltage
        * f_security
        * math.sqrt(3)
        / 1000
    )

    pwr_lines = pd.DataFrame(geometries.deflex_power_lines())

    for line in pwr_lines.index:
        region_a, region_b = line.split("-")
        # Map the region ids to the renpass node ids (e.g. DE01 -> 11001).
        node_a = int("110{0}".format(region_a[2:]))
        node_b = int("110{0}".format(region_b[2:]))
        cap_ab, dist_ab = get_grid_capacity(grid, node_a, node_b)
        cap_ba, dist_ba = get_grid_capacity(grid, node_b, node_a)
        # The renpass table holds each pair in one direction only.
        if cap_ab == 0 and cap_ba == 0:
            pwr_lines.loc[line, "capacity"] = 0
            pwr_lines.loc[line, "distance"] = 0
        elif cap_ab == 0:
            pwr_lines.loc[line, "capacity"] = cap_ba
            pwr_lines.loc[line, "distance"] = dist_ba
        elif cap_ba == 0:
            pwr_lines.loc[line, "capacity"] = cap_ab
            pwr_lines.loc[line, "distance"] = dist_ab

    df = pwr_lines[["capacity", "distance"]]
    if both_directions is True:
        df = add_reverse_direction(df)
    return df
def model_scenario(
    xls_file=None,
    csv_path=None,
    res_path=None,
    name="noname",
    rmap=None,
    year="unknown",
    es=None,
    plot_graph=False,
    extra_regions=None,
):
    """
    Compute a deflex scenario.

    Parameters
    ----------
    xls_file : str
        Full filename to a valid xls-file.
    csv_path : str
        Full path to a valid csv-collection.
    res_path : str
        Path to store the output file. If None the results will be stored
        along with the scenarios.
    name : str
        The name of the scenario.
    year : int
        The year or year-range of the scenario.
    rmap : str
        The name of the used region map.
    es : oemof.solph.EnergySystem
        A valid energy system if needed.
    plot_graph : bool
        Set to True to plot the energy system.
    extra_regions : list
        Use separate resource buses for these regions.

    Returns
    -------

    Examples
    --------
    >>> model_scenario('/my/path/to/scenario.xls', name='my_scenario',
    ...     rmap='deXX', year=2025)  # doctest: +SKIP
    """
    stopwatch()

    # Exactly one input (csv collection OR xls file) may be given.
    if xls_file is not None and csv_path is not None:
        raise ValueError("It is not allowed to define more than one input.")

    meta = {
        "year": year,
        "model_base": "deflex",
        "map": rmap,
        "solver": cfg.get("general", "solver"),
        "start_time": datetime.now(),
    }

    sc = scenario_tools.DeflexScenario(name=name, year=2014, meta=meta)
    if es is not None:
        sc.es = es

    # Load the input; by default the results go next to the input file.
    if csv_path is not None:
        if res_path is None:
            res_path = os.path.dirname(csv_path)
        logging.info("Read scenario from csv collection: {0}".format(
            stopwatch()))
        sc.load_csv(csv_path)
    elif xls_file is not None:
        if res_path is None:
            res_path = os.path.dirname(xls_file)
        logging.info("Read scenario from xls-file: {0}".format(stopwatch()))
        sc.load_excel(xls_file)

    if extra_regions is not None:
        sc.extra_regions = extra_regions

    logging.info("Add nodes to the EnergySystem: {0}".format(stopwatch()))
    sc.table2es()

    # Save energySystem to '.graphml' file if plot_graph is True
    if plot_graph:
        sc.plot_nodes(
            filename=os.path.join(res_path, name),
            remove_nodes_with_substrings=["bus_cs"],
        )

    logging.info("Create the concrete model: {0}".format(stopwatch()))
    sc.create_model()

    logging.info("Solve the optimisation model: {0}".format(stopwatch()))
    sc.solve(solver=cfg.get("general", "solver"))

    logging.info("Solved. Dump results: {0}".format(stopwatch()))
    # Results are dumped into a solver-specific sub-directory.
    res_path = os.path.join(
        res_path, "results_{0}".format(cfg.get("general", "solver")))
    os.makedirs(res_path, exist_ok=True)
    out_file = os.path.join(res_path, name + ".esys")
    logging.info("Dump file to {0}".format(out_file))
    sc.meta["end_time"] = datetime.now()
    sc.dump_es(out_file)

    logging.info("All done. deflex finished without errors: {0}".format(
        stopwatch()))