def setup_class(cls): path = os.path.join(TEST_PATH, "de21_no-heat_csv") sc = st.DeflexScenario() sc.read_csv(path) cls.tables = sc.input_data tmp_tables = {} name = "heat_demand_deflex" fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv") tmp_tables[name] = pd.read_csv(fn, index_col=[0], header=[0, 1]) name = "transformer_balance" fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv") tmp_tables[name] = pd.read_csv(fn, index_col=[0, 1, 2], header=[0]) powerplants.scenario_powerplants = MagicMock( return_value={ "volatile plants": cls.tables["volatile plants"], "power plants": cls.tables["power plants"], }) feedin.scenario_feedin = MagicMock( return_value=cls.tables["volatile series"]) demand_table = { "electricity demand series": cls.tables["electricity demand series"] } demand.scenario_demand = MagicMock(return_value=demand_table) my_parameter = { "copperplate": True, "group_transformer": False, "heat": False, "use_variable_costs": True, "use_CO2_costs": True, "map": "de21", } my_name = "deflex" for k, v in my_parameter.items(): my_name += "_" + str(k) + "-" + str(v) polygons = deflex_regions(rmap=my_parameter["map"], rtype="polygons") lines = deflex_power_lines(my_parameter["map"]).index base = os.path.join(os.path.expanduser("~"), ".tmp_x345234dE_deflex") os.makedirs(base, exist_ok=True) path = os.path.join(base, "deflex_test{0}") name = "deflex_2014_de21_no-heat" scenario_creator.create_basic_reegis_scenario( name=name, regions=polygons, lines=lines, parameter=my_parameter, excel_path=path.format(".xlsx"), csv_path=path.format("_csv"), ) sc_new = st.DeflexScenario() sc_new.read_csv(path.format("_csv")) cls.input_data = sc_new.input_data
def test_02_create_deflex_powerplants():
    de = geometries.deflex_regions("de21")
    fn_in = os.path.join(cfg.get("paths", "powerplants"), "reegis_pp_test.h5")
    fn_out = os.path.join(cfg.get("paths", "powerplants"), "deflex_pp_test.h5")
    powerplants.pp_reegis2deflex(
        de, "de21", filename_in=fn_in, filename_out=fn_out
    )
def scenario_feedin_test():
    """Test scenario feed-in."""
    cfg.tmp_set("init", "map", "de21")
    regions = geometries.deflex_regions(rmap="de21")
    f = basic_scenario.scenario_feedin(regions, 2014, "de21")
    eq_(int(f["DE01"].sum()["wind"]), 2159)
    eq_(int(f["DE01"].sum()["solar"]), 913)
    eq_(int(f["DE16"].sum()["wind"]), 1753)
def test_prevent_mutable_region_object():
    """Make sure the region object is not mutated."""
    reg = geometries.deflex_regions("de21")
    eq_(reg.geometry.iloc[0].geom_type, "MultiPolygon")
    eq_(
        geometries.divide_off_and_onshore(reg).offshore,
        ["DE19", "DE20", "DE21"],
    )
    eq_(reg.geometry.iloc[0].geom_type, "MultiPolygon")
def test_03_not_existing_file():
    old_value = cfg.get("paths", "powerplants")
    cfg.tmp_set("paths", "powerplants", "/home/pet/")
    de = geometries.deflex_regions("de22")
    powerplants.pp_reegis2deflex = MagicMock(return_value="/home/pet/pp.h5")
    with assert_raises_regexp(
        Exception, "File /home/pet/pp.h5 does not exist"
    ):
        powerplants.get_deflex_pp_by_year(de, 2012, "de22")
    cfg.tmp_set("paths", "powerplants", old_value)
def test_scenario_creation():
    data = {}
    for name in ["volatile_series", "demand_series"]:
        fn = os.path.join(
            os.path.dirname(__file__),
            "data",
            "deflex_2014_de21_test_csv",
            name + ".csv",
        )
        data[name] = pd.read_csv(fn, index_col=[0], header=[0, 1])

    name = "heat_demand_deflex"
    fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv")
    data[name] = pd.read_csv(fn, index_col=[0], header=[0, 1])

    name = "transformer_balance"
    fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv")
    data[name] = pd.read_csv(fn, index_col=[0, 1, 2], header=[0])

    basic_scenario.scenario_feedin = MagicMock(
        return_value=data["volatile_series"]
    )
    basic_scenario.scenario_demand = MagicMock(
        return_value=data["demand_series"]
    )
    energy_balance.get_transformation_balance_by_region = MagicMock(
        return_value=data["transformer_balance"]
    )
    demand.get_heat_profiles_deflex = MagicMock(
        return_value=data["heat_demand_deflex"]
    )

    regions = geometries.deflex_regions(rmap="de21")
    table_collection = basic_scenario.create_scenario(regions, 2014, "de21")
    eq_(
        sorted(list(table_collection.keys())),
        sorted(
            [
                "storages",
                "transformer",
                "volatile_source",
                "transmission",
                "decentralised_heat",
                "commodity_source",
                "volatile_series",
                "demand_series",
                "mobility_energy_content",
                "mobility_mileage",
                "mobility_spec_demand",
            ]
        ),
    )
    eq_(len(list(table_collection.keys())), 11)
def more_heat_pumps(sc, heat_pump_fraction, cop):
    year = 2014
    abs_decentr_heat = sc.table_collection["demand_series"]["DE_demand"].sum(
        axis=1
    )
    heat_pump = abs_decentr_heat * heat_pump_fraction
    sc.table_collection["demand_series"]["DE_demand"] *= 1 - heat_pump_fraction

    deflex_regions = geometries.deflex_regions(rmap=sc.map)
    name = "{0}_region".format(sc.map)
    inhab = inhabitants.get_inhabitants_by_region(
        year, deflex_regions, name=name
    )
    inhab_fraction = inhab.div(inhab.sum())

    for region in inhab_fraction.index:
        if inhab_fraction.loc[region] > 0:
            sc.table_collection["demand_series"][
                (region, "electrical_load")
            ] += inhab_fraction.loc[region] * heat_pump.div(cop)
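# Usage sketch (illustrative only): shift 20 % of the decentralised heat
# demand to heat pumps with an average COP of 3. Assumes `sc` is a scenario
# object (e.g. scenario_tools.Scenario) whose "demand_series" table carries a
# "DE_demand" column group and per-region "electrical_load" columns, and whose
# `map` attribute names the region set (e.g. "de21"). The variable names and
# numbers below are placeholders, not project values.
#
# sc = scenario_tools.Scenario(table_collection=my_tables, name="example",
#                              year=2014)
# more_heat_pumps(sc, heat_pump_fraction=0.2, cop=3)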
    Dataframe containing aggregated yearly power demand (households, CTS and
    industry) for the region selection
    -------
    """
    if elc_data is None:
        elc_data = get_demandregio_electricity_consumption_by_nuts3(year)

    agg_power = pd.DataFrame(
        index=regions.index, columns=['households', 'CTS', 'industry']
    )
    nuts3_list = get_nutslist_for_regions(regions)

    for zone in regions.index:
        idx = nuts3_list.loc[zone]['nuts']
        agg_power.loc[zone, 'households'] = elc_data['households'][idx].sum()
        agg_power.loc[zone, 'CTS'] = elc_data['CTS'][idx].sum()
        agg_power.loc[zone, 'industry'] = elc_data['industry'][idx].sum()

    return agg_power


regions = geo_deflex.deflex_regions(rmap='de22')
# test_el = get_demandregio_electricity_consumption_by_nuts3(2016)
# test_heat = get_combined_heatload_for_region(2016)
aggtest1 = aggregate_power_by_region(regions, 2015)
aggtest2 = aggregate_power_by_region(regions, 2016)
aggtest3 = aggregate_heat_by_region(regions, 2015)
aggtest4 = aggregate_heat_by_region(regions, 2016)
# power_DE22 = aggregate_power_by_region(regions, 2015)
# heat_DE22 = aggregate_heat_by_region(regions, 2015)
def plot_power_lines(
    data,
    key,
    cmap_lines=None,
    cmap_bg=None,
    direction=True,
    vmax=None,
    label_min=None,
    label_max=None,
    unit="GWh",
    size=None,
    ax=None,
    legend=True,
    unit_to_label=False,
    divide=1,
    decimal=0,
):
    """
    Parameters
    ----------
    data
    key
    cmap_lines
    cmap_bg
    direction
    vmax
    label_min
    label_max
    unit
    size
    ax
    legend
    unit_to_label
    divide
    decimal

    Returns
    -------

    """
    if size is None and ax is None:
        ax = plt.figure(figsize=(5, 5)).add_subplot(1, 1, 1)
    elif size is not None and ax is None:
        ax = plt.figure(figsize=size).add_subplot(1, 1, 1)

    if unit_to_label is True:
        label_unit = unit
    else:
        label_unit = ""

    lines = reegis.geometries.load(
        cfg.get("paths", "geometry"), cfg.get("geometry", "de21_power_lines")
    )
    polygons = d_geometries.deflex_regions(rmap="de21", rtype="polygons")

    lines = lines.merge(data.div(divide), left_index=True, right_index=True)
    lines["centroid"] = lines.centroid

    if cmap_bg is None:
        cmap_bg = LinearSegmentedColormap.from_list(
            "mycmap", [(0, "#aed8b4"), (1, "#bddce5")]
        )

    if cmap_lines is None:
        cmap_lines = LinearSegmentedColormap.from_list(
            "mycmap",
            [(0, "#aaaaaa"), (0.0001, "green"), (0.5, "yellow"), (1, "red")],
        )

    offshore = d_geometries.divide_off_and_onshore(polygons).offshore
    polygons["color"] = 0
    polygons.loc[offshore, "color"] = 1

    lines["reverse"] = lines[key] < 0

    # if direction is False:
    lines.loc[lines["reverse"], key] = lines.loc[lines["reverse"], key] * -1

    if vmax is None:
        vmax = lines[key].max()

    if label_min is None:
        label_min = vmax * 0.5

    if label_max is None:
        label_max = float("inf")

    ax = polygons.plot(
        edgecolor="#9aa1a9",
        cmap=cmap_bg,
        column="color",
        ax=ax,
        aspect="equal",
    )

    ax = lines.plot(
        cmap=cmap_lines,
        legend=legend,
        ax=ax,
        column=key,
        vmin=0,
        vmax=vmax,
        aspect="equal",
    )

    for i, v in lines.iterrows():
        x1 = v["geometry"].coords[0][0]
        y1 = v["geometry"].coords[0][1]
        x2 = v["geometry"].coords[1][0]
        y2 = v["geometry"].coords[1][1]

        value_relative = v[key] / vmax
        mc = cmap_lines(value_relative)

        orient = math.atan(abs(x1 - x2) / abs(y1 - y2))
        if (y1 > y2) & (x1 > x2) or (y1 < y2) & (x1 < x2):
            orient *= -1

        if v["reverse"]:
            orient += math.pi

        if v[key] == 0 or not direction:
            polygon = patches.RegularPolygon(
                (v["centroid"].x, v["centroid"].y),
                4,
                0.15,
                orientation=orient,
                color=(0, 0, 0, 0),
                zorder=10,
            )
        else:
            polygon = patches.RegularPolygon(
                (v["centroid"].x, v["centroid"].y),
                3,
                0.15,
                orientation=orient,
                color=mc,
                zorder=10,
            )
        ax.add_patch(polygon)

        if decimal == 0:
            value = int(round(v[key]))
        else:
            value = round(v[key], decimal)

        if label_min <= value <= label_max:
            if v["reverse"] is True and direction is False:
                value *= -1
            ax.text(
                v["centroid"].x,
                v["centroid"].y,
                "{0} {1}".format(value, label_unit),
                color="#000000",
                fontsize=9.5,
                zorder=15,
                path_effects=[
                    path_effects.withStroke(linewidth=3, foreground="w")
                ],
            )

    for spine in plt.gca().spines.values():
        spine.set_visible(False)
    ax.axis("off")

    polygons.apply(
        lambda x: ax.annotate(
            x.name, xy=x.geometry.centroid.coords[0], ha="center"
        ),
        axis=1,
    )
    return ax
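# Usage sketch (illustrative only): plot yearly transmission on the de21 line
# set. Assumes `my_transmission` is a DataFrame indexed like the de21
# power-line table (e.g. "DE01-DE02") with an "annual_sum" column in MWh;
# both names are placeholders, not part of this module.
#
# ax = plot_power_lines(my_transmission, "annual_sum", divide=1000,
#                       unit="GWh", unit_to_label=True)
# plt.show()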
def setup_class(cls): path = os.path.join(TEST_PATH, "de22_heat_transmission_csv") sc = st.DeflexScenario() sc.read_csv(path) cls.tables = sc.input_data tmp_tables = {} parameter = { "costs_source": "ewi", "downtime_bioenergy": 0.1, "limited_transformer": "bioenergy", "local_fuels": "district heating", "map": "de22", "mobility_other": "petrol", "round": 1, "separate_heat_regions": "de22", "copperplate": False, "default_transmission_efficiency": 0.9, "group_transformer": False, "heat": True, "use_CO2_costs": True, "use_downtime_factor": True, "use_variable_costs": False, "year": 2014, } config.init(paths=[os.path.dirname(dfile)]) for option, value in parameter.items(): cfg.tmp_set("creator", option, str(value)) config.tmp_set("creator", option, str(value)) name = "heat_demand_deflex" fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv") tmp_tables[name] = pd.read_csv(fn, index_col=[0], header=[0, 1]) name = "transformer_balance" fn = os.path.join(os.path.dirname(__file__), "data", name + ".csv") tmp_tables[name] = pd.read_csv(fn, index_col=[0, 1, 2], header=[0]) powerplants.scenario_powerplants = MagicMock( return_value={ "volatile plants": cls.tables["volatile plants"], "power plants": cls.tables["power plants"], }) powerplants.scenario_chp = MagicMock( return_value={ "heat-chp plants": cls.tables["heat-chp plants"], "power plants": cls.tables["power plants"], }) feedin.scenario_feedin = MagicMock( return_value=cls.tables["volatile series"]) demand_table = { "electricity demand series": cls.tables["electricity demand series"], "heat demand series": cls.tables["heat demand series"], } demand.scenario_demand = MagicMock(return_value=demand_table) name = "deflex_2014_de22_heat_transmission" polygons = deflex_regions(rmap=parameter["map"], rtype="polygons") lines = deflex_power_lines(parameter["map"]).index cls.input_data = scenario_creator.create_scenario( polygons, 2014, name, lines)
# Define installable capacity per square meter in MW
p_per_qm_wind = 8 / 1e6  # 8 W/m² of area
p_per_qm_pv = 200 / 1e6  # 200 W/m² of area -> eta = 20 %

# Calculate maximum installable capacity for onshore wind and rooftop PV
P_max_wind = suitable_area['wind_area'] * p_per_qm_wind
P_max_pv = suitable_area['pv_area'] * p_per_qm_pv

# Load NUTS3 mixed COPs
nuts3_cops = pd.read_csv('/home/dbeier/Daten/COP_NUTS3.csv')
nuts3_cops.drop('Unnamed: 0', axis='columns', inplace=True)
nuts3_cops.set_index(
    pd.date_range('1/1/2014', periods=8760, freq='H'), inplace=True
)

# Get indices for the zones of interest
de22_list = geo_deflex.deflex_regions(rmap='de22', rtype='polygons').index
de17_list = geo_reegis.get_federal_states_polygon().index

# Aggregate values for de17 and de22 regions to prepare for
# Create empty DataFrames
dflx_input = pd.DataFrame(
    index=de22_list, columns=['power', 'lt-heat', 'ht-heat', 'P_wind', 'P_pv']
)
dflx_input_fedstates = pd.DataFrame(
    index=de17_list, columns=['power', 'lt-heat', 'ht-heat', 'P_wind', 'P_pv']
)
dflx_cop_de17_heat = pd.DataFrame(
    index=pd.date_range('1/1/2014', periods=8760, freq='H'), columns=de17_list
)
dflx_cop_de22_heat = pd.DataFrame(index=pd.date_range('1/1/2014', periods=8760,
def create_basic_scenario(
    year,
    rmap=None,
    path=None,
    csv_dir=None,
    xls_name=None,
    round_values=None,
    only_out=None,
):
    """
    Create a basic scenario for a given year and region-set and store it
    into an excel-file or csv-collection.

    Parameters
    ----------
    year : int
    rmap : str
    path : str
    csv_dir : str
    xls_name : str
    round_values : bool
    only_out : str

    Returns
    -------
    namedtuple : Path

    Examples
    --------
    >>> year=2014  # doctest: +SKIP
    >>> my_rmap='de21'  # doctest: +SKIP
    >>> p=create_basic_scenario(year, rmap=my_rmap)  # doctest: +SKIP
    >>> print("Xls path: {0}".format(p.xls))  # doctest: +SKIP
    >>> print("Csv path: {0}".format(p.csv))  # doctest: +SKIP

    """
    paths = namedtuple("paths", "xls, csv")

    if rmap is not None:
        cfg.tmp_set("init", "map", rmap)
    name = cfg.get("init", "map")

    regions = geometries.deflex_regions(rmap=cfg.get("init", "map"))

    table_collection = create_scenario(regions, year, name, round_values)
    table_collection = clean_time_series(table_collection)

    name = "{0}_{1}_{2}".format("deflex", year, cfg.get("init", "map"))
    sce = scenario_tools.Scenario(
        table_collection=table_collection, name=name, year=year
    )

    if path is None:
        path = os.path.join(cfg.get("paths", "scenario"), "deflex", str(year))

    if only_out == "xls":
        csv_path = None
    elif csv_dir is None:
        csv_path = os.path.join(path, "{0}_csv".format(name))
    else:
        csv_path = os.path.join(path, csv_dir)

    if only_out == "csv":
        xls_path = None
    elif xls_name is None:
        xls_path = os.path.join(path, name + ".xls")
    else:
        xls_path = os.path.join(path, xls_name)

    fullpath = paths(xls=xls_path, csv=csv_path)

    if not only_out == "csv":
        os.makedirs(path, exist_ok=True)
        sce.to_excel(fullpath.xls)
    if not only_out == "xls":
        os.makedirs(csv_path, exist_ok=True)
        sce.to_csv(fullpath.csv)

    return fullpath
    P_max = pd.read_csv(fn)
    P_max.set_index('nuts3', drop=True, inplace=True)
    agg_capacity = pd.DataFrame(index=regions.index, columns=["P_wind", "P_pv"])
    nuts3_list = integrate_demandregio.get_nutslist_for_regions(regions)

    for zone in regions.index:
        idx = nuts3_list.loc[zone]['nuts']
        agg_capacity.loc[zone, 'P_wind'] = P_max['P_wind'][idx].sum()
        agg_capacity.loc[zone, 'P_pv'] = P_max['P_pv'][idx].sum()

    return agg_capacity


regions = geo_deflex.deflex_regions(rmap='de17', rtype='polygons')
P_wind_pv = aggregate_capacity_by_region(regions)

# suitable_area = get_pv_wind_areas_by_nuts3()
# path = os.path.join(cfg.get("paths", "GLAES"), 'nuts3_geojson')
# test = calc_wind_pv_areas(path)
# de_area, ecWind = calculate_wind_area(DE)
# (de_area/1e6) / 357000
# wind_pv_area = get_pv_wind_areas_by_nuts3('/home/dbeier/git-projects/db_test_repo/nuts3_geojson/', create_geojson=True)

# Define the path where the nuts3 regions are stored or should be stored
# path = '/home/dbeier/git-projects/test_repo/nuts3_geojson/'
# Only necessary if the nuts3 vector files have not been created yet
def setUpClass(cls):
    cls.regions = geometries.deflex_regions(rmap="de21")
    cls.pp = basic_scenario.scenario_powerplants(
        dict(), cls.regions, 2014, "de21", 1
    )
### Within this script, relevant energy system data for a mid-term scenario
### is fetched and processed.
from disaggregator import data
from scenario_builder import cop_precalc, snippets, heatload_scenario_calculator
from deflex import geometries as geo_deflex
from reegis import land_availability_glaes, demand_disaggregator, entsoe, demand_heat
from scenario_builder import emobpy_processing
import pandas as pd
import os

# Set parameters and get data needed for all scenarios
nuts3_index = data.database_shapes().index
de21 = geo_deflex.deflex_regions(rmap='de21')
year = 2015

# The Excel file can also be found here:
# SeaDrive/Für meine Gruppen/QUARREE 100/02_Modellierung/09_Szenarien Q100
path_to_data = '/home/dbeier/reegis/data/scenario_data/commodity_sources_costs.xls'

# Get the ENTSO-E load profile from reegis
profile = entsoe.get_entsoe_load(2014).reset_index(drop=True)["DE_load_"]
norm_profile = profile.div(profile.sum())

heat_profiles_reegis = demand_heat.get_heat_profiles_by_region(
    de21, 2014, name='test'
)
profile_lt = snippets.return_normalized_domestic_profiles(
    de21, heat_profiles_reegis
)
profile_ht = snippets.return_normalized_industrial_profiles(
    de21, heat_profiles_reegis
)

# Fetch costs and emissions applicable for the scenario (sheet 1)
costs = snippets.get_cost_emission_scenario_data(path_to_data)
def scenario_demand_test():
    """Test scenario demand."""
    regions = geometries.deflex_regions(rmap="de21")
    d = basic_scenario.scenario_demand(regions, 2014, "de21")
    eq_(int(d["DE01", "district heating"].sum()), 18639262)
    eq_(int(d["DE05", "electrical_load"].sum()), 10069304)