def display_transmission(net: pypsa.Network):
    """Print a summary of the network's transmission links: capacities, flows and costs."""
    print('\n\n\n# --- TRANSMISSION --- #')

    # Nothing to report when the network has no links.
    if len(net.links.index) == 0:
        return

    print(f"Links capacity:\n{get_links_capacity(net)}\n")

    # One row per link carrier: energy flows, capacity factors and capex.
    summary = pd.concat([get_links_power(net).rename('Flows [TWh]'),
                         get_links_usage(net).rename('CF [%]'),
                         get_links_capex(net).rename('capex [M$]')],
                        axis=1)

    # Attach the per-km capital cost of each line technology.
    weight = sum(net.snapshot_weightings['objective'])
    for carrier in ('AC', 'DC'):
        summary.loc[carrier, 'ccost [M€/GW/km]'] = get_costs(carrier, weight)[0]

    print(f"Links flows & costs:\n{summary}\n")
def add_generators(network: pypsa.Network, tech: str) -> pypsa.Network:
    """
    Add conventional generators to a Network instance.

    Parameters
    ----------
    network: pypsa.Network
        A PyPSA Network instance with nodes associated to regions.
    tech: str
        Type of conventional generator (ccgt or ocgt)

    Returns
    -------
    network: pypsa.Network
        Updated network
    """
    logger.info(f"Adding {tech} generation.")

    # Bug fix: the two message fragments were concatenated without a space,
    # producing "...to addconventional generators."
    assert hasattr(network.buses, "onshore_region"), \
        "Some buses must be associated to an onshore region to add " \
        "conventional generators."

    # Conventional generators can only be attached to onshore buses.
    buses = network.buses.dropna(subset=["onshore_region"], axis=0)

    capital_cost, marginal_cost = get_costs(
        tech, sum(network.snapshot_weightings['objective']))

    # Get fuel type and efficiency
    fuel, efficiency = get_tech_info(tech, ["fuel", "efficiency_ds"])

    network.madd("Generator",
                 buses.index,
                 suffix=f" Gen {tech}",
                 bus=buses.index,
                 p_nom_extendable=True,
                 type=tech,
                 carrier=fuel,
                 efficiency=efficiency,
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost,
                 x=buses.x.values,
                 y=buses.y.values)

    return network
def get_topology(network: pypsa.Network, countries: List[str] = None,
                 p_nom_extendable: bool = True, extension_multiplier: float = None,
                 extension_base: str = 'GCA', use_ex_line_cap: bool = True,
                 p_max_pu: float = 1.0, plot: bool = False) -> pypsa.Network:
    """
    Load the TYNDP network topology (buses and links) using PyPSA.

    Parameters
    ----------
    network: pypsa.Network
        Network instance
    countries: List[str] (default: None)
        List of ISO codes of countries for which we want the tyndp topology.
    p_nom_extendable: bool (default: True)
        Whether line capacity is allowed to be expanded
    extension_multiplier: float (default: None)
        By how much the capacity can be extended if extendable. If None, no limit on expansion.
    extension_base: str (default: GCA)
        TYNDP 2040 scenario to use as the basis for computing the max potential of links,
        can be one of:
        - ST (Sustainable Transition): targets reached by national regulation, emission trading
          schemes and subsidies, maximising the use of existing infrastructure
          ~ 180 GW and 94 TWkm
        - DG (Distributed Generation): prosumers at the centre - small-scale generation,
          batteries and fuel-switching society engaged and empowered ~ 190 GW and 99 TWkm
        - GCA (Global Climate Action): full-speed global decarbonisation, large-scale
          renewables ~ 200 GW and 103 TWkm
        The three scenarios are quite similar in terms of NTCs with GCA being the most generous.
    use_ex_line_cap: bool (default True)
        Whether to use existing line capacity
    p_max_pu: float (default: 1.0)
        Maximal dispatch per unit of p_nom
    plot: bool (default: False)
        Whether to show loaded topology or not

    Returns
    -------
    network: pypsa.Network
        Updated network
    """
    # Bug fix: removed the spurious comma ("obtain, the full topology").
    assert countries is None or len(countries) != 0, \
        "Error: Countries list must not be empty. If you want to " \
        "obtain the full topology, don't pass anything as argument."
    assert extension_base in ['ST', 'DG', 'GCA'], \
        f"Error: extension_base must be one of ST, DG or GCA, received {extension_base}."

    topology_dir = f"{data_path}topologies/tyndp2018/generated/"
    buses_fn = f"{topology_dir}buses.csv"
    assert isfile(buses_fn), "Error: Buses are undefined. Please run 'preprocess'."
    buses = pd.read_csv(buses_fn, index_col='id')
    links_fn = f"{topology_dir}links.csv"
    assert isfile(links_fn), "Error: Links are undefined. Please run 'preprocess'."
    links = pd.read_csv(links_fn, index_col='id')

    if countries is not None:
        # Check if there is a bus for each country considered
        missing_countries = set(countries) - set(buses.index)
        assert not missing_countries, \
            f"Error: No buses exist for the following countries: {missing_countries}"
        # Remove buses that are not associated with the considered countries
        buses = buses.loc[buses.index.isin(countries)]
    countries = buses.index

    # Converting polygons strings to Polygon object
    for region_type in ["onshore_region", "offshore_region"]:
        regions = buses[region_type].values
        for i, region in enumerate(regions):
            if isinstance(region, str):
                regions[i] = shapely.wkt.loads(region)

    # If we have only one bus, add it to the network and return
    if len(buses) == 1:
        network.import_components_from_dataframe(buses, "Bus")
        return network

    # Remove links for which one of the two end buses has been removed
    links = pd.DataFrame(links.loc[links.bus0.isin(buses.index)
                                   & links.bus1.isin(buses.index)])

    # Removing offshore buses that are not connected anymore
    connected_buses = sorted(list(set(links["bus0"]).union(set(links["bus1"]))))
    buses = buses.loc[connected_buses]
    disconnected_onshore_bus = set(countries) - set(buses.index)
    assert not disconnected_onshore_bus, \
        f"Error: Buses {disconnected_onshore_bus} were disconnected."

    if not use_ex_line_cap:
        links['p_nom'] = 0
    links['p_nom_min'] = links['p_nom']
    links['p_max_pu'] = p_max_pu
    links['p_min_pu'] = -p_max_pu  # Making the link bi-directional
    links['p_nom_extendable'] = p_nom_extendable

    if p_nom_extendable:
        # Choose p_nom_max based on the selected TYNDP 2040 scenario.
        scenario_column = {'ST': 'p_nom_st', 'DG': 'p_nom_dg', 'GCA': 'p_nom_gca'}[extension_base]
        links['p_nom_max'] = links[scenario_column]
        links = links.drop(['p_nom_st', 'p_nom_dg', 'p_nom_gca'], axis=1)
        if extension_multiplier is not None:
            links['p_nom_max'] = (links['p_nom_max'] * extension_multiplier).round(3)
            links['p_nom_max'] = links[['p_nom_max', 'p_nom_min']].max(axis=1)
    else:
        # Fix: use a float rather than the string "inf" so the column keeps a
        # numeric dtype (the string forced object dtype before pypsa coerced it).
        links['p_nom_max'] = float('inf')

    # Compute the capital cost once per carrier instead of once per link:
    # the snapshot-weighting sum and get_costs() result are loop-invariant.
    weight = sum(network.snapshot_weightings['objective'])
    cost_per_carrier = {carrier: get_costs(carrier, weight)[0]
                        for carrier in links.carrier.unique()}
    links['capital_cost'] = links.carrier.map(cost_per_carrier) * links.length

    network.import_components_from_dataframe(buses, "Bus")
    network.import_components_from_dataframe(links, "Link")

    if plot:
        from epippy.topologies.core.plot import plot_topology
        plot_topology(buses, links)
        plt.show()

    return network
def get_topology(network: pypsa.Network, countries: List[str] = None,
                 add_offshore: bool = True, extend_line_cap: bool = True,
                 use_ex_line_cap: bool = True, plot: bool = False) -> pypsa.Network:
    """
    Load the e-highway network topology (buses and links) using PyPSA.

    Parameters
    ----------
    network: pypsa.Network
        Network instance
    countries: List[str] (default: None)
        List of ISO codes of countries for which we want the e-highway topology
    add_offshore: bool (default: True)
        Whether to include offshore nodes
    extend_line_cap: bool (default True)
        Whether line capacity is allowed to be expanded
    use_ex_line_cap: bool (default True)
        Whether to use existing line capacity
    plot: bool (default: False)
        Whether to show loaded topology or not

    Returns
    -------
    network: pypsa.Network
        Updated network
    """
    # Bug fix: removed the spurious comma ("obtain, the full topology").
    assert countries is None or len(countries) != 0, \
        "Error: Countries list must not be empty. If you want to " \
        "obtain the full topology, don't pass anything as argument."

    topology_dir = f"{data_path}topologies/e-highways/generated/"
    buses_fn = f"{topology_dir}buses.csv"
    assert isfile(buses_fn), "Error: Buses are undefined. Please run 'preprocess'."
    buses = pd.read_csv(buses_fn, index_col='id')
    lines_fn = f"{topology_dir}lines.csv"
    assert isfile(lines_fn), "Error: Lines are undefined. Please run 'preprocess'."
    lines = pd.read_csv(lines_fn, index_col='id')

    # Remove offshore buses if not considered
    if not add_offshore:
        buses = buses.dropna(subset=["onshore_region"])

    if countries is not None:
        # In e-highway, GB is referenced as UK
        iso_to_ehighway = {"GB": "UK"}
        ehighway_countries = [iso_to_ehighway[c] if c in iso_to_ehighway else c
                              for c in countries]

        # Keep onshore buses of the considered countries, plus all offshore
        # buses (identified by a country value that is not a string).
        def filter_buses(bus):
            return (not isinstance(bus.country, str)) or (bus.name[2:] in ehighway_countries)
        buses = buses.loc[buses.apply(filter_buses, axis=1)]
    else:
        countries = replace_iso2_codes(
            list(set([idx[2:] for idx in buses.dropna(subset=["onshore_region"]).index])))

    # Converting polygons strings to Polygon object
    for region_type in ["onshore_region", "offshore_region"]:
        regions = buses[region_type].values
        for i, region in enumerate(regions):
            if isinstance(region, str):
                regions[i] = shapely.wkt.loads(region)

    # Remove lines for which one of the two end buses has been removed
    lines = pd.DataFrame(lines.loc[lines.bus0.isin(buses.index)
                                   & lines.bus1.isin(buses.index)])

    # Removing offshore buses that are not connected anymore
    connected_buses = sorted(list(set(lines["bus0"]).union(set(lines["bus1"]))))
    buses = buses.loc[connected_buses]
    assert len(buses) != 0, "Error: No buses are located in the given list of countries."

    # Add offshore polygons to remaining offshore buses
    if add_offshore:
        offshore_shapes = get_shapes(countries, which='offshore', save=True)["geometry"]
        if len(offshore_shapes) != 0:
            offshore_zones_shape = unary_union(offshore_shapes.values)
            offshore_bus_indexes = buses[buses["onshore_region"].isnull()].index
            offshore_buses = buses.loc[offshore_bus_indexes]
            # Use a home-made 'voronoi' partition to assign a region to each offshore bus
            buses.loc[offshore_bus_indexes, "offshore_region"] = \
                voronoi_special(offshore_zones_shape, offshore_buses[["x", "y"]])

    # Setting line parameters
    """ For DC-opf
    lines['s_nom'] *= 1000.0  # PyPSA uses MW
    lines['s_nom_min'] = lines['s_nom']
    # Define reactance   # TODO: do sth more clever
    lines['x'] = pd.Series(0.00001, index=lines.index)
    lines['s_nom_extendable'] = pd.Series(True, index=lines.index)  # TODO: parametrize
    lines['capital_cost'] = pd.Series(index=lines.index)
    for idx in lines.index:
        carrier = lines.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(network.snapshot_weightings['objective']))
        lines.loc[idx, ('capital_cost', )] = cap_cost * lines.length.loc[idx]
    """

    lines['p_nom'] = lines["s_nom"]
    if not use_ex_line_cap:
        lines['p_nom'] = 0
    lines['p_nom_min'] = lines['p_nom']
    lines['p_min_pu'] = -1.  # Making the link bi-directional
    lines = lines.drop('s_nom', axis=1)
    lines['p_nom_extendable'] = extend_line_cap

    # Compute the capital cost once per carrier instead of once per line:
    # the snapshot-weighting sum and get_costs() result are loop-invariant.
    weight = sum(network.snapshot_weightings['objective'])
    cost_per_carrier = {carrier: get_costs(carrier, weight)[0]
                        for carrier in lines.carrier.unique()}
    lines['capital_cost'] = lines.carrier.map(cost_per_carrier) * lines.length

    network.import_components_from_dataframe(buses, "Bus")
    network.import_components_from_dataframe(lines, "Link")
    # network.import_components_from_dataframe(lines, "Line") for dc-opf

    if plot:
        from epippy.topologies.core.plot import plot_topology
        plot_topology(buses, lines)
        plt.show()

    return network
def add_generators(net: pypsa.Network, countries: List[str],
                   use_ex_cap: bool = True, extendable: bool = False) -> pypsa.Network:
    """
    Add nuclear generators to a PyPSA Network instance.

    Parameters
    ----------
    net: pypsa.Network
        A Network instance whose buses carry an 'onshore_region' attribute.
    countries: List[str]
        Codes of countries over which the network is built
    use_ex_cap: bool (default: True)
        Whether to consider existing capacity or not
    extendable: bool (default: False)
        Whether generators are extendable

    Returns
    -------
    net: pypsa.Network
        Updated network
    """
    assert hasattr(net.buses, "onshore_region"), \
        "Error: Buses must contain a 'onshore_region' attribute."

    # Nuclear plants can only be added onshore.
    onshore_buses = net.buses.dropna(subset=["onshore_region"], axis=0)
    if len(onshore_buses) == 0:
        warn("Warning: Trying to add nuclear to network without onshore buses.")
        return net

    plants = get_powerplants('nuclear', countries)
    shapes_countries = list(onshore_buses.country) if hasattr(onshore_buses, 'country') else None
    plants["bus_id"] = match_powerplants_to_regions(plants,
                                                    onshore_buses.onshore_region,
                                                    shapes_countries=shapes_countries,
                                                    dist_threshold=50.0)

    # If no plants in the chosen countries, return directly the network.
    if len(plants) == 0:
        return net

    logger.info(f"Adding {plants['Capacity'].sum() * 1e-3:.2f} GW of nuclear capacity "
                f"in {sorted(plants['ISO2'].unique())}.")

    if not use_ex_cap:
        plants.Capacity = 0.
    plants.Capacity /= 1000.  # Convert MW to GW

    capital_cost, marginal_cost = get_costs(
        'nuclear', sum(net.snapshot_weightings['objective']))

    # Get fuel type, efficiency and ramp rates.
    fuel, efficiency, ramp_rate, base_level = \
        get_tech_info('nuclear', ["fuel", "efficiency_ds", "ramp_rate", "base_level"])

    net.madd("Generator",
             "Gen nuclear " + plants.Name + " " + plants.bus_id,
             bus=plants.bus_id.values,
             p_nom=plants.Capacity.values,
             p_nom_min=plants.Capacity.values,
             p_nom_extendable=extendable,
             type='nuclear',
             carrier=fuel,
             efficiency=efficiency,
             marginal_cost=marginal_cost,
             capital_cost=capital_cost,
             ramp_limit_up=ramp_rate,
             ramp_limit_down=ramp_rate,
             p_min_pu=base_level,
             x=plants.lon.values,
             y=plants.lat.values)

    return net
def add_phs_plants(net: pypsa.Network, topology_type: str = "countries",
                   extendable: bool = False, cyclic_sof: bool = True) -> pypsa.Network:
    """
    Add pumped-hydro storage units to a PyPSA Network instance.

    Parameters
    ----------
    net: pypsa.Network
        A Network instance.
    topology_type: str
        Can currently be countries (for one node per country topologies) or
        ehighway (for topologies based on ehighway).
    extendable: bool (default: False)
        Whether generators are extendable
    cyclic_sof: bool (default: True)
        Whether to set to True the cyclic_state_of_charge for the storage_unit component

    Returns
    -------
    net: pypsa.Network
        Updated network
    """
    check_assertions(net, topology_type)

    # PHS units can only be attached to onshore buses.
    onshore = net.buses.dropna(subset=["onshore_region"], axis=0)

    # Capacities are aggregated per country or per NUTS3 zone.
    aggr_level = "countries" if topology_type == "countries" else "NUTS3"
    pow_cap, en_cap = get_phs_capacities(aggr_level)

    if topology_type == 'countries':
        # Keep only the countries for which capacity data is available.
        countries_with_capacity = sorted(list(set(onshore.country) & set(pow_cap.index)))
        target_buses = net.buses[net.buses.country.isin(countries_with_capacity)].index
        bus_pow_cap = pow_cap.loc[countries_with_capacity]
        bus_pow_cap.index = target_buses
        bus_en_cap = en_cap.loc[countries_with_capacity]
        bus_en_cap.index = target_buses
    else:
        # e-highway topology: map NUTS3 capacities onto e-highway buses.
        bus_pow_cap, bus_en_cap = phs_inputs_nuts_to_ehighway(onshore.index, pow_cap, en_cap)
        countries_with_capacity = set(bus_pow_cap.index.str[2:])

    logger.info(f"Adding {bus_pow_cap.sum():.3f} GW of PHS hydro "
                f"with {bus_en_cap.sum():.3f} GWh of storage in {countries_with_capacity}.")

    max_hours = bus_en_cap / bus_pow_cap

    # Get cost and efficiencies.
    capital_cost, marginal_cost = get_costs(
        'phs', sum(net.snapshot_weightings['objective']))
    efficiency_dispatch, efficiency_store, self_discharge = \
        get_tech_info('phs', ["efficiency_ds", "efficiency_ch", "efficiency_sd"])
    self_discharge = round(1 - self_discharge, 4)

    net.madd("StorageUnit",
             bus_pow_cap.index,
             suffix=" Storage PHS",
             bus=bus_pow_cap.index,
             type='phs',
             p_nom=bus_pow_cap,
             p_nom_min=bus_pow_cap,
             p_nom_extendable=extendable,
             max_hours=max_hours.values,
             capital_cost=capital_cost,
             marginal_cost=marginal_cost,
             efficiency_store=efficiency_store,
             efficiency_dispatch=efficiency_dispatch,
             # NOTE(review): add_batteries passes this value as 'standing_loss',
             # the pypsa StorageUnit attribute name; confirm 'self_discharge'
             # here is an intentional custom attribute and not a typo.
             self_discharge=self_discharge,
             cyclic_state_of_charge=cyclic_sof,
             x=onshore.loc[bus_pow_cap.index].x,
             y=onshore.loc[bus_pow_cap.index].y)

    return net
def add_sto_plants(net: pypsa.Network, topology_type: str = "countries",
                   extendable: bool = False, cyclic_sof: bool = True) -> pypsa.Network:
    """
    Add reservoir-storage (STO) hydro units to a Network instance.

    (Note: the previous docstring said "run-of-river"; this function adds
    reservoir storage units with inflows.)

    Parameters
    ----------
    net: pypsa.Network
        A Network instance.
    topology_type: str
        Can currently be countries (for one node per country topologies) or
        ehighway (for topologies based on ehighway).
    extendable: bool (default: False)
        Whether generators are extendable
    cyclic_sof: bool (default: True)
        Whether to set to True the cyclic_state_of_charge for the storage_unit component

    Returns
    -------
    net: pypsa.Network
        Updated network
    """
    check_assertions(net, topology_type)

    # Hydro units can only be attached to onshore buses.
    onshore = net.buses.dropna(subset=["onshore_region"], axis=0)

    # Capacities and inflow time series, aggregated per country or NUTS3 zone.
    aggr_level = "countries" if topology_type == "countries" else "NUTS3"
    pow_cap, en_cap = get_sto_capacities(aggr_level)
    inflows = get_sto_inflows(aggr_level, net.snapshots)

    if topology_type == 'countries':
        # Keep only the countries for which capacity data is available.
        countries_with_capacity = sorted(list(set(onshore.country) & set(pow_cap.index)))
        target_buses = net.buses[net.buses.country.isin(countries_with_capacity)].index
        bus_pow_cap = pow_cap.loc[countries_with_capacity]
        bus_pow_cap.index = target_buses
        bus_en_cap = en_cap.loc[countries_with_capacity]
        bus_en_cap.index = target_buses
        bus_inflows = inflows[countries_with_capacity]
        bus_inflows.columns = target_buses
    else:
        # e-highway topology: map NUTS3 data onto e-highway buses.
        bus_pow_cap, bus_en_cap, bus_inflows = \
            sto_inputs_nuts_to_ehighway(onshore.index, pow_cap, en_cap, inflows)
        countries_with_capacity = set(bus_pow_cap.index.str[2:])

    logger.info(f"Adding {bus_pow_cap.sum():.2f} GW of STO hydro "
                f"with {bus_en_cap.sum() * 1e-3:.2f} TWh of storage in {countries_with_capacity}.")

    bus_inflows = bus_inflows.round(3)
    max_hours = bus_en_cap / bus_pow_cap

    capital_cost, marginal_cost = get_costs(
        'sto', sum(net.snapshot_weightings['objective']))

    # Get efficiencies.
    efficiency_dispatch = get_tech_info('sto', ['efficiency_ds'])["efficiency_ds"]

    net.madd("StorageUnit",
             bus_pow_cap.index,
             suffix=" Storage reservoir",
             bus=bus_pow_cap.index,
             type='sto',
             p_nom=bus_pow_cap,
             p_nom_min=bus_pow_cap,
             p_min_pu=0.,
             p_nom_extendable=extendable,
             capital_cost=capital_cost,
             marginal_cost=marginal_cost,
             efficiency_store=0.,
             efficiency_dispatch=efficiency_dispatch,
             cyclic_state_of_charge=cyclic_sof,
             max_hours=max_hours,
             inflow=bus_inflows,
             x=onshore.loc[bus_pow_cap.index.values].x,
             y=onshore.loc[bus_pow_cap.index.values].y)

    return net
def add_ror_plants(net: pypsa.Network, topology_type: str = "countries",
                   extendable: bool = False) -> pypsa.Network:
    """
    Add run-of-river generators to a Network instance.

    Parameters
    ----------
    net: pypsa.Network
        A Network instance.
    topology_type: str
        Can currently be countries (for one node per country topologies) or
        ehighway (for topologies based on ehighway).
    extendable: bool (default: False)
        Whether generators are extendable

    Returns
    -------
    net: pypsa.Network
        Updated network
    """
    check_assertions(net, topology_type)

    # Hydro units can only be attached to onshore buses.
    onshore = net.buses.dropna(subset=["onshore_region"], axis=0)

    # Capacities and inflow time series, aggregated per country or NUTS3 zone.
    aggr_level = "countries" if topology_type == "countries" else "NUTS3"
    pow_cap = get_ror_capacities(aggr_level)
    inflows = get_ror_inflows(aggr_level, net.snapshots)

    if topology_type == 'countries':
        # Keep only the countries for which capacity data is available.
        countries_with_capacity = sorted(list(set(onshore.country) & set(pow_cap.index)))
        target_buses = net.buses[net.buses.country.isin(countries_with_capacity)].index
        bus_pow_cap = pow_cap.loc[countries_with_capacity]
        bus_pow_cap.index = target_buses
        bus_inflows = inflows[countries_with_capacity]
        bus_inflows.columns = target_buses
    else:
        # e-highway topology: map NUTS3 data onto e-highway buses.
        bus_pow_cap, bus_inflows = \
            ror_inputs_nuts_to_ehighway(onshore.index, pow_cap, inflows)
        countries_with_capacity = set(bus_pow_cap.index.str[2:])

    logger.info(f"Adding {bus_pow_cap.sum():.2f} GW of ROR hydro in {countries_with_capacity}.")

    bus_inflows = bus_inflows.dropna().round(3)

    # Get cost and efficiencies.
    capital_cost, marginal_cost = get_costs(
        'ror', sum(net.snapshot_weightings['objective']))
    efficiency = get_tech_info('ror', ["efficiency_ds"])["efficiency_ds"]

    net.madd("Generator",
             bus_pow_cap.index,
             suffix=" Generator ror",
             bus=bus_pow_cap.index,
             type='ror',
             p_nom=bus_pow_cap,
             p_nom_min=bus_pow_cap,
             p_nom_extendable=extendable,
             capital_cost=capital_cost,
             marginal_cost=marginal_cost,
             efficiency=efficiency,
             p_min_pu=0.,
             # Run-of-river availability is driven by the inflow time series.
             p_max_pu=bus_inflows,
             x=onshore.loc[bus_pow_cap.index].x,
             y=onshore.loc[bus_pow_cap.index].y)

    return net
def add_batteries(network: pypsa.Network, battery_type: str,
                  buses_ids: List[str] = None,
                  fixed_duration: bool = False) -> pypsa.Network:
    """
    Add a battery at each node of the network.

    Parameters
    ----------
    network: pypsa.Network
        PyPSA network
    battery_type: str
        Type of battery to add
    buses_ids: List[str]
        IDs of the buses at which we want to add batteries.
    fixed_duration: bool
        Whether the battery storage is modelled with fixed duration.

    Returns
    -------
    network: pypsa.Network
        Updated network
    """
    logger.info(f"Adding {battery_type} storage.")

    buses = network.buses
    if buses_ids is not None:
        buses = buses.loc[buses_ids]
    # Batteries are only attached to buses with an onshore region.
    onshore_buses = buses.dropna(subset=["onshore_region"], axis=0)

    if fixed_duration:
        # Energy capacity is tied to power capacity via a fixed number of hours.
        capital_cost, marginal_cost = get_costs(
            battery_type, sum(network.snapshot_weightings['objective']))
        efficiency_dispatch, efficiency_store, self_discharge = \
            get_tech_info(battery_type, ["efficiency_ds", "efficiency_ch", "efficiency_sd"])
        self_discharge = round(1 - self_discharge, 4)
        # Max number of hours of storage.
        max_hours = get_config_values(battery_type, ["max_hours"])

        network.madd("StorageUnit",
                     onshore_buses.index,
                     suffix=f" StorageUnit {battery_type}",
                     type=battery_type,
                     bus=onshore_buses.index,
                     p_nom_extendable=True,
                     max_hours=max_hours,
                     capital_cost=capital_cost,
                     marginal_cost=marginal_cost,
                     efficiency_dispatch=efficiency_dispatch,
                     efficiency_store=efficiency_store,
                     standing_loss=self_discharge)
    else:
        # Energy and power capacities are sized independently, using separate
        # cost entries for the power ('_p') and energy ('_e') components.
        power_tech = battery_type + '_p'
        energy_tech = battery_type + '_e'
        capital_cost, marginal_cost = get_costs(
            power_tech, sum(network.snapshot_weightings['objective']))
        capital_cost_e, marginal_cost_e = get_costs(
            energy_tech, sum(network.snapshot_weightings['objective']))
        efficiency_dispatch, efficiency_store = get_tech_info(
            power_tech, ["efficiency_ds", "efficiency_ch"])
        self_discharge = get_tech_info(energy_tech, ["efficiency_sd"]).astype(float)
        self_discharge = round(1 - self_discharge.values[0], 4)
        ctd_ratio = get_config_values(power_tech, ["ctd_ratio"])

        network.madd("StorageUnit",
                     onshore_buses.index,
                     suffix=f" StorageUnit {battery_type}",
                     type=battery_type,
                     bus=onshore_buses.index,
                     p_nom_extendable=True,
                     capital_cost=capital_cost,
                     marginal_cost=marginal_cost,
                     capital_cost_e=capital_cost_e,
                     marginal_cost_e=marginal_cost_e,
                     efficiency_dispatch=efficiency_dispatch,
                     efficiency_store=efficiency_store,
                     standing_loss=self_discharge,
                     ctd_ratio=ctd_ratio)

        # Replace each storage unit by an equivalent closed-loop formulation.
        # NOTE(review): with the source formatting collapsed it is ambiguous
        # whether this loop belongs inside the else-branch or at function level;
        # kept inside, next to the independent power/energy sizing it relies on
        # — confirm against upstream.
        storages = network.storage_units.index[network.storage_units.type == battery_type]
        for storage_to_replace in storages:
            replace_su_closed_loop(network, storage_to_replace)

    return network
def add_generators_using_siting(net: pypsa.Network, technologies: List[str],
                                region: str, siting_params: Dict[str, Any],
                                use_ex_cap: bool = True, limit_max_cap: bool = True,
                                output_dir: str = None) -> pypsa.Network:
    """
    Add generators for different technologies at a series of location selected
    via an optimization mechanism (resite).

    Parameters
    ----------
    net: pypsa.Network
        A network with defined buses.
    technologies: List[str]
        Which technologies to add using this methodology
    siting_params: Dict[str, Any]
        Set of parameters necessary for siting.
    region: str
        Region over which the network is defined
    use_ex_cap: bool (default: True)
        Whether to take into account existing capacity.
    limit_max_cap: bool (default: True)
        Whether to limit capacity expansion at each grid cell to a certain capacity potential.
    output_dir: str
        Absolute path to directory where resite output should be stored

    Returns
    -------
    net: pypsa.Network
        Updated network

    Notes
    -----
    net.buses must have a 'region_onshore' if adding onshore technologies and
    a 'region_offshore' attribute if adding offshore technologies.
    """
    # Fail early if any required siting parameter is missing.
    for param in ["timeslice", "spatial_resolution", "modelling", "formulation",
                  "formulation_params", "write_lp"]:
        assert param in siting_params, f"Error: Missing parameter {param} for siting."

    from resite.resite import Resite

    # Build the resite siting problem over the requested region/technologies.
    logger.info('Setting up resite.')
    resite = Resite([region], technologies, siting_params["timeslice"],
                    siting_params["spatial_resolution"],
                    siting_params["min_cap_if_selected"])
    resite.build_data(use_ex_cap)

    logger.info('resite model being built.')
    resite.build_model(siting_params["modelling"], siting_params['formulation'],
                       siting_params['formulation_params'],
                       siting_params['write_lp'], output_dir)

    logger.info('Sending resite to solver.')
    resite.solve_model(solver_options=siting_params['solver_options'],
                       solver=siting_params['solver'])

    # Collect the selected sites and their associated data series.
    logger.info('Retrieving resite results.')
    resite.retrieve_selected_sites_data()
    tech_location_dict = resite.sel_tech_points_dict
    existing_cap_ds = resite.sel_data_dict["existing_cap_ds"]
    cap_potential_ds = resite.sel_data_dict["cap_potential_ds"]
    cap_factor_df = resite.sel_data_dict["cap_factor_df"]

    logger.info("Saving resite results")
    resite.save(output_dir)

    # resite may have been run on a superset of the network's snapshots:
    # crop when possible, otherwise bail out explicitly.
    if not resite.timestamps.equals(net.snapshots):
        # If network snapshots is a subset of resite snapshots just crop the data
        missing_timestamps = set(net.snapshots) - set(resite.timestamps)
        if not missing_timestamps:
            cap_factor_df = cap_factor_df.loc[net.snapshots]
        else:
            # In other case, need to recompute capacity factors
            raise NotImplementedError(
                "Error: Network snapshots must currently be a subset of resite snapshots.")

    for tech, points in tech_location_dict.items():
        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated onshore/offshore shapes).
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(points, buses[region_type]).dropna()
        # Keep only the points that matched a bus region.
        points = list(associated_buses.index)

        p_nom_max = 'inf'
        if limit_max_cap:
            p_nom_max = cap_potential_ds[tech][points].values
        p_nom = existing_cap_ds[tech][points].values
        p_max_pu = cap_factor_df[tech][points].values

        capital_cost, marginal_cost = get_costs(tech, len(net.snapshots))

        # One generator per selected (x, y) site.
        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net
def add_generators_per_bus(net: pypsa.Network, technologies: List[str],
                           use_ex_cap: bool = True, bus_ids: List[str] = None,
                           precision: int = 3) -> pypsa.Network:
    """
    Add VRES generators to each bus of a PyPSA Network, each bus being associated
    to a geographical region.

    Parameters
    ----------
    net: pypsa.Network
        A PyPSA Network instance with buses associated to regions.
    technologies: List[str]
        Names of VRES technologies to be added.
    use_ex_cap: bool (default: True)
        Whether to take into account existing capacity.
    bus_ids: List[str]
        Subset of buses to which the generators must be added.
    precision: int (default: 3)
        Indicates at which decimal values should be rounded

    Returns
    -------
    net: pypsa.Network
        Updated network

    Notes
    -----
    Each bus must contain 'x', 'y' attributes.
    In addition, each bus must have a 'region_onshore' and/or 'region_offshore' attributes.
    Finally, if the topology has one bus per country (and no offshore buses),
    all buses can be associated to an ISO code under the attribute 'country'
    to fasten some computations.
    """
    # Filter out buses
    all_buses = net.buses.copy()
    all_buses = all_buses[all_buses['country'].notna()]
    if bus_ids is not None:
        all_buses = all_buses.loc[bus_ids]

    for attr in ["x", "y"]:
        assert hasattr(all_buses, attr), f"Error: Buses must contain a '{attr}' attribute."
    assert all([len(bus[["onshore_region", "offshore_region"]].dropna()) != 0
                for idx, bus in all_buses.iterrows()]), \
        "Error: Each bus must be associated to an 'onshore_region' and/or 'offshore_region' attribute."

    # "One bus per country" holds when every bus has a country code and no
    # two buses share one; this enables the faster country-level data paths below.
    one_bus_per_country = False
    if hasattr(all_buses, 'country'):
        # Check every bus has a value for this attribute
        complete = len(all_buses["country"].dropna()) == len(all_buses)
        # Check the values are unique
        unique = len(all_buses["country"].unique()) == len(all_buses)
        one_bus_per_country = complete & unique

    tech_config_dict = get_config_dict(technologies, ["filters", "power_density", "onshore"])
    for tech in technologies:

        # Detect if technology is onshore(/offshore) based
        onshore_tech = tech_config_dict[tech]["onshore"]

        # Get buses which are associated to an onshore/offshore region
        region_type = "onshore_region" if onshore_tech else 'offshore_region'
        buses = all_buses.dropna(subset=[region_type], axis=0)
        countries = list(buses["country"].unique())

        # Get the shapes of regions associated to each bus
        buses_regions_shapes_ds = buses[region_type]

        # Compute capacity potential at each bus
        # TODO: WARNING: first part of if-else to be removed
        enspreso = False
        if enspreso:
            logger.warning("Capacity potentials computed using ENSPRESO data.")
            if one_bus_per_country:
                cap_pot_country_ds = get_capacity_potential_for_countries(tech, countries)
                cap_pot_ds = pd.Series(index=buses.index, dtype=float)
                cap_pot_ds[:] = cap_pot_country_ds.loc[buses.country]
            else:  # topology_type == "regions"
                cap_pot_ds = get_capacity_potential_for_regions(
                    {tech: buses_regions_shapes_ds.values})[tech]
                cap_pot_ds.index = buses.index
        else:
            # Using GLAES
            filters = tech_config_dict[tech]["filters"]
            power_density = tech_config_dict[tech]["power_density"]
            cap_pot_ds = pd.Series(index=buses.index, dtype=float)
            cap_pot_ds[:] = get_capacity_potential_for_shapes(
                buses_regions_shapes_ds.values, filters, power_density,
                precision=precision)

        # Get one capacity factor time series per bus
        if one_bus_per_country:
            # For country-based topologies, use aggregated series obtained from Renewables.ninja
            cap_factor_countries_df = get_cap_factor_for_countries(
                tech, countries, net.snapshots, precision, False)
            cap_factor_df = pd.DataFrame(index=net.snapshots, columns=buses.index, dtype=float)
            cap_factor_df[:] = cap_factor_countries_df[buses.country]
        else:
            # For region-based topology, compute capacity factors at (rounded) buses position
            spatial_res = 0.5
            points = [(round(shape.centroid.x / spatial_res) * spatial_res,
                       round(shape.centroid.y / spatial_res) * spatial_res)
                      for shape in buses_regions_shapes_ds.values]
            cap_factor_df = compute_capacity_factors(
                {tech: points}, spatial_res, net.snapshots, precision)[tech]
            cap_factor_df.columns = buses.index

        # Compute legacy capacity (not available for wind_floating)
        legacy_cap_ds = pd.Series(0., index=buses.index, dtype=float)
        if use_ex_cap and tech != "wind_floating":
            if one_bus_per_country and len(countries) != 0:
                legacy_cap_countries = get_legacy_capacity_in_countries(tech, countries)
                legacy_cap_ds[:] = legacy_cap_countries.loc[buses.country]
            else:
                legacy_cap_ds = get_legacy_capacity_in_regions(
                    tech, buses_regions_shapes_ds, countries)

        # Update capacity potentials if legacy capacity is bigger
        for bus in buses.index:
            if cap_pot_ds.loc[bus] < legacy_cap_ds.loc[bus]:
                cap_pot_ds.loc[bus] = legacy_cap_ds.loc[bus]

        # Remove generators if capacity potential is 0
        non_zero_potential_gens_index = cap_pot_ds[cap_pot_ds > 0].index
        cap_pot_ds = cap_pot_ds.loc[non_zero_potential_gens_index]
        legacy_cap_ds = legacy_cap_ds.loc[non_zero_potential_gens_index]
        cap_factor_df = cap_factor_df[non_zero_potential_gens_index]
        buses = buses.loc[non_zero_potential_gens_index]

        # Get costs
        capital_cost, marginal_cost = get_costs(tech, len(net.snapshots))

        # Adding to the network
        net.madd("Generator",
                 buses.index,
                 suffix=f" Gen {tech}",
                 bus=buses.index,
                 p_nom_extendable=True,
                 p_nom=legacy_cap_ds,
                 p_nom_min=legacy_cap_ds,
                 p_nom_max=cap_pot_ds,
                 p_min_pu=0.,
                 p_max_pu=cap_factor_df,
                 type=tech,
                 x=buses.x.values,
                 y=buses.y.values,
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net
def add_generators_in_grid_cells(net: pypsa.Network, technologies: List[str],
                                 region: str, spatial_resolution: float,
                                 use_ex_cap: bool = True, limit_max_cap: bool = True,
                                 min_cap_pot: List[float] = None) -> pypsa.Network:
    """
    Add VRES generators at the granularity of individual grid cells.

    Candidate deployment sites are generated with resite by dividing `region`
    into grid cells at the given spatial resolution; each site is then
    attached to the bus whose onshore/offshore region shape contains it.

    Parameters
    ----------
    net: pypsa.Network
        A PyPSA Network instance with buses associated to regions.
    technologies: List[str]
        Which technologies to add.
    region: str
        Region code defined in 'data_path'/geographics/region_definition.csv
        over which the network is defined.
    spatial_resolution: float
        Spatial resolution at which to define grid cells.
    use_ex_cap: bool (default: True)
        Whether to take into account existing capacity.
    limit_max_cap: bool (default: True)
        Whether to limit capacity expansion at each grid cell to a certain
        capacity potential.
    min_cap_pot: List[float] (default: None)
        List of thresholds per technology. Points with capacity potential
        under this threshold will be removed.

    Returns
    -------
    net: pypsa.Network
        Updated network

    Notes
    -----
    net.buses must have a 'region_onshore' attribute if adding onshore
    technologies and a 'region_offshore' attribute if adding offshore ones.
    """
    from resite.resite import Resite

    # Let resite derive candidate deployment sites over the whole region
    siting = Resite([region], technologies, [net.snapshots[0], net.snapshots[-1]], spatial_resolution)
    siting.build_data(use_ex_cap, min_cap_pot)

    for tech in technologies:

        # Keep only the buses that carry the relevant (on/off-shore) region shape
        region_col = 'onshore_region' if get_config_values(tech, ['onshore']) else 'offshore_region'
        tech_buses = net.buses.copy().dropna(subset=[region_col])

        # Map each candidate site onto the bus whose shape contains it;
        # sites falling outside every bus shape are dropped
        site_to_bus = match_points_to_regions(siting.tech_points_dict[tech],
                                              tech_buses[region_col]).dropna()
        sites = list(site_to_bus.index)

        # Per-site capacity bounds and hourly availability profiles
        max_cap = siting.data_dict["cap_potential_ds"][tech][sites].values if limit_max_cap else 'inf'
        existing_cap = siting.data_dict["existing_cap_ds"][tech][sites].values
        availability = siting.data_dict["cap_factor_df"][tech][sites].values

        capital_cost, marginal_cost = get_costs(tech, len(net.snapshots))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in sites]),
                 bus=site_to_bus.values,
                 p_nom_extendable=True,
                 p_nom_max=max_cap,
                 p_nom=existing_cap,
                 p_nom_min=existing_cap,
                 p_min_pu=0.,
                 p_max_pu=availability,
                 type=tech,
                 x=[x for x, _ in sites],
                 y=[y for _, y in sites],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net
def upgrade_topology(net: pypsa.Network, regions: List[str], plot: bool = False,
                     ac_carrier: str = "HVAC_OHL", dc_carrier: str = "HVDC_GLIS") -> pypsa.Network:
    """
    Add non-European buses and interconnecting links to an existing network.

    Parameters
    ----------
    net: pypsa.Network
        Network instance with European buses already present. Some links are
        only created when the corresponding European bus (e.g. 'GR', 'ES',
        'IT', 'BG') exists in net.buses.
    regions: List[str]
        Codes of the regions to add. Handled values are 'IS' (Iceland),
        'GL' (Greenland, requires 'IS'), 'na' (North Africa) and
        'me' (Middle East).
    plot: bool (default: False)
        Whether to plot the updated topology.
    ac_carrier: str (default: "HVAC_OHL")
        Carrier assigned to the new AC links.
    dc_carrier: str (default: "HVDC_GLIS")
        Carrier assigned to the new DC links.

    Returns
    -------
    net: pypsa.Network
        Updated network.
    """
    # New buses and links are first accumulated in plain DataFrames and only
    # added to the network once fully specified.
    buses = pd.DataFrame(columns=["x", "y", "country", "onshore_region", "offshore_region"])
    links = pd.DataFrame(columns=["bus0", "bus1", "carrier", "length"])

    if "IS" in regions:
        buses.loc["IS", "onshore_region"] = get_shapes(["IS"], "onshore")["geometry"][0]
        buses.loc["IS", ["x", "y"]] = buses.loc["IS", "onshore_region"].centroid
        buses.loc["IS", "country"] = "IS"
        # Adding link to GB
        links.loc["IS-GB", ["bus0", "bus1", "carrier"]] = ["IS", "GB", dc_carrier]

    if "GL" in regions:
        assert 'IS' in regions, "Error: Cannot add a node in Greenland without adding a node in Iceland."
        # Keep only the south-eastern tip of Greenland
        full_gl_shape = get_shapes(["GL"], "onshore")["geometry"][0]
        trunc_gl_shape = full_gl_shape.intersection(Polygon([(-44.6, 59.5), (-44.6, 60.6),
                                                             (-42, 60.6), (-42, 59.5)]))
        buses.loc["GL", "onshore_region"] = trunc_gl_shape
        buses.loc["GL", ["x", "y"]] = (-44., 60.)
        # buses.loc["GL", "country"] = "GL"
        # Adding link to IS
        links.loc["GL-IS", ["bus0", "bus1", "carrier"]] = ["GL", "IS", dc_carrier]

    if "na" in regions:
        countries = get_subregions("na")
        shapes = get_shapes(countries, "onshore")["geometry"]
        # Clip country shapes to the coastal band of North Africa
        trunc_shape = Polygon([(-14, 27.7), (-14, 40), (40, 40), (40, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        # Bus coordinates: capital (or representative) city of each country
        buses.loc["DZ", ["x", "y"]] = (3, 36.5)  # Algeria, Alger
        buses.loc["EG", ["x", "y"]] = (31., 30.)  # Egypt, Cairo
        buses.loc["LY", ["x", "y"]] = (22, 32)  #(13., 32.5)  # Libya, Tripoli
        buses.loc["MA", ["x", "y"]] = (-6., 35.)  # Morocco, Rabat
        buses.loc["TN", ["x", "y"]] = (10., 36.5)  # Tunisia, Tunis
        # Adding links
        links.loc["DZ-MA", ["bus0", "bus1", "carrier"]] = ["DZ", "MA", ac_carrier]
        links.loc["DZ-TN", ["bus0", "bus1", "carrier"]] = ["DZ", "TN", ac_carrier]
        links.loc["LY-TN", ["bus0", "bus1", "carrier", "length"]] = ["LY", "TN", ac_carrier, 2000]
        links.loc["EG-LY", ["bus0", "bus1", "carrier", "length"]] = ["EG", "LY", ac_carrier, 700]
        # Links towards Europe only if the European end exists in the network
        if "GR" in net.buses.index:
            links.loc["LY-GR", ["bus0", "bus1", "carrier", "length"]] = ["LY", "GR", dc_carrier, 900]
        if "ES" in net.buses.index:
            links.loc["MA-ES", ["bus0", "bus1", "carrier"]] = ["MA", "ES", dc_carrier]
        if "IT" in net.buses.index:
            links.loc["TN-IT", ["bus0", "bus1", "carrier", "length"]] = ["TN", "IT", dc_carrier, 600]

    if "me" in regions:
        # countries = ["AE", "BH", "CY", "IL", "IQ", "IR", "JO", "KW", "LB", "OM", "QA", "SA", "SY"]  # , "YE"]
        countries = get_subregions("me")
        shapes = get_shapes(countries, "onshore")["geometry"]
        # Clip country shapes to the Middle-East bounding band
        trunc_shape = Polygon([(25, 27.7), (25, 60), (60, 60), (60, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        # buses.loc["AE", ["x", "y"]] = (54.5, 24.5)  # UAE, Abu Dhabi
        # buses.loc["BH", ["x", "y"]] = (50.35, 26.13)  # Bahrain, Manama
        buses.loc["TR", ["x", "y"]] = buses.loc["TR", "onshore_region"].centroid
        buses.loc["CY", ["x", "y"]] = (33.21, 35.1)  # Cyprus, Nicosia
        buses.loc["IL", ["x", "y"]] = (34.76, 32.09)  # Tel-Aviv, Jerusalem
        # if 'TR' in net.buses.index:
        #     buses.loc["IQ", ["x", "y"]] = (44.23, 33.2)  # Iraq, Baghdad
        #     buses.loc["IR", ["x", "y"]] = (51.23, 35.41)  # Iran, Tehran
        # else:
        #     buses = buses.drop(["IQ", "IR"])
        buses.loc["JO", ["x", "y"]] = (35.55, 31.56)  # Jordan, Amman
        # buses.loc["KW", ["x", "y"]] = (47.58, 29.22)  # Kuwait, Kuwait City
        # buses.loc["LB", ["x", "y"]] = (35.3, 33.53)  # Lebanon, Beirut
        # buses.loc["OM", ["x", "y"]] = (58.24, 23.35)  # Oman, Muscat
        # buses.loc["QA", ["x", "y"]] = (51.32, 25.17)  # Qatar, Doha
        buses.loc["SA", ["x", "y"]] = buses.loc["SA", "onshore_region"].centroid  #(46.43, 24.38)  # Saudi Arabia, Riyadh
        buses.loc["SY", ["x", "y"]] = (36.64, 34.63)  # Syria, Homs
        # buses.loc["YE", ["x", "y"]] = (44.12, 15.20)  # Yemen, Sana
        # Adding links
        links.loc["IL-JO", ["bus0", "bus1", "carrier"]] = ["IL", "JO", ac_carrier]
        # links.loc["IL-LI", ["bus0", "bus1", "carrier"]] = ["IL", "LB", ac_carrier]
        # links.loc["SY-LI", ["bus0", "bus1", "carrier"]] = ["SY", "LB", ac_carrier]
        links.loc["SY-JO", ["bus0", "bus1", "carrier"]] = ["SY", "JO", ac_carrier]
        # NOTE(review): carrier here is the literal "DC", not dc_carrier — confirm intended
        links.loc["IL-CY", ["bus0", "bus1", "carrier"]] = ["IL", "CY", "DC"]  # This links comes from nowhere
        links.loc["SA-JO", ["bus0", "bus1", "carrier"]] = ["SA", "JO", ac_carrier]
        # links.loc["CY-SY", ["bus0", "bus1", "carrier"]] = ["CY", "SY", "DC"]
        # links.loc["OM-AE", ["bus0", "bus1", "carrier"]] = ["OM", "AE", ac_carrier]
        # links.loc["QA-AE", ["bus0", "bus1", "carrier"]] = ["QA", "AE", ac_carrier]
        # links.loc["QA-SA", ["bus0", "bus1", "carrier"]] = ["QA", "SA", ac_carrier]
        # links.loc["BH-QA", ["bus0", "bus1", "carrier"]] = ["BH", "QA", ac_carrier]
        # links.loc["BH-KW", ["bus0", "bus1", "carrier"]] = ["BH", "KW", ac_carrier]
        # links.loc["BH-SA", ["bus0", "bus1", "carrier"]] = ["BH", "SA", ac_carrier]
        # links.loc["YE-SA", ["bus0", "bus1", "carrier"]] = ["YE", "SA", ac_carrier]
        # 'EG' is only present when the 'na' region was requested as well
        if "EG" in buses.index:
            links.loc["EG-IL", ["bus0", "bus1", "carrier"]] = ["EG", "IL", ac_carrier]
            links.loc["SA-EG", ["bus0", "bus1", "carrier"]] = ["SA", "EG", ac_carrier]
        #if "TR" in net.buses.index:
        links.loc["SY-TR", ["bus0", "bus1", "carrier"]] = ["SY", "TR", ac_carrier]
        # links.loc["IQ-TR", ["bus0", "bus1", "carrier"]] = ["IQ", "TR", ac_carrier]
        # links.loc["IR-TR", ["bus0", "bus1", "carrier"]] = ["IR", "TR", ac_carrier]
        # links.loc["IR-IQ", ["bus0", "bus1", "carrier"]] = ["IR", "IQ", ac_carrier]
        if "GR" in net.buses.index:
            links.loc["CY-GR", ["bus0", "bus1", "carrier", "length"]] = ["CY", "GR", dc_carrier, 850]  # From TYNDP
            links.loc["TR-GR", ["bus0", "bus1", "carrier", "length"]] = ["TR", "GR", dc_carrier, 1173.53]  # p_nom = 0.66
        if "BG" in net.buses.index:
            links.loc["TR-BG", ["bus0", "bus1", "carrier", "length"]] = ["TR", "BG", ac_carrier, 932.16]  # p_nom = 1.2

    # Resolve object-dtype columns before handing the buses to pypsa
    buses = buses.infer_objects()
    net.madd("Bus", buses.index,
             x=buses.x, y=buses.y, country=buses.country,
             onshore_region=buses.onshore_region, offshore_region=buses.offshore_region,)

    # Adding length to the lines for which we did not fix it manually
    # (geodesic distance between the two bus coordinates, in km)
    for idx in links[links.length.isnull()].index:
        bus0_id = links.loc[idx]["bus0"]
        bus1_id = links.loc[idx]["bus1"]
        bus0_x = net.buses.loc[bus0_id]["x"]
        bus0_y = net.buses.loc[bus0_id]["y"]
        bus1_x = net.buses.loc[bus1_id]["x"]
        bus1_y = net.buses.loc[bus1_id]["y"]
        links.loc[idx, "length"] = geopy.distance.geodesic((bus0_y, bus0_x), (bus1_y, bus1_x)).km

    # Capital cost scales with link length; get_costs returns a per-km cost here
    links['capital_cost'] = pd.Series(index=links.index)
    for idx in links.index:
        carrier = links.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(net.snapshot_weightings['objective']))
        links.loc[idx, ('capital_cost', )] = cap_cost * links.length.loc[idx]

    net.madd("Link", links.index, bus0=links.bus0, bus1=links.bus1, carrier=links.carrier,
             p_nom_extendable=True, length=links.length, capital_cost=links.capital_cost)

    # from tyndp
    if "TR" in net.buses.index:
        net.links.loc[["TR-BG", "TR-GR"], "p_nom"] = [1.2, 0.66]

    if plot:
        plot_topology(net.buses, net.links)
        plt.show()

    return net
def add_res_at_sites(net: pypsa.Network, config, output_dir, eu_countries,):
    """
    Add RES generators to the network at sites computed with resite.

    Builds resite data for the European countries and, if configured
    (config["non_eu"]), for additional remote regions which are merged into
    the European resite instance. If config["res"]["strategy"] == "siting",
    the resite optimisation is run to select sites; otherwise all candidate
    sites are used. One generator per retained site is then added to the
    network.

    Parameters
    ----------
    net: pypsa.Network
        Network whose buses (indexed by country code) carry 'onshore_region'
        and 'offshore_region' shapes.
    config: dict
        Run configuration; the 'res', 'non_eu', 'solver' and 'solver_options'
        entries are used here.
    output_dir: str
        Directory under which resite outputs are written (in a 'resite/'
        sub-folder).
    eu_countries: List[str]
        ISO codes of the European countries (must be in net.buses.index).

    Returns
    -------
    net: pypsa.Network
        Updated network.
    """
    eu_technologies = config['res']['techs']

    logger.info(f"Adding RES {eu_technologies} generation.")

    spatial_res = config["res"]["spatial_resolution"]
    use_ex_cap = config["res"]["use_ex_cap"]
    min_cap_pot = config["res"]["min_cap_pot"]
    min_cap_if_sel = config["res"]["min_cap_if_selected"]

    # Build sites for EU
    r_europe = Resite(eu_countries, eu_technologies, [net.snapshots[0], net.snapshots[-1]],
                      spatial_res, min_cap_if_sel)
    regions_shapes = net.buses.loc[eu_countries, ["onshore_region", 'offshore_region']]
    regions_shapes.columns = ['onshore', 'offshore']
    r_europe.build_data(use_ex_cap, min_cap_pot, regions_shapes=regions_shapes)
    # Keep the capacity-credit series on the network for later use
    net.cc_ds = r_europe.data_dict["capacity_credit_ds"]

    # Build sites for other regions and merge them into the European instance
    non_eu_res = config["non_eu"]
    all_remote_countries = []
    if non_eu_res is not None:
        for region in non_eu_res.keys():
            # 'na' and 'me' are aggregated regions; anything else is a single country
            if region in ["na", "me"]:
                remote_countries = get_subregions(region)
            else:
                remote_countries = [region]
            all_remote_countries += remote_countries
            remote_techs = non_eu_res[region]

            r_remote = Resite(remote_countries, remote_techs,
                              [net.snapshots[0], net.snapshots[-1]], spatial_res)
            regions_shapes = net.buses.loc[remote_countries, ["onshore_region", 'offshore_region']]
            regions_shapes.columns = ['onshore', 'offshore']
            # No load is computed for remote regions
            r_remote.build_data(False, compute_load=False, regions_shapes=regions_shapes)

            # Add sites to European ones.
            # pd.concat replaces the former Series.append calls, which were
            # deprecated in pandas 1.4 and removed in pandas 2.0.
            r_europe.regions += r_remote.regions
            r_europe.technologies = list(set(r_europe.technologies).union(r_remote.technologies))
            r_europe.min_cap_pot_dict = {**r_europe.min_cap_pot_dict, **r_remote.min_cap_pot_dict}
            r_europe.tech_points_tuples = np.concatenate((r_europe.tech_points_tuples,
                                                          r_remote.tech_points_tuples))
            r_europe.initial_sites_ds = pd.concat([r_europe.initial_sites_ds,
                                                   r_remote.initial_sites_ds])
            r_europe.tech_points_regions_ds = pd.concat([r_europe.tech_points_regions_ds,
                                                         r_remote.tech_points_regions_ds])
            r_europe.data_dict["load"] = pd.concat([r_europe.data_dict["load"],
                                                    r_remote.data_dict["load"]], axis=1)
            r_europe.data_dict["cap_potential_ds"] = pd.concat([r_europe.data_dict["cap_potential_ds"],
                                                                r_remote.data_dict["cap_potential_ds"]])
            r_europe.data_dict["existing_cap_ds"] = pd.concat([r_europe.data_dict["existing_cap_ds"],
                                                               r_remote.data_dict["existing_cap_ds"]])
            r_europe.data_dict["cap_factor_df"] = pd.concat([r_europe.data_dict["cap_factor_df"],
                                                             r_remote.data_dict["cap_factor_df"]], axis=1)

        # Rebuild the tech -> points mapping from the merged sites
        techs = set(r_europe.initial_sites_ds.index.get_level_values(0))
        r_europe.tech_points_dict = {tech: list(r_europe.initial_sites_ds[tech].index)
                                     for tech in techs}

    # Do siting if required
    if config["res"]["strategy"] == "siting":

        logger.info('resite model being built.')
        siting_params = config['res']
        # if siting_params['formulation'] == "min_cost_global":
        #     siting_params['formulation_params']['perc_per_region'] = \
        #         siting_params['formulation_params']['perc_per_region'] + [0.] * len(all_remote_countries)
        r_europe.build_model(siting_params["modelling"], siting_params['formulation'],
                             siting_params['formulation_params'],
                             siting_params['write_lp'], f"{output_dir}resite/")

        logger.info('Sending resite to solver.')
        r_europe.init_output_folder(f"{output_dir}resite/")
        r_europe.solve_model(f"{output_dir}resite/", solver=config['solver'],
                             solver_options=config['solver_options'])

        logger.info("Saving resite results")
        r_europe.retrieve_selected_sites_data()
        r_europe.save(f"{output_dir}resite/")

        # Add solution to network
        logger.info('Retrieving resite results.')
        tech_location_dict = r_europe.sel_tech_points_dict
        existing_cap_ds = r_europe.sel_data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.sel_data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.sel_data_dict["cap_factor_df"]

        if not r_europe.timestamps.equals(net.snapshots):
            # If network snapshots is a subset of resite snapshots just crop the data
            missing_timestamps = set(net.snapshots) - set(r_europe.timestamps)
            if not missing_timestamps:
                cap_factor_df = cap_factor_df.loc[net.snapshots]
            else:
                # In other case, need to recompute capacity factors
                raise NotImplementedError(
                    "Error: Network snapshots must currently be a subset of resite snapshots.")

    else:  # no siting
        tech_location_dict = r_europe.tech_points_dict
        existing_cap_ds = r_europe.data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.data_dict["cap_factor_df"]

    for tech, points in tech_location_dict.items():

        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated shapes)
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(points, buses[region_type]).dropna()
        points = list(associated_buses.index)

        # NOTE(review): the string 'inf' relies on downstream float coercion;
        # float('inf') would be more explicit — confirm before changing.
        p_nom_max = 'inf'
        if config['res']['limit_max_cap']:
            p_nom_max = cap_potential_ds[tech][points].values
        p_nom = existing_cap_ds[tech][points].values
        p_max_pu = cap_factor_df[tech][points].values

        capital_cost, marginal_cost = get_costs(
            tech, sum(net.snapshot_weightings['objective']))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net