def test_build_data_wrong_cap_pot_thresholds_len():
    technologies = ["pv_utility", "wind_offshore"]
    regions = ["BENELUX"]
    timeslice = ['2015-01-01T00:00', '2015-01-01T23:00']
    resite = Resite(regions, technologies, timeslice, 0.5)
    with pytest.raises(AssertionError):
        resite.build_data(True, [0.01])

def test_build_data_two_regions():
    technologies = ["pv_utility", "wind_offshore"]
    regions = ["BENELUX", "PT"]
    timeslice = ['2015-01-01T00:00', '2015-01-01T23:00']
    timestamps = pd.date_range(timeslice[0], timeslice[1], freq='1H')
    nb_sites = [72, 49]
    resite = Resite(regions, technologies, timeslice, 0.5)
    resite.build_data(True)
    build_data_test(resite, technologies, regions, timestamps, nb_sites)

def test_build_model():
    technologies = ["pv_utility", "wind_offshore"]
    regions = ["BENELUX"]
    timeslice = ['2015-01-01T00:00', '2015-01-01T23:00']
    resite = Resite(regions, technologies, timeslice, 0.5)
    resite.build_data(True)
    resite.build_model("pyomo", "max_generation", {"nb_sites_per_region": [5]}, False)
    for attr in ["instance", "modelling", "formulation", "formulation_params"]:
        assert hasattr(resite, attr)

def test_solve_model():
    technologies = ["pv_utility", "wind_offshore"]
    regions = ["BENELUX"]
    timeslice = ['2015-01-01T00:00', '2015-01-01T23:00']
    resite = Resite(regions, technologies, timeslice, 0.5)
    resite.build_data(True)
    resite.build_model("pyomo", "max_generation", {"nb_sites_per_region": [5]}, False)
    resite.solve_model()
    for attr in ["objective", "y_ds", "sel_tech_points_dict"]:
        assert hasattr(resite, attr)
    for tech_point in resite.y_ds.index:
        assert tech_point in resite.tech_points_tuples.tolist()
    for tech in resite.sel_tech_points_dict:
        assert tech in technologies
        points = resite.sel_tech_points_dict[tech]
        for point in points:
            assert (tech, point[0], point[1]) in resite.tech_points_tuples.tolist()

def test_init():
    resite = Resite(["BENELUX"], ["wind_onshore"], ['2015-01-01T00:00', '2015-01-01T23:00'], 0.5)
    for attr in ["technologies", "regions", "timestamps", "spatial_res"]:
        assert hasattr(resite, attr)

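# The tests above rebuild nearly identical Resite instances; a shared pytest fixture could factor
# that setup out. This is a minimal sketch and not part of the original test module: the fixture
# name and the choice to reuse the single-region "BENELUX" setup are assumptions.
@pytest.fixture
def benelux_resite():
    technologies = ["pv_utility", "wind_offshore"]
    timeslice = ['2015-01-01T00:00', '2015-01-01T23:00']
    resite = Resite(["BENELUX"], technologies, timeslice, 0.5)
    resite.build_data(True)
    return resite
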
def add_generators_using_siting(net: pypsa.Network, technologies: List[str], region: str,
                                siting_params: Dict[str, Any],
                                use_ex_cap: bool = True, limit_max_cap: bool = True,
                                output_dir: str = None) -> pypsa.Network:
    """
    Add generators for different technologies at a series of locations selected via an optimization mechanism.

    Parameters
    ----------
    net: pypsa.Network
        A network with defined buses.
    technologies: List[str]
        Which technologies to add using this methodology.
    region: str
        Region over which the network is defined.
    siting_params: Dict[str, Any]
        Set of parameters necessary for siting.
    use_ex_cap: bool (default: True)
        Whether to take into account existing capacity.
    limit_max_cap: bool (default: True)
        Whether to limit capacity expansion at each grid cell to a certain capacity potential.
    output_dir: str
        Absolute path to directory where resite output should be stored.

    Returns
    -------
    net: pypsa.Network
        Updated network.

    Notes
    -----
    net.buses must have an 'onshore_region' attribute if adding onshore technologies and an
    'offshore_region' attribute if adding offshore technologies.
    """

    for param in ["timeslice", "spatial_resolution", "modelling", "formulation", "formulation_params", "write_lp"]:
        assert param in siting_params, f"Error: Missing parameter {param} for siting."

    from resite.resite import Resite

    logger.info('Setting up resite.')
    resite = Resite([region], technologies, siting_params["timeslice"], siting_params["spatial_resolution"],
                    siting_params["min_cap_if_selected"])
    resite.build_data(use_ex_cap)

    logger.info('resite model being built.')
    resite.build_model(siting_params["modelling"], siting_params['formulation'], siting_params['formulation_params'],
                       siting_params['write_lp'], output_dir)

    logger.info('Sending resite to solver.')
    resite.solve_model(solver_options=siting_params['solver_options'], solver=siting_params['solver'])

    logger.info('Retrieving resite results.')
    resite.retrieve_selected_sites_data()
    tech_location_dict = resite.sel_tech_points_dict
    existing_cap_ds = resite.sel_data_dict["existing_cap_ds"]
    cap_potential_ds = resite.sel_data_dict["cap_potential_ds"]
    cap_factor_df = resite.sel_data_dict["cap_factor_df"]

    logger.info("Saving resite results")
    resite.save(output_dir)

    if not resite.timestamps.equals(net.snapshots):
        # If the network snapshots are a subset of the resite snapshots, just crop the data
        missing_timestamps = set(net.snapshots) - set(resite.timestamps)
        if not missing_timestamps:
            cap_factor_df = cap_factor_df.loc[net.snapshots]
        else:
            # Otherwise, capacity factors would need to be recomputed
            raise NotImplementedError("Error: Network snapshots must currently be a subset of resite snapshots.")

    for tech, points in tech_location_dict.items():

        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated shapes)
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(points, buses[region_type]).dropna()
        points = list(associated_buses.index)

        p_nom_max = 'inf'
        if limit_max_cap:
            p_nom_max = cap_potential_ds[tech][points].values
        p_nom = existing_cap_ds[tech][points].values
        p_max_pu = cap_factor_df[tech][points].values

        capital_cost, marginal_cost = get_costs(tech, len(net.snapshots))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net

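# A hedged usage sketch for add_generators_using_siting, not taken from the original code base.
# It assumes `net` is a pypsa.Network whose buses already carry the 'onshore_region'/'offshore_region'
# shapes mentioned in the docstring. Besides the keys asserted at the top of the function, the body
# also reads 'min_cap_if_selected', 'solver' and 'solver_options', so they are included here; all
# concrete values (solver name, output path, thresholds) are placeholders.
def _example_add_generators_using_siting(net: pypsa.Network) -> pypsa.Network:
    siting_params = {
        "timeslice": ['2015-01-01T00:00', '2015-01-01T23:00'],
        "spatial_resolution": 0.5,
        "min_cap_if_selected": 0.,
        "modelling": "pyomo",
        "formulation": "max_generation",
        "formulation_params": {"nb_sites_per_region": [5]},
        "write_lp": False,
        "solver": "cbc",          # placeholder solver name
        "solver_options": {},
    }
    return add_generators_using_siting(net, ["pv_utility", "wind_offshore"], "BENELUX",
                                       siting_params, output_dir="/tmp/siting/")
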
def add_generators_in_grid_cells(net: pypsa.Network, technologies: List[str],
                                 region: str, spatial_resolution: float,
                                 use_ex_cap: bool = True, limit_max_cap: bool = True,
                                 min_cap_pot: List[float] = None) -> pypsa.Network:
    """
    Create VRES generators in every grid cell obtained from dividing a certain number of regions.

    Parameters
    ----------
    net: pypsa.Network
        A PyPSA Network instance with buses associated to regions.
    technologies: List[str]
        Which technologies to add.
    region: str
        Region code defined in 'data_path'/geographics/region_definition.csv over which the network is defined.
    spatial_resolution: float
        Spatial resolution at which to define grid cells.
    use_ex_cap: bool (default: True)
        Whether to take into account existing capacity.
    limit_max_cap: bool (default: True)
        Whether to limit capacity expansion at each grid cell to a certain capacity potential.
    min_cap_pot: List[float] (default: None)
        List of thresholds per technology. Points with capacity potential under this threshold will be removed.

    Returns
    -------
    net: pypsa.Network
        Updated network.

    Notes
    -----
    net.buses must have an 'onshore_region' attribute if adding onshore technologies and an
    'offshore_region' attribute if adding offshore technologies.
    """

    from resite.resite import Resite

    # Generate deployment sites using resite
    resite = Resite([region], technologies, [net.snapshots[0], net.snapshots[-1]], spatial_resolution)
    resite.build_data(use_ex_cap, min_cap_pot)

    for tech in technologies:

        points = resite.tech_points_dict[tech]
        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated shapes)
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(points, buses[region_type]).dropna()
        points = list(associated_buses.index)

        p_nom_max = 'inf'
        if limit_max_cap:
            p_nom_max = resite.data_dict["cap_potential_ds"][tech][points].values
        p_nom = resite.data_dict["existing_cap_ds"][tech][points].values
        p_max_pu = resite.data_dict["cap_factor_df"][tech][points].values

        capital_cost, marginal_cost = get_costs(tech, len(net.snapshots))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net

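# A hedged usage sketch for add_generators_in_grid_cells, again not part of the original module.
# The network, region code and thresholds are placeholders; min_cap_pot provides one capacity-potential
# threshold per technology, in the same unit as resite's capacity-potential data.
def _example_add_generators_in_grid_cells(net: pypsa.Network) -> pypsa.Network:
    technologies = ["pv_utility", "wind_offshore"]
    return add_generators_in_grid_cells(net, technologies, region="BENELUX", spatial_resolution=0.5,
                                        use_ex_cap=True, limit_max_cap=True,
                                        min_cap_pot=[0.01, 0.01])
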
def add_res_at_sites(net: pypsa.Network, config, output_dir, eu_countries):

    eu_technologies = config['res']['techs']

    logger.info(f"Adding RES {eu_technologies} generation.")

    spatial_res = config["res"]["spatial_resolution"]
    use_ex_cap = config["res"]["use_ex_cap"]
    min_cap_pot = config["res"]["min_cap_pot"]
    min_cap_if_sel = config["res"]["min_cap_if_selected"]

    # Build sites for EU
    r_europe = Resite(eu_countries, eu_technologies, [net.snapshots[0], net.snapshots[-1]],
                      spatial_res, min_cap_if_sel)
    regions_shapes = net.buses.loc[eu_countries, ["onshore_region", 'offshore_region']]
    regions_shapes.columns = ['onshore', 'offshore']
    r_europe.build_data(use_ex_cap, min_cap_pot, regions_shapes=regions_shapes)
    net.cc_ds = r_europe.data_dict["capacity_credit_ds"]

    # Build sites for other regions
    non_eu_res = config["non_eu"]
    all_remote_countries = []
    if non_eu_res is not None:
        for region in non_eu_res.keys():
            if region in ["na", "me"]:
                remote_countries = get_subregions(region)
            else:
                remote_countries = [region]
            all_remote_countries += remote_countries

            remote_techs = non_eu_res[region]
            r_remote = Resite(remote_countries, remote_techs, [net.snapshots[0], net.snapshots[-1]], spatial_res)
            regions_shapes = net.buses.loc[remote_countries, ["onshore_region", 'offshore_region']]
            regions_shapes.columns = ['onshore', 'offshore']
            r_remote.build_data(False, compute_load=False, regions_shapes=regions_shapes)

            # Add sites to European ones
            r_europe.regions += r_remote.regions
            r_europe.technologies = list(set(r_europe.technologies).union(r_remote.technologies))
            r_europe.min_cap_pot_dict = {**r_europe.min_cap_pot_dict, **r_remote.min_cap_pot_dict}
            r_europe.tech_points_tuples = np.concatenate((r_europe.tech_points_tuples,
                                                          r_remote.tech_points_tuples))
            r_europe.initial_sites_ds = r_europe.initial_sites_ds.append(r_remote.initial_sites_ds)
            r_europe.tech_points_regions_ds = \
                r_europe.tech_points_regions_ds.append(r_remote.tech_points_regions_ds)
            r_europe.data_dict["load"] = pd.concat([r_europe.data_dict["load"],
                                                    r_remote.data_dict["load"]], axis=1)
            r_europe.data_dict["cap_potential_ds"] = \
                r_europe.data_dict["cap_potential_ds"].append(r_remote.data_dict["cap_potential_ds"])
            r_europe.data_dict["existing_cap_ds"] = \
                r_europe.data_dict["existing_cap_ds"].append(r_remote.data_dict["existing_cap_ds"])
            r_europe.data_dict["cap_factor_df"] = \
                pd.concat([r_europe.data_dict["cap_factor_df"], r_remote.data_dict["cap_factor_df"]], axis=1)

    # Update dictionary
    tech_points_dict = {}
    techs = set(r_europe.initial_sites_ds.index.get_level_values(0))
    for tech in techs:
        tech_points_dict[tech] = list(r_europe.initial_sites_ds[tech].index)
    r_europe.tech_points_dict = tech_points_dict

    # Do siting if required
    if config["res"]["strategy"] == "siting":

        logger.info('resite model being built.')
        siting_params = config['res']
        # if siting_params['formulation'] == "min_cost_global":
        #     siting_params['formulation_params']['perc_per_region'] = \
        #         siting_params['formulation_params']['perc_per_region'] + [0.] * len(all_remote_countries)

        r_europe.build_model(siting_params["modelling"], siting_params['formulation'],
                             siting_params['formulation_params'],
                             siting_params['write_lp'], f"{output_dir}resite/")

        logger.info('Sending resite to solver.')
        r_europe.init_output_folder(f"{output_dir}resite/")
        r_europe.solve_model(f"{output_dir}resite/", solver=config['solver'],
                             solver_options=config['solver_options'])

        logger.info("Saving resite results")
        r_europe.retrieve_selected_sites_data()
        r_europe.save(f"{output_dir}resite/")

        # Add solution to network
        logger.info('Retrieving resite results.')
        tech_location_dict = r_europe.sel_tech_points_dict
        existing_cap_ds = r_europe.sel_data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.sel_data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.sel_data_dict["cap_factor_df"]

        if not r_europe.timestamps.equals(net.snapshots):
            # If the network snapshots are a subset of the resite snapshots, just crop the data
            missing_timestamps = set(net.snapshots) - set(r_europe.timestamps)
            if not missing_timestamps:
                cap_factor_df = cap_factor_df.loc[net.snapshots]
            else:
                # Otherwise, capacity factors would need to be recomputed
                raise NotImplementedError(
                    "Error: Network snapshots must currently be a subset of resite snapshots.")

    else:  # no siting
        tech_location_dict = r_europe.tech_points_dict
        existing_cap_ds = r_europe.data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.data_dict["cap_factor_df"]

    for tech, points in tech_location_dict.items():

        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated shapes)
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(points, buses[region_type]).dropna()
        points = list(associated_buses.index)

        p_nom_max = 'inf'
        if config['res']['limit_max_cap']:
            p_nom_max = cap_potential_ds[tech][points].values
        p_nom = existing_cap_ds[tech][points].values
        p_max_pu = cap_factor_df[tech][points].values

        capital_cost, marginal_cost = get_costs(tech, sum(net.snapshot_weightings['objective']))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net

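# A hedged sketch of the configuration structure that add_res_at_sites expects, reconstructed from
# the keys read in the body above rather than from any original documentation; all concrete values
# are placeholders.
_example_res_config = {
    "res": {
        "techs": ["pv_utility", "wind_offshore"],
        "spatial_resolution": 0.5,
        "use_ex_cap": True,
        "min_cap_pot": [0.01, 0.01],
        "min_cap_if_selected": 0.,
        "strategy": "siting",            # any other value falls through to the "no siting" branch
        "limit_max_cap": True,
        # Only read when strategy == "siting":
        "modelling": "pyomo",
        "formulation": "max_generation",
        "formulation_params": {"nb_sites_per_region": [5]},
        "write_lp": False,
    },
    "non_eu": None,                      # or a mapping of remote region codes to technology lists
    "solver": "cbc",                     # placeholder solver name
    "solver_options": {},
}
# Example call (assuming `net`, `output_dir` and `eu_countries` are defined elsewhere):
# net = add_res_at_sites(net, _example_res_config, output_dir, eu_countries)
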
regions, p_nom_extendable=True, extension_multiplier=2.0)

regions_shapes = net.buses.loc[regions, ["onshore_region", 'offshore_region']]
regions_shapes.columns = ['onshore', 'offshore']

output_folder = join(dirname(abspath(__file__)), f"output/{strftime('%Y%m%d_%H%M%S')}/")
# Compute and save results
if not isdir(output_folder):
    makedirs(output_folder)

logger.info('Building class.')
resite = Resite(regions, params["technologies"], params["timeslice"], params["spatial_resolution"],
                params['min_cap_if_selected'])

logger.info('Reading input.')
resite.build_data(use_ex_cap=params["use_ex_cap"], min_cap_pot=params["min_cap_pot"],
                  regions_shapes=regions_shapes)

logger.info('Model being built.')
resite.build_model(params["modelling"], params['formulation'], params['formulation_params'],
                   params['write_lp'], output_folder)

logger.info('Sending model to solver.')
results = resite.solve_model(output_folder=output_folder, solver_options=params['solver_options'],