def get_capacity_potential_per_country(countries: List[str], is_onshore: bool,
                                       filters: Dict, power_density: float,
                                       processes: int = None):
    """
    Return capacity potentials (GW) for a series of countries.

    Parameters
    ----------
    countries: List[str]
        List of ISO codes.
    is_onshore: bool
        Whether the technology is onshore located.
    filters: Dict
        Dictionary containing a set of values describing the filters to apply to obtain land availability.
    power_density: float
        Power density in MW/km2.
    processes: int (default: None)
        Number of parallel processes.

    Returns
    -------
    pd.Series
        Series containing the capacity potentials (GW) for each code.
    """
    which = 'onshore' if is_onshore else 'offshore'
    shapes = get_shapes(countries, which=which, save=True)["geometry"]
    land_availability = get_land_availability_for_shapes(shapes, filters, processes)
    return pd.Series(land_availability * power_density / 1e3, index=shapes.index)
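# Illustrative usage sketch (not part of the library, kept commented out because the land-availability
# computation is heavy): the filter dictionary below is an assumed minimal example mirroring the
# __main__ block further down in this file.
# filters_example = {"natura": 1}
# potentials = get_capacity_potential_per_country(["BE", "LU"], True, filters_example,
#                                                 power_density=5.0, processes=2)
# print(potentials)  # pd.Series of GW potentials indexed by "BE" and "LU"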
def test_match_powerplants_to_regions_shape_mismatch():
    countries = ["BE", "FI", "FR", "GB", "RO"]
    shapes = get_shapes(countries, which="offshore")["geometry"]
    df = get_powerplants('ror', countries)
    ds = match_powerplants_to_regions(df, shapes, dist_threshold=0.)
    assert len(ds) == len(df)
    assert len(ds.dropna()) == 0
def test_create_grid_cells_too_coarse_resolution():
    shape = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    res = 10.0
    points, gc = create_grid_cells(shape, res)
    assert isinstance(points, list)
    assert isinstance(gc, np.ndarray)
    assert len(points) == 1
    assert len(gc) == 1
def test_get_grid_cells_missing_shapes():
    shapes = get_shapes(["BE"])
    onshore_shape = shapes[~shapes["offshore"]].loc["BE", "geometry"]
    offshore_shape = shapes[shapes["offshore"]].loc["BE", "geometry"]
    with pytest.raises(AssertionError):
        get_grid_cells(['wind_onshore'], 0.5, offshore_shape=offshore_shape)
    with pytest.raises(AssertionError):
        get_grid_cells(['wind_offshore'], 0.5, onshore_shape=onshore_shape)
def test_create_grid_cells():
    shape = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    res = 1.0
    points, gc = create_grid_cells(shape, res)
    assert len(gc) == len(points)
    assert all([isinstance(cell, Polygon) or isinstance(cell, MultiPolygon) for cell in gc])
    areas_sum = sum([cell.area for cell in gc])
    assert abs(areas_sum - shape.area) / max(areas_sum, shape.area) < 0.01
def test_match_powerplants_to_regions_without_shapes():
    countries = ["BE", "FI", "FR", "GB", "RO"]
    nb_plants = [3, 14, 85, 9, 77]
    shapes = get_shapes(countries, which="onshore")["geometry"]
    df = get_powerplants('sto', countries)
    ds = match_powerplants_to_regions(df, shapes, countries)
    assert len(ds) == len(df)
    for i, c in enumerate(countries):
        assert sum(ds == c) == nb_plants[i]

    regions = ["ES11", "ES12", "ES13", "ES21", "ES22", "ES23", "ES24", "ES41",
               "ES42", "ES43", "ES51", "ES52"]
    shapes = get_shapes(regions, which="onshore")["geometry"]
    df = get_powerplants('nuclear', ["ES"])
    ds = match_powerplants_to_regions(df, shapes, [c[:2] for c in regions])
    assert all([c in ds.values for c in ["ES52", "ES42", "ES51"]])
def create_interior_shore_proximity_prior():
    """Generate a Prior, defined over onshore territories, indicating pixels
    which are less-than or equal-to X meters from shore."""

    # Indicates distances too close to shore (m)
    # considering values for 12, 30, 50, 60, 100, 150 and 200 nm (-> 22, 56, 93, 111, 185, 278 and 370 km)
    # distances = [0, 5e3, 10e3, 15e3, 20e3, 22e3, 25e3, 50e3, 56e3, 93e3, 100e3,
    #              111e3, 185e3, 200e3, 278e3, 300e3, 370e3, 400e3, 500e3, 1000e3]
    distances = [100, 250, 500, 1000, 1500]

    # Create onshore shape
    countries = ["AL", "AT", "BA", "BE", "BG", "CH", "CZ", "DE", "DK", "EE", "ES", "FI", "FR", "GB", "GR", "HR",
                 "HU", "IE", "IT", "LT", "LU", "LV", "ME", "MK", "NL", "NO", "PL", "PT", "RO", "RS", "SE", "SI", "SK"]
    shapes = get_shapes(countries, which='onshore')
    onshore_union = unary_union(shapes["geometry"].values)
    poly_wkt = shapely.wkt.dumps(onshore_union)
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(4326)
    poly = ogr.CreateGeometryFromWkt(poly_wkt, spatial_ref)

    # Make Region Mask (pixel resolution of 100, finer than the 1000 used for the offshore prior)
    reg = gk.RegionMask.load(poly, pixelRes=100)

    # Create a geometry list from the osm files
    from shapely.geometry import Polygon
    opposite = Polygon([(-20, 30), (-20, 75), (40, 75), (40, 30)]).difference(onshore_union)[0]
    from epippy.geographics.plot import display_polygons
    display_polygons([opposite])
    poly_wkt_out_eu = shapely.wkt.dumps(opposite)
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(4326)
    poly_out_eu = ogr.CreateGeometryFromWkt(poly_wkt_out_eu, spatial_ref)

    target = osr.SpatialReference()
    target.ImportFromEPSG(3857)
    transform = osr.CoordinateTransformation(spatial_ref, target)
    poly_out_eu.Transform(transform)

    # Get edge matrix
    result = edgesByProximity(reg, [poly_out_eu], distances)

    # Save result
    ftr_id = 0
    name = "interior_shore_proximity"
    unit = "meters"
    description = "Indicates pixels which are less-than or equal-to X meters from shore inside shore"
    source = "NaturalEarth"
    tail = str(int(dt.now().timestamp()))
    potential_dir = f"{data_path}generation/vres/potentials/"
    output_dir = f"{potential_dir}generated/GLAES/"
    writeEdgeFile(result, reg, ftr_id, output_dir, name, tail, unit, description, source, distances)
def test_match_powerplants_to_regions_non_iso_codes_for_shapes():
    countries = ["BE", "NL"]
    shapes = get_shapes(countries)["geometry"]
    with pytest.raises(AssertionError):
        pp_df = pd.DataFrame({"ISO2": ["BE", "NL"], "lon": [0, 1], "lat": [1, 0]})
        match_powerplants_to_regions(pp_df, shapes, ["Belgium", "Netherlands"])
def test_match_points_to_regions_one_point_away_from_shape_keeping():
    onshore_shapes = get_shapes(["NL", "BE"], "onshore")["geometry"]
    ds = match_points_to_regions([(3.91953, 52.0067)], onshore_shapes, distance_threshold=5.)
    assert isinstance(ds, pd.Series)
    assert len(ds) == 1
    assert (3.91953, 52.0067) in ds.index
    assert np.isnan(ds[(3.91953, 52.0067)])
def test_match_points_to_regions_one_point_near_shape_keeping():
    onshore_shapes = get_shapes(["NL", "BE"], "onshore")["geometry"]
    ds = match_points_to_regions([(3.9855853, 51.9205033)], onshore_shapes, distance_threshold=5.)
    assert isinstance(ds, pd.Series)
    assert len(ds) == 1
    assert (3.9855853, 51.9205033) in ds.index
    assert ds[(3.9855853, 51.9205033)] == "NL"
def test_match_points_to_regions_one_point_near_shape_not_keeping():
    onshore_shapes = get_shapes(["NL"], "onshore")["geometry"]
    ds = match_points_to_regions([(3.9855853, 51.9205033)], onshore_shapes, keep_outside=False)
    assert isinstance(ds, pd.Series)
    assert len(ds) == 1
    assert (3.9855853, 51.9205033) in ds.index
    assert np.isnan(ds[(3.9855853, 51.9205033)])
def test_match_points_to_regions_one_point_in_two_shapes():
    onshore_shapes = get_shapes(["BE", "NL"], "onshore")["geometry"]
    ds = match_points_to_regions([(4.3053506, 50.8550625)], onshore_shapes, keep_outside=False)
    assert isinstance(ds, pd.Series)
    assert len(ds) == 1
    assert (4.3053506, 50.8550625) in ds.index
    assert ds[(4.3053506, 50.8550625)] == "BE"
def test_get_points_in_shape_without_init_points():
    shape = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    res = 0.5
    points = get_points_in_shape(shape, res)
    assert isinstance(points, list)
    assert all(isinstance(point, tuple) for point in points)
    assert all(len(point) == 2 for point in points)
    assert all(int(point[0] / res) == point[0] / res and int(point[1] / res) == point[1] / res
               for point in points)
def test_match_powerplants_to_regions_missing_columns():
    countries = ["BE", "NL"]
    shapes = get_shapes(countries)["geometry"]
    with pytest.raises(AssertionError):
        pp_df = pd.DataFrame(columns=["ISO2", "lon"])
        match_powerplants_to_regions(pp_df, shapes)
    with pytest.raises(AssertionError):
        pp_df = pd.DataFrame(columns=["ISO2", "lat"])
        match_powerplants_to_regions(pp_df, shapes)
    with pytest.raises(AssertionError):
        pp_df = pd.DataFrame(columns=["lat", "lon"])
        match_powerplants_to_regions(pp_df, shapes)
def test_get_grid_cells():
    shapes = get_shapes(["BE"])
    onshore_shape = shapes[~shapes["offshore"]]["geometry"]
    offshore_shape = shapes[shapes["offshore"]]["geometry"]
    ds = get_grid_cells(['wind_onshore', 'wind_offshore', 'pv_utility'], 0.25, onshore_shape, offshore_shape)
    assert isinstance(ds, pd.Series)
    assert len(ds['wind_offshore']) == 6
    assert len(ds['wind_onshore']) == 63
    assert len(ds['pv_utility']) == 63
    assert (ds['wind_onshore'] == ds['pv_utility']).all()
def create_shore_proximity_prior():
    """Generate a Prior, defined over offshore territories, indicating pixels
    which are less-than or equal-to X meters from shore."""

    # Indicates distances too close to shore (m)
    # considering values for 12, 30, 50, 60, 100, 150 and 200 nm (-> 22, 56, 93, 111, 185, 278 and 370 km)
    # distances = [0, 5e3, 10e3, 15e3, 20e3, 22e3, 25e3, 50e3, 56e3, 93e3, 100e3,
    #              111e3, 185e3, 200e3, 278e3, 300e3, 370e3, 400e3, 500e3, 1000e3]
    distances = [0, 20e3, 50e3, 100e3, 111e3, 185e3, 370e3, 500e3]

    # Create offshore shape
    countries = ["AL", "BA", "BE", "BG", "DE", "DK", "EE", "ES", "FI", "FR", "GB", "GR", "HR", "IE", "IT", "LT",
                 "LV", "ME", "NL", "NO", "PL", "PT", "RO", "SE", "SI"]
    shapes = get_shapes(countries, which='offshore', save=True)
    offshore_union = unary_union(shapes["geometry"].values)
    poly_wkt = shapely.wkt.dumps(offshore_union)
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromEPSG(4326)
    poly = ogr.CreateGeometryFromWkt(poly_wkt, spatial_ref)

    # Make Region Mask (set resolution to 1km)
    reg = gk.RegionMask.load(poly, pixelRes=1000)

    # Create a geometry list from the osm files
    potential_dir = f"{data_path}generation/vres/potentials/"
    gebco = gk.vector.loadVector(
        f"{potential_dir}source/GEBCO/GEBCO_2019/gebco_2019_n75.0_s30.0_w-20.0_e40.0.tif")
    indicated = reg.indicateValues(gebco, value='(0-]', applyMask=False) > 0.5
    geom = gk.geom.polygonizeMask(indicated, bounds=reg.extent.xyXY, srs=reg.srs)

    # Get edge matrix
    result = edgesByProximity(reg, [geom], distances)

    # Save result
    ftr_id = 0
    name = "shore_proximity"
    unit = "meters"
    description = "Indicates pixels which are less-than or equal-to X meters from shore"
    source = "GEBCO"
    tail = str(int(dt.now().timestamp()))
    output_dir = f"{potential_dir}generated/GLAES/"
    writeEdgeFile(result, reg, ftr_id, output_dir, name, tail, unit, description, source, distances)
def test_get_points_in_shape_with_init_points():
    shape = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    res = 1.0
    points_in = [(4.0, 51.0), (5.0, 50.0)]
    point_out = [(4.0, 52.0), (3.0, 50.0)]
    init_points = point_out + points_in
    points = get_points_in_shape(shape, res, init_points)
    assert isinstance(points, list)
    assert all(isinstance(point, tuple) for point in points)
    assert all(len(point) == 2 for point in points)
    assert all(int(point[0] / res) == point[0] / res and int(point[1] / res) == point[1] / res
               for point in points)
    assert points == points_in
def define_simple_network() -> pypsa.Network:
    """
    Return a simple test PyPSA network.

    The network is composed of four onshore buses associated with the onshore territories of Belgium,
    the Netherlands, Luxembourg and France, and of one offshore bus corresponding to the offshore
    territory of Belgium. Currently, no links or lines are included.
    """
    net = pypsa.Network()
    buses_id = ["ONBE", "ONNL", "ONLU", "ONFR", "OFF1"]

    # Geographical info
    all_shapes = get_shapes(["BE", "NL", "LU", "FR"], which='onshore_offshore')
    onshore_shapes = all_shapes.loc[~all_shapes['offshore']]["geometry"]
    offshore_shape = all_shapes.loc[(all_shapes['offshore']) & (all_shapes.index == 'BE')]["geometry"]
    centroids = [onshore_shapes["BE"].centroid, onshore_shapes["NL"].centroid, onshore_shapes["LU"].centroid,
                 onshore_shapes["FR"].centroid, offshore_shape["BE"].centroid]
    xs, ys = zip(*[(point.x, point.y) for point in centroids])

    # Add buses
    buses = pd.DataFrame(index=buses_id, columns=["x", "y", "country", "onshore_region", "offshore_region"])
    buses["x"] = xs
    buses["y"] = ys
    buses["country"] = ["BE", "NL", "LU", "FR", None]
    buses["onshore_region"] = [onshore_shapes["BE"], onshore_shapes["NL"], onshore_shapes["LU"],
                               onshore_shapes["FR"], None]
    buses["offshore_region"] = [None, None, None, None, offshore_shape["BE"]]
    net.import_components_from_dataframe(buses, "Bus")
    print(buses)

    # Time
    ts = pd.date_range('2015-01-01T00:00', '2015-01-01T23:00', freq='1H')
    net.set_snapshots(ts)

    return net
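# Illustrative usage sketch (commented out; intended for the tests in this module):
# net = define_simple_network()
# assert len(net.buses) == 5      # four onshore buses plus one offshore bus
# assert len(net.snapshots) == 24  # one day of hourly snapshots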
def get_capacity_potential_at_points(tech_points_dict: Dict[str, List[Tuple[float, float]]],
                                     spatial_resolution: float, countries: List[str],
                                     existing_capacity_ds: pd.Series = None) -> pd.Series:
    """
    Compute the potential capacity at a series of points for different technologies.

    Parameters
    ----------
    tech_points_dict : Dict[str, List[Tuple[float, float]]]
        Dictionary associating to each tech a list of points.
    spatial_resolution : float
        Spatial resolution of the points.
    countries: List[str]
        List of ISO codes of countries in which the points are situated.
    existing_capacity_ds: pd.Series (default: None)
        Data series giving for each tuple of (tech, point) the existing capacity.

    Returns
    -------
    capacity_potential_ds : pd.Series
        Gives for each pair of technology - point the associated capacity potential in GW.
    """
    accepted_techs = ['wind_onshore', 'wind_offshore', 'wind_floating', 'pv_utility', 'pv_residential']
    for tech, points in tech_points_dict.items():
        assert tech in accepted_techs, f"Error: tech {tech} is not in {accepted_techs}"
        assert len(points) != 0, f"Error: List of points for tech {tech} is empty."
        assert all(map(lambda point: int(point[0] / spatial_resolution) == point[0] / spatial_resolution
                       and int(point[1] / spatial_resolution) == point[1] / spatial_resolution, points)), \
            f"Error: Some points do not have the correct resolution {spatial_resolution}"

    pop_density_array = load_population_density_data(spatial_resolution)

    # Create a modified copy of regions to deal with UK and EL
    iso_to_nuts0 = {"GB": "UK", "GR": "EL"}
    nuts0_regions = [iso_to_nuts0[c] if c in iso_to_nuts0 else c for c in countries]

    # Get NUTS2 and EEZ shapes
    nuts2_regions_list = get_available_regions("nuts2")
    codes = [code for code in nuts2_regions_list if code[:2] in nuts0_regions]
    region_shapes_dict = {"nuts2": get_shapes(codes, which='onshore')["geometry"],
                          "eez": get_shapes(countries, which='offshore', save=True)["geometry"]}
    region_shapes_dict["eez"].index = [f"EZ{code}" for code in region_shapes_dict["eez"].index]

    tech_points_tuples = sorted([(tech, point[0], point[1]) for tech, points in tech_points_dict.items()
                                 for point in points])
    capacity_potential_ds = pd.Series(0., index=pd.MultiIndex.from_tuples(tech_points_tuples))

    # Check that existing capacity is defined for every point
    if existing_capacity_ds is not None:
        missing_existing_points = set(existing_capacity_ds.index) - set(capacity_potential_ds.index)
        assert not missing_existing_points, \
            f"Error: Missing following points in existing capacity series: {missing_existing_points}"

    for tech, points in tech_points_dict.items():

        # Compute potential for each NUTS2 or EEZ
        potential_per_region_ds = read_capacity_potential(tech, nuts_type='nuts2')

        # Find the geographical region code associated to each point
        if tech in ['wind_offshore', 'wind_floating']:
            region_shapes = region_shapes_dict["eez"]
        else:
            region_shapes = region_shapes_dict["nuts2"]

        point_regions_ds = match_points_to_regions(points, region_shapes).dropna()
        points = list(point_regions_ds.index)
        points_info_df = pd.DataFrame(point_regions_ds.values, point_regions_ds.index, columns=["region"])

        if tech in ['wind_offshore', 'wind_floating']:

            # For offshore sites, divide the total potential of the region by the number of points
            # associated to that region.

            # Get how many points we have in each region and the potential capacity of those regions
            region_freq_ds = points_info_df.groupby(['region'])['region'].count()
            regions = region_freq_ds.index
            region_cap_pot_ds = potential_per_region_ds[regions]
            region_info_df = pd.concat([region_freq_ds, region_cap_pot_ds], axis=1)
            region_info_df.columns = ["freq", "cap_pot"]

            # Assign these values to each point depending on which region it falls in
            points_info_df = points_info_df.merge(region_info_df, left_on='region', right_index=True)

            # Compute potential of each point by dividing the region potential by the number of points it contains
            cap_pot_per_point = points_info_df["cap_pot"] / points_info_df["freq"]

        else:  # tech in ['wind_onshore', 'pv_utility', 'pv_residential']

            # For onshore sites, divide the total anti-proportionally (or proportionally for residential PV)
            # to population.
            # Here we actually use population density, which is proportional to population because we consider
            # that each point is associated to an equivalent area.
            points_info_df['pop_dens'] = np.clip(pop_density_array.sel(locations=points).values,
                                                 a_min=1., a_max=None)
            if tech in ['wind_onshore', 'pv_utility']:
                points_info_df['pop_dens'] = 1. / points_info_df['pop_dens']

            # Aggregate per region and get capacity potential for regions in which the points fall
            regions_info_df = points_info_df.groupby(['region']).sum()
            regions_info_df["cap_pot"] = potential_per_region_ds[regions_info_df.index]
            regions_info_df.columns = ['sum_pop_dens', 'cap_pot']

            # Assign these values to each point depending on which region it falls in
            points_info_df = points_info_df.merge(regions_info_df, left_on='region', right_index=True)

            # Compute potential
            cap_pot_per_point = \
                points_info_df['pop_dens'] * points_info_df['cap_pot'] / points_info_df['sum_pop_dens']

        capacity_potential_ds.loc[tech, cap_pot_per_point.index] = cap_pot_per_point.values

    # Update capacity potential with existing potential if present
    if existing_capacity_ds is not None:
        underestimated_capacity = existing_capacity_ds[capacity_potential_ds.index] > capacity_potential_ds
        capacity_potential_ds[underestimated_capacity] = existing_capacity_ds[underestimated_capacity]

    return capacity_potential_ds
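# Illustrative usage sketch (assumed inputs; running it requires the NUTS2 potential and population
# density datasets to be available locally, so it is kept commented out):
# points = {"wind_onshore": [(4.5, 50.5), (5.0, 50.0)]}
# cap_pot = get_capacity_potential_at_points(points, 0.5, ["BE"])
# print(cap_pot)  # pd.Series indexed by (tech, lon, lat), values in GW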
def cluster_network(net: pypsa.Network, nuts_codes: List[str]):

    # Get shapes of regions (onshore and offshore)
    shapes = get_shapes(nuts_codes, which='onshore')

    # --- Buses --- #
    # TODO: refactor --> should return a list keeping the index of the original points
    def add_region(lon, lat):
        try:
            region_code = matched_locs[lon, lat]
            # Need the if because some points are exactly at the same position
            return region_code if (isinstance(region_code, str) or isinstance(region_code, float)
                                   or isinstance(region_code, int)) else region_code.iloc[0]
        except (AttributeError, KeyError):
            return None
    # plants_region_ds.loc[pp_df_in_country.index] = \
    #     pp_df_in_country[["lon", "lat"]].apply(lambda x: add_region(x[0], x[1]), axis=1)

    buses_positions = net.buses[['x', 'y']].apply(lambda p: (p.x, p.y), axis=1).tolist()
    matched_locs = match_points_to_regions(buses_positions, shapes["geometry"], keep_outside=False).dropna()
    old_to_new_buses_map = net.buses[["x", "y"]].apply(lambda p: add_region(p.x, p.y), axis=1).dropna()
    nuts_codes_with_buses = list(set(old_to_new_buses_map))

    # Merge buses to centroid of countries (even offshore ones)
    buses_df = pd.DataFrame(columns=["bus_id", "x", "y"])
    buses_df["bus_id"] = nuts_codes_with_buses
    buses_df = buses_df.set_index('bus_id')
    regions = shapes.loc[nuts_codes_with_buses, 'geometry']
    buses_df['onshore_region'] = regions
    buses_df["x"] = regions.centroid.apply(lambda p: p.x)
    buses_df["y"] = regions.centroid.apply(lambda p: p.y)
    print(buses_df)

    def compute_distance(bus0, bus1):
        bus0_x, bus0_y = buses_df.loc[bus0, ['x', 'y']]
        bus1_x, bus1_y = buses_df.loc[bus1, ['x', 'y']]
        return geopy.distance.geodesic((bus0_y, bus0_x), (bus1_y, bus1_x)).km

    # --- Lines --- #
    # Remove lines associated to buses that have been removed
    net.lines = net.lines.loc[net.lines.bus0.isin(old_to_new_buses_map.index)
                              & net.lines.bus1.isin(old_to_new_buses_map.index)]
    # Assign new bus to lines
    net.lines.bus0 = net.lines.bus0.map(old_to_new_buses_map)
    net.lines.bus1 = net.lines.bus1.map(old_to_new_buses_map)
    # Remove lines that have the same starting and end bus
    net.lines = net.lines[net.lines.bus0 != net.lines.bus1]

    # Merge lines with same starting and end bus
    # Get all lines connected to the same buses in the same direction
    net.lines[['bus0', 'bus1']] = [sorted((bus0, bus1)) for (bus0, bus1) in net.lines[['bus0', 'bus1']].values]
    # Get pairs of connected buses
    connected_buses = set([(bus0, bus1) for (bus0, bus1) in net.lines[['bus0', 'bus1']].values.tolist()])
    all_lines_df = pd.DataFrame()
    # Compute reactance of lines
    net.lines["x"] = net.lines["length"] * net.lines['type'].map(net.line_types.x_per_length)
    for bus0, bus1 in connected_buses:
        # line_index = f"{bus0}-{bus1}"
        # lines_df.loc[line_index, ['bus0', 'bus1']] = (bus0, bus1)

        # Get all lines in original network that were connected to bus0 and bus1
        old_lines_df = net.lines[(net.lines.bus0 == bus0) & (net.lines.bus1 == bus1)]

        # Merge those lines by voltage level
        # TODO: Parameters to consider
        #  - name: ok
        #  - bus0: ok
        #  - bus1: ok
        #  - underground: ? --> affects cost
        #  - under_construction: already dealt with before
        #  - tags: nope
        #  - geometry: nope (replaced by straight line)
        #  - length: how? -> just take fly-distance (times 1.25 like in pypsa-eur?)
        #  - x: how?
        #  - v_nom: how?
        #  - num_parallel: how? Use sth similar to this:
        #    net.lines.loc[non380_lines_b, 'num_parallel'] *= (net.lines.loc[non380_lines_b, 'v_nom'] / 380.)**2 ?
        #  - s_nom: how?
        lines_df = old_lines_df.groupby("type")['v_nom'].unique().apply(lambda x: x[0]).to_frame()
        lines_df['s_nom'] = old_lines_df.groupby("type")['s_nom'].sum()
        lines_df['num_parallel'] = old_lines_df.groupby("type")['num_parallel'].sum()
        lines_df['x'] = old_lines_df.groupby("type")['x'].apply(lambda x: 1 / sum(1 / x))
        lines_df["line_id"] = lines_df['v_nom'].apply(lambda x: f"{bus0}-{bus1}-{x}")
        lines_df["bus0"] = bus0
        lines_df["bus1"] = bus1
        lines_df["length"] = lines_df[['bus0', 'bus1']].apply(lambda x: compute_distance(x.bus0, x.bus1), axis=1)
        lines_df = lines_df.reset_index().set_index('line_id')

        all_lines_df = pd.concat((all_lines_df, lines_df))
    print(all_lines_df)

    # --- Links --- #
    # Remove links associated to buses that have been removed
    net.links = net.links.loc[net.links.bus0.isin(old_to_new_buses_map.index)
                              & net.links.bus1.isin(old_to_new_buses_map.index)]
    net.links.bus0 = net.links.bus0.map(old_to_new_buses_map)
    net.links.bus1 = net.links.bus1.map(old_to_new_buses_map)
    # Remove links that have the same starting and end bus
    net.links = net.links[net.links.bus0 != net.links.bus1]

    # Merge links with same starting and end bus
    # Get all links connected to the same buses in the same direction
    net.links[['bus0', 'bus1']] = [sorted((str(bus0), str(bus1)))
                                   for (bus0, bus1) in net.links[['bus0', 'bus1']].values]
    # Get pairs of connected buses
    connected_buses = set([(bus0, bus1) for (bus0, bus1) in net.links[['bus0', 'bus1']].values.tolist()])
    links_df = pd.DataFrame(index=[f"{bus0}-{bus1}" for (bus0, bus1) in connected_buses],
                            columns=['bus0', 'bus1', 'p_nom', 'length'])
    for bus0, bus1 in connected_buses:
        link_index = f"{bus0}-{bus1}"
        links_df.loc[link_index, ['bus0', 'bus1']] = (bus0, bus1)

        # Get all links in original network that were connected to bus0 and bus1
        old_links_df = net.links[(net.links.bus0 == bus0) & (net.links.bus1 == bus1)]

        # Add capacities
        links_df.loc[link_index, 'p_nom'] = old_links_df['p_nom'].sum()

    links_df["length"] = links_df[['bus0', 'bus1']].apply(lambda x: compute_distance(x.bus0, x.bus1), axis=1)

    # TODO: Parameters to consider
    #  - name: ok
    #  - bus0: ok
    #  - bus1: ok
    #  - carrier: nope --> all dc
    #  - underground: how? --> affects cost
    #  - underwater_fraction: how? --> affects cost
    #  - under_construction: already dealt with before
    #  - tags: nope
    #  - geometry: nope (replaced by straight line)
    #  - length: how? -> just take fly-distance (times 1.25 like in pypsa-eur?)
    #  - p_nom: how?
    #  - num_parallel: how?

    # TODO: What about transformers?

    # print(net.links)
    print(links_df)

    clustered_net = pypsa.Network()
    clustered_net.import_components_from_dataframe(buses_df, 'Bus')
    clustered_net.import_components_from_dataframe(all_lines_df, 'Line')
    clustered_net.import_components_from_dataframe(links_df, 'Link')
    print(clustered_net.buses.onshore_region)

    return clustered_net
def test_match_points_to_regions_empty_list_of_points():
    onshore_shapes = get_shapes(["BE"], "onshore")["geometry"]
    with pytest.raises(AssertionError):
        match_points_to_regions([], onshore_shapes)
def preprocess(plotting=True) -> None:
    """
    Pre-process tyndp-country buses and links information.

    Parameters
    ----------
    plotting: bool
        Whether to plot the results
    """
    generated_dir = f"{data_path}topologies/tyndp2018/generated/"
    if not isdir(generated_dir):
        makedirs(generated_dir)

    # Create links
    link_data_fn = f"{data_path}topologies/tyndp2018/source/Input Data.xlsx"
    # Read TYNDP2018 (NTC 2027, reference grid) data
    #  - ST (Sustainable Transition): targets reached by national regulation, emission trading schemes and subsidies,
    #    maximising the use of existing infrastructure
    #  - DG (Distributed Generation): prosumers at the centre - small-scale generation, batteries and fuel-switching,
    #    society engaged and empowered
    #  - GCA (Global Climate Action): full-speed global decarbonisation, large-scale renewables
    links = pd.read_excel(link_data_fn, sheet_name="NTC", index_col=0, skiprows=[0, 2],
                          usecols=[0, 3, 4, 5, 6, 7, 8, 9, 10],
                          names=["link", "in", "out", "st_in", "st_out", "dg_in", "dg_out", "gca_in", "gca_out"])

    # Get NTC as the maximum of the capacities of the two flow directions.
    links["p_nom"] = links[["in", "out"]].max(axis=1)
    links["p_nom_st"] = links[["st_in", "st_out"]].max(axis=1)
    links["p_nom_dg"] = links[["dg_in", "dg_out"]].max(axis=1)
    links["p_nom_gca"] = links[["gca_in", "gca_out"]].max(axis=1)
    links["bus0"] = links.index.str[:2]
    links["bus1"] = [i[1][:2] for i in links.index.str.split('-')]

    # Remove links which do not cross international borders.
    links_crossborder = links[links["bus0"] != links["bus1"]].copy()
    links_crossborder["id"] = links_crossborder["bus0"] + '-' + links_crossborder["bus1"]
    # Sum all capacities belonging to the same border and convert from MW to GW.
    links = links_crossborder.groupby("id")[["p_nom", "p_nom_st", "p_nom_dg", "p_nom_gca"]].sum() / 1000.

    links["id"] = links.index.values
    links["bus0"] = links["id"].apply(lambda k: k.split('-')[0])
    links["bus1"] = links["id"].apply(lambda k: k.split('-')[1])

    # A subset of links are assumed to be DC connections.
    dc_set = {'BE-GB', 'CY-GR', 'DE-GB', 'DE-NO', 'DE-SE', 'DK-GB', 'DK-NL', 'DK-NO', 'DK-PL', 'DK-SE', 'EE-FI',
              'ES-FR', 'FR-GB', 'FR-IE', 'GB-IE', 'GB-IS', 'GB-NL', 'GB-NO', 'GR-IT', 'GR-TR', 'IT-ME', 'IT-MT',
              'IT-TN', 'LT-SE', 'PL-SE', 'NL-NO'}
    links["carrier"] = links["id"].apply(lambda x: 'DC' if x in dc_set else 'AC')

    # A connection between Rep. of Ireland (IE) and Northern Ireland (NI) is considered in the TYNDP, yet as NI is
    # the ISO2 code of Nicaragua, this results in weird results. Thus, the connection is dropped, as IE-GB links exist.
    # TODO: this is Northern Ireland --> need to add it to the capacity between IE-GB
    links = links[~links.index.str.contains("NI")]

    # Create buses
    buses_names = []
    for name in links.index:
        buses_names += name.split("-")
    buses_names = sorted(list(set(buses_names)))
    # buses = pd.DataFrame(index=buses_names, columns=["x", "y", "country", "region", "onshore"])
    buses = pd.DataFrame(index=buses_names, columns=["x", "y", "country", "onshore_region", "offshore_region"])
    buses.index.names = ["id"]
    buses.country = list(buses.index)
    # buses.onshore = True

    # Get shape of each country
    # buses.region = get_shapes(buses.index.values, which='onshore', save=True)["geometry"]
    shapes = get_shapes(buses.index.values, save=True)
    # Crop regions going too far north
    nordics = ["FI", "NO", "SE"]
    intersection_poly = Polygon([(0., 50.), (0., 66.5), (40., 66.5), (40., 50.)])
    shapes.loc[nordics, "geometry"] = shapes.loc[nordics, "geometry"].apply(
        lambda x: x.intersection(intersection_poly))
    # Add regions to buses
    buses.onshore_region = shapes[~shapes.offshore]["geometry"]
    offshore_shapes = shapes[shapes.offshore]["geometry"]
    buses.loc[offshore_shapes.index, "offshore_region"] = offshore_shapes

    centroids = [region.centroid for region in buses.onshore_region]
    buses.x = [c.x for c in centroids]
    buses.y = [c.y for c in centroids]

    # Manually adjust bus positions for a few countries
    for item in buses.index:
        if item == 'NO':
            buses.loc[item, 'x'] = 10.2513
            buses.loc[item, 'y'] = 60.2416
        elif item == 'SE':
            buses.loc[item, 'x'] = 15.2138
            buses.loc[item, 'y'] = 59.3386
        elif item == 'DK':
            buses.loc[item, 'x'] = 9.0227
            buses.loc[item, 'y'] = 56.1997
        elif item == 'GB':
            buses.loc[item, 'x'] = -1.2816
            buses.loc[item, 'y'] = 52.7108
        elif item == 'HR':
            buses.loc[item, 'x'] = 15.89
            buses.loc[item, 'y'] = 45.7366
        elif item == 'GR':
            buses.loc[item, 'x'] = 21.57
            buses.loc[item, 'y'] = 40.19
        elif item == 'FI':
            buses.loc[item, 'x'] = 24.82
            buses.loc[item, 'y'] = 61.06

    # Adding length to the links
    def compute_distance(bus0, bus1):
        bus0_x, bus0_y = buses.loc[bus0, ['x', 'y']]
        bus1_x, bus1_y = buses.loc[bus1, ['x', 'y']]
        return geopy.distance.geodesic((bus0_y, bus0_x), (bus1_y, bus1_x)).km
    links["length"] = links[['bus0', 'bus1']].apply(lambda x: compute_distance(x.bus0, x.bus1), axis=1)

    if plotting:
        from epippy.topologies.core.plot import plot_topology
        plot_topology(buses, links)
        plt.show()

    # buses.region = buses.region.astype(str)
    buses.onshore_region = buses.onshore_region.astype(str)
    buses.offshore_region = buses.offshore_region.astype(str)
    buses.to_csv(f"{generated_dir}buses.csv")
    links.to_csv(f"{generated_dir}links.csv")
def get_map_divided_by_region(self, regions_dict, strategy='siting'):

    all_xs = self.net.buses.x.values
    all_ys = self.net.buses.y.values
    minx = min(all_xs) - 5
    maxx = max(all_xs) + 5
    miny = min(all_ys) - 2
    maxy = max(all_ys) + 2

    fig = get_map_layout("", [minx, maxx, miny, maxy], False)

    from functools import reduce
    all_countries = sorted(reduce(lambda x, y: x + y, list(regions_dict.values())))
    offshore_shapes = get_shapes(all_countries, 'offshore')["geometry"]
    offshore_shapes.index = ['UK' if idx == "GB" else idx for idx in offshore_shapes.index]

    if strategy == "bus":
        # Compute capacity potential per eez
        tech_regions_dict = {"wind_offshore": offshore_shapes.values}
        wind_capacity_potential_per_country = get_capacity_potential_for_regions(tech_regions_dict)['wind_offshore']
        wind_capacity_potential_per_country.index = offshore_shapes.index

        # Compute generation per offshore bus
        offshore_buses_index = self.net.buses[~self.net.buses.onshore].index
        total_generation_per_bus = pd.Series(index=offshore_buses_index)
        total_max_capacity_per_bus = pd.Series(index=offshore_buses_index)
        for idx in offshore_buses_index:
            offshore_generators_index = self.net.generators[self.net.generators.bus == idx].index
            total_generation_per_bus[idx] = self.net.generators_t.p[offshore_generators_index].values.sum()
            total_max_capacity_per_bus[idx] = \
                self.net.generators.loc[offshore_generators_index, 'p_nom_max'].values.sum()
        offshore_bus_region_shapes = self.net.buses.loc[offshore_buses_index].offshore_region

    feature_collection = []
    all_caps_pd = pd.DataFrame(0., index=list(regions_dict.keys()),
                               columns=["ccgt", "load", "nuclear", "pv_utility", "ror", "sto",
                                        "wind_onshore", "wind_offshore"])
    for idx, regions in regions_dict.items():

        # Get buses in region
        buses_index = self.net.buses.loc[[idx for idx in self.net.buses.index if idx[2:4] in regions]].index

        # Agglomerate regions together
        region_shape = \
            cascaded_union([shapely.wkt.loads(self.net.buses.loc[bus_id].onshore_region)
                            for bus_id in buses_index])
        centroid = region_shape.centroid
        if isinstance(region_shape, sPolygon):
            feature_collection += [Feature(geometry=Polygon([list(region_shape.exterior.coords)]), id=idx)]
        else:
            feature_collection += \
                [Feature(geometry=MultiPolygon([(list(poly.exterior.coords), ) for poly in region_shape]),
                         id=idx)]

        # Get all generators for those buses
        generators = self.net.generators[self.net.generators.bus.isin(buses_index)]
        all_cap = dict.fromkeys(sorted(set(generators.type)))
        for key in all_cap:
            generators_type_index = generators[generators.type == key].index
            all_cap[key] = np.sum(self.net.generators_t.p[generators_type_index].values)

        # Add STO output
        storage_units = self.net.storage_units[self.net.storage_units.bus.isin(buses_index)]
        stos = storage_units[storage_units.type == "sto"]
        all_cap['sto'] = np.sum(self.net.storage_units_t.p[stos.index].values)

        # Add wind_offshore
        offshore_regions = [r for r in regions if r in offshore_shapes.index]
        eez_region_shape = cascaded_union(offshore_shapes.loc[offshore_regions])
        offshore_generators = self.net.generators[self.net.generators.type == 'wind_offshore']
        if strategy == 'siting':
            offshore_generators_in_region = \
                offshore_generators[["x", "y"]].apply(lambda x: eez_region_shape.contains(Point(x[0], x[1])),
                                                      axis=1)
            offshore_generators_in_region_index = offshore_generators[offshore_generators_in_region].index
            if len(offshore_generators_in_region_index) != 0:
                all_cap['wind_offshore'] = \
                    np.sum(self.net.generators_t.p[offshore_generators_in_region_index].values)
        elif strategy == 'bus':
            wind_capacity = wind_capacity_potential_per_country[offshore_regions].sum()
            # Compute intersection with all offshore shapes
            all_cap['wind_offshore'] = 0
            for off_idx, off_region_shape in offshore_bus_region_shapes.items():
                off_region_shape = shapely.wkt.loads(off_region_shape)
                intersection = off_region_shape.intersection(eez_region_shape)
                prop_cap_received_by_bus = (intersection.area / eez_region_shape.area) * wind_capacity
                all_cap['wind_offshore'] += \
                    (prop_cap_received_by_bus / total_max_capacity_per_bus[off_idx]) * total_generation_per_bus[off_idx]

        x = (centroid.x - minx) / (maxx - minx)
        y = (centroid.y - miny) / (maxy - miny)

        title = idx
        if ' ' in title:
            title = f"{title.split(' ')[0]}<br>{title.split(' ')[1]}"

        # Sort values
        sorted_keys = sorted(list(all_cap.keys()))
        sorted_values = [all_cap[key] for key in sorted_keys]

        for i, key in enumerate(sorted_keys):
            all_caps_pd.loc[idx, key] = sorted_values[i]
        all_caps_pd.to_csv("generation_siting.csv")

        fig.add_trace(go.Pie(values=sorted_values,
                             labels=sorted_keys,
                             hole=0.4,
                             text=[""] * len(all_cap.keys()),
                             textposition="none",
                             scalegroup='one',
                             domain=dict(x=[max(x - 0.14, 0), min(x + 0.14, 1.0)],
                                         y=[max(y - 0.14, 0), min(y + 0.14, 1.0)]),
                             marker=dict(colors=[self.tech_colors[key] for key in sorted_keys]),
                             sort=False,
                             title=dict(text=f"{title}", position='middle center', font=dict(size=20))))

    feature_collection = FeatureCollection(feature_collection)
    fig.add_trace(go.Choropleth(
        locations=list(regions_dict.keys()),
        geojson=feature_collection,
        z=list(range(len(regions_dict.keys()))),
        text=list(regions_dict.keys()),
        marker=dict(opacity=0.3),
        colorscale='viridis',
        autocolorscale=False,
        reversescale=True,
        marker_line_color='black',
        marker_line_width=1.0,
    ))

    # Add offshore regions
    if 0:
        feature_collection = []
        for i, idx in enumerate(offshore_buses_index):
            region_shape = shapely.wkt.loads(self.net.buses.loc[idx].region)
            if isinstance(region_shape, sPolygon):
                feature_collection += [Feature(geometry=Polygon([list(region_shape.exterior.coords)]), id=idx)]
            else:
                feature_collection += \
                    [Feature(geometry=MultiPolygon([(list(poly.exterior.coords), ) for poly in region_shape]),
                             id=idx)]
        feature_collection = FeatureCollection(feature_collection)
        fig.add_trace(go.Choropleth(
            locations=offshore_buses_index.values,
            geojson=feature_collection,
            z=list(range(len(offshore_buses_index))),
            text=offshore_buses_index,
            marker=dict(opacity=0.5),
            colorscale='plotly3',
            autocolorscale=False,
            reversescale=True,
            marker_line_color='black',
            marker_line_width=0.5,
        ))

    return fig
def upgrade_topology(net: pypsa.Network, regions: List[str], plot: bool = False,
                     ac_carrier: str = "HVAC_OHL", dc_carrier: str = "HVDC_GLIS") -> pypsa.Network:
    """
    Add buses and links for additional regions (Iceland, Greenland, North Africa, Middle East) to the network.
    """
    buses = pd.DataFrame(columns=["x", "y", "country", "onshore_region", "offshore_region"])
    links = pd.DataFrame(columns=["bus0", "bus1", "carrier", "length"])

    if "IS" in regions:
        buses.loc["IS", "onshore_region"] = get_shapes(["IS"], "onshore")["geometry"][0]
        buses.loc["IS", ["x", "y"]] = buses.loc["IS", "onshore_region"].centroid
        buses.loc["IS", "country"] = "IS"
        # Adding link to GB
        links.loc["IS-GB", ["bus0", "bus1", "carrier"]] = ["IS", "GB", dc_carrier]

    if "GL" in regions:
        assert 'IS' in regions, "Error: Cannot add a node in Greenland without adding a node in Iceland."
        full_gl_shape = get_shapes(["GL"], "onshore")["geometry"][0]
        trunc_gl_shape = full_gl_shape.intersection(Polygon([(-44.6, 59.5), (-44.6, 60.6), (-42, 60.6), (-42, 59.5)]))
        buses.loc["GL", "onshore_region"] = trunc_gl_shape
        buses.loc["GL", ["x", "y"]] = (-44., 60.)
        # buses.loc["GL", "country"] = "GL"
        # Adding link to IS
        links.loc["GL-IS", ["bus0", "bus1", "carrier"]] = ["GL", "IS", dc_carrier]

    if "na" in regions:
        countries = get_subregions("na")
        shapes = get_shapes(countries, "onshore")["geometry"]
        trunc_shape = Polygon([(-14, 27.7), (-14, 40), (40, 40), (40, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        buses.loc["DZ", ["x", "y"]] = (3, 36.5)  # Algeria, Alger
        buses.loc["EG", ["x", "y"]] = (31., 30.)  # Egypt, Cairo
        buses.loc["LY", ["x", "y"]] = (22, 32)  # (13., 32.5)  # Libya, Tripoli
        buses.loc["MA", ["x", "y"]] = (-6., 35.)  # Morocco, Rabat
        buses.loc["TN", ["x", "y"]] = (10., 36.5)  # Tunisia, Tunis
        # Adding links
        links.loc["DZ-MA", ["bus0", "bus1", "carrier"]] = ["DZ", "MA", ac_carrier]
        links.loc["DZ-TN", ["bus0", "bus1", "carrier"]] = ["DZ", "TN", ac_carrier]
        links.loc["LY-TN", ["bus0", "bus1", "carrier", "length"]] = ["LY", "TN", ac_carrier, 2000]
        links.loc["EG-LY", ["bus0", "bus1", "carrier", "length"]] = ["EG", "LY", ac_carrier, 700]
        if "GR" in net.buses.index:
            links.loc["LY-GR", ["bus0", "bus1", "carrier", "length"]] = ["LY", "GR", dc_carrier, 900]
        if "ES" in net.buses.index:
            links.loc["MA-ES", ["bus0", "bus1", "carrier"]] = ["MA", "ES", dc_carrier]
        if "IT" in net.buses.index:
            links.loc["TN-IT", ["bus0", "bus1", "carrier", "length"]] = ["TN", "IT", dc_carrier, 600]

    if "me" in regions:
        # countries = ["AE", "BH", "CY", "IL", "IQ", "IR", "JO", "KW", "LB", "OM", "QA", "SA", "SY"]  # , "YE"]
        countries = get_subregions("me")
        shapes = get_shapes(countries, "onshore")["geometry"]
        trunc_shape = Polygon([(25, 27.7), (25, 60), (60, 60), (60, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        # buses.loc["AE", ["x", "y"]] = (54.5, 24.5)  # UAE, Abu Dhabi
        # buses.loc["BH", ["x", "y"]] = (50.35, 26.13)  # Bahrain, Manama
        buses.loc["TR", ["x", "y"]] = buses.loc["TR", "onshore_region"].centroid
        buses.loc["CY", ["x", "y"]] = (33.21, 35.1)  # Cyprus, Nicosia
        buses.loc["IL", ["x", "y"]] = (34.76, 32.09)  # Tel-Aviv, Jerusalem
        # if 'TR' in net.buses.index:
        #     buses.loc["IQ", ["x", "y"]] = (44.23, 33.2)  # Iraq, Baghdad
        #     buses.loc["IR", ["x", "y"]] = (51.23, 35.41)  # Iran, Tehran
        # else:
        #     buses = buses.drop(["IQ", "IR"])
        buses.loc["JO", ["x", "y"]] = (35.55, 31.56)  # Jordan, Amman
        # buses.loc["KW", ["x", "y"]] = (47.58, 29.22)  # Kuwait, Kuwait City
        # buses.loc["LB", ["x", "y"]] = (35.3, 33.53)  # Lebanon, Beirut
        # buses.loc["OM", ["x", "y"]] = (58.24, 23.35)  # Oman, Muscat
        # buses.loc["QA", ["x", "y"]] = (51.32, 25.17)  # Qatar, Doha
        buses.loc["SA", ["x", "y"]] = buses.loc["SA", "onshore_region"].centroid  # (46.43, 24.38)  # Saudi Arabia, Riyadh
        buses.loc["SY", ["x", "y"]] = (36.64, 34.63)  # Syria, Homs
        # buses.loc["YE", ["x", "y"]] = (44.12, 15.20)  # Yemen, Sana

        # Adding links
        links.loc["IL-JO", ["bus0", "bus1", "carrier"]] = ["IL", "JO", ac_carrier]
        # links.loc["IL-LI", ["bus0", "bus1", "carrier"]] = ["IL", "LB", ac_carrier]
        # links.loc["SY-LI", ["bus0", "bus1", "carrier"]] = ["SY", "LB", ac_carrier]
        links.loc["SY-JO", ["bus0", "bus1", "carrier"]] = ["SY", "JO", ac_carrier]
        links.loc["IL-CY", ["bus0", "bus1", "carrier"]] = ["IL", "CY", "DC"]  # This link comes from nowhere
        links.loc["SA-JO", ["bus0", "bus1", "carrier"]] = ["SA", "JO", ac_carrier]
        # links.loc["CY-SY", ["bus0", "bus1", "carrier"]] = ["CY", "SY", "DC"]
        # links.loc["OM-AE", ["bus0", "bus1", "carrier"]] = ["OM", "AE", ac_carrier]
        # links.loc["QA-AE", ["bus0", "bus1", "carrier"]] = ["QA", "AE", ac_carrier]
        # links.loc["QA-SA", ["bus0", "bus1", "carrier"]] = ["QA", "SA", ac_carrier]
        # links.loc["BH-QA", ["bus0", "bus1", "carrier"]] = ["BH", "QA", ac_carrier]
        # links.loc["BH-KW", ["bus0", "bus1", "carrier"]] = ["BH", "KW", ac_carrier]
        # links.loc["BH-SA", ["bus0", "bus1", "carrier"]] = ["BH", "SA", ac_carrier]
        # links.loc["YE-SA", ["bus0", "bus1", "carrier"]] = ["YE", "SA", ac_carrier]
        if "EG" in buses.index:
            links.loc["EG-IL", ["bus0", "bus1", "carrier"]] = ["EG", "IL", ac_carrier]
            links.loc["SA-EG", ["bus0", "bus1", "carrier"]] = ["SA", "EG", ac_carrier]
        # if "TR" in net.buses.index:
        links.loc["SY-TR", ["bus0", "bus1", "carrier"]] = ["SY", "TR", ac_carrier]
        # links.loc["IQ-TR", ["bus0", "bus1", "carrier"]] = ["IQ", "TR", ac_carrier]
        # links.loc["IR-TR", ["bus0", "bus1", "carrier"]] = ["IR", "TR", ac_carrier]
        # links.loc["IR-IQ", ["bus0", "bus1", "carrier"]] = ["IR", "IQ", ac_carrier]
        if "GR" in net.buses.index:
            links.loc["CY-GR", ["bus0", "bus1", "carrier", "length"]] = ["CY", "GR", dc_carrier, 850]  # From TYNDP
            links.loc["TR-GR", ["bus0", "bus1", "carrier", "length"]] = ["TR", "GR", dc_carrier, 1173.53]  # p_nom = 0.66
        if "BG" in net.buses.index:
            links.loc["TR-BG", ["bus0", "bus1", "carrier", "length"]] = ["TR", "BG", ac_carrier, 932.16]  # p_nom = 1.2

    buses = buses.infer_objects()
    net.madd("Bus", buses.index,
             x=buses.x, y=buses.y, country=buses.country,
             onshore_region=buses.onshore_region, offshore_region=buses.offshore_region)

    # Adding length to the links for which we did not fix it manually
    for idx in links[links.length.isnull()].index:
        bus0_id = links.loc[idx]["bus0"]
        bus1_id = links.loc[idx]["bus1"]
        bus0_x = net.buses.loc[bus0_id]["x"]
        bus0_y = net.buses.loc[bus0_id]["y"]
        bus1_x = net.buses.loc[bus1_id]["x"]
        bus1_y = net.buses.loc[bus1_id]["y"]
        links.loc[idx, "length"] = geopy.distance.geodesic((bus0_y, bus0_x), (bus1_y, bus1_x)).km

    links['capital_cost'] = pd.Series(index=links.index)
    for idx in links.index:
        carrier = links.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(net.snapshot_weightings['objective']))
        links.loc[idx, ('capital_cost', )] = cap_cost * links.length.loc[idx]

    net.madd("Link", links.index, bus0=links.bus0, bus1=links.bus1, carrier=links.carrier,
             p_nom_extendable=True, length=links.length, capital_cost=links.capital_cost)

    # from tyndp
    if "TR" in net.buses.index:
        net.links.loc[["TR-BG", "TR-GR"], "p_nom"] = [1.2, 0.66]

    if plot:
        plot_topology(net.buses, net.links)
        plt.show()

    return net
if __name__ == '__main__':
    from epippy.technologies import get_config_values
    filters_ = get_config_values("wind_onshore_national", ["filters"])
    print(filters_)
    # filters_ = {"depth_thresholds": {"high": -200, "low": None}}
    full_gl_shape = get_shapes(["LU"], "onshore")["geometry"].values
    filters_ = {"natura": 1}
    # trunc_gl_shape = full_gl_shape.intersection(Polygon([(11.5, 52.5), (11.5, 53.5), (12.5, 53.5), (12.5, 52.5)]))
    print(get_capacity_potential_for_shapes(full_gl_shape, filters_, 5))
def get_topology(network: pypsa.Network, countries: List[str] = None,
                 add_offshore: bool = True, extend_line_cap: bool = True,
                 use_ex_line_cap: bool = True, plot: bool = False) -> pypsa.Network:
    """
    Load the e-highway network topology (buses and links) using PyPSA.

    Parameters
    ----------
    network: pypsa.Network
        Network instance
    countries: List[str] (default: None)
        List of ISO codes of countries for which we want the e-highway topology
    add_offshore: bool (default: True)
        Whether to include offshore nodes
    extend_line_cap: bool (default True)
        Whether line capacity is allowed to be expanded
    use_ex_line_cap: bool (default True)
        Whether to use existing line capacity
    plot: bool (default: False)
        Whether to show loaded topology or not

    Returns
    -------
    network: pypsa.Network
        Updated network
    """
    assert countries is None or len(countries) != 0, \
        "Error: Countries list must not be empty. If you want to obtain the full topology, " \
        "do not pass any argument."

    topology_dir = f"{data_path}topologies/e-highways/generated/"
    buses_fn = f"{topology_dir}buses.csv"
    assert isfile(buses_fn), "Error: Buses are undefined. Please run 'preprocess'."
    buses = pd.read_csv(buses_fn, index_col='id')
    lines_fn = f"{topology_dir}lines.csv"
    assert isfile(lines_fn), "Error: Lines are undefined. Please run 'preprocess'."
    lines = pd.read_csv(lines_fn, index_col='id')

    # Remove offshore buses if not considered
    if not add_offshore:
        buses = buses.dropna(subset=["onshore_region"])

    if countries is not None:
        # In e-highway, GB is referenced as UK
        iso_to_ehighway = {"GB": "UK"}
        ehighway_countries = [iso_to_ehighway[c] if c in iso_to_ehighway else c for c in countries]

        # Remove onshore buses that are not in the considered region,
        # keep also buses that are offshore (i.e. with a country name that is not a string)
        def filter_buses(bus):
            return (not isinstance(bus.country, str)) or (bus.name[2:] in ehighway_countries)
        buses = buses.loc[buses.apply(filter_buses, axis=1)]
    else:
        countries = replace_iso2_codes(
            list(set([idx[2:] for idx in buses.dropna(subset=["onshore_region"]).index])))

    # Converting polygons strings to Polygon object
    for region_type in ["onshore_region", "offshore_region"]:
        regions = buses[region_type].values
        # Convert strings
        for i, region in enumerate(regions):
            if isinstance(region, str):
                regions[i] = shapely.wkt.loads(region)

    # Remove lines for which one of the two end buses has been removed
    lines = pd.DataFrame(lines.loc[lines.bus0.isin(buses.index) & lines.bus1.isin(buses.index)])

    # Removing offshore buses that are not connected anymore
    connected_buses = sorted(list(set(lines["bus0"]).union(set(lines["bus1"]))))
    buses = buses.loc[connected_buses]

    assert len(buses) != 0, "Error: No buses are located in the given list of countries."

    # Add offshore polygons to remaining offshore buses
    if add_offshore:
        offshore_shapes = get_shapes(countries, which='offshore', save=True)["geometry"]
        if len(offshore_shapes) != 0:
            offshore_zones_shape = unary_union(offshore_shapes.values)
            offshore_bus_indexes = buses[buses["onshore_region"].isnull()].index
            offshore_buses = buses.loc[offshore_bus_indexes]
            # Use a home-made 'voronoi' partition to assign a region to each offshore bus
            buses.loc[offshore_bus_indexes, "offshore_region"] = \
                voronoi_special(offshore_zones_shape, offshore_buses[["x", "y"]])

    # Setting line parameters
    """ For DC-opf
    lines['s_nom'] *= 1000.0  # PyPSA uses MW
    lines['s_nom_min'] = lines['s_nom']
    # Define reactance   # TODO: do sth more clever
    lines['x'] = pd.Series(0.00001, index=lines.index)
    lines['s_nom_extendable'] = pd.Series(True, index=lines.index)  # TODO: parametrize
    lines['capital_cost'] = pd.Series(index=lines.index)
    for idx in lines.index:
        carrier = lines.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(network.snapshot_weightings['objective']))
        lines.loc[idx, ('capital_cost', )] = cap_cost * lines.length.loc[idx]
    """

    lines['p_nom'] = lines["s_nom"]
    if not use_ex_line_cap:
        lines['p_nom'] = 0
    lines['p_nom_min'] = lines['p_nom']
    lines['p_min_pu'] = -1.  # Making the link bi-directional
    lines = lines.drop('s_nom', axis=1)
    lines['p_nom_extendable'] = extend_line_cap
    lines['capital_cost'] = pd.Series(index=lines.index)
    for idx in lines.index:
        carrier = lines.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(network.snapshot_weightings['objective']))
        lines.loc[idx, ('capital_cost', )] = cap_cost * lines.length.loc[idx]

    network.import_components_from_dataframe(buses, "Bus")
    network.import_components_from_dataframe(lines, "Link")
    # network.import_components_from_dataframe(lines, "Line")  # for dc-opf

    if plot:
        from epippy.topologies.core.plot import plot_topology
        plot_topology(buses, lines)
        plt.show()

    return network
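# Illustrative usage sketch (assumption: the pre-processed e-highway buses.csv/lines.csv files exist
# under the data path; snapshots are set first so line costs can be annualised):
# net = pypsa.Network()
# net.set_snapshots(pd.date_range('2018-01-01T00:00', '2018-01-01T23:00', freq='1H'))
# net = get_topology(net, countries=["BE", "NL", "LU"], plot=False)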
def get_capacity_potential_for_regions(
        tech_regions_dict: Dict[str, List[Union[Polygon, MultiPolygon]]]) -> pd.Series:
    """
    Get capacity potential (in GW) for a series of technologies in associated geographical regions.

    Parameters
    ----------
    tech_regions_dict: Dict[str, List[Union[Polygon, MultiPolygon]]]
        Dictionary giving, for each technology, the regions for which we want to obtain the capacity potential.

    Returns
    -------
    capacity_potential_ds: pd.Series
        Gives for each pair of technology and region the associated potential capacity in GW.
    """
    accepted_techs = ['wind_onshore', 'wind_offshore', 'wind_floating', 'pv_utility', 'pv_residential']
    for tech in tech_regions_dict.keys():
        assert tech in accepted_techs, f"Error: tech {tech} is not in {accepted_techs}"

    tech_regions_tuples = [(tech, i) for tech, points in tech_regions_dict.items() for i in range(len(points))]
    capacity_potential_ds = pd.Series(0., index=pd.MultiIndex.from_tuples(tech_regions_tuples))

    for tech, regions in tech_regions_dict.items():

        # Compute potential for each NUTS2 or EEZ
        potential_per_subregion_ds = read_capacity_potential(tech, nuts_type='nuts2')
        if tech in ["wind_offshore", "wind_floating"]:
            potential_per_subregion_ds.index = [code[2:] for code in potential_per_subregion_ds.index]

        # Get NUTS2 or EEZ shapes
        if tech in ['wind_offshore', 'wind_floating']:
            offshore_codes = list(set([code[:2] for code in potential_per_subregion_ds.index]))
            shapes = get_shapes(offshore_codes, 'offshore', True)["geometry"]
        else:
            shapes = get_shapes(list(potential_per_subregion_ds.index), 'onshore', True)["geometry"]

        # Compute capacity potential for the regions given as argument
        for i, region in enumerate(regions):
            cap_pot = 0
            for index, shape in shapes.items():
                try:
                    intersection = region.intersection(shape)
                except TopologicalError:
                    logger.info(f"Warning: Problem with shape for code {index}")
                    continue
                if intersection.is_empty or intersection.area == 0.:
                    continue
                cap_pot += potential_per_subregion_ds[index] * intersection.area / shape.area
                try:
                    region = region.difference(intersection)
                except TopologicalError:
                    logger.info(f"Warning: Problem with shape for code {index}")
                if region.is_empty or region.area == 0.:
                    break
            capacity_potential_ds.loc[tech, i] = cap_pot

    return capacity_potential_ds
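# Illustrative usage sketch (assumed inputs; requires the NUTS2/EEZ potential data to be available locally):
# be_onshore = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
# pot = get_capacity_potential_for_regions({"pv_utility": [be_onshore]})
# print(pot)  # pd.Series indexed by ("pv_utility", 0), value in GW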
def aggregate_legacy_capacity(spatial_resolution: float, include_operating: bool):
    """
    Aggregate legacy data at a given spatial resolution.

    Parameters
    ----------
    spatial_resolution: float
        Spatial resolution at which we want to aggregate.
    include_operating: bool
        Whether to include already operating plants or not.
    """
    countries = ["AL", "AT", "BA", "BE", "BG", "CH", "CY", "CZ", "DE", "DK", "EE", "ES", "FI", "FR", "GB", "GR",
                 "HR", "HU", "IE", "IS", "IT", "LT", "LU", "LV", "ME", "MK", "NL", "NO", "PL", "PT", "RO", "RS",
                 "SE", "SI", "SK"]  # removed ["BY", "UA", "FO"]

    technologies = ["wind_onshore", "wind_offshore", "pv_utility", "pv_residential"]

    capacities_df_ls = []
    for country in countries:
        print(f"Country: {country}")
        shapes = get_shapes([country])
        onshore_shape = shapes[~shapes["offshore"]]["geometry"]
        offshore_shape = shapes[shapes["offshore"]]["geometry"]
        # If there is no offshore shape for the country, remove offshore technologies from the set
        offshore_shape = None if len(offshore_shape) == 0 else offshore_shape
        technologies_in_country = technologies
        if offshore_shape is None:
            technologies_in_country = [tech for tech in technologies if get_config_values(tech, ['onshore'])]

        # Divide shapes into grid cells
        grid_cells_ds = get_grid_cells(technologies_in_country, spatial_resolution, onshore_shape, offshore_shape)
        technologies_in_country = set(grid_cells_ds.index.get_level_values(0))

        # Get capacity in each grid cell
        capacities_per_country_ds = pd.Series(index=grid_cells_ds.index, name="Capacity (GW)", dtype=float)
        for tech in technologies_in_country:
            idx = capacities_per_country_ds[tech].index
            if tech == 'pv_residential':
                capacities_per_country_and_tech = \
                    get_legacy_capacity_in_regions_from_non_open(tech, grid_cells_ds.loc[tech], [country],
                                                                 spatial_resolution, include_operating,
                                                                 match_distance=100)
            else:
                capacities_per_country_and_tech = \
                    get_legacy_capacity_in_regions_from_non_open(tech, grid_cells_ds.loc[tech].reset_index()[0],
                                                                 [country], spatial_resolution, include_operating,
                                                                 match_distance=100)
            capacities_per_country_and_tech.index = idx
            capacities_per_country_ds[tech].update(capacities_per_country_and_tech)

        capacities_per_country_df = capacities_per_country_ds.to_frame()
        capacities_per_country_df.loc[:, "ISO2"] = country
        capacities_df_ls += [capacities_per_country_df]

    # Aggregate dataframe from each country
    capacities_df = pd.concat(capacities_df_ls).sort_index()

    # Replace technology name by plant and type
    tech_to_plant_type = {tech: get_config_values(tech, ["plant", "type"]) for tech in technologies}
    capacities_df = capacities_df.reset_index()
    capacities_df["Plant"] = capacities_df["Technology Name"].apply(lambda x: tech_to_plant_type[x][0])
    capacities_df["Type"] = capacities_df["Technology Name"].apply(lambda x: tech_to_plant_type[x][1])
    capacities_df = capacities_df.drop("Technology Name", axis=1)
    capacities_df = capacities_df.set_index(["Plant", "Type", "Longitude", "Latitude"])

    legacy_dir = f"{data_path}generation/vres/legacy/generated/"
    capacities_df.round(4).to_csv(f"{legacy_dir}aggregated_capacity.csv",
                                  header=True, columns=["ISO2", "Capacity (GW)"])
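# Illustrative usage sketch (heavy: loops over all listed countries and writes aggregated_capacity.csv
# to the legacy data directory, hence kept commented out):
# aggregate_legacy_capacity(spatial_resolution=0.5, include_operating=True)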
def get_cap_factor_for_countries(tech: str, countries: List[str], timestamps: pd.DatetimeIndex,
                                 precision: int = 3, throw_error: bool = True) -> pd.DataFrame:
    """
    Return capacity factor time-series for a set of countries over given timestamps, for a given technology.

    Parameters
    ----------
    tech: str
        One of the technologies associated with plant 'PV' or 'Wind' (with type 'Onshore', 'Offshore' or 'Floating').
    countries: List[str]
        List of ISO codes of countries.
    timestamps: pd.DatetimeIndex
        List of time stamps.
    precision: int (default: 3)
        Indicates at which decimal capacity factors should be rounded.
    throw_error: bool (default True)
        Whether to throw an error when capacity factors are not available for a given country,
        or to compute capacity factors with another method instead.

    Returns
    -------
    pd.DataFrame
        Capacity factors dataframe indexed by timestamps and with columns corresponding to countries.
    """
    plant, plant_type = get_config_values(tech, ["plant", "type"])

    profiles_dir = f"{data_path}generation/vres/profiles/generated/"
    if plant == 'PV':
        capacity_factors_df = pd.read_csv(f"{profiles_dir}pv_cap_factors.csv", index_col=0)
    elif plant == "Wind" and plant_type == "Onshore":
        capacity_factors_df = pd.read_csv(f"{profiles_dir}onshore_wind_cap_factors.csv", index_col=0)
    elif plant == "Wind" and plant_type in ["Offshore", "Floating"]:
        capacity_factors_df = pd.read_csv(f"{profiles_dir}offshore_wind_cap_factors.csv", index_col=0)
    else:
        raise ValueError(f"Error: No capacity factors for technology {tech} of plant {plant} and type {plant_type}.")

    capacity_factors_df.index = pd.DatetimeIndex(capacity_factors_df.index)

    # Slicing on time
    missing_timestamps = set(timestamps) - set(capacity_factors_df.index)
    assert not missing_timestamps, f"Error: {tech} data for timestamps {missing_timestamps} is not available."
    capacity_factors_df = capacity_factors_df.loc[timestamps]

    # Slicing on country
    missing_countries = set(countries) - set(capacity_factors_df.columns)
    if missing_countries:
        if throw_error:
            raise ValueError(f"Error: {tech} data for countries {missing_countries} is not available.")
        else:
            # Compute capacity factors from centroid of country (onshore/offshore) shape
            spatial_res = 0.5
            missing_countries = sorted(list(missing_countries))
            which = 'onshore' if get_config_values(tech, ["onshore"]) else 'offshore'
            shapes_df = get_shapes(missing_countries, which=which, save=True)
            # TODO: weird userwarning happening on Iceland
            centroids = shapes_df["geometry"].centroid
            points = [(round(p.x / spatial_res) * spatial_res, round(p.y / spatial_res) * spatial_res)
                      for p in centroids]
            cap_factor_df = compute_capacity_factors({tech: points}, spatial_res, timestamps)[tech]
            cap_factor_df.columns = missing_countries
            capacity_factors_df = pd.concat([capacity_factors_df, cap_factor_df], axis=1)

    return capacity_factors_df[countries].round(precision)
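# Illustrative usage sketch (assumption: the generated capacity-factor CSV files cover the requested period):
# ts = pd.date_range('2018-01-01T00:00', '2018-01-07T23:00', freq='1H')
# cf = get_cap_factor_for_countries('pv_utility', ["BE", "NL"], ts)
# print(cf.head())  # DataFrame of capacity factors, one column per country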
def test_get_points_in_shape_too_coarse_resolution():
    shape = get_shapes(["BE"], "onshore").loc["BE", "geometry"]
    res = 10.0
    points = get_points_in_shape(shape, res)
    assert isinstance(points, list)
    assert len(points) == 0