Esempio n. 1
0
def get_reference_emission_levels_for_region(region: str, year: int) -> float:
    """
    Return the total CO2 emissions (in kT) emitted by a series of countries in a given region for a given year.

    Parameters
    ----------
    region: str
        Region consisting of one or several countries.
    year: int
        Year

    Returns
    -------
    emission_ref: float
        Total CO2 emissions in kT

    """
    # Sum emissions over every country composing the region.
    # A generator expression avoids materializing an intermediate list.
    return sum(get_co2_emission_level_for_country(country, year)
               for country in get_subregions(region))
Esempio n. 2
0
        ["float", np.nan, np.nan, "Energy-related marginal cost", "Input (optional)"]
    override_comp_attrs["StorageUnit"].loc["ctd_ratio"] = \
        ["float", np.nan, np.nan, "Charge-to-discharge rated power ratio", "Input (optional)"]

    net = pypsa.Network(name="Remote hubs network (with siting)",
                        override_component_attrs=override_comp_attrs)
    net.config = config
    net.set_snapshots(timestamps)

    # Adding carriers
    for fuel in fuel_info.index[1:-1]:
        net.add("Carrier", fuel, co2_emissions=fuel_info.loc[fuel, "CO2"])

    # Loading topology
    logger.info("Loading topology.")
    eu_countries = get_subregions(config["region"])
    link_multiplier = 1 if config["link_multiplier"] is None else config[
        "link_multiplier"]
    net = get_topology(net,
                       eu_countries,
                       p_nom_extendable=True,
                       extension_multiplier=link_multiplier)

    # Adding load
    logger.info("Adding load.")
    load = get_load(timestamps=timestamps,
                    countries=eu_countries,
                    missing_data='interpolate')
    load_indexes = "Load " + pd.Index(eu_countries)
    loads = pd.DataFrame(load.values,
                         index=net.snapshots,
Esempio n. 3
0
def get_load(timestamps: pd.DatetimeIndex = None, years_range: List[int] = None,
             countries: List[str] = None, regions: List[str] = None,
             precision: int = 3, missing_data: str = "error") -> pd.DataFrame:
    """
    Compute hourly load time series (in GWh) for given countries or regions.

    The desired time slice can be either given as a series of time stamps or
    as a range of years for which we want full data.

    Parameters
    ----------
    timestamps: pd.DatetimeIndex (default: None)
        Datetime index
    years_range: List[int]
        Range of years (if you desire to obtain data for only one year just pass a list with twice the year)
    countries: List[str] (default: None)
        ISO codes of countries
    regions: List[str] (default: None)
        List of codes referring to regions made of several countries defined in
        'data_path'/geographics/region_definition.csv
    precision: int (default: 3)
        Indicates at which decimal load values should be rounded
    missing_data: str (default: error)
        Defines how to deal with missing data. If value is 'error', throws an error. If value is 'interpolate', uses
        data from another country

    Returns
    -------
    pd.DataFrame (index = timestamps, columns = regions or countries)
        DataFrame associating to each country or region its corresponding hourly load (in GWh)

    """

    assert (countries is None) != (regions is None), "Error: You must either specify a list of countries or " \
                                                     "a list of regions made of countries, but not both."
    assert (timestamps is None) != (years_range is None), "Error: You must either specify a range of years or " \
                                                          "a series of time stamps, but not both."
    assert years_range is None or (len(years_range) == 2 and years_range[0] <= years_range[1]), \
        f"The desired years range must be specified as a list of two ints (the first one being smaller" \
        f" or equal to the second one, received {years_range}"
    assert missing_data in ["error", "interpolate"], "Error: missing_data must be one of 'error' or 'interpolate'"

    if years_range is not None:
        # Expand the years range into full hourly coverage of those years.
        timestamps = pd.date_range(f"{years_range[0]}-01-01 00:00:00", f"{years_range[1]}-12-31 23:00:00", freq='1H')

    # Read the pre-generated OPSD load file and check it covers all requested timestamps.
    opsd_load_fn = f"{data_path}load/generated/opsd_load.csv"
    load = pd.read_csv(opsd_load_fn, index_col=0, engine='python')
    load.index = pd.DatetimeIndex(load.index)
    missing_timestamps = set(timestamps) - set(load.index)
    assert not missing_timestamps, f"Error: Load is not available " \
                                   f"for the following timestamps {sorted(list(missing_timestamps))}"

    # Slice on time and remove columns in which we don't have available data for the full time period
    load = load.loc[timestamps].dropna(axis=1)
    # Convert to GWh (source presumably in MWh given the 1e-3 factor — TODO confirm)
    load = load * 1e-3

    def get_countries_load(countries_: List[str]):
        # Return a DataFrame (index = timestamps) with one load column per
        # country in `countries_`, filling countries absent from the data
        # from a source country when missing_data == 'interpolate'.
        countries_load = pd.DataFrame(index=timestamps, columns=countries_)
        missing_countries = set(countries_) - set(load.columns)
        if missing_countries:
            if missing_data == "error":
                raise ValueError(f"Error: Load is not available for countries {sorted(list(missing_countries))} "
                                 f"for the required timestamps.\nAvailable countries are {list(load.columns)}.")
            else:
                countries_load[list(missing_countries)] = \
                    get_load_from_source_country(list(missing_countries), load.index, precision=precision)
        # Bug fix: use the local parameter `countries_`, not the enclosing
        # `countries` variable, which may differ when aggregating by region.
        countries_with_data = list(set(countries_) - set(missing_countries))
        countries_load[countries_with_data] = load[countries_with_data]
        return countries_load

    # Get load per country
    if countries is not None:
        return get_countries_load(countries).round(precision)
    # Get load aggregated by region
    elif regions is not None:
        load_per_region = pd.DataFrame(columns=regions, index=timestamps)
        for region in regions:
            # Get load data for all subregions and sum it
            region_countries = get_subregions(region)
            load_per_region[region] = get_countries_load(region_countries).sum(axis=1).values

        return load_per_region.round(precision)
Esempio n. 4
0
def base_solve(main_output_dir, config):
    """
    Build the base TYNDP2018 network from the given configuration, solve the
    linear OPF and export the results.

    Parameters
    ----------
    main_output_dir: str
        Directory under which a 'base/' sub-directory is created for outputs.
    config: dict
        Main configuration dictionary (region, technologies, functionalities,
        time slice/resolution, solver, ...).

    Returns
    -------
    output_dir: str
        Path of the directory to which results were exported.

    """

    # Main directories
    tech_dir = f"{data_path}technologies/"
    output_dir = f"{main_output_dir}/base/"

    # Collect the technologies to consider based on the configuration flags.
    techs = config["res"]["techs"].copy()
    if config["dispatch"]["include"]:
        techs += [config["dispatch"]["tech"]]
    if config["nuclear"]["include"]:
        techs += ["nuclear"]
    if config["battery"]["include"]:
        techs += [config["battery"]["type"]]
    if config["phs"]["include"]:
        techs += ["phs"]
    if config["ror"]["include"]:
        techs += ["ror"]
    if config["sto"]["include"]:
        techs += ["sto"]
    tech_config = get_config_dict(techs)

    # Parameters
    tech_info = pd.read_excel(join(tech_dir, 'tech_info.xlsx'), sheet_name='values', index_col=0)
    fuel_info = pd.read_excel(join(tech_dir, 'fuel_info.xlsx'), sheet_name='values', index_col=0)

    # Compute and save results
    if not isdir(output_dir):
        makedirs(output_dir)

    # Save config and parameters files.
    # Bug fix: use context managers so the file handles are always closed
    # (the previous `yaml.dump(..., open(...))` form leaked them).
    with open(f"{output_dir}config.yaml", 'w') as f:
        yaml.dump(config, f, sort_keys=False)
    with open(f"{output_dir}tech_config.yaml", 'w') as f:
        yaml.dump(tech_config, f, sort_keys=False)
    tech_info.to_csv(f"{output_dir}tech_info.csv")
    fuel_info.to_csv(f"{output_dir}fuel_info.csv")

    # Time
    timeslice = config['time']['slice']
    time_resolution = config['time']['resolution']
    timestamps = pd.date_range(timeslice[0], timeslice[1], freq=f"{time_resolution}H")

    # Building network
    # Add location to Generators and StorageUnits
    override_comp_attrs = pypsa.descriptors.Dict({k: v.copy() for k, v in pypsa.components.component_attrs.items()})
    override_comp_attrs["Generator"].loc["x"] = ["float", np.nan, np.nan, "x in position (x;y)", "Input (optional)"]
    override_comp_attrs["Generator"].loc["y"] = ["float", np.nan, np.nan, "y in position (x;y)", "Input (optional)"]
    override_comp_attrs["StorageUnit"].loc["x"] = ["float", np.nan, np.nan, "x in position (x;y)", "Input (optional)"]
    override_comp_attrs["StorageUnit"].loc["y"] = ["float", np.nan, np.nan, "y in position (x;y)", "Input (optional)"]

    net = pypsa.Network(name="TYNDP2018 network", override_component_attrs=override_comp_attrs)
    net.set_snapshots(timestamps)

    # Adding carriers
    for fuel in fuel_info.index[1:-1]:
        net.add("Carrier", fuel, co2_emissions=fuel_info.loc[fuel, "CO2"])

    # Loading topology
    logger.info("Loading topology.")
    countries = get_subregions(config["region"])
    net = get_topology(net, countries, p_nom_extendable=True, plot=False)

    # Adding load
    logger.info("Adding load.")
    load = get_load(timestamps=timestamps, countries=countries, missing_data='interpolate')
    load_indexes = "Load " + net.buses.index
    loads = pd.DataFrame(load.values, index=net.snapshots, columns=load_indexes)
    net.madd("Load", load_indexes, bus=net.buses.index, p_set=loads)

    if config["functionalities"]["load_shed"]["include"]:
        logger.info("Adding load shedding generators.")
        net = add_load_shedding(net, loads)

    # Adding pv and wind generators
    if config['res']['include']:
        technologies = config['res']['techs']
        net = add_res_per_bus(net, technologies, config["res"]["use_ex_cap"])

    # Add conventional gen
    if config["dispatch"]["include"]:
        tech = config["dispatch"]["tech"]
        net = add_conventional(net, tech)

    # Adding nuclear
    if config["nuclear"]["include"]:
        net = add_nuclear(net, countries, config["nuclear"]["use_ex_cap"], config["nuclear"]["extendable"])

    if config["sto"]["include"]:
        net = add_sto_plants(net, 'countries', config["sto"]["extendable"], config["sto"]["cyclic_sof"])

    if config["phs"]["include"]:
        net = add_phs_plants(net, 'countries', config["phs"]["extendable"], config["phs"]["cyclic_sof"])

    if config["ror"]["include"]:
        net = add_ror_plants(net, 'countries', config["ror"]["extendable"])

    if config["battery"]["include"]:
        net = add_batteries(net, config["battery"]["type"])

    # Solve the linear optimal power flow and export results.
    net.lopf(solver_name=config["solver"],
             solver_logfile=f"{output_dir}solver.log",
             #solver_options=config["solver_options"],
             keep_references=True,
             pyomo=False)

    net.export_to_csv_folder(output_dir)

    return output_dir
Esempio n. 5
0
                                      edgecolor='darkgrey',
                                      facecolor=cf.COLORS['land_alt1'])

    fig = plt.figure(figsize=(13, 13))
    ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
    ax.add_feature(land_50m, linewidth=0.5, zorder=-1)
    ax.add_feature(cf.BORDERS.with_scale('50m'),
                   edgecolor='darkgrey',
                   linewidth=0.5,
                   zorder=-1)
    ax.set_extent([-15, 42.5, 30, 72.5])

    map = ax.scatter(capacities_df["Longitude"],
                     capacities_df["Latitude"],
                     c=capacities_df["Difference (GW)"],
                     s=1,
                     vmax=1.2,
                     vmin=0.0)
    fig.colorbar(map)

    if show:
        plt.show()
    else:
        return ax


if __name__ == '__main__':
    # Countries composing the project-defined 'EU2' region.
    # NOTE(review): `regions` is only used by the commented-out call below.
    regions = get_subregions("EU2")
    # plot_capacity_per_country("wind_offshore", regions, lonrange=[-12, 30], latrange=[35, 75])
    plot_per_point("wind_onshore")
    # plot_diff("wind_onshore")
Esempio n. 6
0
            's_max_pu': 0.7,
            'types': {
                132.: "Al/St 240/40 2-bundle 220.0",
                220.: "Al/St 240/40 2-bundle 220.0",
                300.: "Al/St 240/40 3-bundle 300.0",
                380.: "Al/St 240/40 4-bundle 380.0"
            }
        },
        "links": {
            'p_max_pu': 1.0,
            'include_tyndp': True
        },
        "transformers": {}
    }

    voltages_ = [220., 300., 380.]
    from epippy.geographics import get_subregions, get_nuts_codes, revert_iso2_codes
    # TODO: does not work with bosnia and kosovo
    countries_ = get_subregions("EU2")
    # countries_ = ['MK']
    nuts_codes_ = get_nuts_codes(2, 2016, revert_iso2_codes(countries_))
    # Some weird BEZ, LUZ, etc...
    nuts_codes_ = [code for code in nuts_codes_ if not code.endswith('Z')]

    if 0:
        net_ = preprocess(True)

    else:
        # net_ = simplify_network(net_, config_)
        load_topology(nuts_codes_, config_, voltages_, True)
Esempio n. 7
0
    override_comp_attrs = pypsa.descriptors.Dict({k: v.copy() for k, v in pypsa.components.component_attrs.items()})
    override_comp_attrs["Generator"].loc["x"] = ["float", np.nan, np.nan, "x in position (x;y)", "Input (optional)"]
    override_comp_attrs["Generator"].loc["y"] = ["float", np.nan, np.nan, "y in position (x;y)", "Input (optional)"]
    override_comp_attrs["StorageUnit"].loc["x"] = ["float", np.nan, np.nan, "x in position (x;y)", "Input (optional)"]
    override_comp_attrs["StorageUnit"].loc["y"] = ["float", np.nan, np.nan, "y in position (x;y)", "Input (optional)"]

    net = pypsa.Network(name="TYNDP2018 network", override_component_attrs=override_comp_attrs)
    net.set_snapshots(timestamps)

    # Adding carriers
    for fuel in fuel_info.index[1:-1]:
        net.add("Carrier", fuel, co2_emissions=fuel_info.loc[fuel, "CO2"])

    # Loading topology
    logger.info("Loading topology.")
    countries = get_subregions(config["region"])
    net = get_topology(net, countries, extend_line_cap=True, plot=False)

    # Adding load
    logger.info("Adding load.")
    load = get_load(timestamps=timestamps, countries=countries, missing_data='interpolate')
    load_indexes = "Load " + net.buses.index
    loads = pd.DataFrame(load.values, index=net.snapshots, columns=load_indexes)
    net.madd("Load", load_indexes, bus=net.buses.index, p_set=loads)

    if config["functionalities"]["load_shed"]["include"]:
        logger.info("Adding load shedding generators.")
        net = add_load_shedding(net, loads)

    # Adding pv and wind generators
    if config['res']['include']:
Esempio n. 8
0
def test_get_eez_and_land_union_shapes():
    """Check that union shapes come back as a pd.Series indexed exactly by the requested codes."""
    country_codes = get_subregions("EU2")
    shapes = get_eez_and_land_union_shapes(country_codes)
    assert isinstance(shapes, pd.Series)
    assert set(shapes.index) == set(country_codes)
Esempio n. 9
0
def upgrade_topology(net: pypsa.Network, regions: List[str], plot: bool = False,
                     ac_carrier: str = "HVAC_OHL", dc_carrier: str = "HVDC_GLIS") -> pypsa.Network:
    """
    Add non-European buses and interconnecting links to an existing network.

    Handled region codes: "IS" (Iceland), "GL" (Greenland, requires "IS"),
    "na" (North Africa) and "me" (Middle East). For each, buses with onshore
    region shapes and coordinates are created and links to neighbouring buses
    are added; link lengths not set manually are computed as geodesic
    distances between bus coordinates.

    Parameters
    ----------
    net: pypsa.Network
        Network to which buses and links are added (must already contain the
        European buses referenced by the new links, e.g. "GB", "GR", "ES").
    regions: List[str]
        Codes of the regions to add.
    plot: bool (default: False)
        Whether to plot the resulting topology.
    ac_carrier: str (default: "HVAC_OHL")
        Carrier assigned to the AC links added.
    dc_carrier: str (default: "HVDC_GLIS")
        Carrier assigned to the DC links added.

    Returns
    -------
    net: pypsa.Network
        Updated network.

    """

    buses = pd.DataFrame(columns=["x", "y", "country", "onshore_region", "offshore_region"])
    links = pd.DataFrame(columns=["bus0", "bus1", "carrier", "length"])

    if "IS" in regions:
        buses.loc["IS", "onshore_region"] = get_shapes(["IS"], "onshore")["geometry"][0]
        buses.loc["IS", ["x", "y"]] = buses.loc["IS", "onshore_region"].centroid
        buses.loc["IS", "country"] = "IS"
        # Adding link to GB
        links.loc["IS-GB", ["bus0", "bus1", "carrier"]] = ["IS", "GB", dc_carrier]

    if "GL" in regions:
        assert 'IS' in regions, "Error: Cannot add a node in Greenland without adding a node in Iceland."
        # Keep only the south-eastern tip of Greenland.
        full_gl_shape = get_shapes(["GL"], "onshore")["geometry"][0]
        trunc_gl_shape = full_gl_shape.intersection(Polygon([(-44.6, 59.5), (-44.6, 60.6), (-42, 60.6), (-42, 59.5)]))
        buses.loc["GL", "onshore_region"] = trunc_gl_shape
        buses.loc["GL", ["x", "y"]] = (-44., 60.)
        # buses.loc["GL", "country"] = "GL"
        # Adding link to IS
        links.loc["GL-IS", ["bus0", "bus1", "carrier"]] = ["GL", "IS", dc_carrier]

    if "na" in regions:
        countries = get_subregions("na")
        shapes = get_shapes(countries, "onshore")["geometry"]
        # Clip country shapes to the Mediterranean coastal band.
        trunc_shape = Polygon([(-14, 27.7), (-14, 40), (40, 40), (40, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        buses.loc["DZ", ["x", "y"]] = (3, 36.5)  # Algeria, Alger
        buses.loc["EG", ["x", "y"]] = (31., 30.)  # Egypt, Cairo
        buses.loc["LY", ["x", "y"]] = (22, 32) #(13., 32.5)  # Libya, Tripoli
        buses.loc["MA", ["x", "y"]] = (-6., 35.)  # Morocco, Rabat
        buses.loc["TN", ["x", "y"]] = (10., 36.5)  # Tunisia, Tunis
        # Adding links
        links.loc["DZ-MA", ["bus0", "bus1", "carrier"]] = ["DZ", "MA", ac_carrier]
        links.loc["DZ-TN", ["bus0", "bus1", "carrier"]] = ["DZ", "TN", ac_carrier]
        links.loc["LY-TN", ["bus0", "bus1", "carrier", "length"]] = ["LY", "TN", ac_carrier, 2000]
        links.loc["EG-LY", ["bus0", "bus1", "carrier", "length"]] = ["EG", "LY", ac_carrier, 700]
        if "GR" in net.buses.index:
            links.loc["LY-GR", ["bus0", "bus1", "carrier", "length"]] = ["LY", "GR", dc_carrier, 900]
        if "ES" in net.buses.index:
            links.loc["MA-ES", ["bus0", "bus1", "carrier"]] = ["MA", "ES", dc_carrier]
        if "IT" in net.buses.index:
            links.loc["TN-IT", ["bus0", "bus1", "carrier", "length"]] = ["TN", "IT", dc_carrier, 600]

    if "me" in regions:
        # countries = ["AE", "BH", "CY", "IL", "IQ", "IR", "JO", "KW", "LB", "OM", "QA", "SA", "SY"]  # , "YE"]
        countries = get_subregions("me")
        shapes = get_shapes(countries, "onshore")["geometry"]
        trunc_shape = Polygon([(25, 27.7), (25, 60), (60, 60), (60, 27.7)])
        for c in countries:
            buses.loc[c, "onshore_region"] = shapes.loc[c].intersection(trunc_shape)
            # buses.loc[c, "country"] = c
        # buses.loc["AE", ["x", "y"]] = (54.5, 24.5)  # UAE, Abu Dhabi
        # buses.loc["BH", ["x", "y"]] = (50.35, 26.13)  # Bahrain, Manama
        buses.loc["TR", ["x", "y"]] = buses.loc["TR", "onshore_region"].centroid
        buses.loc["CY", ["x", "y"]] = (33.21, 35.1)  # Cyprus, Nicosia
        buses.loc["IL", ["x", "y"]] = (34.76, 32.09)  # Tel-Aviv, Jerusalem
        # if 'TR' in net.buses.index:
        #     buses.loc["IQ", ["x", "y"]] = (44.23, 33.2)  # Iraq, Baghdad
        #     buses.loc["IR", ["x", "y"]] = (51.23, 35.41)  # Iran, Tehran
        # else:
        #    buses = buses.drop(["IQ", "IR"])
        buses.loc["JO", ["x", "y"]] = (35.55, 31.56)  # Jordan, Amman
        # buses.loc["KW", ["x", "y"]] = (47.58, 29.22)  # Kuwait, Kuwait City
        # buses.loc["LB", ["x", "y"]] = (35.3, 33.53)  # Lebanon, Beirut
        # buses.loc["OM", ["x", "y"]] = (58.24, 23.35)  # Oman, Muscat
        # buses.loc["QA", ["x", "y"]] = (51.32, 25.17)  # Qatar, Doha
        buses.loc["SA", ["x", "y"]] = buses.loc["SA", "onshore_region"].centroid #(46.43, 24.38)  # Saudi Arabia, Riyadh
        buses.loc["SY", ["x", "y"]] = (36.64, 34.63)  # Syria, Homs
        # buses.loc["YE", ["x", "y"]] = (44.12, 15.20)  # Yemen, Sana
        # Adding links
        links.loc["IL-JO", ["bus0", "bus1", "carrier"]] = ["IL", "JO", ac_carrier]
        # links.loc["IL-LI", ["bus0", "bus1", "carrier"]] = ["IL", "LB", ac_carrier]
        # links.loc["SY-LI", ["bus0", "bus1", "carrier"]] = ["SY", "LB", ac_carrier]
        links.loc["SY-JO", ["bus0", "bus1", "carrier"]] = ["SY", "JO", ac_carrier]
        # NOTE(review): literal "DC" used here instead of dc_carrier — confirm intentional.
        links.loc["IL-CY", ["bus0", "bus1", "carrier"]] = ["IL", "CY", "DC"]
        # This links comes from nowhere
        links.loc["SA-JO", ["bus0", "bus1", "carrier"]] = ["SA", "JO", ac_carrier]
        # links.loc["CY-SY", ["bus0", "bus1", "carrier"]] = ["CY", "SY", "DC"]
        # links.loc["OM-AE", ["bus0", "bus1", "carrier"]] = ["OM", "AE", ac_carrier]
        # links.loc["QA-AE", ["bus0", "bus1", "carrier"]] = ["QA", "AE", ac_carrier]
        # links.loc["QA-SA", ["bus0", "bus1", "carrier"]] = ["QA", "SA", ac_carrier]
        # links.loc["BH-QA", ["bus0", "bus1", "carrier"]] = ["BH", "QA", ac_carrier]
        # links.loc["BH-KW", ["bus0", "bus1", "carrier"]] = ["BH", "KW", ac_carrier]
        # links.loc["BH-SA", ["bus0", "bus1", "carrier"]] = ["BH", "SA", ac_carrier]
        # links.loc["YE-SA", ["bus0", "bus1", "carrier"]] = ["YE", "SA", ac_carrier]
        if "EG" in buses.index:
            links.loc["EG-IL", ["bus0", "bus1", "carrier"]] = ["EG", "IL", ac_carrier]
            links.loc["SA-EG", ["bus0", "bus1", "carrier"]] = ["SA", "EG", ac_carrier]
        #if "TR" in net.buses.index:
        links.loc["SY-TR", ["bus0", "bus1", "carrier"]] = ["SY", "TR", ac_carrier]
            # links.loc["IQ-TR", ["bus0", "bus1", "carrier"]] = ["IQ", "TR", ac_carrier]
            # links.loc["IR-TR", ["bus0", "bus1", "carrier"]] = ["IR", "TR", ac_carrier]
            # links.loc["IR-IQ", ["bus0", "bus1", "carrier"]] = ["IR", "IQ", ac_carrier]
        if "GR" in net.buses.index:
            links.loc["CY-GR", ["bus0", "bus1", "carrier", "length"]] = ["CY", "GR", dc_carrier, 850]
            # From TYNDP
            links.loc["TR-GR", ["bus0", "bus1", "carrier", "length"]] = ["TR", "GR", dc_carrier, 1173.53]  # p_nom = 0.66
        if "BG" in net.buses.index:
            links.loc["TR-BG", ["bus0", "bus1", "carrier", "length"]] = ["TR", "BG", ac_carrier, 932.16]  # p_nom = 1.2

    buses = buses.infer_objects()
    net.madd("Bus", buses.index,
             x=buses.x, y=buses.y, country=buses.country,
             onshore_region=buses.onshore_region, offshore_region=buses.offshore_region,)

    # Adding length to the lines for which we did not fix it manually
    for idx in links[links.length.isnull()].index:
        bus0_id = links.loc[idx]["bus0"]
        bus1_id = links.loc[idx]["bus1"]
        bus0_x = net.buses.loc[bus0_id]["x"]
        bus0_y = net.buses.loc[bus0_id]["y"]
        bus1_x = net.buses.loc[bus1_id]["x"]
        bus1_y = net.buses.loc[bus1_id]["y"]
        links.loc[idx, "length"] = geopy.distance.geodesic((bus0_y, bus0_x), (bus1_y, bus1_x)).km

    # Capital cost per link: per-km cost of the carrier times the link length.
    links['capital_cost'] = pd.Series(index=links.index)
    for idx in links.index:
        carrier = links.loc[idx].carrier
        cap_cost, _ = get_costs(carrier, sum(net.snapshot_weightings['objective']))
        # NOTE(review): tuple indexer ('capital_cost', ) — plain "capital_cost" seems intended; confirm.
        links.loc[idx, ('capital_cost', )] = cap_cost * links.length.loc[idx]
    net.madd("Link", links.index, bus0=links.bus0, bus1=links.bus1, carrier=links.carrier, p_nom_extendable=True,
             length=links.length, capital_cost=links.capital_cost)

    # from tyndp
    if "TR" in net.buses.index:
        net.links.loc[["TR-BG", "TR-GR"], "p_nom"] = [1.2, 0.66]

    if plot:
        plot_topology(net.buses, net.links)
        plt.show()

    return net
Esempio n. 10
0
def add_res_at_sites(
    net: pypsa.Network,
    config,
    output_dir,
    eu_countries,
):
    """
    Add RES (wind/PV) generators at sites selected with resite to the network.

    Sites are first built for the European countries, then (if config["non_eu"]
    is set) for remote regions, whose site data is merged into the European
    resite object. Depending on config["res"]["strategy"], either a siting
    optimization selects a subset of sites or all sites are kept; the resulting
    sites are matched to network buses and added as extendable generators.

    Parameters
    ----------
    net: pypsa.Network
        Network with buses (including onshore_region/offshore_region shapes).
    config: dict
        Main configuration dictionary (keys 'res', 'non_eu', 'solver',
        'solver_options' are read).
    output_dir: str
        Directory under which resite inputs/outputs are written.
    eu_countries: List[str]
        Codes of the European countries composing the region.

    Returns
    -------
    net: pypsa.Network
        Updated network.

    """

    eu_technologies = config['res']['techs']

    logger.info(f"Adding RES {eu_technologies} generation.")

    spatial_res = config["res"]["spatial_resolution"]
    use_ex_cap = config["res"]["use_ex_cap"]
    min_cap_pot = config["res"]["min_cap_pot"]
    min_cap_if_sel = config["res"]["min_cap_if_selected"]

    # Build sites for EU
    r_europe = Resite(eu_countries, eu_technologies,
                      [net.snapshots[0], net.snapshots[-1]], spatial_res,
                      min_cap_if_sel)
    regions_shapes = net.buses.loc[eu_countries,
                                   ["onshore_region", 'offshore_region']]
    regions_shapes.columns = ['onshore', 'offshore']
    r_europe.build_data(use_ex_cap, min_cap_pot, regions_shapes=regions_shapes)
    net.cc_ds = r_europe.data_dict["capacity_credit_ds"]

    # Build sites for other regions
    non_eu_res = config["non_eu"]
    all_remote_countries = []
    if non_eu_res is not None:
        for region in non_eu_res.keys():
            # "na" and "me" are multi-country regions; other keys are single countries.
            if region in ["na", "me"]:
                remote_countries = get_subregions(region)
            else:
                remote_countries = [region]
            all_remote_countries += remote_countries
            remote_techs = non_eu_res[region]
            r_remote = Resite(remote_countries, remote_techs,
                              [net.snapshots[0], net.snapshots[-1]],
                              spatial_res)
            regions_shapes = net.buses.loc[
                remote_countries, ["onshore_region", 'offshore_region']]
            regions_shapes.columns = ['onshore', 'offshore']
            r_remote.build_data(False,
                                compute_load=False,
                                regions_shapes=regions_shapes)

            # Add sites to European ones
            # NOTE(review): Series/DataFrame.append is removed in pandas >= 2.0;
            # these merges would need pd.concat there — confirm pinned pandas version.
            r_europe.regions += r_remote.regions
            r_europe.technologies = list(
                set(r_europe.technologies).union(r_remote.technologies))
            r_europe.min_cap_pot_dict = {
                **r_europe.min_cap_pot_dict,
                **r_remote.min_cap_pot_dict
            }
            r_europe.tech_points_tuples = np.concatenate(
                (r_europe.tech_points_tuples, r_remote.tech_points_tuples))
            r_europe.initial_sites_ds = r_europe.initial_sites_ds.append(
                r_remote.initial_sites_ds)
            r_europe.tech_points_regions_ds = \
                r_europe.tech_points_regions_ds.append(r_remote.tech_points_regions_ds)
            r_europe.data_dict["load"] = pd.concat(
                [r_europe.data_dict["load"], r_remote.data_dict["load"]],
                axis=1)
            r_europe.data_dict["cap_potential_ds"] = \
                r_europe.data_dict["cap_potential_ds"].append(r_remote.data_dict["cap_potential_ds"])
            r_europe.data_dict["existing_cap_ds"] = \
                r_europe.data_dict["existing_cap_ds"].append(r_remote.data_dict["existing_cap_ds"])
            r_europe.data_dict["cap_factor_df"] = \
                pd.concat([r_europe.data_dict["cap_factor_df"], r_remote.data_dict["cap_factor_df"]], axis=1)

    # Update dictionary
    # Rebuild the tech -> points mapping from the merged sites.
    tech_points_dict = {}
    techs = set(r_europe.initial_sites_ds.index.get_level_values(0))
    for tech in techs:
        tech_points_dict[tech] = list(r_europe.initial_sites_ds[tech].index)
    r_europe.tech_points_dict = tech_points_dict

    # Do siting if required
    if config["res"]["strategy"] == "siting":
        logger.info('resite model being built.')
        siting_params = config['res']
        # if siting_params['formulation'] == "min_cost_global":
        #    siting_params['formulation_params']['perc_per_region'] = \
        #        siting_params['formulation_params']['perc_per_region'] + [0.] * len(all_remote_countries)
        r_europe.build_model(siting_params["modelling"],
                             siting_params['formulation'],
                             siting_params['formulation_params'],
                             siting_params['write_lp'], f"{output_dir}resite/")

        logger.info('Sending resite to solver.')
        r_europe.init_output_folder(f"{output_dir}resite/")
        r_europe.solve_model(f"{output_dir}resite/",
                             solver=config['solver'],
                             solver_options=config['solver_options'])

        logger.info("Saving resite results")
        r_europe.retrieve_selected_sites_data()
        r_europe.save(f"{output_dir}resite/")

        # Add solution to network
        logger.info('Retrieving resite results.')
        tech_location_dict = r_europe.sel_tech_points_dict
        existing_cap_ds = r_europe.sel_data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.sel_data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.sel_data_dict["cap_factor_df"]

        if not r_europe.timestamps.equals(net.snapshots):
            # If network snapshots is a subset of resite snapshots just crop the data
            missing_timestamps = set(net.snapshots) - set(r_europe.timestamps)
            if not missing_timestamps:
                cap_factor_df = cap_factor_df.loc[net.snapshots]
            else:
                # In other case, need to recompute capacity factors
                raise NotImplementedError(
                    "Error: Network snapshots must currently be a subset of resite snapshots."
                )

    else:  # no siting
        # Keep every built site and its data.
        tech_location_dict = r_europe.tech_points_dict
        existing_cap_ds = r_europe.data_dict["existing_cap_ds"]
        cap_potential_ds = r_europe.data_dict["cap_potential_ds"]
        cap_factor_df = r_europe.data_dict["cap_factor_df"]

    for tech, points in tech_location_dict.items():

        onshore_tech = get_config_values(tech, ['onshore'])

        # Associate sites to buses (using the associated shapes)
        buses = net.buses.copy()
        region_type = 'onshore_region' if onshore_tech else 'offshore_region'
        buses = buses.dropna(subset=[region_type])
        associated_buses = match_points_to_regions(
            points, buses[region_type]).dropna()
        points = list(associated_buses.index)

        # NOTE(review): the string 'inf' is used as the unlimited-capacity
        # sentinel when limit_max_cap is off — confirm pypsa accepts it.
        p_nom_max = 'inf'
        if config['res']['limit_max_cap']:
            p_nom_max = cap_potential_ds[tech][points].values
        p_nom = existing_cap_ds[tech][points].values
        p_max_pu = cap_factor_df[tech][points].values

        capital_cost, marginal_cost = get_costs(
            tech, sum(net.snapshot_weightings['objective']))

        net.madd("Generator",
                 pd.Index([f"Gen {tech} {x}-{y}" for x, y in points]),
                 bus=associated_buses.values,
                 p_nom_extendable=True,
                 p_nom_max=p_nom_max,
                 p_nom=p_nom,
                 p_nom_min=p_nom,
                 p_min_pu=0.,
                 p_max_pu=p_max_pu,
                 type=tech,
                 x=[x for x, _ in points],
                 y=[y for _, y in points],
                 marginal_cost=marginal_cost,
                 capital_cost=capital_cost)

    return net
Esempio n. 11
0
def add_extra_functionalities(net: pypsa.Network, snapshots: pd.DatetimeIndex):
    """
    Wrapper for the inclusion of multiple extra_functionalities.

    Parameters
    ----------
    net: pypsa.Network
        A PyPSA Network instance with buses associated to regions
        and containing a functionality configuration dictionary
    snapshots: pd.DatetimeIndex
        Network snapshots.

    """

    assert hasattr(net, 'config'), 'To use functionalities, you need to give the network a config attribute' \
                                   'specifying which functionality you want to add.'

    mandatory_fields = ['functionalities', 'pyomo']
    for field in mandatory_fields:
        assert field in net.config, f'Error: No field {field} found in config.'
    conf_func = net.config["functionalities"]

    # Choose the implementation module depending on the modelling framework.
    pyomo = net.config['pyomo']
    if pyomo:
        import network.globals.pyomo as funcs
    else:
        import network.globals.nomopyomo as funcs

    # Some functionalities are currently only implemented in pyomo
    if 'snsp' in conf_func and conf_func["snsp"]["include"]:
        if pyomo:
            funcs.add_snsp_constraint_tyndp(net, snapshots,
                                            conf_func["snsp"]["share"])
        else:
            logger.warning(
                'SNSP functionality is currently not implemented in nomopyomo')

    # Bug fix: the key was misspelled 'curtailement' in the membership test
    # while the accesses below use 'curtailment', so this functionality was
    # silently never activated.
    if 'curtailment' in conf_func and conf_func["curtailment"]["include"]:
        if pyomo:
            # "strategy" is a pair: (strategy name, strategy parameter).
            strategy = conf_func["curtailment"]["strategy"][0]
            if strategy == 'economic':
                funcs.add_curtailment_penalty_term(
                    net, snapshots, conf_func["curtailment"]["strategy"][1])
            elif strategy == 'technical':
                funcs.add_curtailment_constraints(
                    net, snapshots, conf_func["curtailment"]["strategy"][1])
        else:
            logger.warning(
                'Curtailment functionality is currently not implemented in nomopyomo'
            )

    if "co2_emissions" in conf_func and conf_func["co2_emissions"]["include"]:
        strategy = conf_func["co2_emissions"]["strategy"]
        mitigation_factor = conf_func["co2_emissions"]["mitigation_factor"]
        ref_year = conf_func["co2_emissions"]["reference_year"]
        if strategy == 'country':
            # One mitigation factor is expected per country of the main region.
            countries = get_subregions(net.config['region'])
            assert len(countries) == len(mitigation_factor), \
                "Error: a co2 emission reduction share must be given for each country in the main region."
            mitigation_factor_dict = dict(zip(countries, mitigation_factor))
            funcs.add_co2_budget_per_country(net, mitigation_factor_dict,
                                             ref_year)
        elif strategy == 'global':
            funcs.add_co2_budget_global(net, net.config["region"],
                                        mitigation_factor, ref_year)

    if 'import_limit' in conf_func and conf_func["import_limit"]["include"]:
        # NOTE(review): with pyomo, the constraint is added twice (once without
        # and once with the country list) — confirm this is intended.
        funcs.add_import_limit_constraint(net,
                                          conf_func["import_limit"]["share"])
        if pyomo:
            countries = get_subregions(net.config['region'])
            funcs.add_import_limit_constraint(
                net, conf_func["import_limit"]["share"], countries)

    # Battery charge-to-discharge ratio constraint when duration is not fixed.
    if 'techs' in net.config and 'battery' in net.config["techs"] and\
            not net.config["techs"]["battery"]["fixed_duration"]:
        ctd_ratio = get_config_values("Li-ion_p", ["ctd_ratio"])
        funcs.store_links_constraint(net, ctd_ratio)

    if "disp_cap" in conf_func and conf_func["disp_cap"]["include"]:
        # One dispatchable-capacity threshold per country of the main region.
        countries = get_subregions(net.config['region'])
        disp_threshold = conf_func["disp_cap"]["disp_threshold"]
        assert len(countries) == len(disp_threshold), \
            "A dispatchable capacity threshold must be given for each country in the main region."
        thresholds = dict(zip(countries, disp_threshold))
        funcs.dispatchable_capacity_lower_bound(net, thresholds)

    if 'prm' in conf_func and conf_func["prm"]["include"]:
        prm = conf_func["prm"]["PRM"]
        funcs.add_planning_reserve_constraint(net, prm)

    if 'mga' in conf_func and conf_func['mga']['include']:
        mga_type = conf_func['mga']['type']
        epsilon = conf_func['mga']['epsilon']
        if not pyomo:
            funcs.min_capacity(net, mga_type, epsilon)
        else:
            # Bug fix: MGA is only available without pyomo, so the warning on
            # the pyomo branch must name pyomo, not nomopyomo.
            logger.warning(
                'MGA functionality is currently not implemented in pyomo')
Esempio n. 12
0
    cap_pot_df["ISO_3"] = convert_country_codes(cap_pot_df.index.values, 'alpha_2', 'alpha_3')

    fig = go.Figure(data=go.Choropleth(
        locations=cap_pot_df['ISO_3'],  # Spatial coordinates
        z=cap_pot_df[0],  # Data to be color-coded
        text=[f"{cap} GW" for cap in cap_pot_df[0].values],
        colorscale='Reds',
        colorbar_title=f"Capacity (GW)"
    ))

    fig.update_layout(
        geo=dict(
            lonaxis=dict(
                range=lon_range,
            ),
            lataxis=dict(
                range=lat_range,
            ),
            scope='europe'),
        title=f"Capacity potential for {tech}"
    )

    fig.show()


if __name__ == '__main__':
    # Example usage: plot the capacity potential of residential PV for the
    # countries composing the project-defined 'BENELUX' region.
    from epippy.geographics import get_subregions
    countries_ = get_subregions("BENELUX")
    tech_ = "pv_residential"
    plot_capacity(tech_, countries_, lon_range=[-12, 30], lat_range=[35, 75])