Example #1
def reduce_network(n, buses):
    nr = pypsa.Network()
    nr.import_components_from_dataframe(buses, "Bus")
    nr.import_components_from_dataframe(
        n.lines.loc[n.lines.bus0.isin(buses.index)
                    & n.lines.bus1.isin(buses.index)], "Line")
    return nr
Example #2
def make_summaries(networks_dict):

    columns = pd.MultiIndex.from_tuples(
        networks_dict.keys(), names=["cost", "resarea", "sectors", "opts"])

    dfs = {}

    for output in outputs:
        dfs[output] = pd.DataFrame(columns=columns, dtype=float)

    for label, filename in iteritems(networks_dict):
        print(label, filename)
        if not os.path.exists(filename):
            print("does not exist!!")
            continue

        try:
            n = pypsa.Network(filename)
        except OSError:
            logger.warning("Skipping {filename}".format(filename=filename))
            continue

        Nyears = n.snapshot_weightings.sum() / 8760.

        assign_carriers(n)

        for output in outputs:
            dfs[output] = globals()["calculate_" + output](n, label,
                                                           dfs[output])

    return dfs
Example #3
def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
    import pypsa
    from add_electricity import load_costs, update_transmission_costs

    n = pypsa.Network(fn)

    n.loads["carrier"] = n.loads.bus.map(n.buses.carrier) + " load"
    n.stores["carrier"] = n.stores.bus.map(n.buses.carrier)

    n.links["carrier"] = (
        n.links.bus0.map(n.buses.carrier) + "-" + n.links.bus1.map(n.buses.carrier)
    )
    n.lines["carrier"] = "AC line"
    n.transformers["carrier"] = "AC transformer"

    n.lines["s_nom"] = n.lines["s_nom_min"]
    n.links["p_nom"] = n.links["p_nom_min"]

    if combine_hydro_ps:
        n.storage_units.loc[
            n.storage_units.carrier.isin({"PHS", "hydro"}), "carrier"
        ] = "hydro+PHS"

    # if the carrier was not set on the heat storage units
    # bus_carrier = n.storage_units.bus.map(n.buses.carrier)
    # n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"

    Nyears = n.snapshot_weightings.objective.sum() / 8760.0
    costs = load_costs(Nyears, tech_costs, config["costs"], config["electricity"])
    update_transmission_costs(n, costs)

    return n
Example #4
def load_network(fn, opts, combine_hydro_ps=True):
    n = pypsa.Network(fn)

    n.loads["carrier"] = n.loads.bus.map(n.buses.carrier) + " load"
    n.stores["carrier"] = n.stores.bus.map(n.buses.carrier)

    n.links["carrier"] = (n.links.bus0.map(n.buses.carrier) + "-" +
                          n.links.bus1.map(n.buses.carrier))
    n.lines["carrier"] = "AC line"
    n.transformers["carrier"] = "AC transformer"

    n.lines['s_nom'] = n.lines['s_nom_min']

    if combine_hydro_ps:
        n.storage_units.loc[
            n.storage_units.carrier.isin({'Pumped storage', 'Hydro'}),
            'carrier'] = 'Hydro+PS'

    # #if the carrier was not set on the heat storage units
    # bus_carrier = n.storage_units.bus.map(n.buses.carrier)
    # n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"

    for name in opts['heat_links'] + opts['heat_generators']:
        n.links.loc[n.links.index.to_series().str.endswith(name),
                    "carrier"] = name

    return n
Example #5
def make_summaries(networks_dict):

    columns = pd.MultiIndex.from_tuples(
        networks_dict.keys(), names=["simpl", "clusters", "lv", "opts"])

    dfs = {}

    for output in outputs:
        dfs[output] = pd.DataFrame(columns=columns, dtype=float)

    for label, filename in iteritems(networks_dict):
        print(label, filename)
        if not os.path.exists(filename):
            print("does not exist!!")
            continue

        n = pypsa.Network(filename)

        assign_carriers(n)

        Nyears = n.snapshot_weightings.sum() / 8760.
        costs = load_costs(Nyears, snakemake.input[0],
                           snakemake.config['costs'],
                           snakemake.config['electricity'])
        update_transmission_costs(n, costs)

        for output in outputs:
            dfs[output] = globals()["calculate_" + output](n, label,
                                                           dfs[output])

    return dfs
Example #6
def test_opf():

    csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples",
                                   "opf-storage-hvdc", "opf-storage-data")

    n = pypsa.Network(csv_folder_name)

    target_path = os.path.join(csv_folder_name, "results", "generators-p.csv")

    target_gen_p = pd.read_csv(target_path, index_col=0, parse_dates=True)

    #test results were generated with GLPK and other solvers may differ
    for solver_name in solvers:

        n.lopf(solver_name=solver_name, pyomo=True)

        equal(n.generators_t.p.reindex_like(target_gen_p),
              target_gen_p,
              decimal=2)

    if sys.version_info.major >= 3:

        for solver_name in solvers:

            status, cond = n.lopf(solver_name=solver_name, pyomo=False)
            assert status == 'ok'
            equal(n.generators_t.p.reindex_like(target_gen_p),
                  target_gen_p,
                  decimal=2)
Example #7
def test_lopf():

    csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples",
                                   "ac-dc-meshed", "ac-dc-data")

    n = pypsa.Network(csv_folder_name)

    results_folder_name = os.path.join(csv_folder_name, "results-lopf")

    n_r = pypsa.Network(results_folder_name)

    #test results were generated with GLPK; solution should be unique,
    #so other solvers should not differ (tested with cbc and gurobi)

    snapshots = n.snapshots

    for formulation, free_memory in product(
        ["angles", "cycles", "kirchhoff", "ptdf"], [{}, {"pypsa"}]):
        n.lopf(snapshots=snapshots,
               solver_name=solver_name,
               formulation=formulation,
               free_memory=free_memory)

        equal(n.generators_t.p.loc[:, n.generators.index],
              n_r.generators_t.p.loc[:, n.generators.index],
              decimal=4)
        equal(n.lines_t.p0.loc[:, n.lines.index],
              n_r.lines_t.p0.loc[:, n.lines.index],
              decimal=4)
        equal(n.links_t.p0.loc[:, n.links.index],
              n_r.links_t.p0.loc[:, n.links.index],
              decimal=4)

    if sys.version_info.major >= 3:
        status, cond = n.lopf(snapshots=snapshots,
                              solver_name=solver_name,
                              pyomo=False)
        assert status == 'ok'
        equal(n.generators_t.p.loc[:, n.generators.index],
              n_r.generators_t.p.loc[:, n.generators.index],
              decimal=2)
        equal(n.lines_t.p0.loc[:, n.lines.index],
              n_r.lines_t.p0.loc[:, n.lines.index],
              decimal=2)
        equal(n.links_t.p0.loc[:, n.links.index],
              n_r.links_t.p0.loc[:, n.links.index],
              decimal=2)
Example #8
def solve_UC(t, adv_index, temp):
    network = pypsa.Network()
    network.set_snapshots(range(simulation_hours))

    for i in range(n_buses):
        network.add("Bus", "bus {}".format(i))

    for i in range(n_lines):
        network.add("Line",
                    "line {}".format(i),
                    bus0="bus %d" % line_matrix[i, 0],
                    bus1="bus %d" % line_matrix[i, 1],
                    x=0.0001,
                    s_nom=line_capacity[i])

    for i in range(n_gen):
        network.add("Generator",
                    "gen {}".format(i),
                    bus="bus {}".format(np.int(generator[i, 0] - 1)),
                    committable=True,
                    marginal_cost=marginal_costs[i, 0],
                    p_min_pu=0.1,
                    initial_status=0,
                    ramp_limit_up=up_ramp[i],
                    ramp_limit_down=down_ramp[i],
                    min_up_time=mini_up_time[i, 0],
                    min_down_time=mini_down_time[i, 0],
                    start_up_cost=startup_costs[i, 0],
                    shut_down_cost=shut_down_costs[i, 0],
                    p_nom=capacity[i])

    for i in range(n_buses):
        if adv_index[i] == 1:
            network.add("Load",
                        "load {}".format(i),
                        bus="bus {}".format(i),
                        p_set=(rows[t:t + 24, temp + 1] * 1.03 *
                               load_ratio[i]).reshape(-1, ))
        else:
            network.add("Load",
                        "load {}".format(i),
                        bus="bus {}".format(i),
                        p_set=(rows[t:t + 24, 0] * 1.03 *
                               load_ratio[i]).reshape(-1, ))

    #print(network.buses)
    #print(network.lines)
    #print(network.generators)
    #print(network.loads)
    #print("Power Network Initialization finished")
    network.lopf(network.snapshots)
    generator_val = np.array(network.generators_t.p, dtype=float)
    line_flow = np.array(network.lines_t.p0, dtype=float)[0]
    generator_schedule = np.array(network.generators_t.status, dtype=float)
    print("Current schedule", np.shape(generator_schedule))
    #print("Current dispatch", generator_val)
    #print("Line flow", line_flow)
    #print("Line flow", np.array(network.lines_t.p1, dtype=float)[0])
    return generator_schedule, generator_val, line_flow
Example #9
def import_network(options, tmp_network=False):
    network = pypsa.Network()
    if tmp_network:
        network.import_from_csv_folder(options['tmp_dir'] + 'network/')
    else:
        network.import_from_hdf5(options['network_path'])
    network.snapshots = network.snapshots[0:50]
    return network
Example #10
def solve():
    n = pypsa.Network(PATH)
    n.generators.p_nom_max.fillna(np.inf, inplace=True)
    n.snapshots = n.snapshots[:NSNAPSHOTS]

    m = n.lopf(solver_options=SOLVER_PARAMS,
               formulation="kirchhoff",
               solver_name=SOLVER)
Example #11
def create_admittance(data):

    n = pypsa.Network(data)
    n.determine_network_topology()
    for sub in n.sub_networks.obj:
        pypsa.pf.calculate_Y(sub)

    # note: only the admittance matrix of the last sub-network is returned
    return sub.Y
Example #12
def test_tiny_with_default():
    n = pypsa.Network(snapshots=range(2))
    n.investment_periods = [2020, 2030]
    n.add("Bus", 1)
    n.add("Generator", 1, bus=1, p_nom_extendable=True, capital_cost=10)
    n.add("Load", 1, bus=1, p_set=100)
    n.lopf(pyomo=False, multi_investment_periods=True)
    assert n.generators.p_nom_opt.item() == 100
Example #13
def test_pypower_case():

    #ppopt is a dictionary with the details of the optimization routine to run
    ppopt = ppoption(PF_ALG=2)

    #choose DC or AC
    ppopt["PF_DC"] = True

    #ppc is a dictionary with details about the network, including baseMVA, branches and generators
    ppc = case()

    results, success = runpf(ppc, ppopt)

    #store results in a DataFrame for easy access
    results_df = {}

    #branches
    columns = 'bus0, bus1, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax, p0, q0, p1, q1'.split(
        ", ")
    results_df['branch'] = pd.DataFrame(data=results["branch"],
                                        columns=columns)

    #buses
    columns = [
        "bus", "type", "Pd", "Qd", "Gs", "Bs", "area", "v_mag_pu_set",
        "v_ang_set", "v_nom", "zone", "Vmax", "Vmin"
    ]
    results_df['bus'] = pd.DataFrame(data=results["bus"],
                                     columns=columns,
                                     index=results["bus"][:, 0])

    #generators
    columns = "bus, p, q, q_max, q_min, Vg, mBase, status, p_max, p_min, Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf".split(
        ", ")
    results_df['gen'] = pd.DataFrame(data=results["gen"], columns=columns)

    #now compute in PyPSA

    network = pypsa.Network()
    network.import_from_pypower_ppc(ppc)
    network.lpf()

    #compare generator dispatch

    p_pypsa = network.generators_t.p.loc["now"].values
    p_pypower = results_df['gen']["p"].values

    np.testing.assert_array_almost_equal(p_pypsa, p_pypower)

    #compare branch flows
    for item in ["lines", "transformers"]:
        df = getattr(network, item)
        pnl = getattr(network, item + "_t")

        for si in ["p0", "p1"]:
            si_pypsa = getattr(pnl, si).loc["now"].values
            si_pypower = results_df['branch'][si][df.original_index].values
            np.testing.assert_array_almost_equal(si_pypsa, si_pypower)
Example #14
def n():
    csv_folder = os.path.join(
        os.path.dirname(__file__),
        "..",
        "examples",
        "ac-dc-meshed",
        "ac-dc-data"
    )
    return pypsa.Network(csv_folder)
Example #15
def network():
    csv_folder_name = os.path.join(
        os.path.dirname(__file__),
        "..",
        "examples",
        "scigrid-de",
        "scigrid-with-load-gen-trafos",
    )
    return pypsa.Network(csv_folder_name)
Example #16
def solve_ED(t, adv_index, temp, schedule):
    network = pypsa.Network()
    network.set_snapshots(range(1))

    for i in range(n_buses):
        network.add("Bus", "bus {}".format(i))

    for i in range(n_lines):
        network.add("Line",
                    "line {}".format(i),
                    bus0="bus %d" % line_matrix[i, 0],
                    bus1="bus %d" % line_matrix[i, 1],
                    x=0.0001,
                    s_nom=line_capacity[i])

    for i in range(n_gen):
        if schedule[i] == 1:
            network.add("Generator",
                        "gen {}".format(i),
                        bus="bus {}".format(np.int(generator[i, 0] - 1)),
                        committable=True,
                        marginal_cost=marginal_costs[i, 0],
                        p_min_pu=0.1,
                        p_nom=capacity[i])

    for i in range(n_buses):
        network.add("Load",
                    "load {}".format(i),
                    bus="bus {}".format(i),
                    p_set=(rows[t:t + 1, 1] * 1.03 * load_ratio[i]).reshape(
                        -1, ))
        network.add("Generator",
                    "gen_adv {}".format(i),
                    bus="bus {}".format(i),
                    marginal_cost=100000,
                    committable=True,
                    p_min_pu=0,
                    initial_status=1,
                    p_nom=100000)

    #print(network.buses)
    #print(network.lines)
    #print(network.generators)
    #print(network.loads)
    #print("Power Network Initialization finished")
    network.lopf(network.snapshots)
    generator_val = np.array(network.generators_t.p, dtype=float)
    line_flow = np.array(network.lines_t.p0, dtype=float)[0]
    generator_schedule = np.array(network.generators_t.status, dtype=float)
    print("Current schedule", np.shape(generator_schedule))
    #print("Current dispatch", generator_val)
    print("Line flow", np.shape(line_flow))

    print("Generation", generator_val[:10])
    #print("Line flow", np.array(network.lines_t.p1, dtype=float)[0])
    return generator_schedule, generator_val, line_flow
Example #17
def solve_redispatch_workflow(c_rate=0.25):
    """
    Run the redispatch workflow for one network in the networks_redispatch folder.
    Used to compare the battery and no-battery scenarios for a single network
    (see the battery-run sketch following this example).
    """
    storage_ops = "load"

    folder = r'/cluster/home/wlaumen/Euler/pypsa-eur/networks_redispatch'
    filename = "elec_s300_220_ec_lcopt_1H-noex"

    path_n = folder + "/" + filename + ".nc"
    path_n_optim = folder + "/solved/" + filename + ".nc"
    # Define network and network_optim
    n = pypsa.Network(path_n)
    n_optim = pypsa.Network(path_n_optim)

    # Scenario parameters
    flex_potential = 300
    plant_potential = 100

    # Run redispatch w/o batteries & export files
    export_path = folder + r"/results"
    n_d, n_rd = redispatch_workflow(n=n,
                                    n_optim=n_optim,
                                    c_rate=c_rate,
                                    storage_ops="none",
                                    flex_potential=flex_potential,
                                    plant_potential=plant_potential,
                                    scenario="no bat",
                                    ratio_wind=2.2,
                                    ratio_pv=1.38,
                                    lcos=0)

    # export solved dispatch & redispatch workflow as well as objective value list
    n_d.export_to_netcdf(path=export_path + r"/dispatch/" + filename +
                         "_2018_1.nc",
                         export_standard_types=False,
                         least_significant_digit=None)
    n_rd.export_to_netcdf(path=export_path + r"/redispatch/" + filename +
                          "_2018_1.nc",
                          export_standard_types=False,
                          least_significant_digit=None)
    gc.collect()
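For the battery/no-battery comparison mentioned in the docstring, a minimal sketch of a battery-scenario counterpart is given below. It reuses the paths, potentials and ratios from solve_redispatch_workflow above; the function name, scenario="bat", storage_ops="load" and the export file name are assumptions, not taken from the original.

def solve_redispatch_workflow_bat(c_rate=0.25):
    """Hypothetical battery-scenario counterpart of solve_redispatch_workflow (sketch only)."""
    folder = r'/cluster/home/wlaumen/Euler/pypsa-eur/networks_redispatch'
    filename = "elec_s300_220_ec_lcopt_1H-noex"
    n = pypsa.Network(folder + "/" + filename + ".nc")
    n_optim = pypsa.Network(folder + "/solved/" + filename + ".nc")

    # Same call signature as above; the scenario label and storage_ops value are assumed.
    n_d, n_rd = redispatch_workflow(n=n,
                                    n_optim=n_optim,
                                    c_rate=c_rate,
                                    storage_ops="load",
                                    flex_potential=300,
                                    plant_potential=100,
                                    scenario="bat",
                                    ratio_wind=2.2,
                                    ratio_pv=1.38,
                                    lcos=0)

    export_path = folder + r"/results"
    n_rd.export_to_netcdf(path=export_path + r"/redispatch/" + filename + "_2018_1_bat.nc",
                          export_standard_types=False,
                          least_significant_digit=None)
    gc.collect()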
Example #18
def create_info_df(data):

    n = pypsa.Network(data)
    d = {
        'buses': np.array(n.buses.index),
        'control': np.array(n.buses.control)
    }
    df = pd.DataFrame(data=d)

    return df
Example #19
def test_tiny_infeasible():
    n = pypsa.Network(snapshots=range(2))
    n.investment_periods = [2020, 2030]
    n.add("Bus", 1)
    n.add(
        "Generator", 1, bus=1, p_nom_extendable=True, capital_cost=10, build_year=2030
    )
    n.add("Load", 1, bus=1, p_set=100)
    with pytest.raises(ValueError):
        status, cond = n.lopf(pyomo=False, multi_investment_periods=True)
Example #20
def make_csv():

    scenarios = snakemake.config["run_settings"]["scenario"]

    columns = pd.MultiIndex.from_product(
        (scenarios, snakemake.config["run_settings"]["country"]),
        names=["scenario", "country"])

    stats = pd.DataFrame(columns=columns, dtype=float)

    for scenario in scenarios:
        for ct in snakemake.config["run_settings"]["country"]:
            print(scenario, ct)
            network = pypsa.Network("{}{}-{}.nc".format(
                snakemake.config["results_dir"], ct, scenario))
            stats.at["cost", (scenario,
                              ct)] = network.buses_t.marginal_price.mean()[ct]

            for g in ["wind", "solar"]:
                stats.at[g, (scenario,
                             ct)] = network.generators.p_nom_opt[ct + " " + g]
                stats.at["cost-" + g,
                         (scenario,
                          ct)] = (network.generators.p_nom_opt *
                                  network.generators.capital_cost
                                  )[ct + " " +
                                    g] / network.snapshot_weightings.sum()

            for ls, ll in [("charger", "battery charge"),
                           ("elec", "H2 electrolysis"), ("fc", "H2 to power")]:
                stats.at[ls, (scenario,
                              ct)] = network.links.p_nom_opt[ct + " " + ll]
                stats.at["cost-" + ls, (scenario, ct)] = (
                    network.links.p_nom_opt * network.links.capital_cost
                )[ct + " " + ll] / network.snapshot_weightings.sum()

            for es, el in [("batt", "battery storage"), ("H2", "H2 storage")]:
                stats.at[es, (scenario,
                              ct)] = network.stores.e_nom_opt[ct + " " + el]
                stats.at["cost-" + es, (scenario, ct)] = (
                    network.stores.e_nom_opt * network.stores.capital_cost
                )[ct + " " + el] / network.snapshot_weightings.sum()

            available = network.generators_t.p_max_pu.multiply(
                network.generators.p_nom_opt).sum()
            used = network.generators_t.p.sum()
            curtailment = (available - used) / available
            load = network.loads_t.p.sum().sum()
            supply = available / load
            stats.loc["wcurt", (scenario, ct)] = curtailment[ct + " wind"]
            stats.loc["scurt", (scenario, ct)] = curtailment[ct + " solar"]
            stats.loc["wsupply", (scenario, ct)] = supply[ct + " wind"]
            stats.loc["ssupply", (scenario, ct)] = supply[ct + " solar"]

    stats.to_csv(snakemake.output[0])
Example #21
def solve_all_redispatch_workflows(c_rate=0.25, flex_share=0.1):
    """
    Run the redispatch workflow for all networks in the networks_redispatch folder.
    Used to compare multiple historic redispatch networks and check whether the redispatch
    changes drastically between them; all runs are without batteries (see the driver
    sketch following this example).
    """

    folder = r'/cluster/home/wlaumen/Euler/pypsa-eur/networks_redispatch'
    for filepath in glob.iglob(folder + '/*.nc'):
        filename = filepath.split('/')[-1].split(".")[0]
        print(filename + "\n\n\n\n\n\n")
        path_n = filepath
        path_n_optim = folder + "/solved/" + filename + ".nc"
        # Define network and network_optim
        n = pypsa.Network(path_n)
        n_optim = pypsa.Network(path_n_optim)

        # Run redispatch w/o batteries & export files
        # n_d, n_rd = redispatch_workflow(n, n_optim, scenario="no bat", ratio_wind = 1, ratio_pv = 1)
        # # export solved dispatch & redispatch workflow as well as objective value list
        # export_path = folder + r"/results"
        # n_d.export_to_netcdf(path=export_path + r"/dispatch/" + filename + "_1wind1sol.nc", export_standard_types=False, least_significant_digit=None)
        # n_rd.export_to_netcdf(path=export_path + r"/redispatch/" + filename + "_1wind1sol.nc", export_standard_types=False, least_significant_digit=None)
        # gc.collect()

        n_d, n_rd = redispatch_workflow(n,
                                        n_optim,
                                        scenario="no bat",
                                        ratio_wind=2.1,
                                        ratio_pv=1.38)
        # export solved dispatch & redispatch workflow as well as objective value list
        export_path = folder + r"/results"
        n_d.export_to_netcdf(path=export_path + r"/dispatch/" + filename +
                             "_25wind14sol.nc",
                             export_standard_types=False,
                             least_significant_digit=None)
        n_rd.export_to_netcdf(path=export_path + r"/redispatch/" + filename +
                              "_25wind14sol.nc",
                              export_standard_types=False,
                              least_significant_digit=None)
        gc.collect()
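A minimal driver sketch for the batch function above, assuming it is run as a script in the same module. Note that in the snippet above c_rate and flex_share are accepted but not forwarded to redispatch_workflow, so the values here are illustrative only.

if __name__ == "__main__":
    # Hypothetical sweep over C-rates; adjust to the scenarios of interest.
    for c_rate in (0.25, 0.5):
        solve_all_redispatch_workflows(c_rate=c_rate, flex_share=0.1)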
Example #22
def network_mi():
    csv_folder_name = os.path.join(
        os.path.dirname(__file__),
        "..",
        "examples",
        "ac-dc-meshed",
        "ac-dc-data",
    )
    n = pypsa.Network(csv_folder_name)
    n.snapshots = pd.MultiIndex.from_product([['first'], n.snapshots])
    n.generators_t.p.loc[:, :] = np.random.rand(*n.generators_t.p.shape)
    return n
Example #23
 def __init__(self):
     G = pd.read_csv('bus_config/Ybus_30_real', header=None)
     B = pd.read_csv('bus_config/Ybus_30_imag', header=None)
     self.network = pypsa.Network()
     ppc = case30()
     self.network.import_from_pypower_ppc(ppc)
     self.network.pf()
     self.G = G.values
     self.B = B.values
     self.num_bus = 30
     self.num_lines = 41
     self.bus_num = np.array(self.network.lines[['bus0', 'bus1']].values)
Example #24
def test_lopf():

    csv_folder_name = "../examples/ac-dc-meshed/ac-dc-data"

    network = pypsa.Network(csv_folder_name=csv_folder_name)

    results_folder_name = os.path.join(csv_folder_name, "results-lopf")

    network_r = pypsa.Network(csv_folder_name=results_folder_name)

    #test results were generated with GLPK; solution should be unique,
    #so other solvers should not differ (tested with cbc and gurobi)
    solver_name = "cbc"

    snapshots = network.snapshots

    for formulation, free_memory in product(
        ["angles", "cycles", "kirchhoff", "ptdf"],
        [{}, {"pypsa"}, {"pypsa", "pyomo-hack"}]):
        network.lopf(snapshots=snapshots,
                     solver_name=solver_name,
                     formulation=formulation,
                     free_memory=free_memory)
        print(network.generators_t.p.loc[:, network.generators.index])
        print(network_r.generators_t.p.loc[:, network.generators.index])

        np.testing.assert_array_almost_equal(
            network.generators_t.p.loc[:, network.generators.index],
            network_r.generators_t.p.loc[:, network.generators.index],
            decimal=4)

        np.testing.assert_array_almost_equal(
            network.lines_t.p0.loc[:, network.lines.index],
            network_r.lines_t.p0.loc[:, network.lines.index],
            decimal=4)

        np.testing.assert_array_almost_equal(
            network.links_t.p0.loc[:, network.links.index],
            network_r.links_t.p0.loc[:, network.links.index],
            decimal=4)
Example #25
def test_lpf():
    csv_folder_name = os.path.join(os.path.dirname(__file__), "..", "examples",
                                   "ac-dc-meshed", "ac-dc-data")

    network = pypsa.Network(csv_folder_name)

    results_folder_name = os.path.join(csv_folder_name, "results-lpf")

    network_r = pypsa.Network(results_folder_name)

    for snapshot in network.snapshots[:2]:
        network.lpf(snapshot)

    np.testing.assert_array_almost_equal(
        network.generators_t.p[network.generators.index].iloc[:2],
        network_r.generators_t.p[network.generators.index].iloc[:2])
    np.testing.assert_array_almost_equal(
        network.lines_t.p0[network.lines.index].iloc[:2],
        network_r.lines_t.p0[network.lines.index].iloc[:2])
    np.testing.assert_array_almost_equal(
        network.links_t.p0[network.links.index].iloc[:2],
        network_r.links_t.p0[network.links.index].iloc[:2])


    network.lpf(snapshots=network.snapshots)

    np.testing.assert_array_almost_equal(
        network.generators_t.p[network.generators.index],
        network_r.generators_t.p[network.generators.index])
    np.testing.assert_array_almost_equal(
        network.lines_t.p0[network.lines.index],
        network_r.lines_t.p0[network.lines.index])
    np.testing.assert_array_almost_equal(
        network.links_t.p0[network.links.index],
        network_r.links_t.p0[network.links.index])
Example #26
def network_mi():
    csv_folder_name = os.path.join(
        os.path.dirname(__file__),
        "..",
        "examples",
        "ac-dc-meshed",
        "ac-dc-data",
    )
    n = pypsa.Network(csv_folder_name)
    n.snapshots = pd.MultiIndex.from_product([[2013], n.snapshots])
    gens_i = n.generators.index
    n.generators_t.p[gens_i] = np.random.rand(len(n.snapshots), len(gens_i))
    return n
Example #27
def test_sclopf():
    csv_folder_name = os.path.join(
        os.path.dirname(__file__),
        "..",
        "examples",
        "scigrid-de",
        "scigrid-with-load-gen-trafos",
    )

    n = pypsa.Network(csv_folder_name)

    # test results were generated with GLPK and other solvers may differ

    # There are some infeasibilities without line extensions
    for line_name in ["316", "527", "602"]:
        n.lines.loc[line_name, "s_nom"] = 1200

    # choose the contingencies
    branch_outages = n.lines.index[:2]

    objectives = []
    for pyomo in [True, False]:

        n.sclopf(
            n.snapshots[0],
            branch_outages=branch_outages,
            pyomo=pyomo,
            solver_name=solver_name,
        )

        # For the PF, set the P to the optimised P
        n.generators_t.p_set = n.generators_t.p.copy()
        n.generators.loc[:, "p_set_t"] = True
        n.storage_units_t.p_set = n.storage_units_t.p.copy()
        n.storage_units.loc[:, "p_set_t"] = True

        # Check no lines are overloaded with the linear contingency analysis

        p0_test = n.lpf_contingency(n.snapshots[0],
                                    branch_outages=branch_outages)

        # check loading as per unit of s_nom in each contingency

        max_loading = (abs(p0_test.divide(n.passive_branches().s_nom,
                                          axis=0)).describe().loc["max"])

        arr_equal(max_loading, np.ones((len(max_loading))), decimal=4)

        objectives.append(n.objective)

    equal(*objectives, decimal=1)
Example #28
def mga_solve(base_net_dir, config, main_output_dir, epsilons):

    for epsilon in epsilons:

        # Minimizing transmission
        output_dir = f"{main_output_dir}min_eps{epsilon}/"
        # Compute and save results
        if not isdir(output_dir):
            makedirs(output_dir)

        net = pypsa.Network()
        net.import_from_csv_folder(base_net_dir)
        net.epsilon = epsilon
        net.lopf(solver_name=config["solver"],
                 solver_logfile=f"{output_dir}solver.log",
                 solver_options=config["solver_options"],
                 extra_functionality=min_transmission,
                 skip_objective=True,
                 pyomo=False)

        net.export_to_csv_folder(output_dir)

        # Maximizing transmission
        output_dir = f"{main_output_dir}max_eps{epsilon}/"
        # Compute and save results
        if not isdir(output_dir):
            makedirs(output_dir)

        net = pypsa.Network()
        net.import_from_csv_folder(base_net_dir)
        net.epsilon = epsilon
        net.lopf(solver_name=config["solver"],
                 solver_logfile=f"{output_dir}solver.log",
                 solver_options=config["solver_options"],
                 extra_functionality=max_transmission,
                 skip_objective=True,
                 pyomo=False)
        net.export_to_csv_folder(output_dir)
Example #29
def load_network(import_name=None, custom_components=None):
    """
    Helper for importing a pypsa.Network with additional custom components.

    Parameters
    ----------
    import_name : str
        As in pypsa.Network(import_name)
    custom_components : dict
        Dictionary listing custom components.
        For using ``snakemake.config["override_components"]``
        in ``config.yaml`` define:

        .. code:: yaml

            override_components:
                ShadowPrice:
                    component: ["shadow_prices","Shadow price for a global constraint.",np.nan]
                    attributes:
                    name: ["string","n/a","n/a","Unique name","Input (required)"]
                    value: ["float","n/a",0.,"shadow value","Output"]

    Returns
    -------
    pypsa.Network
    """
    import pypsa
    from pypsa.descriptors import Dict

    override_components = None
    override_component_attrs = None

    if custom_components is not None:
        override_components = pypsa.components.components.copy()
        override_component_attrs = Dict(
            {k: v.copy() for k, v in pypsa.components.component_attrs.items()}
        )
        for k, v in custom_components.items():
            override_components.loc[k] = v["component"]
            override_component_attrs[k] = pd.DataFrame(
                columns=["type", "unit", "default", "description", "status"]
            )
            for attr, val in v["attributes"].items():
                override_component_attrs[k].loc[attr] = val

    return pypsa.Network(
        import_name=import_name,
        override_components=override_components,
        override_component_attrs=override_component_attrs,
    )
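A hedged usage sketch for the helper above: the custom_components dictionary mirrors the ShadowPrice example from the docstring, and the network file name is a placeholder, not an actual file.

import numpy as np

# The three "component" entries correspond to the columns of pypsa.components.components
# (list_name, description, type); "attributes" maps attribute names to their metadata rows.
custom = {
    "ShadowPrice": {
        "component": ["shadow_prices", "Shadow price for a global constraint.", np.nan],
        "attributes": {
            "name": ["string", "n/a", "n/a", "Unique name", "Input (required)"],
            "value": ["float", "n/a", 0.0, "shadow value", "Output"],
        },
    }
}

n = load_network(import_name="elec.nc", custom_components=custom)  # placeholder file name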
Example #30
 def __init__(self):
     G = pd.read_csv('bus_config/Ybus_14_real', header=None)
     B = pd.read_csv('bus_config/Ybus_14_imag', header=None)
     self.network = pypsa.Network()
     ppc = case14()
     self.network.import_from_pypower_ppc(ppc)
     self.network.pf()
     self.G = G.values
     self.B = B.values
     self.shunt_b = np.zeros(14)
     self.shunt_b[8] = 0.19
     self.num_bus = 14
     self.num_lines = 17
     self.bus_num = np.array(self.network.lines[['bus0', 'bus1']].values)