Example #1
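These examples are test functions collected from a grid-data test suite and are shown without their module header. A minimal sketch of the imports they appear to rely on, with names inferred from usage (sb is assumed to be the simbench package, and the csv converter helpers are assumed to come from it):

# Assumed module header (sketch; names inferred from how they are used below).
from copy import deepcopy
import logging

import numpy as np
import pandas as pd
from packaging import version

import pandapower as pp
import pandapower.networks as nw
import simbench as sb
from simbench import csv2pp, pp2csv, read_csv_data, convert_parallel_branches

logger = logging.getLogger(__name__)
# _net_for_testing, test_network_path and test_output_folder_path are
# fixtures/constants of the original test module and are not reconstructed here.
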
def test_generate_no_sw_variant():
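    # expected behaviour (inferred from the assertions below): the no-switch
    # variant drops buses 1 and 6, leaves lines and trafos untouched and keeps
    # only the switches listed at the end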
    net_orig = _net_for_testing()
    net = deepcopy(net_orig)
    sb.generate_no_sw_variant(net)
    assert (net.bus.name.values ==
            net_orig.bus.name.loc[net.bus.index.difference([1, 6])].values).all()
    assert pp.dataframes_equal(net.line, net_orig.line)
    assert pp.dataframes_equal(net.trafo, net_orig.trafo)
    assert pp.dataframes_equal(net.switch, net_orig.switch.loc[[6, 9, 10, 11, 13, 14]])

Example #2

def test_test_network():
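    # round trip: load the csv test network into pandapower, sanity-check the
    # P/Q limits, export it back to csv and compare with the original files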
    net = csv2pp(test_network_path, no_generic_coord=True)

    # check that min/max limits are consistent (min_* <= max_*)
    for elm in pp.pp_elements(bus=False, branch_elements=False, other_elements=False):
        if "min_p_mw" in net[elm].columns and "max_p_mw" in net[elm].columns:
            isnull = net[elm][["min_p_mw", "max_p_mw"]].isnull().any(axis=1)
            assert (net[elm].min_p_mw[~isnull] <= net[elm].max_p_mw[~isnull]).all()
        if "min_q_mvar" in net[elm].columns and "max_q_mvar" in net[elm].columns:
            isnull = net[elm][["min_q_mvar", "max_q_mvar"]].isnull().any(axis=1)
            assert (net[elm].min_q_mvar[~isnull] <= net[elm].max_q_mvar[~isnull]).all()

    pp2csv(net, test_output_folder_path, export_pp_std_types=False, drop_inactive_elements=False)

    # --- test equality of exported csv data and given csv data
    csv_orig = read_csv_data(test_network_path, ";")
    csv_exported = read_csv_data(test_output_folder_path, ";")

    all_eq = True
    for tablename in csv_orig.keys():
        try:
            eq = pp.dataframes_equal(csv_orig[tablename], csv_exported[tablename], tol=1e-7)
            if not eq:
                logger.error("csv_orig['%s'] and csv_exported['%s'] differ." % (tablename,
                                                                                tablename))
                logger.error(csv_orig[tablename].head())
                logger.error(csv_exported[tablename].head())
                logger.error(csv_orig[tablename].dtypes)
                logger.error(csv_exported[tablename].dtypes)
        except ValueError:
            eq = False
            logger.error("dataframes_equal did not work for %s." % tablename)
        all_eq &= eq
    assert all_eq

Example #3

def test_reindex_dict_dataframes():
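    # reindex_dict_dataframes() is expected to reset each dataframe in the dict
    # to a default 0..n-1 index, so both entries should equal df0 afterwards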
    df0 = pd.DataFrame([[0], [1]])
    df1 = pd.DataFrame([[0], [1]], index=[2, 4])
    df2 = pd.DataFrame([[0], [1]], index=[1, 0])
    dict_ = {1: df1, 2: df2}
    expected = {1: df0, 2: df0}
    sb.reindex_dict_dataframes(dict_)
    for k in dict_.keys():
        assert pp.dataframes_equal(dict_[k], expected[k])

Example #4

def test_merge_and_split_nets():
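    # merging two solved nets and selecting each half back out of the merged
    # net should reproduce the original power-flow results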
    net1 = nw.mv_oberrhein()
    # TODO there are some geodata values in oberrhein without corresponding lines
    net1.line_geodata.drop(set(net1.line_geodata.index) - set(net1.line.index), inplace=True)
    n1 = len(net1.bus)
    pp.runpp(net1)
    net2 = nw.create_cigre_network_mv()
    pp.runpp(net2)
    net = pp.merge_nets(net1, net2)
    pp.runpp(net)
    assert np.allclose(net.res_bus.vm_pu.iloc[:n1].values, net1.res_bus.vm_pu.values)
    assert np.allclose(net.res_bus.vm_pu.iloc[n1:].values, net2.res_bus.vm_pu.values)

    net3 = pp.select_subnet(net, net.bus.index[:n1], include_results=True)
    assert pp.dataframes_equal(net3.res_bus[["vm_pu"]], net1.res_bus[["vm_pu"]])

    net4 = pp.select_subnet(net, net.bus.index[n1:], include_results=True)
    assert np.allclose(net4.res_bus.vm_pu.values, net2.res_bus.vm_pu.values)

Example #5

def test_select_subnet():
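    # select_subnet() should copy the given buses together with the connected
    # elements and, in particular, keep the switches of all et types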
    # This network has switches of type 'l' and 't'
    net = nw.create_cigre_network_mv()

    # Do nothing
    same_net = pp.select_subnet(net, net.bus.index)
    assert pp.dataframes_equal(net.bus, same_net.bus)
    assert pp.dataframes_equal(net.switch, same_net.switch)
    assert pp.dataframes_equal(net.trafo, same_net.trafo)
    assert pp.dataframes_equal(net.line, same_net.line)
    assert pp.dataframes_equal(net.load, same_net.load)
    assert pp.dataframes_equal(net.ext_grid, same_net.ext_grid)

    # Remove everything
    empty = pp.select_subnet(net, set())
    assert len(empty.bus) == 0
    assert len(empty.line) == 0
    assert len(empty.load) == 0
    assert len(empty.trafo) == 0
    assert len(empty.switch) == 0
    assert len(empty.ext_grid) == 0

    # Should keep all trafo ('t') switches when buses are included
    hv_buses = set(net.trafo.hv_bus)
    lv_buses = set(net.trafo.lv_bus)
    trafo_switch_buses = set(net.switch[net.switch.et == 't'].bus)
    subnet = pp.select_subnet(net, hv_buses | lv_buses | trafo_switch_buses)
    assert net.switch[net.switch.et == 't'].index.isin(subnet.switch.index).all()

    # Should keep all line ('l') switches when buses are included
    from_bus = set(net.line.from_bus)
    to_bus = set(net.line.to_bus)
    line_switch_buses = set(net.switch[net.switch.et == 'l'].bus)
    subnet = pp.select_subnet(net, from_bus | to_bus | line_switch_buses)
    assert net.switch[net.switch.et == 'l'].index.isin(subnet.switch.index).all()

    # This network has switches of type 'b'
    net2 = nw.create_cigre_network_lv()

    # Should keep all bus-to-bus ('b') switches when buses are included
    buses = set(net2.switch[net2.switch.et == 'b'].bus)
    elements = set(net2.switch[net2.switch.et == 'b'].element)
    subnet = pp.select_subnet(net2, buses | elements)
    assert net2.switch[net2.switch.et == 'b'].index.isin(subnet.switch.index).all()

Example #6

def test_merge_dataframes():
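    # merge_dataframes() combines dataframes with overlapping rows and columns;
    # "keep" decides which duplicate wins and column_to_sort sorts by timestamp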
    df1 = pd.DataFrame([["01.01.2016 00:00:00", 5, 1, "str"],
                        ["03.01.2016 01:00:00", 4, 2, "hallo"],
                        ["04.01.2016 10:00:00", 3, 3, 5]],
                       columns=["time", "B", "A", "C"])

    df2 = pd.DataFrame([["01.02.2016 00:00:00", -1, 3.2, 2],
                        ["01.01.2016 00:00:00", 5, 4, 2.1],
                        ["02.01.2016 00:30:15", 8, 7, 3],
                        ["02.02.2016 13:45:00", 3, 1, 4]],
                       columns=["time", "A", "B", "D"])

    df3 = pd.DataFrame([["01.01.2016 00:00:00", 9, 6, 8.1, 3]],
                       columns=["time", "A", "B", "D", "C"])

    # ordered index and column, df1 with precedence, time as index
    return1 = sb.merge_dataframes([df1, df2],
                                  column_to_sort="time",
                                  keep="first",
                                  index_time_str="%d.%m.%Y %H:%M:%S")
    res1 = pd.DataFrame([["01.01.2016 00:00:00", 1, 5, "str", 2.1],
                         ["02.01.2016 00:30:15", 8, 7, None, 3],
                         ["03.01.2016 01:00:00", 2, 4, "hallo", None],
                         ["04.01.2016 10:00:00", 3, 3, 5, None],
                         ["01.02.2016 00:00:00", -1, 3.2, None, 2],
                         ["02.02.2016 13:45:00", 3, 1, None, 4]],
                        columns=["time", "A", "B", "C", "D"])
    assert pp.dataframes_equal(return1, res1)

    # ordered index and column, df2 with precedence, time as index
    return2 = sb.merge_dataframes([df1, df2],
                                  column_to_sort="time",
                                  keep="last",
                                  index_time_str="%d.%m.%Y %H:%M:%S")
    res2 = deepcopy(res1)
    res2.loc[0, ["A", "B"]] = df2.loc[1, ["A", "B"]]
    assert pp.dataframes_equal(return2, res2)

    # --- changed input
    new_df1_idx = [1, 3, 4]
    new_df2_idx = [11, 1, 2, 12]
    unsorted_index = new_df1_idx + [11, 2, 12]
    unsorted_columns = list(df1.columns) + ["D"]
    df1.index = new_df1_idx
    df2.index = new_df2_idx

    # ordered index and column, df1 with precedence, no extra index
    return5 = sb.merge_dataframes([df1, df2], keep="first")
    res5 = deepcopy(res1)
    if version.parse(pd.__version__) >= version.parse("0.21.0"):
        res5 = res5.reindex(columns=["A", "B", "C", "D", "time"])
    else:
        res5 = res5.reindex_axis(["A", "B", "C", "D", "time"], axis=1)
    res5.index = [1, 2, 3, 4, 11, 12]
    assert pp.dataframes_equal(return5, res5)

    # ordered index and column, df2 with precedence, no extra index
    return6 = sb.merge_dataframes([df1, df2], keep="last")
    res6 = deepcopy(res5)
    res6.loc[1, ["A", "B"]] = df2.loc[1, ["A", "B"]]
    assert pp.dataframes_equal(return6, res6)

    # beware idx order, df1 with precedence, no extra index
    return7 = sb.merge_dataframes([df1, df2], keep="first", sort=False)
    try:
        res7 = deepcopy(res5).reindex(unsorted_index, columns=unsorted_columns)
    except TypeError:  # legacy for pandas <0.21
        res7 = deepcopy(res5).reindex_axis(unsorted_index)
        res7 = res7.reindex_axis(unsorted_columns, axis=1)
    assert pp.dataframes_equal(return7, res7)

    # beware idx order, df1 with precedence, no extra index
    return8 = sb.merge_dataframes([df1, df2], keep="last", sort=False)
    try:
        res8 = deepcopy(res6).reindex(unsorted_index, columns=unsorted_columns)
    except TypeError:  # legacy for pandas <0.21
        res8 = deepcopy(res6).reindex_axis(unsorted_index)
        res8 = res8.reindex_axis(unsorted_columns, axis=1)
    assert pp.dataframes_equal(return8, res8)

    # merge 3 dfs while keeping first duplicates
    return9 = sb.merge_dataframes([df1, df2, df3],
                                  keep="first",
                                  column_to_sort="time",
                                  index_time_str="%d.%m.%Y %H:%M:%S")
    assert pp.dataframes_equal(return9, res1)

    # merge 3 dfs while keeping last duplicates
    return10 = sb.merge_dataframes([df1, df2, df3],
                                   keep="last",
                                   column_to_sort="time",
                                   index_time_str="%d.%m.%Y %H:%M:%S")
    res10 = deepcopy(res1)
    df3_col_except_time = df3.columns.difference(["time"])
    res10.loc[0, df3_col_except_time] = df3.loc[0, df3_col_except_time].values
    assert pp.dataframes_equal(return10, res10)

    # merge 3 dfs while keeping all duplicates
    return11 = sb.merge_dataframes([df1, df2, df3], keep="all")
    assert return11.shape == (len(df1) + len(df2) + len(df3),
                              len(df1.columns.union(df2.columns.union(df3.columns))))

Example #7

def test_convert_to_parallel_branches():
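    # convert_parallel_branches() should merge branches that differ only in
    # ignored columns (e.g. name) into single entries with parallel > 1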
    # create test grid
    net = pp.create_empty_network()
    pp.create_bus(net, 110)
    pp.create_buses(net, 2, 20)

    # --- transformers & corresponding switches
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 1")
    pp.create_switch(net, 1, 0, "t", name="Tr-Switch 1")
    # only name changed:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 2")
    pp.create_switch(net, 1, 1, "t", name="Tr-Switch 2")
    # only max_loading changed:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 1", max_loading_percent=50)
    pp.create_switch(net, 1, 2, "t", name="Tr-Switch 1")
    # only switch position changed:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 1")
    pp.create_switch(net, 1, 3, "t", closed=False, name="Tr-Switch 1")
    # only switch missing:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 1")
    # only name and std_type changed:
    pp.create_transformer(net, 0, 1, "25 MVA 110/20 kV", name="Trafo 3")
    pp.create_switch(net, 1, 5, "t", name="Tr-Switch 3")
    # only name changed and switch added:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 4")
    pp.create_switch(net, 1, 6, "t", name="Tr-Switch 4a")
    pp.create_switch(net, 0, 6, "t", name="Tr-Switch 4b")
    # only name and parallel changed:
    pp.create_transformer(net, 0, 1, "40 MVA 110/20 kV", name="Trafo 5", parallel=2)
    pp.create_switch(net, 1, 7, "t", name="Tr-Switch 5")

    # --- lines & corresponding switches
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 1")
    pp.create_switch(net, 2, 0, "l", name="L-Switch 1")
    # only name changed:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 2")
    pp.create_switch(net, 2, 1, "l", name="L-Switch 2")
    # only max_loading changed:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 1", max_loading_percent=50)
    pp.create_switch(net, 2, 2, "l", name="L-Switch 1")
    # only switch position changed:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 1")
    pp.create_switch(net, 2, 3, "l", closed=False, name="L-Switch 1")
    # only switch missing:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 1")
    # only name and std_type changed:
    pp.create_line(net, 1, 2, 1.11, "48-AL1/8-ST1A 20.0", name="Line 3")
    pp.create_switch(net, 2, 5, "l", name="L-Switch 3")
    # only name changed and switch added:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 4")
    pp.create_switch(net, 2, 6, "l", name="L-Switch 4a")
    pp.create_switch(net, 1, 6, "l", name="L-Switch 4b")
    # only name and parallel changed:
    pp.create_line(net, 1, 2, 1.11, "94-AL1/15-ST1A 20.0", name="Line 5", parallel=2)
    pp.create_switch(net, 2, 7, "l", name="L-Switch 5")
    # only name and from_bus <-> to_bus changed:
    pp.create_line(net, 2, 1, 1.11, "94-AL1/15-ST1A 20.0", name="Line 6")
    pp.create_switch(net, 2, 8, "l", name="L-Switch 6")

    net1 = deepcopy(net)
    net2 = deepcopy(net)
    net3 = deepcopy(net)

    # complete
    convert_parallel_branches(net1, multiple_entries=False)
    for elm in ["trafo", "line"]:
        assert sorted(net1[elm].index) == [0, 2, 3, 4, 5, 6]
    assert list(net1["trafo"].parallel.values) == [4] + [1]*5
    assert list(net1["line"].parallel.values) == [5] + [1]*5

    # only line
    convert_parallel_branches(net2, multiple_entries=False, elm_to_convert=["line"])
    assert pp.dataframes_equal(net2.line, net1.line)
    assert pp.dataframes_equal(net2.trafo, net.trafo)

    # additionally exclude "max_loading_percent" when identifying parallels
    convert_parallel_branches(net3, multiple_entries=False,
                              exclude_cols_from_parallel_finding=[
                                  "name", "parallel", "max_loading_percent"])
    for elm in ["trafo", "line"]:
        assert sorted(net3[elm].index) == [0, 3, 4, 5, 6]
    assert list(net3["trafo"].parallel.values) == [5] + [1]*4
    assert list(net3["line"].parallel.values) == [6] + [1]*4

Example #8

def test_opf_task():
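    # opf_task() summarizes the OPF problem description: the available
    # flexibilities, the network constraints and which flexibilities lack costs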
    net = pp.create_empty_network()
    pp.create_buses(net, 6, [10, 10, 10, 0.4, 7, 7],
                    min_vm_pu=[0.9, 0.9, 0.88, 0.9, np.nan, np.nan])
    idx_ext_grid = 1
    pp.create_ext_grid(net, 0, max_q_mvar=80, min_p_mw=0, index=idx_ext_grid)
    pp.create_gen(net, 1, 10, min_q_mvar=-50, max_q_mvar=-10, min_p_mw=0, max_p_mw=60)
    pp.create_gen(net, 2, 8)
    pp.create_gen(net, 3, 5)
    pp.create_load(net, 3, 120, max_p_mw=8)
    pp.create_sgen(net, 1, 8, min_q_mvar=-50, max_q_mvar=-10, controllable=False)
    pp.create_sgen(net, 2, 8)
    pp.create_storage(net, 3, 2, 100, min_q_mvar=-10, max_q_mvar=-50, min_p_mw=0, max_p_mw=60,
                      controllable=True)
    pp.create_dcline(net, 4, 5, 0.3, 1e-4, 1e-2, 1.01, 1.02, min_q_from_mvar=-10,
                     min_q_to_mvar=-10)
    pp.create_line(net, 3, 4, 5, "122-AL1/20-ST1A 10.0", max_loading_percent=50)
    pp.create_transformer(net, 2, 3, "0.25 MVA 10/0.4 kV")

    # --- run and check opf_task()
    out1 = pp.opf_task(net, keep=True)
    assert out1["flexibilities_without_costs"] == "all"
    assert sorted(out1["flexibilities"].keys()) == [i1 + i2 for i1 in ["P", "Q"] for i2 in [
        "dcline", "ext_grid", "gen", "storage"]]
    for key, df in out1["flexibilities"].items():
        assert df.shape[0]
        if "gen" in key:
            assert df.shape[0] > 1
    assert out1["flexibilities"]["Pext_grid"].loc[0, "index"] == [1]
    assert np.isnan(out1["flexibilities"]["Pext_grid"].loc[0, "max"])
    assert out1["flexibilities"]["Pext_grid"].loc[0, "min"] == 0
    assert np.isnan(out1["flexibilities"]["Qext_grid"].loc[0, "min"])
    assert out1["flexibilities"]["Qext_grid"].loc[0, "max"] == 80
    assert sorted(out1["network_constraints"].keys()) == ["LOADINGline", "VMbus"]
    assert out1["network_constraints"]["VMbus"].shape[0] == 3

    # check delta_pq
    net.gen.loc[0, "min_p_mw"] = net.gen.loc[0, "max_p_mw"] - 1e-5
    out2 = pp.opf_task(net, delta_pq=1e-3, keep=True)
    assert out2["flexibilities"]["Pgen"].shape[0] == 1

    net.gen.loc[0, "min_p_mw"] = net.gen.loc[0, "max_p_mw"] - 1e-1
    out1["flexibilities"]["Pgen"].loc[0, "min"] = out1["flexibilities"]["Pgen"].loc[
                                                      0, "max"] - 1e-1
    out3 = pp.opf_task(net, delta_pq=1e-3, keep=True)
    for key in out3["flexibilities"]:
        assert pp.dataframes_equal(out3["flexibilities"][key], out1["flexibilities"][key])

    # check costs
    pp.create_poly_cost(net, idx_ext_grid, "ext_grid", 2)
    pp.create_poly_cost(net, 1, "gen", 1.7)
    pp.create_poly_cost(net, 0, "dcline", 2)
    pp.create_pwl_cost(net, 2, "gen", [[-1e9, 1, 3.1], [1, 1e9, 0.5]], power_type="q")
    out4 = pp.opf_task(net)
    for dict_key in ["flexibilities", "network_constraints"]:
        for key in out4[dict_key]:
            assert pp.dataframes_equal(out4[dict_key][key], out1[dict_key][key])
    assert isinstance(out4["flexibilities_without_costs"], dict)
    expected_elm_without_cost = ["gen", "storage"]
    assert sorted(out4["flexibilities_without_costs"].keys()) == expected_elm_without_cost
    for elm in expected_elm_without_cost:
        assert len(out4["flexibilities_without_costs"][elm]) == 1