Example #1
def test_add_column_from_node_to_elements():
    net = nw.create_cigre_network_mv("pv_wind")
    net.bus["subnet"] = ["subnet_%i" % i for i in range(net.bus.shape[0])]
    net.sgen["subnet"] = "already_given"
    net.switch["subnet"] = None
    net_orig = copy.deepcopy(net)

    branch_bus = ["from_bus", "lv_bus"]
    pp.add_column_from_node_to_elements(net, "subnet", False, branch_bus=branch_bus)

    def check_subnet_correctness(net, elements, branch_bus):
        for elm in elements:
            if "bus" in net[elm].columns:
                assert all(pp.compare_arrays(net[elm]["subnet"].values,
                                             np.array(["subnet_%i" % bus for bus in net[elm].bus])))
            elif branch_bus[0] in net[elm].columns:
                assert all(pp.compare_arrays(net[elm]["subnet"].values, np.array([
                        "subnet_%i" % bus for bus in net[elm][branch_bus[0]]])))
            elif branch_bus[1] in net[elm].columns:
                assert all(pp.compare_arrays(net[elm]["subnet"].values, np.array([
                        "subnet_%i" % bus for bus in net[elm][branch_bus[1]]])))

    check_subnet_correctness(net, pp.pp_elements(bus=False)-{"sgen"}, branch_bus)

    pp.add_column_from_node_to_elements(net_orig, "subnet", True, branch_bus=branch_bus)
    check_subnet_correctness(net_orig, pp.pp_elements(bus=False), branch_bus)
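
A condensed, hedged sketch of the call under test, stripped of the assertions (assuming the import aliases used throughout these examples, i.e. import pandapower as pp and import pandapower.networks as nw):

import pandapower as pp
import pandapower.networks as nw

net = nw.create_cigre_network_mv("pv_wind")
net.bus["subnet"] = ["subnet_%i" % i for i in range(net.bus.shape[0])]
net.sgen["subnet"] = "already_given"
net.switch["subnet"] = None
# copy the bus column "subnet" to the connected element tables;
# replace=False keeps values that are already set, e.g. in net.sgen
pp.add_column_from_node_to_elements(net, "subnet", False, branch_bus=["from_bus", "lv_bus"])
print(net.line[["from_bus", "subnet"]].head())
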
def test_test_network():
    net = csv2pp(test_network_path, no_generic_coord=True)

    # test min/max ratio
    for elm in pp.pp_elements(bus=False, branch_elements=False, other_elements=False):
        if "min_p_mw" in net[elm].columns and "max_p_mw" in net[elm].columns:
            isnull = net[elm][["min_p_mw", "max_p_mw"]].isnull().any(axis=1)
            assert (net[elm].min_p_mw[~isnull] <= net[elm].max_p_mw[~isnull]).all()
        if "min_q_mvar" in net[elm].columns and "max_q_mvar" in net[elm].columns:
            isnull = net[elm][["min_q_mvar", "max_q_mvar"]].isnull().any(axis=1)
            assert (net[elm].min_q_mvar[~isnull] <= net[elm].max_q_mvar[~isnull]).all()

    pp2csv(net, test_output_folder_path, export_pp_std_types=False, drop_inactive_elements=False)

    # --- test equality of exported csv data and given csv data
    csv_orig = read_csv_data(test_network_path, ";")
    csv_exported = read_csv_data(test_output_folder_path, ";")

    all_eq = True
    for tablename in csv_orig.keys():
        try:
            eq = pp.dataframes_equal(csv_orig[tablename], csv_exported[tablename], tol=1e-7)
            if not eq:
                logger.error("csv_orig['%s'] and csv_exported['%s'] differ." % (tablename,
                                                                                tablename))
                logger.error(csv_orig[tablename].head())
                logger.error(csv_exported[tablename].head())
                logger.error(csv_orig[tablename].dtypes)
                logger.error(csv_exported[tablename].dtypes)
        except ValueError:
            eq = False
            logger.error("dataframes_equal did not work for %s." % tablename)
        all_eq &= eq
    assert all_eq
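
The comparison above relies on pp.dataframes_equal with a numeric tolerance; a minimal standalone sketch of that check (values differing by less than tol count as equal):

import pandas as pd
import pandapower as pp

df1 = pd.DataFrame({"p_mw": [1.0, 2.0]})
df2 = pd.DataFrame({"p_mw": [1.0, 2.0 + 1e-9]})
# differences below the tolerance are ignored, so this prints True
print(pp.dataframes_equal(df1, df2, tol=1e-7))
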
def provide_voltLvl_col(net):
    """ This function provides 'voltLvl' column in pp_elements DataFrames (not in net.substation).
    """
    if "voltLvl" not in net.bus.columns or net.bus["voltLvl"].isnull().all():
        # set voltLvl considering vn_kv and substations
        net.bus["voltLvl"] = get_voltlvl(net.bus.vn_kv)
    else:  # fill voltLvl nan values with vn_kv information
        idx_nan = net.bus.index[net.bus.voltLvl.isnull()]
        net.bus.loc[idx_nan, "voltLvl"] = get_voltlvl(net.bus.vn_kv.loc[idx_nan])

    # --- provide voltLvl parameters for all elements
    # add voltLvl column from node to all elements but "trafo"
    pp.add_column_from_node_to_elements(net,
                                        "voltLvl",
                                        replace=False,
                                        elements=pp.pp_elements(bus=False) -
                                        {"trafo"},
                                        branch_bus=["to_bus", "hv_bus"])
    # add voltLvl column from node to trafo without verbose
    pp.add_column_from_node_to_elements(net,
                                        "voltLvl",
                                        replace=False,
                                        elements={"trafo"},
                                        branch_bus=["to_bus", "hv_bus"],
                                        verbose=False)
    pp.add_column_from_element_to_elements(net,
                                           "voltLvl",
                                           replace=False,
                                           elements=["measurement"])
    # correct voltLvl for trafos
    net.trafo["voltLvl"] = np.array(
        (net.bus.voltLvl.loc[net.trafo.hv_bus].values +
         net.bus.voltLvl.loc[net.trafo.lv_bus].values) / 2).astype(int)
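
A short standalone sketch of the voltage-level derivation implemented above (assuming get_voltlvl() is importable from this converter module and maps vn_kv values to integer level codes):

import numpy as np
import pandapower.networks as pn

net = pn.example_simple()
net.bus["voltLvl"] = get_voltlvl(net.bus.vn_kv)
# trafos receive the rounded mean of their hv- and lv-bus levels, as above
net.trafo["voltLvl"] = np.array(
    (net.bus.voltLvl.loc[net.trafo.hv_bus].values +
     net.bus.voltLvl.loc[net.trafo.lv_bus].values) / 2).astype(int)
print(net.trafo[["hv_bus", "lv_bus", "voltLvl"]])
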
Example #4
def callback_network(*args):
    labelTest.configure(text="Choose the node to display the information:")
    labelTest.configure(text="name: {}".format(variable.get()))
    net_name = "{}".format(variable.get())
    OptionList = list(pp.pp_elements())  # element table names; pp.pp_elements() takes no net argument
    opt = tk.OptionMenu(app, variable, *OptionList)
    opt.config(width=200, font=('Helvetica', 12))
    opt.pack(side="top")
Example #5
def all_voltlvl_idx(net, elms=None, include_empty_elms_dicts=False):
    """
    Wrapper function of voltlvl_idx() which returns, for every element in 'elms', a dict mapping
    each voltage level (the dict's keys) to a set of element indices (the dict's values).

    INPUT:
        **net** - the pandapower net

    OPTIONAL:
        **elms** (iterable) - names of elements which should be considered. If None, all pandapower
        elements are considered.

        **include_empty_elms_dicts** (bool, False) - If True, dicts of elements are also included
        even if the element tables are empty

    EXAMPLE:
        lvl_dicts = all_voltlvl_idx(net, ["bus"])
        print(lvl_dicts["bus"][3])  # could print a set of HV buses, such as {1, 2, 3}
        print(lvl_dicts["bus"][5])  # could print a set of MV buses, such as {4, 5, 6}
    """
    elms = elms if elms is not None else pp_elements()
    lvl_dicts = dict()
    for elm in elms:
        if net[elm].shape[0] or include_empty_elms_dicts:
            lvl_dicts[elm] = dict()

            if "trafo" not in elm:
                voltlvls = [1, 3, 5, 7]
                for lvl in voltlvls:
                    lvl_dicts[elm][lvl] = set(voltlvl_idx(net, elm, lvl))

            else:  # special handling for trafos and trafo3ws
                found_elm = set()
                for hv_lvl in [1, 3, 5, 7]:
                    lvl_dicts[elm][hv_lvl] = set(voltlvl_idx(net, elm, hv_lvl, "hv_bus")) & \
                        set(voltlvl_idx(net, elm, hv_lvl, "lv_bus"))
                    found_elm |= lvl_dicts[elm][hv_lvl]
                    if hv_lvl < 6:
                        lvl_dicts[elm][hv_lvl+1] = set(voltlvl_idx(net, elm, hv_lvl, "hv_bus")) & \
                            set(voltlvl_idx(net, elm, hv_lvl+2, "lv_bus"))
                        found_elm |= lvl_dicts[elm][hv_lvl + 1]

                other = set(net[elm].index) - found_elm
                if len(other):
                    bus_types = ["hv_bus", "lv_bus"] if elm == "trafo" else [
                        "hv_bus", "mv_bus", "lv_bus"
                    ]
                    for idx in other:
                        voltage_values = net.bus.vn_kv.loc[
                            net[elm][bus_types].loc[idx]].values
                        key = "-".join(get_voltlvl(voltage_values).astype(str))
                        if key not in lvl_dicts[elm].keys():
                            lvl_dicts[elm][key] = set()
                        lvl_dicts[elm][key] |= set([idx])

    return lvl_dicts
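
Building on the docstring example, a short sketch of how the returned nested dicts are typically consumed (assuming the integer keys 1, 3, 5, 7 denote the main voltage levels, as in the loop above):

import pandapower.networks as pn

net = pn.example_simple()
lvl_dicts = all_voltlvl_idx(net, elms=["bus", "line"])
for elm, lvl_dict in lvl_dicts.items():
    for lvl, idx_set in lvl_dict.items():
        # keys are ints here; for trafos, string keys such as "3-5" can also appear (see above)
        print("%s at voltage level %s: %i indices" % (elm, str(lvl), len(idx_set)))
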
def provide_subnet_col(net):
    """ This function provides 'subnet' column in all DataFrames of net. While
    csv2pp() writes all subnet information into pandapower_net[element]["subnet"], this function
    allows pp2csv() to consider pandapower_net[element]["zone"] if pandapower_net[element]["subnet"]
    is not available. """
    # --- provide subnet column in net.bus and fill with data of net.bus.zone
    if "subnet" not in net.bus.columns or net.bus["subnet"].isnull().all():
        # copy zone to subnet
        net.bus["subnet"] = net.bus.zone
    else:  # fill subnet nan values with zone
        idx_nan = net.bus.index[net.bus.subnet.isnull()]
        net.bus.loc[idx_nan, "subnet"] = net.bus.zone.loc[idx_nan]

    # --- for all elements: if subnet is not available but zone is, take it from zone.
    for element in net.keys():
        if isinstance(net[element], pd.DataFrame):
            if "subnet" not in net[element].columns or net[element][
                    "subnet"].isnull().all():
                if "zone" in net[element].columns and not net[element][
                        "zone"].isnull().all():
                    net[element]["subnet"] = net[element]["zone"].values

    # --- If neither subnet nor zone is available, take subnet from the bus
    # add subnet column from node to all elements but "trafo"
    pp.add_column_from_node_to_elements(net,
                                        "subnet",
                                        replace=False,
                                        elements=pp.pp_elements(bus=False) -
                                        {"trafo"},
                                        branch_bus=["from_bus", "lv_bus"])
    # add subnet column from node to trafo without verbose
    pp.add_column_from_node_to_elements(net,
                                        "subnet",
                                        replace=False,
                                        elements={"trafo"},
                                        branch_bus=["from_bus", "lv_bus"],
                                        verbose=False)
    pp.add_column_from_element_to_elements(net,
                                           "subnet",
                                           replace=False,
                                           elements=["measurement"])

    # --- at trafo switches: use subnet from trafo instead of the bus subnet data:
    trafo_sw = net.switch.index[net.switch.et == "t"]
    net.switch.loc[trafo_sw, "subnet"] = net.trafo.subnet.loc[
        net.switch.element.loc[trafo_sw]].values

    # --- at measurements: use branch subnet instead of bus subnet data:
    for branch_type in ["line", "trafo"]:
        meas = net.measurement.index[net.measurement.element_type ==
                                     branch_type]
        net.measurement.loc[meas, "subnet"] = net[branch_type].subnet.loc[
            net.measurement.element.loc[meas]].values
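
The precedence implemented above is: keep existing 'subnet' values, then fall back to 'zone', then take the bus subnet via pp.add_column_from_node_to_elements(). A minimal sketch of just the bus-level fallback, illustrative only and independent of a full converter run:

import numpy as np
import pandas as pd

bus = pd.DataFrame({"zone": ["a", "b", "c"],
                    "subnet": [np.nan, "keep_me", np.nan]})
idx_nan = bus.index[bus.subnet.isnull()]
bus.loc[idx_nan, "subnet"] = bus.zone.loc[idx_nan]
print(bus.subnet.tolist())  # ['a', 'keep_me', 'c']
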
Example #7
def test_all_voltlvl_idx():
    net = pn.example_simple()

    lvl_dicts = sb.all_voltlvl_idx(net)

    elms = set()
    for elm in pp.pp_elements():
        if net[elm].shape[0]:
            elms |= {elm}
            idxs = set()
            for _, idx in lvl_dicts[elm].items():
                idxs |= idx
            assert set(net[elm].index) == idxs
    assert elms == set(lvl_dicts.keys())

    elms = ["bus"]
    lvl_dicts = sb.all_voltlvl_idx(net, elms=elms)
    assert list(lvl_dicts.keys()) == elms

    lvl_dicts = sb.all_voltlvl_idx(net,
                                   elms=["bus", "trafo3w"],
                                   include_empty_elms_dicts=True)
    assert not bool(net.trafo3w.shape[0])
    assert "trafo3w" in lvl_dicts.keys()
def _test_net_validity(net, sb_code_params, shortened, input_path=None):
    """ This function is to test validity of a simbench net. """

    # --- deduce values from sb_code_params to test extracted csv data
    # lv_net_extent: 0-no lv_net    1-one lv_net    2-all lv_nets
    lv_net_extent = int(bool(len(sb_code_params[2])))
    if bool(lv_net_extent) and sb_code_params[4] == "all":
        lv_net_extent += 1
    # net_factor: how many lower voltage grids are expected to be connected
    if shortened:
        net_factor = 8
    else:
        if sb_code_params[1] == "HV":
            net_factor = 10
        else:
            net_factor = 50

    # --- test data existence
    # buses
    expected_buses = {0: 12, 1: 80, 2: net_factor*12}[lv_net_extent] if sb_code_params[1] != "EHV" \
        else {0: 6, 1: 65, 2: 125}[lv_net_extent]
    assert net.bus.shape[0] > expected_buses

    # ext_grid
    assert bool(net.ext_grid.shape[0])

    # switches
    if sb_code_params[6]:
        if int(sb_code_params[5]) > 0:
            if net.switch.shape[0] <= net.line.shape[0] * 2 - 2:
                logger.info(
                    "There are %i switches, but %i " %
                    (net.switch.shape[0], net.line.shape[0]) +
                    "lines -> some lines are not surrounded by switches.")
        else:
            assert net.switch.shape[0] > net.line.shape[0] * 2 - 2
    else:
        assert not net.switch.closed.any()
        assert (net.switch.et != "b").all()

    # all buses supplied
    if sb_code_params[1] != "complete_data":
        unsup_buses = unsupplied_buses(net, respect_switches=False)
        if len(unsup_buses):
            logger.error("There are %i unsupplied buses." % len(unsup_buses))
            if len(unsup_buses) < 10:
                logger.error("These are: " +
                             str(net.bus.name.loc[unsup_buses]))
        assert not len(unsup_buses)

    # lines
    assert net.line.shape[0] >= net.bus.shape[0]-net.trafo.shape[0]-(net.switch.et == "b").sum() - \
        2*net.trafo3w.shape[0]-net.impedance.shape[0]-net.dcline.shape[0]-net.ext_grid.shape[0]

    # trafos
    if sb_code_params[1] == "EHV":
        expected_trafos = {0: 0, 1: 2, 2: 8}[lv_net_extent]
    elif sb_code_params[1] == "HV":
        expected_trafos = {0: 2, 1: 4, 2: net_factor * 2}[lv_net_extent]
    elif sb_code_params[1] == "MV":
        expected_trafos = {0: 2, 1: 3, 2: net_factor * 1}[lv_net_extent]
    elif sb_code_params[1] == "LV":
        expected_trafos = {0: 1}[lv_net_extent]
    elif sb_code_params[1] == "complete_data":
        expected_trafos = 200
    assert net.trafo.shape[0] >= expected_trafos

    # load
    expected_loads = {0: 10, 1: net_factor, 2: net_factor*10}[lv_net_extent] if \
        sb_code_params[1] != "EHV" else {0: 3, 1: 53, 2: 113}[lv_net_extent]
    assert net.load.shape[0] > expected_loads

    # sgen
    if sb_code_params[1] == "LV":
        expected_sgen = {0: 0}[lv_net_extent]
    elif sb_code_params[2] == "LV":
        expected_sgen = {1: 50, 2: 50 + net_factor * 1}[lv_net_extent]
    else:
        expected_sgen = expected_loads
    assert net.sgen.shape[0] > expected_sgen

    # measurement
    if pd.Series(["HV", "MV"]).isin([sb_code_params[1],
                                     sb_code_params[2]]).any():
        assert net.measurement.shape[0] > 1

    # bus_geodata
    assert net.bus.shape[0] == net.bus_geodata.shape[0]
    # check_that_all_buses_connected_by_switches_have_same_geodata
    for bus_group in bus_groups_connected_by_switches(net):
        first_bus = list(bus_group)[0]
        assert all(
            np.isclose(net.bus_geodata.x.loc[bus_group],
                       net.bus_geodata.x.loc[first_bus])
            & np.isclose(net.bus_geodata.y.loc[bus_group],
                         net.bus_geodata.y.loc[first_bus]))

    # --- test data content
    # substation
    for elm in ["bus", "trafo", "trafo3w", "switch"]:
        mentioned_substations = pd.Series(
            net[elm].substation.unique()).dropna()
        if not mentioned_substations.isin(net.substation.name.values).all():
            raise AssertionError(
                str(
                    list(mentioned_substations.
                         loc[~mentioned_substations.isin(net.substation.name.
                                                         values)].values)) +
                " from element '%s' misses in net.substation" % elm)

    # check subnet
    input_path = input_path if input_path is not None else sb.complete_data_path(
        sb_code_params[5])
    hv_subnet, lv_subnets = sb.get_relevant_subnets(sb_code_params,
                                                    input_path=input_path)
    allowed_elms_missing_subnet = [
        "gen", "dcline", "trafo3w", "impedance", "measurement", "shunt",
        "storage", "ward", "xward"
    ]
    if not sb_code_params[6]:
        allowed_elms_missing_subnet += ["switch"]

    if sb_code_params[1] != "complete_data":
        hv_subnets = sb.ensure_iterability(hv_subnet)
        for elm in pp.pp_elements():
            if "subnet" not in net[elm].columns or not bool(net[elm].shape[0]):
                assert elm in allowed_elms_missing_subnet
            else:  # subnet is in net[elm].columns and there are one or more elements
                subnet_split = net[elm].subnet.str.split("_", expand=True)
                subnet_ok = set()
                subnet_ok |= set(
                    subnet_split.index[subnet_split[0].isin(hv_subnets +
                                                            lv_subnets)])
                if elm in ["bus", "measurement", "switch"]:
                    if 1 in subnet_split.columns:
                        subnet_ok |= set(subnet_split.index[
                            subnet_split[1].isin(hv_subnets)])
                assert len(subnet_ok) == net[elm].shape[0]

    # check profile existing
    assert not sb.profiles_are_missing(net)

    # --- check profiles and loadflow
    check_loadflow = sb_code_params[1] != "complete_data"
    check_loadflow &= sb_code_params[2] != "HVMVLV"
    if check_loadflow:
        try:
            pp.runpp(net)
            converged = net.converged
        except:
            sb_code = sb.get_simbench_code_from_parameters(sb_code_params)
            logger.error("Loadflow not converged with %s" % sb_code)
            converged = False
        assert converged
def pp2csv_data(net1,
                export_pp_std_types=False,
                drop_inactive_elements=True,
                highest_existing_coordinate_number=-1,
                round_qLoad_by_voltLvl=False,
                reserved_aux_node_names=None):
    """ Internal functionality of pp2csv, but without writing the determined dict to csv files.
    For parameter explanations, please have a look at the pp2csv() docstring. """
    # --- initializations
    net = deepcopy(net1)  # necessary because net is changed by the converter functions
    csv_data = _init_csv_tables(
        ['elements', 'profiles', 'types', 'res_elements'])
    aux_nodes_are_reserved = reserved_aux_node_names is not None

    # --- net data preparation for converting
    _extend_pandapower_net_columns(net)
    if drop_inactive_elements:
        # attention: trafo3ws are not considered in the current version of drop_inactive_elements()
        pp.drop_inactive_elements(net, respect_switches=False)
    check_results = pp.deviation_from_std_type(net)
    if check_results:
        logger.warning(
            "There are deviations from standard types in elements: " +
            str(list(check_results.keys())) +
            ". Only the standard type values are converted to csv.")
    convert_parallel_branches(net)
    if (net.bus.shape[0] and not net.bus_geodata.shape[0]) or (
            net.bus_geodata.shape[0] != net.bus.shape[0]):
        logger.info(
            "Since bus_geodata is missing or incomplete, generic geodata are created."
        )
        net.bus_geodata.drop(net.bus_geodata.index, inplace=True)
        create_generic_coordinates(net)
    merge_busbar_coordinates(net)
    move_slack_gens_to_ext_grid(net)

    scaling_is_not_1 = []
    for i in pp.pp_elements():
        # prevent elements without name
        net[i] = ensure_full_column_data_existence(net, i, 'name')
        avoid_duplicates_in_column(net, i, 'name')
        # log scaling factor different from 1
        if "scaling" in net[i].columns:
            if not np.allclose(net[i]["scaling"].values, 1):
                scaling_is_not_1 += [i]
    if len(scaling_is_not_1):
        logger.warning("In elements " + str(scaling_is_not_1) +
                       ", the parameter 'scaling' " +
                       "differs from 1, which is not converted.")
    # log min_e_mwh
    if not np.allclose(net.storage["min_e_mwh"].dropna().values, 0.):
        logger.warning(
            "Storage parameter 'min_e_mwh' is not converted but differs from 0."
        )

    # further preparation
    provide_subnet_col(net)
    provide_voltLvl_col(net)
    provide_substation_cols(net)
    convert_node_type(net)
    _add_dspf_calc_type_and_phys_type_columns(net)
    _add_vm_va_setpoints_to_buses(net)
    _prepare_res_bus_table(net)
    reserved_aux_node_names = replace_branch_switches(net,
                                                      reserved_aux_node_names)
    _convert_measurement(net)
    _add_coordID(net, highest_existing_coordinate_number)
    if not net["trafo"]["autoTapSide"].isnull().all():
        net["trafo"]["autoTapSide"] = net["trafo"]["autoTapSide"].str.upper()

    # --- convert net
    _pp_profiles_to_csv(net, csv_data)
    if "loadcases" in net:
        csv_data["StudyCases"] = net["loadcases"]
    else:
        csv_data["StudyCases"] = pd.DataFrame()
    _pp_types_to_csv1(net, export_pp_std_types)
    _multi_parameter_determination(net)
    _convert_elements_and_types(net, csv_data)
    _pp_types_to_csv2(csv_data)

    if round_qLoad_by_voltLvl:
        _round_qLoad_by_voltLvl(csv_data)

    # --- post_conversion_checks
    _check_id_voltLvl_subnet(csv_data)

    if aux_nodes_are_reserved:
        return csv_data, reserved_aux_node_names
    else:
        return csv_data
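
A hedged usage sketch of the entry point above; a real conversion may need the preparation steps shown in test_example_simple below (fixed scaling, unique names, geodata, measurements):

import pandapower.networks as pn

net = pn.example_simple()
# sketch only: pp2csv_data() handles most preparation internally (generic
# coordinates, name columns, ...), so a plain example net is assumed to suffice
csv_data = pp2csv_data(net, export_pp_std_types=True, drop_inactive_elements=True)
# csv_data is a dict of DataFrames keyed by csv table name; writing the files
# to disk is left to pp2csv()
for tablename, table in csv_data.items():
    print(tablename, table.shape)
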
Example #10
]
app = tk.Tk()
app.geometry('700x400')

variable = tk.StringVar(app)
variable.set(OptionList[0])

tk.Label(app, text="Select a network")
opt = tk.OptionMenu(app, variable, *OptionList)
opt.config(width=200, font=('Helvetica', 12))
opt.pack(side="top")

variable.trace("w", callback_network)

net_name = "{}".format(variable.get())
OptionList = list(pp.pp_elements())  # element table names; pp.pp_elements() takes no net argument
variable = tk.StringVar(app)
variable.set(OptionList[0])
tk.Label(app, text="Select an element")
opt = tk.OptionMenu(app, variable, *OptionList)

variable.trace("w", callback)

labelTest = tk.Label(text="", font=('Helvetica', 12), fg='red')
labelTest.pack(side="top")

app.mainloop()


class choose_plot():
    def __init__(self, prompt):
def test_example_simple():
    net = example_simple()

    # --- fix scaling
    net.load["scaling"] = 1.

    # --- add some additional data
    net.bus["subnet"] = ["net%i" % i for i in net.bus.index]
    pp.create_measurement(net, "i", "trafo", np.nan, np.nan, 0, "hv", name="1")
    pp.create_measurement(net, "i", "line", np.nan, np.nan, 1, "to", name="2")
    pp.create_measurement(net, "v", "bus", np.nan, np.nan, 0, name="3")

    net.shunt["max_step"] = np.nan
    stor = pp.create_storage(net, 6, 0.01, 0.1, -0.002, 0.05, 80, name="sda", min_p_mw=-0.01,
                             max_p_mw=0.008, min_q_mvar=-0.01, max_q_mvar=0.005)
    net.storage.loc[stor, "efficiency_percent"] = 90
    net.storage.loc[stor, "self-discharge_percent_per_day"] = 0.3
    pp.create_dcline(net, 4, 6, 0.01, 0.1, 1e-3, 1.0, 1.01, name="df", min_q_from_mvar=-0.01)
    pp.runpp(net)
    to_drop = pp.create_bus(net, 7, "to_drop")

    # --- add names to elements
    for i in pp.pp_elements():
        net[i] = ensure_full_column_data_existence(net, i, 'name')
        avoid_duplicates_in_column(net, i, 'name')

    # --- create geodata
    net.bus_geodata["x"] = [0, 1, 2, 3, 4, 5, 5, 3.63]
    net.bus_geodata["y"] = [0]*5+[-5, 5, 2.33]
    merge_busbar_coordinates(net)

    # --- convert
    csv_data = pp2csv_data(net, export_pp_std_types=True, drop_inactive_elements=True)
    net_from_csv_data = csv_data2pp(csv_data)

    # --- adjust net appearance
    pp.drop_buses(net, [to_drop])
    del net["OPF_converged"]
    net.load["type"] = np.nan
    del net_from_csv_data["substation"]
    del net_from_csv_data["profiles"]
    for key in net.keys():
        if isinstance(net[key], pd.DataFrame):
            # drop unequal columns
            dummy_columns = net[key].columns
            extra_columns = net_from_csv_data[key].columns.difference(dummy_columns)
            net_from_csv_data[key].drop(columns=extra_columns, inplace=True)
            # drop result table rows
            if "res_" in key:
                if not key == "res_bus":
                    net[key].drop(net[key].index, inplace=True)
                else:
                    net[key].loc[:, ["p_mw", "q_mvar"]] = np.nan
            # adjust dtypes
            if net[key].shape[0]:
                try:
                    net_from_csv_data[key] = net_from_csv_data[key].astype(
                        dtype=dict(net[key].dtypes))
                except:
                    logger.error("dtype adjustment of %s failed." % key)

    eq = pp.nets_equal(net, net_from_csv_data, tol=1e-7)
    assert eq
Example #12
def sorted_from_json(path):
    net = pp.from_json(path)
    for elm in pp.pp_elements():
        net[elm].sort_index(inplace=True)
    return net

def check_elm_number(net1, net2, excerpt_elms=None):
    excerpt_elms = set() if excerpt_elms is None else set(excerpt_elms)
    for elm in set(pp.pp_elements()) - excerpt_elms:
        assert net1[elm].shape[0] == net2[elm].shape[0]