def test_ow_index():
    net = simple_four_bus_system()
    steps = [3, 5, 7]
    p_data = pd.DataFrame(index=steps,
                          columns=["0", "1"],
                          data=[
                              [0.01, 0.02],
                              [0.03, 0.04],
                              [0.05, 0.06],
                          ])
    v_data = pd.DataFrame(index=steps, columns=["0"], data=[1.01, 1.03, 1.02])

    ds_p = DFData(p_data)
    ds_v = DFData(v_data)

    ct.ConstControl(net,
                    element='load',
                    variable='p_mw',
                    element_index=net.load.index.tolist(),
                    data_source=ds_p,
                    profile_name=p_data.columns)
    ct.ConstControl(net,
                    element='ext_grid',
                    variable='vm_pu',
                    element_index=0,
                    data_source=ds_v,
                    profile_name='0')

    ow = OutputWriter(net)
    ow.log_variable('res_bus', 'vm_pu')
    ow.log_variable('res_line', 'loading_percent')

    run_timeseries(net, time_steps=p_data.index, verbose=False)

    assert np.all(ow.output["res_line.loading_percent"].index == p_data.index)
Example #2
def _data_source():
    """

    :return:
    :rtype:
    """
    profiles_sink = pd.read_csv(os.path.join(pp_dir, 'test', 'pipeflow_internals', 'data',
                                             'test_time_series_sink_profiles.csv'), index_col=0)
    profiles_source = pd.read_csv(os.path.join(pp_dir, 'test', 'pipeflow_internals', 'data',
                                               'test_time_series_source_profiles.csv'), index_col=0)
    ds_sink = DFData(profiles_sink)
    ds_source = DFData(profiles_source)
    return ds_sink, ds_source
Example #3
def test_new_pp_object_io():
    net = networks.mv_oberrhein()
    ds = DFData(pd.DataFrame(data=np.array([[0, 1, 2], [7, 8, 9]])))
    control.ConstControl(net,
                         'sgen',
                         'p_mw',
                         42,
                         profile_name=0,
                         data_source=ds)
    control.ContinuousTapControl(net, 142, 1)

    # attach a function reference to the controller to check that it survives serialization
    obj = net.controller.object.at[0]
    obj.run = pp.runpp

    # round-trip the net (including controllers) through JSON
    s = json.dumps(net, cls=PPJSONEncoder)
    net1 = json.loads(s, cls=PPJSONDecoder)

    obj1 = net1.controller.object.at[0]
    obj2 = net1.controller.object.at[1]

    assert isinstance(obj1, control.ConstControl)
    assert isinstance(obj2, control.ContinuousTapControl)
    assert obj1.run is pp.runpp
    assert isinstance(obj1.data_source, DFData)
    assert isinstance(obj1.data_source.df, pd.DataFrame)
Example #4
    def create_data_source(self, mode):
        # Build a DataFrame with randomized p/q profiles for all loads
        profiles = pd.DataFrame()

        # For high load, 5-10% more than the initial value
        if mode == 'high':
            for i in range(len(self.net.load)):
                profiles['load_{}_p'.format(
                    str(i))] = (1.05 +
                                (0.05 * np.random.random(self.n_time_steps))
                                ) * self.net.load.p_mw[i]
                profiles['load_{}_q'.format(
                    str(i))] = (1.05 +
                                (0.05 * np.random.random(self.n_time_steps))
                                ) * self.net.load.q_mvar[i]

        # For low load, 5-10% less than the initial value
        elif mode == 'low':
            for i in range(len(self.net.load)):
                profiles['load_{}_p'.format(
                    str(i))] = (0.90 +
                                (0.05 * np.random.random(self.n_time_steps))
                                ) * self.net.load.p_mw[i]
                profiles['load_{}_q'.format(
                    str(i))] = (0.90 +
                                (0.05 * np.random.random(self.n_time_steps))
                                ) * self.net.load.q_mvar[i]

        ds = DFData(profiles)
        return profiles, ds
Example #5
def create_data_source(n_timesteps=24):
    profiles = pd.DataFrame()
    profiles['load1_p'] = np.random.random(n_timesteps) * 10.
    profiles['sgen1_p'] = np.random.random(n_timesteps) * 20.

    ds = DFData(profiles)

    return profiles, ds
Example #6
def create_data_source(n_timesteps=24):
    profiles = pd.DataFrame()
    # julyLoadProfile and july_PV_Production are assumed to be sequences defined
    # elsewhere in the source module (one value per time step)
    profiles['load1_p'] = julyLoadProfile
    profiles['sgen1_p'] = july_PV_Production

    ds = DFData(profiles)

    return profiles, ds
Example #7
def _data_source():
    """
    Read out existing time series (csv files) for sinks and sources.

    :return: DFData objects wrapping the sink and source time series from the csv files
    :rtype: (DFData, DFData)
    """
    profiles_sink = pd.read_csv(os.path.join(
        pp_dir, 'test', 'pipeflow_internals', 'data',
        'test_time_series_sink_profiles.csv'),
                                index_col=0)
    profiles_source = pd.read_csv(os.path.join(
        pp_dir, 'test', 'pipeflow_internals', 'data',
        'test_time_series_source_profiles.csv'),
                                  index_col=0)
    ds_sink = DFData(profiles_sink)
    ds_source = DFData(profiles_source)
    return ds_sink, ds_source
Example #8
def apply_const_controllers(net, absolute_profiles_values):
    """
    Applys ConstControl instances to the net. As a result, one can easily run timeseries with given
    power values of e.g. loads, sgens, storages or gens.

    INPUT:
        **net** - pandapower net

        **absolute_profiles_values** - dict of Dataframes with absolute values for the profiles,
        keys should be tuples of length 2 (element and parameter), DataFrame size is
        timesteps x number of elements

    """
    n_time_steps = dict()
    for (elm, param), values in absolute_profiles_values.items():
        if values.shape[1]:

            # check DataFrame shape[0] == time_steps
            if elm in n_time_steps.keys():
                if n_time_steps[elm] != values.shape[0]:
                    logger.warning(
                        "There are multiple profiles for %ss which have "
                        % elm + "different numbers of time steps.")
            else:
                n_time_steps[elm] = values.shape[0]

            # check DataFrame shape[1] == net[elm].index
            unknown_idx = values.columns.difference(net[elm].index)
            if len(unknown_idx):
                logger.warning(
                    "In absolute_profiles_values[%s][%s], " % (elm, param) +
                    "there are indices unknown to net[%s].index: " %
                    elm + str(["%i" % i for i in unknown_idx]))
            missing_idx = net[elm].index.difference(values.columns)
            if len(missing_idx):
                logger.warning(
                    "In absolute_profiles_values[%s][%s], " % (elm, param) +
                    "profiles are missing for these net[%s].index entries: " %
                    elm + str(["%i" % i for i in missing_idx]))

            # apply const controllers
            idx = list(net[elm].index.intersection(values.columns))
            ConstControl(net,
                         element=elm,
                         variable=param,
                         element_index=idx,
                         profile_name=idx,
                         data_source=DFData(
                             absolute_profiles_values[(elm, param)][idx]))

    # compare all DataFrame shape[0] == time_steps
    if len(set(n_time_steps.values())) > 1:
        logger.warning("The profiles have different amount of time steps:")
        logger.warning(n_time_steps)
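A minimal usage sketch for apply_const_controllers, assuming a small pandapower example net
(pandapower.networks.example_simple) and random profile values: the dict is keyed by
(element, parameter) tuples, and each DataFrame holds one row per time step and one column per
element index.

import numpy as np
import pandas as pd
import pandapower.networks as nw

# assumed example net; any pandapower net with loads and sgens would do
net = nw.example_simple()
n_ts = 4

# one row per time step, one column per element index of the respective table
absolute_profiles_values = {
    ("load", "p_mw"): pd.DataFrame(np.random.random((n_ts, len(net.load))),
                                   columns=net.load.index),
    ("sgen", "p_mw"): pd.DataFrame(np.random.random((n_ts, len(net.sgen))),
                                   columns=net.sgen.index),
}

apply_const_controllers(net, absolute_profiles_values)
# net.controller now holds one ConstControl per (element, parameter) pair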
Example #9
def load_input(data):
    profiles = pd.DataFrame()
    # divide by 1,000,000, presumably converting the node time series from W to MW
    profiles['load1_p'] = data['Node_1'] / 1000000
    profiles['load2_p'] = data['Node_2'] / 1000000
    profiles['load3_p'] = data['Node_3'] / 1000000
    profiles['load4_p'] = data['Node_4'] / 1000000
    profiles['load5_p'] = data['Node_5'] / 1000000
    # alternative household profiles, scaled by a factor of 10:
    # profiles['load1_p'] = data['HH_1'] * 10 / 1000000
    # profiles['load2_p'] = data['HH_2'] * 10 / 1000000
    # profiles['load3_p'] = data['HH_3'] * 10 / 1000000
    # profiles['load4_p'] = data['HH_4'] * 10 / 1000000
    # profiles['load5_p'] = data['HH_5'] * 10 / 1000000

    ds = DFData(profiles)

    return profiles, ds
Example #10
def test_output_writer_log(simple_test_net):
    net = simple_test_net

    # timeseries data
    df = pd.DataFrame([[15, 30, 2], [12, 27, 1.5], [7, 29, 2.1]])
    ds = DFData(df)

    # Create load controller with data source
    ct.ConstControl(net,
                    element="load",
                    variable="p_mw",
                    element_index=[0, 2],
                    data_source=ds,
                    profile_name=[0, 2])

    # Create the OutputWriter and define which variables to log
    ow = OutputWriter(net, output_path=tempfile.gettempdir())
    ow.remove_log_variable("res_bus")
    orig_index = [0, 1]
    ow.log_variable("res_bus", "vm_pu", orig_index)
    ow.log_variable("res_sgen", "p_mw")
    ow.log_variable("res_sgen", "q_mvar")

    # Run timeseries
    run_timeseries(net, time_steps=range(2), verbose=False)

    # --- double logged variables handling
    ow2 = copy.deepcopy(ow)
    new_idx = 2
    ow2.log_variable("res_bus", "vm_pu", new_idx, eval_name="test")
    run_timeseries(net, time_steps=range(2), output_writer=ow2, verbose=False)
    assert all(ow2.output["res_bus.vm_pu"].columns == orig_index + [new_idx])

    # Todo: This test makes no sense if res_bus is logged by default
    # ow3 = copy.deepcopy(ow)
    # new_idx = [2, 3]
    # ow3.log_variable("res_bus", "vm_pu", new_idx)
    # run_timeseries(net, time_steps=range(2), output_writer=ow3)
    # assert all(ow3.output["res_bus.vm_pu"].columns == orig_index + new_idx)

    ow4 = copy.deepcopy(ow)
    new_idx = [2, 4]
    ow4.log_variable("res_bus", "vm_pu", new_idx, eval_name=["test1", "test2"])
    run_timeseries(net, time_steps=range(2), output_writer=ow4, verbose=False)
    assert all(ow4.output["res_bus.vm_pu"].columns == orig_index + new_idx)
Example #11
def create_data_source(net, mode='', n_timesteps=30):
    profiles = pd.DataFrame()
    if mode == 'High Load':
        for i in range(len(net.load)):
            profiles['load{}_P'.format(str(i))] = 1.05 * net.load.p_mw[i] + (
                0.05 * np.random.random(n_timesteps) * net.load.p_mw[i])
            profiles['load{}_Q'.format(str(i))] = 1.05 * net.load.q_mvar[i] + (
                0.05 * np.random.random(n_timesteps) * net.load.q_mvar[i])
    elif mode == 'Low Load':
        for i in range(len(net.load)):
            profiles['load{}_P'.format(str(i))] = 0.90 * net.load.p_mw[i] + (
                0.05 * np.random.random(n_timesteps) * net.load.p_mw[i])
            profiles['load{}_Q'.format(str(i))] = 0.90 * net.load.q_mvar[i] + (
                0.05 * np.random.random(n_timesteps) * net.load.q_mvar[i])

    ds = DFData(profiles)

    return profiles, ds
Example #12
def create_data_source(n_timesteps=10):
    profiles = pd.DataFrame()
    profiles['load1'] = np.random.random(n_timesteps) * 2e1
    profiles['load2_mv_p'] = np.random.random(n_timesteps) * 4e1
    profiles['load2_mv_q'] = np.random.random(n_timesteps) * 1e1

    profiles['load3_hv_p'] = profiles.load2_mv_p + abs(np.random.random())
    profiles['load3_hv_q'] = profiles.load2_mv_q + abs(np.random.random())

    profiles['slack_v'] = np.clip(
        np.random.random(n_timesteps) + 0.5, 0.8, 1.2)
    profiles['trafo_v'] = np.clip(
        np.random.random(n_timesteps) + 0.5, 0.9, 1.1)

    profiles["trafo_tap"] = np.random.randint(-3, 3, n_timesteps)

    ds = DFData(profiles)

    return profiles, ds
Example #13
def create_data_source(n_timesteps=60, state='NS'):
    profiles = pd.DataFrame()
    if state == 'HL':
        profiles['load1_p'] = np.random.random(n_timesteps) * 90 * 1.1
        profiles['load2_p'] = np.random.random(n_timesteps) * 100 * 1.1
        profiles['load3_p'] = np.random.random(n_timesteps) * 125 * 1.1
    if state == 'LL':
        profiles['load1_p'] = np.random.random(n_timesteps) * 90 * 0.9
        profiles['load2_p'] = np.random.random(n_timesteps) * 100 * 0.9
        profiles['load3_p'] = np.random.random(n_timesteps) * 125 * 0.9
    if state == 'NS':
        profiles['load1_p'] = np.random.random(n_timesteps) * 90
        profiles['load2_p'] = np.random.random(n_timesteps) * 100
        profiles['load3_p'] = np.random.random(n_timesteps) * 125

        profiles['sgen1_p'] = np.random.random(n_timesteps) * 163
    if state in ['NS', 'HL', 'LL', 'LD']:
        profiles['sgen2_p'] = np.random.random(n_timesteps) * 85

    ds = DFData(profiles)
    return profiles, ds
Example #14
def release_control_test_network():
    # empty net
    net = pp.create_empty_network("net", add_stdtypes=False)

    # fluid
    pp.create_fluid_from_lib(net, "water", overwrite=True)

    # junctions
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 0", index=None, in_service=True,
                       type="junction", geodata=None)
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 1")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 2")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 3")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 4")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 5")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 6")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 7")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 8")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 9")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 10")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 11")
    pp.create_junction(net, pn_bar=3, tfluid_k=293, height_m=0, name="Junction 12")

    # pipes
    pp.create_pipe_from_parameters(net, from_junction=0, to_junction=8, length_km=3, diameter_m=0.01, k_mm=1,
                                   loss_coefficient=0, sections=10, alpha_w_per_m2k=10, text_k=293,
                                   qext_w=0., name="Pipe 0", index=None, geodata=None, in_service=True, type="pipe")
    pp.create_pipe_from_parameters(net, 9, 2, length_km=6, diameter_m=0.075, k_mm=.1, sections=10,
                                   alpha_w_per_m2k=3,
                                   name="Pipe 1")
    pp.create_pipe_from_parameters(net, 2, 12, length_km=5, diameter_m=0.06, k_mm=.1, sections=10,
                                   alpha_w_per_m2k=20,
                                   name="Pipe 2")
    pp.create_pipe_from_parameters(net, 4, 12, length_km=0.1, diameter_m=0.07, k_mm=.1, sections=10,
                                   alpha_w_per_m2k=2,
                                   name="Pipe 3")
    pp.create_pipe_from_parameters(net, 5, 3, length_km=1, diameter_m=0.09, k_mm=.1, sections=10, alpha_w_per_m2k=3,
                                   name="Pipe 4")
    pp.create_pipe_from_parameters(net, 4, 11, length_km=2.5, diameter_m=0.08, k_mm=.1, sections=10,
                                   alpha_w_per_m2k=15,
                                   name="Pipe 5")
    pp.create_pipe_from_parameters(net, 7, 6, length_km=4.5, diameter_m=0.085, k_mm=.1, sections=10,
                                   alpha_w_per_m2k=2.5, name="Pipe 6")
    pp.create_pipe_from_parameters(net, 1, 7, length_km=4, diameter_m=0.03, k_mm=.1, sections=10, alpha_w_per_m2k=1,
                                   name="Pipe 7")

    # external grids
    pp.create_ext_grid(net, junction=0, p_bar=3, t_k=300, name="External Grid 0", in_service=True, index=None,
                       type="pt")
    pp.create_ext_grid(net, 1, p_bar=5, t_k=350, name="External Grid 1", type="pt")

    # sinks
    pp.create_sink(net, junction=2, mdot_kg_per_s=0.2, scaling=1., name="Sink 0", index=None, in_service=True,
                   type="sink")
    pp.create_sink(net, 3, mdot_kg_per_s=0.1, name="Sink 1")
    pp.create_sink(net, 4, mdot_kg_per_s=0.5, name="Sink 2")
    pp.create_sink(net, 5, mdot_kg_per_s=0.07, name="Sink 3")
    pp.create_sink(net, 6, mdot_kg_per_s=0.09, name="Sink 4")
    pp.create_sink(net, 7, mdot_kg_per_s=0.1, name="Sink 5")

    # sources
    pp.create_source(net, junction=8, mdot_kg_per_s=0.1, scaling=1., name="Source 0", index=None, in_service=True,
                     type="source")
    pp.create_source(net, junction=9, mdot_kg_per_s=0.03, name="Source 1")
    pp.create_source(net, junction=10, mdot_kg_per_s=0.04, name="Source 2")
    pp.create_source(net, junction=11, mdot_kg_per_s=0.09, name="Source 3")

    # valves
    pp.create_valve(net, from_junction=8, to_junction=9, diameter_m=0.1, opened=True, loss_coefficient=0,
                    name="Valve 0", index=None, type="valve")
    pp.create_valve(net, 9, 4, diameter_m=0.05, opened=True, name="Valve 1")

    # pump
    pp.create_pump_from_parameters(net, from_junction=8, to_junction=3, new_std_type_name="Pump",
                                   pressure_list=[6.1, 5.8, 4],
                                   flowrate_list=[0, 19, 83], reg_polynomial_degree=2,
                                   poly_coefficents=None, name=None, index=None, in_service=True,
                                   type="pump")

    # circulation pump mass
    pp.create_circ_pump_const_mass_flow(net, from_junction=3, to_junction=4, p_bar=6, mdot_kg_per_s=1,
                                        t_k=290, name="Circ. Pump Mass", index=None, in_service=True,
                                        type="pt")

    # circulation pump pressure
    pp.create_circ_pump_const_pressure(net, from_junction=11, to_junction=5, p_bar=5, plift_bar=2,
                                       t_k=290, name="Circ. Pump Pressure", index=None, in_service=True, type="pt")

    # heat exchanger
    pp.create_heat_exchanger(net, from_junction=10, to_junction=6, diameter_m=0.08, qext_w=50, loss_coefficient=0,
                             name="Heat Exchanger 0", index=None, in_service=True, type="heat_exchanger")
    pp.create_heat_exchanger(net, from_junction=4, to_junction=10, diameter_m=0.08, qext_w=28000,
                             loss_coefficient=0,
                             name="Heat Exchanger 1", index=None, in_service=True, type="heat_exchanger")
    # time series
    profiles_sink = pd.read_csv(os.path.join(pp_dir, 'test', 'api', 'release_cycle',
                                             'release_control_test_sink_profiles.csv'), index_col=0)
    profiles_source = pd.read_csv(os.path.join(pp_dir, 'test', 'api', 'release_cycle',
                                               'release_control_test_source_profiles.csv'), index_col=0)
    ds_sink = DFData(profiles_sink)
    ds_source = DFData(profiles_source)

    const_sink = control.ConstControl(net, element='sink', variable='mdot_kg_per_s',
                                      element_index=net.sink.index.values, data_source=ds_sink,
                                      profile_name=net.sink.index.values.astype(str))
    const_source = control.ConstControl(net, element='source', variable='mdot_kg_per_s',
                                        element_index=net.source.index.values,
                                        data_source=ds_source,
                                        profile_name=net.source.index.values.astype(str))

    const_sink.initial_run = False
    const_source.initial_run = False

    pp.pipeflow(net)

    # 'path' is assumed to be a module-level output directory defined elsewhere in the source file
    pp.to_json(net, os.path.join(path, 'example_%s.json' % pp.__version__))

    return net