Example No. 1
def test_scenario_storage():
    """Test the behaviour of Storage nodes with multiple scenarios

    The model defines two inflow scenarios, with flows of 5 and 10. The
    volume in the storage node is expected to increase at different rates
    in the two scenarios.
    """
    model = Model()

    i = Input(model, 'input', max_flow=999)
    s = Storage(model, 'storage', num_inputs=1, num_outputs=1, max_volume=1000, initial_volume=500)
    o = Output(model, 'output', max_flow=999)

    scenario_input = Scenario(model, 'Inflow', size=2)
    i.min_flow = ConstantScenarioParameter(model, scenario_input, [5.0, 10.0])

    i.connect(s)
    s.connect(o)

    s_rec = NumpyArrayStorageRecorder(model, s)

    model.run()

    assert_allclose(i.flow, [5, 10])
    assert_allclose(s_rec.data[0], [505, 510])
    assert_allclose(s_rec.data[1], [510, 520])
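
The asserted volumes follow directly from mass balance: each scenario adds its constant inflow to the previous day's storage. A minimal sketch of that arithmetic, assuming (as the assertions imply) that the output node takes no flow:

import numpy as np

# Hedged sketch: starting from initial_volume=500, each scenario
# accumulates its constant daily inflow (5 or 10).
initial_volume = 500.0
inflows = np.array([5.0, 10.0])
expected = initial_volume + np.cumsum(np.tile(inflows, (2, 1)), axis=0)
print(expected)  # [[505. 510.]
                 #  [510. 520.]]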
Example No. 2
def test_numpy_index_parameter_recorder(simple_storage_model):
    """
    Test the NumpyArrayIndexParameterRecorder

    Note the parameter is recorded at the start of the timestep, while the
    storage is recorded at the end of the timestep.
    """
    from pywr.parameters.control_curves import ControlCurveIndexParameter

    model = simple_storage_model

    res = model.nodes['Storage']

    p = ControlCurveIndexParameter(model, res, [5.0/20.0, 2.5/20.0])

    res_rec = NumpyArrayStorageRecorder(model, res)
    lvl_rec = NumpyArrayIndexParameterRecorder(model, p)

    model.run()

    assert res_rec.data.shape == (5, 1)
    assert_allclose(res_rec.data, np.array([[7, 4, 1, 0, 0]]).T, atol=1e-7)
    assert lvl_rec.data.shape == (5, 1)
    assert_allclose(lvl_rec.data, np.array([[0, 0, 1, 2, 2]]).T, atol=1e-7)

    df = lvl_rec.to_dataframe()
    assert df.shape == (5, 1)
    assert_allclose(df.values, np.array([[0, 0, 1, 2, 2]]).T, atol=1e-7)
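
The expected indices can be reconstructed by hand from the control curves. A hedged sketch, assuming simple_storage_model starts at a volume of 10 with max_volume=20 (values inferred from the asserted storage data, not shown in this snippet):

import numpy as np

# Start-of-step proportions: the initial volume followed by the lagged
# end-of-step volumes recorded above (10, 7, 4, 1, 0 out of 20).
curves = [5.0 / 20.0, 2.5 / 20.0]
proportions = np.array([10.0, 7.0, 4.0, 1.0, 0.0]) / 20.0
indices = [sum(p < c for c in curves) for p in proportions]
print(indices)  # [0, 0, 1, 2, 2] -- matches lvl_rec.data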
Example No. 3
def test_demand_saving_with_indexed_array_from_hdf():
    """Test demand saving based on a predefined demand saving level in a HDF file."""
    model = load_model("demand_saving_hdf.json")

    model.timestepper.end = pd.Timestamp("2016-01-31")

    rec_demand = NumpyArrayNodeRecorder(model, model.nodes["Demand"])
    rec_storage = NumpyArrayStorageRecorder(model, model.nodes["Reservoir"])

    model.check()
    model.run()

    max_volume = model.nodes["Reservoir"].max_volume

    # model starts with no demand saving
    demand_baseline = 50.0
    demand_saving = 1.0
    assert_allclose(rec_demand.data[0, 0], demand_baseline * demand_saving)

    # first control curve breached
    demand_saving = 0.8
    assert_allclose(rec_demand.data[11, 0], demand_baseline * demand_saving)

    # second control curve breached
    demand_saving = 0.5
    assert_allclose(rec_demand.data[12, 0], demand_baseline * demand_saving)

    # third control curve breached
    demand_saving = 0.25
    assert_allclose(rec_demand.data[13, 0], demand_baseline * demand_saving)
Example No. 4
def test_numpy_storage_recorder(simple_storage_model):
    """
    Test the NumpyArrayStorageRecorder
    """
    model = simple_storage_model

    res = model.nodes['Storage']

    rec = NumpyArrayStorageRecorder(model, res)

    model.run()

    assert rec.data.shape == (5, 1)
    assert_allclose(rec.data, np.array([[7, 4, 1, 0, 0]]).T, atol=1e-7)

    df = rec.to_dataframe()
    assert df.shape == (5, 1)
    assert_allclose(df.values, np.array([[7, 4, 1, 0, 0]]).T, atol=1e-7)
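
The expected volumes also have a simple closed form. A hedged sketch, assuming the fixture starts at a volume of 10 and loses a net 3 per day (consistent with the asserted data):

import numpy as np

# End-of-step volumes: 10 minus a net 3 per day, floored at empty.
expected = np.maximum(10.0 - 3.0 * np.arange(1, 6), 0.0)
print(expected)  # [7. 4. 1. 0. 0.]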
Example No. 5
def test_simple_battery():

    m = Model.load(os.path.join(TEST_FOLDER, 'models', 'simple-battery.json'),
                   solver='glpk-dcopf')

    gen1 = NumpyArrayNodeRecorder(m, m.nodes['gen1'])
    pv2 = NumpyArrayNodeRecorder(m, m.nodes['pv2'])
    battery1 = NumpyArrayStorageRecorder(m, m.nodes['battery1'])

    m.setup()
    m.run()

    df = pandas.concat(
        {
            'gen1': gen1.to_dataframe(),
            'pv2': pv2.to_dataframe(),
            'battery1': battery1.to_dataframe()
        },
        axis=1)

    # 745 rows: presumably an hourly model over January (31 * 24 + 1 timesteps)
    assert df.shape[0] == 745
Example No. 6
    def test_event_capture_with_storage(self, cyclical_storage_model, recorder_agg_func):
        """ Test Storage events using a StorageThresholdRecorder """
        m = cyclical_storage_model

        strg = m.nodes['Storage']
        arry = NumpyArrayStorageRecorder(m, strg)

        # Create the trigger using a threshold recorder
        trigger = StorageThresholdRecorder(m, strg, 4.0, predicate='<=')
        evt_rec = EventRecorder(m, trigger)
        evt_dur = EventDurationRecorder(m, evt_rec, recorder_agg_func=recorder_agg_func, agg_func='max')

        m.run()

        # Ensure there is at least one event
        assert evt_rec.events

        # Build a timeseries of when the events say an event is active
        triggered = np.zeros_like(arry.data, dtype=int)
        for evt in evt_rec.events:
            triggered[evt.start.index:evt.end.index, evt.scenario_index.global_id] = 1

            # Check the duration
            td = evt.end.datetime - evt.start.datetime
            assert evt.duration == td.days

        # Test that the volumes in the Storage node during the event periods match
        assert_equal(triggered, arry.data <= 4)

        df = evt_rec.to_dataframe()

        assert len(df) == len(evt_rec.events)

        func = TestEventRecorder.funcs[recorder_agg_func]

        # Now check the EventDurationRecorder does the aggregation correctly
        expected_durations = []
        for si in m.scenarios.combinations:
            event_durations = []
            for evt in evt_rec.events:
                if evt.scenario_index.global_id == si.global_id:
                    event_durations.append(evt.duration)

            # If there are no events then the metric is zero
            if len(event_durations) > 0:
                expected_durations.append(func(event_durations))
            else:
                expected_durations.append(0.0)

        assert_allclose(evt_dur.values(), expected_durations)
        assert_allclose(evt_dur.aggregated_value(), np.max(expected_durations))
Example No. 7
def add_node_array_recorders(model):
    """ Helper function to add NumpyArrayXXX recorders to a Pywr model. """

    # Add node recorders
    for node in model.nodes:
        if isinstance(node, Node):
            name = '__{}__:{}'.format(node.name, 'simulated_flow')
            NumpyArrayNodeRecorder(model, node, name=name)
        elif isinstance(node, Storage):
            name = '__{}__:{}'.format(node.name, 'simulated_volume')
            NumpyArrayStorageRecorder(model, node, name=name)
        else:
            import warnings
            warnings.warn(
                'Unrecognised node subclass "{}" with name "{}". Skipping '
                'recording this node.'.format(node.__class__.__name__,
                                              node.name), RuntimeWarning)
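
A possible usage sketch for this helper. It assumes pywr's model.recorders supports lookup by recorder name and that the loaded model contains a node called 'catchment' (both assumptions for illustration):

add_node_array_recorders(model)
model.run()

# Recorders are retrieved by the generated '__<node>__:<metric>' names.
flow_rec = model.recorders['__catchment__:simulated_flow']
df = flow_rec.to_dataframe()  # one column per scenario combination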
Example No. 8
    def _add_node_flagged_recorders(self, model):

        for node in model.nodes:
            try:
                flags = self._node_recorder_flags[node.name]
            except KeyError:
                flags = {
                    'timeseries': True
                }  # Default to recording timeseries if not defined.

            for flag, to_record in flags.items():
                if not to_record:
                    continue

                if flag == 'timeseries':
                    if isinstance(node, Node):
                        name = '__{}__:{}'.format(node.name, 'simulated_flow')
                        NumpyArrayNodeRecorder(model, node, name=name)
                    elif isinstance(node, Storage):
                        name = '__{}__:{}'.format(node.name,
                                                  'simulated_volume')
                        NumpyArrayStorageRecorder(model, node, name=name)
                    else:
                        import warnings
                        warnings.warn(
                            'Unrecognised node subclass "{}" with name "{}" for timeseries recording. Skipping '
                            'recording this node.'.format(
                                node.__class__.__name__, node.name),
                            RuntimeWarning)

                elif flag == 'deficit':
                    if isinstance(node, Node):
                        deficit_parameter = DeficitParameter(model, node)
                        name = '__{}__:{}'.format(node.name,
                                                  'simulated_deficit')
                        NumpyArrayParameterRecorder(model,
                                                    deficit_parameter,
                                                    name=name)
                    else:
                        import warnings
                        warnings.warn(
                            'Unrecognised node subclass "{}" with name "{}" for deficit recording. Skipping '
                            'recording this node.'.format(
                                node.__class__.__name__, node.name),
                            RuntimeWarning)
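
For context, a hedged sketch of the mapping that _node_recorder_flags is expected to hold; the node names here are hypothetical, and only the 'timeseries' and 'deficit' flags are read by the method above:

# Keys are node names; each flag switches one recorder type on or off.
# Nodes missing from the mapping default to {'timeseries': True}.
node_recorder_flags = {
    'Demand': {'timeseries': True, 'deficit': True},
    'Reservoir': {'timeseries': True},
    'Link1': {'timeseries': False},
}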
Example No. 9
    def test_no_event_capture_with_storage(self, cyclical_storage_model, recorder_agg_func):
        """ Test Storage events using a StorageThresholdRecorder """
        m = cyclical_storage_model

        strg = m.nodes['Storage']
        arry = NumpyArrayStorageRecorder(m, strg)

        # Create a trigger that can never fire: storage volume is never below -1
        trigger = StorageThresholdRecorder(m, strg, -1.0, predicate='<')
        evt_rec = EventRecorder(m, trigger)
        evt_dur = EventDurationRecorder(m, evt_rec, recorder_agg_func=recorder_agg_func, agg_func='max')

        m.run()

        # Ensure there are no events in this test
        assert len(evt_rec.events) == 0
        df = evt_rec.to_dataframe()
        assert len(df) == 0

        assert_allclose(evt_dur.values(), np.zeros(len(m.scenarios.combinations)))
        assert_allclose(evt_dur.aggregated_value(), 0)
Example No. 10
def test_demand_saving_with_indexed_array():
    """Test demand saving based on reservoir control curves

    This is a relatively complex test due to the large number of
    dependencies of the parameters being tested. It is an example of how
    demand savings can be applied in times of drought based on the state
    of a reservoir.
    """

    model = load_model("demand_saving2.json")

    model.timestepper.end = pd.Timestamp("2016-01-31")

    rec_demand = NumpyArrayNodeRecorder(model, model.nodes["Demand"])
    rec_storage = NumpyArrayStorageRecorder(model, model.nodes["Reservoir"])

    model.check()
    model.run()

    max_volume = model.nodes["Reservoir"].max_volume

    # model starts with no demand saving
    demand_baseline = 50.0
    demand_factor = 0.9  # jan-apr
    demand_saving = 1.0
    assert_allclose(rec_demand.data[0, 0],
                    demand_baseline * demand_factor * demand_saving)

    # first control curve breached
    demand_saving = 0.95
    assert rec_storage.data[4, 0] < 0.8 * max_volume
    assert_allclose(rec_demand.data[5, 0],
                    demand_baseline * demand_factor * demand_saving)

    # second control curve breached
    demand_saving = 0.5
    assert rec_storage.data[11, 0] < 0.5 * max_volume
    assert_allclose(rec_demand.data[12, 0],
                    demand_baseline * demand_factor * demand_saving)
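
All of the asserted demands come from a single product. A hedged sketch of that calculation, with the saving factor indexed by the reservoir's control-curve level (the indexing mechanism is assumed from the test's description):

demand_baseline = 50.0
demand_factor = 0.9                 # Jan-Apr monthly profile
saving_factors = [1.0, 0.95, 0.5]   # one entry per control-curve level

for level, factor in enumerate(saving_factors):
    print(level, demand_baseline * demand_factor * factor)
# 0 45.0 / 1 42.75 / 2 22.5 -- the values asserted above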
Example No. 11
def test_concatenated_dataframes(simple_storage_model):
    """
    Test that Model.to_dataframe returns something sensible.

    """
    model = simple_storage_model

    scA = Scenario(model, 'A', size=2)
    scB = Scenario(model, 'B', size=3)

    res = model.nodes['Storage']
    rec1 = NumpyArrayStorageRecorder(model, res)
    otpt = model.nodes['Output']
    rec2 = NumpyArrayNodeRecorder(model, otpt)
    # TotalDeficitNodeRecorder can't return a DataFrame; it is included
    # here to check that it doesn't cause any issues.
    rec3 = TotalDeficitNodeRecorder(model, otpt)

    model.run()

    df = model.to_dataframe()
    # Two dataframe-returning recorders x (2 x 3) scenario combinations
    assert df.shape == (5, 2*2*3)
    assert df.columns.names == ['Recorder', 'A', 'B']
Example No. 12
def test_keating_aquifer(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01'),
    )

    aqfer = KeatingAquifer(
        model,
        'keating',
        num_streams,
        num_additional_inputs,
        stream_flow_levels,
        transmissivity,
        coefficient,
        levels,
        area=area,
        storativity=storativity,
    )

    catchment = Input(model, 'catchment', max_flow=0)
    stream = Output(model, 'stream', max_flow=np.inf, cost=0)
    abstraction = Output(model, 'abstraction', max_flow=15, cost=-999)

    catchment.connect(aqfer)
    aqfer.connect(stream, from_slot=0)
    aqfer.connect(abstraction, from_slot=1)

    rec_level = NumpyArrayLevelRecorder(model, aqfer)
    rec_volume = NumpyArrayStorageRecorder(model, aqfer)
    rec_stream = NumpyArrayNodeRecorder(model, stream)
    rec_abstraction = NumpyArrayNodeRecorder(model, abstraction)

    model.check()

    assert len(aqfer.inputs) == num_streams + num_additional_inputs

    for initial_level in (50, 100, 110, 150):
        # set the initial aquifer level and therefore the initial volume
        aqfer.initial_level = initial_level
        initial_volume = aqfer.initial_volume
        assert initial_volume == area * storativity[0] * initial_level * 0.001
        # run the model (for one timestep only)
        model.run()
        # manually calculate keating streamflow and check model flows are OK
        Qp = 2 * transmissivity[0] * max(initial_level - stream_flow_levels[0][0], 0) * coefficient
        Qe = 2 * transmissivity[1] * max(initial_level - stream_flow_levels[0][1], 0) * coefficient
        delta_storage = initial_volume - rec_volume.data[0, 0]
        abs_flow = rec_abstraction.data[0, 0]
        stream_flow = rec_stream.data[0, 0]
        assert delta_storage == stream_flow + abs_flow
        assert stream_flow == Qp + Qe

    A_VERY_LARGE_NUMBER = 9999999999999
    model.timestepper.end = pandas.to_datetime('2016-01-02')

    # fill the aquifer completely
    # there is no spill for the storage so it should find no feasible solution
    with pytest.raises(RuntimeError):
        catchment.max_flow = A_VERY_LARGE_NUMBER
        catchment.min_flow = A_VERY_LARGE_NUMBER
        model.run()

    # drain the aquifer completely
    catchment.min_flow = 0
    catchment.max_flow = 0
    abstraction.max_flow = A_VERY_LARGE_NUMBER
    model.run()
    assert rec_volume.data[1, 0] == 0
    abs_flow = rec_abstraction.data[1, 0]
    stream_flow = rec_stream.data[1, 0]
    assert stream_flow == 0
    assert abs_flow == 0