Example #1
def test_two_scenarios(simple_linear_model):
    """Basic test of Scenario functionality"""
    model = simple_linear_model  # Convenience renaming

    scenario_input = Scenario(model, 'Inflow', size=2)
    model.nodes["Input"].max_flow = ConstantScenarioParameter(
        scenario_input, [5.0, 10.0])

    scenario_outflow = Scenario(model, 'Outflow', size=2)
    model.nodes["Output"].max_flow = ConstantScenarioParameter(
        scenario_outflow, [3.0, 8.0])
    model.nodes["Output"].cost = -2.0

    # add numpy recorders to input and output nodes
    NumpyArrayNodeRecorder(model, model.nodes["Input"], "input")
    NumpyArrayNodeRecorder(model, model.nodes["Output"], "output")

    expected_node_results = {
        "Input": [3.0, 5.0, 3.0, 8.0],
        "Link": [3.0, 5.0, 3.0, 8.0],
        "Output": [3.0, 5.0, 3.0, 8.0],
    }

    assert_model(model, expected_node_results)

    model.run()

    # combine recorder outputs to a single dataframe
    df = model.to_dataframe()
    assert df.shape == (365, 2 * 2 * 2)
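    # Columns form a MultiIndex of (recorder name, 'Inflow' index, 'Outflow' index)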
    assert_allclose(df["input", 0, 0].iloc[0], 3.0)
    assert_allclose(df["input", 0, 1].iloc[0], 5.0)
    assert_allclose(df["input", 1, 0].iloc[0], 3.0)
    assert_allclose(df["input", 1, 1].iloc[0], 8.0)
Example #2
def test_numpy_recorder(simple_linear_model):
    """
    Test the NumpyArrayNodeRecorder
    """
    model = simple_linear_model
    otpt = model.nodes['Output']

    model.nodes['Input'].max_flow = 10.0
    otpt.cost = -2.0
    rec = NumpyArrayNodeRecorder(model, otpt)

    # test retrieval of recorder
    assert model.recorders['numpyarraynoderecorder.Output'] == rec
    # test changing name of recorder
    rec.name = 'timeseries.Output'
    assert model.recorders['timeseries.Output'] == rec
    with pytest.raises(KeyError):
        model.recorders['numpyarraynoderecorder.Output']

    model.run()

    assert rec.data.shape == (365, 1)
    assert np.all(np.abs(rec.data - 10.0) < 1e-12)

    df = rec.to_dataframe()
    assert df.shape == (365, 1)
    assert np.all(np.abs(df.values - 10.0) < 1e-12)
Example #3
    def _make_weather_nodes(self, model, weather, cost):

        if not isinstance(self.area, Parameter):
            raise ValueError(
                'Weather nodes can only be created if an area Parameter is given.'
            )

        rainfall = weather['rainfall'].astype(np.float64)
        rainfall_param = MonthlyProfileParameter(model, rainfall)

        evaporation = weather['evaporation'].astype(np.float64)
        evaporation_param = MonthlyProfileParameter(model, evaporation)

        # Assume rainfall/evap is mm/day
        # Need to convert:
        #   Mm2 -> m2
        #   mm/day -> m/day
        #   m3/day -> Mm3/day
        # TODO allow this to be configured
        const = ConstantParameter(model, 1e6 * 1e-3 * 1e-6)
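        # Combined factor: 1e6 * 1e-3 * 1e-6 = 1e-3,
        # i.e. flow [Mm3/day] = area [Mm2] * rate [mm/day] * 1e-3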

        # Create the flow parameters multiplying area by rate of rainfall/evap
        rainfall_flow_param = AggregatedParameter(
            model, [rainfall_param, const, self.area], agg_func='product')
        evaporation_flow_param = AggregatedParameter(
            model, [evaporation_param, const, self.area], agg_func='product')

        # Create the nodes to provide the flows
        rainfall_node = Input(model,
                              '{}.rainfall'.format(self.name),
                              parent=self)
        rainfall_node.max_flow = rainfall_flow_param
        rainfall_node.cost = cost

        evaporation_node = Output(model,
                                  '{}.evaporation'.format(self.name),
                                  parent=self)
        evaporation_node.max_flow = evaporation_flow_param
        evaporation_node.cost = cost

        rainfall_node.connect(self)
        self.connect(evaporation_node)
        self.rainfall_node = rainfall_node
        self.evaporation_node = evaporation_node

        # Finally record these flows
        self.rainfall_recorder = NumpyArrayNodeRecorder(
            model, rainfall_node, name=f'__{rainfall_node.name}__:rainfall')
        self.evaporation_recorder = NumpyArrayNodeRecorder(
            model,
            evaporation_node,
            name=f'__{evaporation_node.name}__:evaporation')
Example #4
def test_demand_saving_with_indexed_array_from_hdf():
    """Test demand saving based on a predefined demand saving level in a HDF file."""
    model = load_model("demand_saving_hdf.json")

    model.timestepper.end = pd.Timestamp("2016-01-31")

    rec_demand = NumpyArrayNodeRecorder(model, model.nodes["Demand"])
    rec_storage = NumpyArrayStorageRecorder(model, model.nodes["Reservoir"])

    model.check()
    model.run()

    max_volume = model.nodes["Reservoir"].max_volume

    # model starts with no demand saving
    demand_baseline = 50.0
    demand_saving = 1.0
    assert_allclose(rec_demand.data[0, 0], demand_baseline * demand_saving)

    # first control curve breached
    demand_saving = 0.8
    assert_allclose(rec_demand.data[11, 0], demand_baseline * demand_saving)

    # second control curve breached
    demand_saving = 0.5
    assert_allclose(rec_demand.data[12, 0], demand_baseline * demand_saving)

    # third control curve breached
    demand_saving = 0.25
    assert_allclose(rec_demand.data[13, 0], demand_baseline * demand_saving)
Example #5
def test_mean_flow_recorder(solver):
    model = Model(solver=solver)
    model.timestepper.start = pandas.to_datetime("2016-01-01")
    model.timestepper.end = pandas.to_datetime("2016-01-04")

    inpt = Input(model, "input")
    otpt = Output(model, "output")
    inpt.connect(otpt)

    rec_flow = NumpyArrayNodeRecorder(model, inpt)
    rec_mean = MeanFlowRecorder(model, node=inpt, timesteps=3)
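    # MeanFlowRecorder reports the mean of up to the last `timesteps` flows;
    # early timesteps average over however many values exist so far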

    scenario = Scenario(model, "dummy", size=2)

    inpt.max_flow = inpt.min_flow = FunctionParameter(model, inpt, lambda model, t, si: 2 + t.index)
    model.run()

    expected = [
        2.0,
        (2.0 + 3.0) / 2,
        (2.0 + 3.0 + 4.0) / 3,
        (3.0 + 4.0 + 5.0) / 3,  # zeroth day forgotten
    ]

    for value, expected_value in zip(rec_mean.data[:, 0], expected):
        assert_allclose(value, expected_value)
Example #6
def test_delay_node(key, delay, initial_flow):
    """Test that the `DelayNode` and the `FlowDelayParameter` internal to it correctly delay node for a range of inputs and
    across scenarios"""
    model = Model()

    model.timestepper.start = "2015/01/01"
    model.timestepper.end = "2015/01/31"

    scen = Scenario(model, name="scenario", size=2)
    flow_vals = np.arange(1, 63).reshape((31, 2), order="F")
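    # Column-major reshape: scenario 0 receives flows 1..31, scenario 1 receives 32..62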
    flow = ArrayIndexedScenarioParameter(model, scen, flow_vals)

    catchment = Catchment(model, name="input", flow=flow)
    kwargs = {key: delay}
    if initial_flow:
        kwargs["initial_flow"] = initial_flow
    delaynode = DelayNode(model, name="delaynode", **kwargs)
    output = Output(model, name="output")

    catchment.connect(delaynode)
    delaynode.connect(output)

    rec = NumpyArrayNodeRecorder(model, output)

    model.run()
    if initial_flow:
        expected = np.concatenate(
            [np.full((delay, 2), initial_flow), flow_vals[:-delay, :]])
    else:
        expected = np.concatenate(
            [np.zeros((delay, 2)), flow_vals[:-delay, :]])

    assert_array_almost_equal(rec.data, expected)
Example #7
def test_annual_license_json():
    """
    This test demonstrates how an annual licence can be forcibly distributed
    evenly across a year. The licence must build up a surplus before it can
    use more than the average.
    """
    model = load_model("annual_license.json")

    model.timestepper.start = "2001-01-01"
    model.timestepper.end = "2001-01-31"
    model.timestepper.delta = 5
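    # With 5-day timesteps, each step draws 5 * (daily rate) from the licence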

    rec = NumpyArrayNodeRecorder(model, model.nodes["supply1"])

    model.run()

    initial_amount = 200.0
    # first day evenly apportions initial amount for each day of year
    first_day = initial_amount / 365
    assert_allclose(rec.data[0], first_day)
    # second day does the same, minus yesterday's use and with fewer days remaining
    remaining_days = 365 - 5
    second_day = (initial_amount - first_day * 5) / remaining_days
    assert_allclose(rec.data[1], second_day)
    # actual amount is the same as maximum was taken
    assert_allclose(first_day, second_day)
    # third day nothing is taken (no demand), so licence is saved
    assert_allclose(rec.data[2], 0.0)
    # fourth day more can be supplied as we've built up a surplus
    remaining_days = 365 - 5 * 3
    fourth_day = (initial_amount -
                  (first_day + second_day) * 5) / remaining_days
    assert_allclose(rec.data[3], fourth_day)
    assert fourth_day > first_day
Example #8
    def test_event_capture_with_node(self, cyclical_linear_model):
        """ Test Node flow events using a NodeThresholdRecorder """
        m = cyclical_linear_model

        otpt = m.nodes['Output']
        arry = NumpyArrayNodeRecorder(m, otpt)

        # Create the trigger using a threshold recorder
        trigger = NodeThresholdRecorder(m, otpt, 4.0, predicate='>')
        evt_rec = EventRecorder(m, trigger)

        m.run()

        # Ensure there is at least one event
        assert evt_rec.events

        # Build a timeseries of when the events say an event is active
        triggered = np.zeros_like(arry.data, dtype=int)
        for evt in evt_rec.events:
            triggered[evt.start.index:evt.end.index, evt.scenario_index.global_id] = 1

            # Check the duration
            td = evt.end.datetime - evt.start.datetime
            assert evt.duration == td.days

        # Test that the flows at the Output node during the event periods match
        assert_equal(triggered, arry.data > 4)
Example #9
    def test_statistic_recorder(self, cyclical_storage_model, recorder_agg_func):
        """ Test EventStatisticRecorder """
        m = cyclical_storage_model

        strg = m.nodes['Storage']
        inpt = m.nodes['Input']
        arry = NumpyArrayNodeRecorder(m, inpt)

        # Create the trigger using a threshold recorder
        trigger = StorageThresholdRecorder(m, strg, 4.0, predicate='<=')
        evt_rec = EventRecorder(m, trigger, tracked_parameter=inpt.max_flow)
        evt_stat = EventStatisticRecorder(m, evt_rec, agg_func='max', event_agg_func='min', recorder_agg_func=recorder_agg_func)

        m.run()

        # Ensure there is at least one event
        assert evt_rec.events

        evt_values = {si.global_id:[] for si in m.scenarios.combinations}
        for evt in evt_rec.events:
            evt_values[evt.scenario_index.global_id].append(np.min(arry.data[evt.start.index:evt.end.index, evt.scenario_index.global_id]))

        func = TestEventRecorder.funcs[recorder_agg_func]

        agg_evt_values = []
        for k, v in sorted(evt_values.items()):
            if len(v) > 0:
                agg_evt_values.append(func(v))
            else:
                agg_evt_values.append(np.nan)

        # Test that the statistic recorder matches the manually aggregated event values
        assert_allclose(evt_stat.values(), agg_evt_values)
        assert_allclose(evt_stat.aggregated_value(), np.max(agg_evt_values))
Example #10
def test_sdc_recorder():
    """
    Test the StorageDurationCurveRecorder
    """
    model = load_model("timeseries3.json")
    inpt = model.nodes['catchment1']
    strg = model.nodes['reservoir1']

    percentiles = np.linspace(20., 100., 5)
    flow_rec = NumpyArrayNodeRecorder(model, inpt)
    rec = StorageDurationCurveRecorder(model, strg, percentiles, sdc_agg_func="max", agg_func="min")

    # test retrieval of recorder
    assert model.recorders['storagedurationcurverecorder.reservoir1'] == rec

    model.run()

    # Manually calculate expected storage and percentiles
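    # (the 23.0 below is the constant demand assumed to be defined in timeseries3.json)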
    strg_volume = strg.initial_volume + np.cumsum(flow_rec.data - 23.0, axis=0)
    strg_pciles = np.percentile(strg_volume, percentiles, axis=0)

    assert_allclose(rec.sdc, strg_pciles)
    assert_allclose(np.max(rec.sdc, axis=0), rec.values())
    assert_allclose(np.min(np.max(rec.sdc, axis=0)), rec.aggregated_value())

    assert rec.sdc.shape == (len(percentiles), len(model.scenarios.combinations))
    df = rec.to_dataframe()
    assert df.shape == (len(percentiles), len(model.scenarios.combinations))
Example #11
def test_two_scenarios(simple_linear_model):
    """Basic test of Scenario functionality"""
    model = simple_linear_model  # Convenience renaming

    scenario_input = Scenario(model, 'Inflow', size=2)
    model.nodes["Input"].max_flow = ConstantScenarioParameter(
        model, scenario_input, [5.0, 10.0])

    scenario_outflow = Scenario(model,
                                'Outflow',
                                size=2,
                                ensemble_names=['High', 'Low'])
    model.nodes["Output"].max_flow = ConstantScenarioParameter(
        model, scenario_outflow, [3.0, 8.0])
    model.nodes["Output"].cost = -2.0

    # Check ensemble names are provided in the multi-index
    index = model.scenarios.multiindex
    assert index.levels[0].name == 'Inflow'
    assert index.levels[1].name == 'Outflow'
    assert np.all(index.levels[1] == ['High', 'Low'])

    # add numpy recorders to input and output nodes
    NumpyArrayNodeRecorder(model, model.nodes["Input"], "input")
    NumpyArrayNodeRecorder(model, model.nodes["Output"], "output")

    expected_node_results = {
        "Input": [3.0, 5.0, 3.0, 8.0],
        "Link": [3.0, 5.0, 3.0, 8.0],
        "Output": [3.0, 5.0, 3.0, 8.0],
    }

    assert_model(model, expected_node_results)

    model.run()

    # combine recorder outputs to a single dataframe
    df = model.to_dataframe()
    assert df.shape == (365, 2 * 2 * 2)
    assert_allclose(df["input", 0, 'High'].iloc[0], 3.0)
    assert_allclose(df["input", 0, 'Low'].iloc[0], 5.0)
    assert_allclose(df["input", 1, 'High'].iloc[0], 3.0)
    assert_allclose(df["input", 1, 'Low'].iloc[0], 8.0)
Example #12
def main(filename):
    base, ext = os.path.splitext(filename)
    m = Model.load(filename, solver='glpk-dcopf')

    gen1 = NumpyArrayNodeRecorder(m, m.nodes['gen1'])
    pv2 = NumpyArrayNodeRecorder(m, m.nodes['pv2'])
    ProgressRecorder(m)
    CSVRecorder(m, f'{base}.csv')

    m.setup()
    stats = m.run()
    print(stats.to_dataframe())

    df = pandas.concat({'gen1': gen1.to_dataframe(), 'pv2': pv2.to_dataframe()}, axis=1)

    fig, ax = plt.subplots(figsize=(8, 4))
    df.plot(ax=ax)
    df.resample('D').mean().plot(ax=ax, color='black')
    ax.set_ylabel('MW')
    fig.savefig(f'{base}.png', dpi=300)

    fig, ax = plt.subplots(figsize=(8, 4))
    df.resample('M').sum().plot(ax=ax)
    ax.set_ylabel('MWh per month')
    fig.savefig(f'{base}-monthly.png', dpi=300)

    plt.show()
Example #13
def add_node_array_recorders(model):
    """ Helper function to add NumpyArrayXXX recorders to a Pywr model. """

    # Add node recorders
    for node in model.nodes:
        if isinstance(node, Node):
            name = '__{}__:{}'.format(node.name, 'simulated_flow')
            NumpyArrayNodeRecorder(model, node, name=name)
        elif isinstance(node, Storage):
            name = '__{}__:{}'.format(node.name, 'simulated_volume')
            NumpyArrayStorageRecorder(model, node, name=name)
        else:
            import warnings
            warnings.warn(
                'Unrecognised node subclass "{}" with name "{}". Skipping '
                'recording this node.'.format(node.__class__.__name__,
                                              node.name), RuntimeWarning)
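
A minimal usage sketch for the helper above, reusing the `load_model` helper and the `timeseries3.json` model (with its `catchment1` node) from Example #10; the pairing is illustrative, not taken from the source:

    model = load_model("timeseries3.json")
    add_node_array_recorders(model)
    model.run()
    # Recorders follow the '__<node name>__:<attribute>' convention used above
    flows = model.recorders['__catchment1__:simulated_flow'].to_dataframe()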
Example #14
    def _add_node_flagged_recorders(self, model):

        for node in model.nodes:
            try:
                flags = self._node_recorder_flags[node.name]
            except KeyError:
                flags = {
                    'timeseries': True
                }  # Default to recording timeseries if not defined.
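            # Example flags mapping (hypothetical):
            #   self._node_recorder_flags = {'Demand': {'timeseries': True, 'deficit': True}}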

            for flag, to_record in flags.items():
                if not to_record:
                    continue

                if flag == 'timeseries':
                    #if isinstance(node, (Node, Generator, Load, Line)):
                    if isinstance(node, Node):
                        name = '__{}__:{}'.format(node.name, 'simulated_flow')
                        NumpyArrayNodeRecorder(model, node, name=name)
                    elif isinstance(node, Storage):
                        name = '__{}__:{}'.format(node.name,
                                                  'simulated_volume')
                        NumpyArrayStorageRecorder(model, node, name=name)
                    else:
                        import warnings
                        warnings.warn(
                            'Unrecognised node subclass "{}" with name "{}" for timeseries recording. Skipping '
                            'recording this node.'.format(
                                node.__class__.__name__, node.name),
                            RuntimeWarning)

                elif flag == 'deficit':
                    if isinstance(node, Node):
                        deficit_parameter = DeficitParameter(model, node)
                        name = '__{}__:{}'.format(node.name,
                                                  'simulated_deficit')
                        NumpyArrayParameterRecorder(model,
                                                    deficit_parameter,
                                                    name=name)
                    else:
                        import warnings
                        warnings.warn(
                            'Unrecognised node subclass "{}" with name "{}" for deficit recording. Skipping '
                            'recording this node.'.format(
                                node.__class__.__name__, node.name),
                            RuntimeWarning)
Example #15
def test_reset_timestepper_recorder(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01')
    )

    inpt = Input(model, "input", max_flow=10)
    otpt = Output(model, "output", max_flow=50, cost=-10)
    inpt.connect(otpt)

    rec = NumpyArrayNodeRecorder(model, otpt)

    model.run()

    model.timestepper.end = pandas.to_datetime("2016-01-02")

    model.run()
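
If the recorder reallocates its data array when the model is reset (an assumption this test does not verify), the second run could additionally be checked with:

    assert rec.data.shape == (2, 1)  # assumed: array resized to the extended two-day run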
Example #16
    def _make_rainfall_node(self, model, rainfall, cost):

        if not isinstance(self.area, Parameter):
            log.warning(
                'Weather nodes can only be created if an area Parameter is given.'
            )
            return

        if rainfall is None:
            try:
                rainfall_param = load_parameter(model,
                                                f'__{self.name}__:rainfall')
            except KeyError:
                log.warning(
                    f"Please specify a rainfall or weather input on node {self.name}"
                )
                return
        elif isinstance(rainfall, (pd.DataFrame, pd.Series)):
            # Assume a DataFrame or Series of monthly values
            rainfall = rainfall.astype(np.float64)
            rainfall_param = MonthlyProfileParameter(model, rainfall)
        else:
            rainfall_param = rainfall

        # Create the flow parameters multiplying area by rate of rainfall/evap
        rainfall_flow_param = AggregatedParameter(
            model, [rainfall_param, self.const, self.area], agg_func='product')

        # Create the nodes to provide the flows
        rainfall_node = Input(model,
                              '{}.rainfall'.format(self.name),
                              parent=self)
        rainfall_node.max_flow = rainfall_flow_param
        rainfall_node.cost = cost

        rainfall_node.connect(self)
        self.rainfall_node = rainfall_node

        # Finally record these flows
        self.rainfall_recorder = NumpyArrayNodeRecorder(
            model, rainfall_node, name=f'__{rainfall_node.name}__:rainfall')
Example #17
    def _make_evaporation_node(self, model, evaporation, cost):

        if not isinstance(self.area, Parameter):
            log.warning(
                'Evaporation nodes can only be created if an area Parameter is given.'
            )
            return

        if evaporation is None:
            try:
                evaporation_param = load_parameter(
                    model, f'__{self.name}__:evaporation')
            except KeyError:
                log.warning(
                    f"Please specify an evaporation or weather input on node {self.name}"
                )
                return
        elif isinstance(evaporation, (pd.DataFrame, pd.Series)):
            evaporation = evaporation.astype(np.float64)
            evaporation_param = MonthlyProfileParameter(model, evaporation)
        else:
            evaporation_param = evaporation

        evaporation_flow_param = AggregatedParameter(
            model, [evaporation_param, self.const, self.area],
            agg_func='product')

        evaporation_node = Output(model,
                                  '{}.evaporation'.format(self.name),
                                  parent=self)
        evaporation_node.max_flow = evaporation_flow_param
        evaporation_node.cost = cost

        self.connect(evaporation_node)
        self.evaporation_node = evaporation_node

        self.evaporation_recorder = NumpyArrayNodeRecorder(
            model,
            evaporation_node,
            name=f'__{evaporation_node.name}__:evaporation')
Example #18
def test_demand_saving_with_indexed_array():
    """Test demand saving based on reservoir control curves

    This is a relatively complex test to pass due to the large number of
    dependencies of the parameters actually being tested. The test is an
    example of how demand savings can be applied in times of drought based
    on the state of a reservoir.
    """

    model = load_model("demand_saving2.json")

    model.timestepper.end = pd.Timestamp("2016-01-31")

    rec_demand = NumpyArrayNodeRecorder(model, model.nodes["Demand"])
    rec_storage = NumpyArrayStorageRecorder(model, model.nodes["Reservoir"])

    model.check()
    model.run()

    max_volume = model.nodes["Reservoir"].max_volume

    # model starts with no demand saving
    demand_baseline = 50.0
    demand_factor = 0.9  # jan-apr
    demand_saving = 1.0
    assert_allclose(rec_demand.data[0, 0],
                    demand_baseline * demand_factor * demand_saving)

    # first control curve breached
    demand_saving = 0.95
    assert (rec_storage.data[4, 0] < (0.8 * max_volume))
    assert_allclose(rec_demand.data[5, 0],
                    demand_baseline * demand_factor * demand_saving)

    # second control curve breached
    demand_saving = 0.5
    assert (rec_storage.data[11, 0] < (0.5 * max_volume))
    assert_allclose(rec_demand.data[12, 0],
                    demand_baseline * demand_factor * demand_saving)
Example #19
def test_concatenated_dataframes(simple_storage_model):
    """
    Test that Model.to_dataframe returns something sensible.

    """
    model = simple_storage_model

    scA = Scenario(model, 'A', size=2)
    scB = Scenario(model, 'B', size=3)

    res = model.nodes['Storage']
    rec1 = NumpyArrayStorageRecorder(model, res)
    otpt = model.nodes['Output']
    rec2 = NumpyArrayNodeRecorder(model, otpt)
    # The following can't return a DataFrame; it is included to check
    # that it doesn't cause any issues
    rec3 = TotalDeficitNodeRecorder(model, otpt)

    model.run()

    df = model.to_dataframe()
    assert df.shape == (5, 2*2*3)
    assert df.columns.names == ['Recorder', 'A', 'B']
Example #20
def test_pv_generator():

    m = Model.load(os.path.join(TEST_FOLDER, 'models', 'pv-generator.json'),
                   solver='glpk-dcopf')

    gen1 = NumpyArrayNodeRecorder(m, m.nodes['gen1'])
    pv2 = NumpyArrayNodeRecorder(m, m.nodes['pv2'])

    m.setup()
    m.run()

    df = pandas.concat({
        'gen1': gen1.to_dataframe(),
        'pv2': pv2.to_dataframe()
    }, axis=1)

    assert df.shape[0] == 745
Example #21
def test_simple_battery():

    m = Model.load(os.path.join(TEST_FOLDER, 'models', 'simple-battery.json'),
                   solver='glpk-dcopf')

    gen1 = NumpyArrayNodeRecorder(m, m.nodes['gen1'])
    pv2 = NumpyArrayNodeRecorder(m, m.nodes['pv2'])
    battery1 = NumpyArrayStorageRecorder(m, m.nodes['battery1'])

    m.setup()
    m.run()

    df = pandas.concat(
        {
            'gen1': gen1.to_dataframe(),
            'pv2': pv2.to_dataframe(),
            'battery1': battery1.to_dataframe()
        },
        axis=1)

    assert df.shape[0] == 745
Example #22
def test_keating_aquifer(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01'),
    )

    aqfer = KeatingAquifer(
        model,
        'keating',
        num_streams,
        num_additional_inputs,
        stream_flow_levels,
        transmissivity,
        coefficient,
        levels,
        area=area,
        storativity=storativity,
    )

    catchment = Input(model, 'catchment', max_flow=0)
    stream = Output(model, 'stream', max_flow=np.inf, cost=0)
    abstraction = Output(model, 'abstraction', max_flow=15, cost=-999)

    catchment.connect(aqfer)
    aqfer.connect(stream, from_slot=0)
    aqfer.connect(abstraction, from_slot=1)

    rec_level = NumpyArrayLevelRecorder(model, aqfer)
    rec_volume = NumpyArrayStorageRecorder(model, aqfer)
    rec_stream = NumpyArrayNodeRecorder(model, stream)
    rec_abstraction = NumpyArrayNodeRecorder(model, abstraction)

    model.check()

    assert(len(aqfer.inputs) == (num_streams + num_additional_inputs))

    for initial_level in (50, 100, 110, 150):
        # set the initial aquifer level and therefore the initial volume
        aqfer.initial_level = initial_level
        initial_volume = aqfer.initial_volume
        assert(initial_volume == (area * storativity[0] * initial_level * 0.001))
        # run the model (for one timestep only)
        model.run()
        # manually calculate keating streamflow and check model flows are OK
        Qp = 2 * transmissivity[0] * max(initial_level - stream_flow_levels[0][0], 0) * coefficient
        Qe = 2 * transmissivity[1] * max(initial_level - stream_flow_levels[0][1], 0) * coefficient
        delta_storage = initial_volume - rec_volume.data[0, 0]
        abs_flow = rec_abstraction.data[0, 0]
        stream_flow = rec_stream.data[0, 0]
        assert(delta_storage == (stream_flow + abs_flow))
        assert(stream_flow == (Qp+Qe))

    A_VERY_LARGE_NUMBER = 9999999999999
    model.timestepper.end = pandas.to_datetime('2016-01-02')

    # fill the aquifer completely
    # there is no spill for the storage so it should find no feasible solution
    with pytest.raises(RuntimeError):
        catchment.max_flow = A_VERY_LARGE_NUMBER
        catchment.min_flow = A_VERY_LARGE_NUMBER
        model.run()

    # drain the aquifer completely
    catchment.min_flow = 0
    catchment.max_flow = 0
    abstraction.max_flow = A_VERY_LARGE_NUMBER
    model.run()
    assert(rec_volume.data[1, 0] == 0)
    abs_flow = rec_abstraction.data[1, 0]
    stream_flow = rec_stream.data[1, 0]
    assert(stream_flow == 0)
    assert(abs_flow == 0)