Example #1
    def test_with_nonstorage_load(self, model):
        """ Test load from dict with 'storage_node' key. """
        m = model
        m.scenarios.setup()
        s = Storage(m, 'Storage', max_volume=100.0)
        l = Link(m, 'Link')

        data = {
            "type": "controlcurve",
            "control_curve": 0.8,
            "values": [10.0, 0.0],
            "storage_node": "Storage"
        }

        l.cost = p = load_parameter(model, data)
        assert isinstance(p, ControlCurveParameter)

        s.setup(m)  # Init memory view on storage (bypasses usual `Model.setup`)
        si = ScenarioIndex(0, np.array([0], dtype=np.int32))
        print(s.volume)
        assert_allclose(l.get_cost(m.timestepper.current, si), 0.0)
        # When storage volume changes, the cost of the link changes.
        s.initial_volume = 90.0
        m.reset()
        assert_allclose(l.get_cost(m.timestepper.current, si), 10.0)
Example #2
def simple_storage_model(request, solver):
    """
    Make a simple model with a single Input, Storage and Output.
    
    Input -> Storage -> Output
    """

    model = pywr.core.Model(start=pandas.to_datetime('2016-01-01'),
                            end=pandas.to_datetime('2016-01-05'),
                            timestep=datetime.timedelta(1),
                            solver=solver)

    inpt = Input(model, name="Input", max_flow=5.0, cost=-1)
    res = Storage(model,
                  name="Storage",
                  num_outputs=1,
                  num_inputs=1,
                  max_volume=20,
                  initial_volume=10)
    otpt = Output(model, name="Output", max_flow=8, cost=-999)

    inpt.connect(res)
    res.connect(otpt)

    return model
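
A minimal usage sketch, not part of the original suite, showing how this fixture might be consumed by a test; the test name and the reasoning in the comment are illustrative assumptions.

def test_simple_storage_model_runs(simple_storage_model):
    # Hypothetical test: the Input supplies at most 5/day while the Output can
    # take up to 8/day, so the 10 units of initial storage are drawn down as
    # the four daily timesteps are solved.
    model = simple_storage_model
    model.run()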
Example #3
def test_daily_profile_control_curve(simple_linear_model):
    """ Test `DailyProfileControlCurveParameter` """
    model = simple_linear_model
    s = Storage(model, 'Storage', max_volume=100.0)
    l = Link(model, 'Link2')

    data = {
        'type': 'dailyprofilecontrolcurve',
        'control_curves': [0.8, 0.6],
        'values': [[1.0] * 366, [0.7] * np.arange(366), [0.3] * 366],
        'storage_node': 'Storage'
    }

    l.max_flow = p = load_parameter(model, data)
    model.setup()

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        v = s.initial_volume
        doy = timestep.dayofyear
        if v >= 80.0:
            expected = 1.0
        elif v >= 60:
            expected = 0.7 * (doy - 1)
        else:
            expected = 0.3

        return expected

    for initial_volume in (90, 70, 30):
        s.initial_volume = initial_volume
        model.run()
Example #4
def test_scenario_storage():
    """Test the behaviour of Storage nodes with multiple scenarios

    The model has two inflow scenarios, with flows of 5 and 10. The volume in
    the storage node is expected to increase at different rates in the two
    scenarios.
    """
    model = Model()

    i = Input(model, 'input', max_flow=999)
    s = Storage(model, 'storage', num_inputs=1, num_outputs=1, max_volume=1000, initial_volume=500)
    o = Output(model, 'output', max_flow=999)

    scenario_input = Scenario(model, 'Inflow', size=2)
    i.min_flow = ConstantScenarioParameter(model, scenario_input, [5.0, 10.0])

    i.connect(s)
    s.connect(o)

    s_rec = NumpyArrayStorageRecorder(model, s)

    model.run()

    assert_allclose(i.flow, [5, 10])
    assert_allclose(s_rec.data[0], [505, 510])
    assert_allclose(s_rec.data[1], [510, 520])
Example #5
    def test_with_values(self, model):
        """Test with `values` keyword argument"""
        m = model
        s = Storage(m, 'Storage', max_volume=100.0)

        # Return 1.0 above the upper curve, 0.7 between the curves and 0.4 below
        s.cost = ControlCurveParameter(s, [0.8, 0.6], [1.0, 0.7, 0.4])
        self._assert_results(m, s)
Example #6
    def test_single_cc_load(self, model):
        """ Test load from dict with 'control_curve' key

        This differs from the previous test by using the singular 'control_curve' key in the dict.
        """
        m = model
        m.scenarios.setup()
        s = Storage(m, 'Storage', max_volume=100.0)

        data = {
            "type": "controlcurve",
            "storage_node": "Storage",
            "control_curve": 0.8,
        }

        s.cost = p = load_parameter(model, data)
        assert isinstance(p, ControlCurveParameter)

        s.setup(m)  # Init memory view on storage (bypasses usual `Model.setup`)

        si = ScenarioIndex(0, np.array([0], dtype=np.int32))
        s.initial_volume = 90.0
        m.reset()
        assert_allclose(s.get_cost(m.timestepper.current, si), 0)

        s.initial_volume = 70.0
        m.reset()
        assert_allclose(s.get_cost(m.timestepper.current, si), 1)
Example #7
    def test_with_parameters(self, model):
        """ Test with `parameters` keyword argument. """
        m = model

        s = Storage(m, 'Storage', max_volume=100.0)

        # Two different control curves
        cc = [ConstantParameter(0.8), ConstantParameter(0.6)]
        # Three different parameters to return
        params = [
            ConstantParameter(1.0), ConstantParameter(0.7), ConstantParameter(0.4)
        ]
        s.cost = ControlCurveParameter(s, cc, parameters=params)

        self._assert_results(m, s)
Example #8
def three_storage_model(request):
    """
    Make a simple model with three input, storage and output nodes. Also adds
    an `AggregatedStorage` and `AggregatedNode`.

        Input 0 -> Storage 0 -> Output 0
        Input 1 -> Storage 1 -> Output 1
        Input 2 -> Storage 2 -> Output 2


    """

    model = pywr.core.Model(
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-05'),
        timestep=datetime.timedelta(1),
    )

    all_res = []
    all_otpt = []

    for num in range(3):
        inpt = Input(model,
                     name="Input {}".format(num),
                     max_flow=5.0 * num,
                     cost=-1)
        res = Storage(model,
                      name="Storage {}".format(num),
                      num_outputs=1,
                      num_inputs=1,
                      max_volume=20,
                      initial_volume=10 + num)
        otpt = Output(model,
                      name="Output {}".format(num),
                      max_flow=8 + num,
                      cost=-999)

        inpt.connect(res)
        res.connect(otpt)

        all_res.append(res)
        all_otpt.append(otpt)

    AggregatedStorage(model, name='Total Storage', storage_nodes=all_res)
    AggregatedNode(model, name='Total Output', nodes=all_otpt)
    return model
Example #9
def test_scaled_profile_nested_load(model):
    """ Test `ScaledProfileParameter` loading with `AggregatedParameter` """
    model.timestepper.delta = 15

    s = Storage(model, 'Storage', max_volume=100.0, num_outputs=0)
    d = Output(model, 'Link')
    data = {
        'type': 'scaledprofile',
        'scale': 50.0,
        'profile': {
            'type': 'aggregated',
            'agg_func': 'product',
            'parameters': [{
                'type': 'monthlyprofile',
                'values': [0.5] * 12
            }, {
                'type': 'monthlyprofilecontrolcurve',
                'control_curves': [0.8, 0.6],
                'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
                'storage_node': 'Storage'
            }]
        }
    }

    s.connect(d)

    d.max_flow = p = load_parameter(model, data)

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        if s.initial_volume == 90:
            return 50.0 * 0.5 * 1.0
        elif s.initial_volume == 70:
            return 50.0 * 0.5 * 0.7 * (timestep.month - 1)
        else:
            return 50.0 * 0.5 * 0.3

    for initial_volume in (90, 70, 30):
        s.initial_volume = initial_volume
        model.run()
Example #10
def test_scaled_profile_nested_load(model):
    """ Test `ScaledProfileParameter` loading with `AggregatedParameter` """

    s = Storage(model, 'Storage', max_volume=100.0)
    l = Link(model, 'Link')
    data = {
        'type': 'scaledprofile',
        'scale': 50.0,
        'profile': {
            'type': 'aggregated',
            'agg_func': 'product',
            'parameters': [{
                'type': 'monthlyprofile',
                'values': [0.5] * 12
            }, {
                'type': 'monthlyprofilecontrolcurve',
                'control_curves': [0.8, 0.6],
                'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
                'storage_node': 'Storage'
            }]
        }
    }

    l.max_flow = p = load_parameter(model, data)

    p.setup(model)

    # Test correct aggregation is performed
    model.scenarios.setup()
    s.setup(model)  # Init memory view on storage (bypasses usual `Model.setup`)

    s.initial_volume = 90.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        np.testing.assert_allclose(p.value(ts, si), 50.0 * 0.5 * 1.0)

    s.initial_volume = 70.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        np.testing.assert_allclose(p.value(ts, si),
                                   50.0 * 0.5 * 0.7 * (mth - 1))

    s.initial_volume = 30.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        np.testing.assert_allclose(p.value(ts, si), 50.0 * 0.5 * 0.3)
Example #11
    def test_with_nonstorage(self, model):
        """ Test usage on non-`Storage` node. """
        # Now test if the parameter is used on a non storage node
        m = model
        m.scenarios.setup()
        s = Storage(m, 'Storage', max_volume=100.0)

        l = Link(m, 'Link')
        cc = ConstantParameter(0.8)
        l.cost = ControlCurveParameter(s, cc, [10.0, 0.0])

        s.setup(m)  # Init memory view on storage (bypasses usual `Model.setup`)
        print(s.volume)
        si = ScenarioIndex(0, np.array([0], dtype=np.int32))
        assert_allclose(l.get_cost(m.timestepper.current, si), 0.0)
        # When storage volume changes, the cost of the link changes.
        s.initial_volume = 90.0
        m.reset()
        print(s.volume)
        assert_allclose(l.get_cost(m.timestepper.current, si), 10.0)
Example #12
def test_scaled_profile_nested_load(model):
    """ Test `ScaledProfileParameter` loading with `AggregatedParameter` """
    model.timestepper.delta = 15

    s = Storage(model,
                'Storage',
                max_volume=100.0,
                initial_volume=50.0,
                num_outputs=0)
    d = Output(model, 'Link')
    data = {
        'type': 'scaledprofile',
        'scale': 50.0,
        'profile': {
            'type': 'aggregated',
            'agg_func': 'product',
            'parameters': [{
                'type': 'monthlyprofile',
                'values': [0.5] * 12
            }, {
                'type': 'constant',
                'value': 1.5,
            }]
        }
    }

    s.connect(d)

    d.max_flow = p = load_parameter(model, data)

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        return 50.0 * 0.5 * 1.5

    model.run()
Example #13
    def test_no_scale_no_profile(self, simple_linear_model):
        """ No scale or profile specified """
        model = simple_linear_model
        s = Storage(model, 'Storage', max_volume=100.0)
        l = Link(model, 'Link2')

        data = {
            'type': 'monthlyprofilecontrolcurve',
            'control_curves': [0.8, 0.6],
            'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
            'storage_node': 'Storage'
        }

        l.max_flow = p = load_parameter(model, data)
        self._assert_results(model, s, p)
Example #14
    def test_values_load(self, model):
        """ Test load of float lists. """

        m = model
        s = Storage(m, 'Storage', max_volume=100.0)

        data = {
            "type": "controlcurve",
            "control_curves": [0.8, 0.6],
            "values": [1.0, 0.7, 0.4],
            "storage_node": "Storage"
        }

        s.cost = p = load_parameter(model, data)
        assert isinstance(p, ControlCurveParameter)
        self._assert_results(m, s)
Example #15
    def test_scale_no_profile(self, simple_linear_model):
        """ Test `MonthlyProfileControlCurveParameter` """
        model = simple_linear_model
        s = Storage(model, 'Storage', max_volume=100.0)
        l = Link(model, 'Link2')

        data = {
            'type': 'monthlyprofilecontrolcurve',
            'control_curves': [0.8, 0.6],
            'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
            'storage_node': 'Storage',
            'scale': 1.5
        }

        l.max_flow = p = load_parameter(model, data)
        model.setup()
        self._assert_results(model, s, p, scale=1.5)
Example #16
    def test_no_scale_profile(self, model):
        """ No scale, but profile array specified """

        s = Storage(model, 'Storage', max_volume=100.0)
        l = Link(model, 'Link')

        data = {
            'type': 'monthlyprofilecontrolcurve',
            'control_curves': [0.8, 0.6],
            'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
            'storage_node': 'Storage',
            'profile': [1.5]*12
        }

        l.max_flow = p = load_parameter(model, data)
        p.setup(model)
        model.scenarios.setup()
        self._assert_results(model, s, p, scale=1.5)
Example #17
def test_daily_profile_control_curve(model):
    """ Test `DailyProfileControlCurveParameter` """

    s = Storage(model, 'Storage', max_volume=100.0)
    l = Link(model, 'Link')

    data = {
        'type': 'dailyprofilecontrolcurve',
        'control_curves': [0.8, 0.6],
        'values': [[1.0]*366, [0.7]*np.arange(366), [0.3]*366],
        'storage_node': 'Storage'
    }

    l.max_flow = p = load_parameter(model, data)
    p.setup(model)

    # Test correct aggregation is performed
    model.scenarios.setup()
    s.setup(model)  # Init memory view on storage (bypasses usual `Model.setup`)

    s.initial_volume = 90.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        np.testing.assert_allclose(p.value(ts, si), 1.0)

    s.initial_volume = 70.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        doy = ts.datetime.dayofyear
        np.testing.assert_allclose(p.value(ts, si), 0.7*(doy - 1))

    s.initial_volume = 30.0
    model.reset()  # Set initial volume on storage
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    for mth in range(1, 13):
        ts = Timestep(datetime.datetime(2016, mth, 1), 366, 1.0)
        np.testing.assert_allclose(p.value(ts, si), 0.3)
Example #18
    def test_parameters_load(self, model):
        """ Test load of parameter lists for 'control_curves' and 'parameters' keys. """

        m = model
        s = Storage(m, 'Storage', max_volume=100.0)

        data = {
            "type": "controlcurve",
            "storage_node": "Storage",
            "control_curves": [
                {
                    "type": "constant",
                    "value": 0.8
                },
                {
                    "type": "monthlyprofile",
                    "values": [0.6]*12
                }
            ],
            "parameters": [
                {
                    "type": "constant",
                    "value": 1.0,
                },
                {
                    "type": "constant",
                    "value": 0.7
                },
                {
                    "type": "constant",
                    "value": 0.4
                }
            ]
        }

        s.cost = p = load_parameter(model, data)
        assert isinstance(p, ControlCurveParameter)
        self._assert_results(m, s)
Example #19
# Separate non-storage dicts allow greater flexibility when recording model results
storage = {}
non_storage = {}
non_storage_outputs = {}
non_storage_junctions = {}
non_storage_inputs = {}
non_storage_types = input_types + output_types + misc_types

for node_id, node_trait in node_lookup_id.items():
    types = node_trait['type']
    name = node_trait['name']
    if types in storage_types:
        num_outputs = node_trait['connect_in']
        num_inputs = node_trait['connect_out']
        storage[node_id] = Storage(model,
                                   name=name,
                                   num_outputs=num_outputs,
                                   num_inputs=num_inputs)
    elif types in output_types:
        non_storage_outputs[node_id] = Output(model, name=name)
    elif types in misc_types:
        non_storage_junctions[node_id] = Link(model, name=name)
    elif types in input_types:
        non_storage_inputs[node_id] = Input(model, name=name)
    else:
        raise Exception("Oops, missed a type!")

non_storage = {
    **non_storage_inputs,
    **non_storage_outputs,
    **non_storage_junctions
}
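
For context, a hypothetical sketch of the inputs the snippet above assumes; none of these names or values come from the original source, only the shapes matter.

# Hypothetical inputs: the *_types collections are lists of type names, and
# node_lookup_id maps a node id to its type, name and connection counts.
storage_types = ["Reservoir"]
input_types = ["Catchment"]
output_types = ["Demand"]
misc_types = ["Junction"]

node_lookup_id = {
    1: {"type": "Catchment", "name": "Inflow (node)", "connect_in": 0, "connect_out": 1},
    2: {"type": "Reservoir", "name": "Reservoir (node)", "connect_in": 1, "connect_out": 1},
    3: {"type": "Demand", "name": "Demand (node)", "connect_in": 1, "connect_out": 0},
}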
Example #20
    def create_model(self, network, template, initial_volumes=None):

        model = Model(solver='glpk-edge')

        # -----------------GENERATE NETWORK STRUCTURE -----------------------

        output_ids = []
        input_ids = []

        non_storage_types = list(output_types.keys()) + list(
            input_types.keys()) + list(node_types.keys())

        # create node dictionaries by name and id
        node_lookup = {}
        for node in network['nodes']:
            name = '{} (node)'.format(node['name'])
            types = [
                t for t in node['types'] if t['template_id'] == template['id']
            ]
            if not types:
                continue
            if len(types) > 1:
                msg = "Type is ambiguous for {}. Please remove extra types.".format(
                    name)
                raise Exception(msg)
            type_name = types[-1]['name']
            node_lookup[node.get("id")] = {
                'type': type_name,
                'name': name,
                'connect_in': 0,
                'connect_out': 0,
            }
            if type_name in output_types:
                output_ids.append(node['id'])
            elif type_name in input_types:
                input_ids.append(node['id'])

        # create link lookups and pywr links
        link_lookup = {}
        for link in network['links']:
            name = '{} (link)'.format(link['name'])
            types = [
                t for t in link['types'] if t['template_id'] == template['id']
            ]
            if not types:
                continue
            type_name = types[-1]['name']
            link_id = link['id']
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']
            node_lookup[node_1_id]['connect_out'] += 1
            node_lookup[node_2_id]['connect_in'] += 1
            link_lookup[link_id] = {
                'name': name,
                'type': type_name,
                'node_1_id': node_1_id,
                'node_2_id': node_2_id,
                'from_slot': node_lookup[node_1_id]['connect_out'] - 1,
                'to_slot': node_lookup[node_2_id]['connect_in'] - 1,
            }

            if node_1_id in output_ids:
                node = node_lookup[node_1_id]
                msg = 'Topology error: Output {} appears to be upstream of {}'.format(
                    node['name'], name)
                raise Exception(msg)
            elif node_2_id in input_ids:
                node = node_lookup[node_2_id]
                msg = 'Topology error: Input {} appears to be downstream of {}'.format(
                    node['name'], name)
                raise Exception(msg)

            LinkType = link_types.get(type_name, Link)
            self.non_storage[('link', link_id)] = LinkType(model, name=name)

        # Q/C

        # remove unconnected links
        d = []
        for link_id, link in link_lookup.items():
            if (link['node_1_id'] not in node_lookup
                    or link['node_2_id'] not in node_lookup):
                d.append(link_id)
        for link_id in d:
            del link_lookup[link_id]

        connected_nodes = []
        for link_id, link in link_lookup.items():
            connected_nodes.append(link['node_1_id'])
            connected_nodes.append(link['node_2_id'])

        # remove unconnected nodes
        d = []
        for node_id in node_lookup:
            if node_id not in connected_nodes:
                d.append(node_id)
        for node_id in d:
            del node_lookup[node_id]

        # create the pywr node objects (storage and non-storage), keyed by node id

        # TODO: change looping variable notation
        for node_id, node in node_lookup.items():
            type_name = node['type']
            name = node['name']
            connect_in = node.get('connect_in', 0)
            connect_out = node.get('connect_out', 0)
            if (type_name in storage_types
                    or connect_out > 1) and type_name not in non_storage_types:
                if initial_volumes is not None:
                    initial_volume = initial_volumes.get(node_id, 0.0)
                else:
                    initial_volume = 0.0
                self.storage[node_id] = Storage(model,
                                                name=name,
                                                num_outputs=connect_in,
                                                num_inputs=connect_out,
                                                initial_volume=initial_volume)
                if type_name not in storage_types:
                    self.storage[node_id].max_volume = 0.0
            else:

                if type_name in input_types:
                    NodeType = input_types[type_name]
                elif type_name in output_types:
                    NodeType = output_types[type_name]
                elif type_name in node_types:
                    NodeType = node_types[type_name]
                elif connect_in > 1:
                    NodeType = River
                else:
                    NodeType = Link

                self.non_storage[('node', node_id)] = NodeType(model,
                                                               name=name)

        # create network connections
        # must assign connection slots for storage
        # TODO: change looping variable notation
        for link_id, link in link_lookup.items():
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']

            _link = self.non_storage[('link', link_id)]
            up_storage = self.storage.get(node_1_id)
            up_node = self.non_storage.get(('node', node_1_id))
            down_storage = self.storage.get(node_2_id)
            down_node = self.non_storage.get(('node', node_2_id))

            if up_storage:
                up_storage.connect(_link, from_slot=link['from_slot'])
            else:
                up_node.connect(_link)

            if down_storage:
                _link.connect(down_storage, to_slot=link['to_slot'])
            else:
                _link.connect(down_node)

        self.model = model
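
A hypothetical illustration of the `network` and `template` structures that `create_model` reads; the ids and type names are invented here, and only the keys accessed in the method above are shown.

# Hypothetical inputs, shaped after the keys accessed in create_model above.
template = {"id": 7}
network = {
    "nodes": [
        {"id": 1, "name": "Inflow", "types": [{"template_id": 7, "name": "Catchment"}]},
        {"id": 2, "name": "Demand", "types": [{"template_id": 7, "name": "Demand"}]},
    ],
    "links": [
        {"id": 10, "name": "Main", "node_1_id": 1, "node_2_id": 2,
         "types": [{"template_id": 7, "name": "Conveyance"}]},
    ],
}
# builder.create_model(network, template, initial_volumes={2: 0.0})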
Example #21
def create_model(harmonic=True):
    # import flow timeseries for catchments
    flow = pd.read_csv(os.path.join('data', 'thames_stochastic_flow.gz'))

    flow['Date'] = flow['Date'].apply(pd.to_datetime)
    flow.set_index('Date', inplace=True)
    # resample input to weekly average
    flow = flow.resample('7D').mean()

    model = InspyredOptimisationModel(
        solver='glpk',
        start=flow.index[0],
        end=flow.index[365*10],  # roughly 10 years
        timestep=datetime.timedelta(7),  # weekly time-step
    )

    flow_parameter = ArrayIndexedParameter(model, flow['flow'].values)

    catchment1 = Input(model, 'catchment1', min_flow=flow_parameter, max_flow=flow_parameter)
    catchment2 = Input(model, 'catchment2', min_flow=flow_parameter, max_flow=flow_parameter)

    reservoir1 = Storage(model, 'reservoir1', min_volume=3000, max_volume=20000, initial_volume=16000)
    reservoir2 = Storage(model, 'reservoir2', min_volume=3000, max_volume=20000, initial_volume=16000)

    if harmonic:
        control_curve = AnnualHarmonicSeriesParameter(model, 0.5, [0.5], [0.0], mean_upper_bounds=1.0, amplitude_upper_bounds=1.0)
    else:
        control_curve = MonthlyProfileParameter(model, np.array([0.0]*12), lower_bounds=0.0, upper_bounds=1.0)

    control_curve.is_variable = True
    controller = ControlCurveParameter(model, reservoir1, control_curve, [0.0, 10.0])
    transfer = Link(model, 'transfer', max_flow=controller, cost=-500)

    demand1 = Output(model, 'demand1', max_flow=45.0, cost=-101)
    demand2 = Output(model, 'demand2', max_flow=20.0, cost=-100)

    river1 = Link(model, 'river1')
    river2 = Link(model, 'river2')

    # compensation flows from reservoirs
    compensation1 = Link(model, 'compensation1', max_flow=5.0, cost=-9999)
    compensation2 = Link(model, 'compensation2', max_flow=5.0, cost=-9998)

    terminator = Output(model, 'terminator', cost=1.0)

    catchment1.connect(reservoir1)
    catchment2.connect(reservoir2)
    reservoir1.connect(demand1)
    reservoir2.connect(demand2)
    reservoir2.connect(transfer)
    transfer.connect(reservoir1)
    reservoir1.connect(river1)
    reservoir2.connect(river2)
    river1.connect(terminator)
    river2.connect(terminator)

    reservoir1.connect(compensation1)
    reservoir2.connect(compensation2)
    compensation1.connect(terminator)
    compensation2.connect(terminator)

    r1 = TotalDeficitNodeRecorder(model, demand1)
    r2 = TotalDeficitNodeRecorder(model, demand2)
    r3 = AggregatedRecorder(model, [r1, r2], agg_func="mean")
    r3.is_objective = 'minimise'
    r4 = TotalFlowNodeRecorder(model, transfer)
    r4.is_objective = 'minimise'

    return model
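
A brief usage sketch, assuming the referenced flow data files exist locally: the model can be run once as a plain simulation before being handed to an optimiser, with the recorders attached above accumulating deficit and transfer totals during the run.

if __name__ == "__main__":
    model = create_model(harmonic=True)
    model.run()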
Example #22
def test_control_curve_interpolated(model):
    m = model
    m.scenarios.setup()
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))

    s = Storage(m, 'Storage', max_volume=100.0)

    cc = ConstantParameter(0.8)
    values = [20.0, 5.0, 0.0]
    s.cost = ControlCurveInterpolatedParameter(s, cc, values)
    s.setup(m)

    for v in (0.0, 10.0, 50.0, 80.0, 90.0, 100.0):
        s.initial_volume = v
        s.reset()
        assert_allclose(s.get_cost(m.timestepper.current, si), np.interp(v/100.0, [0.0, 0.8, 1.0], values[::-1]))

    # special case when control curve is 100%
    cc.update(np.array([1.0,]))
    s.initial_volume = 100.0
    s.reset()
    assert_allclose(s.get_cost(m.timestepper.current, si), values[1])

    # special case when control curve is 0%
    cc.update(np.array([0.0,]))
    s.initial_volume = 0.0
    s.reset()
    assert_allclose(s.get_cost(m.timestepper.current, si), values[0])