Example #1
def test_mean_flow_recorder(solver):
    model = Model(solver=solver)
    model.timestepper.start = pandas.to_datetime("2016-01-01")
    model.timestepper.end = pandas.to_datetime("2016-01-04")

    inpt = Input(model, "input")
    otpt = Output(model, "output")
    inpt.connect(otpt)

    rec_flow = NumpyArrayNodeRecorder(model, inpt)
    rec_mean = MeanFlowRecorder(model, node=inpt, timesteps=3)

    scenario = Scenario(model, "dummy", size=2)

    inpt.max_flow = inpt.min_flow = FunctionParameter(model, inpt, lambda model, t, si: 2 + t.index)
    model.run()

    expected = [
        2.0,
        (2.0 + 3.0) / 2,
        (2.0 + 3.0 + 4.0) / 3,
        (3.0 + 4.0 + 5.0) / 3,  # the zeroth day has dropped out of the 3-timestep window
    ]

    for value, expected_value in zip(rec_mean.data[:, 0], expected):
        assert_allclose(value, expected_value)
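
The expected values above are a trailing mean over at most timesteps=3 flows, where the daily flows 2, 3, 4, 5 come from the FunctionParameter lambda 2 + t.index. A minimal numpy sketch reproducing the arithmetic:

import numpy as np

flows = np.array([2.0, 3.0, 4.0, 5.0])  # 2 + t.index for the four daily timesteps
# trailing mean over a window of at most three timesteps
expected = [float(flows[max(0, i - 2):i + 1].mean()) for i in range(len(flows))]
print(expected)  # [2.0, 2.5, 3.0, 4.0]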
Example #2
def test_scenario_collection():
    """ Basic test of Scenario and ScenarioCollection API """

    model = Model()

    # There is 1 combination when there are no Scenarios
    model.scenarios.setup()
    assert(len(model.scenarios.combinations) == 1)
    assert(len(model.scenarios) == 0)
    scA = Scenario(model, 'Scenario A', size=3)
    model.scenarios.setup()
    assert(len(model.scenarios.combinations) == 3)
    assert(len(model.scenarios) == 1)
    scB = Scenario(model, 'Scenario B', size=2)
    model.scenarios.setup()
    assert(len(model.scenarios.combinations) == 6)
    assert(len(model.scenarios) == 2)

    assert_equal([comb.indices for comb in model.scenarios.combinations],
                 [[0, 0], [0, 1], [1, 0], [1, 1], [2, 0], [2, 1]])

    names = model.scenarios.combination_names
    for n, (ia, ib) in zip(names, [[0, 0], [0, 1], [1, 0], [1, 1], [2, 0], [2, 1]]):
        assert n == 'Scenario A.{:03d}-Scenario B.{:03d}'.format(ia, ib)

    index = model.scenarios.multiindex
    assert_equal(index.tolist(),
                 [[0, 0], [0, 1], [1, 0], [1, 1], [2, 0], [2, 1]])
    assert_equal(index.names, ['Scenario A', 'Scenario B'])
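
The six combinations asserted above are just the Cartesian product of the two scenario sizes (3 x 2). A stdlib sketch reproducing both the indices and the combination names:

from itertools import product

sizes = {'Scenario A': 3, 'Scenario B': 2}
combinations = list(product(*(range(s) for s in sizes.values())))
print(combinations)  # [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]

names = ['-'.join('{}.{:03d}'.format(name, i) for name, i in zip(sizes, comb))
         for comb in combinations]
print(names[0])  # Scenario A.000-Scenario B.000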
Example #3
def model(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime("2016-01-01"),
        end=pandas.to_datetime("2016-01-10"),
    )
    return model
Example #4
def test_scenario_storage():
    """Test the behaviour of Storage nodes with multiple scenarios

    The model defined has two inflow scenarios: 5 and 10. It is expected that
    the volume in the storage node should increase at different rates in the
    two scenarios.
    """
    model = Model()

    i = Input(model, 'input', max_flow=999)
    s = Storage(model, 'storage', num_inputs=1, num_outputs=1, max_volume=1000, initial_volume=500)
    o = Output(model, 'output', max_flow=999)

    scenario_input = Scenario(model, 'Inflow', size=2)
    i.min_flow = ConstantScenarioParameter(model, scenario_input, [5.0, 10.0])

    i.connect(s)
    s.connect(o)

    s_rec = NumpyArrayStorageRecorder(model, s)

    model.run()

    assert_allclose(i.flow, [5, 10])
    assert_allclose(s_rec.data[0], [505, 510])
    assert_allclose(s_rec.data[1], [510, 520])
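
The expected volumes follow from a simple mass balance: the output node has no negative cost to pull water through, so each scenario's forced inflow simply accumulates in storage. A plain-Python check of the asserted values:

initial_volume = 500.0
for inflow in (5.0, 10.0):
    volume = initial_volume
    for day in range(2):
        volume += inflow  # no outflow, so storage accumulates the full inflow
        print(inflow, day, volume)  # 505, 510 for inflow 5; 510, 520 for inflow 10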
Example #5
def test_mean_flow_recorder_days(solver):
    model = Model(solver=solver)
    model.timestepper.delta = 7

    inpt = Input(model, "input")
    otpt = Output(model, "output")
    inpt.connect(otpt)

    rec_mean = MeanFlowRecorder(model, node=inpt, days=31)

    model.run()
    assert(rec_mean.timesteps == 4)
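
With a 7-day timestep, MeanFlowRecorder(days=31) appears to round the averaging window down to whole timesteps, which is what the final assert encodes. A one-line restatement of the arithmetic:

days, delta = 31, 7
assert days // delta == 4  # 31 days spans four whole 7-day timesteps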
Example #6
def create_model():
    # create a model
    model = Model(start="2016-01-01", end="2019-12-31", timestep=7)

    # create three nodes (an input, a link, and an output)
    A = Input(model, name="A", max_flow=10.0)
    B = Link(model, name="B", cost=1.0)
    C = Output(model, name="C", max_flow=5.0, cost=-2.0)

    # connect nodes
    A.connect(B)
    B.connect(C)

    return model
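
A hedged usage sketch for this factory: with A able to supply 10.0 and C limited to 5.0 at a negative cost, the run should deliver exactly 5.0 to C. The expected value here is inferred from the constraints, not taken from the source:

from numpy.testing import assert_allclose

model = create_model()
model.run()
assert_allclose(model.nodes["C"].flow, 5.0)  # bounded by C's max_flow; its -2.0 cost rewards delivery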
Example #7
def simple_linear_model(request, solver):
    """
    Make a simple model with a single Input and Output.

    Input -> Link -> Output

    """
    model = Model(solver=solver)
    inpt = Input(model, name="Input")
    lnk = Link(model, name="Link", cost=1.0)
    inpt.connect(lnk)
    otpt = Output(model, name="Output")
    lnk.connect(otpt)

    return model
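
A hedged sketch of how a test typically consumes this fixture; the flow and cost values below are illustrative, not from the source:

from numpy.testing import assert_allclose

def test_simple_flow(simple_linear_model):
    model = simple_linear_model
    model.nodes["Input"].max_flow = 10.0
    model.nodes["Output"].max_flow = 5.0
    model.nodes["Output"].cost = -10.0  # reward delivery so the solver routes water
    model.run()
    assert_allclose(model.nodes["Output"].flow, 5.0)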
Example #8
def test_catchment_many_successors(solver):
    """Test if node with fixed flow can have multiple successors. See #225"""
    model = Model(solver=solver)
    catchment = Catchment(model, "catchment", flow=100)
    out1 = Output(model, "out1", max_flow=10, cost=-100)
    out2 = Output(model, "out2", max_flow=15, cost=-50)
    out3 = Output(model, "out3")
    catchment.connect(out1)
    catchment.connect(out2)
    catchment.connect(out3)
    model.check()
    model.run()
    assert_allclose(out1.flow, 10)
    assert_allclose(out2.flow, 15)
    assert_allclose(out3.flow, 75)
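
For this cost structure the LP solution coincides with a greedy fill from the most negative cost upward, which is where the asserted flows come from. A plain-Python restatement of that arithmetic:

supply = 100.0  # the catchment's fixed flow
demands = [('out1', 10.0, -100), ('out2', 15.0, -50), ('out3', float('inf'), 0)]
for name, max_flow, cost in sorted(demands, key=lambda d: d[2]):
    flow = min(max_flow, supply)
    supply -= flow
    print(name, flow)  # out1 10.0, out2 15.0, out3 75.0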
Example #9
def test_invalid_parameter_values():
    """
    Test that `load_parameter_values` raises a ValueError rather than a KeyError.

    This is useful to catch and give useful messages when no valid reference to
    a data location is given.

    Regression test for Issue #247 (https://github.com/pywr/pywr/issues/247)
    """

    from pywr.parameters._parameters import load_parameter_values

    m = Model()
    data = {'name': 'my_parameter', 'type': 'AParameterThatShouldHaveValues'}
    with pytest.raises(ValueError):
        load_parameter_values(m, data)
Example #10
def test_reset_timestepper_recorder(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01')
    )

    inpt = Input(model, "input", max_flow=10)
    otpt = Output(model, "output", max_flow=50, cost=-10)
    inpt.connect(otpt)

    rec = NumpyArrayNodeRecorder(model, otpt)

    model.run()

    model.timestepper.end = pandas.to_datetime("2016-01-02")

    model.run()
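
What the second run exercises: changing timestepper.end after a run must reset the model and re-dimension recorder arrays. A hedged follow-up check, if appended to the test above (the (timesteps, combinations) shape is an assumption about NumpyArrayNodeRecorder):

    assert rec.data.shape == (2, 1)  # two daily timesteps, one scenario combination
    assert_allclose(rec.data, [[10.0], [10.0]])  # flow limited by the input's max_flow of 10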
Example #11
    A.connect(Z)
    B.connect(Z)

    agg = AggregatedNode(model, "agg", [A, B])
    agg.flow_weights = flow_weights
    agg.max_flow = 30.0

    model.run()

    assert_allclose(agg.flow, expected_agg_flow)
    assert_allclose(A.flow, expected_A_flow)
    assert_allclose(B.flow, expected_B_flow)


@pytest.mark.skipif(Model().solver.name == "lpsolve",
                    reason="Not supported in lpsolve.")
def test_aggregated_node_max_flow_parameter(model):
    """Nodes constrained by the max_flow of their AggregatedNode using a Parameter """
    A = Input(model, "A", max_flow=20.0, cost=1)
    B = Input(model, "B", max_flow=20.0, cost=2)
    Z = Output(model, "Z", max_flow=100, cost=-10)

    A.connect(Z)
    B.connect(Z)

    agg = AggregatedNode(model, "agg", [A, B])
    agg.max_flow = ConstantParameter(model, 30.0)

    model.run()

    assert_allclose(agg.flow, 30.0)
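
This fragment comes from a parametrized test: with flow_weights set, the aggregated constraint becomes a weighted sum, sum_i(weight_i * flow_i) <= max_flow. A hedged sketch checking one hypothetical parametrization (the weights and flows below are illustrative, not from the source):

def weighted_total(flows, weights):
    return sum(w * q for q, w in zip(flows, weights))

# e.g. flow_weights = [2.0, 1.0] turns agg.max_flow = 30.0 into 2*A.flow + B.flow <= 30
assert weighted_total([5.0, 20.0], [2.0, 1.0]) <= 30.0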
Example #12
def test_daily_license(simple_linear_model):
    '''Test daily licence'''
    m = simple_linear_model
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))
    lic = TimestepLicense(m, None, 42.0)
    assert (isinstance(lic, License))
    assert (lic.value(Timestep(pandas.Period('2015-1-1'), 0, 1), si) == 42.0)

    # daily licences don't have resource state
    assert (lic.resource_state(Timestep(pandas.Period('2015-1-1'), 0, 1)) is
            None)


@pytest.mark.skipif(
    Model().solver.name.startswith("glpk")
    and Model().solver.set_fixed_flows_once,
    reason="This test changes constant constraints between steps.")
def test_simple_model_with_annual_licence(simple_linear_model):
    m = simple_linear_model
    si = ScenarioIndex(0, np.array([0], dtype=np.int32))

    annual_total = 365
    lic = AnnualLicense(m, m.nodes["Input"], annual_total)
    # Apply licence to the model
    m.nodes["Input"].max_flow = lic
    m.nodes["Output"].max_flow = 10.0
    m.nodes["Output"].cost = -10.0
    m.setup()

    m.step()
Example #13
def model(request):
    model = Model()
    return model
Example #14
    def create_model(self, network, template, initial_volumes=None):

        model = Model(solver='glpk-edge')

        # -----------------GENERATE NETWORK STRUCTURE -----------------------

        output_ids = []
        input_ids = []

        non_storage_types = list(output_types.keys()) + list(
            input_types.keys()) + list(node_types.keys())

        # create node dictionaries by name and id
        node_lookup = {}
        for node in network['nodes']:
            name = '{} (node)'.format(node['name'])
            types = [
                t for t in node['types'] if t['template_id'] == template['id']
            ]
            if not types:
                continue
            if len(types) > 1:
                msg = "Type is ambiguous for {}. Please remove extra types.".format(
                    name)
                raise Exception(msg)
            type_name = types[-1]['name']
            node_lookup[node.get("id")] = {
                'type': type_name,
                'name': name,
                'connect_in': 0,
                'connect_out': 0,
            }
            if type_name in output_types:
                output_ids.append(node['id'])
            elif type_name in input_types:
                input_ids.append(node['id'])

        # create link lookups and pywr links
        link_lookup = {}
        for link in network['links']:
            name = '{} (link)'.format(link['name'])
            types = [
                t for t in link['types'] if t['template_id'] == template['id']
            ]
            if not types:
                continue
            type_name = types[-1]['name']
            link_id = link['id']
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']
            node_lookup[node_1_id]['connect_out'] += 1
            node_lookup[node_2_id]['connect_in'] += 1
            link_lookup[link_id] = {
                'name': name,
                'type': type_name,
                'node_1_id': node_1_id,
                'node_2_id': node_2_id,
                'from_slot': node_lookup[node_1_id]['connect_out'] - 1,
                'to_slot': node_lookup[node_2_id]['connect_in'] - 1,
            }

            if node_1_id in output_ids:
                node = node_lookup[node_1_id]
                msg = 'Topology error: Output {} appears to be upstream of {}'.format(
                    node['name'], name)
                raise Exception(msg)
            elif node_2_id in input_ids:
                node = node_lookup[node_2_id]
                msg = 'Topology error: Input {} appears to be downstream of {}'.format(
                    node['name'], name)
                raise Exception(msg)

            LinkType = link_types.get(type_name, Link)
            self.non_storage[('link', link_id)] = LinkType(model, name=name)

        # Q/C

        # remove unconnected links
        d = []
        for link_id, link in link_lookup.items():
            if link['node_1_id'] not in node_lookup or link[
                    'node_2_id'] not in node_lookup:
                d.append(link_id)
        for link_id in d:
            del link_lookup[link_id]

        connected_nodes = []
        for link_id, link in link_lookup.items():
            connected_nodes.append(link['node_1_id'])
            connected_nodes.append(link['node_2_id'])

        # remove unconnected nodes
        d = []
        for node_id in node_lookup:
            if node_id not in connected_nodes:
                d.append(node_id)
        for node_id in d:
            del node_lookup[node_id]

        # create the pywr node objects (storage and non-storage), keyed by node id

        # TODO: change looping variable notation
        for node_id, node in node_lookup.items():
            type_name = node['type']
            name = node['name']
            connect_in = node.get('connect_in', 0)
            connect_out = node.get('connect_out', 0)
            if (type_name in storage_types
                    or connect_out > 1) and type_name not in non_storage_types:
                initial_volume = initial_volumes.get(
                    node_id, 0.0) if initial_volumes is not None else 0.0
                self.storage[node_id] = Storage(model,
                                                name=name,
                                                num_outputs=connect_in,
                                                num_inputs=connect_out,
                                                initial_volume=initial_volume)
                if type_name not in storage_types:
                    self.storage[node_id].max_volume = 0.0
            else:

                if type_name in input_types:
                    NodeType = input_types[type_name]
                elif type_name in output_types:
                    NodeType = output_types[type_name]
                elif type_name in node_types:
                    NodeType = node_types[type_name]
                elif connect_in > 1:
                    NodeType = River
                else:
                    NodeType = Link

                self.non_storage[('node', node_id)] = NodeType(model,
                                                               name=name)

        # create network connections
        # must assign connection slots for storage
        # TODO: change looping variable notation
        for link_id, link in link_lookup.items():
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']

            _link = self.non_storage[('link', link_id)]
            up_storage = self.storage.get(node_1_id)
            up_node = self.non_storage.get(('node', node_1_id))
            down_storage = self.storage.get(node_2_id)
            down_node = self.non_storage.get(('node', node_2_id))

            if up_storage:
                up_storage.connect(_link, from_slot=link['from_slot'])
            else:
                up_node.connect(_link)

            if down_storage:
                _link.connect(down_storage, to_slot=link['to_slot'])
            else:
                _link.connect(down_node)

        self.model = model
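
A hedged sketch of the minimal network/template structure this method consumes, inferred from the keys it reads; all ids, names, and type names are placeholders:

template = {'id': 1}
network = {
    'nodes': [
        {'id': 10, 'name': 'Inflow',
         'types': [{'template_id': 1, 'name': 'Inflow'}]},
        {'id': 11, 'name': 'Demand',
         'types': [{'template_id': 1, 'name': 'Demand'}]},
    ],
    'links': [
        {'id': 20, 'name': 'Reach', 'node_1_id': 10, 'node_2_id': 11,
         'types': [{'template_id': 1, 'name': 'Reach'}]},
    ],
}
# builder.create_model(network, template)  # assumes 'Inflow'/'Demand' are registered in input_types/output_types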
Example #15
def model(request, solver):
    model = Model(solver=solver)
    return model
Example #16
def model(solver):
    return Model(solver=solver)
Example #17
def test_keating_aquifer(solver):
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01'),
    )

    aqfer = KeatingAquifer(
        model,
        'keating',
        num_streams,
        num_additional_inputs,
        stream_flow_levels,
        transmissivity,
        coefficient,
        levels,
        area=area,
        storativity=storativity,
    )

    catchment = Input(model, 'catchment', max_flow=0)
    stream = Output(model, 'stream', max_flow=np.inf, cost=0)
    abstraction = Output(model, 'abstraction', max_flow=15, cost=-999)

    catchment.connect(aqfer)
    aqfer.connect(stream, from_slot=0)
    aqfer.connect(abstraction, from_slot=1)

    rec_level = NumpyArrayLevelRecorder(model, aqfer)
    rec_volume = NumpyArrayStorageRecorder(model, aqfer)
    rec_stream = NumpyArrayNodeRecorder(model, stream)
    rec_abstraction = NumpyArrayNodeRecorder(model, abstraction)

    model.check()

    assert(len(aqfer.inputs) == (num_streams + num_additional_inputs))

    for initial_level in (50, 100, 110, 150):
        # set the initial aquifer level and therefore the initial volume
        aqfer.initial_level = initial_level
        initial_volume = aqfer.initial_volume
        assert(initial_volume == (area * storativity[0] * initial_level * 0.001))
        # run the model (for one timestep only)
        model.run()
        # manually calculate keating streamflow and check model flows are OK
        Qp = 2 * transmissivity[0] * max(initial_level - stream_flow_levels[0][0], 0) * coefficient
        Qe = 2 * transmissivity[1] * max(initial_level - stream_flow_levels[0][1], 0) * coefficient
        delta_storage = initial_volume - rec_volume.data[0, 0]
        abs_flow = rec_abstraction.data[0, 0]
        stream_flow = rec_stream.data[0, 0]
        assert(delta_storage == (stream_flow + abs_flow))
        assert(stream_flow == (Qp+Qe))

    A_VERY_LARGE_NUMBER = 9999999999999
    model.timestepper.end = pandas.to_datetime('2016-01-02')

    # fill the aquifer completely; with no spill route on the storage the
    # solver should find no feasible solution
    with pytest.raises(RuntimeError):
        catchment.max_flow = A_VERY_LARGE_NUMBER
        catchment.min_flow = A_VERY_LARGE_NUMBER
        model.run()

    # drain the aquifer completely
    catchment.min_flow = 0
    catchment.max_flow = 0
    abstraction.max_flow = A_VERY_LARGE_NUMBER
    model.run()
    assert(rec_volume.data[1, 0] == 0)
    abs_flow = rec_abstraction.data[1, 0]
    stream_flow = rec_stream.data[1, 0]
    assert(stream_flow == 0)
    assert(abs_flow == 0)
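
The manual checks inside the loop encode two relations taken directly from the test: the initial volume implied by a level, and the Keating outflow per stream flow level. Restated as standalone helpers for reference (the 0.001 factor is copied from the assert above, not derived here):

def keating_initial_volume(level, area, storativity):
    # mirrors: initial_volume == area * storativity[0] * initial_level * 0.001
    return area * storativity * level * 0.001

def keating_outflow(level, flow_level, transmissivity, coefficient):
    # mirrors: Q = 2 * T * max(level - flow_level, 0) * C
    return 2 * transmissivity * max(level - flow_level, 0) * coefficient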
Example #18
results_folder = 'C:/Users/Josh Soper/Documents/Mexico/Summer 2018/Analysis'

# ----------------- CREATE MODEL -----------------------

# TODO: End time needs to be last day of final model year, not first day of next year
for scenario in data['network']['scenarios']:
    if scenario['name'] == option:
        meta = scenario
        start = datetime.strptime(meta['start_time'],
                                  '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d')
        # end = datetime.strptime(meta['end_time'], '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d')
        end = '2015-12-31'
        if meta['time_step'] == 'day':
            ts = 1

model = Model(start=start, end=end, timestep=ts, solver='glpk')

# -----------------GENERATE NETWORK STRUCTURE -----------------------

# create node dictionaries by name and id

node_lookup_name = {}
node_lookup_id = {}

for node in nodes_list:
    types = [
        t for t in node['types']
        if abs(t['template_id']) == abs(template['id'])
    ]
    node_lookup_name[node.get('name')] = {
        'type': types[0]['name'] if types else None,
Example #19
    def create_model(self, network, template, constants=None, variables=None, policies=None, initial_volumes=None):

        model = Model(solver='glpk-edge')

        # -----------------GENERATE NETWORK STRUCTURE -----------------------
        # ...and add initial parameter values

        output_ids = []
        input_ids = []

        non_storage_types = list(output_types.keys()) + list(input_types.keys()) + list(node_types.keys())

        def add_value_to_node(res_attr_idx, type_name, attr_name):
            pywr_param = None
            # constants may be None when no constant values are supplied
            constant = constants.pop(res_attr_idx, None) if constants else None
            if constant:
                pywr_param = ConstantParameter(model, constant)
            elif variables:
                variable = variables.pop(res_attr_idx, None)
                if variable:
                    values = list(variable['values'].values())
                    pywr_param = ArrayIndexedParameter(model, values)
            elif policies:
                policy = policies.pop(res_attr_idx, None)
                if policy:
                    pywr_param = self.create_register_policy(policy)

            if pywr_param is not None:
                type_name = type_name.lower()
                attr_name = attr_name.lower()
                (resource_type, resource_id, attr_id) = res_attr_idx
                self.update_param(resource_type, resource_id, type_name, attr_name, value=pywr_param)

        # create node dictionaries by name and id
        node_lookup = {}
        for node in network['nodes']:
            name = '{} (node)'.format(node['name'])
            types = [t for t in node['types'] if t['template_id'] == template['id']]
            if not types:
                continue
            if len(types) > 1:
                msg = "Type is ambiguous for {}. Please remove extra types.".format(name)
                raise Exception(msg)
            type_name = types[-1]['name']
            node_lookup[node.get("id")] = {
                'type': type_name,
                'name': name,
                'connect_in': 0,
                'connect_out': 0,
                'attributes': node['attributes']
            }
            if type_name in output_types:
                output_ids.append(node['id'])
            elif type_name in input_types:
                input_ids.append(node['id'])

        # create link lookups and pywr links
        link_lookup = {}
        for link in network['links']:
            residx = ('link', link['id'])
            name = '{} (link)'.format(link['name'])
            types = [t for t in link['types'] if t['template_id'] == template['id']]
            if not types:
                continue
            type_name = types[-1]['name']
            link_id = link['id']
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']
            try:
                node_lookup[node_1_id]['connect_out'] += 1
            except KeyError:
                raise Exception('Topology error for {}. Upstream node ID {} not found.'.format(name, node_1_id))
            try:
                node_lookup[node_2_id]['connect_in'] += 1
            except KeyError:
                raise Exception('Topology error for {}. Downstream node ID {} not found.'.format(name, node_2_id))
            link_lookup[link_id] = {
                'name': name,
                'type': type_name,
                'node_1_id': node_1_id,
                'node_2_id': node_2_id,
                'from_slot': node_lookup[node_1_id]['connect_out'] - 1,
                'to_slot': node_lookup[node_2_id]['connect_in'] - 1,
            }

            if node_1_id in output_ids:
                node = node_lookup[node_1_id]
                msg = 'Topology error: Output {} appears to be upstream of {}'.format(node['name'], name)
                raise Exception(msg)
            elif node_2_id in input_ids:
                node = node_lookup[node_2_id]
                msg = 'Topology error: Input {} appears to be downstream of {}'.format(node['name'], name)
                raise Exception(msg)

            LinkType = link_types.get(type_name, Link)

            self.non_storage[residx] = LinkType(model, name=name)

            for ra in link['attributes']:
                res_attr_idx = ('link', link['id'], ra['attr_id'])
                add_value_to_node(res_attr_idx, type_name, ra['attr_name'])

        # Q/C

        # remove unconnected links
        d = []
        for link_id, link in link_lookup.items():
            if link['node_1_id'] not in node_lookup or link['node_2_id'] not in node_lookup:
                d.append(link_id)
        for link_id in d:
            del link_lookup[link_id]

        connected_nodes = []
        for link_id, link in link_lookup.items():
            connected_nodes.append(link['node_1_id'])
            connected_nodes.append(link['node_2_id'])

        # remove unconnected nodes
        d = []
        for node_id in node_lookup:
            if node_id not in connected_nodes:
                d.append(node_id)
        for node_id in d:
            del node_lookup[node_id]

        # create the pywr node objects (storage and non-storage), keyed by node id

        for node_id, node in node_lookup.items():
            residx = ('node', node_id)
            type_name = node['type']
            name = node['name']
            connect_in = node.get('connect_in', 0)
            connect_out = node.get('connect_out', 0)
            if (type_name in storage_types or connect_out > 1) and type_name not in non_storage_types:
                self.storage[node_id] = Storage(
                    model,
                    name=name,
                    num_outputs=connect_in,
                    num_inputs=connect_out,
                    initial_volume=initial_volumes.get(node_id, 0.0) if initial_volumes is not None else 0.0
                )
                if type_name not in storage_types:
                    self.storage[node_id].max_volume = 0.0
            else:

                if type_name in input_types:
                    NodeType = input_types[type_name]
                elif type_name in output_types:
                    NodeType = output_types[type_name]
                elif type_name in node_types:
                    NodeType = node_types[type_name]
                elif connect_in > 1:
                    NodeType = River
                else:
                    NodeType = Link

                self.non_storage[residx] = NodeType(model, name=name)

            for ra in node['attributes']:
                res_attr_idx = ('node', node_id, ra['attr_id'])
                try:
                    add_value_to_node(res_attr_idx, type_name, ra['attr_name'])
                except Exception as err:
                    print(err)
                    raise

        # create network connections
        # must assign connection slots for storage
        # TODO: change looping variable notation
        for link_id, link in link_lookup.items():
            node_1_id = link['node_1_id']
            node_2_id = link['node_2_id']

            _link = self.non_storage[('link', link_id)]
            up_storage = self.storage.get(node_1_id)
            up_node = self.non_storage.get(('node', node_1_id))
            down_storage = self.storage.get(node_2_id)
            down_node = self.non_storage.get(('node', node_2_id))

            if up_storage:
                up_storage.connect(_link, from_slot=link['from_slot'])
            else:
                up_node.connect(_link)

            if down_storage:
                _link.connect(down_storage, to_slot=link['to_slot'])
            else:
                _link.connect(down_node)

        self.model = model
Example #20
def model():
    return Model()
Example #21
def model(solver):
    model = Model(solver=solver)
    model.timestepper.start = Timestamp("2016-01-01")
    model.timestepper.end = Timestamp("2016-01-02")
    return model