예제 #1
0
파일: test_delay.py 프로젝트: tomjanus/pywr
def test_delay_node(key, delay, initial_flow):
    """Test that the `DelayNode` and the `FlowDelayParameter` internal to it correctly delay node for a range of inputs and
    across scenarios"""
    model = Model()

    model.timestepper.start = "2015/01/01"
    model.timestepper.end = "2015/01/31"

    scenario = Scenario(model, name="scenario", size=2)
    inflows = np.arange(1, 63).reshape((31, 2), order="F")
    flow_param = ArrayIndexedScenarioParameter(model, scenario, inflows)

    node_kwargs = {key: delay}
    if initial_flow:
        node_kwargs["initial_flow"] = initial_flow

    source = Catchment(model, name="input", flow=flow_param)
    delay_node = DelayNode(model, name="delaynode", **node_kwargs)
    sink = Output(model, name="output")

    source.connect(delay_node)
    delay_node.connect(sink)

    recorder = NumpyArrayNodeRecorder(model, sink)

    model.run()

    # The first `delay` timesteps carry the initial flow (or zero when no
    # initial flow was given); thereafter the input arrives shifted by `delay`.
    if initial_flow:
        head = np.full((delay, 2), initial_flow)
    else:
        head = np.zeros((delay, 2))
    expected = np.concatenate([head, inflows[:-delay, :]])

    assert_array_almost_equal(recorder.data, expected)
예제 #2
0
def historic_run():
    """Run the model."""
    logger.info(f'Version: {VERSION}')
    logger.info('Initialising cloud storage client ...')
    storage_client = init_azure_storage()
    download_hydrology(storage_client)
    logger.info('Starting model run ...')

    # Make sure the output directory exists before recorders write to it.
    if not os.path.exists(OUT_DIR):
        os.makedirs(OUT_DIR)

    model = Model.load(MODEL_FILENAME)
    # Record all model parameters plus run progress to an HDF5 store.
    TablesRecorder(model,
                   os.path.join(OUT_DIR, 'thames_output.h5'),
                   parameters=list(model.parameters))
    ProgressRecorder(model)

    stats = model.run()
    logger.info(stats)

    # Upload the results to cloud storage.
    logger.info('Uploading outputs ...')
    upload_outputs(storage_client)

    # Log the run statistics as a dataframe.
    logger.info(stats.to_dataframe())
예제 #3
0
파일: runner.py 프로젝트: pywr/hydra-pywr
    def load_pywr_model(self, solver=None):
        """ Create a Pywr model from the exported data. """
        # NOTE(review): despite the name, this returns the raw exported data
        # dict, not the Model instance — the model is only stored on
        # ``self.model``. Confirm callers expect the data rather than the model.
        pywr_data = self.get_pywr_data()
        model = Model.load(pywr_data, solver=solver)
        self.model = model

        return pywr_data
예제 #4
0
파일: run-pv.py 프로젝트: pywr/pywr-dcopf
def main(filename):
    """Run the DC-OPF model in ``filename``, save CSV output and plots."""
    base, _ = os.path.splitext(filename)
    model = Model.load(filename, solver='glpk-dcopf')

    # Recorders for the two generation nodes of interest.
    gen1 = NumpyArrayNodeRecorder(model, model.nodes['gen1'])
    pv2 = NumpyArrayNodeRecorder(model, model.nodes['pv2'])
    ProgressRecorder(model)
    CSVRecorder(model, f'{base}.csv')

    model.setup()
    stats = model.run()
    print(stats.to_dataframe())

    df = pandas.concat({'gen1': gen1.to_dataframe(), 'pv2': pv2.to_dataframe()}, axis=1)

    # Raw output with daily means overlaid in black.
    fig, ax = plt.subplots(figsize=(8, 4))
    df.plot(ax=ax)
    df.resample('D').mean().plot(ax=ax, color='black')
    ax.set_ylabel('MW')
    fig.savefig(f'{base}.png', dpi=300)

    # Monthly totals on a separate figure.
    fig, ax = plt.subplots(figsize=(8, 4))
    df.resample('M').sum().plot(ax=ax)
    ax.set_ylabel('MWh per month')
    fig.savefig(f'{base}-monthly.png', dpi=300)

    plt.show()
예제 #5
0
def test_empty_storage_min_flow():
    """A min_flow demand the storage cannot sustain should fail on the second step."""
    model = Model()
    storage = Storage(model, "storage", initial_volume=100, max_volume=100,
                      num_inputs=1, num_outputs=0)
    demand = Output(model, "output", min_flow=75)
    storage.connect(demand)

    model.check()
    # The first step drains most of the storage ...
    model.step()
    # ... leaving too little to satisfy min_flow, so the next step must fail.
    with pytest.raises(RuntimeError):
        model.step()
예제 #6
0
파일: test_delay.py 프로젝트: tomjanus/pywr
def test_delay_failure(key, delay):
    """Test the FlowDelayParameter returns a ValueError when the input value of the `days` attribute is not
    divisible exactly by the model timestep delta and when the `timesteps` attribute is less than 1
    """
    model = Model()
    model.timestepper.start = "2015/01/01"
    model.timestepper.end = "2015/01/31"
    model.timestepper.delta = 3

    source = Catchment(model, name="input", flow=1)
    sink = Output(model, name="output")
    source.connect(sink)

    FlowDelayParameter(model, source, **{key: delay})

    # The invalid delay configuration is only validated during setup.
    with pytest.raises(ValueError):
        model.setup()
예제 #7
0
def test_basic_losses():
    """Run the basic losses model and check the resulting node flows."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models', 'basic-losses.json'),
                       solver='glpk-dcopf')

    model.setup()
    model.run()

    expected_flows = {'gen1': [50], 'gen2': [100], 'load1': [135]}
    for name, flow in expected_flows.items():
        np.testing.assert_allclose(model.nodes[name].flow, flow)
예제 #8
0
def load_model(filename=None, data=None, solver=None):
    """Load a test model and check it"""
    path = None
    if data is None:
        # No inline data supplied: read the JSON from the tests' models folder.
        path = os.path.join(os.path.dirname(__file__), 'models')
        with open(os.path.join(path, filename), 'r') as f:
            data = f.read()

    model = Model.loads(data, path=path, solver=solver)
    model.check()
    return model
예제 #9
0
def test_transfer2(pywr_solver, size):
    """Test a simple transfer model. """
    model = Model()

    Scenario(model, name='test', size=size)

    demands = [9.47, 7.65, 9.04, 9.56, 9.44]
    supply = [4.74, 6.59, 12.04, 8.81, 11.75]

    # Build five independent supply -> works -> demand chains.
    for i, (max_supply, max_demand) in enumerate(zip(supply, demands)):
        source = Input(model, name=f'supply-{i}', max_flow=max_supply)
        works = Link(model, name=f'wtw-{i}')
        sink = Output(model, name=f'demand-{i}', max_flow=max_demand, cost=-10.0)

        source.connect(works)
        works.connect(sink)

    # A transfer route from works 0 to works 4.
    transfer04 = Link(model, name='transfer-04', max_flow=15.36, cost=1.0)
    model.nodes['wtw-0'].connect(transfer04)
    transfer04.connect(model.nodes['wtw-4'])

    model.setup()
    model.step()

    expected_supply = [4.74, 6.59, 9.04, 8.81, 9.44]
    for i, expected in enumerate(expected_supply):
        assert_allclose(model.nodes[f'supply-{i}'].flow, [expected] * size)

    # The transfer carries no flow in this configuration.
    assert_allclose(transfer04.flow, [0.0] * size, atol=1e-8)
예제 #10
0
def test_transfer(pywr_solver, size):
    """Test a simple transfer model. """
    model = Model()

    Scenario(model, name='test', size=size)

    # Zone 1: deficit (5 of supply against 10 of demand).
    supply1 = Input(model, name='supply-1', max_flow=5.0)
    wtw1 = Link(model, name='wtw-1')
    demand1 = Output(model, name='demand-1', max_flow=10.0, cost=-10.0)
    supply1.connect(wtw1)
    wtw1.connect(demand1)

    # Zone 2: surplus (15 of supply against 10 of demand).
    supply2 = Input(model, name='supply-2', max_flow=15.0)
    wtw2 = Link(model, name='wtw-2')
    demand2 = Output(model, name='demand-2', max_flow=10.0, cost=-10.0)
    supply2.connect(wtw2)
    wtw2.connect(demand2)

    # Transfer up to 2 units of zone 2's surplus into zone 1.
    transfer21 = Link(model, name='transfer-12', max_flow=2.0, cost=1.0)
    wtw2.connect(transfer21)
    transfer21.connect(wtw1)

    model.setup()
    model.step()

    for node, expected in ((supply1, 5.0), (demand1, 7.0),
                           (supply2, 12.0), (demand2, 10.0),
                           (transfer21, 2.0)):
        assert_allclose(node.flow, [expected] * size)
예제 #11
0
파일: test_delay.py 프로젝트: tomjanus/pywr
def test_delay_param_load():
    """Test that the `.load` method of `FlowDelayParameter` works correctly"""
    model = Model()
    model.timestepper.start = "2015/01/01"
    model.timestepper.end = "2015/01/31"
    source = Catchment(model, name="input", flow=1)
    sink = Output(model, name="output")
    source.connect(sink)

    # Delay specified in days.
    param = FlowDelayParameter.load(
        model, {"name": "delay", "node": "input", "days": 2})
    assert param.days == 2

    # Delay specified in timesteps.
    param2 = FlowDelayParameter.load(
        model, {"name": "delay2", "node": "input", "timesteps": 2})
    assert param2.timesteps == 2

    # Both parameters should produce two zero steps followed by the input flow.
    expected = np.concatenate([np.zeros(2), np.ones(29)]).reshape(31, 1)
    AssertionRecorder(model, param, name="rec1", expected_data=expected)
    AssertionRecorder(model, param2, name="rec2", expected_data=expected)

    model.setup()
    model.run()
예제 #12
0
def create_model():
    """Build a three-node weekly model: input A -> link B -> output C."""
    model = Model(start="2016-01-01", end="2019-12-31", timestep=7)

    # An input, a link, and an output.
    node_a = Input(model, name="A", max_flow=10.0)
    node_b = Link(model, name="B", cost=10.0)
    node_c = Output(model, name="C", max_flow=5.0, cost=-20.0)

    # Wire them in series.
    node_a.connect(node_b)
    node_b.connect(node_c)

    return model
예제 #13
0
def test_simple():
    """Run the simple DC-OPF model and check generator, load and line flows."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models', 'simple.json'),
                       solver='glpk-dcopf')

    model.setup()
    model.run()

    expected_flows = {
        'gen1': [100.0],
        'gen2': [50.0],
        'load3': [150.0],
        'line12': [50 / 3],
        'line13': [100.0 - 50 / 3],
        'line23': [50.0 + 50 / 3],
    }
    for name, flow in expected_flows.items():
        np.testing.assert_allclose(model.nodes[name].flow, flow)
예제 #14
0
class TestGlpkErrorHandling:
    """Tests that NaN values reaching the GLPK solver raise ``GLPKError``.

    The two tests differ only in which attribute of the ``supply1`` node
    receives the NaN-producing parameter, so the shared body is factored
    into a private helper.
    """

    def _assert_nan_raises(self, attr):
        """Attach a NaN parameter to supply1's ``attr`` and expect GLPKError on run."""
        # parse the JSON into a model
        model = load_model("simple1.json")

        nan_param = NanParameter(model)
        inpt = model.nodes["supply1"]
        setattr(inpt, attr, nan_param)

        model.setup()

        with pytest.raises(pywr.solvers.GLPKError):
            model.run()

    @pytest.mark.skipif(
        Model().solver.name == "lpsolve" or Model().solver.use_unsafe_api,
        reason="NaN not checked for lpsolve or unsafe GLPK API.",
    )
    def test_nan_constraint_error(self):
        """Test a NaN in a row constraint causes an error"""
        self._assert_nan_raises("max_flow")

    @pytest.mark.skipif(
        Model().solver.name == "lpsolve" or Model().solver.use_unsafe_api,
        reason="NaN not checked for lpsolve or unsafe GLPK API.",
    )
    def test_nan_cost_error(self):
        """Test a NaN in a node cost causes an error"""
        self._assert_nan_raises("cost")
예제 #15
0
def test_export(db_with_pywr_network, logged_in_client):
    """Export a network from Hydra and compare it against the reference JSON."""
    client = logged_in_client

    network_id, scenario_id, json_filename = db_with_pywr_network
    exporter = PywrHydraExporter.from_network_id(client, network_id, scenario_id)
    exported_data = exporter.get_pywr_data()

    # Load the reference JSON and compare against the exported data.
    with open(json_filename) as fh:
        reference_data = json.load(fh)

    assert_identical_pywr_data(reference_data, exported_data)

    # The reference data must also load and run as a Pywr model.
    m = Model.load(reference_data)
    m.run()
예제 #16
0
def test_simple_line_constraints():
    """Run the line-constrained model and check the resulting flows."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models',
                                    'simple-line-constraints.json'),
                       solver='glpk-dcopf')

    model.setup()
    model.run()

    expected_flows = {
        'gen1': [25.0],
        'gen2': [100.0],
        'load3': [125.0],
        'line12': [-25.0],
        'line13': [50.0],
        'line23': [75.0],
    }
    for name, flow in expected_flows.items():
        np.testing.assert_allclose(model.nodes[name].flow, flow)
예제 #17
0
def test_export(db_with_pywr_network, logged_in_client):
    """Export a network from Hydra and compare it against the reference JSON."""
    client = logged_in_client

    network_id, scenario_id, json_filename = db_with_pywr_network
    exporter = PywrHydraExporter.from_network_id(client, network_id,
                                                 scenario_id)
    exported_data = exporter.get_pywr_data()

    # Load the reference JSON and compare against the exported data.
    with open(json_filename) as fh:
        reference_data = json.load(fh)

    assert_identical_pywr_data(reference_data, exported_data)

    # The reference data must also load and run as a Pywr model.
    m = Model.load(reference_data)
    m.run()
예제 #18
0
def test_from_model():
    """Convert a model file to the d3 JSON structure and spot-check it."""
    json_path = os.path.join(os.path.dirname(__file__), "models",
                             "river1.json")
    model = Model.load(json_path)
    json_dict = pywr_model_to_d3_json(model, attributes=True)

    assert "nodes" in json_dict.keys()
    assert "links" in json_dict.keys()

    # Every exported node name must come from the source model.
    expected_names = ["catchment1", "river1", "abs1", "link1", "term1", "demand1"]
    assert all(node["name"] in expected_names for node in json_dict["nodes"])

    # The catchment's max_flow attribute is exported as a string value.
    catchment = get_node(json_dict["nodes"], "catchment1")
    catchment_max_flow = get_node_attribute(catchment, "max_flow")
    assert catchment_max_flow["value"] == "5.0"
예제 #19
0
def test_pv_generator():
    """Run the PV generator model and check the recorded output length."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models', 'pv-generator.json'),
                       solver='glpk-dcopf')

    recorders = {
        'gen1': NumpyArrayNodeRecorder(model, model.nodes['gen1']),
        'pv2': NumpyArrayNodeRecorder(model, model.nodes['pv2']),
    }

    model.setup()
    model.run()

    df = pandas.concat(
        {name: rec.to_dataframe() for name, rec in recorders.items()},
        axis=1)

    # Expected number of recorded timesteps for this model.
    assert df.shape[0] == 745
예제 #20
0
def test_simple_battery():
    """Run the battery model and check the recorded output length."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models', 'simple-battery.json'),
                       solver='glpk-dcopf')

    # Node flow recorders plus a storage recorder for the battery.
    node_recorders = {
        'gen1': NumpyArrayNodeRecorder(model, model.nodes['gen1']),
        'pv2': NumpyArrayNodeRecorder(model, model.nodes['pv2']),
        'battery1': NumpyArrayStorageRecorder(model, model.nodes['battery1']),
    }

    model.setup()
    model.run()

    df = pandas.concat(
        {name: rec.to_dataframe() for name, rec in node_recorders.items()},
        axis=1)

    # Expected number of recorded timesteps for this model.
    assert df.shape[0] == 745
예제 #21
0
def simple_dcopf_model():
    """Build a three-bus DC-OPF model with two generators and one load."""
    model = Model(solver='glpk-dcopf')

    # Two generators with different marginal costs.
    gen1 = Generator(model, 'gen1')
    gen1.max_flow = 100
    gen1.cost = 1.0

    gen2 = Generator(model, 'gen2')
    gen2.max_flow = 100
    gen2.cost = 2.0

    # A single load with a benefit for being served.
    load3 = Load(model, 'load3')
    load3.max_flow = 150
    load3.cost = -10

    bus1 = Bus(model, 'bus1')
    bus2 = Bus(model, 'bus2')
    bus3 = Bus(model, 'bus3')

    line12 = Line(model, 'line12')
    line13 = Line(model, 'line13')
    line23 = Line(model, 'line23')

    # Attach generation and load to their buses.
    gen1.connect(bus1)
    gen2.connect(bus2)
    load3.connect(bus3)

    # Lines join the three buses into a triangle.
    for line, bus_from, bus_to in ((line12, bus1, bus2),
                                   (line13, bus1, bus3),
                                   (line23, bus2, bus3)):
        bus_from.connect(line)
        line.connect(bus_to)

    return model
예제 #22
0
def test_simple_losses():
    """Run the lossy model and check flows against hand-derived loss sums."""
    model = Model.load(os.path.join(TEST_FOLDER, 'models', 'simple-losses.json'),
                       solver='glpk-dcopf')

    model.setup()
    model.run()

    # Losses attributable to gen1's output, 10% per line.
    line12_losses = 0.1 * 100 / 9
    line13_losses = 0.1 * (100 - 100 / 9)
    line23_losses = 0.1 * (100 / 9 - line12_losses)
    gen1_losses = line12_losses + line13_losses + line23_losses

    # gen2 covers the remaining demand plus gen1's losses, grossed up for
    # its own 10% loss.
    gen2 = (150 - 100 + gen1_losses) / (1 - 0.1)

    np.testing.assert_allclose(model.nodes['gen1'].flow, [100.0])
    np.testing.assert_allclose(model.nodes['gen2'].flow, [gen2])
    np.testing.assert_allclose(model.nodes['load3'].flow, [150.0])

    np.testing.assert_allclose(model.nodes['line12'].flow, [100 / 9])
    np.testing.assert_allclose(model.nodes['line13'].flow, [100.0 - 100 / 9])
    np.testing.assert_allclose(model.nodes['line23'].flow,
                               [gen2 + 100 / 9 - line12_losses])
예제 #23
0
    result = model.step()
    assert_allclose(supply1.flow, 15.0, atol=1e-7)
    assert_allclose(supply2.flow, 15.0, atol=1e-7)
    assert_allclose(demand1.flow, 30.0, atol=1e-7)


def test_run_bottleneck():
    '''Test max flow constraint on intermediate nodes is upheld'''
    model = load_model('bottleneck.json')
    model.step()
    # The two demands together cannot exceed the bottleneck capacity.
    total = model.nodes['demand1'].flow + model.nodes['demand2'].flow
    assert_allclose(total, 15.0, atol=1e-7)


@pytest.mark.skipif(Model().solver.name == "glpk-edge",
                    reason="Not valid for GLPK Edge based solver.")
def test_run_discharge_upstream():
    '''Test river with inline discharge (upstream)

    In this instance the discharge is upstream of the abstraction, and so can
    be abstracted in the same way as the water from the catchment
    '''
    model = load_model('river_discharge1.json')
    model.step()
    # All water (catchment + discharge) is available to the abstraction.
    assert_allclose(model.nodes['demand1'].flow, 8.0, atol=1e-7)
    assert_allclose(model.nodes['term1'].flow, 0.0, atol=1e-7)

예제 #24
0
def run():
    """Run the model, then plot and tabulate its runtime statistics.

    Produces a pie chart of where run time was spent alongside a table of
    scalar statistics, saved as PNG and EPS files.
    """
    # Run the model
    model = Model.load(MODEL_FILENAME)

    # Add a storage recorder
    TablesRecorder(model,
                   'thames_output.h5',
                   parameters=[p for p in model.parameters])

    # Run the model
    stats = model.run()
    print(stats)
    stats_df = stats.to_dataframe()
    print(stats_df)

    # Timing components shown as slices of the pie chart.
    keys_to_plot = (
        'time_taken_before',
        'solver_stats.bounds_update_nonstorage',
        'solver_stats.bounds_update_storage',
        'solver_stats.objective_update',
        'solver_stats.lp_solve',
        'solver_stats.result_update',
        'time_taken_after',
    )

    # Scalar statistics shown in the side table.
    keys_to_tabulate = (
        'timesteps',
        'time_taken',
        'solver',
        'num_scenarios',
        'speed',
        # BUG FIX: a missing comma here previously concatenated
        # 'solver_name' and 'solver_stats.total' into the single bogus key
        # 'solver_namesolver_stats.total', so neither row was ever tabulated.
        'solver_name',
        'solver_stats.total',
        'solver_stats.number_of_rows',
        'solver_stats.number_of_cols',
        'solver_stats.number_of_nonzero',
        'solver_stats.number_of_routes',
        'solver_stats.number_of_nodes',
    )

    values = []
    labels = []
    explode = []
    solver_sub_total = 0.0
    for k in keys_to_plot:
        v = stats_df.loc[k][0]
        values.append(v)
        # Turn e.g. 'solver_stats.lp_solve' into 'Lp solve'.
        label = k.split('.', 1)[-1].replace('_', ' ').capitalize()
        explode.append(0.0)
        if k.startswith('solver_stats'):
            labels.append('Solver - {}'.format(label))
            solver_sub_total += v
        else:
            labels.append(label)

    # Remaining solver time not covered by the individual components.
    values.append(stats_df.loc['solver_stats.total'][0] - solver_sub_total)
    labels.append('Solver - Other')
    explode.append(0.0)

    # Everything else, then normalise to fractions for the pie chart.
    values.append(stats_df.loc['time_taken'][0] - sum(values))
    values = np.array(values) / sum(values)
    labels.append('Other')
    explode.append(0.0)

    fig, (ax1, ax2) = plt.subplots(figsize=(12, 4),
                                   ncols=2,
                                   sharey='row',
                                   gridspec_kw={'width_ratios': [2, 1]})

    print(values, labels)
    ax1.pie(values,
            explode=explode,
            labels=labels,
            autopct='%1.1f%%',
            startangle=90)
    ax1.axis(
        'equal')  # Equal aspect ratio ensures that pie is drawn as a circle.

    # Build the table of scalar statistics, formatting floats to 2 d.p.
    cell_text = []
    for index, value in stats_df.iterrows():
        if index not in keys_to_tabulate:
            continue
        v = value[0]
        if isinstance(v, (float, np.float64, np.float32)):
            v = f'{v:.2f}'

        cell_text.append([index, v])

    tbl = ax2.table(cellText=cell_text,
                    colLabels=['Statistic', 'Value'],
                    loc='center')
    tbl.scale(1.5, 1.5)  # may help
    tbl.set_fontsize(14)
    ax2.axis('off')

    fig.savefig('run_statistics_w_tables.png', dpi=300)
    fig.savefig('run_statistics_w_tables.eps')

    plt.show()
예제 #25
0
    demand1.max_flow = 40.0
    result = model.step()
    assert_allclose(supply1.flow, 15.0, atol=1e-7)
    assert_allclose(supply2.flow, 15.0, atol=1e-7)
    assert_allclose(demand1.flow, 30.0, atol=1e-7)


def test_run_bottleneck():
    '''Test max flow constraint on intermediate nodes is upheld'''
    model = load_model('bottleneck.json')
    model.step()
    # The combined demand cannot exceed the bottleneck capacity.
    total = model.nodes['demand1'].flow + model.nodes['demand2'].flow
    assert_allclose(total, 15.0, atol=1e-7)

@pytest.mark.skipif(Model().solver.name == "glpk-edge", reason="Not valid for GLPK Edge based solver.")
def test_run_discharge_upstream():
    '''Test river with inline discharge (upstream)

    In this instance the discharge is upstream of the abstraction, and so can
    be abstracted in the same way as the water from the catchment
    '''
    model = load_model('river_discharge1.json')
    model.step()
    # All water (catchment + discharge) is available to the abstraction.
    assert_allclose(model.nodes['demand1'].flow, 8.0, atol=1e-7)
    assert_allclose(model.nodes['term1'].flow, 0.0, atol=1e-7)

@pytest.mark.skipif(Model().solver.name == "glpk-edge", reason="Not valid for GLPK Edge based solver.")
def test_run_discharge_downstream():
예제 #26
0
def pytest_report_header(config):
    """Add the selected Pywr solver name to the pytest report header."""
    return 'solver: {}'.format(Model().solver.name)
예제 #27
0
파일: runner.py 프로젝트: UMWRG/PywrApp
 def load_pywr_model(self):
     """ Create a Pywr model from the exported data. """
     # Build the Model from the exported Pywr JSON data and keep a
     # reference on the instance for later use; returns None.
     pywr_data = self.get_pywr_data()
     model = Model.load(pywr_data)
     self.model = model
예제 #28
0
파일: runner.py 프로젝트: pywr/PywrApp
 def load_pywr_model(self):
     """ Create a Pywr model from the exported data. """
     # Build the Model from the exported Pywr JSON data and keep a
     # reference on the instance for later use; returns None.
     pywr_data = self.get_pywr_data()
     model = Model.load(pywr_data)
     self.model = model
예제 #29
0
def test_reservoir_circle():
    """
    Issue #140. A model with a circular route, from a reservoir Input back
    around to it's own Output.

                 Demand
                    ^
                    |
                Reservoir <- Pumping
                    |           ^
                    v           |
              Compensation      |
                    |           |
                    v           |
    Catchment -> River 1 -> River 2 ----> MRFA -> Waste
                                    |              ^
                                    |---> MRFB ----|
    """
    model = Model()

    catchment = Input(model, "catchment", max_flow=500, min_flow=500)

    reservoir = Storage(model,
                        "reservoir",
                        max_volume=10000,
                        initial_volume=5000)

    demand = Output(model, "demand", max_flow=50, cost=-100)
    # Negative cost makes pumping attractive; compensation is expensive
    # so the reservoir only supports the river when nothing else can.
    pumping_station = Link(model, "pumping station", max_flow=100, cost=-10)
    river1 = Link(model, "river1")
    river2 = Link(model, "river2")
    compensation = Link(model, "compensation", cost=600)
    # mrfA's strongly negative cost prioritises the minimum river flow.
    mrfA = Link(model, "mrfA", cost=-500, max_flow=50)
    mrfB = Link(model, "mrfB")
    waste = Output(model, "waste")

    # Wire the circular route: river -> pumping -> reservoir -> compensation -> river.
    catchment.connect(river1)
    river1.connect(river2)
    river2.connect(mrfA)
    river2.connect(mrfB)
    mrfA.connect(waste)
    mrfB.connect(waste)
    river2.connect(pumping_station)
    pumping_station.connect(reservoir)
    reservoir.connect(compensation)
    compensation.connect(river1)
    reservoir.connect(demand)

    model.check()
    model.setup()

    # not limited by mrf, pump capacity is constraint
    model.step()
    assert_allclose(catchment.flow, 500)
    assert_allclose(waste.flow, 400)
    assert_allclose(compensation.flow, 0)
    assert_allclose(pumping_station.flow, 100)
    assert_allclose(demand.flow, 50)

    # limited by mrf
    catchment.min_flow = catchment.max_flow = 100
    model.step()
    assert_allclose(waste.flow, 50)
    assert_allclose(compensation.flow, 0)
    assert_allclose(pumping_station.flow, 50)
    assert_allclose(demand.flow, 50)

    # reservoir can support mrf, but doesn't need to
    compensation.cost = 200
    model.step()
    assert_allclose(waste.flow, 50)
    assert_allclose(compensation.flow, 0)
    assert_allclose(pumping_station.flow, 50)
    assert_allclose(demand.flow, 50)

    # reservoir supporting mrf
    catchment.min_flow = catchment.max_flow = 0
    model.step()
    assert_allclose(waste.flow, 50)
    assert_allclose(compensation.flow, 50)
    assert_allclose(pumping_station.flow, 0)
    assert_allclose(demand.flow, 50)
예제 #30
0
    result = model.step()
    assert_allclose(supply1.flow, 15.0, atol=1e-7)
    assert_allclose(supply2.flow, 15.0, atol=1e-7)
    assert_allclose(demand1.flow, 30.0, atol=1e-7)


def test_run_bottleneck():
    """Test max flow constraint on intermediate nodes is upheld"""
    model = load_model("bottleneck.json")
    model.step()
    # The combined demand cannot exceed the bottleneck capacity.
    total = model.nodes["demand1"].flow + model.nodes["demand2"].flow
    assert_allclose(total, 15.0, atol=1e-7)


@pytest.mark.skipif(Model().solver.name == "glpk-edge",
                    reason="Not valid for GLPK Edge based solver.")
def test_run_discharge_upstream():
    """Test river with inline discharge (upstream)

    In this instance the discharge is upstream of the abstraction, and so can
    be abstracted in the same way as the water from the catchment
    """
    model = load_model("river_discharge1.json")
    model.step()
    # All water (catchment + discharge) is available to the abstraction.
    assert_allclose(model.nodes["demand1"].flow, 8.0, atol=1e-7)
    assert_allclose(model.nodes["term1"].flow, 0.0, atol=1e-7)

예제 #31
0
def test_run_empty():
    # empty model should raise an exception if run
    with pytest.raises(ModelStructureError):
        Model().run()
예제 #32
0
def test_select_solver():
    """Test specifying the solver in JSON"""
    template = '''{"metadata": {"minimum_version": "0.1"}, "nodes": {}, "edges": {}, "timestepper": {"start": "1990-01-01","end": "1999-12-31","timestep": 1}, "solver": {"name": "%s"}}'''
    for solver in pywr.solvers.solver_registry:
        # Each registered solver should be selectable by name from JSON.
        model = load_model(data=template % solver.name)
        assert model.solver.name.lower() == solver.name


def test_solver_unrecognised():
    '''Test specifying an unrecognised solver JSON'''
    data = '''{"metadata": {"minimum_version": "0.1"}, "nodes": {}, "edges": {}, "timestepper": {"start": "1990-01-01","end": "1999-12-31","timestep": 1}, "solver": {"name": "%s"}}''' % 'foobar'
    # An unknown solver name should raise KeyError during load.
    with pytest.raises(KeyError):
        load_model(data=data)


@pytest.mark.skipif(Model().solver.name != "glpk",
                    reason="only valid for glpk")
@pytest.mark.parametrize("use_presolve", ["true", "false"])
def test_select_glpk_presolve(use_presolve):
    """Test specifying the solver in JSON"""
    for solver_name in ["glpk"]:
        data = '''{"metadata": {"minimum_version": "0.1"}, "nodes": {}, "edges": {}, "timestepper": {"start": "1990-01-01","end": "1999-12-31","timestep": 1}, "solver": {"name": "%s", "use_presolve": %s}}''' % (
            solver_name, use_presolve)
        model = load_model(data=data)
        assert model.solver.name.lower() == solver_name
        # The underlying cython solver should reflect the requested setting.
        assert model.solver._cy_solver.use_presolve == (use_presolve == "true")