def test_aggregated_storage_control_curve(three_storage_model):
    """Test using a control curve based on an aggregate storage, rather than a
    single storage.

    The "Total Storage" aggregate node drives a control curve on a new supply
    node: flow is 0 while the aggregate is above 50%, and 5 once it drops below.
    """
    model = three_storage_model

    # Create a new supply node connected to all three existing demand outputs.
    inpt = Input(model, "Input 3", cost=-1000)
    inpt.connect(model.nodes["Output 0"])
    inpt.connect(model.nodes["Output 1"])
    inpt.connect(model.nodes["Output 2"])

    # Limit the flow of the new node using a control curve on the aggregate
    # storage: value 0 above the curve, 5 below it.
    curves = [0.5]  # 50%
    values = [0, 5]
    inpt.max_flow = ControlCurveParameter(model.nodes["Total Storage"], curves, values)

    # Initial storage is > 50% so flow == 0.
    model.step()
    np.testing.assert_allclose(inpt.flow, 0.0)

    # Set initial storages so the aggregate is < 50%.  The redundant
    # isinstance() check inside the loop was removed -- the comprehension
    # below already filters for Storage nodes.
    storages = [node for node in model.nodes if isinstance(node, Storage)]
    for node, value in zip(storages, [0.6, 0.1, 0.1]):
        node.initial_volume = node.max_volume * value

    # Now below the control curve, so flow is allowed.
    model.reset()
    model.step()
    np.testing.assert_allclose(inpt.flow, 5.0)
def test_daily_profile_leap_day(model):
    """Test behaviour of daily profile parameter for leap years.

    The supply node's max_flow follows a 366-entry daily profile; on 31
    December the flow must be 365 in both leap and non-leap years.
    """
    supply = Input(model, "input")
    demand = Output(model, "otpt", max_flow=None, cost=-999)
    supply.connect(demand)
    supply.max_flow = DailyProfileParameter(model, np.arange(0, 366, dtype=np.float64))

    # Run a full non-leap year: the final day still maps to profile value 365.
    model.timestepper.start = pd.to_datetime("2015-01-01")
    model.timestepper.end = pd.to_datetime("2015-12-31")
    model.run()
    assert_allclose(supply.flow, 365)  # NOT 364

    # Run a full leap year: 31 December again yields 365.
    model.timestepper.start = pd.to_datetime("2016-01-01")
    model.timestepper.end = pd.to_datetime("2016-12-31")
    model.run()
    assert_allclose(supply.flow, 365)
def test_keating_aquifer(solver):
    """Integration test of the KeatingAquifer node.

    Checks: the initial level/volume relationship; the Keating streamflow
    against a manual calculation; infeasibility when the full aquifer has no
    spill route; and complete drainage via abstraction.

    Float comparisons use np.testing.assert_allclose instead of exact ``==``,
    which was fragile against floating-point round-off.
    """
    model = Model(
        solver=solver,
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-01'),
    )

    # num_streams, stream_flow_levels, transmissivity, etc. are module-level
    # test constants defined elsewhere in this file.
    aqfer = KeatingAquifer(
        model,
        'keating',
        num_streams,
        num_additional_inputs,
        stream_flow_levels,
        transmissivity,
        coefficient,
        levels,
        area=area,
        storativity=storativity,
    )

    catchment = Input(model, 'catchment', max_flow=0)
    stream = Output(model, 'stream', max_flow=np.inf, cost=0)
    abstraction = Output(model, 'abstraction', max_flow=15, cost=-999)

    catchment.connect(aqfer)
    aqfer.connect(stream, from_slot=0)
    aqfer.connect(abstraction, from_slot=1)

    # Instantiating the recorders registers them with the model; rec_level is
    # never read but must exist for the model to record levels without error.
    rec_level = NumpyArrayLevelRecorder(model, aqfer)
    rec_volume = NumpyArrayStorageRecorder(model, aqfer)
    rec_stream = NumpyArrayNodeRecorder(model, stream)
    rec_abstraction = NumpyArrayNodeRecorder(model, abstraction)

    model.check()

    assert len(aqfer.inputs) == (num_streams + num_additional_inputs)

    for initial_level in (50, 100, 110, 150):
        # Set the initial aquifer level and therefore the initial volume.
        aqfer.initial_level = initial_level
        initial_volume = aqfer.initial_volume
        # Tolerance-based comparison instead of exact float equality.
        np.testing.assert_allclose(
            initial_volume, area * storativity[0] * initial_level * 0.001)

        # Run the model (for one timestep only).
        model.run()

        # Manually calculate Keating streamflow and check model flows are OK.
        Qp = 2 * transmissivity[0] * max(initial_level - stream_flow_levels[0][0], 0) * coefficient
        Qe = 2 * transmissivity[1] * max(initial_level - stream_flow_levels[0][1], 0) * coefficient

        delta_storage = initial_volume - rec_volume.data[0, 0]
        abs_flow = rec_abstraction.data[0, 0]
        stream_flow = rec_stream.data[0, 0]
        # Mass balance: storage change equals total outflow.
        np.testing.assert_allclose(delta_storage, stream_flow + abs_flow)
        np.testing.assert_allclose(stream_flow, Qp + Qe)

    A_VERY_LARGE_NUMBER = 9999999999999
    model.timestepper.end = pandas.to_datetime('2016-01-02')

    # Fill the aquifer completely: there is no spill for the storage, so the
    # solver should find no feasible solution.
    with pytest.raises(RuntimeError):
        catchment.max_flow = A_VERY_LARGE_NUMBER
        catchment.min_flow = A_VERY_LARGE_NUMBER
        model.run()

    # Drain the aquifer completely.
    catchment.min_flow = 0
    catchment.max_flow = 0
    abstraction.max_flow = A_VERY_LARGE_NUMBER
    model.run()
    np.testing.assert_allclose(rec_volume.data[1, 0], 0)
    abs_flow = rec_abstraction.data[1, 0]
    stream_flow = rec_stream.data[1, 0]
    np.testing.assert_allclose(stream_flow, 0)
    np.testing.assert_allclose(abs_flow, 0)