def simple_storage_model(request, solver):
    """
    Make a simple model with a single Input, Storage and Output.

    Input -> Storage -> Output
    """
    model = pywr.core.Model(
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-05'),
        timestep=datetime.timedelta(1),
        solver=solver,
    )

    # Cheap supply, a small store, and a demand that strongly prefers to be met.
    supply = Input(model, name="Input", max_flow=5.0, cost=-1)
    store = Storage(model, name="Storage", num_outputs=1, num_inputs=1,
                    max_volume=20, initial_volume=10)
    demand = Output(model, name="Output", max_flow=8, cost=-999)

    supply.connect(store)
    store.connect(demand)

    return model
def test_scenario_storage():
    """Test the behaviour of Storage nodes with multiple scenarios

    The model defined has two inflow scenarios: 5 and 10. It is expected that
    the volume in the storage node should increase at different rates in the
    two scenarios.
    """
    model = Model()

    supply = Input(model, 'input', max_flow=999)
    reservoir = Storage(model, 'storage', num_inputs=1, num_outputs=1,
                        max_volume=1000, initial_volume=500)
    demand = Output(model, 'output', max_flow=999)

    # Force a different inflow in each scenario via min_flow.
    inflow_scenario = Scenario(model, 'Inflow', size=2)
    supply.min_flow = ConstantScenarioParameter(model, inflow_scenario, [5.0, 10.0])

    supply.connect(reservoir)
    reservoir.connect(demand)

    storage_rec = NumpyArrayStorageRecorder(model, reservoir)

    model.run()

    assert_allclose(supply.flow, [5, 10])
    # Volume rises by 5 per step in scenario 0 and by 10 per step in scenario 1.
    assert_allclose(storage_rec.data[0], [505, 510])
    assert_allclose(storage_rec.data[1], [510, 520])
def three_storage_model(request):
    """
    Make a simple model with three input, storage and output nodes. Also adds
    an `AggregatedStorage` and `AggregatedNode`.

        Input 0 -> Storage 0 -> Output 0
        Input 1 -> Storage 1 -> Output 1
        Input 2 -> Storage 2 -> Output 2
    """
    model = pywr.core.Model(
        start=pandas.to_datetime('2016-01-01'),
        end=pandas.to_datetime('2016-01-05'),
        timestep=datetime.timedelta(1),
    )

    storages = []
    outputs = []

    # Three parallel supply -> storage -> demand chains, each slightly
    # different (scaled max flows and initial volumes) so they behave
    # distinguishably in aggregate tests.
    for idx in range(3):
        supply = Input(model, name="Input {}".format(idx),
                       max_flow=5.0 * idx, cost=-1)
        store = Storage(model, name="Storage {}".format(idx),
                        num_outputs=1, num_inputs=1,
                        max_volume=20, initial_volume=10 + idx)
        demand = Output(model, name="Output {}".format(idx),
                        max_flow=8 + idx, cost=-999)

        supply.connect(store)
        store.connect(demand)

        storages.append(store)
        outputs.append(demand)

    AggregatedStorage(model, name='Total Storage', storage_nodes=storages)
    AggregatedNode(model, name='Total Output', nodes=outputs)

    return model
def test_scaled_profile_nested_load(model):
    """ Test `ScaledProfileParameter` loading with `AggregatedParameter` """
    model.timestepper.delta = 15

    storage = Storage(model, 'Storage', max_volume=100.0, num_outputs=0)
    demand = Output(model, 'Link')

    # NOTE(review): `[0.7] * np.arange(12)` broadcasts to a per-month profile
    # of 0.7 * (month - 1); the expected_func below relies on exactly that.
    data = {
        'type': 'scaledprofile',
        'scale': 50.0,
        'profile': {
            'type': 'aggregated',
            'agg_func': 'product',
            'parameters': [{
                'type': 'monthlyprofile',
                'values': [0.5] * 12
            }, {
                'type': 'monthlyprofilecontrolcurve',
                'control_curves': [0.8, 0.6],
                'values': [[1.0] * 12, [0.7] * np.arange(12), [0.3] * 12],
                'storage_node': 'Storage'
            }]
        }
    }

    storage.connect(demand)

    demand.max_flow = p = load_parameter(model, data)

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        # The control curve selects a different monthly profile depending on
        # which band the storage volume falls in.
        if storage.initial_volume == 90:
            return 50.0 * 0.5 * 1.0
        if storage.initial_volume == 70:
            return 50.0 * 0.5 * 0.7 * (timestep.month - 1)
        return 50.0 * 0.5 * 0.3

    for initial_volume in (90, 70, 30):
        storage.initial_volume = initial_volume
        model.run()
def test_scaled_profile_nested_load_constant(model):
    """ Test `ScaledProfileParameter` loading with `AggregatedParameter`.

    Uses a constant second factor (rather than a control-curve profile) in
    the aggregated product.

    Renamed from ``test_scaled_profile_nested_load``: the module already
    defines a test with that name, and the duplicate definition silently
    shadowed the earlier test so pytest never collected it.
    """
    model.timestepper.delta = 15

    s = Storage(model, 'Storage', max_volume=100.0, initial_volume=50.0, num_outputs=0)
    d = Output(model, 'Link')
    data = {
        'type': 'scaledprofile',
        'scale': 50.0,
        'profile': {
            'type': 'aggregated',
            'agg_func': 'product',
            'parameters': [{
                'type': 'monthlyprofile',
                'values': [0.5] * 12
            }, {
                'type': 'constant',
                'value': 1.5,
            }]
        }
    }

    s.connect(d)

    d.max_flow = p = load_parameter(model, data)

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        # scale * monthly profile * constant
        return 50.0 * 0.5 * 1.5

    model.run()
def create_model(harmonic=True):
    """Build a two-reservoir Thames optimisation model.

    Two catchments feed two reservoirs that supply two demands; a costed
    transfer link allows reservoir 2 to support reservoir 1, controlled by a
    variable control curve that the optimiser tunes.

    Parameters
    ----------
    harmonic : bool
        If True the control curve is an `AnnualHarmonicSeriesParameter`;
        otherwise a `MonthlyProfileParameter` is used.

    Returns
    -------
    InspyredOptimisationModel
        Model with two 'minimise' objectives: mean total deficit and total
        transfer flow.
    """
    # import flow timeseries for catchments
    flow = pd.read_csv(os.path.join('data', 'thames_stochastic_flow.gz'))

    # Vectorized conversion (faster than Series.apply on each element).
    flow['Date'] = pd.to_datetime(flow['Date'])
    flow.set_index('Date', inplace=True)

    # resample input to weekly average
    # Fix: the `how=` keyword was removed from pandas (deprecated in 0.18,
    # removed in 0.25); resample().mean() is the supported equivalent.
    flow = flow.resample('7D').mean()

    model = InspyredOptimisationModel(
        solver='glpk',
        start=flow.index[0],
        end=flow.index[365*10],  # roughly 10 years
        timestep=datetime.timedelta(7),  # weekly time-step
    )

    flow_parameter = ArrayIndexedParameter(model, flow['flow'].values)

    catchment1 = Input(model, 'catchment1', min_flow=flow_parameter, max_flow=flow_parameter)
    catchment2 = Input(model, 'catchment2', min_flow=flow_parameter, max_flow=flow_parameter)

    reservoir1 = Storage(model, 'reservoir1', min_volume=3000, max_volume=20000, initial_volume=16000)
    reservoir2 = Storage(model, 'reservoir2', min_volume=3000, max_volume=20000, initial_volume=16000)

    # The control curve is the decision variable being optimised.
    if harmonic:
        control_curve = AnnualHarmonicSeriesParameter(model, 0.5, [0.5], [0.0],
                                                      mean_upper_bounds=1.0,
                                                      amplitude_upper_bounds=1.0)
    else:
        control_curve = MonthlyProfileParameter(model, np.array([0.0]*12),
                                                lower_bounds=0.0, upper_bounds=1.0)

    control_curve.is_variable = True
    controller = ControlCurveParameter(model, reservoir1, control_curve, [0.0, 10.0])
    # Transfer from reservoir2 to reservoir1 is enabled when reservoir1 is
    # below its control curve; negative cost encourages its use then.
    transfer = Link(model, 'transfer', max_flow=controller, cost=-500)

    demand1 = Output(model, 'demand1', max_flow=45.0, cost=-101)
    demand2 = Output(model, 'demand2', max_flow=20.0, cost=-100)

    river1 = Link(model, 'river1')
    river2 = Link(model, 'river2')

    # compensation flows from reservoirs
    compensation1 = Link(model, 'compensation1', max_flow=5.0, cost=-9999)
    compensation2 = Link(model, 'compensation2', max_flow=5.0, cost=-9998)

    terminator = Output(model, 'terminator', cost=1.0)

    catchment1.connect(reservoir1)
    catchment2.connect(reservoir2)
    reservoir1.connect(demand1)
    reservoir2.connect(demand2)
    reservoir2.connect(transfer)
    transfer.connect(reservoir1)
    reservoir1.connect(river1)
    reservoir2.connect(river2)
    river1.connect(terminator)
    river2.connect(terminator)
    reservoir1.connect(compensation1)
    reservoir2.connect(compensation2)
    compensation1.connect(terminator)
    compensation2.connect(terminator)

    r1 = TotalDeficitNodeRecorder(model, demand1)
    r2 = TotalDeficitNodeRecorder(model, demand2)
    r3 = AggregatedRecorder(model, [r1, r2], agg_func="mean")
    r3.is_objective = 'minimise'
    r4 = TotalFlowNodeRecorder(model, transfer)
    r4.is_objective = 'minimise'

    return model