def component_results(es, results, path, model):
    """ Writes results aggregated by component type """
    for k, v in es._typemap.items():
        if type(k) == str:
            _seq_by_type = [
                views.node(results, n, multiindex=True)['sequences']
                for n in es.nodes
                if isinstance(n, v) and not isinstance(n, Bus)
            ]
            if _seq_by_type:
                seq_by_type = pd.concat(_seq_by_type, axis=1)
                type_path = os.path.join(path, 'sequences')
                if not os.path.exists(type_path):
                    os.makedirs(type_path)
                seq_by_type.to_csv(os.path.join(type_path, str(k) + '.csv'),
                                   sep=";")

            _sca_by_type = [
                views.node(results, n, multiindex=True).get('scalars')
                for n in es.nodes
                if isinstance(n, v) and not isinstance(n, Bus)
            ]
            if [x for x in _sca_by_type if x is not None]:
                sca_by_type = pd.concat(_sca_by_type)
                type_path = os.path.join(path, 'scalars')
                if not os.path.exists(type_path):
                    os.makedirs(type_path)
                sca_by_type.to_csv(os.path.join(type_path, str(k) + '.csv'),
                                   header=True, sep=";")
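A minimal usage sketch for the CSV writer above. It assumes a solved solph model `model`, an energy system `es` that carries a `_typemap` attribute, and an output directory name that is purely illustrative.

# Sketch only: `es` and `model` are assumed to exist from an earlier
# optimisation run; the 'output' directory name is an example.
results = processing.results(model)
component_results(es, results, 'output', model)
# Afterwards 'output/sequences/<type>.csv' and 'output/scalars/<type>.csv'
# contain the per-component-type aggregations written by the function.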
def check_results_dataframe(energysystem_data):
    """Explore optimized supply, storage and investments.

    Also calculates the renewable share of the system.

    :dict: results
    :dataframe: elect_bus
    """
    # Restore the dumped results for later processing:
    # energysystem = solph.EnergySystem()
    # energysystem.restore(
    #     dpath='../data/03_urban_energy_requirements/', filename='om_data')
    results = energysystem_data.results['main']
    electricity_bus = views.node(results, 'electricity')
    custom_storage = views.node(results, 'storage')

    elect_bus = electricity_bus['sequences']
    print('** electricity sequence head(5) **')
    print(elect_bus.head(5))

    storage_ = custom_storage['sequences']
    print('** storage sequence head(5) **')
    print(storage_.head(5))

    my_results = electricity_bus['scalars']

    # installed capacity of storage in GWh
    my_results['storage_invest_GWh'] = (
        custom_storage['scalars'][(('storage', 'None'), 'invest')] / 1e6)

    # installed capacity of wind power plant in MW
    my_results['wind_invest_MW'] = (
        electricity_bus['scalars'][(('wind', 'electricity'), 'invest')] / 1e3)

    # resulting renewable energy share
    my_results['renewable_share'] = (
        1 - electricity_bus['sequences'][
            (('pp_gas', 'electricity'), 'flow')].sum() /
        electricity_bus['sequences'][
            (('electricity', 'demand'), 'flow')].sum())

    logging.info("Check optimisation results.")
    pp.pprint(my_results)
def bus_results(es, results, select="sequences", concat=False):
    """ Aggregated for every bus of the energy system """
    br = {}

    buses = [b for b in es.nodes if isinstance(b, Bus)]

    for b in buses:
        if select == "sequences":
            bus_sequences = pd.concat(
                [
                    views.node(results, b, multiindex=True).get(
                        "sequences", pd.DataFrame())
                ],
                axis=1,
            )
            br[str(b)] = bus_sequences
        if select == "scalars":
            br[str(b)] = views.node(results, b, multiindex=True).get("scalars")

    if concat:
        if select == "sequences":
            axis = 1
        else:
            axis = 0
        br = pd.concat([b for b in br.values()], axis=axis)

    return br
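A short usage sketch for the aggregator above, assuming `es` and `results` come from an already solved model; it only illustrates how the `select` and `concat` switches change the return value.

# Sketch: dict keyed by bus label vs. one concatenated DataFrame.
per_bus = bus_results(es, results, select="sequences")
all_sequences = bus_results(es, results, select="sequences", concat=True)
all_scalars = bus_results(es, results, select="scalars", concat=True)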
def plot_results(results):
    # plot electrical bus
    result_data = views.node(results, 'bel')['sequences']
    result_data[(('bel', 'demand_el'), 'flow')] *= -1
    columns = [
        c for c in result_data.columns
        if not any(s in str(c) for s in ['status', 'costs'])
    ]
    result_data = result_data[columns]
    ax = result_data.plot(kind='line', drawstyle='steps-post', grid=True,
                          rot=0)
    ax.set_xlabel('Hour')
    ax.set_ylabel('P (MW)')
    plt.show()

    # plot thermal bus
    result_data = views.node(results, 'bth')['sequences']
    result_data[(('bth', 'demand_th'), 'flow')] *= -1
    columns = [
        c for c in result_data.columns
        if not any(s in str(c) for s in ['status', 'costs'])
    ]
    result_data = result_data[columns]
    ax = result_data.plot(kind='line', drawstyle='steps-post', grid=True,
                          rot=0)
    ax.set_xlabel('Hour')
    ax.set_ylabel('Q (MW)')
    plt.show()

    return result_data
def test_parameter_with_node_view(self):
    param_results = processing.parameter_as_dict(
        self.es, exclude_none=True)
    bel1 = views.node(param_results, 'b_el1')
    eq_(bel1['scalars'][(('b_el1', 'storage'), 'variable_costs')], 3)

    bel1_m = views.node(param_results, 'b_el1', multiindex=True)
    eq_(bel1_m['scalars'].loc[('b_el1', 'storage', 'variable_costs')], 3)
def update_states(self, results, sim_params):
    # Update the states of the electrolyzer.
    # If the states dict of this object wasn't created yet, it's done here.
    if 'temperature' not in self.states:
        self.states['temperature'] = [None] * sim_params.n_intervals

    # Get the flows of the electrolyzer for this time step.
    data_electrolyzer = views.node(results, self.name)
    df_electrolyzer = data_electrolyzer['sequences']

    # Get the hydrogen produced this time step [kg].
    for i_result in df_electrolyzer:
        if i_result[0][0] == self.name and i_result[1] == 'flow':
            # Case: This is the flow from the electrolyzer to the hydrogen
            # bus, therefore the produced H2 [kg].
            this_h2_produced = df_electrolyzer[i_result][0]

    # With the hydrogen produced this step, the corresponding temperature can
    # be interpolated from the supporting points.
    supporting_points_temp = self.supporting_points['temperature']
    supporting_points_h2 = self.supporting_points['h2_produced']
    this_temp = np.interp(
        this_h2_produced, supporting_points_h2, supporting_points_temp)

    # Update the current temperature and the temperature state for this
    # time step.
    self.temperature = this_temp
    self.states['temperature'][sim_params.i_interval] = this_temp
def test_output_by_type_view(self):
    results = processing.results(self.om)
    transformer_output = views.node_output_by_type(
        results, node_type=Transformer)
    compare = views.node(
        results, 'diesel',
        multiindex=True)['sequences'][('diesel', 'b_el1', 'flow')]
    eq_(int(transformer_output.sum()), int(compare.sum()))
def test_net_storage_flow(self):
    results = processing.results(self.om)
    storage_flow = views.net_storage_flow(
        results, node_type=GenericStorage)
    compare = views.node(results, 'storage', multiindex=True)['sequences']
    eq_(
        ((compare[('storage', 'b_el2', 'flow')]
          - compare[('b_el1', 'storage', 'flow')]).to_frame()
         == storage_flow.values).all()[0],
        True)
def bus_results(es, results, path, model):
    """ Writes results aggregated for every bus of the energy system """
    buses = [b for b in es.nodes if isinstance(b, Bus)]
    for b in buses:
        bus_sequences = pd.concat(
            [views.node(results, b, multiindex=True)['sequences']], axis=1)
        type_path = os.path.join(path, 'sequences')
        if not os.path.exists(type_path):
            os.makedirs(type_path)
        bus_sequences.to_csv(
            os.path.join(type_path, str(b) + '.csv'), sep=";")
def sizing_results(results, m, sizing_list):
    res = {}
    for i in range(len(sizing_list)):
        node = m.es.groups[sizing_list[i]]
        node_data = views.node(results, node)
        for nodes, flow_name in node_data['sequences']:
            if 'scalars' in node_data and node_data['scalars'].get(
                    (nodes, 'invest')) is not None:
                res[nodes] = node_data['scalars'].get((nodes, 'invest'))
    df = pd.DataFrame.from_dict(res, orient='index')
    return df
def results_postprocessing(n, component_list, time_horizon=None):
    generator_list = []
    for i in range(len(component_list)):
        d1 = views.node(n, component_list[i])['sequences']
        if time_horizon is not None:
            generator_list.append(d1.iloc[:time_horizon])
        else:
            generator_list.append(d1)
    res = pd.concat(generator_list, axis=1)
    return res
def component_results(es, results, select="sequences"):
    """ Aggregated by component type """
    c = {}

    if not hasattr(es, "typemap"):
        setattr(es, "typemap", facades.TYPEMAP)

    for k, v in es.typemap.items():
        if type(k) == str:
            if select == "sequences":
                _seq_by_type = [
                    views.node(results, n, multiindex=True).get("sequences")
                    for n in es.nodes
                    if isinstance(n, v) and not isinstance(n, Bus)
                ]
                # check if dataframes / series have been returned
                if any([
                        isinstance(i, (pd.DataFrame, pd.Series))
                        for i in _seq_by_type
                ]):
                    seq_by_type = pd.concat(_seq_by_type, axis=1)
                    c[str(k)] = seq_by_type
            if select == "scalars":
                _sca_by_type = [
                    views.node(results, n, multiindex=True).get("scalars")
                    for n in es.nodes
                    if isinstance(n, v) and not isinstance(n, Bus)
                ]
                if [x for x in _sca_by_type if x is not None]:
                    _sca_by_type = pd.concat(_sca_by_type)
                    c[str(k)] = _sca_by_type

    return c
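A minimal sketch of calling this variant, assuming `es` was built from oemof.tabular facades and `results` comes from `processing.results(model)`; the printed summary is illustrative only.

# Sketch: per-technology sequences and scalars keyed by type name.
seqs = component_results(es, results, select="sequences")
caps = component_results(es, results, select="scalars")
for tech, df in seqs.items():
    # print total energy per component type over the whole horizon
    print(tech, df.sum().sum())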
def plot(energysystem_data, year):
    """This code is copied from the oemof plotting examples.

    It allows for customization of the plot using the oemof/oev plotting
    objects; see the `oemof-plotting_examples`_.

    .. _oemof-plotting_examples:
        https://github.com/oemof/oemof-examples/tree/master/oemof_examples/oemof.solph/v0.3.x/plotting_examples
    """
    results = energysystem_data.results['main']

    cdict = {
        (('electricity', 'demand'), 'flow'): '#ce4aff',
        (('electricity', 'excess_bel'), 'flow'): '#555555',
        (('electricity', 'storage'), 'flow'): '#42c77a',
        (('pp_gas', 'electricity'), 'flow'): '#636f6b',
        (('pv', 'electricity'), 'flow'): '#ffde32',
        (('storage', 'electricity'), 'flow'): '#42c77a',
        (('wind', 'electricity'), 'flow'): '#5b5bae'
    }

    inorder = [(('pv', 'electricity'), 'flow'),
               (('wind', 'electricity'), 'flow'),
               (('storage', 'electricity'), 'flow'),
               (('pp_gas', 'electricity'), 'flow')]

    fig = plt.figure(figsize=(14, 8))
    electricity_seq = views.node(results, 'electricity')['sequences']
    plot_slice = oev.plot.slice_df(
        electricity_seq[str(year) + '-03-01':str(year) + '-03-10'],
        date_from=pd.datetime(year, 1, 1))
    my_plot = oev.plot.io_plot('electricity', plot_slice, cdict=cdict,
                               inorder=inorder, ax=fig.add_subplot(1, 1, 1),
                               smooth=True)
    ax = shape_legend('electricity', **my_plot)
    ax = oev.plot.set_datetime_ticks(ax, plot_slice.index, tick_distance=48,
                                     date_format='%d-%m-%H', offset=12)

    ax.set_ylabel('Power in MW')
    ax.set_xlabel(str(year))
    ax.set_title("Electricity bus")
    plt.savefig('../data/04_Visualisation/om.png', dpi=300)
    logging.info("Generate plot of optimized variables.")
    plt.show()
def test_results_with_old_dump():
    """
    Test with a stored dump created with v0.2.1dev (896a6d50)
    """
    energysystem = solph.EnergySystem()
    error = None
    try:
        energysystem.restore(
            dpath=os.path.dirname(os.path.realpath(__file__)),
            filename='es_dump_test_2_1dev.oemof')
    except UnpicklingError as e:
        error = e
    # Just making sure the right error is raised. If the error message
    # changes, the test has to be changed accordingly.
    eq_(len(str(error)), 431)

    # **************************************************
    # Test again with a stored dump created with v0.2.3dev (896a6d50)
    energysystem = solph.EnergySystem()
    energysystem.restore(
        dpath=os.path.dirname(os.path.realpath(__file__)),
        filename='es_dump_test_2_3dev.oemof')

    # Note: This internal attribute is new in v0.3.0, so the dump doesn't
    # contain it for obvious reasons. Setting it manually to the correct
    # value prevents the test from erroring.
    energysystem._first_ungrouped_node_index_ = len(energysystem.nodes)

    results = energysystem.results['main']

    electricity_bus = views.node(results, 'electricity')
    my_results = electricity_bus['sequences'].sum(axis=0).to_dict()
    storage = energysystem.groups['storage']
    my_results['storage_invest'] = (
        results[(storage, None)]['scalars']['invest'])

    stor_invest_dict = {
        'storage_invest': 2040000,
        (('electricity', 'demand'), 'flow'): 105867395,
        (('electricity', 'excess_bel'), 'flow'): 211771291,
        (('electricity', 'storage'), 'flow'): 2350931,
        (('pp_gas', 'electricity'), 'flow'): 5148414,
        (('pv', 'electricity'), 'flow'): 7488607,
        (('storage', 'electricity'), 'flow'): 1880745,
        (('wind', 'electricity'), 'flow'): 305471851
    }

    for key in stor_invest_dict.keys():
        eq_(int(round(my_results[key])), int(round(stor_invest_dict[key])))
def test_results_with_actual_dump():
    energysystem = solph.EnergySystem()
    energysystem.restore()

    # Results
    results = energysystem.results['main']
    meta = energysystem.results['meta']

    electricity_bus = views.node(results, 'electricity')
    my_results = electricity_bus['sequences'].sum(axis=0).to_dict()
    storage = energysystem.groups['storage']
    my_results['storage_invest'] = (
        results[(storage, None)]['scalars']['invest'])

    stor_invest_dict = {
        'storage_invest': 2040000,
        (('electricity', 'None'), 'duals'): 10800000000321,
        (('electricity', 'demand'), 'flow'): 105867395,
        (('electricity', 'excess_bel'), 'flow'): 211771291,
        (('electricity', 'storage'), 'flow'): 2350931,
        (('pp_gas', 'electricity'), 'flow'): 5148414,
        (('pv', 'electricity'), 'flow'): 7488607,
        (('storage', 'electricity'), 'flow'): 1880745,
        (('wind', 'electricity'), 'flow'): 305471851
    }

    for key in stor_invest_dict.keys():
        eq_(int(round(my_results[key])), int(round(stor_invest_dict[key])))

    # Solver results
    eq_(str(meta['solver']['Termination condition']), 'optimal')
    eq_(meta['solver']['Error rc'], 0)
    eq_(str(meta['solver']['Status']), 'ok')

    # Problem results
    eq_(meta['problem']['Lower bound'], 4.231675777e+17)
    eq_(meta['problem']['Upper bound'], 4.231675777e+17)
    eq_(meta['problem']['Number of variables'], 2805)
    eq_(meta['problem']['Number of constraints'], 2806)
    eq_(meta['problem']['Number of nonzeros'], 1197)
    eq_(meta['problem']['Number of objectives'], 1)
    eq_(str(meta['problem']['Sense']), 'minimize')

    # Objective function
    eq_(round(meta['objective']), 423167578261115584)
def get_node_results_df(self, node_label):
    """Return DataFrame with optimization results (timeseries) for single
    node.

    Parameters
    ----------
    node_label : :obj:`str`
        Label of node the data should be looked up for

    Returns
    -------
    :pandas:`pandas.DataFrame`
        Node results (timeseries)
    """
    if node_label in [str(n) for n in self.simulation.esys.nodes]:
        return views.node(self.results_raw, node_label)
    else:
        raise ValueError(
            f'Node "{node_label}" not found in energy system!')
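A hypothetical call site for the method above; `scenario` stands in for whatever object exposes `get_node_results_df` and the node label is an example only.

# Sketch: look up one node's timeseries, falling back to None if unknown.
try:
    demand_data = scenario.get_node_results_df('demand_el')
except ValueError:
    demand_data = None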
def update_states(self, results, sim_params):
    data_storage = views.node(results, self.name)
    df_storage = data_storage['sequences']

    # Loop through the data frame values and update states accordingly.
    for i_result in df_storage:
        # Check if this result is the storage capacity.
        if i_result[1] == 'capacity':
            if 'storage_level' not in self.states:
                # Initialize arrays that track the stored mass and pressure.
                self.states['storage_level'] = (
                    [None] * sim_params.n_intervals)
                self.states['pressure'] = [None] * sim_params.n_intervals

            self.storage_level = df_storage[i_result][0]
            self.states['storage_level'][
                sim_params.i_interval] = self.storage_level

            # Get the storage pressure [bar].
            self.pressure = self.get_pressure(self.storage_level)
            self.states['pressure'][sim_params.i_interval] = self.pressure
def get_lcoe_for_DG(results, node):

    def get_variable_costs(flow_comp, current_flow):
        variable_costs_factor = flow_comp.variable_costs.data
        if variable_costs_factor[0] is not None:
            variable_costs_factors = pandas.Series(variable_costs_factor)
            variable_costs_factors.reset_index(drop=True)
            return current_flow.mul(variable_costs_factors).sum()
        else:
            return 0.0

    node_data = views.node(results, node)

    output = 0
    resource = 0
    invest = 0
    # Initialize om so the function does not fail if no 'status' sequence
    # (and therefore no O&M cost term) is present for this node.
    om = 0.0

    if isinstance(node, str):
        raise TypeError('Node has to be a real node, not str')

    for nodes, flow_name in node_data['sequences']:
        flow = node_data['sequences'][(nodes, flow_name)]
        flow.reset_index(drop=True, inplace=True)
        if nodes[0] == node:
            flow_component = node.outputs[nodes[1]]
            if flow_name == 'status':
                runtime_hours = flow.sum()
                om = (flow_component.nonconvex.om_costs * runtime_hours
                      * flow_component.nominal_value)
            elif flow_name == 'flow':
                output += flow.sum()
                invest += (flow_component.fixed_costs
                           * flow_component.nominal_value)
        else:
            flow_component = node.inputs[nodes[0]]
            print(flow_component)
            resource += get_variable_costs(flow_component, flow)

    return [invest, om, resource, output]
def sizing_results(results, m, sizing_list):
    """Return the sizes of the components that carry an Investment object.

    :param results: results table (pd.DataFrame)
    :param m: operational model (oemof.solph Model)
    :param sizing_list: labels of the sizing components,
        e.g. ['PV', 'storage'] (list of str)
    :return: sizing results table (pd.DataFrame)
    """
    res = {}
    for i in range(len(sizing_list)):
        node = m.es.groups[sizing_list[i]]
        node_data = views.node(results, node)
        for nodes, flow_name in node_data['sequences']:
            if 'scalars' in node_data and node_data['scalars'].get(
                    (nodes, 'invest')) is not None:
                res[nodes] = node_data['scalars'].get((nodes, 'invest'))
    result = pd.DataFrame.from_dict(res, orient='index')
    return result
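A minimal sketch of calling this helper, assuming `om` is the solved operational model and `results` comes from `processing.results(om)`; the component labels are examples.

# Sketch: collect the optimised capacities of the investment components.
results = processing.results(om)
sizes = sizing_results(results, om, ['PV', 'storage'])
print(sizes)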
def update_flows(self, results, sim_params, comp_name=None):
    # Check if the component has an attribute 'flows'; if not, create it as
    # an empty dict.
    if not hasattr(self, 'flows'):
        self.flows = {}

    # Components can generate more than one oemof model, so they sometimes
    # need to pass a custom name.
    if comp_name is None:
        comp_name = self.name

    this_comp_node = views.node(results, comp_name)
    this_df = this_comp_node['sequences']
    for i_result in this_df:
        # Check if this result is a flow.
        if i_result[1] == 'flow':
            this_flow_name = ('flow: ' + i_result[0][0] + '-->'
                              + i_result[0][1])
            # Check if there already is an array to store the flow
            # information; if not, create one.
            if this_flow_name not in self.flows:
                self.flows[this_flow_name] = [None] * sim_params.n_intervals
            # Save this flow value for the current time step.
            self.flows[this_flow_name][
                sim_params.i_interval] = this_df[i_result][0]
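A sketch of how the collected flows might be inspected after a simulation run; `component` is a hypothetical object whose `update_flows` was called every interval.

# Sketch: sum each recorded flow over the simulated intervals.
for name, values in component.flows.items():
    print(name, sum(v for v in values if v is not None))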
# initialise the operational model
om = solph.Model(energysystem)

# if tee_switch is true solver messages will be displayed
logging.info('Solve the optimization problem')
om.solve(solver='cbc', solve_kwargs={'tee': True})

##########################################################################
# Check and plot the results
##########################################################################

# check if the new result object is working for custom components
results = processing.results(om)

custom_storage = views.node(results, 'storage')
electricity_bus = views.node(results, 'electricity')

meta_results = processing.meta_results(om)
pp.pprint(meta_results)

my_results = electricity_bus['scalars']

# installed capacity of storage in GWh
my_results['storage_invest_GWh'] = (
    results[(storage, None)]['scalars']['invest'] / 1e6)

# resulting renewable energy share
my_results['res_share'] = (
    1 - results[(pp_gas, bel)]['sequences'].sum()
    / results[(bel, demand)]['sequences'].sum())
def test_duals(self):
    results = processing.results(self.om)
    bel = views.node(results, 'b_el1', multiindex=True)
    eq_(int(bel['sequences']['b_el1', 'None', 'duals'].sum()), 48)
def test_multiindex_sequences(self):
    results = processing.results(self.om)
    bel1 = views.node(results, 'b_el1', multiindex=True)
    eq_(int(bel1['sequences'][('diesel', 'b_el1', 'flow')].sum()), 2875)
##########################################################################
# create an optimization problem and solve it

om = solph.Model(es)

# solve model
om.solve(solver='cbc', solve_kwargs={'tee': True})

##########################################################################
# Check and plot the results
##########################################################################

results = processing.results(om)

invest = views.node(results, 'b_heat')['scalars'][
    (('thermal_collector', 'b_heat'), 'invest')]
print("Invested in {} solar thermal power.".format(invest))

# plot data
if plt is not None:
    # plot heat bus
    data = views.node(results, 'b_heat')['sequences']
    data[[(('b_heat', 'demand_heat'), 'flow')]]
    exclude = ['excess_heat', 'status']
    columns = [
        c for c in data.columns
        if not any(s in c[0] or s in c[1] for s in exclude)
    ]
    data = data[columns]
    ax = data.plot(kind='line', drawstyle='steps-post', grid=True, rot=0)
def test_dispatch_example(solver='cbc', periods=24 * 5):
    """Create an energy system and optimize the dispatch at least costs."""
    Node.registry = None

    filename = os.path.join(os.path.dirname(__file__), 'input_data.csv')
    data = pd.read_csv(filename, sep=",")

    # ######################### create energysystem components ###############

    # resource buses
    bcoal = Bus(label='coal', balanced=False)
    bgas = Bus(label='gas', balanced=False)
    boil = Bus(label='oil', balanced=False)
    blig = Bus(label='lignite', balanced=False)

    # electricity and heat
    bel = Bus(label='b_el')
    bth = Bus(label='b_th')

    # an excess and a shortage variable can help to avoid infeasible problems
    excess_el = Sink(label='excess_el', inputs={bel: Flow()})
    # shortage_el = Source(label='shortage_el',
    #                      outputs={bel: Flow(variable_costs=200)})

    # sources
    ep_wind = economics.annuity(capex=1000, n=20, wacc=0.05)
    wind = Source(label='wind',
                  outputs={
                      bel: Flow(actual_value=data['wind'],
                                fixed=True,
                                investment=Investment(ep_costs=ep_wind,
                                                      existing=100))
                  })

    ep_pv = economics.annuity(capex=1500, n=20, wacc=0.05)
    pv = Source(label='pv',
                outputs={
                    bel: Flow(actual_value=data['pv'],
                              fixed=True,
                              investment=Investment(ep_costs=ep_pv,
                                                    existing=80))
                })

    # demands (electricity/heat)
    demand_el = Sink(label='demand_elec',
                     inputs={
                         bel: Flow(nominal_value=85,
                                   actual_value=data['demand_el'],
                                   fixed=True)
                     })

    demand_th = Sink(label='demand_therm',
                     inputs={
                         bth: Flow(nominal_value=40,
                                   actual_value=data['demand_th'],
                                   fixed=True)
                     })

    # power plants
    pp_coal = Transformer(
        label='pp_coal',
        inputs={bcoal: Flow()},
        outputs={bel: Flow(nominal_value=20.2, variable_costs=25)},
        conversion_factors={bel: 0.39})

    pp_lig = Transformer(
        label='pp_lig',
        inputs={blig: Flow()},
        outputs={bel: Flow(nominal_value=11.8, variable_costs=19)},
        conversion_factors={bel: 0.41})

    pp_gas = Transformer(
        label='pp_gas',
        inputs={bgas: Flow()},
        outputs={bel: Flow(nominal_value=41, variable_costs=40)},
        conversion_factors={bel: 0.50})

    pp_oil = Transformer(
        label='pp_oil',
        inputs={boil: Flow()},
        outputs={bel: Flow(nominal_value=5, variable_costs=50)},
        conversion_factors={bel: 0.28})

    # combined heat and power plant (chp)
    pp_chp = Transformer(label='pp_chp',
                         inputs={bgas: Flow()},
                         outputs={
                             bel: Flow(nominal_value=30, variable_costs=42),
                             bth: Flow(nominal_value=40)
                         },
                         conversion_factors={
                             bel: 0.3,
                             bth: 0.4
                         })

    # heat pump with a coefficient of performance (COP) of 3
    b_heat_source = Bus(label='b_heat_source')

    heat_source = Source(label='heat_source',
                         outputs={b_heat_source: Flow()})

    cop = 3
    heat_pump = Transformer(label='el_heat_pump',
                            inputs={
                                bel: Flow(),
                                b_heat_source: Flow()
                            },
                            outputs={bth: Flow(nominal_value=10)},
                            conversion_factors={
                                bel: 1 / 3,
                                b_heat_source: (cop - 1) / cop
                            })

    datetimeindex = pd.date_range('1/1/2012', periods=periods, freq='H')
    energysystem = EnergySystem(timeindex=datetimeindex)
    energysystem.add(bcoal, bgas, boil, bel, bth, blig, excess_el, wind, pv,
                     demand_el, demand_th, pp_coal, pp_lig, pp_oil, pp_gas,
                     pp_chp, b_heat_source, heat_source, heat_pump)

    # ################################ optimization ##########################

    # create optimization model based on energy_system
    optimization_model = Model(energysystem=energysystem)

    # solve problem
    optimization_model.solve(solver=solver)

    # write back results from optimization object to energysystem
    optimization_model.results()

    # ################################ results ###############################

    # generic result object
    results = processing.results(om=optimization_model)

    # subset of results that includes all flows into and from the electrical
    # bus; sequences are stored within a pandas.DataFrame and scalars, e.g.
    # investment values, within a pandas.Series object.
    # in this case the entry data['scalars'] does not exist since no
    # investment variables are used
    data = views.node(results, 'b_el')

    # generate results to be evaluated in tests
    comp_results = data['sequences'].sum(axis=0).to_dict()
    comp_results['pv_capacity'] = results[(pv, bel)]['scalars'].invest
    comp_results['wind_capacity'] = results[(wind, bel)]['scalars'].invest

    test_results = {
        (('wind', 'b_el'), 'flow'): 9239,
        (('pv', 'b_el'), 'flow'): 1147,
        (('b_el', 'demand_elec'), 'flow'): 7440,
        (('b_el', 'excess_el'), 'flow'): 6261,
        (('pp_chp', 'b_el'), 'flow'): 477,
        (('pp_lig', 'b_el'), 'flow'): 850,
        (('pp_gas', 'b_el'), 'flow'): 934,
        (('pp_coal', 'b_el'), 'flow'): 1256,
        (('pp_oil', 'b_el'), 'flow'): 0,
        (('b_el', 'el_heat_pump'), 'flow'): 202,
        'pv_capacity': 44,
        'wind_capacity': 246,
    }

    for key in test_results.keys():
        eq_(int(round(comp_results[key])), int(round(test_results[key])))
    Beta=[0.19 for p in range(0, periods)],
    back_pressure=False)

# create an optimization problem and solve it
om = solph.Model(es)

# debugging
# om.write('generic_chp.lp', io_options={'symbolic_solver_labels': True})

# solve model
om.solve(solver='cbc', solve_kwargs={'tee': True})

# create result object
results = processing.results(om)

# plot data
if plt is not None:
    # plot PQ diagram from component results
    data = results[(ccet, None)]['sequences']
    ax = data.plot(kind='scatter', x='Q', y='P', grid=True)
    ax.set_xlabel('Q (MW)')
    ax.set_ylabel('P (MW)')
    plt.show()

    # plot thermal bus
    data = views.node(results, 'bth')['sequences']
    ax = data.plot(kind='line', drawstyle='steps-post', grid=True)
    ax.set_xlabel('Time (h)')
    ax.set_ylabel('Q (MW)')
    plt.show()
def test_regression_investment_storage(solver='cbc'):
    """The problem was infeasible if the existing capacity and the maximum
    were defined in the Flow.
    """
    logging.info('Initialize the energy system')
    date_time_index = pd.date_range('1/1/2012', periods=4, freq='H')

    energysystem = solph.EnergySystem(timeindex=date_time_index)
    Node.registry = energysystem

    # Buses
    bgas = solph.Bus(label=('natural', 'gas'))
    bel = solph.Bus(label='electricity')

    solph.Sink(label='demand',
               inputs={
                   bel: solph.Flow(
                       actual_value=[209643, 207497, 200108, 191892],
                       fixed=True, nominal_value=1)
               })

    # Sources
    solph.Source(label='rgas', outputs={bgas: solph.Flow()})

    # Transformer
    solph.Transformer(label='pp_gas',
                      inputs={bgas: solph.Flow()},
                      outputs={bel: solph.Flow(nominal_value=300000)},
                      conversion_factors={bel: 0.58})

    # Investment storage
    solph.components.GenericStorage(
        label='storage',
        inputs={
            bel: solph.Flow(
                investment=solph.Investment(existing=625046 / 6, maximum=0))
        },
        outputs={
            bel: solph.Flow(
                investment=solph.Investment(existing=104174.33, maximum=1))
        },
        loss_rate=0.00,
        initial_storage_level=0,
        invest_relation_input_capacity=1 / 6,
        invest_relation_output_capacity=1 / 6,
        inflow_conversion_factor=1,
        outflow_conversion_factor=0.8,
        investment=solph.Investment(ep_costs=50, existing=625046),
    )

    # Solve model
    om = solph.Model(energysystem)
    om.solve(solver=solver)

    # Results
    results = processing.results(om)

    electricity_bus = views.node(results, 'electricity')
    my_results = electricity_bus['sequences'].sum(axis=0).to_dict()
    storage = energysystem.groups['storage']
    my_results['storage_invest'] = (
        results[(storage, None)]['scalars']['invest'])
def rolling_horizon(PV, Storage, SH=8760, PH=120, CH=120):
    iter = 0
    start = 0
    stop = PH
    mode = 'simulation'
    initial_capacity = 0.5
    path = 'results'
    filepath = '/diesel_pv_batt_PH120_P1_B1'
    components_list = [
        'demand', 'PV', 'storage', 'pp_oil_1', 'pp_oil_2', 'pp_oil_3',
        'excess'
    ]
    results_list = []
    economic_list = []

    cost = main.get_cost_dict(PH)

    file = 'data/timeseries.csv'
    timeseries = pd.read_csv(file, sep=';')
    timeseries.set_index(
        pd.DatetimeIndex(timeseries['timestamp'], freq='H'), inplace=True)
    timeseries.drop(labels='timestamp', axis=1, inplace=True)
    timeseries[timeseries['PV'] > 1] = 1

    itermax = int((SH / CH) - 1)
    objective = 0.0

    while iter <= itermax:
        if iter == 0:
            status = True
        else:
            status = False

        feedin_RH = timeseries.iloc[start:stop]
        print(str(iter + 1) + '/' + str(itermax + 1))

        m = main.create_optimization_model(mode, feedin_RH, initial_capacity,
                                           cost, PV, Storage,
                                           iterstatus=status)[0]

        results_el = main.solve_and_create_results(m)
        objective += processing.meta_results(m)['objective']

        initial_capacity = views.node(results_el, 'storage')['sequences'][
            (('storage', 'None'), 'capacity')][CH - 1]

        start += CH
        stop += CH
        iter += 1

    return objective
    invest_relation_output_capacity=1 / 6,
    inflow_conversion_factor=0.95,
    outflow_conversion_factor=0.95,
    investment=Investment(ep_costs=costs['storage']['epc']))

#################################################################
# Create model and solve
#################################################################

m = Model(energysystem)

# om.write(filename, io_options={'symbolic_solver_labels': True})

m.solve(solver='cbc', solve_kwargs={'tee': True})

results = processing.results(m)

views.node(results, 'storage')

views.node(results, 'micro_grid')['sequences'].plot(drawstyle='steps')
plt.show()

graph = create_graph(energysystem, m)
draw_graph(graph, plot=True, layout='neato', node_size=3000, arrows=False,
           node_color={'micro_grid': '#7EC0EE'})
    b_1: Flow(nominal_value=150, actual_value=[1, 0], fixed=True)}))

m = Model(energysystem=es)

# m.write('transshipment.lp', io_options={'symbolic_solver_labels': True})

m.solve(solver='cbc', solve_kwargs={'tee': True, 'keepfiles': False})

m.results()

graph = create_nx_graph(es, m)
draw_graph(graph, plot=True, layout='neato', node_size=3000,
           node_color={
               'b_0': '#cd3333',
               'b_1': '#7EC0EE',
               'b_2': '#eeac7e'})

results = processing.results(m)

print(views.node(results, 'gen_0'))
print(views.node(results, 'gen_1'))

views.node(results, 'line_0')['sequences'].plot(kind='bar')

# look at constraints of Links in the pyomo model LinkBlock
m.LinkBlock.pprint()