def compute(es=None, **arguments):
    """Create the optimization model, solve it and write back results.

    Parameters
    ----------
    es : :class:`oemof.solph.network.EnergySystem` object
        Energy system holding nodes, grouping functions and other
        important information. Required despite the ``None`` default,
        which is kept for backward compatibility.
    **arguments : key word arguments
        Arguments passed from command line. The keys ``'--debug'`` and
        ``'--solver'`` are read here.

    Returns
    -------
    Model
        The solved oemof.solph model.

    Raises
    ------
    ValueError
        If no energy system is given.
    """
    # Fail early with a clear message instead of the confusing
    # AttributeError that ``es.temporal`` would raise for ``es=None``.
    if es is None:
        raise ValueError(
            "An EnergySystem object is required to create the model.")

    # use temporal weighting for the objective if the energy system has one
    if es.temporal is not None:
        m = Model(es, objective_weighting=es.temporal['weighting'])
    else:
        m = Model(es)

    logging.info('Model creation time: ' + stopwatch())

    m.receive_duals()

    # in debug mode the lp file is written next to the working directory
    if arguments['--debug']:
        filename = 'renpass_model.lp'
        logging.info('Writing lp-file to {}.'.format(filename))
        m.write(filename, io_options={'symbolic_solver_labels': True})

    m.solve(solver=arguments['--solver'], solve_kwargs={'tee': True})

    logging.info('Optimization time: ' + stopwatch())

    return m
def optimize(input_data_dir, results_data_dir, solver='cbc', debug=False):
    r"""
    Takes the specified datapackage, creates an energysystem and solves the
    optimization problem.
    """
    # build the energy system from the datapackage description
    logging.info("Creating EnergySystem from datapackage")
    energy_system = EnergySystem.from_datapackage(
        os.path.join(input_data_dir, "datapackage.json"),
        attributemap={},
        typemap=TYPEMAP,
    )

    # turn the energy system into a plain oemof.solph optimization model
    logging.info("Creating the optimization model")
    model = Model(energy_system)

    # if you want dual variables / shadow prices uncomment line below
    # model.receive_duals()

    # in debug mode the lp file is stored next to the optimization results
    if debug:
        lp_file_dir = os.path.join(results_data_dir, 'model.lp')
        logging.info(f"Saving the lp-file to {lp_file_dir}")
        model.write(lp_file_dir, io_options={'symbolic_solver_labels': True})

    # select solver 'gurobi', 'cplex', 'glpk' etc
    logging.info(f'Solving the problem using {solver}')
    model.solve(solver=solver)

    # attach results and parameters of the solved model (still oemof.solph)
    # to the energy system before dumping it
    energy_system.results = model.results()
    energy_system.params = outputlib.processing.parameter_as_dict(energy_system)

    # dump everything in oemof-tabular format
    logging.info(f'Writing the results to {results_data_dir}')
    energy_system.dump(results_data_dir)
class Scenario:
    """Definition of a deflex scenario object.

    A scenario bundles the input tables (``table_collection``), the oemof
    ``EnergySystem`` (``es``), the optimisation ``model`` and the solver
    ``results``, together with helpers to read/write the scenario from/to
    excel files or csv collections and to dump/restore a solved energy
    system via pickle.
    """

    def __init__(self, **kwargs):
        """
        Parameters
        ----------
        kwargs
            Optional keyword arguments: ``name``, ``table_collection``,
            ``year``, ``ignore_errors``, ``round_values``, ``model``,
            ``es``, ``results_fn``, ``debug``, ``meta``.
        """
        self.name = kwargs.get("name", "unnamed_scenario")
        self.table_collection = kwargs.get("table_collection", {})
        self.year = kwargs.get("year", None)
        self.ignore_errors = kwargs.get("ignore_errors", False)
        self.round_values = kwargs.get("round_values", 0)
        self.model = kwargs.get("model", None)
        self.es = kwargs.get("es", None)
        self.results = None
        self.results_fn = kwargs.get("results_fn", None)
        self.debug = kwargs.get("debug", None)
        self.location = None
        self.map = None
        self.meta = kwargs.get("meta", None)

    def initialise_energy_system(self):
        """Create an EnergySystem with an hourly index for ``self.year``.

        In debug mode only three time steps are used; otherwise a full
        (leap) year is modelled.

        Raises
        ------
        TypeError
            If ``self.year`` is not a number (e.g. still ``None``).
        """
        if self.debug is True:
            number_of_time_steps = 3
        else:
            try:
                if calendar.isleap(self.year):
                    number_of_time_steps = 8784
                else:
                    number_of_time_steps = 8760
            except TypeError:
                msg = ("You cannot create an EnergySystem with self.year={0}, "
                       "of type {1}.")
                raise TypeError(msg.format(self.year, type(self.year)))
        date_time_index = pd.date_range("1/1/{0}".format(self.year),
                                        periods=number_of_time_steps,
                                        freq="H")
        return EnergySystem(timeindex=date_time_index)

    def load_excel(self, filename=None):
        """Load scenario from an excel-file."""
        if filename is not None:
            self.location = filename
        xls = pd.ExcelFile(self.location)
        for sheet in xls.sheet_names:
            self.table_collection[sheet] = xls.parse(sheet,
                                                     index_col=[0],
                                                     header=[0, 1])
        return self

    def load_csv(self, path=None):
        """Load scenario from a csv-collection."""
        if path is not None:
            self.location = path
        for file in os.listdir(self.location):
            if file[-4:] == ".csv":
                filename = os.path.join(self.location, file)
                self.table_collection[file[:-4]] = pd.read_csv(filename,
                                                               index_col=[0],
                                                               header=[0, 1])
        return self

    def to_excel(self, filename):
        """Dump scenario into an excel-file."""
        # create path if it does not exist
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        # context manager closes (and saves) the file even on errors
        with pd.ExcelWriter(filename) as writer:
            for name, df in sorted(self.table_collection.items()):
                df.to_excel(writer, name)
        logging.info("Scenario saved as excel file to {0}".format(filename))

    def to_csv(self, path):
        """Dump scenario into a csv-collection (one file per table)."""
        # start from a clean directory
        if os.path.isdir(path):
            shutil.rmtree(os.path.join(path))
        os.makedirs(path)

        for name, df in self.table_collection.items():
            name = name.replace(" ", "_") + ".csv"
            filename = os.path.join(path, name)
            df.to_csv(filename)
        logging.info("Scenario saved as csv-collection to {0}".format(path))

    def check_table(self, table_name):
        """Raise a ValueError listing columns with NaN values, if any."""
        if self.table_collection[table_name].isnull().values.any():
            c = []
            for column in self.table_collection[table_name].columns:
                if self.table_collection[table_name][column].isnull().any():
                    c.append(column)
            msg = "Nan Values in the {0} table (columns: {1})."
            raise ValueError(msg.format(table_name, c))
        return self

    def create_nodes(self):
        """Hook for subclasses: build and return the dict of oemof nodes."""
        pass

    def initialise_es(self, year=None):
        """(Re-)create the energy system, optionally for a new year."""
        if year is not None:
            self.year = year
        self.es = self.initialise_energy_system()
        return self

    def add_nodes(self, nodes):
        """
        Parameters
        ----------
        nodes : dict
            Dictionary with a unique key and values of type
            oemof.network.Node.

        Returns
        -------
        self
        """
        if self.es is None:
            self.initialise_es()
        self.es.add(*nodes.values())
        return self

    def table2es(self):
        """Create nodes from the table collection and add them to the es."""
        if self.es is None:
            self.es = self.initialise_energy_system()
        nodes = self.create_nodes()
        self.es.add(*nodes.values())
        return self

    def create_model(self):
        """Create the oemof.solph model from the energy system."""
        self.model = Model(self.es)
        return self

    def dump_es(self, filename):
        """Pickle the meta data and the energy system to ``filename``."""
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        if self.meta is None:
            # FIX: the key written by solve() is "meta", not "Meta" — the
            # original capital-M check made this fallback unreachable.
            if self.es.results is not None and "meta" in self.es.results:
                self.meta = self.es.results["meta"]
        with open(filename, "wb") as f:
            pickle.dump(self.meta, f)
            pickle.dump(self.es.__dict__, f)
        logging.info("Results dumped to {0}.".format(filename))

    def restore_es(self, filename=None):
        """Restore a dumped energy system (see ``dump_es``)."""
        if filename is None:
            filename = self.results_fn
        else:
            self.results_fn = filename
        if self.es is None:
            self.es = EnergySystem()
        with open(filename, "rb") as f:
            self.meta = pickle.load(f)
            self.es.__dict__ = pickle.load(f)
        self.results = self.es.results["main"]
        logging.info("Results restored from {0}.".format(filename))

    def scenario_info(self, solver_name):
        """Return a small dict describing this scenario run."""
        sc_info = {
            "name": self.name,
            "datetime": datetime.datetime.now(),
            "year": self.year,
            "solver": solver_name,
        }
        return sc_info

    def solve(self, with_duals=False, tee=True, logfile=None, solver=None):
        """Solve the model and store results/meta/params on the es.

        Assumes ``self.model`` has been created (see ``create_model``) and
        that ``self.es.results`` behaves like a dict.
        """
        logging.info("Optimising using {0}.".format(solver))

        if with_duals:
            self.model.receive_duals()

        if self.debug:
            filename = os.path.join(helpers.extend_basic_path("lp_files"),
                                    "reegis.lp")
            logging.info("Store lp-file in {0}.".format(filename))
            self.model.write(filename,
                             io_options={"symbolic_solver_labels": True})

        self.model.solve(solver=solver,
                         solve_kwargs={
                             "tee": tee,
                             "logfile": logfile
                         })
        self.es.results["main"] = processing.results(self.model)
        self.es.results["meta"] = processing.meta_results(self.model)
        self.es.results["param"] = processing.parameter_as_dict(self.es)
        self.es.results["meta"]["scenario"] = self.scenario_info(solver)
        self.es.results["meta"]["in_location"] = self.location
        self.es.results["meta"]["file_date"] = datetime.datetime.fromtimestamp(
            os.path.getmtime(self.location))
        # NOTE(review): ``logger`` is presumably oemof.tools.logger — confirm.
        self.es.results["meta"]["oemof_version"] = logger.get_version()
        self.results = self.es.results["main"]

    def plot_nodes(self, show=None, filename=None, **kwargs):
        """Create (and optionally draw) the networkx graph of the es."""
        rm_nodes = kwargs.get("remove_nodes_with_substrings")
        g = graph.create_nx_graph(self.es,
                                  filename=filename,
                                  remove_nodes_with_substrings=rm_nodes)
        if show is True:
            draw_graph(g, **kwargs)
        return g
def test_dispatch_example(solver='cbc', periods=24*5):
    """Create an energy system and optimize the dispatch at least costs.

    Builds a small system (coal/gas/oil/lignite plants, a chp, a heat
    pump, wind and pv with investment), solves it and compares the summed
    flows and invested capacities against reference values.
    """
    Node.registry = None
    filename = os.path.join(os.path.dirname(__file__), 'input_data.csv')
    data = pd.read_csv(filename, sep=",")

    # ######################### create energysystem components ################

    # resource buses
    bcoal = Bus(label='coal', balanced=False)
    bgas = Bus(label='gas', balanced=False)
    boil = Bus(label='oil', balanced=False)
    blig = Bus(label='lignite', balanced=False)

    # electricity and heat
    bel = Bus(label='b_el')
    bth = Bus(label='b_th')

    # an excess and a shortage variable can help to avoid infeasible problems
    excess_el = Sink(label='excess_el', inputs={bel: Flow()})
    # shortage_el = Source(label='shortage_el',
    #                      outputs={bel: Flow(variable_costs=200)})

    # sources
    ep_wind = economics.annuity(capex=1000, n=20, wacc=0.05)
    wind = Source(label='wind', outputs={bel: Flow(
        fix=data['wind'],
        investment=Investment(ep_costs=ep_wind, existing=100))})

    ep_pv = economics.annuity(capex=1500, n=20, wacc=0.05)
    pv = Source(label='pv', outputs={bel: Flow(
        fix=data['pv'],
        investment=Investment(ep_costs=ep_pv, existing=80))})

    # demands (electricity/heat)
    demand_el = Sink(label='demand_elec',
                     inputs={bel: Flow(nominal_value=85,
                                       fix=data['demand_el'])})

    demand_th = Sink(label='demand_therm',
                     inputs={bth: Flow(nominal_value=40,
                                       fix=data['demand_th'])})

    # power plants
    pp_coal = Transformer(label='pp_coal',
                          inputs={bcoal: Flow()},
                          outputs={bel: Flow(nominal_value=20.2,
                                             variable_costs=25)},
                          conversion_factors={bel: 0.39})

    pp_lig = Transformer(label='pp_lig',
                         inputs={blig: Flow()},
                         outputs={bel: Flow(nominal_value=11.8,
                                            variable_costs=19)},
                         conversion_factors={bel: 0.41})

    pp_gas = Transformer(label='pp_gas',
                         inputs={bgas: Flow()},
                         outputs={bel: Flow(nominal_value=41,
                                            variable_costs=40)},
                         conversion_factors={bel: 0.50})

    pp_oil = Transformer(label='pp_oil',
                         inputs={boil: Flow()},
                         outputs={bel: Flow(nominal_value=5,
                                            variable_costs=50)},
                         conversion_factors={bel: 0.28})

    # combined heat and power plant (chp)
    pp_chp = Transformer(label='pp_chp',
                         inputs={bgas: Flow()},
                         outputs={bel: Flow(nominal_value=30,
                                            variable_costs=42),
                                  bth: Flow(nominal_value=40)},
                         conversion_factors={bel: 0.3, bth: 0.4})

    # heatpump with a coefficient of performance (COP) of 3
    b_heat_source = Bus(label='b_heat_source')

    heat_source = Source(label='heat_source',
                         outputs={b_heat_source: Flow()})

    cop = 3
    # FIX: derive both conversion factors from ``cop`` (the electrical
    # factor was hard-coded as 1/3 although ``cop`` was defined for it).
    heat_pump = Transformer(label='el_heat_pump',
                            inputs={bel: Flow(), b_heat_source: Flow()},
                            outputs={bth: Flow(nominal_value=10)},
                            conversion_factors={
                                bel: 1/cop,
                                b_heat_source: (cop-1)/cop})

    datetimeindex = pd.date_range('1/1/2012', periods=periods, freq='H')
    energysystem = EnergySystem(timeindex=datetimeindex)
    energysystem.add(bcoal, bgas, boil, bel, bth, blig, excess_el, wind, pv,
                     demand_el, demand_th, pp_coal, pp_lig, pp_oil, pp_gas,
                     pp_chp, b_heat_source, heat_source, heat_pump)

    # ################################ optimization ###########################

    # create optimization model based on energy_system
    optimization_model = Model(energysystem=energysystem)

    # solve problem
    optimization_model.solve(solver=solver)

    # write back results from optimization object to energysystem
    optimization_model.results()

    # ################################ results ################################

    # generic result object
    results = processing.results(om=optimization_model)

    # subset of results that includes all flows into and from electrical bus
    # sequences are stored within a pandas.DataFrames and scalars e.g.
    # investment values within a pandas.Series object.
    # in this case the entry data['scalars'] does not exist since no investment
    # variables are used
    data = views.node(results, 'b_el')

    # generate results to be evaluated in tests
    comp_results = data['sequences'].sum(axis=0).to_dict()
    comp_results['pv_capacity'] = results[(pv, bel)]['scalars'].invest
    comp_results['wind_capacity'] = results[(wind, bel)]['scalars'].invest

    test_results = {
        (('wind', 'b_el'), 'flow'): 9239,
        (('pv', 'b_el'), 'flow'): 1147,
        (('b_el', 'demand_elec'), 'flow'): 7440,
        (('b_el', 'excess_el'), 'flow'): 6261,
        (('pp_chp', 'b_el'), 'flow'): 477,
        (('pp_lig', 'b_el'), 'flow'): 850,
        (('pp_gas', 'b_el'), 'flow'): 934,
        (('pp_coal', 'b_el'), 'flow'): 1256,
        (('pp_oil', 'b_el'), 'flow'): 0,
        (('b_el', 'el_heat_pump'), 'flow'): 202,
        'pv_capacity': 44,
        'wind_capacity': 246,
    }

    for key in test_results.keys():
        eq_(int(round(comp_results[key])), int(round(test_results[key])))
) es.add( Source( label="gen_1", outputs={b_el1: Flow(nominal_value=100, variable_costs=25)}, ) ) es.add(Sink(label="load", inputs={b_el2: Flow(nominal_value=100, fix=[1, 1])})) m = Model(energysystem=es) # m.write('lopf.lp', io_options={'symbolic_solver_labels': True}) m.solve(solver="cbc", solve_kwargs={"tee": True, "keepfiles": False}) m.results() graph = create_nx_graph(es) draw_graph( graph, plot=True, layout="neato", node_size=3000, node_color={"b_0": "#cd3333", "b_1": "#7EC0EE", "b_2": "#eeac7e"}, )
initial_capacity=0.5, invest_relation_input_capacity=1 / 6, invest_relation_output_capacity=1 / 6, inflow_conversion_factor=0.95, outflow_conversion_factor=0.95, investment=Investment(ep_costs=costs['storage']['epc'])) ################################################################# # Create model and solve ################################################################# m = Model(energysystem) # om.write(filename, io_options={'symbolic_solver_labels': True}) m.solve(solver='cbc', solve_kwargs={'tee': True}) results = processing.results(m) views.node(results, 'storage') views.node(results, 'micro_grid')['sequences'].plot(drawstyle='steps') plt.show() graph = create_graph(energysystem, m) draw_graph(graph, plot=True, layout='neato', node_size=3000, arrows=False,
import oemof.tabular.tools.postprocessing as pp

# create path for results (we use the datapackage_dir to store results)
results_path = 'results'
os.makedirs(results_path, exist_ok=True)

# create energy system object from the datapackage description
es = EnergySystem.from_datapackage(
    os.path.join("./datapackage", "datapackage.json"),
    attributemap={},
    typemap=TYPEMAP,
)

# create model from energy system (this is just oemof.solph)
m = Model(es)

# if you want dual variables / shadow prices uncomment line below
# m.receive_duals()

# select solver 'gurobi', 'cplex', 'glpk' etc
m.solve("glpk")

# get the results from the the solved model(still oemof.solph)
m.results = m.results()

# now we use the write results method to write the results in oemof-tabular
# format
pp.write_results(m, results_path)

print("process completed")
# Creating the excess sink and the shortage source excess_el = Sink(label='excess_el', inputs={elbus: Flow()}) shortage_el = Source(label='shortage_el', outputs={elbus: Flow(variable_costs=1e20)}) # Adding all the components to the energy system es.add(excess_el, shortage_el, thdemand, eldemand, heat_pump, el_storage, chp_gas, pv, gas, gasbus, thbus, elbus) # Create the model for optimization and run the optimization opt_model = Model(es) opt_model.solve(solver='cbc') logging.info('Optimization successful') # Post-processing and data visualization results_main = outputlib.processing.results(opt_model) results_meta = outputlib.processing.meta_results(opt_model) params = outputlib.processing.parameter_as_dict(es) print(results_meta) print(results_main[gasbus, chp_gas]['sequences'].head()) flows_el = pd.DataFrame(index=date_time_index)
actual_value=pv_ts)}) demand_el = Sink(label='electricity_demand', inputs={bus_el: Flow(nominal_value=2, fixed=True, actual_value=demand_ts)}) curtailment = Sink(label='curtailment', inputs={bus_el: Flow(nominal_value=5, max=pv_ts)}) es.add(bus_el, bus_gas, source_gas, gas_pp, pv, demand_el, curtailment) optimodel = Model(es) optimodel.solve() results = optimodel.results() string_results = outputlib.processing.convert_keys_to_strings(results) # collect all timeseries in a DataFrame sequences = {k: v['sequences'] for k, v in string_results.items()} sequences = pd.concat(sequences, axis=1) print(sequences) # plot idx = pd.IndexSlice fig, ax = plt.subplots()
el_bus: Flow( nominal_value=12.5, variable_costs=10 ) } ) es.add(el_bus, demand, pp1, pp2) om = Model(es) lp_file_dir = 'dispatch.lp' om.write(lp_file_dir, io_options={'symbolic_solver_labels': True}) om.solve() results = om.results() string_results = outputlib.processing.convert_keys_to_strings(results) string_results = outputlib.processing.convert_keys_to_strings(results) # collect all timeseries in a DataFrame sequences = {k: v['sequences'] for k, v in string_results.items()} sequences = pd.concat(sequences, axis=1) print(sequences) # plot idx = pd.IndexSlice
def test_lopf(solver="cbc"):
    """Solve a 3-node linear optimal power flow and check the line flows,
    generator dispatch and objective value against reference results."""
    logging.info("Initialize the energy system")

    # create time index for a single hour in May
    # (one time step is enough for this static lopf check)
    date_time_index = pd.date_range("5/5/2012", periods=1, freq="H")

    es = EnergySystem(timeindex=date_time_index)

    ##########################################################################
    # Create oemof.solph objects
    ##########################################################################

    logging.info("Create oemof.solph objects")

    # three electrical buses allowing flows in both directions
    b_el0 = custom.ElectricalBus(label="b_0", v_min=-1, v_max=1)
    b_el1 = custom.ElectricalBus(label="b_1", v_min=-1, v_max=1)
    b_el2 = custom.ElectricalBus(label="b_2", v_min=-1, v_max=1)

    es.add(b_el0, b_el1, b_el2)

    # line 0-1 capacity is an investment decision ...
    es.add(
        custom.ElectricalLine(
            input=b_el0,
            output=b_el1,
            reactance=0.0001,
            investment=Investment(ep_costs=10),
            min=-1,
            max=1,
        ))

    # ... while lines 1-2 and 2-0 have fixed capacity of 60
    es.add(
        custom.ElectricalLine(
            input=b_el1,
            output=b_el2,
            reactance=0.0001,
            nominal_value=60,
            min=-1,
            max=1,
        ))

    es.add(
        custom.ElectricalLine(
            input=b_el2,
            output=b_el0,
            reactance=0.0001,
            nominal_value=60,
            min=-1,
            max=1,
        ))

    # two generators with different variable costs, one load
    es.add(
        Source(
            label="gen_0",
            outputs={b_el0: Flow(nominal_value=100, variable_costs=50)},
        ))

    es.add(
        Source(
            label="gen_1",
            outputs={b_el1: Flow(nominal_value=100, variable_costs=25)},
        ))

    es.add(Sink(
        label="load",
        inputs={b_el2: Flow(nominal_value=100, fix=1)},
    ))

    ##########################################################################
    # Optimise the energy system and plot the results
    ##########################################################################

    logging.info("Creating optimisation model")
    om = Model(es)

    logging.info("Running lopf on 3-Node exmaple system")
    om.solve(solver=solver)

    results = processing.results(om)

    generators = views.node_output_by_type(results, Source)

    # expected dispatch: cheap gen_1 limited by line capacities
    generators_test_results = {
        (es.groups["gen_0"], es.groups["b_0"], "flow"): 20,
        (es.groups["gen_1"], es.groups["b_1"], "flow"): 80,
    }

    for key in generators_test_results.keys():
        logging.debug("Test genertor production of {0}".format(key))
        eq_(
            int(round(generators[key])),
            int(round(generators_test_results[key])),
        )

    # negative flows mean power flowing against the line's direction
    eq_(
        results[es.groups["b_2"], es.groups["b_0"]]["sequences"]["flow"][0],
        -40,
    )

    eq_(results[es.groups["b_1"], es.groups["b_2"]]["sequences"]["flow"][0],
        60)

    eq_(
        results[es.groups["b_0"], es.groups["b_1"]]["sequences"]["flow"][0],
        -20,
    )

    # objective function
    eq_(round(processing.meta_results(om)["objective"]), 3200)
# heatpump with a coefficient of performance (COP) of 3 b_heat_source = Bus(label='b_heat_source') heat_source = Source(label='heat_source', outputs={b_heat_source: Flow()}) cop = 3 heat_pump = Transformer(label='heat_pump', inputs={ bel: Flow(), b_heat_source: Flow() }, outputs={bth: Flow(nominal_value=10)}, conversion_factors={ bel: 1 / 3, b_heat_source: (cop - 1) / cop }) datetimeindex = pd.date_range('1/1/2012', periods=24, freq='H') energysystem = EnergySystem(timeindex=datetimeindex) energysystem.add(bcoal, bgas, boil, bel, bth, blig, excess_el, wind, pv, demand_el, demand_th, pp_coal, pp_lig, pp_oil, pp_gas, pp_chp, b_heat_source, heat_source, heat_pump) # ################################ optimization ########################### # create optimization model based on energy_system optimization_model = Model(energysystem=energysystem) # solve problem optimization_model.solve()
def test_dispatch_fix_example(solver='cbc', periods=10):
    """Invest in a flow with a `fix` sequence containing values > 1."""
    Node.registry = None

    # read the pv and demand time series
    csv_path = os.path.join(os.path.dirname(__file__), 'input_data.csv')
    timeseries = pd.read_csv(csv_path, sep=",")

    # ######################### create energysystem components ################

    # electricity bus
    bel = Bus(label='b_el')

    # an excess and a shortage variable can help to avoid infeasible problems
    excess_el = Sink(label='excess_el', inputs={bel: Flow()})
    # shortage_el = Source(label='shortage_el',
    #                      outputs={bel: Flow(variable_costs=200)})

    # pv source whose capacity is an investment decision
    ep_pv = economics.annuity(capex=1500, n=20, wacc=0.05)
    pv = Source(label='pv',
                outputs={
                    bel: Flow(fix=timeseries['pv'],
                              investment=Investment(ep_costs=ep_pv))
                })

    # fixed electricity demand
    demand_el = Sink(
        label='demand_elec',
        inputs={bel: Flow(nominal_value=85, fix=timeseries['demand_el'])})

    energysystem = EnergySystem(
        timeindex=pd.date_range('1/1/2012', periods=periods, freq='H'))
    energysystem.add(bel, excess_el, pv, demand_el)

    # ################################ optimization ###########################

    # create and solve the optimization model based on energy_system
    optimization_model = Model(energysystem=energysystem)
    optimization_model.solve(solver=solver)

    # ################################ results ################################

    # generic result object
    results = processing.results(om=optimization_model)

    # subset of results that includes all flows into and from electrical bus;
    # sequences are stored within a pandas.DataFrame, scalars (e.g.
    # investment values) within a pandas.Series object
    bus_data = views.node(results, 'b_el')

    # generate results to be evaluated in tests
    comp_results = bus_data['sequences'].sum(axis=0).to_dict()
    comp_results['pv_capacity'] = results[(pv, bel)]['scalars'].invest

    assert comp_results[(('pv', 'b_el'), 'flow')] > 0
def test_connect_invest():
    """Invest in two coupled connection lines and a storage and compare
    the invested capacities against reference values."""
    date_time_index = pd.date_range('1/1/2012', periods=24 * 7, freq='H')

    energysystem = EnergySystem(timeindex=date_time_index)
    # with a registry set, created nodes register themselves automatically
    network.Node.registry = energysystem

    # Read data file
    full_filename = os.path.join(os.path.dirname(__file__),
                                 'connect_invest.csv')
    data = pd.read_csv(full_filename, sep=",")

    logging.info('Create oemof objects')

    # create electricity bus
    bel1 = Bus(label="electricity1")
    bel2 = Bus(label="electricity2")

    # create excess component for the electricity bus to allow overproduction
    Sink(label='excess_bel', inputs={bel2: Flow()})
    Source(label='shortage', outputs={bel2: Flow(variable_costs=50000)})

    # create fixed source object representing wind power plants
    Source(label='wind', outputs={bel1: Flow(fix=data['wind'],
                                             nominal_value=1000000)})

    # create simple sink object representing the electrical demand
    Sink(label='demand', inputs={bel1: Flow(fix=data['demand_el'],
                                            nominal_value=1)})

    # storage with invested capacity; the very high variable_costs on the
    # in/out flows discourage (dis)charging unless required
    storage = components.GenericStorage(
        label='storage',
        inputs={bel1: Flow(variable_costs=10e10)},
        outputs={bel1: Flow(variable_costs=10e10)},
        loss_rate=0.00,
        initial_storage_level=0,
        invest_relation_input_capacity=1 / 6,
        invest_relation_output_capacity=1 / 6,
        inflow_conversion_factor=1,
        outflow_conversion_factor=0.8,
        investment=Investment(ep_costs=0.2),
    )

    # two directed connection lines between the buses, both with invested
    # capacity
    line12 = Transformer(
        label="line12",
        inputs={bel1: Flow()},
        outputs={bel2: Flow(investment=Investment(ep_costs=20))})

    line21 = Transformer(
        label="line21",
        inputs={bel2: Flow()},
        outputs={bel1: Flow(investment=Investment(ep_costs=20))})

    om = Model(energysystem)

    # couple the investment variables: line21 = 2 * line12 and
    # storage capacity = line12 capacity
    constraints.equate_variables(om, om.InvestmentFlow.invest[line12, bel2],
                                 om.InvestmentFlow.invest[line21, bel1], 2)
    constraints.equate_variables(
        om, om.InvestmentFlow.invest[line12, bel2],
        om.GenericInvestmentStorageBlock.invest[storage])

    # if tee_switch is true solver messages will be displayed
    logging.info('Solve the optimization problem')
    om.solve(solver='cbc')

    # check if the new result object is working for custom components
    results = processing.results(om)

    my_results = dict()
    my_results['line12'] = float(views.node(results, 'line12')['scalars'])
    my_results['line21'] = float(views.node(results, 'line21')['scalars'])

    stor_res = views.node(results, 'storage')['scalars']
    my_results['storage_in'] = stor_res[(('electricity1', 'storage'),
                                         'invest')]
    my_results['storage'] = stor_res[(('storage', 'None'), 'invest')]
    my_results['storage_out'] = stor_res[(('storage', 'electricity1'),
                                          'invest')]

    # reference values for the invested capacities
    connect_invest_dict = {
        'line12': 814705,
        'line21': 1629410,
        'storage': 814705,
        'storage_in': 135784,
        'storage_out': 135784
    }

    for key in connect_invest_dict.keys():
        eq_(int(round(my_results[key])),
            int(round(connect_invest_dict[key])))
def test_gen_caes():
    """Solve a model with a generic compressed air energy storage (CAES)
    plant and compare the summed result sequences with reference values."""
    # read sequence data
    full_filename = os.path.join(os.path.dirname(__file__),
                                 'generic_caes.csv')
    data = pd.read_csv(full_filename)

    # select periods
    periods = len(data)-1

    # create an energy system
    idx = pd.date_range('1/1/2017', periods=periods, freq='H')
    es = EnergySystem(timeindex=idx)
    # with a registry set, created nodes register themselves automatically
    Node.registry = es

    # resources
    bgas = Bus(label='bgas')

    Source(label='rgas', outputs={
        bgas: Flow(variable_costs=20)})

    # power: separate source and sink buses with price time series
    bel_source = Bus(label='bel_source')
    Source(label='source_el', outputs={
        bel_source: Flow(variable_costs=data['price_el_source'])})

    bel_sink = Bus(label='bel_sink')
    Sink(label='sink_el', inputs={
        bel_sink: Flow(variable_costs=data['price_el_sink'])})

    # dictionary with parameters for a specific CAES plant
    # based on thermal modelling and linearization techniques
    concept = {
        'cav_e_in_b': 0,
        'cav_e_in_m': 0.6457267578,
        'cav_e_out_b': 0,
        'cav_e_out_m': 0.3739636077,
        'cav_eta_temp': 1.0,
        'cav_level_max': 211.11,
        'cmp_p_max_b': 86.0918959849,
        'cmp_p_max_m': 0.0679999932,
        'cmp_p_min': 1,
        'cmp_q_out_b': -19.3996965679,
        'cmp_q_out_m': 1.1066036114,
        'cmp_q_tes_share': 0,
        'exp_p_max_b': 46.1294016678,
        'exp_p_max_m': 0.2528340303,
        'exp_p_min': 1,
        'exp_q_in_b': -2.2073411014,
        'exp_q_in_m': 1.129249765,
        'exp_q_tes_share': 0,
        'tes_eta_temp': 1.0,
        'tes_level_max': 0.0
    }

    # generic compressed air energy storage (caes) plant
    custom.GenericCAES(
        label='caes',
        electrical_input={bel_source: Flow()},
        fuel_input={bgas: Flow()},
        electrical_output={bel_sink: Flow()},
        params=concept,
        fixed_costs=0)

    # create an optimization problem and solve it
    om = Model(es)

    # solve model
    om.solve(solver='cbc')

    # create result object
    results = processing.results(om)

    # sum all internal caes sequences (keys with None target are the
    # plant's internal state variables)
    data = views.node(
        results, 'caes', keep_none_type=True
    )['sequences'].sum(axis=0).to_dict()

    # reference values for the summed sequences
    test_dict = {
        (('caes', None), 'cav_level'): 25658.82964382,
        (('caes', None), 'exp_p'): 5020.801997000007,
        (('caes', None), 'exp_q_fuel_in'): 5170.880360999999,
        (('caes', None), 'tes_e_out'): 0.0,
        (('caes', None), 'exp_st'): 226.0,
        (('bgas', 'caes'), 'flow'): 5170.880360999999,
        (('caes', None), 'cav_e_out'): 1877.5972265299995,
        (('caes', None), 'exp_p_max'): 17512.352336,
        (('caes', None), 'cmp_q_waste'): 2499.9125993000007,
        (('caes', None), 'cmp_p'): 2907.7271520000004,
        (('caes', None), 'exp_q_add_in'): 0.0,
        (('caes', None), 'cmp_st'): 37.0,
        (('caes', None), 'cmp_q_out_sum'): 2499.9125993000007,
        (('caes', None), 'tes_level'): 0.0,
        (('caes', None), 'tes_e_in'): 0.0,
        (('caes', None), 'exp_q_in_sum'): 5170.880360999999,
        (('caes', None), 'cmp_p_max'): 22320.76334300001,
        (('caes', 'bel_sink'), 'flow'): 5020.801997000007,
        (('bel_source', 'caes'), 'flow'): 2907.7271520000004,
        (('caes', None), 'cav_e_in'): 1877.597226}

    for key in test_dict.keys():
        eq_(int(round(data[key])), int(round(test_dict[key])))
Transformer( label="pp_oil", inputs={boil: Flow()}, outputs={bel: Flow(nominal_value=98e6, variable_costs=8)}, conversion_factors={bel: 0.33}, )) # ################################ optimization ########################### # create optimization model based on energy_system optimization_model = Model(energysystem=energysystem) # solve problem optimization_model.solve(solver=solver, solve_kwargs={ "tee": True, "keepfiles": False }) # write back results from optimization object to energysystem optimization_model.results() # ################################ results ################################ # subset of results that includes all flows into and from electrical bus # sequences are stored within a pandas.DataFrames and scalars e.g. # investment values within a pandas.Series object. # in this case the entry data['scalars'] does not exist since no investment # variables are used data = views.node(optimization_model.results(), "bel") data["sequences"].info()
config = building.read_build_config('config.toml') es = EnergySystem.from_datapackage( "datapackage.json", attributemap={}, typemap=facades.TYPEMAP, ) m = Model(es) m.write('tmp.lp', io_options={"symbolic_solver_labels": True}) m.receive_duals() m.solve('gurobi') m.results = m.results() if os.path.exists('results'): shutil.rmtree('results') os.mkdir('results') pp.write_results(m, 'results', scalars=False) # create short summary supply_sum = (pp.supply_results( results=m.results, es=m.es, bus=[b.label for b in es.nodes if isinstance(b, Bus)], types=[
def run_basic_energysystem(args):
    """Build and solve a small dispatch model; return the electricity
    production time series per technology.

    Parameters
    ----------
    args : sequence
        ``args[0]`` is the nominal value for wind, ``args[1]`` the
        nominal value for solar (pv).

    Returns
    -------
    pandas.DataFrame
        Electricity production per technology over time.

    Notes
    -----
    Relies on module-level names not visible in this chunk:
    ``datetimeindex``, ``data``, ``solver``, ``outputlib`` —
    TODO confirm they are defined before this function is called.
    """
    n_val_wind = args[0]
    n_val_solar = args[1]
    start = time.time()

    # initialize and provide data
    energysystem = EnergySystem(timeindex=datetimeindex)

    # buses
    bcoal = Bus(label='coal', balanced=False)
    bgas = Bus(label='gas', balanced=False)
    bel = Bus(label='electricity')
    energysystem.add(bcoal, bgas, bel)

    # sources
    energysystem.add(
        Source(label='wind',
               outputs={
                   bel: Flow(actual_value=data['wind'],
                             nominal_value=n_val_wind,
                             fixed=True)
               }))
    energysystem.add(
        Source(label='pv',
               outputs={
                   bel: Flow(actual_value=data['pv'],
                             nominal_value=n_val_solar,
                             fixed=True)
               }))

    # excess and shortage to avoid infeasibilies
    energysystem.add(Sink(label='excess_el', inputs={bel: Flow()}))
    energysystem.add(
        Source(label='shortage_el',
               outputs={bel: Flow(variable_costs=200)}))

    # demands (electricity/heat)
    energysystem.add(
        Sink(label='demand_el',
             inputs={
                 bel: Flow(nominal_value=65,
                           actual_value=data['demand_el'],
                           fixed=True)
             }))

    # power plants
    energysystem.add(
        Transformer(label='pp_coal',
                    inputs={bcoal: Flow()},
                    outputs={bel: Flow(nominal_value=20.2,
                                       variable_costs=25)},
                    conversion_factors={bel: 0.39}))
    energysystem.add(
        Transformer(label='pp_gas',
                    inputs={bgas: Flow()},
                    outputs={bel: Flow(nominal_value=41,
                                       variable_costs=40)},
                    conversion_factors={bel: 0.50}))

    # create optimization model based on energy_system
    optimization_model = Model(energysystem=energysystem)

    # solve problem
    optimization_model.solve(solver=solver,
                             solve_kwargs={
                                 'tee': False,
                                 'keepfiles': False
                             })

    results = outputlib.processing.results(optimization_model)

    # electricity production per technology
    results_el = outputlib.views.node(results, 'electricity')
    el_sequences = results_el['sequences']
    el_prod = el_sequences[[(('wind', 'electricity'), 'flow'),
                            (('pv', 'electricity'), 'flow'),
                            (('pp_coal', 'electricity'), 'flow'),
                            (('pp_gas', 'electricity'), 'flow'),
                            (('shortage_el', 'electricity'), 'flow')]]

    # NOTE(review): nom_vals and summed_flows are computed but not
    # returned or used below — possibly kept for debugging; confirm.
    inputs = outputlib.processing.convert_keys_to_strings(
        outputlib.processing.parameter_as_dict(optimization_model))
    nom_vals = [[key, value['scalars']['nominal_value']]
                for key, value in inputs.items()
                if 'nominal_value' in value['scalars']]
    nom_vals = pd.DataFrame(nom_vals, columns=['flow', 'nominal_value'])

    summed_flows = [
        (key, value['sequences'].sum()[0])
        for key, value in outputlib.processing.convert_keys_to_strings(
            results).items()
    ]
    summed_flows = pd.DataFrame(summed_flows,
                                columns=['flow', 'summed_flows'])

    end = time.time()
    print('simulation lasted: ', end - start, 'sec')

    return el_prod
def run_add_constraints_example(solver='cbc', nologg=False):
    """Solve a two-plant dispatch model extended with custom pyomo constraints.

    Demonstrates how to attach a user-defined pyomo ``Block`` to an
    oemof.solph ``Model``: an outflow-share constraint on the oil plant
    and an overall emission-limit constraint.

    Parameters
    ----------
    solver : str
        Solver name handed to pyomo (e.g. 'cbc', 'gurobi').
    nologg : bool
        If False, INFO-level logging is configured.
    """
    if not nologg:
        logging.basicConfig(level=logging.INFO)
    # ##### creating an oemof solph optimization model, nothing special here ##
    # create an energy system object for the oemof solph nodes
    es = EnergySystem(timeindex=pd.date_range('1/1/2017', periods=4,
                                              freq='H'))
    # add some nodes
    boil = Bus(label="oil", balanced=False)
    blig = Bus(label="lignite", balanced=False)
    b_el = Bus(label="b_el")
    es.add(boil, blig, b_el)
    sink = Sink(label="Sink",
                inputs={
                    b_el: Flow(nominal_value=40,
                               actual_value=[0.5, 0.4, 0.3, 1],
                               fixed=True)
                })
    pp_oil = Transformer(
        label='pp_oil',
        inputs={boil: Flow()},
        outputs={b_el: Flow(nominal_value=50, variable_costs=25)},
        conversion_factors={b_el: 0.39})
    pp_lig = Transformer(
        label='pp_lig',
        inputs={blig: Flow()},
        outputs={b_el: Flow(nominal_value=50, variable_costs=10)},
        conversion_factors={b_el: 0.41})
    es.add(sink, pp_oil, pp_lig)

    # create the model
    om = Model(energysystem=es)

    # add specific emission values to flow objects if source is a commodity bus
    for s, t in om.flows.keys():
        if s is boil:
            om.flows[s, t].emission_factor = 0.27  # t/MWh
        if s is blig:
            om.flows[s, t].emission_factor = 0.39  # t/MWh
    emission_limit = 60e3

    # add the outflow share (per-timestep minimum share of pp_oil's output)
    om.flows[(boil, pp_oil)].outflow_share = [1, 0.5, 0, 0.3]

    # Now we are going to add a 'sub-model' and add a user specific constraint
    # first we add a pyomo Block() instance that we can use to add our
    # constraints. Then, we add this Block to our previous defined
    # Model instance and add the constraints.
    myblock = po.Block()

    # create a pyomo set with the flows (i.e. list of tuples),
    # there will of course be only one flow inside this set, the one we used to
    # add outflow_share
    myblock.MYFLOWS = po.Set(initialize=[
        k for (k, v) in om.flows.items() if hasattr(v, 'outflow_share')
    ])

    # pyomo does not need a po.Set, we can use a simple list as well
    myblock.COMMODITYFLOWS = [
        k for (k, v) in om.flows.items() if hasattr(v, 'emission_factor')
    ]

    # add the sub-model to the oemof Model instance
    om.add_component('MyBlock', myblock)

    def _inflow_share_rule(m, s, e, t):
        """pyomo rule definition: Here we can use all objects from the block
        or the om object, in this case we don't need anything from the block
        except the newly defined set MYFLOWS.
        """
        expr = (om.flow[s, e, t] >= om.flows[s, e].outflow_share[t] *
                sum(om.flow[i, o, t] for (i, o) in om.FLOWS if o == e))
        return expr

    myblock.inflow_share = po.Constraint(myblock.MYFLOWS, om.TIMESTEPS,
                                         rule=_inflow_share_rule)
    # add emission constraint (total emitting flow over all timesteps)
    myblock.emission_constr = po.Constraint(
        expr=(sum(om.flow[i, o, t]
                  for (i, o) in myblock.COMMODITYFLOWS
                  for t in om.TIMESTEPS) <= emission_limit))

    # solve and write results to dictionary
    # you may print the model with om.pprint()
    om.solve(solver=solver)
    logging.info("Successfully finished.")
# Minimal "load datapackage -> optimize -> dump" pipeline:
# reads a preprocessed datapackage directory from argv[1], solves the
# model with cbc and dumps the results to argv[2].
here = os.path.abspath(os.path.dirname(__file__))
name = 'simple_model'

preprocessed = sys.argv[1]
optimized = sys.argv[2]

# makedirs creates missing parent directories as well and, with
# exist_ok=True, is race-free and idempotent (os.mkdir is neither)
os.makedirs(optimized, exist_ok=True)

es = EnergySystem.from_datapackage(
    os.path.join(preprocessed, "datapackage.json"),
    attributemap={},
    typemap=TYPEMAP,
)

# create model from energy system (this is just oemof.solph)
m = Model(es)

# select solver 'gurobi', 'cplex', 'glpk' etc
m.solve(solver='cbc')

# get the results from the solved model (still oemof.solph)
es.results = m.results()

# now we use the write results method to write the results in
# oemof-tabular format
es.dump(optimized)
# Linear optimal power flow (LOPF) example: three electrical buses
# connected in a ring by three lines, two dispatchable generators and
# one load.
es = EnergySystem()

el0 = elec.ElectricalBus('el0')
el1 = elec.ElectricalBus('el1')
el2 = elec.ElectricalBus('el2')

# ring topology with identical capacities and reactances
line0 = elec.Line(from_bus=el0, to_bus=el1, capacity=60, reactance=0.0001)
line1 = elec.Line(from_bus=el1, to_bus=el2, capacity=60, reactance=0.0001)
line2 = elec.Line(from_bus=el2, to_bus=el0, capacity=60, reactance=0.0001)

# the gas generator at el1 is cheaper than the coal generator at el0
gen0 = fc.Dispatchable("gen0", capacity=100, bus=el0, marginal_cost=50,
                       carrier='coal')
gen1 = fc.Dispatchable("gen1", capacity=100, bus=el1, marginal_cost=25,
                       carrier='gas')

# constant 100 MW load at the third bus
load0 = fc.Load("load0", bus=el2, amount=100, profile=[1])

es.add(el0, el1, el2, line0, line1, line2, gen0, gen1, load0)

m = Model(es)

m.solve()

# write the LP file for inspection
m.write('lopf-model.lp')
# Run every example datapackage through the model and exercise the
# different postprocessing helpers (bus/supply/demand/component results).
for example in examples:
    # fix typo in the progress message ("Runnig" -> "Running")
    print("Running postprocessing example with datapackage {}".format(
        example))

    es = EnergySystem.from_datapackage(
        pkg.resource_filename(
            "oemof.tabular",
            "examples/datapackages/{}/datapackage.json".format(example),
        ),
        attributemap={},
        typemap=TYPEMAP,
    )
    # shorten the horizon to the first five timesteps to keep the run fast
    es.timeindex = es.timeindex[0:5]

    m = Model(es)
    m.solve(solver="cbc")

    # skip foreignkeys example as not all buses are present
    if example != "foreignkeys":
        br = pp.bus_results(es, m.results(), select="scalars")

        if example == "investment":
            br["bus0"].xs([es.groups["bus0"], "invest"], level=[1, 2])

        pp.supply_results(results=m.results(), es=es, bus=["heat-bus"])

        pp.supply_results(results=m.results(), es=es, bus=["bus0", "bus1"])

        pp.demand_results(results=m.results(), es=es, bus=["bus0", "bus1"])

        pp.component_results(results=m.results(), es=es, select="sequences")
# Attach one fixed electricity demand to each of the two buses, then
# build and solve the transshipment model, draw the energy-system graph
# and print the generator results.
for load_label, load_bus, load_profile in (
        ("load_0", b_0, [0, 1]),
        ("load_1", b_1, [1, 0])):
    es.add(Sink(label=load_label,
                inputs={load_bus: Flow(nominal_value=150,
                                       actual_value=load_profile,
                                       fixed=True)}))

m = Model(energysystem=es)

# m.write('transshipment.lp', io_options={'symbolic_solver_labels': True})

m.solve(solver='cbc', solve_kwargs={'tee': True, 'keepfiles': False})

m.results()

graph = create_nx_graph(es, m)

draw_graph(graph,
           plot=True,
           layout='neato',
           node_size=3000,
           node_color={'b_0': '#cd3333',
                       'b_1': '#7EC0EE',
                       'b_2': '#eeac7e'})

results = processing.results(m)

for generator in ('gen_0', 'gen_1'):
    print(views.node(results, generator))
def test_dispatch_one_time_step(solver='cbc', periods=1): """Create an energy system and optimize the dispatch at least costs.""" # ######################### create energysystem components ################ Node.registry = None # resource buses bgas = Bus(label='gas', balanced=False) # electricity and heat bel = Bus(label='b_el') bth = Bus(label='b_th') # an excess and a shortage variable can help to avoid infeasible problems excess_el = Sink(label='excess_el', inputs={bel: Flow()}) # sources wind = Source( label='wind', outputs={bel: Flow(actual_value=0.5, nominal_value=66.3, fixed=True)}) # demands (electricity/heat) demand_el = Sink( label='demand_elec', inputs={bel: Flow(nominal_value=85, actual_value=0.3, fixed=True)}) demand_th = Sink( label='demand_therm', inputs={bth: Flow(nominal_value=40, actual_value=0.2, fixed=True)}) # combined heat and power plant (chp) pp_chp = Transformer(label='pp_chp', inputs={bgas: Flow()}, outputs={ bel: Flow(nominal_value=30, variable_costs=42), bth: Flow(nominal_value=40) }, conversion_factors={ bel: 0.3, bth: 0.4 }) # heatpump with a coefficient of performance (COP) of 3 b_heat_source = Bus(label='b_heat_source') heat_source = Source(label='heat_source', outputs={b_heat_source: Flow()}) cop = 3 heat_pump = Transformer(label='heat_pump', inputs={ bel: Flow(), b_heat_source: Flow() }, outputs={bth: Flow(nominal_value=10)}, conversion_factors={ bel: 1 / 3, b_heat_source: (cop - 1) / cop }) energysystem = EnergySystem(timeindex=[1]) energysystem.add(bgas, bel, bth, excess_el, wind, demand_el, demand_th, pp_chp, b_heat_source, heat_source, heat_pump) # ################################ optimization ########################### # create optimization model based on energy_system optimization_model = Model(energysystem=energysystem, timeincrement=1) # solve problem optimization_model.solve(solver=solver) # write back results from optimization object to energysystem optimization_model.results() # ################################ results 
################################ data = views.node(processing.results(om=optimization_model), 'b_el') # generate results to be evaluated in tests results = data['sequences'].sum(axis=0).to_dict() test_results = { (('wind', 'b_el'), 'flow'): 33, (('b_el', 'demand_elec'), 'flow'): 26, (('b_el', 'excess_el'), 'flow'): 5, (('b_el', 'heat_pump'), 'flow'): 3, } for key in test_results.keys(): eq_(int(round(results[key])), int(round(test_results[key])))
# an excess and a shortage variable can help to avoid infeasible problems excess_el = Sink(label='excess_el', inputs={bus_el: Flow()}) shortage_el = Source(label='shortage_el', outputs={bus_el: Flow(variable_costs=100000)}) # ## Add all to the energysystem energysystem.add(bus_coal, bus_gas, bus_el, source_gas, source_coal, wind, pv, demand_el, pp_coal, storage_el, excess_el, shortage_el) # ## Create an Optimization Model and solve it # create optimization model based on energy_system optimization_model = Model(energysystem=energysystem) # solve problem optimization_model.solve(solver=solver) # ## Get results results_main = outputlib.processing.results(optimization_model) results_meta = outputlib.processing.meta_results(optimization_model) params = outputlib.processing.parameter_as_dict(energysystem) # ## Pass results to energysystem.results object before saving energysystem.results['main'] = results_main energysystem.results['meta'] = results_meta energysystem.params = params # ## Save results - Dump the energysystem (to ~/home/user/.oemof by default) # Specify path and filename if you do not want to overwrite energysystem.dump(dpath=None, filename=None)
bus=heat_bus_SDE, tech="grid", carrier="heat",
    marginal_cost=costs.at["vom", "excess_heat"]))

print("Demand data have been read.")

# OEMoF Model Creation
m = Model(es)
print("OSeEM-DE is ready to solve.")

# LP File
m.write(os.path.join(results_path, "investment.lp"),
        io_options={"symbolic_solver_labels": True})

# Shadow Price
m.receive_duals()

# Solve
m.solve("cbc")
# NOTE(review): rebinds the results() method with its return value; any
# later code must use m.results as an attribute, not call it.
m.results = m.results()
print("OSeEM-DE solved the optimization problem. :)")

# Results
pp.write_results(m, results_path)
print("Results have been written. Results are available in {}.".format(
    results_path))
outputs={b_el: solph.Flow()},
    in_breakpoints=in_breakpoints,
    conversion_function=conv_func,
    pw_repn='CC')  # 'CC', 'DCC', 'INC', 'MC'
# DCC TODO: Solve problem in outputlib with DCC

energysystem.add(pwltf)

# create and solve the optimization model
optimization_model = Model(energysystem)
# NOTE(review): hard-coded absolute user path -- breaks on any other
# machine; consider a relative or configurable output path.
optimization_model.write('/home/jann/Desktop/my_model.lp',
                         io_options={'symbolic_solver_labels': True})
optimization_model.solve(solver=solver,
                         solve_kwargs={
                             'tee': False,
                             'keepfiles': False
                         })

results = outputlib.processing.results(optimization_model)
string_results = outputlib.processing.convert_keys_to_strings(results)
# NOTE(review): df is assigned twice -- the create_dataframe result is
# immediately overwritten by the concatenated sequences; confirm intent.
df = outputlib.processing.create_dataframe(optimization_model)
sequences = {k: v['sequences'] for k, v in string_results.items()}
df = pd.concat(sequences, axis=1)
# realized conversion efficiency: electricity output per gas input
df[('efficiency', None, None)] = df[('pwltf', 'electricity',
                                     'flow')].divide(df[('gas', 'pwltf',
                                                         'flow')])


def linearized_func(func, x_break, x):
    y_break = func(x_break)
def compute(datapackage, solver="gurobi"):
    """Build, solve and post-process the scenario given by ``datapackage``.

    Reads the scenario configuration from ``scenarios/<datapackage>.toml``,
    optionally aggregates the input datapackage temporally, solves the
    optimization model (with an optional CO2 emission limit) and writes
    results, model statistics, a supply summary and per-bus emissions to
    ``results/<datapackage>/``.

    Parameters
    ----------
    datapackage : str
        Scenario name: directory under ``datapackages`` and basename of
        the ``.toml`` file under ``scenarios``.
    solver : str
        Solver name handed to oemof.solph (default "gurobi").
    """
    config = Scenario.from_path(
        os.path.join("scenarios", datapackage + ".toml")
    )
    # scenario-wide settings
    emission_limit = config["scenario"].get("co2_limit")
    temporal_resolution = config.get("model", {}).get(
        "temporal_resolution", 1)

    datapackage_dir = os.path.join("datapackages", datapackage)

    # create results path
    scenario_path = os.path.join("results", datapackage)
    if not os.path.exists(scenario_path):
        os.makedirs(scenario_path)
    output_path = os.path.join(scenario_path, "output")
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # copy package either aggregated or the original one (only data!)
    if temporal_resolution > 1:
        logging.info("Aggregating for temporal aggregation ... ")
        path = aggregation.temporal_skip(
            os.path.join(datapackage_dir, "datapackage.json"),
            temporal_resolution,
            path=scenario_path,
            name="input",
        )
    else:
        path = processing.copy_datapackage(
            os.path.join(datapackage_dir, "datapackage.json"),
            os.path.abspath(os.path.join(scenario_path, "input")),
            subset="data",
        )

    es = EnergySystem.from_datapackage(
        os.path.join(path, "datapackage.json"),
        attributemap={},
        typemap=facades.TYPEMAP,
    )

    m = Model(es)

    if emission_limit is not None:
        constraints.emission_limit(m, limit=emission_limit)

    # collect all flows that carry an emission factor
    flows = {}
    for (i, o) in m.flows:
        if hasattr(m.flows[i, o], "emission_factor"):
            flows[(i, o)] = m.flows[i, o]

    # add emission as expression to model
    BUSES = [b for b in es.nodes if isinstance(b, Bus)]

    def emission_rule(m, b, t):
        # emissions at bus b, timestep t: sum of emitting inflows
        # weighted by timeincrement and emission factor
        expr = sum(
            m.flow[inflow, outflow, t]
            * m.timeincrement[t]
            * getattr(flows[inflow, outflow], "emission_factor", 0)
            for (inflow, outflow) in flows
            if outflow is b
        )
        return expr

    m.emissions = Expression(BUSES, m.TIMESTEPS, rule=emission_rule)

    m.receive_duals()

    m.solve(solver)

    # NOTE(review): rebinds the results() method with its return value;
    # all code below accesses m.results as an attribute.
    m.results = m.results()

    pp.write_results(m, output_path)

    modelstats = outputlib.processing.meta_results(m)
    modelstats.pop("solver")
    modelstats["problem"].pop("Sense")
    # TODO: This is not model stats -> move somewhere else!
    modelstats["temporal_resolution"] = temporal_resolution
    modelstats["emission_limit"] = emission_limit

    with open(os.path.join(scenario_path, "modelstats.json"),
              "w") as outfile:
        json.dump(modelstats, outfile, indent=4)

    supply_sum = (
        pp.supply_results(
            results=m.results,
            es=m.es,
            bus=[b.label for b in es.nodes if isinstance(b, Bus)],
            types=[
                "dispatchable",
                "volatile",
                "conversion",
                "backpressure",
                "extraction",
                # "storage",
                "reservoir",
            ],
        )
        # .clip(0)
        .sum().reset_index()
    )
    # strip the leading region prefix from the carrier-tech label
    supply_sum["from"] = supply_sum.apply(
        lambda x: "-".join(x["from"].label.split("-")[1::]), axis=1
    )
    supply_sum.drop("type", axis=1, inplace=True)
    # scale flows by 1e6 and the aggregation factor -- presumably a
    # MWh -> TWh conversion; TODO confirm units
    supply_sum = (
        supply_sum.set_index(["from", "to"]).unstack("from")
        / 1e6
        * temporal_resolution
    )
    supply_sum.columns = supply_sum.columns.droplevel(0)

    summary = supply_sum  # pd.concat([supply_sum, excess_share], axis=1)

    ## grid
    imports = pd.DataFrame()
    link_results = pp.component_results(m.es, m.results).get("link")
    # NOTE(review): to_csv is called before the "is not None" check
    # below -- this raises AttributeError when there are no link results.
    link_results.to_csv(
        os.path.join(scenario_path, "output", "transmission.csv")
    )

    for b in [b.label for b in es.nodes if isinstance(b, Bus)]:
        if link_results is not None and m.es.groups[b] in list(
            link_results.columns.levels[0]
        ):
            # outgoing (ex) and incoming (im) flows of bus b
            ex = link_results.loc[
                :, (m.es.groups[b], slice(None), "flow")
            ].sum(axis=1)
            im = link_results.loc[
                :, (slice(None), m.es.groups[b], "flow")
            ].sum(axis=1)

            net_import = im - ex
            net_import.name = m.es.groups[b]
            imports = pd.concat([imports, net_import], axis=1)

    summary["total_supply"] = summary.sum(axis=1)
    summary["RE-supply"] = (
        summary["wind-onshore"]
        + summary["wind-offshore"]
        + summary["biomass-st"]
        + summary["hydro-ror"]
        + summary["hydro-reservoir"]
        + summary["solar-pv"]
    )
    if "other-res" in summary:
        summary["RE-supply"] += summary["other-res"]
    summary["RE-share"] = summary["RE-supply"] / summary["total_supply"]

    summary["import"] = (imports[imports > 0].sum() / 1e6
                         * temporal_resolution)
    summary["export"] = (imports[imports < 0].sum() / 1e6
                         * temporal_resolution)

    summary.to_csv(os.path.join(scenario_path, "summary.csv"))

    emissions = (
        pd.Series({key: value() for key, value in m.emissions.items()})
        .unstack()
        .T
    )
    emissions.to_csv(os.path.join(scenario_path, "emissions.csv"))