def compute(es=None, **arguments):
    """Create the optimization model, solve it and return the solved model.

    Parameters
    ----------
    es : :class:`oemof.solph.network.EnergySystem` object
        Energy system holding nodes, grouping functions and other
        important information.
    **arguments : key word arguments
        Arguments passed from command line. Keys used here:
        ``'--debug'`` (truthy -> write an lp-file) and ``'--solver'``.

    Returns
    -------
    Model
        The solved oemof.solph optimization model.

    Raises
    ------
    ValueError
        If no energy system is passed (the ``es=None`` default is only a
        placeholder; previously this crashed with an opaque
        ``AttributeError`` on ``es.temporal``).
    """
    if es is None:
        raise ValueError("An energy system (es) is required to compute.")

    # `es.temporal` optionally carries per-timestep weights that scale the
    # objective function terms.
    if es.temporal is not None:
        m = Model(es, objective_weighting=es.temporal['weighting'])
    else:
        m = Model(es)

    logging.info('Model creation time: ' + stopwatch())

    # Register dual-value (shadow price) retrieval before solving.
    m.receive_duals()

    if arguments['--debug']:
        filename = 'renpass_model.lp'
        logging.info('Writing lp-file to {}.'.format(filename))
        m.write(filename, io_options={'symbolic_solver_labels': True})

    # `tee=True` streams the solver log to stdout.
    m.solve(solver=arguments['--solver'], solve_kwargs={'tee': True})

    logging.info('Optimization time: ' + stopwatch())

    return m
def test_dispatch_example(solver='cbc', periods=24 * 5):
    """Create an energy system and optimize the dispatch at least costs.

    Builds a small power/heat system (four fuel buses, wind, pv, four
    thermal plants, a CHP unit and a heat pump), solves the dispatch with
    the given solver for `periods` hourly timesteps and compares the
    summed flows on the electricity bus against known reference values.
    """
    # Time series input (wind, pv, demand_el, demand_th) read from a CSV
    # next to this test file.
    filename = os.path.join(os.path.dirname(__file__), 'input_data.csv')
    data = pd.read_csv(filename, sep=",")

    # ######################### create energysystem components ################

    # resource buses; `balanced=False` lets fuel be drawn without a source
    bcoal = Bus(label='coal', balanced=False)
    bgas = Bus(label='gas', balanced=False)
    boil = Bus(label='oil', balanced=False)
    blig = Bus(label='lignite', balanced=False)

    # electricity and heat
    bel = Bus(label='b_el')
    bth = Bus(label='b_th')

    # an excess and a shortage variable can help to avoid infeasible problems
    excess_el = Sink(label='excess_el', inputs={bel: Flow()})
    # shortage_el = Source(label='shortage_el',
    #                      outputs={bel: Flow(variable_costs=200)})

    # sources
    # NOTE(review): `actual_value`/`fixed` is the pre-0.4 oemof.solph Flow
    # API (later replaced by `fix`) — confirm against the pinned version.
    wind = Source(label='wind', outputs={
        bel: Flow(actual_value=data['wind'], nominal_value=66.3, fixed=True)
    })
    pv = Source(label='pv', outputs={
        bel: Flow(actual_value=data['pv'], nominal_value=65.3, fixed=True)
    })

    # demands (electricity/heat); fixed profiles scaled by nominal_value
    demand_el = Sink(label='demand_elec', inputs={
        bel: Flow(nominal_value=85, actual_value=data['demand_el'],
                  fixed=True)
    })
    demand_th = Sink(label='demand_therm', inputs={
        bth: Flow(nominal_value=40, actual_value=data['demand_th'],
                  fixed=True)
    })

    # power plants: cost order lignite (19) < coal (25) < gas (40) < oil (50)
    pp_coal = Transformer(
        label='pp_coal',
        inputs={bcoal: Flow()},
        outputs={bel: Flow(nominal_value=20.2, variable_costs=25)},
        conversion_factors={bel: 0.39})
    pp_lig = Transformer(
        label='pp_lig',
        inputs={blig: Flow()},
        outputs={bel: Flow(nominal_value=11.8, variable_costs=19)},
        conversion_factors={bel: 0.41})
    pp_gas = Transformer(
        label='pp_gas',
        inputs={bgas: Flow()},
        outputs={bel: Flow(nominal_value=41, variable_costs=40)},
        conversion_factors={bel: 0.50})
    pp_oil = Transformer(
        label='pp_oil',
        inputs={boil: Flow()},
        outputs={bel: Flow(nominal_value=5, variable_costs=50)},
        conversion_factors={bel: 0.28})

    # combined heat and power plant (chp): one gas input, two outputs
    pp_chp = Transformer(
        label='pp_chp',
        inputs={bgas: Flow()},
        outputs={
            bel: Flow(nominal_value=30, variable_costs=42),
            bth: Flow(nominal_value=40)
        },
        conversion_factors={
            bel: 0.3,
            bth: 0.4
        })

    # heatpump with a coefficient of performance (COP) of 3
    b_heat_source = Bus(label='b_heat_source')
    heat_source = Source(label='heat_source',
                         outputs={b_heat_source: Flow()})

    cop = 3
    # 1 unit electricity + (cop-1)/cop units ambient heat -> 1 unit heat
    heat_pump = Transformer(label='heat_pump',
                            inputs={
                                bel: Flow(),
                                b_heat_source: Flow()
                            },
                            outputs={bth: Flow(nominal_value=10)},
                            conversion_factors={
                                bel: 1 / 3,
                                b_heat_source: (cop - 1) / cop
                            })

    datetimeindex = pd.date_range('1/1/2012', periods=periods, freq='H')
    energysystem = EnergySystem(timeindex=datetimeindex)
    energysystem.add(bcoal, bgas, boil, bel, bth, blig, excess_el, wind, pv,
                     demand_el, demand_th, pp_coal, pp_lig, pp_oil, pp_gas,
                     pp_chp, b_heat_source, heat_source, heat_pump)

    # ################################ optimization ###########################

    # create optimization model based on energy_system
    optimization_model = Model(energysystem=energysystem)
    optimization_model.receive_duals()

    # solve problem
    optimization_model.solve(solver=solver)

    # write back results from optimization object to energysystem
    optimization_model.results()

    # ################################ results ################################

    # generic result object
    results = processing.results(om=optimization_model)

    # subset of results that includes all flows into and from electrical bus
    # sequences are stored within a pandas.DataFrames and scalars e.g.
    # investment values within a pandas.Series object.
    # in this case the entry data['scalars'] does not exist since no investment
    # variables are used
    data = views.node(results, 'b_el')

    # generate results to be evaluated in tests
    results = data['sequences'].sum(axis=0).to_dict()

    # expected summed flows on the electricity bus for the default
    # solver/periods; keys are ((from_label, to_label), 'flow')
    test_results = {
        (('wind', 'b_el'), 'flow'): 1773,
        (('pv', 'b_el'), 'flow'): 605,
        (('b_el', 'demand_elec'), 'flow'): 7440,
        (('b_el', 'excess_el'), 'flow'): 139,
        (('pp_chp', 'b_el'), 'flow'): 666,
        (('pp_lig', 'b_el'), 'flow'): 1210,
        (('pp_gas', 'b_el'), 'flow'): 1519,
        (('pp_coal', 'b_el'), 'flow'): 1925,
        (('pp_oil', 'b_el'), 'flow'): 0,
        (('b_el', 'heat_pump'), 'flow'): 118,
    }

    # compare rounded integers to tolerate solver-level float noise
    for key in test_results.keys():
        eq_(int(round(results[key])), int(round(test_results[key])))
# NOTE(review): this span is a fragment of a larger script — `df`,
# `timesteps` and `fname` are defined above this view, and the final
# `supply_sum` expression is truncated mid-call. Left byte-identical.

# truncate the time series to the requested horizon and persist it
df = df.iloc[:timesteps]
df.to_csv(fname, index=False, sep=';')

config = building.read_build_config('config.toml')

# build the energy system from a datapackage using the facade type map
es = EnergySystem.from_datapackage(
    "datapackage.json",
    attributemap={},
    typemap=facades.TYPEMAP,
)

m = Model(es)
m.write('tmp.lp', io_options={"symbolic_solver_labels": True})
m.receive_duals()
m.solve('gurobi')
# replaces the bound `results` method with its return value
m.results = m.results()

# start from a clean results directory
if os.path.exists('results'):
    shutil.rmtree('results')
os.mkdir('results')

pp.write_results(m, 'results', scalars=False)

# create short summary
# (expression truncated here in this view)
supply_sum = (pp.supply_results(
    results=m.results,
    es=m.es,
class Scenario:
    """Definition of a deflex scenario object.

    Bundles the input tables (``table_collection``), the oemof
    ``EnergySystem`` and solph ``Model`` built from them, and the
    optimisation results, together with load/dump/solve helpers.
    """

    def __init__(self, **kwargs):
        """
        Parameters
        ----------
        kwargs
            Optional keys: name, table_collection, year, ignore_errors,
            round_values, model, es, results_fn, debug, meta.
        """
        self.name = kwargs.get("name", "unnamed_scenario")
        self.table_collection = kwargs.get("table_collection", {})
        self.year = kwargs.get("year", None)
        self.ignore_errors = kwargs.get("ignore_errors", False)
        self.round_values = kwargs.get("round_values", 0)
        self.model = kwargs.get("model", None)
        self.es = kwargs.get("es", None)
        self.results = None
        self.results_fn = kwargs.get("results_fn", None)
        self.debug = kwargs.get("debug", None)
        self.location = None
        self.map = None
        self.meta = kwargs.get("meta", None)

    def initialise_energy_system(self):
        """Create an hourly EnergySystem for ``self.year``.

        Uses 3 timesteps in debug mode; otherwise 8784 steps for leap
        years and 8760 for regular years.

        Raises
        ------
        TypeError
            If ``self.year`` is not a valid year (e.g. None).
        """
        if self.debug is True:
            number_of_time_steps = 3
        else:
            try:
                if calendar.isleap(self.year):
                    number_of_time_steps = 8784
                else:
                    number_of_time_steps = 8760
            except TypeError:
                msg = ("You cannot create an EnergySystem with self.year={0}, "
                       "of type {1}.")
                raise TypeError(msg.format(self.year, type(self.year)))
        date_time_index = pd.date_range("1/1/{0}".format(self.year),
                                        periods=number_of_time_steps,
                                        freq="H")
        return EnergySystem(timeindex=date_time_index)

    def load_excel(self, filename=None):
        """Load scenario from an excel-file."""
        if filename is not None:
            self.location = filename
        xls = pd.ExcelFile(self.location)
        for sheet in xls.sheet_names:
            # two-row header -> MultiIndex columns, first column is index
            self.table_collection[sheet] = xls.parse(sheet, index_col=[0],
                                                     header=[0, 1])
        return self

    def load_csv(self, path=None):
        """Load scenario from a csv-collection."""
        if path is not None:
            self.location = path
        for file in os.listdir(self.location):
            if file[-4:] == ".csv":
                filename = os.path.join(self.location, file)
                self.table_collection[file[:-4]] = pd.read_csv(
                    filename, index_col=[0], header=[0, 1])
        return self

    def to_excel(self, filename):
        """Dump scenario into an excel-file."""
        # create path if it does not exist
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        writer = pd.ExcelWriter(filename)
        for name, df in sorted(self.table_collection.items()):
            df.to_excel(writer, name)
        # NOTE(review): ExcelWriter.save() is deprecated/removed in newer
        # pandas (use close()) — confirm the pinned pandas version.
        writer.save()
        logging.info("Scenario saved as excel file to {0}".format(filename))

    def to_csv(self, path):
        """Dump scenario into a csv-collection."""
        # replace an existing collection completely
        if os.path.isdir(path):
            shutil.rmtree(os.path.join(path))
        os.makedirs(path)
        for name, df in self.table_collection.items():
            name = name.replace(" ", "_") + ".csv"
            filename = os.path.join(path, name)
            df.to_csv(filename)
        logging.info("Scenario saved as csv-collection to {0}".format(path))

    def check_table(self, table_name):
        """Raise ValueError if the given table contains NaN values."""
        if self.table_collection[table_name].isnull().values.any():
            c = []
            for column in self.table_collection[table_name].columns:
                if self.table_collection[table_name][column].isnull().any():
                    c.append(column)
            msg = "Nan Values in the {0} table (columns: {1})."
            raise ValueError(msg.format(table_name, c))
        return self

    def create_nodes(self):
        # hook for subclasses: return a dict of oemof nodes
        pass

    def initialise_es(self, year=None):
        """(Re)create ``self.es``, optionally switching the year."""
        if year is not None:
            self.year = year
        self.es = self.initialise_energy_system()
        return self

    def add_nodes(self, nodes):
        """
        Parameters
        ----------
        nodes : dict
            Dictionary with a unique key and values of type
            oemof.network.Node.

        Returns
        -------
        self
        """
        if self.es is None:
            self.initialise_es()
        self.es.add(*nodes.values())
        return self

    def table2es(self):
        """Build the energy system from the table collection."""
        if self.es is None:
            self.es = self.initialise_energy_system()
        nodes = self.create_nodes()
        self.es.add(*nodes.values())
        return self

    def create_model(self):
        """Create the solph Model from ``self.es``."""
        self.model = Model(self.es)
        return self

    def dump_es(self, filename):
        """Pickle meta data and the energy system's state to ``filename``."""
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        if self.meta is None:
            # Bug fix: the results key is lowercase "meta" everywhere else
            # in this class (solve, restore_es, the read below); the old
            # membership test used "Meta" and therefore never matched.
            if self.es.results is not None and "meta" in self.es.results:
                self.meta = self.es.results["meta"]
        # context manager guarantees the file handle is closed
        with open(filename, "wb") as f:
            pickle.dump(self.meta, f)
            pickle.dump(self.es.__dict__, f)
        logging.info("Results dumped to {0}.".format(filename))

    def restore_es(self, filename=None):
        """Restore a dumped energy system (see ``dump_es``)."""
        if filename is None:
            filename = self.results_fn
        else:
            self.results_fn = filename
        if self.es is None:
            self.es = EnergySystem()
        with open(filename, "rb") as f:
            self.meta = pickle.load(f)
            self.es.__dict__ = pickle.load(f)
        self.results = self.es.results["main"]
        logging.info("Results restored from {0}.".format(filename))

    def scenario_info(self, solver_name):
        """Return a small dict describing this scenario run."""
        sc_info = {
            "name": self.name,
            "datetime": datetime.datetime.now(),
            "year": self.year,
            "solver": solver_name,
        }
        return sc_info

    def solve(self, with_duals=False, tee=True, logfile=None, solver=None):
        """Solve ``self.model`` and store results/meta on the energy system.

        Parameters
        ----------
        with_duals : bool
            Request dual values (shadow prices) from the solver.
        tee : bool
            Stream the solver log to stdout.
        logfile : str or None
            Optional solver logfile path.
        solver : str or None
            Solver name passed to solph.
        """
        logging.info("Optimising using {0}.".format(solver))

        if with_duals:
            self.model.receive_duals()

        if self.debug:
            filename = os.path.join(helpers.extend_basic_path("lp_files"),
                                    "reegis.lp")
            logging.info("Store lp-file in {0}.".format(filename))
            self.model.write(filename,
                             io_options={"symbolic_solver_labels": True})

        self.model.solve(solver=solver, solve_kwargs={
            "tee": tee,
            "logfile": logfile
        })
        self.es.results["main"] = processing.results(self.model)
        self.es.results["meta"] = processing.meta_results(self.model)
        self.es.results["param"] = processing.parameter_as_dict(self.es)
        self.es.results["meta"]["scenario"] = self.scenario_info(solver)
        self.es.results["meta"]["in_location"] = self.location
        self.es.results["meta"]["file_date"] = datetime.datetime.fromtimestamp(
            os.path.getmtime(self.location))
        self.es.results["meta"]["oemof_version"] = logger.get_version()
        self.results = self.es.results["main"]

    def plot_nodes(self, show=None, filename=None, **kwargs):
        """Create (and optionally draw) a networkx graph of the nodes."""
        rm_nodes = kwargs.get("remove_nodes_with_substrings")
        g = graph.create_nx_graph(self.es, filename=filename,
                                  remove_nodes_with_substrings=rm_nodes)
        if show is True:
            draw_graph(g, **kwargs)
        return g
def compute(datapackage, solver="gurobi"):
    """Solve the scenario of the given datapackage and write result files.

    Loads ``scenarios/<datapackage>.toml``, optionally aggregates the
    input datapackage temporally, builds and solves the solph model
    (with an optional CO2 limit and per-bus emission expressions), and
    writes outputs, model stats, a supply summary and emissions to
    ``results/<datapackage>/``.
    """
    config = Scenario.from_path(
        os.path.join("scenarios", datapackage + ".toml")
    )
    emission_limit = config["scenario"].get("co2_limit")
    temporal_resolution = config.get("model", {}).get(
        "temporal_resolution", 1)
    datapackage_dir = os.path.join("datapackages", datapackage)

    # create results path
    scenario_path = os.path.join("results", datapackage)
    if not os.path.exists(scenario_path):
        os.makedirs(scenario_path)
    output_path = os.path.join(scenario_path, "output")
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # copy package either aggregated or the original one (only data!)
    if temporal_resolution > 1:
        logging.info("Aggregating for temporal aggregation ... ")
        path = aggregation.temporal_skip(
            os.path.join(datapackage_dir, "datapackage.json"),
            temporal_resolution,
            path=scenario_path,
            name="input",
        )
    else:
        path = processing.copy_datapackage(
            os.path.join(datapackage_dir, "datapackage.json"),
            os.path.abspath(os.path.join(scenario_path, "input")),
            subset="data",
        )

    es = EnergySystem.from_datapackage(
        os.path.join(path, "datapackage.json"),
        attributemap={},
        typemap=facades.TYPEMAP,
    )

    m = Model(es)

    if emission_limit is not None:
        constraints.emission_limit(m, limit=emission_limit)

    # collect all flows that carry an emission factor
    flows = {}
    for (i, o) in m.flows:
        if hasattr(m.flows[i, o], "emission_factor"):
            flows[(i, o)] = m.flows[i, o]

    # add emission as expression to model
    BUSES = [b for b in es.nodes if isinstance(b, Bus)]

    def emission_rule(m, b, t):
        # emissions at bus b and timestep t: flow * dt * emission_factor
        expr = sum(
            m.flow[inflow, outflow, t] * m.timeincrement[t]
            * getattr(flows[inflow, outflow], "emission_factor", 0)
            for (inflow, outflow) in flows if outflow is b
        )
        return expr

    m.emissions = Expression(BUSES, m.TIMESTEPS, rule=emission_rule)

    m.receive_duals()

    m.solve(solver)

    # replaces the bound `results` method with its return value
    m.results = m.results()

    pp.write_results(m, output_path)

    modelstats = outputlib.processing.meta_results(m)
    modelstats.pop("solver")
    modelstats["problem"].pop("Sense")
    # TODO: This is not model stats -> move somewhere else!
    modelstats["temporal_resolution"] = temporal_resolution
    modelstats["emission_limit"] = emission_limit

    with open(os.path.join(scenario_path, "modelstats.json"),
              "w") as outfile:
        json.dump(modelstats, outfile, indent=4)

    # summed supply per (from, to) pair in TWh (flows are in MWh, each
    # timestep represents `temporal_resolution` hours)
    supply_sum = (
        pp.supply_results(
            results=m.results,
            es=m.es,
            bus=[b.label for b in es.nodes if isinstance(b, Bus)],
            types=[
                "dispatchable",
                "volatile",
                "conversion",
                "backpressure",
                "extraction",
                # "storage",
                "reservoir",
            ],
        )
        # .clip(0)
        .sum().reset_index()
    )
    # strip the leading region prefix from the source label
    supply_sum["from"] = supply_sum.apply(
        lambda x: "-".join(x["from"].label.split("-")[1::]), axis=1
    )
    supply_sum.drop("type", axis=1, inplace=True)
    supply_sum = (
        supply_sum.set_index(["from", "to"]).unstack("from")
        / 1e6
        * temporal_resolution
    )
    supply_sum.columns = supply_sum.columns.droplevel(0)
    summary = supply_sum  # pd.concat([supply_sum, excess_share], axis=1)

    ## grid
    imports = pd.DataFrame()
    link_results = pp.component_results(m.es, m.results).get("link")
    # NOTE(review): this to_csv call runs before the `None` guard below —
    # a missing "link" entry would raise AttributeError here; verify.
    link_results.to_csv(
        os.path.join(scenario_path, "output", "transmission.csv")
    )

    # per-bus net import = inflows over links minus outflows over links
    for b in [b.label for b in es.nodes if isinstance(b, Bus)]:
        if link_results is not None and m.es.groups[b] in list(
            link_results.columns.levels[0]
        ):
            ex = link_results.loc[
                :, (m.es.groups[b], slice(None), "flow")
            ].sum(axis=1)
            im = link_results.loc[
                :, (slice(None), m.es.groups[b], "flow")
            ].sum(axis=1)

            net_import = im - ex
            net_import.name = m.es.groups[b]
            imports = pd.concat([imports, net_import], axis=1)

    summary["total_supply"] = summary.sum(axis=1)
    summary["RE-supply"] = (
        summary["wind-onshore"]
        + summary["wind-offshore"]
        + summary["biomass-st"]
        + summary["hydro-ror"]
        + summary["hydro-reservoir"]
        + summary["solar-pv"]
    )
    if "other-res" in summary:
        summary["RE-supply"] += summary["other-res"]
    summary["RE-share"] = summary["RE-supply"] / summary["total_supply"]

    summary["import"] = (
        imports[imports > 0].sum() / 1e6 * temporal_resolution)
    summary["export"] = (
        imports[imports < 0].sum() / 1e6 * temporal_resolution)

    summary.to_csv(os.path.join(scenario_path, "summary.csv"))

    # evaluate the Pyomo emission expressions per bus/timestep
    emissions = (
        pd.Series({key: value() for key, value in m.emissions.items()})
        .unstack()
        .T
    )
    emissions.to_csv(os.path.join(scenario_path, "emissions.csv"))