Example #1
def create_energysystem(datapackage, **arguments):
    """Creates the energysystem.

    Parameters
    ----------
    datapackage: str
        path to datapackage metadata file in JSON format
    **arguments : keyword arguments
        Arguments passed from the command line
    """

    es = EnergySystem.from_datapackage(datapackage,
                                       attributemap={},
                                       typemap=options.typemap)

    es._typemap = options.typemap

    end = es.timeindex.get_loc(es.timeindex[int(arguments['--t_end'])]) + 1

    es.timeindex = es.timeindex[int(arguments['--t_start']):end]

    return es
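A minimal usage sketch (assumption: the docopt-style arguments dict and the datapackage path below are purely illustrative):

# hypothetical command-line arguments, as docopt would provide them
arguments = {
    'DATAPACKAGE': 'examples/dispatch/datapackage.json',  # hypothetical path
    '--t_start': '0',
    '--t_end': '24',
}
es = create_energysystem(arguments['DATAPACKAGE'], **arguments)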
Example #2
def optimize(input_data_dir, results_data_dir, solver='cbc', debug=False):
    r"""
    Takes the specified datapackage, creates an energysystem and solves the
    optimization problem.
    """
    # create energy system object
    logging.info("Creating EnergySystem from datapackage")
    es = EnergySystem.from_datapackage(
        os.path.join(input_data_dir, "datapackage.json"),
        attributemap={},
        typemap=TYPEMAP,
    )

    # create model from energy system (this is just oemof.solph)
    logging.info("Creating the optimization model")
    m = Model(es)

    # if you want dual variables / shadow prices, uncomment the line below
    # m.receive_duals()

    # save lp file together with optimization results
    if debug:
        lp_file_path = os.path.join(results_data_dir, 'model.lp')
        logging.info(f"Saving the lp-file to {lp_file_path}")
        m.write(lp_file_path, io_options={'symbolic_solver_labels': True})

    # select a solver: 'gurobi', 'cplex', 'glpk', etc.
    logging.info(f'Solving the problem using {solver}')
    m.solve(solver=solver)

    # get the results from the solved model (still oemof.solph)
    es.results = m.results()
    es.params = outputlib.processing.parameter_as_dict(es)

    # dump the energy system together with its results in oemof.tabular format
    logging.info(f'Writing the results to {results_data_dir}')
    es.dump(results_data_dir)
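A minimal usage sketch (assumption: the directory names are hypothetical; input_data_dir must already contain a datapackage.json):

# hypothetical directories; the results are dumped into results_data_dir
optimize("preprocessed", "results", solver="cbc", debug=True)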
Example #3
import os
import oemof.tabular.datapackage

from oemof.solph import EnergySystem, Model
from oemof.tabular.facades import TYPEMAP
import oemof.tabular.tools.postprocessing as pp

# create a path for storing the results
results_path = 'results'
if not os.path.exists(results_path):
    os.makedirs(results_path)

# create energy system object
es = EnergySystem.from_datapackage(
    os.path.join("./datapackage", "datapackage.json"),
    attributemap={},
    typemap=TYPEMAP,
)

# create model from energy system (this is just oemof.solph)
m = Model(es)

# if you want dual variables / shadow prices, uncomment the line below
# m.receive_duals()

# select a solver: 'gurobi', 'cplex', 'glpk', etc.
m.solve("glpk")

# get the results from the solved model (still oemof.solph)
m.results = m.results()
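The snippet imports oemof.tabular.tools.postprocessing as pp and prepares results_path, but stops after solving. A hedged continuation (the same call pattern as in the compute() example further below) could write the results there:

# write the solved model's results to the prepared results directory
pp.write_results(m, results_path)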
Example #4
"""
"""

timesteps = 8760

# truncate every sequence file to the first `timesteps` rows (overwrites in place)
for f in os.listdir('data/sequences/'):
    fname = os.path.join('data', 'sequences', f)
    df = pd.read_csv(fname, sep=';')
    df = df.iloc[:timesteps]
    df.to_csv(fname, index=False, sep=';')

config = building.read_build_config('config.toml')

es = EnergySystem.from_datapackage(
    "datapackage.json",
    attributemap={},
    typemap=facades.TYPEMAP,
)

m = Model(es)

m.write('tmp.lp', io_options={"symbolic_solver_labels": True})

m.receive_duals()

m.solve('gurobi')

m.results = m.results()

if os.path.exists('results'):
    shutil.rmtree('results')
Example #5

from oemof.outputlib import views
from oemof.solph import EnergySystem, Model

from oemof.tabular.facades import TYPEMAP
import oemof.tabular.datapackage  # noqa
import oemof.tabular.tools.postprocessing as pp


examples = ["dispatch", "investment", "foreignkeys"]
for example in examples:
    print("Runnig postprocessing example with datapackage {}".format(example))
    es = EnergySystem.from_datapackage(
        pkg.resource_filename(
            "oemof.tabular",
            "examples/datapackages/{}/datapackage.json".format(example),
        ),
        attributemap={},
        typemap=TYPEMAP,
    )

    es.timeindex = es.timeindex[0:5]

    m = Model(es)

    m.solve(solver="cbc")

    # skip foreignkeys example as not all buses are present
    if example != "foreignkeys":
        br = pp.bus_results(es, m.results(), select="scalars")

        if example == "investment":
Example #6
# Imports inferred from the snippet's usage (assumption)
import os
import sys

from oemof.solph import EnergySystem, Model
import oemof.tabular.datapackage  # noqa (needed so EnergySystem.from_datapackage is available)
from oemof.tabular.facades import TYPEMAP

here = os.path.abspath(os.path.dirname(__file__))

name = 'simple_model'

preprocessed = sys.argv[1]

optimized = sys.argv[2]

if not os.path.exists(optimized):
    os.mkdir(optimized)

es = EnergySystem.from_datapackage(
    os.path.join(preprocessed, "datapackage.json"),
    attributemap={},
    typemap=TYPEMAP,
)

# create model from energy system (this is just oemof.solph)
m = Model(es)

# select a solver: 'gurobi', 'cplex', 'glpk', etc.
m.solve(solver='cbc')

# get the results from the solved model (still oemof.solph)
es.results = m.results()

# dump the energy system together with its results in oemof.tabular format
es.dump(optimized)
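The script takes its input and output directories from sys.argv; a hypothetical invocation (the script name is assumed):

# $ python optimize.py path/to/preprocessed path/to/optimized
#   sys.argv[1]: directory containing the preprocessed datapackage.json
#   sys.argv[2]: directory where the dumped EnergySystem is written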
Example #7

from oemof.solph import EnergySystem, Model
from renpass import options
import pprint


dispatch = False
investment = True

if dispatch:
    es1 = EnergySystem.from_datapackage(
        'renpass/examples/dispatch/datapackage.json',
        attributemap={},
        typemap=options.typemap)

    for n in es1.nodes:
        pprint.pprint(n.__dict__)


if investment:
    es2 = EnergySystem.from_datapackage(
        'renpass/examples/investment/datapackage.json',
        attributemap={},
        typemap=options.typemap)

    for n in es2.nodes:
        pprint.pprint(n.__dict__)

    m = Model(es2)
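    # Hedged continuation (assumption): mirror the solve/results pattern of the
    # other examples; the solver choice ('cbc') is an assumption.
    m.solve(solver='cbc')
    es2.results = m.results()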

Example #8
def compute(datapackage, solver="gurobi"):
    """
    """
    config = Scenario.from_path(
        os.path.join("scenarios", datapackage + ".toml")
    )
    emission_limit = config["scenario"].get("co2_limit")

    temporal_resolution = config.get("model", {}).get("temporal_resolution", 1)

    datapackage_dir = os.path.join("datapackages", datapackage)

    # create results path
    scenario_path = os.path.join("results", datapackage)
    if not os.path.exists(scenario_path):
        os.makedirs(scenario_path)
    output_path = os.path.join(scenario_path, "output")

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # copy the datapackage, either temporally aggregated or the original one (data only)
    if temporal_resolution > 1:
        logging.info("Aggregating for temporal aggregation ... ")
        path = aggregation.temporal_skip(
            os.path.join(datapackage_dir, "datapackage.json"),
            temporal_resolution,
            path=scenario_path,
            name="input",
        )
    else:
        path = processing.copy_datapackage(
            os.path.join(datapackage_dir, "datapackage.json"),
            os.path.abspath(os.path.join(scenario_path, "input")),
            subset="data",
        )

    es = EnergySystem.from_datapackage(
        os.path.join(path, "datapackage.json"),
        attributemap={},
        typemap=facades.TYPEMAP,
    )

    m = Model(es)

    if emission_limit is not None:
        constraints.emission_limit(m, limit=emission_limit)

    flows = {}
    for (i, o) in m.flows:
        if hasattr(m.flows[i, o], "emission_factor"):
            flows[(i, o)] = m.flows[i, o]

    # add emissions as an Expression on the model, indexed by bus and timestep
    BUSES = [b for b in es.nodes if isinstance(b, Bus)]

    def emission_rule(m, b, t):
        expr = sum(
            m.flow[inflow, outflow, t]
            * m.timeincrement[t]
            * getattr(flows[inflow, outflow], "emission_factor", 0)
            for (inflow, outflow) in flows
            if outflow is b
        )
        return expr

    m.emissions = Expression(BUSES, m.TIMESTEPS, rule=emission_rule)

    m.receive_duals()

    m.solve(solver)

    m.results = m.results()

    pp.write_results(m, output_path)

    modelstats = outputlib.processing.meta_results(m)
    modelstats.pop("solver")
    modelstats["problem"].pop("Sense")
    # TODO: This is not model stats -> move somewhere else!
    modelstats["temporal_resolution"] = temporal_resolution
    modelstats["emission_limit"] = emission_limit

    with open(os.path.join(scenario_path, "modelstats.json"), "w") as outfile:
        json.dump(modelstats, outfile, indent=4)

    supply_sum = (
        pp.supply_results(
            results=m.results,
            es=m.es,
            bus=[b.label for b in es.nodes if isinstance(b, Bus)],
            types=[
                "dispatchable",
                "volatile",
                "conversion",
                "backpressure",
                "extraction",
                #    "storage",
                "reservoir",
            ],
        )
        # .clip(0)
        .sum().reset_index()
    )
    supply_sum["from"] = supply_sum.apply(
        lambda x: "-".join(x["from"].label.split("-")[1::]), axis=1
    )
    supply_sum.drop("type", axis=1, inplace=True)
    supply_sum = (
        supply_sum.set_index(["from", "to"]).unstack("from")
        / 1e6
        * temporal_resolution
    )
    supply_sum.columns = supply_sum.columns.droplevel(0)
    summary = supply_sum  # pd.concat([supply_sum, excess_share], axis=1)
    ## grid
    imports = pd.DataFrame()
    link_results = pp.component_results(m.es, m.results).get("link")
    if link_results is not None:
        link_results.to_csv(
            os.path.join(scenario_path, "output", "transmission.csv")
        )

    for b in [b.label for b in es.nodes if isinstance(b, Bus)]:
        if link_results is not None and m.es.groups[b] in list(
            link_results.columns.levels[0]
        ):
            ex = link_results.loc[
                :, (m.es.groups[b], slice(None), "flow")
            ].sum(axis=1)
            im = link_results.loc[
                :, (slice(None), m.es.groups[b], "flow")
            ].sum(axis=1)

            net_import = im - ex
            net_import.name = m.es.groups[b]
            imports = pd.concat([imports, net_import], axis=1)

    summary["total_supply"] = summary.sum(axis=1)
    summary["RE-supply"] = (
        summary["wind-onshore"]
        + summary["wind-offshore"]
        + summary["biomass-st"]
        + summary["hydro-ror"]
        + summary["hydro-reservoir"]
        + summary["solar-pv"]
    )
    if "other-res" in summary:
        summary["RE-supply"] += summary["other-res"]

    summary["RE-share"] = summary["RE-supply"] / summary["total_supply"]

    summary["import"] = imports[imports > 0].sum() / 1e6 * temporal_resolution
    summary["export"] = imports[imports < 0].sum() / 1e6 * temporal_resolution
    summary.to_csv(os.path.join(scenario_path, "summary.csv"))

    emissions = (
        pd.Series({key: value() for key, value in m.emissions.items()})
        .unstack()
        .T
    )
    emissions.to_csv(os.path.join(scenario_path, "emissions.csv"))
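A minimal usage sketch (assumption: the scenario name is hypothetical; a matching scenarios/<name>.toml and datapackages/<name>/datapackage.json must exist relative to the working directory):

# hypothetical scenario name
compute("base", solver="cbc")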