def override_component_attrs(directory):
    """Tell PyPSA that links can have multiple outputs by
    overriding the component_attrs. This can be done for
    as many buses as you need with format ``bus_i`` for i = 2,3,4,5,....
    See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs

    Parameters
    ----------
    directory : string
        Folder where component attributes to override are stored
        analogous to ``pypsa/component_attrs``, e.g. `links.csv`.

    Returns
    -------
    Dictionary of overridden component attributes.
    """

    attrs = Dict({k: v.copy() for k, v in component_attrs.items()})

    for component, list_name in components.list_name.items():
        fn = f"{directory}/{list_name}.csv"
        if os.path.isfile(fn):
            overrides = pd.read_csv(fn, index_col=0, na_values="n/a")
            attrs[component] = overrides.combine_first(attrs[component])

    return attrs
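
# Hedged usage sketch (not part of the original module): the attrs returned by
# override_component_attrs() are meant to be passed to pypsa.Network via its
# override_component_attrs argument. The directory and network file below are
# illustrative placeholders only.
def _example_override_attrs():
    import pypsa
    overrides = override_component_attrs("data/override_component_attrs")
    return pypsa.Network("network.nc", override_component_attrs=overrides)
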
def approximate(pcs, M=0):
    '''
    Approximation for decomposition components, with updated mean value.
    The latter changes as it consists of the mean values of the components
    that are not included.
    '''
    vec = pcs.vec.loc[:, :M]
    val = pcs.val[:M]
    beta = pcs.beta.loc[:, :M]
    mean = pcs.mean
    mean = mean - (mean @ vec * vec).sum(1)
    return Dict(vec=vec, val=val, beta=beta, mean=mean)
def decomposition_pcs(df, abbrev=None):
    '''
    PCA without subtracting the mean and without scaling the covariance
    matrix. The eigenvalues of those components reflect the real variance
    covered by the components.
    '''
    mean = df.mean()
    C = covariance(df)
    val, vec = eig(C)
    beta = df @ vec
    return Dict(vec=vec, val=val, beta=beta, mean=mean, C=C, abbr=abbrev)
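
# Hedged usage sketch (not part of the original module): decomposition_pcs()
# expects a (time x region) DataFrame; covariance() and eig() are assumed to be
# helpers defined elsewhere in this module. The random data and column names
# below are purely illustrative.
def _example_decomposition():
    import numpy as np
    import pandas as pd
    df = pd.DataFrame(np.random.rand(100, 4), columns=["DE", "FR", "ES", "PL"])
    pcs = decomposition_pcs(df, abbrev="demo")
    # truncate to the leading components; approximate() also updates the mean
    return approximate(pcs, M=1)
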
def mock_snakemake(rulename, **wildcards):
    """
    This function is expected to be executed from the 'scripts'-directory of
    the snakemake project. It returns a snakemake.script.Snakemake object,
    based on the Snakefile. If a rule has wildcards, you have to specify them
    in **wildcards.

    Parameters
    ----------
    rulename: str
        name of the rule for which the snakemake object should be generated
    **wildcards:
        keyword arguments fixing the wildcards. Only necessary if wildcards
        are needed.
    """
    import snakemake as sm
    import os
    from pypsa.descriptors import Dict
    from snakemake.script import Snakemake

    script_dir = Path(__file__).parent.resolve()
    assert Path.cwd().resolve() == script_dir, \
        f'mock_snakemake has to be run from the repository scripts directory {script_dir}'
    os.chdir(script_dir.parent)
    for p in sm.SNAKEFILE_CHOICES:
        if os.path.exists(p):
            snakefile = p
            break
    workflow = sm.Workflow(snakefile)
    workflow.include(snakefile)
    workflow.global_resources = {}
    rule = workflow.get_rule(rulename)
    dag = sm.dag.DAG(workflow, rules=[rule])
    wc = Dict(wildcards)
    job = sm.jobs.Job(rule, dag, wc)

    def make_accessable(*ios):
        for io in ios:
            for i in range(len(io)):
                io[i] = os.path.abspath(io[i])

    make_accessable(job.input, job.output, job.log)
    snakemake = Snakemake(job.input, job.output, job.params, job.wildcards,
                          job.threads, job.resources, job.log,
                          job.dag.workflow.config, job.rule.name, None,)

    # create log and output dir if not existent
    for path in list(snakemake.log) + list(snakemake.output):
        Path(path).parent.mkdir(parents=True, exist_ok=True)

    os.chdir(script_dir)
    return snakemake
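
# Hedged usage sketch (assumption): a script in scripts/ typically falls back to
# mock_snakemake when run outside of snakemake. The rule name "solve_network"
# and the wildcard values below are placeholders for whatever the surrounding
# Snakefile actually defines.
def _example_mock_snakemake():
    return mock_snakemake("solve_network", country="DE", scenario="2020")
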
def load_network(import_name=None, custom_components=None):
    """
    Helper for importing a pypsa.Network with additional custom components.

    Parameters
    ----------
    import_name : str
        As in pypsa.Network(import_name)
    custom_components : dict
        Dictionary listing custom components.
        For using ``snakemake.config["override_components"]``
        in ``config.yaml`` define:

        .. code:: yaml

            override_components:
                ShadowPrice:
                    component: ["shadow_prices","Shadow price for a global constraint.",np.nan]
                    attributes:
                        name: ["string","n/a","n/a","Unique name","Input (required)"]
                        value: ["float","n/a",0.,"shadow value","Output"]

    Returns
    -------
    pypsa.Network
    """
    import pypsa
    from pypsa.descriptors import Dict

    override_components = None
    override_component_attrs = None

    if custom_components is not None:
        override_components = pypsa.components.components.copy()
        override_component_attrs = Dict(
            {k: v.copy() for k, v in pypsa.components.component_attrs.items()}
        )
        for k, v in custom_components.items():
            override_components.loc[k] = v["component"]
            override_component_attrs[k] = pd.DataFrame(
                columns=["type", "unit", "default", "description", "status"]
            )
            for attr, val in v["attributes"].items():
                override_component_attrs[k].loc[attr] = val

    return pypsa.Network(
        import_name=import_name,
        override_components=override_components,
        override_component_attrs=override_component_attrs,
    )
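
# Hedged usage sketch (not part of the original module): the custom_components
# dict mirrors the YAML structure documented in the docstring above; the file
# name "network.nc" and the np/pd imports are illustrative assumptions.
def _example_load_network():
    import numpy as np
    custom = {
        "ShadowPrice": {
            "component": ["shadow_prices",
                          "Shadow price for a global constraint.", np.nan],
            "attributes": {
                "name": ["string", "n/a", "n/a", "Unique name", "Input (required)"],
                "value": ["float", "n/a", 0., "shadow value", "Output"],
            },
        }
    }
    return load_network("network.nc", custom_components=custom)
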
def variance_pcs(df, abbrev=None):
    '''
    Ordinary PCA with subtracting the mean from the original data set and
    scaling of the covariance matrix.
    '''
    mean = df.mean()
    C = covariance(df - mean)
    trace = np.trace(C)
    C /= trace
    val, vec = eig(C)
    beta = (df - df.mean()) @ vec
    return Dict(vec=vec, val=val, beta=beta, mean=mean, C=C,
                abbr=abbrev, trace=trace)
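
# Hedged sketch (assumption): because variance_pcs() divides the covariance
# matrix by its trace before the eigendecomposition, the returned eigenvalues
# sum to roughly one and can be read directly as explained-variance shares
# (assuming eig() returns the full spectrum). Data below is illustrative only.
def _example_variance_shares():
    import numpy as np
    import pandas as pd
    df = pd.DataFrame(np.random.rand(200, 3), columns=["a", "b", "c"])
    pcs = variance_pcs(df)
    return pcs.val  # eigenvalues, normalised to sum to ~1
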
    fig.tight_layout()

    fig.savefig("paper_graphics/{}/compare-sys_cost-co2-{}.pdf".format(
        scenario, scenario), transparent=True)


if __name__ == "__main__":
    # Detect running outside of snakemake and mock snakemake for testing
    if 'snakemake' in globals():
        fn = snakemake.input["summary"]
        config = snakemake.config
    else:
        from pypsa.descriptors import Dict
        config = Dict()
        config["run"] = "190919-final"
        config["results_dir"] = "results"
        fn = "{}/{}/csvs/summary.csv".format(config["results_dir"],
                                             config["run"])

    scenario = config["run"]

    df = pd.read_csv(fn, index_col=[0, 1])

    assumptions = "wind1040-sola510"
    pen = "075"
    ylim_comparison = 140.

    for policy in [
            "pen{}{}-{}-nuclNone-lCCSNone".format(pen, "{}", assumptions),
import pypsa, pandas as pd, numpy as np

from pypsa.descriptors import Dict

from pyomo.environ import Constraint

override_components = pypsa.components.components.copy()
override_components.loc["ShadowPrice"] = [
    "shadow_prices",
    "Shadow price for a global constraint.",
    np.nan
]
override_components.loc["CHP"] = [
    "chps",
    "Combined heat and power plant.",
    np.nan
]

override_component_attrs = Dict(
    {k: v.copy() for k, v in pypsa.components.component_attrs.items()})
override_component_attrs["ShadowPrice"] = pd.DataFrame(
    columns=["type", "unit", "default", "description", "status"])
override_component_attrs["ShadowPrice"].loc["name"] = [
    "string", "n/a", "n/a", "Unique name", "Input (required)"]
override_component_attrs["ShadowPrice"].loc["value"] = [
    "float", "n/a", 0., "shadow value", "Output"]
override_component_attrs["CHP"] = pd.DataFrame(
    columns=["type", "unit", "default", "description", "status"])
override_component_attrs["CHP"].loc["name"] = [
    "string", "n/a", "n/a", "Unique name", "Input (required)"]
                 network.generators.p_nom_opt).sum()
    used = network.generators_t.p.sum()
    curtailment = (available - used) / available

    load = network.loads_t.p.sum().sum()
    supply = available / load

    stats.loc["wcurt", (scenario, ct)] = curtailment[ct + " wind"]
    stats.loc["scurt", (scenario, ct)] = curtailment[ct + " solar"]
    stats.loc["wsupply", (scenario, ct)] = supply[ct + " wind"]
    stats.loc["ssupply", (scenario, ct)] = supply[ct + " solar"]

    stats.to_csv(snakemake.output[0])


if __name__ == "__main__":
    # Detect running outside of snakemake and mock snakemake for testing
    if 'snakemake' not in globals():
        from pypsa.descriptors import Dict
        import yaml
        snakemake = Dict()
        with open('config.yaml') as f:
            snakemake.config = yaml.safe_load(f)
        snakemake["output"] = [
            "{}summary.csv".format(snakemake.config["results_dir"])
        ]

    make_csv()
    network.consistency_check()

    network.lopf(solver_name=solver_name,
                 solver_options=solver_options,
                 extra_functionality=extra_functionality)

    network.export_to_netcdf(snakemake.output[0])

    return network


if __name__ == "__main__":
    # Detect running outside of snakemake and mock up snakemake for testing
    if 'snakemake' not in globals():
        from pypsa.descriptors import Dict
        import yaml
        snakemake = Dict()
        with open('config.yaml') as f:
            snakemake.config = yaml.safe_load(f)
        snakemake["wildcards"] = Dict({"country": "DE",
                                       "scenario": "2020"})
        snakemake["output"] = ["results/{}-{}.nc".format(snakemake.wildcards.country,
                                                         snakemake.wildcards.scenario)]

    network = solve_network(snakemake.wildcards.country,
                            snakemake.wildcards.scenario)