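# Imports assumed by the snippets in this file (a sketch, not part of the
# original excerpt): the helpers come from PyPSA's linear optimal power
# flow interface (pypsa.linopt / pypsa.linopf, PyPSA < 0.20); the exact
# module layout may differ between versions. `snakemake` is injected into
# the namespace by Snakemake when the script runs inside a workflow.
import numpy as np
import pandas as pd

from pypsa.descriptors import (Dict, get_extendable_i, nominal_attrs,
                               get_switchable_as_dense as get_as_dense)
from pypsa.linopt import define_constraints, get_con, get_var, linexpr
from pypsa.linopf import additional_linkports, lookup, objective_constant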
def define_mga_constraint(n, sns, epsilon=None, with_fix=None):
    """Build constraint defining the near-optimal feasible space.

    Parameters
    ----------
    n : pypsa.Network
    sns : Series|list-like
        snapshots
    epsilon : float, optional
        Allowed added cost compared to the least-cost solution,
        by default None (then read from snakemake.wildcards)
    with_fix : bool, optional
        Whether the calculation of the allowed cost penalty should
        include the cost of non-extendable components, by default None
        (then read from snakemake.config)
    """

    if epsilon is None:
        epsilon = float(snakemake.wildcards.epsilon)

    if with_fix is None:
        with_fix = snakemake.config.get("include_non_extendable", True)

    expr = []

    # operation
    for c, attr in lookup.query("marginal_cost").index:
        cost = (get_as_dense(n, c, "marginal_cost", sns)
                .loc[:, lambda ds: (ds != 0).all()]
                .mul(n.snapshot_weightings[sns], axis=0))
        if cost.empty:
            continue
        expr.append(linexpr((cost, get_var(n, c, attr).loc[sns, cost.columns])).stack())

    # investment
    for c, attr in nominal_attrs.items():
        cost = n.df(c)["capital_cost"][get_extendable_i(n, c)]
        if cost.empty:
            continue
        expr.append(linexpr((cost, get_var(n, c, attr)[cost.index])))

    lhs = pd.concat(expr).sum()

    if with_fix:
        ext_const = objective_constant(n, ext=True, nonext=False)
        nonext_const = objective_constant(n, ext=False, nonext=True)
        rhs = (1 + epsilon) * (n.objective + ext_const + nonext_const) - nonext_const
    else:
        ext_const = objective_constant(n)
        rhs = (1 + epsilon) * (n.objective + ext_const)

    define_constraints(n, lhs, "<=", rhs, "GlobalConstraint", "mu_epsilon")
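# Usage sketch (assumptions: `n` has already been solved once, so that
# n.objective holds the least-cost optimum, and `define_mga_objective` is
# a hypothetical companion that sets the MGA search direction; names and
# keyword values below are illustrative only):
#
#   def extra_functionality(n, sns):
#       define_mga_constraint(n, sns, epsilon=0.05, with_fix=True)
#       define_mga_objective(n)  # hypothetical
#
#   n.lopf(pyomo=False, skip_objective=True,
#          extra_functionality=extra_functionality)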
def define_mga_constraint(n, sns):
    """Simpler variant of the constraint above: epsilon and the treatment
    of non-extendable components are read directly from the snakemake
    context. If both definitions are kept in one module, this one shadows
    the previous one."""

    epsilon = float(snakemake.wildcards.epsilon)

    expr = []

    # operation
    for c, attr in lookup.query("marginal_cost").index:
        cost = (get_as_dense(n, c, "marginal_cost", sns)
                .loc[:, lambda ds: (ds != 0).all()]
                .mul(n.snapshot_weightings[sns], axis=0))
        if cost.empty:
            continue
        expr.append(linexpr((cost, get_var(n, c, attr).loc[sns, cost.columns])).stack())

    # investment
    for c, attr in nominal_attrs.items():
        cost = n.df(c)["capital_cost"][get_extendable_i(n, c)]
        if cost.empty:
            continue
        expr.append(linexpr((cost, get_var(n, c, attr)[cost.index])))

    lhs = pd.concat(expr).sum()

    if snakemake.config["include_non_extendable"]:
        ext_const = objective_constant(n, ext=True, nonext=False)
        nonext_const = objective_constant(n, ext=False, nonext=True)
        rhs = (1 + epsilon) * (n.objective + ext_const + nonext_const) - nonext_const
    else:
        ext_const = objective_constant(n)
        rhs = (1 + epsilon) * (n.objective + ext_const)

    define_constraints(n, lhs, "<=", rhs, "GlobalConstraint", "mu_epsilon")
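# Both variants encode the same epsilon-constraint: the variable part of
# total system cost (weighted operation plus investment) may exceed the
# least-cost optimum c* by at most a factor (1 + epsilon), with constant
# cost terms of already-fixed capacities moved to the right-hand side.
# As an inequality (a sketch of what define_constraints receives):
#
#   sum_{c,t} w_t * marginal_cost_{c,t} * p_{c,t}
#       + sum_c capital_cost_c * P_c
#     <= (1 + epsilon) * c* - (constant cost terms)
#
# where w_t are the snapshot weightings, p_{c,t} the dispatch variables
# and P_c the extendable nominal capacities.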
def assign_solution_netzbooster(n, sns, variables_sol, constraints_dual,
                                keep_references=False, keep_shadowprices=None):
    """
    Helper function. Assigns the solution of a successful optimization to
    the network.
    """

    def set_from_frame(pnl, attr, df):
        if attr not in pnl:  # use this for subnetworks_t
            pnl[attr] = df.reindex(n.snapshots)
        elif pnl[attr].empty:
            pnl[attr] = df.reindex(n.snapshots)
        else:
            pnl[attr].loc[sns, :] = df.reindex(columns=pnl[attr].columns)

    pop = not keep_references

    def map_solution(c, attr):
        variables = get_var(n, c, attr, pop=pop)
        predefined = True
        if (c, attr) not in lookup.index:
            predefined = False
            n.sols[c] = n.sols[c] if c in n.sols else Dict(df=pd.DataFrame(), pnl={})
        n.solutions.at[(c, attr), 'in_comp'] = predefined
        if isinstance(variables, pd.DataFrame):
            # case that variables are time-dependent
            n.solutions.at[(c, attr), 'pnl'] = True
            pnl = n.pnl(c) if predefined else n.sols[c].pnl
            values = variables.apply(lambda x: x.map(variables_sol))
            # values = variables.stack().map(variables_sol).unstack()
            if c in n.passive_branch_components and attr == "s":
                set_from_frame(pnl, 'p0', values)
                set_from_frame(pnl, 'p1', -values)
            elif c == 'Link' and attr == "p":
                set_from_frame(pnl, 'p0', values)
                for i in ['1'] + additional_linkports(n):
                    i_eff = '' if i == '1' else i
                    eff = get_as_dense(n, 'Link', f'efficiency{i_eff}', sns)
                    set_from_frame(pnl, f'p{i}', -values * eff)
                    pnl[f'p{i}'].loc[sns, n.links.index[n.links[f'bus{i}'] == ""]] = \
                        n.component_attrs['Link'].loc[f'p{i}', 'default']
            else:
                set_from_frame(pnl, attr, values)
        else:
            # case that variables are static
            n.solutions.at[(c, attr), 'pnl'] = False
            sol = variables.map(variables_sol)
            if predefined:
                non_ext = n.df(c)[attr]
                n.df(c)[attr + '_opt'] = sol.reindex(non_ext.index).fillna(non_ext)
            else:
                n.sols[c].df[attr] = sol

    n.sols = Dict()
    n.solutions = pd.DataFrame(index=n.variables.index, columns=['in_comp', 'pnl'])
    for c, attr in n.variables.index:
        map_solution(c, attr)

    # if nominal capacity was not a variable, set optimal value to nominal
    for c, attr in lookup.query('nominal').index.difference(n.variables.index):
        n.df(c)[attr + '_opt'] = n.df(c)[attr]

    # recalculate storage unit net dispatch
    if not n.df('StorageUnit').empty:
        c = 'StorageUnit'
        n.pnl(c)['p'] = n.pnl(c)['p_dispatch'] - n.pnl(c)['p_store']

    # duals
    if keep_shadowprices is False:
        keep_shadowprices = []

    sp = n.constraints.index
    if isinstance(keep_shadowprices, list):
        sp = sp[sp.isin(keep_shadowprices, level=0)]

    def map_dual(c, attr):
        # If c is a pypsa component name, the dual is stored at n.pnl(c)
        # or n.df(c). In the second case the index of the constraints has
        # to be a subset of n.df(c).index, otherwise the dual is stored at
        # n.duals[c].df
        constraints = get_con(n, c, attr, pop=pop)
        is_pnl = isinstance(constraints, pd.DataFrame)
        sign = 1 if 'upper' in attr or attr == 'marginal_price' else -1
        n.dualvalues.at[(c, attr), 'pnl'] = is_pnl
        to_component = c in n.all_components
        if is_pnl:
            n.dualvalues.at[(c, attr), 'in_comp'] = to_component
            # changed for netzbooster
            duals = constraints.apply(lambda x: x.map(sign * constraints_dual))
            if c not in n.duals and not to_component:
                n.duals[c] = Dict(df=pd.DataFrame(), pnl={})
            pnl = n.pnl(c) if to_component else n.duals[c].pnl
            set_from_frame(pnl, attr, duals)
        else:
            # here to_component can change
            duals = constraints.map(sign * constraints_dual)
            if to_component:
                to_component = (duals.index.isin(n.df(c).index).all())
            n.dualvalues.at[(c, attr), 'in_comp'] = to_component
            if c not in n.duals and not to_component:
                n.duals[c] = Dict(df=pd.DataFrame(), pnl={})
            df = n.df(c) if to_component else n.duals[c].df
            df[attr] = duals

    n.duals = Dict()
    n.dualvalues = pd.DataFrame(index=sp, columns=['in_comp', 'pnl'])
    # extract shadow prices attached to components
    for c, attr in sp:
        map_dual(c, attr)

    # correct prices for snapshot weightings
    n.buses_t.marginal_price.loc[sns] = (
        n.buses_t.marginal_price.loc[sns]
        .divide(n.snapshot_weightings.loc[sns], axis=0))

    # discard remaining if wanted
    if not keep_references:
        for c, attr in n.constraints.index.difference(sp):
            get_con(n, c, attr, pop)

    # load
    if len(n.loads):
        set_from_frame(n.pnl('Load'), 'p', get_as_dense(n, 'Load', 'p_set', sns))

    # clean up vars and cons
    for c in list(n.vars):
        if n.vars[c].df.empty and n.vars[c].pnl == {}:
            n.vars.pop(c)
    for c in list(n.cons):
        if n.cons[c].df.empty and n.cons[c].pnl == {}:
            n.cons.pop(c)

    # recalculate injection
    ca = [('Generator', 'p', 'bus'), ('Store', 'p', 'bus'),
          ('Load', 'p', 'bus'), ('StorageUnit', 'p', 'bus'),
          ('Link', 'p0', 'bus0'), ('Link', 'p1', 'bus1')]
    for i in additional_linkports(n):
        ca.append(('Link', f'p{i}', f'bus{i}'))

    sign = lambda c: n.df(c).sign if 'sign' in n.df(c) else -1  # sign for 'Link'
    n.buses_t.p = (
        pd.concat([n.pnl(c)[attr].mul(sign(c)).rename(columns=n.df(c)[group])
                   for c, attr, group in ca], axis=1)
        .groupby(level=0, axis=1).sum()
        .reindex(columns=n.buses.index, fill_value=0))

    def v_ang_for_(sub):
        buses_i = sub.buses_o
        if len(buses_i) == 1:
            return pd.DataFrame(0, index=sns, columns=buses_i)
        sub.calculate_B_H(skip_pre=True)
        Z = pd.DataFrame(np.linalg.pinv((sub.B).todense()), buses_i, buses_i)
        Z -= Z[sub.slack_bus]
        return n.buses_t.p.reindex(columns=buses_i) @ Z

    n.buses_t.v_ang = (pd.concat(
        [v_ang_for_(sub) for sub in n.sub_networks.obj], axis=1)
        .reindex(columns=n.buses.index, fill_value=0))
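# Usage sketch (assumption: `variables_sol` and `constraints_dual` are
# pandas Series mapping PyPSA's internal variable and constraint labels
# to primal and dual values, as produced by the low-level solving
# routines in pypsa.linopt; the function then stands in for
# pypsa.linopf's assign_solution so that duals of the extra netzbooster
# constraints are kept as well):
#
#   assign_solution_netzbooster(n, n.snapshots, variables_sol,
#                               constraints_dual,
#                               keep_shadowprices=["Bus", "GlobalConstraint"])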