class PriceSetter:
    """Identify price-setting generators and inspect dual information from model results."""

    def __init__(self, first_year=2016, final_year=2040, scenarios_per_year=5):
        self.dual = Dual(first_year, final_year, scenarios_per_year)
        self.data = ModelData()
        self.analysis = AnalyseResults()

        # Common model components. May need to update these values
        self.common = CommonComponents(first_year=first_year,
                                       final_year=final_year,
                                       scenarios_per_year=scenarios_per_year)

        # Model sets
        self.sets = self.get_model_sets()

    @staticmethod
    def convert_to_frame(results, index_name, variable_name):
        """
        Convert dict to pandas DataFrame.

        Parameters
        ----------
        results : dict
            Model results keyed by variable name
        index_name : str or tuple
            Name(s) to assign to the resulting index level(s)
        variable_name : str
            Key within 'results' to extract (also used as the column name)

        Returns
        -------
        pandas.DataFrame
            Single-column frame named 'variable_name'
        """
        df = (pd.Series(results[variable_name])
              .rename_axis(index_name)
              .to_frame(name=variable_name))
        return df

    def get_model_sets(self):
        """Define sets used in model"""
        # Construct a Pyomo model containing only the common sets
        m = ConcreteModel()
        m = self.common.define_sets(m)
        return m

    def get_eligible_generators(self):
        """Find generators which are eligible for rebates / penalties"""
        # Thermal units plus candidate wind and solar units
        return list(self.sets.G_THERM.union(self.sets.G_C_WIND, self.sets.G_C_SOLAR))

    def get_generator_cost_parameters(self, results_dir, filename, eligible_generators):
        """
        Get parameters affecting generator marginal costs, and compute the net
        marginal cost for a given policy.

        Parameters
        ----------
        results_dir : str
            Directory containing model results
        filename : str
            Results file to load
        eligible_generators : list
            Generators whose costs are adjusted by the policy

        Returns
        -------
        pandas.DataFrame
            Indexed by (generator, year) with marginal cost, baseline, permit
            price, emissions intensity, and net marginal cost columns
        """
        # Model results
        results = self.analysis.load_results(results_dir, filename)

        # Marginal costs, indexed by (generator, year)
        costs = (pd.Series(results['C_MC'])
                 .rename_axis(['generator', 'year'])
                 .to_frame(name='marginal_cost'))

        # Add emissions intensity baseline
        costs = costs.join(
            pd.Series(results['baseline']).rename_axis('year').to_frame(name='baseline'),
            how='left')

        # Add permit price
        costs = costs.join(
            pd.Series(results['permit_price']).rename_axis('year').to_frame(name='permit_price'),
            how='left')

        # Emissions intensities for existing and candidate units
        existing_emissions = self.analysis.data.existing_units.loc[:, ('PARAMETERS', 'EMISSIONS')]
        candidate_emissions = self.analysis.data.candidate_units.loc[:, ('PARAMETERS', 'EMISSIONS')]

        # Combine emissions intensities into a single DataFrame
        emission_intensities = (pd.concat([existing_emissions, candidate_emissions])
                                .rename_axis('generator')
                                .to_frame('emissions_intensity'))

        # Join emissions intensities
        costs = costs.join(emission_intensities, how='left')

        # Total marginal cost (taking into account net cost under policy)
        costs['net_marginal_cost'] = (
            costs['marginal_cost']
            + (costs['emissions_intensity'] - costs['baseline']) * costs['permit_price'])

        def correct_for_ineligible_generators(row):
            """Update costs so only eligible generators have new costs
            (ineligible generators have unchanged costs)"""
            if row.name[0] in eligible_generators:
                return row['net_marginal_cost']
            else:
                return row['marginal_cost']

        # Correct for ineligible generators
        costs['net_marginal_cost'] = costs.apply(correct_for_ineligible_generators, axis=1)

        return costs

    def get_price_setting_generators(self, results_dir, filename, eligible_generators,
                                     shedding_threshold=9000):
        """
        Find price setting generators.

        Parameters
        ----------
        results_dir : str
            Directory containing model results
        filename : str
            Results file to load
        eligible_generators : list
            Generators eligible for rebates / penalties
        shedding_threshold : float, optional
            If the smallest absolute difference between the observed price and
            any generator's SRMC exceeds this value, the price is attributed to
            load shedding rather than a generator (default=9000, previously a
            hard-coded constant)

        Returns
        -------
        dict
            Keys: 'price_setters', 'prices', 'generator_costs'
        """
        # Prices
        prices = self.analysis.parse_prices(results_dir, filename)

        # Generator SRMC and cost parameters (emissions intensities, baselines, permit prices)
        generator_costs = self.get_generator_cost_parameters(results_dir, filename,
                                                             eligible_generators)

        def get_price_setting_generator(row):
            """Get price setting generator, price difference, and absolute real price"""
            # Year and average real price for a given row
            year, price = row.name[0], row['average_price_real']

            # Absolute difference between price and all generator SRMCs
            abs_price_difference = (generator_costs
                                    .loc[(slice(None), year), 'net_marginal_cost']
                                    .subtract(price).abs())

            # Price setting generator and absolute price difference for that generator
            generator, difference = abs_price_difference.idxmin()[0], abs_price_difference.min()

            # Generator SRMC
            srmc = generator_costs.loc[(generator, year), 'net_marginal_cost']

            # Update generator name to load shedding if price very high
            # (indicative of load shedding)
            if difference > shedding_threshold:
                generator = 'LOAD-SHEDDING'
                difference = np.nan
                srmc = np.nan

            return pd.Series({'generator': generator,
                              'difference': difference,
                              'price': price,
                              'srmc': srmc})

        # Find price setting generators
        price_setters = prices.apply(get_price_setting_generator, axis=1)

        # Combine output into single dictionary
        output = {'price_setters': price_setters,
                  'prices': prices,
                  'generator_costs': generator_costs}

        return output

    def get_dual_component_existing_thermal(self, results):
        """Get dual variable component of dual constraint for existing thermal units"""
        dfs = []
        index = ('generator', 'year', 'scenario', 'interval')
        # Removed leftover debug print of each variable name
        for v in ['SIGMA_1', 'SIGMA_2', 'SIGMA_20', 'SIGMA_23']:
            dfs.append(self.convert_to_frame(results, index, v))

        # Place all information in a single DataFrame
        df_c = pd.concat(dfs, axis=1).dropna()

        # Get offset values (dual values for the following interval)
        # NOTE(review): shift(-1) also crosses scenario boundaries; downstream
        # evaluation overwrites the last interval of each scenario, so this
        # appears intentional - confirm
        df_c['SIGMA_20_PLUS_1'] = df_c['SIGMA_20'].shift(-1)
        df_c['SIGMA_23_PLUS_1'] = df_c['SIGMA_23'].shift(-1)

        return df_c

    def k(self, g):
        """Mapping generator to the NEM zone to which it belongs"""
        if g in self.sets.G_E:
            return self.data.existing_units_dict[('PARAMETERS', 'NEM_ZONE')][g]
        elif g in self.sets.G_C.difference(self.sets.G_STORAGE):
            return self.data.candidate_units_dict[('PARAMETERS', 'ZONE')][g]
        elif g in self.sets.G_STORAGE:
            return self.data.battery_properties_dict['NEM_ZONE'][g]
        else:
            raise Exception(f'Unexpected generator: {g}')

    @staticmethod
    def merge_generator_node_prices(dual_component, zone_prices):
        """Get prices at the node to which a generator is connected"""
        # Merge price information on (zone, year, scenario, interval)
        df = (pd.merge(dual_component.reset_index(),
                       zone_prices.reset_index(),
                       how='left',
                       left_on=['zone', 'year', 'scenario', 'interval'],
                       right_on=['zone', 'year', 'scenario', 'interval'])
              .set_index(['generator', 'year', 'scenario', 'interval', 'zone']))

        return df

    def get_generator_cost_information(self, results):
        """Merge generator cost information"""
        # Load results
        delta = self.convert_to_frame(results, 'year', 'DELTA')
        rho = self.convert_to_frame(results, ('year', 'scenario'), 'RHO')
        emissions_rate = self.convert_to_frame(results, 'generator', 'EMISSIONS_RATE')
        baseline = self.convert_to_frame(results, 'year', 'baseline')
        permit_price = self.convert_to_frame(results, 'year', 'permit_price')
        marginal_cost = self.convert_to_frame(results, ('generator', 'year'), 'C_MC')

        # Join information into single DataFrame
        df_c = marginal_cost.join(emissions_rate, how='left')
        df_c = df_c.join(baseline, how='left')
        df_c = df_c.join(permit_price, how='left')
        df_c = df_c.join(delta, how='left')
        df_c = df_c.join(rho, how='left')

        # Add a scaling factor for the final year (terminal-value correction
        # using a 6% discount rate)
        # NOTE(review): assumes the first index level holds years after the
        # joins above - callers index this frame as (year, generator, scenario),
        # which relies on pandas reordering levels in the partial-index join;
        # verify against actual join output
        final_year = df_c.index.levels[0][-1]
        df_c['scaling_factor'] = df_c.apply(
            lambda x: 1 if int(x.name[0]) < final_year else 1 + (1 / 0.06), axis=1)

        return df_c

    def merge_generator_cost_information(self, df, results):
        """Merge generator cost information from model"""
        # Get generator cost information
        generator_cost_info = self.get_generator_cost_information(results)

        df = (pd.merge(df.reset_index(), generator_cost_info.reset_index(), how='left')
              .set_index(['generator', 'year', 'scenario', 'interval', 'zone']))

        return df

    def get_constraint_body_existing_thermal(self, results):
        """Get body of dual power output constraint for existing thermal generators"""
        # Components of dual power output constraint
        duals = self.get_dual_component_existing_thermal(results)

        # Map between generators and zones
        generators = duals.index.levels[0]
        generator_zone_map = (pd.DataFrame
                              .from_dict({g: self.k(g) for g in generators},
                                         orient='index', columns=['zone'])
                              .rename_axis('generator'))

        # Add NEM zone to index
        duals = duals.join(generator_zone_map, how='left').set_index('zone', append=True)

        # Power balance dual variables
        var_index = ('zone', 'year', 'scenario', 'interval')
        prices = self.convert_to_frame(results, var_index, 'PRICES')

        # Merge price information
        c = self.merge_generator_node_prices(duals, prices)

        # Merge operating cost information
        # Fix: previously called the module-level 'price_setter' instance
        # instead of 'self', coupling this method to script-level state
        c = self.merge_generator_cost_information(c, results)

        return c

    def evaluate_constraint_body_existing_thermal(self, results):
        """Evaluate constraint body information for existing thermal units (should = 0)"""
        # Get values of terms constituting the constraint
        c = self.get_constraint_body_existing_thermal(results)

        # Correct for all intervals excluding the last interval of each scenario
        s_1 = (-c['SIGMA_1'].abs() + c['SIGMA_2'].abs() - c['PRICES'].abs()
               + (c['DELTA'] * c['RHO'] * c['scaling_factor']
                  * (c['C_MC'] + (c['EMISSIONS_RATE'] - c['baseline']) * c['permit_price']))
               + c['SIGMA_20'].abs() - c['SIGMA_20_PLUS_1'].abs()
               - c['SIGMA_23'].abs() + c['SIGMA_23_PLUS_1'].abs())

        # Set last interval to NaN (offset duals are invalid there)
        s_1.loc[(slice(None), slice(None), slice(None), 24, slice(None))] = np.nan

        # Last interval of each scenario (no offset dual terms)
        s_2 = (-c['SIGMA_1'].abs() + c['SIGMA_2'].abs() - c['PRICES'].abs()
               + (c['DELTA'] * c['RHO'] * c['scaling_factor']
                  * (c['C_MC'] + (c['EMISSIONS_RATE'] - c['baseline']) * c['permit_price']))
               + c['SIGMA_20'].abs() - c['SIGMA_23'].abs())

        # Update so corrected values for last interval are accounted for
        s_3 = s_1.to_frame(name='body')
        s_3.update(s_2.to_frame(name='body'), overwrite=False)

        return s_3

    def evaluate_constraint_dual_component_existing_thermal(self, results):
        """Evaluate dual component of constraint"""
        # Get values of terms constituting the constraint
        c = self.get_constraint_body_existing_thermal(results)

        # Dual component - correct for intervals excluding the last interval of each scenario
        s_1 = (-c['SIGMA_1'].abs() + c['SIGMA_2'].abs()
               + c['SIGMA_20'].abs() - c['SIGMA_20_PLUS_1'].abs()
               - c['SIGMA_23'].abs() + c['SIGMA_23_PLUS_1'].abs())

        # Set last interval to NaN (offset duals are invalid there)
        s_1.loc[(slice(None), slice(None), slice(None), 24, slice(None))] = np.nan

        # Dual component - correct for last interval of each scenario
        s_2 = (-c['SIGMA_1'].abs() + c['SIGMA_2'].abs()
               + c['SIGMA_20'].abs() - c['SIGMA_23'].abs())

        # Combine components
        s_3 = s_1.to_frame(name='body')
        s_3.update(s_2.to_frame(name='body'), overwrite=False)

        return s_3

    def get_price_setting_generators_from_model_results(self, results):
        """Find price setting generators"""
        # Generators eligible for a rebate / penalty under the scheme
        eligible_generators = self.get_eligible_generators()

        # Generator costs
        generator_costs = self.get_generator_cost_information(results)

        # Get prices in each zone for each dispatch interval
        index = ('zone', 'year', 'scenario', 'interval')
        zone_price = self.convert_to_frame(results, index, 'PRICES')

        def correct_permit_prices(row):
            """Only eligible generators face a non-zero permit price"""
            if row.name[1] in eligible_generators:
                return row['permit_price']
            else:
                return 0

        # Update permit prices
        generator_costs['permit_price'] = generator_costs.apply(correct_permit_prices, axis=1)

        # Net marginal costs
        generator_costs['net_marginal_cost'] = (
            generator_costs['scaling_factor'] * generator_costs['DELTA'] * generator_costs['RHO']
            * (generator_costs['C_MC']
               + (generator_costs['EMISSIONS_RATE'] - generator_costs['baseline'])
               * generator_costs['permit_price']))

        def get_price_setter(row):
            """Find generator whose marginal cost is closest to interval marginal cost"""
            # Extract zone, year, scenario, and interval information
            z, y, s, t = row.name

            # Power balance constraint marginal cost for given interval (related to price)
            p = abs(row['PRICES'])

            # Net marginal costs of all generators for the given interval
            generator_mc = generator_costs.loc[(y, slice(None), s), :]

            # Scenario duration and discount factor
            # (arbitrarily selecting YWPS4 to get a single row)
            rho, delta = generator_costs.loc[(y, 'YWPS4', s), ['RHO', 'DELTA']].values

            # Difference between marginal cost in given interval and all
            # generator marginal costs for that interval
            diff = generator_mc['net_marginal_cost'].subtract(p).abs()

            # Extract generator ID and absolute cost difference
            g, cost_diff = diff.idxmin(), diff.min()

            # Details of the price setting generator
            cols = ['EMISSIONS_RATE', 'baseline', 'permit_price', 'C_MC',
                    'net_marginal_cost', 'scaling_factor']
            (emissions_rate, baseline, permit_price, marginal_cost,
             net_marginal_cost, scaling_factor) = generator_costs.loc[g, cols]

            # Compute normalised price and cost differences
            price_normalised = p / (delta * rho * scaling_factor)
            cost_diff_normalised = cost_diff / (delta * rho)
            net_marginal_cost_normalised = net_marginal_cost / (delta * rho * scaling_factor)

            return (g[1], p, price_normalised, cost_diff, cost_diff_normalised,
                    emissions_rate, baseline, permit_price, marginal_cost,
                    net_marginal_cost, net_marginal_cost_normalised)

        # Get price setting generator information
        ps = zone_price.apply(get_price_setter, axis=1)

        # Convert column of tuples to DataFrame with separate columns
        columns = ['generator', 'price_abs', 'price_normalised', 'difference_abs',
                   'difference_normalised', 'emissions_rate', 'baseline', 'permit_price',
                   'marginal_cost', 'net_marginal_cost', 'net_marginal_cost_normalised']
        ps = pd.DataFrame(ps.to_list(), columns=columns, index=zone_price.index)

        return ps
results_directory = os.path.join(os.path.dirname(__file__), os.path.pardir, '3_model', 'linear', 'output', 'local') tmp_directory = os.path.join(os.path.dirname(__file__), 'output', 'tmp', 'local') figures_directory = os.path.join(os.path.dirname(__file__), 'output', 'figures') # Object used to analyse results and get price target trajectory analysis = AnalyseResults() # Get plot data plot_data = PlotData(tmp_directory) plots = CreatePlots(tmp_directory, figures_directory) # Get BAU price trajectories bau = analysis.load_results(results_directory, 'bau_case.pickle') bau_prices = analysis.get_year_average_price(bau['PRICES'], -1) bau_price_trajectory = bau_prices['average_price_real'].to_dict() bau_first_year_trajectory = { y: bau_price_trajectory[2016] for y in range(2016, 2031) } # Create plots plots.plot_tax_rep_comparison() plots.plot_transition_year_comparison('baudev') plots.plot_transition_year_comparison('ptar') plots.plot_transition_year_comparison('pdev') plot_params = { 'price_trajectory': bau_price_trajectory,
class Targets:
    """Construct and load the policy targets (emissions intensity, emissions caps,
    price levels) used to parameterise the models."""

    def __init__(self):
        # Object used to analyse results
        self.analysis = AnalyseResults()

    @staticmethod
    def get_year_emission_intensity_target(initial_emissions_intensity, half_life, year, start_year):
        """Get half-life emissions intensity target for each year in model horizon"""
        # Years elapsed since the start of the horizon (t = 0 in the first year)
        elapsed = year - start_year

        # Exponential decay: intensity halves every 'half_life' years
        return initial_emissions_intensity * (2 ** (-elapsed / half_life))

    def get_emissions_intensity_target(self, half_life):
        """Get sequence of yearly emissions intensity targets"""
        # Emissions intensities for each year of model horizon - BAU case
        bau = self.analysis.get_year_system_emissions_intensities('primal_bau_results.pickle')
        bau = bau.rename(columns={'emissions_intensity': 'bau_emissions_intensity'})

        # First and last years of the model horizon
        first_year, last_year = bau.index[0], bau.index[-1]

        # Emissions intensity observed in the first year
        initial_intensity = bau.loc[first_year, 'bau_emissions_intensity']

        # Yearly target sequence over the horizon
        targets = {
            y: self.get_year_emission_intensity_target(initial_intensity, half_life, y, first_year)
            for y in range(first_year, last_year + 1)
        }

        # Convert to DataFrame
        targets_frame = (pd.Series(targets)
                         .rename_axis('year')
                         .to_frame('emissions_intensity_target'))

        # Combine BAU intensities with the target sequence
        return pd.concat([bau, targets_frame], axis=1)

    def get_first_year_average_real_bau_price(self):
        """Get average price in first year of model horizon"""
        # Average yearly prices (real) from the BAU case
        prices = self.analysis.get_year_average_price('primal_bau_results.pickle')

        return prices.iloc[0]['average_price_real']

    @staticmethod
    def load_emissions_intensity_target(filename):
        """Load emissions intensity target"""
        # Read previously saved target from the output directory
        target_path = os.path.join(os.path.dirname(__file__), 'output', filename)
        with open(target_path, 'r') as f:
            raw_target = json.load(f)

        # JSON keys are strings - convert years back to integers
        return {int(year): value for year, value in raw_target.items()}

    @staticmethod
    def load_first_year_average_bau_price(filename):
        """Load average price in first year - BAU scenario"""
        # Read previously saved price from the output directory
        price_path = os.path.join(os.path.dirname(__file__), 'output', filename)
        with open(price_path, 'r') as f:
            price = json.load(f)

        return price['first_year_average_price']

    def get_cumulative_emissions_target(self, filename, frac):
        """
        Load emissions target

        Parameters
        ----------
        filename : str
            Name of results file on which emissions target will be based

        frac : float
            Target emissions reduction. E.g. 0.5 would imply emissions should
            be less than or equal to 50% of total emissions observed in results
            associated with 'filename'
        """
        total_emissions = self.analysis.get_total_emissions(filename)

        return float(total_emissions * frac)

    @staticmethod
    def load_cumulative_emissions_target():
        """Load cumulative emissions target"""
        target_path = os.path.join(os.path.dirname(__file__), 'output',
                                   'cumulative_emissions_target.json')
        with open(target_path, 'r') as f:
            emissions_target = json.load(f)

        return emissions_target['cumulative_emissions_target']

    def get_interim_emissions_target(self, filename):
        """Load total emissions in each year when pursuing a cumulative emissions cap"""
        # Emissions in each year of model horizon when pursuing cumulative target
        return self.analysis.get_year_emissions(filename)

    @staticmethod
    def load_interim_emissions_target():
        """Load interim emissions target"""
        target_path = os.path.join(os.path.dirname(__file__), 'output',
                                   'interim_emissions_target.json')
        with open(target_path, 'r') as f:
            raw_target = json.load(f)

        # JSON keys are strings - convert years back to integers
        return {int(year): value for year, value in raw_target.items()}

    def get_cumulative_emissions_cap_carbon_price(self):
        """Get carbon price from cumulative emissions cap model results"""
        results = self.analysis.load_results('cumulative_emissions_cap_results.pickle')

        # Dual of the cumulative cap constraint is the implied carbon price
        return results['CUMULATIVE_EMISSIONS_CAP_CONS_DUAL']

    def get_interim_emissions_cap_carbon_price(self):
        """Get carbon price from interim emissions cap model results"""
        results = self.analysis.load_results('interim_emissions_cap_results.pickle')

        # Dual of the interim cap constraint is the implied carbon price
        return results['INTERIM_EMISSIONS_CAP_CONS_DUAL']

    @staticmethod
    def get_envelope(n_0, half_life, first_year, year):
        """Get revenue envelope level for a given year"""
        # Years elapsed since the first year (t = 0 in first_year)
        elapsed = year - first_year

        # Envelope halves every 'half_life' years
        return n_0 * np.power(0.5, (elapsed / half_life))