def clean_df(df, stack_years=False):
    """Clean a results DataFrame for output.

    Maps ids in the index (and column) levels to display names using
    cfg.outputs_id_map, uppercases string level names, optionally stacks a
    wide year layout into a 'year' index level, and drops years before the
    configured current_year.

    Args:
        df (pd.DataFrame): frame whose index (and optionally columns) carry
            named levels understood by cfg.outputs_id_map.
        stack_years (bool): when True and 'year' is not already an index
            level, stack the columns into a new 'year' level and rename the
            single remaining column to 'value'.

    Returns:
        pd.DataFrame: the cleaned frame (index mutated in place).

    Raises:
        ValueError: if df is not a pandas DataFrame.
    """
    # isinstance (rather than an exact type check) also accepts DataFrame
    # subclasses; the old error message referenced a nonexistent
    # 'output_type' parameter.
    if not isinstance(df, pd.DataFrame):
        raise ValueError('df must be a pandas dataframe')
    if stack_years and 'year' not in df.index.names:
        df = df.stack()
        util.replace_index_name(df, 'year')
        df.columns = ['value']
    # drop historical rows before the configured current year
    if 'year' in df.index.names:
        df = df[df.index.get_level_values('year') >= cfg.getParamAsInt('current_year')]
    dct = cfg.outputs_id_map
    # translate ids to names level by level; unmapped items pass through unchanged
    index = df.index
    index.set_levels([[dct[name].get(item, item) for item in level]
                      for name, level in zip(index.names, index.levels)], inplace=True)
    index.names = [x.upper() if isinstance(x, basestring) else x for x in index.names]
    if isinstance(df.columns, pd.MultiIndex):
        columns = df.columns
        columns.set_levels([[dct[name].get(item, item) for item in level]
                            for name, level in zip(columns.names, columns.levels)], inplace=True)
        columns.names = [x.upper() if isinstance(x, basestring) else x for x in columns.names]
    else:
        df.columns = [x.upper() if isinstance(x, basestring) else x for x in df.columns]
    return df
def process_active_shapes(self):
    """Run process_shape on every active shape, in parallel when configured."""
    logging.info(' mapping data for:')
    if not cfg.getParamAsBoolean('parallel_process'):
        # serial path: each shape processes itself in place
        for shape_id in self.data:
            self.data[shape_id].process_shape()
        return
    # parallel path: one pool task per shape, results re-keyed by shape id
    pool = pathos.multiprocessing.Pool(processes=cfg.getParamAsInt('num_cores'),
                                       maxtasksperchild=1)
    processed = pool.map(helper_multiprocess.process_shapes, self.data.values(), chunksize=1)
    pool.close()
    pool.join()
    self.data = dict(zip(self.data.keys(), processed))
def set_gen_technologies(self, geography, thermal_dispatch_df):
    """Cluster the thermal generators in *geography* and register each cluster."""
    def _column(key):
        # first column of the (key, geography) slice as a flat 1-D array
        sliced = util.df_slice(thermal_dispatch_df, [key, geography], ['IO', self.dispatch_geography])
        return np.array(sliced.values).T[0]

    pmax = _column('capacity')
    marginal_cost = _column('cost')
    MORs = _column('maintenance_outage_rate')
    FORs = _column('forced_outage_rate')
    must_run = _column('must_run')
    clustered = dispatch_generators.cluster_generators(
        n_clusters=cfg.getParamAsInt('generator_steps', 'opt'),
        pmax=pmax, marginal_cost=marginal_cost, FORs=FORs, MORs=MORs,
        must_run=must_run, pad_stack=False, zero_mc_4_must_run=True)
    cluster_count = len(clustered['derated_pmax'])
    for cluster_index in range(cluster_count):
        # generator names are globally unique: offset by geography position
        generator = str(cluster_count * self.dispatch_geographies.index(geography)
                        + cluster_index + 1)
        if generator not in self.generation_technologies:
            self.generation_technologies.append(generator)
        self.geography[generator] = geography
        self.feeder[generator] = 'bulk'
        self.min_capacity[generator] = 0
        self.capacity[generator] = clustered['derated_pmax'][cluster_index]
        self.variable_costs[generator] = clustered['marginal_cost'][cluster_index]
def export_io(self):
    """Write the supply-side IO table for selected years to s_io.csv."""
    step = cfg.getParamAsInt('io_table_write_step', 'output_detail')
    # every `step` years counting back from the final year, plus the first year
    write_years = sorted([min(self.supply.years)] +
                         range(max(self.supply.years), min(self.supply.years), -step))
    sectors = self.supply.demand_sectors
    annual_frames = []
    for year in write_years:
        stacked = pd.concat([self.supply.io_dict[year][sector] for sector in sectors],
                            keys=sectors, names=['sector'])
        # replicate across columns so the table is square by sector
        annual_frames.append(pd.concat([stacked] * len(sectors), keys=sectors,
                                       names=['sector'], axis=1))
    df = pd.concat(annual_frames, keys=write_years, names=['year'])
    # zero out all cross-sector cells; only within-sector flows are kept
    for row_sector in sectors:
        for col_sector in sectors:
            if row_sector == col_sector:
                continue
            row_idx = util.level_specific_indexer(df, 'sector', row_sector)
            col_idx = util.level_specific_indexer(df, 'sector', col_sector, axis=1)
            df.loc[row_idx, col_idx] = 0
    self.supply.outputs.io = df
    result_df = self.supply.outputs.return_cleaned_output('io')
    # prepend scenario name and timestamp as outer index levels
    for key, level_name in zip([self.scenario.name.upper(), cfg.timestamp],
                               ['SCENARIO', 'TIMESTAMP']):
        result_df = pd.concat([result_df], keys=[key], names=[level_name])
    Output.write(result_df, 's_io.csv', os.path.join(cfg.workingdir, 'supply_outputs'))
def __init__(self, database_path):
    """Load currency and inflation tables and register custom pint units.

    Args:
        database_path: path handed to get_database to open the model database.
    """
    # pint registry drives all unit conversions
    self.ureg = pint.UnitRegistry()
    self.cfg_energy_unit = cfg.getParam('calculation_energy_unit')
    self.cfg_currency = cfg.getParam('currency_name')
    self.cfg_currency_year = cfg.getParamAsInt('currency_year')
    db = get_database(database_path)
    self.currency_table = (db.get_table("CurrenciesConversion").data
                           .set_index(['currency', 'year'])
                           .sort_index())
    self.inflation_table = (db.get_table("InflationConversion").data
                            .set_index(['currency', 'year'])
                            .sort_index())
    # register project unit definitions, skipping any name pint already knows
    for unit_def in UnitConverter._unit_defs:
        unit_name = unit_def.split(' = ')[0]
        if hasattr(self.ureg, unit_name):
            logging.debug('pint already has unit {}, unit is not being redefined'.format(unit_name))
        else:
            self.ureg.define(unit_def)
def set_technologies(self, storage_capacity_dict, storage_efficiency_dict, thermal_dispatch_df):
    """Prepare storage and thermal technologies for the dispatch optimization.

    Args:
        storage_capacity_dict: nested dict keyed
            ['power'|'duration'][dispatch_geography][zone][feeder][tech]
            of storage discharge capacity and discharge duration.
        storage_efficiency_dict: nested dict keyed
            [dispatch_geography][zone][feeder][tech] of round-trip efficiency.
        thermal_dispatch_df: dataframe of thermal generator parameters,
            consumed by set_gen_technologies.

    Sets (keyed by tech_dispatch_id, the string form of the tuple
    (dispatch_geography, zone, feeder, tech)):
        capacity, duration, energy, feeder, geography, min_capacity,
        charging_efficiency, discharging_efficiency, large_storage; plus the
        alloc_* equivalents for large storage entering the energy-allocation
        problem, and thermal-generator attributes via set_gen_technologies.
    """
    self.geography = dict()
    self.min_capacity = dict()
    self.capacity = dict()
    self.charging_efficiency = dict()
    self.discharging_efficiency = dict()
    self.duration = dict()
    self.feeder = dict()
    self.large_storage = dict()
    self.energy = dict()
    self.storage_technologies = []
    self.alloc_technologies = []
    self.alloc_geography = dict()
    self.alloc_capacity = dict()
    self.alloc_energy = dict()
    self.alloc_charging_efficiency = dict()
    self.alloc_discharging_efficiency = dict()
    self.generation_technologies = []
    self.variable_costs = {}
    self.annual_ld_energy = util.recursivedict()
    self.ld_geography = util.recursivedict()
    self.ld_capacity = util.recursivedict()
    self.ld_min_capacity = util.recursivedict()
    self.ld_feeder = util.recursivedict()
    # loop-invariant config lookup hoisted out of the nested loops
    large_storage_duration = cfg.getParamAsInt('large_storage_duration', section='opt')
    for dispatch_geography in storage_capacity_dict['power'].keys():
        for zone in storage_capacity_dict['power'][dispatch_geography].keys():
            for feeder in storage_capacity_dict['power'][dispatch_geography][zone].keys():
                for tech in storage_capacity_dict['power'][dispatch_geography][zone][feeder].keys():
                    tech_dispatch_id = str((dispatch_geography, zone, feeder, tech))
                    self.storage_technologies.append(tech_dispatch_id)
                    self.geography[tech_dispatch_id] = dispatch_geography
                    self.min_capacity[tech_dispatch_id] = 0
                    self.capacity[tech_dispatch_id] = storage_capacity_dict['power'][dispatch_geography][zone][feeder][tech]
                    self.duration[tech_dispatch_id] = storage_capacity_dict['duration'][dispatch_geography][zone][feeder][tech]
                    self.energy[tech_dispatch_id] = self.capacity[tech_dispatch_id] * self.duration[tech_dispatch_id]
                    # one-way efficiency is sqrt of round-trip efficiency;
                    # guard against zero/NaN efficiencies producing inf
                    x = 1 / np.sqrt(storage_efficiency_dict[dispatch_geography][zone][feeder][tech])
                    if not np.isfinite(x):
                        x = 1
                    if self.duration[tech_dispatch_id] >= large_storage_duration:
                        # long-duration storage also enters the energy-allocation problem
                        self.alloc_technologies.append(tech_dispatch_id)
                        self.large_storage[tech_dispatch_id] = 1
                        self.alloc_geography[tech_dispatch_id] = dispatch_geography
                        self.alloc_capacity[tech_dispatch_id] = self.capacity[tech_dispatch_id] * len(self.hours)
                        self.alloc_energy[tech_dispatch_id] = self.energy[tech_dispatch_id]
                        self.alloc_charging_efficiency[tech_dispatch_id] = x
                        # discharging efficiency equals charging efficiency; the
                        # original deep-copied the whole dict per tech (accidental
                        # O(n^2)) just to read back this same value
                        self.alloc_discharging_efficiency[tech_dispatch_id] = x
                    else:
                        self.large_storage[tech_dispatch_id] = 0
                        self.charging_efficiency[tech_dispatch_id] = x
                        self.discharging_efficiency[tech_dispatch_id] = x
                    self.feeder[tech_dispatch_id] = feeder
    for dispatch_geography in GeoMapper.dispatch_geographies:
        self.set_gen_technologies(dispatch_geography, thermal_dispatch_df)
    self.convert_all_to_period()
    self.set_transmission_energy()
def calc_and_format_direct_demand_energy(self):
    """Map demand-side energy to the combined output geography and label it."""
    mapped = GeoMapper.geo_map(self.demand.outputs.d_energy.copy(),
                               GeoMapper.demand_primary_geography,
                               GeoMapper.combined_outputs_geography, 'total')
    cleaned = Output.clean_df(mapped)
    # keep only the current model year onward
    current_year = cfg.getParamAsInt('current_year')
    cleaned = cleaned[cleaned.index.get_level_values('YEAR') >= current_year]
    # tag every row as domestic, final-energy accounting
    return util.add_to_df_index(cleaned,
                                names=['EXPORT/DOMESTIC', "ENERGY ACCOUNTING"],
                                keys=['DOMESTIC', 'FINAL'])