class MultistageStandalone(Solution_UC_multistage):
    '''
    A multi-stage solution reconstructed from a standalone results store
    rather than from in-memory stage solutions.
    '''
    def __init__(self, power_system, stage_times, store):
        self.power_system = power_system
        self.is_stochastic = power_system.is_stochastic
        # a second "observed" solve exists if the problem was stochastic, or
        # a deterministic/perfect-forecast resolve was requested
        self._resolved = (self.is_stochastic
                          or user_config.deterministic_solve
                          or user_config.perfect_solve)

        # stitch the non-overlapping portion of each stage into one index
        combined_strings = [stage.non_overlap().strings
                            for stage in stage_times]
        self.times = TimeIndex(pd.concat(combined_strings).index)
        self.times.set_initial(stage_times[0].initialTime)

        self.expected_cost = self.totalcost_generation = \
            store['expected_cost']
        if self._resolved:
            # the observed cost supersedes the expected cost as the total
            self.observed_cost = self.totalcost_generation = \
                store['observed_cost']

        self.generators_power = store['power']
        self.generators_status = store['status']
        self.load_shed_timeseries = store['load_shed']
        self.gen_shed_timeseries = store['gen_shed']
        self.load_shed = store['load_shed'].sum()
        self.gen_shed = store['gen_shed'].sum()
        self.solve_time = store['solve_time'].sum()
def __init__(self, power_system, stage_times, store):
    '''Populate the solution's attributes from a results `store`.'''
    self.power_system = power_system
    self.is_stochastic = power_system.is_stochastic
    resolved = (self.is_stochastic or
                user_config.deterministic_solve or
                user_config.perfect_solve)
    self._resolved = resolved

    # build a single time index out of each stage's non-overlap strings
    per_stage = [stage.non_overlap().strings for stage in stage_times]
    self.times = TimeIndex(pd.concat(per_stage).index)
    self.times.set_initial(stage_times[0].initialTime)

    self.expected_cost = self.totalcost_generation = store['expected_cost']
    if resolved:
        # when re-solved, the observed cost becomes the reported total
        self.observed_cost = self.totalcost_generation = \
            store['observed_cost']

    self.generators_power = store['power']
    self.generators_status = store['status']
    self.load_shed_timeseries = store['load_shed']
    self.gen_shed_timeseries = store['gen_shed']
    self.load_shed = store['load_shed'].sum()
    self.gen_shed = store['gen_shed'].sum()
    self.solve_time = store['solve_time'].sum()
def load_state():
    '''
    Rebuild the power system, time index, and scenario tree from the
    results storage, and restore each generator's initial condition.
    '''
    storage = get_storage()
    user_config.update(storage['configuration'].to_dict())

    # rebuild the time index, starting at the stored first label (e.g. 't5')
    start_index = int(storage['times'][0].strip('t'))
    times = TimeIndex(storage['times'].index, start_index)
    hours_per_interval = (
        times.strings.index[1] - times.strings.index[0]
    ).total_seconds() / 3600.0
    times._int_division = user_config.hours_commitment / hours_per_interval
    times._int_overlap = user_config.hours_overlap / hours_per_interval
    if len(times) <= times._int_division:
        # the final stage gets no overlap
        times._int_overlap = 0

    # create the power system from storage
    power_system, times, scenario_tree = parse_standalone(storage, times)

    # restore every generator's state at the initial time
    initial_time = times.initialTime
    initial_status = correct_status(storage['status']).ix[initial_time]
    for gen in power_system.generators():
        name = str(gen)
        gen.set_initial_condition(
            power=storage['power'][name][initial_time],
            status=initial_status[name],
            hoursinstatus=storage['hrsinstatus'][name][initial_time])
    return power_system, times, scenario_tree
def load_state():
    '''
    Load a saved problem state: update the user configuration, rebuild the
    times, parse the stored power system, and seed generator initial
    conditions from the stored power/status history.
    '''
    store = get_storage()
    user_config.update(store['configuration'].to_dict())

    # times: the stored labels look like 't<N>'; recover the start offset
    times = TimeIndex(store['times'].index,
                      int(store['times'][0].strip('t')))
    delta = times.strings.index[1] - times.strings.index[0]
    intervalhrs = delta.total_seconds() / 3600.0
    times._int_division = user_config.hours_commitment / intervalhrs
    times._int_overlap = user_config.hours_overlap / intervalhrs
    # dont set overlap for last stage
    if len(times) <= times._int_division:
        times._int_overlap = 0

    # power system
    power_system, times, scenario_tree = parse_standalone(store, times)
    generators = power_system.generators()

    # initial state at the first time step
    t = times.initialTime
    status_at_t = correct_status(store['status']).ix[t]
    for gen in generators:
        key = str(gen)
        gen.set_initial_condition(
            power=store['power'][key][t],
            status=status_at_t[key],
            hoursinstatus=store['hrsinstatus'][key][t])
    return power_system, times, scenario_tree
def __init__(self, power_system, stage_times, stage_solutions):
    '''Aggregate the per-stage solutions into one multi-stage solution.'''
    update_attributes(
        self, locals(), exclude=['stage_solutions', 'stage_times'])
    self._resolved = (power_system.is_stochastic
                      or user_config.deterministic_solve
                      or user_config.perfect_solve)
    self.is_stochastic = any(sln.is_stochastic for sln in stage_solutions)

    # one unified time index from the non-overlapping part of every stage
    stage_strings = [stage.non_overlap().strings for stage in stage_times]
    self.times = TimeIndex(pd.concat(stage_strings).index)
    self.times.set_initial(stage_times[0].initialTime)

    self.objective = self._sum_over('objective', stage_solutions)
    self.solve_time = self._sum_over('solve_time', stage_solutions)
    self.mipgaps = pd.Series([sln.mipgap for sln in stage_solutions])
    self._get_outputs(stage_solutions)
    self._get_costs(stage_solutions)
    self._get_prices(stage_solutions)
class Solution_UC_multistage(Solution_UC): ''' Muti-stage unit commitment. Each stage represents one optimization problem. Each element of the list :param:stage_solutions is a :class:`~results.Solution_UC` object. ''' def __init__(self, power_system, stage_times, stage_solutions): update_attributes(self, locals(), exclude=['stage_solutions', 'stage_times']) self._resolved = power_system.is_stochastic \ or user_config.deterministic_solve or user_config.perfect_solve self.is_stochastic = any(sln.is_stochastic for sln in stage_solutions) times = pd.concat( [times.non_overlap().strings for times in stage_times]).index self.times = TimeIndex(times) self.times.set_initial(stage_times[0].initialTime) self.objective = self._sum_over('objective', stage_solutions) self.solve_time = self._sum_over('solve_time', stage_solutions) self.mipgaps = pd.Series([sln.mipgap for sln in stage_solutions]) self._get_outputs(stage_solutions) self._get_costs(stage_solutions) self._get_prices(stage_solutions) def _sum_over(self, attrib, stage_solutions): return sum(getattr(sln, attrib) for sln in stage_solutions) def _concat(self, attrib, slns): return pd.concat([getattr(sln, attrib) for sln in slns]) def _get_outputs(self, slns): '''the outputs under observed wind''' self.generators_power = self._concat('generators_power', slns) self.generators_status = self._concat('generators_status', slns) if self._resolved: self.expected_power = self._concat('expected_power', slns) self.expected_status = self._concat('expected_status', slns) def _get_costs(self, slns): self.expected_cost = self.totalcost_generation = \ self._concat('expected_totalcost' if self._resolved else 'totalcost_generation', slns) self.load_shed_timeseries = self._concat('load_shed_timeseries', slns) self.gen_shed_timeseries = self._concat('gen_shed_timeseries', slns) self.load_shed = self.load_shed_timeseries.sum() self.gen_shed = self.gen_shed_timeseries.sum() if self._resolved: self.observed_cost = self.totalcost_generation = \ 
self._concat('observed_totalcost', slns) def info_cost(self): resolved = self._resolved expected = 'expected ' if resolved else '' observed = 'observed ' if resolved else '' out = [] out.append('total {}generation cost = {}'.format( expected, self.expected_cost.sum().sum())) if resolved: out.append('total {}generation cost = {}'.format( observed, self.observed_cost.sum().sum())) return out def _get_prices(self, stage_solutions): self.lmps = {} try: for stage in stage_solutions: self.lmps.update(stage.lmps) except: # no lmps in stochastic solutions right now pass def show(self): out = [] out.extend(self.info_status()) out.extend(self.info_cost()) out.extend(self.info_shedding()) print '\n'.join(out) def info_generators(self): return [] def info_loads(self): return [] def info_status(self): return ['solved multistage problem in a total solver time ' + 'of {time:0.4f} sec'.format(time=self.solve_time)] def info_shedding(self): return [ 'total load shed={}MW'.format(self.load_shed) if self.load_shed > 0.01 else '', 'total gen shed={}MW'.format(self.gen_shed) if self.gen_shed > 0.01 else '', ]
def setup_times(generators_data, loads_data):
    """
    Create a :class:`~schedule.TimeIndex` object from the schedule files.
    Also create a unified DataFrame of all the schedules, `timeseries`.
    If there are no schedule files (as in ED,OPF), create an index with
    just a single time.
    """
    # schedule columns: filename column is input, name column is assigned here
    fcol = 'schedulefilename'
    ncol = 'schedulename'
    loads_data[ncol] = None
    generators_data[ncol] = None
    if fcol not in loads_data.columns:
        loads_data[fcol] = None
    if fcol not in generators_data.columns:
        generators_data[fcol] = None
    datadir = user_config.directory
    timeseries = {}

    def filter_notnull(df, col):
        # rows that actually have a value in `col`
        return df[df[col].notnull()]

    # load schedules: named d<row-index>, scaled by the configured
    # load multiplier/adder
    for i, load in filter_notnull(loads_data, fcol).iterrows():
        name = 'd{}'.format(i)
        loads_data.ix[i, ncol] = name
        timeseries[name] = get_schedule(joindir(datadir, load[fcol])) * \
            user_config.load_multiplier + user_config.load_adder

    # generator schedules: named g<row-index>, unscaled
    for i, gen in filter_notnull(generators_data, fcol).iterrows():
        name = 'g{}'.format(i)
        generators_data.ix[i, ncol] = name
        timeseries[name] = get_schedule(joindir(datadir, gen[fcol]))

    # handle observed and forecast power
    fobscol = 'observedfilename'
    obscol = 'observedname'
    ffcstcol = 'forecastfilename'
    fcstcol = 'forecastname'

    obs_name = None
    if fobscol in generators_data:
        generators_data[obscol] = None
        for i, gen in filter_notnull(generators_data, fobscol).iterrows():
            obs_name = 'g{}_observations'.format(i)
            generators_data.ix[i, obscol] = obs_name
            timeseries[obs_name] = get_schedule(joindir(datadir, gen[fobscol]))
            if user_config.wind_multiplier != 1.0:
                timeseries[obs_name] *= user_config.wind_multiplier
        # the filename column is consumed; only the name column remains
        generators_data = generators_data.drop(fobscol, axis=1)

    fcst_name = None
    if ffcstcol in generators_data:
        generators_data[fcstcol] = None
        for i, gen in filter_notnull(generators_data, ffcstcol).iterrows():
            fcst_name = 'g{}_forecast'.format(i)
            generators_data.ix[i, fcstcol] = fcst_name
            timeseries[fcst_name] = get_schedule(joindir(datadir, gen[ffcstcol])) * \
                user_config.wind_multiplier + user_config.wind_forecast_adder
            if user_config.wind_error_multiplier != 1.0:
                # rescale the forecast error around the observation.
                # NOTE(review): assumes generator i also has an observation
                # series (g<i>_observations) already loaded above — confirm
                # for inputs with a forecast but no observation file.
                logging.debug('scaling wind forecast error')
                obs_name = 'g{}_observations'.format(i)
                error = timeseries[fcst_name] - timeseries[obs_name]
                timeseries[fcst_name] = timeseries[obs_name] + \
                    error * user_config.wind_error_multiplier
            if (timeseries[fcst_name] < 0).any():
                # clip negative forecasts to zero (can result from the
                # adder/error scaling above)
                print timeseries[fcst_name].describe()
                logging.warning('Wind forecast must always be at least zero.')
                timeseries[fcst_name][timeseries[fcst_name] < 0] = 0
        generators_data = generators_data.drop(ffcstcol, axis=1)

    generators_data = generators_data.drop(fcol, axis=1)
    loads_data = loads_data.drop(fcol, axis=1)

    if len(timeseries) == 0:
        # this is a ED or OPF problem - only one time
        return DataFrame(), just_one_time(), generators_data, loads_data

    timeseries = DataFrame(timeseries)
    times = TimeIndex(timeseries.index)
    timeseries.index = times.strings.values

    if user_config.wind_capacity_factor != 0:
        # rescale wind so its capacity factor (energy relative to total
        # load energy) matches the configured target; only supported for
        # a single wind generator, whose series is `obs_name` from above
        if len(filter_notnull(generators_data, obscol)) != 1:
            raise NotImplementedError(
                'wind capacity factor only works with one wind generator')
        # load columns are the ones named d<row-index> above
        all_loads = timeseries[filter(lambda col: col.startswith('d'),
                                      timeseries.columns)]
        capf_current = timeseries[obs_name].sum() / all_loads.sum(axis=1).sum()
        wind_mult = user_config.wind_capacity_factor / capf_current
        user_config.wind_multiplier = wind_mult
        logging.info('scaling wind from a c.f. of {} to a c.f. of {}'.format(
            capf_current, user_config.wind_capacity_factor))
        timeseries[obs_name] *= wind_mult
        if fcst_name:
            timeseries[fcst_name] *= wind_mult

    return timeseries, times, generators_data, loads_data