# Assumes module-level imports: OrderedDict (collections), and GdxFile and Error (gdxpds.gdx).
class Translator(object):

    def __init__(self, gdx_file, gams_dir=None, lazy_load=False):
        self.__gdx = GdxFile(gams_dir=gams_dir, lazy_load=lazy_load)
        self.__gdx.read(gdx_file)
        self.__dataframes = None

    def __exit__(self, *args):
        # Forward only (exc_type, exc_value, traceback); the bound call already
        # supplies the GdxFile instance.
        self.__gdx.__exit__(*args)

    def __del__(self):
        self.__gdx.__del__()

    @property
    def gams_dir(self):
        return self.gdx.gams_dir

    @gams_dir.setter
    def gams_dir(self, value):
        self.gdx.gams_dir = value

    @property
    def gdx_file(self):
        return self.gdx.filename

    @gdx_file.setter
    def gdx_file(self, value):
        self.__gdx.__del__()
        self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir, lazy_load=self.gdx.lazy_load)
        self.__gdx.read(value)
        self.__dataframes = None

    @property
    def gdx(self):
        return self.__gdx

    @property
    def dataframes(self):
        if self.__dataframes is None:
            self.__dataframes = OrderedDict()
            for symbol in self.__gdx:
                if not symbol.loaded:
                    symbol.load()
                self.__dataframes[symbol.name] = symbol.dataframe.copy()
        return self.__dataframes

    @property
    def symbols(self):
        return [symbol_name for symbol_name in self.gdx]

    def dataframe(self, symbol_name):
        if symbol_name not in self.gdx:
            raise Error("No symbol named '{}' in '{}'.".format(
                symbol_name, self.gdx_file))
        if not self.gdx[symbol_name].loaded:
            self.gdx[symbol_name].load()
        # Previously this returned { symbol_name: dataframe }, which was unintuitive;
        # it now returns the DataFrame itself.
        return self.gdx[symbol_name].dataframe.copy()
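A minimal usage sketch for the GDX-to-DataFrame Translator above, assuming it is importable from this module; the file name 'results.gdx' and symbol name 'Capacity' are placeholders, not from the source.

# Hypothetical usage of the Translator defined above; names are placeholders.
translator = Translator('results.gdx', lazy_load=True)
print(translator.symbols)                      # all symbol names in the file
capacity = translator.dataframe('Capacity')    # one symbol as a pandas DataFrame copy
all_dfs = translator.dataframes                # OrderedDict of symbol name -> DataFrame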
def collect_results(self):
    # Assumes module-level `import os`; self.outdir and self.request come from the enclosing class.
    result_file = os.path.join(self.outdir, 'MatchGenerationMix_p.gdx')
    if not os.path.exists(result_file):
        return False

    from gdxpds.gdx import GdxFile
    with GdxFile() as p_gdx:
        p_gdx.read(result_file)
        variables = [
            ('Capacity', self.request.generators_columns()),
            ('CapacityAdded', self.request.generators_columns()),
            ('CapacityKept', self.request.generators_columns()),
            ('CapacitySwapped', self.request.generators_swapped_columns()),
            ('CapacityRemoved', self.request.generators_columns()),
            ('Distance', ['Level'])
        ]
        capacity_column = self.request.generators_columns()[-1]
        args = []
        for variable_name, column_names in variables:
            p_gdx[variable_name].load()
            # Keep only the leading columns of interest; variable dataframes carry
            # additional value columns after these.
            tmp = p_gdx[variable_name].dataframe.iloc[:, :len(column_names)]
            tmp.columns = column_names
            # clear out zero-capacity entries
            if capacity_column in tmp.columns:
                tmp = tmp[tmp[capacity_column] > 0.0]
            args.append(tmp)
        # Distance is the last, scalar result; pass its Level value rather than a DataFrame.
        args[-1] = args[-1]['Level'].values[0]
        self.request.register_results(*args)
    return True
def gdx(self):
    if self.__gdx is None:
        self.__gdx = GdxFile(gams_dir=self.__gams_dir)
        for symbol_name, df in self.dataframes.items():
            self.__add_symbol_to_gdx(symbol_name, df)
    return self.__gdx
# Assumes module-level imports: pandas as pds, a module logger, Number (numbers), and
# Error, GdxFile, GdxSymbol, GamsDataType, GAMS_VALUE_COLS_MAP (gdxpds.gdx).
class Translator(object):

    def __init__(self, dataframes, gams_dir=None):
        self.dataframes = dataframes
        self.__gams_dir = gams_dir

    def __exit__(self, *args):
        if self.__gdx is not None:
            self.__gdx.__exit__(*args)

    def __del__(self):
        if self.__gdx is not None:
            self.__gdx.__del__()

    @property
    def dataframes(self):
        return self.__dataframes

    @dataframes.setter
    def dataframes(self, value):
        err_msg = "Expecting map of name, pandas.DataFrame pairs."
        try:
            for symbol_name, df in value.items():
                if not isinstance(symbol_name, str):
                    raise Error(err_msg)
                if not isinstance(df, pds.DataFrame):
                    raise Error(err_msg)
        except AttributeError:
            raise Error(err_msg)
        self.__dataframes = value
        self.__gdx = None

    @property
    def gams_dir(self):
        return self.__gams_dir

    @gams_dir.setter
    def gams_dir(self, value):
        self.__gams_dir = value

    @property
    def gdx(self):
        if self.__gdx is None:
            self.__gdx = GdxFile(gams_dir=self.__gams_dir)
            for symbol_name, df in self.dataframes.items():
                self.__add_symbol_to_gdx(symbol_name, df)
        return self.__gdx

    def save_gdx(self, path, gams_dir=None):
        if gams_dir is not None:
            self.__gams_dir = gams_dir
        self.gdx.write(path)

    def __add_symbol_to_gdx(self, symbol_name, df):
        data_type, num_dims = self.__infer_data_type(symbol_name, df)
        logger.info("Inferred data type of {} to be {}.".format(
            symbol_name, data_type.name))
        self.__gdx.append(GdxSymbol(symbol_name, data_type, dims=num_dims))
        self.__gdx[symbol_name].dataframe = df
        return

    def __infer_data_type(self, symbol_name, df):
        """
        Returns
        -------
        (gdxpds.GamsDataType, int)
            symbol type and number of dimensions implied by df
        """
        # See if the structure implies that symbol_name may be a Variable or an
        # Equation. If so, break the tie based on naming convention--Variables
        # start with upper case, Equations start with lower case.
        var_or_eqn = False
        df_col_names = df.columns
        var_eqn_col_names = [col_name for col_name, col_ind
                             in GAMS_VALUE_COLS_MAP[GamsDataType.Variable]]
        if len(df_col_names) >= len(var_eqn_col_names):
            # might be a Variable or an Equation
            var_or_eqn = True
            trunc_df_col_names = df_col_names[len(df_col_names) - len(var_eqn_col_names):]
            for i, df_col in enumerate(trunc_df_col_names):
                if df_col and (df_col.lower() != var_eqn_col_names[i].lower()):
                    var_or_eqn = False
                    break
            if var_or_eqn:
                num_dims = len(df_col_names) - len(var_eqn_col_names)
                if symbol_name[0].upper() == symbol_name[0]:
                    return GamsDataType.Variable, num_dims
                else:
                    return GamsDataType.Equation, num_dims

        # Parameter or Set
        num_dims = len(df_col_names) - 1
        if len(df.index) > 0:
            if isinstance(df.loc[df.index[0], df.columns[-1]], Number):
                return GamsDataType.Parameter, num_dims
        return GamsDataType.Set, num_dims
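A minimal sketch of driving the DataFrame-to-GDX Translator above, assuming pandas is imported as pds as in the class body; the symbol name 'load' and output path 'out.gdx' are placeholders, not from the source.

# Hypothetical usage of the write-side Translator above; names are placeholders.
df = pds.DataFrame([['n1', 1.0], ['n2', 2.5]], columns=['n', 'Value'])
translator = Translator({'load': df})   # numeric last column -> inferred as a Parameter
translator.save_gdx('out.gdx')          # gams_dir can be supplied here if GAMS is not on the path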
def setup(self, gendists=None, precision=0):
    gendists_df, desired_capacity_df = super().setup(gendists=gendists, precision=precision)

    from gdxpds.gdx import GdxFile, GdxSymbol, GamsDataType
    with GdxFile() as ingdx:
        # Sets
        ingdx.append(GdxSymbol('n', GamsDataType.Set, dims=['n']))
        df = pds.DataFrame(self.request.nodes['node_id'])
        df['Value'] = True
        ingdx[-1].dataframe = df

        ingdx.append(GdxSymbol('g', GamsDataType.Set, dims=['g']))
        df = pds.DataFrame([[g, True] for g in self.request.gentypes],
                           columns=['g', 'Value'])
        ingdx[-1].dataframe = df

        ingdx.append(GdxSymbol('g_indep', GamsDataType.Set, dims=['g']))
        df = pds.DataFrame([[g, True] for g in self.request.gentypes
                            if g in self.request.RESOURCE_INDEPENDENT],
                           columns=['g', 'Value'])
        ingdx[-1].dataframe = df

        ingdx.append(GdxSymbol('g_dep', GamsDataType.Set, dims=['g']))
        df = pds.DataFrame([[g, True] for g in self.request.gentypes
                            if g not in self.request.RESOURCE_INDEPENDENT],
                           columns=['g', 'Value'])
        ingdx[-1].dataframe = df

        # Parameters
        ingdx.append(GdxSymbol('desired_capacity', GamsDataType.Parameter, dims=['g']))
        ingdx[-1].dataframe = desired_capacity_df

        ingdx.append(GdxSymbol('current_capacity', GamsDataType.Parameter, dims=['n', 'g']))
        # pivot with sum on capacity in case there are multiple units of type g at node n
        df = pds.pivot_table(self.request.generators, values='capacity (MW)',
                             index=['node_id', 'generator type'], aggfunc=np.sum)
        df = df.reset_index()
        df.columns = ['n', 'g', 'Value']
        ingdx[-1].dataframe = df

        ingdx.append(GdxSymbol('g_dist', GamsDataType.Parameter, dims=['g', 'gg']))
        ingdx[-1].dataframe = gendists_df

        ingdx.append(GdxSymbol('current_indep_capacity', GamsDataType.Parameter, dims=['n']))
        df = pds.pivot_table(
            self.request.generators[
                self.request.generators['generator type'].isin(
                    self.request.RESOURCE_INDEPENDENT)],
            values='capacity (MW)', index=['node_id'], aggfunc=np.sum)
        df = df.reset_index()
        df.columns = ['n', 'Value']
        ingdx[-1].dataframe = df

        ingdx.append(GdxSymbol('maximum_capacity', GamsDataType.Parameter, dims=['n', 'g_dep']))
        data = []
        for g in self.request.gentypes:
            if g in self.request.RESOURCE_INDEPENDENT:
                continue
            if g in self.request.nodes:
                tmp = pds.DataFrame(self.request.nodes['node_id'])
                tmp['g_dep'] = g
                tmp['Value'] = self.request.nodes[g]
                data.append(tmp)
        df = pds.concat(data)
        df.columns = ['n', 'g_dep', 'Value']
        ingdx[-1].dataframe = df

        ingdx.write(os.path.join(self.outdir, 'in.gdx'))
def gdx_file(self, value):
    self.__gdx.__del__()
    self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir, lazy_load=self.gdx.lazy_load)
    self.__gdx.read(value)
    self.__dataframes = None
def __init__(self, gdx_file, gams_dir=None, lazy_load=False):
    self.__gdx = GdxFile(gams_dir=gams_dir, lazy_load=lazy_load)
    self.__gdx.read(gdx_file)
    self.__dataframes = None