def read_excel(self, fname):
    """Read Excel file data and load into the scenario.

    Parameters
    ----------
    fname : string
        path to file
    """
    funcs = {
        'set': self.add_set,
        'par': self.add_par,
    }

    dfs = pd_read(fname, sheet_name=None)

    # get item-type mapping
    df = dfs['ix_type_mapping']
    ix_types = dict(zip(df['item'], df['ix_type']))

    # fill in necessary items first (only sets for now)
    col = 0  # special case for prefill set Series

    def is_prefill(x):
        return dfs[x].columns[0] == col and len(dfs[x].columns) == 1

    prefill = [x for x in dfs if is_prefill(x)]

    for name in prefill:
        data = list(dfs[name][col])
        if len(data) > 0:
            ix_type = ix_types[name]
            funcs[ix_type](name, data)

    # fill all other pars and sets, skipping those already done
    skip_sheets = ['ix_type_mapping'] + prefill
    for sheet_name, df in dfs.items():
        if sheet_name not in skip_sheets and not df.empty:
            ix_type = ix_types[sheet_name]
            funcs[ix_type](sheet_name, df)
def read_excel(self, fname, add_units=False, commit_steps=False):
    """Read Excel file data and load into the scenario.

    Parameters
    ----------
    fname : string
        path to file
    add_units : bool
        add missing units, if any, to the platform instance.
        default: False
    commit_steps : bool
        commit changes after every data addition.
        default: False
    """
    funcs = {
        'set': self.add_set,
        'par': self.add_par,
    }

    logger().info('Reading data from {}'.format(fname))
    dfs = pd_read(fname, sheet_name=None)

    # get item-type mapping
    df = dfs['ix_type_mapping']
    ix_types = dict(zip(df['item'], df['ix_type']))

    # fill in necessary items first (only sets for now)
    col = 0  # special case for prefill set Series

    def is_prefill(x):
        return dfs[x].columns[0] == col and len(dfs[x].columns) == 1

    prefill = [x for x in dfs if is_prefill(x)]

    for name in prefill:
        data = list(dfs[name][col])
        if len(data) > 0:
            ix_type = ix_types[name]
            logger().info('Loading data for {}'.format(name))
            funcs[ix_type](name, data)

    if commit_steps:
        self.commit('Loaded initial data from {}'.format(fname))
        self.check_out()

    # fill all other pars and sets, skipping those already done
    skip_sheets = ['ix_type_mapping'] + prefill
    for sheet_name, df in dfs.items():
        if sheet_name not in skip_sheets and not df.empty:
            logger().info('Loading data for {}'.format(sheet_name))

            if add_units and 'unit' in df.columns:
                # add missing units
                units = set(self.platform.units())
                missing = set(df['unit'].unique()) - units
                for unit in missing:
                    logger().info('Adding missing unit: {}'.format(unit))
                    self.platform.add_unit(unit)

            # load data
            ix_type = ix_types[sheet_name]
            funcs[ix_type](sheet_name, df)

            if commit_steps:
                self.commit('Loaded {} from {}'.format(sheet_name, fname))
                self.check_out()
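
# Hedged usage sketch, not part of the original code: it assumes `read_excel`
# is a method of an ixmp Scenario, that the Excel file was produced by a
# matching `to_excel`-style export (so it contains the 'ix_type_mapping'
# sheet), and that `scen` is already checked out so add_set/add_par may be
# called. The function name, file name, and commit message are illustrative
# only.
def example_import(scen, fname='scenario_data.xlsx'):
    # register any units present in the file but not yet known to the platform
    scen.read_excel(fname, add_units=True)
    # persist the imported sets and parameters in a single commit
    scen.commit('Imported data from {}'.format(fname))
    return scen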
def make_obs(fname, exp, **kwargs):
    # write the expected frame to a temporary file, read it back, remove the
    # file, and return the observed frame for comparison in tests
    utils.pd_write(exp, fname, index=False)
    obs = utils.pd_read(fname, **kwargs)
    os.remove(fname)
    return obs
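
# Hedged round-trip sketch, not part of the original code: it assumes `utils`
# is the module providing pd_write/pd_read and that an Excel writer engine is
# installed. The test name, file name, and frame contents are illustrative
# only.
def test_pd_io_roundtrip():
    import pandas as pd
    import pandas.testing as pdt

    exp = pd.DataFrame({'foo': ['bar', 'baz'], 'value': [1.5, 2.5]})
    # write the frame, read it back through the same utils helpers, and check
    # that nothing was lost or re-typed along the way
    obs = make_obs('test_roundtrip.xlsx', exp)
    pdt.assert_frame_equal(obs, exp)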