def read_add(system, file):
    """
    Read an addition PSS/E dyr file.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional `dyr` file

    Returns
    -------
    bool
        data parsing status
    """
    # parse the raw dyr text into {PSS/E model name: DataFrame of parameters}
    dyr_dict = _read_dyr_dict(file)
    # keep the parsed tables on the system for later inspection
    system.dyr_dict = dyr_dict

    # read yaml and set header for each pss/e model
    dirname = os.path.dirname(__file__)
    with open(f'{dirname}/psse-dyr.yaml', 'r') as f:
        dyr_yaml = yaml.full_load(f)
    # models ordered so that dependencies are processed first
    sorted_models = sort_psse_models(dyr_yaml, system)

    # attach column headers (from the yaml spec) to each parsed table
    for psse_model in dyr_dict:
        if psse_model in dyr_yaml:
            if 'inputs' in dyr_yaml[psse_model]:
                dyr_dict[psse_model].columns = dyr_yaml[psse_model]['inputs']

    # collect not supported models
    not_supported = []
    for model in dyr_dict:
        if model not in sorted_models:
            not_supported.append(model)

    # print out debug messages
    if len(dyr_dict):
        logger.debug('dyr contains models %s', ", ".join(dyr_dict.keys()))

    if len(not_supported):
        logger.warning('Models not yet supported: %s', ", ".join(not_supported))
    else:
        logger.debug('All dyr models are supported.')

    # load data into models
    for psse_model in sorted_models:
        if psse_model not in dyr_dict:
            # device not exist
            continue

        if psse_model not in dyr_yaml:
            logger.error(f"PSS/E Model <{psse_model}> is not supported.")
            continue

        logger.debug(f'Parsing PSS/E model {psse_model}')

        dest = dyr_yaml[psse_model]['destination']
        find = {}

        # 'find': resolve device idx in other models by matching conditions
        if 'find' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['find'].items():
                for model, conditions in source.items():
                    # NOTE: pop() mutates the loaded yaml dict in place
                    allow_none = conditions.pop('allow_none', 0)
                    cond_names = conditions.keys()
                    cond_values = []
                    for col in conditions.values():
                        # condition values may refer to earlier 'find' results
                        if col in find:
                            cond_values.append(find[col])
                        else:
                            cond_values.append(dyr_dict[psse_model][col])
                    try:
                        logger.debug(
                            "<%s> trying to find <%s> using cond_names=%s and cond_values=%s",
                            psse_model, model, cond_names, cond_values)
                        logger.debug("<%s> contains %d devices", model, system.__dict__[model].n)
                        find[name] = system.__dict__[model].find_idx(
                            cond_names, cond_values, allow_none=allow_none)
                    except IndexError as e:
                        logger.error(
                            "Data file likely contains references to unsupported models."
                        )
                        logger.error(e)
                        return False

        # 'get': pull parameter values from other models by idx
        if 'get' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['get'].items():
                for model, conditions in source.items():
                    idx_name = conditions['idx']
                    # idx may be a dyr column or a previous 'find' result
                    if idx_name in dyr_dict[psse_model]:
                        conditions['idx'] = dyr_dict[psse_model][idx_name]
                    else:
                        conditions['idx'] = find[idx_name]
                    find[name] = system.__dict__[model].get(**conditions)

        # 'outputs': map dyr columns / find results / expressions to the
        # destination model's parameters
        if 'outputs' in dyr_yaml[psse_model]:
            output_keys = list(dyr_yaml[psse_model]['outputs'].keys())
            output_exprs = list(dyr_yaml[psse_model]['outputs'].values())

            out_dict = {}

            for idx in range(len(output_exprs)):
                out_key = output_keys[idx]
                expr = output_exprs[idx]
                if expr in find:
                    out_dict[out_key] = find[expr]
                elif ';' in expr:
                    # "args;func" form: evaluate func over the listed columns
                    args, func = expr.split(';')
                    # NOTE(review): eval() executes code from the packaged yaml
                    # spec — verify this file can never be user-supplied
                    func = eval(func)
                    args = args.split(',')
                    # support local and external model parameters
                    argv = list()
                    for param in args:
                        if '.' in param:
                            # "Model.param" refers to another parsed dyr table
                            argv.append(param.split('.'))
                        else:
                            # bare name refers to the current model's table
                            argv.append((psse_model, param))
                    argv = [dyr_dict[model][param] for model, param in argv]
                    out_dict[output_keys[idx]] = func(*argv)
                else:
                    out_dict[output_keys[idx]] = dyr_dict[psse_model][expr]

            df = pd.DataFrame.from_dict(out_dict)
            # add one device per row to the destination model
            for row in df.to_dict(orient='records'):
                system.add(dest, row)

        system.link_ext_param(system.__dict__[dest])

    return True
def read_add(system, file):
    """
    Read addition PSS/E dyr file.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional ``dyr`` file

    Returns
    -------
    bool
        True when parsing completes
    """
    warn_experimental("PSS/E dyr support")

    with open(file, 'r') as f:
        input_list = [line.strip() for line in f]

    # concatenate multi-line device data
    input_concat_dict = defaultdict(list)
    multi_line = list()
    for i, line in enumerate(input_list):
        if line == '':
            continue
        if '/' not in line:
            # record continues on the next line
            multi_line.append(line)
        else:
            # '/' terminates a record; text after it is discarded
            multi_line.append(line.split('/')[0])
            single_line = ' '.join(multi_line)
            # model name is the first quoted token of the record
            single_list = single_line.split("'")
            psse_model = single_list[1].strip()
            input_concat_dict[psse_model].append(single_list[0] + single_list[2])
            multi_line = list()

    # construct pandas dataframe for all models
    dyr_dict = dict()
    for psse_model, all_rows in input_concat_dict.items():
        dev_params_num = [([to_number(cell) for cell in row.split()])
                          for row in all_rows]
        dyr_dict[psse_model] = pd.DataFrame(dev_params_num)

    # read yaml and set header for each pss/e model
    dirname = os.path.dirname(__file__)
    with open(f'{dirname}/psse-dyr.yaml', 'r') as f:
        dyr_yaml = yaml.full_load(f)

    # attach column headers (from the yaml spec) to each parsed table
    for psse_model in dyr_dict:
        if psse_model in dyr_yaml:
            if 'inputs' in dyr_yaml[psse_model]:
                dyr_dict[psse_model].columns = dyr_yaml[psse_model]['inputs']

    # load data into models
    for psse_model in dyr_dict:
        if psse_model not in dyr_yaml:
            logger.error(f"PSS/E Model <{psse_model}> is not supported.")
            continue

        dest = dyr_yaml[psse_model]['destination']
        find = {}

        # 'find': resolve device idx in other models by matching conditions
        if 'find' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['find'].items():
                for model, conditions in source.items():
                    cond_names = conditions.keys()
                    cond_values = [
                        dyr_dict[psse_model][col] for col in conditions.values()
                    ]
                    find[name] = system.__dict__[model].find_idx(
                        cond_names, cond_values)

        # 'get': pull parameter values from other models by idx
        if 'get' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['get'].items():
                for model, conditions in source.items():
                    idx_name = conditions['idx']
                    # idx may be a dyr column or a previous 'find' result
                    if idx_name in dyr_dict[psse_model]:
                        conditions['idx'] = dyr_dict[psse_model][idx_name]
                    else:
                        conditions['idx'] = find[idx_name]
                    find[name] = system.__dict__[model].get(**conditions)

        # 'outputs': map dyr columns / find results / expressions to the
        # destination model's parameters
        if 'outputs' in dyr_yaml[psse_model]:
            output_keys = list(dyr_yaml[psse_model]['outputs'].keys())
            output_exprs = list(dyr_yaml[psse_model]['outputs'].values())

            out_dict = {}

            for idx in range(len(output_exprs)):
                out_key = output_keys[idx]
                expr = output_exprs[idx]
                if expr in find:
                    out_dict[out_key] = find[expr]
                elif ';' in expr:
                    # "Model.col,...;func" form: evaluate func over columns
                    args, func = expr.split(';')
                    # NOTE(review): eval() executes code from the packaged yaml
                    # spec — verify this file can never be user-supplied
                    func = eval(func)
                    args = args.split(',')
                    argv = [pairs.split('.') for pairs in args]
                    argv = [dyr_dict[model][param] for model, param in argv]
                    out_dict[output_keys[idx]] = func(*argv)
                else:
                    out_dict[output_keys[idx]] = dyr_dict[psse_model][expr]

            df = pd.DataFrame.from_dict(out_dict)
            # add one device per row to the destination model
            for row in df.to_dict(orient='records'):
                system.add(dest, row)

        system.link_ext_param(system.__dict__[dest])

    return True
https://github.com/anderson-optimization/em-psse License Pending """ import os from io import StringIO from andes.shared import pd, yaml import logging logger = logging.getLogger(__name__) dirname = os.path.dirname(__file__) with open('{}/psse-modes.yaml'.format(dirname), 'r') as in_file: modes = yaml.full_load(in_file) def get_signals(line_num, line, current_mode): # print(line_num,line) signals = [] for m in modes: for s in m['signal']: if 'text' in s: if s['text'] in line: signals.append((s, m)) if 'line' in s: if s['line'] == line_num: signals.append((s, m)) return signals
def read_add(system, file):
    """
    Read an addition PSS/E dyr file.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional `dyr` file

    Returns
    -------
    bool
        data parsing status
    """
    with open(file, 'r') as f:
        input_list = [line.strip() for line in f]

    # concatenate multi-line device data
    input_concat_dict = defaultdict(list)
    multi_line = list()
    for i, line in enumerate(input_list):
        if line == '':
            continue
        if '/' not in line:
            # record continues on the next line
            multi_line.append(line)
        else:
            # '/' terminates a record; text after it is discarded
            multi_line.append(line.split('/')[0])
            single_line = ' '.join(multi_line)
            if single_line.strip() == '':
                continue
            # model name is the first quoted token of the record
            single_list = single_line.split("'")
            psse_model = single_list[1].strip()
            input_concat_dict[psse_model].append(single_list[0] + single_list[2])
            multi_line = list()

    # construct pandas dataframe for all models
    dyr_dict = dict()  # input data from dyr file
    for psse_model, all_rows in input_concat_dict.items():
        dev_params_num = [([to_number(cell) for cell in row.split()])
                          for row in all_rows]
        dyr_dict[psse_model] = pd.DataFrame(dev_params_num)

    # read yaml and set header for each pss/e model
    dirname = os.path.dirname(__file__)
    with open(f'{dirname}/psse-dyr.yaml', 'r') as f:
        dyr_yaml = yaml.full_load(f)
    # models ordered so that dependencies are processed first
    sorted_models = sort_psse_models(dyr_yaml)

    # attach column headers (from the yaml spec) to each parsed table
    for psse_model in dyr_dict:
        if psse_model in dyr_yaml:
            if 'inputs' in dyr_yaml[psse_model]:
                dyr_dict[psse_model].columns = dyr_yaml[psse_model]['inputs']

    # collect not supported models
    not_supported = []
    for model in dyr_dict:
        if model not in sorted_models:
            not_supported.append(model)

    # print out debug messages
    if len(dyr_dict):
        logger.debug(f'dyr contains models {", ".join(dyr_dict.keys())}')

    if len(not_supported):
        logger.warning(f'Models not yet supported: {", ".join(not_supported)}')
    else:
        logger.debug('All dyr models are supported.')

    # load data into models
    for psse_model in sorted_models:
        if psse_model not in dyr_dict:
            # device not exist
            continue

        if psse_model not in dyr_yaml:
            logger.error(f"PSS/E Model <{psse_model}> is not supported.")
            continue

        logger.debug(f'Parsing PSS/E model {psse_model}')

        dest = dyr_yaml[psse_model]['destination']
        find = {}

        # 'find': resolve device idx in other models by matching conditions
        if 'find' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['find'].items():
                for model, conditions in source.items():
                    # NOTE: pop() mutates the loaded yaml dict in place
                    allow_none = conditions.pop('allow_none', 0)
                    cond_names = conditions.keys()
                    cond_values = []
                    for col in conditions.values():
                        # condition values may refer to earlier 'find' results
                        if col in find:
                            cond_values.append(find[col])
                        else:
                            cond_values.append(dyr_dict[psse_model][col])
                    try:
                        find[name] = system.__dict__[model].find_idx(
                            cond_names, cond_values, allow_none=allow_none)
                    except IndexError as e:
                        logger.error(
                            "Data file likely contains references to unsupported models."
                        )
                        logger.error(e)
                        return False

        # 'get': pull parameter values from other models by idx
        if 'get' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['get'].items():
                for model, conditions in source.items():
                    idx_name = conditions['idx']
                    # idx may be a dyr column or a previous 'find' result
                    if idx_name in dyr_dict[psse_model]:
                        conditions['idx'] = dyr_dict[psse_model][idx_name]
                    else:
                        conditions['idx'] = find[idx_name]
                    find[name] = system.__dict__[model].get(**conditions)

        # 'outputs': map dyr columns / find results / expressions to the
        # destination model's parameters
        if 'outputs' in dyr_yaml[psse_model]:
            output_keys = list(dyr_yaml[psse_model]['outputs'].keys())
            output_exprs = list(dyr_yaml[psse_model]['outputs'].values())

            out_dict = {}

            for idx in range(len(output_exprs)):
                out_key = output_keys[idx]
                expr = output_exprs[idx]
                if expr in find:
                    out_dict[out_key] = find[expr]
                elif ';' in expr:
                    # "Model.col,...;func" form: evaluate func over columns
                    args, func = expr.split(';')
                    # NOTE(review): eval() executes code from the packaged yaml
                    # spec — verify this file can never be user-supplied
                    func = eval(func)
                    args = args.split(',')
                    argv = [pairs.split('.') for pairs in args]
                    argv = [dyr_dict[model][param] for model, param in argv]
                    out_dict[output_keys[idx]] = func(*argv)
                else:
                    out_dict[output_keys[idx]] = dyr_dict[psse_model][expr]

            df = pd.DataFrame.from_dict(out_dict)
            # add one device per row to the destination model
            for row in df.to_dict(orient='records'):
                system.add(dest, row)

        system.link_ext_param(system.__dict__[dest])

    return True
def read_add(system, file):
    """
    Read addition PSS/E dyr file and attach the parsed tables to ``system``.

    TODO: implement this function

    Parsing only: one DataFrame per PSS/E model name is stored on
    ``system.df_dict``; loading the data into models is not done yet.

    Parameters
    ----------
    system
    file

    Returns
    -------

    """
    warn_experimental("PSS/E dyr support is incomplete.")

    with open(file, 'r') as f:
        stripped = [ln.strip() for ln in f]

    # Join continuation lines until a '/' terminator, then bucket the
    # parameter text under the quoted model name of the record.
    rows_by_model = defaultdict(list)
    pending = list()
    for text in stripped:
        if text == '':
            continue
        if '/' not in text:
            pending.append(text)
            continue
        pending.append(text.split('/')[0])
        record = ' '.join(pending)
        pieces = record.split("'")
        rows_by_model[pieces[1].strip()].append(pieces[0] + pieces[2])
        pending = list()

    # Build one frame per model: split whitespace-separated cells into
    # columns, then cast every cell to float.
    df_dict = dict()
    for model_name, rows in rows_by_model.items():
        split_frame = pd.DataFrame(rows)[0].str.split(expand=True)
        df_dict[model_name] = split_frame.astype(float)

    # Read the packaged format spec and flatten its list of one-entry dicts.
    dirname = os.path.dirname(__file__)
    with open('{}/psse-dyr.yaml'.format(dirname), 'r') as f:
        dyr_format = yaml.full_load(f)

    dyr_dict = dict()
    for entry in dyr_format:
        dyr_dict.update(entry)

    # Apply the declared column headers where a spec exists for the model.
    for model_name in df_dict:
        if model_name in dyr_dict and 'inputs' in dyr_dict[model_name]:
            df_dict[model_name].columns = dyr_dict[model_name]['inputs']

    system.df_dict = df_dict

    # TODO: Load data into models
    return True