Example #1
def build_group_table(ssa, group, columns, mdl_name=[]):
    """
    Build the table for devices in a group of an ANDES system.

    Parameters
    ----------
    ssa : andes.system.System
        The ANDES system from which to build the table.
    group : string
        The ANDES group name.
    columns : list of string
        The common columns of the group to include in the table.
    mdl_name : list of string
        The models to include in the table. Defaults to all models in the group.

    Returns
    -------
    DataFrame
        The output DataFrame containing the requested columns of all devices.
    """
    group_df = pd.DataFrame(columns=columns)
    group = getattr(ssa, group)
    if not mdl_name:
        mdl_dict = getattr(group, 'models')
        for key in mdl_dict:
            mdl = getattr(ssa, key)
            group_df = pd.concat([group_df, mdl.as_df()[columns]], axis=0)
    else:
        for key in mdl_name:
            mdl = getattr(ssa, key)
            group_df = pd.concat([group_df, mdl.as_df()[columns]], axis=0)
    return group_df
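
A minimal usage sketch; the test case, the StaticGen group, and the column names below are illustrative assumptions, not part of the snippet above:

import andes

# load a packaged ANDES test case; the case path is an assumption
ssa = andes.load(andes.get_case('kundur/kundur_full.xlsx'))

# table of all static generators, or only the PV devices
gen_table = build_group_table(ssa, 'StaticGen', ['idx', 'u', 'bus'])
pv_table = build_group_table(ssa, 'StaticGen', ['idx', 'u', 'bus'], mdl_name=['PV'])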
Example #2
    def as_df(self, vin=False):
        """
        Export all parameters as a `pandas.DataFrame` object.
        This function utilizes `as_dict` for preparing data.

        Parameters
        ----------
        vin : bool
            If True, export all parameters from the original input (``vin``).

        Returns
        -------
        DataFrame
            A dataframe containing all model data, indexed by the `uid` column.
        """
        if vin is False:
            out = pd.DataFrame(self.as_dict()).set_index('uid')
        else:
            out = pd.DataFrame(self.as_dict(vin=True)).set_index('uid')

        return out
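
For instance, with a loaded ANDES system (a sketch; the case path and the PQ model name are assumptions):

import andes

ss = andes.load(andes.get_case('kundur/kundur_full.xlsx'))

df = ss.PQ.as_df()             # current parameter values, indexed by uid
df_in = ss.PQ.as_df(vin=True)  # values as originally entered in the input file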
Example #3
def _sumPF_ppn(ppn):
    """Summarize PF results of a pandapower net"""
    # generation per bus
    rg = pd.DataFrame()
    rg['gen'] = ppn.res_gen['p_mw']
    rg['bus'] = ppn.gen['bus']
    rg = rg.groupby('bus').sum()
    rg.reset_index(inplace=True)

    # demand per bus
    rd = pd.DataFrame()
    rd['demand'] = ppn.res_load['p_mw']
    rd['bus'] = ppn.load['bus']
    rd = rd.groupby('bus').sum()
    rd.reset_index(inplace=True)

    # net generation (gen - demand) for every bus, zero where no device connects
    rp = pd.DataFrame()
    rp['bus'] = ppn.bus.index
    rp = pd.merge(rp, rg, how='left', on='bus')
    rp = pd.merge(rp, rd, how='left', on='bus')
    rp.fillna(0, inplace=True)
    rp['ngen'] = rp['gen'] - rp['demand']
    return rp
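
A quick way to exercise this helper, assuming a standard pandapower test network:

import pandapower as pp
import pandapower.networks as pn

ppn = pn.case9()   # 9-bus test case shipped with pandapower
pp.runpp(ppn)      # solve the power flow so res_gen and res_load are populated

summary = _sumPF_ppn(ppn)
print(summary)     # columns: bus, gen, demand, ngen (net injection in MW)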
Example #4
def _sumPF_ppn(ppn):
    """Summarize PF results of a pandapower net"""
    # per-element tables: generator output and load demand, each with its bus index
    rg = pd.concat([ppn.res_gen[['p_mw']], ppn.gen[['bus']]],
                   axis=1).rename(columns={'p_mw': 'gen'})
    rd = pd.concat([ppn.res_load[['p_mw']], ppn.load[['bus']]],
                   axis=1).rename(columns={'p_mw': 'demand'})

    # left-join onto the full bus list, then aggregate; merging per device before
    # the groupby can double-count power on buses that host several generators
    # together with loads
    rp = pd.DataFrame()
    rp['bus'] = ppn.bus.index
    rp = rp.merge(rg, on='bus', how='left')
    rp = rp.merge(rd, on='bus', how='left')
    rp.fillna(0, inplace=True)
    rp['ngen'] = rp['gen'] - rp['demand']
    rp = rp.groupby('bus').sum().reset_index(drop=True)
    rp['bus'] = rp.index  # assumes contiguous, zero-based bus indices
    return rp
Example #5
    def as_df_local(self):
        """
        Export local variable values and services to a DataFrame.
        """

        out = dict()
        out['uid'] = np.arange(self.n)
        out['idx'] = self.idx.v

        for name, instance in self.cache.all_vars.items():
            out[name] = instance.v

        for name, instance in self.services.items():
            out[name] = instance.v

        return pd.DataFrame(out).set_index('uid')
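
A short inspection sketch, assuming the method above is available on a model instance; the case path and the GENROU model name are placeholders:

import andes

ss = andes.load(andes.get_case('kundur/kundur_full.xlsx'))
ss.PFlow.run()   # variable values are meaningful after initialization
ss.TDS.init()

print(ss.GENROU.as_df_local().head())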
Example #6
def _read_dyr_dict(file):
    """
    Parse dyr file into a dict where keys are model names and values are dataframes.
    """
    input_list = andes.io.read_file_like(file)

    # concatenate multi-line device data
    input_concat_dict = defaultdict(list)
    multi_line = list()
    for line in input_list:
        if line == '':
            continue
        if '/' not in line:
            multi_line.append(line)
        else:
            multi_line.append(line.split('/')[0])
            single_line = ' '.join(multi_line)

            if single_line.strip() == '':
                continue

            single_list = single_line.split("'")

            psse_model = single_list[1].strip()
            input_concat_dict[psse_model].append(single_list[0] +
                                                 single_list[2])
            multi_line = list()

    # construct pandas dataframe for all models
    dyr_dict = dict()  # input data from dyr file

    for psse_model, all_rows in input_concat_dict.items():
        dev_params_num = [([to_number(cell) for cell in row.split()])
                          for row in all_rows]
        dyr_dict[psse_model] = pd.DataFrame(dev_params_num)

    return dyr_dict
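
For reference, a sketch of what the parser produces for a tiny dyr fragment; the GENCLS records are made up for illustration, and `andes.io.read_file_like` is assumed to accept a plain file path:

import tempfile

sample_dyr = (
    "1 'GENCLS' 1   4.0000   0.0000 /\n"
    "2 'GENCLS' 1   9.0000   0.0000 /\n"
)

with tempfile.NamedTemporaryFile('w', suffix='.dyr', delete=False) as f:
    f.write(sample_dyr)
    path = f.name

dyr_dict = _read_dyr_dict(path)
# dyr_dict['GENCLS'] is a DataFrame with one row per device:
# bus number, machine ID, then the numeric parameters (here H and D)
print(dyr_dict['GENCLS'])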
Example #7
def read_add(system, file):
    """
    Read an additional PSS/E dyr file.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional `dyr` file

    Returns
    -------
    bool
        data parsing status
    """

    warn_experimental("PSS/E dyr support")

    with open(file, 'r') as f:
        input_list = [line.strip() for line in f]

    # concatenate multi-line device data
    input_concat_dict = defaultdict(list)
    multi_line = list()
    for i, line in enumerate(input_list):
        if line == '':
            continue
        if '/' not in line:
            multi_line.append(line)
        else:
            multi_line.append(line.split('/')[0])
            single_line = ' '.join(multi_line)
            single_list = single_line.split("'")

            psse_model = single_list[1].strip()
            input_concat_dict[psse_model].append(single_list[0] +
                                                 single_list[2])
            multi_line = list()

    # construct pandas dataframe for all models
    dyr_dict = dict()
    for psse_model, all_rows in input_concat_dict.items():
        dev_params_num = [([to_number(cell) for cell in row.split()])
                          for row in all_rows]
        dyr_dict[psse_model] = pd.DataFrame(dev_params_num)

    # read yaml and set header for each pss/e model
    dirname = os.path.dirname(__file__)
    with open(f'{dirname}/psse-dyr.yaml', 'r') as f:
        dyr_yaml = yaml.full_load(f)

    for psse_model in dyr_dict:
        if psse_model in dyr_yaml:
            if 'inputs' in dyr_yaml[psse_model]:
                dyr_dict[psse_model].columns = dyr_yaml[psse_model]['inputs']

    # load data into models
    for psse_model in dyr_dict:
        if psse_model not in dyr_yaml:
            logger.error(f"PSS/E Model <{psse_model}> is not supported.")
            continue

        dest = dyr_yaml[psse_model]['destination']
        find = {}

        if 'find' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['find'].items():
                for model, conditions in source.items():
                    cond_names = conditions.keys()
                    cond_values = [
                        dyr_dict[psse_model][col]
                        for col in conditions.values()
                    ]
                    find[name] = system.__dict__[model].find_idx(
                        cond_names, cond_values)

        if 'get' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['get'].items():
                for model, conditions in source.items():
                    idx_name = conditions['idx']
                    if idx_name in dyr_dict[psse_model]:
                        conditions['idx'] = dyr_dict[psse_model][idx_name]
                    else:
                        conditions['idx'] = find[idx_name]
                    find[name] = system.__dict__[model].get(**conditions)

        if 'outputs' in dyr_yaml[psse_model]:
            output_keys = list(dyr_yaml[psse_model]['outputs'].keys())
            output_exprs = list(dyr_yaml[psse_model]['outputs'].values())
            out_dict = {}

            for idx in range(len(output_exprs)):
                out_key = output_keys[idx]
                expr = output_exprs[idx]
                if expr in find:
                    out_dict[out_key] = find[expr]
                elif ';' in expr:
                    args, func = expr.split(';')
                    func = eval(func)
                    args = args.split(',')
                    argv = [pairs.split('.') for pairs in args]
                    argv = [dyr_dict[model][param] for model, param in argv]
                    out_dict[output_keys[idx]] = func(*argv)
                else:
                    out_dict[output_keys[idx]] = dyr_dict[psse_model][expr]

            df = pd.DataFrame.from_dict(out_dict)
            for row in df.to_dict(orient='records'):
                system.add(dest, row)

        system.link_ext_param(system.__dict__[dest])

    return True
Example #8
def read_add(system, file):
    """
    Read an additional PSS/E dyr file.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional `dyr` file

    Returns
    -------
    bool
        data parsing status
    """
    with open(file, 'r') as f:
        input_list = [line.strip() for line in f]

    # concatenate multi-line device data
    input_concat_dict = defaultdict(list)
    multi_line = list()
    for i, line in enumerate(input_list):
        if line == '':
            continue
        if '/' not in line:
            multi_line.append(line)
        else:
            multi_line.append(line.split('/')[0])
            single_line = ' '.join(multi_line)

            if single_line.strip() == '':
                continue

            single_list = single_line.split("'")

            psse_model = single_list[1].strip()
            input_concat_dict[psse_model].append(single_list[0] +
                                                 single_list[2])
            multi_line = list()

    # construct pandas dataframe for all models
    dyr_dict = dict()  # input data from dyr file

    for psse_model, all_rows in input_concat_dict.items():
        dev_params_num = [([to_number(cell) for cell in row.split()])
                          for row in all_rows]
        dyr_dict[psse_model] = pd.DataFrame(dev_params_num)

    # read yaml and set header for each pss/e model
    dirname = os.path.dirname(__file__)
    with open(f'{dirname}/psse-dyr.yaml', 'r') as f:
        dyr_yaml = yaml.full_load(f)

    sorted_models = sort_psse_models(dyr_yaml)

    for psse_model in dyr_dict:
        if psse_model in dyr_yaml:
            if 'inputs' in dyr_yaml[psse_model]:
                dyr_dict[psse_model].columns = dyr_yaml[psse_model]['inputs']

    # collect not supported models
    not_supported = []
    for model in dyr_dict:
        if model not in sorted_models:
            not_supported.append(model)

    # print out debug messages
    if len(dyr_dict):
        logger.debug(f'dyr contains models {", ".join(dyr_dict.keys())}')

    if len(not_supported):
        logger.warning(f'Models not yet supported: {", ".join(not_supported)}')
    else:
        logger.debug('All dyr models are supported.')

    # load data into models
    for psse_model in sorted_models:
        if psse_model not in dyr_dict:
            # device not exist
            continue

        if psse_model not in dyr_yaml:
            logger.error(f"PSS/E Model <{psse_model}> is not supported.")
            continue

        logger.debug(f'Parsing PSS/E model {psse_model}')

        dest = dyr_yaml[psse_model]['destination']
        find = {}

        if 'find' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['find'].items():

                for model, conditions in source.items():
                    allow_none = conditions.pop('allow_none', 0)
                    cond_names = conditions.keys()
                    cond_values = []

                    for col in conditions.values():
                        if col in find:
                            cond_values.append(find[col])
                        else:
                            cond_values.append(dyr_dict[psse_model][col])

                    try:
                        find[name] = system.__dict__[model].find_idx(
                            cond_names, cond_values, allow_none=allow_none)
                    except IndexError as e:
                        logger.error(
                            "Data file likely contains references to unsupported models."
                        )
                        logger.error(e)
                        return False

        if 'get' in dyr_yaml[psse_model]:
            for name, source in dyr_yaml[psse_model]['get'].items():
                for model, conditions in source.items():
                    idx_name = conditions['idx']
                    if idx_name in dyr_dict[psse_model]:
                        conditions['idx'] = dyr_dict[psse_model][idx_name]
                    else:
                        conditions['idx'] = find[idx_name]
                    find[name] = system.__dict__[model].get(**conditions)

        if 'outputs' in dyr_yaml[psse_model]:
            output_keys = list(dyr_yaml[psse_model]['outputs'].keys())
            output_exprs = list(dyr_yaml[psse_model]['outputs'].values())
            out_dict = {}

            for idx in range(len(output_exprs)):
                out_key = output_keys[idx]
                expr = output_exprs[idx]
                if expr in find:
                    out_dict[out_key] = find[expr]
                elif ';' in expr:
                    args, func = expr.split(';')
                    func = eval(func)
                    args = args.split(',')
                    argv = [pairs.split('.') for pairs in args]
                    argv = [dyr_dict[model][param] for model, param in argv]
                    out_dict[output_keys[idx]] = func(*argv)
                else:
                    out_dict[output_keys[idx]] = dyr_dict[psse_model][expr]

            df = pd.DataFrame.from_dict(out_dict)
            for row in df.to_dict(orient='records'):
                system.add(dest, row)

        system.link_ext_param(system.__dict__[dest])

    return True
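
A typical call sequence, assuming a raw power-flow case and a matching dyr file (file names are placeholders):

import andes

ss = andes.load('case.raw', setup=False)   # defer setup so devices can still be added
read_add(ss, 'case.dyr')                   # append the dynamic data
ss.setup()

In ANDES itself this code path is normally reached through the `addfile` argument of `andes.load` rather than by calling `read_add` directly.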
Example #9
def read_add(system, file):
    """
    Read an additional PSS/E dyr file.

    TODO: load the parsed data into models.

    Parameters
    ----------
    system : System
        System instance to which data will be loaded
    file : str
        Path to the additional `dyr` file

    Returns
    -------
    bool
        data parsing status
    """

    warn_experimental("PSS/E dyr support is incomplete.")

    with open(file, 'r') as f:
        input_list = [line.strip() for line in f]

    # concatenate multi-line device data
    all_dict = defaultdict(list)
    data_list = list()
    for i, line in enumerate(input_list):
        if line == '':
            continue
        if '/' not in line:
            data_list.append(line)
        else:
            data_list.append(line.split('/')[0])
            data_str = ' '.join(data_list)
            data_split = data_str.split("'")

            model_name = data_split[1].strip()
            all_dict[model_name].append(data_split[0] + data_split[2])
            data_list = list()

    # construct pandas dataframe for all models
    df_dict = dict()
    for key, val in all_dict.items():
        df_1row = pd.DataFrame(val)
        df_str = df_1row[0].str.split(expand=True)
        df_dict[key] = df_str.astype(float)

    # set header for each
    dirname = os.path.dirname(__file__)
    with open('{}/psse-dyr.yaml'.format(dirname), 'r') as f:
        dyr_format = yaml.full_load(f)
    dyr_dict = dict()

    for item in dyr_format:
        dyr_dict.update(item)

    for key in df_dict:
        if key in dyr_dict:
            if 'inputs' in dyr_dict[key]:
                df_dict[key].columns = dyr_dict[key]['inputs']

    system.df_dict = df_dict

    # TODO: Load data into models

    return True
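
Since this early variant only attaches the raw per-model tables to the system, a quick inspection sketch (file names are placeholders):

ss = andes.load('case.raw', setup=False)
read_add(ss, 'case.dyr')

# per-model DataFrames keyed by PSS/E model name, with headers set from psse-dyr.yaml
for name, df in ss.df_dict.items():
    print(name, df.shape)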