Example #1
def sensor_report(sensor_aggregated_df, data_names, report_columns_usage_dct,
                  max_title):
    """Function to create report Datafrmae from sensor_aggregated_df 
    (slice and reorder columns, translate values in columns)"""

    # loading values to translate
    translate_dct = dct_from_columns('customer_report',
                                     max_title,
                                     'Датчики_перевод_eng',
                                     'Датчики_перевод_ru',
                                     init_file='san_automation_info.xlsx')
    sensor_report_df = translate_values(
        sensor_aggregated_df,
        translate_dct=translate_dct,
        translate_columns=['Type', 'Status', 'Vlaue', 'Unit'])
    # translate_values(sensor_aggregated_df, translate_dct, max_title)
    sensor_report_df, = dataframe_segmentation(sensor_report_df,
                                               data_names[1:],
                                               report_columns_usage_dct,
                                               max_title)
    return sensor_report_df


# def translate_values(translated_df, translate_dct, max_title):
#     """Function to translate values in corresponding columns"""

#     # columns which values need to be translated
#     translate_columns = ['Type', 'Status', 'Vlaue', 'Unit']
#     # translate values in column if column in DataFrame
#     for column in translate_columns:
#         if column in translated_df.columns:
#             translated_df[column] = translated_df[column].replace(to_replace=translate_dct)

#     return translated_df
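
The call in sensor_report passes the column list explicitly, so the current translate_values helper presumably takes it as a parameter. A minimal sketch under that assumption, based on the legacy version above rather than the project's actual implementation:

def translate_values(translated_df, translate_dct=None, translate_columns=None):
    """Sketch (assumption): translate values in the listed columns using translate_dct"""

    if translate_dct and translate_columns:
        for column in translate_columns:
            # translate values in the column only if it is present in the DataFrame
            if column in translated_df.columns:
                translated_df[column] = translated_df[column].replace(to_replace=translate_dct)
    return translated_df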
Example #2
def create_report_tables(portshow_aggregated_df,
                         storage_connection_statistics_df,
                         device_connection_statistics_df, data_names,
                         report_columns_usage_dct, max_title):
    """Function to create required report DataFrames out of aggregated DataFrame"""

    add_columns_lst = [
        'FW_Recommeneded', 'Driver_Recommeneded', 'FW_Supported',
        'HW_Supported'
    ]
    portshow_aggregated_df = portshow_aggregated_df.reindex(
        columns=[*portshow_aggregated_df.columns.tolist(), *add_columns_lst])
    # partition aggregated DataFrame to required tables
    # pylint: disable=unbalanced-tuple-unpacking
    servers_report_df, storage_report_df, library_report_df, hba_report_df, \
        storage_connection_df, library_connection_df, server_connection_df, npiv_report_df = \
        dataframe_segmentation(portshow_aggregated_df, data_names, report_columns_usage_dct, max_title)

    # clean and sort DataFrames
    # device report
    servers_report_df = _clean_dataframe(servers_report_df, 'srv')
    hba_report_df = _clean_dataframe(hba_report_df,
                                     'srv',
                                     duplicates=['Идентификатор порта WWPN'])
    storage_report_df = _clean_dataframe(storage_report_df, 'stor')
    storage_report_df = _multi_fabric(storage_report_df,
                                      report_columns_usage_dct)
    library_report_df = _clean_dataframe(library_report_df, 'lib')
    # device connection reports
    storage_connection_df = _clean_dataframe(storage_connection_df,
                                             'stor',
                                             clean=True)
    storage_connection_df = translate_values(storage_connection_df)
    library_connection_df = _clean_dataframe(library_connection_df,
                                             'lib',
                                             clean=True)
    server_connection_df = _clean_dataframe(server_connection_df,
                                            'srv',
                                            clean=True)
    npiv_report_df = _clean_dataframe(npiv_report_df,
                                      'npiv',
                                      duplicates=None,
                                      clean=True)
    # device connection statistics reports
    storage_connection_statistics_report_df = connection_statistics_report(
        storage_connection_statistics_df, max_title)
    device_connection_statistics_report_df = connection_statistics_report(
        device_connection_statistics_df, max_title)

    return servers_report_df, storage_report_df, library_report_df, hba_report_df, \
        storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
        storage_connection_statistics_report_df, device_connection_statistics_report_df
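
A hypothetical usage sketch: the ten report DataFrames come back in the order listed in the return statement above (the names on the left simply mirror it):

# unpack the report tables returned by create_report_tables (illustrative only)
servers_report_df, storage_report_df, library_report_df, hba_report_df, \
    storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
    storage_connection_statistics_report_df, device_connection_statistics_report_df = \
    create_report_tables(portshow_aggregated_df, storage_connection_statistics_df,
                         device_connection_statistics_df, data_names,
                         report_columns_usage_dct, max_title)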
Example #3
def unzoned_device_report(portshow_cfg_aggregated_df, data_names,
                          report_columns_usage_dct, max_title):
    """
    Function to check all fabric devices for usage in zoning configuration and
    check if all fabric devices have aliases.
    Create unzoned devices and no aliases reports
    """

    # switch and virtual connect ports are not part of the zoning configuration by definition
    mask_not_switch_vc = ~portshow_cfg_aggregated_df.deviceType.isin(
        ['SWITCH', 'VC'])
    # show online ports only
    mask_online = portshow_cfg_aggregated_df['portState'] == 'Online'
    # Access gateway switch connection information is excessive
    mask_native = portshow_cfg_aggregated_df['switchMode'] == 'Native'
    # show ports which are not part of any configuration
    mask_not_zoned = portshow_cfg_aggregated_df['cfg_type'].isna()
    # show devices that have no aliases
    mask_no_alias = portshow_cfg_aggregated_df['alias'].isna()

    unzoned_device_df = portshow_cfg_aggregated_df.loc[mask_native
                                                       & mask_online
                                                       & mask_not_switch_vc
                                                       & mask_not_zoned]
    unzoned_device_df = unzoned_device_df.dropna(axis='columns', how='all')

    no_alias_device_df = portshow_cfg_aggregated_df.loc[mask_native
                                                        & mask_online
                                                        & mask_not_switch_vc
                                                        & mask_no_alias]
    # no_alias_devices_df.dropna(axis='columns', how='all')
    # create report DataFrame
    # pylint: disable=unbalanced-tuple-unpacking
    unzoned_device_report_df, = dataframe_segmentation(
        unzoned_device_df, data_names[0], report_columns_usage_dct, max_title)
    no_alias_device_report_df, = dataframe_segmentation(
        no_alias_device_df, data_names[1], report_columns_usage_dct, max_title)
    return unzoned_device_report_df, no_alias_device_report_df
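
A minimal, self-contained illustration of the boolean-mask filtering used above, with toy data (column names match the ones referenced in the function, values are made up):

import pandas as pd

# toy port table: a switch port, an online server port without zoning, an offline storage port
toy_df = pd.DataFrame({'deviceType': ['SWITCH', 'SRV', 'STORAGE'],
                       'portState': ['Online', 'Online', 'Offline'],
                       'switchMode': ['Native', 'Native', 'Native'],
                       'cfg_type': ['effective', None, None],
                       'alias': ['switch_alias', None, 'storage_alias']})
mask_not_switch_vc = ~toy_df['deviceType'].isin(['SWITCH', 'VC'])
mask_online = toy_df['portState'] == 'Online'
mask_native = toy_df['switchMode'] == 'Native'
mask_not_zoned = toy_df['cfg_type'].isna()
# only the online server port that is not in any zoning configuration remains
unzoned_toy_df = toy_df.loc[mask_native & mask_online & mask_not_switch_vc & mask_not_zoned]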
Example #4
def create_report(aggregated_df, data_name, translate_dct,
                  report_columns_usage_dct, max_title):
    """
    Auxiliary function to remove unnecessary columns from aggregated DataFrame and
    extract required columns and create report dataframe
    """

    # pylint: disable=unbalanced-tuple-unpacking
    cleaned_df = drop_excessive_columns(aggregated_df,
                                        report_columns_usage_dct)
    translate_columns = [
        'Fabric_device_status', 'Target_Initiator_note', 'Target_model_note',
        'Effective_cfg_usage_note', 'Pair_zone_note',
        'Multiple_fabric_label_connection', 'Zone_and_Pairzone_names_related',
        'Zone_name_device_names_related'
    ]
    cleaned_df = translate_values(cleaned_df, translate_dct, translate_columns)
    # take required data from aggregated DataFrame to create report
    report_df, = dataframe_segmentation(cleaned_df, data_name,
                                        report_columns_usage_dct, max_title)
    return report_df
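
A hypothetical call sketch (the aggregated DataFrame and data name are illustrative; translate_dct is assumed to be loaded beforehand, for example with dct_from_columns as in the first example):

# illustrative call only; argument values depend on the surrounding pipeline
zoning_report_df = create_report(zoning_aggregated_df, data_name,
                                 translate_dct, report_columns_usage_dct, max_title)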
Example #5
def absent_device(zoning_aggregated_df, data_name, translate_dct,
                  report_columns_usage_dct, max_title):
    """Function to create table with absent and unavailable remote devices in zoning configuration"""

    mask_absent = zoning_aggregated_df.Fabric_device_status.isin(
        ['absent', 'remote_na'])
    absent_columns = [
        'Fabric_name', 'Fabric_label', 'cfg', 'cfg_type', 'zone_member',
        'alias_member', 'Fabric_device_status', 'zonemember_Fabric_name',
        'zonemember_Fabric_label', 'zone'
    ]
    absent_device_df = zoning_aggregated_df.loc[mask_absent, absent_columns]
    absent_device_df = absent_device_df.groupby(absent_columns[:-1],
                                                as_index=False,
                                                dropna=False).agg(
                                                    {'zone': ', '.join})
    absent_device_df = translate_values(absent_device_df, translate_dct,
                                        ['Fabric_device_status'])
    zoning_absent_device_report_df, = dataframe_segmentation(
        absent_device_df, data_name, report_columns_usage_dct, max_title)
    return zoning_absent_device_report_df
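
A toy illustration of the groupby step above: rows that describe the same absent device are collapsed into one record, with the zone names joined into a single comma-separated string (data below is made up):

import pandas as pd

# two zones reference the same absent alias member, one zone references another
toy_df = pd.DataFrame({'alias_member': ['wwpn_1', 'wwpn_1', 'wwpn_2'],
                       'Fabric_device_status': ['absent', 'absent', 'remote_na'],
                       'zone': ['zone_a', 'zone_b', 'zone_c']})
collapsed_df = toy_df.groupby(['alias_member', 'Fabric_device_status'],
                              as_index=False, dropna=False).agg({'zone': ', '.join})
# collapsed_df['zone'] -> 'zone_a, zone_b' and 'zone_c'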