Exemple #1
0
def storage_port_partner(system_port_3par_df, portshow_aggregated_df):
    """Add 3PAR partner (failover) port Wwnp and its fabric connection
    information to system_port_3par_df."""

    # lookup of port Wwnp keyed by configname and port id; renamed so that
    # joining on the partner port id yields the partner's Wwnp
    partner_wwnp_df = system_port_3par_df[['configname', 'Storage_Port', 'PortName']].copy()
    partner_wwnp_df = partner_wwnp_df.rename(
        columns={'Storage_Port': 'Storage_Port_Partner',
                 'PortName': 'Storage_Port_Partner_Wwnp'})
    system_port_3par_df = dataframe_fillna(
        system_port_3par_df, partner_wwnp_df,
        filled_lst=['Storage_Port_Partner_Wwnp'],
        join_lst=['configname', 'Storage_Port_Partner'])

    # all Wwnp present in the SAN with their fabric name and label
    fabric_wwnp_columns = ['Fabric_name', 'Fabric_label', 'PortName']
    san_wwnp_df = portshow_aggregated_df[fabric_wwnp_columns].copy()
    san_wwnp_df = san_wwnp_df.dropna(subset=fabric_wwnp_columns).drop_duplicates()

    # rename columns so the SAN lookup matches the partner port columns
    storage_port_partner_columns = ['Storage_Port_Partner_Fabric_name',
                                    'Storage_Port_Partner_Fabric_label',
                                    'Storage_Port_Partner_Wwnp']
    san_wwnp_df.columns = storage_port_partner_columns
    # fill in fabric connection information of the failover ports
    system_port_3par_df = dataframe_fillna(
        system_port_3par_df, san_wwnp_df,
        join_lst=storage_port_partner_columns[2:],
        filled_lst=storage_port_partner_columns[:2])
    return system_port_3par_df
Exemple #2
0
def verify_cfg_type(aggregated_df, zoning_aggregated_df, search_lst):
    """Check which configuration type (effective or defined) 'search_lst'
    members are applied in and fill the 'cfg_type' column accordingly."""

    # columns to join on (all but last) and the column to fill (last)
    cfg_lst = ['Fabric_name', 'Fabric_label', *search_lst, 'cfg_type']

    # fill 'cfg_type' from the effective configuration first so that any
    # member present in the effective config is marked as effective;
    # remaining empty values are then filled from the defined configuration
    for cfg_type_tag in ('effective', 'defined'):
        mask_cfg = zoning_aggregated_df['cfg_type'].str.contains(cfg_type_tag, na=False)
        cfg_df = zoning_aggregated_df.loc[mask_cfg, cfg_lst]
        aggregated_df = dataframe_fillna(aggregated_df, cfg_df,
                                         join_lst=cfg_lst[:-1],
                                         filled_lst=cfg_lst[-1:])
    return aggregated_df
Exemple #3
0
def group_name_fillna(portshow_aggregated_df):
    """Fill the 'Group_Name' column from alias based device groups and use
    it to complete missing 'Device_Host_Name' values."""

    storage_grp_df, library_grp_df = alias_wwnn_group(portshow_aggregated_df)
    library_sn_grp_df = alias_serial_group(portshow_aggregated_df)

    # start from an empty group name column
    portshow_aggregated_df['Group_Name'] = None

    # storage groups are matched on device NodeName
    portshow_aggregated_df = dataframe_fillna(portshow_aggregated_df, storage_grp_df, ['NodeName'], ['Group_Name'])
    # missing device host names are taken from the storage group name
    portshow_aggregated_df['Device_Host_Name'].fillna(portshow_aggregated_df['Group_Name'], inplace=True)
    # library groups are matched on serial number first, then on NodeName
    portshow_aggregated_df = dataframe_fillna(portshow_aggregated_df, library_sn_grp_df, ['Device_SN'], ['Group_Name'])
    portshow_aggregated_df = dataframe_fillna(portshow_aggregated_df, library_grp_df, ['NodeName'], ['Group_Name'])

    return portshow_aggregated_df
Exemple #4
0
def storage_3par_fillna(portshow_aggregated_df, system_3par_df, port_3par_df):
    """Function to add 3PAR information collected from 3PAR configuration files to
    portshow_aggregated_df (system details, failover port partner and its fabric connection)"""

    # both system and port configuration data must be present
    if not port_3par_df.empty and not system_3par_df.empty:
        # system information
        system_columns = ['configname', 'System_Model', 'System_Name', 
                            'Serial_Number', 'IP_Address', 'Location']
        system_3par_cp_df = system_3par_df[system_columns].copy()
        system_3par_cp_df.drop_duplicates(inplace=True)

        # add system information to 3PAR ports DataFrame
        system_port_3par_df = port_3par_df.merge(system_3par_cp_df, how='left', on=['configname'])
        # convert Wwnn and Wwnp to regular representation (lower case with colon delimiter)
        system_port_3par_df = convert_wwn(system_port_3par_df, ['NodeName', 'PortName'])
        # rename columns to correspond portshow_aggregated_df
        rename_columns = {'System_Name': 'Device_Name',	'System_Model':	'Device_Model', 
                            'Serial_Number': 'Device_SN', 'Location': 'Device_Location'}
        system_port_3par_df.rename(columns=rename_columns, inplace=True)
        system_port_3par_df['Device_Host_Name'] = system_port_3par_df['Device_Name']

        # add failover port partner Wwnp and its fabric connection information
        system_port_3par_df = storage_port_partner(system_port_3par_df, portshow_aggregated_df)

        # add 3PAR system level information to portshow_aggregated_df (matched on NodeName)
        fillna_wwnn_columns = ['Device_Name', 'Device_Host_Name', 'Device_Model', 'Device_SN', 'IP_Address', 'Device_Location']
        portshow_aggregated_df = \
            dataframe_fillna(portshow_aggregated_df, system_port_3par_df, join_lst=['NodeName'] , filled_lst=fillna_wwnn_columns)

        # add 3PAR port level information to portshow_aggregated_df (matched on PortName)
        fillna_wwnp_columns = ['Storage_Port_Partner_Fabric_name', 'Storage_Port_Partner_Fabric_label', 
                                'Storage_Port_Partner', 'Storage_Port_Partner_Wwnp', 
                                'Storage_Port_Mode', 'Storage_Port_Type']
        portshow_aggregated_df = \
            dataframe_fillna(portshow_aggregated_df, system_port_3par_df, join_lst=['PortName'] , filled_lst=fillna_wwnp_columns)

        # note whether the port and its partner port are connected to the same fabric
        portshow_aggregated_df = sequential_equality_note(portshow_aggregated_df, 
                                                            columns1=['Fabric_name', 'Fabric_label'], 
                                                            columns2=['Storage_Port_Partner_Fabric_name', 'Storage_Port_Partner_Fabric_label'], 
                                                            note_column='Storage_Port_Partner_Fabric_equal')

    # if 3PAR configuration was not extracted apply reserved name (3PAR model and SN combination)
    if 'Device_Name_reserved' in portshow_aggregated_df.columns:
        portshow_aggregated_df['Device_Host_Name'].fillna(portshow_aggregated_df['Device_Name_reserved'], inplace = True)

    return portshow_aggregated_df
Exemple #5
0
def _merge_ag_groups(left_group_df, right_group_df, slave_group=False):
    """
    Auxiliary function to merge two npiv link groups. Takes right and left groups to merge.
    Returns left group with connected ports from right group.
    When slave_group is True, ports sharing the same FCID are joined into one row.
    """

    # columns of DataFrame to merge with
    join_columns_lst = [
        'Fabric_name', 'Fabric_label', 'configname', 'chassis_name',
        'chassis_wwn', 'switchName', 'switchWwn', 'Connected_portId',
        'Index_slot_port'
    ]

    # each group with presumed AG links has empty columns with information about
    # connected device name (Device_Host_Name) and its port number (Device_Port);
    # the right group's switch name and port number become the left group's device info
    rename_columns_dct = {
        'switchName': 'Device_Host_Name',
        'Index_slot_port': 'Device_Port'
    }

    # dataframe_fillna function accepts as parameters two lists:
    # list with DataFrames column names to merge on
    join_lst = ['Fabric_name', 'Fabric_label', 'Connected_portId']
    # list with DataFrames column names from which information need to be copied
    # from right to left DataFrame
    filled_lst = ['Device_Host_Name', 'Device_Port']
    """
    It's not possible to strictly identify port number connected to the slave AG link 
    if number of slave links in trunk exceeds one (FCID is the same for all trunk ports 
    and there is no WWNp value for slave link). Thus after merge operation we can get
    multiple ports with the same port number for both sides of the link.
    To avoid this we apply grouping of links and joining all ports with the same FCID in one line
    """
    # columns names grouping performed on (portcmd_columns_lst is a module level list)
    grp_lst = portcmd_columns_lst.copy()
    grp_lst.remove('Device_Port')

    # align Group#5 DataFrame paired for Group#4 DataFrame and perform merge operation
    right_join_df = right_group_df.loc[:, join_columns_lst].copy()
    right_join_df.rename(columns=rename_columns_dct, inplace=True)
    # fill values for connected switch name and port number for each AG link
    left_group_df = dataframe_fillna(left_group_df,
                                     right_join_df,
                                     join_lst,
                                     filled_lst,
                                     remove_duplicates=False)

    if slave_group:
        # pandas 1.04 is not able to perform grouping if nan values present thus replace it with unknown value
        left_group_df.fillna('unknown', inplace=True)
        # grouping port numbers in case of multiple links in trunk
        left_group_df = left_group_df.groupby(grp_lst, as_index=False).agg(
            {'Device_Port': ', '.join})
        # return nan values
        left_group_df.replace('unknown', pd.NA, inplace=True)

    return left_group_df
Exemple #6
0
def fill_switch_info(portshow_aggregated_df, switch_params_df,
                     switch_params_aggregated_df, report_data_lst):
    """
    Add connected switch information (SN, IP, Location, FOS, Model) to the
    portcmd DataFrame. Matching is performed on the combination of connected
    port oui and switch main board serial number (oui_board_sn).
    """

    # oui_board_sn is the WWN without its first six characters;
    # both DataFrames gain this helper column (in place)
    portshow_aggregated_df['oui_board_sn'] = \
        portshow_aggregated_df.Connected_portWwn.str.slice(start=6)
    switch_params_aggregated_df['oui_board_sn'] = \
        switch_params_aggregated_df.switchWwn.str.slice(start=6)

    # columns taken from switch_params_aggregated_df
    switch_params_columns_lst = [
        'Fabric_name', 'Fabric_label', 'oui_board_sn', 'chassis_name',
        'boot.ipa', 'ssn', 'FOS_version', 'Brocade_modelName', 'HPE_modelName',
        'Device_Location']
    switch_params_join_df = switch_params_aggregated_df[switch_params_columns_lst].copy()
    # 'x' marks an absent HPE model name; substitute the Brocade model instead
    switch_params_join_df['HPE_modelName'].replace('x', np.nan, inplace=True)
    switch_params_join_df['HPE_modelName'].fillna(
        switch_params_join_df['Brocade_modelName'], inplace=True)
    switch_params_join_df.drop(columns=['Brocade_modelName'], inplace=True)

    # rename columns to correspond columns in portshow_aggregated_df
    rename_dct = {'chassis_name': 'Device_Host_Name',
                  'boot.ipa': 'IP_Address',
                  'ssn': 'Device_SN',
                  'FOS_version': 'Device_Fw',
                  'HPE_modelName': 'Device_Model'}
    switch_params_join_df.rename(columns=rename_dct, inplace=True)

    # drop 'x' placeholders in Device_Model so they can be filled below
    switch_join_columns_lst = switch_params_join_df.columns.to_list()
    portshow_aggregated_df['Device_Model'].replace('^x$', np.nan,
                                                   regex=True, inplace=True)
    # fill empty values in portshow_aggregated_df from switch_params_join_df
    # (first three columns are the join keys, the rest are filled)
    portshow_aggregated_df = dataframe_fillna(
        portshow_aggregated_df,
        switch_params_join_df,
        join_lst=switch_join_columns_lst[:3],
        filled_lst=switch_join_columns_lst[3:])

    return portshow_aggregated_df
Exemple #7
0
def fill_isl_link(portshow_aggregated_df, isl_aggregated_df):
    """Add ISL link information (switchname, port, ip, model) to the portcmd DataFrame."""

    # nothing to add when no ISL information was collected
    if isl_aggregated_df.empty:
        return portshow_aggregated_df

    # columns extracted from isl_aggregated_df
    extract_columns_lst = [
        'Fabric_name', 'Fabric_label', 'configname', 'chassis_name',
        'SwitchName', 'switchWwn', 'portIndex', 'slot', 'port',
        'Connected_SwitchName', 'Connected_portIndex', 'Connected_slot',
        'Connected_port', 'Connected_HPE_modelName',
        'Connected_Enet_IP_Addr']
    isl_join_df = isl_aggregated_df[extract_columns_lst].copy()

    # merge portIndex, slot and port number of the connected port
    # into a single 'portIndex-slot-port' identification string
    isl_join_df = isl_join_df.astype(
        {'Connected_portIndex': 'str',
         'Connected_slot': 'str',
         'Connected_port': 'str'},
        errors='ignore')
    isl_join_df['Device_Port'] = (isl_join_df.Connected_portIndex + '-'
                                  + isl_join_df.Connected_slot + '-'
                                  + isl_join_df.Connected_port)

    # rename columns to correspond columns in portshow_aggregated_df
    isl_join_df.rename(columns={'SwitchName': 'switchName',
                                'Connected_SwitchName': 'Device_Host_Name',
                                'Connected_HPE_modelName': 'Device_Model',
                                'Connected_Enet_IP_Addr': 'IP_Address'},
                       inplace=True)

    # columns required to fill empty values in portshow_aggregated_df:
    # first nine are the join keys, the rest are filled
    fillna_columns_lst = [
        'Fabric_name', 'Fabric_label', 'configname', 'chassis_name',
        'switchName', 'switchWwn', 'portIndex', 'slot', 'port',
        'Device_Host_Name', 'Device_Model', 'Device_Port', 'IP_Address']
    isl_join_df = isl_join_df.reindex(columns=fillna_columns_lst)

    # fill empty values in portshow_aggregated_df from isl_join_df
    portshow_aggregated_df = dataframe_fillna(
        portshow_aggregated_df,
        isl_join_df,
        join_lst=fillna_columns_lst[:9],
        filled_lst=fillna_columns_lst[9:])

    return portshow_aggregated_df
Exemple #8
0
def verify_tdz(zoning_modified_df):
    """Function to find target driven zones (TDZ).

    A zone is considered target driven when it is a peer zone whose name
    contains both the 'tdz' tag and the principal member Wwnp with colons
    stripped. Found zones are marked with 'tdz_tag' in zoning_modified_df.
    """

    if 'peerzone_member_type' in zoning_modified_df.columns and zoning_modified_df['peerzone_member_type'].notna().any():
        # zone need to be efficient and peer type
        mask_property = zoning_modified_df['peerzone_member_type'] == 'principal'
        zoning_tdz_df = zoning_modified_df.loc[mask_property].copy()
        zoning_tdz_df.dropna(subset=['PortName'], inplace=True)

        # zone name need contain tdz tag and principal member Wwnp (without colons);
        # regex=False: ':' is a literal, keeps behaviour identical across pandas
        # versions (the regex default changed in pandas 2.0)
        zoning_tdz_df['PortName_colon_free'] = zoning_tdz_df['PortName'].str.replace(':', '', regex=False)
        zoning_tdz_df = zoning_tdz_df.loc[zoning_tdz_df.apply(lambda x: 'tdz' in x.zone and x.PortName_colon_free in x.zone, axis=1)].copy()

        # zone_duplicates_free and tdz_tag columns used for dataframe_fillna
        zoning_tdz_df['zone_duplicates_free'] = zoning_tdz_df['zone']
        zoning_tdz_df['tdz_tag'] = 'tdz_tag'

        tdz_columns = ['Fabric_name', 'Fabric_label', 'cfg', 'cfg_type', 'zone_duplicates_free', 'tdz_tag']
        zoning_modified_df = dataframe_fillna(zoning_modified_df, zoning_tdz_df, filled_lst=tdz_columns[-1:], join_lst=tdz_columns[:-1])

    return zoning_modified_df
Exemple #9
0
def ag_switch_info(switch_params_aggregated_df, ag_principal_df):
    """
    Add switchtype and firmware version of AG switches and VCs collected
    from the Principal switch configuration to switch_params_aggregated_df.
    """

    # map ag_principal_df column titles to the corresponding
    # switch_params_aggregated_df column titles
    ag_translate_dct = {
        'AG_Switch_WWN': 'switchWwn',
        'AG_Switch_Type': 'switchType',
        'AG_Switch_Firmware_Version': 'FOS_version'
    }
    ag_fw_type_df = ag_principal_df[list(ag_translate_dct.keys())].copy()
    ag_fw_type_df.rename(columns=ag_translate_dct, inplace=True)
    # fill information for AG switches and VC matching on switchWwn
    switch_params_aggregated_df = dataframe_fillna(
        switch_params_aggregated_df, ag_fw_type_df,
        join_lst=['switchWwn'],
        filled_lst=['switchType', 'FOS_version'])

    return switch_params_aggregated_df
Exemple #10
0
def add_aglink_connected_port(portshow_aggregated_df, master_native_df,
                              master_native_cisco_df, master_ag_df,
                              slave_native_df, slave_ag_df):
    """
    Combine five AG link groups into one, drop undefined links and add
    connected switch name and port number from the joint AG link DataFrame
    to the main portcmd DataFrame. All AG links from a Native mode switch
    to an AG/NPV switch are marked as NPIV.
    """

    # groups 1, 2, 4 contain links from Native mode switches to AG switches
    native_side_df = pd.concat(
        [master_native_df, master_native_cisco_df, slave_native_df])
    # expected links keep undefined rows as well
    expected_ag_links_df = native_side_df.copy()

    # defined links from the Native side are marked as npiv
    ag_df = native_side_df.dropna(subset=['Device_Host_Name']).copy()
    ag_df['Connected_NPIV'] = 'yes'
    # append access gateway switch groups to both joint and expected tables
    ag_df = pd.concat([ag_df, master_ag_df, slave_ag_df])
    expected_ag_links_df = pd.concat(
        [expected_ag_links_df, master_ag_df, slave_ag_df])
    # drop rows with undefined links
    ag_df = ag_df.dropna(subset=['Device_Host_Name'])

    # copy connected switch name, port number, device type and npiv flag
    # from the joint AG link DataFrame to the main portcmd DataFrame
    join_lst = [
        'Fabric_name', 'Fabric_label', 'switchName', 'switchWwn',
        'Connected_portId', 'portIndex', 'slot', 'port'
    ]
    filled_lst = [
        'Device_Host_Name', 'Device_Port', 'deviceType', 'deviceSubtype',
        'Connected_NPIV'
    ]
    portshow_aggregated_df = dataframe_fillna(portshow_aggregated_df, ag_df,
                                              join_lst, filled_lst)

    return portshow_aggregated_df, expected_ag_links_df
Exemple #11
0
def prior_preparation(errdump_df, switchshow_df, switch_params_aggregated_df):
    """Add switchWwn information and config collection date for each switch,
    perform fabric labeling and convert message dates and config collection
    dates to datetime format."""

    # add switchWwn information
    errdump_aggregated_df = switchshow_join(errdump_df, switchshow_df)
    # fabric labeling
    errdump_aggregated_df = dataframe_fabric_labeling(errdump_aggregated_df,
                                                      switch_params_aggregated_df)
    # add config collection date; drop_na=False keeps rows without a date
    errdump_aggregated_df = dataframe_fillna(errdump_aggregated_df,
                                             switch_params_aggregated_df,
                                             ['configname', 'chassis_name', 'chassis_wwn'],
                                             ['config_collection_date'],
                                             drop_na=False)
    # convert date columns to datetime
    for date_column in ('Message_date', 'config_collection_date'):
        errdump_aggregated_df[date_column] = pd.to_datetime(errdump_aggregated_df[date_column])

    return errdump_aggregated_df
Exemple #12
0
def ag_principal_fillna(portshow_aggregated_df, ag_principal_label_df,
                        switch_models_df):
    """
    Add information from the labeled Principal AG switches DataFrame
    to portshow_aggregated_df.
    """

    # all AG switches and VCFC modules are connected through NPIV
    ag_principal_label_df['Connected_NPIV'] = 'yes'

    # switchType is floored to a whole float value so that it corresponds
    # to the switchType values in switch_models_df
    ag_principal_label_df.switchType = \
        ag_principal_label_df.switchType.astype('float64', errors='ignore')
    ag_principal_label_df.switchType = np.floor(ag_principal_label_df.switchType)
    switch_models_df.switchType = \
        switch_models_df.switchType.astype('float64', errors='ignore')
    # complete the AG DataFrame with switch model information
    ag_principal_label_df = ag_principal_label_df.merge(switch_models_df,
                                                        how='left',
                                                        on='switchType')

    # add AG information to portshow_aggregated_df
    # (first three columns are join keys, the rest are filled)
    ag_principal_label_df = \
        ag_principal_label_df.rename(columns={'HPE_modelName': 'Device_Model'})
    fillna_columns_lst = ['Fabric_name', 'Fabric_label', 'NodeName',
                          'Device_Host_Name', 'IP_Address', 'Device_Fw',
                          'Connected_NPIV', 'Device_Model']
    portshow_aggregated_df = dataframe_fillna(portshow_aggregated_df,
                                              ag_principal_label_df,
                                              join_lst=fillna_columns_lst[:3],
                                              filled_lst=fillna_columns_lst[3:])

    return portshow_aggregated_df
Exemple #13
0
def verify_isl_cfg_equality(isl_aggregated_df):
    """Function to find port configuration parameters which are not equal for both
    sides of ISL connection. Unequal parameter names and values are collected in
    'Unequal_cfg' and 'Connected_Unequal_cfg' columns of isl_aggregated_df."""

    isl_cp_df = isl_aggregated_df.copy()
    # '..' means service or setting is in OFF state
    isl_cp_df.replace(to_replace='..', value='OFF', inplace=True)
    # join QOS_Port and QOS_E_Port columns
    isl_cp_df['QOS_Port'] = isl_cp_df['QOS_Port'].fillna(
        isl_cp_df['QOS_E_Port'])
    isl_cp_df['Connected_QOS_Port'] = isl_cp_df['Connected_QOS_Port'].fillna(
        isl_cp_df['Connected_QOS_E_Port'])
    # list of port settings to be verified for equality from both sides of isl
    cfg_columns = [
        'Speed_Cfg', 'Trunk_Port', 'Long_Distance', 'VC_Link_Init',
        'Locked_E_Port', 'ISL_R_RDY_Mode', 'RSCN_Suppressed', 'LOS_TOV_mode',
        'QOS_Port', 'Rate_Limit', 'Credit_Recovery', 'Compression',
        'Encryption', '10G/16G_FEC', 'Fault_Delay', 'TDZ_mode',
        'Fill_Word(Current)', 'FEC'
    ]

    for cfg in cfg_columns:
        # column names with current main port and connected port configuration parameter of the switch
        connected_cfg = 'Connected_' + cfg
        # column names with current unequal main port and connected port configuration parameter
        unequal_cfg = cfg + '_unequal'
        connected_unequal_cfg = connected_cfg + '_unequal'
        # both ports must have port cfg parameters in some state (not na)
        mask_notna = isl_cp_df[[cfg, connected_cfg]].notna().all(axis=1)
        # parameter value is not equal for both sides of isl connection
        mask_differnt_cfg = isl_cp_df[cfg] != isl_cp_df[connected_cfg]
        # add parameter name and its value to the column with name containing
        # the parameter name and the 'unequal' tag for main and connected ports
        isl_cp_df.loc[mask_notna & mask_differnt_cfg, unequal_cfg] = \
            cfg + ': ' + isl_cp_df[cfg].astype('str')
        isl_cp_df.loc[mask_notna & mask_differnt_cfg, connected_unequal_cfg] = \
            cfg + ': ' + isl_cp_df[connected_cfg].astype('str')

    # column names with unequal parameter names and values for main and connected ports
    unequal_cfg_columns = [cfg + '_unequal' for cfg in cfg_columns]
    connected_unequal_cfg_columns = [
        'Connected_' + cfg for cfg in unequal_cfg_columns
    ]

    # join all columns with unequal parameters for main and connected ports separately
    isl_cp_df = сoncatenate_columns(isl_cp_df,
                                    summary_column='Unequal_cfg',
                                    merge_columns=unequal_cfg_columns,
                                    sep=', ',
                                    drop_merge_columns=True)
    isl_cp_df = сoncatenate_columns(
        isl_cp_df,
        summary_column='Connected_Unequal_cfg',
        merge_columns=connected_unequal_cfg_columns,
        sep=', ',
        drop_merge_columns=True)

    # add unequal parameters values for both sides of the link to isl_aggregated_df
    # (all columns except the last two identify the link; the last two are filled)
    isl_cfg_columns = [
        'configname', 'chassis_name', 'chassis_wwn', 'SwitchName', 'switchWwn',
        'slot', 'port', 'Connected_SwitchName', 'Connected_switchWwn',
        'Connected_slot', 'Connected_port', 'Unequal_cfg',
        'Connected_Unequal_cfg'
    ]
    isl_aggregated_df = dataframe_fillna(isl_aggregated_df,
                                         isl_cp_df,
                                         join_lst=isl_cfg_columns[:-2],
                                         filled_lst=isl_cfg_columns[-2:])
    return isl_aggregated_df
Exemple #14
0
def zonemember_statistics(zoning_aggregated_df, report_data_lst):
    """Main function to create zonemembers statistics.
    Returns both combined (zone + cfgtype level) statistics and
    zone level statistics DataFrames."""

    zoning_modified_df, zoning_duplicated_df, zoning_pairs_df = modify_zoning(zoning_aggregated_df)

    # get statistics DataFrames for zone and cfgtype level statistics
    zonemember_zonelevel_stat_df = count_zonemember_statistics(zoning_modified_df)
    zonemember_cfgtypelevel_stat_df = count_zonemember_statistics(zoning_modified_df, zone=False)
    zonemember_zonelevel_stat_df.reset_index(inplace=True)
    # drop duplicated All row (kept in the cfgtype level statistics only)
    zonemember_zonelevel_stat_df.drop(zonemember_zonelevel_stat_df.index[zonemember_zonelevel_stat_df['Fabric_name'] == 'All'], inplace = True)
    zonemember_cfgtypelevel_stat_df.reset_index(inplace=True)
    # add defined and actual wwn number for each zone
    zone_wwn_number_df = defined_actual_wwn_number(zoning_aggregated_df, df_type='zone')
    
    zonemember_zonelevel_stat_df = zonemember_zonelevel_stat_df.merge(zone_wwn_number_df, how='left', 
                                                                        on=['Fabric_name', 'Fabric_label', 'cfg', 'cfg_type', 'zone'])

    # if zone is empty (no active devices) fill device_type columns (target/initiator) with zeroes
    device_type_columns = [column for column in zonemember_zonelevel_stat_df.columns if ('initiator' in column.lower() or 'target' in column.lower())]

    mask_empty_zone = zonemember_zonelevel_stat_df['Total_zonemembers_active'] == 0
    zonemember_zonelevel_stat_df.loc[mask_empty_zone, device_type_columns] = \
        zonemember_zonelevel_stat_df.loc[mask_empty_zone, device_type_columns].fillna(0)

    # add list of zone pairs to each zone in statistics
    # (first four columns are join keys, the rest are filled)
    zoning_paired_columns = ['Fabric_name', 'Fabric_label',  'cfg_type',  'zone',
                                'Zone_name_device_names_ratio', 'Zone_name_device_names_related', 
                                'All_devices_multiple_fabric_label_connection', 'zone_paired',
                                'Zone_and_Pairzone_names_ratio', 'Zone_and_Pairzone_names_related']
    zonemember_zonelevel_stat_df = dataframe_fillna(zonemember_zonelevel_stat_df, zoning_pairs_df, 
                                                        join_lst=zoning_paired_columns[:4], filled_lst=zoning_paired_columns[4:])

    # add list of identical (duplicated) zones to each zone in statistics
    zoning_duplicated_columns = ['Fabric_name', 'Fabric_label',  'cfg',  'cfg_type',  'zone', 'zone_duplicated']
    zonemember_zonelevel_stat_df = dataframe_fillna(zonemember_zonelevel_stat_df, zoning_duplicated_df, 
                                                        join_lst=zoning_duplicated_columns[:-1], filled_lst=[zoning_duplicated_columns[-1]])


    # add 'Target_Initiator'and 'Target_model' notes to zonemember_zonelevel_stat_df DataFrame
    zonemember_zonelevel_stat_df = note_zonemember_statistics(zonemember_zonelevel_stat_df)
    # add note if zone is not used in effective configuration:
    # collect all cfg types a zone appears in, then mark zones never seen in the effective config
    grp_columns = ['Fabric_name', 'Fabric_label', 'zone']
    zonemember_zonelevel_stat_df['Effective_cfg_usage_note'] = zonemember_zonelevel_stat_df.groupby(by=grp_columns)['cfg_type'].transform(lambda x: ', '.join(set(x)))
    mask_non_effective = ~zonemember_zonelevel_stat_df['Effective_cfg_usage_note'].str.contains('effective')
    zonemember_zonelevel_stat_df['Effective_cfg_usage_note'] = np.where(mask_non_effective, 'unused_zone', pd.NA)
    # normalize missing note values to np.nan
    zonemember_zonelevel_stat_df['Effective_cfg_usage_note'].fillna(np.nan, inplace=True)


    # TO_REMOVE rellocated up
    # # add list of identical (duplicated) zones to each zone in statistics
    # zoning_duplicated_columns = ['Fabric_name', 'Fabric_label',  'cfg',  'cfg_type',  'zone', 'zone_duplicated']
    # zonemember_zonelevel_stat_df = dataframe_fillna(zonemember_zonelevel_stat_df, zoning_duplicated_df, 
    #                                                     join_lst=zoning_duplicated_columns[:-1], filled_lst=[zoning_duplicated_columns[-1]])
    # # add list of zone pairs to each zone in statistics
    # zoning_paired_columns = ['Fabric_name', 'Fabric_label',  'cfg_type',  'zone', 
    #                             'All_devices_multiple_fabric_label_connection', 'zone_paired']
    # zonemember_zonelevel_stat_df = dataframe_fillna(zonemember_zonelevel_stat_df, zoning_pairs_df, 
    #                                                     join_lst=zoning_paired_columns[:4], filled_lst=zoning_paired_columns[4:])


    # remove duplicated and paired zones list if current zone is non-working zone (duplication of working zones only required)
    # list of duplicated zones is removed but duplication tag remains  
    mask_valid_zone = ~zonemember_zonelevel_stat_df['Target_Initiator_note'].isin(['no_target', 'no_initiator', 'no_target, no_initiator', 'no_target, several_initiators'])
    columns = ['zone_duplicated', 'zone_paired', 
                'Zone_name_device_names_ratio', 'Zone_name_device_names_related',
                'Zone_and_Pairzone_names_ratio', 'Zone_and_Pairzone_names_related']
    zonemember_zonelevel_stat_df[columns] = zonemember_zonelevel_stat_df[columns].where(mask_valid_zone)

    # TO_REMOVE 
    # zonemember_zonelevel_stat_df['zone_duplicated'] = zonemember_zonelevel_stat_df['zone_duplicated'].where(mask_valid_zone)
    # zonemember_zonelevel_stat_df['zone_paired'] = zonemember_zonelevel_stat_df['zone_paired'].where(mask_valid_zone)

    # sort values
    zonemember_zonelevel_stat_df.sort_values(by=['Fabric_name', 'Fabric_label', 'cfg_type', 'cfg', 'zone'],
                                                ascending=[True, True, False, True, True], inplace=True, ignore_index=True)
    # concatenate both statistics
    zonemember_statistics_df = pd.concat([zonemember_zonelevel_stat_df, zonemember_cfgtypelevel_stat_df], ignore_index=True)

    return zonemember_statistics_df, zonemember_zonelevel_stat_df
Exemple #15
0
def errdump_portshow(errdump_aggregated_df, portshow_aggregated_df):
    """Function to add port and connected device information to errdump_aggregated_df.

    Port identification is attempted in order of decreasing precision:
    1. slot/port filled from Message_portIndex (chassis + portIndex match),
    2. missing slot defaulted to '0' when port number is known,
    3. merge of full port/device details on chassis + slot + port,
    4. device details filled from Message_portId (connected PID match),
    5. fabric name/label filled from chassis + slot + port.
    Returns errdump_aggregated_df with port, device and fabric columns added."""

    # only ports with an identified connected device are useful as a fill source
    mask_device_name = portshow_aggregated_df['Device_Host_Name'].notna()
    # if Message_portIndex is present but port number is not then fillna slot and port number from portshow
    if (errdump_aggregated_df['Message_portIndex'].notna()
            & errdump_aggregated_df['port'].isna()).any():

        portshow_join_df = portshow_aggregated_df.loc[mask_device_name].copy()
        # align column name so the join key matches errdump's Message_portIndex
        portshow_join_df.rename(columns={'portIndex': 'Message_portIndex'},
                                inplace=True)
        portshow_join_columns = [
            'configname', 'chassis_name', 'chassis_wwn', 'Message_portIndex'
        ]
        errdump_aggregated_df = dataframe_fillna(
            errdump_aggregated_df,
            portshow_join_df,
            join_lst=portshow_join_columns,
            filled_lst=['slot', 'port'],
            remove_duplicates=False,
            drop_na=False)

    # if port number present but not slot number then slot number is zero
    # (rows selected by the mask have NaN slot by construction, so assign directly)
    mask_slot_na = errdump_aggregated_df['slot'].isna()
    mask_portnumber = errdump_aggregated_df['port'].notna()
    errdump_aggregated_df.loc[mask_portnumber & mask_slot_na, 'slot'] = '0'

    # add port and connected device information based chassis info, slot and port number
    portshow_columns = [
        'configname', 'chassis_name', 'chassis_wwn', 'slot', 'port',
        'portIndex', 'Index_slot_port', 'portType', 'portState', 'speed',
        'Connected_portId', 'Connected_portWwn', 'Device_Host_Name',
        'Device_Port', 'Device_Location', 'deviceType', 'deviceSubtype'
    ]

    portshow_join_df = portshow_aggregated_df[portshow_columns].copy()
    # merge only when at least one row has both slot and port identified
    if errdump_aggregated_df[['slot', 'port']].notna().all(axis=1).any():
        errdump_aggregated_df = errdump_aggregated_df.merge(
            portshow_join_df, how='left', on=portshow_columns[:5])

    # add empty columns if there was no merge with portshow_join_df
    # so downstream code can rely on their presence
    add_empty_columns = [
        column for column in portshow_columns[3:]
        if column not in errdump_aggregated_df.columns
    ]
    if add_empty_columns:
        errdump_aggregated_df[add_empty_columns] = np.nan

    # add device information based on pid (Connected_portId reported in the message)
    if errdump_aggregated_df['Message_portId'].notna().any():
        portshow_join_df = portshow_aggregated_df.loc[mask_device_name].copy()
        portshow_join_df['Message_portId'] = portshow_join_df[
            'Connected_portId']
        errdump_aggregated_df = dataframe_fillna(
            errdump_aggregated_df,
            portshow_join_df,
            join_lst=[*portshow_columns[:3], 'Message_portId'],
            filled_lst=portshow_columns[3:],
            remove_duplicates=False,
            drop_na=False)

    # add fabric name and label for switches with chassis info, slot and port
    errdump_aggregated_df = \
        dataframe_fillna(errdump_aggregated_df, portshow_aggregated_df, portshow_columns[:5], ['Fabric_name', 'Fabric_label'])

    # concatenate device name and device port columns
    # (stack drops NaN, so rows missing either part keep only the present one)
    errdump_aggregated_df['Device_Host_Name_Port'] = \
        errdump_aggregated_df[['Device_Host_Name', 'Device_Port']].stack().groupby(level=0).agg(' port '.join)

    return errdump_aggregated_df
Exemple #16
0
def verify_trunkarea_link(portshow_aggregated_df, porttrunkarea_df):
    """Function to fill missing device information for  slave links of trunk area links.
    Device information obtained from master link. And then identify NPIV link number.
    All links inside trunk area link have same number (analogy with ISL number within the trunk)."""

    # counter for switch wwn appeared in DataFrames
    # (shared mutable state: npiv_link_counter below increments it both for
    # trunk master links and, later, for standalone NPIV links, so numbering
    # continues across the two passes for the same switch)
    switch_npiv_link_dct = defaultdict(int)

    # key columns identifying a switch
    switch_columns = [
        'configname', 'chassis_name', 'chassis_wwn', 'switchName', 'switchWwn'
    ]
    # slot/port of the trunk master link (used as the join key to pull
    # master-link data onto slave links of the same trunk area)
    master_port_columns = ['Master_slot', 'Master_port']
    port_columns = ['slot', 'port']
    # device details copied from master link to slave links;
    # last element is 'NPIV_link_number' (sliced off where the number
    # must not be propagated yet)
    device_columns = [
        'NodeName', 'Device_type', 'Device_Model', 'Device_Fw', 'Device_Name',
        'IP_Address', 'Device_Host_Name', 'HBA_Manufacturer', 'deviceType',
        'deviceSubtype', 'Connected_NPIV', 'NPIV_link_number'
    ]

    def npiv_link_counter(series):
        "Aux function to identify npiv link number"

        # each call bumps the per-switch counter and returns the new value,
        # so rows processed in order get sequential link numbers per switch
        switch_npiv_link_dct[series['switchWwn']] += 1
        return switch_npiv_link_dct[series['switchWwn']]

    if not porttrunkarea_df.empty:
        portshow_cp_df = portshow_aggregated_df.copy()
        porttrunkarea_cp_df = porttrunkarea_df.copy()
        # align column name with portshow's 'switchName' for joining
        porttrunkarea_cp_df.rename(columns={'SwitchName': 'switchName'},
                                   inplace=True)

        # filter devices connected behind npiv (except AG links and links between switch and VC module)
        # all devices with FCID xxxx00 are directly connected
        re_zero_fcid = r'\w{4}00'
        # NOTE(review): str.contains without na= yields NaN for missing
        # Connected_portId values — presumably that column is always filled
        # here; confirm, otherwise the boolean indexing below may raise
        mask_zero_fcid = portshow_cp_df['Connected_portId'].str.contains(
            pat=re_zero_fcid)
        portshow_cp_df = portshow_cp_df.loc[mask_zero_fcid]

        # npiv trunk area link number defined by link number of trunk master link
        master_port = porttrunkarea_cp_df['State'] == 'Master'
        porttrunkarea_master_df = porttrunkarea_cp_df.loc[master_port].copy()
        # number each master link sequentially per switch via the shared counter
        porttrunkarea_master_df[
            'NPIV_link_number'] = porttrunkarea_master_df.apply(
                lambda series: npiv_link_counter(series), axis=1)
        # propagate master's link number to all links of the same trunk area
        porttrunkarea_cp_df = dataframe_fillna(
            porttrunkarea_cp_df,
            porttrunkarea_master_df,
            join_lst=[*switch_columns, *master_port_columns],
            filled_lst=['NPIV_link_number'])

        # in portshow each (directly connected) port is its own "master":
        # duplicate slot/port so portshow rows can be joined on the master keys
        portshow_cp_df['Master_slot'] = portshow_cp_df['slot']
        portshow_cp_df['Master_port'] = portshow_cp_df['port']
        # cast join keys to str on both sides so values compare equal
        # regardless of original numeric/object dtype
        portshow_cp_df = portshow_cp_df.astype(
            {
                'slot': 'str',
                'port': 'str',
                'Master_slot': 'str',
                'Master_port': 'str'
            },
            errors='ignore')
        porttrunkarea_cp_df = porttrunkarea_cp_df.astype(
            {
                'slot': 'str',
                'port': 'str',
                'Master_slot': 'str',
                'Master_port': 'str'
            },
            errors='ignore')
        # fill device information for each link in trunk area link based on trunk master link
        porttrunkarea_cp_df = dataframe_fillna(
            porttrunkarea_cp_df,
            portshow_cp_df,
            join_lst=[*switch_columns, *master_port_columns],
            filled_lst=[*device_columns[:-1], 'Connected_portId'])
        # add device information for slave trunk area links
        # (cast portshow join keys to str to match porttrunkarea above)
        portshow_aggregated_df = portshow_aggregated_df.astype(
            {
                'portIndex': 'str',
                'slot': 'str',
                'port': 'str'
            },
            errors='ignore')
        portshow_aggregated_df = dataframe_fillna(
            portshow_aggregated_df,
            porttrunkarea_cp_df,
            join_lst=[*switch_columns, 'Connected_portId', *port_columns],
            filled_lst=device_columns)
    else:
        # no trunk area links at all: create the column so the code below
        # (and downstream consumers) can rely on its presence
        portshow_aggregated_df['NPIV_link_number'] = np.nan

    # number npiv links which are not part of the trunk area link
    # each link in that case is independent and have it's own number
    mask_npiv = portshow_aggregated_df['Connected_NPIV'] == 'yes'
    mask_empty_npiv_number = portshow_aggregated_df['NPIV_link_number'].isna()
    portshow_npiv_df = portshow_aggregated_df.loc[
        mask_npiv & mask_empty_npiv_number].copy()
    # counter state carries over from the trunk-master pass, so standalone
    # links continue the per-switch numbering instead of restarting at 1
    portshow_npiv_df['NPIV_link_number'] = portshow_npiv_df.apply(
        lambda series: npiv_link_counter(series), axis=1)
    # add npiv links numbers for links out of trunk area links to portshow_aggregated_df DataFrame
    portshow_aggregated_df = dataframe_fillna(
        portshow_aggregated_df,
        portshow_npiv_df,
        join_lst=[*switch_columns, 'Connected_portId', *port_columns],
        filled_lst=['NPIV_link_number'])
    return portshow_aggregated_df
Exemple #17
0
def modify_zoning(zoning_aggregated_df):
    """Function to modify zoning_aggregated_df DataFrame to count statistics.

    Returns a tuple (zoning_modified_df, zoning_duplicated_df, zoning_pairs_df):
    a copy of the aggregated zoning table prepared for statistics counting,
    the DataFrame of duplicated zones and the DataFrame of paired zones."""

    statistics_columns_lst = ['deviceType', 'deviceSubtype', 'Device_type', 'Wwn_type', 'peerzone_member_type'] 

    # to count zonemeber stitistics it is required to make
    # changes in zoning_aggregated_df DataFrame
    zoning_modified_df = zoning_aggregated_df.copy()
    # All classes of servers are considered to be SRV class
    zoning_modified_df.deviceType.replace(to_replace={'BLADE_SRV': 'SRV', 'SYNERGY_SRV': 'SRV', 'SRV_BLADE': 'SRV', 'SRV_SYNERGY': 'SRV'}, inplace=True)
    # deviceType transformed to be combination if device class and device type
    zoning_modified_df.deviceSubtype = zoning_modified_df['deviceType'] + ' ' + zoning_modified_df['deviceSubtype']
    # servers device type is not important for zonemember analysys
    mask_srv = zoning_modified_df.deviceType.str.contains('SRV', na=False)
    zoning_modified_df.deviceSubtype = np.where(mask_srv, np.nan, zoning_modified_df.deviceSubtype)
    # tag duplicated PortWwnp in zone
    # (np.where turns np.nan into the string 'nan'; cleaned up by the
    # replace() call at the end of this function)
    mask_wwnp_duplicated = zoning_modified_df['wwnp_instance_number_per_zone'] > 1
    zoning_modified_df['Wwnp_duplicated'] = np.where(mask_wwnp_duplicated, 'Wwnp_duplicated', np.nan)

    """
    We are interested to count connected devices statistics only.
    Connected devices are in the same fabric with the switch which 
    zoning configurutaion defined in (local) or imported to that fabric
    in case of LSAN zones (imported).
    Ports with status remote_na, initializing and configured considered to be
    not connected (np.nan) and thus it's 'deviceType', 'deviceSubtype', 'Device_type', 
    'Wwn_type', 'peerzone_member_type' are not taking into acccount.
    'peerzone_member_type' for Peerzone property member is not changed and counted in statistics. 
    But device status for not connected ports is reflected in zonemember statistics.
    """  
    mask_connected = zoning_aggregated_df['Fabric_device_status'].isin(['local', 'imported'])
    mask_peerzone_property = zoning_aggregated_df['peerzone_member_type'].str.contains('property', na=False)
    # replace values of not connected, non-property rows with NaN.
    # Note: where() defaults to other=np.nan — the previous explicit
    # pd.Series((np.nan*len(...))) evaluated to a scalar NaN wrapped in a
    # length-1 Series (np.nan * int is nan, not a list) and only produced
    # NaN through index misalignment
    zoning_modified_df[statistics_columns_lst] = \
        zoning_modified_df[statistics_columns_lst].where(mask_connected | mask_peerzone_property)

    mask_zone_name = zoning_modified_df['zone_duplicates_free'].isna()
    zoning_modified_df['zone_tag'] = zoning_modified_df['zone_duplicates_free'].where(mask_zone_name, 'zone_tag')
    # lsan_tag was added in analysis_zoning_aggregation module
    zoning_modified_df['lsan_tag'] = zoning_modified_df['lsan_tag'].where(~mask_zone_name, np.nan)
    # add tdz_tag
    zoning_modified_df = verify_tdz(zoning_modified_df)
    # add qos zone tag (na=False keeps the mask boolean for rows with
    # missing zone names; those rows are excluded by ~mask_zone_name anyway)
    mask_qos = zoning_modified_df['zone_duplicates_free'].str.contains(r'^QOS[LMH]\d', na=False)
    zoning_modified_df.loc[~mask_zone_name & mask_qos, 'qos_tag'] = 'qos_tag'

    # verify duplicated zones (zones with the same set of PortWwns)
    zoning_duplicated_df = verify_duplicated_zones(zoning_aggregated_df)
    zoning_duplicated_columns = ['Fabric_name', 'Fabric_label',  'cfg',  'cfg_type',  'zone_duplicates_free', 'zone_duplicated_tag']
    # add zone_duplicated_tag for each duplicated zone from zone_duplicates_free column (to count each zone only once further)
    zoning_modified_df = \
        dataframe_fillna(zoning_modified_df, zoning_duplicated_df, join_lst=zoning_duplicated_columns[:-1], filled_lst=[zoning_duplicated_columns[-1]])

    # find zone pairs (zones with the same set device names) in another fabric_labels of the same fabric_name
    zoning_pairs_df = verify_pair_zones(zoning_aggregated_df)
    zoning_pairs_columns = ['Fabric_name', 'Fabric_label',  'cfg_type',  'zone_duplicates_free', 'zone_paired_tag']
    # add zone_paired_tag for each paired zone from zone_duplicates_free column (to count each zone only once further)
    zoning_modified_df = \
        dataframe_fillna(zoning_modified_df, zoning_pairs_df, join_lst=zoning_pairs_columns[:-1], filled_lst=[zoning_pairs_columns[-1]]) 

    # clean up 'nan' strings produced by np.where above
    zoning_modified_df.replace(to_replace='nan', value=np.nan, inplace=True)

    return zoning_modified_df, zoning_duplicated_df, zoning_pairs_df
Exemple #18
0
def verify_pair_zones(zoning_aggregated_df):
    """Function to find pair zone or zones in other fabric_labels of the same fabric_name.

    Two zones are considered a pair when they contain the same sorted set of
    device names within the same fabric_name and cfg_type but different
    fabric_labels. Returns zoning_pairs_df with 'zone_paired',
    'zone_paired_tag' and zone-name ratio columns added."""

    columns = ['Fabric_name', 'Fabric_label', 'cfg_type']
    # only connected devices participate in pairing
    mask_connected = zoning_aggregated_df['Fabric_device_status'].isin(['local', 'remote_imported'])
    zoning_cp_df = zoning_aggregated_df.loc[mask_connected].copy()

    # drop duplicated Wwnp in each zone
    zoning_cp_df.drop_duplicates(subset=[*columns, 'zone', 'PortName'], inplace=True)

    # verify if all devices in each zone are connected to other faric_labels of the same fabric_name
    grp_columns = columns + ['zone']
    mask_all_devices_multiple_fabric_label_connection = \
        zoning_cp_df.groupby(by=grp_columns)['Multiple_fabric_label_connection'].transform(lambda series: series.isin(['Yes']).all())
    zoning_cp_df['All_devices_multiple_fabric_label_connection'] = np.where(mask_all_devices_multiple_fabric_label_connection, 'Yes', 'No')

    # group device names of each zone to sorted set thus removing duplicates and present it as comma separated list
    grp_columns.append('All_devices_multiple_fabric_label_connection')
    zoning_grp_df = zoning_cp_df.groupby(by=grp_columns)['Device_Host_Name'].agg(lambda x: ', '.join(sorted(set(x))))
    zoning_grp_df = pd.DataFrame(zoning_grp_df)
    zoning_grp_df.reset_index(inplace=True)

    fabric_name_lst = zoning_grp_df['Fabric_name'].unique().tolist()
    fabric_label_lst = zoning_grp_df['Fabric_label'].unique().tolist()

    # columns take into account zone members (zone devices) in cfg_type for each zone 
    # cfg_name is not taken into account since it might differ in fabrics
    cfgtype_zone_device_columns = [*columns, 'All_devices_multiple_fabric_label_connection', 'Device_Host_Name']
    # DataFrame contains list of zones with its pairs for each fabric and cfg_type
    zoning_pairs_df = pd.DataFrame()                           
    # list of columns with zone pairs for concatenation below
    zone_paired_columns = set()

    for fabric_name in fabric_name_lst:
        for fabric_label in fabric_label_lst:
            # fabric labels to be verified to find pair zones
            verified_label_lst = fabric_label_lst.copy()
            # fabric_label for which pair zones are verified should be removed from the list
            verified_label_lst.remove(fabric_label)
            # DataFrame with zones for which pair zones are need to be find
            mask_current = (zoning_grp_df[['Fabric_name', 'Fabric_label']] == (fabric_name, fabric_label)).all(axis=1)
            current_df = zoning_grp_df.loc[mask_current].copy()
            if not current_df.empty:
                # check zoning configurations in each verified fabrics to find pair zones
                for verified_label in verified_label_lst:
                    # DataFrame with zones in which pair zones are searched for
                    mask_verified = (zoning_grp_df[['Fabric_name', 'Fabric_label']] == (fabric_name, verified_label)).all(axis=1)
                    verified_df = zoning_grp_df.loc[mask_verified].copy()
                    if not verified_df.empty:
                        # if there are more then one zone with identical list of devices
                        verified_grp_df = verified_df.groupby(by=cfgtype_zone_device_columns)['zone'].agg(lambda x: ', '.join(sorted(set(x))))
                        verified_grp_df = pd.DataFrame(verified_grp_df)
                        verified_grp_df.reset_index(inplace=True)
                        # if there are more then two fabric_labels in fabric_name then use fabric_label tag with the name of pair zone
                        # (bug fix: previously read 'zone' from the accumulator
                        # zoning_pairs_df, which is an empty DataFrame on the
                        # first iteration and raised KeyError; the fabric_label
                        # tag must wrap verified_grp_df's own zone names)
                        if len(fabric_label_lst) > 2:
                            mask_zone_notna = verified_grp_df['zone'].notna()
                            verified_grp_df.loc[mask_zone_notna, 'zone'] = '(' + verified_label + ': ' + verified_grp_df.loc[mask_zone_notna, 'zone'] + ')'
                        # to merge pair zones change fabric_label in verified fabric to fabric_label of fabric for which pair zones are searched for
                        verified_grp_df['Fabric_label'] = fabric_label
                        # column name with pair zones in verified fabric_label
                        zone_paired_column = 'zone_paired_' + str(verified_label)
                        zone_paired_columns.add(zone_paired_column)
                        verified_grp_df.rename(columns={'zone': zone_paired_column}, inplace=True)
                        # add column with pair zones in verified fabric_label to zoning configuration
                        current_df = dataframe_fillna(current_df, verified_grp_df, 
                                                    join_lst=cfgtype_zone_device_columns, 
                                                    filled_lst=[zone_paired_column])           
                # add zoning configuration with pair zones in all fabric_labels to general zoning configuration DataFrame
                zoning_pairs_df = pd.concat([zoning_pairs_df, current_df])

    # merge per-label pair columns into a single comma separated 'zone_paired' column
    zone_paired_columns = list(zone_paired_columns)
    zoning_pairs_df = сoncatenate_columns(zoning_pairs_df, summary_column='zone_paired', 
                                            merge_columns=zone_paired_columns, sep=', ', drop_merge_columns=True)
    # add zone_paired_tag
    mask_zone_notna = zoning_pairs_df['zone_paired'].notna()
    zoning_pairs_df.loc[mask_zone_notna, 'zone_paired_tag'] = 'zone_paired_tag'
    zoning_pairs_df['zone_duplicates_free'] = zoning_pairs_df['zone']
    # verify if zonename related with pair zone name and device names included in each zone
    zoning_pairs_df = verify_zonename_ratio(zoning_pairs_df)
    return zoning_pairs_df
Exemple #19
0
def storage_host_aggregation(host_3par_df, system_3par_df, port_3par_df,
                             portshow_aggregated_df, zoning_aggregated_df):
    """Function to create aggregated storage host presentation DataFrame.

    Combines 3PAR host, system and port tables with switch port and zoning
    information. Returns an empty DataFrame when no 3PAR system data exists."""

    if system_3par_df.empty:
        return pd.DataFrame()

    storage_host_aggregated_df = host_3par_df.copy()
    # add system_name
    storage_host_aggregated_df = dataframe_fillna(storage_host_aggregated_df,
                                                  system_3par_df,
                                                  join_lst=['configname'],
                                                  filled_lst=['System_Name'])
    # add controller's ports Wwnn and Wwnp
    storage_host_aggregated_df = dataframe_fillna(
        storage_host_aggregated_df,
        port_3par_df,
        join_lst=['configname', 'Storage_Port'],
        filled_lst=['NodeName', 'PortName'])
    # convert Wwnn and Wwnp to regular represenatation (lower case with colon delimeter)
    storage_host_aggregated_df = convert_wwn(
        storage_host_aggregated_df, ['Host_Wwn', 'NodeName', 'PortName'])
    # add controllers ports Fabric_name and Fabric_label
    storage_host_aggregated_df = dataframe_fillna(
        storage_host_aggregated_df,
        portshow_aggregated_df,
        join_lst=['PortName'],
        filled_lst=['Fabric_name', 'Fabric_label'])
    # rename controllers NodeName and PortName
    # (bug fix: inplace=bool passed the builtin type object, truthy only by accident)
    rename_columns = {
        'NodeName': 'Storage_Port_Wwnn',
        'PortName': 'Storage_Port_Wwnp'
    }
    storage_host_aggregated_df.rename(columns=rename_columns, inplace=True)

    # 'clean' Wwn column to have Wwnp only (Host_Wwn may contain a Wwnn)
    storage_host_aggregated_df = replace_wwnn(storage_host_aggregated_df,
                                              'Host_Wwn',
                                              portshow_aggregated_df,
                                              ['NodeName', 'PortName'])

    # add Host Wwnp zoning device status in fabric of storage port connection
    storage_host_aggregated_df = dataframe_fillna(
        storage_host_aggregated_df,
        zoning_aggregated_df,
        join_lst=['Fabric_name', 'Fabric_label', 'PortName'],
        filled_lst=['Fabric_device_status'])

    # rename controllers Fabric_name and Fabric_label
    rename_columns = {
        'Fabric_name': 'Storage_Fabric_name',
        'Fabric_label': 'Storage_Fabric_label',
        'Fabric_device_status': 'Fabric_host_status'
    }
    storage_host_aggregated_df.rename(columns=rename_columns, inplace=True)

    # add host information
    host_columns = [
        'Fabric_name', 'Fabric_label', 'chassis_name', 'switchName',
        'Index_slot_port', 'Connected_portId', 'Device_Host_Name',
        'Device_Port', 'Host_OS', 'Device_Location',
        'Device_Host_Name_per_fabric_name_and_label',
        'Device_Host_Name_per_fabric_label',
        'Device_Host_Name_per_fabric_name', 'Device_Host_Name_total_fabrics'
    ]
    storage_host_aggregated_df = dataframe_fillna(storage_host_aggregated_df,
                                                  portshow_aggregated_df,
                                                  join_lst=['PortName'],
                                                  filled_lst=host_columns,
                                                  remove_duplicates=False)

    # rename host columns
    rename_columns = {
        'Fabric_name': 'Host_Fabric_name',
        'Fabric_label': 'Host_Fabric_label',
        'PortName': 'Host_Wwnp'
    }
    storage_host_aggregated_df.rename(columns=rename_columns, inplace=True)

    # verify if host and storage ports are in the same fabric
    storage_host_aggregated_df = sequential_equality_note(
        storage_host_aggregated_df, ['Host_Fabric_name', 'Host_Fabric_label'],
        ['Storage_Fabric_name', 'Storage_Fabric_label'],
        'Host_Storage_Fabric_equal')
    # verify persona (host mode) is defined in correspondence with host os
    storage_host_aggregated_df = verify_host_mode(storage_host_aggregated_df)
    # verify if storage port and host port are zoned
    storage_host_aggregated_df = verify_storage_host_zoning(
        storage_host_aggregated_df, zoning_aggregated_df)
    # sort aggregated DataFrame
    sort_columns = ['System_Name', 'Host_Id', 'Host_Name', 'Storage_Port']
    storage_host_aggregated_df.sort_values(by=sort_columns, inplace=True)
    return storage_host_aggregated_df