Ejemplo n.º 1
0
def sensor_analysis_main(sensor_df, switch_params_aggregated_df,
                         report_columns_usage_dct, report_data_lst):
    """Main function to analyze sensor readings and build the sensor report table"""

    # report_data_lst layout:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names used to save data produced by this module
    data_names = ['sensor_aggregated', 'Датчики']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # restore data saved on a previous program execution iteration, if any
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    sensor_aggregated_df, sensor_report_df = data_lst

    # tables from report_info required during this module execution
    analyzed_data_names = [
        'switch_params_aggregated', 'fabric_labels', 'sensor'
    ]

    # re-run when any data from data_lst was not saved (file not found) or
    # execution is explicitly forced for output data or data used during execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if not force_run:
        # loaded data may hold an information string instead of a table;
        # replace such entries with empty DataFrames
        sensor_aggregated_df, sensor_report_df = verify_data(
            report_data_lst, data_names, *data_lst)
        data_lst = [sensor_aggregated_df, sensor_report_df]
    else:
        # current operation information string
        info = 'Generating sensor readings table'
        print(info, end=" ")

        # build aggregated sensor DataFrame
        sensor_aggregated_df = sensor_aggregation(sensor_df,
                                                  switch_params_aggregated_df)
        # display operation status when aggregation is finished
        status_info('ok', max_title, len(info))

        # build report table from the aggregated data
        sensor_report_df = sensor_report(sensor_aggregated_df, data_names,
                                         report_columns_usage_dct, max_title)

        # list with partitioned DataFrames
        data_lst = [sensor_aggregated_df, sensor_report_df]
        # persist obtained data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)

    # export each table to a service xlsx file if required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return sensor_aggregated_df
Ejemplo n.º 2
0
def blade_system_extract(blade_folder, report_data_lst):
    """Extract blade systems information from Onboard Administrator / Virtual
    Connect text configuration files: enclosures, interconnect modules,
    blade servers and installed hba/flex adapters.

    Returns three lists of rows:
    module_comprehensive_lst, blades_comprehensive_lst, blade_vc_comprehensive_lst.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['blade_interconnect', 'blade_servers', 'blade_vc']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    module_comprehensive_lst, blades_comprehensive_lst, blade_vc_comprehensive_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):

        # lists to store only REQUIRED information
        # collecting data for all blades during looping
        # list containing enclosure, blade and hba information for all blade systems
        blades_comprehensive_lst = []
        # list containing enclosure and interconnect modules information for all blade systems
        module_comprehensive_lst = []
        # list containing virtual connect ports information for all blade systems
        blade_vc_comprehensive_lst = []

        if blade_folder:
            print('\nEXTRACTING BLADES SYSTEM INFORMATION ...\n')

            # collects files in folder with txt and log extensions
            txt_files = find_files(blade_folder,
                                   max_title,
                                   filename_extension='txt')
            log_files = find_files(blade_folder,
                                   max_title,
                                   filename_extension='log')
            blade_configs_lst = txt_files + log_files
            # number of files to check
            configs_num = len(blade_configs_lst)

            if configs_num:

                # data imported from init file to extract values from config file
                enclosure_params, _, comp_keys, match_keys, comp_dct = data_extract_objects(
                    'blades', max_title)
                module_params = columns_import('blades', max_title,
                                               'module_params')
                blade_params = columns_import('blades', max_title,
                                              'blade_params')

                for i, blade_config in enumerate(blade_configs_lst):
                    # file name with extension
                    configname_wext = os.path.basename(blade_config)
                    # remove extension from filename
                    configname, _ = os.path.splitext(configname_wext)
                    # Active Onboard Administrator IP address
                    oa_ip = None
                    # interconnect modules number
                    module_num = 0

                    # current operation information string
                    info = f'[{i+1} of {configs_num}]: {configname} system.'
                    print(info, end=" ")

                    # search control dictionary. continue to check file until all parameters groups are found
                    collected = {
                        'enclosure': False,
                        'oa_ip': False,
                        'module': False,
                        'servers': False,
                        'vc': False
                    }
                    # if blade_lst remains empty after file checking then status_info shows NO_DATA for current file
                    blade_lst = []
                    enclosure_vc_lst = []

                    with open(blade_config, encoding='utf-8',
                              errors='ignore') as file:
                        # check file until all groups of parameters extracted
                        while not all(collected.values()):
                            line = file.readline()
                            if not line:
                                break
                            # enclosure section start
                            if re.search(
                                    r'>SHOW ENCLOSURE INFO|^ +ENCLOSURE INFORMATION$',
                                    line):
                                enclosure_dct = {}
                                collected['enclosure'] = True
                                # read until the Serial Number line closes the section
                                while not re.search(r'Serial Number', line):
                                    line = file.readline()
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # name_value_pair_match
                                    if match_dct[match_keys[0]]:
                                        result = match_dct[match_keys[0]]
                                        enclosure_dct[result.group(1).strip(
                                        )] = result.group(2).strip()
                                    if not line:
                                        break
                                # rename Description key to Enclosure Type key for VC
                                if enclosure_dct.get('Description'):
                                    enclosure_dct[
                                        'Enclosure Type'] = enclosure_dct.pop(
                                            'Description')
                                # creating list with REQUIRED enclosure information only
                                enclosure_lst = [
                                    enclosure_dct.get(param)
                                    for param in enclosure_params
                                ]
                            # enclosure section end
                            # vc fabric connection section start
                            # NOTE(review): the sections below read enclosure_lst;
                            # this assumes the enclosure section always precedes them
                            # in the config file — confirm against sample configs
                            elif re.search(r'FABRIC INFORMATION', line):
                                info_type = 'Type VC'
                                print(info_type, end=" ")
                                info = info + " " + info_type
                                line = file.readline()
                                collected['vc'] = True
                                while not re.search(
                                        r'FC-CONNECTION INFORMATION', line):

                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # vc_port_match
                                    if match_dct[match_keys[14]]:
                                        vc_port = line_to_list(
                                            comp_dct[comp_keys[14]], line,
                                            *enclosure_lst)
                                        enclosure_vc_lst.append(vc_port)
                                        blade_vc_comprehensive_lst.append(
                                            vc_port)
                                        line = file.readline()
                                    else:
                                        line = file.readline()
                                        if not line:
                                            break

                            # vc fabric connection section end
                            # active onboard administrator ip section start
                            elif re.search(r'>SHOW TOPOLOGY *$', line):
                                info_type = 'Type Blade Enclosure'
                                print(info_type, end=" ")
                                info = info + " " + info_type
                                line = file.readline()
                                collected['oa_ip'] = True
                                while not re.search(r'^>SHOW', line):
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # oa_ip_match
                                    if match_dct[match_keys[1]]:
                                        oa_ip = match_dct[match_keys[1]].group(
                                            1)
                                        line = file.readline()
                                        break
                                    else:
                                        line = file.readline()
                                        if not line:
                                            break
                            # active onboard administrator ip section end
                            # interconnect modules section start
                            elif re.search(r'>SHOW INTERCONNECT INFO ALL',
                                           line):
                                line = file.readline()
                                # BUGFIX: the original set collected['modules'],
                                # creating a new key and leaving 'module' False
                                # forever, so all(collected.values()) could never
                                # become True and every file was read to EOF
                                collected['module'] = True
                                while not re.search(r'^>SHOW', line):
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # module_type_num_match
                                    if match_dct[match_keys[2]]:
                                        module_dct = {}
                                        module_lst = []
                                        module = match_dct[match_keys[2]]
                                        # interconnect module slot number
                                        module_slot = module.group(1)
                                        # interconnect module type (Ethernet, FC)
                                        module_type = module.group(2).rstrip()
                                        line = file.readline()
                                        # module_section_end_comp
                                        while not re.search(
                                                comp_dct[comp_keys[3]], line):
                                            # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                            match_dct = {
                                                match_key:
                                                comp_dct[comp_key].match(line)
                                                for comp_key, match_key in zip(
                                                    comp_keys, match_keys)
                                            }
                                            # name_value_pair_match
                                            if match_dct[match_keys[0]]:
                                                result = match_dct[
                                                    match_keys[0]]
                                                name = result.group(1).strip()
                                                value = result.group(2).strip()
                                                # if value is empty string use None
                                                if value == '':
                                                    value = None
                                                module_dct[name] = value
                                                line = file.readline()
                                            else:
                                                line = file.readline()
                                                if not line:
                                                    break
                                        # creating list with REQUIRED interconnect module information only
                                        module_lst = [
                                            module_dct.get(param)
                                            for param in module_params
                                        ]
                                        # add current module information to list containing all modules information
                                        # oa_ip added as None and extracted later in the file
                                        module_comprehensive_lst.append([
                                            *enclosure_lst, oa_ip, module_slot,
                                            module_type, *module_lst
                                        ])
                                        # based on module's number oa_ip is added to module_comprehensive_lst after extraction
                                        module_num += 1
                                    else:
                                        line = file.readline()
                                        if not line:
                                            break
                            # interconnect modules section end
                            # blade server, hba and flb section start
                            elif re.search(r'>SHOW SERVER INFO ALL', line):
                                line = file.readline()
                                collected['servers'] = True
                                while not re.search(r'^>SHOW', line):
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # blade_server_num_match
                                    if match_dct[match_keys[4]]:
                                        blade_dct = {}
                                        blade_lst = []
                                        hba_lst = []
                                        result = match_dct[match_keys[4]]
                                        blade_dct[result.group(
                                            1)] = result.group(2)
                                        line = file.readline()
                                        # server_section_end_comp
                                        while not re.search(
                                                comp_dct[comp_keys[11]], line):
                                            # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                            match_dct = {
                                                match_key:
                                                comp_dct[comp_key].match(line)
                                                for comp_key, match_key in zip(
                                                    comp_keys, match_keys)
                                            }
                                            # mezzanin hba section start
                                            # mezzanine_model_match
                                            if match_dct[match_keys[6]]:
                                                result = match_dct[
                                                    match_keys[6]]
                                                hba_description = result.group(
                                                    1)
                                                hba_model = result.group(2)
                                                line = file.readline()
                                                # mezzanine_wwn_comp
                                                while re.search(
                                                        comp_dct[comp_keys[7]],
                                                        line):
                                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                                    match_dct = {
                                                        match_key:
                                                        comp_dct[comp_key].
                                                        match(line)
                                                        for comp_key, match_key
                                                        in zip(
                                                            comp_keys,
                                                            match_keys)
                                                    }
                                                    # mezzanine_wwn_match
                                                    result = match_dct[
                                                        match_keys[7]]
                                                    wwnp = result.group(1)
                                                    hba_lst.append([
                                                        hba_description,
                                                        hba_model, wwnp
                                                    ])
                                                    line = file.readline()
                                            # mezzanin hba section end
                                            # flex flb hba section start
                                            # flb_model_match and flex_ethernet_match
                                            elif match_dct[match_keys[
                                                    8]] or match_dct[
                                                        match_keys[15]]:
                                                if match_dct[match_keys[8]]:
                                                    result = match_dct[
                                                        match_keys[8]]
                                                    flex_description = result.group(
                                                        1)
                                                    if re.search(
                                                            comp_dct[
                                                                comp_keys[13]],
                                                            line):
                                                        flex_model = re.search(
                                                            comp_dct[
                                                                comp_keys[13]],
                                                            line).group(1)
                                                    else:
                                                        flex_model = None
                                                elif match_dct[match_keys[15]]:
                                                    result = match_dct[
                                                        match_keys[15]]
                                                    flex_description = result.group(
                                                        1)
                                                    flex_model = result.group(
                                                        1)
                                                line = file.readline()
                                                # wwn_mac_line_comp
                                                while re.search(
                                                        comp_dct[comp_keys[9]],
                                                        line):
                                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                                    match_dct = {
                                                        match_key:
                                                        comp_dct[comp_key].
                                                        match(line)
                                                        for comp_key, match_key
                                                        in zip(
                                                            comp_keys,
                                                            match_keys)
                                                    }
                                                    # flb_wwn_match
                                                    if match_dct[
                                                            match_keys[10]]:
                                                        result = match_dct[
                                                            match_keys[10]]
                                                        wwnp = result.group(1)
                                                        hba_lst.append([
                                                            flex_description,
                                                            flex_model, wwnp
                                                        ])

                                                    line = file.readline()
                                            # flex flb hba section end
                                            # blade server section start
                                            # blade_server_info_match
                                            elif match_dct[match_keys[5]]:
                                                result = match_dct[
                                                    match_keys[5]]
                                                name = result.group(1).rstrip()
                                                value = result.group(
                                                    3).rstrip()
                                                # to avoid Type parameter overwrite
                                                # add parameter only if parameter has not been added to blade dictionary before
                                                if not blade_dct.get(name):
                                                    blade_dct[name] = value
                                                line = file.readline()
                                            # blade server section end
                                            # if none of matches found for current blade server then next line
                                            else:
                                                line = file.readline()
                                                if not line:
                                                    break
                                        # unpopulated blade slots have 'Server Blade Type' line but populated have 'Type' line
                                        # add 'Server Blade Type' parameter for populated slots for consistency
                                        if blade_dct.get('Type'):
                                            blade_dct[
                                                'Server Blade Type'] = blade_dct.pop(
                                                    'Type')
                                        # creating list with REQUIRED blade server information only
                                        blade_lst = [
                                            blade_dct.get(param)
                                            for param in blade_params
                                        ]
                                        # if hba or flex cards installed in blade server
                                        if len(hba_lst):
                                            # add line for each hba to blades_comprehensive_lst
                                            for hba in hba_lst:
                                                blades_comprehensive_lst.append(
                                                    [
                                                        *enclosure_lst,
                                                        *blade_lst, *hba
                                                    ])
                                        # if no hba add one line with enclosure and blade info only
                                        else:
                                            blades_comprehensive_lst.append([
                                                *enclosure_lst, *blade_lst,
                                                None, None
                                            ])
                                    # if no blade_server_num_match found in >SHOW SERVER INFO ALL section then next line
                                    else:
                                        line = file.readline()
                                        if not line:
                                            break
                            # blade server, hba and flb section end

                        # adding OA IP to module_comprehensive_lst based on interconnect modules number
                        for num in range(-1, -module_num - 1, -1):
                            module_comprehensive_lst[num][3] = oa_ip
                        # show status blades information extraction from file
                        if blade_lst or enclosure_vc_lst:
                            status_info('ok', max_title, len(info))
                        else:
                            status_info('no data', max_title, len(info))
                # save extracted data to json file
                save_data(report_data_lst, data_names,
                          module_comprehensive_lst, blades_comprehensive_lst,
                          blade_vc_comprehensive_lst)
        else:
            # current operation information string
            info = 'Collecting enclosure, interconnect modules, blade servers, hba'
            print(info, end=" ")
            status_info('skip', max_title, len(info))
            # save empty data to json file
            save_data(report_data_lst, data_names, module_comprehensive_lst,
                      blades_comprehensive_lst, blade_vc_comprehensive_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        module_comprehensive_lst, blades_comprehensive_lst, blade_vc_comprehensive_lst = verify_data(
            report_data_lst, data_names, *data_lst)

    return module_comprehensive_lst, blades_comprehensive_lst, blade_vc_comprehensive_lst
Ejemplo n.º 3
0
def interswitch_connection_extract(switch_params_lst, report_data_lst):
    """Extract inter-switch connection information (ISL, trunk, trunk area).

    Scans each Native-mode switch supportshow file for the islshow,
    trunkshow and porttrunkarea command sections and collects the parsed
    rows. Switches in Access Gateway mode are skipped.

    Args:
        switch_params_lst: list of per-switch parameter value lists
            ([[switch_params_sw1], [switch_params_sw2], ...]).
        report_data_lst: customer_name, dir_report, dir to save obtained
            data, max_title, report_steps_dct.

    Returns:
        tuple: (isl_lst, trunk_lst, porttrunkarea_lst) lists of parsed rows.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['isl', 'trunk', 'porttrunkarea']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    isl_lst, trunk_lst, porttrunkarea_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING INTERSWITCH CONNECTION INFORMATION (ISL, TRUNK, TRUNKAREA) ...\n'
        )

        # extract switch parameters names from init file
        switch_columns = columns_import('switch', max_title, 'columns')
        # number of switches to check
        switch_num = len(switch_params_lst)

        # data imported from init file to extract values from config file
        # NOTE(review): comp_keys/match_keys/comp_dct are indexed positionally
        # below (comp_keys[0], [2], [3], ...) — order must match the 'isl'
        # regex definitions in the init file; confirm when editing that file
        *_, comp_keys, match_keys, comp_dct = data_extract_objects(
            'isl', max_title)

        # lists to store only REQUIRED information
        # collecting data for all switches ports during looping
        isl_lst = []
        trunk_lst = []
        porttrunkarea_lst = []

        # switch_params_lst [[switch_params_sw1], [switch_params_sw1]]
        # checking each switch for switch level parameters
        for i, switch_params_data in enumerate(switch_params_lst):
            # data unpacking from iter param
            # dictionary with parameters for the current switch
            switch_params_data_dct = dict(
                zip(switch_columns, switch_params_data))
            switch_info_keys = [
                'configname', 'chassis_name', 'chassis_wwn', 'switch_index',
                'SwitchName', 'switchWwn', 'switchRole', 'Fabric_ID',
                'FC_Router', 'switchMode'
            ]
            switch_info_lst = [
                switch_params_data_dct.get(key) for key in switch_info_keys
            ]
            # logical switch (virtual fabric) mode flag for this switch
            ls_mode_on = True if switch_params_data_dct[
                'LS_mode'] == 'ON' else False

            sshow_file, _, _, switch_index, switch_name, *_, switch_mode = switch_info_lst

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} isl, trunk and trunk area ports. Switch mode: {switch_mode}'
            print(info, end=" ")
            # search control dictionary. continue to check sshow_file until all parameters groups are found
            collected = {'isl': False, 'trunk': False, 'trunkarea': False}

            # only Native mode switches carry isl/trunk/trunkarea sections
            if switch_mode == 'Native':
                with open(sshow_file, encoding='utf-8',
                          errors='ignore') as file:
                    # check file until all groups of parameters extracted
                    while not all(collected.values()):
                        line = file.readline()
                        if not line:
                            break
                        # isl section start
                        # switchcmd_islshow_comp
                        if re.search(comp_dct[comp_keys[0]],
                                     line) and not collected['isl']:
                            collected['isl'] = True
                            # in logical switch mode skip ahead to the context
                            # block of the current switch_index
                            if ls_mode_on:
                                while not re.search(
                                        fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            # reads the NEXT line before matching, so the
                            # section header line itself is never parsed
                            while not re.search(comp_dct[comp_keys[2]], line):
                                line = file.readline()
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # islshow_match
                                if match_dct[match_keys[1]]:
                                    isl_port = line_to_list(
                                        comp_dct[comp_keys[1]], line,
                                        *switch_info_lst[:-1])
                                    # portcfg parameters: space-separated flags
                                    # reformatted as a comma-separated string
                                    if isl_port[-1]:
                                        isl_port[-1] = isl_port[-1].replace(
                                            ' ', ', ')
                                    # appending list with only REQUIRED port info for the current loop iteration
                                    # to the list with all ISL port info
                                    isl_lst.append(isl_port)
                                if not line:
                                    break
                        # isl section end
                        # trunk section start
                        # switchcmd_trunkshow_comp
                        if re.search(comp_dct[comp_keys[3]],
                                     line) and not collected['trunk']:
                            collected['trunk'] = True
                            if ls_mode_on:
                                while not re.search(
                                        fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            # NOTE: unlike the isl section, the current line is
                            # matched BEFORE the next one is read — the line
                            # that triggered the section is examined too
                            while not re.search(comp_dct[comp_keys[2]], line):
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # trunkshow_match
                                if match_dct[match_keys[4]]:
                                    trunk_port = line_to_list(
                                        comp_dct[comp_keys[4]], line,
                                        *switch_info_lst[:-1])
                                    # if trunk line has trunk number then remove ":" from trunk number
                                    if trunk_port[9]:
                                        trunk_port[9] = trunk_port[9].strip(
                                            ':')
                                        trunk_num = trunk_port[9]
                                    # if trunk line has no number then use number from previous line
                                    # NOTE(review): assumes the first matched
                                    # trunkshow line always carries a trunk
                                    # number, otherwise trunk_num would be
                                    # unbound here — confirm against config format
                                    else:
                                        trunk_port[9] = trunk_num
                                    # appending list with only REQUIRED trunk info for the current loop iteration
                                    # to the list with all trunk port info
                                    trunk_lst.append(trunk_port)
                                line = file.readline()
                                if not line:
                                    break
                        # trunk section end
                        # porttrunkarea section start
                        # switchcmd_trunkarea_comp
                        if re.search(comp_dct[comp_keys[5]],
                                     line) and not collected['trunkarea']:
                            collected['trunkarea'] = True
                            if ls_mode_on:
                                while not re.search(
                                        fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            while not re.search(comp_dct[comp_keys[2]], line):
                                line = file.readline()
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # 'porttrunkarea_match'
                                if match_dct[match_keys[6]]:
                                    porttrunkarea_port_lst = line_to_list(
                                        comp_dct[comp_keys[6]], line,
                                        *switch_info_lst[:6])
                                    # for No_light ports port and slot numbers are '--'
                                    if porttrunkarea_port_lst[11] == '--':
                                        porttrunkarea_port_lst[10] = '--'
                                    # if switch has no slots then slot number is 0
                                    for idx in [6, 10]:
                                        if not porttrunkarea_port_lst[idx]:
                                            porttrunkarea_port_lst[idx] = str(
                                                0)

                                    porttrunkarea_lst.append(
                                        porttrunkarea_port_lst)
                                if not line:
                                    break
                        # porttrunkarea section end
                status_info('ok', max_title, len(info))
            # if switch in Access Gateway mode then skip
            else:
                status_info('skip', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, isl_lst, trunk_lst,
                  porttrunkarea_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        isl_lst, trunk_lst, porttrunkarea_lst = verify_data(
            report_data_lst, data_names, *data_lst)

    return isl_lst, trunk_lst, porttrunkarea_lst
# Ejemplo n.º 4
# 0
def err_sfp_cfg_analysis_main(portshow_aggregated_df, sfpshow_df,
                              portcfgshow_df, report_columns_usage_dct,
                              report_data_lst):
    """Main function to add porterr, transceiver and portcfg information to portshow DataFrame.

    Enriches the aggregated portcmd DataFrame with sfpshow/portcfgshow data,
    warns about ports whose SFP model is not recognized, builds the report
    tables and persists everything (json/csv plus optional xlsx export).

    Args:
        portshow_aggregated_df: aggregated portcmd DataFrame to enrich.
        sfpshow_df: parsed sfpshow data (port errors and SFP readings).
        portcfgshow_df: parsed portcfgshow data (port configuration).
        report_columns_usage_dct: controls which columns appear in report tables.
        report_data_lst: customer_name, dir_report, dir to save obtained data,
            max_title, report_steps_dct.

    Returns:
        DataFrame: portshow_sfp_aggregated_df with porterr, transceiver and
        portcfg columns added.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst
    # set interactively when the user asks to export despite the export flag being off
    portshow_sfp_force_flag = False
    portshow_sfp_export_flag, *_ = report_steps_dct['portshow_sfp_aggregated']

    # names to save data obtained after current module execution
    data_names = [
        'portshow_sfp_aggregated', 'Ошибки', 'Параметры_SFP',
        'Параметры_портов'
    ]
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df = data_lst

    # list of data to analyze from report_info table
    analyzed_data_names = [
        'portshow_aggregated', 'sfpshow', 'portcfgshow', 'portcmd',
        'switchshow_ports', 'switch_params_aggregated', 'fdmi',
        'device_rename', 'report_columns_usage_upd', 'nscamshow', 'nsshow',
        'alias', 'blade_servers', 'fabric_labels'
    ]

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)

    if force_run:
        # import transceivers information from file
        sfp_model_df = dataframe_import('sfp_models', max_title)
        # current operation information string (constant, so no f-prefix)
        info = 'Updating connected devices table'
        print(info, end=" ")
        # add sfpshow, transceiver information and portcfg to aggregated portcmd DataFrame
        portshow_sfp_aggregated_df = port_complete(portshow_aggregated_df,
                                                   sfpshow_df, sfp_model_df,
                                                   portcfgshow_df)
        # after finish display status
        status_info('ok', max_title, len(info))

        # warning if UNKNOWN SFP present
        if (portshow_sfp_aggregated_df['Transceiver_Supported'] ==
                'Unknown SFP').any():
            # count each physical port once: deduplicate on port identity
            info_columns = [
                'Fabric_name', 'Fabric_label', 'configname', 'chassis_name',
                'chassis_wwn', 'slot', 'port', 'Transceiver_Supported'
            ]
            portshow_sfp_info_df = portshow_sfp_aggregated_df.drop_duplicates(
                subset=info_columns)
            # boolean mask sum counts the unknown ports directly
            unknown_count = int(
                (portshow_sfp_info_df['Transceiver_Supported'] ==
                 'Unknown SFP').sum())
            info = f'{unknown_count} {"port" if unknown_count == 1 else "ports"} with UNKNOWN supported SFP tag found'
            print(info, end=" ")
            status_info('warning', max_title, len(info))
            # ask if save portshow_aggregated_df
            if not portshow_sfp_export_flag:
                reply = reply_request(
                    "Do you want to save 'portshow_sfp_aggregated'? (y)es/(n)o: "
                )
                if reply == 'y':
                    portshow_sfp_force_flag = True

        # create report tables from port_complete_df DataFrame
        error_report_df, sfp_report_df, portcfg_report_df = \
            create_report_tables(portshow_sfp_aggregated_df, data_names[1:], report_columns_usage_dct, max_title)
    # verify if loaded data is empty and reset DataFrame if yes
    else:
        portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df \
            = verify_data(report_data_lst, data_names, *data_lst)
    # single ordered list of result DataFrames (was duplicated in both branches)
    data_lst = [
        portshow_sfp_aggregated_df, error_report_df, sfp_report_df,
        portcfg_report_df
    ]
    if force_run:
        # saving data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
    # save data to excel file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        # only the aggregated table honours the interactive force flag
        force_flag = (data_name == 'portshow_sfp_aggregated'
                      and portshow_sfp_force_flag)
        save_xlsx_file(data_frame,
                       data_name,
                       report_data_lst,
                       force_flag=force_flag)

    return portshow_sfp_aggregated_df
# Ejemplo n.º 5
# 0
def fabricstatistics_main(portshow_aggregated_df, switchshow_ports_df,
                          fabricshow_ag_labels_df, nscamshow_df, portshow_df,
                          report_columns_usage_dct, report_data_lst):
    """Count fabric statistics and build the corresponding report table.

    Aggregates port statistics per fabric, renders the report DataFrame and
    persists both results (json/csv and xlsx export).

    Returns:
        DataFrame: fabric_statistics_df with counted fabric statistics.
    """
    # last two items of report_data_lst are max_title and report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # data keys produced by this step
    data_names = ['fabric_statistics', 'Статистика_фабрики']
    # announce the service step
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # try to reuse results saved on a previous program run
    data_lst = load_data(report_data_lst, *data_names)
    # pylint: disable=unbalanced-tuple-unpacking
    fabric_statistics_df, fabric_statistics_report_df = data_lst

    # upstream data this step depends on (from report_info table)
    analyzed_data_names = [
        'portcmd', 'switchshow_ports', 'switch_params_aggregated',
        'portshow_aggregated', 'switch_parameters', 'chassis_parameters',
        'fdmi', 'nscamshow', 'nsshow', 'alias', 'blade_servers',
        'fabric_labels'
    ]

    chassis_column_usage = report_columns_usage_dct['chassis_info_usage']
    # re-run when saved data is missing or recalculation is explicitly requested
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if not force_run:
        # loaded data may hold placeholder strings: swap them for empty DataFrames
        fabric_statistics_df, fabric_statistics_report_df = \
            verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [fabric_statistics_df, fabric_statistics_report_df]
    else:
        info = 'Counting up Fabrics statistics'
        print(info, end=" ")
        # aggregate statistics over all collected port and switch data
        fabric_statistics_df = statisctics_aggregated(
            portshow_aggregated_df, switchshow_ports_df,
            fabricshow_ag_labels_df, nscamshow_df, portshow_df,
            report_data_lst)
        status_info('ok', max_title, len(info))
        # render the report table from the aggregated statistics
        fabric_statistics_report_df = statistics_report(
            fabric_statistics_df, chassis_column_usage, max_title)
        data_lst = [fabric_statistics_df, fabric_statistics_report_df]
        # persist results to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
    # export to the service excel file when enabled
    for name, frame in zip(data_names, data_lst):
        save_xlsx_file(frame, name, report_data_lst)

    return fabric_statistics_df
# Ejemplo n.º 6
# 0
def fabricshow_extract(switch_params_lst, report_data_lst):
    """
    Function to extract from principal switch configuration 
    list of switches in fabric including AG switches
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['fabricshow', 'ag_principal']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previos program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    fabricshow_lst, ag_principal_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data  from configueation files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING FABRICS INFORMATION FROM SUPPORTSHOW CONFIGURATION FILES ...\n'
        )

        # extract switch parameters names from init file
        switch_columns = columns_import('switch', max_title, 'columns')
        # number of switches to check
        switch_num = len(switch_params_lst)
        # list to store only REQUIRED switch parameters
        # collecting data for all switches during looping
        fabricshow_lst = []
        ag_principal_lst = []
        # data imported from init file to extract values from config file
        *_, comp_keys, match_keys, comp_dct = data_extract_objects(
            'fabricshow', max_title)
        ag_params = columns_import('fabricshow', max_title, 'ag_params')

        # switch_params_lst [[switch_params_sw1], [switch_params_sw1]]
        # checking each switch for switch level parameters
        for i, switch_params_data in enumerate(switch_params_lst):
            # data unpacking from iter param
            # dictionary with parameters for the current switch
            switch_params_data_dct = dict(
                zip(switch_columns, switch_params_data))
            switch_info_keys = [
                'configname', 'chassis_name', 'chassis_wwn', 'switch_index',
                'SwitchName', 'switchWwn', 'switchRole', 'Fabric_ID',
                'FC_Router', 'switchMode'
            ]
            switch_info_lst = [
                switch_params_data_dct[key] for key in switch_info_keys
            ]
            ls_mode_on = True if switch_params_data_dct[
                'LS_mode'] == 'ON' else False

            sshow_file, _, _, switch_index, switch_name, _, switch_role = switch_info_lst[:
                                                                                          7]

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} fabric environment. Switch role: {switch_role}'
            print(info, end=" ")

            collected = {'fabricshow': False, 'ag_principal': False}

            # switch_params_data_dct.get('FC_Router') == 'ON'
            # print('collected', collected)

            # check config of Principal switch only
            if switch_role == 'Principal':
                # principal_switch_lst contains sshow_file, chassis_name, chassis_wwn, switch_index, switch_name, switch_fid
                principal_switch_lst = [
                    *switch_info_lst[:6], *switch_info_lst[7:9]
                ]

                # search control dictionary. continue to check sshow_file until all parameters groups are found
                with open(sshow_file, encoding='utf-8',
                          errors='ignore') as file:
                    # check file until all groups of parameters extracted
                    while not all(collected.values()):
                        line = file.readline()
                        if not line:
                            break
                        # fabricshow section start
                        if re.search(
                                r'^(SWITCHCMD /fabos/cliexec/)?fabricshow\s*:$',
                                line):
                            # when section is found corresponding collected dict values changed to True
                            collected['fabricshow'] = True
                            if ls_mode_on:
                                while not re.search(
                                        fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            while not re.search(
                                    r'^(real [\w.]+)|(\*\* SS CMD END \*\*)$',
                                    line):
                                line = file.readline()
                                # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # match_keys ['fabricshow_match']
                                # 'fabricshow_match'
                                if match_dct[match_keys[0]]:
                                    fabricshow_lst.append(
                                        line_to_list(comp_dct[comp_keys[0]],
                                                     line,
                                                     *principal_switch_lst))
                                if not line:
                                    break
                        # ag_principal section start
                        # switchcmd_agshow_comp
                        if re.search(comp_dct[comp_keys[4]], line):
                            collected['ag_principal'] = True
                            # if switch in LS mode switch to required LS number
                            if ls_mode_on:
                                while not re.search(
                                        fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            while not re.search(comp_dct[comp_keys[10]], line):
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # ag_num_match
                                if match_dct[match_keys[5]]:
                                    # dictionary to store all DISCOVERED switch ports information
                                    # collecting data only for the logical switch in current loop
                                    # Access Gateway common information dictionary
                                    ag_info_dct = {}
                                    # Attached F-Port information dictionary
                                    ag_attach_dct = {}
                                    # Access Gateway F-Port information dictionary
                                    ag_fport_dct = {}
                                    # Domaid ID, port_ID, port_index dictionary
                                    did_port_dct = {}

                                    # move cursor to one line down to get inside while loop
                                    line = file.readline()
                                    # ag_switchcmd_end_comp
                                    while not re.search(
                                            comp_dct[comp_keys[9]], line):
                                        match_dct = {
                                            match_key:
                                            comp_dct[comp_key].match(line)
                                            for comp_key, match_key in zip(
                                                comp_keys, match_keys)
                                        }
                                        # ag_info_match
                                        if match_dct[match_keys[6]]:
                                            ag_info_dct[match_dct[
                                                match_keys[6]].group(
                                                    1).rstrip()] = match_dct[
                                                        match_keys[6]].group(
                                                            2).rstrip()
                                        # ag_attached_match
                                        elif match_dct[match_keys[7]]:
                                            # if Attached F-Port information dictionary is empty than create dictionary with N-Port ID(s) as keys and empty lists as values
                                            # if ag_attach_dct has been already created (not empty) then it's preserved
                                            ag_attach_dct = ag_attach_dct or dict(
                                                (n_portid, []) for n_portid in
                                                ag_info_dct['N-Port ID(s)'].
                                                split(','))
                                            # extracting attached F-port data from line to list
                                            ag_attach_lst = line_to_list(
                                                comp_dct[comp_keys[7]], line)
                                            # getting port_ID of N-port from port_id of F-port
                                            n_portid = ag_attach_lst[
                                                0][:-2] + '00'
                                            # adding current line F-port information to Attached F-Port information dictionary
                                            if n_portid in ag_attach_dct.keys(
                                            ):
                                                ag_attach_dct[n_portid].append(
                                                    ag_attach_lst)
                                        # ag_fport_match
                                        elif match_dct[match_keys[8]]:
                                            # create Access Gateway F-Port information dictionary
                                            ag_fport_dct = ag_fport_dct or dict(
                                                (n_portid, []) for n_portid in
                                                ag_info_dct['N-Port ID(s)'].
                                                split(','))
                                            # extracting access gateway F-port data from line to list
                                            ag_fport_lst = line_to_list(
                                                comp_dct[comp_keys[8]], line)
                                            # getting port_ID of N-port from port_id of F-port
                                            n_portid = ag_fport_lst[
                                                1][:-2] + '00'
                                            # adding current line F-port information to Access Gateway F-Port information dictionary
                                            if n_portid in ag_fport_dct.keys():
                                                ag_fport_dct[n_portid].append(
                                                    ag_fport_lst)
                                        line = file.readline()
                                        if not line:
                                            break

                                    # list of N-ports extracted from N-Port ID(s) line
                                    n_portids_lst = ag_info_dct[
                                        'N-Port ID(s)'].split(',')
                                    # (domain_id, n_portid)
                                    did_port_lst = [
                                        (int(n_portid[:4], 0), n_portid)
                                        for n_portid in n_portids_lst
                                    ]
                                    # creating dictionary with n_portid as keys and (domain_id, n_portid) as values
                                    did_port_dct = {
                                        port[1]: list(port)
                                        for port in did_port_lst
                                    }

                                    # change values representation in dictionaries
                                    # before {n_portid: [(port_id_1, port_wwn_1, f-port_num_1)], [(port_id_2, port_wwn_2, f-port_num_2)]}
                                    # after {n_portid: [(port_id_1, port_id_2), (port_wwn_1, port_wwn_2), (f-port_num_1, f-port_num_1)]
                                    ag_attach_dct = {
                                        n_portid:
                                        list(zip(*ag_attach_dct[n_portid]))
                                        for n_portid in n_portids_lst
                                        if ag_attach_dct.get(n_portid)
                                    }
                                    ag_fport_dct = {
                                        n_portid:
                                        list(zip(*ag_fport_dct[n_portid]))
                                        for n_portid in n_portids_lst
                                        if ag_fport_dct.get(n_portid)
                                    }

                                    # add connected switch port_index to did_port_dct extracted from ag_attach_dct
                                    # (domain_id, n_portid, n_port_index)
                                    # if no port_index then add None
                                    for n_portid in n_portids_lst:
                                        if ag_attach_dct.get(n_portid):
                                            did_port_dct[n_portid].append(
                                                ag_attach_dct[n_portid][2][0])
                                        else:
                                            did_port_dct[n_portid].append(None)

                                    # for each element of list convert tuples to strings
                                    # if no data extracted for the n_portid then add None for each parameter
                                    for n_portid in n_portids_lst:
                                        if ag_attach_dct.get(n_portid):
                                            ag_attach_dct[n_portid] = [
                                                ', '.join(v) for v in
                                                ag_attach_dct[n_portid]
                                            ]
                                        else:
                                            ag_attach_dct[n_portid] = [None
                                                                       ] * 3
                                    for n_portid in n_portids_lst:
                                        if ag_fport_dct.get(n_portid):
                                            ag_fport_dct[n_portid] = [
                                                ', '.join(v)
                                                for v in ag_fport_dct[n_portid]
                                            ]
                                        else:
                                            ag_fport_dct[n_portid] = [None] * 3

                                    # getting data from ag_info_dct in required order
                                    ag_info_lst = [
                                        ag_info_dct.get(param, None)
                                        for param in ag_params
                                    ]
                                    # appending list with only REQUIRED ag info for the current loop iteration to the list with all ag switch info
                                    for n_portid in n_portids_lst:
                                        ag_principal_lst.append([
                                            *principal_switch_lst[:-1],
                                            *ag_info_lst,
                                            *did_port_dct[n_portid],
                                            *ag_attach_dct[n_portid],
                                            *ag_fport_dct[n_portid]
                                        ])
                                else:
                                    line = file.readline()
                                if not line:
                                    break
                        # ag_principal section end

                status_info('ok', max_title, len(info))
            else:
                status_info('skip', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, fabricshow_lst,
                  ag_principal_lst)
    else:
        fabricshow_lst, ag_principal_lst = verify_data(report_data_lst,
                                                       data_names, *data_lst)

    return fabricshow_lst, ag_principal_lst
Ejemplo n.º 7
0
def sensor_extract(chassis_params_lst, report_data_lst):
    """Function to extract sensor information"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['sensor']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data saved on a previous program run (if any)
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    sensor_lst, = data_lst

    # flags showing whether each data item must be re-extracted
    # even if it was already obtained on a previous iteration
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # report loaded data whose force extract flag is raised
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # re-extract from configuration files when any data item is missing
    # or any force extract flag is on
    if not all(data_lst) or any(force_extract_keys_lst):
        print('\nEXTRACTING ENVIRONMENT DATA ...\n')

        # chassis parameter names imported from the init file
        chassis_columns = columns_import('chassis', max_title, 'columns')
        # total number of chassis to process
        switch_num = len(chassis_params_lst)
        # regex objects and their key names used to parse the config files
        *_, comp_keys, match_keys, comp_dct = data_extract_objects(
            'sensor', max_title)

        # accumulates REQUIRED sensor readings for every chassis
        sensor_lst = []

        # chassis_params_lst [[chassis_params_sw1], [chassis_params_sw1]]
        # check each chassis for sensor readings
        for i, chassis_params_data in enumerate(chassis_params_lst):
            # map chassis parameter names onto the current chassis values
            chassis_params_data_dct = dict(
                zip(chassis_columns, chassis_params_data))
            chassis_info_keys = ['configname', 'chassis_name', 'chassis_wwn']
            chassis_info_lst = [
                chassis_params_data_dct.get(key) for key in chassis_info_keys
            ]
            sshow_file, chassis_name, _ = chassis_info_lst

            # progress message for the current chassis
            info = f'[{i+1} of {switch_num}]: {chassis_name} sensor readings'
            print(info, end=" ")
            # tracks which parameter groups have been found in sshow_file
            collected = {'sensor': False}

            with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                # scan the file until every parameter group is extracted
                while not all(collected.values()):
                    line = file.readline()
                    if not line:
                        break
                    # sensor section start (switchcmd_sensorshow_comp)
                    if not collected['sensor'] and re.search(
                            comp_dct[comp_keys[0]], line):
                        collected['sensor'] = True
                        # read until switchcmd_end_comp terminates the section
                        while not re.search(comp_dct[comp_keys[2]], line):
                            line = file.readline()
                            # match the current line against every imported
                            # regular expression at once
                            match_dct = {
                                match_key: comp_dct[comp_key].match(line)
                                for comp_key, match_key in zip(
                                    comp_keys, match_keys)
                            }
                            # sensor reading line matched
                            if match_dct[match_keys[1]]:
                                sensor_reading = line_to_list(
                                    comp_dct[comp_keys[1]], line,
                                    *chassis_info_lst)
                                # keep only REQUIRED sensor info for the
                                # current loop iteration
                                sensor_lst.append(sensor_reading)
                            if not line:
                                break
                    # sensor section end
            status_info('ok', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, sensor_lst)
    # loaded data is empty after first iteration:
    # replace the information string with an empty list
    else:
        sensor_lst = verify_data(report_data_lst, data_names, *data_lst)

    return sensor_lst
Ejemplo n.º 8
0
def switch_params_analysis_main(fabricshow_ag_labels_df, chassis_params_df,
                                switch_params_df, maps_params_df,
                                blade_module_loc_df, ag_principal_df,
                                report_data_lst):
    """Main function to create aggregated switch parameters table and report tables.

    Joins fabric, chassis, switch, maps and AG principal DataFrames into one
    aggregated switch parameters table, derives the report tables from it and
    saves all of them. Returns the report columns usage dictionary, the
    aggregated switch parameters DataFrame and the cleaned fabric DataFrame.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = [
        'report_columns_usage', 'switch_params_aggregated', 'Коммутаторы',
        'Фабрика', 'Параметры_коммутаторов', 'Лицензии',
        'Глобальные_параметры_фабрики'
    ]
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    report_columns_usage_dct, switch_params_aggregated_df, switches_report_df, fabric_report_df, \
        switches_parameters_report_df, licenses_report_df, global_fabric_parameters_report_df = data_lst

    # list of data to analyze from report_info table
    analyzed_data_names = [
        'chassis_parameters', 'switch_parameters', 'switchshow_ports',
        'maps_parameters', 'blade_interconnect', 'fabric_labels'
    ]

    # clean fabricshow DataFrame from unnecessary data
    fabric_clean_df = fabric_clean(fabricshow_ag_labels_df)
    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # import data with switch models, firmware and etc
        switch_models_df = dataframe_import('switch_models', max_title)

        # current operation information string
        info = 'Generating aggregated switch parameters table'
        print(info, end=" ")

        # create aggregated table by joining DataFrames
        switch_params_aggregated_df, report_columns_usage_dct = \
            fabric_aggregation(fabric_clean_df, chassis_params_df, \
                switch_params_df, maps_params_df, switch_models_df, ag_principal_df)
        # add 'Device_Location' for Blade chassis switches
        switch_params_aggregated_df = fill_device_location(
            switch_params_aggregated_df, blade_module_loc_df)

        # after finish display status
        status_info('ok', max_title, len(info))

        # warn if switch config files are missing:
        # rows with empty chassis_name have no corresponding config file
        mask_no_config = switch_params_aggregated_df['chassis_name'].isna()
        missing_configs_num = switch_params_aggregated_df.loc[mask_no_config][
            'Fabric_name'].count()
        if missing_configs_num:
            info = f'{missing_configs_num} switch configuration{"s" if missing_configs_num > 1 else ""} MISSING'
            print(info, end=" ")
            status_info('warning', max_title, len(info))

        # partition aggregated DataFrame into the report tables
        switches_report_df, fabric_report_df, switches_parameters_report_df, \
            licenses_report_df, global_fabric_parameters_report_df = \
                switchs_params_report(switch_params_aggregated_df, data_names, report_columns_usage_dct, max_title)

        # create list with partitioned DataFrames
        data_lst = [
            report_columns_usage_dct, switch_params_aggregated_df,
            switches_report_df, fabric_report_df,
            switches_parameters_report_df, licenses_report_df,
            global_fabric_parameters_report_df
        ]

        # saving data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        report_columns_usage_dct, switch_params_aggregated_df, switches_report_df, fabric_report_df,  \
            switches_parameters_report_df, licenses_report_df, global_fabric_parameters_report_df = verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [
            report_columns_usage_dct, switch_params_aggregated_df,
            switches_report_df, fabric_report_df,
            switches_parameters_report_df, licenses_report_df,
            global_fabric_parameters_report_df
        ]
    # save data to service file if it's required
    for data_name, data_frame in zip(data_names[1:], data_lst[1:]):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return report_columns_usage_dct, switch_params_aggregated_df, fabric_clean_df
Ejemplo n.º 9
0
def switch_params_configshow_extract(chassis_params_fabric_lst, report_data_lst):
    """Function to extract switch parameters.

    Parses each chassis supportshow file for the configshow and switchshow
    sections of every logical switch. Returns a list of REQUIRED switch
    parameters and a list of switchshow port details.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['switch_parameters', 'switchshow_ports']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    switch_params_lst, switchshow_ports_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even they were obtained on previous iterations
    force_extract_keys_lst = [report_steps_dct[data_name][1] for data_name in data_names]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst, max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print('\nEXTRACTING SWITCH PARAMETERS FROM SUPPORTSHOW CONFIGURATION FILES ...\n')

        # extract chassis parameters names from init file
        chassis_columns = columns_import('chassis', max_title, 'columns')
        # number of switches to check
        switch_num = len(chassis_params_fabric_lst)
        # list to store only REQUIRED switch parameters
        # collecting data for all switches during looping
        switch_params_lst = []
        # list to store switch ports details
        switchshow_ports_lst = []
        # data imported from init file to extract values from config file
        switch_params, params_add, comp_keys, match_keys, comp_dct = data_extract_objects('switch', max_title)

        # chassis_params_fabric_lst [[chassis_params_sw1], [chassis_params_sw1]]
        # checking each chassis for switch level parameters
        for i, chassis_params_data in enumerate(chassis_params_fabric_lst):
            # data unpacking from iter param
            # dictionary with parameters for the current chassis
            chassis_params_data_dct = dict(zip(chassis_columns, chassis_params_data))
            sshow_file = chassis_params_data_dct['configname']
            chassis_name = chassis_params_data_dct['chassis_name']
            chassis_wwn = chassis_params_data_dct['chassis_wwn']
            # when num of logical switches is 0 or None the chassis is in Non-VF mode, otherwise VF
            vf_enabled = chassis_params_data_dct["Number_of_LS"] not in ['0', None]
            ls_mode_on = vf_enabled
            ls_mode = 'ON' if vf_enabled else 'OFF'
            # logical switches indexes. if switch is in Non-VF mode then ls_id is 0
            ls_ids = chassis_params_data_dct['LS_IDs'].split(', ') if chassis_params_data_dct['LS_IDs'] else ['0']

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {chassis_params_data_dct["chassis_name"]} switch parameters. Number of LS: {chassis_params_data_dct["Number_of_LS"]}'
            print(info, end=" ")

            # check each logical switch in chassis
            # (named ls_id, not i, to avoid shadowing the chassis enumerate index)
            for ls_id in ls_ids:
                # search control dictionary. continue to check sshow_file until all parameters groups are found
                collected = {'configshow': False, 'switchshow': False}
                # dictionary to store all DISCOVERED switch parameters
                # collecting data only for the logical switch in current loop
                switch_params_dct = {}
                with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                    # check file until all groups of parameters extracted
                    while not all(collected.values()):
                        line = file.readline()
                        if not line:
                            break
                        # configshow section start
                        if re.search(fr'^\[Switch Configuration Begin *: *{ls_id}\]$', line) and not collected['configshow']:
                            # when section is found corresponding collected dict values changed to True
                            collected['configshow'] = True

                            while not re.search(fr'^\[Switch Configuration End : {ls_id}\]$', line):
                                line = file.readline()
                                # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                match_dct = {match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                                # match_keys ['switch_configall_match', 'switch_switchshow_match']
                                if match_dct[match_keys[0]]:
                                    switch_params_dct[match_dct[match_keys[0]].group(1).rstrip()] = match_dct[match_keys[0]].group(3).rstrip()
                                if not line:
                                    break
                        # configshow section end
                        # switchshow section start
                        if re.search(r'^(SWITCHCMD /fabos/bin/)?switchshow *:$', line) and not collected['switchshow']:
                            collected['switchshow'] = True
                            # in VF mode skip forward to the context of the current logical switch
                            if ls_mode_on:
                                while not re.search(fr'^CURRENT CONTEXT -- {ls_id} *, \d+$', line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # read until the trailing 'real ...' timing line ends the section
                            while not re.search(r'^real [\w.]+$', line):
                                line = file.readline()
                                match_dct = {match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                                # 'switch_switchshow_match'
                                if match_dct[match_keys[1]]:
                                    switch_params_dct[match_dct[match_keys[1]].group(1).rstrip()] = match_dct[match_keys[1]].group(2).rstrip()
                                # 'ls_attr_match'
                                if match_dct[match_keys[2]]:
                                    ls_attr = comp_dct[comp_keys[2]].findall(line)[0]
                                    # findall returns alternating (name, value) pairs
                                    for k, v in zip(ls_attr[::2], ls_attr[1::2]):
                                        switch_params_dct[k] = v
                                # 'switchshow_portinfo_match'
                                if match_dct[match_keys[3]]:
                                    switchinfo_lst = [sshow_file, chassis_name, chassis_wwn, str(ls_id),
                                                      switch_params_dct.get('switchName', None),
                                                      switch_params_dct.get('switchWwn', None),
                                                      switch_params_dct.get('switchState', None),
                                                      switch_params_dct.get('switchMode', None)
                                                      ]
                                    switchshow_port_lst = line_to_list(comp_dct[comp_keys[3]], line, *switchinfo_lst)
                                    # if switch has no slots then slot number is 0
                                    if not switchshow_port_lst[9]:
                                        switchshow_port_lst[9] = str(0)

                                    switchshow_ports_lst.append(switchshow_port_lst)

                                if not line:
                                    break
                        # switchshow section end

                # additional values which need to be added to the switch params dictionary
                # switch_params_add order ('configname', 'chassis_name', 'switch_index', 'ls_mode')
                # values extracted in manual mode. if change values order change keys order in init.xlsx switch tab "params_add" column
                switch_params_values = (sshow_file, chassis_name, chassis_wwn, str(ls_id), ls_mode)

                if switch_params_dct:
                    # adding additional parameters and values to the switch_params_switch_dct
                    update_dct(params_add, switch_params_values, switch_params_dct)
                    # creating list with REQUIRED chassis parameters for the current switch.
                    # if no value in the switch_params_dct for the parameter then None is added
                    # and appending this list to the list of all switches switch_params_fabric_lst
                    switch_params_lst.append([switch_params_dct.get(switch_param, None) for switch_param in switch_params])

            status_info('ok', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, switch_params_lst, switchshow_ports_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        switch_params_lst, switchshow_ports_lst = verify_data(report_data_lst, data_names, *data_lst)

    return switch_params_lst, switchshow_ports_lst
Ejemplo n.º 10
0
def synergy_system_extract(synergy_folder, report_data_lst):
    """Extract HPE Synergy interconnect module and server information.

    Parses every xlsm configuration file found in synergy_folder (sheets:
    'enclosures', 'interconnectbays', 'server-hardware', 'server-fw-sw',
    'server-prof-conn-details') and aggregates the results.

    Returns a tuple of two DataFrames:
    (synergy_module_aggregated_df, synergy_servers_aggregated_df).
    Extracted data is cached to json via save_data/load_data so subsequent
    program runs can skip re-extraction unless a force run is requested.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['synergy_interconnect', 'synergy_servers']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)

    # # unpacking from the loaded list with data
    # # pylint: disable=unbalanced-tuple-unpacking
    # module_comprehensive_lst, blades_comprehensive_lst, blade_vc_comprehensive_lst = data_lst

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title)
    if force_run:

        # lists to store only REQUIRED information
        # collecting data for all blades during looping
        # list containing enclosure, blade and hba information for all blade systems

        # list containing enclosure and interconnect modules information for all blade systems

        # target column layout for the aggregated interconnect module table
        synergy_module_columns = [
            'Enclosure_Name', 'Enclosure_SN', 'Enclosure_Type',
            'Interconnect_Bay', 'Interconnect_Model', 'Interconnect_SN',
            'Interconnect_Firmware', 'Interconnect_Name', 'NodeName',
            'Device_Location'
        ]

        # target column layout for the aggregated server table
        synergy_server_columns = [
            'Enclosure_Name', 'Enclosure_Slot', 'Host_Name', 'name',
            'serverprofilename', 'Device_Model', 'Device_SN', 'Host_OS',
            'HBA_Description', 'Mezz_type', 'Connected_portWwn',
            'Mezz_location', 'Device_Location', 'HBA_Firmware'
        ]

        # start from empty frames so the function returns valid (possibly
        # empty) DataFrames even when no synergy_folder is given
        synergy_module_aggregated_df = pd.DataFrame(
            columns=synergy_module_columns)
        synergy_servers_aggregated_df = pd.DataFrame(
            columns=synergy_server_columns)

        if synergy_folder:
            print('\nEXTRACTING SYNERGY SYSTEM INFORMATION ...\n')

            # collects files in folder with xlsm extension
            synergy_config_lst = find_files(synergy_folder,
                                            max_title,
                                            filename_extension='xlsm')
            # number of files to check
            configs_num = len(synergy_config_lst)

            if configs_num:

                # # data imported from init file to extract values from config file
                # enclosure_params, _, comp_keys, match_keys, comp_dct = data_extract_objects('blades', max_title)
                # module_params = columns_import('blades', max_title, 'module_params')
                # blade_params = columns_import('blades', max_title, 'blade_params')

                for i, synergy_config in enumerate(synergy_config_lst):
                    # file name with extension
                    configname_wext = os.path.basename(synergy_config)
                    # remove extension from filename
                    configname, _ = os.path.splitext(configname_wext)

                    # current operation information string
                    info = f'[{i+1} of {configs_num}]: {configname} system.'
                    print(info, end=" ")

                    # load the five worksheets this extractor relies on
                    syn_enclosure_df = pd.read_excel(synergy_config,
                                                     sheet_name='enclosures')
                    syn_module_df = pd.read_excel(
                        synergy_config, sheet_name='interconnectbays')

                    syn_server_hw_df = pd.read_excel(
                        synergy_config, sheet_name='server-hardware')
                    syn_server_fw_sw_df = pd.read_excel(
                        synergy_config, sheet_name='server-fw-sw')
                    syn_server_profile_connection_df = pd.read_excel(
                        synergy_config, sheet_name='server-prof-conn-details')

                    # join enclosure and interconnect bay details
                    synergy_module_df = synergy_module(syn_enclosure_df,
                                                       syn_module_df)

                    # append current file's modules to the aggregated table
                    if synergy_module_aggregated_df.empty:
                        synergy_module_aggregated_df = synergy_module_df
                    else:
                        synergy_module_aggregated_df = pd.concat(
                            [synergy_module_aggregated_df, synergy_module_df],
                            ignore_index=True)

                    # server wwns taken from hardware sheet and from profile
                    # connection details respectively
                    synergy_server_wwn_df = synergy_server_wwn(
                        syn_server_hw_df)
                    synergy_profile_wwn_df = synergy_profile_wwn(
                        syn_server_profile_connection_df,
                        synergy_server_wwn_df)

                    # concatenate connection profile and server hardware
                    synergy_servers_df = pd.concat(
                        [synergy_server_wwn_df, synergy_profile_wwn_df],
                        ignore_index=True)
                    synergy_servers_df.drop_duplicates(inplace=True)

                    # add mezzanine firmware details
                    synergy_servers_df = synergy_mezz_fw(
                        syn_server_fw_sw_df, synergy_servers_df)
                    synergy_servers_df.sort_values(
                        by=['enclosurename', 'position', 'Mezz_WWPN'],
                        ignore_index=True,
                        inplace=True)

                    # append current file's servers to the aggregated table
                    if synergy_servers_aggregated_df.empty:
                        synergy_servers_aggregated_df = synergy_servers_df
                    else:
                        synergy_servers_aggregated_df = pd.concat(
                            [
                                synergy_servers_aggregated_df,
                                synergy_servers_df
                            ],
                            ignore_index=True)

                    # normalize WWNs to lower case
                    # NOTE(review): this runs on the whole aggregated frames
                    # on every loop iteration; hoisting it after the loop
                    # looks equivalent — confirm before changing
                    if synergy_module_aggregated_df['switchbasewwn'].notna(
                    ).any():
                        synergy_module_aggregated_df[
                            'switchbasewwn'] = synergy_module_aggregated_df[
                                'switchbasewwn'].str.lower()
                    if synergy_servers_aggregated_df['Mezz_WWPN'].notna().any(
                    ):
                        synergy_servers_aggregated_df[
                            'Mezz_WWPN'] = synergy_servers_aggregated_df[
                                'Mezz_WWPN'].str.lower()

                    # report extraction status for the current file
                    if not synergy_servers_df.empty or not synergy_module_df.empty:
                        status_info('ok', max_title, len(info))
                    else:
                        status_info('no data', max_title, len(info))

                # rename source worksheet columns to the report column names
                module_columns_dct = {
                    'enclosurename': 'Enclosure_Name',
                    'enclosure_serialnumber': 'Enclosure_SN',
                    'enclosuretype': 'Enclosure_Type',
                    'baynumber': 'Interconnect_Bay',
                    'interconnectmodel': 'Interconnect_Model',
                    'serialnumber': 'Interconnect_SN',
                    'switchfwversion': 'Interconnect_Firmware',
                    'hostname': 'Interconnect_Name',
                    'switchbasewwn': 'NodeName',
                    'device_location': 'Device_Location'
                }

                synergy_module_aggregated_df.rename(columns=module_columns_dct,
                                                    inplace=True)
                # replace 'None'/'none'/blank cells with real NaN values
                synergy_module_aggregated_df.replace(r'^None$|^none$|^ *$',
                                                     value=np.nan,
                                                     regex=True,
                                                     inplace=True)

                server_columns_dct = {
                    'enclosurename': 'Enclosure_Name',
                    'position': 'Enclosure_Slot',
                    'servername': 'Host_Name',
                    'model': 'Device_Model',
                    'serialnumber': 'Device_SN',
                    'oshint': 'Host_OS',
                    'Mezz': 'HBA_Description',
                    'Mezz_WWPN': 'Connected_portWwn',
                    'device_location': 'Device_Location',
                    'componentversion': 'HBA_Firmware'
                }

                synergy_servers_aggregated_df.rename(
                    columns=server_columns_dct, inplace=True)
                # replace 'None'/'none'/blank cells with real NaN values
                synergy_servers_aggregated_df.replace(r'^None$|^none$|^ *$',
                                                      value=np.nan,
                                                      regex=True,
                                                      inplace=True)

                data_lst = [
                    synergy_module_aggregated_df, synergy_servers_aggregated_df
                ]
                # save extracted data to json file
                save_data(report_data_lst, data_names, *data_lst)
        else:
            # current operation information string
            info = f'Collecting synergy details'
            print(info, end=" ")
            status_info('skip', max_title, len(info))
            data_lst = [
                synergy_module_aggregated_df, synergy_servers_aggregated_df
            ]
            # save empty data to json file
            save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        synergy_module_aggregated_df, synergy_servers_aggregated_df = verify_data(
            report_data_lst, data_names, *data_lst)
        data_lst = [
            synergy_module_aggregated_df, synergy_servers_aggregated_df
        ]
    # save data to service file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return synergy_module_aggregated_df, synergy_servers_aggregated_df
# Ejemplo n.º 11 (0)
def fabric_main(fabricshow_ag_labels_df, chassis_params_df, \
    switch_params_df, maps_params_df, report_data_lst):
    """Create Fabric and Switches report tables.

    Joins fabricshow, chassis, switch and MAPS parameter DataFrames into a
    single aggregated table and partitions it into the report DataFrames
    (switches, fabric, global fabric parameters, switch parameters,
    licenses), which are saved to csv/xlsx as required.

    Returns (switch_params_aggregated_df, report_columns_usage_dct,
    fabric_clean_df).
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = [
        'Коммутаторы', 'Фабрика', 'Глобальные_параметры_фабрики',
        'Параметры_коммутаторов', 'Лицензии'
    ]
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    switches_report_df, fabric_report_df, global_fabric_parameters_report_df, \
        switches_parameters_report_df, licenses_report_df = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # list with True (if data loaded) and/or False (if data was not found and None returned)
    data_check = force_extract_check(data_names, data_lst,
                                     force_extract_keys_lst, max_title)

    # flag if fabric labels were forced to be changed
    fabric_labels_change = bool(report_steps_dct['fabric_labels'][1])
    # initialization of chassis information and fabric name columns usage
    report_columns_usage_dct = {
        'fabric_name_usage': True,
        'chassis_info_usage': True
    }
    # import data with switch models, firmware etc
    switch_models_df = dataframe_import('switch_models', max_title)
    # clean fabricshow DataFrame from unnecessary data
    fabric_clean_df = fabric_clean(fabricshow_ag_labels_df)
    # create aggregated table by joining DataFrames
    switch_params_aggregated_df, report_columns_usage_dct = \
        fabric_aggregation(fabric_clean_df, chassis_params_df, \
            switch_params_df, maps_params_df, switch_models_df)
    save_xlsx_file(switch_params_aggregated_df, 'switch_params_aggregated', \
        report_data_lst, report_type = 'analysis')

    # when no data saved, force extract flag is on or fabric labels have been
    # changed then analyze extracted config data
    if not all(data_check) or any(
            force_extract_keys_lst) or fabric_labels_change:
        # information string if fabric labels force change was initiated
        # and statistics recounting is required
        if fabric_labels_change and not any(force_extract_keys_lst) and all(
                data_check):
            info = 'Switch information force extract due to change in Fabrics labeling'
            print(info, end=" ")
            status_info('ok', max_title, len(info))

        # partition aggregated DataFrame to required tables
        switches_report_df, fabric_report_df, global_fabric_parameters_report_df, \
            switches_parameters_report_df, licenses_report_df = \
                dataframe_segmentation(switch_params_aggregated_df, data_names, \
                    report_columns_usage_dct, max_title)

        # drop rows with empty switch name column
        fabric_report_df.dropna(subset=['Имя коммутатора'], inplace=True)
        switches_parameters_report_df.dropna(subset=['Имя коммутатора'],
                                             inplace=True)
        licenses_report_df.dropna(subset=['Имя коммутатора'], inplace=True)

        # parameters are equal for all switches in one fabric
        if report_columns_usage_dct['fabric_name_usage']:
            global_fabric_parameters_report_df.drop_duplicates(
                subset=['Фабрика', 'Подсеть'], inplace=True)
        else:
            global_fabric_parameters_report_df.drop_duplicates(
                subset=['Подсеть'], inplace=True)
        global_fabric_parameters_report_df.reset_index(inplace=True, drop=True)

        # create list with partitioned DataFrames
        data_lst = [switches_report_df, fabric_report_df, global_fabric_parameters_report_df, \
            switches_parameters_report_df, licenses_report_df]

        # current operation information string
        info = 'Generating Fabric and Switches tables'
        print(info, end=" ")
        # after finish display status
        status_info('ok', max_title, len(info))

        # saving DataFrames to csv file
        save_data(report_data_lst, data_names, *data_lst)

    # save data to service file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return switch_params_aggregated_df, report_columns_usage_dct, fabric_clean_df
# Ejemplo n.º 12 (0)
def storage_3par_extract(nsshow_df, nscamshow_df, local_3par_folder,
                         project_folder, report_data_lst):
    """Extract 3PAR storage system, port and host information.

    Detects 3PAR systems registered in the fabric NameServer, locates their
    configuration files (downloaded from STATs on previous iterations or
    taken from local_3par_folder) and parses system, port and host details
    from each file.

    Returns three lists of extracted rows:
    (system_3par_comprehensive_lst, port_3par_comprehensive_lst,
    host_3par_comprehensive_lst).
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['system_3par', 'port_3par', 'host_3par']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    system_3par_comprehensive_lst, port_3par_comprehensive_lst, host_3par_comprehensive_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):

        # lists to store only REQUIRED information
        # collecting data for all systems during looping
        # list containing system parameters
        system_3par_comprehensive_lst = []
        # list containing 3par FC port information
        port_3par_comprehensive_lst = []
        # list containing hosts defined on 3par ports
        host_3par_comprehensive_lst = []

        # data imported from init file to extract values from config file
        params, params_add, comp_keys, match_keys, comp_dct = data_extract_objects(
            '3par', max_title)
        # verify if 3par systems registered in fabric NameServer
        ns_3par_df = verify_ns_3par(nsshow_df, nscamshow_df, comp_dct)

        if not ns_3par_df.empty:
            print('\n')
            print('3PAR Storage Systems detected in SAN')
            print(ns_3par_df)
            print('\n')
            # find configuration files to parse (download from STATs, local folder or use configurations
            # downloaded on previous iterations)
            configs_3par_lst = configs_download(ns_3par_df, project_folder,
                                                local_3par_folder, comp_keys,
                                                match_keys, comp_dct,
                                                report_data_lst)

            if configs_3par_lst:
                print('\nEXTRACTING 3PAR STORAGE INFORMATION ...\n')
                # number of files to check
                configs_num = len(configs_3par_lst)

                for i, config_3par in enumerate(configs_3par_lst):
                    # file name
                    configname = os.path.basename(config_3par)
                    # current operation information string
                    info = f'[{i+1} of {configs_num}]: {configname} system'
                    print(info, end=" ")
                    # parse system, port and host sections of the config file
                    showsys_lst, port_lst, host_lst = parse_config(
                        config_3par, params, params_add, comp_keys, match_keys,
                        comp_dct)
                    system_3par_comprehensive_lst.extend(showsys_lst)
                    port_3par_comprehensive_lst.extend(port_lst)
                    host_3par_comprehensive_lst.extend(host_lst)
                    if port_lst or host_lst:
                        status_info('ok', max_title, len(info))
                    else:
                        status_info('no data', max_title, len(info))

                # save extracted data to json file
                save_data(report_data_lst, data_names,
                          system_3par_comprehensive_lst,
                          port_3par_comprehensive_lst,
                          host_3par_comprehensive_lst)
        else:
            # current operation information string
            info = 'Collecting 3PAR storage systems information'
            print(info, end=" ")
            status_info('skip', max_title, len(info))
            # save empty data to json file
            save_data(report_data_lst, data_names,
                      system_3par_comprehensive_lst,
                      port_3par_comprehensive_lst, host_3par_comprehensive_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        system_3par_comprehensive_lst, port_3par_comprehensive_lst, host_3par_comprehensive_lst = verify_data(
            report_data_lst, data_names, *data_lst)

    return system_3par_comprehensive_lst, port_3par_comprehensive_lst, host_3par_comprehensive_lst
# Ejemplo n.º 13 (0)
def isl_main(fabricshow_ag_labels_df, switch_params_aggregated_df, report_columns_usage_dct, 
    isl_df, trunk_df, fcredge_df, portshow_df, sfpshow_df, portcfgshow_df, switchshow_ports_df, report_data_lst):
    """Main function to create ISL and IFL report tables.

    Aggregates inter-switch link (ISL), trunk, FC router edge, port, SFP and
    switchshow data into isl_aggregated_df, builds ISL statistics and
    partitions the results into the report tables (ISL, IFL, ISL statistics).

    Returns (isl_aggregated_df, isl_statistics_df).
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['isl_aggregated', 'isl_statistics', 'Межкоммутаторные_соединения', 'Межфабричные_соединения', 'Статистика_ISL']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # loading data if they were saved on previous iterations
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    isl_aggregated_df, isl_statistics_df, isl_report_df, ifl_report_df, isl_statistics_report_df = data_lst

    # list of data to analyze from report_info table
    analyzed_data_names = ['isl', 'trunk', 'fcredge', 'sfpshow', 'portcfgshow', 
                            'chassis_parameters', 'switch_parameters', 'switchshow_ports', 
                            'maps_parameters', 'blade_interconnect', 'fabric_labels']

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct, 
                                            max_title, analyzed_data_names)
    if force_run:

        # data imported from init file (regular expression patterns) to extract values from data columns
        # re_pattern list contains comp_keys, match_keys, comp_dct
        _, _, *re_pattern_lst = data_extract_objects('common_regex', max_title)

        # current operation information string
        info = 'Generating ISL and IFL tables'
        print(info, end=" ")

        # get aggregated DataFrames
        isl_aggregated_df, fcredge_df = \
            isl_aggregated(fabricshow_ag_labels_df, switch_params_aggregated_df, 
            isl_df, trunk_df, fcredge_df, portshow_df, sfpshow_df, portcfgshow_df, switchshow_ports_df, re_pattern_lst)

        isl_statistics_df = isl_statistics(isl_aggregated_df, re_pattern_lst, report_data_lst)

        # after finish display status
        status_info('ok', max_title, len(info))

        # partition aggregated DataFrame to required tables
        isl_report_df, = dataframe_segmentation(isl_aggregated_df, [data_names[2]], report_columns_usage_dct, max_title)
        isl_report_df = translate_values(isl_report_df, translate_dct={'Yes': 'Да', 'No': 'Нет'})
        # if no trunks in fabric drop trunk columns
        if trunk_df.empty:
            isl_report_df.drop(columns = ['Идентификатор транка', 'Deskew', 'Master'], inplace = True)
        # check if IFL table required
        if not fcredge_df.empty:
            ifl_report_df, = dataframe_segmentation(fcredge_df, [data_names[3]], report_columns_usage_dct, max_title)
        else:
            ifl_report_df = fcredge_df.copy()

        isl_statistics_report_df = isl_statistics_report(isl_statistics_df, report_columns_usage_dct, max_title)

        # create list with partitioned DataFrames
        data_lst = [isl_aggregated_df, isl_statistics_df, isl_report_df, ifl_report_df, isl_statistics_report_df]
        # saving fabric_statistics and fabric_statistics_summary DataFrames to csv file
        save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        isl_aggregated_df, isl_statistics_df, isl_report_df, ifl_report_df, isl_statistics_report_df = \
            verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [isl_aggregated_df, isl_statistics_df, isl_report_df, ifl_report_df, isl_statistics_report_df]
    # save data to service file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return isl_aggregated_df, isl_statistics_df
# Ejemplo n.º 14 (0)
def portcmdshow_extract(chassis_params_fabric_lst, report_data_lst):
    """Function to extract portshow, portloginshow, portstatsshow information"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['portcmd']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previos program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    portshow_lst, = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data  from configueation files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING PORTSHOW, PORTLOGINSHOW, PORTSTATSSHOW INFORMATION FROM SUPPORTSHOW CONFIGURATION FILES ...\n'
        )

        # extract chassis parameters names from init file
        chassis_columns = columns_import('chassis', max_title, 'columns')
        # number of switches to check
        switch_num = len(chassis_params_fabric_lst)
        # list to store only REQUIRED switch parameters
        # collecting data for all switches during looping
        portshow_lst = []
        # data imported from init file to extract values from config file
        portcmd_params, params_add, comp_keys, match_keys, comp_dct = data_extract_objects(
            'portcmd', max_title)

        # chassis_params_fabric_lst [[chassis_params_sw1], [chassis_params_sw1]]
        # checking each chassis for switch level parameters
        for i, chassis_params_data in enumerate(chassis_params_fabric_lst):
            # data unpacking from iter param
            # dictionary with parameters for the current chassis
            chassis_params_data_dct = dict(
                zip(chassis_columns, chassis_params_data))
            sshow_file = chassis_params_data_dct['configname']
            chassis_name = chassis_params_data_dct['chassis_name']
            chassis_wwn = chassis_params_data_dct['chassis_wwn']
            # current operation information string
            info = f'[{i+1} of {switch_num}]: {chassis_params_data_dct["chassis_name"]} switch portshow, portloginshow and statsshow'
            print(info, end=" ")

            # search control dictionary. continue to check sshow_file until all parameters groups are found
            collected = {'portshow': False}

            with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                # check file until all groups of parameters extracted
                while not all(collected.values()):
                    line = file.readline()
                    if not line:
                        break
                    # sshow_port section start
                    if re.search(r'^\| Section: SSHOW_PORT \|$', line):
                        # when section is found corresponding collected dict values changed to True
                        collected['portshow'] = True
                        while not re.search(r'^\| ... rebuilt finished *\|$',
                                            line):
                            line = file.readline()
                            if not line:
                                break
                            # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                            match_dct = {
                                match_key: comp_dct[comp_key].match(line)
                                for comp_key, match_key in zip(
                                    comp_keys, match_keys)
                            }
                            # portFcPortCmdShow section start
                            if match_dct[match_keys[0]]:
                                # dictionary to store all DISCOVERED parameters
                                # collecting data only for the chassis in current loop
                                portcmd_dct = {}
                                # connected devices wwns in portshow section
                                connected_wwn_lst = []
                                # list to store connected devices port_id and wwn pairs in portlogin section
                                portid_wwn_lst = []
                                port_index = None
                                slot_port_lst = line_to_list(
                                    comp_dct[comp_keys[0]], line)
                                while not re.search(r'^portshow +(\d{1,4})$',
                                                    line):
                                    line = file.readline()
                                    if not line:
                                        break
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # portshow section start
                                if match_dct[match_keys[1]]:
                                    port_index = match_dct[
                                        match_keys[1]].group(1)
                                    while not re.search(
                                            fr'^portloginshow +{int(port_index)}$',
                                            line):
                                        line = file.readline()
                                        match_dct = {
                                            match_key:
                                            comp_dct[comp_key].match(line)
                                            for comp_key, match_key in zip(
                                                comp_keys, match_keys)
                                        }
                                        # portshow_params_match
                                        if match_dct[match_keys[2]]:
                                            portshow_attr, = comp_dct[
                                                comp_keys[2]].findall(line)
                                            # portstate parameter has special processing rule
                                            if portshow_attr[0] == 'portState':
                                                portcmd_dct[
                                                    'portState'] = portshow_attr[
                                                        2]
                                            # portshow_attr can contain up to 3 parameter name and value pairs
                                            # parameters name on even positions in the portshow_attr and values on odd
                                            else:
                                                for k, v in zip(
                                                        portshow_attr[::2],
                                                        portshow_attr[1::2]):
                                                    portcmd_dct[k] = v
                                        # portscn_match has two parameter name and value pairs
                                        if match_dct[match_keys[6]]:
                                            portscn_line = line_to_list(
                                                comp_dct[comp_keys[6]], line)
                                            for k, v in zip(
                                                    portscn_line[::2],
                                                    portscn_line[1::2]):
                                                portcmd_dct[k] = v
                                        # portdistance_match
                                        if match_dct[match_keys[7]]:
                                            portdistance_line = line_to_list(
                                                comp_dct[comp_keys[7]], line)
                                            portcmd_dct[portdistance_line[
                                                0]] = portdistance_line[1]
                                        # device_connected_wwn_match
                                        if match_dct[match_keys[8]]:
                                            connected_wwn = line_to_list(
                                                comp_dct[comp_keys[8]], line)
                                            connected_wwn_lst.append(
                                                (portcmd_dct.get('portId'),
                                                 connected_wwn))
                                        if not line:
                                            break
                                # portshow section end
                                # portlogin section start
                                if re.match(
                                        fr'^portloginshow +{int(port_index)}$',
                                        line):

                                    while not re.search(
                                            fr'^portregshow +{int(port_index)}$',
                                            line):
                                        line = file.readline()
                                        match_dct = {
                                            match_key:
                                            comp_dct[comp_key].match(line)
                                            for comp_key, match_key in zip(
                                                comp_keys, match_keys)
                                        }
                                        # connected_wwn_match
                                        if match_dct[match_keys[3]]:
                                            # first value in tuple unpacking is fe or fd and not required
                                            _, port_id, wwn = line_to_list(
                                                comp_dct[comp_keys[3]], line)
                                            # port_id = '0x' + port_id
                                            portid_wwn_lst.append(
                                                (port_id, wwn))
                                        if not line:
                                            break
                                    # sorting connected devices list by port_ids
                                    if len(portid_wwn_lst) != 0:
                                        portid_wwn_lst = sorted(portid_wwn_lst)
                                    # if portlogin empty then use connected devices from portshow section
                                    # applied for E-ports
                                    elif len(connected_wwn_lst) != 0:
                                        portid_wwn_lst = connected_wwn_lst.copy(
                                        )
                                    # adding port_id and None wwn if no device is connected or slave trunk link
                                    else:
                                        portid_wwn_lst.append(
                                            [portcmd_dct.get('portId'), None])
                                # portlogin section end
                                while not re.match(
                                        fr'^portstatsshow +{int(port_index)}$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                                # portstatsshow section start
                                if re.match(
                                        fr'^portstatsshow +{int(port_index)}$',
                                        line):
                                    while not re.search(
                                            fr'^(portstats64show|portcamshow) +{int(port_index)}$',
                                            line):
                                        line = file.readline()
                                        match_dct = {
                                            match_key:
                                            comp_dct[comp_key].match(line)
                                            for comp_key, match_key in zip(
                                                comp_keys, match_keys)
                                        }
                                        # port information without virtual channel numbers
                                        if match_dct[match_keys[4]]:
                                            portcmd_dct[match_dct[
                                                match_keys[4]].group(
                                                    1).rstrip()] = match_dct[
                                                        match_keys[4]].group(2)
                                        # port information with virtual channel numbers
                                        elif match_dct[match_keys[5]]:
                                            line_values = line_to_list(
                                                comp_dct[comp_keys[5]], line)
                                            param_name, start_vc = line_values[
                                                0:2]
                                            for i, value in enumerate(
                                                    line_values[3:]):
                                                param_name_vc = param_name + '_' + str(
                                                    int(start_vc) + i)
                                                portcmd_dct[
                                                    param_name_vc] = value
                                        if not line:
                                            break
                                # portstatsshow section end
                                # portFcPortCmdShow section end

                                # additional values which need to be added to the dictionary with all DISCOVERED parameters during current loop iteration
                                # chassis_slot_port_values order (configname, chassis_name, port_index, slot_num, port_num, port_ids and wwns of connected devices)
                                # values axtracted in manual mode. if change values order change keys order in init.xlsx "chassis_params_add" column
                                for port_id, connected_wwn in portid_wwn_lst:
                                    chassis_slot_port_values = [
                                        sshow_file, chassis_name, chassis_wwn,
                                        port_index, *slot_port_lst, port_id,
                                        connected_wwn
                                    ]
                                    # print('chassis_slot_port_values', chassis_slot_port_values)
                                    # adding or changing data from chassis_slot_port_values to the DISCOVERED dictionary
                                    update_dct(params_add,
                                               chassis_slot_port_values,
                                               portcmd_dct)
                                    # adding data to the REQUIRED list for each device connected to the port
                                    portshow_lst.append([
                                        portcmd_dct.get(portcmd_param, None)
                                        for portcmd_param in portcmd_params
                                    ])
                                    # print('portshow_lst', portshow_lst)

                    # sshow_port section end
            status_info('ok', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, portshow_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        portshow_lst = verify_data(report_data_lst, data_names, *data_lst)

    return portshow_lst
Ejemplo n.º 15
0
def fcr_extract(switch_params_lst, report_data_lst):
    """Function to extract fabrics routing information.

    Reads the supportshow configuration file of each FC router and collects
    data from the fcrfabricshow, fcrproxydevshow, fcrphydevshow, lsanzoneshow,
    fcredgeshow and fcrresourceshow sections. Sections fcredgeshow and
    fcrresourceshow are collected for both Principal and Subordinate routers;
    the remaining sections for Principal routers only.

    Args:
        switch_params_lst: list of per-switch parameter value lists
            ([[switch_params_sw1], [switch_params_sw2], ...]) matching the
            'switch' columns imported from the init file.
        report_data_lst: customer_name, dir_report, dir to save obtained data,
            max_title, report_steps_dct.

    Returns:
        tuple: fcrfabric_lst, fcrproxydev_lst, fcrphydev_lst, lsan_lst,
        fcredge_lst, fcrresource_lst — freshly extracted lists, or the
        verified data loaded from a previous program run.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = [
        'fcrfabric', 'fcrproxydev', 'fcrphydev', 'lsan', 'fcredge',
        'fcrresource'
    ]
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    fcrfabric_lst, fcrproxydev_lst, fcrphydev_lst, lsan_lst, \
        fcredge_lst, fcrresource_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING FABRICS ROUTING INFORMATION FROM SUPPORTSHOW CONFIGURATION FILES ...\n'
        )

        # extract switch parameters names from init file
        switch_columns = columns_import('switch', max_title, 'columns')
        # number of switches to check
        switch_num = len(switch_params_lst)

        # lists to store only REQUIRED switch parameters
        # collecting data for all switches during looping
        fcrfabric_lst = []
        fcrproxydev_lst = []
        fcrphydev_lst = []
        lsan_lst = []
        fcredge_lst = []
        fcrresource_lst = []

        # dictionary to collect fcr device data
        # first element of list is regular expression pattern number,
        # second - list to collect data, third is index in line to which slice extracted list
        fcrdev_dct = {
            'fcrproxydev': [5, fcrproxydev_lst, None],
            'fcrphydev': [6, fcrphydev_lst, -3]
        }

        # data imported from init file to extract values from config file
        params, _, comp_keys, match_keys, comp_dct = data_extract_objects(
            'fcr', max_title)

        # switch_params_lst [[switch_params_sw1], [switch_params_sw1]]
        # checking each switch for switch level parameters
        for i, switch_params_data in enumerate(switch_params_lst):
            # dictionary with parameters for the current switch
            switch_params_data_dct = dict(
                zip(switch_columns, switch_params_data))
            switch_info_keys = [
                'configname', 'chassis_name', 'chassis_wwn', 'switch_index',
                'SwitchName', 'switchWwn', 'switchRole', 'Fabric_ID',
                'FC_Router'
            ]
            switch_info_lst = [
                switch_params_data_dct.get(key) for key in switch_info_keys
            ]
            # True when the switch operates in Logical Switch mode
            ls_mode_on = True if switch_params_data_dct[
                'LS_mode'] == 'ON' else False

            # data unpacking from iter param
            sshow_file, *_, switch_name, _, switch_role, fid, fc_router = switch_info_lst

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} fabric routing. FC Routing: {fc_router}'
            print(info, end=" ")

            # search control dictionary. continue to check sshow_file until all parameters groups are found
            # Subordinate routers only contain the fcredge and fcrresource sections
            collected = {'fcrfabric': False, 'fcrproxydev': False, 'fcrphydev': False,
                         'lsanzone': False, 'fcredge': False, 'fcrresource': False} \
                if switch_role == 'Principal' else {'fcredge': False, 'fcrresource': False}

            # check config of FC routers only
            if fc_router == 'ON':
                # fcrouter_info_lst contains configname, chassis_name, chassis_wwn,
                # switch_index, switch_name, switch_wwn and fabric_id
                fcrouter_info_lst = [*switch_info_lst[:6], switch_info_lst[7]]
                with open(sshow_file, encoding='utf-8',
                          errors='ignore') as file:
                    # check file until all groups of parameters extracted
                    while not all(collected.values()):
                        line = file.readline()
                        if not line:
                            break
                        # check configs of Principal switches only
                        if switch_role == 'Principal':
                            # fcrfabricshow section start
                            # switchcmd_fcrfabricshow
                            if re.search(comp_dct[comp_keys[0]],
                                         line) and not collected['fcrfabric']:
                                collected['fcrfabric'] = True
                                # in Logical Switch mode skip forward to the base switch context
                                if ls_mode_on:
                                    while not re.search(
                                            fr'^BASE +SWITCH +CONTEXT *-- *FID: *{fid}$',
                                            line):
                                        line = file.readline()
                                        if not line:
                                            break
                                # switchcmd_end_comp
                                while not re.search(comp_dct[comp_keys[4]],
                                                    line):
                                    line = file.readline()
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # fc_router_match
                                    if match_dct[match_keys[1]]:
                                        fcrouter_params_lst = line_to_list(
                                            comp_dct[comp_keys[1]], line)
                                        # check if line is empty
                                        while not re.match('\r?\n', line):
                                            line = file.readline()
                                            match_dct = {
                                                match_key:
                                                comp_dct[comp_key].match(line)
                                                for comp_key, match_key in zip(
                                                    comp_keys, match_keys)
                                            }
                                            # fcr_info_match
                                            if match_dct[match_keys[2]]:
                                                fcrouter_name = match_dct[
                                                    match_keys[2]].group(1)
                                            # fcr_exports_match
                                            # NOTE(review): fcrouter_name is only bound by fcr_info_match above;
                                            # presumably the info line always precedes the exports lines — confirm
                                            if match_dct[match_keys[3]]:
                                                fcrfabric_lst.append(
                                                    line_to_list(
                                                        comp_dct[comp_keys[3]],
                                                        line,
                                                        *fcrouter_info_lst,
                                                        fcrouter_name,
                                                        *fcrouter_params_lst))
                                            if not line:
                                                break
                                    if not line:
                                        break
                            # fcrfabricshow section end

                            # fcrproxydev and fcrphydev checked in a loop over dictionary keys because data representation is similar
                            # fcrdevshow section start
                            for fcrdev_type in fcrdev_dct.keys():
                                re_num, fcrdev_lst, slice_index = fcrdev_dct[
                                    fcrdev_type]
                                # switchcmd_fcrproxydevshow_comp
                                if re.search(
                                        comp_dct[comp_keys[re_num]],
                                        line) and not collected[fcrdev_type]:
                                    collected[fcrdev_type] = True
                                    # in Logical Switch mode skip forward to the base switch context
                                    if ls_mode_on:
                                        while not re.search(
                                                fr'^BASE +SWITCH +CONTEXT *-- *FID: *{fid} *$',
                                                line):
                                            line = file.readline()
                                            if not line:
                                                break
                                    # switchcmd_end_comp
                                    while not re.search(
                                            comp_dct[comp_keys[4]], line):
                                        line = file.readline()
                                        match_dct = {
                                            match_key:
                                            comp_dct[comp_key].match(line)
                                            for comp_key, match_key in zip(
                                                comp_keys, match_keys)
                                        }
                                        # fcrdevshow_match
                                        if match_dct[match_keys[7]]:
                                            # slice_index trims trailing fields for fcrphydev lines
                                            fcrdev_lst.append(
                                                line_to_list(
                                                    comp_dct[comp_keys[7]],
                                                    line, *fcrouter_info_lst)
                                                [:slice_index])
                                        if not line:
                                            break
                            # fcrdevshow section end
                            # lsanzoneshow section start
                            if re.search(comp_dct[comp_keys[8]],
                                         line) and not collected['lsanzone']:
                                collected['lsanzone'] = True
                                # in Logical Switch mode skip forward to the base switch context
                                if ls_mode_on:
                                    while not re.search(
                                            fr'^BASE +SWITCH +CONTEXT *-- *FID: *{fid} *$',
                                            line):
                                        line = file.readline()
                                        if not line:
                                            break
                                # switchcmd_end_comp
                                # NOTE: this loop intentionally reads the next line at the bottom
                                # (or in the else branch) rather than at the top like the other sections
                                while not re.search(comp_dct[comp_keys[4]],
                                                    line):
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # lsan_name_match
                                    if match_dct[match_keys[9]]:
                                        # switch_info and current connected device wwnp
                                        lsan_name = line_to_list(
                                            comp_dct[comp_keys[9]], line)
                                        # move cursor to one line down to get inside while loop
                                        line = file.readline()
                                        # lsan_switchcmd_end_comp
                                        while not re.search(
                                                comp_dct[comp_keys[11]], line):
                                            # line = file.readline()
                                            match_dct = {
                                                match_key:
                                                comp_dct[comp_key].match(line)
                                                for comp_key, match_key in zip(
                                                    comp_keys, match_keys)
                                            }
                                            # lsan_members_match
                                            if match_dct[match_keys[10]]:
                                                lsan_member = line_to_list(
                                                    comp_dct[comp_keys[10]],
                                                    line)
                                                lsan_lst.append([
                                                    *fcrouter_info_lst,
                                                    *lsan_name, *lsan_member
                                                ])
                                            #     line = file.readline()
                                            # else:
                                            #     line = file.readline()
                                            line = file.readline()
                                            if not line:
                                                break
                                    else:
                                        line = file.readline()
                                    if not line:
                                        break
                            # lsanzoneshow section end

                        # fcredge and fcrresource checked for Principal and Subordinate routers
                        # fcredgeshow section start
                        if re.search(comp_dct[comp_keys[12]],
                                     line) and not collected['fcredge']:
                            collected['fcredge'] = True
                            # in Logical Switch mode skip forward to the base switch context
                            if ls_mode_on:
                                while not re.search(
                                        fr'^BASE +SWITCH +CONTEXT *-- *FID: *{fid} *$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            while not re.search(comp_dct[comp_keys[4]], line):
                                line = file.readline()
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # fcredgeshow_match
                                if match_dct[match_keys[13]]:
                                    # fcredge_lst.append(line_to_list(comp_dct[comp_keys[13]], line, *fcrouter_info_lst, switch_wwn))
                                    fcredge_lst.append(
                                        line_to_list(comp_dct[comp_keys[13]],
                                                     line, *fcrouter_info_lst))
                                if not line:
                                    break
                        # fcredgeshow section end
                        # fcrresourceshow section start
                        if re.search(comp_dct[comp_keys[14]],
                                     line) and not collected['fcrresource']:
                            collected['fcrresource'] = True
                            fcrresource_dct = {}
                            # in Logical Switch mode skip forward to the base switch context
                            if ls_mode_on:
                                while not re.search(
                                        fr'^BASE +SWITCH +CONTEXT *-- *FID: *{fid} *$',
                                        line):
                                    line = file.readline()
                                    if not line:
                                        break
                            # switchcmd_end_comp
                            while not re.search(comp_dct[comp_keys[4]], line):
                                line = file.readline()
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                # fcrresourceshow_match
                                if match_dct[match_keys[15]]:
                                    fcrresource_dct[match_dct[match_keys[15]].group(1).rstrip()] = \
                                        [match_dct[match_keys[15]].group(2), match_dct[match_keys[15]].group(3)]
                                if not line:
                                    break
                            # each value of dictionary is list of two elements
                            # itertools.chain makes flat tmp_lst list from all lists in dictionary
                            # missing parameters are padded with [None, None]
                            tmp_lst = list(
                                itertools.chain(*[
                                    fcrresource_dct.get(param
                                                        ) if fcrresource_dct.
                                    get(param) else [None, None]
                                    for param in params
                                ]))
                            fcrresource_lst.append(
                                [*fcrouter_info_lst, *tmp_lst])
                        # fcrresourceshow section end

                status_info('ok', max_title, len(info))
            else:
                status_info('skip', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, fcrfabric_lst, fcrproxydev_lst,
                  fcrphydev_lst, lsan_lst, fcredge_lst, fcrresource_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        fcrfabric_lst, fcrproxydev_lst, fcrphydev_lst, lsan_lst, fcredge_lst, fcrresource_lst = verify_data(
            report_data_lst, data_names, *data_lst)

    return fcrfabric_lst, fcrproxydev_lst, fcrphydev_lst, lsan_lst, fcredge_lst, fcrresource_lst
Ejemplo n.º 16
0
def errdump_main(errdump_df, switchshow_df, switch_params_aggregated_df,
                 portshow_aggregated_df, report_columns_usage_dct,
                 report_data_lst):
    """Main function to get most frequently appeared log messages"""

    # report_data_lst holds customer_name, dir_report,
    # dir to save obtained data, max_title and report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names under which results of this module are saved
    output_names = ['errdump_aggregated', 'raslog_counter', 'Журнал']
    # service step information
    print(f'\n\n{report_steps_dct[output_names[0]][3]}\n')

    # restore data saved on previous program iterations (if any)
    datasets = load_data(report_data_lst, *output_names)
    # pylint: disable=unbalanced-tuple-unpacking
    errdump_aggregated_df, raslog_counter_df, raslog_report_df = datasets

    # datasets from the report_info table this step depends on
    dependency_names = [
        'chassis_parameters', 'switch_parameters', 'switchshow_ports',
        'maps_parameters', 'portshow_aggregated', 'fabric_labels'
    ]

    # re-run is forced when any dataset was not saved (file not found) or
    # when execution is explicitly requested for output or dependency data
    rerun = verify_force_run(output_names, datasets, report_steps_dct,
                             max_title, dependency_names)
    if not rerun:
        # loaded data may contain information strings; replace them with empty DataFrames
        errdump_aggregated_df, raslog_counter_df, raslog_report_df = \
            verify_data(report_data_lst, output_names, *datasets)
        datasets = [errdump_aggregated_df, raslog_counter_df, raslog_report_df]
    else:
        # regular expression patterns imported from init file
        # (the pattern list holds comp_keys, match_keys, comp_dct)
        _, _, *raslog_re_lst = data_extract_objects('raslog', max_title)

        # current operation information string
        step_info = f'Counting RASLog messages'
        print(step_info, end=" ")

        # build aggregated DataFrames
        errdump_aggregated_df, raslog_counter_df, raslog_frequent_df = \
            errdump_aggregated(errdump_df, switchshow_df, switch_params_aggregated_df, portshow_aggregated_df, raslog_re_lst)
        # after finish display status
        status_info('ok', max_title, len(step_info))

        # partition the most frequent messages into the report table
        raslog_report_df = raslog_report(raslog_frequent_df, output_names,
                                         report_columns_usage_dct, max_title)

        # persist the resulting DataFrames
        datasets = [errdump_aggregated_df, raslog_counter_df, raslog_report_df]
        save_data(report_data_lst, output_names, *datasets)
    # export each DataFrame to a service xlsx file when required
    for name, frame in zip(output_names, datasets):
        save_xlsx_file(frame, name, report_data_lst)

    return errdump_aggregated_df, raslog_counter_df
Ejemplo n.º 17
0
def connected_devices_extract(switch_params_lst, report_data_lst):
    """Function to extract connected devices information
    (fdmi, nsshow, nscamshow)

    Parses each switch supportshow configuration file for the FDMI device
    database and for the local (nsshow) and cached (nscamshow) Name Server
    tables. Returns three lists — fdmi_lst, nsshow_lst, nscamshow_lst —
    each holding one row of REQUIRED parameters per discovered device port.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['fdmi', 'nsshow', 'nscamshow']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    fdmi_lst, nsshow_lst, nscamshow_lst = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING INFORMATION ABOUT CONNECTED DEVICES (FDMI, NSSHOW, NSCAMSHOW) ...\n'
        )

        # column names of the switch parameters table imported from init file
        switch_columns = columns_import('switch', max_title, 'columns')
        # number of switches to check
        switch_num = len(switch_params_lst)

        # data imported from init file to extract values from config file
        params, params_add, comp_keys, match_keys, comp_dct = data_extract_objects(
            'connected_dev', max_title)
        nsshow_params, nsshow_params_add = columns_import(
            'connected_dev', max_title, 'nsshow_params', 'nsshow_params_add')

        # lists to store only REQUIRED information
        # collecting data for all switches ports during looping
        fdmi_lst = []
        # lists with local Name Server (NS) information
        nsshow_lst = []
        nscamshow_lst = []

        # dictionary with the nsshow data required to collect:
        # first element of each value is the regular expression pattern number,
        # second is the list the extracted rows are appended to
        nsshow_dct = {
            'nsshow': [5, nsshow_lst],
            'nscamshow': [6, nscamshow_lst]
        }

        # switch_params_lst [[switch_params_sw1], [switch_params_sw1]]
        # checking each switch for switch level parameters
        for i, switch_params_data in enumerate(switch_params_lst):
            # data unpacking from iter param
            # dictionary with parameters for the current switch
            switch_params_data_dct = dict(
                zip(switch_columns, switch_params_data))
            switch_info_keys = [
                'configname', 'chassis_name', 'chassis_wwn', 'switch_index',
                'SwitchName', 'switchWwn', 'switchMode'
            ]
            switch_info_lst = [
                switch_params_data_dct.get(key) for key in switch_info_keys
            ]
            # logical switch (Virtual Fabrics) mode flag
            ls_mode_on = True if switch_params_data_dct[
                'LS_mode'] == 'ON' else False

            sshow_file, *_, switch_index, switch_name, _, switch_mode = switch_info_lst

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} connected devices'
            print(info, end=" ")

            # search control dictionary. continue to check sshow_file until all parameters groups are found
            # Name Server service is started only in Native mode, so the nsshow and
            # nscamshow sections are awaited only for Native mode switches
            collected = {'fdmi': False, 'nsshow': False, 'nscamshow': False} \
                if switch_params_data_dct.get('switchMode') == 'Native' else {'fdmi': False}

            with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                # check file until all groups of parameters extracted
                while not all(collected.values()):
                    line = file.readline()
                    if not line:
                        break
                    # fdmi section start
                    # switchcmd_fdmishow_comp
                    if re.search(comp_dct[comp_keys[0]], line):
                        collected['fdmi'] = True
                        # in logical switch mode skip forward to the context block
                        # of the logical switch currently being processed
                        if ls_mode_on:
                            while not re.search(
                                    fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                    line):
                                line = file.readline()
                                if not line:
                                    break
                        # local_database_comp marks the end of the fdmi section
                        while not re.search(comp_dct[comp_keys[4]], line):
                            match_dct = {
                                match_key: comp_dct[comp_key].match(line)
                                for comp_key, match_key in zip(
                                    comp_keys, match_keys)
                            }
                            # wwnp_match: start of a single device's fdmi block
                            if match_dct[match_keys[1]]:
                                # dictionary to store all DISCOVERED switch ports information
                                # collecting data only for the logical switch in current loop
                                fdmi_dct = {}
                                # switch_info and current connected device wwnp
                                switch_wwnp = line_to_list(
                                    comp_dct[comp_keys[1]], line,
                                    *switch_info_lst[:6])
                                # move cursor one line down to get inside the while loop
                                line = file.readline()
                                # wwnp_local_comp marks the end of this device's block
                                while not re.search(comp_dct[comp_keys[3]],
                                                    line):
                                    line = file.readline()
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # fdmi_port_match: 'Parameter: value' pair
                                    if match_dct[match_keys[2]]:
                                        fdmi_dct[match_dct[match_keys[
                                            2]].group(1).rstrip()] = match_dct[
                                                match_keys[2]].group(
                                                    2).rstrip()
                                    if not line:
                                        break

                                # adding additional parameters and values to the fdmi_dct
                                update_dct(params_add, switch_wwnp, fdmi_dct)
                                # appending list with only REQUIRED port info for the current loop iteration to the list with all fabrics port info
                                fdmi_lst.append([
                                    fdmi_dct.get(param, None)
                                    for param in params
                                ])
                            else:
                                line = file.readline()
                            if not line:
                                break
                    # fdmi section end
                    # only switches in Native mode have Name Server service started
                    if switch_mode == 'Native':
                        # nsshow section start
                        for nsshow_type in nsshow_dct.keys():
                            # unpacking re number and list to save REQUIRED params
                            re_num, ns_lst = nsshow_dct[nsshow_type]
                            # switchcmd_nsshow_comp, switchcmd_nscamshow_comp
                            if re.search(comp_dct[comp_keys[re_num]], line):
                                collected[nsshow_type] = True
                                # in logical switch mode skip forward to the context
                                # block of the logical switch currently being processed
                                if ls_mode_on:
                                    while not re.search(
                                            fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',
                                            line):
                                        line = file.readline()
                                        if not line:
                                            break
                                # switchcmd_end_comp marks the end of the section
                                # NOTE(review): this loop advances `line` only inside
                                # the branches below (unlike the fdmi loop above) —
                                # presumably so the PID header line is not skipped;
                                # confirm against a sample supportshow file
                                while not re.search(comp_dct[comp_keys[9]],
                                                    line):
                                    # line = file.readline()
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # port_pid__match: start of a single device's NS entry
                                    if match_dct[match_keys[7]]:
                                        # dictionary to store all DISCOVERED switch ports information
                                        # collecting data only for the logical switch in current loop
                                        nsshow_port_dct = {}
                                        # switch_info and current connected device pid
                                        switch_pid = line_to_list(
                                            comp_dct[comp_keys[7]], line,
                                            *switch_info_lst[:6])
                                        # move cursor one line down to get inside the while loop
                                        line = file.readline()
                                        # pid_switchcmd_end_comp marks the end of this entry
                                        while not re.search(
                                                comp_dct[comp_keys[8]], line):
                                            match_dct = {
                                                match_key:
                                                comp_dct[comp_key].match(line)
                                                for comp_key, match_key in zip(
                                                    comp_keys, match_keys)
                                            }
                                            # nsshow_port_match: 'Parameter: value' pair
                                            if match_dct[match_keys[2]]:
                                                nsshow_port_dct[match_dct[
                                                    match_keys[2]].group(
                                                        1
                                                    ).rstrip()] = match_dct[
                                                        match_keys[2]].group(
                                                            2).rstrip()
                                            line = file.readline()
                                            if not line:
                                                break

                                        # adding additional parameters and values to the nsshow_port_dct
                                        update_dct(nsshow_params_add,
                                                   switch_pid, nsshow_port_dct)
                                        # appending list with only REQUIRED port info for the current loop iteration to the list with all fabrics port info
                                        ns_lst.append([
                                            nsshow_port_dct.get(
                                                nsshow_param, None)
                                            for nsshow_param in nsshow_params
                                        ])
                                    else:
                                        line = file.readline()
                                    if not line:
                                        break
                        # nsshow section end
            status_info('ok', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, fdmi_lst, nsshow_lst,
                  nscamshow_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        fdmi_lst, nsshow_lst, nscamshow_lst = verify_data(
            report_data_lst, data_names, *data_lst)

    return fdmi_lst, nsshow_lst, nscamshow_lst
Ejemplo n.º 18
0
def portinfo_extract(switch_params_lst, report_data_lst):
    """Function to extract switch port information

    Parses each switch supportshow configuration file for the 'sfpshow -all'
    and 'portcfgshow' command sections. Returns two lists: sfpshow_lst with
    one row of REQUIRED SFP parameters per port and portcfgshow_lst with one
    row of port configuration parameters per port.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['sfpshow', 'portcfgshow']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    sfpshow_lst, portcfgshow_lst = data_lst

    # when any data from data_lst was not saved (file not found) or
    # force extract flag is on then re-extract data from configuration files
    force_run = verify_force_run(data_names, data_lst, report_steps_dct, max_title)

    if force_run:
        print('\nEXTRACTING SWITCH PORTS SFP, PORTCFG INFORMATION FROM SUPPORTSHOW CONFIGURATION FILES ...\n')

        # column names of the switch parameters table imported from init file
        switch_columns = columns_import('switch', max_title, 'columns')
        # number of switches to check
        switch_num = len(switch_params_lst)

        # data imported from init file to extract values from config file
        params, params_add, comp_keys, match_keys, comp_dct = data_extract_objects('portinfo', max_title)
        portcfg_params = columns_import('portinfo', max_title, 'portcfg_params')
        # dictionary to save portcfg ALL information for all ports in fabric
        # (one list of per-port values per portcfg parameter name)
        portcfgshow_dct = dict((key, []) for key in portcfg_params)
        # list to store only REQUIRED switch parameters
        # collecting sfpshow data for all switches ports during looping
        sfpshow_lst = []
        # list to save portcfg information for all ports in fabric
        portcfgshow_lst = []


        # switch_params_lst [[switch_params_sw1], [switch_params_sw1]]
        # checking each switch for switch level parameters
        for i, switch_params_data in enumerate(switch_params_lst):

            # data unpacking from iter param
            # dictionary with parameters for the current switch
            switch_params_data_dct = dict(zip(switch_columns, switch_params_data))
            switch_info_keys = ['configname', 'chassis_name', 'chassis_wwn', 'switch_index', 
                                'SwitchName', 'switchWwn']
            switch_info_lst = [switch_params_data_dct.get(key) for key in switch_info_keys]
            # logical switch (Virtual Fabrics) mode flag
            ls_mode_on = True if switch_params_data_dct['LS_mode'] == 'ON' else False

            sshow_file, _, _, switch_index, switch_name, *_ = switch_info_lst

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} ports sfp and cfg'
            print(info, end =" ")
            # search control dictionary. continue to check sshow_file until all parameters groups are found
            collected = {'sfpshow': False, 'portcfgshow': False}
            with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                # check file until all groups of parameters extracted
                while not all(collected.values()):
                    line = file.readline()
                    if not line:
                        break
                    # sfpshow section start
                    if re.search(r'^(SWITCHCMD )?(/fabos/cliexec/)?sfpshow +-all *: *$', line) and not collected['sfpshow']:
                        collected['sfpshow'] = True
                        # in logical switch mode skip forward to the context block
                        # of the logical switch currently being processed
                        if ls_mode_on:
                            while not re.search(fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',line):
                                line = file.readline()
                                if not line:
                                    break
                        # read until the end of the sfpshow command output
                        while not re.search(r'^(real [\w.]+)|(\*\* SS CMD END \*\*)$',line):
                            line = file.readline()
                            match_dct ={match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                            # slot_port_match: start of a single port's SFP block
                            if match_dct[match_keys[0]]:
                                # dictionary to store all DISCOVERED switch ports information
                                # collecting data only for the logical switch in current loop
                                sfpshow_dct = {}
                                _, slot_num, port_num = line_to_list(comp_dct[comp_keys[0]], line)
                                # if switch has no slots then all ports have slot 0
                                slot_num = '0' if not slot_num else slot_num
                                # a port's SFP block ends with an empty line
                                while not re.match('\r?\n', line):
                                    line = file.readline()
                                    match_dct ={match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                                    # power_match: RX/TX power readings in several units
                                    if match_dct[match_keys[1]]:
                                        sfp_power_lst = line_to_list(comp_dct[comp_keys[1]], line)
                                        # cut off RX or TX Power label; the rest alternates value, unit
                                        sfp_power_value_unit = sfp_power_lst[1:]
                                        for v, k in zip(sfp_power_value_unit[::2], sfp_power_value_unit[1::2]):
                                            # normalize unit spelling used in the key
                                            if k == 'uWatts':
                                                k = 'uW'
                                            key = sfp_power_lst[0] + '_' + k
                                            sfpshow_dct[key] = v
                                    # transceiver_match
                                    elif match_dct[match_keys[2]]:
                                        sfpshow_dct[match_dct[match_keys[2]].group(1).rstrip()] = match_dct[match_keys[2]].group(2).rstrip()
                                    # no_sfp_match: empty port, mark vendor explicitly
                                    elif match_dct[match_keys[3]]:
                                            sfpshow_dct['Vendor Name'] = 'No SFP module'
                                    # not_available_match
                                    elif match_dct[match_keys[4]]:
                                            sfpshow_dct[match_dct[match_keys[4]].group(1).rstrip()] = match_dct[match_keys[4]].group(2).rstrip()
                                    # sfp_info_match: generic 'Parameter: value' pair
                                    elif match_dct[match_keys[5]]:
                                        sfpshow_dct[match_dct[match_keys[5]].group(1).rstrip()] = match_dct[match_keys[5]].group(2).rstrip()                                        
                                    if not line:
                                        break

                                # additional values which need to be added to the dictionary with all DISCOVERED parameters during current loop iteration
                                # values extracted in manual mode. if values order changes, change keys order in init.xlsx "chassis_params_add" column
                                sfpshow_port_values = [*switch_info_lst, slot_num, port_num]
                                # adding additional parameters and values to the sfpshow_dct
                                update_dct(params_add, sfpshow_port_values, sfpshow_dct)
                                # appending list with only REQUIRED port info for the current loop iteration to the list with all fabrics port info
                                sfpshow_lst.append([sfpshow_dct.get(param, None) for param in params])
                    # sfpshow section end
                    # portcfgshow section start
                    if re.search(r'^(SWITCHCMD )?(/fabos/cliexec/)?portcfgshow *: *$', line) and not collected['portcfgshow']:
                        collected['portcfgshow'] = True
                        # in logical switch mode skip forward to the context block
                        # of the logical switch currently being processed
                        if ls_mode_on:
                            while not re.search(fr'^CURRENT CONTEXT -- {switch_index} *, \d+$',line):
                                line = file.readline()
                                if not line:
                                    break
                        # read until the end of the portcfgshow command output
                        while not re.search(r'^(real [\w.]+)|(\*\* SS CMD END \*\*)$|No ports found in switch',line):
                            line = file.readline()
                            match_dct ={match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                            # slot_port_line_match: header row with slot and port numbers
                            if match_dct[match_keys[6]]:
                                # dictionary to store all DISCOVERED switch ports information
                                portcfgshow_tmp_dct = {}
                                # extract slot and port numbers
                                slot_num, port_nums_str = line_to_list(comp_dct[comp_keys[6]], line)
                                port_nums_lst = port_nums_str.split()
                                port_nums = len(port_nums_lst)
                                # list with switch and slot information
                                switch_info_slot_lst = switch_info_lst.copy()
                                switch_info_slot_lst.append(slot_num)
                                # adding switch and slot information for each port to dictionary
                                # (same switch/slot value replicated once per port column)
                                for portcfg_param, switch_info_value in zip(portcfg_params[:7], switch_info_slot_lst):
                                    portcfgshow_tmp_dct[portcfg_param] = [switch_info_value for i in range(port_nums)]
                                # adding port numbers to dictionary
                                portcfgshow_tmp_dct[portcfg_params[7]] = port_nums_lst
                                # a portcfg table block ends with an empty line
                                while not re.match('\r?\n', line):
                                    line = file.readline()
                                    match_dct ={match_key: comp_dct[comp_key].match(line) for comp_key, match_key in zip(comp_keys, match_keys)}
                                    # portcfg_match: parameter name followed by per-port values
                                    if match_dct[match_keys[7]]:
                                        # extract param name and values for each port and adding to dictionary
                                        param_name, param_values_str = line_to_list(comp_dct[comp_keys[7]], line)
                                        portcfgshow_tmp_dct[param_name] = param_values_str.split()
                                    if not line:
                                        break
                                # saving portcfg information of REQUIRED parameters from dictionary with DISCOVERED parameters
                                # missing parameters are padded with None so all columns stay the same length
                                for portcfg_param in portcfg_params:
                                    portcfgshow_dct[portcfg_param].extend(portcfgshow_tmp_dct.get(portcfg_param, [None for i in range(port_nums)]))
                    # portcfgshow section end

            status_info('ok', max_title, len(info))

        # after check all config files create list of lists from dictionary. each nested list contains portcfg information for one port
        for portcfg_param in portcfg_params:
            portcfgshow_lst.append(portcfgshow_dct.get(portcfg_param))
        portcfgshow_lst = list(zip(*portcfgshow_lst))

        # save extracted data to json file
        save_data(report_data_lst, data_names, sfpshow_lst, portcfgshow_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        sfpshow_lst, portcfgshow_lst = verify_data(report_data_lst, data_names, *data_lst)


    return sfpshow_lst, portcfgshow_lst
Ejemplo n.º 19
0
def zoning_analysis_main(switch_params_aggregated_df, portshow_aggregated_df,
                         cfg_df, zone_df, alias_df, cfg_effective_df,
                         fcrfabric_df, lsan_df, peerzone_df,
                         report_columns_usage_dct, report_data_lst):
    """Main function to analyze zoning configuration"""

    # report_data_lst layout:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # keys under which this module's resulting tables are stored
    data_names = [
        'zoning_aggregated', 'alias_aggregated', 'zonemember_statistics',
        'portshow_zoned_aggregated', 'alias_statistics',
        'effective_cfg_statistics', 'Зонирование', 'Псевдонимы',
        'Зонирование_A&B', 'Порты_не_в_зонах', 'Порты_без_псевдономов',
        'Отсутсвуют_в_сети', 'Статистика_зон', 'Статистика_псевдонимов',
        'Статистика_конфигурации'
    ]
    # announce the current analysis step
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # restore results saved on a previous program run if available
    data_lst = load_data(report_data_lst, *data_names)
    # pylint: disable=unbalanced-tuple-unpacking
    (zoning_aggregated_df, alias_aggregated_df, zonemember_statistics_df,
     portshow_zoned_aggregated_df, alias_statistics_df,
     effective_cfg_statistics_df, zoning_report_df, alias_report_df,
     zoning_compare_report_df, unzoned_device_report_df,
     no_alias_device_report_df, zoning_absent_device_report_df,
     zonemember_statistics_report_df, alias_statistics_report_df,
     effective_cfg_statistics_report_df) = data_lst

    # source tables from the report_info table this analysis depends on
    analyzed_data_names = [
        'cfg', 'cfg_effective', 'zone', 'alias', 'switch_params_aggregated',
        'switch_parameters', 'switchshow_ports', 'chassis_parameters',
        'portshow_aggregated', 'device_rename', 'report_columns_usage_upd',
        'portcmd', 'fdmi', 'nscamshow', 'nsshow', 'blade_servers',
        'fabric_labels'
    ]

    # re-run the analysis when any saved table is missing (file not found) or
    # when re-execution is explicitly requested for output or source data
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # current operation information string
        info = f'Generating zoning table'
        print(info, end=" ")

        # build aggregated zoning and alias DataFrames
        zoning_aggregated_df, alias_aggregated_df = zoning_aggregated(
            switch_params_aggregated_df, portshow_aggregated_df, cfg_df,
            zone_df, alias_df, cfg_effective_df, fcrfabric_df, lsan_df,
            peerzone_df, report_data_lst)

        # comprehensive statistics DataFrame with Fabric summaries plus
        # a zone-level statistics DataFrame without summaries
        zonemember_statistics_df, zonemember_zonelevel_stat_df = zonemember_statistics(
            zoning_aggregated_df, report_data_lst)
        # enrich the aggregated zoning table with statistics notes,
        # zone duplicates and zone pairs
        zoning_aggregated_df = statistics_to_aggregated_zoning(
            zoning_aggregated_df, zonemember_zonelevel_stat_df)
        # verify zoning configuration usage for every fabric device (Wwnp)
        portshow_zoned_aggregated_df = verify_cfg_type(portshow_aggregated_df,
                                                       zoning_aggregated_df,
                                                       ['PortName'])
        # alias configuration statistics
        alias_statistics_df = alias_dashboard(alias_aggregated_df,
                                              portshow_zoned_aggregated_df)
        # Effective zoning configuration summary statistics
        effective_cfg_statistics_df = cfg_dashborad(
            zonemember_statistics_df, portshow_zoned_aggregated_df,
            zoning_aggregated_df, alias_aggregated_df)
        # report completion of the operation
        status_info('ok', max_title, len(info))

        # build all report tables from the aggregated DataFrames
        (zoning_report_df, alias_report_df, zoning_compare_report_df,
         unzoned_device_report_df, no_alias_device_report_df,
         zoning_absent_device_report_df, zonemember_statistics_report_df,
         alias_statistics_report_df,
         effective_cfg_statistics_report_df) = zoning_report_main(
            zoning_aggregated_df, alias_aggregated_df, portshow_zoned_aggregated_df,
            zonemember_statistics_df, alias_statistics_df, effective_cfg_statistics_df,
            data_names, report_columns_usage_dct, max_title)

        # collect all resulting DataFrames in data_names order
        data_lst = [
            zoning_aggregated_df, alias_aggregated_df,
            zonemember_statistics_df, portshow_zoned_aggregated_df,
            alias_statistics_df, effective_cfg_statistics_df, zoning_report_df,
            alias_report_df, zoning_compare_report_df,
            unzoned_device_report_df, no_alias_device_report_df,
            zoning_absent_device_report_df, zonemember_statistics_report_df,
            alias_statistics_report_df, effective_cfg_statistics_report_df
        ]
        # persist results to json or csv file
        save_data(report_data_lst, data_names, *data_lst)

    # loaded data that is empty is replaced with empty DataFrames
    else:
        data_lst = list(verify_data(report_data_lst, data_names, *data_lst))
        (zoning_aggregated_df, alias_aggregated_df, zonemember_statistics_df,
         portshow_zoned_aggregated_df, alias_statistics_df,
         effective_cfg_statistics_df, zoning_report_df, alias_report_df,
         zoning_compare_report_df, unzoned_device_report_df,
         no_alias_device_report_df, zoning_absent_device_report_df,
         zonemember_statistics_report_df, alias_statistics_report_df,
         effective_cfg_statistics_report_df) = data_lst

    # export each table to the service xlsx file when required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return zoning_aggregated_df, alias_aggregated_df, portshow_zoned_aggregated_df
Ejemplo n.º 20
0
def chassis_params_extract(all_config_data, report_data_lst):
    """Extract chassis-level parameters from supportshow configuration files.

    Scans each switch supportshow file section by section (configshow,
    uptime, /proc/meminfo, df, ipaddrshow, licenseshow, SSHOW_FABRIC) and
    collects the REQUIRED chassis parameters defined in the init file
    ('chassis' sheet). Previously extracted data is reused unless the
    force extract flag is on or the saved data is missing.

    Args:
        all_config_data: list of [switch_name, sshow_file, ams_maps_files]
            entries, one per switch configuration set.
        report_data_lst: customer_name, dir_report, dir to save obtained
            data, max_title, report_steps_dct.

    Returns:
        list: one row per switch with the REQUIRED chassis parameters
        (None for any parameter absent from the configuration).
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['chassis_parameters']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    chassis_params_fabric_lst, = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even if they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING CHASSIS PARAMETERS FROM SUPPORTSHOW CONFIGURATION FILES ...\n'
        )
        # number of switches to check
        switch_num = len(all_config_data)
        # list to store only REQUIRED chassis parameters
        # collecting data for all chassis during looping
        chassis_params_fabric_lst = []
        # data imported from init file to extract values from config file
        chassis_params, chassis_params_add, comp_keys, match_keys, comp_dct = data_extract_objects(
            'chassis', max_title)

        def _match_line(line):
            """Return dict of match results of line against all imported regular expressions."""
            return {
                match_key: comp_dct[comp_key].match(line)
                for comp_key, match_key in zip(comp_keys, match_keys)
            }

        # regex identifying the active logical switch context inside SSHOW_FABRIC;
        # compiled once and used both as the condition and for group extraction
        # (the original search-then-match pair could raise AttributeError on a
        # mid-line match)
        vf_context_re = re.compile(r'CURRENT +CONTEXT +-- +(\d+) *, \d+')

        # all_config_data format ([switch_name, supportshow file, (ams_maps_log files, ...)])
        # checking each config set (supportshow file) for chassis level parameters
        for i, switch_config_data in enumerate(all_config_data):
            # data unpacking from iter param
            switch_name, sshow_file, ams_maps_file = switch_config_data
            # search control dictionary. continue to check sshow_file until all parameter groups are found
            collected = {
                'configshow': False,
                'uptime_cpu': False,
                'flash': False,
                'memory': False,
                'dhcp': False,
                'licenses': False,
                'vf_id': False
            }
            # dictionary to store all DISCOVERED chassis parameters
            # collecting data only for the chassis in current loop
            chassis_params_dct = {}
            # sets and list to store parameters which could be joined in one group
            snmp_target_set = set()
            syslog_set = set()
            tz_lst = []
            licenses = []
            vf_id_set = set()
            # initialize per-section scalars so that a missing or unparsed
            # section yields None (skipped during aggregation below) instead
            # of raising UnboundLocalError or silently reusing the previous
            # switch's values
            uptime = None
            cpu_load = None
            memory = None
            flash = None

            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} chassis parameters'
            print(info, end=" ")

            with open(sshow_file, encoding='utf-8', errors='ignore') as file:
                # check file until all groups of parameters extracted
                while not all(collected.values()):
                    line = file.readline()
                    if not line:
                        break
                    # configshow section start
                    if re.search(
                            r'^(/fabos/cliexec/|/bin/cat /var/log/)?configshow *-?(all)? *:$',
                            line):
                        # when section is found corresponding collected dict values changed to True
                        collected['configshow'] = True
                        while not re.search(
                                r'^(\[Chassis Configuration End\])|(real [\w.]+)|(\*\* SS CMD END \*\*)$',
                                line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # match_keys ['chassis_param_match', 'snmp_target_match', 'syslog_match', 'tz_match', 'uptime_cpu_match', 'memory_match', 'flash_match']
                            # 'chassis_param_match'
                            if match_dct[match_keys[0]]:
                                chassis_params_dct[match_dct[
                                    match_keys[0]].group(1).rstrip(
                                    )] = match_dct[match_keys[0]].group(3)
                            # for snmp and syslog data addresses are added to the corresponding sets to avoid duplicates
                            # 'snmp_target_match'
                            if match_dct[match_keys[1]] and match_dct[
                                    match_keys[1]].group(2) != '0.0.0.0':
                                snmp_target_set.add(
                                    match_dct[match_keys[1]].group(2))
                            # 'syslog_match'
                            if match_dct[match_keys[2]]:
                                syslog_set.add(
                                    match_dct[match_keys[2]].group(2))
                            # for timezone extracted data added to the list for later concatenation
                            # 'tz_match'
                            if match_dct[match_keys[3]]:
                                tz_lst.append(
                                    match_dct[match_keys[3]].group(2))
                            if not line:
                                break
                    # config section end
                    # uptime section start
                    elif re.search(r'^(/fabos/cliexec/)?uptime *:$', line):
                        collected['uptime_cpu'] = True
                        while not re.search(r'^real [\w.]+$', line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # 'uptime_cpu_match'
                            if match_dct[match_keys[4]]:
                                uptime = match_dct[match_keys[4]].group(1)
                                cpu_load = match_dct[match_keys[4]].group(2)
                            if not line:
                                break
                    # uptime section end
                    # memory section start
                    elif re.search(r'^(/bin/)?(cat\s+)?/proc/meminfo\s*:$',
                                   line):
                        collected['memory'] = True
                        memory_dct = {}
                        while not re.search(r'^real [\w.]+$', line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # 'memory_match'
                            if match_dct[match_keys[5]]:
                                memory_dct[match_dct[match_keys[5]].group(
                                    1)] = match_dct[match_keys[5]].group(2)
                            if not line:
                                break
                        # memory usage percentage: free_mem + buffers related to total memory
                        # guard against truncated meminfo output (keys absent -> memory stays None)
                        if {'MemFree', 'Buffers', 'MemTotal'} <= memory_dct.keys():
                            memory = round((1 - (int(memory_dct['MemFree']) +
                                                 int(memory_dct['Buffers'])) /
                                            int(memory_dct['MemTotal'])) * 100)
                            memory = str(memory)
                    # memory section end
                    # flash section start
                    elif re.search(r'^(/bin/)?df\s*:$', line):
                        collected['flash'] = True
                        while not re.search(r'^real [\w.]+$', line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # 'flash_match'
                            if match_dct[match_keys[6]]:
                                flash = match_dct[match_keys[6]].group(1)
                            if not line:
                                break
                    # flash section end
                    # ipaddrshow section start
                    if re.search(r'^(/fabos/link_bin/)?ipaddrshow *:$', line):
                        collected['dhcp'] = True
                        while not re.search(
                                r'^(real [\w.]+)|(\*\* SS CMD END \*\*)$',
                                line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # 'dhcp_match'
                            if match_dct[match_keys[7]]:
                                chassis_params_dct[match_dct[
                                    match_keys[7]].group(1).rstrip(
                                    )] = match_dct[match_keys[7]].group(2)
                            if not line:
                                break
                    # ipaddrshow section end
                    # licenses section start
                    if re.search(r'^(/fabos/cliexec/)?licenseshow *:$', line):
                        collected['licenses'] = True
                        while not re.search(
                                r'^(real [\w.]+)|(\*\* SS CMD END \*\*)$',
                                line):
                            line = file.readline()
                            match_dct = _match_line(line)
                            # 'licenses_match'
                            if match_dct[match_keys[8]]:
                                licenses.append(
                                    match_dct[match_keys[8]].group(1))
                            # literal dot escaped (original pattern matched any character)
                            elif re.match(r'^No licenses installed\.$', line):
                                licenses = 'No licenses installed'
                            if not line:
                                break
                    # licenses section end
                    # LS indexes identification start
                    if re.search(r'Section *: +SSHOW_FABRIC', line):
                        collected['vf_id'] = True
                        while not re.search(
                                r'^(SWITCHCMD /fabos/cliexec/)?dom *:$|Non-VF',
                                line):
                            vf_context_match = vf_context_re.search(line)
                            if vf_context_match:
                                vf_id_set.add(vf_context_match.group(1))
                                line = file.readline()
                            else:
                                line = file.readline()
                                if not line:
                                    break
                    # LS indexes identification end

            # additional values which need to be added to the chassis params dictionary
            # chassis_params_add order (configname, ams_maps_log, chassis_name, snmp_server, syslog_server, timezone_h:m, uptime, cpu_average_load, memory_usage, flash_usage, licenses)
            # values extracted in manual mode. if change values order change keys order in init.xlsx "chassis_params_add" column
            vf_id_lst = sorted(vf_id_set)
            chassis_params_values = (sshow_file, ams_maps_file, switch_name,
                                     vf_id_lst, snmp_target_set, syslog_set,
                                     tz_lst, uptime, cpu_load, memory, flash,
                                     licenses)

            # adding additional parameters and values to the chassis_params_dct
            for chassis_param_add, chassis_param_value in zip(
                    chassis_params_add, chassis_params_values):
                if chassis_param_value:
                    # iterables are joined into a single string
                    # (':' for timezone hours:minutes, ', ' otherwise)
                    if not isinstance(chassis_param_value, str):
                        s = ':' if chassis_param_add == 'timezone_h:m' else ', '
                        chassis_param_value = s.join(chassis_param_value)
                    chassis_params_dct[chassis_param_add] = chassis_param_value
            # creating list with REQUIRED chassis parameters for the current switch
            # if no value in the chassis_params_dct for the parameter then None is added
            # and appending this list to the list of all switches chassis_params_fabric_lst
            chassis_params_fabric_lst.append([
                chassis_params_dct.get(chassis_param, None)
                for chassis_param in chassis_params
            ])

            status_info('ok', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, chassis_params_fabric_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        chassis_params_fabric_lst = verify_data(report_data_lst, data_names,
                                                *data_lst)

    return chassis_params_fabric_lst
Ejemplo n.º 21
0
def portcmd_analysis_main(portshow_df, switchshow_ports_df, switch_params_df,
                          switch_params_aggregated_df, isl_aggregated_df,
                          nsshow_df, nscamshow_df, ag_principal_df,
                          porttrunkarea_df, alias_df, fdmi_df, blade_module_df,
                          blade_servers_df, blade_vc_df, synergy_module_df,
                          synergy_servers_df, system_3par_df, port_3par_df,
                          report_columns_usage_dct, report_data_lst):
    """Main function to add connected devices information to portshow DataFrame.

    Aggregates port, name-server, alias, FDMI, blade/synergy and 3PAR data
    into a single portshow_aggregated DataFrame, applies manual device name
    corrections, counts storage and device connection statistics and builds
    the report tables. Previously saved results are reused unless a force
    run is required.

    Args:
        portshow_df ... port_3par_df: source DataFrames produced by earlier
            extraction and analysis steps.
        report_columns_usage_dct: dict controlling which columns appear in
            report tables (may be superseded by a saved copy from a previous run).
        report_data_lst: customer_name, dir_report, dir to save obtained data,
            max_title, report_steps_dct.

    Returns:
        pandas.DataFrame: portshow_aggregated_df with connected device details.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = [
        'portshow_aggregated', 'storage_connection_statistics',
        'device_connection_statistics', 'device_rename',
        'report_columns_usage_upd', 'Серверы', 'Массивы', 'Библиотеки',
        'Микрокоды_HBA', 'Подключение_массивов', 'Подключение_библиотек',
        'Подключение_серверов', 'NPIV', 'Статистика_массивов',
        'Статистика_устройств'
    ]
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # backup of the caller-supplied columns usage dict; restored below
    # when the loaded data does not contain a saved dict
    report_columns_usage_bckp = report_columns_usage_dct

    # load data if they were saved on previos program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # flag to forcible save portshow_aggregated_df if required
    portshow_force_flag = False
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    portshow_aggregated_df, storage_connection_statistics_df, device_connection_statistics_df, \
        device_rename_df, report_columns_usage_dct, \
            servers_report_df, storage_report_df, library_report_df, hba_report_df, \
                storage_connection_df,  library_connection_df, server_connection_df, npiv_report_df, \
                    storage_connection_statistics_report_df, device_connection_statistics_report_df = data_lst
    # nameserver rows which could not be split into separate columns;
    # saved to a service file below for manual review
    nsshow_unsplit_df = pd.DataFrame()

    # fall back to the caller-supplied dict when nothing was loaded
    if not report_columns_usage_dct:
        report_columns_usage_dct = report_columns_usage_bckp

    # list of data to analyze from report_info table
    analyzed_data_names = [
        'portcmd', 'switchshow_ports', 'switch_params_aggregated',
        'switch_parameters', 'chassis_parameters', 'fdmi', 'nscamshow',
        'nsshow', 'alias', 'blade_servers', 'fabric_labels', 'isl', 'trunk',
        'isl_aggregated', 'Параметры_SFP', 'portshow_sfp_aggregated'
    ]

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # import data with switch models, firmware and etc
        switch_models_df = dataframe_import('switch_models', max_title)
        # data imported from init file (regular expression patterns) to extract values from data columns
        # re_pattern list contains comp_keys, match_keys, comp_dct
        _, _, *re_pattern_lst = data_extract_objects('nameserver', max_title)

        # OUI table maps WWN vendor prefixes to device type/subtype
        oui_df = dataframe_import('oui',
                                  max_title,
                                  columns=['Connected_oui', 'type', 'subtype'])

        # current operation information string
        info = f'Generating connected devices table'
        print(info, end=" ")


        portshow_aggregated_df, alias_wwnn_wwnp_df, nsshow_unsplit_df, expected_ag_links_df = \
            portshow_aggregated(portshow_df, switchshow_ports_df, switch_params_df,
                                switch_params_aggregated_df, isl_aggregated_df, nsshow_df,
                                nscamshow_df, ag_principal_df, porttrunkarea_df, switch_models_df, alias_df,
                                oui_df, fdmi_df, blade_module_df,  blade_servers_df, blade_vc_df,
                                synergy_module_df, synergy_servers_df, system_3par_df, port_3par_df,
                                re_pattern_lst, report_data_lst)

        # after finish display status
        status_info('ok', max_title, len(info))
        # show warning if any UNKNOWN device class founded, if any PortSymb or NodeSymb is not parsed,
        # if new switch founded; the returned flags force-save the corresponding service files
        portshow_force_flag, nsshow_unsplit_force_flag, expected_ag_links_force_flag = \
            warning_notification(portshow_aggregated_df, switch_params_aggregated_df,
            nsshow_unsplit_df, expected_ag_links_df, report_data_lst)
        # correct device names manually
        portshow_aggregated_df, device_rename_df = \
            devicename_correction_main(portshow_aggregated_df, device_rename_df,
                                        report_columns_usage_dct, report_data_lst)
        # count Device_Host_Name instances for fabric_label, label and total in fabric
        portshow_aggregated_df = device_ports_per_group(portshow_aggregated_df)

        # count device connection statistics
        info = f'Counting device connection statistics'
        print(info, end=" ")
        storage_connection_statistics_df = storage_connection_statistics(
            portshow_aggregated_df, re_pattern_lst)
        device_connection_statistics_df = device_connection_statistics(
            portshow_aggregated_df)
        status_info('ok', max_title, len(info))

        # build report tables for the report sheet names data_names[5:-2]
        servers_report_df, storage_report_df, library_report_df, hba_report_df, \
            storage_connection_df,  library_connection_df, server_connection_df, npiv_report_df, \
                storage_connection_statistics_report_df, device_connection_statistics_report_df  = \
                    create_report_tables(portshow_aggregated_df, storage_connection_statistics_df,
                                            device_connection_statistics_df, data_names[5:-2],
                                            report_columns_usage_dct, max_title)
        # create list with partitioned DataFrames
        data_lst = [
            portshow_aggregated_df, storage_connection_statistics_df,
            device_connection_statistics_df, device_rename_df,
            report_columns_usage_dct, servers_report_df, storage_report_df,
            library_report_df, hba_report_df, storage_connection_df,
            library_connection_df, server_connection_df, npiv_report_df,
            storage_connection_statistics_report_df,
            device_connection_statistics_report_df
        ]

        # saving data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
        save_xlsx_file(nsshow_unsplit_df,
                       'nsshow_unsplit',
                       report_data_lst,
                       force_flag=nsshow_unsplit_force_flag)
        save_xlsx_file(expected_ag_links_df,
                       'expected_ag_links',
                       report_data_lst,
                       force_flag=expected_ag_links_force_flag)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        portshow_aggregated_df, storage_connection_statistics_df, device_connection_statistics_df, \
            device_rename_df, report_columns_usage_dct, \
                servers_report_df, storage_report_df, library_report_df, hba_report_df, \
                    storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
                        storage_connection_statistics_report_df, device_connection_statistics_report_df \
                            = verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [
            portshow_aggregated_df, storage_connection_statistics_df,
            device_connection_statistics_df, device_rename_df,
            report_columns_usage_dct, servers_report_df, storage_report_df,
            library_report_df, hba_report_df, storage_connection_df,
            library_connection_df, server_connection_df, npiv_report_df,
            storage_connection_statistics_report_df,
            device_connection_statistics_report_df
        ]
    # save data to service file if it's required
    # portshow_aggregated is force-saved when warnings were raised above
    for data_name, data_frame in zip(data_names, data_lst):
        force_flag = False
        if data_name == 'portshow_aggregated':
            force_flag = portshow_force_flag
        save_xlsx_file(data_frame,
                       data_name,
                       report_data_lst,
                       force_flag=force_flag)
    return portshow_aggregated_df
Ejemplo n.º 22
0
def fabriclabels_main(switchshow_ports_df, switch_params_df, fabricshow_df,
                      ag_principal_df, report_data_lst):
    """Function to set Fabric labels.

    Performs automatic fabric name/label assignment from switchshow,
    switch parameters and fabricshow data, shows the result to the user
    and optionally lets the user correct it manually, then labels both
    Native-mode and Access Gateway switches accordingly. Previously saved
    results are reused unless a force run is required.

    Args:
        switchshow_ports_df, switch_params_df, fabricshow_df, ag_principal_df:
            source DataFrames produced by earlier extraction steps.
        report_data_lst: customer_name, dir_report, dir to save obtained data,
            max_title, report_steps_dct.

    Returns:
        pandas.DataFrame: fabricshow_ag_labels_df with Fabric_name/Fabric_label
        assigned to each switch.
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    customer_name, report_path, _, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['fabric_labels']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previos program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    fabricshow_ag_labels_df, = data_lst

    # list of data to analyze from report_info table
    # empty: this step depends only on its own saved output
    analyzed_data_names = []
    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)

    if force_run:
        print('\nSETTING UP FABRICS NAMES AND LABELS  ...\n')

        # automatic labeling based on principal switch and fabric topology
        fabricshow_summary_df = auto_fabrics_labeling(switchshow_ports_df,
                                                      switch_params_df,
                                                      fabricshow_df,
                                                      report_data_lst)

        # display automatic fabric labeling
        info_labels = [
            'Fabric_name', 'Fabric_label', 'chassis_name',
            'Principal_switch_name', 'Fabric_ID', 'FC_Route', 'Total_switch',
            'Domain_IDs', 'Switch_names', 'Device_ports', 'Online_ports',
            'LS_type', 'Fabric_Name'
        ]
        # service file name for detailed information
        current_date = str(date.today())
        file_name = customer_name + '_' + report_steps_dct[
            'fabricshow_summary'][2] + '_' + current_date + '.xlsx'
        # file_name = customer_name + '_analysis_report_' + current_date + '.xlsx'
        print('\nAutomatic fabrics labeling\n')
        # set option to show all columns
        with pd.option_context('display.max_columns', None,
                               'display.expand_frame_repr', False):
            # pd.set_option('max_columns', None)
            # pd.set_option('expand_frame_repr', False)
            print(fabricshow_summary_df.loc[:, info_labels])
        print(
            f"\nFor detailed switch port types and numbers statistic in each fabric check '{file_name}' file 'fabricshow_statistics' sheet in"
        )
        print(f'{report_path} directory')
        print('ATTN! CLOSE file after check\n')

        # ask user if Automatic Fabric labeling need to be corrected
        query = 'Do you want to change Fabrics Names or Labels? (y)es/(n)o: '
        reply = reply_request(query)
        if reply == 'y':
            # saving DataFrame to Excel to check during manual labeling if required
            save_xlsx_file(fabricshow_summary_df,
                           'fabricshow_summary',
                           report_data_lst,
                           force_flag=True)
            # interactive correction of the automatically assigned names/labels
            fabricshow_summary_df = manual_fabrics_labeling(
                fabricshow_summary_df, info_labels)

        # takes all switches working in Native and AG switches
        # merge them in one DataFrame and identify which Fabrics they belong to with fabricshow_summary DataFrame
        fabricshow_ag_labels_df = native_ag_labeling(fabricshow_df,
                                                     ag_principal_df,
                                                     fabricshow_summary_df)

        # # disable option to show all columns
        # pd.reset_option('max_columns')
        # pd.reset_option('expand_frame_repr')

        # create list with partitioned DataFrames
        data_lst = [fabricshow_ag_labels_df]
        # saving data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        fabricshow_ag_labels_df = verify_data(report_data_lst, data_names,
                                              *data_lst)
        data_lst = [fabricshow_ag_labels_df]
    # save data to excel file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return fabricshow_ag_labels_df
Ejemplo n.º 23
0
def maps_params_extract(all_config_data, report_data_lst):
    """Extract MAPS (Monitoring and Alerting Policy Suite) parameters
    from AMS_MAPS_LOG configuration files for each switch.

    Parameters:
        all_config_data: iterable of [switch_name, sshow_file,
            (ams_maps_log files, ...)] entries, one per switch config set.
        report_data_lst: customer_name, dir_report, dir to save obtained
            data, max_title, report_steps_dct (only the last two are used
            directly here).

    Returns:
        maps_params_fabric_lst: list with one sub-list per logical switch,
        each containing the values of the REQUIRED maps parameters
        (None for parameters not discovered in the file).
    """

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['maps_parameters']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if they were saved on previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpacking from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    maps_params_fabric_lst, = data_lst

    # data force extract check
    # list of keys for each data from data_lst representing if it is required
    # to re-collect or re-analyze data even they were obtained on previous iterations
    force_extract_keys_lst = [
        report_steps_dct[data_name][1] for data_name in data_names
    ]
    # print data which were loaded but for which force extract flag is on
    force_extract_check(data_names, data_lst, force_extract_keys_lst,
                        max_title)

    # when any of data_lst was not saved or
    # force extract flag is on then re-extract data from configuration files
    if not all(data_lst) or any(force_extract_keys_lst):
        print(
            '\nEXTRACTING MAPS DATA FROM AMS_MAPS_LOG CONFIGURATION FILES ...\n'
        )

        # number of switches to check
        switch_num = len(all_config_data)
        # list to store only REQUIRED parameters
        # collecting data for all switches during looping
        maps_params_fabric_lst = []
        # data imported from init file to extract values from config file
        maps_params, maps_params_add, comp_keys, match_keys, comp_dct = data_extract_objects(
            'maps', max_title)

        # all_config_data format ([switch_name, supportshow file, (ams_maps_log files, ...)])
        # checking each config set (supportshow file) for chassis level parameters
        for i, switch_config_data in enumerate(all_config_data):
            # data unpacking from iter param
            switch_name, sshow_file, ams_maps_files = switch_config_data
            # number of ams_maps configs
            num_maps = len(ams_maps_files) if ams_maps_files else 0
            # current operation information string
            info = f'[{i+1} of {switch_num}]: {switch_name} MAPS parameters. Number of AMS_MAPS configs: {num_maps} ...'
            print(info)

            # checking ams_maps log file for each logical switch
            if ams_maps_files:
                for ams_maps_file in ams_maps_files:
                    # search control dictionary. continue to check sshow_file until all parameters groups are found
                    collected = {'switch_index': False, 'global_dash': False}
                    # dictionary to store all DISCOVERED switch parameters
                    # collecting data only for the logical switch in current loop
                    maps_params_dct = {}
                    # reset per file: without this, a file that lacks the
                    # 'AMS/MAPS Data Switch' section would either raise
                    # NameError (first file) or silently reuse the index
                    # parsed from the previous file
                    switch_index = None

                    info = ' ' * 16 + f'{os.path.basename(ams_maps_file)} processing'
                    print(info, end=" ")

                    with open(ams_maps_file, encoding='utf-8',
                              errors='ignore') as file:
                        # check file until all groups of parameters extracted
                        while not all(collected.values()):
                            line = file.readline()
                            if not line:
                                break
                            # logical switch index section start
                            if re.search(
                                    r'^[= ]*AMS/MAPS *Data *Switch *(\d+)[= ]*$',
                                    line):
                                # when section is found corresponding collected dict values changed to True
                                collected['switch_index'] = True
                                match_dct = {
                                    match_key: comp_dct[comp_key].match(line)
                                    for comp_key, match_key in zip(
                                        comp_keys, match_keys)
                                }
                                switch_index = match_dct[match_keys[0]].group(
                                    1)
                            # logical switch index section end
                            # global dashboard section start
                            if re.search(
                                    r'^[- ]*MAPS +Global +Monitoring +Configuration[ -]*$',
                                    line):
                                collected['global_dash'] = True
                                # read until the 'NM Data' delimiter closes the section
                                while not re.search(r'^[- ]*NM +Data[- ]*$',
                                                    line):
                                    line = file.readline()
                                    # dictionary with match names as keys and match result of current line with all imported regular expressions as values
                                    match_dct = {
                                        match_key:
                                        comp_dct[comp_key].match(line)
                                        for comp_key, match_key in zip(
                                            comp_keys, match_keys)
                                    }
                                    # match_keys ['switch_index_match', 'dashboard_match', 'report_match', 'no_lic_match']
                                    # 'dashboard_match'
                                    if match_dct[match_keys[1]]:
                                        maps_params_dct[match_dct[match_keys[
                                            1]].group(1).rstrip()] = match_dct[
                                                match_keys[1]].group(2)
                                    # 'report_match'
                                    if match_dct[match_keys[2]]:
                                        maps_params_dct[match_dct[match_keys[
                                            2]].group(1).rstrip()] = match_dct[
                                                match_keys[2]].group(2)
                                    # 'no Fabric lic match'
                                    if match_dct[match_keys[3]]:
                                        # Fabric Vision license absent: mark the
                                        # FV-dependent parameter slice accordingly
                                        for maps_param in maps_params[6:23]:
                                            maps_params_dct[
                                                maps_param] = 'No FV lic'
                                    if not line:
                                        break
                            # global dashboard section end

                    # additional values which need to be added to the chassis params dictionary
                    # chassis_params_add order (configname, ams_maps_config, chassis_name, switch_index)
                    # values extracted in manual mode. if change values order change keys order in init.xlsx "maps_params_add" column
                    maps_params_values = (sshow_file, ams_maps_file,
                                          switch_name, switch_index)

                    # adding additional parameters and values to the chassis_params_switch_dct
                    for maps_param_add, maps_param_value in zip(
                            maps_params_add, maps_params_values):
                        maps_params_dct[maps_param_add] = maps_param_value

                    # creating list with REQUIRED maps parameters for the current switch
                    # if no value in the maps_params_dct for the parameter then None is added
                    # and appending this list to the list of all switches maps_params_fabric_lst
                    maps_params_fabric_lst.append([
                        maps_params_dct.get(maps_param, None)
                        for maps_param in maps_params
                    ])

                    status_info('ok', max_title, len(info))
            else:
                info = ' ' * 16 + 'No AMS_MAPS configuration found.'
                print(info, end=" ")
                status_info('skip', max_title, len(info))
        # save extracted data to json file
        save_data(report_data_lst, data_names, maps_params_fabric_lst)
    # verify if loaded data is empty after first iteration and replace information string with empty list
    else:
        maps_params_fabric_lst = verify_data(report_data_lst, data_names,
                                             *data_lst)

    return maps_params_fabric_lst