def define_device_to_rename(portshow_aggregated_df, device_rename_df, max_title,
                            force_form_update_flag, force_change_data_lst, report_data_lst):
    """Function to define (create new, return previously saved or return empty)
    device_rename_df DataFrame to apply device rename schema"""

    # if device_rename_df DataFrame doesn't exist (1st iteration),
    # or force flag to change device_rename_df DataFrame is on,
    # or some related DataFrames were forcibly changed
    if device_rename_df is None or force_form_update_flag:
        print('\n')
        if force_change_data_lst:
            print(f"Request to force change of {', '.join(force_change_data_lst)} data was received.")
        reply = reply_request('Do you want to change auto assigned device names? (y)es/(n)o: ')
        if reply == 'y':
            # if device_rename_df DataFrame doesn't exist (1st iteration)
            if device_rename_df is None:
                # create new device rename DataFrame
                manual_device_rename_df = create_device_rename_form(portshow_aggregated_df)
            else:
                # if any related DataFrames were forcibly changed, ask if a device rename form reset is required
                if force_change_data_lst:
                    reply = reply_request('Do you want to apply previously saved device rename schema? (y)es/(n)o: ')
                    if reply == 'y':
                        print('\n')
                        return device_rename_df
                    else:
                        print('\n')
                        reply = reply_request('Do you want to reset device rename form? (y)es/(n)o: ')
                        if reply == 'y':
                            # create new device rename DataFrame
                            manual_device_rename_df = create_device_rename_form(portshow_aggregated_df)
                        else:
                            # use saved device rename DataFrame
                            manual_device_rename_df = device_rename_df.copy()
                else:
                    # if there is no force change in related DataFrames but a device_rename_df
                    # DataFrame change was initiated, use saved device rename DataFrame
                    manual_device_rename_df = device_rename_df.copy()

            # save manual_device_rename_df DataFrame to excel file to use it as a form to fill in
            sheet_title = 'device_rename_form'
            file_path = save_xlsx_file(manual_device_rename_df, sheet_title,
                                       report_data_lst, force_flag=True)
            file_name = os.path.basename(file_path)
            file_directory = os.path.dirname(file_path)
            print(f"\nTo rename devices put new names into the '{file_name}' file, "
                  f"'{sheet_title}' sheet in\n'{file_directory}' directory")
            print('ATTN! CLOSE file after changes were made\n')
            # complete the manual_device_rename_df form and import it
            reply = reply_request("When finished enter 'yes': ", ['yes'])
            if reply == 'y':
                print('\n')
                device_rename_df = dataframe_import(sheet_title, max_title,
                                                    init_file=file_path, header=2)
        else:
            # if auto assigned names are left unchanged, save empty device_rename_df DataFrame
            device_rename_columns = ['Fabric_name', 'Device_Host_Name', 'Group_Name',
                                     'deviceType', 'deviceSubtype', 'Device_Host_Name_rename']
            device_rename_df = pd.DataFrame(columns=device_rename_columns)
    else:
        # check loaded device_rename_df DataFrame (if it's empty)
        device_rename_df = verify_data(report_data_lst, ['device_rename'],
                                       device_rename_df, show_status=False)
    return device_rename_df
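
# --- Illustrative sketch (not part of the original module) -------------------
# define_device_to_rename() relies on the reply_request() helper imported from
# the project's service functions. The stub below only approximates its
# behaviour as inferred from the call sites above: it keeps prompting until the
# answer is one of the accepted options and returns the first letter, which is
# why the result of reply_request("When finished enter 'yes': ", ['yes']) is
# compared against 'y'. The name and signature of this sketch are hypothetical.
def reply_request_sketch(question, reply_options=('y', 'yes', 'n', 'no')):
    """Prompt until the user enters one of reply_options; return its first letter."""
    reply = None
    while reply not in reply_options:
        # keep asking until a valid option is entered (case-insensitive)
        reply = input(question).lower()
    return reply[0]
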
def switch_params_analysis_main(fabricshow_ag_labels_df, chassis_params_df, switch_params_df,
                                maps_params_df, blade_module_loc_df, ag_principal_df, report_data_lst):
    """Main function to create aggregated switch parameters table and report tables"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['report_columns_usage', 'switch_params_aggregated', 'Коммутаторы', 'Фабрика',
                  'Параметры_коммутаторов', 'Лицензии', 'Глобальные_параметры_фабрики']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if it was saved on a previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpack DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    report_columns_usage_dct, switch_params_aggregated_df, switches_report_df, fabric_report_df, \
        switches_parameters_report_df, licenses_report_df, global_fabric_parameters_report_df = data_lst

    # list of data to analyze from report_info table
    analyzed_data_names = ['chassis_parameters', 'switch_parameters', 'switchshow_ports',
                           'maps_parameters', 'blade_interconnect', 'fabric_labels']

    # clean fabricshow DataFrame from unnecessary data
    fabric_clean_df = fabric_clean(fabricshow_ag_labels_df)
    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution is explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # import data with switch models, firmware, etc.
        switch_models_df = dataframe_import('switch_models', max_title)

        # current operation information string
        info = 'Generating aggregated switch parameters table'
        print(info, end=" ")

        # create aggregated table by joining DataFrames
        switch_params_aggregated_df, report_columns_usage_dct = \
            fabric_aggregation(fabric_clean_df, chassis_params_df,
                               switch_params_df, maps_params_df, switch_models_df, ag_principal_df)
        # add 'Device_Location' for Blade chassis switches
        switch_params_aggregated_df = fill_device_location(switch_params_aggregated_df,
                                                           blade_module_loc_df)
        # after finish display status
        status_info('ok', max_title, len(info))

        # check if any switch config files are missing
        mask_fabric = switch_params_aggregated_df[['Fabric_name', 'Fabric_label']].notna().all(axis=1)
        mask_no_config = switch_params_aggregated_df['chassis_name'].isna()
        missing_configs_num = switch_params_aggregated_df.loc[mask_no_config]['Fabric_name'].count()
        if missing_configs_num:
            info = f'{missing_configs_num} switch configuration{"s" if missing_configs_num > 1 else ""} MISSING'
            print(info, end=" ")
            status_info('warning', max_title, len(info))

        switches_report_df, fabric_report_df, switches_parameters_report_df, \
            licenses_report_df, global_fabric_parameters_report_df = \
            switchs_params_report(switch_params_aggregated_df, data_names,
                                  report_columns_usage_dct, max_title)

        # # partition aggregated DataFrame to required tables
        # switches_report_df, fabric_report_df, \
        #     switches_parameters_report_df, licenses_report_df = \
        #     dataframe_segmentation(switch_params_aggregated_df, data_names[2:-1],
        #                            report_columns_usage_dct, max_title)

        # # global parameters are equal for all switches in one fabric, thus checking Principal switches only
        # mask_principal = switch_params_aggregated_df['switchRole'] == 'Principal'
        # switch_params_principal_df = switch_params_aggregated_df.loc[mask_principal].copy()
        # global_fabric_parameters_report_df, = dataframe_segmentation(switch_params_principal_df, data_names[-1],
        #                                                              report_columns_usage_dct, max_title)

        # # drop rows with empty switch name columns
        # fabric_report_df.dropna(subset=['Имя коммутатора'], inplace=True)
        # switches_parameters_report_df.dropna(subset=['Имя коммутатора'], inplace=True)
        # licenses_report_df.dropna(subset=['Имя коммутатора'], inplace=True)

        # # drop 'Fabric ID' column if all rows have the same value
        # if fabric_report_df['Fabric ID'].dropna().nunique() == 1:
        #     fabric_report_df.drop(columns=['Fabric ID'], inplace=True)

        # # TO_REMOVE No need to drop duplicates since Principal switches only are used above
        # # # parameters are equal for all switches in one fabric
        # # if report_columns_usage_dct['fabric_name_usage']:
        # #     global_fabric_parameters_report_df.drop_duplicates(subset=['Фабрика', 'Подсеть'], inplace=True)
        # # else:
        # #     global_fabric_parameters_report_df.drop_duplicates(subset=['Подсеть'], inplace=True)

        # global_fabric_parameters_report_df.reset_index(inplace=True, drop=True)

        # create list with partitioned DataFrames
        data_lst = [report_columns_usage_dct, switch_params_aggregated_df, switches_report_df,
                    fabric_report_df, switches_parameters_report_df, licenses_report_df,
                    global_fabric_parameters_report_df]
        # save data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        report_columns_usage_dct, switch_params_aggregated_df, switches_report_df, fabric_report_df, \
            switches_parameters_report_df, licenses_report_df, global_fabric_parameters_report_df = \
            verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [report_columns_usage_dct, switch_params_aggregated_df, switches_report_df,
                    fabric_report_df, switches_parameters_report_df, licenses_report_df,
                    global_fabric_parameters_report_df]
    # save data to service file if it's required
    for data_name, data_frame in zip(data_names[1:], data_lst[1:]):
        save_xlsx_file(data_frame, data_name, report_data_lst)
    return report_columns_usage_dct, switch_params_aggregated_df, fabric_clean_df
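
# --- Illustrative sketch (not part of the original module) -------------------
# Each *_analysis_main function gates its heavy processing behind
# verify_force_run(). The stub below approximates the decision rule described
# in the comments above: recompute when any saved dataset is missing
# (load_data returned None) or when a force-extract flag is set for either the
# output data or any of the source data the function analyzes. Reading the
# flag at index [1] of each report_steps_dct entry follows fabric_main() below;
# the real helper also takes max_title and prints status, which is omitted here.
def verify_force_run_sketch(data_names, data_lst, report_steps_dct, analyzed_data_names):
    """Return True when cached results are absent or a force flag requests a re-run."""
    # any dataset not found on disk forces recomputation
    data_missing = any(data is None for data in data_lst)
    # any explicit force-extract request for output or source data forces recomputation
    force_flags = [report_steps_dct[name][1] for name in (*data_names, *analyzed_data_names)]
    return data_missing or any(force_flags)
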
def portcmd_analysis_main(portshow_df, switchshow_ports_df, switch_params_df,
                          switch_params_aggregated_df, isl_aggregated_df, nsshow_df,
                          nscamshow_df, ag_principal_df, porttrunkarea_df, alias_df, fdmi_df,
                          blade_module_df, blade_servers_df, blade_vc_df,
                          synergy_module_df, synergy_servers_df, system_3par_df, port_3par_df,
                          report_columns_usage_dct, report_data_lst):
    """Main function to add connected devices information to portshow DataFrame"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['portshow_aggregated', 'storage_connection_statistics',
                  'device_connection_statistics', 'device_rename', 'report_columns_usage_upd',
                  'Серверы', 'Массивы', 'Библиотеки', 'Микрокоды_HBA', 'Подключение_массивов',
                  'Подключение_библиотек', 'Подключение_серверов', 'NPIV',
                  'Статистика_массивов', 'Статистика_устройств']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    report_columns_usage_bckp = report_columns_usage_dct

    # load data if it was saved on a previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)

    # flag to forcibly save portshow_aggregated_df if required
    portshow_force_flag = False
    # unpack DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    portshow_aggregated_df, storage_connection_statistics_df, device_connection_statistics_df, \
        device_rename_df, report_columns_usage_dct, \
        servers_report_df, storage_report_df, library_report_df, hba_report_df, \
        storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
        storage_connection_statistics_report_df, device_connection_statistics_report_df = data_lst

    nsshow_unsplit_df = pd.DataFrame()

    if not report_columns_usage_dct:
        report_columns_usage_dct = report_columns_usage_bckp

    # list of data to analyze from report_info table
    analyzed_data_names = ['portcmd', 'switchshow_ports', 'switch_params_aggregated',
                           'switch_parameters', 'chassis_parameters', 'fdmi', 'nscamshow',
                           'nsshow', 'alias', 'blade_servers', 'fabric_labels', 'isl', 'trunk',
                           'isl_aggregated', 'Параметры_SFP', 'portshow_sfp_aggregated']

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution is explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # import data with switch models, firmware, etc.
        switch_models_df = dataframe_import('switch_models', max_title)
        # data imported from init file (regular expression patterns) to extract values from data columns
        # re_pattern list contains comp_keys, match_keys, comp_dct
        _, _, *re_pattern_lst = data_extract_objects('nameserver', max_title)
        oui_df = dataframe_import('oui', max_title, columns=['Connected_oui', 'type', 'subtype'])

        # current operation information string
        info = 'Generating connected devices table'
        print(info, end=" ")

        portshow_aggregated_df, alias_wwnn_wwnp_df, nsshow_unsplit_df, expected_ag_links_df = \
            portshow_aggregated(portshow_df, switchshow_ports_df, switch_params_df,
                                switch_params_aggregated_df, isl_aggregated_df,
                                nsshow_df, nscamshow_df, ag_principal_df, porttrunkarea_df,
                                switch_models_df, alias_df, oui_df, fdmi_df,
                                blade_module_df, blade_servers_df, blade_vc_df,
                                synergy_module_df, synergy_servers_df,
                                system_3par_df, port_3par_df,
                                re_pattern_lst, report_data_lst)
        # after finish display status
        status_info('ok', max_title, len(info))

        # show warning if any UNKNOWN device class was found, if any PortSymb or NodeSymb
        # was not parsed, or if a new switch was found
        portshow_force_flag, nsshow_unsplit_force_flag, expected_ag_links_force_flag = \
            warning_notification(portshow_aggregated_df, switch_params_aggregated_df,
                                 nsshow_unsplit_df, expected_ag_links_df, report_data_lst)
        # correct device names manually
        portshow_aggregated_df, device_rename_df = \
            devicename_correction_main(portshow_aggregated_df, device_rename_df,
                                       report_columns_usage_dct, report_data_lst)
        # count Device_Host_Name instances for each fabric label and in total within the fabric
        portshow_aggregated_df = device_ports_per_group(portshow_aggregated_df)

        # count device connection statistics
        info = 'Counting device connection statistics'
        print(info, end=" ")
        storage_connection_statistics_df = storage_connection_statistics(portshow_aggregated_df,
                                                                         re_pattern_lst)
        device_connection_statistics_df = device_connection_statistics(portshow_aggregated_df)
        status_info('ok', max_title, len(info))

        servers_report_df, storage_report_df, library_report_df, hba_report_df, \
            storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
            storage_connection_statistics_report_df, device_connection_statistics_report_df = \
            create_report_tables(portshow_aggregated_df, storage_connection_statistics_df,
                                 device_connection_statistics_df, data_names[5:-2],
                                 report_columns_usage_dct, max_title)

        # create list with partitioned DataFrames
        data_lst = [portshow_aggregated_df, storage_connection_statistics_df,
                    device_connection_statistics_df, device_rename_df, report_columns_usage_dct,
                    servers_report_df, storage_report_df, library_report_df, hba_report_df,
                    storage_connection_df, library_connection_df, server_connection_df,
                    npiv_report_df, storage_connection_statistics_report_df,
                    device_connection_statistics_report_df]
        # save data to json or csv file
        save_data(report_data_lst, data_names, *data_lst)

        save_xlsx_file(nsshow_unsplit_df, 'nsshow_unsplit', report_data_lst,
                       force_flag=nsshow_unsplit_force_flag)
        save_xlsx_file(expected_ag_links_df, 'expected_ag_links', report_data_lst,
                       force_flag=expected_ag_links_force_flag)
    # verify if loaded data is empty and replace information string with empty DataFrame
    else:
        portshow_aggregated_df, storage_connection_statistics_df, device_connection_statistics_df, \
            device_rename_df, report_columns_usage_dct, \
            servers_report_df, storage_report_df, library_report_df, hba_report_df, \
            storage_connection_df, library_connection_df, server_connection_df, npiv_report_df, \
            storage_connection_statistics_report_df, device_connection_statistics_report_df = \
            verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [portshow_aggregated_df, storage_connection_statistics_df,
                    device_connection_statistics_df, device_rename_df, report_columns_usage_dct,
                    servers_report_df, storage_report_df, library_report_df, hba_report_df,
                    storage_connection_df, library_connection_df, server_connection_df,
                    npiv_report_df, storage_connection_statistics_report_df,
                    device_connection_statistics_report_df]
    # save data to service file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        force_flag = False
        if data_name == 'portshow_aggregated':
            force_flag = portshow_force_flag
        save_xlsx_file(data_frame, data_name, report_data_lst, force_flag=force_flag)
    return portshow_aggregated_df
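
# --- Illustrative sketch (not part of the original module) -------------------
# device_ports_per_group() is documented above as counting Device_Host_Name
# instances per fabric label and in total within the fabric. A minimal pandas
# version of that idea is shown below; the names of the produced counter
# columns are assumptions, not the module's actual output columns.
def device_ports_per_group_sketch(portshow_aggregated_df):
    """Add per-label and per-fabric port counts for every connected device name."""
    df = portshow_aggregated_df.copy()
    # number of ports each device has within one fabric label
    df['Device_Host_Name_per_fabric_label'] = \
        df.groupby(['Fabric_name', 'Fabric_label', 'Device_Host_Name'])['Device_Host_Name'].transform('count')
    # total number of ports each device has within the whole fabric
    df['Device_Host_Name_per_fabric'] = \
        df.groupby(['Fabric_name', 'Device_Host_Name'])['Device_Host_Name'].transform('count')
    return df
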
def err_sfp_cfg_analysis_main(portshow_aggregated_df, sfpshow_df, portcfgshow_df,
                              report_columns_usage_dct, report_data_lst):
    """Main function to add porterr, transceiver and portcfg information to portshow DataFrame"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    portshow_sfp_force_flag = False
    portshow_sfp_export_flag, *_ = report_steps_dct['portshow_sfp_aggregated']

    # names to save data obtained after current module execution
    data_names = ['portshow_sfp_aggregated', 'Ошибки', 'Параметры_SFP', 'Параметры_портов']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if it was saved on a previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpack DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df = data_lst

    # list of data to analyze from report_info table
    analyzed_data_names = ['portshow_aggregated', 'sfpshow', 'portcfgshow', 'portcmd',
                           'switchshow_ports', 'switch_params_aggregated', 'fdmi',
                           'device_rename', 'report_columns_usage_upd', 'nscamshow',
                           'nsshow', 'alias', 'blade_servers', 'fabric_labels']

    # force run when any data from data_lst was not saved (file not found) or
    # procedure execution is explicitly requested for output data or data used during fn execution
    force_run = verify_force_run(data_names, data_lst, report_steps_dct,
                                 max_title, analyzed_data_names)
    if force_run:
        # import transceiver information from file
        sfp_model_df = dataframe_import('sfp_models', max_title)

        # current operation information string
        info = 'Updating connected devices table'
        print(info, end=" ")

        # add sfpshow, transceiver information and portcfg to aggregated portcmd DataFrame
        portshow_sfp_aggregated_df = port_complete(portshow_aggregated_df, sfpshow_df,
                                                   sfp_model_df, portcfgshow_df)
        # after finish display status
        status_info('ok', max_title, len(info))

        # warning if UNKNOWN SFP is present
        if (portshow_sfp_aggregated_df['Transceiver_Supported'] == 'Unknown SFP').any():
            info_columns = ['Fabric_name', 'Fabric_label', 'configname', 'chassis_name',
                            'chassis_wwn', 'slot', 'port', 'Transceiver_Supported']
            portshow_sfp_info_df = portshow_sfp_aggregated_df.drop_duplicates(subset=info_columns).copy()
            unknown_count = len(portshow_sfp_info_df[
                portshow_sfp_info_df['Transceiver_Supported'] == 'Unknown SFP'])
            info = f'{unknown_count} {"port" if unknown_count == 1 else "ports"} with UNKNOWN supported SFP tag found'
            print(info, end=" ")
            status_info('warning', max_title, len(info))

        # ask if portshow_sfp_aggregated_df should be saved
        if not portshow_sfp_export_flag:
            reply = reply_request("Do you want to save 'portshow_sfp_aggregated'? (y)es/(n)o: ")
            if reply == 'y':
                portshow_sfp_force_flag = True

        # create report tables from portshow_sfp_aggregated_df DataFrame
        error_report_df, sfp_report_df, portcfg_report_df = \
            create_report_tables(portshow_sfp_aggregated_df, data_names[1:],
                                 report_columns_usage_dct, max_title)
        # save data to json or csv file
        data_lst = [portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df]
        save_data(report_data_lst, data_names, *data_lst)
    # verify if loaded data is empty and reset DataFrame if so
    else:
        portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df = \
            verify_data(report_data_lst, data_names, *data_lst)
        data_lst = [portshow_sfp_aggregated_df, error_report_df, sfp_report_df, portcfg_report_df]
    # save data to excel file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        force_flag = False
        if data_name == 'portshow_sfp_aggregated':
            force_flag = portshow_sfp_force_flag
        save_xlsx_file(data_frame, data_name, report_data_lst, force_flag=force_flag)
    return portshow_sfp_aggregated_df
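
# --- Illustrative sketch (not part of the original module) -------------------
# port_complete() is described above as attaching sfpshow readings and
# portcfgshow settings to the aggregated portcmd table. The left joins below
# illustrate the likely shape of that operation; the merge keys are assumptions
# based on the switch/slot/port columns used in info_columns above, and the
# transceiver-model lookup against sfp_model_df is omitted for brevity.
def port_complete_sketch(portshow_aggregated_df, sfpshow_df, portcfgshow_df):
    """Left-join SFP readings and port configuration onto the port-level table."""
    join_keys = ['configname', 'chassis_name', 'chassis_wwn', 'slot', 'port']
    # keep every port row even when no SFP or portcfg data was collected for it
    df = portshow_aggregated_df.merge(sfpshow_df, how='left', on=join_keys)
    df = df.merge(portcfgshow_df, how='left', on=join_keys)
    return df
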
def fabric_main(fabricshow_ag_labels_df, chassis_params_df,
                switch_params_df, maps_params_df, report_data_lst):
    """Main function to create tables"""

    # report_data_lst contains information:
    # customer_name, dir_report, dir to save obtained data, max_title, report_steps_dct
    *_, max_title, report_steps_dct = report_data_lst

    # names to save data obtained after current module execution
    data_names = ['Коммутаторы', 'Фабрика', 'Глобальные_параметры_фабрики',
                  'Параметры_коммутаторов', 'Лицензии']
    # service step information
    print(f'\n\n{report_steps_dct[data_names[0]][3]}\n')

    # load data if it was saved on a previous program execution iteration
    data_lst = load_data(report_data_lst, *data_names)
    # unpack DataFrames from the loaded list with data
    # pylint: disable=unbalanced-tuple-unpacking
    switches_report_df, fabric_report_df, global_fabric_parameters_report_df, \
        switches_parameters_report_df, licenses_report_df = data_lst

    # data force extract check
    # list of keys for each data item from data_lst representing if it is required
    # to re-collect or re-analyze data even if it was obtained on previous iterations
    force_extract_keys_lst = [report_steps_dct[data_name][1] for data_name in data_names]
    # list with True (if data loaded) and/or False (if data was not found and None returned)
    data_check = force_extract_check(data_names, data_lst, force_extract_keys_lst, max_title)

    # flag if fabric labels were forcibly changed
    fabric_labels_change = bool(report_steps_dct['fabric_labels'][1])
    # initialize chassis information and fabric name columns usage
    report_columns_usage_dct = {'fabric_name_usage': True, 'chassis_info_usage': True}

    # import data with switch models, firmware, etc.
    switch_models_df = dataframe_import('switch_models', max_title)
    # clean fabricshow DataFrame from unnecessary data
    fabric_clean_df = fabric_clean(fabricshow_ag_labels_df)
    # create aggregated table by joining DataFrames
    switch_params_aggregated_df, report_columns_usage_dct = \
        fabric_aggregation(fabric_clean_df, chassis_params_df,
                           switch_params_df, maps_params_df, switch_models_df)
    save_xlsx_file(switch_params_aggregated_df, 'switch_params_aggregated',
                   report_data_lst, report_type='analysis')

    # when no data is saved, a force extract flag is on, or fabric labels have been changed,
    # then analyze extracted config data
    if not all(data_check) or any(force_extract_keys_lst) or fabric_labels_change:
        # information string if a force change of fabric labels was initiated
        # and statistics recounting is required
        if fabric_labels_change and not any(force_extract_keys_lst) and all(data_check):
            info = 'Switch information force extract due to change in Fabrics labeling'
            print(info, end=" ")
            status_info('ok', max_title, len(info))

        # partition aggregated DataFrame to required tables
        switches_report_df, fabric_report_df, global_fabric_parameters_report_df, \
            switches_parameters_report_df, licenses_report_df = \
            dataframe_segmentation(switch_params_aggregated_df, data_names,
                                   report_columns_usage_dct, max_title)

        # drop rows with empty switch name columns
        fabric_report_df.dropna(subset=['Имя коммутатора'], inplace=True)
        switches_parameters_report_df.dropna(subset=['Имя коммутатора'], inplace=True)
        licenses_report_df.dropna(subset=['Имя коммутатора'], inplace=True)

        # parameters are equal for all switches in one fabric
        if report_columns_usage_dct['fabric_name_usage']:
            global_fabric_parameters_report_df.drop_duplicates(subset=['Фабрика', 'Подсеть'],
                                                               inplace=True)
        else:
            global_fabric_parameters_report_df.drop_duplicates(subset=['Подсеть'], inplace=True)
        global_fabric_parameters_report_df.reset_index(inplace=True, drop=True)

        # create list with partitioned DataFrames
        data_lst = [switches_report_df, fabric_report_df, global_fabric_parameters_report_df,
                    switches_parameters_report_df, licenses_report_df]

        # current operation information string
        info = 'Generating Fabric and Switches tables'
        print(info, end=" ")
        # after finish display status
        status_info('ok', max_title, len(info))

        # save DataFrames to csv file
        save_data(report_data_lst, data_names, *data_lst)
        # save_data(report_data_lst, data_auxillary_names, *data_auxillary_lst)

    # save data to service file if it's required
    for data_name, data_frame in zip(data_names, data_lst):
        save_xlsx_file(data_frame, data_name, report_data_lst)

    return switch_params_aggregated_df, report_columns_usage_dct, fabric_clean_df
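
# --- Illustrative sketch (not part of the original module) -------------------
# Every *_main function above unpacks the same service list with starred
# assignment, keeping only the tail elements it needs. The snippet below shows
# that convention with the field order documented in the comments
# (customer_name, dir_report, dir to save obtained data, max_title,
# report_steps_dct); the example values are hypothetical.
report_data_lst_example = ['customer_name', 'dir_report', 'dir_data_objects', 80, {}]
# leading fields are discarded; max_title and report_steps_dct are taken from the tail
*_, max_title_example, report_steps_dct_example = report_data_lst_example
assert max_title_example == 80 and report_steps_dct_example == {}
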