def pseudo_main(): """Basically the main(). Did it this way so it can easily be used as a standalone module or called from another. :return: Exit code. See exist codes in brcddb.brcddb_common :rtype: int """ ip, user_id, pw, outf, sec, s_flag, vd, c_file, fid, log, nl = parse_args() if vd: brcdapi_rest.verbose_debug = True if s_flag: brcdapi_log.set_suppress_all() if not nl: brcdapi_log.open_log(log) if sec is None: sec = 'none' fid_l = None if fid is None else fid.split(',') ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ml.append('IP: ' + brcdapi_util.mask_ip_addr(ip, True)) ml.append('ID: ' + user_id) ml.append('security: ' + sec) ml.append('Output file: ' + outf) ml.append('KPI file: ' + str(c_file)) ml.append('FID List: ' + str(fid)) brcdapi_log.log(ml, True) outf = brcdapi_file.full_file_name(outf, '.json') # Create project proj_obj = brcddb_project.new( "Captured_data", datetime.datetime.now().strftime('%d %b %Y %H:%M:%S')) proj_obj.s_python_version(sys.version) proj_obj.s_description("This is a test") # Login session = api_int.login(user_id, pw, ip, sec, proj_obj) if brcdapi_auth.is_error(session): return brcddb_common.EXIT_STATUS_API_ERROR # Collect the data try: api_int.get_batch(session, proj_obj, _kpi_list(session, c_file), fid_l) except BaseException as e: brcdapi_log.exception( 'Programming error encountered. Exception is: ' + str(e), True) # Logout obj = brcdapi_rest.logout(session) if brcdapi_auth.is_error(obj): brcdapi_log.log(brcdapi_auth.formatted_error_msg(obj), True) # Dump the database to a file if _WRITE: brcdapi_log.log('Saving project to: ' + outf, True) plain_copy = dict() brcddb_copy.brcddb_to_plain_copy(proj_obj, plain_copy) brcdapi_file.write_dump(plain_copy, outf) brcdapi_log.log('Save complete', True) return proj_obj.r_exit_code()
def combine_main(): """Basically the main(). Did it this way so it can easily be used as a standalone module or called from another. :return: Exit code :rtype: int """ global _DEBUG # Get and validate user input inf, outf, s_flag, log, nl = parse_args() if s_flag: brcdapi_log.set_suppress_all() if not nl: brcdapi_log.open_log(log) ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ml.append('Directory, -i: ' + inf) ml.append('Output file, -o: ' + outf) brcdapi_log.log(ml, True) # Create project proj_obj = brcddb_project.new( inf, datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')) proj_obj.s_python_version(sys.version) proj_obj.s_description('Captured data from ' + inf) # Get a list of files - Filter out directories is just to protect the user. It shouldn't be necessary. outf = brcdapi_file.full_file_name(outf, '.json') files = brcdapi_file.read_directory(inf) if outf in files: brcdapi_log.log( 'Combined output file, ' + outf + ', already exists in: ' + inf + '. Processing halted', True) proj_obj.s_error_flag() else: x = len('.json') for file in [ f for f in files if len(f) > x and f.lower()[len(f) - x:] == '.json' ]: brcdapi_log.log('Processing file: ' + file, True) obj = brcdapi_file.read_dump(inf + '/' + file) brcddb_copy.plain_copy_to_brcddb(obj, proj_obj) # Now save the combined file plain_copy = dict() brcddb_copy.brcddb_to_plain_copy(proj_obj, plain_copy) try: brcdapi_file.write_dump(plain_copy, inf + '/' + outf) except FileNotFoundError: brcdapi_log.log(['', 'Folder not found: ' + inf, ''], True) return brcddb_common.EXIT_STATUS_OK
def pseudo_main(): """Basically the main(). Did it this way to use with IDE :return: Exit code :rtype: int """ global _kpi_l # Get the command line input ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() in_file, fid_str, vd, log, nl = parse_args() if vd: brcdapi_rest.verbose_debug = True if not nl: brcdapi_log.open_log(log) ml.append('FID: ' + str(fid_str)) fid_l = None if fid_str is None else fid_str.split(',') brcdapi_log.log(ml, True) # Read the file with the login credentials switch_list = _parse_login_credentials(in_file) # Create a project object proj_obj = brcddb_project.new("Captured_data", datetime.datetime.now().strftime('%d %b %Y %H:%M:%S')) # Poll all the switches ec_l = list() for switch in switch_list: # Collect the data for each switch ec_l.append(_capture_data(proj_obj, _kpi_l, fid_l, switch['id'], switch['pw'], switch['ip'], switch['sec'])) # Build cross references. This associates name server logins with a physical port. It is necessary in this example # because what is attached to the port is used as the port description added to the database. brcdapi_log.log('Building cross references', True) brcddb_project.build_xref(proj_obj) # Add the data to your database brcdapi_log.log('Adding data to database', True) _add_data_to_db(proj_obj) # Return the first error status encountered for ec in ec_l: if ec != brcddb_common.EXIT_STATUS_OK: return ec return ec # If we get this far, everything was good
def pseudo_main(): """Basically the main(). Did it this way so it can easily be used as a standalone module or called from another. :return: Exit code. See exit codes in brcddb.brcddb_common :rtype: int """ global _DEBUG, __version__ # Get and validate user input report, in_f, single_port_graph_in, stats_graph_in, graph_type = _get_input( ) ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ml.append(os.path.basename(__file__) + ' version: ' + __version__) ml.append('Report: ' + report) ml.append('Input file: ' + in_f) ml.append('Port graph: ' + str(single_port_graph_in)) ml.append('Stat graph: ' + str(stats_graph_in)) ml.append('Graph type: ' + str(graph_type)) brcdapi_log.log(ml, True) # Read in the previously collected data obj = brcddb_file.read_dump(in_f) if obj is None: return brcddb_common.EXIT_STATUS_ERROR proj_obj = brcddb_project.new(obj.get('_obj_key'), obj.get('_date')) proj_obj.s_python_version(sys.version) proj_obj.s_description(obj.get('_description')) brcddb_copy.plain_copy_to_brcddb(obj, proj_obj) obj.clear() base_switch_obj = proj_obj.r_switch_obj(proj_obj.r_get('base_switch_wwn')) # Build the cross-reference tables. brcddb_util.build_login_port_map( proj_obj) # Correlates name server logins with ports fab_obj = base_switch_obj.r_fabric_obj() if fab_obj is not None: brcddb_fabric.zone_analysis(base_switch_obj.r_fabric_obj( )) # Determines what zones each login participates in graph_list, msg_list = _graphs(base_switch_obj, single_port_graph_in, stats_graph_in, graph_type) return _write_report(base_switch_obj, report, graph_list, msg_list)
def pseudo_main(): """Basically the main(). :return: Exit code :rtype: int """ global _DEBUG # Get and validate command line input. ec = brcddb_common.EXIT_STATUS_OK ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ip, user_id, pw, sec, file, force, s_flag, echo, vd, log, nl = parse_args() if vd: brcdapi_rest.verbose_debug = True if s_flag: brcdapi_log.set_suppress_all() if not nl: brcdapi_log.open_log(log) if sec is None: sec = 'none' file = brcdapi_file.full_file_name(file, '.xlsx') if ip is not None: if user_id is None: ml.append('Missing user ID, -id') ec = brcddb_common.EXIT_STATUS_INPUT_ERROR if pw is None: ml.append('Missing password, -pw') ec = brcddb_common.EXIT_STATUS_INPUT_ERROR ml.append('File: ' + file) ml.append( 'IP address: ' + brcdapi_util.mask_ip_addr(ip) if isinstance(ip, str) else str(ip)) ml.append('ID: ' + str(user_id)) ml.append('sec: ' + sec) if len(ml) > 0: brcdapi_log.log(ml, True) if ec != brcddb_common.EXIT_STATUS_OK: return ec echo = False if echo is None else echo # Read in the Workbook, generate the portaddress --bind commands, and configure the switch(es) switch_d_list = [ switch_d for switch_d in report_utils.parse_switch_file(file).values() ] session = proj_obj = None try: for switch_d in switch_d_list: switch_d.update(err_msgs=list()) # Create the bind commands if switch_d['bind']: _bind_commands(switch_d) cli_l = switch_d['bind_commands'].copy() i = 0 while i < len(cli_l): cli_l.insert(i, '') i += 16 cli_l.insert( 0, '\n# Bind commands for FID ' + str(switch_d['fid'])) cli_l.append('\n# End bind commands for FID ' + str(switch_d['fid'])) brcdapi_log.log(cli_l, True) # Create the logical switch if ip is not None and switch_d['switch_flag']: if session is None: # Login session = api_int.login(user_id, pw, ip, sec, proj_obj) if fos_auth.is_error(session): return brcddb_common.EXIT_STATUS_API_ERROR if proj_obj is None: # Create a project object proj_obj = brcddb_project.new( 'Create_LS', datetime.datetime.now().strftime('%d %b %Y %H:%M:%S')) proj_obj.s_python_version(sys.version) proj_obj.s_description('Creating logical switches from ' + os.path.basename(__file__)) api_int.get_batch(session, proj_obj, _basic_capture_kpi_l, None) if proj_obj.r_is_any_error(): switch_d['err_msgs'].append( 'Error reading logical switch information from chassis' ) brcdapi_log.log( switch_d['err_msgs'][len(switch_d['err_msgs']) - 1], True) else: ec = _configure_switch(user_id, pw, session, proj_obj, switch_d, force, echo) except BaseException as e: switch_d['err_msgs'].append( 'Programming error encountered. Exception: ' + str(e)) brcdapi_log.log(switch_d['err_msgs'][len(switch_d['err_msgs']) - 1], True) ec = brcddb_common.EXIT_STATUS_ERROR # Logout and create and print a summary report if session is not None: obj = brcdapi_rest.logout(session) if fos_auth.is_error(obj): brcdapi_log.log(fos_auth.formatted_error_msg(obj), True) ec = brcddb_common.EXIT_STATUS_API_ERROR if ip is not None: _print_summary(switch_d_list) return ec
def pseudo_main(): """Basically the main(). Did it this way so it can easily be used as a standalone module or called from another. :return: Exit code. See exist codes in brcddb.brcddb_common :rtype: int """ global _DEBUG, _DEFAULT_POLL_INTERVAL, _DEFAULT_MAX_SAMPLE, _proj_obj, _session, _out_f, _switch_obj global _base_switch_obj, __version__, _uris, _uris_2 signal.signal(signal.SIGINT, _signal_handler) # Get user input ip, user_id, pw, sec, fid, pct, max_p, _out_f = _get_input() default_text = ' (default)' ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ml.append(os.path.basename(__file__) + ' version: ' + __version__) ml.append('IP Address: ' + brcdapi_util.mask_ip_addr(ip)) ml.append('User ID: ' + user_id) ml.append('FID: ' + str(fid)) if max_p is None: max_p = _DEFAULT_MAX_SAMPLE ml.append('Samples: ' + str(max_p) + default_text) else: ml.append('Samples: ' + str(max_p)) if pct is None: pct = _DEFAULT_POLL_INTERVAL ml.append('Poll Interval: ' + str(pct) + default_text) else: ml.append('Poll Interval: ' + str(pct) + ' (defaulting to ' + str(_MIN_POLL) + ')' if pct < _MIN_POLL else '') ml.append('Output File: ' + _out_f) brcdapi_log.log(ml, True) # Create project _proj_obj = brcddb_project.new( 'Port_Stats', datetime.datetime.now().strftime('%d %b %Y %H:%M:%S')) _proj_obj.s_python_version(sys.version) _proj_obj.s_description('Port statistics') # Login _session = brcddb_int.login(user_id, pw, ip, sec, _proj_obj) if fos_auth.is_error(_session): brcdapi_log.log(fos_auth.formatted_error_msg(_session), True) return brcddb_common.EXIT_STATUS_ERROR try: # I always put all code after login in a try/except in case of a code bug or network error, I still logout # Capture the initial switch and port information along with the first set of statistics. brcdapi_log.log('Capturing initial data', True) brcddb_int.get_batch(_session, _proj_obj, _uris, fid) # Captured data is put in _proj_obj chassis_obj = _proj_obj.r_chassis_obj(_session.get('chassis_wwn')) if chassis_obj.r_is_vf_enabled(): if fid is None: fid = 128 _base_switch_obj = chassis_obj.r_switch_obj_for_fid(fid) else: _base_switch_obj = chassis_obj.r_switch_objects()[0] if _base_switch_obj is None: brcdapi_log.log('Switch for FID ' + str(fid) + ' not found. ', True) return _wrap_up(brcddb_common.EXIT_STATUS_ERROR) base_switch_wwn = _base_switch_obj.r_obj_key() if _base_switch_obj.r_fabric_key() is None: _base_switch_obj.s_fabric_key( base_switch_wwn ) # Fake out a fabric principal if we don't have one _proj_obj.s_add_fabric(base_switch_wwn) brcddb_int.get_batch(_session, _proj_obj, _uris_2, fid) # Captured data is put in _proj_obj time.sleep( 5 ) # Somewhat arbitrary time. 
Don't want a throttling delay if the poll interval is very short # Get the first sample stats_buf = 'brocade-interface/fibrechannel-statistics' last_time = time.time() last_stats = brcddb_int.get_rest(_session, stats_buf, _base_switch_obj, fid) for p in last_stats.get('fibrechannel-statistics'): _base_switch_obj.r_port_obj(p.get('name')).s_new_key( 'fibrechannel-statistics', p) # Now start collecting the port and interface statistics for i in range(0, max_p): x = pct - (time.time() - last_time) time.sleep(_MIN_POLL if x < _MIN_POLL else x) switch_obj = _proj_obj.s_add_switch(base_switch_wwn + '-' + str(i)) last_time = time.time() obj = brcddb_int.get_rest(_session, 'brocade-interface/fibrechannel', switch_obj, fid) if not fos_auth.is_error(obj): for p in obj.get('fibrechannel'): switch_obj.s_add_port(p.get('name')).s_new_key( 'fibrechannel', p) obj = brcddb_int.get_rest(_session, stats_buf, switch_obj, fid) if fos_auth.is_error( obj ): # We typically get here when the login times out or network fails. brcdapi_log.log( 'Error encountered. Data collection limited to ' + str(i) + ' samples.', True) _wrap_up(brcddb_common.EXIT_STATUS_ERROR) return brcddb_common.EXIT_STATUS_ERROR obj = brcddb_int.get_rest(_session, stats_buf, switch_obj, fid) if fos_auth.is_error( obj ): # We typically get here when the login times out or network fails. brcdapi_log.log( 'Error encountered. Data collection limited to ' + str(i) + ' samples.', True) _wrap_up(brcddb_common.EXIT_STATUS_ERROR) return brcddb_common.EXIT_STATUS_ERROR for p in _stats_diff(last_stats, obj).get('fibrechannel-statistics'): switch_obj.s_add_port(p.get('name')).s_new_key( 'fibrechannel-statistics', p) _switch_obj.append(switch_obj) last_stats = obj return _wrap_up(brcddb_common.EXIT_STATUS_OK) except BaseException as e: brcdapi_log.log([ 'Error capturing statistics. ' + _EXCEPTION_MSG, 'Exception: ' + str(e) ], True) return _wrap_up(brcddb_common.EXIT_STATUS_ERROR)
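# _stats_diff() is referenced above but defined elsewhere in the module. The sketch below is an assumption about its
# behavior: FOS statistics counters are cumulative, so the per-interval value for each numeric leaf is new minus old,
# keyed by port name; non-numeric leaves are passed through unchanged.
def _stats_diff(old_obj, new_obj):
    """Returns a dict in the same shape as the REST response with per-port counter deltas."""
    old_d = {p['name']: p for p in old_obj.get('fibrechannel-statistics', list())}
    diff_l = list()
    for p in new_obj.get('fibrechannel-statistics', list()):
        old_p = old_d.get(p['name'], dict())
        diff_l.append({k: v - old_p[k] if isinstance(v, (int, float)) and isinstance(old_p.get(k), (int, float)) else v
                       for k, v in p.items()})
    return {'fibrechannel-statistics': diff_l}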
def _get_project(sl, pl, addl_parms):
    """Reads or captures project data

    :param sl: List of switches to poll via the API
    :type sl: list
    :param pl: List of project files to combine
    :type pl: list
    :param addl_parms: Additional parameters (debug and logging) to be passed to capture.py
    :type addl_parms: list
    :return rl: List of error messages
    :rtype rl: list
    :return proj_obj: Project object. None if there was an error obtaining the project object
    :rtype proj_obj: brcddb.classes.project.ProjObj, None
    """
    global _ZONE_KPI_FILE

    rl = list()  # Error messages

    # Create project
    proj_obj = brcddb_project.new('zone_merge', datetime.datetime.now().strftime('%d %b %Y %H:%M:%S'))
    proj_obj.s_python_version(sys.version)
    proj_obj.s_description('Zone merge')

    # Get a unique folder name for multi_capture.py and combine.py
    folder_l = [f for f in os.listdir('.') if not isfile(f)]
    base_folder = '_zone_merge_work_folder_'
    i = 0
    work_folder = base_folder + str(i)
    while work_folder in folder_l:
        i += 1
        work_folder = base_folder + str(i)
    os.mkdir(work_folder)

    # Add the KPI file for the captures
    zone_kpi_file = work_folder + '/' + _ZONE_KPI_FILE
    f = open(zone_kpi_file, 'w')
    f.write('\n'.join(_kpis_for_capture) + '\n')
    f.close()

    # Start all the data captures for the switches to be polled so that multiple switches can be captured in parallel
    if len(sl) > 0:
        brcdapi_log.log('Collecting zoning data from switches', True)
        captured_d = dict()
        pid_l = list()
        for sub_d in sl:
            ip_addr = sub_d['ip']
            file_name = work_folder + '/switch_' + ip_addr.split('.').pop() + '_' + str(len(pid_l))
            sub_d.update(file=file_name)
            file_name = brcdapi_file.full_file_name(file_name, '.json')
            d = captured_d.get(ip_addr)
            if d is None:
                sub_d_l = list()
                captured_d.update({ip_addr: dict(sub_d_l=sub_d_l, file=file_name)})
                params = ['python.exe', 'capture.py',
                          '-ip', ip_addr,
                          '-id', sub_d['id'],
                          '-pw', sub_d['pw'],
                          '-s', 'none' if sub_d['sec'] is None else sub_d['sec'],
                          '-f', file_name,
                          '-c', zone_kpi_file] + addl_parms
                pid_l.append(dict(p=subprocess.Popen(params), file_name=file_name, ip=ip_addr))
            sub_d_l.append(sub_d)

        # Add the data read from each chassis to the project object
        for pid_d in pid_l:  # Wait for all captures to complete before continuing
            pid_d.update(s=pid_d['p'].wait())
            brcdapi_log.log('Completed capture for ' + pid_d['file_name'] + '. Ending status: ' + str(pid_d['s']), True)
        for pid_d in pid_l:
            obj = brcdapi_file.read_dump(pid_d['file_name'])
            if obj is None:
                rl.append('Capture for ' + pid_d['file_name'] + ' failed.')
            else:
                brcddb_copy.plain_copy_to_brcddb(obj, proj_obj)
                captured_d[pid_d['ip']].update(fab_keys=obj['_fabric_objs'].keys())
        if len(rl) > 0:
            return rl, proj_obj

        # Figure out the fabric WWN for all the FIDs for the polled switches
        for d in captured_d.values():
            fab_obj_l = [proj_obj.r_fabric_obj(k) for k in d['fab_keys']]
            for fab_obj in fab_obj_l:
                if fab_obj.r_get('zone_merge') is None:  # I can't think of a reason why it wouldn't be None
                    fab_obj.s_new_key('zone_merge', dict(file=d['file']))
            for sub_d in d['sub_d_l']:
                found = False
                fid = sub_d['fid']
                if isinstance(fid, int):  # If the user is just running a scan, there won't be a FID
                    for fab_obj in fab_obj_l:
                        if fid in brcddb_fabric.fab_fids(fab_obj):
                            s_buf = 'none' if sub_d['sec'] is None else sub_d['sec']
                            zm_d = fab_obj.r_get('zone_merge')
                            zm_d.update(fab_wwn=fab_obj.r_obj_key(),
                                        update=sub_d['update'],
                                        cfg=sub_d['cfg'],
                                        fid=sub_d['fid'],
                                        ip=sub_d['ip'],
                                        id=sub_d['id'],
                                        pw=sub_d['pw'],
                                        sec=s_buf)
                            fab_obj.s_new_key('zone_merge', zm_d)
                            found = True
                            break
                    if not found:
                        rl.append('Could not find FID ' + str(fid) + ' in ' + brcdapi_util.mask_ip_addr(sub_d['ip']))

    # Add in all the read in project files
    if len(pl) > 0:
        brcdapi_log.log('Reading project files', True)
        for sub_d in pl:
            file_name = brcdapi_file.full_file_name(sub_d['project_file'], '.json')
            obj = brcdapi_file.read_dump(file_name)
            brcddb_copy.plain_copy_to_brcddb(obj, proj_obj)
            for fab_obj in [proj_obj.r_fabric_obj(k) for k in obj['_fabric_objs'].keys()]:
                if fab_obj.r_get('zone_merge') is None:  # It should be None. This is just future proofing.
                    fab_obj.s_new_key('zone_merge', dict(file=file_name))
            fab_obj = proj_obj.r_fabric_obj(sub_d.get('fab_wwn'))
            if fab_obj is None:
                rl.append('Could not find fabric WWN ' + str(sub_d.get('fab_wwn')) + ' in ' + file_name)
            else:
                fab_obj.r_get('zone_merge').update(fab_wwn=fab_obj.r_obj_key(), update=False, cfg=sub_d['cfg'])

    return rl, proj_obj
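# Hypothetical usage sketch for _get_project(). The dictionary keys match what the function reads ('ip', 'id', 'pw',
# 'sec', 'fid', 'update', 'cfg', 'project_file', 'fab_wwn'); the values and the '-log' additional parameter shown
# here are made up and would normally come from the zone_merge input workbook and command line.
_sl = [dict(ip='10.x.x.x', id='admin', pw='password', sec='self', fid=128, update=False, cfg=None)]
_pl = [dict(project_file='previous_capture', fab_wwn='10:00:00:00:00:00:00:01', cfg=None)]
_msg_l, _zm_proj_obj = _get_project(_sl, _pl, ['-log', '_logs'])
if len(_msg_l) > 0:
    brcdapi_log.log(_msg_l, True)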
def pseudo_main(): """Basically the main(). Did it this way so it can easily be used as a standalone module or called from another. :return: Exit code. See exist codes in brcddb.brcddb_common :rtype: int """ ec = brcddb_common.EXIT_STATUS_OK # Get the user input ml = ['WARNING!!! Debug is enabled'] if _DEBUG else list() ip, user_id, pw, sec, s_flag, fid, vd, log, nl = parse_args() if not nl: brcdapi_log.open_log(log) if vd: brcdapi_rest.verbose_debug = True if s_flag: brcdapi_log.set_suppress_all() if sec is None: sec = 'none' fid_list = None if fid is None else [int(i) for i in fid.split(',')] ml.extend([ 'IP address: ' + ip, 'User ID: ' + user_id, 'Security: ' + sec, 'Surpress: ' + str(s_flag), 'FID: ' + 'Automatic' if fid is None else fid ]) brcdapi_log.log(ml, True) # Create the project proj_obj = brcddb_project.new( 'Captured_data', datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')) proj_obj.s_python_version(sys.version) proj_obj.s_description('Unused ports to disable') # Login session = api_int.login(user_id, pw, ip, sec, proj_obj) if fos_auth.is_error(session): brcdapi_log.log(fos_auth.formatted_error_msg(session), True) return brcddb_common.EXIT_STATUS_ERROR # Capture data - stats are cleared on a per port basis so this is needed to determine what the ports are. try: # I always put all code after login in a try/except so that if I have a code bug, I still logout brcdapi_log.log('Capturing data', True) api_int.get_batch(session, proj_obj, chassis_uris, list(), fid_list) # Captured data is put in proj_obj chassis_obj = proj_obj.r_chassis_obj(session.get('chassis_wwn')) # Clear stats on each switch for switch_obj in chassis_obj.r_switch_objects(): fid = brcddb_switch.switch_fid(switch_obj) if fid_list is None or fid in fid_list: temp_ec = clear_stats(session, switch_obj) ec = temp_ec if ec != brcddb_common.EXIT_STATUS_OK else ec except: # Bare because I don't care what happened. I just want to logout. brcdapi_log.exception('Programming error encountered', True) ec = brcddb_common.EXIT_STATUS_ERROR # Logout obj = brcdapi_rest.logout(session) if fos_auth.is_error(obj): brcdapi_log.log(fos_auth.formatted_error_msg(obj), True) return brcddb_common.EXIT_STATUS_ERROR return ec