def parse(self, data):
    """
    Parse "show interface details" command output to get interface details.

    :param data: "show interface details" command output
    :return: list of dicts, one per logical interface; each row carries the
             physical interface's (normalized) attributes
    """
    try:
        result = []
        generic_parser = GenericTextParser()
        # Physical-interface attributes are shared by every logical interface row.
        physical = generic_parser.parse(data, self.physical_regex_rule)[0]
        # Normalize the device-reported status strings to TRUE/FALSE and UP/DOWN.
        physical.update({
            'connected': "TRUE" if physical['operationalStatus'] == "Up" else "FALSE"
        })
        physical.update({
            'operationalStatus': "UP" if physical['operationalStatus'] == "Up" else "DOWN"
        })
        physical.update({
            'administrativeStatus': "UP" if physical['administrativeStatus'] == "Enabled" else "DOWN"
        })
        # An all-alphabetic "address" means no real MAC was captured -- blank it.
        physical.update({
            'hardwareAddress': "" if physical['hardwareAddress'].isalpha() else physical['hardwareAddress']
        })
        if not physical['name']:
            return result
        # Each "Logical interface" section of the output yields one result row.
        parser = LineBasedBlockParser('Logical interface')
        blocks = parser.parse(data)
        for block in blocks:
            logical = generic_parser.parse(block, self.logical_interface_regex)[0]
            if logical['mask'] == "Unspecified":
                continue
            if logical['ipAddress'] and logical['mask']:
                # mask is of the form "<addr>/<prefix-len>"; keep only the prefix length.
                physical.update({
                    "ipAddress": "{}/{}".format(logical['ipAddress'],
                                                logical['mask'].split('/')[1])
                })
            else:
                physical.update({"ipAddress": ""})
            physical.update(
                {"members": "{}".format(self.get_members(block))})
            physical.update({
                "name": "{}".format(logical['name'] if logical['name'] else physical['name'])
            })
            if physical['mtu'] == 'Unlimited':
                physical.update({"mtu": "0"})
            # `physical` is mutated on every iteration, so append a copy.
            result.append(physical.copy())
    except Exception as e:
        py_logger.error("{}\n{}".format(e, traceback.format_exc()))
        raise e
    return result
def parse_command_output(self, cmd, command_result):
    """
    Split a command's raw output into blocks, parse each block into rows,
    and return the table restricted to the configured columns.

    :param cmd: command configuration dict
    :param command_result: raw text output of the command
    :return: list of dict rows
    """
    rows = []  # one dict per parsed row
    if BLOCK_PARSER_KEY in cmd:
        parser_cfg = cmd[BLOCK_PARSER_KEY]
        # Instantiate the configured block parser, with arguments if given.
        if ARGUMENTS_KEY in parser_cfg:
            splitter = import_utilities.load_block_parser(
                parser_cfg[NAME_KEY])(**parser_cfg[ARGUMENTS_KEY])
        else:
            splitter = import_utilities.load_class(parser_cfg[NAME_KEY])()
        chunks = import_utilities.load_class_method(
            splitter, 'parse')(command_result)
    else:
        # No block parser configured: treat the whole output as one block.
        chunks = [command_result]
    for chunk in chunks:
        if not chunk:
            continue
        try:
            parsed = self.process_block(chunk, cmd)
        except IndexError as e:
            py_logger.info(
                "Couldn't parse block {}\nfor command {}".format(
                    chunk, cmd[COMMAND_KEY]))
            py_logger.error(e)
            continue
        if len(parsed) > 0:
            rows += parsed
    return self.filter_columns(cmd, rows)
def execute_commands(self):
    """
    Open an SSH session, run every configured workload, parse each command's
    output into a table, and store each table in self.result_map keyed by
    its table id.

    A workload may instead reuse previously built tables (REUSE_TABLES_KEY)
    or the cached raw output of an earlier command (REUSE_COMMAND_KEY).

    :raises Exception: re-raises any failure after logging it.
    """
    ssh_connect_handler = None
    try:
        ssh_connect_handler = SSHConnectHandler(ip=self.credentials.ip_or_fqdn,
                                                username=self.credentials.username,
                                                password=self.credentials.password,
                                                device_type=self.credentials.device_type,
                                                port=self.credentials.port)
        command_output_dict = {}  # raw output per command, for REUSE_COMMAND_KEY
        for workload in self.workloads:
            command_id = workload[TABLE_ID_KEY]
            if REUSE_TABLES_KEY in workload:
                table = self.process_tables(workload)
            elif REUSE_COMMAND_KEY in workload:
                # Re-parse output captured when the command first ran.
                command_result = command_output_dict[workload[REUSE_COMMAND_KEY]]
                workload[COMMAND_KEY] = workload[REUSE_COMMAND_KEY]
                py_logger.info('Command %s Result %s' % (workload[REUSE_COMMAND_KEY], command_result))
                table = self.parse_command_output(workload, command_result)
            else:
                command_result = ssh_connect_handler.execute_command(workload[COMMAND_KEY])
                command_output_dict[workload[COMMAND_KEY]] = command_result
                py_logger.info('Command %s Result %s' % (workload[COMMAND_KEY], command_result))
                table = self.parse_command_output(workload, command_result)
            if 'switch' == command_id:
                # Tag the switch row with the device address so rows from
                # different devices stay distinguishable downstream.
                table[0]['ipAddress/fqdn'] = self.credentials.ip_or_fqdn
                table[0]['name'] = "{}-{}".format(table[0]['name'], self.credentials.ip_or_fqdn)
            self.result_map[command_id] = table
    except Exception as e:
        py_logger.error("Error occurred while executing command : {}".format(e))
        raise e
    finally:
        # The handler is None if SSHConnectHandler() itself raised; calling
        # close_connection() on None would mask the original error with an
        # AttributeError.
        if ssh_connect_handler is not None:
            ssh_connect_handler.close_connection()
def parse(self, data):
    """
    Parse "show route instance detail" command output.

    :param data: show route detail command output
    :return: list of dicts, one per route
    """
    try:
        result = []
        # blocks[0] holds the routing-instance (VRF) header; the remaining
        # blocks are one per network entry.
        parser = LineBasedBlockParser("(.*) \(.* ent")
        blocks = parser.parse(data)
        generic_parser = GenericTextParser()
        vrf_name = generic_parser.parse(blocks[0], self.vrf_rule)[0]
        # The IPv6 table ("inet6.0") is skipped entirely.
        if "inet6.0" in vrf_name['name']:
            return result
        vrf = vrf_name['name']
        for v_idx, block in enumerate(blocks[1:]):
            # Split the network block into sub-blocks, one per protocol entry
            # (lines like "*OSPF Preference: ...").
            parser = LineBasedBlockParser("\*?(\w+)\s+Preference:.*")
            line_blocks = parser.parse(block)
            network_name = generic_parser.parse(line_blocks[0], self.network_name_rule)[0]
            for idx, line_block in enumerate(line_blocks[1:]):
                routes = generic_parser.parse(line_block, self.route_rules)[0]
                # Entries whose next-hop type is "Receive" are skipped.
                if routes['next_hop_type'] == "Receive":
                    continue
                routes.pop('next_hop_type')
                # A route with neither interface field populated is unusable.
                if not routes['interfaceName'] and not routes[
                        'network_interface']:
                    continue
                routes.update({"vrf": vrf})
                routes.update(
                    {"network": "{}".format(network_name['name'])})
                # Synthesize a unique name from network + block/entry indexes.
                routes.update({
                    "name": "{}_{}_{}".format(network_name['name'], v_idx, idx)
                })
                # Prefer interfaceName; fall back to network_interface.
                routes.update({
                    "interfaceName": routes['interfaceName']
                    if routes['interfaceName'] else routes['network_interface']
                })
                routes.pop('network_interface')
                # A missing next hop means the route is directly connected.
                routes.update({
                    "routeType": "{}".format(routes['routeType'] if routes['nextHop'] else "DIRECT")
                })
                routes.update({
                    "nextHop": "{}".format(routes['nextHop'] if routes['nextHop'] else "DIRECT")
                })
                result.append(routes.copy())
    except Exception as e:
        py_logger.error("{}\n{}".format(e, traceback.format_exc()))
        raise e
    return result
def parse(self, data):
    """
    Parse "show ip route vrf" command output.

    :param data: show ip route vrf command output
    :return: list of dicts, one per route entry
    """
    try:
        result = []
        # Split the output into one block per network: lines that contain an
        # IPv4 CIDR followed by "[".
        parser = LineBasedBlockParser(
            r".*(\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/.*)\[.*")
        blocks = parser.parse(data)
        generic_parser = GenericTextParser()
        vrf = "default"
        # blocks[0] is the preamble before the first route entry.
        for block in blocks[1:]:
            line_blocks = block.splitlines()
            route_network = generic_parser.parse(line_blocks[0], self.rules)[0]
            route_type = route_network['routeType'].rstrip()
            for idx, line_block in enumerate(line_blocks):
                # The interface name is the last comma-separated field.
                line = line_block.split(',')
                interface_name = line[-1].lstrip()
                routes = generic_parser.parse(line[0], self.route_rules)[0]
                routes.update({
                    "network": "{}".format(route_network['network'].rstrip())
                })
                routes.update({
                    "name": "{}_{}".format(route_network['network'].rstrip(), idx)
                })
                routes.update({"vrf": vrf})
                routes.update({"interfaceName": interface_name})
                # BUG FIX: dict.has_key() was removed in Python 3 and raised
                # AttributeError here; use the "in" operator instead.
                routes.update({
                    "routeType": "{}".format(
                        self.route_types[route_type]
                        if route_type in self.route_types else "DYNAMIC")
                })
                # 'C' (connected) routes have no real next hop.
                routes.update({
                    "nextHop": "{}".format(
                        routes['nextHop'] if route_type != 'C' else "DIRECT")
                })
                result.append(routes.copy())
    except Exception as e:
        py_logger.error("{}\n{}".format(e, traceback.format_exc()))
        raise e
    return result
def zipdir(self):
    """
    Ensure self.path exists, copy the project base files into it, then zip
    everything under self.path into the archive named by self.filename
    (created next to its absolute location).
    """
    if not os.path.exists(self.path):
        os.makedirs(self.path)
    if not os.path.exists(self.path):
        py_logger.error("Couldn't create directory {}. Please check permissions.".format(self.path))
        return
    self.copy_project_base()
    zipfile_path, zipfile_name = os.path.split(os.path.abspath(self.filename))
    zip_file_path = '{}/{}'.format(zipfile_path, zipfile_name)
    # Context manager guarantees the archive is finalized and closed even if
    # a write fails part-way through (original leaked the handle on error).
    with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, dirs, files in os.walk(self.path):
            for f in files:
                zipf.write(os.path.join(root, f))
def join_tables(self):
    """
    Run every configured table joiner and store each joined table in
    self.result_map under its joined-table id.

    :raises KeyError: if a joiner config references a missing table or column key.
    """
    if not self.table_joiners:
        return
    try:
        for cfg in self.table_joiners:
            joiner = import_utilities.load_class(cfg[NAME_KEY])()
            join = import_utilities.load_class_method(joiner, 'join_tables')
            joined = join(self.result_map[cfg[SOURCE_TABLE_KEY]],
                          self.result_map[cfg[DESTINATION_TABLE_KEY]],
                          cfg[SOURCE_COLUMN_KEY],
                          cfg[DESTINATION_COLUMN_KEY])
            self.result_map[cfg[JOINED_TABLE_ID_KEY]] = joined
    except KeyError as e:
        py_logger.error("Failed to join tables: KeyError : {}".format(e))
        raise e
def filter_columns(cmd, table):
    """
    Project each row of *table* onto the columns selected in the command
    config, renaming source keys to their configured output names.

    Rows are always kept; a selected key missing from a row is logged and
    simply omitted from that row's projection.

    :param cmd: command configuration dict (may contain SELECT_COLUMNS_KEY)
    :param table: list of dict rows
    :return: the original table if no selection is configured, else the
             projected table
    """
    if SELECT_COLUMNS_KEY not in cmd:
        return table
    column_map = cmd[SELECT_COLUMNS_KEY]  # source key -> output column name
    projected = []
    for row in table:
        picked = {}
        for source_key in column_map:
            if source_key in row:
                picked[column_map[source_key]] = row[source_key]
            else:
                py_logger.error("Did not find key {}".format(source_key))
        projected.append(picked)
    return projected
def write(path, filename, table):
    """
    Write a list-of-dicts table to <path>/<filename>.csv, using the keys of
    the first row as the header row. Keys missing from later rows are
    written as empty cells. Invalid input is logged and skipped rather than
    raising.

    :param path: destination directory (created if missing)
    :param filename: file name without extension
    :param table: list of dict rows
    """
    if table is None:
        py_logger.warn(
            '{} Table cannot be None. Will not write to csv.'.format(
                filename))
        return
    if not isinstance(table, list):
        py_logger.warn(
            'Table is a list of dictionaries. Will not write to csv.')
        return
    if len(table) == 0:
        py_logger.warn(
            '{} Table cannot be empty. Will not write to csv.'.format(
                filename))
        return
    csv.register_dialect('dialect', quoting=csv.QUOTE_ALL,
                         skipinitialspace=True)
    if not os.path.exists(path):
        os.makedirs(path)
    if not os.path.exists(path):
        py_logger.error(
            "Couldn't create directory {}. Please check permissions.".
            format(path))
        return
    # newline='' is required by the csv module; without it every row is
    # followed by a spurious blank line on Windows.
    with open(path + '/' + filename + CsvWriter.CSV_EXTENSION, 'w',
              newline='') as write_file:
        writer = csv.writer(write_file, dialect='dialect')
        first = table[0]
        if not isinstance(first, dict):
            py_logger.warn(
                'Table is a list of dictionaries. Not a valid row.')
            return
        headers = first.keys()
        writer.writerow(headers)
        for row in table:
            if not isinstance(row, dict):
                py_logger.warn('Not a valid row. Will skip row.')
                continue
            # Fill cells in header order; missing keys become empty strings.
            writer.writerow([row.get(h, '') for h in headers])