def file_diff(file_a, file_b, input_dir, output_dir, one_off_cidr_cef, output_format):
    """
    Function to do a plain file diff, and output an Excel spreadsheet
    :param file_a: A file
    :param file_b: A file
    :param input_dir: The input directory
    :param output_dir: The output directory
    :param one_off_cidr_cef: Created because of an issue with ASR 9K OS upgrades
    :param output_format: 'html' smart diff, or 'xlsx'
    :return: None
    """
    LOGGER.debug('Starting Function file_diff')
    a_list = pdt.file_to_list(file_a, input_dir)
    b_list = pdt.file_to_list(file_b, input_dir)
    if one_off_cidr_cef:
        b_list = one_off_next_hop(b_list)
    if output_format == 'xlsx':
        diff_data_set = diffhelper.get_a_data_set_diff(a_list, b_list)
        spreadsheet_file_name = pdt.file_name_increase('file_diff.xlsx', output_dir)
        excel_obj = mod.scripts.WriteXlsxDiff(
            os.path.join(output_dir, spreadsheet_file_name),
            diffhelper.list_with_line_numbers(a_list),
            diffhelper.list_with_line_numbers(b_list),
            diff_data_set)
        excel_obj.write_spreadsheet()
        print('File named {file_name} created in {folder}'.format(
            file_name=spreadsheet_file_name, folder=output_dir))
    elif output_format == 'html':
        diff_object = diff_match_patch()
        html_temp = list()
        html_temp.append(
            diff_pretty_html_table(
                diff_object.diff_lineMode('\n'.join(a_list), '\n'.join(b_list), deadline=20)))
        html_file_name = pdt.file_name_increase('file_diff.html', output_dir)
        pdt.list_to_file(html_temp, html_file_name, output_dir)
        print('File named {file_name} created in {folder}'.format(
            file_name=html_file_name, folder=output_dir))
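# A hypothetical invocation of file_diff (the paths and file names below are
# placeholders, not project defaults). With output_format='xlsx' it writes an
# Excel diff workbook; with output_format='html' it writes a diff_match_patch
# smart-diff HTML report instead.
#
# file_diff('running_config_before.txt', 'running_config_after.txt',
#           '/opt/qct/input', '/opt/qct/output',
#           one_off_cidr_cef=False, output_format='xlsx')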
def prefix_list_diff(file_a, file_b, output_file, input_dir, output_dir):
    """
    The prefix-list diffing function
    :param file_a: The name of file a
    :param file_b: The name of file b
    :param output_file: The name of the output file
    :param input_dir: The name of the input directory
    :param output_dir: The name of the output directory
    :return: None
    """
    LOGGER.debug('Starting Function prefix_list_diff')
    a_list = pdt.file_to_list(file_a, input_dir)
    b_list = pdt.file_to_list(file_b, input_dir)
    pl_obj_a = PrefixListInformation(a_list)
    pl_obj_b = PrefixListInformation(b_list)
    output_list = pl_obj_a.compare_prefix_list(pl_obj_b, file_b)
    output_list += pl_obj_b.compare_prefix_list(pl_obj_a, file_a)
    output_file = pdt.file_name_increase(output_file, output_dir)
    print('File named {file_name} created in {folder}'.format(
        file_name=pdt.list_to_file(output_list, output_file, output_dir),
        folder=output_dir))
def acl_diff(file_a, file_b, output_file, input_dir, output_dir):
    """
    The ACL diffing function
    :param file_a: The name of file a
    :param file_b: The name of file b
    :param output_file: The name of the output file
    :param input_dir: The name of the input directory
    :param output_dir: The name of the output directory
    :return: None
    """
    LOGGER.debug('Starting Function acl_diff')
    a_list = pdt.file_to_list(file_a, input_dir)
    b_list = pdt.file_to_list(file_b, input_dir)
    acl_obj_a = AccessListInformation(a_list)
    acl_obj_b = AccessListInformation(b_list)
    output_list = acl_obj_a.compare_access_list(acl_obj_b, file_b)
    output_list += acl_obj_b.compare_access_list(acl_obj_a, file_a)
    output_file = pdt.file_name_increase(output_file, output_dir)
    print('File named {file_name} created in {folder}'.format(
        file_name=pdt.list_to_file(output_list, output_file, output_dir),
        folder=output_dir))
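# A hypothetical invocation of acl_diff (file names are placeholders). The
# comparison runs in both directions, so entries unique to either ACL file
# appear in the report.
#
# acl_diff('acl_rtr01.txt', 'acl_rtr02.txt', 'acl_diff_report.txt',
#          '/opt/qct/input', '/opt/qct/output')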
def __give_results(self):
    """
    Method to give the results of the test
    :return: None
    """
    LOGGER.debug('Starting Method __give_results in Class: {}'.format(
        type(self)))
    output_list = list()
    correct_answers = 0
    display_correct_answers = list()
    incorrect_answers = 0
    display_incorrect_answers = list()
    for result in self.answered_questions:
        if result.get('correct'):
            correct_answers += 1
            display_correct_answers.append('{question}: {answer}'.format(
                question=result.get('question'),
                answer=result.get('answer')))
        else:
            incorrect_answers += 1
            display_incorrect_answers.append('{question}: {answer}'.format(
                question=result.get('question'),
                answer=result.get('answer')))
    mnu.clear_screen()
    output_list.append(
        '------------------------------------------------------')
    output_list.append('Correct: {}'.format(correct_answers))
    for item in display_correct_answers:
        output_list.append(item)
    output_list.append(
        '\n------------------------------------------------------')
    output_list.append('Incorrect: {}'.format(incorrect_answers))
    for item in display_incorrect_answers:
        output_list.append(item)
    output_list.append(
        '\n------------------------------------------------------')
    output_list.append('Final Score')
    output_list.append('{:.2f}% Correct'.format(
        correct_answers / len(self.answered_questions) * 100))
    for line in output_list:
        print(line)
    output_file_name = 'ScoreReport.txt'
    info = pdt.list_to_file(
        output_list,
        pdt.file_name_increase(output_file_name, self.dirs.get_output_dir()),
        self.dirs.get_output_dir())
    print('Output score to {}'.format(
        os.path.join(self.dirs.get_output_dir(), info)))
    input('Press <ENTER> to continue')
def __create_config_yml_file(self):
    """
    Method to create a config.yml if one does not exist
    :return: None
    """
    temp_list = list()
    temp_list.append('--- # Version 1.0.0')
    temp_list.append('config:')
    temp_list.append('    input_directory: ')
    temp_list.append('    output_directory: ')
    temp_list.append('    logging_directory: ')
    temp_list.append('    logging_level: ')
    temp_list.append(
        '# Logging level is 3 by default, highest is 1 lowest is 5')
    pdt.list_to_file(temp_list, 'config.yml', self.data_dir)
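# A hypothetical, filled-in version of the config.yml scaffold created above
# (the directory paths are placeholders, not project defaults):
#
# --- # Version 1.0.0
# config:
#     input_directory: /opt/qct/input
#     output_directory: /opt/qct/output
#     logging_directory: /opt/qct/logs
#     logging_level: 3
# # Logging level is 3 by default, highest is 1 lowest is 5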
def __config_as_yml(self, config_yml):
    """
    Method to see or output config as YML data
    :param config_yml: A yml file
    :return: None
    """
    yml_data = yaml.dump(config_yml, default_flow_style=False, indent=4)
    print(yml_data)
    if not self.display_only:
        try:
            if not self.output_file_name:
                file_name = pdt.file_name_increase('config.yml',
                                                   self.directories.get_output_dir())
            else:
                file_name = self.output_file_name
            pdt.list_to_file(yml_data.splitlines(), file_name,
                             self.directories.get_output_dir())
        except FileNotFoundError as e:
            LOGGER.critical('Can not write output {}'.format(
                self.directories.get_output_dir()))
            sys.exit(e)
def __config_as_json(self, config_yml):
    """
    Method to see or output config as JSON data
    :param config_yml: A yml file
    :return: None
    """
    json_data = json.dumps(config_yml, sort_keys=True, indent=4)
    print(json_data)
    if not self.display_only:
        try:
            if not self.output_file_name:
                file_name = pdt.file_name_increase('config.json',
                                                   self.directories.get_output_dir())
            else:
                file_name = self.output_file_name
            pdt.list_to_file(json_data.splitlines(), file_name,
                             self.directories.get_output_dir())
        except FileNotFoundError as e:
            LOGGER.critical('Can not write output {}'.format(
                self.directories.get_output_dir()))
            sys.exit(e)
def acl_to_prefix_list_converter(file_a, output_file, input_dir, output_dir):
    """
    The ACL to prefix-list converter function
    :param file_a: The name of file a
    :param output_file: The name of the output file
    :param input_dir: The name of the input directory
    :param output_dir: The name of the output directory
    :return: None
    """
    LOGGER.debug('Starting Function acl_to_prefix_list_converter')
    a_list = pdt.file_to_list(file_a, input_dir)
    acl_obj_a = AccessListInformation(a_list)
    output_list = acl_obj_a.convert_to_prefix_list()
    output_file = pdt.file_name_increase(output_file, output_dir)
    print('File named {file_name} created in {folder}'.format(
        file_name=pdt.list_to_file(output_list, output_file, output_dir),
        folder=output_dir))
def convert_acl_to_our_format(directories=None, input_file_name=None, output_file_name=None,
                              display_only=False, reset_sequences=False):
    """
    Function to convert an ACL to a YML format for QuickConfigTemplates
    :param directories:
    :param input_file_name: The input file name
    :param output_file_name: The output file name
    :param display_only: Boolean true = don't output to file
    :param reset_sequences: Set True to recount sequences
    :return: None
    """
    temp_list = list()
    acl_obj = None
    try:
        acls = pdt.file_to_list(input_file_name, directories.get_yml_dir(input_file_name))
        acls = clean_list(acls)
        if len(acls) == 0:
            error = 'No data found in file {}'.format(
                os.path.join(directories.get_yml_dir(input_file_name), input_file_name))
            LOGGER.critical(error)
            sys.exit(error)
    except FileNotFoundError as e:
        error = '{error}'.format(error=e)
        LOGGER.critical(error)
        sys.exit(error)
    for line in acls:
        line_split = line.split()
        if directories.get_logging_level() == logging.DEBUG:
            print(line_split)
        if line_split[0] == 'ip':
            try:
                if line_split[2] == 'standard':
                    acl_obj = StandardAclData(line_split[3], reset_sequences)
                elif line_split[2] == 'extended':
                    acl_obj = ExtendedAclData(line_split[3], reset_sequences)
            except IndexError:
                error = 'Cannot find ACL name in this statement "{}"'.format(line)
                LOGGER.error(error)
                sys.exit(error)
        elif line_split[0] == 'permit' or line_split[0] == 'deny':
            if acl_obj:
                acl_obj.set_lines(line)
            else:
                error = 'Cannot find ACL Object'
                LOGGER.error(error)
                sys.exit(error)
        else:
            if acl_obj:
                acl_obj.set_lines(line)
            else:
                error = 'Cannot find ACL Object'
                LOGGER.error(error)
                sys.exit(error)
    temp_list.append('--- # Created from file: {} with acl_create'.format(input_file_name))
    temp_list.append('common:')
    temp_list.append('    template: <replace>')
    temp_list.append('    devices:')
    temp_list.append('    -   device:')
    temp_list.append('        -   devicename: <replace>')
    temp_list.append('            management_ip: <replace>')
    if acl_obj.get_acl_type() == 'standard':
        temp_list.append('            standard_acls:')
        temp_list.append('            -   acl_name: {}'.format(acl_obj.get_name()))
        temp_list.append('                sequences:')
        for line_data in acl_obj.get_lines():
            temp_list.append('                -   sequence: {}'.format(line_data.get('sequence')))
            temp_list.append('                    permit_deny: {}'.format(line_data.get('permit_deny')))
            temp_list.append('                    source_network: {}'.format(line_data.get('source_network')))
    elif acl_obj.get_acl_type() == 'extended':
        temp_list.append('            extended_acls:')
        temp_list.append('            -   acl_name: {}'.format(acl_obj.get_name()))
        temp_list.append('                sequences:')
        for line_data in acl_obj.get_lines():
            temp_list.append('                -   sequence: {}'.format(line_data.get('sequence')))
            temp_list.append('                    permit_deny: {}'.format(line_data.get('permit_deny')))
            temp_list.append('                    protocol: {}'.format(line_data.get('protocol')))
            temp_list.append('                    source_network: {}'.format(line_data.get('source_network')))
            temp_list.append('                    destination_network: {}'.format(line_data.get('destination_network')))
    """
    --- # Test data for ios
    common:
        template: ios_base.jinja2
        ticket_number: CHG123456789
        devices:
        -   device:
            -   devicename: IOS-RTR02
                management_ip: 10.99.222.23
                extended_acls:
                -   acl_name: ACL-EXT-1
                    sequences:
                    -   destination_network: 192.168.5.0 0.0.0.255
                        permit_deny: permit
                        protocol: ip
                        sequence: 10
                        source_network: 192.168.1.0 0.0.0.255
                    -   destination_network: 192.168.6.0 0.0.0.255
                        destination_port: 445
                        permit_deny: permit
                        protocol: tcp
                        sequence: 20
                        source_network: 192.168.4.0 0.0.0.255
                    -   destination_network: 192.168.6.0 0.0.0.255
                        destination_port_range: 445 600
                        permit_deny: permit
                        protocol: tcp
                        sequence: 30
                        source_network: 192.168.4.0 0.0.0.255
                    -   destination_network: 192.168.6.0 0.0.0.255
                        permit_deny: permit
                        protocol: tcp
                        sequence: 40
                        source_network: 192.168.4.0 0.0.0.255
                        source_port_range: 445 600
    """
    if not display_only:
        file_name = pdt.file_name_increase(output_file_name, directories.get_output_dir())
        pdt.list_to_file(temp_list, file_name, directories.get_output_dir())
        output_notify = 'Filename: {} output to directory {}'.format(file_name,
                                                                     directories.get_output_dir())
        print(output_notify)
        LOGGER.debug(output_notify)
    for final_yml in temp_list:
        print(final_yml)
    if directories.get_logging_level() == logging.DEBUG:
        print(acl_obj)
        for line in acl_obj.get_lines():
            print(line)
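# A hypothetical input file for convert_acl_to_our_format. The parser keys on
# the leading 'ip' token to create a StandardAclData or ExtendedAclData object
# from the named ACL, then feeds every permit/deny line to acl_obj.set_lines().
#
# ip access-list extended ACL-EXT-1
#  permit ip 192.168.1.0 0.0.0.255 192.168.5.0 0.0.0.255
#  permit tcp 192.168.4.0 0.0.0.255 192.168.6.0 0.0.0.255 eq 445
#  deny   ip any any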
def get_network_aggregator(file_a, lower_constraint, upper_constraint, input_dir, output_dir):
    """
    Function to create a spreadsheet of possible aggregates
    :param file_a: The file name
    :param lower_constraint: a value between 0 and 32
    :param upper_constraint: a value between 0 and 32
    :param input_dir: The input directory
    :param output_dir: The output directory
    :return: None
    """
    LOGGER.debug('Starting Function get_network_aggregator')
    temp_list_good = list()
    temp_list_bad = list()
    temp_possible_set = set()
    final_dict = dict()
    top_n_dict = None
    a_list = pdt.file_to_list(file_a, input_dir)
    for line in a_list:
        line_split = line.split()
        for item in line_split:
            if ipv4.ip_mask(item, return_tuple=False):
                temp_list_good.append(item)
            else:
                temp_list_bad.append(item)
    pdt.list_to_file(temp_list_good,
                     pdt.file_name_increase('good_ip.txt', output_dir), output_dir)
    pdt.list_to_file(temp_list_bad,
                     pdt.file_name_increase('bad_ip.txt', output_dir), output_dir)
    for good_cidr_subnet in temp_list_good:
        good_cidr_subnet_split = good_cidr_subnet.split('/')
        if good_cidr_subnet_split[0] != '0.0.0.0':
            for net in ipv4.all_subnets_shorter_prefix(good_cidr_subnet_split[0],
                                                       good_cidr_subnet_split[1],
                                                       include_default=False):
                net_split = net.split('/')
                if int(lower_constraint) <= int(net_split[1]) <= int(upper_constraint):
                    temp_possible_set.add(net)
    for final_net in temp_possible_set:
        final_dict[final_net] = {
            'matched': list(),
            'unmatched': list(),
        }
        for good_cidr_subnet in temp_list_good:
            good_cidr_subnet_split = good_cidr_subnet.split('/')
            if final_net in ipv4.all_subnets_shorter_prefix(good_cidr_subnet_split[0],
                                                            good_cidr_subnet_split[1],
                                                            include_default=False):
                final_dict[final_net]['matched'].append(good_cidr_subnet)
            else:
                final_dict[final_net]['unmatched'].append(good_cidr_subnet)
    for key in final_dict:
        top_n_dict = get_top_n(top_n_dict, final_dict, key, 10)
    spread_sheet_file_name = pdt.file_name_increase('aggregator.xlsx', output_dir)
    spread_sheet = mod.scripts.WriteXlsxAggregate(
        os.path.join(output_dir, spread_sheet_file_name),
        final_dict, top_n_dict, len(temp_list_good))
    spread_sheet.write_spreadsheet()
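# A minimal, self-contained sketch of the aggregation idea used in
# get_network_aggregator, written against the standard-library ipaddress
# module instead of the project's ipv4 helpers; the prefixes in the example
# are hypothetical sample data. Every supernet of a learned prefix whose mask
# length falls between the constraints becomes a candidate aggregate, and each
# candidate is scored by how many learned prefixes it covers.
import ipaddress


def candidate_aggregates_sketch(prefixes, lower_constraint, upper_constraint):
    """Return {candidate_aggregate: covered_prefix_count} for the given prefixes."""
    candidates = set()
    for prefix in prefixes:
        network = ipaddress.ip_network(prefix)
        for new_len in range(int(lower_constraint),
                             min(int(upper_constraint), network.prefixlen) + 1):
            candidates.add(network.supernet(new_prefix=new_len))
    return {str(candidate): sum(1 for prefix in prefixes
                                if ipaddress.ip_network(prefix).subnet_of(candidate))
            for candidate in candidates}


# Example: candidate_aggregates_sketch(['10.1.0.0/24', '10.1.1.0/24'], 16, 23)
# scores 10.1.0.0/23 as covering both /24s.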
def __run_template_v1(self):
    """ Method to build, and output the template """
    try:
        config = yaml.safe_load(self.yml_data)
    except Exception as e:
        error = 'Error retrieving yml yaml.safe_load(yml_data) {}'.format(e)
        LOGGER.critical(error)
        sys.exit(error)
    try:
        common_data = config.get('common')
    except Exception as e:
        LOGGER.critical('Error could not retrieve common_data {}'.format(e))
        sys.exit(e)
    env = Environment(autoescape=select_autoescape(enabled_extensions=('html', 'xml', 'jinja2'),
                                                   default_for_string=True),
                      loader=FileSystemLoader(self.directories.get_templates_dir()),
                      lstrip_blocks=True, trim_blocks=True)
    self.output_file_name = pdt.file_name_increase(self.output_file_name,
                                                   self.directories.get_output_dir())
    template = env.get_template(common_data.get('template'))
    if not self.display_only:
        try:
            pdt.list_to_file(template.render(common_data).splitlines(),
                             self.output_file_name,
                             self.directories.get_output_dir())
        except FileNotFoundError as e:
            LOGGER.critical('Can not write output {}'.format(self.directories.get_output_dir()))
            sys.exit(e)
    print(template.render(common_data))
    if self.display_json:
        self.__config_as_json(config)
    if self.display_yml:
        self.__config_as_yml(config)
    if LOGGER.getEffectiveLevel() == logging.DEBUG:
        zip_file_name = pdt.file_name_increase('debug.zip', self.directories.get_output_dir())
        try:
            self.directories.collect_and_zip_files(self.__collect_templates(env), zip_file_name,
                                                   file_extension_list=['jinja2'],
                                                   file_name_list=None)
            self.directories.collect_and_zip_files([self.directories.get_yml_dir(self.yml_file_name)],
                                                   zip_file_name,
                                                   file_extension_list=['yml', 'yaml'],
                                                   file_name_list=None)
            self.directories.collect_and_zip_files([self.directories.get_data_dir()], zip_file_name,
                                                   file_extension_list=None,
                                                   file_name_list=['config.yml'])
            self.directories.collect_and_zip_files([self.directories.get_logging_dir()], zip_file_name,
                                                   file_extension_list=None,
                                                   file_name_list=['logs.txt'])
            self.directories.collect_and_zip_files([self.directories.get_output_dir()], zip_file_name,
                                                   file_extension_list=None,
                                                   file_name_list=[self.output_file_name])
        except Exception as e:
            LOGGER.critical(e)
            self.directories.collect_and_zip_files([self.directories.get_logging_dir()], zip_file_name,
                                                   file_extension_list=None,
                                                   file_name_list=['logs.txt'])
        LOGGER.debug('config data: {}'.format(config))
    if self.package_name:
        self.__create_zip_package(env, self.output_file_name)
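# A minimal, self-contained sketch of the render pattern used in
# __run_template_v1: the 'common' mapping loaded from the YAML file is passed
# directly to Jinja2's Template.render(). The template text and device data
# below are hypothetical, not the project's shipped templates.
from jinja2 import DictLoader, Environment


def render_sketch():
    env = Environment(
        loader=DictLoader({'sketch.jinja2':
                           'hostname {{ devices[0].device[0].devicename }}\n'}),
        lstrip_blocks=True, trim_blocks=True)
    common_data = {
        'template': 'sketch.jinja2',
        'devices': [{'device': [{'devicename': 'RTR01', 'management_ip': '10.0.0.1'}]}],
    }
    # Renders to: 'hostname RTR01\n'
    return env.get_template(common_data.get('template')).render(common_data)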
def server_rest(self):
    """
    Method to send a restful request to build on a remote server
    :return: Nothing yet
    """
    server_config = yaml.safe_load(open(os.path.join(self.directories.get_data_dir(),
                                                     'config.yml'))).get('remote_build_server_config')
    error = 'Missing parameter in remote_build_server_config: {}'.format(server_config)
    if not server_config.get('protocol'):
        LOGGER.critical(error)
        raise EnvironmentError(error)
    elif not server_config.get('server_host'):
        LOGGER.critical(error)
        raise EnvironmentError(error)
    elif not server_config.get('server_api_uri'):
        LOGGER.critical(error)
        raise EnvironmentError(error)
    elif not server_config.get('server_port'):
        LOGGER.critical(error)
        raise EnvironmentError(error)
    server_api_uri = server_config.get('server_api_uri')
    yaml_data = yaml.safe_load(self.yml_data)
    rest_object = ARestMe()
    rest_object.set_server_and_port(server_config.get('protocol'),
                                    server_config.get('server_host'),
                                    server_config.get('server_port'))
    rest_object.set_update_headers('Qct', 'ApiVersion1')
    rest_object.set_update_headers('Qct-Te', __version__)
    if yaml_data.get('remote_build_server_yaml_template'):
        response_data = rest_object.send_post('{server_api_uri}remote_yaml_'
                                              'build'.format(server_api_uri=server_api_uri),
                                              yaml_data)
    else:
        response_data = rest_object.send_post('{server_api_uri}basic_build'.format(
            server_api_uri=server_api_uri), yaml_data)
    if response_data.get('status_code') == 200:
        config = response_data.get('config')
        colorama.init(autoreset=True)
        for line in config.splitlines():
            if 'error' in line:
                print(colorama.Fore.RED + colorama.Style.BRIGHT + line)
            else:
                print(colorama.Fore.GREEN + colorama.Style.BRIGHT + line)
        if self.begin_string or self.include_string:
            self.__get_found_data(config)
        if not self.display_only:
            self.output_file_name = pdt.file_name_increase(self.output_file_name,
                                                           self.directories.get_output_dir())
            try:
                pdt.list_to_file(config.splitlines(), self.output_file_name,
                                 self.directories.get_output_dir())
            except FileNotFoundError as e:
                LOGGER.critical('Can not write output {}'.format(self.directories.get_output_dir()))
                sys.exit(e)
        if self.display_json:
            self.__config_as_json(yaml_data)
        if self.display_yml:
            self.__config_as_yml(yaml_data)
    else:
        print(response_data.get('error'))
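# A hypothetical remote_build_server_config block in the data directory's
# config.yml, covering the four keys server_rest() validates (the host, port,
# and URI values are placeholders):
#
# remote_build_server_config:
#     protocol: https
#     server_host: build.example.com
#     server_port: 443
#     server_api_uri: /api/v1/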
def mroute_to_source(self, mroute_device_type_opt, mroute_file_name, cef_device_type_opt, cef_file_name):
    """
    Method that matches mroutes to source interfaces
    :param mroute_device_type_opt: Device option from device_type_menu_dict
    :param mroute_file_name: Text file of information
    :param cef_device_type_opt: Device option from device_type_menu_dict
    :param cef_file_name: Text file of information
    :return: None
    """
    LOGGER.debug('Starting Method mroute_to_source in {class_type}'.format(
        class_type=type(self)))
    match_count = 0
    stuff_set = set()
    mcast_table_dict = dict()
    cef_table_dict = dict()
    if mroute_device_type_opt == 'IOS':
        mcast_table_dict = self.ios_mroute_to_dict(mroute_file_name)
    elif mroute_device_type_opt == 'NX-OS':
        mcast_table_dict = self.nxos_mroute_to_dict(mroute_file_name)
    elif mroute_device_type_opt == 'IOS-XR':
        mcast_table_dict = self.xr_mroute_to_dict(mroute_file_name)
    else:
        print('Unsupported mroute device type: {}'.format(mroute_device_type_opt))
    if cef_device_type_opt == 'IOS':
        cef_table_dict = self.ios_cef_to_dict(cef_file_name)
    elif cef_device_type_opt == 'NX-OS':
        cef_table_dict = self.nxos_cef_to_dict(cef_file_name)
    elif cef_device_type_opt == 'IOS-XR':
        cef_table_dict = self.xr_cef_to_dict(cef_file_name)
    else:
        print('Unsupported cef device type: {}'.format(cef_device_type_opt))
    mcast_table_dict_copy = dict(mcast_table_dict).copy()
    for mcast_table_dict_key in mcast_table_dict:
        ip_address, cidr_mask = ipv4.ucast_ip_mask(
            mcast_table_dict[mcast_table_dict_key]['SOURCE'])
        all_subnets_shorter_prefix = ipv4.all_subnets_shorter_prefix(ip_address, cidr_mask)
        for cef_table_dict_key in cef_table_dict:
            if cef_table_dict[cef_table_dict_key]['ROUTE'] in all_subnets_shorter_prefix:
                match_count += 1
                stuff_set.add('%s,%s,%s,%s' % (
                    cef_table_dict[cef_table_dict_key]['ROUTE'],
                    mcast_table_dict[mcast_table_dict_key]['SOURCE'],
                    mcast_table_dict[mcast_table_dict_key]['GROUP'],
                    cef_table_dict[cef_table_dict_key]['INTERFACE']))
                del mcast_table_dict_copy[mcast_table_dict_key]
                break
    for mcast_table_dict_copy_key in mcast_table_dict_copy:
        ip_address, cidr_mask = ipv4.ucast_ip_mask(
            mcast_table_dict_copy[mcast_table_dict_copy_key]['SOURCE'])
        all_subnets_shorter_prefix = ipv4.all_subnets_shorter_prefix(ip_address, cidr_mask, True)
        for cef_table_dict_key in cef_table_dict:
            if cef_table_dict[cef_table_dict_key]['ROUTE'] in all_subnets_shorter_prefix:
                match_count += 1
                stuff_set.add('%s,%s,%s,%s' % (
                    cef_table_dict[cef_table_dict_key]['ROUTE'],
                    mcast_table_dict_copy[mcast_table_dict_copy_key]['SOURCE'],
                    mcast_table_dict_copy[mcast_table_dict_copy_key]['GROUP'],
                    cef_table_dict[cef_table_dict_key]['INTERFACE']))
                break
    pdt.list_to_file(stuff_set, 'test.txt', self.OUTPUT_DIR)
    print('MATCH TOTAL: %i' % (match_count,))
    input('PRESS <ENTER> TO CONTINUE')
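# A minimal, self-contained sketch of the matching idea used in
# mroute_to_source, written against the standard-library ipaddress module
# instead of the project's ipv4 helpers: an mroute source address is matched
# to the most specific CEF route that covers it, which yields the interface
# toward that source. The table in the example is hypothetical sample data.
import ipaddress


def match_source_to_cef_sketch(source_ip, cef_table):
    """cef_table maps route strings (e.g. '10.1.0.0/16') to interface names."""
    source = ipaddress.ip_address(source_ip)
    best_route, best_interface = None, None
    for route, interface in cef_table.items():
        network = ipaddress.ip_network(route)
        if source in network and (best_route is None or
                                  network.prefixlen > best_route.prefixlen):
            best_route, best_interface = network, interface
    return best_route, best_interface


# Example: match_source_to_cef_sketch('10.1.2.3', {'10.1.0.0/16': 'Gi0/1',
#                                                   '0.0.0.0/0': 'Gi0/2'})
# returns (IPv4Network('10.1.0.0/16'), 'Gi0/1').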
def convert_acl_to_our_format(
        directories=None, input_file_name=None,  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        output_file_name=None, display_only=False, reset_sequences=False):
    """Function to convert an ACL to a YML format for QuickConfigTemplates
    :param directories:
    :param input_file_name: The input file name
    :param output_file_name: The output file name
    :param display_only: Boolean true = don't output to file
    :param reset_sequences: Set True to recount sequences
    :rtype: None
    :returns: None
    """
    temp_list = list()
    acl_obj = None
    try:
        acls = pdt.file_to_list(input_file_name, directories.get_yml_dir(input_file_name))
        acls = clean_list(acls)
        if len(acls) == 0:
            error = 'No data found in file {}'.format(
                os.path.join(directories.get_yml_dir(input_file_name), input_file_name))
            LOGGER.critical(error)
            sys.exit(error)
    except FileNotFoundError as e:  # pylint: disable=invalid-name
        error = '{error}'.format(error=e)
        LOGGER.critical(error)
        sys.exit(error)
    for line in acls:
        line_split = line.split()
        if directories.get_logging_level() == logging.DEBUG:
            print(line_split)
        if line_split[0] == 'ip':
            try:
                if line_split[2] == 'standard':
                    acl_obj = StandardAclData(line_split[3], reset_sequences)
                elif line_split[2] == 'extended':
                    acl_obj = ExtendedAclData(line_split[3], reset_sequences)
            except IndexError:
                error = 'Cannot find ACL name in this statement "{}"'.format(line)
                LOGGER.error(error)
                sys.exit(error)
        elif line_split[0] == 'permit' or line_split[0] == 'deny':
            if acl_obj:
                acl_obj.set_lines(line)
            else:
                error = 'Cannot find ACL Object'
                LOGGER.error(error)
                sys.exit(error)
        else:
            if acl_obj:
                acl_obj.set_lines(line)
            else:
                error = 'Cannot find ACL Object'
                LOGGER.error(error)
                sys.exit(error)
    temp_list.append('--- # Created from file: {} with acl_create'.format(input_file_name))
    temp_list.append('common:')
    temp_list.append('    template: <replace>')
    temp_list.append('    devices:')
    temp_list.append('    -   device:')
    temp_list.append('        -   devicename: <replace>')
    temp_list.append('            management_ip: <replace>')
    if acl_obj.get_acl_type() == 'standard':
        temp_list.append('            standard_acls:')
        temp_list.append('            -   acl_name: {}'.format(acl_obj.get_name()))
        temp_list.append('                sequences:')
        for line_data in acl_obj.get_lines():
            temp_list.append('                -   sequence: {}'.format(line_data.get('sequence')))
            temp_list.append('                    permit_deny: {}'.format(line_data.get('permit_deny')))
            temp_list.append('                    source_network: {}'.format(line_data.get('source_network')))
    elif acl_obj.get_acl_type() == 'extended':
        temp_list.append('            extended_acls:')
        temp_list.append('            -   acl_name: {}'.format(acl_obj.get_name()))
        temp_list.append('                sequences:')
        for line_data in acl_obj.get_lines():
            temp_list.append('                -   sequence: {}'.format(line_data.get('sequence')))
            temp_list.append('                    permit_deny: {}'.format(line_data.get('permit_deny')))
            temp_list.append('                    protocol: {}'.format(line_data.get('protocol')))
            temp_list.append('                    source_network: {}'.format(line_data.get('source_network')))
            temp_list.append('                    destination_network: {}'.format(line_data.get('destination_network')))
    if not display_only:
        file_name = pdt.file_name_increase(output_file_name, directories.get_output_dir())
        pdt.list_to_file(temp_list, file_name, directories.get_output_dir())
        output_notify = 'Filename: {} output to directory {}'.format(file_name,
                                                                     directories.get_output_dir())
        print(output_notify)
        LOGGER.debug(output_notify)
    for final_yml in temp_list:
        print(final_yml)
    if directories.get_logging_level() == logging.DEBUG:
        print(acl_obj)
        for line in acl_obj.get_lines():
            print(line)
def convert_prefix_list_to_our_format(
        directories=None, input_file_name=None,  # pylint: disable=too-many-locals
        output_file_name=None, display_only=False, reset_sequences=False):
    """
    Function to convert a Prefix-List to a YML format for QuickConfigTemplates
    :param directories:
    :param input_file_name: The input file name
    :param output_file_name: The output file name
    :param display_only: Boolean true = don't output to file
    :param reset_sequences: Set True to recount sequences
    :return: None
    """
    temp_list = list()
    pl_obj = None
    try:
        prefix_lists = pdt.file_to_list(input_file_name,
                                        directories.get_yml_dir(input_file_name))
        prefix_lists = clean_list(prefix_lists)
        if len(prefix_lists) == 0:
            error = 'No data found in file {}'.format(
                os.path.join(directories.get_yml_dir(input_file_name), input_file_name))
            LOGGER.critical(error)
            sys.exit(error)
    except FileNotFoundError as e:  # pylint: disable=invalid-name
        error = '{error}'.format(error=e)
        LOGGER.critical(error)
        sys.exit(error)
    for line in prefix_lists:
        line_split = line.split()
        if not pl_obj:
            pl_obj = PrefixListData(line_split[2], reset_sequences=reset_sequences)
            pl_obj.set_lines(line)
        else:
            pl_obj.set_lines(line)
    temp_list.append('--- # Created from file: {} with pl_create'.format(input_file_name))
    temp_list.append('common:')
    temp_list.append('    template: <replace>')
    temp_list.append('    devices:')
    temp_list.append('    -   device:')
    temp_list.append('        -   devicename: <replace>')
    temp_list.append('            management_ip: <replace>')
    temp_list.append('            prefix_lists:')
    temp_list.append('            -   prefix_list_name: {}'.format(pl_obj.get_name()))
    temp_list.append('                sequences:')
    for seq_dict in pl_obj.get_lines():
        temp_list.append('                -   sequence: {}'.format(seq_dict.get('sequence')))
        temp_list.append('                    permit_deny: {}'.format(seq_dict.get('permit_deny')))
        temp_list.append('                    network: {}'.format(seq_dict.get('network')))
        if seq_dict.get('le_ge'):
            temp_list.append('                    le_ge: {}'.format(seq_dict.get('le_ge')))
            temp_list.append('                    le_ge_value: {}'.format(seq_dict.get('le_ge_value')))
    if not display_only:
        file_name = pdt.file_name_increase(output_file_name, directories.get_output_dir())
        pdt.list_to_file(temp_list, file_name, directories.get_output_dir())
        output_notify = 'Filename: {} output to directory {}'.format(file_name,
                                                                     directories.get_output_dir())
        print(output_notify)
        LOGGER.debug(output_notify)
    for final_yml in temp_list:
        print(final_yml)
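# A hypothetical input file for convert_prefix_list_to_our_format. The
# prefix-list name is taken from the third token of the first line, and every
# line is handed to PrefixListData.set_lines().
#
# ip prefix-list PL-EXAMPLE seq 10 permit 10.0.0.0/8 le 24
# ip prefix-list PL-EXAMPLE seq 20 permit 172.16.0.0/12 ge 16 le 24
# ip prefix-list PL-EXAMPLE seq 30 deny 0.0.0.0/0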
def __output_text_files(self):
    LOGGER.debug('Starting method __output_text_files in class {class_obj}'.format(
        class_obj=type(self)))
    self.__split_standard_acls()
    for key in self.standard_acls:
        pdt.list_to_file(self.standard_acls[key],
                         'STANDARD-ACL-{name}.txt'.format(name=key),
                         os.path.join(self.output_dir, self.hostname))
    self.__split_extended_acls()
    for key in self.extended_acls:
        pdt.list_to_file(self.extended_acls[key],
                         'EXTENDED-ACL-{name}.txt'.format(name=key),
                         os.path.join(self.output_dir, self.hostname))
    if not self.standard_acls and not self.extended_acls:
        self.__split_nxos_acls()
        for key in self.nxos_acls:
            pdt.list_to_file(self.nxos_acls[key],
                             'ACL-{name}.txt'.format(name=key),
                             os.path.join(self.output_dir, self.hostname))
        if not self.nxos_acls:
            self.__split_iosxr_acls()
            for key in self.iosxr_acls:
                pdt.list_to_file(self.iosxr_acls[key],
                                 'ACL-{name}.txt'.format(name=key),
                                 os.path.join(self.output_dir, self.hostname))
    self.__split_interfaces()
    if self.interfaces:
        pdt.list_to_file(self.interfaces, 'INTERFACES.txt',
                         os.path.join(self.output_dir, self.hostname))
    self.__split_prefix_lists()
    for key in self.prefix_lists:
        pdt.list_to_file(self.prefix_lists[key],
                         'PREFIX-LIST-{name}.txt'.format(name=key),
                         os.path.join(self.output_dir, self.hostname))
    if not self.prefix_lists:
        self.__split_prefix_sets()
        for key in self.prefix_lists:
            pdt.list_to_file(self.prefix_lists[key],
                             'PREFIX-SET-{name}.txt'.format(name=key),
                             os.path.join(self.output_dir, self.hostname))
    self.__split_route_maps()
    for key in self.route_maps:
        pdt.list_to_file(self.route_maps[key],
                         'ROUTE-MAP-{name}.txt'.format(name=key),
                         os.path.join(self.output_dir, self.hostname))
    if not self.route_maps:
        self.__split_route_policies()
        for key in self.route_maps:
            pdt.list_to_file(self.route_maps[key],
                             'ROUTE-POLICY-{name}.txt'.format(name=key),
                             os.path.join(self.output_dir, self.hostname))
    self.__split_standard_community_lists()
    for key in self.standard_community_lists:
        pdt.list_to_file(self.standard_community_lists[key],
                         'STANDARD-CL-{name}.txt'.format(name=key),
                         os.path.join(self.output_dir, self.hostname))
    if not self.standard_community_lists:
        self.__split_community_sets()
        for key in self.standard_community_lists:
            pdt.list_to_file(self.standard_community_lists[key],
                             'CS-{name}.txt'.format(name=key),
                             os.path.join(self.output_dir, self.hostname))
def convert_route_map_to_our_format(directories=None, input_file_name=None, output_file_name=None,
                                    display_only=False, reset_sequences=False):
    """
    Function to convert a Route-Map to a YML format for QuickConfigTemplates
    :param directories:
    :param input_file_name: The input file name
    :param output_file_name: The output file name
    :param display_only: Boolean true = don't output to file
    :param reset_sequences: Set True to recount sequences
    :return: None
    """
    temp_list = list()
    rmap_obj = None
    try:
        route_maps = pdt.file_to_list(input_file_name, directories.get_yml_dir(input_file_name))
        route_maps = clean_list(route_maps)
        if len(route_maps) == 0:
            error = 'No data found in file {}'.format(
                os.path.join(directories.get_yml_dir(input_file_name), input_file_name))
            LOGGER.critical(error)
            sys.exit(error)
    except FileNotFoundError as e:
        error = '{error}'.format(error=e)
        LOGGER.critical(error)
        sys.exit(error)
    for rm_line in route_maps:
        rm_line_split = pdt.remove_extra_spaces(rm_line).split()
        if rm_line_split[0] == 'route-map':
            try:
                if not rmap_obj:
                    rmap_obj = RouteMapData(rm_line_split[1], reset_sequences)
                    rmap_obj.set_sequence_info(rm_line_split[3], rm_line_split[2])
                elif rmap_obj:
                    rmap_obj.set_new_sequence()
                    rmap_obj.set_sequence_info(rm_line_split[3], rm_line_split[2])
            except IndexError:
                error = 'Cannot find Route-Map name in this statement "{}"'.format(rm_line)
                LOGGER.error(error)
                print(error)
                error = 'Your data in file {} does not all seem to be a ' \
                        'Route-Map'.format(os.path.join(directories.get_yml_dir(input_file_name),
                                                        input_file_name))
                LOGGER.critical(error)
                sys.exit(error)
        elif rm_line_split[0] == 'description':
            rmap_obj.set_description(rm_line)
        elif rm_line_split[0] == 'match':
            rmap_obj.set_matches(rm_line)
        elif rm_line_split[0] == 'set':
            rmap_obj.set_sets(rm_line)
    if not rmap_obj:
        error = 'Your data in file {} does not all seem to be a ' \
                'Route-Map'.format(os.path.join(directories.get_yml_dir(input_file_name),
                                                input_file_name))
        LOGGER.critical(error)
        sys.exit(error)
    else:
        rmap_obj.set_new_sequence()
    temp_list.append('--- # Created from file: {} with rm_create'.format(input_file_name))
    temp_list.append('common:')
    temp_list.append('    template: <replace>')
    temp_list.append('    devices:')
    temp_list.append('    -   device:')
    temp_list.append('        -   devicename: <replace>')
    temp_list.append('            management_ip: <replace>')
    temp_list.append('            route_maps:')
    temp_list.append('            -   route_map_name: {}'.format(rmap_obj.get_name()))
    temp_list.append('                sequences:')
    for line in rmap_obj.get_sequences():
        temp_list.append('                -   sequence: {}'.format(line.get('sequence')))
        if line.get('description'):
            temp_list.append('                    description: {}'.format(line.get('description')))
        temp_list.append('                    permit_deny: {}'.format(line.get('permit_deny')))
        if line.get('match'):
            temp_list.append('                    match:')
            for enum, match_line in enumerate(line.get('match')):
                temp_list.append('                    -   match_item: {}'.format(
                    match_line.get('match_item')))
                temp_list.append('                        match_item_name: {}'.format(
                    match_line.get('match_item_name')))
        if line.get('set'):
            temp_list.append('                    set:')
            for enum, set_line in enumerate(line.get('set')):
                temp_list.append('                    -   set_item: {}'.format(
                    set_line.get('set_item')))
                temp_list.append('                        set_item_to: {}'.format(
                    set_line.get('set_item_to')))
    if not display_only:
        file_name = pdt.file_name_increase(output_file_name, directories.get_output_dir())
        pdt.list_to_file(temp_list, file_name, directories.get_output_dir())
        output_notify = 'Filename: {} output to directory {}'.format(file_name,
                                                                     directories.get_output_dir())
        print(output_notify)
        LOGGER.debug(output_notify)
    for final_yml in temp_list:
        print(final_yml)
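# A hypothetical input file for convert_route_map_to_our_format. Each
# 'route-map' line starts a new sequence (name, permit/deny, sequence number),
# and the description/match/set lines that follow are attached to that
# sequence.
#
# route-map RM-EXAMPLE permit 10
#  description Prefer customer routes
#  match ip address prefix-list PL-EXAMPLE
#  set local-preference 200
# route-map RM-EXAMPLE deny 20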