def process_sdo(arguments):
    """Process SDOs.

    Calls populate script which calls script to parse all the modules on the
    given path which is one of the params. Populate script will also send the
    request to populate confd on given ip and port. It will also copy all the
    modules to parent directory of this project /api/sdo. It will also call
    indexing script to update searching.

    Arguments:
        :param arguments: (list) list of arguments sent from api sender
        :return (__response_type) one of the response types which is either
            'Failed' or 'Finished successfully'
    """
    LOGGER.debug('Processing sdo')
    # The sender appends four extra items after the populate.py command line;
    # the fourth-from-last tells us whether a tree directory was created.
    tree_created = arguments[-4] == 'True'
    arguments = arguments[:-4]
    # First three components of the --dir argument identify the temporary
    # working directory of this run.
    direc = '/'.join(arguments[6].split('/')[0:3])
    arguments.append("--result-html-dir")
    arguments.append(result_dir)
    arguments.append('--api-port')
    arguments.append(repr(api_port))
    arguments.append('--api-protocol')
    arguments.append(api_protocol)
    arguments.append('--save-file-dir')
    arguments.append(save_file_dir)
    # BUGFIX: mode was "wr", which is not a valid open() mode string; plain
    # write mode is what is needed to capture the subprocess stderr.
    with open("log.txt", "w") as f:
        try:
            subprocess.check_call(arguments, stderr=f)
        except subprocess.CalledProcessError as e:
            # Parsing/populating failed - remove the temporary directory and
            # report the failure back to the sender.
            shutil.rmtree(direc)
            LOGGER.error('Server error: {}'.format(e))
            return __response_type[0] + '#split#Server error while parsing or populating data'
    try:
        os.makedirs(get_curr_dir(__file__) + '/../../api/sdo/')
    except OSError as e:
        # be happy if someone already created the path
        if e.errno != errno.EEXIST:
            return __response_type[0] + '#split#Server error - could not create directory'
    if tree_created:
        subprocess.call(["cp", "-r", direc + "/temp/.",
                         get_curr_dir(__file__) + "/../../api/sdo/"])
        with open('../parseAndPopulate/' + direc + '/prepare.json', 'r') as f:
            global all_modules
            all_modules = json.load(f)
        if notify_indexing:
            # NOTE(review): arguments[11]/[12] are presumably the credentials
            # from the populate command line -- confirm against the sender.
            send_to_indexing(yangcatalog_api_prefix, direc + '/prepare.json',
                             [arguments[11], arguments[12]], sdo_type=True,
                             force_indexing=False)
    return __response_type[1]
def process_vendor(arguments):
    """Process vendors.

    Calls populate script which calls script to parse all the modules that
    are contained in the given hello message xml file or in ietf-yang-module
    xml file which is one of the params. Populate script will also send the
    request to populate confd on given ip and port. It will also copy all the
    modules to parent directory of this project /api/sdo. It will also call
    indexing script to update searching.

    Arguments:
        :param arguments: (list) list of arguments sent from api sender
        :return (__response_type) one of the response types which is either
            'Failed' or 'Finished successfully'
    """
    LOGGER.debug('Processing vendor')
    # The sender appends five extra items after the populate.py command line.
    tree_created = arguments[-5] == 'True'
    integrity_file_location = arguments[-4]
    arguments = arguments[:-5]
    # First three components of the --dir argument identify the temporary
    # working directory of this run.
    direc = '/'.join(arguments[5].split('/')[0:3])
    arguments.append("--result-html-dir")
    arguments.append(result_dir)
    arguments.append('--save-file-dir')
    arguments.append(save_file_dir)
    # BUGFIX: mode was "wr", which is not a valid open() mode string; plain
    # write mode is what is needed to capture the subprocess stderr.
    with open("log.txt", "w") as f:
        try:
            subprocess.check_call(arguments, stderr=f)
        except subprocess.CalledProcessError as e:
            # Parsing/populating failed - remove the temporary directory and
            # report the failure back to the sender.
            shutil.rmtree(direc)
            LOGGER.error('Server error: {}'.format(e))
            return __response_type[0] + '#split#Server error while parsing or populating data'
    try:
        os.makedirs(get_curr_dir(__file__) + '/../../api/vendor/')
    except OSError as e:
        # be happy if someone already created the path
        if e.errno != errno.EEXIST:
            LOGGER.error('Server error: {}'.format(e))
            return __response_type[0] + '#split#Server error - could not create directory'
    subprocess.call(["cp", "-r", direc + "/temp/.",
                     get_curr_dir(__file__) + "/../../api/vendor/"])
    if tree_created:
        with open('../parseAndPopulate/' + direc + '/prepare.json', 'r') as f:
            global all_modules
            all_modules = json.load(f)
        if notify_indexing:
            # NOTE(review): arguments[9]/[10] are presumably the credentials
            # from the populate command line -- confirm against the sender.
            send_to_indexing(yangcatalog_api_prefix, direc + '/prepare.json',
                             [arguments[9], arguments[10]])
    # BUGFIX: the format string used "%m" (month) twice; minutes are "%M".
    integrity_file_name = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + 'Z'
    if integrity_file_location != './':
        shutil.move('./integrity.html',
                    integrity_file_location + 'integrity' + integrity_file_name + '.html')
    return __response_type[1]
def __find_file(self, name, revision='*'):
    """Locate the YANG file for *name* (optionally at *revision*).

    Searches the directory containing ``self.__path`` first and falls
    back to the repository root two levels above this script. Returns
    the path of the first match, or None when nothing is found.
    """
    plain_name = name + '.yang'
    revision_name = name + '@' + revision + '.yang'
    containing_dir = '/'.join(self.__path.split('/')[0:-1])
    found = find_first_file(containing_dir, plain_name, revision_name)
    if found is None:
        fallback_dir = get_curr_dir(__file__) + '/../../.'
        found = find_first_file(fallback_dir, plain_name, revision_name)
    return found
def dumps(file):
    """Write the Yangcatalog statistics report as a single HTML page.

    :param file: a writable file-like object; receives the full HTML
        document built from the class-level counters on ``Statistics``.
    """
    # Document header with the generation date.
    file.write('<!DOCTYPE html><html><body> <ul>'
               '<li>Generated on {}</li>'
               '</ul><h1>Yangcatalog statistics</h1>'
               .format(time.strftime("%d/%m/%y")))
    # Modules found on disk that no hello message references.
    file.write('<h3>YANG modules in directory but not present in any NETCONF hello message in that directory:</h3>')
    for key in Statistics.useless_modules:
        if len(Statistics.useless_modules[key]) > 0:
            file.write('<h5>' + key + ':</h5>')
            # Only the file name (last path component) is reported.
            file.write('<p>' + ', '.join([value.split('/')[-1] for value in Statistics.useless_modules[key]]) + '</p>')
    # Modules referenced by hello messages but missing from disk.
    file.write('<h3>YANG modules in NETCONF hello messages for a directory but the YANG modules is not present'
               + ' in that directory:</h3>')
    for key in Statistics.missing_modules:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([value.split('/')[-1] for value in Statistics.missing_modules[key]]) + '</p>')
    # Modules whose submodules could not be found.
    file.write('<h3>YANG modules in NETCONF hello messages for a directory but their'
               + ' submodules are missing:</h3>')
    for key in Statistics.missing_submodules:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([value.split('/')[-1] for value in Statistics.missing_submodules[key]]) + '</p>')
    # Modules with no revision date.
    file.write('<h3>YANG modules in NETCONF hello messages for a directory but their'
               + ' revision date is missing:</h3>')
    for key in Statistics.missing_revision:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([value.split('/')[-1] for value in Statistics.missing_revision[key]]) + '</p>')
    # Modules with a wrong or missing namespace; values are written one
    # per paragraph rather than joined, since each entry may be a tuple.
    file.write('<h3>YANG modules in NETCONF hello messages for a directory but their'
               + ' namespace is wrong or missing:</h3>')
    for key in Statistics.missing_wrong_namespaces:
        file.write('<h5>' + key + ':</h5>')
        for value in Statistics.missing_wrong_namespaces[key]:
            file.write('<p>' + str(value) + '</p>')
    # Vendor folders that contain .yang files but no hello message,
    # excluding incompatible trees and MIB translations.
    missing = []
    my_files = find_missing_hello(get_curr_dir(__file__) + '/../../vendor/', '*.yang')
    for name in set(my_files):
        if '.incompatible' not in name and 'MIBS' not in name:
            missing.append(name)
    # Strip the absolute repository prefix so the report shows relative paths.
    missing = ', '.join(missing).replace(get_curr_dir(__file__) + '/../..', '')
    file.write('<h3>Folders with yang files but missing hello message inside of file:</h3><p>' + missing + '</p>')
    file.write('</body></html>')
def parse_imp_inc(self, modules, set_of_names, is_include, schema_part,
                  capabilities, netconf_version):
    """Recursively parse the imports/includes of a vendor module.

    For every referenced module not yet seen, locate its .yang file,
    parse it, attach the vendor metadata of the current capability, and
    recurse into that module's own submodules and imports.

    :param modules: iterable of pyang statements (includes expose .name,
        imports expose .arg -- per the two branches below)
    :param set_of_names: (set) names already processed; mutated in place
        to guarantee each module is parsed only once
    :param is_include: (bool) True when *modules* are includes
    :param schema_part: schema URL fragment forwarded to parse_all
    :param capabilities: capabilities forwarded to add_vendor_information
    :param netconf_version: forwarded to add_vendor_information
    """
    for mod in modules:
        if is_include:
            # NOTE(review): includes get conformance 'import' while imports
            # get None -- looks inverted; confirm against the data model.
            name = mod.name
            conformance_type = 'import'
        else:
            conformance_type = None
            name = mod.arg
        if name not in set_of_names:
            LOGGER.info('Parsing module {}'.format(name))
            set_of_names.add(name)
            # Look next to the current hello-message directory first,
            # then fall back to the repository root.
            yang_file = find_first_file('/'.join(self.split[0:-1]),
                                        name + '.yang', name + '@*.yang')
            if yang_file is None:
                yang_file = find_first_file(
                    get_curr_dir(__file__) + '/../../.', name + '.yang',
                    name + '@*.yang')
            if yang_file is None:
                # TODO add integrity that this file is missing
                return
            try:
                yang = Modules(yang_file, self.html_result_dir,
                               self.parsed_jsons, self.json_dir,
                               is_vendor_imp_inc=True,
                               run_integrity=self.run_integrity)
                yang.parse_all(name,
                               self.prepare.name_revision_organization,
                               schema_part, self.to)
                yang.add_vendor_information(
                    self.vendor, self.platform_data, self.software_version,
                    self.os_version, self.feature_set, self.os,
                    conformance_type, capabilities, netconf_version,
                    self.integrity_checker, self.split)
                if self.run_integrity:
                    yang.resolve_integrity(self.integrity_checker,
                                           self.split, self.os_version)
                self.prepare.add_key_sdo_module(yang)
                # Recurse: submodules are includes, imports are not.
                self.parse_imp_inc(yang.submodule, set_of_names, True,
                                   schema_part, capabilities, netconf_version)
                self.parse_imp_inc(yang.imports, set_of_names, False,
                                   schema_part, capabilities, netconf_version)
            except FileError:
                # Record the missing file against the current directory.
                self.integrity_checker.add_module('/'.join(self.split), [name])
                LOGGER.warning(
                    'File {} not found in the repository'.format(name))
def is_transational(rows, output):
    """Heuristically decide whether a -state module is a transitional copy.

    A module qualifies when its pyang tree is read-only, its name ends with
    '-state', a corresponding NMDA (non-state) module file exists, and every
    read-only leaf in this tree also appears in the NMDA module's tree.

    :param rows: (list) lines of the pyang tree output, header excluded
    :param output: (str) full pyang tree output; first line holds the
        'module: <name>' header
    :return: (bool) True when the module looks transitional
    """
    if output.split('\n')[0].endswith('-state'):
        # Any writable node disqualifies a -state module immediately.
        if '+--rw' in output:
            return False
        # Derive the base module name by stripping the '-state' suffix
        # from the header line ('module: <name>').
        name_of_module = output.split('\n')[0].split(': ')[1]
        name_of_module = name_of_module.split('-state')[0]
        coresponding_nmda_file = self.__find_file(name_of_module)
        if coresponding_nmda_file:
            # Generate the tree of the NMDA counterpart for comparison.
            arguments = [
                "pyang", "-p",
                get_curr_dir(__file__) + "/../../.", "-f", "tree",
                coresponding_nmda_file
            ]
            pyang = subprocess.Popen(arguments,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
            stdout, stderr = pyang.communicate()
            pyang_list_of_rows = stdout.split('\n')[1:]
            if 'error' in stderr and 'is not found' in stderr:
                return False
            elif stdout == '':
                return False
            for x in range(0, len(rows)):
                # Skip obsolete (o--) and deprecated (x--) nodes.
                if 'x--' in rows[x] or 'o--' in rows[x]:
                    continue
                # Blank row terminates the tree body.
                if rows[x].strip(' ') == '':
                    break
                # Top-level node (indent of exactly 4 before '+--')
                # must not itself be a -state container.
                if len(rows[x].split('+--')[0]) == 4:
                    if '-state' in rows[x]:
                        return False
                # Augment line: the first path component must not
                # target a -state tree.
                if len(rows[x].split('augment')[0]) == 2:
                    part = rows[x].strip(' ').split('/')[1]
                    if '-state' in part:
                        return False
                if '+--ro ' in rows[x]:
                    # Extract the leaf name (strip type column and '?').
                    leaf = \
                        rows[x].split('+--ro ')[1].split(' ')[0].split(
                            '?')[0]
                    # Every read-only leaf must exist somewhere in the
                    # NMDA module's tree as well.
                    dataExist = False
                    for y in range(0, len(pyang_list_of_rows)):
                        if leaf in pyang_list_of_rows[y]:
                            dataExist = True
                    if not dataExist:
                        return False
            return True
        else:
            return False
    else:
        return False
def __find_file(self, name, revision='*', submodule=False,
                normal_search=True):
    """Locate the YANG file for *name* (optionally at *revision*).

    Looks in the directory of ``self.__path`` first; on a miss, the
    module (or submodule, per *submodule*) is recorded as missing when
    *normal_search* is set, and the repository root is searched as a
    fallback. Returns the first match or None.
    """
    plain_name = name + '.yang'
    revision_name = name + '@' + revision + '.yang'
    containing_dir = '/'.join(self.__path.split('/')[0:-1])
    found = find_first_file(containing_dir, plain_name, revision_name)
    if found is None:
        if normal_search:
            # Track the miss in the appropriate bucket for reporting.
            bucket = (self.__missing_submodules if submodule
                      else self.__missing_modules)
            bucket.append(name)
        found = find_first_file(get_curr_dir(__file__) + '/../../.',
                                plain_name, revision_name)
    return found
def query_create(question):
    """Ask a path like question via raw_input() and return their answer.

    "question" is a string that is presented to the user.
    The return value is path that should be added to database.
    Loops until the user supplies an existing path under the repository
    root, or agrees to create the entered path.
    """
    while True:
        sys.stdout.write(question)
        choice = raw_input().lower()
        # Normalize: strip a single leading and trailing slash.
        if choice.startswith('/'):
            choice = choice[1:]
        if choice.endswith('/'):
            choice = choice[:-1]
        # Compute the parent path, used only in the error message below.
        if choice == '/':
            choice_without_last = '/'
        else:
            if len(choice.split('/')) > 1:
                choice_without_last = '/'.join(choice.split('/')[:-1])
            else:
                choice_without_last = choice
        # Accept immediately when the path exists under the repo root.
        if os.path.isdir(get_curr_dir(__file__) + '/../../' + choice):
            return choice
        else:
            # NOTE(review): the message shows the parent, not the full
            # entered path -- confirm this is intentional.
            print ('Path ' + choice_without_last + ' does not exist.')
            create = query_yes_no('would you like to create path ' + choice)
            if create:
                try:
                    # NOTE(review): creates `choice` relative to the CWD,
                    # while the existence check above is relative to the
                    # repository root -- verify these agree for callers.
                    os.makedirs(choice)
                except OSError as e:
                    # be happy if someone already created the path
                    if e.errno != errno.EEXIST:
                        raise
                return choice
parser.add_argument('--api-ip', default='yangcatalog.org', type=str, help='Set ip address where the api is started. Default -> yangcatalog.org') args = parser.parse_args() start = time.time() index = 1 integrity = None sdo = args.sdo search_dirs = [args.dir] if sdo: stats_list = {'sdo': search_dirs} else: stats_list = {'vendor': search_dirs} if args.run_integrity: stats_list = {'vendor': [get_curr_dir(__file__) + '/../../vendor/cisco']} LOGGER.info('Starting to iterate through files') for key in stats_list: search_dirs = stats_list[key] if key == 'sdo': sdo = True prepare_sdo = prepare.Prepare("prepare", args.result_html_dir, args.api_port, args.api_ip, args.api_protocol) for search_dir in search_dirs: LOGGER.info('Found directory for sdo {}'.format(search_dir)) integrity = statistics.Statistics(search_dir) capability = cap.Capability(search_dir, index, prepare_sdo, integrity, args.api, sdo,
def dumps(file):
    """Write the Yangcatalog statistics report as a single HTML page.

    :param file: a writable file-like object; receives the full HTML
        document built from the class-level counters on ``Statistics``.
    """
    # Document header with the generation date.
    file.write('<!DOCTYPE html><html><body> <ul>'
               '<li>Generated on {}</li>'
               '</ul><h1>Yangcatalog statistics</h1>'.format(
                   time.strftime("%d/%m/%y")))
    # Modules found on disk that no hello message references.
    file.write(
        '<h3>YANG modules in directory but not present in any NETCONF hello message in that directory:</h3>'
    )
    for key in Statistics.useless_modules:
        if len(Statistics.useless_modules[key]) > 0:
            file.write('<h5>' + key + ':</h5>')
            # Only the file name (last path component) is reported.
            file.write('<p>' + ', '.join([
                value.split('/')[-1]
                for value in Statistics.useless_modules[key]
            ]) + '</p>')
    # Modules referenced by hello messages but missing from disk.
    file.write(
        '<h3>YANG modules in NETCONF hello messages for a directory but the YANG modules is not present'
        + ' in that directory:</h3>')
    for key in Statistics.missing_modules:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([
            value.split('/')[-1]
            for value in Statistics.missing_modules[key]
        ]) + '</p>')
    # Modules whose submodules could not be found.
    file.write(
        '<h3>YANG modules in NETCONF hello messages for a directory but their'
        + ' submodules are missing:</h3>')
    for key in Statistics.missing_submodules:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([
            value.split('/')[-1]
            for value in Statistics.missing_submodules[key]
        ]) + '</p>')
    # Modules with no revision date.
    file.write(
        '<h3>YANG modules in NETCONF hello messages for a directory but their'
        + ' revision date is missing:</h3>')
    for key in Statistics.missing_revision:
        file.write('<h5>' + key + ':</h5>')
        file.write('<p>' + ', '.join([
            value.split('/')[-1]
            for value in Statistics.missing_revision[key]
        ]) + '</p>')
    # Modules with a wrong or missing namespace; each entry is written
    # in its own paragraph since values may be tuples.
    file.write(
        '<h3>YANG modules in NETCONF hello messages for a directory but their'
        + ' namespace is wrong or missing:</h3>')
    for key in Statistics.missing_wrong_namespaces:
        file.write('<h5>' + key + ':</h5>')
        for value in Statistics.missing_wrong_namespaces[key]:
            file.write('<p>' + str(value) + '</p>')
    # Vendor folders that contain .yang files but no hello message,
    # excluding incompatible trees and MIB translations.
    missing = []
    my_files = find_missing_hello(
        get_curr_dir(__file__) + '/../../vendor/', '*.yang')
    for name in set(my_files):
        if '.incompatible' not in name and 'MIBS' not in name:
            missing.append(name)
    # Strip the absolute repository prefix so the report shows relative paths.
    missing = ', '.join(missing).replace(
        get_curr_dir(__file__) + '/../..', '')
    file.write(
        '<h3>Folders with yang files but missing hello message inside of file:</h3><p>'
        + missing + '</p>')
    file.write('</body></html>')
suffix = 'api' yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol, args.api_ip, separator, suffix) start = time.time() index = 1 integrity = None sdo = args.sdo search_dirs = [args.dir] if sdo: stats_list = {'sdo': search_dirs} else: stats_list = {'vendor': search_dirs} if args.run_integrity: stats_list = {'vendor': [get_curr_dir(__file__) + '/../../vendor/cisco']} LOGGER.info('Starting to iterate through files') for key in stats_list: search_dirs = stats_list[key] if key == 'sdo': sdo = True prepare_sdo = prepare.Prepare("prepare", yangcatalog_api_prefix) for search_dir in search_dirs: LOGGER.info('Found directory for sdo {}'.format(search_dir)) integrity = statistics.Statistics(search_dir) capability = cap.Capability(search_dir, index, prepare_sdo, integrity, args.api, sdo, args.json_dir, args.result_html_dir, args.save_file_dir)
'credentials').split(' ') result_html_dir = config.get('DraftPullLocal-Section', 'result-html-dir') protocol = config.get('DraftPullLocal-Section', 'protocol') notify = config.get('DraftPullLocal-Section', 'notify-index') save_file_dir = config.get('DraftPullLocal-Section', 'save-file-dir') LOGGER.info( 'Loading all files from http://www.claise.be/IETFYANGDraft.json') ietf_draft_json = load_json_from_url( 'http://www.claise.be/IETFYANGDraft.json') response = urllib.urlretrieve('http://www.claise.be/YANG-RFC.tar', './rfc.tar') tar = tarfile.open('./rfc.tar') tar.extractall(get_curr_dir(__file__) + '/../../standard/ietf/RFC') tar.close() os.remove('./rfc.tar') check_name_no_revision_exist( get_curr_dir(__file__) + '/../../standard/ietf/RFC/') check_early_revisions(get_curr_dir(__file__) + '/../../standard/ietf/RFC/') with open("log.txt", "wr") as f: try: LOGGER.info('Calling populate script') arguments = [ "python", "../parseAndPopulate/populate.py", "--sdo", "--port", confd_port, "--ip", confd_ip, "--api-protocol", protocol, "--api-port", api_port, "--api-ip", api_ip, "--dir", get_curr_dir(__file__) + "/../../standard/ietf/RFC", "--result-html-dir", result_html_dir, "--credentials", credentials[0], credentials[1], "--save-file-dir",
dependants[impName].add(key) for inc in yangInclude: incName = inc.arg incRev = None for sub in inc.substmts: if sub.keyword == 'revision-date': incRev = sub.arg if incRev: name_rev = '{}@{}'.format(incName, incRev) dependencies[key].add(name_rev) if name_rev not in dependants: dependants[name_rev] = set() dependants[name_rev].add(key) else: dependencies[key].add(incName) if incName not in dependants: dependants[incName] = set() dependants[incName].add(key) except: pass if __name__ == "__main__": dependencies = {} dependants = {} for dir in [get_curr_dir(__file__) + '/../../experimental', get_curr_dir(__file__) + '/../../standard', get_curr_dir(__file__) + '/../../vendor']: search_dependencies(dir) pass
except OSError as e: # be happy if someone already created the path if e.errno != errno.EEXIST: raise tar.extractall(repo.localdir + '/standard/ietf/RFCtemp') tar.close() diff_files = [] new_files = [] check_name_no_revision_exist(repo.localdir + '/standard/ietf/RFCtemp/') check_early_revisions(repo.localdir + '/standard/ietf/RFCtemp/') for root, subdirs, sdos in os.walk(repo.localdir + '/standard/ietf/RFCtemp'): for file_name in sdos: if '.yang' in file_name: if os.path.exists( get_curr_dir(__file__) + '/../../standard/ietf/RFC/' + file_name): same = filecmp.cmp( get_curr_dir(__file__) + '/../../standard/ietf/RFC/' + file_name, root + '/' + file_name) if not same: diff_files.append(file_name) else: new_files.append(file_name) shutil.rmtree(repo.localdir + '/standard/ietf/RFCtemp') os.remove(repo.localdir + '/tools/ietfYangDraftPull/rfc.tar') if len(new_files) > 0 or len(diff_files) > 0: LOGGER.warning('new or modified RFC files found. Sending an E-mail') mf = messageFactory.MessageFactory() mf.send_new_rfc_message(new_files, diff_files)
ver += 1 upgraded_version = '{}.{}.{}'.format(ver, 0, 0) module[ 'derived-semantic-version'] = upgraded_version new_modules.append(module) continue else: schema2 = '{}{}@{}.yang'.format( args.save_file_dir, modules[-2]['name'], modules[-2]['revision']) schema1 = '{}{}@{}.yang'.format( args.save_file_dir, modules[-1]['name'], modules[-1]['revision']) arguments = [ 'pyang', '-P', get_curr_dir(__file__) + '/../../.', '-p', get_curr_dir(__file__) + '/../../.', schema1, '--check-update-from', schema2 ] pyang = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = pyang.communicate() if stderr == '': arguments = [ "pyang", '-p', get_curr_dir(__file__) + '/../../.', "-f", "tree", schema1 ] pyang = subprocess.Popen( arguments,
if is_uwsgi == 'True': separator = '/' suffix = 'api' yangcatalog_api_prefix = '{}://{}{}{}/'.format(protocol, api_ip, separator, suffix) path = yangcatalog_api_prefix + 'search/vendors/vendor/cisco' res = requests.get(path, auth=(auth[0], auth[1]), headers={'Accept': 'application/json'}) vendors_data = json.loads(res.content) xr = [] nx = [] xe = [] xr_versions = sorted( next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/xr'))[1]) nx_versions = sorted( next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/nx'))[1]) xe_versions = sorted( next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/xe'))[1]) xr_values = [] nx_values = [] xe_values = [] for vendor in vendors_data['yang-catalog:vendor']['platforms']['platform']: platform_name = vendor['name'] os_type = vendor['software-versions']['software-version'][0]['software-flavors']['software-flavor'][0]\ ['modules']['module'][0]['os-type'] if 'IOS-XR' == os_type: xr.append(platform_name) if 'IOS-XE' == os_type: xe.append(platform_name)
if modules[-2]['compilation'] != 'passed': versions = modules[-2]['semver'].split('.') ver = int(versions[0]) ver += 1 upgraded_version = '{}.{}.{}'.format(ver, 0, 0) module['derived-semantic-version'] = upgraded_version new_modules.append(module) continue else: schema2 = '{}{}@{}.yang'.format(args.save_file_dir, modules[-2]['name'], modules[-2]['revision']) schema1 = '{}{}@{}.yang'.format(args.save_file_dir, modules[-1]['name'], modules[-1]['revision']) arguments = ['pyang', '-P', get_curr_dir(__file__) + '/../../.', '-p', get_curr_dir(__file__) + '/../../.', schema1, '--check-update-from', schema2] pyang = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = pyang.communicate() if stderr == '': arguments = ["pyang", '-p', get_curr_dir(__file__) + '/../../.', "-f", "tree", schema1] pyang = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = pyang.communicate() arguments = ["pyang", "-p", get_curr_dir(__file__) + "/../../.", "-f", "tree", schema2] pyang = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
api_ip = config.get('DraftPullLocal-Section', 'api-ip') api_port = config.get('General-Section', 'api-port') confd_ip = config.get('General-Section', 'confd-ip') confd_port = config.get('General-Section', 'confd-port') credentials = config.get('General-Section', 'credentials').split(' ') result_html_dir = config.get('DraftPullLocal-Section', 'result-html-dir') protocol = config.get('General-Section', 'protocol-api') notify = config.get('DraftPullLocal-Section', 'notify-index') save_file_dir = config.get('DraftPullLocal-Section', 'save-file-dir') private_credentials = config.get('General-Section', 'private-secret').split(' ') LOGGER.info('Loading all files from https://new.yangcatalog.org/private/IETFDraft.json') ietf_draft_json = requests.get('https://new.yangcatalog.org/private/IETFDraft.json' , auth=(private_credentials[0], private_credentials[1])).json() response = requests.get('https://new.yangcatalog.org/private/YANG-RFC.tgz' , auth=(private_credentials[0], private_credentials[1])) zfile = open(get_curr_dir(__file__) + '/rfc.tgz', 'wb') zfile.write(response.content) zfile.close() tgz = tarfile.open(get_curr_dir(__file__) + '/rfc.tgz') tgz.extractall(get_curr_dir(__file__) + '/../../standard/ietf/RFC') tgz.close() os.remove(get_curr_dir(__file__) + '/rfc.tgz') check_name_no_revision_exist(get_curr_dir(__file__) + '/../../standard/ietf/RFC/') check_early_revisions(get_curr_dir(__file__) + '/../../standard/ietf/RFC/') with open("log.txt", "wr") as f: try: LOGGER.info('Calling populate script') arguments = ["python", "../parseAndPopulate/populate.py", "--sdo", "--port", confd_port, "--ip", confd_ip, "--api-protocol", protocol, "--api-port", api_port, "--api-ip", api_ip, "--dir", get_curr_dir(__file__) + "/../../standard/ietf/RFC", "--result-html-dir", result_html_dir, "--credentials", credentials[0], credentials[1],
suffix = api_port if is_uwsgi == 'True': separator = '/' suffix = 'api' yangcatalog_api_prefix = '{}://{}{}{}/'.format(protocol, api_ip, separator, suffix) xr = set() nx = set() xe = set() solve_platforms('../../vendor/cisco/xr', xr) solve_platforms('../../vendor/cisco/xe', xe) solve_platforms('../../vendor/cisco/nx', nx) xr_versions = sorted(next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/xr'))[1]) nx_versions = sorted(next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/nx'))[1]) xe_versions = sorted(next(os.walk(get_curr_dir(__file__) + '/../../vendor/cisco/xe'))[1]) xr_values = [] nx_values = [] xe_values = [] for version in xr_versions: j = None try: with open('../../vendor/cisco/xr/' + version + '/platform-metadata.json', 'r') as f: j = json.load(f) j = j['platforms']['platform'] except: j = []
def __resolve_tree_type(self):
    """Classify every module's pyang tree into a tree-type category.

    For each module in ``self.__all_modules`` the pyang 'tree' output is
    generated and matched, in priority order, against four heuristics:
    nmda-compatible (is_combined), transitional-extra (is_transational),
    openconfig (is_openconfig), split (is_split). Submodules and modules
    with an empty tree are 'not-applicable'; anything else falls back to
    'unclassified'. Results are stored in each module's 'tree-type' key.
    """

    def is_openconfig(rows, output):
        # OpenConfig style: every rw subtree is mirrored by an identical
        # ro subtree; start by requiring equal config/state counts.
        count_config = output.count('+-- config')
        count_state = output.count('+-- state')
        if count_config != count_state:
            return False
        row_number = 0
        skip = []
        for row in rows:
            # Ignore obsolete (o--) and deprecated (x--) nodes.
            if 'x--' in row or 'o--' in row:
                continue
            # Blank row terminates the tree body.
            if '' == row.strip(' '):
                break
            # A rw container/list (not a simple leaf: token count != 2,
            # no '[' key marker unless it has '(' choice syntax).
            if '+--rw' in row and row_number != 0 \
                    and row_number not in skip and '[' not in row and \
                    (len(row.replace('|', '').strip(' ').split(
                        ' ')) != 2 or '(' in row):
                # Leafref into a config subtree is acceptable; move on.
                if '->' in row and 'config' in row.split('->')[
                        1] and '+--rw config' not in rows[row_number - 1]:
                    row_number += 1
                    continue
                # Must be nested under '+--rw config' or an augment that
                # targets a ':config:' path.
                if '+--rw config' not in rows[row_number - 1]:
                    if 'augment' in rows[row_number - 1]:
                        if not rows[row_number - 1].endswith(':config:'):
                            return False
                    else:
                        return False
                # Walk the subtree (tracked via indent widths) and check
                # each rw node has a matching ro duplicate in the output.
                length_before = set([len(row.split('+--')[0])])
                skip = []
                for x in range(row_number, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if len(rows[x].split('+--')[0]) not in length_before:
                        if (len(rows[x].replace('|', '').strip(' ').split(
                                ' ')) != 2 and '[' not in rows[x]) \
                                or '+--:' in rows[x] or '(' in rows[x]:
                            length_before.add(len(rows[x].split('+--')[0]))
                        else:
                            break
                    if '+--ro' in rows[x]:
                        return False
                    duplicate = \
                        rows[x].replace('+--rw', '+--ro').split('+--')[1]
                    if duplicate.replace(' ', '') not in output.replace(
                            ' ', ''):
                        return False
                    skip.append(x)
            # Mirror logic for ro subtrees under '+--ro state'.
            if '+--ro' in row and row_number != 0 and row_number not in skip and '[' not in row and \
                    (len(row.replace('|', '').strip(' ').split(
                        ' ')) != 2 or '(' in row):
                if '->' in row and 'state' in row.split(
                        '->')[1] and '+--ro state' not in rows[row_number - 1]:
                    row_number += 1
                    continue
                if '+--ro state' not in rows[row_number - 1]:
                    if 'augment' in rows[row_number - 1]:
                        if not rows[row_number - 1].endswith(':state:'):
                            return False
                    else:
                        return False
                length_before = len(row.split('+--')[0])
                skip = []
                for x in range(row_number, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if len(rows[x].split('+--')[0]) < length_before:
                        break
                    # A rw node inside a state subtree disqualifies.
                    if '+--rw' in rows[x]:
                        return False
                    skip.append(x)
            row_number += 1
        return True

    def is_combined(rows, output):
        # NMDA-combined: a non -state module with no config/state split
        # containers and no live references into -state trees.
        if output.split('\n')[0].endswith('-state'):
            return False
        next_obsolete_or_deprecated = False
        for row in rows:
            # A row flagged as "must be obsolete/deprecated" (set below
            # when an augment targets a state/config path) must indeed
            # be marked x-- or o--.
            if next_obsolete_or_deprecated:
                if 'x--' in row or 'o--' in row:
                    next_obsolete_or_deprecated = False
                else:
                    return False
            if 'x--' in row or 'o--' in row:
                continue
            # Dedicated config/state wrapper containers disqualify.
            if '+--rw config' == row.replace(
                    '|', '').strip(' ') or '+--ro state' == row.replace(
                    '|', '').strip(' '):
                return False
            # Top-level read-only -state node disqualifies.
            if len(row.split('+--')[0]) == 4:
                if '-state' in row and '+--ro' in row:
                    return False
            # Augments into -state or :state:/:config: paths are only
            # tolerated when the augmented node is obsolete/deprecated.
            if len(row.split('augment')[0]) == 2:
                part = row.strip(' ').split('/')[1]
                if '-state' in part:
                    next_obsolete_or_deprecated = True
                part = row.strip(' ').split('/')[-1]
                if ':state:' in part or '/state:' in part \
                        or ':config:' in part or '/config:' in part:
                    next_obsolete_or_deprecated = True
        return True

    def is_transational(rows, output):
        # Transitional-extra: a read-only -state module whose leaves all
        # exist in the corresponding NMDA (non-state) module's tree.
        if output.split('\n')[0].endswith('-state'):
            if '+--rw' in output:
                return False
            # Derive the base name from the 'module: <name>' header.
            name_of_module = output.split('\n')[0].split(': ')[1]
            name_of_module = name_of_module.split('-state')[0]
            coresponding_nmda_file = self.__find_file(name_of_module)
            if coresponding_nmda_file:
                # Generate the NMDA counterpart's tree for comparison.
                arguments = [
                    "pyang", "-p",
                    get_curr_dir(__file__) + "/../../.", "-f", "tree",
                    coresponding_nmda_file
                ]
                pyang = subprocess.Popen(arguments,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                stdout, stderr = pyang.communicate()
                pyang_list_of_rows = stdout.split('\n')[1:]
                if 'error' in stderr and 'is not found' in stderr:
                    return False
                elif stdout == '':
                    return False
                for x in range(0, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if rows[x].strip(' ') == '':
                        break
                    # Top-level -state node disqualifies.
                    if len(rows[x].split('+--')[0]) == 4:
                        if '-state' in rows[x]:
                            return False
                    # Augment targeting a -state path disqualifies.
                    if len(rows[x].split('augment')[0]) == 2:
                        part = rows[x].strip(' ').split('/')[1]
                        if '-state' in part:
                            return False
                    if '+--ro ' in rows[x]:
                        # Leaf name, stripped of type column and '?'.
                        leaf = \
                            rows[x].split('+--ro ')[1].split(' ')[0].split(
                                '?')[0]
                        # Leaf must appear in the NMDA module's tree.
                        dataExist = False
                        for y in range(0, len(pyang_list_of_rows)):
                            if leaf in pyang_list_of_rows[y]:
                                dataExist = True
                        if not dataExist:
                            return False
                return True
            else:
                return False
        else:
            return False

    def is_split(rows, output):
        # Split style: separate config and -state subtrees, where each
        # -state subtree contains no writable nodes.
        failed = False
        row_num = 0
        if output.split('\n')[0].endswith('-state'):
            return False
        # First pass: wrapper containers or state/config augments
        # disqualify outright.
        for row in rows:
            if 'x--' in row or 'o--' in row:
                continue
            if '+--rw config' == row.replace('|', '').strip(
                    ' ') or '+--ro state' == row.replace('|', '') \
                    .strip(' '):
                return False
            if 'augment' in row:
                part = row.strip(' ').split('/')[-1]
                if ':state:' in part or '/state:' in part or ':config:' in part or '/config:' in part:
                    return False
        # Second pass: inside each top-level -state subtree, any rw
        # node marks the module as failed.
        for row in rows:
            if 'x--' in row or 'o--' in row:
                continue
            if row == '':
                break
            if (len(row.split('+--')[0]) == 4 and 'augment' not in rows[
                    row_num - 1]) or len(row.split('augment')[0]) == 2:
                if '-state' in row:
                    if 'augment' in row:
                        part = row.strip(' ').split('/')[1]
                        if '-state' not in part:
                            row_num += 1
                            continue
                    for x in range(row_num + 1, len(rows)):
                        if 'x--' in rows[x] or 'o--' in rows[x]:
                            continue
                        # Subtree ends at a blank row or the next
                        # top-level node / augment.
                        if rows[x].strip(' ') == '' \
                                or (len(rows[x].split('+--')[
                                    0]) == 4 and 'augment' not in rows[row_num - 1]) \
                                or len(row.split('augment')[0]) == 2:
                            break
                        if '+--rw' in rows[x]:
                            failed = True
                            break
            row_num += 1
        if failed:
            return False
        else:
            return True

    x = 0
    for module in self.__all_modules['module']:
        x += 1
        # __find_file (used by is_transational) reads self.__path, so it
        # must be set before the heuristics run.
        self.__path = '{}{}@{}.yang'.format(self.__save_file_dir,
                                            module['name'],
                                            module['revision'])
        LOGGER.info('Searching tree type for {}. {} out of {}'.format(
            module['name'], x, len(self.__all_modules['module'])))
        LOGGER.debug('Get tree type from tag from module {}'.format(
            self.__path))
        arguments = [
            "pyang", "-p",
            get_curr_dir(__file__) + "/../../.", "-f", "tree", self.__path
        ]
        pyang = subprocess.Popen(arguments,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        stdout, stderr = pyang.communicate()
        if 'error' in stderr and 'is not found' in stderr:
            LOGGER.debug(
                'Could not use pyang to generate tree because of error {} on module {}'
                .format(stderr, self.__path))
            module['tree-type'] = 'unclassified'
        elif stdout == '':
            # No data nodes at all.
            module['tree-type'] = 'not-applicable'
        else:
            # Drop the 'module:' header line before classification.
            pyang_list_of_rows = stdout.split('\n')[1:]
            if 'submodule' == module['module-type']:
                LOGGER.debug('Module {} is a submodule'.format(
                    self.__path))
                module['tree-type'] = 'not-applicable'
            elif is_combined(pyang_list_of_rows, stdout):
                module['tree-type'] = 'nmda-compatible'
            elif is_transational(pyang_list_of_rows, stdout):
                module['tree-type'] = 'transitional-extra'
            elif is_openconfig(pyang_list_of_rows, stdout):
                module['tree-type'] = 'openconfig'
            elif is_split(pyang_list_of_rows, stdout):
                module['tree-type'] = 'split'
            else:
                module['tree-type'] = 'unclassified'
def __parse_semver(self):
    """Compute a 'derived-semantic-version' for every module in
    self.__all_modules['module'].

    For each module this method:
      1. Asks the yangcatalog API for all known revisions of the module
         (``<prefix>search/name/<name>``).
      2. If no other revision exists (HTTP 404 or only this revision),
         the module starts at version '1.0.0'.
      3. If this module is the newest revision (by revision date) and every
         older revision already carries a semver, only this module's version
         is derived from its immediate predecessor:
           - compilation failed            -> bump MAJOR,
           - pyang --check-update-from errs -> bump MAJOR,
           - tree output differs            -> bump MINOR,
           - tree output identical          -> bump PATCH.
      4. Otherwise (an out-of-order revision arrived, or some revision lacks
         a semver) the whole chain is recomputed from the oldest revision
         ('1.0.0') forward with the same bump rules; each recomputed module's
         full metadata is re-fetched from ConfD and appended to
         self.__new_modules.

    Side effects: issues HTTP GETs (yangcatalog API and ConfD) and spawns
    pyang subprocesses; appends updated module dicts to self.__new_modules.

    NOTE(review): ``stderr == ''`` / ``stdout == stdout2`` compare the raw
    ``communicate()`` output; on Python 3 these are ``bytes`` — presumably
    this runs under Python 2 (the file also uses urllib2 elsewhere). Confirm.
    """
    z = 0  # progress counter for log messages only
    for module in self.__all_modules['module']:
        z += 1
        LOGGER.info('Searching semver for {}. {} out of {}'.format(
            module['name'], z, len(self.__all_modules['module'])))
        url = '{}search/name/{}'.format(self.__yangcatalog_api_prefix,
                                        module['name'])
        response = requests.get(url,
                                auth=(self.__credentials[0],
                                      self.__credentials[1]),
                                headers={'Accept': 'application/json'})
        if response.status_code == 404:
            # Module name unknown to the catalog -> first ever revision.
            module['derived-semantic-version'] = '1.0.0'
            self.__new_modules.append(module)
        else:
            data = json.loads(response.content)
            # Revision strings are 'YYYY-MM-DD'; turn them into datetimes
            # so revisions can be ordered chronologically.
            rev = module['revision'].split('-')
            date = datetime(int(rev[0]), int(rev[1]), int(rev[2]))
            module_temp = {}
            module_temp['name'] = module['name']
            module_temp['revision'] = module['revision']
            module_temp['organization'] = module['organization']
            module_temp['compilation'] = module['compilation-status']
            module_temp['date'] = date
            module_temp['schema'] = module['schema']
            modules = [module_temp]
            # semver_exist stays True only if every OTHER revision already
            # has a derived-semantic-version in the catalog.
            semver_exist = True
            for mod in data['yang-catalog:modules']['module']:
                module_temp = {}
                revision = mod['revision']
                if revision == module['revision']:
                    # Skip the catalog entry for the revision being processed.
                    continue
                rev = revision.split('-')
                module_temp['revision'] = revision
                module_temp['date'] = datetime(int(rev[0]), int(rev[1]),
                                               int(rev[2]))
                module_temp['name'] = mod['name']
                module_temp['organization'] = mod['organization']
                module_temp['schema'] = mod.get('schema')
                module_temp['compilation'] = mod['compilation-status']
                module_temp['semver'] = mod.get('derived-semantic-version')
                if module_temp['semver'] is None:
                    semver_exist = False
                modules.append(module_temp)
            if len(modules) == 1:
                # Only this revision exists -> first version.
                module['derived-semantic-version'] = '1.0.0'
                self.__new_modules.append(module)
                continue
            # Oldest revision first; modules[-1] is the newest.
            modules = sorted(modules, key=lambda k: k['date'])
            if modules[-1]['date'] == date and semver_exist:
                # Fast path: the processed module IS the newest revision and
                # all predecessors already have semvers — derive only from
                # the immediate predecessor modules[-2].
                if modules[-1]['compilation'] != 'passed':
                    # New revision does not compile -> MAJOR bump.
                    versions = modules[-2]['semver'].split('.')
                    ver = int(versions[0])
                    ver += 1
                    upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                    module['derived-semantic-version'] = upgraded_version
                    self.__new_modules.append(module)
                else:
                    if modules[-2]['compilation'] != 'passed':
                        # Predecessor did not compile; trees cannot be
                        # compared -> MAJOR bump.
                        versions = modules[-2]['semver'].split('.')
                        ver = int(versions[0])
                        ver += 1
                        upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                        module[
                            'derived-semantic-version'] = upgraded_version
                        self.__new_modules.append(module)
                        continue
                    else:
                        # Both compile: check backward compatibility with
                        # pyang --check-update-from (new schema1 vs old
                        # schema2).
                        schema2 = '{}{}@{}.yang'.format(
                            self.__save_file_dir,
                            modules[-2]['name'],
                            modules[-2]['revision'])
                        schema1 = '{}{}@{}.yang'.format(
                            self.__save_file_dir,
                            modules[-1]['name'],
                            modules[-1]['revision'])
                        arguments = [
                            'pyang', '-P',
                            get_curr_dir(__file__) + '/../../.', '-p',
                            get_curr_dir(__file__) + '/../../.',
                            schema1, '--check-update-from', schema2
                        ]
                        pyang = subprocess.Popen(arguments,
                                                 stdout=subprocess.PIPE,
                                                 stderr=subprocess.PIPE)
                        stdout, stderr = pyang.communicate()
                        if stderr == '':
                            # Backward compatible; compare the two tree
                            # outputs to decide MINOR vs PATCH.
                            arguments = [
                                "pyang", '-p',
                                get_curr_dir(__file__) + '/../../.',
                                "-f", "tree", schema1
                            ]
                            pyang = subprocess.Popen(arguments,
                                                     stdout=subprocess.PIPE,
                                                     stderr=subprocess.PIPE)
                            stdout, stderr = pyang.communicate()
                            arguments = [
                                "pyang", "-p",
                                get_curr_dir(__file__) + "/../../.",
                                "-f", "tree", schema2
                            ]
                            pyang = subprocess.Popen(arguments,
                                                     stdout=subprocess.PIPE,
                                                     stderr=subprocess.PIPE)
                            stdout2, stderr = pyang.communicate()
                            if stdout == stdout2:
                                # Identical trees -> PATCH bump.
                                versions = modules[-2]['semver'].split('.')
                                ver = int(versions[2])
                                ver += 1
                                upgraded_version = '{}.{}.{}'.format(
                                    versions[0], versions[1], ver)
                                module[
                                    'derived-semantic-version'] = upgraded_version
                                self.__new_modules.append(module)
                                continue
                            else:
                                # Tree changed (compatibly) -> MINOR bump.
                                versions = modules[-2]['semver'].split('.')
                                ver = int(versions[1])
                                ver += 1
                                upgraded_version = '{}.{}.{}'.format(
                                    versions[0], ver, 0)
                                module[
                                    'derived-semantic-version'] = upgraded_version
                                self.__new_modules.append(module)
                                continue
                        else:
                            # check-update-from reported errors -> not
                            # backward compatible -> MAJOR bump.
                            versions = modules[-2]['semver'].split('.')
                            ver = int(versions[0])
                            ver += 1
                            upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                            module[
                                'derived-semantic-version'] = upgraded_version
                            self.__new_modules.append(module)
                            continue
            else:
                # Slow path: recompute semver for the ENTIRE revision chain,
                # starting at '1.0.0' for the oldest revision. Each entry's
                # full metadata is re-fetched from ConfD so the updated
                # record can be pushed back.
                mod = {}
                mod['name'] = modules[0]['name']
                mod['revision'] = modules[0]['revision']
                mod['organization'] = modules[0]['organization']
                modules[0]['semver'] = '1.0.0'
                response = requests.get(
                    '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                    .format(self.__protocol, self.__ip, self.__port,
                            mod['name'], mod['revision'],
                            mod['organization']),
                    auth=(self.__credentials[0], self.__credentials[1]),
                    headers={'Accept': 'application/vnd.yang.data+json'})
                response = json.loads(
                    response.content)['yang-catalog:module']
                response['derived-semantic-version'] = '1.0.0'
                self.__new_modules.append(response)
                # Walk the chain oldest -> newest, deriving each version
                # from the previous entry with the same bump rules as above.
                for x in range(1, len(modules)):
                    mod = {}
                    mod['name'] = modules[x]['name']
                    mod['revision'] = modules[x]['revision']
                    mod['organization'] = modules[x]['organization']
                    if modules[x]['compilation'] != 'passed':
                        # This revision does not compile -> MAJOR bump.
                        versions = modules[x - 1]['semver'].split('.')
                        ver = int(versions[0])
                        ver += 1
                        upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                        modules[x]['semver'] = upgraded_version
                        response = requests.get(
                            '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                            .format(self.__protocol, self.__ip, self.__port,
                                    mod['name'], mod['revision'],
                                    mod['organization']),
                            auth=(self.__credentials[0],
                                  self.__credentials[1]),
                            headers={
                                'Accept': 'application/vnd.yang.data+json'
                            })
                        response = json.loads(
                            response.content)['yang-catalog:module']
                        response[
                            'derived-semantic-version'] = upgraded_version
                        self.__new_modules.append(response)
                    else:
                        if modules[x - 1]['compilation'] != 'passed':
                            # Predecessor did not compile -> MAJOR bump.
                            versions = modules[x - 1]['semver'].split('.')
                            ver = int(versions[0])
                            ver += 1
                            upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                            modules[x]['semver'] = upgraded_version
                            response = requests.get(
                                '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                                .format(self.__protocol, self.__ip,
                                        self.__port, mod['name'],
                                        mod['revision'],
                                        mod['organization']),
                                auth=(self.__credentials[0],
                                      self.__credentials[1]),
                                headers={
                                    'Accept': 'application/vnd.yang.data+json'
                                })
                            response = json.loads(
                                response.content)['yang-catalog:module']
                            response[
                                'derived-semantic-version'] = upgraded_version
                            self.__new_modules.append(response)
                            continue
                        else:
                            # Both compile: pyang --check-update-from with
                            # the newer schema2 against the older schema1.
                            schema2 = '{}{}@{}.yang'.format(
                                self.__save_file_dir,
                                modules[x]['name'],
                                modules[x]['revision'])
                            schema1 = '{}{}@{}.yang'.format(
                                self.__save_file_dir,
                                modules[x - 1]['name'],
                                modules[x - 1]['revision'])
                            arguments = [
                                'pyang', '-p',
                                get_curr_dir(__file__) + '/../../.', '-P',
                                get_curr_dir(__file__) + '/../../.',
                                schema2, '--check-update-from', schema1
                            ]
                            pyang = subprocess.Popen(arguments,
                                                     stdout=subprocess.PIPE,
                                                     stderr=subprocess.PIPE)
                            stdout, stderr = pyang.communicate()
                            if stderr == '':
                                # Compatible; compare trees for MINOR/PATCH.
                                arguments = [
                                    "pyang", '-p',
                                    get_curr_dir(__file__) + '/../../.',
                                    "-f", "tree", schema1
                                ]
                                pyang = subprocess.Popen(
                                    arguments,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
                                stdout, stderr = pyang.communicate()
                                arguments = [
                                    "pyang", '-p',
                                    get_curr_dir(__file__) + '/../../.',
                                    "-f", "tree", schema2
                                ]
                                pyang = subprocess.Popen(
                                    arguments,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
                                stdout2, stderr = pyang.communicate()
                                if stdout == stdout2:
                                    # Identical trees -> PATCH bump.
                                    versions = modules[x - 1]['semver'].split('.')
                                    ver = int(versions[2])
                                    ver += 1
                                    upgraded_version = '{}.{}.{}'.format(
                                        versions[0], versions[1], ver)
                                    modules[x]['semver'] = upgraded_version
                                    response = requests.get(
                                        '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                                        .format(self.__protocol, self.__ip,
                                                self.__port, mod['name'],
                                                mod['revision'],
                                                mod['organization']),
                                        auth=(self.__credentials[0],
                                              self.__credentials[1]),
                                        headers={
                                            'Accept': 'application/vnd.yang.data+json'
                                        })
                                    response = json.loads(
                                        response.content
                                    )['yang-catalog:module']
                                    response[
                                        'derived-semantic-version'] = upgraded_version
                                    self.__new_modules.append(response)
                                else:
                                    # Tree changed -> MINOR bump.
                                    versions = modules[x - 1]['semver'].split('.')
                                    ver = int(versions[1])
                                    ver += 1
                                    upgraded_version = '{}.{}.{}'.format(
                                        versions[0], ver, 0)
                                    modules[x]['semver'] = upgraded_version
                                    response = requests.get(
                                        '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                                        .format(self.__protocol, self.__ip,
                                                self.__port, mod['name'],
                                                mod['revision'],
                                                mod['organization']),
                                        auth=(self.__credentials[0],
                                              self.__credentials[1]),
                                        headers={
                                            'Accept': 'application/vnd.yang.data+json'
                                        })
                                    response = json.loads(
                                        response.content
                                    )['yang-catalog:module']
                                    response[
                                        'derived-semantic-version'] = upgraded_version
                                    self.__new_modules.append(response)
                            else:
                                # Not backward compatible -> MAJOR bump.
                                versions = modules[x - 1]['semver'].split('.')
                                ver = int(versions[0])
                                ver += 1
                                upgraded_version = '{}.{}.{}'.format(ver, 0, 0)
                                modules[x]['semver'] = upgraded_version
                                response = requests.get(
                                    '{}://{}:{}/api/config/catalog/modules/module/{},{},{}'
                                    .format(self.__protocol, self.__ip,
                                            self.__port, mod['name'],
                                            mod['revision'],
                                            mod['organization']),
                                    auth=(self.__credentials[0],
                                          self.__credentials[1]),
                                    headers={
                                        'Accept': 'application/vnd.yang.data+json'
                                    })
                                response = json.loads(
                                    response.content)['yang-catalog:module']
                                response[
                                    'derived-semantic-version'] = upgraded_version
                                self.__new_modules.append(response)
yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol, args.api_ip, separator, suffix) start = time.time() index = 1 integrity = None sdo = args.sdo search_dirs = [args.dir] if sdo: stats_list = {'sdo': search_dirs} else: stats_list = {'vendor': search_dirs} if args.run_integrity: stats_list = { 'vendor': [get_curr_dir(__file__) + '/../../vendor/cisco'] } LOGGER.info('Starting to iterate through files') for key in stats_list: search_dirs = stats_list[key] if key == 'sdo': sdo = True prepare_sdo = prepare.Prepare("prepare", yangcatalog_api_prefix) for search_dir in search_dirs: LOGGER.info('Found directory for sdo {}'.format(search_dir)) integrity = statistics.Statistics(search_dir) capability = cap.Capability(search_dir, index, prepare_sdo, integrity, args.api, sdo, args.json_dir,
api_port = config.get('General-Section', 'api-port') confd_ip = config.get('General-Section', 'confd-ip') confd_port = config.get('General-Section', 'confd-port') credentials = config.get('General-Section', 'credentials').split(' ') result_html_dir = config.get('DraftPullLocal-Section', 'result-html-dir') protocol = config.get('General-Section', 'protocol-api') notify = config.get('DraftPullLocal-Section', 'notify-index') save_file_dir = config.get('DraftPullLocal-Section', 'save-file-dir') LOGGER.info('Loading all files from http://www.claise.be/IETFYANGDraft.json') ietf_draft_json = load_json_from_url('http://www.claise.be/IETFYANGDraft.json') response = urllib.urlretrieve('http://www.claise.be/YANG-RFC.tar', './rfc.tar') tar = tarfile.open('./rfc.tar') tar.extractall(get_curr_dir(__file__) + '/../../standard/ietf/RFC') tar.close() os.remove('./rfc.tar') check_name_no_revision_exist(get_curr_dir(__file__) + '/../../standard/ietf/RFC/') check_early_revisions(get_curr_dir(__file__) + '/../../standard/ietf/RFC/') with open("log.txt", "wr") as f: try: LOGGER.info('Calling populate script') arguments = ["python", "../parseAndPopulate/populate.py", "--sdo", "--port", confd_port, "--ip", confd_ip, "--api-protocol", protocol, "--api-port", api_port, "--api-ip", api_ip, "--dir", get_curr_dir(__file__) + "/../../standard/ietf/RFC", "--result-html-dir", result_html_dir, "--credentials", credentials[0], credentials[1], "--save-file-dir", save_file_dir] if notify == 'True': arguments.append("--notify-indexing") subprocess.check_call(arguments, stderr=f)
def send_to_indexing(yc_api_prefix, modules_to_index, credentials, apiIp=None,
                     sdo_type=False, delete=False, from_api=True,
                     set_key=None, force_indexing=True):
    """ Sends the POST request which will activate indexing script for
    modules which will help to speed up the process of searching. It will
    create a json body of all the modules containing module name and path
    where the module can be found if we are adding new modules. The other
    situation is when we need to delete a module. In that case we are
    sending the list of modules that need to be deleted.
            Arguments:
                :param yc_api_prefix: (str) prefix for sending request to api
                :param modules_to_index: (json file) prepare.json file generated while
                    parsing all the modules. This file is used to iterate through
                    all the modules.
                :param credentials: (list) Basic authorization credentials - username,
                    password respectively.
                :param apiIp: (str) optional override of the module-level api_ip global.
                :param sdo_type: (bool) Whether or not it is sdo that needs to be sent.
                :param delete: (bool) Whether or not we are deleting module.
                :param from_api: (bool) Whether or not api sent the request to index.
                :param set_key: (str) String containing key to confirm that it is
                    receiver that sends data. This is verified before indexing
                    takes place.
                :param force_indexing: (bool) Whether or not we should force indexing
                    even if module exists in cache.
    """
    # apiIp, if given, overrides the module-level api_ip for this process.
    global api_ip
    if apiIp is not None:
        api_ip = apiIp
    LOGGER.debug('Sending data for indexing')
    mf = messageFactory.MessageFactory()
    if delete:
        # Deletion path: modules_to_index is a list of 'name@revision/org'
        # strings rather than a prepare.json path.
        body_to_send = json.dumps({'modules-to-delete': modules_to_index},
                                  indent=4)
        mf.send_removed_yang_files(body_to_send)
        for mod in modules_to_index:
            name, revision_organization = mod.split('@')
            revision, organization = revision_organization.split('/')
            path_to_delete_local = "{}{}@{}.yang".format(
                save_file_dir, name, revision)
            # Find all modules that list this one as a dependent so the
            # dependents references can be removed from ConfD.
            data = {'input': {'dependents': [{'name': name}]}}
            # NOTE(review): 'data' already contains an 'input' key, so the
            # request body becomes {'input': {'input': {...}}} — confirm
            # against the search-filter endpoint's expected schema.
            response = requests.post(yc_api_prefix + 'search-filter',
                                     auth=(credentials[0], credentials[1]),
                                     json={'input': data})
            if response.status_code == 201:
                modules = json.loads(response.content)
                # NOTE(review): iterating a dict yields its keys; mod['name']
                # below suggests a list of module dicts is expected — verify
                # the actual response shape of search-filter.
                for mod in modules:
                    m_name = mod['name']
                    m_rev = mod['revision']
                    m_org = mod['organization']
                    url = ('{}://{}:{}/api/config/catalog/modules/module/'
                           '{},{},{}/dependents/{}'.format(
                               confd_protocol, confd_ip, confdPort, m_name,
                               m_rev, m_org, name))
                    requests.delete(url,
                                    auth=(credentials[0], credentials[1]),
                                    headers={
                                        'Content-Type': 'application/vnd.yang.data+json'
                                    })
            # Remove the saved copy of the deleted module, if present.
            if os.path.exists(path_to_delete_local):
                os.remove(path_to_delete_local)
    else:
        # Indexing path: modules_to_index is a path to prepare.json.
        with open(modules_to_index, 'r') as f:
            sdos_json = json.load(f)
        post_body = {}
        load_new_files_to_github = False
        if from_api:
            # Modules came in through the API; their files were copied under
            # api/sdo/ or api/vendor/ relative to the project root.
            if sdo_type:
                prefix = 'api/sdo/'
            else:
                prefix = 'api/vendor/'
            for module in sdos_json['module']:
                # Ask the catalog whether this module is already known; only
                # unknown modules (or a forced run) get indexed.
                response = http_request('{}search/modules/{},{},{}'.format(
                    yc_api_prefix, module['name'], module['revision'],
                    module['organization']), 'GET', '', credentials,
                    'application/vnd.yang.data+json', return_code=True)
                code = response.code
                if force_indexing or (code != 200 and code != 201
                                      and code != 204):
                    if module.get('schema'):
                        path = prefix + module['schema'].split(
                            'githubusercontent.com/')[1]
                        path = os.path.abspath(
                            get_curr_dir(__file__) + '/../../' + path)
                    else:
                        path = 'module does not exist'
                    post_body[module['name'] + '@' + module['revision'] +
                              '/' + module['organization']] = path
        else:
            for module in sdos_json['module']:
                response = http_request('{}search/modules/{},{},{}'.format(
                    yc_api_prefix, module['name'], module['revision'],
                    module['organization']), 'GET', '', credentials,
                    'application/vnd.yang.data+json', return_code=True)
                code = response.code
                if code != 200 and code != 201 and code != 204:
                    # At least one brand-new module: github needs repopulating.
                    load_new_files_to_github = True
                if force_indexing or (code != 200 and code != 201
                                      and code != 204):
                    if module.get('schema'):
                        path = module['schema'].split('master')[1]
                        path = os.path.abspath(
                            get_curr_dir(__file__) + '/../../' + path)
                    else:
                        path = 'module does not exist'
                    post_body[module['name'] + '@' + module['revision'] +
                              '/' + module['organization']] = path
        body_to_send = json.dumps({'modules-to-index': post_body}, indent=4)
        # Only notify about newly discovered files, not forced re-indexing.
        if len(post_body) > 0 and not force_indexing:
            mf.send_added_new_yang_files(body_to_send)
        if load_new_files_to_github:
            LOGGER.info('Starting a new process to populate github')
            cmd = ['python', '../ietfYangDraftPull/draftPull.py']
            proc = subprocess.Popen(cmd, close_fds=True)
            LOGGER.info('Populating github with process {}'.format(proc))
    # Prefer the module-level signing key 'key' if it is defined; otherwise
    # keep the caller-supplied set_key (NameError is expected when no global
    # key exists).
    try:
        set_key = key
    except NameError:
        pass
    LOGGER.info('Sending data for indexing with body {}'.format(body_to_send))
    try:
        http_request('https://' + api_ip + '/yang-search/metadata-update.php',
                     'POST', body_to_send, credentials, 'application/json',
                     indexing=create_signature(set_key, body_to_send))
    except urllib2.HTTPError as e:
        LOGGER.error('could not send data for indexing. Reason: {}'.format(
            e.msg))
    except URLError as e:
        LOGGER.error('could not send data for indexing. Reason: {}'.format(
            repr(e.message)))
incName = inc.arg incRev = None for sub in inc.substmts: if sub.keyword == 'revision-date': incRev = sub.arg if incRev: name_rev = '{}@{}'.format(incName, incRev) dependencies[key].add(name_rev) if name_rev not in dependants: dependants[name_rev] = set() dependants[name_rev].add(key) else: dependencies[key].add(incName) if incName not in dependants: dependants[incName] = set() dependants[incName].add(key) except: pass if __name__ == "__main__": dependencies = {} dependants = {} for dir in [ get_curr_dir(__file__) + '/../../experimental', get_curr_dir(__file__) + '/../../standard', get_curr_dir(__file__) + '/../../vendor' ]: search_dependencies(dir) pass
import os

from tools.parseAndPopulate.modules import Modules
from tools.utility.util import get_curr_dir

if __name__ == '__main__':
    # Walk the whole repository tree and run the parser over every YANG
    # module file that is found.
    search_root = get_curr_dir(__file__) + '/../../.'
    for directory, _subdirs, file_names in os.walk(search_root):
        for file_name in file_names:
            if not file_name.endswith('.yang'):
                continue
            # Module name is everything before the '@<revision>' suffix or
            # the '.yang' extension, whichever comes first.
            module_name = file_name.split('@')[0].split('.')[0]
            module = Modules(directory + '/' + file_name, None, None, None)
            module.parse_all(module_name, 'foo', None,
                             '/home/miroslav/results/')