def transform_module():
    module = request.body.read()
    response.content_type = 'application/json'
    repos = pyang.FileRepository()
    ctx = pyang.Context(repos)
    modules = []
    modules.append(ctx.add_module('upload module', module))
    plugin.init()
    p = get_plugin_by_name('jsontree')
    op = optparse.OptionParser()
    p.add_opts(op)
    (o, args) = op.parse_args()
    ctx.opts = o
    wr = Writer()
    try:
        p.emit(ctx, modules, wr)
    except Exception:
        bottle.abort(500, 'Internal Server Error')
    return str(wr)
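# `Writer` is not defined in this file; below is a minimal sketch consistent
# with how transform_module() uses it (pyang output plugins write through a
# file-like object, and str(wr) must return the accumulated text). The real
# class may differ.
class Writer:
    def __init__(self):
        import io
        self._buf = io.StringIO()

    def write(self, data):
        # pyang plugins emit their output via write() calls
        self._buf.write(data)

    def __str__(self):
        # transform_module() returns str(wr) as the response body
        return self._buf.getvalue()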
def __init__(self, context_directories: list, file_name: str, working_directory: str):
    # Plugins array must be emptied before plugin init
    plugin.plugins = []
    plugin.init([])
    self.__ctx = create_context(':'.join(context_directories))
    self.__ctx.opts.lint_namespace_prefixes = []
    self.__ctx.opts.lint_modulename_prefixes = []
    self.__pyang_command = ['pyang']
    self.__infile = os.path.join(working_directory, file_name)
    self.__working_directory = working_directory
    self.__file_name = file_name
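# Hypothetical usage of the constructor above; the enclosing class name
# `PyangParser` is illustrative only, and the paths are examples:
#
#   parser = PyangParser(
#       context_directories=['/var/yang/all_modules'],
#       file_name='ietf-interfaces@2018-02-20.yang',
#       working_directory='/var/yang/tmp/yangvalidator-workdir',
#   )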
def create_tree(name: str, revision: str):
    """
    Return the yang tree representation of the yang module with the
    corresponding module name and revision.

    Arguments:
        :param name     (str) name of the module
        :param revision (str) revision of the module in format YYYY-MM-DD
        :return preformatted HTML with corresponding data
    """
    path_to_yang = '{}/{}@{}.yang'.format(ac.d_save_file_dir, name, revision)
    plugin.plugins = []
    plugin.init([])
    ctx = create_context('{}:{}'.format(ac.d_yang_models_dir, ac.d_save_file_dir))
    ctx.opts.lint_namespace_prefixes = []
    ctx.opts.lint_modulename_prefixes = []
    for p in plugin.plugins:
        p.setup_ctx(ctx)
    try:
        with open(path_to_yang, 'r') as f:
            a = ctx.add_module(path_to_yang, f.read())
    except Exception:
        abort(400, description='File {} was not found'.format(path_to_yang))
    if ctx.opts.tree_path is not None:
        path = ctx.opts.tree_path.split('/')
        if path[0] == '':
            path = path[1:]
    else:
        path = None
    ctx.validate()
    f = io.StringIO()
    emit_tree(ctx, [a], f, ctx.opts.tree_depth, ctx.opts.tree_line_length, path)
    stdout = f.getvalue()
    if stdout == '' and len(ctx.errors) != 0:
        message = 'This yang file contains major errors and therefore the tree cannot be created.'
        return create_bootstrap_danger(message)
    elif stdout != '' and len(ctx.errors) != 0:
        message = 'This yang file contains some errors, but the tree was created.'
        return create_bootstrap_warning(stdout, message)
    elif stdout == '' and len(ctx.errors) == 0:
        return create_bootstrap_info()
    else:
        return '<html><body><pre>{}</pre></body></html>'.format(stdout)
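# Illustrative call, assuming '<ac.d_save_file_dir>/<name>@<revision>.yang'
# exists (the name@revision.yang convention used throughout this codebase):
#
#   html = create_tree('ietf-interfaces', '2018-02-20')
#   # -> '<html><body><pre>module: ietf-interfaces ...</pre></body></html>'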
def context_check_update_from(old_schema: str, new_schema: str, yang_models: str, save_file_dir: str):
    """
    Perform pyang --check-update-from validation using context.

    Arguments:
        :param old_schema    (str) full path to the yang file with the older revision
        :param new_schema    (str) full path to the yang file with the newer revision
        :param yang_models   (str) path to the directory where the YangModels/yang repo is cloned
        :param save_file_dir (str) path to the directory where all the yang files will be saved
    """
    plugin.plugins = []
    plugin.init([])
    ctx = create_context('{}:{}'.format(os.path.abspath(yang_models), save_file_dir))
    ctx.opts.lint_namespace_prefixes = []
    ctx.opts.lint_modulename_prefixes = []
    optParser = optparse.OptionParser('', add_help_option=False)
    for p in plugin.plugins:
        p.setup_ctx(ctx)
        p.add_opts(optParser)
    with open(new_schema, 'r', errors='ignore') as f:
        new_schema_ctx = ctx.add_module(new_schema, f.read())
    ctx.opts.check_update_from = old_schema
    ctx.opts.old_path = [os.path.abspath(yang_models)]
    ctx.opts.verbose = False
    ctx.opts.old_deviation = []
    retry = 5
    while retry:
        try:
            ctx.validate()
            # NOTE: ResourceWarning appears due to the incorrect way pyang opens files for reading
            # ResourceWarning: Enable tracemalloc to get the object allocation traceback
            with warnings.catch_warnings(record=True):
                check_update(ctx, new_schema_ctx)
            break
        except Exception as e:
            retry -= 1
            if retry == 0:
                raise e
    return ctx, new_schema_ctx
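# Sketch of how the result might be consumed (paths are illustrative): after
# the call, any backward-compatibility findings accumulate as
# (position, tag, args) tuples in ctx.errors:
#
#   ctx, new_module = context_check_update_from(
#       '/var/yang/all_modules/ietf-foo@2019-01-01.yang',
#       '/var/yang/all_modules/ietf-foo@2020-06-01.yang',
#       '/var/yang/nonietf/yangmodels/yang',
#       '/var/yang/all_modules')
#   for epos, etag, eargs in ctx.errors:
#       print(epos, etag)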
def validate_yangfile(infilename, workdir):
    logger.info('validating {}'.format(infilename))
    pyang_res = {}
    yanglint_res = {}
    confdc_res = {}
    yangdump_res = {}
    confdc_output = yanglint_output = confdc_stderr = yanglint_stderr = yangdump_output = yangdump_stderr = ''
    infile = os.path.join(workdir, infilename)
    confdc_resfile = str(os.path.join(workdir, infilename) + '.cres')
    confdc_outfile = str(os.path.join(workdir, infilename) + '.cout')
    yanglint_resfile = str(os.path.join(workdir, infilename) + '.lres')
    yanglint_outfile = str(os.path.join(workdir, infilename) + '.lout')
    yangdump_resfile = str(os.path.join(workdir, infilename) + '.ypres')
    yangdump_outfile = str(os.path.join(workdir, infilename) + '.ypout')
    basic_append_p = []
    pyang_command = []
    pyang_command_to_json = []
    confdc_command_to_json = []
    pyang_context_directories = [workdir]
    conf_yangdump_dir = ''
    dep_dir = ''
    libs = ''
    try:
        if os.path.exists(yang_import_dir):
            basic_append_p = ['-p', yang_import_dir]
            pyang_context_directories.append(yang_import_dir)
            yang_import_dir_split = yang_import_dir.split('/')
            yang_import_dir_split[-1] = 'libs-{}'.format(yang_import_dir_split[-1])
            libs = '/'.join(yang_import_dir_split)
            pyang_command_to_json.extend([pyang_cmd, '-p', libs])
            confdc_command_to_json.extend([confdc_cmd, '--yangpath', libs])
        cmds = [pyang_cmd]
        cmds.extend(basic_append_p)
        # Plugins array must be emptied before plugin init
        plugin.plugins = []
        plugin.init([])
        ctx = create_context(':'.join(pyang_context_directories))
        ctx.opts.lint_namespace_prefixes = []
        ctx.opts.lint_modulename_prefixes = []
        if infilename.startswith('ietf'):
            ctx.opts.ietf = True
            pyang_command = cmds + ['-p', workdir, '--ietf', infile]
            pyang_command_to_json.extend(['-p', workdir, '--ietf', infile])
        elif infilename.startswith('mef'):
            ctx.opts.mef = True
            pyang_command = cmds + ['-p', workdir, '--mef', infile]
            pyang_command_to_json.extend(['-p', workdir, '--mef', infile])
        elif infilename.startswith('ieee'):
            ctx.opts.ieee = True
            pyang_command = cmds + ['-p', workdir, '--ieee', infile]
            pyang_command_to_json.extend(['-p', workdir, '--ieee', infile])
        elif infilename.startswith('bbf'):
            ctx.opts.bbf = True
            pyang_command = cmds + ['-p', workdir, '--bbf', infile]
            pyang_command_to_json.extend(['-p', workdir, '--bbf', infile])
        pyang_res['time'] = datetime.now(timezone.utc).isoformat()
        ctx.opts.depend_recurse = True
        ctx.opts.depend_ignore = []
        for p in plugin.plugins:
            p.setup_ctx(ctx)
        m = []
        with open(infile, 'r', encoding='utf-8') as yang_file:
            module = yang_file.read()
            if module is None:
                logger.info('no module provided')
        m = ctx.add_module(infile, module)
        if m is None:
            m = []
        else:
            m = [m]
        ctx.validate()
        f = io.StringIO()
        emit_depend(ctx, m, f)
        dep_dir = copy_dependencies(f)
        pyang_stderr, pyang_output = print_pyang_output(ctx)
        # Data cleanup due to a recursion problem
        restore_statements()
        del ctx
        pyang_res['stdout'] = pyang_output
        pyang_res['stderr'] = pyang_stderr
        pyang_res['name'] = 'pyang'
        pyang_res['version'] = versions['pyang_version']
        pyang_res['code'] = 0 if not pyang_stderr else 1
        pyang_res['command'] = ' '.join(pyang_command_to_json)
        logger.info(' '.join(pyang_command))

        cresfp = open(confdc_resfile, 'w+')
        fxsfile = infile.replace('.yang', '.fxs')
        cmds = [confdc_cmd, '-o', fxsfile, '-W', 'all']
        cmds.extend(['--yangpath', dep_dir])
        cmds.extend(['--yangpath', workdir])
        confdc_command = cmds + ['-c', infile]
        confdc_command_to_json.extend(['-o', fxsfile, '-W', 'all', '-c', infile])
        outfp = open(confdc_outfile, 'w+')
        status = call(confdc_command, stdout=outfp, stderr=cresfp)
        confdc_res['time'] = datetime.now(timezone.utc).isoformat()
        if os.path.isfile(confdc_outfile):
            outfp.seek(0)
            for line in outfp.readlines():
                confdc_output += os.path.basename(line)
        confdc_res['stdout'] = confdc_output
        outfp.close()
        cresfp.seek(0)
        for line in cresfp.readlines():
            confdc_stderr += os.path.basename(line)
        confdc_res['stderr'] = confdc_stderr
        confdc_res['name'] = 'confdc'
        confdc_res['version'] = versions['confdc_version']
        confdc_res['code'] = status
        confdc_res['command'] = ' '.join(confdc_command_to_json)
        logger.info(' '.join(confdc_command))

        yresfp = open(yanglint_resfile, 'w+')
        cmds = [yanglint_cmd, '-i', '-p', workdir]
        cmds.extend(['-p', dep_dir])
        yanglint_command = cmds + ['-V', infile]
        yanglint_command_to_json = [yanglint_cmd, '-i']
        if libs != '':
            yanglint_command_to_json.extend(['-p', libs])
        yanglint_command_to_json.extend(['-p', workdir, '-V', infile])
        outfp = open(yanglint_outfile, 'w+')
        status = call(yanglint_command, stdout=outfp, stderr=yresfp)
        yanglint_res['time'] = datetime.now(timezone.utc).isoformat()
        if os.path.isfile(yanglint_outfile):
            outfp.seek(0)
            for line in outfp.readlines():
                yanglint_output += os.path.basename(line)
        yanglint_res['stdout'] = yanglint_output
        yresfp.seek(0)
        for line in yresfp.readlines():
            yanglint_stderr += line
        outfp.close()
        yresfp.close()
        yanglint_res['stderr'] = yanglint_stderr
        yanglint_res['name'] = 'yanglint'
        yanglint_res['version'] = versions['yanglint_version']
        yanglint_res['code'] = status
        yanglint_res['command'] = ' '.join(yanglint_command_to_json)
        logger.info(' '.join(yanglint_command))

        context = {'path': dep_dir}
        path, filename = os.path.split(
            os.path.dirname(__file__) + '/../templates/yangdump-pro-yangvalidator.conf')
        rendered_config_text = jinja2.Environment(
            loader=jinja2.FileSystemLoader(path or './')).get_template(filename).render(context)
        conf_yangdump_dir = '{}-conf'.format(dep_dir)
        os.mkdir(conf_yangdump_dir)
        yangdump_config_file = '{}/yangdump-pro-yangvalidator.conf'.format(conf_yangdump_dir)
        with open(yangdump_config_file, 'w') as ff:
            ff.write(rendered_config_text)
        ypresfp = open(yangdump_resfile, 'w+')
        cmds = [yangdump_cmd, '--quiet-mode', '--config', yangdump_config_file]
        yangdump_command = cmds + [infile]
        yangdump_command_to_json = yangdump_command
        ypoutfp = open(yangdump_outfile, 'w+')
        status = call(yangdump_command, stdout=ypoutfp, stderr=ypresfp)
        yangdump_res['time'] = datetime.now(timezone.utc).isoformat()
        if os.path.isfile(yangdump_outfile):
            ypoutfp.seek(0)
            for line in ypoutfp.readlines():
                yangdump_output += os.path.basename(line)
        yangdump_res['stdout'] = yangdump_output
        ypresfp.seek(0)
        for line in ypresfp.readlines():
            yangdump_stderr += line
        ypoutfp.close()
        ypresfp.close()
        yangdump_res['stderr'] = yangdump_stderr
        yangdump_res['name'] = 'yangdump-pro'
        yangdump_res['version'] = versions['yangdump_version']
        yangdump_res['code'] = status
        yangdump_res['command'] = ' '.join(yangdump_command_to_json)
        logger.info(' '.join(yangdump_command))
    except Exception as e:
        logger.error('Error: {}'.format(e))
    finally:
        logger.info('Removing temporary directories')
        if os.path.exists(dep_dir):
            shutil.rmtree(dep_dir)
        if os.path.exists(conf_yangdump_dir):
            shutil.rmtree(conf_yangdump_dir)
    return pyang_res, confdc_res, yanglint_res, yangdump_res
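# Each of the four validators fills a dict of the same shape, so callers can
# treat the results uniformly; values below are illustrative:
#
#   pyang_res, confdc_res, yanglint_res, yangdump_res = validate_yangfile(
#       'ietf-interfaces@2018-02-20.yang', '/var/yang/tmp/workdir')
#   # pyang_res -> {'time': '...', 'stdout': '...', 'stderr': '',
#   #               'name': 'pyang', 'version': '...', 'code': 0,
#   #               'command': 'pyang -p ... --ietf ...'}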
def run():
    usage = """%prog [options] [<filename>...]

Validates the YANG module in <filename> (or stdin), and all its dependencies."""

    plugindirs = []
    # check for --plugindir
    idx = 1
    while '--plugindir' in sys.argv[idx:]:
        idx = idx + sys.argv[idx:].index('--plugindir')
        plugindirs.append(sys.argv[idx + 1])
        idx = idx + 1
    plugin.init(plugindirs)

    fmts = {}
    xforms = {}
    for p in plugin.plugins:
        p.add_output_format(fmts)
        p.add_transform(xforms)

    optlist = [
        # use capitalized versions of std options help and version
        optparse.make_option("-h", "--help",
                             action="help",
                             help="Show this help message and exit"),
        optparse.make_option("-v", "--version",
                             action="version",
                             help="Show version number and exit"),
        optparse.make_option("-V", "--verbose",
                             action="store_true"),
        optparse.make_option("-e", "--list-errors",
                             dest="list_errors",
                             action="store_true",
                             help="Print a listing of all error and warning "
                                  "codes and exit."),
        optparse.make_option("--print-error-code",
                             dest="print_error_code",
                             action="store_true",
                             help="On errors, print the error code instead "
                                  "of the error message."),
        optparse.make_option("-W",
                             dest="warnings",
                             action="append",
                             default=[],
                             metavar="WARNING",
                             help="If WARNING is 'error', treat all warnings "
                                  "as errors, except any listed WARNING. "
                                  "If WARNING is 'none', do not report any "
                                  "warnings."),
        optparse.make_option("-E",
                             dest="errors",
                             action="append",
                             default=[],
                             metavar="WARNING",
                             help="Treat each WARNING as an error. For a "
                                  "list of warnings, use --list-errors."),
        optparse.make_option("--ignore-error",
                             dest="ignore_error_tags",
                             action="append",
                             default=[],
                             metavar="ERROR",
                             help="Ignore ERROR. Use with care. For a "
                                  "list of errors, use --list-errors."),
        optparse.make_option("--ignore-errors",
                             dest="ignore_errors",
                             action="store_true",
                             help="Ignore all errors. Use with care."),
        optparse.make_option("--canonical",
                             dest="canonical",
                             action="store_true",
                             help="Validate the module(s) according to the "
                                  "canonical YANG order."),
        optparse.make_option("--max-line-length",
                             type="int",
                             dest="max_line_len"),
        optparse.make_option("--max-identifier-length",
                             type="int",
                             dest="max_identifier_len"),
        optparse.make_option("-t", "--transform",
                             dest="transforms",
                             default=[],
                             action="append",
                             help="Apply transform TRANSFORM. Supported "
                                  "transforms are: " + ', '.join(list(xforms.keys()))),
        optparse.make_option("-f", "--format",
                             dest="format",
                             help="Convert to FORMAT. Supported formats "
                                  "are: " + ', '.join(list(fmts.keys()))),
        optparse.make_option("-o", "--output",
                             dest="outfile",
                             help="Write the output to OUTFILE instead "
                                  "of stdout."),
        optparse.make_option("-F", "--features",
                             metavar="FEATURES",
                             dest="features",
                             default=[],
                             action="append",
                             help="Features to support, default all. "
                                  "<modname>:[<feature>,]*"),
        optparse.make_option("", "--max-status",
                             metavar="MAXSTATUS",
                             dest="max_status",
                             help="Max status to support, one of: "
                                  "current, deprecated, obsolete"),
        optparse.make_option("", "--deviation-module",
                             metavar="DEVIATION",
                             dest="deviations",
                             default=[],
                             action="append",
                             help="Deviation module"),
        optparse.make_option("-p", "--path",
                             dest="path",
                             default=[],
                             action="append",
                             help=os.pathsep + "-separated search path for yin"
                                  " and yang modules"),
        optparse.make_option("--plugindir",
                             dest="plugindir",
                             help="Load pyang plugins from PLUGINDIR"),
        optparse.make_option("--strict",
                             dest="strict",
                             action="store_true",
                             help="Force strict YANG compliance."),
        optparse.make_option("--lax-quote-checks",
                             dest="lax_quote_checks",
                             action="store_true",
                             help="Lax check of backslash in quoted strings."),
        optparse.make_option("--lax-xpath-checks",
                             dest="lax_xpath_checks",
                             action="store_true",
                             help="Lax check of XPath expressions."),
        optparse.make_option("--trim-yin",
                             dest="trim_yin",
                             action="store_true",
                             help="In YIN input modules, trim whitespace "
                                  "in textual arguments."),
        optparse.make_option("-L", "--hello",
                             dest="hello",
                             action="store_true",
                             help="Filename of a server's hello message is "
                                  "given instead of module filename(s)."),
        optparse.make_option("--keep-comments",
                             dest="keep_comments",
                             action="store_true",
                             help="Pyang will not discard comments; "
                                  "has effect if the output plugin can "
                                  "handle comments."),
        optparse.make_option("--no-path-recurse",
                             dest="no_path_recurse",
                             action="store_true",
                             help="Do not recurse into directories in the "
                                  "yang path."),
    ]

    optparser = optparse.OptionParser(usage, add_help_option=False)
    optparser.version = '%prog ' + pyang.__version__
    optparser.add_options(optlist)
    for p in plugin.plugins:
        p.add_opts(optparser)
    (o, args) = optparser.parse_args()

    if o.list_errors:
        for tag in error.error_codes:
            (level, fmt) = error.error_codes[tag]
            if error.is_warning(level):
                print("Warning: %s" % tag)
            elif error.allow_warning(level):
                print("Minor Error: %s" % tag)
            else:
                print("Error: %s" % tag)
            print("Message: %s" % fmt)
            print("")
        sys.exit(0)

    if o.outfile is not None and o.format is None:
        sys.stderr.write("no format specified\n")
        sys.exit(1)

    # patch the error spec so that -W errors are treated as warnings
    for w in o.warnings:
        if w in error.error_codes:
            (level, wstr) = error.error_codes[w]
            if error.allow_warning(level):
                error.error_codes[w] = (4, wstr)

    filenames = args

    # Parse hello if present
    if o.hello:
        if len(filenames) > 1:
            sys.stderr.write("multiple hello files given\n")
            sys.exit(1)
        if filenames:
            try:
                fd = open(filenames[0], "rb")
            except IOError as ex:
                sys.stderr.write("error %s: %s\n" % (filenames[0], str(ex)))
                sys.exit(1)
        elif sys.version < "3":
            fd = sys.stdin
        else:
            fd = sys.stdin.buffer
        hel = hello.HelloParser().parse(fd)

    path = os.pathsep.join(o.path)

    # add standard search path
    if len(o.path) == 0:
        path = "."
    else:
        path += os.pathsep + "."

    repos = pyang.FileRepository(path, no_path_recurse=o.no_path_recurse,
                                 verbose=o.verbose)
    ctx = pyang.Context(repos)
    ctx.opts = o
    ctx.canonical = o.canonical
    ctx.max_line_len = o.max_line_len
    ctx.max_identifier_len = o.max_identifier_len
    ctx.trim_yin = o.trim_yin
    ctx.lax_xpath_checks = o.lax_xpath_checks
    ctx.lax_quote_checks = o.lax_quote_checks
    ctx.strict = o.strict
    ctx.max_status = o.max_status

    # make a map of features to support, per module
    if o.hello:
        for (mn, rev) in hel.yang_modules():
            ctx.features[mn] = hel.get_features(mn)
    for f in ctx.opts.features:
        (modulename, features) = parse_features_string(f)
        ctx.features[modulename] = features

    for p in plugin.plugins:
        p.setup_ctx(ctx)

    xform_objs = []
    for transform in o.transforms:
        if transform not in xforms:
            sys.stderr.write("unsupported transform '%s'\n" % transform)
        else:
            xform_obj = xforms[transform]
            xform_obj.setup_xform(ctx)
            xform_objs.append(xform_obj)
    if len(xform_objs) != len(o.transforms):
        sys.exit(1)

    if o.format is not None:
        if o.format not in fmts:
            sys.stderr.write("unsupported format '%s'\n" % o.format)
            sys.exit(1)
        emit_obj = fmts[o.format]
        if o.keep_comments and emit_obj.handle_comments:
            ctx.keep_comments = True
        emit_obj.setup_fmt(ctx)
    else:
        emit_obj = None

    xform_and_emit_objs = xform_objs[:]
    if emit_obj is not None:
        xform_and_emit_objs.append(emit_obj)

    for p in plugin.plugins:
        p.pre_load_modules(ctx)

    exit_code = 0
    modules = []

    if o.hello:
        ctx.capabilities = hel.registered_capabilities()
        for (mn, rev) in hel.yang_modules():
            mod = ctx.search_module(0, mn, rev)
            if mod is None:
                emarg = mn
                if rev:
                    emarg += "@" + rev
                sys.stderr.write(
                    "module '%s' specified in hello not found.\n" % emarg)
                sys.exit(1)
            modules.append(mod)
    else:
        if len(filenames) == 0:
            text = sys.stdin.read()
            module = ctx.add_module('<stdin>', text)
            if module is None:
                exit_code = 1
            else:
                modules.append(module)
        if (len(filenames) > 1 and
                emit_obj is not None and
                not emit_obj.multiple_modules):
            sys.stderr.write("too many files to convert\n")
            sys.exit(1)
        for filename in filenames:
            try:
                fd = io.open(filename, "r", encoding="utf-8")
                text = fd.read()
                if o.verbose:
                    util.report_file_read(filename, "(CL)")
            except IOError as ex:
                sys.stderr.write("error %s: %s\n" % (filename, str(ex)))
                sys.exit(1)
            except UnicodeDecodeError as ex:
                s = str(ex).replace('utf-8', 'utf8')
                sys.stderr.write("%s: unicode error: %s\n" % (filename, s))
                sys.exit(1)
            m = syntax.re_filename.search(filename)
            ctx.yin_module_map = {}
            if m is not None:
                (name, rev, format) = m.groups()
                name = os.path.basename(name)
                module = ctx.add_module(filename, text, format, name, rev,
                                        expect_failure_error=False)
            else:
                module = ctx.add_module(filename, text)
            if module is None:
                exit_code = 1
            else:
                modules.append(module)

    modulenames = []
    for m in modules:
        modulenames.append(m.arg)
        for s in m.search('include'):
            modulenames.append(s.arg)

    # apply deviations
    for filename in ctx.opts.deviations:
        try:
            fd = io.open(filename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (filename, str(ex)))
            sys.exit(1)
        except UnicodeDecodeError as ex:
            s = str(ex).replace('utf-8', 'utf8')
            sys.stderr.write("%s: unicode error: %s\n" % (filename, s))
            sys.exit(1)
        m = ctx.add_module(filename, text)
        if m is not None:
            ctx.deviation_modules.append(m)

    for p in plugin.plugins:
        p.pre_validate_ctx(ctx, modules)
    if len(xform_and_emit_objs) > 0 and len(modules) > 0:
        for obj in xform_and_emit_objs:
            obj.pre_validate(ctx, modules)

    ctx.validate()
    for m in modules:
        m.prune()

    # transform modules
    if len(xform_objs) > 0 and len(modules) > 0:
        for xform_obj in xform_objs:
            try:
                if not xform_obj.transform(ctx, modules):
                    for module in modules:
                        module.i_is_validated = False
                        statements.validate_module(ctx, module)
            except error.TransformError as e:
                if e.msg != "":
                    sys.stderr.write(e.msg + '\n')
                sys.exit(e.exit_code)

    # verify the given features
    for m in modules:
        if m.arg in ctx.features:
            for f in ctx.features[m.arg]:
                if f not in m.i_features:
                    sys.stderr.write("unknown feature %s in module %s\n" %
                                     (f, m.arg))
                    sys.exit(1)

    if len(xform_and_emit_objs) > 0 and len(modules) > 0:
        for obj in xform_and_emit_objs:
            obj.post_validate(ctx, modules)
    for p in plugin.plugins:
        p.post_validate_ctx(ctx, modules)

    def keyfun(e):
        if e[0].ref == filenames[0]:
            return 0
        else:
            return 1

    ctx.errors.sort(key=lambda e: (e[0].ref, e[0].line))
    if len(filenames) > 0:
        # first print errors for the first filename given
        ctx.errors.sort(key=keyfun)

    if o.ignore_errors:
        ctx.errors = []

    for (epos, etag, eargs) in ctx.errors:
        if etag in o.ignore_error_tags:
            continue
        if (not ctx.implicit_errors and
                hasattr(epos.top, 'i_modulename') and
                epos.top.arg not in modulenames and
                epos.top.i_modulename not in modulenames and
                epos.ref not in filenames):
            # this module was added implicitly (by import); skip this error
            # the code includes submodules
            continue
        elevel = error.err_level(etag)
        if error.is_warning(elevel) and etag not in o.errors:
            kind = "warning"
            if 'error' in o.warnings and etag not in o.warnings:
                kind = "error"
                exit_code = 1
            elif 'none' in o.warnings:
                continue
        else:
            kind = "error"
            exit_code = 1
        if o.print_error_code:
            sys.stderr.write(str(epos) + ': %s: %s\n' % (kind, etag))
        else:
            sys.stderr.write(str(epos) + ': %s: ' % kind +
                             error.err_to_str(etag, eargs) + '\n')

    if emit_obj is not None and len(modules) > 0:
        tmpfile = None
        if o.outfile is None:
            if sys.version < '3':
                fd = codecs.getwriter('utf8')(sys.stdout)
            else:
                fd = sys.stdout
        else:
            tmpfile = o.outfile + ".tmp"
            if sys.version < '3':
                fd = codecs.open(tmpfile, "w+", encoding="utf-8")
            else:
                fd = io.open(tmpfile, "w+", encoding="utf-8")
        try:
            # invoke the selected output plugin's emit implementation
            emit_obj.emit(ctx, modules, fd)
        except error.EmitError as e:
            if e.msg != "":
                sys.stderr.write(e.msg + '\n')
            if tmpfile is not None:
                fd.close()
                os.remove(tmpfile)
            sys.exit(e.exit_code)
        except:
            if tmpfile is not None:
                fd.close()
                os.remove(tmpfile)
            raise
        if tmpfile is not None:
            fd.close()
            os.rename(tmpfile, o.outfile)

    sys.exit(exit_code)
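# The -F/--features option parsed above takes "<modname>:[<feature>,]*"; an
# illustrative invocation and the tuple parse_features_string() yields for it:
#
#   pyang -F ietf-interfaces:arbitrary-names,pre-provisioning -f tree ietf-interfaces.yang
#
#   parse_features_string('ietf-interfaces:arbitrary-names,pre-provisioning')
#   # -> ('ietf-interfaces', ['arbitrary-names', 'pre-provisioning'])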
def resolve_tree_type(self, all_modules):
    def is_openconfig(rows, output):
        count_config = output.count('+-- config')
        count_state = output.count('+-- state')
        if count_config != count_state:
            return False
        row_number = 0
        skip = []
        for row in rows:
            if 'x--' in row or 'o--' in row:
                continue
            if '' == row.strip(' '):
                break
            if '+--rw' in row and row_number != 0 \
                    and row_number not in skip and '[' not in row and \
                    (len(row.replace('|', '').strip(' ').split(' ')) != 2 or '(' in row):
                if '->' in row and 'config' in row.split('->')[1] \
                        and '+--rw config' not in rows[row_number - 1]:
                    row_number += 1
                    continue
                if '+--rw config' not in rows[row_number - 1]:
                    if 'augment' in rows[row_number - 1]:
                        if not rows[row_number - 1].endswith(':config:'):
                            return False
                    else:
                        return False
                length_before = set([len(row.split('+--')[0])])
                skip = []
                for x in range(row_number, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if len(rows[x].split('+--')[0]) not in length_before:
                        if (len(rows[x].replace('|', '').strip(' ').split(' ')) != 2
                                and '[' not in rows[x]) \
                                or '+--:' in rows[x] or '(' in rows[x]:
                            length_before.add(len(rows[x].split('+--')[0]))
                        else:
                            break
                    if '+--ro' in rows[x]:
                        return False
                    duplicate = rows[x].replace('+--rw', '+--ro').split('+--')[1]
                    if duplicate.replace(' ', '') not in output.replace(' ', ''):
                        return False
                    skip.append(x)
            if '+--ro' in row and row_number != 0 and row_number not in skip and '[' not in row and \
                    (len(row.replace('|', '').strip(' ').split(' ')) != 2 or '(' in row):
                if '->' in row and 'state' in row.split('->')[1] \
                        and '+--ro state' not in rows[row_number - 1]:
                    row_number += 1
                    continue
                if '+--ro state' not in rows[row_number - 1]:
                    if 'augment' in rows[row_number - 1]:
                        if not rows[row_number - 1].endswith(':state:'):
                            return False
                    else:
                        return False
                length_before = len(row.split('+--')[0])
                skip = []
                for x in range(row_number, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if len(rows[x].split('+--')[0]) < length_before:
                        break
                    if '+--rw' in rows[x]:
                        return False
                    skip.append(x)
            row_number += 1
        return True

    def is_combined(rows, output):
        for row in rows:
            if row.endswith('-state') and not ('x--' in row or 'o--' in row):
                return False
        next_obsolete_or_deprecated = False
        for row in rows:
            if next_obsolete_or_deprecated:
                if 'x--' in row or 'o--' in row:
                    next_obsolete_or_deprecated = False
                else:
                    return False
            if 'x--' in row or 'o--' in row:
                continue
            if '+--rw config' == row.replace('|', '').strip(' ') \
                    or '+--ro state' == row.replace('|', '').strip(' '):
                return False
            if len(row.split('+--')[0]) == 4:
                if '-state' in row and '+--ro' in row:
                    return False
            if 'augment' in row and len(row.split('augment')[0]) == 2:
                part = row.strip(' ').split('/')[1]
                if '-state' in part:
                    next_obsolete_or_deprecated = True
                part = row.strip(' ').split('/')[-1]
                if ':state:' in part or '/state:' in part \
                        or ':config:' in part or '/config:' in part:
                    next_obsolete_or_deprecated = True
        return True

    def is_transitional(rows, output):
        if output.split('\n')[1].endswith('-state') and output.split('\n')[0].endswith('-state'):
            if '+--rw' in output:
                return False
            if output.startswith('\n'):
                name_of_module = output.split('\n')[1].split(': ')[1]
            else:
                name_of_module = output.split('\n')[0].split(': ')[1]
            name_of_module = name_of_module.split('-state')[0]
            corresponding_nmda_file = self._find_file(name_of_module)
            if corresponding_nmda_file:
                name = corresponding_nmda_file.split('/')[-1].split('.')[0]
                revision = name.split('@')[-1]
                name = name.split('@')[0]
                if '{}@{}'.format(name, revision) in self._trees:
                    stdout = self._trees[name][revision]
                    pyang_list_of_rows = stdout.split('\n')[2:]
                else:
                    plugin.plugins = []
                    plugin.init([])
                    ctx = create_context('{}:{}'.format(
                        os.path.abspath(self._yang_models), self._save_file_dir))
                    ctx.opts.lint_namespace_prefixes = []
                    ctx.opts.lint_modulename_prefixes = []
                    for p in plugin.plugins:
                        p.setup_ctx(ctx)
                    with open(corresponding_nmda_file, 'r') as f:
                        a = ctx.add_module(corresponding_nmda_file, f.read())
                    if ctx.opts.tree_path is not None:
                        path = ctx.opts.tree_path.split('/')
                        if path[0] == '':
                            path = path[1:]
                    else:
                        path = None
                    ctx.validate()
                    try:
                        f = io.StringIO()
                        emit_tree(ctx, [a], f, ctx.opts.tree_depth,
                                  ctx.opts.tree_line_length, path)
                        stdout = f.getvalue()
                    except Exception:
                        stdout = ''
                    pyang_list_of_rows = stdout.split('\n')[2:]
                    if len(ctx.errors) != 0 and len(stdout) == 0:
                        return False
                if stdout == '':
                    return False
                for x in range(0, len(rows)):
                    if 'x--' in rows[x] or 'o--' in rows[x]:
                        continue
                    if rows[x].strip(' ') == '':
                        break
                    if len(rows[x].split('+--')[0]) == 4:
                        if '-state' in rows[x]:
                            return False
                    if len(rows[x].split('augment')[0]) == 2:
                        part = rows[x].strip(' ').split('/')[1]
                        if '-state' in part:
                            return False
                    if '+--ro ' in rows[x]:
                        leaf = rows[x].split('+--ro ')[1].split(' ')[0].split('?')[0]
                        for y in range(0, len(pyang_list_of_rows)):
                            if leaf in pyang_list_of_rows[y]:
                                break
                        else:
                            return False
                return True
            else:
                return False
        else:
            return False

    def is_split(rows, output):
        failed = False
        row_num = 0
        if output.split('\n')[1].endswith('-state'):
            return False
        for row in rows:
            if 'x--' in row or 'o--' in row:
                continue
            if '+--rw config' == row.replace('|', '').strip(' ') \
                    or '+--ro state' == row.replace('|', '').strip(' '):
                return False
            if 'augment' in row:
                part = row.strip(' ').split('/')[-1]
                if ':state:' in part or '/state:' in part \
                        or ':config:' in part or '/config:' in part:
                    return False
        for row in rows:
            if 'x--' in row or 'o--' in row:
                continue
            if row == '':
                break
            if (len(row.split('+--')[0]) == 4 and 'augment' not in rows[row_num - 1]) \
                    or len(row.split('augment')[0]) == 2:
                if '-state' in row:
                    if 'augment' in row:
                        part = row.strip(' ').split('/')[1]
                        if '-state' not in part:
                            row_num += 1
                            continue
                    for x in range(row_num + 1, len(rows)):
                        if 'x--' in rows[x] or 'o--' in rows[x]:
                            continue
                        if rows[x].strip(' ') == '' \
                                or (len(rows[x].split('+--')[0]) == 4
                                    and 'augment' not in rows[row_num - 1]) \
                                or len(row.split('augment')[0]) == 2:
                            break
                        if '+--rw' in rows[x]:
                            failed = True
                            break
            row_num += 1
        if failed:
            return False
        else:
            return True

    for x, module in enumerate(all_modules.get('module', []), start=1):
        name = module['name']
        revision = module['revision']
        name_revision = '{}@{}'.format(name, revision)
        self._path = '{}/{}.yang'.format(self._save_file_dir, name_revision)
        yang_file_exists = self._check_schema_file(module)
        is_latest_revision = self.check_if_latest_revision(module)
        if not yang_file_exists:
            LOGGER.error('Skipping module: {}'.format(name_revision))
            continue
        LOGGER.info('Searching tree-type for {}. {} out of {}'.format(
            name_revision, x, len(all_modules['module'])))
        if revision in self._trees[name]:
            stdout = self._trees[name][revision]
        else:
            plugin.plugins = []
            plugin.init([])
            ctx = create_context('{}:{}'.format(
                os.path.abspath(self._yang_models), self._save_file_dir))
            ctx.opts.lint_namespace_prefixes = []
            ctx.opts.lint_modulename_prefixes = []
            for p in plugin.plugins:
                p.setup_ctx(ctx)
            with open(self._path, 'r', errors='ignore') as f:
                a = ctx.add_module(self._path, f.read())
            if a is None:
                LOGGER.debug(
                    'Could not use pyang to generate tree because of errors on module {}'
                    .format(self._path))
                module['tree-type'] = 'unclassified'
                if revision not in self.new_modules[name]:
                    self.new_modules[name][revision] = module
                else:
                    self.new_modules[name][revision]['tree-type'] = 'unclassified'
                continue
            if ctx.opts.tree_path is not None:
                path = ctx.opts.tree_path.split('/')
                if path[0] == '':
                    path = path[1:]
            else:
                path = None
            retry = 5
            while retry:
                try:
                    ctx.validate()
                    break
                except Exception as e:
                    retry -= 1
                    if retry == 0:
                        raise e
            try:
                f = io.StringIO()
                emit_tree(ctx, [a], f, ctx.opts.tree_depth,
                          ctx.opts.tree_line_length, path)
                stdout = f.getvalue()
                self._trees[name][revision] = stdout
            except Exception:
                module['tree-type'] = 'not-applicable'
                LOGGER.exception('not-applicable tree created')
                continue
        if stdout == '':
            module['tree-type'] = 'not-applicable'
        else:
            if stdout.startswith('\n'):
                pyang_list_of_rows = stdout.split('\n')[2:]
            else:
                pyang_list_of_rows = stdout.split('\n')[1:]
            if 'submodule' == module['module-type']:
                LOGGER.debug('Module {} is a submodule'.format(self._path))
                module['tree-type'] = 'not-applicable'
            elif is_latest_revision and is_combined(pyang_list_of_rows, stdout):
                module['tree-type'] = 'nmda-compatible'
            elif is_split(pyang_list_of_rows, stdout):
                module['tree-type'] = 'split'
            elif is_openconfig(pyang_list_of_rows, stdout):
                module['tree-type'] = 'openconfig'
            elif is_transitional(pyang_list_of_rows, stdout):
                module['tree-type'] = 'transitional-extra'
            else:
                module['tree-type'] = 'unclassified'
        LOGGER.debug('tree type for module {} is {}'.format(
            module['name'], module['tree-type']))
        if (revision not in self._existing_modules_dict[name]
                or self._existing_modules_dict[name][revision].get('tree-type') != module['tree-type']):
            LOGGER.info('tree-type {} vs {} for module {}@{}'.format(
                self._existing_modules_dict[name].get(revision, {}).get('tree-type'),
                module['tree-type'], module['name'], module['revision']))
            if revision not in self.new_modules[name]:
                self.new_modules[name][revision] = module
            else:
                self.new_modules[name][revision]['tree-type'] = module['tree-type']
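# resolve_tree_type() buckets every module into one of the tree types used
# downstream: 'split', 'nmda-compatible', 'openconfig', 'transitional-extra',
# 'not-applicable' or 'unclassified'. A hypothetical resulting entry:
#
#   {'name': 'ietf-interfaces', 'revision': '2018-02-20',
#    'module-type': 'module', 'tree-type': 'nmda-compatible'}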
def tree_module_revision(module_name, revision):
    """
    Generate the yang tree view of the module.

    :param module_name: Module for which we are generating the tree.
    :param revision:    Revision of the module.
    :return: json response with the yang tree
    """
    response = {}
    alerts = []
    jstree_json = {}
    nmodule = os.path.basename(module_name)
    if nmodule != module_name:
        abort(400, description='Invalid module name specified')
    else:
        revisions, organization = get_modules_revision_organization(module_name, revision)
        if len(revisions) == 0:
            abort(404, description='Provided module does not exist')
        if revision is None:
            # get the latest revision of the provided module
            revision = revisions[0]
        path_to_yang = '{}/{}@{}.yang'.format(ac.d_save_file_dir, module_name, revision)
        plugin.plugins = []
        plugin.init([])
        ctx = create_context('{}'.format(ac.d_yang_models_dir))
        ctx.opts.lint_namespace_prefixes = []
        ctx.opts.lint_modulename_prefixes = []
        for p in plugin.plugins:
            p.setup_ctx(ctx)
        try:
            with open(path_to_yang, 'r') as f:
                module_context = ctx.add_module(path_to_yang, f.read())
            assert module_context
        except Exception:
            msg = 'File {} was not found'.format(path_to_yang)
            bp.LOGGER.exception(msg)
            abort(400, description=msg)
        imports_includes = []
        imports_includes.extend(module_context.search('import'))
        imports_includes.extend(module_context.search('include'))
        import_include_map = {}
        for imp_inc in imports_includes:
            prefix = imp_inc.search('prefix')
            if len(prefix) == 1:
                prefix = prefix[0].arg
            else:
                prefix = 'None'
            import_include_map[prefix] = imp_inc.arg
        json_ytree = ac.d_json_ytree
        yang_tree_file_path = '{}/{}@{}.json'.format(json_ytree, module_name, revision)
        response['maturity'] = get_module_data(
            '{}@{}/{}'.format(module_name, revision, organization)).get('maturity-level', '').upper()
        response['import-include'] = import_include_map

        if os.path.isfile(yang_tree_file_path):
            try:
                with open(yang_tree_file_path) as f:
                    json_tree = json.load(f)
                if json_tree is None:
                    alerts.append('Failed to decode JSON data: ')
                else:
                    response['namespace'] = json_tree.get('namespace', '')
                    response['prefix'] = json_tree.get('prefix', '')
                    import_include_map[response['prefix']] = module_name
                    data_nodes = build_tree(json_tree, module_name, import_include_map)
                    jstree_json = dict()
                    jstree_json['data'] = [data_nodes]
                    if json_tree.get('rpcs') is not None:
                        rpcs = dict()
                        rpcs['name'] = json_tree['prefix'] + ':rpcs'
                        rpcs['children'] = json_tree['rpcs']
                        jstree_json['data'].append(
                            build_tree(rpcs, module_name, import_include_map))
                    if json_tree.get('notifications') is not None:
                        notifs = dict()
                        notifs['name'] = json_tree['prefix'] + ':notifs'
                        notifs['children'] = json_tree['notifications']
                        jstree_json['data'].append(
                            build_tree(notifs, module_name, import_include_map))
                    if json_tree.get('augments') is not None:
                        augments = dict()
                        augments['name'] = json_tree['prefix'] + ':augments'
                        augments['children'] = []
                        for aug in json_tree.get('augments'):
                            aug_info = dict()
                            aug_info['name'] = aug['augment_path']
                            aug_info['children'] = aug['augment_children']
                            augments['children'].append(aug_info)
                        jstree_json['data'].append(
                            build_tree(augments, module_name, import_include_map, augments=True))
            except Exception as e:
                alerts.append('Failed to read YANG tree data for {}@{}/{}, {}'.format(
                    module_name, revision, organization, e))
        else:
            alerts.append('YANG Tree data does not exist for {}@{}/{}'.format(
                module_name, revision, organization))
        if not jstree_json:
            response['jstree_json'] = dict()
            alerts.append('JSON tree could not be generated')
        else:
            response['jstree_json'] = jstree_json
        response['module'] = '{}@{}'.format(module_name, revision)
        response['warning'] = alerts
    return make_response(jsonify(response), 200)
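# Shape of a successful JSON response from tree_module_revision (field values
# illustrative; the exact route depends on how the blueprint registers it):
#
#   {
#     "maturity": "RATIFIED",
#     "import-include": {"yang": "ietf-yang-types"},
#     "namespace": "urn:ietf:params:xml:ns:yang:ietf-interfaces",
#     "prefix": "if",
#     "jstree_json": {"data": ["..."]},
#     "module": "ietf-interfaces@2018-02-20",
#     "warning": []
#   }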
                    help='Set path to config file')
args = parser.parse_args()
config_path = args.config_path
config = create_config(config_path)
save_file_dir = config.get('Directory-Section', 'save-file-dir')
json_ytree = config.get('Directory-Section', 'json-ytree')
jsons = glob.glob('{}/*.json'.format(json_ytree))
num_of_jsons = len(jsons)
i = 0
for i, jsn in enumerate(jsons):
    print('tree {} {} out of {}'.format(jsn, i + 1, num_of_jsons))
    file_stat = Path(jsn).stat()
    if file_stat.st_size != 0:
        continue
    plugin.init([])
    ctx = create_context(save_file_dir)
    ctx.opts.lint_namespace_prefixes = []
    ctx.opts.lint_modulename_prefixes = []
    for p in plugin.plugins:
        p.setup_ctx(ctx)
    module = jsn.split('/')[-1]
    name_revision = module.split('@')
    name = name_revision[0]
    revision = name_revision[1].split('.')[0]
    all_modules_path = '{}/{}@{}.yang'.format(save_file_dir, name, revision)
    parsed_module = None
    try:
        with open(all_modules_path, 'r') as f:
            parsed_module = ctx.add_module(all_modules_path, f.read())
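# The loop above relies on the name@revision file naming convention, e.g.
# (illustrative): 'ietf-interfaces@2018-02-20.json' yields
# name='ietf-interfaces', revision='2018-02-20', and the yang source is read
# from '<save_file_dir>/ietf-interfaces@2018-02-20.yang'.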
def build_and_populate(lock_exception, lock_write_rest, lock_remove_list,
                       lock_remove_elk, lock_index_elk, es, modules, LOGGER,
                       save_file_dir, threads, log_file, failed_changes_dir,
                       temp_dir, ytree_dir, process_name):
    x = 0
    for module in modules:
        try:
            with lock_remove_list:
                modules_copy.remove(module)
                x += 1
            LOGGER.info('yindex on module {}. module {} out of {}, process {}'.format(
                module.split('/')[-1], x, len(modules), process_name))
            # split to module with path and organization [email protected]:cisco
            m_parts = module.split(':')
            m = m_parts[0]
            plugin.init([])
            ctx = create_context('{}'.format(save_file_dir))
            ctx.opts.lint_namespace_prefixes = []
            ctx.opts.lint_modulename_prefixes = []
            for p in plugin.plugins:
                p.setup_ctx(ctx)
            with open(m, 'r') as f:
                parsed_module = ctx.add_module(m, f.read())
            ctx.validate()
            if parsed_module is None:
                raise Exception('Unable to pyang parse module')
            f = io.StringIO()
            ctx.opts.print_revision = True
            emit_name(ctx, [parsed_module], f)
            name_revision = f.getvalue().strip()
            mods = [parsed_module]
            find_submodules(ctx, mods, parsed_module)
            f = io.StringIO()
            ctx.opts.yang_index_make_module_table = True
            ctx.opts.yang_index_no_schema = True
            indexerPlugin = IndexerPlugin()
            indexerPlugin.emit(ctx, [parsed_module], f)
            yindexes = json.loads(f.getvalue())

            name_revision = name_revision.split('@')
            if len(name_revision) > 1:
                name = name_revision[0]
                revision = name_revision[1].split(' ')[0]
            else:
                name = name_revision[0]
                revision = '1970-01-01'
            if 'belongs-to' in name:
                name = name.split(' ')[0]
            try:
                dateutil.parser.parse(revision)
            except Exception:
                if revision[-2:] == '29' and revision[-5:-3] == '02':
                    revision = revision.replace('02-29', '02-28')
                else:
                    revision = '1970-01-01'
            rev_parts = revision.split('-')
            try:
                revision = datetime(int(rev_parts[0]), int(rev_parts[1]),
                                    int(rev_parts[2])).date().isoformat()
            except Exception:
                revision = '1970-01-01'

            retry = 3
            while retry > 0:
                try:
                    for m in mods:
                        n = m.arg
                        rev = get_latest_revision(m)
                        if rev == 'unknown':
                            r = '1970-01-01'
                        else:
                            r = rev
                        try:
                            dateutil.parser.parse(r)
                        except Exception:
                            if r[-2:] == '29' and r[-5:-3] == '02':
                                r = r.replace('02-29', '02-28')
                            else:
                                r = '1970-01-01'
                        rev_parts = r.split('-')
                        r = datetime(int(rev_parts[0]), int(rev_parts[1]),
                                     int(rev_parts[2])).date().isoformat()
                        query = {
                            'query': {
                                'bool': {
                                    'must': [{
                                        'match_phrase': {
                                            'module.keyword': {
                                                'query': n
                                            }
                                        }
                                    }, {
                                        'match_phrase': {
                                            'revision': {
                                                'query': r
                                            }
                                        }
                                    }]
                                }
                            }
                        }
                        while lock_index_elk.locked():
                            sleep(1)
                        with lock_remove_elk:
                            try:
                                LOGGER.debug('deleting data from yindex index, process {}'
                                             .format(process_name))
                                es.delete_by_query(index='yindex', body=query, doc_type='modules',
                                                   conflicts='proceed', request_timeout=40)
                                LOGGER.debug('deleting data from modules index, process {}'
                                             .format(process_name))
                                total = es.delete_by_query(index='modules', body=query,
                                                           doc_type='modules', conflicts='proceed',
                                                           request_timeout=40)['deleted']
                                if total > 1:
                                    LOGGER.info('{}@{}, process {}'.format(name, revision, process_name))
                            except NotFoundError:
                                pass
                    while lock_remove_elk.locked():
                        sleep(1)
                    if not lock_index_elk.locked():
                        lock_index_elk.acquire()
                    for key in yindexes:
                        j = -1
                        for j in range(0, int(len(yindexes[key]) / 30)):
                            LOGGER.debug('pushing new data to yindex {} of {} process {}'.format(
                                j, int(len(yindexes[key]) / 30), process_name))
                            for success, info in parallel_bulk(es, yindexes[key][j * 30:(j * 30) + 30],
                                                               thread_count=int(threads), index='yindex',
                                                               doc_type='modules', request_timeout=40):
                                if not success:
                                    LOGGER.error('An Elasticsearch document failed with info: {}, process {}'
                                                 .format(info, process_name))
                        LOGGER.debug('pushing new data to yindex last one')
                        for success, info in parallel_bulk(es, yindexes[key][(j * 30) + 30:],
                                                           thread_count=int(threads), index='yindex',
                                                           doc_type='modules', request_timeout=40):
                            if not success:
                                LOGGER.error('An Elasticsearch document failed with info: {}, process {}'
                                             .format(info, process_name))
                    query = {}
                    query['module'] = name
                    query['organization'] = resolve_organization(parsed_module)
                    query['revision'] = revision
                    query['dir'] = parsed_module.pos.ref
                    LOGGER.debug('pushing data to modules index, process {}'.format(process_name))
                    es.index(index='modules', doc_type='modules', body=query, request_timeout=40)
                    break
                except (ConnectionTimeout, ConnectionError) as e:
                    retry = retry - 1
                    if retry > 0:
                        LOGGER.warning('module {}@{} timed out, process {}'.format(
                            name, revision, process_name))
                    else:
                        LOGGER.error('module {}@{} timed out too many times failing, process {}'
                                     .format(name, revision, process_name))
                        raise e
            with open('{}/{}@{}.json'.format(ytree_dir, name, revision), 'w') as f:
                try:
                    emit_tree([parsed_module], f, ctx)
                except Exception:
                    # create an empty file so we still have access to that
                    f.write('')
            with lock_write_rest:
                with open('{}/rest-of-elk-data.json'.format(temp_dir), 'w') as f:
                    json.dump(list(modules_copy), f)
        except Exception:
            with lock_exception:
                with open(log_file, 'a') as f:
                    traceback.print_exc(file=f)
            m_parts = module.split(':')
            key = '{}/{}'.format(m_parts[0].split('/')[-1][:-5], m_parts[1])
            LOGGER.warning('Exception while adding {}, process {}'.format(key, process_name))
            val = m_parts[0]
            with open(failed_changes_dir, 'r') as f:
                failed_mods = json.load(f)
            if key not in failed_mods:
                failed_mods[key] = val
            with open(failed_changes_dir, 'w') as f:
                json.dump(failed_mods, f)
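# The failure bookkeeping above keys entries as
# '<module-file-without-extension>/<organization>'. For an illustrative module
# string '/var/yang/all_modules/foo@2020-06-01.yang:cisco' this records:
#
#   failed_mods['foo@2020-06-01/cisco'] = '/var/yang/all_modules/foo@2020-06-01.yang'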
def build_yindex(ytree_dir, modules, LOGGER, save_file_dir, es_host, es_port,
                 es_aws, elk_credentials, threads, log_file, failed_changes_dir,
                 temp_dir, processes):
    if es_aws:
        es = Elasticsearch([es_host],
                           http_auth=(elk_credentials[0], elk_credentials[1]),
                           scheme='https', port=443)
    else:
        es = Elasticsearch([{'host': '{}'.format(es_host), 'port': es_port}])
    initialize_body_yindex = json.load(
        open('json/initialize_yindex_elasticsearch.json', 'r'))
    initialize_body_modules = json.load(
        open('json/initialize_module_elasticsearch.json', 'r'))
    logging.getLogger('elasticsearch').setLevel(logging.ERROR)
    for i in range(0, 5, 1):
        try:
            es.indices.create(index='yindex', body=initialize_body_yindex, ignore=400)
            es.indices.create(index='modules', body=initialize_body_modules, ignore=400)
        except ConnectionError:
            import time
            LOGGER.warning('Could not connect to elasticsearch waiting 30 seconds')
            time.sleep(30)
    # it must be able to connect in here
    es.ping()
    x = 0
    modules_copy = modules.copy()
    for module in modules:
        try:
            modules_copy.remove(module)
            x += 1
            LOGGER.info('yindex on module {}. module {} out of {}'.format(
                module.split('/')[-1], x, len(modules)))
            # split to module with path and organization
            m_parts = module.split(':')
            m = m_parts[0]
            plugin.init([])
            ctx = create_context('{}'.format(save_file_dir))
            ctx.opts.lint_namespace_prefixes = []
            ctx.opts.lint_modulename_prefixes = []
            for p in plugin.plugins:
                p.setup_ctx(ctx)
            with open(m, 'r') as f:
                parsed_module = ctx.add_module(m, f.read())
            ctx.validate()
            if parsed_module is None:
                raise Exception('Unable to pyang parse module')
            f = io.StringIO()
            ctx.opts.print_revision = True
            emit_name(ctx, [parsed_module], f)
            name_revision = f.getvalue().strip()
            mods = [parsed_module]
            find_submodules(ctx, mods, parsed_module)
            f = io.StringIO()
            ctx.opts.yang_index_make_module_table = True
            ctx.opts.yang_index_no_schema = True
            indexerPlugin = IndexerPlugin()
            indexerPlugin.emit(ctx, [parsed_module], f)
            yindexes = json.loads(f.getvalue())

            name_revision = name_revision.split('@')
            if len(name_revision) > 1:
                name = name_revision[0]
                revision = name_revision[1].split(' ')[0]
            else:
                name = name_revision[0]
                revision = '1970-01-01'
            if 'belongs-to' in name:
                name = name.split(' ')[0]
            try:
                dateutil.parser.parse(revision)
            except Exception:
                if revision[-2:] == '29' and revision[-5:-3] == '02':
                    revision = revision.replace('02-29', '02-28')
                else:
                    revision = '1970-01-01'
            rev_parts = revision.split('-')
            try:
                revision = datetime(int(rev_parts[0]), int(rev_parts[1]),
                                    int(rev_parts[2])).date().isoformat()
            except Exception:
                revision = '1970-01-01'

            retry = 3
            while retry > 0:
                try:
                    for m in mods:
                        n = m.arg
                        rev = get_latest_revision(m)
                        if rev == 'unknown':
                            r = '1970-01-01'
                        else:
                            r = rev
                        try:
                            dateutil.parser.parse(r)
                        except Exception:
                            if r[-2:] == '29' and r[-5:-3] == '02':
                                r = r.replace('02-29', '02-28')
                            else:
                                r = '1970-01-01'
                        rev_parts = r.split('-')
                        r = datetime(int(rev_parts[0]), int(rev_parts[1]),
                                     int(rev_parts[2])).date().isoformat()
                        try:
                            query = {
                                'query': {
                                    'bool': {
                                        'must': [{
                                            'match_phrase': {
                                                'module.keyword': {
                                                    'query': n
                                                }
                                            }
                                        }, {
                                            'match_phrase': {
                                                'revision': {
                                                    'query': r
                                                }
                                            }
                                        }]
                                    }
                                }
                            }
                            LOGGER.debug('deleting data from yindex')
                            es.delete_by_query(index='yindex', body=query, doc_type='modules',
                                               conflicts='proceed', request_timeout=40)
                        except NotFoundError:
                            pass
                    for key in yindexes:
                        j = -1
                        for j in range(0, int(len(yindexes[key]) / 30)):
                            LOGGER.debug('pushing new data to yindex {} of {}'.format(
                                j, int(len(yindexes[key]) / 30)))
                            for success, info in parallel_bulk(es, yindexes[key][j * 30:(j * 30) + 30],
                                                               thread_count=int(threads), index='yindex',
                                                               doc_type='modules', request_timeout=40):
                                if not success:
                                    LOGGER.error('An Elasticsearch document failed with info: {}'
                                                 .format(info))
                        LOGGER.debug('pushing rest of data to yindex')
                        for success, info in parallel_bulk(es, yindexes[key][(j * 30) + 30:],
                                                           thread_count=int(threads), index='yindex',
                                                           doc_type='modules', request_timeout=40):
                            if not success:
                                LOGGER.error('An Elasticsearch document failed with info: {}'
                                             .format(info))

                    rev = get_latest_revision(parsed_module)
                    if rev == 'unknown':
                        revision = '1970-01-01'
                    else:
                        revision = rev
                    try:
                        dateutil.parser.parse(revision)
                    except Exception:
                        if revision[-2:] == '29' and revision[-5:-3] == '02':
                            revision = revision.replace('02-29', '02-28')
                        else:
                            revision = '1970-01-01'
                    rev_parts = revision.split('-')
                    revision = datetime(int(rev_parts[0]), int(rev_parts[1]),
                                        int(rev_parts[2])).date().isoformat()
                    query = {
                        'query': {
                            'bool': {
                                'must': [{
                                    'match_phrase': {
                                        'module.keyword': {
                                            'query': name
                                        }
                                    }
                                }, {
                                    'match_phrase': {
                                        'revision': {
                                            'query': revision
                                        }
                                    }
                                }]
                            }
                        }
                    }
                    LOGGER.debug('deleting data from modules index')
                    total = es.delete_by_query(index='modules', body=query, doc_type='modules',
                                               conflicts='proceed', request_timeout=40)['deleted']
                    if total > 1:
                        LOGGER.info('{}@{}'.format(name, revision))
                    query = {}
                    query['module'] = name
                    query['organization'] = resolve_organization(parsed_module)
                    query['revision'] = revision
                    query['dir'] = parsed_module.pos.ref
                    LOGGER.debug('pushing data to modules index')
                    es.index(index='modules', doc_type='modules', body=query, request_timeout=40)
                    break
                except (ConnectionTimeout, ConnectionError) as e:
                    retry = retry - 1
                    if retry > 0:
                        LOGGER.warning('module {}@{} timed out'.format(name, revision))
                    else:
                        LOGGER.error('module {}@{} timed out too many times failing'.format(
                            name, revision))
                        raise e
            with open('{}/{}@{}.json'.format(ytree_dir, name, revision), 'w') as f:
                try:
                    emit_tree([parsed_module], f, ctx)
                except Exception:
                    # create an empty file so we still have access to that
                    LOGGER.warning('unable to create ytree for module {}@{} creating empty file'
                                   .format(name, revision))
                    f.write('')
            with open('{}/rest-of-elk-data.json'.format(temp_dir), 'w') as f:
                json.dump(modules_copy, f)
        except Exception:
            with open(log_file, 'a') as f:
                traceback.print_exc(file=f)
            m_parts = module.split(':')
            key = '{}/{}'.format(m_parts[0].split('/')[-1][:-5], m_parts[1])
            val = m_parts[0]
            with open(failed_changes_dir, 'r') as f:
                failed_mods = json.load(f)
            if key not in failed_mods:
                failed_mods[key] = val
            with open(failed_changes_dir, 'w') as f:
                json.dump(failed_mods, f)
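# The slicing above pushes documents to Elasticsearch in chunks of 30 (all full
# chunks, then the remainder). An equivalent, slightly clearer formulation of
# the same batching, as a sketch rather than a drop-in replacement:
#
#   for key, docs in yindexes.items():
#       for start in range(0, len(docs), 30):
#           for success, info in parallel_bulk(es, docs[start:start + 30],
#                                              thread_count=int(threads),
#                                              index='yindex', doc_type='modules',
#                                              request_timeout=40):
#               if not success:
#                   LOGGER.error('An Elasticsearch document failed with info: {}'.format(info))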
def create_diff_tree(name1: str, revision1: str, file2: str, revision2: str):
    """
    Create preformatted HTML which contains the diff between two yang trees.
    Dump the content of the yang files into temporary schema-tree-diff.txt files.
    Make a GET request to the URL
    https://www.ietf.org/rfcdiff/rfcdiff.pyht?url1=<file1>&url2=<file2>.
    The output of the rfcdiff tool then represents the response of the method.

    Arguments:
        :param name1     (str) name of the first module
        :param revision1 (str) revision of the first module in format YYYY-MM-DD
        :param file2     (str) name of the second module
        :param revision2 (str) revision of the second module in format YYYY-MM-DD
        :return preformatted HTML with corresponding data
    """
    schema1 = '{}/{}@{}.yang'.format(ac.d_save_file_dir, name1, revision1)
    schema2 = '{}/{}@{}.yang'.format(ac.d_save_file_dir, file2, revision2)
    plugin.plugins = []
    plugin.init([])
    ctx = create_context('{}:{}'.format(ac.d_yang_models_dir, ac.d_save_file_dir))
    ctx.opts.lint_namespace_prefixes = []
    ctx.opts.lint_modulename_prefixes = []
    ctx.lax_quote_checks = True
    ctx.lax_xpath_checks = True
    for p in plugin.plugins:
        p.setup_ctx(ctx)
    with open(schema1, 'r') as ff:
        a = ctx.add_module(schema1, ff.read())
    ctx.errors = []
    if ctx.opts.tree_path is not None:
        path = ctx.opts.tree_path.split('/')
        if path[0] == '':
            path = path[1:]
    else:
        path = None
    ctx.validate()
    f = io.StringIO()
    emit_tree(ctx, [a], f, ctx.opts.tree_depth, ctx.opts.tree_line_length, path)
    stdout = f.getvalue()
    file_name1 = 'schema1-tree-diff.txt'
    full_path_file1 = '{}/{}'.format(ac.w_save_diff_dir, file_name1)
    with open(full_path_file1, 'w+') as ff:
        ff.write('<pre>{}</pre>'.format(stdout))
    with open(schema2, 'r') as ff:
        a = ctx.add_module(schema2, ff.read())
    ctx.validate()
    f = io.StringIO()
    emit_tree(ctx, [a], f, ctx.opts.tree_depth, ctx.opts.tree_line_length, path)
    stdout = f.getvalue()
    file_name2 = 'schema2-tree-diff.txt'
    full_path_file2 = '{}/{}'.format(ac.w_save_diff_dir, file_name2)
    with open(full_path_file2, 'w+') as ff:
        ff.write('<pre>{}</pre>'.format(stdout))
    tree1 = '{}/compatibility/{}'.format(ac.w_my_uri, file_name1)
    tree2 = '{}/compatibility/{}'.format(ac.w_my_uri, file_name2)
    diff_url = 'https://www.ietf.org/rfcdiff/rfcdiff.pyht?url1={}&url2={}'.format(tree1, tree2)
    response = requests.get(diff_url)
    os.unlink(full_path_file1)
    os.unlink(full_path_file2)
    return '<html><body>{}</body></html>'.format(response.text)
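# Illustrative call; the returned page embeds the rfcdiff of the two emitted
# pyang trees:
#
#   html = create_diff_tree('ietf-interfaces', '2014-05-08',
#                           'ietf-interfaces', '2018-02-20')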