def transform_module():
    """HTTP handler: read a YANG module from the request body and return
    its 'jsontree' rendering as a JSON response.

    Returns the rendered output as a string; aborts with HTTP 500 if the
    jsontree plugin fails to emit.
    """
    module = request.body.read()
    response.content_type = 'application/json'
    repos = pyang.FileRepository()
    ctx = pyang.Context(repos)
    modules = []
    modules.append(ctx.add_module("upload module", module))
    plugin.init()
    p = get_plugin_by_name('jsontree')
    # give the plugin a chance to register its own options, then use defaults
    op = optparse.OptionParser()
    p.add_opts(op)
    (o, args) = op.parse_args()
    ctx.opts = o
    wr = Writer()
    try:
        p.emit(ctx, modules, wr)
    except Exception:
        # was a bare `except:` — never mask SystemExit/KeyboardInterrupt;
        # any plugin failure still maps to an HTTP 500
        bottle.abort(500, 'Internal Server Error')
    return str(wr)
def __init__(self, resolved_model_dir):
    """Set up a pyang repository/context over *resolved_model_dir*.

    :param resolved_model_dir: directory containing the resolved YANG models
    """
    self.repos = pyang.FileRepository(resolved_model_dir, False)
    self.ctx = pyang.Context(self.repos)
    self.resolved_model_dir = resolved_model_dir
    self.submodules = []
    # Python 2 only: force utf-8 as the default string encoding.
    # On Python 3, reload() is not a builtin (NameError) and
    # sys.setdefaultencoding no longer exists (AttributeError); both are
    # expected and safely ignored (the bare `except:` was needlessly broad).
    try:
        reload(sys)
        sys.setdefaultencoding('utf8')
    except (NameError, AttributeError):
        pass
def go(path, filenames):
    """Parse the YANG files in *filenames*, searching *path* for imports,
    and return the dictified result.

    :param path: os.pathsep-separated search path for yin/yang modules
    :param filenames: iterable of YANG/YIN file names to parse
    :return: (ytypes, yobjects) tuple as produced by dictify()
    """
    repos = pyang.FileRepository(path)
    ctx = pyang.Context(repos)
    ctx.canonical = None
    ctx.max_line_len = None
    ctx.max_identifier_len = None
    ctx.trim_yin = None
    ctx.lax_xpath_checks = None
    ctx.deviation_modules = []
    # make a map of features to support, per module
    ctx.features = {}
    ctx.validate()
    modules = []
    # match "<name>[@<YYYY-MM-DD>].(yang|yin)" to extract name/revision/format
    r = re.compile(r"^(.*?)(\@(\d{4}-\d{2}-\d{2}))?\.(yang|yin)$")
    for filename in filenames:
        try:
            # context manager closes the handle (the original leaked it)
            with open(filename) as fd:
                text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (filename, str(ex)))
            sys.exit(1)  # TODO
        m = r.search(filename)
        if m is not None:
            (name, _dummy, rev, format) = m.groups()
            name = os.path.basename(name)
            module = ctx.add_module(filename, text, format, name, rev,
                                    expect_failure_error=False)
        else:
            module = ctx.add_module(filename, text)
        if module is None:
            exit_code = 1  # TODO
        else:
            modules.append(module)
    # collect module names plus the names of their included submodules
    modulenames = []
    for m in modules:
        modulenames.append(m.arg)
        for s in m.search('include'):
            modulenames.append(s.arg)
    ctx.validate()
    #dumpContext(ctx)
    ytypes = {}
    yobjects = {}
    dictify(ctx, ytypes, yobjects)
    #print str(ytypes)
    #print str(yobjects)
    #pdb.set_trace()
    return (ytypes, yobjects)
def load(path, models):
    """Parse each YANG file in *models* against search *path*.

    :param path: os.pathsep-separated search path for yin/yang modules
    :param models: iterable of YANG file names to parse
    :return: (modules, ctx) — the successfully added modules and the context
    """
    repos = pyang.FileRepository(path)
    ctx = pyang.Context(repos)
    modules = []
    for fname in models:
        # context manager closes the handle (the original leaked it)
        with open(fname, 'r') as fd:
            text = fd.read()
        module = ctx.add_module(fname, text)
        if module:
            modules.append(module)
    return modules, ctx
def yang_context():
    """
    Fixture statement to generate the pyang context for test purposes.
    A context is a module which encapsulates the parsing session.

    :return: pyang context object
    """
    repository = pyang.FileRepository(".")
    return pyang.Context(repository)
def check_update(ctx, oldfilename, newmod):
    """Compare *newmod* against the old revision named by
    --check-update-from (plus any --old-deviation modules), parsed in a
    separate pyang context, then run chk_module() to record any
    update-rules violations into ctx.errors.
    """
    # Search path for the old revision: the --old-path dirs plus the
    # directory containing the old file itself.
    oldpath = os.pathsep.join(ctx.opts.old_path)
    olddir = os.path.dirname(oldfilename)
    if olddir == '':
        olddir = '.'
    oldpath += os.pathsep + olddir
    # Parse the old revision in its own repository/context so its modules
    # don't collide with the new ones already loaded in `ctx`.
    oldrepo = pyang.FileRepository(oldpath, use_env=False)
    oldctx = pyang.Context(oldrepo)
    oldctx.opts = ctx.opts
    oldctx.lax_xpath_checks = ctx.lax_xpath_checks
    oldctx.lax_quote_checks = ctx.lax_quote_checks
    if ctx.opts.verbose:
        print("Loading old modules from:")
        for d in oldrepo.dirs:
            print(" %s" % d)
        print("")
    for p in plugin.plugins:
        p.setup_ctx(oldctx)
    # Load the old module itself plus any old deviation modules into oldctx.
    for oldfilename in [ctx.opts.check_update_from] + ctx.opts.old_deviation:
        try:
            fd = io.open(oldfilename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (oldfilename, str(ex)))
            sys.exit(1)
        if oldfilename in ctx.opts.old_deviation:
            oldctx.add_module(oldfilename, text)
        else:
            # the module being checked (the --check-update-from file)
            oldmod = oldctx.add_module(oldfilename, text)
    oldctx.validate()
    ctx.errors.extend(oldctx.errors)
    if oldmod is None:
        return
    # Don't run the update checks if either revision already has real errors.
    for (epos, etag, eargs) in ctx.errors:
        if (epos.ref in (newmod.pos.ref, oldmod.pos.ref) and \
            error.is_error(error.err_level(etag))):
            return
    if ctx.opts.verbose:
        print("Loaded old modules:")
        for x in oldrepo.get_modules_and_revisions(oldctx):
            (m, r, (fmt, filename)) = x
            print(" %s" % filename)
        print("")
    chk_module(ctx, oldmod, newmod)
def to(cls, format, **options):
    """Serialize YANG container to the given output `format`.
    """
    # pyang output plugins write into a stream
    out = StringIO()
    emitter = PYANG_PLUGINS[format]
    # expose the plugin's command-line options, pyang-script style
    parser = OptionParser()
    emitter.add_opts(parser)
    # emit() also requires a pyang.Context ...
    ctx = pyang.Context(DummyRepository())
    # ... carrying the plugin-specific options (defaults are fine)
    ctx.opts, _ = parser.parse_args([])
    # serialize and hand back the rendered text
    emitter.emit(ctx, [cls.to_statement(**options)], out)
    out.seek(0)
    return out.read()
def __init__(self, inDir, inPath, inExceptionOnDuplicate, inWithWarning):
    """Load every *.yang file in *inDir* (searching *inPath* for imports),
    validate them, log warnings/errors, and raise YException if any real
    error was reported for the explicitly loaded modules.

    :param inDir: directory scanned (non-recursively) for ``*.yang`` files
    :param inPath: pyang search path for imported modules
    :param inExceptionOnDuplicate: stored on the instance; behavior defined elsewhere
    :param inWithWarning: when true, warnings are also logged
    """
    repos = pyang.FileRepository(inPath, no_path_recurse=True)
    self.m_ctx = pyang.Context(repos)
    self.m_modules = []       # modules that define data/rpc/notification nodes
    self.m_filenames = []     # file names of the modules kept in m_modules
    self.m_modulenames = []   # module names of the modules kept in m_modules
    self.m_expanddefault = False
    self.m_exception_on_duplicate = inExceptionOnDuplicate
    self.m_with_warning = inWithWarning
    path = inDir + '/*.yang'
    # match "<name>[@<YYYY-MM-DD>].(yang|yin)" to extract name/revision/format
    r = re.compile(r"^(.*?)(\@(\d{4}-\d{2}-\d{2}))?\.(yang|yin)$")
    for filename in glob.glob(path):
        fd = open(filename, "r", encoding="utf-8")
        text = fd.read()
        m = r.search(filename)
        self.m_ctx.yin_module_map = {}
        if m is not None:
            (name, _dummy, rev, format) = m.groups()
            name = os.path.basename(name)
            module = self.m_ctx.add_module(filename, text, format, name, rev,
                                           expect_failure_error=False)
        else:
            module = self.m_ctx.add_module(filename, text)
        # skip submodules (those with a belongs-to statement)
        if module and not module.search_one('belongs-to'):
            # keep only modules that actually define data nodes, rpcs or
            # notifications
            chs = [
                ch for ch in module.i_children
                if ch.keyword in pyang.statements.data_definition_keywords
                or ch.keyword in ('rpc', 'notification')
            ]
            if len(chs):
                self.m_modules.append(module)
                self.m_modulenames.append(module.arg)
                self.m_filenames.append(filename)
    self.m_ctx.validate()

    # sort key that puts errors for the first loaded file first
    def keyfun(e):
        if e[0].ref == self.m_filenames[0]:
            return 0
        else:
            return 1

    self.m_ctx.errors.sort(key=lambda e: (e[0].ref, e[0].line))
    if len(self.m_filenames) > 0:
        # first print error for the first filename given
        self.m_ctx.errors.sort(key=keyfun)
    haserror = False
    for (epos, etag, eargs) in self.m_ctx.errors:
        if (self.m_ctx.implicit_errors == False and
            hasattr(epos.top, 'i_modulename') and
            epos.top.arg not in self.m_modulenames and
            epos.top.i_modulename not in self.m_modulenames and
            epos.ref not in self.m_filenames):
            # this module was added implicitly (by import); skip this error
            # the code includes submodules
            continue
        elevel = pyang.error.err_level(etag)
        if pyang.error.is_warning(elevel):
            kind = "warning"
            # warnings are logged only when requested
            if self.m_with_warning:
                logger.error(
                    str(epos) + ': %s: ' % kind +
                    pyang.error.err_to_str(etag, eargs) + '\n')
        else:
            kind = "error"
            haserror = True
            logger.error(
                str(epos) + ': %s: ' % kind +
                pyang.error.err_to_str(etag, eargs) + '\n')
    if haserror:
        raise YException(
            'some errors occur in yang modules, error details refer to log please'
        )
    for module in self.m_modules:
        YWrapper.count_grouping_uses(module)
def run():
    """Command-line entry point: parse options, load and validate the given
    YANG modules (or a hello-file's module set, or stdin), apply any
    transforms, report errors/warnings, and emit the requested output
    format. Exits with a non-zero status on error.
    """
    usage = """%prog [options] [<filename>...]

Validates the YANG module in <filename> (or stdin), and all its dependencies."""

    plugindirs = []
    # check for --plugindir before real option parsing, since plugins may
    # register options of their own
    idx = 1
    while '--plugindir' in sys.argv[idx:]:
        idx = idx + sys.argv[idx:].index('--plugindir')
        plugindirs.append(sys.argv[idx + 1])
        idx = idx + 1
    plugin.init(plugindirs)

    # let every plugin register its output formats and transforms
    fmts = {}
    xforms = {}
    for p in plugin.plugins:
        p.add_output_format(fmts)
        p.add_transform(xforms)

    optlist = [
        # use capitalized versions of std options help and version
        optparse.make_option("-h", "--help1", action="help",
                             help="Show this help message and exit ha2"),
        optparse.make_option("-v", "--version", action="version",
                             help="Show version number and exit"),
        optparse.make_option("-V", "--verbose", action="store_true"),
        optparse.make_option("-e", "--list-errors", dest="list_errors",
                             action="store_true",
                             help="Print a listing of all error and warning "
                                  "codes and exit."),
        optparse.make_option("--print-error-code", dest="print_error_code",
                             action="store_true",
                             help="On errors, print the error code instead "
                                  "of the error message."),
        optparse.make_option("-W", dest="warnings", action="append",
                             default=[], metavar="WARNING",
                             help="If WARNING is 'error', treat all warnings "
                                  "as errors, except any listed WARNING. "
                                  "If WARNING is 'none', do not report any "
                                  "warnings."),
        optparse.make_option("-E", dest="errors", action="append",
                             default=[], metavar="WARNING",
                             help="Treat each WARNING as an error. For a "
                                  "list of warnings, use --list-errors."),
        optparse.make_option("--ignore-error", dest="ignore_error_tags",
                             action="append", default=[], metavar="ERROR",
                             help="Ignore ERROR. Use with care. For a "
                                  "list of errors, use --list-errors."),
        optparse.make_option("--ignore-errors", dest="ignore_errors",
                             action="store_true",
                             help="Ignore all errors. Use with care."),
        optparse.make_option("--canonical", dest="canonical",
                             action="store_true",
                             help="Validate the module(s) according to the "
                                  "canonical YANG order."),
        optparse.make_option("--max-line-length", type="int",
                             dest="max_line_len"),
        optparse.make_option("--max-identifier-length", type="int",
                             dest="max_identifier_len"),
        optparse.make_option("-t", "--transform", dest="transforms",
                             default=[], action="append",
                             help="Apply transform TRANSFORM. Supported "
                                  "transforms are: " +
                                  ', '.join(list(xforms.keys()))),
        optparse.make_option("-f", "--format", dest="format",
                             help="Convert to FORMAT. Supported formats "
                                  "are: " + ', '.join(list(fmts.keys()))),
        optparse.make_option("-o", "--output", dest="outfile",
                             help="Write the output to OUTFILE instead "
                                  "of stdout."),
        optparse.make_option("-F", "--features", metavar="FEATURES",
                             dest="features", default=[], action="append",
                             help="Features to support, default all. "
                                  "<modname>:[<feature>,]*"),
        optparse.make_option("", "--max-status", metavar="MAXSTATUS",
                             dest="max_status",
                             help="Max status to support, one of: "
                                  "current, deprecated, obsolete"),
        optparse.make_option("", "--deviation-module", metavar="DEVIATION",
                             dest="deviations", default=[], action="append",
                             help="Deviation module"),
        optparse.make_option("-p", "--path", dest="path", default=[],
                             action="append",
                             help=os.pathsep + "-separated search path for yin"
                                  " and yang modules"),
        optparse.make_option("--plugindir", dest="plugindir",
                             help="Load pyang plugins from PLUGINDIR"),
        optparse.make_option("--strict", dest="strict", action="store_true",
                             help="Force strict YANG compliance."),
        optparse.make_option("--lax-quote-checks", dest="lax_quote_checks",
                             action="store_true",
                             help="Lax check of backslash in quoted strings."),
        optparse.make_option("--lax-xpath-checks", dest="lax_xpath_checks",
                             action="store_true",
                             help="Lax check of XPath expressions."),
        optparse.make_option("--trim-yin", dest="trim_yin",
                             action="store_true",
                             help="In YIN input modules, trim whitespace "
                                  "in textual arguments."),
        optparse.make_option("-L", "--hello", dest="hello",
                             action="store_true",
                             help="Filename of a server's hello message is "
                                  "given instead of module filename(s)."),
        optparse.make_option("--keep-comments", dest="keep_comments",
                             action="store_true",
                             help="Pyang will not discard comments; "
                                  "has effect if the output plugin can "
                                  "handle comments."),
        optparse.make_option("--no-path-recurse", dest="no_path_recurse",
                             action="store_true",
                             help="Do not recurse into directories in the "
                                  "yang path."),
    ]

    optparser = optparse.OptionParser(usage, add_help_option=False)
    optparser.version = '%prog ' + pyang.__version__
    optparser.add_options(optlist)
    # plugins may add their own options too
    for p in plugin.plugins:
        p.add_opts(optparser)
    (o, args) = optparser.parse_args()

    # -e / --list-errors: dump the error-code catalogue and exit
    if o.list_errors == True:
        for tag in error.error_codes:
            (level, fmt) = error.error_codes[tag]
            if error.is_warning(level):
                print("Warning: %s" % tag)
            elif error.allow_warning(level):
                print("Minor Error: %s" % tag)
            else:
                print("Error: %s" % tag)
            print("Message: %s" % fmt)
            print("")
        sys.exit(0)

    if o.outfile != None and o.format == None:
        sys.stderr.write("no format specified\n")
        sys.exit(1)

    # patch the error spec so that -W errors are treated as warnings
    for w in o.warnings:
        if w in error.error_codes:
            (level, wstr) = error.error_codes[w]
            if error.allow_warning(level):
                error.error_codes[w] = (4, wstr)

    filenames = args

    # Parse hello if present
    if o.hello:
        if len(filenames) > 1:
            sys.stderr.write("multiple hello files given\n")
            sys.exit(1)
        if filenames:
            try:
                fd = open(filenames[0], "rb")
            except IOError as ex:
                sys.stderr.write("error %s: %s\n" % (filenames[0], str(ex)))
                sys.exit(1)
        elif sys.version < "3":
            fd = sys.stdin
        else:
            fd = sys.stdin.buffer
        hel = hello.HelloParser().parse(fd)

    path = os.pathsep.join(o.path)

    # add standard search path
    if len(o.path) == 0:
        path = "."
    else:
        path += os.pathsep + "."

    repos = pyang.FileRepository(path, no_path_recurse=o.no_path_recurse,
                                 verbose=o.verbose)

    # propagate the parsed options onto the validation context
    ctx = pyang.Context(repos)
    ctx.opts = o
    ctx.canonical = o.canonical
    ctx.max_line_len = o.max_line_len
    ctx.max_identifier_len = o.max_identifier_len
    ctx.trim_yin = o.trim_yin
    ctx.lax_xpath_checks = o.lax_xpath_checks
    ctx.lax_quote_checks = o.lax_quote_checks
    ctx.strict = o.strict
    ctx.max_status = o.max_status

    # make a map of features to support, per module
    if o.hello:
        for (mn, rev) in hel.yang_modules():
            ctx.features[mn] = hel.get_features(mn)
    for f in ctx.opts.features:
        (modulename, features) = parse_features_string(f)
        ctx.features[modulename] = features

    for p in plugin.plugins:
        p.setup_ctx(ctx)

    # resolve the requested transforms; any unknown name is fatal
    xform_objs = []
    for transform in o.transforms:
        if transform not in xforms:
            sys.stderr.write("unsupported transform '%s'\n" % transform)
        else:
            xform_obj = xforms[transform]
            xform_obj.setup_xform(ctx)
            xform_objs.append(xform_obj)
    if len(xform_objs) != len(o.transforms):
        sys.exit(1)

    # resolve the requested output format
    if o.format != None:
        if o.format not in fmts:
            sys.stderr.write("unsupported format '%s'\n" % o.format)
            sys.exit(1)
        emit_obj = fmts[o.format]
        if o.keep_comments and emit_obj.handle_comments:
            ctx.keep_comments = True
        emit_obj.setup_fmt(ctx)
    else:
        emit_obj = None

    xform_and_emit_objs = xform_objs[:]
    if emit_obj is not None:
        xform_and_emit_objs.append(emit_obj)

    for p in plugin.plugins:
        p.pre_load_modules(ctx)

    exit_code = 0
    modules = []

    if o.hello:
        # module list comes from the parsed hello message
        ctx.capabilities = hel.registered_capabilities()
        for (mn, rev) in hel.yang_modules():
            mod = ctx.search_module(0, mn, rev)
            if mod is None:
                emarg = mn
                if rev:
                    emarg += "@" + rev
                sys.stderr.write(
                    "module '%s' specified in hello not found.\n" % emarg)
                sys.exit(1)
            modules.append(mod)
    else:
        # module list comes from the command line (or stdin)
        if len(filenames) == 0:
            text = sys.stdin.read()
            module = ctx.add_module('<stdin>', text)
            if module is None:
                exit_code = 1
            else:
                modules.append(module)
        if (len(filenames) > 1 and
            emit_obj is not None and
            not emit_obj.multiple_modules):
            sys.stderr.write("too many files to convert\n")
            sys.exit(1)
        for filename in filenames:
            try:
                fd = io.open(filename, "r", encoding="utf-8")
                text = fd.read()
                if o.verbose:
                    util.report_file_read(filename, "(CL)")
            except IOError as ex:
                sys.stderr.write("error %s: %s\n" % (filename, str(ex)))
                sys.exit(1)
            except UnicodeDecodeError as ex:
                s = str(ex).replace('utf-8', 'utf8')
                sys.stderr.write("%s: unicode error: %s\n" % (filename, s))
                sys.exit(1)
            # extract name/revision/format from the file name when possible
            m = syntax.re_filename.search(filename)
            ctx.yin_module_map = {}
            if m is not None:
                (name, rev, format) = m.groups()
                name = os.path.basename(name)
                module = ctx.add_module(filename, text, format, name, rev,
                                        expect_failure_error=False)
            else:
                module = ctx.add_module(filename, text)
            if module is None:
                exit_code = 1
            else:
                modules.append(module)

    # collect module names plus the names of their included submodules
    modulenames = []
    for m in modules:
        modulenames.append(m.arg)
        for s in m.search('include'):
            modulenames.append(s.arg)

    # apply deviations
    for filename in ctx.opts.deviations:
        try:
            fd = io.open(filename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (filename, str(ex)))
            sys.exit(1)
        except UnicodeDecodeError as ex:
            s = str(ex).replace('utf-8', 'utf8')
            sys.stderr.write("%s: unicode error: %s\n" % (filename, s))
            sys.exit(1)
        m = ctx.add_module(filename, text)
        if m is not None:
            ctx.deviation_modules.append(m)

    for p in plugin.plugins:
        p.pre_validate_ctx(ctx, modules)

    if len(xform_and_emit_objs) > 0 and len(modules) > 0:
        for obj in xform_and_emit_objs:
            obj.pre_validate(ctx, modules)

    ctx.validate()
    for m in modules:
        m.prune()

    # transform modules
    if len(xform_objs) > 0 and len(modules) > 0:
        for xform_obj in xform_objs:
            try:
                if not xform_obj.transform(ctx, modules):
                    # transform reported no change applied: re-validate
                    for module in modules:
                        module.i_is_validated = False
                        statements.validate_module(ctx, module)
            except error.TransformError as e:
                if e.msg != "":
                    sys.stderr.write(e.msg + '\n')
                sys.exit(e.exit_code)

    # verify the given features
    for m in modules:
        if m.arg in ctx.features:
            for f in ctx.features[m.arg]:
                if f not in m.i_features:
                    sys.stderr.write("unknown feature %s in module %s\n" %
                                     (f, m.arg))
                    sys.exit(1)

    if len(xform_and_emit_objs) > 0 and len(modules) > 0:
        for obj in xform_and_emit_objs:
            obj.post_validate(ctx, modules)

    for p in plugin.plugins:
        p.post_validate_ctx(ctx, modules)

    # sort key that puts errors for the first command-line file first
    def keyfun(e):
        if e[0].ref == filenames[0]:
            return 0
        else:
            return 1

    ctx.errors.sort(key=lambda e: (e[0].ref, e[0].line))
    if len(filenames) > 0:
        # first print error for the first filename given
        ctx.errors.sort(key=keyfun)

    if o.ignore_errors:
        ctx.errors = []

    # report errors/warnings, applying -W/-E/--ignore-error policies
    for (epos, etag, eargs) in ctx.errors:
        if etag in o.ignore_error_tags:
            continue
        if (ctx.implicit_errors == False and
            hasattr(epos.top, 'i_modulename') and
            epos.top.arg not in modulenames and
            epos.top.i_modulename not in modulenames and
            epos.ref not in filenames):
            # this module was added implicitly (by import); skip this error
            # the code includes submodules
            continue
        elevel = error.err_level(etag)
        if error.is_warning(elevel) and etag not in o.errors:
            kind = "warning"
            if 'error' in o.warnings and etag not in o.warnings:
                kind = "error"
                exit_code = 1
            elif 'none' in o.warnings:
                continue
        else:
            kind = "error"
            exit_code = 1
        if o.print_error_code == True:
            sys.stderr.write(str(epos) + ': %s: %s\n' % (kind, etag))
        else:
            sys.stderr.write(str(epos) + ': %s: ' % kind + \
                             error.err_to_str(etag, eargs) + '\n')

    # finally emit the output; write via a .tmp file so a failed emit
    # doesn't leave a partial OUTFILE behind
    if emit_obj is not None and len(modules) > 0:
        tmpfile = None
        if o.outfile == None:
            if sys.version < '3':
                fd = codecs.getwriter('utf8')(sys.stdout)
            else:
                fd = sys.stdout
        else:
            tmpfile = o.outfile + ".tmp"
            if sys.version < '3':
                fd = codecs.open(tmpfile, "w+", encoding="utf-8")
            else:
                fd = io.open(tmpfile, "w+", encoding="utf-8")
        try:
            # invoke the selected output plugin's implementation
            emit_obj.emit(ctx, modules, fd)
        except error.EmitError as e:
            if e.msg != "":
                sys.stderr.write(e.msg + '\n')
            if tmpfile != None:
                fd.close()
                os.remove(tmpfile)
            sys.exit(e.exit_code)
        except:
            if tmpfile != None:
                fd.close()
                os.remove(tmpfile)
            raise
        if tmpfile != None:
            fd.close()
            os.rename(tmpfile, o.outfile)

    sys.exit(exit_code)
for f in failed: failed_download.add(f) # # Now let's check all the schema that we downloaded (from this run # and any other) and parse them with pyang to extract any imports # or includes and verify that they were on the advertised schema # list and didn't fail download. # # TODO: cater for explicitly revisioned imports & includes # imports_and_includes = set() repos = pyang.FileRepository(targetdir) yangfiles = [f for f in listdir(targetdir) if isfile(join(targetdir, f))] for fname in yangfiles: ctx = pyang.Context(repos) if args.process_MIBs_sw or args.display_MIBs_sw: if "MIB" in fname: mib_name = str(fname).rstrip('.yang') mib_filter = '<' + mib_name + ':' + mib_name + ' xmlns:' + mib_name + '="urn:ietf:params:xml:ns:yang:smiv2:' + mib_name + '"/>' try: mib = get(mgr, mib_filter) if args.display_MIBs_sw: print(mib_name) soup = BeautifulSoup(mib) print(soup.prettify()) except RPCError as e: print(mib_name) print(e) fd = open(targetdir + '/' + fname, 'r') text = fd.read()
def init_pyang_context(self, repo_path=''):
    """Create a fresh pyang parsing context rooted at *repo_path* and
    store it on the instance, with option defaults from _MetaPyangCtxOpts.
    """
    file_repo = pyang.FileRepository(repo_path, no_path_recurse=None)
    context = pyang.Context(file_repo)
    context.opts = _MetaPyangCtxOpts()
    self.ctx = context
def __init__(self, name, map_list=(), yangpath=""):
    """Load and validate the given YANG module(s) with pyang.

    :param name: a single filename (string) or a list of filenames;
                 an empty list reads the module text from stdin
    :param map_list: iterable of (map_name, map_def) pairs installed via
                     replace_map(); default changed from the mutable ``[]``
                     to ``()`` (it is only iterated, so this is safe)
    :param yangpath: extra search path, a string or list of directories
    """
    self.defined_maps = [
        "avoid_map", "leaf_map", "pattern_map", "type_map", "xpath_map"
    ]
    self.maps = {}
    for map_name, map_def in map_list:
        self.replace_map(map_name, map_def)
    self.node_map = {}
    self.missing_map = {}
    # locate the bundled confd (or ncs) YANG models under NCS_DIR
    path = os.getenv("NCS_DIR") + "/src/confd/yang"
    if not os.path.exists(path):
        path = os.getenv("NCS_DIR") + "/src/ncs/yang"
    if yangpath != "":
        if type(yangpath) is list:
            yangpath = ':'.join(yangpath)
        path = yangpath + ":" + path
    repos = pyang.FileRepository(path)
    ctx = pyang.Context(repos)
    # String or list?
    if hasattr(name, "lower"):
        filenames = [name]
    else:
        filenames = name
    modules = []
    if len(filenames) == 0:
        text = sys.stdin.read()
        module = ctx.add_module("<stdin>", text)
        assert module is not None
        modules.append(module)
    # match "<name>[@<YYYY-MM-DD>].(yang|yin)" to extract name/revision/format
    r = re.compile(r"^(.*?)(\@(\d{4}-\d{2}-\d{2}))?\.(yang|yin)$")
    for filename in filenames:
        # context manager closes the handle (the original leaked it)
        with open(filename) as fd:
            text = fd.read()
        # Submodules should be ignored
        if "belongs-to" in text:
            continue
        m = r.search(filename)
        ctx.yin_module_map = {}
        if m is not None:
            (name, _dummy, rev, format) = m.groups()
            name = os.path.basename(name)
            module = ctx.add_module(filename, text, format, name, rev,
                                    expect_failure_error=False)
        else:
            module = ctx.add_module(filename, text)
        assert module is not None
        self.namespace = module.search_one("namespace").arg
        modules.append(module)
    ctx.validate()
    # validation errors are deliberately discarded here
    ctx.errors = []
    self.modules = modules
    self.groupings = {}
    for module in self.modules:
        self.groupings.update(module.i_groupings)
def _parse_and_return_modules(resolved_model_dir):
    """ Use pyang to parse the files and get a list of modules.

        :param str resolved_model_dir The directory where all models to be compiled are found.
        :raise YdkGenException If there was a problem parsing the modules
    """
    repos = pyang.FileRepository(resolved_model_dir, False)
    ctx = pyang.Context(repos)
    # hook deviation bookkeeping into pyang's validation phases
    statements.add_validation_fun('reference_3', ['deviation'], _add_i_deviation)
    statements.add_validation_fun('reference_3', ['deviation'], _add_d_info)
    statements.add_validation_fun('reference_3', ['deviate'], _remove_d_info)

    filenames = []
    # (name, rev, handle) where handle is (format, absfilename)
    for (_, _, (_, filename)) in repos.get_modules_and_revisions(ctx):
        filenames.append(filename)

    modules = []
    # match "<name>[@<YYYY-MM-DD>].(yang|yin)" to extract name/revision/format
    r = re.compile(r"^(.*?)(\@(\d{4}-\d{2}-\d{2}))?\.(yang|yin)$")
    for filename in filenames:
        f = filename
        if filename.startswith('file://'):
            # NOTE(review): len('file://') - 1 keeps one leading '/' of the
            # URI path — confirm this offset is intended
            f = filename[len('file://') - 1:]
        try:
            # context manager closes the handle (the original leaked it)
            with open(f) as fd:
                text = fd.read()
        except IOError as ex:
            err_msg = "error %s: %s\n" % (filename, str(ex))
            logger.error(err_msg)
            raise YdkGenException(err_msg)
        m = r.search(filename)
        ctx.yin_module_map = {}
        if m is not None:
            # BUG FIX: the format group was previously discarded
            # ("(name, _dummy, rev, _)"), so the *builtin* `format`
            # function was passed to add_module() and logger.debug().
            (name, _dummy, rev, format) = m.groups()
            name = os.path.basename(name)
            logger.debug('Parsing file %s format %s name %s revision %s',
                         filename, format, name, rev)
            module = ctx.add_module(filename, text, format, name, rev,
                                    expect_failure_error=False)
        else:
            module = ctx.add_module(filename, text)
        if module is None:
            raise YdkGenException('Could not add module ')
        else:
            modules.append(module)

    # all the module have been added so get the context to validate
    # call prevalidate before this and post validate after
    ctx.validate()

    # sort key that puts errors for the first file first
    def keyfun(e):
        if e[0].ref == filenames[0]:
            return 0
        else:
            return 1

    ctx.errors.sort(key=lambda e: (e[0].ref, e[0].line))
    if len(filenames) > 0:
        # first print error for the first filename given
        ctx.errors.sort(key=keyfun)

    # warnings are logged; real errors are collected and raised in bulk
    error_messages = []
    for (epos, etag, eargs) in ctx.errors:
        elevel = error.err_level(etag)
        if error.is_warning(elevel):
            logger.warning('%s: %s\n' %
                           (str(epos), error.err_to_str(etag, eargs)))
        else:
            err_msg = '%s: %s\n' % (str(epos), error.err_to_str(etag, eargs))
            logger.error(err_msg)
            error_messages.append(err_msg)

    if len(error_messages) > 0:
        err_msg = '\n'.join(error_messages)
        raise YdkGenException(err_msg)

    # submodules are excluded from the result
    return [m for m in modules if m.keyword == 'module']
def download_device_schema(hostname, device_type, username, password,
                           schema_output_dir):
    """Derived from https://github.com/CiscoDevNet/ncc/blob/master/ncc-get-all-schema

    Connect to *hostname* over NETCONF, download every schema listed in the
    server's ietf-netconf-monitoring schemas tree into *schema_output_dir*,
    then parse the downloaded files with pyang and fetch any imported or
    included schemas that were not advertised.
    (`device_type` is accepted but not used in this function body.)
    """
    # subtree filter selecting the schema identifiers from the monitoring tree
    schemas_filter = """<netconf-state xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring">
      <schemas>
        <schema>
          <identifier/>
        </schema>
      </schemas>
    </netconf-state>"""
    logging.info("Connecting to online device %s ...", hostname)
    with manager.connect(host=hostname, username=username, password=password,
                         hostkey_verify=False) as nc_mgr:
        schema_tree = nc_mgr.get(filter=("subtree", schemas_filter)).data
        schema_list = [
            n.text
            for n in schema_tree.xpath('//*[local-name()="identifier"]')
        ]
        # cross-check the hello capabilities against the schemas tree
        not_in_schemas = set()
        logging.info("Parsing server capabilities ...")
        for c in nc_mgr.server_capabilities:
            model = re.search("module=([^&]*)", c)
            if model is not None:
                m = model.group(1)
                if m not in schema_list:
                    not_in_schemas.add(m)
            deviations = re.search("deviations=([^&<]*)", c)
            if deviations is not None:
                d = deviations.group(1)
                for dfn in d.split(","):
                    if dfn not in schema_list:
                        logging.debug("Deviation %s not in schema list", dfn)
                        not_in_schemas.add(dfn)
        if len(not_in_schemas) > 0:
            logging.error(
                "The following models are advertised in capabilities but are not in schemas tree:"
            )
            for m in sorted(not_in_schemas):
                logging.error(" {}".format(m))
        download_schemas(nc_mgr, schema_output_dir, schema_list)
        # parse everything we downloaded and collect its imports/includes
        imports_and_includes = set()
        repos = pyang.FileRepository(schema_output_dir, use_env=False)
        ctx = pyang.Context(repos)
        yangfiles = [
            f for f in os.listdir(schema_output_dir)
            if os.path.isfile(os.path.join(schema_output_dir, f))
        ]
        for fname in sorted(yangfiles):
            logging.debug("Parsing %s", fname)
            with open(schema_output_dir + "/" + fname, "rb") as fd:
                text = fd.read().decode("UTF-8")
            ctx.add_module(fname, text)
            this_module = os.path.basename(fname).split(".")[0]
            # ctx.modules is keyed by (name, revision)
            for ((m, r), module) in ctx.modules.items():
                if m == this_module:
                    for s in module.substmts:
                        if (s.keyword == "import") or (s.keyword == "include"):
                            imports_and_includes.add(s.arg)
        # fetch dependencies that the server never advertised
        not_advertised = [
            str(i) for i in imports_and_includes if i not in schema_list
        ]
        if len(not_advertised) > 0:
            logging.debug(
                "The following schema are imported or included, but not listed in schemas tree:"
            )
            for m in sorted(not_advertised, key=str.lower):
                logging.debug(" {}".format(m))
            download_schemas(nc_mgr, schema_output_dir, not_advertised)
def generate_yang(test, uuid_set):
    """Build a YANG module named 'mqtt-led' from the JSON configuration
    string *test*, serialize it with pyang's 'yang' output plugin, and
    return (yang_text, mqtt_commands, uuid_set, device_category).

    Mutates the module-level globals count, level_memory, mqtt_commands,
    my_set and device_category as a side effect.
    """
    global count, level_memory
    global mqtt_commands
    global my_set
    global device_category
    my_set = uuid_set
    '''
    Generate a YANG-in-XML Tree
    - print the YANG Tree as string with SerialIO
    - build a new root-Element, called <data> with 'xmlns' Attribute
    - attach the stringified CDATA to the new Element
    - print the XML
    '''
    # python-modeled.netconf/modeled/netconf/yang/__init__.py
    # build the module skeleton: module / namespace / prefix statements
    module1 = Statement(None, None, None, 'module', 'mqtt-led')
    my_namespace = "http://ipv6lab.beuth-hochschule.de/led"
    my_prefix = "led"
    namespace = Statement(None, module1, None, 'namespace', my_namespace)
    module1.substmts.append(namespace)
    prefix = Statement(None, module1, None, 'prefix', my_prefix)
    module1.substmts.append(prefix)
    # http://stackoverflow.com/questions/10844064/items-in-json-object-are-out-of-order-using-json-dumps
    # OrderedDict keeps the JSON key order so statements come out in input order
    data = json.loads(test, object_pairs_hook=OrderedDict)
    count = 0
    level_memory = {}
    #print_dict(data, module1, count)
    # recursively convert the parsed JSON into YANG sub-statements
    parse_dict(data, module1)
    #revision = str(datetime.now())
    #revision = Statement(None, module, None, 'revision', revision)
    #module.substmts.append(revision)
    # https://github.com/mbj4668/pyang/blob/master/pyang/plugin.py
    # https://github.com/modeled/modeled.netconf/blob/master/modeled/netconf/yang/container.py
    """Serialize YANG container to the given output `format`. """
    # output stream for pyang output plugin
    stream = StringIO()
    # gets filled with all available pyang output format plugins
    PYANG_PLUGINS = {}
    # register and initialise pyang plugin
    pyang.plugin.init([])
    for plugin in pyang.plugin.plugins:
        plugin.add_output_format(PYANG_PLUGINS)
    del plugin
    # available output formats include: dsdl, depend, name, omni, yin, tree,
    # jstree, capability, yang, uml, jtox, jsonxsl, sample-xml-skeleton
    plugin = PYANG_PLUGINS['yang']
    # register plugin options according to pyang script
    optparser = OptionParser()
    plugin.add_opts(optparser)
    # pyang plugins also need a pyang.Context
    ctx = pyang.Context(DummyRepository())
    # which offers plugin-specific options (just take defaults)
    ctx.opts = optparser.parse_args([])[0]
    # ready to serialize
    plugin.emit(ctx, [module1], stream)
    # and return the resulting data
    stream.seek(0)
    yang = stream.getvalue()
    print('\nAusgabe: ')
    print(stream.read())
    print("")
    #return stream.read()
    #root = etree.Element("data", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring")
    #root.text = etree.CDATA(yang)
    #print etree.tostring(root, pretty_print=True)
    #return etree.tostring(root, pretty_print=True)
    # returns the constructed yang from json-config, a list of mqtt-commands
    # and a set of uuids
    return (yang, mqtt_commands, my_set, device_category)
def check_update(ctx, oldfilename, newmod):
    """Check that *newmod* is a valid update of the module revision named
    by --check-update-from: parse the old revision in a separate pyang
    context and run the individual chk_* update-rules checks, recording
    violations into ctx.errors.
    """
    # Search path for the old revision: the --old-path dirs plus the
    # directory containing the old file itself.
    oldpath = os.pathsep.join(ctx.opts.old_path)
    olddir = os.path.dirname(oldfilename)
    if olddir == '':
        olddir = '.'
    oldpath += os.pathsep + olddir
    # Parse the old revision in its own repository/context so its modules
    # don't collide with the new ones already loaded in `ctx`.
    oldrepo = pyang.FileRepository(oldpath, use_env=False)
    oldctx = pyang.Context(oldrepo)
    oldctx.opts = ctx.opts
    oldctx.lax_xpath_checks = ctx.lax_xpath_checks
    oldctx.lax_quote_checks = ctx.lax_quote_checks
    if ctx.opts.verbose:
        print("Loading old modules from:")
        for d in oldrepo.dirs:
            print(" %s" % d)
        print("")
    for p in plugin.plugins:
        p.setup_ctx(oldctx)
    oldfilename = ctx.opts.check_update_from
    try:
        fd = io.open(oldfilename, "r", encoding="utf-8")
        text = fd.read()
    except IOError as ex:
        sys.stderr.write("error %s: %s\n" % (oldfilename, str(ex)))
        sys.exit(1)
    oldmod = oldctx.add_module(oldfilename, text)
    ctx.errors.extend(oldctx.errors)
    if oldmod is None:
        return
    # Don't run the update checks if either revision already has real errors.
    for (epos, etag, eargs) in ctx.errors:
        if (epos.ref in (newmod.pos.ref, oldmod.pos.ref) and
            error.is_error(error.err_level(etag))):
            return
    if ctx.opts.verbose:
        print("Loaded old modules:")
        for x in oldrepo.get_modules_and_revisions(oldctx):
            (m, r, (fmt, filename)) = x
            print(" %s" % filename)
        print("")
    # module-level properties
    chk_modulename(oldmod, newmod, ctx)
    chk_namespace(oldmod, newmod, ctx)
    chk_revision(oldmod, newmod, ctx)
    # top-level statements, checked pairwise old vs. new
    for olds in oldmod.search('feature'):
        chk_feature(olds, newmod, ctx)
    for olds in oldmod.search('identity'):
        chk_identity(olds, newmod, ctx)
    for olds in oldmod.search('typedef'):
        chk_typedef(olds, newmod, ctx)
    for olds in oldmod.search('grouping'):
        chk_grouping(olds, newmod, ctx)
    for olds in oldmod.search('rpc'):
        chk_rpc(olds, newmod, ctx)
    for olds in oldmod.search('notification'):
        chk_notification(olds, newmod, ctx)
    for olds in oldmod.search('extension'):
        chk_extension(olds, newmod, ctx)
    # finally, walk the data node tree
    chk_i_children(oldmod, newmod, ctx)
def __init__(self, resolved_model_dir):
    """Set up a pyang repository/context over *resolved_model_dir*."""
    file_repo = pyang.FileRepository(resolved_model_dir, False)
    self.repos = file_repo
    self.ctx = pyang.Context(file_repo)
    self.resolved_model_dir = resolved_model_dir
    self.submodules = []