def emit_json_xsl(modules, ctx, fd):
    """Main control function.

    Set up the top-level parts of the stylesheet, then process
    recursively all nodes in all data trees, and finally emit the
    serialized stylesheet.

    Raises error.EmitError if any module failed validation.
    """
    # Refuse to translate modules that did not validate cleanly.
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JSONXSL plugin needs a valid module")
    # `ss` is presumably the module-level stylesheet root element that
    # the whole output hangs off -- defined elsewhere in this file.
    tree = ET.ElementTree(ss)
    ET.SubElement(ss, "output", method="text")
    xsltdir = os.environ.get("PYANG_XSLT_DIR", "/usr/local/share/yang/xslt")
    ET.SubElement(ss, "include", href=xsltdir + "/jsonxsl-templates.xsl")
    ET.SubElement(ss, "strip-space", elements="*")
    # XSLT template translating a namespace URI to its module name.
    nsmap = ET.SubElement(ss, "template", name="nsuri-to-module")
    ET.SubElement(nsmap, "param", name="uri")
    choo = ET.SubElement(nsmap, "choose")
    for module in modules:
        ns_uri = module.search_one("namespace").arg
        ss.attrib["xmlns:" + module.i_prefix] = ns_uri
        when = ET.SubElement(choo, "when", test="$uri='" + ns_uri + "'")
        xsl_text(module.i_modulename, when)
        process_module(module)
    # Bug fix: on Python 3 `fd` is a text stream, so ElementTree must be
    # asked for str output via encoding="unicode"; passing a byte
    # encoding there raises TypeError.  Python 2 keeps the old call.
    if sys.version_info[0] >= 3:
        tree.write(fd, encoding="unicode", xml_declaration=True)
    else:
        tree.write(fd, encoding="utf-8", xml_declaration=True)
def emit_json_xsl(modules, ctx, fd):
    """Main control function.

    Set up the top-level parts of the stylesheet, then process
    recursively all nodes in all data trees, and finally emit the
    serialized stylesheet.

    Raises error.EmitError if any module failed validation.
    """
    # Translation is only meaningful for modules that validated cleanly.
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JSONXSL plugin needs a valid module")
    # `ss` is presumably the module-level stylesheet root element --
    # defined elsewhere in this file.
    tree = ET.ElementTree(ss)
    ET.SubElement(ss, "output", method="text")
    xsltdir = os.environ.get("PYANG_XSLT_DIR", "/usr/local/share/yang/xslt")
    ET.SubElement(ss, "include", href=xsltdir + "/jsonxsl-templates.xsl")
    ET.SubElement(ss, "strip-space", elements="*")
    # XSLT template translating a namespace URI to its module name.
    nsmap = ET.SubElement(ss, "template", name="nsuri-to-module")
    ET.SubElement(nsmap, "param", name="uri")
    choo = ET.SubElement(nsmap, "choose")
    for module in modules:
        ns_uri = module.search_one("namespace").arg
        ss.attrib["xmlns:" + module.i_prefix] = ns_uri
        when = ET.SubElement(choo, "when", test="$uri='" + ns_uri + "'")
        xsl_text(module.i_modulename, when)
        process_module(module)
    # Python 3 text streams need str output ("unicode"); Python 2 gets
    # bytes.  NOTE(review): lexicographic comparison of sys.version is
    # fragile; sys.version_info would be safer -- confirm before changing.
    tree.write(fd, encoding="utf-8" if sys.version < "3" else "unicode",
               xml_declaration=True)
def emit(self, ctx, modules, fd):
    """Main control function.

    Set up the top-level parts of the stylesheet, then process
    recursively all nodes in all data trees, and finally emit the
    serialized stylesheet.

    Raises error.EmitError if any module failed validation.
    """
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JSONXSL plugin needs a valid module")
    # Map of module -> disambiguated prefix, computed once and reused.
    self.real_prefix = unique_prefixes(ctx)
    # Names of all top-level data nodes (rpc/notification excluded).
    self.top_names = []
    for m in modules:
        self.top_names.extend([c.arg for c in m.i_children
                               if c.keyword not in ("rpc", "notification")])
    # `ss` is presumably the module-level stylesheet root element --
    # defined elsewhere in this file.
    tree = ET.ElementTree(ss)
    ET.SubElement(ss, "output", method="text")
    xsltdir = os.environ.get("PYANG_XSLT_DIR", "/usr/local/share/yang/xslt")
    ET.SubElement(ss, "include", href=xsltdir + "/jsonxsl-templates.xsl")
    ET.SubElement(ss, "strip-space", elements="*")
    # XSLT template translating a namespace URI to its module name.
    nsmap = ET.SubElement(ss, "template", name="nsuri-to-module")
    ET.SubElement(nsmap, "param", name="uri")
    choo = ET.SubElement(nsmap, "choose")
    for module in self.real_prefix.keys():
        ns_uri = module.search_one("namespace").arg
        ss.attrib["xmlns:" + self.real_prefix[module]] = ns_uri
        when = ET.SubElement(choo, "when", test="$uri='" + ns_uri + "'")
        self.xsl_text(module.i_modulename, when)
        self.process_module(module)
    # Serialization differs per interpreter: Python 3 text streams need
    # str output ("unicode"); pre-2.7 ElementTree lacks xml_declaration.
    if sys.version > "3":
        tree.write(fd, encoding="unicode", xml_declaration=True)
    elif sys.version > "2.7":
        tree.write(fd, encoding="UTF-8", xml_declaration=True)
    else:
        tree.write(fd, encoding="UTF-8")
def emit(self, ctx, modules, fd):
    """Verify the requested modules are error-free, then emit the docs."""
    names = {m.arg for m in modules}
    if not ctx.opts.ignore_errors:
        for epos, etag, eargs in ctx.errors:
            belongs_here = epos.top.arg in names
            if belongs_here and error.is_error(error.err_level(etag)):
                raise error.EmitError("%s contains errors" % epos.top.arg)
    emit_docs(ctx, modules, fd)
def emit_dsdl(ctx, modules, fd):
    """Translate `modules` to the hybrid DSDL schema and write it to `fd`."""
    # Any error-level diagnostic makes the input unusable for translation.
    if any(error.is_error(error.err_level(etag))
           for epos, etag, eargs in ctx.errors):
        raise error.EmitError("DSDL translation needs a valid module")
    opts = ctx.opts
    schema = HybridDSDLSchema().from_modules(
        modules,
        opts.dsdl_no_dublin_core,
        opts.dsdl_no_documentation,
        opts.dsdl_record_defs,
        debug=0)
    fd.write(schema.serialize())
def emit(self, ctx, modules, fd):
    """Emit dependency output once the target modules are known clean."""
    # Errors in unrelated modules are tolerated; only ours must be clean.
    wanted = {m.arg for m in modules}
    for epos, etag, eargs in ctx.errors:
        if epos.top.arg not in wanted:
            continue
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("%s contains errors" % epos.top.arg)
    emit_depend(ctx, modules, fd)
def emit(self, ctx, modules, fd):
    """Main control function.

    Set up the top-level parts of the sample document, then process
    recursively all nodes in all data trees, and finally emit the
    sample XML document.

    Raises error.EmitError on validation errors or an unknown doctype.
    """
    # Optional subtree restriction: --sample-path "/a/b" -> ["a", "b"].
    if ctx.opts.sample_path is not None:
        path = ctx.opts.sample_path.split('/')
        if path[0] == '':
            path = path[1:]
    else:
        path = []
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError(
                "sample-xml-skeleton plugin needs a valid module")
    self.doctype = ctx.opts.doctype
    if self.doctype not in ("config", "data"):
        raise error.EmitError("Unsupported document type: %s" %
                              self.doctype)
    self.annots = ctx.opts.sample_annots
    self.defaults = ctx.opts.sample_defaults
    # Dispatch table: YANG statement keyword -> handler method.
    self.node_handler = {
        "container": self.container,
        "leaf": self.leaf,
        "anyxml": self.anyxml,
        "choice": self.process_children,
        "case": self.process_children,
        "list": self.list,
        "leaf-list": self.leaf_list,
        "rpc": self.ignore,
        "action": self.ignore,
        "notification": self.ignore
    }
    # Remember each module's namespace URI for qualified element names.
    self.ns_uri = {}
    for yam in modules:
        self.ns_uri[yam] = yam.search_one("namespace").arg
    self.top = etree.Element(
        self.doctype,
        {"xmlns": "urn:ietf:params:xml:ns:netconf:base:1.0"})
    tree = etree.ElementTree(self.top)
    for yam in modules:
        self.process_children(yam, self.top, None, path)
    # Output path differs per interpreter: Python 3 text streams need a
    # str, so tostring() bytes are decoded; pre-2.7 lxml lacks the
    # xml_declaration keyword on write().
    if sys.version > "3":
        fd.write(
            str(
                etree.tostring(tree, pretty_print=True, encoding="UTF-8",
                               xml_declaration=True), "UTF-8"))
    elif sys.version > "2.7":
        tree.write(fd, encoding="UTF-8", pretty_print=True,
                   xml_declaration=True)
    else:
        tree.write(fd, pretty_print=True, encoding="UTF-8")
def emit_dsdl(ctx, modules, fd):
    """Serialize the hybrid DSDL schema for `modules` onto `fd`."""
    for epos, etag, eargs in ctx.errors:
        level = error.err_level(etag)
        # A single hard error aborts the whole translation.
        if error.is_error(level):
            raise error.EmitError("DSDL translation needs a valid module")
    options = ctx.opts
    hybrid = HybridDSDLSchema().from_modules(
        modules,
        options.dsdl_no_dublin_core,
        options.dsdl_no_documentation,
        options.dsdl_record_defs,
        debug=0)
    fd.write(hybrid.serialize())
def emit(self, ctx, modules, fd):
    """Emit the DOT graph after confirming the modules are error-free."""
    wanted = {m.arg for m in modules}
    for epos, etag, eargs in ctx.errors:
        if epos.top.arg not in wanted:
            continue
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("%s contains errors" % epos.top.arg)
    emit_dot(ctx, modules, fd)
def emit(self, ctx, modules, fd):
    """Run the clean-pattern emitter, refusing modules with hard errors.

    Errors whose position carries no top-level statement (epos.top is
    None) are treated as fundamental as well.
    """
    modulenames = [m.arg for m in modules]
    for (epos, etag, eargs) in ctx.errors:
        if ((epos.top is None or epos.top.arg in modulenames) and
                error.is_error(error.err_level(etag))):
            # Bug fix: epos.top can be None on this path, so formatting
            # epos.top.arg would raise AttributeError instead of the
            # intended EmitError.  Fall back to the file reference.
            name = epos.top.arg if epos.top is not None else epos.ref
            raise error.EmitError("%s contains more fundamental errors "
                                  "than the pattern statements" % name)
    emit_clean_pattern(ctx, modules, fd)
def emit_cts(ctx, module, fd):
    """Write the conceptual-tree schema for `module` to `fd`."""
    # No errors are allowed in the module being translated.
    for epos, etag, eargs in ctx.errors:
        same_module = epos.top_name == module.arg
        if same_module and error.is_error(error.err_level(etag)):
            raise error.EmitError("CTS translation needs a valid module")
    opts = ctx.opts
    schema = ConceptualTreeSchema().from_modules(
        (module,),
        opts.cts_no_dublin_core,
        opts.cts_no_documentation,
        opts.cts_record_defs,
        debug=0)
    fd.write(schema.serialize())
def emit_cts(ctx, module, fd):
    """Serialize the conceptual-tree schema of `module` onto `fd`."""
    # No errors are allowed in the module being translated.
    for epos, etag, eargs in ctx.errors:
        if epos.top_name != module.arg:
            continue
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("CTS translation needs a valid module")
    fd.write(ConceptualTreeSchema().from_modules(
        (module, ),
        ctx.opts.cts_no_dublin_core,
        ctx.opts.cts_no_documentation,
        ctx.opts.cts_record_defs,
        debug=0).serialize())
def check_update(ctx, newmod):
    """Check `newmod` for backward-incompatible changes.

    Loads the old revision of the module (plus any old deviation
    modules) into a separate context, validates it, and then delegates
    the statement-by-statement comparison to chk_module().
    """
    # Search path for the old revision: --old-path entries plus the
    # directory containing the old module file itself.
    oldpath = os.pathsep.join(ctx.opts.old_path)
    olddir = os.path.dirname(ctx.opts.check_update_from)
    if olddir == '':
        olddir = '.'
    oldpath += os.pathsep + olddir
    oldrepo = repository.FileRepository(oldpath, use_env=False)
    oldctx = context.Context(oldrepo)
    # The old context inherits options and laxness flags from the new one.
    oldctx.opts = ctx.opts
    oldctx.lax_xpath_checks = ctx.lax_xpath_checks
    oldctx.lax_quote_checks = ctx.lax_quote_checks
    if ctx.opts.verbose:
        print("Loading old modules from:")
        for d in oldrepo.dirs:
            print(" %s" % d)
        print("")
    for p in plugin.plugins:
        p.setup_ctx(oldctx)
    for oldfilename in [ctx.opts.check_update_from] + ctx.opts.old_deviation:
        try:
            fd = io.open(oldfilename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (oldfilename, ex))
            sys.exit(1)
        if oldfilename in ctx.opts.old_deviation:
            # Deviation modules are loaded only for their side effects.
            oldctx.add_module(oldfilename, text)
        else:
            # NOTE(review): assumes check_update_from is never itself
            # listed in old_deviation; otherwise `oldmod` stays unbound
            # below -- confirm against the CLI option handling.
            oldmod = oldctx.add_module(oldfilename, text)
    oldctx.validate()
    ctx.errors.extend(oldctx.errors)
    if oldmod is None:
        return
    # Bail out silently if either revision has hard errors; they have
    # already been collected into ctx.errors for reporting.
    for epos, etag, eargs in ctx.errors:
        if (epos.ref in (newmod.pos.ref, oldmod.pos.ref) and
            error.is_error(error.err_level(etag))):
            return
    if ctx.opts.verbose:
        print("Loaded old modules:")
        for x in oldrepo.get_modules_and_revisions(oldctx):
            (m, r, (fmt, filename)) = x
            print(" %s" % filename)
        print("")
    chk_module(ctx, oldmod, newmod)
def emit(self, ctx, modules, fd):
    """Main control function.

    Set up the top-level parts of the sample document, then process
    recursively all nodes in all data trees, and finally emit the
    sample XML document.

    Raises error.EmitError on validation errors or an unknown doctype.
    """
    # Optional subtree restriction: --sample-path "/a/b" -> ["a", "b"].
    if ctx.opts.sample_path is not None:
        path = ctx.opts.sample_path.split('/')
        if path[0] == '':
            path = path[1:]
    else:
        path = []
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError(
                "sample-xml-skeleton plugin needs a valid module")
    self.doctype = ctx.opts.doctype
    if self.doctype not in ("config", "data"):
        raise error.EmitError("Unsupported document type: %s" %
                              self.doctype)
    self.annots = ctx.opts.sample_annots
    self.defaults = ctx.opts.sample_defaults
    # Dispatch table: YANG statement keyword -> handler method.
    # NOTE(review): unlike a sibling variant in this file, there are no
    # entries for rpc/action/notification -- presumably those keywords
    # never reach the dispatch here; confirm in process_children.
    self.node_handler = {
        "container": self.container,
        "leaf": self.leaf,
        "anyxml": self.anyxml,
        "choice": self.process_children,
        "case": self.process_children,
        "list": self.list,
        "leaf-list": self.leaf_list
    }
    # Remember each module's namespace URI for qualified element names.
    self.ns_uri = {}
    for yam in modules:
        self.ns_uri[yam] = yam.search_one("namespace").arg
    self.top = etree.Element(
        self.doctype,
        {"xmlns": "urn:ietf:params:xml:ns:netconf:base:1.0"})
    tree = etree.ElementTree(self.top)
    for yam in modules:
        self.process_children(yam, self.top, None, path)
    # Output path differs per interpreter: Python 3 text streams need a
    # str, so tostring() bytes are decoded; pre-2.7 lxml lacks the
    # xml_declaration keyword on write().
    if sys.version > "3":
        fd.write(str(etree.tostring(tree, pretty_print=True,
                                    encoding="UTF-8",
                                    xml_declaration=True), "UTF-8"))
    elif sys.version > "2.7":
        tree.write(fd, encoding="UTF-8", pretty_print=True,
                   xml_declaration=True)
    else:
        tree.write(fd, pretty_print=True, encoding="UTF-8")
def emit(self, ctx, modules, fd):
    """Validate, check the page-layout option, then emit the UML doc.

    Aborts via self.fatal() on any hard error in the target modules or
    on a malformed --uml-pages-layout value.
    """
    for (epos, etag, eargs) in ctx.errors:
        # Bug fix: epos.top is None for errors reported before any
        # module statement was parsed; the old `epos.top.arg in
        # self.mods` test crashed with AttributeError on such errors.
        # Treat them as fatal too (matching the sibling variant in this
        # file) and fall back to the file reference in the message.
        if ((epos.top is None or epos.top.arg in self.mods) and
                error.is_error(error.err_level(etag))):
            name = epos.top.arg if epos.top is not None else epos.ref
            self.fatal("%s contains errors" % name)
    if ctx.opts.uml_pages_layout is not None:
        if re.match('[0-9]x[0-9]', ctx.opts.uml_pages_layout) is None:
            self.fatal(
                "Illegal page split option %s, should be [0-9]x[0-9], example 2x2"
                % ctx.opts.uml_pages_layout)
    umldoc = uml_emitter(ctx)
    umldoc.emit(modules, fd)
def emit(self, ctx, modules, fd):
    """Validate, check the page-layout option, then emit the UML doc.

    Aborts via self.fatal() on any hard error in the target modules or
    on a malformed --uml-pages-layout value.
    """
    for (epos, etag, eargs) in ctx.errors:
        if ((epos.top is None or epos.top.arg in self.mods) and
                error.is_error(error.err_level(etag))):
            # Bug fix: the condition deliberately admits epos.top being
            # None, but the old message formatted epos.top.arg, raising
            # AttributeError instead of the intended fatal message.
            name = epos.top.arg if epos.top is not None else epos.ref
            self.fatal("%s contains errors" % name)
    if ctx.opts.uml_pages_layout is not None:
        if re.match('[0-9]x[0-9]', ctx.opts.uml_pages_layout) is None:
            self.fatal(
                "Illegal page split option %s, should be [0-9]x[0-9], example 2x2"
                % ctx.opts.uml_pages_layout)
    umldoc = uml_emitter(ctx)
    umldoc.emit(modules, fd)
def emit(self, ctx, module, fd):
    """Emit the XSD translation of `module`.

    Raises error.EmitError if the module has validation errors or if
    any module it references cannot be located.
    """
    # cannot do XSD unless everything is ok for our module
    for (epos, etag, eargs) in ctx.errors:
        if (epos.top_name == module.arg and
                error.is_error(error.err_level(etag))):
            raise error.EmitError("XSD translation needs a valid module")
    # we also need to have all other modules found
    for pre in module.i_prefixes:
        (modname, revision) = module.i_prefixes[pre]
        mod = statements.modulename_to_module(module, modname, revision)
        # Idiom fix: identity comparison with None (was `mod == None`).
        if mod is None:
            raise error.EmitError("cannot find module %s, needed by XSD"
                                  " translation" % modname)
    emit_xsd(ctx, module, fd)
def emit(self, ctx, modules, fd):
    """Build a sample JSON skeleton for `modules` and dump it to `fd`."""
    # A single hard error anywhere aborts generation.
    if any(error.is_error(error.err_level(etag))
           for epos, etag, eargs in ctx.errors):
        raise error.EmitError("sample-json-skeleton plugin needs a valid module")
    tree = {}
    self.defaults = ctx.opts.sample_defaults
    self.doctype = ctx.opts.doctype
    if self.doctype not in ("config", "data"):
        raise error.EmitError("Unsupported document type: %s" % self.doctype)
    for module in modules:
        self.process_children(module, tree, None)
    json.dump(tree, fd, indent=4)
def emit(self, ctx, modules, fd):
    """Emit path output, refusing modules with (or unable to report) errors."""
    if not ctx.opts.ignore_errors:
        names = [m.arg for m in modules]
        for epos, etag, eargs in ctx.errors:
            # Errors created very early in parsing can carry a position
            # whose top statement was never constructed; such modules
            # cannot be processed at all.
            if not hasattr(epos.top, "arg"):
                raise error.EmitError("%s contains errors, and was not parsed"
                                      % (epos.ref))
            if (epos.top.arg in names and
                    error.is_error(error.err_level(etag))):
                raise error.EmitError("%s contains errors" % epos.top.arg)
    emit_paths(ctx, modules, fd)
def emit(self, ctx, modules, fd):
    """Main control function: build and dump the JTOX driver file."""
    for epos, etag, eargs in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JTOX plugin needs a valid module")
    tree = {}
    mods = {}
    annots = {}
    # Record prefix and namespace for every module in the context.
    for mod, prefix in unique_prefixes(ctx).items():
        namespace = mod.search_one("namespace").arg
        mods[mod.i_modulename] = [prefix, namespace]
    # Collect metadata annotations; untyped annotations become "string".
    for module in modules:
        annotations = module.search(("ietf-yang-metadata", "annotation"))
        for ann in annotations:
            typ = ann.search_one("type")
            annots[module.arg + ":" + ann.arg] = (
                "string" if typ is None else self.base_type(typ))
    for module in modules:
        self.process_children(module, tree, None)
    json.dump({"modules": mods, "tree": tree, "annotations": annots}, fd)
def emit(self, ctx, modules, fd):
    """Dump the JTOX driver structure (modules, tree, annotations) as JSON."""
    if any(error.is_error(error.err_level(etag))
           for epos, etag, eargs in ctx.errors):
        raise error.EmitError("JTOX plugin needs a valid module")
    tree = {}
    # prefix + namespace per module, keyed by module name.
    mods = {m.i_modulename: [p, m.search_one("namespace").arg]
            for m, p in unique_prefixes(ctx).items()}
    annots = {}
    for module in modules:
        for ann in module.search(("ietf-yang-metadata", "annotation")):
            typ = ann.search_one("type")
            value = "string" if typ is None else self.base_type(typ)
            annots[module.arg + ":" + ann.arg] = value
    for module in modules:
        self.process_children(module, tree, None)
    json.dump({"modules": mods, "tree": tree, "annotations": annots}, fd)
def emit(self, ctx, modules, fd):
    """Main control function.

    Set up the top-level parts of the stylesheet, then process
    recursively all nodes in all data trees, and finally emit the
    serialized stylesheet.

    Raises error.EmitError if any module failed validation.
    """
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JSONXSL plugin needs a valid module")
    # Map of module -> disambiguated prefix, computed once and reused.
    self.real_prefix = unique_prefixes(ctx)
    # Names of all top-level data nodes (rpc/notification excluded).
    self.top_names = []
    for m in modules:
        self.top_names.extend([
            c.arg for c in m.i_children
            if c.keyword not in ("rpc", "notification")
        ])
    # `ss` is presumably the module-level stylesheet root element --
    # defined elsewhere in this file.
    tree = ET.ElementTree(ss)
    ET.SubElement(ss, "output", method="text")
    xsltdir = os.environ.get("PYANG_XSLT_DIR", "/usr/local/share/yang/xslt")
    ET.SubElement(ss, "include", href=xsltdir + "/jsonxsl-templates.xsl")
    ET.SubElement(ss, "strip-space", elements="*")
    # XSLT template translating a namespace URI to its module name.
    nsmap = ET.SubElement(ss, "template", name="nsuri-to-module")
    ET.SubElement(nsmap, "param", name="uri")
    choo = ET.SubElement(nsmap, "choose")
    for module in self.real_prefix.keys():
        ns_uri = module.search_one("namespace").arg
        ss.attrib["xmlns:" + self.real_prefix[module]] = ns_uri
        when = ET.SubElement(choo, "when", test="$uri='" + ns_uri + "'")
        self.xsl_text(module.i_modulename, when)
        self.process_module(module)
    # Serialization differs per interpreter: Python 3 text streams need
    # str output ("unicode"); pre-2.7 ElementTree lacks xml_declaration.
    if sys.version > "3":
        tree.write(fd, encoding="unicode", xml_declaration=True)
    elif sys.version > "2.7":
        tree.write(fd, encoding="UTF-8", xml_declaration=True)
    else:
        tree.write(fd, encoding="UTF-8")
def emit_jtox(modules, ctx, fd):
    """Main control function.

    Builds the module/prefix table and the data-tree skeleton, then
    dumps both as the JTOX driver file.
    """
    for (epos, etag, eargs) in ctx.errors:
        if error.is_error(error.err_level(etag)):
            raise error.EmitError("JTOX plugin needs a valid module")
    tree = {}
    # Bug fix: `mods` was assigned into below but never initialized in
    # this function, raising NameError on first use.  (If a module-level
    # `mods` was intended instead, this local shadows it -- confirm.)
    mods = {}
    prefixes = []

    def unique_prefix(p):
        """Disambiguate the module prefix."""
        suffix = 0
        while p == "nc" or p in prefixes:
            p += "%d" % suffix
            suffix += 1
        return p

    for module in modules:
        uri = module.search_one("namespace").arg
        prf = unique_prefix(module.i_prefix)
        prefixes.append(prf)
        mods[module.i_modulename] = [prf, uri]
    for module in modules:
        process_children(module, tree)
    json.dump({"modules": mods, "tree": tree}, fd)
def emit(self, ctx, modules, fd):
    """Generates Java classes from the YANG module supplied to pyang.

    The generated classes are placed in the directory specified by the
    '-d' or '--jnc-output' flag, or in "gen" if no such flag was
    provided, using the 'directory' attribute of ctx. If there are
    existing files in the output directory with the same name as the
    generated classes, they are silently overwritten.

    ctx     -- Context used to get output directory, verbosity mode,
               error handling policy (the ignore attribute) and whether
               or not a schema file should be generated.
    modules -- A list containing a module statement parsed from the
               YANG module supplied to pyang.
    fd      -- File descriptor (ignored).
    """
    if ctx.opts.debug or ctx.opts.verbose:
        print('JNC plugin starting')
    if not ctx.opts.ignore:
        for (epos, etag, _) in ctx.errors:
            # Missing modules are always fatal.
            if (error.is_error(error.err_level(etag)) and
                    etag in ('MODULE_NOT_FOUND', 'MODULE_NOT_FOUND_REV')):
                self.fatal("%s contains errors" % epos.top.arg)
            # Unresolved references only degrade the generated class
            # hierarchy; any other tag warns and aborts.
            if (etag in ('TYPE_NOT_FOUND', 'FEATURE_NOT_FOUND',
                         'IDENTITY_NOT_FOUND', 'GROUPING_NOT_FOUND')):
                util.print_warning(
                    msg=(etag.lower() + ', generated class ' +
                         'hierarchy might be incomplete.'), key=etag)
            else:
                util.print_warning(msg=(etag.lower() + ', aborting.'),
                                   key=etag)
                self.fatal("%s contains errors" % epos.top.arg)

    # Sweep, adding included and imported modules, until no change
    module_set = set(modules)
    num_modules = 0
    while num_modules != len(module_set):
        num_modules = len(module_set)
        for module in list(module_set):
            # NOTE(review): on Python 3 these `map` objects are one-shot
            # iterators, so the chain() below is exhausted after the
            # first membership test in the inner loop -- confirm this
            # code is only expected to run on Python 2 or fix with list().
            imported = map(lambda x: x.arg, util.search(module, 'import'))
            included = map(lambda x: x.arg, util.search(module, 'include'))
            for (module_stmt, rev) in self.ctx.modules:
                if module_stmt in chain(imported, included):
                    module_set.add(self.ctx.modules[(module_stmt, rev)])

    # Generate files from main modules
    for module in filter(lambda s: s.keyword == 'module', module_set):
        self.generate_from(module)

    # Generate files from augmented modules
    for aug_module in context.augmented_modules.values():
        self.generate_from(aug_module)

    # Print debug messages saying that we're done.
    if ctx.opts.debug or ctx.opts.verbose:
        if not self.ctx.opts.no_classes:
            print('Java classes generation COMPLETE.')
        if not self.ctx.opts.no_schema:
            print('Schema generation COMPLETE.')
def check_update(ctx, oldfilename, newmod):
    """Check `newmod` against the old revision in `oldfilename`.

    Loads the old revision (plus any old deviation modules) into a
    separate context, then runs the individual chk_* comparisons for
    every top-level statement kind.
    """
    # Search path for the old revision: --old-path entries plus the
    # directory containing the old module file itself.
    oldpath = os.pathsep.join(ctx.opts.old_path)
    olddir = os.path.dirname(oldfilename)
    if olddir == '':
        olddir = '.'
    oldpath += os.pathsep + olddir
    oldrepo = pyang.FileRepository(oldpath, use_env=False)
    oldctx = pyang.Context(oldrepo)
    # The old context inherits options and laxness flags from the new one.
    oldctx.opts = ctx.opts
    oldctx.lax_xpath_checks = ctx.lax_xpath_checks
    oldctx.lax_quote_checks = ctx.lax_quote_checks
    if ctx.opts.verbose:
        print("Loading old modules from:")
        for d in oldrepo.dirs:
            print(" %s" % d)
        print("")
    for p in plugin.plugins:
        p.setup_ctx(oldctx)
    # NOTE(review): the loop variable shadows the `oldfilename` parameter.
    for oldfilename in [ctx.opts.check_update_from] + ctx.opts.old_deviation:
        try:
            fd = io.open(oldfilename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (oldfilename, str(ex)))
            sys.exit(1)
        if oldfilename in ctx.opts.old_deviation:
            # Deviation modules are loaded only for their side effects.
            oldctx.add_module(oldfilename, text)
        else:
            oldmod = oldctx.add_module(oldfilename, text)
    ctx.errors.extend(oldctx.errors)
    if oldmod is None:
        return
    # Bail out silently if either revision has hard errors; they have
    # already been collected into ctx.errors for reporting.
    for (epos, etag, eargs) in ctx.errors:
        if (epos.ref in (newmod.pos.ref, oldmod.pos.ref) and
            error.is_error(error.err_level(etag))):
            return
    if ctx.opts.verbose:
        print("Loaded old modules:")
        for x in oldrepo.get_modules_and_revisions(oldctx):
            (m, r, (fmt, filename)) = x
            print(" %s" % filename)
        print("")
    chk_modulename(oldmod, newmod, ctx)
    chk_namespace(oldmod, newmod, ctx)
    chk_revision(oldmod, newmod, ctx)
    for olds in oldmod.search('feature'):
        chk_feature(olds, newmod, ctx)
    for olds in oldmod.search('identity'):
        chk_identity(olds, newmod, ctx)
    for olds in oldmod.search('typedef'):
        chk_typedef(olds, newmod, ctx)
    for olds in oldmod.search('grouping'):
        chk_grouping(olds, newmod, ctx)
    for olds in oldmod.search('rpc'):
        chk_rpc(olds, newmod, ctx)
    for olds in oldmod.search('notification'):
        chk_notification(olds, newmod, ctx)
    for olds in oldmod.search('extension'):
        chk_extension(olds, newmod, ctx)
    for olds in oldmod.search('augment'):
        chk_augment(olds, newmod, ctx)
    chk_i_children(oldmod, newmod, ctx)
def check_update(ctx, oldfilename, newmod):
    """Check `newmod` against the old revision in `oldfilename`.

    Loads the old revision (plus any old deviation modules) into a
    separate context, then runs the individual chk_* comparisons for
    every top-level statement kind.
    """
    # Search path for the old revision: --old-path entries plus the
    # directory containing the old module file itself.
    oldpath = os.pathsep.join(ctx.opts.old_path)
    olddir = os.path.dirname(oldfilename)
    if olddir == '':
        olddir = '.'
    oldpath += os.pathsep + olddir
    oldrepo = pyang.FileRepository(oldpath, use_env=False)
    oldctx = pyang.Context(oldrepo)
    # The old context inherits options and laxness flags from the new one.
    oldctx.opts = ctx.opts
    oldctx.lax_xpath_checks = ctx.lax_xpath_checks
    oldctx.lax_quote_checks = ctx.lax_quote_checks
    if ctx.opts.verbose:
        print("Loading old modules from:")
        for d in oldrepo.dirs:
            print(" %s" % d)
        print("")
    for p in plugin.plugins:
        p.setup_ctx(oldctx)
    # NOTE(review): the loop variable shadows the `oldfilename` parameter.
    for oldfilename in [ctx.opts.check_update_from] + ctx.opts.old_deviation:
        try:
            fd = io.open(oldfilename, "r", encoding="utf-8")
            text = fd.read()
        except IOError as ex:
            sys.stderr.write("error %s: %s\n" % (oldfilename, str(ex)))
            sys.exit(1)
        if oldfilename in ctx.opts.old_deviation:
            # Deviation modules are loaded only for their side effects.
            oldctx.add_module(oldfilename, text)
        else:
            oldmod = oldctx.add_module(oldfilename, text)
    ctx.errors.extend(oldctx.errors)
    if oldmod is None:
        return
    # Bail out silently if either revision has hard errors; they have
    # already been collected into ctx.errors for reporting.
    for (epos, etag, eargs) in ctx.errors:
        if (epos.ref in (newmod.pos.ref, oldmod.pos.ref) and
            error.is_error(error.err_level(etag))):
            return
    if ctx.opts.verbose:
        print("Loaded old modules:")
        for x in oldrepo.get_modules_and_revisions(oldctx):
            (m, r, (fmt, filename)) = x
            print(" %s" % filename)
        print("")
    chk_modulename(oldmod, newmod, ctx)
    chk_namespace(oldmod, newmod, ctx)
    chk_revision(oldmod, newmod, ctx)
    for olds in oldmod.search('feature'):
        chk_feature(olds, newmod, ctx)
    for olds in oldmod.search('identity'):
        chk_identity(olds, newmod, ctx)
    for olds in oldmod.search('typedef'):
        chk_typedef(olds, newmod, ctx)
    for olds in oldmod.search('grouping'):
        chk_grouping(olds, newmod, ctx)
    for olds in oldmod.search('rpc'):
        chk_rpc(olds, newmod, ctx)
    for olds in oldmod.search('notification'):
        chk_notification(olds, newmod, ctx)
    for olds in oldmod.search('extension'):
        chk_extension(olds, newmod, ctx)
    # Whole-module augment/children comparison (not per-statement here).
    chk_augment(oldmod, newmod, ctx)
    chk_i_children(oldmod, newmod, ctx)