def translate_src_configuration(schema_path, xpath, src_configuration, device_info):
    """Translate a source configuration into the target device's YANG form.

    Looks up the device's translation-point registry entry, decodes the
    source XML into a pyangbind YANG object, invokes the app-provided
    translate API, and re-serialises the translated object.

    Returns:
        A tuple of (lxml root element of the translated XML, target xpath).

    Raises:
        Exception: if no translation script is registered for the path.
    """
    # Registry entry describing the translation points for this device.
    device_tp_info = tp_list.translate_yang_registry.get(device_info)
    trans_info = locate_translation_point_path(schema_path, device_tp_info, src_configuration)
    if trans_info is None:
        raise Exception("did not find translation script")

    # (translation script module, yang bindings, yang base name)
    translate_py, binding, yang_base = trans_info[:3]

    # Decode the source XML into a pyangbind YANG object.
    wrapped_xml = add_to_dummy_xml(src_configuration)
    module_yang_obj = pybindIETFXMLDecoder.load_xml(
        wrapped_xml, parent=binding, yang_base=yang_base)

    # The translate API hangs off the Yang module's top-level object and is
    # named "_translate__<module>"; it converts the input object to the
    # app's own type.
    translate_api = getattr(
        translate_py, "_translate__%s" % safe_name(module_yang_obj._yang_name))
    translated_obj, target_xpath = translate_api(module_yang_obj, None, xpath)

    # Serialise back to XML and re-parse into an lxml element.
    serialised = pybindIETFXMLEncoder.serialise(translated_obj)
    xml_parser = etree.XMLParser(remove_blank_text=True)
    return etree.fromstring(serialised, xml_parser), target_xpath
def ocContainerFromPath(model: str, xpath: str) -> PybindBase:
    """Create an empty PybindBase instance of the model for the path.

    The model class is resolved inside the oc_config_validate.models
    package, then the xpath is walked attribute by attribute (adding list
    entries where the path carries keys) to reach the container.

    Args:
        model: the OC model class name in the oc_config_validate.models
            package, as `module.class`.
        xpath: the xpath to the OC container to create.

    Returns:
        A PybindBase object of the class.

    Raises:
        Error: if unable to find the Python class or if the class is not
            derived from PybindBase.
        target.XpathError: if the xpath is invalid.
        AttributeError: if unable to find an xpath element in the OC class.
    """
    parts = model.split('.')
    if len(parts) != 2:
        raise Error("%s is not module.class" % model)
    module_name, class_name = parts
    bindings_module = importlib.import_module(
        "oc_config_validate.models." + module_name)
    model_cls = getattr(bindings_module, class_name)
    if not isclass(model_cls):
        raise Error("%s is not a class in oc_config_validate.models package" % model)

    gnmi_xpath = target.parsePath(xpath)
    node = model_cls()
    for elem in gnmi_xpath.elem:
        # Descend into the safe-named child attribute for each path element.
        node = getattr(node, yangtypes.safe_name(elem.name))
        if elem.key:
            # List element: create/select the entry keyed by the path keys.
            keys = {yangtypes.safe_name(k): v for k, v in elem.key.items()}
            node = node.add(**keys)

    if not issubclass(node.__class__, PybindBase):
        raise Error("%s:%s is not a valid container class" % (model, xpath))
    return node
def test_001_check_containers(self):
    """Check that the expected top-level and nested containers exist."""
    for attr in ["empty-container", "parent", ["parent", "child"]]:
        if isinstance(attr, list):
            # Nested container path: walk one attribute at a time.
            # safe_name() maps YANG names (which may contain '-') to their
            # Python attribute names — applied here for consistency with
            # the scalar branch below (the original omitted it, which only
            # worked because these particular names carry no hyphen).
            node = self.instance
            for name in attr:
                node = getattr(node, safe_name(name), None)
            self.assertIsNot(node, None)
        else:
            elem = getattr(self.instance, safe_name(attr), None)
            self.assertIsNot(elem, None)
def load_modules(self):
    """Dynamically import the bindings modules named in the configuration file.

    Each configured Python bindings module is imported exactly once and
    registered into globals() so the YANG module class can be looked up by
    name; each class is then instantiated against this server's path helper.

    Raises:
        PyNMSServerExampleException: when a configured YANG module class
            cannot be found in its Python bindings module.
    """
    imports = []
    for module in self._modcfg['modules']:
        # De-duplicate while preserving first-seen order.
        if module['python_module'] not in imports:
            imports.append(module['python_module'])
    for modimport in imports:
        globals()[modimport] = importlib.import_module(modimport)
    for mod in self._modcfg['modules']:
        ymod_cls = getattr(globals()[mod['python_module']],
                           safe_name(mod['yang_module']), None)
        if ymod_cls is None:
            raise PyNMSServerExampleException(
                "Cannot load module %s from bindings" % safe_name(mod['yang_module']))
        # Parenthesised so the line is valid under both Python 2 and 3
        # (the original used a py2-only print statement).
        print("Registering %s->%s" % (ymod_cls, mod))
        # Instantiating registers the module's tree into the path helper.
        ymod_cls(path_helper=self._yph)
def translate_to_new_yangobj(module_yang_obj, translate_py):
    """Convert an input pyangbind YANG object into the app's own type.

    The app exposes a "_translate__<module>" callable on its top-level
    module object; invoke it with the input object and return its result.
    """
    translate_api_name = "_translate__%s" % safe_name(module_yang_obj._yang_name)
    return getattr(translate_py, translate_api_name)(module_yang_obj)
def load_modules(self):
    """Dynamically import the bindings modules named in the configuration file.

    Each configured Python bindings module is imported exactly once and
    registered into globals() so the YANG module class can be looked up by
    name; each class is then instantiated against this server's path helper.

    Raises:
        PyNMSServerExampleException: when a configured YANG module class
            cannot be found in its Python bindings module.
    """
    imports = []
    for module in self._modcfg['modules']:
        # De-duplicate while preserving first-seen order.
        if module['python_module'] not in imports:
            imports.append(module['python_module'])
    for modimport in imports:
        globals()[modimport] = importlib.import_module(modimport)
    for mod in self._modcfg['modules']:
        ymod_cls = getattr(globals()[mod['python_module']],
                           safe_name(mod['yang_module']), None)
        if ymod_cls is None:
            raise PyNMSServerExampleException(
                "Cannot load module %s from bindings" % safe_name(mod['yang_module']))
        # Parenthesised so the line is valid under both Python 2 and 3
        # (the original used a py2-only print statement).
        print("Registering %s->%s" % (ymod_cls, mod))
        # Instantiating registers the module's tree into the path helper.
        ymod_cls(path_helper=self._yph)
def _load_dict(cls, data):
    """Recursively load a plain dict of values into the pyangbind object *cls*.

    YANG list nodes are indexed (or .add()ed) by key; nested dicts recurse
    into the matching child container; scalar leaves are written through the
    generated ``_set_<name>`` method and then flagged as changed.
    """
    for k, v in data.items():
        if cls._yang_type == "list":
            # List node: reuse the existing entry for this key, or create it.
            try:
                attr = cls[k]
            except KeyError:
                attr = cls.add(k)
            _load_dict(attr, v)
        elif isinstance(v, dict):
            # Nested container: descend into the safe-named child attribute.
            attr = getattr(cls, yangtypes.safe_name(k))
            _load_dict(attr, v)
        else:
            model = getattr(cls, yangtypes.safe_name(k))
            # We can't set attributes that are keys
            if model._is_keyval:
                continue
            setter = getattr(cls, "_set_{}".format(yangtypes.safe_name(k)))
            setter(v)
            # The setter replaces the leaf object, so re-fetch it from its
            # parent before marking it changed.
            model = getattr(model._parent, yangtypes.safe_name(k))
            model._mchanged = True
def load_json(d, parent, yang_base, obj=None, path_helper=None, extmethods=None, overwrite=False):
    """Load a pybind-format JSON dict *d* into a pyangbind object tree.

    If *obj* is not supplied, the target class is resolved as the
    ``yang_base`` attribute of *parent* and either re-used from the path
    helper's tree or freshly instantiated. Containers and lists recurse;
    leaves are written through the generated ``_set_<name>`` methods.
    Returns the populated object.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindJSONUpdateError('update was attempted to a node that ' +
                                            'was not unique')
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    # Handle the case where we are supplied with a scalar value rather than
    # a dict/list: set it directly on the parent and return.
    if not isinstance(d, dict) or isinstance(d, list):
        set_method = getattr(obj._parent, "_set_%s" % safe_name(obj._yang_name))
        set_method(d)
        return obj

    for key in d:
        child = getattr(obj, "_get_%s" % safe_name(key), None)
        if child is None:
            # NOTE(review): message is missing a space between "that" and "did".
            raise AttributeError("JSON object contained a key that" +
                                 "did not exist (%s)" % (key))
        chobj = child()
        set_via_stdmethod = True
        pybind_attr = getattr(chobj, '_pybind_generated_by', None)
        if pybind_attr in ["container"]:
            # Container: optionally clear it, then recurse into it.
            if overwrite:
                for elem in chobj._pyangbind_elements:
                    unsetchildelem = getattr(chobj, "_unset_%s" % elem)
                    unsetchildelem()
            pybindJSONDecoder.load_json(d[key], chobj, yang_base, obj=chobj,
                                        path_helper=path_helper)
            set_via_stdmethod = False
        elif pybind_attr in ["YANGListType", "list"]:
            # we need to add each key to the list and then skip a level in the
            # JSON hierarchy
            list_obj = getattr(obj, safe_name(key), None)
            if list_obj is None:
                raise pybindJSONDecodeError("Could not load list object " +
                                            "with name %s" % key)
            ordered_list = getattr(list_obj, "_ordered", None)
            if ordered_list:
                # Put keys in order: explicitly ordered entries (carrying a
                # "__yang_order" marker) first, then the rest.
                okeys = []
                kdict = {}
                for k, v in d[key].iteritems():
                    if "__yang_order" not in v:
                        # Element is not specified in terms of order, so
                        # push to a list that keeps this order
                        okeys.append(k)
                    else:
                        kdict[v["__yang_order"]] = k
                        # Throw this metadata away
                        v.pop("__yang_order", None)
                okeys.reverse()
                key_order = [kdict[k] for k in sorted(kdict)]
                for add_element in okeys:
                    key_order.append(add_element)
            else:
                key_order = d[key].keys()
            for child_key in key_order:
                if child_key not in chobj:
                    chobj.add(child_key)
                parent = chobj[child_key]
                pybindJSONDecoder.load_json(d[key][child_key], parent, yang_base,
                                            obj=parent, path_helper=path_helper)
            set_via_stdmethod = False
            if overwrite:
                # Drop list entries that are absent from the input JSON.
                for child_key in chobj:
                    if child_key not in d[key]:
                        chobj.delete(child_key)
        elif pybind_attr in ["TypedListType"]:
            if not overwrite:
                # Leaf-list merge: append missing items, remove stale ones.
                list_obj = getattr(obj, "_get_%s" % safe_name(key))()
                for item in d[key]:
                    if item not in list_obj:
                        list_obj.append(item)
                list_copy = []
                for elem in list_obj:
                    list_copy.append(elem)
                for e in list_copy:
                    if e not in d[key]:
                        list_obj.remove(e)
                set_via_stdmethod = False
            else:
                # use the set method
                pass
        elif pybind_attr in ["RestrictedClassType", "ReferencePathType",
                             "RestrictedPrecisionDecimal"]:
            # normal but valid types - which use the std set method
            pass
        elif pybind_attr is None:
            # not a pybind attribute at all - keep using the std set method
            pass
        else:
            raise pybindJSONUpdateError("unknown pybind type when loading JSON: %s"
                                        % pybind_attr)

        if set_via_stdmethod:
            # simply get the set method and then set the value of the leaf
            set_method = getattr(obj, "_set_%s" % safe_name(key))
            set_method(d[key], load=True)
    return obj
def load_ietf_json(d, parent, yang_base, obj=None, path_helper=None,
                   extmethods=None, overwrite=False, skip_unknown=False):
    """Load an IETF/RFC-7951-style JSON dict *d* into a pyangbind object tree.

    Namespaced keys ("module:leaf") are stripped to the local name; "@"
    keys carry metadata; dict values recurse into containers, list values
    populate YANG lists (keyed, multi-keyed, or keyless), and remaining
    values are set on leaves. When *skip_unknown* is set, keys with no
    matching attribute are silently ignored instead of raising.
    Returns the populated object.

    Fix vs. the previous revision: in the "empty"-leaf branch
    ``set_method`` was called before it was ever assigned in this scope,
    raising UnboundLocalError for any empty-type leaf; the setter is now
    looked up before the call.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindJSONUpdateError(
                    'update was attempted to a node that ' +
                    'was not unique')
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    # Handle the case where we are supplied with a scalar value rather than
    # a dict: set it directly on the parent and return.
    if not isinstance(d, dict) or isinstance(d, list):
        set_method = getattr(obj._parent, "_set_%s" % safe_name(obj._yang_name))
        set_method(d)
        return obj

    for key in d:
        # Fix any namespace that was supplied in the JSON
        if ":" in key:
            ykey = key.split(":")[-1]
        else:
            ykey = key

        if key == "@":
            # Handle whole container metadata object
            for k, v in d[key].iteritems():
                obj._add_metadata(k, v)
            continue
        elif "@" in key:
            # Don't handle metadata elements, each element
            # will look up its own metadata
            continue

        std_method_set = False
        # Handle the case that this is a JSON object
        if isinstance(d[key], dict):
            # Iterate through attributes and set to that value
            attr_get = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if attr_get is None and skip_unknown is False:
                raise AttributeError("Invalid attribute specified (%s)" % ykey)
            elif attr_get is None and skip_unknown is not False:
                # Skip unknown JSON keys
                continue
            chobj = attr_get()
            if hasattr(chobj, "_presence"):
                # Presence containers become "present" merely by appearing.
                if chobj._presence:
                    chobj._set_present()
            pybindJSONDecoder.check_metadata_add(key, d, chobj)
            pybindJSONDecoder.load_ietf_json(d[key], None, None, obj=chobj,
                                             path_helper=path_helper,
                                             extmethods=extmethods,
                                             overwrite=overwrite,
                                             skip_unknown=skip_unknown)
        elif isinstance(d[key], list):
            for elem in d[key]:
                # if this is a list, then this is a YANG list
                this_attr = getattr(obj, "_get_%s" % safe_name(ykey), None)
                if this_attr is None:
                    raise AttributeError(
                        "List specified that did not exist")
                this_attr = this_attr()
                if hasattr(this_attr, "_keyval"):
                    if overwrite:
                        existing_keys = this_attr.keys()
                        for i in existing_keys:
                            this_attr.delete(i)
                    # this handles YANGLists
                    if this_attr._keyval is False:
                        # Keyless list, generate a key
                        k = this_attr.add()
                        nobj = this_attr[k]
                    elif " " in this_attr._keyval:
                        # Compound (multi-)key: build kwargs and the
                        # space-joined key string in parallel.
                        keystr = u""
                        kwargs = {}
                        for pkv, ykv in zip(
                                this_attr._keyval.split(" "),
                                this_attr._yang_keys.split(" ")):
                            kwargs[pkv] = elem[ykv]
                            keystr += u"%s " % elem[ykv]
                        keystr = keystr.rstrip(" ")
                        if keystr not in this_attr:
                            nobj = this_attr.add(**kwargs)
                        else:
                            nobj = this_attr[keystr]
                    else:
                        k = elem[this_attr._yang_keys]
                        if k not in this_attr:
                            nobj = this_attr.add(k)
                        else:
                            nobj = this_attr[k]
                    pybindJSONDecoder.load_ietf_json(
                        elem, None, None, obj=nobj, path_helper=path_helper,
                        extmethods=extmethods, overwrite=overwrite,
                        skip_unknown=skip_unknown)
                    pybindJSONDecoder.check_metadata_add(key, d, nobj)
                else:
                    # this is a leaf-list
                    std_method_set = True
        else:
            std_method_set = True

        if std_method_set:
            get_method = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if get_method is None and skip_unknown is False:
                raise AttributeError("JSON object contained a key that " +
                                     "did not exist (%s)" % (ykey))
            elif get_method is None and skip_unknown is not False:
                continue
            chk = get_method()
            if chk._is_keyval is True:
                # Keys were already set when the list entry was created.
                pass
            elif chk._yang_type == "empty":
                # RFC 7951 encodes an empty leaf as [null]/None; its presence
                # means "set". Look the setter up before calling it (the
                # previous code called an unbound set_method here).
                if d[key] is None:
                    set_method = getattr(obj, "_set_%s" % safe_name(ykey), None)
                    if set_method is None:
                        raise AttributeError(
                            "Invalid attribute specified in JSON - %s" % (ykey))
                    set_method(True)
            else:
                set_method = getattr(obj, "_set_%s" % safe_name(ykey), None)
                if set_method is None:
                    raise AttributeError(
                        "Invalid attribute specified in JSON - %s" % (ykey))
                set_method(d[key])
            pybindJSONDecoder.check_metadata_add(key, d, get_method())
    return obj
def load_json(d, parent, yang_base, obj=None, path_helper=None, extmethods=None,
              overwrite=False, skip_unknown=False):
    """Load a pybind-format JSON dict *d* into a pyangbind object tree.

    Like the non-skip_unknown variant, but unknown keys are silently
    ignored when *skip_unknown* is truthy, and presence containers are
    marked present when they appear in the input. Returns the populated
    object.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindJSONUpdateError(
                    'update was attempted to a node that ' +
                    'was not unique')
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    # Handle the case where we are supplied with a scalar value rather than
    # a dict: set it directly on the parent and return.
    if not isinstance(d, dict) or isinstance(d, list):
        set_method = getattr(obj._parent, "_set_%s" % safe_name(obj._yang_name))
        set_method(d)
        return obj

    for key in d:
        child = getattr(obj, "_get_%s" % safe_name(key), None)
        if child is None and skip_unknown is False:
            # NOTE(review): message is missing a space between "that" and "did".
            raise AttributeError("JSON object contained a key that" +
                                 "did not exist (%s)" % (key))
        elif child is None and skip_unknown:
            # skip unknown elements if we are asked to by the user
            continue
        chobj = child()
        if hasattr(chobj, "_presence"):
            # Presence containers become "present" merely by appearing.
            if chobj._presence:
                chobj._set_present()
        set_via_stdmethod = True
        pybind_attr = getattr(chobj, '_pybind_generated_by', None)
        if pybind_attr in ["container"]:
            # Container: optionally clear it, then recurse into it.
            if overwrite:
                for elem in chobj._pyangbind_elements:
                    unsetchildelem = getattr(chobj, "_unset_%s" % elem)
                    unsetchildelem()
            pybindJSONDecoder.load_json(d[key], chobj, yang_base, obj=chobj,
                                        path_helper=path_helper,
                                        skip_unknown=skip_unknown)
            set_via_stdmethod = False
        elif pybind_attr in ["YANGListType", "list"]:
            # we need to add each key to the list and then skip a level in the
            # JSON hierarchy
            list_obj = getattr(obj, safe_name(key), None)
            if list_obj is None and skip_unknown is False:
                raise pybindJSONDecodeError("Could not load list object " +
                                            "with name %s" % key)
            if list_obj is None and skip_unknown is not False:
                continue
            ordered_list = getattr(list_obj, "_ordered", None)
            if ordered_list:
                # Put keys in order: explicitly ordered entries (carrying a
                # "__yang_order" marker) first, then the rest.
                okeys = []
                kdict = {}
                for k, v in d[key].iteritems():
                    if "__yang_order" not in v:
                        # Element is not specified in terms of order, so
                        # push to a list that keeps this order
                        okeys.append(k)
                    else:
                        kdict[v["__yang_order"]] = k
                        # Throw this metadata away
                        v.pop("__yang_order", None)
                okeys.reverse()
                key_order = [kdict[k] for k in sorted(kdict)]
                for add_element in okeys:
                    key_order.append(add_element)
            else:
                key_order = d[key].keys()
            for child_key in key_order:
                if child_key not in chobj:
                    chobj.add(child_key)
                parent = chobj[child_key]
                pybindJSONDecoder.load_json(d[key][child_key], parent, yang_base,
                                            obj=parent, path_helper=path_helper,
                                            skip_unknown=skip_unknown)
            set_via_stdmethod = False
            if overwrite:
                # Drop list entries that are absent from the input JSON.
                for child_key in chobj:
                    if child_key not in d[key]:
                        chobj.delete(child_key)
        elif pybind_attr in ["TypedListType"]:
            if not overwrite:
                # Leaf-list merge: append missing items, remove stale ones.
                list_obj = getattr(obj, "_get_%s" % safe_name(key))()
                for item in d[key]:
                    if item not in list_obj:
                        list_obj.append(item)
                list_copy = []
                for elem in list_obj:
                    list_copy.append(elem)
                for e in list_copy:
                    if e not in d[key]:
                        list_obj.remove(e)
                set_via_stdmethod = False
            else:
                # use the set method
                pass
        elif pybind_attr in [
                "RestrictedClassType", "ReferencePathType",
                "RestrictedPrecisionDecimal"
        ]:
            # normal but valid types - which use the std set method
            pass
        elif pybind_attr is None:
            # not a pybind attribute at all - keep using the std set method
            pass
        else:
            raise pybindJSONUpdateError(
                "unknown pybind type when loading JSON: %s" % pybind_attr)

        if set_via_stdmethod:
            # simply get the set method and then set the value of the leaf
            set_method = getattr(obj, "_set_%s" % safe_name(key))
            set_method(d[key], load=True)
    return obj
def load_xml(d, parent, yang_base, obj=None, path_helper=None, extmethods=None):
    """low-level XML deserialisation function, based on
    pybindJSONDecoder.load_ietf_json()

    Walks the lxml element *d* child by child, matching each tag's local
    name to an attribute of the pyangbind object: containers and list
    entries recurse, leaf-lists append, and plain leaves are set through
    the generated ``_set_<name>`` method. Returns the populated object.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindLoadUpdateError(
                    "update was attempted to a node that " +
                    "was not unique")
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    for child in d.getchildren():
        # separate element namespace and tag
        qn = etree.QName(child)
        namespace, ykey = qn.namespace, qn.localname
        # need to look up the key in the object to find out what type it should be,
        # because we can't tell from the XML structure
        attr_get = getattr(obj, "_get_%s" % safe_name(ykey), None)
        if attr_get is None:
            raise AttributeError("Invalid attribute specified (%s)" % ykey)
        chobj = attr_get()

        if chobj._yang_type == "container":
            if hasattr(chobj, "_presence"):
                # Presence containers become "present" merely by appearing.
                if chobj._presence:
                    chobj._set_present()
            pybindIETFXMLDecoder.load_xml(child, None, None, obj=chobj,
                                          path_helper=path_helper,
                                          extmethods=extmethods)
        elif chobj._yang_type == "list":
            if not chobj._keyval:
                raise NotImplementedError("keyless list?")
            # we just need to find the key value to add it to the list
            key_parts = []
            add_kwargs = {}
            for pkv, ykv in zip(chobj._keyval.split(" "),
                                chobj._yang_keys.split(" ")):
                add_kwargs[pkv] = child[ykv]
                key_parts.append(str(child[ykv]))
            key_str = " ".join(map(str, key_parts))
            if key_str not in chobj:
                nobj = chobj.add(**add_kwargs)
            else:
                nobj = chobj[key_str]
            # now we have created the nested object element, we add other members
            pybindIETFXMLDecoder.load_xml(child, None, None, obj=nobj,
                                          path_helper=path_helper,
                                          extmethods=extmethods)
        elif hasattr(chobj, "_pybind_generated_by"
                     ) and chobj._pybind_generated_by == "TypedListType":
            # NOTE: this is a little curious, because we are relying on the coercion of types
            # i.e. lxml will "identify" the type based on its own internal model of Python
            # types, see: https://lxml.de/2.0/objectify.html#how-data-types-are-matched
            # There are limitations which need to be addressed, e.g. hexadecimal strings.
            # Already, we have a stringify-fallback: if we fail on the first attempt then
            # try again as a pure string (if its allowed).
            try:
                chobj.append(child.pyval)
            except ValueError:
                if six.text_type in chobj._allowed_type:
                    chobj.append(str(child.pyval))
                else:
                    raise
        else:
            if chobj._is_keyval is True:
                # we've already added the key
                continue
            val = child.text
            if chobj._yang_type == "empty":
                # An empty leaf must carry no text; its presence means True.
                if child.text is None:
                    val = True
                else:
                    raise ValueError(
                        "Invalid value for empty in input XML - key: %s, got: %s"
                        % (ykey, val))
            elif chobj._yang_type == "identityref":
                # Strip a namespace prefix from the identity value, if any.
                if ":" in val:
                    _, val = val.split(":", 1)
            if val is not None:
                set_method = getattr(obj, "_set_%s" % safe_name(ykey), None)
                if set_method is None:
                    raise AttributeError(
                        "Invalid attribute specified in XML - %s" % (ykey))
                set_method(val)
    return obj
def load_ietf_json(d, parent, yang_base, obj=None, path_helper=None,
                   extmethods=None, overwrite=False):
    """Load an IETF/RFC-7951-style JSON dict *d* into a pyangbind object tree.

    Namespaced keys ("module:leaf") are stripped to the local name; "@"
    keys carry metadata; dict values recurse into containers, list values
    populate YANG lists (keyed, multi-keyed, or keyless), and remaining
    values are set on leaves. Returns the populated object.

    Fix vs. the previous revision: in the "empty"-leaf branch
    ``set_method`` was called before it was ever assigned in this scope,
    raising UnboundLocalError for any empty-type leaf; the setter is now
    looked up before the call.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindJSONUpdateError('update was attempted to a node that ' +
                                            'was not unique')
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    # Handle the case where we are supplied with a scalar value rather than
    # a dict: set it directly on the parent and return.
    if not isinstance(d, dict) or isinstance(d, list):
        set_method = getattr(obj._parent, "_set_%s" % safe_name(obj._yang_name))
        set_method(d)
        return obj

    for key in d:
        # Fix any namespace that was supplied in the JSON
        if ":" in key:
            ykey = key.split(":")[-1]
        else:
            ykey = key

        if key == "@":
            # Handle whole container metadata object
            for k, v in d[key].iteritems():
                obj._add_metadata(k, v)
            continue
        elif "@" in key:
            # Don't handle metadata elements, each element
            # will look up its own metadata
            continue

        std_method_set = False
        # Handle the case that this is a JSON object
        if isinstance(d[key], dict):
            # Iterate through attributes and set to that value
            attr_get = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if attr_get is None:
                raise AttributeError("Invalid attribute specified (%s)" % ykey)
            pybindJSONDecoder.check_metadata_add(key, d, attr_get())
            pybindJSONDecoder.load_ietf_json(d[key], None, None, obj=attr_get(),
                                             path_helper=path_helper,
                                             extmethods=extmethods,
                                             overwrite=overwrite)
        elif isinstance(d[key], list):
            for elem in d[key]:
                # if this is a list, then this is a YANG list
                this_attr = getattr(obj, "_get_%s" % safe_name(ykey), None)
                if this_attr is None:
                    raise AttributeError("List specified that did not exist")
                this_attr = this_attr()
                if hasattr(this_attr, "_keyval"):
                    if overwrite:
                        existing_keys = this_attr.keys()
                        for i in existing_keys:
                            this_attr.delete(i)
                    # this handles YANGLists
                    if this_attr._keyval is False:
                        # Keyless list, generate a key
                        k = this_attr.add()
                        nobj = this_attr[k]
                    elif " " in this_attr._keyval:
                        # Compound (multi-)key: build kwargs and the
                        # space-joined key string in parallel.
                        keystr = u""
                        kwargs = {}
                        for pkv, ykv in zip(this_attr._keyval.split(" "),
                                            this_attr._yang_keys.split(" ")):
                            kwargs[pkv] = elem[ykv]
                            keystr += u"%s " % elem[ykv]
                        keystr = keystr.rstrip(" ")
                        if keystr not in this_attr:
                            nobj = this_attr.add(**kwargs)
                        else:
                            nobj = this_attr[keystr]
                    else:
                        k = elem[this_attr._yang_keys]
                        if k not in this_attr:
                            nobj = this_attr.add(k)
                        else:
                            nobj = this_attr[k]
                    pybindJSONDecoder.load_ietf_json(elem, None, None, obj=nobj,
                                                     path_helper=path_helper,
                                                     extmethods=extmethods,
                                                     overwrite=overwrite)
                    pybindJSONDecoder.check_metadata_add(key, d, nobj)
                else:
                    # this is a leaf-list
                    std_method_set = True
        else:
            std_method_set = True

        if std_method_set:
            get_method = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if get_method is None:
                raise AttributeError("JSON object contained a key that" +
                                     "did not exist (%s)" % (ykey))
            chk = get_method()
            if chk._is_keyval is True:
                # Keys were already set when the list entry was created.
                pass
            elif chk._yang_type == "empty":
                # RFC 7951 encodes an empty leaf as [null]/None; its presence
                # means "set". Look the setter up before calling it (the
                # previous code called an unbound set_method here).
                if d[key] is None:
                    set_method = getattr(obj, "_set_%s" % safe_name(ykey), None)
                    if set_method is None:
                        raise AttributeError(
                            "Invalid attribute specified in JSON - %s" % (ykey))
                    set_method(True)
            else:
                set_method = getattr(obj, "_set_%s" % safe_name(ykey), None)
                if set_method is None:
                    raise AttributeError("Invalid attribute specified in JSON - %s"
                                         % (ykey))
                set_method(d[key])
            pybindJSONDecoder.check_metadata_add(key, d, get_method())
    return obj
def test_001_check_containers(self):
    """Check that each instance exposes its expected root container."""
    cases = (
        (self.instance_a, "root-tc04-a"),
        (self.instance_b, "root-tc04-b"),
    )
    for instance, container_name in cases:
        container = getattr(instance, safe_name(container_name), None)
        self.assertIsNot(container, None)
def load_json(self, d, parent, yang_base, obj=None, path_helper=None,
              extmethods=None, overwrite=False):
    """Load a pybind-format JSON dict *d* into a pyangbind object tree.

    Instance-method variant: recursion goes through ``self.load_json``.
    If *obj* is not supplied, the target class is resolved as the
    ``yang_base`` attribute of *parent* and either re-used from the path
    helper's tree or freshly instantiated. Returns the populated object.
    """
    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindJSONUpdateError('update was attempted to a node that ' +
                                            'was not unique')
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper, extmethods=extmethods)

    for key in d:
        child = getattr(obj, "_get_%s" % safe_name(key), None)
        if child is None:
            # NOTE(review): message is missing a space between "that" and "did".
            raise AttributeError("JSON object contained a key that" +
                                 "did not exist (%s)" % (key))
        chobj = child()
        set_via_stdmethod = True
        pybind_attr = getattr(chobj, '_pybind_generated_by', None)
        if pybind_attr in ["container"]:
            # Container: optionally clear it, then recurse into it.
            if overwrite:
                for elem in chobj._pyangbind_elements:
                    unsetchildelem = getattr(chobj, "_unset_%s" % elem)
                    unsetchildelem()
            self.load_json(d[key], chobj, yang_base, obj=chobj,
                           path_helper=path_helper)
            set_via_stdmethod = False
        elif pybind_attr in ["YANGListType", "list"]:
            # we need to add each key to the list and then skip a level in the
            # JSON hierarchy
            for child_key in d[key]:
                if child_key not in chobj:
                    chobj.add(child_key)
                parent = chobj[child_key]
                self.load_json(d[key][child_key], parent, yang_base, obj=parent,
                               path_helper=path_helper)
            set_via_stdmethod = False
            if overwrite:
                # Drop list entries that are absent from the input JSON.
                for child_key in chobj:
                    if child_key not in d[key]:
                        chobj.delete(child_key)
        elif pybind_attr in ["TypedListType"]:
            if not overwrite:
                # Leaf-list merge: append missing items, remove stale ones.
                list_obj = getattr(obj, "_get_%s" % safe_name(key))()
                for item in d[key]:
                    if item not in list_obj:
                        list_obj.append(item)
                list_copy = []
                for elem in list_obj:
                    list_copy.append(elem)
                for e in list_copy:
                    if e not in d[key]:
                        list_obj.remove(e)
                set_via_stdmethod = False
            else:
                # use the set method
                pass
        elif pybind_attr in ["RestrictedClassType", "ReferencePathType"]:
            # normal but valid types - which use the std set method
            pass
        elif pybind_attr is None:
            # not a pybind attribute at all - keep using the std set method
            pass
        else:
            raise pybindJSONUpdateError("unknown pybind type when loading JSON: %s"
                                        % pybind_attr)

        if set_via_stdmethod:
            # simply get the set method and then set the value of the leaf
            set_method = getattr(obj, "_set_%s" % safe_name(key))
            set_method(d[key], load=True)
    return obj
def service_set_request(request, path_helper, logger):
    """Apply a gRPC Set request against the YANG tree, with rollback.

    Checkpoints each target path, applies UPDATE/REPLACE/DELETE operations
    in order, and on the first error rolls back the completed operations in
    reverse order before returning an error response.

    Args:
        request: the SetRequest protobuf message.
        path_helper: the YANG path helper holding the object tree.
        logger: logger for debug tracing.

    Returns:
        A pynms_rpc_pb2.SetResponse with OK, or the first error's
        code/message after rollback.

    Fixes vs. the previous revision: the ValueError handler logged an
    undefined name (``msg`` instead of ``e``, a NameError); the
    invalid-path and delete-failure error records used a stale ``path``
    variable left over from the checkpoint loop instead of
    ``operation.path``; the KeyError handler did not ``break`` like every
    sibling handler, so a failed operation was also recorded as completed;
    unused locals (``encoder``, ``chk_objects``) removed; log typo fixed.
    """
    response_msg = pynms_rpc_pb2.SetResponse(request_id=request.request_id)
    prefix = request.prefix if len(request.prefix) else None
    logger.debug("Starting set routine with input message as %s" % request)

    # Select the decoder matching the request encoding.
    if pynms_rpc_pb2.EncodingType.Name(request.encoding) == 'JSON_IETF':
        decoder = pybindJSONDecoder.load_ietf_json
    elif pynms_rpc_pb2.EncodingType.Name(request.encoding) == 'JSON_PYBIND':
        decoder = pybindJSONDecoder.load_json
    else:
        response_msg.response_code = pynms_rpc_pb2.UNSUPPORTED_ENCODING
        response_msg.message = "Unsupported encoding"
        return response_msg
    logger.debug("Determined the encoding to be %s from message" % request.encoding)

    # Checkpoint every target path so we can roll back on error.
    logger.debug("Starting checkpointing...")
    checkpoint = {}
    for operation in request.operation:
        logger.debug("Looking for %s" % str(operation))
        path = operation.path if prefix is None else prefix + "/" + operation.path
        logger.debug("Trying to checkpoint %s" % path)
        # TODO: do we assume that the request.operation dataset is ordered
        # for us?
        checkpoint[operation.path] = PyNMSServerGRPCMethods.get_encoded_object_set(
            path_helper, path, "JSON_PYBIND", logger)
        logger.debug("Checkpointed %s successfully" % operation.path)

    error_paths = []
    completed_paths = []
    for operation in request.operation:
        try:
            existing_obj = path_helper.get_unique(operation.path)
        except YANGPathHelperException as m:
            logger.debug("Hit a YANGPathException when getting %s" % operation.path)
            error_paths.append({
                'path': operation.path,
                'error': pynms_rpc_pb2.INVALID_PATH,
                'message': 'Invalid Path'
            })
            break

        if pynms_rpc_pb2.SetDataCommand.Name(operation.opcode) in ['UPDATE_CONFIG',
                                                                   'REPLACE_CONFIG']:
            logger.debug("Running parsing for %s with method %s" %
                         (operation.path,
                          pynms_rpc_pb2.SetDataCommand.Name(operation.opcode)))
            try:
                parsed_json = json.loads(operation.value)
            except ValueError as m:
                logger.debug("Hit an exception when loading the JSON for %s -> %s" %
                             (operation.path, str(m)))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid JSON'
                })
                break
            except Exception as e:
                logger.debug("Hit unknown exception %s @ %s:152" % (str(e), __file__))
                break

            # REPLACE clears the target before writing; UPDATE merges.
            overwrite = pynms_rpc_pb2.SetDataCommand.Name(
                operation.opcode) == 'REPLACE_CONFIG'
            try:
                decoder(parsed_json, None, None, obj=existing_obj,
                        path_helper=path_helper, overwrite=overwrite)
            except ValueError as e:
                logger.debug("Hit a ValueError when loading the JSON for %s (specified to %s)" %
                             (operation.path, existing_obj._path()))
                logger.debug("ValueError: %s" % str(e))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid configuration'
                })
                break
            except AttributeError as msg:
                logger.debug("Hit a AttributeError when loading the JSON for %s (specified to %s)" %
                             (operation.path, existing_obj._path()))
                logger.debug("AttributeError: %s" % str(msg))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid configuration'
                })
                break
            except KeyError as msg:
                logger.debug("Hit a KeyError when loading the JSON for %s" %
                             (operation.path))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Attempted to update an existing leaf entry',
                })
                # Stop like the other error handlers so this operation is not
                # also recorded as completed.
                break
            except Exception as e:
                logger.debug("Hit unknown exception %s @ %s:152" % (str(e), __file__))
                break
        elif pynms_rpc_pb2.SetDataCommand.Name(operation.opcode) in ['DELETE_CONFIG']:
            if "[" in existing_obj._path()[-1]:
                # A keyed list entry: find the containing list object.
                path = existing_obj._path()[:-1]
                containing_obj = path_helper.get_unique(path)
                parent = getattr(containing_obj,
                                 existing_obj._path()[-1].split("[")[0])
            else:
                parent = existing_obj._parent
            if hasattr(parent, "delete"):
                keyval = parent._extract_key(existing_obj)
                # todo, error handling
                if hasattr(keyval, "_get_ptr"):
                    keyval = keyval._get_ptr()
                parent.delete(keyval)
            else:
                # if the value is not a list entry
                unset_method = getattr(
                    parent,
                    "_unset_%s" % safe_name(operation.path.split("/")[-1]), None)
                if unset_method is None:
                    error_paths.append({
                        'path': operation.path,
                        'error': pynms_rpc_pb2.NOK,
                        'message': "Could not delete %s" % operation.path
                    })
                    break
                unset_method()
        completed_paths.append(operation.path)

    if len(error_paths) == 0:
        response_msg.response_code = pynms_rpc_pb2.OK
        return response_msg

    # need to rollback
    #
    # We need to rollback by applying changes in reverse order to the transaction that
    # was specified.
    completed_paths.reverse()
    for path in completed_paths:
        original_content = checkpoint[path]
        existing_object = path_helper.get_unique(path)
        pybindJSONDecoder.load_json(original_content, None, None,
                                    obj=existing_object, path_helper=path_helper,
                                    overwrite=True)
    response_msg.response_code = error_paths[0]['error']
    response_msg.message = error_paths[0]['message']
    return response_msg
def load_ietf_json(d, parent, yang_base, obj=None, path_helper=None,
                   extmethods=None, overwrite=False, skip_unknown=False,
                   allow_non_config=True, fail_if_value_exist=False,
                   change_tracker=None):
    """Deserialise an IETF-JSON (RFC 7951 style) document into a pybind object.

    Recursively walks the dict ``d`` and sets the corresponding leaves,
    containers, lists and leaf-lists on ``obj`` (or on a freshly created
    instance of ``getattr(parent, safe_name(yang_base))`` when ``obj`` is
    None), recording every create/merge/replace on ``change_tracker``.

    Args:
        d: parsed JSON value — normally a dict; a scalar/list is treated as
            a direct leaf value to set on the object's parent.
        parent: module/class holding the binding class, used only when
            ``obj`` is None.
        yang_base: attribute name of the binding class on ``parent``, used
            only when ``obj`` is None.
        obj: existing pybind object to populate in place.
        path_helper: optional YANG tree path helper; when given and a new
            object must be created, an existing unique node at the same
            path is reused instead of duplicated.
        extmethods: extmethods passed to any newly created binding.
        overwrite: when True, containers/lists are cleared before being
            repopulated (replace semantics) rather than merged.
        skip_unknown: when True, JSON keys with no matching attribute are
            silently skipped instead of raising.
        allow_non_config: when False, setting state (non-config) nodes
            raises.
        fail_if_value_exist: when True, raise if a value being set already
            has a changed value.
        change_tracker: tracker receiving created/merged/replaced events;
            defaults to NullChangeTracker (a no-op).

    Returns:
        The populated pybind object.

    Raises:
        pybindLoadUpdateError: update targeted a non-unique node.
        NonExistingPathError: JSON key has no matching attribute and
            skip_unknown is False.
        ValueError: dict supplied for a non-container, or bad 'empty' value.
        AttributeError: missing get/set method for a leaf key.
        CannotChangeKeyOfListElement: attempt to modify a list key leaf.
    """
    if change_tracker is None:
        # NOTE(review): the class itself is used, not an instance — assumes
        # NullChangeTracker's methods are callable on the class.
        change_tracker = NullChangeTracker

    if obj is None:
        # we need to find the class to create, as one has not been supplied.
        base_mod_cls = getattr(parent, safe_name(yang_base))
        # throwaway instance, used only to discover the object's path
        tmp = base_mod_cls(path_helper=False)
        if path_helper is not None:
            # check that this path doesn't already exist in the
            # tree, otherwise we create a duplicate.
            existing_objs = path_helper.get(tmp._path())
            if len(existing_objs) == 0:
                obj = base_mod_cls(path_helper=path_helper,
                                   extmethods=extmethods)
            elif len(existing_objs) == 1:
                obj = existing_objs[0]
            else:
                raise pybindLoadUpdateError(
                    "update was attempted to a node that " +
                    "was not unique")
        else:
            # in this case, we cannot check for an existing object
            obj = base_mod_cls(path_helper=path_helper,
                               extmethods=extmethods)

    # Handle the case where we are supplied with a scalar value rather than
    # a list: set it directly on the parent via the generated _set_ method.
    if not isinstance(d, dict) or isinstance(d, list):
        pybindJSONDecoder._check_configurable(allow_non_config, obj)
        if obj._changed():
            pybindJSONDecoder._check_fail_if_value_exists(
                fail_if_value_exist, obj)
            change_tracker.replaced(ChangeTrackerPath(obj))
        else:
            change_tracker.created(ChangeTrackerPath(obj))
        set_method = getattr(obj._parent,
                             "_set_%s" % safe_name(obj._yang_name))
        set_method(d)
        return obj

    for key in d:
        # Fix any namespace that was supplied in the JSON
        # (RFC 7951 keys may be "module:name"; keep only the local name)
        if ":" in key:
            ykey = key.split(":")[-1]
        else:
            ykey = key

        if key == "@":
            # Handle whole container metadata object
            for k, v in six.iteritems(d[key]):
                obj._add_metadata(k, v)
            continue
        elif "@" in key:
            # Don't handle metadata elements, each element
            # will look up its own metadata
            continue

        # Flag: fall through to the generic leaf/leaf-list handling below.
        std_method_set = False
        # Handle the case that this is a JSON object
        if isinstance(d[key], dict):
            # Iterate through attributes and set to that value
            attr_get = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if attr_get is None and skip_unknown is False:
                raise NonExistingPathError.from_obj(obj, (key, ))
            elif attr_get is None and skip_unknown is not False:
                # Skip unknown JSON keys
                continue
            chobj = attr_get()
            if not chobj._is_container:
                raise ValueError("dictionary supplied for non container")
            pybindJSONDecoder._check_configurable(allow_non_config, chobj)
            if chobj._changed():
                pybindJSONDecoder._check_fail_if_value_exists(
                    fail_if_value_exist, chobj)
                if overwrite:
                    # Replace semantics: unset every non-key child first.
                    for e in chobj.elements():
                        if not getattr(chobj, e)._is_keyval:
                            attr_unset = getattr(chobj, "_unset_" + e)
                            attr_unset()
                    change_tracker.replaced(ChangeTrackerPath(chobj))
                else:
                    change_tracker.merged(ChangeTrackerPath(chobj))
            else:
                change_tracker.created(ChangeTrackerPath(chobj))
            # Mark presence containers as present when touched.
            if hasattr(chobj, "_presence"):
                if chobj._presence:
                    chobj._set_present()
            pybindJSONDecoder.check_metadata_add(key, d, chobj)
            # Recurse into the child container.
            pybindJSONDecoder.load_ietf_json(
                d[key], None, None, obj=chobj, path_helper=path_helper,
                extmethods=extmethods, overwrite=overwrite,
                skip_unknown=skip_unknown,
                allow_non_config=allow_non_config,
                fail_if_value_exist=fail_if_value_exist,
                change_tracker=change_tracker)
        elif isinstance(d[key], list):
            # if this is a list, then this is a YANG list
            this_attr = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if this_attr is None:
                raise NonExistingPathError.from_obj(obj, (key, ))
            this_attr = this_attr()
            pybindJSONDecoder._check_configurable(allow_non_config,
                                                  this_attr)
            if hasattr(this_attr, "_keyval"):
                if overwrite:
                    # Replace semantics: drop all existing list entries.
                    existing_keys = list(this_attr.keys())
                    for i in existing_keys:
                        this_attr.delete(i)
                    change_tracker.replaced(ChangeTrackerPath(this_attr))
                else:
                    change_tracker.merged(ChangeTrackerPath(this_attr))
            for elem in d[key]:
                if hasattr(this_attr, "_keyval"):
                    # this handles YANGLists
                    if this_attr._keyval is False:
                        # Keyless list, generate a key
                        k = this_attr.add()
                        nobj = this_attr[k]
                    elif " " in this_attr._keyval:
                        # Compound (multi-leaf) key: build kwargs and the
                        # space-joined key string used for lookup.
                        pybindJSONDecoder._check_list_key_exists(
                            this_attr, elem, this_attr._yang_keys.split(" "))
                        keystr = ""
                        kwargs = {}
                        for pkv, ykv in zip(
                                this_attr._keyval.split(" "),
                                this_attr._yang_keys.split(" ")):
                            kwargs[pkv] = elem[ykv]
                            keystr += "%s " % elem[ykv]
                        keystr = keystr.rstrip(" ")
                        if keystr not in this_attr:
                            nobj = this_attr.add(**kwargs)
                            if fail_if_value_exist:
                                # delete key from elem or else we will fail
                                # because key will exist on the new obj
                                elem = {
                                    k: v
                                    for k, v in elem.items()
                                    if k not in kwargs
                                }
                            change_tracker.created(ChangeTrackerPath(nobj))
                            for ykv in this_attr._yang_keys.split(" "):
                                change_tracker.created(
                                    ChangeTrackerPath(
                                        getattr(nobj, safe_name(ykv))))
                        else:
                            nobj = this_attr[keystr]
                            pybindJSONDecoder._check_fail_if_value_exists(
                                fail_if_value_exist, nobj)
                            if overwrite:
                                change_tracker.replaced(
                                    ChangeTrackerPath(nobj))
                                for ykv in this_attr._yang_keys.split(" "):
                                    change_tracker.created(
                                        ChangeTrackerPath(
                                            getattr(nobj, safe_name(ykv))))
                            else:
                                change_tracker.merged(
                                    ChangeTrackerPath(nobj))
                                for ykv in this_attr._yang_keys.split(" "):
                                    change_tracker.replaced(
                                        ChangeTrackerPath(
                                            getattr(nobj, safe_name(ykv))))
                        if not allow_non_config:
                            for pkv in kwargs.keys():
                                pybindJSONDecoder._check_configurable(
                                    allow_non_config, getattr(nobj, pkv))
                    else:
                        # Single-leaf key.
                        pybindJSONDecoder._check_list_key_exists(
                            this_attr, elem, (this_attr._yang_keys, ))
                        k = elem[this_attr._yang_keys]
                        if k not in this_attr:
                            nobj = this_attr.add(k)
                            if fail_if_value_exist:
                                # delete key from elem or else we will fail
                                # because key will exist on the new obj
                                elem = {
                                    k: v
                                    for k, v in elem.items()
                                    if k != this_attr._yang_keys
                                }
                            change_tracker.created(ChangeTrackerPath(nobj))
                            change_tracker.created(
                                ChangeTrackerPath(
                                    getattr(
                                        nobj,
                                        safe_name(this_attr._yang_keys))))
                        else:
                            nobj = this_attr[k]
                            pybindJSONDecoder._check_fail_if_value_exists(
                                fail_if_value_exist, nobj)
                            if overwrite:
                                change_tracker.replaced(
                                    ChangeTrackerPath(nobj))
                                change_tracker.created(
                                    ChangeTrackerPath(
                                        getattr(
                                            nobj,
                                            safe_name(
                                                this_attr._yang_keys))))
                            else:
                                change_tracker.merged(
                                    ChangeTrackerPath(nobj))
                                change_tracker.replaced(
                                    ChangeTrackerPath(
                                        getattr(
                                            nobj,
                                            safe_name(
                                                this_attr._yang_keys))))
                        pybindJSONDecoder._check_configurable(
                            allow_non_config,
                            getattr(nobj, this_attr._keyval))
                    # Recurse into the list entry's own members.
                    pybindJSONDecoder.load_ietf_json(
                        elem, None, None, obj=nobj,
                        path_helper=path_helper, extmethods=extmethods,
                        overwrite=overwrite, skip_unknown=skip_unknown,
                        allow_non_config=allow_non_config,
                        fail_if_value_exist=fail_if_value_exist,
                        change_tracker=change_tracker)
                    pybindJSONDecoder.check_metadata_add(key, d, nobj)
                else:
                    # this is a leaf-list
                    std_method_set = True
        else:
            # Plain scalar leaf.
            std_method_set = True

        if std_method_set:
            # Generic leaf / leaf-list handling via generated accessors.
            get_method = getattr(obj, "_get_%s" % safe_name(ykey), None)
            if get_method is None and skip_unknown is False:
                raise NonExistingPathError.from_obj(obj, (key, ))
            elif get_method is None and skip_unknown is not False:
                continue
            chk = get_method()
            # TODO: is this the right place ?
            if chk._changed():
                pybindJSONDecoder._check_fail_if_value_exists(
                    fail_if_value_exist, chk)
            # TODO: is this the right place ?
            # Don't emit tracker events for list-key leaves — the list
            # handling above already did.
            objp = getattr(obj, "_parent", None)
            if not objp or ykey not in getattr(objp, "_yang_keys",
                                               "").split(" "):
                if chk._changed():
                    if is_yang_leaflist(chk) and not overwrite:
                        change_tracker.merged(ChangeTrackerPath(chk))
                    else:
                        change_tracker.replaced(ChangeTrackerPath(chk))
                else:
                    change_tracker.created(ChangeTrackerPath(chk))
            pybindJSONDecoder._check_configurable(allow_non_config, chk)
            val = d[key]
            if chk._yang_type == "empty":
                # A 'none' value in the JSON means that an empty value is
                # set, since this is serialised as [null] in the input JSON.
                if val == [None]:
                    val = True
                else:
                    raise ValueError(
                        "Invalid value for empty in input JSON "
                        "key: %s, got: %s" % (ykey, val))
            if chk._yang_type == "identityref":
                # identityref values in IETF JSON may contain their module
                # name, as a prefix, but we don't build identities with
                # these as valid values. If this is the case then re-write
                # the value to just be the name of the identity that we
                # should know about.
                if ":" in val:
                    _, val = val.split(":", 1)
            if chk._is_keyval is True:
                # List keys are immutable: the supplied value must match.
                get_method = getattr(obj, "_get_%s" % safe_name(ykey),
                                     None)
                if get_method is None:
                    raise AttributeError(
                        "Invalid attribute specified in JSON - %s" %
                        (ykey))
                cur_val = get_method()
                if cur_val != val:
                    raise CannotChangeKeyOfListElement.from_obj(
                        chk, cur_val, val)
            elif val is not None:
                set_method = getattr(obj, "_set_%s" % safe_name(ykey),
                                     None)
                if set_method is None:
                    raise AttributeError(
                        "Invalid attribute specified in JSON - %s" %
                        (ykey))
                if is_yang_leaflist(chk) and not overwrite:
                    # Merge semantics for leaf-lists: append one-by-one and
                    # record whether each element was new or pre-existing.
                    for el in val:
                        old_len = len(chk)
                        chk.append(el)
                        if len(chk) != old_len:
                            change_tracker.created(
                                ChangeTrackerPath(chk[-1]))
                        else:
                            change_tracker.replaced(
                                ChangeTrackerPath(chk[-1]))
                else:
                    set_method(val)
                    if is_yang_leaflist(chk):
                        # Replace semantics: everything now present counts
                        # as newly created.
                        get_method = getattr(obj,
                                             "_get_%s" % safe_name(ykey))
                        for el in get_method():
                            change_tracker.created(ChangeTrackerPath(el))
            pybindJSONDecoder.check_metadata_add(key, d, get_method())
    return obj
def test_001_check_containers(self): self.assertIsNot( getattr(self.instance_a, safe_name("root-tc04-a"), None), None) self.assertIsNot( getattr(self.instance_b, safe_name("root-tc04-b"), None), None)
def service_set_request(request, path_helper, logger):
    """Apply a SetRequest against the YANG tree, transactionally.

    Every target path is checkpointed before any operation runs; if an
    operation fails, previously-completed operations are re-applied from
    their checkpoints in reverse order and the first error is reported.

    Args:
        request: pynms_rpc_pb2.SetRequest protobuf message.
        path_helper: YANG path helper used to resolve paths to objects.
        logger: logger for debug output.

    Returns:
        pynms_rpc_pb2.SetResponse with response_code set (and message on
        error).
    """
    response_msg = pynms_rpc_pb2.SetResponse(request_id=request.request_id)
    prefix = request.prefix if len(request.prefix) else None

    logger.debug("Starting set routine with input message as %s" % request)

    # Select the JSON decoder matching the requested encoding.
    if pynms_rpc_pb2.EncodingType.Name(request.encoding) == 'JSON_IETF':
        decoder = pybindJSONDecoder.load_ietf_json
    elif pynms_rpc_pb2.EncodingType.Name(
            request.encoding) == 'JSON_PYBIND':
        decoder = pybindJSONDecoder.load_json
    else:
        response_msg.response_code = pynms_rpc_pb2.UNSUPPORTED_ENCODING
        response_msg.message = "Unsupported encoding"
        return response_msg
    logger.debug("Determined the encoding to be %s from message" %
                 request.encoding)

    # Checkpoint the current state of every target path so a failed
    # transaction can be rolled back.
    logger.debug("Starting checkpointing...")
    checkpoint = {}
    for operation in request.operation:
        logger.debug("Looking for %s" % str(operation))
        path = operation.path if prefix is None \
            else prefix + "/" + operation.path
        logger.debug("Trying to checkpoint %s" % path)
        # TODO: do we assume that the request.operation dataset is ordered
        # for us?
        checkpoint[operation.path] = \
            PyNMSServerGRPCMethods.get_encoded_object_set(
                path_helper, path, "JSON_PYBIND", logger)
        logger.debug("Checkpointed %s successfully" % operation.path)

    error_paths = []
    completed_paths = []
    for operation in request.operation:
        try:
            existing_obj = path_helper.get_unique(operation.path)
        except YANGPathHelperException as m:
            logger.debug("Hit a YANGPathException when getting %s" %
                         operation.path)
            # fix: report the operation's own path (previously the stale
            # `path` variable from the checkpoint loop was used).
            error_paths.append({
                'path': operation.path,
                'error': pynms_rpc_pb2.INVALID_PATH,
                'message': 'Invalid Path'
            })
            break

        if pynms_rpc_pb2.SetDataCommand.Name(
                operation.opcode) in ['UPDATE_CONFIG', 'REPLACE_CONFIG']:
            logger.debug(
                "Running parsing for %s with method %s" %
                (operation.path,
                 pynms_rpc_pb2.SetDataCommand.Name(operation.opcode)))
            try:
                parsed_json = json.loads(operation.value)
            except ValueError as m:
                logger.debug(
                    "Hit an exception when loading the JSON for %s -> %s" %
                    (operation.path, str(m)))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid JSON'
                })
                break
            except Exception as e:
                logger.debug("Hit unknown exception %s @ %s:152" %
                             (str(e), __file__))
                break

            # REPLACE overwrites the existing subtree; UPDATE merges.
            overwrite = True if pynms_rpc_pb2.SetDataCommand.Name(
                operation.opcode) == 'REPLACE_CONFIG' else False
            try:
                decoder(parsed_json,
                        None,
                        None,
                        obj=existing_obj,
                        path_helper=path_helper,
                        overwrite=overwrite)
            except ValueError as e:
                logger.debug(
                    "Hit a ValueError when loading the JSON for %s (specified to %s)"
                    % (operation.path, existing_obj._path()))
                # fix: was str(msg), which raised NameError in this handler.
                logger.debug("ValueError: %s" % str(e))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid configuration'
                })
                break
            except AttributeError as msg:
                logger.debug(
                    "Hit a AttributeError when loading the JSON for %s (specified to %s)"
                    % (operation.path, existing_obj._path()))
                logger.debug("AttributeError: %s" % str(msg))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Invalid configuration'
                })
                break
            except KeyError as msg:
                logger.debug(
                    "Hit a KeyError when loading the JSON for %s" %
                    (operation.path))
                error_paths.append({
                    'path': operation.path,
                    'error': pynms_rpc_pb2.INVALID_CONFIGURATION,
                    'message': 'Attempted to update an existing leaf entry',
                })
                # fix: break was missing, so the failed operation fell
                # through and was wrongly recorded as completed.
                break
            except Exception as e:
                logger.debug("Hit unknown exception %s @ %s:152" %
                             (str(e), __file__))
                break
        elif pynms_rpc_pb2.SetDataCommand.Name(
                operation.opcode) in ['DELETE_CONFIG']:
            if "[" in existing_obj._path()[-1]:
                # Deleting a keyed list entry: resolve the containing list.
                path = existing_obj._path()[:-1]
                containing_obj = path_helper.get_unique(path)
                parent = getattr(containing_obj,
                                 existing_obj._path()[-1].split("[")[0])
            else:
                parent = existing_obj._parent
            if hasattr(parent, "delete"):
                keyval = parent._extract_key(existing_obj)
                # todo, error handling
                if hasattr(keyval, "_get_ptr"):
                    keyval = keyval._get_ptr()
                parent.delete(keyval)
            else:
                # if the value is not a list entry
                unset_method = getattr(
                    parent,
                    "_unset_%s" % safe_name(operation.path.split("/")[-1]),
                    None)
                if unset_method is None:
                    # fix: report the operation's path (previously a stale
                    # `path` variable could be reported here).
                    error_paths.append({
                        'path': operation.path,
                        'error': pynms_rpc_pb2.NOK,
                        'message': "Could not delete %s" % operation.path
                    })
                    break
                unset_method()
        completed_paths.append(operation.path)

    if len(error_paths) == 0:
        response_msg.response_code = pynms_rpc_pb2.OK
        return response_msg

    # need to rollback
    #
    # We need to rollback by applying changes in reverse order to the
    # transaction that was specified.
    completed_paths.reverse()
    for path in completed_paths:
        original_content = checkpoint[path]
        existing_object = path_helper.get_unique(path)
        pybindJSONDecoder.load_json(original_content,
                                    None,
                                    None,
                                    obj=existing_object,
                                    path_helper=path_helper,
                                    overwrite=True)

    response_msg.response_code = error_paths[0]['error']
    response_msg.message = error_paths[0]['message']
    return response_msg