def register_type(type_class):
    r"""Register a type class, recording methods for encoding/decoding.

    Args:
        type_class (class): Class to be registered.

    Raises:
        ValueError: If the type is already registered.
        ValueError: If the type is a default JSON type.
        ValueError: If any of the type's properties are not registered.

    Returns:
        class: The registered type class (so this can be used as a
            decorator).

    """
    global _type_registry
    type_name = type_class.name
    if type_name in _type_registry:
        raise ValueError("Type '%s' already registered." % type_name)
    if (not type_class._replaces_existing):  # pragma: debug
        # jsonschema moved its default-type bookkeeping between major
        # versions: < 3 exposes DEFAULT_TYPES, >= 3 uses a TYPE_CHECKER.
        if _jsonschema_ver_maj < 3:
            exist_flag = (type_name in _base_validator.DEFAULT_TYPES)
        else:
            exist_flag = (type_name in _base_validator.TYPE_CHECKER._type_checkers)
        if exist_flag:
            raise ValueError(("Type '%s' is a JSON default type "
                              "which cannot be replaced.") % type_name)
    # Check properties: every property the type declares must resolve to a
    # registered property class of the same name.
    for p in type_class.properties:
        prop_class = get_metaschema_property(p)
        if prop_class.name != p:
            raise ValueError("Type '%s' has unregistered property '%s'."
                             % (type_name, p))
    # Update property class with this type's info
    for p in type_class.properties:
        prop_class = get_metaschema_property(p)
        # TODO: Make sure this actually modifies the class
        # Type strings
        old = copy.deepcopy(list(prop_class.types))
        new = [type_name]
        prop_class.types = tuple(set(old + new))
        # Python types
        old = copy.deepcopy(list(prop_class.python_types))
        try:
            new = list(type_class.python_types)
        except TypeError:
            # python_types may be a single class rather than an iterable.
            new = [type_class.python_types]
        prop_class.python_types = tuple(set(old + new))
    # Add to registry
    type_class._datatype = type_name
    type_class._schema_type = 'type'
    # type_class._schema_required = type_class.definition_schema()['required']
    # type_class._schema_properties = {}  # TODO: Transfer from
    # TODO: Enable schema tracking once ported to jsonschema
    # from yggdrasil.schema import register_component
    # register_component(type_class)
    _type_registry[type_name] = type_class
    return type_class
def _normalize_schema(validator, ref, instance, schema): r"""Normalize a schema at the root to handle case where only type string specified.""" # if isinstance(instance, str): # instance = dict(type=instance) # return instance if isinstance(instance, str) and (instance in _type_registry): instance = {'type': instance} elif isinstance(instance, dict): if len(instance) == 0: pass elif 'type' not in instance: valid_types = None for k in instance.keys(): prop_class = get_metaschema_property(k, skip_generic=True) if prop_class is None: continue if valid_types is None: valid_types = set(prop_class.types) else: valid_types = (valid_types & set(prop_class.types)) if (valid_types is None) or (len(valid_types) == 0): # There were not any recorded properties so this must be a # dictionary of properties instance = {'type': 'object', 'properties': instance} else: if len(valid_types) > 1: valid_type_classes = sorted( [_type_registry[t] for t in valid_types], key=_specificity_sort_key) s_max = valid_type_classes[0].specificity valid_types = [] for tcls in valid_type_classes: if tcls.specificity > s_max: break valid_types.append(tcls.name) if 'scalar' in valid_types: for t in ['1darray', 'ndarray']: if t in valid_types: valid_types.remove(t) if len(valid_types) > 1: raise Exception("Multiple possible classes: %s" % valid_types) instance['type'] = valid_types[0] elif isinstance(instance, (list, tuple)): # If inside validation of items as a schema, don't assume a # list is a malformed schema. Doing so results in infinite # recursion. if not ((len(validator._schema_path_stack) >= 2) and (validator._schema_path_stack[-2:] == ['items', 0])): instance = {'type': 'array', 'items': instance} if isinstance(instance, dict) and ('type' in instance): typecls = get_type_class(instance['type']) instance = typecls.normalize_definition(instance) return instance
def normalize_definition(cls, obj):
    r"""Normalizes a type definition.

    Args:
        obj (object): Type definition to normalize.

    Returns:
        object: Normalized type definition.

    """
    # Give each property that is absent from the definition a chance to
    # normalize the definition (e.g. by inserting itself).
    for prop_name in cls.properties:
        if prop_name in obj:
            continue
        handler = get_metaschema_property(prop_name)
        obj = handler.normalize_in_schema(obj)
    return obj
def guess_type_from_obj(obj):
    r"""Guess the type class for a given Python object.

    Args:
        obj (object): Python object.

    Returns:
        MetaschemaType: Instance of the appropriate type class.

    Raises:
        ValueError: If a type class cannot be determined.

    """
    # The 'type' property encoder maps an object to a type name, which is
    # then resolved against the type registry.
    encoder = get_metaschema_property('type')
    return get_type_class(encoder.encode(obj))
def encode_data_readable(cls, obj, typedef):
    r"""Encode an object's data in a readable format that may not be
    decoded in exactly the same way.

    Args:
        obj (object): Object to encode.
        typedef (dict): Type definition that should be used to encode the
            object.

    Returns:
        string: Encoded object.

    """
    arr = cls.to_array(obj)
    # Determine the scalar subtype, either from the supplied typedef or by
    # encoding it directly from the object.
    if isinstance(typedef, dict):
        subtype = typedef.get('subtype', typedef.get('type', None))
    else:
        subtype = get_metaschema_property('subtype').encode(obj)
    # Array flavors are rendered as (possibly nested) Python lists.
    if cls.name in ('1darray', 'ndarray'):
        return arr.tolist()
    assert (arr.ndim > 0)
    element = arr[0]
    if subtype in ('int', 'uint'):
        return int(element)
    if subtype == 'float':
        return float(element)
    if subtype == 'complex':
        return str(complex(element))
    if subtype in ('bytes', 'unicode'):
        if isinstance(element, bytes):
            return element.decode("utf-8")
        return str(element)
    # pragma: debug
    warnings.warn(
        ("No method for handling readable serialization of "
         + "subtype '%s', falling back to default.") % subtype)
    return super(ScalarMetaschemaType, cls).encode_data_readable(obj, typedef)
def encode_type(cls, obj, typedef=None, is_validated=False, **kwargs):
    r"""Encode an object's type definition.

    Args:
        obj (object): Object to encode.
        typedef (dict, optional): Type properties that should be used to
            initialize the encoded type definition in certain cases.
            Defaults to None and is ignored.
        **kwargs: Additional keyword arguments are treated as additional
            schema properties.

    Raises:
        MetaschemaTypeError: If the object is not the correct type.

    Returns:
        dict: Encoded type definition.

    """
    obj = cls.coerce_type(obj, typedef=typedef)
    if typedef is None:
        typedef = {}
    if (not is_validated) and (not cls.validate(obj)):
        raise MetaschemaTypeError(("Object could not be encoded as "
                                   "'%s' type.") % cls.name)
    # Seed the output with any explicitly supplied schema properties.
    encoded = copy.deepcopy(kwargs)
    for prop in cls.properties:
        prop_typedef = typedef.get(prop, encoded.get(prop, None))
        if prop == 'type':
            encoded['type'] = cls.name
        elif prop == 'title':
            # Titles are only copied through, never derived from the object.
            if prop_typedef is not None:
                encoded[prop] = prop_typedef
        else:
            encoded[prop] = get_metaschema_property(prop).encode(
                obj, typedef=prop_typedef)
    return encoded
def compare_schema(schema1, schema2, root1=None, root2=None):
    r"""Compare two schemas for compatibility.

    Args:
        schema1 (dict): First schema.
        schema2 (dict): Second schema.
        root1 (dict, optional): Root for first schema. Defaults to None and
            is set to schema1.
        root2 (dict, optional): Root for second schema. Defaults to None and
            is set to schema2.

    Yields:
        str: Comparison failure messages.

    """
    try:
        if root1 is None:
            root1 = jsonschema.RefResolver.from_schema(schema1)
        if root2 is None:
            root2 = jsonschema.RefResolver.from_schema(schema2)
        # Resolve $ref-only schemas (on either side) and recurse before
        # attempting any direct comparison.
        if (len(schema2) == 1) and ('$ref' in schema2):
            with root2.resolving(schema2['$ref']) as resolved_schema2:
                for e in compare_schema(schema1, resolved_schema2,
                                        root1=root1, root2=root2):
                    yield e
        elif (len(schema1) == 1) and ('$ref' in schema1):
            with root1.resolving(schema1['$ref']) as resolved_schema1:
                for e in compare_schema(resolved_schema1, schema2,
                                        root1=root1, root2=root2):
                    yield e
        elif ('type' not in schema2) or ('type' not in schema1):
            yield "Type required in both schemas for comparison."
        elif (schema1 != schema2):
            # Convert fixed types to base types
            type_cls1 = get_type_class(schema1['type'])
            if type_cls1.is_fixed:
                schema1 = type_cls1.typedef_fixed2base(schema1)
            # schema2['type'] may list several alternatives; the comparison
            # succeeds if any single alternative produces no errors.
            type_list = schema2['type']
            if not isinstance(schema2['type'], list):
                type_list = [type_list]
            all_errors = []
            for itype in type_list:
                itype_cls2 = get_type_class(itype)
                ischema2 = copy.deepcopy(schema2)
                ischema2['type'] = itype
                if itype_cls2.is_fixed:
                    ischema2 = itype_cls2.typedef_fixed2base(ischema2)
                # Compare contents of schema
                ierrors = []
                for k, v in ischema2.items():
                    prop_cls = get_metaschema_property(k, skip_generic=True)
                    if (prop_cls is None) or (k in ['title', 'default']):
                        continue
                    if k not in schema1:
                        ierrors.append("Missing entry for required key '%s'"
                                       % k)
                        continue
                    if (k == 'properties') and ('required' in ischema2):
                        # Only compare properties that schema1 also defines
                        # or that schema2 marks as required.
                        vcp = copy.deepcopy(v)
                        for k2 in list(vcp.keys()):
                            if (((k2 not in schema1[k])
                                 and (k2 not in ischema2['required']))):
                                del vcp[k2]
                    else:
                        vcp = v
                    ierrors += list(prop_cls.compare(schema1[k], vcp,
                                                     root1=root1,
                                                     root2=root2))
                if len(ierrors) == 0:
                    # One alternative matched cleanly; discard all errors.
                    all_errors = []
                    break
                else:
                    all_errors += ierrors
            for e in all_errors:
                yield e
    except BaseException as e:
        # NOTE(review): deliberately broad — any failure during comparison
        # is yielded as a "message" (the exception object itself) rather
        # than raised. Confirm that catching BaseException (including
        # KeyboardInterrupt/SystemExit) is intended here.
        yield e
def test_get_metaschema_property():
    r"""Test get_metaschema_property."""
    # An unregistered name resolves to the generic MetaschemaProperty class.
    assert_equal(properties.get_metaschema_property(non_existant),
                 MetaschemaProperty)
    # A registered name resolves to something other than the generic class.
    # NOTE(review): duplicates the name of the plain-assert version of this
    # test elsewhere in the file — only one will survive at import time.
    assert_not_equal(properties.get_metaschema_property(existing_class),
                     MetaschemaProperty)
def test_get_metaschema_property():
    r"""Test get_metaschema_property."""
    lookup = properties.get_metaschema_property
    # Unregistered name -> generic class; registered name -> specific class.
    assert lookup(non_existant) == MetaschemaProperty
    assert lookup(existing_class) != MetaschemaProperty