def compare(cls, prop1, prop2, root1=None, root2=None):
    r"""Comparison method for 'items' container property.

    Yields error strings for each incompatibility found between the two
    'items' values, which may each be either a single schema (dict) or a
    sequence of schemas.
    """
    # dict vs. dict: compare the two schemas directly.
    if isinstance(prop1, dict) and isinstance(prop2, dict):
        yield from compare_schema(prop1, prop2, root1=root1, root2=root2)
        return
    # dict vs. sequence: every element schema must match the single schema.
    if isinstance(prop1, dict) and isinstance(prop2, cls.python_types):
        for sub2 in prop2:
            yield from compare_schema(prop1, sub2, root1=root1, root2=root2)
        return
    # sequence vs. dict: mirror of the case above.
    if isinstance(prop1, cls.python_types) and isinstance(prop2, dict):
        for sub1 in prop1:
            yield from compare_schema(sub1, prop2, root1=root1, root2=root2)
        return
    # Anything other than sequence vs. sequence at this point is an error.
    if not (isinstance(prop1, cls.python_types)
            and isinstance(prop2, cls.python_types)):
        yield "Values have incorrect type: %s, %s." % (type(prop1), type(prop2))
        return
    # sequence vs. sequence: lengths must agree and elements compare pairwise.
    if len(prop1) != len(prop2):
        yield 'Unequal number of elements. %d vs. %d' % (len(prop1), len(prop2))
    for sub1, sub2 in zip(prop1, prop2):
        yield from compare_schema(sub1, sub2, root1=root1, root2=root2)
def test_compare_schema():
    r"""Test for compare_schema."""
    # Pairs of schemas that should compare without errors.
    valid = [
        ({'type': 'int'}, {'type': 'int'}),
        ({'type': 'int'}, {'type': 'scalar', 'subtype': 'int'}),
        ({'type': 'scalar', 'subtype': 'int'}, {'type': 'int'}),
        ({'type': 'int', 'unnamed': 0}, {'type': 'int', 'unnamed': 1}),
        ({'type': 'int', 'unnamed': 0}, {'type': 'int'}),
        ({'type': 'object',
          'definitions': {'a': {'type': 'int'}},
          'properties': {'x': {'$ref': '#/definitions/a'}}},
         {'type': 'object',
          'definitions': {'b': {'type': 'int'}},
          'properties': {'x': {'$ref': '#/definitions/b'}}}),
        ({'type': 'object', 'properties': {'x': {'type': 'float'}}},
         {'type': 'object',
          'properties': {'x': {'type': 'float'}, 'y': {'type': 'float'}},
          'required': ['x']})]
    # Pairs of schemas that should produce at least one error.
    invalid = [
        ({'type': 'int'}, {}),
        ({}, {'type': 'int'}),
        ({'type': 'int'}, {'type': 'int', 'precision': 4}),
        ({'type': 'object',
          'definitions': {'a': {'type': 'float'}},
          'properties': {'x': {'$ref': '#/definitions/a'}}},
         {'type': 'object',
          'definitions': {'b': {'type': 'int'}},
          'properties': {'x': {'$ref': '#/definitions/b'}}})]
    for schemas in valid:
        assert not list(datatypes.compare_schema(*schemas))
    for schemas in invalid:
        assert list(datatypes.compare_schema(*schemas))
def typedef_base2fixed(cls, typedef):
    r"""Transform a type definition from the unfixed base type to the fixed
    type alias by removing fixed properties.

    Args:
        typedef (dict): Type definition for the unfixed base type that
            might include properties that are fixed in the base.

    Returns:
        dict: Copy of type definition with fixed properties removed.

    Raises:
        Exception: If any explicitly provided properties disagree with the
            properties fixed by this type alias.

    """
    out = copy.deepcopy(typedef)
    if out.get('type', None) == cls.base().name:
        # Overlay the fixed values and confirm the caller's definition
        # agrees with them before discarding them.
        out.update(cls.fixed_properties)
        errors = list(compare_schema(typedef, out))
        if errors:
            error_msg = "Error(s) in comparison with fixed properties.\n"
            for e in errors:
                error_msg += '\t%s\n' % e
            raise Exception(error_msg)
        # The fixed properties are implied by the fixed type's name and
        # need not be carried in the resulting definition.
        for k in cls.fixed_properties:
            out.pop(k, None)
        out['type'] = cls.name
    return out
def compare(cls, prop1, prop2, root1=None, root2=None):
    r"""Comparison method for 'properties' container property.

    Yields an error for every property required by prop2 that prop1 lacks
    and recursively compares the schemas of the properties both share.
    """
    for key, expected in prop2.items():
        if key in prop1:
            yield from compare_schema(prop1[key], expected,
                                      root1=root1, root2=root2)
        else:
            yield "Missing property '%s'" % key
def check_encoded(cls, metadata, typedef=None, raise_errors=False,
                  typedef_validated=False, metadata_validated=False):
    r"""Checks if the metadata for an encoded object matches the type
    definition.

    Args:
        metadata (dict): Meta data to be tested.
        typedef (dict, optional): Type properties that object should be
            tested against. Defaults to None and object may have any
            values for the type properties (so long as they match the
            schema).
        raise_errors (bool, optional): If True, any errors determining
            that encoded object is not of this type will be raised.
            Defaults to False.
        typedef_validated (bool, optional): If True, the type definition
            is taken as already having been validated and will not be
            validated again during the encoding process. Defaults to
            False.
        metadata_validated (bool, optional): If True, the metadata
            definition is taken as already having been validated and will
            not be validated again. Defaults to False.

    Returns:
        bool: True if the metadata matches the type definition, False
            otherwise.

    """
    # Validate the metadata against the metaschema unless the caller
    # vouches that this was already done.
    if not metadata_validated:
        try:
            cls.validate_metadata(metadata)
        except jsonschema.exceptions.ValidationError:
            if raise_errors:
                raise
            return False
    if typedef is not None:
        if not typedef_validated:
            try:
                cls.validate_definition(typedef)
            except jsonschema.exceptions.ValidationError:
                if raise_errors:
                    raise
                return False
        errors = list(compare_schema(metadata, typedef))
        if errors:
            # One error per line so multi-error reports stay readable
            # (previously errors were concatenated without separators).
            error_msg = "Error(s) in comparison:\n"
            for e in errors:
                error_msg += '\t%s\n' % e
            if raise_errors:
                raise ValueError(error_msg)
            return False
    return True
def update_serializer(self, extract=False, skip_type=False, **kwargs):
    r"""Update serializer with provided information.

    Args:
        extract (bool, optional): If True, the updated typedef will be
            the bare minimum as extracted from total set of provided
            keywords, otherwise the entire set will be used. Defaults to
            False.
        skip_type (bool, optional): If True, everything is updated except
            the data type. Defaults to False.
        **kwargs: Additional keyword arguments are processed as part of
            the type definition and are parsed for old-style keywords.

    Raises:
        RuntimeError: If there are keywords that are not valid typedef
            keywords (correct or old-style).

    """
    # Snapshot the current datatype (if any) so compatibility with the
    # updated datatype can be verified after the update is applied.
    old_datatype = None
    if self.initialized:
        old_datatype = copy.deepcopy(self.datatype)
    _metaschema = get_metaschema()
    # Raise an error if the types are not compatible
    seritype = kwargs.pop('seritype', self.seritype)
    if (seritype != self._seritype) and (seritype != 'default'):  # pragma: debug
        raise Exception("Cannot change types form %s to %s." % (self._seritype, seritype))
    # Remove metadata keywords unrelated to serialization
    # TODO: Find a better way of tracking these
    _remove_kws = ['body', 'address', 'size', 'id', 'incomplete', 'raw',
                   'commtype', 'filetype', 'response_address', 'request_id',
                   'append', 'in_temp', 'is_series', 'working_dir', 'fmts',
                   'model_driver', 'env', 'send_converter', 'recv_converter',
                   'typedef_base']
    # Iterate over a copy of the keys since kwargs is mutated in the loop.
    kws = list(kwargs.keys())
    for k in kws:
        if (k in _remove_kws) or k.startswith('zmq'):
            kwargs.pop(k)
    # Set attributes and remove unused metadata keys
    for k in self._schema_properties.keys():
        if (k in kwargs) and (k != 'datatype'):
            setattr(self, k, kwargs.pop(k))
    # Create preliminary typedef: any remaining keywords that are valid
    # metaschema properties are folded into the type definition.
    typedef = kwargs.pop('datatype', {})
    for k in _metaschema['properties'].keys():
        if k in kwargs:
            typedef[k] = kwargs.pop(k)
    # Update extra keywords with anything left over.
    if (len(kwargs) > 0):
        self.extra_kwargs.update(kwargs)
        self.debug("Extra kwargs: %s" % str(self.extra_kwargs))
    # Update type
    if not skip_type:
        # Update typedef from oldstyle keywords in extra_kwargs
        typedef = self.update_typedef_from_oldstyle(typedef)
        if typedef.get('type', None):
            if extract:
                cls = get_type_class(typedef['type'])
                typedef = cls.extract_typedef(typedef)
            self.datatype = get_type_from_def(typedef)
        # Check that the new datatype is compatible with the old one
        if old_datatype is not None:
            errors = list(compare_schema(self.typedef, old_datatype._typedef) or ())
            if errors:
                raise RuntimeError(
                    ("Updated datatype is not compatible with the existing one."
                     + " New:\n%s\nOld:\n%s\n") % (
                         pprint.pformat(self.typedef),
                         pprint.pformat(old_datatype._typedef)))
    # Enforce that strings used with messages are in bytes
    for k in self._attr_conv:
        v = getattr(self, k, None)
        if isinstance(v, (str, bytes)):
            setattr(self, k, tools.str2bytes(v))
def assert_result_equal(cls, x, y):
    r"""Assert that serialized/deserialized objects equal.

    Args:
        x: First object for comparison.
        y: Second object for comparison.

    Raises:
        AssertionError: If the objects' schemas do not compare as equal.

    """
    # compare_schema is a generator; it must be consumed for the
    # comparison to actually run. The previous implementation discarded
    # the generator, so no check was ever performed.
    errors = list(compare_schema(x, y))
    if errors:
        raise AssertionError(
            "Error(s) in comparison:\n\t%s"
            % "\n\t".join(str(e) for e in errors))
def compare(cls, prop1, prop2, root1=None, root2=None):
    r"""Comparison for the 'temptype' property."""
    # Pure delegation to the generic schema comparison.
    yield from compare_schema(prop1, prop2, root1=root1, root2=root2)