def create_metaschema(overwrite=False):
    r"""Create the meta schema used for validating ygg type schemas.

    Args:
        overwrite (bool, optional): If True, an existing meta schema file
            will be replaced. If False and the metaschema exists, an error
            will be raised. Defaults to False.

    Returns:
        dict: Meta schema specifying rules for ygg type schemas. This
            includes all of the original JSON schema rules with the
            addition of the registered types and property definitions.

    Raises:
        RuntimeError: If the file already exists and overwrite is False.

    """
    if os.path.isfile(_metaschema_fname) and (not overwrite):
        raise RuntimeError("Metaschema file already exists.")
    out = copy.deepcopy(_base_validator.META_SCHEMA)
    out['title'] = "Ygg meta-schema for data type schemas"
    # Older jsonschema releases ship a draft4 metaschema that includes
    # these keys on the 'enum' property; strip them for parity.
    if _jsonschema_ver_maj < 3:
        enum_schema = out['properties']['enum']
        for key in ('minItems', 'uniqueItems'):
            enum_schema.pop(key, None)
    # TODO: Replace schema with a link to the metaschema in the documentation
    # del out['$schema']
    # Merge in the schemas for every registered property.
    for name, prop in get_registered_properties().items():
        if prop.schema is not None:
            assert (name not in out['properties'])
            out['properties'][name] = prop.schema
    # Register every type name and confirm its properties are known.
    simple_types = out['definitions']['simpleTypes']['enum']
    for name, type_cls in sorted(get_registered_types().items()):
        if name not in simple_types:
            simple_types.append(name)
        for prop_name in type_cls.properties:
            assert (prop_name in out['properties'])
    # Coerce str keys/values to unicode under Python 2.
    if backwards.PY2:  # pragma: Python 2
        out = backwards.as_unicode(out, recurse=True,
                                   convert_types=(str, ),
                                   allow_pass=True)
    # Display the result, then persist it.
    print('Created metaschema')
    pprint.pprint(out)
    with open(_metaschema_fname, 'w') as fd:
        encode_json(out, fd)
    return out
def serialize(self, obj, no_metadata=False, dont_encode=False,
              dont_check=False, **kwargs):
    r"""Serialize a message.

    Args:
        obj (object): Python object to be formatted.
        no_metadata (bool, optional): If True, no metadata will be added
            to the serialized message. Defaults to False.
        dont_encode (bool, optional): If True, the input message will not
            be encoded using type specific or JSON encoding. Defaults to
            False.
        dont_check (bool, optional): If True, the object being serialized
            will not be checked against the type definition. Defaults to
            False.
        **kwargs: Additional keyword arguments are added to the metadata.

    Returns:
        bytes, str: Serialized message.

    Raises:
        RuntimeError: If 'size' or 'data' appears in the metadata.

    """
    # EOF messages, explicitly raw messages, and pre-encoded messages are
    # passed through without type-specific encoding.
    pass_through = (isinstance(obj, backwards.bytes_type)
                    and ((obj == tools.YGG_MSG_EOF)
                         or kwargs.get('raw', False)
                         or dont_encode))
    if pass_through:
        metadata = kwargs
        data = obj
    else:
        metadata, data = self.encode(obj, typedef=self._typedef,
                                     typedef_validated=True,
                                     dont_check=dont_check, **kwargs)
    # These keys are populated below and must not be supplied by callers.
    for reserved in ['size', 'data']:
        if reserved in metadata:
            raise RuntimeError(
                "'%s' is a reserved keyword in the metadata." % reserved)
    if not pass_through:
        data = encoder.encode_json(data)
    if no_metadata:
        return data
    metadata['size'] = len(data)
    metadata.setdefault('id', str(uuid.uuid4()))
    return (YGG_MSG_HEAD + encoder.encode_json(metadata)
            + YGG_MSG_HEAD + data)
def func_serialize(self, args):
    r"""Serialize a message.

    Args:
        args (obj): Python object to be serialized.

    Returns:
        bytes, str: Serialized message.

    """
    # Delegate directly to the JSON encoder with this serializer's
    # configured indentation.
    return encode_json(args, cls=JSONReadableEncoder,
                       indent=self.indent)
def func_serialize(self, args):
    r"""Serialize a message.

    Args:
        args (obj): Python object to be serialized.

    Returns:
        bytes, str: Serialized message.

    """
    # JSON cannot represent bytes by default, so coerce any bytes in the
    # input to str (recursively) before encoding.
    as_text = backwards.as_str(args, recurse=True, allow_pass=True)
    return encode_json(as_text, cls=JSONReadableEncoder,
                       indent=self.indent)
def serialize(self, obj, no_metadata=False, dont_encode=False,
              dont_check=False, max_header_size=0, **kwargs):
    r"""Serialize a message.

    Args:
        obj (object): Python object to be formatted.
        no_metadata (bool, optional): If True, no metadata will be added
            to the serialized message. Defaults to False.
        dont_encode (bool, optional): If True, the input message will not
            be encoded using type specific or JSON encoding. Defaults to
            False.
        dont_check (bool, optional): If True, the object being serialized
            will not be checked against the type definition. Defaults to
            False.
        max_header_size (int, optional): Maximum size that header should
            occupy in order to be sent in a single message. A value of 0
            indicates that any size header is valid. Defaults to 0.
        **kwargs: Additional keyword arguments are added to the metadata.

    Returns:
        bytes, str: Serialized message.

    Raises:
        RuntimeError: If 'size', 'data', or 'datatype' appears in the
            provided keyword arguments.
        AssertionError: If the reduced header still exceeds
            max_header_size after the type information has been moved
            into the message body.

    """
    # Reject metadata keys that this method itself needs to populate.
    for k in ['size', 'data', 'datatype']:
        if k in kwargs:
            raise RuntimeError(
                "'%s' is a reserved keyword in the metadata." % k)
    # EOF messages, explicitly raw messages, and pre-encoded messages
    # bypass the type-specific encoding step entirely.
    if ((isinstance(obj, bytes)
         and ((obj == constants.YGG_MSG_EOF) or kwargs.get('raw', False)
              or dont_encode))):
        metadata = kwargs
        data = obj
        is_raw = True
    else:
        typedef, data = self.encode(obj, typedef=self._typedef,
                                    typedef_validated=True,
                                    dont_check=dont_check, **kwargs)
        metadata = {'datatype': typedef}
        metadata.update(kwargs)
        is_raw = False
    if not is_raw:
        data = encoder.encode_json(data)
    if no_metadata:
        return data
    metadata['size'] = len(data)
    metadata.setdefault('id', str(uuid.uuid4()))
    header = (constants.YGG_MSG_HEAD
              + encoder.encode_json(metadata)
              + constants.YGG_MSG_HEAD)
    if (max_header_size > 0) and (len(header) > max_header_size):
        # The header is too large to send in one message: keep only the
        # transport-related keys in the header and move the remaining
        # metadata (including the datatype) into the message body,
        # signalling this with the 'type_in_data' flag.
        metadata_type = metadata
        metadata = {}
        for k in ['address', 'size', 'id', 'request_id',
                  'response_address', 'zmq_reply',
                  'zmq_reply_worker', 'model']:
            if k in metadata_type:
                metadata[k] = metadata_type.pop(k)
        # NOTE(review): internal invariant check only; stripped under -O.
        assert (metadata)
        data = (encoder.encode_json(metadata_type)
                + constants.YGG_MSG_HEAD + data)
        metadata['size'] = len(data)
        metadata['type_in_data'] = True
        # Rebuild the (now smaller) header from the reduced metadata.
        header = (constants.YGG_MSG_HEAD
                  + encoder.encode_json(metadata)
                  + constants.YGG_MSG_HEAD)
        if len(header) > max_header_size:  # pragma: debug
            raise AssertionError(("The header is larger (%d) than the "
                                  "maximum (%d): %.100s...")
                                 % (len(header), max_header_size, header))
    msg = header + data
    return msg
def create_schema(overwrite=False):
    r"""Creates a file containing the Obj schema.

    Args:
        overwrite (bool, optional): If True and a file already exists, the
            existing file will be replaced. If False, an error will be
            raised if the file already exists.

    Raises:
        RuntimeError: If the file already exists and overwrite is False.

    """
    if (not overwrite) and os.path.isfile(_schema_file):
        raise RuntimeError("Schema file already exists.")
    # JSON schema for validating the mapping form of Obj (Wavefront-style)
    # 3D data. Reusable element schemas live under 'definitions' and are
    # referenced from the top-level 'properties' via $ref.
    schema = {
        'title': 'obj',
        'description': 'A mapping container for Obj 3D data.',
        'type': 'object',
        'required': ['vertices', 'faces'],
        'definitions': {
            # A single vertex; 'w' is an optional weight/homogeneous
            # coordinate with the conventional default of 1.0.
            'vertex': {
                'description': 'Map describing a single vertex.',
                'type': 'object',
                'required': ['x', 'y', 'z'],
                'additionalProperties': False,
                'properties': {
                    'x': {'type': _coord_type},
                    'y': {'type': _coord_type},
                    'z': {'type': _coord_type},
                    'red': {'type': _color_type},
                    'blue': {'type': _color_type},
                    'green': {'type': _color_type},
                    'w': {'type': _coord_type, 'default': 1.0}}},
            'param': {
                'description': 'Map describing a single parameter space point.',
                'type': 'object',
                'required': ['u', 'v'],
                'additionalProperties': False,
                'properties': {
                    'u': {'type': _coord_type},
                    'v': {'type': _coord_type},
                    'w': {'type': _coord_type, 'default': 1.0}}},
            'normal': {
                'description': 'Map describing a single normal.',
                'type': 'object',
                'required': ['i', 'j', 'k'],
                'additionalProperties': False,
                'properties': {
                    'i': {'type': _coord_type},
                    'j': {'type': _coord_type},
                    'k': {'type': _coord_type}}},
            'texcoord': {
                'description': 'Map describing a single texture vertex.',
                'type': 'object',
                'required': ['u'],
                'additionalProperties': False,
                'properties': {
                    'u': {'type': _coord_type},
                    'v': {'type': _coord_type, 'default': 0.0},
                    'w': {'type': _coord_type, 'default': 0.0}}},
            'point': {
                'description': 'Array of vertex indices describing a set of points.',
                'type': 'array',
                'minItems': 1,
                'items': {'type': _index_type}},
            'line': {
                'description': ('Array of vertex indices and texture indices '
                                + 'describing a line.'),
                'type': 'array',
                'minItems': 2,
                'items': {
                    'type': 'object',
                    'required': ['vertex_index'],
                    'additionalProperties': False,
                    'properties': {
                        'vertex_index': {'type': _index_type},
                        'texcoord_index': {'type': _index_type}}}},
            'face': {
                'description': ('Array of vertex, texture, and normal indices '
                                + 'describing a face.'),
                'type': 'array',
                'minItems': 3,
                'items': {
                    'type': 'object',
                    'required': ['vertex_index'],
                    'additionalProperties': False,
                    'properties': {
                        'vertex_index': {'type': _index_type},
                        'texcoord_index': {'type': _index_type},
                        'normal_index': {'type': _index_type}}}},
            # NOTE(review): description reads 'Properties of describing' —
            # likely a typo for 'Properties describing'; left unchanged
            # because it is emitted into the schema file.
            'curve': {
                'description': 'Properties of describing a curve.',
                'type': 'object',
                'required': ['starting_param', 'ending_param',
                             'vertex_indices'],
                'additionalProperties': False,
                'properties': {
                    'starting_param': {'type': _coord_type},
                    'ending_param': {'type': _coord_type},
                    'vertex_indices': {
                        'type': 'array',
                        'minItems': 2,
                        'items': {'type': _index_type}}}},
            # NOTE(review): 'describine' is a typo for 'describing'; left
            # unchanged because it is emitted into the schema file.
            'curve2D': {
                'description': ('Array of parameter indices describine a 2D curve on '
                                + 'a surface.'),
                'type': 'array',
                'minItems': 2,
                'items': {'type': _index_type}},
            'surface': {
                'description': 'Properties describing a surface.',
                'type': 'object',
                'required': ['starting_param_u', 'ending_param_u',
                             'starting_param_v', 'ending_param_v',
                             'vertex_indices'],
                'additionalProperties': False,
                'properties': {
                    'starting_param_u': {'type': _coord_type},
                    'ending_param_u': {'type': _coord_type},
                    'starting_param_v': {'type': _coord_type},
                    'ending_param_v': {'type': _coord_type},
                    'vertex_indices': {
                        'type': 'array',
                        'minItems': 2,
                        'items': {
                            'type': 'object',
                            'required': ['vertex_index'],
                            'additionalProperties': False,
                            'properties': {
                                'vertex_index': {'type': _index_type},
                                'texcoord_index': {'type': _index_type},
                                'normal_index': {'type': _index_type}}}}}}},
        'properties': {
            'material': {
                'description': 'Name of the material to use.',
                'type': ['unicode', 'string']},
            'vertices': {
                'description': 'Array of vertices.',
                'type': 'array',
                'items': {'$ref': '#/definitions/vertex'}},
            'params': {
                'description': 'Array of parameter coordinates.',
                'type': 'array',
                'items': {'$ref': '#/definitions/param'}},
            'normals': {
                'description': 'Array of normals.',
                'type': 'array',
                'items': {'$ref': '#/definitions/normal'}},
            'texcoords': {
                'description': 'Array of texture vertices.',
                'type': 'array',
                'items': {'$ref': '#/definitions/texcoord'}},
            'points': {
                'description': 'Array of points.',
                'type': 'array',
                'items': {'$ref': '#/definitions/point'}},
            'lines': {
                'description': 'Array of lines.',
                'type': 'array',
                'items': {'$ref': '#/definitions/line'}},
            'faces': {
                'description': 'Array of faces.',
                'type': 'array',
                'items': {'$ref': '#/definitions/face'}},
            'curves': {
                'description': 'Array of curves.',
                'type': 'array',
                'items': {'$ref': '#/definitions/curve'}},
            'curve2Ds': {
                'description': 'Array of curve2Ds.',
                'type': 'array',
                'items': {'$ref': '#/definitions/curve2D'}},
            'surfaces': {
                'description': 'Array of surfaces.',
                'type': 'array',
                'items': {'$ref': '#/definitions/surface'}}},
        # Elements that reference indices require the referenced arrays.
        'dependencies': {
            'lines': ['vertices'],
            'faces': ['vertices'],
            'curves': ['vertices'],
            'curve2Ds': ['params'],
            'surfaces': ['vertices']}}
    with open(_schema_file, 'w') as fd:
        encode_json(schema, fd, indent='\t')
def create_schema(overwrite=False):
    r"""Creates a file containing the Ply schema.

    Args:
        overwrite (bool, optional): If True and a file already exists, the
            existing file will be replaced. If False, an error will be
            raised if the file already exists.

    Raises:
        RuntimeError: If the file already exists and overwrite is False.

    """
    if (not overwrite) and os.path.isfile(_schema_file):
        raise RuntimeError("Schema file already exists.")
    # JSON schema for validating the mapping form of Ply 3D data.
    # Element schemas live under 'definitions' and are referenced from
    # the top-level 'properties' via $ref.
    schema = {
        'title': 'ply',
        'description': 'A mapping container for Ply 3D data.',
        'type': 'object',
        'required': ['vertices', 'faces'],
        'definitions': {
            'index': {'type': ('int', 'uint')},
            'color': {'type': ('int', 'uint')},
            'coord': {'type': 'float'},
            'vertex': {
                'description': 'Map describing a single vertex.',
                'type': 'object',
                'required': ['x', 'y', 'z'],
                'additionalProperties': False,
                'properties': {'x': {'type': _coord_type},
                               'y': {'type': _coord_type},
                               'z': {'type': _coord_type},
                               'red': {'type': _color_type},
                               'blue': {'type': _color_type},
                               'green': {'type': _color_type}}},
            'face': {
                'description': 'Map describing a single face.',
                'type': 'object',
                'required': ['vertex_index'],
                'additionalProperties': False,
                'properties': {
                    'vertex_index': {
                        'type': 'array',
                        'minItems': 3,
                        'items': {'type': _index_type}}}},
            'edge': {
                'description': 'Vertex indices describing an edge.',
                'type': 'object',
                'required': ['vertex1', 'vertex2'],
                'additionalProperties': False,
                'properties': {
                    'vertex1': {'type': _index_type},
                    'vertex2': {'type': _index_type},
                    'red': {'type': _color_type},
                    'green': {'type': _color_type},
                    'blue': {'type': _color_type}}}
            # NOTE(review): planned (currently disabled) material schema.
            # 'material': {
            #     'description': 'Map of material parameters.',
            #     'type': 'object',
            #     'required': ['ambient_red', 'ambient_green',
            #                  'ambient_blue', 'ambient_coeff',
            #                  'diffuse_red', 'diffuse_green',
            #                  'diffuse_blue', 'diffuse_coeff',
            #                  'specular_red', 'specular_green',
            #                  'specular_blue', 'specular_coeff',
            #                  'specular_power'],
            #     'properties': {'ambient_red': {'type': _color_type},
            #                    'ambient_green': {'type': _color_type},
            #                    'ambient_blue': {'type': _color_type},
            #                    'ambient_coeff': {'type': _coord_type},
            #                    'diffuse_red': {'type': _color_type},
            #                    'diffuse_green': {'type': _color_type},
            #                    'diffuse_blue': {'type': _color_type},
            #                    'diffuse_coeff': {'type': _coord_type},
            #                    'specular_red': {'type': _color_type},
            #                    'specular_green': {'type': _color_type},
            #                    'specular_blue': {'type': _color_type},
            #                    'specular_coeff': {'type': _coord_type},
            #                    'specular_power': {'type': _coord_type}}}},
        },
        'properties': {
            'material': {
                'description': 'Name of the material to use.',
                'type': ['unicode', 'string']},
            # 'materials': {
            #     'description': 'Array of materials.',
            #     'type': 'array', 'items': {'$ref': '#/definitions/material'}},
            'vertices': {
                'description': 'Array of vertices.',
                'type': 'array', 'items': {'$ref': '#/definitions/vertex'}},
            'edges': {
                'description': 'Array of edges.',
                'type': 'array', 'items': {'$ref': '#/definitions/edge'}},
            'faces': {
                'description': 'Array of faces.',
                'type': 'array', 'items': {'$ref': '#/definitions/face'}}},
        # Edges and faces reference vertex indices, so they require the
        # 'vertices' array to be present.
        'dependencies': {
            'edges': ['vertices'],
            'faces': ['vertices']}}
    with open(_schema_file, 'w') as fd:
        encode_json(schema, fd, indent='\t')
def test_JSONEncoder():
    r"""Verify encode_json raises TypeError for an unserializable object."""
    unserializable = TestClass()
    with pytest.raises(TypeError):
        encoder.encode_json(unserializable)