Example #1
    def normalize(cls, normalizer, value, instance, schema):
        r"""Method to normalize the instance based on the property value.

        Args:
            normalizer (Normalizer): Normalizer class.
            value (object): Property value.
            instance (object): Object to normalize.
            schema (dict): Schema containing this property.

        Returns:
            object: Normalized object.

        """
        if isinstance(value, (list, tuple)):
            v0 = value[0]
            for v in value:
                t = get_type_class(v)
                # if normalizer.is_type(instance, v):
                if t.validate(v):
                    v0 = v
                    break
            type_cls = get_type_class(v0)
        else:
            type_cls = get_type_class(value)
        kws = {}
        if (((type_cls.name in ['class', 'function'])
             and normalizer._working_dir_stack)):
            kws['working_dir'] = normalizer._working_dir_stack[-1]
        return type_cls.normalize(instance, **kws)
Example #2
    def normalize(cls, normalizer, value, instance, schema):
        r"""Method to normalize the instance based on the property value.

        Args:
            normalizer (Normalizer): Normalizer class.
            value (object): Property value.
            instance (object): Object to normalize.
            schema (dict): Schema containing this property.

        Returns:
            object: Normalized object.

        """
        if isinstance(value, (list, tuple)):
            v0 = value[0]
            for v in value:
                t = get_type_class(v)
                # if normalizer.is_type(instance, v):
                if t.validate(v):
                    v0 = v
                    break
            type_cls = get_type_class(v0)
        else:
            type_cls = get_type_class(value)
        return type_cls.normalize(instance)
Example #3
    def decode(cls,
               metadata,
               data,
               typedef=None,
               typedef_validated=False,
               dont_check=False):
        r"""Decode an object.

        Args:
            metadata (dict): Meta data describing the data.
            data (bytes): Encoded data.
            typedef (dict, optional): Type properties that decoded object should
                be tested against. Defaults to None and object may have any
                values for the type properties (so long as they match the schema).
            typedef_validated (bool, optional): If True, the type definition
                is taken as already having been validated and will not be
                validated again during the decoding process. Defaults to False.
            dont_check (bool, optional): If True, the metadata will not be
                checked against the type definition. Defaults to False.

        Returns:
            object: Decoded object.

        Raises:
            ValueError: If the metadata does not match the type definition.
            ValueError: If the decoded object does not match type definition.

        """
        conv_func = None
        if isinstance(metadata, dict):
            metatype = metadata.get('type', None)
            if (metatype not in [None, 'bytes']) and (typedef == {
                    'type': 'bytes'
            }):
                new_cls = get_type_class(metatype)
                return new_cls.decode(metadata, data, dont_check=dont_check)
            if metatype != cls.name:
                conv_func = conversions.get_conversion(metatype, cls.name)
                if (((conv_func is None)
                     and (len(metadata.get('items', [])) == 1)
                     and cls.check_encoded(metadata['items'][0], typedef))):
                    conv_func = _get_single_array_element
        if (not conv_func) and (not dont_check):
            cls.check_encoded(metadata,
                              typedef,
                              raise_errors=True,
                              typedef_validated=typedef_validated)
        if conv_func:
            new_cls = get_type_class(metadata['type'])
            out = conv_func(
                new_cls.decode(metadata, data, dont_check=dont_check))
        else:
            out = cls.decode_data(data, metadata)
        out = cls.transform_type(out, typedef)
        return out
Example #4
    def transform_type(cls, obj, typedef=None):
        r"""Transform an object based on type info.

        Args:
            obj (object): Object to transform.
            typedef (dict): Type definition that should be used to transform the
                object.

        Returns:
            object: Transformed object.

        """
        if not (isinstance(typedef, dict) and isinstance(
                typedef.get(cls._json_property, None), cls.python_types)
                and isinstance(obj, cls.python_types)):
            return obj
        map_typedef = typedef[cls._json_property]
        map_out = cls._container_type()
        for k, v in cls._iterate(obj):
            if cls._has_element(map_typedef, k):
                cls._assign(
                    map_out, k,
                    get_type_class(map_typedef[k]['type']).transform_type(
                        v, typedef=map_typedef[k]))
            else:
                cls._assign(map_out, k, v)
        return map_out
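
A hedged usage sketch (not part of the original example): it shows how the
element-wise dispatch above might be driven through the 'object' container
type. It assumes yggdrasil is importable and that 'object' and 'bytes' are
registered type names, as used elsewhere in these examples; the exact result
depends on the per-type transform_type implementations.

from yggdrasil.metaschema.datatypes import get_type_class

obj_cls = get_type_class('object')
typedef = {'type': 'object',
           'properties': {'x': {'type': 'bytes'}}}
# 'x' is routed to the 'bytes' type class; 'y' has no entry in the
# typedef's properties and is copied through unchanged.
out = obj_cls.transform_type({'x': b'data', 'y': 1}, typedef=typedef)
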
Example #5
    def update_typedef(self, **kwargs):
        r"""Update the current typedef with new values.

        Args:
            **kwargs: All keyword arguments are considered to be new type
                definitions. If they are a valid definition property, they
                will be copied to the typedef associated with the instance.

        Returns:
            dict: A dictionary of keyword arguments that were not added to the
                type definition.

        """
        map = kwargs.get(self._json_property, None)
        map_out = self._container_type()
        if isinstance(map, self.python_types):
            for k, v in self._iterate(map):
                v_typedef = complete_typedef(v)
                if self._has_element(self._typecls, k):
                    self._assign(map_out, k,
                                 self._typecls[k].update_typedef(**v_typedef))
                else:
                    self._assign(
                        self._typecls, k,
                        get_type_class(v_typedef['type'])(**v_typedef))
                self._assign(map, k, self._typecls[k]._typedef)
            kwargs[self._json_property] = map
        out = super(ContainerMetaschemaType, self).update_typedef(**kwargs)
        if map_out:
            out[self._json_property] = map_out
        return out
Example #6
    def coerce_type(cls, obj, typedef=None, **kwargs):
        r"""Coerce objects of specific types to match the data type.

        Args:
            obj (object): Object to be coerced.
            typedef (dict, optional): Type definition that object should be
                coerced to. Defaults to None.
            **kwargs: Additional keyword arguments are metadata entries that may
                aid in coercing the type.

        Returns:
            object: Coerced object.

        Raises:
            RuntimeError: If obj is a dictionary, but key_order is not provided.

        """
        if not (isinstance(typedef, dict) and isinstance(
                typedef.get(cls._json_property, None), cls.python_types)
                and isinstance(obj, cls.python_types)):
            return obj
        map_typedef = typedef[cls._json_property]
        map_out = cls._container_type()
        for k, v in cls._iterate(obj):
            if cls._has_element(map_typedef, k):
                cls._assign(
                    map_out, k,
                    get_type_class(map_typedef[k]['type']).coerce_type(
                        v, typedef=map_typedef[k]))
            else:
                cls._assign(map_out, k, v)
        return map_out
Example #7
def _normalize_schema(validator, ref, instance, schema):
    r"""Normalize a schema at the root to handle the case where only a type
    string is specified."""
    # if isinstance(instance, str):
    #     instance = dict(type=instance)
    # return instance
    if isinstance(instance, str) and (instance in _type_registry):
        instance = {'type': instance}
    elif isinstance(instance, dict):
        if len(instance) == 0:
            pass
        elif 'type' not in instance:
            valid_types = None
            for k in instance.keys():
                prop_class = get_metaschema_property(k, skip_generic=True)
                if prop_class is None:
                    continue
                if valid_types is None:
                    valid_types = set(prop_class.types)
                else:
                    valid_types = (valid_types & set(prop_class.types))
            if (valid_types is None) or (len(valid_types) == 0):
                # There were no recorded properties, so this must be a
                # dictionary of properties
                instance = {'type': 'object', 'properties': instance}
            else:
                if len(valid_types) > 1:
                    valid_type_classes = sorted(
                        [_type_registry[t] for t in valid_types],
                        key=_specificity_sort_key)
                    s_max = valid_type_classes[0].specificity
                    valid_types = []
                    for tcls in valid_type_classes:
                        if tcls.specificity > s_max:
                            break
                        valid_types.append(tcls.name)
                    if 'scalar' in valid_types:
                        for t in ['1darray', 'ndarray']:
                            if t in valid_types:
                                valid_types.remove(t)
                    if len(valid_types) > 1:
                        raise Exception("Multiple possible classes: %s" %
                                        valid_types)
                instance['type'] = valid_types[0]
    elif isinstance(instance, (list, tuple)):
        # If inside validation of items as a schema, don't assume a
        # list is a malformed schema. Doing so results in infinite
        # recursion.
        if not ((len(validator._schema_path_stack) >= 2) and
                (validator._schema_path_stack[-2:] == ['items', 0])):
            instance = {'type': 'array', 'items': instance}
    if isinstance(instance, dict) and ('type' in instance):
        typecls = get_type_class(instance['type'])
        instance = typecls.normalize_definition(instance)
    return instance
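
For reference, a hedged illustration (not part of the original example) of the
shorthand expansions performed by the branches above, written as plain
literals; the full expansion also runs normalize_definition on the result
inside the validator.

# A bare registered type name becomes a minimal schema.
shorthand = 'bytes'
expanded = {'type': 'bytes'}

# A dict with no 'type' key and no recognized metaschema properties is
# treated as a mapping of property schemas.
shorthand = {'a': {'type': 'bytes'}}
expanded = {'type': 'object', 'properties': {'a': {'type': 'bytes'}}}

# A list (outside of 'items' validation) is treated as an array of schemas.
shorthand = [{'type': 'bytes'}]
expanded = {'type': 'array', 'items': [{'type': 'bytes'}]}
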
Example #8
def get_test_data(typename):
    r"""Determine a test data set for the specified type.

    Args:
        typename (str): Name of datatype.

    Returns:
        object: Example of specified datatype.

    """
    typeclass = get_type_class(typename)
    return typeclass.get_test_data()
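
A hedged usage sketch (not part of the original example) that mirrors the
helper above, assuming yggdrasil is importable and that 'bytes' is a
registered type name, as used elsewhere in these examples.

from yggdrasil.metaschema.datatypes import get_type_class

bytes_cls = get_type_class('bytes')   # look up the registered type class
example = bytes_cls.get_test_data()   # the classmethod relied on above
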
Example #9
    def get_test_data(cls, typename):
        r"""Determine a test data set for the specified type.

        Args:
            typename (str): Name of datatype.

        Returns:
            object: Example of specified datatype.

        """
        typeclass = get_type_class(typename)
        testclass = typeclass.import_test_class()
        out = testclass._valid_decoded[0]
        return out
Example #10
    def validate_metadata(cls, obj):
        r"""Validates an encoded object.

        Args:
            obj (string): Encoded object to validate.

        """
        if ((isinstance(obj, dict) and ('type' in obj)
             and (obj['type'] != cls.name))):
            type_cls = get_type_class(obj['type'])
            if type_cls.is_fixed and type_cls.issubtype(cls.name):
                obj = type_cls.typedef_fixed2base(obj)
        # jsonschema.validate(obj, cls.metaschema(), cls=cls.validator())
        jsonschema.validate(obj, cls.metadata_schema(), cls=cls.validator())
Example #11
    def _generate_data(cls, typedef):
        r"""Generate mock data for the specified type.

        Args:
            typedef (dict): Type definition.

        Returns:
            object: Python object of the specified type.

        """
        out = cls._container_type()
        for k, v in cls._iterate(typedef[cls._json_property]):
            vcls = get_type_class(v['type'])
            cls._assign(out, k, vcls.generate_data(v))
        return out
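
A hedged sketch (not part of the original example) of the public
generate_data() entry point relied on above, driven through the 'object'
container type. It assumes yggdrasil is importable, that 'object' and 'bytes'
are registered type names, and that a minimal typedef is sufficient here.

from yggdrasil.metaschema.datatypes import get_type_class

typedef = {'type': 'object', 'properties': {'x': {'type': 'bytes'}}}
mock = get_type_class('object').generate_data(typedef)  # e.g. {'x': b'...'}
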
Example #12
    def validate_metadata(cls, obj, **kwargs):
        r"""Validates an encoded object.

        Args:
            obj (string): Encoded object to validate.
            **kwargs: Additional keyword arguments are passed to the validator.

        """
        if ((isinstance(obj, dict) and ('type' in obj)
             and (obj['type'] != cls.name))):
            type_cls = get_type_class(obj['type'])
            if type_cls.is_fixed and type_cls.issubtype(cls.name):
                obj = type_cls.typedef_fixed2base(obj)
        # jsonschema.validate(obj, cls.metaschema(), cls=cls.validator())
        # jsonschema.validate(obj, cls.metadata_schema(), cls=cls.validator())
        return validate_instance(obj, cls.metadata_schema(), **kwargs)
Example #13
    def compare(cls, prop1, prop2, root1=None, root2=None):
        r"""Method to determine compatibility of one property value with another.
        This method is not necessarily symmetric in that the second value may
        not be compatible with the first even if the first is compatible with
        the second.

        Args:
            prop1 (object): Property value to compare against prop2.
            prop2 (object): Property value to compare against.
            
        Yields:
            str: Comparison failure messages.

        """
        type_cls = get_type_class(prop1)
        if not type_cls.issubtype(prop2):
            yield "Type '%s' is not a subtype of type '%s'" % (prop1, prop2)
Example #14
        def is_type(self, instance, types):
            r"""Determine if an object is an example of the given type.

            Args:
                instance (object): Object to test against the type.
                types (str, list): Name of a single type or a list of types
                    that instance should be tested against.

            Returns:
                bool: True if the instance is of the specified type(s). False
                    otherwise.

            """
            out = super(Normalizer, self).is_type(instance, types)
            if (_jsonschema_ver_maj < 3) and out:
                out = get_type_class(types).validate(instance)
            return out
Example #15
    def decode_data(cls, obj, typedef):
        r"""Decode an object.

        Args:
            obj (string): Encoded object to decode.
            typedef (dict): Type definition that should be used to decode the
                object.

        Returns:
            object: Decoded object.

        """
        container = cls._container_type()
        for k, v in cls._iterate(obj):
            vtypedef = cls._get_element(typedef[cls._json_property], k, {})
            vcls = get_type_class(vtypedef['type'])
            cls._assign(container, k, vcls.decode_data(v, vtypedef))
        return container
Example #16
def create_multitype_class(types):
    r"""Create a MultiMetaschemaType class that wraps multiple
    classes.

    Args:
        types (list): List of names of types.

    Returns:
        class: Subclass of MultiMetaschemaType that wraps the provided type classes.

    """
    type_classes = OrderedDict()
    type_name = '_'.join(types)
    class_name = str('MultiMetaschemaType_%s' % type_name)
    for t in types:
        type_classes[t] = get_type_class(t)
    out = type(class_name, (MultiMetaschemaType, ),
               {'type_classes': type_classes,
                'name': type_name})
    return out
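
A hedged usage sketch (not part of the original example). The import path is
an assumption (it matches where get_type_class is imported from elsewhere in
these examples), and 'bytes' and 'object' are registered type names; the
attribute values follow directly from the function body above.

from yggdrasil.metaschema.datatypes import create_multitype_class

multi_cls = create_multitype_class(['bytes', 'object'])
multi_cls.__name__            # 'MultiMetaschemaType_bytes_object'
list(multi_cls.type_classes)  # ['bytes', 'object']
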
Example #17
    def check_received_data(cls, typename, x_recv):
        r"""Check that the received message is equivalent to the
        test data for the specified type.

        Args:
            typename (str): Name of datatype.
            x_recv (object): Received object.

        Raises:
            AssertionError: If the received message is not equivalent
                to the test data for the specified type.

        """
        typeclass = get_type_class(typename)
        testclass = typeclass.import_test_class()
        x_sent = cls.get_test_data(typename)
        print('RECEIVED:')
        pprint.pprint(x_recv)
        print('EXPECTED:')
        pprint.pprint(x_sent)
        testclass.assert_result_equal(x_recv, x_sent)
Example #18
    def extract_typedef(cls, metadata):
        r"""Extract the minimum typedef required for this type from the provided
        metadata.

        Args:
            metadata (dict): Message metadata.

        Returns:
            dict: Encoded type definition with unnecessary properties removed.

        """
        out = super(ContainerMetaschemaType, cls).extract_typedef(metadata)
        if cls._json_property in out:
            contents = out[cls._json_property]
            if isinstance(contents, cls.python_types):
                for k, v in cls._iterate(contents):
                    if 'type' in v:
                        vcls = get_type_class(v['type'])
                        cls._assign(contents, k, vcls.extract_typedef(v))
                out[cls._json_property] = contents
        return out
Example #19
    def update_serializer(self, extract=False, skip_type=False, **kwargs):
        r"""Update serializer with provided information.

        Args:
            extract (bool, optional): If True, the updated typedef will be
                the bare minimum as extracted from the total set of provided
                keywords; otherwise the entire set will be used. Defaults to
                False.
            skip_type (bool, optional): If True, everything is updated except
                the data type. Defaults to False.
            **kwargs: Additional keyword arguments are processed as part of
                the type definition and are parsed for old-style keywords.

        Raises:
            RuntimeError: If there are keywords that are not valid typedef
                keywords (current or old-style).

        """
        old_datatype = None
        if self.initialized:
            old_datatype = copy.deepcopy(self.datatype)
        _metaschema = get_metaschema()
        # Raise an error if the types are not compatible
        seritype = kwargs.pop('seritype', self.seritype)
        if (seritype != self._seritype) and (seritype !=
                                             'default'):  # pragma: debug
            raise Exception("Cannot change types from %s to %s." %
                            (self._seritype, seritype))
        # Remove metadata keywords unrelated to serialization
        # TODO: Find a better way of tracking these
        _remove_kws = [
            'body', 'address', 'size', 'id', 'incomplete', 'raw', 'commtype',
            'filetype', 'response_address', 'request_id', 'append', 'in_temp',
            'is_series', 'working_dir', 'fmts', 'model_driver', 'env',
            'send_converter', 'recv_converter', 'typedef_base'
        ]
        kws = list(kwargs.keys())
        for k in kws:
            if (k in _remove_kws) or k.startswith('zmq'):
                kwargs.pop(k)
        # Set attributes and remove unused metadata keys
        for k in self._schema_properties.keys():
            if (k in kwargs) and (k != 'datatype'):
                setattr(self, k, kwargs.pop(k))
        # Create preliminary typedef
        typedef = kwargs.pop('datatype', {})
        for k in _metaschema['properties'].keys():
            if k in kwargs:
                typedef[k] = kwargs.pop(k)
        # Update extra keywords
        if (len(kwargs) > 0):
            self.extra_kwargs.update(kwargs)
            self.debug("Extra kwargs: %s" % str(self.extra_kwargs))
        # Update type
        if not skip_type:
            # Update typedef from oldstyle keywords in extra_kwargs
            typedef = self.update_typedef_from_oldstyle(typedef)
            if typedef.get('type', None):
                if extract:
                    cls = get_type_class(typedef['type'])
                    typedef = cls.extract_typedef(typedef)
                self.datatype = get_type_from_def(typedef)
            # Check to see if the new datatype is compatible with the old one
            if old_datatype is not None:
                errors = list(
                    compare_schema(self.typedef, old_datatype._typedef) or ())
                if errors:
                    raise RuntimeError((
                        "Updated datatype is not compatible with the existing one."
                        + "    New:\n%s\nOld:\n%s\n") %
                                       (pprint.pformat(self.typedef),
                                        pprint.pformat(old_datatype._typedef)))
        # Enforce that strings used with messages are in bytes
        for k in self._attr_conv:
            v = getattr(self, k, None)
            if isinstance(v, (str, bytes)):
                setattr(self, k, tools.str2bytes(v))
Example #20
def test_get_type_class():
    r"""Test get_type_class."""
    for v in _valid_objects.keys():
        datatypes.get_type_class(v)
    with pytest.raises(ValueError):
        datatypes.get_type_class('invalid')
Example #21
    def get_native_type(cls, **kwargs):
        r"""Get the native type.

        Args:
            type (str, optional): Name of |yggdrasil| extended JSON
                type or JSONSchema dictionary defining a datatype.
            **kwargs: Additional keyword arguments may be used in determining
                the precise declaration that should be used.

        Returns:
            str: The native type.

        """
        out = super(FortranModelDriver, cls).get_native_type(**kwargs)
        intent_regex = r'(,\s*intent\(.+?\))'
        for x in re.finditer(intent_regex, out):
            out = out.replace(x.group(0), '')
        type_match = re.search(cls.function_param['type_regex'], out)
        if type_match:
            type_match = type_match.groupdict()
            if type_match.get('shape_var', None):  # pragma: debug
                if ('pointer' not in out) and ('allocatable' not in out):
                    out += ', allocatable'
                if type_match['shape_var'][0] == '*':
                    out = out.replace('*', ':')
                # raise Exception("Used default native_type, but need alias")
            elif type_match.get('length_var', None):
                if ((('pointer' not in out) and ('allocatable' not in out)
                     and (type_match['length_var'] != 'X'))):
                    out += ', allocatable'
                if type_match['length_var'] == '*':
                    out = out.replace('*', ':')
        if not ((out == '*') or ('X' in out)):
            if out.startswith('ygg'):
                out = 'type(%s)' % out
            return out
        from yggdrasil.metaschema.datatypes import get_type_class
        json_type = kwargs.get('datatype', kwargs.get('type', 'bytes'))
        if isinstance(json_type, str):  # pragma: no cover
            json_type = {'type': json_type}
        if 'type' in kwargs:  # pragma: no cover
            json_type.update(kwargs)
        assert (isinstance(json_type, dict))
        json_type = get_type_class(
            json_type['type']).normalize_definition(json_type)
        if out == '*':
            dim_str = ''
            if json_type['type'] == '1darray':
                if 'length' in json_type:
                    dim_str = ', dimension(%s)' % str(json_type['length'])
            elif json_type['type'] == 'ndarray':
                if 'shape' in json_type:
                    dim_str = ', dimension(%s)' % ','.join(
                        [str(x) for x in json_type['shape']])
            json_subtype = copy.deepcopy(json_type)
            json_subtype['type'] = json_subtype.pop('subtype')
            out = cls.get_native_type(datatype=json_subtype) + dim_str
            if not dim_str:
                json_subtype['type'] = out.split('(')[0]
                if json_subtype['type'] == 'character':  # pragma: debug
                    json_subtype['precision'] = ''
                    raise RuntimeError("Character array requires precision.")
                else:
                    json_subtype['precision'] = int(json_subtype['precision'] /
                                                    8)
                json_subtype.setdefault('ndim', 'n')
                out = 'type(%s)' % cls.get_native_type(type=(
                    '%s_pointer' % json_type['type'])).format(**json_subtype)
        elif 'X' in out:
            if cls.allows_realloc(kwargs):
                out = 'type(yggchar_r)'
            else:
                if out.startswith('ygguint'):
                    out = 'type(%s)' % out
                if out.startswith('logical'):
                    precision = json_type.get('precision', 8)
                elif out.startswith('complex'):
                    precision = json_type['precision'] / 2
                elif json_type.get('subtype', json_type['type']) == 'unicode':
                    precision = json_type['precision'] / 4
                else:
                    precision = json_type['precision']
                out = out.replace('X', str(int(precision / 8)))
        return out
Example #22
def test_get_type_class():
    r"""Test get_type_class."""
    for v in _valid_objects.keys():
        datatypes.get_type_class(v)
    assert_raises(ValueError, datatypes.get_type_class, 'invalid')