Example #1
    @classmethod
    def validate(cls, obj, raise_errors=False):
        r"""Validate an object to check if it could be of this type.

        Args:
            obj (object): Object to validate.
            raise_errors (bool, optional): If True, errors will be raised when
                the object fails to be validated. Defaults to False.

        Returns:
            bool: True if the object could be of this type, False otherwise.

        """
        if isinstance(obj, np.ndarray) and (obj.ndim == 0):
            obj = obj.reshape((1, ))[0]
        if super(ScalarMetaschemaType,
                 cls).validate(units.get_data(obj), raise_errors=raise_errors):
            dtype = ScalarMetaschemaProperties.data2dtype(obj)
            if cls.is_fixed and ('subtype' in cls.fixed_properties):
                type_list = [
                    ScalarMetaschemaProperties._valid_types[
                        cls.fixed_properties['subtype']]
                ]
            else:
                type_list = ScalarMetaschemaProperties._valid_numpy_types
            if dtype.name.startswith(tuple(type_list)):
                return True
            else:
                if raise_errors:
                    raise ValueError(
                        ("dtype %s dosn't corresponding with any " +
                         "of the accepted types: %s") %
                        (str(dtype), str(type_list)))
        return False
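The check above boils down to unwrapping 0-d arrays and testing the numpy dtype name against a list of accepted prefixes. A minimal standalone sketch of that logic, using an illustrative prefix tuple rather than the library's ScalarMetaschemaProperties._valid_numpy_types table:

import numpy as np

# Illustrative prefix list; the real table is
# ScalarMetaschemaProperties._valid_numpy_types in the snippet above.
_numpy_type_prefixes = ('int', 'uint', 'float', 'complex', 'bytes')

def looks_like_scalar(obj):
    # Mirror the snippet: unwrap 0-d arrays, then test the dtype name prefix.
    if isinstance(obj, np.ndarray) and (obj.ndim == 0):
        obj = obj.reshape((1, ))[0]
    dtype = np.asarray(obj).dtype
    return dtype.name.startswith(_numpy_type_prefixes)

print(looks_like_scalar(np.float32(1.0)))  # True
print(looks_like_scalar(np.array(3)))      # True (0-d array is unwrapped first)
print(looks_like_scalar(object()))         # False (dtype name is 'object')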
Example #2
    @classmethod
    def from_array(cls, arr, unit_str=None, dtype=None, typedef=None):
        r"""Get object representation of the data.

        Args:
            arr (np.ndarray): Numpy array.
            unit_str (str, optional): Units that should be added to returned
                object.
            dtype (np.dtype, optional): Numpy data type that should be maintained
                as a base class when adding units. Defaults to None and is
                determined from the object or typedef (if provided).
            typedef (dict, optional): Type definition that should be used to
                decode the object. Defaults to None and is determined from the
                object or dtype (if provided).

        Returns:
            object: Object representation of the data in the input array.

        """
        # if (typedef is None) and (dtype is not None):
        #     typedef = ScalarMetaschemaProperties.dtype2definition(dtype)
        # elif (dtype is None) and (typedef is not None):
        #     dtype = ScalarMetaschemaProperties.definition2dtype(typedef)
        if (cls.name not in ['1darray', 'ndarray']) and (arr.ndim > 0):
            out = arr[0]
        else:
            out = arr
        if typedef is not None:
            # Cast numpy type to native python type if they are equivalent
            out = cls.as_python_type(out, typedef)
        if unit_str is not None:
            if dtype is None:
                dtype = ScalarMetaschemaProperties.data2dtype(out)
            out = units.add_units(out, unit_str, dtype=dtype)
        return out
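The branch at the top is the key behaviour: scalar types pull the single element out of a length-1 array, while 1darray/ndarray types pass the array through untouched. A minimal sketch with the units and typedef handling stripped out (the helper name is hypothetical):

import numpy as np

def array_to_object(arr, type_name):
    # Hypothetical helper mirroring the snippet's first branch: scalar types
    # take the first element, '1darray'/'ndarray' keep the array as-is.
    if (type_name not in ['1darray', 'ndarray']) and (arr.ndim > 0):
        return arr[0]
    return arr

print(array_to_object(np.array([1.5]), 'float'))         # 1.5
print(array_to_object(np.array([1.5, 2.5]), '1darray'))  # [1.5 2.5]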
Example #3
def test_definition2dtype_errors():
    r"""Check that error raised if type not specified."""
    assert_raises(KeyError, ScalarMetaschemaProperties.definition2dtype, {})
    assert_raises(RuntimeError, ScalarMetaschemaProperties.definition2dtype,
                  {'type': 'float'})
    assert_equal(ScalarMetaschemaProperties.definition2dtype({'type': 'bytes'}),
                 np.dtype((ScalarMetaschemaProperties._valid_types['bytes'])))
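The same expectations written in pytest style, in case that is the test runner in use; the import path for ScalarMetaschemaProperties is an assumption and should be adjusted to wherever the module lives in your install:

import numpy as np
import pytest

# Import path assumed; adjust to the actual location of ScalarMetaschemaProperties.
from yggdrasil.metaschema.properties import ScalarMetaschemaProperties

def test_definition2dtype_errors_pytest():
    # Empty definition: no type specified.
    with pytest.raises(KeyError):
        ScalarMetaschemaProperties.definition2dtype({})
    # 'float' alone is rejected, as in the assert_raises version above.
    with pytest.raises(RuntimeError):
        ScalarMetaschemaProperties.definition2dtype({'type': 'float'})
    # 'bytes' maps directly onto its registered numpy dtype.
    expected = np.dtype(ScalarMetaschemaProperties._valid_types['bytes'])
    assert ScalarMetaschemaProperties.definition2dtype({'type': 'bytes'}) == expected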
Example #4
    @classmethod
    def to_array(cls, obj):
        r"""Get np.array representation of the data.

        Args:
            obj (object): Object to get array for.

        Returns:
            np.ndarray: Array representation of object.

        """
        obj_nounits = units.get_data(obj)
        if isinstance(obj_nounits, np.ndarray):
            arr = obj_nounits
        else:
            dtype = ScalarMetaschemaProperties.data2dtype(obj_nounits)
            arr = np.array([obj_nounits], dtype=dtype)
        return arr
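Without the units layer, to_array reduces to "pass arrays through, wrap scalars in a length-1 array with an inferred dtype". A numpy-only sketch of that behaviour (data2dtype is approximated by numpy's own inference):

import numpy as np

def object_to_array(obj):
    # Arrays pass through unchanged; scalars become a 1-element array whose
    # dtype is inferred (the snippet uses data2dtype for the same purpose).
    if isinstance(obj, np.ndarray):
        return obj
    return np.array([obj])

print(object_to_array(3.5))                 # [3.5]
print(object_to_array(np.arange(3)).shape)  # (3,)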
Example #5
    @classmethod
    def _generate_data(cls, typedef):
        r"""Generate mock data for the specified type.

        Args:
            typedef (dict): Type definition.

        Returns:
            object: Python object of the specified type.

        """
        dtype = ScalarMetaschemaProperties.definition2dtype(typedef)
        if typedef['type'] == '1darray':
            out = np.zeros(typedef.get('length', 2), dtype)
        elif typedef['type'] == 'ndarray':
            out = np.zeros(typedef['shape'], dtype)
        else:
            out = np.zeros(1, dtype)[0]
        out = units.add_units(out, typedef.get('units', ''))
        return out
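The generated mock data is simply a zero-filled array (or a zero scalar) matching the requested dtype and shape. A standalone sketch of the three branches, with definition2dtype replaced by a plain dtype argument and the units step skipped:

import numpy as np

def mock_data(typedef, dtype=np.dtype('float64')):
    # dtype is a stand-in for ScalarMetaschemaProperties.definition2dtype(typedef).
    if typedef['type'] == '1darray':
        return np.zeros(typedef.get('length', 2), dtype)
    if typedef['type'] == 'ndarray':
        return np.zeros(typedef['shape'], dtype)
    return np.zeros(1, dtype)[0]

print(mock_data({'type': '1darray', 'length': 3}))      # [0. 0. 0.]
print(mock_data({'type': 'ndarray', 'shape': (2, 2)}))  # [[0. 0.] [0. 0.]]
print(mock_data({'type': 'float'}))                     # 0.0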
Example #6
    @classmethod
    def transform_type(cls, obj, typedef=None):
        r"""Transform an object based on type info.

        Args:
            obj (object): Object to transform.
            typedef (dict, optional): Type definition that should be used to
                transform the object. Defaults to None.

        Returns:
            object: Transformed object.

        """
        if typedef is None:
            return obj
        typedef0 = cls.encode_type(obj)
        typedef1 = copy.deepcopy(typedef0)
        typedef1.update(**typedef)
        dtype = ScalarMetaschemaProperties.definition2dtype(typedef1)
        arr = cls.to_array(obj).astype(dtype, casting='same_kind')
        out = cls.from_array(arr, unit_str=typedef0.get('units', None), dtype=dtype)
        out = cls.as_python_type(out, typedef)
        return units.convert_to(out, typedef1.get('units', None))
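The casting='same_kind' argument is what keeps the transformation safe: numpy will move between precisions of the same kind but refuses cross-kind casts. A short demonstration of that behaviour in isolation:

import numpy as np

arr = np.array([1.5, 2.5], dtype=np.float64)

# Allowed: float64 -> float32 stays within the floating-point kind.
print(arr.astype(np.float32, casting='same_kind'))

# Refused: float -> int crosses kinds, so numpy raises TypeError.
try:
    arr.astype(np.int32, casting='same_kind')
except TypeError as exc:
    print('refused:', exc)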
Example #7
    @classmethod
    def decode_data(cls, obj, typedef):
        r"""Decode an object.

        Args:
            obj (string): Encoded object to decode.
            typedef (dict): Type definition that should be used to decode the
                object.

        Returns:
            object: Decoded object.

        """
        bytes = backwards.base64_decode(obj.encode('ascii'))
        dtype = ScalarMetaschemaProperties.definition2dtype(typedef)
        arr = np.frombuffer(bytes, dtype=dtype)
        # arr = np.fromstring(bytes, dtype=dtype)
        if 'shape' in typedef:
            arr = arr.reshape(typedef['shape'])
        out = cls.from_array(arr, unit_str=typedef.get('units', None), dtype=dtype)
        # Cast numpy type to native python type if they are equivalent
        out = cls.as_python_type(out, typedef)
        return out
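The decode path is base64 text -> raw bytes -> np.frombuffer with the dtype recovered from the type definition. A self-contained round trip using the standard base64 module in place of the snippet's backwards.base64_decode compatibility wrapper:

import base64
import numpy as np

original = np.array([1.0, 2.0, 3.0], dtype='<f8')

# Encode: raw array bytes -> base64 ASCII text (what decode_data receives as obj).
encoded = base64.b64encode(original.tobytes()).decode('ascii')

# Decode, mirroring the snippet: ASCII -> bytes -> frombuffer with the known dtype.
decoded = np.frombuffer(base64.b64decode(encoded.encode('ascii')), dtype='<f8')
print(np.array_equal(original, decoded))  # True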
Example #8
    @classmethod
    def decode_data(cls, obj, typedef):
        r"""Decode an object.

        Args:
            obj (string): Encoded object to decode.
            typedef (dict): Type definition that should be used to decode the
                object.

        Returns:
            object: Decoded object.

        """
        bytes = base64.decodebytes(obj.encode('ascii'))
        dtype = ScalarMetaschemaProperties.definition2dtype(typedef)
        arr = np.frombuffer(bytes, dtype=dtype)
        # arr = np.fromstring(bytes, dtype=dtype)
        if 'shape' in typedef:
            arr = arr.reshape(typedef['shape'])
        out = cls.from_array(arr,
                             unit_str=typedef.get('units', None),
                             dtype=dtype,
                             typedef=typedef)
        return out
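This variant uses base64.decodebytes and additionally reshapes with typedef['shape'], which is how multidimensional arrays survive the flat byte encoding. A round trip showing the reshape step:

import base64
import numpy as np

original = np.arange(6, dtype='<i4').reshape((2, 3))

# encodebytes/decodebytes are the counterparts used in the snippet above.
encoded = base64.encodebytes(original.tobytes()).decode('ascii')

flat = np.frombuffer(base64.decodebytes(encoded.encode('ascii')), dtype='<i4')
restored = flat.reshape((2, 3))  # the snippet takes this shape from typedef['shape']
print(np.array_equal(original, restored))  # True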
Example #9
    @classmethod
    def from_array(cls, arr, unit_str=None, dtype=None):
        r"""Get object representation of the data.

        Args:
            arr (np.ndarray): Numpy array.
            unit_str (str, optional): Units that should be added to returned
                object.
            dtype (np.dtype, optional): Numpy data type that should be maintained
                as a base class when adding units. Defaults to None and is
                determined from the object.

        Returns:
            object: Object representation of the data in the input array.

        """
        if (cls.name not in ['1darray', 'ndarray']) and (arr.ndim > 0):
            out = arr[0]
        else:
            out = arr
        if unit_str is not None:
            if dtype is None:
                dtype = ScalarMetaschemaProperties.data2dtype(out)
            out = units.add_units(out, unit_str, dtype=dtype)
        return out
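The only difference from the variant in Example #2 is that no typedef is accepted, so the dtype fallback matters more: when none is supplied it is inferred from the data before units are attached. A small sketch of that fallback (data2dtype approximated with np.asarray; the helper name is hypothetical):

import numpy as np

def infer_dtype(out, dtype=None):
    # Hypothetical helper: only infer when no dtype was supplied, mirroring the
    # 'if dtype is None' branch above (data2dtype approximated by np.asarray).
    if dtype is None:
        dtype = np.asarray(out).dtype
    return dtype

print(infer_dtype(np.float32(1.0)))            # float32 (inferred)
print(infer_dtype(1.0, dtype=np.dtype('f4')))  # float32 (kept as given)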