def validate(cls, obj, raise_errors=False):
    r"""Validate an object to check if it could be of this type.

    Args:
        obj (object): Object to validate.
        raise_errors (bool, optional): If True, errors will be raised when
            the object fails to be validated. Defaults to False.

    Returns:
        bool: True if the object could be of this type, False otherwise.

    """
    # Unwrap 0-d numpy arrays into their scalar element so the dtype
    # inspection below sees the underlying scalar value.
    if isinstance(obj, np.ndarray) and (obj.ndim == 0):
        obj = obj.reshape((1, ))[0]
    if not super(ScalarMetaschemaType, cls).validate(
            units.get_data(obj), raise_errors=raise_errors):
        # Explicit False (the original fell off the end returning None,
        # contradicting the documented bool return).
        return False
    dtype = ScalarMetaschemaProperties.data2dtype(obj)
    if cls.is_fixed and ('subtype' in cls.fixed_properties):
        # Fixed subtype: only the single matching numpy type name is valid.
        type_list = [
            ScalarMetaschemaProperties._valid_types[
                cls.fixed_properties['subtype']]]
    else:
        type_list = ScalarMetaschemaProperties._valid_numpy_types
    if dtype.name.startswith(tuple(type_list)):
        return True
    if raise_errors:
        # Message typos fixed ("dosn't corresponding" -> "doesn't correspond").
        raise ValueError(
            ("dtype %s doesn't correspond with any "
             "of the accepted types: %s")
            % (str(dtype), str(type_list)))
    return False
def from_array(cls, arr, unit_str=None, dtype=None, typedef=None):
    r"""Get object representation of the data.

    Args:
        arr (np.ndarray): Numpy array.
        unit_str (str, optional): Units that should be added to returned
            object.
        dtype (np.dtype, optional): Numpy data type that should be
            maintained as a base class when adding units. Defaults to None
            and is determined from the object or typedef (if provided).
        typedef (dict, optional): Type definition that should be used to
            decode the object. Defaults to None and is determined from the
            object or dtype (if provided).

    Returns:
        object: Object representation of the data in the input array.

    """
    # Array types (and 0-d arrays) keep the full array; scalar types are
    # represented by the first element.
    keep_full_array = (cls.name in ['1darray', 'ndarray']) or (arr.ndim == 0)
    if keep_full_array:
        out = arr
    else:
        out = arr[0]
    if typedef is not None:
        # Cast numpy type to native python type if they are equivalent
        out = cls.as_python_type(out, typedef)
    if unit_str is not None:
        if dtype is None:
            dtype = ScalarMetaschemaProperties.data2dtype(out)
        out = units.add_units(out, unit_str, dtype=dtype)
    return out
def to_array(cls, obj):
    r"""Get np.array representation of the data.

    Args:
        obj (object): Object to get array for.

    Returns:
        np.ndarray: Array representation of object.

    """
    # Strip any units before inspecting the underlying data.
    data = units.get_data(obj)
    if isinstance(data, np.ndarray):
        return data
    # Wrap a scalar in a length-1 array with a matching dtype.
    wrap_dtype = ScalarMetaschemaProperties.data2dtype(data)
    return np.array([data], dtype=wrap_dtype)
def from_array(cls, arr, unit_str=None, dtype=None):
    r"""Get object representation of the data.

    Args:
        arr (np.ndarray): Numpy array.
        unit_str (str, optional): Units that should be added to returned
            object.
        dtype (np.dtype, optional): Numpy data type that should be
            maintained as a base class when adding units. Defaults to None
            and is determined from the object.

    Returns:
        object: Object representation of the data in the input array.

    """
    # Array types (and 0-d arrays) pass through unchanged; scalar types
    # take the first element.
    is_array_type = cls.name in ['1darray', 'ndarray']
    out = arr if (is_array_type or arr.ndim == 0) else arr[0]
    if unit_str is not None:
        if dtype is None:
            dtype = ScalarMetaschemaProperties.data2dtype(out)
        out = units.add_units(out, unit_str, dtype=dtype)
    return out