def _cast(val, schema):
    """Cast ``val`` to the array type described by the ASDF ``schema``.

    Parameters
    ----------
    val : object
        Value to cast; may be a lazy ``ndarray.NDArrayType`` or any
        array-convertible object.  ``None`` is passed through unchanged.
    schema : dict
        Schema fragment; the ``datatype``, ``ndim`` and ``max_ndim`` keys
        are honored.

    Returns
    -------
    object
        The cast value: an array of the schema's dtype (a FITS record
        array when the dtype is structured), a plain Python scalar for
        0-d numpy scalars, or ``None``.

    Raises
    ------
    ValueError
        If the value's dimensionality violates ``ndim`` or ``max_ndim``.
    """
    val = _unmake_node(val)
    if val is not None:
        if 'datatype' in schema:
            # Handle lazy array
            if isinstance(val, ndarray.NDArrayType):
                val = val._make_array()
            dtype = ndarray.asdf_datatype_to_numpy_dtype(schema['datatype'])
            val = util.gentle_asarray(val, dtype)
            if dtype.fields is not None:
                # Structured dtypes are exposed as FITS record arrays.
                val = _as_fitsrec(val)
        if 'ndim' in schema and len(val.shape) != schema['ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected {0}, got {1}".format(
                    schema['ndim'], len(val.shape)))
        if 'max_ndim' in schema and len(val.shape) > schema['max_ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected <= {0}, got {1}".format(
                    schema['max_ndim'], len(val.shape)))
        if isinstance(val, np.generic) and np.isscalar(val):
            # BUGFIX: np.asscalar() was deprecated in NumPy 1.16 and removed
            # in 1.23; generic.item() is the documented replacement (and is
            # what the struct-array variant of _cast in this file already uses).
            val = val.item()
    return val
def _cast(val, schema):
    """Cast ``val`` to the type described by ``schema``.

    ``None`` passes through.  When ``datatype`` is present the value is
    converted (materializing lazy ``NDArrayType`` instances first) and
    structured results are wrapped as FITS record arrays.  ``ndim`` /
    ``max_ndim`` constraints raise ``ValueError`` when violated; 0-d
    numpy scalars are unwrapped to plain Python scalars.
    """
    val = _unmake_node(val)
    if val is not None:
        if 'datatype' in schema:
            # Handle lazy array
            if isinstance(val, ndarray.NDArrayType):
                val = val._make_array()
            dtype = ndarray.asdf_datatype_to_numpy_dtype(schema['datatype'])
            val = util.gentle_asarray(val, dtype)
            if dtype.fields is not None:
                val = _as_fitsrec(val)
        if 'ndim' in schema and len(val.shape) != schema['ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected {0}, got {1}".format(
                    schema['ndim'], len(val.shape)))
        if 'max_ndim' in schema and len(val.shape) > schema['max_ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected <= {0}, got {1}".format(
                    schema['max_ndim'], len(val.shape)))
        if isinstance(val, np.generic) and np.isscalar(val):
            # BUGFIX: np.asscalar() no longer exists (removed in NumPy 1.23);
            # use .item() instead.
            val = val.item()
    return val
def _get_hdu_type(hdu_name, schema=None, value=None):
    """Return the FITS HDU class appropriate for ``hdu_name``.

    The primary HDU (name ``0`` or ``'PRIMARY'``) maps to
    ``fits.PrimaryHDU``.  Otherwise a ``fits.BinTableHDU`` is chosen when
    either the schema's ``datatype`` or the value's dtype is structured.
    Returns ``None`` when no specific HDU type can be determined.
    """
    if hdu_name in (0, 'PRIMARY'):
        return fits.PrimaryHDU
    if schema is not None:
        numpy_dtype = ndarray.asdf_datatype_to_numpy_dtype(schema['datatype'])
        return fits.BinTableHDU if numpy_dtype.fields is not None else None
    if value is not None:
        if hasattr(value, 'dtype') and value.dtype.names is not None:
            return fits.BinTableHDU
    return None
def _get_hdu_type(hdu_name, schema=None, value=None):
    """Pick the FITS HDU class for ``hdu_name``.

    Primary HDU names select ``fits.PrimaryHDU``; a structured schema
    datatype — or, failing a schema, a structured value dtype — selects
    ``fits.BinTableHDU``.  ``None`` means "no specific type".
    """
    hdu_type = None
    if hdu_name in (0, 'PRIMARY'):
        hdu_type = fits.PrimaryHDU
    elif schema is not None:
        schema_dtype = ndarray.asdf_datatype_to_numpy_dtype(schema['datatype'])
        if schema_dtype.fields is not None:
            hdu_type = fits.BinTableHDU
    elif value is not None:
        looks_like_table = (hasattr(value, 'dtype')
                            and value.dtype.names is not None)
        if looks_like_table:
            hdu_type = fits.BinTableHDU
    return hdu_type
def fill_hdu(subschema, path, combiner, shape, recurse):
    """Allocate a default-filled array for a schema node backed by a FITS HDU.

    Acts only on subschemas that declare both ``fits_hdu`` and
    ``datatype`` and whose ``ndim`` (when present and non-zero) matches
    ``len(shape)``.  ``combiner`` and ``recurse`` are unused; they are
    part of the schema-walker callback signature.

    NOTE(review): ``im`` comes from an enclosing scope — presumably the
    model being populated; confirm against the outer function.
    """
    if 'fits_hdu' not in subschema:
        return
    datatype = subschema.get('datatype')
    if datatype is None:
        return
    wanted_ndim = subschema.get('ndim')
    if wanted_ndim and wanted_ndim != len(shape):
        return
    numpy_dtype = ndarray.asdf_datatype_to_numpy_dtype(datatype)
    keyword = '.'.join(path)
    fill_value = subschema.get('default', 0.0)
    im[keyword] = np.full(shape, fill_value, dtype=numpy_dtype)
def fill_hdu(subschema, path, combiner, shape, recurse):
    """Populate ``im`` at the dotted ``path`` with a default-filled array.

    Skips subschemas lacking ``fits_hdu`` or ``datatype``, and those
    whose declared ``ndim`` conflicts with ``len(shape)``.  The
    ``combiner`` and ``recurse`` arguments are required by the walker
    callback signature but are not used here.

    NOTE(review): ``im`` is a free variable from an enclosing scope —
    presumably the model under construction; verify in context.
    """
    dtype_spec = subschema.get('datatype')
    declared_ndim = subschema.get('ndim')
    if ('fits_hdu' not in subschema
            or dtype_spec is None
            or (declared_ndim and declared_ndim != len(shape))):
        return
    im['.'.join(path)] = np.full(
        shape,
        subschema.get('default', 0.0),
        dtype=ndarray.asdf_datatype_to_numpy_dtype(dtype_spec),
    )
def _cast(val, schema):
    """Cast ``val`` per ``schema``: convert to the schema's datatype,
    enforce ``ndim``/``max_ndim``, and wrap with the schema's ``tag``
    when one is given.  ``None`` passes straight through.

    Raises
    ------
    ValueError
        If the value's dimensionality violates ``ndim`` or ``max_ndim``.
    """
    val = _unmake_node(val)
    if val is None:
        return val
    if 'datatype' in schema:
        val = util.gentle_asarray(
            val, ndarray.asdf_datatype_to_numpy_dtype(schema['datatype']))
    if 'ndim' in schema:
        found_ndim = len(val.shape)
        if found_ndim != schema['ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected {0}, got {1}".format(
                    schema['ndim'], found_ndim))
    if 'max_ndim' in schema:
        found_ndim = len(val.shape)
        if found_ndim > schema['max_ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected <= {0}, got {1}".format(
                    schema['max_ndim'], found_ndim))
    tag = schema.get('tag')
    if tag is not None:
        val = tagged.tag_object(tag, val)
    return val
def _cast(val, schema):
    """Coerce ``val`` to the form ``schema`` demands.

    Applies the schema ``datatype`` conversion when present, validates
    ``ndim``/``max_ndim`` (raising ``ValueError`` on mismatch), and tags
    the result when the schema carries a ``tag``.  A ``None`` value is
    returned unchanged.
    """
    node = _unmake_node(val)
    if node is not None:
        if 'datatype' in schema:
            target_dtype = ndarray.asdf_datatype_to_numpy_dtype(
                schema['datatype'])
            node = util.gentle_asarray(node, target_dtype)
        if 'ndim' in schema and len(node.shape) != schema['ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected {0}, got {1}".format(
                    schema['ndim'], len(node.shape)))
        if 'max_ndim' in schema and len(node.shape) > schema['max_ndim']:
            raise ValueError(
                "Array has wrong number of dimensions. Expected <= {0}, got {1}".format(
                    schema['max_ndim'], len(node.shape)))
        schema_tag = schema.get('tag')
        if schema_tag is not None:
            node = tagged.tag_object(schema_tag, node)
    return node
def _make_default_array(attr, schema, ctx): dtype = schema.get('datatype') if dtype is not None: dtype = ndarray.asdf_datatype_to_numpy_dtype(dtype) ndim = schema.get('ndim', schema.get('max_ndim')) default = schema.get('default', None) primary_array_name = ctx.get_primary_array_name() if attr == primary_array_name: if ctx.shape is not None: shape = ctx.shape elif ndim is not None: shape = tuple([0] * ndim) else: shape = (0,) else: if dtype.names is not None: if ndim is None: shape = (0,) else: shape = tuple([0] * ndim) default = None else: has_primary_array_shape = False if primary_array_name is not None: primary_array = getattr(ctx, primary_array_name, None) has_primary_array_shape = primary_array is not None if has_primary_array_shape: if ndim is None: shape = primary_array.shape else: shape = primary_array.shape[-ndim:] elif ndim is None: shape = (0,) else: shape = tuple([0] * ndim) array = np.empty(shape, dtype=dtype) if default is not None: array[...] = default return array
def _make_default_array(attr, schema, ctx): dtype = schema.get('datatype') if dtype is not None: dtype = ndarray.asdf_datatype_to_numpy_dtype(dtype) ndim = schema.get('ndim', schema.get('max_ndim')) default = schema.get('default', None) primary_array_name = ctx.get_primary_array_name() if attr == primary_array_name: if ctx.shape is not None: shape = ctx.shape elif ndim is not None: shape = tuple([0] * ndim) else: shape = (0,) else: if dtype.names is not None: if ndim is None: shape = (0,) else: shape = tuple([0] * ndim) default = None else: has_primary_array_shape = False if primary_array_name is not None: primary_array = getattr(ctx, primary_array_name, None) has_primary_array_shape = primary_array is not None if has_primary_array_shape: if ndim is None: shape = primary_array.shape else: shape = primary_array.shape[-ndim:] elif ndim is None: shape = (0,) else: shape = tuple([0] * ndim) array = np.empty(shape, dtype=dtype) if default is not None: array[...] = default return array
def _cast(val, schema):
    """Cast ``val`` to the type described by ``schema``, with support for
    structured (table) arrays whose per-field shapes are inferred from the
    data when the schema omits them.

    Parameters
    ----------
    val : object
        Value to cast; may be a lazy ``ndarray.NDArrayType`` or any
        array-convertible object.  ``None`` is returned unchanged.
    schema : dict
        Schema fragment; ``datatype``, ``ndim`` and ``max_ndim`` are honored.
        For structured datatypes, per-field ``ndim``/``max_ndim``/``shape``
        entries are honored too.

    Returns
    -------
    object
        The cast value (a FITS record array for structured dtypes, a plain
        Python scalar for 0-d numpy scalars), or ``None``.

    Raises
    ------
    ValueError
        If the value's dimensionality — overall or of a structured field —
        violates the schema's ``ndim``/``max_ndim`` constraints.
    """
    val = _unmake_node(val)
    if val is None:
        return None

    if 'datatype' in schema:
        # Handle lazy array
        if isinstance(val, ndarray.NDArrayType):
            val = val._make_array()

        if (_is_struct_array_schema(schema) and len(val) and
                (_is_struct_array_precursor(val) or _is_struct_array(val))):
            # we are dealing with a structured array. Because we may
            # modify schema (to add shape), we make a deep copy of the
            # schema here:
            schema = copy.deepcopy(schema)

            # Walk schema fields alongside the fields of the first record;
            # only Mapping-typed field entries carry per-field constraints.
            for t, v in zip(schema['datatype'], val[0]):
                if not isinstance(t, Mapping):
                    continue

                aval = np.asanyarray(v)
                shape = aval.shape
                val_ndim = len(shape)

                # make sure that if 'ndim' is specified for a field,
                # it matches the dimensionality of val's field:
                if 'ndim' in t and val_ndim != t['ndim']:
                    raise ValueError(
                        "Array has wrong number of dimensions. "
                        "Expected {}, got {}".format(t['ndim'], val_ndim)
                    )

                if 'max_ndim' in t and val_ndim > t['max_ndim']:
                    raise ValueError(
                        "Array has wrong number of dimensions. "
                        "Expected <= {}, got {}".format(t['max_ndim'], val_ndim)
                    )

                # if shape of a field's value is not specified in the schema,
                # add it to the schema based on the shape of the actual data:
                if 'shape' not in t:
                    t['shape'] = shape

        dtype = ndarray.asdf_datatype_to_numpy_dtype(schema['datatype'])
        val = util.gentle_asarray(val, dtype)
        # Structured dtypes are exposed to callers as FITS record arrays.
        if dtype.fields is not None:
            val = _as_fitsrec(val)

    if 'ndim' in schema and len(val.shape) != schema['ndim']:
        raise ValueError(
            "Array has wrong number of dimensions. Expected {}, got {}"
            .format(schema['ndim'], len(val.shape)))

    if 'max_ndim' in schema and len(val.shape) > schema['max_ndim']:
        raise ValueError(
            "Array has wrong number of dimensions. Expected <= {}, got {}"
            .format(schema['max_ndim'], len(val.shape)))

    # Unwrap 0-d numpy scalars to plain Python scalars.
    if isinstance(val, np.generic) and np.isscalar(val):
        val = val.item()
    return val