def _write_data_raw_full(self, dat, fileobj=None):
    """Write native data.

    Parameters
    ----------
    dat : np.ndarray
        Should already have the on-disk data type, including byte-order
    fileobj : file-like, optional
        Opened file object. If None, one is opened from the mapped image.
    """
    if fileobj is None:
        # Create our own context
        with self.fileobj('image') as fileobj:
            return self._write_data_raw_full(dat, fileobj)

    # sanity checks for developers
    # (asserts because proper conversion/dispatch should happen before)
    assert isinstance(dat, np.ndarray), "Data should already be numpy"
    assert dat.dtype == self.dtype, "Data should already have correct type"

    if not fileobj.fobj.writable():
        raise RuntimeError('File object not writable')
    array_to_file(dat, fileobj, self.dtype,
                  offset=self._image.dataobj.offset,
                  order=self._image.dataobj.order)
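# Usage sketch (not from the source): `_write_data_raw_full` ultimately
# delegates to nibabel's `array_to_file`, writing an array that already has
# its on-disk dtype at a byte offset. The file name, offset, and header
# placeholder below are hypothetical.
import numpy as np
from nibabel.volumeutils import array_to_file

dat = np.arange(24, dtype='>i2').reshape(2, 3, 4)   # already big-endian int16
with open('raw_example.img', 'wb') as f:
    f.write(b'\0' * 352)                            # stand-in header block
    array_to_file(dat, f, dat.dtype, offset=352, order='F')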
def _write_data(self, mghfile, data, header):
    """Utility routine to write image

    Parameters
    ----------
    mghfile : file-like
        file-like object implementing ``seek`` or ``tell``, and ``write``
    data : array-like
        array to write
    header : analyze-type header object
        header
    """
    shape = header.get_data_shape()
    if data.shape != shape:
        raise HeaderDataError("Data should be shape (%s)"
                              % ", ".join(str(s) for s in shape))
    offset = header.get_data_offset()
    out_dtype = header.get_data_dtype()
    array_to_file(data, mghfile, out_dtype, offset)
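# Usage sketch (assumption: any header exposing `get_data_shape`,
# `get_data_offset` and `get_data_dtype` fits the pattern above; a
# `Nifti1Header` is used as a stand-in for the analyze-type header, and
# the file name is made up).
import numpy as np
from nibabel import Nifti1Header
from nibabel.volumeutils import array_to_file

hdr = Nifti1Header()
hdr.set_data_shape((2, 3, 4))
hdr.set_data_dtype(np.int16)

data = np.zeros((2, 3, 4))
if data.shape != hdr.get_data_shape():              # the guard in `_write_data`
    raise ValueError('Data should be shape (2, 3, 4)')
with open('example.img', 'wb') as f:
    array_to_file(data, f, hdr.get_data_dtype(), hdr.get_data_offset())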
@classmethod
def save_new(cls, dat, file_like, like=None, casting='unsafe',
             _savef=False, **metadata):
    """Save a new volume to disk.

    Parameters
    ----------
    dat : MappedArray or torch.Tensor or np.ndarray
        Data to write.
    file_like : str or file-like
        Output path (anything accepted by `filespec_to_file_map`).
    like : MappedArray, optional
        Reference volume from which to borrow metadata.
    casting : str, default='unsafe'
        Casting rule used for the final dtype conversion.
    _savef : bool, default=False
        If True, `dat` holds already-scaled floating-point values that
        must be unscaled before writing.
    **metadata
        Additional metadata fields to set in the header.
    """
    if isinstance(dat, MappedArray):
        if like is None:
            like = dat
        dat = dat.data(numpy=True)
    if torch.is_tensor(dat):
        dat = dat.detach().cpu()
    dat = np.asanyarray(dat)
    if like is not None:
        like = map_array(like)

    # guess data type
    def guess_dtype():
        # priority: explicit metadata > `like` image > input data
        dtype = metadata.get('dtype', None)
        if dtype is None and like is not None:
            dtype = like.dtype
        if dtype is None:
            dtype = dat.dtype
        dtype = dtypes.dtype(dtype).numpy
        return dtype
    dtype = guess_dtype()

    # guess output format
    def guess_format():
        # 1) from extension
        ok_klasses = []
        if isinstance(file_like, str):
            base, ext = os.path.splitext(file_like)
            if ext.lower() == '.gz':
                base, ext = os.path.splitext(base)
            ok_klasses = [klass for klass in all_image_classes
                          if ext in klass.valid_exts]
            if len(ok_klasses) == 1:
                return ok_klasses[0]
        # 2) from like
        if isinstance(like, BabelArray):
            return type(like._image)
        # 3) from extension (if conflict)
        if len(ok_klasses) != 0:
            return ok_klasses[0]
        # 4) fallback to nifti-1
        return nib.Nifti1Image
    format = guess_format()

    # build header
    if isinstance(like, BabelArray):
        # defer metadata conversion to nibabel
        header = format.header_class.from_header(like._image.dataobj._header)
    else:
        header = format.header_class()
    if like is not None:
        # copy generic metadata
        like_metadata = like.metadata()
        like_metadata.update(metadata)
        metadata = like_metadata
    # set shape now so that we can set zooms/etc
    header.set_data_shape(dat.shape)
    header = metadata_to_header(header, metadata)

    # check endianness
    disk_byteorder = header.endianness
    data_byteorder = dtype.byteorder
    if disk_byteorder == '=':
        disk_byteorder = '<' if sys.byteorder == 'little' else '>'
    if data_byteorder == '=':
        data_byteorder = '<' if sys.byteorder == 'little' else '>'
    if disk_byteorder != data_byteorder:
        dtype = dtype.newbyteorder()

    # set scale
    if hasattr(header, 'set_slope_inter'):
        slope, inter = header.get_slope_inter()
        if slope is None:
            slope = 1
        if inter is None:
            inter = 0
        header.set_slope_inter(slope, inter)

    # unscale
    if _savef:
        assert dtypes.dtype(dat.dtype).is_floating_point
        slope, inter = header.get_slope_inter()
        if inter not in (0, None) or slope not in (1, None):
            dat = dat.copy()
        if inter not in (0, None):
            dat -= inter
        if slope not in (1, None):
            dat /= slope

    # cast + set dtype
    dat = volutils.cast(dat, dtype, casting)
    header.set_data_dtype(dat.dtype)

    # create image object
    image = format(dat, affine=None, header=header)

    # write everything
    file_map = format.filespec_to_file_map(file_like)
    fmap_header = file_map.get('header', file_map.get('image'))
    fmap_image = file_map.get('image')
    fmap_footer = file_map.get('footer', file_map.get('image'))

    fhdr = fmap_header.get_prepare_fileobj('wb')
    if hasattr(header, 'writehdr_to'):
        header.writehdr_to(fhdr)
    elif hasattr(header, 'write_to'):
        header.write_to(fhdr)
    if fmap_image == fmap_header:
        fimg = fhdr
    else:
        fimg = fmap_image.get_prepare_fileobj('wb')
    array_to_file(dat, fimg, dtype,
                  offset=header.get_data_offset(),
                  order=image.ImageArrayProxy.order)
    if fmap_image == fmap_footer:
        fftr = fimg
    else:
        fftr = fmap_footer.get_prepare_fileobj('wb')
    if hasattr(header, 'writeftr_to'):
        header.writeftr_to(fftr)
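# Standalone sketch (not from the source) of two steps in `save_new`:
# resolving numpy's native ('=') byte-order marker before comparing disk
# vs. data endianness, and undoing slope/inter scaling before the cast.
# The dtype and scale values are made up for illustration.
import sys
import numpy as np

def explicit_byteorder(bo):
    # numpy reports '=' for native-order dtypes, so both sides must be
    # resolved to an explicit '<' or '>' before they can be compared.
    return ('<' if sys.byteorder == 'little' else '>') if bo == '=' else bo

dtype = np.dtype('f4')
disk_bo = explicit_byteorder('>')                   # pretend on-disk is big-endian
data_bo = explicit_byteorder(dtype.byteorder)
if disk_bo != data_bo:
    dtype = dtype.newbyteorder()                    # match the on-disk order

dat = np.array([10., 20., 30.])
slope, inter = 2.0, 5.0                             # header scaling: raw*slope + inter
if inter not in (0, None):
    dat = dat - inter                               # subtract intercept first...
if slope not in (1, None):
    dat = dat / slope                               # ...then divide by slope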