def save_fxn(res, bigdict):
    outmodel = bigdict['outmodel']
    verbose = bigdict['verbose']
    contmask = bigdict['contmask']
    outfn = bigdict['outfn']
    outmodelfn = bigdict['outmodelfn']
    modsaveonly = bigdict['modsaveonly']
    noModsave = bigdict['noModsave']
    cat, modelim, skyim, psf, hdr, msk, prbexport, name = res
    hdr = fits.Header.fromstring(hdr)
    # Data saving
    if verbose:
        print('Writing %s %s, found %d sources.' % (outfn, name, len(cat)))
        sys.stdout.flush()
    # primary extension includes only the header.
    if not modsaveonly:
        fits.append(outfn, numpy.zeros(0), hdr)
    hdupsf = fits.BinTableHDU(psf.serialize())
    hdupsf.name = hdr['EXTNAME'][:-4] + '_PSF'
    hducat = fits.BinTableHDU(cat)
    hducat.name = hdr['EXTNAME'][:-4] + '_CAT'
    if not modsaveonly:
        hdulist = fits.open(outfn, mode='append')
        hdulist.append(hdupsf)  # append the PSF extension for the CCD
        hdulist.append(hducat)  # append the catalog extension for the CCD
        hdulist.close(closed=True)
    if outmodel:
        hdr['EXTNAME'] = hdr['EXTNAME'][:-4] + '_MOD'
        # RICE should be significantly better here and is supported in
        # mrdfits(?), but compression_type='RICE_1' seems to cause
        # quantize_level to be ignored.
        compkw = {'compression_type': 'GZIP_1',
                  'quantize_method': 1,
                  'quantize_level': -4,
                  'tile_size': modelim.shape}
        modhdulist = fits.open(outmodelfn, mode='append')
        if not noModsave:
            modhdulist.append(fits.CompImageHDU(modelim, hdr, **compkw))
        hdr['EXTNAME'] = hdr['EXTNAME'][:-4] + '_SKY'
        modhdulist.append(fits.CompImageHDU(skyim, hdr, **compkw))
        if msk is not None:
            hdr['EXTNAME'] = hdr['EXTNAME'][:-4] + '_MSK'
            modhdulist.append(
                fits.CompImageHDU(msk.astype('i4'), hdr, **compkw))
        if contmask:
            prnebnames = ['prN', 'prL', 'prR', 'prE']
            compkw = {'compression_type': 'GZIP_1',
                      'quantize_method': 1,
                      'quantize_level': 2,
                      'tile_size': (prbexport.shape[0], prbexport.shape[1])}
            for i in range(prbexport.shape[2]):
                hdr['EXTNAME'] = hdr['EXTNAME'][:-4] + '_' + prnebnames[i]
                modhdulist.append(
                    fits.CompImageHDU(prbexport[:, :, i], hdr, **compkw))
        modhdulist.close(closed=True)
    return
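# Usage sketch (not from the original source): the append-mode compressed
# write pattern that save_fxn relies on, reduced to a standalone function.
# The file name, array, and header are illustrative. A negative
# quantize_level asks cfitsio for an absolute quantization step rather than
# one scaled to the measured image noise.
import numpy
from astropy.io import fits

def append_compressed_example(outfn, image, hdr=None):
    compkw = {'compression_type': 'GZIP_1',
              'quantize_method': 1,
              'quantize_level': -4,
              'tile_size': image.shape}
    # mode='append' creates the file if it does not exist yet
    with fits.open(outfn, mode='append') as hdulist:
        hdulist.append(fits.CompImageHDU(image, hdr, **compkw))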
def write_compressed_image_fits(fname, data, header, mask=None,
                                fix_invalid=True, filled_value=0.0, **kwargs):
    """
    Take a data/header pair and write a compressed FITS file.

    Caveat: only 1D, 2D, or 3D images are currently supported by Astropy's
    FITS compression. To be compressed, an n-dimensional image array must
    have at least n - 3 singleton dimensions.

    Parameters
    ----------
    fname : `str`
        File name, with extension.
    data : `numpy.ndarray`
        n-dimensional data array.
    header : `dict`
        A header dictionary.
    compression_type : `str`, optional
        Compression algorithm: one of 'RICE_1', 'RICE_ONE', 'PLIO_1',
        'GZIP_1', 'GZIP_2', 'HCOMPRESS_1'.
    hcomp_scale : `float`, optional
        HCOMPRESS scale parameter.
    """
    dshape = data.shape
    dim = data.ndim
    if dim - np.count_nonzero(np.array(dshape) == 1) > 3:
        return 0
    else:
        if fix_invalid:
            data[np.isnan(data)] = filled_value
        if not kwargs:  # fixed: `kwargs is {}` was always False
            kwargs.update({'compression_type': 'RICE_1',
                           'quantize_level': 4.0})
        if isinstance(fname, str):
            fname = os.path.expanduser(fname)
        header, data = headersqueeze(header, data)
        # TODO: figure out how to keep information about the trimmed axes
        # in the FITS header.
        hdunew = fits.CompImageHDU(data=data, header=header, **kwargs)
        if mask is None:
            hdulnew = fits.HDUList([fits.PrimaryHDU(), hdunew])
        else:
            hdumask = fits.CompImageHDU(data=mask.astype(np.uint8), **kwargs)
            hdulnew = fits.HDUList([fits.PrimaryHDU(), hdunew, hdumask])
        hdulnew.writeto(fname, output_verify='fix')
        return 1
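# Hypothetical call to write_compressed_image_fits above; the file name,
# array, and header contents are made up, and the exact header type depends
# on what the module's headersqueeze helper accepts. With no extra keyword
# arguments the function falls back to RICE_1 at quantize_level 4.0.
import numpy as np

data = np.random.rand(512, 512).astype('float32')
header = {'TELESCOP': 'EXAMPLE', 'BUNIT': 'adu'}
ok = write_compressed_image_fits('~/example_compressed.fits', data, header,
                                 compression_type='RICE_1',
                                 quantize_level=16.0)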
def save_image_hdu(hdu, filename, overwrite=False, logger=logger):
    """Save a simple HDU to a FITS file."""
    base, ext = os.path.splitext(filename)
    if ext in _compresses:
        ext2 = ext
        base, ext = os.path.splitext(base)
    elif ext not in _supported_formats:
        # TODO: decide whether falling back to FITS is better than raising.
        ext = ".fits"
        ext2 = None  # fixed: ext2 was undefined on this branch
    else:
        ext2 = None
    if ext2 is not None:
        ext += ext2
    filename = base + ext
    logger.debug('Saving fits file to: %s', filename)
    if ext.endswith('.fz'):  # fixed: `ext == '.fz'` missed '.fits.fz'
        p = fits.PrimaryHDU()
        c = fits.CompImageHDU(hdu.data, header=hdu.header,
                              compression_type='RICE_1')
        fits.HDUList([p, c]).writeto(filename, overwrite=overwrite)
    else:
        hdu.writeto(filename, overwrite=overwrite)
def writeFits_from_dict(amp_dict, outfile, template_file, bitpix=32):
    '''
    Same as eotest imutils.writeFits, but takes a dictionary of amplifier
    arrays as input rather than a list of afwImage images.
    '''
    output = fits.HDUList()
    output.append(fits.PrimaryHDU())
    all_amps = imutils.allAmps()
    for amp in all_amps:
        if bitpix < 0:
            output.append(fits.ImageHDU(data=amp_dict[amp]))
        else:
            output.append(fits.CompImageHDU(data=amp_dict[amp],
                                            compression_type='RICE_1'))
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyUserWarning,
                                append=True)
        with fits.open(template_file) as template:
            output[0].header.update(template[0].header)
            output[0].header['FILENAME'] = outfile
            for amp in all_amps:
                output[amp].header.update(template[amp].header)
                imutils.set_bitpix(output[amp], bitpix)
                print(np.median(output[amp].data.ravel()))
            for i in (-3, -2, -1):
                output.append(template[i])
            imutils.fitsWriteto(output, outfile, overwrite=True,
                                checksum=True)
def test_uint(self, utype, compressed):
    bits = 8 * int(utype[1])
    if platform.architecture()[0] == '64bit' or bits != 64:
        if compressed:
            hdu = fits.CompImageHDU(
                np.array([-3, -2, -1, 0, 1, 2, 3], dtype=np.int64))
            hdu_number = 1
        else:
            hdu = fits.PrimaryHDU(
                np.array([-3, -2, -1, 0, 1, 2, 3], dtype=np.int64))
            hdu_number = 0
        hdu.scale(f'int{bits:d}', '', bzero=2**(bits - 1))
        with ignore_warnings():
            hdu.writeto(self.temp('tempfile.fits'), overwrite=True)
        with fits.open(self.temp('tempfile.fits'), uint=True) as hdul:
            assert hdul[hdu_number].data.dtype == self.utype_map[utype]
            assert (hdul[hdu_number].data == np.array(
                [(2**bits) - 3, (2**bits) - 2, (2**bits) - 1, 0, 1, 2, 3],
                dtype=self.utype_map[utype])).all()
            hdul.writeto(self.temp('tempfile1.fits'))
            with fits.open(self.temp('tempfile1.fits'),
                           uint16=True) as hdul1:
                d1 = hdul[hdu_number].data
                d2 = hdul1[hdu_number].data
                assert (d1 == d2).all()
                if not compressed:
                    # TODO: Enable these lines if CompImageHDUs ever grow
                    # .section support
                    sec = hdul[hdu_number].section[:1]
                    assert sec.dtype.name == f'uint{bits}'
                    assert (sec == d1[:1]).all()
def write_bias_subtracted_MEF(self, outfile, gains=None, overwrite=True):
    """
    Write a bias-subtracted MEF file with the same format as the
    original raw FITS file.

    Parameters
    ----------
    outfile: str
        Output filename.
    gains: dict [None]
        Gains to apply to the pixel data. If None, then pixel values are
        written as ADUs.
    overwrite: bool [True]
        Flag to overwrite an existing output file.
    """
    hdulist = fits.HDUList()
    with fits.open(self.imfile) as template:
        hdulist.append(template[0])
        hdulist[0].header['ORIGFILE'] = hdulist[0].header['FILENAME']
        hdulist[0].header['FILENAME'] = outfile
        for amp in self:
            imarr = self.bias_subtracted_image(amp).getImage().getArray()
            if gains is not None:
                imarr *= gains[amp]
            hdulist.append(
                fits.CompImageHDU(data=imarr, header=template[amp].header))
        with warnings.catch_warnings():
            for warning in (UserWarning, AstropyWarning, AstropyUserWarning):
                warnings.filterwarnings('ignore', category=warning,
                                        append=True)
            # fixed: pass the overwrite argument through instead of
            # hard-coding True
            fitsWriteto(hdulist, outfile, overwrite=overwrite)
def walk_compress(eodir):
    for root, dirs, files in os.walk(eodir):
        for f in files:
            if (f[-4:] != 'fits'
                    or os.stat(os.path.join(root, f)).st_size < 1e6):
                continue
            print("Compressing %s" % f)
            h = pyfits.open(os.path.join(root, f))
            hdulist = pyfits.HDUList([pyfits.PrimaryHDU(header=h[0].header)])
            # extension HDUs: RICE-compress the 16 amplifier images
            for i in range(16):
                exthdu = pyfits.CompImageHDU(data=h[i + 1].data,
                                             header=h[i + 1].header.copy(),
                                             compression_type='RICE_1')
                hdulist.append(exthdu)
            # auxiliary data: copy any remaining extensions verbatim
            for i in range(17, 30, 1):
                try:
                    hdulist.append(h[i])
                except IndexError:  # fixed: was a bare except
                    pass
            hdulist.writeto(os.path.join(root, f), clobber=True)
            h.close()
            del h
def test_compressed_image_data_float32(self):
    n = np.arange(100, dtype='float32')
    hdu = fits.ImageHDU(n)
    comp_hdu = fits.CompImageHDU(hdu.data, hdu.header)
    comp_hdu.writeto(self.temp('tmp.fits'), checksum=True)
    hdu.writeto(self.temp('uncomp.fits'), checksum=True)
    with fits.open(self.temp('tmp.fits'), checksum=True) as hdul:
        assert np.all(hdul[1].data == comp_hdu.data)
        assert np.all(hdul[1].data == hdu.data)
        assert 'CHECKSUM' in hdul[0].header
        assert hdul[0].header['CHECKSUM'] == 'D8iBD6ZAD6fAD6ZA'
        assert 'DATASUM' in hdul[0].header
        assert hdul[0].header['DATASUM'] == '0'
        assert 'CHECKSUM' in hdul[1].header
        assert 'DATASUM' in hdul[1].header
        if not sys.platform.startswith('win32'):
            # The checksum ends up being different on Windows, possibly due
            # to slight floating point differences.
            assert hdul[1]._header['CHECKSUM'] == 'eATIf3SHe9SHe9SH'
            assert hdul[1]._header['DATASUM'] == '1277667818'
        with fits.open(self.temp('uncomp.fits'), checksum=True) as hdul2:
            header_comp = hdul[1]._header
            header_uncomp = hdul2[1].header
            assert 'ZHECKSUM' in header_comp
            assert 'CHECKSUM' in header_uncomp
            assert header_uncomp['CHECKSUM'] == 'Cgr5FZo2Cdo2CZo2'
            assert header_comp['ZHECKSUM'] == header_uncomp['CHECKSUM']
            assert 'ZDATASUM' in header_comp
            assert 'DATASUM' in header_uncomp
            assert header_uncomp['DATASUM'] == '2393636889'
            assert header_comp['ZDATASUM'] == header_uncomp['DATASUM']
def write_fiberflat(outfile, fiberflat, header=None):
    """Write a FiberFlat object to outfile.

    Args:
        outfile: filepath string or (night, expid, camera) tuple
        fiberflat: FiberFlat object
        header: (optional) dict or fits.Header object to use as HDU 0 header

    Returns:
        filepath of file that was written
    """
    outfile = makepath(outfile, 'fiberflat')
    if header is None:
        hdr = fitsheader(fiberflat.header)
    else:
        hdr = fitsheader(header)
    if fiberflat.chi2pdf is not None:
        hdr['chi2pdf'] = float(fiberflat.chi2pdf)
    add_dependencies(hdr)
    ff = fiberflat  #- shorthand
    hdus = fits.HDUList()
    hdus.append(fits.PrimaryHDU(ff.fiberflat.astype('f4'), header=hdr))
    hdus.append(fits.ImageHDU(ff.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.CompImageHDU(ff.mask, name='MASK'))
    hdus.append(fits.ImageHDU(ff.meanspec.astype('f4'), name='MEANSPEC'))
    hdus.append(fits.ImageHDU(ff.wave.astype('f4'), name='WAVELENGTH'))
    hdus.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
def fits_mean_file(files, outfile, overwrite=True, bitpix=32):
    output = fits.HDUList()
    output.append(fits.PrimaryHDU())
    all_amps = allAmps()
    for amp in all_amps:
        images = [afwImage.ImageF(item, dm_hdu(amp)) for item in files]
        if lsst.afw.__version__.startswith('12.0'):
            images = afwImage.vectorImageF(images)
        mean_image = afwMath.statisticsStack(images, afwMath.MEAN)
        if bitpix < 0:
            output.append(fits.ImageHDU(data=mean_image.getArray()))
        else:
            output.append(fits.CompImageHDU(data=mean_image.getArray(),
                                            compression_type='RICE_1'))
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyUserWarning,
                                append=True)
        with fits.open(files[0]) as template:
            output[0].header.update(template[0].header)
            output[0].header['FILENAME'] = os.path.basename(outfile)
            for amp in all_amps:
                output[amp].header.update(template[amp].header)
                set_bitpix(output[amp], bitpix)
            for i in (-3, -2, -1):
                output.append(template[i])
            fitsWriteto(output, outfile, overwrite=overwrite)
def test_compressed_image_data_float32(self):
    n = np.arange(100, dtype='float32')
    hdu = fits.ImageHDU(n)
    comp_hdu = fits.CompImageHDU(hdu.data, hdu.header)
    comp_hdu.writeto(self.temp('tmp.fits'), checksum=True)
    hdu.writeto(self.temp('uncomp.fits'), checksum=True)
    with fits.open(self.temp('tmp.fits'), checksum=True) as hdul:
        assert np.all(hdul[1].data == comp_hdu.data)
        assert np.all(hdul[1].data == hdu.data)
        assert 'CHECKSUM' in hdul[0].header
        assert hdul[0].header['CHECKSUM'] == 'D8iBD6ZAD6fAD6ZA'
        assert 'DATASUM' in hdul[0].header
        assert hdul[0].header['DATASUM'] == '0'
        assert 'CHECKSUM' in hdul[1].header
        assert 'DATASUM' in hdul[1].header
        # The checksum ends up being different on Windows and s390/bigendian,
        # possibly due to slight floating point differences? See gh-10921.
        # TODO fix these so they work on all platforms; otherwise pointless.
        # assert hdul[1]._header['CHECKSUM'] == 'eATIf3SHe9SHe9SH'
        # assert hdul[1]._header['DATASUM'] == '1277667818'
        with fits.open(self.temp('uncomp.fits'), checksum=True) as hdul2:
            header_comp = hdul[1]._header
            header_uncomp = hdul2[1].header
            assert 'ZHECKSUM' in header_comp
            assert 'CHECKSUM' in header_uncomp
            assert header_uncomp['CHECKSUM'] == 'Cgr5FZo2Cdo2CZo2'
            assert header_comp['ZHECKSUM'] == header_uncomp['CHECKSUM']
            assert 'ZDATASUM' in header_comp
            assert 'DATASUM' in header_uncomp
            assert header_uncomp['DATASUM'] == '2393636889'
            assert header_comp['ZDATASUM'] == header_uncomp['DATASUM']
def write_sky(outfile, skymodel, header=None):
    """Write a sky model.

    Args:
        outfile : filename or (night, expid, camera) tuple
        skymodel : SkyModel object, with the following attributes
            wave : 1D wavelength in vacuum Angstroms
            flux : 2D[nspec, nwave] sky flux
            ivar : 2D inverse variance of sky flux
            mask : 2D mask for sky flux
        header : optional fits header data (fits.Header, dict, or list)
    """
    outfile = makepath(outfile, 'sky')
    #- Convert header to fits.Header if needed
    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(skymodel.header)
    add_dependencies(hdr)
    hx = fits.HDUList()
    hdr['EXTNAME'] = ('SKY', 'no dimension')
    hx.append(fits.PrimaryHDU(skymodel.flux.astype('f4'), header=hdr))
    hx.append(fits.ImageHDU(skymodel.ivar.astype('f4'), name='IVAR'))
    hx.append(fits.CompImageHDU(skymodel.mask, name='MASK'))
    hx.append(fits.ImageHDU(skymodel.wave.astype('f4'), name='WAVELENGTH'))
    hx.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
def saveImage(self, img, cleanImg=None, extraName=None, doStack=True,
              frameNum=None):
    filename = self._getNextName(frameNum=frameNum)
    hdus = pyfits.HDUList()
    hdus.append(pyfits.CompImageHDU(img, name='IMAGE', uint=True))
    hdus.writeto(filename, overwrite=True)
    self.logger.debug('saveImage: %s', filename)
    if extraName is not None:
        linkname = filename.parent / extraName
        if platform.system() == 'Windows':
            hdus.writeto(linkname)  # creating a symlink requires admin rights
        else:
            linkname.symlink_to(filename.name)
    if doStack:
        if cleanImg is None:
            cleanImg = img
        self._updateStack(cleanImg)
    return filename
def write_flux_calibration(outfile, fluxcalib, header=None):
    """Write a flux calibration.

    Args:
        outfile : output file name
        fluxcalib : FluxCalib object

    Options:
        header : dict-like object of key/value pairs to include in header
    """
    hx = fits.HDUList()
    hdr = fitsheader(header)
    add_dependencies(hdr)
    hdr['EXTNAME'] = 'FLUXCALIB'
    hdr['BUNIT'] = ('1e+17 cm2 electron s / erg',
                    'i.e. (electron/Angstrom) / (1e-17 erg/s/cm2/Angstrom)')
    hx.append(fits.PrimaryHDU(fluxcalib.calib.astype('f4'), header=hdr))
    hx.append(fits.ImageHDU(fluxcalib.ivar.astype('f4'), name='IVAR'))
    hx.append(fits.CompImageHDU(fluxcalib.mask, name='MASK'))
    hx.append(fits.ImageHDU(fluxcalib.wave.astype('f4'), name='WAVELENGTH'))
    hx[-1].header['BUNIT'] = 'Angstrom'
    hx.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
def make_fits(image, filename):
    """
    Create a FITS file with the given data and filename. If the file
    already exists, confirm with the user before overwriting it.
    """
    hdu = fits.CompImageHDU(image["data"], header=image["header"])
    if os.path.isfile(filename):
        # file already exists
        if overwrite_ok:
            os.remove(filename)
            hdu.writeto(filename)
            # print("File Overwritten: {}".format(filename))
        else:
            print("Corrected file: \"{}\" already exists. \nOverwrite? [y/n]"
                  .format(filename))
            if user_confirm():
                os.remove(filename)
                hdu.writeto(filename)
                print("File Overwritten: {}".format(filename))
            else:
                print("Corrected file not overwritten.")
    else:
        print("Created file: {}".format(filename))
        hdu.writeto(filename)
def writeFits(images, outfile, template_file, bitpix=32):
    output = fits.HDUList()
    output.append(fits.PrimaryHDU())
    all_amps = allAmps(template_file)
    for amp in all_amps:
        if bitpix < 0:
            output.append(fits.ImageHDU(data=images[amp].getArray()))
        else:
            output.append(fits.CompImageHDU(data=images[amp].getArray(),
                                            compression_type='RICE_1'))
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyWarning, append=True)
        warnings.filterwarnings('ignore', category=AstropyUserWarning,
                                append=True)
        with fits.open(template_file) as template:
            output[0].header.update(template[0].header)
            output[0].header['FILENAME'] = outfile
            for amp in all_amps:
                output[amp].header.update(template[amp].header)
                set_bitpix(output[amp], bitpix)
            for i in (-3, -2, -1):
                output.append(template[i])
            fitsWriteto(output, outfile, overwrite=True, checksum=True)
def test_compressed_image_data_int16(self):
    n = np.arange(100, dtype='int16')
    hdu = fits.ImageHDU(n)
    comp_hdu = fits.CompImageHDU(hdu.data, hdu.header)
    comp_hdu.writeto(self.temp('tmp.fits'), checksum=True)
    hdu.writeto(self.temp('uncomp.fits'), checksum=True)
    with fits.open(self.temp('tmp.fits'), checksum=True) as hdul:
        assert np.all(hdul[1].data == comp_hdu.data)
        assert np.all(hdul[1].data == hdu.data)
        assert 'CHECKSUM' in hdul[0].header
        assert hdul[0].header['CHECKSUM'] == 'D8iBD6ZAD6fAD6ZA'
        assert 'DATASUM' in hdul[0].header
        assert hdul[0].header['DATASUM'] == '0'
        assert 'CHECKSUM' in hdul[1].header
        assert hdul[1]._header['CHECKSUM'] == 'J5cCJ5c9J5cAJ5c9'
        assert 'DATASUM' in hdul[1].header
        assert hdul[1]._header['DATASUM'] == '2453673070'
        with fits.open(self.temp('uncomp.fits'), checksum=True) as hdul2:
            header_comp = hdul[1]._header
            header_uncomp = hdul2[1].header
            assert 'ZHECKSUM' in header_comp
            assert 'CHECKSUM' in header_uncomp
            assert header_uncomp['CHECKSUM'] == 'ZE94eE91ZE91bE91'
            assert header_comp['ZHECKSUM'] == header_uncomp['CHECKSUM']
            assert 'ZDATASUM' in header_comp
            assert 'DATASUM' in header_uncomp
            assert header_uncomp['DATASUM'] == '160565700'
            assert header_comp['ZDATASUM'] == header_uncomp['DATASUM']
def saveImage(name, img):
    hdus = pyfits.HDUList()
    hdus.append(pyfits.CompImageHDU(img, name='IMAGE', uint=True))
    filename = name + '.fits'
    hdus.writeto(filename, overwrite=True)
    return filename
def test_simple_write_compressed_difftypeinst(tmpdir):
    # `hdu_type=fits.CompImageHDU` and `hdu_type=fits.CompImageHDU()`
    # should produce identical FITS files
    data, header = _fits.read(TEST_AIA_IMAGE)[0]
    outfile_type = str(tmpdir / "test_type.fits")
    outfile_inst = str(tmpdir / "test_inst.fits")
    _fits.write(outfile_type, data, header, hdu_type=fits.CompImageHDU)
    _fits.write(outfile_inst, data, header, hdu_type=fits.CompImageHDU())
    assert fits.FITSDiff(outfile_type, outfile_inst,
                         ignore_comments=['PCOUNT']).identical
def H_Compression(self, scale_value):
    """
    Uses Astropy's H-transform algorithm to lossily (or losslessly)
    compress the background noise of the image.

    @type self: SuperBit_Compression
    @type scale_value: Int (lossy compression factor)
    @rtype: Numpy Matrix
    """
    self.flag_stars()
    os.chdir(COMP_SOURCE)
    fits.CompImageHDU(self.h_compress, compression_type='HCOMPRESS_1',
                      hcomp_scale=scale_value,
                      hcomp_smooth=1).writeto("HCOMPRESS.fits",
                                              overwrite=True)
    fits.CompImageHDU(self.h_compress, compression_type='HCOMPRESS_1',
                      hcomp_scale=scale_value,
                      hcomp_smooth=1).writeto("hcomp_" + self.image_name,
                                              overwrite=True)
    self.h_compress = fits.getdata("hcomp_" + self.image_name)
    fitsio.write("hcomp_" + self.image_name, self.h_compress,
                 header=self.header, clobber=True)
def compress_fits(hdulist):
    new_hdulist = []
    for h in hdulist:
        if type(h) is fits.PrimaryHDU:
            if h.data is not None:
                # move the data out of the primary HDU so it can be
                # compressed in an extension
                new_hdulist.append(fits.PrimaryHDU(None, header=h.header))
                new_hdulist.append(fits.CompImageHDU(h.data, header=h.header))
            else:
                new_hdulist.append(h)
        elif type(h) is fits.ImageHDU:
            new_hdu = fits.CompImageHDU(h.data, header=h.header)
            new_hdulist.append(new_hdu)
        else:
            new_hdulist.append(h)
    return fits.HDUList(new_hdulist)
def test_failing_compressed_datasum(self):
    """
    Regression test for https://github.com/astropy/astropy/issues/4587
    """
    n = np.ones((10, 10), dtype='float32')
    comp_hdu = fits.CompImageHDU(n)
    comp_hdu.writeto(self.temp('tmp.fits'), checksum=True)
    with fits.open(self.temp('tmp.fits'), checksum=True) as hdul:
        assert np.all(hdul[1].data == comp_hdu.data)
def saveImage(self, img, name=None):
    if name is None:
        name = self._getNextName()
    hdus = pyfits.HDUList()
    hdus.append(pyfits.CompImageHDU(img, name='IMAGE', uint=True))
    hdus.writeto(name, overwrite=True)
    return name
def write_frame(outfile, frame, header=None, fibermap=None):
    """Write a frame FITS file and return the path to the file written.

    Args:
        outfile: full path to output file, or tuple (night, expid, channel)
        frame: desispec.frame.Frame object with wave, flux, ivar...

    Optional:
        header: astropy.io.fits.Header or dict to override frame.header
        fibermap: table to store as FIBERMAP HDU

    Returns:
        full filepath of output file that was written

    Note:
        to create a Frame object to pass into write_frame,
        frame = Frame(wave, flux, ivar, resolution_data)
    """
    outfile = makepath(outfile, 'frame')
    if header is not None:
        hdr = fitsheader(header)
    else:
        hdr = fitsheader(frame.meta)
    add_dependencies(hdr)
    hdus = fits.HDUList()
    x = fits.PrimaryHDU(frame.flux.astype('f4'), header=hdr)
    x.header['EXTNAME'] = 'FLUX'
    hdus.append(x)
    hdus.append(fits.ImageHDU(frame.ivar.astype('f4'), name='IVAR'))
    hdus.append(fits.CompImageHDU(frame.mask, name='MASK'))
    hdus.append(fits.ImageHDU(frame.wave.astype('f4'), name='WAVELENGTH'))
    hdus.append(fits.ImageHDU(frame.resolution_data.astype('f4'),
                              name='RESOLUTION'))
    if fibermap is not None:
        hdus.append(fits.BinTableHDU(np.asarray(fibermap), name='FIBERMAP'))
    elif frame.fibermap is not None:
        hdus.append(fits.BinTableHDU(np.asarray(frame.fibermap),
                                     name='FIBERMAP'))
    elif frame.spectrograph is not None:
        # hard-coded, as in desispec.frame
        x.header['FIBERMIN'] = 500 * frame.spectrograph
    else:
        log.error("You are likely writing a frame without sufficient "
                  "fiber info")
    hdus.writeto(outfile + '.tmp', clobber=True, checksum=True)
    os.rename(outfile + '.tmp', outfile)
    return outfile
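# The desispec writers above (write_fiberflat, write_sky,
# write_flux_calibration, write_frame) share one layout: floating-point
# arrays are stored as float32 ImageHDUs, while integer mask arrays go into
# CompImageHDUs, whose default RICE_1 compression is lossless for integer
# data (quantization only applies to floating-point images). A minimal
# standalone sketch of that layout, with made-up names and shapes:
import numpy as np
from astropy.io import fits

flux = np.zeros((500, 4000), dtype='f4')
ivar = np.ones((500, 4000), dtype='f4')
mask = np.zeros((500, 4000), dtype='i4')
hdus = fits.HDUList()
hdus.append(fits.PrimaryHDU(flux))
hdus[0].header['EXTNAME'] = 'FLUX'
hdus.append(fits.ImageHDU(ivar, name='IVAR'))
hdus.append(fits.CompImageHDU(mask, name='MASK'))  # lossless for integers
hdus.writeto('frame-example.fits', overwrite=True, checksum=True)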
def save_hdu(hdu, filename, compress=False, overwrite=False):
    if not compress:
        logger.debug('Saving fits file to: {}'.format(filename))
        hdu.writeto(filename, overwrite=overwrite)
    else:
        # fixed: the original `if not filename[:-3] != '.fz'` sliced the
        # wrong end of the string and double-negated the test
        if not filename.endswith('.fz'):
            filename = filename + '.fz'
        logger.debug('Saving fits file to: {}'.format(filename))
        p = fits.PrimaryHDU()
        c = fits.CompImageHDU(hdu.data, header=hdu.header,
                              compression_type='RICE_1')
        fits.HDUList([p, c]).writeto(filename, overwrite=overwrite)
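# Hypothetical call to save_hdu above (file name and data are illustrative):
# with compress=True a '.fz' suffix is appended when missing, and the image
# is written as extension 1 behind an empty primary HDU, the standard fpack
# layout.
import numpy as np
from astropy.io import fits

hdu = fits.PrimaryHDU(np.zeros((100, 100), dtype='int16'))
save_hdu(hdu, '/tmp/example.fits', compress=True, overwrite=True)
# -> writes /tmp/example.fits.fz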
def fits_compress(path_in: str, path_out: str):
    with fits.open(path_out, mode='append') as comphdulist:
        with fits.open(path_in, do_not_scale_image_data=True) as hdulist:
            for hdu in hdulist:
                if isinstance(hdu, fits.PrimaryHDU):
                    compressed_hdu = fits.PrimaryHDU(header=hdu.header,
                                                     data=hdu.data)
                elif isinstance(hdu, fits.CompImageHDU):
                    compressed_hdu = hdu
                else:
                    compressed_hdu = fits.CompImageHDU(header=hdu.header,
                                                       data=hdu.data)
                comphdulist.append(compressed_hdu)
def pack(uncompressed_hdulist: fits.HDUList) -> fits.HDUList:
    if uncompressed_hdulist[0].data is None:
        primary_hdu = fits.PrimaryHDU(header=uncompressed_hdulist[0].header)
        hdulist = [primary_hdu]
    else:
        primary_hdu = fits.PrimaryHDU()
        compressed_hdu = fits.CompImageHDU(
            data=uncompressed_hdulist[0].data,
            header=uncompressed_hdulist[0].header,
            quantize_level=64, quantize_method=1)
        hdulist = [primary_hdu, compressed_hdu]
    for hdu in uncompressed_hdulist[1:]:
        if isinstance(hdu, fits.ImageHDU):
            compressed_hdu = fits.CompImageHDU(data=hdu.data,
                                               header=hdu.header,
                                               quantize_level=64,
                                               quantize_method=1)
            hdulist.append(compressed_hdu)
        else:
            hdulist.append(hdu)
    return fits.HDUList(hdulist)
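# Hypothetical round trip through pack() above; the input file name is made
# up. quantize_level=64 quantizes floating-point data with a step of 1/64 of
# the measured background noise, and quantize_method=1 selects subtractive
# dithering, which removes the systematic bias that plain quantization
# introduces.
from astropy.io import fits

with fits.open('raw_example.fits') as uncompressed:
    packed = pack(uncompressed)
    packed.writeto('packed_example.fits.fz', overwrite=True)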
def procHDUL(raw_hdul):
    """
    Process an HDUList of raw Skipper images into average and rms HDULists.
    """
    # Initialize the processed HDULists
    avg_hdul = fits.HDUList()
    rms_hdul = fits.HDUList()
    # Iterate through the different extensions (quadrants)
    for raw_hdu in raw_hdul:
        if raw_hdu.data is None:
            continue  # skip empty extensions
        # Get the number of rows, columns, and samples for LTA or LEACH
        # readout automatically
        nrow, ncol, N = getProcHeaderVals(raw_hdu.header)
        # Define processed HDUs with zeroed data and the same header
        avg_hdu = fits.CompImageHDU(data=np.zeros((nrow, ncol)),
                                    header=raw_hdu.header)
        rms_hdu = fits.CompImageHDU(data=np.zeros((nrow, ncol)),
                                    header=raw_hdu.header)
        # Iterate through the elements of the newly defined images
        for row in range(nrow):
            for col in range(ncol):
                # Calculate the average and rms pixel values over the N
                # samples of each pixel
                samples = raw_hdu.data[row, N * col:N * col + N]
                avg_hdu.data[row, col] = samples.mean()
                rms_hdu.data[row, col] = samples.std()
        avg_hdul.append(avg_hdu)
        rms_hdul.append(rms_hdu)
    return avg_hdul, rms_hdul
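# The per-pixel double loop in procHDUL can be vectorized by reshaping the
# sample axis out of the row data. A sketch of the equivalent computation,
# assuming each row holds exactly ncol * N consecutive samples, as the loop
# version does:
import numpy as np

def average_and_rms(data, nrow, ncol, N):
    samples = data[:nrow, :ncol * N].reshape(nrow, ncol, N)
    return samples.mean(axis=2), samples.std(axis=2)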
def compress(self, algorithm, quantize_factor=2):
    """
    Compress the data with the selected algorithm and save it to a FITS
    file.

    Supported algorithms ---> ['RICE_1', 'GZIP_1', 'GZIP_2', 'PLIO_1',
                               'HCOMPRESS_1']

    @type self: Compression
    @type quantize_factor: Integer
        Desired compression factor: used as the HCOMPRESS scale, or as the
        quantization level for the other algorithms.
    @rtype: None
    """
    compressed_name = (algorithm + "_" + str(round(quantize_factor, 3)) +
                       "_" + self.image_name)
    if self.valid_extension():
        if algorithm == "HCOMPRESS_1":
            fits.CompImageHDU(self.original_data,
                              compression_type=algorithm,
                              hcomp_scale=quantize_factor).writeto(
                                  self.save_directory + compressed_name,
                                  overwrite=True)
            self.image_compressed_name = compressed_name
        else:
            fits.CompImageHDU(self.original_data,
                              compression_type=algorithm,
                              quantize_level=quantize_factor).writeto(
                                  self.save_directory + compressed_name,
                                  overwrite=True)
            self.image_compressed_name = compressed_name
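# A small illustration of the branch above (array and file names are made
# up): HCOMPRESS_1 is tuned through hcomp_scale, where 0 means lossless for
# integer data and larger values discard more detail, while the remaining
# algorithms only lose information through quantize_level when the input is
# floating point.
import numpy as np
from astropy.io import fits

img = np.random.rand(256, 256).astype('float32')
fits.CompImageHDU(img, compression_type='HCOMPRESS_1',
                  hcomp_scale=2.5).writeto('hcomp_example.fits',
                                           overwrite=True)
fits.CompImageHDU(img, compression_type='RICE_1',
                  quantize_level=16.0).writeto('rice_example.fits',
                                               overwrite=True)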
def test_identical_comp_image_hdus(self):
    """Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/189

    For this test we mostly just care that comparing two compressed images
    does not crash, and returns the correct results. Two compressed images
    will be considered identical if the decompressed data is the same.
    Obviously we test whether or not the same compression was used by
    looking for (or ignoring) header differences.
    """
    data = np.arange(100.0).reshape(10, 10)
    hdu = fits.CompImageHDU(data=data)
    hdu.writeto(self.temp('test.fits'))
    with fits.open(self.temp('test.fits')) as hdula, \
            fits.open(self.temp('test.fits')) as hdulb:
        diff = FITSDiff(hdula, hdulb)
        assert diff.identical