def test_remove_hdu(tmpdir):
    hdulist = fits.HDUList()
    image = fits.ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)
    tree = {
        'some_words': 'These are some words',
        'nested': {'a': 100, 'b': 42},
        'list': [x for x in range(10)],
        'image': image.data
    }

    asdf_in_fits = str(tmpdir.join('asdf.fits'))
    with AsdfInFits(hdulist, tree) as aif:
        aif.write_to(asdf_in_fits)

    with fits.open(asdf_in_fits) as hdul:
        assert 'ASDF' in hdul

    new_fits = str(tmpdir.join('remove.fits'))
    remove_hdu(asdf_in_fits, new_fits)

    assert os.path.exists(new_fits)

    with fits.open(new_fits) as hdul:
        assert 'ASDF' not in hdul

def test_extract(tmpdir):
    hdulist = HDUList()
    image = ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)
    tree = {
        'some_words': 'These are some words',
        'nested': {'a': 100, 'b': 42},
        'list': [x for x in range(10)],
        'image': image.data
    }

    asdf_in_fits = str(tmpdir.join('asdf.fits'))
    with AsdfInFits(hdulist, tree) as aif:
        aif.write_to(asdf_in_fits)

    pure_asdf = str(tmpdir.join('extract.asdf'))
    extract.extract_file(asdf_in_fits, pure_asdf)

    assert os.path.exists(pure_asdf)

    with asdf.open(pure_asdf) as af:
        assert not isinstance(af, AsdfInFits)
        assert_tree_match(tree, af.tree)

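
# --- Hedged sketch (not part of the test modules above) ----------------------
# A self-contained round trip of the workflow both tests exercise: build an
# HDUList, embed an ASDF tree alongside it, then reopen the file.  It assumes
# an asdf release that still ships asdf.fits_embed (removed in asdf 3.0); the
# file name and tree contents are illustrative only.
import numpy as np
from astropy.io import fits
from asdf.fits_embed import AsdfInFits


def roundtrip_asdf_in_fits(path='example_asdf_in_fits.fits'):
    hdulist = fits.HDUList()
    image = fits.ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)
    tree = {'image': image.data}

    # write_to() serializes the tree into an extra 'ASDF' extension
    with AsdfInFits(hdulist, tree) as aif:
        aif.write_to(path)

    # reopening recovers both the FITS HDUs and the embedded tree
    with AsdfInFits.open(path) as aif:
        print(sorted(aif.tree))
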
def load_hdu(self, hdu, dstobj=None, **kwargs):
    if isinstance(hdu, (pyfits.ImageHDU, pyfits.CompImageHDU,
                        pyfits.PrimaryHDU)):
        # <-- data is an image
        if dstobj is None:
            # get model class for this type of object
            obj_class = self.factory_dict.get('image', None)
            if obj_class is None:
                raise FITSError(
                    "I don't know how to load objects of kind 'image'")
            dstobj = obj_class(logger=self.logger)

        # For now, call back into the object to load it from a pyfits-style
        # HDU; in the future, migrate to a storage-neutral format
        dstobj.load_hdu(hdu, **kwargs)

    elif isinstance(hdu, (pyfits.TableHDU, pyfits.BinTableHDU)):
        # <-- data is a table

        # Handle ASDF embedded in FITS.
        # TODO: Populate EXTNAME, EXTVER, NAXISn in ASDF meta from HDU?
        # TODO: How to read from all the different ASDF layouts?
        # TODO: Cache the ASDF object?
        from ginga.util import io_asdf
        if io_asdf.have_asdf and hdu.name == 'ASDF':
            from asdf.fits_embed import AsdfInFits
            from ginga import AstroImage
            self.logger.debug('Attempting to load {} extension from '
                              'FITS'.format(hdu.name))
            dstobj = AstroImage.AstroImage()
            with AsdfInFits.open(self.fits_f) as asdf_f:
                dstobj.load_asdf(asdf_f)
            return dstobj

        if dstobj is None:
            self.logger.debug('Attempting to load table from FITS')
            # get model class for this type of object
            obj_class = self.factory_dict.get('table', None)
            if obj_class is None:
                raise FITSError(
                    "I don't know how to load objects of kind 'table'")
            dstobj = obj_class(logger=self.logger)

        # For now, call back into the object to load it from a pyfits-style
        # HDU; in the future, migrate to a storage-neutral format
        dstobj.load_hdu(hdu, **kwargs)

    else:
        raise FITSError("I don't know how to read this HDU")

    return dstobj

def load_hdu(self, hdu, dstobj=None, **kwargs):
    typ = self.get_hdu_type(hdu)

    if typ == 'image':
        if dstobj is None:
            # get model class for this type of object
            obj_class = self.factory_dict.get('image', None)
            if obj_class is None:
                raise FITSError(
                    "I don't know how to load objects of kind 'image'")
            dstobj = obj_class(logger=self.logger)

        # For now, call back into the object to load it from a pyfits-style
        # HDU; in the future, migrate to a storage-neutral format
        dstobj.load_hdu(hdu, **kwargs)

    elif typ == 'table':
        # <-- data may be a table

        # Handle ASDF embedded in FITS.
        # TODO: Populate EXTNAME, EXTVER, NAXISn in ASDF meta from HDU?
        # TODO: How to read from all the different ASDF layouts?
        # TODO: Cache the ASDF object?
        from ginga.util import io_asdf
        if io_asdf.have_asdf and hdu.name == 'ASDF':
            from asdf.fits_embed import AsdfInFits
            from ginga import AstroImage
            self.logger.debug('Attempting to load {} extension from '
                              'FITS'.format(hdu.name))
            dstobj = AstroImage.AstroImage()
            with AsdfInFits.open(self.fits_f) as asdf_f:
                dstobj.load_asdf(asdf_f)
            return dstobj

        if dstobj is None:
            self.logger.debug('Attempting to load table from FITS')
            # get model class for this type of object
            obj_class = self.factory_dict.get('table', None)
            if obj_class is None:
                raise FITSError(
                    "I don't know how to load objects of kind 'table'")
            dstobj = obj_class(logger=self.logger)

        # For now, call back into the object to load it from a pyfits-style
        # HDU; in the future, migrate to a storage-neutral format
        dstobj.load_hdu(hdu, **kwargs)

    else:
        raise FITSError("I don't know how to read this HDU")

    return dstobj

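
# --- Hedged sketch (not ginga's implementation) -------------------------------
# get_hdu_type() is called above but not shown.  Judging from the isinstance()
# checks in the earlier version of load_hdu(), a minimal stand-in could
# classify HDUs like this; the actual helper in ginga may differ in name,
# location, and behavior.
from astropy.io import fits as pyfits


def get_hdu_type(hdu):
    if isinstance(hdu, (pyfits.ImageHDU, pyfits.CompImageHDU,
                        pyfits.PrimaryHDU)):
        return 'image'
    if isinstance(hdu, (pyfits.TableHDU, pyfits.BinTableHDU)):
        return 'table'
    return None
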
def load_asdf_hdu_in_fits(self, fits_f, hdu, **kwargs):
    """*** This is a special method that should only be called from
    WITHIN io_fits.py to open up ASDF-embedded-in-FITS ***
    """
    # Handle ASDF embedded in FITS (see load_hdu() in io_fits.py)
    # TODO: Populate EXTNAME, EXTVER, NAXISn in ASDF meta from HDU?
    # TODO: How to read from all the different ASDF layouts?
    # TODO: Cache the ASDF object?
    # TODO: hdu is ignored for now, but presumably this loader might
    #       eventually want to check it
    with AsdfInFits.open(fits_f) as asdf_f:
        data_obj = self.load_asdf(asdf_f, logger=self.logger, **kwargs)

    # metadata will hopefully be set back in io_fits
    return data_obj

def _jwst2data(file_obj, ext, data_label):
    comp_label = ext.upper()
    new_data_label = f'{data_label}[{comp_label}]'
    data = Data(label=new_data_label)
    unit_attr = f'bunit_{ext}'

    try:
        # This is very specific to JWST pipeline image output.
        with AsdfInFits.open(file_obj) as af:
            dm = af.tree
            dm_meta = af.tree["meta"]

            if (unit_attr in dm_meta and
                    _validate_bunit(dm_meta[unit_attr], raise_error=False)):
                bunit = dm_meta[unit_attr]
            else:
                bunit = ''

            # This is an instance of gwcs.WCS, not astropy.wcs.WCS
            if 'wcs' in dm_meta:
                data.coords = dm_meta['wcs']

            imdata = dm[ext]
            component = Component.autotyped(imdata, units=bunit)

            # Might have bad GWCS. If so, we exclude it.
            try:
                data.add_component(component=component, label=comp_label)
            except Exception:  # pragma: no cover
                data.coords = None
                data.add_component(component=component, label=comp_label)

    # TODO: Do not need this when jwst.datamodels finally becomes its own package.
    # This might happen for a grism image; fall back to the FITS loader without WCS.
    except Exception:
        if ext == 'data':
            ext = 'sci'
        hdu = file_obj[ext]
        return _hdu2data(hdu, data_label, file_obj, include_wcs=False)

    return data, new_data_label
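
# --- Hedged sketch (standalone, not jdaviz API) -------------------------------
# The core of _jwst2data() is a lookup into the embedded ASDF 'meta' tree.
# This stripped-down version shows just that lookup with plain asdf calls.
# The function name, and the assumption that the file follows the JWST
# pipeline layout (bunit_<ext> keys, a gwcs.WCS stored under 'wcs'), are
# illustrative only.
from asdf.fits_embed import AsdfInFits


def peek_jwst_meta(filename, ext='data'):
    unit_attr = f'bunit_{ext}'
    with AsdfInFits.open(filename) as af:
        meta = af.tree.get('meta', {})
        bunit = meta.get(unit_attr, '')  # e.g. 'MJy/sr' in many calibrated products
        gwcs_obj = meta.get('wcs')       # gwcs.WCS instance, not astropy.wcs.WCS
    return bunit, gwcs_obj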