import os
import shutil

from astropy.io import fits
from astropy.io.fits import BinTableHDU, HDUList


def trim_throughput(indir, outdir):
    '''Downsample throughput files.'''
    assert os.path.basename(indir) == 'throughput'

    if not os.path.exists(outdir):
        os.makedirs(outdir)

    #- Fiberloss files are small; copy them unchanged
    for targettype in ('elg', 'lrg', 'perfect', 'qso', 'sky', 'star'):
        filename = 'fiberloss-{}.dat'.format(targettype)
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))

    #- Downsample the throughput tables by keeping every 20th row
    for filename in ['thru-b.fits', 'thru-r.fits', 'thru-z.fits']:
        fx = fits.open(os.path.join(indir, filename))
        hdus = HDUList()
        hdus.append(fx[0])
        hdus.append(BinTableHDU(fx[1].data[::20], header=fx[1].header))
        hdus.append(BinTableHDU(fx[2].data[::20], header=fx[2].header))
        hdus.writeto(os.path.join(outdir, filename))
        fx.close()

    #- Copy the remaining model files unchanged
    for filename in [
            'DESI-0347_blur.ecsv',
            'DESI-0347_offset.ecsv',
            'DESI-0347_random_offset_1.fits',
    ]:
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))
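# A minimal usage sketch for trim_throughput. The paths below are
# hypothetical stand-ins for a local copy of the model data; the only
# constraint from the function itself is that the input directory must
# be named 'throughput'.
if __name__ == '__main__':
    trim_throughput('/data/desimodel/throughput',        # hypothetical input
                    '/data/desimodel-test/throughput')   # hypothetical output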
import os

import asdf
import numpy as np
#- NOTE: these import paths assume an asdf release that still ships
#- ASDF-in-FITS support (asdf < 3.0)
from asdf.commands import extract
from asdf.fits_embed import AsdfInFits
from asdf.tests.helpers import assert_tree_match
from astropy.io.fits import HDUList, ImageHDU


def test_extract(tmpdir):
    hdulist = HDUList()
    image = ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)

    tree = {
        'some_words': 'These are some words',
        'nested': {'a': 100, 'b': 42},
        'list': list(range(10)),
        'image': image.data,
    }

    #- Write the tree into a FITS file as an embedded ASDF extension
    asdf_in_fits = str(tmpdir.join('asdf.fits'))
    with AsdfInFits(hdulist, tree) as aif:
        aif.write_to(asdf_in_fits)

    #- Extract the embedded ASDF into a standalone .asdf file
    pure_asdf = str(tmpdir.join('extract.asdf'))
    extract.extract_file(asdf_in_fits, pure_asdf)
    assert os.path.exists(pure_asdf)

    with asdf.open(pure_asdf) as af:
        assert not isinstance(af, AsdfInFits)
        assert_tree_match(tree, af.tree)
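# The same extraction is also exposed on the command line through the
# asdf package's `asdftool` entry point (again assuming an asdf release
# that still supports ASDF-in-FITS); file names here are arbitrary:
#
#   asdftool extract asdf.fits extract.asdf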
def to_hdu_list(self):
    """Convert to `~astropy.io.fits.HDUList`."""
    hdu_list = HDUList()
    for image in self:
        hdu_list.append(image.to_image_hdu())
    return hdu_list
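# A minimal usage sketch: `images` stands in for any instance of the
# containing class (an iterable of images implementing `to_image_hdu()`);
# the output filename is hypothetical.
hdu_list = images.to_hdu_list()
hdu_list.writeto('images.fits', overwrite=True)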
import os

from astropy.io import fits
from astropy.io.fits import BinTableHDU, HDUList


def trim_quickpsf(indir, outdir, filename):
    assert os.path.abspath(indir) != os.path.abspath(outdir)
    infile = os.path.join(indir, filename)
    outfile = os.path.join(outdir, filename)

    fx = fits.open(infile)
    hdus = HDUList()
    hdus.append(fx[0])

    #- Keep every 10th row of each table HDU
    for i in [1, 2, 3]:
        d = fx[i].data
        hdus.append(BinTableHDU(d[::10], header=fx[i].header))

    #- `clobber` was removed from astropy; `overwrite` is the replacement
    hdus.writeto(outfile, overwrite=True)
    fx.close()
def createFromScratch(self, phu, extensions=None):
    """
    Creates an AstroData object from a collection of objects.
    """
    lst = HDUList()
    if phu is not None:
        if isinstance(phu, PrimaryHDU):
            lst.append(phu)
        elif isinstance(phu, Header):
            lst.append(PrimaryHDU(header=deepcopy(phu), data=DELAYED))
        elif isinstance(phu, (dict, list, tuple)):
            p = PrimaryHDU()
            p.header.update(phu)
            lst.append(p)
        else:
            raise ValueError(
                "phu must be a PrimaryHDU or a valid header object")

    # TODO: Verify the contents of extensions...
    if extensions is not None:
        for ext in extensions:
            lst.append(ext)

    return self.getAstroData(lst)
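# A minimal usage sketch for createFromScratch. `factory` stands in for
# an instance of the containing class; DELAYED and getAstroData are
# internal to that project and are not defined here.
import numpy as np
from astropy.io.fits import Header, ImageHDU

phu = Header()
phu['INSTRUME'] = 'EXAMPLE'   # hypothetical keyword
ad = factory.createFromScratch(phu,
                               extensions=[ImageHDU(np.zeros((10, 10)))])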
def get_stamps(self, oid, candid=None):
    """Download stamps for a specific alert.

    Parameters
    ----------
    oid : :py:class:`str`
        Object ID in the ALeRCE DBs.
    candid : :py:class:`int`
        Candid of the stamp to be displayed.

    Returns
    -------
    :class:`astropy.io.fits.HDUList`
        Science, template, and difference stamps for a specific alert.
    """
    if candid is None:
        candid = self._get_first_detection(oid)
    try:
        hdulist = HDUList()
        for stamp_type in ["science", "template", "difference"]:
            tmp_hdulist = fits_open(
                "%s?oid=%s&candid=%s&type=%s&format=fits"
                % (
                    self.config["AVRO_URL"]
                    + self.config["AVRO_ROUTES"]["get_stamp"],
                    oid,
                    candid,
                    stamp_type,
                )
            )
            #- Tag each stamp so the three HDUs can be told apart
            hdu = tmp_hdulist[0]
            hdu.header["STAMP_TYPE"] = stamp_type
            hdulist.append(hdu)
        return hdulist
    except HTTPError:
        warnings.warn("AVRO File not found.", RuntimeWarning)
        return None
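# A minimal usage sketch for get_stamps. `client` stands in for an
# instance of the containing ALeRCE client class (the class name and
# the object ID below are made-up examples).
client = AlerceAPI()                        # hypothetical client class
stamps = client.get_stamps("ZTF18abcdefg")  # hypothetical object ID
if stamps is not None:
    print([hdu.header["STAMP_TYPE"] for hdu in stamps])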
import os

import numpy as np
from astropy.io import fits
from astropy.io.fits import HDUList, ImageHDU, PrimaryHDU


def trim_psf(indir, outdir, filename):
    assert os.path.abspath(indir) != os.path.abspath(outdir)
    infile = os.path.join(indir, filename)
    outfile = os.path.join(outdir, filename)
    fx = fits.open(infile)
    hdus = HDUList()

    #- HDU 0 XCOEFF - data unchanged but update keywords for fewer samples
    xcoeff = fx[0].data
    hdr = fx[0].header
    hdr['NWAVE'] = 3        #- down from 11
    hdr['CRPIX1'] = 23      #- 23=45//2+1, down from 113=225//2+1
    hdr['CRPIX2'] = 23      #- original set CRPIX1 twice; CRPIX2 intended
    hdr['CDELT1'] = 0.005   #- 5mm instead of 1mm
    hdr['CDELT2'] = 0.005   #- 5mm instead of 1mm
    hdr['PIXSIZE'] = 0.005  #- 5mm instead of 1mm
    hdus.append(PrimaryHDU(xcoeff, header=hdr))
    hdus.append(fx['YCOEFF'])

    #- Subsample spots: keep a 3x3 grid of the original 11x11 positions,
    #- rebinning each spot image by a factor of 5
    inspots = fx['SPOTS'].data
    spots = np.zeros((3, 3, 45, 45))
    for i in range(3):
        for j in range(3):
            spots[i, j] = rebin_image(inspots[5 * i, 5 * j], 5)
    hdus.append(ImageHDU(spots, header=fx['SPOTS'].header))

    #- Subsample spots x,y locations
    dx = fx['SPOTX'].data
    hdus.append(ImageHDU(dx[::5, ::5], header=fx['SPOTX'].header))
    dy = fx['SPOTY'].data
    hdus.append(ImageHDU(dy[::5, ::5], header=fx['SPOTY'].header))

    #- Fiberpos unchanged
    hdus.append(fx['FIBERPOS'])

    #- Subsample SPOTPOS and SPOTWAVE
    d = fx['SPOTPOS'].data
    hdus.append(ImageHDU(d[::5], header=fx['SPOTPOS'].header))
    d = fx['SPOTWAVE'].data
    hdus.append(ImageHDU(d[::5], header=fx['SPOTWAVE'].header))

    #- `clobber` was removed from astropy; `overwrite` is the replacement
    hdus.writeto(outfile, overwrite=True)
    fx.close()
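# `rebin_image` is not defined in this snippet. Below is a minimal
# sketch of a block-averaging rebin consistent with how it is used above
# (reducing a 225x225 spot to 45x45 with factor 5); the real project may
# implement it differently, e.g. summing instead of averaging.
def rebin_image(image, factor):
    """Average `factor` x `factor` blocks of a 2D array."""
    ny, nx = image.shape
    shape = (ny // factor, factor, nx // factor, factor)
    return image.reshape(shape).mean(axis=(1, 3))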
def ad_to_hdulist(ad):
    """Creates an HDUList from an AstroData object."""
    hdul = HDUList()
    hdul.append(PrimaryHDU(header=ad.phu, data=DELAYED))

    for ext in ad._nddata:
        meta = ext.meta
        header, ver = meta['header'], meta['ver']
        wcs = ext.wcs

        if isinstance(wcs, gWCS):
            # We don't have access to the AD tags so see if it's an image
            # Catch ValueError as any sort of failure
            try:
                wcs_dict = adwcs.gwcs_to_fits(ext, ad.phu)
            except (ValueError, NotImplementedError) as e:
                LOGGER.warning(e)
            else:
                # Must delete keywords if image WCS has been downscaled
                # from a higher number of dimensions
                for i in range(1, 5):
                    for kw in (f'CDELT{i}', f'CRVAL{i}', f'CUNIT{i}',
                               f'CTYPE{i}'):
                        if kw in header:
                            del header[kw]
                    for j in range(1, 5):
                        for kw in (f'CD{i}_{j}', f'PC{i}_{j}', f'CRPIX{j}'):
                            if kw in header:
                                del header[kw]
                header.update(wcs_dict)
                # Use "in" here as the dict entry may be (value, comment)
                if 'APPROXIMATE' not in wcs_dict.get('FITS-WCS', ''):
                    wcs = None  # There's no need to create a WCS extension

        hdul.append(new_imagehdu(ext.data, header, 'SCI'))
        if ext.uncertainty is not None:
            hdul.append(new_imagehdu(ext.uncertainty.array, header, 'VAR'))
        if ext.mask is not None:
            hdul.append(new_imagehdu(ext.mask, header, 'DQ'))

        if isinstance(wcs, gWCS):
            hdul.append(wcs_to_asdftablehdu(ext.wcs, extver=ver))

        for name, other in meta.get('other', {}).items():
            if isinstance(other, Table):
                hdul.append(table_to_bintablehdu(other))
            elif isinstance(other, np.ndarray):
                header = meta['other_header'].get(name, meta['header'])
                hdul.append(new_imagehdu(other, header, name=name))
            elif isinstance(other, NDDataObject):
                hdul.append(new_imagehdu(other.data, meta['header']))
            else:
                raise ValueError("I don't know how to write back an object "
                                 "of type {}".format(type(other)))

    if ad._tables is not None:
        for name, table in sorted(ad._tables.items()):
            hdul.append(table_to_bintablehdu(table, extname=name))

    return hdul
def ad_to_hdulist(ad):
    """Creates an HDUList from an AstroData object."""
    hdul = HDUList()
    hdul.append(PrimaryHDU(header=ad.phu, data=DELAYED))

    # Find the maximum EXTVER for extensions that belonged with this
    # object if it was read from a FITS file
    maxver = max((nd.meta['header'].get('EXTVER', 0) for nd in ad._nddata
                  if nd.meta.get('parent_ad') == id(ad)), default=0)

    for ext in ad._nddata:
        header = ext.meta['header']
        if not isinstance(header, fits.Header):
            header = fits.Header(header)

        if ext.meta.get('parent_ad') == id(ad):
            # If the extension belonged with this object, use its
            # original EXTVER
            ver = header['EXTVER']
        else:
            # Otherwise renumber the extension
            ver = header['EXTVER'] = maxver + 1
            maxver += 1

        wcs = ext.wcs
        if isinstance(wcs, gWCS):
            # We don't have access to the AD tags so see if it's an image
            # Catch ValueError as any sort of failure
            try:
                wcs_dict = gwcs_to_fits(ext, ad.phu)
            except (ValueError, NotImplementedError) as e:
                LOGGER.warning(e)
            else:
                # Must delete keywords if image WCS has been downscaled
                # from a higher number of dimensions
                for i in range(1, 5):
                    for kw in (f'CDELT{i}', f'CRVAL{i}', f'CUNIT{i}',
                               f'CTYPE{i}'):
                        if kw in header:
                            del header[kw]
                    for j in range(1, 5):
                        for kw in (f'CD{i}_{j}', f'PC{i}_{j}', f'CRPIX{j}'):
                            if kw in header:
                                del header[kw]
                header.update(wcs_dict)
                # Use "in" here as the dict entry may be (value, comment)
                if 'APPROXIMATE' not in wcs_dict.get('FITS-WCS', ''):
                    wcs = None  # There's no need to create a WCS extension

        hdul.append(new_imagehdu(ext.data, header, 'SCI'))
        if ext.uncertainty is not None:
            hdul.append(new_imagehdu(ext.uncertainty.array, header, 'VAR'))
        if ext.mask is not None:
            hdul.append(new_imagehdu(ext.mask, header, 'DQ'))

        if isinstance(wcs, gWCS):
            hdul.append(wcs_to_asdftablehdu(ext.wcs, extver=ver))

        for name, other in ext.meta.get('other', {}).items():
            if isinstance(other, Table):
                hdu = table_to_bintablehdu(other, extname=name)
            elif isinstance(other, np.ndarray):
                hdu = new_imagehdu(other, header, name=name)
            elif isinstance(other, NDDataObject):
                hdu = new_imagehdu(other.data, ext.meta['header'])
            else:
                raise ValueError("I don't know how to write back an object "
                                 f"of type {type(other)}")
            hdu.ver = ver
            hdul.append(hdu)

    if ad._tables is not None:
        for name, table in sorted(ad._tables.items()):
            hdul.append(table_to_bintablehdu(table, extname=name))

    return hdul
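# A minimal usage sketch covering both ad_to_hdulist variants above.
# `ad` stands in for an AstroData object from the containing project;
# DELAYED, gWCS, LOGGER, and the new_imagehdu / table_to_bintablehdu /
# wcs_to_asdftablehdu helpers are internal to that project and are not
# defined here.
hdul = ad_to_hdulist(ad)
hdul.writeto('output.fits', overwrite=True)   # hypothetical output path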