def writeFitsHdu(self):
    """Write catalog to a FITS hdu.

    @returns an HDU with the FITS binary table of the catalog.
    """
    # Note to developers: Because of problems with pickling in older pyfits versions, this
    # code is duplicated in galsim/config/extra_truth.py, BuildTruthHDU.  If you change
    # this function, you should update BuildTruthHDU as well.
    from galsim._pyfits import pyfits

    data = self.makeData()

    def _fits_format(dt):
        # Map a numpy dtype to the matching FITS binary-table column format:
        # 32-bit int for integer kinds, double for float kinds, and a fixed-width
        # character field sized by the itemsize for everything else (strings).
        if dt.kind in np.typecodes['AllInteger']:
            return 'J'
        if dt.kind in np.typecodes['AllFloat']:
            return 'D'
        return '%dA' % dt.itemsize

    columns = [
        pyfits.Column(name=field, format=_fits_format(data.dtype[field]),
                      array=data[field])
        for field in data.dtype.names
    ]
    coldefs = pyfits.ColDefs(columns)

    # Depending on the version of pyfits, one of these should work:
    try:
        return pyfits.BinTableHDU.from_columns(coldefs)
    except AttributeError:  # pragma: no cover
        return pyfits.new_table(coldefs)
def PhotonArray_write(self, file_name):
    """Write a PhotonArray to a FITS file.

    The output file will be a FITS binary table with a row for each photon in the
    PhotonArray.  Columns will include 'id' (sequential from 0 to nphotons-1), 'x', 'y',
    and 'flux'.  Additionally, the columns 'dxdz', 'dydz', and 'wavelength' will be
    included if they are set for this PhotonArray object.

    The file can be read back in with the classmethod `PhotonArray.read`.

        >>> photons.write('photons.fits')
        >>> photons2 = galsim.PhotonArray.read('photons.fits')

    @param file_name    The file name of the output FITS file.
    """
    from galsim._pyfits import pyfits

    cols = []
    # Use np.arange rather than a bare range: older pyfits versions require a real
    # sequence/array here.  (The values written are unchanged: 0 .. nphotons-1.)
    cols.append(pyfits.Column(name='id', format='J', array=np.arange(self.size())))
    cols.append(pyfits.Column(name='x', format='D', array=self.x))
    cols.append(pyfits.Column(name='y', format='D', array=self.y))
    cols.append(pyfits.Column(name='flux', format='D', array=self.flux))

    # Optional columns, written only if the corresponding arrays have been allocated.
    if self.hasAllocatedAngles():
        cols.append(pyfits.Column(name='dxdz', format='D', array=self.dxdz))
        cols.append(pyfits.Column(name='dydz', format='D', array=self.dydz))

    if self.hasAllocatedWavelengths():
        cols.append(pyfits.Column(name='wavelength', format='D', array=self.wavelength))

    cols = pyfits.ColDefs(cols)

    # Depending on the version of pyfits, one of these should work:
    try:
        table = pyfits.BinTableHDU.from_columns(cols)
    except AttributeError:  # pragma: no cover  (Might need this for older pyfits versions)
        table = pyfits.new_table(cols)

    galsim.fits.writeFile(file_name, table)
def WriteMEDS(obj_list, file_name, clobber=True):
    """
    Writes a MEDS file from a list of MultiExposureObjects.

    Arguments:
    ----------
    @param obj_list:       List of MultiExposureObjects
    @param file_name:      Name of meds file to be written
    @param clobber         Setting `clobber=True` when `file_name` is given will silently
                           overwrite existing files.  (Default `clobber = True`.)
    """
    from galsim._pyfits import pyfits

    # initialise the catalog
    cat = {}
    cat['id'] = []
    cat['box_size'] = []
    cat['ra'] = []
    cat['dec'] = []
    cat['ncutout'] = []
    cat['start_row'] = []
    cat['dudrow'] = []
    cat['dudcol'] = []
    cat['dvdrow'] = []
    cat['dvdcol'] = []
    cat['row0'] = []
    cat['col0'] = []
    cat['psf_box_size'] = []
    cat['psf_start_row'] = []

    # initialise the image vectors
    vec = {}
    vec['image'] = []
    vec['seg'] = []
    vec['weight'] = []
    vec['psf'] = []

    # initialise the image vector index
    n_vec = 0
    psf_n_vec = 0

    # get number of objects
    n_obj = len(obj_list)

    # loop over objects
    for obj in obj_list:

        # initialise the start indices for each image
        start_rows = np.ones(MAX_NCUTOUTS) * EMPTY_START_INDEX
        psf_start_rows = np.ones(MAX_NCUTOUTS) * EMPTY_START_INDEX
        dudrow = np.ones(MAX_NCUTOUTS) * EMPTY_JAC_diag
        dudcol = np.ones(MAX_NCUTOUTS) * EMPTY_JAC_offdiag
        dvdrow = np.ones(MAX_NCUTOUTS) * EMPTY_JAC_offdiag
        dvdcol = np.ones(MAX_NCUTOUTS) * EMPTY_JAC_diag
        row0 = np.ones(MAX_NCUTOUTS) * EMPTY_SHIFT
        col0 = np.ones(MAX_NCUTOUTS) * EMPTY_SHIFT

        # get the number of cutouts (exposures)
        n_cutout = obj.n_cutouts

        # append the catalog for this object
        cat['id'].append(obj.id)
        cat['box_size'].append(obj.box_size)
        # TODO: If the config defines a world position, get the right ra, dec here.
        cat['ra'].append(0.)
        cat['dec'].append(0.)
        cat['ncutout'].append(n_cutout)
        cat['psf_box_size'].append(obj.psf_box_size)

        # loop over cutouts
        for i in range(n_cutout):

            # assign the start row to the end of image vector
            start_rows[i] = n_vec
            psf_start_rows[i] = psf_n_vec
            # update n_vec to point to the end of image vector
            # (array.size avoids the needless flatten() copy just to take a length)
            n_vec += obj.images[i].array.size

            # append the image vectors
            vec['image'].append(obj.images[i].array.flatten())
            vec['seg'].append(obj.seg[i].array.flatten())
            vec['weight'].append(obj.weight[i].array.flatten())
            # Bug fix: the psf append used to be unconditional, which raised a TypeError
            # ('NoneType' object is not subscriptable) whenever obj.psf is None, even
            # though the psf_n_vec update above was already guarded.  Guard both together.
            if obj.psf is not None:
                psf_n_vec += obj.psf[i].array.size
                vec['psf'].append(obj.psf[i].array.flatten())

            # append the Jacobian
            # col == x
            # row == y
            dudcol[i] = obj.wcs[i].dudx
            dudrow[i] = obj.wcs[i].dudy
            dvdcol[i] = obj.wcs[i].dvdx
            dvdrow[i] = obj.wcs[i].dvdy
            col0[i] = obj.wcs[i].origin.x
            row0[i] = obj.wcs[i].origin.y

            # check if we are running out of memory
            if sys.getsizeof(vec) > MAX_MEMORY:
                # Bug fix: the original wrote `'...GB ' % MAX_MEMORY / 1.e9`, which
                # (because % binds tighter than /) divided the formatted *string* by
                # 1.e9 and raised a TypeError instead of this MemoryError.
                raise MemoryError(
                    'Running out of memory > %1.0fGB ' % (MAX_MEMORY / 1.e9) +
                    '- you can increase the limit by changing MAX_MEMORY')

        # update the start rows fields in the catalog
        cat['start_row'].append(start_rows)
        cat['psf_start_row'].append(psf_start_rows)

        # add lists of Jacobians
        cat['dudrow'].append(dudrow)
        cat['dudcol'].append(dudcol)
        cat['dvdrow'].append(dvdrow)
        cat['dvdcol'].append(dvdcol)
        cat['row0'].append(row0)
        cat['col0'].append(col0)

    # concatenate list to one big vector
    vec['image'] = np.concatenate(vec['image'])
    vec['seg'] = np.concatenate(vec['seg'])
    vec['weight'] = np.concatenate(vec['weight'])
    # vec['psf'] may be empty if no object carried a psf; np.concatenate([]) raises.
    vec['psf'] = np.concatenate(vec['psf']) if vec['psf'] else np.zeros(0)

    # get the primary HDU
    primary = pyfits.PrimaryHDU()

    # second hdu is the object_data
    # cf. https://github.com/esheldon/meds/wiki/MEDS-Format
    cols = []
    cols.append(pyfits.Column(name='id', format='K', array=cat['id']))
    cols.append(pyfits.Column(name='number', format='K', array=cat['id']))
    cols.append(pyfits.Column(name='ra', format='D', array=cat['ra']))
    cols.append(pyfits.Column(name='dec', format='D', array=cat['dec']))
    cols.append(pyfits.Column(name='box_size', format='K', array=cat['box_size']))
    cols.append(pyfits.Column(name='ncutout', format='K', array=cat['ncutout']))
    cols.append(pyfits.Column(name='file_id', format='%dK' % MAX_NCUTOUTS,
                              array=[1] * n_obj))
    cols.append(pyfits.Column(name='start_row', format='%dK' % MAX_NCUTOUTS,
                              array=np.array(cat['start_row'])))
    cols.append(pyfits.Column(name='orig_row', format='%dD' % MAX_NCUTOUTS,
                              array=[[0] * MAX_NCUTOUTS] * n_obj))
    cols.append(pyfits.Column(name='orig_col', format='%dD' % MAX_NCUTOUTS,
                              array=[[0] * MAX_NCUTOUTS] * n_obj))
    cols.append(pyfits.Column(name='orig_start_row', format='%dK' % MAX_NCUTOUTS,
                              array=[[0] * MAX_NCUTOUTS] * n_obj))
    cols.append(pyfits.Column(name='orig_start_col', format='%dK' % MAX_NCUTOUTS,
                              array=[[0] * MAX_NCUTOUTS] * n_obj))
    cols.append(pyfits.Column(name='cutout_row', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['row0'])))
    cols.append(pyfits.Column(name='cutout_col', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['col0'])))
    cols.append(pyfits.Column(name='dudrow', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['dudrow'])))
    cols.append(pyfits.Column(name='dudcol', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['dudcol'])))
    cols.append(pyfits.Column(name='dvdrow', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['dvdrow'])))
    cols.append(pyfits.Column(name='dvdcol', format='%dD' % MAX_NCUTOUTS,
                              array=np.array(cat['dvdcol'])))
    cols.append(pyfits.Column(name='psf_box_size', format='K',
                              array=cat['psf_box_size']))
    cols.append(pyfits.Column(name='psf_start_row', format='%dK' % MAX_NCUTOUTS,
                              array=np.array(cat['psf_start_row'])))

    # Depending on the version of pyfits, one of these should work:
    # (Narrowed from a bare `except:` so that real errors in from_columns are not
    # silently rerouted into the legacy new_table path.)
    try:
        object_data = pyfits.BinTableHDU.from_columns(cols)
        object_data.name = 'object_data'
    except AttributeError:  # pragma: no cover
        object_data = pyfits.new_table(pyfits.ColDefs(cols))
        object_data.update_ext_name('object_data')

    # third hdu is image_info
    cols = []
    cols.append(pyfits.Column(name='image_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='image_ext', format='I', array=[0]))
    cols.append(pyfits.Column(name='weight_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='weight_ext', format='I', array=[0]))
    cols.append(pyfits.Column(name='seg_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='seg_ext', format='I', array=[0]))
    cols.append(pyfits.Column(name='bmask_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='bmask_ext', format='I', array=[0]))
    cols.append(pyfits.Column(name='bkg_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='bkg_ext', format='I', array=[0]))
    cols.append(pyfits.Column(name='image_id', format='K', array=[-1]))
    cols.append(pyfits.Column(name='image_flags', format='K', array=[-1]))
    cols.append(pyfits.Column(name='magzp', format='E', array=[30.]))
    cols.append(pyfits.Column(name='scale', format='E', array=[1.]))
    # TODO: Not sure if this is right!
    cols.append(pyfits.Column(name='position_offset', format='D', array=[0.]))
    try:
        image_info = pyfits.BinTableHDU.from_columns(cols)
        image_info.name = 'image_info'
    except AttributeError:  # pragma: no cover
        image_info = pyfits.new_table(pyfits.ColDefs(cols))
        image_info.update_ext_name('image_info')

    # fourth hdu is metadata
    # default values?
    cols = []
    cols.append(pyfits.Column(name='magzp_ref', format='E', array=[30.]))
    cols.append(pyfits.Column(name='DESDATA', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='cat_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_image_id', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='coadd_seg_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='coadd_srclist', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_wt_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='coaddcat_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coaddseg_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='cutout_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='max_boxsize', format='A3', array=['-1']))
    cols.append(pyfits.Column(name='medsconf', format='A3', array=['x']))
    cols.append(pyfits.Column(name='min_boxsize', format='A2', array=['-1']))
    cols.append(pyfits.Column(name='se_badpix_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='se_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='se_wt_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='seg_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='psf_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='sky_hdu', format='K', array=[9999]))
    cols.append(pyfits.Column(name='fake_coadd_seg', format='K', array=[9999]))
    try:
        metadata = pyfits.BinTableHDU.from_columns(cols)
        metadata.name = 'metadata'
    except AttributeError:  # pragma: no cover
        metadata = pyfits.new_table(pyfits.ColDefs(cols))
        metadata.update_ext_name('metadata')

    # rest of HDUs are image vectors
    image_cutouts = pyfits.ImageHDU(vec['image'], name='image_cutouts')
    weight_cutouts = pyfits.ImageHDU(vec['weight'], name='weight_cutouts')
    seg_cutouts = pyfits.ImageHDU(vec['seg'], name='seg_cutouts')
    psf_cutouts = pyfits.ImageHDU(vec['psf'], name='psf')

    # write all
    hdu_list = pyfits.HDUList([
        primary, object_data, image_info, metadata, image_cutouts,
        weight_cutouts, seg_cutouts, psf_cutouts
    ])
    hdu_list.writeto(file_name, clobber=clobber)
def storePSFImages(PSF_dict, filename, bandpass_list=None, clobber=False):
    """
    This is a routine to store images of chromatic WFIRST PSFs in different bands for each SCA.
    It takes an output dict of PSFs (`PSF_dict`) directly from getPSF().  The output will be a
    file (`filename`) that has all the images, along with an HDU that contains a FITS table
    indicating the bandpasses, SCAs, and other information needed to reconstruct the PSF
    information.

    This routine is not meant to work for PSFs from getPSF() that are completely achromatic.
    The reason for this is that those PSFs are quite fast to generate, so there is little benefit
    to storing them.

    @param PSF_dict            A dict of PSF objects for each SCA, in the same format as output
                               by the getPSF() routine (though it can take versions that have
                               been modified, for example in the inclusion of an SED).
    @param filename            The name of the file to which the images and metadata should be
                               written; extension should be *.fits.
    @param bandpass_list       A list of bandpass names for which images should be generated and
                               stored.  If None, all WFIRST imaging passbands are used.
                               [default: None]
    @param clobber             Should the routine clobber `filename` (if they already exist)?
                               [default: False]
    """
    from galsim._pyfits import pyfits

    # Check for sane input PSF_dict: non-empty, and SCA keys within [1, n_sca].
    if len(PSF_dict) == 0 or len(PSF_dict) > galsim.wfirst.n_sca or \
       min(PSF_dict.keys()) < 1 or max(PSF_dict.keys()) > galsim.wfirst.n_sca:
        raise ValueError("PSF_dict must come from getPSF()!")

    # Check if file already exists and warn about clobbering.
    if os.path.exists(filename):
        if not clobber:
            raise ValueError("Output file already exists, and clobber is not set!")
        else:
            import warnings
            warnings.warn("Output file already exists, and will be clobbered.")

    # Check that bandpass list input is okay.  It should be strictly a subset of the default
    # list of bandpasses.
    if bandpass_list is None:
        bandpass_list = default_bandpass_list
    else:
        if not isinstance(bandpass_list[0], str):
            raise ValueError("Expected input list of bandpass names!")
        if not set(bandpass_list).issubset(default_bandpass_list):
            err_msg = ''.join(item + ' ' for item in default_bandpass_list)
            raise ValueError(
                "Bandpass list must be a subset of the default list, containing %s"
                % err_msg)

    # Get all the WFIRST bandpasses.
    bandpass_dict = galsim.wfirst.getBandpasses()

    # A nearly-delta-function star to convolve with the PSF; it does not depend on the SCA
    # or the bandpass, so build it once outside the loops.
    star = galsim.Gaussian(sigma=1.e-8, flux=1.)

    # Loop through making images and lists of their relevant parameters.
    im_list = []
    bp_name_list = []
    SCA_index_list = []
    for SCA in PSF_dict:
        PSF = PSF_dict[SCA]
        if not isinstance(PSF, (galsim.ChromaticOpticalPSF,
                                galsim.InterpolatedChromaticObject)):
            raise RuntimeError("Error, PSFs are not ChromaticOpticalPSFs.")
        for bp_name in bandpass_list:
            bandpass = bandpass_dict[bp_name]
            # Flat SED normalized to unit flux through this bandpass.
            star_sed = galsim.SED(lambda x: 1, 'nm', 'flambda').withFlux(1, bandpass)
            obj = galsim.Convolve(star * star_sed, PSF)
            im = obj.drawImage(bandpass, scale=0.5 * galsim.wfirst.pixel_scale,
                               method='no_pixel')
            im_list.append(im)
            bp_name_list.append(bp_name)
            SCA_index_list.append(SCA)

    # Save images to file.
    galsim.fits.writeMulti(im_list, filename, clobber=clobber)

    # Add data to file, after constructing a FITS table.  Watch out for clobbering.
    bp_names = pyfits.Column(name='bandpass', format='A10',
                             array=np.array(bp_name_list))
    SCA_indices = pyfits.Column(name='SCA', format='J',
                                array=np.array(SCA_index_list))
    cols = pyfits.ColDefs([bp_names, SCA_indices])
    tbhdu = pyfits.BinTableHDU.from_columns(cols)
    # Ensure the file handle is closed even if append/flush raises.
    f = pyfits.open(filename, mode='update')
    try:
        f.append(tbhdu)
        f.flush()
    finally:
        f.close()
def write_meds(file_name, obj_list, clobber=True):
    """
    @brief Writes the galaxy, weights, segmaps images to a MEDS file.

    Arguments:
    ----------
    @param file_name:    Name of meds file to be written
    @param obj_list:     List of MultiExposureObjects
    @param clobber       Setting `clobber=True` when `file_name` is given will silently
                         overwrite existing files.  (Default `clobber = True`.)
    """
    import numpy
    import sys
    from galsim._pyfits import pyfits

    # initialise the catalog
    cat = {}
    cat['ncutout'] = []
    cat['box_size'] = []
    cat['start_row'] = []
    cat['id'] = []
    cat['dudrow'] = []
    cat['dudcol'] = []
    cat['dvdrow'] = []
    cat['dvdcol'] = []
    cat['row0'] = []
    cat['col0'] = []

    # initialise the image vectors
    vec = {}
    vec['image'] = []
    vec['seg'] = []
    vec['weight'] = []

    # initialise the image vector index
    n_vec = 0

    # get number of objects
    n_obj = len(obj_list)

    # loop over objects
    for obj in obj_list:

        # initialise the start indices for each image
        start_rows = numpy.ones(MAX_NCUTOUTS) * EMPTY_START_INDEX
        dudrow = numpy.ones(MAX_NCUTOUTS) * EMPTY_JAC_diag
        dudcol = numpy.ones(MAX_NCUTOUTS) * EMPTY_JAC_offdiag
        dvdrow = numpy.ones(MAX_NCUTOUTS) * EMPTY_JAC_offdiag
        dvdcol = numpy.ones(MAX_NCUTOUTS) * EMPTY_JAC_diag
        row0 = numpy.ones(MAX_NCUTOUTS) * EMPTY_SHIFT
        col0 = numpy.ones(MAX_NCUTOUTS) * EMPTY_SHIFT

        # get the number of cutouts (exposures)
        n_cutout = obj.n_cutouts

        # append the catalog for this object
        cat['ncutout'].append(n_cutout)
        cat['box_size'].append(obj.box_size)
        cat['id'].append(obj.id)

        # loop over cutouts
        for i in range(n_cutout):

            # assign the start row to the end of image vector
            start_rows[i] = n_vec
            # update n_vec to point to the end of image vector
            n_vec += len(obj.images[i].array.flatten())

            # append the image vectors
            vec['image'].append(obj.images[i].array.flatten())
            vec['seg'].append(obj.segs[i].array.flatten())
            vec['weight'].append(obj.weights[i].array.flatten())

            # append the Jacobian
            # NOTE(review): this row<->x / col<->y mapping is transposed relative to
            # WriteMEDS (which documents col == x, row == y and assigns dudcol = dudx).
            # Left unchanged here to preserve the legacy output; confirm which
            # convention readers of this format expect.
            dudrow[i] = obj.wcs[i].dudx
            dudcol[i] = obj.wcs[i].dudy
            dvdrow[i] = obj.wcs[i].dvdx
            dvdcol[i] = obj.wcs[i].dvdy
            row0[i] = obj.wcs[i].origin.x
            col0[i] = obj.wcs[i].origin.y

            # check if we are running out of memory
            if sys.getsizeof(vec) > MAX_MEMORY:
                # Bug fix: the original wrote `'...GB ' % MAX_MEMORY / 1.e9`, which
                # (because % binds tighter than /) divided the formatted *string* by
                # 1.e9 and raised a TypeError instead of this MemoryError.
                raise MemoryError(
                    'Running out of memory > %1.0fGB ' % (MAX_MEMORY / 1.e9) +
                    '- you can increase the limit by changing MAX_MEMORY')

        # update the start rows fields in the catalog
        cat['start_row'].append(start_rows)

        # add lists of Jacobians
        cat['dudrow'].append(dudrow)
        cat['dudcol'].append(dudcol)
        cat['dvdrow'].append(dvdrow)
        cat['dvdcol'].append(dvdcol)
        cat['row0'].append(row0)
        cat['col0'].append(col0)

    # concatenate list to one big vector
    vec['image'] = numpy.concatenate(vec['image'])
    vec['seg'] = numpy.concatenate(vec['seg'])
    vec['weight'] = numpy.concatenate(vec['weight'])

    # get the primary HDU
    primary = pyfits.PrimaryHDU()

    # second hdu is the object_data
    cols = []
    cols.append(pyfits.Column(name='ncutout', format='i4', array=cat['ncutout']))
    cols.append(pyfits.Column(name='id', format='i4', array=cat['id']))
    cols.append(pyfits.Column(name='box_size', format='i4', array=cat['box_size']))
    cols.append(pyfits.Column(name='file_id', format='i4', array=[1] * n_obj))
    cols.append(pyfits.Column(name='start_row', format='%di4' % MAX_NCUTOUTS,
                              array=numpy.array(cat['start_row'])))
    cols.append(pyfits.Column(name='orig_row', format='f8', array=[1] * n_obj))
    cols.append(pyfits.Column(name='orig_col', format='f8', array=[1] * n_obj))
    cols.append(pyfits.Column(name='orig_start_row', format='i4', array=[1] * n_obj))
    cols.append(pyfits.Column(name='orig_start_col', format='i4', array=[1] * n_obj))
    cols.append(pyfits.Column(name='dudrow', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['dudrow'])))
    cols.append(pyfits.Column(name='dudcol', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['dudcol'])))
    cols.append(pyfits.Column(name='dvdrow', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['dvdrow'])))
    cols.append(pyfits.Column(name='dvdcol', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['dvdcol'])))
    cols.append(pyfits.Column(name='cutout_row', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['row0'])))
    cols.append(pyfits.Column(name='cutout_col', format='%df8' % MAX_NCUTOUTS,
                              array=numpy.array(cat['col0'])))
    object_data = pyfits.new_table(pyfits.ColDefs(cols))
    object_data.update_ext_name('object_data')

    # third hdu is image_info
    cols = []
    cols.append(pyfits.Column(name='image_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='sky_path', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='seg_path', format='A256',
                              array=['generated_by_galsim']))
    image_info = pyfits.new_table(pyfits.ColDefs(cols))
    image_info.update_ext_name('image_info')

    # fourth hdu is metadata
    cols = []
    cols.append(pyfits.Column(name='cat_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='coadd_seg_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='coadd_srclist', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coadd_wt_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='coaddcat_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='coaddseg_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='cutout_file', format='A256',
                              array=['generated_by_galsim']))
    cols.append(pyfits.Column(name='max_boxsize', format='A3', array=['x']))
    cols.append(pyfits.Column(name='medsconf', format='A3', array=['x']))
    cols.append(pyfits.Column(name='min_boxsize', format='A2', array=['x']))
    cols.append(pyfits.Column(name='se_badpix_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='se_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='se_wt_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='seg_hdu', format='A1', array=['x']))
    cols.append(pyfits.Column(name='sky_hdu', format='A1', array=['x']))
    metadata = pyfits.new_table(pyfits.ColDefs(cols))
    metadata.update_ext_name('metadata')

    # rest of HDUs are image vectors
    image_cutouts = pyfits.ImageHDU(vec['image'], name='image_cutouts')
    weight_cutouts = pyfits.ImageHDU(vec['weight'], name='weight_cutouts')
    seg_cutouts = pyfits.ImageHDU(vec['seg'], name='seg_cutouts')

    # write all
    hdu_list = pyfits.HDUList([
        primary, object_data, image_info, metadata, image_cutouts,
        weight_cutouts, seg_cutouts
    ])
    hdu_list.writeto(file_name, clobber=clobber)