import numpy as np
import sunpy.map
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.wcs import WCS
from reproject import reproject_interp
from reproject.mosaicking import reproject_and_coadd


def combine_maps(maps_list):
    """Combine a list of full-disc maps into a single all-longitude map."""
    # The output shape is set deliberately low to reduce memory consumption.
    shape_out = (180, 360)
    header = sunpy.map.make_fitswcs_header(
        shape_out,
        SkyCoord(0, 0, unit=u.deg,
                 frame="heliographic_stonyhurst",
                 obstime=maps_list[0].date),
        scale=[180 / shape_out[0], 360 / shape_out[1]] * u.deg / u.pix,
        wavelength=int(maps_list[0].meta['wavelnth']) * u.AA,
        projection_code="CAR")
    out_wcs = WCS(header)

    # Weight each pixel by its heliocentric z coordinate (distance towards the
    # observer), normalised and cubed, so pixels near each disc centre dominate.
    coordinates = tuple(map(sunpy.map.all_coordinates_from_map, maps_list))
    weights = [coord.transform_to("heliocentric").z.value
               for coord in coordinates]
    weights = [(w / np.nanmax(w)) ** 3 for w in weights]
    for w in weights:
        w[np.isnan(w)] = 0

    array, _ = reproject_and_coadd(maps_list, out_wcs, shape_out,
                                   input_weights=weights,
                                   reproject_function=reproject_interp,
                                   match_background=True,
                                   background_reference=0)
    outmaps = sunpy.map.Map((array, header))
    return outmaps
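
# Hypothetical usage sketch for combine_maps() above: the filenames are
# placeholders, and the only assumption is that each file is a full-disc EUV
# image that sunpy can load as a Map with a WAVELNTH keyword in its header.
import sunpy.map

maps_list = sunpy.map.Map(['eit_195_20110101.fits',
                           'euvi_a_195_20110101.fits',
                           'euvi_b_195_20110101.fits'])
full_sun = combine_maps(maps_list)
full_sun.save('full_sun_195.fits', overwrite=True)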
def process_coaddition(imlist, outputnames):
    # naxis1, naxis2, naxis3, varlist and the 2-D output header hd2d are
    # assumed to be defined in the enclosing scope; hd2d must describe the
    # output grid (including NAXIS1/NAXIS2), otherwise shape_out has to be
    # passed to reproject_and_coadd explicitly.
    mcube = np.zeros(shape=(naxis3, naxis2, naxis1))
    foot = np.zeros(shape=(naxis3, naxis2, naxis1))
    # varlist = [w.replace('.feather', '.var') for w in imlist]

    N_ims = len(imlist)
    hdu_cube = [None] * N_ims
    hdu_slic = [None] * N_ims
    var_cube = [None] * N_ims
    var_slic = [None] * N_ims
    wcs_obj = [None] * N_ims

    for i, im in enumerate(imlist):
        hdu_cube[i] = fits.open(imlist[i])[0]
        var_cube[i] = fits.open(varlist[i])[0]

    # --- Loop over velocity channels
    for ich in range(naxis3):
        variance_wts = []
        for i, im in enumerate(imlist):
            hdu_slic[i] = fits.PrimaryHDU(data=hdu_cube[i].data[ich],
                                          header=hdu_cube[i].header)
            hdu_slic[i].header['WCSAXES'] = 2
            var_slic[i] = fits.PrimaryHDU(data=var_cube[i].data[ich],
                                          header=var_cube[i].header)
            var_slic[i].header['WCSAXES'] = 2
            # Strip the spectral-axis keywords; ignore_missing avoids a
            # KeyError when a keyword (e.g. CUNIT3) is absent.
            for key in ['CRVAL3', 'CTYPE3', 'CRPIX3', 'CDELT3', 'CUNIT3']:
                hdu_slic[i].header.remove(key, ignore_missing=True)
                var_slic[i].header.remove(key, ignore_missing=True)
            variance_wts.append(1 / var_slic[i].data)
            # Use the celestial sub-WCS; the header is already 2-D at this
            # point, so dropaxis(2) would fail.
            wcs_obj[i] = wcs.WCS(hdu_slic[i]).celestial
        print('Working on channel', ich, 'of cube', outputnames, end='\r')

        data_tuple = [None] * N_ims
        for i, im in enumerate(imlist):
            data_tuple[i] = (hdu_slic[i].data, wcs_obj[i])
        mcube[ich], foot[ich] = reproject_and_coadd(
            data_tuple, hd2d,
            input_weights=variance_wts,
            reproject_function=reproject_interp)

    # Build the 3-D output header from the first input cube, replacing the
    # celestial keywords with those of the mosaic grid.
    hd3d = hdu_cube[0].header
    for key in ['CRPIX1', 'CDELT1', 'CTYPE1', 'CRVAL1',
                'CRPIX2', 'CDELT2', 'CTYPE2', 'CRVAL2',
                'LONPOLE', 'LATPOLE']:
        hd3d[key] = hd2d[key]
    fits.writeto(outputnames + '.cube.fits', mcube.astype(np.float32), hd3d,
                 overwrite=True)

    # Effective RMS from the summed weights; avoid dividing by a zero footprint.
    foot_sqrt = np.sqrt(foot)
    foot_sqrt[foot_sqrt == 0] = np.nan
    wtnse = np.nanmean(1 / foot_sqrt, axis=0, keepdims=True)
    fits.writeto(outputnames + '.rms.fits', wtnse.astype(np.float32), hd3d,
                 overwrite=True)
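
# Hypothetical sketch of how the 2-D template header hd2d used above could be
# built, following the pattern of the template-making snippet further down:
# compute the optimal celestial WCS from one 2-D slice per input cube and keep
# its header together with the output shape. The names template_slices,
# naxis1 and naxis2 are placeholders, not part of the original code.
from reproject.mosaicking import find_optimal_celestial_wcs

wcs_out, shape_out = find_optimal_celestial_wcs(template_slices)
hd2d = wcs_out.to_header()
# Record the grid size so downstream code can allocate (naxis2, naxis1) maps
# and so reproject_and_coadd can infer the output shape from the header.
naxis2, naxis1 = shape_out
hd2d['NAXIS1'] = naxis1
hd2d['NAXIS2'] = naxis2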
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from astropy import units as u
from astropy.visualization.wcsaxes import SphericalCircle
from reproject import reproject_interp
from reproject.mosaicking import find_optimal_celestial_wcs, reproject_and_coadd
import abell_cluster_module as ab

for k in range(0, len(ab.clusters)):
    # with fits.open(work_dir + 'fits/stacked/' + clusters[k] + '_rsi.fits') as hdu_r:
    with fits.open(ab.work_dir + 'fits/best_single/' + ab.short_sci_fn[k][2]) as hdu_r:
        fig = plt.figure(figsize=(12, 12))
        wcs_out, shape_out = find_optimal_celestial_wcs(
            hdu_r[1:len(hdu_r)])    # has only CompImageHDU files
        array, footprint = reproject_and_coadd(
            hdu_r[1:len(hdu_r)], wcs_out, shape_out=shape_out,
            reproject_function=reproject_interp)
        # plt.gca(projection=...) is no longer supported by Matplotlib;
        # create the WCS axes explicitly instead.
        ax = fig.add_subplot(projection=wcs_out)
        ax.imshow(mm.jarrett(array - np.nanmedian(array), np.nanstd(array), 5),
                  cmap='gray_r')
        ax.set_xlabel('R.A.')
        ax.set_ylabel('Decl.')
        c = SphericalCircle(
            (ab.coords_cl_cen[k].ra.value, ab.coords_cl_cen[k].dec.value) * u.deg,
            0.8 * u.deg,
            edgecolor='gray',
            facecolor='none',
        ]:
            if key in hdu_slic[i].header.keys():
                del hdu_slic[i].header[key]
        print('\nWCS for field', field)
        print(repr(wcs.WCS(hdu_slic[i])))
        print('\nFinished regridding field', field, 'for', line)

    # --- Calculate the mosaic WCS
    if line == sline[0]:
        wcs_out, shape_out = find_optimal_celestial_wcs(hdu_slic)
        hd2d = wcs_out.to_header()
        print('\nOutput header:')
        print(repr(hd2d))
        print('Output shape is', shape_out)

    arr, foot = reproject_and_coadd(hdu_slic, wcs_out,
                                    shape_out=shape_out,
                                    reproject_function=reproject_interp)

    # Downsample in RA and DEC if requested
    if binfactor > 1:
        print('Downsampling spatially by a factor of', binfactor)
        arr1 = downsample_axis(arr, binfactor, axis=1)
        hdr1 = downsample_header(hd2d, binfactor, axis=1)
        arr = downsample_axis(arr1, binfactor, axis=0)
        hd2d = downsample_header(hdr1, binfactor, axis=2)
        print('\nOutput header:')
        print(repr(hd2d))

    fits.writeto('template_' + line + '_' + imtype + '.fits', arr, hd2d,
                 overwrite=True)
def make_polmosaic(self, qimages, uimages, pbimages, sb, psf, reference=None, pbclip=None): """ Function to generate the polarisation mosaic in Q and U """ # Set the directories for the mosaicking utils.set_mosdirs(self) # Get the common psf common_psf = psf qcorrimages = [] # to mosaic ucorrimages = [] # to mosaic quncorrimages = [] # to mosaic uuncorrimages = [] # to mosaic qpbweights = [] # of the pixels upbweights = [] # of the pixels qfreqs = [] ufreqs = [] # weight_images = [] for qimg, uimg, pb in zip(qimages, uimages, pbimages): # prepare the images (squeeze, transfer_coordinates, reproject, regrid pbeam, correct...) with pyfits.open(qimg) as f: qimheader = f[0].header qfreqs.append(qimheader['CRVAl3']) qtg = qimheader['OBJECT'] with pyfits.open(uimg) as f: uimheader = f[0].header ufreqs.append(uimheader['CRVAl3']) utg = uimheader['OBJECT'] # convolution with the common psf qreconvolved_image = qimg.replace('.fits', '_reconv_tmp.fits') qreconvolved_image = fm.fits_reconvolve_psf(qimg, common_psf, out=qreconvolved_image) ureconvolved_image = uimg.replace('.fits', '_reconv_tmp.fits') ureconvolved_image = fm.fits_reconvolve_psf(uimg, common_psf, out=ureconvolved_image) # PB correction qtmpimg = utils.make_tmp_copy(qreconvolved_image) utmpimg = utils.make_tmp_copy(ureconvolved_image) qtmppb = utils.make_tmp_copy(pb) utmppb = utils.make_tmp_copy(pb) qtmpimg = fm.fits_squeeze(qtmpimg) # remove extra dimentions utmpimg = fm.fits_squeeze(utmpimg) # remove extra dimentions qtmppb = fm.fits_transfer_coordinates( qtmpimg, qtmppb) # transfer_coordinates utmppb = fm.fits_transfer_coordinates( utmpimg, utmppb) # transfer_coordinates qtmppb = fm.fits_squeeze(qtmppb) # remove extra dimentions utmppb = fm.fits_squeeze(utmppb) # remove extra dimentions with pyfits.open(qtmpimg) as qf: qimheader = qf[0].header with pyfits.open(qtmppb) as qf: qpbhdu = qf[0] qpbheader = qf[0].header qpbarray = qf[0].data if (qimheader['CRVAL1'] != qpbheader['CRVAL1']) or ( qimheader['CRVAL2'] != qpbheader['CRVAL2']) or ( qimheader['CDELT1'] != qpbheader['CDELT1']) or ( qimheader['CDELT2'] != qpbheader['CDELT2']): qpbarray, qreproj_footprint = reproject_interp( qpbhdu, qimheader) else: pass with pyfits.open(utmpimg) as uf: uimheader = uf[0].header with pyfits.open(utmppb) as uf: upbhdu = uf[0] upbheader = uf[0].header upbarray = uf[0].data if (uimheader['CRVAL1'] != upbheader['CRVAL1']) or ( uimheader['CRVAL2'] != upbheader['CRVAL2']) or ( uimheader['CDELT1'] != upbheader['CDELT1']) or ( uimheader['CDELT2'] != upbheader['CDELT2']): upbarray, ureproj_footprint = reproject_interp( upbhdu, uimheader) else: pass qpbarray = np.float32(qpbarray) upbarray = np.float32(upbarray) qpbarray[qpbarray < self.pol_pbclip] = np.nan upbarray[upbarray < self.pol_pbclip] = np.nan qpb_regr_repr = qtmppb.replace('_tmp.fits', '_repr_tmp.fits') upb_regr_repr = utmppb.replace('_tmp.fits', '_repr_tmp.fits') pyfits.writeto(qpb_regr_repr, qpbarray, qimheader, overwrite=True) pyfits.writeto(upb_regr_repr, upbarray, uimheader, overwrite=True) qimg_corr = qreconvolved_image.replace('.fits', '_pbcorr.fits') uimg_corr = ureconvolved_image.replace('.fits', '_pbcorr.fits') qimg_uncorr = qreconvolved_image.replace('.fits', '_uncorr.fits') uimg_uncorr = ureconvolved_image.replace('.fits', '_uncorr.fits') qimg_corr = fm.fits_operation(qtmpimg, qpbarray, operation='/', out=qimg_corr) uimg_corr = fm.fits_operation(utmpimg, upbarray, operation='/', out=uimg_corr) qimg_uncorr = fm.fits_operation(qimg_corr, qpbarray, operation='*', out=qimg_uncorr) uimg_uncorr 
= fm.fits_operation(uimg_corr, upbarray, operation='*', out=uimg_uncorr) # cropping qcropped_image = qimg.replace('.fits', '_mos.fits') ucropped_image = uimg.replace('.fits', '_mos.fits') qcropped_image, qcutout = fm.fits_crop(qimg_corr, out=qcropped_image) ucropped_image, ucutout = fm.fits_crop(uimg_corr, out=ucropped_image) quncorr_cropped_image = qimg.replace('.fits', '_uncorr.fits') uuncorr_cropped_image = uimg.replace('.fits', '_uncorr.fits') quncorr_cropped_image, _ = fm.fits_crop(qimg_uncorr, out=quncorr_cropped_image) uuncorr_cropped_image, _ = fm.fits_crop(uimg_uncorr, out=uuncorr_cropped_image) qcorrimages.append(qcropped_image) ucorrimages.append(ucropped_image) quncorrimages.append(quncorr_cropped_image) uuncorrimages.append(uuncorr_cropped_image) # primary beam weights qwg_arr = qpbarray # qwg_arr[np.isnan(qwg_arr)] = 0 # the NaNs weight 0 qwg_arr = qwg_arr**2 / np.nanmax(qwg_arr**2) # normalize qwcut = Cutout2D(qwg_arr, qcutout.input_position_original, qcutout.shape) qpbweights.append(qwcut.data) uwg_arr = upbarray # uwg_arr[np.isnan(uwg_arr)] = 0 # the NaNs weight 0 uwg_arr = uwg_arr**2 / np.nanmax(uwg_arr**2) # normalize uwcut = Cutout2D(uwg_arr, ucutout.input_position_original, ucutout.shape) upbweights.append(uwcut.data) # create the wcs and footprint for the output mosaic print( 'Generating primary beam corrected and uncorrected polarisation mosaics for Stokes Q and U for subband ' + str(sb).zfill(2) + '.') qwcs_out, qshape_out = find_optimal_celestial_wcs(qcorrimages, auto_rotate=False, reference=reference) uwcs_out, ushape_out = find_optimal_celestial_wcs(ucorrimages, auto_rotate=False, reference=reference) qarray, qfootprint = reproject_and_coadd( qcorrimages, qwcs_out, shape_out=qshape_out, reproject_function=reproject_interp, input_weights=qpbweights) uarray, ufootprint = reproject_and_coadd( ucorrimages, uwcs_out, shape_out=ushape_out, reproject_function=reproject_interp, input_weights=upbweights) qarray2, q_ = reproject_and_coadd(quncorrimages, qwcs_out, shape_out=qshape_out, reproject_function=reproject_interp, input_weights=qpbweights) uarray2, u_ = reproject_and_coadd(uuncorrimages, uwcs_out, shape_out=ushape_out, reproject_function=reproject_interp, input_weights=upbweights) qarray = np.float32(qarray) uarray = np.float32(uarray) qarray2 = np.float32(qarray2) uarray2 = np.float32(uarray2) # insert common PSF into the header qpsf = common_psf.to_header_keywords() upsf = common_psf.to_header_keywords() qhdr = qwcs_out.to_header() uhdr = uwcs_out.to_header() qhdr.insert('RADESYS', ('FREQ', np.nanmean(qfreqs))) uhdr.insert('RADESYS', ('FREQ', np.nanmean(ufreqs))) qhdr.insert('RADESYS', ('BMAJ', qpsf['BMAJ'])) uhdr.insert('RADESYS', ('BMAJ', upsf['BMAJ'])) qhdr.insert('RADESYS', ('BMIN', qpsf['BMIN'])) uhdr.insert('RADESYS', ('BMIN', upsf['BMIN'])) qhdr.insert('RADESYS', ('BPA', qpsf['BPA'])) uhdr.insert('RADESYS', ('BPA', upsf['BPA'])) # insert units to header: qhdr.insert('RADESYS', ('BUNIT', 'JY/BEAM')) uhdr.insert('RADESYS', ('BUNIT', 'JY/BEAM')) pyfits.writeto(self.polmosaicdir + '/' + str(qtg).upper() + '_' + str(sb).zfill(2) + '_Q.fits', data=qarray, header=qhdr, overwrite=True) pyfits.writeto(self.polmosaicdir + '/' + str(utg).upper() + '_' + str(sb).zfill(2) + '_U.fits', data=uarray, header=uhdr, overwrite=True) pyfits.writeto(self.polmosaicdir + '/' + str(qtg).upper() + '_' + str(sb).zfill(2) + '_Q_uncorr.fits', data=qarray2, header=qhdr, overwrite=True) pyfits.writeto(self.polmosaicdir + '/' + str(utg).upper() + '_' + str(sb).zfill(2) + '_U_uncorr.fits', 
data=uarray2, header=uhdr, overwrite=True) utils.clean_polmosaic_tmp_data(self)
def create_mosaic(path, field_name, obs_date): import matplotlib.pyplot as plt import glob import numpy as np from astropy.wcs import WCS import glob import astropy.io.fits as fits import os from astropy.time import Time from datetime import datetime, timedelta from reproject import reproject_interp from reproject.mosaicking import reproject_and_coadd from reproject.mosaicking import find_optimal_celestial_wcs middlelink = '/obs/lenses_EPFL/PRERED/VST/reduced/' + field_name + '/' allothers = '/obs/lenses_EPFL/PRERED/VST/reduced/' + field_name + '_wide_field/' #middlelink = './data2/' #allothers = './data3/' obsdate = obs_date #corrected for LST difference date = datetime.strftime(datetime.strptime(obsdate, '%Y-%m-%d')-timedelta(days=1), '%Y-%m-%d') finalseepoch = np.sort(glob.glob(allothers+'*'+obsdate+'*.fits')) #[:1] eachtimes = np.unique([epochname.split(obsdate)[1].split('_')[0] for epochname in finalseepoch]) coadd = fits.open(glob.glob(middlelink+'/mosaic/*'+date+'*.fits')[0]) names = [] for aaa in range(len(eachtimes)): singleepochs = glob.glob(allothers+'*'+obsdate+eachtimes[aaa]+'*.fits') finalchip = glob.glob(middlelink+'*'+obsdate+eachtimes[aaa]+'*.fits') allepochs = np.array(finalchip+list(singleepochs)) #for epoch in allepochs: # print('scp [email protected]:'+epoch+' ./data3/') #print('scp [email protected]:'+glob.glob(middlelink+'mosaic/*'+date+'*.fits')[0]+' ./data2/') #allepochs = glob.glob(allothers+'/*') all_hdus = [] for epoch in allepochs: all_hdus.append(fits.open(epoch)[0]) #array, footprint = reproject_interp(hdu2, coadd.header) print(all_hdus) from astropy import units as u wcs_out, shape_out = find_optimal_celestial_wcs(all_hdus, resolution=2.14 * u.arcsec) print(wcs_out) array, footprint = reproject_and_coadd(all_hdus, wcs_out, shape_out=shape_out, reproject_function=reproject_interp) datestring = [allepochs[0].split('/')[-1].split('_')[0][6:]] starttime = Time(datestring, format='isot', scale='utc').mjd[0] exptime = all_hdus[0].header['EXPTIME']/(24.*3600.) endtime = starttime+exptime header = wcs_out.to_header() primary_hdu = fits.PrimaryHDU(array, header=header) primary_hdu.header['STARTMJD'] = starttime primary_hdu.header['ENDMJD'] = endtime #hdu = fits.ImageHDU(array) hdul = fits.HDUList([primary_hdu]) name = allepochs[0].split('/')[-1].split('_')[0]+'_fullfield_binned.fits' hdul.writeto(path+name, overwrite=True) names.append(name) return names
# stacked zero point (ZP; mag = mag0 (ZP_inst=25) + ZP)
stack_a = [[4.05, 6.06, 6.38], [3.96, 6.04, 6.22], [3.96, 6.15, 6.42]]

for k in range(0, len(clusters)):
    with fits.open(work_dir + 'fits/stacked/' + fn[k][0]) as hdu_u, \
            fits.open(work_dir + 'fits/stacked/' + fn[k][1]) as hdu_g, \
            fits.open(work_dir + 'fits/stacked/' + fn[k][2]) as hdu_r:
        wcs_out_u, shape_out_u = find_optimal_celestial_wcs(
            hdu_u[1:], reference=coords_cl_cen)
        wcs_out_g, shape_out_g = find_optimal_celestial_wcs(
            hdu_g[1:], reference=coords_cl_cen)
        wcs_out_r, shape_out_r = find_optimal_celestial_wcs(
            hdu_r[1:], reference=coords_cl_cen)

        # All three bands are reprojected onto the r-band grid.
        array_u, footprint_u = reproject_and_coadd(
            hdu_u[1:], wcs_out_r, shape_out=shape_out_r,
            reproject_function=reproject_interp)
        array_g, footprint_g = reproject_and_coadd(
            hdu_g[1:], wcs_out_r, shape_out=shape_out_r,
            reproject_function=reproject_interp)
        array_r, footprint_r = reproject_and_coadd(
            hdu_r[1:], wcs_out_r, shape_out=shape_out_r,
            reproject_function=reproject_interp)

        # xoff_u, yoff_u, exoff_u, eyoff_u = chi2_shift(array_r[:, :array_u.shape[1]],
        #                                               array_u[:array_r.shape[0], :],
def make_polmosaic(self, qimages, uimages, pbimages, sb, psf, reference=None, pbclip=None): """ Function to generate the polarisation mosaic in Q and U """ # Set the directories for the mosaicking utils.set_mosdirs(self) # Get the common psf common_psf = psf qcorrimages = [] # to mosaic ucorrimages = [] # to mosaic qpbweights = [] # of the pixels upbweights = [] # of the pixels qrmsweights = [] # of the images themself urmsweights = [] # of the images themself qfreqs = [] ufreqs = [] # weight_images = [] for qimg, uimg, pb in zip(qimages, uimages, pbimages): # prepare the images (squeeze, transfer_coordinates, reproject, regrid pbeam, correct...) with pyfits.open(qimg) as f: qimheader = f[0].header qfreqs.append(qimheader['CRVAl3']) qtg = qimheader['OBJECT'] with pyfits.open(uimg) as f: uimheader = f[0].header ufreqs.append(uimheader['CRVAl3']) utg = uimheader['OBJECT'] qimg = fm.fits_squeeze(qimg) # remove extra dimentions uimg = fm.fits_squeeze(uimg) # remove extra dimentions pb = fm.fits_transfer_coordinates(qimg, pb) # transfer_coordinates pb = fm.fits_squeeze(pb) # remove extra dimensions with pyfits.open(qimg) as f: qimheader = f[0].header qimdata = f[0].data with pyfits.open(uimg) as f: uimheader = f[0].header uimdata = f[0].data with pyfits.open(pb) as f: pbhdu = f[0] autoclip = np.nanmin(f[0].data) # reproject qreproj_arr, qreproj_footprint = reproject_interp( pbhdu, qimheader) ureproj_arr, ureproj_footprint = reproject_interp( pbhdu, uimheader) pbclip = self.pol_pbclip or autoclip print('PB is clipped at %f level', pbclip) qreproj_arr = np.float32(qreproj_arr) ureproj_arr = np.float32(ureproj_arr) qreproj_arr[qreproj_arr < pbclip] = np.nan ureproj_arr[ureproj_arr < pbclip] = np.nan qpb_regr_repr = pb.replace('.fits', '_repr.fits') upb_regr_repr = pb.replace('.fits', '_repr.fits') pyfits.writeto(qpb_regr_repr, qreproj_arr, qimheader, overwrite=True) pyfits.writeto(upb_regr_repr, ureproj_arr, uimheader, overwrite=True) # convolution with common psf qreconvolved_image = qimg.replace('.fits', '_reconv.fits') qreconvolved_image = fm.fits_reconvolve_psf(qimg, common_psf, out=qreconvolved_image) ureconvolved_image = uimg.replace('.fits', '_reconv.fits') ureconvolved_image = fm.fits_reconvolve_psf(uimg, common_psf, out=ureconvolved_image) # PB correction qpbcorr_image = qreconvolved_image.replace('_reconv.fits', '_pbcorr.fits') qpbcorr_image = fm.fits_operation(qreconvolved_image, qreproj_arr, operation='/', out=qpbcorr_image) upbcorr_image = ureconvolved_image.replace('_reconv.fits', '_pbcorr.fits') upbcorr_image = fm.fits_operation(ureconvolved_image, ureproj_arr, operation='/', out=upbcorr_image) # cropping qcropped_image = qimg.replace('.fits', '_mos.fits') qcropped_image, qcutout = fm.fits_crop(qpbcorr_image, out=qcropped_image) qcorrimages.append(qcropped_image) ucropped_image = uimg.replace('.fits', '_mos.fits') ucropped_image, ucutout = fm.fits_crop(upbcorr_image, out=ucropped_image) ucorrimages.append(ucropped_image) # primary beam weights qwg_arr = qreproj_arr - pbclip # the edges weight ~0 qwg_arr[np.isnan(qwg_arr)] = 0 # the NaNs weight 0 qwg_arr = qwg_arr / np.nanmax(qwg_arr) # normalize qwcut = Cutout2D(qwg_arr, qcutout.input_position_original, qcutout.shape) qpbweights.append(qwcut.data) uwg_arr = ureproj_arr - pbclip # the edges weight ~0 uwg_arr[np.isnan(uwg_arr)] = 0 # the NaNs weight 0 uwg_arr = uwg_arr / np.nanmax(uwg_arr) # normalize uwcut = Cutout2D(uwg_arr, ucutout.input_position_original, ucutout.shape) upbweights.append(uwcut.data) # weight the images by RMS 
noise over the edges ql, qm = qimdata.shape[0] // 10, qimdata.shape[1] // 10 qmask = np.ones(qimdata.shape, dtype=np.bool) qmask[ql:-ql, qm:-qm] = False qimg_noise = np.nanstd(qimdata[qmask]) qimg_weight = 1 / qimg_noise**2 qrmsweights.append(qimg_weight) ul, um = uimdata.shape[0] // 10, uimdata.shape[1] // 10 umask = np.ones(uimdata.shape, dtype=np.bool) umask[ul:-ul, um:-um] = False uimg_noise = np.nanstd(uimdata[umask]) uimg_weight = 1 / uimg_noise**2 urmsweights.append(uimg_weight) # merge the image rms weights and the primary beam pixel weights: qweights = [ qp * qr / max(qrmsweights) for qp, qr in zip(qpbweights, qrmsweights) ] uweights = [ up * ur / max(urmsweights) for up, ur in zip(upbweights, urmsweights) ] # create the wcs and footprint for the output mosaic qwcs_out, qshape_out = find_optimal_celestial_wcs(qcorrimages, auto_rotate=False, reference=reference) uwcs_out, ushape_out = find_optimal_celestial_wcs(ucorrimages, auto_rotate=False, reference=reference) qarray, qfootprint = reproject_and_coadd( qcorrimages, qwcs_out, shape_out=qshape_out, reproject_function=reproject_interp, input_weights=qweights) uarray, ufootprint = reproject_and_coadd( ucorrimages, uwcs_out, shape_out=ushape_out, reproject_function=reproject_interp, input_weights=uweights) qarray = np.float32(qarray) uarray = np.float32(uarray) # insert common PSF into the header qpsf = common_psf.to_header_keywords() qhdr = qwcs_out.to_header() qhdr.insert('RADESYS', ('FREQ', np.nanmean(qfreqs))) qhdr.insert('RADESYS', ('BMAJ', qpsf['BMAJ'])) qhdr.insert('RADESYS', ('BMIN', qpsf['BMIN'])) qhdr.insert('RADESYS', ('BPA', qpsf['BPA'])) upsf = common_psf.to_header_keywords() uhdr = qwcs_out.to_header() uhdr.insert('RADESYS', ('FREQ', np.nanmean(ufreqs))) uhdr.insert('RADESYS', ('BMAJ', upsf['BMAJ'])) uhdr.insert('RADESYS', ('BMIN', upsf['BMIN'])) uhdr.insert('RADESYS', ('BPA', upsf['BPA'])) pyfits.writeto(self.polmosaicdir + '/' + str(qtg).upper() + '_' + str(sb).zfill(2) + '_Q.fits', data=qarray, header=qhdr, overwrite=True) pyfits.writeto(self.polmosaicdir + '/' + str(utg).upper() + '_' + str(sb).zfill(2) + '_U.fits', data=uarray, header=uhdr, overwrite=True) utils.clean_polmosaic_tmp_data(self, sb)
        new_world.wcs.crval = [CRVAL1, CRVAL2]
        new_world.wcs.ctype = ["GLON-SIN", "GLAT-SIN"]
        array = hdu[0].data[i]
        file_list.append((array, new_world))

    # find wcs:
    wcs_out, shape_out = find_optimal_celestial_wcs(file_list,
                                                    projection="SIN",
                                                    resolution=resolution * u.deg)

    # combine array:
    array, footprint = reproject_and_coadd(file_list, wcs_out,
                                           shape_out=shape_out,
                                           reproject_function=reproject_interp,
                                           match_background=False)
    total_array[i, :, :] = array[:, :]

    # clip array at zero, and set nan to zero.
    total_array[i, :, :] = np.nan_to_num(total_array[i, :, :])
    total_array[i, :, :] = np.clip(total_array[i, :, :], a_min=0, a_max=None)

    # smooth array:
    # gauss_kernel = Gaussian2DKernel(2.)
    # total_array[i, :, :] = convolve(total_array[i, :, :], gauss_kernel)

    # write output:
    this_header = wcs_out.to_header()
    CRPIX1 = this_header["CRPIX1"]
    hdu_cube[i] = fits.open(imlist[i])[0]
    var_cube[i] = fits.open(varlist[i])[0]

variance_wts = []
for i, im in enumerate(imlist):
    hdu_slic[i] = fits.PrimaryHDU(data=hdu_cube[i].data,
                                  header=hdu_cube[i].header)
    var_slic[i] = fits.PrimaryHDU(data=var_cube[i].data,
                                  header=var_cube[i].header)
    variance_wts.append(1 / var_slic[i].data)
    wcs_obj[i] = wcs.WCS(hdu_slic[i])

data_tuple = [None] * N_ims
for i, im in enumerate(imlist):
    data_tuple[i] = (hdu_slic[i].data, wcs_obj[i])

mcube, foot = reproject_and_coadd(data_tuple, hd2d,
                                  input_weights=variance_wts,
                                  reproject_function=reproject_interp)

fits.writeto(outputnames + '.mosaic.fits', mcube.astype(np.float32), hd2d,
             overwrite=True)
fits.writeto(outputnames + '.foot.fits', foot.astype(np.float32), hd2d,
             overwrite=True)

foot_sqrt = np.sqrt(foot)
foot_sqrt[foot_sqrt == 0] = np.nan
wtnse = np.nanmean(1 / foot_sqrt, axis=0, keepdims=True)
fits.writeto(outputnames + '.rms.fits',
def make_mosaic_image(evtfile_list, image_file, emin=None, emax=None, reblock=1, use_expmap=False, expmap_energy=None, expmap_weights=None, normalize=True, nhistx=16, nhisty=16, overwrite=False): """ Make a single FITS image from a grid of observations. Optionally, an exposure map can be computed and a flux image may be generated. Parameters ---------- evtfile_list : filename The ASCII table produced by :meth:`~soxs.grid.observe_grid_source` containing the information about the event files and their locations on the sky. image_file : filename The name of the FITS image file to be written. This name will also be used for the exposure map and flux files if they are written. emin : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional The minimum energy of the photons to put in the image, in keV. emax : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional The maximum energy of the photons to put in the image, in keV. reblock : integer, optional Supply an integer power of 2 here to make an exposure map with a different binning. Default: 1 use_expmap : boolean, optional Whether or not to use (and potentially generate) an exposure map and a flux map. Default: False expmap_energy : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, or NumPy array, optional The energy in keV to use when computing the exposure map, or a set of energies to be used with the *weights* parameter. If providing a set, it must be in keV. expmap_weights : array-like, optional The weights to use with a set of energies given in the *energy* parameter. Used to create a more accurate exposure map weighted by a range of energies. Default: None overwrite : boolean, optional Whether or not to overwrite an existing file with the same name. 
Default: False """ try: from reproject.mosaicking import find_optimal_celestial_wcs, \ reproject_and_coadd from reproject import reproject_interp except ImportError: raise ImportError("The mosaic functionality of SOXS requires the " "'reproject' package to be installed!") t = ascii.read(evtfile_list, format='commented_header', guess=False, header_start=0, delimiter="\t") files = [] for row in t: evt_file = row["evtfile"] img_file = evt_file.replace("evt", "img") if use_expmap: emap_file = evt_file.replace("evt", "expmap") make_exposure_map(evt_file, emap_file, energy=expmap_energy, weights=expmap_weights, normalize=normalize, overwrite=overwrite, reblock=reblock, nhistx=nhistx, nhisty=nhisty) else: emap_file = None write_image(evt_file, img_file, emin=emin, emax=emax, overwrite=overwrite, reblock=reblock) files.append([img_file, emap_file]) img_hdus = [fits.open(fns[0], memmap=True)[0] for fns in files] wcs_out, shape_out = find_optimal_celestial_wcs(img_hdus) img, footprint = reproject_and_coadd(img_hdus, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, combine_function='sum') hdu = fits.PrimaryHDU(img, header=wcs_out.to_header()) hdu.writeto(image_file, overwrite=overwrite) if use_expmap: if expmap_energy is None: raise RuntimeError("The 'expmap_energy' argument must be set if " "making a mosaicked exposure map!") emap_hdus = [fits.open(fns[1], memmap=True)[1] for fns in files] emap, footprint = reproject_and_coadd( emap_hdus, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, combine_function='sum') hdu = fits.PrimaryHDU(emap, header=wcs_out.to_header()) expmap_file = image_file.replace("fits", "expmap") hdu.writeto(expmap_file, overwrite=overwrite) with np.errstate(invalid='ignore', divide='ignore'): flux = img / emap flux[np.isinf(flux)] = 0.0 flux = np.nan_to_num(flux) flux[flux < 0.0] = 0.0 hdu = fits.PrimaryHDU(flux, header=wcs_out.to_header()) flux_file = image_file.replace("fits", "flux") hdu.writeto(flux_file, overwrite=overwrite)
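
# Hypothetical call of make_mosaic_image() defined above; the event-file table
# name, output name, and energies are placeholders, and the exposure-map branch
# is exercised with a single representative energy in keV.
make_mosaic_image("obs_grid_events.txt", "grid_mosaic.fits",
                  emin=0.5, emax=2.0,
                  use_expmap=True, expmap_energy=1.0,
                  overwrite=True)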
def main(images, pbimages, reference=None, pbclip=0.1, output='mosaic.fits', clean_temporary_files=True, rmnoise=False, logger=None): if logger is None: logger = logging.getLogger('amos') common_psf = get_common_psf(images) corrimages = [] # to mosaic uncorrimages = [] pbweights = [] # of the pixels rmsweights = [] # of the images themself # weight_images = [] for img, pb in zip(images, pbimages): logger.info('Image: %s', img) logger.info('PBeam: %s', pb) # prepare the images (squeeze, transfer_coordinates, reproject, regrid pbeam, correct...) # convolution with common psf reconvolved_image = os.path.basename(img.replace('.fits', '_reconv_tmp.fits')) reconvolved_image = fits_reconvolve_psf(img, common_psf, out=reconvolved_image) # PB correction pbcorr_image = os.path.basename(img.replace('.fits', '_pbcorr_tmp.fits')) pbcorr_image, uncorr_image, pbarray = pbcorrect(reconvolved_image, pb, pbclip=pbclip, rmnoise=rmnoise, out=pbcorr_image) # cropping cropped_image = os.path.basename(img.replace('.fits', '_mos.fits')) cropped_image, cutout = fits_crop(pbcorr_image, out=cropped_image) uncorr_cropped_image = os.path.basename(img.replace('.fits', '_uncorr.fits')) uncorr_cropped_image, _ = fits_crop(uncorr_image, out=uncorr_cropped_image) corrimages.append(cropped_image) uncorrimages.append(uncorr_cropped_image) # primary beam weights wg_arr = pbarray # wg_arr[np.isnan(wg_arr)] = 0 # the NaNs weight 0 wg_arr = wg_arr**2 / np.nanmax(wg_arr**2) # normalize wcut = Cutout2D(wg_arr, cutout.input_position_original, cutout.shape) pbweights.append(wcut.data) # weight the images by RMS noise over the edges # imdata = np.squeeze(fits.getdata(img)) # l, m = imdata.shape[0]//10, imdata.shape[1]//10 # mask = np.ones(imdata.shape, dtype=np.bool) # mask[l:-l,m:-m] = False # img_noise = np.nanstd(imdata[mask]) # img_weight = 1 / img_noise**2 # rmsweights.append(img_weight) # merge the image rms weights and the primary beam pixel weights: # weights = [p*r/max(rmsweights) for p, r in zip(pbweights, rmsweights)] # create the wcs and footprint for output mosaic logging.info('Mosaicing...') wcs_out, shape_out = find_optimal_celestial_wcs(corrimages, auto_rotate=False, reference=reference) array, footprint = reproject_and_coadd(corrimages, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights=pbweights) array2, _ = reproject_and_coadd(uncorrimages, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights=pbweights) array = np.float32(array) array2 = np.float32(array2) # plt.imshow(array) # insert common PSF into the header psf = common_psf.to_header_keywords() hdr = wcs_out.to_header() hdr.insert('RADESYS', ('FREQ', 1.4E9)) hdr.insert('RADESYS', ('BMAJ', psf['BMAJ'])) hdr.insert('RADESYS', ('BMIN', psf['BMIN'])) hdr.insert('RADESYS', ('BPA', psf['BPA'])) # insert units to header: hdr.insert('RADESYS', ('BUNIT', 'JY/BEAM')) fits.writeto(output, data=array, header=hdr, overwrite=True) fits.writeto(output.replace('.fits', '_uncorr.fits'), data=array2, header=hdr, overwrite=True) logging.info('Wrote %s', output) if clean_temporary_files: logging.debug('Cleaning directory') clean_mosaic_tmp_data('.')
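
# Hypothetical driver for main() above: the image and primary-beam FITS names
# are placeholders and are only assumed to be sorted consistently so that each
# image is paired with its own beam model.
import glob

images = sorted(glob.glob('beams/image_*.fits'))
pbimages = sorted(glob.glob('beams/pb_*.fits'))
main(images, pbimages, pbclip=0.1, output='field_mosaic.fits', rmnoise=False)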
                             obstime=eui_map.date)
ref_coord = HeliographicCarrington(0 * u.deg, 0 * u.deg,
                                   sunpy.sun.constants.radius,
                                   obstime=eui_map.date,
                                   observer=ref_coord_observer)
header = sunpy.map.make_fitswcs_header(
    shape_out, ref_coord,
    scale=[180 / shape_out[0], 360 / shape_out[1]] * u.deg / u.pix,
    projection_code="CAR")
out_wcs = WCS(header)

###############################################################################
# Next we reproject and add together the two maps.

array, footprint = reproject_and_coadd([eui_map, aia_map], out_wcs, shape_out,
                                       reproject_function=reproject_interp)

outmap = sunpy.map.Map((array, header))
outmap.plot_settings = aia_map.plot_settings

###############################################################################
# Finally, we'll plot the reprojected map.

fig = plt.figure()
ax = fig.add_subplot(projection=outmap)
outmap.plot(axes=ax)
plt.show()
             unit=u.deg,
             frame="heliographic_stonyhurst",
             obstime=maps[0].date),
    scale=[180 / shape_out[0], 360 / shape_out[1]] * u.deg / u.pix,
    wavelength=int(maps[0].meta['wavelnth']) * u.AA,
    projection_code="CAR")
out_wcs = WCS(header)

coordinates = tuple(map(sunpy.map.all_coordinates_from_map, maps))
weights = [coord.transform_to("heliocentric").z.value
           for coord in coordinates]
weights = [(w / np.nanmax(w))**3 for w in weights]
for w in weights:
    w[np.isnan(w)] = 0

array, _ = reproject_and_coadd(maps, out_wcs, shape_out,
                               input_weights=weights,
                               reproject_function=reproject_interp,
                               match_background=True,
                               background_reference=0)
outmap = sunpy.map.Map((array, header))
outmap.plot_settings = maps[0].plot_settings
outmap.nickname = 'EIT + EUVI/A + EUVI/B'

# Output
outmap.save(filename_output, filetype='fits', overwrite=True)
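
# Hypothetical setup for the fragment above, assuming the three EUV images
# (SOHO/EIT plus STEREO-A/B EUVI) are available as local FITS files; the
# filenames, shape_out and filename_output are placeholders only.
import sunpy.map

maps = sunpy.map.Map(['eit_195.fits', 'euvi_a_195.fits', 'euvi_b_195.fits'])
shape_out = (180, 360)   # same coarse all-Sun grid as in the snippets above
filename_output = 'eit_euvi_195_fullsun.fits'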
for fn in files:
    basename = os.path.basename(fn)
    if os.path.exists(basename):
        continue
    else:
        with open(basename, 'wb') as fh:
            res = requests.get(f'http://miris.kasi.re.kr/{fn}', stream=True)
            res.raise_for_status()
            fh.write(res.content)

hdus = [fits.open(x) for x in glob.glob("MS*.fits")]

# TODO: Change this to GLON-CAR/GLAT-CAR - the GLON-TAN/GLAT-TAN projection
# looks pretty wacky far from the CMZ!
wcs_out, shape_out = find_optimal_celestial_wcs([h[1] for h in hdus])

array_line, footprint = reproject_and_coadd(
    [h[1] for h in hdus if h[0].header['OBS-FILT'] == 'PAAL'],
    wcs_out, shape_out=shape_out, reproject_function=reproject_interp)
array_cont, footprint = reproject_and_coadd(
    [h[1] for h in hdus if h[0].header['OBS-FILT'] == 'PAAC'],
    wcs_out, shape_out=shape_out, reproject_function=reproject_interp)

fits.PrimaryHDU(data=array_line, header=wcs_out.to_header()).writeto(
    'gc_mosaic_miris_line.fits', overwrite=True)
fits.PrimaryHDU(data=array_line - array_cont, header=wcs_out.to_header()).writeto(
    'gc_mosaic_miris_line_minus_cont.fits', overwrite=True)
fits.PrimaryHDU(data=array_cont, header=wcs_out.to_header()).writeto(
    'gc_mosaic_miris_cont.fits', overwrite=True)

line = fits.open('gc_mosaic_miris_line.fits')
cont = fits.open('gc_mosaic_miris_cont.fits')

# "best-fit" offset power-law fit to line vs cont
contsub = line[0].data - cont[0].data**1.1 * 0.35
fits.PrimaryHDU(data=contsub, header=wcs_out.to_header()).writeto(
    'gc_mosaic_miris_line_minus_cont_scaled_pow1.1_x0p35.fits')
for ifu_name, tscube in hdfcont_hdr2.itercubes():
    slice_ = tscube.f50_from_noise(tscube.sigmas[sel_slice, :, :], sncut)
    hdus.append(
        fits.PrimaryHDU(slice_ * 1e17,
                        header=tscube.wcs.celestial.to_header()))
    hdus_mask.append(
        fits.PrimaryHDU(slice_.mask.astype(int),
                        header=tscube.wcs.celestial.to_header()))
    shape = tscube.sigmas.shape
    ra, dec, lambda_ = tscube.wcs.all_pix2world(shape[2] / 2., shape[1] / 2.,
                                                shape[0] / 2., 0)
    ifu_name_list.append(ifu_name)
    ifu_ra.append(ra)
    ifu_dec.append(dec)

wcs_out, shape_out = find_optimal_celestial_wcs(hdus, reference=shot_coords)
wcs_mask_out, shape_mask_out = find_optimal_celestial_wcs(hdus_mask,
                                                          reference=shot_coords)

array, footprint = reproject_and_coadd(hdus, wcs_out,
                                       shape_out=shape_out,
                                       reproject_function=reproject_exact)
# mask_array, footprint = reproject_and_coadd(hdus_mask,
#                                             wcs_mask_out,
#                                             shape_out=shape_mask_out,
#                                             reproject_function=reproject_exact)

config = HDRconfig()
galaxy_cat = Table.read(config.rc3cat, format='ascii')
gal_coords = SkyCoord(galaxy_cat['Coords'], frame='icrs')
sel_reg = np.where(shot_coords.separation(gal_coords) < 1. * u.deg)[0]

gal_regions = []
for idx in sel_reg:
    gal_regions.append(create_gal_ellipse(galaxy_cat, row_index=idx, d25scale=3))
def make_contmosaic(self, images, pbimages, reference=None, rmnoise=False): """ Function to generate the continuum mosaic """ # Get the common psf common_psf = utils.get_common_psf(self, images) print('Clipping primary beam response at the %f level', str(self.cont_pbclip)) corrimages = [] # to mosaic uncorrimages = [] pbweights = [] # of the pixels freqs = [] # weight_images = [] for img, pb in zip(images, pbimages): print('Doing primary beam correction for Beam ' + str(img.split('/')[-1].replace('.fits', '').lstrip('I'))) # prepare the images (squeeze, transfer_coordinates, reproject, regrid pbeam, correct...) with pyfits.open(img) as f: imheader = f[0].header freqs.append(imheader['CRVAl3']) tg = imheader['OBJECT'] # convolution with common psf reconvolved_image = img.replace('.fits', '_reconv_tmp.fits') reconvolved_image = fm.fits_reconvolve_psf(img, common_psf, out=reconvolved_image) # PB correction pbcorr_image = reconvolved_image.replace('.fits', '_pbcorr_tmp.fits') tmpimg = utils.make_tmp_copy(reconvolved_image) tmppb = utils.make_tmp_copy(pb) tmpimg = fm.fits_squeeze(tmpimg) # remove extra dimentions tmppb = fm.fits_transfer_coordinates(tmpimg, tmppb) # transfer_coordinates tmppb = fm.fits_squeeze(tmppb) # remove extra dimentions with pyfits.open(tmpimg) as f: imheader = f[0].header with pyfits.open(tmppb) as f: pbhdu = f[0] pbheader = f[0].header pbarray = f[0].data if (imheader['CRVAL1'] != pbheader['CRVAL1']) or ( imheader['CRVAL2'] != pbheader['CRVAL2']) or ( imheader['CDELT1'] != pbheader['CDELT1']) or ( imheader['CDELT2'] != pbheader['CDELT2']): pbarray, reproj_footprint = reproject_interp( pbhdu, imheader) else: pass pbarray = np.float32(pbarray) pbarray[pbarray < self.cont_pbclip] = np.nan pb_regr_repr = tmppb.replace('_tmp.fits', '_repr_tmp.fits') pyfits.writeto(pb_regr_repr, pbarray, imheader, overwrite=True) img_corr = reconvolved_image.replace('.fits', '_pbcorr.fits') img_uncorr = reconvolved_image.replace('.fits', '_uncorr.fits') img_corr = fm.fits_operation(tmpimg, pbarray, operation='/', out=img_corr) img_uncorr = fm.fits_operation(img_corr, pbarray, operation='*', out=img_uncorr) # cropping cropped_image = img.replace('.fits', '_mos.fits') cropped_image, cutout = fm.fits_crop(img_corr, out=cropped_image) uncorr_cropped_image = img.replace('.fits', '_uncorr.fits') uncorr_cropped_image, _ = fm.fits_crop(img_uncorr, out=uncorr_cropped_image) corrimages.append(cropped_image) uncorrimages.append(uncorr_cropped_image) # primary beam weights wg_arr = pbarray # wg_arr[np.isnan(wg_arr)] = 0 # the NaNs weight 0 wg_arr = wg_arr**2 / np.nanmax(wg_arr**2) # normalize wcut = Cutout2D(wg_arr, cutout.input_position_original, cutout.shape) pbweights.append(wcut.data) # create the wcs and footprint for output mosaic print( 'Generating primary beam corrected and uncorrected continuum mosaic.' 
) wcs_out, shape_out = find_optimal_celestial_wcs(corrimages, auto_rotate=False, reference=reference) array, footprint = reproject_and_coadd( corrimages, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights=pbweights) array2, _ = reproject_and_coadd(uncorrimages, wcs_out, shape_out=shape_out, reproject_function=reproject_interp, input_weights=pbweights) array = np.float32(array) array2 = np.float32(array2) # insert common PSF into the header psf = common_psf.to_header_keywords() hdr = wcs_out.to_header() hdr.insert('RADESYS', ('FREQ', np.nanmean(freqs))) hdr.insert('RADESYS', ('BMAJ', psf['BMAJ'])) hdr.insert('RADESYS', ('BMIN', psf['BMIN'])) hdr.insert('RADESYS', ('BPA', psf['BPA'])) # insert units to header: hdr.insert('RADESYS', ('BUNIT', 'JY/BEAM')) pyfits.writeto(self.contmosaicdir + '/' + str(tg).upper() + '.fits', data=array, header=hdr, overwrite=True) pyfits.writeto(self.contmosaicdir + '/' + str(tg).upper() + '_uncorr.fits', data=array2, header=hdr, overwrite=True) utils.clean_contmosaic_tmp_data(self)
def construct_psf(image='0_i.fits', index=0, star_table='0_stars_table.ipac', output_dir='output/', save_star_cutout=True, save_bkg=True, star_size=33, psf_size=31): ''' construct psf from a list of stars using python package reproject :param image: .fits file of an image :param index: index of image data in hdu file :param star_table: .ipac file containing 'x' and 'y' columns as peaks of stars :param output_dir: directory to save results :param save_star_cutout: whether to save star cutouts :param star_size: size of star cutout to construct psf (should be odd number) :param psf_size: size of psf image (should be odd number and <= size of star cutout) :return: 2D array of psf ''' print('Start stack stars for: ', image) # --------------------------------------------------------------- # load the image hdu = fits.open(image) w = WCS(hdu[index].header) data = hdu[index].data hdu.close() # --------------------------------------------------------------- # background properties # --------------------------------------------------------------- # mask sources mask = make_source_mask(data, 5, 5, dilate_size=11) # subtract 2D background sigma_clip = SigmaClip(sigma=3.) bkg_estimator = MedianBackground() bkg = Background2D(data, (60, 60), filter_size=(3, 3), sigma_clip=sigma_clip, bkg_estimator=bkg_estimator, mask=mask) data = data - bkg.background if save_bkg: # plot source mask and bkg plt.imshow(bkg.background, origin='lower', cmap='Greys_r', interpolation='nearest') plt.colorbar() plt.savefig(output_dir + image + '_bkg.pdf', bbox_inches='tight') plt.close() # --------------------------------------------------------------- # load psf star table stars_tbl = Table.read(star_table, format='ascii.ipac') hdu_List = [] for each in range(len(stars_tbl)): new_wcs = w new_wcs.wcs.crval = (0.0000, 0.0000) new_wcs.wcs.crpix = [ stars_tbl[each]['x'] + 1, stars_tbl[each]['y'] + 1 ] cut = Cutout2D(data, (stars_tbl[each]['x'], stars_tbl[each]['y']), star_size, wcs=new_wcs) hdu_temp = fits.PrimaryHDU(cut.data, header=cut.wcs.to_header()) hdu_temp.header['skystd'] = bkg.background_rms_median hdu_temp.header['x'] = stars_tbl[each]['x'] hdu_temp.header['y'] = stars_tbl[each]['y'] # save star images to fits files if save_star_cutout: hdu_temp.writeto( output_dir + '{0}_stars_{1}.fits'.format(image.split('.fits')[0], each), overwrite=True) hdu_List.append(hdu_temp) AGN_wcs = WCS(hdu_List[0].header) AGN_wcs.wcs.crval = (0.000000, 0.000000) AGN_wcs.wcs.crpix = ((psf_size + 1) / 2, (psf_size + 1) / 2) # # Use reproject package to generate psf array, footprint = reproject_and_coadd(hdu_List, AGN_wcs, shape_out=(psf_size, psf_size), reproject_function=reproject_exact, combine_function='sum') # save psf to fits file hdu_temp = fits.PrimaryHDU(array) hdu_temp.writeto(output_dir + '{}_psf.fits'.format(image.split('.fits')[0]), overwrite=True) return array
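
# Hypothetical call of construct_psf() defined above; the image and star-table
# names are placeholders, and the star/PSF cutout sizes follow the constraints
# in the docstring (odd numbers, with psf_size <= star_size).
psf = construct_psf(image='field1_i.fits',
                    star_table='field1_stars_table.ipac',
                    output_dir='output/',
                    star_size=33, psf_size=31)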
def make_contmosaic(self, images, pbimages, reference=None, pbclip=None):
    """
    Function to generate the continuum mosaic
    """
    # Get the common psf
    common_psf = utils.get_common_psf(self, images)

    corrimages = []   # to mosaic
    pbweights = []    # of the pixels
    rmsweights = []   # of the images themselves
    freqs = []
    # weight_images = []
    for img, pb in zip(images, pbimages):
        # prepare the images (squeeze, transfer_coordinates, reproject,
        # regrid pbeam, correct...)
        with pyfits.open(img) as f:
            imheader = f[0].header
            freqs.append(imheader['CRVAl3'])
            tg = imheader['OBJECT']
        img = fm.fits_squeeze(img)  # remove extra dimensions
        pb = fm.fits_transfer_coordinates(img, pb)  # transfer_coordinates
        pb = fm.fits_squeeze(pb)  # remove extra dimensions
        with pyfits.open(img) as f:
            imheader = f[0].header
            imdata = f[0].data
        with pyfits.open(pb) as f:
            pbhdu = f[0]
            autoclip = np.nanmin(f[0].data)
            # reproject the primary beam onto the image grid
            reproj_arr, reproj_footprint = reproject_interp(pbhdu, imheader)

        pbclip = self.cont_pbclip or autoclip
        print('PB is clipped at %f level' % pbclip)
        reproj_arr = np.float32(reproj_arr)
        reproj_arr[reproj_arr < pbclip] = np.nan
        pb_regr_repr = os.path.basename(pb.replace('.fits', '_repr.fits'))
        pyfits.writeto(pb_regr_repr, reproj_arr, imheader, overwrite=True)

        # convolution with common psf
        reconvolved_image = os.path.basename(img.replace('.fits', '_reconv.fits'))
        reconvolved_image = fm.fits_reconvolve_psf(img, common_psf,
                                                   out=reconvolved_image)

        # PB correction
        pbcorr_image = os.path.basename(
            reconvolved_image.replace('.fits', '_pbcorr.fits'))
        pbcorr_image = fm.fits_operation(reconvolved_image, reproj_arr,
                                         operation='/', out=pbcorr_image)

        # cropping
        cropped_image = os.path.basename(img.replace('.fits', '_mos.fits'))
        cropped_image, cutout = fm.fits_crop(pbcorr_image, out=cropped_image)
        corrimages.append(cropped_image)

        # primary beam weights
        wg_arr = reproj_arr - pbclip  # the edges weight ~0
        wg_arr[np.isnan(wg_arr)] = 0  # the NaNs weight 0
        wg_arr = wg_arr / np.nanmax(wg_arr)  # normalize
        wcut = Cutout2D(wg_arr, cutout.input_position_original, cutout.shape)
        pbweights.append(wcut.data)

        # weight the images by RMS noise over the edges
        l, m = imdata.shape[0] // 10, imdata.shape[1] // 10
        mask = np.ones(imdata.shape, dtype=bool)  # np.bool is removed in recent NumPy
        mask[l:-l, m:-m] = False
        img_noise = np.nanstd(imdata[mask])
        img_weight = 1 / img_noise**2
        rmsweights.append(img_weight)

    # merge the image rms weights and the primary beam pixel weights:
    weights = [p * r / max(rmsweights) for p, r in zip(pbweights, rmsweights)]

    # create the wcs and footprint for output mosaic
    wcs_out, shape_out = find_optimal_celestial_wcs(corrimages,
                                                    auto_rotate=False,
                                                    reference=reference)
    array, footprint = reproject_and_coadd(corrimages, wcs_out,
                                           shape_out=shape_out,
                                           reproject_function=reproject_interp,
                                           input_weights=weights)
    array = np.float32(array)

    # insert common PSF into the header
    psf = common_psf.to_header_keywords()
    hdr = wcs_out.to_header()
    hdr.insert('RADESYS', ('FREQ', np.nanmean(freqs)))
    hdr.insert('RADESYS', ('BMAJ', psf['BMAJ']))
    hdr.insert('RADESYS', ('BMIN', psf['BMIN']))
    hdr.insert('RADESYS', ('BPA', psf['BPA']))
    pyfits.writeto(self.contmosaicdir + '/' + str(tg).upper() + '.fits',
                   data=array, header=hdr, overwrite=True)

    utils.clean_contmosaic_tmp_data(self)