Example #1
import copy

# (In newer photutils releases these names live in photutils.psf.matching)
from photutils import (TopHatWindow, CosineBellWindow, SplitCosineBellWindow,
                       TukeyWindow, HanningWindow, create_matching_kernel)


def matching_kernel(psf1,
                    psf2,
                    window_type='TukeyWindow',
                    alpha=None,
                    beta=None):
    """Use photutils to create a matching kernel given two PSFs and
    the window type and parameters.

    Parameters
    ----------
    psf1 : numpy.ndarray
        2D array containing the first PSF

    psf2 : numpy.ndarray
        2D array containing the second PSF

    window_type : str
        Name of the window function to use when filtering the matching kernel

    alpha : float
        Optional input for some of the window functions

    beta : float
        Optional input for some of the window functions

    Returns
    -------
    matched_kernel : numpy.ndarray
        2D array containing the matching PSF kernel
    """
    # Create the filtering window; keep the original string for the error message
    orig_window_type = copy.deepcopy(window_type)
    window_type = window_type.lower()
    if window_type == 'tophatwindow':
        window = TopHatWindow(beta=beta)
    elif window_type == 'cosinebellwindow':
        window = CosineBellWindow(alpha=alpha)
    elif window_type == 'splitcosinebellwindow':
        window = SplitCosineBellWindow(alpha=alpha, beta=beta)
    elif window_type == 'tukeywindow':
        window = TukeyWindow(alpha=alpha)
    elif window_type == 'hanningwindow':
        window = HanningWindow()
    else:
        raise ValueError(
            "ERROR: Unrecognized window_type: {}".format(orig_window_type))

    # Create the matching kernel
    matched_kernel = create_matching_kernel(psf1, psf2, window=window)

    return matched_kernel
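
A minimal usage sketch for the function above (the PSF file names are hypothetical placeholders; both arrays must have the same shape):

from astropy.io import fits

psf_a = fits.getdata('psf_filter_a.fits')   # hypothetical input files
psf_b = fits.getdata('psf_filter_b.fits')
kernel = matching_kernel(psf_a, psf_b, window_type='TukeyWindow', alpha=0.4)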
Example #2
import numpy as np
from astropy.io import fits
from matplotlib import pyplot as plt
from scipy.signal import convolve as scipy_convolve
from photutils import create_matching_kernel, TopHatWindow, CosineBellWindow

# Note: the second assignment overrides the first, so only the TopHatWindow
# would be used if `window` were actually passed to create_matching_kernel.
window = CosineBellWindow(alpha=0.35)
window = TopHatWindow(0.35)

dir1 = '/home/sourabh/ULIRG_package/data/OPTICAL_PSF/'
data_ref = fits.getdata(dir1 + 'f165psf.fits')
data_psf = fits.getdata(dir1 + 'PSF_775_gal4_rotate_cut.fits')

# Kernel that matches data_psf (source) to data_ref (target)
kernel = create_matching_kernel(data_psf, data_ref)  # , window=window)
fits.writeto('ker.fits', data=kernel, overwrite=True)
plt.imshow(kernel, cmap='Greys_r', origin='lower')

filename = '/home/sourabh/ULIRG_package/data/IRASF10594+3818/gal1_HA.fits'
fileout = '/home/sourabh/ULIRG_package/data/IRASF10594+3818/gal1_HA_psfmatch.fits'
ker = 'ker.fits'
# ker_shift = np.pad(kernel, ((0, 1), (0, 1)), mode='constant')

# Sanity check: convolving the sharper PSF with the kernel should approximate
# the reference PSF; test3.fits holds the residual.
data1 = scipy_convolve(data_psf, kernel, mode='same')
fits.writeto('test2.fits', data=data1, overwrite=True)
data3 = data1 - data_ref
fits.writeto('test3.fits', data=data3, overwrite=True)


def psf_match(filename, fileout, ker):
    hdulist = fits.open(filename)
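
The `psf_match` helper above is cut off in this example. A minimal sketch of how such a function might continue, assuming it simply convolves the image with the kernel file and writes the result to `fileout` (a guess, not the original author's code):

def psf_match_sketch(filename, fileout, ker):
    # Hypothetical continuation using the imports defined above
    image, header = fits.getdata(filename, header=True)
    kernel = fits.getdata(ker)
    matched = scipy_convolve(image, kernel, mode='same')
    fits.writeto(fileout, data=matched, header=header, overwrite=True)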
Example #3
File: utils.py  Project: gbrammer/golfir
def effective_psf(log,
                  rd=None,
                  size=30,
                  pixel_scale=0.1,
                  pixfrac=0.2,
                  kernel='square',
                  recenter=False,
                  use_native_orientation=False):
    """
    Drizzle effective PSF model given the oversampled model PRF
    
    ** not used, testing **
    """
    from photutils import (HanningWindow, TukeyWindow, CosineBellWindow,
                           SplitCosineBellWindow, TopHatWindow)
    import grizli.utils

    # Module-level imports assumed by this snippet (not shown in the excerpt);
    # match_slice_to_shape is another helper from the same golfir.utils module.
    import os
    import numpy as np
    import astropy.io.fits as pyfits
    import astropy.wcs as pywcs

    # r48106752/ch1/bcd/SPITZER_I1_48106752_0001_0000_2_cbcd.fits
    ch = int(log['file'][0].split("_")[1][-1])
    if __name__ == "__main__":
        import golfir
        _path = os.path.dirname(golfir.__file__)
    else:
        _path = os.path.dirname(__file__)

    h_file = os.path.join(_path, f'data/bcd_ch{ch}.header')
    sip_h = pyfits.Header.fromtextfile(h_file)

    N = len(log)

    #if rd is None:
    rd = np.mean(log['crval'][:N // 2], axis=0)

    #ipsf = pyfits.open(f'../AvgPSF/IRAC/apex_sh_IRACPC{ch}_col129_row129_x100.fits', relax=True)

    ipsf = pyfits.open(
        f'../AvgPSF/Cryo/apex_sh_IRAC{ch}_col129_row129_x100.fits', relax=True)

    #ipsf = pyfits.open(f'../AvgPSF/IRAC/IRACPC{ch}_col129_row129.fits')

    if 'PRFXRSMP' in ipsf[0].header:
        osamp = ipsf[0].header['PRFXRSMP']
    else:
        osamp = 100

    ish = ipsf[0].data.shape

    if recenter:
        # Centroid
        yp, xp = np.indices(ish)
        pp = ipsf[0].data
        xc = int(np.round((pp * xp).sum() / pp.sum()))
        yc = int(np.round((pp * yp).sum() / pp.sum()))
        i0 = [xc, yc]
    else:
        i0 = [s // 2 for s in ish]

    # Number of pixels to extract (use i0, which is defined in both branches
    # above; xc, yc only exist when recenter=True)
    inp = np.min(i0) - osamp // 2
    # Extra padding
    if osamp == 100:
        inp -= 20
    else:
        inp -= 1

    wcs_list = []
    sci_list = []
    wht_list = []

    wht_i = np.ones((256, 256), dtype=np.float32)

    coords = [rd]
    cosd = np.cos(rd[1] / 180 * np.pi)
    for dx in np.linspace(-1, 1, 4):
        for dy in np.linspace(-1, 1, 4):
            if dx == dy == 0:
                continue

            delta = np.array([dx / cosd / 60, dy / 60])
            coords.append(rd + delta)

    for k in range(N):
        print('Parse file: {0}'.format(log['file'][k]))

        if 0:
            cd = log['cd'][k]
            theta = np.arctan2(cd[1][0], cd[1][1]) / np.pi * 180
            print(k, theta)

        sip_h['CRPIX1'] = log['crpix'][k][0]
        sip_h['CRPIX2'] = log['crpix'][k][1]

        sip_h['CRVAL1'] = log['crval'][k][0]
        sip_h['CRVAL2'] = log['crval'][k][1]

        sip_h['LATPOLE'] = log['crval'][k][1]

        for i in range(2):
            for j in range(2):
                key = f'CD{i+1}_{j+1}'
                sip_h[key] = log['cd'][k][i, j]

        wcs_i = pywcs.WCS(sip_h, relax=True)
        wcs_i.pscale = grizli.utils.get_wcs_pscale(wcs_i)

        sci_i = np.zeros((256, 256), dtype=np.float32)

        for coo in coords:
            try:
                xy = wcs_i.all_world2pix([coo], 0).flatten() + 0.5
                # if ch == 2:
                #     xy += 0.25
            except:
                print('wcs failed')
                continue

            xyp = np.cast[int](np.floor(xy))
            phase = np.cast[int](np.round((xy - xyp) * osamp - osamp / 2.))
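            # xyp is the integer detector pixel containing the source position and
            # phase is the remaining sub-pixel offset expressed in oversampled-PRF
            # samples; the strided slices below then pick out the PRF samples that
            # fall on integer detector pixels for this particular sub-pixel phase.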

            oslx = slice(i0[0] - phase[0] - inp, i0[0] - phase[0] + inp + 1,
                         osamp)
            osly = slice(i0[1] - phase[1] - inp, i0[1] - phase[1] + inp + 1,
                         osamp)
            psf_sub = ipsf[0].data[osly, oslx]
            osh = psf_sub.shape

            nparent = 256
            slx0 = slice(xyp[0] - osh[1] // 2, xyp[0] + osh[1] // 2)
            sly0 = slice(xyp[1] - osh[0] // 2, xyp[1] + osh[0] // 2)

            slpx, slcx = (match_slice_to_shape(slx0, nparent))
            slpy, slcy = (match_slice_to_shape(sly0, nparent))
            try:
                sci_i[slpy, slpx] += psf_sub[slcy, slcx]
            except:
                print('slice failed')

        wcs_list.append(wcs_i)
        sci_list.append(sci_i)
        wht_list.append((sci_i != 0).astype(np.float32))

    if use_native_orientation:
        cd = log['cd'][0]
        theta = np.arctan2(cd[1][0], cd[1][1]) / np.pi * 180
    else:
        theta = 0

    for k, coo in enumerate(coords):
        print('Drizzle coords: {0}'.format(coo))

        out_hdu = grizli.utils.make_wcsheader(ra=coo[0],
                                              dec=coo[1],
                                              size=size,
                                              pixscale=pixel_scale,
                                              theta=-theta,
                                              get_hdu=True)
        #out_h, out_wcs = _out
        out_wcs = pywcs.WCS(out_hdu.header)
        out_wcs.pscale = grizli.utils.get_wcs_pscale(out_wcs)

        if False:
            _drz = grizli.utils.drizzle_array_groups(sci_list,
                                                     wht_list,
                                                     wcs_list,
                                                     outputwcs=out_wcs,
                                                     pixfrac=pixfrac,
                                                     kernel=kernel,
                                                     verbose=False)

            drz_psf = _drz[0] / _drz[0].sum()
            pyfits.writeto(f'irsa_{pixel_scale}pix_ch{ch}_{k}_psf.fits',
                           data=drz_psf,
                           overwrite=True)
        else:
            if k == 0:
                _drz = grizli.utils.drizzle_array_groups(sci_list,
                                                         wht_list,
                                                         wcs_list,
                                                         outputwcs=out_wcs,
                                                         pixfrac=pixfrac,
                                                         kernel=kernel,
                                                         verbose=False)
            else:
                _ = grizli.utils.drizzle_array_groups(sci_list,
                                                      wht_list,
                                                      wcs_list,
                                                      outputwcs=out_wcs,
                                                      pixfrac=pixfrac,
                                                      kernel=kernel,
                                                      verbose=False,
                                                      data=_drz[:3])

    sci, wht, ctx, head, w = _drz
    coswindow = CosineBellWindow(alpha=1)(_drz[0].shape)**0.05

    drz_psf = (_drz[0] * coswindow) / (_drz[0] * coswindow).sum()

    pyfits.writeto(f'irsa_{pixel_scale}pix_ch{ch}_psf.fits',
                   data=drz_psf,
                   overwrite=True)
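
The `coswindow` line above relies on photutils window objects being callable: passing an output shape returns a 2D taper array with values between 0 and 1. A small standalone illustration (shape chosen arbitrarily):

from photutils import CosineBellWindow

taper = CosineBellWindow(alpha=1)((101, 101))
print(taper.shape, taper.min(), taper.max())   # 2D taper, values between 0 and 1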
Example #4
def run_model(root=''):

    import golfir.irac
    import golfir.model

    # Module-level imports assumed by this snippet (not shown in the excerpt)
    import os
    import glob
    import numpy as np
    import astropy.io.fits as pyfits

    from astropy.modeling.models import Moffat2D, Gaussian2D, Sersic2D
    from astropy.modeling.fitting import LevMarLSQFitter

    from tractor.psf import GaussianMixtureEllipsePSF, GaussianMixturePSF

    from golfir.irac import MixturePSF

    from photutils import (HanningWindow, TukeyWindow, CosineBellWindow,
                           SplitCosineBellWindow, TopHatWindow)
    window = CosineBellWindow(1)

    fitter = LevMarLSQFitter()

    import grizli.ds9
    ds9 = grizli.ds9.DS9()

    P0 = None
    bkg_func = None

    kwargs = {
        'ds9': ds9,
        'mag_limit': [24, 27],
        'galfit_flux_limit': np.inf,
        'refine_brightest': False,
        'run_alignment': True,
        'any_limit': 10,
        'point_limit': -10,
        'bright_sn': 10,
        'bkg_kwargs': {
            'order_npix': 64
        },
        'psf_only': False,
        'use_saved_components': False,
        'window': None,
        'use_avg_psf': True,
        'align_type': 1
    }

    files = glob.glob(f'{root}-[kgrz]*sci.fits')
    files.sort()

    bands = [file.split('_drz')[0].split('-')[-1] for file in files]

    orig_pix = 0.262

    for band in bands:
        psf_im = pyfits.open(f'{root}-{band}_psf.fits')[0]
        h = psf_im.header

        psf_obj = MixturePSF(N=psf_im.header['PSFN'])

        for i in range(psf_obj.N):
            psf_obj.coeffs[i] = h[f'PSFC{i}']
            pars = [h[f'PSFP{i}_{j}'] for j in range(6)]
            psf_obj.mogs[i].setParams(pars)

        psf_obj.set_pixelgrid(size=32,
                              instep=orig_pix,
                              outstep=orig_pix,
                              oversample=orig_pix / 0.1)

        modeler = golfir.model.ImageModeler(root=root,
                                            prefer_filter='f160w',
                                            lores_filter=band,
                                            psf_obj=psf_obj)

        if not os.path.exists(f'{root}_waterseg.fits'):
            pyfits.writeto(f'{root}_waterseg.fits', data=modeler.waterseg)
Example #5
#!/usr/bin/env python

import numpy as np
from astropy.io import fits
# from scipy import signal, ndimage
# from hconvolve import fftdeconvolve, hconvolve
from scipy.signal import convolve2d
import os, subprocess
from scipy import ndimage
from skimage.restoration import richardson_lucy
from photutils import CosineBellWindow, create_matching_kernel
window_default = CosineBellWindow(alpha=0.35)


def psfmatch(psf_sharp, psf_broad, kernelname, method='fft', window=window_default, iterations=30):
    """Derive the kernel that matches psf_sharp to psf_broad"""
    psf1 = fits.getdata(psf_sharp)
    psf2 = fits.getdata(psf_broad)

    assert psf1.shape[0] % 2 == 1
    assert psf2.shape[0] % 2 == 1
    assert psf1.shape[0] == psf1.shape[1]
    assert psf2.shape[0] == psf2.shape[1]
    
    psf1 = psf1 / psf1.sum()
    psf2 = psf2 / psf2.sum()
    
    if psf1.shape[0] > psf2.shape[0]:
        pad = (psf1.shape[0] - psf2.shape[0]) // 2
        psf1 = psf1[pad:-pad, pad:-pad]
    elif psf2.shape[0] > psf1.shape[0]:
        pad = (psf2.shape[0] - psf1.shape[0]) // 2
        psf2 = psf2[pad:-pad, pad:-pad]
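
The snippet above is truncated. A minimal sketch of how the remainder might derive and save the kernel, assuming the 'fft' method simply wraps photutils' create_matching_kernel (hypothetical continuation, not the original code):

def psfmatch_sketch(psf1, psf2, kernelname, window=window_default):
    # psf1, psf2: the normalized, equal-sized PSF arrays prepared above
    kernel = create_matching_kernel(psf1, psf2, window=window)
    fits.writeto(kernelname, data=kernel, overwrite=True)
    return kernel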
Example #6
"""
Created on Tue Jun 12 11:36:12 2018

Code to create a kernel that will match the PSFs of two stacks

@author: ppxee
"""

### Import Modules ###
import numpy as np
from photutils import create_matching_kernel, TopHatWindow, CosineBellWindow
import matplotlib.pyplot as plt
from astropy.io import fits
plt.close('all')

psf11 = fits.open('11B_K_PSF.fits')[0].data
psf12 = fits.open('12B_K_PSF.fits')[0].data

#plt.figure()
#plt.imshow(psf12-psf11)
#plt.figure()
#plt.imshow(psf11-psf12)


kernel = create_matching_kernel(psf11, psf12, window=CosineBellWindow(0.35)) #need to check which is larger

plt.figure()
plt.imshow(kernel)
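
Regarding the 'need to check which is larger' comment: following the psf_sharp/psf_broad ordering in Example #5, the broader PSF should be passed as the second (target) argument of create_matching_kernel. A rough width check using second moments:

def moment_radius(psf):
    """Second-moment radius of a normalized PSF, as a crude width estimate."""
    psf = psf / psf.sum()
    yy, xx = np.indices(psf.shape)
    yc, xc = (psf * yy).sum(), (psf * xx).sum()
    return np.sqrt((psf * ((xx - xc)**2 + (yy - yc)**2)).sum())

print(moment_radius(psf11), moment_radius(psf12))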


Example #7
def irac_mosaics(root='j000308m3303', home='/GrizliImaging/', pixfrac=0.2,
                 kernel='square', initial_pix=1.0, final_pix=0.5,
                 pulldown_mag=15.2, sync_xbcd=True, skip_fetch=False,
                 radec=None, mosaic_pad=2.5, drizzle_ref_file='',
                 run_alignment=True, assume_close=True, bucket='grizli-v1',
                 aor_query='r*', mips_ext='[_e]bcd.fits',
                 channels=['ch1', 'ch2', 'ch3', 'ch4', 'mips1'],
                 drz_query='r*', sync_results=True, ref_seg=None,
                 min_frame={'irac': 5, 'mips': 1.0}, med_max_size=500e6,
                 stop_at='', make_psf=True, **kwargs):
    """
    stop_at: preprocess, make_compact
    
    """
    
    from grizli import utils

    from . import irac
    from .utils import get_wcslist, fetch_irac

    # Module-level imports assumed by this snippet (not shown in the excerpt);
    # make_html is another helper from the same module.
    import os
    import glob
    import time
    import numpy as np
    import matplotlib.pyplot as plt
    import astropy.io.fits as pyfits
    import astropy.wcs as pywcs
    from matplotlib.path import Path
    from skimage.morphology import binary_dilation
    from drizzlepac import ablot

    PATH = os.path.join(home, root)
    try:
        os.mkdir(PATH)
    except:
        pass

    os.chdir(PATH)
        
    if not skip_fetch:
        # Fetch IRAC bcds
        if not os.path.exists(f'{root}_ipac.fits'):
            os.system(f'wget https://s3.amazonaws.com/{bucket}/IRAC/{root}_ipac.fits')
    
        res = fetch_irac(root=root, path='./', channels=channels)
        
        if res in [False, None]:
            # Nothing to do
            make_html(root, bucket=bucket)

            print(f'### Done: \n https://s3.amazonaws.com/{bucket}/Pipeline/{root}/IRAC/{root}.irac.html')

            utils.log_comment(f'/tmp/{root}.success', 'Done!', 
                              verbose=True, show_date=True)
            return True
            
    # Sync CHArGE HST images
    os.system(f'aws s3 sync s3://{bucket}/Pipeline/{root}/Prep/ ./ '
              f' --exclude "*" --include "{root}*seg.fits*"'
              f' --include "{root}-ir_drz*fits*"'
              f' --include "{root}*psf.fits*"'
              f' --include "{root}-f[01]*_drz*fits.gz"'
              f' --include "{root}*phot.fits"')
    
    # Drizzle properties of the preliminary mosaic
    #pixfrac, pix, kernel = 0.2, 1.0, 'square'       
    
    # Define an output WCS aligned in pixel phase to the HST mosaic ()

    if not os.path.exists('ref_hdu.fits'):
        wcslist = get_wcslist(skip=-500)
        out_hdu = utils.make_maximal_wcs(wcslist, pixel_scale=initial_pix, theta=0, pad=5, get_hdu=True, verbose=True)

        # Make sure pixels align
        ref_file = glob.glob('{0}-f[01]*_drz_sci.fits*'.format(root))
        if len(ref_file) == 0:
            os.system(f'aws s3 sync s3://{bucket}/Pipeline/{root}/Prep/ ./ '
                      f' --exclude "*"'
                      f' --include "{root}-f[678]*_dr*fits.gz"')
            
            ref_file = glob.glob('{0}-f[678]*_dr*_sci.fits*'.format(root))
        
        ref_file = ref_file[-1]

        print(f'\nHST reference image: {ref_file}\n')

        ref_hdu = pyfits.open(ref_file)[0].header
        ref_filter = utils.get_hst_filter(ref_hdu).lower()

        ref_wcs = pywcs.WCS(ref_hdu)
        ref_rd = ref_wcs.all_pix2world(np.array([[-0.5, -0.5]]), 0).flatten()
        target_phase = np.array([0.5, 0.5])#/(pix/0.1)
        for k in ['RADESYS', 'LATPOLE', 'LONPOLE']:
            out_hdu.header[k] = ref_hdu[k]

        # Shift CRVAL to same tangent point
        out_wcs = pywcs.WCS(out_hdu.header)
        out_xy = out_wcs.all_world2pix(np.array([ref_wcs.wcs.crval]), 1).flatten()
        out_hdu.header['CRVAL1'], out_hdu.header['CRVAL2'] = tuple(ref_wcs.wcs.crval)
        out_hdu.header['CRPIX1'], out_hdu.header['CRPIX2'] = tuple(out_xy)

        # Align integer pixel phase
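        # (Compute where the HST reference corner `ref_rd` lands on the output
        #  grid, take the fractional part of that pixel position, and shift
        #  CRPIX so the fraction equals `target_phase`, i.e. the IRAC and HST
        #  grids end up aligned in pixel phase.)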
        out_wcs = pywcs.WCS(out_hdu.header)
        out_xy = out_wcs.all_world2pix(np.array([ref_rd]), 0).flatten()
        xy_phase = out_xy - np.floor(out_xy)
        new_crpix = out_wcs.wcs.crpix - (xy_phase - target_phase)
        out_hdu.header['CRPIX1'], out_hdu.header['CRPIX2'] = tuple(new_crpix)
        out_wcs = pywcs.WCS(out_hdu.header)

        out_hdu.writeto('ref_hdu.fits', output_verify='Fix')

    else:
        out_hdu = pyfits.open('ref_hdu.fits')[1]
    
    ########
    
    files = []
    for ch in channels:
        if 'mips' in ch:
            mc = ch.replace('mips','ch')
            files += glob.glob(f'{aor_query}/{mc}/bcd/SPITZER_M*{mips_ext}')
            files += glob.glob(f'{aor_query}/{mc}/bcd/SPITZER_M*xbcd.fits.gz')
        else:
            files += glob.glob(f'{aor_query}/{ch}/bcd/SPITZER_I*cbcd.fits')
            files += glob.glob(f'{aor_query}/{ch}/bcd/SPITZER_I*xbcd.fits.gz')
            
    files.sort()

    roots = np.array([file.split('/')[0] for file in files])
    with_channels = np.array([file.split('_')[1] for file in files])
    all_roots = np.array(['{0}-{1}'.format(r, c.replace('I','ch').replace('M', 'mips')) for r, c in zip(roots, with_channels)])

    tab = {'aor':[], 'N':[], 'channel':[]}
    for r in np.unique(all_roots):
        tab['aor'].append(r.split('-')[0])
        tab['N'].append((all_roots == r).sum())
        tab['channel'].append(r.split('-')[1])

    aors = utils.GTable(tab)
    print(aors)
    
    ########
    SKIP = True          # Don't regenerate finished files
    delete_group = False # Delete intermediate products from memory
    zip_outputs = False    # GZip intermediate products

    aors_ch = {}
    
    ########
    # Process mosaics by AOR
    # Process in groups, helps for fields like HFF with dozens/hundreds of AORs!
    for ch in channels:
            
        aor = aors[(aors['channel'] == ch) & (aors['N'] > 5)]
        if len(aor) == 0:
            continue

        #aors_ch[ch] = []

        if ch in ['ch1','ch2']:
            NPER, instrument = 500, 'irac'
        if ch in ['ch3','ch4']:
            NPER, instrument = 500, 'irac'
        elif ch in ['mips1']:
            NPER, instrument = 400, 'mips'
        
        min_frametime = min_frame[instrument]
        
        nsort = np.cumsum(aor['N']/NPER)
        NGROUP = int(np.ceil(nsort.max()))

        count = 0

        for g in range(NGROUP):
            root_i = root+'-{0:02d}'.format(g)

            gsel = (nsort > g) & (nsort <= g+1)
            aor_ids = list(aor['aor'][gsel])
            print('{0}-{1}   N_AOR = {2:>2d}  N_EXP = {3:>4d}'.format(root_i, ch,  len(aor_ids), aor['N'][gsel].sum()))
            count += gsel.sum()

            files = glob.glob('{0}-{1}*'.format(root_i, ch))
            if (len(files) > 0) & (SKIP): 
                print('Skip {0}-{1}'.format(root_i, ch))
                continue
            
            with open('{0}-{1}.log'.format(root_i, ch),'w') as fp:
                fp.write(time.ctime())
                
            # Do internal alignment to GAIA.  
            # Otherwise, set `radec` to the name of a file that has two columns with 
            # reference ra/dec.
            #radec = None 

            # Pipeline
            if instrument == 'mips':
                aors_ch[ch] = irac.process_all(
                    channel=ch.replace('mips', 'ch'), output_root=root_i,
                    driz_scale=initial_pix, kernel=kernel, pixfrac=pixfrac,
                    wcslist=None, pad=0, out_hdu=out_hdu, aor_ids=aor_ids,
                    flat_background=False, two_pass=True,
                    min_frametime=min_frametime, instrument=instrument,
                    align_threshold=0.15, radec=radec, run_alignment=False,
                    mips_ext=mips_ext, ref_seg=ref_seg,
                    global_mask=root+'_mask.reg')
            else:
                aors_ch[ch] = irac.process_all(
                    channel=ch, output_root=root_i, driz_scale=initial_pix,
                    kernel=kernel, pixfrac=pixfrac, wcslist=None, pad=0,
                    out_hdu=out_hdu, aor_ids=aor_ids, flat_background=False,
                    two_pass=True, min_frametime=min_frametime,
                    instrument=instrument, radec=radec,
                    run_alignment=run_alignment, assume_close=assume_close,
                    ref_seg=ref_seg, global_mask=root+'_mask.reg',
                    med_max_size=med_max_size)

            if len(aors_ch[ch]) == 0:
                continue

            # PSFs
            plt.ioff()

            if (instrument != 'mips') & make_psf:
                ch_num = int(ch[-1])
                segmask=True

                # psf_size=20
                # for p in [0.1, final_pix]:
                #     irac.mosaic_psf(output_root=root_i, target_pix=p, channel=ch_num, aors=aors_ch[ch], kernel=kernel, pixfrac=pixfrac, size=psf_size, native_orientation=False, instrument=instrument, subtract_background=False, segmentation_mask=segmask, max_R=10)
                #     plt.close('all')

                psf_size=30
                p = 0.1
                irac.mosaic_psf(output_root=root_i, target_pix=p, channel=ch_num,
                                aors=aors_ch[ch], kernel=kernel, pixfrac=pixfrac,
                                size=psf_size, native_orientation=True,
                                subtract_background=False,
                                segmentation_mask=segmask, max_R=10)

                plt.close('all')

            if delete_group:
                del(aors_ch[ch])

            print('Done {0}-{1}, gzip products'.format(root_i, ch))

            if zip_outputs:
                os.system('gzip {0}*-{1}_drz*fits'.format(root_i, ch))
        
        # PSFs
        if (instrument != 'mips') & make_psf:
            # Average PSF
            p = 0.1
            files = glob.glob('*{0}-{1:.1f}*psfr.fits'.format(ch, p))
            if len(files) == 0:
                continue
                
            files.sort()
            avg = None
            for file in files: 
                im = pyfits.open(file)
                if avg is None:
                    wht = im[0].data != 0
                    avg = im[0].data*wht
                else:
                    wht_i = im[0].data != 0
                    avg += im[0].data*wht_i
                    wht += wht_i
                
                im.close()
                
            avg = avg/wht
            avg[wht == 0] = 0

            # Window
            from photutils import (HanningWindow, TukeyWindow, 
                                   CosineBellWindow,
                                   SplitCosineBellWindow, TopHatWindow)

            coswindow = CosineBellWindow(alpha=1)
            avg *= coswindow(avg.shape)**0.05
            avg /= avg.sum()
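            # (Raising the cosine-bell window to the 0.05 power gives a very
            #  gentle taper that only suppresses the outermost edges of the
            #  averaged PSF before renormalizing.)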

            pyfits.writeto('{0}-{1}-{2:0.1f}.psfr_avg.fits'.format(root, ch, p), data=avg, header=im[0].header, overwrite=True)
    
    ####
    ## Show the initial product
    plt.ioff()
    for i in range(10):
        files = glob.glob(f'{root}-{i:02d}-ch*sci.fits')
        if len(files) > 0:
            break
            
    files.sort()
    
    if len(files) == 1:
        subs = 1,1
        fs = [7,7]
    elif len(files) == 2:
        subs = 1,2
        fs = [14,7]
    elif len(files) == 3:
        subs = 2,2
        fs = [14,14]
    else:
        subs = 2,2
        fs = [14,14]
        
    fig = plt.figure(figsize=fs)
    for i, file in enumerate(files[:4]):
        im = pyfits.open(file)
        print('{0} {1} {2:.1f} s'.format(file, im[0].header['FILTER'], im[0].header['EXPTIME']))
        ax = fig.add_subplot(subs[0], subs[1], 1+i)
        ax.imshow(im[0].data, vmin=-0.1, vmax=1, cmap='gray_r', origin='lower')
        ax.text(0.05, 0.95, file, ha='left', va='top', color='k', 
                transform=ax.transAxes)
        
        im.close()
        
    if len(files) > 1:
        fig.axes[1].set_yticklabels([])
    
    if len(files) > 2:
        fig.axes[0].set_xticklabels([])
        fig.axes[1].set_xticklabels([])
    
    if len(files) > 3:
        fig.axes[3].set_yticklabels([])
        
    fig.tight_layout(pad=0.5)
    fig.savefig(f'{root}.init.png')
    plt.close('all')
    
    if stop_at == 'preprocess':
        return True
        
    #######
    # Make more compact individual exposures and clean directories
    wfiles = []
    for ch in channels:
        if 'mips' in ch:
            chq = ch.replace('mips','ch')
            wfiles += glob.glob(f'{aor_query}/{chq}/bcd/SPITZER_M*wcs.fits')
        else:
            wfiles += glob.glob(f'{aor_query}/{ch}/bcd/SPITZER_I*wcs.fits')

    #wfiles = glob.glob('r*/*/bcd/*_I[1-4]_*wcs.fits')
    #wfiles += glob.glob('r*/*/bcd/*_M[1-4]_*wcs.fits')
    wfiles.sort()

    for wcsfile in wfiles:
        outfile = wcsfile.replace('_wcs.fits', '_xbcd.fits.gz')
        if os.path.exists(outfile):
            print(outfile)
        else:
            irac.combine_products(wcsfile)
            print('Run: ', outfile)

        if os.path.exists(outfile):
            remove_files = glob.glob('{0}*fits'.format(wcsfile.split('_wcs')[0]))
            for f in remove_files:
                print('   rm ', f)
                os.remove(f)
 
    if stop_at == 'make_compact':
        return True
                                   
    #############
    # Drizzle final mosaics
    # Make final mosaic a bit bigger than the HST image
    pad = mosaic_pad

    # Pixel scale of final mosaic.
    # Don't make too small if not many dithers available as in this example.
    # But for well-sampled mosaics like RELICS / HFF, can push this to perhaps 0.3" / pix
    pixscale = final_pix #0.5

    # Again, if have many dithers maybe can use more aggressive drizzle parameters,
    # like a 'point' kernel or smaller pixfrac (a 'point' kernel is pixfrac=0)
    #kernel, pixfrac = 'square', 0.2

    # Correction for bad columns near bright stars
    #pulldown_mag = 15.2 

    ##############
    # Dilation for CR rejection
    dil = np.ones((3,3))
    driz_cr = [7, 4]
    blot_interp = 'poly5'
    bright_fmax = 0.5
    
    ### Drizzle
    for ch in channels: #[:2]:
        ###########
        # Files and reference image for extra CR rejection
        if ch == 'mips1':
            files = glob.glob('{0}/ch1/bcd/SPITZER_M1_*xbcd.fits*'.format(drz_query, ch))
            files.sort()
            pulldown_mag = -10
            pixscale = 1.
            kernel = 'point'
        else:
            files = glob.glob('{0}/{1}/bcd/*_I?_*xbcd.fits*'.format(drz_query, ch))
            files.sort()

        #ref = pyfits.open('{0}-00-{1}_drz_sci.fits'.format(root, ch))
        #ref_data = ref[0].data.astype(np.float32)

        ref_files = glob.glob(f'{root}-??-{ch}*sci.fits')
        if len(ref_files) == 0:
            continue

        num = None
        for ref_file in ref_files:
            ref = pyfits.open(ref_file)
            wht = pyfits.open(ref_file.replace('_sci.fits', '_wht.fits'))
            if num is None:
                num = ref[0].data*wht[0].data
                den = wht[0].data
            else:
                num += ref[0].data*wht[0].data
                den += wht[0].data

        ref_data = (num/den).astype(np.float32)
        ref_data[den <= 0] = 0

        ref_wcs = pywcs.WCS(ref[0].header, relax=True) 
        ref_wcs.pscale = utils.get_wcs_pscale(ref_wcs) 
        if (not hasattr(ref_wcs, '_naxis1')) & hasattr(ref_wcs, '_naxis'):
            ref_wcs._naxis1, ref_wcs._naxis2 = ref_wcs._naxis

        ##############
        # Output WCS based on HST footprint
        if drizzle_ref_file == '':
            try:
                hst_im = pyfits.open(glob.glob('{0}-f[01]*_drz_sci.fits*'.format(root))[-1])
            except:
                hst_im = pyfits.open(glob.glob('{0}-f[578]*_dr*sci.fits*'.format(root))[-1])
            
    
            hst_wcs = pywcs.WCS(hst_im[0])
            hst_wcs.pscale = utils.get_wcs_pscale(hst_wcs) 

            try:
                size = (np.round(np.array([hst_wcs._naxis1, hst_wcs._naxis2])*hst_wcs.pscale*pad/pixscale)*pixscale)
            except:
                size = (np.round(np.array([hst_wcs._naxis[0], hst_wcs._naxis[1]])*hst_wcs.pscale*pad/pixscale)*pixscale)
            
            hst_rd = hst_wcs.calc_footprint().mean(axis=0)
            _x = utils.make_wcsheader(ra=hst_rd[0], dec=hst_rd[1],
                                      size=size, 
                                      pixscale=pixscale, 
                                      get_hdu=False, theta=0)
            
            out_header, out_wcs = _x
        else:
            driz_ref_im = pyfits.open(drizzle_ref_file)
            out_wcs = pywcs.WCS(driz_ref_im[0].header, relax=True)
            out_wcs.pscale = utils.get_wcs_pscale(out_wcs) 
            
            out_header = utils.to_header(out_wcs)
        
        if (not hasattr(out_wcs, '_naxis1')) & hasattr(out_wcs, '_naxis'):
            out_wcs._naxis1, out_wcs._naxis2 = out_wcs._naxis
            
        ##############
        # Bright stars for pulldown correction
        cat_file = glob.glob(f'{root}-[0-9][0-9]-{ch}.cat.fits')[0]
        ph = utils.read_catalog(cat_file) 
        bright = (ph['mag_auto'] < pulldown_mag) # & (ph['flux_radius'] < 3)
        ph = ph[bright]

        ##############
        # Now do the drizzling
        yp, xp = np.indices((256, 256))
        orig_files = []

        out_header['DRIZ_CR0'] = driz_cr[0]
        out_header['DRIZ_CR1'] = driz_cr[1]
        out_header['KERNEL'] = kernel
        out_header['PIXFRAC'] = pixfrac
        out_header['NDRIZIM'] = 0
        out_header['EXPTIME'] = 0
        out_header['BUNIT'] = 'microJy'
        out_header['FILTER'] = ch

        med_root = 'xxx'
        N = len(files)

        for i, file in enumerate(files):#[:100]):

            print('{0}/{1} {2}'.format(i, N, file))

            if file in orig_files:
                continue

            im = pyfits.open(file)
            ivar = 1/im['CBUNC'].data**2    
            msk = (~np.isfinite(ivar)) | (~np.isfinite(im['CBCD'].data))
            im['CBCD'].data[msk] = 0
            ivar[msk] = 0

            wcs = pywcs.WCS(im['WCS'].header, relax=True)
            wcs.pscale = utils.get_wcs_pscale(wcs)
            if (not hasattr(wcs, '_naxis1')) & hasattr(wcs, '_naxis'):
                wcs._naxis1, wcs._naxis2 = wcs._naxis
            
            fp = Path(wcs.calc_footprint())

            med_root_i = im.filename().split('/')[0]
            if med_root != med_root_i:
                print('\n Read {0}-{1}_med.fits \n'.format(med_root_i, ch))
                med = pyfits.open('{0}-{1}_med.fits'.format(med_root_i, ch))
                med_data = med[0].data.astype(np.float32)
                med_root = med_root_i
                med.close()
                
                try:
                    gaia_rd = utils.read_catalog('{0}-{1}_gaia.radec'.format(med_root_i, ch))
                    ii, rr = gaia_rd.match_to_catalog_sky(ph)
                    gaia_rd = gaia_rd[ii][rr.value < 2]
                    gaia_pts = np.array([gaia_rd['ra'].data, 
                                         gaia_rd['dec'].data]).T
                except:
                    gaia_rd = []

            #data = im['CBCD'].data - aor_med[0].data

            # Change output units to uJy / pix
            if ch == 'mips1':
                # un = 1*u.MJy/u.sr
                # #to_ujy_px = un.to(u.uJy/u.arcsec**2).value*(out_wcs.pscale**2)
                # to_ujy_px = un.to(u.uJy/u.arcsec**2).value*(native_scale**2)
                to_ujy_px = 146.902690
            else:
                # native_scale = 1.223
                # un = 1*u.MJy/u.sr
                # #to_ujy_px = un.to(u.uJy/u.arcsec**2).value*(out_wcs.pscale**2)
                # to_ujy_px = un.to(u.uJy/u.arcsec**2).value*(native_scale**2)
                to_ujy_px = 35.17517196810

            blot_data = ablot.do_blot(ref_data, ref_wcs, wcs, 1, coeffs=True, 
                                      interp=blot_interp, 
                                      sinscl=1.0, stepsize=10, 
                                      wcsmap=None)/to_ujy_px

            # mask for bright stars
            eblot = 1-np.clip(blot_data, 0, bright_fmax)/bright_fmax

            # Initial CR
            clean = im[0].data - med_data - im['WCS'].header['PEDESTAL']
            dq = (clean - blot_data)*np.sqrt(ivar)*eblot > driz_cr[0]

            # Adjacent CRs
            dq_dil = binary_dilation(dq, selem=dil)
            dq |= ((clean - blot_data)*np.sqrt(ivar)*eblot > driz_cr[1]) & (dq_dil)
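            # (driz_cr[0] above is the primary sigma threshold for flagging
            #  cosmic rays against the blotted reference mosaic; driz_cr[1] is a
            #  lower threshold applied only to pixels adjacent to an
            #  already-flagged CR, via the dilated mask dq_dil.)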

            # Very negative pixels
            dq |= clean*np.sqrt(ivar) < -4

            original_dq = im['WCS'].data - (im['WCS'].data & 1)
            dq |= original_dq > 0

            # Pulldown correction for bright stars
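            # (GAIA stars falling on this frame are used to flag the detector
            #  column through each bright star, excluding the +/-10 pixel core,
            #  to mask the column-pulldown artifact.)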
            if len(gaia_rd) > 0:       
                mat = fp.contains_points(gaia_pts) 
                if mat.sum() > 0:
                    xg, yg = wcs.all_world2pix(gaia_rd['ra'][mat], gaia_rd['dec'][mat], 0)
                    sh = dq.shape
                    mat = (xg > 0) & (xg < sh[1]) & (yg > 0) & (yg < sh[0])
                    if mat.sum() > 0:
                        for xi, yi in zip(xg[mat], yg[mat]):
                            dq |= (np.abs(xp-xi) < 2) & (np.abs(yp-yi) > 10)

            if i == 0:
                res = utils.drizzle_array_groups([clean], [ivar*(dq == 0)], [wcs], outputwcs=out_wcs, kernel=kernel, pixfrac=pixfrac, data=None, verbose=False)
                # Copy header keywords
                wcs_header = utils.to_header(wcs)
                for k in im[0].header:
                    if (k not in ['', 'HISTORY', 'COMMENT']) & (k not in out_header) & (k not in wcs_header):
                        out_header[k] = im[0].header[k]

            else:
                _ = utils.drizzle_array_groups([clean], [ivar*(dq == 0)], [wcs], outputwcs=out_wcs, kernel=kernel, pixfrac=pixfrac, data=res[:3], verbose=False)

            out_header['NDRIZIM'] += 1
            out_header['EXPTIME'] += im[0].header['EXPTIME']
            
            im.close()
            
        # Pixel scale factor for weights
        wht_scale = (out_wcs.pscale/wcs.pscale)**-4

        # Write final images
        pyfits.writeto('{0}-{1}_drz_sci.fits'.format(root, ch), data=res[0]*to_ujy_px, header=out_header, 
                       output_verify='fix', overwrite=True)
        pyfits.writeto('{0}-{1}_drz_wht.fits'.format(root, ch), data=res[1]*wht_scale/to_ujy_px**2, 
                       header=out_header, output_verify='fix', overwrite=True)
    
    ##########
    ## Show the final drizzled images
    plt.ioff()
    files = glob.glob(f'{root}-ch*sci.fits')
    files.sort()
    
    if len(files) == 1:
        subs = 1,1
        fs = [7,7]
    elif len(files) == 2:
        subs = 1,2
        fs = [14,7]
    elif len(files) == 3:
        subs = 2,2
        fs = [14,14]
    else:
        subs = 2,2
        fs = [14,14]
        
    fig = plt.figure(figsize=fs)
    for i, file in enumerate(files[:4]):
        im = pyfits.open(file)
        print('{0} {1} {2:.1f} s'.format(file, im[0].header['FILTER'], im[0].header['EXPTIME']))
        ax = fig.add_subplot(subs[0], subs[1], 1+i)
        scl = (final_pix/initial_pix)**2
        ax.imshow(im[0].data, vmin=-0.1*scl, vmax=1*scl, cmap='gray_r', origin='lower')
        ax.text(0.05, 0.95, file, ha='left', va='top', color='k', 
                transform=ax.transAxes)
        
        im.close()
        
    if len(files) > 1:
        fig.axes[1].set_yticklabels([])
    
    if len(files) > 2:
        fig.axes[0].set_xticklabels([])
        fig.axes[1].set_xticklabels([])
    
    if len(files) > 3:
        fig.axes[3].set_yticklabels([])
        
    fig.tight_layout(pad=0.5)
    fig.savefig(f'{root}.final.png')
    plt.close('all')
    
    if sync_results:
        print('gzip mosaics')
        os.system(f'gzip -f {root}-ch*_drz*fits {root}-mips*_drz*fits')
    
        ######## Sync
        ## Sync
        print(f's3://{bucket}/Pipeline/{root}/IRAC/')
    
        make_html(root, bucket=bucket)
    
        os.system(f'aws s3 sync ./ s3://{bucket}/Pipeline/{root}/IRAC/'
                  f' --exclude "*" --include "{root}-ch*drz*fits*"'
                  f' --include "{root}-mips*drz*fits*"'
                  f' --include "{root}.*png"'
                  ' --include "*-ch*psf*" --include "*log.fits"' 
                  ' --include "*wcs.[lp]*"'
                  ' --include "*html" --include "*fail*"'
                  ' --acl public-read')
    
        if sync_xbcd:
            aor_files = glob.glob('r*-ch*med.fits')
            for aor_file in aor_files:
                aor = aor_file.split('-ch')[0]
                os.system(f'aws s3 sync ./{aor}/ s3://{bucket}/IRAC/AORS/{aor}/ --exclude "*" --include "ch*/bcd/*xbcd.fits.gz" --acl public-read')
                os.system(f'aws s3 cp {aor_file} s3://{bucket}/IRAC/AORS/ --acl public-read')
                
    msg = f'### Done: \n    https://s3.amazonaws.com/{bucket}/Pipeline/{root}/IRAC/{root}.irac.html'
       
    utils.log_comment(f'/tmp/{root}.success', msg, verbose=True, show_date=True)