Example 1
# Module-level imports assumed by this excerpt (it is taken from a larger
# legacypipe analysis script); colorbar_axes, used in the plotting section,
# is assumed to be provided by the surrounding module.
import os
from collections import Counter
import numpy as np
import fitsio
from astrometry.util.fits import fits_table
from astrometry.util.util import Tan
from legacypipe.utils import find_unique_pixels


def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-o',
                        '--out',
                        dest='outfn',
                        help='Output filename',
                        default='TMP/nexp.fits')
    parser.add_argument('--merge',
                        action='store_true',
                        help='Merge sub-tables')
    parser.add_argument('--plot', action='store_true', help='Plot results')
    parser.add_argument('files',
                        metavar='nexp-file.fits.gz',
                        nargs='+',
                        help='List of nexp files to process')

    opt = parser.parse_args()

    fns = opt.files

    if opt.merge:
        from astrometry.util.fits import merge_tables
        TT = []
        for fn in fns:
            T = fits_table(fn)
            print(fn, '->', len(T))
            TT.append(T)
        T = merge_tables(TT)
        T.writeto(opt.outfn)
        print('Wrote', opt.outfn)

    if opt.plot:
        T = fits_table(opt.files[0])
        import pylab as plt
        import matplotlib

        ax = [360, 0, -21, 36]

        def radec_plot():
            plt.axis(ax)
            plt.xlabel('RA (deg)')
            plt.xticks(np.arange(0, 361, 45))
            plt.ylabel('Dec (deg)')

            gl = np.arange(361)
            gb = np.zeros_like(gl)
            from astrometry.util.starutil_numpy import lbtoradec
            rr, dd = lbtoradec(gl, gb)
            plt.plot(rr, dd, 'k-', alpha=0.5, lw=1)
            rr, dd = lbtoradec(gl, gb + 10)
            plt.plot(rr, dd, 'k-', alpha=0.25, lw=1)
            rr, dd = lbtoradec(gl, gb - 10)
            plt.plot(rr, dd, 'k-', alpha=0.25, lw=1)

        plt.figure(figsize=(8, 5))
        plt.subplots_adjust(left=0.1, right=0.98, top=0.93)

        # Map of the tile centers we want to observe...
        O = fits_table('obstatus/decam-tiles_obstatus.fits')
        O.cut(O.in_desi == 1)
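        # Rasterize the footprint: any grid point within 1 deg of an in-DESI
        # tile center gets marked True; desi_map() below draws this boolean
        # map as the plot background.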
        rr, dd = np.meshgrid(np.linspace(ax[1], ax[0], 700),
                             np.linspace(ax[2], ax[3], 200))
        from astrometry.libkd.spherematch import match_radec
        I, J, d = match_radec(O.ra, O.dec, rr.ravel(), dd.ravel(), 1.)
        desimap = np.zeros(rr.shape, bool)
        desimap.flat[J] = True

        def desi_map():
            # Show the DESI tile map in the background.
            from astrometry.util.plotutils import antigray
            plt.imshow(desimap,
                       origin='lower',
                       interpolation='nearest',
                       extent=[ax[1], ax[0], ax[2], ax[3]],
                       aspect='auto',
                       cmap=antigray,
                       vmax=8)

        for band in 'grz':
            plt.clf()
            desi_map()
            N = T.get('nexp_%s' % band)
            I = np.flatnonzero(N > 0)
            #cm = matplotlib.cm.get_cmap('jet', 6)
            #cm = matplotlib.cm.get_cmap('winter', 5)
            cm = matplotlib.cm.get_cmap('viridis', 5)
            plt.scatter(T.ra[I],
                        T.dec[I],
                        c=N[I],
                        s=2,
                        edgecolors='none',
                        vmin=0.5,
                        vmax=5.5,
                        cmap=cm)
            radec_plot()
            cax = colorbar_axes(plt.gca(), frac=0.06)
            plt.colorbar(cax=cax, ticks=range(6))
            #plt.colorbar(ticks=range(6))
            plt.title('DECaLS DR3: Number of exposures in %s' % band)
            plt.savefig('nexp-%s.png' % band)

            plt.clf()
            desi_map()
            plt.scatter(T.ra,
                        T.dec,
                        c=T.get('psfsize_%s' % band),
                        s=2,
                        edgecolors='none',
                        vmin=0,
                        vmax=2.)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: PSF size, band %s' % band)
            plt.savefig('psfsize-%s.png' % band)

        # Early return: the per-type object-count plots below are never reached.
        return 0

        for col in ['nobjs', 'npsf', 'nsimp', 'nexp', 'ndev', 'ncomp']:
            plt.clf()
            desi_map()
            N = T.get(col)
            mx = np.percentile(N, 99.5)
            plt.scatter(T.ra,
                        T.dec,
                        c=N,
                        s=2,
                        edgecolors='none',
                        vmin=0,
                        vmax=mx)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: Number of objects of type %s' % col[1:])
            plt.savefig('nobjs-%s.png' % col[1:])

        Ntot = T.nobjs
        for col in ['npsf', 'nsimp', 'nexp', 'ndev', 'ncomp']:
            plt.clf()
            desi_map()
            N = T.get(col) / Ntot.astype(np.float32)
            mx = np.percentile(N, 99.5)
            plt.scatter(T.ra,
                        T.dec,
                        c=N,
                        s=2,
                        edgecolors='none',
                        vmin=0,
                        vmax=mx)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: Fraction of objects of type %s' % col[1:])
            plt.savefig('fobjs-%s.png' % col[1:])

        return 0

    # fnpats = opt.files
    # fns = []
    # for pat in fnpats:
    #     pfns = glob(pat)
    #     fns.extend(pfns)
    #     print('Pattern', pat, '->', len(pfns), 'files')
    #fns = glob('coadd/*/*/*-nexp*')
    #fns = glob('coadd/000/*/*-nexp*')
    #fns = glob('coadd/000/0001*/*-nexp*')
    fns.sort()
    print(len(fns), 'nexp files')

    brickset = set()
    bricklist = []
    gn = []
    rn = []
    zn = []

    gnhist = []
    rnhist = []
    znhist = []

    nnhist = 6

    gdepth = []
    rdepth = []
    zdepth = []

    ibricks = []
    nsrcs = []
    npsf = []
    nsimp = []
    nexp = []
    ndev = []
    ncomp = []

    gpsfsize = []
    rpsfsize = []
    zpsfsize = []
    ebv = []
    gtrans = []
    rtrans = []
    ztrans = []

    bricks = fits_table('survey-bricks.fits.gz')

    #sfd = SFDMap()

    W = H = 3600
    # H=3600
    # xx,yy = np.meshgrid(np.arange(W), np.arange(H))
    unique = np.ones((H, W), bool)
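    # Bricks overlap slightly; 'unique' marks the coadd pixels that belong to
    # this brick alone, and all per-band statistics below are computed over
    # those unique pixels only.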
    tlast = 0

    for fn in fns:
        print('File', fn)
        words = fn.split('/')
        dirprefix = '/'.join(words[:-4])
        print('Directory prefix:', dirprefix)
        words = words[-4:]
        brick = words[2]
        print('Brick', brick)
        if not brick in brickset:
            brickset.add(brick)
            bricklist.append(brick)
            gn.append(0)
            rn.append(0)
            zn.append(0)

            gnhist.append([0 for i in range(nnhist)])
            rnhist.append([0 for i in range(nnhist)])
            znhist.append([0 for i in range(nnhist)])

            index = -1
            ibrick = np.nonzero(bricks.brickname == brick)[0][0]
            ibricks.append(ibrick)
            tfn = os.path.join(dirprefix, 'tractor', brick[:3],
                               'tractor-%s.fits' % brick)
            print('Tractor filename', tfn)
            T = fits_table(tfn,
                           columns=[
                               'brick_primary', 'type', 'decam_psfsize', 'ebv',
                               'decam_mw_transmission'
                           ])
            T.cut(T.brick_primary)
            nsrcs.append(len(T))
            types = Counter([t.strip() for t in T.type])
            npsf.append(types['PSF'])
            nsimp.append(types['SIMP'])
            nexp.append(types['EXP'])
            ndev.append(types['DEV'])
            ncomp.append(types['COMP'])
            print('N sources', nsrcs[-1])

            gpsfsize.append(np.median(T.decam_psfsize[:, 1]))
            rpsfsize.append(np.median(T.decam_psfsize[:, 2]))
            zpsfsize.append(np.median(T.decam_psfsize[:, 4]))

            ebv.append(np.median(T.ebv))
            gtrans.append(np.median(T.decam_mw_transmission[:, 1]))
            rtrans.append(np.median(T.decam_mw_transmission[:, 2]))
            ztrans.append(np.median(T.decam_mw_transmission[:, 4]))

            br = bricks[ibrick]

            print('Computing unique brick pixels...')
            #wcs = Tan(fn, 0)
            #W,H = int(wcs.get_width()), int(wcs.get_height())

            pixscale = 0.262 / 3600.
            wcs = Tan(br.ra, br.dec, W / 2. + 0.5, H / 2. + 0.5, -pixscale, 0.,
                      0., pixscale, float(W), float(H))
            import time

            t0 = time.clock()

            unique[:, :] = True

            find_unique_pixels(wcs, W, H, unique, br.ra1, br.ra2, br.dec1,
                               br.dec2)

            # for i in range(W/2):
            #     allin = True
            #     lo,hi = i, W-i-1
            #     # one slice per side
            #     side = slice(lo,hi+1)
            #     top = (lo, side)
            #     bot = (hi, side)
            #     left  = (side, lo)
            #     right = (side, hi)
            #     for slc in [top, bot, left, right]:
            #         #print('xx,yy', xx[slc], yy[slc])
            #         rr,dd = wcs.pixelxy2radec(xx[slc]+1, yy[slc]+1)
            #         U = (rr >= br.ra1 ) * (rr < br.ra2 ) * (dd >= br.dec1) * (dd < br.dec2)
            #         #print('Pixel', i, ':', np.sum(U), 'of', len(U), 'pixels are unique')
            #         allin *= np.all(U)
            #         unique[slc] = U
            #     if allin:
            #         print('Scanned to pixel', i)
            #         break

            t1 = time.clock()
            U = np.flatnonzero(unique)
            t2 = time.clock()
            print(len(U), 'of', W * H, 'pixels are unique to this brick')

            # #t3 = time.clock()
            #rr,dd = wcs.pixelxy2radec(xx+1, yy+1)
            # #t4 = time.clock()
            # #u = (rr >= br.ra1 ) * (rr < br.ra2 ) * (dd >= br.dec1) * (dd < br.dec2)
            # #t5 = time.clock()
            # #U2 = np.flatnonzero(u)
            #U2 = np.flatnonzero((rr >= br.ra1 ) * (rr < br.ra2 ) *
            #                    (dd >= br.dec1) * (dd < br.dec2))
            #assert(np.all(U == U2))
            #assert(len(U) == len(U2))
            # #t6 = time.clock()
            # print(len(U2), 'of', W*H, 'pixels are unique to this brick')
            #

            #print(t0-tlast, 'other time')
            #tlast = time.clock() #t2
            #print('t1:', t1-t0, 't2', t2-t1)

            # #print('t4:', t4-t3, 't5', t5-t4, 't6', t6-t5)
            #

        else:
            index = bricklist.index(brick)
            assert (index == len(bricklist) - 1)

        index = bricklist.index(brick)
        assert (index == len(bricklist) - 1)

        filepart = words[-1]
        filepart = filepart.replace('.fits.gz', '')
        print('File:', filepart)
        band = filepart[-1]
        assert (band in 'grz')

        nlist, nhist = dict(g=(gn, gnhist), r=(rn, rnhist),
                            z=(zn, znhist))[band]

        upix = fitsio.read(fn).flat[U]
        med = np.median(upix)
        print('Band', band, ': Median', med)
        nlist[index] = med

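        # Exposure-count histogram over the unique-brick pixels: bins
        # 0..nnhist-2 count pixels with exactly that many exposures; the last
        # bin collects all pixels with >= nnhist-1 exposures.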
        hist = nhist[index]
        for i in range(nnhist):
            if i < nnhist - 1:
                hist[i] = np.sum(upix == i)
            else:
                hist[i] = np.sum(upix >= i)
        assert (sum(hist) == len(upix))
        print('Number of exposures histogram:', hist)

    ibricks = np.array(ibricks)

    print('Maximum number of sources:', max(nsrcs))

    T = fits_table()
    T.brickname = np.array(bricklist)
    T.ra = bricks.ra[ibricks]
    T.dec = bricks.dec[ibricks]
    T.nexp_g = np.array(gn).astype(np.int16)
    T.nexp_r = np.array(rn).astype(np.int16)
    T.nexp_z = np.array(zn).astype(np.int16)
    T.nexphist_g = np.array(gnhist).astype(np.int32)
    T.nexphist_r = np.array(rnhist).astype(np.int32)
    T.nexphist_z = np.array(znhist).astype(np.int32)
    T.nobjs = np.array(nsrcs).astype(np.int16)
    T.npsf = np.array(npsf).astype(np.int16)
    T.nsimp = np.array(nsimp).astype(np.int16)
    T.nexp = np.array(nexp).astype(np.int16)
    T.ndev = np.array(ndev).astype(np.int16)
    T.ncomp = np.array(ncomp).astype(np.int16)
    T.psfsize_g = np.array(gpsfsize).astype(np.float32)
    T.psfsize_r = np.array(rpsfsize).astype(np.float32)
    T.psfsize_z = np.array(zpsfsize).astype(np.float32)
    T.ebv = np.array(ebv).astype(np.float32)
    T.trans_g = np.array(gtrans).astype(np.float32)
    T.trans_r = np.array(rtrans).astype(np.float32)
    T.trans_z = np.array(ztrans).astype(np.float32)
    T.writeto(opt.outfn)
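A minimal sketch, not part of the original script, of how the merged table written above might be inspected; the path is the script's default --out value, and the histogram interpretation follows the code above (nnhist = 6 bins, the last an overflow bin for >= 5 exposures).

# Sketch: inspect the per-brick exposure-count histograms written by this script.
from astrometry.util.fits import fits_table
import numpy as np

T = fits_table('TMP/nexp.fits')   # the script's default output filename
tot = T.nexphist_g.sum(axis=1).astype(float)
# Bin 0 counts unique-brick pixels with zero g-band exposures.
frac_zero = T.nexphist_g[:, 0] / np.maximum(tot, 1)
print('Median zero-coverage fraction in g:', np.median(frac_zero))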
Example 2
def sky_fibers_for_brick(
        survey,
        brickname,
        bands=['g', 'r', 'z'],
        apertures_arcsec=[0.5, 0.75, 1., 1.5, 2., 3.5, 5., 7.]):
    '''
    Produces a table of candidate DESI sky fiber locations for a given
    brick (eg, "0001p000"), using data products read from the given
    LegacySurveyData object *survey*.
    '''
    import os
    import numpy as np
    import fitsio
    from astrometry.util.fits import fits_table
    from astrometry.util.util import Tan
    import photutils
    from legacypipe.utils import find_unique_pixels
    # sky_fiber_locations(), called below, is a sibling function in the same module.

    fn = survey.find_file('blobmap', brick=brickname)
    blobs = fitsio.read(fn)
    print('Blobs:', blobs.min(), blobs.max())
    header = fitsio.read_header(fn)
    wcs = Tan(header)

    goodpix = (blobs == -1)
    for band in bands:
        fn = survey.find_file('nexp', brick=brickname, band=band)
        if not os.path.exists(fn):
            # Skip
            continue
        nexp = fitsio.read(fn)
        goodpix[nexp == 0] = False

    # Cut to unique brick area... required since the blob map drops
    # blobs that are completely outside the brick's unique area, thus
    # those locations are not masked.
    brick = survey.get_brick_by_name(brickname)
    H, W = wcs.shape
    U = find_unique_pixels(wcs, W, H, None, brick.ra1, brick.ra2, brick.dec1,
                           brick.dec2)
    goodpix[U == 0] = False
    del U

    x, y, blobdist = sky_fiber_locations(goodpix)

    skyfibers = fits_table()
    skyfibers.brickid = np.zeros(len(x), np.int32) + brick.brickid
    skyfibers.brickname = np.array([brickname] * len(x))
    skyfibers.x = x.astype(np.int16)
    skyfibers.y = y.astype(np.int16)
    skyfibers.blobdist = blobdist
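    # Tan.pixelxy2radec expects FITS-style 1-indexed pixel coordinates, hence
    # the +1; the x,y stored above stay in 0-indexed array coordinates.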
    skyfibers.ra, skyfibers.dec = wcs.pixelxy2radec(x + 1, y + 1)

    pixscale = wcs.pixel_scale()
    apertures = np.array(apertures_arcsec) / pixscale

    # Now, do aperture photometry at these points in the coadd images
    for band in bands:
        imfn = survey.find_file('image', brick=brickname, band=band)
        ivfn = survey.find_file('invvar', brick=brickname, band=band)
        if not (os.path.exists(imfn) and os.path.exists(ivfn)):
            continue
        coimg = fitsio.read(imfn)
        coiv = fitsio.read(ivfn)

        apflux = np.zeros((len(skyfibers), len(apertures)), np.float32)
        apiv = np.zeros((len(skyfibers), len(apertures)), np.float32)
        skyfibers.set('apflux_%s' % band, apflux)
        skyfibers.set('apflux_ivar_%s' % band, apiv)
        with np.errstate(divide='ignore', invalid='ignore'):
            imsigma = 1. / np.sqrt(coiv)
            imsigma[coiv == 0] = 0
        apxy = np.vstack((skyfibers.x, skyfibers.y)).T
        for irad, rad in enumerate(apertures):
            aper = photutils.CircularAperture(apxy, rad)
            p = photutils.aperture_photometry(coimg, aper, error=imsigma)
            apflux[:, irad] = p.field('aperture_sum')
            err = p.field('aperture_sum_err')
            apiv[:, irad] = 1. / err**2

    header = fitsio.FITSHDR()
    for i, ap in enumerate(apertures_arcsec):
        header.add_record(
            dict(name='AP%i' % i, value=ap,
                 comment='Aperture radius (arcsec)'))
    skyfibers._header = header

    return skyfibers
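A hedged usage sketch for the function above, assuming a standard legacypipe installation; the survey directory and output filename are placeholders, and sky_fiber_locations (called inside the function) must be importable from the same module.

from legacypipe.survey import LegacySurveyData

survey = LegacySurveyData(survey_dir='/path/to/legacysurvey')   # placeholder path
fibers = sky_fibers_for_brick(survey, '0001p000')
print(len(fibers), 'candidate sky fiber positions')
fibers.writeto('skyfibers-0001p000.fits')   # fibers._header holds the AP<i> aperture radii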
Example 3
# Module-level imports assumed by this excerpt (it is taken from a larger
# legacypipe analysis script); colorbar_axes, used in the plotting section,
# is assumed to be provided by the surrounding module.
import os
from collections import Counter
import numpy as np
import fitsio
from astrometry.util.fits import fits_table
from astrometry.util.util import Tan
from legacypipe.utils import find_unique_pixels


def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--out', dest='outfn', help='Output filename',
                      default='TMP/nexp.fits')
    parser.add_argument('--merge', action='store_true', help='Merge sub-tables')
    parser.add_argument('--plot', action='store_true', help='Plot results')
    parser.add_argument('files', metavar='nexp-file.fits.gz', nargs='+',
                        help='List of nexp files to process')

    opt = parser.parse_args()

    fns = opt.files

    if opt.merge:
        from astrometry.util.fits import merge_tables
        TT = []
        for fn in fns:
            T = fits_table(fn)
            print(fn, '->', len(T))
            TT.append(T)
        T = merge_tables(TT)
        T.writeto(opt.outfn)
        print('Wrote', opt.outfn)

    if opt.plot:
        T = fits_table(opt.files[0])
        import pylab as plt
        import matplotlib
        
        ax = [360, 0, -21, 36]

        def radec_plot():
            plt.axis(ax)
            plt.xlabel('RA (deg)')
            plt.xticks(np.arange(0, 361, 45))
            plt.ylabel('Dec (deg)')

            gl = np.arange(361)
            gb = np.zeros_like(gl)
            from astrometry.util.starutil_numpy import lbtoradec
            rr,dd = lbtoradec(gl, gb)
            plt.plot(rr, dd, 'k-', alpha=0.5, lw=1)
            rr,dd = lbtoradec(gl, gb+10)
            plt.plot(rr, dd, 'k-', alpha=0.25, lw=1)
            rr,dd = lbtoradec(gl, gb-10)
            plt.plot(rr, dd, 'k-', alpha=0.25, lw=1)
            
        plt.figure(figsize=(8,5))
        plt.subplots_adjust(left=0.1, right=0.98, top=0.93)
        
        # Map of the tile centers we want to observe...
        O = fits_table('obstatus/decam-tiles_obstatus.fits')
        O.cut(O.in_desi == 1)
        rr,dd = np.meshgrid(np.linspace(ax[1],ax[0], 700),
                            np.linspace(ax[2],ax[3], 200))
        from astrometry.libkd.spherematch import match_radec
        I,J,d = match_radec(O.ra, O.dec, rr.ravel(), dd.ravel(), 1.)
        desimap = np.zeros(rr.shape, bool)
        desimap.flat[J] = True

        def desi_map():
            # Show the DESI tile map in the background.
            from astrometry.util.plotutils import antigray
            plt.imshow(desimap, origin='lower', interpolation='nearest',
                       extent=[ax[1],ax[0],ax[2],ax[3]], aspect='auto',
                       cmap=antigray, vmax=8)

        for band in 'grz':
            plt.clf()
            desi_map()
            N = T.get('nexp_%s' % band)
            I = np.flatnonzero(N > 0)
            #cm = matplotlib.cm.get_cmap('jet', 6)
            #cm = matplotlib.cm.get_cmap('winter', 5)
            cm = matplotlib.cm.get_cmap('viridis', 5)
            plt.scatter(T.ra[I], T.dec[I], c=N[I], s=2,
                        edgecolors='none',
                        vmin=0.5, vmax=5.5, cmap=cm)
            radec_plot()
            cax = colorbar_axes(plt.gca(), frac=0.06)
            plt.colorbar(cax=cax, ticks=range(6))
            #plt.colorbar(ticks=range(6))
            plt.title('DECaLS DR3: Number of exposures in %s' % band)
            plt.savefig('nexp-%s.png' % band)

            plt.clf()
            desi_map()
            plt.scatter(T.ra, T.dec, c=T.get('psfsize_%s' % band), s=2,
                        edgecolors='none', vmin=0, vmax=2.)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: PSF size, band %s' % band)
            plt.savefig('psfsize-%s.png' % band)

        # Early return: the per-type object-count plots below are never reached.
        return 0
            
        for col in ['nobjs', 'npsf', 'nsimp', 'nexp', 'ndev', 'ncomp']:
            plt.clf()
            desi_map()
            N = T.get(col)
            mx = np.percentile(N, 99.5)
            plt.scatter(T.ra, T.dec, c=N, s=2,
                        edgecolors='none', vmin=0, vmax=mx)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: Number of objects of type %s' % col[1:])
            plt.savefig('nobjs-%s.png' % col[1:])

        Ntot = T.nobjs
        for col in ['npsf', 'nsimp', 'nexp', 'ndev', 'ncomp']:
            plt.clf()
            desi_map()
            N = T.get(col) / Ntot.astype(np.float32)
            mx = np.percentile(N, 99.5)
            plt.scatter(T.ra, T.dec, c=N, s=2,
                        edgecolors='none', vmin=0, vmax=mx)
            radec_plot()
            plt.colorbar()
            plt.title('DECaLS DR3: Fraction of objects of type %s' % col[1:])
            plt.savefig('fobjs-%s.png' % col[1:])

            
        return 0

    # fnpats = opt.files
    # fns = []
    # for pat in fnpats:
    #     pfns = glob(pat)
    #     fns.extend(pfns)
    #     print('Pattern', pat, '->', len(pfns), 'files')
    #fns = glob('coadd/*/*/*-nexp*')
    #fns = glob('coadd/000/*/*-nexp*')
    #fns = glob('coadd/000/0001*/*-nexp*')
    fns.sort()
    print(len(fns), 'nexp files')
    
    brickset = set()
    bricklist = []
    gn = []
    rn = []
    zn = []
    
    gnhist = []
    rnhist = []
    znhist = []
    
    nnhist = 6
    
    gdepth = []
    rdepth = []
    zdepth = []
    
    ibricks = []
    nsrcs = []
    npsf  = []
    nsimp = []
    nexp  = []
    ndev  = []
    ncomp = []
    
    gpsfsize = []
    rpsfsize = []
    zpsfsize = []
    ebv = []
    gtrans = []
    rtrans = []
    ztrans = []
    
    bricks = fits_table('survey-bricks.fits.gz')
    
    #sfd = SFDMap()
    
    W = H = 3600
    # H=3600
    # xx,yy = np.meshgrid(np.arange(W), np.arange(H))
    unique = np.ones((H,W), bool)
    tlast = 0
    
    for fn in fns:
        print('File', fn)
        words = fn.split('/')
        dirprefix = '/'.join(words[:-4])
        print('Directory prefix:', dirprefix)
        words = words[-4:]
        brick = words[2]
        print('Brick', brick)
        if not brick in brickset:
            brickset.add(brick)
            bricklist.append(brick)
            gn.append(0)
            rn.append(0)
            zn.append(0)
    
            gnhist.append([0 for i in range(nnhist)])
            rnhist.append([0 for i in range(nnhist)])
            znhist.append([0 for i in range(nnhist)])
    
            index = -1
            ibrick = np.nonzero(bricks.brickname == brick)[0][0]
            ibricks.append(ibrick)
            tfn = os.path.join(dirprefix, 'tractor', brick[:3], 'tractor-%s.fits'%brick)
            print('Tractor filename', tfn)
            T = fits_table(tfn, columns=['brick_primary', 'type', 'decam_psfsize',
                                         'ebv', 'decam_mw_transmission'])
            T.cut(T.brick_primary)
            nsrcs.append(len(T))
            types = Counter([t.strip() for t in T.type])
            npsf.append(types['PSF'])
            nsimp.append(types['SIMP'])
            nexp.append(types['EXP'])
            ndev.append(types['DEV'])
            ncomp.append(types['COMP'])
            print('N sources', nsrcs[-1])
    
            gpsfsize.append(np.median(T.decam_psfsize[:,1]))
            rpsfsize.append(np.median(T.decam_psfsize[:,2]))
            zpsfsize.append(np.median(T.decam_psfsize[:,4]))
    
            ebv.append(np.median(T.ebv))
            gtrans.append(np.median(T.decam_mw_transmission[:,1]))
            rtrans.append(np.median(T.decam_mw_transmission[:,2]))
            ztrans.append(np.median(T.decam_mw_transmission[:,4]))
    
            br = bricks[ibrick]
    
            print('Computing unique brick pixels...')
            #wcs = Tan(fn, 0)
            #W,H = int(wcs.get_width()), int(wcs.get_height())
    
            pixscale = 0.262/3600.
            wcs = Tan(br.ra, br.dec, W/2.+0.5, H/2.+0.5,
                      -pixscale, 0., 0., pixscale,
                      float(W), float(H))
            import time
    
            t0 = time.clock()
    
            unique[:,:] = True
    
            find_unique_pixels(wcs, W, H, unique,
                               br.ra1, br.ra2, br.dec1, br.dec2)
    
            # for i in range(W/2):
            #     allin = True
            #     lo,hi = i, W-i-1
            #     # one slice per side
            #     side = slice(lo,hi+1)
            #     top = (lo, side)
            #     bot = (hi, side)
            #     left  = (side, lo)
            #     right = (side, hi)
            #     for slc in [top, bot, left, right]:
            #         #print('xx,yy', xx[slc], yy[slc])
            #         rr,dd = wcs.pixelxy2radec(xx[slc]+1, yy[slc]+1)
            #         U = (rr >= br.ra1 ) * (rr < br.ra2 ) * (dd >= br.dec1) * (dd < br.dec2)
            #         #print('Pixel', i, ':', np.sum(U), 'of', len(U), 'pixels are unique')
            #         allin *= np.all(U)
            #         unique[slc] = U
            #     if allin:
            #         print('Scanned to pixel', i)
            #         break
    
            t1 = time.clock()
            U = np.flatnonzero(unique)
            t2 = time.clock()
            print(len(U), 'of', W*H, 'pixels are unique to this brick')
    
            # #t3 = time.clock()
            #rr,dd = wcs.pixelxy2radec(xx+1, yy+1)
            # #t4 = time.clock()
            # #u = (rr >= br.ra1 ) * (rr < br.ra2 ) * (dd >= br.dec1) * (dd < br.dec2)
            # #t5 = time.clock()
            # #U2 = np.flatnonzero(u)
            #U2 = np.flatnonzero((rr >= br.ra1 ) * (rr < br.ra2 ) *
            #                    (dd >= br.dec1) * (dd < br.dec2))
            #assert(np.all(U == U2))
            #assert(len(U) == len(U2))
            # #t6 = time.clock()
            # print(len(U2), 'of', W*H, 'pixels are unique to this brick')
            # 
    
            #print(t0-tlast, 'other time')
            #tlast = time.clock() #t2
            #print('t1:', t1-t0, 't2', t2-t1)
    
            # #print('t4:', t4-t3, 't5', t5-t4, 't6', t6-t5)
            # 
    
        else:
            index = bricklist.index(brick)
            assert(index == len(bricklist)-1)
    
        index = bricklist.index(brick)
        assert(index == len(bricklist)-1)
    
        filepart = words[-1]
        filepart = filepart.replace('.fits.gz', '')
        print('File:', filepart)
        band = filepart[-1]
        assert(band in 'grz')
    
        nlist,nhist = dict(g=(gn,gnhist), r=(rn,rnhist), z=(zn,znhist))[band]
    
        upix = fitsio.read(fn).flat[U]
        med = np.median(upix)
        print('Band', band, ': Median', med)
        nlist[index] = med
    
        hist = nhist[index]
        for i in range(nnhist):
            if i < nnhist-1:
                hist[i] = np.sum(upix == i)
            else:
                hist[i] = np.sum(upix >= i)
        assert(sum(hist) == len(upix))
        print('Number of exposures histogram:', hist)
    
    ibricks = np.array(ibricks)
    
    print('Maximum number of sources:', max(nsrcs))
    
    T = fits_table()
    T.brickname = np.array(bricklist)
    T.ra  = bricks.ra [ibricks]
    T.dec = bricks.dec[ibricks]
    T.nexp_g = np.array(gn).astype(np.int16)
    T.nexp_r = np.array(rn).astype(np.int16)
    T.nexp_z = np.array(zn).astype(np.int16)
    T.nexphist_g = np.array(gnhist).astype(np.int32)
    T.nexphist_r = np.array(rnhist).astype(np.int32)
    T.nexphist_z = np.array(znhist).astype(np.int32)
    T.nobjs  = np.array(nsrcs).astype(np.int16)
    T.npsf   = np.array(npsf ).astype(np.int16)
    T.nsimp  = np.array(nsimp).astype(np.int16)
    T.nexp   = np.array(nexp ).astype(np.int16)
    T.ndev   = np.array(ndev ).astype(np.int16)
    T.ncomp  = np.array(ncomp).astype(np.int16)
    T.psfsize_g = np.array(gpsfsize).astype(np.float32)
    T.psfsize_r = np.array(rpsfsize).astype(np.float32)
    T.psfsize_z = np.array(zpsfsize).astype(np.float32)
    T.ebv = np.array(ebv).astype(np.float32)
    T.trans_g = np.array(gtrans).astype(np.float32)
    T.trans_r = np.array(rtrans).astype(np.float32)
    T.trans_z = np.array(ztrans).astype(np.float32)
    T.writeto(opt.outfn)
Example 4
# Module-level imports assumed by this excerpt; add_brick_data, plots and
# depth_hist are sibling functions from the surrounding module (not shown).
import os
from collections import Counter
import numpy as np
import fitsio
from astrometry.util.fits import fits_table
from astrometry.util.util import Tan
from legacypipe.utils import find_unique_pixels


def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-o',
                        '--out',
                        dest='outfn',
                        help='Output filename',
                        default='TMP/nexp.fits')
    parser.add_argument('--merge',
                        action='store_true',
                        help='Merge sub-tables')
    parser.add_argument('--north',
                        action='store_true',
                        default=False,
                        help='Northern survey?')
    parser.add_argument('--plot', action='store_true', help='Plot results')
    parser.add_argument('--depth-hist',
                        action='store_true',
                        help='Depth histograms')
    parser.add_argument('files',
                        metavar='nexp-file.fits.gz',
                        nargs='+',
                        help='List of nexp files to process')

    opt = parser.parse_args()
    fns = opt.files

    if opt.merge:
        from astrometry.util.fits import merge_tables
        TT = []
        for fn in fns:
            T = fits_table(fn)
            print(fn, '->', len(T))
            TT.append(T)
        T = merge_tables(TT)

        add_brick_data(T, opt.north)

        T.writeto(opt.outfn)
        print('Wrote', opt.outfn)
        return

    if opt.plot:
        plots(opt)
        depth_hist(opt)
        return
    if opt.depth_hist:
        depth_hist(opt)
        return

    fns.sort()
    print(len(fns), 'nexp files')
    if len(fns) == 1:
        if not os.path.exists(fns[0]):
            print('No such file.')
            return 0

    brickset = set()
    bricklist = []
    gn = []
    rn = []
    zn = []

    gnhist = []
    rnhist = []
    znhist = []

    nnhist = 6

    ibricks = []
    nsrcs = []
    npsf = []
    nsimp = []
    nrex = []
    nexp = []
    ndev = []
    ncomp = []
    nser = []
    ndup = []

    gpsfsize = []
    rpsfsize = []
    zpsfsize = []

    gpsfdepth = []
    rpsfdepth = []
    zpsfdepth = []
    ggaldepth = []
    rgaldepth = []
    zgaldepth = []

    wise_nobs = []
    wise_trans = []

    ebv = []
    gtrans = []
    rtrans = []
    ztrans = []

    gcosky = []
    rcosky = []
    zcosky = []

    bricks = fits_table('survey-bricks.fits.gz')

    W = H = 3600
    unique = np.ones((H, W), bool)
    tlast = 0

    for ifn, fn in enumerate(fns):
        print('File', (ifn + 1), 'of', len(fns), ':', fn)
        words = fn.split('/')
        dirprefix = '/'.join(words[:-4])
        #print('Directory prefix:', dirprefix)
        words = words[-4:]
        brick = words[2]
        #print('Brick', brick)
        if not brick in brickset:
            try:
                tfn = os.path.join(dirprefix, 'tractor', brick[:3],
                                   'tractor-%s.fits' % brick)
                print('Tractor filename', tfn)
                T = fits_table(
                    tfn,
                    columns=[
                        'brick_primary', 'type', 'psfsize_g', 'psfsize_r',
                        'psfsize_z', 'psfdepth_g', 'psfdepth_r', 'psfdepth_z',
                        'galdepth_g', 'galdepth_r', 'galdepth_z', 'ebv',
                        'mw_transmission_g', 'mw_transmission_r',
                        'mw_transmission_z', 'nobs_w1', 'nobs_w2', 'nobs_w3',
                        'nobs_w4', 'mw_transmission_w1', 'mw_transmission_w2',
                        'mw_transmission_w3', 'mw_transmission_w4'
                    ])
                # we need the primary header, not the table-HDU header!
                #Thdr = T.get_header()
                Thdr = fitsio.read_header(tfn)
            except Exception:
                print('Failed to read FITS table', tfn)
                import traceback
                traceback.print_exc()
                print('Carrying on.')
                continue

            brickset.add(brick)
            bricklist.append(brick)
            gn.append(0)
            rn.append(0)
            zn.append(0)

            gnhist.append([0 for i in range(nnhist)])
            rnhist.append([0 for i in range(nnhist)])
            znhist.append([0 for i in range(nnhist)])

            index = -1
            ibrick = np.nonzero(bricks.brickname == brick)[0][0]
            ibricks.append(ibrick)

            T.cut(T.brick_primary)
            nsrcs.append(len(T))
            types = Counter([t.strip() for t in T.type])
            npsf.append(types['PSF'])
            nsimp.append(types['SIMP'])
            nrex.append(types['REX'])
            nexp.append(types['EXP'])
            ndev.append(types['DEV'])
            ncomp.append(types['COMP'])
            nser.append(types['SER'])
            ndup.append(types['DUP'])
            print('N sources', nsrcs[-1])

            gpsfsize.append(np.median(T.psfsize_g))
            rpsfsize.append(np.median(T.psfsize_r))
            zpsfsize.append(np.median(T.psfsize_z))

            gpsfdepth.append(np.median(T.psfdepth_g))
            rpsfdepth.append(np.median(T.psfdepth_r))
            zpsfdepth.append(np.median(T.psfdepth_z))

            ggaldepth.append(np.median(T.galdepth_g))
            rgaldepth.append(np.median(T.galdepth_r))
            zgaldepth.append(np.median(T.galdepth_z))

            wise_nobs.append(
                np.median(np.vstack(
                    (T.nobs_w1, T.nobs_w2, T.nobs_w3, T.nobs_w4)).T,
                          axis=0))
            wise_trans.append(
                np.median(np.vstack(
                    (T.mw_transmission_w1, T.mw_transmission_w2,
                     T.mw_transmission_w3, T.mw_transmission_w4)).T,
                          axis=0))

            gtrans.append(np.median(T.mw_transmission_g))
            rtrans.append(np.median(T.mw_transmission_r))
            ztrans.append(np.median(T.mw_transmission_z))

            gcosky.append(Thdr.get('COSKY_G', 0.))
            rcosky.append(Thdr.get('COSKY_R', 0.))
            zcosky.append(Thdr.get('COSKY_Z', 0.))

            ebv.append(np.median(T.ebv))

            br = bricks[ibrick]

            #print('Computing unique brick pixels...')
            pixscale = 0.262 / 3600.
            wcs = Tan(br.ra, br.dec, W / 2. + 0.5, H / 2. + 0.5, -pixscale, 0.,
                      0., pixscale, float(W), float(H))
            unique[:, :] = True
            find_unique_pixels(wcs, W, H, unique, br.ra1, br.ra2, br.dec1,
                               br.dec2)
            U = np.flatnonzero(unique)
            #print(len(U), 'of', W*H, 'pixels are unique to this brick')

        else:
            index = bricklist.index(brick)
            assert (index == len(bricklist) - 1)

        index = bricklist.index(brick)
        assert (index == len(bricklist) - 1)

        filepart = words[-1]
        filepart = filepart.replace('.fits.gz', '')
        filepart = filepart.replace('.fits.fz', '')
        print('File:', filepart)
        band = filepart[-1]
        assert (band in 'grz')

        nlist, nhist = dict(g=(gn, gnhist), r=(rn, rnhist),
                            z=(zn, znhist))[band]

        upix = fitsio.read(fn).flat[U]
        med = np.median(upix)
        print('Band', band, ': Median', med)
        nlist[index] = med

        hist = nhist[index]
        for i in range(nnhist):
            if i < nnhist - 1:
                hist[i] = np.sum(upix == i)
            else:
                hist[i] = np.sum(upix >= i)
        assert (sum(hist) == len(upix))
        print('Number of exposures histogram:', hist)

    ibricks = np.array(ibricks)

    T = fits_table()
    T.brickname = np.array(bricklist)
    T.ra = bricks.ra[ibricks]
    T.dec = bricks.dec[ibricks]
    T.nexp_g = np.array(gn).astype(np.int16)
    T.nexp_r = np.array(rn).astype(np.int16)
    T.nexp_z = np.array(zn).astype(np.int16)
    T.nexphist_g = np.array(gnhist).astype(np.int32)
    T.nexphist_r = np.array(rnhist).astype(np.int32)
    T.nexphist_z = np.array(znhist).astype(np.int32)
    T.nobjs = np.array(nsrcs).astype(np.int32)
    T.npsf = np.array(npsf).astype(np.int32)
    T.nsimp = np.array(nsimp).astype(np.int32)
    T.nrex = np.array(nrex).astype(np.int32)
    T.nexp = np.array(nexp).astype(np.int32)
    T.ndev = np.array(ndev).astype(np.int32)
    T.ncomp = np.array(ncomp).astype(np.int32)
    T.nser = np.array(nser).astype(np.int32)
    T.ndup = np.array(ndup).astype(np.int32)
    T.psfsize_g = np.array(gpsfsize).astype(np.float32)
    T.psfsize_r = np.array(rpsfsize).astype(np.float32)
    T.psfsize_z = np.array(zpsfsize).astype(np.float32)
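    # Convert the per-brick median inverse-variance depths (fluxes in
    # nanomaggies) to 5-sigma AB magnitudes:
    #   depth = -2.5 * (log10(5 * sqrt(1/iv)) - 9) = 22.5 - 2.5*log10(5/sqrt(iv))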
    with np.errstate(divide='ignore'):
        T.psfdepth_g = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(gpsfdepth))))).astype(
                np.float32)
        T.psfdepth_r = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(rpsfdepth))))).astype(
                np.float32)
        T.psfdepth_z = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(zpsfdepth))))).astype(
                np.float32)
        T.galdepth_g = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(ggaldepth))))).astype(
                np.float32)
        T.galdepth_r = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(rgaldepth))))).astype(
                np.float32)
        T.galdepth_z = (
            -2.5 *
            (-9. + np.log10(5. * np.sqrt(1. / np.array(zgaldepth))))).astype(
                np.float32)
    for k in [
            'psfdepth_g', 'psfdepth_r', 'psfdepth_z', 'galdepth_g',
            'galdepth_r', 'galdepth_z'
    ]:
        v = T.get(k)
        v[np.logical_not(np.isfinite(v))] = 0.
    T.ebv = np.array(ebv).astype(np.float32)
    T.trans_g = np.array(gtrans).astype(np.float32)
    T.trans_r = np.array(rtrans).astype(np.float32)
    T.trans_z = np.array(ztrans).astype(np.float32)
    T.cosky_g = np.array(gcosky).astype(np.float32)
    T.cosky_r = np.array(rcosky).astype(np.float32)
    T.cosky_z = np.array(zcosky).astype(np.float32)
    T.ext_g = -2.5 * np.log10(T.trans_g)
    T.ext_r = -2.5 * np.log10(T.trans_r)
    T.ext_z = -2.5 * np.log10(T.trans_z)
    T.wise_nobs = np.array(wise_nobs).astype(np.int16)
    T.trans_wise = np.array(wise_trans).astype(np.float32)
    T.ext_w1 = -2.5 * np.log10(T.trans_wise[:, 0])
    T.ext_w2 = -2.5 * np.log10(T.trans_wise[:, 1])
    T.ext_w3 = -2.5 * np.log10(T.trans_wise[:, 2])
    T.ext_w4 = -2.5 * np.log10(T.trans_wise[:, 3])

    T.writeto(opt.outfn)
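A quick worked check of the depth conversion used above (not from the original script): an inverse variance of 1 nanomaggie^-2 corresponds to a 5-sigma depth of 22.5 - 2.5*log10(5), about 20.75 AB mag.

import numpy as np

iv = 1.0    # example inverse variance, nanomaggies^-2
depth = -2.5 * (np.log10(5. * np.sqrt(1. / iv)) - 9.)
print(round(depth, 2))   # 20.75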
Example 5
# Module-level imports assumed by this excerpt; info() and debug() are the
# surrounding module's logging helpers (not shown here).
import numpy as np
from legacypipe.utils import find_unique_pixels


def make_depth_cut(survey, ccds, bands, targetrd, brick, W, H, pixscale,
                   plots, ps, splinesky, gaussPsf, pixPsf, normalizePsf, do_calibs,
                   gitver, targetwcs, old_calibs_ok, get_depth_maps=False, margin=0.5,
                   use_approx_wcs=False):
    if plots:
        import pylab as plt

    # Add some margin to our DESI depth requirements
    target_depth_map = dict(g=24.0 + margin, r=23.4 + margin, z=22.5 + margin)

    # List extra (redundant) target percentiles so that increasing the depth at
    # any of these percentiles causes the image to be kept.
    target_percentiles = np.array(list(range(2, 10)) +
                                  list(range(10, 30, 5)) +
                                  list(range(30, 101, 10)))
    target_ddepths = np.zeros(len(target_percentiles), np.float32)
    target_ddepths[target_percentiles < 10] = -0.3
    target_ddepths[target_percentiles <  5] = -0.6
    #print('Target percentiles:', target_percentiles)
    #print('Target ddepths:', target_ddepths)

    cH,cW = H//10, W//10
    coarsewcs = targetwcs.scale(0.1)
    coarsewcs.imagew = cW
    coarsewcs.imageh = cH

    # Unique pixels in this brick (U: cH x cW boolean)
    U = find_unique_pixels(coarsewcs, cW, cH, None,
                           brick.ra1, brick.ra2, brick.dec1, brick.dec2)
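    # Per-CCD pixel scale (arcsec/pix) from the CD-matrix determinant; note
    # that this overwrites the scalar 'pixscale' argument and is what the
    # seeing estimate below uses.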
    pixscale = 3600. * np.sqrt(np.abs(ccds.cd1_1*ccds.cd2_2 - ccds.cd1_2*ccds.cd2_1))
    seeing = ccds.fwhm * pixscale

    # Compute the rectangle in *coarsewcs* covered by each CCD
    slices = []
    overlapping_ccds = np.zeros(len(ccds), bool)
    for i,ccd in enumerate(ccds):
        wcs = survey.get_approx_wcs(ccd)
        hh,ww = wcs.shape
        rr,dd = wcs.pixelxy2radec([1,ww,ww,1], [1,1,hh,hh])
        ok,xx,yy = coarsewcs.radec2pixelxy(rr, dd)
        y0 = int(np.round(np.clip(yy.min(), 0, cH-1)))
        y1 = int(np.round(np.clip(yy.max(), 0, cH-1)))
        x0 = int(np.round(np.clip(xx.min(), 0, cW-1)))
        x1 = int(np.round(np.clip(xx.max(), 0, cW-1)))
        if y0 == y1 or x0 == x1:
            slices.append(None)
            continue
        # Check whether this CCD overlaps the unique area of this brick...
        if not np.any(U[y0:y1+1, x0:x1+1]):
            info('No overlap with unique area for CCD', ccd.expnum, ccd.ccdname)
            slices.append(None)
            continue
        overlapping_ccds[i] = True
        slices.append((slice(y0, y1+1), slice(x0, x1+1)))

    keep_ccds = np.zeros(len(ccds), bool)
    depthmaps = []

    for band in bands:
        # scalar
        target_depth = target_depth_map[band]
        # vector
        target_depths = target_depth + target_ddepths

        depthiv = np.zeros((cH,cW), np.float32)
        depthmap = np.zeros_like(depthiv)
        depthvalue = np.zeros_like(depthiv)
        last_pcts = np.zeros_like(target_depths)
        # indices of CCDs we still want to look at in the current band
        b_inds = np.where(ccds.filter == band)[0]
        info(len(b_inds), 'CCDs in', band, 'band')
        if len(b_inds) == 0:
            continue
        b_inds = np.array([i for i in b_inds if slices[i] is not None])
        info(len(b_inds), 'CCDs in', band, 'band overlap target')
        if len(b_inds) == 0:
            continue
        # CCDs that we will try before searching for good ones -- CCDs
        # from the same exposure number as CCDs we have chosen to
        # take.
        try_ccds = set()

        # Try DECaLS data first!
        Idecals = np.where(ccds.propid[b_inds] == '2014B-0404')[0]
        if len(Idecals):
            try_ccds.update(b_inds[Idecals])
        debug('Added', len(try_ccds), 'DECaLS CCDs to try-list')

        plot_vals = []

        if plots:
            plt.clf()
            for i in b_inds:
                sy,sx = slices[i]
                x0,x1 = sx.start, sx.stop
                y0,y1 = sy.start, sy.stop
                plt.plot([x0,x0,x1,x1,x0], [y0,y1,y1,y0,y0], 'b-', alpha=0.5)
            plt.title('CCDs overlapping brick: %i in %s band' % (len(b_inds), band))
            ps.savefig()

            nccds = np.zeros((cH,cW), np.int16)
            plt.clf()
            for i in b_inds:
                nccds[slices[i]] += 1
            plt.imshow(nccds, interpolation='nearest', origin='lower', vmin=0)
            plt.colorbar()
            plt.title('CCDs overlapping brick: %i in %s band (%i / %i / %i)' %
                      (len(b_inds), band, nccds.min(), np.median(nccds), nccds.max()))

            ps.savefig()
            #continue

        while len(b_inds):
            if len(try_ccds) == 0:
                # Choose the next CCD to look at in this band.

                # A rough point-source depth proxy would be:
                # metric = np.sqrt(ccds.exptime[b_inds]) / seeing[b_inds]
                # If we want to put more weight on choosing good-seeing images, we could do:
                #metric = np.sqrt(ccds.exptime[b_inds]) / seeing[b_inds]**2

                # depth would be ~ 1 / (sig1 * seeing); we privilege good seeing here.
                metric = 1. / (ccds.sig1[b_inds] * seeing[b_inds]**2)

                # This metric is *BIG* for *GOOD* ccds!

                # Here, we try explicitly to include CCDs that cover
                # pixels that are still shallow by the largest amount
                # for the largest number of percentiles of interest;
                # note that pixels with no coverage get depth 0, so
                # score high in this metric.
                #
                # The value is the depth still required to hit the
                # target, summed over percentiles of interest
                # (for pixels unique to this brick)
                depthvalue[:,:] = 0.
                active = (last_pcts < target_depths)
                for d in target_depths[active]:
                    depthvalue += U * np.maximum(0, d - depthmap)
                ccdvalue = np.zeros(len(b_inds), np.float32)
                for j,i in enumerate(b_inds):
                    #ccdvalue[j] = np.sum(depthvalue[slices[i]])
                    # mean -- we want the most bang for the buck per pixel?
                    ccdvalue[j] = np.mean(depthvalue[slices[i]])
                metric *= ccdvalue

                # *ibest* is an index into b_inds
                ibest = np.argmax(metric)
                # *iccd* is an index into ccds.
                iccd = b_inds[ibest]
                ccd = ccds[iccd]
                debug('Chose best CCD: seeing', seeing[iccd], 'exptime', ccds.exptime[iccd], 'with value', ccdvalue[ibest])

            else:
                iccd = try_ccds.pop()
                ccd = ccds[iccd]
                debug('Popping CCD from use_ccds list')

            # remove *iccd* from b_inds
            b_inds = b_inds[b_inds != iccd]

            im = survey.get_image_object(ccd)
            debug('Band', im.band, 'expnum', im.expnum, 'exptime', im.exptime, 'seeing', im.fwhm*im.pixscale, 'arcsec, propid', im.propid)

            im.check_for_cached_files(survey)
            debug(im)

            if do_calibs:
                kwa = dict(git_version=gitver, old_calibs_ok=old_calibs_ok)
                if gaussPsf:
                    kwa.update(psfex=False)
                if splinesky:
                    kwa.update(splinesky=True)
                im.run_calibs(**kwa)

            if use_approx_wcs:
                debug('Using approximate (TAN) WCS')
                wcs = survey.get_approx_wcs(ccd)
            else:
                debug('Reading WCS from', im.imgfn, 'HDU', im.hdu)
                wcs = im.get_wcs()

            x0,x1,y0,y1,slc = im.get_image_extent(wcs=wcs, radecpoly=targetrd)
            if x0==x1 or y0==y1:
                debug('No actual overlap')
                continue
            wcs = wcs.get_subimage(int(x0), int(y0), int(x1-x0), int(y1-y0))

            if 'galnorm_mean' in ccds.get_columns():
                galnorm = ccd.galnorm_mean
                debug('Using galnorm_mean from CCDs table:', galnorm)
            else:
                psf = im.read_psf_model(x0, y0, gaussPsf=gaussPsf, pixPsf=pixPsf,
                                        normalizePsf=normalizePsf)
                psf = psf.constantPsfAt((x1-x0)//2, (y1-y0)//2)
                # create a fake tim to compute galnorm
                from tractor import PixPos, Flux, ModelMask, Image, NullWCS
                from legacypipe.survey import SimpleGalaxy

                h,w = 50,50
                gal = SimpleGalaxy(PixPos(w//2,h//2), Flux(1.))
                tim = Image(data=np.zeros((h,w), np.float32),
                            psf=psf, wcs=NullWCS(pixscale=im.pixscale))
                mm = ModelMask(0, 0, w, h)
                galmod = gal.getModelPatch(tim, modelMask=mm).patch
                galmod = np.maximum(0, galmod)
                galmod /= galmod.sum()
                galnorm = np.sqrt(np.sum(galmod**2))
            detiv = 1. / (im.sig1 / galnorm)**2
            galdepth = -2.5 * (np.log10(5. * im.sig1 / galnorm) - 9.)
            debug('Galnorm:', galnorm, 'sig1:', im.sig1, 'galdepth', galdepth)

            # Add this image to the depth map...
            from astrometry.util.resample import resample_with_wcs, OverlapError
            try:
                Yo,Xo,_,_,_ = resample_with_wcs(coarsewcs, wcs)
                debug(len(Yo), 'of', (cW*cH), 'pixels covered by this image')
            except OverlapError:
                debug('No overlap')
                continue
            depthiv[Yo,Xo] += detiv

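            # depthmap is the 5-sigma detection limit in AB mag (fluxes in
            # nanomaggies, zeropoint 22.5) over pixels with any coverage.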
            # compute the new depth map & percentiles (including the proposed new CCD)
            depthmap[:,:] = 0.
            depthmap[depthiv > 0] = 22.5 - 2.5*np.log10(5./np.sqrt(depthiv[depthiv > 0]))
            depthpcts = np.percentile(depthmap[U], target_percentiles)

            for i,(p,d,t) in enumerate(zip(target_percentiles, depthpcts, target_depths)):
                info('  pct % 3i, prev %5.2f -> %5.2f vs target %5.2f %s' % (p, last_pcts[i], d, t, ('ok' if d >= t else '')))

            keep = False
            # Did we increase the depth of any target percentile that did not already exceed its target depth?
            if np.any((depthpcts > last_pcts) * (last_pcts < target_depths)):
                keep = True

            # Add any other CCDs from this same expnum to the try_ccds list.
            # (before making the plot)
            I = np.where(ccd.expnum == ccds.expnum[b_inds])[0]
            try_ccds.update(b_inds[I])
            debug('Adding', len(I), 'CCDs with the same expnum to try_ccds list')

            if plots:
                cc = '1' if keep else '0'
                xx = [Xo.min(), Xo.min(), Xo.max(), Xo.max(), Xo.min()]
                yy = [Yo.min(), Yo.max(), Yo.max(), Yo.min(), Yo.min()]
                plot_vals.append(((xx,yy,cc),(last_pcts,depthpcts,keep),im.ccdname))

            if plots and (
                (len(try_ccds) == 0) or np.all(depthpcts >= target_depths)):
                plt.clf()

                plt.subplot2grid((2,2),(0,0))
                plt.imshow(depthvalue, interpolation='nearest', origin='lower',
                           vmin=0)
                plt.xticks([]); plt.yticks([])
                plt.colorbar()
                plt.title('heuristic value')

                plt.subplot2grid((2,2),(0,1))
                plt.imshow(depthmap, interpolation='nearest', origin='lower',
                           vmin=target_depth - 2, vmax=target_depth + 0.5)
                ax = plt.axis()
                for (xx,yy,cc) in [p[0] for p in plot_vals]:
                    plt.plot(xx,yy, '-', color=cc, lw=3)
                plt.axis(ax)
                plt.xticks([]); plt.yticks([])
                plt.colorbar()
                plt.title('depth map')

                plt.subplot2grid((2,2),(1,0), colspan=2)
                ax = plt.gca()
                plt.plot(target_percentiles, target_depths, 'ro', label='Target')
                plt.plot(target_percentiles, target_depths, 'r-')
                for (lp,dp,k) in [p[1] for p in plot_vals]:
                    plt.plot(target_percentiles, lp, 'k-',
                             label='Previous percentiles')
                for (lp,dp,k) in [p[1] for p in plot_vals]:
                    cc = 'b' if k else 'r'
                    plt.plot(target_percentiles, dp, '-', color=cc,
                             label='Depth percentiles')
                ccdnames = ','.join([p[2] for p in plot_vals])
                plot_vals = []

                plt.ylim(target_depth - 2, target_depth + 0.5)
                plt.xscale('log')
                plt.xlabel('Percentile')
                plt.ylabel('Depth')
                plt.title('depth percentiles')
                plt.suptitle('%s %i-%s, exptime %.0f, seeing %.2f, band %s' %
                             (im.camera, im.expnum, ccdnames, im.exptime,
                              im.pixscale * im.fwhm, band))
                ps.savefig()

            if keep:
                info('Keeping this exposure')
            else:
                info('Not keeping this exposure')
                depthiv[Yo,Xo] -= detiv
                continue

            keep_ccds[iccd] = True
            last_pcts = depthpcts

            if np.all(depthpcts >= target_depths):
                info('Reached all target depth percentiles for band', band)
                break

        if get_depth_maps:
            if np.any(depthiv > 0):
                depthmap[:,:] = 0.
                depthmap[depthiv > 0] = 22.5 - 2.5*np.log10(5./np.sqrt(depthiv[depthiv > 0]))
                depthmap[np.logical_not(U)] = np.nan
                depthmaps.append((band, depthmap.copy()))

        if plots:
            I = np.where(ccds.filter == band)[0]
            plt.clf()
            plt.plot(seeing[I], ccds.exptime[I], 'k.')
            # which CCDs from this band are we keeping?
            kept, = np.nonzero(keep_ccds)
            if len(kept):
                kept = kept[ccds.filter[kept] == band]
                plt.plot(seeing[kept], ccds.exptime[kept], 'ro')
            plt.xlabel('Seeing (arcsec)')
            plt.ylabel('Exptime (sec)')
            plt.title('CCDs kept for band %s' % band)
            plt.ylim(0, np.max(ccds.exptime[I]) * 1.1)
            ps.savefig()

    if get_depth_maps:
        return (keep_ccds, overlapping_ccds, depthmaps)
    return keep_ccds, overlapping_ccds
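The selection loop above is easier to see in miniature. The following is a simplified, self-contained sketch of the same greedy idea on synthetic data (all numbers here are made up for illustration): repeatedly pick the candidate that covers the most still-shallow pixels, keep it only if it raises a percentile that has not yet met its target, and stop once every target percentile is satisfied. The real code additionally restricts to each brick's unique pixels, uses per-CCD WCS footprints, sig1 and galnorm, and re-tries CCDs from already-accepted exposures.

# Simplified greedy depth-cut sketch on synthetic data (illustration only).
import numpy as np

rng = np.random.default_rng(0)
npix, nccd = 500, 40
target_depth = 23.4
percentiles = np.array([2, 5, 10, 50, 90])
targets = target_depth + np.where(percentiles < 5, -0.6,
                                  np.where(percentiles < 10, -0.3, 0.))

detiv = rng.uniform(0.5, 2.0, nccd)          # detection inverse-variance per "CCD"
covers = rng.random((nccd, npix)) < 0.4      # which pixels each "CCD" touches

def depth_of(iv):
    # 5-sigma depth in AB mag for fluxes in nanomaggies; zero where uncovered.
    d = np.zeros_like(iv)
    ok = iv > 0
    d[ok] = 22.5 - 2.5 * np.log10(5. / np.sqrt(iv[ok]))
    return d

depthiv = np.zeros(npix)
keep = np.zeros(nccd, bool)
last = np.zeros(len(targets))
remaining = list(range(nccd))

while remaining:
    # Value of each remaining CCD: mean unmet depth over the pixels it covers.
    need = np.zeros(npix)
    depthmap = depth_of(depthiv)
    for t in targets[last < targets]:
        need += np.maximum(0, t - depthmap)
    value = np.array([need[covers[i]].mean() if covers[i].any() else 0.
                      for i in remaining])
    i = remaining.pop(int(np.argmax(value)))

    trial = depthiv.copy()
    trial[covers[i]] += detiv[i]
    pcts = np.percentile(depth_of(trial), percentiles)
    # Keep only if some percentile that has not met its target improves.
    if np.any((pcts > last) & (last < targets)):
        depthiv, last, keep[i] = trial, pcts, True
    if np.all(last >= targets):
        break

print('Kept', keep.sum(), 'of', nccd, 'synthetic CCDs')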