Example #1
0
def main():
    """Fit a DustSheet dust-emission model to Herschel images.

    Command-line driver: parses options, configures logging and
    multiprocessing, then either resumes a previous run from a pickle
    or builds a fresh Tractor (masking pixels outside the model
    footprint and precomputing grid-spread transformations).  Finally
    runs optimization steps, saving diagnostic plots and a pickle of
    the Tractor state after every step.

    NOTE(review): depends on module-level names defined elsewhere in
    this file (multiproc, Time, log_init, np_err_handler,
    unpickle_from_file, pickle_to_file, makeplots, create_tractor,
    RaDecPos, point_in_poly, _map_trans, plt, np).
    """
    import optparse
    import logging
    import sys

    parser = optparse.OptionParser()
    parser.add_option('--threads', dest='threads', default=1, type=int, help='Use this many concurrent processors')
    parser.add_option('-v', '--verbose', dest='verbose', action='count', default=0,
                      help='Make more verbose')

    parser.add_option('--grid', '-g', dest='gridn', type=int, default=5, help='Dust parameter grid size')
    parser.add_option('--steps', '-s', dest='steps', type=int, default=10, help='# Optimization step')
    parser.add_option('--suffix', dest='suffix', default='', help='Output file suffix')

    parser.add_option('--no-100', dest='no100', action='store_true', default=False,
                      help='Omit PACS-100 data?')

    parser.add_option('--callgrind', dest='callgrind', action='store_true', default=False, help='Turn on callgrind around tractor.optimize()')

    parser.add_option('--resume', '-r', dest='resume', type=int, default=-1, help='Resume from a previous run at the given step?')

    parser.add_option('--zoom', dest='zoom', type=float, default=1, help='Scale down the model to only touch the (1/zoom x 1/zoom) central region of the images')

    parser.add_option('--damp', dest='damp', type=float, default=1., help='LSQR damping')

    opt,args = parser.parse_args()

    # Map the -v count onto both the stdlib logging level and the
    # project's own log_init() verbosity scale.
    if opt.verbose == 0:
        lvl = logging.INFO
        log_init(2)
    else:
        lvl = logging.DEBUG
        log_init(3)

    logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)

    # The DebugPool path is deliberately disabled ("and False"); the
    # plain multiprocessing pool below is always used.
    if opt.threads > 1 and False:
        global dpool
        import debugpool
        dpool = debugpool.DebugPool(opt.threads)
        Time.add_measurement(debugpool.DebugPoolMeas(dpool))
        mp = multiproc(pool=dpool)
    else:
        print('N threads', opt.threads)
        mp = multiproc(opt.threads)#, wrap_all=True)

    # Optional callgrind instrumentation around the optimizer calls;
    # 'callgrind' doubles as a flag (None when disabled).
    if opt.callgrind:
        import callgrind
    else:
        callgrind = None

    # Route numpy floating-point errors through the project handler
    # instead of printing warnings.
    np.seterrcall(np_err_handler)
    np.seterr(all='call')
    #np.seterr(all='raise')

    if opt.resume > -1:
        # Resume: restore the Tractor state pickled at the given step
        # and continue optimizing from the next step.
        pfn = 'herschel-%02i%s.pickle' % (opt.resume, opt.suffix)
        print('Reading from', pfn)
        tractor = unpickle_from_file(pfn)
        tractor.mp = mp

        ds = tractor.getCatalog()[0]
        print('DustSheet:', ds)

        # derivs = ds.getParamDerivatives(tim)
        # dim = np.zeros(tim.shape)
        # #for k,deriv in enumerate(derivs[:40]):
        # for k,deriv in enumerate(derivs[::10]):
        #   dim[:,:] = 0
        #   deriv.addTo(dim)
        #   plt.clf()
        #   plt.imshow(dim, interpolation='nearest', origin='lower')
        #   plt.savefig('deriv-%04i.png' % k)

        #tim = tractor.getImages()[0]
        # for it,tim in enumerate(tractor.getImages()):
        #   X = ds._getTransformation(tim)
        #   # #print 'X', X
        #   keys = X.keys()
        #   keys.sort()
        #   # for k in keys[::10]:
        #   # for k in keys[:40]:
        #   for k in keys[::202]:
        #       I,G,nil,nil = X[k]
        #       rim = np.zeros_like(tim.getImage())
        #       rim.ravel()[I] = G
        #       plt.clf()
        #       plt.imshow(rim, interpolation='nearest', origin='lower')
        #       plt.colorbar()
        #       plt.savefig('rim-%i-%04i.png' % (it,k))
        #       print 'pix', k
        # sys.exit(0)

        makeplots(tractor, opt.resume, opt.suffix)
        step0 = opt.resume + 1

    else:
        # Fresh start: build the Tractor from the command-line options.
        step0 = 0
        tractor = create_tractor(opt)
        tractor.mp = mp

        # zero out invvar outside the model bounds.
        ds = tractor.getCatalog()[0]
        rd = ds.getRaDecCorners(margin=0.5)
        for i,tim in enumerate(tractor.getImages()):
            # Project the model's RA,Dec footprint corners into this
            # image's pixel coordinates.
            poly = np.array([tim.getWcs().positionToPixel(
                RaDecPos(rdi[0], rdi[1])) for rdi in rd])
            poly = poly[:-1,:]  # drop the repeated closing vertex
            print('Model bounding box in image', tim.name, 'coordinates:')
            #print poly.shape
            print(poly)
            H,W = tim.shape
            xx,yy = np.meshgrid(np.arange(W), np.arange(H))
            inside = point_in_poly(xx, yy, poly)
            # Pixels outside the model polygon get zero inverse-error,
            # i.e. infinite variance -> ignored by the fit.
            tim.inverr[inside == 0] = 0.

        # Diagnostic plot: image outlines and the model grid in RA,Dec.
        plt.clf()
        for i,tim in enumerate(tractor.images):
            h,w = tim.shape
            rd = [tim.getWcs().pixelToPosition(x,y)
                  for x,y in [(-0.5,-0.5),(w-0.5,-0.5),(w-0.5,h-0.5),
                              (-0.5,h-0.5),(-0.5,-0.5)]]
            plt.plot([p.ra for p in rd], [p.dec for p in rd], '-',
                     label=tim.name)
        rd = ds.getRaDecCorners(margin=0.5)
        plt.plot(rd[:,0], rd[:,1], 'k-', label='Grid')
        mh,mw = ds.shape
        # Dot the four edges of the dust-sheet grid (1-indexed pixels).
        r,d = ds.wcs.pixelxy2radec(1 + np.arange(mw), np.ones(mw))
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(np.ones(mh), 1 + np.arange(mh))
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(1 + np.arange(mw), np.zeros(mw)+mh)
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(np.zeros(mh)+mw, 1 + np.arange(mh))
        plt.plot(r, d, 'k.')
        plt.legend()
        plt.savefig('radec.png')

        print('Precomputing transformations...')
        ds = tractor.getCatalog()[0]

        # Split the grid-spread matrix into strips...
        # Each strip of dy grid rows is computed in parallel via the
        # multiprocessing pool.
        async_results = []
        for im in tractor.getImages():
            args = []
            H,W = ds.shape
            dy = 10
            y = 0
            while y <= H:
                args.append((ds, im, y, min(H, y+dy)))
                y += dy
            async_results.append(mp.map_async(_map_trans, args))
        # Glue to strips back together...
        XX = []
        for ar in async_results:
            Xblocks = ar.get()
            X = Xblocks[0]
            for xi in Xblocks[1:]:
                X.update(xi)
            XX.append(X)

        for im,X in zip(tractor.getImages(), XX):
            ds._normalizeTransformation(im, X)
            ds._setTransformation(im, X)
        print('done precomputing.')

        # Plot the grid-spread functions.
        for itim,tim in enumerate(tractor.images):
            T = ds._getTransformation(tim)
            (I,G,nz,NZI) = T[0]
            plt.clf()
            g = np.zeros(tim.shape, np.float32)
            g.flat[I] = G
            plt.imshow(g, interpolation='nearest', origin='lower', cmap='hot')
            plt.colorbar()
            plt.title('Grid-spread function for cell 0, image %s' % tim.name)
            plt.savefig('gsf-%i.png' % itim)


        # Save the initial (step-0) state.
        makeplots(tractor, 0, opt.suffix)
        pfn = 'herschel-%02i%s.pickle' % (0, opt.suffix)
        pickle_to_file(tractor, pfn)
        print('Wrote', pfn)

    # Only the per-image sky levels remain free on the image side.
    for im in tractor.getImages():
        im.freezeAllBut('sky')

    # Main optimization loop: one LSQR step per iteration, with plots
    # and a checkpoint pickle after each.
    for i in range(step0, opt.steps):
        print('Step', i)
        if callgrind:
            callgrind.callgrind_start_instrumentation()

        tractor.optimize(damp=opt.damp, alphas=[1e-3, 1e-2, 0.1, 0.3, 1., 3., 10., 30., 100.])

        if callgrind:
            callgrind.callgrind_stop_instrumentation()

        makeplots(tractor, 1 + i, opt.suffix)
        pfn = 'herschel-%02i%s.pickle' % (1 + i, opt.suffix)
        pickle_to_file(tractor, pfn)
        print('Wrote', pfn)
Example #2
0
def main():
	"""Fit a DustSheet dust-emission model to Herschel images.

	Python-2 variant of the same driver: parses options, sets up
	logging and multiprocessing, resumes from a pickle or builds a
	fresh Tractor (masking invvar outside the model footprint and
	precomputing grid-spread transformations), then runs optimization
	steps, pickling and plotting after each.

	NOTE(review): depends on module-level names defined elsewhere in
	this file (multiproc, Time, log_init, np_err_handler,
	unpickle_from_file, pickle_to_file, makeplots, create_tractor,
	RaDecPos, point_in_poly, _map_trans, plt, np).
	"""
	import optparse
	import logging
	import sys

	parser = optparse.OptionParser()
	parser.add_option('--threads', dest='threads', default=1, type=int, help='Use this many concurrent processors')
	parser.add_option('-v', '--verbose', dest='verbose', action='count', default=0,
					  help='Make more verbose')

	parser.add_option('--grid', '-g', dest='gridn', type=int, default=5, help='Dust parameter grid size')
	parser.add_option('--steps', '-s', dest='steps', type=int, default=10, help='# Optimization step')
	parser.add_option('--suffix', dest='suffix', default='', help='Output file suffix')

	parser.add_option('--no-100', dest='no100', action='store_true', default=False,
					  help='Omit PACS-100 data?')

	parser.add_option('--callgrind', dest='callgrind', action='store_true', default=False, help='Turn on callgrind around tractor.optimize()')

	parser.add_option('--resume', '-r', dest='resume', type=int, default=-1, help='Resume from a previous run at the given step?')

	parser.add_option('--zoom', dest='zoom', type=float, default=1, help='Scale down the model to only touch the (1/zoom x 1/zoom) central region of the images')

	parser.add_option('--damp', dest='damp', type=float, default=1., help='LSQR damping')

	opt,args = parser.parse_args()

	# Map the -v count onto the logging level and log_init() scale.
	if opt.verbose == 0:
		lvl = logging.INFO
		log_init(2)
	else:
		lvl = logging.DEBUG
		log_init(3)

	logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)

	# DebugPool path deliberately disabled ("and False"); the plain
	# multiprocessing pool below is always used.
	if opt.threads > 1 and False:
		global dpool
		import debugpool
		dpool = debugpool.DebugPool(opt.threads)
		Time.add_measurement(debugpool.DebugPoolMeas(dpool))
		mp = multiproc(pool=dpool)
	else:
		print 'N threads', opt.threads
		mp = multiproc(opt.threads)#, wrap_all=True)

	# Optional callgrind instrumentation; 'callgrind' doubles as a flag.
	if opt.callgrind:
		import callgrind
	else:
		callgrind = None

	# Route numpy floating-point errors through the project handler.
	np.seterrcall(np_err_handler)
	np.seterr(all='call')
	#np.seterr(all='raise')

	if opt.resume > -1:
		# Resume from the pickle written at the given step.
		pfn = 'herschel-%02i%s.pickle' % (opt.resume, opt.suffix)
		print 'Reading from', pfn
		tractor = unpickle_from_file(pfn)
		tractor.mp = mp

		ds = tractor.getCatalog()[0]
		print 'DustSheet:', ds

		# derivs = ds.getParamDerivatives(tim)
		# dim = np.zeros(tim.shape)
		# #for k,deriv in enumerate(derivs[:40]):
		# for k,deriv in enumerate(derivs[::10]):
		# 	dim[:,:] = 0
		# 	deriv.addTo(dim)
		# 	plt.clf()
		# 	plt.imshow(dim, interpolation='nearest', origin='lower')
		# 	plt.savefig('deriv-%04i.png' % k)

		#tim = tractor.getImages()[0]
		# for it,tim in enumerate(tractor.getImages()):
		# 	X = ds._getTransformation(tim)
		# 	# #print 'X', X
		# 	keys = X.keys()
		# 	keys.sort()
		# 	# for k in keys[::10]:
		# 	# for k in keys[:40]:
		# 	for k in keys[::202]:
		# 		I,G,nil,nil = X[k]
		# 		rim = np.zeros_like(tim.getImage())
		# 		rim.ravel()[I] = G
		# 		plt.clf()
		# 		plt.imshow(rim, interpolation='nearest', origin='lower')
		# 		plt.colorbar()
		# 		plt.savefig('rim-%i-%04i.png' % (it,k))
		# 		print 'pix', k
		# sys.exit(0)

		makeplots(tractor, opt.resume, opt.suffix)
		step0 = opt.resume + 1

	else:
		# Fresh start: build the Tractor from the options.
		step0 = 0
		tractor = create_tractor(opt)
		tractor.mp = mp

		# zero out invvar outside the model bounds.
		ds = tractor.getCatalog()[0]
		rd = ds.getRaDecCorners()
		for i,tim in enumerate(tractor.getImages()):
			# Project the model footprint into this image's pixels;
			# pixels outside get zero inverse-variance (ignored).
			poly = np.array([tim.getWcs().positionToPixel(RaDecPos(rdi[0], rdi[1])) for rdi in rd])
			poly = poly[:-1,:]
			print 'Model bounding box in image', tim.name, 'coordinates:'
			print poly.shape
			print poly
			H,W = tim.shape
			xx,yy = np.meshgrid(np.arange(W), np.arange(H))
			inside = point_in_poly(xx, yy, poly)
			iv = tim.getInvvar()
			iv[(inside == 0)] = 0.
			tim.setInvvar(iv)

		print 'Precomputing transformations...'
		ds = tractor.getCatalog()[0]

		# Split the grid-spread matrix into strips...
		# Each strip of dy grid rows is computed in parallel.
		async_results = []
		for im in tractor.getImages():
			args = []
			H,W = ds.shape
			dy = 10
			y = 0
			while y <= H:
				args.append((ds, im, y, min(H, y+dy)))
				y += dy
			async_results.append(mp.map_async(_map_trans, args))
		# Glue to strips back together...
		XX = []
		for ar in async_results:
			Xblocks = ar.get()
			X = Xblocks[0]
			for xi in Xblocks[1:]:
				X.update(xi)
			XX.append(X)

		for im,X in zip(tractor.getImages(), XX):
			ds._normalizeTransformation(im, X)
			ds._setTransformation(im, X)
		print 'done precomputing.'

		# Save the initial (step-0) state.
		makeplots(tractor, 0, opt.suffix)
		pfn = 'herschel-%02i%s.pickle' % (0, opt.suffix)
		pickle_to_file(tractor, pfn)
		print 'Wrote', pfn

	# Only the per-image sky levels remain free on the image side.
	for im in tractor.getImages():
		im.freezeAllBut('sky')

	# Main optimization loop with per-step checkpoints.
	for i in range(step0, opt.steps):
		if callgrind:
			callgrind.callgrind_start_instrumentation()

		tractor.optimize(damp=opt.damp, alphas=[1e-3, 1e-2, 0.1, 0.3, 1., 3., 10., 30., 100.])

		if callgrind:
			callgrind.callgrind_stop_instrumentation()

		makeplots(tractor, 1 + i, opt.suffix)
		pfn = 'herschel-%02i%s.pickle' % (1 + i, opt.suffix)
		pickle_to_file(tractor, pfn)
		print 'Wrote', pfn
Example #3
0
def main():
    """Exercise the PSF/likelihood code on a toy two-Gaussian PSF.

    Renders a "true" image from a normalized mixture-of-Gaussians PSF,
    plots the log-likelihood versus model star position, then (from a
    cache, or by Monte Carlo) fits the flux at many random noise levels
    and plots the inferred-flux scatter and its empirical quantiles as
    a function of the log noise variance.

    Writes: truth.png, lnL.png, flux.png, fluxquant.png, and caches the
    Monte-Carlo results in fluxes.pickle.

    NOTE(review): relies on module-level names defined elsewhere in
    this file (MixtureOfGaussians, lnLikelihood, cost, op, an, plt,
    np, os).
    """
    # make a simple psf: two components, 90%/10% amplitudes, with
    # random means and very different variances.
    amp = np.array([0.9, 0.1])
    mean = np.random.uniform(size=(2, 2))
    var = np.array([2.0, 20.])
    psf = MixtureOfGaussians(amp, mean, var)
    psf.normalize()
    trueimage = psf.evaluate_grid(-5, 7, -5, 7)
    plt.clf()
    plt.imshow(trueimage, interpolation='nearest', cmap='gray')
    plt.savefig('truth.png')

    # do some simple test fitting to check likelihood code: sweep the
    # model star's x position at fixed flux and y.
    invvar = 0.01 * np.ones_like(trueimage)
    flux = 1.0
    y = 0.5
    xlist = np.arange(-1.0, 1.0, 0.01)
    lnLlist = np.array(
        [lnLikelihood(trueimage, flux, x, y, psf, invvar) for x in xlist])
    plt.clf()
    plt.plot(xlist, lnLlist, 'k-', alpha=0.5)
    plt.xlabel('model star position')
    # Raw string: '\l' is an invalid escape sequence in a plain string
    # literal; the rendered TeX text is unchanged.
    plt.ylabel(r'$\ln$ likelihood')
    plt.savefig('lnL.png')

    # try an optimization (deliberately disabled debugging path)
    if False:
        pars = np.array([0.9, -1.0, 1.0])
        notpars = (trueimage, psf, invvar)
        bestpars = op.fmin(cost, pars, args=(notpars, ))
        print(bestpars)

    # now loop over noisiness: fit (flux, x, y) at random noise levels,
    # caching the results so re-runs only redo the plots.
    minx = -3.5
    maxx = -1.5
    picklefn = 'fluxes.pickle'
    if not os.path.exists(picklefn):
        logvarlist = np.random.uniform(minx, maxx, size=(3000))
        fluxlist = np.zeros_like(logvarlist)
        for i, logvar in enumerate(logvarlist):
            noise = np.random.normal(size=trueimage.shape)
            var = 10.**logvar
            image = trueimage + var * noise
            invvar = np.ones_like(image) / var / var
            pars = np.array([1.0, 0.0, 0.0])
            notpars = (image, psf, invvar)
            bestpars = op.fmin(cost, pars, args=(notpars, ))
            fluxlist[i] = bestpars[0]
            print(i, logvar, bestpars)
        an.pickle_to_file((logvarlist, fluxlist), picklefn)
    (logvarlist, fluxlist) = an.unpickle_from_file(picklefn)

    # make plots: inferred flux vs. noise level, with guide lines at
    # +/-10% of the true flux.
    plt.clf()
    plt.axhline(1.1, color='k', alpha=0.25)
    plt.axhline(1.0, color='k', alpha=0.25)
    plt.axhline(0.9, color='k', alpha=0.25)
    plt.plot(logvarlist, fluxlist, 'k.', alpha=0.5)
    plt.xlabel(r'$\log_{10}$ pixel noise variance')
    plt.ylabel('inferred flux')
    plt.xlim(minx, maxx)
    plt.ylim(0.8, 1.2)
    plt.savefig('flux.png')

    # Overplot empirical quantiles of the inferred flux in noise bins.
    medbins = np.arange(minx, maxx + 0.0001, 0.25)
    for i in range(len(medbins) - 1):
        a, b = medbins[[i, i + 1]]
        inside = np.sort(fluxlist[(logvarlist > a) * (logvarlist < b)])
        nin = len(inside)
        if nin == 0:
            # No samples fell in this bin; nothing to plot.
            continue
        # ~2-sigma, 1-sigma and median quantiles as horizontal segments
        # across the bin.  Indices must be ints: float indices into
        # numpy arrays raise IndexError in modern numpy.
        for q in (0.025, 0.16, 0.50, 0.84, 0.975):
            val = inside[int(q * nin)]
            plt.plot([a, b], [val, val], 'k-', alpha=0.5)
    plt.savefig('fluxquant.png')
Example #4
0
def main():
    """Exercise the PSF/likelihood code on a toy two-Gaussian PSF.

    Python-2 variant: renders a true image from a mixture-of-Gaussians
    PSF, plots log-likelihood versus star position, Monte-Carlo fits
    the flux at many noise levels (cached in fluxes.pickle), and plots
    the inferred-flux scatter and its empirical quantiles.

    Writes: truth.png, lnL.png, flux.png, fluxquant.png.

    NOTE(review): relies on module-level names (MixtureOfGaussians,
    lnLikelihood, cost, op, an, plt, np, os) defined elsewhere.
    """
    # make a simple psf: two components, 90%/10% amplitudes, random
    # means, very different variances.
    amp = np.array([0.9, 0.1])
    mean = np.random.uniform(size=(2, 2))
    var = np.array([2.0, 20.])
    psf = MixtureOfGaussians(amp, mean, var)
    psf.normalize()
    trueimage = psf.evaluate_grid(-5, 7, -5, 7)
    plt.clf()
    plt.imshow(trueimage, interpolation='nearest', cmap='gray')
    plt.savefig('truth.png')

    # do some simple test fitting to check likelihood code
    invvar = 0.01 * np.ones_like(trueimage)
    flux = 1.0
    y = 0.5
    xlist = np.arange(-1.0, 1.0, 0.01)
    lnLlist = np.array([lnLikelihood(trueimage, flux, x, y, psf, invvar) for x in xlist])
    plt.clf()
    plt.plot(xlist, lnLlist, 'k-', alpha=0.5)
    plt.xlabel('model star position')
    plt.ylabel('$\ln$ likelihood')
    plt.savefig('lnL.png')

    # try an optimization (deliberately disabled debugging path)
    if False:
        pars = np.array([0.9, -1.0, 1.0])
        notpars = (trueimage, psf, invvar)
        bestpars = op.fmin(cost, pars, args=(notpars, ))
        print bestpars

    # now loop over noisiness: fit (flux, x, y) at random noise levels,
    # caching results so re-runs only redo the plots.
    minx = -3.5
    maxx = -1.5
    picklefn = 'fluxes.pickle'
    if not os.path.exists(picklefn):
        logvarlist = np.random.uniform(minx, maxx, size=(3000))
        fluxlist = np.zeros_like(logvarlist)
        for i, logvar in enumerate(logvarlist):
            noise = np.random.normal(size=trueimage.shape)
            var = 10.**logvar
            image = trueimage + var * noise
            invvar = np.ones_like(image) / var / var
            pars = np.array([1.0, 0.0, 0.0])
            notpars = (image, psf, invvar)
            bestpars = op.fmin(cost, pars, args=(notpars, ))
            fluxlist[i] = bestpars[0]
            print i, logvar, bestpars
        an.pickle_to_file((logvarlist, fluxlist), picklefn)
    (logvarlist, fluxlist) = an.unpickle_from_file(picklefn)

    # make plots: inferred flux vs. noise level, guide lines at +/-10%.
    plt.clf()
    plt.axhline(1.1, color='k', alpha=0.25)
    plt.axhline(1.0, color='k', alpha=0.25)
    plt.axhline(0.9, color='k', alpha=0.25)
    plt.plot(logvarlist, fluxlist, 'k.', alpha=0.5)
    plt.xlabel('$\log_{10}$ pixel noise variance')
    plt.ylabel('inferred flux')
    plt.xlim(minx, maxx)
    plt.ylim(0.8, 1.2)
    plt.savefig('flux.png')

    # Empirical ~2-sigma / 1-sigma / median quantiles per noise bin.
    # NOTE(review): float indices like inside[0.025*nin] are deprecated
    # and raise IndexError under modern numpy — these need int() casts
    # (and an empty-bin guard) if this snippet is ever ported forward.
    medbins = np.arange(minx, maxx+0.0001, 0.25)
    for i in range(len(medbins)-1):
        a, b = medbins[[i, i+1]]
        inside = np.sort(fluxlist[(logvarlist > a) * (logvarlist < b)])
        nin = len(inside)
        plt.plot([a, b], [inside[0.025* nin], inside[0.025* nin]], 'k-', alpha=0.5)
        plt.plot([a, b], [inside[0.16 * nin], inside[0.16 * nin]], 'k-', alpha=0.5)
        plt.plot([a, b], [inside[0.50 * nin], inside[0.50 * nin]], 'k-', alpha=0.5)
        plt.plot([a, b], [inside[0.84 * nin], inside[0.84 * nin]], 'k-', alpha=0.5)
        plt.plot([a, b], [inside[0.975* nin], inside[0.975* nin]], 'k-', alpha=0.5)
    plt.savefig('fluxquant.png')
Example #5
0
def main():
    """Fit galaxy + point-source models to a set of Legacy Survey cutouts.

    For each hard-coded (RA, Dec, brick) target: loads test-case data
    (the ssh/rsync data-fetch commands are kept but commented out),
    selects up to 5 images per g/r/z band, then fits four source models
    in turn — DeV, DeV+PSF, Sersic, Sersic+PSF — saving a comparison
    figure per model and accumulating (source, chi^2) tuples into
    results.pickle so the run can be repeated incrementally.

    NOTE(review): relies on module-level names defined elsewhere
    (unpickle_from_file, pickle_to_file, LegacySurveyData, Duck,
    wcs_for_brick, DevGalaxy, DevAgnGalaxy, SersicGalaxy,
    SersicAgnGalaxy, SersicIndex, RaDecPos, NanoMaggies, EllipseESoft,
    Tractor, showboth, flux_string, plt, np, os).
    """

    # Reload previous results so this script can be re-run and only
    # overwrite/extend the entries it recomputes.
    if os.path.exists('results.pickle'):
        results = unpickle_from_file('results.pickle')
    else:
        results = []


    # Hard-coded target list: (RA, Dec, brickname).
    for isrc,(ra,dec,brickname) in enumerate([
        (0.2266, 3.9822, '0001p040'),
        (7.8324, 1.2544, '0078p012'),
        (1.1020, 3.9040, '0011p040'),
        (7.3252, 4.6847, '0073p047'),
        (3.1874, 3.9724, '0031p040'),
        (9.5112, 4.6934, '0094p047'),
        (4.4941, 1.1058, '0043p010'),
        (3.8900, 0.6041, '0038p005'),
        (8.1934, 4.0124, '0081p040'),
        (6.8125, 0.5463, '0068p005'),
        ]):

        #if isrc < 7:
        #    continue
        # Only process this subset of the target list on this run.
        if isrc not in [4,6,7,8,9]:
            continue

        outdir = 'out_%.4f_%.4f' % (ra,dec)
        datadir = outdir.replace('out_', 'data_')
        #cmd = 'ssh cori "cd legacypipe2/py && python legacypipe/runbrick.py --radec %.4f %.4f --width 100 --height 100 --survey-dir fakedr9 --outdir %s --stage image_coadds --skip-calibs && python legacyanalysis/create_testcase.py --survey-dir fakedr9 %s/coadd/*/*/*-ccds.fits %s %s"' % (ra, dec, outdir, outdir, datadir, brickname)
        #cmd = 'ssh cori "cd legacypipe2/py && python legacypipe/runbrick.py --radec %.4f %.4f --width 100 --height 100 --survey-dir fakedr9 --outdir %s --stage image_coadds --skip-calibs && python legacyanalysis/create_testcase.py --survey-dir fakedr9 --outlier-dir %s %s/coadd/*/*/*-ccds.fits %s %s"' % (ra, dec, outdir, outdir, outdir, datadir, brickname)

        # Brick name for a custom cutout centred on (ra, dec).
        outbrick = ('custom-%06i%s%05i' %
                    (int(1000*ra), 'm' if dec < 0 else 'p',
                     int(1000*np.abs(dec))))

        # Data-preparation commands, disabled once data is local.
        cmd = 'ssh cori "cd legacypipe2/py && python legacyanalysis/create_testcase.py --survey-dir fakedr9 --outlier-dir %s --outlier-brick %s %s/coadd/*/*/*-ccds.fits %s %s"' % (outdir, outbrick, outdir, datadir, brickname)
        #os.system(cmd)

        cmd = 'rsync -arv cori:legacypipe2/py/%s .' % datadir
        #os.system(cmd)
        #continue

        survey = LegacySurveyData(datadir)

        # Minimal object with just the .ra/.dec the brick-WCS needs.
        b = Duck()
        b.ra = ra
        b.dec = dec

        # 80x80-pixel target WCS and its RA,Dec outline polygon.
        W,H = 80,80
        wcs = wcs_for_brick(b, W=W, H=H)
        targetrd = np.array([wcs.pixelxy2radec(x,y) for x,y in
                             [(1,1),(W,1),(W,H),(1,H),(1,1)]])
        ccds = survey.ccds_touching_wcs(wcs)
        print(len(ccds), 'CCDs')

        # Keep images at least as large as the cutout, then cap at 5
        # images per band (g, r, z).
        ims = [survey.get_image_object(ccd) for ccd in ccds]
        keepims = []
        for im in ims:
            h,w = im.shape
            if h >= H and w >= W:
                keepims.append(im)
        ims = keepims
        gims = [im for im in ims if im.band == 'g']
        rims = [im for im in ims if im.band == 'r']
        zims = [im for im in ims if im.band == 'z']
        nk = min([len(gims), len(rims), len(zims), 5])
        ims = gims[:nk] + rims[:nk] + zims[:nk]
        print('Keeping', len(ims), 'images')

        tims = [im.get_tractor_image(pixPsf=True, hybridPsf=True, normalizePsf=True, splinesky=True, radecpoly=targetrd) for im in ims]

        bands = 'grz'

        # --- Model 1: pure DeV galaxy (baseline chi^2). ---
        devsrc = DevGalaxy(RaDecPos(ra,dec), NanoMaggies(**dict([(b,10.) for b in bands])), EllipseESoft(0., 0., 0.))
        tr = Tractor(tims, [devsrc])
        tr.freezeParam('images')
        tr.optimize_loop()
        print('Fit DeV source:', devsrc)
        devmods = list(tr.getModelImages())
        showboth(tims, devmods, wcs, bands);
        s = flux_string(devsrc.brightness)
        plt.suptitle('DeV model: ' + s + '\ndchisq 0.')
        plt.savefig('src%02i-dev.png' % isrc)
        devchi = 2. * tr.getLogLikelihood()

        # --- Model 2: DeV + central point source, warm-started from
        # the DeV fit. ---
        dasrc = DevAgnGalaxy(devsrc.pos.copy(), devsrc.brightness.copy(), devsrc.shape.copy(), NanoMaggies(**dict([(b,1.) for b in bands])))
        #dasrc = DevAgnGalaxy(RaDecPos(ra,dec), NanoMaggies(**dict([(b,10.) for b in bands])), EllipseESoft(0., 0., 0.), NanoMaggies(**dict([(b,1.) for b in bands])))
        tr = Tractor(tims, [dasrc])
        tr.freezeParam('images')
        tr.optimize_loop()
        print('Fit Dev+PSF source:', dasrc)
        damods = list(tr.getModelImages())
        showboth(tims, damods, wcs, bands)
        s1 = flux_string(dasrc.brightnessDev)
        s2 = flux_string(dasrc.brightnessPsf)
        # Point-source flux as a percentage of the galaxy flux per band.
        pcts = [100. * dasrc.brightnessPsf.getFlux(b) / dasrc.brightnessDev.getFlux(b) for b in bands]
        s3 = ', '.join(['%.2f' % p for p in pcts])
        dachi = 2. * tr.getLogLikelihood()
        plt.suptitle('DeV + Point Source model: DeV %s, PSF %s' % (s1, s2) + ' (%s %%)' % s3 +
                     '\ndchisq %.1f' % (dachi - devchi))
        plt.savefig('src%02i-devcore.png' % isrc)

        # --- Model 3: Sersic galaxy, warm-started from the DeV fit. ---
        #sersrc = SersicGalaxy(RaDecPos(ra, dec), NanoMaggies(**dict([(b,10.) for b in bands])), EllipseESoft(0.5, 0., 0.), SersicIndex(4.0))
        sersrc = SersicGalaxy(devsrc.pos.copy(), devsrc.brightness.copy(), devsrc.shape.copy(), SersicIndex(4.0))
        tr = Tractor(tims, [sersrc])
        tr.freezeParam('images')
        r = tr.optimize_loop()
        print('Opt:', r)
        print('Fit Ser source:', sersrc)
        # If the index ran away to the boundary, freeze it and re-fit
        # the remaining parameters.
        if sersrc.sersicindex.getValue() >= 6.0:
            sersrc.freezeParam('sersicindex')
            r = tr.optimize_loop()
            print('Re-fit Ser source:', sersrc)
        sermods = list(tr.getModelImages())
        showboth(tims, sermods, wcs, bands)
        s = flux_string(sersrc.brightness)
        serchi = 2. * tr.getLogLikelihood()
        plt.suptitle('Sersic model: %s, index %.2f' % (s, sersrc.sersicindex.getValue()) +
                    '\ndchisq %.1f' % (serchi - devchi))
        plt.savefig('src%02i-ser.png' % isrc)

        # --- Model 4: Sersic + central point source. ---
        #sasrc = SersicAgnGalaxy(RaDecPos(ra, dec), NanoMaggies(**dict([(b,10.) for b in bands])), EllipseESoft(0.5, 0., 0.), SersicIndex(4.0), NanoMaggies(**dict([(b,1.) for b in bands])))

        # Reset a runaway Sersic index to a sane starting value.
        si = sersrc.sersicindex.getValue()
        if si > 6.0:
            si = 4.0
        si = SersicIndex(si)

        sasrc = SersicAgnGalaxy(sersrc.pos.copy(), sersrc.brightness.copy(), sersrc.shape.copy(), si, NanoMaggies(**dict([(b,1.) for b in bands])))
        tr = Tractor(tims, [sasrc])
        tr.freezeParam('images')
        r = tr.optimize_loop()
        print('Fit Ser+PSF source:', sasrc)
        if sasrc.sersicindex.getValue() >= 6.0:
            sasrc.freezeParam('sersicindex')
            r = tr.optimize_loop()
            print('Re-fit Ser+PSF source:', sasrc)
        samods = list(tr.getModelImages())
        showboth(tims, samods, wcs, bands)
        s1 = flux_string(sasrc.brightness)
        s2 = flux_string(sasrc.brightnessPsf)
        pcts = [100. * sasrc.brightnessPsf.getFlux(b) / sasrc.brightness.getFlux(b) for b in bands]
        s3 = ', '.join(['%.2f' % p for p in pcts])
        sachi = 2. * tr.getLogLikelihood()
        plt.suptitle('Sersic + Point Source model: Ser %s, index %.2f, PSF %s' % (s1, sasrc.sersicindex.getValue(), s2) +
                     ' (%s %%)' % s3 +
                    '\ndchisq %.1f' % (sachi - devchi))
        plt.savefig('src%02i-sercore.png' % isrc)

        # Store (or overwrite) this target's fit summary, keeping the
        # results list indexed by isrc.
        ri = (ra, dec, brickname, devsrc, devchi, dasrc, dachi,
              sersrc, serchi, sasrc, sachi)
        if len(results) > isrc:
            results[isrc] = ri
        else:
            results.append(ri)

    pickle_to_file(results, 'results.pickle')
def bayes_figs(DES, detmaps, detivs, good, wcs, img):

    # First, build empirical SED prior "library" from DES sources
    DES.flux_g = np.maximum(0, DES.flux_auto_g)
    DES.flux_r = np.maximum(0, DES.flux_auto_r)
    DES.flux_i = np.maximum(0, DES.flux_auto_i)
    flux = DES.flux_g + DES.flux_r + DES.flux_i
    K = np.flatnonzero(flux > 0)
    DES.cut(K)
    flux = flux[K]
    DES.f_g = DES.flux_g / flux
    DES.f_r = DES.flux_r / flux
    DES.f_i = DES.flux_i / flux
    print('Kept', len(DES), 'with positive flux')

    nbins=21
    edge = 1. / (nbins-1) / 2.
    #N,xe,ye = loghist(DES.f_g, DES.f_r, range=((0-edge,1+edge),(0-edge,1+edge)), nbins=nbins);
    N,xe,ye = np.histogram2d(DES.f_g, DES.f_r,
                             range=((0-edge,1+edge),(0-edge,1+edge)),
                             bins=nbins)
    N = N.T

    print(np.sum(N > 0), np.sum(N > N.sum()*0.001))
    NN = N.copy()
    NN[N < N.sum()*0.001] = np.nan

    plt.figure(figsize=(3,3))
    plt.subplots_adjust(left=0.2, right=0.98, bottom=0.15, top=0.98)

    plt.clf()
    x0,x1 = -edge, 1+edge
    y0,y1 = x0,x1
    plt.imshow(NN, interpolation='nearest', origin='lower', extent=(x0,x1,y0,y1),
               cmap=antigray, zorder=20)
    plt.plot([x0, x0, x1, x0], [y0,y1,y0,y0], 'k-', zorder=30)
    plt.gca().set_frame_on(False)
    p = Polygon(np.array([[x0, x0, x1, x0], [y0, y1, y0, y0]]).T, color=(0.9,0.9,1),
                zorder=15)
    plt.gca().add_artist(p)
    plt.xlabel('flux fraction g')
    plt.ylabel('flux fraction r')
    plt.savefig('bayes-prior-sed.pdf')

    #iy,ix = np.nonzero(N)
    iy,ix = np.nonzero(N > N.sum()*0.001)
    print(len(iy), 'significant bins')
    # Find f_{g,r} histogram midpoints
    mx = (xe[:-1] + xe[1:]) / 2.
    my = (ye[:-1] + ye[1:]) / 2.
    fg = mx[ix]
    fr = my[iy]
    fi = 1. - (fg + fr)
    fn = N[iy,ix]
    seds = np.clip(np.vstack((fg,fr,fi)).T, 0., 1.)
    weights = fn / np.sum(fn)
    ok = np.flatnonzero(np.sum(seds, axis=1) == 1)
    seds = seds[ok,:]
    weights = weights[ok]
    print(len(weights), 'color-color bins are populated; max weight', weights.max())

    plt.clf()
    plothist(DES.mag_auto_g - DES.mag_auto_r, DES.mag_auto_r - DES.mag_auto_i,
             range=((-0.5, 3),(-0.5, 2)), nbins=20,
             imshowargs=dict(cmap=antigray), dohot=False, docolorbar=False)
    plt.xlabel('g - r (mag)')
    plt.ylabel('r - i (mag)')
    plt.savefig('bayes-data-cc.pdf')

    plt.clf()
    fg_grid,fr_grid = np.meshgrid(xe, ye)
    fi_grid = 1. - (fg_grid + fr_grid)
    gr_grid = -2.5 * np.log10(fg_grid / fr_grid)
    ri_grid = -2.5 * np.log10(fr_grid / fi_grid)
    good_grid = (fi_grid >= 0) * np.isfinite(gr_grid) * np.isfinite(ri_grid)
    h,w = good_grid.shape
    cm = antigray
    ng = 0
    for j0 in range(h-1):
        for i0 in range(w-1):
            j1 = j0+1
            i1 = i0+1
            if not(good_grid[j0,i0] and good_grid[j0,i1] and good_grid[j1,i0] and good_grid[j1,i1]):
                continue
            if N[j0,i0] == 0:
                continue
            ng += N[j0,i0]
            xx = [gr_grid[j0,i0], gr_grid[j0,i1], gr_grid[j1,i1], gr_grid[j1,i0], gr_grid[j0,i0]]
            yy = [ri_grid[j0,i0], ri_grid[j0,i1], ri_grid[j1,i1], ri_grid[j1,i0], ri_grid[j0,i0]]
            xy = np.vstack((xx, yy)).T
            poly = Polygon(xy, color=cm(N[j0,i0]/N.max()))
            plt.gca().add_artist(poly)
    #plt.gca().set_fc('0.5')
    plt.axis([-0.5, 3, -0.5,2])
    plt.xlabel('g - r (mag)')
    plt.ylabel('r - i (mag)')
    print(ng, 'of', N.sum(), 'plotted')
    plt.savefig('bayes-prior-cc.pdf')

    H,W = detmaps[0].shape
    bayesfn = 'lprb.pickle'
    if os.path.exists(bayesfn):
        print('Reading cached', bayesfn)
        lprb = unpickle_from_file(bayesfn)
    else:
        # Number of bands
        J = 3
        # Build detection-map & iv arrays
        D = np.zeros((J,H,W))
        Div = np.zeros((J,H,W))
        for i,(d,div) in enumerate(zip(detmaps,detivs)):
            D[i,:,:] = d
            Div[i,:,:] = div
        alpha = 1.
        t0 = time.process_time()
        lprb = log_pratio_bayes(seds, weights, D, Div, alpha)
        t1 = time.process_time()
        print('Bayes took', t1-t0, 'CPU-seconds')
        pickle_to_file(lprb, bayesfn)
        print('Writing cache', bayesfn)
        # Bayes took 276.118402 CPU-seconds

    sz = 20
    bx,by = detect_sources(lprb, 2000)
    bsources = fits_table()
    bsources.x = bx
    bsources.y = by
    bsources.lprb = lprb[bsources.y, bsources.x]
    bsources.cut((bsources.x > sz) * (bsources.x < (W-sz)) * (bsources.y > sz) * (bsources.y < (H-sz)))
    bsources.cut(good[bsources.y, bsources.x])
    print('Kept', len(bsources))

    g_det, r_det, i_det = detmaps
    
    bsources.g_flux = g_det[bsources.y, bsources.x]
    bsources.r_flux = r_det[bsources.y, bsources.x]
    bsources.i_flux = i_det[bsources.y, bsources.x]
    bsources.ra,bsources.dec = wcs.pixelxy2radec(bsources.x+1, bsources.y+1)
    bsources.g_mag = -2.5*(np.log10(bsources.g_flux) - 9)
    bsources.r_mag = -2.5*(np.log10(bsources.r_flux) - 9)
    bsources.i_mag = -2.5*(np.log10(bsources.i_flux) - 9)
    bsources.gr = bsources.g_mag - bsources.r_mag
    bsources.ri = bsources.r_mag - bsources.i_mag
    I = np.argsort(-bsources.lprb)
    bsources.cut(I)

    # save for later...
    bsources_orig = bsources.copy()
    
    # g + r + i detections
    xg,yg = detect_sources(detmaps[0] * np.sqrt(detivs[0]), 50.)
    xr,yr = detect_sources(detmaps[1] * np.sqrt(detivs[1]), 50.)
    xi,yi = detect_sources(detmaps[2] * np.sqrt(detivs[2]), 50.)
    print('Detected', len(xg),len(xr),len(xi), 'gri')
    xm,ym = xg.copy(),yg.copy()
    for xx,yy in [(xr,yr),(xi,yi)]:
        I,J,d = match_xy(xm,ym, xx,yy, 5.)
        print('Matched:', len(I))
        U = np.ones(len(xx), bool)
        U[J] = False
        print('Unmatched:', np.sum(U))
        xm = np.hstack((xm, xx[U]))
        ym = np.hstack((ym, yy[U]))
    print('Total of', len(xm), 'g+r+i')

    # Build the g+r+i comparison catalog from the merged detection list,
    # mirroring the photometry columns computed for `bsources` above.
    sources = fits_table()
    sources.x = xm
    sources.y = ym
    # Rounded integer pixel indices, used to sample the per-band S/N maps.
    iy,ix = np.round(sources.y).astype(int), np.round(sources.x).astype(int)
    sources.sn_g = (detmaps[0] * np.sqrt(detivs[0]))[iy,ix]
    sources.sn_r = (detmaps[1] * np.sqrt(detivs[1]))[iy,ix]
    sources.sn_i = (detmaps[2] * np.sqrt(detivs[2]))[iy,ix]
    # NOTE(review): these index with sources.x/y directly while the rounded
    # int copies (iy,ix) above are only used for sn_* — this assumes
    # detect_sources returns integer pixel coordinates; confirm, since float
    # arrays here would raise an IndexError.
    sources.g_flux = g_det[sources.y, sources.x]
    sources.r_flux = r_det[sources.y, sources.x]
    sources.i_flux = i_det[sources.y, sources.x]
    # +1: pixelxy2radec uses FITS convention (1-indexed pixel coordinates).
    sources.ra,sources.dec = wcs.pixelxy2radec(sources.x+1, sources.y+1)
    # Same flux->mag zeropoint convention as bsources above.
    sources.g_mag = -2.5*(np.log10(sources.g_flux) - 9)
    sources.r_mag = -2.5*(np.log10(sources.r_flux) - 9)
    sources.i_mag = -2.5*(np.log10(sources.i_flux) - 9)
    sources.gr = sources.g_mag - sources.r_mag
    sources.ri = sources.r_mag - sources.i_mag
    # Best single-band S/N, used for ranking.
    sources.sn_max = np.maximum(sources.sn_g, np.maximum(sources.sn_r, sources.sn_i))
    # Same edge-margin and good-pixel cuts applied to bsources.
    sources.cut((sources.x > sz) * (sources.x < (W-sz)) * (sources.y > sz) * (sources.y < (H-sz)))
    sources.cut(good[sources.y, sources.x])
    print('Kept', len(sources))
    # Sort best-first by peak S/N.
    I = np.argsort(-sources.sn_max)
    sources.cut(I)

    sources_orig = sources.copy()
    
    # Truncate both (sorted best-first) catalogs to the same length so the
    # comparison below is between equal-sized samples.
    N = min(len(sources), len(bsources))
    sources  =  sources[:N]
    bsources = bsources[:N]
    print('Cut both to', N)
    
    # Define unmatched sources as ones that are not in the other's hot region
    # hot  = union of the three per-band S/N > 50 regions (holes filled);
    # Bhot = the Bayesian log-prob > 2000 region (holes filled).
    hot = binary_fill_holes(np.logical_or(detmaps[0] * np.sqrt(detivs[0]) > 50.,
                            np.logical_or(detmaps[1] * np.sqrt(detivs[1]) > 50.,
                                          detmaps[2] * np.sqrt(detivs[2]) > 50.)))
    Bhot = binary_fill_holes(lprb > 2000.)
    # UB: Bayesian sources outside the g+r+i hot region;
    # US: g+r+i sources outside the Bayesian hot region.
    UB = np.flatnonzero((hot[bsources.y, bsources.x] == False))
    US = np.flatnonzero((Bhot[sources.y, sources.x] == False))
    print(len(UB), 'unmatched Bayesian', len(US), 'g+r+i')

    # MS: g+r+i sources that DO fall in the Bayesian hot region ("both").
    MS = np.flatnonzero(Bhot[sources.y, sources.x])
    
    # Color-color (g-r vs r-i) comparison of the two detection methods.
    plt.figure(figsize=(6,4))
    plt.subplots_adjust(left=0.1, right=0.98, bottom=0.12, top=0.98)

    plt.clf()
    # for the legend only: a dummy point plotted off-axis (-10,-10 is outside
    # the [-5,5]x[-3,3] axis limits set below) to get an opaque legend handle
    # for the alpha-blended "Both" markers.
    p1 = plt.plot(-10, -10, 'k.')
    plt.plot(sources.gr[MS], sources.ri[MS], 'k.', alpha=0.1,
             label='Both')
    p2 = plt.plot(sources.gr[US],  sources.ri[US], 'o', mec='r', mfc='none',
             label='g+r+i only')
    p3 = plt.plot(bsources.gr[UB], bsources.ri[UB], 'bx',
             label='Bayesian only')
    plt.xlabel('g - r (mag)')
    plt.ylabel('r - i (mag)')
    plt.legend((p1[0],p2[0],p3[0]),
               ('Detected by both', 'Only detected by g+r+i', 'Only detected by Bayesian'),
               loc='lower left')
    plt.axis([-5, 5, -3, 3])
    plt.savefig('bayes-vs-gri.pdf')

    
    # Postage-stamp grids (10x10) of the sources unique to each method.
    plt.figure(figsize=(4,4))
    plt.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99)
    plt.clf()
    show_sources(bsources[UB], img, R=10, C=10, divider=1)
    #plt.suptitle('Bayesian only');
    plt.savefig('bayes-only.pdf')

    plt.clf()
    show_sources(sources[US], img, R=10, C=10, divider=1)
    #plt.suptitle('g+r+i only');
    plt.savefig('gri-only.pdf')


    #####
    # Second experiment: repeat the comparison, but require the g+r+i method
    # to detect in at least TWO bands. Restore the full (pre-truncation)
    # g+r+i catalog first.
    sources = sources_orig.copy()
    
    # What if we demand 2-band detection?
    #bsources = bsources_orig
    # Detect at a higher threshold to get ~2400 sources
    Bhot = binary_fill_holes(lprb > 4000.)
    bx,by = detect_sources(lprb, 4000.)
    # Rebuild the Bayesian catalog at the new threshold, with the same cuts
    # and photometry columns as before.
    bsources = fits_table()
    bsources.x = bx
    bsources.y = by
    bsources.lprb = lprb[bsources.y, bsources.x]
    bsources.cut((bsources.x > sz) * (bsources.x < (W-sz)) * (bsources.y > sz) * (bsources.y < (H-sz)))
    bsources.cut(good[bsources.y, bsources.x])
    print('Kept', len(bsources))
    bsources.g_flux = g_det[bsources.y, bsources.x]
    bsources.r_flux = r_det[bsources.y, bsources.x]
    bsources.i_flux = i_det[bsources.y, bsources.x]
    # +1: pixelxy2radec uses FITS convention (1-indexed pixel coordinates).
    bsources.ra,bsources.dec = wcs.pixelxy2radec(bsources.x+1, bsources.y+1)
    bsources.g_mag = -2.5*(np.log10(bsources.g_flux) - 9)
    bsources.r_mag = -2.5*(np.log10(bsources.r_flux) - 9)
    bsources.i_mag = -2.5*(np.log10(bsources.i_flux) - 9)
    bsources.gr = bsources.g_mag - bsources.r_mag
    bsources.ri = bsources.r_mag - bsources.i_mag
    I = np.argsort(-bsources.lprb)
    bsources.cut(I)

    # Per-band S/N > 50 masks; "hot" now means detected in >= 2 of the
    # three bands (sum of the boolean masks).
    sthresh = 50.
    hotg = binary_fill_holes(detmaps[0] * np.sqrt(detivs[0]) > sthresh)
    hotr = binary_fill_holes(detmaps[1] * np.sqrt(detivs[1]) > sthresh)
    hoti = binary_fill_holes(detmaps[2] * np.sqrt(detivs[2]) > sthresh)
    hotsum = hotg*1 + hotr*1 + hoti*1
    hot = (hotsum >= 2)
    # Keep only g+r+i sources satisfying the 2-band requirement.
    K = np.flatnonzero(hot[sources.y,sources.x])
    # 2386 detected in 2 or more bands
    print(len(K), 'detected in 2 or more bands')
    sources.cut(K)

    # Equalize catalog sizes again for a fair comparison.
    N = min(len(sources), len(bsources))
    sources  =  sources[:N]
    bsources = bsources[:N]
    print('Cut both to', N)

    # Unmatched sets, as before, using the new hot/Bhot regions.
    UB = np.flatnonzero((hot[bsources.y, bsources.x] == False))
    US = np.flatnonzero((Bhot[sources.y, sources.x] == False))
    print(len(UB), 'unmatched Bayesian', len(US), '2 of (g+r+i)')

    # Color-color comparison for the 2-band experiment (no "Both" cloud here,
    # so the legend uses the ordinary label mechanism).
    plt.figure(figsize=(6,4))
    plt.subplots_adjust(left=0.1, right=0.98, bottom=0.12, top=0.98)

    plt.clf()
    plt.plot(sources.gr[US],  sources.ri[US], 'o', mec='r', mfc='none',
             label='g+r+i only')
    plt.plot(bsources.gr[UB], bsources.ri[UB], 'kx',
             label='Bayesian only')
    plt.xlabel('g - r (mag)')
    plt.ylabel('r - i (mag)')
    plt.legend()
    plt.axis([-5, 5, -3, 3])
    plt.savefig('bayes-vs-2gri.pdf')

    # Postage-stamp grids of each method's unique sources (2-band run).
    plt.figure(figsize=(4,4))
    plt.subplots_adjust(left=0.01, right=0.99, bottom=0.01, top=0.99)
    plt.clf()
    show_sources(bsources[UB], img, R=10, C=10, divider=1)
    plt.savefig('bayes-2only.pdf')

    plt.clf()
    show_sources(sources[US], img, R=10, C=10, divider=1)
    plt.savefig('2gri-only.pdf')