Example 1
    def init(self):
        import os
        from astrometry.util.miscutils import point_in_poly
        from astrometry.net.wcs import TanWCS
        from scipy.ndimage import binary_dilation

        hp = self.healpix
        nside = self.nside
        topscale = self.version.topscale

        hpwcs, hpxy = get_healpix_wcs(nside, hp, topscale)
        H, W = int(hpwcs.get_height()), int(hpwcs.get_width())

        bands = 3

        enhI = np.zeros((H, W, bands), np.float32)
        enhW = np.zeros((H, W), np.float32)
        xx, yy = np.meshgrid(np.arange(W), np.arange(H))
        # inside-healpix mask.
        enhM = point_in_poly(xx, yy, hpxy - 1.)
        # Grow the mask by one pixel in every direction.
        enhM = binary_dilation(enhM, np.ones((3, 3)))

        del xx
        del yy
        npix = np.sum(enhM)
        for b in range(bands):
            enhI[:, :, b][enhM] = np.random.permutation(npix) / float(npix)
        enhW[enhM] = 1e-3

        mydir = self.get_dir()
        # print 'My directory:', mydir
        if not os.path.exists(mydir):
            # print 'Does not exist'
            try:
                os.makedirs(mydir)
                # print 'Created'
            except OSError:
                import traceback
                print('Failed to create dir:')
                traceback.print_exc()

        tempfn = self.write_files(enhI, enhW, temp=True)
        dbwcs = TanWCS()
        dbwcs.set_from_tanwcs(hpwcs)
        dbwcs.save()
        with transaction.commit_on_success():
            self.move_temp_files(tempfn)
            self.maxweight = 0.
            self.wcs = dbwcs
            self.save()
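
The core of this init() is a mask-building idiom: evaluate a point-in-polygon test over the full pixel grid, then grow the resulting boolean mask by one pixel with binary_dilation. Below is a minimal, self-contained sketch of that idiom, assuming nothing from astrometry: matplotlib's Path.contains_points stands in for point_in_poly, and the polygon and grid size are invented for illustration.

# Minimal sketch of the inside-polygon mask idiom used above (hypothetical
# polygon and grid; matplotlib's Path stands in for astrometry's point_in_poly).
import numpy as np
from matplotlib.path import Path
from scipy.ndimage import binary_dilation

H, W = 100, 100
poly = np.array([[10., 10.], [90., 20.], [80., 85.], [15., 75.]])

xx, yy = np.meshgrid(np.arange(W), np.arange(H))
pts = np.column_stack((xx.ravel(), yy.ravel()))
mask = Path(poly).contains_points(pts).reshape(H, W)  # inside-polygon mask
mask = binary_dilation(mask, np.ones((3, 3)))         # grow by one pixel, as above

print(mask.sum(), 'of', mask.size, 'pixels are inside the (dilated) polygon')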
Example 2
    def init(self):
        import os
        from astrometry.util.miscutils import point_in_poly
        from astrometry.net.wcs import TanWCS
        from scipy.ndimage import binary_dilation

        hp = self.healpix
        nside = self.nside
        topscale = self.version.topscale

        hpwcs,hpxy = get_healpix_wcs(nside, hp, topscale)
        H,W = int(hpwcs.get_height()), int(hpwcs.get_width())

        bands = 3

        enhI = np.zeros((H,W,bands), np.float32)
        enhW = np.zeros((H,W), np.float32)
        xx,yy = np.meshgrid(np.arange(W), np.arange(H))
        # inside-healpix mask.
        enhM = point_in_poly(xx, yy, hpxy-1.)
        # Grow the mask by one pixel in every direction.
        enhM = binary_dilation(enhM, np.ones((3,3)))

        del xx
        del yy
        npix = np.sum(enhM)
        for b in range(bands):
            enhI[:,:,b][enhM] = np.random.permutation(npix) / float(npix)
        enhW[enhM] = 1e-3

        mydir = self.get_dir()
        # print 'My directory:', mydir
        if not os.path.exists(mydir):
            # print 'Does not exist'
            try:
                os.makedirs(mydir)
                # print 'Created'
            except OSError:
                import traceback
                print('Failed to create dir:')
                traceback.print_exc()

        tempfn = self.write_files(enhI, enhW, temp=True)
        dbwcs = TanWCS()
        dbwcs.set_from_tanwcs(hpwcs)
        dbwcs.save()
        with transaction.commit_on_success():
            self.move_temp_files(tempfn)
            self.maxweight = 0.
            self.wcs = dbwcs
            self.save()
Example 3
def main():
    import optparse
    import logging
    import sys

    parser = optparse.OptionParser()
    parser.add_option('--threads', dest='threads', default=1, type=int, help='Use this many concurrent processors')
    parser.add_option('-v', '--verbose', dest='verbose', action='count', default=0,
                      help='Make more verbose')

    parser.add_option('--grid', '-g', dest='gridn', type=int, default=5, help='Dust parameter grid size')
    parser.add_option('--steps', '-s', dest='steps', type=int, default=10, help='Number of optimization steps')
    parser.add_option('--suffix', dest='suffix', default='', help='Output file suffix')

    parser.add_option('--no-100', dest='no100', action='store_true', default=False,
                      help='Omit PACS-100 data?')

    parser.add_option('--callgrind', dest='callgrind', action='store_true', default=False, help='Turn on callgrind around tractor.optimize()')

    parser.add_option('--resume', '-r', dest='resume', type=int, default=-1, help='Resume from a previous run at the given step?')

    parser.add_option('--zoom', dest='zoom', type=float, default=1, help='Scale down the model to only touch the (1/zoom x 1/zoom) central region of the images')

    parser.add_option('--damp', dest='damp', type=float, default=1., help='LSQR damping')

    opt,args = parser.parse_args()

    if opt.verbose == 0:
        lvl = logging.INFO
        log_init(2)
    else:
        lvl = logging.DEBUG
        log_init(3)
    
    logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)

    if opt.threads > 1 and False:  # DebugPool path, deliberately disabled
        global dpool
        import debugpool
        dpool = debugpool.DebugPool(opt.threads)
        Time.add_measurement(debugpool.DebugPoolMeas(dpool))
        mp = multiproc(pool=dpool)
    else:
        print('N threads', opt.threads)
        mp = multiproc(opt.threads)#, wrap_all=True)

    if opt.callgrind:
        import callgrind
    else:
        callgrind = None

    np.seterrcall(np_err_handler)
    np.seterr(all='call')
    #np.seterr(all='raise')

    if opt.resume > -1:
        pfn = 'herschel-%02i%s.pickle' % (opt.resume, opt.suffix)
        print('Reading from', pfn)
        tractor = unpickle_from_file(pfn)
        tractor.mp = mp

        ds = tractor.getCatalog()[0]
        print('DustSheet:', ds)

        # derivs = ds.getParamDerivatives(tim)
        # dim = np.zeros(tim.shape)
        # #for k,deriv in enumerate(derivs[:40]):
        # for k,deriv in enumerate(derivs[::10]):
        #   dim[:,:] = 0
        #   deriv.addTo(dim)
        #   plt.clf()
        #   plt.imshow(dim, interpolation='nearest', origin='lower')
        #   plt.savefig('deriv-%04i.png' % k)

        #tim = tractor.getImages()[0]
        # for it,tim in enumerate(tractor.getImages()):
        #   X = ds._getTransformation(tim)
        #   # #print 'X', X
        #   keys = X.keys()
        #   keys.sort()
        #   # for k in keys[::10]:
        #   # for k in keys[:40]:
        #   for k in keys[::202]:
        #       I,G,nil,nil = X[k]
        #       rim = np.zeros_like(tim.getImage())
        #       rim.ravel()[I] = G
        #       plt.clf()
        #       plt.imshow(rim, interpolation='nearest', origin='lower')
        #       plt.colorbar()
        #       plt.savefig('rim-%i-%04i.png' % (it,k))
        #       print 'pix', k
        # sys.exit(0)

        makeplots(tractor, opt.resume, opt.suffix)
        step0 = opt.resume + 1

    else:
        step0 = 0
        tractor = create_tractor(opt)
        tractor.mp = mp

        # zero out invvar outside the model bounds.
        ds = tractor.getCatalog()[0]
        rd = ds.getRaDecCorners(margin=0.5)
        for i,tim in enumerate(tractor.getImages()):
            poly = np.array([tim.getWcs().positionToPixel(
                RaDecPos(rdi[0], rdi[1])) for rdi in rd])
            poly = poly[:-1,:]
            print('Model bounding box in image', tim.name, 'coordinates:')
            #print poly.shape
            print(poly)
            H,W = tim.shape
            xx,yy = np.meshgrid(np.arange(W), np.arange(H))
            inside = point_in_poly(xx, yy, poly)
            tim.inverr[inside == 0] = 0.

        plt.clf()
        for i,tim in enumerate(tractor.images):
            h,w = tim.shape
            rd = [tim.getWcs().pixelToPosition(x,y)
                  for x,y in [(-0.5,-0.5),(w-0.5,-0.5),(w-0.5,h-0.5),
                              (-0.5,h-0.5),(-0.5,-0.5)]]
            plt.plot([p.ra for p in rd], [p.dec for p in rd], '-',
                     label=tim.name)
        rd = ds.getRaDecCorners(margin=0.5)
        plt.plot(rd[:,0], rd[:,1], 'k-', label='Grid')
        mh,mw = ds.shape
        r,d = ds.wcs.pixelxy2radec(1 + np.arange(mw), np.ones(mw))
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(np.ones(mh), 1 + np.arange(mh))
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(1 + np.arange(mw), np.zeros(mw)+mh)
        plt.plot(r, d, 'k.')
        r,d = ds.wcs.pixelxy2radec(np.zeros(mh)+mw, 1 + np.arange(mh))
        plt.plot(r, d, 'k.')
        plt.legend()
        plt.savefig('radec.png')
            
        print('Precomputing transformations...')
        ds = tractor.getCatalog()[0]

        # Split the grid-spread matrix into strips...
        async_results = []
        for im in tractor.getImages():
            args = []
            H,W = ds.shape
            dy = 10
            y = 0
            while y <= H:
                args.append((ds, im, y, min(H, y+dy)))
                y += dy
            async_results.append(mp.map_async(_map_trans, args))
        # Glue the strips back together...
        XX = []
        for ar in async_results:
            Xblocks = ar.get()
            X = Xblocks[0]
            for xi in Xblocks[1:]:
                X.update(xi)
            XX.append(X)
            
        for im,X in zip(tractor.getImages(), XX):
            ds._normalizeTransformation(im, X)
            ds._setTransformation(im, X)
        print('done precomputing.')

        # Plot the grid-spread functions.
        for itim,tim in enumerate(tractor.images):
            T = ds._getTransformation(tim)
            (I,G,nz,NZI) = T[0]
            plt.clf()
            g = np.zeros(tim.shape, np.float32)
            g.flat[I] = G
            plt.imshow(g, interpolation='nearest', origin='lower', cmap='hot')
            plt.colorbar()
            plt.title('Grid-spread function for cell 0, image %s' % tim.name)
            plt.savefig('gsf-%i.png' % itim)

        
        makeplots(tractor, 0, opt.suffix)
        pfn = 'herschel-%02i%s.pickle' % (0, opt.suffix)
        pickle_to_file(tractor, pfn)
        print('Wrote', pfn)

    for im in tractor.getImages():
        im.freezeAllBut('sky')

    for i in range(step0, opt.steps):
        print('Step', i)
        if callgrind:
            callgrind.callgrind_start_instrumentation()

        tractor.optimize(damp=opt.damp, alphas=[1e-3, 1e-2, 0.1, 0.3, 1., 3., 10., 30., 100.])

        if callgrind:
            callgrind.callgrind_stop_instrumentation()

        makeplots(tractor, 1 + i, opt.suffix)
        pfn = 'herschel-%02i%s.pickle' % (1 + i, opt.suffix)
        pickle_to_file(tractor, pfn)
        print('Wrote', pfn)
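
This main() follows a checkpoint/resume pattern: after every optimization step the tractor is pickled to herschel-NN<suffix>.pickle, and --resume N reloads that file and continues from step N+1. A stripped-down sketch of the same pattern using only the standard library; the state dict and loop body here are hypothetical stand-ins for the tractor and tractor.optimize(), but the filename pattern matches the code above.

# Stripped-down sketch of the checkpoint/resume loop above.
import pickle

def checkpoint_fn(step, suffix=''):
    return 'herschel-%02i%s.pickle' % (step, suffix)

def run(steps=10, resume=-1, suffix=''):
    if resume > -1:
        with open(checkpoint_fn(resume, suffix), 'rb') as f:
            state = pickle.load(f)      # pick up where the previous run stopped
        step0 = resume + 1
    else:
        state = {'n_opt_steps': 0}      # stand-in for create_tractor(opt)
        step0 = 0
    for i in range(step0, steps):
        state['n_opt_steps'] += 1       # stand-in for one tractor.optimize() step
        pfn = checkpoint_fn(i + 1, suffix)
        with open(pfn, 'wb') as f:
            pickle.dump(state, f)
        print('Wrote', pfn)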
Example 4
def main():
	import optparse
	import logging
	import sys

	parser = optparse.OptionParser()
	parser.add_option('--threads', dest='threads', default=1, type=int, help='Use this many concurrent processors')
	parser.add_option('-v', '--verbose', dest='verbose', action='count', default=0,
					  help='Make more verbose')

	parser.add_option('--grid', '-g', dest='gridn', type=int, default=5, help='Dust parameter grid size')
	parser.add_option('--steps', '-s', dest='steps', type=int, default=10, help='Number of optimization steps')
	parser.add_option('--suffix', dest='suffix', default='', help='Output file suffix')

	parser.add_option('--no-100', dest='no100', action='store_true', default=False,
					  help='Omit PACS-100 data?')

	parser.add_option('--callgrind', dest='callgrind', action='store_true', default=False, help='Turn on callgrind around tractor.optimize()')

	parser.add_option('--resume', '-r', dest='resume', type=int, default=-1, help='Resume from a previous run at the given step?')

	parser.add_option('--zoom', dest='zoom', type=float, default=1, help='Scale down the model to only touch the (1/zoom x 1/zoom) central region of the images')

	parser.add_option('--damp', dest='damp', type=float, default=1., help='LSQR damping')

	opt,args = parser.parse_args()

	if opt.verbose == 0:
		lvl = logging.INFO
		log_init(2)
	else:
		lvl = logging.DEBUG
		log_init(3)
	
	logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)

	if opt.threads > 1 and False:  # DebugPool path, deliberately disabled
		global dpool
		import debugpool
		dpool = debugpool.DebugPool(opt.threads)
		Time.add_measurement(debugpool.DebugPoolMeas(dpool))
		mp = multiproc(pool=dpool)
	else:
		print('N threads', opt.threads)
		mp = multiproc(opt.threads)#, wrap_all=True)

	if opt.callgrind:
		import callgrind
	else:
		callgrind = None

	np.seterrcall(np_err_handler)
	np.seterr(all='call')
	#np.seterr(all='raise')

	if opt.resume > -1:
		pfn = 'herschel-%02i%s.pickle' % (opt.resume, opt.suffix)
		print('Reading from', pfn)
		tractor = unpickle_from_file(pfn)
		tractor.mp = mp

		ds = tractor.getCatalog()[0]
		print('DustSheet:', ds)

		# derivs = ds.getParamDerivatives(tim)
		# dim = np.zeros(tim.shape)
		# #for k,deriv in enumerate(derivs[:40]):
		# for k,deriv in enumerate(derivs[::10]):
		# 	dim[:,:] = 0
		# 	deriv.addTo(dim)
		# 	plt.clf()
		# 	plt.imshow(dim, interpolation='nearest', origin='lower')
		# 	plt.savefig('deriv-%04i.png' % k)

		#tim = tractor.getImages()[0]
		# for it,tim in enumerate(tractor.getImages()):
		# 	X = ds._getTransformation(tim)
		# 	# #print 'X', X
		# 	keys = X.keys()
		# 	keys.sort()
		# 	# for k in keys[::10]:
		# 	# for k in keys[:40]:
		# 	for k in keys[::202]:
		# 		I,G,nil,nil = X[k]
		# 		rim = np.zeros_like(tim.getImage())
		# 		rim.ravel()[I] = G
		# 		plt.clf()
		# 		plt.imshow(rim, interpolation='nearest', origin='lower')
		# 		plt.colorbar()
		# 		plt.savefig('rim-%i-%04i.png' % (it,k))
		# 		print 'pix', k
		# sys.exit(0)

		makeplots(tractor, opt.resume, opt.suffix)
		step0 = opt.resume + 1

	else:
		step0 = 0
		tractor = create_tractor(opt)
		tractor.mp = mp

		# zero out invvar outside the model bounds.
		ds = tractor.getCatalog()[0]
		rd = ds.getRaDecCorners()
		for i,tim in enumerate(tractor.getImages()):
			poly = np.array([tim.getWcs().positionToPixel(RaDecPos(rdi[0], rdi[1])) for rdi in rd])
			poly = poly[:-1,:]
			print('Model bounding box in image', tim.name, 'coordinates:')
			print(poly.shape)
			print(poly)
			H,W = tim.shape
			xx,yy = np.meshgrid(np.arange(W), np.arange(H))
			inside = point_in_poly(xx, yy, poly)
			iv = tim.getInvvar()
			iv[(inside == 0)] = 0.
			tim.setInvvar(iv)

		print('Precomputing transformations...')
		ds = tractor.getCatalog()[0]

		# Split the grid-spread matrix into strips...
		async_results = []
		for im in tractor.getImages():
			args = []
			H,W = ds.shape
			dy = 10
			y = 0
			while y <= H:
				args.append((ds, im, y, min(H, y+dy)))
				y += dy
			async_results.append(mp.map_async(_map_trans, args))
		# Glue the strips back together...
		XX = []
		for ar in async_results:
			Xblocks = ar.get()
			X = Xblocks[0]
			for xi in Xblocks[1:]:
				X.update(xi)
			XX.append(X)
			
		for im,X in zip(tractor.getImages(), XX):
			ds._normalizeTransformation(im, X)
			ds._setTransformation(im, X)
		print('done precomputing.')

		makeplots(tractor, 0, opt.suffix)
		pfn = 'herschel-%02i%s.pickle' % (0, opt.suffix)
		pickle_to_file(tractor, pfn)
		print('Wrote', pfn)

	for im in tractor.getImages():
		im.freezeAllBut('sky')

	for i in range(step0, opt.steps):
		if callgrind:
			callgrind.callgrind_start_instrumentation()

		tractor.optimize(damp=opt.damp, alphas=[1e-3, 1e-2, 0.1, 0.3, 1., 3., 10., 30., 100.])

		if callgrind:
			callgrind.callgrind_stop_instrumentation()

		makeplots(tractor, 1 + i, opt.suffix)
		pfn = 'herschel-%02i%s.pickle' % (1 + i, opt.suffix)
		pickle_to_file(tractor, pfn)
		print('Wrote', pfn)
Example 5
def apply_alignments():
    from astrom_common import Affine
    T = fits_table('affines.fits')
    affs = Affine.fromTable(T)
    print('Read affines:', affs)

    ibright = dict([(fn.strip(), i) for i, fn in enumerate(affs.filenames)])

    corners = {}
    for line in open('corners.txt').readlines():
        line = line.strip()
        words = line.split()
        ras = np.array([float(words[i]) for i in [1, 3, 5, 7]])
        decs = np.array([float(words[i]) for i in [2, 4, 6, 8]])
        corners[words[0]] = (ras, decs)
    from astrometry.util.miscutils import point_in_poly

    fns = (glob('data/M31-*ST/proc_default/M31-*ST.phot.hdf5') +
           glob('data/M31-*ST/M31-*ST.phot.hdf5'))
    fns.sort()
    print('Files:', fns)

    veto_polys = []

    for photfile in fns:
        basename = os.path.basename(photfile)
        basename = basename.replace('.phot.hdf5', '')
        print('Base name:', basename)

        brightfn = basename + '-bright.fits'
        ii = ibright[brightfn]
        aff = affs[ii]

        print('Reading', photfile)
        df = pd.read_hdf(photfile, key='data')
        ds = vaex.from_pandas(df)
        print(len(ds), 'rows')
        ra = ds.evaluate(ds['ra'])
        dec = ds.evaluate(ds['dec'])
        ra, dec = aff.apply(ra, dec)

        corner = corners[basename]
        Tleft = fits_table()
        Tleft.ra = ra
        Tleft.dec = dec
        Tleft.index = np.arange(len(Tleft))
        ras, decs = corner
        poly = np.vstack((ras, decs)).T
        inside = point_in_poly(Tleft.ra, Tleft.dec, poly)
        print(np.sum(inside), 'of', len(Tleft),
              'inside corners of this half-brick')

        inside_veto = np.zeros(len(Tleft), bool)
        for vp in veto_polys:
            inveto = point_in_poly(Tleft.ra, Tleft.dec, vp)
            inside_veto[inveto] = True
        print(np.sum(inside_veto),
              'stars are inside the corners of previous half-bricks')
        print('inside:', type(inside))
        inside[inside_veto] = False
        print(np.sum(inside), 'stars are uniquely in this half-brick')

        veto_polys.append(poly)

        outfn = 'out-%s.hdf5' % basename
        df[inside].to_hdf(outfn,
                          key='data',
                          mode='w',
                          format='table',
                          complevel=9,
                          complib='zlib')
        print('Wrote', outfn)
Example 6
def apply_alignments(aff_fn, corners_fn, infns, pandas=True):
    from astrom_common import Affine
    T = fits_table(aff_fn)
    affs = Affine.fromTable(T)
    print('Read affines:', affs)

    ibright = dict([(fn.strip(), i) for i, fn in enumerate(T.filenames)])

    corners = {}
    for line in open(corners_fn).readlines():
        line = line.strip()
        words = line.split()
        ras = np.array([float(words[i]) for i in [1, 3, 5, 7]])
        decs = np.array([float(words[i]) for i in [2, 4, 6, 8]])
        corners[words[0]] = (ras, decs)
    from astrometry.util.miscutils import point_in_poly

    #fns1 = glob('data/M31-*ST/proc_default/M31-*ST.phot.hdf5')
    #fns2 = glob('data/M31-*ST/M31-*ST.phot.hdf5')
    #fns1.sort()
    #fns2.sort()
    #fns = fns1 + fns2
    fns = infns
    print('Files:', fns)

    veto_polys = []

    for photfile in fns:
        basename = os.path.basename(photfile)
        basename = basename.replace('.phot.hdf5', '')
        print('Base name:', basename)

        corner = corners[basename]
        ras, decs = corner
        poly = np.vstack((ras, decs)).T

        outfn2 = 'cut-%s.hdf5' % basename
        if os.path.exists(outfn2):
            print('File', outfn2, 'exists; skipping')
            veto_polys.append(poly)
            continue

        brightfn = basename + '-bright.fits'
        ii = ibright[brightfn]
        aff = affs[ii]

        print('Reading', photfile)
        if pandas:
            df = pd.read_hdf(photfile, key='data')
            ds = vaex.from_pandas(df)
        else:
            ds = vaex.open(photfile)

        def get_field(ds, col):
            if pandas:
                return ds.evaluate(ds[col])
            else:
                return ds.evaluate(ds[col.upper()])

        print(len(ds), 'rows')
        ra = get_field(ds, 'ra')
        dec = get_field(ds, 'dec')
        ra, dec = aff.apply(ra, dec)

        Tleft = fits_table()
        Tleft.ra = ra
        Tleft.dec = dec
        Tleft.index = np.arange(len(Tleft))
        inside = point_in_poly(Tleft.ra, Tleft.dec, poly)
        print(np.sum(inside), 'of', len(Tleft),
              'inside corners of this half-brick')

        inside_veto = np.zeros(len(Tleft), bool)
        for vp in veto_polys:
            inveto = point_in_poly(Tleft.ra, Tleft.dec, vp)
            inside_veto[inveto] = True
        print(np.sum(inside_veto),
              'stars are inside the corners of previous half-bricks')
        print('inside:', type(inside), inside.dtype)
        inside[inside_veto] = False
        print(np.sum(inside), 'stars are uniquely in this half-brick')

        veto_polys.append(poly)

        outfn = 'out/out-%s.hdf5' % basename
        if pandas:
            df[inside].to_hdf(outfn,
                              key='data',
                              mode='w',
                              format='table',
                              complevel=9,
                              complib='zlib')
        else:
            df = ds.take(np.flatnonzero(inside)).to_pandas_df()
            df.to_hdf(outfn,
                      key='data',
                      mode='w',
                      format='table',
                      complevel=9,
                      complib='zlib')
        print('Wrote', outfn)

        outfn = 'cut/cut-%s.hdf5' % basename
        if pandas:
            df[np.logical_not(inside)].to_hdf(outfn,
                                              key='data',
                                              mode='w',
                                              format='table',
                                              complevel=9,
                                              complib='zlib')
        else:
            df = ds.take(np.flatnonzero(np.logical_not(inside))).to_pandas_df()
            df.to_hdf(outfn,
                      key='data',
                      mode='w',
                      format='table',
                      complevel=9,
                      complib='zlib')
        print('Wrote', outfn)
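
Both versions of apply_alignments() assign stars in overlap regions to exactly one half-brick: a star is kept if it falls inside the current brick's corner polygon and not inside any polygon processed earlier (the veto_polys list). Below is a minimal sketch of that veto logic on synthetic data, with matplotlib's Path standing in for astrometry's point_in_poly and with invented footprints and points.

# Minimal sketch of the veto-polygon logic above on synthetic data;
# the polygons and points are hypothetical, and matplotlib's Path
# stands in for astrometry's point_in_poly.
import numpy as np
from matplotlib.path import Path

rng = np.random.default_rng(0)
ra, dec = rng.uniform(0., 3., size=(2, 1000))   # fake star positions (deg)
pts = np.column_stack((ra, dec))

# Two overlapping "half-brick" footprints.
bricks = [np.array([[0., 0.], [2., 0.], [2., 2.], [0., 2.]]),
          np.array([[1., 0.], [3., 0.], [3., 2.], [1., 2.]])]

veto_polys = []
for poly in bricks:
    inside = Path(poly).contains_points(pts)
    inside_veto = np.zeros(len(pts), bool)
    for vp in veto_polys:
        inside_veto |= Path(vp).contains_points(pts)
    inside[inside_veto] = False          # overlap is assigned to the earlier brick
    veto_polys.append(poly)
    print(inside.sum(), 'stars uniquely in this brick')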