def main():
    survey = LegacySurveyData()
    ccds = survey.get_ccds_readonly()
    print(len(ccds), 'CCDs')
    ccds = ccds[ccds.ccd_cuts == 0]
    print(len(ccds), 'good CCDs')

    # Find bricks touched by >=1 CCD
    bricks = survey.get_bricks_readonly()
    bricks = bricks[(bricks.dec > -20) * (bricks.dec < 35.)]
    print(len(bricks), 'bricks in Dec range')
    I, J, d = match_radec(bricks.ra, bricks.dec, ccds.ra, ccds.dec, 0.5,
                          nearest=True)
    bricks = bricks[I]
    print(len(bricks), 'bricks')

    bands = ['g', 'r', 'z']
    nexps = {}
    for b in bands:
        ne = np.zeros(len(bricks), np.int16)
        nexps[b] = ne
        bricks.set('nexp_' + b, ne)
    npix = {}
    for b in bands:
        n = np.zeros(len(bricks), np.int64)
        npix[b] = n
        bricks.set('npix_' + b, n)
    for b in bands:
        n = np.zeros(len(bricks), np.float32)
        bricks.set('psfdepth_' + b, n)

    args = enumerate(bricks)
    mp = multiproc(8)
    R = mp.map(one_brick, args)

    for ibrick, res in enumerate(R):
        if res is None:
            continue
        (npix, nexps, depths) = res
        for band in bands:
            bricks.get('npix_' + band)[ibrick] = npix[band]
            bricks.get('nexp_' + band)[ibrick] = nexps[band]
            bricks.get('psfdepth_' + band)[ibrick] = depths[band]

    bricks.cut((bricks.nexp_g + bricks.nexp_r + bricks.nexp_z) > 0)
    bricks.writeto('/global/cscratch1/sd/dstn/bricks-nexp.fits')
def organize_by_brick(sample_fns, sbricks, outdir=None, seed=None, prefix=None):
    '''For each sample_fn, split the sample into bricks: get the per-brick
    sample filename for that brick and sample, and write it if it does not
    already exist.

    sample_fn -- sample_seed.fits file assigned to this MPI task
    sbricks -- survey-bricks table cut to the ra,dec region
    '''
    dr = get_bybrick_dir(outdir=outdir)
    for sample_fn in sample_fns:
        # Skip if we have already looped over bricks for this sample
        check_done = os.path.join(dr, get_sample_fn(seed=seed, prefix=prefix))
        check_done = check_done.replace('.fits', '_done.txt')
        if os.path.exists(check_done):
            print('check_done exists: %s' % check_done)
            continue
        sample = fits_table(sample_fn)
        print('sample min,max ra,dec= %f %f %f %f' %
              (sample.ra.min(), sample.ra.max(),
               sample.dec.min(), sample.dec.max()))
        # Loop over survey bricks
        survey = LegacySurveyData()
        for sbrick in sbricks:
            # Get output fn for this brick and sample
            fn = os.path.join(dr, get_brick_sample_fn(
                brickname=sbrick.brickname, seed=seed, prefix=prefix))
            if os.path.exists(fn):
                continue
            # Cut sample by brick's bounds
            brickinfo = survey.get_brick_by_name(sbrick.brickname)
            brickwcs = wcs_for_brick(brickinfo)
            ra1, ra2, dec1, dec2 = brickwcs.radec_bounds()
            keep = ((sample.ra >= ra1) * (sample.ra <= ra2) *
                    (sample.dec >= dec1) * (sample.dec <= dec2))
            sample2 = sample.copy()
            if np.where(keep)[0].size > 0:
                sample2.cut(keep)
                sample2.writeto(fn)
                print('Wrote %s' % fn)
            else:
                print('WARNING: sample=%s has no ra,dec in brick=%s' %
                      (sample_fn, sbrick.brickname))
        # This sample is done
        with open(check_done, 'w') as foo:
            foo.write('done')
def __init__(self, ls_dir=None, outdir=None, savedir=None, jpeg=False):
    """outdir: required
    ls_dir: not needed if the env var LEGACY_SURVEY_DIR is already set
    savedir: where to write the HDF5 files; defaults to outdir if None
    """
    self.outdir = outdir
    self.jpeg = jpeg
    if ls_dir:
        os.environ["LEGACY_SURVEY_DIR"] = ls_dir
    self.savedir = savedir
    if self.savedir is None:
        self.savedir = self.outdir
    self.survey = LegacySurveyData()
def main(outfn='ccds-annotated.fits', ccds=None, **kwargs):
    survey = LegacySurveyData(ccds=ccds)
    if ccds is None:
        ccds = survey.get_ccds()

    # Set to True if we successfully read the calibration products and
    # computed annotated values
    init_annotations(ccds)

    annotate(ccds, **kwargs)

    print('Writing to', outfn)
    ccds.writeto(outfn)
    print('Wrote', outfn)
def queue():
    if False:
        survey = LegacySurveyData()
        ccds = survey.get_ccds()
        bricks = survey.get_bricks()
        print(len(bricks), 'bricks')
        print(len(ccds), 'CCDs')
        bricks.cut((bricks.dec >= -30) * (bricks.dec <= 30))
        print(len(bricks), 'in Dec [-30, +30]')
        I = survey.photometric_ccds(ccds)
        ccds.cut(I)
        print(len(ccds), 'pass photometric cut')
        I, J, d = match_radec(bricks.ra, bricks.dec, ccds.ra, ccds.dec, 0.5,
                              nearest=True)
        print(len(I), 'bricks with CCDs nearby')
        bricks.cut(I)
        bricknames = bricks.brickname
    else:
        # DR7: use Martin's list of bricks w/ CCD coverage
        f = open('nccds.dat')
        bricknames = []
        for line in f.readlines():
            words = line.strip().split(' ')
            brick = words[0]
            nccd = int(words[1])
            if nccd > 0:
                bricknames.append(brick)

    # qdo
    bb = bricknames
    while len(bb):
        print(' '.join(bb[:100]))
        bb = bb[100:]
    return

    mp = multiproc(16)
    N = len(bricks)
    args = [(brick, i, N, plots, {}) for i, brick in enumerate(bricks)]
    mp.map(run_one_brick, args)
def main():
    B = fits_table(
        '/global/cfs/cdirs/cosmo/data/legacysurvey/dr8/survey-bricks.fits.gz')
    B.ll, B.bb = radectolb(B.ra, B.dec)
    I = np.flatnonzero((B.dec > -70) * (np.abs(B.bb) > 10))
    B[I].writeto('bricks-for-gaia.fits')
    BG = B[I]
    BG = BG[np.argsort(-BG.dec)]

    # healpixes = set()
    # nside = 32
    # for r,d in zip(BG.ra,BG.dec):
    #     hpxy = radecdegtohealpix(r, d, nside)
    #     hpring = healpix_xy_to_ring(hpxy, nside)
    #     healpixes.add(hpring)
    # hr,hd = [],[]
    # for hp in healpixes:
    #     hp = healpix_ring_to_xy(hp, nside)
    #     r,d = healpix_to_radecdeg(hp, nside, 0.5, 0.5)
    #     hr.append(r)
    #     hd.append(d)
    # plt.plot(hr, hd, 'b.', alpha=0.1);

    survey = LegacySurveyData('/global/cfs/cdirs/cosmo/work/legacysurvey/dr9')

    #BG = BG[:100]
    # GG = []
    # for i,brick in enumerate(BG):
    #     G = one_brick(brick, survey)
    #     GG.append(G)

    mp = multiproc(32)
    GG = []
    iset = 0
    while len(BG):
        N = 10000
        outfn = '/global/cscratch1/sd/dstn/gaia-mask-dr9-set%i.fits' % iset
        if os.path.exists(outfn):
            Gset = fits_table(outfn)
            print('Read', outfn)
            nb = len(set(Gset.brickname))
            if nb != N:
                print('Warning: file contains', nb, 'bricks, vs', N)
        else:
            Gset = mp.map(bounce_one_brick,
                          [(brick, survey) for brick in BG[:N]])
            Gset = [G for G in Gset if G is not None]
            Gset = merge_tables(Gset, columns='fillzero')
            Gset.writeto(outfn)
        GG.append(Gset)
        iset += 1
        BG = BG[N:]
    G = merge_tables(GG, columns='fillzero')
    G.writeto('/global/cscratch1/sd/dstn/gaia-mask-dr9.fits')
def get_grid_randoms(truth_fn, bricknames=[], south=True, seed=None):
    rng = np.random.RandomState(seed=seed)
    randoms = 0
    survey = LegacySurveyData(
        survey_dir='/global/cfs/cdirs/cosmo/work/legacysurvey/dr9')
    for iseed, brickname in enumerate(bricknames):
        randoms += get_grid_in_brick(survey, brickname, rng=rng)
    randoms.photsys = randoms.full('S' if south else 'N')
    truth = get_truth(truth_fn, south=south)
    randoms.fill(sample_from_truth(randoms, truth, rng=rng),
                 index_self=None, index_other=None)
    return randoms
def getbrickfiles(brickname=None):
    survey = LegacySurveyData()
    brickinfo = survey.get_brick_by_name(brickname)
    brickwcs = wcs_for_brick(brickinfo)
    ccdinfo = survey.ccds_touching_wcs(brickwcs)
    nccd = len(ccdinfo)

    calibdir = survey.get_calib_dir()
    imagedir = survey.survey_dir

    # Construct image file names and the calibration file names.
    expnum = ccdinfo.expnum
    ccdname = ccdinfo.ccdname

    psffiles = list()
    skyfiles = list()
    imagefiles = list()
    for ccd in ccdinfo:
        info = survey.get_image_object(ccd)
        for attr in ['imgfn', 'dqfn', 'wtfn']:
            fn = getattr(info, attr).replace(imagedir + '/', '')
            #if '160108_073601' in fn:
            #    pdb.set_trace()
            imagefiles.append(fn)
        psffiles.append(info.psffn.replace(calibdir, 'calib'))
        skyfiles.append(info.splineskyfn.replace(calibdir, 'calib'))

    #for ii in range(nccd):
    #    exp = '{0:08d}'.format(expnum[ii])
    #    rootfile = os.path.join(exp[:5], exp, 'decam-'+exp+'-'+ccdname[ii]+'.fits')
    #    psffiles.append(os.path.join('calib', 'decam', 'psfex', rootfile))
    #    skyfiles.append(os.path.join('calib', 'decam', 'splinesky', rootfile))
    #    imagefiles.append(os.path.join('images', str(np.core.defchararray.strip(ccdinfo.image_filename[ii]))))

    #print(np.array(imagefiles))
    #print(np.array(psffiles))
    #print(np.array(skyfiles))
    return imagefiles, psffiles, skyfiles
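# A minimal usage sketch of getbrickfiles (the brick name is borrowed from the
# runbrick test elsewhere in this repo; writing the lists to text files is an
# assumption about how the output might be used, not part of the original code):
def write_brick_file_lists(brickname='2447p120'):
    imagefiles, psffiles, skyfiles = getbrickfiles(brickname=brickname)
    for outfn, files in [('imagefiles.txt', imagefiles),
                         ('psffiles.txt', psffiles),
                         ('skyfiles.txt', skyfiles)]:
        with open(outfn, 'w') as f:
            f.write('\n'.join(files) + '\n')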
def __init__(self, expnum=None, ccdname=None, ccdwcs=None,
             pattern='/project/projectdirs/cosmo/work/ps1/cats/chunks-qz-star-v3/ps1-%(hp)05d.fits'):
    """Read PS1 or Gaia sources for an exposure number + CCD name, or a CCD WCS.

    Args:
        expnum, ccdname: select the catalogue with these
        ccdwcs: or select the catalogue with this
        pattern: absolute path and wildcard for the PS1 or Gaia catalogues
            dr: /project/projectdirs/cosmo/work/
            PS1:      ${dr}/ps1/cats/chunks-qz-star-v3/ps1-%(hp)05d.fits
            PS1-Gaia: ${dr}/gaia/chunks-ps1-gaia/chunk-%(hp)05d.fits
    """
    assert ('ps1' in pattern or 'gaia' in pattern)
    #assert(ps1_or_gaia in ['ps1','ps1_gaia'])
    #if ps1_or_gaia == 'ps1':
    #    # PS1 "qz" directory
    #    # e.g. /project/projectdirs/cosmo/work/ps1/cats/chunks-qz-star-v2
    #    self.catdir = os.getenv('PS1CAT_DIR')
    #elif ps1_or_gaia == 'ps1_gaia':
    #    # PS1-Gaia "qz" matches-only directory
    #    # e.g. /project/projectdirs/cosmo/work/gaia/chunks-ps1-gaia
    #    self.catdir = os.getenv('PS1_GAIA_MATCHES')
    #fnpattern = os.path.join(self.catdir, prefix + '-%(hp)05d.fits')
    super(ps1cat, self).__init__(pattern)
    if ccdwcs is None:
        from legacypipe.survey import LegacySurveyData
        survey = LegacySurveyData()
        ccd = survey.find_ccds(expnum=expnum, ccdname=ccdname)[0]
        im = survey.get_image_object(ccd)
        self.ccdwcs = im.get_wcs()
    else:
        self.ccdwcs = ccdwcs
def __init__(self, **kw):
    super(analysis_setup, self).__init__(**kw)
    # raise ValueError('hey')
    self.tol = Tolerances().get(survey=self.survey, bands=self.bands,
                                obj=self.obj)
    self.config_dir = os.path.join(os.path.dirname(self.outdir),
                                   'testcase_%s_%s' % (kw['survey'], kw['bands']))
    self.rsdir = 'rs0'
    survey = LegacySurveyData()
    brickinfo = get_brickinfo_hack(survey, self.brick)
    self.brickwcs = wcs_for_brick(brickinfo)
def __init__(self, expnum=None, ccdname=None, ccdwcs=None):
    """Read PS1 or Gaia sources for an exposure number + CCD name, or a CCD WCS.

    Args:
        expnum, ccdname: select the catalogue with these
        ccdwcs: or select the catalogue with this
    """
    self.ps1catdir = os.getenv('PS1CAT_DIR')
    if self.ps1catdir is None:
        raise ValueError('You must have the PS1CAT_DIR environment variable '
                         'set to point to healpixed PS1 catalogs')
    fnpattern = os.path.join(self.ps1catdir, 'ps1-%(hp)05d.fits')
    super(ps1cat, self).__init__(fnpattern)
    if ccdwcs is None:
        from legacypipe.survey import LegacySurveyData
        survey = LegacySurveyData()
        ccd = survey.find_ccds(expnum=expnum, ccdname=ccdname)[0]
        im = survey.get_image_object(ccd)
        self.ccdwcs = im.get_wcs()
    else:
        self.ccdwcs = ccdwcs
def __init__(self, expnum=None, ccdname=None, ccdwcs=None):
    """Initialize the class with either the exposure number *and* CCD name,
    or directly with the WCS of the CCD of interest.
    """
    # Gaia and PS1 info: Gaia for astrometry, PS1 for photometry
    self.gaiadir = os.getenv('GAIACAT_DIR')
    # PS1 only
    self.ps1dir = os.getenv('PS1CAT_DIR')
    if self.ps1dir is None:
        raise ValueError('Need PS1CAT_DIR environment variable to be set.')
    if self.gaiadir is None:
        print('WARNING: GAIACAT_DIR environment variable not set: '
              'using Pan-STARRS1 for astrometry')
    self.nside = 32
    if ccdwcs is None:
        from legacypipe.survey import LegacySurveyData
        survey = LegacySurveyData()
        ccd = survey.find_ccds(expnum=expnum, ccdname=ccdname)[0]
        im = survey.get_image_object(ccd)
        self.ccdwcs = im.get_wcs()
    else:
        self.ccdwcs = ccdwcs
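# A minimal usage sketch of the ps1cat reader (expnum and ccdname values are
# illustrative; get_stars is the accessor used by psf_residuals later in this
# section):
def example_ps1_stars():
    cat = ps1cat(expnum=348666, ccdname='N4')
    stars = cat.get_stars(band='r', magrange=(13, 17))
    return stars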
Here is the gory story of the DR5 depth cut.  I ran these scripts, in order:

  legacyanalysis/depth-cut.py (once per brick)
    --> generates depthcut/*/ccds-*.fits tables of CCDs that pass the depth
        cut for that brick

  legacyanalysis/check-depth-cut.py
    --> reads the per-brick ccds-* tables and cuts to the union of all CCDs
        that pass the depth cut in some brick -> depth-cut-kept-ccds.fits

  legacyanalysis/dr5-cut-ccds.py
    --> reads depth-cut-kept-ccds.fits, cuts the (already-created)
        annotated-ccds table, and creates the .kd.fits version of the CCDs
        table
'''
survey = LegacySurveyData()

# Read *old* annotated-CCDs tables.
ann = survey.get_annotated_ccds()
print('Got', len(ann), 'annotated CCDs')
ann.about()

# Build mapping from (expnum, ccdname) to index in the ann table.
annmap = dict([((e, c.strip()), i)
               for i, (e, c) in enumerate(zip(ann.expnum, ann.ccdname))])

# Read the *new* zeropoints file.
ccds = fits_table('/global/cscratch1/sd/kaylanb/dr5_zpts/survey-ccds-legacypipe-hdufix-45455-nocuts.fits.gz')
print('Read', len(ccds), 'CCDs')
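# A minimal sketch of the "union" step described above (an assumption about
# what check-depth-cut.py does, not the script itself): collect the set of
# (expnum, ccdname) pairs that pass the depth cut in at least one brick.
def union_of_depthcut_ccds(fns):
    from astrometry.util.fits import fits_table
    kept = set()
    for fn in fns:
        T = fits_table(fn)
        for e, c in zip(T.expnum, T.ccdname):
            kept.add((e, c.strip()))
    return kept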
parser.add_argument('--survey-dir', type=str, default=None,
                    help='Override the $LEGACY_SURVEY_DIR environment variable')
parser.add_argument('--out', '-o', default='skyfibers.fits',
                    help='Output filename')
parser.add_argument('--plots', '-p', default=None, help='Plots base filename')
parser.add_argument('--brick', default=None, help='Brick name')
opt = parser.parse_args()

if not opt.brick:
    parser.print_help()
    sys.exit(-1)

from legacypipe.survey import LegacySurveyData
survey = LegacySurveyData(survey_dir=opt.survey_dir)
skyfibers = sky_fibers_for_brick(survey, opt.brick)
skyfibers.writeto(opt.out, header=skyfibers._header)
print('Wrote', opt.out)

if opt.plots:
    import matplotlib
    matplotlib.use('Agg')
    sky_fiber_plots(survey, opt.brick, skyfibers, opt.plots)
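# Example invocation of this sky-fibers driver (the script name and brick are
# illustrative; the brick appears elsewhere in this repo):
#   python skyfibers.py --brick 1163p277 --out skyfibers.fits --plots skyplots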
def main(args=None): """Main routine which parses the optional inputs.""" t0= Time() # Command line options if args is None: # Read from cmd line parser= get_parser() args = parser.parse_args(args=args) else: # args is already a argparse.Namespace obj pass # Print calling sequence print('Args:', args) if args.do_more == 'yes': assert(not args.minid is None) # Setup loggers if args.verbose: lvl = logging.DEBUG else: lvl = logging.INFO logging.basicConfig(level=lvl, stream=sys.stdout) #,format='%(message)s') log = logging.getLogger('decals_sim') # Sort through args #log.info('decals_sim.py args={}'.format(args)) #max_nobj=500 #max_nchunk=1000 #if args.ith_chunk is not None: assert(args.ith_chunk <= max_nchunk-1) #assert(args.nchunk <= max_nchunk) #assert(args.nobj <= max_nobj) #if args.ith_chunk is not None: # assert(args.nchunk == 1) #if choose a chunk, only doing 1 chunk if args.nobj is None: parser.print_help() sys.exit(1) # Exit if expected output already exists rsdir= get_outdir_runbrick(args.outdir, args.brick,args.rowstart, do_skipids=args.do_skipids, do_more=args.do_more) rsdir= os.path.basename(rsdir) tractor_fn= os.path.join(args.outdir, 'tractor',args.brick[:3],args.brick, rsdir, 'tractor-%s.fits' % args.brick) if (os.path.exists(tractor_fn) & (not args.overwrite_if_exists)): print('Exiting, already finished %s' % tractor_fn) return 0 #sys.exit(0) brickname = args.brick objtype = args.objtype # Output dir decals_sim_dir = args.outdir #nchunk = args.nchunk #rand = np.random.RandomState(args.seed) # determines seed for all chunks #seeds = rand.random_integers(0,2**18, max_nchunk) log.info('Object type = {}'.format(objtype)) #log.info('Number of objects = {}'.format(nobj)) #log.info('Number of chunks = {}'.format(nchunk)) # Optionally zoom into a portion of the brick survey = LegacySurveyData(survey_dir=args.survey_dir) brickinfo= get_brickinfo_hack(survey,brickname) #brickinfo = survey.get_brick_by_name(brickname) #print(brickname) brickwcs = wcs_for_brick(brickinfo) W, H, pixscale = brickwcs.get_width(), brickwcs.get_height(), brickwcs.pixel_scale() log.info('Brick = {}'.format(brickname)) if args.zoom is not None: # See also runbrick.stage_tims() (x0, x1, y0, y1) = args.zoom W = x1 - x0 H = y1 - y0 brickwcs = brickwcs.get_subimage(x0, y0, W, H) log.info('Zoom (pixel boundaries) = {}'.format(args.zoom)) targetrd = np.array([brickwcs.pixelxy2radec(x, y) for x, y in [(1,1), (W,1), (W,H), (1,H), (1,1)]]) radec_center = brickwcs.radec_center() log.info('RA, Dec center = {}'.format(radec_center)) log.info('Brick = {}'.format(brickname)) t0= ptime('First part of Main()',t0) # SAMPLE table sample_kwargs= {"objtype":args.objtype, "brick":args.brick, "outdir":args.outdir, "randoms_db":args.randoms_db, "minid":args.minid, "do_skipids":args.do_skipids, "randoms_from_fits":args.randoms_from_fits, "dont_sort_sampleid":args.dont_sort_sampleid} Samp,seed= get_sample(**sample_kwargs) Samp= Samp[args.rowstart:args.rowstart + args.nobj] # Performance #if objtype in ['elg','lrg']: # Samp=Samp[np.argsort( Samp.get('%s_n' % objtype) )] print('Max sample size=%d, actual sample size=%d' % (args.nobj,len(Samp))) assert(len(Samp) <= args.nobj) t0= ptime('Got randoms sample',t0) # Store args in dict for easy func passing kwargs=dict(Samp=Samp,\ brickname=brickname, \ checkpoint=args.checkpoint, \ seed= seed, decals_sim_dir= decals_sim_dir,\ brickwcs= brickwcs, \ objtype=objtype,\ nobj=len(Samp),\ maxobjs=args.nobj,\ rowst=args.rowstart,\ do_skipids=args.do_skipids,\ do_more=args.do_more,\ minid=args.minid,\ 
survey_dir=args.survey_dir,\ args=args) # Stop if starting row exceeds length of radec,color table if len(Samp) == 0: fn= get_outdir_runbrick(kwargs['decals_sim_dir'], kwargs['brickname'],kwargs['rowst'], do_skipids=kwargs['do_skipids'],do_more=kwargs['do_more']) fn+= '_exceeded.txt' junk= os.system('touch %s' % fn) print('Wrote %s' % fn) #we want not to add any sample -- obiwan #raise ValueError('starting row=%d exceeds number of artificial sources, quit' % args.rowstart) # Create simulated catalogues and run Tractor create_metadata(kwargs=kwargs) t0= ptime('create_metadata',t0) # do chunks #for ith_chunk in chunk_list: #log.info('Working on chunk {:02d}/{:02d}'.format(ith_chunk,kwargs['nchunk']-1)) # Random ra,dec and source properties create_ith_simcat(d=kwargs) #log.info('HUI-TEST:::out of create_ith_simcat') t0= ptime('create_ith_simcat',t0) # Run tractor #log.info('HUI-TEST:::running tractor') do_one_chunk(d=kwargs) #log.info('HUI-TEST::: checkpoint3i') t0= ptime('do_one_chunk',t0) # Clean up output if args.no_cleanup == False: do_ith_cleanup(d=kwargs) #log.info('HUI-TEST::: checkpoint3j') t0= ptime('do_ith_cleanup',t0) log.info('All done!') #log.info('HUI-TEST::: checkpoint3k') return 0
def main():
    """Main program.
    """
    import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--force', action='store_true',
                        help='Run calib processes even if files already exist?')
    parser.add_argument('--ccds', help='Set ccds.fits file to load')
    parser.add_argument('--expnum', type=int, help='Cut to a single exposure')
    parser.add_argument('--extname', '--ccdname',
                        help='Cut to a single extension/CCD name')
    parser.add_argument('--no-psf', dest='psfex', action='store_false',
                        help='Do not compute PsfEx calibs')
    parser.add_argument('--no-sky', dest='sky', action='store_false',
                        help='Do not compute sky models')
    parser.add_argument('--run-se', action='store_true', help='Run SourceExtractor')
    parser.add_argument('--splinesky', action='store_true',
                        help='Spline sky, not constant')
    parser.add_argument('--threads', type=int, help='Run multi-threaded',
                        default=None)
    parser.add_argument('args', nargs=argparse.REMAINDER)
    opt = parser.parse_args()

    survey = LegacySurveyData()
    if opt.ccds is not None:
        T = fits_table(opt.ccds)
        print('Read', len(T), 'from', opt.ccds)
    else:
        T = survey.get_ccds()
        #print len(T), 'CCDs'

    if len(opt.args) == 0:
        if opt.expnum is not None:
            T.cut(T.expnum == opt.expnum)
            print('Cut to', len(T), 'with expnum =', opt.expnum)
        if opt.extname is not None:
            T.cut(np.array([(t.strip() == opt.extname) for t in T.ccdname]))
            print('Cut to', len(T), 'with extname =', opt.extname)
        opt.args = range(len(T))

    args = []
    for a in opt.args:
        # Check for "expnum-ccdname" format.
        if '-' in str(a):
            words = a.split('-')
            assert(len(words) == 2)
            expnum = int(words[0])
            ccdname = words[1]
            I = np.flatnonzero((T.expnum == expnum) * (T.ccdname == ccdname))
            if len(I) != 1:
                print('Found', len(I), 'CCDs for expnum', expnum,
                      'CCDname', ccdname, ':', I)
            assert(len(I) == 1)
            t = T[I[0]]
        else:
            i = int(a)
            print('Index', i)
            t = T[i]

        print('CCDnmatch', t.ccdnmatch)
        if t.ccdnmatch < 20 and not opt.force:
            print('Skipping ccdnmatch = %i' % t.ccdnmatch)
            continue

        im = survey.get_image_object(t)
        print('Running', im.calname)

        kwargs = dict(psfex=opt.psfex, sky=opt.sky)
        if opt.force:
            kwargs.update(force=True)
        if opt.run_se:
            kwargs.update(se=True)
        if opt.splinesky:
            kwargs.update(splinesky=True)

        if opt.threads:
            args.append((im, kwargs))
        else:
            run_calibs((im, kwargs))

    if opt.threads:
        from astrometry.util.multiproc import multiproc
        mp = multiproc(opt.threads)
        mp.map(run_calibs, args)

    return 0
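# Example invocations, assuming this is the legacypipe/run-calib.py driver
# referenced by queue-calibs later in this section (the ccdname is
# illustrative; the exposure number appears elsewhere in this repo):
#   python legacypipe/run-calib.py --expnum 348666 --splinesky --threads 8
#   python legacypipe/run-calib.py 348666-N4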
def main(): parser = argparse.ArgumentParser() parser.add_argument('--build-sample', action='store_true', help='Build the sample.') parser.add_argument('--jpg-cutouts', action='store_true', help='Get jpg cutouts from the viewer.') parser.add_argument('--ccd-cutouts', action='store_true', help='Get CCD cutouts of each galaxy.') parser.add_argument('--runbrick', action='store_true', help='Run the pipeline.') parser.add_argument('--build-webpage', action='store_true', help='(Re)build the web content.') args = parser.parse_args() # Top-level directory key = 'LEGACY_SURVEY_LARGE_GALAXIES' if key not in os.environ: print('Required ${} environment variable not set'.format(key)) return 0 largedir = os.getenv(key) samplefile = os.path.join(largedir, 'large-galaxies-sample.fits') # -------------------------------------------------- # Build the sample of large galaxies based on the available imaging. if args.build_sample: # Read the parent catalog. cat = read_rc3() # Create a simple WCS object for each object and find all the CCDs # touching that WCS footprint. survey = LegacySurveyData(version='dr2') # hack! allccds = survey.get_ccds() keep = np.concatenate((survey.apply_blacklist(allccds), survey.photometric_ccds(allccds))) allccds.cut(keep) ccdlist = [] outcat = [] for gal in cat: galwcs = _simplewcs(gal) ccds1 = allccds[ccds_touching_wcs(galwcs, allccds)] ccds1 = ccds1[_uniqccds(ccds1)] if len(ccds1) > 0 and 'g' in ccds1.filter and 'r' in ccds1.filter and 'z' in ccds1.filter: print('Found {} CCDs for {}, D(25)={:.4f}'.format( len(ccds1), gal['GALAXY'], gal['RADIUS'])) ccdsfile = os.path.join(largedir, 'ccds', '{}-ccds.fits'.format(gal['GALAXY'].strip().lower())) print(' Writing {}'.format(ccdsfile)) if os.path.isfile(ccdsfile): os.remove(ccdsfile) ccds1.writeto(ccdsfile) ccdlist.append(ccds1) if len(outcat) == 0: outcat = gal else: outcat = vstack((outcat, gal)) #if gal['GALAXY'] == 'MCG5-19-36': # pdb.set_trace() # Write out the final catalog. samplefile = os.path.join(largedir, 'large-galaxies-sample.fits') if os.path.isfile(samplefile): os.remove(samplefile) print('Writing {}'.format(samplefile)) outcat.write(samplefile) print(outcat) # Do we need to transfer any of the data to nyx? _getfiles(merge_tables(ccdlist)) # -------------------------------------------------- # Get data, model, and residual cutouts from the legacysurvey viewer. Also # get thumbnails that are lower resolution. if args.jpg_cutouts: thumbsize = 100 sample = fits.getdata(samplefile, 1) for gal in sample: size = np.ceil(10*gal['RADIUS']/PIXSCALE) thumbpixscale = PIXSCALE*size/thumbsize #imageurl = 'http://legacysurvey.org/viewer/jpeg-cutout-decals-dr2?ra={:.6f}&dec={:.6f}'.format(gal['RA'], gal['DEC'])+\ # '&pixscale={:.3f}&size={:g}'.format(PIXSCALE, size) #imagejpg = os.path.join(largedir, 'cutouts', gal['GALAXY'].strip().lower()+'-image.jpg') #if os.path.isfile(imagejpg): # os.remove(imagejpg) #os.system('wget --continue -O {:s} "{:s}"' .format(imagejpg, imageurl)) thumburl = 'http://legacysurvey.org/viewer/jpeg-cutout-decals-dr2?ra={:.6f}&dec={:.6f}'.format(gal['RA'], gal['DEC'])+\ '&pixscale={:.3f}&size={:g}'.format(thumbpixscale, thumbsize) thumbjpg = os.path.join(largedir, 'cutouts', gal['GALAXY'].strip().lower()+'-image-thumb.jpg') if os.path.isfile(thumbjpg): os.remove(thumbjpg) os.system('wget --continue -O {:s} "{:s}"' .format(thumbjpg, thumburl)) # -------------------------------------------------- # (Re)build the webpage. 
if args.build_webpage: # index.html html = open(os.path.join(largedir, 'index.html'), 'w') html.write('<html><body>\n') html.write('<h1>Sample of Large Galaxies</h1>\n') html.write('<table border="2" width="30%">\n') html.write('<tbody>\n') sample = fits.getdata(samplefile, 1) for gal in sample: # Add coordinates and sizes here. galaxy = gal['GALAXY'].strip().lower() html.write('<tr>\n') html.write('<td><a href="html/{}.html">{}</a></td>\n'.format(galaxy, galaxy.upper())) html.write('<td><a href="http://legacysurvey.org/viewer/?ra={:.6f}&dec={:.6f}" target="_blank"><img src=cutouts/{}-image-thumb.jpg alt={} /></a></td>\n'.format(gal['RA'], gal['DEC'], galaxy, galaxy.upper())) # html.write('<td><a href="html/{}.html"><img src=cutouts/{}-image-thumb.jpg alt={} /></a></td>\n'.format(galaxy, galaxy, galaxy.upper())) html.write('</tr>\n') html.write('</tbody>\n') html.write('</table>\n') html.write('</body></html>\n') html.close() sys.exit(1) # individual galaxy pages for gal in sample[:3]: galaxy = gal['GALAXY'].strip().lower() html = open(os.path.join(largedir, 'html/{}.html'.format(galaxy)), 'w') html.write('<html><body>\n') html.write('<a href=../cutouts/{}.jpg><img src=../cutouts/{}-image.jpg alt={} /></a>\n'.format(galaxy, galaxy, galaxy, galaxy.upper())) html.write('</body></html>\n') html.close() # -------------------------------------------------- # Get cutouts of all the CCDs for each galaxy. if args.ccd_cutouts: sample = fits.getdata(samplefile, 1) for gal in sample[1:2]: galaxy = gal['GALAXY'].strip().lower() ccdsfile = os.path.join(largedir, 'ccds', '{}-ccds.fits'.format(galaxy)) ccds = fits.getdata(ccdsfile) pdb.set_trace() # -------------------------------------------------- # Run the pipeline. if args.runbrick: sample = fits.getdata(samplefile, 1) for gal in sample[1:2]: galaxy = gal['GALAXY'].strip().lower() diam = 10*np.ceil(gal['RADIUS']/PIXSCALE).astype('int16') # [pixels] # Note: zoom is relative to the center of an imaginary brick with # dimensions (0, 3600, 0, 3600). survey = LegacySurveyData(version='dr2', output_dir=largedir) run_brick(None, survey, radec=(gal['RA'], gal['DEC']), blobxy=zip([diam/2], [diam/2]), threads=1, zoom=(1800-diam/2, 1800+diam/2, 1800-diam/2, 1800+diam/2), wise=False, forceAll=True, writePickles=False, do_calibs=False, write_metrics=False, pixPsf=True, splinesky=True, early_coadds=True, stages=['writecat'], ceres=False) pdb.set_trace()
def main(): ps = PlotSequence('cov') survey = LegacySurveyData() ra,dec = 242.0, 10.2 fn = 'coverage-ccds.fits' if not os.path.exists(fn): ccds = survey.get_ccds() ccds.cut(ccds.filter == 'r') ccds.cut(ccds.propid == '2014B-0404') ccds.cut(np.hypot(ccds.ra_bore - ra, ccds.dec_bore - dec) < 2.5) print(np.unique(ccds.expnum), 'unique exposures') print('propids', np.unique(ccds.propid)) ccds.writeto(fn) else: ccds = fits_table(fn) plt.clf() for e in np.unique(ccds.expnum): I = np.flatnonzero(ccds.expnum == e) plt.plot(ccds.ra[I], ccds.dec[I], '.') ps.savefig() degw = 3.0 pixscale = 10. W = degw * 3600 / 10. H = W hi = 6 cmap = cmap_discretize('jet', hi+1) wcs = Tan(ra, dec, W/2.+0.5, H/2.+0.5, -pixscale/3600., 0., 0., pixscale/3600., float(W), float(H)) r0,d0 = wcs.pixelxy2radec(1,1) r1,d1 = wcs.pixelxy2radec(W,H) extent = [min(r0,r1),max(r0,r1), min(d0,d1),max(d0,d1)] for expnums in [ [348666], [348666,348710, 348686], [348659, 348667, 348658, 348666, 348665, 348669, 348668], None, [348683, 348687, 347333, 348686, 348685, 348692, 348694, 348659, 348667, 348658, 348666, 348665, 348669, 348668, 348707, 348709, 348708, 348710, 348711, 348716, 348717], ]: nexp = np.zeros((H,W), np.uint8) for ccd in ccds: if expnums is not None and not ccd.expnum in expnums: continue ccdwcs = survey.get_approx_wcs(ccd) r,d = ccdwcs.pixelxy2radec(1, 1) ok,x0,y0 = wcs.radec2pixelxy(r, d) r,d = ccdwcs.pixelxy2radec(ccd.width, ccd.height) ok,x1,y1 = wcs.radec2pixelxy(r, d) xlo = np.clip(int(np.round(min(x0,x1))) - 1, 0, W-1) xhi = np.clip(int(np.round(max(x0,x1))) - 1, 0, W-1) ylo = np.clip(int(np.round(min(y0,y1))) - 1, 0, H-1) yhi = np.clip(int(np.round(max(y0,y1))) - 1, 0, H-1) nexp[ylo:yhi+1, xlo:xhi+1] += 1 plt.clf() plt.imshow(nexp, interpolation='nearest', origin='lower', vmin=-0.5, vmax=hi+0.5, cmap=cmap, extent=extent) plt.colorbar(ticks=np.arange(hi+1)) ps.savefig() O = fits_table('obstatus/decam-tiles_obstatus.fits') O.cut(np.hypot(O.ra - ra, O.dec - dec) < 2.5) for p in [1,2,3]: print('Pass', p, 'exposures:', O.r_expnum[O.get('pass') == p]) O.cut(O.get('pass') == 2) print(len(O), 'pass 2 nearby') d = np.hypot(O.ra - ra, O.dec - dec) print('Dists:', d) I = np.flatnonzero(d < 0.5) assert(len(I) == 1) ocenter = O[I[0]] print('Center expnum', ocenter.r_expnum) I = np.flatnonzero(d >= 0.5) O.cut(I) #center = ccds[ccds.expnum == ocenter.r_expnum] #p2 = ccds[ccds. ok,xc,yc = wcs.radec2pixelxy(ocenter.ra, ocenter.dec) xx,yy = np.meshgrid(np.arange(W)+1, np.arange(H)+1) c_d2 = (xc - xx)**2 + (yc - yy)**2 best = np.ones((H,W), bool) for o in O: ok,x,y = wcs.radec2pixelxy(o.ra, o.dec) d2 = (x - xx)**2 + (y - yy)**2 best[d2 < c_d2] = False del d2 del c_d2,xx,yy # plt.clf() # plt.imshow(best, interpolation='nearest', origin='lower', cmap='gray', # vmin=0, vmax=1) # ps.savefig() plt.clf() plt.imshow(nexp * best, interpolation='nearest', origin='lower', vmin=-0.5, vmax=hi+0.5, cmap=cmap, extent=extent) plt.colorbar(ticks=np.arange(hi+1)) ps.savefig() plt.clf() n,b,p = plt.hist(np.clip(nexp[best], 0, hi), range=(-0.5,hi+0.5), bins=hi+1) plt.xlim(-0.5, hi+0.5) ps.savefig() print('b', b) print('n', n) print('fracs', np.array(n) / np.sum(n)) print('pcts', ', '.join(['%.1f' % f for f in 100. * np.array(n)/np.sum(n)]))
def main(outfn='ccds-annotated.fits', ccds=None): survey = LegacySurveyData(ccds=ccds) if ccds is None: ccds = survey.get_ccds() # File from the "observing" svn repo: # https://desi.lbl.gov/svn/decam/code/observing/trunk tiles = fits_table('decam-tiles_obstatus.fits') I = survey.photometric_ccds(ccds) ccds.photometric = np.zeros(len(ccds), bool) ccds.photometric[I] = True I = survey.apply_blacklist(ccds) ccds.blacklist_ok = np.zeros(len(ccds), bool) ccds.blacklist_ok[I] = True ccds.good_region = np.empty((len(ccds), 4), np.int16) ccds.good_region[:,:] = -1 ccds.ra0 = np.zeros(len(ccds), np.float64) ccds.dec0 = np.zeros(len(ccds), np.float64) ccds.ra1 = np.zeros(len(ccds), np.float64) ccds.dec1 = np.zeros(len(ccds), np.float64) ccds.ra2 = np.zeros(len(ccds), np.float64) ccds.dec2 = np.zeros(len(ccds), np.float64) ccds.ra3 = np.zeros(len(ccds), np.float64) ccds.dec3 = np.zeros(len(ccds), np.float64) ccds.dra = np.zeros(len(ccds), np.float32) ccds.ddec = np.zeros(len(ccds), np.float32) ccds.ra_center = np.zeros(len(ccds), np.float64) ccds.dec_center = np.zeros(len(ccds), np.float64) ccds.sig1 = np.zeros(len(ccds), np.float32) ccds.meansky = np.zeros(len(ccds), np.float32) ccds.stdsky = np.zeros(len(ccds), np.float32) ccds.maxsky = np.zeros(len(ccds), np.float32) ccds.minsky = np.zeros(len(ccds), np.float32) ccds.pixscale_mean = np.zeros(len(ccds), np.float32) ccds.pixscale_std = np.zeros(len(ccds), np.float32) ccds.pixscale_max = np.zeros(len(ccds), np.float32) ccds.pixscale_min = np.zeros(len(ccds), np.float32) ccds.psfnorm_mean = np.zeros(len(ccds), np.float32) ccds.psfnorm_std = np.zeros(len(ccds), np.float32) ccds.galnorm_mean = np.zeros(len(ccds), np.float32) ccds.galnorm_std = np.zeros(len(ccds), np.float32) gaussgalnorm = np.zeros(len(ccds), np.float32) # 2nd moments ccds.psf_mx2 = np.zeros(len(ccds), np.float32) ccds.psf_my2 = np.zeros(len(ccds), np.float32) ccds.psf_mxy = np.zeros(len(ccds), np.float32) # ccds.psf_a = np.zeros(len(ccds), np.float32) ccds.psf_b = np.zeros(len(ccds), np.float32) ccds.psf_theta = np.zeros(len(ccds), np.float32) ccds.psf_ell = np.zeros(len(ccds), np.float32) ccds.humidity = np.zeros(len(ccds), np.float32) ccds.outtemp = np.zeros(len(ccds), np.float32) ccds.tileid = np.zeros(len(ccds), np.int32) ccds.tilepass = np.zeros(len(ccds), np.uint8) ccds.tileebv = np.zeros(len(ccds), np.float32) plvers = [] for iccd,ccd in enumerate(ccds): im = survey.get_image_object(ccd) print('Reading CCD %i of %i:' % (iccd+1, len(ccds)), im) X = im.get_good_image_subregion() for i,x in enumerate(X): if x is not None: ccds.good_region[iccd,i] = x W,H = ccd.width, ccd.height psf = None wcs = None sky = None try: tim = im.get_tractor_image(pixPsf=True, splinesky=True, subsky=False, pixels=False, dq=False, invvar=False) except: import traceback traceback.print_exc() plvers.append('') continue if tim is None: plvers.append('') continue psf = tim.psf wcs = tim.wcs.wcs sky = tim.sky hdr = tim.primhdr # print('Got PSF', psf) # print('Got sky', type(sky)) # print('Got WCS', wcs) ccds.humidity[iccd] = hdr.get('HUMIDITY') ccds.outtemp[iccd] = hdr.get('OUTTEMP') ccds.sig1[iccd] = tim.sig1 plvers.append(tim.plver) # parse 'DECaLS_15150_r' to get tile number obj = ccd.object.strip() words = obj.split('_') tile = None if len(words) == 3 and words[0] == 'DECaLS': try: tileid = int(words[1]) tile = tiles[tileid - 1] if tile.tileid != tileid: I = np.flatnonzero(tile.tileid == tileid) tile = tiles[I[0]] except: pass if tile is not None: ccds.tileid [iccd] = tile.tileid ccds.tilepass[iccd] = 
tile.get('pass') ccds.tileebv [iccd] = tile.ebv_med # Instantiate PSF on a grid S = 32 xx = np.linspace(1+S, W-S, 5) yy = np.linspace(1+S, H-S, 5) xx,yy = np.meshgrid(xx, yy) psfnorms = [] galnorms = [] for x,y in zip(xx.ravel(), yy.ravel()): p = im.psf_norm(tim, x=x, y=y) g = im.galaxy_norm(tim, x=x, y=y) psfnorms.append(p) galnorms.append(g) ccds.psfnorm_mean[iccd] = np.mean(psfnorms) ccds.psfnorm_std [iccd] = np.std (psfnorms) ccds.galnorm_mean[iccd] = np.mean(galnorms) ccds.galnorm_std [iccd] = np.std (galnorms) # PSF in center of field cx,cy = (W+1)/2., (H+1)/2. p = psf.getPointSourcePatch(cx, cy).patch ph,pw = p.shape px,py = np.meshgrid(np.arange(pw), np.arange(ph)) psum = np.sum(p) # print('psum', psum) p /= psum # centroids cenx = np.sum(p * px) ceny = np.sum(p * py) # print('cenx,ceny', cenx,ceny) # second moments x2 = np.sum(p * (px - cenx)**2) y2 = np.sum(p * (py - ceny)**2) xy = np.sum(p * (px - cenx)*(py - ceny)) # semi-major/minor axes and position angle theta = np.rad2deg(np.arctan2(2 * xy, x2 - y2) / 2.) theta = np.abs(theta) * np.sign(xy) s = np.sqrt(((x2 - y2)/2.)**2 + xy**2) a = np.sqrt((x2 + y2) / 2. + s) b = np.sqrt((x2 + y2) / 2. - s) ell = 1. - b/a # print('PSF second moments', x2, y2, xy) # print('PSF position angle', theta) # print('PSF semi-axes', a, b) # print('PSF ellipticity', ell) ccds.psf_mx2[iccd] = x2 ccds.psf_my2[iccd] = y2 ccds.psf_mxy[iccd] = xy ccds.psf_a[iccd] = a ccds.psf_b[iccd] = b ccds.psf_theta[iccd] = theta ccds.psf_ell [iccd] = ell print('Computing Gaussian approximate PSF quantities...') # Galaxy norm using Gaussian approximation of PSF. realpsf = tim.psf tim.psf = im.read_psf_model(0, 0, gaussPsf=True, psf_sigma=tim.psf_sigma) gaussgalnorm[iccd] = im.galaxy_norm(tim, x=cx, y=cy) tim.psf = realpsf # Sky -- evaluate on a grid (every ~10th pixel) skygrid = sky.evaluateGrid(np.linspace(0, ccd.width-1, int(1+ccd.width/10)), np.linspace(0, ccd.height-1, int(1+ccd.height/10))) ccds.meansky[iccd] = np.mean(skygrid) ccds.stdsky[iccd] = np.std(skygrid) ccds.maxsky[iccd] = skygrid.max() ccds.minsky[iccd] = skygrid.min() # WCS ccds.ra0[iccd],ccds.dec0[iccd] = wcs.pixelxy2radec(1, 1) ccds.ra1[iccd],ccds.dec1[iccd] = wcs.pixelxy2radec(1, H) ccds.ra2[iccd],ccds.dec2[iccd] = wcs.pixelxy2radec(W, H) ccds.ra3[iccd],ccds.dec3[iccd] = wcs.pixelxy2radec(W, 1) midx, midy = (W+1)/2., (H+1)/2. rc,dc = wcs.pixelxy2radec(midx, midy) ra,dec = wcs.pixelxy2radec([1,W,midx,midx], [midy,midy,1,H]) ccds.dra [iccd] = max(degrees_between(ra, dc+np.zeros_like(ra), rc, dc)) ccds.ddec[iccd] = max(degrees_between(rc+np.zeros_like(dec), dec, rc, dc)) ccds.ra_center [iccd] = rc ccds.dec_center[iccd] = dc # Compute scale change across the chip # how many pixels to step step = 10 xx = np.linspace(1+step, W-step, 5) yy = np.linspace(1+step, H-step, 5) xx,yy = np.meshgrid(xx, yy) pixscale = [] for x,y in zip(xx.ravel(), yy.ravel()): sx = [x-step, x-step, x+step, x+step, x-step] sy = [y-step, y+step, y+step, y-step, y-step] sr,sd = wcs.pixelxy2radec(sx, sy) rc,dc = wcs.pixelxy2radec(x, y) # project around a tiny little TAN WCS at (x,y), with 1" pixels locwcs = Tan(rc, dc, 0., 0., 1./3600, 0., 0., 1./3600, 1., 1.) 
ok,lx,ly = locwcs.radec2pixelxy(sr, sd) #print('local x,y:', lx, ly) A = polygon_area((lx, ly)) pixscale.append(np.sqrt(A / (2*step)**2)) # print('Pixel scales:', pixscale) ccds.pixscale_mean[iccd] = np.mean(pixscale) ccds.pixscale_min[iccd] = min(pixscale) ccds.pixscale_max[iccd] = max(pixscale) ccds.pixscale_std[iccd] = np.std(pixscale) ccds.plver = np.array(plvers) sfd = tractor.sfd.SFDMap() allbands = 'ugrizY' filts = ['%s %s' % ('DES', f) for f in allbands] wisebands = ['WISE W1', 'WISE W2', 'WISE W3', 'WISE W4'] ebv,ext = sfd.extinction(filts + wisebands, ccds.ra_center, ccds.dec_center, get_ebv=True) ext = ext.astype(np.float32) ccds.ebv = ebv.astype(np.float32) ccds.decam_extinction = ext[:,:len(allbands)] ccds.wise_extinction = ext[:,len(allbands):] # Depth detsig1 = ccds.sig1 / ccds.psfnorm_mean depth = 5. * detsig1 # that's flux in nanomaggies -- convert to mag ccds.psfdepth = -2.5 * (np.log10(depth) - 9) detsig1 = ccds.sig1 / ccds.galnorm_mean depth = 5. * detsig1 # that's flux in nanomaggies -- convert to mag ccds.galdepth = -2.5 * (np.log10(depth) - 9) # Depth using Gaussian FWHM. psf_sigma = ccds.fwhm / 2.35 gnorm = 1./(2. * np.sqrt(np.pi) * psf_sigma) detsig1 = ccds.sig1 / gnorm depth = 5. * detsig1 # that's flux in nanomaggies -- convert to mag ccds.gausspsfdepth = -2.5 * (np.log10(depth) - 9) # Gaussian galaxy depth detsig1 = ccds.sig1 / gaussgalnorm depth = 5. * detsig1 # that's flux in nanomaggies -- convert to mag ccds.gaussgaldepth = -2.5 * (np.log10(depth) - 9) ccds.writeto(outfn)
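# The depth computations above all follow the same pattern: a 5-sigma flux
# limit of 5 * sig1 / norm in nanomaggies, converted to an AB magnitude.
# A small standalone helper with the same math, pulled out for clarity (not
# part of the original script):
def five_sigma_depth(sig1, norm):
    import numpy as np
    detsig1 = sig1 / norm        # per-pixel noise divided by the profile norm
    flux = 5. * detsig1          # 5-sigma flux limit, in nanomaggies
    return -2.5 * (np.log10(flux) - 9.)   # AB mag (1 nanomaggy = 22.5 mag)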
main(args=['--brick', '2447p120', '--zoom', '1020', '1070', '2775', '2815',
           '--no-wise', '--force-all', '--no-write',
           '--survey-dir', surveydir,
           '--outdir', outdir,
           '--checkpoint', checkpoint_fn,
           '--checkpoint-period', '1',
           '--threads', '2'])

# Read catalog into Tractor sources to test read_fits_catalog
from legacypipe.catalog import read_fits_catalog
from legacypipe.survey import LegacySurveyData
from astrometry.util.fits import fits_table
from tractor.galaxy import DevGalaxy
from tractor import PointSource

survey = LegacySurveyData(survey_dir=outdir)
fn = survey.find_file('tractor', brick='2447p120')
T = fits_table(fn)
cat = read_fits_catalog(T)
print('Read catalog:', cat)
assert(len(cat) == 2)
src = cat[0]
assert(type(src) == DevGalaxy)
assert(np.abs(src.pos.ra - 244.77975) < 0.00001)
assert(np.abs(src.pos.dec - 12.07234) < 0.00001)
src = cat[1]
assert(type(src) == PointSource)
assert(np.abs(src.pos.ra - 244.77833) < 0.00001)
assert(np.abs(src.pos.dec - 12.07252) < 0.00001)
# DevGalaxy(pos=RaDecPos[244.77975494973529, 12.072348111713127],
#           brightness=NanoMaggies: g=19.2, r=17.9, z=17.1,
#           shape=re=2.09234, e1=-0.198453, e2=0.023652)
# PointSource(RaDecPos[244.77833280764278, 12.072521274981987],
#             NanoMaggies: g=25, r=23, z=21.7)
def main(): """Main program. """ import argparse parser = argparse.ArgumentParser(description="This script is used to produce lists of CCDs or bricks, for production purposes (building qdo queue, eg).") parser.add_argument('--calibs', action='store_true', help='Output CCDs that need to be calibrated.') parser.add_argument('--nper', type=int, default=None, help='Batch N calibs per line') parser.add_argument('--forced', action='store_true', help='Output forced-photometry commands') parser.add_argument('--lsb', action='store_true', help='Output Low-Surface-Brightness commands') parser.add_argument('--touching', action='store_true', help='Cut to only CCDs touching selected bricks') parser.add_argument('--near', action='store_true', help='Quick cut to only CCDs near selected bricks') parser.add_argument('--check', action='store_true', help='Check which calibrations actually need to run.') parser.add_argument('--check-coadd', action='store_true', help='Check which caoadds actually need to run.') parser.add_argument('--out', help='Output filename for calibs, default %(default)s', default='jobs') parser.add_argument('--command', action='store_true', help='Write out full command-line to run calib') parser.add_argument('--opt', help='With --command, extra options to add') parser.add_argument('--maxdec', type=float, help='Maximum Dec to run') parser.add_argument('--mindec', type=float, help='Minimum Dec to run') parser.add_argument('--region', help='Region to select') parser.add_argument('--bricks', help='Set bricks.fits file to load') parser.add_argument('--ccds', help='Set ccds.fits file to load') parser.add_argument('--ignore_cuts', action='store_true',default=False,help='no photometric or blacklist cuts') parser.add_argument('--save_to_fits', action='store_true',default=False,help='save cut brick,ccd to fits table') parser.add_argument('--name', action='store',default='dr3',help='save with this suffix, e.g. refers to ccds table') parser.add_argument('--delete-sky', action='store_true', help='Delete any existing sky calibration files') parser.add_argument('--delete-pvastrom', action='store_true', help='Delete any existing PV WCS calibration files') parser.add_argument('--write-ccds', help='Write CCDs list as FITS table?') parser.add_argument('--brickq', type=int, default=None, help='Queue only bricks with the given "brickq" value [0 to 3]') parser.add_argument('--brickq-deps', action='store_true', default=False, help='Queue bricks directly using qdo API, setting brickq dependencies') parser.add_argument('--queue', default='bricks', help='With --brickq-deps, the QDO queue name to use') opt = parser.parse_args() survey = LegacySurveyData() if opt.bricks is not None: B = fits_table(opt.bricks) log('Read', len(B), 'from', opt.bricks) else: B = survey.get_bricks() if opt.ccds is not None: T = fits_table(opt.ccds) log('Read', len(T), 'from', opt.ccds) else: T = survey.get_ccds() log(len(T), 'CCDs') T.index = np.arange(len(T)) if opt.ignore_cuts == False: I = survey.photometric_ccds(T) print(len(I), 'CCDs are photometric') T.cut(I) I = survey.apply_blacklist(T) print(len(I), 'CCDs are not blacklisted') T.cut(I) print(len(T), 'CCDs remain') # I,J,d,counts = match_radec(B.ra, B.dec, T.ra, T.dec, 0.2, nearest=True, count=True) # plt.clf() # plt.hist(counts, counts.max()+1) # plt.savefig('bricks.png') # B.cut(I[counts >= 9]) # plt.clf() # plt.plot(B.ra, B.dec, 'b.') # #plt.scatter(B.ra[I], B.dec[I], c=counts) # plt.savefig('bricks2.png') # DES Stripe82 #rlo,rhi = 350.,360. # rlo,rhi = 300., 10. 
# dlo,dhi = -6., 4. # TINY bit #rlo,rhi = 350.,351.1 #dlo,dhi = 0., 1.1 # EDR+ # 860 bricks # ~10,000 CCDs #rlo,rhi = 239,246 #dlo,dhi = 5, 13 # DR1 #rlo,rhi = 0, 360 # part 1 #dlo,dhi = 25, 40 # part 2 #dlo,dhi = 20,25 # part 3 #dlo,dhi = 15,20 # part 4 #dlo,dhi = 10,15 # part 5 #dlo,dhi = 5,10 # the rest #dlo,dhi = -11, 5 #dlo,dhi = 15,25.5 dlo,dhi = -25, 40 rlo,rhi = 0, 360 # Arjun says 3x3 coverage area is roughly # RA=240-252 DEC=6-12 (but not completely rectangular) # COSMOS #rlo,rhi = 148.9, 151.2 #dlo,dhi = 0.9, 3.5 # A nice well-behaved region (EDR2/3) # rlo,rhi = 243.6, 244.6 # dlo,dhi = 8.1, 8.6 # 56 bricks, ~725 CCDs #B.cut((B.ra > 240) * (B.ra < 242) * (B.dec > 5) * (B.dec < 7)) # 240 bricks, ~3000 CCDs #B.cut((B.ra > 240) * (B.ra < 244) * (B.dec > 5) * (B.dec < 9)) # 535 bricks, ~7000 CCDs #B.cut((B.ra > 240) * (B.ra < 245) * (B.dec > 5) * (B.dec < 12)) if opt.region in ['test1', 'test2', 'test3', 'test4']: nm = dict(test1='2446p115', # weird stuff around bright star test2='1183p292', # faint sources around bright galaxy test3='3503p005', # DES test4='1163p277', # Pollux )[opt.region] B.cut(np.flatnonzero(np.array([s == nm for s in B.brickname]))) log('Cut to', len(B), 'bricks') log(B.ra, B.dec) dlo,dhi = -90,90 rlo,rhi = 0, 360 elif opt.region == 'edr': # EDR: # 535 bricks, ~7000 CCDs rlo,rhi = 240,245 dlo,dhi = 5, 12 elif opt.region == 'edrplus': rlo,rhi = 235,248 dlo,dhi = 5, 15 elif opt.region == 'edr-south': rlo,rhi = 240,245 dlo,dhi = 5, 10 elif opt.region == 'cosmos1': # 16 bricks in the core of the COSMOS field. rlo,rhi = 149.75, 150.75 dlo,dhi = 1.6, 2.6 elif opt.region == 'pristine': # Stream? rlo,rhi = 240,250 dlo,dhi = 10,15 elif opt.region == 'des': dlo, dhi = -6., 4. rlo, rhi = 317., 7. T.cut(np.flatnonzero(np.array(['CPDES82' in fn for fn in T.cpimage]))) log('Cut to', len(T), 'CCDs with "CPDES82" in filename') elif opt.region == 'subdes': rlo,rhi = 320., 360. dlo,dhi = -1.25, 1.25 elif opt.region == 'northwest': rlo,rhi = 240,360 dlo,dhi = 20,40 elif opt.region == 'north': rlo,rhi = 120,240 dlo,dhi = 20,40 elif opt.region == 'northeast': rlo,rhi = 0,120 dlo,dhi = 20,40 elif opt.region == 'southwest': rlo,rhi = 240,360 dlo,dhi = -20,0 elif opt.region == 'south': rlo,rhi = 120,240 dlo,dhi = -20,0 elif opt.region == 'southeast': rlo,rhi = 0,120 dlo,dhi = -20,0 elif opt.region == 'southsoutheast': rlo,rhi = 0,120 dlo,dhi = -20,-10 elif opt.region == 'midwest': rlo,rhi = 240,360 dlo,dhi = 0,20 elif opt.region == 'middle': rlo,rhi = 120,240 dlo,dhi = 0,20 elif opt.region == 'mideast': rlo,rhi = 0,120 dlo,dhi = 0,20 elif opt.region == 'grz': # Bricks with grz coverage. # Be sure to use --bricks survey-bricks-in-dr1.fits # which has_[grz] columns. B.cut((B.has_g == 1) * (B.has_r == 1) * (B.has_z == 1)) log('Cut to', len(B), 'bricks with grz coverage') elif opt.region == 'nogrz': # Bricks without grz coverage. # Be sure to use --bricks survey-bricks-in-dr1.fits # which has_[grz] columns. B.cut(np.logical_not((B.has_g == 1) * (B.has_r == 1) * (B.has_z == 1))) log('Cut to', len(B), 'bricks withOUT grz coverage') elif opt.region == 'deep2': rlo,rhi = 250,260 dlo,dhi = 30,35 elif opt.region == 'deep2f3': rlo,rhi = 351.25, 353.75 dlo,dhi = 0, 0.5 elif opt.region == 'virgo': rlo,rhi = 185,190 dlo,dhi = 10, 15 elif opt.region == 'virgo2': rlo,rhi = 182,192 dlo,dhi = 8, 18 elif opt.region == 'lsb': rlo,rhi = 147.2, 147.8 dlo,dhi = -0.4, 0.4 elif opt.region == 'eboss-elg': # RA -45 to +45 # Dec -5 to +7 rlo,rhi = 315., 45. dlo,dhi = -5., 7. 
elif opt.region == 'eboss-ngc': # NGC ELGs # RA 115 to 175 # Dec 15 to 30 rlo,rhi = 115., 175. dlo,dhi = 15., 30. elif opt.region == 'mzls': dlo,dhi = 30., 90. elif opt.region == 'dr4-bootes': # https://desi.lbl.gov/trac/wiki/DecamLegacy/DR4sched #dlo,dhi = 34., 35. #rlo,rhi = 209.5, 210.5 dlo,dhi = 33., 36. rlo,rhi = 216.5, 219.5 if opt.mindec is not None: dlo = opt.mindec if opt.maxdec is not None: dhi = opt.maxdec if rlo < rhi: B.cut((B.ra >= rlo) * (B.ra <= rhi) * (B.dec >= dlo) * (B.dec <= dhi)) else: # RA wrap B.cut(np.logical_or(B.ra >= rlo, B.ra <= rhi) * (B.dec >= dlo) * (B.dec <= dhi)) log(len(B), 'bricks in range') for name in B.get('brickname'): print(name) B.writeto('bricks-cut.fits') I,J,d = match_radec(B.ra, B.dec, T.ra, T.dec, survey.bricksize) keep = np.zeros(len(B), bool) for i in I: keep[i] = True B.cut(keep) log('Cut to', len(B), 'bricks near CCDs') plt.clf() plt.plot(B.ra, B.dec, 'b.') plt.title('DR3 bricks') plt.axis([360, 0, np.min(B.dec)-1, np.max(B.dec)+1]) plt.savefig('bricks.png') if opt.brickq is not None: B.cut(B.brickq == opt.brickq) log('Cut to', len(B), 'with brickq =', opt.brickq) if opt.touching: keep = np.zeros(len(T), bool) for j in J: keep[j] = True T.cut(keep) log('Cut to', len(T), 'CCDs near bricks') # Aside -- how many near DR1=1 CCDs? if False: T2 = D.get_ccds() log(len(T2), 'CCDs') T2.cut(T2.dr1 == 1) log(len(T2), 'CCDs marked DR1=1') log(len(B), 'bricks in range') I,J,d = match_radec(B.ra, B.dec, T2.ra, T2.dec, survey.bricksize) keep = np.zeros(len(B), bool) for i in I: keep[i] = True B2 = B[keep] log('Total of', len(B2), 'bricks near CCDs with DR1=1') for band in 'grz': Tb = T2[T2.filter == band] log(len(Tb), 'in filter', band) I,J,d = match_radec(B2.ra, B2.dec, Tb.ra, Tb.dec, survey.bricksize) good = np.zeros(len(B2), np.uint8) for i in I: good[i] = 1 B2.set('has_' + band, good) B2.writeto('survey-bricks-in-dr1.fits') sys.exit(0) # sort by dec decreasing #B.cut(np.argsort(-B.dec)) # RA increasing B.cut(np.argsort(B.ra)) for b in B: if opt.check: fn = 'dr1n/tractor/%s/tractor-%s.fits' % (b.brickname[:3], b.brickname) if os.path.exists(fn): print('Exists:', fn, file=sys.stderr) continue if opt.check_coadd: fn = 'dr1b/coadd/%s/%s/decals-%s-image.jpg' % (b.brickname[:3], b.brickname, b.brickname) if os.path.exists(fn): print('Exists:', fn, file=sys.stderr) continue print(b.brickname) if opt.save_to_fits: assert(opt.touching) # Write cut tables to file for tab,typ in zip([B,T],['bricks','ccds']): fn='%s-%s-cut.fits' % (typ,opt.name) if os.path.exists(fn): os.remove(fn) tab.writeto(fn) print('Wrote %s' % fn) # Write text files listing ccd and filename names nm1,nm2= 'ccds-%s.txt'% opt.name,'filenames-%s.txt' % opt.name if os.path.exists(nm1): os.remove(nm1) if os.path.exists(nm2): os.remove(nm2) f1,f2=open(nm1,'w'),open(nm2,'w') fns= list(set(T.get('image_filename'))) for fn in fns: f2.write('%s\n' % fn.strip()) for ti in T: f1.write('%s\n' % ti.get('image_filename').strip()) f1.close() f2.close() print('Wrote *-names.txt') if opt.brickq_deps: import qdo from legacypipe.survey import on_bricks_dependencies #... find Queue... 
q = qdo.connect(opt.queue, create_ok=True) print('Connected to QDO queue', opt.queue, q) brick_to_task = dict() I = survey.photometric_ccds(T) print(len(I), 'CCDs are photometric') T.cut(I) I = survey.apply_blacklist(T) print(len(I), 'CCDs are not blacklisted') T.cut(I) print(len(T), 'CCDs remaining') T.wra = T.ra + (T.ra > 180) * -360 wra = rlo - 360 plt.clf() plt.plot(T.wra, T.dec, 'b.') ax = [wra, rhi, dlo, dhi] plt.axis(ax) plt.title('CCDs') plt.savefig('q-ccds.png') B.wra = B.ra + (B.ra > 180) * -360 # this slight overestimate (for DECam images) is fine radius = 0.3 Iccds = match_radec(B.ra, B.dec, T.ra, T.dec, radius, indexlist=True) ikeep = [] for ib,(b,Iccd) in enumerate(zip(B, Iccds)): if Iccd is None or len(Iccd) == 0: print('No matched CCDs to brick', b.brickname) continue wcs = wcs_for_brick(b) cI = ccds_touching_wcs(wcs, T[np.array(Iccd)]) print(len(cI), 'CCDs touching brick', b.brickname) if len(cI) == 0: continue ikeep.append(ib) B.cut(np.array(ikeep)) print('Cut to', len(B), 'bricks touched by CCDs') for brickq in range(4): I = np.flatnonzero(B.brickq == brickq) print(len(I), 'bricks with brickq =', brickq) J = np.flatnonzero(B.brickq < brickq) preB = B[J] reqs = [] if brickq > 0: for b in B[I]: # find brick dependencies brickdeps = on_bricks_dependencies(b, survey, bricks=preB) # convert to task ids taskdeps = [brick_to_task.get(b.brickname,None) for b in brickdeps] # If we dropped a dependency brick from a previous brickq because # of no overlapping CCDs, it won't appear in the brick_to_task map. taskdeps = [t for t in taskdeps if t is not None] reqs.append(taskdeps) plt.clf() plt.plot(B.wra, B.dec, '.', color='0.5') plt.plot(B.wra[I], B.dec[I], 'b.') plt.axis(ax) plt.title('Bricks: brickq=%i' % brickq) plt.savefig('q-bricks-%i.png' % brickq) # submit to qdo queue print('Queuing', len(B[I]), 'bricks') if brickq == 0: reqs = None else: assert(len(I) == len(reqs)) taskids = q.add_multiple(B.brickname[I], requires=reqs) assert(len(taskids) == len(I)) print('Queued', len(taskids), 'bricks') brick_to_task.update(dict(zip(B.brickname[I], taskids))) if not (opt.calibs or opt.forced or opt.lsb): sys.exit(0) bands = 'grz' log('Filters:', np.unique(T.filter)) T.cut(np.flatnonzero(np.array([f in bands for f in T.filter]))) log('Cut to', len(T), 'CCDs in filters', bands) if opt.touching: allI = set() for b in B: wcs = wcs_for_brick(b) I = ccds_touching_wcs(wcs, T) log(len(I), 'CCDs for brick', b.brickid, 'RA,Dec (%.2f, %.2f)' % (b.ra, b.dec)) if len(I) == 0: continue allI.update(I) allI = list(allI) allI.sort() elif opt.near: # Roughly brick radius + DECam image radius radius = 0.35 allI,nil,nil = match_radec(T.ra, T.dec, B.ra, B.dec, radius, nearest=True) else: allI = np.arange(len(T)) if opt.write_ccds: T[allI].writeto(opt.write_ccds) log('Wrote', opt.write_ccds) ## Be careful here -- T has been cut; we want to write out T.index. ## 'allI' contains indices into T. 
if opt.forced: log('Writing forced-photometry commands to', opt.out) f = open(opt.out,'w') log('Total of', len(allI), 'CCDs') for j,i in enumerate(allI): expstr = '%08i' % T.expnum[i] outfn = os.path.join('forced', expstr[:5], expstr, 'decam-%s-%s-forced.fits' % (expstr, T.ccdname[i])) imgfn = os.path.join(survey.survey_dir, 'images', T.image_filename[i].strip()) if (not os.path.exists(imgfn) and imgfn.endswith('.fz') and os.path.exists(imgfn[:-3])): imgfn = imgfn[:-3] #f.write('python legacypipe/forced_photom_decam.py %s %i DR3 %s\n' % # (imgfn, T.image_hdu[i], outfn)) f.write('python legacypipe/forced_photom_decam.py --apphot --constant-invvar %i %s DR3 %s\n' % (T.expnum[i], T.ccdname[i], outfn)) f.close() log('Wrote', opt.out) sys.exit(0) if opt.lsb: log('Writing LSB commands to', opt.out) f = open(opt.out,'w') log('Total of', len(allI), 'CCDs') for j,i in enumerate(allI): exp = T.expnum[i] ext = T.ccdname[i].strip() outfn = 'lsb/lsb-%s-%s.fits' % (exp, ext) f.write('python projects/desi/lsb.py --expnum %i --extname %s --out %s -F -n > lsb/lsb-%s-%s.log 2>&1\n' % (exp, ext, outfn, exp, ext)) f.close() log('Wrote', opt.out) sys.exit(0) log('Writing calibs to', opt.out) f = open(opt.out,'w') log('Total of', len(allI), 'CCDs') batch = [] def write_batch(f, batch, cmd): if cmd is None: cmd = '' f.write(cmd + ' '.join(batch) + '\n') cmd = None if opt.command: cmd = 'python legacypipe/run-calib.py ' if opt.opt is not None: cmd += opt.opt + ' ' for j,i in enumerate(allI): if opt.delete_sky or opt.delete_pvastrom: log(j+1, 'of', len(allI)) im = survey.get_image_object(T[i]) if opt.delete_sky and os.path.exists(im.skyfn): log(' deleting:', im.skyfn) os.unlink(im.skyfn) if opt.delete_pvastrom and os.path.exists(im.pvwcsfn): log(' deleting:', im.pvwcsfn) os.unlink(im.pvwcsfn) if opt.check: log(j+1, 'of', len(allI)) im = survey.get_image_object(T[i]) if not im.run_calibs(im, just_check=True): log('Calibs for', im.expnum, im.ccdname, im.calname, 'already done') continue if opt.command: s = '%i-%s' % (T.expnum[i], T.ccdname[i]) prefix = 'python legacypipe/run-calib.py ' + opt.opt #('python legacypipe/run-calib.py --expnum %i --ccdname %s' % # (T.expnum[i], T.ccdname[i])) else: s = '%i' % T.index[i] prefix = '' if j < 10: print('Index', T.index[i], 'expnum', T.expnum[i], 'ccdname', T.ccdname[i], 'filename', T.image_filename[i]) if not opt.nper: f.write(prefix + s + '\n') else: batch.append(s) if len(batch) >= opt.nper: write_batch(f, batch, cmd) batch = [] if opt.check: f.flush() if len(batch): write_batch(f, batch, cmd) f.close() log('Wrote', opt.out) return 0
def main(): survey = LegacySurveyData() ccds = survey.get_ccds() print(len(ccds), 'CCDs') expnums = np.unique(ccds.expnum) print(len(expnums), 'unique exposures') for expnum in expnums: expnumstr = '%08i' % expnum skyoutfn = os.path.join('splinesky', expnumstr[:5], 'decam-%s.fits' % expnumstr) psfoutfn = os.path.join('psfex', expnumstr[:5], 'decam-%s.fits' % expnumstr) if os.path.exists(skyoutfn) and os.path.exists(psfoutfn): print('Exposure', expnum, 'is done already') continue C = ccds[ccds.expnum == expnum] print(len(C), 'CCDs in expnum', expnum) psfex = [] psfhdrvals = [] splinesky = [] skyhdrvals = [] for ccd in C: im = survey.get_image_object(ccd) fn = im.splineskyfn if os.path.exists(fn): T = fits_table(fn) splinesky.append(T) # print(fn) # T.about() hdr = fitsio.read_header(fn) skyhdrvals.append([hdr[k] for k in [ 'SKY', 'LEGPIPEV', 'PLVER']] + [expnum, ccd.ccdname]) else: print('File not found:', fn) fn = im.psffn if os.path.exists(fn): T = fits_table(fn) hdr = fitsio.read_header(fn, ext=1) keys = ['LOADED', 'ACCEPTED', 'CHI2', 'POLNAXIS', 'POLNGRP', 'PSF_FWHM', 'PSF_SAMP', 'PSFNAXIS', 'PSFAXIS1', 'PSFAXIS2', 'PSFAXIS3',] if hdr['POLNAXIS'] == 0: # No polynomials. Fake it. T.polgrp1 = np.array([0]) T.polgrp2 = np.array([0]) T.polname1 = np.array(['fake']) T.polname2 = np.array(['fake']) T.polzero1 = np.array([0]) T.polzero2 = np.array([0]) T.polscal1 = np.array([1]) T.polscal2 = np.array([1]) T.poldeg1 = np.array([0]) T.poldeg2 = np.array([0]) else: keys.extend([ 'POLGRP1', 'POLNAME1', 'POLZERO1', 'POLSCAL1', 'POLGRP2', 'POLNAME2', 'POLZERO2', 'POLSCAL2', 'POLDEG1']) for k in keys: T.set(k.lower(), np.array([hdr[k]])) psfex.append(T) #print(fn) #T.about() hdr = fitsio.read_header(fn) psfhdrvals.append([hdr.get(k,'') for k in [ 'LEGPIPEV', 'PLVER']] + [expnum, ccd.ccdname]) else: print('File not found:', fn) if len(psfex): padded = pad_arrays([p.psf_mask[0] for p in psfex]) cols = psfex[0].columns() cols.remove('psf_mask') T = merge_tables(psfex, columns=cols) T.psf_mask = np.concatenate([[p] for p in padded]) T.legpipev = np.array([h[0] for h in psfhdrvals]) T.plver = np.array([h[1] for h in psfhdrvals]) T.expnum = np.array([h[2] for h in psfhdrvals]) T.ccdname = np.array([h[3] for h in psfhdrvals]) fn = psfoutfn trymakedirs(fn, dir=True) T.writeto(fn) print('Wrote', fn) if len(splinesky): T = fits_table() T.gridw = np.array([t.gridvals[0].shape[1] for t in splinesky]) T.gridh = np.array([t.gridvals[0].shape[0] for t in splinesky]) padded = pad_arrays([t.gridvals[0] for t in splinesky]) T.gridvals = np.concatenate([[p] for p in padded]) padded = pad_arrays([t.xgrid[0] for t in splinesky]) T.xgrid = np.concatenate([[p] for p in padded]) padded = pad_arrays([t.xgrid[0] for t in splinesky]) T.ygrid = np.concatenate([[p] for p in padded]) cols = splinesky[0].columns() print('Columns:', cols) for c in ['gridvals', 'xgrid', 'ygrid']: cols.remove(c) T.add_columns_from(merge_tables(splinesky, columns=cols)) T.skyclass = np.array([h[0] for h in skyhdrvals]) T.legpipev = np.array([h[1] for h in skyhdrvals]) T.plver = np.array([h[2] for h in skyhdrvals]) T.expnum = np.array([h[3] for h in skyhdrvals]) T.ccdname = np.array([h[4] for h in skyhdrvals]) fn = skyoutfn trymakedirs(fn, dir=True) T.writeto(fn) print('Wrote', fn)
def psf_residuals(expnum,ccdname,stampsize=35,nstar=30, magrange=(13,17),verbose=0, splinesky=False): # Set the debugging level. if verbose==0: lvl = logging.INFO else: lvl = logging.DEBUG logging.basicConfig(level=lvl,format='%(message)s',stream=sys.stdout) pngprefix = 'qapsf-{}-{}'.format(expnum,ccdname) # Gather all the info we need about this CCD. survey = LegacySurveyData() ccd = survey.find_ccds(expnum=expnum,ccdname=ccdname)[0] band = ccd.filter ps1band = dict(g=0,r=1,i=2,z=3,Y=4) print('Band {}'.format(band)) #scales = dict(g=0.0066, r=0.01, z=0.025) #vmin, vmax = np.arcsinh(-1), np.arcsinh(100) #print(scales[band]) im = survey.get_image_object(ccd) iminfo = im.get_image_info() H,W = iminfo['dims'] wcs = im.get_wcs() # Choose a uniformly selected subset of PS1 stars on this CCD. ps1 = ps1cat(ccdwcs=wcs) cat = ps1.get_stars(band=band,magrange=magrange) rand = np.random.RandomState(seed=expnum*ccd.ccdnum) these = rand.choice(len(cat)-1,nstar,replace=False) #these = rand.random_integers(0,len(cat)-1,nstar) cat = cat[these] cat = cat[np.argsort(cat.median[:,ps1band[band]])] # sort by magnitude #print(cat.nmag_ok) get_tim_kwargs = dict(pixPsf=True, splinesky=splinesky) # Make a QAplot of the positions of all the stars. tim = im.get_tractor_image(**get_tim_kwargs) img = tim.getImage() #img = tim.getImage()/scales[band] fig = plt.figure(figsize=(5,10)) ax = fig.gca() ax.get_xaxis().get_major_formatter().set_useOffset(False) #ax.imshow(np.arcsinh(img),cmap='gray',interpolation='nearest', # origin='lower',vmin=vmax,vmax=vmax) ax.imshow(img, **tim.ima) ax.axis('off') ax.set_title('{}: {}/{} AM={:.2f} Seeing={:.3f}"'. format(band,expnum,ccdname,ccd.airmass,ccd.seeing)) for istar, ps1star in enumerate(cat): ra, dec = (ps1star.ra, ps1star.dec) ok, xpos, ypos = wcs.radec2pixelxy(ra, dec) ax.text(xpos,ypos,'{:2d}'.format(istar+1),color='red', horizontalalignment='left') circ = plt.Circle((xpos,ypos),radius=30,color='g',fill=False,lw=1) ax.add_patch(circ) #radec = wcs.radec_bounds() #ax.scatter(cat.ra,cat.dec) #ax.set_xlim([radec[1],radec[0]])#*[1.0002,0.9998]) #ax.set_ylim([radec[2],radec[3]])#*[0.985,1.015]) #ax.set_xlabel('$RA\ (deg)$',fontsize=18) #ax.set_ylabel('$Dec\ (deg)$',fontsize=18) fig.savefig(pngprefix+'-ccd.png',bbox_inches='tight') # Initialize the many-stamp QAplot ncols = 3 nrows = np.ceil(nstar/ncols).astype('int') inchperstamp = 2.0 fig = plt.figure(figsize=(inchperstamp*3*ncols,inchperstamp*nrows)) irow = 0 icol = 0 for istar, ps1star in enumerate(cat): ra, dec = (ps1star.ra, ps1star.dec) mag = ps1star.median[ps1band[band]] # r-band ok, xpos, ypos = wcs.radec2pixelxy(ra, dec) ix,iy = int(xpos), int(ypos) # create a little tractor Image object around the star slc = (slice(max(iy-stampsize, 0), min(iy+stampsize+1, H)), slice(max(ix-stampsize, 0), min(ix+stampsize+1, W))) # The PSF model 'const2Psf' is the one used in DR1: a 2-component # Gaussian fit to PsfEx instantiated in the image center. tim = im.get_tractor_image(slc=slc, **get_tim_kwargs) stamp = tim.getImage() ivarstamp = tim.getInvvar() # Initialize a tractor PointSource from PS1 measurements flux = NanoMaggies.magToNanomaggies(mag) star = PointSource(RaDecPos(ra,dec), NanoMaggies(**{band: flux})) # Fit just the source RA,Dec,flux. 
tractor = Tractor([tim], [star]) tractor.freezeParam('images') print('2-component MOG:', tim.psf) tractor.printThawedParams() for step in range(50): dlnp,X,alpha = tractor.optimize() if dlnp < 0.1: break print('Fit:', star) model_mog = tractor.getModelImage(0) chi2_mog = -2.0*tractor.getLogLikelihood() mag_mog = NanoMaggies.nanomaggiesToMag(star.brightness)[0] # Now change the PSF model to a pixelized PSF model from PsfEx instantiated # at this place in the image. psf = PixelizedPsfEx(im.psffn) tim.psf = psf.constantPsfAt(xpos, ypos) #print('PSF model:', tim.psf) #tractor.printThawedParams() for step in range(50): dlnp,X,alpha = tractor.optimize() if dlnp < 0.1: break print('Fit:', star) model_psfex = tractor.getModelImage(0) chi2_psfex = -2.0*tractor.getLogLikelihood() mag_psfex = NanoMaggies.nanomaggiesToMag(star.brightness)[0] #mn, mx = np.percentile((stamp-model_psfex)[ivarstamp>0],[1,95]) sig = np.std((stamp-model_psfex)[ivarstamp>0]) mn, mx = [-2.0*sig,5*sig] # Generate a QAplot. if (istar>0) and (istar%(ncols)==0): irow = irow+1 icol = 3*istar - 3*ncols*irow #print(istar, irow, icol, icol+1, icol+2) ax1 = plt.subplot2grid((nrows,3*ncols), (irow,icol), aspect='equal') ax1.axis('off') #ax1.imshow(stamp, **tim.ima) ax1.imshow(stamp,cmap='gray',interpolation='nearest', origin='lower',vmin=mn,vmax=mx) ax1.text(0.1,0.9,'{:2d}'.format(istar+1),color='white', horizontalalignment='left',verticalalignment='top', transform=ax1.transAxes) ax2 = plt.subplot2grid((nrows,3*ncols), (irow,icol+1), aspect='equal') ax2.axis('off') #ax2.imshow(stamp-model_mog, **tim.ima) ax2.imshow(stamp-model_mog,cmap='gray',interpolation='nearest', origin='lower',vmin=mn,vmax=mx) ax2.text(0.1,0.9,'MoG',color='white', horizontalalignment='left',verticalalignment='top', transform=ax2.transAxes) ax2.text(0.08,0.08,'{:.3f}'.format(mag_mog),color='white', horizontalalignment='left',verticalalignment='bottom', transform=ax2.transAxes) #ax2.set_title('{:.3f}, {:.2f}'.format(mag_psfex,chi2_psfex),fontsize=14) #ax2.set_title('{:.3f}, $\chi^{2}$={:.2f}'.format(mag_psfex,chi2_psfex)) ax3 = plt.subplot2grid((nrows,3*ncols), (irow,icol+2), aspect='equal') ax3.axis('off') #ax3.imshow(stamp-model_psfex, **tim.ima) ax3.imshow(stamp-model_psfex,cmap='gray',interpolation='nearest', origin='lower',vmin=mn,vmax=mx) ax3.text(0.1,0.9,'PSFEx',color='white', horizontalalignment='left',verticalalignment='top', transform=ax3.transAxes) ax3.text(0.08,0.08,'{:.3f}'.format(mag_psfex),color='white', horizontalalignment='left',verticalalignment='bottom', transform=ax3.transAxes) if istar==(nstar-1): break fig.savefig(pngprefix+'-stargrid.png',bbox_inches='tight')
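# Both fits above report magnitudes through the nanomaggy flux convention used
# by the tractor's NanoMaggies class: a flux of 1 nanomaggy corresponds to AB
# magnitude 22.5.  A standalone sketch of the two conversions:

import numpy as np

def mag_to_nanomaggies(mag):
    # AB magnitude 22.5 corresponds to a flux of 1 nanomaggy.
    return 10.0 ** ((22.5 - mag) / 2.5)

def nanomaggies_to_mag(flux):
    return 22.5 - 2.5 * np.log10(flux)

assert np.isclose(nanomaggies_to_mag(mag_to_nanomaggies(19.0)), 19.0)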
def main(): # indir = '/global/cscratch1/sd/dstn/dr8test-1' # name = 'dr8-test1' # pretty = 'DR8 test1' # indir = '/scratch1/scratchdirs/desiproc/dr8test002/' # name = 'dr8-test2' # pretty = 'DR8 test2 (outliers)' # indir = '/scratch1/scratchdirs/desiproc/dr8test003/' # name = 'dr8-test3' # pretty = 'DR8 test3 (outliers)' # # indir = '/scratch1/scratchdirs/desiproc/dr8test004/' # name = 'dr8-test4' # pretty = 'DR8 test4 (large-galaxies)' # indir = '/global/cscratch1/sd/dstn/dr8test005/' # name = 'dr8-test5' # pretty = 'DR8 test5 (trident)' # indir = '/global/cscratch1/sd/dstn/dr8test006/' # name = 'dr8-test6' # pretty = 'DR8 test6 (sky)' # indir = '/global/cscratch1/sd/dstn/dr8test007/' # name = 'dr8-test7' # pretty = 'DR8 test7 (outliers)' #indir = '/global/cscratch1/sd/dstn/dr8test14/' #name = 'dr8-test14' #pretty = 'DR8 test14 (rc)' #indir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8a/' #name = 'dr8a' #pretty = 'DR8a (rc)' if False: indir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8b/runbrick-decam/' name = 'dr8b-decam' pretty = 'DR8b DECam' survey_dir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8b/runbrick-decam' if True: indir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8b/runbrick-90prime-mosaic/' name = 'dr8b-90p-mos' pretty = 'DR8b BASS+MzLS' survey_dir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8b/runbrick-90prime-mosaic' # ln -s /global/project/projectdirs/cosmo/work/legacysurvey/dr8b/runbrick-decam/coadds-only/coadd/ . sublayers = ['', '-model', '-resid'] subpretty = {'': ' images', '-model': ' models', '-resid': ' residuals'} # survey_dir = '/global/cscratch1/sd/desiproc/dr7' # sublayers = [''] # subpretty = {'':' images'} #survey_dir = '/global/cscratch1/sd/dstn/dr8-depthcut' #survey_dir = '/global/project/projectdirs/cosmo/work/legacysurvey/dr8a/' rsync = False datadir = 'data' survey = LegacySurveyData(survey_dir=survey_dir) fn = 'map/test_layers.py' txt = open(fn).read() for x in sublayers: txt = txt + '\n' + 'test_layers.append(("%s%s", "%s%s"))\n' % ( name, x, pretty, subpretty[x]) open(fn, 'wb').write(txt.encode()) print('Wrote', fn) basedir = os.path.join(datadir, name) if rsync: cmd = 'rsync -LRarv %s/./{coadd/*/*/*-{image-,model-,ccds}*.fits*,tractor} %s/%s' % ( indir, datadir, name) print(cmd) os.system(cmd) # ...? 
cmd = 'rsync -Rarv %s/./{images,survey-ccds*.fits} %s/%s' % ( survey_dir, datadir, name) print(cmd) os.system(cmd) else: # symlink if os.path.exists(basedir): print('Not symlinking', indir, 'to', basedir, ': already exists!') else: os.makedirs(basedir) for subdir in ['coadd', 'tractor']: os.symlink(os.path.join(indir, subdir), os.path.join(basedir, subdir), target_is_directory=True) for fn in ['images', 'calib']: os.symlink(os.path.join(indir, subdir), os.path.join(basedir, subdir), target_is_directory=False) for pat in ['survey-ccds-*']: for fn in [ os.path.basename(f) for f in glob(os.path.join(indir, pat)) ]: os.symlink(os.path.join(indir, subdir), os.path.join(basedir, subdir), target_is_directory=False) allbricks = survey.get_bricks_readonly() imagefns = glob(os.path.join(basedir, 'coadd', '*', '*', '*-image-*.fits*')) extraimagefns = glob( os.path.join(basedir, 'extra-images', 'coadd', '*', '*', '*-image-*.fits*')) print('Image filenames:', len(imagefns), 'plus', len(extraimagefns), 'extras') imagefns += extraimagefns brickset = set() for fn in imagefns: dirs = fn.split('/') brickname = dirs[-2] brickset.add(brickname) print(len(brickset), 'bricks found') I, = np.nonzero([b in brickset for b in allbricks.brickname]) bricks = allbricks[I] brickfn = os.path.join(basedir, 'survey-bricks.fits.gz') bricks.writeto(brickfn) print('Wrote', brickfn) threads = 8 tharg = '--threads %i ' % threads #tharg = '' for x in sublayers: cmd = 'python -u render-tiles.py --kind %s%s --bricks' % (name, x) print(cmd) os.system(cmd) # images for scale in range(1, 8): cmd = 'python -u render-tiles.py --kind %s --scale --zoom %i %s' % ( name, scale, tharg) print(cmd) os.system(cmd) # models for scale in range(1, 8): cmd = 'python -u render-tiles.py --kind %s-model --scale --zoom %i %s' % ( name, scale, tharg) print(cmd) os.system(cmd) # resids for scale in range(1, 8): cmd = 'python -u render-tiles.py --kind %s-resid --scale --zoom %i %s' % ( name, scale, tharg) print(cmd) os.system(cmd) for x in sublayers: cmd = 'python -u render-tiles.py --kind %s%s --top' % (name, x) print(cmd) os.system(cmd)
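# The brick subset written to survey-bricks.fits.gz above is derived purely from
# the coadd directory layout, coadd/<brickname[:3]>/<brickname>/<files>.  A
# minimal sketch of that extraction step, under the same layout assumption:

import os
from glob import glob

def bricks_with_coadds(basedir):
    # The brick name is the second-to-last path component of each coadd image.
    fns = glob(os.path.join(basedir, 'coadd', '*', '*', '*-image-*.fits*'))
    return set(fn.split('/')[-2] for fn in fns)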
def main(): ps = PlotSequence('cov') survey = LegacySurveyData() ra, dec = 242.0, 10.2 fn = 'coverage-ccds.fits' if not os.path.exists(fn): ccds = survey.get_ccds() ccds.cut(ccds.filter == 'r') ccds.cut(ccds.propid == '2014B-0404') ccds.cut(np.hypot(ccds.ra_bore - ra, ccds.dec_bore - dec) < 2.5) print(np.unique(ccds.expnum), 'unique exposures') print('propids', np.unique(ccds.propid)) ccds.writeto(fn) else: ccds = fits_table(fn) plt.clf() for e in np.unique(ccds.expnum): I = np.flatnonzero(ccds.expnum == e) plt.plot(ccds.ra[I], ccds.dec[I], '.') ps.savefig() degw = 3.0 pixscale = 10. W = degw * 3600 / 10. H = W hi = 6 cmap = cmap_discretize('jet', hi + 1) wcs = Tan(ra, dec, W / 2. + 0.5, H / 2. + 0.5, -pixscale / 3600., 0., 0., pixscale / 3600., float(W), float(H)) r0, d0 = wcs.pixelxy2radec(1, 1) r1, d1 = wcs.pixelxy2radec(W, H) extent = [min(r0, r1), max(r0, r1), min(d0, d1), max(d0, d1)] for expnums in [ [348666], [348666, 348710, 348686], [348659, 348667, 348658, 348666, 348665, 348669, 348668], None, [ 348683, 348687, 347333, 348686, 348685, 348692, 348694, 348659, 348667, 348658, 348666, 348665, 348669, 348668, 348707, 348709, 348708, 348710, 348711, 348716, 348717 ], ]: nexp = np.zeros((H, W), np.uint8) for ccd in ccds: if expnums is not None and not ccd.expnum in expnums: continue ccdwcs = survey.get_approx_wcs(ccd) r, d = ccdwcs.pixelxy2radec(1, 1) ok, x0, y0 = wcs.radec2pixelxy(r, d) r, d = ccdwcs.pixelxy2radec(ccd.width, ccd.height) ok, x1, y1 = wcs.radec2pixelxy(r, d) xlo = np.clip(int(np.round(min(x0, x1))) - 1, 0, W - 1) xhi = np.clip(int(np.round(max(x0, x1))) - 1, 0, W - 1) ylo = np.clip(int(np.round(min(y0, y1))) - 1, 0, H - 1) yhi = np.clip(int(np.round(max(y0, y1))) - 1, 0, H - 1) nexp[ylo:yhi + 1, xlo:xhi + 1] += 1 plt.clf() plt.imshow(nexp, interpolation='nearest', origin='lower', vmin=-0.5, vmax=hi + 0.5, cmap=cmap, extent=extent) plt.colorbar(ticks=np.arange(hi + 1)) ps.savefig() O = fits_table('obstatus/decam-tiles_obstatus.fits') O.cut(np.hypot(O.ra - ra, O.dec - dec) < 2.5) for p in [1, 2, 3]: print('Pass', p, 'exposures:', O.r_expnum[O.get('pass') == p]) O.cut(O.get('pass') == 2) print(len(O), 'pass 2 nearby') d = np.hypot(O.ra - ra, O.dec - dec) print('Dists:', d) I = np.flatnonzero(d < 0.5) assert (len(I) == 1) ocenter = O[I[0]] print('Center expnum', ocenter.r_expnum) I = np.flatnonzero(d >= 0.5) O.cut(I) #center = ccds[ccds.expnum == ocenter.r_expnum] #p2 = ccds[ccds. ok, xc, yc = wcs.radec2pixelxy(ocenter.ra, ocenter.dec) xx, yy = np.meshgrid(np.arange(W) + 1, np.arange(H) + 1) c_d2 = (xc - xx)**2 + (yc - yy)**2 best = np.ones((H, W), bool) for o in O: ok, x, y = wcs.radec2pixelxy(o.ra, o.dec) d2 = (x - xx)**2 + (y - yy)**2 best[d2 < c_d2] = False del d2 del c_d2, xx, yy # plt.clf() # plt.imshow(best, interpolation='nearest', origin='lower', cmap='gray', # vmin=0, vmax=1) # ps.savefig() plt.clf() plt.imshow(nexp * best, interpolation='nearest', origin='lower', vmin=-0.5, vmax=hi + 0.5, cmap=cmap, extent=extent) plt.colorbar(ticks=np.arange(hi + 1)) ps.savefig() plt.clf() n, b, p = plt.hist(np.clip(nexp[best], 0, hi), range=(-0.5, hi + 0.5), bins=hi + 1) plt.xlim(-0.5, hi + 0.5) ps.savefig() print('b', b) print('n', n) print('fracs', np.array(n) / np.sum(n)) print('pcts', ', '.join(['%.1f' % f for f in 100. * np.array(n) / np.sum(n)]))
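# The closing histogram converts per-pixel exposure counts into area fractions.
# A standalone sketch of that summary, where nexp stands in for the count map
# built above and best for the nearest-tile mask (the random inputs are purely
# illustrative):

import numpy as np

def coverage_fractions(nexp, best, hi=6):
    # Fraction of selected pixels covered 0, 1, ..., >=hi times.
    vals = np.clip(nexp[best], 0, hi)
    counts = np.bincount(vals, minlength=hi + 1)
    return counts / counts.sum()

nexp = np.random.poisson(2.0, size=(360, 360))
best = np.ones(nexp.shape, bool)
print(', '.join('%.1f' % (100. * f) for f in coverage_fractions(nexp, best)))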
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('--plots', action='store_true') parser.add_argument('--brick', help='Brick name to run') parser.add_argument( '--input-dir', default='/global/projecta/projectdirs/cosmo/work/legacysurvey/dr7') #/global/cscratch1/sd/desiproc/dr7out') parser.add_argument('--survey-dir', default='/global/cscratch1/sd/dstn/dr7-depthcut') parser.add_argument('--output-dir', default='/global/cscratch1/sd/dstn/bright') opt = parser.parse_args() plots = opt.plots ps = PlotSequence('bright') brickname = opt.brick insurvey = LegacySurveyData(opt.input_dir, cache_dir=opt.survey_dir) outsurvey = LegacySurveyData(opt.output_dir, output_dir=opt.output_dir) bfn = insurvey.find_file('blobmap', brick=brickname) print('Found blob map', bfn) blobs = fitsio.read(bfn) h, w = blobs.shape brick = insurvey.get_brick_by_name(brickname) brickwcs = wcs_for_brick(brick) radius = np.sqrt(2.) * 0.25 * 1.01 neighbors = insurvey.get_bricks_near(brick.ra, brick.dec, radius) print(len(neighbors), 'bricks nearby') def showbool(X): d = downsample_max(X, 8) h, w = X.shape plt.imshow(d, interpolation='nearest', origin='lower', vmin=0, vmax=1, extent=[0, w, 0, h], cmap='gray') brightblobs = set() for nb in neighbors: if nb.brickname == brickname: # ignore myself! continue print('Neighbor:', nb.brickname) mfn = insurvey.find_file('maskbits', brick=nb.brickname) if not os.path.exists(mfn): print('No maskbits file:', mfn) continue maskbits = fitsio.read(mfn) bright = ((maskbits & MASKBITS['BRIGHT']) > 0) print(np.sum(bright > 0), 'BRIGHT pixels set') primary = (maskbits & MASKBITS['NPRIMARY'] == 0) print(np.sum(primary), 'PRIMARY pixels set') edge = binary_dilation(primary, structure=np.ones((3, 3), bool)) edge = edge * np.logical_not(primary) brightedge = edge & bright if plots: plt.clf() showbool(bright) plt.title('bright: brick %s' % nb.brickname) ps.savefig() # plt.clf() # showbool(primary) # plt.title('PRIMARY, brick %s' % nb.brickname) # ps.savefig() # # plt.clf() # showbool(edge) # plt.title('boundary, brick %s' % nb.brickname) # ps.savefig() plt.clf() showbool(brightedge) plt.title('bright at edge, brick %s' % nb.brickname) ps.savefig() nwcs = wcs_for_brick(nb) yy, xx = np.nonzero(brightedge) print(len(yy), 'bright edge pixels') if len(yy) == 0: continue rr, dd = nwcs.pixelxy2radec(xx + 1, yy + 1) print('RA range', rr.min(), rr.max(), 'vs brick', brick.ra1, brick.ra2) print('Dec range', dd.min(), dd.max(), 'vs brick', brick.dec1, brick.dec2) # Find pixels that are within this brick's unique area I, = np.nonzero((rr >= brick.ra1) * (rr <= brick.ra2) * (dd >= brick.dec1) * (dd <= brick.dec2)) if plots: plt.clf() plt.plot( [brick.ra1, brick.ra1, brick.ra2, brick.ra2, brick.ra1], [brick.dec1, brick.dec2, brick.dec2, brick.dec1, brick.dec1], 'b-') plt.plot(rr, dd, 'k.') plt.plot(rr[I], dd[I], 'r.') plt.title('Bright pixels from %s' % nb.brickname) ps.savefig() if len(I) == 0: print('No edge pixels touch') #plt.plot(br,bd, 'b-') continue #print('Edge pixels touch!') #plt.plot(br,bd, 'r-', zorder=20) ok, x, y = brickwcs.radec2pixelxy(rr[I], dd[I]) x = np.round(x).astype(int) - 1 y = np.round(y).astype(int) - 1 print('Pixel ranges X', x.min(), x.max(), 'Y', y.min(), y.max()) assert (np.all((x >= 0) * (x < w) * (y >= 0) * (y < h))) print('Adding blobs:', np.unique(blobs[y, x])) brightblobs.update(blobs[y, x]) print('Blobs touching bright pixels:', brightblobs) print() brightblobs.discard(-1) if len(brightblobs) == 0: print('No neighboring bright blobs to update!') 
return print('Updating', len(brightblobs), 'blobs:', brightblobs) tmap = np.zeros(blobs.max() + 2, bool) for b in brightblobs: tmap[b + 1] = True touching = tmap[blobs + 1] if plots: plt.clf() showbool(touching) plt.title('Blobs touching bright, brick %s' % brickname) ps.savefig() mfn = insurvey.find_file('maskbits', brick=brickname) maskbits, hdr = fitsio.read(mfn, header=True) updated = maskbits | (MASKBITS['BRIGHT'] * touching) if np.all(maskbits == updated): print('No bits updated! (Bright stars were already masked)') return maskbits = updated if plots: plt.clf() showbool((maskbits & MASKBITS['BRIGHT']) > 0) plt.title('New maskbits map for BRIGHT, brick %s' % brickname) ps.savefig() with outsurvey.write_output('maskbits', brick=brickname) as out: out.fits.write(maskbits, hdr=hdr) tfn = insurvey.find_file('tractor', brick=brickname) phdr = fitsio.read_header(tfn, ext=0) hdr = fitsio.read_header(tfn, ext=1) T = fits_table(tfn) print('Read', len(T), 'sources') print('Bright:', Counter(T.brightstarinblob)) iby = np.clip(np.round(T.by), 0, h - 1).astype(int) ibx = np.clip(np.round(T.bx), 0, w - 1).astype(int) if plots: before = np.flatnonzero(T.brightstarinblob) T.brightstarinblob |= touching[iby, ibx] print('Bright:', Counter(T.brightstarinblob)) # yuck -- copy the TUNIT headers from input to output. units = [ hdr.get('TUNIT%i' % (i + 1), '') for i in range(len(T.get_columns())) ] if plots: plt.clf() showbool((maskbits & MASKBITS['BRIGHT']) > 0) ax = plt.axis() after = np.flatnonzero(T.brightstarinblob) plt.plot(T.bx[before], T.by[before], 'gx') plt.plot(T.bx[after], T.by[after], 'r.') plt.axis(ax) plt.title('sources with brightstarinblob, brick %s' % brickname) ps.savefig() with outsurvey.write_output('tractor', brick=brickname) as out: T.writeto(None, fits_object=out.fits, primheader=phdr, header=hdr, units=units)
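# The tmap[blobs + 1] lookup above flags every pixel whose blob id is in
# brightblobs in one vectorized step; blob id -1 means "not in any blob", hence
# the +1 offset and the +2 table length.  A minimal worked example of the trick:

import numpy as np

blobs = np.array([[-1, 0, 0],
                  [ 1, 1, 2]])           # blob map; -1 = not in any blob
brightblobs = {1, 2}                     # blob ids touching a bright star

tmap = np.zeros(blobs.max() + 2, bool)   # index 0 is reserved for id -1
for b in brightblobs:
    tmap[b + 1] = True
touching = tmap[blobs + 1]               # boolean mask, same shape as blobs
print(touching)
# [[False False False]
#  [ True  True  True]]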
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('--mzls', action='store_true', help='Set MzLS (default: DECaLS)') parser.add_argument('--ann', help='Set annotated-CCDs file') opt = parser.parse_args() if opt.mzls: from mosaic import MosaicNominalCalibration from camera_mosaic import database_filename, camera_name nom = MosaicNominalCalibration() obstatus_fn = 'obstatus/mosaic-tiles_obstatus.fits' out_fn = 'mosaic-obstatus-depth.fits' bands = 'z' declo, dechi = -5, 90 bad_expid_fn = 'obstatus/bad_expid.txt' else: from decam import DecamNominalCalibration from camera_decam import database_filename, camera_name nom = DecamNominalCalibration() # ln -s ~/observing/obstatus/bad_expid.txt obstatus/decam-bad_expid.txt obstatus_fn = 'obstatus/decam-tiles_obstatus.fits' out_fn = 'decam-obstatus-depth.fits' bad_expid_fn = 'obstatus/decam-bad_expid.txt' bands = 'grz' declo, dechi = -20, 35 f = open(bad_expid_fn) bad_expids = set() for line in f: line = line.strip() if len(line) == 0: continue if line[0] == '#': continue words = line.split() try: expnum = int(words[0]) except: print('Skipping line:', line) continue bad_expids.add(expnum) print('Read', len(bad_expids), 'bad exposure numbers') # Convert copilot db to fits. import obsdb from copilot import db_to_fits obsdb.django_setup(database_filename=database_filename) ccds = obsdb.MeasuredCCD.objects.all() copilot = db_to_fits(ccds) all_copilot = copilot.copy() fn = 'copilot.fits' copilot.writeto(fn) print('Wrote', fn) print(len(copilot), 'measured CCDs in copilot database') copilot.cut(np.array([c.strip() == camera_name for c in copilot.camera])) print(len(copilot), 'copilot CCDs with camera = "%s"' % camera_name) copilot.cut(copilot.expnum > 0) print(len(copilot), 'measured CCDs in copilot database with EXPNUM') print('Copilot expfactor extremes:', np.percentile(copilot.expfactor[copilot.expfactor != 0], [1, 99])) survey = LegacySurveyData() if opt.ann: ccds = fits_table(opt.ann) else: print('Reading annotated CCDs files...') ccds = survey.get_annotated_ccds() print(len(ccds), 'CCDs') # Fix parsing of OBJECT field to tileid... from obsbot import get_tile_id_from_name tileids = [] for o in ccds.object: tid = get_tile_id_from_name(o.strip()) if tid is None: tid = 0 tileids.append(tid) tileids = np.array(tileids) print(len(np.unique(tileids)), 'unique tile ids in annotated file, from OBJECT') print(len(np.unique(ccds.tileid)), 'unique tile ids in ann file from TILEID') D = np.flatnonzero(tileids != ccds.tileid) print(len(D), 'different tileids') print('From OBJECT:', tileids[D]) print('From TILEID:', ccds.tileid[D]) ccds.tileid = tileids O = fits_table(obstatus_fn) print(len(O), 'tiles') if opt.mzls: from camera_mosaic import fix_expnums # Fix MzLS exposure numbers with wrong leading "3". fix_expnums(ccds.expnum) # Also fix leading "3" in expnums in OBSTATUS file fix_expnums(O.z_expnum) # And copilot database fix_expnums(copilot.expnum) print('Z_EXPNUM range:', O.z_expnum.min(), 'min >0:', O.z_expnum[O.z_expnum > 0].min(), O.z_expnum.max()) print('Pass numbers:', np.unique(O.get('pass'))) if opt.mzls: goodtiles = (O.in_desi * (O.dec > 30) * (O.get('pass') <= 3)) print(sum(goodtiles), 'tiles of interest') else: goodtiles = (O.in_desi * (O.get('pass') <= 3)) print(sum(goodtiles), 'tiles in the footprint') #O.cut(goodtiles) #print('Cut to', len(O), 'tiles of interest') # *after* fixing tileids allccds = ccds.copy() # Map tile IDs back to index in the obstatus file. 
tileid_to_index = np.empty(max(O.tileid) + 1, int) tileid_to_index[:] = -1 tileid_to_index[O.tileid] = np.arange(len(O)) assert (len(np.unique(O.tileid)) == len(O)) I = tileid_to_index[O.tileid] assert (np.all(I == np.arange(len(O)))) # Look at whether exposures from other programs are near our tile centers. # Basically nope. # plt.clf() # e,K = np.unique(ccds.expnum, return_index=True) # I,J,d = match_radec(O.ra, O.dec, ccds.ra_bore[K], ccds.dec_bore[K], # 1./60., nearest=True) # KK = K[np.flatnonzero(ccds.tileid[K] > 0)] # I,J,d2 = match_radec(O.ra, O.dec, ccds.ra_bore[KK], ccds.dec_bore[KK], # 1./60., nearest=True) # ha = dict(range=(0., 60.), bins=60, histtype='step') # plt.hist(d * 3600., color='b', **ha) # plt.hist(d2 * 3600., color='r', **ha) # plt.xlabel('Distance from tile to nearest DECam boresight (arcsec)') # plt.savefig('dists.png') notileids = ccds[ccds.tileid <= 0] print(len(notileids), 'CCDs have no tileid') I, J, d = match_radec(notileids.ra_bore, notileids.dec_bore, O.ra, O.dec, 0.5, nearest=True) plt.clf() plt.hist(d, bins=50) plt.xlabel('Distance to nearest tile center (deg)') plt.savefig('tiledist.png') plt.clf() plt.hist(d * 3600, bins=50, range=(0, 30)) plt.xlabel('Distance to nearest tile center (arcsec)') plt.savefig('tiledist2.png') ccds.cut(ccds.tileid > 0) print(len(ccds), 'CCDs with tileid') expnums, I = np.unique(ccds.expnum, return_index=True) print(len(expnums), 'unique exposures (with tileids)') ccds.photometric = (ccds.ccd_cuts == 0) # Compute the mean depth per exposure E = ccds[I] for expnum in expnums: I = np.flatnonzero(ccds.expnum == expnum) j = np.flatnonzero(E.expnum == expnum) assert (len(j) == 1) j = j[0] E.photometric[j] = np.all(ccds.photometric[I]) #E.photometric[j] = np.all(ccds.ccd_cuts[I] == 0) if len(np.unique(ccds.photometric[I])) == 2: print('Exposure', expnum, 'has photometric and non-photometric CCDs') non = I[ccds.photometric[I] == False] phot = I[ccds.photometric[I]] if opt.mzls and len(phot) == 3: print('Accepting an exposure with 3 good CCDs') E.photometric[j] = True # And remove this exposure from the bad_expid list. if expnum in bad_expids: bad_expids.remove(expnum) print('Removing exposure', expnum, 'from bad_expid file') continue for ii in non: print( ' http://legacysurvey.org/viewer-dev/?ra=%.3f&dec=%.3f&zoom=11&ccds3&bad=%i-%s' % (ccds.ra_center[ii], ccds.dec_center[ii], expnum, ccds.ccdname[ii])) print( ' http://legacysurvey.org/viewer-dev/ccd/decals-dr5/decam-%s-%s-%s/' % (ccds.expnum[ii], ccds.ccdname[ii], ccds.filter[ii])) print(' image:', ccds.image_filename[I][0]) print(' boresight:', ccds.ra_bore[I][0], ccds.dec_bore[I][0]) #print(' ccdnames:', ccds.ccdname[I]) print(' photometric:', len(phot), ', non-photometric:', len(non)) print(' median phot depth:', np.median(ccds.galdepth[phot])) #print(' depth:', ccds.galdepth[I]) print(' non-photometric CCDs:', ccds.ccdname[non]) print(' depths:', ccds.galdepth[non]) print(' ccdnmatch', ccds.ccdnmatch[non], 'vs', ccds.ccdnmatch[phot]) print(' ccdtransp:', ccds.ccdtransp[non], 'vs', ccds.ccdtransp[phot]) print(' ccd zpt vs frame zpt:', ccds.ccdzpt[non] - ccds.zpt[non]) dp = ccds.ccdzpt[phot] - ccds.zpt[phot] print(' phot ccds zpt vs frame: range', dp.min(), dp.max(), 'mean', dp.mean()) whitelist = [ 346662, 346664, 346665, # S3/S29 striping 346754, # one bad chip, wispy 346967, 347304, # M5 globular 347664, # zpt scatter 347744, # weird eye-shaped ghost; but lots of cov. 
347755, 347768, 347769, 347782, # shallow, zpt scatter -- wispy pattern on focal plane 347918, 347920, # straddling transparency cut 347934, 347936, 347941, 347945, 347947, # zpt scatter 392377, 392380, 393173, # bright star 393671, # bright star 393672, 393673, # scatter 425339, 425340, # strangely low ccdnmatch 426225, 430808, # globular cluster 431640, # bright star 431644, # globular 432154, # one amp high bias 432159, # transp. on boundary 432179, # one amp high bias, + 432747, 432748, 432751, # scatter 433305, 433306, # bright star 497062, 497064, 497065, # low ccdnmatch 509516, 509517, # bright star 511247, 511263, # low ccdnmatch 511513, 511514, # bright star 512303, # bright star 520560, # bright star 521782, # scatter 522212, # bright star 535138, # bright stars, satellite hits? 535141, 535142, 535143, 535149, # low ccdnmatch 535210, # bright star 535695, # globular 536065, # globular 536385, # strangely zero ccdnmatch 547761, # nice galaxy 548257, # bright star 553779, # scatter 553795, # shallow 554284, # marginal zpt 563659, # zpt scatter 563850, # mild striping 563852, # ?? 583118, # bright stars? 592621, # marginal zpt, scatter 592859, # some pattern noise 605068, # ?? 625710, # strangely low ccdnmatch 631005, # bright star 634493, # globular 634786, # strangely low ccdnmatch 634877, # globular 635535, # bright star, glob 635962, # low ccdnmatch 635973, # bias level? 636018, # bias level? 637688, # bright star ] blacklist = [ 425328, # 2.7" seeing 488244, 488256, 488260, 488261, 488263, 488268, # weird striping 488270, # weird striping 496913, 496917, 496918, 496919, 496920, 496921, 496922, # 3" seeing 496923, 496925, 496926, 496927, 496928, 496930, # 3" seeing 509162, 509163, 509166, 509172, 509176, 509182, 509202, # 3" seeing 535471, # 4" seeing! 535498, # 3" seeing 548218, # double PSF -- telescope moved? 563835, # striping 563842, # striping ] if expnum in whitelist: print('** Exposure', expnum, 'in whitelist -- marking as photometric') E.photometric[j] = True # Don't include zeros in computing average depths! Igood = I[(ccds.galdepth[I] > 0) * (ccds.ccdzpt[I] < 30)] if len(Igood) > 0: E.galdepth[j] = np.mean(ccds.galdepth[Igood]) else: E.galdepth[j] = 0. 
del expnums keep = np.array([not (expnum in bad_expids) for expnum in ccds.expnum]) ccds.cut(keep) print(len(ccds), 'CCDs NOT in the bad_expids file') keep = np.array([not (expnum in bad_expids) for expnum in copilot.expnum]) copilot.cut(keep) print(len(copilot), 'copilot exposures NOT in the bad_expids file') keep = np.array([not (expnum in bad_expids) for expnum in E.expnum]) E.cut(keep) print(len(E), 'CCD Exposures NOT in the bad_expids file') # plt.clf() # plt.plot(O.ra, O.dec, 'k.') # plt.axis([360,0,-25,35]) # plt.title('All tiles') # plt.savefig('tiles-all.png') # # print('in_desi:', np.unique(O.in_desi)) # plt.clf() # J = np.flatnonzero(O.in_desi == 1) # plt.plot(O.ra[J], O.dec[J], 'k.') # plt.axis([360,0,-25,35]) # plt.title('In DESI') # plt.savefig('tiles-desi.png') # # print('in_des:', np.unique(O.in_des)) # plt.clf() # J = np.flatnonzero(O.in_des == 1) # plt.plot(O.ra[J], O.dec[J], 'k.') # plt.axis([360,0,-25,35]) # plt.title('IN DES') # plt.savefig('tiles-des.png') #print('Number of exposures of each tile:') #print(Counter(E.tileid).most_common()) print() print() print('Number of exposures of tiles:') for band in bands: I = np.flatnonzero(E.filter == band) c = Counter(E.tileid[I]) c2 = Counter([v for k, v in c.most_common()]) print(' ', band, 'band:', c2.most_common()) # Detection inverse-variance is the quantity that adds when there are # multiple exposures. # detsig1 = ccds.sig1 / ccds.galnorm_mean # depth = 5. * detsig1 # # that's flux in nanomaggies -- convert to mag # ccds.galdepth = -2.5 * (np.log10(depth) - 9) with np.errstate(divide='ignore', over='ignore'): # actually 5*detsig1... detsig = 10.**((E.galdepth - 22.5) / -2.5) E.detiv = 1. / detsig**2 E.detiv[E.galdepth == 0] = 0. print('Smallest detivs:', E.detiv[np.argsort(E.detiv)[:10]]) print('w/ galdepths:', E.galdepth[np.argsort(E.detiv)[:10]]) print('Smallest positive detivs:', E.detiv[np.argsort(E.detiv + 1e12 * (E.detiv == 0))[:10]]) print('w/ galdepths:', E.galdepth[np.argsort(E.detiv + 1e12 * (E.detiv == 0))[:10]]) for band in bands: print() print('------------------') print(band, 'band.') # "I" indexes into exposures E. I = np.flatnonzero( (E.filter == band) * E.photometric * np.isfinite(E.detiv)) print(len(I), 'photometric exposures in', band) # "iv" is parallel to O; will be converted to "galdepth". iv = np.zeros(len(O), np.float32) # "J" indexes into obstatus tiles O. J = tileid_to_index[E.tileid[I]] assert (np.all((J >= 0) * (J < len(O)))) assert (np.all(O.tileid[J] == E.tileid[I])) #print('tileid range', E.tileid[I].min(), E.tileid[I].max()) # d = np.array([degrees_between(*a) for a in # zip(E.ra_bore[I], E.dec_bore[I], O.ra[J], O.dec[J])]) # print('Degrees between tiles & exposures:', d) np.add.at(iv, J, E.detiv[I]) print('galdepth range:', E.galdepth[I].min(), E.galdepth[I].max()) print('detiv range:', E.detiv[I].min(), E.detiv[I].max()) #print('index range:', J.min(), J.max()) nexp = np.zeros(len(O), int) np.add.at(nexp, J, 1) print('tile exposure counts:', Counter(nexp)) # convert iv back to galdepth in mags with np.errstate(divide='ignore'): galdepth = -2.5 * (np.log10(np.sqrt(1. / iv)) - 9) galdepth[iv == 0] = 0. # Shallowest before extinction correction #I = np.argsort(iv + 1e6*(iv == 0)) I = np.argsort(galdepth + 50. 
* (galdepth == 0)) print( 'Shallowest depth estimates from annotated CCDs file, before extinction:' ) for i in I[:10]: print(' ', galdepth[i], 'iv', iv[i], 'tile', O.tileid[i], 'expnum', O.get('%s_expnum' % band)[i]) e = O.get('%s_expnum' % band)[i] j = np.flatnonzero(E.expnum == e) print(' galdepth', E.galdepth[j]) fid = nom.fiducial_exptime(band) extinction = O.ebv_med * fid.A_co #print('Extinction range:', extinction.min(), extinction.max()) galdepth -= extinction galdepth[iv == 0] = 0. # Shallowest galdepth > 0 I = np.argsort(galdepth + 50. * (galdepth == 0)) print('Shallowest depth estimates from annotated CCDs file:') for i in I[:10]: print(' ', galdepth[i], 'tile', O.tileid[i], 'expnum', O.get('%s_expnum' % band)[i]) #print('galdepth deciles:', np.percentile(galdepth, [0,10,20,30,40,50,60,70,80,90,100])) # Z_DONE, Z_EXPNUM but no Z_DEPTH missing_depth = np.flatnonzero( (O.get('%s_expnum' % band) > 0) * (O.get('%s_done' % band) == 1) * (galdepth == 0)) print('Found', len(missing_depth), 'tiles with', band, 'DONE and EXPNUM but no DEPTH; setting to DEPTH=30') print(' eg, EXPNUMs', O.get('%s_expnum' % band)[missing_depth[:10]], 'DATE', O.get('%s_date' % band)[missing_depth[:10]]) # Don't actually update 'galdepth[missing_depth]' until after this next check... # Flag tiles that have *only* non-photometric exposures with depth = 1. I = np.flatnonzero((E.filter == band) * np.logical_not(E.photometric)) print(len(I), 'exposures are non-photometric in', band, 'band') J = tileid_to_index[E.tileid[I]] only_nonphot = J[galdepth[J] == 0.] print(len(only_nonphot), 'tiles have only non-photometric exposures') print('Marking', len(only_nonphot), 'non-photometric tiles in', band, 'with depth=1') orig_galdepth = galdepth.copy() galdepth[missing_depth] = 30. galdepth[only_nonphot] = 1. J = tileid_to_index[E.tileid[I]] nonphot = (galdepth[J] == 1.) print('Non-photometric galdepths:', E.galdepth[I]) print('Non-photometric galdepths:', E.galdepth[I[nonphot]]) plt.clf() phot = np.flatnonzero((E.filter == band) * E.photometric) plt.hist(E.galdepth[phot], range=(18, 26), bins=50, histtype='step', color='b', label='Photometric') plt.hist(E.galdepth[I[nonphot]], range=(18, 26), bins=50, histtype='step', color='r', label='Non-phot') plt.legend() plt.savefig('nonphot-%s.png' % band) # expnum_to_copilot = np.empty(expnums.max()+1, int) # expnum_to_copilot[:] = -1 # expnum_to_copilot[copilot.expnum] = np.arange(len(copilot)) expnum_to_copilot = dict([(e, i) for i, e in enumerate(copilot.expnum)]) if False: # Let's check the accuracy of the copilot's depth estimates... target_exptime = copilot.expfactor * fid.exptime # What fraction of the target exposure time did we take? 
depth_factor = copilot.exptime / target_exptime nomdepth = fid.single_exposure_depth depth = nomdepth + 2.5 * np.log10(np.sqrt(depth_factor)) #print('Copilot predicted depths:', depth) IC = np.array( [expnum_to_copilot.get(e, -1) for e in allccds.expnum]) K = np.flatnonzero(IC >= 0) ext = np.array([ e['ugrizY'.index(f)] for e, f in zip(allccds.decam_extinction, allccds.filter) ]) dd = allccds.galdepth - ext print('Making scatterplot...', len(K), 'points') plt.clf() #plt.plot(dd[K], depth[IC[K]], 'b.', alpha=0.2, mec='none') plt.scatter(dd[K], depth[IC[K]], c=np.clip(copilot.expfactor[IC[K]], 0, 2), s=10, alpha=0.2, edgecolors='none') plt.colorbar() plt.xlabel('Pipeline depth') plt.ylabel('Copilot depth proxy') plt.plot([20, 25], [20, 25], 'k-', alpha=0.25) plt.plot([20, 25], [20 + 0.1, 25 + 0.1], 'k--', alpha=0.25) plt.plot([20, 25], [20 - 0.1, 25 - 0.1], 'k--', alpha=0.25) plt.axis([20.5, 23, 21, 23.5]) plt.title( 'Copilot vs Pipeline depth estimates. (color = exp.factor)') plt.savefig('depth-copilot-%s.png' % band) print('Made scatterplot') plt.clf() ha = dict(bins=60, range=(0, 30), log=True, histtype='step') plt.hist(O.get('%s_depth' % band), color='k', label='Before', **ha) plt.hist(orig_galdepth, color='b', label='Annotated CCDs', **ha) # Do we have measurements for any of these missing tiles in the copilot db? for code in [30, 0]: Igal = np.flatnonzero( (O.get('%s_expnum' % band) > 0) * (O.get('%s_done' % band) == 1) * (galdepth == code)) expnums = O.get('%s_expnum' % band)[Igal] print( len(expnums), 'still marked DONE, with EXPNUM, but missing DEPTH, with code =', code) Ihuh = np.flatnonzero( (O.get('%s_done' % band) == 1) * (galdepth == code)) print(len(Ihuh), 'tiles marked DONE, without EXPNUM, and DEPTH =', code) if len(Ihuh): print('Tile ids:', O.tileid[Ihuh]) for t in O.tileid[Ihuh]: I = np.flatnonzero(E.tileid == t) print(' tile', t, ': exposure numbers:', E.expnum[I]) print(' with depths', E.galdepth[I]) i = tileid_to_index[t] if i >= 0: print(' depth', galdepth[i]) else: print(' no depth') # Within an arcmin? I, J, d = match_radec(O.ra[Ihuh], O.dec[Ihuh], all_copilot.rabore, all_copilot.decbore, 1. / 60., nearest=True) print('For', len(Ihuh), 'weird tiles,') print(len(I), 'matches within an arcmin in the copilot db') print('Smallest distances:', d[np.argsort(d)[:10]]) I, J, d = match_radec(O.ra[Ihuh], O.dec[Ihuh], allccds.ra_bore, allccds.dec_bore, 1. / 60., nearest=True) print(len(I), 'matches within an arcmin in the CCDs table') print('Smallest distances:', d[np.argsort(d)[:10]]) O[Ihuh].writeto('weird-%s-%i.fits' % (band, code)) if len(expnums) == 0: continue IC = np.array([expnum_to_copilot.get(e, -1) for e in expnums]) K = np.flatnonzero(IC >= 0) expnums = expnums[K] # these are the indices into O / galdepth Igal = Igal[K] co = copilot[IC[K]] print(len(expnums), 'matched to copilot database') target_exptime = co.expfactor * fid.exptime # What fraction of the target exposure time did we take? 
depth_factor = co.exptime / target_exptime nomdepth = fid.single_exposure_depth print('Nominal single-exposure depth:', nomdepth) co.depth = nomdepth + 2.5 * np.log10(np.sqrt(depth_factor)) print('Copilot predicted depths:', co.depth) J = np.flatnonzero(np.isfinite(co.depth)) co = co[J] # indices into O Igal = Igal[J] print(len(Igal), 'good copilot depth estimates') pcts = [0, 1, 5, 25, 50, 75, 95, 99, 100] print('Copilot depth percentiles:', np.percentile(co.depth, pcts)) print('Shallowest exposures:') I = np.argsort(co.depth) for i in I[:10]: print(' Expnum', co.expnum[i], 'depth', co.depth[i], 'exptime', co.exptime[i]) co[I].writeto('depths.fits') from astrometry.util.starutil_numpy import mjdtodate print('Copilot-matched entries:') I = np.argsort(co.expnum) for i in I: print(' EXPNUM', co.expnum[i], 'date', mjdtodate(co.mjd_obs[i]), ' copilot name', co.filename[i]) # e = co.expnum[i] # I = np.flatnonzero(allccds.expnum == e-1) # fn1 = None # fn2 = None # if len(I): # print(' CCDs file contains', len(I), 'entries for expnum', e-1) # print(' filename', allccds.image_filename[I[0]]) # fn1 = allccds.image_filename[I[0]] # else: # print(' No CCDs file entries for expnum', e-1) # I = np.flatnonzero(allccds.expnum == e+1) # if len(I): # print(' CCDs file contains', len(I), 'entries for expnum', e+1) # print(' filename', allccds.image_filename[I[0]]) # fn2 = allccds.image_filename[I[0]] # else: # print(' No CCDs file entries for expnum', e+1) # # if fn1 is not None and fn2 is not None: # full1 = os.path.join(survey.get_image_dir(), fn1) # #print('Full path 1:', full1) # if os.path.exists(full1): # print('exists') # full2 = os.path.join(survey.get_image_dir(), fn2) # #print('Full path 2:', full2) # if os.path.exists(full2): # print('exists') # if os.path.exists(full1) and os.path.exists(full2): # dir1 = os.path.dirname(full1) # dir2 = os.path.dirname(full2) # if dir1 == dir2: # #print('dir:', dir1) # fns = os.listdir(dir1) # fns.sort() # fns = [fn for fn in fns if ('oki' in fn or 'ooi' in fn)] # base1 = os.path.basename(full1) # base2 = os.path.basename(full2) # i1 = fns.index(base1) # i2 = fns.index(base2) # print('Files found at list elements', i1, i2) # #print(fns[i1:i2+1]) # for fn in fns[i1:i2+1]: # print('EXPNUM', e, 'range', os.path.join(os.path.dirname(fn1), fn)) # if i1 + 4 == i2: # print('EXPNUM', e, 'expected', os.path.join(os.path.dirname(fn1), fns[i1+2])) # if i1 + 2 == i2: # print('EXPNUM', e, 'expected', os.path.join(os.path.dirname(fn1), fns[i1+1])) #print('Before:', galdepth[Igal]) galdepth[Igal] = co.depth #print('After:', galdepth[Igal]) Igal = np.flatnonzero( (O.get('%s_expnum' % band) > 0) * (O.get('%s_done' % band) == 1) * (galdepth == code)) expnums = O.get('%s_expnum' % band)[Igal] print( len(expnums), 'still marked DONE, with EXPNUM, but missing DEPTH with code =', code, 'after copilot patching') print('Exposure numbers:', expnums) print('Exposure dates:', O.get('%s_date' % band)[Igal]) print('Date counter:', Counter(O.get('%s_date' % band)[Igal]).most_common()) O.set('%s_depth' % band, galdepth) plt.hist(O.get('%s_depth' % band), color='r', label='After', **ha) plt.savefig('depth-hist-%s.png' % band) #print('Depth deciles: [', ', '.join(['%.3f' % f for f in np.percentile(O.get('%s_depth' % band), [0,10,20,30,40,50,60,70,80,90,100])]) + ']') rlo, rhi = 0, 360 dlo, dhi = declo, dechi J = np.flatnonzero( (O.in_desi == 1) * (O.in_des == 0) * (O.dec > dlo) * (O.dec < dhi)) print('Median E(B-V) in DECaLS area:', np.median(O.ebv_med[J])) print('Median extinction in DECaLS 
area, %s band:' % band, np.median(extinction[J])) I2 = np.flatnonzero((O.get('%s_expnum' % band) > 0) * (O.get('%s_depth' % band) == 30) * (O.get('%s_done' % band) == 1) * (O.in_desi == 1)) print(len(I2), 'with EXPNUM and DONE and IN_DESI, but no DEPTH') # Sort by expnum I2 = I2[np.argsort(O.get('%s_expnum' % band)[I2])] print('Exposure numbers:', sorted(O.get('%s_expnum' % band)[I2])) print('Dates:', sorted(O.get('%s_date' % band)[I2])) print('Dates:', np.unique(O.get('%s_date' % band)[I2])) for i in I2: print(' date', O.get('%s_date' % band)[i], 'expnum', O.get('%s_expnum' % band)[i]) # for i2,o in zip(I2, O[I2]): # print() # e = o.get('%s_expnum' % band) # print(' Expnum', e, 'orig galdepth', orig_galdepth[i2]) # date = o.get('%s_date' % band) # print(' Date', date) # print(' Pass', o.get('pass'), 'Tile', o.tileid) # jj = np.flatnonzero(allccds.expnum == e) # print(' In DESI:', o.in_desi, 'In DES:', o.in_des) # print(' ', len(jj), 'matching CCDs') # if len(jj) == 0: # continue # print(' CCDs OBJECT', [ob.strip() for ob in allccds.object[jj]]) # print(' CCDs Tileid', allccds.tileid[jj]) # print(' CCDs galdepth', allccds.galdepth[jj]) # print(' CCDs photometric', allccds.photometric[jj]) # # ii = np.flatnonzero(E.expnum == e) # print(' ', len(ii), 'Exposures matching') # if len(ii): # ee = E[ii[0]] # print(' exposure tileid', ee.tileid) # print(' index', tileid_to_index[ee.tileid]) # print(' vs i2=', i2) # print(' only_nonphot', only_nonphot[i2], 'missing_depth', missing_depth[i2]) # # kk = np.flatnonzero(allccds.tileid == o.tileid) # kk = np.array(sorted(set(kk) - set(jj))) # print(' ', len(kk), 'other CCDs of this tile') # #print('Dates:', O.get('%s_date' % band)[I]) from astrometry.util.plotutils import antigray rr, dd = np.meshgrid(np.linspace(rlo, rhi, 720), np.linspace(dlo, dhi, 360)) JJ, II, d = match_radec(rr.ravel(), dd.ravel(), O.ra, O.dec, 1.5, nearest=True) indesi = np.zeros(rr.shape, bool) indesi.flat[JJ] = ((O.in_desi[II] == 1) * (O.in_des[II] == 0)) plt.figure(figsize=(14, 6)) plt.subplots_adjust(left=0.1, right=0.99) for passnum in [1, 2, 3]: print('Pass', passnum) plt.clf() J = np.flatnonzero( (O.in_desi == 1) * (O.in_des == 0) * (O.dec > dlo) * (O.dec < dhi) * (O.get('pass') == passnum)) #plt.plot(O.ra[J], O.dec[J], 'k.', alpha=0.5) # Plot the gray background showing the in_desi footprint plt.imshow(indesi, extent=[rlo, rhi, dlo, dhi], vmin=0, vmax=4, cmap=antigray, aspect='auto', interpolation='nearest', origin='lower') depth = O.get('%s_depth' % band) #J = np.flatnonzero((O.get('pass') == passnum) * (depth > 0)) J = np.flatnonzero( (O.get('pass') == passnum) * (depth > 1) * (depth < 30)) # print('Depths:', depth[J]) pct = np.percentile(depth[J], [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]) #print('Depth deciles:', np.percentile(depth[J], [0,10,20,30,40,50,60,70,80,90,100])) print('Depth deciles: [', ', '.join(['%.3f' % f for f in pct]) + ']') if len(J) == 0: sys.exit(0) target = fid.single_exposure_depth print('Target depth:', target) cmap = cmap_discretize('RdBu', 11) dm = 0.275 plt.scatter(O.ra[J], O.dec[J], c=depth[J] - target, linewidths=0, cmap=cmap, vmin=-dm, vmax=+dm, zorder=-10, s=1) plt.colorbar(ticks=np.arange(-0.25, 0.251, 0.05)) hh, ww = rr.shape rgba = np.zeros((hh, ww, 4), np.float32) JJ, II, d = match_radec(rr.ravel(), dd.ravel(), O.ra[J], O.dec[J], 1., nearest=True) Jy, Jx = np.unravel_index(JJ, rr.shape) rgba[Jy, Jx, :] = cmap((np.clip(depth[J[II]] - target, -dm, dm) - (-dm)) / (dm - (-dm))) plt.imshow(rgba, extent=[rlo, rhi, dlo, dhi], aspect='auto', 
interpolation='nearest', origin='lower') I = np.flatnonzero((depth == 0) * (O.get('%s_done' % band) == 1) * (O.get('pass') == passnum)) plt.plot(O.ra[I], O.dec[I], 'g.') plt.title('Band %s, Pass %i' % (band, passnum)) plt.xlabel('RA (deg)') plt.ylabel('Dec (deg)') plt.axis([rhi, rlo, dlo, dhi]) plt.savefig('depth-%s-%i.png' % (band, passnum)) plt.clf() print('Fiducial single-exposure-depth:', fid.single_exposure_depth) for passnum in [1, 2, 3]: depth = O.get('%s_depth' % band) J = np.flatnonzero( (O.get('pass') == passnum) * (depth > 1) * (depth < 30)) depth = depth[J] print('Pass', passnum) print(sum(depth < fid.single_exposure_depth - 0.25), 'of', len(depth), 'tiles are more than 0.25 mag shallow') odepth = O.get('%s_depth' % band) K = np.flatnonzero( (O.get('%s_done' % band) == 0) * (O.get('pass') == passnum) * (odepth > 1) * (odepth < 30)) print(sum(odepth[K] < fid.single_exposure_depth - 0.25), 'of', len(odepth[K]), 'DONE=0 tiles are more than 0.25 mag shallow') for k in K: print(' EXPNUM', O.get('%s_expnum' % band)[k], 'DATE', O.get('%s_date' % band)[k], 'DEPTH', O.get('%s_depth' % band)[k]) K = np.flatnonzero( (O.get('%s_done' % band) == 1) * (O.get('pass') == passnum) * (odepth > 1) * (odepth < 30)) print(sum(odepth[K] < fid.single_exposure_depth - 0.25), 'of', len(odepth[K]), 'DONE=1 tiles are more than 0.25 mag shallow') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (O.get('pass') == passnum) * (odepth == 1)) print(len(K), 'DONE=1 tiles have DEPTH=1 (non-photometric)') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (O.get('pass') == passnum) * (odepth == 30)) print(len(K), 'DONE=1 tiles have DEPTH=30 (unknown depth)') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (O.get('pass') == passnum) * (odepth == 0)) print(len(K), 'DONE=1 tiles have DEPTH=0') K = np.flatnonzero((O.get('%s_done' % band) == 0) * (O.get('pass') == passnum) * (odepth != 0)) print(len(K), 'tiles have DONE=0 but DEPTH != 0') mlo, mhi = 21, 24 plt.hist(np.clip(depth, mlo, mhi), bins=100, range=(mlo, mhi), histtype='step', color=' bgr'[passnum], label='Pass %i' % passnum) plt.axvline(fid.single_exposure_depth, color='k') plt.axvline(fid.single_exposure_depth - 0.25, color='k', linestyle='--') plt.xlabel('Depth (mag)') plt.legend(loc='upper left') plt.title('Depth: %s' % band) plt.savefig('depth-%s.png' % band) for passnum in [1, 2, 3]: depth = O.get('%s_depth' % band) roi = ((O.in_desi == 1) * (O.in_des == 0) * (O.dec > dlo) * (O.dec < dhi) * (O.get('pass') == passnum)) J = np.flatnonzero(roi) done = np.flatnonzero(roi * (O.get('%s_done' % band) == 1)) redo = np.flatnonzero(roi * np.logical_or( (depth > 1) * (depth < 30) * (depth < fid.single_exposure_depth - 0.25), depth == 1)) print( 'Band %s, pass %i: total tiles %i, done %i, redo %i, keep %i' % (band, passnum, len(J), len(done), len(redo), len(done) - len(redo))) A = np.flatnonzero(roi * (depth > 1) * (depth < 30) * (depth > fid.single_exposure_depth - 0.25)) B = np.flatnonzero(roi * (depth > 1) * (depth < 30) * (depth <= fid.single_exposure_depth - 0.25)) C = np.flatnonzero(roi * (depth == 1)) D = np.flatnonzero(roi * (depth == 30)) print( 'Band %s, pass %i: total tiles: %i, A: %i, B: %i, C: %i, D: %i' % (band, passnum, len(J), len(A), len(B), len(C), len(D))) plt.clf() plt.plot(O.ra[J], O.dec[J], 'ko', alpha=0.1) plt.plot(O.ra[done], O.dec[done], 'k.') plt.plot(O.ra[redo], O.dec[redo], 'r.') plt.axis([360, 0, -20, 38]) plt.title('Tiles to redo: %s band, pass %i: %i of %i' % (band, passnum, len(redo), len(done))) plt.savefig('redo-%s-%i.png' % 
(band, passnum)) if band == 'z': redo = np.flatnonzero( (O.get('pass') == passnum) * np.logical_or( (depth > 1) * (depth < 30) * (depth < fid.single_exposure_depth - 0.5), depth == 1)) A = np.flatnonzero(roi * (depth > 1) * (depth < 30) * (depth > fid.single_exposure_depth - 0.5)) B = np.flatnonzero(roi * (depth > 1) * (depth < 30) * (depth <= fid.single_exposure_depth - 0.5)) print( 'Band %s, pass %i: total tiles: %i, A: %i, B: %i, C: %i, D: %i (shallow = 0.5 mag less than target)' % (band, passnum, len(J), len(A), len(B), len(C), len(D))) plt.clf() plt.plot(O.ra[J], O.dec[J], 'ko', alpha=0.1) plt.plot(O.ra[done], O.dec[done], 'k.') plt.plot(O.ra[redo], O.dec[redo], 'r.') plt.axis([360, 0, -20, 38]) plt.title( 'Tiles to redo (> 0.5 mag shallow): %s band, pass %i: %i of %i' % (band, passnum, len(redo), len(done))) plt.savefig('redo2-%s-%i.png' % (band, passnum)) print('Passes 1-3 combined:') depth = O.get('%s_depth' % band) J = np.flatnonzero((depth > 1) * (depth < 30)) depth = depth[J] print(sum(depth < fid.single_exposure_depth - 0.25), 'of', len(depth), 'tiles are more than 0.25 mag shallow') odepth = O.get('%s_depth' % band) K = np.flatnonzero( (O.get('%s_done' % band) == 0) * (odepth > 1) * (odepth < 30)) print(sum(odepth[K] < fid.single_exposure_depth - 0.25), 'of', len(odepth[K]), 'DONE=0 tiles are more than 0.25 mag shallow') for k in K: print(' EXPNUM', O.get('%s_expnum' % band)[k], 'DATE', O.get('%s_date' % band)[k], 'DEPTH', O.get('%s_depth' % band)[k]) K = np.flatnonzero( (O.get('%s_done' % band) == 1) * (odepth > 1) * (odepth < 30)) print(sum(odepth[K] < fid.single_exposure_depth - 0.25), 'of', len(odepth[K]), 'DONE=1 tiles are more than 0.25 mag shallow') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (odepth > 1) * (odepth < 30) * goodtiles) print(sum(odepth[K] < fid.single_exposure_depth - 0.25), 'of', len(odepth[K]), 'interesting DONE=1 tiles are more than 0.25 mag shallow') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (odepth == 1)) print(len(K), 'DONE=1 tiles have DEPTH=1 (non-photometric)') K = np.flatnonzero( (O.get('%s_done' % band) == 1) * (odepth == 1) * goodtiles) print(len(K), 'interesting DONE=1 tiles have DEPTH=1 (non-photometric)') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (odepth == 30)) print(len(K), 'DONE=1 tiles have DEPTH=30 (unknown depth)') K = np.flatnonzero( (O.get('%s_done' % band) == 1) * (odepth == 30) * goodtiles) print(len(K), 'interesting DONE=1 tiles have DEPTH=30 (unknown depth)') K = np.flatnonzero((O.get('%s_done' % band) == 1) * (odepth == 0)) print(len(K), 'DONE=1 tiles have DEPTH=0') K = np.flatnonzero( (O.get('%s_done' % band) == 1) * (odepth == 0) * goodtiles) print(len(K), 'interesting DONE=1 tiles have DEPTH=0') K = np.flatnonzero((O.get('%s_done' % band) == 0) * (odepth != 0)) print(len(K), 'tiles have DONE=0 but DEPTH != 0') O.writeto(out_fn)
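# The depth bookkeeping above relies on detection inverse variance, not
# magnitude, being the quantity that adds over exposures.  A standalone sketch
# of the conversions the script uses (galdepth is a 5-sigma depth in AB mag,
# with the nanomaggy zeropoint of 22.5):

import numpy as np

def depth_to_detiv(depth_mag):
    # 5-sigma depth in AB mag -> 5-sigma flux in nanomaggies; its inverse
    # square co-adds over exposures like an inverse variance.
    detsig = 10.0 ** ((22.5 - depth_mag) / 2.5)
    return 1.0 / detsig ** 2

def detiv_to_depth(detiv):
    detsig = np.sqrt(1.0 / detiv)
    return 22.5 - 2.5 * np.log10(detsig)

# Two equal-depth exposures co-add to 2.5*log10(sqrt(2)) ~ 0.38 mag deeper.
print(detiv_to_depth(2 * depth_to_detiv(22.5)))   # ~22.88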
def main(survey=None, opt=None, args=None): '''Driver function for forced photometry of individual Legacy Survey images. ''' if args is None: args = sys.argv[1:] print('forced_photom.py', ' '.join(args)) if opt is None: parser = get_parser() opt = parser.parse_args(args) import logging if opt.verbose == 0: lvl = logging.INFO else: lvl = logging.DEBUG logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout) # tractor logging is *soooo* chatty logging.getLogger('tractor.engine').setLevel(lvl + 10) t0 = Time() if survey is None: survey = LegacySurveyData(survey_dir=opt.survey_dir, cache_dir=opt.cache_dir, output_dir=opt.out_dir) if opt.skip: if opt.out is not None: outfn = opt.out else: outfn = survey.find_file('forced', output=True, camera=opt.camera, expnum=opt.expnum) if os.path.exists(outfn): print('Ouput file exists:', outfn) return 0 if opt.derivs and opt.agn: print('Sorry, can\'t do --derivs AND --agn') return -1 if opt.out is None and opt.out_dir is None: print('Must supply either --out or --out-dir') return -1 if opt.expnum is None and opt.out is None: print('If no --expnum is given, must supply --out filename') return -1 if not opt.forced: opt.apphot = True zoomslice = None if opt.zoom is not None: (x0, x1, y0, y1) = opt.zoom zoomslice = (slice(y0, y1), slice(x0, x1)) ps = None if opt.plots is not None: from astrometry.util.plotutils import PlotSequence ps = PlotSequence(opt.plots) # Cache CCDs files before the find_ccds call... # Copy required files into the cache? if opt.pre_cache: def copy_files_to_cache(fns): for fn in fns: cachefn = fn.replace(survey.survey_dir, survey.cache_dir) if not cachefn.startswith(survey.cache_dir): print('Skipping', fn) continue outdir = os.path.dirname(cachefn) trymakedirs(outdir) print('Copy', fn) print(' to', cachefn) shutil.copyfile(fn, cachefn) assert (survey.cache_dir is not None) fnset = set() fn = survey.find_file('bricks') fnset.add(fn) fns = survey.find_file('ccd-kds') fnset.update(fns) copy_files_to_cache(fnset) # Read metadata from survey-ccds.fits table ccds = survey.find_ccds(camera=opt.camera, expnum=opt.expnum, ccdname=opt.ccdname) print(len(ccds), 'with camera', opt.camera, 'and expnum', opt.expnum, 'and ccdname', opt.ccdname) # sort CCDs ccds.cut(np.lexsort((ccds.ccdname, ccds.expnum, ccds.camera))) # If there is only one catalog survey_dir, we pass it to get_catalog_in_wcs # as the northern survey. catsurvey_north = survey catsurvey_south = None if opt.catalog_dir_north is not None: assert (opt.catalog_dir_south is not None) assert (opt.catalog_resolve_dec_ngc is not None) catsurvey_north = LegacySurveyData(survey_dir=opt.catalog_dir_north) catsurvey_south = LegacySurveyData(survey_dir=opt.catalog_dir_south) elif opt.catalog_dir is not None: catsurvey_north = LegacySurveyData(survey_dir=opt.catalog_dir) # Copy required CCD & calib files into the cache? 
if opt.pre_cache: assert (survey.cache_dir is not None) fnset = set() for ccd in ccds: im = survey.get_image_object(ccd) for key in im.get_cacheable_filename_variables(): fn = getattr(im, key) if fn is None or not (os.path.exists(fn)): continue fnset.add(fn) copy_files_to_cache(fnset) args = [] for ccd in ccds: args.append((survey, catsurvey_north, catsurvey_south, opt.catalog_resolve_dec_ngc, ccd, opt, zoomslice, ps)) if opt.threads: from astrometry.util.multiproc import multiproc from astrometry.util.timingpool import TimingPool, TimingPoolMeas pool = TimingPool(opt.threads) poolmeas = TimingPoolMeas(pool, pickleTraffic=False) Time.add_measurement(poolmeas) mp = multiproc(None, pool=pool) tm = Time() FF = mp.map(bounce_one_ccd, args) print('Multi-processing forced-phot:', Time() - tm) del mp Time.measurements.remove(poolmeas) del poolmeas pool.close() pool.join() del pool else: FF = map(bounce_one_ccd, args) FF = [F for F in FF if F is not None] if len(FF) == 0: print('No photometry results to write.') return 0 # Keep only the first header _, version_hdr, _, _ = FF[0] # unpack results outlier_masks = [m for _, _, m, _ in FF] outlier_hdrs = [h for _, _, _, h in FF] FF = [F for F, _, _, _ in FF] F = merge_tables(FF) if len(ccds): version_hdr.delete('CPHDU') version_hdr.delete('CCDNAME') from legacypipe.utils import add_bits from legacypipe.bits import DQ_BITS add_bits(version_hdr, DQ_BITS, 'DQMASK', 'DQ', 'D') from legacyzpts.psfzpt_cuts import CCD_CUT_BITS add_bits(version_hdr, CCD_CUT_BITS, 'CCD_CUTS', 'CC', 'C') for i, ap in enumerate(apertures_arcsec): version_hdr.add_record( dict(name='APRAD%i' % i, value=ap, comment='(optical) Aperture radius, in arcsec')) unitmap = { 'exptime': 'sec', 'flux': 'nanomaggy', 'flux_ivar': '1/nanomaggy^2', 'apflux': 'nanomaggy', 'apflux_ivar': '1/nanomaggy^2', 'psfdepth': '1/nanomaggy^2', 'galdepth': '1/nanomaggy^2', 'sky': 'nanomaggy/arcsec^2', 'psfsize': 'arcsec', 'fwhm': 'pixels', 'ccdrarms': 'arcsec', 'ccddecrms': 'arcsec', 'ra': 'deg', 'dec': 'deg', 'skyrms': 'counts/sec', 'dra': 'arcsec', 'ddec': 'arcsec', 'dra_ivar': '1/arcsec^2', 'ddec_ivar': '1/arcsec^2' } columns = F.get_columns() order = [ 'release', 'brickid', 'brickname', 'objid', 'camera', 'expnum', 'ccdname', 'filter', 'mjd', 'exptime', 'psfsize', 'fwhm', 'ccd_cuts', 'airmass', 'sky', 'skyrms', 'psfdepth', 'galdepth', 'ccdzpt', 'ccdrarms', 'ccddecrms', 'ccdphrms', 'ra', 'dec', 'flux', 'flux_ivar', 'fracflux', 'rchisq', 'fracmasked', 'fracin', 'apflux', 'apflux_ivar', 'x', 'y', 'dqmask', 'dra', 'ddec', 'dra_ivar', 'ddec_ivar' ] columns = [c for c in order if c in columns] units = [unitmap.get(c, '') for c in columns] if opt.out is not None: outdir = os.path.dirname(opt.out) if len(outdir): trymakedirs(outdir) tmpfn = os.path.join(outdir, 'tmp-' + os.path.basename(opt.out)) fitsio.write(tmpfn, None, header=version_hdr, clobber=True) F.writeto(tmpfn, units=units, append=True, columns=columns) os.rename(tmpfn, opt.out) print('Wrote', opt.out) else: with survey.write_output('forced', camera=opt.camera, expnum=opt.expnum) as out: F.writeto(None, fits_object=out.fits, primheader=version_hdr, units=units, columns=columns) print('Wrote', out.real_fn) if opt.outlier_mask is not None: # Add outlier bit meanings to the primary header version_hdr.add_record( dict(name='COMMENT', value='Outlier mask bit meanings')) version_hdr.add_record( dict(name='OUTL_POS', value=1, comment='Outlier mask bit for Positive outlier')) version_hdr.add_record( dict(name='OUTL_NEG', value=2, comment='Outlier mask bit for 
Negative outlier')) if opt.outlier_mask == 'default': outdir = os.path.join(opt.out_dir, 'outlier-masks') camexp = set(zip(ccds.camera, ccds.expnum)) for c, e in camexp: I = np.flatnonzero((ccds.camera == c) * (ccds.expnum == e)) ccd = ccds[I[0]] imfn = ccd.image_filename.strip() outfn = os.path.join(outdir, imfn.replace('.fits', '-outlier.fits')) trymakedirs(outfn, dir=True) tempfn = outfn.replace('.fits', '-tmp.fits') with fitsio.FITS(tempfn, 'rw', clobber=True) as fits: fits.write(None, header=version_hdr) for i in I: mask = outlier_masks[i] _, _, _, meth, tile = survey.get_compression_args( 'outliers_mask', shape=mask.shape) fits.write(mask, header=outlier_hdrs[i], extname=ccds.ccdname[i], compress=meth, tile_dims=tile) os.rename(tempfn, outfn) print('Wrote', outfn) elif opt.outlier_mask is not None: with fitsio.FITS(opt.outlier_mask, 'rw', clobber=True) as F: F.write(None, header=version_hdr) for i, (hdr, mask) in enumerate(zip(outlier_hdrs, outlier_masks)): _, _, _, meth, tile = survey.get_compression_args( 'outliers_mask', shape=mask.shape) F.write(mask, header=hdr, extname=ccds.ccdname[i], compress=meth, tile_dims=tile) print('Wrote', opt.outlier_mask) tnow = Time() print('Total:', tnow - t0) return 0
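# The driver above (and run_one_brick below) writes FITS outputs through a
# temporary file plus os.rename, so a crash never leaves a truncated table
# under the final name.  A minimal sketch of that pattern, using the same
# fitsio / fits_table calls as the code above (the filename is a placeholder):
import os
import fitsio

def write_table_atomically(F, outfn, version_hdr, units=None, columns=None):
    # Primary HDU carries the version header; the table is appended after it.
    outdir = os.path.dirname(outfn)
    if len(outdir):
        os.makedirs(outdir, exist_ok=True)
    tmpfn = os.path.join(outdir, 'tmp-' + os.path.basename(outfn))
    fitsio.write(tmpfn, None, header=version_hdr, clobber=True)
    F.writeto(tmpfn, append=True, units=units, columns=columns)
    os.rename(tmpfn, outfn)   # atomic within the same filesystem
    print('Wrote', outfn)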
sys.exit(0) survey = LegacySurveyData() ccds = survey.get_ccds_readonly() #ccds = ccds[np.abs(ccds.mjd_obs - 57444) < 7.] #print(len(ccds), 'CCDs near mjd') ccds.cut(ccds.ccdname == 'N4') print(len(ccds), 'exposures') print('bands:', np.unique(ccds.filter)) ## HACK np.random.seed(44) # Alternate 'oki' and 'ooi' images... oki = np.array(['oki' in ccd.image_filename for ccd in ccds]) I1 = np.flatnonzero(oki) I2 = np.flatnonzero(oki == False) print(len(I1), 'oki images')
matplotlib.use('Agg') import pylab as plt import numpy as np from legacypipe.survey import LegacySurveyData from legacyanalysis.gaiacat import GaiaCatalog from legacypipe.survey import GaiaSource, GaiaPosition from astrometry.util.util import Tan from astrometry.util.starutil_numpy import mjdtodate from tractor import TAITime #ra,dec = 357.3060, 2.3957 #ccd1 = ccds[(ccds.expnum == 563212) * (ccds.ccdname == 'N17')] ra, dec = 124.0317, 1.3028 expnum, ccdname = 393203, 'N11' survey = LegacySurveyData() W, H = 200, 200 pixscale = 0.262 cd = pixscale / 3600. targetwcs = Tan(ra, dec, W / 2., H / 2., -cd, 0., 0., cd, float(W), float(H)) rr, dd = targetwcs.pixelxy2radec([1, W, W, 1, 1], [1, 1, H, H, 1]) targetrd = np.vstack((rr, dd)).T ccds = survey.ccds_touching_wcs(targetwcs) print(len(ccds), 'CCDs touching WCS') print('MJDs', ccds.mjd_obs) ccds.writeto('test-ccds.fits')
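# Quick sanity check on the hand-built WCS above: the CD matrix is set from
# pixscale/3600, so Tan.pixel_scale() should recover the 0.262"/pix scale.
# A minimal, self-contained sketch of the same construction:
from astrometry.util.util import Tan
pixscale = 0.262
cd = pixscale / 3600.
wcs = Tan(124.0317, 1.3028, 100., 100., -cd, 0., 0., cd, 200., 200.)
assert abs(wcs.pixel_scale() - pixscale) < 1e-6
print('Pixel scale: %.4f arcsec/pix' % wcs.pixel_scale())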
def main(survey=None, opt=None): '''Driver function for forced photometry of individual DECam images. ''' if opt is None: parser = get_parser() opt = parser.parse_args() Time.add_measurement(MemMeas) t0 = Time() if os.path.exists(opt.outfn): print('Ouput file exists:', opt.outfn) sys.exit(0) if not opt.forced: opt.apphot = True zoomslice = None if opt.zoom is not None: (x0,x1,y0,y1) = opt.zoom zoomslice = (slice(y0,y1), slice(x0,x1)) ps = None if opt.plots is not None: from astrometry.util.plotutils import PlotSequence ps = PlotSequence(opt.plots) # Try parsing filename as exposure number. try: expnum = int(opt.filename) opt.filename = None except: # make this 'None' for survey.find_ccds() expnum = None # Try parsing HDU number try: opt.hdu = int(opt.hdu) ccdname = None except: ccdname = opt.hdu opt.hdu = -1 if survey is None: survey = LegacySurveyData() if opt.filename is not None and opt.hdu >= 0: # Read metadata from file T = exposure_metadata([opt.filename], hdus=[opt.hdu]) print('Metadata:') T.about() else: # Read metadata from survey-ccds.fits table T = survey.find_ccds(expnum=expnum, ccdname=ccdname) print(len(T), 'with expnum', expnum, 'and CCDname', ccdname) if opt.hdu >= 0: T.cut(T.image_hdu == opt.hdu) print(len(T), 'with HDU', opt.hdu) if opt.filename is not None: T.cut(np.array([f.strip() == opt.filename for f in T.image_filename])) print(len(T), 'with filename', opt.filename) assert(len(T) == 1) ccd = T[0] im = survey.get_image_object(ccd) tim = im.get_tractor_image(slc=zoomslice, pixPsf=True, splinesky=True, constant_invvar=opt.constant_invvar) print('Got tim:', tim) print('Read image:', Time()-t0) if opt.catfn in ['DR1', 'DR2', 'DR3']: margin = 20 TT = [] chipwcs = tim.subwcs bricks = bricks_touching_wcs(chipwcs, survey=survey) for b in bricks: # there is some overlap with this brick... read the catalog. fn = survey.find_file('tractor', brick=b.brickname) if not os.path.exists(fn): print('WARNING: catalog', fn, 'does not exist. Skipping!') continue print('Reading', fn) T = fits_table(fn) ok,xx,yy = chipwcs.radec2pixelxy(T.ra, T.dec) W,H = chipwcs.get_width(), chipwcs.get_height() I = np.flatnonzero((xx >= -margin) * (xx <= (W+margin)) * (yy >= -margin) * (yy <= (H+margin))) T.cut(I) print('Cut to', len(T), 'sources within image + margin') # print('Brick_primary:', np.unique(T.brick_primary)) T.cut(T.brick_primary) print('Cut to', len(T), 'on brick_primary') T.cut((T.out_of_bounds == False) * (T.left_blob == False)) print('Cut to', len(T), 'on out_of_bounds and left_blob') if len(T): TT.append(T) if len(TT) == 0: print('No sources to photometer.') return 0 T = merge_tables(TT, columns='fillzero') T._header = TT[0]._header del TT # Fix up various failure modes: # FixedCompositeGalaxy(pos=RaDecPos[240.51147402832561, 10.385488075518923], brightness=NanoMaggies: g=(flux -2.87), r=(flux -5.26), z=(flux -7.65), fracDev=FracDev(0.60177207), shapeExp=re=3.78351e-44, e1=9.30367e-13, e2=1.24392e-16, shapeDev=re=inf, e1=-0, e2=-0) # -> convert to EXP I = np.flatnonzero(np.array([((t.type == 'COMP') and (not np.isfinite(t.shapedev_r))) for t in T])) if len(I): print('Converting', len(I), 'bogus COMP galaxies to EXP') for i in I: T.type[i] = 'EXP' # Same thing with the exp component. 
# -> convert to DEV I = np.flatnonzero(np.array([((t.type == 'COMP') and (not np.isfinite(t.shapeexp_r))) for t in T])) if len(I): print('Converting', len(I), 'bogus COMP galaxies to DEV') for i in I: T.type[i] = 'DEV' if opt.write_cat: T.writeto(opt.write_cat) print('Wrote catalog to', opt.write_cat) else: T = fits_table(opt.catfn) surveydir = survey.get_survey_dir() del survey cat = read_fits_catalog(T) # print('Got cat:', cat) print('Read catalog:', Time()-t0) print('Forced photom...') opti = None forced_kwargs = {} if opt.ceres: from tractor.ceres_optimizer import CeresOptimizer B = 8 opti = CeresOptimizer(BW=B, BH=B) #forced_kwargs.update(verbose=True) for src in cat: # Limit sizes of huge models from tractor.galaxy import ProfileGalaxy if isinstance(src, ProfileGalaxy): px,py = tim.wcs.positionToPixel(src.getPosition()) h = src._getUnitFluxPatchSize(tim, px, py, tim.modelMinval) MAXHALF = 128 if h > MAXHALF: print('halfsize', h,'for',src,'-> setting to',MAXHALF) src.halfsize = MAXHALF tr = Tractor([tim], cat, optimizer=opti) tr.freezeParam('images') for src in cat: src.freezeAllBut('brightness') src.getBrightness().freezeAllBut(tim.band) disable_galaxy_cache() F = fits_table() F.brickid = T.brickid F.brickname = T.brickname F.objid = T.objid F.filter = np.array([tim.band] * len(T)) F.mjd = np.array([tim.primhdr['MJD-OBS']] * len(T)) F.exptime = np.array([tim.primhdr['EXPTIME']] * len(T)).astype(np.float32) ok,x,y = tim.sip_wcs.radec2pixelxy(T.ra, T.dec) F.x = (x-1).astype(np.float32) F.y = (y-1).astype(np.float32) if opt.forced: if opt.plots is None: forced_kwargs.update(wantims=False) R = tr.optimize_forced_photometry(variance=True, fitstats=True, shared_params=False, priors=False, **forced_kwargs) if opt.plots: (data,mod,ie,chi,roi) = R.ims1[0] ima = tim.ima imchi = dict(interpolation='nearest', origin='lower', vmin=-5, vmax=5) plt.clf() plt.imshow(data, **ima) plt.title('Data: %s' % tim.name) ps.savefig() plt.clf() plt.imshow(mod, **ima) plt.title('Model: %s' % tim.name) ps.savefig() plt.clf() plt.imshow(chi, **imchi) plt.title('Chi: %s' % tim.name) ps.savefig() F.flux = np.array([src.getBrightness().getFlux(tim.band) for src in cat]).astype(np.float32) F.flux_ivar = R.IV.astype(np.float32) F.fracflux = R.fitstats.profracflux.astype(np.float32) F.rchi2 = R.fitstats.prochi2 .astype(np.float32) print('Forced photom:', Time()-t0) if opt.apphot: import photutils img = tim.getImage() ie = tim.getInvError() with np.errstate(divide='ignore'): imsigma = 1. / ie imsigma[ie == 0] = 0. apimg = [] apimgerr = [] # Aperture photometry locations xxyy = np.vstack([tim.wcs.positionToPixel(src.getPosition()) for src in cat]).T apxy = xxyy - 1. apertures = apertures_arcsec / tim.wcs.pixel_scale() print('Apertures:', apertures, 'pixels') for rad in apertures: aper = photutils.CircularAperture(apxy, rad) p = photutils.aperture_photometry(img, aper, error=imsigma) apimg.append(p.field('aperture_sum')) apimgerr.append(p.field('aperture_sum_err')) ap = np.vstack(apimg).T ap[np.logical_not(np.isfinite(ap))] = 0. F.apflux = ap.astype(np.float32) ap = 1./(np.vstack(apimgerr).T)**2 ap[np.logical_not(np.isfinite(ap))] = 0. F.apflux_ivar = ap.astype(np.float32) print('Aperture photom:', Time()-t0) program_name = sys.argv[0] version_hdr = get_version_header(program_name, surveydir) filename = getattr(ccd, 'image_filename') if filename is None: # HACK -- print only two directory names + filename of CPFILE. 
fname = os.path.basename(im.imgfn) d = os.path.dirname(im.imgfn) d1 = os.path.basename(d) d = os.path.dirname(d) d2 = os.path.basename(d) filename = os.path.join(d2, d1, fname) print('Trimmed filename to', filename) version_hdr.add_record(dict(name='CPFILE', value=filename, comment='CP file')) version_hdr.add_record(dict(name='CPHDU', value=im.hdu, comment='CP ext')) version_hdr.add_record(dict(name='CAMERA', value=ccd.camera, comment='Camera')) version_hdr.add_record(dict(name='EXPNUM', value=im.expnum, comment='Exposure num')) version_hdr.add_record(dict(name='CCDNAME', value=im.ccdname, comment='CCD name')) version_hdr.add_record(dict(name='FILTER', value=tim.band, comment='Bandpass of this image')) version_hdr.add_record(dict(name='EXPOSURE', value='%s-%s-%s' % (ccd.camera, im.expnum, im.ccdname), comment='Name of this image')) keys = ['TELESCOP','OBSERVAT','OBS-LAT','OBS-LONG','OBS-ELEV', 'INSTRUME'] for key in keys: if key in tim.primhdr: version_hdr.add_record(dict(name=key, value=tim.primhdr[key])) hdr = fitsio.FITSHDR() units = {'exptime':'sec', 'flux':'nanomaggy', 'flux_ivar':'1/nanomaggy^2'} columns = F.get_columns() for i,col in enumerate(columns): if col in units: hdr.add_record(dict(name='TUNIT%i' % (i+1), value=units[col])) outdir = os.path.dirname(opt.outfn) if len(outdir): trymakedirs(outdir) fitsio.write(opt.outfn, None, header=version_hdr, clobber=True) F.writeto(opt.outfn, header=hdr, append=True) print('Wrote', opt.outfn) if opt.save_model or opt.save_data: hdr = fitsio.FITSHDR() tim.getWcs().wcs.add_to_header(hdr) if opt.save_model: print('Getting model image...') mod = tr.getModelImage(tim) fitsio.write(opt.save_model, mod, header=hdr, clobber=True) print('Wrote', opt.save_model) if opt.save_data: fitsio.write(opt.save_data, tim.getImage(), header=hdr, clobber=True) print('Wrote', opt.save_data) print('Finished forced phot:', Time()-t0) return 0
bricks = args.bricks kwargs = dict(get_depth_maps=args.depth_maps) if args.margin is not None: kwargs.update(margin=args.margin) print('args:', bricks) if len(bricks) == 1 and bricks[0] == 'qdo': import qdo #... find Queue... qname = args.queue q = qdo.connect(qname) print('Connected to QDO queue', qname, q) survey = LegacySurveyData() while True: task = q.get(timeout=10) if task is None: break try: print('Task:', task.task) brickname = task.task print('Checking for existing out file') # shortcut dirnm = os.path.join('depthcuts', brickname[:3]) outfn = os.path.join(dirnm, 'ccds-%s.fits' % brickname) if os.path.exists(outfn): print('Exists:', outfn)
def main(): parser = argparse.ArgumentParser() parser.add_argument('--build-sample', action='store_true', help='Build the sample.') parser.add_argument('--jpg-cutouts', action='store_true', help='Get jpg cutouts from the viewer.') parser.add_argument('--ccd-cutouts', action='store_true', help='Get CCD cutouts of each galaxy.') parser.add_argument('--runbrick', action='store_true', help='Run the pipeline.') parser.add_argument('--build-webpage', action='store_true', help='(Re)build the web content.') args = parser.parse_args() # Top-level directory key = 'LEGACY_SURVEY_LARGE_GALAXIES' if key not in os.environ: print('Required ${} environment variable not set'.format(key)) return 0 largedir = os.getenv(key) samplefile = os.path.join(largedir, 'large-galaxies-sample.fits') # -------------------------------------------------- # Build the sample of large galaxies based on the available imaging. if args.build_sample: # Read the parent catalog. cat = read_rc3() # Create a simple WCS object for each object and find all the CCDs # touching that WCS footprint. survey = LegacySurveyData(version='dr2') # hack! allccds = survey.get_ccds() keep = np.concatenate((survey.apply_blacklist(allccds), survey.photometric_ccds(allccds))) allccds.cut(keep) ccdlist = [] outcat = [] for gal in cat: galwcs = _simplewcs(gal) ccds1 = allccds[ccds_touching_wcs(galwcs, allccds)] ccds1 = ccds1[_uniqccds(ccds1)] if len( ccds1 ) > 0 and 'g' in ccds1.filter and 'r' in ccds1.filter and 'z' in ccds1.filter: print('Found {} CCDs for {}, D(25)={:.4f}'.format( len(ccds1), gal['GALAXY'], gal['RADIUS'])) ccdsfile = os.path.join( largedir, 'ccds', '{}-ccds.fits'.format(gal['GALAXY'].strip().lower())) print(' Writing {}'.format(ccdsfile)) if os.path.isfile(ccdsfile): os.remove(ccdsfile) ccds1.writeto(ccdsfile) ccdlist.append(ccds1) if len(outcat) == 0: outcat = gal else: outcat = vstack((outcat, gal)) #if gal['GALAXY'] == 'MCG5-19-36': # pdb.set_trace() # Write out the final catalog. samplefile = os.path.join(largedir, 'large-galaxies-sample.fits') if os.path.isfile(samplefile): os.remove(samplefile) print('Writing {}'.format(samplefile)) outcat.write(samplefile) print(outcat) # Do we need to transfer any of the data to nyx? _getfiles(merge_tables(ccdlist)) # -------------------------------------------------- # Get data, model, and residual cutouts from the legacysurvey viewer. Also # get thumbnails that are lower resolution. if args.jpg_cutouts: thumbsize = 100 sample = fits.getdata(samplefile, 1) for gal in sample: size = np.ceil(10 * gal['RADIUS'] / PIXSCALE) thumbpixscale = PIXSCALE * size / thumbsize #imageurl = 'http://legacysurvey.org/viewer/jpeg-cutout-decals-dr2?ra={:.6f}&dec={:.6f}'.format(gal['RA'], gal['DEC'])+\ # '&pixscale={:.3f}&size={:g}'.format(PIXSCALE, size) #imagejpg = os.path.join(largedir, 'cutouts', gal['GALAXY'].strip().lower()+'-image.jpg') #if os.path.isfile(imagejpg): # os.remove(imagejpg) #os.system('wget --continue -O {:s} "{:s}"' .format(imagejpg, imageurl)) thumburl = 'http://legacysurvey.org/viewer/jpeg-cutout-decals-dr2?ra={:.6f}&dec={:.6f}'.format(gal['RA'], gal['DEC'])+\ '&pixscale={:.3f}&size={:g}'.format(thumbpixscale, thumbsize) thumbjpg = os.path.join( largedir, 'cutouts', gal['GALAXY'].strip().lower() + '-image-thumb.jpg') if os.path.isfile(thumbjpg): os.remove(thumbjpg) os.system('wget --continue -O {:s} "{:s}"'.format( thumbjpg, thumburl)) # -------------------------------------------------- # (Re)build the webpage. 
if args.build_webpage: # index.html html = open(os.path.join(largedir, 'index.html'), 'w') html.write('<html><body>\n') html.write('<h1>Sample of Large Galaxies</h1>\n') html.write('<table border="2" width="30%">\n') html.write('<tbody>\n') sample = fits.getdata(samplefile, 1) for gal in sample: # Add coordinates and sizes here. galaxy = gal['GALAXY'].strip().lower() html.write('<tr>\n') html.write('<td><a href="html/{}.html">{}</a></td>\n'.format( galaxy, galaxy.upper())) html.write( '<td><a href="http://legacysurvey.org/viewer/?ra={:.6f}&dec={:.6f}" target="_blank"><img src=cutouts/{}-image-thumb.jpg alt={} /></a></td>\n' .format(gal['RA'], gal['DEC'], galaxy, galaxy.upper())) # html.write('<td><a href="html/{}.html"><img src=cutouts/{}-image-thumb.jpg alt={} /></a></td>\n'.format(galaxy, galaxy, galaxy.upper())) html.write('</tr>\n') html.write('</tbody>\n') html.write('</table>\n') html.write('</body></html>\n') html.close() sys.exit(1) # individual galaxy pages for gal in sample[:3]: galaxy = gal['GALAXY'].strip().lower() html = open(os.path.join(largedir, 'html/{}.html'.format(galaxy)), 'w') html.write('<html><body>\n') html.write( '<a href=../cutouts/{}.jpg><img src=../cutouts/{}-image.jpg alt={} /></a>\n' .format(galaxy, galaxy, galaxy, galaxy.upper())) html.write('</body></html>\n') html.close() # -------------------------------------------------- # Get cutouts of all the CCDs for each galaxy. if args.ccd_cutouts: sample = fits.getdata(samplefile, 1) for gal in sample[1:2]: galaxy = gal['GALAXY'].strip().lower() ccdsfile = os.path.join(largedir, 'ccds', '{}-ccds.fits'.format(galaxy)) ccds = fits.getdata(ccdsfile) pdb.set_trace() # -------------------------------------------------- # Run the pipeline. if args.runbrick: sample = fits.getdata(samplefile, 1) for gal in sample[1:2]: galaxy = gal['GALAXY'].strip().lower() diam = 10 * np.ceil(gal['RADIUS'] / PIXSCALE).astype( 'int16') # [pixels] # Note: zoom is relative to the center of an imaginary brick with # dimensions (0, 3600, 0, 3600). survey = LegacySurveyData(version='dr2', output_dir=largedir) run_brick(None, survey, radec=(gal['RA'], gal['DEC']), blobxy=zip([diam / 2], [diam / 2]), threads=1, zoom=(1800 - diam / 2, 1800 + diam / 2, 1800 - diam / 2, 1800 + diam / 2), wise=False, forceAll=True, writePickles=False, do_calibs=False, write_metrics=False, pixPsf=True, splinesky=True, early_coadds=True, stages=['writecat'], ceres=False) pdb.set_trace()
def run_one_brick(X): brick, ibrick, nbricks, plots, kwargs = X survey = LegacySurveyData() print() print() print('Brick', (ibrick + 1), 'of', nbricks, ':', brick.brickname) dirnm = os.path.join('depthcuts', brick.brickname[:3]) outfn = os.path.join(dirnm, 'ccds-%s.fits' % brick.brickname) if os.path.exists(outfn): print('Exists:', outfn) return 0 H, W = 3600, 3600 pixscale = 0.262 bands = ['g', 'r', 'z'] # Get WCS object describing brick targetwcs = wcs_for_brick(brick, W=W, H=H, pixscale=pixscale) targetrd = np.array([ targetwcs.pixelxy2radec(x, y) for x, y in [(1, 1), (W, 1), (W, H), (1, H), (1, 1)] ]) gitver = get_git_version() ccds = survey.ccds_touching_wcs(targetwcs) if ccds is None: print('No CCDs actually touching brick') return 0 print(len(ccds), 'CCDs actually touching brick') ccds.cut(np.in1d(ccds.filter, bands)) print('Cut on filter:', len(ccds), 'CCDs remain.') if 'ccd_cuts' in ccds.get_columns(): norig = len(ccds) ccds.cut(ccds.ccd_cuts == 0) print(len(ccds), 'of', norig, 'CCDs pass cuts') else: print('No CCD cuts') if len(ccds) == 0: print('No CCDs left') return 0 ps = None if plots: from astrometry.util.plotutils import PlotSequence ps = PlotSequence('depth-%s' % brick.brickname) splinesky = True gaussPsf = False pixPsf = True do_calibs = False normalizePsf = True get_depth_maps = kwargs.pop('get_depth_maps', False) try: D = make_depth_cut(survey, ccds, bands, targetrd, brick, W, H, pixscale, plots, ps, splinesky, gaussPsf, pixPsf, normalizePsf, do_calibs, gitver, targetwcs, get_depth_maps=get_depth_maps, **kwargs) if get_depth_maps: keep, overlapping, depthmaps = D else: keep, overlapping = D except: print('Failed to make_depth_cut():') import traceback traceback.print_exc() return -1 print(np.sum(overlapping), 'CCDs overlap the brick') print(np.sum(keep), 'CCDs passed depth cut') ccds.overlapping = overlapping ccds.passed_depth_cut = keep if not os.path.exists(dirnm): try: os.makedirs(dirnm) except: pass if get_depth_maps: for band, depthmap in depthmaps: doutfn = os.path.join(dirnm, 'depth-%s-%s.fits' % (brick.brickname, band)) hdr = fitsio.FITSHDR() # Plug the WCS header cards into these images targetwcs.add_to_header(hdr) hdr.delete('IMAGEW') hdr.delete('IMAGEH') hdr.add_record(dict(name='EQUINOX', value=2000.)) hdr.add_record(dict(name='FILTER', value=band)) fitsio.write(doutfn, depthmap, header=hdr) print('Wrote', doutfn) tmpfn = os.path.join(os.path.dirname(outfn), 'tmp-' + os.path.basename(outfn)) ccds.writeto(tmpfn) os.rename(tmpfn, outfn) print('Wrote', outfn) return 0
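# The per-band depth maps written above embed the brick WCS cards plus
# EQUINOX and FILTER in their headers.  A minimal sketch of reading one back
# (the brick name and band in the path are hypothetical):
import fitsio
fn = 'depthcuts/118/depth-1183p292-g.fits'
depthmap, hdr = fitsio.read(fn, header=True)
print(depthmap.shape, 'band', hdr['FILTER'])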
def psf_residuals(expnum, ccdname, stampsize=35, nstar=30, magrange=(13, 17), verbose=0, splinesky=False): # Set the debugging level. if verbose == 0: lvl = logging.INFO else: lvl = logging.DEBUG logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout) pngprefix = 'qapsf-{}-{}'.format(expnum, ccdname) # Gather all the info we need about this CCD. survey = LegacySurveyData() ccd = survey.find_ccds(expnum=expnum, ccdname=ccdname)[0] band = ccd.filter ps1band = dict(g=0, r=1, i=2, z=3, Y=4) print('Band {}'.format(band)) #scales = dict(g=0.0066, r=0.01, z=0.025) #vmin, vmax = np.arcsinh(-1), np.arcsinh(100) #print(scales[band]) im = survey.get_image_object(ccd) iminfo = im.get_image_info() H, W = iminfo['dims'] wcs = im.get_wcs() # Choose a uniformly selected subset of PS1 stars on this CCD. ps1 = ps1cat(ccdwcs=wcs) cat = ps1.get_stars(band=band, magrange=magrange) rand = np.random.RandomState(seed=expnum * ccd.ccdnum) these = rand.choice(len(cat) - 1, nstar, replace=False) #these = rand.random_integers(0,len(cat)-1,nstar) cat = cat[these] cat = cat[np.argsort(cat.median[:, ps1band[band]])] # sort by magnitude #print(cat.nmag_ok) get_tim_kwargs = dict(pixPsf=True, splinesky=splinesky) # Make a QAplot of the positions of all the stars. tim = im.get_tractor_image(**get_tim_kwargs) img = tim.getImage() #img = tim.getImage()/scales[band] fig = plt.figure(figsize=(5, 10)) ax = fig.gca() ax.get_xaxis().get_major_formatter().set_useOffset(False) #ax.imshow(np.arcsinh(img),cmap='gray',interpolation='nearest', # origin='lower',vmin=vmax,vmax=vmax) ax.imshow(img, **tim.ima) ax.axis('off') ax.set_title('{}: {}/{} AM={:.2f} Seeing={:.3f}"'.format( band, expnum, ccdname, ccd.airmass, ccd.seeing)) for istar, ps1star in enumerate(cat): ra, dec = (ps1star.ra, ps1star.dec) ok, xpos, ypos = wcs.radec2pixelxy(ra, dec) ax.text(xpos, ypos, '{:2d}'.format(istar + 1), color='red', horizontalalignment='left') circ = plt.Circle((xpos, ypos), radius=30, color='g', fill=False, lw=1) ax.add_patch(circ) #radec = wcs.radec_bounds() #ax.scatter(cat.ra,cat.dec) #ax.set_xlim([radec[1],radec[0]])#*[1.0002,0.9998]) #ax.set_ylim([radec[2],radec[3]])#*[0.985,1.015]) #ax.set_xlabel('$RA\ (deg)$',fontsize=18) #ax.set_ylabel('$Dec\ (deg)$',fontsize=18) fig.savefig(pngprefix + '-ccd.png', bbox_inches='tight') # Initialize the many-stamp QAplot ncols = 3 nrows = np.ceil(nstar / ncols).astype('int') inchperstamp = 2.0 fig = plt.figure(figsize=(inchperstamp * 3 * ncols, inchperstamp * nrows)) irow = 0 icol = 0 for istar, ps1star in enumerate(cat): ra, dec = (ps1star.ra, ps1star.dec) mag = ps1star.median[ps1band[band]] # r-band ok, xpos, ypos = wcs.radec2pixelxy(ra, dec) ix, iy = int(xpos), int(ypos) # create a little tractor Image object around the star slc = (slice(max(iy - stampsize, 0), min(iy + stampsize + 1, H)), slice(max(ix - stampsize, 0), min(ix + stampsize + 1, W))) # The PSF model 'const2Psf' is the one used in DR1: a 2-component # Gaussian fit to PsfEx instantiated in the image center. tim = im.get_tractor_image(slc=slc, **get_tim_kwargs) stamp = tim.getImage() ivarstamp = tim.getInvvar() # Initialize a tractor PointSource from PS1 measurements flux = NanoMaggies.magToNanomaggies(mag) star = PointSource(RaDecPos(ra, dec), NanoMaggies(**{band: flux})) # Fit just the source RA,Dec,flux. 
tractor = Tractor([tim], [star]) tractor.freezeParam('images') print('2-component MOG:', tim.psf) tractor.printThawedParams() for step in range(50): dlnp, X, alpha = tractor.optimize() if dlnp < 0.1: break print('Fit:', star) model_mog = tractor.getModelImage(0) chi2_mog = -2.0 * tractor.getLogLikelihood() mag_mog = NanoMaggies.nanomaggiesToMag(star.brightness)[0] # Now change the PSF model to a pixelized PSF model from PsfEx instantiated # at this place in the image. psf = PixelizedPsfEx(im.psffn) tim.psf = psf.constantPsfAt(xpos, ypos) #print('PSF model:', tim.psf) #tractor.printThawedParams() for step in range(50): dlnp, X, alpha = tractor.optimize() if dlnp < 0.1: break print('Fit:', star) model_psfex = tractor.getModelImage(0) chi2_psfex = -2.0 * tractor.getLogLikelihood() mag_psfex = NanoMaggies.nanomaggiesToMag(star.brightness)[0] #mn, mx = np.percentile((stamp-model_psfex)[ivarstamp>0],[1,95]) sig = np.std((stamp - model_psfex)[ivarstamp > 0]) mn, mx = [-2.0 * sig, 5 * sig] # Generate a QAplot. if (istar > 0) and (istar % (ncols) == 0): irow = irow + 1 icol = 3 * istar - 3 * ncols * irow #print(istar, irow, icol, icol+1, icol+2) ax1 = plt.subplot2grid((nrows, 3 * ncols), (irow, icol), aspect='equal') ax1.axis('off') #ax1.imshow(stamp, **tim.ima) ax1.imshow(stamp, cmap='gray', interpolation='nearest', origin='lower', vmin=mn, vmax=mx) ax1.text(0.1, 0.9, '{:2d}'.format(istar + 1), color='white', horizontalalignment='left', verticalalignment='top', transform=ax1.transAxes) ax2 = plt.subplot2grid((nrows, 3 * ncols), (irow, icol + 1), aspect='equal') ax2.axis('off') #ax2.imshow(stamp-model_mog, **tim.ima) ax2.imshow(stamp - model_mog, cmap='gray', interpolation='nearest', origin='lower', vmin=mn, vmax=mx) ax2.text(0.1, 0.9, 'MoG', color='white', horizontalalignment='left', verticalalignment='top', transform=ax2.transAxes) ax2.text(0.08, 0.08, '{:.3f}'.format(mag_mog), color='white', horizontalalignment='left', verticalalignment='bottom', transform=ax2.transAxes) #ax2.set_title('{:.3f}, {:.2f}'.format(mag_psfex,chi2_psfex),fontsize=14) #ax2.set_title('{:.3f}, $\chi^{2}$={:.2f}'.format(mag_psfex,chi2_psfex)) ax3 = plt.subplot2grid((nrows, 3 * ncols), (irow, icol + 2), aspect='equal') ax3.axis('off') #ax3.imshow(stamp-model_psfex, **tim.ima) ax3.imshow(stamp - model_psfex, cmap='gray', interpolation='nearest', origin='lower', vmin=mn, vmax=mx) ax3.text(0.1, 0.9, 'PSFEx', color='white', horizontalalignment='left', verticalalignment='top', transform=ax3.transAxes) ax3.text(0.08, 0.08, '{:.3f}'.format(mag_psfex), color='white', horizontalalignment='left', verticalalignment='bottom', transform=ax3.transAxes) if istar == (nstar - 1): break fig.savefig(pngprefix + '-stargrid.png', bbox_inches='tight')
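# The chi^2 values above come from -2 * tractor.getLogLikelihood().  As a
# cross-check, essentially the same quantity can be accumulated directly from
# the stamp, model, and inverse variance inside the per-star loop (a sketch;
# ignores any constant terms the optimizer may drop):
import numpy as np

def direct_chi2(stamp, model, ivar):
    # Sum of squared, inverse-variance-weighted residuals over unmasked pixels.
    return np.sum((stamp - model)**2 * ivar)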
def main(): """Main program. """ import argparse parser = argparse.ArgumentParser( description= "This script is used to produce lists of CCDs or bricks, for production purposes (building qdo queue, eg)." ) parser.add_argument('--calibs', action='store_true', help='Output CCDs that need to be calibrated.') parser.add_argument('--nper', type=int, default=None, help='Batch N calibs per line') parser.add_argument( '--byexp', action='store_true', default=False, help='Run one whole exposure per job (not one CCD per job)') parser.add_argument('--forced', action='store_true', help='Output forced-photometry commands') parser.add_argument('--lsb', action='store_true', help='Output Low-Surface-Brightness commands') parser.add_argument('--stage', help='Stage image files to given directory') parser.add_argument('--touching', action='store_true', help='Cut to only CCDs touching selected bricks') parser.add_argument('--near', action='store_true', help='Quick cut to only CCDs near selected bricks') parser.add_argument('--check-coadd', action='store_true', help='Check which coadds actually need to run.') parser.add_argument('--out', help='Output filename for calibs, default %(default)s', default='jobs') parser.add_argument('--command', action='store_true', help='Write out full command-line to run calib') parser.add_argument('--opt', help='With --command, extra options to add') parser.add_argument('--maxra', type=float, help='Maximum RA to run') parser.add_argument('--minra', type=float, help='Minimum RA to run') parser.add_argument('--maxdec', type=float, help='Maximum Dec to run') parser.add_argument('--mindec', type=float, help='Minimum Dec to run') parser.add_argument('--region', help='Region to select') parser.add_argument('--bricks', help='Set bricks.fits file to load') parser.add_argument('--ccds', help='Set ccds.fits file to load') parser.add_argument('--ignore_cuts', action='store_true', default=False, help='no photometric cuts') parser.add_argument('--save_to_fits', action='store_true', default=False, help='save cut brick,ccd to fits table') parser.add_argument( '--name', action='store', default='dr3', help='save with this suffix, e.g. 
refers to ccds table') parser.add_argument('--delete-sky', action='store_true', help='Delete any existing sky calibration files') parser.add_argument('--write-ccds', help='Write CCDs list as FITS table?') parser.add_argument('--nccds', action='store_true', default=False, help='Prints number of CCDs per brick') parser.add_argument('--bands', default='g,r,z', help='Set bands to keep: comma-separated list.') opt = parser.parse_args() want_ccds = (opt.calibs or opt.forced or opt.lsb) want_bricks = not want_ccds survey = LegacySurveyData() if opt.bricks is not None: B = fits_table(opt.bricks) log('Read', len(B), 'from', opt.bricks) else: B = survey.get_bricks() log('Bricks Dec range:', B.dec.min(), B.dec.max()) if opt.ccds is not None: T = fits_table(opt.ccds) log('Read', len(T), 'from', opt.ccds) else: T = survey.get_ccds() log(len(T), 'CCDs') T.index = np.arange(len(T)) if opt.ignore_cuts == False: log('Applying CCD cuts...') if 'ccd_cuts' in T.columns(): T.cut(T.ccd_cuts == 0) log(len(T), 'CCDs survive cuts') bands = opt.bands.split(',') log('Filters:', np.unique(T.filter)) T.cut(np.flatnonzero(np.array([f in bands for f in T.filter]))) log('Cut to', len(T), 'CCDs in filters', bands) log('CCDs Dec range:', T.dec.min(), T.dec.max()) # I,J,d,counts = match_radec(B.ra, B.dec, T.ra, T.dec, 0.2, nearest=True, count=True) # plt.clf() # plt.hist(counts, counts.max()+1) # plt.savefig('bricks.png') # B.cut(I[counts >= 9]) # plt.clf() # plt.plot(B.ra, B.dec, 'b.') # #plt.scatter(B.ra[I], B.dec[I], c=counts) # plt.savefig('bricks2.png') # DES Stripe82 #rlo,rhi = 350.,360. # rlo,rhi = 300., 10. # dlo,dhi = -6., 4. # TINY bit #rlo,rhi = 350.,351.1 #dlo,dhi = 0., 1.1 # EDR+ # 860 bricks # ~10,000 CCDs #rlo,rhi = 239,246 #dlo,dhi = 5, 13 # DR1 #rlo,rhi = 0, 360 # part 1 #dlo,dhi = 25, 40 # part 2 #dlo,dhi = 20,25 # part 3 #dlo,dhi = 15,20 # part 4 #dlo,dhi = 10,15 # part 5 #dlo,dhi = 5,10 # the rest #dlo,dhi = -11, 5 #dlo,dhi = 15,25.5 dlo, dhi = -25, 40 rlo, rhi = 0, 360 # Arjun says 3x3 coverage area is roughly # RA=240-252 DEC=6-12 (but not completely rectangular) # COSMOS #rlo,rhi = 148.9, 151.2 #dlo,dhi = 0.9, 3.5 # A nice well-behaved region (EDR2/3) # rlo,rhi = 243.6, 244.6 # dlo,dhi = 8.1, 8.6 # 56 bricks, ~725 CCDs #B.cut((B.ra > 240) * (B.ra < 242) * (B.dec > 5) * (B.dec < 7)) # 240 bricks, ~3000 CCDs #B.cut((B.ra > 240) * (B.ra < 244) * (B.dec > 5) * (B.dec < 9)) # 535 bricks, ~7000 CCDs #B.cut((B.ra > 240) * (B.ra < 245) * (B.dec > 5) * (B.dec < 12)) if opt.region in ['test1', 'test2', 'test3', 'test4']: nm = dict( test1='2446p115', # weird stuff around bright star test2='1183p292', # faint sources around bright galaxy test3='3503p005', # DES test4='1163p277', # Pollux )[opt.region] B.cut(np.flatnonzero(np.array([s == nm for s in B.brickname]))) log('Cut to', len(B), 'bricks') log(B.ra, B.dec) dlo, dhi = -90, 90 rlo, rhi = 0, 360 elif opt.region == 'edr': # EDR: # 535 bricks, ~7000 CCDs rlo, rhi = 240, 245 dlo, dhi = 5, 12 elif opt.region == 'dr8-decam': rlo, rhi = 0, 360 dlo, dhi = -70, 40 log('DR8-DECam region') elif opt.region == 'edrplus': rlo, rhi = 235, 248 dlo, dhi = 5, 15 elif opt.region == 'edr-south': rlo, rhi = 240, 245 dlo, dhi = 5, 10 elif opt.region == 'cosmos1': # 16 bricks in the core of the COSMOS field. rlo, rhi = 149.75, 150.75 dlo, dhi = 1.6, 2.6 elif opt.region == 'pristine': # Stream? rlo, rhi = 240, 250 dlo, dhi = 10, 15 elif opt.region == 'des': dlo, dhi = -6., 4. rlo, rhi = 317., 7. 
T.cut(np.flatnonzero(np.array(['CPDES82' in fn for fn in T.cpimage]))) log('Cut to', len(T), 'CCDs with "CPDES82" in filename') elif opt.region == 'subdes': rlo, rhi = 320., 360. dlo, dhi = -1.25, 1.25 elif opt.region == 'northwest': rlo, rhi = 240, 360 dlo, dhi = 20, 40 elif opt.region == 'north': rlo, rhi = 120, 240 dlo, dhi = 20, 40 elif opt.region == 'northeast': rlo, rhi = 0, 120 dlo, dhi = 20, 40 elif opt.region == 'southwest': rlo, rhi = 240, 360 dlo, dhi = -20, 0 elif opt.region == 'south': rlo, rhi = 120, 240 dlo, dhi = -20, 0 elif opt.region == 'southeast': rlo, rhi = 0, 120 dlo, dhi = -20, 0 elif opt.region == 'southsoutheast': rlo, rhi = 0, 120 dlo, dhi = -20, -10 elif opt.region == 'midwest': rlo, rhi = 240, 360 dlo, dhi = 0, 20 elif opt.region == 'middle': rlo, rhi = 120, 240 dlo, dhi = 0, 20 elif opt.region == 'mideast': rlo, rhi = 0, 120 dlo, dhi = 0, 20 elif opt.region == 'grz': # Bricks with grz coverage. # Be sure to use --bricks survey-bricks-in-dr1.fits # which has_[grz] columns. B.cut((B.has_g == 1) * (B.has_r == 1) * (B.has_z == 1)) log('Cut to', len(B), 'bricks with grz coverage') elif opt.region == 'nogrz': # Bricks without grz coverage. # Be sure to use --bricks survey-bricks-in-dr1.fits # which has_[grz] columns. B.cut(np.logical_not((B.has_g == 1) * (B.has_r == 1) * (B.has_z == 1))) log('Cut to', len(B), 'bricks withOUT grz coverage') elif opt.region == 'deep2': rlo, rhi = 250, 260 dlo, dhi = 30, 35 elif opt.region == 'deep2f2': rlo, rhi = 251.4, 254.4 dlo, dhi = 34.6, 35.3 elif opt.region == 'deep2f3': rlo, rhi = 351.25, 353.75 dlo, dhi = 0, 0.5 elif opt.region == 'deep3': rlo, rhi = 214, 216 dlo, dhi = 52.25, 53.25 elif opt.region == 'virgo': rlo, rhi = 185, 190 dlo, dhi = 10, 15 elif opt.region == 'virgo2': rlo, rhi = 182, 192 dlo, dhi = 8, 18 elif opt.region == 'coma': # van Dokkum et al Coma cluster ultra-diffuse galaxies: 3x3 field centered on Coma cluster rc, dc = 195., 28. dd = 1.5 cosdec = np.cos(np.deg2rad(dc)) rlo, rhi = rc - dd / cosdec, rc + dd / cosdec dlo, dhi = dc - dd, dc + dd elif opt.region == 'lsb': rlo, rhi = 147.2, 147.8 dlo, dhi = -0.4, 0.4 elif opt.region == 'eboss-sgc': # generous boundaries to make sure get all relevant images # RA -45 to +45 # Dec -5 to +7 rlo, rhi = 310., 50. dlo, dhi = -6., 6. elif opt.region == 'eboss-ngc': # generous boundaries to make sure get all relevant images # NGC ELGs # RA 115 to 175 # Dec 15 to 30 # rlo,rhi = 122., 177. # dlo,dhi = 12., 32. rlo, rhi = 126., 168. dlo, dhi = 18., 33. elif opt.region == 'mzls': dlo, dhi = -10., 90. # -10: pull in Stripe 82 data too elif opt.region == 'dr4-bootes': # https://desi.lbl.gov/trac/wiki/DecamLegacy/DR4sched #dlo,dhi = 34., 35. #rlo,rhi = 209.5, 210.5 dlo, dhi = 33., 36. rlo, rhi = 216.5, 219.5 elif opt.region == 'des-sn-x3': #rlo,rhi = 36., 37. #dlo,dhi = -5., -4. rlo, rhi = 36., 36.5 dlo, dhi = -4.5, -4. 
elif opt.region == 'ngc2632': # open cluster rlo, rhi = 129.0, 131.0 dlo, dhi = 19.0, 20.5 elif opt.region == 'dr8sky': rlo, rhi = 35.0, 37.0 dlo, dhi = -3.0, -1.0 # ADM DR8 test regions, see, e.g.: # https://desi.lbl.gov/trac/wiki/DecamLegacy/DR8#Testregions elif opt.region == 'dr8-test-s82': rlo, rhi = 0, 45 dlo, dhi = -1.25, 1.25 elif opt.region == 'dr8-test-hsc-sgc': rlo, rhi = 30, 40 dlo, dhi = -6.5, -1.25 elif opt.region == 'dr8-test-hsc-ngc': rlo, rhi = 177.5, 182.5 dlo, dhi = -1, 1 elif opt.region == 'dr8-test-edr': rlo, rhi = 240, 245 dlo, dhi = 5, 12 elif opt.region == 'dr8-test-hsc-north': rlo, rhi = 240, 250 dlo, dhi = 42, 45 elif opt.region == 'dr8-test-deep2-egs': rlo, rhi = 213, 216.5 dlo, dhi = 52, 54 elif opt.region == 'dr8-test-overlap': rlo, rhi = 132, 140.5 dlo, dhi = 31.5, 35 if opt.mindec is not None: dlo = opt.mindec if opt.maxdec is not None: dhi = opt.maxdec if opt.minra is not None: rlo = opt.minra if opt.maxra is not None: rhi = opt.maxra if rlo < rhi: B.cut((B.ra >= rlo) * (B.ra <= rhi) * (B.dec >= dlo) * (B.dec <= dhi)) else: # RA wrap B.cut( np.logical_or(B.ra >= rlo, B.ra <= rhi) * (B.dec >= dlo) * (B.dec <= dhi)) log(len(B), 'bricks in range; cut Dec range', B.dec.min(), B.dec.max()) #for name in B.get('brickname'): # print(name) #B.writeto('bricks-cut.fits') bricksize = 0.25 # A bit more than 0.25-degree brick radius + Bok image radius ~ 0.57 search_radius = 1.05 * np.sqrt(2.) * (bricksize + (0.455 * 4096 / 3600.)) / 2. log(len(T), 'CCDs') log(len(B), 'Bricks') I, J, d = match_radec(B.ra, B.dec, T.ra, T.dec, search_radius, nearest=True) B.cut(I) log('Cut to', len(B), 'bricks near CCDs') log('Bricks Dec range:', B.dec.min(), B.dec.max()) # plt.clf() # plt.plot(B.ra, B.dec, 'b.') # plt.title('DR3 bricks') # plt.axis([360, 0, np.min(B.dec)-1, np.max(B.dec)+1]) # plt.savefig('bricks.png') if opt.touching: I, J, d = match_radec(T.ra, T.dec, B.ra, B.dec, search_radius, nearest=True) # list the ones that will be cut # drop = np.ones(len(T)) # drop[I] = False # for i in np.flatnonzero(drop): # from astrometry.util.starutil_numpy import degrees_between # dists = degrees_between(B.ra, B.dec, T.ra[i], T.dec[i]) # mindist = min(dists) # print('Dropping:', T.ra[i], T.dec[i], 'min dist', mindist, 'search_radius', search_radius) T.cut(I) log('Cut to', len(T), 'CCDs near bricks') # sort by RA increasing B.cut(np.argsort(B.ra)) if opt.save_to_fits: assert (opt.touching) # Write cut tables to file for tab, typ in zip([B, T], ['bricks', 'ccds']): fn = '%s-%s-cut.fits' % (typ, opt.region) if os.path.exists(fn): os.remove(fn) tab.writeto(fn) log('Wrote %s' % fn) # Write text files listing ccd and filename names # nm1,nm2= 'ccds-%s.txt'% opt.region,'filenames-%s.txt' % opt.region # if os.path.exists(nm1): # os.remove(nm1) # if os.path.exists(nm2): # os.remove(nm2) # f1,f2=open(nm1,'w'),open(nm2,'w') # fns= list(set(T.get('image_filename'))) # for fn in fns: # f2.write('%s\n' % fn.strip()) # for ti in T: # f1.write('%s\n' % ti.get('image_filename').strip()) # f1.close() # f2.close() # log('Wrote *-names.txt') if opt.touching: if want_bricks: # Shortcut the list of bricks that are definitely touching CCDs -- # a brick-ccd pair within this radius must be touching. closest_radius = 0.95 * (bricksize + 0.262 * 2048 / 3600.) / 2. 
J1, nil, nil = match_radec(B.ra, B.dec, T.ra, T.dec, closest_radius, nearest=True) log(len(J1), 'of', len(B), 'bricks definitely touch CCDs') tocheck = np.ones(len(B), bool) tocheck[J1] = False J2 = [] for j in np.flatnonzero(tocheck): b = B[j] wcs = wcs_for_brick(b) I = ccds_touching_wcs(wcs, T) log(len(I), 'CCDs for brick', b.brickname) if len(I) == 0: continue J2.append(j) J = np.hstack((J1, J2)) J = np.sort(J).astype(int) B.cut(J) log('Cut to', len(B), 'bricks touching CCDs') else: J = [] allI = set() for j, b in enumerate(B): wcs = wcs_for_brick(b) I = ccds_touching_wcs(wcs, T) log(len(I), 'CCDs for brick', b.brickname) if len(I) == 0: continue allI.update(I) J.append(j) allI = list(allI) allI.sort() B.cut(np.array(J)) log('Cut to', len(B), 'bricks touching CCDs') elif opt.near: # Find CCDs near bricks allI, nil, nil = match_radec(T.ra, T.dec, B.ra, B.dec, search_radius, nearest=True) # Find bricks near CCDs J, nil, nil = match_radec(B.ra, B.dec, T.ra, T.dec, search_radius, nearest=True) B.cut(J) log('Cut to', len(B), 'bricks near CCDs') else: allI = np.arange(len(T)) if opt.byexp: nil, eI = np.unique(T.expnum[allI], return_index=True) allI = allI[eI] print('Cut to', len(allI), 'expnums') if opt.nccds: from queue import Queue from threading import Thread log('Checking number of CCDs per brick') def worker(): while True: i = q.get() if i is None: break b = B[i] wcs = wcs_for_brick(b) I = ccds_touching_wcs(wcs, T) log(b.brickname, len(I)) q.task_done() q = Queue() num_threads = 24 threads = [] for i in range(num_threads): t = Thread(target=worker) t.start() threads.append(t) for i in range(len(B)): q.put(i) q.join() for i in range(num_threads): q.put(None) for t in threads: t.join() if opt.write_ccds: T[allI].writeto(opt.write_ccds) log('Wrote', opt.write_ccds) if want_bricks: # Print the list of bricks and exit. for b in B: print(b.brickname) if opt.save_to_fits: B.writeto('bricks-%s-touching.fits' % opt.region) if not want_ccds: sys.exit(0) ## Be careful here -- T has been cut; we want to write out T.index. ## 'allI' contains indices into T. 
if opt.stage is not None: cmd_pat = 'rsync -LRarv %s %s' fns = set() for iccd in allI: im = survey.get_image_object(T[iccd]) fns.update([ im.imgfn, im.wtfn, im.dqfn, im.psffn, im.merged_psffn, im.merged_splineskyfn, im.splineskyfn ]) for i, fn in enumerate(fns): print('File', i + 1, 'of', len(fns), ':', fn) if not os.path.exists(fn): print('No such file:', fn) continue base = survey.get_survey_dir() if base.endswith('/'): base = base[:-1] rel = os.path.relpath(fn, base) dest = os.path.join(opt.stage, rel) print('Dest:', dest) if os.path.exists(dest): print('Exists:', dest) continue cmd = cmd_pat % ('%s/./%s' % (base, rel), opt.stage) print(cmd) rtn = os.system(cmd) assert (rtn == 0) sys.exit(0) if opt.forced: log('Writing forced-photometry commands to', opt.out) f = open(opt.out, 'w') log('Total of', len(allI), 'CCDs') for j, i in enumerate(allI): expstr = '%08i' % T.expnum[i] imgfn = os.path.join(survey.survey_dir, 'images', T.image_filename[i].strip()) if (not os.path.exists(imgfn) and imgfn.endswith('.fz') and os.path.exists(imgfn[:-3])): imgfn = imgfn[:-3] outfn = os.path.join( expstr[:5], expstr, 'forced-%s-%s-%s.fits' % (T.camera[i].strip(), expstr, T.ccdname[i])) f.write( 'python legacypipe/forced_photom.py --apphot --derivs --catalog-dir /project/projectdirs/cosmo/data/legacysurvey/dr7/ %i %s forced/%s\n' % (T.expnum[i], T.ccdname[i], outfn)) f.close() log('Wrote', opt.out) fn = 'forced-ccds.fits' T[allI].writeto(fn) print('Wrote', fn) sys.exit(0) if opt.lsb: log('Writing LSB commands to', opt.out) f = open(opt.out, 'w') log('Total of', len(allI), 'CCDs') for j, i in enumerate(allI): exp = T.expnum[i] ext = T.ccdname[i].strip() outfn = 'lsb/lsb-%s-%s.fits' % (exp, ext) f.write( 'python legacyanalysis/lsb.py --expnum %i --extname %s --out %s -F -n > lsb/lsb-%s-%s.log 2>&1\n' % (exp, ext, outfn, exp, ext)) f.close() log('Wrote', opt.out) sys.exit(0) log('Writing calibs to', opt.out) f = open(opt.out, 'w') log('Total of', len(allI), 'CCDs') batch = [] def write_batch(f, batch, cmd): if cmd is None: cmd = '' f.write(cmd + ' '.join(batch) + '\n') cmd = None if opt.command: cmd = 'python legacypipe/run-calib.py ' if opt.opt is not None: cmd += opt.opt + ' ' for j, i in enumerate(allI): if opt.delete_sky: log(j + 1, 'of', len(allI)) im = survey.get_image_object(T[i]) if opt.delete_sky and os.path.exists(im.skyfn): log(' deleting:', im.skyfn) os.unlink(im.skyfn) if opt.command: if opt.byexp: s = '--expnum %i' % (T.expnum[i]) else: s = '%i-%s' % (T.expnum[i], T.ccdname[i]) prefix = 'python legacypipe/run-calib.py ' if opt.opt is not None: prefix = prefix + opt.opt #('python legacypipe/run-calib.py --expnum %i --ccdname %s' % # (T.expnum[i], T.ccdname[i])) else: s = '%i' % T.index[i] prefix = '' if j < 10: print('Index', T.index[i], 'expnum', T.expnum[i], 'ccdname', T.ccdname[i], 'filename', T.image_filename[i]) if not opt.nper: f.write(prefix + s + '\n') else: batch.append(s) if len(batch) >= opt.nper: write_batch(f, batch, cmd) batch = [] if len(batch): write_batch(f, batch, cmd) f.close() log('Wrote', opt.out) return 0
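# Several of the named regions above wrap through RA=0 (e.g. 'des': RA 317 to
# 7), which is why the brick cut branches on rlo < rhi.  A self-contained
# sketch of that wrap-aware box cut, assuming numpy arrays of RA,Dec in deg:
import numpy as np

def radec_box_cut(ra, dec, rlo, rhi, dlo, dhi):
    in_dec = (dec >= dlo) * (dec <= dhi)
    if rlo < rhi:
        in_ra = (ra >= rlo) * (ra <= rhi)
    else:
        # RA wrap-around: keep everything above rlo OR below rhi.
        in_ra = np.logical_or(ra >= rlo, ra <= rhi)
    return np.flatnonzero(in_ra * in_dec)

# e.g. radec_box_cut(B.ra, B.dec, 317., 7., -6., 4.) reproduces the 'des' cut.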
def main(args=None): """Main routine which parses the optional inputs.""" # Command line options parser= get_parser() args = parser.parse_args(args=args) # Setup loggers if args.verbose: lvl = logging.DEBUG else: lvl = logging.INFO logging.basicConfig(level=lvl, stream=sys.stdout) #,format='%(message)s') log = logging.getLogger('decals_sim') # Sort through args log.info('decals_sim.py args={}'.format(args)) max_nobj=500 max_nchunk=1000 if args.ith_chunk is not None: assert(args.ith_chunk <= max_nchunk-1) assert(args.nchunk <= max_nchunk) assert(args.nobj <= max_nobj) if args.ith_chunk is not None: assert(args.nchunk == 1) #if choose a chunk, only doing 1 chunk if args.nobj is None: parser.print_help() sys.exit(1) brickname = args.brick objtype = args.objtype.upper() lobjtype = objtype.lower() for obj in ('LRG', 'LSB', 'QSO'): if objtype == obj: log.warning('{} objtype not yet supported!'.format(objtype)) return 0 # Deal with the paths. if 'DECALS_SIM_DIR' in os.environ: decals_sim_dir = os.getenv('DECALS_SIM_DIR') else: decals_sim_dir = '.' nobj = args.nobj nchunk = args.nchunk rand = np.random.RandomState(args.seed) # determines seed for all chunks seeds = rand.random_integers(0,2**18, max_nchunk) log.info('Object type = {}'.format(objtype)) log.info('Number of objects = {}'.format(nobj)) log.info('Number of chunks = {}'.format(nchunk)) # Optionally zoom into a portion of the brick survey = LegacySurveyData() brickinfo = survey.get_brick_by_name(brickname) brickwcs = wcs_for_brick(brickinfo) W, H, pixscale = brickwcs.get_width(), brickwcs.get_height(), brickwcs.pixel_scale() log.info('Brick = {}'.format(brickname)) if args.zoom is not None: # See also runbrick.stage_tims() (x0, x1, y0, y1) = args.zoom W = x1 - x0 H = y1 - y0 brickwcs = brickwcs.get_subimage(x0, y0, W, H) log.info('Zoom (pixel boundaries) = {}'.format(args.zoom)) targetrd = np.array([brickwcs.pixelxy2radec(x, y) for x, y in [(1,1), (W,1), (W,H), (1,H), (1,1)]]) radec_center = brickwcs.radec_center() log.info('RA, Dec center = {}'.format(radec_center)) log.info('Brick = {}'.format(brickname)) if args.ith_chunk is not None: chunk_list= [args.ith_chunk] else: chunk_list= range(nchunk) # Store args in dict for easy func passing kwargs=dict(seeds=seeds,\ brickname=brickname, \ decals_sim_dir= decals_sim_dir,\ brickwcs= brickwcs, \ objtype=objtype,\ lobjtype=lobjtype,\ nobj=nobj,\ nchunk=nchunk,\ args=args) # Create simulated catalogues and run Tractor create_metadata(kwargs=kwargs) # do chunks for ith_chunk in chunk_list: log.info('Working on chunk {:02d}/{:02d}'.format(ith_chunk,kwargs['nchunk']-1)) # Random ra,dec and source properties create_ith_simcat(ith_chunk, d=kwargs) # Run tractor do_one_chunk(d=kwargs) # Clean up output do_ith_cleanup(ith_chunk=ith_chunk, d=kwargs) log.info('All done!')
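# Note: RandomState.random_integers(), used above for the per-chunk seeds, is
# deprecated in recent numpy; randint(low, high+1) covers the same closed
# interval.  A minimal sketch of the equivalent reproducible per-chunk seeding
# (the seed and chunk count here stand in for args.seed and max_nchunk):
import numpy as np
seed, max_nchunk = 42, 1000
rand = np.random.RandomState(seed)
seeds = rand.randint(0, 2**18 + 1, size=max_nchunk)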
def main(survey=None, opt=None): print(' '.join(sys.argv)) '''Driver function for forced photometry of individual Legacy Survey images. ''' if opt is None: parser = get_parser() opt = parser.parse_args() Time.add_measurement(MemMeas) t0 = tlast = Time() if opt.skip and os.path.exists(opt.outfn): print('Ouput file exists:', opt.outfn) sys.exit(0) if opt.derivs and opt.agn: print('Sorry, can\'t do --derivs AND --agn') sys.exit(0) if not opt.forced: opt.apphot = True zoomslice = None if opt.zoom is not None: (x0, x1, y0, y1) = opt.zoom zoomslice = (slice(y0, y1), slice(x0, x1)) ps = None if opt.plots is not None: from astrometry.util.plotutils import PlotSequence ps = PlotSequence(opt.plots) # Try parsing first arg as exposure number (otherwise, it's a filename) try: expnum = int(opt.expnum) filename = None except: # make this 'None' for survey.find_ccds() expnum = None filename = opt.expnum # Try parsing HDU: "all" or HDU name or HDU number. all_hdus = (opt.ccdname == 'all') hdu = -1 ccdname = None if not all_hdus: try: hdu = int(opt.ccdname) except: ccdname = opt.ccdname if survey is None: survey = LegacySurveyData(survey_dir=opt.survey_dir) catsurvey_north = survey catsurvey_south = None if opt.catalog_dir_north is not None: assert (opt.catalog_dir_south is not None) assert (opt.catalog_resolve_dec_ngc is not None) catsurvey_north = LegacySurveyData(survey_dir=opt.catalog_dir_north) catsurvey_south = LegacySurveyData(survey_dir=opt.catalog_dir_south) if opt.catalog_dir is not None: catsurvey_north = LegacySurveyData(survey_dir=opt.catalog_dir) if filename is not None and hdu >= 0: # FIXME -- try looking up in CCDs file? # Read metadata from file print('Warning: faking metadata from file contents') T = exposure_metadata([filename], hdus=[hdu]) print('Metadata:') T.about() if not 'ccdzpt' in T.columns(): phdr = fitsio.read_header(filename) T.ccdzpt = np.array([phdr['MAGZERO']]) print('WARNING: using header MAGZERO') T.ccdraoff = np.array([0.]) T.ccddecoff = np.array([0.]) print('WARNING: setting CCDRAOFF, CCDDECOFF to zero.') else: # Read metadata from survey-ccds.fits table T = survey.find_ccds(expnum=expnum, ccdname=ccdname) print(len(T), 'with expnum', expnum, 'and ccdname', ccdname) if hdu >= 0: T.cut(T.image_hdu == hdu) print(len(T), 'with HDU', hdu) if filename is not None: T.cut(np.array([f.strip() == filename for f in T.image_filename])) print(len(T), 'with filename', filename) if opt.camera is not None: T.cut(T.camera == opt.camera) print(len(T), 'with camera', opt.camera) if not all_hdus: assert (len(T) == 1) args = [] for ccd in T: args.append((survey, catsurvey_north, catsurvey_south, opt.catalog_resolve_dec_ngc, ccd, opt, zoomslice, ps)) if opt.threads: from astrometry.util.multiproc import multiproc from astrometry.util.timingpool import TimingPool, TimingPoolMeas pool = TimingPool(opt.threads) poolmeas = TimingPoolMeas(pool, pickleTraffic=False) Time.add_measurement(poolmeas) mp = multiproc(None, pool=pool) tm = Time() FF = mp.map(bounce_one_ccd, args) print('Multi-processing forced-phot:', Time() - tm) else: FF = map(bounce_one_ccd, args) FF = [F for F in FF if F is not None] if len(FF) == 0: print('No photometry results to write.') return 0 # Keep only the first header _, version_hdr = FF[0] FF = [F for F, hdr in FF] F = merge_tables(FF) if all_hdus: version_hdr.delete('CPHDU') version_hdr.delete('CCDNAME') units = { 'exptime': 'sec', 'flux': 'nanomaggy', 'flux_ivar': '1/nanomaggy^2', 'apflux': 'nanomaggy', 'apflux_ivar': '1/nanomaggy^2', 'psfdepth': '1/nanomaggy^2', 
'galdepth': '1/nanomaggy^2', 'sky': 'nanomaggy/arcsec^2', 'psfsize': 'arcsec' } if opt.derivs: units.update({ 'dra': 'arcsec', 'ddec': 'arcsec', 'dra_ivar': '1/arcsec^2', 'ddec_ivar': '1/arcsec^2' }) columns = F.get_columns() order = [ 'release', 'brickid', 'brickname', 'objid', 'camera', 'expnum', 'ccdname', 'filter', 'mjd', 'exptime', 'psfsize', 'ccd_cuts', 'airmass', 'sky', 'psfdepth', 'galdepth', 'ra', 'dec', 'flux', 'flux_ivar', 'fracflux', 'rchisq', 'fracmasked', 'apflux', 'apflux_ivar', 'x', 'y', 'dqmask', 'dra', 'ddec', 'dra_ivar', 'ddec_ivar' ] columns = [c for c in order if c in columns] # Set units headers (must happen after column ordering is set!) hdr = fitsio.FITSHDR() for i, col in enumerate(columns): if col in units: hdr.add_record(dict(name='TUNIT%i' % (i + 1), value=units[col])) outdir = os.path.dirname(opt.outfn) if len(outdir): trymakedirs(outdir) tmpfn = os.path.join(outdir, 'tmp-' + os.path.basename(opt.outfn)) fitsio.write(tmpfn, None, header=version_hdr, clobber=True) F.writeto(tmpfn, header=hdr, append=True, columns=columns) os.rename(tmpfn, opt.outfn) print('Wrote', opt.outfn) tnow = Time() print('Total:', tnow - t0) return 0
import numpy as np from astrometry.util.plotutils import * from legacyanalysis.ps1cat import ps1cat from legacypipe.survey import LegacySurveyData from tractor import Image, PointSource, PixPos, NanoMaggies, Tractor ps = PlotSequence('rewcs') expnum, ccdname = 431109, 'N14' cat = ps1cat(expnum=expnum, ccdname=ccdname) stars = cat.get_stars() print(len(stars), 'stars') survey = LegacySurveyData() ccd = survey.find_ccds(expnum=expnum, ccdname=ccdname)[0] im = survey.get_image_object(ccd) wcs = im.get_wcs() tim = im.get_tractor_image(pixPsf=True, splinesky=True) margin = 15 ok, stars.xx, stars.yy = wcs.radec2pixelxy(stars.ra, stars.dec) stars.xx -= 1. stars.yy -= 1. W, H = wcs.get_width(), wcs.get_height() stars.ix = np.round(stars.xx).astype(int) stars.iy = np.round(stars.yy).astype(int) stars.cut((stars.ix >= margin) * (stars.ix < (W-margin)) * (stars.iy >= margin) * (stars.iy < (H-margin)))
allra = [] alldec = [] allstate = [] alltasks = [] # allra.append(ra) # alldec.append(dec) # allstate.append([state] * len(ra)) # alltasks.append(tasks) ra = np.hstack(allra) dec = np.hstack(alldec) state = np.hstack(allstate) tasks = np.hstack(alltasks) # Match to the actual table of bricks to get brickq. survey = LegacySurveyData() bricks = survey.get_bricks_readonly() I, J, d = match_radec(ra, dec, bricks.ra, bricks.dec, 0.2, nearest=True) print(len(ra), 'jobs') print(len(I), 'matches') ra = ra[I] dec = dec[I] state = state[I] tasks = tasks[I] brickq = bricks.brickq[J] for q in [0, 1, 2, 3]: print() print('Brickq', q) plt.clf()
def main(): indir = '/global/cscratch1/sd/dstn/dr8test-1' name = 'dr8-test1' pretty = 'DR8 test1' sublayers = ['', '-model', '-resid'] subpretty = {'': ' images', '-model': ' models', '-resid': ' residuals'} survey_dir = '/global/cscratch1/sd/desiproc/dr7' datadir = 'data' survey = LegacySurveyData(survey_dir=survey_dir) fn = 'map/test_layers.py' txt = open(fn).read() for x in sublayers: txt = txt + '\n' + 'test_layers.append(("%s%s", "%s%s"))\n' % ( name, x, pretty, subpretty[x]) open(fn, 'wb').write(txt.encode()) print('Wrote', fn) cmd = 'rsync -LRarv %s/./{coadd/*/*/*-{image,model}-*.fits*,tractor} %s/%s' % ( indir, datadir, name) print(cmd) os.system(cmd) basedir = os.path.join(datadir, name) allbricks = survey.get_bricks_readonly() imagefns = glob(os.path.join(basedir, 'coadd', '*', '*', '*-image-*.fits*')) print('Image filenames:', len(imagefns)) brickset = set() for fn in imagefns: dirs = fn.split('/') brickname = dirs[-2] brickset.add(brickname) print(len(brickset), 'bricks found') I, = np.nonzero([b in brickset for b in allbricks.brickname]) bricks = allbricks[I] brickfn = os.path.join(basedir, 'survey-bricks.fits.gz') bricks.writeto(brickfn) print('Wrote', brickfn) threads = 8 tharg = '--threads %i ' % threads # images for scale in range(1, 8): cmd = 'python -u render-tiles.py --kind %s --scale --zoom %i %s' % ( name, scale, tharg) print(cmd) os.system(cmd) # models for scale in range(1, 8): cmd = 'python -u render-tiles.py --kind %s-model --scale --zoom %i %s' % ( name, scale, tharg) print(cmd) os.system(cmd) for x in sublayers: cmd = 'python -u render-tiles.py --kind %s%s --top' % (name, x) print(cmd) os.system(cmd)
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument( '--expnum', type=str, help='Run specified exposure numbers (can be comma-separated list') parser.add_argument( '--all-found', action='store_true', default=False, help='Only write output if all required input files are found') parser.add_argument('--ccds', help='Set ccds.fits file to load, default is all') parser.add_argument('--continue', dest='con', help='Continue even if one exposure is bad', action='store_true', default=False) parser.add_argument('--outdir', help='Output directory, default %(default)s', default='calib') opt = parser.parse_args() survey = LegacySurveyData() if opt.ccds: ccds = fits_table(opt.ccds) ccds = survey.cleanup_ccds_table(ccds) survey.ccds = ccds if opt.expnum is not None: expnums = [(None, int(x, 10)) for x in opt.expnum.split(',')] else: ccds = survey.get_ccds() expnums = set(zip(ccds.camera, ccds.expnum)) print(len(expnums), 'unique camera+expnums') for i, (camera, expnum) in enumerate(expnums): print() print('Exposure', i + 1, 'of', len(expnums), ':', camera, 'expnum', expnum) if camera is None: C = survey.find_ccds(expnum=expnum) print(len(C), 'CCDs with expnum', expnum) camera = C.camera[0] print('Set camera to', camera) C = survey.find_ccds(expnum=expnum, camera=camera) print(len(C), 'CCDs with expnum', expnum, 'and camera', camera) im0 = survey.get_image_object(C[0]) skyoutfn = im0.merged_skyfn psfoutfn = im0.merged_psffn print('Checking for', skyoutfn) print('Checking for', psfoutfn) if os.path.exists(skyoutfn) and os.path.exists(psfoutfn): print('Exposure', expnum, 'is done already') continue if not os.path.exists(skyoutfn): try: merge_splinesky(survey, expnum, C, skyoutfn, opt) except: if not opt.con: raise import traceback traceback.print_exc() print('Exposure failed:', expnum, '. Continuing...') if not os.path.exists(psfoutfn): try: merge_psfex(survey, expnum, C, psfoutfn, opt) except: if not opt.con: raise import traceback traceback.print_exc() print('Exposure failed:', expnum, '. Continuing...')
if __name__ == '__main__': import sys import argparse parser = argparse.ArgumentParser() parser.add_argument('--survey-dir', type=str, default=None, help='Override the $LEGACY_SURVEY_DIR environment variable') parser.add_argument('-d', '--outdir', help='Set output base directory', default='tractor2') parser.add_argument('--north', help='Set Dec limits for Northern Cap surveys', action='store_true') parser.add_argument('--overwrite', action='store_true', default=False, help='Overwrite existing output files? Default is to skip them.') opt = parser.parse_args() survey = LegacySurveyData(survey_dir=opt.survey_dir, output_dir=opt.outdir) bricks = survey.get_bricks() if opt.north: bricks.cut(bricks.dec > 30) else: bricks.cut(bricks.dec > -25) bricks.cut(bricks.dec < 40) ## HACK -- cut to COSMOS #bricks.cut((np.abs(bricks.ra - 150) < 2) * # (np.abs(bricks.dec - 2.2) < 2)) #print('Cut to', len(bricks), 'bricks near COSMOS') # Note to self: don't bother multiprocessing this; I/O bound for brick in bricks.brickname: