def get_supp_skies(ras, decs, radius=2.):
    """Random locations, avoid Gaia, format, return supplemental skies.

    Parameters
    ----------
    ras : :class:`~numpy.ndarray`
        Right Ascensions of sky locations (degrees).
    decs : :class:`~numpy.ndarray`
        Declinations of sky locations (degrees).
    radius : :class:`float`, optional, defaults to 2
        Radius at which to avoid (all) Gaia sources (arcseconds).

    Returns
    -------
    :class:`~numpy.ndarray`
        A structured array of supplemental sky positions in the DESI sky
        target format that avoid Gaia sources by `radius`.

    Notes
    -----
    - Written to be used when `ras` and `decs` are within a single
      Gaia-file HEALPixel, but should work for all cases.
    """
    # ADM determine Gaia files of interest and read the RAs/Decs.
    fns = find_gaia_files([ras, decs], neighbors=True, radec=True)
    gobjs = np.concatenate(
        [fitsio.read(fn, columns=["RA", "DEC"]) for fn in fns])

    # ADM convert radius to an array.
    r = np.zeros(len(gobjs)) + radius

    # ADM determine matches between Gaia and the passed RAs/Decs.
    isin = is_in_circle(ras, decs, gobjs["RA"], gobjs["DEC"], r)
    good = ~isin

    # ADM build the output array from the sky targets data model.
    nskies = np.sum(good)
    supsky = np.zeros(nskies, dtype=skydatamodel.dtype)
    # ADM populate output array with the RA/Dec of the sky locations.
    supsky["RA"], supsky["DEC"] = ras[good], decs[good]
    # ADM add the brickid and name.
    supsky["BRICKID"] = bricks.brickid(ras[good], decs[good])
    supsky["BRICKNAME"] = bricks.brickname(ras[good], decs[good])
    supsky["BLOBDIST"] = 2.
    # ADM set all fluxes and IVARs to -1, so they're ill-defined.
    for name in skydatamodel.dtype.names:
        if "FLUX" in name:
            supsky[name] = -1.

    return supsky
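
# A minimal usage sketch (not part of the original module): it shows how
# get_supp_skies() might be called directly on a handful of candidate
# positions, assuming $GAIA_DIR points at a standard Gaia HEALPixel file
# store. The RA/Dec values are purely illustrative.
def _example_get_supp_skies():
    """Illustrative only; requires $GAIA_DIR to be set."""
    ras = np.array([42.001, 42.002, 42.003])
    decs = np.array([-5.001, -5.002, -5.003])
    supp = get_supp_skies(ras, decs, radius=2.)
    # supp is in the skydatamodel format; only locations at least 2"
    # from every Gaia source survive, with fluxes set to -1.
    return supp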
def supplement_skies(nskiespersqdeg=None, numproc=16, gaiadir=None,
                     mindec=-30., mingalb=10., radius=2., minobjid=0):
    """Generate supplemental sky locations using Gaia-G-band avoidance.

    Parameters
    ----------
    nskiespersqdeg : :class:`float`, optional
        The minimum DENSITY of sky fibers to generate. Defaults to
        reading from :func:`~desimodel.io` with a margin of 4x.
    numproc : :class:`int`, optional, defaults to 16
        The number of processes over which to parallelize.
    gaiadir : :class:`str`, optional, defaults to $GAIA_DIR
        The GAIA_DIR environment variable is set to this directory.
        If None is passed, then $GAIA_DIR is assumed to already be set.
    mindec : :class:`float`, optional, defaults to -30
        Minimum declination (degrees) to include for output sky locations.
    mingalb : :class:`float`, optional, defaults to 10
        Closest latitude to the Galactic plane for output sky locations
        (e.g. send 10 to limit to areas beyond -10o <= b < 10o).
    radius : :class:`float`, optional, defaults to 2
        Radius at which to avoid (all) Gaia sources (arcseconds).
    minobjid : :class:`int`, optional, defaults to 0
        The minimum OBJID to start counting from in a brick. Used to
        make sure supplemental skies have different OBJIDs from regular
        skies.

    Returns
    -------
    :class:`~numpy.ndarray`
        a structured array of supplemental sky positions in the DESI sky
        target format within the passed `mindec` and `mingalb` limits.

    Notes
    -----
    - The environment variable $GAIA_DIR must be set, or `gaiadir`
      must be passed.
    """
    log.info("running on {} processors".format(numproc))

    # ADM if the GAIA directory was passed, set it.
    if gaiadir is not None:
        os.environ["GAIA_DIR"] = gaiadir

    # ADM if needed, determine the density of sky fibers to generate.
    if nskiespersqdeg is None:
        nskiespersqdeg = density_of_sky_fibers(margin=4)

    # ADM determine the HEALPixel nside of the standard Gaia files.
    anyfiles = find_gaia_files([0, 0], radec=True)
    hdr = fitsio.read_header(anyfiles[0], "GAIAHPX")
    nside = hdr["HPXNSIDE"]

    # ADM create a set of random locations accounting for mindec.
    log.info("Generating supplemental sky locations at Dec > {}o...t={:.1f}s"
             .format(mindec, time() - start))
    from desitarget.randoms import randoms_in_a_brick_from_edges
    ras, decs = randoms_in_a_brick_from_edges(
        0., 360., mindec, 90., density=nskiespersqdeg, wrap=False)

    # ADM limit randoms by mingalb.
    log.info("Generated {} sky locations. Limiting to |b| > {}o...t={:.1f}s"
             .format(len(ras), mingalb, time() - start))
    bnorth = is_in_gal_box([ras, decs], [0, 360, mingalb, 90], radec=True)
    bsouth = is_in_gal_box([ras, decs], [0, 360, -90, -mingalb], radec=True)
    ras, decs = ras[bnorth | bsouth], decs[bnorth | bsouth]

    # ADM find HEALPixels for the random points.
    log.info("Cut to {} sky locations. Finding their HEALPixels...t={:.1f}s"
             .format(len(ras), time() - start))
    theta, phi = np.radians(90 - decs), np.radians(ras)
    pixels = hp.ang2pix(nside, theta, phi, nest=True)
    upixels = np.unique(pixels)
    npixels = len(upixels)
    log.info("Running across {} HEALPixels.".format(npixels))

    # ADM parallelize across pixels. The function to run on every pixel.
    def _get_supp(pix):
        """wrapper on get_supp_skies() given a HEALPixel"""
        ii = (pixels == pix)
        return get_supp_skies(ras[ii], decs[ii], radius=radius)

    # ADM this is just to count pixels in _update_status.
    npix = np.zeros((), dtype='i8')
    t0 = time()

    def _update_status(result):
        """wrapper function for the critical reduction operation,
        that occurs on the main parallel process"""
        if npix % 500 == 0 and npix > 0:
            rate = npix / (time() - t0)
            log.info('{}/{} HEALPixels; {:.1f} pixels/sec'.format(
                npix, npixels, rate))
        npix[...] += 1    # this is an in-place modification.
        return result

    # - Parallel process across the unique pixels.
    if numproc > 1:
        pool = sharedmem.MapReduce(np=numproc)
        with pool:
            supp = pool.map(_get_supp, upixels, reduce=_update_status)
    else:
        supp = []
        for upix in upixels:
            supp.append(_update_status(_get_supp(upix)))

    # ADM Concatenate the parallelized results into one rec array.
    supp = np.concatenate(supp)

    # ADM build the OBJIDs from the number of sources per brick.
    # ADM the for loop doesn't seem the smartest way, but it is O(n).
    log.info("Begin assigning OBJIDs to bricks...t={:.1f}s"
             .format(time() - start))
    brxid = supp["BRICKID"]
    # ADM start each brick counting from minobjid.
    cntr = np.zeros(np.max(brxid) + 1, dtype=int) + minobjid
    objid = []
    for ibrx in brxid:
        cntr[ibrx] += 1
        objid.append(cntr[ibrx])
    # ADM ensure the number of sky positions that were generated doesn't
    # ADM exceed the largest possible OBJID (which is unlikely).
    if np.any(cntr > 2**targetid_mask.OBJID.nbits):
        log.fatal(
            '{} sky locations in brick with BRICKID {}, but OBJID cannot exceed {}'
            .format(np.max(cntr), np.argmax(cntr), 2**targetid_mask.OBJID.nbits))
        raise ValueError
    supp["OBJID"] = np.array(objid)
    log.info("Assigned OBJIDs to bricks...t={:.1f}s".format(time() - start))

    # ADM add the TARGETID, DESITARGET bits etc.
    nskies = len(supp)
    desi_target = np.zeros(nskies, dtype='>i8')
    desi_target |= desi_mask.SKY
    desi_target |= desi_mask.SUPP_SKY
    dum = np.zeros_like(desi_target)
    supp = finalize(supp, desi_target, dum, dum, sky=1)

    log.info('Done...t={:.1f}s'.format(time() - start))

    return supp
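
# A minimal usage sketch (not part of the original module): how
# supplement_skies() might be invoked from a driver script. The Gaia
# directory path and the process count are illustrative assumptions.
def _example_supplement_skies():
    """Illustrative only; expects a real Gaia HEALPixel directory."""
    supp = supplement_skies(numproc=4,
                            gaiadir="/path/to/gaia/healpix/files",
                            mindec=-30., mingalb=10., radius=2.)
    # The result carries the SKY and SUPP_SKY bits in DESI_TARGET and
    # per-brick OBJIDs counting up from minobjid.
    return supp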
def make_bright_star_mask_in_hp(nside, pixnum, verbose=True, gaiaepoch=2015.5,
                                maglim=12., matchrad=1., maskepoch=2023.0):
    """Make a bright star mask in a HEALPixel using Tycho, Gaia and URAT.

    Parameters
    ----------
    nside : :class:`int`
        (NESTED) HEALPixel nside.
    pixnum : :class:`int`
        A single HEALPixel number.
    verbose : :class:`bool`
        If ``True`` then log informational messages.

    Returns
    -------
    :class:`recarray`
        The bright star mask in the form of `maskdatamodel.dtype`.

    Notes
    -----
    - Runs in a minute or so for a typical nside=4 pixel.
    - See :func:`~desitarget.brightmask.make_bright_star_mask` for
      descriptions of the output mask and the other input parameters.
    """
    # ADM start the clock.
    t0 = time()

    # ADM read in the Tycho files.
    tychofns = find_tycho_files_hp(nside, pixnum, neighbors=False)
    tychoobjs = []
    for fn in tychofns:
        tychoobjs.append(fitsio.read(fn, ext='TYCHOHPX'))
    tychoobjs = np.concatenate(tychoobjs)
    # ADM create the Tycho reference magnitude, which is VT then HP
    # ADM then BT in order of preference.
    tychomag = tychoobjs["MAG_VT"].copy()
    tychomag[tychomag == 0] = tychoobjs["MAG_HP"][tychomag == 0]
    tychomag[tychomag == 0] = tychoobjs["MAG_BT"][tychomag == 0]
    # ADM discard any Tycho objects below the input magnitude limit
    # ADM and outside of the HEALPixels of interest.
    theta, phi = np.radians(90-tychoobjs["DEC"]), np.radians(tychoobjs["RA"])
    tychohpx = hp.ang2pix(nside, theta, phi, nest=True)
    ii = (tychohpx == pixnum) & (tychomag < maglim)
    tychomag, tychoobjs = tychomag[ii], tychoobjs[ii]
    if verbose:
        log.info('Read {} (mag < {}) Tycho objects (pix={})...t={:.1f} mins'.
                 format(np.sum(ii), maglim, pixnum, (time()-t0)/60))

    # ADM read in the associated Gaia files. Also grab
    # ADM neighboring pixels to prevent edge effects.
    gaiafns = find_gaia_files(tychoobjs, neighbors=True)
    gaiaobjs = []
    cols = 'SOURCE_ID', 'RA', 'DEC', 'PHOT_G_MEAN_MAG', 'PMRA', 'PMDEC'
    for fn in gaiafns:
        if os.path.exists(fn):
            gaiaobjs.append(fitsio.read(fn, ext='GAIAHPX', columns=cols))

    gaiaobjs = np.concatenate(gaiaobjs)
    gaiaobjs = rfn.rename_fields(gaiaobjs, {"SOURCE_ID": "REF_ID"})
    # ADM limit Gaia objects to 3 magnitudes fainter than the passed
    # ADM limit. This leaves some (!) leeway when matching to Tycho.
    gaiaobjs = gaiaobjs[gaiaobjs['PHOT_G_MEAN_MAG'] < maglim + 3]
    if verbose:
        log.info('Read {} (G < {}) Gaia sources (pix={})...t={:.1f} mins'.format(
            len(gaiaobjs), maglim+3, pixnum, (time()-t0)/60))

    # ADM substitute URAT where Gaia proper motions don't exist.
    ii = ((np.isnan(gaiaobjs["PMRA"]) | (gaiaobjs["PMRA"] == 0)) &
          (np.isnan(gaiaobjs["PMDEC"]) | (gaiaobjs["PMDEC"] == 0)))
    if verbose:
        log.info('Add URAT for {} Gaia objs with no PMs (pix={})...t={:.1f} mins'
                 .format(np.sum(ii), pixnum, (time()-t0)/60))
    urat = add_urat_pms(gaiaobjs[ii], numproc=1)
    if verbose:
        log.info('Found an additional {} URAT objects (pix={})...t={:.1f} mins'
                 .format(np.sum(urat["URAT_ID"] != -1), pixnum, (time()-t0)/60))
    for col in "PMRA", "PMDEC":
        gaiaobjs[col][ii] = urat[col]
    # ADM need to track the URATID to track which objects have
    # ADM substituted proper motions.
    uratid = np.zeros_like(gaiaobjs["REF_ID"])-1
    uratid[ii] = urat["URAT_ID"]

    # ADM match to remove Tycho objects already in Gaia. Prefer the more
    # ADM accurate Gaia proper motions. Note, however, that Tycho epochs
    # ADM can differ from the mean (1991.5) by as much as 0.86 years,
    # ADM so a star with a proper motion as large as Barnard's Star
    # ADM (10.3 arcsec/yr) can be off by a significant margin (~10").
    margin = 10.
    ra, dec = rewind_coords(gaiaobjs["RA"], gaiaobjs["DEC"],
                            gaiaobjs["PMRA"], gaiaobjs["PMDEC"],
                            epochnow=gaiaepoch)
    # ADM match Gaia to Tycho with a suitable margin.
    if verbose:
        log.info('Match Gaia to Tycho with margin={}" (pix={})...t={:.1f} mins'
                 .format(margin, pixnum, (time()-t0)/60))
    igaia, itycho = radec_match_to([ra, dec],
                                   [tychoobjs["RA"], tychoobjs["DEC"]],
                                   sep=margin, radec=True)
    if verbose:
        log.info('{} matches. Refining at 1" (pix={})...t={:.1f} mins'.format(
            len(itycho), pixnum, (time()-t0)/60))

    # ADM match Gaia to Tycho at the more exact reference epoch.
    epoch_ra = tychoobjs[itycho]["EPOCH_RA"]
    epoch_dec = tychoobjs[itycho]["EPOCH_DEC"]
    # ADM some of the Tycho epochs aren't populated.
    epoch_ra[epoch_ra == 0], epoch_dec[epoch_dec == 0] = 1991.5, 1991.5
    ra, dec = rewind_coords(gaiaobjs["RA"][igaia], gaiaobjs["DEC"][igaia],
                            gaiaobjs["PMRA"][igaia], gaiaobjs["PMDEC"][igaia],
                            epochnow=gaiaepoch,
                            epochpast=epoch_ra, epochpastdec=epoch_dec)
    # ADM catch the corner case where there are no initial matches.
    if ra.size > 0:
        _, refined = radec_match_to([ra, dec],
                                    [tychoobjs["RA"][itycho],
                                     tychoobjs["DEC"][itycho]], radec=True)
    else:
        refined = np.array([], dtype='int')
    # ADM retain Tycho objects that DON'T match Gaia.
    keep = np.ones(len(tychoobjs), dtype='bool')
    keep[itycho[refined]] = False
    tychokeep, tychomag = tychoobjs[keep], tychomag[keep]
    if verbose:
        log.info('Kept {} Tychos with no Gaia match (pix={})...t={:.1f} mins'
                 .format(len(tychokeep), pixnum, (time()-t0)/60))

    # ADM now we're done matching to Gaia, limit Gaia to the passed
    # ADM magnitude limit and to the HEALPixel boundary of interest.
    theta, phi = np.radians(90-gaiaobjs["DEC"]), np.radians(gaiaobjs["RA"])
    gaiahpx = hp.ang2pix(nside, theta, phi, nest=True)
    ii = (gaiahpx == pixnum) & (gaiaobjs['PHOT_G_MEAN_MAG'] < maglim)
    gaiakeep, uratid = gaiaobjs[ii], uratid[ii]
    if verbose:
        log.info('Mask also comprises {} Gaia sources (pix={})...t={:.1f} mins'
                 .format(len(gaiakeep), pixnum, (time()-t0)/60))

    # ADM move the coordinates forwards to the input mask epoch.
    epoch_ra, epoch_dec = tychokeep["EPOCH_RA"], tychokeep["EPOCH_DEC"]
    # ADM some of the Tycho epochs aren't populated.
    epoch_ra[epoch_ra == 0], epoch_dec[epoch_dec == 0] = 1991.5, 1991.5
    ra, dec = rewind_coords(
        tychokeep["RA"], tychokeep["DEC"], tychokeep["PM_RA"], tychokeep["PM_DEC"],
        epochnow=epoch_ra, epochnowdec=epoch_dec, epochpast=maskepoch)
    tychokeep["RA"], tychokeep["DEC"] = ra, dec
    ra, dec = rewind_coords(
        gaiakeep["RA"], gaiakeep["DEC"], gaiakeep["PMRA"], gaiakeep["PMDEC"],
        epochnow=gaiaepoch, epochpast=maskepoch)
    gaiakeep["RA"], gaiakeep["DEC"] = ra, dec

    # ADM finally, format according to the mask data model...
    gaiamask = np.zeros(len(gaiakeep), dtype=maskdatamodel.dtype)
    tychomask = np.zeros(len(tychokeep), dtype=maskdatamodel.dtype)
    for col in "RA", "DEC":
        gaiamask[col] = gaiakeep[col]
        gaiamask["PM"+col] = gaiakeep["PM"+col]
        tychomask[col] = tychokeep[col]
        tychomask["PM"+col] = tychokeep["PM_"+col]
    gaiamask["REF_ID"] = gaiakeep["REF_ID"]
    # ADM take care to rigorously convert to int64 for Tycho.
    tychomask["REF_ID"] = tychokeep["TYC1"].astype('int64')*int(1e6) + \
        tychokeep["TYC2"].astype('int64')*10 + tychokeep["TYC3"]
    gaiamask["REF_CAT"], tychomask["REF_CAT"] = 'G2', 'T2'
    gaiamask["REF_MAG"] = gaiakeep['PHOT_G_MEAN_MAG']
    tychomask["REF_MAG"] = tychomag
    gaiamask["URAT_ID"], tychomask["URAT_ID"] = uratid, -1
    gaiamask["TYPE"], tychomask["TYPE"] = 'PSF', 'PSF'
    mask = np.concatenate([gaiamask, tychomask])
    # ADM ...and add the mask radii.
mask["IN_RADIUS"], mask["NEAR_RADIUS"] = radii(mask["REF_MAG"]) if verbose: log.info("Done making mask...(pix={})...t={:.1f} mins".format( pixnum, (time()-t0)/60.)) return mask
import os
import numpy as np
import fitsio
from time import time
from pkg_resources import resource_filename
from desitarget.gaiamatch import find_gaia_files
from desitarget import io

start = time()

# ADM choose the Gaia files to cover the same object
# ADM locations as the sweeps/tractor files.
datadir = resource_filename('desitarget.test', 't')
tractorfiles = sorted(io.list_tractorfiles(datadir))
sweepfiles = sorted(io.list_sweepfiles(datadir))

# ADM read in each of the relevant Gaia files.
gaiafiles = []
for fn in sweepfiles + tractorfiles:
    objs = fitsio.read(fn, columns=["RA", "DEC"])
    gaiafiles.append(find_gaia_files(objs, neighbors=False))
gaiafiles = np.unique(np.concatenate(gaiafiles))

# ADM loop through the Gaia files and write out some rows
# ADM to the "t4" unit test directory.
if not os.path.exists("t4"):
    os.makedirs(os.path.join("t4", "healpix"))
for fn in gaiafiles:
    objs = fitsio.read(fn)
    outfile = os.path.join("t4", "healpix", os.path.basename(fn))
    fitsio.write(outfile, objs[:25], clobber=True)
    print("writing {}".format(outfile))

print('Done...t={:.2f}s'.format(time() - start))
import os
import numpy as np
import fitsio
from time import time
from pkg_resources import resource_filename
from desitarget.gaiamatch import find_gaia_files
from desitarget.uratmatch import find_urat_files
from desitarget import io

start = time()

# ADM choose the Gaia files to cover the same object
# ADM locations as the sweeps/tractor files.
datadir = resource_filename('desitarget.test', 't')
tractorfiles = sorted(io.list_tractorfiles(datadir))
sweepfiles = sorted(io.list_sweepfiles(datadir))

# ADM read in relevant Gaia files.
gaiafiles = []
for fn in sweepfiles + tractorfiles:
    objs = fitsio.read(fn, columns=["RA", "DEC"])
    gaiafiles.append(find_gaia_files(objs, neighbors=False))
gaiafiles = np.unique(np.concatenate(gaiafiles))

# ADM loop through the Gaia files and write out some rows
# ADM to the "t4" unit test directory.
tychofiles, uratfiles = [], []
if not os.path.exists("t4"):
    os.makedirs(os.path.join("t4", "healpix"))
for fn in gaiafiles:
    objs, hdr = fitsio.read(fn, 1, header=True)
    outfile = os.path.join("t4", "healpix", os.path.basename(fn))
    fitsio.write(outfile, objs[:25], header=hdr, clobber=True,
                 extname="GAIAHPX")