def calc_objseps(gid):
    """Compute raw and windowed angular separations between Gaia and Spitzer
    positions for the specified source.

    Parameters
    ----------
    gid : source identifier, passed through to gather_by_id().

    Returns
    -------
    dict with keys 'raw' and 'win', each holding separations in arcseconds
    (raw vs. windowed Spitzer coordinates, respectively).
    """
    # BUG FIX: the original body called gather_by_id(eg_gid), silently
    # ignoring the 'gid' argument and relying on a module-level global.
    _gaia, _spit = gather_by_id(gid)
    # (Removed unused locals _gra/_gde, which duplicated _gcoords.)
    _gcoords = _gaia.ra.data, _gaia.dec.data
    # dAngSep returns degrees; 3.6e3 converts to arcseconds.
    raw_arcsep = 3.6e3 * angle.dAngSep(*_gcoords, _spit['dra'], _spit['dde'])
    win_arcsep = 3.6e3 * angle.dAngSep(*_gcoords, _spit['wdra'], _spit['wdde'])
    return {'raw':raw_arcsep, 'win':win_arcsep}
def fdiag_covers_position_single(self, coord, dfrac=0.3):
    """Check whether coord is less than dfrac*diagonal degrees from the image center.

    Parameters
    ----------
    coord : object exposing .ra.degree / .dec.degree (e.g., an astropy
            SkyCoord) — presumably a single sky position; verify at caller.
    dfrac : float, optional
        Fraction of the cached image diagonal (self._diag_deg) used as the
        maximum allowed separation from the image center (default 0.3).

    Returns
    -------
    bool
        True when the target's separation from the image center
        (self._ctr_radec) is below dfrac * self._diag_deg.
    """
    # Removed unused locals tra/tde — they were consumed only by
    # commented-out debug writes.
    ## Large angular separation rules out coverage:
    ctr_sep_deg = angle.dAngSep(*self._ctr_radec,
                                coord.ra.degree, coord.dec.degree)
    return (ctr_sep_deg < dfrac * self._diag_deg)
def _set_center_and_diagonal(self):
    """Cache the image diagonal length (degrees) and the sky position of
    the image center, derived from the WCS solution.

    Sets self._diag_deg (corner-to-corner angular size) and
    self._ctr_radec (RA, DE of the mid-image pixel).
    """
    # Opposite image corners in 1-based pixel coordinates:
    xx = np.array([1, self.ishape[0]])
    yy = np.array([1, self.ishape[1]])
    cra, cde = self.wcs.all_pix2world(xx, yy, 1, ra_dec_order=True)

    # Angular extent of the full diagonal:
    self._diag_deg = angle.dAngSep(cra[0], cde[0], cra[1], cde[1])

    # Sky coordinates of the mid-image pixel:
    mid_xx = np.average(xx)
    mid_yy = np.average(yy)
    self._ctr_radec = self.wcs.all_pix2world(mid_xx, mid_yy, 1)
    return
## Iterate over individual image tables (prevents double-counting): #for ci,ccat in enumerate(cdata, 1): # sys.stderr.write("\n------------------------------\n") # sys.stderr.write("Checking image %d of %d ...\n" % (ci, len(cdata))) # for gi,(gix, gsrc) in enumerate(gm._srcdata.iterrows(), 1): # sys.stderr.write("Checking Gaia source %d of %d ...\n" % (gi, n_gaia)) # pass # pass # if (ntodo > 0) and (ii >= ntodo): # break ## First, check which Gaia sources might get used: tik = time.time() for ii,(index, gsrc) in enumerate(gm._srcdata.iterrows(), 1): sys.stderr.write("\rChecking Gaia source %d of %d ... " % (ii, n_gaia)) sep_sec = 3600. * angle.dAngSep(gsrc.ra, gsrc.dec, every_dra, every_dde) gcounter[gsrc.source_id] += np.sum(sep_sec <= toler_sec) tok = time.time() sys.stderr.write("done. (%.3f s)\n" % (tok-tik)) gc.collect() ## Make Gaia subset of useful objects: need_srcs = 3 useful_ids = [kk for kk,vv in gcounter.items() if vv>need_srcs] use_gaia = gm._srcdata[gm._srcdata.source_id.isin(useful_ids)] n_useful = len(use_gaia) sys.stderr.write("Found possible matches to %d of %d Gaia sources.\n" % (n_useful, len(gm._srcdata))) gc.collect() if n_useful < 5: sys.stderr.write("Gaia match error: found %d useful objects\n" % n_useful)
#rdata = data.copy() # before exclusions ## Exclude super-short exposures: old_size = len(data) #data = rdata[(rdata['EXPTIME'] > 1.1)] data = data[(data['EXPTIME'] > 1.1)] new_size = len(data) sys.stderr.write("Dropped %d of %d sources with short exposures.\n" % (old_size - new_size, old_size)) north = data[_DE] > 0.0 ndata = data[north] sdata = data[~north] ## Proximity to northern target: nsep = angle.dAngSep(ntarg_ra, ntarg_de, ndata[_RA], ndata[_DE]) ssep = angle.dAngSep(starg_ra, starg_de, sdata[_RA], sdata[_DE]) ndata = append_fields(ndata, 'sep', nsep, usemask=False) sdata = append_fields(sdata, 'sep', ssep, usemask=False) nnear = (nsep < 0.03) snear = (ssep < 0.03) nkeep = ndata[nnear] skeep = sdata[snear] def qline(target): #keys = ['cln_fcat', 'cln_cbcd', 'jdutc', 'EXPTIME', _RA, _DE, 'sep'] keys = ['fcat', 'cbcd', 'jdutc', 'EXPTIME', _RA, _DE, 'sep'] vals = tuple([target[kk] for kk in keys]) return "%s %s %16.7f %6.2f %12.7f %12.7f %7.4f" % vals
#sys.stderr.write("Looking for repeating sources ... \n") #tik = time.time() #for rtrial,dtrial in zip(every_dra, every_dde): # sep_sec = 3600.0 * angle.dAngSep(rtrial, dtrial, every_dra, every_dde) # matches = sep_sec <= context.gaia_tol_arcsec #tok = time.time() #sys.stderr.write("Each-against-all took %.3f sec\n" % (tok-tik)) ## In first pass, count matches to each ID: sys.stderr.write("Checking which master list sources are used ...\n") tik = time.time() n_detect = len(det_data) scounter = {x: 0 for x in det_data['srcid']} for ii, sdata in enumerate(det_data, 1): sys.stderr.write("\rChecking detection %d of %d ... " % (ii, n_detect)) sep_sec = 3600. * angle.dAngSep(sdata['dra'], sdata['dde'], every_dra, every_dde) scounter[sdata['srcid']] += np.sum(sep_sec <= context.gaia_tol_arcsec) tok = time.time() sys.stderr.write("done. (%.3f s)\n" % (tok - tik)) gc.collect() ## Collect subset of useful detections: useful = np.array( [scounter[x] > context.min_src_hits for x in det_data['srcid']]) use_dets = det_data[useful] ## Self-associate sources: sys.stderr.write("Associating catalog objects:\n") tik = time.time() smatches = {x: [] for x in use_dets['srcid']} for ci, extcat in enumerate(cdata, 1):
kf_borders = kelt_fields.borders() ##--------------------------------------------------------------------------## ## Brute-force borders and winding number calculation: tik = time.time() nfields = len(kf_borders) kfb_sep_stats = { 'min': [], 'max': [], 'avg': [], 'wind': [], } for i, kfedge in enumerate(kf_borders): #sys.stderr.write("\rField %d of %d ... " % (i+1, nfields)) kfbra, kfbde = kfedge kfb_sep = angle.dAngSep(kfbra, kfbde, context.ra, context.de) fcenter = (kra[i], kde[i]) winding = kfc.count_windings((context.ra, context.de), fcenter) kfb_sep_stats['min'].append(kfb_sep.min()) kfb_sep_stats['max'].append(kfb_sep.max()) kfb_sep_stats['avg'].append(kfb_sep.mean()) kfb_sep_stats['wind'].append(winding) pass tok = time.time() if (context.vlevel >= 1): sys.stderr.write("done. Took %.3f seconds.\n" % (tok - tik)) #n_windings = np.array(n_windings) ## Promote results to numpy arrays: for kk in kfb_sep_stats.keys():