def nearest_source(band, skypos, radius=0.01, maglimit=20.0, verbose=0, catalog="MCAT", retries=20):
    """Return targeting parameters for the nearest MCAT source to a position."""
    out = np.array(
        gQuery.getArray(
            gQuery.mcat_sources(band, skypos[0], skypos[1], radius, maglimit=maglimit),
            verbose=verbose,
            retries=retries,
        )
    )
    if not len(out) and band == "FUV":
        if verbose:
            print "No nearby MCAT source found in FUV. Trying NUV..."
        band = "NUV"
        out = np.array(
            gQuery.getArray(
                gQuery.mcat_sources(band, skypos[0], skypos[1], radius, maglimit=maglimit),
                verbose=verbose,
                retries=retries,
            )
        )
    if not len(out) and band == "NUV":
        if verbose:
            print "No nearby MCAT source found. Using input sky position."
        return skypos[0], skypos[1], 0.01
    # dist = np.sqrt((out[:,0]-skypos[0])**2 + (out[:,1]-skypos[1])**2)
    dist = angularSeparation(out[:, 0], out[:, 1], skypos[0], skypos[1])
    if verbose > 1:
        print "Finding nearest among " + str(len(dist)) + " nearby sources."
    # Note that this doesn't cope with multiple entries for the same source.
    s = out[np.where(dist == dist.min())][0]
    # RA, Dec, NUV mag, FUV mag, NUV fwhm, FUV fwhm
    return avg_sources(band, [s[0], s[1]], verbose=verbose, retries=retries)
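# The snippet below is an illustrative sketch, not part of the pipeline: it
# shows one way to call nearest_source(). The coordinates are placeholders,
# and the _example_* function name is hypothetical.
def _example_nearest_source():
    skypos = [176.919525856024, 0.255696872807351] # hypothetical RA, Dec (degrees)
    # Returns the averaged position and FWHM of the nearest catalog match, or
    # the input position with a default radius if nothing is found nearby.
    ra, dec, fwhm = nearest_source('NUV', skypos, radius=0.01, maglimit=20.0)
    return ra, dec, fwhm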
def get_mcat_data(skypos,rad):
    # Try once with the default radius.
    out = np.array(gQuery.getArray(
            gQuery.mcat_visit_sources(skypos[0],skypos[1],rad)))
    # If no MCAT sources found, try again with a radius 5 times bigger.
    if len(out) == 0:
        out = np.array(gQuery.getArray(
                gQuery.mcat_visit_sources(skypos[0],skypos[1],rad*5.)))
    # FIXME: The APER entries should really be generated
    try:
        return {'objid':np.array(out[:,0],dtype='int64'),
                'ra':np.array(out[:,1],dtype='float32'),
                'dec':np.array(out[:,2],dtype='float32'),
                'NUV':{'mag':np.array(out[:,3],dtype='float32'),
                       'skybg':np.array(out[:,6],dtype='float32'),
                       'expt':np.array(out[:,11],dtype='float32'),
                       'fwhm':np.array(out[:,8],dtype='float32'),
                       1:{'mag':np.array(out[:,19],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,33],dtype='float32')},
                       2:{'mag':np.array(out[:,20],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,34],dtype='float32')},
                       3:{'mag':np.array(out[:,21],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,35],dtype='float32')},
                       4:{'mag':np.array(out[:,22],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,36],dtype='float32')},
                       5:{'mag':np.array(out[:,23],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,37],dtype='float32')},
                       6:{'mag':np.array(out[:,24],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,38],dtype='float32')},
                       7:{'mag':np.array(out[:,25],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,39],dtype='float32')}
                      },
                'FUV':{'mag':np.array(out[:,4],dtype='float32'),
                       'skybg':np.array(out[:,7],dtype='float32'),
                       'expt':np.array(out[:,10],dtype='float32'),
                       'fwhm':np.array(out[:,9],dtype='float32'),
                       1:{'mag':np.array(out[:,12],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,26],dtype='float32')},
                       2:{'mag':np.array(out[:,13],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,27],dtype='float32')},
                       3:{'mag':np.array(out[:,14],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,28],dtype='float32')},
                       4:{'mag':np.array(out[:,15],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,29],dtype='float32')},
                       5:{'mag':np.array(out[:,16],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,30],dtype='float32')},
                       6:{'mag':np.array(out[:,17],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,31],dtype='float32')},
                       7:{'mag':np.array(out[:,18],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,32],dtype='float32')}
                      }
                }
    except IndexError:
        # If there are STILL no detections, then pass a dict with empty values.
        # A default set of values will then be used.
        return {'objid':None,'ra':None,'dec':None,'NUV':None,'FUV':None}
    except:
        raise
def web_query_aspect(eclipse, retries=20):
    """Grab the aspect data from the MAST database for a given eclipse."""
    print "Attempting to query MAST database for aspect records."
    entries = gQuery.getArray(gQuery.aspect_ecl(eclipse), retries=retries)
    n = len(entries)
    print " Located " + str(n) + " aspect entries."
    if not n:
        print "No aspect entries for eclipse " + str(eclipse)
        return
    ra, dec, twist, time, flags = [], [], [], [], []
    header = {"RA": [], "DEC": [], "ROLL": []}
    ra0, dec0, twist0 = [], [], []
    for i in xrange(n):
        # The times are *1000 in the database so they can be stored as integers.
        time.append(float(entries[i][2]) / 1000.0)
        ra.append(float(entries[i][3]))
        dec.append(float(entries[i][4]))
        twist.append(float(entries[i][5]))
        flags.append(float(entries[i][6]))
        ra0.append(float(entries[i][7]))
        dec0.append(float(entries[i][8]))
        twist0.append(float(entries[i][9]))
    # Need to sort the output so that it is time ordered before returning.
    # Although it should already be ordered by time because that is requested
    # in the SQL query above. If this is time consuming, remove it.
    ix = np.argsort(np.array(time))
    header = {"RA": np.array(ra0)[ix], "DEC": np.array(dec0)[ix],
              "ROLL": np.array(twist0)[ix]}
    return (np.array(ra)[ix], np.array(dec)[ix], np.array(twist)[ix],
            np.array(time)[ix], header, np.array(flags)[ix])
def makemap(band,skypos,trange,skyrange,response=False,verbose=0,detsize=1.1):
    imsz = gxt.deg2pix(skypos,skyrange)
    photons = np.array(gQuery.getArray(gQuery.skyrect(band,
        skypos[0],skypos[1],trange[0],trange[1],skyrange[0],skyrange[1]),
        verbose=verbose),dtype='float64')
    try:
        events = {'t':photons[:,0]/tscale,'ra':photons[:,1],'dec':photons[:,2],
                  'xi':photons[:,3],'eta':photons[:,4],
                  'x':photons[:,5],'y':photons[:,6]}
    except IndexError:
        if verbose>2:
            print 'No events found at {s} +/- {r} in {t}.'.format(
                s=skypos,r=skyrange,t=trange)
        return np.zeros(imsz)
    # Trim the data on detsize
    col, row = ct.xieta2colrow(events['xi'],events['eta'],band)
    ix = np.where((1.25/800.)*mc.distance(col,row,400,400)<=detsize)
    n = len(ix[0])
    m = len(col)
    #print 'With detsize {d} using {n} of {m} data.'.format(d=detsize,n=n,m=m)
    if n == 0:
        return np.zeros(imsz)
    for k in events.keys():
        events[k] = events[k][ix]
    events = ct.hashresponse(band,events)
    wcs = define_wcs(skypos,skyrange,width=False,height=False)
    coo = zip(events['ra'],events['dec'])
    foc = wcs.sip_pix2foc(wcs.wcs_world2pix(coo,1),1)
    weights = 1./events['response'] if response else None
    H,xedges,yedges=np.histogram2d(foc[:,1]-0.5,foc[:,0]-0.5,bins=imsz,
        range=([ [0,imsz[0]],[0,imsz[1]] ]),weights=weights)
    return H
def pullphotons(band, ra0, dec0, tranges, radius, events={}, verbose=0,
                tscale=1000., calpath='../cal/', chunksz=10e6):
    """Retrieve photons within an aperture from the database."""
    events = {'t':[],'ra':[],'dec':[],'xi':[],'eta':[]}
    if verbose:
        print "Retrieving photons at ["+str(ra0)+", "+str(dec0)+"] within a radius of "+str(radius)
    for trange in tranges:
        if verbose:
            mc.print_inline(" and between "+str(trange[0])+" and "+
                            str(trange[1])+".")
        stream = gQuery.getArray(
            gQuery.allphotons(band, ra0, dec0, trange[0], trange[1], radius),
            verbose=verbose,retries=100)
        if not stream:
            continue
        events['t'] = events['t']+np.array(np.array(stream,
                                    dtype='float64')[:,0]/tscale).tolist()
        # The float64 precision _is_ significant for RA / Dec.
        events['ra'] = events['ra']+np.array(np.array(stream,
                                    dtype='float64')[:,1]).tolist()
        events['dec'] = events['dec']+np.array(np.array(stream,
                                    dtype='float64')[:,2]).tolist()
        events['xi'] = events['xi']+np.array(np.array(stream,
                                    dtype='float32')[:,3]).tolist()
        events['eta'] = events['eta']+np.array(np.array(stream,
                                    dtype='float32')[:,4]).tolist()
    events['t'] = np.array(events['t'],dtype='float64')
    events['ra'] = np.array(events['ra'],dtype='float64')
    events['dec'] = np.array(events['dec'],dtype='float64')
    events['xi'] = np.array(events['xi'],dtype='float32')
    events['eta'] = np.array(events['eta'],dtype='float32')
    events = hashresponse(band, events, calpath=calpath, verbose=verbose)
    return events
def nearest_distinct_source(band,skypos,radius=0.1,maglimit=20.0,verbose=0,
                            catalog='MCAT',retries=20):
    """Return parameters for the nearest non-targeted source."""
    out = np.array(gQuery.getArray(gQuery.mcat_sources(band,skypos[0],
        skypos[1],radius,maglimit=maglimit),verbose=verbose,retries=retries))
    #dist = np.sqrt((out[:,0]-skypos[0])**2 + (out[:,1]-skypos[1])**2)
    dist = angularSeparation(out[:,0],out[:,1],skypos[0],skypos[1])
    ix = np.where(dist>0.005)
    return np.array(out)[ix][np.where(dist[ix]==dist[ix].min())][0]
def avg_sources(band,skypos,radius=0.001,maglimit=20.0,verbose=0,
                catalog='MCAT',retries=20):
    """Return the mean position of sources within the search radius."""
    out = np.array(gQuery.getArray(gQuery.mcat_sources(band,skypos[0],
        skypos[1],radius,maglimit=maglimit),verbose=verbose,retries=retries))
    ix = np.where(out[:,-2]>0) if band=='NUV' else np.where(out[:,-1]>0)
    fwhm = out[ix,-2].mean() if band=='NUV' else out[ix,-1].mean()
    return out[ix,0].mean(),out[ix,1].mean(),round(fwhm,4)
def get_mags(band,ra0,dec0,radius,maglimit,mode='coadd',
             zpmag={'NUV':20.08,'FUV':18.82},verbose=0):
    """Given RA, Dec, and a search radius, search the coadd MCAT for sources.
    Returns a dict() which contains magnitudes for all of the APER settings.

    Note: Visit mode returns a lot more sources, more slowly, than coadd mode
    given the same search parameters, so you should probably use smaller search
    radii in visit mode. If you're just trying to find unique sources in a
    large region, use coadd mode and then pass the result through the
    parse_unique_sources() function contained in this module.
    """
    zpf,zpn = zpmag['FUV'],zpmag['NUV']
    if mode=='coadd':
        out = np.array(gQuery.getArray(
            gQuery.mcat_sources(band,ra0,dec0,radius,maglimit=maglimit),
            verbose=verbose))
        if not len(out):
            print "Warning: No sources found!"
            return 0
        return {'ra':out[:,0],'dec':out[:,1],
                'FUV':{'mag':out[:,3],1:out[:,9]+zpf,2:out[:,10]+zpf,
                       3:out[:,11]+zpf,4:out[:,12]+zpf,5:out[:,13]+zpf,
                       6:out[:,14]+zpf,7:out[:,15]+zpf},
                'NUV':{'mag':out[:,2],1:out[:,16]+zpn,2:out[:,17]+zpn,
                       3:out[:,18]+zpn,4:out[:,19]+zpn,5:out[:,20]+zpn,
                       6:out[:,21]+zpn,7:out[:,22]+zpn}}
    elif mode=='visit':
        out = np.array(gQuery.getArray(
            gQuery.mcat_visit_sources(ra0,dec0,radius),verbose=verbose))
        # NOTE: For runtime considerations, mcat_visit_sources() does not
        # make any slices on time or maglimit, so we need to do it here.
        ix = np.where((out[:,2 if band=='NUV' else 3]<maglimit) &
                      (out[:,2 if band=='NUV' else 3]>0))
        return {'ra':out[:,0][ix],'dec':out[:,1][ix],
                'NUV':{'mag':out[:,2][ix],'expt':out[:,8][ix],
                       1:out[:,18][ix]+zpn,2:out[:,19][ix]+zpn,
                       3:out[:,20][ix]+zpn,4:out[:,21][ix]+zpn,
                       5:out[:,22][ix]+zpn,6:out[:,23][ix]+zpn,
                       7:out[:,24][ix]+zpn},
                'FUV':{'mag':out[:,3][ix],'expt':out[:,9][ix],
                       1:out[:,11][ix]+zpf,2:out[:,12][ix]+zpf,
                       3:out[:,13][ix]+zpf,4:out[:,14][ix]+zpf,
                       5:out[:,15][ix]+zpf,6:out[:,16][ix]+zpf,
                       7:out[:,17][ix]+zpf}}
    else:
        print "mode must be in [coadd,visit]"
        return
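# Illustrative sketch of a get_mags() call in coadd mode; the position, search
# radius, and magnitude limit are placeholder values, and _example_get_mags is
# a hypothetical helper, not part of the module's API.
def _example_get_mags():
    data = get_mags('NUV', 176.919525856024, 0.255696872807351, 0.1, 22.)
    if not data: # get_mags() returns 0 when no sources are found.
        return None
    # Zero-point-adjusted NUV magnitudes for APER setting 4:
    return data['NUV'][4]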
def rrhr(band,skypos,tranges,skyrange,width=False,height=False,stepsz=1.,
         verbose=0,calpath='../cal/',tscale=1000.,response=True,hdu=False,
         retries=20):
    """Generate a high resolution relative response (rrhr) map."""
    imsz = gxt.deg2pix(skypos,skyrange)
    # TODO: Handle the width / height arguments.
    flat = get_fits_data(flat_filename(band,calpath),verbose=verbose)
    flatinfo = get_fits_header(flat_filename(band,calpath))
    npixx,npixy = flat.shape
    fltsz = flat.shape
    pixsz = flatinfo['CDELT2']
    detsize = 1.25
    # Rotate the flat into the correct orientation to start.
    flat = np.flipud(np.rot90(flat))
    # NOTE: This upsample interpolation is done _last_ in the canonical
    # pipeline as part of the poissonbg.c routine.
    # The interpolation function is "congrid" in the same file.
    # TODO: Should this be first order interpolation? (i.e. bilinear)
    hrflat = scipy.ndimage.interpolation.zoom(flat,4.,order=0,prefilter=False)
    img = np.zeros(hrflat.shape)[
        hrflat.shape[0]/2.-imsz[0]/2.:hrflat.shape[0]/2.+imsz[0]/2.,
        hrflat.shape[1]/2.-imsz[1]/2.:hrflat.shape[1]/2+imsz[1]/2.]
    for trange in tranges:
        t0,t1 = trange
        entries = gQuery.getArray(gQuery.aspect(t0,t1),retries=retries)
        n = len(entries)
        asptime = np.float64(np.array(entries)[:,2])/tscale
        aspra   = np.float32(np.array(entries)[:,3])
        aspdec  = np.float32(np.array(entries)[:,4])
        asptwist= np.float32(np.array(entries)[:,5])
        aspflags= np.float32(np.array(entries)[:,6])
        # NOTE: Column 9 holds twist0; this overwrites the twist value read
        # from column 5 above.
        asptwist= np.float32(np.array(entries)[:,9])
        aspra0  = np.zeros(n)+skypos[0]
        aspdec0 = np.zeros(n)+skypos[1]
        xi_vec, eta_vec = gnomonic.gnomfwd_simple(
            aspra,aspdec,aspra0,aspdec0,-asptwist,1.0/36000.,0.)
        col = 4.*( ((( xi_vec/36000.)/(detsize/2.)*(detsize/(fltsz[0]*pixsz)) +
                     1.)/2. * fltsz[0]) - (fltsz[0]/2.) )
        row = 4.*( (((eta_vec/36000.)/(detsize/2.)*(detsize/(fltsz[1]*pixsz)) +
                     1.)/2. * fltsz[1]) - (fltsz[1]/2.) )
        vectors = rotvec(np.array([col,row]),-asptwist)
        for i in range(n):
            if verbose>1:
                print_inline('Stamping '+str(asptime[i]))
            # FIXME: Clean this mess up a little just for clarity.
            img += scipy.ndimage.interpolation.shift(
                scipy.ndimage.interpolation.rotate(hrflat,-asptwist[i],
                    reshape=False,order=0,prefilter=False),
                [vectors[1,i],vectors[0,i]],order=0,prefilter=False)[
                    hrflat.shape[0]/2.-imsz[0]/2.:hrflat.shape[0]/2.+imsz[0]/2.,
                    hrflat.shape[1]/2.-imsz[1]/2.:hrflat.shape[1]/2+imsz[1]/2.
                ]*dbt.compute_exptime(band,[asptime[i],asptime[i]+1],
                    verbose=verbose,retries=retries)*gxt.compute_flat_scale(
                    asptime[i]+0.5,band,verbose=0)
    return img
def bg_sources(band,ra0,dec0,radius,maskdepth=20.0,maskradius=1.5,margin=0.001):
    """Return the MCAT sources within the search region that are brighter than
    the masking depth, for use in masking background sources."""
    sources = gQuery.getArray(gQuery.mcat_sources(band,ra0,dec0,radius+margin,
                                                  maglimit=maskdepth))
    try:
        return {'ra':np.float32(np.array(sources)[:,0]),
                'dec':np.float32(np.array(sources)[:,1]),
                'fwhm':np.float32(np.array(sources)[:,7:9]),
                'maskdepth':maskdepth,'maskradius':maskradius,
                'radius':radius}
    except IndexError:
        # No sources found: return empty arrays with the same keys as above.
        return {'ra':np.array([]),'dec':np.array([]),'fwhm':np.array([]),
                'maskdepth':maskdepth,'maskradius':maskradius,'radius':radius}
def compute_shutter(band,trange,verbose=0,retries=20,shutgap=0.05,
                    timestamplist=False):
    try:
        t = (timestamplist if np.array(timestamplist).any() else
             np.array(gQuery.getArray(
                gQuery.uniquetimes(band,trange[0],trange[1],flag=True),
                verbose=verbose),dtype='float64')[:,0]/gQuery.tscale)
    except IndexError:
        # Shutter this whole time range.
        return trange[1]-trange[0]
    t = np.sort(np.unique(np.append(t,trange)))
    ix = np.where(t[1:]-t[:-1]>=shutgap)
    return np.array(t[1:]-t[:-1])[ix].sum()
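# Illustrative sketch of the shutter calculation using a synthetic timestamp
# list instead of a database query (assumes the compute_shutter() variant above
# that accepts a timestamplist argument). The time range and timestamps are
# made up: the two gaps larger than shutgap (0.30 s and 1.66 s) count as
# shuttered time, for a total of 1.96 seconds.
def _example_compute_shutter():
    trange = [0., 2.] # hypothetical time range, in seconds
    timestamps = np.array([0., 0.01, 0.02, 0.03, 0.33, 0.34])
    return compute_shutter('NUV', trange, timestamplist=timestamps)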
def get_aspect(band,skypos,trange=[6e8,11e8],tscale=1000.,verbose=0):
    """Get aspect solution in a dict() for given time range."""
    asp = np.array(gQuery.getArray(gQuery.aspect(trange[0],trange[1]),
                                   verbose=verbose))
    return {'eclipse':np.array(asp[:,0],dtype='int16'),'filename':asp[:,1],
            't':np.array(asp[:,2],dtype='float64')/tscale,
            'ra':np.array(asp[:,3],dtype='float64'),
            'dec':np.array(asp[:,4],dtype='float64'),
            'twist':np.array(asp[:,5],dtype='float64'),
            'flag':np.array(asp[:,6],dtype='int8'),
            'ra0':np.array(asp[:,7],dtype='float64'),
            'dec0':np.array(asp[:,8],dtype='float64'),
            'twist0':np.array(asp[:,9],dtype='float64')}
def mcat_skybg(band, skypos, radius, verbose=0, retries=20):
    """Estimate the sky background using the MCAT skybg for nearby sources."""
    # Setting maglimit to 30 so that it gets _everything_...
    sources = gQuery.getArray(
        gQuery.mcat_sources(band, skypos[0], skypos[1], radius, maglimit=30),
        retries=retries)
    # The MCAT reports skybg in photons/sec/sq.arcsec
    if band == "NUV":
        skybg = np.float32(np.array(sources)[:, 5]).mean()
    else:
        skybg = np.float32(np.array(sources)[:, 6]).mean()
    # And radius is in degrees
    return skybg * area(radius * 60.0 * 60.0)
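# Worked example of the unit conversion performed above, as a standalone sketch
# with made-up numbers (and assuming area() is the usual pi*r**2 helper): the
# MCAT skybg is in photons/sec/sq.arcsec, so converting the aperture radius
# from degrees to arcseconds and multiplying by the aperture area yields a
# background count rate for the aperture.
def _example_skybg_to_countrate():
    skybg = 5e-4                       # hypothetical skybg, photons/sec/sq.arcsec
    radius = 0.004                     # hypothetical aperture radius, degrees
    radius_arcsec = radius * 60. * 60. # 14.4 arcseconds
    return skybg * np.pi * radius_arcsec ** 2. # counts/sec within the aperture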
def exposure(band,trange,verbose=0,retries=20): """Compute the effective exposure time for a time range.""" rawexpt = trange[1]-trange[0] if rawexpt<=0: return 0. shutdead = gQuery.getArray(gQuery.shutdead(band,trange[0],trange[1]), verbose=verbose,retries=retries) # NOTE: The deadtime correction in shutdead does not work properly in FUV # so we're doing it separately for now. deadtime = gQuery.getValue(gQuery.deadtime(band,trange[0],trange[1]), verbose=verbose,retries=retries) #return (rawexpt-shutdead[0][0])*(1.-shutdead[1][0]) return (rawexpt-shutdead[0][0])*(1.-deadtime)
def exp_from_objid(objid):
    """Return the effective exposure times and start/stop times of the NUV and
    FUV observations associated with an MCAT objid."""
    out = np.array(gQuery.getArray(gQuery.mcat_objid_search(objid)))
    return {
        "NUV": {
            "expt": np.array(out[:, 7], dtype="float")[0],
            "t0": np.array(out[:, 9], dtype="float64")[0] - GPSSECS,
            "t1": np.array(out[:, 10], dtype="float64")[0] - GPSSECS,
        },
        "FUV": {
            "expt": np.array(out[:, 8], dtype="float")[0],
            "t0": np.array(out[:, 11], dtype="float64")[0] - GPSSECS,
            "t1": np.array(out[:, 12], dtype="float64")[0] - GPSSECS,
        },
    }
def get_mcat_data(skypos,rad):
    out = np.array(gQuery.getArray(
            gQuery.mcat_visit_sources(skypos[0],skypos[1],rad)))
    # FIXME: The APER entries should really be generated
    try:
        return {'objid':np.array(out[:,0],dtype='int64'),
                'ra':np.array(out[:,1],dtype='float32'),
                'dec':np.array(out[:,2],dtype='float32'),
                'NUV':{'mag':np.array(out[:,3],dtype='float32'),
                       'skybg':np.array(out[:,6],dtype='float32'),
                       'expt':np.array(out[:,11],dtype='float32'),
                       'fwhm':np.array(out[:,8],dtype='float32'),
                       1:{'mag':np.array(out[:,19],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,33],dtype='float32')},
                       2:{'mag':np.array(out[:,20],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,34],dtype='float32')},
                       3:{'mag':np.array(out[:,21],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,35],dtype='float32')},
                       4:{'mag':np.array(out[:,22],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,36],dtype='float32')},
                       5:{'mag':np.array(out[:,23],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,37],dtype='float32')},
                       6:{'mag':np.array(out[:,24],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,38],dtype='float32')},
                       7:{'mag':np.array(out[:,25],dtype='float32')+zpmag('NUV'),
                          'err':np.array(out[:,39],dtype='float32')}
                      },
                'FUV':{'mag':np.array(out[:,4],dtype='float32'),
                       'skybg':np.array(out[:,7],dtype='float32'),
                       'expt':np.array(out[:,10],dtype='float32'),
                       'fwhm':np.array(out[:,9],dtype='float32'),
                       1:{'mag':np.array(out[:,12],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,26],dtype='float32')},
                       2:{'mag':np.array(out[:,13],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,27],dtype='float32')},
                       3:{'mag':np.array(out[:,14],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,28],dtype='float32')},
                       4:{'mag':np.array(out[:,15],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,29],dtype='float32')},
                       5:{'mag':np.array(out[:,16],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,30],dtype='float32')},
                       6:{'mag':np.array(out[:,17],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,31],dtype='float32')},
                       7:{'mag':np.array(out[:,18],dtype='float32')+zpmag('FUV'),
                          'err':np.array(out[:,32],dtype='float32')}
                      }
                }
    except IndexError:
        return False
    except:
        raise
def exposure(band,trange,verbose=0,retries=20):
    """Compute the effective exposure time as the raw time range, minus the
    shutter correction, scaled by (1 - deadtime)."""
    rawexpt = trange[1]-trange[0]
    if rawexpt==0.:
        return 0.
    try:
        t = np.array(gQuery.getArray(
            gQuery.uniquetimes(band,trange[0],trange[1],flag=True),
            verbose=verbose),dtype='float64')[:,0]/gQuery.tscale
    except IndexError:
        # Shutter this whole time range.
        if verbose:
            print 'No data in {t0},{t1}'.format(t0=trange[0],t1=trange[1])
        return 0
    shutter = compute_shutter(band,trange,verbose=verbose,retries=retries,
                              timestamplist=t)
    deadtime = empirical_deadtime(band,trange,verbose=verbose,retries=retries,
                                  timestamplist=t)
    return (rawexpt-shutter)*(1.-deadtime)
def makemap(band,skypos,trange,skyrange,response=False,verbose=0):
    imsz = gxt.deg2pix(skypos,skyrange)
    photons = np.array(gQuery.getArray(gQuery.skyrect(band,
        skypos[0],skypos[1],trange[0],trange[1],skyrange[0],skyrange[1]),
        verbose=verbose),dtype='float64')
    try:
        events = {'t':photons[:,0]/tscale,'ra':photons[:,1],'dec':photons[:,2],
                  'xi':photons[:,3],'eta':photons[:,4],
                  'x':photons[:,5],'y':photons[:,6]}
    except IndexError:
        # No photons were returned, so return a blank image.
        return np.zeros(imsz)
    if len(events['t'])==0:
        return np.zeros(imsz)
    events = ct.hashresponse(band,events)
    wcs = define_wcs(skypos,skyrange,width=False,height=False)
    coo = zip(events['ra'],events['dec'])
    foc = wcs.sip_pix2foc(wcs.wcs_world2pix(coo,1),1)
    weights = 1./events['response'] if response else None
    H,xedges,yedges=np.histogram2d(foc[:,1]-0.5,foc[:,0]-0.5,bins=imsz,
        range=([ [0,imsz[0]],[0,imsz[1]] ]),weights=weights)
    return H
def countmap(band,skypos,tranges,skyrange,width=False,height=False,
             verbose=0,tscale=1000.,memlight=False,hdu=False,retries=20):
    """Create a count (cnt) map."""
    imsz = gxt.deg2pix(skypos,skyrange)
    count = np.zeros(imsz)
    for trange in tranges:
        # If memlight is requested, break the integration into
        # smaller chunks.
        step = memlight if memlight else trange[1]-trange[0]
        for i in np.arange(trange[0],trange[1],step):
            t0,t1=i,i+step
            if verbose:
                print_inline('Coadding '+str(t0)+' to '+str(t1))
            events = gQuery.getArray(gQuery.rect(band,skypos[0],skypos[1],t0,t1,
                                                 skyrange[0],skyrange[1]),
                                     verbose=verbose,retries=retries)
            # Check that there is actually data here.
            if not events:
                if verbose>1:
                    print "No data in "+str([t0,t1])
                continue
            times = np.array(events,dtype='float64')[:,0]/tscale
            coo = np.array(events,dtype='float64')[:,1:]
            # If there's no data, return a blank image.
            if len(coo)==0:
                if verbose:
                    print 'No data in this frame: '+str([t0,t1])
                continue
            # Define World Coordinate System (WCS)
            wcs = define_wcs(skypos,skyrange,width=False,height=False)
            # Map the sky coordinates onto the focal plane
            foc = wcs.sip_pix2foc(wcs.wcs_world2pix(coo,1),1)
            # Bin the events into actual image pixels
            H,xedges,yedges=np.histogram2d(foc[:,1]-0.5,foc[:,0]-0.5,
                bins=imsz,range=([ [0,imsz[0]],[0,imsz[1]] ]))
            count += H
    return count
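# Illustrative sketch: coadd a count map over all available exposure for a
# hypothetical 10 x 10 arcminute field. The coordinates are placeholders, and
# fGetTimeRanges() (defined later in this module) is used to find the
# contiguous time ranges to integrate.
def _example_countmap():
    skypos = [176.919525856024, 0.255696872807351] # hypothetical target
    skyrange = [0.1666, 0.1666]                    # width, height in degrees
    tranges = fGetTimeRanges('NUV', skypos)
    return countmap('NUV', skypos, tranges, skyrange, verbose=1)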
def stimcount_shuttered(band,trange,verbose=0,retries=20.,timestamplist=False):
    """Count stim events within a time range, excluding shuttered periods."""
    try:
        t = (timestamplist if np.array(timestamplist).any() else
             np.array(gQuery.getArray(
                gQuery.uniquetimes(band,trange[0],trange[1]),
                verbose=verbose),dtype='float64')[:,0]/gQuery.tscale)
    except IndexError:
        # Shutter this whole time range.
        if verbose:
            print 'No data in {t0},{t1}'.format(t0=trange[0],t1=trange[1])
        return 0
    times = np.sort(np.unique(np.append(t,trange)))
    tranges = distinct_tranges(times,maxgap=0.05)
    stimcount = 0
    for trange in tranges:
        stimcount += (gQuery.getValue(gQuery.stimcount(band,trange[0],trange[1]),
                                      verbose=verbose) +
                      gQuery.getValue(gQuery.stimcount(band,trange[0],trange[1],
                                      null=False),verbose=verbose))
    return stimcount
def get_aspect(band,skypos,trange=[6e8,11e8],verbose=0,detsize=1.25):
    """Get aspect solution in a dict() for given time range."""
    asp = np.array(gQuery.getArray(gQuery.aspect_skypos(skypos[0],skypos[1],
                                   detsize=detsize),verbose=verbose))
    data = {'eclipse':np.array(asp[:,0],dtype='int16'),'filename':asp[:,1],
            't':np.array(asp[:,2],dtype='float64')/tscale,
            'ra':np.array(asp[:,3],dtype='float64'),
            'dec':np.array(asp[:,4],dtype='float64'),
            'twist':np.array(asp[:,5],dtype='float64'),
            'flag':np.array(asp[:,6],dtype='int8'),
            'ra0':np.array(asp[:,7],dtype='float64'),
            'dec0':np.array(asp[:,8],dtype='float64'),
            'twist0':np.array(asp[:,9],dtype='float64')}
    ix = np.where((data['t']>trange[0]) & (data['t']<trange[1]) &
                  (angularSeparation(skypos[0],skypos[1],
                                     data['ra'],data['dec'])<=detsize/2.))
    for key in data.keys():
        data[key] = data[key][ix]
    return data
def globalcount_shuttered(band,trange,verbose=0,timestamplist=False):
    """Count all detector events (null and non-null) within a time range,
    excluding shuttered periods."""
    try:
        t = (timestamplist if np.array(timestamplist).any() else
             np.array(gQuery.getArray(
                gQuery.uniquetimes(band,trange[0],trange[1],flag=True),
                verbose=verbose),dtype='float64')[:,0]/gQuery.tscale)
    except IndexError:
        # Shutter this whole time range.
        if verbose:
            print 'No data in {t0},{t1}'.format(t0=trange[0],t1=trange[1])
        return 0
    times = np.sort(np.unique(np.append(t,trange)))
    tranges = distinct_tranges(times,maxgap=0.05)
    nonnullevents,nullevents = 0,0
    for trange in tranges:
        nullevents += gQuery.getValue(
            gQuery.deadtime2(band,trange[0],trange[1]),verbose=verbose)
        nonnullevents += gQuery.getValue(gQuery.deadtime1(band,trange[0],
                                         trange[1]),verbose=verbose)
    return nullevents+nonnullevents
def query_photons(band,ra0,dec0,tranges,radius,verbose=0):
    """Retrieve photons within an aperture from the database."""
    stream = []
    if verbose:
        print "Retrieving photons within {rad} degrees of [{r}, {d}]".format(
            rad=radius,r=ra0,d=dec0)
    for trange in tranges:
        if verbose:
            mc.print_inline(" and between "+str(trange[0])+" and "+
                            str(trange[1])+".")
        thisstream = gQuery.getArray(
            gQuery.allphotons(band, ra0, dec0, trange[0], trange[1], radius),
            verbose=verbose,retries=100)
        stream.extend(thisstream)
    stream = np.array(stream, 'f8').T
    colnames = ['t', 'ra', 'dec', 'xi', 'eta', 'x', 'y']
    dtypes = ['f8', 'f8', 'f8', 'f4', 'f4', 'f4', 'f4']
    cols = map(np.asarray, stream, dtypes)
    events = dict(zip(colnames, cols))
    events['t']/=tscale # Adjust the timestamp by tscale
    return events
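# Illustrative sketch of a query_photons() call for one short time range around
# a hypothetical position; the coordinates, time range, and aperture radius are
# placeholders.
def _example_query_photons():
    skypos = [176.919525856024, 0.255696872807351] # hypothetical RA, Dec
    tranges = [[766525332.995, 766526576.995]]     # hypothetical time range
    events = query_photons('NUV', skypos[0], skypos[1], tranges, 0.004)
    # events is a dict of arrays keyed by 't','ra','dec','xi','eta','x','y'.
    return len(events['t'])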
def get_valid_times(band,skypos,trange=None,detsize=1.1,verbose=0,retries=100.,
                    skyrange=None):
    """Return the sorted set of unique timestamps with valid exposure at a sky
    position (optionally gridded across a sky range)."""
    if not np.array(trange).tolist():
        trange = [1,1000000000000]
    if len(np.shape(trange))==2:
        trange=trange[0]
    # FIXME: This is probably not an optimally efficient way to check an entire
    # region of sky for data, but it's not hugely dumb and does work...
    # Assemble sky positions on a grid within the targeted region.
    skypos_list = [skypos]
    if skyrange:
        for r in np.linspace(skypos[0]-skyrange[0]/2.,skypos[0]+skyrange[0]/2.,
                             np.ceil(skyrange[0]/(detsize/2.)),endpoint=True):
            for d in np.linspace(skypos[1]-skyrange[1]/2.,
                                 skypos[1]+skyrange[1]/2.,
                                 np.ceil(skyrange[1]/(detsize/2.)),
                                 endpoint=True):
                skypos_list += [[r,d]]
    times = []
    for skypos in skypos_list:
        try:
            times = (list(times) +
                list(np.array(gQuery.getArray(gQuery.exposure_ranges(
                    band,skypos[0],skypos[1],t0=trange[0],t1=trange[1],
                    detsize=detsize),verbose=verbose,retries=retries),
                    dtype='float64')[:,0]/tscale))
        except IndexError:
            if verbose:
                print "No exposure time available at {pos}".format(pos=skypos)
            return np.array([],dtype='float64')
        except TypeError:
            print "Is one of the inputs malformed?"
            raise
        except:
            raise
    return np.sort(np.unique(times))
def get_aspect(band, skypos, trange=[6e8, 11e8], verbose=0, detsize=1.25):
    """Get aspect solution in a dict() for given time range."""
    asp = np.array(gQuery.getArray(gQuery.aspect_skypos(skypos[0], skypos[1],
                                   detsize=detsize), verbose=verbose))
    data = {
        "eclipse": np.array(asp[:, 0], dtype="int16"),
        "filename": asp[:, 1],
        "t": np.array(asp[:, 2], dtype="float64") / tscale,
        "ra": np.array(asp[:, 3], dtype="float64"),
        "dec": np.array(asp[:, 4], dtype="float64"),
        "twist": np.array(asp[:, 5], dtype="float64"),
        "flag": np.array(asp[:, 6], dtype="int8"),
        "ra0": np.array(asp[:, 7], dtype="float64"),
        "dec0": np.array(asp[:, 8], dtype="float64"),
        "twist0": np.array(asp[:, 9], dtype="float64"),
    }
    ix = np.where(
        (data["t"] > trange[0]) &
        (data["t"] < trange[1]) &
        (angularSeparation(skypos[0], skypos[1],
                           data["ra"], data["dec"]) <= detsize / 2.0)
    )
    for key in data.keys():
        data[key] = data[key][ix]
    return data
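# Illustrative sketch of a get_aspect() call (assumes the variant above that
# filters on sky position and time); the coordinates and time range are
# placeholders.
def _example_get_aspect():
    skypos = [176.919525856024, 0.255696872807351] # hypothetical RA, Dec
    aspect = get_aspect('NUV', skypos, trange=[766525332., 766526576.])
    print '{n} aspect entries found.'.format(n=len(aspect['t']))
    return aspect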
def get_mcat_data(skypos, rad):
    # Try once with the default radius.
    out = np.array(gQuery.getArray(
        gQuery.mcat_visit_sources(skypos[0], skypos[1], rad)))
    # If no MCAT sources found, try again with a radius 5 times bigger.
    if len(out) == 0:
        out = np.array(gQuery.getArray(
            gQuery.mcat_visit_sources(skypos[0], skypos[1], rad * 5.0)))
    # FIXME: The APER entries should really be generated
    try:
        return {
            "objid": np.array(out[:, 0], dtype="int64"),
            "ra": np.array(out[:, 1], dtype="float32"),
            "dec": np.array(out[:, 2], dtype="float32"),
            "NUV": {
                "mag": np.array(out[:, 3], dtype="float32"),
                "skybg": np.array(out[:, 6], dtype="float32"),
                "expt": np.array(out[:, 11], dtype="float32"),
                "fwhm": np.array(out[:, 8], dtype="float32"),
                1: {"mag": np.array(out[:, 19], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 33], dtype="float32")},
                2: {"mag": np.array(out[:, 20], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 34], dtype="float32")},
                3: {"mag": np.array(out[:, 21], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 35], dtype="float32")},
                4: {"mag": np.array(out[:, 22], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 36], dtype="float32")},
                5: {"mag": np.array(out[:, 23], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 37], dtype="float32")},
                6: {"mag": np.array(out[:, 24], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 38], dtype="float32")},
                7: {"mag": np.array(out[:, 25], dtype="float32") + zpmag("NUV"),
                    "err": np.array(out[:, 39], dtype="float32")},
            },
            "FUV": {
                "mag": np.array(out[:, 4], dtype="float32"),
                "skybg": np.array(out[:, 7], dtype="float32"),
                "expt": np.array(out[:, 10], dtype="float32"),
                "fwhm": np.array(out[:, 9], dtype="float32"),
                1: {"mag": np.array(out[:, 12], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 26], dtype="float32")},
                2: {"mag": np.array(out[:, 13], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 27], dtype="float32")},
                3: {"mag": np.array(out[:, 14], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 28], dtype="float32")},
                4: {"mag": np.array(out[:, 15], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 29], dtype="float32")},
                5: {"mag": np.array(out[:, 16], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 30], dtype="float32")},
                6: {"mag": np.array(out[:, 17], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 31], dtype="float32")},
                7: {"mag": np.array(out[:, 18], dtype="float32") + zpmag("FUV"),
                    "err": np.array(out[:, 32], dtype="float32")},
            },
        }
    except IndexError:
        # If there are STILL no detections, then pass a dict with empty values.
        # A default set of values will then be used.
        return {"objid": None, "ra": None, "dec": None, "NUV": None, "FUV": None}
    except:
        raise
def compute_shutter(band,trange,verbose=0,retries=20,shutgap=0.05):
    t = np.sort(np.array(gQuery.getArray(gQuery.uniquetimes(
            band,trange[0],trange[1])),dtype='float64')[:,0]/gQuery.tscale)
    ix = np.where(t[1:]-t[:-1]>=shutgap)
    return len(ix[0])*shutgap
def exp_from_objid(objid):
    out = np.array(gQuery.getArray(gQuery.mcat_objid_search(objid)))
    return {'NUV':{'expt':np.array(out[:,7],dtype='float')[0],
                   't0':np.array(out[:,9],dtype='float64')[0]-GPSSECS,
                   't1':np.array(out[:,10],dtype='float64')[0]-GPSSECS},
            'FUV':{'expt':np.array(out[:,8],dtype='float')[0],
                   't0':np.array(out[:,11],dtype='float64')[0]-GPSSECS,
                   't1':np.array(out[:,12],dtype='float64')[0]-GPSSECS}}
def fGetTimeRanges(band,skypos,trange=None,tscale=1000.,detsize=1.25,verbose=0,
                   maxgap=1.,minexp=1.,retries=100.,predicted=False):
    """Find the contiguous time ranges within a time range at a specific
    location.

    minexp - Do not include exposure time less than this.
    maxgap - Gaps in exposure longer than this initiate a new time range.
    detsize - Fiddle with this if you want to exclude the edges of the detector.
    predicted - Use the aspect solutions to estimate what exposure will be
                available once the database is fully populated.
    """
    try:
        if not np.array(trange).tolist():
            trange = [1,1000000000000]
        if len(np.shape(trange))==2:
            trange=trange[0]
        times = (np.array(gQuery.getArray(gQuery.exposure_ranges(band,
            skypos[0],skypos[1],t0=trange[0],t1=trange[1],detsize=detsize,
            tscale=tscale),verbose=verbose,retries=retries),
            dtype='float64')[:,0]/tscale if not predicted else
            get_aspect(band,skypos,trange,tscale=tscale,verbose=verbose)['t'])
    except IndexError:
        if verbose:
            print "No exposure time available at {pos}".format(pos=skypos)
        return np.array([],dtype='float64')
    except TypeError:
        print "Is one of the inputs malformed?"
        raise
    except:
        raise
    if verbose:
        print_inline('Parsing '+str(len(times)-1)+' seconds of exposure: ['+
                     str(trange[0])+', '+str(trange[1])+']')
    blah = []
    for i in xrange(len(times[0:-1])):
        blah.append(times[i+1]-times[i])
    # A drop in data with duration greater than maxgap initiates a
    # new exposure time range.
    gaps = np.where(np.array(blah)>maxgap)
    ngaps = len(gaps[0])
    chunks = []
    for i in range(ngaps):
        if not i:
            chunk = [times[0],times[gaps[0][i]]]
        elif i==ngaps-1:
            chunk = [times[gaps[0][i]+1],times[-1]]
        else:
            chunk = [times[gaps[0][i]+1],times[gaps[0][i+1]]]
        # If the duration of this slice is less than minexp, do not
        # count it as valid exposure.
        if chunk[1]-chunk[0]<minexp:
            continue
        else:
            chunks.append(chunk)
    if not ngaps:
        if times.min()==times.max():
            chunks.append([times.min(),times.min()+1])
        else:
            chunks.append([times.min(),times.max()])
    return np.array(chunks,dtype='float64')
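# Illustrative sketch: find the contiguous time ranges with data at a
# hypothetical position and sum the raw (uncorrected) exposure time; the
# coordinates are placeholders.
def _example_time_ranges():
    skypos = [176.919525856024, 0.255696872807351] # hypothetical RA, Dec
    tranges = fGetTimeRanges('NUV', skypos, maxgap=1., minexp=1.)
    # tranges is an Nx2 array of [start, stop] pairs in GALEX time (seconds).
    rawexpt = (tranges[:, 1] - tranges[:, 0]).sum() if len(tranges) else 0.
    print 'Found {n} time ranges totaling {t} seconds.'.format(
        n=len(tranges), t=rawexpt)
    return tranges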