def getsdsscat(self): print 'Getting SDSS spec cat for ', self.prefix drsearch = self.dr * 60. #search radius in arcmin for sdss query #zmin=self.cz-.005 #zmax=self.cz+.005 #from this, we will make a field sample and a cluster sample #query="select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr, s.plate, s.fiberID, s.tile, g.objID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field,g.err_u,g.err_g,g.err_r,g.err_i,g.err_z, g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and (g.PrimTarget & 0x00000040) > 0 and l.LineId = 6565 order by distance" % (self.cra,self.cdec,drsearch,zmax,zmin) #query="select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr, s.plate, s.fiberID, s.tile, g.objID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field,g.err_u,g.err_g,g.err_r,g.err_i,g.err_z, g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and l.LineId = 6565 order by distance" % (self.cra,self.cdec,drsearch,zmax,zmin) # removing PrimTarget selection flag query = "select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr, s.plate, s.fiberID, s.tile, g.objID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field,g.err_u,g.err_g,g.err_r,g.err_i,g.err_z, g.rowc_u, g.rowc_g, 
g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and l.LineId = 6565" % ( self.cra, self.cdec, drsearch, zmax, zmin) # query="select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr, s.plate, s.fiberID, s.tile, g.objID from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and (g.PrimTarget & 0x00000040) > 0 and l.LineId = 6565 order by distance" % (self.cra,self.cdec,drsearch,zmax,zmin)#g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z #print query try: lines = sqlcl.query(query).readlines() except IOError: print "IOError for cluster", self.prefix, " trying spec query again" lines = sqlcl.query(query).readlines() print self.prefix, ": got number + 1 of spec objects = ", len(lines) n = homedir + 'research/LocalClusters/SDSSCatalogs/' + str( self.prefix) + 'galaxy.dat' outfile = open(n, 'w') j = 0 if (len(lines) > 1.): for line in lines[1:]: if j < 0: print line j = j + 1 outfile.write(line) outfile.close()
def getsdsscat(self):
    print 'Getting SDSS spec cat for ', self.prefix
    drsearch = 3. * 60.  # search radius in arcmin for sdss query
    # zmin and zmax are assumed to be defined elsewhere; the per-cluster limits
    # below are disabled so that both a field sample and a cluster sample can be
    # built from the result.
    #zmin=self.cz-.005
    #zmax=self.cz+.005
    query = "select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and (g.PrimTarget & 0x00000040) > 0 and l.LineId = 6565 order by distance" % (self.cra, self.cdec, drsearch, zmax, zmin)
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster", self.prefix, " trying spec query again"
        lines = sqlcl.query(query).readlines()
    print self.prefix, ": got number + 1 of spec objects = ", len(lines)
    n = '/home/rfinn/research/LocalClusters/SDSSCatalogs/' + str(self.prefix) + 'galaxy.dat'
    outfile = open(n, 'w')
    j = 0
    if len(lines) > 1:
        for line in lines[1:]:
            if j < 0:
                print line
            j = j + 1
            outfile.write(line)
    outfile.close()
def getsdss(self):
    #dA=DA(self.z[i],h100)
    #r200arcmin=self.r200[i]*1000./dA/60.
    drsearch = self.Brmax + 1.  # 2xR200 in arcmin for sdss query
    query = "select g.ra, g.dec, g.isoA_r, g.isoB_r, n.distance from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 order by distance" % (self.ra, self.dec, drsearch)  # added flags to get rid of saturated objects, stars, etc
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster, trying phot query again"
        lines = sqlcl.query(query).readlines()
    print "got number+1 phot objects = ", len(lines)
    out1 = open('coma-sdss-phot.dat', 'w')
    for line in lines:
        out1.write(line)
    out1.close()
    query = "select g.ra,g.dec, g.isoA_r, g.isoB_r, n.distance, s.z from galaxy g, specobj s, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 order by distance" % (self.ra, self.dec, drsearch)
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster, trying spec query again"
        lines = sqlcl.query(query).readlines()
    out2 = open('coma-sdss-spec.dat', 'w')
    for line in lines:
        out2.write(line)
    out2.close()
def getsdsscatalogs():
    drsearch = 5. * 60.  # search radius in arcmin for sdss query
    zmin = .0
    zmax = 6.
    ra = 180.
    dec = 0.
    #print i,cid[i]," ra, dec, dr, mr = %12.8f %12.8f %8.3f %5.2f" % (cra[i],cdec[i],drsearch)
    query = "select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z from galaxy g, specobj s, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and s.z < %5.4f and s.z > %5.4f and (g.PrimTarget & 0x00000040) > 0 order by distance" % (ra, dec, drsearch, zmax, zmin)
    print query
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster trying spec query again"
        lines = sqlcl.query(query).readlines()
    print "got number + 1 of spec objects = ", len(lines)
    n = 'JonGalaxy.dat'
    outfile = open(n, 'w')
    j = 0
    if len(lines) > 1:
        for line in lines[1:]:
            if j < 0:
                print line
            j = j + 1
            outfile.write(line)
    outfile.close()
def getsdssphotcat(self):
    print "getting phot cat for cluster", self.prefix
    drsearch = 3. * 60.  # search radius in arcmin for sdss query
    #Vg=0.3556-0.7614*((self.avegr)-0.6148)  # (V-g) from Blanton et al 2003
    #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.plate_ID, g.MJD, from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and ((0.384*g.g + 0.716*g.r)< %5.2f)" % (self.ra[i],self.dec[i],drsearch,(mr+1.5),mr)
    query = "select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 " % (self.cra, self.cdec, drsearch)
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster", self.prefix, " trying phot query again"
        lines = sqlcl.query(query).readlines()
    #lines=sqlcl.query(query).readlines()
    #print query
    print "got number+1 phot objects = ", len(lines)
    #print lines
    n = '/home/rfinn/research/LocalClusters/SDSSCatalogs/' + str(self.prefix) + 'galaxy.photcat.dat'
    outfile = open(n, 'w')
    j = 0
    if len(lines) > 1:
        for line in lines[1:]:
            if j < 0:
                print line
            j = j + 1
            outfile.write(line)
    outfile.close()
def query_galaxies(ra, dec):
    gal_sdss_data = []
    gal_sdss_PID = []
    gal_sdss_SID = []
    for i in range(ra.size):
        ra1 = ra[i].split(':')
        dec1 = dec[i].split(':')
        coord = SkyCoord(ra1[0]+'h'+ra1[1]+'m'+ra1[2]+'s '+dec1[0]+'d'+dec1[1]+'m'+dec1[2]+'s', frame='icrs')
        print coord.ra.deg
        print coord.dec.deg
        #query = sqlcl.query("SELECT gn.objid, ISNULL(s.specobjid,0) AS specobjid, p.ra, p.dec,p.Petromag_u-p.extinction_u AS U_mag,p.Petromag_g-p.extinction_g AS G_mag,p.Petromag_r-p.extinction_r AS R_mag,p.Petromag_i-p.extinction_i AS I_mag,p.Petromag_z-p.extinction_z AS Z_mag, ISNULL(s.z, 0) AS z, ISNULL(pz.z, 0) AS pz FROM (Galaxy AS p JOIN dbo.fGetNearbyObjEq("+str(coord.ra.deg)+","+str(coord.dec.deg)+","+str(0.05)+") AS GN ON p.objID = GN.objID LEFT OUTER JOIN SpecObj s ON s.bestObjID = p.objID) LEFT OUTER JOIN Photoz pz on pz.objid = p.objid WHERE p.Petromag_r-p.extinction_r < 19.1 and p.clean = 1").readlines()
        query = sqlcl.query("SELECT gn.objid, ISNULL(s.specobjid,0) AS specobjid, p.ra, p.dec,p.Petromag_u-p.extinction_u AS U_mag,p.Petromag_g-p.extinction_g AS G_mag,p.Petromag_r-p.extinction_r AS R_mag,p.Petromag_i-p.extinction_i AS I_mag,p.Petromag_z-p.extinction_z AS Z_mag, ISNULL(s.z, 0) AS z, ISNULL(s.zErr, 0) AS z_err, ISNULL(pz.z, 0) AS pz FROM (Galaxy AS p JOIN dbo.fGetNearbyObjEq("+str(coord.ra.deg)+","+str(coord.dec.deg)+","+str(0.05)+") AS GN ON p.objID = GN.objID LEFT OUTER JOIN SpecObj s ON s.bestObjID = p.objID) LEFT OUTER JOIN Photoz pz on pz.objid = p.objid WHERE p.Petromag_r-p.extinction_r < 19.1").readlines()
        if len(query) > 4:
            print 'oops! More than 1 candidate found'
        if len(query) == 2:
            print 'No targets found'
            gal_sdss_data.append([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
            gal_sdss_PID.append(0)
            gal_sdss_SID.append(0)
            continue
        gal_sdss_data.append(map(float, query[2].split(',')))
        gal_sdss_PID.append(query[2].split(',')[0])
        gal_sdss_SID.append(query[2].split(',')[1])
        print 'Done with galaxy', i
    gal_sdss_data = np.array(gal_sdss_data)
    S_df = pd.DataFrame(gal_sdss_data, columns=['#objID', 'SpecObjID', 'ra', 'dec', 'umag', 'gmag', 'rmag', 'imag', 'zmag', 'spec_z', 'spec_z_err', 'photo_z'])
    S_df['#objID'] = gal_sdss_PID
    S_df['SpecObjID'] = gal_sdss_SID
    return S_df
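# A minimal usage sketch for query_galaxies above (not from the original source).
# It assumes numpy arrays of sexagesimal coordinate strings and the astropy SkyCoord,
# numpy, pandas, and sqlcl imports that the function already relies on; the target
# coordinates here are purely illustrative.
import numpy as np
from astropy.coordinates import SkyCoord  # needed by query_galaxies

ra_list = np.array(['12:30:49.4', '10:00:20.9'])   # hypothetical targets
dec_list = np.array(['+12:23:28', '+02:12:55'])
# df = query_galaxies(ra_list, dec_list)            # hits the SkyServer
# df.to_csv('sdss_targets.csv', index=False)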
def sdss_cat(run, rerun, camcol, field):
    field = int(field)
    field_OR = '(' + reduce(lambda x, y: x + ' or ' + y, ['s.field=' + str(x) for x in [field, field+1, field+2, field+3, field+4, field+5]]) + ')'
    query = 'select ccFlag, t.j as psfmag_J, sqrt(1/t.jivar) as psfmagerr_J, s.psfmag_u, s.psfmag_g, s.psfmag_r, s.psfmag_i, s.psfmag_z, s.psfmagerr_u, s.psfmagerr_g, s.psfmagerr_r, s.psfmagerr_i, s.psfmagerr_z from twomass as t, star as s where t.objID=s.objID and ' + field_OR + ' and s.run=' + str(run) + ' and s.rerun=' + str(rerun) + ' and s.camcol=' + str(camcol) + ' and t.jivar != 0 and flags & dbo.fPhotoFlags(\'BLENDED\') = 0 and ccFlag = "000" '
    # the second query (stars only, no 2MASS join) overrides the first one above
    query = 'select s.psfmag_u, s.psfmag_g, s.psfmag_r, s.psfmag_i, s.psfmag_z, s.psfmagerr_u, s.psfmagerr_g, s.psfmagerr_r, s.psfmagerr_i, s.psfmagerr_z from star as s where ' + field_OR + ' and s.run=' + str(run) + ' and s.rerun=' + str(rerun) + ' and s.camcol=' + str(camcol) + ' and flags & dbo.fPhotoFlags(\'BLENDED\') = 0 '
    print query
    import sqlcl
    lines = sqlcl.query(query).readlines()
    print lines
    print lines[0]
    keys = lines[0][:-1].split(',')
    sdss = []
    if lines[0] != 'No objects have been found':
        for l in lines[1:]:
            d = dict(zip(keys, [float(x) for x in l.split(',')]))
            #d['stripe'] = d['dbo.fStripeOfRun(run)']
            sdss.append(d)
    else:
        sdss = []
    return sdss
def getsdssspeccats(self):
    # get spectroscopic sources within 2R200
    print "elapsed time = ", time.clock() - starttime
    self.mcut = N.zeros(len(self.z), 'f')
    for i in range(len(self.z)):
        dL = self.dL[i]
        print "getting spec cat for cluster abell", self.id[i]
        r200arcmin = self.r200deg[i] * 60.
        drsearch = 3. * r200arcmin  # 2xR200 in arcmin for sdss query
        #Vg=0.3556-0.7614*((self.avegr)-0.6148)  # (V-g) from Blanton et al 2003
        mr = mabscut - 0.1331 + 5. * N.log10(dL) + 25. + self.kcorr[i]
        print i, self.z[i], dL, mr
        self.mcut[i] = mr
        dz = 3 * self.sigma[i] / (3.e5) * (1 + self.z[i])
        zmax = self.z[i] + .5 * dz
        zmin = self.z[i] - .5 * dz
        print "ra, dec, dr, mr = %12.8f %12.8f %8.3f %5.2f" % (self.ra[i], self.dec[i], drsearch, mr)
        query = "select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z, l.ew, l.ewErr, l2.ew, l2.ewErr from galaxy g, specobj s, SpecLine l, SpecLine l2, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and g.objID = s.bestobjid and s.specobjID=l.specobjID and s.specobjID=l2.specobjID and (g.g < %5.2f) and (g.PrimTarget & 0x00000040) > 0 and (s.z > %6.4f) and (s.z < %6.4f) and l.LineID = 3727 and l2.LineID = 6565" % (self.ra[i], self.dec[i], drsearch, (mr + 3), zmin, zmax)
        # query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z, l.ew, l.ewErr, l2.ew, l2.ewErr from galaxy g, specobj s, SpecLine l, SpecLine l2, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and g.objID = s.bestobjid and s.specobjID=l.specobjID and s.specobjID=l2.specobjID and (g.g < %5.2f) and (g.PrimTarget & 0x00000040) > 0 and l.LineID = 3727 and l2.LineID = 6565" % (self.ra[i],self.dec[i],drsearch,(mr))
        lines = sqlcl.query(query).readlines()
        #print query
        print "got number+1 spec objects w/in 2R200= ", len(lines)
        #print lines
        output = "abell" + str(self.id[i]) + ".spec2r200.dat"
        outfile = open(output, 'w')
        outfile.write("#%s " % (lines[0]))
        for line in lines[1:]:
            outfile.write("%s " % (line))
        outfile.close()
def isInSDSS_DR12(ra, dec):
    querry = "select dbo.fInFootprintEq(" + str(ra) + "," + str(dec) + ", 1)"
    lines = sqlcl.query(querry).readlines()
    if lines[2] == "True\n":
        return 1
    else:
        return 0
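# A hedged usage sketch for the footprint check above (not from the original source):
# dbo.fInFootprintEq returns a one-column table, so the third line of the CSV response
# ("True\n" or "False\n") carries the answer. Coordinates below are the approximate
# Coma cluster center, used purely as an illustration.
if __name__ == '__main__':
    if isInSDSS_DR12(194.953, 27.981):
        print "position is inside the DR12 imaging footprint"
    else:
        print "position is outside the DR12 imaging footprint"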
def really_isInSDSS_DR12(ra, dec):
    querry = "SELECT TOP 10 p.fieldID FROM Field AS p WHERE " + str(dec) + " BETWEEN p.decMin AND p.decMAx AND " + str(ra) + " BETWEEN p.raMin AND p.raMax"
    lines = sqlcl.query(querry).readlines()
    if len(lines) == 2:
        return 0
    else:
        return 1
def executeQ(query, res, formats):
    # returns a tuple of (results, modified)
    lines = sqlcl.query(query).readlines()
    if lines[0][:-1] != '#Table1':  # check if the query came back correctly
        print 'INCORRECT FORMAT RETURNED, returning previous results list'
        return (res, False)
    else:
        for l in lines[2:]:
            s = l[:-1].split(',')
            # NOTE: the formats array must exactly match the format of the returned data
            val = map(lambda x, y: x(y), formats, s)
            res.append(val)
        return (res, True)
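# Illustrative call of executeQ above (an assumption, not part of the original code):
# 'formats' must list one converter per returned column, in query order, and results
# accumulate across calls. The query text here is only an example.
res = []
formats = [float, float]  # ra, dec
res, ok = executeQ("select top 5 ra, dec from PhotoObj", res, formats)
if ok:
    print 'accumulated rows:', len(res)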
def executeQ(query, res):
    # returns a tuple of (results, modified)
    lines = sqlcl.query(query).readlines()
    if lines[0][:-1] != '#Table1':  # check if the query came back correctly
        print 'INCORRECT FORMAT RETURNED, returning previous results list'
        return (res, False)
    else:
        for l in lines[2:]:
            s = l[:-1].split(',')
            # NOTE: this line is heavily dependent on what we query, and in what order
            val = [float(s[0]), float(s[1]), int(s[2]), int(s[3]), float(s[4]), int(s[5]), float(s[6]), float(s[7]), float(s[8]), float(s[9])]
            res.append(val)
        return (res, True)
def dataCollect(self):
    import os
    queries = [self.sql]
    url = os.getenv("SQLCLURL", SqlReader.default_url)
    fmt = SqlReader.default_fmt
    # Run all queries sequentially
    for qry in queries:
        file = sqlcl.query(qry, url, fmt)
        self.fileWrite(file)
def getsdssspeccat(self):
    print 'Getting SDSS spec cat for ', self.prefix
    drsearch = 3. * 60.  # search radius in arcmin for sdss query
    # zmin and zmax are assumed to be defined elsewhere; the per-cluster limits
    # below are disabled so that both a field sample and a cluster sample can be
    # built from the result.
    #zmin=self.cz-.005
    #zmax=self.cz+.005
    query = "select n.distance,g.ra,g.dec, g.u, g.g, g.r, g.i, g.z, s.z,l.ew,l.ewErr, s.plate, s.fiberID, s.tile from galaxy g, specobj s, specline l, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objid = s.bestobjid and g.objID = n.objID and l.specobjid = s.specobjid and s.z < %5.4f and s.z > %5.4f and (g.PrimTarget & 0x00000040) > 0 and l.LineId = 6565 order by distance" % (self.cra, self.cdec, drsearch, zmax, zmin)
    try:
        lines = sqlcl.query(query).readlines()
    except IOError:
        print "IOError for cluster", self.prefix, " trying spec query again"
        lines = sqlcl.query(query).readlines()
    print self.prefix, ": got number + 1 of spec objects = ", len(lines)
    n = '/home/rfinn/research/LocalClusters/SDSSCatalogs/' + str(self.prefix) + 'galaxy.dat'
    outfile = open(n, 'w')
    j = 0
    if len(lines) > 1:
        for line in lines[1:]:
            if j < 0:
                print line
            j = j + 1
            outfile.write(line)
    outfile.close()
def getsdssphotcats(self): #get photometric sources within 2R200 print "elapsed time = ",time.clock()-starttime self.mcut=N.zeros(len(self.z),'f') cl=N.arange(17,len(self.z),1) self.nphot=N.zeros(len(self.z),'f') self.nspec=N.zeros(len(self.z),'f') for i in range(len(self.z)): #for i in cl: dL = self.dL[i] print "getting phot cat for cluster abell",self.id[i] r200arcmin=self.r200deg[i]*60. #drsearch=2.*r200arcmin#2xR200 in arcmin for sdss query drsearch=1.*r200arcmin#2xR200 in arcmin for sdss query #Vg=0.3556-0.7614*((self.avegr)-0.6148)#(V-g) from Blanton et al 2003 mr=mabscut - 0.1331 + 5.*N.log10(dL)+25.+self.kcorr[i] print i, self.z[i], dL, mr self.mcut[i]=mr print "ra, dec, dr, mr = %12.8f %12.8f %8.3f %5.2f" % (self.ra[i],self.dec[i],drsearch,mr) #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.plate_ID, g.MJD, from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and ((0.384*g.g + 0.716*g.r)< %5.2f)" % (self.ra[i],self.dec[i],drsearch,(mr+1.5),mr) query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and (g.PrimTarget & 0x00000040) > 0 " % (self.ra[i],self.dec[i],drsearch,(mr)) lines=sqlcl.query(query).readlines() #print query print "got number+1 phot objects = ",len(lines) #print lines self.nphot[i]=1.*len(lines) query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z, l.ew, l.ewErr, l2.ew, l2.ewErr from galaxy g, specobj s, SpecLine l, SpecLine l2, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and g.objID = s.bestobjid and s.specobjID=l.specobjID and s.specobjID=l2.specobjID and (g.g < %5.2f) and (g.PrimTarget & 0x00000040) > 0 and l.LineID = 3727 and l2.LineID = 6565" % (self.ra[i],self.dec[i],drsearch,(mr)) lines=sqlcl.query(query).readlines() #print query print "got number+1 spec objects w/in R200= ",len(lines) #print lines self.nspec[i]=1.*len(lines) self.compl=self.nspec/self.nphot print "average completeness of sdss spectroscopy is = ",N.average(self.compl), pylab.std(self.compl)
def getSDSSfields(ra, dec, size):  # all in degree
    delta = 0.6 * size + 0.2
    #if size > 1:
    #    delta = 1.2*size+0.4
    ra_max = ra + 2 * delta
    ra_min = ra - 2 * delta
    dec_max = dec + delta
    dec_min = dec - delta
    querry = """ SELECT fieldID, run, camCol, field, ra, dec, run, rerun FROM Field """
    querry += "WHERE ra BETWEEN " + str(ra_min) + " and " + str(ra_max) + " and dec BETWEEN " + str(dec_min) + " and " + str(dec_max)
    print querry
    lines = sqlcl.query(querry).readlines()
    N = len(lines)
    field_lst = []
    for i in np.arange(2, N):
        line = lines[i]
        line = line.split(',')
        run = line[1]
        camcol = line[2]
        field = line[3]
        ra_ = line[4]
        dec_ = line[5]
        field_lst.append([run, camcol, field])
    return field_lst
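# Illustrative call of getSDSSfields above (assumed usage, not in the original source):
# it returns [run, camcol, field] triplets (as strings) covering a box around the
# given position; the coordinates and size below are arbitrary examples.
fields = getSDSSfields(180.0, 30.0, 0.5)   # ra, dec, size in degrees
for run, camcol, field in fields:
    print run, camcol, field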
def dataQuery(self, myquery):
    """Return a table, or print an error, for a data query against the SkyServer of SDSS."""
    print 'Querying the SkyServer'
    page = sqlcl.query(myquery, fmt='html').read()
    soup = BeautifulSoup(page)
    tables = [table for table in soup.findAll("table")]
    if len(tables) >= 1:
        print 'Data received'
        return tables
    else:
        h3 = [h for h in soup.findAll("h3")]
        error = str(h3[1])
        error = replace(error, '<h3 bgcolor="pink"><font color="red">', '')
        error = replace(error, '</font></h3>', '')
        error = replace(error, '<br />', '\n')
        print error
        return (None, error)
def remote_search_field(ra, dec, radius, use_all=False, extra=0.05):
    """
    Given the RA, Dec of the field center and the radius of the search area
    in units of degrees, return the information for the FIELDS that cover
    this search area.

    The remote search uses SQLCL to access the SDSS on-line database;
    the search is not very accurate.

    @use_all : If True, all fields, including the ones with primaryArea==0,
               will be returned.
    """
    if (ra < 0.0) or (ra > 360.0):
        raise Exception("RA should be between 0 and 360 degree!")
    if (dec < -90.0) or (dec > 90.0):
        raise Exception("Dec should be between -90 and 90 degree!")
    if radius > 1.0:
        warning = "Radius is too large!! Be careful!! (Radius < 1.0deg)"
        highlight_output(warning)
    query = define_query(ra, dec, radius, use_all=use_all, extra=extra)
    result = sqlcl.query(query).readlines()
    n_field = (len(result) - 2)
    if n_field <= 0:
        raise Exception("No useful field is returned!! Check!!")
    else:
        result = result[2:]
    data = []
    for ii in result:
        line = ii.replace("\n", " ")
        temp = np.genfromtxt(StringIO(line), delimiter=",", dtype=None)
        data.append(temp)
    dtype = [('fieldID', int), ('run', int), ('rerun', int), ('camcol', int),
             ('field', int), ('quality', int), ('score', float), ('ra', float),
             ('dec', float), ('raMin', float), ('raMax', float), ('decMin', float),
             ('decMax', float), ('nGalaxy', int), ('nStars', float),
             ('primaryArea', float)]
    result = np.array(data, dtype=dtype)
    return result
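# Hedged usage sketch for remote_search_field above (define_query and highlight_output
# are helpers assumed to exist elsewhere in this module; coordinates are illustrative):
# fields = remote_search_field(150.1, 2.2, 0.2)   # ra, dec, radius in degrees
# print fields['run'], fields['camcol'], fields['field']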
def remote_search_field(ra, dec, radius, use_all=False, extra=None):
    """
    Given the RA, Dec of the field center and the radius of the search area
    in units of degrees, return the information for the FIELDS that cover
    this search area.

    The remote search uses SQLCL to access the SDSS on-line database;
    the search is not very accurate.

    @use_all : If True, all fields, including the ones with primaryArea==0,
               will be returned.
    """
    if (ra < 0.0) or (ra > 360.0):
        raise Exception("RA should be between 0 and 360 degree!")
    if (dec < -90.0) or (dec > 90.0):
        raise Exception("Dec should be between -90 and 90 degree!")
    if radius > 1.0:
        warning = "Radius is too large!! Be careful!! (Radius<1.0deg)"
        highlight_output(warning)
    query = define_query(ra, dec, radius, use_all=use_all, extra=extra)
    result = sqlcl.query(query).readlines()
    n_field = (len(result) - 2)
    if n_field == 0:
        raise Exception("No useful field is returned!! Check!!")
    data = []
    for ii in result[2:]:
        line = ii.replace("\n", " ")
        data.append(line.split(','))
    result = np.recarray(data, dtype=[('fieldid', int), ('run', int), ('rerun', int),
                                      ('camcol', int), ('field', int), ('quality', str),
                                      ('score', float), ('ra', float), ('dec', float),
                                      ('primaryarea', float)])
    print result.shape
    # TODO: Not working perfectly right now; find a way to work around,
    # maybe simply use a different format
    return result
def query(self, query):
    result = sqlcl.query(query).readlines()
    data = []
    count = 0
    if DEBUG:
        print (result)
    for i in result:
        if count > 1:
            list = i.split(',')
            if len(list) > 2:
                list[2] = list[2][:-1]
            data.append(list)
        count += 1
    if DEBUG:
        print (result)
    if len(data) > 0:
        if len(data[0]) > 0:
            while data[0][0][1:6] == "ERROR":
                # Case where more than 60 queries were issued in 1 minute
                print("ERROR: Too many queries in 1 minute. Sleeping for 60 seconds.")
                time.sleep(60)
                data = self.query(query)
    return (data)
def executeQueries(queries, formats):
    # run those queries
    first = True
    results = []
    count = 1
    for q in queries:
        print 'running query ' + `count`
        count = count + 1
        lines = sqlcl.query(q).readlines()
        if lines[0][:-1] != '#Table1':
            # the query failed, just print it and exit
            print 'the query failed, moving to next query.'
        else:
            # if first:  # add fields
            #     results.append(lines[1][:-1].split(','))
            #     first = False
            for l in lines[2:]:
                results.append(map(lambda x, y: x(y), formats, l[:-1].split(',')))
    return results
def getSDSSfields(ra, dec, size):  # all in degree
    delta = size
    ra_max = ra + (delta / np.cos(abs(np.radians(dec))))
    ra_min = ra - (delta / np.cos(abs(np.radians(dec))))
    dec_max = dec + delta
    dec_min = dec - delta
    querry = """ SELECT fieldID, run, camCol, field, ra, dec, run, rerun FROM Field """
    querry += "WHERE ra BETWEEN " + str(ra_min) + " and " + str(ra_max) + " and dec BETWEEN " + str(dec_min) + " and " + str(dec_max)
    lines = sqlcl.query(querry).readlines()
    N = len(lines)
    field_lst = []
    for i in np.arange(2, N):
        line = str(lines[i])
        line = line.split(',')
        run = line[1]
        camcol = line[2]
        field = line[3]
        ra_ = line[4]
        dec_ = line[5]
        field_lst.append([run, camcol, field])
    return field_lst
def get_sdss_photometry(coords):
    # print len(coords)
    if len(coords) == 1 or len(coords) != 4:
        print "It checks the SDSS PhotoObjAll catalog to find all photometric objects\n within a given radius."
        print "usage : sdss_photo_check.py (ra) (dec) -r (search radius)"
        print "\tra : degree"
        print "\tdec : degree"
        print "\tband : filter (ugriz)"
        print "\tsearch radius : arcsec (optional) (default : 3 arcsec)"
        print "output : ObjId model_u model_g model_r model_i model_z"
        sys.exit()
    # print coords, "****************************", coords[2]
    ra = str(coords[0])
    dec = str(coords[1])
    band = coords[2]
    search_rad = str(coords[3])
    sql_query = ("select P.modelMag_" + band + " from PhotoObjAll P, dbo.fGetNearbyObjAllEq(" + ra + "," + dec + "," + search_rad + ") n where P.objID = n.objID")
    query_result = sqlcl.query(sql_query).readlines()
    if len(query_result) > 1:
        data_part = string.split(query_result[1], ",")
        output_string = ""
        for x in data_part:
            output_string = output_string + x.strip() + " "
        print output_string
        time.sleep(1.0)
    else:
        print "No object found"
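# A minimal sketch of driving get_sdss_photometry above (assumed, not from the original
# script): coords is (ra_deg, dec_deg, band, radius_arcsec), and the position below is
# a hypothetical example. The sys, string, time, and sqlcl imports used by the function
# are assumed to be present at module level.
if __name__ == '__main__':
    get_sdss_photometry((185.0, 15.0, 'r', 3.0))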
def photoobj_search(ra, dec, radius, band='r', use_all=True, extra=0.05):
    """
    Search for photometric information for all objects within a certain
    distance of the central RA, Dec.
    """
    if (ra < 0.0) or (ra > 360.0):
        raise Exception("RA should be between 0 and 360 degree!")
    if (dec < -90.0) or (dec > 90.0):
        raise Exception("Dec should be between -90 and 90 degree!")
    if radius > 1.0:
        warning = "Radius is too large!! Be careful!! (Radius < 1.0deg)"
        highlight_output(warning)
    query = photo_query(ra, dec, radius, band=band, use_all=use_all, extra=extra)
    result = sqlcl.query(query).readlines()
    n_field = (len(result) - 2)
    if n_field <= 0:
        raise Exception("No useful field is returned!! Check!!")
    else:
        result = result[2:]
    data = []
    for ii in result:
        line = ii.replace("\n", "")
        temp = np.genfromtxt(StringIO(line), delimiter=",", dtype=None)
        data.append(temp)
    dtype = [('objID', int), ('ra', float), ('dec', float), ('type', int),
             ('clean', int), ('nChild', int), ('petroR90', float), ('psfMag', float),
             ('cModelMag', float), ('expAB', float), ('expPhi', float),
             ('devAB', float), ('devPhi', float)]
    table = np.array(data, dtype)
    return table
def getsdssphotcats(self): #get photometric sources within 2R200 print "elapsed time = ", time.clock() - starttime self.mcut = N.zeros(len(self.z), 'f') cl = N.arange(17, len(self.z), 1) for i in range(len(self.z)): #for i in cl: dL = self.dL[i] print "getting phot cat for cluster abell", self.id[i] r200arcmin = self.r200deg[i] * 60. #drsearch=2.*r200arcmin#2xR200 in arcmin for sdss query drsearch = 3. * r200arcmin #2xR200 in arcmin for sdss query #Vg=0.3556-0.7614*((self.avegr)-0.6148)#(V-g) from Blanton et al 2003 mr = mabscut - 0.1331 + 5. * N.log10(dL) + 25. + self.kcorr[i] print i, self.z[i], dL, mr self.mcut[i] = mr print "ra, dec, dr, mr = %12.8f %12.8f %8.3f %5.2f" % ( self.ra[i], self.dec[i], drsearch, mr) #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.plate_ID, g.MJD, from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and ((0.384*g.g + 0.716*g.r)< %5.2f)" % (self.ra[i],self.dec[i],drsearch,(mr+1.5),mr) query = "select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID,g.extinction_u, g.extinction_g, g.extinction_r, g.extinction_i, g.extinction_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and (g.PrimTarget & 0x00000040) > 0 " % ( self.ra[i], self.dec[i], drsearch, (mr)) #line from sdssinter.py code #query="select n.distance from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and g.r< %5.2f and (g.PrimTarget & 0x00000040) > 0 order by distance" % (self.ra[i],self.dec[i],drsearch,mr)#added flags to get rid of saturated objects, stars, etc #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID" % (self.ra[i],self.dec[i],drsearch)#no mag cut #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and g.r<17.7 and g.g<18.0" % (self.ra[i],self.dec[i],drsearch) lines = sqlcl.query(query).readlines() #print query print "got number+1 phot objects = ", len(lines) #print lines output = "abell" + str(self.id[i]) + ".phot.dat" outfile = open(output, 'w') outfile.write("#%s " % (lines[0])) for line in lines[1:]: outfile.write("%s " % (line)) outfile.close()
lowlim = float(sys.argv[3])
highlim = float(sys.argv[4])
fullcat_name_path = os.path.join(CURRENT_DIR, "Full_SDSS.dat")
trimmedcat_name_path = os.path.join(CURRENT_DIR, "Trimmed_SDSS.dat")
fullcat = open(fullcat_name_path, 'w')
trimmedcat = open(trimmedcat_name_path, 'w')
query = """ SELECT TOP """ + str(Num) + """ cast(str(p.ra,13,8) as float) as ra,cast(str(p.[dec],13,8) as float) as dec,p.psfMag_u,p.psfMag_g,p.psfMag_r,p.psfMag_i,p.psfMag_z,p.psfMagErr_u,p.psfMagErr_g,p.psfMagErr_r,p.psfMagErr_i,p.psfMagErr_z,dbo.fIAUFromEq(p.ra,p.[dec]) as SDSSname FROM ..PhotoObj AS p """ + "JOIN dbo.fGetNearbyObjEq(" + str(RA) + "," + str(DEC) + "," + str(Area) + ") AS b ON b.objID = P.objID"
data = sqlcl.query(query).read()
#print data
print >> fullcat, str(data)
fullcat.close()

############################### slim down #########################
table = numpy.genfromtxt(fullcat_name_path, delimiter=',', dtype=str, skip_header=2, unpack=True)
RA_star, DEC_star, psfMag_u, psfMag_g, psfMag_r, psfMag_i, psfMag_z, psfMagErr_u, psfMagErr_g, psfMagErr_r, psfMagErr_i, psfMagErr_z, SDSSname = table[:]
RA_star = numpy.array(RA_star, dtype=float)
DEC_star = numpy.array(DEC_star, dtype=float)
psfMag_u = numpy.array(psfMag_u, dtype=float)
import sqlcl
import sys, os
from csvMathMod import csv2math

# Path for transfer files
prefix = "/ram/"
# prefix for transfer files
xfer = "xfer-"

if len(sys.argv) != 2:
    print('Syntax is : python sdss_query.py "SQL query"')
    exit(0)

print("Executing Query : " + sys.argv[1] + "\n")
lines = sqlcl.query(sys.argv[1]).readlines()
# New versions seem to produce an extra first line with Table so remove
print lines[1:]
os.system("rm " + prefix + xfer + "* 2> /dev/null")
csv2math(lines[1:])
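# Example invocation of the sdss_query.py wrapper above (illustrative only; the /ram/
# transfer-file prefix and csv2math are site-specific pieces carried over from the script):
#   python sdss_query.py "select top 10 ra, dec, r from PhotoObj"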
#!/usr/bin/env python
import numpy as np
import sqlcl
from StringIO import StringIO

ra = 239.583329
dec = 0  # 27.233413
rad = 20.0
radians = (rad / 60.0) * np.pi / 180.0
solid_angle = 2.0 * np.pi * (1.0 - np.cos(radians))
solid_angle2 = 2.0 * np.pi * (radians**2 / 2.0)
area = np.pi * rad**2
print "radians: %f solid angle: %e solid angle2: %e square deg: %f " % (radians, solid_angle, solid_angle2, solid_angle2 / (np.pi * 4) * 41253)
print "sq arcmin: %f sq degree: %f" % (np.pi * rad**2, np.pi * (rad / 60.0)**2)
result = sqlcl.query("select p.ra, p.dec from PhotoObjAll p, dbo.fGetNearbyObjEq(%f,%f,%f) as r where p.ObjID = r.ObjID" % (ra, dec, rad)).read()
datagal = np.genfromtxt(StringIO(result), names=True, delimiter=",")
print datagal['dec'].size
print "number of elements per sq arcmin: %f" % (datagal['dec'].size / area)
ra1 = datagal['ra'].min()
ra2 = datagal['ra'].max()
dec1 = datagal['dec'].min()
dec2 = datagal['dec'].max()
print ra
print "min ra: %f max ra: %f diff: %f diff arcmin: %f" % (ra1, ra2, (ra2 - ra1), (ra2 - ra1) * 60.0)
print "min dec: %f max dec: %f diff: %f diff arcmin: %f" % (dec1, dec2, (dec2 - dec1), (dec2 - dec1) * 60.0)
def getsdssphotcat(self): print 'Getting SDSS phot cat for ', self.prefix drsearch = self.dr * 60. #search radius in arcmin for sdss query #zmin=self.cz-.005 #zmax=self.cz+.005 #from this, we will make a field sample and a cluster sample flag = 0 nrun = 1 print 'getting to while loop' while flag == 0: print 'inside while loop' #Vg=0.3556-0.7614*((self.avegr)-0.6148)#(V-g) from Blanton et al 2003 #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.plate_ID, g.MJD, from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.g < %5.2f) and ((0.384*g.g + 0.716*g.r)< %5.2f)" % (self.ra[i],self.dec[i],drsearch,(mr+1.5),mr) #changed so that only galaxies w/out spectra are returned #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field, g.err_u,g.err_g,g.err_r,g.err_i,g.err_z,g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) query = "select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field, g.err_u,g.err_g,g.err_r,g.err_i,g.err_z,g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g where g.r < 22 and g.ra > %12.8f and g.ra < %12.8f and g.dec > %12.8f and g.dec < %12.8f and (g.specObjID = 0)" % ( self.cra - drsearch / 2., self.cra + drsearch / 2., self.cdec - drsearch / 2., self.cdec + drsearch / 2.) 
#query="select g.ra, g.dec from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch)#changed so that only galaxies w/out spectra are returned # the following timed out in 10 min #query="select count(*) from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch)#changed so that only galaxies w/out spectra are returned # the following timed out at 10 min #query="select count(*) from galaxy g where (g.ra between %12.8f and %12.8f) and (g.dec between %12.8f and %12.8f) and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra-drsearch,self.cra+drsearch,self.cdec-drsearch,self.cdec+drsearch) # now trying this #query="select count(*) from galaxy g where (g.ra between %12.8f and %12.8f) and (g.dec between %12.8f and %12.8f) and (g.specObjID = 0)" % (self.cra-drsearch,self.cra+drsearch,self.cdec-drsearch,self.cdec+drsearch) # sdss website says the following query completes in 18 sec. let's see how it does... #query='SELECT p.ra, p.dec, p.ModelMag_i, p.extinction_i FROM TargetInfo t, PhotoTag p WHERE (t.primtarget & 0x00000006>0) and p.objid=t.targetobjid' # this does, in fact, complete very quickly! #query="select count(*) from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) # query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z, g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z, g.run, g.rerun, g.camcol, g.field, g.err_u,g.err_g,g.err_r,g.err_i,g.err_z,g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g where (g.ra between %12.8f and %12.8f) and (g.dec between %12.8f and %12.8f) and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra-drsearch,self.cra+drsearch,self.cdec-drsearch,self.cdec+drsearch)#changed so that only galaxies w/out spectra are returned print query start_time = time.time() try: lines = sqlcl.query(query).readlines() except IOError: print "IOError for cluster", self.prefix, " trying phot query again" lines = sqlcl.query(query).readlines() elapsed_time = time.time() - start_time print 'time to execute query = ', elapsed_time, ' sec, ', elapsed_time / 60., ' min' print "got number+1 phot objects = ", len(lines) n = homedir + 'research/LocalClusters/SDSSCatalogs/' + str( self.prefix) + 'galaxy.photcat.dat' outfile = open(n, 'w') j = 0 flag = 1 if (len(lines) > 1.): for line in lines[1:]: if j < 0: print line j = j + 1 outfile.write(line) if line.find('Server.ScriptTimeout') > -1: flag = 0 elif line.find('Timeout') > -1: flag = 0 outfile.close() nrun += 1 if nrun > 15: return if flag == 0: print self.prefix print 'Running query again b/c of ScriptTimeout' print 'starting attempt = ', nrun
def source_info(self,r_fits_filename): ''' [ra,dec,margin,radius,pgc] ==> Is margin info necessary(?) YES Input: Filename String of R band Mosaic fit file Returns the updated [ra,dec,margin,radius,pgc] info about the identified RC3 source as a list If no RC3 source is identified then ['@','@',margin_value,'@','@'] is returned If RC3 lie outside of SDSS footprint then [-1,-1,-1,-1,-1] is returned ''' try: updated = open("rc3_updated.txt",'a') self.num_iterations +=1 print ("{}th iteration".format(self.num_iterations)) if (self.num_iterations < 3): #5 is too much print ("------------------source_info----------------------") file = r_fits_filename print("Source info for {}".format(file)) if (file ==-1): #special value reserved for not in SDSS footprint galaxies return [-1,-1,-1,-1,-1] # File info hdulist = pyfits.open(file) rc3_ra= hdulist[0].header['RA'] rc3_dec= hdulist[0].header['DEC'] rc3_radius = hdulist[0].header['RADIUS'] margin = hdulist[0].header['MARGIN'] pgc = hdulist[0].header['PGC'] #Source Extraction os.system("sex {} -c default.sex".format(file)) # A list of other RC3 galaxies that lies in the field # In the case of source confusion, find all the rc3 that lies in the field. other_rc3s = sqlcl.query("SELECT distinct rc3.ra, rc3.dec FROM PhotoObj as po JOIN RC3 as rc3 ON rc3.objid = po.objid WHERE po.ra between {0}-{1} and {0}+{1} and po.dec between {2}-{3} and {2}+{3}".format(str(rc3_ra),str(margin),str(rc3_dec),str(margin))).readlines() print (other_rc3s) data =[] count =0 for i in other_rc3s: if count>1: list =i.split(',') list[0] = float(list[0]) list[1]= float(list[1][:-1]) data.append(list) count += 1 print ("ra,dec of catalog sources") rc3_data = map (np.array,data) print ("rc3_data: "+str(rc3_data)) # if (len(rc3_data)>0): # print ("here2") distances=[] for i in range(len(data)-1):#len(data)//2): if (len(data)>1 ): #odd number (unpaired) RC3s that lie in the field is ignored for now # (but we have to take it into consideration eventually) # and len(data)%2==0 d2p= np.array(data[i])-np.array(data[i+1]) print ("d2p: {}".format(d2p)) distances.append(d2p) if(len(distances)!=0): print (distances) if (len(distances)>1): print ("More than 2 galaxies inside field!") print (distances) #Conduct pairwise comparison catalog = open("test.cat",'r') #Creating a list of radius radius_list = [] # Creating a corresponding list of ra,dec #sextract = [] sextract_dict ={} for line in catalog: # print (line) line = line.split() if (line[0]!='#'): # print("HERE!") #sextract.append(np.array([line[2],line[3]])) radius=np.sqrt((float(line[6])-float(line[4]))**2+(float(line[7])-float(line[5]))**2)/2 #print(radius) radius_list.append(radius) coord = np.array([float(line[2]),float(line[3])]) sextract_dict[radius]=coord print ("Radius: "+str(radius_list)) print ("SExtract_dict: "+str(sextract_dict)) if (len(sextract_dict)>0): #special value that indicate empty list (no object detected by SExtractor) radii='@' new_ra='@' new_dec='@' catalog = open("test.cat",'r') n=-1 if (len(distances)!=0): # if there is source confusion, then we want to keep the nth largest radius print ("Source Confusion") n=len(distances)+1 print ("sextract_dict:") print (sextract_dict) print ("N-th largest radius:") print(heapq.nlargest(n,sextract_dict)) #nth largest radius nth_largest=heapq.nlargest(n,sextract_dict) sextract=[] for i in heapq.nlargest(n,sextract_dict): sextract.append(np.array(sextract_dict[i])) print ("sextract:") print (sextract) # radius nth_largest=[i for i in nth_largest if float(i)>15.] 
print(nth_largest) if(len(nth_largest)!=0): radii = nth_largest[0] #Coordinate matching by pairs diff = [] #all possible coordinate pairs coord_match=[] for i in rc3_data : #determine shift vector for j in sextract: print (str(i)+" " +str(j)) coord_match.append([i,j]) diff.append((j-i).tolist()) print ("coord_match: "+str(coord_match)) print ("diff: "+str(diff)) abs_diff = map (lambda x : map(lambda y:abs(y), x), diff) print ("abs_diff: "+str(abs_diff)) tmp = heapq.nsmallest(n,abs_diff) print ("tmp : "+str(tmp)) # Bascially doing this , the long way, becasuse Python apparently can not do list -by element comparison and complains #inx=abs_diff.index(np.array(i)) inx=[] for i in tmp: for j in abs_diff: #print (i) #print (j) if (i==j): print (abs_diff.index(j)) inx.append(abs_diff.index(j)) print (inx) matched=[] for i in inx: print coord_match[i] matched.append(coord_match[i]) # for j in coord_match: # print ([i,j]) # if (all(np.array([1,2])==np.array([1,2]))) # #matches = [coord for coord in coord_match ] # #print (matches) # print ("Matched coordinates: "+str(coord_match[inx])) # A list of other RC3 galaxies that lies in the field other_rc3s = sqlcl.query("SELECT distinct rc3.pgc,rc3.ra,rc3.dec FROM PhotoObj as po JOIN RC3 as rc3 ON rc3.objid = po.objid WHERE po.ra between {0}-{1} and {0}+{1} and po.dec between {2}-{3} and {2}+{3}".format(str(rc3_ra),str(margin),str(rc3_dec),str(margin))).readlines() print ("PGC of other_rc3s") print (other_rc3s) info ={} count =0 for i in other_rc3s: if count>1: list =i.split(',') pgc = int(list[0][6:]) ra= float(list[1][:-1]) dec= float(list[2][:-1]) info[pgc]= [ra,dec] count += 1 print (info) print ("The galaxy that we want to mosaic is: "+str(info[self.pgc])) new_ra= info[self.pgc][0] new_dec = info[self.pgc][1] else: print ("Source is Obvious") n=1 # if no source confusion then just keep the maximum radius catalog = open("test.cat",'r') #Creating a list of radius radius = [] for line in catalog: #print (line) line = line.split() if (line[0]!='#'): radius.append(np.sqrt((float(line[6])-float(line[4]))**2+(float(line[7])-float(line[5]))**2)/2) #special value that indicate empty list (no object detected by SExtractor) radii='@' new_ra='@' new_dec='@' catalog = open("test.cat",'r') # If there is no other RC3 in the field, it means the largest galaxy in the field is the RC3 we are interested in # So find max radius and treat as if it is rc3 for i in catalog: line = i.split() if (line[0]!='#' ): radii = np.sqrt((float(line[6])-float(line[4]))**2+(float(line[7])-float(line[5]))**2)/2 if (radii==max(radius)): print ('Biggest Galaxy with radius {} pixels!'.format(str(radii))) radii = radii new_ra= line[2] new_dec = line[3] break print ("new_ra and new_dec: {} , {} ".format(str(new_ra),str(new_dec))) if (radii!='@' and float(radii)>15): # There exist 1 or more detected source print ("Radii: {} pixel".format(str(radii))) radii = 0.00010995650106797878*radii #pixel to degree conversion print ("Radii: {} degrees".format(str(radii))) print ("rc3: {} , updated: {} ".format(rc3_ra, new_ra)) print ("rc3: {} , updated: {} ".format(rc3_dec,new_dec)) print ("rc3: {} , updated: {} ".format(rc3_radius,radii)) updated.write("{} {} {} {} {} \n".format(rc3_ra,rc3_dec,new_ra,new_dec,radii)) self.mosaic_all_bands(new_ra,new_dec,margin,radii,pgc) return [float(new_ra),float(new_dec),margin,radii,pgc] # margin was already set as 6*rc3_radius during initial_run # all additional mosaicking steps shoudl be 1.5 times this else: #radii =@ if all SExtracted radius is <15 print ("No 
detected RC3 sources in image. Mosaic using a larger margin") # original automated mosaic program default 6*radius # call on mosaic program with +50% original margin r_mosaic_filename = self.mosaic_band('r',rc3_ra,rc3_dec,1.5*margin,rc3_radius,pgc) self.source_info(r_mosaic_filename) return ['@','@',1.5*margin,'@','@'] else : no_detection = open("../no_detected_rc3_candidate_nearby.txt",'a') # 'a' for append #'w') no_detection.write("rc3_ra rc3_dec rc3_radius pgc \n") no_detection.write("{} {} {} {} \n".format(self.rc3_ra,self.rc3_dec,self.rc3_radius,self.pgc)) except (IOError): print ("File Not Found Error, if rfits is not found then mosaic an rfits") self.mosaic_band('r',self.rc3_ra,self.rc3_dec,3*self.rc3_radius,self.rc3_radius,self.pgc) except: print("Something went wrong when mosaicing PGC{}, just ignore it and keep mosaicing the next galaxy".format(str(pgc))) error = open ("sourceinfo_error.txt","a") error.write("{} {} {} {} \n".format(self.rc3_ra,self.rc3_dec,self.rc3_radius,self.pgc)) return['x','x','x','x','x']
def run(img, outcat, type, limits=None): import os, sys, anydbm, time print img, outcat, type if type == 'star': mag_type = 'psf' if type == 'galaxy': mag_type = 'petro' print img os.system("rm outim") os.system('rm ' + outcat) os.system('rm sdss_out') if limits is not None: ramin = limits['ramin'] ramax = limits['ramax'] decmin = limits['decmin'] decmax = limits['decmax'] else: import commands, string command = 'dfits ' + img + ' | fitsort -d CD2_1' print command print commands.getoutput(command) if string.find(commands.getoutput(command), 'KEY') == -1: imcom = "dfits " + img + " | fitsort CRPIX1 CRPIX2 CRVAL1 CRVAL2 CD2_1 CD1_2 CD2_2 CD1_1 > ./outim" else: imcom = "dfits " + img + " | fitsort CRPIX1 CRPIX2 CRVAL1 CRVAL2 CDELT1 CDELT2 > ./outim" print imcom os.system(imcom) import re print open('outim', 'r').readlines() com = re.split('\s+', open("outim", 'r').readlines()[1][:-1]) print com crpix1 = float(com[1]) crpix2 = float(com[2]) crval1 = float(com[3]) crval2 = float(com[4]) if string.find(commands.getoutput(command), 'KEY') == -1: cdelt1A = float(com[5]) cdelt2A = float(com[6]) cdelt1B = float(com[7]) cdelt2B = float(com[8]) if float(cdelt1A) != 0: cdelt1 = cdelt1A cdelt2 = cdelt2A else: cdelt1 = cdelt1B cdelt2 = cdelt2B else: cdelt1 = float(com[5]) cdelt2 = float(com[6]) print crpix1, crval1, cdelt1 #ramin = crval1 - crpix1*cdelt1 ramin = crval1 - 9000 * abs(cdelt1) print ramin ramax = crval1 + 9000 * abs(cdelt1) if ramax < ramin: top = ramin ramin = ramax ramax = top decmin = crval2 - 9000 * abs(cdelt2) decmax = crval2 + 9000 * abs(cdelt2) import sqlcl #lines = sqlcl.query("select ra,dec,u,g,r,i,z from star").readlines() #flags = reduce(lambda x,y: x + ' AND ' + y, [" ((flags_" + color + " & 0x10000000) != 0) \ # AND ((flags_" + color + " & 0x8100000800a4) = 0) \ # AND (((flags_" + color + " & 0x400000000000) = 0) or (psfmagerr_" + color + " <= 0.2)) \ # AND (((flags_" + color + " & 0x100000000000) = 0) or (flags_" + color + " & 0x1000) = 0) \ # AND (flags_" + color + " & dbo.fPhotoFlags('BLENDED') = 0) " for color in ['u','g','r','i','z']]) if type == 'star': flags = '\n\ ((flags & 0x10000000) != 0) \n\ AND ((flags & 0x8100000c00a4) = 0) \n\ AND (((flags & 0x400000000000) = 0) or (psfmagerr_g <= 0.2)) \n\ AND (((flags & 0x100000000000) = 0) or (flags & 0x1000) = 0) \n' elif type == 'galaxy': flags = '\n\ ((flags & 0x10000000) != 0) \n\ AND ((flags & 0x8100000c00a0) = 0) \n\ AND (((flags & 0x400000000000) = 0) or (psfmagerr_g <= 0.2)) \n\ AND (((flags & 0x100000000000) = 0) or (flags & 0x1000) = 0) \n' query = "select clean, ra,dec,raErr,decErr," + mag_type + "Mag_u," + mag_type + "Mag_g," + mag_type + "Mag_r," + mag_type + "Mag_i," + mag_type + "Mag_z," + mag_type + "MagErr_u," + mag_type + "MagErr_g," + mag_type + "MagErr_r," + mag_type + "MagErr_i," + mag_type + "MagErr_z, flags from " + type + " where ra between " + str( ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str( decmin)[:8] + " and " + str( decmax)[:8] + " AND clean=1 and " + flags #query = "select clean, ra,dec,raErr,decErr," + mag_type + "Mag_u," + mag_type + "Mag_g," + mag_type + "Mag_r," + mag_type + "Mag_i," + mag_type + "Mag_z," + mag_type + "MagErr_u," + mag_type + "MagErr_g," + mag_type + "MagErr_r," + mag_type + "MagErr_i," + mag_type + "MagErr_z,flags_u,flags_g,flags_r,flags_i,flags_z, flags from " + type + " where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " AND flags & dbo.fPhotoFlags('BLENDED') = 0 "# AND " 
+ flags #query = "select ra,dec,raErr,decErr,objID, petroMag_u,petroMag_g,petroMag_r,petroMag_i,petroMag_z,petroMagErr_u,petroMagErr_g,petroMagErr_r,petroMagErr_i,petroMagErr_z,flags_u,flags_g,flags_r,flags_i,flags_z, flags from galaxy where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] #+ " AND flags & dbo.fPhotoFlags('BLENDED') = 0 "# AND " + flags #query = "select top 10 flags_u, flags2_u from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " "\ #query = "select top 10 petroMagu, petroMagg, petroMagr, petroMagi, petroMagz, from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " " print query lines = sqlcl.query(query).readlines() uu = open('store', 'w') import pickle pickle.dump(lines, uu) #import pickle #f=open('store','r') #m=pickle.Unpickler(f) #lines=m.load() columns = lines[0][:-1].split(',') #print columns data = [] #print columns #print lines if lines[0][0:2] == 'No': return False, None for line in range(1, len(lines[1:]) + 1): #print lines[line] dt0 = {} for j in range(len(lines[line][:-1].split(','))): dt0[columns[j]] = lines[line][:-1].split(',')[j] import string if string.find(lines[line][:-1], 'font') == -1: data.append(dt0) #if string.find(lines[line][:-1],'font') != -1: #print lines[line][:-1] print len(data) print len(data[0]) outwrite = open('sdss_out', 'w') print len(data) keys = [ 'SeqNr', ['dec', 'Dec'], ['ra', 'Ra'], 'raErr', 'decErr', 'umag', 'gmag', 'rmag', 'imag', 'Bmag', 'Vmag', 'Rmag', 'Imag', 'zmag', 'uerr', 'gerr', 'rerr', 'ierr', 'Berr', 'Verr', 'Rerr', 'Ierr', 'zerr', 'umg', 'gmr', 'rmi', 'imz', 'BmV', 'VmR', 'RmI', 'Imz', 'umgerr', 'gmrerr', 'rmierr', 'imzerr', 'BmVerr', 'VmRerr', 'RmIerr', 'Imzerr', 'A_WCS', 'B_WCS', 'THETAWCS', 'Flag', 'Clean', ['ra', 'ALPHA_J2000'], ['dec', 'DELTA_J2000'] ] #keys = ['SeqNr',['dec','Dec'],['ra','Ra'],'raErr','decErr','umag','gmag','rmag','imag','Bmag','Vmag','Rmag','Imag','zmag','uerr','gerr','rerr','ierr','Berr','Verr','Rerr','Ierr','zerr','umg','gmr','rmi','imz','BmV','VmR','RmI','Imz','umgerr','gmrerr','rmierr','imzerr','BmVerr','VmRerr','RmIerr','Imzerr','flags_u','flags_g','flags_r','flags_i','flags_z','A_WCS','B_WCS','THETAWCS','Flag','Clean',['ra','ALPHA_J2000'],['dec','DELTA_J2000']] seqnr = 1 for els in range(len(data)): clean = data[els]['clean'] if 1 == 1: #int(flag)==1 :# 1==1: #data[els].has_key('u'): import math #print data[els].keys() ab_correction = { 'u': -0.036, 'g': 0.012, 'r': 0.010, 'i': 0.028, 'z': 0.040 } u = convert_to_pogson(float(data[els][mag_type + 'Mag_u']), 'u') + ab_correction['u'] g = convert_to_pogson(float(data[els][mag_type + 'Mag_g']), 'g') + ab_correction['g'] r = convert_to_pogson(float(data[els][mag_type + 'Mag_r']), 'r') + ab_correction['r'] i = convert_to_pogson(float(data[els][mag_type + 'Mag_i']), 'i') + ab_correction['i'] z = convert_to_pogson(float(data[els][mag_type + 'Mag_z']), 'z') + ab_correction['z'] uerr = float(data[els][mag_type + 'MagErr_u']) gerr = float(data[els][mag_type + 'MagErr_g']) rerr = float(data[els][mag_type + 'MagErr_r']) ierr = float(data[els][mag_type + 'MagErr_i']) zerr = float(data[els][mag_type + 'MagErr_z']) data[els]['Bmag'] = u - 0.8116 * (u - g) + 0.1313 # sigma = 0.0095 data[els]['Berr'] = math.sqrt((uerr * 0.19)**2. + (0.8119 * gerr)**2.) 
#B = g + 0.3130*(g - r) + 0.2271# sigma = 0.0107 #V = g - 0.2906*(u - g) + 0.0885# sigma = 0.0129 data[els]['Vmag'] = g - 0.5784 * (g - r) - 0.0038 # sigma = 0.0054 data[els]['Verr'] = math.sqrt((gerr * 0.42)**2. + (0.57 * rerr)**2.) #R = r - 0.1837*(g - r) - 0.0971# sigma = 0.0106 data[els]['Rmag'] = r - 0.2936 * (r - i) - 0.1439 # sigma = 0.0072 data[els]['Rerr'] = math.sqrt((rerr * 0.71)**2. + (0.29 * ierr)**2.) data[els]['Imag'] = r - 1.2444 * (r - i) - 0.3820 # sigma = 0.0078 data[els]['Ierr'] = math.sqrt((rerr * 0.24)**2. + (1.244 * ierr)**2.) #I = i - 0.3780*(i - z) -0.3974# sigma = 0.0063 data[els]['umag'] = u data[els]['gmag'] = g data[els]['rmag'] = r data[els]['imag'] = i data[els]['zmag'] = z data[els]['umg'] = data[els]['umag'] - data[els]['gmag'] data[els]['gmr'] = data[els]['gmag'] - data[els]['rmag'] data[els]['rmi'] = data[els]['rmag'] - data[els]['imag'] data[els]['imz'] = data[els]['imag'] - data[els]['zmag'] data[els]['uerr'] = uerr data[els]['gerr'] = gerr data[els]['rerr'] = rerr data[els]['ierr'] = ierr data[els]['zerr'] = zerr data[els]['umgerr'] = math.sqrt(data[els]['uerr']**2. + data[els]['gerr']**2.) data[els]['gmrerr'] = math.sqrt(data[els]['gerr']**2. + data[els]['rerr']**2.) data[els]['rmierr'] = math.sqrt(data[els]['rerr']**2. + data[els]['ierr']**2.) data[els]['imzerr'] = math.sqrt(data[els]['ierr']**2. + data[els]['zerr']**2.) data[els]['BmV'] = data[els]['Bmag'] - data[els]['Vmag'] data[els]['VmR'] = data[els]['Vmag'] - data[els]['Rmag'] data[els]['RmI'] = data[els]['Rmag'] - data[els]['Imag'] data[els]['Imz'] = data[els]['Imag'] - data[els]['zmag'] data[els]['BmVerr'] = math.sqrt(data[els]['Berr']**2. + data[els]['Verr']**2.) data[els]['VmRerr'] = math.sqrt(data[els]['Verr']**2. + data[els]['Rerr']**2.) data[els]['RmIerr'] = math.sqrt(data[els]['Rerr']**2. + data[els]['Ierr']**2.) data[els]['Imzerr'] = math.sqrt(data[els]['Ierr']**2. + data[els]['zerr']**2.) #error = (float(data[els]['rowcErr_r'])**2. + float(data[els]['colcErr_r'])**2.)**0.5*0.4/3600. 
#if error < 0.0004: error=0.0004 data[els]['A_WCS'] = 0.0004 #error #data[els]['Err'] #'0.0004' data[els]['B_WCS'] = 0.0004 #error #data[els]['decErr'] #'0.0004' data[els]['THETAWCS'] = '0' data[els]['Clean'] = str(clean) data[els]['Flag'] = '0' #str(clean) seqnr += 1 data[els]['SeqNr'] = seqnr lineh = "" #print data[els] if 1 == 1: # data[els]['clean'] == 0: #inspect_flags([data[els]['flags_u'],data[els]['flags_g'],data[els]['flags_r'],data[els]['flags_i'],data[els]['flags_z']],[data[els]['flags2_u'],data[els]['flags2_g'],data[els]['flags2_r'],data[els]['flags2_i'],data[els]['flags2_z']]): #print keys for key in keys: if len(key) == 2: key_dict = key[0] key = key[1] else: key_dict = key if (key == 'SeqNr' or key_dict == 'ra' or key_dict == 'dec' or key[0:3] == 'Fla'): num = '%(s)s' % {'s': str(data[els][key_dict])} else: num = '%(num).4f' % {'num': float(data[els][key_dict])} num = '%s' % num num.strip() #elif key[0:2] != 'ra' and key[0:3] != 'dec': #yy = '' #for y in range(128): # if y < len(str(data[els][key])): # yy = yy + str(data[els][key])[y] # else: # yy = yy + ' ' #num = yy #else: num = str(data[els][key]) lineh = lineh + num + " " #print lineh outwrite.write(lineh + "\n") outwrite.close() #lineh= "lc -C -B " #for key in data[els].keys(): # lineh = lineh + " -N '1 1 " + str(key) + "' " #lineh = lineh + " < outwrite > outf.cat" #print lineh #os.system(lineh) asc = open('asctoldac_sdss.conf', 'w') asc.write('VERBOSE = DEBUG\n') for column in keys: if len(column) == 2: name = column[1] else: name = column if column == 'objID' or column[0:3] == 'fla': type = 'STRING' htype = 'STRING' depth = '128' elif column == 'Flag': type = 'SHORT' htype = 'INT' depth = '1' elif column == 'SeqNr': type = 'LONG' htype = 'INT' depth = '1' elif len(column) == 2: #column == 'Ra' or column == 'Dec': type = 'DOUBLE' htype = 'FLOAT' depth = '1' else: type = 'FLOAT' htype = 'FLOAT' depth = '1' asc.write('#\nCOL_NAME = ' + name + '\nCOL_TTYPE= ' + type + '\nCOL_HTYPE= ' + htype + '\nCOL_COMM= ""\nCOL_UNIT= ""\nCOL_DEPTH= ' + depth + '\n') asc.close() command = "asctoldac -i sdss_out -c asctoldac_sdss.conf -t STDTAB -o " + outcat os.system(command) print command if len(data) > 10: cov = True else: cov = False return cov, outcat
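# The block above converts SDSS ugriz magnitudes (after convert_to_pogson() and the
# AB corrections) to Johnson-Cousins B, V, R, I with linear colour transformations
# and propagates the magnitude errors in quadrature. The helper below is a minimal,
# self-contained sketch of just that step; the coefficients and error weights are
# copied verbatim from the code above (the weights are the partial derivatives of
# each transformation with respect to the two input bands).
import math

def ugriz_to_BVRI(u, g, r, i, uerr, gerr, rerr, ierr):
    B = u - 0.8116 * (u - g) + 0.1313   # quoted scatter ~0.0095 mag
    V = g - 0.5784 * (g - r) - 0.0038   # ~0.0054 mag
    R = r - 0.2936 * (r - i) - 0.1439   # ~0.0072 mag
    I = r - 1.2444 * (r - i) - 0.3820   # ~0.0078 mag
    Berr = math.sqrt((0.19 * uerr) ** 2 + (0.8119 * gerr) ** 2)
    Verr = math.sqrt((0.42 * gerr) ** 2 + (0.57 * rerr) ** 2)
    Rerr = math.sqrt((0.71 * rerr) ** 2 + (0.29 * ierr) ** 2)
    Ierr = math.sqrt((0.24 * rerr) ** 2 + (1.244 * ierr) ** 2)
    return (B, V, R, I), (Berr, Verr, Rerr, Ierr)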
query = "select ra,dec,z, zConf from SpecObj where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " AND z < 1.5 AND zConf > 0.90 " # AND flags & dbo.fPhotoFlags('BLENDED') = 0 "# AND " + flags #query = "select clean, ra,dec,raErr,decErr,objID,rowcErr_u,colcErr_u,rowcErr_g,colcErr_g,rowcErr_r,colcErr_r,rowcErr_i,colcErr_i,rowcErr_z,colcErr_z,psfMag_u,psfMag_g,psfMag_r,psfMag_i,psfMag_z,psfMagErr_u,psfMagErr_g,psfMagErr_r,psfMagErr_i,psfMagErr_z,flags_u,flags_g,flags_r,flags_i,flags_z, flags from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " AND flags & dbo.fPhotoFlags('BLENDED') = 0 "# AND " + flags print query #query = "select top 10 flags_u, flags2_u from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " "\ #query = "select top 10 psfMagu, psfMagg, psfMagr, psfMagi, psfMagz, from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " +str(decmax)[:8] + " " print query lines = sqlcl.query(query).readlines() uu = open('store','w') import pickle pickle.dump(lines,uu) #import pickle #f=open('store','r') #m=pickle.Unpickler(f) #lines=m.load() #raw_input() columns = lines[0][:-1].split(',') print columns data = [] print columns
import sqlcl as s
import time

# millisecond timer
curT = lambda: int(round(time.time() * 1000))

times1 = []
times2 = []
num = 30
for i in range(num):
    print "starting test " + str(i)
    # query 1: plain BETWEEN box on PhotoPrimary
    t1 = curT()
    r1 = s.query("SELECT objID, ra, dec FROM PhotoPrimary WHERE ((ra BETWEEN 331.04711618888882 AND 331.04767158888887) AND (dec BETWEEN 6.2921503555555551 AND 6.2927057555555548))").read()
    t11 = curT()
    times1.append(t11 - t1)
    # query 2: cone search via fGetNearbyObjEq joined to PhotoPrimary
    t2 = curT()
    r2 = s.query("SELECT p.objid, p.ra, p.dec FROM fGetNearbyObjEq(331.047393889,6.29242805556,0.02) n, PhotoPrimary p WHERE n.objID=p.objID").read()
    t22 = curT()
    times2.append(t22 - t2)

avg1 = sum(times1) / num
avg2 = sum(times2) / num
print "Results for 1"
print times1
print "average: " + str(avg1)
print '\n'
print "Results for 2"
print times2
print "average: " + str(avg2)
def get_sdss_spectra(umg, imz, gmr, rmi, number=4, tol=0.01, S_N=5): import sqlcl dict_names = [ 'plate', 'MJD', 'fiberID', 'ra', 'dec', 'mag_0', 'mag_1', 'mag_2' ] query = 'select top ' + str(number) + ' ' + reduce( lambda x, y: x + ',' + y, ['s.' + x for x in dict_names] ) + ' from specobjall as s join specphotoall as p on s.specobjid = p.specobjid where abs(s.mag_0 - s.mag_1 - ' + str( gmr ) + ') < ' + str(tol) + ' and abs(s.mag_1 - s.mag_2 - ' + str( rmi ) + ') < ' + str(tol) + ' and abs(s.mag_0 - s.mag_2 - ' + str( gmr + rmi ) + ') < ' + str(tol) + ' and s.sn_0 > ' + str( S_N ) + ' and s.sn_1 > ' + str(S_N) + ' and s.sn_2 > ' + str( S_N ) + ' and abs(s.mag_0 - s.mag_1 - (p.fibermag_g - p.fibermag_r)) < 0.1 and abs(s.mag_1 - s.mag_2 - (p.fibermag_r - p.fibermag_i)) < 0.1 order by -1.*s.sn_1' if rmi < 0.7: pattern = 'zbelodiesptype like "%v%" and zbelodiesptype not like "%var%"' #elif 0.7 < rmi < 1.0: pattern = '(zbelodiesptype like "%G%v%" or zbelodiesptype like "%K%v%" or zbelodiesptype like "%M%v%")' else: pattern = 'zbelodiesptype like "%M%v%"' query = 'select top ' + str(number) + ' ' + reduce( lambda x, y: x + ',' + y, ['s.' + x for x in dict_names] ) + ' from specobjall as s join specphoto as p on s.specobjid = p.specobjid join sppParams sp on sp.specobjid = s.specobjid where zbclass="STAR" and ' + pattern + ' and abs(s.mag_0 - s.mag_1 - ' + str( gmr ) + ') < ' + str(tol) + ' and abs(s.mag_1 - s.mag_2 - ' + str( rmi ) + ') < ' + str(tol) + ' and abs(s.mag_0 - s.mag_2 - ' + str( gmr + rmi ) + ') < ' + str(tol) + ' and s.sn_0 > ' + str( S_N ) + ' and s.sn_1 > ' + str(S_N) + ' and s.sn_2 > ' + str( S_N ) + ' and abs(s.mag_0 - s.mag_1 - (p.fibermag_g - p.fibermag_r)) < 0.1 and abs(s.mag_1 - s.mag_2 - (p.fibermag_r - p.fibermag_i)) < 0.1 and abs(' + str( umg) + ' - (p.psfMag_u - p.psfMag_g)) < 0.05 and abs(' + str( imz) + ' - (p.psfMag_i - p.psfMag_z)) < 0.05 \ order by -1.*s.sn_1' query = 'select top ' + str(number) + ' ' + reduce( lambda x, y: x + ',' + y, ['s.' + x for x in dict_names] ) + ' from specobjall as s join specphoto as p on s.specobjid = p.specobjid join sppParams sp on sp.specobjid = s.specobjid where zbclass="STAR" and ' + pattern + ' and abs(s.mag_0 - s.mag_1 - ' + str( gmr ) + ') < ' + str(tol) + ' and abs(s.mag_1 - s.mag_2 - ' + str( rmi ) + ') < ' + str(tol) + ' and abs(s.mag_0 - s.mag_2 - ' + str( gmr + rmi ) + ') < ' + str(tol) + ' and s.sn_0 > ' + str( S_N ) + ' and s.sn_1 > ' + str(S_N) + ' and s.sn_2 > ' + str( S_N ) + ' and abs(s.mag_0 - s.mag_1 - (p.fibermag_g - p.fibermag_r)) < 0.1 and abs(s.mag_1 - s.mag_2 - (p.fibermag_r - p.fibermag_i)) < 0.1 and abs(' + str( umg) + ' - (p.psfMag_u - p.psfMag_g)) < 0.05 and abs(' + str( imz) + ' - (p.psfMag_i - p.psfMag_z)) < 0.05 \ order by -1.*s.sn_1' import time time.sleep(1.5) print query lines = sqlcl.query(query).readlines() print lines dicts = [] if lines[0] != 'N': for line in lines[1:]: dict = {} line = line.replace('\n', '') import re res = re.split(',', line) print res for i in range(len(res)): if dict_names[i] == 'fiberID' or dict_names[ i] == 'plate' or dict_names[i] == 'MJD': dict[dict_names[i]] = int(res[i]) else: dict[dict_names[i]] = (res[i]) print dict dicts.append(dict) print dicts return dicts
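# Minimal usage sketch of get_sdss_spectra() above, with placeholder colours
# (not taken from the original code). It returns a list of dicts keyed by the
# dict_names fields (plate, MJD, fiberID, ra, dec, mag_0, mag_1, mag_2).
matches = get_sdss_spectra(umg=1.10, imz=0.15, gmr=0.45, rmi=0.18,
                           number=4, tol=0.01, S_N=5)
for m in matches:
    print m['plate'], m['MJD'], m['fiberID'], m['ra'], m['dec']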
import string
import sys
import os
import sqlcl

infile_dir = sys.argv[1]
oufile_dir = sys.argv[2]
infile_list = os.listdir(infile_dir)
url = sqlcl.default_url
fmt = 'csv'

for f in infile_list:
    infile = './' + os.path.join(infile_dir, f)
    oufile = './' + os.path.join(oufile_dir, f) + '.' + fmt
    print "request %s ..." % infile
    qry = open(infile).read()
    result = sqlcl.query(qry, url, fmt)
    line = result.readline()
    print "save %s to %s..." % (infile, oufile)
    oufile_fd = open(oufile, 'w')
    sqlcl.write_header(oufile_fd, "#", url, qry)
    if line.startswith('ERROR'):
        oufile_fd.write('ERROR')
        oufile_fd.close()
        continue
    while line:
        oufile_fd.write(string.rstrip(line) + os.linesep)
        line = result.readline()
    oufile_fd.close()
import sqlcl
import sys, os
from csvMathMod import csv2math

# Path for transfer files
prefix = "/ram/"
# Prefix for transfer files
xfer = "xfer-"

if len(sys.argv) != 2:
    print('Syntax is : python sdss_query.py "SQL query"')
    exit(0)

print("Executing Query : " + sys.argv[1] + "\n")
lines = sqlcl.query(sys.argv[1]).readlines()

# New versions seem to produce an extra first line with Table so remove it
print lines[1:]
os.system("rm " + prefix + xfer + "* 2> /dev/null")
csv2math(lines[1:])
def get_SDSS(ra0, dec0, rad=1/60., name='', silent=False, debug=False):
    '''
    >> data = get_SDSS(ra, dec, rad=1/60., name='mydata')
    submit CAS job via sqlcl

    input: ra, dec (deg), can be arrays
    optional input:
        rad=1/60. (deg)
        name          if given, we write file <name>
        silent=False  shut it.
    note: slow for many objects because we loop over input coords; this could be
    log(N) faster if I knew how to upload coordinates and run fGetNearbyObjEq on
    this list.
    '''
    if np.isscalar(ra0):
        ra0 = [ra0]
        dec0 = [dec0]
    out_list = []
    for ra, dec in zip(ra0, dec0):
        cas = SDSS_cas.replace('__RA__', str(ra)).replace('__DEC__', str(dec)).replace('__RAD__', str(rad*60))
        if not silent:
            print 'running CASjob:\n', cas
        result = sqlcl.query(cas)
        if not silent:
            print 'CAS job done, now reading query...'
        lines = result.readlines()
        if debug:
            print ra, dec, lines
        if len(lines) <= 2:
            print 'no sources found, for ra,dec:', ra, dec
        else:
            data = readascii(lines=lines[2:], names=lines[1].split(','), delimiter=',')
            out_list.append(data)
    if len(out_list) == 0:
        return None  # no data
    if len(out_list) == 1:
        out = out_list[0]
    if len(out_list) > 1:
        data_arr = np.repeat(out_list[0], 1)
        for dd in out_list[1:]:
            data_arr = rec.merge_rec(data_arr, np.repeat(dd, 1))
        out = data_arr
    if name:
        if not silent:
            print '# of entries:', len(out)
            print 'writing to ', name
        pyfits.writeto(name, out, clobber=True)
    return out
def getimg(ira, idec, imsize, BW=False, DSS=None, fullname=False, slitangle="Parallactic"): ''' Grab an SDSS image from the given URL, if possible Parameters: ---------- ira: (float or Quantity) RA in decimal degrees idec: (float or Quantity) DEC in decimal degrees ''' # Strip units as need be try: ra = ira.value except KeyError: ra = ira dec = idec except AttributeError: ra = ira dec = idec else: dec = idec.value # Get URL if DSS == None: # Default url = sdsshttp(ra, dec, imsize) else: url = dsshttp(ra, dec, imsize) # DSS # Request rtv = requests.get(url) # Commenting out the next section for now because it crashes in SDSS.query_region and this does not affect finder chart generation # Query for photometry # coord = SkyCoord(ra=ra*u.degree, dec=dec*u.degree) # phot = SDSS.query_region(coord, radius=0.02*u.deg) #print phot # if phot is None: # print('getimg: Pulling from DSS instead of SDSS') # BW = 1 # url = dsshttp(ra,dec,imsize) # DSS # rtv = requests.get(url) img = Image.open(StringIO(rtv.content)) # B&W ? if BW: import PIL.ImageOps img2 = img.convert("L") img2 = PIL.ImageOps.invert(img2) img = img2 # Find offset by submitting a query through sqlcl.py, saving it as all_offsets.da t, and searching through it with open('all_offsets.dat', 'w') as query: conditions = "SELECT r, ra, dec FROM Star WHERE ra BETWEEN " + str( ira - imsize / 120.0) + " AND " + str( ira + imsize / 120.0) + " AND dec BETWEEN " + str( idec - imsize / 120.0) + " AND " + str( idec + imsize / 120.0) + " and (r < 19) and (r > 7)" query_results = sqlcl.query(conditions).read() query.write(query_results) query.close() offsets = Table.read('all_offsets.dat', format='ascii', names=('r_mag', 'RA', 'DEC')) # xdb.set_trace() # min_distance = 0.4 # if abs(ra-off_ra) <= 0.035 and abs(dec-off_dec) <= 0.035: # distance=(((ra-off_ra)**2)+((dec-off_dec)**2))**(1./2.) # if distance < min_distance: # min_distance = distance # min_r_mag = off_r_mag # min_off_ra = off_ra # min_off_dec = off_dec min_r_mag = 19 if len(offsets) == 0: print "No offsets found within +/-0.03 in RA and DEC with r_mag <", min_r_mag if len(offsets) >= 1: for j in range(0, len(offsets)): off_r_mag = offsets['r_mag'][j] off_ra = offsets['RA'][j] off_dec = offsets['DEC'][j] if off_r_mag <= min_r_mag: min_r_mag = off_r_mag min_off_ra = off_ra min_off_dec = off_dec # Change object coordinates from decimal degrees to HMS/DMS string_ra = str(ira) string_dec = str(idec) hms_ra, dms_dec, other = calc_offset.decdeg_to_radec( string_ra, string_dec) # Change target coordinates from decimal degrees to HMS/DMS string_off_ra = str(min_off_ra) string_off_dec = str(min_off_dec) hms_off_ra, dms_off_dec, off_other = calc_offset.decdeg_to_radec( string_off_ra, string_off_dec) # Calculate offset move del_ra, e_or_w, del_dec, n_or_s = offset(ra, dec, min_off_ra, min_off_dec) # Obtain a name for the file if fullname: outfil = "../finding_charts/" outfil += "J" + hms_ra[0] + hms_ra[1] + hms_ra[2].split(".")[0] outfil += "_" outfil += dms_dec[0].replace("-", "m").replace( "+", "p") + dms_dec[1] + dms_dec[2].split(".")[0] outfil += ".pdf" else: outfil = "J" + hms_ra[0] + hms_ra[1] + dms_dec[0] + dms_dec[ 1] + ".pdf" # Check if the file already exists if os.path.exists(outfil): ans = "" while (ans != "n") and (ans != "y"): ans = raw_input("\nFile already exists:\n" + outfil + "\nOverwrite (y/n): ") if ans == "n": print "Finder not written!" 
return # Plot the figure fig = plt.figure(dpi=1200) fig.set_size_inches(8.0, 10.5) # Font plt.rcParams['font.family'] = 'times new roman' ax = plt.gca() # Image if BW == 1: cmm = cm.Greys_r else: cmm = None cradius = imsize / 30. plt.imshow(img, cmap=cmm, aspect='equal', extent=(-imsize / 2., imsize / 2, -imsize / 2., imsize / 2)) # Axes plt.xlim(-imsize / 2., imsize / 2.) plt.ylim(-imsize / 2., imsize / 2.) # Label plt.xlabel('Relative ArcMin', fontsize=20) xpos = 0.12 * imsize ypos = 0.02 * imsize plt.text(-imsize / 2. - xpos, 0., 'EAST', rotation=90., fontsize=20) plt.text(0., imsize / 2. + ypos, 'NORTH', fontsize=20, horizontalalignment='center') plt.text(0., -imsize / 2. - 8 * ypos, 'Slit Angle = ' + slitangle, fontsize=20, horizontalalignment='center') # Title plt.text(0.25, 1.28, 'Object Coordinates:', fontsize=18, horizontalalignment='center', transform=ax.transAxes) plt.text(0.25, 1.23, 'RA = ' + str(hms_ra[0]) + ":" + str(hms_ra[1]) + ":" + str(hms_ra[2]), fontsize=20, horizontalalignment='center', transform=ax.transAxes) plt.text(0.25, 1.18, 'DEC = ' + str(dms_dec[0]) + ":" + str(dms_dec[1]) + ":" + str(dms_dec[2]), fontsize=20, horizontalalignment='center', transform=ax.transAxes) plt.text(0.75, 1.28, 'Offset Coordinates (r=' + str(format(min_r_mag, '.2f')) + '):', fontsize=18, horizontalalignment='center', transform=ax.transAxes) plt.text(0.75, 1.23, 'RA = ' + str(hms_off_ra[0]) + ":" + str(hms_off_ra[1]) + ":" + str(hms_off_ra[2]), fontsize=20, horizontalalignment='center', transform=ax.transAxes) plt.text(0.75, 1.18, 'DEC = ' + str(dms_off_dec[0]) + ":" + str(dms_off_dec[1]) + ":" + str(dms_off_dec[2]), fontsize=20, horizontalalignment='center', transform=ax.transAxes) plt.text(0.5, 1.13, 'From Offset, move to get to Object:', fontsize=18, horizontalalignment='center', transform=ax.transAxes) plt.text(0.5, 1.08, str(format(del_ra, '.2f')) + "'' " + e_or_w + " and " + str(format(del_dec, '.2f')) + "'' " + n_or_s, fontsize=20, horizontalalignment='center', transform=ax.transAxes) # Circle for target, then offset circle = plt.Circle((0, 0), cradius, color='y', fill=False) plt.gca().add_artist(circle) if e_or_w == 'W' and n_or_s == 'S': circle_offset = plt.Circle((-del_ra / 60, del_dec / 60), cradius, color='g', fill=False) plt.gca().add_artist(circle_offset) elif e_or_w == 'W' and n_or_s == 'N': circle_offset = plt.Circle((-del_ra / 60, -del_dec / 60), cradius, color='g', fill=False) plt.gca().add_artist(circle_offset) elif e_or_w == 'E' and n_or_s == 'S': circle_offset = plt.Circle((del_ra / 60, del_dec / 60), cradius, color='g', fill=False) plt.gca().add_artist(circle_offset) elif e_or_w == 'E' and n_or_s == 'N': circle_offset = plt.Circle((del_ra / 60, -del_dec / 60), cradius, color='g', fill=False) plt.gca().add_artist(circle_offset) # Spectrum?? show_spec = False if show_spec: spec_img = xgs.get_spec_img(ra_tab['RA'][qq], ra_tab['DEC'][qq]) plt.imshow(spec_img, extent=(-imsize / 2.1, imsize * (-0.1), -imsize / 2.1, imsize * (-0.2))) # Write if show_spec: plt.savefig(outfil, dpi=300) else: plt.savefig(outfil) print 'finder: Wrote ' + outfil
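# Minimal usage sketch of getimg() above with placeholder coordinates (decimal
# degrees) and a 5-arcmin field of view; it downloads the SDSS (or DSS) cutout,
# selects the brightest offset star with 7 < r < 19 from the sqlcl query, and
# writes a PDF finder chart named from the HMS/DMS position.
getimg(187.70593, 12.39112, 5.0, BW=True, slitangle="Parallactic")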
def crossID(ra, dec, unit=None, dr=2., fields=None):
    """
    Perform object cross-ID in SDSS using SQL.

    Search for objects near position (ra, dec) within some radius using
    Tamas Budavari's SQL tool (sqlcl.py).

    Parameters
    ----------
    ra : float, int, str, tuple
        An object that represents a right ascension angle.
    dec : float, int, str, tuple
        An object that represents a declination angle.
    unit : `~astropy.units.UnitBase`, str
        The unit of the value specified for the angle.
    dr : int, float
        Radius of region to perform object cross-ID (arcseconds).
    fields : list, optional
        SDSS PhotoObj or SpecObj quantities to return. If None, defaults to
        quantities required to find corresponding spectra and images of
        matched objects (e.g. plate, fiberID, mjd, etc.).

    See documentation for astropy.coordinates.angles for more information
    about the ('ra', 'dec', 'unit') parameters.

    Examples
    --------
    xid = sdss.crossID(ra='0h8m05.63s', dec='14d50m23.3s')
    for match in xid:
        print match['ra'], match['dec'], match['objid']

    Returns
    -------
    List of all objects found within the search radius. Each element of the
    list is a dictionary containing information about one matched object.
    """
    if not isinstance(ra, coord.angles.RA):
        ra = coord.RA(ra, unit=unit)
    if not isinstance(dec, coord.angles.Dec):
        dec = coord.Dec(dec, unit=unit)
    if fields is None:
        fields = photoobj_defs + specobj_defs

    # Convert arcseconds to degrees
    dr /= 3600.

    Nfields = len(fields)
    q_select = 'SELECT '
    for field in fields:
        if field in photoobj_defs:
            q_select += 'p.%s,' % field
        if field in specobj_defs:
            q_select += 's.%s,' % field
    q_select = q_select.rstrip(',')
    q_select += ' '

    q_from = 'FROM PhotoObjAll AS p '
    q_join = 'JOIN SpecObjAll s ON p.objID = s.bestObjID '
    q_where = 'WHERE (p.ra between %g and %g) and (p.dec between %g and %g)' \
        % (ra.degrees - dr, ra.degrees + dr, dec.degrees - dr, dec.degrees + dr)

    q = sqlcl.query("%s%s%s%s" % (q_select, q_from, q_join, q_where))

    results = []
    cols = q.readline()
    while True:
        line = q.readline().replace('\n', '').split(',')
        if len(line) == 1:
            break
        tmp = {}
        for i, val in enumerate(line):
            field = fields[i]
            if val.isdigit():
                tmp[field] = int(val)
            else:
                try:
                    tmp[field] = float(val)
                except ValueError:
                    tmp[field] = str(val)
        results.append(tmp)

    return results
def getsdssphotcatv2(self): # going to split query into 3 separate calls. Hopefully this will alleviate the timeout errors! # then can merge files with 'join' print 'Getting SDSS phot cat for ', self.prefix drsearch = self.dr * 60. #search radius in arcmin for sdss query for k in range(3): if k == 0: #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) # removing PrimTarget constraint #query="select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) # adding r mag cut query = "select g.ra, g.dec, g.u, g.g, g.r, g.i, g.z, g.objid, g.specObjID, g.petroMag_u, g.petroMag_g, g.petroMag_r, g.petroMag_i, g.petroMag_z,g.petroRad_u, g.petroRad_g, g.petroRad_r, g.petroRad_i, g.petroRad_z, g.petroR50_u, g.petroR50_g, g.petroR50_r, g.petroR50_i, g.petroR50_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.r < 22 and g.objID = n.objID and (g.specObjID = 0)" % ( self.cra, self.cdec, drsearch) if k == 1: #query="select g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) query = "select g.petroR90_u, g.petroR90_g, g.petroR90_r, g.petroR90_i, g.petroR90_z, g.isoA_r, g.isoB_r, g.isoPhi_r, g.isoPhiErr_r, g.deVRad_r, g.deVRadErr_r, g.deVPhi_r, g.deVPhiErr_r, g.deVMag_r, g.expRad_r, g.expRadErr_r, g.expAB_r, g.expABErr_r, g.expPhi_r, g.expPhiErr_r, g.expMag_r, g.expMagErr_r, g.extinction_u,g.extinction_g,g.extinction_r,g.extinction_i,g.extinction_z, g.dered_u, g.dered_g, g.dered_r, g.dered_i, g.dered_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.r < 22 and g.objID = n.objID and (g.specObjID = 0)" % ( self.cra, self.cdec, drsearch) if k == 2: #query="select g.run, g.rerun, g.camcol, g.field, g.err_u,g.err_g,g.err_r,g.err_i,g.err_z,g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.objID = n.objID and (g.PrimTarget & 0x00000040) > 0 and (g.specObjID = 0)" % (self.cra,self.cdec,drsearch) query = "select g.run, g.rerun, g.camcol, g.field, g.err_u,g.err_g,g.err_r,g.err_i,g.err_z,g.rowc_u, g.rowc_g, g.rowc_r,g.rowc_i,g.rowc_z,g.colc_u,g.colc_g,g.colc_r,g.colc_i,g.colc_z from galaxy g, dbo.fGetNearbyObjEq(%12.8f,%12.8f,%8.3f) n where g.r < 22 and g.objID = n.objID and (g.specObjID = 0)" % ( self.cra, 
self.cdec, drsearch) # part 1 of query part = k + 1 print 'running part %i of query' % (part) flag = 0 nrun = 1 print 'getting to while loop' while flag == 0: print 'inside while loop' #changed so that only galaxies w/out spectra are returned print query start_time = time.time() try: lines = sqlcl.query(query).readlines() except IOError: print "IOError for cluster", self.prefix, " trying phot query again" lines = sqlcl.query(query).readlines() elapsed_time = time.time() - start_time print 'time to execute query = ', elapsed_time, ' sec, ', elapsed_time / 60., ' min' print "got number+1 phot objects = ", len(lines) n = homedir + 'research/LocalClusters/SDSSCatalogs/' + str( self.prefix) + 'galaxy.photcat.p' + str(part) + '.dat' outfile = open(n, 'w') j = 0 flag = 1 if (len(lines) > 1.): for line in lines[1:]: if j < 0: print line j = j + 1 outfile.write(line) if line.find('Server.ScriptTimeout') > -1: flag = 0 elif line.find('Timeout') > -1: flag = 0 elif line.find('ERROR') > -1: flag = 0 elif line.find('error') > -1: flag = 0 outfile.close() nrun += 1 if nrun > 15: break if flag == 0: print self.prefix print 'Running query again b/c of ScriptTimeout' print 'starting attempt = ', nrun
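# Hypothetical helper (not part of the original class) that captures the retry
# pattern used above in one place: re-issue the query whenever sqlcl raises an
# IOError or the server returns a timeout/error line, up to max_tries attempts.
# The wait between attempts is an assumption; the original code retries at once.
import time
import sqlcl

def query_with_retries(query, max_tries=15, wait=60):
    lines = []
    for attempt in range(1, max_tries + 1):
        try:
            lines = sqlcl.query(query).readlines()
        except IOError:
            print 'IOError, retrying (attempt %i)' % attempt
            time.sleep(wait)
            continue
        bad = [l for l in lines if 'Timeout' in l or 'ERROR' in l or 'error' in l]
        if not bad:
            return lines
        print 'server timeout/error, retrying (attempt %i)' % attempt
        time.sleep(wait)
    return lines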
from math import trunc
import sqlcl  # must run in Python 2

DEBUG = True
ra = 4.33041666667
dec = 6.72333333333
radius = 0.0162872870485 / 2.  # radius = diameter/2
# proper margin would be radius + radius*0.2, but for multiple entries margin = 0.5 was used as a test
margin = 1.2 * radius

if DEBUG:
    print ("Querying data that lies inside margin")

data = []
result = sqlcl.query("SELECT distinct run,camcol,field FROM PhotoObj WHERE ra between " + str(ra) + "-" + str(margin) + " and " + str(ra) + "+" + str(margin) + " and dec between " + str(dec) + "-" + str(margin) + " and " + str(dec) + "+" + str(margin)).readlines()
print result

# skip the two header lines, then parse run, camcol, field from each CSV row
count = 0
for i in result:
    if count > 1:
        fields = i.split(',')
        fields[2] = fields[2][:-1]  # strip trailing newline
        data.append(fields)
    count += 1

if DEBUG:
    print ("Complete Query. These data lie within margin: ")
    print (data)

bands = ['u', 'g', 'r', 'i', 'z']
for ele in bands:
def sort_data(): objid=[] extinction=[] teff=[] hbreqw=[] hbcont=[] hdflux=[] hdreqw=[] hdcont=[] objs=[] plate=[] fiber=[] mjd=[] n2flux=[] #n2 is now h-a, but both are not used n2reqw=[] magu=[] magg=[] magr=[] magi=[] magz=[] array1=[objid,extinction,teff,hbreqw,hbcont,hdflux,hdreqw,hdcont,objs,plate,fiber,mjd,\ n2flux,n2reqw,magu,magg,magr,magi,magz] #and p.extinction_g between "+str(exta)+" AND "+str(extb)+ ## query = "SELECT p.objID, \ p.extinction_g, s.elodieTEff, p.extinction_g, p.extinction_g, p.extinction_g, p.extinction_g, p.extinction_g, \ p.obj, s.plate, s.fiberID, s.mjd, p.extinction_g, p.extinction_g, p.psfMag_u, \ p.psfMag_g, p.psfMag_r, p.psfMag_i, p.psfMag_z \ FROM PhotoObj AS p \ JOIN SpecObj as s ON s.specobjID=p.specobjID \ WHERE psfMag_r BETWEEN 15.0 and 19.0 \ and p.type=6 \ and dbo.fPhotoStatus('PRIMARY')>0 and dbo.fPhotoFlags('STATIONARY')>0 \ and calibStatus_r=1 \ and s.elodieTEff!=0 and s.elodieFeH!=0 and s.elodieLogG!=0 \ and ((flags&dbo.fPhotoFlags('BLENDED')) \ +(flags&dbo.fPhotoFlags('DEBLEND_TOO_MANY_PEAKS')) + \ (flags&dbo.fPhotoFlags('SATURATED')) \ +(flags&dbo.fPhotoFlags('BADSKY'))+ \ (flags&dbo.fPhotoFlags('COSMIC_RAY')) \ +(flags&dbo.fPhotoFlags('PEAKS_TOO_CLOSE'))+ \ (flags&dbo.fPhotoFlags('NOTCHECKED_CENTER')) \ +(flags&dbo.fPhotoFlags('SATUR_CENTER'))+ \ (flags&dbo.fPhotoFlags('INTERP_CENTER')) \ +(flags&dbo.fPhotoFlags('INTERP'))+ \ (flags&dbo.fPhotoFlags('PSF_FLUX_INTERP')))=0 \ AND (psfMag_u-psfmag_g) between 0.82-0.08 and 0.82+0.08 \ AND (psfMag_g-psfmag_r) between 0.3-0.08 and 0.30+0.08 \ AND (psfMag_r-psfmag_i) between 0.09-0.08 and 0.09+0.08 \ AND (psfMag_i-psfmag_z) between 0.02-0.08 and 0.02+0.08 \ ORDER BY p.extinction_g DESC" alldata=sqlcl.query(query).read() interim=alldata.replace("\n",",") nent=19 #(number of query columns) compiled=interim.split(",") #sort all values into arrays: for j in range(nent): for i in range(j+nent,len(compiled)-1,nent): array1[j].append(float(compiled[i])) #note, plate/mjd/fiber should be int #some of the values should be type(int): for i in range(len(plate)): plate[i]=int(plate[i]) mjd[i]=int(mjd[i]) fiber[i]=int(fiber[i]) return plate, mjd, fiber, extinction, objid
import sqlcl import numpy as np with open("rc3_ra_dec_diameter_pgc.txt",'r') as f: for line in f: ra = float(line.split()[0]) dec = float(line.split()[1]) radius = float(line.split()[2])/2. #radius = diameter/2' margin = 3*radius pgc=str(line.split()[3]).replace(' ', '') clean=True filename = "{},{}".format(str(ra),str(dec)) other_rc3s = sqlcl.query("SELECT distinct rc3.ra, rc3.dec FROM PhotoObj as po JOIN RC3 as rc3 ON rc3.objid = po.objid WHERE po.ra between {0}-{1} and {0}+{1} and po.dec between {2}-{3} and {2}+{3}".format(str(ra),str(margin),str(dec),str(margin))).readlines() data =[] count =0 for i in other_rc3s: if count>1: list =i.split(',') list[0] = float(list[0]) list[1]= float(list[1][:-1]) data.append(list) count += 1 print (data) if (len(data)>1 and len(data)%2==0): d2p= np.array(data[0])-np.array(data[1]) print ("d2p: {}".format(d2p)) # if negative then a is on the left of b # if positive then b is on the right of b # or something like that # then we do pairwise comparison to figure out their relative locations #Finding the difference between the 2 points # d2p = np.array()-np.array()
sys.exit()

search_rad = str(3.0 / 60.0)
photo_objid = ""
optlist, args = getopt.getopt(sys.argv[1:], 'r:i:')
for o, a in optlist:
    if o == "-r":
        search_rad = str(float(a) / 60.0)
    if o == "-i":
        photo_objid = a

# if no photometric objID was supplied, find the nearest object with a spectrum
if photo_objid == "":
    ra = sys.argv[1]
    dec = sys.argv[2]
    sql_query = "select P.objid, P.modelMag_u, P.modelMag_g, P.modelMag_r, P.modelMag_i, P.modelMag_z from PhotoObjAll P, dbo.fGetNearbyObjAllEq(" + ra + "," + dec + "," + search_rad + ") n where P.objID = n.objID and P.specObjID > 0"
    query_result = sqlcl.query(sql_query).readlines()
    if len(query_result) > 1:
        data_part = string.split(query_result[1], ",")
        photo_objid = data_part[0]
    time.sleep(1.0)

# look up the matching spectrum for the photometric objID
sql_query = "select S.SpecObjID, S.plate, S.fiberID, S.mjd, S.z, S.zErr, S.specClass from SpecObjAll S where S.bestObjID =" + photo_objid
query_result = sqlcl.query(sql_query).readlines()
if len(query_result) > 1:
    temp = string.split(query_result[1], ",")
    output_str = ""
    for x in temp:
        output_str = output_str + x.strip() + " "
    print output_str
else:
    print "No spectrum found but photometric objid = ", photo_objid
time.sleep(1.0)
def mosaic_band(self,band,ra,dec,margin,radius,pgc):#,clean=True): ''' Input: source info param Create a mosaic fit file for the specified band. Return: String filename of resulting mosaic ''' print ("------------------mosaic_band----------------------") DEBUG = True # output = open("rc3_galaxies_outside_SDSS_footprint.txt",'a') # 'a' for append #'w') # unclean = open("rc3_galaxies_unclean","a") # filename = "{},{}".format(str(ra),str(dec)) filename = str(ra)+str(dec) #print (margin/radius) if (DEBUG) : print ("Querying data that lies inside margin") #result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE ra between {0}-{1} and {0}+{1}and dec between {2}-{3} and {2}+{3}".format(str(ra),str(margin),str(dec),str(margin))).readlines() result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines() clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE CLEAN =1 and ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines() # clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE CLEAN =1 and ra between {0}-{1} and {0}+{1}and dec between {2}-{3} and {2}+{3}".format(str(ra),str(margin),str(dec),str(margin))) .readlines() clean = True print (result) print (clean_result) if (result[0][5:]=="<html>"): print("strange error from SQL server") return -1 if (result[1]=='error_message\n' or clean_result[1]=='error_message\n'): #Case where doing more than 60 queries in 1 minute time.sleep(60) #results are messed up, need to re-query result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines() clean_result = sqlcl.query( "SELECT distinct run,camcol,field FROM PhotoObj WHERE CLEAN =1 and ra between "+str(ra)+"-"+str(margin)+" and " +str(ra)+"+"+str(margin)+"and dec between "+str(dec)+"-"+str(margin)+" and "+ str(dec)+"+"+str(margin)).readlines() if (len(result)!=len(clean_result) and band=='u'): #only print this once in the u band. If it is unclean in u band (ex. cosmic ray, bright star..etc) then it must be unclean in the other bands too. print ("Data contain unclean images") clean=False unclean.write(str(ra)+" "+str(dec)+" "+str(radius)+" "+pgc) # unclean.write("{} {} {} {} \n".format(str(ra),str(dec),str(radius),pgc)) data =[] count =0 for i in result: if count>1: list =i.split(',') list[2]= list[2][:-1] data.append(list) count += 1 print (data) if (len(data)==0 and band=='r'): #you will only evounter non-footprint galaxy inint run , because after that we just take the footprint gaalxy already mosaiced (init) from rfits if (DEBUG): print ('The given ra, dec of this galaxy does not lie in the SDSS footprint. Onto the next galaxy!')#Exit Program.' output.write(str(ra)+ " "+ str(dec)+" "+str(radius)+"\n") # output.write("{} {} {} {} \n".format(str(ra),str(dec),str(radius),pgc)) output.write(str(ra)+" "+str(dec)+" "+str(radius)+" "+pgc) #sys.exit() return -1 #special value reserved for not in SDSS footprint galaxies else : if (DEBUG): print ( "Complete Query. 
These data lies within margin: ") print (data) # os.mkdir(filename) # os.chdir(filename) #if (os.path.exists(band)): #os.system("rm -r "+band) os.mkdir(band) os.chdir(band) os.mkdir ("raw") os.mkdir ("projected") os.chdir("raw") if (DEBUG): print ("Retrieving data from SDSS SAS server for "+ band +"band") for i in data : out = "frame-"+str(band)+"-"+str(i[0]).zfill(6)+"-"+str(i[1])+"-"+str(i[2]).zfill(4) os.system("wget http://mirror.sdss3.org/sas/dr10/boss/photoObj/frames/301/"+str(i[0])+"/"+str(i[1])+"/"+out+".fits.bz2") os.system("bunzip2 "+out+".fits.bz2") os.chdir("../") if (DEBUG) : print("Creating mosaic for "+band+" band.") outfile_r="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+"r.fits" outfile="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+".fits" if (len(data)==1): #With header info, len of processed result list is 1 if there is only 1 field lying in the margin, simply do mSubImage without mosaicing #This patch should not be necessary but the program is aparently not mosaicing for the case where there is only one field. print ("Only one field in region of interest") os.chdir("raw") montage.mSubimage(out+".fits",outfile,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image) #os.chdir("../..") hdulist = pyfits.open(outfile) shutil.move(outfile,"../..") os.chdir("../..") else: montage.mImgtbl("raw","images.tbl") montage.mHdr(str(ra)+" "+str(dec),margin,out+".hdr") if (DEBUG): print ("Reprojecting images") os.chdir("raw") montage.mProjExec("../images.tbl","../"+out+".hdr","../projected", "../stats.tbl") os.chdir("..") montage.mImgtbl("projected","pimages.tbl") os.chdir("projected") montage.mAdd("../pimages.tbl","../"+out+".hdr","SDSS_"+out+".fits") # outfile_r="SDSS_{}_{}_{}r.fits".format(band,str(ra),str(dec)) #outfile_r="SDSS_"+band+"_"+str(ra)+"_"+str(dec)+"r.fits" montage.mSubimage("SDSS_"+out+".fits",outfile_r,ra,dec,2*margin) # mSubImage takes xsize which should be twice the margin (margin measures center to edge of image) shutil.move(outfile_r,os.getcwd()[:-11] )#if change to :-11 then move out of u,g,r,i,z directory, may be more convenient for mJPEG if (DEBUG) : print ("Completed Mosaic for " + band) os.chdir("../..") hdulist = pyfits.open(outfile_r) hdulist[0].header['RA']=ra hdulist[0].header['DEC']=dec hdulist[0].header['RADIUS']=radius hdulist[0].header['PGC']=pgc hdulist[0].header['NED']=("http://ned.ipac.caltech.edu/cgi-bin/objsearch?objname="+ str(hdulist[0].header['PGC'])+"&extend=no&hconst=73&omegam=0.27&omegav=0.73&corr_z=1&out_csys=Equatorial&out_equinox=J2000.0&obj_sort=RA+or+Longitude&of=pre_text&zv_breaker=30000.0&list_limit=5&img_stamp=YES") hdulist[0].header['CLEAN']=clean hdulist[0].header['MARGIN']=margin #if (os.path.exists(outfile)): #os.system("rm "+ outfile) hdulist.writeto(outfile) if (os.path.exists(outfile_r)): os.system("rm "+outfile_r) #print("Deleting") os.system("rm -r "+band+"/") print ("Completed Mosaic") return outfile
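# Hypothetical helper (not in the original code) isolating the frame-naming and
# DR10 SAS URL pattern that mosaic_band() uses when fetching corrected frames
# (rerun 301, served as .fits.bz2).
def sas_frame_url(run, camcol, field, band):
    name = "frame-" + band + "-" + str(run).zfill(6) + "-" + str(camcol) + "-" + str(field).zfill(4)
    url = ("http://mirror.sdss3.org/sas/dr10/boss/photoObj/frames/301/"
           + str(run) + "/" + str(camcol) + "/" + name + ".fits.bz2")
    return name, url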
def query_sdss_culster(file_loc, cat_ra, cat_dec, cat_z, cat_lambda, name, num, start=0, plot=False, spider_rad=None, spider_mean=False, query_galaxy_only=True, r200_factor=None, richness_mass_author=None): fails = [] if(query_galaxy_only): query_table = "Galaxy" else: query_table = "PhotoObj" print("querying...") hfile = h5py.File(file_loc+"query_results.hdf5",mode='a') for i in range(start,num): #try: start = time.time() print("%d/%d"%(i+1,num)) keys = hfile.keys() if("%s%d"%(name,i) in keys and "%s_prop%d"%(name,i) in keys): continue; if "%s%d"%(name,i) in keys: del hfile["%s%d"%(name,i)] if "%s_prop%d"%(name,i) in keys: del hfile["%s_prop%d"%(name,i)] #query columns are defined here: #http://skyserver.sdss.org/dr8/en/help/browser/browser.asp?n=PhotoObjAll&t=U ra = cat_ra[i] dec = cat_dec[i] z = cat_z[i] richness = cat_lambda[i] # Xray Spiders have their own r200c, so we don't need to compute it. if spider_rad is None: mass, r200 = background.lambda_to_m200_r200(richness,z, richness_mass_author=richness_mass_author) rad = background.r200_to_arcmin(r200, z) else: r200c_deg = spider_rad[i] rad = r200c_deg * 60 r200 = background.arcmin_to_r200(rad, z) mass = background.r200c_to_m200c(r200, z) if spider_mean: m200m, r200m, c200m =mass_adv.changeMassDefinitionCModel(mass, z, "200c", "200m", c_model='child18') mass = m200m r200m_r200c_ratio = r200m/r200 rad *= r200m_r200c_ratio r200 *= r200m_r200c_ratio hgroup = hfile.create_group("%s_prop%d"%(name,i)) hgroup.create_dataset("ra",data=ra) hgroup.create_dataset("dec",data=dec) hgroup.create_dataset("z",data=z) hgroup.create_dataset("mass",data=mass) hgroup.create_dataset("rad",data=rad) hgroup.create_dataset("r200",data=r200) hgroup.create_dataset("richness", data = richness) ## Query and save objects around target query_str = "select p.ra, p.dec, p.type,p.insidemask,p.flags_g,p.flags_i,p.flags_r,p.cModelMag_u, p.cModelMagErr_u,p.cModelMag_g, p.cModelMagErr_g,p.cModelMag_r, p.cModelMagErr_r,p.cModelMag_i, p.cModelMagErr_i,p.cModelMag_z, p.cModelMagErr_z from "+query_table+" p join dbo.fGetNearbyObjEq(%f,%f,%f) r on p.ObjID = r.ObjID"%(ra, dec, r200_factor*rad) result = sqlcl.query(query_str).read() # datagal = np.genfromtxt(StringIO(result),names=True,delimiter=', ',dtype=['f8','f8','i2','i1','i8','i8','i8', 'f4','f4', 'f4','f4', 'f4','f4', 'f4','f4', 'f4','f4']) try: datagal = np.genfromtxt(StringIO(result),names=True,skip_header=1,delimiter=',',dtype=['f8','f8','i2','i1','i8','i8','i8', 'f4','f4', 'f4','f4', 'f4','f4', 'f4','f4', 'f4','f4']) except ValueError as e: print(query_str) continue hgroup = hfile.create_group("%s%d"%(name,i)) hgroup.create_dataset("ra",data=datagal['ra']) hgroup.create_dataset("dec",data=datagal['dec']) hgroup.create_dataset("type",data=datagal['type']) hgroup.create_dataset("insidemask",data=datagal['insidemask']) hgroup.create_dataset("flags_g",data=datagal['flags_g']) hgroup.create_dataset("flags_i",data=datagal['flags_i']) hgroup.create_dataset("flags_r",data=datagal['flags_r']) hgroup.create_dataset("mag_u",data=datagal['cModelMag_u']) hgroup.create_dataset("mag_err_u",data=datagal['cModelMagErr_u']) hgroup.create_dataset("mag_g",data=datagal['cModelMag_g']) hgroup.create_dataset("mag_err_g",data=datagal['cModelMagErr_g']) hgroup.create_dataset("mag_r",data=datagal['cModelMag_r']) hgroup.create_dataset("mag_err_r",data=datagal['cModelMagErr_r']) hgroup.create_dataset("mag_i",data=datagal['cModelMag_i']) hgroup.create_dataset("mag_err_i",data=datagal['cModelMagErr_i']) 
hgroup.create_dataset("mag_z",data=datagal['cModelMag_z']) hgroup.create_dataset("mag_err_z",data=datagal['cModelMagErr_z']) end = time.time() print(" time: %.2f"%float(end-start)) if(plot): plt.figure() legends = ["uknown","cosmic_ray","defect","galaxy","ghost","knownobj","star","trail","sky","notatype"] slct1 = datagal['insidemask']==0 for i in range(0,10): slct = (datagal["type"] == i) & slct1 plt.plot(datagal['ra'][slct],datagal['dec'][slct],'x',label=legends[i]) plt.legend(loc='best') plt.xlabel('ra') plt.ylabel('dec') plt.show() # except ValueError as ie: # print ie # fails.append(i) # print "Failure" np.save(file_loc+"fail_indexs.npy",fails) hfile.close();
def sdss_sql(): #i = -1 for q in range(Nz): ''' i = i+1 if ( i % 10 != 0 ): continue ''' time = z[q] ra = Ra[q] dec = Dec[q] c_ra0 = str(ra - r_select) c_dec0 = str(dec - r_select) c_ra1 = str(ra + r_select) c_dec1 = str(dec + r_select) qry = """ SELECT ALL p.ra,p.dec,p.u,p.g,p.r,p.i,p.z,p.type, p.probPSF, p.petroR90_u, p.petroR90_g, p.petroR90_r, p.petroR90_i, p.petroR90_z, p.deVRad_u, p.deVRad_g, p.deVRad_r, p.deVRad_i, p.deVRad_z, p.deVAB_u, p.deVAB_g, p.deVAB_r, p.deVAB_i, p.deVAB_z, p.deVPhi_u, p.deVPhi_g, p.deVPhi_r, p.deVPhi_i, p.deVPhi_z, p.expRad_u, p.expRad_g, p.expRad_r, p.expRad_i, p.expRad_z, p.expAB_u, p.expAB_g, p.expAB_r, p.expAB_i, p.expAB_z, p.expPhi_u, p.expPhi_g, p.expPhi_r, p.expPhi_i, p.expPhi_z FROM PhotoObj AS p WHERE p.ra BETWEEN %s AND %s AND p.dec BETWEEN %s AND %s AND p.type = 6 ORDER by p.r """ % (c_ra0, c_ra1, c_dec0, c_dec1) ''' # add date: 2019.8.30 qry = """ SELECT ALL p.ra, p.dec, p.u, p.g, p.r, p.i, p.z, p.type, p.isoA_u, p.isoA_g, p.isoA_r, p.isoA_i, p.isoA_z, p.isoB_u, p.isoB_g, p.isoB_r, p.isoB_i, p.isoB_z, p.isoPhi_u, p.isoPhi_g, p.isoPhi_r, p.isoPhi_i, p.isoPhi_z, p.flags, dbo.fPhotoFlagsN(p.flags) FROM PhotoObj AS p WHERE p.ra BETWEEN %s AND %s AND p.dec BETWEEN %s AND %s AND (p.type = 6 OR (p.flags & dbo.fPhotoFlags('SATURATED')) > 0) ORDER by p.r """ % (c_ra0, c_ra1, c_dec0, c_dec1) ''' cord_z = z_cod[q] cord_ra = ra_cod[q] cord_dec = dec_cod[q] print( qry ) file = sqlcl.query( qry, url, fmt ) #fd = open( '/home/xkchen/mywork/ICL/data/star_catalog/ra%.3f_dec%.3f_z%.3f.csv'%(cord_ra, cord_dec, cord_z), 'w' ) fd = open( './ra%.3f_dec%.3f_z%.3f.csv'%(cord_ra, cord_dec, cord_z), 'w' ) #sqlcl.write_header( fd, "#", url, qry ) lines = file.readlines() for l in lines: dtl = l.decode('utf-8') fd.write( dtl ) fd.close() if q == 5: break return
def search_sdss(blazardir='/Users/willettk/Astronomy/Research/blazars/',blazarfile='plotkin_dr8_upload.txt',savstring='',envsize=1000,ngals=1000, save=False, timing=True): import sqlcl import cosmocalc import time timestart = time.time() # Read in the list of blazars from combined catalogs (Plotkin, BZCAT, TeVcat) pf = open(blazardir+blazarfile,'r') # Python dictionary for the relevant data to be returned sdss_data_zns = { 'objid':[], 'ra':[], 'dec':[], 'type':[], 'nchild':[], 'u':[], 'g':[], 'r':[], 'i':[], 'z':[], 'p_el':[], 'p_cw':[], 'p_acw':[], 'p_edge':[], 'p_dk':[], 'p_mg':[], 'p_cs':[], 'blazar_name':[] } sdss_data_zs = { 'objid':[], 'ra':[], 'dec':[], 'type':[], 'nchild':[], 'u':[], 'g':[], 'r':[], 'i':[], 'z':[], 'p_el':[], 'p_cw':[], 'p_acw':[], 'p_edge':[], 'p_dk':[], 'p_mg':[], 'p_cs':[], 'blazar_name':[] } sdss_data_nozoo = { 'objid':[], 'ra':[], 'dec':[], 'type':[], 'nchild':[], 'u':[], 'g':[], 'r':[], 'i':[], 'z':[], 'blazar_name':[] } errorarr = {'type':[],'name':[]} for line in pf: # blazar_name, blazar_ra, blazar_dec, blazar_z, blazar_type = line.split() blazar_z = float(blazar_z) # # Compute the angular size of 1000 kpc # platescale = cosmocalc.cosmocalc(z=blazar_z, H0=71, WM=0.27)['PS_kpc'] # kpc per arcsec angsize = envsize / platescale / 60. # # Query the SDSS DR8 catalog # blazar_query_zns = """ SELECT top %i p.objID, p.ra, p.dec, p.type, p.nchild, p.u, p.g, p.r, p.i, p.z, zns.p_el, zns.p_cw, zns.p_acw, zns.p_edge, zns.p_dk, zns.p_mg, zns.p_cs FROM fGetNearbyObjEq(%f,%f,%f) n JOIN PhotoObj p on p.objid = n.objid JOIN zooNoSpec zns on zns.objid = n.objid ORDER BY p.objID """ % (ngals, float(blazar_ra), float(blazar_dec), angsize) blazar_query_zs = """ SELECT top %i p.objID, p.ra, p.dec, p.type, p.nchild, p.u, p.g, p.r, p.i, p.z, zs.p_el, zs.p_cw, zs.p_acw, zs.p_edge, zs.p_dk, zs.p_mg, zs.p_cs FROM fGetNearbyObjEq(%f,%f,%f) n JOIN PhotoObj p on p.objid = n.objid JOIN zooSpec zs on zs.objid = n.objid ORDER BY p.objID """ % (ngals, float(blazar_ra), float(blazar_dec), angsize) blazar_query_nozoo = """ SELECT top %i p.objID, p.ra, p.dec, p.type, p.nchild, p.u, p.g, p.r, p.i, p.z FROM fGetNearbyObjEq(%f,%f,%f) n JOIN PhotoObj p on p.objid = n.objid ORDER BY p.objID """ % (ngals, float(blazar_ra), float(blazar_dec), angsize) # bqtime = time.time() # queryreturn_zns = sqlcl.query(blazar_query_zns, fmt='csv') queryreturn_zs = sqlcl.query(blazar_query_zs, fmt='csv') queryreturn_nozoo = sqlcl.query(blazar_query_nozoo, fmt='csv') # lqtime = time.time() # # Make sure queries do not exceed 60 per minute (intrinsic limit of database) # if (lqtime - bqtime) < 3.1: time.sleep(3.1 - (lqtime - bqtime)) # qr_read_zns = queryreturn_zns.read() qr_read_zs = queryreturn_zs.read() qr_read_nozoo = queryreturn_nozoo.read() # # Print detections to the screen # if (qr_read_zns[:26] != 'No objects have been found') and (qr_read_zns[:5] != 'ERROR'): print blazar_name # qrsplit = qr_read_zns.split() print qrsplit[1:] for n in arange(len(qrsplit[1:]))+1: neighbor_data = qrsplit[n].split(',') sdss_data_zns['objid'].append(neighbor_data[0]) sdss_data_zns['ra'].append(neighbor_data[1]) sdss_data_zns['dec'].append(neighbor_data[2]) sdss_data_zns['type'].append(neighbor_data[3]) sdss_data_zns['nchild'].append(neighbor_data[4]) sdss_data_zns['u'].append(neighbor_data[5]) sdss_data_zns['g'].append(neighbor_data[6]) sdss_data_zns['r'].append(neighbor_data[7]) sdss_data_zns['i'].append(neighbor_data[8]) sdss_data_zns['z'].append(neighbor_data[9]) sdss_data_zns['p_el'].append(neighbor_data[10]) 
sdss_data_zns['p_cw'].append(neighbor_data[11]) sdss_data_zns['p_acw'].append(neighbor_data[12]) sdss_data_zns['p_edge'].append(neighbor_data[13]) sdss_data_zns['p_dk'].append(neighbor_data[14]) sdss_data_zns['p_mg'].append(neighbor_data[15]) sdss_data_zns['p_cs'].append(neighbor_data[16]) sdss_data_zns['blazar_name'].append(blazar_name) if (qr_read_zs[:26] != 'No objects have been found') and (qr_read_zs[:5] != 'ERROR'): print blazar_name # qrsplit = qr_read_zs.split() print qrsplit[1:] for n in arange(len(qrsplit[1:]))+1: neighbor_data = qrsplit[n].split(',') sdss_data_zs['objid'].append(neighbor_data[0]) sdss_data_zs['ra'].append(neighbor_data[1]) sdss_data_zs['dec'].append(neighbor_data[2]) sdss_data_zs['type'].append(neighbor_data[3]) sdss_data_zs['nchild'].append(neighbor_data[4]) sdss_data_zs['u'].append(neighbor_data[5]) sdss_data_zs['g'].append(neighbor_data[6]) sdss_data_zs['r'].append(neighbor_data[7]) sdss_data_zs['i'].append(neighbor_data[8]) sdss_data_zs['z'].append(neighbor_data[9]) sdss_data_zs['p_el'].append(neighbor_data[10]) sdss_data_zs['p_cw'].append(neighbor_data[11]) sdss_data_zs['p_acw'].append(neighbor_data[12]) sdss_data_zs['p_edge'].append(neighbor_data[13]) sdss_data_zs['p_dk'].append(neighbor_data[14]) sdss_data_zs['p_mg'].append(neighbor_data[15]) sdss_data_zs['p_cs'].append(neighbor_data[16]) sdss_data_zs['blazar_name'].append(blazar_name) if (qr_read_nozoo[:26] != 'No objects have been found') and (qr_read_nozoo[:5] != 'ERROR'): print blazar_name # qrsplit = qr_read_nozoo.split() print qrsplit[1:] for n in arange(len(qrsplit[1:]))+1: neighbor_data = qrsplit[n].split(',') sdss_data_nozoo['objid'].append(neighbor_data[0]) sdss_data_nozoo['ra'].append(neighbor_data[1]) sdss_data_nozoo['dec'].append(neighbor_data[2]) sdss_data_nozoo['type'].append(neighbor_data[3]) sdss_data_nozoo['nchild'].append(neighbor_data[4]) sdss_data_nozoo['u'].append(neighbor_data[5]) sdss_data_nozoo['g'].append(neighbor_data[6]) sdss_data_nozoo['r'].append(neighbor_data[7]) sdss_data_nozoo['i'].append(neighbor_data[8]) sdss_data_nozoo['z'].append(neighbor_data[9]) sdss_data_nozoo['blazar_name'].append(blazar_name) if qr_read_zns[:5] == 'ERROR': errorarr['type'].append('zns') errorarr['name'].append(blazar_name) if qr_read_zs[:5] == 'ERROR': errorarr['type'].append('zs') errorarr['name'].append(blazar_name) if qr_read_nozoo[:5] == 'ERROR': errorarr['type'].append('nozoo') errorarr['name'].append(blazar_name) if save is True: import pickle # Save dictionary to file so it doesn't have to be rerun output = open('sdss_'+str(envsize)+'kpc_zns'+str(savstring)+'.pkl', 'w') pickle.dump(sdss_data_zns, output) output.close() output = open('sdss_'+str(envsize)+'kpc_zs'+str(savstring)+'.pkl', 'w') pickle.dump(sdss_data_zs, output) output.close() output = open('sdss_'+str(envsize)+'kpc_nozoo'+str(savstring)+'.pkl', 'w') pickle.dump(sdss_data_nozoo, output) output.close() print "Saved dictionaries to file" endtime = time.time() print 'Total time elapsed is %5.2f min' % ((endtime - starttime)*60)
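# Hypothetical helper (not in the original code): enforce a minimum spacing
# between consecutive sqlcl queries so the ~60-queries-per-minute SkyServer
# limit noted in the comments above is respected, instead of hard-coded sleeps.
import time

class QueryThrottle(object):
    def __init__(self, min_interval=1.1):
        self.min_interval = min_interval  # seconds between consecutive queries
        self._last = 0.0

    def wait(self):
        elapsed = time.time() - self._last
        if elapsed < self.min_interval:
            time.sleep(self.min_interval - elapsed)
        self._last = time.time()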
def query_sdss_mask(file_loc, cat_ra, cat_dec, cat_z, cat_lambda, name, num, r200_factor=1.0, start=0, plot=False, save_data=True, spider_rad=None, spider_mean=False, richness_mass_author='Rykoff'): global num_pass global num_fail fails = [] print file_loc + name + "_mask.hdf5" if (save_data): hfile = h5py.File(file_loc + name + "_mask.hdf5", 'w') for i in range(start, num): start = time.time() #query columns are defined here: #http://skyserver.sdss.org/dr8/en/help/browser/browser.asp?n=PhotoObjAll&t=U print "%d/%d " % (i, num), ra = cat_ra[i] dec = cat_dec[i] z = cat_z[i] richness = cat_lambda[i] mass, r200 = background.lambda_to_m200_r200( richness, z, richness_mass_author=richness_mass_author) if spider_rad is None: r200_deg = background.r200_to_arcmin(r200, z) / 60.0 rad = background.r200_to_arcmin(r200, z) * r200_factor else: r200c_deg = spider_rad[i] rad = r200c_deg * 60 r200 = background.arcmin_to_r200(rad, z) mass = background.r200c_to_m200c(r200, z) if spider_mean: m200m, r200m, c200m = mass_adv.changeMassDefinitionCModel( mass, z, "200c", "200m", c_model='child18') mass = m200m r200m_r200c_ratio = r200m / r200 rad *= r200m_r200c_ratio r200 *= r200m_r200c_ratio print "ra", ra, "dec", dec print "z", z, "l", richness, "mass", mass, "r200", r200, ## Query and save objects around target rad_deg = rad / 60.0 rad_mask = rad_deg * 3 a = np.cos(np.pi * dec / 180.0) query_str = "SELECT ra, dec, radius, type,area FROM Mask where ra < %f and ra > %f and dec < %f and dec > %f and type != 4" % ( ra + rad_mask / a, ra - rad_mask / a, dec + rad_mask, dec - rad_mask) result = sqlcl.query(query_str) result.readline() try: data_mask = np.genfromtxt(StringIO(result.read()), names=True, delimiter=",", dtype=['f4', 'f4', 'f4', 'i4', 'S500']) mask_pass = mask_outside_r200(ra, dec, rad_deg, data_mask['area'], data_mask['type']) except ValueError as e: print("failed query, auto fail") mask_pass = False if (mask_pass): num_pass += 1 else: num_fail += 1 print "\tpass:"******"\tfract total: %.3f " % ( float(num_pass) / float(num_pass + num_fail)), if (save_data): hgroup = hfile.create_group(str(i)) dataset = hgroup.require_dataset("mask_pass", (1, 1), 'u1', mask_pass) if (mask_pass): dataset[0, 0] = True else: dataset[0, 0] = False dt_str = h5py.special_dtype(vlen=str) dataset = hgroup.require_dataset("mask_points", (data_mask["area"].size, ), dtype=dt_str) if (data_mask["area"].size == 1): dataset[0] = data_mask['area'] else: for j in range(0, data_mask["area"].size): dataset[j] = data_mask["area"][j] if (plot): plt.figure() plt.title("pass: "******"cirlce test: dist: ",dist,"ra,dec: ",ras[j],decs[j] # plt.plot(ras,decs,'-k') for j in range(0, data_mask['area'].size): if (data_mask['area'].size == 1): x, y = area_str_to_lines(str(data_mask['area'])) mask_type = data_mask['type'] else: x, y = area_str_to_lines(data_mask['area'][j]) mask_type = data_mask['type'][j] if (mask_type == 0): plot_type = 'r' elif (mask_type == 1): plot_type = 'b' elif (mask_type == 2): plot_type = 'g--' elif (mask_type == 3): plot_type = 'k:' plt.plot(x, y, plot_type) plt.plot([], [], 'r', label='bleeding') plt.plot([], [], 'b', label='bright star') plt.plot([], [], 'g--', label='trail') plt.plot([], [], 'k:', label='quality hole') plt.legend(loc='best') plt.xlabel('ra') plt.ylabel('dec') plt.xlim([ra - rad_mask, ra + rad_mask]) plt.ylim([dec - rad_mask, dec + rad_mask]) plt.show() #close the file we have been writing to. end = time.time() print " time: %.2f" % float(end - start) if (save_data): hfile.close()
def ObsRealism( inputName, outputName, band='r', cosmo=FlatLambdaCDM(H0=70, Om0=0.3), common_args={ 'redshift': 0.1, # mock observation redshift 'rebin_to_CCD': False, # rebin to CCD angular scale 'CCD_scale': 0.396, # CCD angular scale in [arcsec/pixel] 'add_false_sky': False, # add gaussian sky 'false_sky_sig': 24.2, # gaussian sky standard dev [AB mag/arcsec2] 'add_false_psf': False, # convolve with gaussian psf 'false_psf_fwhm': 1.0, # gaussian psf FWHM [arcsec] 'add_poisson': False, # add poisson noise to galaxy 'add_sdss_sky': False, # insert into real SDSS sky (using sdss_args) 'add_sdss_psf': False, # convolve with real SDSS psf (using sdss_args) }, sdss_args={ 'sdss_run': 745, # sdss run 'sdss_rerun': 40, # sdss rerun 'sdss_camcol': 1, # sdss camcol 'sdss_field': 517, # sdss field 'sdss_ra': 236.1900, # ra for image centroid 'sdss_dec': -0.9200, # ec for image centroid }): ''' Add realism to idealized unscaled image. "redshift": The redshift at which the synthetic image is to be mock-observed. Given that the image should be in surface brightness units and appropriately dimmed by (1+z)^-5, the redshift is only used to determine the angular-to-physical scale of the image -- to which it is appropriately rebinned corresponding to the desired CCD pixel scale. "rebin_to_CCD": If TRUE, the image is rebinned to the CCD scale identified by the "CCD_scale" keyword. The rebinning is determined by first computing the physical-to-angular scale associated with the target redshift [kpc/arcsec]. Combining this number with the scale of the original image in physical units [kpc/pixel], we obtain the rebinning factor that is neccesary to bring the image to the desired CCD pixel scale [arcsec/pixel]. "CCD_scale": The CCD scale to which the images are rebinned if rebin_to_CCD is TRUE. "add_false_sky": If TRUE, a Gaussian sky is added to the image with a noise level that is idenfitied by the "false_sky_sig" keyword. "false_sky_sig": The standard deviation of Gaussian sky that is added to the image if "add_false_sky" is TRUE. The value must be expressed in relative magnitude units (AB mag/arcsec2). "add_false_psf": If TRUE, a Gaussian PSF is added to the image with a FWHM that is idenfitied by the "false_psf_fwhm" keyword. "false_psf_fwhm": The FWHM of the PSF that is convolved with the image if "add_false_psf" is TRUE. The value must be expressed in arcsec. "add_poisson": If TRUE, add Poisson noise to the image using either the calibration info and gain from the real image properties ("add_sdss_sky"=TRUE) or generic values derived from averages over SDSS fields. "add_sdss_sky": If True, insert into real SDSS sky using arguments in "sdss_args". "add_sdss_psf": If True and "add_sdss_sky"=True, reconstruct the PSF at the injection location and convolve with the image. ''' # mock observation redshift redshift = common_args['redshift'] # speed of light [m/s] speed_of_light = 2.99792458e8 # kiloparsec per arcsecond scale kpc_per_arcsec = cosmo.kpc_proper_per_arcmin( z=redshift).value / 60. 
# [kpc/arcsec] # luminosity distance in Mpc luminosity_distance = cosmo.luminosity_distance(z=redshift) # [Mpc] # img header and data with fits.open(inputName, mode='readonly') as hdul: # img header header = hdul[0].header # img data img_data = hdul[0].data # # header properties # sim_tag = header['SIMTAG'] # sub_tag = header['SUBTAG'] # isnap = header['ISNAP'] # axis = header['CAMERA'] # band = header['FILTER'][0] # # # unique simulID # simulID = '{}-{}-{}-{}'.format(sim_tag,sub_tag,isnap,axis) # # band = header['FILTER'][0] # collect physical pixel scale kpc_per_pixel = header['CDELT1'] / 1000. # [kpc/pixel] # compute angular pixel scale from cosmology arcsec_per_pixel = kpc_per_pixel / kpc_per_arcsec # [arcsec/pixel] # img in AB nanomaggies per arcsec2 img_nanomaggies = 10**(-0.4 * (img_data - 22.5)) # [nmgys/arcsec2] # apply pixel scale [arcsec/pixel]2 to convert to calibrated flux img_nanomaggies *= arcsec_per_pixel**2 # [nmgs] # update units of image header to linear calibrated scale header['BUNIT'] = 'AB nanomaggies' # print('\nRaw image:') # print('kpc_per_arcsec: {}'.format(kpc_per_arcsec)) # print('kpc_per_pixel: {}'.format(kpc_per_pixel)) # print('arcsec_per_pixel: {}'.format(arcsec_per_pixel)) # m_AB = -2.5*np.log10(np.sum(img_nanomaggies))+22.5 # print('AB_magnitude: {} at z={}'.format(m_AB,redshift)) # M_AB = m_AB-5*np.log10(luminosity_distance.value)-25 # print('AB_Magnitude: {}'.format(M_AB)) # Add levels of realism if common_args['rebin_to_CCD']: ''' Rebin image to a given angular CCD scale ''' # telescope ccd angular scale ccd_scale = common_args['CCD_scale'] # axes of original image nPixelsOld = img_nanomaggies.shape[0] # axes of regridded image nPixelsNew = int(np.floor((arcsec_per_pixel / ccd_scale) * nPixelsOld)) # rebin to new ccd scale if nPixelsNew > nPixelsOld: interp = RectBivariateSpline(np.linspace(-1, 1, nPixelsOld), np.linspace(-1, 1, nPixelsOld), img_nanomaggies, kx=1, ky=1) img_nanomaggies = interp(np.linspace( -1, 1, nPixelsNew), np.linspace( -1, 1, nPixelsNew)) * (nPixelsOld / nPixelsNew)**2 else: img_nanomaggies = rebin(img_nanomaggies, (nPixelsNew, nPixelsNew)) # new kpc_per_pixel on ccd kpc_per_pixel = kpc_per_arcsec * ccd_scale # new arcsec per pixel arcsec_per_pixel = ccd_scale # header updates if nPixelsNew % 2: CRPIX = float(nPixelsNew / 2) else: CRPIX = float(nPixelsNew / 2) + 0.5 header['CRPIX1'] = CRPIX header['CRPIX2'] = CRPIX header['CDELT1'] = kpc_per_pixel * 1000 header['CDELT2'] = kpc_per_pixel * 1000 # print('\nAfter CCD scaling:') # print('kpc_per_arcsec: {}'.format(kpc_per_arcsec)) # print('kpc_per_pixel: {}'.format(kpc_per_pixel)) # print('arcsec_per_pixel: {}'.format(arcsec_per_pixel)) # m_AB = -2.5*np.log10(np.sum(img_nanomaggies))+22.5 # print('AB_magnitude: {} at z={}'.format(m_AB,redshift)) # M_AB = m_AB-5*np.log10(luminosity_distance.value)-25 # print('AB_Magnitude: {}'.format(M_AB)) # convolve with gaussian psf if common_args['add_false_psf']: ''' Add Gaussian PSF to image with provided FWHM in arcseconds. ''' std = common_args['false_psf_fwhm'] / arcsec_per_pixel / 2.355 kernel = Gaussian2DKernel(stddev=std) img_nanomaggies = convolve(img_nanomaggies, kernel) # add poisson noise to image if common_args['add_poisson'] and not common_args['add_sdss_sky']: ''' Add shot noise to image assuming the average SDSS field properties for zeropoint, airmass, atmospheric extinction, and gain. The noise calculation assumes that the number of counts in the converted image is the mean number of counts in the Poisson distribution. 
        Thereby, the standard error in that number of counts is the square
        root of the number of counts in each pixel. For details on the
        methods applied here, see:
        http://classic.sdss.org/dr7/algorithms/fluxcal.html
        Average quantities obtained from the SkyServer SQL form:
        http://skyserver.sdss.org/dr7/en/tools/search/sql.asp
        DR7 Query Form:
        SELECT AVG(airmass_x),AVG(aa_x),AVG(kk_x),AVG(gain_x) FROM Field
        '''
        # average sdss photometric field properties (gain is inverse gain)
        airmass = {'u': 1.178, 'g': 1.178, 'r': 1.177, 'i': 1.177, 'z': 1.178}
        aa = {'u': -23.80, 'g': -24.44, 'r': -24.03, 'i': -23.67, 'z': -21.98}
        kk = {'u': 0.5082, 'g': 0.1898, 'r': 0.1032, 'i': 0.0612, 'z': 0.0587}
        gain = {'u': 1.680, 'g': 3.850, 'r': 4.735, 'i': 5.111, 'z': 4.622}
        exptime = 53.907456  # seconds
        # conversion factor from nanomaggies to counts
        counts_per_nanomaggy = exptime * 10**(
            -0.4 * (22.5 + aa[band] + kk[band] * airmass[band]))
        # image in counts for given field properties
        img_counts = np.clip(img_nanomaggies * counts_per_nanomaggy,
                             a_min=0, a_max=None)
        # poisson noise [adu] computed accounting for gain [e/adu]
        img_counts = np.random.poisson(lam=img_counts * gain[band]) / gain[band]
        # convert back to nanomaggies
        img_nanomaggies = img_counts / counts_per_nanomaggy

    # add gaussian sky to image
    if common_args['add_false_sky']:
        '''
        Add sky with a noise level set by the "false_sky_sig" keyword.
        "false_sky_sig" should be in relative AB mag/arcsec2 units. In
        other words, 10**(-0.4*false_sky_sig) gives the sample standard
        deviation of the sky in linear flux units [maggies/arcsec2] around
        a sky level of zero.
        '''
        # sky sig in AB mag/arcsec2
        false_sky_sig = common_args['false_sky_sig']
        # conversion from mag/arcsec2 to nanomaggies/arcsec2
        false_sky_sig = 10**(0.4 * (22.5 - false_sky_sig))
        # account for pixel scale in final image
        false_sky_sig *= arcsec_per_pixel**2
        # create false sky image
        sky = false_sky_sig * np.random.randn(*img_nanomaggies.shape)
        # add false sky to image in nanomaggies
        img_nanomaggies += sky

    # add image to real sdss sky
    if common_args['add_sdss_sky']:
        '''
        Insert the image into a real SDSS field (fields are effectively
        weighted by the number of galaxies in each field). For this to
        work, the desired field mask should already have been generated
        and the insertion location selected.
        '''
        import sqlcl
        from astropy.wcs import WCS
        run = sdss_args['sdss_run']
        rerun = sdss_args['sdss_rerun']
        camcol = sdss_args['sdss_camcol']
        field = sdss_args['sdss_field']
        ra = sdss_args['sdss_ra']
        dec = sdss_args['sdss_dec']
        exptime = 53.907456  # seconds

        # sdss data archive server
        das_url = 'http://das.sdss.org/'

        # get and unzip corrected image
        corr_url = das_url + 'imaging/{}/{}/corr/{}/'.format(run, rerun, camcol)
        corr_image_name = 'fpC-{:06}-{}{}-{:04}.fit'.format(run, band, camcol, field)
        if not os.access(corr_image_name, 0):
            corr_url += '{}.gz'.format(corr_image_name)
            os.system('wget {}'.format(corr_url))
            os.system('gunzip {}'.format(corr_image_name))

        # get wcs mapping
        w = WCS(corr_image_name)
        # determine column and row position in image
        colc, rowc = w.all_world2pix(ra, dec, 1, ra_dec_order=True)
        # convert to integers
        colc, rowc = int(np.around(colc)), int(np.around(rowc))

        # get field properties from SkyServer
        dbcmd = [
            'SELECT aa_{b},kk_{b},airmass_{b},gain_{b},sky_{b},skysig_{b}'.format(b=band),
            'FROM Field where run={} AND rerun={}'.format(run, rerun),
            'AND camcol={} AND field={}'.format(camcol, field)
        ]
        lines = sqlcl.query(' '.join(dbcmd)).readlines()
        # zeropoint, atmospheric extinction, airmass, inverse gain, sky, sky uncertainty
        aa, kk, airmass, gain, sky, skysig = [
            float(var)
            for var in lines[1].decode("utf-8").split('\n')[0].split(',')
        ]
        # print(aa,kk,airmass,gain,sky,skysig)

        # convert sky to nanomaggies from maggies/arcsec2
        sky *= (1e9 * 0.396127**2)
        # convert skysig to nanomaggies from relative sky magnitude errors
        skysig *= sky * np.log(10) / 2.5
        # software bias added to corrected images to avoid negative values
        softbias = float(fits.getheader(corr_image_name)['SOFTBIAS'])
        # subtract softbias from corrected image to get image in DN [counts]
        corr_image_data = fits.getdata(corr_image_name).astype(float) - softbias
        # conversion from nanomaggies to counts
        counts_per_nanomaggy = exptime * 10**(-0.4 * (22.5 + aa + kk * airmass))
        # convert image in counts to nanomaggies with Field properties
        corr_image_data /= counts_per_nanomaggy  # [nanomaggies]

        if common_args['add_sdss_psf'] and not common_args['add_false_psf']:
            '''
            Grab, reconstruct, and convolve the real SDSS PSF image with
            the image in nanomaggies.
            '''
            # get corresponding psf reconstruction image
            psf_url = das_url + 'imaging/{}/{}/objcs/{}/'.format(run, rerun, camcol)
            psf_image_name = 'psField-{:06}-{}-{:04}.fit'.format(run, camcol, field)
            if os.access(psf_image_name, 0):
                os.remove(psf_image_name)
            psf_url += psf_image_name
            os.system('wget {}'.format(psf_url))
            psf_ext = {'u': 1, 'g': 2, 'r': 3, 'i': 4, 'z': 5}
            psfname = 'sdss_psf.fit'
            os.system(
                '{}/Sources/utils/sdss-apps/readAtlasImages-v5_4_11/read_PSF {} {} {} {} {}'
                .format(realsim_dir, psf_image_name, psf_ext[band], rowc, colc, psfname))
            if os.access(psf_image_name, 0):
                os.remove(psf_image_name)
            # remove softbias from PSF
            psfdata = fits.getdata(psfname).astype(float) - 1000.
            # normalize for convolution with image in nanomaggies
            psfdata /= np.sum(psfdata)
            # convolve with image in nanomaggies
            img_nanomaggies = convolve(img_nanomaggies, psfdata)
            if os.access(psfname, 0):
                os.remove(psfname)

        if common_args['add_poisson']:
            '''
            Add Poisson noise to the PSF-convolved image with a noise
            level corresponding to the real SDSS field properties.
            '''
            # image in counts for given field properties
            img_counts = np.clip(img_nanomaggies * counts_per_nanomaggy,
                                 a_min=0, a_max=None)
            # poisson noise [adu] computed accounting for gain [e/adu]
            img_counts = np.random.poisson(lam=img_counts * gain) / gain
            # convert back to nanomaggies
            img_nanomaggies = img_counts / counts_per_nanomaggy

        # add real sky pixel by pixel to image in nanomaggies
        corr_ny, corr_nx = corr_image_data.shape
        ny, nx = img_nanomaggies.shape
        for xx in range(nx):
            for yy in range(ny):
                corr_x = int(colc - nx / 2 + xx)
                corr_y = int(rowc - ny / 2 + yy)
                if (corr_x >= 0 and corr_x <= corr_nx - 1
                        and corr_y >= 0 and corr_y <= corr_ny - 1):
                    img_nanomaggies[yy, xx] += corr_image_data[corr_y, corr_x]
                else:
                    # pixels falling outside the real field get no flux
                    img_nanomaggies[yy, xx] = 0.
        if os.access(corr_image_name, 0):
            os.remove(corr_image_name)

        # add field info to image header
        warnings.simplefilter('ignore', category=AstropyWarning)
        header.append(('RUN', run, 'SDSS image RUN'), end=True)
        header.append(('RERUN', rerun, 'SDSS image RERUN'), end=True)
        header.append(('CAMCOL', camcol, 'SDSS image CAMCOL'), end=True)
        header.append(('FIELD', field, 'SDSS image FIELD'), end=True)
        header.append(('RA', float(ra), 'Cutout centroid RA'), end=True)
        header.append(('DEC', float(dec), 'Cutout centroid DEC'), end=True)
        header.append(('COLC', colc, 'SDSS image column center'), end=True)
        header.append(('ROWC', rowc, 'SDSS image row center'), end=True)
        header.append(('GAIN', gain, 'SDSS CCD GAIN'), end=True)
        header.append(('ZERO', aa, 'SDSS image zeropoint'), end=True)
        header.append(('EXTC', kk, 'SDSS image atm. extinction coefficient'), end=True)
        header.append(('AIRM', airmass, 'SDSS image airmass'), end=True)
        header.append(('SKY', sky, 'Average sky in full SDSS field [nanomaggies]'), end=True)
        header.append(('SKYSIG', skysig, 'Average sky uncertainty per pixel [nanomaggies]'), end=True)

    gimage = outputName
    if os.access(gimage, 0):
        os.remove(gimage)

    # print('\nAfter Realism:')
    # print('kpc_per_arcsec: {}'.format(kpc_per_arcsec))
    # print('kpc_per_pixel: {}'.format(kpc_per_pixel))
    # print('arcsec_per_pixel: {}'.format(arcsec_per_pixel))
    # m_AB = -2.5*np.log10(np.sum(img_nanomaggies))+22.5
    # print('AB_magnitude: {} at z={}'.format(m_AB,redshift))
    # M_AB = m_AB-5*np.log10(luminosity_distance.value)-25
    # print('AB_Magnitude: {}'.format(M_AB))

    hdu_pri = fits.PrimaryHDU(img_nanomaggies)
    header['REDSHIFT'] = (redshift, 'Redshift')
    header.append(('COSMO', 'FLAT_LCDM', 'Cosmology'), end=True)
    header.append(('OMEGA_M', cosmo.Om(0), 'Matter density'), end=True)
    header.append(('OMEGA_L', cosmo.Ode(0), 'Dark energy density'), end=True)
    header.append(('SCALE_1', arcsec_per_pixel, '[arcsec/pixel]'), end=True)
    header.append(('SCALE_2', kpc_per_pixel, '[kpc/pixel]'), end=True)
    header.append(('SCALE_3', kpc_per_arcsec, '[kpc/arcsec]'), end=True)
    header.append(('LUMDIST', cosmo.luminosity_distance(z=redshift).value,
                   'Luminosity Distance [Mpc]'), end=True)
    warnings.simplefilter('ignore', category=AstropyWarning)
    header.extend(zip(common_args.keys(), common_args.values()), unique=True)
    hdu_pri.header = header
    hdu_pri.writeto(gimage)
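

# Example usage (sketch): mock-observe one idealized FITS image with a false
# Gaussian PSF and sky at the default CCD scale. The file names below are
# hypothetical and the keyword values only illustrate the common_args pattern
# defined in the function signature above; this is not part of the original
# pipeline.
if __name__ == '__main__':
    example_common_args = {
        'redshift': 0.05,
        'rebin_to_CCD': True,
        'CCD_scale': 0.396,
        'add_false_sky': True,
        'false_sky_sig': 24.2,
        'add_false_psf': True,
        'false_psf_fwhm': 1.4,
        'add_poisson': True,
        'add_sdss_sky': False,
        'add_sdss_psf': False,
    }
    ObsRealism('idealized_galaxy.fits', 'mock_galaxy.fits', band='r',
               common_args=example_common_args)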
def add_sdss_stars(radeg, decdeg, out_sdss, sdss_fields):
    import sqlcl
    import pickle
    import string
    #lines = sqlcl.query("select ra,dec,u,g,r,i,z from star").readlines()
    # 40x40 arcmin box centered on (radeg, decdeg)
    ramin = (radeg - 0.33333)
    ramax = (radeg + 0.33333)
    decmin = (decdeg - 0.33333)
    decmax = (decdeg + 0.33333)
    # clean point sources: BINNED1 set, no bad-photometry flags, and the
    # standard interpolation/deblend error checks
    query = "select clean, ra,dec,raErr,decErr,objID,psfMag_u,psfMag_g,psfMag_r,psfMag_i,psfMag_z,psfMagErr_u,psfMagErr_g,psfMagErr_r,psfMagErr_i,psfMagErr_z,flags_u,flags_g,flags_r,flags_i,flags_z from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " + str(decmax)[:8] + " \
        AND ((flags & 0x10000000) != 0) \
        AND ((flags & 0x8100000800a4) = 0) \
        AND (((flags & 0x400000000000) = 0) or (psfmagerr_g <= 0.2)) \
        AND (((flags & 0x100000000000) = 0) or (flags & 0x1000) = 0) \
        "
    #query = "select top 10 flags_u, flags2_u from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " + str(decmax)[:8] + " "
    #query = "select top 10 psfMagu, psfMagg, psfMagr, psfMagi, psfMagz, from star where ra between " + str(ramin)[:8] + " and " + str(ramax)[:8] + " and dec between " + str(decmin)[:8] + " and " + str(decmax)[:8] + " "
    print query
    lines = sqlcl.query(query).readlines()

    # cache the raw query result
    uu = open('store', 'w')
    pickle.dump(lines, uu)

    # first returned line is the CSV header
    columns = lines[0][:-1].split(',')
    print columns
    data = []
    for line in range(1, len(lines[1:]) + 1):
        dt0 = {}
        for j in range(len(lines[line][:-1].split(','))):
            dt0[columns[j]] = lines[line][:-1].split(',')[j]
        # skip SkyServer error rows (returned as HTML with <font> tags)
        if string.find(lines[line][:-1], 'font') == -1:
            data.append(dt0)
    print len(data)

    if len(data) > 0:
        sdss_fields.write(str(radeg) + " " + str(decdeg) + " 0.3333333 0.3333333\n")
        print len(data)
        seqnr = 1
        for els in range(len(data)):
            # original guard was "if 1 == 1:  # data[els].has_key('u')"
            ra = data[els]['ra']
            dec = data[els]['dec']
            u = data[els]['psfMag_u']
            g = data[els]['psfMag_g']
            r = data[els]['psfMag_r']
            i = data[els]['psfMag_i']
            z = data[els]['psfMag_z']
            uerr = data[els]['psfMagErr_u']
            gerr = data[els]['psfMagErr_g']
            rerr = data[els]['psfMagErr_r']
            ierr = data[els]['psfMagErr_i']
            zerr = data[els]['psfMagErr_z']
            vars = [ra, dec, u, g, r, i, z, uerr, gerr, rerr, ierr, zerr]
            varsstr = reduce(lambda x, y: x + ' ' + y, vars)
            out_sdss.write(varsstr + '\n')
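

# Example usage (sketch): fetch clean stars around a single pointing and
# append them to an output star catalog. The coordinates and file names are
# hypothetical; only the function signature above is taken from the original
# code.
if __name__ == '__main__':
    out_sdss = open('sdss_stars.dat', 'w')       # per-star photometry
    sdss_fields = open('sdss_fields.dat', 'w')   # one line per queried field
    add_sdss_stars(236.19, -0.92, out_sdss, sdss_fields)
    out_sdss.close()
    sdss_fields.close()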
Num = str(sys.argv[2])
lowlim = float(sys.argv[3])
highlim = float(sys.argv[4])

fullcat_name_path = os.path.join(CURRENT_DIR, "Full_SDSS.dat")
trimmedcat_name_path = os.path.join(CURRENT_DIR, "Trimmed_SDSS.dat")
fullcat = open(fullcat_name_path, 'w')
trimmedcat = open(trimmedcat_name_path, 'w')

# cone search around (RA, DEC) returning PSF photometry and IAU names
query = """ SELECT TOP """ + str(Num) + """ cast(str(p.ra,13,8) as float) as ra,cast(str(p.[dec],13,8) as float) as dec,p.psfMag_u,p.psfMag_g,p.psfMag_r,p.psfMag_i,p.psfMag_z,p.psfMagErr_u,p.psfMagErr_g,p.psfMagErr_r,p.psfMagErr_i,p.psfMagErr_z,dbo.fIAUFromEq(p.ra,p.[dec]) as SDSSname FROM ..PhotoObj AS p """ + "JOIN dbo.fGetNearbyObjEq(" + str(RA) + "," + str(DEC) + "," + str(Area) + ") AS b ON b.objID = P.objID"
data = sqlcl.query(query).read()
#print data
print >> fullcat, str(data)
fullcat.close()

############################### slim down #########################
table = numpy.genfromtxt(fullcat_name_path, delimiter=',', dtype=str,
                         skip_header=2, unpack=True)
(RA_star, DEC_star, psfMag_u, psfMag_g, psfMag_r, psfMag_i, psfMag_z,
 psfMagErr_u, psfMagErr_g, psfMagErr_r, psfMagErr_i, psfMagErr_z,
 SDSSname) = table[:]
RA_star = numpy.array(RA_star, dtype=float)
DEC_star = numpy.array(DEC_star, dtype=float)
psfMag_u = numpy.array(psfMag_u, dtype=float)
psfMag_g = numpy.array(psfMag_g, dtype=float)
psfMag_r = numpy.array(psfMag_r, dtype=float)
psfMag_i = numpy.array(psfMag_i, dtype=float)
psfMag_z = numpy.array(psfMag_z, dtype=float)
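
# Sketch of the "trim" step implied by lowlim/highlim and trimmedcat above.
# This is an assumption about the elided remainder of the script, shown only
# as an illustration: keep stars whose r-band PSF magnitude falls inside the
# requested range and write them to Trimmed_SDSS.dat.
keep = (psfMag_r > lowlim) & (psfMag_r < highlim)  # hypothetical selection
for ra_i, dec_i, r_i, name_i in zip(RA_star[keep], DEC_star[keep],
                                    psfMag_r[keep], SDSSname[keep]):
    print >> trimmedcat, ra_i, dec_i, r_i, name_i
trimmedcat.close()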
# require at least ra and dec on the command line
if len(sys.argv) < 3:
    print "This script checks the SDSS PhotoObjAll catalog for all photometric objects\n within a given radius."
    print "usage : sdss_photo_check.py (ra) (dec) -r (search radius)"
    print "\tra : degree"
    print "\tdec : degree"
    print "\tsearch radius : arcsec (optional) (default : 3 arcsec)"
    print "output : ObjId model_u model_g model_r model_i model_z"
    sys.exit()

# default search radius: 3 arcsec, expressed in arcmin for fGetNearbyObjAllEq
search_rad = str(3.0 / 60.0)
optlist, args = getopt.getopt(sys.argv[3:], 'r:')
for o, a in optlist:
    if o == "-r":
        search_rad = str(float(a) / 60.0)

ra = sys.argv[1]
dec = sys.argv[2]

sql_query = "select P.objid, P.modelMag_u, P.modelMag_g, P.modelMag_r, P.modelMag_i, P.modelMag_z from PhotoObjAll P, dbo.fGetNearbyObjAllEq(" + ra + "," + dec + "," + search_rad + ") n where P.objID = n.objID"
query_result = sqlcl.query(sql_query).readlines()

if len(query_result) > 1:
    # first line is the CSV header; report the first matching object
    data_part = string.split(query_result[1], ",")
    output_string = ""
    for x in data_part:
        output_string = output_string + x.strip() + " "
    print output_string
    # throttle requests to the SkyServer
    time.sleep(1.0)
else:
    print "No object found"
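
# Optional extension (sketch, not part of the original script): the block
# above only prints the first matching object. All matches can be handled the
# same way, since the header sits in row 0 and each subsequent row is one
# object in CSV form.
#for row in query_result[1:]:
#    print " ".join([x.strip() for x in string.split(row[:-1], ",")])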
join galaxy as g on s.bestObjID = g.objID where s.ra between " +\
            str(ramin) + " and " + str(ramax) + " and s.dec between " +\
            str(decmin) + " and " + str(decmax)

if objects_mode == "GALPHOT":
    query = "select modelMag_u, modelMag_g, modelMag_r, modelMag_i, \
             modelMag_z, modelMagErr_u, modelMagErr_g, \
             modelMagErr_r, modelMagErr_i, modelMagErr_z, \
             objID, ra, dec, raErr, decErr, flags \
             from galaxy where ra between " + \
            str(ramin) + " and " + str(ramax) + " and dec between " +\
            str(decmin) + " and " + str(decmax) + \
            " AND flags & dbo.fPhotoFlags('BLENDED') = 0 "

# query the SDSS database:
lines = sqlcl.query(query, public_url).readlines()
if len(lines) == 8:
    print "An error occurred during your request; probably"
    print "the selected area is too large"
    sys.exit(1)

# This became necessary due to a change in SDSS!
if catalog == "SDSSDR10":
    START = 2
    columns = lines[1][:-1].split(',')
else:
    START = 1
    columns = lines[0][:-1].split(',')
data = []
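
# Sketch of the row parsing that the START/columns bookkeeping above sets up,
# modeled on the dictionary-per-row pattern used in add_sdss_stars earlier in
# this file. The original loop is not shown here, so treat this only as an
# illustration of the intended structure.
#for line in lines[START:]:
#    values = line[:-1].split(',')
#    if len(values) != len(columns):
#        continue  # skip malformed or error rows
#    data.append(dict(zip(columns, values)))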
for i in range(count, count + queryLimit):
    ra = data[i]['RAJ2000']
    dec = data[i]['DEJ2000']
    if needOR:
        query = query + "\nOR"
    else:
        needOR = True
    query = query + " ((ra BETWEEN " + `ra - region` + " AND " + `ra + region` + ") AND (dec BETWEEN " + `dec - region` + " AND " + `dec + region` + "))"

if printQueries:
    f1 = open("queryText/ppqueryText" + `count` + ".txt", "w")
    f1.write(query)
    f1.close()
    print "written " + `count`

if executeQuery:
    lines = sql.query(query).readlines()
    #res = []
    if lines[0][0:-1] != "#Table1":
        print "INCORRECT FORMAT RETURNED"
    else:
        extra = False
        if len(lines[2:]) > queryLimit:
            print "this query has extras"
            extra = True
        for line in lines[2:]:
            splitLine = line[:-1].split(',')
            arr = [int(splitLine[0]), float(splitLine[1]), float(splitLine[2])]
            if extra:
                arr.append("-----")
            res.append(arr)
        if extra: