def calc_ebv(ra, dec):
    """Return the Galactic E(B-V) reddening at a sky position.

    Queries the IRSA dust service for the Schlegel, Finkbeiner & Davis
    (1998) map value instead of reading local SFD_dust_4096_[ns]gp.fits
    files (the previous local-map implementation was dead, commented-out
    code and has been removed).

    Parameters
    ----------
    ra, dec : float or array-like
        Right ascension and declination in degrees (ICRS frame).
        NOTE(review): only the first coordinate is queried, so a single
        scalar E(B-V) is returned even for array input -- confirm this
        is the intended behavior for array callers.

    Returns
    -------
    ebv : float
        Mean SFD E(B-V) in magnitudes at the (first) input position.
    """
    c = SkyCoord(ra=ra * u.degree, dec=dec * u.degree, frame='icrs')
    # IRSA accepts a sexagesimal position string; to_string('hmsdms')
    # yields one string per coordinate (a list for array input).
    coo = c.to_string('hmsdms')
    table = IrsaDust.get_query_table(coo[0], section='ebv')
    # 'ext SFD mean' is the mean Schlegel et al. (1998) E(B-V) column
    # of the IRSA 'ebv' section table.
    ebv = table['ext SFD mean'][0]
    return ebv
def correct_for_dust(wavelength, ra, dec):
    """Query IRSA dust map for E(B-V) value and returns reddening array

    Parameters
    ----------
    wavelength : numpy array-like
        Wavelength values for which to return reddening, in Angstrom
    ra : float
        Right Ascencion in degrees
    dec : float
        Declination in degrees

    Returns
    -------
    reddening : numpy array
        Multiplicative flux-correction factor (1 / fractional transmission)
        per wavelength, from the Fitzpatrick (2004) R_V = 3.1 curve.
    ebv : float
        The Schlafly & Finkbeiner (2011) E(B-V) used.

    Notes
    -----
    For info on the dust maps, see http://irsa.ipac.caltech.edu/applications/DUST/
    """
    from astroquery.irsa_dust import IrsaDust
    import astropy.coordinates as coord
    import astropy.units as u
    C = coord.SkyCoord(ra * u.deg, dec * u.deg, frame='fk5')
    dust_table = IrsaDust.get_query_table(C, section='ebv', timeout=60)
    # BUG FIX: 'ext SandF ref' is the literature-reference (citation)
    # column of the IRSA table; the numeric Schlafly & Finkbeiner (2011)
    # E(B-V) lives in 'ext SandF mean' (as used elsewhere in this file).
    ebv = dust_table["ext SandF mean"][0]

    from dust_extinction.parameter_averages import F04
    # initialize the Fitzpatrick (2004) Milky Way extinction model
    ext = F04(Rv=3.1)
    # extinguish() returns the fractional transmission; invert it so the
    # caller can multiply an observed spectrum to deredden it.
    reddening = 1 / ext.extinguish(wavelength * u.angstrom, Ebv=ebv)
    return reddening, ebv
def galextinction(file, spec):
    """Deredden *spec* in place using the Galactic SFD E(B-V) at *file*.

    The target name/position is resolved by the IRSA dust service; the
    mean Schlegel et al. (1998) E(B-V) is applied to the spectrum.
    Returns the (mutated) spectrum for convenience.
    """
    target = file
    ext_table = IrsaDust.get_query_table(target, section='ebv')
    sfd_ebv = ext_table['ext SFD mean'][0]
    # deredden() modifies the spectrum object itself.
    spec.deredden(ebv=sfd_ebv)
    return spec
def get_extinction(coords, filters=('PS1_g', 'PS1_r', 'PS1_i')):
    """Return per-filter Galactic extinction A_lambda at *coords*.

    Looks up the A_lambda/E(B-V) coefficients for the requested filters
    in the module-level `extdata` table and scales them by the SFD
    E(B-V) from the IRSA dust service.

    Parameters
    ----------
    coords : str or SkyCoord
        Position accepted by `IrsaDust.get_query_table`.
    filters : sequence of str, optional
        Filter names to look up in `extdata` (default PS1 g, r, i).

    Returns
    -------
    numpy.ndarray
        E(B-V) times the per-filter A/E(B-V) coefficients.
    """
    global extdata
    # Tuple default avoids the shared-mutable-default pitfall; list(...)
    # keeps behavior identical for callers that pass a list.
    filters = Table([list(filters)], names=['filter'])
    kk = join(extdata, filters, keys=['filter'])
    AEBV = np.array(kk['AEBV2'])
    t = IrsaDust.get_query_table(coords, section='ebv')
    # BUG FIX: 'ext SFD ref' is the citation (string) column of the IRSA
    # table; the numeric SFD E(B-V) is in 'ext SFD mean'.
    ebv = np.array(t['ext SFD mean'])
    return ebv * AEBV
def get_ebv(self):
    """Return the Galactic E(B-V) extinction toward this source.

    The line-of-sight reddening is looked up at (self.ra, self.dec)
    via the IRSA dust service.

    Returns
    ----------------
    ebv : float
        Schlafly & Finkbeiner (2011) mean E(B-V) at the source position.
    """
    position = SkyCoord(ra=float(self.ra), dec=float(self.dec),
                        unit=units.deg, frame=FK5)
    dust_table = IrsaDust.get_query_table(position, section='ebv')
    return dust_table['ext SandF mean'][0]
def getebv(self):
    """Given an RA/DEC position return EBV in tables

    Written by MF in Durham, Oct 2014.
    """
    # Queries the IRSA dust service:
    #   http://irsa.ipac.caltech.edu/applications/DUST/
    # The returned table carries mean/std columns for two calibrations:
    #   SandF = Schlafly & Finkbeiner 2011 (ApJ 737, 103)
    #           [ext SandF mean, ext SandF std]
    #   SFD   = Schlegel et al. 1998 (ApJ 500, 525)
    #           [ext SFD mean, ext SFD std]
    if self.coord == -1:
        # -1 is the "unset" sentinel for self.coord.
        print("Error: coordinates must be set")
        exit()
    from astroquery.irsa_dust import IrsaDust
    result = IrsaDust.get_query_table(self.coord, section='ebv')
    self.ebv = result  # cache the table on the instance
    return result
def getebv(self):
    """Given an RA/DEC position return EBV in tables

    Written by MF in Durham, Oct 2014.
    """
    # query IRSA service
    # http://irsa.ipac.caltech.edu/applications/DUST/
    # parse table to get the mean/std of
    # SandF = Schlafly & Finkbeiner 2011 (ApJ 737, 103) [ext SandF mean, ext SandF std]
    # SFD = Schlegel et al. 1998 (ApJ 500, 525) [ext SFD mean, ext SFD std]
    if (self.coord == -1):
        # BUG FIX: the Python 2 print statement here was a SyntaxError
        # under Python 3; use the function form (matches the sibling
        # implementation of this method elsewhere in the file).
        print("Error: coordinates must be set")
        exit()
    from astroquery.irsa_dust import IrsaDust
    ebv = IrsaDust.get_query_table(self.coord, section='ebv')
    self.ebv = ebv
    return ebv
def do_cleanup(catalog):
    """Cleanup catalog after importing all data.

    For every entry: normalizes the preferred name, derives discovery
    dates and coordinates from survey-style alias names, queries IRSA
    for Galactic E(B-V), and derives redshift/velocity/distance/absolute
    magnitude/host-offset quantities from whatever was imported.
    All derived quantities are tagged derived=True.
    """
    task_str = catalog.get_current_task_str()
    # Set preferred names, calculate some columns based on imported data,
    # sanitize some fields
    keys = list(catalog.entries.keys())
    cleanupcnt = 0
    for oname in pbar(keys, task_str):
        # Some events may be merged in cleanup process, skip them if
        # non-existent.
        try:
            name = catalog.add_entry(oname)
        except Exception:
            catalog.log.warning(
                '"{}" was not found, suggests merge occurred in cleanup '
                'process.'.format(oname))
            continue

        # Set the preferred name, switching to that name if name changed.
        name = catalog.entries[name].set_preferred_name()

        aliases = catalog.entries[name].get_aliases()

        catalog.entries[name].purge_bandless_photometry()
        catalog.entries[name].set_first_max_light()

        # --- Derive DISCOVER_DATE from survey-style alias names. Each
        # prefix family encodes the date differently (YYMMDD, YY, YYYYMMDD,
        # YYMM, or a bare year); stop as soon as one alias yields a date.
        if SUPERNOVA.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['MLS', 'SSS', 'CSS', 'GRB ']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2])):
                        # Name encodes YYMMDD after the prefix.
                        discoverdate = ('/'.join([
                            '20' + alias.replace(prefix, '')[:2],
                            alias.replace(prefix, '')[2:4],
                            alias.replace(prefix, '')[4:6]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' +
                                   alias + ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if SUPERNOVA.DISCOVER_DATE in catalog.entries[name]:
                    break
        if SUPERNOVA.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = [
                'ASASSN-', 'PS1-', 'PS1', 'PS', 'iPTF', 'PTF', 'SCP-',
                'SNLS-', 'SPIRITS', 'LSQ', 'DES', 'SNHiTS', 'Gaia', 'GND',
                'GNW', 'GSD', 'GSW', 'EGS', 'COS', 'OGLE', 'HST'
            ]
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2]) and
                            is_number(alias.replace(prefix, '')[:1])):
                        # Name encodes a two-digit year only.
                        discoverdate = '20' + alias.replace(prefix, '')[:2]
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' +
                                   alias + ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if SUPERNOVA.DISCOVER_DATE in catalog.entries[name]:
                    break
        if SUPERNOVA.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['SNF']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:4])):
                        # SNF names encode the full YYYYMMDD.
                        discoverdate = ('/'.join([
                            alias.replace(prefix, '')[:4],
                            alias.replace(prefix, '')[4:6],
                            alias.replace(prefix, '')[6:8]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' +
                                   alias + ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if SUPERNOVA.DISCOVER_DATE in catalog.entries[name]:
                    break
        if SUPERNOVA.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['PTFS', 'SNSDF']
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:2])):
                        # Name encodes YYMM (year and month only).
                        discoverdate = ('/'.join([
                            '20' + alias.replace(prefix, '')[:2],
                            alias.replace(prefix, '')[2:4]
                        ]))
                        if catalog.args.verbose:
                            tprint('Added discoverdate from name [' +
                                   alias + ']: ' + discoverdate)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.DISCOVER_DATE,
                            discoverdate,
                            source,
                            derived=True)
                        break
                if SUPERNOVA.DISCOVER_DATE in catalog.entries[name]:
                    break
        if SUPERNOVA.DISCOVER_DATE not in catalog.entries[name]:
            prefixes = ['AT', 'SN', 'OGLE-', 'SM ', 'KSN']
            for alias in aliases:
                for prefix in prefixes:
                    if alias.startswith(prefix):
                        # Accept only aliases with exactly one digit run,
                        # sitting right after the prefix, that looks like
                        # a plausible year.
                        year = re.findall(r'\d+', alias)
                        if len(year) == 1:
                            year = year[0]
                        else:
                            continue
                        if alias.replace(prefix, '').index(year) != 0:
                            continue
                        if (year and is_number(year) and '.' not in year and
                                len(year) <= 4):
                            discoverdate = year
                            if catalog.args.verbose:
                                tprint('Added discoverdate from name [' +
                                       alias + ']: ' + discoverdate)
                            source = catalog.entries[name].add_self_source()
                            catalog.entries[name].add_quantity(
                                SUPERNOVA.DISCOVER_DATE,
                                discoverdate,
                                source,
                                derived=True)
                            break
                if SUPERNOVA.DISCOVER_DATE in catalog.entries[name]:
                    break

        # --- Derive RA/DEC from coordinate-bearing alias names
        # (e.g. 'PSN JHHMMSSss+DDMMSSs').
        if (SUPERNOVA.RA not in catalog.entries[name] or
                SUPERNOVA.DEC not in catalog.entries[name]):
            prefixes = [
                'PSN J', 'MASJ', 'CSS', 'SSS', 'MASTER OT J', 'HST J',
                'TCP J', 'MACS J', '2MASS J', 'EQ J', 'CRTS J', 'SMT J'
            ]
            for alias in aliases:
                for prefix in prefixes:
                    if (alias.startswith(prefix) and
                            is_number(alias.replace(prefix, '')[:6])):
                        noprefix = alias.split(':')[-1].replace(
                            prefix, '').replace('.', '')
                        decsign = '+' if '+' in noprefix else '-'
                        # Split the packed digits on the declination sign.
                        noprefix = noprefix.replace('+', '|').replace(
                            '-', '|')
                        nops = noprefix.split('|')
                        if len(nops) < 2:
                            continue
                        rastr = nops[0]
                        decstr = nops[1]
                        ra = ':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) + \
                            ('.' + rastr[6:] if len(rastr) > 6 else '')
                        dec = (decsign + ':'.join(
                            [decstr[:2], decstr[2:4], decstr[4:6]]) +
                            ('.' + decstr[6:] if len(decstr) > 6 else ''))
                        if catalog.args.verbose:
                            tprint('Added ra/dec from name: ' + ra + ' ' +
                                   dec)
                        source = catalog.entries[name].add_self_source()
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.RA, ra, source, derived=True)
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.DEC, dec, source, derived=True)
                        break
                if SUPERNOVA.RA in catalog.entries[name]:
                    break

        # --- Galactic E(B-V) via IRSA, cached in catalog.extinctions_dict.
        # Skipped for events hosted in the Milky Way.
        no_host = (SUPERNOVA.HOST not in catalog.entries[name] or
                   not any([
                       x[QUANTITY.VALUE] == 'Milky Way'
                       for x in catalog.entries[name][SUPERNOVA.HOST]
                   ]))
        if (SUPERNOVA.RA in catalog.entries[name] and
                SUPERNOVA.DEC in catalog.entries[name] and no_host):
            from astroquery.irsa_dust import IrsaDust
            if name not in catalog.extinctions_dict:
                try:
                    ra_dec = catalog.entries[name][
                        SUPERNOVA.RA][0][QUANTITY.VALUE] + \
                        " " + \
                        catalog.entries[name][SUPERNOVA.DEC][0][QUANTITY.VALUE]
                    result = IrsaDust.get_query_table(ra_dec, section='ebv')
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    # Network/lookup failure: leave the cache empty and
                    # move on rather than aborting the whole cleanup.
                    warnings.warn("Coordinate lookup for " + name +
                                  " failed in IRSA.")
                else:
                    ebv = result['ext SandF mean'][0]
                    ebverr = result['ext SandF std'][0]
                    catalog.extinctions_dict[name] = [ebv, ebverr]
            if name in catalog.extinctions_dict:
                # Credit Schlafly & Finkbeiner (2011) for the values.
                sources = uniq_cdl([
                    catalog.entries[name].add_self_source(),
                    catalog.entries[name].add_source(
                        bibcode='2011ApJ...737..103S')
                ])
                (catalog.entries[name].add_quantity(
                    SUPERNOVA.EBV,
                    str(catalog.extinctions_dict[name][0]),
                    sources,
                    e_value=str(catalog.extinctions_dict[name][1]),
                    derived=True))

        # --- Derive host RA/DEC from coordinate-bearing host names.
        if ((SUPERNOVA.HOST in catalog.entries[name] and
             (SUPERNOVA.HOST_RA not in catalog.entries[name] or
              SUPERNOVA.HOST_DEC not in catalog.entries[name]))):
            for host in catalog.entries[name][SUPERNOVA.HOST]:
                alias = host[QUANTITY.VALUE]
                if ' J' in alias and is_number(alias.split(' J')[-1][:6]):
                    noprefix = alias.split(' J')[-1].split(':')[-1].replace(
                        '.', '')
                    decsign = '+' if '+' in noprefix else '-'
                    noprefix = noprefix.replace('+', '|').replace('-', '|')
                    nops = noprefix.split('|')
                    if len(nops) < 2:
                        continue
                    rastr = nops[0]
                    decstr = nops[1]
                    hostra = (':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) +
                              ('.' + rastr[6:] if len(rastr) > 6 else ''))
                    hostdec = decsign + ':'.join([
                        decstr[:2], decstr[2:4], decstr[4:6]
                    ]) + ('.' + decstr[6:] if len(decstr) > 6 else '')
                    if catalog.args.verbose:
                        tprint('Added hostra/hostdec from name: ' + hostra +
                               ' ' + hostdec)
                    source = catalog.entries[name].add_self_source()
                    catalog.entries[name].add_quantity(
                        SUPERNOVA.HOST_RA, hostra, source, derived=True)
                    catalog.entries[name].add_quantity(
                        SUPERNOVA.HOST_DEC, hostdec, source, derived=True)
                    break
                if SUPERNOVA.HOST_RA in catalog.entries[name]:
                    break

        # --- Redshift from the highest-precision heliocentric velocity.
        if (SUPERNOVA.REDSHIFT not in catalog.entries[name] and
                SUPERNOVA.VELOCITY in catalog.entries[name]):
            # Find the "best" velocity to use for this
            bestsig = 0
            for hv in catalog.entries[name][SUPERNOVA.VELOCITY]:
                sig = get_sig_digits(hv[QUANTITY.VALUE])
                if sig > bestsig:
                    besthv = hv[QUANTITY.VALUE]
                    bestsrc = hv['source']
                    bestsig = sig
            if bestsig > 0 and is_number(besthv):
                # voc = v/c (velocity assumed in km/s; CLIGHT in cgs).
                voc = float(besthv) * 1.e5 / CLIGHT
                source = catalog.entries[name].add_self_source()
                sources = uniq_cdl([source] + bestsrc.split(','))
                # Relativistic Doppler: z = sqrt((1+b)/(1-b)) - 1.
                (catalog.entries[name].add_quantity(
                    SUPERNOVA.REDSHIFT,
                    pretty_num(
                        sqrt((1. + voc) / (1. - voc)) - 1., sig=bestsig),
                    sources,
                    kind='heliocentric',
                    derived=True))

        # --- Redshift from the median NED-D host distance.
        if (SUPERNOVA.REDSHIFT not in catalog.entries[name] and
                len(catalog.nedd_dict) > 0 and
                SUPERNOVA.HOST in catalog.entries[name]):
            reference = "NED-D"
            refurl = "http://ned.ipac.caltech.edu/Library/Distances/"
            refbib = "1991ASSL..171...89H"
            for host in catalog.entries[name][SUPERNOVA.HOST]:
                if host[QUANTITY.VALUE] in catalog.nedd_dict:
                    source = catalog.entries[name].add_source(
                        bibcode='2016A&A...594A..13P')
                    secondarysource = catalog.entries[name].add_source(
                        name=reference,
                        url=refurl,
                        bibcode=refbib,
                        secondary=True)
                    meddist = statistics.median(
                        catalog.nedd_dict[host[QUANTITY.VALUE]])
                    # Invert the comoving-distance relation to get z.
                    redz = z_at_value(cosmo.comoving_distance,
                                      float(meddist) * un.Mpc)
                    redshift = pretty_num(
                        redz, sig=get_sig_digits(str(meddist)))
                    catalog.entries[name].add_quantity(
                        [SUPERNOVA.REDSHIFT, SUPERNOVA.HOST_REDSHIFT],
                        redshift,
                        uniq_cdl([source, secondarysource]),
                        kind='host',
                        derived=True)

        # --- Absolute magnitude from apparent magnitude + lum. distance.
        if (SUPERNOVA.MAX_ABS_MAG not in catalog.entries[name] and
                SUPERNOVA.MAX_APP_MAG in catalog.entries[name] and
                SUPERNOVA.LUM_DIST in catalog.entries[name]):
            # Find the "best" distance to use for this
            bestsig = 0
            for ld in catalog.entries[name][SUPERNOVA.LUM_DIST]:
                sig = get_sig_digits(ld[QUANTITY.VALUE])
                if sig > bestsig:
                    bestld = ld[QUANTITY.VALUE]
                    bestsrc = ld[QUANTITY.SOURCE]
                    bestsig = sig
            if bestsig > 0 and is_number(bestld) and float(bestld) > 0.:
                source = catalog.entries[name].add_self_source()
                sources = uniq_cdl([source] + bestsrc.split(','))
                bestldz = z_at_value(cosmo.luminosity_distance,
                                     float(bestld) * un.Mpc)
                # Distance modulus plus a 2.5*log10(1+z) K-correction-like
                # term (bestld in Mpc -> pc via 1.0e6).
                pnum = (float(catalog.entries[name][SUPERNOVA.MAX_APP_MAG][0][
                    QUANTITY.VALUE]) - 5.0 *
                    (log10(float(bestld) * 1.0e6) - 1.0) +
                    2.5 * log10(1.0 + bestldz))
                pnum = pretty_num(pnum, sig=bestsig + 1)
                catalog.entries[name].add_quantity(
                    SUPERNOVA.MAX_ABS_MAG, pnum, sources, derived=True)
        if (SUPERNOVA.MAX_VISUAL_ABS_MAG not in catalog.entries[name] and
                SUPERNOVA.MAX_VISUAL_APP_MAG in catalog.entries[name] and
                SUPERNOVA.LUM_DIST in catalog.entries[name]):
            # Find the "best" distance to use for this
            bestsig = 0
            for ld in catalog.entries[name][SUPERNOVA.LUM_DIST]:
                sig = get_sig_digits(ld[QUANTITY.VALUE])
                if sig > bestsig:
                    bestld = ld[QUANTITY.VALUE]
                    bestsrc = ld[QUANTITY.SOURCE]
                    bestsig = sig
            if bestsig > 0 and is_number(bestld) and float(bestld) > 0.:
                source = catalog.entries[name].add_self_source()
                sources = uniq_cdl([source] + bestsrc.split(','))
                # FIX: what's happening here?!
                # NOTE(review): unlike MAX_ABS_MAG above, no 1+z term is
                # applied for the visual magnitude -- confirm intended.
                pnum = (float(catalog.entries[name][
                    SUPERNOVA.MAX_VISUAL_APP_MAG][0][QUANTITY.VALUE]) - 5.0 *
                    (log10(float(bestld) * 1.0e6) - 1.0))
                pnum = pretty_num(pnum, sig=bestsig + 1)
                catalog.entries[name].add_quantity(
                    SUPERNOVA.MAX_VISUAL_ABS_MAG, pnum, sources, derived=True)

        # --- Quantities derivable from the best available redshift.
        if SUPERNOVA.REDSHIFT in catalog.entries[name]:
            # Find the "best" redshift to use for this
            bestz, bestkind, bestsig, bestsrc = catalog.entries[
                name].get_best_redshift()
            if bestsig > 0:
                try:
                    bestz = float(bestz)
                except Exception:
                    print(catalog.entries[name])
                    raise
                if SUPERNOVA.VELOCITY not in catalog.entries[name]:
                    source = catalog.entries[name].add_self_source()
                    # FIX: what's happening here?!
                    # Inverse relativistic Doppler; CLIGHT/KM converts to
                    # km/s (presumably -- units set elsewhere; verify).
                    pnum = CLIGHT / KM * \
                        ((bestz + 1.)**2. - 1.) / ((bestz + 1.)**2. + 1.)
                    pnum = pretty_num(pnum, sig=bestsig)
                    catalog.entries[name].add_quantity(
                        SUPERNOVA.VELOCITY,
                        pnum,
                        source,
                        kind=(SUPERNOVA.VELOCITY.kind_preference[bestkind]
                              if bestkind else ''))
                if bestz > 0.:
                    if SUPERNOVA.LUM_DIST not in catalog.entries[name]:
                        dl = cosmo.luminosity_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name].add_source(
                                bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.LUM_DIST,
                            pretty_num(dl.value, sig=bestsig + 1),
                            sources,
                            kind=(SUPERNOVA.LUM_DIST.kind_preference[bestkind]
                                  if bestkind else ''),
                            derived=True)
                        # Absolute magnitudes from the freshly derived dl.
                        if (SUPERNOVA.MAX_ABS_MAG not in
                                catalog.entries[name] and
                                SUPERNOVA.MAX_APP_MAG in
                                catalog.entries[name]):
                            source = catalog.entries[name].add_self_source()
                            pnum = pretty_num(
                                float(catalog.entries[name][
                                    SUPERNOVA.MAX_APP_MAG][0][
                                        QUANTITY.VALUE]) - 5.0 *
                                (log10(dl.to('pc').value) - 1.0) +
                                2.5 * log10(1.0 + bestz),
                                sig=bestsig + 1)
                            catalog.entries[name].add_quantity(
                                SUPERNOVA.MAX_ABS_MAG,
                                pnum,
                                sources,
                                derived=True)
                        if (SUPERNOVA.MAX_VISUAL_ABS_MAG not in
                                catalog.entries[name] and
                                SUPERNOVA.MAX_VISUAL_APP_MAG in
                                catalog.entries[name]):
                            source = catalog.entries[name].add_self_source()
                            pnum = pretty_num(
                                float(catalog.entries[name][
                                    SUPERNOVA.MAX_VISUAL_APP_MAG][0][
                                        QUANTITY.VALUE]) - 5.0 *
                                (log10(dl.to('pc').value) - 1.0),
                                sig=bestsig + 1)
                            catalog.entries[name].add_quantity(
                                SUPERNOVA.MAX_VISUAL_ABS_MAG,
                                pnum,
                                sources,
                                derived=True)
                    if SUPERNOVA.COMOVING_DIST not in catalog.entries[name]:
                        cd = cosmo.comoving_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name].add_source(
                                bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.COMOVING_DIST,
                            pretty_num(cd.value, sig=bestsig),
                            sources,
                            derived=True)

        # --- Same derivations, but for the host's redshift.
        if SUPERNOVA.HOST_REDSHIFT in catalog.entries[name]:
            # Find the "best" redshift to use for this
            bestz, bestkind, bestsig, bestsrc = catalog.entries[
                name].get_best_redshift(SUPERNOVA.HOST_REDSHIFT)
            if bestsig > 0:
                try:
                    bestz = float(bestz)
                except Exception:
                    print(catalog.entries[name])
                    raise
                if SUPERNOVA.HOST_VELOCITY not in catalog.entries[name]:
                    source = catalog.entries[name].add_self_source()
                    # FIX: what's happening here?!
                    pnum = CLIGHT / KM * \
                        ((bestz + 1.)**2. - 1.) / ((bestz + 1.)**2. + 1.)
                    pnum = pretty_num(pnum, sig=bestsig)
                    catalog.entries[name].add_quantity(
                        SUPERNOVA.HOST_VELOCITY,
                        pnum,
                        source,
                        kind=(SUPERNOVA.HOST_VELOCITY.kind_preference[
                            bestkind] if bestkind else ''))
                if bestz > 0.:
                    if SUPERNOVA.HOST_LUM_DIST not in catalog.entries[name]:
                        dl = cosmo.luminosity_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name].add_source(
                                bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.HOST_LUM_DIST,
                            pretty_num(dl.value, sig=bestsig + 1),
                            sources,
                            kind=(SUPERNOVA.HOST_LUM_DIST.kind_preference[
                                bestkind] if bestkind else ''),
                            derived=True)
                    if SUPERNOVA.HOST_COMOVING_DIST not in catalog.entries[
                            name]:
                        cd = cosmo.comoving_distance(bestz)
                        sources = [
                            catalog.entries[name].add_self_source(),
                            catalog.entries[name].add_source(
                                bibcode='2016A&A...594A..13P')
                        ]
                        sources = uniq_cdl(sources + bestsrc.split(','))
                        catalog.entries[name].add_quantity(
                            SUPERNOVA.HOST_COMOVING_DIST,
                            pretty_num(cd.value, sig=bestsig),
                            sources,
                            derived=True)

        # --- Angular and projected host offsets from event/host coords.
        if all([
                x in catalog.entries[name]
                for x in [
                    SUPERNOVA.RA, SUPERNOVA.DEC, SUPERNOVA.HOST_RA,
                    SUPERNOVA.HOST_DEC
                ]
        ]):
            # For now just using first coordinates that appear in entry
            try:
                c1 = coord(
                    ra=catalog.entries[name][SUPERNOVA.RA][0][QUANTITY.VALUE],
                    dec=catalog.entries[name][SUPERNOVA.DEC][0][
                        QUANTITY.VALUE],
                    unit=(un.hourangle, un.deg))
                c2 = coord(
                    ra=catalog.entries[name][SUPERNOVA.HOST_RA][0][
                        QUANTITY.VALUE],
                    dec=catalog.entries[name][SUPERNOVA.HOST_DEC][0][
                        QUANTITY.VALUE],
                    unit=(un.hourangle, un.deg))
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                # Unparseable coordinates: silently skip the offsets.
                pass
            else:
                sources = uniq_cdl(
                    [catalog.entries[name].add_self_source()] +
                    catalog.entries[name][SUPERNOVA.RA][0][
                        QUANTITY.SOURCE].split(',') +
                    catalog.entries[name][SUPERNOVA.DEC][0][
                        QUANTITY.SOURCE].split(',') +
                    catalog.entries[name][SUPERNOVA.HOST_RA][0][
                        QUANTITY.SOURCE].split(',') +
                    catalog.entries[name][SUPERNOVA.HOST_DEC][0]
                    [QUANTITY.SOURCE].split(','))
                if SUPERNOVA.HOST_OFFSET_ANG not in catalog.entries[name]:
                    hosa = Decimal(c1.separation(c2).arcsecond)
                    hosa = pretty_num(hosa)
                    catalog.entries[name].add_quantity(
                        SUPERNOVA.HOST_OFFSET_ANG,
                        hosa,
                        sources,
                        derived=True,
                        u_value='arcseconds')
                if (SUPERNOVA.COMOVING_DIST in catalog.entries[name] and
                        SUPERNOVA.REDSHIFT in catalog.entries[name] and
                        SUPERNOVA.HOST_OFFSET_DIST not in
                        catalog.entries[name]):
                    offsetsig = get_sig_digits(catalog.entries[name][
                        SUPERNOVA.HOST_OFFSET_ANG][0][QUANTITY.VALUE])
                    sources = uniq_cdl(
                        sources.split(',') +
                        (catalog.entries[name][SUPERNOVA.COMOVING_DIST][0][
                            QUANTITY.SOURCE]).split(',') +
                        (catalog.entries[name][SUPERNOVA.REDSHIFT][0][
                            QUANTITY.SOURCE]).split(','))
                    # arcsec -> radians, times comoving distance (Mpc ->
                    # kpc via 1000), de-projected by 1/(1+z).
                    (catalog.entries[name].add_quantity(
                        SUPERNOVA.HOST_OFFSET_DIST,
                        pretty_num(
                            float(catalog.entries[name][
                                SUPERNOVA.HOST_OFFSET_ANG][0][
                                    QUANTITY.VALUE]) / 3600. * (pi / 180.) *
                            float(catalog.entries[name][
                                SUPERNOVA.COMOVING_DIST][0][
                                    QUANTITY.VALUE]) * 1000. /
                            (1.0 + float(catalog.entries[name][
                                SUPERNOVA.REDSHIFT][0][QUANTITY.VALUE])),
                            sig=offsetsig), sources))

        catalog.entries[name].sanitize()
        catalog.journal_entries(bury=True, final=True, gz=True)
        cleanupcnt = cleanupcnt + 1
        # On CI (travis) only process a limited sample of entries.
        if catalog.args.travis and cleanupcnt % 1000 == 0:
            break

    catalog.save_caches()

    return
filts.append(eff) ax1.fill(wlr,eff,label=f.split('.')[0],alpha=.5,edgecolor="none") ax1.axhline(spec,color="black",lw=3,alpha=.5) ax1.set_ylabel("Throughput") ax1.axes.get_xaxis().set_visible(False) corrections=np.empty((len(filters),len(coords))) mags_notred=np.empty(len(filters)) mags_red=np.empty((len(filters),len(coords))) alambdas=[ [[] for _ in coords] for _ in filts] # the following loop queries the IrsaDust database to obtain A_v according to S&F # and converts it to A_lambda following the Fitzpatrick law for i,c in enumerate(coords): C = coord.SkyCoord(c,frame="fk5") table=IrsaDust.get_query_table(c, radius=2.0 * u.deg) a_v=table["ext SandF mean"] al_plot=f99(wl,a_v.data[0]*3.1) for j,f in enumerate(filts): alambdas[j][i]=f99(wls[j],a_v.data[0]*3.1) ax2.plot(wl,al_plot,label="D"+str(i+1)) ax2.set_xlabel(r"$\lambda$ in $\rm \AA$") ax2.set_ylabel("Extinction in magnitudes") alambdas=np.array(alambdas) # the following loop calculates the magnitudes of the flat f_nu spectra for j,f in enumerate(filts): diffs=np.gradient(wls[j]) flux=sum(wls[j]*spec*f*diffs) norm=sum(f*diffs/wls[j]) for k,c in enumerate(coords):
def do_cleanup(catalog): """Task to cleanup catalog before final write.""" task_str = catalog.get_current_task_str() # Set preferred names, calculate some columns based on imported data, # sanitize some fields keys = catalog.entries.copy().keys() cleanupcnt = 0 for oname in pbar(keys, task_str): name = catalog.add_entry(oname) # Set the preferred name, switching to that name if name changed. name = catalog.entries[name].set_preferred_name() aliases = catalog.entries[name].get_aliases() catalog.entries[name].set_first_max_light() if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]: prefixes = ['MLS', 'SSS', 'CSS', 'GRB '] for alias in aliases: for prefix in prefixes: if (alias.startswith(prefix) and is_number(alias.replace(prefix, '')[:2])): discoverdate = ('/'.join([ '20' + alias.replace(prefix, '')[:2], alias.replace(prefix, '')[2:4], alias.replace(prefix, '')[4:6] ])) if catalog.args.verbose: tprint('Added discoverdate from name [' + alias + ']: ' + discoverdate) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.DISCOVER_DATE, discoverdate, source, derived=True) break if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]: break if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]: prefixes = [ 'ASASSN-', 'PS1-', 'PS1', 'PS', 'iPTF', 'PTF', 'SCP-', 'SNLS-', 'SPIRITS', 'LSQ', 'DES', 'SNHiTS', 'Gaia', 'GND', 'GNW', 'GSD', 'GSW', 'EGS', 'COS', 'OGLE', 'HST' ] for alias in aliases: for prefix in prefixes: if (alias.startswith(prefix) and is_number(alias.replace(prefix, '')[:2]) and is_number(alias.replace(prefix, '')[:1])): discoverdate = '20' + alias.replace(prefix, '')[:2] if catalog.args.verbose: tprint('Added discoverdate from name [' + alias + ']: ' + discoverdate) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.DISCOVER_DATE, discoverdate, source, derived=True) break if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]: break if 
TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]: prefixes = ['SNF'] for alias in aliases: for prefix in prefixes: if (alias.startswith(prefix) and is_number(alias.replace(prefix, '')[:4])): discoverdate = ('/'.join([ alias.replace(prefix, '')[:4], alias.replace(prefix, '')[4:6], alias.replace(prefix, '')[6:8] ])) if catalog.args.verbose: tprint('Added discoverdate from name [' + alias + ']: ' + discoverdate) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.DISCOVER_DATE, discoverdate, source, derived=True) break if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]: break if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]: prefixes = ['PTFS', 'SNSDF'] for alias in aliases: for prefix in prefixes: if (alias.startswith(prefix) and is_number(alias.replace(prefix, '')[:2])): discoverdate = ('/'.join([ '20' + alias.replace(prefix, '')[:2], alias.replace(prefix, '')[2:4] ])) if catalog.args.verbose: tprint('Added discoverdate from name [' + alias + ']: ' + discoverdate) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.DISCOVER_DATE, discoverdate, source, derived=True) break if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]: break if TIDALDISRUPTION.DISCOVER_DATE not in catalog.entries[name]: prefixes = ['AT', 'SN', 'OGLE-', 'SM ', 'KSN-'] for alias in aliases: for prefix in prefixes: if alias.startswith(prefix): year = re.findall(r'\d+', alias) if len(year) == 1: year = year[0] else: continue if alias.replace(prefix, '').index(year) != 0: continue if (year and is_number(year) and '.' 
not in year and len(year) <= 4): discoverdate = year if catalog.args.verbose: tprint('Added discoverdate from name [' + alias + ']: ' + discoverdate) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.DISCOVER_DATE, discoverdate, source, derived=True) break if TIDALDISRUPTION.DISCOVER_DATE in catalog.entries[name]: break if (TIDALDISRUPTION.RA not in catalog.entries[name] or TIDALDISRUPTION.DEC not in catalog.entries[name]): prefixes = [ 'PSN J', 'MASJ', 'CSS', 'SSS', 'MASTER OT J', 'HST J', 'TCP J', 'MACS J', '2MASS J', 'EQ J', 'CRTS J', 'SMT J' ] for alias in aliases: for prefix in prefixes: if (alias.startswith(prefix) and is_number(alias.replace(prefix, '')[:6])): noprefix = alias.split(':')[-1].replace( prefix, '').replace('.', '') decsign = '+' if '+' in noprefix else '-' noprefix = noprefix.replace('+', '|').replace('-', '|') nops = noprefix.split('|') if len(nops) < 2: continue rastr = nops[0] decstr = nops[1] ra = ':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) + \ ('.' + rastr[6:] if len(rastr) > 6 else '') dec = (decsign + ':'.join( [decstr[:2], decstr[2:4], decstr[4:6]]) + ('.' 
+ decstr[6:] if len(decstr) > 6 else '')) if catalog.args.verbose: tprint('Added ra/dec from name: ' + ra + ' ' + dec) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.RA, ra, source, derived=True) catalog.entries[name].add_quantity( TIDALDISRUPTION.DEC, dec, source, derived=True) break if TIDALDISRUPTION.RA in catalog.entries[name]: break no_host = (TIDALDISRUPTION.HOST not in catalog.entries[name] or not any([ x[QUANTITY.VALUE] == 'Milky Way' for x in catalog.entries[name][TIDALDISRUPTION.HOST] ])) if (TIDALDISRUPTION.RA in catalog.entries[name] and TIDALDISRUPTION.DEC in catalog.entries[name] and no_host): from astroquery.irsa_dust import IrsaDust if name not in catalog.extinctions_dict: try: ra_dec = (catalog.entries[name][TIDALDISRUPTION.RA][0][ QUANTITY.VALUE] + " " + catalog.entries[name][ TIDALDISRUPTION.DEC][0][QUANTITY.VALUE]) result = IrsaDust.get_query_table(ra_dec, section='ebv') except (KeyboardInterrupt, SystemExit): raise except Exception: warnings.warn("Coordinate lookup for " + name + " failed in IRSA.") else: ebv = result['ext SandF mean'][0] ebverr = result['ext SandF std'][0] catalog.extinctions_dict[name] = [ebv, ebverr] if name in catalog.extinctions_dict: sources = uniq_cdl([ catalog.entries[name].add_self_source(), catalog.entries[name] .add_source(bibcode='2011ApJ...737..103S') ]) (catalog.entries[name].add_quantity( TIDALDISRUPTION.EBV, str(catalog.extinctions_dict[name][0]), sources, e_value=str(catalog.extinctions_dict[name][1]), derived=True)) if ((TIDALDISRUPTION.HOST in catalog.entries[name] and (TIDALDISRUPTION.HOST_RA not in catalog.entries[name] or TIDALDISRUPTION.HOST_DEC not in catalog.entries[name]))): for host in catalog.entries[name][TIDALDISRUPTION.HOST]: alias = host[QUANTITY.VALUE] if ' J' in alias and is_number(alias.split(' J')[-1][:6]): noprefix = alias.split(' J')[-1].split(':')[-1].replace( '.', '') decsign = '+' if '+' in noprefix else '-' noprefix = 
noprefix.replace('+', '|').replace('-', '|') nops = noprefix.split('|') if len(nops) < 2: continue rastr = nops[0] decstr = nops[1] hostra = (':'.join([rastr[:2], rastr[2:4], rastr[4:6]]) + ('.' + rastr[6:] if len(rastr) > 6 else '')) hostdec = decsign + ':'.join([ decstr[:2], decstr[2:4], decstr[4:6] ]) + ('.' + decstr[6:] if len(decstr) > 6 else '') if catalog.args.verbose: tprint('Added hostra/hostdec from name: ' + hostra + ' ' + hostdec) source = catalog.entries[name].add_self_source() catalog.entries[name].add_quantity( TIDALDISRUPTION.HOST_RA, hostra, source, derived=True) catalog.entries[name].add_quantity( TIDALDISRUPTION.HOST_DEC, hostdec, source, derived=True) break if TIDALDISRUPTION.HOST_RA in catalog.entries[name]: break if (TIDALDISRUPTION.REDSHIFT not in catalog.entries[name] and TIDALDISRUPTION.VELOCITY in catalog.entries[name]): # Find the "best" velocity to use for this bestsig = 0 for hv in catalog.entries[name][TIDALDISRUPTION.VELOCITY]: sig = get_sig_digits(hv[QUANTITY.VALUE]) if sig > bestsig: besthv = hv[QUANTITY.VALUE] bestsrc = hv['source'] bestsig = sig if bestsig > 0 and is_number(besthv): voc = float(besthv) * 1.e5 / CLIGHT source = catalog.entries[name].add_self_source() sources = uniq_cdl([source] + bestsrc.split(',')) (catalog.entries[name].add_quantity( TIDALDISRUPTION.REDSHIFT, pretty_num( sqrt((1. + voc) / (1. 
- voc)) - 1., sig=bestsig), sources, kind='heliocentric', derived=True)) if (TIDALDISRUPTION.REDSHIFT not in catalog.entries[name] and len(catalog.nedd_dict) > 0 and TIDALDISRUPTION.HOST in catalog.entries[name]): reference = "NED-D" refurl = "http://ned.ipac.caltech.edu/Library/Distances/" for host in catalog.entries[name][TIDALDISRUPTION.HOST]: if host[QUANTITY.VALUE] in catalog.nedd_dict: source = catalog.entries[name].add_source( bibcode='2016A&A...594A..13P') secondarysource = catalog.entries[name].add_source( name=reference, url=refurl, secondary=True) meddist = statistics.median(catalog.nedd_dict[host[ QUANTITY.VALUE]]) redz = z_at_value(cosmo.comoving_distance, float(meddist) * un.Mpc) redshift = pretty_num( redz, sig=get_sig_digits(str(meddist))) catalog.entries[name].add_quantity( TIDALDISRUPTION.REDSHIFT, redshift, uniq_cdl([source, secondarysource]), kind='host', derived=True) if (TIDALDISRUPTION.MAX_ABS_MAG not in catalog.entries[name] and TIDALDISRUPTION.MAX_APP_MAG in catalog.entries[name] and TIDALDISRUPTION.LUM_DIST in catalog.entries[name]): # Find the "best" distance to use for this bestsig = 0 for ld in catalog.entries[name][TIDALDISRUPTION.LUM_DIST]: sig = get_sig_digits(ld[QUANTITY.VALUE]) if sig > bestsig: bestld = ld[QUANTITY.VALUE] bestsrc = ld['source'] bestsig = sig if bestsig > 0 and is_number(bestld) and float(bestld) > 0.: source = catalog.entries[name].add_self_source() sources = uniq_cdl([source] + bestsrc.split(',')) bestldz = z_at_value(cosmo.luminosity_distance, float(bestld) * un.Mpc) pnum = (float(catalog.entries[name][ TIDALDISRUPTION.MAX_APP_MAG][0][QUANTITY.VALUE]) - 5.0 * (log10(float(bestld) * 1.0e6) - 1.0 ) + 2.5 * log10(1.0 + bestldz)) pnum = pretty_num(pnum, sig=bestsig) catalog.entries[name].add_quantity( TIDALDISRUPTION.MAX_ABS_MAG, pnum, sources, derived=True) if TIDALDISRUPTION.REDSHIFT in catalog.entries[name]: # Find the "best" redshift to use for this bestz, bestkind, bestsig, bestsrc = catalog.entries[ 
name].get_best_redshift() if bestsig > 0: try: bestz = float(bestz) except Exception: print(catalog.entries[name]) raise if TIDALDISRUPTION.VELOCITY not in catalog.entries[name]: source = catalog.entries[name].add_self_source() # FIX: what's happening here?! pnum = CLIGHT / KM * \ ((bestz + 1.)**2. - 1.) / ((bestz + 1.)**2. + 1.) pnum = pretty_num(pnum, sig=bestsig) catalog.entries[name].add_quantity( TIDALDISRUPTION.VELOCITY, pnum, source, kind=PREF_KINDS[bestkind], derived=True) if bestz > 0.: from astropy.cosmology import Planck15 as cosmo if TIDALDISRUPTION.LUM_DIST not in catalog.entries[name]: dl = cosmo.luminosity_distance(bestz) sources = [ catalog.entries[name].add_self_source(), catalog.entries[name] .add_source(bibcode='2016A&A...594A..13P') ] sources = uniq_cdl(sources + bestsrc.split(',')) catalog.entries[name].add_quantity( TIDALDISRUPTION.LUM_DIST, pretty_num( dl.value, sig=bestsig), sources, kind=PREF_KINDS[bestkind], derived=True) if (TIDALDISRUPTION.MAX_ABS_MAG not in catalog.entries[name] and TIDALDISRUPTION.MAX_APP_MAG in catalog.entries[name]): source = catalog.entries[name].add_self_source() pnum = pretty_num( float(catalog.entries[name][ TIDALDISRUPTION.MAX_APP_MAG][0][ QUANTITY.VALUE]) - 5.0 * (log10(dl.to('pc').value) - 1.0 ) + 2.5 * log10(1.0 + bestz), sig=bestsig + 1) catalog.entries[name].add_quantity( TIDALDISRUPTION.MAX_ABS_MAG, pnum, sources, derived=True) if TIDALDISRUPTION.COMOVING_DIST not in catalog.entries[ name]: cd = cosmo.comoving_distance(bestz) sources = [ catalog.entries[name].add_self_source(), catalog.entries[name] .add_source(bibcode='2016A&A...594A..13P') ] sources = uniq_cdl(sources + bestsrc.split(',')) catalog.entries[name].add_quantity( TIDALDISRUPTION.COMOVING_DIST, pretty_num( cd.value, sig=bestsig), sources, derived=True) if all([ x in catalog.entries[name] for x in [ TIDALDISRUPTION.RA, TIDALDISRUPTION.DEC, TIDALDISRUPTION.HOST_RA, TIDALDISRUPTION.HOST_DEC ] ]): # For now just using first coordinates that appear 
in entry try: c1 = coord( ra=catalog.entries[name][TIDALDISRUPTION.RA][0][ QUANTITY.VALUE], dec=catalog.entries[name][TIDALDISRUPTION.DEC][0][ QUANTITY.VALUE], unit=(un.hourangle, un.deg)) c2 = coord( ra=catalog.entries[name][TIDALDISRUPTION.HOST_RA][0][ QUANTITY.VALUE], dec=catalog.entries[name][TIDALDISRUPTION.HOST_DEC][0][ QUANTITY.VALUE], unit=(un.hourangle, un.deg)) except (KeyboardInterrupt, SystemExit): raise except Exception: pass else: sources = uniq_cdl( [catalog.entries[name].add_self_source()] + catalog. entries[name][TIDALDISRUPTION.RA][0]['source'].split(',') + catalog.entries[name][TIDALDISRUPTION.DEC][0]['source']. split(',') + catalog.entries[name][TIDALDISRUPTION.HOST_RA] [0]['source'].split(',') + catalog.entries[name][ TIDALDISRUPTION.HOST_DEC][0]['source'].split(',')) if 'hostoffsetang' not in catalog.entries[name]: hosa = Decimal( hypot(c1.ra.degree - c2.ra.degree, c1.dec.degree - c2.dec.degree)) hosa = pretty_num(hosa * Decimal(3600.)) catalog.entries[name].add_quantity( TIDALDISRUPTION.HOST_OFFSET_ANG, hosa, sources, derived=True, u_value='arcseconds') if (TIDALDISRUPTION.COMOVING_DIST in catalog.entries[name] and TIDALDISRUPTION.REDSHIFT in catalog.entries[name] and TIDALDISRUPTION.HOST_OFFSET_DIST not in catalog.entries[name]): offsetsig = get_sig_digits(catalog.entries[name][ TIDALDISRUPTION.HOST_OFFSET_ANG][0][QUANTITY.VALUE]) sources = uniq_cdl( sources.split(',') + (catalog.entries[name][ TIDALDISRUPTION.COMOVING_DIST][0]['source']). split(',') + (catalog.entries[name][ TIDALDISRUPTION.REDSHIFT][0]['source']).split(',')) (catalog.entries[name].add_quantity( TIDALDISRUPTION.HOST_OFFSET_DIST, pretty_num( float(catalog.entries[name][ TIDALDISRUPTION.HOST_OFFSET_ANG][0][ QUANTITY.VALUE]) / 3600. * (pi / 180.) * float(catalog.entries[name][ TIDALDISRUPTION.COMOVING_DIST][0][ QUANTITY.VALUE]) * 1000. 
/ (1.0 + float(catalog.entries[name][ TIDALDISRUPTION.REDSHIFT][0][QUANTITY.VALUE])), sig=offsetsig), sources)) catalog.entries[name].sanitize() catalog.journal_entries(bury=True, final=True, gz=True) cleanupcnt = cleanupcnt + 1 if catalog.args.travis and cleanupcnt % 1000 == 0: break catalog.save_caches() return
# NOTE(review): fragment -- the enclosing function/loop header is outside this
# chunk. `filts`, `eff`, `wlr`, `f`, `i`, `ax1`, `ax2`, `spec`, `wl`, `wls`,
# `filters`, `coords`, `filtercolor`, `cm`, `coord`, `IrsaDust` and `f99` are
# all defined earlier; indentation below is reconstructed -- confirm against
# the original file.
filts.append(eff)
# Shade each filter throughput curve on the top panel.
ax1.fill(wlr,eff,label=f.split('.')[0],edgecolor="none",color=filtercolor[i])
ax1.axhline(spec,color="black",lw=3,alpha=.5)
# ax1.set_xlabel(r"$\lambda$ in $\AA$")
ax1.set_ylabel("Throughput")
ax1.axes.get_xaxis().set_visible(False)
wl=np.sort(wl)
# Per-filter, per-coordinate work arrays for the reddening corrections.
corrections=np.empty((len(filters),len(coords)))
mags_notred=np.empty(len(filters))
mags_red=np.empty((len(filters),len(coords)))
alambdas=[ [[] for _ in coords] for _ in filts]
color=cm.viridis(np.linspace(0,1,len(coords)))
for i,c in enumerate(coords):
    # Query the IRSA dust service for E(B-V) at each sky position.
    C = coord.SkyCoord(str(c[0])+" "+str(c[1]),unit="deg",frame="fk5")
    table=IrsaDust.get_query_table(C,radius=None)
    eb_v=table["ext SandF mean"]
    #print eb_v.data[0]
    # F99 extinction curve evaluated with A_V = E(B-V) * R_V (R_V = 3.1).
    al_plot=f99(wl,eb_v.data[0]*3.1)
    for j,f in enumerate(filts):
        alambdas[j][i]=f99(wls[j],eb_v.data[0]*3.1)
    ax2.plot(wl,al_plot,label=str(c[0])[:6]+" "+str(c[1])[:4],color=color[i])
ax2.set_xlabel(r"$\lambda$ in $\rm \AA$")
ax2.set_ylabel("Extinction in magnitudes")
ax2.set_ylim([0,0.07])
alambdas=np.array(alambdas)
for j,f in enumerate(filts):
    # Trapezoid-like integration of flux through each filter curve.
    diffs=np.gradient(wls[j])
    flux=sum(wls[j]*spec*f*diffs) #integration
    norm=sum(f*diffs/wls[j]) #normalisation following GALEXEV docs.
def ppxf_population_gas_sdss(file, z, name):
    """Fit stellar populations and gas emission to an SDSS DR8 spectrum with pPXF.

    The DR8 pipeline delivers the spectrum already log-rebinned, so
    ``log_rebin`` must not be applied again.  The spectrum is corrected for
    Galactic extinction (E(B-V) from the IRSA dust service), restricted to
    the wavelength range shared with the stellar library, and strong
    emission-line windows are masked before the regularized pPXF fit.

    Parameters
    ----------
    file : str
        Path to the SDSS DR8 FITS spectrum.
    z : float
        Redshift used to shift the wavelengths to the rest frame.
    name : object
        Target identifier/coordinate accepted by ``IrsaDust.get_query_table``.

    Returns
    -------
    None.  The fit is plotted and saved as ``<basename(file)>.png``.
    """
    import time  # time.clock() was removed in Python 3.8; use perf_counter

    # Read SDSS DR8 galaxy spectrum (see http://www.sdss3.org/dr8/).
    hdulist = pyfits.open(file)
    VAC = 10**hdulist[1].data.loglam
    # Vacuum -> air wavelength conversion (IAU standard formula), then shift
    # to the rest frame.
    wave = VAC / (1.0 + 2.735182E-4 + 131.4182 / VAC**2
                  + 2.76249E8 / VAC**4) / (1 + z)
    flux = hdulist[1].data.flux * 10**-17
    # NOTE(review): `ivar` is an inverse variance; scaling it like the flux
    # looks suspicious (sigma would be 1/sqrt(ivar) * 1e-17) -- confirm what
    # pyspeckit expects in `error` before changing.
    err = hdulist[1].data.ivar * 10**-17
    xarr = pyspeckit.units.SpectroscopicAxis(wave, units='angstroms')
    spec = pyspeckit.OpticalSpectrum(header=hdulist[0].header,
                                     xarr=xarr, data=flux * 1e17, error=err)

    # Galactic extinction correction: take E(B-V) from the IRSA dust service.
    table = IrsaDust.get_query_table(name, section='ebv')
    ebv = table['ext SFD mean'][0]
    spec.deredden(ebv=ebv)  # deredden in place

    t = hdulist[1].data

    # Only use the wavelength range in common between galaxy and stellar
    # library (vectorized replacement of the original element-wise loops).
    galaxy = t.field('flux') / np.median(t.field('flux'))  # normalize to avoid numerical issues
    keep = (wave > 3750) & (wave < 7400)
    galaxy = np.asarray(galaxy)[keep]
    wave = np.asarray(wave)[keep]

    # Mask windows around known emission lines: +/-20 A for Halpha/Hbeta,
    # +/-10 A for the rest (rest-frame centres in Angstrom).
    delta, delta2 = 10, 20
    wide_lines = (6564.614,   # Halpha
                  4861.33)    # Hbeta
    narrow_lines = (3869.9,   # [NeIII]a
                    3971.1,   # [NeIII]b
                    3890.2,   # H-epsilon
                    4102.9,   # H-delta
                    4341.7,   # H-gamma
                    4364.4,   # [OIII]c
                    4687.0,   # HeII a
                    5413.0,   # HeII b
                    6313.8,   # [SIII]
                    5578.9,   # [OI]a
                    6365.5,   # [OI]b
                    4958.92,  # [OIII]a
                    5006.84,  # [OIII]b
                    6300.30,  # [OI]
                    6549.86,  # [NII]a
                    6585.27,  # [NII]b
                    6718.2,   # [SII]a
                    6732.68,  # [SII]b
                    7137.8)   # [ArIII]
    good = np.ones(wave.size, dtype=bool)
    for lc in wide_lines:
        good &= (wave < lc - delta2) | (wave > lc + delta2)
    for lc in narrow_lines:
        good &= (wave < lc - delta) | (wave > lc + delta)
    wave = wave[good]
    galaxy = galaxy[good]

    # FIX: `noise` was commented out but still passed to ppxf() below,
    # raising NameError.  Constant noise per pixel, chosen to give
    # Chi^2/DOF ~= 1 without regularization (REGUL=0).
    noise = galaxy * 0 + 0.01528

    # The velocity step was already chosen by the SDSS pipeline; convert
    # the (log-uniform) wavelength step to km/s.
    c = 299792.458  # speed of light in km/s
    velscale = np.log(wave[1] / wave[0]) * c
    FWHM_gal = 2.76  # SDSS instrumental resolution FWHM in Angstrom

    stars_templates, lamRange_temp, logLam_temp = \
        setup_spectral_library(velscale, FWHM_gal)

    # Reshape stellar templates into a 2-dim array with one spectrum per
    # column, keeping the original dimensions for the regularization.
    reg_dim = stars_templates.shape[1:]
    stars_templates = stars_templates.reshape(stars_templates.shape[0], -1)
    # See the pPXF documentation for the REGUL keyword.
    stars_templates /= np.median(stars_templates)  # normalize by a scalar
    regul_err = 0.004  # desired regularization error

    # Gaussian emission-line templates; stars and gas get different
    # kinematic COMPONENT values in the fit.
    gas_templates = util.emission_lines(logLam_temp, FWHM_gal)
    templates = np.hstack([stars_templates, gas_templates])

    # Velocity shift between template and galaxy starting wavelengths,
    # removed via VSYST (assumes the redshift has already been taken out).
    dv = (np.log(lamRange_temp[0]) - np.log(wave[0])) * c  # km/s
    vel = c * z  # initial estimate of the galaxy velocity in km/s

    start = [vel, 180.]  # (km/s), starting guess for [V, sigma]
    tstart = time.perf_counter()

    plt.clf()
    plt.subplot(211)

    # component=0 -> stellar templates, component=1 -> gas emission lines.
    component = [0] * stars_templates.shape[1] + [1] * gas_templates.shape[1]
    moments = [4, 4]        # fit (V, sig, h3, h4) for both stars and gas
    start = [start, start]  # same starting value for both components

    # Additive polynomials are excluded (DEGREE=-1) and only multiplicative
    # ones used (MDEGREE=10), as recommended for population fits.
    # NOTE(review): `file` as first argument does not match the stock ppxf
    # signature -- presumably a project-local wrapper; confirm.
    pp = ppxf(file, templates, wave, galaxy, noise, velscale, start,
              plot=True, moments=moments, degree=-1, mdegree=10,
              vsyst=dv, clean=False, regul=1. / regul_err,
              reg_dim=reg_dim, component=component)

    # When the two numbers below are the same, the solution is the
    # smoothest consistent with the observed spectrum.
    # FIX: converted Python-2 print statements to Python-3 print calls.
    print('Desired Delta Chi^2:', np.sqrt(2 * galaxy.size))
    print('Current Delta Chi^2:', (pp.chi2 - 1) * galaxy.size)
    print('elapsed time in PPXF (s):', time.perf_counter() - tstart)

    plt.subplot(212)
    plt.imshow(np.rot90(pp.weights[:np.prod(reg_dim)].reshape(reg_dim)
                        / pp.weights.sum()),
               interpolation='nearest', aspect='auto',
               extent=(np.log(1.0), np.log(17.7828), -1.9, 0.45))
    plt.set_cmap('gist_heat')  # = IDL's loadct, 3
    plt.colorbar()
    plt.title("Mass Fraction")
    plt.xlabel("log Age (Gyr)")
    plt.ylabel("[M/H]")
    plt.tight_layout()

    name = splitext(basename(file))[0]
    plt.savefig(name)
    return
# NOTE(review): fragment -- the opening `if` of this chain and the enclosing
# function header are outside this chunk; `result`, `indr`, `ra`, `dec`,
# `empty`, `Teff`, `Tefferr`, `logg`, `loggerr`, `V`, `Verr`, `M`, `Merr`
# and `parallax` are defined there.  Indentation reconstructed.
pflag = 'GAIADR2'
# Fall back to the Simbad parallax when Gaia DR2 had none.
elif type(result['PLX_VALUE'][indr]) != np.ma.core.MaskedConstant:
    p = round(float(result['PLX_VALUE'][indr]), 2)
    # NOTE(review): this re-tests PLX_VALUE; PLX_ERROR was presumably
    # intended -- confirm against the sibling copy of this code.
    if type(result['PLX_VALUE'][indr]) != np.ma.core.MaskedConstant:
        perr = round(float(result['PLX_ERROR'][indr]), 2)
    else:
        perr = empty
    pflag = 'Simbad'
else:
    # No catalogued parallax: derive a spectroscopic one, using the
    # IRSA extinction (A_V) toward the target when available.
    try:
        pos = coord.SkyCoord(ra=ra, dec=dec, unit=(u.hourangle,u.deg), frame='icrs')
        #AvSF = Schlafly & Finkbeiner 2011 (ApJ 737, 103)
        tableAv = IrsaDust.get_query_table(pos, radius='02d', section='ebv', timeout=60)
        Av = tableAv['ext SandF mean'].data[0]
        Averr = tableAv['ext SandF std'].data[0]
    except:
        # Best-effort: assume zero extinction when the query fails.
        Av = 0
        Averr = 0
    try:
        p, perr = [round(x, 2) for x in parallax(Teff, Tefferr, float(logg), float(loggerr), V, Verr, M, Merr, Av, Averr)]
        pflag = 'Spec'
    except:
        p = 'NULL'
# NOTE(review): fragment -- near-duplicate of the parallax-selection logic
# above; the enclosing function header is outside this chunk.  `plx`,
# `eplx`, `result`, `indr`, `ra`, `dec`, `empty`, `Teff`, ... come from it.
# Prefer the Gaia DR2 parallax when one was supplied.
if plx!='NULL':
    p = plx
    perr = eplx
    pflag = 'GAIADR2'
# Otherwise fall back to the Simbad parallax.
elif type(result['PLX_VALUE'][indr])!=np.ma.core.MaskedConstant:
    p=round(float(result['PLX_VALUE'][indr]),2)
    # NOTE(review): re-tests PLX_VALUE; PLX_ERROR was presumably intended.
    if type(result['PLX_VALUE'][indr])!=np.ma.core.MaskedConstant:
        perr=round(float(result['PLX_ERROR'][indr]),2)
    else:
        perr=empty
    pflag='Simbad'
else:
    # No catalogued parallax: derive a spectroscopic one, with the IRSA
    # extinction (Schlafly & Finkbeiner 2011) toward the target.
    try:
        pos=coord.SkyCoord(ra=ra, dec=dec,unit=(u.hourangle,u.deg),frame='icrs')
        #AvSF = Schlafly & Finkbeiner 2011 (ApJ 737, 103)
        tableAv = IrsaDust.get_query_table(pos, radius='02d', section='ebv', timeout=60)
        Av=tableAv['ext SandF mean'].data[0]
        Averr=tableAv['ext SandF std'].data[0]
    except:
        # Best-effort: assume zero extinction when the query fails.
        Av=0
        Averr=0
    try:
        p,perr = map(lambda x: round(x,2), parallax(Teff,Tefferr, float(logg),float(loggerr), V,Verr, M,Merr,Av,Averr))
        pflag = 'Spec'
    except:
        p = 'NULL'
        perr = 'NULL'
        pflag = 'NULL'
# Comments
# NOTE(review): body of this `if` continues beyond this chunk.
if result['SP_TYPE'][indr]!='' and result['SP_TYPE'][indr][0]=='M':
def main():
    """Build a SNANA SIMLIB file for the ZTF MSIP all-sky survey.

    Reads the ZTF observing-log SQLite database, selects the ``all_sky``
    subprogram, and writes one LIBID entry per field with per-exposure
    zeropoints and sky noise derived from the logged five-sigma depth and
    sky brightness.  Milky-Way E(B-V) per field is queried from the IRSA
    dust service.  Output goes to ``ztf_msip_simlib_<release_date>.dat``.
    """
    db = sqlite3.connect('test_schedule_v8_msip.db')
    table = pd.read_sql_query("SELECT * from SUMMARY", db)
    db.close()  # FIX: connection was never closed
    ind = table['subprogram'] == 'all_sky'
    msip = table[ind]

    # SIMLIB header metadata.
    release_date = '20180622'
    survey = 'ZTF_MSIP'
    filters = ''.join(np.unique(msip['filter']))
    user = '******'
    host = 'grimnir.stsci.edu'
    comment = 'Based on ZTF observing log DB from Eric Bellm, Rahul Biswas on {}'.format(
        release_date)
    pixsize = 1.
    fields = np.unique(msip['fieldID'])
    nlibid = len(fields)

    outlines = []
    outlines.append('SURVEY: {}'.format(survey))
    outlines.append('FILTERS: {}'.format(filters))
    outlines.append('TELESCOPE: ZTF')
    outlines.append('USER: {}'.format(user))
    outlines.append('HOST: {}'.format(host))
    outlines.append('SKYSIG_UNIT: ADU_PER_SQARCSEC')
    outlines.append('PIXSIZE: {:0.1f}'.format(pixsize))
    outlines.append('NLIBID: {}'.format(nlibid))
    outlines.append('COMMENT: {}'.format(comment))
    outlines.append('BEGIN LIBGEN')

    for field in fields:
        outlines.append('# --------------------------------------------')
        # Select from `table`, not `msip`, in case some of the other
        # programs observe the same field; this may not be useful since we
        # don't have access to non-MSIP data, but in principle those
        # observations have been taken and could be used for classification.
        outlines.append('LIBID: {}'.format(field))
        indf = (table['fieldID'] == field)
        # All the positions appear to be identical, so there's no way to
        # account for dithers or overlaps.
        ra = np.unique(table[indf]['fieldRA'])[0]
        dec = np.unique(table[indf]['fieldDec'])[0]
        coo = coord.SkyCoord(ra * u.deg, dec * u.deg, frame='icrs')
        dust = IrsaDust.get_query_table(coo, section='ebv')
        mwebv = dust['ext SandF mean'][0]
        nobs = len(table[indf])
        outlines.append(
            'RA: {} DEC: {} NOBS: {} PIXSIZE: {} MWEBV: {} FIELD: {}'
            .format(ra, dec, nobs, pixsize, mwebv, field))
        outlines.append(
            '# CCD CCD PSF1 PSF2 PSF2/1')
        outlines.append(
            '# MJD ID*NEXPOSE FLT GAIN NOISE SKYSIG (pixels) RATIO ZPTAVG ZPTERR MAG'
        )
        entries = at.Table.from_pandas(table[indf])
        for entry in entries:
            # Per-exposure quantities from the observing log.
            flt = entry['filter']
            skymag = entry['filtSkyBright']
            depth = entry['fiveSigmaDepth']
            snr = 5.
            fwhm = entry['FWHMeff']
            term1 = 2.0 * depth - skymag
            term2 = -(depth - skymag)
            # Convert FWHM from arcsec to sigma of the Gaussian PSF in pixels.
            sigma_pixel = fwhm / 2.35 / pixsize
            # FIX: removed the dead `area =` alias from the double assignment.
            pixel_area = (1.51 * fwhm)**2
            arg = pixel_area * snr * snr
            # Background-dominated limit, assuming counts with system
            # transmission only are approximately equal to counts with
            # total transmission.
            zpt_approx = term1 + 2.5 * np.log10(arg)
            tmp = 10.**(-0.4 * term2)
            zpt_cor = 2.5 * np.log10(1. + 1. / (pixel_area * tmp))
            simlib_zptavg = zpt_approx + zpt_cor
            npix_asec = 1. / pixsize**2.
            skysig = np.sqrt(
                (1.0 / npix_asec) * 10.**(-0.4 * (skymag - simlib_zptavg)))
            lst = [
                'S:',
                "{0:5.4f}".format(entry['expMJD']),
                "{0:10d}*2".format(entry['obsHistID']),
                flt,
                "{0:5.2f}".format(1.),               # CCD Gain
                "{0:5.2f}".format(0.25),             # CCD Noise
                "{0:6.2f}".format(skysig),           # SKYSIG
                "{0:4.2f}".format(sigma_pixel),      # PSF1
                "{0:4.2f}".format(0.),               # PSF2
                "{0:4.3f}".format(0.),               # PSFRatio
                "{0:6.2f}".format(simlib_zptavg),    # ZPTAVG
                "{0:6.3f}".format(0.005),            # ZPTNoise
                "{0:+7.3f}".format(-99.)             # MAG
            ]
            out = ' '.join(lst)
            outlines.append(out)
        outlines.append('END_LIBID: {}'.format(field))

    outlines = '\n'.join(outlines)
    with open('ztf_msip_simlib_{}.dat'.format(release_date), 'w') as f:
        f.write(outlines)
def GetAndUploadAllData(self,objs,ras,decs,doNED=True):
    """Fetch TNS/NED/dust/ZTF data for a list of transients and upload them.

    Parameters
    ----------
    objs : sequence of str
        TNS object names.
    ras, decs : sequence of float or str
        Coordinates, either decimal degrees (floats) or sexagesimal strings
        (hourangle / degrees); both lists must have the same length.
    doNED : bool
        When True, also query IRSA dust (E(B-V)) and a NED region table
        per object.

    Returns
    -------
    int
        Number of entries in the upload dictionary (objects plus the
        `noupdatestatus` flag).
    """
    TransientUploadDict = {}

    assert len(ras) == len(decs)

    # Build one SkyCoord array for all targets; pick units from the type
    # of the first RA (floats -> degrees, strings -> hourangle/deg).
    if type(ras[0]) == float:
        scall = SkyCoord(ras,decs,frame="fk5",unit=u.deg)
    else:
        scall = SkyCoord(ras,decs,frame="fk5",unit=(u.hourangle,u.deg))

    ebvall,nedtables = [],[]
    ebvtstart = time.time()
    if doNED:
        for sc in scall:
            # Milky-Way E(B-V) (Schlafly & Finkbeiner column) per target.
            dust_table_l = IrsaDust.get_query_table(sc)
            ebvall += [dust_table_l['ext SandF mean'][0]]
            # Best-effort NED region query; None marks a failed lookup.
            try:
                ned_region_table = Ned.query_region(sc, radius=self.nedradius*u.arcmin, equinox='J2000.0')
            except:
                ned_region_table = None
            nedtables += [ned_region_table]
        print('E(B-V)/NED time: %.1f seconds'%(time.time()-ebvtstart))

    tstart = time.time()
    TNSData = []
    json_data = []
    # One TNS API call per object, requesting photometry and spectra.
    for j in range(len(objs)):
        TNSGetSingle = [("objname",objs[j]),
                        ("photometry","1"),
                        ("spectra","1")]
        response=get(self.tnsapi, TNSGetSingle, self.tnsapikey)
        json_data += [format_to_json(response.text)]
    print(time.time()-tstart)
    print('getting TNS content takes %.1f seconds'%(time.time()-tstart))

    for j,jd in zip(range(len(objs)),json_data):
        tallstart = time.time()
        obj = objs[j]
        # Map this object name back to its index in the input lists.
        iobj = np.where(obj == np.array(objs))[0]
        if len(iobj) > 1: iobj = int(iobj[0])
        else: iobj = int(iobj)
        if doNED: sc,ebv,nedtable = scall[iobj],ebvall[iobj],nedtables[iobj]
        else: sc = scall[iobj]; ebv = None; nedtable = None

        print("Object: %s\nRA: %s\nDEC: %s" % (obj,ras[iobj],decs[iobj]))

        ########################################################
        # For Item in Email, Get NED
        ########################################################
        # A valid TNS reply carries the object payload under data/reply;
        # otherwise treat the object as missing from TNS.
        if type(jd['data']['reply']['name']) == str:
            jd = jd['data']['reply']
        else:
            jd = None

        transientdict = self.getTNSData(jd,obj,sc,ebv)
        # Best-effort ZTF photometry; None when the query fails.
        try:
            photdict = self.getZTFPhotometry(sc)
        except: photdict = None
        try:
            if jd:
                photdict,nondetectdate,nondetectmaglim,nondetectfilt,nondetectins = \
                    self.getTNSPhotometry(jd,PhotUploadAll=photdict)
                specdict = self.getTNSSpectra(jd,sc)
                transientdict['transientphotometry'] = photdict
                transientdict['transientspectra'] = specdict

                if nondetectdate: transientdict['non_detect_date'] = nondetectdate
                if nondetectmaglim: transientdict['non_detect_limit'] = nondetectmaglim
                if nondetectfilt: transientdict['non_detect_band'] = nondetectfilt
                # NOTE(review): this re-tests nondetectfilt; presumably
                # nondetectins was intended -- confirm.
                if nondetectfilt: transientdict['non_detect_instrument'] = nondetectins
        except: pass

        try:
            if doNED:
                hostdict,hostcoords = self.getNEDData(jd,sc,nedtable)
                transientdict['host'] = hostdict
                transientdict['candidate_hosts'] = hostcoords
        except: pass

        TransientUploadDict[obj] = transientdict

    TransientUploadDict['noupdatestatus'] = self.noupdatestatus
    self.UploadTransients(TransientUploadDict)

    return(len(TransientUploadDict))
def read_lasair_json(object_name='ZTF18acsovsw'):
    """ Read light curve from lasair website API based on object name.

    Parameters
    ----------
    object_name : str or tuple
        The LASAIR object name. E.g. object_name='ZTF18acsovsw', or a
        ``(name, redshift)`` tuple to override the redshift lookup.

    Returns
    -------
    tuple
        (mjd, passband, mag, magerr, photflag, zeropoint, ra, dec,
        objid, redshift, mwebv)
    """
    print(object_name)
    if isinstance(object_name, tuple):
        object_name, z_in = object_name
    else:
        z_in = None

    url = 'https://lasair.roe.ac.uk/object/{}/json/'.format(object_name)
    data = read_json(url)
    objid = data['objectId']
    ra = data['objectData']['ramean']
    dec = data['objectData']['decmean']
    # lasair_classification = data['objectData']['classification']
    tns_info = data['objectData']['annotation']

    # Keep the last cross-match's photometric redshift, if any.
    photoZ = None
    for cross_match in data['crossmatches']:
        # print(cross_match)
        photoZ = cross_match['photoZ']
        separation_arcsec = cross_match['separationArcsec']
        catalogue_object_type = cross_match['catalogue_object_type']

    if photoZ is None:  # TODO: Get correct redshift
        # Fall back to parsing "z=..." / "Z=..." out of the TNS annotation.
        try:
            if "z=" in tns_info:
                photoZ = tns_info.split('z=')[1]
                redshift = float(photoZ.replace(')', '').split()[0])
            elif "Z=" in tns_info:
                photoZ = tns_info.split('Z=')[1]
                redshift = float(photoZ.split()[0])
            else:
                redshift = None
        except Exception as e:
            redshift = None
            print(e)
    else:
        redshift = photoZ

    # A caller-supplied redshift always wins.
    if z_in is not None:
        redshift = z_in

    print("Redshift is {}".format(redshift))
    # FIX: only tag the object id when a redshift was actually found;
    # round(None, 2) raised TypeError (the sibling version of this
    # function already guards this).
    if redshift is not None:
        objid += "_z={}".format(round(redshift, 2))

    # Get extinction  TODO: Maybe add this to RAPID code
    coo = coord.SkyCoord(ra * u.deg, dec * u.deg, frame='icrs')
    dust = IrsaDust.get_query_table(coo, section='ebv')
    mwebv = dust['ext SandF mean'][0]
    print("MWEBV")
    print(mwebv)

    mjd = []
    passband = []
    mag = []
    magerr = []
    photflag = []
    zeropoint = []
    for cand in data['candidates']:
        mjd.append(cand['mjd'])
        passband.append(cand['fid'])
        mag.append(cand['magpsf'])
        if 'sigmapsf' in cand:
            # Detection: real uncertainty, photflag 4096.
            magerr.append(cand['sigmapsf'])
            photflag.append(4096)
            if cand['magzpsci'] == 0:
                zeropoint.append(26.2)  # TODO: Tell LASAIR their zeropoints are wrong
            else:
                zeropoint.append(cand['magzpsci'])
        else:
            # Non-detection: approximate the error, photflag 0.
            magerr.append(0.1 * cand['magpsf'])
            photflag.append(0)
            zeropoint.append(26.2)

    mjd, passband, mag, magerr, photflag, zeropoint = convert_lists_to_arrays(mjd, passband, mag, magerr, photflag, zeropoint)

    # Drop entries with missing uncertainties (elementwise == on purpose).
    deleteindexes = np.where(magerr == None)
    mjd, passband, mag, magerr, photflag, zeropoint = delete_indexes(deleteindexes, mjd, passband, mag, magerr, photflag, zeropoint)

    return mjd, passband, mag, magerr, photflag, zeropoint, ra, dec, objid, redshift, mwebv
def read_lasair_json(object_name='ZTF18acsovsw'):
    """ Read light curve from lasair website API based on object name.

    Parameters
    ----------
    object_name : str or tuple
        The LASAIR object name. E.g. object_name='ZTF18acsovsw', or a
        ``(name, redshift)`` tuple to override the redshift lookup.

    Returns
    -------
    tuple
        (mjd, passband, mag, magerr, photflag, zeropoint, ra, dec, objid,
        redshift, mwebv, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos)

    Raises
    ------
    ValueError
        If a detection reports a zero science-image zeropoint.
    """
    print(object_name)
    if isinstance(object_name, tuple):
        object_name, z_in = object_name
    else:
        z_in = None

    url = 'https://lasair.roe.ac.uk/object/{}/json/'.format(object_name)
    data = read_json(url)
    objid = data['objectId']
    ra = data['objectData']['ramean']
    dec = data['objectData']['decmean']
    # lasair_classification = data['objectData']['classification']
    tns_info = data['objectData']['annotation']

    # Keep the last cross-match's photometric redshift, if any.
    photoZ = None
    for cross_match in data['crossmatches']:
        # print(cross_match)
        photoZ = cross_match['photoZ']
        separation_arcsec = cross_match['separationArcsec']
        catalogue_object_type = cross_match['catalogue_object_type']

    # A valid caller-supplied redshift always wins.
    if z_in is not None and not np.isnan(z_in):
        redshift = z_in
    else:
        if photoZ is None:  # TODO: Get correct redshift
            # Fall back to parsing "z=..." / "Z=..." from the TNS annotation.
            try:
                if "z=" in tns_info:
                    photoZ = tns_info.split('z=')[1]
                    redshift = float(photoZ.replace(')', '').split()[0])
                elif "Z=" in tns_info:
                    photoZ = tns_info.split('Z=')[1]
                    redshift = float(photoZ.split()[0])
                else:
                    print("TRYING ARBITRARY GUESS REDSHIFT = 0.1")
                    redshift = None
            except Exception as e:
                redshift = None
                print(e)
        else:
            redshift = photoZ

    print("Redshift is {}".format(redshift))
    if redshift is not None:
        objid += "_z={}".format(round(redshift, 2))

    # Get extinction  TODO: Maybe add this to RAPID code
    coo = coord.SkyCoord(ra * u.deg, dec * u.deg, frame='icrs')
    dust = IrsaDust.get_query_table(coo, section='ebv')
    mwebv = dust['ext SandF mean'][0]
    print("MWEBV")
    print(mwebv)

    mjd = []
    passband = []
    mag = []
    magerr = []
    photflag = []
    zeropoint = []
    dc_mag = []
    dc_magerr = []
    magnr, sigmagnr, isdiffpos = [], [], []
    for cand in data['candidates']:
        mjd.append(cand['mjd'])
        passband.append(cand['fid'])
        mag.append(cand['magpsf'])
        if 'sigmapsf' in cand:
            # Detection: real uncertainty, photflag 4096.
            magerr.append(cand['sigmapsf'])
            photflag.append(4096)
            if cand['magzpsci'] == 0:
                # FIX: was a debug print + bare `raise Exception` followed by
                # an unreachable `return`; raise an informative error instead.
                raise ValueError(
                    "Zero magzpsci zeropoint for {} at MJD {}".format(
                        object_name, cand['mjd']))
            zeropoint.append(cand['magzpsci'])
            dc_mag.append(cand['dc_mag'])
            dc_magerr.append(cand['dc_sigmag'])
            magnr.append(cand['magnr'])
            sigmagnr.append(cand['sigmagnr'])
            isdiffpos.append(cand['isdiffpos'])
        else:
            # Non-detection: no measured quantities, photflag 0.
            magerr.append(np.nan)
            photflag.append(0)
            zeropoint.append(np.nan)
            dc_mag.append(np.nan)
            dc_magerr.append(np.nan)
            magnr.append(np.nan)
            sigmagnr.append(np.nan)
            isdiffpos.append(None)

    mjd, passband, mag, magerr, photflag, zeropoint, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos = convert_lists_to_arrays(
        mjd, passband, mag, magerr, photflag, zeropoint, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos)

    # FIX: guard both pruning steps against empty selections; previously
    # min() crashed when there were no detections at all.
    if np.any(photflag > 0):
        # Delete nondetections that occur after the first detection.
        deleteindexes = np.where(
            (photflag == 0) & (mjd > min(mjd[photflag > 0])))
        mjd, passband, mag, magerr, photflag, zeropoint, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos = delete_indexes(
            deleteindexes, mjd, passband, mag, magerr, photflag, zeropoint,
            dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos)
    if np.any(photflag == 0):
        # Remove non-detection outliers (brighter than median - 0.5 sigma).
        deleteindexes = np.where(
            (mag < (np.median(mag[photflag == 0]) -
                    0.5 * np.std(mag[photflag == 0]))) & (photflag == 0))
        mjd, passband, mag, magerr, photflag, zeropoint, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos = delete_indexes(
            deleteindexes, mjd, passband, mag, magerr, photflag, zeropoint,
            dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos)

    return mjd, passband, mag, magerr, photflag, zeropoint, ra, dec, objid, redshift, mwebv, dc_mag, dc_magerr, magnr, sigmagnr, isdiffpos
def ProcessTNSEmails(self,post=True,posturl=None,db=None):
    """Poll the 'TNS' Gmail folder for unseen alert emails and ingest each reported transient.

    For every unread message in the IMAP folder this method:
      1. extracts object names / RA / Dec from the message body using the
         module-level regexes ``reg_obj`` / ``reg_ra`` / ``reg_dec``;
      2. scrapes the TNS object page (internal name, event type, discovery
         date/mag, redshift, host info, photometry tables);
      3. queries IRSA dust maps for Milky-Way E(B-V) and NED for nearby
         galaxy candidates around the transient position;
      4. if ``post`` is True, writes transient / host / photometry records
         to the database through ``db``.

    Parameters
    ----------
    post : bool
        If True, POST/PATCH results to the database via ``db``.
    posturl : str or None
        Not referenced anywhere in this method body — presumably kept for
        interface compatibility with callers.  # NOTE(review): confirm
    db : object
        Database client exposing ``get_ID_from_DB``, ``post_object_to_DB``,
        ``patch_object_to_DB``, ``put_object_to_DB``, ``get_key_from_object``
        and an ``options`` attribute (as used below).

    Returns
    -------
    None
        Results are accumulated in ``tns_objs`` locally and written to the
        DB as a side effect; nothing is returned to the caller.
    """
    body = ""
    html = ""
    tns_objs = []
    radius = 5  # arcminutes (re-initialized per object in the NED loop below)

    ########################################################
    # Get All Email
    ########################################################
    mail = imaplib.IMAP4_SSL('imap.gmail.com', 993)  #, ssl_context=ctx

    ## NOTE: This is not the way to do this. You will want to implement an industry-standard login step ##
    mail.login(self.login, self.password)
    mail.select('TNS', readonly=False)
    # Only fetch messages not yet marked Seen; they are flagged Seen at the
    # end of successful processing further below.
    retcode, msg_ids_bytes = mail.search(None, '(UNSEEN)')
    msg_ids = msg_ids_bytes[0].decode("utf-8").split(" ")

    try:
        # An empty search result comes back as a single empty string.
        if retcode != "OK" or msg_ids[0] == "":
            raise ValueError("No messages")
    except ValueError as err:
        # Nothing to do: close the mailbox cleanly and bail out.
        print("%s. Exiting..." % err.args)
        mail.close()
        mail.logout()
        del mail
        print("Process done.")
        return

    for i in range(len(msg_ids)):
        ########################################################
        # Iterate Over Email
        ########################################################
        typ, data = mail.fetch(msg_ids[i],'(RFC822)')
        msg = email.message_from_bytes(data[0][1])

        # Mark messages as "Unseen"
        # result, wdata = mail.store(msg_ids[i], '-FLAGS', '\Seen')

        if msg.is_multipart():
            for part in msg.walk():
                ctype = part.get_content_type()
                cdispo = str(part.get('Content-Disposition'))

                # skip any text/plain (txt) attachments
                if ctype == 'text/plain' and 'attachment' not in cdispo:
                    body = part.get_payload(decode=True)  # decode
                    break
        # not multipart - i.e. plain text, no attachments, keeping fingers crossed
        else:
            body = msg.get_payload(decode=True)

        # ``body`` is bytes here, so the regex matches are bytes too —
        # hence the .decode('utf-8') calls on objs/ras/decs below.
        objs = re.findall(reg_obj,body)
        print(objs)
        ras = re.findall(reg_ra,body)
        print(ras)
        decs = re.findall(reg_dec,body)
        print(decs)

        try:
            ########################################################
            # For Item in Email, Get TNS
            ########################################################
            for j in range(len(objs)):
                print("Object: %s\nRA: %s\nDEC: %s" % (objs[j].decode('utf-8'),
                                                       ras[j].decode('utf-8'),
                                                       decs[j].decode('utf-8')))

                # Get TNS page
                int_name=""
                evt_type=""
                z=""
                host_name=""
                host_redshift = ""
                ned_url = ""

                tns_url = "https://wis-tns.weizmann.ac.il/object/" + objs[j].decode("utf-8")
                print(tns_url)
                tstart = time.time()  # NOTE(review): never read afterwards — leftover timing probe?
                # One blind retry on any request failure (timeouts included).
                try:
                    response = requests.get(tns_url,timeout=20)
                    html = response.content
                except:
                    print('trying again')
                    response = requests.get(tns_url,timeout=20)
                    html = response.content
                soup = BeautifulSoup(html, "lxml")

                # Get Internal Name, Type, Disc. Date, Disc. Mag, Redshift, Host Name, Host Redshift, NED URL
                int_name = soup.find('td', attrs={'class':'cell-internal_name'}).text
                evt_type = soup.find('div', attrs={'class':'field-type'}).find('div').find('b').text
                evt_type = evt_type #.replace(' ','')
                disc_date = soup.find('div', attrs={'class':'field field-discoverydate'}).find('div').find('b').text
                disc_mag = soup.find('div', attrs={'class':'field field-discoverymag'}).find('div').find('b').text
                try:
                    source_group = soup.find('div', attrs={'class':'field field-source_group_name'}).find('div').find('b').text
                except AttributeError:
                    source_group = "Unknown"
                try:
                    # NOTE(review): attrs={'cell': 'cell-filter_name'} filters on an HTML
                    # attribute literally named "cell" — looks like it should be 'class';
                    # as written this probably always raises/returns None. Confirm against
                    # the live TNS page markup.
                    disc_filter = soup.find('td', attrs={'cell':'cell-filter_name'}).text
                except AttributeError:
                    disc_filter = "Unknown"
                if '-' in disc_filter:
                    # Filter strings look like "<telescope>-<filter>"; keep the second part.
                    disc_instrument = disc_filter.split('-')[1]
                else:
                    disc_instrument = 'Unknown'

                # lets pull the photometry
                nondetectmaglim = None
                nondetectdate = None
                nondetectfilt = None
                # Parallel per-row arrays; empty-string entries keep mag and flux
                # columns aligned when a table only reports one of the two.
                tmag,tmagerr,tflux,tfluxerr,tfilt,tinst,tobsdate = \
                    np.array([]),np.array([]),np.array([]),np.array([]),\
                    np.array([]),np.array([]),np.array([])
                try:
                    tables = soup.find_all('table',attrs={'class':'photometry-results-table'})
                    for table in tables:
                        data = []
                        table_body = table.find('tbody')
                        header = table.find('thead')
                        headcols = header.find_all('th')
                        header = np.array([ele.text.strip() for ele in headcols])
                        #header.append([ele for ele in headcols if ele])
                        rows = table_body.find_all('tr')
                        for row in rows:
                            cols = row.find_all('td')
                            data.append([ele.text.strip() for ele in cols])
                        for datarow in data:
                            datarow = np.array(datarow)
                            # ``photkeydict`` maps logical column names ('unit',
                            # 'magflux', ...) to the TNS table header strings.
                            if photkeydict['unit'] in header:
                                if 'mag' in datarow[header == photkeydict['unit']][0].lower():
                                    # magnitude row: fill mag columns, blank flux columns
                                    if photkeydict['magflux'] in header:
                                        tmag = np.append(tmag,datarow[header == photkeydict['magflux']])
                                        tflux = np.append(tflux,'')
                                    else:
                                        tmag = np.append(tmag,'')
                                        tflux = np.append(tflux,'')
                                    if photkeydict['magfluxerr'] in header:
                                        tmagerr = np.append(tmagerr,datarow[header == photkeydict['magfluxerr']])
                                        tfluxerr = np.append(tfluxerr,'')
                                    else:
                                        tmagerr = np.append(tmagerr,None)
                                        tfluxerr= np.append(tfluxerr,'')
                                elif 'flux' in datarow[header == photkeydict['unit']][0].lower():
                                    # flux row: mirror image of the mag branch
                                    if photkeydict['magflux'] in header:
                                        tflux = np.append(tflux,datarow[header == photkeydict['magflux']])
                                        tmag = np.append(tmag,'')
                                    else:
                                        tflux = np.append(tflux,'')
                                        tmag = np.append(tmag,'')
                                    if photkeydict['magfluxerr'] in header:
                                        tfluxerr = np.append(tfluxerr,datarow[header == photkeydict['magfluxerr']])
                                        tmagerr = np.append(tmagerr,'')
                                    else:
                                        tfluxerr = np.append(tfluxerr,None)
                                        tmagerr = np.append(tmagerr,'')
                            if photkeydict['filter'] in header:
                                tfilt = np.append(tfilt,datarow[header == photkeydict['filter']])
                            if photkeydict['inst'] in header:
                                tinst = np.append(tinst,datarow[header == photkeydict['inst']])
                            if photkeydict['obsdate'] in header:
                                tobsdate = np.append(tobsdate,datarow[header == photkeydict['obsdate']])
                            # A remarks cell containing "last ... non ... detection"
                            # marks the last pre-discovery upper limit.
                            if photkeydict['remarks'] in header and photkeydict['maglim'] in header:
                                if 'last' in datarow[header == photkeydict['remarks']][0].lower() and \
                                   'non' in datarow[header == photkeydict['remarks']][0].lower() and \
                                   'detection' in datarow[header == photkeydict['remarks']][0].lower():
                                    nondetectmaglim = datarow[header == photkeydict['maglim']][0]
                                    nondetectdate = datarow[header == photkeydict['obsdate']][0]
                                    nondetectfilt = datarow[header == photkeydict['filter']][0]

                    # set the discovery flag
                    # disc_flag[k] == 1 marks the earliest row that has a magnitude.
                    disc_flag = np.zeros(len(tmag))
                    iMagsExist = np.where(tmag != '')[0]
                    if len(iMagsExist) == 1:
                        disc_flag[np.where(tmag != '')] = 1
                    elif len(iMagsExist) > 1:
                        # NOTE(review): local ``mjd`` here shadows any outer
                        # light-curve variable of the same name — intentional scope reuse.
                        mjd = np.zeros(len(iMagsExist))
                        for d in range(len(mjd)):
                            # NOTE(review): indexes tobsdate[d], not tobsdate[iMagsExist[d]];
                            # correct only if the first len(iMagsExist) rows are the mag rows — confirm.
                            mjd[d] = date_to_mjd(tobsdate[d])
                        iMinMJD = np.where(mjd == np.min(mjd))[0]
                        if len(iMinMJD) > 1: iMinMJD = [iMinMJD[0]]
                        for im,iim in zip(iMagsExist,range(len(iMagsExist))):
                            if len(iMinMJD) and iim == iMinMJD[0]:
                                disc_flag[im] = 1
                except:
                    # Photometry is best-effort; keep going with the arrays filled so far.
                    print('Error : couldn\'t get photometry!!!')

                z = soup.find('div', attrs={'class':'field-redshift'}).find('div').find('b').text
                hn_div = soup.find('div', attrs={'class':'field-hostname'})
                if hn_div is not None:
                    host_name = hn_div.find('div').find('b').text
                z_div = soup.find('div', attrs={'class':'field-host_redshift'})
                if z_div is not None:
                    host_redshift = z_div.find('div').find('b').text
                ned_url = soup.find('div', attrs={'class':'additional-links clearfix'}).find('a')['href']

                # Get photometry records
                # Raw table rows wrapped in phot_row objects for tns_obj below.
                table = soup.findAll('table', attrs={'class':'photometry-results-table'})
                prs = []
                for k in range(len(table)):
                    table_body = table[k].find('tbody')
                    rows = table_body.find_all('tr')
                    print(type(rows))
                    for l in range(len(rows)):
                        prs.append(phot_row(rows[l]))

                ########################################################
                # For Item in Email, Get NED
                ########################################################
                ra_j = ras[j].decode("utf-8")
                dec_j = decs[j].decode("utf-8")
                # RA is sexagesimal hours, Dec degrees, as scraped from the email.
                co = coordinates.SkyCoord(ra=ra_j, dec=dec_j, unit=(u.hour, u.deg), frame='fk4', equinox='J2000.0')
                dust_table_l = IrsaDust.get_query_table(co)
                # Schlafly & Finkbeiner (2011) mean E(B-V) along the line of sight.
                ebv = dust_table_l['ext SandF mean'][0]
                ned_region_table = None

                # Grow the NED search radius (5' -> 10') until we have a
                # reasonable number of candidates or hit the cap.
                gal_candidates = 0
                radius = 5
                while (radius < 11 and gal_candidates < 21):
                    try:
                        print("Radius: %s" % radius)
                        ned_region_table = Ned.query_region(co, radius=radius*u.arcmin, equinox='J2000.0')
                        gal_candidates = len(ned_region_table)
                        radius += 1
                        print("Result length: %s" % gal_candidates)
                    except Exception as e:
                        # NED raises on empty results / transient errors; widen and retry.
                        radius += 1
                        print("NED exception: %s" % e.args)

                galaxy_names = []
                galaxy_zs = []
                galaxy_seps = []
                galaxies_with_z = []
                galaxy_ras = []
                galaxy_decs = []
                galaxy_mags = []
                if ned_region_table is not None:
                    print("NED Matches: %s" % len(ned_region_table))

                    # Keep only entries typed 'G' (galaxies).
                    galaxy_candidates = np.asarray([entry.decode("utf-8") for entry in ned_region_table["Type"]])
                    galaxies_indices = np.where(galaxy_candidates == 'G')
                    galaxies = ned_region_table[galaxies_indices]

                    print("Galaxy Candidates: %s" % len(galaxies))

                    # Get Galaxy name, z, separation for each galaxy with z
                    for l in range(len(galaxies)):
                        if isinstance(galaxies[l]["Redshift"], float):
                            galaxies_with_z.append(galaxies[l])
                            galaxy_names.append(galaxies[l]["Object Name"])
                            galaxy_zs.append(galaxies[l]["Redshift"])
                            galaxy_seps.append(galaxies[l]["Distance (arcmin)"])
                            galaxy_ras.append(galaxies[l]["RA(deg)"])
                            galaxy_decs.append(galaxies[l]["DEC(deg)"])
                            galaxy_mags.append(galaxies[l]["Magnitude and Filter"])

                    print("Galaxies with z: %s" % len(galaxies_with_z))

                    # Get Dust in LoS for each galaxy with z
                    if len(galaxies_with_z) > 0:
                        for l in range(len(galaxies_with_z)):
                            # NOTE(review): ``co_l`` is built but never used — the
                            # per-galaxy dust query appears to have been dropped.
                            co_l = coordinates.SkyCoord(ra=galaxies_with_z[l]["RA(deg)"],
                                                        dec=galaxies_with_z[l]["DEC(deg)"],
                                                        unit=(u.deg, u.deg), frame='fk4', equinox='J2000.0')
                    else:
                        print("No NED Galaxy hosts with z")

                # Snapshot everything scraped so far for this object.
                tns_objs.append(tns_obj(name = objs[j].decode("utf-8"),
                                        tns_url = tns_url,
                                        internal_name = int_name,
                                        event_type = evt_type,
                                        ra = ras[j].decode("utf-8"),
                                        dec = decs[j].decode("utf-8"),
                                        ebv = ebv,
                                        z = z,
                                        tns_host = host_name,
                                        tns_host_z = host_redshift,
                                        ned_nearest_host = galaxy_names,
                                        ned_nearest_z = galaxy_zs,
                                        ned_nearest_sep = galaxy_seps,
                                        discovery_date = disc_date,
                                        phot_rows = prs,
                                        disc_mag = disc_mag))

                if post:
                    snid = objs[j].decode("utf-8")

                    # if source_group doesn't exist, we need to add it
                    groupid = db.get_ID_from_DB('observationgroups',source_group)
                    if not groupid:
                        groupid = db.get_ID_from_DB('observationgroups','Unknown')#db.post_object_to_DB('observationgroup',{'name':source_group})

                    # get the status
                    statusid = db.get_ID_from_DB('transientstatuses','New')
                    if not statusid: raise RuntimeError('Error : not all statuses are defined')

                    # put in the hosts
                    # The nearest candidate (min separation) becomes THE host record;
                    # every candidate's coordinates are accumulated in hostcoords.
                    # NOTE(review): loop variable ``z`` shadows the TNS redshift
                    # scraped above — safe only because tns_objs was already appended.
                    hostcoords = ''; hosturl = ''; ned_mag = ''
                    for z,name,ra,dec,sep,mag in zip(galaxy_zs,galaxy_names,galaxy_ras,galaxy_decs,galaxy_seps,galaxy_mags):
                        if sep == np.min(galaxy_seps):
                            hostdict = {'name':name,'ra':ra,'dec':dec,'redshift':z}
                            hostoutput = db.post_object_to_DB('host',hostdict,return_full=True)
                            hosturl = hostoutput['url']
                            ned_mag = mag
                        hostcoords += 'ra=%.7f, dec=%.7f\n'%(ra,dec)

                    # put in the spec type
                    eventid = db.get_ID_from_DB('transientclasses',evt_type)
                    if not eventid:
                        eventid = db.get_ID_from_DB('transientclasses','Unknown')#db.post_object_to_DB('transientclasses',{'name':evt_type})

                    # first check if already exists
                    dbid = db.get_ID_from_DB('transients',snid)
                    k2id = db.get_ID_from_DB('internalsurveys','K2')
                    # then POST or PUT, depending
                    # put in main transient
                    sc = SkyCoord(ras[j].decode("utf-8"),decs[j].decode("utf-8"),FK5,unit=(u.hourangle,u.deg))
                    db.options.best_spec_classapi = db.options.transientclassesapi
                    newobjdict = {'name':objs[j].decode("utf-8"),
                                  'ra':sc.ra.deg,
                                  'dec':sc.dec.deg,
                                  #'status':statusid,
                                  'obs_group':groupid,
                                  'host':hosturl,
                                  'candidate_hosts':hostcoords,
                                  'best_spec_class':eventid,
                                  'TNS_spec_class':evt_type,
                                  'mw_ebv':ebv,
                                  'disc_date':disc_date.replace(' ','T'),
                                  'tags':[]}
                    if nondetectdate: newobjdict['non_detect_date'] = nondetectdate.replace(' ','T')
                    if nondetectmaglim: newobjdict['non_detect_limit'] = nondetectmaglim
                    if nondetectfilt:
                        nondetectid = db.get_ID_from_DB('photometricbands',nondetectfilt)
                        if nondetectid:
                            newobjdict['non_detect_filter'] = nondetectid

                    if dbid:
                        # if the status is ignore, we're going to promote this to new
                        status_getid = db.get_key_from_object(dbid,'status')
                        statusname = db.get_key_from_object(status_getid,'name')
                        if statusname == 'Ignore':
                            newobjdict['status'] = statusid
                        transientid = db.patch_object_to_DB('transient',newobjdict,dbid)
                    else:
                        newobjdict['status'] = statusid
                        transientid = db.post_object_to_DB('transient',newobjdict)

                    # only add in host info and photometry if galaxy wasn't already in the database
                    # (avoids duplicates)
                    if not dbid:
                        # the photometry table probably won't exist, so add this in
                        # phot table needs an instrument, which needs a telescope, which needs an observatory
                        for ins in np.unique(tinst):
                            instrumentid = db.get_ID_from_DB('instruments',ins)
                            if not instrumentid:
                                instrumentid = db.get_ID_from_DB('instruments','Unknown')
                            if not instrumentid:
                                # Bootstrap the Unknown observatory -> telescope -> instrument chain.
                                observatoryid = db.post_object_to_DB(
                                    'observatory',{'name':'Unknown','tz_name':0,'utc_offset':0})
                                teldict= {'name':'Unknown',
                                          'observatory':observatoryid,
                                          'longitude':0,
                                          'latitude':0,
                                          'elevation':0}
                                telid = db.post_object_to_DB('telescope',teldict)
                                instrumentid = db.post_object_to_DB(
                                    'instrument',{'name':'Unknown','telescope':telid})
                            phottabledict = {'transient':transientid,
                                             'obs_group':groupid,
                                             'instrument':instrumentid}
                            phottableid = db.post_object_to_DB('photometry',phottabledict)

                            for f in np.unique(tfilt):
                                bandid = db.get_ID_from_DB('photometricbands',f)
                                if not bandid:
                                    bandid = db.post_object_to_DB('band',{'name':f,'instrument':instrumentid})

                                # put in the photometry
                                # NOTE(review): the zip target ``f`` (flux) shadows the
                                # filter name ``f`` from the enclosing loop. The masks
                                # ``(f == tfilt)`` are evaluated while ``f`` is still the
                                # filter, but ``if f:`` below tests the flux value —
                                # fragile; confirm this is intended.
                                for m,me,f,fe,od,df in zip(tmag[(f == tfilt) & (ins == tinst)],
                                                           tmagerr[(f == tfilt) & (ins == tinst)],
                                                           tflux[(f == tfilt) & (ins == tinst)],
                                                           tfluxerr[(f == tfilt) & (ins == tinst)],
                                                           tobsdate[(f == tfilt) & (ins == tinst)],
                                                           disc_flag[(f == tfilt) & (ins == tinst)]):
                                    if not m and not me and not f and not fe: continue
                                    # TODO: compare od to disc_date.replace(' ','T')
                                    # if they're close or equal? Set discovery flag
                                    photdatadict = {'obs_date':od.replace(' ','T'),
                                                    'band':bandid,
                                                    'photometry':phottableid}
                                    if m: photdatadict['mag'] = m
                                    if me: photdatadict['mag_err'] = me
                                    if f: photdatadict['flux'] = f
                                    if fe: photdatadict['flux_err'] = fe
                                    if df: photdatadict['discovery_point'] = 1
                                    photdataid = db.post_object_to_DB('photdata',photdatadict)

                        # put in the galaxy photometry
                        if ned_mag:
                            try:
                                unknowninstid = db.get_ID_from_DB('instruments','Unknown')
                                unknowngroupid = db.get_ID_from_DB('observationgroups','NED')
                                if not unknowngroupid:
                                    unknowngroupid = db.get_ID_from_DB('observationgroups','Unknown')
                                unknownbandid = db.get_ID_from_DB('photometricbands','Unknown')

                                hostphottabledict = {'host':hosturl,
                                                     'obs_group':unknowngroupid,
                                                     'instrument':unknowninstid}
                                hostphottableid = db.post_object_to_DB('hostphotometry',hostphottabledict)

                                # put in the photometry
                                # NOTE(review): ned_mag.decode() implies the NED
                                # "Magnitude and Filter" value is bytes; [:-1] strips the
                                # trailing filter letter. Confirm against astroquery output.
                                hostphotdatadict = {'obs_date':disc_date.replace(' ','T'),#'2000-01-01 00:00:00',
                                                    'mag':ned_mag.decode('utf-8')[:-1],
                                                    'band':unknownbandid,
                                                    'photometry':hostphottableid}
                                hostphotdataid = db.post_object_to_DB('hostphotdata',hostphotdatadict)
                            except:
                                print('getting host mag failed')

            # Mark messages as "Seen"
            result, wdata = mail.store(msg_ids[i], '+FLAGS', '\\Seen')

        except: # ValueError as err:
            # Any failure in the full pipeline falls back to posting a
            # minimal transient record (name + coordinates) per object.
            for j in range(len(objs)):
                print('Something went wrong!!! Sticking to basic info only')
                print("Object: %s\nRA: %s\nDEC: %s" % (objs[j].decode('utf-8'),
                                                       ras[j].decode('utf-8'),
                                                       decs[j].decode('utf-8')))
                snid = objs[j].decode("utf-8")

                # if source_group doesn't exist, we need to add it
                source_group = "Unknown"
                groupid = db.get_ID_from_DB('observationgroups',source_group)
                if not groupid:
                    groupid = db.get_ID_from_DB('observationgroups','Unknown')#db.post_object_to_DB('observationgroup',{'name':source_group})

                # get the status
                statusid = db.get_ID_from_DB('transientstatuses','New')
                if not statusid: raise RuntimeError('Error : not all statuses are defined')

                dbid = db.get_ID_from_DB('transients',snid)
                k2id = db.get_ID_from_DB('internalsurveys','K2')
                # then POST or PUT, depending
                # put in main transient
                sc = SkyCoord(ras[j].decode("utf-8"),decs[j].decode("utf-8"),FK5,unit=(u.hourangle,u.deg))
                db.options.best_spec_classapi = db.options.transientclassesapi
                newobjdict = {'name':objs[j].decode("utf-8"),
                              'ra':sc.ra.deg,
                              'dec':sc.dec.deg,
                              'status':statusid,
                              'obs_group':groupid,
                              'tags':[]}
                if dbid:
                    transientid = db.put_object_to_DB('transient',newobjdict,dbid)
                else:
                    transientid = db.post_object_to_DB('transient',newobjdict)

    #WriteOutput(tns_objs)
    print("Process done.")