def LoadGCN(triggerid, clobber=False, redownload_gcn=False):
    ### LOAD or CREATE PICKLE STORAGE FILE
    # Attempt to load pickle file
    pklpath = storepath + 'sw' + str(triggerid) + 'GCN.pkl'
    loadedgcn = qPickle.load(pklpath)
    # If couldn't load, or clobber == True, create a new instance of the class
    if clobber or not loadedgcn:
        # Create new instance of GCN Notice
        loadedgcn = GCNNotice(triggerid, clobber=redownload_gcn)
        try:
            # Extract values from GCN Notice
            loadedgcn.extract_values()
            loadedgcn.get_positions()
            if loadedgcn.successful_load:
                # Save new pickle file
                qPickle.save(loadedgcn, pklpath, clobber=True)
            else:
                errtitle = 'Could not successfully load GCN for trigger %s' % (str(triggerid))
                qErr.qErr(errtitle=errtitle)
                return
        except:
            errtitle = 'Could not extract GCN values for trigger %s' % (str(triggerid))
            qErr.qErr(errtitle=errtitle)
            return
    return loadedgcn
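
# A hedged usage sketch for the load-or-create cache above. The helper name
# and trigger id are illustrative only; LoadGCN returns None when extraction
# fails, so callers should guard the result before using it.
def _example_LoadGCN_usage(triggerid=111111):
    gcn = LoadGCN(triggerid)  # cached pickle if present, else parse anew
    if gcn is None:
        return None
    # clobber=True re-extracts values from the stored notice;
    # redownload_gcn=True also re-downloads the notice itself first.
    gcn = LoadGCN(triggerid, clobber=True, redownload_gcn=True)
    if gcn is not None:
        SaveGCN(gcn)  # persist any attribute changes back to the pickle
    return gcn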
def qExtinction(source_name, ra, dec):
    extdictpath = storepath + 'GRBextinction.pkl'
    ext_dict = qPickle.load(extdictpath)
    if not ext_dict:
        ext_dict = {}
    if source_name in ext_dict:
        gal_EB_V = ext_dict[source_name]
    else:
        try:
            ra_str = str(ra) + 'd'
            dec_str = str(dec) + 'd'
            best_position = (ra_str, dec_str)
            ext_list = extinction(lon=best_position[0],
                                  lat=best_position[1], system_in='Equatorial',
                                  system_out='Galactic', obs_epoch="2005.0")
            gal_EB_V = ext_list[0]
        except:
            raise Exception('Cannot Grab Extinction Values')
        ext_dict.update({source_name: gal_EB_V})
        qPickle.save(ext_dict, extdictpath, clobber=True)
    return gal_EB_V
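
# Illustrative call of the extinction cache above. The coordinates (decimal
# degrees) and source name are placeholders; the name is just the dictionary
# key, and repeat calls with the same name are served from GRBextinction.pkl.
def _example_qExtinction_usage():
    gal_EB_V = qExtinction('GRB990123', 231.38, 44.75)
    print 'Galactic E(B-V):', gal_EB_V
    return gal_EB_V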
def getstar(reg, out_pickle, filename_h, filename_j, filename_k,
            ap_h=None, ap_j=None, ap_k=None, triggerid=None,
            calibration_reg=None, caliblimit=False):
    '''
    After creating a calibration deep-stack for all images, use this function
    to perform photometry of all calibration stars in the calibration region
    file, and output a pickle file containing a photometry dictionary to be
    used in place of 2MASS.

    Requirements: q_phot and qPickle.

    Note: the keyword calibration_reg is for the calibration stars used to
    calibrate these calibration stars. For now, just leave as None.
    '''
    stardict = {}
    stardict_h = {}
    stardict_j = {}
    stardict_k = {}
    if not ap_h:
        ap_h = raw_input('Enter H aperture: ')
    if not ap_j:
        ap_j = raw_input('Enter J aperture: ')
    if not ap_k:
        ap_k = raw_input('Enter K aperture: ')
    regpath = storepath + reg
    regfile = open(regpath, 'r')
    reglist = regfile.readlines()
    temppath = storepath + 'temp.reg'
    star_pos_list = []
    ##################################################################
    # This part is not actually needed, but is kept in case we want to
    # get the stars' positions...
    for line in reglist:
        if 'circle' in line:
            star_str = line.strip('circle').strip().strip('")').strip('(').split(',')
            ra_str = star_str[0]
            dec_str = star_str[1]
            star_pos = (float(ra_str), float(dec_str))
            star_pos_list += [star_pos]
        else:
            pass
    # End unneeded part
    ###################################################################
    callist = []
    for line in reglist:
        if 'circle' in line:
            callist += [line]
        else:
            pass
    keylist = []
    for index, star_reg in enumerate(callist):
        if os.path.exists(temppath):
            os.remove(temppath)
        starname = 'star' + str(index)
        tempreg = open(temppath, 'w')
        tempreg.write('# Region file format: DS9 version 4.1\n')
        secondstr = 'global color=green dashlist=8 3 width=2 font="helvetica ' + \
            '16 normal" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 ' + \
            'delete=1 include=1 source=1\n'
        tempreg.write(secondstr)
        tempreg.write('fk5\n')
        tmp_str = star_reg
        tempreg.write(tmp_str)
        tempreg.close()
        star_str = star_reg.strip('circle').strip().strip('")').strip('(').split(',')
        ra_str = star_str[0]
        dec_str = star_str[1]
        ra_round = ra_str[0:8]
        dec_round = dec_str[0:7]
        star_pos = (ra_round, dec_round)
        star_pos_str = str(star_pos)
        data_h = q_phot.dophot(filename_h, temppath, calreg=calibration_reg,
                               ap=ap_h, caliblimit=caliblimit)
        parent_label = star_pos_str
        time = float(t_mid.t_mid(filename_h, trigger=triggerid))
        terr = float(t_mid.t_mid(filename_h, trigger=triggerid, delta=True)) / 2.
        timetuple = (time, terr)
        data_h.update({'t_mid': timetuple})
        this_star_dict_h = {parent_label: data_h}
        stardict_h.update(this_star_dict_h)
        keylist.append(parent_label)
        data_j = q_phot.dophot(filename_j, temppath, calreg=calibration_reg,
                               ap=ap_j, caliblimit=caliblimit)
        parent_label = star_pos_str
        time = float(t_mid.t_mid(filename_j, trigger=triggerid))
        terr = float(t_mid.t_mid(filename_j, trigger=triggerid, delta=True)) / 2.
        timetuple = (time, terr)
        data_j.update({'t_mid': timetuple})
        this_star_dict_j = {parent_label: data_j}
        stardict_j.update(this_star_dict_j)
        data_k = q_phot.dophot(filename_k, temppath, calreg=calibration_reg,
                               ap=ap_k, caliblimit=caliblimit)
        parent_label = star_pos_str
        time = float(t_mid.t_mid(filename_k, trigger=triggerid))
        terr = float(t_mid.t_mid(filename_k, trigger=triggerid, delta=True)) / 2.
        timetuple = (time, terr)
        data_k.update({'t_mid': timetuple})
        this_star_dict_k = {parent_label: data_k}
        stardict_k.update(this_star_dict_k)
    h_dict = {'h': stardict_h}
    j_dict = {'j': stardict_j}
    k_dict = {'k': stardict_k}
    stardict.update(h_dict)
    stardict.update(j_dict)
    stardict.update(k_dict)
    picklepath = storepath + out_pickle + '.data'
    qPickle.save(stardict, picklepath, clobber=True)
    print 'Created a dictionary for the following star locations:'
    print keylist
    return stardict
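
# A sketch of a getstar() call on a set of H/J/K deep-stack mosaics. All
# filenames, apertures, and the trigger id are placeholders; the returned
# dictionary is keyed by band ('h', 'j', 'k'), then by (ra, dec) label.
def _example_getstar_usage():
    stars = getstar('calstars.reg', 'GRB_calstars',
                    'h_longGRB_coadd.fits', 'j_longGRB_coadd.fits',
                    'k_longGRB_coadd.fits',
                    ap_h=6, ap_j=6, ap_k=6, triggerid=111111)
    print stars['h'].keys()  # (ra, dec) position labels of the measured stars
    return stars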
def magplot(reg, filelist, out_pickle=None, ap=None, triggerid=None,
            globit=False, noerr=False, magrange=None, caliblimit=True):
    '''
    Plot magnitudes of calibration stars as a function of time. Do this after
    the initial coaddition of triplestacks to plot the magnitudes of the
    calibration stars as a function of time for each science image. Run once
    for EACH BAND.

    Requirements: q_phot and t_mid.

    Arguments:
        reg: region file of calibration stars to test and plot
        filelist: EITHER a list of files to run photometry on, or a string
            to glob for files in a directory (if globit=True)
        out_pickle: override the filename of the pickle file to write to.
            Uses the default naming convention if not specified.
        triggerid: Swift trigger id of the GRB (if applicable; otherwise None)
        globit: if True, do a glob search to get the list of filenames to run
            photometry on instead of specifying an explicit list.
        noerr: if True, do not plot error bars

    Note: the first images in filelist are used to collect the calibration
    star keywords (the internal index testind is incremented until all stars
    in the region file have been found), so make sure all your calib stars
    are present and viewable in the early images.
    '''
    if globit == True:
        globstr1 = str(filelist) + '*coadd*[0-9].fits'
        globstr2 = str(filelist) + '*coadd.fits'
        globlist1 = glob.glob(globstr1)
        globlist2 = glob.glob(globstr2)
        filelist = globlist1 + globlist2
        print 'globit activated'
        print filelist
    unique_name = (filelist[0].split('_'))[2]
    filt = filelist[0][0]
    calib_star_keys = []
    testind = 0
    caldict = {}
    matplotlib.pyplot.clf()
    regpath = storepath + reg
    temppath = storepath + 'temp.reg'
    regfile = open(regpath, 'r')
    reglist = regfile.readlines()
    callist = []
    for line in reglist:
        if 'circle' in line:
            callist += [line]
        else:
            pass
    colornumber = len(callist)
    n_stars = len(callist)
    while (len(calib_star_keys) < len(callist)) and (testind < len(filelist)):
        tempreg = open(temppath, 'w')
        tempreg.write('# Region file format: DS9 version 4.1\n')
        secondstr = 'global color=green dashlist=8 3 width=2 font="helvetica ' + \
            '16 normal" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 ' + \
            'delete=1 include=1 source=1\n'
        tempreg.write(secondstr)
        tempreg.write('fk5\n')
        print callist[0]
        # Add a few arcseconds to the first position to make sure we don't
        # use it as a target
        test_ra = float(callist[0].lstrip('circle(').split(',')[0]) + 0.005
        test_dec = float(callist[0].lstrip('circle(').split(',')[1]) + 0.005
        tmp_str = 'circle(%f,%f,4") # width=2 font="helvetica 16 normal"\n' % (test_ra, test_dec)
        tempreg.write(tmp_str)
        tempreg.close()
        # Grab the calib stars we will be looping over:
        calregion = '/calstarregs/' + os.path.basename(reg)
        print ("Using image #%i to get the calib stars; if not all are "
               "present in the final plot, try a different image" % (testind))
        testimage = filelist[testind]
        photdict = q_phot.photreturn(os.path.basename(reg), testimage,
                                     reg=temppath, calregion=calregion,
                                     aper=ap, auto_upper=False,
                                     caliblimit=caliblimit,
                                     trigger_id=triggerid)
        for key in photdict[testimage]['calib_stars'].keys():
            if not key in calib_star_keys:
                calib_star_keys.append(key)
        testind += 1
    print 'length of stuff'
    print len(callist)
    print len(calib_star_keys)
    for index, ra_str in enumerate(calib_star_keys):
        datalist = []
        dataerrlist = []
        timelist = []
        timeerrlist = []
        colorstr = str(float(index + 1) / colornumber)
        colortuple = (colorstr, 0.5, 0)
        starname = 'star' + str(index)
        precal_dict = {}
        for image in filelist:
            print '**************************************'
            print 'Photometry of star' + str(index)
            print 'doing image ' + image
            calregion = '/calstarregs/' + os.path.basename(reg)
            data = q_phot.photreturn(os.path.basename(reg), image,
                                     reg=temppath, calregion=calregion,
                                     aper=ap, auto_upper=False,
                                     caliblimit=caliblimit)
            image_data = data[image]
            if image[0] != filt:
                raise ValueError('Filter for %s does not match the others' % (image))
            if ra_str in image_data['calib_stars']:
                datalist += [image_data['calib_stars'][ra_str]['new_mag']]
                dataerrlist += [image_data['calib_stars'][ra_str]['new_e_mag']]
                time = float(t_mid.t_mid(image, trigger=triggerid))
                terr = float(t_mid.t_mid(image, trigger=triggerid, delta=True)) / 2.
                timetuple = (time, terr)
                image_data.update({'t_mid': timetuple})
                timelist += [time]
                timeerrlist += [terr]
                dec_str = str(image_data['calib_stars'][ra_str]['dec'])[0:7]
                parent_label = image
                precal_dict.update({parent_label: image_data})
            else:
                print 'WARNING: CALIB STAR %s IS NOT USABLE FOR THIS IMAGE' % (ra_str)
        datarr = array(datalist)
        daterrarr = array(dataerrlist)
        timarr = array(timelist)
        timerrarr = array(timeerrlist)
        if noerr == True:
            pylab.plot(timarr, datarr, 'o', label=str((ra_str, dec_str)))
        else:
            pylab.errorbar(timarr, datarr, yerr=daterrarr, fmt='o',
                           label=str((ra_str, dec_str)))  # star_pos_str
        caldict.update({ra_str: precal_dict})
        # matplotlib.pyplot.errorbar(timarr, datarr, yerr=daterrarr,
        #     label=starname, fmt='k.', color=colortuple)
    star_stdv = numpy.std(datarr)
    print 'The standard deviation of the calibration stars is: (DISREGARD THIS, THIS STDV IS PROBABLY WRONG)'
    print star_stdv
    plottitle = '%s Calibration Stars Magnitude vs. t_mid' % (filt)
    plotylabel = '%s Magnitude' % (filt)
    matplotlib.pyplot.title(plottitle)
    matplotlib.pyplot.xlabel('Time After Burst (s)')
    matplotlib.pyplot.ylabel(plotylabel)
    ax = matplotlib.pyplot.gca()
    ax.set_ylim(ax.get_ylim()[::-1])
    ax.set_xlim((ax.get_xlim()[0]), (ax.get_xlim()[1]) * 1.2)
    matplotlib.pyplot.legend()
    if magrange:
        ax.set_ylim(magrange)
    F = pylab.gcf()
    DefaultSize = F.get_size_inches()
    DPI = F.get_dpi()
    # F.set_size_inches((DefaultSize[0]*2.5, DefaultSize[1]*2.5))
    # was getting increasingly larger with multiple runs
    F.set_size_inches((20, 15))
    n_stars_str = str(n_stars)
    if not out_pickle:
        picklepath = storepath + unique_name + '_' + filt + '_' + 'ap' + \
            str(ap) + '_' + n_stars_str + '_cal_stars.data'
    else:
        picklepath = out_pickle
    filepath = storepath + unique_name + '_' + filt + '_' + 'ap' + \
        str(ap) + '_' + n_stars_str + '_cal_stars.png'
    # matplotlib.pyplot.savefig(filepath)
    qPickle.save(caldict, picklepath, clobber=True)
    F.savefig(filepath)
    return caldict
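
# Two illustrative ways to drive magplot() (filenames, prefix, and aperture
# are made up). With globit=True the filelist argument is treated as a
# directory/filename prefix to glob for coadded images rather than a list.
def _example_magplot_usage():
    caldict = magplot('calstars.reg',
                      ['h_GRB_1_coadd.fits', 'h_GRB_2_coadd.fits'], ap=6)
    caldict = magplot('calstars.reg', './h_GRB_', ap=6, globit=True)
    return caldict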
def SaveDB(loadeddb):
    # If the attributes of the loaded DB have changed since loading, use
    # this to save the new version of the DB.
    pklpath = storepath + 'DB_' + str(getattr(loadeddb, 'name')) + '.pkl'
    qPickle.save(loadeddb, pklpath, clobber=True)
def p_photreturn(self, outname, ap, limsigma=3.0, plotcalib=True,
                 offset_calc_type='weighted_mean', clobber=False,
                 utburst=None):
    '''Attempt to build up the same structure as the photdict from q_phot.

    keys: filename
    '''
    if utburst == None:
        utburst = datetime.datetime(1858, 11, 17)  # just use mjd
    offset_calc_type = offset_calc_type.lower()
    photdict = {}
    newname = self.imagefilename + '_ap' + str(ap)
    filepath = storepath + 'phot_' + outname  # outname determines the filepath
    # if calregion:
    #     calibration_list = openCalRegion(calregion)
    #     n_calstars = len(calibration_list)
    #     filepath += '_WithCalReg' + str(n_calstars)
    # if stardict:
    #     filepath += '_WithDeepStack'
    filepath += '.data'
    # Return the cached photometry if it exists and clobber was not requested
    if clobber == False:
        if os.path.isfile(filepath) == True:
            data = qPickle.load(filepath)
            if newname in data:
                return data
            else:
                clobber = True
        else:
            clobber = True
    if clobber == True:
        if os.path.isfile(filepath) == False:
            photdict = {}
        else:
            # Load the existing pickle file, enabling photLoop to work
            photdict = qPickle.load(filepath)
        # create dictionary for file
        data = self.do_phot(ap=ap, limsigma=limsigma, plotcalib=plotcalib,
                            offset_calc_type=offset_calc_type)
        if not data:
            print "Photometry failed. No data returned."
            return  # rerun to get upper limit??
        label = newname
        # somehow update time here?
        if self.scope == 'pairitel':
            tdict = {'utburst': utburst,
                     'STOP_CPU': data['STOP_CPU'],
                     'STRT_CPU': data['STRT_CPU']}
            time = float(t_mid.t_mid(time_dict=tdict))
            terr = float(t_mid.t_mid(delta=True, time_dict=tdict)) / 2.
            timetuple = (time, terr)
            data.update({'t_mid': timetuple})
        elif self.scope == 'kait':  # untested
            tmid = startexp2tmid(utburst, data['STRT_CPU'], data['EXPTIME'])
            terr = data['EXPTIME']
            timetuple = (tmid, terr)
            data.update({'t_mid': timetuple})
        photdict.update({label: data})
    qPickle.save(photdict, filepath, clobber=True)
    return photdict
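
# p_photreturn is a method, so this sketch assumes an enclosing image object
# (here called 'img') with imagefilename, scope, and do_phot() already set
# up; the outname and aperture are illustrative.
def _example_p_photreturn_usage(img):
    phot = img.p_photreturn('GRBphot', ap=6)  # returns cached dict if present
    phot = img.p_photreturn('GRBphot', ap=6, clobber=True)  # force re-run
    # For the 'pairitel' and 'kait' scopes a (t_mid, t_err) tuple is attached:
    print phot[img.imagefilename + '_ap6']['t_mid']
    return phot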
def SaveGCN(loadedgcn):
    # If the attributes of the loaded GCN have changed since loading, use
    # this to save the new version of the GCN.
    pklpath = storepath + 'sw' + str(loadedgcn.triggerid) + 'GCN.pkl'
    qPickle.save(loadedgcn, pklpath, clobber=True)
def _do_new_entry_actions(new_entry, email=True, email_to='*****@*****.**'):
    # being fed a parsed rss entry
    try:
        psn_id_full = new_entry.id.split('/followups/')[1].strip('"').split('.html')[0]
        # for some reason, the URL has a space once the PSN label gets added:
        # http://cbat.eps.harvard.edu/unconf/followups/PSN J15111485+4609115
        # u'PSN J15111485+4609115'
        psn_id = str(psn_id_full.split()[-1])  # u'J15111485+4609115'
    except:
        qErr.qErr(errtitle="PSN ID URL malformed", errtext=new_entry.id)
        psn_id = "Unknown"
        psn_id_full = "Unknown"
    # Check if it's in the pickle file; if so, update it (add to summary list)
    all_pkl_path = storepath + 'psn_parsed_entries.pkl'
    all_entries = qPickle.load(all_pkl_path)
    if all_entries == None:
        all_entries = {}
    is_new_id = False
    if not psn_id in all_entries.keys():
        is_new_id = True
        all_entries.update({psn_id: {}})  # add a new empty dict with the proper id
    # load and parse the PSN string
    psn_url = "http://cbat.eps.harvard.edu/unconf/followups/%s" % (psn_id)
    psn_string = _download_and_obtain_psn_string(psn_url)
    if psn_string != None:
        psn_dict = _parse_psn_format(psn_string)
    else:
        psn_dict = None
    ## Make html
    if psn_dict:
        all_entries[psn_id].update(psn_dict)  # add/update the dictionary values; though should they change?
        dss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=dss2red" % (
            psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        dss_html = "<a href='%s'>DSS Finding Chart</a><br>" % (dss_url)
        sdss_url = "http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2&err_shape=combo&incl_scale=yes&size=4&src_name=%s&pos_label=Pos&cont_str=&survey=sdss" % (
            psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation'])
        sdss_html = "<a href='%s'>SDSS Finding Chart</a> (May not be available)<br>" % (sdss_url)
        pretty_output = '''
        <br><br>
        <table border="0">
        <tr><td>Object:</td><td>%s</td></tr>
        <tr><td>Designation:</td><td>%s</td></tr>
        <tr><td>Discovery date:</td><td>%s</td></tr>
        <tr><td>Mag at date:</td><td>%s</td></tr>
        <tr><td>Filter:</td><td>%s</td></tr>
        <tr><td>RA:</td><td>%s (= %f)</td></tr>
        <tr><td>Dec:</td><td>%s (= %f)</td></tr>
        <tr><td>Presumed host:</td><td>%s</td></tr>
        <tr><td>Offset from host:</td><td>%s, %s (arcsec)</td></tr>
        <tr><td>Discoverer:</td><td>%s</td></tr>
        <tr><td>Obs. arc:</td><td>%s</td></tr>
        </table>
        <br>
        ''' % (psn_dict['obj_type'], psn_dict['designation'],
               psn_dict['date_string'].replace(' ', '-').replace('2013', 'UT2013'),
               psn_dict['mag'], psn_dict['filter'],
               psn_dict['ra'], psn_dict['ra_deg'],
               psn_dict['dec'], psn_dict['dec_deg'],
               psn_dict['locale'], psn_dict['ra_offset'], psn_dict['dec_offset'],
               psn_dict['discoverer'], psn_dict['arc'])
    else:
        pretty_output = 'Cannot parse PSN Message.'
    print pretty_output
    html_body = '''<html><body>
    <a href="%s">%s</a>''' % (psn_url, psn_id)
    if is_new_id:
        html_body += ' (First report of this transient)'
    else:
        html_body += ' (Update)'
    html_body += '<br><br>'
    if psn_dict:
        html_body += dss_html
        html_body += sdss_html
    html_body += pretty_output
    html_body += new_entry.summary
    html_body += '<br><br><br></body></html>'
    if 'summary_list' in all_entries[psn_id]:
        summary_list = all_entries[psn_id]['summary_list']
        summary_list.append(new_entry.summary)
    else:
        summary_list = [new_entry.summary]
    all_entries[psn_id].update({'summary_list': summary_list})
    # do email if new
    if email == True:
        if is_new_id:
            subject = "New Transient %s" % (psn_id_full)
        else:
            subject = "Update to Transient %s" % (psn_id_full)
        print "Sending email: '%s'" % (subject)
        send_gmail.domail(email_to, subject, html_body, html=True)
    # do separate/no email if updated?
    # save the updated pickle file
    qPickle.save(all_entries, all_pkl_path, clobber=True)
    return is_new_id
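
# For testing the entry handler above in isolation: Monitor_PSN_RSS() (below)
# saves the most recent feed entry to psn_last_entry.pkl, which can be
# replayed through this function without sending email. Sketch only.
def _example_replay_last_entry():
    last_entry = qPickle.load(storepath + 'psn_last_entry.pkl')
    if last_entry:
        return _do_new_entry_actions(last_entry, email=False)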
def Monitor_PSN_RSS(feed_url="http://www.cbat.eps.harvard.edu/unconf/tocp.xml",
                    save_latest=True):
    '''
    This function checks to see if a particular RSS entry has already been
    loaded by entering it in a sqlite database.

    To keep checking this feed, put it in an infinite while loop with a set
    delay time:

    # while(True):
    #     sql_tuple_list = Monitor_PSN_RSS("http://feedurl.xml")
    #     time.sleep(60)
    '''
    from time import strftime
    import sqlite3
    try:
        import feedparser
    except:
        print "feedparser module not installed"
        print "visit http://www.feedparser.org/"
        sys.exit(1)
    # Database management code stolen from
    # http://www.halotis.com/2009/07/01/rss-twitter-bot-in-python/
    DATABASE = storepath + 'psn_rss_feed.sqlite'
    conn = sqlite3.connect(DATABASE)
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    # Create the table if it doesn't exist
    c.execute('CREATE TABLE IF NOT EXISTS RSSContent '
              '(`updated`, `title`, `dateAdded`, `id`, `content`, `url`)')
    sql_entry_list = []
    new_rss_entry_list = []
    rssinst = feedparser.parse(feed_url)
    if save_latest:
        try:
            last_entry = rssinst['entries'][0]  # saving this for testing purposes
            last_entry_outpath = storepath + 'psn_last_entry.pkl'
            qPickle.save(last_entry, last_entry_outpath, clobber=True)
        except:
            qErr.qErr("Could not save last_entry")
    duplicate_count = 0
    for entry in rssinst['entries']:
        if duplicate_count < 3:
            # check for duplicates
            c.execute('select * from RSSContent where updated=?',
                      (entry.updated,))  # should be unique
            if not c.fetchall():
                xml_file = entry.link  # apparently entry.link is the address wanted
                # print xml_file
                shortened_link = xml_file
                if not 'link' in entry:
                    errtitle = 'link value not in RSS entry'
                    qErr.qErr(errtitle=errtitle)
                if not 'title' in entry:
                    errtitle = 'title value not in RSS entry'
                    qErr.qErr(errtitle=errtitle)
                if not 'summary' in entry:
                    errtitle = 'summary value not in RSS entry; using blank value'
                    print errtitle
                    summary = 'unknown'
                else:
                    summary = entry.summary
                if not 'id' in entry:
                    errtitle = 'id value not in RSS entry; using blank value'
                    print errtitle
                    entryid = 'unknown'
                else:
                    entryid = entry.id
                try:
                    sql_entry = (entry.updated, entry.title, entryid, summary,
                                 entry.link)
                    print sql_entry
                    c.execute('insert into RSSContent (`updated`, `title`, '
                              '`id`, `content`, `url`) values (?,?,?,?,?)',
                              sql_entry)
                    sql_entry_list.append(sql_entry)
                    new_rss_entry_list.append(entry)
                except:
                    qErr.qErr()
                    print "Could not update RSS database for entry %s" % (entry.updated)
            else:
                duplicate_count += 1
            conn.commit()
        else:
            # Break the loop if more than 3 duplicates; really we only need
            # to see one duplicate to break the loop, but add some slack just
            # in case. Since newer feed entries are at the top, there is no
            # need to loop through every single one: if there are no new
            # entries, you should know immediately.
            break
    return new_rss_entry_list
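
# Putting the two PSN pieces together: the polling-daemon pattern from the
# docstring above, feeding each new entry to _do_new_entry_actions. The
# 60-second cadence is illustrative.
def _example_psn_polling_loop():
    import time
    while True:
        for entry in Monitor_PSN_RSS():
            _do_new_entry_actions(entry)
        time.sleep(60)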