def _get_last_entry():
    '''For testing purposes, get the last entry of the PSN feed.'''
    last_entry_outpath = storepath + 'psn_last_entry.pkl'
    last_entry = qPickle.load(last_entry_outpath)
    if last_entry is None:
        # Nothing cached yet; poll the feed once to populate the pickle
        Monitor_PSN_RSS()
        last_entry = qPickle.load(last_entry_outpath)
    return last_entry
def multibeuermann_test2():
    import matplotlib.pyplot  # 'import matplotlib' alone does not expose pyplot
    import numpy
    from MiscBin import qPickle
    matplotlib.pyplot.clf()
    # s = s + f_c,f * ( (t/tbk_c,f) ^ (-sh_c * alpha_c,b) + (t/tbk_c,f) ^ (-sh_c * alpha_c,a) )^(-1./sh_c)
    t = []
    photdict = qPickle.load('/Users/pierrechristian/qrepo/store/picklefiles/very_good_pickles/071025_goodpickle.data')
    for epoch in photdict:
        if 'h' in epoch:
            t += [float(photdict[epoch]['t_mid'][0])]
    t = numpy.array(t)
    print t
    P1 = [-1.576, 1.782, 574.66, -1, 6100]
    P2 = [1.242, -10.218, 1436.927, -1, 1394.88]
    s = multibeuermann(t, P1, sec_comp=P2)
    matplotlib.pyplot.errorbar(t, s, linestyle='None', marker='o')
    print s
    ax = matplotlib.pyplot.gca()
    ax.set_ylim(ax.get_ylim()[::-1])  # reverse the y limits (magnitudes: brighter is up)
    matplotlib.pyplot.xlabel('Time since Burst (s)')
    matplotlib.pyplot.ylabel('Mag')
    ax.set_xscale('log')
    savepath = './071025_multibeuermann_test2.png'
    print 'lightcurve saved to ' + savepath
    matplotlib.pyplot.savefig(savepath)
    matplotlib.pyplot.close()
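# The commented formula above is a smoothly broken (Beuermann-style) power law.
# Below is a minimal sketch of a single component implementing that expression
# directly; the (alpha_a, alpha_b, tbk, sh, f) argument names and their ordering
# are assumptions for illustration and may not match the actual P1/P2 parameter
# convention expected by multibeuermann.
def beuermann_component(t, alpha_a, alpha_b, tbk, sh, f):
    '''f * ((t/tbk)**(-sh*alpha_b) + (t/tbk)**(-sh*alpha_a))**(-1./sh)'''
    import numpy
    t = numpy.asarray(t, dtype=float)
    return f * ((t / tbk)**(-sh * alpha_b) + (t / tbk)**(-sh * alpha_a))**(-1. / sh)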
def LoadGCN(triggerid, clobber=False, redownload_gcn=False):
    ### LOAD or CREATE PICKLE STORAGE FILE
    # Attempt to load pickle file
    pklpath = storepath + 'sw' + str(triggerid) + 'GCN.pkl'
    loadedgcn = qPickle.load(pklpath)
    # If we couldn't load, or clobber == True, create a new instance of the class
    if clobber or not loadedgcn:
        # Create new instance of GCN Notice
        loadedgcn = GCNNotice(triggerid, clobber=redownload_gcn)
        try:
            # Extract values from GCN Notice
            loadedgcn.extract_values()
            loadedgcn.get_positions()
            if loadedgcn.successful_load:
                # Save new pickle file
                qPickle.save(loadedgcn, pklpath, clobber=True)
            else:
                errtitle = 'Could not successfully load GCN for trigger %s' % (str(triggerid))
                qErr.qErr(errtitle=errtitle)
                return
        except:
            errtitle = 'Could not extract GCN values for trigger %s' % (str(triggerid))
            qErr.qErr(errtitle=errtitle)
            return
    return loadedgcn
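# Hedged usage sketch of the load-or-create pattern above; the trigger id is a
# made-up placeholder, not a real Swift trigger taken from this repo.
# gcn = LoadGCN(123456)                    # load the cached pickle if present
# gcn = LoadGCN(123456, clobber=True)      # re-extract values from the stored notice
# gcn = LoadGCN(123456, clobber=True, redownload_gcn=True)  # re-download the notice too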
def makedict(df_grb, outputtext=False, very_good_pickle_path=vgpp):
    ''' make dict for all_bursts() '''
    all_GRB_dict = {}
    GRB_list = list(df_grb.index)
    cc = 0  # count of consecutive text-output failures
    for index, GRB in enumerate(GRB_list):
        globstr = very_good_pickle_path + GRB + '*'
        globresult = glob.glob(globstr)
        if len(globresult) == 0:
            # was .format(globresult), which is always the empty list here
            print "Can't find any files matching {}. External HD plugged in?".format(globstr)
            raise ValueError
        pathstr = globresult[0]
        if not os.path.exists(pathstr):
            print "The path {} doesn't appear to exist. Wrong path?".format(pathstr)
        print pathstr
        result = qPickle.load(pathstr)
        galebv = df_grb.loc[GRB]['galebv']
        redshift = df_grb.loc[GRB]['z']
        if outputtext:
            try:
                q_phot.textoutput(result, name=GRB, galebv=galebv, redshift=redshift)
            except:
                print "CANNOT MAKE OUTPUTTEXT FOR {}".format(GRB)
                cc += 1
                if cc == 3:
                    raise ValueError
        GRB_dict = {GRB: result}
        all_GRB_dict.update(GRB_dict)
    return all_GRB_dict
def update_z_all_GRBs(GRB_list, z_list,
                      very_good_pickle_path='/Volumes/MyPassport/PTELBACKUP2/picklefiles/very_good_pickles/'):
    '''Updates the z values of all GRBs with known z'''
    from Phot import q_phot
    from MiscBin import qPickle
    from glob import glob
    all_GRB_dict = {}
    for index, GRB in enumerate(GRB_list):
        globstr = very_good_pickle_path + GRB + '*'
        pathstr = glob(globstr)[0]
        print pathstr
        result = qPickle.load(pathstr)
        q_phot.update_all_z(result, z_list[index], 0)
        GRB_dict = {GRB: result}
        all_GRB_dict.update(GRB_dict)
    return all_GRB_dict
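# Hedged usage sketch; the GRB names and redshifts below are illustrative
# placeholders, not values taken from this repo.
# updated = update_z_all_GRBs(['071025', '090423'], [5.0, 8.2])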
def makedict(GRB_list, outputtext=False,
             very_good_pickle_path='/Volumes/MyPassport/PTELBACKUP2/picklefiles/very_good_pickles/'):
    ''' make dict for all_bursts() '''
    from Phot import q_phot
    from MiscBin import qPickle
    from glob import glob
    all_GRB_dict = {}
    for index, GRB in enumerate(GRB_list):
        globstr = very_good_pickle_path + GRB + '*'
        pathstr = glob(globstr)[0]
        print pathstr
        result = qPickle.load(pathstr)
        if outputtext:
            q_phot.textoutput(result, name=GRB)
        GRB_dict = {GRB: result}
        all_GRB_dict.update(GRB_dict)
    return all_GRB_dict
def LoadDB(name, clobber=False):
    ### LOAD or CREATE PICKLE STORAGE FILE
    # Attempt to load pickle file
    pklpath = storepath + 'DB_' + str(name) + '.pkl'
    loadeddb = qPickle.load(pklpath)
    # If we couldn't load, or clobber == True, create a new instance of the class
    if clobber or not loadeddb:
        loadeddb = None
        # Create new instance of db Notice
        # loadeddb = GRBdb(name, redownload_gcn=redownload_gcn, incl_reg=incl_reg, incl_fc=incl_fc)
        # try:
        #     if loadeddb.successful_load:
        #         # Save new pickle file
        #         qPickle.save(loadeddb, pklpath, clobber=True)
        #     else:
        #         print 'Could not successfully load db.'
        #         return
        # except:
        #     print "Could not extract values for db."
        #     qErr.qErr()
    return loadeddb
def qExtinction(source_name, ra, dec):
    extdictpath = storepath + 'GRBextinction.pkl'
    ext_dict = qPickle.load(extdictpath)
    if not ext_dict:
        ext_dict = {}
    if source_name in ext_dict:
        gal_EB_V = ext_dict[source_name]
    else:
        try:
            ra_str = str(ra) + 'd'
            dec_str = str(dec) + 'd'
            best_position = (ra_str, dec_str)
            ext_list = extinction(lon=best_position[0],
                                  lat=best_position[1], system_in='Equatorial',
                                  system_out='Galactic', obs_epoch="2005.0")
            gal_EB_V = ext_list[0]
        except:
            raise Exception('Cannot Grab Extinction Values')
        ext_dict.update({source_name: gal_EB_V})
        qPickle.save(ext_dict, extdictpath, clobber=True)
    return gal_EB_V
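# Hedged usage sketch: qExtinction memoizes Galactic E(B-V) lookups per source
# name in a pickle, so the extinction service is queried at most once per source.
# The coordinates below are illustrative placeholders.
# ebv = qExtinction('GRB071025', 355.068, 31.775)  # first call queries and caches
# ebv = qExtinction('GRB071025', 355.068, 31.775)  # second call reads the cache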
def p_photreturn(self, outname, ap, limsigma=3.0, plotcalib=True,
                 offset_calc_type='weighted_mean', clobber=False,
                 utburst=None):
    '''Attempt to build up the same structure as the photdict from q_phot.

    keys: filename
    '''
    if utburst is None:
        utburst = datetime.datetime(1858, 11, 17)  # just use mjd
    offset_calc_type = offset_calc_type.lower()
    photdict = {}
    newname = self.imagefilename + '_ap' + str(ap)
    filepath = storepath + 'phot_' + outname  # outname is the filepath
    # if calregion:
    #     calibration_list = openCalRegion(calregion)
    #     n_calstars = len(calibration_list)
    #     filepath += '_WithCalReg' + str(n_calstars)
    # if stardict:
    #     filepath += '_WithDeepStack'
    filepath += '.data'
    if not clobber and os.path.isfile(filepath):
        # Return the cached photometry if it already covers this
        # image/aperture combination; otherwise fall through and redo it.
        data = qPickle.load(filepath)
        if newname in data:
            return data
    if os.path.isfile(filepath):
        # Load the existing pickle file, enabling photLoop to append to it
        photdict = qPickle.load(filepath)
    # create dictionary for file
    data = self.do_phot(ap=ap, limsigma=limsigma, plotcalib=plotcalib,
                        offset_calc_type=offset_calc_type)
    if not data:
        print "Photometry failed. No data returned."
        return  # rerun to get upper limit??
    label = newname
    # somehow update time here?
    if self.scope == 'pairitel':
        tdict = {'utburst': utburst,
                 'STOP_CPU': data['STOP_CPU'],
                 'STRT_CPU': data['STRT_CPU']}
        time = float(t_mid.t_mid(time_dict=tdict))
        terr = float(t_mid.t_mid(delta=True, time_dict=tdict)) / 2.
        timetuple = (time, terr)
        data.update({'t_mid': timetuple})
    elif self.scope == 'kait':  # untested
        tmid = startexp2tmid(utburst, data['STRT_CPU'], data['EXPTIME'])
        terr = data['EXPTIME']
        timetuple = (tmid, terr)  # bug fix: was (time, terr); 'time' is undefined here
        data.update({'t_mid': timetuple})
    photdict.update({label: data})
    qPickle.save(photdict, filepath, clobber=True)
    return photdict
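# Hedged sketch of the cache structure p_photreturn builds: a dict keyed by
# '<imagefilename>_ap<aperture>', each value being the do_phot() result dict
# augmented with a 't_mid' (mid-time, uncertainty) tuple. The names below are
# hypothetical.
# phot = img.p_photreturn('GRB071025', ap=5.0)
# phot['q071025_coadd.fits_ap5.0']['t_mid']  # -> (mid-time, uncertainty)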
def _do_new_entry_actions(new_entry, email=True, email_to='*****@*****.**'):
    # being fed a parsed rss entry
    try:
        psn_id_full = new_entry.id.split('/followups/')[1].strip('"').split('.html')[0]
        # for some reason, the URL has a space when the PSN label gets added:
        # http://cbat.eps.harvard.edu/unconf/followups/PSN J15111485+4609115
        # u'PSN J15111485+4609115'
        psn_id = str(psn_id_full.split()[-1])  # u'J15111485+4609115'
    except:
        qErr.qErr(errtitle="PSN ID URL malformed", errtext=new_entry.id)
        psn_id = "Unknown"
        psn_id_full = "Unknown"

    # check if it's in the pickle file; if so, update it - add to summary list
    all_pkl_path = storepath + 'psn_parsed_entries.pkl'
    all_entries = qPickle.load(all_pkl_path)
    if all_entries is None:
        all_entries = {}
    is_new_id = False
    if psn_id not in all_entries:
        is_new_id = True
        all_entries.update({psn_id: {}})  # update with a new empty dict with proper id

    # load and parse the PSN string
    psn_url = "http://cbat.eps.harvard.edu/unconf/followups/%s" % (psn_id)
    psn_string = _download_and_obtain_psn_string(psn_url)
    if psn_string is not None:
        psn_dict = _parse_psn_format(psn_string)
    else:
        psn_dict = None

    ## Make html
    if psn_dict:
        all_entries[psn_id].update(psn_dict)  # add/update the dictionary values; though should they change?
        dss_url = ("http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2"
                   "&err_shape=combo&incl_scale=yes&size=4&src_name=%s"
                   "&pos_label=Pos&cont_str=&survey=dss2red"
                   % (psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation']))
        dss_html = "<a href='%s'>DSS Finding Chart</a><br>" % (dss_url)
        sdss_url = ("http://fc.qmorgan.com/fcserver.py?ra=%f&dec=%f&uncertainty=2"
                    "&err_shape=combo&incl_scale=yes&size=4&src_name=%s"
                    "&pos_label=Pos&cont_str=&survey=sdss"
                    % (psn_dict['ra_deg'], psn_dict['dec_deg'], psn_dict['designation']))
        sdss_html = "<a href='%s'>SDSS Finding Chart</a> (May not be available)<br>" % (sdss_url)
        pretty_output = '''
<br><br>
<table border="0">
<tr><td>Object:</td><td>%s</td></tr>
<tr><td>Designation:</td><td>%s</td></tr>
<tr><td>Discovery date:</td><td>%s</td></tr>
<tr><td>Mag at date:</td><td>%s</td></tr>
<tr><td>Filter:</td><td>%s</td></tr>
<tr><td>RA:</td><td>%s (= %f)</td></tr>
<tr><td>Dec:</td><td>%s (= %f)</td></tr>
<tr><td>Presumed host:</td><td>%s</td></tr>
<tr><td>Offset from host:</td><td>%s, %s (arcsec)</td></tr>
<tr><td>Discoverer:</td><td>%s</td></tr>
<tr><td>Obs. arc:</td><td>%s</td></tr>
</table>
<br>
''' % (psn_dict['obj_type'], psn_dict['designation'],
       psn_dict['date_string'].replace(' ', '-').replace('2013', 'UT2013'),
       psn_dict['mag'], psn_dict['filter'],
       psn_dict['ra'], psn_dict['ra_deg'], psn_dict['dec'], psn_dict['dec_deg'],
       psn_dict['locale'], psn_dict['ra_offset'], psn_dict['dec_offset'],
       psn_dict['discoverer'], psn_dict['arc'])
    else:
        pretty_output = 'Cannot parse PSN Message.'

    print pretty_output
    html_body = '''<html><body>
<a href="%s">%s</a>''' % (psn_url, psn_id)
    if is_new_id:
        html_body += ' (First report of this transient)'
    else:
        html_body += ' (Update)'
    html_body += '<br><br>'
    if psn_dict:
        html_body += dss_html
        html_body += sdss_html
    html_body += pretty_output
    html_body += new_entry.summary
    html_body += '<br><br><br></body></html>'

    if 'summary_list' in all_entries[psn_id]:
        summary_list = all_entries[psn_id]['summary_list']
        summary_list.append(new_entry.summary)
    else:
        summary_list = [new_entry.summary]
    all_entries[psn_id].update({'summary_list': summary_list})

    # send email if new
    if email:
        if is_new_id:
            subject = "New Transient %s" % (psn_id_full)
        else:
            subject = "Update to Transient %s" % (psn_id_full)
        print "Sending email: '%s'" % (subject)
        send_gmail.domail(email_to, subject, html_body, html=True)
    # do separate/no email if updated?

    # save the updated pickle file
    qPickle.save(all_entries, all_pkl_path, clobber=True)
    return is_new_id
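# Hedged usage sketch: entries like new_entry typically come from feedparser
# (the code above relies on the .id and .summary attributes feedparser provides).
# The feed URL below is a hypothetical placeholder, not confirmed from this repo.
# import feedparser
# feed = feedparser.parse('http://www.cbat.eps.harvard.edu/unconf/tocp.xml')
# for entry in feed.entries:
#     _do_new_entry_actions(entry, email=False)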