def get_coincs_from_coire(self, files, stat='snr'):
    """
    Uses CoincInspiralUtils to get data from old-style (coire'd) coincs
    and append one CoincData object per coincidence to self.

    @param files: list of XML files containing sngl_inspiral (and,
        for injection runs, sim_inspiral) tables
    @param stat: name of the coincidence statistic to use (default 'snr')
    """
    coincTrigs = CoincInspiralUtils.coincInspiralTable()
    inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \
        mangle_event_id=True, verbose=None)
    statistic = CoincInspiralUtils.coincStatistic(stat, None, None)
    coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs, statistic)

    # Injection files also carry a sim_inspiral table; attach it when
    # present.  Plain (non-injection) files make this fail, which is the
    # expected best-effort behaviour, so the failure is ignored -- but we
    # no longer use a bare except (which would also swallow
    # KeyboardInterrupt and real programming errors).
    try:
        inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files)
        coincTrigs.add_sim_inspirals(inspInj)
    except Exception:
        pass

    # now extract the relevant information into CoincData objects
    for ctrig in coincTrigs:
        coinc = CoincData()
        coinc.set_ifos(ctrig.get_ifos()[1])
        coinc.set_gps(dict((trig.ifo, LIGOTimeGPS(trig.get_end()))
                           for trig in ctrig))
        coinc.set_snr(dict((trig.ifo, getattr(ctrig, trig.ifo).snr)
                           for trig in ctrig))
        coinc.set_effDs(dict((trig.ifo, getattr(ctrig, trig.ifo).eff_distance)
                             for trig in ctrig))
        coinc.set_masses(dict((trig.ifo, getattr(ctrig, trig.ifo).mass1)
                              for trig in ctrig),
                         dict((trig.ifo, getattr(ctrig, trig.ifo).mass2)
                              for trig in ctrig))

        # ctrig only has a 'sim' attribute when the coincidence was matched
        # to an injection above; AttributeError therefore means "not an
        # injection" and leaves the CoincData un-flagged.
        try:
            sim = ctrig.sim
            effDs_inj = {}
            for ifo in coinc.ifo_list:
                if ifo == 'H1':
                    effDs_inj[ifo] = sim.eff_dist_h
                elif ifo == 'L1':
                    effDs_inj[ifo] = sim.eff_dist_l
                elif ifo == 'V1':
                    effDs_inj[ifo] = sim.eff_dist_v
            dist_inj = sim.distance
            coinc.set_inj_params(sim.latitude, sim.longitude,
                                 sim.mass1, sim.mass2, dist_inj, effDs_inj)
            coinc.is_injection = True
        except AttributeError:
            pass

        self.append(coinc)
def get_slide_coincs_from_cache(cachefile, pattern, match, verb, coinc_stat):
    """
    Read the time-slide trigger files selected from cachefile by pattern,
    slide the triggers on their analysis rings, and return a list of the
    resulting coincidences.  Returns None when the cache contains no
    matching files that exist on disk.
    """
    full_coinc_table = []
    cache = cachefile.sieve(description=pattern, exact_match=match)
    found, missed = cache.checkfilesexist()
    files = found.pfnlist()
    if not len(files):
        print >>sys.stderr, "cache contains no files with " + pattern + " description"
        return None
    # split the time slide files into 105 groups to aid with I/O
    num_files = len(files)
    #Changed by Tristan Miller as a memory fix
    #groups_of_files = split_seq(files,105)
    groups_of_files = split_seq(files, 50)
    for filegroup in groups_of_files:
        if filegroup:
            # extract the coinc table
            coinc_table = SnglInspiralUtils.ReadSnglInspiralFromFiles(
                filegroup, mangle_event_id=False, verbose=verb,
                non_lsc_tables_ok=False)
            # analysis rings come from the search-summary segments,
            # flattened over all instruments and sorted in time
            segDict = SearchSummaryUtils.GetSegListFromSearchSummaries(filegroup)
            rings = segments.segmentlist(iterutils.flatten(segDict.values()))
            rings.sort()
            # extend each ring end by 1 ns -- presumably so triggers landing
            # exactly on a ring boundary are kept inside it (TODO confirm)
            for k, ring in enumerate(rings):
                rings[k] = segments.segment(rings[k][0], rings[k][1] + 10**(-9))
            # NOTE(review): slide offsets are hard-coded here; verify they
            # match the offsets actually used by the search
            shift_vector = {"H1": 0, "H2": 0, "L1": 5, "V1": 5}
            if coinc_table:
                SnglInspiralUtils.slideTriggersOnRingWithVector(coinc_table,
                    shift_vector, rings)
                full_coinc_table.extend(CoincInspiralUtils.coincInspiralTable(
                    coinc_table, coinc_stat))
    return full_coinc_table
def get_coincs_from_coire(self, files, stat='snr'):
    """
    Uses CoincInspiralUtils to get data from old-style (coire'd) coincs
    and append one CoincData object per coincidence to self.

    @param files: list of XML files containing sngl_inspiral (and,
        for injection runs, sim_inspiral) tables
    @param stat: name of the coincidence statistic to use (default 'snr')
    """
    coincTrigs = CoincInspiralUtils.coincInspiralTable()
    inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \
        mangle_event_id=True, verbose=None)
    statistic = CoincInspiralUtils.coincStatistic(stat, None, None)
    coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs, statistic)

    # Injection files also carry a sim_inspiral table; attach it when
    # present.  Plain (non-injection) files make this fail, which is the
    # expected best-effort behaviour, so the failure is ignored -- but we
    # no longer use a bare except (which would also swallow
    # KeyboardInterrupt and real programming errors).
    try:
        inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files)
        coincTrigs.add_sim_inspirals(inspInj)
    except Exception:
        pass

    # now extract the relevant information into CoincData objects
    for ctrig in coincTrigs:
        coinc = CoincData()
        coinc.set_ifos(ctrig.get_ifos()[1])
        coinc.set_gps(dict((trig.ifo, lal.LIGOTimeGPS(trig.get_end()))
                           for trig in ctrig))
        coinc.set_snr(dict((trig.ifo, getattr(ctrig, trig.ifo).snr)
                           for trig in ctrig))
        coinc.set_effDs(dict((trig.ifo, getattr(ctrig, trig.ifo).eff_distance)
                             for trig in ctrig))
        coinc.set_masses(dict((trig.ifo, getattr(ctrig, trig.ifo).mass1)
                              for trig in ctrig),
                         dict((trig.ifo, getattr(ctrig, trig.ifo).mass2)
                              for trig in ctrig))

        # ctrig only has a 'sim' attribute when the coincidence was matched
        # to an injection above; AttributeError therefore means "not an
        # injection" and leaves the CoincData un-flagged.
        try:
            sim = ctrig.sim
            effDs_inj = {}
            for ifo in coinc.ifo_list:
                if ifo == 'H1':
                    effDs_inj[ifo] = sim.eff_dist_h
                elif ifo == 'L1':
                    effDs_inj[ifo] = sim.eff_dist_l
                elif ifo == 'V1':
                    effDs_inj[ifo] = sim.eff_dist_v
            dist_inj = sim.distance
            coinc.set_inj_params(sim.latitude, sim.longitude,
                                 sim.mass1, sim.mass2, dist_inj, effDs_inj)
            coinc.is_injection = True
        except AttributeError:
            pass

        self.append(coinc)
def get_coincs_from_cache(cachefile, pattern, match, verb, coinc_stat):
    """
    Sieve cachefile for entries whose description matches pattern, read
    the single-inspiral triggers from the files that exist on disk, and
    return them bundled into a coincInspiralTable.  Returns None when the
    cache yields no readable files.
    """
    sieved = cachefile.sieve(description=pattern, exact_match=match)
    present, absent = sieved.checkfilesexist()
    filenames = present.pfnlist()

    # nothing matched / nothing on disk: warn and bail out
    if not len(filenames):
        print >>sys.stderr, "cache contains no files with " + pattern + " description"
        return None

    # load the single-detector triggers from all files at once ...
    sngl_triggers = SnglInspiralUtils.ReadSnglInspiralFromFiles(
        filenames, mangle_event_id=True, verbose=verb,
        non_lsc_tables_ok=False)
    # ... and assemble them into coincidences ranked by coinc_stat
    return CoincInspiralUtils.coincInspiralTable(sngl_triggers, coinc_stat)
def new_coincs_from_coincs(coincTable, coinc_stat):
    """
    We not only want to analyze each triple coincident trigger, but also
    the 3 double coincident triggers that you can make from the triple.
    Similarly, for the quadruples, we also want the 4 triples and 6
    doubles that you can make from the quadruple.  However, in order to
    be able to store these new (sub)coincidences, we have to assign each
    of them a unique event ID.
    """
    subcoincs = CoincInspiralUtils.coincInspiralTable()
    uid_source = SnglInspiralUtils.SnglInspiralID_old()
    # break every coincidence into all of its sub-coincidences; each one
    # gets a fresh event ID drawn from uid_source
    for coincidence in coincTable:
        break_up_coinc(coincidence, subcoincs, coinc_stat, uid_source)
    return subcoincs
# remove files that had no bkg from corsefiles list and add their names to # warn_msg if no_bkg_frgnd: warn_msg = 'No foreground or background in files:\n' for idx in sorted(no_bkg_frgnd.items(), key=itemgetter(1), reverse=True): warn_msg = warn_msg + ' ' + os.path.basename(corsefiles.pop( idx[1])) + '\n' # check if still have a corsefiles list; if all the files that were globbed # don't have foreground and background, just make a generic plot with # warn_msg on it; this avoids future errors if not corsefiles: warn_msg = warn_msg + 'These were all the globbed files.' sys.exit(0) coincStat = CoincInspiralUtils.coincStatistic("far") for thisfile in corsefiles: insptrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles([thisfile]) coincT[thisfile] = CoincInspiralUtils.coincInspiralTable( insptrigs, coincStat) coincT[thisfile].sort() # sort by descending FAN # if this file has no_bkg, but does have foreground, get the ifo coincident # type from the first foreground trigger if thisfile in no_bkg: coincifos[thisfile] = coincT[thisfile][0].get_ifos()[0] #for thisfile in corsefiles: # if NormTime[corsefiles[0]] != NormTime[thisfile]: # print >> sys.stderr, "Can't combine experiments with " + \ # "different analysis times." # sys.exit( 1 ) maxFANs = [] # for storing max FAN of bkg (the dict is hard to sort by value)
        # combined effective SNR for an H1-H2 double: quadrature sum of the
        # two single-detector effective SNRs
        H1H2_eff_snr = math.sqrt((events.H1.get_effective_snr())**2 +
            (events.H2.get_effective_snr())**2)
        return {"H1H2_eff_snr": H1H2_eff_snr}
    elif hasattr(events, "H1") and hasattr(events, "L1"):
        H1L1_eff_snr = math.sqrt((events.H1.get_effective_snr())**2 +
            (events.L1.get_effective_snr())**2)
        return {"H1L1_eff_snr": H1L1_eff_snr}
    elif hasattr(events, "H2") and hasattr(events, "L1"):
        H2L1_eff_snr = math.sqrt((events.H2.get_effective_snr())**2 +
            (events.L1.get_effective_snr())**2)
        return {"H2L1_eff_snr": H2L1_eff_snr}
    # NOTE(review): any other ifo combination falls through and returns
    # None implicitly -- confirm callers handle that

statistic = CoincInspiralUtils.coincStatistic(opts.statistic)

###############################################################################
# read in zero lag coinc triggers
zerolagTriggers = None
zerolagTriggers = SnglInspiralUtils.ReadSnglInspiralFromFiles(
    zerolagfiles, mangle_event_id=True)

# construct the zero lag coincs
zerolagCoincTriggers = \
    CoincInspiralUtils.coincInspiralTable(zerolagTriggers, statistic)

# read in the time-slide triggers
slidesTriggers = None
slidesTriggers = SnglInspiralUtils.ReadSnglInspiralFromFiles(
    slidesfiles, mangle_event_id=True)
# get tables ready for putting things in
#instruments = ("H1", "H2", "L1", "V1")
#ZeroTable = dict(("".join(key), []) for n in range(2, len(instruments + 1)) for key in iterutils.choices(instruments, n))
# one empty list per ifo-coincidence type, for each trigger category
PlaygroundZeroTable = {'H1H2':[],'H1L1':[],'H1V1':[],'H2L1':[],'H2V1':[],'L1V1':[],'H1H2L1':[],'H1L1V1':[],'H2L1V1':[],'H1H2V1':[],'H1H2L1V1':[]}
FullDataZeroTable = {'H1H2':[],'H1L1':[],'H1V1':[],'H2L1':[],'H2V1':[],'L1V1':[],'H1H2L1':[],'H1L1V1':[],'H2L1V1':[],'H1H2V1':[],'H1H2L1V1':[]}
SlideTable = {'H1H2':[],'H1L1':[],'H1V1':[],'H2L1':[],'H2V1':[],'L1V1':[],'H1H2L1':[],'H1L1V1':[],'H2L1V1':[],'H1H2V1':[],'H1H2L1V1':[]}
InjTable = {'H1H2':[],'H1L1':[],'H1V1':[],'H2L1':[],'H2V1':[],'L1V1':[],'H1H2L1':[],'H1L1V1':[],'H2L1V1':[],'H1H2V1':[],'H1H2L1V1':[]}
KnownTable = {'H1H2':[],'H1L1':[],'H1V1':[],'H2L1':[],'H2V1':[],'L1V1':[],'H1H2L1':[],'H1L1V1':[],'H2L1V1':[],'H1H2V1':[],'H1H2L1V1':[]}

# too keep I/O functioning smoothly, save the relevant lines from the cache into a new, smaller cache
# NOTE(review): cache_file and opts.veto_category are interpolated into a
# shell command below; a path containing spaces or shell metacharacters
# would break this (or be unsafe) -- consider subprocess with a list arg
cache_file = glob.glob(opts.cache_file)[0]
os.system('cat ' + cache_file + ' | grep COIRE | grep SECOND | grep ' + opts.veto_category + ' > mvsc_cache.cache')
# now we can open the cache file
cachefile = lal.Cache.fromfile(open('mvsc_cache.cache')).sieve(ifos=opts.ifo_times, exact_match=opts.exact_match)
# initialize CoincInspiralUtils, which is going to pull coincidences out of the xml files that you provide as arguments to the options
coinc_stat = CoincInspiralUtils.coincStatistic(opts.statistic)

# Read in the files; each category is broken into sub-coincidences only
# when something was actually read (the getters return None on no files)
SlideCoincs = get_slide_coincs_from_cache(cachefile, opts.slide_pattern, opts.exact_match, opts.verbose, coinc_stat)
if SlideCoincs:
    SlideCoincs = new_coincs_from_coincs(SlideCoincs, coinc_stat)
PlaygroundZeroCoincs = get_coincs_from_cache(cachefile, opts.playground_zerolag_pattern, opts.exact_match, opts.verbose, coinc_stat)
if PlaygroundZeroCoincs:
    PlaygroundZeroCoincs = new_coincs_from_coincs(PlaygroundZeroCoincs, coinc_stat)
FullDataZeroCoincs = get_coincs_from_cache(cachefile, opts.full_data_zerolag_pattern, opts.exact_match, opts.verbose, coinc_stat)
if FullDataZeroCoincs:
    FullDataZeroCoincs = new_coincs_from_coincs(FullDataZeroCoincs, coinc_stat)
InjCoincs = get_coincs_from_cache(cachefile, opts.found_pattern, opts.exact_match, opts.verbose, coinc_stat)
if InjCoincs:
    InjCoincs = new_coincs_from_coincs(InjCoincs, coinc_stat)

# which parameters to extract from each coincidence, keyed by how the
# value is computed (single-ifo column, metric info, relative delta, ...)
params = {"single":['snr','chisq','rsqveto_duration','get_effective_snr()','eff_distance','get_end()'], "metricInfo":['ethinca'],"coincRelativeDelta":['mchirp','eta'], "coincDelta":['time'], "coincInfo":['class']}
if SlideCoincs:
    parse_coinc(SlideCoincs, SlideTable, params)
if PlaygroundZeroCoincs:
    parse_coinc(PlaygroundZeroCoincs, PlaygroundZeroTable, params)
if FullDataZeroCoincs:
    parse_coinc(FullDataZeroCoincs, FullDataZeroTable, params)
# End: loop over corsefiles # remove files that had no bkg from corsefiles list and add their names to # warn_msg if no_bkg_frgnd: warn_msg = 'No foreground or background in files:\n' for idx in sorted(no_bkg_frgnd.items(), key=itemgetter(1), reverse=True): warn_msg = warn_msg + ' ' + os.path.basename(corsefiles.pop(idx[1])) + '\n' # check if still have a corsefiles list; if all the files that were globbed # don't have foreground and background, just make a generic plot with # warn_msg on it; this avoids future errors if not corsefiles: warn_msg = warn_msg + 'These were all the globbed files.' sys.exit(0) coincStat = CoincInspiralUtils.coincStatistic("far") for thisfile in corsefiles: insptrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles( [thisfile] ) coincT[ thisfile ] = CoincInspiralUtils.coincInspiralTable( insptrigs, coincStat ) coincT[ thisfile ].sort() # sort by descending FAN # if this file has no_bkg, but does have foreground, get the ifo coincident # type from the first foreground trigger if thisfile in no_bkg: coincifos[thisfile] = coincT[ thisfile ][0].get_ifos()[0] #for thisfile in corsefiles: # if NormTime[corsefiles[0]] != NormTime[thisfile]: # print >> sys.stderr, "Can't combine experiments with " + \ # "different analysis times." # sys.exit( 1 ) maxFANs = [] # for storing max FAN of bkg (the dict is hard to sort by value) FANc = [] # for storing the combined FANs of foreground triggers
###############################
# Construct list based on ifos supplied at command line
ifolist = [ifo for ifo in ('G1', 'H1', 'H2', 'L1', 'V1') \
    if getattr(opts, "%s_data" % ifo.lower())]

# build the list of ifo combinations to analyze; the options are tested in
# increasing order, so if several are given the last one wins
if opts.two_ifo:
    ifo_combo = list(iterutils.choices(ifolist, 2))
if opts.three_ifo:
    ifo_combo = list(iterutils.choices(ifolist, 2)) + \
        list(iterutils.choices(ifolist, 3))
if opts.four_ifo:
    ifo_combo = list(iterutils.choices(ifolist, 2)) + \
        list(iterutils.choices(ifolist, 3)) + \
        list(iterutils.choices(ifolist, 4))
if opts.analyze_all:
    ifo_combo = CoincInspiralUtils.get_ifo_combos(ifolist)

##############################################################################
# try to make a directory to store the cache files and job logs; an OSError
# here normally just means the directory already exists, so we carry on --
# the previous bare except also hid KeyboardInterrupt and coding errors
try:
    os.mkdir('logs')
except OSError:
    pass

##############################################################################
# create the config parser object and read in the ini file
#cp = ConfigParser.ConfigParser()
#cp.read(opts.config_file)

##############################################################################
# if a usertag has been specified, override the config file
############################### # Construct list based on ifos supplied at command line ifolist = [ifo for ifo in ('G1','H1', 'H2', 'L1', 'V1') \ if getattr(opts, "%s_data" % ifo.lower())] if opts.two_ifo: ifo_combo=list(iterutils.choices(ifolist,2)) if opts.three_ifo: ifo_combo=list(iterutils.choices(ifolist,2)) + list(iterutils.choices(ifolist,3)) if opts.four_ifo: ifo_combo=list(iterutils.choices(ifolist,2)) + list(iterutils.choices(ifolist,3)) + \ list(iterutils.choices(ifolist,4)) if opts.analyze_all: ifo_combo=CoincInspiralUtils.get_ifo_combos(ifolist) ############################################################################## # try to make a directory to store the cache files and job logs try: os.mkdir('logs') except: pass ############################################################################## # create the config parser object and read in the ini file #cp = ConfigParser.ConfigParser() #cp.read(opts.config_file) ############################################################################## # if a usertag has been specified, override the config file if opts.user_tag is not None:
def investigateTimeseries(self, triggerFiles, inj, ifoName, stage, number):
    """
    Investigate inspiral triggers and create a time-series
    of the SNRs around the injected time
    @param triggerFiles: List of files containing the inspiral triggers
    @param inj: the current missed injection
    @param ifoName: the IFO for which the plot is made
    @param stage: the name of the stage (FIRST, SECOND)
    @param number: the consecutive number for this inspiral followup
    """
    # read the inspiral file(s)
    if self.verbose:
        print "Processing INSPIRAL triggers from files ", triggerFiles
    snglTriggers = SnglInspiralUtils.ReadSnglInspiralFromFiles( \
        triggerFiles, verbose=False)

    # create a figure and initialize some lists
    fig = figure()
    foundSet = set()           # IFOs in which the injection was found
    loudest_details = {}       # per-IFO parameters of the loudest trigger
    noTriggersFound = True

    if snglTriggers is None:
        # put message on the plot instead
        self.putText('No sngl_inspiral triggers in %s' % str(triggerFiles))
    else:
        # selection segments: a narrow window around the injection time for
        # "found" decisions and a wide one for the plotted time-series
        timeInjection = self.getTimeSim(inj)
        segSmall = segments.segment(timeInjection - self.injection_window, \
            timeInjection + self.injection_window)
        segLarge = segments.segment(timeInjection - self.time_window, \
            timeInjection + self.time_window)

        # create coincidences for THINCA stage
        coincTriggers = None
        if 'THINCA' in stage:
            coincTriggers = CoincInspiralUtils.coincInspiralTable(snglTriggers, \
                CoincInspiralUtils.coincStatistic("snr"))
            selectedCoincs = coincTriggers.vetoed(segSmall)

        # loop over the IFOs (although this is a plot for IFO 'ifoName')
        for ifo in self.colors.keys():
            # get the singles for this ifo
            snglInspiral = snglTriggers.ifocut(ifo)

            # select a range of triggers
            selectedLarge = snglInspiral.vetoed(segLarge)
            timeLarge = [self.getTimeTrigger(sel) - timeInjection \
                for sel in selectedLarge]
            selectedSmall = snglInspiral.vetoed(segSmall)
            timeSmall = [self.getTimeTrigger(sel) - timeInjection \
                for sel in selectedSmall]

            # use the set of selected coincident triggers in the THINCA stages
            if coincTriggers:
                selectedSmall = selectedCoincs.cluster(
                    2 * self.injection_window).getsngls(ifo)
                timeSmall = [self.getTimeTrigger(sel) - timeInjection \
                    for sel in selectedSmall]

            # skip if no triggers in the large time window
            if len(timeLarge) == 0:
                continue
            noTriggersFound = False

            # add IFO to this set; the injection is found for this IFO and stage
            if len(timeSmall) > 0:
                foundSet.add(ifo)

                # record details of the loudest trigger
                loudest_details[ifo] = {}
                loudest = selectedSmall[selectedSmall.get_column('snr').argmax()]
                loudest_details[ifo]["snr"] = loudest.snr
                loudest_details[ifo]["mchirp"] = loudest.mchirp
                loudest_details[ifo]["eff_dist"] = loudest.eff_distance
                loudest_details[ifo]["chisq"] = loudest.chisq
                loudest_details[ifo]["timeTrigger"] = self.getTimeTrigger(loudest)

                # NOTE(review): these two locals appear to be unused below --
                # confirm before removing
                timeTrigger = self.getTimeTrigger(loudest)
                vetoSegs = self.vetodict[ifoName]

            # plot the triggers
            plot(timeLarge, selectedLarge.get_column('snr'), \
                self.colors[ifo] + 'o', label="_nolegend_")
            plot(timeSmall, selectedSmall.get_column('snr'), \
                self.colors[ifo] + 's', label=ifo)

        # draw the injection times and other stuff
        if noTriggersFound:
            self.putText('No triggers/coincidences found within time window')

        # vertical markers: injection time (green dashed) and the borders of
        # the narrow injection window (cyan dotted)
        ylims = axes().get_ylim()
        plot([0, 0], ylims, 'g--', label="_nolegend_")
        plot([-self.injection_window, -self.injection_window], ylims, 'c:', \
            label="_nolegend_")
        plot([+self.injection_window, +self.injection_window], ylims, 'c:', \
            label="_nolegend_")

        self.highlightVeto(timeInjection, segLarge, ifoName, ylims)

    # save the plot
    grid(True)
    legend()
    ylims = axes().get_ylim()
    axis([-self.time_window, +self.time_window, ylims[0], ylims[1]])
    xlabel('time [s]')
    ylabel('SNR')
    # NOTE(review): title uses self.number, not the 'number' argument --
    # confirm whether the parameter is still needed
    title(stage + '_' + str(self.number))
    fname = self.savePlot(stage)
    close(fig)

    result = {'filename': fname, 'foundset': foundSet, \
        'loudest_details': loudest_details}
    return result
def double_params_func(events, timeslides=0):
    """
    Return a dict mapping the pair name (e.g. 'H1L1_eff_snr') to the
    quadrature-combined effective SNR of the two single-detector triggers
    in events.  Only the H1H2, H1L1 and H2L1 pairs are handled; any other
    combination falls through and returns None implicitly.  The
    timeslides argument is accepted but not used here.
    """
    if hasattr(events, "H1") and hasattr(events, "H2"):
        H1H2_eff_snr = math.sqrt((events.H1.get_effective_snr()) ** 2 +
            (events.H2.get_effective_snr()) ** 2)
        return {"H1H2_eff_snr": H1H2_eff_snr}
    elif hasattr(events, "H1") and hasattr(events, "L1"):
        H1L1_eff_snr = math.sqrt((events.H1.get_effective_snr()) ** 2 +
            (events.L1.get_effective_snr()) ** 2)
        return {"H1L1_eff_snr": H1L1_eff_snr}
    elif hasattr(events, "H2") and hasattr(events, "L1"):
        H2L1_eff_snr = math.sqrt((events.H2.get_effective_snr()) ** 2 +
            (events.L1.get_effective_snr()) ** 2)
        return {"H2L1_eff_snr": H2L1_eff_snr}


statistic = CoincInspiralUtils.coincStatistic(opts.statistic)

###############################################################################
# read in zero lag coinc triggers
zerolagTriggers = None
zerolagTriggers = SnglInspiralUtils.ReadSnglInspiralFromFiles(zerolagfiles,
    mangle_event_id=True)

# construct the zero lag coincs
zerolagCoincTriggers = CoincInspiralUtils.coincInspiralTable(zerolagTriggers,
    statistic)

# read in the time-slide triggers
slidesTriggers = None
slidesTriggers = SnglInspiralUtils.ReadSnglInspiralFromFiles(slidesfiles,
    mangle_event_id=True)

# construct the time slides coincs for single.
slidesCoincs = CoincInspiralUtils.coincInspiralTable(slidesTriggers,
    statistic)