# Locate Kleine-Welle (KW) trigger files around each requested GPS time and
# build up the segment coverage they provide.
# NOTE(review): this fragment is truncated mid-statement — the suite of the
# final `if gps - s < minwin:` is not visible in this view.
trgdata = {}
for gps in args:
    print "gps : %.9f" % (gps)

    # half-width of the search window, possibly shrunk below by coverage
    minwin = opts.window

    ### go find triggers
    if opts.verbose:
        print "\tdiscoverying KW triggers within [%.9f, %.9f]" % (gps - opts.window, gps + opts.window)

    ### figure out which files you want
    filenames = []  # .trg files kept for this gps
    coverage = []   # segments already covered by kept files
    for gdsdir in kwgdsdirs:
        for filename in idq.get_all_files_in_range(gdsdir, gps - opts.window, gps + opts.window, pad=0, suffix=".trg"):
            seg = idq.extract_start_stop(filename, suffix=".trg")
            # keep a file only if its span adds coverage not already present
            if not event.livetime(event.andsegments([coverage, [seg]])):
                coverage = event.fixsegments(coverage + [seg])
                filenames.append(filename)

    ### figure out the extent of the coverage
    # no coverage segment contains this gps -> skip it (--force) or abort
    if len(event.include([[gps]], coverage, tcent=0)) == 0:
        if opts.force:
            if opts.verbose:
                print "no triggers found for gps : %.3f" % (gps)
            continue
        else:
            raise ValueError("no triggers found for gps : %.3f" % (gps))

    # shrink the effective window to what the found files actually cover
    for s, e in coverage:
        if s < gps:
            if gps - s < minwin:  # truncated here in this view
for dat in idq.get_all_files_in_range(realtimedir, gpsstart - lookback, gpsstart + stride, pad=0, suffix='.dat'): datsD[idq.extract_dat_name(dat)].append(dat) ### throw away any un-needed files for key in datsD.keys(): if key not in classifiers: datsD.pop(key) else: ### throw out files that don't contain any science time datsD[key] = [ dat for dat in datsD[key] if event.livetime( event.andsegments([ idqsegs, [idq.extract_start_stop(dat, suffix='.dat')] ])) ] if opts.mode == "npy": ### need rank files ### find all *rank*npy.gz files, bin them according to classifier logger.info(' finding all *rank*.npy.gz files') ranksD = defaultdict(list) for rank in [ rank for rank in idq.get_all_files_in_range(realtimedir, gpsstart - lookback, gpsstart + stride, pad=0, suffix='.npy.gz') if "rank" in rank
# Resolve the plotting label, gather this classifier's FAP frame files that
# overlap the idq segments, and restrict their timeseries to those segments.
if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    # fall back to the raw classifier name when no label is configured
    plotting_label = opts.classifier

#===================================================================================================

### Find all FAP files
# keep only 'fap' frames belonging to this classifier with nonzero overlap
if opts.verbose:
    print "finding all fap*gwf files"
faps = [
    fap for fap in idq.get_all_files_in_range(
        realtimedir, opts.start, opts.end, pad=0, suffix='.gwf')
    if ('fap' in fap) and (
        opts.classifier == idq.extract_fap_name(fap)) and event.livetime(
            event.andsegments([[idq.extract_start_stop(fap, suffix=".gwf")],
                               idqsegs]))
]

### compute total time covered
#T = event.livetime( [idq.extract_start_stop(fap, suffix='.gwf') for fap in faps] )*1.0
# *1.0 forces a float so later ratios avoid integer division (Python 2)
T = event.livetime(idqsegs) * 1.0

### combine timeseries and generate segments
if opts.verbose:
    print "generating segments from %d fap files" % (len(faps))
segs = dict((fapThr, [[], 1.0]) for fapThr in opts.FAPthr)
t, ts = idq.combine_gwf(faps, [fap_channame])
# NOTE(review): the loop rebinds t, ts while iterating zip(t, ts) — the zip
# is evaluated once up front, but the rebinding is easy to misread; confirm
# intent before touching.
for t, ts in zip(t, ts):
    t, ts = idq.timeseries_in_segments(t, ts, idqsegs)
# update mappings via uroc files #=============================================================================================== ### find all *dat files, bin them according to classifier ### needed for opts.mode=="dat" and KDE estimates logger.info('finding all *dat files') datsD = defaultdict( list ) for dat in idq.get_all_files_in_range(realtimedir, gpsstart-lookback, gpsstart+stride, pad=0, suffix='.dat' ): datsD[idq.extract_dat_name( dat )].append( dat ) ### throw away any un-needed files for key in datsD.keys(): if key not in classifiers: datsD.pop(key) else: ### throw out files that don't contain any science time datsD[key] = [ dat for dat in datsD[key] if event.livetime(event.andsegments([idqsegs, [idq.extract_start_stop(dat, suffix='.dat')]])) ] if opts.mode=="npy": ### need rank files ### find all *rank*npy.gz files, bin them according to classifier logger.info(' finding all *rank*.npy.gz files') ranksD = defaultdict( list ) for rank in [rank for rank in idq.get_all_files_in_range(realtimedir, gpsstart-lookback, gpsstart+stride, pad=0, suffix='.npy.gz') if "rank" in rank]: ranksD[idq.extract_fap_name( rank )].append( rank ) ### should just work... ### throw away files we will never need for key in ranksD.keys(): if key not in classifiers: ### throw away unwanted files ranksD.pop(key) else: ### keep only files that overlap with scisegs ranksD[key] = [ rank for rank in ranksD[key] if event.livetime(event.andsegments([idqsegs, [idq.extract_start_stop(rank, suffix='.npy.gz')]])) ]
# Build the rank / FAP / FAP-upper-limit channel names for this classifier,
# then collect its FAP frame files overlapping the idq segments.
# NOTE(review): truncated at the end — the body of `for fapThr in
# opts.FAPthr:` is not visible in this view.
rank_channame = idq.channame(ifo, opts.classifier, "%s_rank"%tag)
fap_channame = idq.channame(ifo, opts.classifier, "%s_fap"%tag)
fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL"%tag)

flavor = config.get(opts.classifier, 'flavor')

# human-readable label, falling back to the classifier name
if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

### Find all FAP files
if opts.verbose:
    print "finding all fap*gwf files"
faps = [fap for fap in idq.get_all_files_in_range( realtimedir, opts.start, opts.end, pad=0, suffix='.gwf') if ('fap' in fap) and (opts.classifier==idq.extract_fap_name( fap )) and event.livetime(event.andsegments([[idq.extract_start_stop(fap, suffix=".gwf")], idqsegs])) ]

### compute total time covered
#T = event.livetime( [idq.extract_start_stop(fap, suffix='.gwf') for fap in faps] )*1.0
# *1.0 forces float arithmetic for later ratios (Python 2)
T = event.livetime( idqsegs )*1.0

### combine timeseries and generate segments
if opts.verbose:
    print "generating segments from %d fap files"%(len(faps))
segs = dict( (fapThr, [[], 1.0]) for fapThr in opts.FAPthr )
t, ts = idq.combine_gwf(faps, [fap_channame])
for t, ts in zip(t, ts):
    t, ts = idq.timeseries_in_segments( t, ts, idqsegs )
    for fapThr in opts.FAPthr:  # truncated here in this view
# Locate Kleine-Welle (KW) trigger files around each requested GPS time and
# build up the segment coverage they provide.
# NOTE(review): this fragment is truncated mid-statement — the suite of the
# final `if gps-s < minwin:` is not visible in this view.
trgdata = {}
for gps in args:
    print "gps : %.9f"%(gps)

    # half-width of the search window, possibly shrunk below by coverage
    minwin = opts.window

    ### go find triggers
    if opts.verbose:
        print "\tdiscoverying KW triggers within [%.9f, %.9f]"%(gps-opts.window, gps+opts.window)

    ### figure out which files you want
    filenames = []  # .trg files kept for this gps
    coverage = []   # segments already covered by kept files
    for gdsdir in kwgdsdirs:
        for filename in idq.get_all_files_in_range(gdsdir, gps-opts.window, gps+opts.window, pad=0, suffix=".trg"):
            seg = idq.extract_start_stop(filename, suffix=".trg")
            # keep a file only if its span adds coverage not already present
            if not event.livetime(event.andsegments([coverage, [seg]])):
                coverage = event.fixsegments( coverage + [seg] )
                filenames.append( filename )

    ### figure out the extent of the coverage
    # no coverage segment contains this gps -> skip it (--force) or abort
    if len(event.include([[gps]], coverage, tcent=0)) == 0:
        if opts.force:
            if opts.verbose:
                print "no triggers found for gps : %.3f"%(gps)
            continue
        else:
            raise ValueError("no triggers found for gps : %.3f"%(gps))

    # shrink the effective window to what the found files actually cover
    for s, e in coverage:
        if s < gps:
            if gps-s < minwin:  # truncated here in this view
#--- ### look up KW trg files that intersect segs if opts.verbose: print( "finding relevant kw_trgfiles" ) kw_trgfiles = [] ### iterate over different configurations used in training for kwconf, dirname in eval(config.get('general', 'kw')).items(): ### this is kinda ugly... if opts.verbose: print( " searching for KW trgfiles corresponding to %s in %s within [%.3f, %.3f]"%(kwconf, dirname, segs[0][0], segs[-1][1]) ) ### iterate over all trg files found in that directory for trgfile in idq.get_all_files_in_range(dirname, segs[0][0], segs[-1][1], pad=0, suffix='.trg'): ### check whether there is some overlap ### not gauranteed if there are gaps between min and max gps times if event.livetime(event.andsegments([[idq.extract_start_stop(trgfile, suffix='.trg')], segs])): if opts.verbose: print( " kept : "+trgfile ) kw_trgfiles.append( trgfile ) elif opts.verbose: print( " discarded : "+trgfile ) #--- if opts.verbose: print( "evaluating %d times using %d KW trgfiles"%(Ngps, len(kw_trgfiles) ) ) ### set up output pointers if opts.output_filename: datfile = os.path.basename(opts.output_filename) output_dir = os.path.dirname(opts.output_filename)
# load dq segments if opts.verbose: print "reading segments from %s" % opts.dq_segments (dq_segments, covered_segments) = \ idq.extract_dq_segments(open(opts.dq_segments, 'r'), opts.dq_segments_name) # sort and merge segments dq_segments = event.fixsegments(dq_segments) ### filter patfiles by scisegs, keep only those events with non-zero overlap with science time patfiles = [ pat for pat in patfiles if event.livetime( event.andsegments( [dq_segments, [idq.extract_start_stop(pat, suffix=".pat")]])) ] if opts.verbose: print "%d patfiles remain after taking overlap with segments" % len( patfiles) if len(patfiles) == 0: print 'No *.pat files found in the gps range ' \ + str(gps_start_time) + ' - ' + str(gps_end_time) print 'Exiting with status 2.' sys.exit(2) # load auxmvc vector samples auxmvc_samples = auxmvc_utils.ReadMVSCTriggers(patfiles, Classified=False)
if opts.verbose: print( " searching for KW trgfiles corresponding to %s in %s within [%.3f, %.3f]" % (kwconf, dirname, segs[0][0], segs[-1][1])) ### iterate over all trg files found in that directory for trgfile in idq.get_all_files_in_range(dirname, segs[0][0], segs[-1][1], pad=0, suffix='.trg'): ### check whether there is some overlap ### not gauranteed if there are gaps between min and max gps times if event.livetime( event.andsegments( [[idq.extract_start_stop(trgfile, suffix='.trg')], segs])): if opts.verbose: print(" kept : " + trgfile) kw_trgfiles.append(trgfile) elif opts.verbose: print(" discarded : " + trgfile) #--- if opts.verbose: print("evaluating %d times using %d KW trgfiles" % (Ngps, len(kw_trgfiles))) ### set up output pointers if opts.output_filename: datfile = os.path.basename(opts.output_filename)
f.close() #======================== # go findeth the frame data #======================== logger.info(' finding all *fap*.gwf files') fapsD = defaultdict( list ) for fap in [fap for fap in idq.get_all_files_in_range(realtimedir, lookup_startgps, lookup_endgps, pad=0, suffix='.gwf') if "fap" in fap]: fapsD[idq.extract_fap_name( fap )].append( fap ) ### throw away files we will never need for key in fapsD.keys(): if key not in opts.classifier: ### throw away unwanted files fapsD.pop(key) else: ### keep only files that overlap with scisegs fapsD[key] = [ fap for fap in fapsD[key] if event.livetime(event.andsegments([idqsegs, [idq.extract_start_stop(fap, suffix='.gwf')]])) ] #======================== # iterate through classifiers -> generate segments #======================== ### set up xml document from glue.ligolw import ligolw from glue.ligolw import utils as ligolw_utils from glue.ligolw import lsctables from glue.ligolw.utils import process xmldoc = ligolw.Document() xml_element = ligolw.LIGO_LW() xmldoc.appendChild( xml_element )
for fap in [ fap for fap in idq.get_all_files_in_range( realtimedir, lookup_startgps, lookup_endgps, pad=0, suffix='.gwf') if "fap" in fap ]: fapsD[idq.extract_fap_name(fap)].append(fap) ### throw away files we will never need for key in fapsD.keys(): if key not in opts.classifier: ### throw away unwanted files fapsD.pop(key) else: ### keep only files that overlap with scisegs fapsD[key] = [ fap for fap in fapsD[key] if event.livetime( event.andsegments( [idqsegs, [idq.extract_start_stop(fap, suffix='.gwf')]])) ] #======================== # iterate through classifiers -> generate segments #======================== ### set up xml document from glue.ligolw import ligolw from glue.ligolw import utils as ligolw_utils from glue.ligolw import lsctables from glue.ligolw.utils import process xmldoc = ligolw.Document() xml_element = ligolw.LIGO_LW()