seg_file, dq_name) ### read in segments from xml file if opts.verbose: print 'finding idq segments' idqsegs = idq.get_idq_segments(realtimedir, opts.start, opts.end, suffix='.dat') if opts.verbose: print 'taking intersection between science segments and idq segments' idqsegs = event.andsegments([scisegs, idqsegs]) ### write segment file if opts.ignore_science_segments: idqseg_path = idq.idqsegascii(gdbdir, filetag, opts.start, opts.end - opts.start) else: idqseg_path = idq.idqsegascii(gdbdir, '%s_%s' % (filetag, dq_name), opts.start, opts.end - opts.start) if opts.verbose: print " writing : " + idqseg_path f = open(idqseg_path, 'w') for seg in idqsegs: print >> f, seg[0], seg[1] f.close() #================================================= rank_channame = idq.channame(ifo, opts.classifier, "%s_rank" % tag) fap_channame = idq.channame(ifo, opts.classifier, "%s_fap" % tag) fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL" % tag)
### if we aren't building auxmvc vectors, we re-use pat files from realtime job ### this requires us to redefine the 'science-segments' as the intersection of scisegs with realtime segs ### we call this intersection "idq_segs" else: ### we're re-using pat files! try: ### determine segments from realtime filenames realtime_segs = idq.get_idq_segments(realtimedir, gpsstart - lookback, gpsstart + stride, suffix='.pat') ### read in science segments (scisegs, coveredseg) = idq.extract_dq_segments(seg_file, dq_name) ### take the intersection of these segments idq_segs = event.andsegments([scisegs, realtime_segs]) ### write segment file idqseg_path = idq.idqsegascii(output_dir, '_%s'%dq_name, gpsstart - lookback, lookback+stride) f = open(idqseg_path, 'w') for seg in idq_segs: print >> f, seg[0], seg[1] f.close() ### we may want to remove the unsafe channels, but this could be tricky and we don't want to throw away GW channels accidentally ovlsegs = idqseg_path except Exception as e: traceback.print_exc() logger.info('WARNING: failed to generate iDQ segments from realtime output.') if opts.force: raise e else: gpsstart += stride
gpsstart += stride continue logger.info('finding idq segments') idqsegs = idq.get_idq_segments(realtimedir, gpsstart - lookback, gpsstart + stride, suffix='.dat') logger.info( 'taking intersection between science segments and idq segments') idqsegs = event.andsegments([scisegs, idqsegs]) ### write segment file if opts.ignore_science_segments: idqseg_path = idq.idqsegascii(output_dir, '', gpsstart - lookback, lookback + stride) else: idqseg_path = idq.idqsegascii(output_dir, '_%s' % dq_name, gpsstart - lookback, lookback + stride) f = open(idqseg_path, 'w') for seg in idqsegs: print >> f, seg[0], seg[1] f.close() #=============================================================================================== # update mappings via uroc files #=============================================================================================== ### find all *dat files, bin them according to classifier ### needed for opts.mode=="dat" and KDE estimates logger.info('finding all *dat files')
print ' writing science segments to file : '+seg_file ligolw_utils.write_filename(xmldoc, seg_file, gz=seg_file.endswith(".gz")) (scisegs, coveredseg) = idq.extract_dq_segments(seg_file, dq_name) ### read in segments from xml file if opts.verbose: print 'finding idq segments' idqsegs = idq.get_idq_segments(realtimedir, opts.start, opts.end, suffix='.dat') if opts.verbose: print 'taking intersection between science segments and idq segments' idqsegs = event.andsegments( [scisegs, idqsegs] ) ### write segment file if opts.ignore_science_segments: idqseg_path = idq.idqsegascii(gdbdir, filetag, opts.start, opts.end-opts.start) else: idqseg_path = idq.idqsegascii(gdbdir, '%s_%s'%(filetag, dq_name), opts.start, opts.end-opts.start) if opts.verbose: print " writing : "+idqseg_path f = open(idqseg_path, 'w') for seg in idqsegs: print >> f, seg[0], seg[1] f.close() #================================================= rank_channame = idq.channame(ifo, opts.classifier, "%s_rank"%tag) fap_channame = idq.channame(ifo, opts.classifier, "%s_fap"%tag) fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL"%tag)
(scisegs, coveredseg) = idq.extract_dq_segments(seg_file, dq_name) ### read in segments from xml file

### modify scisegs to account for shifts
logger.info('modifying scisegs to account for shifts')
modified_scisegs = [(s-opts.right_padding-opts.t_lag, e+opts.left_padding-opts.t_lag) for s, e in scisegs]

### find idq segments
logger.info('finding idq segments')
idqsegs = idq.get_idq_segments(realtimedir, lookup_startgps, lookup_endgps, suffix='.dat')

logger.info('taking intersection between modified science segments and idq segments')
idqsegs = event.andsegments( [modified_scisegs, idqsegs] )

### write segment file
if opts.ignore_science_segments:
    idqseg_path = idq.idqsegascii(opts.output_dir, '', startgps, stride)
else:
    idqseg_path = idq.idqsegascii(opts.output_dir, '_%s'%dq_name, startgps , stride)
f = open(idqseg_path, 'w')
for seg in idqsegs:
    print >> f, seg[0], seg[1]
f.close()

#========================
# go findeth the frame data
#========================
logger.info(' finding all *fap*.gwf files')

### bin fap frame files by the classifier name extracted from the filename
fapsD = defaultdict( list )
for fap in [fap for fap in idq.get_all_files_in_range(realtimedir, lookup_startgps, lookup_endgps, pad=0, suffix='.gwf') if "fap" in fap]:
    fapsD[idq.extract_fap_name( fap )].append( fap )
e + opts.left_padding - opts.t_lag) for s, e in scisegs] ### find idq segments logger.info('finding idq segments') idqsegs = idq.get_idq_segments(realtimedir, lookup_startgps, lookup_endgps, suffix='.dat') logger.info( 'taking intersection between modified science segments and idq segments') idqsegs = event.andsegments([modified_scisegs, idqsegs]) ### write segment file if opts.ignore_science_segments: idqseg_path = idq.idqsegascii(opts.output_dir, '', startgps, stride) else: idqseg_path = idq.idqsegascii(opts.output_dir, '_%s' % dq_name, startgps, stride) f = open(idqseg_path, 'w') for seg in idqsegs: print >> f, seg[0], seg[1] f.close() #======================== # go findeth the frame data #======================== logger.info(' finding all *fap*.gwf files') fapsD = defaultdict(list) for fap in [ fap for fap in idq.get_all_files_in_range(