gdbdir = config.get('gdb general', 'main_gdb_dir')

if not opts.skip_gracedb_upload:
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

rank_channame  = idq.channame(ifo, opts.classifier, "%s_rank"%tag)
fap_channame   = idq.channame(ifo, opts.classifier, "%s_fap"%tag)
fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL"%tag)

#===================================================================================================

# get all *.gwf files in range
if opts.verbose:
    print "Finding relevant *.gwf files"

rank_filenames = []
fap_filenames = []
all_files = idq.get_all_files_in_range(realtimedir, opts.plotting_gps_start, opts.plotting_gps_end, pad=0, suffix='.gwf')

for filename in all_files:
    if opts.classifier == idq.extract_fap_name(filename): # and ifo in filename: ### this last bit not needed?
        if 'rank' in filename:
            rank_filenames.append(filename)
        else: ### assumed: frames that are not rank frames carry the fap channel
            fap_filenames.append(filename)
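### --------------------------------------------------------------------------
### Illustration only: a hedged sketch of how the rank/fap frame lists built
### above are typically read back into time-series, reusing the
### idq.combine_gwf call that appears in the segment-generation code further
### down. The names fap_times / fap_series are hypothetical.
#
# if fap_filenames:
#     fap_times, fap_series = idq.combine_gwf(fap_filenames, [fap_channame])
#     ### fap_times / fap_series : one array per contiguous stretch of frame data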
#========================
# which classifiers
#========================
### ensure we have a section for each classifier and fill out dictionary of options
classifiersD, mla, ovl = idq.config_to_classifiersD( config )

### get combiners information and add these to classifiersD
combinersD, referenced_classifiers = idq.config_to_combinersD( config )
for combiner, value in combinersD.items():
    classifiersD[combiner] = value

classifiers = sorted(classifiersD.keys())

### compute channel names stored in frames
channameD = dict( (name, { 'rank': idq.channame(ifo, name, "%s_rank" % usertag),
                           'fap': idq.channame(ifo, name, "%s_fap" % usertag),
                           'fapUL': idq.channame(ifo, name, "%s_fapUL" % usertag)
                         }) for name in classifiers )

#if mla:
#    ### reading parameters from config file needed for mla
#    auxmvc_coinc_window = config.getfloat('build_auxmvc_vectors','time-window')
#    auxmc_gw_signif_thr = config.getfloat('build_auxmvc_vectors','signif-threshold')
#    auxmvc_selected_channels = config.get('general','selected-channels')
#    auxmvc_unsafe_channels = config.get('general','unsafe-channels')

#========================
# realtime
#========================
realtimedir = config.get('general', 'realtimedir')
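### --------------------------------------------------------------------------
### Illustration only: a sketch of the INI layout implied by the config.get /
### has_option calls in this file. Section and option names come from those
### calls; the values (paths, URL, classifier name) are made-up examples.
#
# [general]
# realtimedir = /home/idq/realtime
#
# [gdb general]
# main_gdb_dir = /home/idq/gracedb
# gdb_url      = https://gracedb.ligo.org/api/   ### optional; GraceDb() default otherwise
#
# [ovl]                                          ### one section per classifier
# plotting_label = OVL                           ### optional; falls back to the section name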
segsum = lsctables.New( lsctables.SegmentSumTable, columns=["process_id", "segment_sum_id", "start_time", "start_time_ns", "end_time", "end_time_ns", "comment", "segment_def_id"] )
segtab = lsctables.New( lsctables.SegmentTable, columns=["process_id", "segment_def_id", "segment_id", "start_time", "start_time_ns", "end_time", "end_time_ns"] )

xml_element.appendChild( segdef )
xml_element.appendChild( segsum )
xml_element.appendChild( segtab )

### iterate through classifiers
for classifier in opts.classifier:
    logger.info('Begin: generating segments for %s'%classifier)

    faps = fapsD[classifier]
    logger.info(' found %d files'%(len(faps)))

    ### need to load in time-series from frames here!
    chan = idq.channame(ifo, classifier, "%s_fap"%usertag)
    t, ts = idq.combine_gwf(faps, [chan]) ### loads in the data from frames
    logger.info(' found %d continuous segments'%(len(t)))

    ### set up segdef row
    fap2segdef_id = {}
    for FAPthr in opts.FAPthr:
        segdef_id = segdef.get_next_id()

        segdef_row = lsctables.SegmentDef()
        segdef_row.process_id = proc_id
        segdef_row.segment_def_id = segdef_id
        segdef_row.ifos = ifo
        segdef_row.name = classifier
        segdef_row.version = 1
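### --------------------------------------------------------------------------
### Illustration only: the loop above goes on to turn each fap time-series
### into [start, stop) segments where the fap stays at or below FAPthr. A
### minimal numpy sketch of that thresholding, with made-up names and not the
### actual idq helper:
#
# import numpy as np
#
# def fap_to_segments(times, fap, FAPthr):
#     """return [[start, stop], ...] over contiguous samples with fap <= FAPthr"""
#     segs = []
#     start = None
#     for tm, below in zip(times, np.asarray(fap) <= FAPthr):
#         if below and start is None:
#             start = tm          ### open a segment at the first sample below threshold
#         elif not below and start is not None:
#             segs.append([start, tm])
#             start = None        ### close the segment when the fap rises above threshold
#     if start is not None:
#         segs.append([start, times[-1]])
#     return segs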
"end_time_ns" ]) xml_element.appendChild(segdef) xml_element.appendChild(segsum) xml_element.appendChild(segtab) ### iterate through classifiers for classifier in opts.classifier: logger.info('Begin: generating segments for %s' % classifier) faps = fapsD[classifier] logger.info(' found %d files' % (len(faps))) ### need to load in time-series from frames here! chan = idq.channame(ifo, classifier, "%s_fap" % usertag) t, ts = idq.combine_gwf(faps, [chan]) ### loads in the data from frames logger.info(' found %d continous segments' % (len(t))) ### set up segdef row fap2segdef_id = {} for FAPthr in opts.FAPthr: segdef_id = segdef.get_next_id() segdef_row = lsctables.SegmentDef() segdef_row.process_id = proc_id segdef_row.segment_def_id = segdef_id segdef_row.ifos = ifo segdef_row.name = classifier segdef_row.version = 1