def measure_efficiency(filenames, threshold, live_time_program = "lalapps_power", upper_limit_scale = "E", tmp_path = None, verbose = False):
    """
    Measure the detection efficiency from the injection databases named
    in filenames, counting as "found" those injections whose coinc
    detection statistic exceeds threshold.

    upper_limit_scale selects the injection amplitude against which the
    efficiency is binned:  "E" uses the equivalent isotropic energy
    (sim.egw_over_rsquared), "hrss" uses sim.hrss.  Any other value
    raises ValueError.

    Returns the populated and finished EfficiencyData object.
    """
    # FIXME: instruments are hard-coded. bad bad bad. sigh...
    if upper_limit_scale == "E":
        efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.egw_over_rsquared), r"Equivalent Isotropic Energy ($M_{\odot} / \mathrm{pc}^{2}$)", 0.1)
    elif upper_limit_scale == "hrss":
        efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.hrss), r"$h_{\mathrm{rss}}$", 0.1)
    else:
        raise ValueError("bad upper_limit_scale %s" % repr(upper_limit_scale))

    #
    # Iterate over injection files.
    #

    for n, filename in enumerate(filenames):
        #
        # Open the database file.
        #

        if verbose:
            print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr)
        working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
        connection = sqlite3.connect(working_filename)
        # expose the Python ranking function to SQL queries run against
        # this connection
        connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
        database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
        if verbose:
            SnglBurstUtils.summarize_coinc_database(database)

        #
        # Process database contents.
        #

        efficiency.add_contents(database, threshold)

        #
        # Done with this file.
        #

        database.connection.close()
        dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

    #
    # Compute efficiency from the data that have been collected
    #

    if verbose:
        # BUG FIX: message used to read "smoothnig"
        print("binning and smoothing efficiency data ...", file=sys.stderr)
    efficiency.finish(threshold)

    #
    # Done
    #

    return efficiency
def measure_threshold(filenames, n_survivors, live_time_program = "lalapps_power", tmp_path = None, open_box = False, verbose = False):
    """
    Measure the detection-statistic threshold that yields n_survivors
    events from the non-injection databases named in filenames.

    Returns the populated RateVsThresholdData book-keeping object,
    with its .finish() method already invoked.
    """
    result = RateVsThresholdData()

    # accumulate rate-vs-threshold statistics from each non-injection
    # database in turn
    for index, db_filename in enumerate(filenames):
        if verbose:
            print("%d/%d: %s" % (index + 1, len(filenames), db_filename), file=sys.stderr)
        scratch_name = dbtables.get_connection_filename(db_filename, tmp_path = tmp_path, verbose = verbose)
        database = SnglBurstUtils.CoincDatabase(sqlite3.connect(scratch_name), live_time_program)
        if verbose:
            SnglBurstUtils.summarize_coinc_database(database)

        result.update_from(database, filename = db_filename, verbose = verbose)

        # release the scratch copy of this database
        database.connection.close()
        dbtables.discard_connection_filename(db_filename, scratch_name, verbose = verbose)

    # convert the accumulated statistics into a likelihood threshold
    if verbose:
        print("finishing rate vs. threshold measurement ...", file=sys.stderr)
    result.finish(n_survivors, open_box = open_box, verbose = verbose)

    return result
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
    """
    Feed the contents of one database file to each analysis product in
    products (each must provide an .add_contents() method), then close
    and discard the scratch copy of the database.  Returns None.
    """
    #
    # connect to database and summarize contents
    #

    working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
    contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
    if verbose:
        SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

    #
    # augment summary with extra stuff we need.  the filename
    # is recorded for dumping debuggin information related to
    # missed injections.  if burca was run with the
    # --coincidence-segments option then the value is copied
    # into a segmentlistdict to facilitate the computation of
    # livetime
    #

    contents.filename = filename

    # --coincidence-segments values recorded by lalapps_burca, one per
    # burca run on this file
    contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
    if contents.coincidence_segments:
        # as a side-effect, this enforces the rule that
        # burca has been run on the input file exactly once
        # (the 1-tuple unpack raises if there are 0 or >1 values)
        contents.coincidence_segments, = contents.coincidence_segments
        # FIXME:  remove when LAL accepts unicode
        contents.coincidence_segments = contents.coincidence_segments.encode('utf-8')
        # parse the comma-delimited range strings and assign the same
        # coalesced segment list to every instrument in the file
        contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
    else:
        contents.coincidence_segments = None

    #
    # process contents
    #

    for n, product in enumerate(products):
        if verbose:
            print("%s: adding to product %d ..." % (working_filename, n), file=sys.stderr)
        product.add_contents(contents, verbose = verbose)

    #
    # close
    #

    contents.connection.close()
    dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
a.coinc_event_id AS coinc_event_id, b.event_id AS event_id FROM coinc_event_map AS a JOIN coinc_event_map AS b ON ( a.table_name == 'sim_burst' AND b.table_name == 'coinc_event' AND b.coinc_event_id == a.coinc_event_id ) """) for n, plot in enumerate(coincplots): if options.verbose: print("adding to coinc plot %d ..." % options.coinc_plot[n], file=sys.stderr) plot.add_contents(database) database.connection.close() dbtables.discard_connection_filename(filename, working_filename, verbose = options.verbose) # # compute the binning for the efficiency contour plots # def make_binning(plots): plots = [plot for instrument in plots.keys() for plot in plots[instrument] if isinstance(plot, SimBurstUtils.Efficiency_hrss_vs_freq)] if not plots: return None minx = min([min(plot.injected_x) for plot in plots]) maxx = max([max(plot.injected_x) for plot in plots]) miny = min([min(plot.injected_y) for plot in plots]) maxy = max([max(plot.injected_y) for plot in plots])
FROM coinc_event_map AS a JOIN coinc_event_map AS b ON ( a.table_name == 'sim_burst' AND b.table_name == 'coinc_event' AND b.coinc_event_id == a.coinc_event_id ) """) for n, plot in enumerate(coincplots): if options.verbose: print("adding to coinc plot %d ..." % options.coinc_plot[n], file=sys.stderr) plot.add_contents(database) database.connection.close() dbtables.discard_connection_filename(filename, working_filename, verbose=options.verbose) # # compute the binning for the efficiency contour plots # def make_binning(plots): plots = [ plot for instrument in plots.keys() for plot in plots[instrument] if isinstance(plot, SimBurstUtils.Efficiency_hrss_vs_freq) ] if not plots: return None minx = min([min(plot.injected_x) for plot in plots])
def dump_confidence_likelihood_scatter_data(globs, live_time_program = "lalapps_power", tmp_path = None, verbose = False):
    """
    Collect (detection statistic, likelihood, confidence) triples from
    the database files matching the shell globs in globs, and dump
    likelihood/confidence scatter-plot data to three .dat files in the
    current directory (background, zero lag, injections).

    Files containing a sim_burst table are treated as injection runs;
    all others contribute to the background and zero-lag sets.  Each of
    the three sets is capped at the 1e6 highest-ranked entries via a
    min-heap.  Returns None.
    """
    #
    # Collect file names.
    #

    if verbose:
        print("building file list ...", file=sys.stderr)
    filenames = sorted(filename for g in globs for filename in glob.glob(g))

    #
    # Initialize storage.
    #

    injections = []
    background = []
    zero_lag = []

    #
    # Iterate over files.
    #

    for n, filename in enumerate(filenames):
        #
        # Open the database file.
        #

        if verbose:
            print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr)
        working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
        connection = sqlite3.connect(working_filename)
        connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
        database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
        if verbose:
            SnglBurstUtils.summarize_coinc_database(database)

        #
        # Process database contents.  Assume all files with
        # sim_burst tables are the outputs of injection runs, and
        # others aren't.
        #

        if database.sim_burst_table is None:
            # non-injections.  each heap holds at most 1e6 of the
            # highest-ranked records; heappushpop() evicts the
            # smallest when the cap is reached.
            for coinc_id, l, c, is_background in bb_id_likelihood_confidence_background(database):
                record = (coinc_detection_statistic(l, c), l, c)
                if is_background:
                    if len(background) < 1e6:
                        heapq.heappush(background, record)
                    else:
                        heapq.heappushpop(background, record)
                else:
                    if len(zero_lag) < 1e6:
                        heapq.heappush(zero_lag, record)
                    else:
                        # BUG FIX: was "zero_ag" (NameError once the
                        # zero-lag heap exceeded 1e6 entries)
                        heapq.heappushpop(zero_lag, record)
        else:
            # injections.  rank by -amplitude so the heap keeps the
            # smallest-amplitude (hardest-to-detect) entries.
            create_sim_coinc_map_view(database.connection)
            for a, l, c in database.connection.cursor().execute("""
SELECT
    burst_coinc_amplitude,
    burst_coinc_likelihood,
    burst_coinc_confidence
FROM
    sim_coinc_map
WHERE
    sim_coinc_def_id == ?
            """, (database.sce_definer_id,)):
                record = (-a, l, c)
                if len(injections) < 1e6:
                    heapq.heappush(injections, record)
                else:
                    heapq.heappushpop(injections, record)

        #
        # Done with this file.
        #

        database.connection.close()
        dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

    #
    # Dump scatter plot data.  BUG FIX: the Python-2-only file()
    # builtin is replaced with open(), and the output files are now
    # closed deterministically via "with".
    #

    if verbose:
        print("writing scatter plot data ...", file=sys.stderr)
    with open("lalapps_excesspowerfinal_background_scatter.dat", "w") as f:
        for a, l, c in background:
            print("%.16g %.16g" % (l, c), file=f)
    with open("lalapps_excesspowerfinal_zero_lag_scatter.dat", "w") as f:
        for a, l, c in zero_lag:
            print("%.16g %.16g" % (l, c), file=f)
    with open("lalapps_excesspowerfinal_injections_scatter.dat", "w") as f:
        for a, l, c in injections:
            print("%.16g %.16g" % (l, c), file=f)
    if verbose:
        print("done.", file=sys.stderr)