def measure_efficiency(filenames, threshold, live_time_program = "lalapps_power", upper_limit_scale = "E", tmp_path = None, verbose = False, instruments = ("H1", "H2", "L1")):
	"""
	Measure detection efficiency from a collection of injection-run
	SQLite database files.

	filenames is a sequence of database file names;  threshold is
	passed through to EfficiencyData.add_contents() and .finish().
	upper_limit_scale selects the amplitude co-ordinate in which the
	efficiency is measured:  "E" for equivalent isotropic energy
	(sim.egw_over_rsquared) or "hrss" (sim.hrss).  instruments is
	the instrument set handed to EfficiencyData;  the default
	preserves the previously hard-coded ("H1", "H2", "L1").

	Returns the populated EfficiencyData object.  Raises ValueError
	if upper_limit_scale is not one of "E" or "hrss".
	"""
	# select the amplitude co-ordinate for the efficiency
	# measurement
	if upper_limit_scale == "E":
		efficiency = EfficiencyData(instruments, (lambda sim, instrument: sim.egw_over_rsquared), r"Equivalent Isotropic Energy ($M_{\odot} / \mathrm{pc}^{2}$)", 0.1)
	elif upper_limit_scale == "hrss":
		efficiency = EfficiencyData(instruments, (lambda sim, instrument: sim.hrss), r"$h_{\mathrm{rss}}$", 0.1)
	else:
		raise ValueError("bad upper_limit_scale %s" % repr(upper_limit_scale))

	#
	# Iterate over injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr)

		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(str(working_filename))
		# expose the ranking statistic to SQL queries
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		efficiency.add_contents(database, threshold)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Compute efficiency from the data that have been collected
	#

	if verbose:
		# FIX: message previously read "smoothnig"
		print("binning and smoothing efficiency data ...", file=sys.stderr)
	efficiency.finish(threshold)

	#
	# Done
	#

	return efficiency
def measure_threshold(filenames, n_survivors, live_time_program = "lalapps_power", tmp_path = None, open_box = False, verbose = False):
	"""
	Scan a collection of non-injection database files, accumulate
	event rate vs. detection threshold information, and determine
	the threshold at which n_survivors events survive.  Returns the
	populated RateVsThresholdData object.
	"""
	# book-keeping object that accumulates the measurement
	result = RateVsThresholdData()

	# visit each non-injection database in turn
	for file_index, filename in enumerate(filenames):
		if verbose:
			print("%d/%d: %s" % (file_index + 1, len(filenames), filename), file=sys.stderr)

		# copy the database to scratch space and open it
		scratch_name = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		database = SnglBurstUtils.CoincDatabase(sqlite3.connect(str(scratch_name)), live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		# harvest this file's contribution
		result.update_from(database, filename = filename, verbose = verbose)

		# disconnect and discard the scratch copy
		database.connection.close()
		dbtables.discard_connection_filename(filename, scratch_name, verbose = verbose)

	# convert the accumulated data into a likelihood threshold
	if verbose:
		print("finishing rate vs. threshold measurement ...", file=sys.stderr)
	result.finish(n_survivors, open_box = open_box, verbose = verbose)

	return result
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
	"""
	Open the named StringCusp database file, summarize its
	contents, and add them to each measurement product in the
	products sequence.  The file is staged through tmp_path for the
	duration (see dbtables.get_connection_filename()).
	"""
	#
	# connect to database and summarize contents
	#

	working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
	contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
	if verbose:
		SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

	#
	# augment summary with extra stuff we need.  the filename is
	# recorded for dumping debugging information related to missed
	# injections.  if burca was run with the --coincidence-segments
	# option then the value is copied into a segmentlistdict to
	# facilitate the computation of livetime
	#

	contents.filename = filename
	contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
	if contents.coincidence_segments:
		# as a side-effect, this enforces the rule that burca
		# has been run on the input file exactly once
		contents.coincidence_segments, = contents.coincidence_segments
		contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
	else:
		contents.coincidence_segments = None

	#
	# process contents
	#

	for n, product in enumerate(products):
		if verbose:
			# FIX: was a Python 2 print statement
			# (print >>sys.stderr, ...), a runtime TypeError
			# under Python 3;  converted to the print function
			# used elsewhere in this code base
			print("%s: adding to product %d ..." % (working_filename, n), file=sys.stderr)
		product.add_contents(contents, verbose = verbose)

	#
	# close
	#

	contents.connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
# stage the database into scratch space and open it
working_filename = dbtables.get_connection_filename(
	filename, tmp_path=options.tmp_space, verbose=options.verbose)
connection = sqlite3.connect(working_filename)
if options.tmp_space is not None:
	dbtables.set_temp_store_directory(connection,
		options.tmp_space, verbose=options.verbose)

#
# Summarize the database.
#

contents = SnglBurstUtils.CoincDatabase(
	connection,
	live_time_program="StringSearch",
	search="StringCusp",
	veto_segments_name=options.vetoes_name)
if options.verbose:
	SnglBurstUtils.summarize_coinc_database(contents)
if not contents.seglists and options.verbose:
	# FIX: was a Python 2 print statement (print >> sys.stderr,
	# ...), a runtime TypeError under Python 3;  converted to the
	# print function used elsewhere in this code base
	print("\twarning: no segments found", file=sys.stderr)

#
# Build triangulators.  The timing uncertainty of +/- 8e-5 s was
# measured with lalapps_string_plot_binj and is essentially
# identical for H1, H2, L1, and V1.
#

triangulators = stringutils.triangulators(
	dict((instrument, 8e-5) for instrument in contents.instruments))
plots = [plots[i] for i in options.plot] # # Process files # for n, filename in enumerate( ligolw_utils.sort_files_by_size(filenames, options.verbose, reverse=True)): if options.verbose: print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr) working_filename = dbtables.get_connection_filename( filename, tmp_path=options.tmp_space, verbose=options.verbose) database = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), options.live_time_program, search="StringCusp") if options.verbose: SnglBurstUtils.summarize_coinc_database(database) is_injection_db = "sim_burst" in dbtables.get_table_names( database.connection) if is_injection_db: database.connection.cursor().execute( """ CREATE TEMPORARY TABLE sim_burst_map AS SELECT a.event_id AS simulation_id, a.coinc_event_id AS coinc_event_id, b.event_id AS event_id
def dump_confidence_likelihood_scatter_data(globs, live_time_program = "lalapps_power", tmp_path = None, verbose = False):
	"""
	Collect (statistic, likelihood, confidence) triples from the
	database files matching the glob patterns in globs and write
	(likelihood, confidence) scatter-plot data files, keeping at
	most the 10^6 best-ranked records in each of the background,
	zero-lag and injection categories (via fixed-size min-heaps).
	"""
	#
	# Collect file names.
	#

	if verbose:
		print("building file list ...", file=sys.stderr)
	filenames = sorted(filename for g in globs for filename in glob.glob(g))

	#
	# Initialize storage.
	#

	injections = []
	background = []
	zero_lag = []

	#
	# Iterate over files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr)

		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.  Assume all files with
		# sim_burst tables are the outputs of injection runs,
		# and others aren't.
		#

		if database.sim_burst_table is None:
			# non-injections.  (loop variable renamed from
			# "id", which shadowed the builtin;  it is unused)
			for coinc_id, l, c, is_background in bb_id_likelihood_confidence_background(database):
				record = (coinc_detection_statistic(l, c), l, c)
				if is_background:
					if len(background) < 1e6:
						heapq.heappush(background, record)
					else:
						heapq.heappushpop(background, record)
				else:
					if len(zero_lag) < 1e6:
						heapq.heappush(zero_lag, record)
					else:
						# FIX: was "zero_ag", a
						# NameError at runtime
						heapq.heappushpop(zero_lag, record)
		else:
			# injections
			create_sim_coinc_map_view(database.connection)
			for a, l, c in database.connection.cursor().execute(""" SELECT burst_coinc_amplitude, burst_coinc_likelihood, burst_coinc_confidence FROM sim_coinc_map WHERE sim_coinc_def_id == ? """, (database.sce_definer_id,)):
				record = (-a, l, c)
				if len(injections) < 1e6:
					heapq.heappush(injections, record)
				else:
					heapq.heappushpop(injections, record)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Dump scatter plot data.
	#

	if verbose:
		print("writing scatter plot data ...", file=sys.stderr)
	# FIX: the original used the Python 2 file() builtin (a
	# NameError under Python 3) and leaked the file objects;  use
	# open() with context managers
	with open("lalapps_excesspowerfinal_background_scatter.dat", "w") as f:
		for a, l, c in background:
			print("%.16g %.16g" % (l, c), file=f)
	with open("lalapps_excesspowerfinal_zero_lag_scatter.dat", "w") as f:
		for a, l, c in zero_lag:
			print("%.16g %.16g" % (l, c), file=f)
	with open("lalapps_excesspowerfinal_injections_scatter.dat", "w") as f:
		for a, l, c in injections:
			print("%.16g %.16g" % (l, c), file=f)
	if verbose:
		print("done.", file=sys.stderr)
) JOIN sngl_burst ON ( b.table_name == 'sngl_burst' AND b.event_id == sngl_burst.event_id ) WHERE a.table_name == 'sim_burst' AND a.event_id == sim_burst.simulation_id AND sngl_burst.ifo == ? ) """, (instrument, )): yield contents.sim_burst_table.row_from_cols(values) for n, filename in enumerate(filenames): if options.verbose: print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr) database = SnglBurstUtils.CoincDatabase(sqlite3.connect(filename), "lalapps_power") if options.verbose: SnglBurstUtils.summarize_coinc_database(database) for instrument in database.instruments: for sim in found_injections(database, instrument): plotname = "%s%d_%s.%s" % ( options.base, sim.time_at_instrument(instrument).seconds, instrument, options.format) if options.verbose: print("--> %s" % plotname, file=sys.stderr) time_freq_plot(database, instrument, sim).savefig(plotname) database.connection.close()
return [l[i] for i in plots] options, filenames = parse_command_line() plots = new_plots(options.plot) for n, filename in enumerate( ligolw_utils.sort_files_by_size(filenames, options.verbose, reverse=True)[options.skip:]): if options.verbose: print("%d/%d: %s" % (n + 1, len(filenames) - options.skip, filename), file=sys.stderr) database = SnglBurstUtils.CoincDatabase(sqlite3.connect(filename), options.live_time_program) if options.verbose: SnglBurstUtils.summarize_coinc_database(database) for n, plot in zip(options.plot, plots): if options.verbose: print("adding to burca plot %d ..." % n, file=sys.stderr) plot.add_contents(database) database.connection.close() # delete the plots as we go to save memory n = 0 format = "%%s%%0%dd.%%s" % (int(math.log10(max(options.plot) or 1)) + 1) while len(plots): filename = format % (options.base, options.plot[n], options.format)
cached_likelihood_files = set() for n, filename in enumerate(filenames): # # Open the file. # if options.verbose: print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr) working_filename = dbtables.get_connection_filename( filename, tmp_path=options.tmp_space, verbose=options.verbose) connection = sqlite3.connect(working_filename) connection.execute("PRAGMA temp_store_directory = '%s';" % dbtables.tempfile.gettempdir()) database = SnglBurstUtils.CoincDatabase(connection, options.program) if options.verbose: SnglBurstUtils.summarize_coinc_database(database) # # Retrieve appropriate likelihood data. # if options.likelihood_data_cache: likelihood_files = set( c.path for c in options.likelihood_data_cache if c.segmentlistdict.intersects(database.seglists)) else: likelihood_files = options.likelihood_data if likelihood_files != cached_likelihood_files: distributions = load_likelihood_data(likelihood_files,
if options.verbose: print >> sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename) working_filename = dbtables.get_connection_filename( filename, tmp_path=options.tmp_space, verbose=options.verbose) connection = sqlite3.connect(working_filename) connection.execute("PRAGMA synchronous = OFF;") connection.execute("PRAGMA temp_store_directory = '%s';" % dbtables.tempfile.gettempdir()) # # Summarize the database. # database = SnglBurstUtils.CoincDatabase(connection, options.live_time_program) if options.verbose: SnglBurstUtils.summarize_coinc_database(database) segs |= database.seglists # # Record statistics. Assume all files with sim_burst tables are # the outputs of injection runs, and others aren't. # if database.sim_burst_table is None: # iterate over burst<-->burst coincs for is_background, events, offsetvector in database.get_noninjections( ): params = distributions.coinc_params(events, offsetvector, MW_CENTER_J2000_RA_RAD,
#
# Open the database file.
#

if options.verbose:
	# FIX: was a Python 2 print statement with a trailing comma
	# (suppressed newline), a runtime TypeError under Python 3;
	# converted to the print-function form its sibling script
	# already uses, with end=' ' reproducing the suppressed
	# newline
	print("%d/%d: %s" % (n + 1, len(filenames), filename), end=' ', file=sys.stderr)
working_filename = dbtables.get_connection_filename(
	filename, tmp_path=options.tmp_space, verbose=options.verbose)
connection = sqlite3.connect(working_filename)
connection.execute("PRAGMA synchronous = OFF;")
connection.execute("PRAGMA temp_store_directory = '%s';" %
	dbtables.tempfile.gettempdir())

#
# Apply vetoes
#

apply_excess_power_veto(SnglBurstUtils.CoincDatabase(
	connection, "lalapps_power"), veto_segs,
	verbose=options.verbose)

#
# Clean up
#

if options.verbose:
	print("committing ...", file=sys.stderr)
connection.commit()
if not options.no_vacuum:
	if options.verbose:
		print("vacuuming ...", file=sys.stderr)
	connection.cursor().execute("VACUUM;")
connection.close()
# Open the database file. # if options.verbose: print("%d/%d: %s" % (n + 1, len(filenames), filename), end=' ', file=sys.stderr) working_filename = dbtables.get_connection_filename(filename, tmp_path = options.tmp_space, verbose = options.verbose) connection = sqlite3.connect(working_filename) connection.execute("PRAGMA synchronous = OFF;") connection.execute("PRAGMA temp_store_directory = '%s';" % dbtables.tempfile.gettempdir()) # # Apply vetoes # apply_excess_power_veto(SnglBurstUtils.CoincDatabase(connection, "lalapps_power"), veto_segs, verbose = options.verbose) # # Clean up # if options.verbose: print("committing ...", file=sys.stderr) connection.commit() if not options.no_vacuum: if options.verbose: print("vacuuming ...", file=sys.stderr) connection.cursor().execute("VACUUM;") connection.close() del connection dbtables.put_connection_filename(filename, working_filename, verbose = options.verbose)