Example #1
def get_injections(injfnames, FAR, zero_lag_segments, verbose = False):
  """
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print("", file=sys.stderr)
  for cnt, f in enumerate(injfnames):
    print("getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),), end=' ', file=sys.stderr)
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.SimInspiralTable.get_table(dbtables.get_xml(connection))._row_from_cols

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print("\nFound = %d Missed = %d" % (len(found), len(missed)), file=sys.stderr)
  return found, missed
Example #2
def get_far_threshold_and_segments(zerofname, live_time_program, instruments, verbose = False):
  """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract false alarm rate threshold
  query = 'SELECT MIN(coinc_inspiral.combined_far) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE (coinc_event.instruments == "' + str(instruments) + '") AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0);'
  print(query)
  far, = connection.cursor().execute(query).fetchone()

  # extract segments.
  seglists = db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = live_time_program)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  print("WARNING replacing far with 10^-7", file=sys.stderr)
  far = 1.0e-7
  return far, seglists
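
Read together, Examples #1 and #2 form a pair: the loudest-event FAR and the zero-lag segments come out of the full-data database, and are then used to split the injections into found and missed. The lines below are a minimal sketch of that calling pattern, not taken from the source; the file names, the "thinca" live-time program and the instruments string are assumptions.

far, seglists = get_far_threshold_and_segments(
    "FULL_DATA.sqlite",   # hypothetical zero-lag database
    "thinca",             # assumed live_time_program
    "H1,H2,L1",           # instruments string as stored in coinc_event
    verbose=True)
# collapse the per-instrument segmentlistdict to the time all instruments were on
zero_lag_segments = seglists.intersection(seglists.keys())
found, missed = get_injections(["INJ.sqlite"], far, zero_lag_segments, verbose=True)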
Example #3
def get_far_threshold_and_segments(zerofname,
                                   live_time_program,
                                   instruments,
                                   verbose=False):
    """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract false alarm rate threshold
    query = 'SELECT MIN(coinc_inspiral.combined_far) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE (coinc_event.instruments == "' + str(
        instruments
    ) + '") AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0);'
    print(query)
    far, = connection.cursor().execute(query).fetchone()

    # extract segments.
    seglists = db_thinca_rings.get_thinca_zero_lag_segments(
        connection, program_name=live_time_program)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    print("WARNING replacing far with 10^-7", file=sys.stderr)
    far = 1.0e-7
    return far, seglists
Example #4
def get_far_threshold_and_segments(zerofname,
                                   instruments,
                                   live_time_program,
                                   veto_seg_name="vetoes",
                                   verbose=True):
    """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract false alarm rate threshold
    query = 'SELECT MIN(coinc_inspiral.false_alarm_rate) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE ( coinc_event.instruments = "' + instruments + '" AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0) );'
    print("\n", query)
    far, = connection.cursor().execute(query).fetchone()

    # extract segments.
    seglists = db_thinca_rings.get_thinca_zero_lag_segments(
        connection, program_name=live_time_program)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)

    return far, seglists
Example #5
def get_injections(injfnames, FAR, zero_lag_segments, verbose = False):
  """
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print >>sys.stderr, ""
  for cnt, f in enumerate(injfnames):
    print >>sys.stderr, "getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.table.get_table(dbtables.get_xml(connection), lsctables.SimInspiralTable.tableName)._row_from_cols

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
  return found, missed
Example #6
def get_vetoes(fname, veto_segments_name = "vetoes", verbose=True):
  working_filename = dbtables.get_connection_filename(fname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)
  veto_segments = db_thinca_rings.get_veto_segments(connection, veto_segments_name)
  connection.close()
  dbtables.discard_connection_filename(fname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  return veto_segments
Example #7
def measure_efficiency(filenames, threshold, live_time_program = "lalapps_power", upper_limit_scale = "E", tmp_path = None, verbose = False):
	# FIXME:  instruments are hard-coded.  bad bad bad.  sigh...
	if upper_limit_scale == "E":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.egw_over_rsquared), r"Equivalent Isotropic Energy ($M_{\odot} / \mathrm{pc}^{2}$)", 0.1)
	elif upper_limit_scale == "hrss":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.hrss), r"$h_{\mathrm{rss}}$", 0.1)
	else:
		raise ValueError("bad upper_limit_scale %s" % repr(upper_limit_scale))

	#
	# Iterate over injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		efficiency.add_contents(database, threshold)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Compute efficiency from the data that have been collected
	#

	if verbose:
		print >>sys.stderr, "binning and smoothnig efficiency data ..."
	efficiency.finish(threshold)

	#
	# Done
	#

	return efficiency
Example #8
def get_vetoes(fname, veto_segments_name="vetoes", verbose=True):
    working_filename = dbtables.get_connection_filename(fname, verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    veto_segments = db_thinca_rings.get_veto_segments(connection,
                                                      veto_segments_name)
    connection.close()
    dbtables.discard_connection_filename(fname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    return veto_segments
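
A minimal usage sketch for get_vetoes, with a hypothetical file name and option value; Example #22 below inlines the same pattern and falls back to an empty segmentlistdict when no veto segment name is given.

veto_segments_name = "vetoes"   # hypothetical option value
if veto_segments_name is not None:
    veto_segments = get_vetoes("FULL_DATA.sqlite", veto_segments_name, verbose=True)
else:
    veto_segments = segments.segmentlistdict()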
Example #9
def measure_efficiency(filenames, threshold, live_time_program = "lalapps_power", upper_limit_scale = "E", tmp_path = None, verbose = False):
	# FIXME:  instruments are hard-coded.  bad bad bad.  sigh...
	if upper_limit_scale == "E":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.egw_over_rsquared), r"Equivalent Isotropic Energy ($M_{\odot} / \mathrm{pc}^{2}$)", 0.1)
	elif upper_limit_scale == "hrss":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.hrss), r"$h_{\mathrm{rss}}$", 0.1)
	else:
		raise ValueError("bad upper_limit_scale %s" % repr(upper_limit_scale))

	#
	# Iterate over injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		efficiency.add_contents(database, threshold)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Compute efficiency from the data that have been collected
	#

	if verbose:
		print >>sys.stderr, "binning and smoothnig efficiency data ..."
	efficiency.finish(threshold)

	#
	# Done
	#

	return efficiency
Example #10
def measure_threshold(filenames, n_survivors, live_time_program = "lalapps_power", tmp_path = None, open_box = False, verbose = False):
	#
	# Initialize the book-keeping object.
	#

	rate_vs_threshold_data = RateVsThresholdData()

	#
	# Iterate over non-injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		database = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		rate_vs_threshold_data.update_from(database, filename = filename, verbose = verbose)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Determine likelihood threshold.
	#

	if verbose:
		print >>sys.stderr, "finishing rate vs. threshold measurement ..."
	rate_vs_threshold_data.finish(n_survivors, open_box = open_box, verbose = verbose)

	#
	# Done.
	#

	return rate_vs_threshold_data
Example #11
def measure_threshold(filenames, n_survivors, live_time_program = "lalapps_power", tmp_path = None, open_box = False, verbose = False):
	#
	# Initialize the book-keeping object.
	#

	rate_vs_threshold_data = RateVsThresholdData()

	#
	# Iterate over non-injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		database = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		rate_vs_threshold_data.update_from(database, filename = filename, verbose = verbose)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Determine likelihood threshold.
	#

	if verbose:
		print >>sys.stderr, "finishing rate vs. threshold measurement ..."
	rate_vs_threshold_data.finish(n_survivors, open_box = open_box, verbose = verbose)

	#
	# Done.
	#

	return rate_vs_threshold_data
Example #12
def setup_files(dir_name, gps_start_time, gps_end_time):
    # Filter out the ones that are outside our time range
    xml_files = segmentdb_utils.get_all_files_in_range(dir_name, gps_start_time, gps_end_time)

    handle, temp_db  = tempfile.mkstemp(suffix='.sqlite')
    os.close(handle)

    target     = dbtables.get_connection_filename(temp_db, None, True, False)
    connection = ligolw_sqlite.setup(target)

    ligolw_sqlite.insert_from_urls(connection, xml_files) # [temp_xml])

    segmentdb_utils.ensure_segment_table(connection)

    return temp_db, connection
Example #13
def setup_files(dir_name, gps_start_time, gps_end_time):
    # Filter out the ones that are outside our time range
    xml_files = segmentdb_utils.get_all_files_in_range(dir_name, gps_start_time, gps_end_time)

    handle, temp_db  = tempfile.mkstemp(suffix='.sqlite')
    os.close(handle)

    target     = dbtables.get_connection_filename(temp_db, None, True, False)
    connection = ligolw_sqlite.setup(target)

    ligolw_sqlite.insert_from_urls(connection, xml_files) # [temp_xml])

    segmentdb_utils.ensure_segment_table(connection)
        
    return temp_db, connection
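
setup_files copies every XML file in the GPS range into a freshly created temporary SQLite database and returns both the temporary path and the open connection, so the caller owns the cleanup. A minimal sketch, with a hypothetical directory and GPS times:

temp_db, connection = setup_files("segment_xml_dir/", 924900000, 924900016)
# ... run segment queries against `connection` here ...
connection.close()
os.remove(temp_db)   # the temporary database is not removed automatically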
Example #14
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
	#
	# connect to database and summarize contents
	#

	working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
	contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
	if verbose:
		SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

	#
	# augment summary with extra stuff we need.  the filename
	# is recorded for dumping debugging information related to
	# missed injections.  if burca was run with the
	# --coincidence-segments option then the value is copied
	# into a segmentlistdict to facilitate the computation of
	# livetime
	#

	contents.filename = filename

	contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
	if contents.coincidence_segments:
		# as a side-effect, this enforces the rule that
		# burca has been run on the input file exactly once
		contents.coincidence_segments, = contents.coincidence_segments
		contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
	else:
		contents.coincidence_segments = None

	#
	# process contents
	#

	for n, product in enumerate(products):
		if verbose:
			print >>sys.stderr, "%s: adding to product %d ..." % (working_filename, n)
		product.add_contents(contents, verbose = verbose)

	#
	# close
	#

	contents.connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
Example #15
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
	#
	# connect to database and summarize contents
	#

	working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
	contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
	if verbose:
		SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

	#
	# augment summary with extra stuff we need.  the filename
	# is recorded for dumping debugging information related to
	# missed injections.  if burca was run with the
	# --coincidence-segments option then the value is copied
	# into a segmentlistdict to facilitate the computation of
	# livetime
	#

	contents.filename = filename

	contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
	if contents.coincidence_segments:
		# as a side-effect, this enforces the rule that
		# burca has been run on the input file exactly once
		contents.coincidence_segments, = contents.coincidence_segments
		contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
	else:
		contents.coincidence_segments = None

	#
	# process contents
	#

	for n, product in enumerate(products):
		if verbose:
			print >>sys.stderr, "%s: adding to product %d ..." % (working_filename, n)
		product.add_contents(contents, verbose = verbose)

	#
	# close
	#

	contents.connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
Example #16
def get_ifos(zerofname, verbose=True):
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract the distinct ifo combinations recorded in the coinc_inspiral table
  # FIXME This may not be robust if triggers are missing from a given category, for
  # example no triples in zero lag or time slides.
  query = 'SELECT distinct(ifos) FROM coinc_inspiral'
  ifo_list = []
  for i in connection.cursor().execute(query): ifo_list.append(i)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  return ifo_list
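
Note that the cursor in get_ifos yields one-element tuples, so ifo_list is a list of tuples rather than of strings. A caller that wants plain instrument strings has to unpack them; a small sketch with a hypothetical file name:

ifo_list = get_ifos("FULL_DATA.sqlite")
ifo_strings = [ifos for (ifos,) in ifo_list]   # e.g. ["H1,H2,L1", "H1,L1"]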
Example #17
def open_pipedown_database(database_filename,tmp_space):
    """
    Open the connection to the pipedown database
    """
    if not os.access(database_filename,os.R_OK):
        raise Exception('Unable to open input file: %s'%(database_filename))
    from glue.ligolw import dbtables
    try:
        import sqlite3
    except ImportError:
        # Pre-2.5
        from pysqlite2 import dbapi2 as sqlite3
    working_filename=dbtables.get_connection_filename(database_filename,tmp_path=tmp_space)
    connection = sqlite3.connect(working_filename)
    if tmp_space:
        dbtables.set_temp_store_directory(connection,tmp_space)
    dbtables.DBTable_set_connection(connection)
    return (connection,working_filename) 
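
open_pipedown_database copies the input database to a scratch file via dbtables.get_connection_filename, so the matching teardown is to close the connection and hand the working copy back. A minimal sketch with hypothetical paths:

connection, working_filename = open_pipedown_database("pipedown.sqlite", "/tmp")
# ... read results from `connection` here ...
connection.close()
dbtables.discard_connection_filename("pipedown.sqlite", working_filename)
dbtables.DBTable_set_connection(None)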
Example #18
  def update_coincs(self, fnames):
    """
    This function iterates over the databases and updates the likelihood
    column with the proper lv stat
    """
    for f in fnames:
      working_filename = dbtables.get_connection_filename(f, verbose = True)
      connection = sqlite3.connect(working_filename)
      dbtables.DBTable_set_connection(connection)
      connection.create_function("lvstat", 3, self.lvstat)
      query = "UPDATE coinc_event SET likelihood = (SELECT lvstat(coinc_event.instruments, coinc_inspiral.ifos, coinc_inspiral.false_alarm_rate) FROM coinc_inspiral WHERE coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id)"
      print query

      connection.cursor().execute(query)
      connection.commit()
      connection.close()
      dbtables.discard_connection_filename(f, working_filename, verbose = True)
      dbtables.DBTable_set_connection(None)
Example #19
    def update_coincs(self, fnames):
        """
    This function iterates over the databases and updates the likelihood
    column with the proper lv stat
    """
        for f in fnames:
            working_filename = dbtables.get_connection_filename(f,
                                                                verbose=True)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            connection.create_function("lvstat", 3, self.lvstat)
            query = "UPDATE coinc_event SET likelihood = (SELECT lvstat(coinc_event.instruments, coinc_inspiral.ifos, coinc_inspiral.false_alarm_rate) FROM coinc_inspiral WHERE coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id)"
            print(query)

            connection.cursor().execute(query)
            connection.commit()
            connection.close()
            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=True)
            dbtables.DBTable_set_connection(None)
Example #20
def get_ifos(zerofname, verbose=True):
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract the distinct ifo combinations recorded in the coinc_inspiral table
    # FIXME This may not be robust if triggers are missing from a given category, for
    # example no triples in zero lag or time slides.
    query = 'SELECT distinct(ifos) FROM coinc_inspiral'
    ifo_list = []
    for i in connection.cursor().execute(query):
        ifo_list.append(i)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    return ifo_list
Example #21
def get_far_threshold_and_segments(zerofname, instruments, live_time_program, veto_seg_name="vetoes", verbose = True):
  """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract false alarm rate threshold
  query = 'SELECT MIN(coinc_inspiral.false_alarm_rate) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE ( coinc_event.instruments = "' + instruments + '" AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0) );'
  print "\n", query
  far, = connection.cursor().execute(query).fetchone()

  # extract segments.
  seglists = db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = live_time_program)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)

  return far, seglists
Example #22

# FIXME These values should probably be command line arguments or derived from the database
secs_in_year = 31556926.0
max_dist = 2000
min_mass = 1
max_mass = 99
min_mtotal = 25
max_mtotal = 100
mass_bins = 11
dist_bins = 50

opts, filenames = parse_command_line()

if opts.veto_segments_name is not None:
    working_filename = dbtables.get_connection_filename(opts.full_data_file,
                                                        verbose=opts.verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    veto_segments = db_thinca_rings.get_veto_segments(connection,
                                                      opts.veto_segments_name)
    connection.close()
    dbtables.discard_connection_filename(opts.full_data_file,
                                         working_filename,
                                         verbose=opts.verbose)
    dbtables.DBTable_set_connection(None)
else:
    veto_segments = segments.segmentlistdict()

if not opts.burst_found and not opts.burst_missed:
    FAR, seglists = get_far_threshold_and_segments(
        opts.full_data_file,
Example #23
def dump_confidence_likelihood_scatter_data(globs, live_time_program = "lalapps_power", tmp_path = None, verbose = False):
	#
	# Collect file names.
	#

	if verbose:
		print >>sys.stderr, "building file list ..."
	filenames = sorted(filename for g in globs for filename in glob.glob(g))

	#
	# Initialize storage.
	#

	injections = []
	background = []
	zero_lag = []

	#
	# Iterate over files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.  Assume all files with
		# sim_burst tables are the outputs of injection runs, and
		# others aren't.
		#

		if database.sim_burst_table is None:
			# non-injections
			for id, l, c, is_background in bb_id_likelihood_confidence_background(database):
				record = (coinc_detection_statistic(l, c), l, c)
				if is_background:
					if len(background) < 1e6:
						heapq.heappush(background, record)
					else:
						heapq.heappushpop(background, record)
				else:
					if len(zero_lag) < 1e6:
						heapq.heappush(zero_lag, record)
					else:
						heapq.heappushpop(zero_lag, record)
		else:
			# injections
			create_sim_coinc_map_view(database.connection)
			for a, l, c in database.connection.cursor().execute("""
SELECT
	burst_coinc_amplitude,
	burst_coinc_likelihood,
	burst_coinc_confidence
FROM
	sim_coinc_map
WHERE
	sim_coinc_def_id == ?
			""", (database.sce_definer_id,)):
				record = (-a, l, c)
				if len(injections) < 1e6:
					heapq.heappush(injections, record)
				else:
					heapq.heappushpop(injections, record)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Dump scatter plot data.
	#

	if verbose:
		print >>sys.stderr, "writing scatter plot data ..."

	f = file("lalapps_excesspowerfinal_background_scatter.dat", "w")
	for a, l, c in background:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_zero_lag_scatter.dat", "w")
	for a, l, c in zero_lag:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_injections_scatter.dat", "w")
	for a, l, c in injections:
		print >>f, "%.16g %.16g" % (l, c)

	if verbose:
		print >>sys.stderr, "done."
Example #24
in S6, fulldata databases include timeslides, injections, and zerolag
so in general, you will only be providing one file, for example:
~/lalsuite/pylal/bin/mvsc_plot.py H1L1-FULL_DATA_CAT_2_VETO_CLUSTERED_CBC_RESULTS-951868815-1209600.sqlite
"""
, version='%prog')
(opts,files)=parser.parse_args()

timeslide_likelihood = []
timeslide_snr = []
zerolag_likelihood = []
zerolag_snr = []
injection_likelihood = []
injection_snr = []
for filename in files:
  local_disk = None #"/tmp"
  working_filename = dbtables.get_connection_filename(filename, tmp_path = local_disk, verbose = True)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)
  xmldoc = dbtables.get_xml(connection)
  cursor = connection.cursor()
  for likelihood, snr, is_background in connection.cursor().execute("""
  SELECT 
    insp_coinc_event.likelihood, 
    coinc_inspiral.snr,
    EXISTS (
      SELECT
        * 
      FROM 
        time_slide 
      WHERE
       time_slide.time_slide_id == insp_coinc_event.time_slide_id
Example #25
        engine, 'segment_summary',
        [('H1', 'DMT-TESTSEG_2', 1, 924900000, 924900016, 0, 0),
         ('H1', 'DMT-TESTSEG_3', 1, 924900000, 924900016, 0, 0)])

    if res[0] != segmentlist([segment(924900000, 924900010)]):
        return False

    if res[1] != segmentlist([segment(924900008, 924900016)]):
        return False

    return True


if __name__ == '__main__':
    db_name = 'seg_test_db.sqlite'
    target = dbtables.get_connection_filename(db_name, None, True, False)
    connection = ligolw_sqlite.setup(target)

    engine = query_engine.SqliteQueryEngine(connection)

    ligolw_sqlite.insert(connection, ['test_segdb_utils.xml'])

    print "Testing basic segment summary...",
    print test_basic_seg_summary(engine) and "succeeded." or "FAILED."

    print "Testing expanding version numbers...",
    print test_expand_versions(engine) and "succeeded." or "FAILED."

    print "Testing optimized segment query...",
    print test_optimized_query(engine) and "succeeded." or "FAILED."
Example #26
  def __init__(self, flist, opts):
    self.far = {}
    self.segments = segments.segmentlistdict()
    self.non_inj_fnames = []
    self.inj_fnames = []
    #self.der_fit = None
    self.twoDMassBins = None
    #self.dBin = {}
    self.gw = None
    self.found = {}
    self.missed = {}
    self.wnfunc = None
    self.opts = opts
    if opts.bootstrap_iterations: self.bootnum = int(opts.bootstrap_iterations)
    else: self.bootnum = 100
    self.veto_segments = segments.segmentlistdict()
    self.zero_lag_segments = {}
    self.instruments = []
    self.livetime = {}
    self.minmass = None
    self.maxmass = None
    self.mintotal = None
    self.maxtotal = None

    for f in flist: 
      if opts.verbose: print >> sys.stderr, "Gathering stats from: %s...." % (f,)
      working_filename = dbtables.get_connection_filename(f, verbose = opts.verbose)
      connection = sqlite3.connect(working_filename)
      dbtables.DBTable_set_connection(connection)
      xmldoc = dbtables.get_xml(connection)

      # look for a sim table
      try:
        sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
        self.inj_fnames.append(f)
        sim = True
      except ValueError:
        self.non_inj_fnames.append(f)
        sim = False

      if not sim: 
        if opts.veto_segments_name is not None: self.veto_segments = db_thinca_rings.get_veto_segments(connection, opts.veto_segments_name)
        self.get_instruments(connection)
        self.segments += db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = opts.live_time_program)
        self.get_far_thresholds(connection)
      else: 
        self.get_mass_ranges(connection)
      

      #connection.close()
      dbtables.discard_connection_filename(f, working_filename, verbose = opts.verbose)
      dbtables.DBTable_set_connection(None)      

    # FIXME Do these have to be done by instruments?
    self.segments -= self.veto_segments

    # compute far, segments and livetime by instruments
    for i in self.instruments:
      self.far[i] = min(self.far[i])
      # FIXME this bombs if any of the FARS are zero. maybe it should continue
      # and just remove that instrument combo from the calculation
      if self.far[i] == 0: 
        print >> sys.stderr, "Encountered 0 FAR in %s, ABORTING" % (i,)
        sys.exit(1)
      self.zero_lag_segments[i] = self.segments.intersection(i) - self.segments.union(set(self.segments.keys()) - i)
      # Livetime must have playground removed
      self.livetime[i] = float(abs(self.zero_lag_segments[i] - segmentsUtils.S2playground(self.segments.extent_all())))
      if opts.verbose: print >> sys.stderr, "%s FAR %e, livetime %f" % (",".join(sorted(list(i))), self.far[i], self.livetime[i])

    # get a 2D mass binning
    self.twoDMassBins = self.get_2d_mass_bins(self.minmass, self.maxmass, opts.mass_bins)
Example #27
def get_injections(injfnames,
                   zero_lag_segments,
                   ifos="H1,H2,L1",
                   FAR=1.0,
                   verbose=True):
    """
  The LV stat uses simulations above threshold, not some IFAR of the loudest event, so the default should be "inf"
  """
    def injection_was_made(geocent_end_time,
                           geocent_end_time_ns,
                           zero_lag_segments=zero_lag_segments):
        """
    return True if injection was made in the given segmentlist
    """
        return lsctables.LIGOTimeGPS(geocent_end_time,
                                     geocent_end_time_ns) in zero_lag_segments

    found = []
    missed = []
    print("", file=sys.stderr)
    for cnt, f in enumerate(injfnames):
        print("getting injections: " + str(FAR) + ":\t%.1f%%\r" %
              (100.0 * cnt / len(injfnames), ),
              end=' ',
              file=sys.stderr)
        working_filename = dbtables.get_connection_filename(f,
                                                            tmp_path=None,
                                                            verbose=verbose)
        connection = sqlite3.connect(working_filename)
        connection.create_function("injection_was_made", 2, injection_was_made)

        make_sim_inspiral = lsctables.SimInspiralTable.get_table(
            dbtables.get_xml(connection))._row_from_cols

        # FIXME may not be done correctly if injections are done in timeslides
        # FIXME may not be done correctly if injections aren't logarithmic in d
        # do we really want d^3 waiting?
        # FIXME do we really want injections independent of their FAR

        for values in connection.cursor().execute(
                """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.false_alarm_rate < ?
      AND coinc_inspiral.ifos == ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (
                    FAR,
                    ifos,
                )):
            sim = make_sim_inspiral(values)
            if values[-1]:
                found.append(sim)
            else:
                missed.append(sim)

        # done
        connection.close()
        dbtables.discard_connection_filename(f,
                                             working_filename,
                                             verbose=verbose)
        dbtables.DBTable_set_connection(None)

    print("\nFound = %d Missed = %d" % (len(found), len(missed)),
          file=sys.stderr)
    return found, missed
Example #28
    help=
    "glob of full data sqlite databases (these should already include the timeslides)"
)
(opts, args) = parser.parse_args()

inj_files = glob.glob(opts.injections)
fulldata_files = glob.glob(opts.fulldata)

timeslide_likelihood = []
timeslide_snr = []
zerolag_likelihood = []
zerolag_snr = []
for filename in fulldata_files:
    local_disk = None  #"/tmp"
    working_filename = dbtables.get_connection_filename(filename,
                                                        tmp_path=local_disk,
                                                        verbose=True)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    xmldoc = dbtables.get_xml(connection)
    cursor = connection.cursor()
    for likelihood, snr, is_background in connection.cursor().execute("""
  SELECT 
    insp_coinc_event.likelihood, 
    coinc_inspiral.snr,
    EXISTS (
      SELECT
        * 
      FROM 
        time_slide 
      WHERE
Example #29
def get_injections(injfnames, zero_lag_segments, ifos="H1,H2,L1", FAR=1.0, verbose = True):
  """
  The LV stat uses simulations above threshold, not some IFAR of the loudest event, so the default should be "inf"
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print >>sys.stderr, ""
  for cnt, f in enumerate(injfnames):
    print >>sys.stderr, "getting injections: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.table.get_table(dbtables.get_xml(connection), lsctables.SimInspiralTable.tableName)._row_from_cols

    # FIXME may not be done correctly if injections are done in timeslides
    # FIXME may not be done correctly if injections aren't logarithmic in d
    # do we really want d^3 waiting?
    # FIXME do we really want injections independent of their FAR

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.false_alarm_rate < ?
      AND coinc_inspiral.ifos == ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,ifos,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
  return found, missed
Example #30
# iterate over database files accumulating background statistics
#


if options.verbose:
	print >>sys.stderr, "collecting background statistics ..."


for n, filename in enumerate(filenames):
	#
	# open the database
	#

	if options.verbose:
		print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
	working_filename = dbtables.get_connection_filename(filename, tmp_path = options.tmp_space, verbose = options.verbose)
	connection = sqlite3.connect(working_filename)

	#
	# if the database contains a sim_inspiral table then it is assumed
	# to represent an injection run.  its rings must not be added to the
	# livetime, and it cannot provide background coincs, so it is just
	# skipped altogether in this first pass.
	#

	if "sim_ringdown" in dbtables.get_table_names(connection):
		if options.verbose:
			print >>sys.stderr, "\tdatabase contains sim_ringdown table, skipping ..."

		#
		# close the database
Example #31
    def __init__(self, opts, flist):
        self.segments = segments.segmentlistdict()
        self.non_inj_fnames = []
        self.inj_fnames = []
        self.found = {}
        self.missed = {}
        self.opts = opts
        self.veto_segments = segments.segmentlistdict()
        self.zero_lag_segments = {}
        self.instruments = []
        self.livetime = {}
        self.multi_burst_table = None
        self.coinc_inspiral_table = None

        for f in flist:
            if opts.verbose:
                print("Gathering stats from: %s...." % (f, ), file=sys.stderr)
            working_filename = dbtables.get_connection_filename(
                f, tmp_path=opts.tmp_space, verbose=opts.verbose)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            xmldoc = dbtables.get_xml(connection)

            # look for a sim table
            try:
                sim_inspiral_table = dbtables.lsctables.SimInspiralTable.get_table(
                    xmldoc)
                self.inj_fnames.append(f)
                sim = True
            except ValueError:
                self.non_inj_fnames.append(f)
                sim = False

            # FIGURE OUT IF IT IS A BURST OR INSPIRAL RUN
            try:
                self.multi_burst_table = dbtables.lsctables.MultiBurstTable.get_table(
                    xmldoc)
            except ValueError:
                self.multi_burst_table = None
            try:
                self.coinc_inspiral_table = dbtables.lsctables.CoincInspiralTable.get_table(
                    xmldoc)
            except ValueError:
                self.coinc_inspiral_table = None
            if self.multi_burst_table and self.coinc_inspiral_table:
                print("both burst and inspiral tables found.  Aborting",
                      file=sys.stderr)
                raise ValueError

            if not sim:
                self.get_instruments(connection)
                self.segments += self.get_segments(connection, xmldoc)
                #FIXME, don't assume veto segments are the same in every file!
                self.veto_segments = self.get_veto_segments(connection)

            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=opts.verbose)
            dbtables.DBTable_set_connection(None)

        # remove redundant instruments
        self.instruments = list(set(self.instruments))
        # FIXME Do these have to be done by instruments?
        self.segments -= self.veto_segments

        # segments and livetime by instruments
        for i in self.instruments:
            self.zero_lag_segments[i] = self.segments.intersection(
                i) - self.segments.union(set(self.segments.keys()) - i)
            self.livetime[i] = float(abs(self.zero_lag_segments[i]))
Example #32
	def __init__(self, filelist, live_time_program = None, veto_segments_name = None, data_segments_name = "datasegments", tmp_path = None, verbose = False):

		self.segments = segments.segmentlistdict()
		self.instruments = set()
		self.table_name = None
		self.found_injections_by_instrument_set = {}
		self.missed_injections_by_instrument_set = {}
		self.total_injections_by_instrument_set = {}
		self.zerolag_fars_by_instrument_set = {}
		self.ts_fars_by_instrument_set = {}
		self.numslides = set()

		for f in filelist:
			if verbose:
				print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path = tmp_path, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			xmldoc = dbtables.get_xml(connection)

			sim = False

			# look for a sim inspiral table.  This is IMR work we have to have one of these :)
			try:
				sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
				sim = True
			except ValueError:
				pass

			# look for the relevant table for analyses
			for table_name in allowed_analysis_table_names():
				try:
					setattr(self, table_name, table.get_table(xmldoc, table_name))
					if self.table_name is None or self.table_name == table_name:
						self.table_name = table_name
					else:
						raise ValueError("detected more than one table type out of " + " ".join(allowed_analysis_table_names()))
				except ValueError:
					setattr(self, table_name, None)

			# the non simulation databases are where we get information about segments
			if not sim:
				self.numslides.add(connection.cursor().execute('SELECT count(DISTINCT(time_slide_id)) FROM time_slide').fetchone()[0])
				[self.instruments.add(ifos) for ifos in get_instruments_from_coinc_event_table(connection)]
				# save a reference to the segments for this file, needed to figure out the missed and found injections
				self.this_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				# FIXME we don't really have any reason to use playground segments, but I put this here as a reminder
				# self.this_playground_segments = segmentsUtils.S2playground(self.this_segments.extent_all())
				self.segments += self.this_segments

				# get the far thresholds for the loudest events in these databases
				for (instruments_set, far, ts) in get_event_fars(connection, self.table_name):
					if not ts:
						self.zerolag_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
					else:
						self.ts_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
			# get the injections
			else:
				# We need to know the segments in this file to determine which injections are found
				self.this_injection_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				self.this_injection_instruments = []
				distinct_instruments = connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event WHERE instruments!=""').fetchall()
				for instruments, in distinct_instruments:
					instruments_set = frozenset(lsctables.instrument_set_from_ifos(instruments))
					self.this_injection_instruments.append(instruments_set)
					segments_to_consider_for_these_injections = self.this_injection_segments.intersection(instruments_set) - self.this_injection_segments.union(set(self.this_injection_segments.keys()) - instruments_set)
					found, total, missed = get_min_far_inspiral_injections(connection, segments = segments_to_consider_for_these_injections, table_name = self.table_name)
					if verbose:
						print >> sys.stderr, "%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed))
					self.found_injections_by_instrument_set.setdefault(instruments_set, []).extend(found)
					self.total_injections_by_instrument_set.setdefault(instruments_set, []).extend(total)
					self.missed_injections_by_instrument_set.setdefault(instruments_set, []).extend(missed)

			# All done
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
		if len(self.numslides) > 1:
			raise ValueError('number of slides differs between input files')
		elif self.numslides:
			self.numslides = min(self.numslides)
		else:
			self.numslides = 0
Example #33
	def __init__(self, opts, flist):
		self.segments = segments.segmentlistdict()
		self.non_inj_fnames = []
		self.inj_fnames = []
		self.found = {}
		self.missed = {}
		self.opts = opts
		self.veto_segments = segments.segmentlistdict()
		self.zero_lag_segments = {}
		self.instruments = []
		self.livetime = {}
		self.multi_burst_table = None
		self.coinc_inspiral_table = None

		for f in flist:
			if opts.verbose: print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path=opts.tmp_space, verbose = opts.verbose)
			connection = sqlite3.connect(working_filename)
			dbtables.DBTable_set_connection(connection)
			xmldoc = dbtables.get_xml(connection)

			# look for a sim table
			try:
				sim_inspiral_table = dbtables.lsctables.SimInspiralTable.get_table(xmldoc)
				self.inj_fnames.append(f)
				sim = True
			except ValueError:
				self.non_inj_fnames.append(f)
				sim = False

			# FIGURE OUT IF IT IS A BURST OR INSPIRAL RUN
			try:
				self.multi_burst_table = dbtables.lsctables.MultiBurstTable.get_table(xmldoc)
			except ValueError:
				self.multi_burst_table = None
			try:
				self.coinc_inspiral_table = dbtables.lsctables.CoincInspiralTable.get_table(xmldoc)
			except ValueError:
				self.coinc_inspiral_table = None
			if self.multi_burst_table and self.coinc_inspiral_table:
				print >>sys.stderr, "both burst and inspiral tables found.  Aborting"
				raise ValueError

			if not sim:
				self.get_instruments(connection)
				self.segments += self.get_segments(connection,xmldoc)
				#FIXME, don't assume veto segments are the same in every file!
				self.veto_segments = self.get_veto_segments(connection)

			dbtables.discard_connection_filename(f, working_filename, verbose = opts.verbose)
			dbtables.DBTable_set_connection(None)

		# remove redundant instruments
		self.instruments = list(set(self.instruments))
		# FIXME Do these have to be done by instruments?
		self.segments -= self.veto_segments

		# segments and livetime by instruments
		for i in self.instruments:
			self.zero_lag_segments[i] = self.segments.intersection(i) - self.segments.union(set(self.segments.keys()) - i)
			self.livetime[i] = float(abs(self.zero_lag_segments[i]))
Example #34
	def get_injections(self, instruments, FAR=float("inf")):
		injfnames = self.inj_fnames
		zero_lag_segments = self.zero_lag_segments[instruments]
		verbose = self.opts.verbose
		found = []
		missed = []
		print >>sys.stderr, ""
		for cnt, f in enumerate(injfnames):
			print >>sys.stderr, "getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
			working_filename = dbtables.get_connection_filename(f, tmp_path = self.opts.tmp_space, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			dbtables.DBTable_set_connection(connection)
			xmldoc = dbtables.get_xml(connection)
			# DON'T BOTHER CONTINUING IF THE INSTRUMENTS OF INTEREST ARE NOT HERE
			instruments_in_this_file = []
			for i in connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event'):
				if i[0]: instruments_in_this_file.append(frozenset(lsctables.instrument_set_from_ifos(i[0])))
			if instruments not in instruments_in_this_file:
				connection.close()
				dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
				dbtables.DBTable_set_connection(None)
				continue

			# WORK OUT CORRECT SEGMENTS FOR THIS FILE WHERE WE SHOULD SEE INJECTIONS
			segments = self.get_segments(connection, xmldoc)
			segments -= self.veto_segments
			#print thincasegments
			zero_lag_segments  = segments.intersection(instruments) - segments.union(set(segments.keys()) - instruments)
			###############

			# DEFINE THE INJECTION WAS MADE FUNCTION
			def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
				"""
				return True if injection was made in the given segmentlist
				"""
				return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

			connection.create_function("injection_was_made", 2, injection_was_made)
			make_sim_inspiral = lsctables.SimInspiralTable.get_table(dbtables.get_xml(connection)).row_from_cols

			# INSPIRAL
			if self.coinc_inspiral_table:
				for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR,)):
					sim = make_sim_inspiral(values)
					if values[-1]:
						found.append(sim)
					else:
						missed.append(sim)

			# BURSTS
			if self.multi_burst_table:
				for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN multi_burst ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == multi_burst.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND multi_burst.false_alarm_rate < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR,)):
					sim = make_sim_inspiral(values)
					if values[-1]:
						found.append(sim)
					else:
						missed.append(sim)
			# done
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
			dbtables.DBTable_set_connection(None)

			print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
		return found, missed
Example #35
	def __init__(self, filelist, live_time_program = None, veto_segments_name = None, data_segments_name = "datasegments", tmp_path = None, verbose = False):

		self.segments = segments.segmentlistdict()
		self.instruments = set()
		self.table_name = None
		self.found_injections_by_instrument_set = {}
		self.missed_injections_by_instrument_set = {}
		self.total_injections_by_instrument_set = {}
		self.zerolag_fars_by_instrument_set = {}
		self.ts_fars_by_instrument_set = {}
		self.numslides = set()

		for f in filelist:
			if verbose:
				print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path = tmp_path, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			xmldoc = dbtables.get_xml(connection)

			sim = False

			# look for a sim inspiral table.  This is IMR work we have to have one of these :)
			try:
				sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
				sim = True
			except ValueError:
				pass

			# look for the relevant table for analyses
			for table_name in allowed_analysis_table_names():
				try:
					setattr(self, table_name, table.get_table(xmldoc, table_name))
					if self.table_name is None or self.table_name == table_name:
						self.table_name = table_name
					else:
						raise ValueError("detected more than one table type out of " + " ".join(allowed_analysis_table_names()))
				except ValueError:
					setattr(self, table_name, None)

			# the non simulation databases are where we get information about segments
			if not sim:
				self.numslides.add(connection.cursor().execute('SELECT count(DISTINCT(time_slide_id)) FROM time_slide').fetchone()[0])
				[self.instruments.add(ifos) for ifos in get_instruments_from_coinc_event_table(connection)]
				# save a reference to the segments for this file, needed to figure out the missed and found injections
				self.this_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				# FIXME we don't really have any reason to use playground segments, but I put this here as a reminder
				# self.this_playground_segments = segmentsUtils.S2playground(self.this_segments.extent_all())
				self.segments += self.this_segments

				# get the far thresholds for the loudest events in these databases
				for (instruments_set, far, ts) in get_event_fars(connection, self.table_name):
					if not ts:
						self.zerolag_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
					else:
						self.ts_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
			# get the injections
			else:
				# We need to know the segments in this file to determine which injections are found
				self.this_injection_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				self.this_injection_instruments = []
				distinct_instruments = connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event WHERE instruments!=""').fetchall()
				for instruments, in distinct_instruments:
					instruments_set = frozenset(lsctables.instrument_set_from_ifos(instruments))
					self.this_injection_instruments.append(instruments_set)
					segments_to_consider_for_these_injections = self.this_injection_segments.intersection(instruments_set) - self.this_injection_segments.union(set(self.this_injection_segments.keys()) - instruments_set)
					found, total, missed = get_min_far_inspiral_injections(connection, segments = segments_to_consider_for_these_injections, table_name = self.table_name)
					if verbose:
						print >> sys.stderr, "%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed))
					self.found_injections_by_instrument_set.setdefault(instruments_set, []).extend(found)
					self.total_injections_by_instrument_set.setdefault(instruments_set, []).extend(total)
					self.missed_injections_by_instrument_set.setdefault(instruments_set, []).extend(missed)

			# All done
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
		if len(self.numslides) > 1:
			raise ValueError('number of slides differs between input files')
		elif self.numslides:
			self.numslides = min(self.numslides)
		else:
			self.numslides = 0
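
Once the constructor above has run, the per-instrument-set dictionaries it fills can be reduced to simple summaries, for example the loudest (smallest) zero-lag FAR and the injection recovery counts per instrument set. A minimal sketch, assuming stats is an instance of the (unnamed) class whose __init__ is shown above; the summarize helper itself is not part of the original code.

import sys

def summarize(stats):
    """Print the loudest zero-lag FAR and injection counts per instrument set."""
    for instruments_set in sorted(stats.zerolag_fars_by_instrument_set, key=sorted):
        loudest_far = min(stats.zerolag_fars_by_instrument_set[instruments_set])
        found = len(stats.found_injections_by_instrument_set.get(instruments_set, []))
        total = len(stats.total_injections_by_instrument_set.get(instruments_set, []))
        print("%s: loudest zero-lag FAR %g, found %d / %d injections"
              % (",".join(sorted(instruments_set)), loudest_far, found, total),
              file=sys.stderr)
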
Example #36
0
# FIXME These values should probably be command line arguments or derived from the database
secs_in_year = 31556926.0
max_dist = 2000
min_mass = 1
max_mass = 99
min_mtotal = 25
max_mtotal = 100
mass_bins = 11
dist_bins = 50


opts, filenames = parse_command_line()

if opts.veto_segments_name is not None:
  working_filename = dbtables.get_connection_filename(opts.full_data_file, verbose = opts.verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)
  veto_segments = db_thinca_rings.get_veto_segments(connection, opts.veto_segments_name)
  connection.close()
  dbtables.discard_connection_filename(opts.full_data_file, working_filename, verbose = opts.verbose)
  dbtables.DBTable_set_connection(None)
else:
  veto_segments = segments.segmentlistdict()

if not opts.burst_found and not opts.burst_missed:
  FAR, seglists = get_far_threshold_and_segments(opts.full_data_file, opts.live_time_program, instruments=lsctables.ifos_from_instrument_set(opts.instruments),verbose = opts.verbose)


  # times when only exactly the required instruments are on
  seglists -= veto_segments
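
After the veto subtraction above, the surviving segment lists are what the rest of the script would use to work out the analysable live time. A self-contained sketch of that bookkeeping with toy segments, assuming the ligo.segments package (the standalone successor of glue.segments); the real seglists come from get_far_threshold_and_segments().

# A minimal, self-contained sketch of the live-time bookkeeping with toy
# segments; times are in GPS seconds.
from ligo import segments

secs_in_year = 31556926.0

seglists = segments.segmentlistdict({
    "H1": segments.segmentlist([segments.segment(0, 6 * 3600)]),
    "L1": segments.segmentlist([segments.segment(3600, 8 * 3600)]),
})
veto_segments = segments.segmentlistdict({
    "H1": segments.segmentlist([segments.segment(7200, 9000)]),
})

seglists -= veto_segments                    # drop vetoed time per instrument
instruments = frozenset(["H1", "L1"])
live = seglists.intersection(instruments)    # times when both H1 and L1 are on
print("coincident live time: %.2f hours (%.3g yr)"
      % (abs(live) / 3600.0, abs(live) / secs_in_year))

abs() of a segmentlist is its total duration, which is why the conversion to years is a single division by secs_in_year.
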
Example #37
0
    def get_injections(self, instruments, FAR=float("inf")):
        injfnames = self.inj_fnames
        zero_lag_segments = self.zero_lag_segments[instruments]
        verbose = self.opts.verbose
        found = []
        missed = []
        print("", file=sys.stderr)
        for cnt, f in enumerate(injfnames):
            print("getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" %
                  (100.0 * cnt / len(injfnames), ),
                  end=' ',
                  file=sys.stderr)
            working_filename = dbtables.get_connection_filename(
                f, tmp_path=opts.tmp_space, verbose=verbose)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            xmldoc = dbtables.get_xml(connection)
            # DON'T BOTHER CONTINUING IF THE INSTRUMENTS OF INTEREST ARE NOT HERE
            instruments_in_this_file = []
            for i in connection.cursor().execute(
                    'SELECT DISTINCT(instruments) FROM coinc_event'):
                if i[0]:
                    instruments_in_this_file.append(
                        frozenset(lsctables.instrument_set_from_ifos(i[0])))
            if instruments not in instruments_in_this_file:
                connection.close()
                dbtables.discard_connection_filename(f,
                                                     working_filename,
                                                     verbose=verbose)
                dbtables.DBTable_set_connection(None)
                continue

            # WORK OUT CORRECT SEGMENTS FOR THIS FILE WHERE WE SHOULD SEE INJECTIONS
            segments = self.get_segments(connection, xmldoc)
            segments -= self.veto_segments
            #print thincasegments
            zero_lag_segments = segments.intersection(
                instruments) - segments.union(
                    set(segments.keys()) - instruments)

            ###############

            # DEFINE THE INJECTION WAS MADE FUNCTION
            def injection_was_made(geocent_end_time,
                                   geocent_end_time_ns,
                                   zero_lag_segments=zero_lag_segments):
                """
				return True if injection was made in the given segmentlist
				"""
                return lsctables.LIGOTimeGPS(
                    geocent_end_time, geocent_end_time_ns) in zero_lag_segments

            connection.create_function("injection_was_made", 2,
                                       injection_was_made)
            make_sim_inspiral = lsctables.SimInspiralTable.get_table(
                dbtables.get_xml(connection)).row_from_cols

            # INSPIRAL
            if self.coinc_inspiral_table:
                for values in connection.cursor().execute(
                        """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR, )):
                    sim = make_sim_inspiral(values)
                    if values[-1]:
                        found.append(sim)
                    else:
                        missed.append(sim)

            # BURSTS
            if self.multi_burst_table:
                for values in connection.cursor().execute(
                        """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN multi_burst ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == multi_burst.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND multi_burst.false_alarm_rate < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR, )):
                    sim = make_sim_inspiral(values)
                    if values[-1]:
                        found.append(sim)
                    else:
                        missed.append(sim)
            # done
            connection.close()
            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=verbose)
            dbtables.DBTable_set_connection(None)

            print("\nFound = %d Missed = %d" % (len(found), len(missed)),
                  file=sys.stderr)
        return found, missed
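
A typical next step with the found/missed lists returned by get_injections is an efficiency-versus-distance estimate. The sketch below is illustrative only: it assumes each returned row exposes a distance attribute (as sim_inspiral rows do) and uses arbitrary binning defaults.

import numpy

def efficiency_vs_distance(found, missed, max_dist=2000.0, nbins=50):
    """Return distance bin centres and found/(found+missed) per bin."""
    edges = numpy.linspace(0.0, max_dist, nbins + 1)
    found_counts, _ = numpy.histogram([sim.distance for sim in found], bins=edges)
    missed_counts, _ = numpy.histogram([sim.distance for sim in missed], bins=edges)
    total = found_counts + missed_counts
    # guard against 0/0 in bins that received no injections
    eff = numpy.where(total > 0, found_counts / numpy.maximum(total, 1), 0.0)
    centres = 0.5 * (edges[:-1] + edges[1:])
    return centres, eff
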
Example #38
0
def dump_confidence_likelihood_scatter_data(globs, live_time_program = "lalapps_power", tmp_path = None, verbose = False):
	#
	# Collect file names.
	#

	if verbose:
		print >>sys.stderr, "building file list ..."
	filenames = sorted(filename for g in globs for filename in glob.glob(g))

	#
	# Initialize storage.
	#

	injections = []
	background = []
	zero_lag = []

	#
	# Iterate over files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.  Assume all files with
		# sim_burst tables are the outputs of injection runs, and
		# others aren't.
		#

		if database.sim_burst_table is None:
			# non-injections
			for id, l, c, is_background in bb_id_likelihood_confidence_background(database):
				record = (coinc_detection_statistic(l, c), l, c)
				if is_background:
					if len(background) < 1e6:
						heapq.heappush(background, record)
					else:
						heapq.heappushpop(background, record)
				else:
					if len(zero_lag) < 1e6:
						heapq.heappush(zero_lag, record)
					else:
						heapq.heappushpop(zero_lag, record)
		else:
			# injections
			create_sim_coinc_map_view(database.connection)
			for a, l, c in database.connection.cursor().execute("""
SELECT
	burst_coinc_amplitude,
	burst_coinc_likelihood,
	burst_coinc_confidence
FROM
	sim_coinc_map
WHERE
	sim_coinc_def_id == ?
			""", (database.sce_definer_id,)):
				record = (-a, l, c)
				if len(injections) < 1e6:
					heapq.heappush(injections, record)
				else:
					heapq.heappushpop(injections, record)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Dump scatter plot data.
	#

	if verbose:
		print >>sys.stderr, "writing scatter plot data ..."

	f = file("lalapps_excesspowerfinal_background_scatter.dat", "w")
	for a, l, c in background:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_zero_lag_scatter.dat", "w")
	for a, l, c in zero_lag:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_injections_scatter.dat", "w")
	for a, l, c in injections:
		print >>f, "%.16g %.16g" % (l, c)

	if verbose:
		print >>sys.stderr, "done."
#
# iterate over files
#

from glue.ligolw import dbtables

for n, filename in enumerate(filenames):
    #
    # Open the database file.
    #

    if options.verbose:
        print >> sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)

    working_filename = dbtables.get_connection_filename(
        filename, tmp_path=options.tmp_space, verbose=options.verbose)
    connection = sqlite3.connect(working_filename)
    if options.tmp_space is not None:
        dbtables.set_temp_store_directory(connection,
                                          options.tmp_space,
                                          verbose=options.verbose)

    #
    # Summarize the database.
    #

    contents = SnglBurstUtils.CoincDatabase(
        connection,
        live_time_program="StringSearch",
        search="StringCusp",
        veto_segments_name=options.vetoes_name)
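
Example #38 caps each of its record lists at one million entries with the classic heapq idiom: push until the cap is reached, then heappushpop so the min-heap always retains the largest records seen so far (ordered by the first tuple element). A stripped-down, runnable sketch of that bookkeeping with a toy cap:

import heapq
import random

def keep_top_n(records, cap=10):
    """Keep only the `cap` largest records (by first tuple element) using a min-heap."""
    top = []
    for record in records:
        if len(top) < cap:
            heapq.heappush(top, record)
        else:
            # push the new record and discard the current minimum, so `top`
            # always holds the `cap` largest records seen so far
            heapq.heappushpop(top, record)
    return sorted(top, reverse=True)

random.seed(0)
sample = [(random.random(), i) for i in range(1000)]
print(keep_top_n(sample)[:3])   # the three largest statistics
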
Example #40
0
def test_optimized_query(engine):
    res = segmentdb_utils.query_segments( engine, 'segment_summary', [ ('H1','DMT-TESTSEG_2',1,924900000,924900016,0,0),
                                                                      ('H1','DMT-TESTSEG_3',1,924900000,924900016,0,0) ] )

    if res[0] != segmentlist([segment(924900000, 924900010)]): 
        return False

    if res[1] != segmentlist([segment(924900008, 924900016)]):
        return False

    return True


if __name__ == '__main__':
    db_name    = 'seg_test_db.sqlite'
    target     = dbtables.get_connection_filename(db_name, None, True, False)
    connection = ligolw_sqlite.setup(target)

    engine     = query_engine.SqliteQueryEngine(connection)

    ligolw_sqlite.insert(connection, ['test_segdb_utils.xml'])

    print("Testing basic segment summary...", end=' ')
    print(test_basic_seg_summary(engine) and "succeeded." or "FAILED.")

    print("Testing expanding version numbers...", end=' ')
    print(test_expand_versions(engine) and "succeeded." or "FAILED.")

    print("Testing optimized segment query...", end=' ')
    print(test_optimized_query(engine) and "succeeded." or "FAILED.")
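
The expected values in test_optimized_query are ordinary segmentlist objects, so they can be inspected and combined with the usual set-like operators. A small self-contained sketch, assuming the ligo.segments package (glue.segments exposes the same interface):

from ligo.segments import segment, segmentlist

a = segmentlist([segment(924900000, 924900010)])
b = segmentlist([segment(924900008, 924900016)])

print(a & b)            # overlap: [segment(924900008, 924900010)]
print(a | b)            # coalesced union: [segment(924900000, 924900016)]
print(b - a)            # difference: [segment(924900010, 924900016)]
print(924900009 in a)   # containment test for a single time: True
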