Example #1
def get_injections(injfnames, FAR, zero_lag_segments, verbose = False):
  """
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print("", file=sys.stderr)
  for cnt, f in enumerate(injfnames):
    print("getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),), end=' ', file=sys.stderr)
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.SimInspiralTable.get_table(dbtables.get_xml(connection))._row_from_cols

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print("\nFound = %d Missed = %d" % (len(found), len(missed)), file=sys.stderr)
  return found, missed
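
A minimal usage sketch for the get_injections above.  The file names, the
1e-6 FAR cut, and the single zero-lag segment are hypothetical placeholders,
and the segments import assumes the ligo-segments (or older glue.segments)
module that the surrounding examples rely on:

from ligo import segments  # older installs: from glue import segments

# Hypothetical inputs -- the file names, GPS times and FAR cut are made up.
zero_lag = segments.segmentlist([segments.segment(1000000000, 1000086400)])
found, missed = get_injections(["HL-INJ_1.sqlite", "HL-INJ_2.sqlite"],
                               FAR=1e-6, zero_lag_segments=zero_lag,
                               verbose=True)
print("found %d of %d injections" % (len(found), len(found) + len(missed)))
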
Example #2
def get_far_threshold_and_segments(zerofname, live_time_program, instruments, verbose = False):
  """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract false alarm rate threshold
  query = 'SELECT MIN(coinc_inspiral.combined_far) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE (coinc_event.instruments == "' + str(instruments) + '") AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0);'
  print(query)
  far, = connection.cursor().execute(query).fetchone()

  # extract segments.
  seglists = db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = live_time_program)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  print("WARNING replacing far with 10^-7", file=sys.stderr)
  far = 1.0e-7
  return far, seglists
Example #3
def get_far_threshold_and_segments(zerofname,
                                   live_time_program,
                                   instruments,
                                   verbose=False):
    """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract false alarm rate threshold
    query = 'SELECT MIN(coinc_inspiral.combined_far) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE (coinc_event.instruments == "' + str(
        instruments
    ) + '") AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0);'
    print(query)
    far, = connection.cursor().execute(query).fetchone()

    # extract segments.
    seglists = db_thinca_rings.get_thinca_zero_lag_segments(
        connection, program_name=live_time_program)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    print("WARNING replacing far with 10^-7", file=sys.stderr)
    far = 1.0e-7
    return far, seglists
Example #4
def get_far_threshold_and_segments(zerofname,
                                   instruments,
                                   live_time_program,
                                   veto_seg_name="vetoes",
                                   verbose=True):
    """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract false alarm rate threshold
    query = 'SELECT MIN(coinc_inspiral.false_alarm_rate) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE ( coinc_event.instruments = "' + instruments + '" AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0) );'
    print("\n", query)
    far, = connection.cursor().execute(query).fetchone()

    # extract segments.
    seglists = db_thinca_rings.get_thinca_zero_lag_segments(
        connection, program_name=live_time_program)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)

    return far, seglists
Example #5
def get_injections(injfnames, FAR, zero_lag_segments, verbose = False):
  """
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print >>sys.stderr, ""
  for cnt, f in enumerate(injfnames):
    print >>sys.stderr, "getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.table.get_table(dbtables.get_xml(connection), lsctables.SimInspiralTable.tableName)._row_from_cols

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
  return found, missed
Example #6
def get_vetoes(fname, veto_segments_name = "vetoes", verbose=True):
  working_filename = dbtables.get_connection_filename(fname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)
  veto_segments = db_thinca_rings.get_veto_segments(connection, veto_segments_name)
  connection.close()
  dbtables.discard_connection_filename(fname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  return veto_segments
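
Later examples (e.g. the "seglists -= veto_segments" lines in Example #21 and
Example #28) subtract the segmentlistdict returned by get_vetoes from the
analysis segments before computing livetime.  A short sketch of that step,
with a hypothetical file name and made-up analysis segments:

from ligo import segments  # older installs: from glue import segments

# Hypothetical analysis segments; in practice they come from
# get_far_threshold_and_segments or db_thinca_rings (see other examples).
seglists = segments.segmentlistdict({
    "H1": segments.segmentlist([segments.segment(1000000000, 1000086400)]),
    "L1": segments.segmentlist([segments.segment(1000000000, 1000086400)]),
})
veto_segments = get_vetoes("H1L1-FULL_DATA.sqlite", veto_segments_name="vetoes")
seglists -= veto_segments  # remove vetoed time, per instrument
live_time = float(abs(seglists.intersection(("H1", "L1"))))
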
Example #7
def get_vetoes(fname, veto_segments_name="vetoes", verbose=True):
    working_filename = dbtables.get_connection_filename(fname, verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    veto_segments = db_thinca_rings.get_veto_segments(connection,
                                                      veto_segments_name)
    connection.close()
    dbtables.discard_connection_filename(fname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    return veto_segments
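
Every example here repeats the same scratch-copy pattern:
dbtables.get_connection_filename, sqlite3.connect, optionally
DBTable_set_connection, then close the connection and
discard_connection_filename.  A minimal sketch of a context manager wrapping
that pattern; the name scratch_connection is hypothetical and not part of
dbtables:

import sqlite3
from contextlib import contextmanager

from glue.ligolw import dbtables  # as assumed by the examples above


@contextmanager
def scratch_connection(fname, tmp_path=None, verbose=False):
    # Copy the database to scratch space, hand back a connection, and always
    # clean up the scratch copy, mirroring the open/close/discard pattern
    # used throughout these examples.
    working_filename = dbtables.get_connection_filename(fname,
                                                        tmp_path=tmp_path,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    try:
        yield connection
    finally:
        connection.close()
        dbtables.discard_connection_filename(fname, working_filename,
                                             verbose=verbose)
        dbtables.DBTable_set_connection(None)

With such a helper, get_vetoes reduces to a single with-statement around the
db_thinca_rings.get_veto_segments call.
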
Example #8
def measure_efficiency(filenames, threshold, live_time_program = "lalapps_power", upper_limit_scale = "E", tmp_path = None, verbose = False):
	# FIXME:  instruments are hard-coded.  bad bad bad.  sigh...
	if upper_limit_scale == "E":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.egw_over_rsquared), r"Equivalent Isotropic Energy ($M_{\odot} / \mathrm{pc}^{2}$)", 0.1)
	elif upper_limit_scale == "hrss":
		efficiency = EfficiencyData(("H1", "H2", "L1"), (lambda sim, instrument: sim.hrss), r"$h_{\mathrm{rss}}$", 0.1)
	else:
		raise ValueError("bad upper_limit_scale %s" % repr(upper_limit_scale))

	#
	# Iterate over injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		efficiency.add_contents(database, threshold)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Compute efficiency from the data that have been collected
	#

	if verbose:
		print >>sys.stderr, "binning and smoothnig efficiency data ..."
	efficiency.finish(threshold)

	#
	# Done
	#

	return efficiency
Example #10
def measure_threshold(filenames, n_survivors, live_time_program = "lalapps_power", tmp_path = None, open_box = False, verbose = False):
	#
	# Initialize the book-keeping object.
	#

	rate_vs_threshold_data = RateVsThresholdData()

	#
	# Iterate over non-injection files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		database = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.
		#

		rate_vs_threshold_data.update_from(database, filename = filename, verbose = verbose)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Determine likelihood threshold.
	#

	if verbose:
		print >>sys.stderr, "finishing rate vs. threshold measurement ..."
	rate_vs_threshold_data.finish(n_survivors, open_box = open_box, verbose = verbose)

	#
	# Done.
	#

	return rate_vs_threshold_data
Example #12
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
	#
	# connect to database and summarize contents
	#

	working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
	contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
	if verbose:
		SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

	#
	# augment summary with extra stuff we need.  the filename
	# is recorded for dumping debugging information related to
	# missed injections.  if burca was run with the
	# --coincidence-segments option then the value is copied
	# into a segmentlistdict to facilitate the computation of
	# livetime
	#

	contents.filename = filename

	contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
	if contents.coincidence_segments:
		# as a side-effect, this enforces the rule that
		# burca has been run on the input file exactly once
		contents.coincidence_segments, = contents.coincidence_segments
		contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
	else:
		contents.coincidence_segments = None

	#
	# process contents
	#

	for n, product in enumerate(products):
		if verbose:
			print >>sys.stderr, "%s: adding to product %d ..." % (working_filename, n)
		product.add_contents(contents, verbose = verbose)

	#
	# close
	#

	contents.connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
Example #13
def process_file(filename, products, live_time_program, tmp_path = None, veto_segments_name = None, verbose = False):
	#
	# connect to database and summarize contents
	#

	working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
	contents = SnglBurstUtils.CoincDatabase(sqlite3.connect(working_filename), live_time_program, search = "StringCusp", veto_segments_name = veto_segments_name)
	if verbose:
		SnglBurstUtils.summarize_coinc_database(contents, filename = working_filename)

	#
	# augment summary with extra stuff we need.  the filename
	# is recorded for dumping debugging information related to
	# missed injections.  if burca was run with the
	# --coincidence-segments option then the value is copied
	# into a segmentlistdict to facilitate the computation of
	# livetime
	#

	contents.filename = filename

	contents.coincidence_segments = ligolwprocess.get_process_params(contents.xmldoc, "lalapps_burca", "--coincidence-segments")
	if contents.coincidence_segments:
		# as a side-effect, this enforces the rule that
		# burca has been run on the input file exactly once
		contents.coincidence_segments, = contents.coincidence_segments
		contents.coincidence_segments = segments.segmentlistdict.fromkeys(contents.seglists, segmentsUtils.from_range_strings(contents.coincidence_segments.split(","), boundtype = dbtables.lsctables.LIGOTimeGPS).coalesce())
	else:
		contents.coincidence_segments = None

	#
	# process contents
	#

	for n, product in enumerate(products):
		if verbose:
			print >>sys.stderr, "%s: adding to product %d ..." % (working_filename, n)
		product.add_contents(contents, verbose = verbose)

	#
	# close
	#

	contents.connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)
Example #14
  def update_coincs(self, fnames):
    """
    This function iterates over the databases and updates the likelihood
    column with the proper lv stat
    """
    for f in fnames:
      working_filename = dbtables.get_connection_filename(f, verbose = True)
      connection = sqlite3.connect(working_filename)
      dbtables.DBTable_set_connection(connection)
      connection.create_function("lvstat", 3, self.lvstat)
      query = "UPDATE coinc_event SET likelihood = (SELECT lvstat(coinc_event.instruments, coinc_inspiral.ifos, coinc_inspiral.false_alarm_rate) FROM coinc_inspiral WHERE coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id)"
      print query

      connection.cursor().execute(query)
      connection.commit()
      connection.close()
      dbtables.discard_connection_filename(f, working_filename, verbose = True)
      dbtables.DBTable_set_connection(None)
Example #15
def get_ifos(zerofname, verbose=True):
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract false alarm rate threshold
  # FIXME This may not be robust if triggers are missing from a given category, for
  # example no triples in zero lag or time slides.
  query = 'SELECT distinct(ifos) FROM coinc_inspiral'
  ifo_list = []
  for i in connection.cursor().execute(query): ifo_list.append(i)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)
  return ifo_list
Example #16
    def update_coincs(self, fnames):
        """
    This function iterates over the databases and updates the likelihood
    column with the proper lv stat
    """
        for f in fnames:
            working_filename = dbtables.get_connection_filename(f,
                                                                verbose=True)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            connection.create_function("lvstat", 3, self.lvstat)
            query = "UPDATE coinc_event SET likelihood = (SELECT lvstat(coinc_event.instruments, coinc_inspiral.ifos, coinc_inspiral.false_alarm_rate) FROM coinc_inspiral WHERE coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id)"
            print(query)

            connection.cursor().execute(query)
            connection.commit()
            connection.close()
            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=True)
            dbtables.DBTable_set_connection(None)
Example #17
def get_ifos(zerofname, verbose=True):
    # open database
    working_filename = dbtables.get_connection_filename(zerofname,
                                                        verbose=verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)

    # extract false alarm rate threshold
    # FIXME This may not be robust if triggers are missing from a given category, for
    # example no triples in zero lag or time slides.
    query = 'SELECT distinct(ifos) FROM coinc_inspiral'
    ifo_list = []
    for i in connection.cursor().execute(query):
        ifo_list.append(i)

    # done
    connection.close()
    dbtables.discard_connection_filename(zerofname,
                                         working_filename,
                                         verbose=verbose)
    dbtables.DBTable_set_connection(None)
    return ifo_list
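
Note that get_ifos returns the raw cursor rows, so each element is a
one-tuple holding a comma-separated ifos string.  A short sketch of turning
those rows into instrument sets with the same
lsctables.instrument_set_from_ifos call used in later examples; the file
name is a placeholder:

# Hypothetical file name; each row looks like ("H1,H2,L1",).
for (ifos,) in get_ifos("H1H2L1-FULL_DATA.sqlite"):
    instruments = frozenset(lsctables.instrument_set_from_ifos(ifos))
    print(sorted(instruments))
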
Example #18
def get_far_threshold_and_segments(zerofname, instruments, live_time_program, veto_seg_name="vetoes", verbose = True):
  """
  return the false alarm rate of the most rare zero-lag coinc, and a
  dictionary of the thinca segments indexed by instrument.
  """
  # open database
  working_filename = dbtables.get_connection_filename(zerofname, verbose = verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)

  # extract false alarm rate threshold
  query = 'SELECT MIN(coinc_inspiral.false_alarm_rate) FROM coinc_inspiral JOIN coinc_event ON (coinc_event.coinc_event_id == coinc_inspiral.coinc_event_id) WHERE ( coinc_event.instruments = "' + instruments + '" AND NOT EXISTS(SELECT * FROM time_slide WHERE time_slide.time_slide_id == coinc_event.time_slide_id AND time_slide.offset != 0) );'
  print "\n", query
  far, = connection.cursor().execute(query).fetchone()

  # extract segments.
  seglists = db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = live_time_program)

  # done
  connection.close()
  dbtables.discard_connection_filename(zerofname, working_filename, verbose = verbose)
  dbtables.DBTable_set_connection(None)

  return far, seglists
    def get_injections(self, instruments, FAR=float("inf")):
        injfnames = self.inj_fnames
        zero_lag_segments = self.zero_lag_segments[instruments]
        verbose = self.opts.verbose
        found = []
        missed = []
        print("", file=sys.stderr)
        for cnt, f in enumerate(injfnames):
            print("getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" %
                  (100.0 * cnt / len(injfnames), ),
                  end=' ',
                  file=sys.stderr)
            working_filename = dbtables.get_connection_filename(
                f, tmp_path=opts.tmp_space, verbose=verbose)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            xmldoc = dbtables.get_xml(connection)
            # DON'T BOTHER CONTINUING IF THE INSTRUMENTS OF INTEREST ARE NOT HERE
            instruments_in_this_file = []
            for i in connection.cursor().execute(
                    'SELECT DISTINCT(instruments) FROM coinc_event'):
                if i[0]:
                    instruments_in_this_file.append(
                        frozenset(lsctables.instrument_set_from_ifos(i[0])))
            if instruments not in instruments_in_this_file:
                connection.close()
                dbtables.discard_connection_filename(f,
                                                     working_filename,
                                                     verbose=verbose)
                dbtables.DBTable_set_connection(None)
                continue

            # WORK OUT CORRECT SEGMENTS FOR THIS FILE WHERE WE SHOULD SEE INJECTIONS
            segments = self.get_segments(connection, xmldoc)
            segments -= self.veto_segments
            #print thincasegments
            zero_lag_segments = segments.intersection(
                instruments) - segments.union(
                    set(segments.keys()) - instruments)

            ###############

            # DEFINE THE INJECTION WAS MADE FUNCTION
            def injection_was_made(geocent_end_time,
                                   geocent_end_time_ns,
                                   zero_lag_segments=zero_lag_segments):
                """
				return True if injection was made in the given segmentlist
				"""
                return lsctables.LIGOTimeGPS(
                    geocent_end_time, geocent_end_time_ns) in zero_lag_segments

            connection.create_function("injection_was_made", 2,
                                       injection_was_made)
            make_sim_inspiral = lsctables.SimInspiralTable.get_table(
                dbtables.get_xml(connection)).row_from_cols

            # INSPIRAL
            if self.coinc_inspiral_table:
                for values in connection.cursor().execute(
                        """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR, )):
                    sim = make_sim_inspiral(values)
                    if values[-1]:
                        found.append(sim)
                    else:
                        missed.append(sim)

            # BURSTS
            if self.multi_burst_table:
                for values in connection.cursor().execute(
                        """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN multi_burst ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == multi_burst.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND multi_burst.false_alarm_rate < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR, )):
                    sim = make_sim_inspiral(values)
                    if values[-1]:
                        found.append(sim)
                    else:
                        missed.append(sim)
            # done
            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=verbose)
            dbtables.DBTable_set_connection(None)

            print("\nFound = %d Missed = %d" % (len(found), len(missed)),
                  file=sys.stderr)
        return found, missed
    def __init__(self, opts, flist):
        self.segments = segments.segmentlistdict()
        self.non_inj_fnames = []
        self.inj_fnames = []
        self.found = {}
        self.missed = {}
        self.opts = opts
        self.veto_segments = segments.segmentlistdict()
        self.zero_lag_segments = {}
        self.instruments = []
        self.livetime = {}
        self.multi_burst_table = None
        self.coinc_inspiral_table = None

        for f in flist:
            if opts.verbose:
                print("Gathering stats from: %s...." % (f, ), file=sys.stderr)
            working_filename = dbtables.get_connection_filename(
                f, tmp_path=opts.tmp_space, verbose=opts.verbose)
            connection = sqlite3.connect(working_filename)
            dbtables.DBTable_set_connection(connection)
            xmldoc = dbtables.get_xml(connection)

            # look for a sim table
            try:
                sim_inspiral_table = dbtables.lsctables.SimInspiralTable.get_table(
                    xmldoc)
                self.inj_fnames.append(f)
                sim = True
            except ValueError:
                self.non_inj_fnames.append(f)
                sim = False

            # FIGURE OUT IF IT IS A BURST OR INSPIRAL RUN
            try:
                self.multi_burst_table = dbtables.lsctables.MultiBurstTable.get_table(
                    xmldoc)
            except ValueError:
                self.multi_burst_table = None
            try:
                self.coinc_inspiral_table = dbtables.lsctables.CoincInspiralTable.get_table(
                    xmldoc)
            except ValueError:
                self.coinc_inspiral_table = None
            if self.multi_burst_table and self.coinc_inspiral_table:
                print("both burst and inspiral tables found.  Aborting",
                      file=sys.stderr)
                raise ValueError

            if not sim:
                self.get_instruments(connection)
                self.segments += self.get_segments(connection, xmldoc)
                #FIXME, don't assume veto segments are the same in every file!
                self.veto_segments = self.get_veto_segments(connection)

            dbtables.discard_connection_filename(f,
                                                 working_filename,
                                                 verbose=opts.verbose)
            dbtables.DBTable_set_connection(None)

        # remove redundant instruments
        self.instruments = list(set(self.instruments))
        # FIXME Do these have to be done by instruments?
        self.segments -= self.veto_segments

        # segments and livetime by instruments
        for i in self.instruments:
            self.zero_lag_segments[i] = self.segments.intersection(
                i) - self.segments.union(set(self.segments.keys()) - i)
            self.livetime[i] = float(abs(self.zero_lag_segments[i]))
Example #21
max_mtotal = 100
mass_bins = 11
dist_bins = 50

opts, filenames = parse_command_line()

if opts.veto_segments_name is not None:
    working_filename = dbtables.get_connection_filename(opts.full_data_file,
                                                        verbose=opts.verbose)
    connection = sqlite3.connect(working_filename)
    dbtables.DBTable_set_connection(connection)
    veto_segments = db_thinca_rings.get_veto_segments(connection,
                                                      opts.veto_segments_name)
    connection.close()
    dbtables.discard_connection_filename(opts.full_data_file,
                                         working_filename,
                                         verbose=opts.verbose)
    dbtables.DBTable_set_connection(None)
else:
    veto_segments = segments.segmentlistdict()

if not opts.burst_found and not opts.burst_missed:
    FAR, seglists = get_far_threshold_and_segments(
        opts.full_data_file,
        opts.live_time_program,
        instruments=lsctables.ifos_from_instrument_set(opts.instruments),
        verbose=opts.verbose)

    # times when only exactly the required instruments are on
    seglists -= veto_segments
    zero_lag_segments = seglists.intersection(
        opts.instruments) - seglists.union(
            set(seglists.keys()) - opts.instruments)
Example #22
	def __init__(self, filelist, live_time_program = None, veto_segments_name = None, data_segments_name = "datasegments", tmp_path = None, verbose = False):

		self.segments = segments.segmentlistdict()
		self.instruments = set()
		self.table_name = None
		self.found_injections_by_instrument_set = {}
		self.missed_injections_by_instrument_set = {}
		self.total_injections_by_instrument_set = {}
		self.zerolag_fars_by_instrument_set = {}
		self.ts_fars_by_instrument_set = {}
		self.numslides = set()

		for f in filelist:
			if verbose:
				print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path = tmp_path, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			xmldoc = dbtables.get_xml(connection)

			sim = False

			# look for a sim inspiral table.  This is IMR work we have to have one of these :)
			try:
				sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
				sim = True
			except ValueError:
				pass

			# look for the relevant table for analyses
			for table_name in allowed_analysis_table_names():
				try:
					setattr(self, table_name, table.get_table(xmldoc, table_name))
					if self.table_name is None or self.table_name == table_name:
						self.table_name = table_name
					else:
						raise ValueError("detected more than one table type out of " + " ".join(allowed_analysis_table_names()))
				except ValueError:
					setattr(self, table_name, None)

			# the non simulation databases are where we get information about segments
			if not sim:
				self.numslides.add(connection.cursor().execute('SELECT count(DISTINCT(time_slide_id)) FROM time_slide').fetchone()[0])
				[self.instruments.add(ifos) for ifos in get_instruments_from_coinc_event_table(connection)]
				# save a reference to the segments for this file, needed to figure out the missed and found injections
				self.this_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				# FIXME we don't really have any reason to use playground segments, but I put this here as a reminder
				# self.this_playground_segments = segmentsUtils.S2playground(self.this_segments.extent_all())
				self.segments += self.this_segments

				# get the far thresholds for the loudest events in these databases
				for (instruments_set, far, ts) in get_event_fars(connection, self.table_name):
					if not ts:
						self.zerolag_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
					else:
						self.ts_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
			# get the injections
			else:
				# We need to know the segments in this file to determine which injections are found
				self.this_injection_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				self.this_injection_instruments = []
				distinct_instruments = connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event WHERE instruments!=""').fetchall()
				for instruments, in distinct_instruments:
					instruments_set = frozenset(lsctables.instrument_set_from_ifos(instruments))
					self.this_injection_instruments.append(instruments_set)
					segments_to_consider_for_these_injections = self.this_injection_segments.intersection(instruments_set) - self.this_injection_segments.union(set(self.this_injection_segments.keys()) - instruments_set)
					found, total, missed = get_min_far_inspiral_injections(connection, segments = segments_to_consider_for_these_injections, table_name = self.table_name)
					if verbose:
						print >> sys.stderr, "%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed))
					self.found_injections_by_instrument_set.setdefault(instruments_set, []).extend(found)
					self.total_injections_by_instrument_set.setdefault(instruments_set, []).extend(total)
					self.missed_injections_by_instrument_set.setdefault(instruments_set, []).extend(missed)

			# All done
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
		if len(self.numslides) > 1:
			raise ValueError('number of slides differs between input files')
		elif self.numslides:
			self.numslides = min(self.numslides)
		else:
			self.numslides = 0
            distributions.numerator.increment(
                weight=weight_func(sim),
                **distributions.coinc_params([
                    event for event in events
                    if event.ifo not in contents.vetoseglists
                    or event.peak not in contents.vetoseglists[event.ifo]
                ], offsetvector))

    #
    # Clean up.
    #

    contents.xmldoc.unlink()
    connection.close()
    dbtables.discard_connection_filename(filename,
                                         working_filename,
                                         verbose=options.verbose)

#
# Output.
#

ligolw_search_summary.append_search_summary(xmldoc,
                                            process,
                                            ifos=segs.keys(),
                                            inseg=segs.extent_all(),
                                            outseg=segs.extent_all())
xmldoc.childNodes[-1].appendChild(
    distributions.to_xml(u"string_cusp_likelihood"))
ligolw_process.set_process_end_time(process)
Example #24
def dump_confidence_likelihood_scatter_data(globs, live_time_program = "lalapps_power", tmp_path = None, verbose = False):
	#
	# Collect file names.
	#

	if verbose:
		print >>sys.stderr, "building file list ..."
	filenames = sorted(filename for g in globs for filename in glob.glob(g))

	#
	# Initialize storage.
	#

	injections = []
	background = []
	zero_lag = []

	#
	# Iterate over files.
	#

	for n, filename in enumerate(filenames):
		#
		# Open the database file.
		#

		if verbose:
			print >>sys.stderr, "%d/%d: %s" % (n + 1, len(filenames), filename)
		working_filename = dbtables.get_connection_filename(filename, tmp_path = tmp_path, verbose = verbose)
		connection = sqlite3.connect(working_filename)
		connection.create_function("coinc_detection_statistic", 2, coinc_detection_statistic)
		database = SnglBurstUtils.CoincDatabase(connection, live_time_program)
		if verbose:
			SnglBurstUtils.summarize_coinc_database(database)

		#
		# Process database contents.  Assume all files with
		# sim_burst tables are the outputs of injection runs, and
		# others aren't.
		#

		if database.sim_burst_table is None:
			# non-injections
			for id, l, c, is_background in bb_id_likelihood_confidence_background(database):
				record = (coinc_detection_statistic(l, c), l, c)
				if is_background:
					if len(background) < 1e6:
						heapq.heappush(background, record)
					else:
						heapq.heappushpop(background, record)
				else:
					if len(zero_lag) < 1e6:
						heapq.heappush(zero_lag, record)
					else:
						heapq.heappushpop(zero_lag, record)
		else:
			# injections
			create_sim_coinc_map_view(database.connection)
			for a, l, c in database.connection.cursor().execute("""
SELECT
	burst_coinc_amplitude,
	burst_coinc_likelihood,
	burst_coinc_confidence
FROM
	sim_coinc_map
WHERE
	sim_coinc_def_id == ?
			""", (database.sce_definer_id,)):
				record = (-a, l, c)
				if len(injections) < 1e6:
					heapq.heappush(injections, record)
				else:
					heapq.heappushpop(injections, record)

		#
		# Done with this file.
		#

		database.connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = verbose)

	#
	# Dump scatter plot data.
	#

	if verbose:
		print >>sys.stderr, "writing scatter plot data ..."

	f = file("lalapps_excesspowerfinal_background_scatter.dat", "w")
	for a, l, c in background:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_zero_lag_scatter.dat", "w")
	for a, l, c in zero_lag:
		print >>f, "%.16g %.16g" % (l, c)

	f = file("lalapps_excesspowerfinal_injections_scatter.dat", "w")
	for a, l, c in injections:
		print >>f, "%.16g %.16g" % (l, c)

	if verbose:
		print >>sys.stderr, "done."
Example #25
def get_injections(injfnames,
                   zero_lag_segments,
                   ifos="H1,H2,L1",
                   FAR=1.0,
                   verbose=True):
    """
  The LV stat uses simulations above threshold, not some IFAR of the loudest event, so the default should be "inf"
  """
    def injection_was_made(geocent_end_time,
                           geocent_end_time_ns,
                           zero_lag_segments=zero_lag_segments):
        """
    return True if injection was made in the given segmentlist
    """
        return lsctables.LIGOTimeGPS(geocent_end_time,
                                     geocent_end_time_ns) in zero_lag_segments

    found = []
    missed = []
    print("", file=sys.stderr)
    for cnt, f in enumerate(injfnames):
        print("getting injections: " + str(FAR) + ":\t%.1f%%\r" %
              (100.0 * cnt / len(injfnames), ),
              end=' ',
              file=sys.stderr)
        working_filename = dbtables.get_connection_filename(f,
                                                            tmp_path=None,
                                                            verbose=verbose)
        connection = sqlite3.connect(working_filename)
        connection.create_function("injection_was_made", 2, injection_was_made)

        make_sim_inspiral = lsctables.SimInspiralTable.get_table(
            dbtables.get_xml(connection))._row_from_cols

        # FIXME may not be done correctly if injections are done in timeslides
        # FIXME may not be done correctly if injections aren't logarithmic in d
        # do we really want d^3 waiting?
        # FIXME do we really want injections independent of their FAR

        for values in connection.cursor().execute(
                """
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.false_alarm_rate < ?
      AND coinc_inspiral.ifos == ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (
                    FAR,
                    ifos,
                )):
            sim = make_sim_inspiral(values)
            if values[-1]:
                found.append(sim)
            else:
                missed.append(sim)

        # done
        connection.close()
        dbtables.discard_connection_filename(f,
                                             working_filename,
                                             verbose=verbose)
        dbtables.DBTable_set_connection(None)

    print("\nFound = %d Missed = %d" % (len(found), len(missed)),
          file=sys.stderr)
    return found, missed
Example #26
  def __init__(self, flist, opts):
    self.far = {}
    self.segments = segments.segmentlistdict()
    self.non_inj_fnames = []
    self.inj_fnames = []
    #self.der_fit = None
    self.twoDMassBins = None
    #self.dBin = {}
    self.gw = None
    self.found = {}
    self.missed = {}
    self.wnfunc = None
    self.opts = opts
    if opts.bootstrap_iterations: self.bootnum = int(opts.bootstrap_iterations)
    else: self.bootnum = 100
    self.veto_segments = segments.segmentlistdict()
    self.zero_lag_segments = {}
    self.instruments = []
    self.livetime = {}
    self.minmass = None
    self.maxmass = None
    self.mintotal = None
    self.maxtotal = None

    for f in flist: 
      if opts.verbose: print >> sys.stderr, "Gathering stats from: %s...." % (f,)
      working_filename = dbtables.get_connection_filename(f, verbose = opts.verbose)
      connection = sqlite3.connect(working_filename)
      dbtables.DBTable_set_connection(connection)
      xmldoc = dbtables.get_xml(connection)

      # look for a sim table
      try:
        sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
        self.inj_fnames.append(f)
        sim = True
      except ValueError:
        self.non_inj_fnames.append(f)
        sim = False

      if not sim: 
        if opts.veto_segments_name is not None: self.veto_segments = db_thinca_rings.get_veto_segments(connection, opts.veto_segments_name)
        self.get_instruments(connection)
        self.segments += db_thinca_rings.get_thinca_zero_lag_segments(connection, program_name = opts.live_time_program)
        self.get_far_thresholds(connection)
      else: 
        self.get_mass_ranges(connection)
      

      #connection.close()
      dbtables.discard_connection_filename(f, working_filename, verbose = opts.verbose)
      dbtables.DBTable_set_connection(None)      

    # FIXME Do these have to be done by instruments?
    self.segments -= self.veto_segments

    # compute far, segments and livetime by instruments
    for i in self.instruments:
      self.far[i] = min(self.far[i])
      # FIXME this bombs if any of the FARS are zero. maybe it should continue
      # and just remove that instrument combo from the calculation
      if self.far[i] == 0: 
        print >> sys.stderr, "Encountered 0 FAR in %s, ABORTING" % (i,)
        sys.exit(1)
      self.zero_lag_segments[i] = self.segments.intersection(i) - self.segments.union(set(self.segments.keys()) - i)
      # Livetime must have playground removed
      self.livetime[i] = float(abs(self.zero_lag_segments[i] - segmentsUtils.S2playground(self.segments.extent_all())))
      if opts.verbose: print >> sys.stderr, "%s FAR %e, livetime %f" % (",".join(sorted(list(i))), self.far[i], self.livetime[i])

    # get a 2D mass binning
    self.twoDMassBins = self.get_2d_mass_bins(self.minmass, self.maxmass, opts.mass_bins)
Example #28
max_mass = 99
min_mtotal = 25
max_mtotal = 100
mass_bins = 11
dist_bins = 50


opts, filenames = parse_command_line()

if opts.veto_segments_name is not None:
  working_filename = dbtables.get_connection_filename(opts.full_data_file, verbose = opts.verbose)
  connection = sqlite3.connect(working_filename)
  dbtables.DBTable_set_connection(connection)
  veto_segments = db_thinca_rings.get_veto_segments(connection, opts.veto_segments_name)
  connection.close()
  dbtables.discard_connection_filename(opts.full_data_file, working_filename, verbose = opts.verbose)
  dbtables.DBTable_set_connection(None)
else:
  veto_segments = segments.segmentlistdict()

if not opts.burst_found and not opts.burst_missed:
  FAR, seglists = get_far_threshold_and_segments(opts.full_data_file, opts.live_time_program, instruments=lsctables.ifos_from_instrument_set(opts.instruments),verbose = opts.verbose)


  # times when only exactly the required instruments are on
  seglists -= veto_segments
  zero_lag_segments = seglists.intersection(opts.instruments) - seglists.union(set(seglists.keys()) - opts.instruments)

  live_time = float(abs(zero_lag_segments))
  print(FAR, live_time)
Example #29
def get_injections(injfnames, zero_lag_segments, ifos="H1,H2,L1", FAR=1.0, verbose = True):
  """
  The LV stat uses simulations above threshold, not some IFAR of the loudest event, so the default should be "inf"
  """
  def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
    """
    return True if injection was made in the given segmentlist
    """
    return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

  found = []
  missed = []
  print >>sys.stderr, ""
  for cnt, f in enumerate(injfnames):
    print >>sys.stderr, "getting injections: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
    working_filename = dbtables.get_connection_filename(f, tmp_path = None, verbose = verbose)
    connection = sqlite3.connect(working_filename)
    connection.create_function("injection_was_made", 2, injection_was_made)

    make_sim_inspiral = lsctables.table.get_table(dbtables.get_xml(connection), lsctables.SimInspiralTable.tableName)._row_from_cols

    # FIXME may not be done correctly if injections are done in timeslides
    # FIXME may not be done correctly if injections aren't logarithmic in d
    # do we really want d^3 waiting?
    # FIXME do we really want injections independent of their FAR

    for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.false_alarm_rate < ?
      AND coinc_inspiral.ifos == ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
    """, (FAR,ifos,)):
      sim = make_sim_inspiral(values)
      if values[-1]:
        found.append(sim)
      else:
        missed.append(sim)

    # done
    connection.close()
    dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
    dbtables.DBTable_set_connection(None)

  print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
  return found, missed
Example #30
	def get_injections(self, instruments, FAR=float("inf")):
		injfnames = self.inj_fnames
		zero_lag_segments = self.zero_lag_segments[instruments]
		verbose = self.opts.verbose
		found = []
		missed = []
		print >>sys.stderr, ""
		for cnt, f in enumerate(injfnames):
			print >>sys.stderr, "getting injections below FAR: " + str(FAR) + ":\t%.1f%%\r" % (100.0 * cnt / len(injfnames),),
			working_filename = dbtables.get_connection_filename(f, tmp_path = opts.tmp_space, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			dbtables.DBTable_set_connection(connection)
			xmldoc = dbtables.get_xml(connection)
			# DON'T BOTHER CONTINUING IF THE INSTRUMENTS OF INTEREST ARE NOT HERE
			instruments_in_this_file = []
			for i in connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event'):
				if i[0]: instruments_in_this_file.append(frozenset(lsctables.instrument_set_from_ifos(i[0])))
			if instruments not in instruments_in_this_file:
				connection.close()
				dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
				dbtables.DBTable_set_connection(None)
				continue

			# WORK OUT CORRECT SEGMENTS FOR THIS FILE WHERE WE SHOULD SEE INJECTIONS
			segments = self.get_segments(connection, xmldoc)
			segments -= self.veto_segments
			#print thincasegments
			zero_lag_segments  = segments.intersection(instruments) - segments.union(set(segments.keys()) - instruments)
			###############

			# DEFINE THE INJECTION WAS MADE FUNCTION
			def injection_was_made(geocent_end_time, geocent_end_time_ns, zero_lag_segments = zero_lag_segments):
				"""
				return True if injection was made in the given segmentlist
				"""
				return lsctables.LIGOTimeGPS(geocent_end_time, geocent_end_time_ns) in zero_lag_segments

			connection.create_function("injection_was_made", 2, injection_was_made)
			make_sim_inspiral = lsctables.SimInspiralTable.get_table(dbtables.get_xml(connection)).row_from_cols

			# INSPIRAL
			if self.coinc_inspiral_table:
				for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN coinc_inspiral ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == coinc_inspiral.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND coinc_inspiral.combined_far < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR,)):
					sim = make_sim_inspiral(values)
					if values[-1]:
						found.append(sim)
					else:
						missed.append(sim)

			# BURSTS
			if self.multi_burst_table:
				for values in connection.cursor().execute("""
SELECT
  sim_inspiral.*,
  -- true if injection matched a coinc below the false alarm rate threshold
  EXISTS (
    SELECT
      *
    FROM
      coinc_event_map AS mapa
      JOIN coinc_event_map AS mapb ON (
        mapa.coinc_event_id == mapb.coinc_event_id
      )
      JOIN multi_burst ON (
        mapb.table_name == "coinc_event"
        AND mapb.event_id == multi_burst.coinc_event_id
      )
    WHERE
      mapa.table_name == "sim_inspiral"
      AND mapa.event_id == sim_inspiral.simulation_id
      AND multi_burst.false_alarm_rate < ?
  )
FROM
  sim_inspiral
WHERE
  -- only interested in injections that were injected
  injection_was_made(sim_inspiral.geocent_end_time, sim_inspiral.geocent_end_time_ns)
				""", (FAR,)):
					sim = make_sim_inspiral(values)
					if values[-1]:
						found.append(sim)
					else:
						missed.append(sim)
			# done
			connection.close()
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
			dbtables.DBTable_set_connection(None)

			print >>sys.stderr, "\nFound = %d Missed = %d" % (len(found), len(missed))
		return found, missed
Example #31
0
	# if the database contains a sim_ringdown table then it is assumed
	# to represent an injection run.  its rings must not be added to the
	# livetime, and it cannot provide background coincs, so it is just
	# skipped altogether in this first pass.
	#

	if "sim_ringdown" in dbtables.get_table_names(connection):
		if options.verbose:
			print >>sys.stderr, "\tdatabase contains sim_ringdown table, skipping ..."

		#
		# close the database
		#

		connection.close()
		dbtables.discard_connection_filename(filename, working_filename, verbose = options.verbose)
		continue

	#
	# compute and record background livetime
	#

	background.add_livetime(connection, options.live_time_program, veto_segments_name = options.veto_segments_name, verbose = options.verbose)

	#
	# count background coincs
	#

	if options.verbose:
		print >>sys.stderr, "\tcollecting background statistics ..."
	for on_instruments, participating_instruments, frequency, snr, uncombined_ifar, likelihood in connection.cursor().execute("""
Example #32
0
	def __init__(self, filelist, live_time_program = None, veto_segments_name = None, data_segments_name = "datasegments", tmp_path = None, verbose = False):

		self.segments = segments.segmentlistdict()
		self.instruments = set()
		self.table_name = None
		self.found_injections_by_instrument_set = {}
		self.missed_injections_by_instrument_set = {}
		self.total_injections_by_instrument_set = {}
		self.zerolag_fars_by_instrument_set = {}
		self.ts_fars_by_instrument_set = {}
		self.numslides = set()

		for f in filelist:
			if verbose:
				print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path = tmp_path, verbose = verbose)
			connection = sqlite3.connect(working_filename)
			xmldoc = dbtables.get_xml(connection)

			sim = False

			# look for a sim_inspiral table.  This is IMR work, so we have to have one of these :)
			try:
				sim_inspiral_table = table.get_table(xmldoc, dbtables.lsctables.SimInspiralTable.tableName)
				sim = True
			except ValueError:
				pass

			# look for the relevant table for analyses
			for table_name in allowed_analysis_table_names():
				try:
					setattr(self, table_name, table.get_table(xmldoc, table_name))
					if self.table_name is None or self.table_name == table_name:
						self.table_name = table_name
					else:
						raise ValueError("detected more than one table type out of " + " ".join(allowed_analysis_table_names()))
				except ValueError:
					setattr(self, table_name, None)

			# the non simulation databases are where we get information about segments
			if not sim:
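				# record the number of distinct time slides so we can check later
				# that every input file was analyzed with the same slide count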
				self.numslides.add(connection.cursor().execute('SELECT count(DISTINCT(time_slide_id)) FROM time_slide').fetchone()[0])
				for ifos in get_instruments_from_coinc_event_table(connection):
					self.instruments.add(ifos)
				# save a reference to the segments for this file, needed to figure out the missed and found injections
				self.this_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				# FIXME we don't really have any reason to use playground segments, but I put this here as a reminder
				# self.this_playground_segments = segmentsUtils.S2playground(self.this_segments.extent_all())
				self.segments += self.this_segments

				# get the far thresholds for the loudest events in these databases
				for (instruments_set, far, ts) in get_event_fars(connection, self.table_name):
					if not ts:
						self.zerolag_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
					else:
						self.ts_fars_by_instrument_set.setdefault(instruments_set, []).append(far)
			# get the injections
			else:
				# We need to know the segments in this file to determine which injections are found
				self.this_injection_segments = get_segments(connection, xmldoc, self.table_name, live_time_program, veto_segments_name, data_segments_name = data_segments_name)
				self.this_injection_instruments = []
				distinct_instruments = connection.cursor().execute('SELECT DISTINCT(instruments) FROM coinc_event WHERE instruments!=""').fetchall()
				for instruments, in distinct_instruments:
					instruments_set = frozenset(lsctables.instrument_set_from_ifos(instruments))
					self.this_injection_instruments.append(instruments_set)
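					# restrict to the times when exactly this instrument combination was
					# observing (all of these instruments on, all others off)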
					segments_to_consider_for_these_injections = self.this_injection_segments.intersection(instruments_set) - self.this_injection_segments.union(set(self.this_injection_segments.keys()) - instruments_set)
					found, total, missed = get_min_far_inspiral_injections(connection, segments = segments_to_consider_for_these_injections, table_name = self.table_name)
					if verbose:
						print >> sys.stderr, "%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed))
					self.found_injections_by_instrument_set.setdefault(instruments_set, []).extend(found)
					self.total_injections_by_instrument_set.setdefault(instruments_set, []).extend(total)
					self.missed_injections_by_instrument_set.setdefault(instruments_set, []).extend(missed)

			# All done
			dbtables.discard_connection_filename(f, working_filename, verbose = verbose)
		if len(self.numslides) > 1:
			raise ValueError('number of slides differs between input files')
		elif self.numslides:
			self.numslides = min(self.numslides)
		else:
			self.numslides = 0
Example #33
0
	def __init__(self, opts, flist):
		self.segments = segments.segmentlistdict()
		self.non_inj_fnames = []
		self.inj_fnames = []
		self.found = {}
		self.missed = {}
		self.opts = opts
		self.veto_segments = segments.segmentlistdict()
		self.zero_lag_segments = {}
		self.instruments = []
		self.livetime = {}
		self.multi_burst_table = None
		self.coinc_inspiral_table = None

		for f in flist:
			if opts.verbose: print >> sys.stderr, "Gathering stats from: %s...." % (f,)
			working_filename = dbtables.get_connection_filename(f, tmp_path=opts.tmp_space, verbose = opts.verbose)
			connection = sqlite3.connect(working_filename)
			dbtables.DBTable_set_connection(connection)
			xmldoc = dbtables.get_xml(connection)

			# look for a sim table
			try:
				sim_inspiral_table = dbtables.lsctables.SimInspiralTable.get_table(xmldoc)
				self.inj_fnames.append(f)
				sim = True
			except ValueError:
				self.non_inj_fnames.append(f)
				sim = False

			# FIGURE OUT IF IT IS A BURST OR INSPIRAL RUN
			try:
				self.multi_burst_table = dbtables.lsctables.MultiBurstTable.get_table(xmldoc)
			except ValueError:
				self.multi_burst_table = None
			try:
				self.coinc_inspiral_table = dbtables.lsctables.CoincInspiralTable.get_table(xmldoc)
			except ValueError:
				self.coinc_inspiral_table = None
			if self.multi_burst_table and self.coinc_inspiral_table:
				print >>sys.stderr, "both burst and inspiral tables found.  Aborting"
				raise ValueError

			if not sim:
				self.get_instruments(connection)
				self.segments += self.get_segments(connection,xmldoc)
				#FIXME, don't assume veto segments are the same in every file!
				self.veto_segments = self.get_veto_segments(connection)

			dbtables.discard_connection_filename(f, working_filename, verbose = opts.verbose)
			dbtables.DBTable_set_connection(None)

		# remove redundant instruments
		self.instruments = list(set(self.instruments))
		# FIXME Do these have to be done by instruments?
		self.segments -= self.veto_segments

		# segments and livetime by instruments
		for i in self.instruments:
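			# zero-lag segments for this instrument combination: all of these
			# instruments on and no other instrument on; the livetime is the
			# total duration of those segments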
			self.zero_lag_segments[i] = self.segments.intersection(i) - self.segments.union(set(self.segments.keys()) - i)
			self.livetime[i] = float(abs(self.zero_lag_segments[i]))