Example #1
def ligolw_bucut(xmldoc,
                 options,
                 burst_test_func,
                 veto_segments=segments.segmentlistdict(),
                 del_non_coincs=False,
                 del_skipped_injections=False,
                 program=None,
                 verbose=False):
    contents = DocContents(xmldoc, program)

    process = append_process(xmldoc, options)

    apply_filters(contents,
                  burst_test_func,
                  veto_segments,
                  del_non_coincs=del_non_coincs,
                  del_skipped_injections=del_skipped_injections,
                  verbose=verbose)

    ligolw_process.set_process_end_time(process)

    seg = contents.outsegs.extent_all()
    ligolw_search_summary.append_search_summary(xmldoc,
                                                process,
                                                inseg=seg,
                                                outseg=seg,
                                                nevents=len(
                                                    contents.snglbursttable))

    return xmldoc

def gen_likelihood_control(coinc_params_distributions,
                           seglists,
                           name=u"ligolw_burca_tailor",
                           comment=u""):
    xmldoc = ligolw.Document()
    node = xmldoc.appendChild(ligolw.LIGO_LW())

    process = ligolw_process.register_to_xmldoc(xmldoc,
                                                program=process_program_name,
                                                paramdict={},
                                                version=__version__,
                                                cvs_repository="lscsoft",
                                                cvs_entry_time=__date__,
                                                comment=comment)
    coinc_params_distributions.process_id = process.process_id
    ligolw_search_summary.append_search_summary(xmldoc,
                                                process,
                                                ifos=seglists.keys(),
                                                inseg=seglists.extent_all(),
                                                outseg=seglists.extent_all())

    node.appendChild(coinc_params_distributions.to_xml(name))

    ligolw_process.set_process_end_time(process)

    return xmldoc
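
Both functions above follow the same LIGO-LW bookkeeping pattern: register a process row, do the work, append a search_summary row describing the input and output segments, then stamp the process end time. A minimal self-contained sketch of that pattern, assuming the same glue utilities the examples import; the program name, GPS times, and output filename are made-up placeholders:

import glue.lal
from ligo import segments
from glue.ligolw import ligolw
from glue.ligolw import utils as ligolw_utils
from glue.ligolw.utils import process as ligolw_process
from glue.ligolw.utils import search_summary as ligolw_search_summary

xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())

# Record who produced the document and with what options.
process = ligolw_process.register_to_xmldoc(
    xmldoc, program=u"toy_search", paramdict={}, comment=u"demo")

# Record which data were analysed (inseg) and reported on (outseg).
seg = segments.segment(glue.lal.LIGOTimeGPS(1000000000),
                       glue.lal.LIGOTimeGPS(1000000100))
ligolw_search_summary.append_search_summary(
    xmldoc, process, inseg=seg, outseg=seg, nevents=0)

ligolw_process.set_process_end_time(process)
ligolw_utils.write_filename(xmldoc, "toy_search.xml.gz", gz=True)
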
Example #5
def ligolw_sicluster(doc, **kwargs):
    # Extract segments and tables
    inseg, outseg, snglinspiraltable = get_tables(doc)

    # Add process information
    try:
        process = append_process(doc, **kwargs)
    except ValueError:
        process = None

    # Delete all triggers below threshold
    if kwargs["snr_threshold"] > 0:
        thresh = float(kwargs["snr_threshold"])
        if kwargs["verbose"]:
            print >>sys.stderr, "discarding triggers with snr < %f ..." % \
              kwargs["snr_threshold"]
        for i in range(len(snglinspiraltable) - 1, -1, -1):
            if snglinspiraltable[i].snr <= thresh:
                del snglinspiraltable[i]

    # Cluster
    snglcluster.cluster_events(
        snglinspiraltable,
        testfunc=lambda a, b: SnglInspiralUtils.CompareSnglInspiral(
            a, b, twindow=kwargs["cluster_window"]),
        clusterfunc=SnglInspiralCluster,
        sortfunc=SnglInspiralUtils.CompareSnglInspiralByEndTime,
        bailoutfunc=lambda a, b: SnglInspiralUtils.CompareSnglInspiral(
            a, b, twindow=kwargs["cluster_window"]),
        verbose=kwargs["verbose"])

    # Sort by signal-to-noise ratio
    if kwargs["sort_ascending_snr"] or kwargs["sort_descending_snr"]:
        if kwargs["verbose"]:
            print >> sys.stderr, "sorting by snr ..."
        snglinspiraltable.sort(SnglInspiralUtils.CompareSnglInspiralBySnr)
        if kwargs["sort_descending_snr"]:
            snglinspiraltable.reverse()

    # Add search summary information
    if process and inseg and outseg:
        ligolw_search_summary.append_search_summary(
            doc,
            process,
            inseg=inseg,
            outseg=outseg,
            nevents=len(snglinspiraltable))
    if process:
        ligolw_process.set_process_end_time(process)

    return doc
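
The threshold cut above walks the table backwards so that deleting a row never shifts the index of a row that has not been visited yet. The same idiom in isolation, on a plain list with made-up values:

rows = [5.2, 7.9, 3.1, 8.4]            # stand-ins for table rows' snr values
thresh = 5.5
for i in range(len(rows) - 1, -1, -1):
    if rows[i] <= thresh:
        del rows[i]                    # indices below i are unaffected
assert rows == [7.9, 8.4]
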
Example #7
def create_xml(ts_data, psd_segment_length, window_fraction, event_list,
               station, setname="MagneticFields"):
    __program__ = 'pyburst_excesspower'
    start_time = LIGOTimeGPS(int(ts_data.start_time))
    end_time = LIGOTimeGPS(int(ts_data.end_time))
    inseg = segment(start_time, end_time)
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    ifo = 'H1'  # channel_name.split(":")[0]
    straindict = psd.insert_psd_option_group.__dict__
    proc_row = register_to_xmldoc(xmldoc, __program__, straindict, ifos=[ifo],
                                  version=git_version.id,
                                  cvs_repository=git_version.branch,
                                  cvs_entry_time=git_version.date)
    outseg = determine_output_segment(inseg, psd_segment_length,
                                      ts_data.sample_rate, window_fraction)
    ss = append_search_summary(xmldoc, proc_row, ifos=(station,), inseg=inseg,
                               outseg=outseg)
    for sb in event_list:
        sb.process_id = proc_row.process_id
        sb.search = proc_row.program
        sb.ifo, sb.channel = station, setname
    xmldoc.childNodes[0].appendChild(event_list)
    fname = make_filename(station, inseg)
    utils.write_filename(xmldoc, fname, gz=fname.endswith("gz"))

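make_filename is not defined in this snippet. A hypothetical reconstruction, modelled on the T010150_basename helper that appears further down this page; the helper name, description argument, and extension are guesses:

import math

def make_filename(station, seg, description="EXCESSPOWER"):
    # Hypothetical: build "STATION-DESCRIPTION-START-DURATION.xml.gz" in the
    # style of the T010150 naming convention used elsewhere on this page.
    start = int(math.floor(seg[0]))
    duration = int(math.ceil(seg[1] - start))
    return "%s-%s-%d-%d.xml.gz" % (station, description, start, duration)
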
# Write process metadata to output file.
process = command.register_to_xmldoc(out_xmldoc,
                                     parser,
                                     opts,
                                     ifos=opts.detector,
                                     comment="Simulated coincidences")

# Add search summary to output file.
all_time = segments.segment(
    [glue.lal.LIGOTimeGPS(0),
     glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(out_xmldoc,
                                                      process,
                                                      inseg=all_time,
                                                      outseg=all_time)

# Read PSDs.
progress.update(-1, 'reading ' + opts.reference_psd.name)
xmldoc, _ = ligolw_utils.load_fileobj(
    opts.reference_psd, contenthandler=lal.series.PSDContentHandler)
psds = lal.series.read_psd_xmldoc(xmldoc, root_name=None)
psds = {
    key: timing.InterpolatedPSD(filter.abscissa(psd), psd.data.data)
    for key, psd in psds.items() if psd is not None
}

Example #9
def ligolw_bucluster(
	xmldoc,
	program,
	process,
	prefunc,
	postfunc,
	testfunc,
	clusterfunc,
	sortfunc = None,
	bailoutfunc = None,
	verbose = False
):
	"""
	Run the clustering algorithm on the list of burst candidates.  The
	return value is the tuple (xmldoc, changed), where xmldoc is the
	input document, and changed is a boolean that is True if the
	contents of the sngl_burst table were altered, and False if the
	triggers were not modified by the clustering process.

	If the document does not contain a sngl_burst table, then the
	document is not modified (including no modifications to the process
	metadata tables).
	"""

	#
	# Extract live time segment and sngl_burst table
	#

	try:
		sngl_burst_table = lsctables.SnglBurstTable.get_table(xmldoc)
	except ValueError:
		# no-op:  document does not contain a sngl_burst table
		if verbose:
			print >>sys.stderr, "document does not contain a sngl_burst table, skipping ..."
		return xmldoc, False
	seglists = ligolw_search_summary.segmentlistdict_fromsearchsummary(xmldoc, program = program).coalesce()

	#
	# Remove all H2 triggers intersecting the frequency band
	# 1138.6 Hz -- 1216.0 Hz
	#
	# FIXME:  put this into the excess power pipeline, correctly
	#

	#bad_band = segments.segment(1138.586956521739, 1216.0326086956522)
	#for i in xrange(len(sngl_burst_table) - 1, -1, -1):
	#	a = sngl_burst_table[i]
	#	if a.ifo == "H2" and a.band.intersects(bad_band):
	#		del sngl_burst_table[i]

	#
	# Preprocess candidates
	#

	if verbose:
		print >>sys.stderr, "pre-processing ..."
	preprocess_output = prefunc(sngl_burst_table)

	#
	# Cluster
	#

	table_changed = snglcluster.cluster_events(sngl_burst_table, testfunc, clusterfunc, sortfunc = sortfunc, bailoutfunc = bailoutfunc, verbose = verbose)

	#
	# Postprocess candidates
	#

	if verbose:
		print >>sys.stderr, "post-processing ..."
	postfunc(sngl_burst_table, preprocess_output)

	#
	# Update instrument list in process table and add search summary
	# information
	#

	process.instruments = seglists.keys()
	ligolw_search_summary.append_search_summary(xmldoc, process, inseg = seglists.extent_all(), outseg = seglists.extent_all(), nevents = len(sngl_burst_table))

	#
	# Done
	#

	return xmldoc, table_changed
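
snglcluster.cluster_events is driven entirely by the callables handed to it: testfunc is cmp-like and returns 0 when two events belong in the same cluster, and clusterfunc merges them. A toy illustration of that contract on (start, stop) tuples; this is illustrative only, not the real library code, which sorts for efficiency and mutates sngl_burst rows in place:

def toy_cluster(events, testfunc, clusterfunc):
    changed = False
    merged = True
    while merged:          # keep scanning until a full pass merges nothing
        merged = False
        for i in range(len(events)):
            for j in range(i + 1, len(events)):
                if testfunc(events[i], events[j]) == 0:
                    events[i] = clusterfunc(events[i], events[j])
                    del events[j]
                    changed = merged = True
                    break
            if merged:
                break
    return events, changed

def overlap(a, b):
    return 0 if a[0] <= b[1] and b[0] <= a[1] else 1   # 0 means "cluster these"

def join(a, b):
    return (min(a[0], b[0]), max(a[1], b[1]))

print(toy_cluster([(0, 2), (1, 3), (10, 11)], overlap, join))
# -> ([(0, 3), (10, 11)], True)
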
	#
	# Clean up.
	#

	contents.xmldoc.unlink()
	connection.close()
	dbtables.discard_connection_filename(filename, working_filename, verbose = options.verbose)


#
# Output.
#


ligolw_search_summary.append_search_summary(xmldoc, process, ifos = segs.keys(), inseg = segs.extent_all(), outseg = segs.extent_all())
xmldoc.childNodes[-1].appendChild(distributions.to_xml(u"string_cusp_likelihood"))
ligolw_process.set_process_end_time(process)


def T010150_basename(instruments, description, seg):
	start = int(math.floor(seg[0]))
	duration = int(math.ceil(seg[1] - start))
	return "%s-%s-%d-%d" % ("+".join(sorted(instruments)), description, start, duration)
if options.T010150:
	filename = "%s.xml.gz" % T010150_basename(segs.keys(), options.T010150, segs.extent_all())
else:
	filename = options.output
ligolw_utils.write_filename(xmldoc, filename, verbose = verbose, gz = (filename or "stdout").endswith(".gz"))
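
For concreteness, here is what T010150_basename returns for made-up inputs (math must be imported, as the script above already requires); the function only indexes seg[0] and seg[1], so a plain tuple can stand in for a segment:

seg = (1126259462.0, 1126259502.5)
print(T010150_basename(("H1", "L1"), "STRING_CUSP_LIKELIHOOD", seg))
# -> H1+L1-STRING_CUSP_LIKELIHOOD-1126259462-41
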
Example #11
def fake_trigger_generator(instrument='H1'):
    """
    Generate fake trigger maps.

    Parameters
    ----------
    instrument : str
      Instrument name
    """
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    # Process information
    proc = process.append_process(xmldoc, "fake_search")
    process.append_process_params(xmldoc, proc, {})
    t0 = 1e9
    ntrig = 1000
    ifo = instrument
    inseg = segment(LIGOTimeGPS(t0), LIGOTimeGPS(t0 + ntrig / 10))
    outseg = segment(LIGOTimeGPS(t0), LIGOTimeGPS(t0 + ntrig / 10))
    # Search summary
    search_summary.append_search_summary(xmldoc,
                                         proc,
                                         comment="Fake triggers",
                                         ifos=(ifo, ),
                                         inseg=inseg,
                                         outseg=outseg)
    columns = [
        'chisq_dof', 'bandwidth', 'central_freq', 'confidence', 'peak_time_ns',
        'start_time', 'process_id', 'fhigh', 'stop_time_ns', 'channel', 'ifo',
        'duration', 'event_id', 'hrss', 'stop_time', 'peak_time', 'snr',
        'search', 'start_time_ns', 'flow', 'amplitude'
    ]
    table = lsctables.New(lsctables.SnglBurstTable, columns)
    # Generate uniformly distributed trigger times at an average rate of about 10 per second
    times = t0 + uniform.rvs(0, ntrig / 10., ntrig)
    for t in times:
        row = table.RowType()
        # time frequency position and extent
        row.chisq_dof = int(2 + expon.rvs(2))
        row.duration = 1. / 2**int(uniform.rvs(0, 7))
        row.bandwidth = row.chisq_dof / row.duration / 2

        row.central_freq = uniform.rvs(16, 2048)
        row.flow = max(row.central_freq - row.bandwidth, 0)
        row.fhigh = min(row.central_freq + row.bandwidth, 2048)

        ns, sec = math.modf(t)
        ns = int("%09d" % (ns * 1e9))
        row.peak_time, row.peak_time_ns = int(sec), ns

        ns, sec = math.modf(t - row.duration / 2)
        ns = int("%09d" % (ns * 1e9))
        row.start_time, row.start_time_ns = int(sec), ns

        ns, sec = math.modf(t + row.duration / 2)
        ns = int("%09d" % (ns * 1e9))
        row.stop_time, row.stop_time_ns = int(sec), ns

        # TODO: Correlate some triggers; an upward fluctuation often triggers a few
        # tiles on top of each other

        # SNR and confidence
        row.snr = 5.
        while row.snr < 2 * row.chisq_dof:
            row.snr = chi2.rvs(row.chisq_dof)
        row.confidence = chi2.sf(row.snr, row.chisq_dof)
        row.snr = math.sqrt(row.snr / row.chisq_dof - 1)
        row.hrss = row.amplitude = 1e-21

        # metadata
        row.search = "fake_search"
        row.channel = "FAKE"
        row.ifo = ifo

        row.event_id = table.get_next_id()
        row.process_id = proc.process_id

        table.append(row)

    xmldoc.childNodes[0].appendChild(table)

    utils.write_filename(xmldoc,
                         "%s-FAKE_SEARCH-%d-%d.xml.gz" % (ifo, int(t0), 10000),
                         gz=True)
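
The repeated math.modf blocks above split a float GPS time into integer seconds and a zero-padded nanosecond count. The conversion in isolation:

import math

def split_gps(t):
    # Split a float GPS time into (seconds, nanoseconds), as the rows above do.
    frac, sec = math.modf(t)
    return int(sec), int("%09d" % (frac * 1e9))

print(split_gps(1000000000.25))   # -> (1000000000, 250000000)
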
Example #12
data_length = sample_rate * data_duration # data length in samples


# Open output file.
out_xmldoc = ligolw.Document()
out_xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(
    out_xmldoc, parser, opts, ifos=opts.detector, comment="Little hope!")

# Add search summary to output file.
all_time = segments.segment([glue.lal.LIGOTimeGPS(0), glue.lal.LIGOTimeGPS(2e9)])
search_summary_table = lsctables.New(lsctables.SearchSummaryTable)
out_xmldoc.childNodes[0].appendChild(search_summary_table)
summary = ligolw_search_summary.append_search_summary(out_xmldoc, process,
    inseg=all_time, outseg=all_time)

# Read template bank file.
progress.update(-1, 'reading ' + opts.template_bank.name)
xmldoc, _ = ligolw_utils.load_fileobj(
    opts.template_bank, contenthandler=ligolw_bayestar.LSCTablesContentHandler)

# Determine the low frequency cutoff from the template bank file.
template_bank_f_low = ligolw_bayestar.get_template_bank_f_low(xmldoc)

template_bank = ligolw_table.get_table(xmldoc,
    lsctables.SnglInspiralTable.tableName)

def main(args=None):
    p = parser()
    opts = p.parse_args(args)

    # LIGO-LW XML imports.
    from glue.ligolw import ligolw
    from glue.ligolw.param import Param
    from glue.ligolw.utils import process as ligolw_process
    from glue.ligolw.utils.search_summary import append_search_summary
    from glue.ligolw import utils as ligolw_utils
    from glue.ligolw.lsctables import (New, CoincDefTable, CoincID,
                                       CoincInspiralTable, CoincMapTable,
                                       CoincTable, ProcessParamsTable,
                                       ProcessTable, SimInspiralTable,
                                       SnglInspiralTable, TimeSlideTable)

    # glue, LAL and pylal imports.
    from ligo import segments
    import glue.lal
    import lal.series
    import lalsimulation
    from lalinspiral.inspinjfind import InspiralSCExactCoincDef
    from lalinspiral.thinca import InspiralCoincDef
    from tqdm import tqdm

    # FIXME: disable progress bar monitor thread.
    #
    # I was getting error messages that look like this:
    #
    # Traceback (most recent call last):
    #   File "/tqdm/_tqdm.py", line 885, in __del__
    #     self.close()
    #   File "/tqdm/_tqdm.py", line 1090, in close
    #     self._decr_instances(self)
    #   File "/tqdm/_tqdm.py", line 454, in _decr_instances
    #     cls.monitor.exit()
    #   File "/tqdm/_monitor.py", line 52, in exit
    #     self.join()
    #   File "/usr/lib64/python3.6/threading.py", line 1053, in join
    #     raise RuntimeError("cannot join current thread")
    # RuntimeError: cannot join current thread
    #
    # I don't know what causes this... maybe a race condition in tqdm's cleanup
    # code. Anyway, this should disable the tqdm monitor thread entirely.
    tqdm.monitor_interval = 0

    # BAYESTAR imports.
    from ..io.events.ligolw import ContentHandler
    from ..bayestar import filter

    # Read PSDs.
    xmldoc, _ = ligolw_utils.load_fileobj(
        opts.reference_psd, contenthandler=lal.series.PSDContentHandler)
    psds = lal.series.read_psd_xmldoc(xmldoc, root_name=None)
    psds = {
        key: filter.InterpolatedPSD(filter.abscissa(psd), psd.data.data)
        for key, psd in psds.items() if psd is not None
    }
    psds = [psds[ifo] for ifo in opts.detector]

    # Extract simulation table from injection file.
    inj_xmldoc, _ = ligolw_utils.load_fileobj(opts.input,
                                              contenthandler=ContentHandler)
    orig_sim_inspiral_table = SimInspiralTable.get_table(inj_xmldoc)

    # Prune injections that are outside distance limits.
    orig_sim_inspiral_table[:] = [
        row for row in orig_sim_inspiral_table
        if opts.min_distance <= row.distance <= opts.max_distance
    ]

    # Open output file.
    xmldoc = ligolw.Document()
    xmlroot = xmldoc.appendChild(ligolw.LIGO_LW())

    # Create tables. Process and ProcessParams tables are copied from the
    # injection file.
    coinc_def_table = xmlroot.appendChild(New(CoincDefTable))
    coinc_inspiral_table = xmlroot.appendChild(New(CoincInspiralTable))
    coinc_map_table = xmlroot.appendChild(New(CoincMapTable))
    coinc_table = xmlroot.appendChild(New(CoincTable))
    xmlroot.appendChild(ProcessParamsTable.get_table(inj_xmldoc))
    xmlroot.appendChild(ProcessTable.get_table(inj_xmldoc))
    sim_inspiral_table = xmlroot.appendChild(New(SimInspiralTable))
    sngl_inspiral_table = xmlroot.appendChild(New(SnglInspiralTable))
    time_slide_table = xmlroot.appendChild(New(TimeSlideTable))

    # Write process metadata to output file.
    process = register_to_xmldoc(xmldoc,
                                 p,
                                 opts,
                                 ifos=opts.detector,
                                 comment="Simulated coincidences")

    # Add search summary to output file.
    all_time = segments.segment(
        [glue.lal.LIGOTimeGPS(0),
         glue.lal.LIGOTimeGPS(2e9)])
    append_search_summary(xmldoc, process, inseg=all_time, outseg=all_time)

    # Create a time slide entry.  Needed for coinc_event rows.
    time_slide_id = time_slide_table.get_time_slide_id(
        {ifo: 0
         for ifo in opts.detector}, create_new=process)

    # Populate CoincDef table.
    inspiral_coinc_def = copy.copy(InspiralCoincDef)
    inspiral_coinc_def.coinc_def_id = coinc_def_table.get_next_id()
    coinc_def_table.append(inspiral_coinc_def)
    found_coinc_def = copy.copy(InspiralSCExactCoincDef)
    found_coinc_def.coinc_def_id = coinc_def_table.get_next_id()
    coinc_def_table.append(found_coinc_def)

    # Precompute values that are common to all simulations.
    detectors = [
        lalsimulation.DetectorPrefixToLALDetector(ifo) for ifo in opts.detector
    ]
    responses = [det.response for det in detectors]
    locations = [det.location for det in detectors]

    if opts.jobs == 1:
        pool_map = map
    else:
        from .. import omp
        from multiprocessing import Pool
        omp.num_threads = 1  # disable OpenMP parallelism
        pool_map = Pool(opts.jobs).imap

    func = functools.partial(simulate,
                             psds=psds,
                             responses=responses,
                             locations=locations,
                             measurement_error=opts.measurement_error,
                             f_low=opts.f_low,
                             waveform=opts.waveform)

    # Make sure that each thread gets a different random number state.
    # We start by drawing a random integer s in the main thread, and
    # then the i'th subprocess will seed itself with the integer i + s.
    #
    # The seed must be an unsigned 32-bit integer, so if there are n
    # threads, then s must be drawn from the interval [0, 2**32 - n).
    #
    # Note that *we* are thread 0, so there are a total of
    # n=1+len(sim_inspiral_table) threads.
    seed = np.random.randint(0, 2**32 - len(sim_inspiral_table) - 1)
    np.random.seed(seed)

    count_coincs = 0

    with tqdm(total=len(orig_sim_inspiral_table)) as progress:
        for sim_inspiral, simulation in zip(
                orig_sim_inspiral_table,
                pool_map(
                    func,
                    zip(
                        np.arange(len(orig_sim_inspiral_table)) + seed + 1,
                        orig_sim_inspiral_table))):
            progress.update()

            sngl_inspirals = []
            used_snr_series = []
            net_snr = 0.0
            count_triggers = 0

            # Loop over individual detectors and create SnglInspiral entries.
            for ifo, (horizon, abs_snr, arg_snr, toa, series) \
                    in zip(opts.detector, simulation):

                if np.random.uniform() > opts.duty_cycle:
                    continue
                elif abs_snr >= opts.snr_threshold:
                    # If SNR < threshold, then the injection is not found.
                    # Skip it.
                    count_triggers += 1
                    net_snr += np.square(abs_snr)
                elif not opts.keep_subthreshold:
                    continue

                # Create SnglInspiral entry.
                used_snr_series.append(series)
                sngl_inspirals.append(
                    sngl_inspiral_table.RowType(**dict(
                        dict.fromkeys(sngl_inspiral_table.validcolumns, None),
                        process_id=process.process_id,
                        ifo=ifo,
                        mass1=sim_inspiral.mass1,
                        mass2=sim_inspiral.mass2,
                        spin1x=sim_inspiral.spin1x,
                        spin1y=sim_inspiral.spin1y,
                        spin1z=sim_inspiral.spin1z,
                        spin2x=sim_inspiral.spin2x,
                        spin2y=sim_inspiral.spin2y,
                        spin2z=sim_inspiral.spin2z,
                        end=toa,
                        snr=abs_snr,
                        coa_phase=arg_snr,
                        eff_distance=horizon / abs_snr)))

            net_snr = np.sqrt(net_snr)

            # If too few triggers were found, then skip this event.
            if count_triggers < opts.min_triggers:
                continue

            # If network SNR < threshold, then the injection is not found.
            # Skip it.
            if net_snr < opts.net_snr_threshold:
                continue

            # Add Coinc table entry.
            coinc = coinc_table.appendRow(
                coinc_event_id=coinc_table.get_next_id(),
                process_id=process.process_id,
                coinc_def_id=inspiral_coinc_def.coinc_def_id,
                time_slide_id=time_slide_id,
                insts=opts.detector,
                nevents=len(opts.detector),
                likelihood=None)

            # Add CoincInspiral table entry.
            coinc_inspiral_table.appendRow(
                coinc_event_id=coinc.coinc_event_id,
                instruments=[
                    sngl_inspiral.ifo for sngl_inspiral in sngl_inspirals
                ],
                end=lal.LIGOTimeGPS(1e-9 * np.mean([
                    sngl_inspiral.end.ns() for sngl_inspiral in sngl_inspirals
                    if sngl_inspiral.end is not None
                ])),
                mass=sim_inspiral.mass1 + sim_inspiral.mass2,
                mchirp=sim_inspiral.mchirp,
                combined_far=0.0,  # Not provided
                false_alarm_rate=0.0,  # Not provided
                minimum_duration=None,  # Not provided
                snr=net_snr)

            # Record all sngl_inspiral records and associate them with coincs.
            for sngl_inspiral, series in zip(sngl_inspirals, used_snr_series):
                # Give this sngl_inspiral record an id and add it to the table.
                sngl_inspiral.event_id = sngl_inspiral_table.get_next_id()
                sngl_inspiral_table.append(sngl_inspiral)

                if opts.enable_snr_series:
                    elem = lal.series.build_COMPLEX8TimeSeries(series)
                    elem.appendChild(
                        Param.from_pyvalue(u'event_id',
                                           sngl_inspiral.event_id))
                    xmlroot.appendChild(elem)

                # Add CoincMap entry.
                coinc_map_table.appendRow(
                    coinc_event_id=coinc.coinc_event_id,
                    table_name=sngl_inspiral_table.tableName,
                    event_id=sngl_inspiral.event_id)

            # Record injection
            if not opts.preserve_ids:
                sim_inspiral.simulation_id = sim_inspiral_table.get_next_id()
            sim_inspiral_table.append(sim_inspiral)

            count_coincs += 1
            progress.set_postfix(saved=count_coincs)

    # Record coincidence associating injections with events.
    for i, sim_inspiral in enumerate(sim_inspiral_table):
        coinc = coinc_table.appendRow(
            coinc_event_id=coinc_table.get_next_id(),
            process_id=process.process_id,
            coinc_def_id=found_coinc_def.coinc_def_id,
            time_slide_id=time_slide_id,
            instruments=None,
            nevents=None,
            likelihood=None)
        coinc_map_table.appendRow(coinc_event_id=coinc.coinc_event_id,
                                  table_name=sim_inspiral_table.tableName,
                                  event_id=sim_inspiral.simulation_id)
        coinc_map_table.appendRow(coinc_event_id=coinc.coinc_event_id,
                                  table_name=coinc_table.tableName,
                                  event_id=CoincID(i))

    # Record process end time.
    ligolw_process.set_process_end_time(process)

    # Write output file.
    write_fileobj(xmldoc, opts.output)
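
The seeding scheme described in the comments above (draw one base seed in the parent, then have worker i reseed with base + i + 1) can be sketched independently of the pipeline; the work function here is a stand-in:

import numpy as np
from multiprocessing import Pool

def work(args):
    seed, x = args
    np.random.seed(seed)          # distinct, reproducible state per task
    return x + np.random.uniform()

if __name__ == "__main__":
    n = 4
    # Keep every derived seed inside the unsigned 32-bit range.
    base = np.random.randint(0, 2**32 - n - 1, dtype=np.int64)
    jobs = zip(np.arange(n) + base + 1, range(n))
    print(list(Pool(2).imap(work, jobs)))
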
Example #14
def bucluster(
	xmldoc,
	program,
	process,
	prefunc,
	postfunc,
	testfunc,
	clusterfunc,
	sortfunc = None,
	bailoutfunc = None,
	verbose = False
):
	"""
	Run the clustering algorithm on the list of burst candidates.  The
	return value is the tuple (xmldoc, changed), where xmldoc is the
	input document, and changed is a boolean that is True if the
	contents of the sngl_burst table were altered, and False if the
	triggers were not modified by the clustering process.

	If the document does not contain a sngl_burst table, then the
	document is not modified (including no modifications to the process
	metadata tables).
	"""

	#
	# Extract live time segment and sngl_burst table
	#

	try:
		sngl_burst_table = lsctables.SnglBurstTable.get_table(xmldoc)
	except ValueError:
		# no-op:  document does not contain a sngl_burst table
		if verbose:
			print >>sys.stderr, "document does not contain a sngl_burst table, skipping ..."
		return xmldoc, False
	seglists = ligolw_search_summary.segmentlistdict_fromsearchsummary(xmldoc, program = program).coalesce()

	#
	# Preprocess candidates
	#

	if verbose:
		print >>sys.stderr, "pre-processing ..."
	preprocess_output = prefunc(sngl_burst_table)

	#
	# Cluster
	#

	table_changed = snglcluster.cluster_events(sngl_burst_table, testfunc, clusterfunc, sortfunc = sortfunc, bailoutfunc = bailoutfunc, verbose = verbose)

	#
	# Postprocess candidates
	#

	if verbose:
		print >>sys.stderr, "post-processing ..."
	postfunc(sngl_burst_table, preprocess_output)

	#
	# Update instrument list in process table and add search summary
	# information
	#

	process.instruments = seglists.keys()
	ligolw_search_summary.append_search_summary(xmldoc, process, inseg = seglists.extent_all(), outseg = seglists.extent_all(), nevents = len(sngl_burst_table))

	#
	# Done
	#

	return xmldoc, table_changed
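
extent_all(), used above for both inseg and outseg, collapses a per-instrument segmentlistdict into the single segment spanning everything it contains. A toy illustration with ligo.segments and made-up times:

from ligo import segments

seglists = segments.segmentlistdict({
    "H1": segments.segmentlist([segments.segment(0, 10)]),
    "L1": segments.segmentlist([segments.segment(5, 20)]),
})
print(seglists.extent_all())   # -> the segment [0, 20)
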
Example #16
def excess_power2(
    ts_data,  # Time series from magnetic field data
    psd_segment_length,  # Length of each segment in seconds
    psd_segment_stride,  # Separation between 2 consecutive segments in seconds
    psd_estimation,  # Average method
    window_fraction,  # Whitening window fraction
    tile_fap,  # Tile false alarm probability threshold in Gaussian noise.
    station,  # Station
    nchans=None,  # Total number of channels
    band=None,  # Channel bandwidth
    fmin=0,  # Lowest frequency of the filter bank.
    fmax=None,  # Highest frequency of the filter bank.
    max_duration=None,  # Maximum duration of the tile
    wtype='tukey'):  # Whitening type, can be 'tukey' or 'hann'
    """
    Perform excess-power search analysis on magnetic field data.
    This method will produce a set of time-frequency plots for every
    tile duration and bandwidth analysed, as well as an XML file
    identifying all the triggers found in the selected data within the
    user-defined time range.

    Parameters
    ----------
    ts_data : TimeSeries
      Time Series from magnetic field data
    psd_segment_length : float
      Length of each segment in seconds
    psd_segment_stride : float
      Separation between 2 consecutive segments in seconds
    psd_estimation : string
      Average method
    window_fraction : float
      Whitening window fraction
    tile_fap : float
      Tile false alarm probability threshold in Gaussian noise.
    nchans : int
      Total number of channels
    band : float
      Channel bandwidth
    fmin : float
      Lowest frequency of the filter bank.
    fmax : float
      Highest frequency of the filter bank
    """
    # Determine sampling rate based on extracted time series
    sample_rate = ts_data.sample_rate
    # Check if tile maximum frequency is not defined
    if fmax is None or fmax > sample_rate / 2.:
        # Set the tile maximum frequency equal to the Nyquist frequency
        # (i.e. half the sampling rate)
        fmax = sample_rate / 2.0
    # Check whether or not tile bandwidth and channel are defined
    if band is None and nchans is None:
        # Exit program with error message
        exit("Either bandwidth or number of channels must be specified...")
    else:
        # Check if tile maximum frequency larger than its minimum frequency
        assert fmax >= fmin
        # Define spectral band of data
        data_band = fmax - fmin
        # Check whether tile bandwidth or channel is defined
        if band is not None:
            # Define number of possible filter bands
            nchans = int(data_band / band) - 1
        elif nchans is not None:
            # Define filter bandwidth
            band = data_band / nchans
            nchans = nchans - 1
    # Check that the number of channels is greater than one
        assert nchans > 1
    # Print segment information
    print '|- Estimating PSD from segments of time',
    print '%.2f s in length, with %.2f s stride...' % (psd_segment_length,
                                                       psd_segment_stride)
    # Convert time series as array of float
    data = ts_data.astype(numpy.float64)
    # Define segment length for PSD estimation in sample unit
    seg_len = int(psd_segment_length * sample_rate)
    # Define separation between consecutive segments in sample unit
    seg_stride = int(psd_segment_stride * sample_rate)
    # Calculate the overall PSD from individual PSD segments
    fd_psd = psd.welch(data,
                       avg_method=psd_estimation,
                       seg_len=seg_len,
                       seg_stride=seg_stride)
    # We need this for the SWIG functions...
    lal_psd = fd_psd.lal()
    # Plot the power spectral density
    plot_spectrum(fd_psd)
    # Create whitening window
    print "|- Whitening window and spectral correlation..."
    if wtype == 'hann':
        window = lal.CreateHannREAL8Window(seg_len)
    elif wtype == 'tukey':
        window = lal.CreateTukeyREAL8Window(seg_len, window_fraction)
    else:
        raise ValueError("Can't handle window type %s" % wtype)
    # Create FFT plan
    fft_plan = lal.CreateForwardREAL8FFTPlan(len(window.data.data), 1)
    # Perform two point spectral correlation
    spec_corr = lal.REAL8WindowTwoPointSpectralCorrelation(window, fft_plan)
    # Initialise filter bank
    print "|- Create filter..."
    filter_bank, fdb = [], []
    # Loop over each channel
    for i in range(nchans):
        channel_flow = fmin + band / 2 + i * band
        channel_width = band
        # Create excess power filter
        lal_filter = lalburst.CreateExcessPowerFilter(channel_flow,
                                                      channel_width, lal_psd,
                                                      spec_corr)
        filter_bank.append(lal_filter)
        fdb.append(Spectrum.from_lal(lal_filter))
    # Calculate the minimum bandwidth
    min_band = (len(filter_bank[0].data.data) - 1) * filter_bank[0].deltaF / 2
    # Plot filter bank
    plot_bank(fdb)
    # Convert filter bank from frequency to time domain
    print "|- Convert all the frequency domain to the time domain..."
    tdb = []
    # Loop over each filter's spectrum
    for fdt in fdb:
        zero_padded = numpy.zeros(int((fdt.f0 / fdt.df).value) + len(fdt))
        st = int((fdt.f0 / fdt.df).value)
        zero_padded[st:st + len(fdt)] = numpy.real_if_close(fdt.value)
        n_freq = int(sample_rate / 2 / fdt.df.value) * 2
        tdt = numpy.fft.irfft(zero_padded, n_freq) * math.sqrt(sample_rate)
        tdt = numpy.roll(tdt, len(tdt) / 2)
        tdt = TimeSeries(tdt,
                         name="",
                         epoch=fdt.epoch,
                         sample_rate=sample_rate)
        tdb.append(tdt)
    # Plot time series filter
    plot_filters(tdb, fmin, band)
    # Compute the renormalization for the base filters up to a given bandwidth.
    mu_sq_dict = {}
    # Loop through powers of 2 up to number of channels
    for nc_sum in range(0, int(math.log(nchans, 2))):
        nc_sum = 2**nc_sum - 1
        print "|- Calculating renormalization for resolution level containing %d %fHz channels" % (
            nc_sum + 1, min_band)
        mu_sq = (nc_sum + 1) * numpy.array([
            lalburst.ExcessPowerFilterInnerProduct(f, f, spec_corr, None)
            for f in filter_bank
        ])
        # Uncomment to get all possible frequency renormalizations
        #for n in xrange(nc_sum, nchans): # channel position index
        for n in xrange(nc_sum, nchans, nc_sum + 1):  # channel position index
            for k in xrange(0, nc_sum):  # channel sum index
                # FIXME: We've precomputed this, so use it instead
                mu_sq[n] += 2 * lalburst.ExcessPowerFilterInnerProduct(
                    filter_bank[n - k], filter_bank[n - 1 - k], spec_corr,
                    None)
        #print mu_sq[nc_sum::nc_sum+1]
        mu_sq_dict[nc_sum] = mu_sq
    # Create an event list where all the triggers will be stored
    event_list = lsctables.New(lsctables.SnglBurstTable, [
        'start_time', 'start_time_ns', 'peak_time', 'peak_time_ns', 'duration',
        'bandwidth', 'central_freq', 'chisq_dof', 'confidence', 'snr',
        'amplitude', 'channel', 'ifo', 'process_id', 'event_id', 'search',
        'stop_time', 'stop_time_ns'
    ])
    # Create repositories to save TF and time series plots
    os.system('mkdir -p segments/time-frequency')
    os.system('mkdir -p segments/time-series')
    # Define time edges
    t_idx_min, t_idx_max = 0, seg_len
    while t_idx_max <= len(ts_data):
        # Define starting and ending time of the segment in seconds
        start_time = ts_data.start_time + t_idx_min / float(
            ts_data.sample_rate)
        end_time = ts_data.start_time + t_idx_max / float(ts_data.sample_rate)
        print "\n|-- Analyzing block %i to %i (%.2f percent)" % (
            start_time, end_time, 100 * float(t_idx_max) / len(ts_data))
        # Model a whitened time series for the block
        tmp_ts_data = types.TimeSeries(ts_data[t_idx_min:t_idx_max] *
                                       window.data.data,
                                       delta_t=1. / ts_data.sample_rate,
                                       epoch=start_time)
        # Save time series in relevant repository
        segfolder = 'segments/%i-%i' % (start_time, end_time)
        os.system('mkdir -p ' + segfolder)
        plot_ts(tmp_ts_data,
                fname='segments/time-series/%i-%i.png' %
                (start_time, end_time))
        # Convert time series to frequency series
        fs_data = tmp_ts_data.to_frequencyseries()
        print "|-- Frequency series data has variance: %s" % fs_data.data.std(
        )**2
        # Whitening (FIXME: Whiten the filters, not the data)
        fs_data.data /= numpy.sqrt(fd_psd) / numpy.sqrt(2 * fd_psd.delta_f)
        print "|-- Whitened frequency series data has variance: %s" % fs_data.data.std(
        )**2
        print "|-- Create time-frequency plane for current block"
        # Return the complex snr, along with its associated normalization of the template,
        # matched filtered against the data
        #filter.matched_filter_core(types.FrequencySeries(tmp_filter_bank,delta_f=fd_psd.delta_f),
        #                           fs_data,h_norm=1,psd=fd_psd,low_frequency_cutoff=filter_bank[0].f0,
        #                           high_frequency_cutoff=filter_bank[0].f0+2*band)
        print "|-- Filtering all %d channels..." % nchans
        # Initialise 2D zero array
        tmp_filter_bank = numpy.zeros(len(fd_psd), dtype=numpy.complex128)
        # Initialise 2D zero array for time-frequency map
        tf_map = numpy.zeros((nchans, seg_len), dtype=numpy.complex128)
        # Loop over all the channels
        for i in range(nchans):
            # Reset filter bank series
            tmp_filter_bank *= 0.0
            # Index of starting frequency
            f1 = int(filter_bank[i].f0 / fd_psd.delta_f)
            # Index of ending frequency
            f2 = int((filter_bank[i].f0 + 2 * band) / fd_psd.delta_f) + 1
            # (FIXME: Why is there a factor of 2 here?)
            tmp_filter_bank[f1:f2] = filter_bank[i].data.data * 2
            # Define the template to filter the frequency series with
            template = types.FrequencySeries(tmp_filter_bank,
                                             delta_f=fd_psd.delta_f,
                                             copy=False)
            # Create filtered series
            filtered_series = filter.matched_filter_core(
                template,
                fs_data,
                h_norm=None,
                psd=None,
                low_frequency_cutoff=filter_bank[i].f0,
                high_frequency_cutoff=filter_bank[i].f0 + 2 * band)
            # Include filtered series in the map
            tf_map[i, :] = filtered_series[0].numpy()
        # Plot spectrogram
        plot_spectrogram(numpy.abs(tf_map).T,
                         tmp_ts_data.delta_t,
                         band,
                         ts_data.sample_rate,
                         start_time,
                         end_time,
                         fname='segments/time-frequency/%i-%i.png' %
                         (start_time, end_time))
        # Loop through all summed channels
        for nc_sum in range(0, int(math.log(nchans, 2)))[::-1]:
            nc_sum = 2**nc_sum - 1
            mu_sq = mu_sq_dict[nc_sum]
            # Clip the boundaries to remove window corruption
            clip_samples = int(psd_segment_length * window_fraction *
                               ts_data.sample_rate / 2)
            # Construct tiles and calculate their energies
            print "\n|--- Constructing tile with %d summed channels..." % (
                nc_sum + 1)
            # Current bandwidth of the time-frequency map tiles
            df = band * (nc_sum + 1)
            dt = 1.0 / (2 * df)
            # How much each "step" is in the time domain -- under sampling rate
            us_rate = int(round(dt / ts_data.delta_t))
            print "|--- Undersampling rate for this level: %f" % (
                ts_data.sample_rate / us_rate)
            print "|--- Calculating tiles..."
            # Making independent tiles
            # because [0:-0] does not give the full array
            tf_map_temp = tf_map[:,clip_samples:-clip_samples:us_rate] \
                          if clip_samples > 0 else tf_map[:,::us_rate]
            tiles = tf_map_temp.copy()
            # Here's the deal: we're going to keep only the valid output and
            # it's *always* going to exist in the lowest available indices
            stride = nc_sum + 1
            for i in xrange(tiles.shape[0] / stride):
                numpy.absolute(tiles[stride * i:stride * (i + 1)].sum(axis=0),
                               tiles[stride * (i + 1) - 1])
            tiles = tiles[nc_sum::nc_sum + 1].real**2 / mu_sq[nc_sum::nc_sum +
                                                              1].reshape(
                                                                  -1, 1)
            print "|--- TF-plane is %dx%s samples" % tiles.shape
            print "|--- Tile energy mean %f, var %f" % (numpy.mean(tiles),
                                                        numpy.var(tiles))
            # Define the maximum number of degrees of freedom and check that it is at least 2
            max_dof = 32 if max_duration is None else 2 * max_duration * df
            assert max_dof >= 2
            # Loop through multiple degrees of freedom
            for j in [2**l for l in xrange(0, int(math.log(max_dof, 2)))]:
                # Duration is fixed by the NDOF and bandwidth
                duration = j * dt
                print "\n|----- Explore signal duration of %f s..." % duration
                print "|----- Summing DOF = %d ..." % (2 * j)
                tlen = tiles.shape[1] - 2 * j + 1 + 1
                dof_tiles = numpy.zeros((tiles.shape[0], tlen))
                sum_filter = numpy.array([1, 0] * (j - 1) + [1])
                for f in range(tiles.shape[0]):
                    # Sum and drop correlated tiles
                    dof_tiles[f] = fftconvolve(tiles[f], sum_filter, 'valid')
                print "|----- Summed tile energy mean: %f, var %f" % (
                    numpy.mean(dof_tiles), numpy.var(dof_tiles))
                plot_spectrogram(
                    dof_tiles.T,
                    dt,
                    df,
                    ts_data.sample_rate,
                    start_time,
                    end_time,
                    fname='segments/%i-%i/tf_%02ichans_%02idof.png' %
                    (start_time, end_time, nc_sum + 1, 2 * j))
                threshold = scipy.stats.chi2.isf(tile_fap, j)
                print "|------ Threshold for this level: %f" % threshold
                spant, spanf = dof_tiles.shape[1] * dt, dof_tiles.shape[0] * df
                print "|------ Processing %.2fx%.2f time-frequency map." % (
                    spant, spanf)
                # Since we clip the data, the start time needs to be adjusted accordingly
                window_offset_epoch = fs_data.epoch + psd_segment_length * window_fraction / 2
                window_offset_epoch = LIGOTimeGPS(float(window_offset_epoch))
                for i, j in zip(*numpy.where(dof_tiles > threshold)):
                    event = event_list.RowType()
                    # The points are summed forward in time and thus a `summed point' is the
                    # sum of the previous N points. If this point is above threshold, it
                    # corresponds to a tile which spans the previous N points. However, the
                    # 0th point (due to the convolution specifier 'valid') is actually
                    # already a duration from the start time. All of this means, the +
                    # duration and the - duration cancels, and the tile 'start' is, by
                    # definition, the start of the time frequency map if j = 0
                    # FIXME: I think this needs a + dt/2 to center the tile properly
                    event.set_start(window_offset_epoch + float(j * dt))
                    event.set_stop(window_offset_epoch + float(j * dt) +
                                   duration)
                    event.set_peak(event.get_start() + duration / 2)
                    event.central_freq = filter_bank[
                        0].f0 + band / 2 + i * df + 0.5 * df
                    event.duration = duration
                    event.bandwidth = df
                    event.chisq_dof = 2 * duration * df
                    event.snr = math.sqrt(dof_tiles[i, j] / event.chisq_dof -
                                          1)
                    # FIXME: Magic number 0.62 should be determined empirically
                    event.confidence = -lal.LogChisqCCDF(
                        event.snr * 0.62, event.chisq_dof * 0.62)
                    event.amplitude = None
                    event.process_id = None
                    event.event_id = event_list.get_next_id()
                    event_list.append(event)
                for event in event_list[::-1]:
                    if event.amplitude is not None:
                        continue
                    etime_min_idx = float(event.get_start()) - float(
                        fs_data.epoch)
                    etime_min_idx = int(etime_min_idx / tmp_ts_data.delta_t)
                    etime_max_idx = float(event.get_start()) - float(
                        fs_data.epoch) + event.duration
                    etime_max_idx = int(etime_max_idx / tmp_ts_data.delta_t)
                    # (band / 2) to account for sin^2 wings from finest filters
                    flow_idx = int((event.central_freq - event.bandwidth / 2 -
                                    (df / 2) - fmin) / df)
                    fhigh_idx = int((event.central_freq + event.bandwidth / 2 +
                                     (df / 2) - fmin) / df)
                    # TODO: Check that the undersampling rate is always commensurate
                    # with the indexing: that is to say that
                    # mod(etime_min_idx, us_rate) == 0 always
                    z_j_b = tf_map[flow_idx:fhigh_idx,
                                   etime_min_idx:etime_max_idx:us_rate]
                    event.amplitude = 0
                print "|------ Total number of events: %d" % len(event_list)
        t_idx_min += int(seg_len * (1 - window_fraction))
        t_idx_max += int(seg_len * (1 - window_fraction))
    setname = "MagneticFields"
    __program__ = 'pyburst_excesspower'
    start_time = LIGOTimeGPS(int(ts_data.start_time))
    end_time = LIGOTimeGPS(int(ts_data.end_time))
    inseg = segment(start_time, end_time)
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    ifo = 'H1'  #channel_name.split(":")[0]
    straindict = psd.insert_psd_option_group.__dict__
    proc_row = register_to_xmldoc(xmldoc,
                                  __program__,
                                  straindict,
                                  ifos=[ifo],
                                  version=git_version.id,
                                  cvs_repository=git_version.branch,
                                  cvs_entry_time=git_version.date)
    dt_stride = psd_segment_length
    sample_rate = ts_data.sample_rate
    # Amount to overlap successive blocks so as not to lose data
    window_overlap_samples = window_fraction * sample_rate
    outseg = inseg.contract(window_fraction * dt_stride / 2)
    # With a given dt_stride, we cannot process the remainder of this data
    remainder = math.fmod(abs(outseg), dt_stride * (1 - window_fraction))
    # ...so make an accounting of it
    outseg = segment(outseg[0], outseg[1] - remainder)
    ss = append_search_summary(xmldoc,
                               proc_row,
                               ifos=(station, ),
                               inseg=inseg,
                               outseg=outseg)
    for sb in event_list:
        sb.process_id = proc_row.process_id
        sb.search = proc_row.program
        sb.ifo, sb.channel = station, setname
    xmldoc.childNodes[0].appendChild(event_list)
    fname = 'excesspower.xml.gz'
    utils.write_filename(xmldoc, fname, gz=fname.endswith("gz"))
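
The per-tile threshold in this function comes from inverting the chi-squared survival function at the requested false-alarm probability: scipy.stats.chi2.isf(tile_fap, dof) is the tile energy that Gaussian noise exceeds with probability tile_fap. A quick standalone check:

from scipy.stats import chi2

tile_fap, dof = 1e-7, 4
threshold = chi2.isf(tile_fap, dof)   # ~38.2 for these values
print(chi2.sf(threshold, dof))        # recovers ~1e-7
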
Пример #17
0
def ligolw_bucluster(xmldoc,
                     program,
                     process,
                     prefunc,
                     postfunc,
                     testfunc,
                     clusterfunc,
                     sortfunc=None,
                     bailoutfunc=None,
                     verbose=False):
    """
	Run the clustering algorithm on the list of burst candidates.  The
	return value is the tuple (xmldoc, changed), where xmldoc is the
	input document, and changed is a boolean that is True if the
	contents of the sngl_burst table were altered, and False if the
	triggers were not modified by the clustering process.

	If the document does not contain a sngl_burst table, then the
	document is not modified (including no modifications to the process
	metadata tables).
	"""

    #
    # Extract live time segment and sngl_burst table
    #

    try:
        sngl_burst_table = lsctables.SnglBurstTable.get_table(xmldoc)
    except ValueError:
        # no-op:  document does not contain a sngl_burst table
        if verbose:
            print >> sys.stderr, "document does not contain a sngl_burst table, skipping ..."
        return xmldoc, False
    seglists = ligolw_search_summary.segmentlistdict_fromsearchsummary(
        xmldoc, program=program).coalesce()

    #
    # Remove all H2 triggers intersecting the frequency band
    # 1138.6 Hz -- 1216.0 Hz
    #
    # FIXME:  put this into the excess power pipeline, correctly
    #

    #bad_band = segments.segment(1138.586956521739, 1216.0326086956522)
    #for i in xrange(len(sngl_burst_table) - 1, -1, -1):
    #	a = sngl_burst_table[i]
    #	if a.ifo == "H2" and a.band.intersects(bad_band):
    #		del sngl_burst_table[i]

    #
    # Preprocess candidates
    #

    if verbose:
        print >> sys.stderr, "pre-processing ..."
    preprocess_output = prefunc(sngl_burst_table)

    #
    # Cluster
    #

    table_changed = snglcluster.cluster_events(sngl_burst_table,
                                               testfunc,
                                               clusterfunc,
                                               sortfunc=sortfunc,
                                               bailoutfunc=bailoutfunc,
                                               verbose=verbose)

    #
    # Postprocess candidates
    #

    if verbose:
        print >> sys.stderr, "post-processing ..."
    postfunc(sngl_burst_table, preprocess_output)

    #
    # Update instrument list in process table and add search summary
    # information
    #

    process.instruments = seglists.keys()
    ligolw_search_summary.append_search_summary(xmldoc,
                                                process,
                                                inseg=seglists.extent_all(),
                                                outseg=seglists.extent_all(),
                                                nevents=len(sngl_burst_table))

    #
    # Done
    #

    return xmldoc, table_changed
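For reference, a minimal sketch of how ligolw_bucluster might be driven. The
callbacks below are hypothetical placeholders: it assumes testfunc follows a
cmp()-style contract (return 0 when two triggers should be merged) and that
clusterfunc merges its second argument into its first, which is how
snglcluster.cluster_events is used here.

def prefunc(sngl_burst_table):
    # no-op preprocessing; the return value is handed back to postfunc
    return None

def postfunc(sngl_burst_table, preprocess_output):
    # no-op postprocessing
    pass

def testfunc(a, b):
    # assumed contract: 0 means "cluster these two triggers"
    return 0 if abs(float(a.get_peak()) - float(b.get_peak())) < 0.1 else 1

def clusterfunc(a, b):
    # placeholder merge rule: keep the larger SNR on the surviving trigger
    a.snr = max(a.snr, b.snr)

xmldoc, changed = ligolw_bucluster(xmldoc, "my_program", process, prefunc,
                                   postfunc, testfunc, clusterfunc,
                                   verbose=True)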
Example #18
    #
    # Clean up.
    #

    contents.xmldoc.unlink()
    connection.close()
    dbtables.discard_connection_filename(filename,
                                         working_filename,
                                         verbose=options.verbose)

#
# Output.
#

ligolw_search_summary.append_search_summary(xmldoc,
                                            process,
                                            ifos=segs.keys(),
                                            inseg=segs.extent_all(),
                                            outseg=segs.extent_all())
xmldoc.childNodes[-1].appendChild(
    distributions.to_xml(u"string_cusp_likelihood"))
ligolw_process.set_process_end_time(process)


def T010150_basename(instruments, description, seg):
    start = int(math.floor(seg[0]))
    duration = int(math.ceil(seg[1] - start))
    return "%s-%s-%d-%d" % ("+".join(
        sorted(instruments)), description, start, duration)
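A quick usage example; per the T010150 convention the instruments are sorted
and joined with "+", and the start and duration are rounded outward to
integers (the inputs below are illustrative):

# prints "H1+L1-EXCESSPOWER-1000000000-64"
print T010150_basename(("L1", "H1"), "EXCESSPOWER",
                       segment(1000000000.0, 1000000063.2))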


if options.T010150:
Example #19
def excess_power(
    ts_data,  # Time series from magnetic field data 
    band=None,  # Channel bandwidth
    channel_name='channel-name',  # Channel name
    fmin=0,  # Lowest frequency of the filter bank.
    fmax=None,  # Highest frequency of the filter bank.
    impulse=False,  # Inject an impulse to test the response
    make_plot=True,  # Whether to produce plots
    max_duration=None,  # Maximum duration of the tile
    nchans=256,  # Total number of channels
    psd_estimation='median-mean',  # Average method
    psd_segment_length=60,  # Length of each segment in seconds
    psd_segment_stride=30,  # Separation between 2 consecutive segments in seconds
    station='station-name',  # Station name
    tile_fap=1e-7,  # Tile false alarm probability threshold in Gaussian noise.
    verbose=True,  # Print details
    window_fraction=0,  # Whitening window fraction
    wtype='tukey'):  # Whitening type, can be 'tukey' or 'hann'
    '''
    Perform excess-power search analysis on magnetic field data.
    This method produces time-frequency plots for every tile duration
    and bandwidth analysed, as well as an XML file identifying all the
    triggers found in the selected data within the user-defined time
    range.

    Parameters
    ----------
    ts_data : TimeSeries
      Time Series from magnetic field data
    psd_segment_length : float
      Length of each segment in seconds
    psd_segment_stride : float
      Separation between 2 consecutive segments in seconds
    psd_estimation : string
      Average method
    window_fraction : float
      Whitening window fraction
    tile_fap : float
      Tile false alarm probability threshold in Gaussian noise.
    nchans : int
      Total number of channels
    band : float
      Channel bandwidth
    fmin : float
      Lowest frequency of the filter bank.
    fmax : float
      Highest frequency of the filter bank

    Examples
    --------
    The program can be run as an executable by using the ``excesspower`` command
    line as follows::

      excesspower --station "mainz01" \\
                  --start-time "2017-04-15-17-1" \\
                  --end-time "2017-04-15-18" \\
                  --rep "/Users/vincent/ASTRO/data/GNOME/GNOMEDrive/gnome/serverdata/" \\
                  --resample 512 \\
                  --verbose

    '''
    # Determine sampling rate based on extracted time series
    sample_rate = ts_data.sample_rate
    # Check if tile maximum frequency is not defined
    if fmax is None or fmax > sample_rate / 2.:
        # Set the tile maximum frequency equal to the Nyquist frequency
        # (i.e. half the sampling rate)
        fmax = sample_rate / 2.0
    # Check whether the tile bandwidth and number of channels are defined
    if band is None and nchans is None:
        # Exit program with error message
        exit("Either bandwidth or number of channels must be specified...")
    else:
        # Check that the maximum frequency is larger than the minimum frequency
        assert fmax >= fmin
        # Define spectral band of data
        data_band = fmax - fmin
        # Check whether tile bandwidth or channel is defined
        if band is not None:
            # Define number of possible filter bands
            nchans = int(data_band / band)
        elif nchans is not None:
            # Define filter bandwidth
            band = data_band / nchans
            nchans -= 1
        # Check that the number of channels is greater than one
        assert nchans > 1
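        # Worked example (illustrative): with fmin = 0 Hz, fmax = 256 Hz and
        # nchans = 256, data_band = 256 Hz, so band = 1.0 Hz and nchans is
        # decremented to 255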
    # Print segment information
    if verbose:
        print '|- Estimating PSD from segments of %.2f s, with %.2f s stride...' % (
            psd_segment_length, psd_segment_stride)
    # Convert the time series to an array of floats
    data = ts_data.astype(numpy.float64)
    # Define segment length for PSD estimation in sample unit
    seg_len = int(psd_segment_length * sample_rate)
    # Define separation between consecutive segments in sample unit
    seg_stride = int(psd_segment_stride * sample_rate)
    # Frequency resolution of the PSD estimate
    delta_f = 1. / psd_segment_length
    # Calculate PSD length counting the zero frequency element
    fd_len = fmax / delta_f + 1
    # Calculate the overall PSD from individual PSD segments
    if impulse:
        # Produce flat data
        flat_data = numpy.ones(int(fd_len)) * 2. / fd_len
        # Create PSD frequency series
        fd_psd = types.FrequencySeries(flat_data, 1. / psd_segment_length,
                                       ts_data.start_time)
    else:
        # Create overall PSD using Welch's method
        fd_psd = psd.welch(data,
                           avg_method=psd_estimation,
                           seg_len=seg_len,
                           seg_stride=seg_stride)
    if make_plot:
        # Plot the power spectral density
        plot_spectrum(fd_psd)
    # We need this for the SWIG functions
    lal_psd = fd_psd.lal()
    # Create whitening window
    if verbose: print "|- Whitening window and spectral correlation..."
    if wtype == 'hann': window = lal.CreateHannREAL8Window(seg_len)
    elif wtype == 'tukey':
        window = lal.CreateTukeyREAL8Window(seg_len, window_fraction)
    else:
        raise ValueError("Can't handle window type %s" % wtype)
    # Create FFT plan
    fft_plan = lal.CreateForwardREAL8FFTPlan(len(window.data.data), 1)
    # Perform two point spectral correlation
    spec_corr = lal.REAL8WindowTwoPointSpectralCorrelation(window, fft_plan)
    # Determine length of individual filters
    filter_length = int(2 * band / fd_psd.delta_f) + 1
    # Initialise filter bank
    if verbose:
        print "|- Creating bank of %i filters of %i samples each..." % (
            nchans, filter_length)
    # Initialise array to store filter's frequency series and metadata
    lal_filters = []
    # Initialise array to store filter's time series
    fdb = []
    # Loop over the channels
    for i in range(nchans):
        # Define central position of the filter
        freq = fmin + band / 2 + i * band
        # Create excess power filter
        lal_filter = lalburst.CreateExcessPowerFilter(freq, band, lal_psd,
                                                      spec_corr)
        # Testing spectral correlation on filter
        #print lalburst.ExcessPowerFilterInnerProduct(lal_filter, lal_filter, spec_corr, None)
        # Append entire filter structure
        lal_filters.append(lal_filter)
        # Append filter's spectrum
        fdb.append(FrequencySeries.from_lal(lal_filter))
        #print fdb[0].frequencies
        #print fdb[0]
    if make_plot:
        # Plot filter bank
        plot_bank(fdb)
        # Convert filter bank from frequency to time domain
        if verbose:
            print "|- Converting all filters from frequency domain to time domain..."
        tdb = []
        # Loop for each filter's spectrum
        for fdt in fdb:
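            # Pad the spectrum down to DC so the inverse FFT sees the filter
            # at its true frequency offset, then transform and roll the
            # result so the impulse response is centred in the array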
            zero_padded = numpy.zeros(int((fdt.f0 / fdt.df).value) + len(fdt))
            st = int((fdt.f0 / fdt.df).value)
            zero_padded[st:st + len(fdt)] = numpy.real_if_close(fdt.value)
            n_freq = int(sample_rate / 2 / fdt.df.value) * 2
            tdt = numpy.fft.irfft(zero_padded, n_freq) * math.sqrt(sample_rate)
            tdt = numpy.roll(tdt, len(tdt) / 2)
            tdt = TimeSeries(tdt,
                             name="",
                             epoch=fdt.epoch,
                             sample_rate=sample_rate)
            tdb.append(tdt)
        # Plot time series filter
        plot_filters(tdb, fmin, band)
    # Compute whitened inner products of input filters with themselves
    white_filter_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f, f, spec_corr, None) for f in lal_filters])
    # Compute unwhitened inner products of input filters with themselves
    unwhite_filter_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f, f, spec_corr, lal_psd) for f in lal_filters])
    # Compute whitened filter inner products between adjacent input filters
    white_ss_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f1, f2, spec_corr, None) for f1, f2 in zip(lal_filters[:-1], lal_filters[1:])])
    # Compute unwhitened filter inner products between adjacent input filters
    # (these arrays are required by measure_hrss when amplitudes are assigned below)
    unwhite_ss_ip = numpy.array([lalburst.ExcessPowerFilterInnerProduct(f1, f2, spec_corr, lal_psd) for f1, f2 in zip(lal_filters[:-1], lal_filters[1:])])
    # Check filter's bandwidth is equal to user defined channel bandwidth
    min_band = (len(lal_filters[0].data.data) - 1) * lal_filters[0].deltaF / 2
    assert min_band == band
    # Create an event list where all the triggers will be stored
    event_list = lsctables.New(lsctables.SnglBurstTable, [
        'start_time', 'start_time_ns', 'peak_time', 'peak_time_ns', 'duration',
        'bandwidth', 'central_freq', 'chisq_dof', 'confidence', 'snr',
        'amplitude', 'channel', 'ifo', 'process_id', 'event_id', 'search',
        'stop_time', 'stop_time_ns'
    ])
    # Create repositories to save TF and time series plots
    os.system('mkdir -p segments/time-frequency')
    os.system('mkdir -p segments/time-series')
    # Define time edges
    t_idx_min, t_idx_max = 0, seg_len
    # Loop over each segment
    while t_idx_max <= len(ts_data):
        # Define first and last timestamps of the block
        start_time = ts_data.start_time + t_idx_min / float(
            ts_data.sample_rate)
        end_time = ts_data.start_time + t_idx_max / float(ts_data.sample_rate)
        if verbose:
            print "\n|- Analyzing block %i to %i (%.2f percent)" % (
                start_time, end_time, 100 * float(t_idx_max) / len(ts_data))
        # Debug for impulse response
        if impulse:
            for i in range(t_idx_min, t_idx_max):
                ts_data[i] = 1000. if i == (t_idx_max + t_idx_min) / 2 else 0.
        # Apply the analysis window to the time series for this block
        tmp_ts_data = types.TimeSeries(ts_data[t_idx_min:t_idx_max] *
                                       window.data.data,
                                       delta_t=1. / ts_data.sample_rate,
                                       epoch=start_time)
        # Save time series in relevant repository
        os.system('mkdir -p segments/%i-%i' % (start_time, end_time))
        if make_plot:
            # Plot time series
            plot_ts(tmp_ts_data,
                    fname='segments/time-series/%i-%i.png' %
                    (start_time, end_time))
        # Convert times series to frequency series
        fs_data = tmp_ts_data.to_frequencyseries()
        if verbose:
            print "|- Frequency series data has variance: %s" % fs_data.data.std()**2
        # Whitening (FIXME: Whiten the filters, not the data)
        fs_data.data /= numpy.sqrt(fd_psd) / numpy.sqrt(2 * fd_psd.delta_f)
        if verbose:
            print "|- Whitened frequency series data has variance: %s" % fs_data.data.std()**2
        if verbose: print "|- Create time-frequency plane for current block"
        # Return the complex snr, along with its associated normalization of the template,
        # matched filtered against the data
        #filter.matched_filter_core(types.FrequencySeries(tmp_filter_bank,delta_f=fd_psd.delta_f),
        #                           fs_data,h_norm=1,psd=fd_psd,low_frequency_cutoff=lal_filters[0].f0,
        #                           high_frequency_cutoff=lal_filters[0].f0+2*band)
        if verbose: print "|- Filtering all %d channels...\n" % nchans,
        # Initialise 1D complex array to hold one filter template
        tmp_filter_bank = numpy.zeros(len(fd_psd), dtype=numpy.complex128)
        # Initialise 2D zero array for time-frequency map
        tf_map = numpy.zeros((nchans, seg_len), dtype=numpy.complex128)
        # Loop over all the channels
        for i in range(nchans):
            # Reset filter bank series
            tmp_filter_bank *= 0.0
            # Index of starting frequency
            f1 = int(lal_filters[i].f0 / fd_psd.delta_f)
            # Index of last frequency bin
            f2 = int((lal_filters[i].f0 + 2 * band) / fd_psd.delta_f) + 1
            # (FIXME: Why is there a factor of 2 here?)
            tmp_filter_bank[f1:f2] = lal_filters[i].data.data * 2
            # Define the template to filter the frequency series with
            template = types.FrequencySeries(tmp_filter_bank,
                                             delta_f=fd_psd.delta_f,
                                             copy=False)
            # Create filtered series
            filtered_series = filter.matched_filter_core(
                template,
                fs_data,
                h_norm=None,
                psd=None,
                low_frequency_cutoff=lal_filters[i].f0,
                high_frequency_cutoff=lal_filters[i].f0 + 2 * band)
            # Include filtered series in the map
            tf_map[i, :] = filtered_series[0].numpy()
        if make_plot:
            # Plot spectrogram
            plot_spectrogram(numpy.abs(tf_map).T,
                             dt=tmp_ts_data.delta_t,
                             df=band,
                             ymax=ts_data.sample_rate / 2.,
                             t0=start_time,
                             t1=end_time,
                             fname='segments/time-frequency/%i-%i.png' %
                             (start_time, end_time))
            plot_tiles_ts(numpy.abs(tf_map),
                          2,
                          1,
                          sample_rate=ts_data.sample_rate,
                          t0=start_time,
                          t1=end_time,
                          fname='segments/%i-%i/ts.png' %
                          (start_time, end_time))
            #plot_tiles_tf(numpy.abs(tf_map),2,1,ymax=ts_data.sample_rate/2,
            #              sample_rate=ts_data.sample_rate,t0=start_time,t1=end_time,
            #              fname='segments/%i-%i/tf.png'%(start_time,end_time))
        # Loop through powers of 2 up to number of channels
        for nc_sum in range(0, int(math.log(nchans, 2)))[::-1]:
            # Calculate total number of summed channels
            nc_sum = 2**nc_sum
            if verbose:
                print "\n\t|- Constructing tiles containing %d narrow band channels" % nc_sum
            # Compute full bandwidth of virtual channel
            df = band * nc_sum
            # Compute minimal signal's duration in virtual channel
            dt = 1.0 / (2 * df)
            # Compute the undersampling (decimation) factor
            us_rate = int(round(dt / ts_data.delta_t))
            if verbose:
                print "\t|- Undersampling rate for this level: %f" % (
                    ts_data.sample_rate / us_rate)
            if verbose: print "\t|- Calculating tiles..."
            # Clip the boundaries to remove window corruption
            clip_samples = int(psd_segment_length * window_fraction *
                               ts_data.sample_rate / 2)
            # Undersample narrow band channel's time series
            # Apply clipping condition because [0:-0] does not give the full array
            tf_map_temp = tf_map[:,clip_samples:-clip_samples:us_rate] \
                          if clip_samples > 0 else tf_map[:,::us_rate]
            # Initialise final tile time-frequency map
            tiles = numpy.zeros(((nchans + 1) / nc_sum, tf_map_temp.shape[1]))
            # Loop over tile index
            for i in xrange(len(tiles)):
                # Sum all inner narrow band channels
                ts_tile = numpy.absolute(tf_map_temp[nc_sum * i:nc_sum *
                                                     (i + 1)].sum(axis=0))
                # Define index of last narrow band channel for given tile
                n = (i + 1) * nc_sum - 1
                n = n - 1 if n == len(lal_filters) else n
                # Compute the whitened inner product of each input filter with itself
                mu_sq = nc_sum * lalburst.ExcessPowerFilterInnerProduct(
                    lal_filters[n], lal_filters[n], spec_corr, None)
                #kmax = nc_sum-1 if n==len(lal_filters) else nc_sum-2
                # Loop over the inner narrow band channels
                for k in xrange(0, nc_sum - 1):
                    # Compute whitened filter inner products between adjacent input filters
                    mu_sq += 2 * lalburst.ExcessPowerFilterInnerProduct(
                        lal_filters[n - k], lal_filters[n - 1 - k], spec_corr,
                        None)
                # Normalise tile's time series
                tiles[i] = ts_tile.real**2 / mu_sq
            if verbose: print "\t|- TF-plane is %dx%d samples" % tiles.shape
            if verbose:
                print "\t|- Tile energy mean %f, var %f" % (numpy.mean(tiles),
                                                            numpy.var(tiles))
            # Define maximum number of degrees of freedom and check it is at least 2
            max_dof = 32 if max_duration is None else int(max_duration / dt)
            assert max_dof >= 2
            # Loop through multiple degrees of freedom
            for j in [2**l for l in xrange(0, int(math.log(max_dof, 2)))]:
                # Duration is fixed by the NDOF and bandwidth
                duration = j * dt
                if verbose: print "\n\t\t|- Summing DOF = %d ..." % (2 * j)
                if verbose:
                    print "\t\t|- Explore signal duration of %f s..." % duration
                # Construct filter
                sum_filter = numpy.array([1, 0] * (j - 1) + [1])
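                # e.g. j = 4 gives [1, 0, 1, 0, 1, 0, 1]: convolving with this
                # comb sums j tile samples spaced two apart, i.e. every other
                # (approximately independent) sample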
                # Calculate length of filtered time series
                tlen = tiles.shape[1] - sum_filter.shape[0] + 1
                # Initialise filtered time series array
                dof_tiles = numpy.zeros((tiles.shape[0], tlen))
                # Loop over tiles
                for f in range(tiles.shape[0]):
                    # Sum j alternate (independent) tile samples via convolution
                    dof_tiles[f] = fftconvolve(tiles[f], sum_filter, 'valid')
                if verbose:
                    print "\t\t|- Summed tile energy mean: %f" % numpy.mean(dof_tiles)
                    print "\t\t|- Summed tile energy variance: %f" % numpy.var(dof_tiles)
                if make_plot:
                    plot_spectrogram(
                        dof_tiles.T,
                        dt,
                        df,
                        ymax=ts_data.sample_rate / 2,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof.png' %
                        (start_time, end_time, nc_sum, 2 * j))
                    plot_tiles_ts(
                        dof_tiles,
                        2 * j,
                        df,
                        sample_rate=ts_data.sample_rate / us_rate,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof_ts.png' %
                        (start_time, end_time, nc_sum, 2 * j))
                    plot_tiles_tf(
                        dof_tiles,
                        2 * j,
                        df,
                        ymax=ts_data.sample_rate / 2,
                        sample_rate=ts_data.sample_rate / us_rate,
                        t0=start_time,
                        t1=end_time,
                        fname='segments/%i-%i/%02ichans_%02idof_tf.png' %
                        (start_time, end_time, nc_sum, 2 * j))
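                # The summed tile energies follow a chi^2 distribution in
                # Gaussian noise, so inverting the survival function at the
                # requested false-alarm probability gives the energy
                # threshold; e.g. for 2 DOF the survival function is
                # exp(-x/2), so isf(1e-7, 2) = -2*ln(1e-7) ~ 32.2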
                threshold = scipy.stats.chi2.isf(tile_fap, j)
                if verbose:
                    print "\t\t|- Threshold for this level: %f" % threshold
                spant, spanf = dof_tiles.shape[1] * dt, dof_tiles.shape[0] * df
                if verbose:
                    print "\t\t|- Processing %.2f s x %.2f Hz time-frequency map." % (
                        spant, spanf)
                # Since we clip the data, the start time needs to be adjusted accordingly
                window_offset_epoch = fs_data.epoch + psd_segment_length * window_fraction / 2
                window_offset_epoch = LIGOTimeGPS(float(window_offset_epoch))
                for i, j in zip(*numpy.where(dof_tiles > threshold)):
                    event = event_list.RowType()
                    # The points are summed forward in time and thus a `summed point' is the
                    # sum of the previous N points. If this point is above threshold, it
                    # corresponds to a tile which spans the previous N points. However, the
                    # 0th point (due to the convolution specifier 'valid') is actually
                    # already a duration from the start time. All of this means the
                    # +duration and the -duration cancel, and the tile 'start' is, by
                    # definition, the start of the time-frequency map if j = 0
                    # FIXME: I think this needs a + dt/2 to center the tile properly
                    event.set_start(window_offset_epoch + float(j * dt))
                    event.set_stop(window_offset_epoch + float(j * dt) +
                                   duration)
                    event.set_peak(event.get_start() + duration / 2)
                    event.central_freq = lal_filters[
                        0].f0 + band / 2 + i * df + 0.5 * df
                    event.duration = duration
                    event.bandwidth = df
                    event.chisq_dof = 2 * duration * df
                    event.snr = math.sqrt(dof_tiles[i, j] / event.chisq_dof - 1)
                    # FIXME: the magic number 0.62 should be determined empirically
                    event.confidence = -lal.LogChisqCCDF(
                        event.snr * 0.62, event.chisq_dof * 0.62)
                    event.amplitude = None
                    event.process_id = None
                    event.event_id = event_list.get_next_id()
                    event_list.append(event)
                for event in event_list[::-1]:
                    if event.amplitude is not None:
                        continue
                    etime_min_idx = float(event.get_start()) - float(
                        fs_data.epoch)
                    etime_min_idx = int(etime_min_idx / tmp_ts_data.delta_t)
                    etime_max_idx = float(event.get_start()) - float(
                        fs_data.epoch) + event.duration
                    etime_max_idx = int(etime_max_idx / tmp_ts_data.delta_t)
                    # (band / 2) to account for sin^2 wings from finest filters
                    flow_idx = int((event.central_freq - event.bandwidth / 2 -
                                    (df / 2) - fmin) / df)
                    fhigh_idx = int((event.central_freq + event.bandwidth / 2 +
                                     (df / 2) - fmin) / df)
                    # TODO: Check that the undersampling rate is always commensurate
                    # with the indexing: that is to say that
                    # mod(etime_min_idx, us_rate) == 0 always
                    z_j_b = tf_map[flow_idx:fhigh_idx,
                                   etime_min_idx:etime_max_idx:us_rate]
                    # FIXME: Deal with negative hrss^2 -- e.g. remove the event
                    try:
                        event.amplitude = measure_hrss(
                            z_j_b, unwhite_filter_ip[flow_idx:fhigh_idx],
                            unwhite_ss_ip[flow_idx:fhigh_idx - 1],
                            white_ss_ip[flow_idx:fhigh_idx - 1],
                            fd_psd.delta_f, tmp_ts_data.delta_t,
                            len(lal_filters[0].data.data), event.chisq_dof)
                    except ValueError:
                        event.amplitude = 0
                if verbose:
                    print "\t\t|- Total number of events: %d" % len(event_list)
        t_idx_min += int(seg_len * (1 - window_fraction))
        t_idx_max += int(seg_len * (1 - window_fraction))
    setname = "MagneticFields"
    __program__ = 'pyburst_excesspower_gnome'
    start_time = LIGOTimeGPS(int(ts_data.start_time))
    end_time = LIGOTimeGPS(int(ts_data.end_time))
    inseg = segment(start_time, end_time)
    xmldoc = ligolw.Document()
    xmldoc.appendChild(ligolw.LIGO_LW())
    ifo = channel_name.split(":")[0]
    straindict = psd.insert_psd_option_group.__dict__
    proc_row = register_to_xmldoc(xmldoc,
                                  __program__,
                                  straindict,
                                  ifos=[ifo],
                                  version=git_version.id,
                                  cvs_repository=git_version.branch,
                                  cvs_entry_time=git_version.date)
    dt_stride = psd_segment_length
    sample_rate = ts_data.sample_rate
    # Amount to overlap successive blocks so as not to lose data
    window_overlap_samples = window_fraction * sample_rate
    outseg = inseg.contract(window_fraction * dt_stride / 2)
    # With a given dt_stride, we cannot process the remainder of this data
    remainder = math.fmod(abs(outseg), dt_stride * (1 - window_fraction))
    # ...so make an accounting of it
    outseg = segment(outseg[0], outseg[1] - remainder)
    ss = append_search_summary(xmldoc,
                               proc_row,
                               ifos=(station, ),
                               inseg=inseg,
                               outseg=outseg)
    for sb in event_list:
        sb.process_id = proc_row.process_id
        sb.search = proc_row.program
        sb.ifo, sb.channel = station, setname
    xmldoc.childNodes[0].appendChild(event_list)
    ifostr = ifo if isinstance(ifo, str) else "".join(ifo)
    st_rnd, end_rnd = int(math.floor(inseg[0])), int(math.ceil(inseg[1]))
    dur = end_rnd - st_rnd
    fname = "%s-excesspower-%d-%d.xml.gz" % (ifostr, st_rnd, dur)
    utils.write_filename(xmldoc, fname, gz=fname.endswith("gz"))
    plot_triggers(fname)
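A minimal sketch of how excess_power might be invoked, assuming the TimeSeries
in use is pycbc's types.TimeSeries (which matches the attributes accessed
above) and that the module's plotting and measure_hrss helpers are importable;
the white-noise input is purely illustrative:

import numpy
from pycbc import types

# 300 s of white noise sampled at 512 Hz, for illustration only
fs = 512
ts_data = types.TimeSeries(numpy.random.normal(size=300 * fs),
                           delta_t=1.0 / fs, epoch=0)

excess_power(ts_data,
             band=1.0,                # 1 Hz channels
             fmin=0, fmax=64,
             psd_segment_length=60,
             psd_segment_stride=30,
             window_fraction=0.1,
             make_plot=False,
             verbose=True)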