Example 1
def primary_vetoed(starttime=None, hveto_path=None, snr=6.0, significance=5.0):
    """Catalogue all vetoed primary triggers from a given analysis

    This utility queries the output of an hveto analysis for the triggers
    vetoed from its primary channel over all rounds (up to thresholds on
    signal-to-noise ratio and round significance).

    Parameters
    ----------
    starttime : `str` or `float`
        start GPS time for this analysis

    hveto_path : `str`
        path to the directory of hveto files,
        not required if ``starttime`` is given

    snr : `float`, optional
        signal-to-noise ratio threshold on triggers, default: 6.0

    significance : `float`, optional
        hveto significance threshold on auxiliary channels, default: 5.0

    Returns
    -------
    catalogue : `~gwpy.table.EventTable`
        a tabular catalogue of primary triggers vetoed in the hveto run
    """
    path = const.get_hvetopath(starttime) if starttime else hveto_path
    t_vetoed = EventTable(names=[
        'time', 'snr', 'peak_frequency', 'channel', 'winner', 'significance'
    ])
    try:
        # sort files for a deterministic ordering to match the summary rows
        files = sorted(glob.glob(os.path.join(path, 'triggers',
                                              '*VETOED*.txt')))
        t_summary = EventTable.read(os.path.join(path, 'summary-stats.txt'),
                                    format='ascii')
        n = len(t_summary)
        files = files[:n]
        t_vetoed = EventTable.read(files, format='ascii')
        nvetoed = t_summary['nveto']  # number of vetoed triggers per round
        winsig = [
            round(t_summary['significance'][i], 4) for i in range(n)
            for j in range(nvetoed[i])
        ]
        winchans = [
            t_summary['winner'][i] for i in range(n)
            for j in range(nvetoed[i])
        ]
        rounds = [i + 1 for i in range(n) for j in range(nvetoed[i])]
        colsig = Column(data=winsig, name='significance')
        colwin = Column(data=winchans, name='winner')
        colround = Column(data=rounds, name='round')
        t_vetoed.add_column(colwin)
        t_vetoed.add_column(colsig)
        t_vetoed.add_column(colround)
        t_vetoed = t_vetoed.filter('snr>{0}'.format(snr),
                                   'significance>{0}'.format(significance))
    except (FileNotFoundError, ValueError):
        warnings.warn("Could not find Hveto analysis for this day")
    return t_vetoed
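
A minimal usage sketch (the GPS start time and thresholds below are placeholders, and an hveto output tree must exist for that day):

# hypothetical call: catalogue primary triggers vetoed with SNR > 8 in
# rounds with significance > 10
vetoed = primary_vetoed(starttime=1187000000, snr=8.0, significance=10.0)
print(vetoed)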
Example 2
    def __init__(self, paramsfile, eventNumber):
        """Initialize an XEvent (on-source, off-source, or injection)
        from an xpipeline params file

        Parameters
        ----------
        paramsfile (str):
            an xpipeline params file
        eventNumber (int):
            an integer referring to which event from the
            input/event_off/on/inj.txt to grab for processing

        Returns:

            `XEvent`
        """
        print("You are generating an XEvent by supplying an "
              "xpipeline params file; this will overwrite the defaults")
        with open(paramsfile, 'r') as f:
            for line in f.readlines():
                parsed_text = line.split('\n')[0].split(':')
                # check if the param value is also comma-separated
                try:
                    parsed_text[1].split(',')[1]
                    setattr(self, parsed_text[0], parsed_text[1].split(','))
                except IndexError:
                    setattr(self, parsed_text[0], parsed_text[1])

        skypos = EventTable.read(self.skyPositionList, format='ascii')
        self.phi = list(skypos['col2'])
        self.theta = list(skypos['col1'])
        self.event_time = list(
            EventTable.read(self.eventFileName,
                            format='ascii')['col1'])[eventNumber]

        for key, item in vars(self).items():
            try:
                setattr(self, key, float(item))
            except (TypeError, ValueError):
                pass

        self.analysistimes = [float(i) for i in self.analysistimes]
        channel_names = []
        frame_types = []
        detectors = {}
        with open(self.channelFileName, 'r') as f:
            for det in f.readlines():
                detector_name = det.split(' ')[0].split(':')[0]
                channel_names.append(det.split(' ')[0])
                frame_types.append(det.split(' ')[1])
                detectors[detector_name] = Detector(detector_name)

        self.channel_names = channel_names
        self.frame_types = frame_types
        self.detectors = detectors
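
For context, a hypothetical instantiation could look like the following; the params file path is a placeholder and its contents must follow the colon-separated xpipeline format parsed above:

# hypothetical usage: build event number 0 from an xpipeline params file
event = XEvent('input/params_on_source.txt', eventNumber=0)
print(event.event_time, event.channel_names)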
Example 3
def read_cache(cache, segments, etg, nproc=1, timecolumn=None, **kwargs):
    """Read a table of events from a cache

    This function is mainly meant for use from the `get_triggers` method

    Parameters
    ----------
    cache : :class:`glue.lal.Cache`
        the formatted list of files to read
    segments : `~gwpy.segments.SegmentList`
        the list of segments to read
    etg : `str`
        the name of the trigger generator that created the files
    nproc : `int`, optional
        the number of parallel processes to use when reading
    timecolumn : `str`, optional
        the name of the time column; if given, it is recorded in the
        table metadata and events are assumed to have been filtered
        on-the-fly during reading
    **kwargs
        other keyword arguments are passed to the `EventTable.read` or
        `{tableclass}.read` methods

    Returns
    -------
    table : `~gwpy.table.EventTable`, `None`
        a table of events, or `None` if the cache has no overlap with
        the segments
    """
    if isinstance(cache, Cache):
        cache = cache.sieve(segmentlist=segments)
        cache = cache.checkfilesexist()[0]
        cache.sort(key=lambda x: x.segment[0])
        cache = cache.pfnlist()  # some readers only like filenames
    else:
        cache = [urlparse(url).path for url in cache]
    if etg == 'pycbc_live':  # remove empty HDF5 files
        cache = filter_pycbc_live_files(cache, ifo=kwargs['ifo'])

    if len(cache) == 0:
        return

    # read triggers
    table = EventTable.read(cache, **kwargs)

    # store read keywords in the meta table
    if timecolumn:
        table.meta['timecolumn'] = timecolumn

    # get back from cache entry
    if isinstance(cache, CacheEntry):
        cache = Cache([cache])

    # append new events to existing table
    try:
        csegs = cache_segments(cache) & segments
    except (AttributeError, TypeError, ValueError):
        csegs = SegmentList()
    table.meta['segments'] = csegs

    if timecolumn:  # already filtered on-the-fly
        return table
    # filter now
    return keep_in_segments(table, segments, etg)
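
A sketch of a direct call, assuming a readable LIGO_LW trigger file on disk (the file name and ETG are illustrative; in practice this function is driven by `get_triggers`):

from gwpy.segments import Segment, SegmentList

segs = SegmentList([Segment(1187000000, 1187000100)])
# placeholder file name for a real trigger file
table = read_cache(['L1-OMICRON-1187000000-100.xml.gz'], segs, 'omicron',
                   format='ligolw', tablename='sngl_burst')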
Example 4
def read_cache(cache, segments, etg, nproc=1, timecolumn=None, **kwargs):
    """Read a table of events from a cache

    This function is mainly meant for use from the `get_triggers` method

    Parameters
    ----------
    cache : :class:`glue.lal.Cache`
        the formatted list of files to read
    segments : `~gwpy.segments.SegmentList`
        the list of segments to read
    etg : `str`
        the name of the trigger generator that created the files
    nproc : `int`, optional
        the number of parallel processes to use when reading
    timecolumn : `str`, optional
        the name of the time column, recorded in the output table metadata
    **kwargs
        other keyword arguments are passed to the `EventTable.read` or
        `{tableclass}.read` methods

    Returns
    -------
    table : `~gwpy.table.EventTable`, `None`
        a table of events, or `None` if the cache has no overlap with
        the segments
    """
    if isinstance(cache, Cache):
        cache = cache.sieve(segmentlist=segments)
        cache = cache.checkfilesexist()[0]
        cache.sort(key=lambda x: x.segment[0])
        if etg == 'pycbc_live':  # remove empty HDF5 files
            cache = type(cache)(
                filter_pycbc_live_files(cache, ifo=kwargs['ifo']))
    # if no files, skip
    if len(cache) == 0:
        return
    # use multiprocessing except for ascii reading
    # (since astropy doesn't allow it)
    if kwargs.get('format', 'none').startswith('ascii.'):
        cache = cache.pfnlist()
    else:
        kwargs['nproc'] = nproc
    if len(cache) == 1:
        cache = cache[0]

    # read triggers
    table = EventTable.read(cache, **kwargs)
    if timecolumn:
        table.meta['timecolumn'] = timecolumn

    # get back from cache entry
    if isinstance(cache, CacheEntry):
        cache = Cache([cache])
    # append new events to existing table
    try:
        csegs = cache_segments(cache)
    except (AttributeError, TypeError):
        csegs = SegmentList()
    table.meta['segments'] = csegs
    return keep_in_segments(table, segments, etg)
Example 5
def getOmicronTriggers(start, end, channel, max_snr, segs=None):
    """Fetch Omicron triggers for a channel, optionally restricted to segments"""
    try:
        cache = find_trigger_files(channel, 'OMICRON', start, end)
        t = EventTable.read(cache,
                            format='ligolw',
                            tablename='sngl_burst',
                            selection=['snr<=%f' % max_snr])
        if segs is not None:
            t = t.filter(('peak_time', in_segmentlist, segs))
        print("SUCCESS fetch for " + str(channel))
        return t
    except Exception:  # no files found, or the read failed
        print("failed fetch for " + str(channel))
Example 6
def main():
    # get the command line args
    args = parser()
    np.random.seed(args.seed)

    # set path to file
    cur_path = os.path.dirname(__file__)
    new_path = os.path.relpath(args.dataset, cur_path)

    # load dataset
    data = load_data(new_path)

    # redefine things for conciseness
    Tobs = args.Tobs  # observation time
    fs = args.fsample  # sampling frequency
    dets = args.detectors  # detectors
    ndet = len(dets)  # number of detectors
    N = Tobs * fs  # the total number of time samples
    n = N // 2 + 1  # the number of frequency bins
    tmp_bank = args.temp_bank  # template bank file
    f_low = args.cutoff_freq  # cutoff frequency used in template generation

    psds = [gen_psd(fs, Tobs, op='AdvDesign', det=d) for d in args.detectors]
    wpsds = (2.0 / fs) * np.ones((ndet, n))  # effective PSD for whitened data

    # load template bank
    tmp_bank = np.array(
        EventTable.read(tmp_bank,
                        format='ligolw.sngl_inspiral',
                        columns=['mass1', 'mass2', 'eta', 'mchirp']))

    # loop over stuff
    output, chi_test = looper(data, tmp_bank, Tobs, fs, dets, psds, wpsds,
                              args.basename, args.w_basename, args.cutoff_freq,
                              args.wave_bank)
    chi_test = [chi_test, data[1]]
    output = [output, data[1]]

    # save list of rho for test signals and test noise
    with open("%srho_values.pickle" % args.basename, "wb") as pickle_out:
        pickle.dump(output, pickle_out)

    # save list of chi rho for test purposes only
    with open("%schirho_values.pickle" % args.basename, "wb") as pickle_out:
        pickle.dump(chi_test, pickle_out)
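
The saved pickles can be read back with the matching prefix; a minimal sketch, assuming the same basename used at save time:

import pickle

basename = 'run1_'  # placeholder for args.basename
with open("%srho_values.pickle" % basename, "rb") as pickle_in:
    rho_values, labels = pickle.load(pickle_in)  # the [output, data[1]] pair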
Example 7
def plot_triggers(filename='excesspower.xml.gz', fname='triggers.png'):
    """
    Plot excess power trigger results in a time-frequency frame.
    """
    events = EventTable.read(filename, format='ligolw.sngl_burst')
    #plot = events.plot('time','central_freq','duration','bandwidth',color='snr')
    time = events['peak_time'] + events['peak_time_ns'] * 1e-9
    events.add_column(time, name='time')
    plot = events.plot('time', 'central_freq', color='snr', edgecolor='none')
    plot.axes[0].set_epoch(int(min(time)))
    plot.set_xlim((int(min(time)), round(max(time))))
    plot.set_ylabel('Frequency [Hz]')
    plot.set_yscale('log')
    #plot.set_title('GNOME '+station+' station event triggers')
    plot.add_colorbar(cmap='copper_r', label='Tile Energy')
    plt.savefig(fname, dpi=300, transparent=True)
Example 8
def Get_Rates_3(chunks, segs, verbose=False):
    """Returns the glitch rates for a given set of time chunks
    defined by a list of start times, with an end time at the last entry.
    
    Arguments:
    chunks -- Sorted list of times representing the beginnings of the 
              time periods for which rate is to be calculated, with 'end' 
              tacked on.
    segs -- Ordered and non-overlapping SegmentList such that every 
            element in 'chunks' (except the last one) is in an entry in 
            'segs'.
    verbose -- Set to 'True' if you want to see the ends of each chunk in
               'chunks' printed as it is processed.
    
    Returns:
    normcounts -- A list of glitch rates (Hz) associated with each time
                  period represented in 'chunks'."""
    normcounts = []
    j = 0
    for i in range(len(chunks) - 1):
        while chunks[i] not in segs[j]:
            j += 1
        segend = segs[j][1]
        chunkend = min(chunks[i + 1], segend)
        if verbose:
            print(from_gps(chunks[i]), from_gps(chunkend))
        files = find_trigger_files('L1:GDS-CALIB_STRAIN', 'Omicron',
                                   chunks[i], chunkend)
        if len(files) > 0:
            events = EventTable.read(
                files, format='ligolw', tablename='sngl_burst',
                columns=['peak', 'peak_time_ns', 'peak_frequency', 'snr'])
            events = events[(events['peak'] >= chunks[i]) &
                            (events['peak'] < chunkend)]
            counts = len(events['peak'])
            length = chunkend - chunks[i]
            normcounts.append(counts / length)
        else:
            normcounts.append(0)
        
    return normcounts
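
A worked example of the expected inputs, with the end time tacked onto `chunks` and every chunk start inside a segment (the times are illustrative; real GPS times are needed for find_trigger_files to locate any Omicron files):

from gwpy.segments import Segment, SegmentList

segs = SegmentList([Segment(1187000000, 1187001000),
                    Segment(1187002000, 1187003000)])
# chunk start times, plus the overall end time as the last entry
chunks = [1187000000, 1187000500, 1187002000, 1187003000]
rates = Get_Rates_3(chunks, segs, verbose=True)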
Example 9
from gwpy.table import EventTable
events = EventTable.read(
    'H1-LDAS_STRAIN-968654552-10.xml.gz', format='ligolw.sngl_burst',
    columns=['time', 'central_freq', 'snr'])
Example 10
from gwpy.table import EventTable
events = EventTable.read(
    'H1-LDAS_STRAIN-968654552-10.xml.gz', tablename='sngl_burst',
    columns=['peak', 'central_freq', 'snr'])
Example 11
from gwpy.table import EventTable
events = EventTable.read(
    'H1-LDAS_STRAIN-968654552-10.xml.gz',
    format='ligolw.sngl_burst',
    columns=['time', 'central_freq', 'bandwidth', 'duration', 'snr'])
Example 12
over a small stretch of data.

The data from which these events were generated contain a simulated
gravitational-wave signal, or hardware injection, used to validate
the performance of the LIGO detectors and downstream data analysis procedures.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.table'

# First, we import the `EventTable` object and read in a set of events from
# a LIGO_LW-format XML file containing a
# :class:`sngl_burst <glue.ligolw.lsctables.SnglBurstTable>` table
from gwpy.table import EventTable
events = EventTable.read('H1-LDAS_STRAIN-968654552-10.xml.gz',
                         tablename='sngl_burst',
                         columns=['time', 'snr'])

# .. note::
#
#    Here we manually specify the `columns` to read in order to optimise
#    the `read()` operation to parse only the data we actually need.

# We can then generate a new `~gwpy.plotter.HistogramPlot` with the
# :meth:`~EventTable.hist` instance method, using `weights=1/10.`
# to convert the histogram counts into a rate in Hertz

plot = events.hist('snr',
                   weights=1 / 10.,
                   logbins=True,
                   bins=50,
                   histtype='stepfilled')
Example 13
def mkSegment(gst, get, utc_date, txt=True):

    for key in keys:
        sources = GetFilelist(gst, get, key)

        first = True
        for source in sources:
            events = EventTable.read(
                source,
                tablename='sngl_burst',
                columns=['start_time', 'start_time_ns', 'duration', 'snr'])
            #events = EventTable.read(source, tablename='sngl_burst',columns=['peak_time', 'peak_time_ns','start_time', 'start_time_ns', 'duration', 'peak_frequency', 'central_freq', 'bandwidth', 'channel', 'amplitude', 'snr', 'confidence', 'chisq', 'chisq_dof', 'param_one_name', 'param_one_value'])
            col = events.get_column('start_time')
            if first:
                if len(col) > 0:
                    mergedevents = events
                    first = False
            else:
                mergedevents = vstack([mergedevents, events])

        for snr in snrs[key]:
            Triggered = DataQualityFlag(name="K1:" + key,
                                        known=[(gst, get)],
                                        active=[],
                                        label="Glitch",
                                        description="Glitch veto segment K1:" +
                                        key + " >= SNR" + str(snr))
            #Triggered.ifo = "K1"

            if not first:

                fevents = mergedevents.filter(
                    ('snr', mylib.Islargerequal, snr))
                durations = fevents.get_column('duration')
                start_times = fevents.get_column('start_time')
                for start_time, duration in zip(start_times, durations):
                    tmpstart = int(start_time)
                    #tmpend = start_time + duration
                    tmpend = int(start_time + 1)

                    tmpTriggered = DataQualityFlag(known=[(gst, get)],
                                                   active=[(tmpstart, tmpend)])
                    Triggered |= tmpTriggered


            # write down 15 min segments.
            if txt:
                with open(filepath_txt[key + str(snr)], mode='w') as f:
                    for seg in Triggered.active:
                        f.write('{0} {1}\n'.format(int(seg[0]), int(seg[1])))

            # if an accumulated file already exists, merge it in
            if os.path.exists(filepath_xml[key + str(snr)]):
                tmp = DataQualityFlag.read(filepath_xml[key + str(snr)])
                Triggered = Triggered + tmp

            Triggered.write(filepath_xml[key + str(snr)], overwrite=True)
Example 14
from gwpy.table import EventTable
from mylib import mylib

inputfile = "/home/controls/triggers/K1/CAL_CS_PROC_DARM_DISPLACEMENT_DQ_OMICRON/12606/K1-CAL_CS_PROC_DARM_DISPLACEMENT_DQ_OMICRON-1260615498-60.xml.gz"
events = EventTable.read(inputfile, tablename='sngl_burst',
                         columns=['peak_time', 'peak_time_ns', 'start_time',
                                  'start_time_ns', 'duration',
                                  'peak_frequency', 'central_freq',
                                  'bandwidth', 'snr'])

events = events.filter(('snr', mylib.Islarger, 100))
events = events.filter(('peak_time', mylib.between, (1260615552, 1260615558)))
events = events.filter(('peak_frequency', mylib.between, (900, 1000)))

events.pprint(max_lines=500)
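
`mylib` here is a site-specific helper module; its predicates follow gwpy's custom-filter convention of a callable taking (column, operand) and returning a boolean array. A hypothetical stand-in for `mylib.Islarger` could be:

import numpy

def is_larger(column, threshold):
    # element-wise column > threshold (stand-in for mylib.Islarger)
    return numpy.asarray(column) > threshold

events = events.filter(('snr', is_larger, 100))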
Example 15
The data from which these events were generated are a simulation of Gaussian noise
with the Advanced LIGO design spectrum, and so don't actually contain any real
gravitational waves, but will help tune the algorithm to improve detection of
future, real signals.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.table'

# First, we import the `EventTable` object and read in a set of events from
# a LIGO_LW-format XML file containing a
# :class:`sngl_burst <glue.ligolw.lsctables.SnglBurstTable>` table
from gwpy.table import EventTable

events = EventTable.read(
    '../../gwpy/tests/data/H1-LDAS_STRAIN-968654552-10.xml.gz',
    format='ligolw.sngl_burst',
    columns=['time', 'snr'])

# .. note::
#
#    Here we manually specify the `columns` to read in order to optimise
#    the `read()` operation to parse only the data we actually need.

# Now we can use the :meth:`~EventTable.binned_event_rates` method to
# calculate the event rate in a number of bins of SNR.
rates = events.binned_event_rates(1,
                                  'snr', [2, 3, 5, 8],
                                  operator='>=',
                                  start=968654552,
                                  end=968654562)
# .. note::
Example 16
from gwpy.table import EventTable
events = EventTable.read(
    'H1-LDAS_STRAIN-968654552-10.xml.gz', tablename='sngl_burst',
    columns=['time', 'central_freq', 'bandwidth', 'duration', 'snr'])
Example 17
def main():
    """
    The main code - reads in data and template bank and performs matched
    filtering analysis
    """

    # get the command line args
    args = parser()
    np.random.seed(args.seed)

    # redefine things for conciseness
    Tobs = safe * args.Tobs  # observation time
    fs = args.fsample  # sampling frequency
    isnr = args.isnr  # integrated SNR
    N = Tobs * fs  # the total number of time samples
    n = N // 2 + 1  # the number of frequency bins
    # the desired window for merger time in fractions of input Tobs
    beta = [0.5, 1.0]
    tmp_bank = args.temp_bank

    # set path to file
    cur_path = os.path.dirname(__file__)
    new_path = os.path.relpath(args.dataset, cur_path)

    # make the psds
    psd = gen_psd(fs, Tobs, op='AdvDesign', det='H1').data.data
    wpsd = (2.0 / fs) * np.ones(n)  # define effective PSD for whited data

    # compute indices defining the merger-time search window
    low_idx, high_idx = convert_beta([0, 1], fs, Tobs)

    # load template bank
    tmp_bank = np.array(
        EventTable.read(tmp_bank,
                        format='ligolw.sngl_inspiral',
                        columns=['mass1', 'mass2', 'eta', 'mchirp']))

    # load signal/noise dataset
    data = load_data(new_path)
    Nsig = args.Nsig  #data[0].shape[0]

    # load exact template parameters
    # remove after doing ideal template test!!!
    cur_path = os.path.dirname(__file__)
    new_path = os.path.relpath(args.params, cur_path)
    sig_param = load_data(new_path)

    #chi_bool = True
    #chi_rho = []
    #if chi_bool == True:
    #    count = 0
    #    for idx in xrange(Nsig):
    #        par = gen_par(tmp_bank[0],fs,Tobs,beta=beta)
    #        if data[1][idx] == 0:
    # whitened first template
    #            if count == 0:
    #                temp_par = par
    #            else:
    #                temp_par = gen_par(tmp_bank[0],fs,Tobs,beta=beta)
    #            fhp, fhc = gen_fs(tmp_bank[0],Tobs,temp_par)

    #            fmin = get_fmin(par.M,par.eta,Tobs)

    # whiten frequency domain template
    #            wfhp = whiten_data(fhp,Tobs,fs,psd,flag='fd')
    #            wfhc = whiten_data(fhc,Tobs,fs,psd,flag='fd')

    # calculate chi distribution. For testing purposes only!
    #            chi_rho.append(snr_ts(data[0][idx][0],wfhp,wfhc,T_obs,fs,wpsd,fmin,flag='fd')[int(N/2)])
    #            count+=1
    #            print '{}: Chi Rho for signal {} = {}'.format(time.asctime(),idx,chi_rho[-1])

    # save list of chi rho for test purposes only
    #    pickle_out = open("%schirho_values.pickle" % basename, "wb")
    #    pickle.dump(chi_rho, pickle_out)
    #    pickle_out.close()

    # loop over signals
    maxSNRts = np.zeros(Nsig)  # maximised (over time) measured signal SNR
    label = []
    print('{}: starting to generate data'.format(time.asctime()))
    for i in range(Nsig):
        i = i + args.start_sig
        label.append(data[1][i])
        ###-CHANGE-TO-READ-IN-TEST-DATA-#############################################################
        # read in whitened time domain data
        # generate parameters and unwhitened timeseries
        par = gen_par(tmp_bank[0], fs, Tobs,
                      beta=beta)  # uncomment to get your code back

        # Chris original code
        #par = chris_gen_par(fs,Tobs,beta=beta)
        #sig,noise,_,_ = gen_ts(fs,Tobs,isnr,'H1',par)
        #data_comb = sig + noise
        #wdata = whiten_data(data_comb,Tobs,fs,psd,flag='td')
        ##############################################################################################

        ##############################################################################################
        # loop over templates
        for k, _ in enumerate(
                tmp_bank):  # stop indexing after performing ideal test!
            #for k in xrange(args.Ntemp):

            ###-CHANGE-TO-READ-IN-TEMPLATE-###########################################################
            # read in template bank mass parameters
            # fhp,fhc = ????
            # generate unwhitened frequency domain waveform
            if k == 0:
                temp_par = par  #gen_par(tmp_bank[0],fs,Tobs,beta=beta) # uncomment to get your code back
            #elif k==4 and sig_param[i]!=None: # remove this after doing ideal template fix!!!!!
            #    ideal_temp = [sig_param[i].m1,sig_param[i].m2,sig_param[i].eta,sig_param[i].mc]
            #    temp_par = gen_par(ideal_temp,fs,Tobs,beta=[0,1])
            else:
                temp_par = gen_par(tmp_bank[k], fs, Tobs, beta=[0, 1])

                # Chris original code
                #temp_par = gen_par(fs,Tobs,beta=beta)

            temp_par.fmin = get_fmin(temp_par.M, temp_par.eta, Tobs)
            fhp, fhc = gen_fs(fs, Tobs, temp_par)
            #print '{}: Generated random mass frequency domain template'.format(time.asctime())
            ##########################################################################################

            # compute lower cut-off freq for template based on chirp mass and Tobs
            #print '{}: Template fmin -> {}'.format(time.asctime(),fmin)

            # whiten frequency domain template
            wfhp = whiten_data(fhp, Tobs, fs, psd, flag='fd')
            wfhc = whiten_data(fhc, Tobs, fs, psd, flag='fd')
            #print '{}: Whitened frequcny domain h+(f) and hx(f)'.format(time.asctime())

            ##########################################################################################
            # compute SNR timeseries using frequency domain template

            # Chris original code
            #SNRts = snr_ts(wdata,wfhp,wfhc,Tobs,fs,wpsd,fmin,flag='fd')

            SNRts = snr_ts(data[0][i][0],
                           wfhp,
                           wfhc,
                           Tobs,
                           fs,
                           wpsd,
                           temp_par.fmin,
                           flag='fd')
            temp = np.max(SNRts[low_idx:high_idx])
            if temp > maxSNRts[i - args.start_sig]:
                maxSNRts[i - args.start_sig] = temp
        print('{}: maximised signal {} SNR (FD template) type {} = {}'.format(
            time.asctime(), i, data[1][i], maxSNRts[i - args.start_sig]))
    """# seperate noise from signal
    noise = []
    signals = []
    for idx, i in enumerate(maxSNRts):
        if data[1][idx] == 0:
            noise.append(i)
        if data[1][idx] == 1:
            signals.append(i)

    # make distribution plots
    nbins_sig = int(np.sqrt(len(signals)))
    nbins_noise = int(np.sqrt(len(noise)))
    temp = np.linspace(0,isnr+8,1000)
    plt.figure()
    plt.hist(signals,nbins_sig,normed=True,alpha=0.5,label='max-temp sig (FD)')
    plt.hist(noise,nbins_noise,normed=True,alpha=0.5,label='max-temp noise (FD)')
    plt.plot(temp,norm.pdf(temp,loc=isnr),'k',label='1-temp noise (expect)')
    plt.xlim([0,np.max(temp)]) 
    plt.legend(loc='upper right')
    plt.xlabel('measured SNR')
    plt.ylabel('p(SNR)')
    plt.savefig('%smf_template.png' % args.basename)

    plt.ylim(ymin=1e-3,ymax=10)
    plt.yscale('log', nonposy='clip')
    plt.savefig('%slog_mf_template.png' % args.basename)
    """
    # save list of rho for test signals and test noise
    with open("%srho_values_%s-%s.pickle" %
              (args.basename, str(args.start_sig),
               str(args.start_sig + Nsig)), "wb") as pickle_out:
        pickle.dump(maxSNRts, pickle_out)

    # save labels
    with open("%srho_labels_%s-%s.pickle" %
              (args.basename, str(args.start_sig),
               str(args.start_sig + Nsig)), "wb") as pickle_out:
        pickle.dump(label, pickle_out)
Example 18
from gwpy.table import EventTable
events = EventTable.read('H1-LDAS_STRAIN-968654552-10.xml.gz',
                         format='ligolw.sngl_burst',
                         columns=['snr'])
plot = events.hist('snr',
                   weights=1 / 10.,
                   logbins=True,
                   bins=50,
                   histtype='stepfilled')
ax = plot.gca()
ax.set_xlabel('Signal-to-noise ratio (SNR)')
ax.set_ylabel('Rate [Hz]')
ax.set_title('LHO event triggers for GW100916')
ax.autoscale(axis='x', tight=True)
plot.show()
Example 19
from gwpy.table import EventTable

inputfile = "/home/detchar/triggers/K1/AOS_TMSX_IR_PDA1_OUT_DQ_OMICRON/12709/K1-AOS_TMSX_IR_PDA1_OUT_DQ_OMICRON-1270976190-60.xml.gz"
events = EventTable.read(inputfile, tablename='sngl_burst')
print(events)
Example 20
    def setUp(self):
        self.table = EventTable.read(TEST_OMEGA_FILE, format='ascii.omega')
Example 21
           "LSC-REFL_PDA1_RF45_I_ERR_DQ":20,
           "LSC-POP_PDA1_RF17_Q_ERR_DQ":18,
           "LSC-POP_PDA1_DC_OUT_DQ":20,
           "LSC-AS_PDA1_RF17_Q_ERR_DQ":25,
           "CAL-CS_PROC_IMC_FREQUENCY_DQ":21,
           "CAL-CS_PROC_XARM_FREQUENCY_DQ":21,
           #"CAL-CS_PROC_DARM_DISPLACEMENT_DQ":100,
           "CAL-CS_PROC_DARM_DISPLACEMENT_DQ":100,
           #"CAL-CS_PROC_DARM_DISPLACEMENT_DQ":20,
           "CAL-CS_PROC_MICH_DISPLACEMENT_DQ":100,
           "CAL-CS_PROC_SRCL_DISPLACEMENT_DQ":100,
           "CAL-CS_PROC_C00_STRAIN_DBL_DQ":100}

# Open omicron file

events = EventTable.read(inputfile, tablename='sngl_burst',
                         columns=['peak_time', 'peak_time_ns', 'start_time',
                                  'start_time_ns', 'duration',
                                  'peak_frequency', 'central_freq',
                                  'bandwidth', 'channel', 'amplitude', 'snr',
                                  'confidence', 'chisq', 'chisq_dof',
                                  'param_one_name', 'param_one_value'])
# Tablename option
#'process', 'process_params', 'sngl_burst', 'segment_definer', 'segment_summary', 'segment'
# Column option
#ifo peak_time peak_time_ns start_time start_time_ns duration search process_id event_id peak_frequency central_freq bandwidth channel amplitude snr confidence chisq chisq_dof param_one_name param_one_value

channels = events.get_column('channel')

if len(channels) == 0:
    print("No event.")
    print("Successfully finished!")
    exit()
else:
    channel = channels[0]
Example 22
The data from which these events were generated are a simulation of Gaussian noise
with the Advanced LIGO design spectrum, and so don't actually contain any real
gravitational waves, but will help tune the algorithm to improve detection of
future, real signals.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.table'

# First, we import the `EventTable` object and read in a set of events from
# a LIGO_LW-format XML file containing a
# :class:`sngl_burst <glue.ligolw.lsctables.SnglBurstTable>` table
from gwpy.table import EventTable
events = EventTable.read(
    '../../gwpy/tests/data/H1-LDAS_STRAIN-968654552-10.xml.gz',
    format='ligolw.sngl_burst', columns=['time', 'snr'])

# .. note::
#
#    Here we manually specify the `columns` to read in order to optimise
#    the `read()` operation to parse only the data we actually need.

# Now we can use the :meth:`~EventTable.binned_event_rates` method to
# calculate the event rate in a number of bins of SNR.
rates = events.binned_event_rates(1, 'snr', [2, 3, 5, 8], operator='>=',
                                  start=968654552, end=968654562)
# .. note::
#
#    The list `[2, 3, 5, 8]` and operator `>=` specify SNR thresholds of
#    2, 3, 5, and 8.
Example 23
from gwpy.table import EventTable
events = EventTable.read('H1-LDAS_STRAIN-968654552-10.xml.gz',
                         tablename='sngl_burst', columns=['time', 'snr'])
Example 24
def get_triggers(channel,
                 etg,
                 segments,
                 cache=None,
                 snr=None,
                 frange=None,
                 raw=False,
                 trigfind_kwargs={},
                 **read_kwargs):
    """Get triggers for the given channel
    """
    etg = _sanitize_name(etg)
    # format arguments
    try:
        readfmt = read_kwargs.pop("format", DEFAULT_FORMAT[etg])
    except KeyError:
        raise ValueError("unsupported ETG {!r}".format(etg))
    trigfind_kwargs, read_kwargs = _format_params(channel, etg, readfmt,
                                                  trigfind_kwargs, read_kwargs)

    # find triggers
    if cache is None:
        cache = find_trigger_files(channel, etg, segments, **trigfind_kwargs)

    # read files
    tables = []
    for segment in segments:
        segaslist = SegmentList([segment])
        segcache = io_cache.sieve(cache, segment=segment)
        # try and work out if cache overextends segment (so we need to crop)
        cachesegs = io_cache.cache_segments(segcache)
        outofbounds = abs(cachesegs - segaslist)
        if segcache:
            if len(segcache) == 1:  # just pass the single filename
                segcache = segcache[0]
            new = EventTable.read(segcache, **read_kwargs)
            new.meta = {k: new.meta[k] for k in TABLE_META if new.meta.get(k)}
            if outofbounds:
                new = new[in_segmentlist(new[new.dtype.names[0]], segaslist)]
            tables.append(new)
    if len(tables):
        table = vstack_tables(tables)
    else:
        table = EventTable(
            names=read_kwargs.get('columns', ['time', 'frequency', 'snr']))

    # parse time, frequency-like and snr-like column names
    columns = table.dtype.names
    tcolumn = columns[0]
    fcolumn = columns[1]
    scolumn = columns[2]

    # filter
    keep = numpy.ones(len(table), dtype=bool)
    if snr is not None:
        keep &= table[scolumn] >= snr
    if frange is not None:
        keep &= table[fcolumn] >= frange[0]
        keep &= table[fcolumn] < frange[1]
    table = table[keep]

    # return basic table if 'raw'
    if raw:
        return table

    # rename time column so that all tables match in at least that
    if tcolumn != "time":
        table.rename_column(tcolumn, 'time')

    # add channel column to identify all triggers
    table.add_column(
        table.Column(data=numpy.repeat(channel, len(table)), name='channel'))

    table.sort('time')
    return table
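
An illustrative call (the channel, ETG, and GPS span are placeholders, and the ETG must have an entry in DEFAULT_FORMAT):

from gwpy.segments import Segment, SegmentList

segs = SegmentList([Segment(1187000000, 1187003600)])
trigs = get_triggers('L1:GDS-CALIB_STRAIN', 'omicron', segs,
                     snr=8, frange=(10, 2048))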
Example 25
import argparse

from gwpy.table import EventTable
from mylib import mylib  # site-specific helper predicates, as in Example 14

parser = argparse.ArgumentParser(
    description='Make trigger file from burst event txt file.')
parser.add_argument('-i', '--infile', help='input burst event txt file.',
                    default='/users/DET/tools/Hveto/Script/Burst/'
                            'EVENTS_LHVK_20191219.txt')
parser.add_argument('-o', '--outfile', help='output xml file.',
                    default='test.xml')
parser.add_argument('-n', '--noise', help='Noise investigation.',
                    action='store_true')
parser.add_argument('-f', '--force', help='All events are processed.',
                    action='store_true')

args = parser.parse_args()

infile = args.infile
outfile = args.outfile
noise = args.noise
force = args.force

t = EventTable.read(infile, format='ascii.cwb')

#columns = ['time for J1 detector','duration','central frequency','bandwidth','hrss for J1 detector','sSNR for J1 detector','likelihood'])

t.keep_columns(['time for J1 detector', 'duration', 'central frequency',
                'bandwidth', 'hrss for J1 detector',
                'sSNR for J1 detector', 'likelihood', 'time shift'])

if force:
    print("================= All events ================")
    pass
elif noise:
    # Significant event in KAGRA data
    print("================= Significant events in KAGRA data ================")
    t = t.filter(('sSNR for J1 detector', mylib.Islarger, 0.1))
else:
    # Physically meaningful events.
    print("================= GW candidate ================")
Example 26
gravitational-wave burst detection algorithm, over a small stretch of data.

The data from which these events were generated contain a simulated
gravitational-wave signal, or hardware injection, used to validate
the performance of the LIGO detectors and downstream data analysis procedures.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.table'

# First, we import the `EventTable` object and read in a set of events from
# a LIGO_LW-format XML file containing a
# :class:`sngl_burst <glue.ligolw.lsctables.SnglBurstTable>` table
from gwpy.table import EventTable
events = EventTable.read('H1-LDAS_STRAIN-968654552-10.xml.gz',
                         tablename='sngl_burst',
                         columns=['peak', 'central_freq', 'snr'])

# .. note::
#
#    Here we manually specify the `columns` to read in order to optimise
#    the `read()` operation to parse only the data we actually need.

# We can now make a scatter plot by specifying the x- and y-axis columns,
# and (optionally) the colour:
plot = events.plot('peak', 'central_freq', color='snr')
ax = plot.gca()
ax.set_yscale('log')
ax.set_ylabel('Frequency [Hz]')
ax.set_epoch(968654552)
ax.set_xlim(968654552, 968654552 + 10)
Example 27
def get_triggers(channel, etg, segments, cache=None, snr=None, frange=None,
                 raw=False, trigfind_kwargs={}, **read_kwargs):
    """Get triggers for the given channel
    """
    etg = _sanitize_name(etg)
    # format arguments
    try:
        readfmt = read_kwargs.pop("format", DEFAULT_FORMAT[etg])
    except KeyError:
        raise ValueError("unsupported ETG {!r}".format(etg))
    trigfind_kwargs, read_kwargs = _format_params(
        channel,
        etg,
        readfmt,
        trigfind_kwargs,
        read_kwargs
    )

    # find triggers
    if cache is None:
        cache = find_trigger_files(channel, etg, segments, **trigfind_kwargs)

    # read files
    tables = []
    for segment in segments:
        segaslist = SegmentList([segment])
        segcache = io_cache.sieve(cache, segment=segment)
        # try and work out if cache overextends segment (so we need to crop)
        cachesegs = io_cache.cache_segments(segcache)
        outofbounds = abs(cachesegs - segaslist)
        if segcache:
            if len(segcache) == 1:  # just pass the single filename
                segcache = segcache[0]
            new = EventTable.read(segcache, **read_kwargs)
            new.meta = {k: new.meta[k] for k in TABLE_META if new.meta.get(k)}
            if outofbounds:
                new = new[new[new.dtype.names[0]].in_segmentlist(segaslist)]
            tables.append(new)
    if len(tables):
        table = vstack_tables(tables)
    else:
        table = EventTable(names=read_kwargs.get(
            'columns', ['time', 'frequency', 'snr']))

    # parse time, frequency-like and snr-like column names
    columns = table.dtype.names
    tcolumn = columns[0]
    fcolumn = columns[1]
    scolumn = columns[2]

    # filter
    keep = numpy.ones(len(table), dtype=bool)
    if snr is not None:
        keep &= table[scolumn] >= snr
    if frange is not None:
        keep &= table[fcolumn] >= frange[0]
        keep &= table[fcolumn] < frange[1]
    table = table[keep]

    # return basic table if 'raw'
    if raw:
        return table

    # rename time column so that all tables match in at least that
    if tcolumn != "time":
        table.rename_column(tcolumn, 'time')

    # add channel column to identify all triggers
    table.add_column(table.Column(data=numpy.repeat(channel, len(table)),
                                  name='channel'))

    table.sort('time')
    return table
Example 28
MODEL_NAME_CNN = os.path.join(os.path.split(__file__)[0], '..', '..', 'models',
                              'multi_view_classifier.h5')

SCRATCHY_TIMESERIES_PATH = os.path.join(os.path.split(__file__)[0], 'data',
                                        'timeseries',
                                        'scratchy_timeseries_test.h5')

PROJECT_PICKLE = os.path.join(os.path.split(__file__)[0], 'data',
                              'API', '1104.pkl')

SCRATCHY_TIMESERIES = TimeSeries.read(SCRATCHY_TIMESERIES_PATH)
EVENT_TIME = 1127700030.877928972

RESULTS_TABLE = EventTable.read(os.path.join(os.path.split(__file__)[0],
                                             'data', 'table',
                                             'scratchy_results_table.h5'),
                                format='hdf5')

class TestUtils(object):
    """`TestCase` for the GravitySpy
    """
    def test_make_q_scans(self):

        results = classify(event_time=EVENT_TIME,
                           channel_name='L1:GDS-CALIB_STRAIN',
                           project_info_pickle=PROJECT_PICKLE,
                           path_to_cnn=MODEL_NAME_CNN,
                           timeseries=SCRATCHY_TIMESERIES)

        results.convert_unicode_to_bytestring()
        pandas.testing.assert_frame_equal(results.to_pandas(),
Example 29
def main(args=None):
    """Run the primary scattering command-line tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # set up logger
    logger = cli.logger(
        name=PROG.split('python -m ').pop(),
        level='DEBUG' if args.verbose else 'INFO',
    )

    # useful variables
    fthresh = (
        int(args.frequency_threshold) if args.frequency_threshold.is_integer()
        else args.frequency_threshold)
    multiplier = args.multiplier_for_threshold
    tstr = str(fthresh).replace('.', '_')
    gpsstr = '%s-%s' % (int(args.gpsstart), int(args.gpsend - args.gpsstart))
    args.optic = args.optic or list(OPTIC_MOTION_CHANNELS.keys())

    # go to working directory
    indir = os.getcwd()
    if not os.path.isdir(args.output_dir):
        os.makedirs(args.output_dir)
    os.chdir(args.output_dir)

    # set up output files
    summfile = '{}-SCATTERING_SUMMARY-{}.csv'.format(
        args.ifo, gpsstr)
    segfile = '{}-SCATTERING_SEGMENTS_{}_HZ-{}.h5'.format(
        args.ifo, tstr, gpsstr)

    # log start of process
    logger.info('{} Scattering {}-{}'.format(
        args.ifo, int(args.gpsstart), int(args.gpsend)))

    # -- get state segments -----------

    span = Segment(args.gpsstart, args.gpsend)

    # get segments
    if args.state_flag is not None:
        state = DataQualityFlag.query(
            args.state_flag, int(args.gpsstart), int(args.gpsend),
            url=DEFAULT_SEGMENT_SERVER,
        ).coalesce()
        statea = []
        padding = args.segment_start_pad + args.segment_end_pad
        for i, seg in enumerate(state.active):
            if abs(seg) > padding:
                statea.append(Segment(
                    seg[0] + args.segment_start_pad,
                    seg[1] - args.segment_end_pad,
                ))
            else:
                logger.debug(
                    "Segment length {} shorter than padding length {}, "
                    "skipping segment {}-{}".format(abs(seg), padding, *seg),
                )
        statea = SegmentList(statea)
        logger.debug("Downloaded %d segments for %s"
                     % (len(statea), args.state_flag))
    else:
        statea = SegmentList([span])
    livetime = float(abs(statea))
    logger.debug("Processing %.2f s of livetime" % livetime)

    # -- load h(t) --------------------

    args.main_channel = args.main_channel.format(IFO=args.ifo)
    logger.debug("Loading Omicron triggers for %s" % args.main_channel)

    if args.gpsstart >= 1230336018:  # Jan 1 2019
        ext = "h5"
        names = ["time", "frequency", "snr"]
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ("time", in_segmentlist, statea),
            ],
            "format": "hdf5",
            "path": "triggers",
        }
    else:
        ext = "xml.gz"
        names = ['peak', 'peak_frequency', 'snr']
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < peak_frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ('peak', in_segmentlist, statea),
            ],
            "format": 'ligolw',
            "tablename": "sngl_burst",
        }

    fullcache = []
    for seg in statea:
        cache = gwtrigfind.find_trigger_files(
            args.main_channel, 'omicron', seg[0], seg[1], ext=ext,
        )
        if len(cache) == 0:
            warnings.warn(
                "No Omicron triggers found for %s in segment [%d .. %d)"
                % (args.main_channel, seg[0], seg[1]),
            )
            continue
        fullcache.extend(cache)

    # read triggers
    if fullcache:
        trigs = EventTable.read(fullcache, nproc=args.nproc, **read_kw)
    else:  # no files (no livetime?)
        trigs = EventTable(names=names)

    highsnrtrigs = trigs[trigs['snr'] >= 8]
    logger.debug("%d read" % len(trigs))

    # -- prepare HTML -----------------

    links = [
        '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
        ('Parameters', '#parameters'),
        ('Segments', (
            ('State flag', '#state-flag'),
            ('Optical sensors', '#osems'),
            ('Transmons', '#transmons'),
        )),
    ]
    if args.omega_scans:
        links.append(('Scans', '#omega-scans'))
    (brand, class_) = htmlio.get_brand(args.ifo, 'Scattering', args.gpsstart)
    navbar = htmlio.navbar(links, class_=class_, brand=brand)
    page = htmlio.new_bootstrap_page(
        title='%s Scattering | %d-%d' % (
            args.ifo, int(args.gpsstart), int(args.gpsend)),
        navbar=navbar)
    page.div(class_='pb-2 mt-3 mb-2 border-bottom')
    page.h1('%s Scattering: %d-%d'
            % (args.ifo, int(args.gpsstart), int(args.gpsend)))
    page.div.close()  # pb-2 mt-3 mb-2 border-bottom
    page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
    page.div(class_='row')
    page.div(class_='col-md-9 col-sm-12')
    page.add(htmlio.parameter_table(
        start=int(args.gpsstart), end=int(args.gpsend), flag=args.state_flag))
    page.div.close()  # col-md-9 col-sm-12

    # link to summary files
    page.div(class_='col-md-3 col-sm-12')
    page.add(htmlio.download_btn(
        [('Segments (HDF)', segfile),
         ('Triggers (CSV)', summfile)],
        btnclass='btn btn-%s dropdown-toggle' % args.ifo.lower(),
    ))
    page.div.close()  # col-md-3 col-sm-12
    page.div.close()  # row

    # command-line
    page.h5('Command-line:')
    page.add(htmlio.get_command_line(about=False, prog=PROG))

    # section header
    page.h2('Segments', class_='mt-4', id_='segments')

    if statea:  # contextual information
        paper = markup.oneliner.a(
            'Accadia et al. (2010)', target='_blank', class_='alert-link',
            href='http://iopscience.iop.org/article/10.1088/0264-9381/27'
                 '/19/194011')
        msg = (
            "Segments marked \"optical sensors\" below show evidence of beam "
            "scattering between {0} and {1} Hz based on the velocity of optic "
            "motion, with fringe frequencies projected using equation (3) of "
            "{2}. Segments marked \"transmons\" are based on whitened, "
            "band-limited RMS trends of transmon sensors. In both cases, "
            "yellow panels denote weak evidence for scattering, while red "
            "panels denote strong evidence."
         ).format(args.fmin, multiplier * fthresh, str(paper))
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
    else:  # null segments
        page.add(htmlio.alert('No active analysis segments were found',
                              context='warning', dismiss=False))

    # record state segments
    if args.state_flag is not None:
        page.h3('State flag', class_='mt-3', id_='state-flag')
        page.div(id_='accordion1')
        page.add(htmlio.write_flag_html(
            state, span, 'state', parent='accordion1', context='success',
            plotdir='', facecolor=(0.2, 0.8, 0.2), edgecolor='darkgreen',
            known={'facecolor': 'red', 'edgecolor': 'darkred', 'height': 0.4}))
        page.div.close()

    # -- find scattering evidence -----

    # read data for OSEMs and transmons
    osems = ['%s:%s' % (args.ifo, c) for optic in args.optic for
             c in OPTIC_MOTION_CHANNELS[optic]]
    transmons = ['%s:%s' % (args.ifo, c) for c in TRANSMON_CHANNELS]
    allchannels = osems + transmons

    logger.info("Reading all timeseries data")
    alldata = []
    n = len(statea)
    for i, seg in enumerate(statea):
        msg = "{0}/{1} {2}:".rjust(30).format(
            str(i + 1).rjust(len(str(n))),
            n,
            str(seg),
        ) if args.verbose else False
        alldata.append(
            get_data(allchannels, seg[0], seg[1],
                     frametype=args.frametype.format(IFO=args.ifo),
                     verbose=msg, nproc=args.nproc).resample(128))
    try:  # ensure that only available channels are analyzed
        osems = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(osems))
        transmons = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(transmons))
    except IndexError:
        osems = []
        transmons = []

    # initialize scattering segments
    scatter_segments = DataQualityDict()
    actives = SegmentList()

    # scattering based on OSEM velocity
    if statea:
        page.h3('Optical sensors (OSEMs)', class_='mt-3', id_='osems')
        page.div(id_='osems-group')
    logger.info('Searching for scatter based on OSEM velocity')

    for i, channel in enumerate(sorted(osems)):
        logger.info("-- Processing %s --" % channel)
        chanstr = re.sub('[:-]', '_', channel).replace('_', '-', 1)
        optic = channel.split('-')[1].split('_')[0]
        flag = '%s:DCH-%s_SCATTERING_GE_%s_HZ:1' % (args.ifo, optic, tstr)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering above {0} Hz from {1} in "
                        "{2}".format(fthresh, optic, channel),
        )
        # set up plot(s)
        plot = Plot(figsize=[12, 12])
        axes = {}
        axes['position'] = plot.add_subplot(
            411, xscale='auto-gps', xlabel='')
        axes['fringef'] = plot.add_subplot(
            412, sharex=axes['position'], xlabel='')
        axes['triggers'] = plot.add_subplot(
            413, sharex=axes['position'], xlabel='')
        axes['segments'] = plot.add_subplot(
            414, projection='segments', sharex=axes['position'])
        plot.subplots_adjust(bottom=.07, top=.95)
        fringecolors = [None] * len(FREQUENCY_MULTIPLIERS)
        histdata = dict((x, numpy.ndarray((0,))) for
                        x in FREQUENCY_MULTIPLIERS)
        linecolor = None
        # loop over state segments and find scattering fringes
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            ts = alldata[j][channel]
            # get raw data and plot
            line = axes['position'].plot(ts, color=linecolor)[0]
            linecolor = line.get_color()
            # get fringe frequency and plot
            fringef = get_fringe_frequency(ts, multiplier=1)
            for k, m in list(enumerate(FREQUENCY_MULTIPLIERS))[::-1]:
                fm = fringef * m
                line = axes['fringef'].plot(
                    fm, color=fringecolors[k],
                    label=(r'$f\times%d$' % m if j == 0 else None))[0]
                fringecolors[k] = line.get_color()
                histdata[m] = numpy.resize(
                    histdata[m], (histdata[m].size + fm.size,))
                histdata[m][-fm.size:] = fm.value
            # get segments and plot
            scatter = get_segments(
                fringef * multiplier,
                fthresh,
                name=flag,
                pad=args.segment_padding
            )
            axes['segments'].plot(
                scatter, facecolor='red', edgecolor='darkred',
                known={'alpha': 0.6, 'facecolor': 'lightgray',
                       'edgecolor': 'gray', 'height': 0.4},
                height=0.8, y=0, label=' ',
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc/deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # finalize plot
        logger.debug("Plotting")
        name = texify(channel)
        axes['position'].set_title("Scattering evidence in %s" % name)
        axes['position'].set_xlabel('')
        axes['position'].set_ylabel(r'Position [$\mu$m]')
        axes['position'].text(
            0.01, 0.95, 'Optic position',
            transform=axes['position'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        axes['fringef'].plot(
            span, [fthresh, fthresh], 'k--')
        axes['fringef'].set_xlabel('')
        axes['fringef'].set_ylabel(r'Frequency [Hz]')
        axes['fringef'].yaxis.tick_right()
        axes['fringef'].yaxis.set_label_position("right")
        axes['fringef'].set_ylim(0, multiplier * fthresh)
        axes['fringef'].text(
            0.01, 0.95, 'Calculated fringe frequency',
            transform=axes['fringef'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        handles, labels = axes['fringef'].get_legend_handles_labels()
        axes['fringef'].legend(handles[::-1], labels[::-1], loc='upper right',
                               borderaxespad=0, bbox_to_anchor=(-0.01, 1.),
                               handlelength=1)

        axes['triggers'].scatter(
            trigs[names[0]],
            trigs[names[1]],
            c=trigs[names[2]],
            edgecolor='none',
        )
        name = texify(args.main_channel)
        axes['triggers'].text(
            0.01, 0.95,
            '%s event triggers (Omicron)' % name,
            transform=axes['triggers'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        axes['triggers'].set_ylabel('Frequency [Hz]')
        axes['triggers'].set_ylim(args.fmin, multiplier * fthresh)
        axes['triggers'].colorbar(cmap='YlGnBu', clim=(3, 100), norm='log',
                                  label='Signal-to-noise ratio')
        axes['segments'].set_ylim(-.55, .55)
        axes['segments'].text(
            0.01, 0.95,
            r'Time segments with $f\times%d > %.2f$ Hz' % (
                multiplier, fthresh),
            transform=axes['segments'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        for ax in axes.values():
            ax.set_epoch(int(args.gpsstart))
            ax.set_xlim(*span)
        png = '%s_SCATTERING_%s_HZ-%s.png' % (chanstr, tstr, gpsstr)
        try:
            plot.save(png)
        except OverflowError as e:
            warnings.warn(str(e))
            plot.axes[1].set_ylim(0, multiplier * fthresh)
            plot.refresh()
            plot.save(png)
        plot.close()
        logger.debug("%s written." % png)

        # make histogram
        histogram = Plot(figsize=[12, 6])
        ax = histogram.gca()
        hrange = (0, multiplier * fthresh)
        for m, color in list(zip(histdata, fringecolors))[::-1]:
            if histdata[m].size:
                ax.hist(
                    histdata[m], facecolor=color, alpha=.6, range=hrange,
                    bins=50, histtype='stepfilled', label=r'$f\times%d$' % m,
                    cumulative=-1, weights=ts.dx.value, bottom=1e-100,
                    log=True)
            else:
                ax.plot(histdata[m], color=color, label=r'$f\times%d$' % m)
                ax.set_yscale('log')
        ax.set_ylim(.01, float(livetime))
        ax.set_ylabel('Time with fringe above frequency [s]')
        ax.set_xlim(*hrange)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_title(axes['position'].get_title())
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles[::-1], labels[::-1], loc='upper right')
        hpng = '%s_SCATTERING_HISTOGRAM-%s.png' % (chanstr, gpsstr)
        histogram.save(hpng)
        histogram.close()
        logger.debug("%s written." % hpng)

        # write HTML
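        # classify severity: 'danger' if efficiency/deadtime exceeds 2,
        # 'warning' for marginal evidence, otherwise skip this channel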
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif ((deadtime != 0 and effdt < 2) or
              (histdata[multiplier].size and
               histdata[multiplier].max() >= fthresh / 2.)):
            context = 'warning'
        else:
            continue
        page.div(class_='card border-%s mb-1 shadow-sm' % context)
        page.div(class_='card-header text-white bg-%s' % context)
        page.a(channel, class_='collapsed card-link cis-link',
               href='#osem%s' % i, **{'data-toggle': 'collapse'})
        page.div.close()  # card-header
        page.div(id_='osem%s' % i, class_='collapse',
                 **{'data-parent': '#osems-group'})
        page.div(class_='card-body')
        page.div(class_='row')
        img = htmlio.FancyPlot(
            png, caption=SCATTER_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(img))
        page.div.close()  # col-md-10 offset-md-1
        himg = htmlio.FancyPlot(
            hpng, caption=HIST_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(himg))
        page.div.close()  # col-md-10 offset-md-1
        page.div.close()  # row
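        # summarize veto performance in a table, and dump the active
        # segments in SegWizard format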
        segs = StringIO()
        if deadtime:
            page.p("%d segments were found predicting a scattering fringe "
                   "above %.2f Hz." % (
                       len(scatter_segments[channel].active),
                       fthresh))
            page.table(class_='table table-sm table-hover')
            page.tbody()
            page.tr()
            page.th('Deadtime')
            page.td('%.2f/%d seconds' % (deadtime, livetime))
            page.td('%.2f%%' % deadtimepc)
            page.tr.close()
            page.tr()
            page.th('Efficiency<br><small>(SNR&ge;8 and '
                    '%.2f Hz &lt; f<sub>peak</sub> &lt; %.2f Hz)</small>'
                    % (args.fmin, multiplier * fthresh))
            page.td('%d/%d events' % (efficiency, len(highsnrtrigs)))
            page.td('%.2f%%' % efficiencypc)
            page.tr.close()
            page.tr()
            page.th('Efficiency/Deadtime')
            page.td()
            page.td('%.2f' % effdt)
            page.tr.close()
            page.tbody.close()
            page.table.close()
            scatter_segments[channel].active.write(segs, format='segwizard',
                                                   coltype=float)
            page.pre(segs.getvalue())
        else:
            page.p("No segments were found with scattering above %.2f Hz."
                   % fthresh)
        page.div.close()  # card-body
        page.div.close()  # collapse
        page.div.close()  # card

    if statea:  # close accordion
        page.div.close()  # osems-group

    # scattering based on transmon BLRMS
    if statea:
        page.h3('Transmons', class_='mt-3', id_='transmons')
        page.div(id_='transmons-group')
    logger.info('Searching for scatter based on band-limited RMS of transmons')

    for i, channel in enumerate(sorted(transmons)):
        logger.info("-- Processing %s --" % channel)
        optic = channel.split('-')[1][:6]
        flag = '%s:DCH-%s_SCATTERING_BLRMS:1' % (args.ifo, optic)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering from whitened, band-limited "
                        "RMS trends of {0}".format(channel),
        )

        # loop over state segments and compute BLRMS
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            wblrms = get_blrms(
                alldata[j][channel],
                flow=args.bandpass_flow,
                fhigh=args.bandpass_fhigh,
            )
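            # flag times when the whitened BLRMS exceeds its mean by
            # args.sigma standard deviations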
            scatter = get_segments(
                wblrms,
                numpy.mean(wblrms) + args.sigma * numpy.std(wblrms),
                name=flag,
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
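        # restrict to triggers with SNR >= 8 when measuring veto efficiency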
        highsnrtrigs = trigs[trigs['snr'] >= 8]
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc / deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # write HTML
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif deadtime != 0 and effdt < 2:
            context = 'warning'
        else:
            continue
        page.add(htmlio.write_flag_html(
            scatter_segments[channel], span, i, parent='transmons-group',
            title=channel, context=context, plotdir=''))

    if statea:  # close accordion
        page.div.close()  # transmons-group

    actives = actives.coalesce()  # merge contiguous segments
    if statea and not actives:
        page.add(htmlio.alert(
            'No evidence of scattering found in the channels analyzed',
            context=args.ifo.lower(), dismiss=False))

    # identify triggers during active segments
    logger.debug('Writing a summary CSV record')
    ind = [i for i, trigtime in enumerate(highsnrtrigs[names[0]])
           if trigtime in actives]
    gps = highsnrtrigs[names[0]][ind]
    freq = highsnrtrigs[names[1]][ind]
    snr = highsnrtrigs[names[2]][ind]
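    # match each trigger to the active scattering segment that contains it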
    segs = [y for x in gps for y in actives if x in y]
    table = EventTable(
        [gps, freq, snr, [seg[0] for seg in segs], [seg[1] for seg in segs]],
        names=('trigger_time', 'trigger_frequency', 'trigger_snr',
               'segment_start', 'segment_end'))
    logger.info('The following {} triggers fell within active scattering '
                'segments:\n\n'.format(len(table)))
    print(table)
    print('\n\n')
    table.write(summfile, overwrite=True)

    # -- launch omega scans -----------

    nscans = min(args.omega_scans, len(table))
    if nscans > 0:
        # launch scans
        scandir = 'scans'
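        # randomly sample trigger times for omega-scan follow-up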
        ind = random.sample(range(0, len(table)), nscans)
        omegatimes = [str(t) for t in table['trigger_time'][ind]]
        logger.debug('Collected {} event times to omega scan: {}'.format(
            nscans, ', '.join(omegatimes)))
        logger.info('Creating workflow for omega scans')
        flags = batch.get_command_line_flags(
            ifo=args.ifo, ignore_state_flags=True)
        condorcmds = batch.get_condor_arguments(timeout=4, gps=args.gpsstart)
        batch.generate_dag(omegatimes, flags=flags, submit=True,
                           outdir=scandir, condor_commands=condorcmds)
        logger.info('Launched {} omega scans to condor'.format(nscans))
        # render HTML
        page.h2('Omega scans', class_='mt-4', id_='omega-scans')
        msg = (
            'The following event times correspond to significant Omicron '
            'triggers that occur during the scattering segments found above. '
            'To compare these against fringe frequency projections, please '
            'use the "simple scattering" module:',
            markup.oneliner.pre(
                '$ python -m gwdetchar.scattering.simple --help',
            ),
        )
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
        page.add(htmlio.scaffold_omega_scans(
            omegatimes, args.main_channel, scandir=scandir))
    elif args.omega_scans:
        logger.info('No events found during active scattering segments')

    # -- finalize ---------------------

    # write segments
    scatter_segments.write(segfile, path="segments", overwrite=True)
    logger.debug("%s written" % segfile)

    # write HTML
    htmlio.close_page(page, 'index.html')
    logger.info("-- index.html written, all done --")

    # return to original directory
    os.chdir(indir)