示例#1
0
def main_compare_gif_gotic2():
    """Compare measured GIF strain against the GOTIC2 tidal prediction.

    Reads one week of 'CALC_STRAIN' data from GIF files, overlays it with
    the (empirically rescaled) GOTIC2 model strain, and writes the
    comparison figure to ``result.png``.
    """
    start = tconvert('Oct 15 2018 00:00:00')
    end = tconvert('Oct 21 2018 00:00:00')

    # measured strain: locate GIF files and read them as one series
    gif_prefix = '/Users/miyo/Dropbox/KagraData/gif/'
    file_groups = GifData.findfiles(start, end, 'CALC_STRAIN',
                                    prefix=gif_prefix)
    gif_files = [fname for group in file_groups for fname in group]
    strain = TimeSeries.read(source=gif_files,
                             name='CALC_STRAIN',
                             format='gif',
                             pad=numpy.nan,
                             nproc=2).detrend('linear')

    # predicted strain from GOTIC2, detrended and scaled by an
    # empirical factor of 0.9
    gotic_file = '201805010000_201811010000.gotic'
    predicted = KagraGoticStrain.read(gotic_file, start=start, end=end).x
    predicted = predicted.detrend('linear') * 0.9

    # overlay both series and save the figure
    comparison = Plot(predicted, strain, xscale='auto-gps')
    comparison.legend()
    comparison.subplots_adjust(right=.86)
    comparison.savefig('result.png')
    comparison.close()
示例#2
0
文件: plot.py 项目: uberpye/gwdetchar
def spectral_comparison(gps,
                        qspecgram,
                        fringe,
                        output,
                        thresh=15,
                        multipliers=(1, 2, 4, 8),
                        colormap='viridis',
                        figsize=(12, 8)):
    """Compare a high-resolution spectrogram with projected fringe frequencies

    Parameters
    ----------
    gps : `float`
        reference GPS time (in seconds) to serve as the origin

    qspecgram : `~gwpy.spectrogram.Spectrogram`
        an interpolated high-resolution spectrogram

    fringe : `~gwpy.timeseries.TimeSeries`
        projected fringe frequencies (in Hz)

    output : `str`
        name of the output file

    thresh : `float`, optional
        frequency threshold (Hz) for scattering fringes, default: 15

    multipliers : `tuple`, optional
        collection of fringe harmonic numbers to plot, can be given in
        any order, default: `(1, 2, 4, 8)`

    colormap : `str`, optional
        matplotlib colormap to use, default: viridis

    figsize : `tuple`, optional
        size (width x height) of the final figure, default: `(12, 8)`
    """
    # NOTE: figsize default is a tuple (immutable) to avoid the
    # mutable-default-argument pitfall; it also matches the docstring
    plot = Plot(figsize=figsize)
    # format spectral plot
    ax1 = plot.add_subplot(211)
    ax1.set_title('{0} at {1:.2f} with $Q$ of {2:.1f}'.format(
        qspecgram.name, gps, qspecgram.q))
    _format_spectrogram(ax1, qspecgram, colormap=colormap)
    ax1.set_xlabel(None)
    # format fringe frequency plot
    ax2 = plot.add_subplot(212, sharex=ax1)
    ax2.set_title('{} scattering fringes'.format(fringe.name))
    _format_timeseries(ax2,
                       gps,
                       fringe,
                       multipliers=multipliers,
                       thresh=thresh)
    # format timeseries axes
    ax2.set_ylim([-1, 60])
    ax2.set_ylabel('Projected Frequency [Hz]')
    # save plot and close
    plot.savefig(output, bbox_inches='tight')
    plot.close()
示例#3
0
def spectral_comparison(gps, qspecgram, fringe, output, thresh=15,
                        multipliers=(1, 2, 4, 8), colormap='viridis',
                        figsize=(12, 8)):
    """Compare a high-resolution spectrogram with projected fringe frequencies

    Parameters
    ----------
    gps : `float`
        reference GPS time (in seconds) to serve as the origin

    qspecgram : `~gwpy.spectrogram.Spectrogram`
        an interpolated high-resolution spectrogram

    fringe : `~gwpy.timeseries.TimeSeries`
        projected fringe frequencies (in Hz)

    output : `str`
        name of the output file

    thresh : `float`, optional
        frequency threshold (Hz) for scattering fringes, default: 15

    multipliers : `tuple`, optional
        collection of fringe harmonic numbers to plot, can be given in
        any order, default: `(1, 2, 4, 8)`

    colormap : `str`, optional
        matplotlib colormap to use, default: viridis

    figsize : `tuple`, optional
        size (width x height) of the final figure, default: `(12, 8)`
    """
    # NOTE: figsize default is a tuple (immutable) to avoid the
    # mutable-default-argument pitfall; it also matches the docstring
    plot = Plot(figsize=figsize)
    # format spectral plot
    ax1 = plot.add_subplot(211)
    ax1.set_title('{0} at {1:.2f} with $Q$ of {2:.1f}'.format(
        qspecgram.name, gps, qspecgram.q))
    _format_spectrogram(ax1, qspecgram, colormap=colormap)
    ax1.set_xlabel(None)
    # format fringe frequency plot
    ax2 = plot.add_subplot(212, sharex=ax1)
    ax2.set_title('{} scattering fringes'.format(fringe.name))
    _format_timeseries(ax2, gps, fringe, multipliers=multipliers,
                       thresh=thresh)
    # format timeseries axes
    ax2.set_ylim([-1, 60])
    ax2.set_ylabel('Projected Frequency [Hz]')
    # save plot and close
    plot.savefig(output, bbox_inches='tight')
    plot.close()
示例#4
0
def plot_timeseries(*data, **kwargs):
    """Plot one or more time series with DAQ-state segment bars and save.

    Parameters
    ----------
    *data
        one or more series objects to plot; each must expose ``.name``
        and the first must expose ``.unit``
    title : `str`, optional
        figure title, default: `None`
    ylim : sequence, optional
        ``(ymin, ymax)`` pair for the y-axis; default: auto-scale
    fname : `str`, optional
        output filename, default: ``'TimeSeries.png'``

    Notes
    -----
    Uses the module-level segment lists ``segments_daq_iy0_ok`` and
    ``segments_daq_ix1_ok`` for the state bars.
    """
    title = kwargs.pop('title', None)
    ylim = kwargs.pop('ylim', None)
    fname = kwargs.pop('fname', 'TimeSeries.png')
    plot = Plot(figsize=(15, 10))
    ax0 = plot.gca()
    ax0.plot(*data)
    ax0.legend([text.to_string(_data.name) for _data in data], fontsize=20)
    ax0.set_xscale('auto-gps')
    ax0.set_ylabel(text.to_string(data[0].unit))
    plot.add_state_segments(segments_daq_iy0_ok, label='IY0 DAQ State')
    plot.add_state_segments(segments_daq_ix1_ok, label='IX1 DAQ State')
    plot.suptitle(title, fontsize=40)
    # explicit None check: a falsy-but-valid ylim container must not be
    # silently ignored by a bare truthiness test
    if ylim is not None:
        ax0.set_ylim(ylim[0], ylim[1])
    plot.savefig(fname)
    plot.close()
示例#5
0
文件: plot.py 项目: uberpye/gwdetchar
def spectral_overlay(gps,
                     qspecgram,
                     fringe,
                     output,
                     multipliers=(1, 2, 4, 8),
                     figsize=(12, 4)):
    """Overlay scattering fringe projections on top of a high-resolution
    spectrogram

    Parameters
    ----------
    gps : `float`
        reference GPS time (in seconds) to serve as the origin

    qspecgram : `~gwpy.spectrogram.Spectrogram`
        an interpolated high-resolution spectrogram

    fringe : `~gwpy.timeseries.TimeSeries`
        projected fringe frequencies (in Hz)

    output : `str`
        name of the output file

    multipliers : `tuple`, optional
        collection of fringe harmonic numbers to plot, can be given in
        any order, default: `(1, 2, 4, 8)`

    figsize : `tuple`, optional
        size (width x height) of the final figure, default: `(12, 4)`
    """
    # NOTE: figsize default is a tuple (immutable) to avoid the
    # mutable-default-argument pitfall; it also matches the docstring
    plot = Plot(figsize=figsize)
    ax = plot.gca()
    # format spectrogram plot
    ax.set_title('Fringes: {0}, Spectrogram: {1}'.format(
        fringe.name, qspecgram.name))
    _format_spectrogram(ax, qspecgram, colormap='binary')
    # overlay fringe frequencies
    _format_timeseries(ax, gps, fringe, multipliers=multipliers, linewidth=1.5)
    ax.set_ylim([
        qspecgram.f0.to('Hz').value,
        qspecgram.frequencies.max().to('Hz').value
    ])
    # save plot and close
    plot.savefig(output, bbox_inches='tight')
    plot.close()
示例#6
0
def spectral_overlay(gps, qspecgram, fringe, output,
                     multipliers=(1, 2, 4, 8), figsize=(12, 4)):
    """Overlay scattering fringe projections on top of a high-resolution
    spectrogram

    Parameters
    ----------
    gps : `float`
        reference GPS time (in seconds) to serve as the origin

    qspecgram : `~gwpy.spectrogram.Spectrogram`
        an interpolated high-resolution spectrogram

    fringe : `~gwpy.timeseries.TimeSeries`
        projected fringe frequencies (in Hz)

    output : `str`
        name of the output file

    multipliers : `tuple`, optional
        collection of fringe harmonic numbers to plot, can be given in
        any order, default: `(1, 2, 4, 8)`

    figsize : `tuple`, optional
        size (width x height) of the final figure, default: `(12, 4)`
    """
    # NOTE: figsize default is a tuple (immutable) to avoid the
    # mutable-default-argument pitfall; it also matches the docstring
    plot = Plot(figsize=figsize)
    ax = plot.gca()
    # format spectrogram plot
    ax.set_title('Fringes: {0}, Spectrogram: {1}'.format(
        fringe.name, qspecgram.name))
    _format_spectrogram(ax, qspecgram, colormap='binary')
    # overlay fringe frequencies
    _format_timeseries(ax, gps, fringe, multipliers=multipliers,
                       linewidth=1.5)
    ax.set_ylim([qspecgram.f0.to('Hz').value,
                 qspecgram.frequencies.max().to('Hz').value])
    # save plot and close
    plot.savefig(output, bbox_inches='tight')
    plot.close()
示例#7
0
            figsize=[12, 6],
            separate=True,
            sharex=True,
            color='gwpy:ligo-hanford')
ax1, ax2 = plot.axes
ax1.set_title('LIGO-Hanford strain data around GW150914')
ax1.text(1.0, 1.01, 'Unfiltered data', transform=ax1.transAxes, ha='right')
ax1.set_ylabel('Amplitude [strain]', y=-0.2)
ax2.set_ylabel('')
# raw string: '\,' (LaTeX thin space) is an invalid escape in a plain
# literal and raises SyntaxWarning on Python >= 3.12
ax2.text(1.0,
         1.01,
         r'50-250\,Hz bandpass, notches at 60, 120, 180 Hz',
         transform=ax2.transAxes,
         ha='right')
plot.show()
plot.close()  # hide

# We see now a spike around 16 seconds into the data, so let's zoom into
# that time (and prettify):

plot = hfilt.plot(color='gwpy:ligo-hanford')
ax = plot.gca()
ax.set_title('LIGO-Hanford strain data around GW150914')
ax.set_ylabel('Amplitude [strain]')
ax.set_xlim(1126259462, 1126259462.6)
ax.set_xscale('seconds', epoch=1126259462)
plot.show()
plot.close()  # hide

# Congratulations, you have successfully filtered LIGO data to uncover the
# first ever directly-detected gravitational wave signal, GW150914!
示例#8
0
        ax.loglog(adcnoise, 'r--', label='ADC Noise', linewidth=1, alpha=0.7)
        ax.loglog(ampnoise, 'g--', label='Amp Noise', linewidth=1, alpha=0.7)
        ax.loglog(aanoise, 'b--', label='AA Noise', linewidth=1, alpha=0.7)
        ax.loglog(exv0, 'k-', label='EXV')
        ax.loglog(ixv1, label='IXV1')
        #ax.loglog(ixv2,label='IXV2')
        #ax.loglog(d12,label='IXV1-IXV2')
        #ax.loglog(strain,label='StrainMeter')
        #ImportError: No module named gwpy.plot

        ax.legend(fontsize=8, loc='lower left')
        ax.set_title(
            'Seismometer, {dname}'.format(dname=dataname.replace('_', '')),
            fontsize=16)
        plot.savefig('./{dname}/ASD_{dname}_X.png'.format(dname=dataname))
        plot.close()
        print('save ./{dname}/ASD_{dname}_X.png'.format(dname=dataname))

#
    f = np.logspace(-3, 2, 1e4)
    w = 2.0 * np.pi * f
    L = 3000.0  # m
    c_p = 5500.0  # m/sec
    cdmr_p = lambda w, c: np.sqrt((1.0 + np.cos(L * w / c)) /
                                  (1.0 - np.cos(L * w / c)))
    cdmr_ps = lambda w, c: np.sqrt((1.0 + jv(0, 2 * L * w / c)) /
                                   (1.0 - jv(0, 2 * L * w / c)))

    if True:
        from gwpy.plot import Plot
        import matplotlib.pyplot as plt
示例#9
0
# Finally, we can :meth:`~TimeSeries.plot` the original and filtered data,
# adding some code to prettify the figure:

from gwpy.plot import Plot
plot = Plot(hdata, hfilt, figsize=[12, 6], separate=True, sharex=True,
            color='gwpy:ligo-hanford')
ax1, ax2 = plot.axes
ax1.set_title('LIGO-Hanford strain data around GW150914')
ax1.text(1.0, 1.01, 'Unfiltered data', transform=ax1.transAxes, ha='right')
ax1.set_ylabel('Amplitude [strain]', y=-0.2)
ax2.set_ylabel('')
# raw string: '\,' (LaTeX thin space) is an invalid escape in a plain
# literal and raises SyntaxWarning on Python >= 3.12
ax2.text(1.0, 1.01, r'50-250\,Hz bandpass, notches at 60, 120, 180 Hz',
         transform=ax2.transAxes, ha='right')
plot.show()
plot.close()  # hide

# We see now a spike around 16 seconds into the data, so let's zoom into
# that time (and prettify):

plot = hfilt.plot(color='gwpy:ligo-hanford')
ax = plot.gca()
ax.set_title('LIGO-Hanford strain data around GW150914')
ax.set_ylabel('Amplitude [strain]')
ax.set_xlim(1126259462, 1126259462.6)
ax.set_xscale('seconds', epoch=1126259462)
plot.show()
plot.close()  # hide

# Congratulations, you have successfully filtered LIGO data to uncover the
# first ever directly-detected gravitational wave signal, GW150914!
示例#10
0
def significance_drop(outfile, old, new, show_channel_names=None, **kwargs):
    """Plot the significance drop for each channel

    Parameters
    ----------
    outfile : `str`
        target filename; an ``.svg`` target additionally gets
        interactive hover tooltips
    old : `dict`
        mapping of channel name to significance before vetoing
    new : `dict`
        mapping of channel name to significance after vetoing
    show_channel_names : `bool`, optional
        print channel names on the x-axis; default (`None`) enables them
        for 50 channels or fewer
    **kwargs
        other keyword arguments passed to ``_finalize_plot``
        (``ylabel`` defaults to ``'Significance'``)
    """
    channels = sorted(old.keys())
    if show_channel_names is None:
        show_channel_names = len(channels) <= 50

    plot = Plot(figsize=(18, 6))
    plot.subplots_adjust(left=.07, right=.93)
    ax = plot.gca()
    if show_channel_names:
        plot.subplots_adjust(bottom=.4)

    # the channel with the highest pre-veto significance is the round winner
    winner = sorted(old.items(), key=lambda x: x[1])[-1][0]

    for i, c in enumerate(channels):
        if c == winner:
            color = 'orange'
        elif old[c] > new[c]:
            color = 'dodgerblue'
        else:
            color = 'red'
        ax.plot([i, i], [old[c], new[c]], color=color, linestyle='-',
                marker='o', markeredgecolor='k', markeredgewidth=.5,
                markersize=10, label=c, zorder=old[c])

    ax.set_xlim(-1, len(channels))
    ax.set_ybound(lower=0)

    # set xticks to show channel names
    if show_channel_names:
        ax.set_xticks(range(len(channels)))
        # raw string: '\_' is an invalid escape in a plain literal
        # (SyntaxWarning on Python >= 3.12); escape '_' for LaTeX rendering
        ax.set_xticklabels([c.replace('_', r'\_') for c in channels])
        for i, t in enumerate(ax.get_xticklabels()):
            t.set_rotation(270)
            t.set_verticalalignment('top')
            t.set_horizontalalignment('center')
            t.set_fontsize(8)
    # or just show systems of channels
    else:
        plot.canvas.draw()
        systems = {}
        for i, c in enumerate(channels):
            sys = c.split(':', 1)[1].split('-')[0].split('_')[0]
            try:
                systems[sys][1] += 1
            except KeyError:
                systems[sys] = [i, 1]
        systems = sorted(systems.items(), key=lambda x: x[1][0])
        labels, counts = zip(*systems)
        xticks, xmticks = zip(*[(a, a+b/2.) for (a, b) in counts])
        # show ticks at the edge of each group
        ax.set_xticks(xticks, minor=False)
        ax.set_xticklabels([], minor=False)
        # show label in the centre of each group
        ax.set_xticks(xmticks, minor=True)
        for t in ax.set_xticklabels(labels, minor=True):
            t.set_rotation(270)

    kwargs.setdefault('ylabel', 'Significance')

    # create interactivity
    if outfile.endswith('.svg'):
        _finalize_plot(plot, ax, outfile.replace('.svg', '.png'),
                       close=False, **kwargs)
        tooltips = []
        ylim = ax.get_ylim()
        yoffset = (ylim[1] - ylim[0]) * 0.061
        bbox = {'fc': 'w', 'ec': '.5', 'alpha': .9, 'boxstyle': 'round'}
        xthresh = len(channels) / 10.
        for i, l in enumerate(ax.lines):
            x = l.get_xdata()[1]
            if x < xthresh:
                ha = 'left'
            elif x > (len(channels) - xthresh):
                ha = 'right'
            else:
                ha = 'center'
            y = l.get_ydata()[0] + yoffset
            c = l.get_label()
            tooltips.append(ax.annotate(c.replace('_', r'\_'), (x, y),
                                        ha=ha, zorder=ylim[1], bbox=bbox))
            l.set_gid('line-%d' % i)
            tooltips[-1].set_gid('tooltip-%d' % i)

        f = BytesIO()
        plot.savefig(f, format='svg')
        tree, xmlid = etree.XMLID(f.getvalue())
        tree.set('onload', 'init(evt)')
        for i in range(len(tooltips)):
            try:
                e = xmlid['tooltip-%d' % i]
            except KeyError:
                warnings.warn("Failed to recover tooltip %d" % i)
                continue
            e.set('visibility', 'hidden')
        for i, l in enumerate(ax.lines):
            e = xmlid['line-%d' % i]
            e.set('onmouseover', 'ShowTooltip(this)')
            e.set('onmouseout', 'HideTooltip(this)')
        tree.insert(0, etree.XML(SHOW_HIDE_JAVASCRIPT))
        etree.ElementTree(tree).write(outfile)
        plot.close()
    else:
        _finalize_plot(plot, ax, outfile, **kwargs)
示例#11
0
    def process_channel(input_,):
        """Correlate one channel against DARM BLRMS and produce summary plots.

        Parameters
        ----------
        input_ : `tuple`
            ``(chan, ts)`` pair of channel name and its time series.

        Returns
        -------
        tuple
            ``(chan, corr1, corr2, plot1, plot2, plot3, corr1s, corr2s)``
            where the correlation values are `None` for flat channels and
            the ``plot*`` entries are output filenames or `None`.

        Notes
        -----
        NOTE(review): relies on closure variables from the enclosing scope
        (``darmblrms``, ``rangets``, ``args``, ``p1``, ``p2``, ``start``,
        ``end``, ``gpsstub``, ``rangerms``, ``darmrms``, ``darmscaled``,
        ``rangescaled``, ``counter``, ``nchan``, ``re_delim``) — confirm
        their definitions in the enclosing function.
        """
        chan, ts = input_
        # a constant series has zero variance, so correlations are undefined
        flat = ts.value.min() == ts.value.max()
        if flat:
            corr1 = None
            corr2 = None
            corr1s = None
            corr2s = None
            plot1 = None
            plot2 = None
            plot3 = None
        else:
            # Pearson and Spearman correlation against DARM BLRMS
            corr1 = numpy.corrcoef(ts.value, darmblrms.value)[0, 1]
            corr1s = spearmanr(ts.value, darmblrms.value)[0]
            if args.trend_type == 'minute':
                # minute trends also correlate against the sensitive range
                corr2 = numpy.corrcoef(ts.value, rangets.value)[0, 1]
                corr2s = spearmanr(ts.value, rangets.value)[0]
            else:
                corr2 = 0.0
                corr2s = 0.0
            # if all correlations are below threshold it does not plot
            if((abs(corr1) < args.threshold)
               and (abs(corr1s) < args.threshold)
               and (abs(corr2) < args.threshold)
               and (abs(corr2s) < args.threshold)):
                plot1 = None
                plot2 = None
                plot3 = None
                return (chan, corr1, corr2, plot1,
                        plot2, plot3, corr1s, corr2s)

            # plot raw trends of DARM BLRMS, the channel, and the range
            plot = Plot(darmblrms, ts, rangets,
                        xscale="auto-gps", separate=True,
                        figsize=(12, 12))
            plot.subplots_adjust(*p1)
            plot.axes[0].set_ylabel('$h(t)$ BLRMS [strain]')
            plot.axes[1].set_ylabel('Channel units')
            plot.axes[2].set_ylabel('Sensitive range [Mpc]')
            for ax in plot.axes:
                ax.legend(loc='best')
                ax.set_xlim(start, end)
                ax.set_epoch(start)
            channelstub = re_delim.sub('_', str(chan)).replace('_', '-', 1)
            plot1 = '%s_TRENDS-%s.png' % (channelstub, gpsstub)
            # save with one blind retry on transient IOError/IndexError,
            # and a retry for LaTeX-related RuntimeErrors only
            try:
                plot.save(plot1)
            except (IOError, IndexError):
                plot.save(plot1)
            except RuntimeError as e:
                if 'latex' in str(e).lower():
                    plot.save(plot1)
                else:
                    raise
            plot.close()

            # plot auto-scaled versions
            tsscaled = ts.detrend()
            tsrms = numpy.sqrt(sum(tsscaled**2.0)/len(tsscaled))
            if args.trend_type == 'minute':
                # scale to the range RMS; flip sign for positive correlation
                tsscaled *= (rangerms / tsrms)
                if corr1 > 0:
                    tsscaled *= -1
            else:
                # scale to the DARM RMS; flip sign for negative correlation
                tsscaled *= (darmrms / tsrms)
                if corr1 < 0:
                    tsscaled *= -1
            plot = Plot(darmscaled, rangescaled, tsscaled,
                        xscale="auto-gps", figsize=[12, 6])
            plot.subplots_adjust(*p2)
            ax = plot.gca()
            ax.set_xlim(start, end)
            ax.set_epoch(start)
            ax.set_ylabel('Scaled amplitude [arbitrary units]')
            ax.legend(loc='best')
            plot2 = '%s_COMPARISON-%s.png' % (channelstub, gpsstub)
            # same retry policy as for plot1
            try:
                plot.save(plot2)
            except (IOError, IndexError):
                plot.save(plot2)
            except RuntimeError as e:
                if 'latex' in str(e).lower():
                    plot.save(plot2)
                else:
                    raise
            plot.close()

            # plot scatter plots
            rangeColor = 'red'
            darmblrmsColor = 'blue'

            # reshape to column vectors for sklearn's regression API
            tsCopy = ts.reshape(-1, 1)
            rangetsCopy = rangets.reshape(-1, 1)
            darmblrmsCopy = darmblrms.reshape(-1, 1)

            # linear fit of DARM BLRMS against the channel
            darmblrmsReg = linear_model.LinearRegression()
            darmblrmsReg.fit(tsCopy, darmblrmsCopy)
            darmblrmsFit = darmblrmsReg.predict(tsCopy)

            # linear fit of the sensitive range against the channel
            rangeReg = linear_model.LinearRegression()
            rangeReg.fit(tsCopy, rangetsCopy)
            rangeFit = rangeReg.predict(tsCopy)

            fig = Plot(figsize=(12, 6))
            fig.subplots_adjust(*p2)
            ax = fig.add_subplot(121)
            ax.set_xlabel('Channel units')
            ax.set_ylabel('Sensitive range [Mpc]')
            # pad the y-limits by 10% of the DARM BLRMS span
            yrange = abs(max(darmblrms.value) - min(darmblrms.value))
            upperLim = max(darmblrms.value) + .1 * yrange
            lowerLim = min(darmblrms.value) - .1 * yrange
            ax.set_ylim(lowerLim, upperLim)
            ax.text(.9, .1, 'r = ' + str('{0:.2}'.format(corr1)),
                    verticalalignment='bottom', horizontalalignment='right',
                    transform=ax.transAxes, color='black', size=20,
                    bbox=dict(boxstyle='square', facecolor='white', alpha=.75,
                              edgecolor='black'))
            fig.add_scatter(ts, darmblrms, color=darmblrmsColor)
            fig.add_line(ts, darmblrmsFit, color='black')

            ax = fig.add_subplot(122)
            ax.set_xlabel('Channel units')
            ax.set_ylabel('$h(t)$ BLRMS [strain]')
            ax.text(.9, .1, 'r = ' + str('{0:.2}'.format(corr2)),
                    verticalalignment='bottom', horizontalalignment='right',
                    transform=ax.transAxes, color='black', size=20,
                    bbox=dict(boxstyle='square', facecolor='white', alpha=.75,
                              edgecolor='black'))
            fig.add_scatter(ts, rangets, color=rangeColor)
            fig.add_line(ts, rangeFit, color='black')

            plot3 = '%s_SCATTER-%s.png' % (channelstub, gpsstub)
            # same retry policy as for plot1/plot2
            try:
                fig.save(plot3)
            except (IOError, IndexError):
                fig.save(plot3)
            except RuntimeError as e:
                if 'latex' in str(e).lower():
                    fig.save(plot3)
                else:
                    raise
            plt.close(fig)

        # increment counter and print status
        with counter.get_lock():
            counter.value += 1
            pc = 100 * counter.value / nchan
            LOGGER.debug("Completed [%d/%d] %3d%% %-50s"
                         % (counter.value, nchan, pc, '(%s)' % str(chan)))
            sys.stdout.flush()
        return chan, corr1, corr2, plot1, plot2, plot3, corr1s, corr2s
示例#12
0
文件: plot.py 项目: gwdetchar/hveto
def significance_drop(outfile, old, new, show_channel_names=None, **kwargs):
    """Plot the significance drop for each channel

    Parameters
    ----------
    outfile : `str`
        target filename; an ``.svg`` target additionally gets
        interactive hover tooltips
    old : `dict`
        mapping of channel name to significance before vetoing
    new : `dict`
        mapping of channel name to significance after vetoing
    show_channel_names : `bool`, optional
        print channel names on the x-axis; default (`None`) enables them
        for 50 channels or fewer
    **kwargs
        other keyword arguments passed to ``_finalize_plot``
        (``ylabel`` defaults to ``'Significance'``)
    """
    channels = sorted(old.keys())
    if show_channel_names is None:
        show_channel_names = len(channels) <= 50

    plot = Plot(figsize=(20, 5))
    plot.subplots_adjust(left=.07, right=.93)
    ax = plot.gca()
    if show_channel_names:
        plot.subplots_adjust(bottom=.4)

    # the channel with the highest pre-veto significance is the round winner
    winner = sorted(old.items(), key=lambda x: x[1])[-1][0]

    # one vertical line per channel: orange for the winner, blue where the
    # significance dropped, red where it (unexpectedly) rose
    for i, c in enumerate(channels):
        if c == winner:
            color = 'orange'
        elif old[c] > new[c]:
            color = 'dodgerblue'
        else:
            color = 'red'
        ax.plot([i, i], [old[c], new[c]],
                color=color,
                linestyle='-',
                marker='o',
                markeredgecolor='k',
                markeredgewidth=.5,
                markersize=10,
                label=c,
                zorder=old[c])

    ax.set_xlim(-1, len(channels))
    ax.set_ybound(lower=0)

    # set xticks to show channel names
    if show_channel_names:
        ax.set_xticks(range(len(channels)))
        ax.set_xticklabels([texify(c) for c in channels])
        for i, t in enumerate(ax.get_xticklabels()):
            t.set_rotation(270)
            t.set_verticalalignment('top')
            t.set_horizontalalignment('center')
            t.set_fontsize(8)
    # or just show systems of channels
    else:
        plot.canvas.draw()
        # group channels by subsystem prefix, recording first index and count
        systems = {}
        for i, c in enumerate(channels):
            sys = c.split(':', 1)[1].split('-')[0].split('_')[0]
            try:
                systems[sys][1] += 1
            except KeyError:
                systems[sys] = [i, 1]
        systems = sorted(systems.items(), key=lambda x: x[1][0])
        labels, counts = zip(*systems)
        xticks, xmticks = zip(*[(a, a + b / 2.) for (a, b) in counts])
        # show ticks at the edge of each group
        ax.set_xticks(xticks, minor=False)
        ax.set_xticklabels([], minor=False)
        # show label in the centre of each group
        ax.set_xticks(xmticks, minor=True)
        for t in ax.set_xticklabels(labels, minor=True):
            t.set_rotation(270)

    kwargs.setdefault('ylabel', 'Significance')

    # create interactivity
    if outfile.endswith('.svg'):
        # write the static PNG first, keeping the figure open so tooltip
        # annotations can still be added before the SVG render
        _finalize_plot(plot,
                       ax,
                       outfile.replace('.svg', '.png'),
                       close=False,
                       **kwargs)
        tooltips = []
        ylim = ax.get_ylim()
        yoffset = (ylim[1] - ylim[0]) * 0.061
        bbox = {'fc': 'w', 'ec': '.5', 'alpha': .9, 'boxstyle': 'round'}
        xthresh = len(channels) / 10.
        # annotate each line with its channel name, aligned away from the
        # plot edges, and tag line/tooltip pairs with matching SVG gids
        for i, l in enumerate(ax.lines):
            x = l.get_xdata()[1]
            if x < xthresh:
                ha = 'left'
            elif x > (len(channels) - xthresh):
                ha = 'right'
            else:
                ha = 'center'
            y = l.get_ydata()[0] + yoffset
            c = l.get_label()
            tooltips.append(
                ax.annotate(texify(c), (x, y),
                            ha=ha,
                            zorder=ylim[1],
                            bbox=bbox))
            l.set_gid('line-%d' % i)
            tooltips[-1].set_gid('tooltip-%d' % i)

        # render to SVG in memory, then post-process the XML to hide the
        # tooltips and wire up mouseover/mouseout handlers
        f = BytesIO()
        plot.savefig(f, format='svg')
        tree, xmlid = etree.XMLID(f.getvalue())
        tree.set('onload', 'init(evt)')
        for i in range(len(tooltips)):
            try:
                e = xmlid['tooltip-%d' % i]
            except KeyError:
                warnings.warn("Failed to recover tooltip %d" % i)
                continue
            e.set('visibility', 'hidden')
        for i, l in enumerate(ax.lines):
            e = xmlid['line-%d' % i]
            e.set('onmouseover', 'ShowTooltip(this)')
            e.set('onmouseout', 'HideTooltip(this)')
        tree.insert(0, etree.XML(SHOW_HIDE_JAVASCRIPT))
        etree.ElementTree(tree).write(outfile)
        plot.close()
    else:
        _finalize_plot(plot, ax, outfile, **kwargs)
示例#13
0
def main(args=None):
    use('agg')
    rcParams.update({
        'figure.subplot.bottom': 0.15,
        'figure.subplot.left': 0.1,
        'figure.subplot.right': 0.83,
        'figure.subplot.top': 0.93,
        'figure.subplot.hspace': 0.25,
        'axes.labelsize': 20,
        'grid.color': 'gray',
    })
    grid = GridSpec(2, 1)

    logger = log.Logger('omicron-status')

    try:
        omicronversion = str(get_omicron_version())
    except KeyError:
        omicronversion = 'Unknown'
        logger.warning("Omicron version unknown")
    else:
        logger.info("Found omicron version: %s" % omicronversion)

    parser = create_parser()
    args = parser.parse_args(args=args)

    if args.ifo is None:
        parser.error("Cannot determine IFO prefix from sytem, "
                     "please pass --ifo on the command line")

    group = args.group

    logger.info("Checking status for %r group" % group)

    archive = args.archive_directory
    proddir = args.production_directory.with_name(
        args.production_directory.name.format(group=args.group), )
    outdir = args.output_directory
    outdir.mkdir(exist_ok=True, parents=True)
    tag = args.latency_archive_tag.format(group=args.group)

    filetypes = ['h5', 'xml.gz', 'root']

    logger.debug("Set output directory to %s" % outdir)
    logger.debug(
        "Will process the following filetypes: {}".format(
            ", ".join(filetypes), ), )

    # -- parse configuration file and get parameters --------------------------

    cp = configparser.ConfigParser()
    ok = cp.read(args.config_file)
    if args.config_file not in ok:
        raise IOError(
            "Failed to read configuration file %r" % args.config_file, )
    logger.info("Configuration read")

    # validate
    if not cp.has_section(group):
        raise configparser.NoSectionError(group)

    # get parameters
    obs = args.ifo[0]
    frametype = cp.get(group, 'frametype')
    padding = cp.getint(group, 'overlap-duration') / 2.
    mingap = cp.getint(group, 'chunk-duration')

    channels = args.channel
    if not channels:
        channels = [
            c.split()[0]
            for c in cp.get(group, 'channels').strip('\n').split('\n')
        ]
    channels.sort()
    logger.debug("Found %d channels" % len(channels))

    start = args.gps_start_time
    end = args.gps_end_time
    if end == NOW:
        end -= padding

    if args.state_flag:
        stateflag = args.state_flag
        statepad = tuple(map(float, args.state_pad.split(',')))
    else:
        try:
            stateflag = cp.get(group, 'state-flag')
        except configparser.NoOptionError:
            stateflag = None
        else:
            try:
                statepad = tuple(
                    map(
                        float,
                        cp.get(group, 'state-padding').split(','),
                    ))
            except configparser.NoOptionError:
                statepad = (0, 0)
    if stateflag:
        logger.debug("Parsed state flag: %r" % stateflag)
        logger.debug("Parsed state padding: %s" % repr(statepad))
    logger.info("Processing %d-%d" % (start, end))

    # -- define nagios JSON printer -------------------------------------------

    def print_nagios_json(code, message, outfile, tag='status', **extras):
        out = {
            'created_gps':
            NOW,
            'status_intervals': [
                {
                    'start_sec': 0,
                    'end_sec': args.unknown,
                    'num_status': code,
                    'txt_status': message
                },
                {
                    'start_sec': args.unknown,
                    'num_status': 3,
                    'txt_status': 'Omicron %s check is not running' % tag
                },
            ],
            'author': {
                'name': 'Duncan Macleod',
                'email': '*****@*****.**',
            },
            'omicron': {
                'version': omicronversion,
                'group': group,
                'channels': ' '.join(channels),
                'frametype': frametype,
                'state-flag': stateflag,
            },
            'pyomicron': {
                'version': __version__,
            },
        }
        out.update(extras)
        with open(outfile, 'w') as f:
            f.write(json.dumps(out))
        logger.debug("nagios info written to %s" % outfile)

    # -- get condor status ------------------------------------------------

    if not args.skip_condor:
        # connect to scheduler
        try:
            schedd = htcondor.Schedd()
        except RuntimeError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
            schedd = None

    if not args.skip_condor and schedd:
        logger.info("-- Checking condor status --")

        # get DAG status
        jsonfp = outdir / "nagios-condor-{}.json".format(group)
        okstates = ['Running', 'Idle', 'Completed']
        try:
            # check manager status
            qstr = 'OmicronManager == "{}" && Owner == "{}"'.format(
                group,
                args.user,
            )
            try:
                jobs = schedd.query(qstr, ['JobStatus'])
            except IOError as e:
                warnings.warn("Caught IOError: %s [retrying...]" % str(e))
                sleep(2)
                jobs = schedd.query(qstr, ['JobStatus'])
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            if len(jobs) > 1:
                raise RuntimeError(
                    "Multiple OmicronManager jobs found for %r" % group)
            elif len(jobs) == 0:
                raise RuntimeError(
                    "No OmicronManager job found for %r" % group, )
            status = condor.JOB_STATUS[jobs[0]['JobStatus']]
            if status not in okstates:
                raise RuntimeError("OmicronManager status for %r: %r" %
                                   (group, status))
            logger.debug("Manager status is %r" % status)
            # check node status
            jobs = schedd.query(
                'OmicronProcess == "{}" && Owner == "{}"'.format(
                    group,
                    args.user,
                ),
                ['JobStatus', 'ClusterId'],
            )
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            for job in jobs:
                status = condor.JOB_STATUS[job['JobStatus']]
                if status not in okstates:
                    raise RuntimeError("Omicron node %s (%r) is %r" %
                                       (job['ClusterId'], group, status))
        except RuntimeError as e:
            print_nagios_json(2, str(e), jsonfp, tag='condor')
            logger.warning("Failed to determine condor status: %r" % str(e))
        except IOError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
        else:
            print_nagios_json(
                0,
                "Condor processing for %r is OK" % group,
                jsonfp,
                tag='condor',
            )
            logger.info("Condor processing is OK")

    if not args.skip_job_duration:
        # get job duration history
        plot = Plot(figsize=[12, 3])
        plot.subplots_adjust(bottom=.22, top=.87)
        ax = plot.gca(xscale="auto-gps")
        times, jobdur = condor.get_job_duration_history_shell('OmicronProcess',
                                                              group,
                                                              maxjobs=5000)
        logger.debug("Recovered duration history for %d omicron.exe jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Omicron.exe')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        times, jobdur = condor.get_job_duration_history_shell(
            'OmicronPostProcess', group, maxjobs=5000)
        logger.debug("Recovered duration history for %d post-processing jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Post-processing')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        ax.legend(loc='upper left',
                  borderaxespad=0,
                  bbox_to_anchor=(1.01, 1),
                  handlelength=1)
        ax.set_xlim(args.gps_start_time, args.gps_end_time)
        ax.set_epoch(ax.get_xlim()[1])
        ax.set_yscale('log')
        ax.set_title('Omicron job durations for %r' % group)
        ax.set_ylabel('Job duration [seconds]')
        ax.xaxis.labelpad = 5
        png = str(outdir / "nagios-condor-{}.png".format(group))
        plot.save(png)
        plot.close()
        logger.debug("Saved condor plot to %s" % png)

    if args.skip_file_checks:
        sys.exit(0)

    # -- get file latency and archive completeness ----------------------------

    logger.info("-- Checking file archive --")

    # get frame segments
    segs = segments.get_frame_segments(obs, frametype, start, end)

    # get state segments
    if stateflag is not None:
        segs &= segments.query_state_segments(
            stateflag,
            start,
            end,
            pad=statepad,
        )

    try:
        end = segs[-1][1]
    except IndexError:
        pass

    # apply inwards padding to generate resolvable segments
    for i in range(len(segs) - 1, -1, -1):
        # if segment is shorter than padding, ignore it completely
        if abs(segs[i]) <= padding * 2:
            del segs[i]
        # otherwise apply padding to generate trigger segment
        else:
            segs[i] = segs[i].contract(padding)
    logger.debug("Found %d seconds of analysable time" % abs(segs))

    # load archive latency
    latencyfile = outdir / "nagios-latency-{}.h5".format(tag)
    times = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    ldata = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    try:
        with h5py.File(latencyfile, 'r') as h5file:
            for c in channels:
                for ft in filetypes:
                    try:
                        times[c][ft] = h5file[c]['time'][ft][:]
                        ldata[c][ft] = h5file[c]['latency'][ft][:]
                    except KeyError:
                        times[c][ft] = numpy.ndarray((0, ))
                        ldata[c][ft] = numpy.ndarray((0, ))
    except OSError as exc:  # file not found, or is corrupt
        warnings.warn("failed to load latency data from {}: {}".format(
            latencyfile,
            str(exc),
        ))
        for c in channels:
            for ft in filetypes:
                if not times[c].get(ft):
                    times[c][ft] = numpy.ndarray((0, ))
                    ldata[c][ft] = numpy.ndarray((0, ))
    else:
        logger.debug("Parsed latency data from %s" % latencyfile)

    # load acknowledged gaps
    acksegfile = str(outdir / "acknowledged-gaps-{}.txt".format(tag))
    try:
        acknowledged = SegmentList.read(acksegfile,
                                        gpstype=float,
                                        format="segwizard")
    except IOError:  # no file
        acknowledged = SegmentList()
    else:
        logger.debug(
            "Read %d segments from %s" % (len(acknowledged), acksegfile), )
        acknowledged.coalesce()

    # build legend for segments
    leg = OrderedDict()
    leg['Analysable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgray',
        edgecolor='gray',
    )
    leg['Available'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgreen',
        edgecolor='green',
    )
    leg['Missing'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='red',
        edgecolor='darkred',
    )
    leg['Unresolvable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='magenta',
        edgecolor='purple',
    )
    leg['Overlapping'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='yellow',
        edgecolor='orange',
    )
    leg['Pending'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightskyblue',
        edgecolor='blue',
    )
    leg['Acknowledged'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='sandybrown',
        edgecolor='brown',
    )

    logger.debug("Checking archive for each channel...")

    # find files
    latency = {}
    gaps = {}
    overlap = {}
    pending = {}
    plots = {}
    for c in channels:
        # create data storate
        latency[c] = {}
        gaps[c] = {}
        overlap[c] = {}
        pending[c] = {}

        # create figure
        plot = Plot(figsize=[12, 5])
        lax = plot.add_subplot(grid[0, 0], xscale="auto-gps")
        sax = plot.add_subplot(grid[1, 0], sharex=lax, projection='segments')
        colors = ['lightblue', 'dodgerblue', 'black']

        for y, ft in enumerate(filetypes):
            # find files
            cache = io.find_omicron_files(c, start, end, archive, ext=ft)
            cpend = sieve_cache(io.find_pending_files(c, proddir, ext=ft),
                                segment=Segment(start, end))
            # get available segments
            avail = segments.cache_segments(cache)
            found = avail & segs
            pending[c][ft] = segments.cache_segments(cpend) & segs
            # remove gaps at the end that represent latency
            try:
                latency[c][ft] = abs(segs & type(
                    segs)([type(segs[0])(found[-1][1], segs[-1][1])])) / 3600.
            except IndexError:
                latency[c][ft] = 0
                processed = segs
            else:
                processed = segs & type(segs)(
                    [type(segs[0])(start, found[-1][1])])
            gaps[c][ft] = type(found)()
            lost = type(found)()
            for s in processed - found:
                if abs(s) < mingap and s in list(segs):
                    lost.append(s)
                else:
                    gaps[c][ft].append(s)
            # remove acknowledged gaps
            ack = gaps[c][ft] & acknowledged
            gaps[c][ft] -= acknowledged
            # print warnings
            if abs(gaps[c][ft]):
                warnings.warn("Gaps found in %s files for %s:\n%s" %
                              (c, ft, gaps[c][ft]))
            overlap[c][ft] = segments.cache_overlaps(cache)
            if abs(overlap[c][ft]):
                warnings.warn("Overlap found in %s files for %s:\n%s" %
                              (c, ft, overlap[c][ft]))

            # append archive
            times[c][ft] = numpy.concatenate((times[c][ft][-99999:], [NOW]))
            ldata[c][ft] = numpy.concatenate(
                (ldata[c][ft][-99999:], [latency[c][ft]]))

            # plot
            line = lax.plot(
                times[c][ft],
                ldata[c][ft],
                label=ft,
                color=colors[y],
            )[0]
            lax.plot(times[c][ft],
                     ldata[c][ft],
                     marker='.',
                     linestyle=' ',
                     color=line.get_color())
            sax.plot_segmentlist(segs,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Analysable'].get_facecolor(),
                                 edgecolor=leg['Analysable'].get_edgecolor())
            sax.plot_segmentlist(pending[c][ft],
                                 y=y,
                                 facecolor=leg['Pending'].get_facecolor(),
                                 edgecolor=leg['Pending'].get_edgecolor())
            sax.plot_segmentlist(avail,
                                 y=y,
                                 label=ft,
                                 alpha=.2,
                                 height=.1,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(found,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(lost,
                                 y=y,
                                 facecolor=leg['Unresolvable'].get_facecolor(),
                                 edgecolor=leg['Unresolvable'].get_edgecolor())
            sax.plot_segmentlist(gaps[c][ft],
                                 y=y,
                                 facecolor=leg['Missing'].get_facecolor(),
                                 edgecolor=leg['Missing'].get_edgecolor())
            sax.plot_segmentlist(overlap[c][ft],
                                 y=y,
                                 facecolor=leg['Overlapping'].get_facecolor(),
                                 edgecolor=leg['Overlapping'].get_edgecolor())
            sax.plot_segmentlist(ack,
                                 y=y,
                                 facecolor=leg['Acknowledged'].get_facecolor(),
                                 edgecolor=leg['Acknowledged'].get_edgecolor())

        # finalise plot
        lax.axhline(args.warning / 3600.,
                    color=(1.0, 0.7, 0.0),
                    linestyle='--',
                    linewidth=2,
                    label='Warning',
                    zorder=-1)
        lax.axhline(args.error / 3600.,
                    color='red',
                    linestyle='--',
                    linewidth=2,
                    label='Critical',
                    zorder=-1)
        lax.set_title('Omicron status: {}'.format(c))
        lax.set_ylim(0, args.error / 1800.)
        lax.set_ylabel('Latency [hours]')
        lax.legend(loc='upper left',
                   bbox_to_anchor=(1.01, 1),
                   borderaxespad=0,
                   handlelength=2,
                   fontsize=12.4)
        lax.set_xlabel(' ')
        for ax in plot.axes:
            ax.set_xlim(args.gps_start_time, args.gps_end_time)
            ax.set_epoch(ax.get_xlim()[1])
        sax.xaxis.labelpad = 5
        sax.set_ylim(-.5, len(filetypes) - .5)
        sax.legend(leg.values(),
                   leg.keys(),
                   handlelength=1,
                   fontsize=12.4,
                   loc='lower left',
                   bbox_to_anchor=(1.01, 0),
                   borderaxespad=0)
        plots[c] = png = outdir / "nagios-latency-{}.png".format(
            c.replace(':', '-'), )
        plot.save(png)
        plot.close()
        logger.debug("    %s" % c)

    # update latency and write archive
    h5file = h5py.File(latencyfile, 'w')
    for c in channels:
        g = h5file.create_group(c)
        for name, d in zip(['time', 'latency'], [times[c], ldata[c]]):
            g2 = g.create_group(name)
            for ft in filetypes:
                g2.create_dataset(ft, data=d[ft], compression='gzip')
    h5file.close()
    logger.debug("Stored latency data as HDF in %s" % latencyfile)

    # write nagios output for files
    status = []
    for segset, tag in zip([gaps, overlap], ['gaps', 'overlap']):
        chans = [(c, segset[c]) for c in segset
                 if abs(reduce(operator.or_, segset[c].values()))]
        jsonfp = outdir / "nagios-{}-{}.json".format(tag, group)
        status.append((tag, jsonfp))
        if chans:
            gapstr = '\n'.join('%s: %s' % c for c in chans)
            code = 1
            message = ("%s found in Omicron files for group %r\n%s" %
                       (tag.title(), group, gapstr))
        else:
            code = 0
            message = ("No %s found in Omicron files for group %r" %
                       (tag, group))
        print_nagios_json(code, message, jsonfp, tag=tag, **{tag: dict(chans)})

    # write group JSON
    jsonfp = outdir / "nagios-latency-{}.json".format(group)
    status.append(('latency', jsonfp))
    code = 0
    message = 'No channels have high latency for group %r' % group
    ldict = dict((c, max(latency[c].values())) for c in latency)
    for x, dt in zip([2, 1], [args.error, args.warning]):
        dh = dt / 3600.
        chans = [c for c in ldict if ldict[c] >= dh]
        if chans:
            code = x
            message = (
                "%d channels found with high latency (above %s seconds)" %
                (len(chans), dt))
            break
    print_nagios_json(code, message, jsonfp, tag='latency', latency=ldict)

    # auto-detect 'standard' JSON files
    for tag, name in zip(
        ['condor', 'omicron-online'],
        ['condor', 'processing'],
    ):
        f = outdir / "nagios-{}-{}.json".format(tag, group)
        if f.is_file():
            status.insert(0, (name, f))

    # write HTML summary
    if args.html:
        page = markup.page()
        page.init(
            title="%s Omicron Online status" % group,
            css=[
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/'
                 'bootstrap.min.css'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.css'),
            ],
            script=[
                '//code.jquery.com/jquery-1.11.2.min.js',
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/'
                 'bootstrap.min.js'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.js'),
            ],
        )
        page.div(class_='container')
        # write header
        page.div(class_='page-header')
        page.h1('Omicron Online status: %s' % group)
        page.div.close()  # page-header
        # write summary
        page.div(id_='json')
        page.h2("Processing status")
        for tag, f in status:
            jf = f.name
            page.a("%s status" % tag.title(),
                   href=jf,
                   role='button',
                   target="_blank",
                   id_="nagios-%s" % tag,
                   class_='btn btn-default json-status')
        page.p(style="padding-top: 5px;")
        page.small(
            "Hover over button for explanation, click to open JSON file", )
        page.p.close()
        page.div.close()  # id=json
        # show plots
        page.div(id_='plots')
        page.h2("Channel details")
        page.div(class_='row')
        for channel in sorted(channels):
            png = plots[channel].name
            page.div(class_="col-sm-6 col-md-4")
            page.div(class_="panel panel-default")
            page.div(class_='panel-heading')
            page.h3(channel, class_='panel-title', style="font-size: 14px;")
            page.div.close()  # panel-heading
            page.div(class_='panel-body')
            page.a(href=png,
                   target="_blank",
                   class_="fancybox",
                   rel="channel-status-img")
            page.img(src=png, class_='img-responsive')
            page.a.close()
            page.div.close()  # panel-body
            page.div.close()  # panel
            page.div.close()  # col
        page.div.close()  # row
        page.div.close()  # id=plots

        # dump parameters
        page.div(id_="parameters")
        page.h2("Parameters")
        for key, val in cp.items(group):
            page.p()
            page.strong("%s:" % key)
            page.add(val)
            page.p.close()
        page.div.close()  # id=parameters

        # finish and close
        page.div.close()  # container
        page.script("""
        function setStatus(data, id) {
            var txt = data.status_intervals[0].txt_status.split("\\n")[0];
            $("#"+id).attr("title", txt);
            var stat = data.status_intervals[0].num_status;
            if (stat == 0) {
                $("#"+id).addClass("btn-success"); }
            else if (stat == 1) {
                $("#"+id).addClass("btn-warning"); }
            else if (stat == 2){
                $("#"+id).addClass("btn-danger"); }
        }

        $(document).ready(function() {
            $(".json-status").each(function() {
                var jsonf = $(this).attr("href");
                var id = $(this).attr("id");
                $.getJSON(jsonf, function(data) { setStatus(data, id); });
            });

            $(".fancybox").fancybox({nextEffect: 'none', prevEffect: 'none'});
        });""",
                    type="text/javascript")
        with (outdir / "index.html").open("w") as f:
            f.write(str(page))
        logger.debug("HTML summary written to %s" % f.name)
# --- Example #14 (source-aggregator separator; original marker: 示例#14 / 0) ---
def main(args=None):
    """Run the primary scattering command-line tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # set up logger
    logger = cli.logger(
        name=PROG.split('python -m ').pop(),
        level='DEBUG' if args.verbose else 'INFO',
    )

    # useful variables
    fthresh = (
        int(args.frequency_threshold) if args.frequency_threshold.is_integer()
        else args.frequency_threshold)
    multiplier = args.multiplier_for_threshold
    tstr = str(fthresh).replace('.', '_')
    gpsstr = '%s-%s' % (int(args.gpsstart), int(args.gpsend - args.gpsstart))
    args.optic = args.optic or list(OPTIC_MOTION_CHANNELS.keys())

    # go to working directory
    indir = os.getcwd()
    if not os.path.isdir(args.output_dir):
        os.makedirs(args.output_dir)
    os.chdir(args.output_dir)

    # set up output files
    summfile = '{}-SCATTERING_SUMMARY-{}.csv'.format(
        args.ifo, gpsstr)
    segfile = '{}-SCATTERING_SEGMENTS_{}_HZ-{}.h5'.format(
        args.ifo, tstr, gpsstr)

    # log start of process
    logger.info('{} Scattering {}-{}'.format(
        args.ifo, int(args.gpsstart), int(args.gpsend)))

    # -- get state segments -----------

    span = Segment(args.gpsstart, args.gpsend)

    # get segments
    if args.state_flag is not None:
        state = DataQualityFlag.query(
            args.state_flag, int(args.gpsstart), int(args.gpsend),
            url=DEFAULT_SEGMENT_SERVER,
        ).coalesce()
        statea = []
        padding = args.segment_start_pad + args.segment_end_pad
        for i, seg in enumerate(state.active):
            if abs(seg) > padding:
                statea.append(Segment(
                    seg[0] + args.segment_start_pad,
                    seg[1] - args.segment_end_pad,
                ))
            else:
                logger.debug(
                    "Segment length {} shorter than padding length {}, "
                    "skipping segment {}-{}".format(abs(seg), padding, *seg),
                )
        statea = SegmentList(statea)
        logger.debug("Downloaded %d segments for %s"
                     % (len(statea), args.state_flag))
    else:
        statea = SegmentList([span])
    livetime = float(abs(statea))
    logger.debug("Processing %.2f s of livetime" % livetime)

    # -- load h(t) --------------------

    args.main_channel = args.main_channel.format(IFO=args.ifo)
    logger.debug("Loading Omicron triggers for %s" % args.main_channel)

    if args.gpsstart >= 1230336018:  # Jan 1 2019
        ext = "h5"
        names = ["time", "frequency", "snr"]
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ("time", in_segmentlist, statea),
            ],
            "format": "hdf5",
            "path": "triggers",
        }
    else:
        ext = "xml.gz"
        names = ['peak', 'peak_frequency', 'snr']
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < peak_frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ('peak', in_segmentlist, statea),
            ],
            "format": 'ligolw',
            "tablename": "sngl_burst",
        }

    fullcache = []
    for seg in statea:
        cache = gwtrigfind.find_trigger_files(
            args.main_channel, 'omicron', seg[0], seg[1], ext=ext,
        )
        if len(cache) == 0:
            warnings.warn(
                "No Omicron triggers found for %s in segment [%d .. %d)"
                % (args.main_channel, seg[0], seg[1]),
            )
            continue
        fullcache.extend(cache)

    # read triggers
    if fullcache:
        trigs = EventTable.read(fullcache, nproc=args.nproc, **read_kw)
    else:  # no files (no livetime?)
        trigs = EventTable(names=names)

    highsnrtrigs = trigs[trigs['snr'] >= 8]
    logger.debug("%d read" % len(trigs))

    # -- prepare HTML -----------------

    links = [
        '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
        ('Parameters', '#parameters'),
        ('Segments', (
            ('State flag', '#state-flag'),
            ('Optical sensors', '#osems'),
            ('Transmons', '#transmons'),
        )),
    ]
    if args.omega_scans:
        links.append(('Scans', '#omega-scans'))
    (brand, class_) = htmlio.get_brand(args.ifo, 'Scattering', args.gpsstart)
    navbar = htmlio.navbar(links, class_=class_, brand=brand)
    page = htmlio.new_bootstrap_page(
        title='%s Scattering | %d-%d' % (
            args.ifo, int(args.gpsstart), int(args.gpsend)),
        navbar=navbar)
    page.div(class_='pb-2 mt-3 mb-2 border-bottom')
    page.h1('%s Scattering: %d-%d'
            % (args.ifo, int(args.gpsstart), int(args.gpsend)))
    page.div.close()  # pb-2 mt-3 mb-2 border-bottom
    page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
    page.div(class_='row')
    page.div(class_='col-md-9 col-sm-12')
    page.add(htmlio.parameter_table(
        start=int(args.gpsstart), end=int(args.gpsend), flag=args.state_flag))
    page.div.close()  # col-md-9 col-sm-12

    # link to summary files
    page.div(class_='col-md-3 col-sm-12')
    page.add(htmlio.download_btn(
        [('Segments (HDF)', segfile),
         ('Triggers (CSV)', summfile)],
        btnclass='btn btn-%s dropdown-toggle' % args.ifo.lower(),
    ))
    page.div.close()  # col-md-3 col-sm-12
    page.div.close()  # row

    # command-line
    page.h5('Command-line:')
    page.add(htmlio.get_command_line(about=False, prog=PROG))

    # section header
    page.h2('Segments', class_='mt-4', id_='segments')

    if statea:  # contextual information
        paper = markup.oneliner.a(
            'Accadia et al. (2010)', target='_blank', class_='alert-link',
            href='http://iopscience.iop.org/article/10.1088/0264-9381/27'
                 '/19/194011')
        msg = (
            "Segments marked \"optical sensors\" below show evidence of beam "
            "scattering between {0} and {1} Hz based on the velocity of optic "
            "motion, with fringe frequencies projected using equation (3) of "
            "{2}. Segments marked \"transmons\" are based on whitened, "
            "band-limited RMS trends of transmon sensors. In both cases, "
            "yellow panels denote weak evidence for scattering, while red "
            "panels denote strong evidence."
         ).format(args.fmin, multiplier * fthresh, str(paper))
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
    else:  # null segments
        page.add(htmlio.alert('No active analysis segments were found',
                              context='warning', dismiss=False))

    # record state segments
    if args.state_flag is not None:
        page.h3('State flag', class_='mt-3', id_='state-flag')
        page.div(id_='accordion1')
        page.add(htmlio.write_flag_html(
            state, span, 'state', parent='accordion1', context='success',
            plotdir='', facecolor=(0.2, 0.8, 0.2), edgecolor='darkgreen',
            known={'facecolor': 'red', 'edgecolor': 'darkred', 'height': 0.4}))
        page.div.close()

    # -- find scattering evidence -----

    # read data for OSEMs and transmons
    osems = ['%s:%s' % (args.ifo, c) for optic in args.optic for
             c in OPTIC_MOTION_CHANNELS[optic]]
    transmons = ['%s:%s' % (args.ifo, c) for c in TRANSMON_CHANNELS]
    allchannels = osems + transmons

    logger.info("Reading all timeseries data")
    alldata = []
    n = len(statea)
    for i, seg in enumerate(statea):
        msg = "{0}/{1} {2}:".rjust(30).format(
            str(i + 1).rjust(len(str(n))),
            n,
            str(seg),
        ) if args.verbose else False
        alldata.append(
            get_data(allchannels, seg[0], seg[1],
                     frametype=args.frametype.format(IFO=args.ifo),
                     verbose=msg, nproc=args.nproc).resample(128))
    try:  # ensure that only available channels are analyzed
        osems = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(osems))
        transmons = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(transmons))
    except IndexError:
        osems = []
        transmons = []

    # initialize scattering segments
    scatter_segments = DataQualityDict()
    actives = SegmentList()

    # scattering based on OSEM velocity
    if statea:
        page.h3('Optical sensors (OSEMs)', class_='mt-3', id_='osems')
        page.div(id_='osems-group')
    logger.info('Searching for scatter based on OSEM velocity')

    for i, channel in enumerate(sorted(osems)):
        logger.info("-- Processing %s --" % channel)
        chanstr = re.sub('[:-]', '_', channel).replace('_', '-', 1)
        optic = channel.split('-')[1].split('_')[0]
        flag = '%s:DCH-%s_SCATTERING_GE_%s_HZ:1' % (args.ifo, optic, tstr)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering above {0} Hz from {1} in "
                        "{2}".format(fthresh, optic, channel),
        )
        # set up plot(s)
        plot = Plot(figsize=[12, 12])
        axes = {}
        axes['position'] = plot.add_subplot(
            411, xscale='auto-gps', xlabel='')
        axes['fringef'] = plot.add_subplot(
            412, sharex=axes['position'], xlabel='')
        axes['triggers'] = plot.add_subplot(
            413, sharex=axes['position'], xlabel='')
        axes['segments'] = plot.add_subplot(
            414, projection='segments', sharex=axes['position'])
        plot.subplots_adjust(bottom=.07, top=.95)
        fringecolors = [None] * len(FREQUENCY_MULTIPLIERS)
        histdata = dict((x, numpy.ndarray((0,))) for
                        x in FREQUENCY_MULTIPLIERS)
        linecolor = None
        # loop over state segments and find scattering fringes
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            ts = alldata[j][channel]
            # get raw data and plot
            line = axes['position'].plot(ts, color=linecolor)[0]
            linecolor = line.get_color()
            # get fringe frequency and plot
            fringef = get_fringe_frequency(ts, multiplier=1)
            for k, m in list(enumerate(FREQUENCY_MULTIPLIERS))[::-1]:
                fm = fringef * m
                line = axes['fringef'].plot(
                    fm, color=fringecolors[k],
                    label=(j == 0 and r'$f\times%d$' % m or None))[0]
                fringecolors[k] = line.get_color()
                histdata[m] = numpy.resize(
                    histdata[m], (histdata[m].size + fm.size,))
                histdata[m][-fm.size:] = fm.value
            # get segments and plot
            scatter = get_segments(
                fringef * multiplier,
                fthresh,
                name=flag,
                pad=args.segment_padding
            )
            axes['segments'].plot(
                scatter, facecolor='red', edgecolor='darkred',
                known={'alpha': 0.6, 'facecolor': 'lightgray',
                       'edgecolor': 'gray', 'height': 0.4},
                height=0.8, y=0, label=' ',
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc/deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # finalize plot
        logger.debug("Plotting")
        name = texify(channel)
        axes['position'].set_title("Scattering evidence in %s" % name)
        axes['position'].set_xlabel('')
        axes['position'].set_ylabel(r'Position [$\mu$m]')
        axes['position'].text(
            0.01, 0.95, 'Optic position',
            transform=axes['position'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        # dashed horizontal line marking the fringe-frequency threshold
        axes['fringef'].plot(
            span, [fthresh, fthresh], 'k--')
        axes['fringef'].set_xlabel('')
        axes['fringef'].set_ylabel(r'Frequency [Hz]')
        axes['fringef'].yaxis.tick_right()
        axes['fringef'].yaxis.set_label_position("right")
        axes['fringef'].set_ylim(0, multiplier * fthresh)
        axes['fringef'].text(
            0.01, 0.95, 'Calculated fringe frequency',
            transform=axes['fringef'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        # reverse legend order so the highest harmonic is listed first
        handles, labels = axes['fringef'].get_legend_handles_labels()
        axes['fringef'].legend(handles[::-1], labels[::-1], loc='upper right',
                               borderaxespad=0, bbox_to_anchor=(-0.01, 1.),
                               handlelength=1)

        # Omicron triggers of the main channel, colored by SNR
        axes['triggers'].scatter(
            trigs[names[0]],
            trigs[names[1]],
            c=trigs[names[2]],
            edgecolor='none',
        )
        name = texify(args.main_channel)
        axes['triggers'].text(
            0.01, 0.95,
            '%s event triggers (Omicron)' % name,
            transform=axes['triggers'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        axes['triggers'].set_ylabel('Frequency [Hz]')
        axes['triggers'].set_ylim(args.fmin, multiplier * fthresh)
        axes['triggers'].colorbar(cmap='YlGnBu', clim=(3, 100), norm='log',
                                  label='Signal-to-noise ratio')
        axes['segments'].set_ylim(-.55, .55)
        axes['segments'].text(
            0.01, 0.95,
            r'Time segments with $f\times%d > %.2f$ Hz' % (
                multiplier, fthresh),
            transform=axes['segments'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        # align every panel on a common GPS epoch and x-span
        for ax in axes.values():
            ax.set_epoch(int(args.gpsstart))
            ax.set_xlim(*span)
        png = '%s_SCATTERING_%s_HZ-%s.png' % (chanstr, tstr, gpsstr)
        try:
            plot.save(png)
        except OverflowError as e:
            # saving can overflow on extreme autoscaled limits; clamp the
            # fringe-frequency axis and retry once
            warnings.warn(str(e))
            plot.axes[1].set_ylim(0, multiplier * fthresh)
            plot.refresh()
            plot.save(png)
        plot.close()
        logger.debug("%s written." % png)

        # make histogram
        # reversed-cumulative histogram of time spent with fringes above
        # each frequency, one trace per harmonic, highest harmonic first
        histogram = Plot(figsize=[12, 6])
        ax = histogram.gca()
        hrange = (0, multiplier * fthresh)
        for m, color in list(zip(histdata, fringecolors))[::-1]:
            if histdata[m].size:
                # weight each sample by its duration (ts.dx) so the y-axis
                # reads as seconds; bottom=1e-100 keeps log scale happy
                ax.hist(
                    histdata[m], facecolor=color, alpha=.6, range=hrange,
                    bins=50, histtype='stepfilled', label=r'$f\times%d$' % m,
                    cumulative=-1, weights=ts.dx.value, bottom=1e-100,
                    log=True)
            else:
                # no data for this harmonic: plot an empty line so the
                # legend entry still appears
                ax.plot(histdata[m], color=color, label=r'$f\times%d$' % m)
                ax.set_yscale('log')
        ax.set_ylim(.01, float(livetime))
        ax.set_ylabel('Time with fringe above frequency [s]')
        ax.set_xlim(*hrange)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_title(axes['position'].get_title())
        # reverse legend order to match the drawing order above
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles[::-1], labels[::-1], loc='upper right')
        hpng = '%s_SCATTERING_HISTOGRAM-%s.png' % (chanstr, gpsstr)
        histogram.save(hpng)
        histogram.close()
        logger.debug("%s written." % hpng)

        # write HTML
        # choose the bootstrap context for this channel's card:
        #   danger  - veto fires and is efficient (efficiency/deadtime > 2)
        #   warning - veto fires but is inefficient, or the top-harmonic
        #             fringe data reaches half the threshold
        # any other channel is skipped entirely
        # NOTE(review): effdt == 2 exactly falls through to 'continue' —
        # confirm that boundary is intentional
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif ((deadtime != 0 and effdt < 2) or
              (histdata[multiplier].size and
               histdata[multiplier].max() >=
                  fthresh/2.)):
            context = 'warning'
        else:
            continue
        page.div(class_='card border-%s mb-1 shadow-sm' % context)
        page.div(class_='card-header text-white bg-%s' % context)
        page.a(channel, class_='collapsed card-link cis-link',
               href='#osem%s' % i, **{'data-toggle': 'collapse'})
        page.div.close()  # card-header
        page.div(id_='osem%s' % i, class_='collapse',
                 **{'data-parent': '#osems-group'})
        page.div(class_='card-body')
        page.div(class_='row')
        # embed the time-series and histogram figures with captions
        img = htmlio.FancyPlot(
            png, caption=SCATTER_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(img))
        page.div.close()  # col-md-10 offset-md-1
        himg = htmlio.FancyPlot(
            hpng, caption=HIST_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(himg))
        page.div.close()  # col-md-10 offset-md-1
        page.div.close()  # row
        # buffer for the segwizard-format segment listing shown below
        segs = StringIO()
        if deadtime:
            page.p("%d segments were found predicting a scattering fringe "
                   "above %.2f Hz." % (
                       len(scatter_segments[channel].active),
                       fthresh))
            page.table(class_='table table-sm table-hover')
            page.tbody()
            page.tr()
            page.th('Deadtime')
            page.td('%.2f/%d seconds' % (deadtime, livetime))
            page.td('%.2f%%' % deadtimepc)
            page.tr.close()
            page.tr()
            # NOTE(review): the markup below has a stray '</sub>' and an
            # unterminated '&lt' entity — confirm intended rendering
            page.th('Efficiency<br><small>(SNR&ge;8 and '
                    '%.2f Hz</sub>&ltf<sub>peak</sub>&lt;%.2f Hz)</small>'
                    % (args.fmin, multiplier * fthresh))
            page.td('%d/%d events' % (efficiency, len(highsnrtrigs)))
            page.td('%.2f%%' % efficiencypc)
            page.tr.close()
            page.tr()
            page.th('Efficiency/Deadtime')
            page.td()
            page.td('%.2f' % effdt)
            page.tr.close()
            page.tbody.close()
            page.table.close()
            scatter_segments[channel].active.write(segs, format='segwizard',
                                                   coltype=float)
            page.pre(segs.getvalue())
        else:
            page.p("No segments were found with scattering above %.2f Hz."
                   % fthresh)
        page.div.close()  # card-body
        page.div.close()  # collapse
        page.div.close()  # card

    if statea:  # close accordion
        page.div.close()  # osems-group

    # scattering based on transmon BLRMS
    # second analysis pass: look for scattering evidence in the whitened,
    # band-limited RMS of the transmon channels
    if statea:
        page.h3('Transmons', class_='mt-3', id_='transmons')
        page.div(id_='transmons-group')
    logger.info('Searching for scatter based on band-limited RMS of transmons')

    for i, channel in enumerate(sorted(transmons)):
        logger.info("-- Processing %s --" % channel)
        # optic name is taken from the channel name after the first '-'
        optic = channel.split('-')[1][:6]
        flag = '%s:DCH-%s_SCATTERING_BLRMS:1' % (args.ifo, optic)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering from whitened, band-limited "
                        "RMS trends of {0}".format(channel),
        )

        # loop over state segments and compute BLRMS
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            wblrms = get_blrms(
                alldata[j][channel],
                flow=args.bandpass_flow,
                fhigh=args.bandpass_fhigh,
            )
            # flag times where the BLRMS exceeds mean + sigma * std
            scatter = get_segments(
                wblrms,
                numpy.mean(wblrms) + args.sigma * numpy.std(wblrms),
                name=flag,
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
        # NOTE(review): this rebinds highsnrtrigs with an SNR <= 200 cap,
        # although the log message below says "SNR>=8" — confirm the
        # intended selection; the rebinding also persists past this loop
        highsnrtrigs = trigs[trigs['snr'] <= 200]
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc/deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # write HTML
        # danger/warning context as for the OSEM channels; quiet
        # channels are skipped
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif deadtime != 0 and effdt < 2:
            context = 'warning'
        else:
            continue
        page.add(htmlio.write_flag_html(
            scatter_segments[channel], span, i, parent='transmons-group',
            title=channel, context=context, plotdir=''))

    if statea:  # close accordion
        page.div.close()  # transmons-group

    actives = actives.coalesce()  # merge contiguous segments
    if statea and not actives:
        page.add(htmlio.alert(
            'No evidence of scattering found in the channels analyzed',
            context=args.ifo.lower(), dismiss=False))

    # identify triggers during active segments
    logger.debug('Writing a summary CSV record')
    # NOTE(review): highsnrtrigs here is whatever the last loop bound it
    # to (the SNR<=200 selection if any transmon was processed) — confirm
    ind = [i for i, trigtime in enumerate(highsnrtrigs[names[0]])
           if trigtime in actives]
    gps = highsnrtrigs[names[0]][ind]
    freq = highsnrtrigs[names[1]][ind]
    snr = highsnrtrigs[names[2]][ind]
    # pair each trigger with the active segment that contains it
    segs = [y for x in gps for y in actives if x in y]
    table = EventTable(
        [gps, freq, snr, [seg[0] for seg in segs], [seg[1] for seg in segs]],
        names=('trigger_time', 'trigger_frequency', 'trigger_snr',
               'segment_start', 'segment_end'))
    logger.info('The following {} triggers fell within active scattering '
                'segments:\n\n'.format(len(table)))
    print(table)
    print('\n\n')
    table.write(summfile, overwrite=True)

    # -- launch omega scans -----------

    # randomly sample up to args.omega_scans trigger times for follow-up
    nscans = min(args.omega_scans, len(table))
    if nscans > 0:
        # launch scans
        scandir = 'scans'
        ind = random.sample(range(0, len(table)), nscans)
        omegatimes = [str(t) for t in table['trigger_time'][ind]]
        logger.debug('Collected {} event times to omega scan: {}'.format(
            nscans, ', '.join(omegatimes)))
        logger.info('Creating workflow for omega scans')
        flags = batch.get_command_line_flags(
            ifo=args.ifo, ignore_state_flags=True)
        condorcmds = batch.get_condor_arguments(timeout=4, gps=args.gpsstart)
        # build and submit the condor workflow immediately
        batch.generate_dag(omegatimes, flags=flags, submit=True,
                           outdir=scandir, condor_commands=condorcmds)
        logger.info('Launched {} omega scans to condor'.format(nscans))
        # render HTML
        page.h2('Omega scans', class_='mt-4', id_='omega-scans')
        msg = (
            'The following event times correspond to significant Omicron '
            'triggers that occur during the scattering segments found above. '
            'To compare these against fringe frequency projections, please '
            'use the "simple scattering" module:',
            markup.oneliner.pre(
                '$ python -m gwdetchar.scattering.simple --help',
            ),
        )
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
        page.add(htmlio.scaffold_omega_scans(
            omegatimes, args.main_channel, scandir=scandir))
    elif args.omega_scans:
        # scans were requested but no qualifying triggers were found
        logger.info('No events found during active scattering segments')

    # -- finalize ---------------------

    # write segments
    # persist every per-channel scattering flag to the segment file
    scatter_segments.write(segfile, path="segments", overwrite=True)
    logger.debug("%s written" % segfile)

    # write HTML
    htmlio.close_page(page, 'index.html')
    logger.info("-- index.html written, all done --")

    # return to original directory
    os.chdir(indir)