Example #1
def get_state_segments(channel, frametype, start, end, bits=[0], nproc=1,
                       pad=(0, 0)):
    """Read state segments from a state-vector channel in the frames
    """
    ifo = channel[:2]
    pstart = start - pad[0]
    pend = end + pad[1]

    # find frame cache
    cache = data.find_frames(ifo, frametype, pstart, pend)

    # optimise I/O based on type and library
    io_kw = {}
    try:
        from LDAStools import frameCPP  # noqa: F401
    except ImportError:
        pass
    else:
        io_kw['format'] = 'gwf.framecpp'
        if RAW_TYPE_REGEX.match(frametype):
            io_kw['type'] = 'adc'
        elif channel.endswith('GDS-CALIB_STATE_VECTOR'):
            io_kw['type'] = 'proc'

    bits = list(map(str, bits))
    # FIXME: need to read from cache with single segment but doesn't match
    # [start, end)

    # Virgo drops the state vector regularly, so need to sieve the files
    if channel == "V1:DQ_ANALYSIS_STATE_VECTOR":
        span = gwf_data_segments(cache, channel)
    else:
        span = SegmentList([Segment(pstart, pend)])

    # read data segments
    segs = SegmentList()
    try:
        csegs = cache_segments(cache)
    except KeyError:
        return segs
    for seg in csegs & span:
        sv = StateVector.read(cache, channel, nproc=nproc, start=seg[0],
                              end=seg[1], bits=bits, gap='pad', pad=0,
                              **io_kw).astype('uint32')
        segs += sv.to_dqflags().intersection().active

    # truncate to integers, and apply padding
    for i, seg in enumerate(segs):
        segs[i] = type(seg)(int(ceil(seg[0])) + pad[0],
                            int(floor(seg[1])) - pad[1])
    return segs.coalesce()
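A minimal usage sketch for the function above, assuming it is importable from its host module (these listings appear to come from pyomicron's omicron.segments, but the import path, channel, frame type, GPS times, and bit selection here are all illustrative assumptions):

from omicron.segments import get_state_segments  # assumed import path

# segments where bits 0-2 of the state vector were all active over
# GPS [1186740000, 1186743600), with results padded 8 s inwards
segs = get_state_segments(
    'L1:GDS-CALIB_STATE_VECTOR',  # assumed channel name
    'L1_HOFT_C00',                # assumed frame type
    1186740000,
    1186743600,
    bits=[0, 1, 2],
    pad=(8, 8),
)
print(segs)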
Example #2
def get_guardian_segments(node, frametype, start, end, nproc=1, pad=(0, 0),
                          strict=False):
    """Determine state segments for a given guardian node
    """
    ifo, node = node.split(':', 1)
    if node.startswith('GRD-'):
        node = node[4:]
    pstart = start - pad[0]
    pend = end + pad[1]

    # find frame cache
    cache = data.find_frames(ifo, frametype, pstart, pend)

    # pre-format data segments
    span = SegmentList([Segment(pstart, pend)])
    segs = SegmentList()
    csegs = cache_segments(cache)
    if not csegs:
        return csegs

    # read data
    stub = "{}:GRD-{}".format(ifo, node)
    if strict:
        channels = ["{}_OK".format(stub)]
    else:
        state = "{}_STATE_N".format(stub)
        nominal = "{}_NOMINAL_N".format(stub)
        active = "{}_ACTIVE".format(stub)
        channels = [state, nominal, active]
    for seg in csegs & span:
        if strict:
            sv = StateVector.read(
                cache, channels[0], nproc=nproc, start=seg[0], end=seg[1],
                bits=[0], gap='pad', pad=0,).astype('uint32')
            segs += sv.to_dqflags().intersection().active
        else:
            gdata = TimeSeriesDict.read(
                cache, channels, nproc=nproc, start=seg[0], end=seg[1],
                gap='pad', pad=0)
            ok = ((gdata[state].value == gdata[nominal].value) &
                  (gdata[active].value == 1)).view(StateTimeSeries)
            ok.t0 = gdata[state].t0
            ok.dt = gdata[state].dt
            segs += ok.to_dqflag().active

    # truncate to integers, and apply padding
    for i, seg in enumerate(segs):
        segs[i] = type(seg)(int(ceil(seg[0])) + pad[0],
                            int(floor(seg[1])) - pad[1])
    return segs.coalesce()
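A companion sketch for the guardian version, under the same import assumption; the node name, frame type, and GPS times are hypothetical. With strict=False the function compares the node's STATE_N and NOMINAL_N channels and requires ACTIVE == 1; with strict=True it reads the single _OK channel instead:

from omicron.segments import get_guardian_segments  # assumed import path

segs = get_guardian_segments(
    'H1:GRD-ISC_LOCK',  # hypothetical guardian node
    'H1_R',             # hypothetical raw frame type
    1186740000,
    1186741024,
    strict=False,
)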
Example #3
File: range.py Project: gwpy/gwsumm
    def combined_time_volume(self, allsegments, allranges):
        try:
            combined_range = TimeSeries(numpy.zeros(allranges[0].size),
                                        xindex=allranges[0].times,
                                        unit='Mpc')
        except IndexError:
            combined_range = TimeSeries(numpy.zeros(allranges[0].size),
                                        unit='Mpc',
                                        x0=allranges[0].x0,
                                        dx=allranges[0].dx)

        # get coincident observing segments
        pairs = list(combinations(allsegments, 2))
        coincident = SegmentList()
        for pair in pairs:
            coincident.extend(pair[0] & pair[1])
        coincident = coincident.coalesce()

        # get effective network range
        values = [r.value for r in allranges]
        values = [min(nlargest(2, x)) for x in zip(*values)]
        size = min([r.size for r in allranges])
        combined_range[:size] = values * combined_range.unit

        # compute time-volume
        return self.calculate_time_volume(coincident, combined_range)
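The key step above is min(nlargest(2, x)), which selects the second-largest detector range at each time sample: a coincidence requires at least two observing detectors, so the effective network is only as sensitive as its second-best instrument. A standalone sketch of that reduction with made-up numbers:

from heapq import nlargest

ranges = [
    [100., 140., 90.],  # detector 1 range per sample (Mpc, illustrative)
    [120., 80., 95.],   # detector 2
    [60., 110., 85.],   # detector 3
]
network = [min(nlargest(2, sample)) for sample in zip(*ranges)]
print(network)  # [100.0, 110.0, 90.0]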
Example #4
def main(args=None):
    use('agg')
    rcParams.update({
        'figure.subplot.bottom': 0.15,
        'figure.subplot.left': 0.1,
        'figure.subplot.right': 0.83,
        'figure.subplot.top': 0.93,
        'figure.subplot.hspace': 0.25,
        'axes.labelsize': 20,
        'grid.color': 'gray',
    })
    grid = GridSpec(2, 1)

    logger = log.Logger('omicron-status')

    try:
        omicronversion = str(get_omicron_version())
    except KeyError:
        omicronversion = 'Unknown'
        logger.warning("Omicron version unknown")
    else:
        logger.info("Found omicron version: %s" % omicronversion)

    parser = create_parser()
    args = parser.parse_args(args=args)

    if args.ifo is None:
        parser.error("Cannot determine IFO prefix from sytem, "
                     "please pass --ifo on the command line")

    group = args.group

    logger.info("Checking status for %r group" % group)

    archive = args.archive_directory
    proddir = args.production_directory.with_name(
        args.production_directory.name.format(group=args.group), )
    outdir = args.output_directory
    outdir.mkdir(exist_ok=True, parents=True)
    tag = args.latency_archive_tag.format(group=args.group)

    filetypes = ['h5', 'xml.gz', 'root']

    logger.debug("Set output directory to %s" % outdir)
    logger.debug(
        "Will process the following filetypes: {}".format(
            ", ".join(filetypes), ), )

    # -- parse configuration file and get parameters --------------------------

    cp = configparser.ConfigParser()
    ok = cp.read(args.config_file)
    if args.config_file not in ok:
        raise IOError(
            "Failed to read configuration file %r" % args.config_file, )
    logger.info("Configuration read")

    # validate
    if not cp.has_section(group):
        raise configparser.NoSectionError(group)

    # get parameters
    obs = args.ifo[0]
    frametype = cp.get(group, 'frametype')
    padding = cp.getint(group, 'overlap-duration') / 2.
    mingap = cp.getint(group, 'chunk-duration')

    channels = args.channel
    if not channels:
        channels = [
            c.split()[0]
            for c in cp.get(group, 'channels').strip('\n').split('\n')
        ]
    channels.sort()
    logger.debug("Found %d channels" % len(channels))

    start = args.gps_start_time
    end = args.gps_end_time
    if end == NOW:
        end -= padding

    if args.state_flag:
        stateflag = args.state_flag
        statepad = tuple(map(float, args.state_pad.split(',')))
    else:
        try:
            stateflag = cp.get(group, 'state-flag')
        except configparser.NoOptionError:
            stateflag = None
        else:
            try:
                statepad = tuple(
                    map(
                        float,
                        cp.get(group, 'state-padding').split(','),
                    ))
            except configparser.NoOptionError:
                statepad = (0, 0)
    if stateflag:
        logger.debug("Parsed state flag: %r" % stateflag)
        logger.debug("Parsed state padding: %s" % repr(statepad))
    logger.info("Processing %d-%d" % (start, end))

    # -- define nagios JSON printer -------------------------------------------

    def print_nagios_json(code, message, outfile, tag='status', **extras):
        out = {
            'created_gps':
            NOW,
            'status_intervals': [
                {
                    'start_sec': 0,
                    'end_sec': args.unknown,
                    'num_status': code,
                    'txt_status': message
                },
                {
                    'start_sec': args.unknown,
                    'num_status': 3,
                    'txt_status': 'Omicron %s check is not running' % tag
                },
            ],
            'author': {
                'name': 'Duncan Macleod',
                'email': '*****@*****.**',
            },
            'omicron': {
                'version': omicronversion,
                'group': group,
                'channels': ' '.join(channels),
                'frametype': frametype,
                'state-flag': stateflag,
            },
            'pyomicron': {
                'version': __version__,
            },
        }
        out.update(extras)
        with open(outfile, 'w') as f:
            f.write(json.dumps(out))
        logger.debug("nagios info written to %s" % outfile)

    # -- get condor status ------------------------------------------------

    if not args.skip_condor:
        # connect to scheduler
        try:
            schedd = htcondor.Schedd()
        except RuntimeError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
            schedd = None

    if not args.skip_condor and schedd:
        logger.info("-- Checking condor status --")

        # get DAG status
        jsonfp = outdir / "nagios-condor-{}.json".format(group)
        okstates = ['Running', 'Idle', 'Completed']
        try:
            # check manager status
            qstr = 'OmicronManager == "{}" && Owner == "{}"'.format(
                group,
                args.user,
            )
            try:
                jobs = schedd.query(qstr, ['JobStatus'])
            except IOError as e:
                warnings.warn("Caught IOError: %s [retrying...]" % str(e))
                sleep(2)
                jobs = schedd.query(qstr, ['JobStatus'])
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            if len(jobs) > 1:
                raise RuntimeError(
                    "Multiple OmicronManager jobs found for %r" % group)
            elif len(jobs) == 0:
                raise RuntimeError(
                    "No OmicronManager job found for %r" % group, )
            status = condor.JOB_STATUS[jobs[0]['JobStatus']]
            if status not in okstates:
                raise RuntimeError("OmicronManager status for %r: %r" %
                                   (group, status))
            logger.debug("Manager status is %r" % status)
            # check node status
            jobs = schedd.query(
                'OmicronProcess == "{}" && Owner == "{}"'.format(
                    group,
                    args.user,
                ),
                ['JobStatus', 'ClusterId'],
            )
            logger.debug(
                "Found {} jobs for query {!r}".format(len(jobs), qstr), )
            for job in jobs:
                status = condor.JOB_STATUS[job['JobStatus']]
                if status not in okstates:
                    raise RuntimeError("Omicron node %s (%r) is %r" %
                                       (job['ClusterId'], group, status))
        except RuntimeError as e:
            print_nagios_json(2, str(e), jsonfp, tag='condor')
            logger.warning("Failed to determine condor status: %r" % str(e))
        except IOError as e:
            logger.warning("Caught %s: %s" % (type(e).__name__, e))
            logger.info("Failed to connect to HTCondor scheduler, cannot "
                        "determine condor status for %s" % group)
        else:
            print_nagios_json(
                0,
                "Condor processing for %r is OK" % group,
                jsonfp,
                tag='condor',
            )
            logger.info("Condor processing is OK")

    if not args.skip_job_duration:
        # get job duration history
        plot = Plot(figsize=[12, 3])
        plot.subplots_adjust(bottom=.22, top=.87)
        ax = plot.gca(xscale="auto-gps")
        times, jobdur = condor.get_job_duration_history_shell('OmicronProcess',
                                                              group,
                                                              maxjobs=5000)
        logger.debug("Recovered duration history for %d omicron.exe jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Omicron.exe')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        times, jobdur = condor.get_job_duration_history_shell(
            'OmicronPostProcess', group, maxjobs=5000)
        logger.debug("Recovered duration history for %d post-processing jobs" %
                     len(times))
        line = ax.plot([0], [1], label='Post-processing')[0]
        ax.plot(times,
                jobdur,
                linestyle=' ',
                marker='.',
                color=line.get_color())
        ax.legend(loc='upper left',
                  borderaxespad=0,
                  bbox_to_anchor=(1.01, 1),
                  handlelength=1)
        ax.set_xlim(args.gps_start_time, args.gps_end_time)
        ax.set_epoch(ax.get_xlim()[1])
        ax.set_yscale('log')
        ax.set_title('Omicron job durations for %r' % group)
        ax.set_ylabel('Job duration [seconds]')
        ax.xaxis.labelpad = 5
        png = str(outdir / "nagios-condor-{}.png".format(group))
        plot.save(png)
        plot.close()
        logger.debug("Saved condor plot to %s" % png)

    if args.skip_file_checks:
        sys.exit(0)

    # -- get file latency and archive completeness ----------------------------

    logger.info("-- Checking file archive --")

    # get frame segments
    segs = segments.get_frame_segments(obs, frametype, start, end)

    # get state segments
    if stateflag is not None:
        segs &= segments.query_state_segments(
            stateflag,
            start,
            end,
            pad=statepad,
        )

    try:
        end = segs[-1][1]
    except IndexError:
        pass

    # apply inwards padding to generate resolvable segments
    for i in range(len(segs) - 1, -1, -1):
        # if segment is shorter than padding, ignore it completely
        if abs(segs[i]) <= padding * 2:
            del segs[i]
        # otherwise apply padding to generate trigger segment
        else:
            segs[i] = segs[i].contract(padding)
    logger.debug("Found %d seconds of analysable time" % abs(segs))

    # load archive latency
    latencyfile = outdir / "nagios-latency-{}.h5".format(tag)
    times = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    ldata = dict((c, dict((ft, None) for ft in filetypes)) for c in channels)
    try:
        with h5py.File(latencyfile, 'r') as h5file:
            for c in channels:
                for ft in filetypes:
                    try:
                        times[c][ft] = h5file[c]['time'][ft][:]
                        ldata[c][ft] = h5file[c]['latency'][ft][:]
                    except KeyError:
                        times[c][ft] = numpy.ndarray((0, ))
                        ldata[c][ft] = numpy.ndarray((0, ))
    except OSError as exc:  # file not found, or is corrupt
        warnings.warn("failed to load latency data from {}: {}".format(
            latencyfile,
            str(exc),
        ))
        for c in channels:
            for ft in filetypes:
                if not times[c].get(ft):
                    times[c][ft] = numpy.ndarray((0, ))
                    ldata[c][ft] = numpy.ndarray((0, ))
    else:
        logger.debug("Parsed latency data from %s" % latencyfile)

    # load acknowledged gaps
    acksegfile = str(outdir / "acknowledged-gaps-{}.txt".format(tag))
    try:
        acknowledged = SegmentList.read(acksegfile,
                                        gpstype=float,
                                        format="segwizard")
    except IOError:  # no file
        acknowledged = SegmentList()
    else:
        logger.debug(
            "Read %d segments from %s" % (len(acknowledged), acksegfile), )
        acknowledged.coalesce()

    # build legend for segments
    leg = OrderedDict()
    leg['Analysable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgray',
        edgecolor='gray',
    )
    leg['Available'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightgreen',
        edgecolor='green',
    )
    leg['Missing'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='red',
        edgecolor='darkred',
    )
    leg['Unresolvable'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='magenta',
        edgecolor='purple',
    )
    leg['Overlapping'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='yellow',
        edgecolor='orange',
    )
    leg['Pending'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='lightskyblue',
        edgecolor='blue',
    )
    leg['Acknowledged'] = SegmentRectangle(
        [0, 1],
        0,
        facecolor='sandybrown',
        edgecolor='brown',
    )

    logger.debug("Checking archive for each channel...")

    # find files
    latency = {}
    gaps = {}
    overlap = {}
    pending = {}
    plots = {}
    for c in channels:
        # create data storage
        latency[c] = {}
        gaps[c] = {}
        overlap[c] = {}
        pending[c] = {}

        # create figure
        plot = Plot(figsize=[12, 5])
        lax = plot.add_subplot(grid[0, 0], xscale="auto-gps")
        sax = plot.add_subplot(grid[1, 0], sharex=lax, projection='segments')
        colors = ['lightblue', 'dodgerblue', 'black']

        for y, ft in enumerate(filetypes):
            # find files
            cache = io.find_omicron_files(c, start, end, archive, ext=ft)
            cpend = sieve_cache(io.find_pending_files(c, proddir, ext=ft),
                                segment=Segment(start, end))
            # get available segments
            avail = segments.cache_segments(cache)
            found = avail & segs
            pending[c][ft] = segments.cache_segments(cpend) & segs
            # remove gaps at the end that represent latency
            try:
                latency[c][ft] = abs(segs & type(
                    segs)([type(segs[0])(found[-1][1], segs[-1][1])])) / 3600.
            except IndexError:
                latency[c][ft] = 0
                processed = segs
            else:
                processed = segs & type(segs)(
                    [type(segs[0])(start, found[-1][1])])
            gaps[c][ft] = type(found)()
            lost = type(found)()
            for s in processed - found:
                if abs(s) < mingap and s in list(segs):
                    lost.append(s)
                else:
                    gaps[c][ft].append(s)
            # remove acknowledged gaps
            ack = gaps[c][ft] & acknowledged
            gaps[c][ft] -= acknowledged
            # print warnings
            if abs(gaps[c][ft]):
                warnings.warn("Gaps found in %s files for %s:\n%s" %
                              (c, ft, gaps[c][ft]))
            overlap[c][ft] = segments.cache_overlaps(cache)
            if abs(overlap[c][ft]):
                warnings.warn("Overlap found in %s files for %s:\n%s" %
                              (c, ft, overlap[c][ft]))

            # append archive
            times[c][ft] = numpy.concatenate((times[c][ft][-99999:], [NOW]))
            ldata[c][ft] = numpy.concatenate(
                (ldata[c][ft][-99999:], [latency[c][ft]]))

            # plot
            line = lax.plot(
                times[c][ft],
                ldata[c][ft],
                label=ft,
                color=colors[y],
            )[0]
            lax.plot(times[c][ft],
                     ldata[c][ft],
                     marker='.',
                     linestyle=' ',
                     color=line.get_color())
            sax.plot_segmentlist(segs,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Analysable'].get_facecolor(),
                                 edgecolor=leg['Analysable'].get_edgecolor())
            sax.plot_segmentlist(pending[c][ft],
                                 y=y,
                                 facecolor=leg['Pending'].get_facecolor(),
                                 edgecolor=leg['Pending'].get_edgecolor())
            sax.plot_segmentlist(avail,
                                 y=y,
                                 label=ft,
                                 alpha=.2,
                                 height=.1,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(found,
                                 y=y,
                                 label=ft,
                                 alpha=.5,
                                 facecolor=leg['Available'].get_facecolor(),
                                 edgecolor=leg['Available'].get_edgecolor())
            sax.plot_segmentlist(lost,
                                 y=y,
                                 facecolor=leg['Unresolvable'].get_facecolor(),
                                 edgecolor=leg['Unresolvable'].get_edgecolor())
            sax.plot_segmentlist(gaps[c][ft],
                                 y=y,
                                 facecolor=leg['Missing'].get_facecolor(),
                                 edgecolor=leg['Missing'].get_edgecolor())
            sax.plot_segmentlist(overlap[c][ft],
                                 y=y,
                                 facecolor=leg['Overlapping'].get_facecolor(),
                                 edgecolor=leg['Overlapping'].get_edgecolor())
            sax.plot_segmentlist(ack,
                                 y=y,
                                 facecolor=leg['Acknowledged'].get_facecolor(),
                                 edgecolor=leg['Acknowledged'].get_edgecolor())

        # finalise plot
        lax.axhline(args.warning / 3600.,
                    color=(1.0, 0.7, 0.0),
                    linestyle='--',
                    linewidth=2,
                    label='Warning',
                    zorder=-1)
        lax.axhline(args.error / 3600.,
                    color='red',
                    linestyle='--',
                    linewidth=2,
                    label='Critical',
                    zorder=-1)
        lax.set_title('Omicron status: {}'.format(c))
        lax.set_ylim(0, args.error / 1800.)
        lax.set_ylabel('Latency [hours]')
        lax.legend(loc='upper left',
                   bbox_to_anchor=(1.01, 1),
                   borderaxespad=0,
                   handlelength=2,
                   fontsize=12.4)
        lax.set_xlabel(' ')
        for ax in plot.axes:
            ax.set_xlim(args.gps_start_time, args.gps_end_time)
            ax.set_epoch(ax.get_xlim()[1])
        sax.xaxis.labelpad = 5
        sax.set_ylim(-.5, len(filetypes) - .5)
        sax.legend(leg.values(),
                   leg.keys(),
                   handlelength=1,
                   fontsize=12.4,
                   loc='lower left',
                   bbox_to_anchor=(1.01, 0),
                   borderaxespad=0)
        plots[c] = png = outdir / "nagios-latency-{}.png".format(
            c.replace(':', '-'), )
        plot.save(png)
        plot.close()
        logger.debug("    %s" % c)

    # update latency and write archive
    h5file = h5py.File(latencyfile, 'w')
    for c in channels:
        g = h5file.create_group(c)
        for name, d in zip(['time', 'latency'], [times[c], ldata[c]]):
            g2 = g.create_group(name)
            for ft in filetypes:
                g2.create_dataset(ft, data=d[ft], compression='gzip')
    h5file.close()
    logger.debug("Stored latency data as HDF in %s" % latencyfile)

    # write nagios output for files
    status = []
    for segset, tag in zip([gaps, overlap], ['gaps', 'overlap']):
        chans = [(c, segset[c]) for c in segset
                 if abs(reduce(operator.or_, segset[c].values()))]
        jsonfp = outdir / "nagios-{}-{}.json".format(tag, group)
        status.append((tag, jsonfp))
        if chans:
            gapstr = '\n'.join('%s: %s' % c for c in chans)
            code = 1
            message = ("%s found in Omicron files for group %r\n%s" %
                       (tag.title(), group, gapstr))
        else:
            code = 0
            message = ("No %s found in Omicron files for group %r" %
                       (tag, group))
        print_nagios_json(code, message, jsonfp, tag=tag, **{tag: dict(chans)})

    # write group JSON
    jsonfp = outdir / "nagios-latency-{}.json".format(group)
    status.append(('latency', jsonfp))
    code = 0
    message = 'No channels have high latency for group %r' % group
    ldict = dict((c, max(latency[c].values())) for c in latency)
    for x, dt in zip([2, 1], [args.error, args.warning]):
        dh = dt / 3600.
        chans = [c for c in ldict if ldict[c] >= dh]
        if chans:
            code = x
            message = (
                "%d channels found with high latency (above %s seconds)" %
                (len(chans), dt))
            break
    print_nagios_json(code, message, jsonfp, tag='latency', latency=ldict)

    # auto-detect 'standard' JSON files
    for tag, name in zip(
        ['condor', 'omicron-online'],
        ['condor', 'processing'],
    ):
        f = outdir / "nagios-{}-{}.json".format(tag, group)
        if f.is_file():
            status.insert(0, (name, f))

    # write HTML summary
    if args.html:
        page = markup.page()
        page.init(
            title="%s Omicron Online status" % group,
            css=[
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/'
                 'bootstrap.min.css'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.css'),
            ],
            script=[
                '//code.jquery.com/jquery-1.11.2.min.js',
                ('//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/'
                 'bootstrap.min.js'),
                ('//cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/'
                 'jquery.fancybox.min.js'),
            ],
        )
        page.div(class_='container')
        # write header
        page.div(class_='page-header')
        page.h1('Omicron Online status: %s' % group)
        page.div.close()  # page-header
        # write summary
        page.div(id_='json')
        page.h2("Processing status")
        for tag, f in status:
            jf = f.name
            page.a("%s status" % tag.title(),
                   href=jf,
                   role='button',
                   target="_blank",
                   id_="nagios-%s" % tag,
                   class_='btn btn-default json-status')
        page.p(style="padding-top: 5px;")
        page.small(
            "Hover over button for explanation, click to open JSON file", )
        page.p.close()
        page.div.close()  # id=json
        # show plots
        page.div(id_='plots')
        page.h2("Channel details")
        page.div(class_='row')
        for channel in sorted(channels):
            png = plots[channel].name
            page.div(class_="col-sm-6 col-md-4")
            page.div(class_="panel panel-default")
            page.div(class_='panel-heading')
            page.h3(channel, class_='panel-title', style="font-size: 14px;")
            page.div.close()  # panel-heading
            page.div(class_='panel-body')
            page.a(href=png,
                   target="_blank",
                   class_="fancybox",
                   rel="channel-status-img")
            page.img(src=png, class_='img-responsive')
            page.a.close()
            page.div.close()  # panel-body
            page.div.close()  # panel
            page.div.close()  # col
        page.div.close()  # row
        page.div.close()  # id=plots

        # dump parameters
        page.div(id_="parameters")
        page.h2("Parameters")
        for key, val in cp.items(group):
            page.p()
            page.strong("%s:" % key)
            page.add(val)
            page.p.close()
        page.div.close()  # id=parameters

        # finish and close
        page.div.close()  # container
        page.script("""
        function setStatus(data, id) {
            var txt = data.status_intervals[0].txt_status.split("\\n")[0];
            $("#"+id).attr("title", txt);
            var stat = data.status_intervals[0].num_status;
            if (stat == 0) {
                $("#"+id).addClass("btn-success"); }
            else if (stat == 1) {
                $("#"+id).addClass("btn-warning"); }
            else if (stat == 2){
                $("#"+id).addClass("btn-danger"); }
        }

        $(document).ready(function() {
            $(".json-status").each(function() {
                var jsonf = $(this).attr("href");
                var id = $(this).attr("id");
                $.getJSON(jsonf, function(data) { setStatus(data, id); });
            });

            $(".fancybox").fancybox({nextEffect: 'none', prevEffect: 'none'});
        });""",
                    type="text/javascript")
        with (outdir / "index.html").open("w") as f:
            f.write(str(page))
        logger.debug("HTML summary written to %s" % f.name)
Example #5
def main(args=None):
    """Run the primary scattering command-line tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # set up logger
    logger = cli.logger(
        name=PROG.split('python -m ').pop(),
        level='DEBUG' if args.verbose else 'INFO',
    )

    # useful variables
    fthresh = (
        int(args.frequency_threshold) if args.frequency_threshold.is_integer()
        else args.frequency_threshold)
    multiplier = args.multiplier_for_threshold
    tstr = str(fthresh).replace('.', '_')
    gpsstr = '%s-%s' % (int(args.gpsstart), int(args.gpsend - args.gpsstart))
    args.optic = args.optic or list(OPTIC_MOTION_CHANNELS.keys())

    # go to working directory
    indir = os.getcwd()
    if not os.path.isdir(args.output_dir):
        os.makedirs(args.output_dir)
    os.chdir(args.output_dir)

    # set up output files
    summfile = '{}-SCATTERING_SUMMARY-{}.csv'.format(
        args.ifo, gpsstr)
    segfile = '{}-SCATTERING_SEGMENTS_{}_HZ-{}.h5'.format(
        args.ifo, tstr, gpsstr)

    # log start of process
    logger.info('{} Scattering {}-{}'.format(
        args.ifo, int(args.gpsstart), int(args.gpsend)))

    # -- get state segments -----------

    span = Segment(args.gpsstart, args.gpsend)

    # get segments
    if args.state_flag is not None:
        state = DataQualityFlag.query(
            args.state_flag, int(args.gpsstart), int(args.gpsend),
            url=DEFAULT_SEGMENT_SERVER,
        ).coalesce()
        statea = []
        padding = args.segment_start_pad + args.segment_end_pad
        for i, seg in enumerate(state.active):
            if abs(seg) > padding:
                statea.append(Segment(
                    seg[0] + args.segment_start_pad,
                    seg[1] - args.segment_end_pad,
                ))
            else:
                logger.debug(
                    "Segment length {} shorter than padding length {}, "
                    "skipping segment {}-{}".format(abs(seg), padding, *seg),
                )
        statea = SegmentList(statea)
        logger.debug("Downloaded %d segments for %s"
                     % (len(statea), args.state_flag))
    else:
        statea = SegmentList([span])
    livetime = float(abs(statea))
    logger.debug("Processing %.2f s of livetime" % livetime)

    # -- load h(t) --------------------

    args.main_channel = args.main_channel.format(IFO=args.ifo)
    logger.debug("Loading Omicron triggers for %s" % args.main_channel)

    if args.gpsstart >= 1230336018:  # Jan 1 2019
        ext = "h5"
        names = ["time", "frequency", "snr"]
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ("time", in_segmentlist, statea),
            ],
            "format": "hdf5",
            "path": "triggers",
        }
    else:
        ext = "xml.gz"
        names = ['peak', 'peak_frequency', 'snr']
        read_kw = {
            "columns": names,
            "selection": [
                "{0} < peak_frequency < {1}".format(
                    args.fmin, multiplier * fthresh),
                ('peak', in_segmentlist, statea),
            ],
            "format": 'ligolw',
            "tablename": "sngl_burst",
        }

    fullcache = []
    for seg in statea:
        cache = gwtrigfind.find_trigger_files(
            args.main_channel, 'omicron', seg[0], seg[1], ext=ext,
        )
        if len(cache) == 0:
            warnings.warn(
                "No Omicron triggers found for %s in segment [%d .. %d)"
                % (args.main_channel, seg[0], seg[1]),
            )
            continue
        fullcache.extend(cache)

    # read triggers
    if fullcache:
        trigs = EventTable.read(fullcache, nproc=args.nproc, **read_kw)
    else:  # no files (no livetime?)
        trigs = EventTable(names=names)

    highsnrtrigs = trigs[trigs['snr'] >= 8]
    logger.debug("%d read" % len(trigs))

    # -- prepare HTML -----------------

    links = [
        '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
        ('Parameters', '#parameters'),
        ('Segments', (
            ('State flag', '#state-flag'),
            ('Optical sensors', '#osems'),
            ('Transmons', '#transmons'),
        )),
    ]
    if args.omega_scans:
        links.append(('Scans', '#omega-scans'))
    (brand, class_) = htmlio.get_brand(args.ifo, 'Scattering', args.gpsstart)
    navbar = htmlio.navbar(links, class_=class_, brand=brand)
    page = htmlio.new_bootstrap_page(
        title='%s Scattering | %d-%d' % (
            args.ifo, int(args.gpsstart), int(args.gpsend)),
        navbar=navbar)
    page.div(class_='pb-2 mt-3 mb-2 border-bottom')
    page.h1('%s Scattering: %d-%d'
            % (args.ifo, int(args.gpsstart), int(args.gpsend)))
    page.div.close()  # pb-2 mt-3 mb-2 border-bottom
    page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
    page.div(class_='row')
    page.div(class_='col-md-9 col-sm-12')
    page.add(htmlio.parameter_table(
        start=int(args.gpsstart), end=int(args.gpsend), flag=args.state_flag))
    page.div.close()  # col-md-9 col-sm-12

    # link to summary files
    page.div(class_='col-md-3 col-sm-12')
    page.add(htmlio.download_btn(
        [('Segments (HDF)', segfile),
         ('Triggers (CSV)', summfile)],
        btnclass='btn btn-%s dropdown-toggle' % args.ifo.lower(),
    ))
    page.div.close()  # col-md-3 col-sm-12
    page.div.close()  # row

    # command-line
    page.h5('Command-line:')
    page.add(htmlio.get_command_line(about=False, prog=PROG))

    # section header
    page.h2('Segments', class_='mt-4', id_='segments')

    if statea:  # contextual information
        paper = markup.oneliner.a(
            'Accadia et al. (2010)', target='_blank', class_='alert-link',
            href='http://iopscience.iop.org/article/10.1088/0264-9381/27'
                 '/19/194011')
        msg = (
            "Segments marked \"optical sensors\" below show evidence of beam "
            "scattering between {0} and {1} Hz based on the velocity of optic "
            "motion, with fringe frequencies projected using equation (3) of "
            "{2}. Segments marked \"transmons\" are based on whitened, "
            "band-limited RMS trends of transmon sensors. In both cases, "
            "yellow panels denote weak evidence for scattering, while red "
            "panels denote strong evidence."
        ).format(args.fmin, multiplier * fthresh, str(paper))
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
    else:  # null segments
        page.add(htmlio.alert('No active analysis segments were found',
                              context='warning', dismiss=False))

    # record state segments
    if args.state_flag is not None:
        page.h3('State flag', class_='mt-3', id_='state-flag')
        page.div(id_='accordion1')
        page.add(htmlio.write_flag_html(
            state, span, 'state', parent='accordion1', context='success',
            plotdir='', facecolor=(0.2, 0.8, 0.2), edgecolor='darkgreen',
            known={'facecolor': 'red', 'edgecolor': 'darkred', 'height': 0.4}))
        page.div.close()

    # -- find scattering evidence -----

    # read data for OSEMs and transmons
    osems = ['%s:%s' % (args.ifo, c) for optic in args.optic for
             c in OPTIC_MOTION_CHANNELS[optic]]
    transmons = ['%s:%s' % (args.ifo, c) for c in TRANSMON_CHANNELS]
    allchannels = osems + transmons

    logger.info("Reading all timeseries data")
    alldata = []
    n = len(statea)
    for i, seg in enumerate(statea):
        msg = "{0}/{1} {2}:".rjust(30).format(
            str(i + 1).rjust(len(str(n))),
            n,
            str(seg),
        ) if args.verbose else False
        alldata.append(
            get_data(allchannels, seg[0], seg[1],
                     frametype=args.frametype.format(IFO=args.ifo),
                     verbose=msg, nproc=args.nproc).resample(128))
    try:  # ensure that only available channels are analyzed
        osems = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(osems))
        transmons = list(
            set(alldata[0].keys()) & set(alldata[-1].keys()) & set(transmons))
    except IndexError:
        osems = []
        transmons = []

    # initialize scattering segments
    scatter_segments = DataQualityDict()
    actives = SegmentList()

    # scattering based on OSEM velocity
    if statea:
        page.h3('Optical sensors (OSEMs)', class_='mt-3', id_='osems')
        page.div(id_='osems-group')
    logger.info('Searching for scatter based on OSEM velocity')

    for i, channel in enumerate(sorted(osems)):
        logger.info("-- Processing %s --" % channel)
        chanstr = re.sub('[:-]', '_', channel).replace('_', '-', 1)
        optic = channel.split('-')[1].split('_')[0]
        flag = '%s:DCH-%s_SCATTERING_GE_%s_HZ:1' % (args.ifo, optic, tstr)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering above {0} Hz from {1} in "
                        "{2}".format(fthresh, optic, channel),
        )
        # set up plot(s)
        plot = Plot(figsize=[12, 12])
        axes = {}
        axes['position'] = plot.add_subplot(
            411, xscale='auto-gps', xlabel='')
        axes['fringef'] = plot.add_subplot(
            412, sharex=axes['position'], xlabel='')
        axes['triggers'] = plot.add_subplot(
            413, sharex=axes['position'], xlabel='')
        axes['segments'] = plot.add_subplot(
            414, projection='segments', sharex=axes['position'])
        plot.subplots_adjust(bottom=.07, top=.95)
        fringecolors = [None] * len(FREQUENCY_MULTIPLIERS)
        histdata = dict((x, numpy.ndarray((0,))) for
                        x in FREQUENCY_MULTIPLIERS)
        linecolor = None
        # loop over state segments and find scattering fringes
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            ts = alldata[j][channel]
            # get raw data and plot
            line = axes['position'].plot(ts, color=linecolor)[0]
            linecolor = line.get_color()
            # get fringe frequency and plot
            fringef = get_fringe_frequency(ts, multiplier=1)
            for k, m in list(enumerate(FREQUENCY_MULTIPLIERS))[::-1]:
                fm = fringef * m
                line = axes['fringef'].plot(
                    fm, color=fringecolors[k],
                    label=(j == 0 and r'$f\times%d$' % m or None))[0]
                fringecolors[k] = line.get_color()
                histdata[m] = numpy.resize(
                    histdata[m], (histdata[m].size + fm.size,))
                histdata[m][-fm.size:] = fm.value
            # get segments and plot
            scatter = get_segments(
                fringef * multiplier,
                fthresh,
                name=flag,
                pad=args.segment_padding
            )
            axes['segments'].plot(
                scatter, facecolor='red', edgecolor='darkred',
                known={'alpha': 0.6, 'facecolor': 'lightgray',
                       'edgecolor': 'gray', 'height': 0.4},
                height=0.8, y=0, label=' ',
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc/deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # finalize plot
        logger.debug("Plotting")
        name = texify(channel)
        axes['position'].set_title("Scattering evidence in %s" % name)
        axes['position'].set_xlabel('')
        axes['position'].set_ylabel(r'Position [$\mu$m]')
        axes['position'].text(
            0.01, 0.95, 'Optic position',
            transform=axes['position'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        axes['fringef'].plot(
            span, [fthresh, fthresh], 'k--')
        axes['fringef'].set_xlabel('')
        axes['fringef'].set_ylabel(r'Frequency [Hz]')
        axes['fringef'].yaxis.tick_right()
        axes['fringef'].yaxis.set_label_position("right")
        axes['fringef'].set_ylim(0, multiplier * fthresh)
        axes['fringef'].text(
            0.01, 0.95, 'Calculated fringe frequency',
            transform=axes['fringef'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        handles, labels = axes['fringef'].get_legend_handles_labels()
        axes['fringef'].legend(handles[::-1], labels[::-1], loc='upper right',
                               borderaxespad=0, bbox_to_anchor=(-0.01, 1.),
                               handlelength=1)

        axes['triggers'].scatter(
            trigs[names[0]],
            trigs[names[1]],
            c=trigs[names[2]],
            edgecolor='none',
        )
        name = texify(args.main_channel)
        axes['triggers'].text(
            0.01, 0.95,
            '%s event triggers (Omicron)' % name,
            transform=axes['triggers'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        axes['triggers'].set_ylabel('Frequency [Hz]')
        axes['triggers'].set_ylim(args.fmin, multiplier * fthresh)
        axes['triggers'].colorbar(cmap='YlGnBu', clim=(3, 100), norm='log',
                                  label='Signal-to-noise ratio')
        axes['segments'].set_ylim(-.55, .55)
        axes['segments'].text(
            0.01, 0.95,
            r'Time segments with $f\times%d > %.2f$ Hz' % (
                multiplier, fthresh),
            transform=axes['segments'].transAxes, va='top', ha='left',
            bbox={'edgecolor': 'none', 'facecolor': 'white', 'alpha': .5})
        for ax in axes.values():
            ax.set_epoch(int(args.gpsstart))
            ax.set_xlim(*span)
        png = '%s_SCATTERING_%s_HZ-%s.png' % (chanstr, tstr, gpsstr)
        try:
            plot.save(png)
        except OverflowError as e:
            warnings.warn(str(e))
            plot.axes[1].set_ylim(0, multiplier * fthresh)
            plot.refresh()
            plot.save(png)
        plot.close()
        logger.debug("%s written." % png)

        # make histogram
        histogram = Plot(figsize=[12, 6])
        ax = histogram.gca()
        hrange = (0, multiplier * fthresh)
        for m, color in list(zip(histdata, fringecolors))[::-1]:
            if histdata[m].size:
                ax.hist(
                    histdata[m], facecolor=color, alpha=.6, range=hrange,
                    bins=50, histtype='stepfilled', label=r'$f\times%d$' % m,
                    cumulative=-1, weights=ts.dx.value, bottom=1e-100,
                    log=True)
            else:
                ax.plot(histdata[m], color=color, label=r'$f\times%d$' % m)
                ax.set_yscale('log')
        ax.set_ylim(.01, float(livetime))
        ax.set_ylabel('Time with fringe above frequency [s]')
        ax.set_xlim(*hrange)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_title(axes['position'].get_title())
        handles, labels = ax.get_legend_handles_labels()
        ax.legend(handles[::-1], labels[::-1], loc='upper right')
        hpng = '%s_SCATTERING_HISTOGRAM-%s.png' % (chanstr, gpsstr)
        histogram.save(hpng)
        histogram.close()
        logger.debug("%s written." % hpng)

        # write HTML
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif ((deadtime != 0 and effdt < 2) or
              (histdata[multiplier].size and
               histdata[multiplier].max() >=
                  fthresh/2.)):
            context = 'warning'
        else:
            continue
        page.div(class_='card border-%s mb-1 shadow-sm' % context)
        page.div(class_='card-header text-white bg-%s' % context)
        page.a(channel, class_='collapsed card-link cis-link',
               href='#osem%s' % i, **{'data-toggle': 'collapse'})
        page.div.close()  # card-header
        page.div(id_='osem%s' % i, class_='collapse',
                 **{'data-parent': '#osems-group'})
        page.div(class_='card-body')
        page.div(class_='row')
        img = htmlio.FancyPlot(
            png, caption=SCATTER_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(img))
        page.div.close()  # col-md-10 offset-md-1
        himg = htmlio.FancyPlot(
            hpng, caption=HIST_CAPTION.format(CHANNEL=channel))
        page.div(class_='col-md-10 offset-md-1')
        page.add(htmlio.fancybox_img(himg))
        page.div.close()  # col-md-10 offset-md-1
        page.div.close()  # row
        segs = StringIO()
        if deadtime:
            page.p("%d segments were found predicting a scattering fringe "
                   "above %.2f Hz." % (
                       len(scatter_segments[channel].active),
                       fthresh))
            page.table(class_='table table-sm table-hover')
            page.tbody()
            page.tr()
            page.th('Deadtime')
            page.td('%.2f/%d seconds' % (deadtime, livetime))
            page.td('%.2f%%' % deadtimepc)
            page.tr.close()
            page.tr()
            page.th('Efficiency<br><small>(SNR&ge;8 and '
                    '%.2f Hz</sub>&ltf<sub>peak</sub>&lt;%.2f Hz)</small>'
                    % (args.fmin, multiplier * fthresh))
            page.td('%d/%d events' % (efficiency, len(highsnrtrigs)))
            page.td('%.2f%%' % efficiencypc)
            page.tr.close()
            page.tr()
            page.th('Efficiency/Deadtime')
            page.td()
            page.td('%.2f' % effdt)
            page.tr.close()
            page.tbody.close()
            page.table.close()
            scatter_segments[channel].active.write(segs, format='segwizard',
                                                   coltype=float)
            page.pre(segs.getvalue())
        else:
            page.p("No segments were found with scattering above %.2f Hz."
                   % fthresh)
        page.div.close()  # card-body
        page.div.close()  # collapse
        page.div.close()  # card

    if statea:  # close accordion
        page.div.close()  # osems-group

    # scattering based on transmon BLRMS
    if statea:
        page.h3('Transmons', class_='mt-3', id_='transmons')
        page.div(id_='transmons-group')
    logger.info('Searching for scatter based on band-limited RMS of transmons')

    for i, channel in enumerate(sorted(transmons)):
        logger.info("-- Processing %s --" % channel)
        optic = channel.split('-')[1][:6]
        flag = '%s:DCH-%s_SCATTERING_BLRMS:1' % (args.ifo, optic)
        scatter_segments[channel] = DataQualityFlag(
            flag,
            isgood=False,
            description="Evidence for scattering from whitened, band-limited "
                        "RMS trends of {0}".format(channel),
        )

        # loop over state segments and compute BLRMS
        for j, seg in enumerate(statea):
            logger.debug("Processing segment [%d .. %d)" % seg)
            wblrms = get_blrms(
                alldata[j][channel],
                flow=args.bandpass_flow,
                fhigh=args.bandpass_fhigh,
            )
            scatter = get_segments(
                wblrms,
                numpy.mean(wblrms) + args.sigma * numpy.std(wblrms),
                name=flag,
            )
            scatter_segments[channel] += scatter
            logger.debug(
                "    Found %d scattering segments" % (len(scatter.active)))
        logger.debug("Completed channel %s, found %d segments in total"
                     % (channel, len(scatter_segments[channel].active)))

        # calculate efficiency and deadtime of veto
        deadtime = abs(scatter_segments[channel].active)
        try:
            deadtimepc = deadtime / livetime * 100
        except ZeroDivisionError:
            deadtimepc = 0.
        logger.info("Deadtime: %.2f%% (%.2f/%ds)"
                    % (deadtimepc, deadtime, livetime))
        highsnrtrigs = trigs[trigs['snr'] <= 200]
        efficiency = in_segmentlist(highsnrtrigs[names[0]],
                                    scatter_segments[channel].active).sum()
        try:
            efficiencypc = efficiency / len(highsnrtrigs) * 100
        except ZeroDivisionError:
            efficiencypc = 0.
        logger.info("Efficiency (SNR>=8): %.2f%% (%d/%d)"
                    % (efficiencypc, efficiency, len(highsnrtrigs)))
        if deadtimepc == 0.:
            effdt = 0
        else:
            effdt = efficiencypc/deadtimepc
        logger.info("Efficiency/Deadtime: %.2f" % effdt)

        if abs(scatter_segments[channel].active):
            actives.extend(scatter_segments[channel].active)

        # write HTML
        if deadtime != 0 and effdt > 2:
            context = 'danger'
        elif deadtime != 0 and effdt < 2:
            context = 'warning'
        else:
            continue
        page.add(htmlio.write_flag_html(
            scatter_segments[channel], span, i, parent='transmons-group',
            title=channel, context=context, plotdir=''))

    if statea:  # close accordion
        page.div.close()  # transmons-group

    actives = actives.coalesce()  # merge contiguous segments
    if statea and not actives:
        page.add(htmlio.alert(
            'No evidence of scattering found in the channels analyzed',
            context=args.ifo.lower(), dismiss=False))

    # identify triggers during active segments
    logger.debug('Writing a summary CSV record')
    ind = [i for i, trigtime in enumerate(highsnrtrigs[names[0]])
           if trigtime in actives]
    gps = highsnrtrigs[names[0]][ind]
    freq = highsnrtrigs[names[1]][ind]
    snr = highsnrtrigs[names[2]][ind]
    segs = [y for x in gps for y in actives if x in y]
    table = EventTable(
        [gps, freq, snr, [seg[0] for seg in segs], [seg[1] for seg in segs]],
        names=('trigger_time', 'trigger_frequency', 'trigger_snr',
               'segment_start', 'segment_end'))
    logger.info('The following {} triggers fell within active scattering '
                'segments:\n\n'.format(len(table)))
    print(table)
    print('\n\n')
    table.write(summfile, overwrite=True)

    # -- launch omega scans -----------

    nscans = min(args.omega_scans, len(table))
    if nscans > 0:
        # launch scans
        scandir = 'scans'
        ind = random.sample(range(0, len(table)), nscans)
        omegatimes = [str(t) for t in table['trigger_time'][ind]]
        logger.debug('Collected {} event times to omega scan: {}'.format(
            nscans, ', '.join(omegatimes)))
        logger.info('Creating workflow for omega scans')
        flags = batch.get_command_line_flags(
            ifo=args.ifo, ignore_state_flags=True)
        condorcmds = batch.get_condor_arguments(timeout=4, gps=args.gpsstart)
        batch.generate_dag(omegatimes, flags=flags, submit=True,
                           outdir=scandir, condor_commands=condorcmds)
        logger.info('Launched {} omega scans to condor'.format(nscans))
        # render HTML
        page.h2('Omega scans', class_='mt-4', id_='omega-scans')
        msg = (
            'The following event times correspond to significant Omicron '
            'triggers that occur during the scattering segments found above. '
            'To compare these against fringe frequency projections, please '
            'use the "simple scattering" module:',
            markup.oneliner.pre(
                '$ python -m gwdetchar.scattering.simple --help',
            ),
        )
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
        page.add(htmlio.scaffold_omega_scans(
            omegatimes, args.main_channel, scandir=scandir))
    elif args.omega_scans:
        logger.info('No events found during active scattering segments')

    # -- finalize ---------------------

    # write segments
    scatter_segments.write(segfile, path="segments", overwrite=True)
    logger.debug("%s written" % segfile)

    # write HTML
    htmlio.close_page(page, 'index.html')
    logger.info("-- index.html written, all done --")

    # return to original directory
    os.chdir(indir)
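Both per-channel loops above reduce veto performance to the same three numbers: deadtime (percentage of livetime vetoed), efficiency (percentage of triggers vetoed), and their ratio, where efficiency/deadtime well above 1 means the veto removes proportionally more triggers than analysis time. A self-contained sketch of that arithmetic with made-up inputs:

from ligo.segments import segment, segmentlist

livetime = 4096.
veto = segmentlist([segment(100, 164), segment(900, 1028)]).coalesce()
deadtime = float(abs(veto))             # 192 s vetoed
deadtimepc = deadtime / livetime * 100  # ~4.69% of livetime

trigger_times = [120., 500., 950., 2000.]  # illustrative trigger peak times
vetoed = sum(t in veto for t in trigger_times)          # 2 of 4 vetoed
efficiencypc = vetoed / len(trigger_times) * 100        # 50%
effdt = efficiencypc / deadtimepc if deadtimepc else 0  # ~10.67
print("deadtime %.2f%%, efficiency %.2f%%, ratio %.2f"
      % (deadtimepc, efficiencypc, effdt))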
Example #6
    # Get the SNR threshold to use for this channel.
    omic_trigs = omic_trigger_tables[i] # get triggers from one channel at a time!
    snr_thresh = get_percentile(omic_trigs, percentile[j])
    # Apply the snr filter to the triggers and get their peaktimes
    selection = filter_threshold(omic_trigs, snr_thresh)
    peaktime = np.array(selection.getColumnByName('peak_time')[:], dtype=float) + \
      np.array(selection.getColumnByName('peak_time_ns')[:], dtype=float) * 1.0e-9
    # Now we can calculate the offsets for this channel
    veto_segs = []
    t0 = time.time()
    get_vetotimes(peaktime, end_times, veto_segs)
    t1 = time.time()
    print "This took %f seconds to run to completion\n" %(t1 - t0)

    # Coalesce the segments and write them out to the file
    print "Offsets calculated. Coalesce the segments now\n"
    veto_segs = SegmentList(veto_segs)
    # Merge contiguous veto sections and sort the list of segments
    veto_segs.coalesce()

    # Write all the segments to disk. Read in the [0, 2] columns to recover the data
    # We will use h5py to write out all the segments in groups organized by the name
    # of the channel
    print "Write the segments to file\n"
    grp = thresh_grp.create_group('%s' %channels[i])
    SegmentList.write(veto_segs, grp, 'vetosegs')
    print "All done!"

f.close()
print "Finished computing the offsets for all the channels!!!! Wooohoo!!!\n"