예제 #1
0
# Names of the flags returned by the mocked segment-database queries.
QUERY_FLAGS = ['X1:TEST-FLAG:1', 'Y1:TEST-FLAG2:4']

# Expected query result: one flag with (partly abutting) active segments,
# one with known time but no active segments.
QUERY_RESULT = DataQualityDict()

QUERY_RESULT['X1:TEST-FLAG:1'] = DataQualityFlag(
    'X1:TEST-FLAG:1',
    known=[(0, 10)],
    active=[(0, 1), (1, 2), (3, 4), (6, 9)],
)

QUERY_RESULT['Y1:TEST-FLAG2:4'] = DataQualityFlag(
    'Y1:TEST-FLAG2:4',
    known=[(0, 5), (9, 10)],
    active=[],
)

# Coalesced copy of the expected result (abutting active segments merged).
QUERY_RESULTC = type(QUERY_RESULT)({
    name: flag.copy().coalesce() for name, flag in QUERY_RESULT.items()
})


def query_segdb(query_func, *args, **kwargs):
    """Mock a query to an S6-style DB2 database
    """
    # build the patchers first; entering them (inside ``try``) is what
    # actually touches glue, so ``ImportError`` is still caught below
    try:
        patch_db = mock.patch(
            'glue.segmentdb.segmentdb_utils.setup_database')
        patch_version = mock.patch(
            'glue.segmentdb.segmentdb_utils.expand_version_number',
            mocks.segdb_expand_version_number(1, 4))
        patch_query = mock.patch(
            'glue.segmentdb.segmentdb_utils.query_segments',
            mocks.segdb_query_segments(QUERY_RESULT))
        with patch_db, patch_version, patch_query:
            return query_func(*args, **kwargs)
    except ImportError as exc:
        pytest.skip(str(exc))
예제 #2
0
def main(args=None):
    """Run the software saturation command-line interface

    Searches frame data for channels whose OUTPUT value reached the LIMIT
    value set in software, records the saturation segments, and optionally
    writes an HTML report.

    Parameters
    ----------
    args : `list` of `str`, optional
        command-line arguments to parse, defaults to ``sys.argv[1:]``
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # get IFO
    ifo = args.ifo.upper()
    site = ifo[0]
    frametype = args.frametype or '%s_R' % ifo

    # let's go
    LOGGER.info('{} Software saturations {}-{}'.format(
        args.ifo, int(args.gpsstart), int(args.gpsend)))

    # get segments
    span = Segment(args.gpsstart, args.gpsend)
    if args.state_flag:
        state = DataQualityFlag.query(args.state_flag, int(args.gpsstart),
                                      int(args.gpsend),
                                      url=const.DEFAULT_SEGMENT_SERVER)
        # trim the end of each active segment to avoid the state transition
        for i, seg in enumerate(state.active):
            state.active[i] = type(seg)(seg[0], seg[1]-args.pad_state_end)
        segs = state.active.coalesce()
        LOGGER.debug("Recovered %d seconds of time for %s"
                     % (abs(segs), args.state_flag))
    else:
        segs = SegmentList([Segment(args.gpsstart, args.gpsend)])

    # find frames
    cache = gwdatafind.find_urls(
        site, frametype, int(args.gpsstart), int(args.gpsend))

    # find channels
    if not os.getenv('LIGO_DATAFIND_SERVER'):
        raise RuntimeError("No LIGO_DATAFIND_SERVER variable set, don't know "
                           "how to discover channels")
    else:
        LOGGER.debug("Identifying channels in frame files")
        if len(cache) == 0:
            raise RuntimeError(
                "No frames recovered for %s in interval [%s, %s)" %
                (frametype, int(args.gpsstart),
                 int(args.gpsend)))
        # channel names are read from the first frame file only
        allchannels = get_channel_names(cache[0])
        LOGGER.debug("   Found %d channels" % len(allchannels))
        sys.stdout.flush()
        channels = core.find_limit_channels(allchannels, skip=args.skip)
        LOGGER.info(
            "   Parsed %d channels with '_LIMIT' and '_LIMEN' or '_SWSTAT'"
            % sum(map(len, channels)))

    # -- read channels and check limits -------------

    saturations = DataQualityDict()
    bad = set()

    # TODO: use multiprocessing to separate channel list into discrete chunks
    #       should give a factor of X for X processes

    # check limens
    for suffix, clist in zip(['LIMEN', 'SWSTAT'], channels):
        nchans = len(clist)
        # group channels in sets for batch processing
        #     min of <number of channels>, user group size (sensible number),
        #     and 512 Mb of RAM for single-precision EPICS
        try:
            # ValueError here means ``segs`` is empty
            dur = max([float(abs(s)) for s in segs])
        except ValueError:
            ngroup = args.group_size
        else:
            ngroup = int(
                min(nchans, args.group_size, 2 * 1024**3 / 4. / 16. / dur))
        LOGGER.info('Processing %s channels in groups of %d' % (
            suffix, ngroup))
        sys.stdout.flush()
        sets = core.grouper(clist, ngroup)
        for i, cset in enumerate(sets):
            # remove empty entries used to pad the list to 8 elements
            cset = list(cset)
            while cset[-1] is None:
                cset.pop(-1)
            for seg in segs:
                cache2 = sieve_cache(cache, segment=seg)
                if not len(cache2):
                    continue
                saturated = core.is_saturated(
                    cset, cache2, seg[0], seg[1], indicator=suffix,
                    nproc=args.nproc)
                # accumulate saturation segments per channel
                for new in saturated:
                    try:
                        saturations[new.name] += new
                    except KeyError:
                        saturations[new.name] = new
            # log a PASS/FAIL/SKIP line for every channel in this group
            for j, c in enumerate(cset):
                try:
                    sat = saturations[c]
                except KeyError:
                    LOGGER.debug('%40s:      SKIP      [%d/%d]'
                                 % (c, i*ngroup + j + 1, nchans))
                else:
                    if abs(sat.active):
                        LOGGER.debug('%40s: ---- FAIL ---- [%d/%d]'
                                     % (c, i*ngroup + j + 1, nchans))
                        for seg in sat.active:
                            LOGGER.debug(" " * 42 + str(seg))
                        bad.add(c)
                    else:
                        LOGGER.debug('%40s:      PASS      [%d/%d]'
                                     % (c, i*ngroup + j + 1, nchans))
                sys.stdout.flush()

    # -- log results and exit -----------------------

    if len(bad):
        LOGGER.info("Saturations were found for all of the following:\n\n")
        for c in bad:
            print(c)
        print('\n\n')
    else:
        LOGGER.info("No software saturations were found in any channels")

    # write segments to file
    outfile = ('%s-SOFTWARE_SATURATIONS-%d-%d.h5'
               % (ifo, int(args.gpsstart),
                  int(args.gpsend) - int(args.gpsstart)))
    LOGGER.info("Writing saturation segments to %s" % outfile)
    saturations.write(outfile, path="segments", overwrite=True)

    if args.html:
        # get base path
        base = os.path.dirname(args.html)
        os.chdir(base)
        if args.plot:
            args.plot = os.path.curdir
        segfile = os.path.relpath(outfile, os.path.dirname(args.html))
        if os.path.basename(args.html) == 'index.html':
            links = [
                '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
                ('Parameters', '#parameters'),
                ('Segments', [('Software saturations',
                               '#software-saturations')]),
                ('Results', '#results'),
            ]
            if args.state_flag:
                links[2][1].insert(0, ('State flag', '#state-flag'))
            (brand, class_) = htmlio.get_brand(ifo, 'Saturations',
                                               args.gpsstart)
            navbar = htmlio.navbar(links, class_=class_, brand=brand)
            page = htmlio.new_bootstrap_page(
                navbar=navbar, title='%s Saturations | %d-%d' % (
                    ifo, int(args.gpsstart), int(args.gpsend)))
        else:
            # no navbar for a non-index page
            page = markup.page()
            page.div(class_='container')
        # -- header
        page.div(class_='pb-2 mt-3 mb-2 border-bottom')
        page.h1('%s Software Saturations: %d-%d'
                % (ifo, int(args.gpsstart), int(args.gpsend)))
        page.div.close()
        # -- parameters
        content = [
            ('State end padding', args.pad_state_end),
            ('Skip', ', '.join(map(repr, args.skip)))]
        page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
        page.div(class_='row')
        page.div(class_='col-md-9 col-sm-12')
        page.add(htmlio.parameter_table(
            content, start=args.gpsstart, end=args.gpsend,
            flag=args.state_flag))
        page.div.close()  # col-md-9 col-sm-12
        page.div(class_='col-md-3 col-sm-12')
        page.add(htmlio.download_btn(
            [('Segments (HDF)', segfile)],
            btnclass='btn btn-%s dropdown-toggle' % ifo.lower(),
        ))
        page.div.close()  # col-md-3 col-sm-12
        page.div.close()  # row
        page.h5('Command-line:')
        page.add(htmlio.get_command_line(about=False, prog=PROG))
        # -- segments
        page.h2('Segments', class_='mt-4', id_='segments')
        msg = ("This analysis searched {0} filter bank readback channels for "
               "time periods during which their OUTPUT value matched or "
               "exceeded the LIMIT value set in software. Signals that "
               "achieve saturation are shown below, and saturation segments "
               "are available by expanding a given panel.").format(
                   sum(map(len, channels)))
        page.add(htmlio.alert(msg, context=ifo.lower()))
        # record state segments
        if args.state_flag:
            page.h3('State flag', class_='mt-3', id_='state-flag')
            page.div(id_='accordion1')
            page.add(htmlio.write_flag_html(
                state, span, 'state', parent='accordion1', context='success',
                plotdir=args.plot, facecolor=(0.2, 0.8, 0.2),
                edgecolor='darkgreen', known={
                    'facecolor': 'red',
                    'edgecolor': 'darkred',
                    'height': 0.4},
            ))
            page.div.close()
        # record saturation segments
        if len(bad):
            page.h3('Software saturations', class_='mt-3',
                    id_='software-saturations')
            page.div(id_='accordion2')
            # one expandable panel per saturated channel
            for i, (c, flag) in enumerate(saturations.items()):
                if abs(flag.active) > 0:
                    title = '%s [%d]' % (flag.name, len(flag.active))
                    page.add(htmlio.write_flag_html(
                        flag, span=span, id=i, parent='accordion2',
                        title=title, plotdir=args.plot))
            page.div.close()
        else:
            page.add(htmlio.alert('No software saturations were found in this '
                                  'analysis', context=ifo.lower(),
                                  dismiss=False))
        # -- results table
        page.h2('Results summary', class_='mt-4', id_='results')
        page.add(htmlio.alert('All channels for which the LIMIT setting was '
                              'active are shown below.', context=ifo.lower()))
        page.table(class_='table table-striped table-hover')
        # write table header
        page.thead()
        page.tr()
        for header in ['Channel', 'Result', 'Num. saturations']:
            page.th(header)
        page.thead.close()
        # write body
        page.tbody()
        for c, seglist in saturations.items():
            passed = abs(seglist.active) == 0
            if passed:
                page.tr()
            else:
                page.tr(class_='table-warning')
            page.td(c)
            page.td(passed and 'Pass' or 'Fail')
            page.td(len(seglist.active))
            page.tr.close()
        page.tbody.close()
        page.table.close()
        # close and write
        htmlio.close_page(page, args.html)
예제 #3
0
# Flag names used by the mocked segment-database queries below.
QUERY_FLAGS = ['X1:TEST-FLAG:1', 'Y1:TEST-FLAG2:4']

# Expected query result fixture.
QUERY_RESULT = DataQualityDict()

# X1 flag: fully known over [0, 10) with abutting active segments, so that
# coalescing below actually merges something.
QUERY_RESULT['X1:TEST-FLAG:1'] = DataQualityFlag(
    'X1:TEST-FLAG:1',
    known=[(0, 10)],
    active=[(0, 1), (1, 2), (3, 4), (6, 9)],
)

# Y1 flag: known time but no active segments.
QUERY_RESULT['Y1:TEST-FLAG2:4'] = DataQualityFlag(
    'Y1:TEST-FLAG2:4',
    known=[(0, 5), (9, 10)],
    active=[],
)

# Coalesced copy of the expected result.
QUERY_RESULTC = type(QUERY_RESULT)({
    key: value.copy().coalesce()
    for key, value in QUERY_RESULT.items()
})


def query_segdb(query_func, *args, **kwargs):
    """Mock a query to an S6-style DB2 database
    """
    # patch out the glue.segmentdb machinery so no real database is used;
    # skip the test entirely if glue itself is not installed
    try:
        with mock.patch('glue.segmentdb.segmentdb_utils.setup_database'):
            with mock.patch(
                    'glue.segmentdb.segmentdb_utils.expand_version_number',
                    mocks.segdb_expand_version_number(1, 4)):
                with mock.patch(
                        'glue.segmentdb.segmentdb_utils.query_segments',
                        mocks.segdb_query_segments(QUERY_RESULT)):
                    return query_func(*args, **kwargs)
    except ImportError as exc:
        pytest.skip(str(exc))
예제 #4
0
def main(args=None):
    """Run the ADC/DAC overflow command-line interface

    Searches the cumulative overflow counter for each requested DCUID (and,
    with ``--deep``, the per-card/slot overflow channels), writes the
    resulting segments/triggers to file, and optionally renders an HTML
    report.

    Parameters
    ----------
    args : `list` of `str`, optional
        command-line arguments to parse, defaults to ``sys.argv[1:]``
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    # site-specific defaults for documentation links
    fec_map = args.fec_map
    simulink = args.simulink
    daqsvn = args.daqsvn or ('https://daqsvn.ligo-la.caltech.edu/websvn/'
                             'listing.php?repname=daq_maps')
    if args.ifo == 'H1':
        if not fec_map:
            fec_map = 'https://lhocds.ligo-wa.caltech.edu/exports/detchar/fec/'
        if not simulink:
            simulink = 'https://lhocds.ligo-wa.caltech.edu/daq/simulink/'
    if args.ifo == 'L1':
        if not fec_map:
            fec_map = 'https://llocds.ligo-la.caltech.edu/exports/detchar/fec/'
        if not simulink:
            simulink = 'https://llocds.ligo-la.caltech.edu/daq/simulink/'

    span = Segment(args.gpsstart, args.gpsend)

    # let's go
    LOGGER.info('{} Overflows {}-{}'.format(args.ifo, int(args.gpsstart),
                                            int(args.gpsend)))

    # get segments
    if args.state_flag:
        state = DataQualityFlag.query(args.state_flag,
                                      int(args.gpsstart),
                                      int(args.gpsend),
                                      url=const.DEFAULT_SEGMENT_SERVER)
        # drop segments shorter than the end padding, and trim the end of
        # each remaining segment to avoid the state transition
        tmp = type(state.active)()
        for i, seg in enumerate(state.active):
            if abs(seg) < args.segment_end_pad:
                continue
            tmp.append(type(seg)(seg[0], seg[1] - args.segment_end_pad))
        state.active = tmp.coalesce()
        statea = state.active
    else:
        statea = SegmentList([span])

    if not args.output_file:
        duration = abs(span)
        args.output_file = ('%s-OVERFLOWS-%d-%d.h5' %
                            (args.ifo, int(args.gpsstart), duration))
        LOGGER.debug("Set default output file as %s" % args.output_file)

    # set up container
    overflows = DataQualityDict()

    # prepare data access
    if args.nds:
        from gwpy.io import nds2 as io_nds2
        host, port = args.nds.rsplit(':', 1)
        ndsconnection = io_nds2.connect(host, port=int(port))
        if ndsconnection.get_protocol() == 1:
            # NDS1 has no availability query, assume the full span
            cachesegs = SegmentList(
                [Segment(int(args.gpsstart), int(args.gpsend))])
        else:
            cachesegs = io_nds2.get_availability(
                ['{0}:FEC-1_DAC_OVERFLOW_ACC_0_0'.format(args.ifo)],
                int(args.gpsstart),
                int(args.gpsend),
            )
    else:  # get frame cache
        cache = gwdatafind.find_urls(args.ifo[0], args.frametype,
                                     int(args.gpsstart), int(args.gpsend))
        cachesegs = statea & cache_segments(cache)

    flag_desc = "ADC/DAC Overflow indicated by {0}"

    # get channel and find overflows
    for dcuid in args.dcuid:
        LOGGER.info("Processing DCUID %d" % dcuid)
        channel = daq.ligo_accum_overflow_channel(dcuid, args.ifo)
        overflows[channel] = DataQualityFlag(channel, known=cachesegs)
        if args.deep:
            LOGGER.debug(" -- Getting list of overflow channels")
            try:
                channels = daq.ligo_model_overflow_channels(dcuid,
                                                            args.ifo,
                                                            args.frametype,
                                                            gpstime=span[0],
                                                            nds=args.nds)
            except IndexError:  # no frame found for GPS start, try GPS end
                channels = daq.ligo_model_overflow_channels(dcuid,
                                                            args.ifo,
                                                            args.frametype,
                                                            gpstime=span[-1])
            for chan in channels:  # set up flags early
                overflows[chan] = DataQualityFlag(
                    chan,
                    known=cachesegs,
                    description=flag_desc.format(chan),
                    isgood=False,
                )
            # count of channels discovered (was ``len(channel)``, the
            # length of the accumulator channel *name*)
            LOGGER.debug(" -- %d channels found" % len(channels))
        for seg in cachesegs:
            LOGGER.debug(" -- Processing {}-{}".format(*seg))
            if args.nds:
                read_kw = dict(connection=ndsconnection)
            else:
                read_kw = dict(source=cache, nproc=args.nproc)
            msg = "Reading ACCUM_OVERFLOW data:".rjust(30)
            data = get_data(channel,
                            seg[0],
                            seg[1],
                            pad=0.,
                            verbose=msg,
                            **read_kw)
            new = daq.find_overflow_segments(
                data,
                cumulative=True,
            )
            overflows[channel] += new
            LOGGER.info(" -- {} overflows found".format(len(new.active)))
            if not new.active:
                continue
            # go deep!
            # NOTE(review): ``channels`` is only defined when --deep is
            # given (or left over from a previous DCUID); without --deep
            # this loop would raise NameError on the first overflow —
            # confirm whether it should be guarded by ``args.deep``
            for s, e in tqdm.tqdm(new.active.protract(2),
                                  unit='ovfl',
                                  desc='Going deep'.rjust(30)):
                data = get_data(channels, s, e, **read_kw)
                for ch in channels:
                    try:
                        overflows[ch] += daq.find_overflow_segments(
                            data[ch],
                            cumulative=True,
                        )
                    except KeyError:
                        warnings.warn("Skipping {}".format(ch), UserWarning)
                        continue
        LOGGER.debug(" -- Search complete")

    # write output
    LOGGER.info("Writing segments to %s" % args.output_file)
    # NOTE: the trigger table is built from the un-rounded segments;
    # rounding below only affects the segments written to file
    table = table_from_segments(
        overflows,
        sngl_burst=args.output_file.endswith((".xml", ".xml.gz")),
    )
    if args.integer_segments:
        for key in overflows:
            overflows[key] = overflows[key].round()
    if args.output_file.endswith((".h5", "hdf", ".hdf5")):
        with h5py.File(args.output_file, "w") as h5f:
            table.write(h5f, path="triggers")
            overflows.write(h5f, path="segments")
    else:
        table.write(args.output_file, overwrite=True)
        overflows.write(args.output_file, overwrite=True, append=True)

    # write HTML
    if args.html:
        # get base path
        base = os.path.dirname(args.html)
        os.chdir(base)
        if args.plot:
            args.plot = os.path.curdir
        if args.output_file:
            args.output_file = os.path.relpath(args.output_file,
                                               os.path.dirname(args.html))
        if os.path.basename(args.html) == 'index.html':
            links = [
                '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
                ('Parameters', '#parameters'),
                ('Segments', [('Overflows', '#overflows')]),
                ('Results', '#results'),
            ]
            if args.state_flag:
                links[2][1].insert(0, ('State flag', '#state-flag'))
            (brand, class_) = htmlio.get_brand(args.ifo, 'Overflows',
                                               args.gpsstart)
            navbar = htmlio.navbar(links, class_=class_, brand=brand)
            page = htmlio.new_bootstrap_page(
                title='%s Overflows | %d-%d' %
                (args.ifo, int(args.gpsstart), int(args.gpsend)),
                navbar=navbar)
        else:
            # no navbar for a non-index page
            page = htmlio.markup.page()
            page.div(class_='container')

        # -- header
        page.div(class_='pb-2 mt-3 mb-2 border-bottom')
        page.h1('%s ADC/DAC Overflows: %d-%d' %
                (args.ifo, int(args.gpsstart), int(args.gpsend)))
        page.div.close()

        # -- parameters
        content = [('DCUIDs', ' '.join(map(str, args.dcuid)))]
        if daqsvn:
            content.append(('FEC configuration', (
                '<a href="{0}" target="_blank" title="{1} FEC configuration">'
                '{0}</a>').format(daqsvn, args.ifo)))
        if fec_map:
            content.append(
                ('FEC map', '<a href="{0}" target="_blank" title="{1} FEC '
                 'map">{0}</a>'.format(fec_map, args.ifo)))
        if simulink:
            content.append(
                ('Simulink models', '<a href="{0}" target="_blank" title="{1} '
                 'Simulink models">{0}</a>'.format(simulink, args.ifo)))
        page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
        page.div(class_='row')
        page.div(class_='col-md-9 col-sm-12')
        page.add(
            htmlio.parameter_table(content,
                                   start=args.gpsstart,
                                   end=args.gpsend,
                                   flag=args.state_flag))
        page.div.close()  # col-md-9 col-sm-12

        # link to summary file
        if args.output_file:
            ext = ('HDF' if args.output_file.endswith(
                (".h5", "hdf", ".hdf5")) else 'XML')
            page.div(class_='col-md-3 col-sm-12')
            page.add(
                htmlio.download_btn(
                    [('Segments ({})'.format(ext), args.output_file)],
                    btnclass='btn btn-%s dropdown-toggle' % args.ifo.lower(),
                ))
            page.div.close()  # col-md-3 col-sm-12
        page.div.close()  # row

        # -- command-line
        page.h5('Command-line:')
        page.add(htmlio.get_command_line(about=False, prog=PROG))

        # -- segments
        page.h2('Segments', class_='mt-4', id_='segments')

        # give contextual information
        msg = ("This analysis searched for digital-to-analogue (DAC) or "
               "analogue-to-digital (ADC) conversion overflows in the {0} "
               "real-time controls system. ").format(
                   SITE_MAP.get(args.ifo, 'LIGO'))
        if args.deep:
            msg += (
                "A hierarchical search was performed, with one cumulative "
                "overflow counter checked per front-end controller (FEC). "
                "For those models that indicated an overflow, the card- and "
                "slot-specific channels were then checked. ")
        msg += (
            "Constant overflow is shown as yellow, while transient overflow "
            "is shown as red. If a data-quality flag was loaded for this "
            "analysis, it will be displayed in green.")
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
        # record state segments
        if args.state_flag:
            page.h3('State flag', class_='mt-3', id_='state-flag')
            page.div(id_='accordion1')
            page.add(
                htmlio.write_flag_html(state,
                                       span,
                                       'state',
                                       parent='accordion1',
                                       context='success',
                                       plotdir=args.plot,
                                       facecolor=(0.2, 0.8, 0.2),
                                       edgecolor='darkgreen',
                                       known={
                                           'facecolor': 'red',
                                           'edgecolor': 'darkred',
                                           'height': 0.4,
                                       }))
            page.div.close()
        # record overflow segments
        if sum(abs(s.active) for s in overflows.values()):
            page.h3('Overflows', class_='mt-3', id_='overflows')
            page.div(id_='accordion2')
            # one expandable panel per channel that overflowed
            for i, (c, flag) in enumerate(list(overflows.items())):
                if abs(flag.active) == 0:
                    continue
                # constant overflow (whole span) gets a softer context
                if abs(flag.active) == abs(cachesegs):
                    context = 'warning'
                else:
                    context = 'danger'
                try:
                    channel = cds.get_real_channel(flag.name)
                except Exception:
                    title = '%s [%d]' % (flag.name, len(flag.active))
                else:
                    title = '%s (%s) [%d]' % (flag.name, channel,
                                              len(flag.active))
                page.add(
                    htmlio.write_flag_html(flag,
                                           span,
                                           i,
                                           parent='accordion2',
                                           title=title,
                                           context=context,
                                           plotdir=args.plot))
            page.div.close()
        else:
            page.add(
                htmlio.alert('No overflows were found in this analysis',
                             context=args.ifo.lower(),
                             dismiss=False))

        # -- results table
        page.h2('Results summary', class_='mt-4', id_='results')
        page.table(class_='table table-striped table-hover')
        # write table header
        page.thead()
        page.tr()
        for header in ['Channel', 'Connected signal', 'Num. overflows']:
            page.th(header)
        page.thead.close()
        # write body
        page.tbody()
        for c, seglist in overflows.items():
            t = abs(seglist.active)
            if t == 0:
                page.tr()
            elif t == abs(cachesegs):
                page.tr(class_='table-warning')
            else:
                page.tr(class_='table-danger')
            page.td(c)
            try:
                page.td(cds.get_real_channel(str(c)))
            except Exception:
                page.td()
            page.td(len(seglist.active))
            page.tr.close()
        page.tbody.close()
        page.table.close()

        # -- close and write
        htmlio.close_page(page, args.html)
        LOGGER.info("HTML written to %s" % args.html)