Example #1
    def frame_read(self, format=None):
        ts = self.TEST_CLASS.read(
            TEST_GWF_FILE, self.channel, format=format)
        self.assertEqual(ts.epoch, Time(968654552, format='gps', scale='utc'))
        self.assertEqual(ts.sample_rate, units.Quantity(16384, 'Hz'))
        self.assertEqual(ts.unit, units.Unit('strain'))
        return ts
Example #2
    def frame_read(self, format=None):
        ts = self.TEST_CLASS.read(TEST_GWF_FILE, self.channel, format=format)
        self.assertEqual(ts.epoch, Time(968654552, format='gps', scale='utc'))
        self.assertEqual(ts.sample_rate, units.Quantity(16384, 'Hz'))
        self.assertEqual(ts.unit, units.Unit('strain'))
        # check that channel carries the correct parameters
        self.assertEqual(ts.channel.sample_rate, ts.sample_rate)
        self.assertEqual(ts.channel.unit, ts.unit)
        return ts
Example #3
    def test_epoch(self):
        """Test `gwpy.timeseries.TimeSeriesBase.epoch`
        """
        # check basic conversion from t0 -> epoch
        a = self.create(t0=1126259462)
        assert a.epoch == Time('2015-09-14 09:50:45', format='iso')

        # test that we can't delete epoch
        with pytest.raises(AttributeError):
            del a.epoch

        # check None gets preserved
        a.epoch = None
        with pytest.raises(AttributeError):
            a._t0

        # check other types
        a.epoch = Time('2015-09-14 09:50:45', format='iso')
        utils.assert_quantity_almost_equal(a.t0,
                                           units.Quantity(1126259462, 's'))
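
For reference, the GPS-to-UTC round trip exercised by `test_epoch` can be reproduced with astropy alone. A minimal standalone sketch using only the values asserted above (astropy.time is the only assumed dependency):

from astropy.time import Time

# GPS 1126259462 is the t0 value used in the test above
t = Time(1126259462, format='gps')
print(t.utc.iso)   # '2015-09-14 09:50:45.000'

# and the reverse: an ISO (UTC) timestamp back to GPS seconds
t2 = Time('2015-09-14 09:50:45', format='iso', scale='utc')
print(t2.gps)      # ~1126259462.0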
Example #4
                             StateTimeSeriesDict, Bits)
from gwpy.segments import (Segment, SegmentList, DataQualityFlag,
                           DataQualityDict)
from gwpy.frequencyseries import (FrequencySeries, SpectralVariance)
from gwpy.types import Array2D
from gwpy.spectrogram import Spectrogram
from gwpy.plotter import (TimeSeriesPlot, SegmentPlot)

import mocks
import utils
from mocks import mock
from test_array import TestSeries

SEED = 1
numpy.random.seed(SEED)
GPS_EPOCH = Time(0, format='gps', scale='utc')
ONE_HZ = units.Quantity(1, 'Hz')
ONE_SECOND = units.Quantity(1, 'second')

TEST_GWF_FILE = os.path.join(
    os.path.split(__file__)[0], 'data', 'HLV-GW100916-968654552-1.gwf')
TEST_HDF_FILE = '%s.hdf' % TEST_GWF_FILE[:-4]
TEST_SEGMENT = Segment(968654552, 968654553)

FIND_CHANNEL = 'L1:DCS-CALIB_STRAIN_C01'
FIND_FRAMETYPE = 'L1_HOFT_C01'

LOSC_IFO = 'L1'
LOSC_GW150914 = 1126259462
LOSC_GW150914_SEGMENT = Segment(LOSC_GW150914 - 2, LOSC_GW150914 + 2)
LOSC_GW150914_DQ_BITS = [
Example #5
    def process(self,
                nds=None,
                nproc=1,
                config=GWSummConfigParser(),
                datacache=None,
                trigcache=None,
                datafind_error='raise',
                **kwargs):
        """Process data for the given state.
        """

        ifo = self.ifo

        for p in self.plots:
            if p.outputfile in globalv.WRITTEN_PLOTS:
                p.new = False

        # --------------------------------------------------------------------
        # work out which channels are needed

        tschannels = [
            HEPI_GPS_CHANNEL % (ifo, chamber) for chamber in self.chambers
        ]
        svchannels = [
            HEPI_LATCH_CHANNEL % (ifo, chamber) for chamber in self.chambers
        ]
        if self.chambers == HAMs:
            tschannels.extend([
                HAM_ISI_GPS_CHANNEL % (ifo, chamber)
                for chamber in self.chambers[1:]
            ])
            svchannels.extend([
                HAM_ISI_LATCH_CHANNEL % (ifo, chamber)
                for chamber in self.chambers[1:]
            ])
        else:
            tschannels.extend([
                BSC_ST1_GPS_CHANNEL % (ifo, chamber)
                for chamber in self.chambers
            ])
            svchannels.extend([
                BSC_ST1_LATCH_CHANNEL % (ifo, chamber)
                for chamber in self.chambers
            ])
            tschannels.extend([
                BSC_ST2_GPS_CHANNEL % (ifo, chamber)
                for chamber in self.chambers
            ])
            svchannels.extend([
                BSC_ST2_LATCH_CHANNEL % (ifo, chamber)
                for chamber in self.chambers
            ])

        state = sorted(self.states, key=lambda s: abs(s.active))[0]

        vprint("    %d channels identified for TimeSeries\n" % len(tschannels))
        tripdata = get_timeseries_dict(tschannels,
                                       state,
                                       config=config,
                                       nds=nds,
                                       nproc=nproc,
                                       datafind_error=datafind_error,
                                       cache=datacache)
        vprint("    All time-series data loaded\n")

        vprint("    %d channels identified as StateVectors\n" %
               len(svchannels))
        latchdata = get_timeseries_dict(svchannels,
                                        state,
                                        config=config,
                                        nds=nds,
                                        nproc=nproc,
                                        datafind_error=datafind_error,
                                        statevector=True,
                                        cache=datacache)
        vprint("    All state-vector data loaded\n")

        # --------------------------------------------------------------------
        # find the trips

        self.trips = []
        for (gpschannel, latch) in zip(tschannels, svchannels):
            # get channel data
            gpschannel = get_channel(gpschannel)
            latch = get_channel(latch)
            chamber = gpschannel.subsystem
            system = gpschannel.system
            vprint("    Locating WD trips for %s %s %s...\n" %
                   (ifo, chamber, system))

            # find times of a trip
            trips = []
            for i, ts in enumerate(tripdata[gpschannel.name]):
                alltrips = ts.times[((numpy.diff(ts.value) > 0)
                                     & (ts.value[1:] > 1e8)).nonzero()[0] + 1]
                for j, gpstime in enumerate(alltrips.value):
                    trips.append((i, gpstime))

            vprint("        Found %d WD trips.\n" % len(trips))

            # associate cause
            for i, trip in enumerate(trips):
                tsid, t = trip
                gpstime = int(t)
                # extract 1 second of LATCH data
                ldata = latchdata[latch.name][tsid].crop(gpstime,
                                                         gpstime + 1).value
                # find transition value
                try:
                    bits = ldata[sorted(
                        numpy.unique(ldata, return_index=True)[1])][1]
                except IndexError:
                    bits = ldata[0]
                except ValueError:  # IndexError is already handled above
                    bits = None
                # associate cause
                if not bits:
                    if re.match(r'ST\d', latch.signal):
                        stage = 'ISI %s' % latch.signal.split('_')[0]
                    else:
                        stage = system
                    causes = ['%s Unknown' % stage]
                else:
                    allbits = numpy.nonzero(
                        list(map(int, bin(int(bits))[2:][::-1])))[0]
                    causes = [latch.bits[b] for b in allbits]
                t2 = Time(t, format='gps', scale='utc')
                vprint("        Trip GPS %s (%s), triggers:\n" % (t, t2.iso))
                for cause in causes:
                    vprint("            %s\n" % cause)
                    # configure plot
                    mapsec = 'sei-wd-map-%s' % cause
                    if (not config.has_section(mapsec)
                            and re.match(r'ISI ST\d ', cause)):
                        mapsec = ('sei-wd-map-%s' %
                                  (' '.join(cause.split(' ', 2)[::2])))
                    if config.has_section(mapsec):
                        pstart = gpstime - self.plot_duration / 2.
                        if self.chambers == HAMs or system == 'HPI':
                            platform = chamber
                        else:
                            platform = '%s_%s' % (
                                chamber, gpschannel.signal.split('_')[0])
                        p = os.path.join(
                            self.plotdir, '%s-%s_%s_WATCHDOG_TRIP-%d-%d.png' %
                            (ifo, system, platform, pstart,
                             self.plot_duration))
                        self.plots.append(
                            get_plot('watchdog')(t,
                                                 chamber,
                                                 cause,
                                                 config,
                                                 p,
                                                 ifo=ifo,
                                                 nds=nds is True))
                        plot = self.plots[-1]
                    else:
                        plot = None
                    self.trips.append((t, chamber, cause, plot))

        super(SEIWatchDogTab, self).process(config=config,
                                            nds=nds,
                                            nproc=nproc,
                                            datacache=datacache,
                                            trigcache=trigcache,
                                            **kwargs)
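
The cause-association step above turns an integer latch value into a list of set bit indices by reversing its binary representation. A minimal sketch of just that decoding, with a made-up bit-name list standing in for `latch.bits`:

import numpy

bit_names = ['DAMP', 'ISO X', 'ISO Y', 'ISO Z']  # hypothetical stand-in for latch.bits

bits = 10  # example latch value: binary 1010, i.e. bits 1 and 3 are set
# strip the '0b' prefix and reverse, so that index 0 corresponds to the LSB
flags = list(map(int, bin(int(bits))[2:][::-1]))
allbits = numpy.nonzero(flags)[0]
causes = [bit_names[b] for b in allbits]
print(causes)  # ['ISO X', 'ISO Z']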
Example #6
    def write_state_html(self, state):
        """Build HTML summary of watchdog trips
        """
        # find one example of each channel, and get the bits
        hepichannel = get_channel(HEPI_LATCH_CHANNEL %
                                  (self.ifo, self.chambers[1]))
        hepimask = hepichannel.bits + ['HEPI Unknown']
        if self.chambers == HAMs:
            isichannels = [
                get_channel(HAM_ISI_LATCH_CHANNEL %
                            (self.ifo, self.chambers[0]))
            ]
            isimask = isichannels[0].bits + ['ISI Unknown']
        else:
            isichannels = [
                get_channel(BSC_ST1_LATCH_CHANNEL %
                            (self.ifo, self.chambers[0])),
                get_channel(BSC_ST2_LATCH_CHANNEL %
                            (self.ifo, self.chambers[0]))
            ]
            isimask = (isichannels[0].bits + ['ISI ST1 Unknown'] +
                       isichannels[1].bits + ['ISI ST2 Unknown'])
        mask = hepimask + isimask

        # count trips
        count = {}
        for _, chamber, trigger, _ in self.trips:
            key = (chamber, trigger)
            try:
                count[key] += 1
            except KeyError:
                count[key] = 1

        page = markup.page()

        # build summary table
        page.div(class_='well')
        chambertype = self.chambers[0][:-1]
        id_ = '{}-{}'.format(self.ifo.lower(), chambertype.lower())
        page.table(class_='table table-condensed table-hover watchdog',
                   id_=id_)
        page.caption("Number of watch-dog trips per %s chamber (column) and "
                     "trigger (row)" % (chambertype))
        page.thead()
        # add headers
        page.tr(class_='header')
        for th in ['WD'] + self.chambers + ['Sub-total']:
            page.th(th)
        page.tr.close()
        page.thead.close()

        # add rows
        page.tbody()
        totals = numpy.zeros((len(mask), len(self.chambers) + 1), dtype=int)
        rows = []
        for i, bit in enumerate(mask):
            class_ = []
            if (i == len(hepimask)
                    or i == len(hepimask + isichannels[0].bits)):
                class_.append('tdiv')
            if re_no_count.match(bit):
                class_.append('ignore')
            if class_:
                page.tr(class_=' '.join(class_))
            else:
                page.tr()
            page.th(bit)
            for j, chamber in enumerate(self.chambers):
                try:
                    c = count[(chamber, bit)]
                except KeyError:
                    c = 0
                page.td(c or '-')
                # exclude IOP from total
                if not re_no_count.match(bit):
                    totals[i][j] = c
            # add row total
            totals[i][-1] = totals[i].sum()
            page.th(str(totals[i][-1]))
            page.tr.close()
        page.tbody.close()

        # add totals
        page.thead()
        page.tr(class_='header')
        page.th('Totals')
        for i in range(totals.shape[1]):
            page.th(str(totals[:, i].sum()))
        page.tr.close()
        page.thead.close()
        page.table.close()
        page.button('Export to CSV',
                    class_='btn btn-default btn-table',
                    onclick="exportTableToCSV('{name}.csv', '{name}')".format(
                        name=id_))
        page.div.close()

        # build trip groups
        self.trips.sort(key=lambda x: (
            x[0],
            x[2] in mask and mask.index(x[2]) or 1000,
            x[3] is None))
        groups = OrderedDict()
        j = 0
        for i in range(len(self.trips)):
            if i == 0:
                j = i
                groups[j] = []
                continue
            t = self.trips[i][0]
            t2 = self.trips[i - 1][0]
            if (t - t2) < self.window:
                groups[j].append(i)
            else:
                j = i
                groups[j] = []

        # build trip table
        page.h1('Trip list')
        page.div(class_='well')

        utc = tz.gettz('UTC')
        if self.ifo in ['H1', 'C1', 'P1']:
            localzone = tz.gettz('America/Los_Angeles')
        elif self.ifo in ['L1']:
            localzone = tz.gettz('America/Chicago')
        elif self.ifo in ['K1']:
            localzone = tz.gettz('Asia/Tokyo')
        else:
            localzone = tz.gettz('Europe/Berlin')
        headers = [
            'GPS time', 'UTC time', 'Local time', 'Chamber', 'Trigger', 'Plot',
            'Associated'
        ]
        rows = []
        for id in groups:
            t, chamber, trigger, plot = self.trips[id]
            t2 = Time(t, format='gps', scale='utc')
            tlocal = Time(
                t2.datetime.replace(tzinfo=utc).astimezone(localzone),
                format='datetime',
                scale='utc')
            rows.append([t, t2.iso, tlocal.iso, chamber, trigger])
            if plot:
                rows[-1].append(
                    markup.oneliner.a('[Click here]',
                                      href=plot.href,
                                      class_='fancybox plot',
                                      **{'data-fancybox-group': '1'}))
            else:
                rows[-1].append('-')
            assoc = []
            for id2 in groups[id]:
                t2, chamber2, trigger2, plot2 = self.trips[id2]
                dt = t2 - t
                tag = '%s %s (+%.2fs)' % (chamber2, trigger2, dt)
                if plot2:
                    assoc.append(
                        markup.oneliner.a(tag,
                                          href=plot2.href,
                                          class_='fancybox plot',
                                          **{'data-fancybox-group': '1'}))
                else:
                    assoc.append(tag)
            if assoc:
                rows[-1].append('<br>'.join(assoc))
            else:
                rows[-1].append('-')
        page.add(
            str(
                html.table(
                    headers,
                    rows,
                    caption=
                    ('List of %s watch-dog trips in interval [%d .. %d) - '
                     'trips are considered \'associated\' if they fall within '
                     '%s seconds of each other.' %
                     (chambertype, self.start, self.end, self.window)))))

        wdp = []
        for i, p in enumerate(self.plots):
            if 'WATCHDOG_TRIP' in p.href:
                wdp.append(i)
        for idx in wdp[::-1]:
            self.plots.pop(idx)

        # write trips to data file
        tripfile = os.path.join(self.path, 'trips.dat')
        with open(tripfile, 'w') as f:
            for id in groups:
                t, chamber, cause, _ = self.trips[id]
                if cause in hepimask:
                    stage = 'HEPI'
                elif self.chambers == HAMs:
                    stage = 'ISI'
                elif cause in isichannels[0].bits:
                    stage = 'ISI1'
                else:
                    stage = 'ISI2'
                cause = cause.replace(' ', '_')
                print(t, chamber, stage, cause, file=f)
        page.p()
        page.add('The list of trips can be downloaded ')
        page.a('here.',
               href=tripfile,
               alt=os.path.basename(tripfile),
               title='Trip data')
        page.p.close()

        page.div.close()

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
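
The trip-grouping loop in the tab above clusters time-sorted trips whose gap to the immediately preceding trip is smaller than `self.window`. A minimal standalone sketch of the same logic, with an arbitrary window and plain tuples in place of the trip records:

from collections import OrderedDict

window = 60  # arbitrary association window in seconds
trips = [(100.0,), (130.0,), (500.0,), (520.0,), (900.0,)]  # sorted by GPS time

groups = OrderedDict()
j = 0
for i in range(len(trips)):
    if i == 0:
        j = i
        groups[j] = []
        continue
    # compare against the previous trip, not the first trip of the group
    if (trips[i][0] - trips[i - 1][0]) < window:
        groups[j].append(i)
    else:
        j = i
        groups[j] = []

print(groups)  # OrderedDict([(0, [1]), (2, [3]), (4, [])])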
Example #7
    def write_state_html(self, state):
        # write results table
        performance = [(str(m), '%.2f %s' % (r.value, r.unit),
                        m.description.split('\n')[0])
                       for (m, r) in self.results[state].items()]
        pre = markup.page()
        pre.div(class_='scaffold well')
        pre.strong('Flag performance summary')
        pre.add(
            str(data_table(['Metric', 'Result', 'Description'], performance)))
        pre.div.close()
        pre.h2('Figures of Merit')
        # write configuration table
        post = markup.page()

        def add_config_entry(title, entry):
            post.tr()
            post.th(title)
            post.td(entry)
            post.tr.close()

        post.h2('Analysis configuration')
        post.div()
        post.table(class_='table table-condensed table-hover')
        add_config_entry('Flags', '<br>'.join(map(str, self.flags)))
        if len(self.flags) > 1 and self.intersection:
            add_config_entry('Flag combination', 'Intersection (logical AND)')
        elif len(self.flags) > 1:
            add_config_entry('Flag combination', 'Union (logical OR)')
        add_config_entry(
            'Analysis start time', '%s (%s)' %
            (str(Time(float(self.start), format='gps',
                      scale='utc').iso), self.start))
        add_config_entry(
            'Analysis end time', '%s (%s)' %
            (str(Time(float(self.end), format='gps',
                      scale='utc').iso), self.end))
        add_config_entry('Event trigger channel', str(self.channel))
        add_config_entry('Event trigger generator', str(self.etg))
        post.table.close()
        post.div.close()
        post.h2('Segment information')
        post.div(class_='panel-group', id="accordion")
        for i, flag in enumerate([self.metaflag] + self.flags):
            flag = get_segments(flag,
                                state.active,
                                query=False,
                                padding=self.padding).copy()
            post.div(class_='panel well panel-primary')
            post.div(class_='panel-heading')
            post.a(href='#flag%d' % i,
                   **{
                       'data-toggle': 'collapse',
                       'data-parent': '#accordion'
                   })
            if i == 0:
                post.h4(self.intersection and 'Intersection' or 'Union',
                        class_='panel-title')
            elif self.labels[i - 1] != str(flag):
                post.h4('%s (%s)' % (flag.name, self.labels[i - 1]),
                        class_='panel-title')
            else:
                post.h4(flag.name, class_='panel-title')
            post.a.close()
            post.div.close()
            post.div(id_='flag%d' % i, class_='panel-collapse collapse')
            post.div(class_='panel-body')
            # write segment summary
            post.p('This flag was defined and had a known state during '
                   'the following segments:')
            post.add(self.print_segments(flag.known))
            # write segment table
            post.p('This flag was active during the following segments:')
            post.add(self.print_segments(flag.active))

            post.div.close()
            post.div.close()
            post.div.close()
        post.div.close()

        # then write standard data tab
        return super(get_tab('default'), self).write_state_html(state,
                                                                plots=True,
                                                                pre=pre,
                                                                post=post)
Example #8
def main(args=None):
    """Run the GWSumm command-line interface
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    if args.debug:
        warnings.simplefilter('error', DeprecationWarning)

    # set verbose output options
    globalv.VERBOSE = args.verbose

    # find all config files
    args.config_file = [
        os.path.expanduser(fp) for csv in args.config_file
        for fp in csv.split(',')
    ]

    # check segdb option
    if args.on_segdb_error not in ['raise', 'warn', 'ignore']:
        parser.error("Invalid option --on-segdb-error='%s'" %
                     args.on_segdb_error)

    # read configuration file
    config = GWSummConfigParser()
    config.optionxform = str
    if args.ifo:
        config.set_ifo_options(args.ifo, section=DEFAULTSECT)
    config.set(DEFAULTSECT, 'user', getpass.getuser())
    config.read(args.config_file)

    try:
        ifo = config.get(DEFAULTSECT, 'IFO')
    except NoOptionError:
        ifo = None
    finally:
        globalv.IFO = ifo

    # interpolate section names
    interp = {}
    if ifo:
        interp['ifo'] = ifo.lower()
        interp['IFO'] = ifo.title()
    config.interpolate_section_names(**interp)

    # double-check week mode matches calendar setting
    if args.mode == 'week':
        if config.has_option("calendar", "start-of-week"):
            weekday = getattr(calendar,
                              config.get("calendar", "start-of-week").upper())
            if weekday != args.week.timetuple().tm_wday:
                msg = ("Cannot process week starting on %s. The "
                       "'start-of-week' option in the [calendar] section "
                       "of the INI file specifies weeks start on %ss." %
                       (args.week.strftime('%Y%m%d'),
                        config.get("calendar", "start-of-week")))
                parser.error(msg)

    # record times in ConfigParser
    config.set_date_options(args.gpsstart, args.gpsend, section=DEFAULTSECT)

    # convert times for convenience
    span = Segment(args.gpsstart, args.gpsend)
    utc = tconvert(args.gpsstart)
    starttime = Time(float(args.gpsstart), format='gps')
    endtime = Time(float(args.gpsend), format='gps')

    # set mode and output directory
    mode.set_mode(args.mode)
    try:
        path = mode.get_base(utc)
    except ValueError:
        path = os.path.join('%d-%d' % (args.gpsstart, args.gpsend))

    # set LAL FFT plan wisdom level
    duration = min(globalv.NOW, args.gpsend) - args.gpsstart
    if duration > 200000:
        fft_lal.LAL_FFTPLAN_LEVEL = 3
    elif duration > 40000:
        fft_lal.LAL_FFTPLAN_LEVEL = 2
    else:
        fft_lal.LAL_FFTPLAN_LEVEL = 1

    # set global html only flag
    if args.html_only:
        globalv.HTMLONLY = True

    # build directories
    mkdir(args.output_dir)
    os.chdir(args.output_dir)
    plotdir = os.path.join(path, 'plots')
    mkdir(plotdir)

    # -- setup --------------------------------------

    LOGGER.info(" -- GW interferometer summary information system -- ")
    LOGGER.debug("This is process {}".format(os.getpid()))
    LOGGER.debug("You have selected {} mode".format(mode.get_mode().name))
    LOGGER.debug("Start time: {0} ({1})".format(starttime.utc.iso,
                                                starttime.gps))
    LOGGER.debug("End time: {0} ({1})".format(endtime.utc.iso, endtime.gps))
    LOGGER.debug("Output directory: {}".format(
        os.path.abspath(os.path.join(args.output_dir, path))))

    # -- Finalise configuration
    LOGGER.info("Loading configuration")
    plugins = config.load_plugins()
    if plugins:
        LOGGER.debug(" -- Loaded {} plugins:".format(len(plugins)))
        for mod in plugins:
            LOGGER.debug("        %s" % mod)
    units = config.load_units()
    LOGGER.debug("    Loaded %d units" % len(units))
    channels = config.load_channels()
    LOGGER.debug("    Loaded %d channels" % len(channels))
    states = config.load_states()
    LOGGER.debug("    Loaded %d states" % len(states))
    rcp = config.load_rcParams()
    LOGGER.debug("    Loaded %d rcParams" % len(rcp))

    # read list of tabs
    tablist = TabList.from_ini(config,
                               match=args.process_tab,
                               path=path,
                               plotdir=plotdir)
    tablist.sort(reverse=True)
    tabs = sorted(tablist.get_hierarchy(), key=tablist._sortkey)
    LOGGER.info("    Loaded %d tabs [%d parents overall]" %
                (len(tablist), len(tabs)))

    # read caches
    cache = {}
    for (key,
         var) in zip(['datacache', 'trigcache', 'segmentcache'],
                     [args.data_cache, args.event_cache, args.segment_cache]):
        if var:
            LOGGER.info("Reading %s from %d files... " % (key, len(var)))
            cache[key] = Cache()
            for fp in var:
                with open(fp, 'r') as f:
                    cache[key].extend(Cache.fromfile(f))
            cache[key] = cache[key].sieve(segment=span)
            LOGGER.debug("Done [%d entries]" % len(cache[key]))

    # -- read archive -------------------------------

    if not hasattr(args, 'archive'):
        args.archive = False

    if args.html_only:
        args.archive = False
        args.daily_archive = False
    elif args.archive is True:
        args.archive = 'GW_SUMMARY_ARCHIVE'

    archives = []

    if args.archive:
        archivedir = os.path.join(path, 'archive')
        mkdir(archivedir)
        args.archive = os.path.join(
            archivedir, '%s-%s-%d-%d.h5' %
            (ifo, args.archive, args.gpsstart, args.gpsend - args.gpsstart))
        if os.path.isfile(args.archive):
            archives.append(args.archive)
        else:
            LOGGER.debug(
                "No archive found in %s, one will be created at the end" %
                args.archive)

    # read daily archive for week/month/... mode
    if hasattr(args, 'daily_archive') and args.daily_archive:
        # find daily archive files
        archives.extend(
            archive.find_daily_archives(args.gpsstart, args.gpsend, ifo,
                                        args.daily_archive, archivedir))
        # then don't read any actual data
        cache['datacache'] = Cache()

    for arch in archives:
        LOGGER.info("Reading archived data from %s" % arch)
        archive.read_data_archive(arch)
        LOGGER.debug("Archive data loaded")

    # -- read HTML configuration --------------------

    css = config.get_css(section='html')
    javascript = config.get_javascript(section='html')

    # enable comments
    try:
        globalv.HTML_COMMENTS_NAME = config.get('html', 'disqus-shortname')
    except (NoOptionError, NoSectionError):
        pass

    # find new ifo bases
    ifobases = {}
    try:
        bases_ = config.nditems('html')
    except NoSectionError:
        pass
    else:
        base_reg = re.compile(r'-base\Z')
        for key, val in bases_:
            if base_reg.search(key):
                ifobases[key.rsplit('-', 1)[0]] = val
    ifobases = OrderedDict(sorted(ifobases.items(), key=lambda x: x[0]))

    # -- write auxiliary pages ----------------------

    # get URL from output directory
    if 'public_html' in os.getcwd():
        urlbase = os.path.sep + os.path.join(
            '~%s' % config.get(DEFAULTSECT, 'user'),
            os.getcwd().split('public_html', 1)[1][1:])
        base = urlbase
    # otherwise get URL from html config
    elif ifo in ifobases:
        urlbase = urlparse(ifobases[ifo]).path
        base = urlbase
    # otherwise let the write_html processor work it out on-the-fly
    else:
        urlbase = None
        base = None

    # get link to issues report page
    try:
        issues = config.get('html', 'issues')
    except KeyError:
        issues = True

    # write 404 error page
    if not args.no_htaccess and not args.no_html and urlbase:
        top = os.path.join(urlbase, path)
        four0four = get_tab('404')(span=span,
                                   parent=None,
                                   path=path,
                                   index=os.path.join(path, '404.html'))
        four0four.write_html(css=css,
                             js=javascript,
                             tabs=tabs,
                             ifo=ifo,
                             ifomap=ifobases,
                             top=top,
                             base=base,
                             writedata=not args.html_only,
                             writehtml=not args.no_html,
                             issues=issues)
        url404 = os.path.join(urlbase, four0four.index)
        with open(os.path.join(path, '.htaccess'), 'w') as htaccess:
            print('Options -Indexes', file=htaccess)
            print('ErrorDocument 404 %s' % url404, file=htaccess)
            print('ErrorDocument 403 %s' % url404, file=htaccess)

    # write config page
    about = get_tab('about')(span=span, parent=None, path=path)
    if not args.no_html:
        mkdir(about.path)
        about.write_html(css=css,
                         js=javascript,
                         tabs=tabs,
                         config=config.files,
                         prog=PROG,
                         ifo=ifo,
                         ifomap=ifobases,
                         about=about.index,
                         base=base,
                         issues=issues,
                         writedata=not args.html_only,
                         writehtml=not args.no_html)

    # -- read bulk data -----------------------------

    # XXX: bulk data reading could optimise things
    # XXX: but has never been used, so should remove (DMM 18/01/16)
    if args.bulk_read and not args.html_only:
        LOGGER.info("Reading all data in BULK")
        allts = set()
        allsv = set()
        allflags = set()
        for tab in tablist:
            snames = []
            for state in tab.states:
                snames.append(state.name)
                if state.definition:
                    allflags.update(re_flagdiv.split(state.definition))
            # get all data defined for the 'all' state
            if ALLSTATE in snames:
                allts.update(
                    tab.get_channels('timeseries', 'spectrogram', 'spectrum',
                                     'histogram'))
                allsv.update(tab.get_channels('statevector'))
                allflags.update(tab.get_flags('segments'))
            # or get data for plots defined over all states
            else:
                for plot in tab.plots:
                    if plot.state is not None:
                        continue
                    if plot.type in [
                            'timeseries', 'spectrogram', 'spectrum',
                            'histogram'
                    ]:
                        allts.update(plot.channels)
                    elif plot.type in ['statevector']:
                        allsv.update(plot.channels)
                    elif plot.type in ['segments']:
                        allflags.update([
                            f for cflag in plot.flags
                            for f in re_flagdiv.split(cflag)[::2] if f
                        ])
        allseg = SegmentList([span])
        if len(allflags):
            LOGGER.info(
                "%d data-quality flags identified for segment query from all "
                "tabs" % len(allflags))
            get_segments(allflags, allseg, config=config, return_=False)
        if len(allts):
            LOGGER.info("%d channels identified for TimeSeries from all tabs" %
                        len(allts))
            get_timeseries_dict(allts,
                                allseg,
                                config=config,
                                nds=args.nds,
                                nproc=args.multiprocess,
                                return_=False)
        if len(allsv):
            LOGGER.info(
                "%d channels identified for StateVector from all tabs" %
                len(allsv))
            get_timeseries_dict(allsv,
                                allseg,
                                config=config,
                                nds=args.nds,
                                statevector=True,
                                nproc=args.multiprocess,
                                return_=False)

    # -- process all tabs ---------------------------

    # TODO: consider re-working this loop as TabList.process_all

    for tab in tablist:
        if tab.parent:
            name = '%s/%s' % (tab.parent.name, tab.name)
        else:
            name = tab.name
        if not args.html_only and isinstance(tab, get_tab('_processed')):
            LOGGER.debug("Processing %s" % name)
            tab.process(config=config,
                        nds=args.nds,
                        nproc=args.multiprocess,
                        segdb_error=args.on_segdb_error,
                        datafind_error=args.on_datafind_error,
                        **cache)
        if not tab.hidden and not isinstance(tab, get_tab('link')):
            mkdir(tab.href)
            tab.write_html(css=css,
                           js=javascript,
                           tabs=tabs,
                           ifo=ifo,
                           ifomap=ifobases,
                           about=about.index,
                           base=base,
                           issues=issues,
                           writedata=not args.html_only,
                           writehtml=not args.no_html)

        # archive this tab
        if args.archive:
            LOGGER.info("Writing data to archive")
            archive.write_data_archive(args.archive)
            LOGGER.debug("Archive written to {}".format(
                os.path.abspath(args.archive)))
        LOGGER.debug("%s complete" % (name))

    LOGGER.info("-- Data products written, all done --")
Example #9
    def write_state_html(self, state):
        """Write the content of inner HTML for the given state
        """
        def format_result(res):
            fmt = '%d' if (
                res.value < 0.01 or
                (res.unit == Unit('%') and res.value > 99.99)) else '%.2f'
            return ''.join([fmt % res.value, str(res.unit)])

        def add_config_entry(page, title, entry):
            page.tr()
            page.th(title)
            page.td(entry)
            page.tr.close()

        # write results table
        performance = [(
            str(m),
            format_result(r),
            m.description.split('\n')[0],
        ) for (m, r) in self.results[state].items()]
        pre = markup.page()
        pre.p(self.foreword)
        pre.h4('Flag performance summary', class_='mt-4')
        pre.add(
            str(
                gwhtml.table(['Metric', 'Result', 'Description'],
                             performance,
                             id=self.title)))
        pre.h2('Figures of Merit', class_='mt-4 mb-2')
        # write configuration table
        post = markup.page()
        post.h2('Analysis configuration', class_='mt-4')
        post.div()
        post.table(class_='table table-sm table-hover')
        add_config_entry(post, 'Flags', '<br>'.join(list(map(str,
                                                             self.flags))))
        if len(self.flags) > 1 and self.intersection:
            add_config_entry(post, 'Flag combination',
                             'Intersection (logical AND)')
        elif len(self.flags) > 1:
            add_config_entry(post, 'Flag combination', 'Union (logical OR)')
        add_config_entry(
            post, 'Analysis start time', '%s (%s)' %
            (str(Time(float(self.start), format='gps',
                      scale='utc').iso), self.start))
        add_config_entry(
            post, 'Analysis end time', '%s (%s)' %
            (str(Time(float(self.end), format='gps',
                      scale='utc').iso), self.end))
        add_config_entry(post, 'Event trigger channel', str(self.channel))
        add_config_entry(post, 'Event trigger generator', str(self.etg))
        post.table.close()
        post.div.close()
        post.h2('Segment information', class_='mt-4')
        post.div(class_='mt-2', id="accordion")
        for i, flag in enumerate([self.metaflag] + self.flags):
            flag = get_segments(flag,
                                state.active,
                                query=False,
                                padding=self.padding).copy()
            post.div(class_='card border-info mb-1 shadow-sm')
            post.div(class_='card-header text-white bg-info')
            if i == 0:
                title = self.intersection and 'Intersection' or 'Union'
            elif self.labels[i - 1] != str(flag):
                title = '%s (%s)' % (flag.name, self.labels[i - 1])
            else:
                title = flag.name
            post.a(title,
                   class_='card-link cis-link collapsed',
                   href='#flag%d' % i,
                   **{
                       'data-toggle': 'collapse',
                       'aria-expanded': 'false'
                   })
            post.div.close()  # card-header
            post.div(id_='flag%d' % i,
                     class_='collapse',
                     **{'data-parent': '#accordion'})
            post.div(class_='card-body')
            # write segment summary
            post.p('This flag was defined and had a known state during '
                   'the following segments:')
            post.add(self.print_segments(flag.known))
            # write segment table
            post.p('This flag was active during the following segments:')
            post.add(self.print_segments(flag.active))
            post.div.close()  # card-body
            post.div.close()  # collapse
            post.div.close()  # card
        post.div.close()

        # then write standard data tab
        return super(get_tab('default'), self).write_state_html(state,
                                                                plots=True,
                                                                pre=pre,
                                                                post=post)
Example #10
    def write_state_html(self, state):
        """Build HTML summary of watchdog trips
        """
        # find one example of each channel, and get the bits
        hepichannel = get_channel(HEPI_LATCH_CHANNEL
                                  % (self.ifo, self.chambers[1]))
        hepimask = hepichannel.bits
        if self.chambers == HAMs:
            isichannels = [get_channel(HAM_ISI_LATCH_CHANNEL
                                       % (self.ifo, self.chambers[0]))]
        else:
            isichannels = [get_channel(BSC_ST1_LATCH_CHANNEL
                                       % (self.ifo, self.chambers[0])),
                           get_channel(BSC_ST2_LATCH_CHANNEL
                                       % (self.ifo, self.chambers[0]))]
        isimask = [bit for c in isichannels for bit in c.bits]
        mask = hepimask + isimask

        # count trips
        count = {}
        for _, chamber, trigger, _ in self.trips:
            key = (chamber, trigger)
            try:
                count[key] += 1
            except KeyError:
                count[key] = 1

        page = html.markup.page()

        # build summary table
        page.div(class_='well')
        page.table(class_='watchdog data')
        # add headers
        page.tr(class_='header')
        for th in ['WD'] + self.chambers + ['Sub-total']:
            page.th(th)
        page.tr.close()

        # add rows
        totals = numpy.zeros((len(mask), len(self.chambers) + 1),
                             dtype=int)
        rows = []
        for i, bit in enumerate(mask):
            class_ = []
            if (i == len(hepimask) or
                    i == len(hepimask + isichannels[0].bits)):
                class_.append('tdiv')
            if re_no_count.match(bit):
                class_.append('IOP')
            if class_:
                page.tr(class_=' '.join(class_))
            else:
                page.tr()
            page.th(bit)
            row = []
            for j, chamber in enumerate(self.chambers):
                try:
                    c = count[(chamber, bit)]
                except KeyError:
                    c = 0
                page.td(c or '-')
                # exclude IOP from total
                if not re_no_count.match(bit):
                    totals[i][j] = c
            # add row total
            totals[i][-1] = totals[i].sum()
            page.th(totals[i][-1])
            page.tr.close()
        # add totals
        page.tr(class_='header')
        page.th('Totals')
        for i in range(totals.shape[1]):
            t = totals[:,i].sum()
            page.th(t)
        page.tr.close()
        page.table.close()
        page.div.close()

        # build trip groups
        self.trips.sort(key=lambda trip: (
            trip[0],
            trip[2] in mask and mask.index(trip[2]) or 1000,
            trip[3] is None))
        groups = OrderedDict()
        j = 0
        for i in range(len(self.trips)):
            if i == 0:
                j = i
                groups[j] = []
                continue
            t = self.trips[i][0]
            t2 = self.trips[i-1][0]
            if (t - t2) < self.window:
                groups[j].append(i)
            else:
                j = i
                groups[j] = []

        # build trip table
        page.h1('Trip list')
        page.div(class_='well')

        page.p('In the following table, individual watchdog trips are '
               'considered \'associated\' if they fall within %s seconds '
               'of each other.' % self.window)
        utc = tz.gettz('UTC')
        if self.ifo in ['H1', 'C1', 'P1']:
            localzone = tz.gettz('America/Los_Angeles')
        elif self.ifo in ['L1']:
            localzone = tz.gettz('America/Chicago')
        else:
            localzone = tz.gettz('Europe/Berlin')
        headers = ['GPS time', 'UTC time', 'Local time', 'Chamber', 'Trigger',
                   'Plot', 'Associated']
        rows = []
        for id in groups:
            t, chamber, trigger, plot = self.trips[id]
            t2 = Time(t, format='gps', scale='utc')
            tlocal = Time(t2.datetime.replace(tzinfo=utc).astimezone(localzone),
                          format='datetime', scale='utc')
            rows.append([t, t2.iso, tlocal.iso, chamber, trigger])
            if plot:
                rows[-1].append(html.markup.oneliner.a(
                    '[Click here]', href=plot.href, class_='fancybox plot',
                    **{'data-fancybox-group': '1'}))
            else:
                rows[-1].append('-')
            assoc = []
            for id2 in groups[id]:
                t2, chamber2, trigger2, plot2 = self.trips[id2]
                dt = t2 - t
                tag = '%s %s (+%.2fs)' % (chamber2, trigger2, dt)
                if plot2:
                    assoc.append(html.markup.oneliner.a(
                                     tag, href=plot2.href,
                                     class_='fancybox plot',
                                     **{'data-fancybox-group': '1'}))
                else:
                    assoc.append(tag)
            if assoc:
                rows[-1].append('<br>'.join(assoc))
            else:
                rows[-1].append('-')
        page.add(str(html.data_table(headers, rows, table='wdtrips data')))

        wdp = []
        for i, p in enumerate(self.plots):
            if 'WATCHDOG_TRIP' in p.href:
                wdp.append(i)
        for idx in wdp[::-1]:
            self.plots.pop(idx)

        # write trips to data file
        tripfile = os.path.join(self.path, 'trips.dat')
        with open(tripfile, 'w') as f:
            for id in groups:
                t, chamber, cause, _ = self.trips[id]
                if cause in hepimask:
                    stage = 'HEPI'
                elif self.chambers == HAMs:
                    stage = 'ISI'
                elif cause in isichannels[0].bits:
                    stage = 'ISI1'
                else:
                    stage = 'ISI2'
                cause = cause.replace(' ', '_')
                print(t, chamber, stage, cause, file=f)
        page.p()
        page.add('The list of trips can be downloaded ')
        page.a('here.', href=tripfile, alt=os.path.basename(tripfile),
               title='Trip data')
        page.p.close()

        page.div.close()

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
Example #11
    def frame_read(self, format=None):
        ts = TimeSeries.read(self.framefile, 'L1:LDAS-STRAIN', format=format)
        self.assertTrue(ts.epoch == Time(968654552, format='gps', scale='utc'))
        self.assertTrue(ts.sample_rate == units.Quantity(16384, 'Hz'))
        self.assertTrue(ts.unit == units.Unit('strain'))