Example #1
    def test_connect(self):
        """Test :func:`gwpy.io.connect`
        """
        import nds2  # ensure the NDS2 client bindings are available
        nds_connection = mocks.nds2_connection(host='nds.test.gwpy')
        with mock.patch('nds2.connection') as mock_connection:
            mock_connection.return_value = nds_connection
            conn = io_nds2.connect('nds.test.gwpy')
            assert conn.get_host() == 'nds.test.gwpy'
            assert conn.get_port() == 31200

        nds_connection = mocks.nds2_connection(host='nds2.test.gwpy',
                                               port=8088)
        with mock.patch('nds2.connection') as mock_connection:
            mock_connection.return_value = nds_connection
            conn = io_nds2.connect('nds2.test.gwpy')
            assert conn.get_host() == 'nds2.test.gwpy'
            assert conn.get_port() == 8088
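
The example above relies on gwpy's in-tree `mocks` test helpers. Below is a minimal, self-contained sketch of the same pattern using only `unittest.mock`; the `fake_nds2_connection` helper is hypothetical and only stands in for whatever `mocks.nds2_connection` builds, and the sketch assumes both gwpy and the nds2 client bindings are installed.

from unittest import mock

from gwpy.io import nds2 as io_nds2


def fake_nds2_connection(host, port=31200):
    """Build a stand-in for an ``nds2.connection`` (hypothetical helper)."""
    conn = mock.MagicMock()
    conn.get_host.return_value = host
    conn.get_port.return_value = port
    return conn


def test_connect_sketch():
    # patch the nds2.connection constructor so no network access happens
    with mock.patch('nds2.connection') as mock_connection:
        mock_connection.return_value = fake_nds2_connection('nds.test.gwpy')
        conn = io_nds2.connect('nds.test.gwpy')
        assert conn.get_host() == 'nds.test.gwpy'
        assert conn.get_port() == 31200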
Example #2
def _get_timeseries_dict(channels,
                         segments,
                         config=None,
                         cache=None,
                         query=True,
                         nds=None,
                         frametype=None,
                         nproc=1,
                         return_=True,
                         statevector=False,
                         archive=True,
                         datafind_error='raise',
                         dtype=None,
                         **ioargs):
    """Internal method to retrieve the data for a set of like-typed
    channels using the :meth:`TimeSeriesDict.read` accessor.
    """
    channels = list(map(get_channel, channels))

    # set classes
    if statevector:
        ListClass = StateVectorList
        DictClass = StateVectorDict
    else:
        ListClass = TimeSeriesList
        DictClass = TimeSeriesDict

    # check we have a configparser
    if config is None:
        config = GWSummConfigParser()

    # read segments from global memory
    keys = dict((c.ndsname, make_globalv_key(c)) for c in channels)
    havesegs = reduce(
        operator.and_,
        (globalv.DATA.get(keys[channel.ndsname], ListClass()).segments
         for channel in channels))
    new = segments - havesegs

    # read channel information
    filter_ = dict()
    resample = dict()
    dtype_ = dict()
    for channel in channels:
        name = str(channel)
        try:
            filter_[name] = channel.filter
        except AttributeError:
            pass
        try:
            resample[name] = float(channel.resample)
        except AttributeError:
            pass
        if channel.dtype is not None:
            dtype_[name] = channel.dtype
        elif dtype is not None:
            dtype_[name] = dtype

    # work out whether to use NDS or not
    if nds is None and cache is not None:
        nds = False
    elif nds is None:
        nds = 'LIGO_DATAFIND_SERVER' not in os.environ

    # read new data
    query &= (abs(new) > 0)
    if cache is not None:
        query &= len(cache) > 0
    if query:
        for channel in channels:
            globalv.DATA.setdefault(keys[channel.ndsname], ListClass())

        ifo = channels[0].ifo

        # open NDS connection
        if nds:
            if config.has_option('nds', 'host'):
                host = config.get('nds', 'host')
                port = config.getint('nds', 'port')
                ndsconnection = io_nds2.connect(host, port)
            else:
                ndsconnection = None
            frametype = source = 'nds'
            ndstype = channels[0].type

            # get NDS channel segments
            if ndsconnection is not None and ndsconnection.get_protocol() > 1:
                span = list(map(int, new.extent()))
                avail = io_nds2.get_availability(channels,
                                                 *span,
                                                 connection=ndsconnection)
                new &= avail.intersection(avail.keys())

        # or find frame type and check cache
        else:
            frametype = frametype or channels[0].frametype
            new = exclude_short_trend_segments(new, ifo, frametype)

            if cache is not None:
                fcache = sieve_cache(cache, ifo=ifo[0], tag=frametype)
            else:
                fcache = []

            if (cache is None or len(fcache) == 0) and len(new):
                span = new.extent()
                fcache, frametype = find_best_frames(ifo,
                                                     frametype,
                                                     span[0],
                                                     span[1],
                                                     config=config,
                                                     gaps='ignore',
                                                     onerror=datafind_error)

            # parse discontiguous cache blocks and rebuild segment list
            new &= cache_segments(fcache)
            source = 'files'

            # if reading Virgo h(t) GWF data, filter out files that don't
            # contain the channel (Virgo state-vector only)
            _names = set(map(str, channels))
            _virgohoft = _names.intersection(VIRGO_HOFT_CHANNELS)
            if _virgohoft:
                vprint("    Determining available segments for "
                       "Virgo h(t) data...")
                new &= data_segments(fcache, _virgohoft.pop())

            # set channel type if reading with frameCPP
            if fcache and all_adc(fcache):
                ioargs['type'] = 'adc'

        # store frametype for display in Channel Information tables
        for channel in channels:
            channel.frametype = frametype

        # work out which channels are still missing data for the new times
        qchannels = []
        for channel in channels:
            oldsegs = globalv.DATA.get(keys[channel.ndsname],
                                       ListClass()).segments
            if abs(new - oldsegs) != 0 and nds:
                qchannels.append(channel.ndsname)
            elif abs(new - oldsegs) != 0:
                qchannels.append(str(channel))

        # loop through segments, recording data for each
        if len(new):
            vprint(
                "    Fetching data (from %s) for %d channels [%s]:\n" %
                (source, len(qchannels), nds and ndstype or frametype or ''))
        vstr = "        [{0[0]}, {0[1]})"
        for segment in new:
            # force reading integer-precision segments
            segment = type(segment)(int(segment[0]), int(segment[1]))
            if abs(segment) < 1:
                continue

            # reset to minute trend sample times
            if frame_trend_type(ifo, frametype) == 'minute':
                segment = Segment(*io_nds2.minute_trend_times(*segment))
                if abs(segment) < 60:
                    continue

            if nds:  # fetch
                tsd = DictClass.fetch(qchannels,
                                      segment[0],
                                      segment[1],
                                      connection=ndsconnection,
                                      type=ndstype,
                                      verbose=vstr.format(segment),
                                      **ioargs)
            else:  # read
                # NOTE: this sieve explicitly casts our segment to
                #       ligo.segments.segment to prevent a `TypeError`
                #       from mismatched segment types
                segcache = sieve_cache(fcache, segment=segment)
                segstart, segend = map(float, segment)
                tsd = DictClass.read(segcache,
                                     qchannels,
                                     start=segstart,
                                     end=segend,
                                     nproc=nproc,
                                     verbose=vstr.format(segment),
                                     **ioargs)

            vprint("        post-processing...\n")

            # apply type casting (copy=False means same type just returns)
            for chan, ts in tsd.items():
                tsd[chan] = ts.astype(dtype_.get(chan, ts.dtype),
                                      casting='unsafe',
                                      copy=False)

            # apply resampling
            tsd = resample_timeseries_dict(tsd, nproc=1, **resample)

            # post-process
            for c, data in tsd.items():
                channel = get_channel(c)
                key = keys[channel.ndsname]
                if (key in globalv.DATA
                        and data.span in globalv.DATA[key].segments):
                    continue
                if data.unit is None:
                    data.unit = 'undef'
                for i, seg in enumerate(globalv.DATA[key].segments):
                    if seg in data.span:
                        # new data completely covers existing segment
                        # (and more), so just remove the old stuff
                        globalv.DATA[key].pop(i)
                        break
                    elif seg.intersects(data.span):
                        # new data extends existing segment, so only keep
                        # the really new stuff
                        data = data.crop(*(data.span - seg))
                        break

                # filter
                try:
                    filt = filter_[str(channel)]
                except KeyError:
                    pass
                else:
                    data = filter_timeseries(data, filt)

                if isinstance(data, StateVector) or ':GRD-' in str(channel):
                    data.override_unit(units.dimensionless_unscaled)
                    if hasattr(channel, 'bits'):
                        data.bits = channel.bits
                elif data.unit is None:
                    data.override_unit(channel.unit)

                # update channel type for trends
                if data.channel.type is None and (data.channel.trend
                                                  is not None):
                    if data.dt.to('s').value == 1:
                        data.channel.type = 's-trend'
                    elif data.dt.to('s').value == 60:
                        data.channel.type = 'm-trend'

                # append and coalesce
                add_timeseries(data, key=key, coalesce=True)

    # rebuild the global channel list with new parameters
    update_channel_params()

    if not return_:
        return

    return locate_data(channels, segments, list_class=ListClass)
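
The heart of the caching logic above is the segment arithmetic: the segments already held for every channel are intersected with `reduce(operator.and_, ...)`, and only the difference from the requested span is read or fetched. A simplified, self-contained sketch of that bookkeeping (the channel names and spans below are illustrative only):

import operator
from functools import reduce

from gwpy.segments import Segment, SegmentList

# hypothetical record of the data already held in memory, per channel
cached = {
    'L1:TEST-CHANNEL_A': SegmentList([Segment(0, 100)]),
    'L1:TEST-CHANNEL_B': SegmentList([Segment(50, 150)]),
}
requested = SegmentList([Segment(0, 200)])

# segments covered for *all* channels ...
havesegs = reduce(operator.and_, cached.values())
# ... and the remainder that still needs to be retrieved
new = requested - havesegs   # [0, 50) and [100, 200)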
Example #3
def main(args=None):
    """Run the online Guardian node visualization tool
    """
    parser = create_parser()
    args = parser.parse_args(args=args)

    fec_map = args.fec_map
    simulink = args.simulink
    daqsvn = args.daqsvn or ('https://daqsvn.ligo-la.caltech.edu/websvn/'
                             'listing.php?repname=daq_maps')
    if args.ifo == 'H1':
        if not fec_map:
            fec_map = 'https://lhocds.ligo-wa.caltech.edu/exports/detchar/fec/'
        if not simulink:
            simulink = 'https://lhocds.ligo-wa.caltech.edu/daq/simulink/'
    if args.ifo == 'L1':
        if not fec_map:
            fec_map = 'https://llocds.ligo-la.caltech.edu/exports/detchar/fec/'
        if not simulink:
            simulink = 'https://llocds.ligo-la.caltech.edu/daq/simulink/'

    span = Segment(args.gpsstart, args.gpsend)

    # let's go
    LOGGER.info('{} Overflows {}-{}'.format(args.ifo, int(args.gpsstart),
                                            int(args.gpsend)))

    # get segments
    if args.state_flag:
        state = DataQualityFlag.query(args.state_flag,
                                      int(args.gpsstart),
                                      int(args.gpsend),
                                      url=const.DEFAULT_SEGMENT_SERVER)
        tmp = type(state.active)()
        for i, seg in enumerate(state.active):
            if abs(seg) < args.segment_end_pad:
                continue
            tmp.append(type(seg)(seg[0], seg[1] - args.segment_end_pad))
        state.active = tmp.coalesce()
        statea = state.active
    else:
        statea = SegmentList([span])

    if not args.output_file:
        duration = abs(span)
        args.output_file = ('%s-OVERFLOWS-%d-%d.h5' %
                            (args.ifo, int(args.gpsstart), duration))
        LOGGER.debug("Set default output file as %s" % args.output_file)

    # set up container
    overflows = DataQualityDict()

    # prepare data access
    if args.nds:
        from gwpy.io import nds2 as io_nds2
        host, port = args.nds.rsplit(':', 1)
        ndsconnection = io_nds2.connect(host, port=int(port))
        if ndsconnection.get_protocol() == 1:
            cachesegs = SegmentList(
                [Segment(int(args.gpsstart), int(args.gpsend))])
        else:
            cachesegs = io_nds2.get_availability(
                ['{0}:FEC-1_DAC_OVERFLOW_ACC_0_0'.format(args.ifo)],
                int(args.gpsstart),
                int(args.gpsend),
            )
    else:  # get frame cache
        cache = gwdatafind.find_urls(args.ifo[0], args.frametype,
                                     int(args.gpsstart), int(args.gpsend))
        cachesegs = statea & cache_segments(cache)

    flag_desc = "ADC/DAC Overflow indicated by {0}"

    # get channel and find overflows
    for dcuid in args.dcuid:
        LOGGER.info("Processing DCUID %d" % dcuid)
        channel = daq.ligo_accum_overflow_channel(dcuid, args.ifo)
        overflows[channel] = DataQualityFlag(channel, known=cachesegs)
        if args.deep:
            LOGGER.debug(" -- Getting list of overflow channels")
            try:
                channels = daq.ligo_model_overflow_channels(dcuid,
                                                            args.ifo,
                                                            args.frametype,
                                                            gpstime=span[0],
                                                            nds=args.nds)
            except IndexError:  # no frame found for GPS start, try GPS end
                channels = daq.ligo_model_overflow_channels(dcuid,
                                                            args.ifo,
                                                            args.frametype,
                                                            gpstime=span[-1])
            for chan in channels:  # set up flags early
                overflows[chan] = DataQualityFlag(
                    chan,
                    known=cachesegs,
                    description=flag_desc.format(chan),
                    isgood=False,
                )
            LOGGER.debug(" -- %d channels found" % len(channel))
        for seg in cachesegs:
            LOGGER.debug(" -- Processing {}-{}".format(*seg))
            if args.nds:
                read_kw = dict(connection=ndsconnection)
            else:
                read_kw = dict(source=cache, nproc=args.nproc)
            msg = "Reading ACCUM_OVERFLOW data:".rjust(30)
            data = get_data(channel,
                            seg[0],
                            seg[1],
                            pad=0.,
                            verbose=msg,
                            **read_kw)
            new = daq.find_overflow_segments(
                data,
                cumulative=True,
            )
            overflows[channel] += new
            LOGGER.info(" -- {} overflows found".format(len(new.active)))
            if not new.active:
                continue
            # go deep!
            for s, e in tqdm.tqdm(new.active.protract(2),
                                  unit='ovfl',
                                  desc='Going deep'.rjust(30)):
                data = get_data(channels, s, e, **read_kw)
                for ch in channels:
                    try:
                        overflows[ch] += daq.find_overflow_segments(
                            data[ch],
                            cumulative=True,
                        )
                    except KeyError:
                        warnings.warn("Skipping {}".format(ch), UserWarning)
                        continue
        LOGGER.debug(" -- Search complete")

    # write output
    LOGGER.info("Writing segments to %s" % args.output_file)
    table = table_from_segments(
        overflows,
        sngl_burst=args.output_file.endswith((".xml", ".xml.gz")),
    )
    if args.integer_segments:
        for key in overflows:
            overflows[key] = overflows[key].round()
    if args.output_file.endswith((".h5", ".hdf", ".hdf5")):
        with h5py.File(args.output_file, "w") as h5f:
            table.write(h5f, path="triggers")
            overflows.write(h5f, path="segments")
    else:
        table.write(args.output_file, overwrite=True)
        overflows.write(args.output_file, overwrite=True, append=True)

    # write HTML
    if args.html:
        # get base path
        base = os.path.dirname(args.html)
        os.chdir(base)
        if args.plot:
            args.plot = os.path.curdir
        if args.output_file:
            args.output_file = os.path.relpath(args.output_file,
                                               os.path.dirname(args.html))
        if os.path.basename(args.html) == 'index.html':
            links = [
                '%d-%d' % (int(args.gpsstart), int(args.gpsend)),
                ('Parameters', '#parameters'),
                ('Segments', [('Overflows', '#overflows')]),
                ('Results', '#results'),
            ]
            if args.state_flag:
                links[2][1].insert(0, ('State flag', '#state-flag'))
            (brand, class_) = htmlio.get_brand(args.ifo, 'Overflows',
                                               args.gpsstart)
            navbar = htmlio.navbar(links, class_=class_, brand=brand)
            page = htmlio.new_bootstrap_page(
                title='%s Overflows | %d-%d' %
                (args.ifo, int(args.gpsstart), int(args.gpsend)),
                navbar=navbar)
        else:
            page = htmlio.markup.page()
            page.div(class_='container')

        # -- header
        page.div(class_='pb-2 mt-3 mb-2 border-bottom')
        page.h1('%s ADC/DAC Overflows: %d-%d' %
                (args.ifo, int(args.gpsstart), int(args.gpsend)))
        page.div.close()

        # -- parameters
        content = [('DCUIDs', ' '.join(map(str, args.dcuid)))]
        if daqsvn:
            content.append(('FEC configuration', (
                '<a href="{0}" target="_blank" title="{1} FEC configuration">'
                '{0}</a>').format(daqsvn, args.ifo)))
        if fec_map:
            content.append(
                ('FEC map', '<a href="{0}" target="_blank" title="{1} FEC '
                 'map">{0}</a>'.format(fec_map, args.ifo)))
        if simulink:
            content.append(
                ('Simulink models', '<a href="{0}" target="_blank" title="{1} '
                 'Simulink models">{0}</a>'.format(simulink, args.ifo)))
        page.h2('Parameters', class_='mt-4 mb-4', id_='parameters')
        page.div(class_='row')
        page.div(class_='col-md-9 col-sm-12')
        page.add(
            htmlio.parameter_table(content,
                                   start=args.gpsstart,
                                   end=args.gpsend,
                                   flag=args.state_flag))
        page.div.close()  # col-md-9 col-sm-12

        # link to summary file
        if args.output_file:
            ext = ('HDF' if args.output_file.endswith(
                (".h5", ".hdf", ".hdf5")) else 'XML')
            page.div(class_='col-md-3 col-sm-12')
            page.add(
                htmlio.download_btn(
                    [('Segments ({})'.format(ext), args.output_file)],
                    btnclass='btn btn-%s dropdown-toggle' % args.ifo.lower(),
                ))
            page.div.close()  # col-md-3 col-sm-12
        page.div.close()  # row

        # -- command-line
        page.h5('Command-line:')
        page.add(htmlio.get_command_line(about=False, prog=PROG))

        # -- segments
        page.h2('Segments', class_='mt-4', id_='segments')

        # give contextual information
        msg = ("This analysis searched for digital-to-analogue (DAC) or "
               "analogue-to-digital (ADC) conversion overflows in the {0} "
               "real-time controls system. ").format(
                   SITE_MAP.get(args.ifo, 'LIGO'))
        if args.deep:
            msg += (
                "A hierarchichal search was performed, with one cumulative "
                "overflow counter checked per front-end controller (FEC). "
                "For those models that indicated an overflow, the card- and "
                "slot-specific channels were then checked. ")
        msg += (
            "Consant overflow is shown as yellow, while transient overflow "
            "is shown as red. If a data-quality flag was loaded for this "
            "analysis, it will be displayed in green.")
        page.add(htmlio.alert(msg, context=args.ifo.lower()))
        # record state segments
        if args.state_flag:
            page.h3('State flag', class_='mt-3', id_='state-flag')
            page.div(id_='accordion1')
            page.add(
                htmlio.write_flag_html(state,
                                       span,
                                       'state',
                                       parent='accordion1',
                                       context='success',
                                       plotdir=args.plot,
                                       facecolor=(0.2, 0.8, 0.2),
                                       edgecolor='darkgreen',
                                       known={
                                           'facecolor': 'red',
                                           'edgecolor': 'darkred',
                                           'height': 0.4,
                                       }))
            page.div.close()
        # record overflow segments
        if sum(abs(s.active) for s in overflows.values()):
            page.h3('Overflows', class_='mt-3', id_='overflows')
            page.div(id_='accordion2')
            for i, (c, flag) in enumerate(list(overflows.items())):
                if abs(flag.active) == 0:
                    continue
                if abs(flag.active) == abs(cachesegs):
                    context = 'warning'
                else:
                    context = 'danger'
                try:
                    channel = cds.get_real_channel(flag.name)
                except Exception:
                    title = '%s [%d]' % (flag.name, len(flag.active))
                else:
                    title = '%s (%s) [%d]' % (flag.name, channel,
                                              len(flag.active))
                page.add(
                    htmlio.write_flag_html(flag,
                                           span,
                                           i,
                                           parent='accordion2',
                                           title=title,
                                           context=context,
                                           plotdir=args.plot))
            page.div.close()
        else:
            page.add(
                htmlio.alert('No overflows were found in this analysis',
                             context=args.ifo.lower(),
                             dismiss=False))

        # -- results table
        page.h2('Results summary', class_='mt-4', id_='results')
        page.table(class_='table table-striped table-hover')
        # write table header
        page.thead()
        page.tr()
        for header in ['Channel', 'Connected signal', 'Num. overflows']:
            page.th(header)
        page.thead.close()
        # write body
        page.tbody()
        for c, seglist in overflows.items():
            t = abs(seglist.active)
            if t == 0:
                page.tr()
            elif t == abs(cachesegs):
                page.tr(class_='table-warning')
            else:
                page.tr(class_='table-danger')
            page.td(c)
            try:
                page.td(cds.get_real_channel(str(c)))
            except Exception:
                page.td()
            page.td(len(seglist.active))
            page.tr.close()
        page.tbody.close()
        page.table.close()

        # -- close and write
        htmlio.close_page(page, args.html)
        LOGGER.info("HTML written to %s" % args.html)