Example #1
File: nds2.py Project: skykain/gwpy
def connect(host, port=None):
    """Open an `nds2.connection` to a given host and port

    Parameters
    ----------
    host : `str`
        name of server with which to connect

    port : `int`, optional
        connection port

    Returns
    -------
    connection : `nds2.connection`
        a new open connection to the given NDS host
    """
    import nds2
    # pylint: disable=no-member

    # set default port for NDS1 connections (required, I think)
    if port is None and NDS1_HOSTNAME.match(host):
        port = 8088

    if port is None:
        return nds2.connection(host)
    return nds2.connection(host, port)
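
A minimal usage sketch (hypothetical; the host and port are illustrative, taken from the public LIGO servers used in later examples):

conn = connect('nds.ligo-wa.caltech.edu', port=31200)  # illustrative host/port
print(conn.get_protocol())  # protocol version (2 for NDS2)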
Example #3
def read(start, end, chlst, nds_hostname='k1nds0', **kwargs):
    '''Read data using NDS2.
    
    Parameters
    ----------
    start : int
        Start GPS time.
    end : int
        End GPS time.
    chlst : list
        List of CDS channel names.
    
    Returns
    -------
    data : list
        List of nds2 buffer objects, one per channel.

    Example
    -------
    >>> channel = 'K1:PEM-EX1_SEIS_WE_SENSINF_OUT16'
    >>> read(1205201472,1205205568,channel)
    <list of the numpy array>
    '''
    if isinstance(chlst, str):
        chlst = [chlst]
    elif not isinstance(chlst, list):
        raise ChannelNameException('Please give chlst as list type.\n'
                                   'Given chlst is {}, whose type is {}'
                                   .format(chlst, type(chlst)))
    conn = nds2.connection('10.68.10.121', 8088)  # nds0
    buffers = conn.fetch(start, end, chlst)
    return buffers
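
A hedged usage sketch based on the docstring example (requires network access to the KAGRA NDS server; the channel is the one from the docstring):

channel = 'K1:PEM-EX1_SEIS_WE_SENSINF_OUT16'
bufs = read(1205201472, 1205205568, [channel])
for buf in bufs:
    print(buf.channel.name, buf.length)  # the samples themselves live in buf.data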
Example #4
 def connection_tester(self, ifo, s):
     self.logger.info('Testing connection to server at {0} ({1})...'.format(ifo, s))
     try:
         conn = nds2.connection(s)
         self.logger.success('Connection established, printing details... \n {0}'.format(conn))
         return conn
     except Exception as e:
         self.logger.error('Unable to open connection with server... \n {0}, {1}'.format(s, e))
         return e
Example #5
def stream_data(channels,
                duration=600,
                start_time=0,
                fs_up=256,
                portNumber=31200):

    nds_osx = ('/opt/local/Library/Frameworks/Python.framework/' +
               'Versions/2.7/lib/python2.7/site-packages/')
    nds_sandbox = '/usr/lib/python2.7/dist-packages/'

    if os.path.exists(nds_osx):
        sys.path.append(nds_osx)
    elif os.path.exists(nds_sandbox):
        sys.path.append(nds_sandbox)

    # Connect to the right server
    ifo = channels[0][:2]
    if ifo == 'L1':
        ndsServer = 'nds.ligo-la.caltech.edu'
    elif ifo == 'H1':
        ndsServer = 'nds.ligo-wa.caltech.edu'
    else:
        sys.exit("unknown IFO specified")

    # Setup connection to the NDS
    try:
        conn = nds2.connection(ndsServer, portNumber)
    except RuntimeError:
        alert(
            'ERROR: Need to run `kinit albert.einstein` before nds2 '
            'can establish a connection',
            color='FAIL')
        sys.exit(1)

    # get data
    data = conn.fetch(start_time, start_time + duration, channels)
    data = np.array(data)

    # stack data and downsample
    vdata = []
    for k in range(len(channels)):
        fsdown = data[k].channel.sample_rate
        down_factor = int(fsdown // fs_up)

        fir_aa = sig.firwin(20 * down_factor + 1,
                            0.8 / down_factor,
                            window='blackmanharris')

        # Using fir_aa[1:-1] cuts off a leading and trailing zero
        downdata = sig.decimate(data[k].data,
                                down_factor,
                                ftype=sig.dlti(fir_aa[1:-1], 1.0),
                                zero_phase=True)
        vdata.append(downdata)

    return np.array(vdata).T
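
The anti-aliasing/decimation step above can be exercised in isolation; a self-contained sketch on synthetic data (the sample rates are assumptions, not values from the original):

import numpy as np
import scipy.signal as sig

fs_in, fs_up = 16384, 256                     # illustrative rates
down_factor = fs_in // fs_up
x = np.random.randn(4 * fs_in)                # 4 s of white noise

fir_aa = sig.firwin(20 * down_factor + 1, 0.8 / down_factor,
                    window='blackmanharris')
# fir_aa[1:-1] cuts off a leading and trailing zero, as in stream_data()
y = sig.decimate(x, down_factor, ftype=sig.dlti(fir_aa[1:-1], 1.0),
                 zero_phase=True)
print(len(y))                                 # 4 * fs_up samples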
Example #6
def connect(host, port=None):
    """Open an `nds2.connection` to a given host and port

    Parameters
    ----------
    host : `str`
        name of server with which to connect

    port : `int`, optional
        connection port

    Returns
    -------
    connection : `nds2.connection`
        a new open connection to the given NDS host
    """
    if port is None:
        return nds2.connection(host)
    return nds2.connection(host, port)
Example #7
def stream_data(start, channels,
                dur  = 600,
                fsup = 512,
                ifo  = 'H1'):
    """
    Collect LIGO data using nds2

    Parameters
    ----------
    start : `int`
        GPS start time (UTC)
    channels : `list`
        channel names to scrape
    dur : `int`
        duration (in seconds) of data to scrape
    fsup : `int`
        sample rate to return data with
    ifo : `str`
        Interferometer ('H1' or 'L1')

    Returns
    -------
    vdata : `numpy.ndarray`
        array of collected data
    """

    import nds2
    if ifo == 'H1':
        server = 'nds.ligo-wa.caltech.edu'
    else:
        server = 'nds.ligo-la.caltech.edu'

    # Setup connection to the NDS
    conn = nds2.connection(server, 31200)
    data = []
    for i in range(len(channels)):
        temp = conn.fetch(start, start + dur, [channels[i]])
        data.append(temp)

    # Get the data and stack it (the data are the columns)
    vdata = []
    for k in range(len(channels)):
        fsdown = data[k][0].channel.sample_rate
        down_factor = int(fsdown // fsup)

        fir_aa = sig.firwin(20 * down_factor + 1, 0.8 / down_factor,
                            window='blackmanharris')

        # Using fir_aa[1:-1] cuts off a leading and trailing zero
        downdata = sig.decimate(data[k][0].data, down_factor,
                                ftype = sig.dlti(fir_aa[1:-1], 1.0),
                                zero_phase = True)
        vdata.append(downdata)

    return np.array(vdata)
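
A hypothetical call (the channel name and GPS start are illustrative, and a valid Kerberos ticket is typically required):

chans = ['H1:ISI-GND_STS_HAM2_X_DQ']          # illustrative channel
vdata = stream_data(1126259446, chans, dur=64, fsup=512, ifo='H1')
print(vdata.shape)                            # (len(chans), 64 * 512)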
Example #8
def _ligo_model_overflow_channels_nds(dcuid, ifo, gpstime, host):
    import nds2

    if host is True:
        try:
            host = os.environ['NDSSERVER'].split(',', 1)[0]
        except KeyError:
            raise ValueError("Cannot determine default NDSSERVER, please pass "
                             "nds=<host:port> or set NDSSERVER environment "
                             "variable")
    try:
        host, port = host.rsplit(':', 1)
    except ValueError:
        connection = nds2.connection(host)
    else:
        connection = nds2.connection(host, int(port))

    if connection.get_protocol() > 1:
        connection.set_epoch(gpstime, gpstime + 1)

    # NOTE: the `3` here is the channel type mask for 'ONLINE | RAW'
    return map(attrgetter('name'), connection.find_channels(
        '{ifo}:FEC-{dcuid}_*_OVERFLOW_*'.format(ifo=ifo, dcuid=dcuid), 3))
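
The hard-coded 3 can be built from the nds2 constants instead; a small sketch (values assumed from the nds2-client bindings, where ONLINE is 1 and RAW is 2):

import nds2

mask = nds2.channel.CHANNEL_TYPE_ONLINE | nds2.channel.CHANNEL_TYPE_RAW
assert mask == 3  # same mask as passed to find_channels() above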
Example #10
 def test_nds2_conversion(self):
     try:
         import nds2
     except ImportError as e:
         self.skipTest(str(e))
     else:
         try:
             conn = nds2.connection(NDSHOST)
         except Exception as f:
             self.skipTest(str(f))
         else:
             nds2channel = conn.find_channels(self.channel)[0]
             new = Channel.from_nds2(nds2channel)
             self.assertTrue(str(new) == self.channel)
             self.assertTrue(new.ifo == self.channel.split(':', 1)[0])
             self.assertTrue(new.sample_rate == units.Quantity(32768, 'Hz'))
Example #12
def connect(start_time):
	global conn
	global connected
	global server

	fast_server = ('l1nds0', 8088)
	slow_server = ('nds.ligo-la.caltech.edu', 31200)
	next_server = server

	# if we're on the slow server, decide if the fast server could be used
	if server == slow_server:
		current_time = int(subprocess.check_output('tconvert now', shell=True).strip())
		fast_time = current_time - 12*24*60*60 # earliest time the fast server will have data

		if start_time >= fast_time:
			next_server = fast_server
		if not connected or next_server != server:
			conn = nds2.connection(next_server[0], next_server[1]) # connect to the next server
			connected = True
			server = next_server # set the current server
Example #13
def quickplot(chanList, gpsLength=3600, gpsStop=None):
    '''
    quickplot takes in a list of channels and plots an hour's worth of data for
    all channels specified.
    Inputs:
    chanList: list of channels valid for the CTN Lab fb4.
    gpsLength: length of time in seconds to plot back from gpsStop. Default is one hour.
    gpsStop: GPS time to plot until. Default is now (evaluated at call time).
    '''
    if gpsStop is None:
        gpsStop = correct_time().gpsSeconds
    conn = nds2.connection('10.0.1.156', 8088)
    gpsStart = gpsStop - gpsLength
    data = conn.fetch(gpsStart, gpsStop, chanList)

    if gpsLength <= 60:
        units = 'seconds'
        timeDivisor = 1

    elif gpsLength <= 3600:
        units = 'minutes'
        timeDivisor = 60

    elif gpsLength <= 86400:
        units = 'hours'
        timeDivisor = 3600

    else:
        units = 'days'
        timeDivisor = 86400

    displayTime = gpsLength / timeDivisor
    t = linspace(0, displayTime, gpsLength * 16)

    for dat in data:
        plot(t, dat.data)

    xlabel('Time [{}] from {} ({})'.format(units, tconvert(gpsStart),
                                           gpsStart))

    print(chanList)
    print('gpsLength = {}'.format(gpsLength))
    print('gpsStop = {}'.format(gpsStop))
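
A hypothetical call (the channel names are invented for illustration):

quickplot(['C3:PSL-PMC_TRANS_DC', 'C3:PSL-LASER_TEMP'], gpsLength=600)  # last 10 minutes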
Example #14
def connh():
    return nds2.connection('nds.ligo-wa.caltech.edu', 31200)
Example #15
with open(fname, 'r') as f:
    chanlines = f.read().split()
channels = [chan_head + line for line in chanlines]

# Get data start time
if ifo == 'L1':
    ndsServer = 'nds.ligo-la.caltech.edu'
elif ifo == 'H1':
    ndsServer = 'nds.ligo-wa.caltech.edu'
else:
    sys.exit("unknown IFO specified")

# Setup connection to the NDS
try:
    conn = nds2.connection(ndsServer, portNumber)
except RuntimeError:
    print('ERROR: Need to run `kinit albert.einstein` before nds2 '
          'can establish a connection')
    sys.exit(1)

# Setup start and stop times
t = Time(times, format='iso', scale='utc')
t_start = int(t.gps)

if __debug__:
    print("Output sample rate: {} Hz".format(fsup))
    print("Channel List:\n-------------")
    print("\n".join(channels))

print("Getting data from " + ndsServer + "...")
Example #16
 def _connect():
     return nds2.connection(host, port)
Example #17
import nds2

conn = nds2.connection('nds.ligo-la.caltech.edu')
print(conn)
channel = ['L1:ASC-AS_A_RF45_Q_PIT_OUT_DQ']
print("channel = {0}".format(channel))

print("Now testing the command 'conn.iterate(channel)'")

try:
    it = conn.iterate(channel)
except RuntimeError as err:
    print("Failed")
    print(err)

print("Now testing the command 'conn.iterate(1, channel)'")

try:
    it = conn.iterate(1, channel)
except RuntimeError as err:
    print("Failed")
    print(err)

print("Now testing the command 'conn.iterate(1125417950, 1125418050, channel)'")

try:
    it = conn.iterate(1125417950, 1125418050, channel)
except RuntimeError as err:
    print("Failed")
    print(err)
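
On success, iterate() returns an iterable of buffer lists; a hedged sketch of consuming a few blocks (buffer attributes assumed to match those returned by fetch()):

for i, bufs in enumerate(conn.iterate(1, channel)):
    print(bufs[0].channel.name, len(bufs[0].data))
    if i >= 2:  # stop after a few one-second blocks
        break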
Example #18

# ----------------------------------------
# Parse Arguments
# ----------------------------------------
import argparse
parser = argparse.ArgumentParser(description='Find Channels in NDS')
parser.add_argument('channel', help='channel name')
parser.add_argument('-trend', default='minutes',
                    help='choose in ["minutes","seconds"]',)
parser.add_argument('-types', default='mean',
                    help='choose in ["mean","max","min","rms"]')
args = parser.parse_args()
chname = args.channel
trend = args.trend
types = args.types

# ----------------------------------------
# Fetch data from NDS
# ----------------------------------------
import nds2
conn = nds2.connection('10.68.10.121', 8088)
fmt = '{c}.{type},{trend}-trend'.format(c=chname,type=types,trend=trend[0])
found = conn.find_channels(fmt)
for ch in found:
    print(ch.name)
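
With the defaults, the pattern expands to a minute-trend mean channel; an illustrative expansion (the channel name is invented):

fmt = '{c}.{type},{trend}-trend'.format(c='K1:PEM-SEIS_EXV_Z',
                                        type='mean', trend='minutes'[0])
print(fmt)  # K1:PEM-SEIS_EXV_Z.mean,m-trend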

Example #20
def _get_timeseries_dict(channels,
                         segments,
                         config=None,
                         cache=None,
                         query=True,
                         nds=None,
                         frametype=None,
                         multiprocess=True,
                         return_=True,
                         statevector=False,
                         archive=True,
                         datafind_error='raise',
                         **ioargs):
    """Internal method to retrieve the data for a set of like-typed
    channels using the :meth:`TimeSeriesDict.read` accessor.
    """
    channels = list(map(get_channel, channels))

    # set classes
    if statevector:
        ListClass = StateVectorList
        DictClass = StateVectorDict
    else:
        ListClass = TimeSeriesList
        DictClass = TimeSeriesDict

    # check we have a configparser
    if config is None:
        config = GWSummConfigParser()

    # read segments from global memory
    keys = dict((c.ndsname, make_globalv_key(c)) for c in channels)
    havesegs = reduce(
        operator.and_,
        (globalv.DATA.get(keys[channel.ndsname], ListClass()).segments
         for channel in channels))
    new = segments - havesegs

    # get processes
    if multiprocess is True:
        nproc = count_free_cores()
    elif multiprocess is False:
        nproc = 1
    else:
        nproc = multiprocess

    # read channel information
    filter_ = dict()
    resample = dict()
    dtype_ = dict()
    for channel in channels:
        try:
            filter_[channel.ndsname] = channel.filter
        except AttributeError:
            pass
        try:
            resample[channel] = float(channel.resample)
        except AttributeError:
            pass
        if channel.dtype is None:
            dtype_[channel] = ioargs.get('dtype')
        else:
            dtype_[channel] = channel.dtype

    # work out whether to use NDS or not
    if nds is None and cache is not None:
        nds = False
    elif nds is None:
        nds = 'LIGO_DATAFIND_SERVER' not in os.environ

    # read new data
    query &= (abs(new) > 0)
    if cache is not None:
        query &= len(cache) > 0
    if query:
        for channel in channels:
            globalv.DATA.setdefault(keys[channel.ndsname], ListClass())
        # open NDS connection
        if nds and config.has_option('nds', 'host'):
            host = config.get('nds', 'host')
            port = config.getint('nds', 'port')
            try:
                ndsconnection = nds2.connection(host, port)
            except RuntimeError as e:
                if 'SASL authentication' in str(e):
                    from gwpy.io.nds import kinit
                    kinit()
                    ndsconnection = nds2.connection(host, port)
                else:
                    raise
            frametype = source = 'nds'
            ndstype = channels[0].type
        elif nds:
            ndsconnection = None
            frametype = source = 'nds'
            ndstype = channels[0].type
        # or find frame type and check cache
        else:
            ifo = channels[0].ifo
            frametype = frametype or channels[0].frametype
            if frametype is not None and frametype.endswith('%s_M' % ifo):
                new = type(new)([s for s in new if abs(s) >= 60.])
            elif frametype is not None and frametype.endswith('%s_T' % ifo):
                new = type(new)([s for s in new if abs(s) >= 1.])
            if cache is not None:
                fcache = cache.sieve(ifos=ifo[0],
                                     description=frametype,
                                     exact_match=True)
            else:
                fcache = Cache()
            if (cache is None or len(fcache) == 0) and len(new):
                span = new.extent().protract(8)
                fcache = find_frames(ifo,
                                     frametype,
                                     span[0],
                                     span[1],
                                     config=config,
                                     gaps='ignore',
                                     onerror=datafind_error)
                cachesegments = find_cache_segments(fcache)
                gaps = SegmentList([span]) - cachesegments
                if abs(gaps) and frametype == '%s_HOFT_C00' % ifo:
                    f2 = '%s_DMT_C00' % ifo
                    vprint("    Gaps discovered in aggregated h(t) type "
                           "%s, checking %s\n" % (frametype, f2))
                    c2 = find_frames(ifo,
                                     f2,
                                     span[0],
                                     span[1],
                                     config=config,
                                     gaps='ignore',
                                     onerror=datafind_error)
                    g2 = SegmentList([span]) - find_cache_segments(c2)
                    if abs(g2) < abs(gaps):
                        vprint("    Greater coverage with frametype %s\n" % f2)
                        fcache = c2
                        frametype = f2
                    else:
                        vprint("    No extra coverage with frametype %s\n" %
                               f2)

            # parse discontiguous cache blocks and rebuild segment list
            cachesegments = find_cache_segments(fcache)
            new &= cachesegments
            source = 'frames'

            # set ctype if reading with framecpp
            if cache is None and frametype in ADC_TYPES and HAS_FRAMECPP:
                ioargs['type'] = 'adc'

        for channel in channels:
            channel.frametype = frametype

        # check whether each channel exists for all new times already
        qchannels = []
        qresample = {}
        qdtype = {}
        for channel in channels:
            oldsegs = globalv.DATA.get(channel.ndsname, ListClass()).segments
            if abs(new - oldsegs) != 0:
                qchannels.append(channel)
                if channel in resample:
                    qresample[channel] = resample[channel]
                qdtype[channel] = dtype_.get(channel, ioargs.get('dtype'))
        ioargs['dtype'] = qdtype

        # loop through segments, recording data for each
        if len(new) and nproc > 1:
            vprint("    Fetching data (from %s) for %d channels [%s]" %
                   (source, len(qchannels), nds and ndstype or frametype))
        for segment in new:
            # force reading integer-precision segments
            segment = type(segment)(int(segment[0]), int(segment[1]))
            if abs(segment) < 1:
                continue
            if nds:
                tsd = DictClass.fetch(qchannels,
                                      segment[0],
                                      segment[1],
                                      connection=ndsconnection,
                                      type=ndstype,
                                      **ioargs)
            else:
                # pad resampling
                if segment[1] == cachesegments[-1][1] and qresample:
                    resamplepad = 8
                    if abs(segment) <= resamplepad:
                        continue
                    segment = type(segment)(segment[0],
                                            segment[1] - resamplepad)
                    segcache = fcache.sieve(
                        segment=segment.protract(resamplepad))
                else:
                    segcache = fcache.sieve(segment=segment)
                # set minute trend times modulo 60 from GPS 0
                if (re.match(r'(?:(.*)_)?[A-Z]\d_M', str(frametype))
                        or (ifo == 'C1' and frametype == 'M')):
                    segstart = int(segment[0]) // 60 * 60
                    segend = int(segment[1]) // 60 * 60
                    if segend >= segment[1]:
                        segend -= 60
                    # and ignore segments shorter than 1 full average
                    if (segend - segstart) < 60:
                        continue
                    segcache = segcache.sieve(
                        segment=type(segment)(segstart, segend))
                else:
                    segstart, segend = map(float, segment)
                # pull filters out because they can break multiprocessing
                if nproc > 1:
                    for c in qchannels:
                        if c.ndsname in filter_:
                            del c.filter
                # read data
                tsd = DictClass.read(segcache,
                                     qchannels,
                                     start=segstart,
                                     end=segend,
                                     nproc=nproc,
                                     resample=qresample,
                                     **ioargs)
                # put filters back
                for c in qchannels:
                    if c.ndsname in filter_:
                        c.filter = filter_[c.ndsname]
            for (channel, data) in tsd.items():
                key = keys[channel.ndsname]
                if (key in globalv.DATA
                        and data.span in globalv.DATA[key].segments):
                    continue
                if data.unit is None:
                    data.unit = 'undef'
                for seg in globalv.DATA[key].segments:
                    if seg.intersects(data.span):
                        data = data.crop(*(data.span - seg))
                        break
                try:
                    filt = filter_[channel.ndsname]
                except KeyError:
                    pass
                else:
                    # filter with function
                    if callable(filt):
                        try:
                            data = filt(data)
                        except TypeError as e:
                            if 'Can only apply' in str(e):
                                data.value[:] = filt(data.value)
                            else:
                                raise
                    # filter with gain
                    elif (isinstance(filt, tuple) and len(filt) == 3
                          and len(filt[0] + filt[1]) == 0):
                        try:
                            data *= filt[2]
                        except TypeError:
                            data = data * filt[2]
                    # filter zpk
                    elif isinstance(filt, tuple):
                        data = data.filter(*filt)
                    # filter fail
                    else:
                        raise ValueError("Cannot parse filter for %s: %r" %
                                         (channel.ndsname, filt))
                if isinstance(data, StateVector) or ':GRD-' in str(channel):
                    try:
                        data.unit = units.dimensionless_unscaled
                    except AttributeError:
                        data._unit = units.dimensionless_unscaled
                    if hasattr(channel, 'bits'):
                        data.bits = channel.bits
                elif data.unit is None:
                    data._unit = channel.unit
                # XXX: HACK for failing unit check
                if len(globalv.DATA[key]):
                    data._unit = globalv.DATA[key][-1].unit
                # update channel type for trends
                if data.channel.type is None and (data.channel.trend
                                                  is not None):
                    if data.dt.to('s').value == 1:
                        data.channel.type = 's-trend'
                    elif data.dt.to('s').value == 60:
                        data.channel.type = 'm-trend'
                # append and coalesce
                add_timeseries(data, key=key, coalesce=True)
            if multiprocess:
                vprint('.')
        if len(new):
            vprint("\n")

    if not return_:
        return

    # return correct data
    out = OrderedDict()
    for channel in channels:
        data = ListClass()
        if keys[channel.ndsname] not in globalv.DATA:
            out[channel.ndsname] = ListClass()
        else:
            for ts in globalv.DATA[keys[channel.ndsname]]:
                for seg in segments:
                    if abs(seg) == 0 or abs(seg) < ts.dt.value:
                        continue
                    if ts.span.intersects(seg):
                        common = map(float, ts.span & seg)
                        cropped = ts.crop(*common, copy=False)
                        if cropped.size:
                            data.append(cropped)
        out[channel.ndsname] = data.coalesce()
    return out
Example #21
    f.close()
    print "Saved GPS times in logfile: %s" % filename + ".log"
else:
    ### load logfile
    f = open(sys.argv[1])
    lines = f.readlines()
    logfile = []
    for line in lines:
        tok = line.split()
        logfile.append([int(tok[0]), int(tok[1]), tok[2]])
    f.close()

######## Main loop for data processing ########################################################################################################

# open NDS connection
conn = nds2.connection("nds.ligo-wa.caltech.edu", 31200)
# preallocate the sensing matrix and the coherence matrix
matrix = zeros([len(readout), len(excitations)], dtype="complex128")
cohe = zeros([len(readout), len(excitations)])

# loop over all excitation channels
for exc, mon, log, i, frequency in zip(excitations, monitors, logfile, range(len(excitations)), freqs):
    print "Analyzing excitation %s, monitored with %s, data from %d to %d" % (exc, mon, log[0], log[1])
    ### read data (trying more times if the data is not yet on disk)
    waitfordata = True
    while waitfordata:
        try:
            channels = readout[:]
            channels.append(mon)
            bufs = conn.fetch(log[0], log[1], channels)
            waitfordata = False
Example #22
def nonna_get_data(target_channel, aux_channels, gps_start, duration, band_freqs, outfs, 
				   fs = 4096, nds_server = 'nds.ligo-wa.caltech.edu', nds_port = 31200):
	"""
	This function reads data and prepares it for the analysis. It computes the band-limited
	RMS and downsamples the auxiliary channels.
	
	Input arguments:
	target_channel = compute the BLRMS of this channel
	aux_channels   = all the slow channels that will be used to predict the BLRMS time
	                 variation
	gps_start      = start reading data at this time
	duration       = number of seconds of data to read
	band_freqs     = [fmin, fmax] edge frequencies of the band used to compute the BLRMS
	outfs          = output sampling frequency for BLRMS and the other channels
	fs             = first downsample the target channel to this sampling rate, to avoid
					 numerical instabilities of the band-pass filter, which would return 
					 only a bunch of NaNs
	nds_server     = address of the NDS2 server
	nds_port       = port used to contact the server
	
	Output data:
	t     = vector of time values for each sample
	blrms = vector containing the BLRMS (at the rate outfs)
	aux   = matrix of all auxiliary channels, downsampled to the output rate
	
	Note that the first two and last two seconds of data are discarded, to cope with 
	the band-pass and low-pass filter transients.
	"""
	
	##### READ DATA 
	
	# open connection
	conn = nds2.connection(nds_server, nds_port)
	# read all data
	buffers = conn.fetch(gps_start, gps_start+duration, [target_channel]+aux_channels)

	##### COMPUTE BLRMS 

	# decimate target signal
	tg = scipy.signal.decimate(buffers[0].data, int(buffers[0].length/duration/fs))
	# band pass
	b,a = scipy.signal.butter(6, scipy.array(band_freqs)/(fs/2.), btype='bandpass')
	tg = scipy.signal.filtfilt(b, a, tg)
	# square and low pass
	tg = tg**2
	b,a = scipy.signal.butter(4, outfs/(fs/2.), btype='lowpass')
	blrms = scipy.signal.filtfilt(b, a, tg)
	# decimate
	blrms = blrms[::fs // outfs]

	##### DECIMATE THE OTHER CHANNELS 

	aux = scipy.zeros((duration*outfs, len(buffers)-1))

	# loop over all channels
	for i in range(1,len(buffers)):
		# low pass and decimate
		fs_aux = int(buffers[i].length / duration)
		aux[:, i-1] = scipy.signal.decimate(buffers[i].data, fs_aux // outfs)
		#b,a = scipy.signal.butter(4, outfs/(fs_aux/2.), btype='lowpass')
		#x = scipy.signal.filtfilt(b, a, buffers[i].data)
		#aux[:,i-1] = x[::fs_aux/outfs]

	# get rid of initial and final transients
	aux = aux[2*outfs:-2*outfs]
	blrms = blrms[2*outfs:-2*outfs]
	t = scipy.arange(0,len(blrms))/float(outfs)
	
	# RETURN RESULTS
	return t, blrms, aux
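
A hypothetical invocation (channel names, GPS time, and band edges are illustrative only):

t, blrms, aux = nonna_get_data('H1:GDS-CALIB_STRAIN',           # illustrative
                               ['H1:ISI-GND_STS_ETMX_Z_DQ'],    # illustrative
                               gps_start=1126259446, duration=600,
                               band_freqs=[10, 20], outfs=16)
print(len(blrms), aux.shape)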
Example #23
    def fetch(cls, channel, start, end, host=None, port=None, verbose=False,
              connection=None, ndschanneltype=None):
        """Fetch data from NDS into a TimeSeries.

        Parameters
        ----------
        channel : :class:`~gwpy.detector.channel.Channel`, or `str`
            required data channel
        start : `~gwpy.time.Time`, or float
            GPS start time of data span
        end : `~gwpy.time.Time`, or float
            GPS end time of data span
        host : `str`, optional
            URL of NDS server to use, defaults to observatory site host
        port : `int`, optional
            port number for NDS server query, must be given with `host`
        verbose : `bool`, optional
            print verbose output about NDS progress
        connection : :class:`~gwpy.io.nds.NDS2Connection`
            open NDS connection to use
        ndschanneltype : `int`
            NDS2 channel type integer

        Returns
        -------
        TimeSeries
            a new `TimeSeries` containing the data read from NDS
        """
        # import module and type-cast arguments
        from ..io import nds as ndsio
        import nds2
        start = int(floor(isinstance(start, Time) and start.gps or start))
        end = int(ceil(isinstance(end, Time) and end.gps or end))
        # set context
        if verbose:
            outputcontext = ndsio.NDSOutputContext()
        else:
            outputcontext = ndsio.NDSOutputContext(open(os.devnull, 'w'),
                                                   open(os.devnull, 'w'))
        # get type
        if not ndschanneltype and isinstance(channel, Channel) and channel.type:
            ndschanneltype = channel.type
        if not ndschanneltype:
            ndschanneltype = (nds2.channel.CHANNEL_TYPE_RAW |
                              nds2.channel.CHANNEL_TYPE_RDS |
                              nds2.channel.CHANNEL_TYPE_STREND |
                              nds2.channel.CHANNEL_TYPE_MTREND)

        channel = str(channel)
        # user-defined host or open connection
        if connection or host:
            hostlist = [(host, port)]
        # logical host resolution order
        else:
            hostlist = ndsio.host_resolution_order(Channel(channel).ifo)

        # loop hosts, stopping on first success
        for host,port in hostlist:
            if connection:
                _conn = connection
            # open connection if needed - check kerberos ticket
            if connection is None:
                if verbose:
                    print("Connecting to %s:%s" % (host, port))
                try:
                    with outputcontext:
                        _conn = nds2.connection(host, port)
                except RuntimeError as e:
                    if str(e).startswith('Request SASL authentication'):
                        print('\nError authenticating against %s' % host,
                              file=sys.stderr)
                        ndsio.kinit()
                        with outputcontext:
                            _conn = nds2.connection(host, port)
                    else:
                        raise
            # double check channels against server
            with outputcontext:
                channelok = _conn.find_channels(str(channel), ndschanneltype)
                if not channelok:
                    channels = _conn.find_channels(
                                   '*%s*' % str(channel), ndschanneltype)
                    # if no channels and user didn't supply their own server
                    # warn and move on
                    if len(channels) == 0 and not connection:
                        if verbose:
                            warnings.warn("No matching channels found",
                                          ndsio.NDSWarning)
                        continue
                    elif len(channels) == 0:
                        pass
                    # if one channel, find
                    elif len(channels) == 1:
                        channel = channels[0].name
                    # if more than one channel and user did supply their own
                    # server, barf
                    elif connection:
                        raise ValueError("No channel '%s' found on server."
                                         " However, %d others were found, "
                                         "please restrict your search and "
                                         "try again:\n    %s"
                                         % (str(channel), len(channels),
                                            "\n    ".join(map(str, channels))))
                if channel.endswith('m-trend') and (start % 60 or end % 60):
                    warnings.warn("Requested channel is minute trend, but "
                                  "start and stop GPS times are not modulo "
                                  "60-seconds (from GPS epoch). Times will be "
                                  "expanded outwards to compensate")
                    if start % 60:
                        start = start // 60 * 60
                    if end % 60:
                        end = end // 60 * 60 + 60
                # fetch data
                try:
                    if verbose:
                        print("Downloading data...")
                    buffer_ = _conn.fetch(start, end, [str(channel)])[0]
                except RuntimeError as e:
                    # if error and user supplied their own server, raise
                    if connection:
                        raise
                    # otherwise warn and move on
                    elif verbose:
                        warnings.warn(str(e), ndsio.NDSWarning)
                else:
                    # cast as TimeSeries and return
                    epoch = Time(buffer_.gps_seconds, buffer_.gps_nanoseconds,
                                 format='gps')
                    channel = Channel.from_nds2(buffer_.channel)
                    return cls(buffer_.data, epoch=epoch, channel=channel)
        raise RuntimeError("Cannot find relevant data on any known server")
Example #24
import nds2
conn = nds2.connection('nds.ligo-wa.caltech.edu', 31200)
print(conn)

# fetch() expects a list of channel names
buf = conn.fetch(1024417918, 1024417919,
                 ['H1:PSL-ISS_PDA_OUT_DQ'])  # ,'H1:PSL_ISS_PDB_OUT_DQ'

print(buf)
Example #25
def frame_struct(params):
    """@create seismon frame structure

    @param params
        seismon params structure
    """

    gpsStart = np.min(params["gpsStart"])-1000
    gpsEnd = np.max(params["gpsEnd"])

    if params["noFrames"]:
        datacache = []
    elif params["ifo"] == "XG":
        frameDir = "/archive/frames/MBH/"
        frameDir = "/home/prestegard/Homestake_frames/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            thisFrame = frame.replace("file://localhost","")
            if thisFrame == "":
                continue

            thisFrameSplit = thisFrame.split(".")
            if thisFrameSplit[-1] == "log":
                continue

            thisFrameSplit = thisFrame.split("-")
            gps = float(thisFrameSplit[-2])
            dur = float(thisFrameSplit[-1].replace(".gwf",""))

            if gps+dur < gpsStart:
                continue
            if gps > gpsEnd:
                continue

            #cacheFile = glue.lal.CacheEntry("%s %s %d %d %s"%("XG","Homestake",gps,dur,frame))
            datacache.append(frame)
        datacache = glue.lal.Cache(map(glue.lal.CacheEntry.from_T050017, datacache))

    elif params["ifo"] == "LUNAR":
        frameDir = "/home/mcoughlin/Lunar/data/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            datacache.append(frame)
    elif params["ifo"] == "CZKHC": 
        frameDir = "/home/mcoughlin/Stochastic/Lunar/Seismometer/data/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            datacache.append(frame)

    elif params["ifo"] == "SR":
        frameDir = "/home/mcoughlin/Stochastic/Lunar/seismic_data_corrected/data/"
        frameDir = "/home/mcoughlin/Stochastic/Lunar/corrected/data/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            datacache.append(frame)

    elif params["ifo"] == "Gravimeter":
        frameDir = "/home/mcoughlin/Gravimeter/minute/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            datacache.append(frame)
    elif params["ifo"] == "Tiltmeter":
        frameDir = "/home/mcoughlin/Tiltmeter/frames/"
        frameList = [os.path.join(root, name)
            for root, dirs, files in os.walk(frameDir)
            for name in files]

        datacache = []
        for frame in frameList:
            thisFrame = frame.replace("file://localhost","")
            if thisFrame == "":
                continue

            thisFrameSplit = thisFrame.split(".")
            if thisFrameSplit[-1] == "log":
                continue

            thisFrameSplit = thisFrame.split("-")
            gps = float(thisFrameSplit[-2])
            dur = float(thisFrameSplit[-1].replace(".gwf",""))

            if gps+dur < gpsStart:
                continue
            if gps > gpsEnd:
                continue

            #cacheFile = glue.lal.CacheEntry("%s %s %d %d %s"%("XG","Homestake",gps,dur,frame))
            datacache.append(frame)
        datacache = glue.lal.Cache(map(glue.lal.CacheEntry.from_T050017, datacache))

    elif params["ifo"] == "IRIS":
        datacache = "IRIS"
    else:
        if params["frameType"] == "nds2":
            import nds2
            conn = nds2.connection(params["ndsServer"])
            y = conn.find_channels('*',nds2.channel.CHANNEL_TYPE_RAW,\
                nds2.channel.DATA_TYPE_FLOAT32, 128, 16384)

            params["ndsConnection"] = conn
            pass

        else:
            connection = glue.datafind.GWDataFindHTTPConnection()
            datacache = connection.find_frame_urls(params["ifo"][0], params["frameType"],
                                                   gpsStart, gpsEnd,
                                                   urltype="file",
                                                   on_gaps="warn")
            connection.close()

    params["frame"] = datacache

    return params
Example #26
def stream_data(ini_file):

    # Read config file
    settings = ConfigParser()
    settings.optionxform = str
    settings.read(ini_file)

    # Unpack configs
    dur = settings.getint('Data', 'duration')
    fname = settings.get('Data', 'chanlist')
    fsup = settings.getint('Data', 'fs')
    ifo = settings.get('Data', 'ifo')
    output = settings.get('Data', 'output')
    portNumber = settings.getint('Data', 'portNumber')
    save = settings.getboolean('Data', 'save_mat')
    times = settings.get('Data', 'data_start')

    nds_osx = ('/opt/local/Library/Frameworks/Python.framework/' +
               'Versions/2.7/lib/python2.7/site-packages/')
    nds_sandbox = '/usr/lib/python2.7/dist-packages/'

    import sys
    if os.path.exists(nds_osx):
        sys.path.append(nds_osx)
    elif os.path.exists(nds_sandbox):
        sys.path.append(nds_sandbox)

    # Collect channels and times
    chan_head = ifo + ':'
    chanlines, custom_times = read_chans_and_times(fname)
    channels = [chan_head + line for line in chanlines]

    # Get data start time
    if ifo == 'L1':
        ndsServer = 'nds.ligo-la.caltech.edu'
    elif ifo == 'H1':
        ndsServer = 'nds.ligo-wa.caltech.edu'
    else:
        sys.exit("unknown IFO specified")

    # Setup connection to the NDS
    try:
        conn = nds2.connection(ndsServer, portNumber)
    except RuntimeError:
        print('ERROR: Need to run `kinit albert.einstein` before nds2 '
              'can establish a connection')
        sys.exit(1)

    if __debug__:
        print("Output sample rate: {} Hz".format(fsup))
        # print("Channel List:\n-------------")
        # print("\n".join(channels))

    # Setup start and stop times
    t = Time(times, format='iso', scale='utc')
    t_start = int(t.gps)

    print("Getting data from " + ndsServer + "...")
    data = []
    for i in range(len(custom_times)):
        if custom_times[i] is None:
            custom_times[i] = t_start

        try:
            temp = conn.fetch(custom_times[i], custom_times[i] + dur,
                              [channels[i]])
            sys.stdout.write("\033[0;32m")
            sys.stdout.write(u'\r  [{}] '.format(u'\u2713'))
            sys.stdout.write("\033[0;0m")
            sys.stdout.write('{} '.format(channels[i]))
            sys.stdout.write('\n')
            sys.stdout.flush()
            # only keep the buffer if the fetch actually succeeded
            data.append(temp)
        except RuntimeError:
            sys.stdout.write("\033[1;31m")
            sys.stdout.write(u'\r  [{}] '.format(u'\u2717'))
            sys.stdout.write("\033[0;0m")
            sys.stdout.write('{} '.format(channels[i]))
            sys.stdout.write('\n')
            sys.stdout.flush()

    # Get the data and stack it (the data are the columns)
    vdata = []
    for k in range(len(data)):
        fsdown = data[k][0].channel.sample_rate
        down_factor = int(fsdown // fsup)

        fir_aa = sig.firwin(20 * down_factor + 1,
                            0.8 / down_factor,
                            window='blackmanharris')

        # Prevent ringing from DC offset
        DC = np.mean(data[k][0].data)

        # Using fir_aa[1:-1] cuts off a leading and trailing zero
        downdata = sig.decimate(data[k][0].data,
                                down_factor,
                                ftype=sig.dlti(fir_aa[1:-1], 1.0),
                                zero_phase=True)
        vdata.append(downdata)

    if save:
        if not os.path.isdir('Data'):
            os.system('mkdir Data')

        # save to a .mat file
        if output == "None":
            funame = 'Data/' + ifo + '_data_array.mat'
        else:
            funame = 'Data/' + output

        sio.savemat(funame,
                    mdict={
                        'data': vdata,
                        'fsample': fsup,
                        'chans': channels
                    },
                    do_compression=True)

        print("Data saved as " + funame)
    else:
        return np.array(vdata).T, fsup
Example #27
def stream_data(ini_file):
    """
    Stream the requested data from nds2.

    Parameters
    ----------
    ini_file : `str`
        path to configuration file which contains the pipeline parameters

    Returns
    -------
    vdata : `ndarray`
        numpy array containing the requested channel data. if `save_mat` is True,
        the data is saved to a mat file and not returned
    fsup : `int`
        sample rate of the collected data
    """

    # Read config file
    try:
        settings = ConfigParser.ConfigParser()
    except NameError:
        settings = configparser.ConfigParser()
    settings.optionxform = str
    settings.read(ini_file)

    # Unpack configs
    dur = settings.getint('Data', 'duration')
    fname = settings.get('Data', 'chanlist')
    fs = settings.getint('Data', 'fs')
    ifo = settings.get('Data', 'ifo')
    output = settings.get('Data', 'output')
    portNumber = settings.getint('Data', 'portNumber')
    save = settings.getboolean('Data', 'save_mat')
    data_dir = settings.get('Data', 'data_dir')
    times = settings.get('Data', 'data_start')

    nds_osx = ('/opt/local/Library/Frameworks/Python.framework/' +
               'Versions/2.7/lib/python2.7/site-packages/')
    nds_sandbox = '/usr/lib/python2.7/dist-packages/'

    import sys
    if os.path.exists(nds_osx):
        sys.path.append(nds_osx)
    elif os.path.exists(nds_sandbox):
        sys.path.append(nds_sandbox)

    # Collect channels and times
    chan_head = ifo + ':'
    chanlines, custom_times = read_chans_and_times(fname)
    channels = [chan_head + line for line in chanlines]

    # Get data start time
    if ifo == 'L1':
        ndsServer = 'nds.ligo-la.caltech.edu'
    elif ifo == 'H1':
        ndsServer = 'nds.ligo-wa.caltech.edu'
    else:
        sys.exit("unknown IFO specified")

    # Setup connection to the NDS
    try:
        conn = nds2.connection(ndsServer, portNumber)
    except RuntimeError:
        print('ERROR: Need to run `kinit albert.einstein` before nds2 '
              'can establish a connection')
        sys.exit(1)

    #if __debug__:
    #    print(("Output sample rate: {} Hz".format(fsup)))
    #    # print("Channel List:\n-------------")
    #    # print("\n".join(channels))

    # Setup start and stop times
    t = Time(times, format='iso', scale='utc')
    t_start = int(t.gps)

    print(("Getting data from " + ndsServer + "..."))
    data = []
    for i in range(len(custom_times)):
        if custom_times[i] is None:
            custom_times[i] = t_start

        try:
            temp = conn.fetch(custom_times[i], custom_times[i] + dur,
                              [channels[i]])
            sys.stdout.write("\033[0;32m")
            sys.stdout.write('\r  [{}] '.format('\u2713'))
            sys.stdout.write("\033[0;0m")
            sys.stdout.write('{} '.format(channels[i]))
            sys.stdout.write('\n')
            sys.stdout.flush()
            # only keep the buffer if the fetch actually succeeded
            data.append(temp)
        except RuntimeError:
            sys.stdout.write("\033[1;31m")
            sys.stdout.write('\r  [{}] '.format('\u2717'))
            sys.stdout.write("\033[0;0m")
            sys.stdout.write('{} '.format(channels[i]))
            sys.stdout.write('\n')
            sys.stdout.flush()

    # Get the data and stack it (the data are the columns)
    vdata = []
    for k in range(len(data)):
        fs_data = data[k][0].channel.sample_rate
        resample_data = sig.resample(
            data[k][0].data, int(1. * len(data[k][0].data) * fs / fs_data))
        vdata.append(resample_data)

    if save:
        if data_dir == "None":
            data_dir = 'Data'

        if not os.path.isdir(data_dir):
            os.system('mkdir %s' % data_dir)

        # save to a .mat file
        if output == "None":
            funame = os.path.join(data_dir,
                                  '%s_%d_%d.mat' % (ifo, t_start, dur))
        else:
            funame = os.path.join(data_dir, output)

        sio.savemat(funame,
                    mdict={
                        'data': vdata,
                        'fsample': fs,
                        'chans': channels
                    },
                    do_compression=True)

        print(("Data saved as " + funame))
    else:
        return np.array(vdata).T, fs
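
A minimal sketch of the [Data] section this function expects, with the keys read by the get* calls above (all values are illustrative):

[Data]
duration   = 600
chanlist   = channels.txt
fs         = 256
ifo        = H1
output     = None
portNumber = 31200
save_mat   = True
data_dir   = None
data_start = 2017-01-04 12:00:00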
Example #28
log_dir = "/opt/rtcds/llo/l1/log/l1susdrift/"
log_filename = "%sSUSDRIFT_%s_%s-%s.log" % (log_dir,optic_name,int(opts.starttime),int(opts.duration))
logging.basicConfig(filename=log_filename, level=logging.DEBUG)

def nds_fetch_old_data(connection, channel, start, end):
    """Fetch data from NDS using the standard fetch method
    """
    return connection.fetch(int(math.floor(start)), int(math.ceil(end)),
                            [channel])[0]


logging.info("Start time: %s" % opts.starttime)
logging.info("Duration: %s" % opts.duration)

# open connection to NDS2
connection = nds2.connection(opts.host, opts.port)
logging.info("Connected to NDS host %s:%d" % (opts.host, opts.port))

for optic in optics:
    stage = STAGE[optic]
    DEGREES_OF_FREEDOM = DOF[optic]
    logging.info("Updating %s..." % optic)
    for dof in DEGREES_OF_FREEDOM:
	
        bound_optic = BOUND_OPTIC[optic]
        bound = bound_optic[dof]

        logging.info("    %s: " % dof)
        channel = CHANNEL % (ifo, optic, stage, dof)
        buffer_ = nds_fetch_old_data(connection, channel, gpsstart, gpsstart+opts.duration)
        data = buffer_.data
Example #29
    f.close()
    print "Saved GPS times in logfile: %s" % filename+".log"
else:
    ### load logfile
    f = open(sys.argv[1])
    lines = f.readlines()
    logfile = []
    for line in lines:
        tok = line.split()
        logfile.append([int(tok[0]), int(tok[1]), tok[2]])
    f.close()

######## Main loop for data processing ########################################################################################################

# open NDS connection
conn = nds2.connection('nds.ligo-wa.caltech.edu', 31200)
# preallocate the sensing matrix and the coherence matrix
matrix = zeros([len(readout), len(excitations)], dtype='complex128')
cohe   = zeros([len(readout), len(excitations)])

# loop over all excitation channels
for exc,mon,log,i,frequency in zip(excitations, monitors, logfile, range(len(excitations)), freqs): 
    print "Analyzing excitation %s, monitored with %s, data from %d to %d" % (exc, mon, log[0], log[1])
    ### read data (trying more times if the data is not yet on disk)
    waitfordata = True
    while waitfordata:
        try:
            channels = readout[:]
            channels.append(mon)
            bufs = conn.fetch(log[0], log[1], channels)   
            waitfordata = False
Example #30
def _get_timeseries_dict(channels, segments, config=None,
                         cache=None, query=True, nds=None, frametype=None,
                         multiprocess=True, return_=True, statevector=False,
                         archive=True, datafind_error='raise', **ioargs):
    """Internal method to retrieve the data for a set of like-typed
    channels using the :meth:`TimeSeriesDict.read` accessor.
    """
    channels = list(map(get_channel, channels))

    # set classes
    if statevector:
        ListClass = StateVectorList
        DictClass = StateVectorDict
    else:
        ListClass = TimeSeriesList
        DictClass = TimeSeriesDict

    # check we have a configparser
    if config is None:
        config = GWSummConfigParser()

    # read segments from global memory
    keys = dict((c.ndsname, make_globalv_key(c)) for c in channels)
    havesegs = reduce(operator.and_,
                      (globalv.DATA.get(keys[channel.ndsname],
                                        ListClass()).segments
                       for channel in channels))
    new = segments - havesegs

    # get processes
    if multiprocess is True:
        nproc = count_free_cores()
    elif multiprocess is False:
        nproc = 1
    else:
        nproc = multiprocess

    if globalv.VERBOSE and not multiprocess:
        verbose = '    '
    else:
        verbose = False

    # read channel information
    filter_ = dict()
    resample = dict()
    dtype_ = dict()
    for channel in channels:
        try:
            filter_[channel.ndsname] = channel.filter
        except AttributeError:
            pass
        try:
            resample[channel] = float(channel.resample)
        except AttributeError:
            pass
        if channel.dtype is None:
            dtype_[channel] = ioargs.get('dtype')
        else:
            dtype_[channel] = channel.dtype

    # work out whether to use NDS or not
    if nds is None and cache is not None:
        nds = False
    elif nds is None:
        nds = 'LIGO_DATAFIND_SERVER' not in os.environ

    # read new data
    query &= (abs(new) > 0)
    if cache is not None:
        query &= len(cache) > 0
    if query:
        for channel in channels:
            globalv.DATA.setdefault(keys[channel.ndsname], ListClass())
        # open NDS connection
        if nds and config.has_option('nds', 'host'):
            host = config.get('nds', 'host')
            port = config.getint('nds', 'port')
            try:
                ndsconnection = nds2.connection(host, port)
            except RuntimeError as e:
                if 'SASL authentication' in str(e):
                    from gwpy.io.nds import kinit
                    kinit()
                    ndsconnection = nds2.connection(host, port)
                else:
                    raise
            frametype = source = 'nds'
            ndstype = channels[0].type
        elif nds:
            ndsconnection = None
            frametype = source = 'nds'
            ndstype = channels[0].type
        # or find frame type and check cache
        else:
            ifo = channels[0].ifo
            frametype = frametype or channels[0].frametype
            if frametype is not None and frametype.endswith('%s_M' % ifo):
                new = type(new)([s for s in new if abs(s) >= 60.])
            elif frametype is not None and frametype.endswith('%s_T' % ifo):
                new = type(new)([s for s in new if abs(s) >= 1.])
            #elif ((globalv.NOW - new[0][0]) < 86400 * 10 and
            #      frametype == '%s_R' % ifo and
            #      find_types(site=ifo[0], match='_C\Z')):
            #    frametype = '%s_C' % ifo
            if cache is not None:
                fcache = cache.sieve(ifos=ifo[0], description=frametype,
                                     exact_match=True)
            else:
                fcache = Cache()
            if (cache is None or len(fcache) == 0) and len(new):
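                # pad the datafind query by 8 seconds at each end to absorb
                # filtering/resampling edge effects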
                span = new.extent().protract(8)
                fcache = find_frames(ifo, frametype, span[0], span[1],
                                     config=config, gaps='ignore',
                                     onerror=datafind_error)
                if len(fcache) == 0 and frametype == '%s_R' % ifo:
                    frametype = '%s_C' % ifo
                    vprint("    Moving to backup frametype %s\n" % frametype)
                    fcache = find_frames(ifo, frametype, span[0], span[1],
                                         config=config, gaps='ignore',
                                         onerror=datafind_error)

            # parse discontiguous cache blocks and rebuild segment list
            cachesegments = find_cache_segments(fcache)
            new &= cachesegments
            source = 'frames'
        for channel in channels:
            channel.frametype = frametype

        # check whether each channel exists for all new times already
        qchannels = []
        qresample = {}
        qdtype = {}
        for channel in channels:
            oldsegs = globalv.DATA.get(keys[channel.ndsname],
                                       ListClass()).segments
            if abs(new - oldsegs) != 0:
                qchannels.append(channel)
                if channel in resample:
                    qresample[channel] = resample[channel]
                qdtype[channel] = dtype_.get(channel, ioargs.get('dtype'))
        ioargs['dtype'] = qdtype

        # find channel type
        if not nds:
            ctype = set()
            for channel in qchannels:
                try:
                    ctype.add(channel.ctype)
                except AttributeError:
                    ctype.add(get_channel_type(channel))
            if len(ctype) == 1:
                ctype = list(ctype)[0]
            else:
                ctype = None
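        # (a single shared type is passed through to the frame reader;
        # mixed types are left as None for auto-detection)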
        # loop through segments, recording data for each
        if len(new) and nproc > 1:
            vprint("    Fetching data (from %s) for %d channels [%s]"
                   % (source, len(qchannels), nds and ndstype or frametype))
        for segment in new:
            # force reading integer-precision segments
            segment = type(segment)(int(segment[0]), int(segment[1]))
            if abs(segment) < 1:
                continue
            if nds:
                tsd = DictClass.fetch(qchannels, segment[0], segment[1],
                                      connection=ndsconnection, type=ndstype,
                                      **ioargs)
            else:
                # pad resampling
                if segment[1] == cachesegments[-1][1] and qresample:
                    resamplepad = 8
                    if abs(segment) <= resamplepad:
                        continue
                    segment = type(segment)(segment[0],
                                            segment[1] - resamplepad)
                    segcache = fcache.sieve(
                                   segment=segment.protract(resamplepad))
                else:
                    segcache = fcache.sieve(segment=segment)
                # set minute trend times modulo 60 from GPS 0
                if (re.match(r'(?:(.*)_)?[A-Z]\d_M', str(frametype)) or
                        (ifo == 'C1' and frametype == 'M')):
                    segstart = int(segment[0]) // 60 * 60
                    segend = int(segment[1]) // 60 * 60
                    if segend >= segment[1]:
                        segend -= 60
                    # and ignore segments shorter than 1 full average
                    if (segend - segstart) < 60:
                        continue
                else:
                    segstart, segend = map(float, segment)
                # pull filters out because they can break multiprocessing
                # (e.g. filter objects may not pickle for worker processes)
                if nproc > 1:
                    for c in qchannels:
                        if c.ndsname in filter_:
                            del c.filter
                # read data
                tsd = DictClass.read(segcache, qchannels,
                                     start=segstart, end=segend, type=ctype,
                                     nproc=nproc, resample=qresample,
                                     verbose=verbose, **ioargs)
                # put filters back
                for c in qchannels:
                    if c.ndsname in filter_:
                        c.filter = filter_[c.ndsname]
            for (channel, data) in tsd.items():
                key = keys[channel.ndsname]
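                # skip spans already archived in global memory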
                if (key in globalv.DATA and
                        data.span in globalv.DATA[key].segments):
                    continue
                if data.unit is None:
                    data.unit = 'undef'
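                # crop out any overlap with data already stored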
                for seg in globalv.DATA[key].segments:
                    if seg.intersects(data.span):
                        data = data.crop(*(data.span - seg))
                        break
                try:
                    filt = filter_[channel.ndsname]
                except KeyError:
                    pass
                else:
                    # filter with function
                    if callable(filt):
                        try:
                            data = filt(data)
                        except TypeError as e:
                            if 'Can only apply' in str(e):
                                data.value[:] = filt(data.value)
                            else:
                                raise
                    # filter with gain: a 3-tuple zpk with no zeros or poles
                    elif (isinstance(filt, tuple) and len(filt) == 3 and
                          len(filt[0] + filt[1]) == 0):
                        try:
                            data *= filt[2]
                        except TypeError:
                            data = data * filt[2]
                    # filter zpk
                    elif isinstance(filt, tuple):
                        data = data.filter(*filt)
                    # filter fail
                    else:
                        raise ValueError("Cannot parse filter for %s: %r"
                                         % (channel.ndsname,
                                            filt))
                if isinstance(data, StateVector) or ':GRD-' in str(channel):
                    try:
                        data.unit = units.dimensionless_unscaled
                    except AttributeError:
                        data._unit = units.dimensionless_unscaled
                    if hasattr(channel, 'bits'):
                        data.bits = channel.bits
                elif data.unit is None:
                    data._unit = channel.unit
                # XXX: HACK for failing unit check: force the unit to match
                # previously archived data before coalescing
                if len(globalv.DATA[key]):
                    data._unit = globalv.DATA[key][-1].unit
                # update channel type for trends
                if (data.channel.type is None and
                        data.channel.trend is not None):
                    if data.dt.to('s').value == 1:
                        data.channel.type = 's-trend'
                    elif data.dt.to('s').value == 60:
                        data.channel.type = 'm-trend'
                # append and coalesce
                add_timeseries(data, key=key, coalesce=True)
            if multiprocess:
                vprint('.')
        if len(new):
            vprint("\n")

    if not return_:
        return

    # return correct data
    out = OrderedDict()
    for channel in channels:
        data = ListClass()
        if keys[channel.ndsname] in globalv.DATA:
            for ts in globalv.DATA[keys[channel.ndsname]]:
                for seg in segments:
                    if abs(seg) == 0 or abs(seg) < ts.dt.value:
                        continue
                    if ts.span.intersects(seg):
                        common = map(float, ts.span & seg)
                        cropped = ts.crop(*common, copy=False)
                        if cropped.size:
                            data.append(cropped)
        out[channel.ndsname] = data.coalesce()
    return out
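
# A minimal usage sketch: assuming this internal function backs a public
# ``get_timeseries_dict`` wrapper (as in gwsumm); the channel name and GPS
# times below are hypothetical:
#
#     >>> from gwpy.segments import Segment, SegmentList
#     >>> segs = SegmentList([Segment(1187008866, 1187008898)])
#     >>> data = get_timeseries_dict(['L1:GDS-CALIB_STRAIN'], segs)
#     >>> data['L1:GDS-CALIB_STRAIN']  # TimeSeriesList cropped to segs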