Beispiel #1
0
def co_downsample(q, n=None, ftype='fir'):
    '''Successively downsample broken continuous trace data (Pump).

    Create coroutine which takes :py:class:`Trace` objects, downsamples their
    data and yields new :py:class:`Trace` objects containing the downsampled
    data.  This is useful, if one wants to downsample a long continuous time
    series, which is split into many successive traces without producing filter
    artifacts and gaps at trace boundaries.

    Filter states are kept *per channel*, specifically, for each (network,
    station, location, channel) combination occurring in the input traces, a
    separate state is created and maintained. This makes it possible to filter
    multichannel or multistation data with only one :py:func:`co_lfilter`
    instance.

    Filter state is reset, when gaps occur. The sampling instances are chosen
    so that they occur at (or as close as possible to) even multiples of the
    sampling interval of the downsampled trace (based on system time).

    :param q: downsampling factor (ratio of input to output sampling interval)
    :param n: anti-alias filter order; if ``None``, a default is chosen by
        ``util.decimate_coeffs`` -- TODO confirm against that helper
    :param ftype: filter type, presumably ``'fir'`` or ``'iir'`` (both are
        referenced in the comments below); verify against
        ``util.decimate_coeffs``
    '''

    # Anti-alias filter coefficients; n comes back as the effective filter
    # order actually used.
    b, a, n = util.decimate_coeffs(q, n, ftype)
    anti = co_lfilter(b, a)
    newtr = None
    # Per-channel state: number of input samples to skip at the start of the
    # next incoming trace so the decimation grid continues seamlessly.
    states = States()
    try:
        while True:
            # Yield the previously produced trace, receive the next input.
            tr = yield newtr
            tr = anti.send(tr)
            newdeltat = q * tr.deltat
            ioffset = states.get(tr)
            if ioffset is None:
                # No state yet for this channel (first trace, or a gap):
                # for fir filter, the first n samples are polluted by boundary
                # effects; cut them off.
                # for iir this may be (much) more, we do not correct for that.
                # put sample instances to a time which is a multiple of the new
                # sampling interval.
                newtmin_want = math.ceil(
                    (tr.tmin +
                     (n + 1) * tr.deltat) / newdeltat) * newdeltat - (
                         n / 2 * tr.deltat)
                ioffset = int(round((newtmin_want - tr.tmin) / tr.deltat))
                if ioffset < 0:
                    # Wrap a negative offset into [0, q) so the first output
                    # sample still lies on the downsampled grid.
                    ioffset = ioffset % q

            newtmin_have = tr.tmin + ioffset * tr.deltat
            newtr = tr.copy(data=False)
            newtr.deltat = newdeltat
            newtr.tmin = newtmin_have - (
                n / 2 * tr.deltat
            )  # because the fir kernel shifts data by n/2 samples
            newtr.set_ydata(tr.get_ydata()[ioffset::q].copy())
            # Remember the carry-over offset for this channel's next trace.
            states.set(tr, (ioffset % q - tr.data_len() % q) % q)

    except GeneratorExit:
        anti.close()
Beispiel #2
0
def co_downsample(q, n=None, ftype='fir'):
    '''Successively downsample broken continuous trace data (Pump).

    Coroutine accepting :py:class:`Trace` objects via ``send()`` and yielding
    new :py:class:`Trace` objects containing anti-alias filtered, decimated
    data. Useful for downsampling a long continuous time series which arrives
    split across many successive traces, without introducing filter artifacts
    or gaps at trace boundaries.

    A separate filter state is maintained for each (network, station,
    location, channel) combination occurring in the input, so multichannel or
    multistation data can be handled with a single :py:func:`co_lfilter`
    instance. Filter state is reset whenever a gap occurs. Output sample
    instants are placed at (or as close as possible to) even multiples of the
    output sampling interval (based on system time).'''

    b, a, n = util.decimate_coeffs(q, n, ftype)
    antialias = co_lfilter(b, a)
    outtr = None
    offsets = States()
    try:
        while True:
            intr = yield outtr
            intr = antialias.send(intr)
            deltat_out = q * intr.deltat
            skip = offsets.get(intr)
            if skip is None:
                # First trace of this channel, or a gap occurred: the first n
                # samples are polluted by fir boundary effects, so they are
                # cut off (for iir the affected span may be much longer; that
                # is not corrected for). Align the output sample instants to
                # multiples of the output sampling interval.
                tmin_wanted = (
                    math.ceil(
                        (intr.tmin + (n + 1) * intr.deltat) / deltat_out)
                    * deltat_out - n / 2 * intr.deltat)
                skip = int(round((tmin_wanted - intr.tmin) / intr.deltat))
                if skip < 0:
                    skip = skip % q

            tmin_actual = intr.tmin + skip * intr.deltat
            outtr = intr.copy(data=False)
            outtr.deltat = deltat_out
            # The fir kernel delays the data by n/2 samples; shift tmin back.
            outtr.tmin = tmin_actual - n / 2 * intr.deltat
            outtr.set_ydata(intr.get_ydata()[skip::q].copy())
            # Carry-over offset so the decimation grid continues seamlessly
            # into this channel's next trace.
            offsets.set(intr, (skip % q - intr.data_len() % q) % q)

    except GeneratorExit:
        antialias.close()
Beispiel #3
0
def co_downsample(target, q, n=None, ftype='fir'):
    '''Successively downsample broken continuous trace data (coroutine).

    Create coroutine which takes :py:class:`Trace` objects, downsamples their
    data and sends new :py:class:`Trace` objects containing the downsampled
    data to *target*. This is useful, if one wants to downsample a long
    continuous time series, which is split into many successive traces,
    without producing filter artifacts and gaps at trace boundaries.

    Filter states are kept *per channel*, specifically, for each (network,
    station, location, channel) combination occurring in the input traces, a
    separate state is created and maintained. This makes it possible to filter
    multichannel or multistation data with only one :py:func:`co_lfilter`
    instance.

    Filter state is reset, when gaps occur. The sampling instances are chosen
    so that they occur at (or as close as possible to) even multiples of the
    sampling interval of the downsampled trace (based on system time).'''

    # Only the effective filter order is needed here; the coefficients are
    # recomputed inside co_antialias.
    n = util.decimate_coeffs(q, n, ftype)[2]
    dropper = co_dropsamples(target, q, n)
    return co_antialias(dropper, q, n, ftype)
Beispiel #4
0
def co_downsample(target, q, n=None, ftype='fir'):
    '''Successively downsample broken continuous trace data (coroutine).

    Build a two-stage coroutine pipeline which takes :py:class:`Trace`
    objects, anti-alias filters and decimates their data, and sends new
    :py:class:`Trace` objects containing the downsampled data to *target*.
    This is useful, if one wants to downsample a long continuous time series,
    which is split into many successive traces, without producing filter
    artifacts and gaps at trace boundaries.

    Filter states are kept *per channel*, specifically, for each (network,
    station, location, channel) combination occurring in the input traces, a
    separate state is created and maintained. This makes it possible to filter
    multichannel or multistation data with only one :py:func:`co_lfilter`
    instance.

    Filter state is reset, when gaps occur. The sampling instances are chosen
    so that they occur at (or as close as possible to) even multiples of the
    sampling interval of the downsampled trace (based on system time).'''

    # The coefficients themselves are unused here; only the effective filter
    # order is forwarded to both pipeline stages.
    _b, _a, order = util.decimate_coeffs(q, n, ftype)
    return co_antialias(co_dropsamples(target, q, order), q, order, ftype)
Beispiel #5
0
def co_antialias(target, q, n=None, ftype='fir'):
    '''Create anti-alias filtering stage for downsampling by factor *q*.

    Computes decimation filter coefficients via ``util.decimate_coeffs`` and
    returns a :py:func:`co_lfilter` coroutine applying them, which forwards
    its output traces to *target*.'''
    b, a, _ = util.decimate_coeffs(q, n, ftype)
    return co_lfilter(target, b, a)
Beispiel #6
0
def co_antialias(target, q, n=None, ftype='fir'):
    '''Create anti-alias filtering stage for downsampling by factor *q*.

    Obtains decimation filter coefficients from ``util.decimate_coeffs`` and
    wraps them in a :py:func:`co_lfilter` coroutine feeding *target*.'''
    coeffs = util.decimate_coeffs(q, n, ftype)
    return co_lfilter(target, coeffs[0], coeffs[1])