Example #1
 def readX(self,
           correlation,
           t1=None,
           t2=None,
           period=24 * 3600,
           select=True,
           stack=None,
           **kwargs):
     st = Stream()
     if t2 is None:
         file_ = self.getX(
             correlation, t1, period=period, stack=stack, **kwargs) + '.QHD'
         if t1 is None:
             st += read(file_)
         else:
             for file_ in glob(file_):
                 st += read(file_)
     else:
         if period == 'day' or period >= 3600 or stack >= 3600:
             iterator = yeargen(t1, t2)
         else:
             iterator = timegen(t1, t2, dt=24 * 3600)
         for t in iterator:
             file_ = self.getX(
                 correlation, t, period=period, stack=stack, **kwargs) + '.QHD'
             try:
                 st += read(file_)
             except (ValueError, IOError):
                 log.warning(
                     'An error occurred when trying to read file %s' % file_)
         if select:
             st = st.select(expr='%r<st.starttime<%r' %
                            (t1 - 0.1, t2 + 0.1))
     return st
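A hypothetical usage sketch for readX (the data instance and the correlation
key are assumptions, not from the source; times are obspy UTCDateTime values,
matching the date arithmetic used throughout these examples):

    # hypothetical: read one month of daily cross-correlation files
    from obspy import UTCDateTime
    t1 = UTCDateTime(2008, 1, 1)
    t2 = UTCDateTime(2008, 2, 1)
    st = data.readX(('PB01Z', 'PB02Z'), t1, t2, period=24 * 3600)
    print(len(st))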
Example #2
 def setUp(self):
     # set specific seed value such that random numbers are reproducible
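     # seed() and random() presumably come from numpy.random, and read()
     # from this package's stream module (assumed imports)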
     seed(42)
     self.Z = random(412) - 0.5
     self.N = random(412) - 0.5
     self.E = random(412) - 0.5
     self.stream = read(os.path.join(os.path.dirname(__file__), 'data', 'PKD_1997_246.mseed'))
Example #3
def test():
    import numpy as np
    import pylab as plt
    from sito.stream import read
    from sito.util import cosTaper, get_window
    plt.plot(cosTaper(1000, 0.1))
    plt.plot(get_window(('gaussian', 100), 1000))

    N = 10000
    data = np.concatenate((get_window(('gaussian', 10), N // 4),
                           -2 * get_window(('gaussian', 10), N // 4),
                           get_window(('gaussian', 100), N // 4),
                           np.zeros(N // 4)))
    src = np.concatenate((get_window(('gaussian', 10), N // 10),
                          np.zeros(N * 9 // 10)))

    #deconvfAnalyse(data, src, 100, water=0.1, gauss=10, tshift=10., pad=0) # try gauss=2, 10, 100
    dummy = deconvf(data, src, 100, water=0.01, gauss=2, tshift=10., pad=0)

    ms = read('./tests/data_temp/STREAM.QHD')[0:3]
    deconvfAnalyse(ms[1].data,
                   ms[0].data,
                   100,
                   water=0.01,
                   gauss=2,
                   tshift=10.,
                   pad=0)  # try gauss=2, 10, 100
    plt.show()
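A self-contained sketch of the window construction used above, assuming that
sito.util.get_window wraps scipy.signal.get_window (the tuple form selects a
Gaussian window with the given standard deviation in samples):

    from scipy.signal import get_window

    w_narrow = get_window(('gaussian', 10), 1000)   # sharp pulse, std = 10 samples
    w_broad = get_window(('gaussian', 100), 1000)   # broad pulse, std = 100 samples
    print(w_narrow.shape, w_broad.shape)            # (1000,) (1000,)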
Example #4
def main():
    ipoc = IPOCStations()
    ms = read('/home/richter/Results/IPOC/xcorr/1bit/stack/all_stack_-1.QHD')
    setHIDist(ms, ipoc)
#    for tr in ms:
#        st = tr.stats.station
#        if 'PB12' in st or 'PAT' in st or 'HMBC' in st or 'LVC' in st:
#            ms.remove(tr)
    print (list(set(ms[0].stats.station.split('-'))))[0]
    ms = ms.select("(st.station.split('-'))[0][:-1] in 'PB03 PB04 PB07 PB09 PB10 PB14 PATCX'.split()")
    ms = ms.select(autocorr=True)
#    ms.plot_(-200, 200, relative='middle', absolutescale=10, annotate=True)
#    ms.plotXcorrVsDist(-300, 300, absolutescale=200)
#    ms.plotXcorrVsDist(-300, relative='starttime', absolutescale=500)
#    plt.show()
#    ms.filter2(0.05, 0.5)

    #ms = ms.select('st.dist<455')
    ms.addXcorrSides()
#    for tr in ms:
#        tr.data = envelope(tr.data)   
#    ms.normalize()    
    ms.downsample2(5)
#    ms.trim2(100 + np.array(ms.getHI('dist')) / 3., None)
#    ms.plotXcorrVsDist(0, 300, relative='starttime')
#    ms.plot_(absolutescale=1)
#    ms = ms[:1]
#    ms = ms[:1]
#    ms.plot_()
#    plt.show()
#    return
    print ms

    lats = np.linspace(-25, -17, 1500)
    lons = np.linspace(-74, -66, 1500)
Example #5
 def setUp(self):
     self.path = os.path.dirname(__file__)
     file_ = os.path.join(self.path, 'data_temp', 'STREAM.QHD')
     try:
         self.stream = read(file_)
     except Exception as ex:
         raise IOError('First start the test for data: ' + str(ex))
     self.stream_short = self.stream.copy()
     self.stream.trim2(-50, 200)
Example #6
 def test_trace(self):
     file_ = os.path.join(self.path, 'data_temp', 'STREAM.QHD')
     try:
         mt = read(file_)[0]
     except IOError:
         raise IOError('First start the test for data.')
     dummy = 'repr(mt) = ' + str(repr(mt)) + '\nstr(mt) = ' + str(mt)
     dummy += '\nmt.stats = ' + str(mt.stats) + '\nmt.data = ' + str(mt.data)
     dummy += '\nmt.print_(mod=0) = ' + mt.print_(mod=0)
     dummy += '\nmt.print_(mod=1) = ' + mt.print_(mod=1)
Example #7
def filter(data, correlations, filters, stack=None, period=24 * 3600):  #@ReservedAssignment
    log.info('Filter correlations: %s' % util.parameters())
    for correlation in correlations:
        expr = data.getX(correlation, '????', filter_=None, period=period, stack=stack) + '.QHD'
        files = glob(expr)
        for file_ in files:
            try:
                st = read(file_)
            except Exception as err:
                log.warning('Could not load file, because:\n%s' % str(err))
                continue
            for filter_ in filters:
                st2 = st.copy()
                st2.filter2(*filter_)
                data.writeX(st2, correlation, st[0].stats.endtime, filter=filter_, period=period, stack=stack)
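A hypothetical call sketch (the data object and the correlation keys are
assumptions); each filter tuple is forwarded unchanged to Stream.filter2, e.g.
the 0.05-0.5 Hz band used elsewhere in these examples:

    correlations = [('PB01Z', 'PB02Z'), ('PB03Z', 'PB04Z')]  # hypothetical keys
    filter(data, correlations, filters=[(0.05, 0.5)], period=24 * 3600)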
Example #8
def stack(data, correlations, dt=-1, filters=None, period=24 * 3600, shift=None, onefile=False, yearfiles=False):
    #t1 = t1.__class__(t1.date)
    #t2 = t2.__class__(t2.date)
    log.info('Stack correlations: %s' % util.parameters())
    print 'Stack correlations... '
    if filters is None:
        filters = (None,)
    stack = Stream()
    last_year = None
    for correlation in ProgressBar()(correlations):
        for filter_ in filters:
            try:
                st = read(data.getX(correlation, '*', filter=filter_, period=period) + '.QHD')
            except Exception as err:
                log.warning('Could not load file, because:\n%s' % str(err))
            else:
                print correlation
                for some_traces in streamtimegen(st, dt=dt, start=None, shift=shift):
                    tr = some_traces.calculate('mean')
                    stack.append(tr)
                    this_year = (some_traces[0].stats.starttime).year
                    if last_year is None:
                        last_year = this_year
                    #if yearfiles and (some_traces[0].stats.starttime + period).julday == 1 and len(stack) > 0:
                    if yearfiles and this_year != last_year and len(stack) > 0:
                        data.writeX(stack, correlation, time=some_traces[0].stats.starttime - 365 * 24 * 3600, filter_=filter_, period=period, stack=(dt, shift))
                        last_year = this_year
                        stack = Stream()
                if not onefile:
                    if yearfiles:
                        time = some_traces[0].stats.starttime
                    else:
                        time = None
                    if len(stack) > 0:
                        data.writeX(stack, correlation, time=time, filter=filter_, period=period, stack=(dt, shift))
                    last_year = None
                    stack = Stream()

    if onefile:
        data.writeX(stack, ('all', 'all'), time=None, filter=filters[0], period=period, stack=(dt, shift))
Example #9
def stack_day(data, correlations, dt=-1, start=None, onefile=False):
    #t1 = t1.__class__(t1.date)
    #t2 = t2.__class__(t2.date)
    log.info('Stack day correlations: %s' % util.parameters())
    if start is not None:
        dt_log = '%s-%s' % (dt, start)
    else:
        dt_log = dt
    stack = Stream()
    for correlation in correlations:
        try:
            days = read(data.getXDay(correlation, '*') + '.QHD')
        except Exception as err:
            log.warning('Could not load file, because:\n%s' % str(err))
        else:
            for somedays in streamdaygen(days, dt=dt, start=start):
                tr = somedays.calculate('mean')
                stack.append(tr)
            if not onefile:
                data.writeXDayStack(stack, correlation, dt_log)
                stack = Stream()
    if onefile:
        data.writeXDayStack(stack, ('all', 'all'), dt_log)
Example #10
 def getRawStream(self,
                  date,
                  station,
                  component='Z',
                  endtime=False,
                  checkfile=False):
     if component == 'all':
         component = 'ZNE'
     NC = len(component)
     if NC > 1:
         stream = Stream()
         if checkfile:
             stream = []
         for comp in component:
             stream.extend(self.getRawStream(date, station, comp, endtime))
         if checkfile:
             import numpy as np
             return np.all(stream)
         #if None in stream:
         #    raise ValueError('One or more component is None')
         Ns = [stream[i].stats.npts for i in range(NC)]
         #N1, N2, N3 = len(st_list[0]), len(st_list[1]), len(st_list[2])
         #Ns = (N1, N2, N3)
         if max(Ns) - min(Ns) > 1:
             raise ValueError('Components have different length')
         elif max(Ns) - min(Ns) == 1:
             for i in range(NC):
                 if Ns[i] > min(Ns):
                     stream[i].data = stream[i].data[:-1]
                     stream[i].stats.npts -= 1
         #return st_list[0] + st_list[1] + st_list[2]
         return stream
     if station == 'LVC':
         log.warning('Using BH channel for LVC')
         file_ = self.lookForMseed(date, station, 'BH' + component)
     else:
         file_ = self.lookForMseed(date, station, 'HH' + component)
     if file_ is None:
         raise ValueError('No IPOC file for %s %s %s' %
                          (station, component, date.date))
     elif checkfile:
         return True
     merge_later = False
     try:
         if endtime and date.julday == endtime.julday:
             ms = read(file_,
                       format='MSEED',
                       starttime=date,
                       endtime=endtime)
         elif endtime and date.julday != endtime.julday:
             border = date.__class__(date.date) + 24 * 3600
             ms1 = read(file_, starttime=date)  #, endtime=border)
             ms2 = self.getRawStream(border, station, component, endtime)
             ms = ms1 + ms2
             ms.merge()
             merge_later = True
         else:
             ms = read(file_)
     except (ValueError, TypeError) as ex:
         raise ValueError('Error reading IPOC file %s because:\n%s' %
                          (file_, str(ex)))
     if len(ms) == 0:
         raise ValueError('No traces in IPOC stream!')
     if station == 'LVC':
         for tr in ms:
             if tr.stats.channel[-1] == '1':
                 tr.stats.channel = tr.stats.channel[:-1] + 'N'
             elif tr.stats.channel[-1] == '2':
                 tr.stats.channel = tr.stats.channel[:-1] + 'E'
     if any([network == 'NC' for network in ms.getHI('network')]):
         # change network code to CX
         ms.setHI('network', 'CX')
         if merge_later:
             ms.merge()
     return ms
Example #11
    def getStream(self,
                  date,
                  station,
                  component='Z',
                  endtime=False,
                  filename=None,
                  check=False):
        """
        Return a stream from local day files.

        :param date: if endtime==False: The whole day is returned
                     if endtime!=False: starttime for stream
        :param component: 'Z', 'N', 'E' or 'all'
        :param endtime: read from filename only till endtime (if != False)
        :param filename: use this expression as filename (None= use self.raw)
        :return: one-component or 3-component stream
        """

        if filename is None:
            filename = self.getstr + '.QHD'
        filename = filename % (station, date.year, date.julday)
        if check:
            return os.path.isfile(filename)
        if not os.path.isfile(filename):
            raise ValueError('No filename for %s %s %s at %s' %
                             (station, component, date.date, filename))
        try:
            if endtime and date.julday == (endtime - 0.001).julday:
                ms = read(filename, starttime=date, endtime=endtime)
            elif endtime and date.julday != endtime.julday:
                border = date.__class__(date.date) + 24 * 3600
                ms1 = read(filename, starttime=date)  #, endtime=border)
                ms2 = self.getStream(border, station, component, endtime)
                ms = ms1 + ms2
                ms.merge()
            else:
                ms = read(filename)
        except Exception as ex:
            raise ValueError('Error reading filename %s because:\n%s' %
                             (filename, str(ex)))
        #if component == 'all':
        #    component = 'ZNE'

        NC = len(component)
        if NC == 1:
            ms = ms.select(component=component)
            ms.merge()
            if len(ms) > 1:
                raise ValueError('Gaps in data')
        else:
            if NC == 2:
                ms = (ms.select(component=component[0]) +
                      ms.select(component=component[1]))
            if len(ms) > NC:
                raise ValueError('Gaps in data')
            elif len(ms) < NC:
                raise ValueError('Not enough components')
            Ns = [ms[i].stats.npts for i in range(NC)]
            #N1, N2, N3 = len(ms[0]), len(ms[1]), len(ms[2])
            #Ns = (N1, N2, N3)
            if max(Ns) - min(Ns) > 1:
                raise ValueError('Components have different length')
            elif max(Ns) - min(Ns) == 1:
                for i in range(NC):
                    if Ns[i] > min(Ns):
                        ms[i].data = ms[i].data[:-1]
                        ms[i].stats.npts -= 1
        if len(ms) == 0:
            raise ValueError('No traces in stream!')
        return ms
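A hypothetical usage sketch based on the docstring above (the data instance and
the station name are assumptions):

    from obspy import UTCDateTime

    day = UTCDateTime(2008, 6, 1)
    st_day = data.getStream(day, 'PB01')               # whole day, Z component
    st_6h = data.getStream(day, 'PB01', component='Z',
                           endtime=day + 6 * 3600)     # only the first six hours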
Example #12
    return s2 - s1

def get_vel(stream):
    ms = stream.copy()
    ms.addXcorrSides()
    for tr in ms:
        tr.data = np.abs(envelope(tr.data))
    dists = ms.getHI('dist')
    maxi = ms.getMaxima()
    v, _ = curve_fit(lambda x, a: x * a, maxi, dists, p0=1)
    return v[0]


#ms = read('/home/richter/Results/IPOC/xcorr/FINAL_filter0.01-0.5_1bit_whitening/stack/day_PB0[12345]Z-PB0[12345]Z_stack_all.QHD')
path = '/home/richter/Results/IPOC/xcorr/FINAL_filter0.01-0.5_1bit_whitening'
ms = read(path + '/stack/day_*_stack_all.QHD')
output = path + '/xcorr_vs_dist.pdf'

ipoc = IPOCStations()
setHIDist(ms, ipoc)

print 'no correlation for pairs:', no_corr_pairs(ms, ipoc)
v = get_vel(ms)
#v = 3.03093386
print 'velocity:', v
fig = plt.figure(figsize=(10, 12))
plot = ms.plotXcorrVsDist(-300, 300, scale=10, fig=fig,
                          figtitle='%d cross-correlations' % len(ms))
plot.ax.plot((-300, 0, 300), (300 * v, 0, 300 * v), 'r')

d = 30
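For reference, a self-contained sketch of the through-the-origin fit performed
in get_vel above: distance is modelled as velocity times arrival time and
solved with scipy's curve_fit (the numbers are made up):

    import numpy as np
    from scipy.optimize import curve_fit

    traveltimes = np.array([50., 100., 150., 200.])       # hypothetical times (s)
    dists = 3.0 * traveltimes + np.random.randn(4) * 5.0  # hypothetical distances (km)
    v, _ = curve_fit(lambda x, a: a * x, traveltimes, dists, p0=1)
    print('velocity estimate: %.2f km/s' % v[0])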