Example no. 1
 def readX(self,
           correlation,
           t1=None,
           t2=None,
           period=24 * 3600,
           select=True,
           stack=None,
           **kwargs):
     st = Stream()
     if t2 is None:
         file_ = self.getX(
             correlation, t1, period=period, stack=stack, **kwargs) + '.QHD'
         if t1 is None:
             st += read(file_)
         else:
             for file_ in glob(file_):
                 st += read(file_)
     else:
         if (period == 'day' or period >= 3600 or
                 (stack is not None and stack >= 3600)):
             iterator = yeargen(t1, t2)
         else:
             iterator = timegen(t1, t2, dt=24 * 3600)
         for t in iterator:
             file_ = self.getX(correlation, t, period=period,
                               stack=stack, **kwargs) + '.QHD'
             try:
                 st += read(file_)
             except (ValueError, IOError):
                 log.warning(
                     'An error occurred when trying to read file %s' % file_)
         if select:
             st = st.select(expr='%r<st.starttime<%r' %
                            (t1 - 0.1, t2 + 0.1))
     return st
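A minimal usage sketch (illustration only, not from the source): assuming `data` is an object exposing readX() as defined above, one month of daily cross-correlations could be read like this; the station pair is a placeholder.

# Hypothetical usage sketch: `data` and the correlation pair are placeholders.
from obspy.core import UTCDateTime as UTC

t1, t2 = UTC('2008-01-01'), UTC('2008-02-01')
# st = data.readX(('PB01Z', 'PB02Z'), t1, t2, period=24 * 3600)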
Example no. 2
def shift_and_correlate(ms, t_event, window):
    """
    Calculates shifts of phases compared to reference mean trace.    
    """
    #ms1, ms2 = split_stream(ms, t_event)
    n1 = getind(ms, t_event)
    tr_mean = ms.calculate('mean')
    sec, dt, _unused_cor, func = window
    tr_mean_slice = Stream([tr_mean.copy()])
    tr_mean_slice.trim2(sec - dt, sec + dt, relative='ponset')
    time_mean, _ = tr_mean_slice[0].getArgMax(ret='time',
                                              spline=True,
                                              func=func)
    # convert from slice-relative to ponset-relative time
    sec = time_mean = time_mean + sec - dt
    sec1, sec2 = sec - dt, sec + dt

    ms_slice = ms.copy()
    ms_slice.trim2(sec1, sec2, relative='ponset')
    time, maxi = ms_slice.getArgMax(ret='time', spline=True, func=func)
    shift_max = Shift(time - dt, maxi, get_stat(time - dt, n1, sec))

    cor = ms.correlate(tr_mean, dt, start=sec1, end=sec2, relative='ponset')
    time, maxi = cor.getArgMax(ret='time', spline=True)
    shift_old = Shift(time - dt, maxi, get_stat(time - dt, n1, sec))

    cor2 = ms.correlate_numpy(tr_mean,
                              start=sec1,
                              end=sec2,
                              start2=sec1 - dt,
                              end2=sec2 + dt,
                              relative='ponset')
    time, maxi = cor2.getArgMax(ret='time', spline=True)
    shift_cor = Shift(time - dt, maxi, get_stat(time - dt, n1, sec))

    score = scipy.stats.scoreatpercentile(shift_cor.cors, PERC)
    score = round(score, 2)
    score -= score % 0.05
    if score == 1:
        score -= 0.05
    n2 = np.count_nonzero(maxi[:n1] >= score)
    time = time[maxi >= score]
    maxi = maxi[maxi >= score]
    shift_cor_score = Shift(time - dt, maxi, get_stat(time - dt, n2, sec))
    return Results(time_mean, PERC, score, shift_max, shift_old, shift_cor,
                   shift_cor_score)
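Shift and Results are not defined in this excerpt; judging from the attribute access above (shift_cor.cors), minimal stand-ins could look like the named tuples below. This is an assumption for readability, not the library's actual definitions.

# Assumed scaffolding, not from the source: containers matching the attribute
# access in shift_and_correlate (e.g. shift_cor.cors).
from collections import namedtuple

Shift = namedtuple('Shift', ['shifts', 'cors', 'stat'])
Results = namedtuple('Results', ['time_mean', 'perc', 'score', 'shift_max',
                                 'shift_old', 'shift_cor', 'shift_cor_score'])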
Example no. 3
 def getChannelFromClient(self,
                          starttime,
                          endtime,
                          station,
                          network='CX',
                          location='',
                          channel='*'):
     ms = Stream(
         self.client.getWaveform(network, station, location, channel,
                                 starttime, endtime))
     if len(ms) == 0:
         raise ValueError('No traces in stream returned by seishub.')
     return ms
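A hedged call sketch: that getChannelFromClient hangs off the same data object as the other client helpers is an assumption here, and the station/channel values are placeholders.

# Hypothetical usage sketch: `data` is assumed to own getChannelFromClient and
# a configured SeisHub client; values are placeholders.
from obspy.core import UTCDateTime as UTC

t = UTC('2008-01-01')
# ms = data.getChannelFromClient(t, t + 3600, 'PB01', channel='HHZ')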
Example no. 4
def stack(data, correlations, dt=-1, filters=None, period=24 * 3600, shift=None, onefile=False, yearfiles=False):
    #t1 = t1.__class__(t1.date)
    #t2 = t2.__class__(t2.date)
    log.info('Stack correlations: %s' % util.parameters())
    print 'Stack correlations... '
    if filters is None:
        filters = (None,)
    stack = Stream()
    last_year = None
    for correlation in ProgressBar()(correlations):
        for filter_ in filters:
            try:
                st = read(data.getX(correlation, '*', filter=filter_, period=period) + '.QHD')
            except Exception as err:
                log.warning('Could not load file, because:\n%s' % str(err))
            else:
                print correlation
                for some_traces in streamtimegen(st, dt=dt, start=None, shift=shift):
                    tr = some_traces.calculate('mean')
                    stack.append(tr)
                    this_year = (some_traces[0].stats.starttime).year
                    if last_year is None:
                        last_year = this_year
                    #if yearfiles and (some_traces[0].stats.starttime + period).julday == 1 and len(stack) > 0:
                    if yearfiles and this_year != last_year and len(stack) > 0:
                        data.writeX(stack, correlation, time=some_traces[0].stats.starttime - 365 * 24 * 3600, filter=filter_, period=period, stack=(dt, shift))
                        last_year = this_year
                        stack = Stream()
                if not onefile:
                    if yearfiles:
                        time = some_traces[0].stats.starttime
                    else:
                        time = None
                    if len(stack) > 0:
                        data.writeX(stack, correlation, time=time, filter=filter_, period=period, stack=(dt, shift))
                    last_year = None
                    stack = Stream()

    if onefile:
        data.writeX(stack, ('all', 'all'), time=None, filter=filters[0], period=period, stack=(dt, shift))
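A hedged driver sketch for stack(): the Data-like object and the correlation list are placeholders, and dt/shift in seconds is an assumption based on streamtimegen as used in the final example.

# Hypothetical driver sketch: `data` provides getX/writeX as used above;
# dt/shift in seconds is an assumption based on streamtimegen.
correlations = [('PB01Z', 'PB02Z'), ('PB01Z', 'PB03Z')]
# stack(data, correlations, dt=10 * 24 * 3600, shift=2 * 24 * 3600,
#       yearfiles=True)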
Example no. 5
def stretch(stream, reftr=None, stretch=None, start=None, end=None, relative='starttime', str_range=0.1, nstr=100, time_windows=None, sides='right'):
    sr = stream[0].stats.sampling_rate
    if time_windows is not None and isinstance(time_windows[1], (float, int)):
        tw_mat = time_windows_creation(np.array(time_windows[0]) * int(sr),
                                       time_windows[1] * int(sr))
    else:
        raise ValueError('Wrong format for time_windows')
    log.debug('getting data...')
    data = getDataWindow(stream, start=start, end=end, relative=relative)
    data[np.isnan(data)] = 0  # bug fix
    data[np.isinf(data)] = 0
    if reftr != 'alternative':
        if hasattr(reftr, 'stats'):
            assert reftr.stats.sampling_rate == sr
            ref_data = getDataWindow(Stream([reftr]), start=start, end=end, relative=relative)[0, :]
        else:
            ref_data = reftr
        log.debug('calculate correlations and time shifts...')
        return time_stretch_estimate(data, ref_trc=ref_data, tw=tw_mat,
                                     stretch_range=str_range,
                                     stretch_steps=nstr, sides=sides)
    else:
        assert len(tw_mat) == len(stretch)
        tses = []
        log.debug('calculate correlations and time shifts...')
        for i in range(len(tw_mat)):
            tw = tw_mat[i:i + 1]
            st = stretch[i]
            sim_mat = time_stretch_apply(data, st, single_sided=False)
            ref_data = np.mean(sim_mat, axis=0)
            tse = time_stretch_estimate(data, ref_trc=ref_data, tw=tw,
                                        stretch_range=str_range,
                                        stretch_steps=nstr, sides=sides)
            tses.append(tse)
        for i in ('corr', 'stretch'):
            tse[i] = np.hstack([t[i] for t in tses])
        i = 'sim_mat'
        tse[i] = np.vstack([t[i] for t in tses])
        return tse
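A usage sketch under assumptions: time_windows follows the (window_starts, width) convention implied by the isinstance check above; all values are placeholders.

# Hypothetical usage sketch: time_windows = (window_starts_s, width_s) per the
# isinstance check above; values are placeholders.
# tse = stretch(st, reftr=ref_trace, time_windows=([5., 10., 20.], 5.),
#               str_range=0.05, nstr=201, sides='right')
# tse['corr'], tse['stretch'] and tse['sim_mat'] then hold the similarity,
# the stretching factor per trace and window, and the similarity matrices.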
Example no. 6
    def write(self, filename, format_, **kwargs):
        """
        Saves the current trace into a file.

        Parameters
        ----------
        filename : string
            Name of the output file.
        format_ : string
            Name of the output format.
            See :meth:`~obspy.core.stream.Stream.write` method for all possible
            formats.

        Basic Usage
        -----------
        >>> tr = Trace()
        >>> tr.write("out.mseed", format_="MSEED") # doctest: +SKIP
        """
        # we need to import here in order to prevent a circular import of
        # Stream and Trace classes
        from sito.stream import Stream
        Stream([self]).write(filename, format_, **kwargs)
Example no. 7
 def getRawStreamFromClient(self,
                            starttime,
                            endtime,
                            station,
                            component='Z',
                            channel=None):
     if component in ('all', 'ZNE', 'Z12'):
         component = '?'
     network = 'CX'
     if not channel:
         channel = 'HH' + component
     location = ''
     if station == 'LVC':
         #channel 1 2 support
         if component == 'N':
             component = '{N,1}'
         elif component == 'E':
             component = '{E,2}'
         network = 'GE'
         channel = 'BH' + component
         location = '10'
     ms = Stream(self.client.getWaveform(network, station, location,
                                         channel, starttime, endtime))
     if len(ms) == 0:
         raise ValueError('No traces in stream returned by client.')
     if station == 'LVC':
         for tr in ms:
             ch = tr.stats.channel
             if ch[-1] in '12':
                 tr.stats.channel = ch[:-1] + ('N' if ch[-1] == '1' else 'E')
     return ms
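getRawStreamFromClient is exercised end-to-end in the final example of this listing; the minimal call, repeated here for reference (station and date are placeholders):

# Minimal call sketch; see the final example for the full script.
from sito.data import IPOC
from obspy.core import UTCDateTime as UTC

data = IPOC()
t = UTC('2008-01-01')
stream = data.getRawStreamFromClient(t, t + 3600, 'PB01', component='Z')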
Example no. 8
def stack_day(data, correlations, dt=-1, start=None, onefile=False):
    #t1 = t1.__class__(t1.date)
    #t2 = t2.__class__(t2.date)
    log.info('Stack day correlations: %s' % util.parameters())
    if start is not None:
        dt_log = '%s-%s' % (dt, start)
    else:
        dt_log = dt
    stack = Stream()
    for correlation in correlations:
        try:
            days = read(data.getXDay(correlation, '*') + '.QHD')
        except Exception as err:
            log.warning('Could not load file, because:\n%s' % str(err))
        else:
            for somedays in streamdaygen(days, dt=dt, start=start):
                tr = somedays.calculate('mean')
                stack.append(tr)
            if not onefile:
                data.writeXDayStack(stack, correlation, dt_log)
                stack = Stream()
    if onefile:
        data.writeXDayStack(stack, ('all', 'all'), dt_log)
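stack_day follows the same driver pattern as stack() above; a hedged one-liner (that dt counts days here is an assumption based on streamdaygen):

# Hypothetical driver sketch, analogous to stack() above; dt in days is an
# assumption based on streamdaygen.
# stack_day(data, correlations, dt=30, onefile=False)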
Example no. 9
def eventPicker(
        data,
        component='all',
        phase='P',
        window=(-100, 400),
        filter=(None, None),
        new_sampling_rate=100,
        write=True,  #@ReservedAssignment
        **kwargs):
    """
    Pick window around onset of events from mseed files.

    The resulting stream is written in separate files for each station and year.
    :param data: data object with stations property and getRawStream,
                 writeRFEvents methods
    :param events: file with events, Events object or None (in this case kwargs
        have to be defined) - passed to _getEvents
    :param component: 'Z', 'N', 'E' or 'all'
    :param phase: which onset is used: 'P', 'PP', 'S' or something else;
        note that events must show this phase for the stations
    :param window: window around the ponset in seconds
    :param filter: filter stream between these frequencies
    :param new_sampling_rate: downsample stream to this sampling rate
    :param write: if True: everything is written to files
        if False: return stream object
    :kwargs: passed to _getEvents
        - in the end they are passed to events.Events.load function
        if param events == None
    """
    log.info('Start event picker: %s' % util.parameters())
    try:
        log.info('Data used %s' % data.raw)
    except AttributeError:
        log.info('Data regex used %s' % data.raw_regex)
    log.info('Extracted data for events will be saved in %s' % data.rf_events)
    if data.events is None and len(kwargs) == 0:
        raise Exception('No arguments to determine events!')
    failure_list = []
    if write:
        stream_return = None
    else:
        stream_return = Stream()
    stations = data.stations
    all_events = _getEvents(data.events, **kwargs)
    all_events.sort()
    log.info('Events between %s and %s' %
             (all_events[0].datetime.date, all_events[-1].datetime.date))
    first_year = all_events[0].datetime.year
    last_year = all_events[-1].datetime.year
    for station_name, station in stations.items():
        for year in range(first_year, last_year + 1):
            events = all_events.pick(after='%s-1-1' % year,
                                     before='%s-1-1' % (year + 1),
                                     replace=False)
            stream_year = Stream()
            for event in events:
                dist = util.gps2DistDegree(station.latitude, station.longitude,
                                           event.latitude, event.longitude)
                baz = gps2DistAzimuth(station.latitude, station.longitude,
                                      event.latitude, event.longitude)[1]
                arrival = util.ttt(dist, event.depth).findPhase(phase)
                if arrival is None:
                    log.warning(
                        'Phase %s not present at distance %s depth %s' %
                        (phase, dist, event.depth))
                    arrival = util.ttt(dist, event.depth)[0]
                onset = event.datetime + arrival.time
                t1 = onset + window[0]
                t2 = onset + window[1]
                try:
                    stream = data.getRawStream(t1, station_name, component, t2)
                except Exception as ex:
                    failure_list.append((station_name, event.id, str(ex)))
                    continue
                # Write header entries and basic data processing (filtering, downsampling)
                stats = AttribDict({
                    'event': event,
                    'station': station_name,
                    'dist': dist,
                    'azi': baz,
                    'inci': arrival.inci,
                    phase.lower() + 'onset': onset,
                    'slowness': arrival.slow,
                    'filter': ''
                })
                for trace in stream:
                    trace.stats.update(stats)
                stream_year.extend(stream)
            if len(stream_year) > 0:
                stream_year.demean()
                stream_year.detrend()
                if filter[0] is not None or filter[1] is not None:
                    stream_year.filter2(freqmin=filter[0], freqmax=filter[1])
                if new_sampling_rate <= (
                        max(stream_year.getHI('sampling_rate')) / 2.):
                    stream_year.downsample2(new_sampling_rate)
                if write:
                    data.writeRFEvents(stream_year, station_name,
                                       event.datetime)
                else:
                    stream_return.extend(stream_year)
    if len(failure_list) > 0:
        log.warning('Failed to load the data for:\nstation     event.id     '
                    'reason\n' +
                    '\n'.join([' '.join(entry) for entry in failure_list]))
    if write:
        return failure_list
    else:
        return stream_return, failure_list
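A hedged driver sketch for eventPicker: `data` must provide the stations property and the getRawStream/writeRFEvents methods documented above; the filter corners are placeholders.

# Hypothetical driver sketch: `data` provides stations, getRawStream and
# writeRFEvents as documented above; filter corners are placeholders.
# failures = eventPicker(data, component='all', phase='P', window=(-100, 400),
#                        filter=(0.033, 2.), new_sampling_rate=20)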
Example no. 10
def noisexcorrf(data, correlations, t1, t2, shift_sec, period=24 * 3600,
                 pool=None, max_preload=5, overlap=0):
    """
    Frequency domain noise cross correlation

    Expects day files prepared with prepare()
    """
    if period == 'day':
        period = 24 * 3600
    elif period == 'hour':
        period = 3600
    log.info('Noise cross correlation: %s' % util.parameters())
    print 'Noise cross correlation...'
#    if period != 24 * 3600:
#        raise ValueError('function at the moment only '
#                         'working with period=24*3600.')
    if 24 * 3600 % period != 0:
        raise ValueError('period has to be a factor of a day')
    for correlation in ProgressBar()(correlations):
        autocorr = correlation[0] == correlation[1]
        station1 = correlation[0][:-1]
        station2 = correlation[1][:-1]
        comp1 = correlation[0][-1]
        comp2 = correlation[1][-1]
#        data1 = FloatingStream(data, t1, station1, shift_sec, component=comp1, period=period)
#        if not autocorr:
#            data2 = FloatingStream(data, t1, station2, 0, component=comp2, period=period)
        xcorr = [] if pool else Stream()
        stream1 = None
        for t in timegen(t1, t2, 24 * 3600):
            if len(xcorr) > 0 and (t - period).date != t.date and (
                    (period >= 3600 and t.julday == 1) or period < 3600):
                data.writeX(_get_async_results(xcorr) if pool else xcorr,
                            correlation, t - period, period=period)
                xcorr = [] if pool else Stream()
            if (not (data.getStream(t, station1, component=comp1, check=True) and
                     (autocorr or data.getStream(t, station2, component=comp2, check=True)))):
                log.debug('No data for %s %s' % (str(correlation), t))
                continue
            try:
                if overlap == 0 or stream1 is None:
                    stream1 = data.getStream(t, station1, component=comp1)
                else:
                    stream1 = stream1 + data.getStream(t, station1, component=comp1)
                    stream1.merge()
                    stream1.trim(t - overlap, None)
                if autocorr:
                    stream2 = stream1
                else:
                    if overlap == 0:
                        stream2 = data.getStream(t, station2, component=comp2)
                    else:
                        raise NotImplementedError()
            except ValueError:
                log.warning('Could not load data for %s %s' % (str(correlation), t))
                continue
            if not (len(stream1) == len(stream2) == 1):
                log.debug('Stream is not compatible (%d,%d traces)' % (len(stream1), len(stream2)))
                continue
            tr1 = stream1[0]
            tr2 = stream2[0]
            if tr1.stats.sampling_rate != tr2.stats.sampling_rate:
                raise ValueError('Sampling rate is different.')
            if period == 24 * 3600:
                # check data
                if tr1.stats.npts != tr2.stats.npts:
                    log.info('Discard data because of different npts %d vs %d' % (tr1.stats.npts, tr2.stats.npts))
                    continue
                log.debug('Calculating xcorr for %s' % t)
                # original implementation only ok for period == 'day'
                args = (tr1, tr2, shift_sec, correlation)
                if pool:
                    if len(xcorr) >= max_preload:
                        xcorr[-max_preload].wait()
                    xcorr.append(pool.apply_async(_noisexcorr_traces, args))
                else:
                    xcorr.append(_noisexcorr_traces(*args))
            # new implementation for all other periods
            else:
                if tr1.stats.is_fft:
                    tr1.ifft()
                if tr2.stats.is_fft:
                    tr2.ifft()
                t1_ = tr1.stats.starttime
                while t1_ + period <= tr1.stats.endtime + 1:
                    t2_ = t1_ + period
                    tr1_ = tr1.slice(t1_, t2_)
                    tr2_ = tr2.slice(t1_, t2_)
                    args = (tr1_, tr2_, shift_sec, correlation)
                    if pool:
                        if len(xcorr) >= max_preload:
                            xcorr[-max_preload].wait()
                        xcorr.append(pool.apply_async(_noisexcorr_traces, args))
                    else:
                        xcorr.append(_noisexcorr_traces(*args))
                    t1_ = t1_ + period - overlap
        if len(xcorr) > 0:
        data.writeX(_get_async_results(xcorr) if pool else xcorr,
                        correlation, t, period=period)
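A hedged driver sketch: correlations are station+component pairs, and the optional pool follows the multiprocessing pattern collected by the helper below.

# Hypothetical driver sketch: an optional multiprocessing.Pool parallelizes
# the per-window work (results are collected by the helper below).
from multiprocessing import Pool
from obspy.core import UTCDateTime as UTC

# pool = Pool(processes=4)
# noisexcorrf(data, [('PB01Z', 'PB02Z')], UTC('2008-01-01'),
#             UTC('2008-02-01'), shift_sec=100, period=3600, pool=pool)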
Example no. 11
def _get_async_results(async_results):
    xcorr = Stream()
    for res in async_results:
        xcorr.append(res.get())
    return xcorr
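The helper expects a list of multiprocessing AsyncResult objects; mirroring noisexcorrf above, such a list is built roughly like this (sketch; work_items is hypothetical):

# Sketch of the expected input, mirroring noisexcorrf above; work_items is a
# hypothetical iterable of (tr1, tr2, shift_sec, correlation) tuples.
# xcorr = [pool.apply_async(_noisexcorr_traces, args) for args in work_items]
# stream = _get_async_results(xcorr)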
Example no. 12
class FloatingStream(object):
    def __init__(self, data, starttime, station, shift_sec, component, period=24 * 3600, save=0.0, use_get_raw=False):
        """
        Initialize instance.

        :param data: Data object
        :param shift_sec: correlation will be calculated by shifting with this
            maximum amount of seconds
        :param component: one component or 'all'
        """
        self.station = station
        self.shift_sec = shift_sec
        self.component = component
        self.period = period
        if use_get_raw:
            self.getRawStream = self.getStream
            self.data_getStream = data.getRawStream
        else:
            self.data_getStream = data.getStream
        #if self.shift_sec != 0:
#        try:
#            self.stream = self.data_getStream(day - shift_sec - reserve, station, component)
#        except ValueError as err:
#            self.stream = Stream()
#            warnings.warn('Day %s  Station %s  Comp %s:  %s' % ((day - shift_sec - reserve).date, self.station, self.component, str(err)))
        self.save = save
        self.stream = Stream()
        self.use_day = True
        try:
            self.getStream(starttime - 48. * 3600)
        except ValueError:
            pass
        try:
            self.getStream(starttime - 24. * 3600)
        except ValueError:
            pass
        self.time = starttime
        self.use_day = False
    def dontGetStream(self):
        self.time += self.period
        self.stream.trim(self.time - self.shift_sec, None)

    def getStream(self, day=None, *args, **kwargs):  #@UnusedVariable
        """
        Return a stream for xcorr.

        :param day: UTCDateTime with beginning of the day
        :args, kwargs: not used
        """
        #if self.shift_sec == 0:
        #    return self.data.getStream(day, self.station, component=self.component)
        #else:
        if self.use_day and day:
            self.time = day
        old_time = self.time
        self.time += self.period
#        if len(self.stream) == 0:
#            try:
#                self.stream = self.data_getStream(day, self.station,
#                                                  component=self.component)
#            except ValueError as err:
#                warnings.warn('Day %s  Station %s  Comp %s:  %s' %
#                              (next_day.date, self.station, self.component,
#                               str(err)))
#        elif day - 1 > self.stream[0].stats.endtime:
#            log.warning('Something went wrong getting the stream. Continue...')
#            try:
#                self.stream = self.data_getStream(day, self.station,
#                                                  component=self.component)
#            except ValueError as err:
#                pass


#            st = self.data_getStream(day, self.station,
#                                     component=self.component)
#            if st[0].stats.endtime > self.stream[0].stats.endtime:
#                self.stream += st
#                self.stream.merge()

#        if (len(self.stream) == 0 or
#            self.time + self.shift_sec > self.stream[0].stats.endtime - self.save):

        try:
            self.stream += self.data_getStream(self.time, self.station, component=self.component)
            #print 'to_merge ', self.stream
            self.stream.merge(method=1, interpolation_samples=10)
            # fill gaps introduced by merging with zeros
            if np.ma.is_masked(self.stream[0].data):
                self.stream[0].data = np.ma.filled(self.stream[0].data, 0.)

        except ValueError as err:
            msg = '%s' % (str(err))
            warnings.warn(msg)
        ret_stream = self.stream.slice(old_time - self.shift_sec, self.time + self.shift_sec)
        self.stream.trim(self.time - self.shift_sec, None)
        #if component == 'all' and len(self.stream) != 3 or component != 'all' and len(self.stream)!=1:
        #    self.stream = Stream()
        if len(ret_stream) != len(self.component):
            raise ValueError('No data for station %s comp %s time %s!' % (
                                self.station, self.component, old_time))
        return ret_stream
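A hedged usage sketch: each getStream() call serves the next period-long window, padded by shift_sec on both sides; station and dates are placeholders.

# Hypothetical usage sketch: consecutive day windows padded by shift_sec.
from obspy.core import UTCDateTime as UTC

# fs = FloatingStream(data, UTC('2008-01-01'), 'PB01', shift_sec=100,
#                     component='Z', period=24 * 3600)
# day1 = fs.getStream()  # 2008-01-01, padded by 100 s on each side
# day2 = fs.getStream()  # 2008-01-02, likewise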
Example no. 13
    def getRawStream(self, date, station, component="Z", endtime=False, checkfile=False):
        if component == "all":
            component = "ZNE"
        NC = len(component)
        if NC > 1:
            stream = Stream()
            if checkfile:
                stream = []
            for comp in component:
                stream.extend(self.getRawStream(date, station, comp, endtime))
            if checkfile:
                import numpy as np

                return np.all(stream)
            # if None in stream:
            #    raise ValueError('One or more component is None')
            Ns = [stream[i].stats.npts for i in range(NC)]
            # N1, N2, N3 = len(st_list[0]), len(st_list[1]), len(st_list[2])
            # Ns = (N1, N2, N3)
            if max(Ns) - min(Ns) > 1:
                raise ValueError("Components have different length")
            elif max(Ns) - min(Ns) == 1:
                for i in range(NC):
                    if Ns[i] > min(Ns):
                        stream[i].data = stream[i].data[:-1]
                        stream[i].stats.npts -= 1
            # return st_list[0] + st_list[1] + st_list[2]
            return stream
        if station == "LVC":
            log.warning("Using BH channel for LVC")
            file_ = self.lookForMseed(date, station, "BH" + component)
        else:
            file_ = self.lookForMseed(date, station, "HH" + component)
        if file_ is None:
            raise ValueError("No IPOC file for %s %s %s" % (station, component, date.date))
        elif checkfile:
            return True
        merge_later = False
        try:
            if endtime and date.julday == endtime.julday:
                ms = read(file_, format="MSEED", starttime=date, endtime=endtime)
            elif endtime and date.julday != endtime.julday:
                border = date.__class__(date.date) + 24 * 3600
                ms1 = read(file_, starttime=date)  # , endtime=border)
                ms2 = self.getRawStream(border, station, component, endtime)
                ms = ms1 + ms2
                ms.merge()
                merge_later = True
            else:
                ms = read(file_)
        except (ValueError, TypeError) as ex:
            raise ValueError("Error reading IPOC file %s because:\n%s" % (file_, str(ex)))
        if len(ms) == 0:
            raise ValueError("No traces in IPOC stream!")
        if station == "LVC":
            for tr in ms:
                if tr.stats.channel[-1] == "1":
                    tr.stats.channel = tr.stats.channel[:-1] + "N"
                elif tr.stats.channel[-1] == "2":
                    tr.stats.channel = tr.stats.channel[:-1] + "E"
        if any([network == "NC" for network in ms.getHI("network")]):
            # change network code to CX
            ms.setHI("network", "CX")
            if merge_later:
                ms.merge()
        return ms
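A hedged call sketch for getRawStream: one day of all three components for an assumed IPOC station; the values are placeholders.

# Hypothetical usage sketch: values are placeholders.
from obspy.core import UTCDateTime as UTC

# ms = data.getRawStream(UTC('2008-01-01'), 'PB01', component='all')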
Example no. 14
from sito.data import IPOC
from obspy.core import UTCDateTime as UTC
import matplotlib.pyplot as plt
from sito.util.main import streamtimegen
from sito.stream import Stream
from progressbar import ProgressBar

data = IPOC()
t_day = UTC('2008-01-01')
station = 'PB01'

stream = data.getRawStreamFromClient(
    t_day, t_day + 24 * 3600, station, component='Z')
stream.setHI('filter', '')
stream.demean()
stream.filter2(0.5, 5)
stream.trim2(0, 5 * 3600)

auto = Stream()
for st in streamtimegen(stream, dt=60, start=None, shift=30, use_slice=True):
    tr = st[0].copy()
    tr.addZeros(60)
    tr.acorr(60)
    auto.append(tr)

print auto
auto.plotXcorr()
stream.plot(type='dayplot')

plt.show()