Example #1
 def test_trigger_onset(self):
     """
     Test trigger onset function
     """
     on_of = np.array([[6.0, 31], [69, 94], [131, 181], [215, 265],
                       [278, 315], [480, 505], [543, 568], [605, 631]])
     cft = np.concatenate((np.sin(np.arange(0, 5 * np.pi, 0.1)) + 1,
                           np.sin(np.arange(0, 5 * np.pi, 0.1)) + 2.1,
                           np.sin(np.arange(0, 5 * np.pi, 0.1)) + 0.4,
                           np.sin(np.arange(0, 5 * np.pi, 0.1)) + 1))
     picks = trigger_onset(cft, 1.5, 1.0, max_len=50)
     np.testing.assert_array_equal(picks, on_of)
     # check that max_len_delete drops the picks
     picks_del = trigger_onset(cft, 1.5, 1.0, max_len=50,
                               max_len_delete=True)
     np.testing.assert_array_equal(
         picks_del, on_of[np.array([0, 1, 5, 6, 7])])
     #
     # set to True below for a visual check of the tests
     if False:  # pragma: no cover
         import matplotlib.pyplot as plt
         plt.plot(cft)
         plt.hlines([1.5, 1.0], 0, len(cft))
         on_of = np.array(on_of)
         plt.vlines(picks[:, 0], 1.0, 2.0, color='g', linewidth=2,
                    label="ON max_len")
         plt.vlines(picks[:, 1], 0.5, 1.5, color='r', linewidth=2,
                    label="OF max_len")
         plt.vlines(picks_del[:, 0] + 2, 1.0, 2.0, color='y', linewidth=2,
                    label="ON max_len_delete")
         plt.vlines(picks_del[:, 1] + 2, 0.5, 1.5, color='b', linewidth=2,
                    label="OF max_len_delete")
         plt.legend()
         plt.show()
Example #2
def make_trigger(origin_folder, filename, dest_folder, trig_on, trig_of,
                 trigger_type, nsta, nlta):
    stream = obspy.read(os.path.join(origin_folder, filename))

    # `bandpass` is expected to be a module-level (freqmax, freqmin) pair.
    freqmin = bandpass[1]
    freqmax = bandpass[0]
    print(freqmin, freqmax)
    filtered = stream.filter("bandpass",
                             freqmin=freqmin,
                             freqmax=freqmax)
    fm = float(filtered[0].stats.sampling_rate)
    to_process = filtered.copy()
    merged = to_process.merge(method=0)

    if np.isnan(merged).any():
        # The merged data contains NaNs (gaps), so fall back to a manual merge.
        to_process = filtered.copy()
        data = merge_numpy(to_process)
        print(type(data), type(nsta), type(nlta))
        cft = recursive_sta_lta(data, int(float(nsta) * fm),
                                int(float(nlta) * fm))
    else:
        # we can process the whole day.
        data = merged[0]
        cft = recursive_sta_lta(data, int(float(nsta) * fm),
                                int(float(nlta) * fm))

    on_of = trigger_onset(cft, trig_on, trig_of)

    save_stalta(dest_folder, filename, data, cft, on_of, trig_on, trig_of, fm)
Example #3
def predict(dtfl, ev_list, dataOperator):
    for c, evi in enumerate(ev_list):
        try:
            if c % 1000 == 0:
                print(c)
            dataset = dtfl.get('data/'+str(evi))
            data = np.array(dataset)

            pre_E = trigger_onset(recursive_sta_lta(
                data[:, 0], config['sta_window'], config['lta_window']), config['on_trigger'], config['off_trigger'])
            pre_N = trigger_onset(recursive_sta_lta(
                data[:, 1], config['sta_window'], config['lta_window']), config['on_trigger'], config['off_trigger'])
            # pre_Z = trigger_onset(recursive_sta_lta(
            #     data[:, 2], config['sta_window'], config['lta_window']), config['on_trigger'], config['off_trigger'])

            N_end_time, E_end_time = 6000, 6000
            if len(pre_E) == 0 and len(pre_N) == 0:
                dataOperator.data_writer(dataset.attrs['trace_name'], dataset.attrs['p_arrival_sample'],
                                        dataset.attrs['s_arrival_sample'], dataset.attrs['coda_end_sample'], 
                                        -1, -1, -1, dataset.attrs['trace_category'], "noise")
                continue

            if dataset.attrs['trace_category'] == 'noise':
                dataOperator.data_writer(dataset.attrs['trace_name'], dataset.attrs['p_arrival_sample'],
                                        dataset.attrs['s_arrival_sample'], dataset.attrs['coda_end_sample'], 
                                        -1, -1, -1, dataset.attrs['trace_category'], "earthquake_local") 
                continue           

            if len(pre_E):
                E_end_time = pre_E[-1][1]

            if len(pre_N):
                N_end_time = pre_N[-1][1]

            end_time = (E_end_time + N_end_time) / 2

            p_pick, s_pick = ar_pick(data[:, 0], data[:, 1], data[:, 2], 100,
                                    1.0, 20.0, 1.0, 0.1, 4.0, 1.0, 2, 8, 0.1, 0.2)

            p_pick, s_pick = p_pick*100, s_pick*100

            # y_true = [float(dataset.attrs['p_arrival_sample']),
            #           float(dataset.attrs['s_arrival_sample']), float(dataset.attrs['coda_end_sample'][0][0])]
            # y_pred = [p_pick, s_pick, end_time]

            # p_true = np.zeros(shape=(6000,))
            # p_true[p_pick-20:p_pick+21] = 1

            # a = np.array(y_true)
            # b = np.array(y_pred)
            # print(a * b)
            # break
            dataOperator.data_writer(dataset.attrs['trace_name'], dataset.attrs['p_arrival_sample'],
                                    dataset.attrs['s_arrival_sample'], dataset.attrs['coda_end_sample'], 
                                    int(p_pick), int(s_pick), int(end_time), dataset.attrs['trace_category'], "earthquake_local")
        except:
            continue
    return
Example #4
    def test_trigger_onset_issue_2891(self):
        """
        Regression test for issue 2891

        This used to raise an error if a trigger was activated near the end of
        the trace and all sample values after that trigger-on point stayed
        above the designated off threshold. This can only happen if the on
        threshold is below the off threshold, which is unusual, but it was
        fixed nevertheless, since people can run into it while experimenting
        with different threshold settings.
        """
        tr = read(os.path.join(self.path,
                               'BW.UH1._.EHZ.D.2010.147.a.slist.gz'))[0]
        cft = recursive_sta_lta(tr.data, 5, 30)
        trigger_onset(cft, 2.5, 3.2)
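A minimal sketch (not part of the test suite) of the situation described above, assuming only numpy and obspy: the on threshold sits below the off threshold, so the characteristic function is still above the off level when the trace ends and the final trigger never closes normally.

import numpy as np
from obspy.signal.trigger import trigger_onset

# Synthetic characteristic function: quiet, then permanently above both thresholds.
cft = np.concatenate((np.full(100, 1.0), np.full(50, 4.0)))
# thr_on (2.5) below thr_off (3.2): this used to raise, and should now simply
# close the open trigger at the end of the trace.
print(trigger_onset(cft, 2.5, 3.2))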
Example #5
def detector(args, yh1):
    """
    return two dictionaries and one numpy array:
        
        matches --> {detection statr-time:[ detection end-time,
                                           detection probability,
                                           
                                           ]}
                
    """

    detection = trigger_onset(yh1, args.detection_threshold,
                              args.detection_threshold / 2)

    EVENTS = {}
    matches = {}

    if len(detection) > 0:
        for ev in range(len(detection)):
            D_prob = np.mean(yh1[detection[ev][0]:detection[ev][1]])
            D_prob = np.round(D_prob, 3)
            EVENTS.update({detection[ev][0]: [D_prob, detection[ev][1]]})

    for ev in EVENTS:
        bg = ev
        ed = EVENTS[ev][1]

        if int(ed - bg) >= 1:
            matches.update({bg: [ed, EVENTS[ev][0]]})
    return matches
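A hedged usage sketch for detector with a synthetic probability curve; in the original code args comes from the surrounding CLI, and only detection_threshold is needed here, so a SimpleNamespace stands in for it.

import numpy as np
from types import SimpleNamespace

# Synthetic detection-probability stream with a single clear event.
yh1 = np.zeros(1000)
yh1[200:260] = 0.9

args = SimpleNamespace(detection_threshold=0.3)
print(detector(args, yh1))  # expected: one detection starting near sample 200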
Example #6
def find_peaks(sig, thresh):
    itriggers = trigger_onset(sig, thres1=thresh, thres2=thresh)
    try:
        n_peaks, n_bid = itriggers.shape
    except AttributeError:
        n_peaks = 0
    return n_peaks, itriggers
Example #7
def predictinput(sacfile, algo):
    #print(input_path)
    bhnfile = read("/home/shilpa/Desktop/earthquakeproject/files/" + sacfile)
    bhn_tr = bhnfile[0]
    df = bhn_tr.stats.sampling_rate
    bhn_trigger = recursive_sta_lta(bhn_tr.data, int(5 * df), int(10 * df))
    bhnonoff = trigger_onset(bhn_trigger, 1.2, 0.5)
    p_pick, s_pick = ar_pick(bhnfile[0].data, bhnfile[0].data, bhnfile[0].data,
                             df, 1.0, 20.0, 1.0, 0.1, 4.0, 1.0, 2, 8, 0.1, 0.2)
    data = []
    temp = []
    temp.append(200)
    temp.append(p_pick)
    temp.append(s_pick)
    temp.append(int(bhnonoff[0][1]) - int(bhnonoff[0][0]))
    data.append(temp)
    print(data)
    if algo == "decisiontree":
        prediction = d_model.predict(data)
    elif algo == "randomforest":
        prediction = r_model.predict(data)
    elif algo == "svm":
        prediction = r_model.predict(data)
    result = " "
    if (prediction[0] == 1):
        result = "an Earthquake"
    else:
        result = "No Earthquake"
    return result
Example #8
def get_triggers_stalta(st):

    len_sta, len_lta, trig_on, trig_off, freqmin, freqmax = \
        load_stalta_parameters()

    stations_list = []
    triggers_list = []
    if len(st) > 0:
        for tr in st:
            try:
                tr.detrend('demean')
                tr.detrend('linear')
                tr.taper(max_percentage=0.015, type='hann')
                tr.filter("bandpass",
                          freqmin=freqmin,
                          freqmax=freqmax,
                          corners=2)

                cft_rec = recursive_sta_lta(
                    tr.data, int(len_sta * tr.stats.sampling_rate),
                    int(len_lta * tr.stats.sampling_rate))
                on_off = trigger_onset(cft_rec, trig_on, trig_off)
                triggers = []
                for trig in on_off:
                    on = tr.times("utcdatetime")[trig[0]]
                    off = tr.times("utcdatetime")[trig[1]]
                    triggers.append([on, off])

                stations_list.append(tr.stats.station)
                triggers_list.append(triggers)

            except:
                continue

    return stations_list, triggers_list
Example #9
    def __analise_prediction(self, prob):
        picks = []
        if not (isinstance(prob, np.ndarray) or isinstance(prob, list)):
            print(type(prob))
            raise TypeError("prob must be a list or a numpy.array "
                            "instead it got type {}".format(type(prob)))
        try:
            triggers = trigger_onset(prob, self.max_proba, self.min_proba)

            for trigger in triggers:
                index_on, index_off = trigger  # index of when event is On and Off.

                # gets the index where the maximum is between On and Off
                index_of_max = index_on + np.argmax(prob[index_on:index_off]) \
                    if index_on < index_off else index_on

                stamp_pick = self.get_time_from_index(index_of_max)
                picks.append(stamp_pick)
                print("{}".format(stamp_pick.isoformat()))

        except TypeError:
            # this error happens when an empty prob list is passed.
            pass

        return picks
Example #10
def update_cft(prev_val, selected=None):
    print(ticker_alg.value)
    if ticker_alg.value == 'Classic STA/LTA':
        from obspy.signal.trigger import classic_sta_lta, trigger_onset
        on = trigger_slider.value[1]
        off = trigger_slider.value[0]
        cft = classic_sta_lta(st[0].data, 
                              int(stalta_slider.value[0] * st[0].stats.sampling_rate),
                              int(stalta_slider.value[1] * st[0].stats.sampling_rate))
        on_off = np.array(trigger_onset(cft, on, off))

        source_stalta.data = dict(times=st[0].times(), cft=cft)
        source_triggers.data = dict(ontimes=st[0].times()[on_off[:,0]], y=np.zeros(on_off[:,0].shape))
        #source_triggers.data = dict(offtimes=st[0].times()[on_off[:,1]], y=np.zeros(on_off[:,1].shape))

        sta_on.location = on
        sta_off.location = off
        
    elif ticker_alg.value == 'Recursive STA/LTA':
        from obspy.signal.trigger import recursive_sta_lta, trigger_onset
        on = trigger_slider.value[1]
        off = trigger_slider.value[0]
        cft = recursive_sta_lta(st[0].data,
                                int(stalta_slider.value[0] * st[0].stats.sampling_rate),
                                int(stalta_slider.value[1] * st[0].stats.sampling_rate))
        on_off = np.array(trigger_onset(cft, on, off))

        source_stalta.data = dict(times=st[0].times(), cft=cft)
        source_triggers.data = dict(ontimes=st[0].times()[on_off[:,0]], y=np.zeros(on_off[:,0].shape))
        #source_triggers.data = dict(offtimes=st[0].times()[on_off[:,1]], y=np.zeros(on_off[:,1].shape))

        sta_on.location = on
        sta_off.location = off

    elif ticker_alg.value == 'Carl-Sta-Trig [Not Yet Implemented]':
        from obspy.signal.trigger import carl_sta_trig, trigger_onset
        on = 3000
        off = -500
        cft = carl_sta_trig(st[0].data, int(5 * st[0].stats.sampling_rate), int(10 * st[0].stats.sampling_rate), 0.8, 0.8)
        on_off = np.array(trigger_onset(cft, on, off))

        source_stalta.data = dict(times=st[0].times(), cft=cft)
        source_triggers.data = dict(ontimes=st[0].times()[on_off[:,0]], y=np.zeros(on_off[:,0].shape))

        sta_on.location = on
        sta_off.location = off

    else:
        print(ticker_alg.value + ' is not yet implemented.')
        ticker_alg.value = prev_val
Example #11
def extract_filter_params(trace):
    samp_rate = trace.stats.sampling_rate
    trigs = []
    p_arrivals = model.get_travel_times_geo(trace.stats.event_depth,
                                            trace.stats.event_latitude,
                                            trace.stats.event_longitude,
                                            trace.stats.station_latitude,
                                            trace.stats.station_longitude,
                                            phase_list=[
                                                'P',
                                            ])
    mean_parrival = np.mean([parr.time for parr in p_arrivals])
    for band in [(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7), (7, 8),
                 (8, 9), (1.5, 2.5), (2.5, 3.5), (3.5, 4.5), (4.5, 5.5),
                 (5.5, 6.5), (6.5, 7.5), (7.5, 8.5), (8.5, 9.5)]:
        tr_copy = trace.copy()
        clean_trace(tr_copy,
                    tr_copy.stats.starttime,
                    tr_copy.stats.endtime,
                    freqmin=band[0],
                    freqmax=band[1])
        cft = recursive_sta_lta(tr_copy.data, int(5 * samp_rate),
                                int(20 * samp_rate))
        upper, lower = find_best_bounds(cft, samp_rate)
        trigs.extend([
            (onset, tr_copy.stats.channel, upper - lower, band, upper)
            for onset in trigger_onset(cft,
                                       upper,
                                       lower,
                                       max_len=(60 *
                                                tr_copy.stats.sampling_rate),
                                       max_len_delete=True)
        ])
    trigs = [
        trig for trig in trigs
        if abs(trace.stats.event_time + mean_parrival - trace.stats.starttime -
               (trig[0][0] / samp_rate)) < pphase_search_margin
    ]
    if len(trigs) > 0:
        mintrigdiff = abs(trace.stats.event_time + mean_parrival -
                          trace.stats.starttime - trigs[0][0][0])
        besttrig = trigs[0][0][0]
        best_trig_margin = trigs[0][2]
        best_band = trigs[0][3]
        best_upper = trigs[0][4]
        for trig in trigs:
            if abs(trace.stats.event_time + mean_parrival - trace.stats.starttime - trig[0][0]) <= mintrigdiff and \
                trig[2] >= best_trig_margin and trig[4] >= best_upper:
                mintrigdiff = abs(trace.stats.event_time + mean_parrival -
                                  trace.stats.starttime - trig[0][0])
                besttrig = trig[0][0]
                best_trig_margin = trig[2]
                best_band = trig[3]
                best_upper = trig[4]
        return (best_band, best_trig_margin, best_upper)
    else:
        print('Something went wrong ... ')
    return None, None, None
Example #12
def _channel_loop(tr, parameters, max_trigger_length=60, despike=False):
    """
    Internal loop for parallel processing.

    :type tr: obspy.core.trace
    :param tr: Trace to look for triggers in.
    :type parameters: list
    :param parameters: List of TriggerParameter class for trace.
    :type max_trigger_length: float
    :type despike: bool

    :return: trigger
    :rtype: list
    """
    for par in parameters:
        if par['station'] == tr.stats.station and \
           par['channel'] == tr.stats.channel:
            parameter = par
            break
    else:
        Logger.warning('No parameters set for station ' +
                       str(tr.stats.station))
        return []

    triggers = []
    Logger.debug(tr)
    tr.detrend('simple')
    if despike:
        median_filter(tr)
    if parameter['lowcut'] and parameter['highcut']:
        tr.filter('bandpass',
                  freqmin=parameter['lowcut'],
                  freqmax=parameter['highcut'])
    elif parameter['lowcut']:
        tr.filter('highpass', freq=parameter['lowcut'])
    elif parameter['highcut']:
        tr.filter('lowpass', freq=parameter['highcut'])
    # find triggers for each channel using recursive_sta_lta
    df = tr.stats.sampling_rate
    cft = recursive_sta_lta(tr.data, int(parameter['sta_len'] * df),
                            int(parameter['lta_len'] * df))
    trig_args = {}
    if max_trigger_length:
        trig_args['max_len_delete'] = True
        trig_args['max_len'] = int(max_trigger_length * df + 0.5)
    tmp_trigs = trigger_onset(cft, float(parameter['thr_on']),
                              float(parameter['thr_off']), **trig_args)
    for on, off in tmp_trigs:
        cft_peak = tr.data[on:off].max()
        cft_std = tr.data[on:off].std()
        on = tr.stats.starttime + \
            float(on) / tr.stats.sampling_rate
        off = tr.stats.starttime + \
            float(off) / tr.stats.sampling_rate
        triggers.append(
            (on.timestamp, off.timestamp, tr.id, cft_peak, cft_std))
    return triggers
Example #13
def compute_sta_lta(data,
                    fm,
                    trigger_type,
                    nlta=10.0,
                    nsta=5.0,
                    trig_on=1.2,
                    trig_off=0.5):
    """
    Function that handles the building of STA/LTA event picking: classic, recursive and delayed. It follows the ObsPy
    implementation of these algorithms and can be interfaced with the main GUI to plot the results, or with the
    CLI for other analysis routines. A detailed comparison of STA/LTA trigger algorithms is included in:

    Withers, M., Aster, R., Young, C., Beiriger, J., Harris, M., Moore, S., and Trujillo, J. (1998),
    A comparison of select trigger algorithms for automated global seismic phase and event detection,
    Bulletin of the Seismological Society of America, 88 (1), 95-106.
    http://www.bssaonline.org/content/88/1/95.abstract

    Args:
        data : Numpy Array
            The seismic data to which we want to apply our STA/LTA routine
        fm : float
            The sampling frequency of the main trace
        trigger_type : str
            A string identifying which trigger type we want (Recursive, Delayed, Classic)
        nlta : float
            Length of the Long Time Average Window (LTA), in seconds
        nsta : float
            Length of the Short Time Average Window (STA), in seconds
        trig_on : float
            Value of the CF to consider as an activation trigger
        trig_off : float
            Value of the CF to consider as a de-activation trigger
    Returns:
        cft: Numpy Array
            The characteristic function resulting from the selected STA/LTA routine
        on_of: Tuple
            A data tuple containing the on/off times of the event picking
    """

    if np.isnan(data).any():
        data = merge_numpy(data)
    try:

        if trigger_type == "Recursive":
            cft = recursive_sta_lta(data, int(nsta * fm), int(nlta * fm))
        elif trigger_type == "Delayed":
            cft = delayed_sta_lta(data, int(nsta * fm), int(nlta * fm))
        else:
            cft = classic_sta_lta(data, int(nsta * fm), int(nlta * fm))

        on_of = trigger_onset(cft, trig_on, trig_off)

        return cft, on_of

    except ArithmeticError:
        print("Problem whilst computing the trigger")
Example #14
def getTrigger(sac, short=2, long=30):
    df = sac.stats.sampling_rate
    # get cft
    cft = recursive_sta_lta(sac.data, int(short * df), int(long * df))
    # set threshold
    threshold = np.mean(cft) + (np.max(cft) - np.mean(cft))/4
    # get on
    on_of = trigger_onset(cft, threshold*1.1, threshold*0.9)
    if len(on_of) != 0:
        return on_of[:, 0]
    else:
        return np.array([])
Example #15
def getTrigger(sac, short=2, long=30):
    df = sac.stats.sampling_rate
    # get cft
    cft = recursive_sta_lta(sac.data, int(short * df), int(long * df))
    # set threshold
    threshold = np.mean(cft) + (np.max(cft) - np.mean(cft)) / 4
    # get on
    on_of = trigger_onset(cft, threshold, threshold)
    if len(on_of) != 0:
        return on_of[:, 0]
    else:
        return np.array([])
Example #16
def plot_trigger_edit(trace, cft, thr_on, thr_off, poisson_times, snr_level, method, show=True, outfile=None):
    """ 
    EDIT OF OBSPY'S PLOT_TRIGGER FUNCTION
    
    Includes lines where the events are on the second graph, as well as titles and sizing stuff.
    """
    import matplotlib.pyplot as plt
    import matplotlib.patches as patches
    from obspy.signal.trigger import trigger_onset
    df = trace.stats.sampling_rate
    npts = trace.stats.npts
    t = np.arange(npts, dtype=np.float32) / df
    fig = plt.figure(figsize=(15,15))
    ax1 = fig.add_subplot(211)
    ax1.plot(t, trace.data, 'k')
    ax1.set_ylabel("Amplitude of signal", fontsize = 12)
    ax1.set_xlabel("Time after %s [s]" % trace.stats.starttime.isoformat(), fontsize = 12)
    ax1.set_xlim(min(t),max(t))
    ax1.tick_params(labelsize=12)
    ax2 = fig.add_subplot(212, sharex=ax1)
    ax2.plot(t, cft, 'k')
    ax2.set_xlim(min(t),max(t))
    on_off = np.array(trigger_onset(cft, thr_on, thr_off))
    i, j = ax1.get_ylim()
    try:
        ax1.vlines(on_off[:, 0] / df, i, j, color='r', lw=2,
                   label="Trigger On")
        ax1.vlines(on_off[:, 1] / df, i, j, color='b', lw=2,
                   label="Trigger Off")
        ax1.legend()
    except IndexError:
        pass
    # adding in rectangles at the Poisson
    for p in poisson_times:
        #ax2.add_patch(patches.Rectangle((p-5, -8), 40, 20, alpha = 0.2, facecolor = 'orange'))
        ax2.add_patch(patches.Rectangle((p, -8), 30, 20, alpha = 0.2, facecolor = 'orange'))
        
    ax2.axhline(thr_on, color='red', lw=1, ls='--')
    ax2.axhline(thr_off, color='blue', lw=1, ls='--')
    ax2.set_xlabel("Time after %s [s]" % trace.stats.starttime.isoformat(), fontsize = 12)
    ax2.set_ylabel("Characteristic function of trigger", fontsize = 12)
    ax2.set_xlim(0, 30000)
    ax2.tick_params(labelsize=12)
    fig.suptitle("Trigger on and off points for a synthetic seismogram with SNR %s using z-detect method" % (str(snr_level)), fontsize = 14)
    fig.canvas.draw()
    if outfile:
        fig.savefig(outfile)
    elif show:
        plt.show()
    else:
        return fig
Example #17
def _get_cut_times(config, tr):
    """Get trace cut times between P arrival and end of envelope coda."""
    tr_env = tr.copy()
    # remove the mean...
    tr_env.detrend(type='constant')
    # ...and the linear trend...
    tr_env.detrend(type='linear')
    # ...filter
    freqmin = 1.
    freqmax = 20.
    nyquist = 1./(2. * tr.stats.delta)
    if freqmax >= nyquist:
        freqmax = nyquist * 0.999
        msg = '%s: maximum frequency for bandpass filtering ' % tr.id
        msg += 'in local magnitude computation is larger than or equal '
        msg += 'to Nyquist. Setting it to %s Hz' % freqmax
        logger.warning(msg)
    cosine_taper(tr_env.data, width=config.taper_halfwidth)
    tr_env.filter(type='bandpass', freqmin=freqmin, freqmax=freqmax)
    tr_env.data = envelope(tr_env.data)
    tr_env.data = smooth(tr_env.data, 100)

    # Skip traces which do not have arrivals
    try:
        p_arrival_time = tr.stats.arrivals['P'][1]
    except Exception:
        logger.warning('%s: Trace has no P arrival: skipping trace' % tr.id)
        raise RuntimeError
    t1 = p_arrival_time - config.win_length
    t2 = p_arrival_time + config.win_length

    tr_noise = tr_env.copy()
    tr_signal = tr_env.copy()
    tr_noise.trim(starttime=t1, endtime=p_arrival_time,
                  pad=True, fill_value=0)
    tr_signal.trim(starttime=p_arrival_time, endtime=t2,
                   pad=True, fill_value=0)
    ampmin = tr_noise.data.mean()
    ampmax = tr_signal.data.mean()
    if ampmax <= ampmin:
        logger.warning(
            '%s: Trace has too high noise before P arrival: '
            'skipping trace' % tr.id)
        raise RuntimeError

    trigger = trigger_onset(tr_env.data, ampmax, ampmin,
                            max_len=9e99, max_len_delete=False)[0]
    t0 = p_arrival_time
    t1 = t0 + trigger[-1] * tr.stats.delta
    if t1 > tr.stats.endtime:
        t1 = tr.stats.endtime
    return t0, t1
Example #18
def stalta(data,
           nsta=3,
           nlta=10,
           threson=1.08,
           thresoff=1.05,
           offset=40,
           **kwargs):
    """
    Utilises :func:`~obspy.signal.trigger.classic_sta_lta` to remove outliers

    :type data: :class:`~numpy.ndarray`
    :param data: x-y data in Nx2 array, shape (N, 2)
    :type nsta: int
    :param nsta: Length of short time average window in samples
    :type nlta: int
    :param nlta: Length of long time average window in samples
    :type threson: float
    :param threson: Value above which trigger (of characteristic function)
                    is activated (higher threshold)
    :type thresoff: float
    :param thresoff: Value below which trigger (of characteristic function)
                     is deactivated (lower threshold)
    :type offset: int
    :param offset: in samples, how many additional samples are removed before
                   on trigger and after off trigger
    :rtype: :class:`~numpy.ndarray`
    :return: mask array containing bool values
    """
    orig_N = data.shape[0]

    nanbools = np.isnan(data[:, 1])
    nanindices = np.where(nanbools)[0]
    data = data[nanbools == False]

    cft = classic_sta_lta(data[:, 1], nsta, nlta)
    trigger_onoff = trigger_onset(cft, threson, thresoff)

    def inside_to(x_):
        for to in trigger_onoff:
            if to[0] - offset <= x_ <= to[1] + offset:
                return False
        return True

    filter_ = np.vectorize(inside_to)

    xi = np.where(filter_(np.arange(data.shape[0])))[0]

    for i in nanindices:
        xi[i <= xi] += 1

    return _get_mask(True, orig_N, xi, nanindices=nanindices)
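A hedged sketch of the masking idea above without the project-internal _get_mask helper: samples inside a trigger window, padded by offset samples on each side, are flagged for removal.

import numpy as np
from obspy.signal.trigger import classic_sta_lta, trigger_onset

x = np.linspace(0, 10, 2000)
y = np.sin(x)                  # smooth baseline signal
y[800:820] += 5.0              # synthetic outlier burst

cft = classic_sta_lta(y, 3, 10)
offset = 40
mask = np.zeros(len(y), dtype=bool)   # True marks samples to discard
for on, off in trigger_onset(cft, 1.08, 1.05):
    mask[max(on - offset, 0):off + offset + 1] = True
cleaned = y[~mask]                    # data with triggered spans removed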
Example #19
def _doubleChecking(station_list, detections, preprocessed_dir, moving_window, thr_on=3.7, thr_of=0.5):
    'This function performs traditional detection (STA/LTA) and picking (AIC) to double-check for events on the remaining stations when an event has been detected on more than two stations.'
    for stt in station_list:
        sttt = stt.split('_')[0]
      #  print(sttt)
        if sttt not in detections['station'].to_list():
            new_picks = {}                    
            if platform.system() == 'Windows':
                file_name = preprocessed_dir+"\\"+sttt+".hdf5"
                file_csv = preprocessed_dir+"\\"+sttt+".csv"
            else:
                file_name = preprocessed_dir+"/"+sttt+".hdf5"
                file_csv = preprocessed_dir+"/"+sttt+".csv"
            
            df = pd.read_csv(file_csv)
            df['start_time'] = pd.to_datetime(df['start_time'])  
            
            mask = (df['start_time'] > detections.iloc[0]['event_start_time']-timedelta(seconds = moving_window)) & (df['start_time'] < detections.iloc[0]['event_start_time']+timedelta(seconds = moving_window))
            df = df.loc[mask]
            dtfl = h5py.File(file_name, 'r')
            dataset = dtfl.get('data/'+df['trace_name'].to_list()[0]) 
            data = np.array(dataset)
                
            cft = recursive_sta_lta(data[:,2], int(2.5 * 100), int(10. * 100))
            on_of = trigger_onset(cft, thr_on, thr_of)
            if len(on_of) >= 1:                    
                p_pick, s_pick = ar_pick(data[:,2], data[:,1], data[:,0], 100, 1.0, 20.0, 1.0, 0.1, 4.0, 1.0, 2, 8, 0.1, 0.2)
                if (on_of[0][1]+100)/100 > p_pick > (on_of[0][0]-100)/100: 
                   # print('got one')
                    new_picks['traceID'] = df['trace_name'].to_list()[0]
                    new_picks['network'] = dataset.attrs["network_code"]
                    new_picks['station'] = sttt
                    new_picks['instrument_type'] = df['trace_name'].to_list()[0].split('_')[2]
                    new_picks['stlat'] = round(dataset.attrs["receiver_latitude"], 4)
                    new_picks['stlon'] = round(dataset.attrs["receiver_longitude"], 4)
                    new_picks['stelv'] = round(dataset.attrs["receiver_elevation_m"], 2)
                    new_picks['event_start_time'] = datetime.strptime(str(UTCDateTime(dataset.attrs['trace_start_time'].replace(' ', 'T')+'Z')+(on_of[0][0]/100)).replace('T', ' ').replace('Z', ''), '%Y-%m-%d %H:%M:%S.%f')
                    new_picks['event_end_time'] = datetime.strptime(str(UTCDateTime(dataset.attrs['trace_start_time'].replace(' ', 'T')+'Z')+(on_of[0][1]/100)).replace('T', ' ').replace('Z', ''), '%Y-%m-%d %H:%M:%S.%f')
                    new_picks['detection_prob'] = 0.3
                    new_picks['detection_unc'] = 0.6
                    new_picks['p_arrival_time'] = datetime.strptime(str(UTCDateTime(dataset.attrs['trace_start_time'].replace(' ', 'T')+'Z')+p_pick).replace('T', ' ').replace('Z', ''), '%Y-%m-%d %H:%M:%S.%f')
                    new_picks['p_prob'] = 0.3
                    new_picks['p_unc'] = 0.6
                    new_picks['p_snr'] = None
                    new_picks['s_arrival_time'] = None
                    new_picks['s_prob'] = 0.0
                    new_picks['s_unc'] = None
                    new_picks['s_snr'] = None
                    new_picks['amp'] = None
                    detections = detections.append(new_picks , ignore_index=True)      
    return detections                    
Example #20
def standard_trigger_finder(trace, channel_name):
    global std_on
    global std_off
    t = trace.copy()
    max_triggers = 30
    max_trigger_length = 20000
    ctf_start = 300  # avoids triggering on initial spike
    if "PDB" in channel_name:
        #trace.filter('highpass', freq=1500)
        sta = 20
        lta = 60
        ctf = recursive_sta_lta(t.data, sta, lta)
        ctf = ctf[ctf_start:]

        std_on = ctf[find_index_of_best_val(ctf, max_triggers)] * 0.98

        std_off = std_on * 0.8
        trigger_indices = trigger_onset(ctf, std_on, std_off,
                                        max_trigger_length)

    if "OT" in channel_name:
        #t.filter('bandpass', freqmin=1000, freqmax=15000)
        sta = 10
        lta = 50
        ctf = recursive_sta_lta(t.data, sta, lta)
        ctf = ctf[ctf_start:]

        #std_on  = ctf[find_index_of_max_val(ctf, max_triggers)] * 0.94
        std_on = ctf[find_index_of_best_val(ctf, max_triggers)] * 0.92
        if (std_on < 1):
            std_on += (1 - std_on) * 1.1
        #print("std_on: " + str(std_on))
        std_off = 1  #std_on * 0.92
        trigger_indices = trigger_onset(ctf, std_on, std_off,
                                        max_trigger_length)

    return trigger_indices, ctf
Example #21
def find_best_bounds(cft, samp_rate):
    bestupper = 2
    bestlower = 0.75
    max_margin = bestupper - bestlower
    leasttrigs = len(
        trigger_onset(cft,
                      2,
                      0.75,
                      max_len=(60 * samp_rate),
                      max_len_delete=True))
    for upper in np.linspace(2, 10, 12):
        for lower in [0.875, 0.75, 0.625, 0.5, 0.375]:
            t = trigger_onset(cft,
                              upper,
                              lower,
                              max_len=(60 * samp_rate),
                              max_len_delete=True)
            if len(t) > 0 and (upper -
                               lower) > max_margin and len(t) <= leasttrigs:
                leasttrigs = len(t)
                max_margin = upper - lower
                bestupper = upper
                bestlower = lower
    return bestupper, bestlower
Example #22
def pick_arrival(trace,
                 nsta_seconds,
                 nlta_seconds,
                 df,
                 origin_time,
                 pick_threshold,
                 plot_flag=False):
    """
    P wave arrival is picked using a recursive sta/lta algorithm.

    Parameters
    ----------
    trace: obspy trace
        Seismic data.
    nsta_seconds, nlta_seconds, pick_threshold: float
        Parameters for STA/LTA.
    df: int, float
        Data sampling rate.
    origin_time: obspy UTCDateTime
        Earthquake occurrence time.

    Returns
    -------
    P_pick: array-like
        Picked arrivals in samples.

    Reference:
    Withers, M., Aster, R., Young, C., Beiriger, J., Harris, M., Moore, S., and Trujillo, J. (1998),
    A comparison of select trigger algorithms for automated global seismic phase and event detection,
    Bulletin of the Seismological Society of America, 88 (1), 95-106.
    """

    cft = recursive_sta_lta(trace, int(nsta_seconds * df),
                            int(nlta_seconds * df))

    arrivals = trigger_onset(cft, pick_threshold, 0.5)

    if plot_flag:
        plot_trigger(trace, cft, pick_threshold, 0.5, show=True)

    P_pick = check_arrival_time(arrivals, trace.stats.starttime, origin_time,
                                df)

    return P_pick
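A hedged sketch of the STA/LTA picking step above on a synthetic trace; check_arrival_time is project-specific, so the sketch stops at the raw trigger onsets.

import numpy as np
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

df = 100.0
rng = np.random.default_rng(1)
data = rng.normal(size=6000)
data[3000:3500] += 8.0 * rng.normal(size=500)    # synthetic P arrival

cft = recursive_sta_lta(data, int(1.0 * df), int(10.0 * df))
arrivals = trigger_onset(cft, 3.0, 0.5)
print(arrivals)  # the first onset approximates the P arrival sample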
Example #23
def getTrigger(sac, short=2, long=25):  # 1.75 1.25 4.wan perfect
    df = sac.stats.sampling_rate
    # print 'sampling_rate = '
    # print df
    # get cft
    cft = recursive_sta_lta(sac.data, int(short * df), int(long * df))
    # set threshold
    threshold = np.mean(cft) + (np.max(cft) - np.mean(cft)) / 4
    if np.isnan(threshold):
        print('thre = nan')
        threshold = 3.2
    # get on
    # gk change
    # on_of = trigger_onset(cft, threshold, threshold)
    on_of = trigger_onset(cft, threshold * 1.38, threshold * 0.92)
    if len(on_of) != 0:
        return on_of[:, 0]
    else:
        return np.array([])
Example #24
	def _is_trigger(self):
		'''
		Figures out if there's a trigger active.
		'''
		if self.stalta.max() > self.thresh:
			if not self.exceed:
				# raise a flag that the Producer can read and modify 
				self.alarm = helpers.fsec(self.stream[0].stats.starttime + timedelta(seconds=
										trigger_onset(self.stalta, self.thresh,
										self.reset)[-1][0] * self.stream[0].stats.delta))
				self.exceed = True	# the state machine; this one should not be touched from the outside, otherwise bad things will happen
				print()
				printM('Trigger threshold of %s exceeded at %s'
						% (self.thresh, self.alarm.strftime('%Y-%m-%d %H:%M:%S.%f')[:22]), self.sender)
				printM('Trigger will reset when STA/LTA goes below %s...' % self.reset, sender=self.sender)
				COLOR['current'] = COLOR['purple']
				if self.testing:
					TEST['c_alerton'][1] = True
			else:
				pass

			if self.stalta.max() > self.maxstalta:
				self.maxstalta = self.stalta.max()
		else:
			if self.exceed:
				if self.stalta[-1] < self.reset:
					self.alarm_reset = helpers.fsec(self.stream[0].stats.endtime)	# lazy; effective
					self.exceed = False
					print()
					printM('Max STA/LTA ratio reached in alarm state: %s' % (round(self.maxstalta, 3)),
							self.sender)
					printM('Earthquake trigger reset and active again at %s' % (
							self.alarm_reset.strftime('%Y-%m-%d %H:%M:%S.%f')[:22]),
							self.sender)
					self.maxstalta = 0
					COLOR['current'] = COLOR['green']
				if self.testing:
					TEST['c_alertoff'][1] = True

			else:
				pass
Example #25
def pick_arrival(trace,
                 nsta_seconds,
                 nlta_seconds,
                 df,
                 origin_time,
                 pick_threshold,
                 plot_flag=False):
    """
    P wave arrival is picked using a recursive sta/lta algorithm.
    """
    cft = recursive_sta_lta(trace, int(nsta_seconds * df),
                            int(nlta_seconds * df))

    arrivals = trigger_onset(cft, pick_threshold, 0.5)

    if plot_flag:
        plot_trigger(trace, cft, pick_threshold, 0.5, show=True)

    P_pick = check_arrival_time(arrivals, trace.stats.starttime, origin_time,
                                df)

    return P_pick
Example #26
def LTASTAtr(tr,thres1, thres2,STA, LTA,plotSTA):
    """ return the cut on and off of the LTA/STA list [[cuton, cutoff], [cuton, cutoff]]
    * input : 
        - tr : type : trace , stream to filnd STA, LTA
        - thres1 : type; float : cut on limit of STA/LTA values :  after tjis value  the cut on is defined
        - thres2 : type, float : cut off limit of STA/LTA values : after this value the cut off is defined
        - STA : type int :  size of the LTA windows in second : STA = the trace average on this time windows
        - LTA : type int :  size of the LTA windows in second : LTA = the trace average on this time windows
        - plotSTA: type, bool; it true, the trace and it's characteristic function are plotted
        
        RQ: AFTER TESTING IT'S SEEEMS GOOD TO HAVE A RATIO WSTA/WLTA > 1/4 AND A CUT OFF HIGHER THAN CUT ON
    * outputs
        - L_onoff: type np, array : 2D  array of cut on and cut_off time  in number of sample ie time* df where df is the sampling rate [[cuton,cutoff], [cuton1, cutoff1]]
    
    exemple: 
        st = Read_event('15','206','15','1', '1', True)
        stcorrec = Stream_Correction(st,  '1', False)
        stfiltered= Stream_PBfilter(stcorrec,0.5, 20,False)
        LTASTA(stfiltered,2, 2.5,300,1400,True)
    """
    #0. sampling rate ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    df = tr.stats.sampling_rate
    #1. characteristic function of the trace following classical LTA ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # cft = classicSTALTA(tr.data, int(STA * df), int(LTA * df))
    # 2. characteristic function of the trace following the recursive STA/LTA
    cft2 = recursive_sta_lta(tr.data, int(STA * df), int(LTA * df))
    # 3. list of [cuton, cutoff] times in number of samples ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # max_len = maximum length of the triggered event in samples
    # max_len_delete = do not write events longer than max_len into the report file
    L_onoff = trigger_onset(cft2, thres1, thres2, max_len=9e+99, max_len_delete=False)
    #4. plot~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if plotSTA==True : 
        
        ##plotTrigger(tr[0], cft, thres1,thres2)
     
        plotTrigger(tr[0], cft2, thres1,thres2)
        plt.title('recursive')
    return np.array(L_onoff)
Example #27
def getTriggers(levels, sta, curr_time, sr, time_step, trigger_duration,
                reset_duration, DMs):
    """
    Returns a list of trigger times for a given station
    @params:
        levels              STA/LTA values for a station
        sta                 station name
        curr_time           start time to process on
        sr                  sampling rate in Hz
        time_step           window size to process on
        trigger_duration    the length of time in seconds
                            the STA/LTA needs to stay
                            above the trigger level
                            in order to be considered
                            a trigger
        DMs                 a list of DecisionMakers
    @return:
        list                a list of triggers times
                            for a given station
    """
    end_time = curr_time + int(sr * (time_step + trigger_duration))
    if end_time > len(levels[sta]):
        end_time = len(levels[sta])

    #onset,offset = getTriggerOnset(levels[sta],curr_time,end_time,
    #                trigger_duration*sr,
    #                DMs[sta].prev_action,DMs[sta].prev_action,reset_duration*sr)

    #if onset != None:
    #    return [onset]
    #else:
    #    return []

    return [
        x[0] for x in trigger_onset(levels[sta][curr_time:end_time],
                                    DMs[sta].prev_action, DMs[sta].prev_action)
        if (x[1] - x[0]) >= trigger_duration * sr
    ]
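A hedged sketch of calling getTriggers with synthetic STA/LTA levels; the DecisionMaker objects are project-specific, so a SimpleNamespace carrying only the prev_action threshold stands in for them.

import numpy as np
from types import SimpleNamespace

# Synthetic STA/LTA levels for one station: quiet, a long excursion, quiet again.
levels = {'STA1': np.concatenate([np.ones(200), np.full(400, 4.0), np.ones(700)])}
DMs = {'STA1': SimpleNamespace(prev_action=2.0)}  # stands in for a DecisionMaker

triggers = getTriggers(levels, 'STA1', curr_time=0, sr=100, time_step=5,
                       trigger_duration=2, reset_duration=10, DMs=DMs)
print(triggers)  # expected: one onset near sample 200 (excursion lasts >= 2 s)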
Example #28
)  #cut out sample waveform with same window length as chosen event
#trs_e = obspy.signal.filter.envelope(trs.data)
#print('reference waveform')

trs.plot(type='relative', color='b')  #, starttime=start , endtime=end)

sr = trace.stats.sampling_rate
nsta = int(2 * sr)  #2
nlta = int(10 * sr)  #20
stream = trs.data
cft = recursive_sta_lta(stream, nsta, nlta)
trig_on = 6  #8
trig_off = 0.2  #0.2
plot_trigger(trs, cft, trig_on, trig_off)

on_off = trigger_onset(cft, trig_on, trig_off)

for x in range(0, len(on_off)):
    tr = trace.slice(starttime=start + (on_off[x, 0] / sr) - 10,
                     endtime=start + (on_off[x, 1] / sr) + 10)
    tr_e = obspy.signal.filter.envelope(tr.data)

    #%% frequency info
    tr_data = tr.data
    m = np.mean(tr_data)
    tr_data = tr_data - m
    famp = abs(np.fft.fft(tr_data))

    # peak f
    peak = argmax(abs(famp)) / window
    if peak > 50:
Example #29
        tr_win[:, :, 2] = sliding_Z
        tr_win = tr_win / np.max(np.abs(tr_win), axis=(1, 2))[:, None, None]
        tt = tt[:tr_win.shape[0]]
        tt_i = tt_i[:tr_win.shape[0]]

        if args.V:
            ts = model.predict(tr_win, verbose=True, batch_size=batch_size)
        else:
            ts = model.predict(tr_win, verbose=False, batch_size=batch_size)

        prob_S = ts[:, 1]
        prob_P = ts[:, 0]
        prob_N = ts[:, 2]

        from obspy.signal.trigger import trigger_onset
        trigs = trigger_onset(prob_P, min_proba, 0.1)
        p_picks = []
        s_picks = []
        for trig in trigs:
            if trig[1] == trig[0]:
                continue
            pick = np.argmax(ts[trig[0]:trig[1], 0]) + trig[0]
            stamp_pick = st[0].stats.starttime + tt[pick]
            p_picks.append(stamp_pick)
            ofile.write("%s %s P %s\n" % (net, sta, stamp_pick.isoformat()))

        trigs = trigger_onset(prob_S, min_proba, 0.1)
        for trig in trigs:
            if trig[1] == trig[0]:
                continue
            pick = np.argmax(ts[trig[0]:trig[1], 1]) + trig[0]
Example #30
    tr_z = tr.select(component="Z")[0]
    dtz = tr_z.stats.starttime
    print("BHZ " + str(tr_z.stats.starttime) + " " + str(tr_z.stats.endtime))
    #tr = read('/Users/Nishita/Documents/Research/example30/03.QCH.BHZ.SAC')
    #tr = read('/Users/Nishita/Documents/Research/sample/before/SC.XJI.2008131160000.D.00.BHZ.sac')
    #tr.filter('bandpass', freqmin=10, freqmax=20)  # optional prefiltering
    del tr
    #------------BHN-----------------------------------

    dfn = tr_n.stats.sampling_rate

    # Characteristic function and trigger onsets
    #cft = recursive_sta_lta(tr[0].data, int(2.5 * df), int(10. * df))

    cftn = z_detect(tr_n.data, int(10 * dfn))
    on_of_n = trigger_onset(cftn, 2, 0.5)

    j = 0
    onsettime = []
    print("BHN")
    print(on_of_n)
    len_n = len(on_of_n)
    for tup in on_of_n:
        for item in tup:
            onsettime.append(item)
        j = j + 1
        #print("aahhh1 "+str(dtn+onsettime[0]/100)+str(dtn+onsettime[1]/100))
        tr_bhn = read('/Volumes/Seagate Expansion Drive/after/*' + '.2008' +
                      str(i) + '*.BHN*',
                      starttime=(dtn + onsettime[0] / 100 - 10),
                      endtime=(dtn + onsettime[1] / 100 + 10))[0]
Example #31
import matplotlib.pyplot as plt
from obspy import read
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

from seisnn.io import get_dir_list

predict_pkl_dir = "/mnt/tf_data/pkl/2017_02"
predict_pkl_list = get_dir_list(predict_pkl_dir)
for i, pkl in enumerate(predict_pkl_list):
    trace = read(pkl).traces[0]
    df = trace.stats.sampling_rate
    cft = recursive_sta_lta(trace.data, int(0.5 * df), int(1. * df))
    on_of = trigger_onset(cft, 1, 0.5)

    # Plotting the results
    ax = plt.subplot(211)
    plt.plot(trace.data, 'k')
    ymin, ymax = ax.get_ylim()
    plt.vlines(on_of[:, 0], ymin, ymax, color='r', linewidth=2)
    plt.vlines(on_of[:, 1], ymin, ymax, color='b', linewidth=2)
    plt.subplot(212, sharex=ax)
    plt.plot(cft, 'k')
    plt.hlines([1, 0.5], 0, len(cft), color=['r', 'b'], linestyle='--')
    plt.axis('tight')
    plt.show()
Example #32
def tt_Correlate(stm,
                 phase='P',
                 trim_start=10,
                 trim_end=100,
                 thr_on=.1,
                 thr_off=.05,
                 hp=5.0,
                 lp=49.0,
                 process=True,
                 plot=True,
                 plot_size=(40, 100)):
    """
	Given a stream (obspy.core.stream), calculate travel time based on correlation picker 
	in Massin & Malcolm(2016). 
	Use the trigger.py module in Massin & Malcolm(2016)
	Use the obspy.signal.trigger.trigger_onset module.
	! Assume the traces in stm have the same sampling rate.
	
	:input: 
		- stm			: Stream that contains waveform traces (class:obspy.core.stream)
		- *phase		: P arrival (pick the 1st onset) or S arrival (pick the 2nd onset) (options:'P','S')(string)
		- *trim_start	: For trimming the trace (seconds after start time) (float)
		- *trim_end		: For trimming the trace (seconds after start time) (float)
		- *thr_on		: Threshold of onset triggering (float)
		- *thr_off		: Threshold of onset closing (float)
		- *hp			: Corner frequency of high-pass filter (float)
		- *lp			: Corner frequency of low-pass filter (float)
		- *process		: Need signal processing or not (boolean)
	_______
	:output:
		- A list of travel time time series for the traces in the stream (list) 
		
		
	"""
    # Signal processing before calculating Cf (default)
    if process == True:
        stm = Signal_Processing(stm, hp=hp, lp=lp)

    # Extract values from the SAC attributes (from the very first trace in the stream)
    # Trace start time in UTC
    starttime = stm[0].stats.starttime
    # Sampling rate
    sampling_rate = stm[0].stats.sampling_rate
    # Event origin time
    to = stm[0].stats.sac['o']

    stm_trimed = stm.copy()

    # Trim the stream (mainly for getting rid of the false correlation peaks due to tapering)
    stm_trimed.trim(starttime=starttime + trim_start,
                    endtime=starttime + trim_end)

    # Calculating characteristic function for all ENZ 3 channels
    data = stm_trimed
    data_preprocessed = (trigger.Components(data, preprocessor='rms')).output()
    cf_ENZ = trigger.Correlate(data_preprocessed,
                               data,
                               multiplexor="components",
                               preprocessor='rms').output()

    # Plot the cf (if applicable)
    if plot == True:
        cf_Correlate_c = trigger.Correlate(data_preprocessed,
                                           data,
                                           multiplexor="components",
                                           preprocessor='rms')
        ax, shift = trigger.stream_processor_plot(cf_Correlate_c.data,
                                                  cf_Correlate_c.output(),
                                                  cfcolor='b',
                                                  label=r'$^MC\star\rms$',
                                                  size=plot_size)
        ax.legend()

    if phase == 'P' or phase == 'p':
        # Get Cf of Z channels only (most sensitive to P wave)
        cf_Z = []
        for i in range(len(cf_ENZ)):
            if i % 3 == 2:
                cf_Z.append(cf_ENZ[i])

        # Initialize travel time list tt
        Travel_Time_List = []

        # Calculation of travel time for traces in the stream
        for i, tr in enumerate(cf_Z):

            try:
                # Get the very first triggered nt
                On1 = trigger_onset(cf_Z[i], thr_on, thr_off)[0][0]
                # Convert to travel time
                tt1 = (On1 / sampling_rate) + trim_start - to

            # Exception: If there's only noise, the picker won't be triggered:
            except:
                tt1 = None

            Travel_Time_List.append(tt1)

    if phase == 'S' or phase == 's':
        # Get Cf of E and N channels
        cf_E = []
        cf_N = []
        for i in range(len(cf_ENZ)):
            if i % 3 == 0:
                cf_E.append(cf_ENZ[i])
            elif i % 3 == 1:
                cf_N.append(cf_ENZ[i])

        # Initialize travel time list tt
        Travel_Time_List = []
        Travel_Time_List_E = []
        Travel_Time_List_N = []

        # Calculation of travel time for traces in the stream
        for i, tr in enumerate(cf_E):

            try:
                # Get the second triggered nt
                On2 = trigger_onset(cf_E[i], thr_on, thr_off)[1][0]
                # Convert to travel time
                tt2 = (On2 / sampling_rate) + trim_start - to

            # Exception: If there's only noise, the picker won't be triggered:
            except:
                tt2 = None

            Travel_Time_List_E.append(tt2)

        print(Travel_Time_List_E)

        for i, tr in enumerate(cf_N):

            try:
                # Get the second triggered nt
                On2 = trigger_onset(cf_N[i], thr_on, thr_off)[1][0]
                # Convert to travel time
                tt2 = (On2 / sampling_rate) + trim_start - to

            # Exception: If there's only noise, the picker won't be triggered:
            except:
                tt2 = None

            Travel_Time_List_N.append(tt2)

        print(Travel_Time_List_N)

        # Choose the smaller one btw E and N channel
        for i, tt_EN in enumerate(Travel_Time_List_E):

            try:
                Travel_Time_List.append(
                    min(Travel_Time_List_E[i], Travel_Time_List_N[i]))

            # Exception: No second triggered point in E or N channel
            except:
                if Travel_Time_List_E:
                    Travel_Time_List.append(Travel_Time_List_E[i])

                elif Travel_Time_List_N:
                    Travel_Time_List.append(Travel_Time_List_N[i])

                else:
                    Travel_Time_List.append(None)

    return Travel_Time_List
Example #33
def stalta_pick(stream, stalen, ltalen, trig_on, trig_off, freqmin=False,
                freqmax=False, debug=0, show=False):
    """
    Basic sta/lta picker, suggest using alternative in obspy.
    Simple sta-lta (short-term average/long-term average) picker, using \
    obspy's stalta routine to generate the characteristic function.

    Currently very basic quick wrapper, there are many other (better) options \
    in obspy, found \
    `here <http://docs.obspy.org/packages/autogen/obspy.signal.trigger.html>`_.

    :type stream: obspy.Stream
    :param stream: The stream to pick on, can be any number of channels.
    :type stalen: float
    :param stalen: Length of the short-term average window in seconds.
    :type ltalen: float
    :param ltalen: Length of the long-term average window in seconds.
    :type trig_on: float
    :param trig_on: sta/lta ratio to trigger a detection/pick
    :type trig_off: float
    :param trig_off: sta/lta ratio to turn the trigger off - no further picks\
        will be made between exceeding trig_on until trig_off is reached.
    :type freqmin: float
    :param freqmin: Low-cut frequency in Hz for bandpass filter
    :type freqmax: float
    :param freqmax: High-cut frequency in Hz for bandpass filter
    :type debug: int
    :param debug: Debug output level from 0-5.
    :type show: bool
    :param show: Show picks on waveform.

    :returns: obspy.core.event.Event

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import stalta_pick
    >>> st = read()
    >>> event = stalta_pick(st, stalen=0.2, ltalen=4, trig_on=10,
    ...             trig_off=1, freqmin=3.0, freqmax=20.0)
    >>> event.creation_info.author
    'EQcorrscan'
    """
    from obspy.signal.trigger import classic_sta_lta, trigger_onset
    from obspy.signal.trigger import plot_trigger
    from obspy import UTCDateTime
    from obspy.core.event import Event, Pick, WaveformStreamID
    from obspy.core.event import CreationInfo, Comment, Origin
    import eqcorrscan.utils.plotting as plotting

    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='stalta'))
    picks = []
    for tr in stream:
        # We are going to assume, for now, that if the pick is made on the
        # horizontal channel then it is an S, otherwise we will assume it is
        # a P-phase: obviously a bad assumption...
        if tr.stats.channel[-1] == 'Z':
            phase = 'P'
        else:
            phase = 'S'
        if freqmin and freqmax:
            tr.detrend('simple')
            tr.filter('bandpass', freqmin=freqmin, freqmax=freqmax,
                      corners=3, zerophase=True)
        df = tr.stats.sampling_rate
        cft = classic_sta_lta(tr.data, int(stalen * df), int(ltalen * df))
        if debug > 3:
            plot_trigger(tr, cft, trig_on, trig_off)
        triggers = trigger_onset(cft, trig_on, trig_off)
        for trigger in triggers:
            on = tr.stats.starttime + (trigger[0] / df)
            # off = tr.stats.starttime + (trigger[1] / df)
            wav_id = WaveformStreamID(station_code=tr.stats.station,
                                      channel_code=tr.stats.channel,
                                      network_code=tr.stats.network)
            pick = Pick(waveform_id=wav_id, phase_hint=phase, time=on)
            if debug > 2:
                print('Pick made:')
                print(pick)
            picks.append(pick)
    # QC picks
    del pick
    pick_stations = list(set([pick.waveform_id.station_code for pick in picks]))
    for pick_station in pick_stations:
        station_picks = [pick for pick in picks if
                         pick.waveform_id.station_code == pick_station]
        # If P-pick is after S-picks, remove it.
        p_time = [pick.time for pick in station_picks if pick.phase_hint == 'P']
        s_time = [pick.time for pick in station_picks if pick.phase_hint == 'S']
        if p_time > s_time:
            p_pick = [pick for pick in station_picks if pick.phase_hint == 'P']
            for pick in p_pick:
                print('P pick after S pick, removing P pick')
                picks.remove(pick)
    if show:
        plotting.pretty_template_plot(stream, picks=picks, title='Autopicks',
                                      size=(8, 9))
    event.picks = picks
    event.origins[0].time = min([pick.time for pick in event.picks]) - 1
    event.origins[0].latitude = float('nan')
    event.origins[0].longitude = float('nan')
    # Set arbitrary origin time
    return event
Example #34
import matplotlib.pyplot as plt
import obspy
from obspy.clients.arclink import Client
from obspy.signal.trigger import recursive_sta_lta, trigger_onset


# Retrieve waveforms via ArcLink
client = Client(host="erde.geophysik.uni-muenchen.de", port=18001,
                user="******")
t = obspy.UTCDateTime("2009-08-24 00:19:45")
st = client.get_waveforms('BW', 'RTSH', '', 'EHZ', t, t + 50)

# For convenience
tr = st[0]  # only one trace in mseed volume
df = tr.stats.sampling_rate

# Characteristic function and trigger onsets
cft = recursive_sta_lta(tr.data, int(2.5 * df), int(10. * df))
on_of = trigger_onset(cft, 3.5, 0.5)

# Plotting the results
ax = plt.subplot(211)
plt.plot(tr.data, 'k')
ymin, ymax = ax.get_ylim()
plt.vlines(on_of[:, 0], ymin, ymax, color='r', linewidth=2)
plt.vlines(on_of[:, 1], ymin, ymax, color='b', linewidth=2)
plt.subplot(212, sharex=ax)
plt.plot(cft, 'k')
plt.hlines([3.5, 0.5], 0, len(cft), color=['r', 'b'], linestyle='--')
plt.axis('tight')
plt.show()
Example #35
def _channel_loop(tr, parameters, max_trigger_length=60,
                  despike=False, debug=0):
    """
    Internal loop for parallel processing.

    :type tr: obspy.core.trace
    :param tr: Trace to look for triggers in.
    :type parameters: list
    :param parameters: List of TriggerParameter class for trace.
    :type max_trigger_length: float
    :type despike: bool
    :type debug: int

    :return: trigger
    :rtype: list
    """
    from eqcorrscan.utils.despike import median_filter
    from obspy.signal.trigger import trigger_onset, plot_trigger
    from obspy.signal.trigger import recursive_sta_lta
    import warnings

    for par in parameters:
        if par['station'] == tr.stats.station and \
                        par['channel'] == tr.stats.channel:
            parameter = par
            break
    else:
        msg = 'No parameters set for station ' + str(tr.stats.station)
        warnings.warn(msg)
        return []

    triggers = []
    if debug > 0:
        print(tr)
    tr.detrend('simple')
    if despike:
        median_filter(tr)
    if parameter['lowcut'] and parameter['highcut']:
        tr.filter('bandpass', freqmin=parameter['lowcut'],
                  freqmax=parameter['highcut'])
    elif parameter['lowcut']:
        tr.filter('highpass', freq=parameter['lowcut'])
    elif parameter['highcut']:
        tr.filter('lowpass', freq=parameter['highcut'])
    # find triggers for each channel using recursive_sta_lta
    df = tr.stats.sampling_rate
    cft = recursive_sta_lta(tr.data, int(parameter['sta_len'] * df),
                            int(parameter['lta_len'] * df))
    trig_args = {}
    if max_trigger_length:
        trig_args['max_len_delete'] = True
        trig_args['max_len'] = int(max_trigger_length *
                                   df + 0.5)
    if debug > 3:
        plot_trigger(tr, cft, parameter['thr_on'], parameter['thr_off'])
    tmp_trigs = trigger_onset(cft, float(parameter['thr_on']),
                              float(parameter['thr_off']),
                              **trig_args)
    for on, off in tmp_trigs:
        cft_peak = tr.data[on:off].max()
        cft_std = tr.data[on:off].std()
        on = tr.stats.starttime + \
             float(on) / tr.stats.sampling_rate
        off = tr.stats.starttime + \
                float(off) / tr.stats.sampling_rate
        triggers.append((on.timestamp, off.timestamp,
                         tr.id, cft_peak,
                         cft_std))
    return triggers