Example #1
 def test_xcorr_max(self):
     shift, value = xcorr_max((1, 3, -5))
     self.assertEqual(shift, 1)
     self.assertEqual(value, -5)
     shift, value = xcorr_max((3., -5.), abs_max=False)
     self.assertEqual(shift, -0.5)
     self.assertEqual(value, 3.)
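The assertions above pin down xcorr_max's lag convention: the zero-lag sample sits in the middle of the correlation array, so the returned shift is the argmax index minus (len(cc) - 1) / 2, and with the default abs_max=True the extremum of |cc| wins, which is why the first call returns the negative value -5. A minimal sketch of that convention (an approximation for illustration, not ObsPy's implementation):

import numpy as np

def xcorr_max_sketch(cc, abs_max=True):
    cc = np.asarray(cc)
    mid = (len(cc) - 1) / 2                         # position of zero lag
    idx = np.argmax(np.abs(cc) if abs_max else cc)  # extremum of |cc| or of cc
    return idx - mid, cc[idx]

xcorr_max_sketch((1, 3, -5))                # -> (1.0, -5)
xcorr_max_sketch((3., -5.), abs_max=False)  # -> (-0.5, 3.0)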
Example #2
 def test_correlate(self):
     # simple test
     a, b = [0, 1], [20, 10]
     cc = correlate(a, b, 1, demean=False, normalize=False)
     shift, value = xcorr_max(cc)
     self.assertEqual(shift, 1)
     self.assertAlmostEqual(value, 20.)
     np.testing.assert_allclose(cc, [0., 10., 20.], atol=1e-14)
     # test symmetry and different lengths of a and b
     a, b = [0, 1, 2], [20, 10]
     cc1 = correlate(a, b, 1, demean=False, normalize=False, method='fft')
     cc2 = correlate(a, b, 1, demean=False, normalize=False,
                     method='direct')
     cc3 = correlate(b, a, 1, demean=False, normalize=False, method='fft')
     cc4 = correlate(b, a, 1, demean=False, normalize=False,
                     method='direct')
     shift1, _ = xcorr_max(cc1)
     shift2, _ = xcorr_max(cc2)
     shift3, _ = xcorr_max(cc3)
     shift4, _ = xcorr_max(cc4)
     self.assertEqual(shift1, 0.5)
     self.assertEqual(shift2, 0.5)
     self.assertEqual(shift3, -0.5)
     self.assertEqual(shift4, -0.5)
     np.testing.assert_allclose(cc1, cc2)
     np.testing.assert_allclose(cc3, cc4)
     np.testing.assert_allclose(cc1, cc3[::-1])
     # test symmetry for method='direct' and len(a) - len(b) - 2 * num > 0
     a, b = [0, 1, 2, 3, 4, 5, 6, 7], [20, 10]
     cc1 = correlate(a, b, 2, method='direct')
     cc2 = correlate(b, a, 2, method='direct')
     np.testing.assert_allclose(cc1, cc2[::-1])
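Note the half-integer shifts asserted above: when the inputs have different lengths they are aligned around their middle samples, so an odd length difference offsets every lag by half a sample. The same effect in isolation, using the numbers from this test:

from obspy.signal.cross_correlation import correlate, xcorr_max

a, b = [0, 1, 2], [20, 10]    # lengths differ by one sample
cc = correlate(a, b, 1, demean=False, normalize=False)
shift, value = xcorr_max(cc)  # shift == 0.5, a half-integer lag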
Example #3
    def test_correlate_template_nodemean_fastmatchedfilter(self):
        """
        Compare non-demeaned result against FMF derived result.

        FMF result obtained by the following:

        import copy
        import numpy as np
        from fast_matched_filter import matched_filter
        from obspy import read

        data = read()[0].data
        template = copy.deepcopy(data[400:600])
        data = data[380:620]
        result = matched_filter(
            templates=template.reshape(1, 1, 1, len(template)),
            moveouts=np.array(0).reshape(1, 1, 1),
            weights=np.array(1).reshape(1, 1, 1),
            data=data.reshape(1, 1, len(data)),
            step=1, arch='cpu')[0]

        .. note::
            FastMatchedFilter doesn't use semver; this result was generated
            by Calum Chamberlain on 18 Jan 2018 using up-to-date code with
            the patch in https://github.com/beridel/fast_matched_filter/pull/12
        """
        result = [
            -1.48108244e-01,   4.71532270e-02,   1.82797655e-01,
            1.92574233e-01,   1.18700281e-01,   1.18958903e-02,
            -9.23405439e-02,  -1.40047163e-01,  -1.00863703e-01,
            -4.86961426e-03,   1.04124829e-01,   1.72662303e-01,
            1.41110823e-01,   1.53776666e-04,  -1.71214968e-01,
            -2.83201426e-01,  -3.04899812e-01,  -2.03215942e-01,
            8.88349637e-02,   5.00749528e-01,   7.18140483e-01,
            5.29728174e-01,   1.30591258e-01,  -1.83402568e-01,
            -3.22406143e-01,  -3.20676118e-01,  -1.98054180e-01,
            -5.06028766e-04,   1.56253457e-01,   1.74580097e-01,
            6.49696961e-02,  -8.56237561e-02,  -1.89858019e-01,
            -1.96504310e-01,  -1.04968190e-01,   2.51029599e-02,
            1.32686019e-01,   2.03692451e-01,   2.11983219e-01,
            0.00000000e+00,   0.00000000e+00]
        data = read()[0].data
        template = data[400:600]
        data = data[380:620]
        # FMF demeans template but does not locally demean data for
        # normalization
        template = template - template.mean()
        cc = correlate_template(data, template, demean=False)
        # FMF misses the last two elements?
        np.testing.assert_allclose(cc[0:-2], result[0:-2], atol=1e-7)
        shift, corr = xcorr_max(cc)
        self.assertEqual(shift, 0)
Example #4
 def test_xcorr_vs_old_implementation(self):
     """
     Test against output of xcorr from ObsPy<1.1
     """
     # Results of xcorr(self.a, self.b, 15, full_xcorr=True)
     # for ObsPy==1.0.2:
     # -5, 0.9651607597888241
     x = [0.53555336, 0.60748967, 0.67493495, 0.73707491, 0.79313226,
          0.84237607, 0.88413089, 0.91778536, 0.94280034, 0.95871645,
          0.96516076, 0.96363672, 0.95043933, 0.92590109, 0.89047807,
          0.84474328, 0.78377236, 0.71629895, 0.64316805, 0.56526677,
          0.48351386, 0.39884904, 0.31222231, 0.22458339, 0.13687123,
          0.05000401, -0.03513057, -0.11768441, -0.19685756, -0.27190599,
          -0.34214866]
     corr_fun = correlate(self.a, self.b, shift=15)
     shift, corr = xcorr_max(corr_fun)
     np.testing.assert_allclose(corr_fun, x)
     self.assertAlmostEqual(corr, 0.96516076)
     self.assertEqual(shift, -5)
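For reference, a hedged sketch of the API migration this test pins down (the pre-1.1 return layout is inferred from the comment above, so treat it as an assumption):

# ObsPy < 1.1 (deprecated):
#     shift, value, fct = xcorr(self.a, self.b, 15, full_xcorr=True)
# ObsPy >= 1.1, as exercised by this test:
corr_fun = correlate(self.a, self.b, shift=15)
shift, corr = xcorr_max(corr_fun)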
Example #5
    def SW_L2(self, SW_env_obs, SW_env_syn, var, amplitude):
        misfit = np.array([])
        for i in range(len(SW_env_obs)):
            # Cross-correlate observed and synthetic envelopes within a
            # maximum lag of a quarter of the synthetic trace length.
            cc_obspy = cc.correlate(SW_env_obs[i].data, SW_env_syn[i].data,
                                    int(0.25 * len(SW_env_syn[i].data)))
            shift, CC_s = cc.xcorr_max(cc_obspy)

            # Shift and scale the synthetic envelope, then accumulate the
            # L2 misfit (d_obs - d_syn)^T (d_obs - d_syn) / (2 * var).
            SW_syn_shift = self.shift(SW_env_syn[i].data, -shift) * np.mean(amplitude)
            residual = SW_env_obs[i].data - SW_syn_shift
            misfit = np.append(misfit, np.matmul(residual.T, residual) / (2 * var))
            # (Optional plotting of the shifted/scaled synthetic envelope
            # against the observed one omitted here.)
        return misfit
Example #6
    def test_correlate_template_eqcorrscan(self):
        """
        Test for moving window correlations with "full" normalisation.

        Comparison result is from EQcorrscan v.0.2.7, using the following:

        from eqcorrscan.utils.correlate import get_array_xcorr
        from obspy import read

        data = read()[0].data
        template = data[400:600]
        data = data[380:620]
        eqcorrscan_func = get_array_xcorr("fftw")
        result = eqcorrscan_func(
            stream=data, templates=template.reshape(1, len(template)),
            pads=[0])[0][0]
        """
        result = [
            -2.24548906e-01, 7.10350871e-02, 2.68642932e-01, 2.75941312e-01,
            1.66854098e-01, 1.66086946e-02, -1.29057273e-01, -1.96172655e-01,
            -1.41613603e-01, -6.83271606e-03, 1.45768464e-01, 2.42143899e-01,
            1.98310092e-01, 2.16377302e-04, -2.41576880e-01, -4.00586188e-01,
            -4.32240069e-01, -2.88735539e-01, 1.26461715e-01, 7.09268868e-01,
            9.99999940e-01, 7.22769439e-01, 1.75955653e-01, -2.46459037e-01,
            -4.34027880e-01, -4.32590246e-01, -2.67131507e-01, -6.78363896e-04,
            2.08171085e-01, 2.32197508e-01, 8.64804164e-02, -1.14158235e-01,
            -2.53621429e-01, -2.62945205e-01, -1.40505865e-01, 3.35594788e-02,
            1.77415669e-01, 2.72263527e-01, 2.81718552e-01, 1.38080209e-01,
            -1.27307668e-01
        ]
        data = read()[0].data
        template = data[400:600]
        data = data[380:620]
        cc = correlate_template(data, template)
        np.testing.assert_allclose(cc, result, atol=1e-7)
        shift, corr = xcorr_max(cc)
        self.assertAlmostEqual(corr, 1.0)
        self.assertEqual(shift, 0)
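The "full" normalisation tested here is plain zero-mean, unit-energy (Pearson) correlation in a moving window. A slow but transparent numpy cross-check, offered as a hedged sketch rather than either library's actual implementation:

import numpy as np

def naive_correlate_template(data, template):
    n = len(template)
    t = template - template.mean()
    out = np.empty(len(data) - n + 1)
    for k in range(len(out)):
        # demean each data window locally, then normalise by both energies
        d = data[k:k + n] - data[k:k + n].mean()
        out[k] = np.dot(d, t) / np.sqrt(np.dot(d, d) * np.dot(t, t))
    return out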
Example #7
def check_lags(DB, winlength=1000, fl=0.5, fh=20, maxshift=10):
    stations = DB.station.unique()
    nsta = len(stations)
    from obspy.signal.cross_correlation import correlate, xcorr_max
    st = obspy.Stream()
    for fn in DB.filename:
        st += obspy.read(fn)
    # band-pass filter once all traces have been read
    st.filter('bandpass', freqmin=fl, freqmax=fh)
    t0 = min(DB.t1).replace(minute=0, second=0, microsecond=0)
    t = []
    lag = np.zeros([nsta, 1])
    xc_coef = np.zeros([nsta, 1])
    consistency = []
    N = int((max(DB.t2) - t0)/winlength)
    count = 0
    t1 = t0 + 1e-6
    #while t1 < (t0 + 10*winlength):#max(DB.t2):
    while t1 < max(DB.t2):
        count += 1
        t1 += winlength
        print(str(count) + ' of ' + str(N))
        try:
            test_lags = []
            test_xc_coefs = []
            #st.trim(t1)
            st_test = st.slice(t1, t1 + winlength)
            for i in range(nsta):
                xc_output = xcorr_max(correlate(st_test.traces[i], st_test.traces[(i+1) % nsta], maxshift))
                test_lags.append(xc_output[0])
                test_xc_coefs.append(xc_output[1])
            t.append(t1)
            lag = np.hstack([lag, np.array(test_lags).reshape(nsta, 1)])
            xc_coef = np.hstack([xc_coef, np.array(test_xc_coefs).reshape(nsta, 1)])
            consistency.append(np.sum(test_lags))
        except:
            pass
    return([t, lag[:,1:], xc_coef[:,1:], np.array(consistency)])
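Because the station pairs are chained into a ring via the (i + 1) % nsta index ((0, 1), (1, 2), ..., (nsta - 1, 0)), the pairwise lags should cancel around the loop, which is what the consistency array records. A hedged check one might run on the output (the 2-sample tolerance is an assumption for illustration):

import numpy as np

t, lag, xc_coef, consistency = check_lags(DB)  # DB as in the function above
assert np.all(np.abs(consistency) <= 2)  # closed-loop lag residual, in samples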
Example #8
    def rolling_window(a, b, wlen, stp):
        """Return the decorrelation and correlation indices as a function
        of time, computed over sliding windows."""
        Decorr = []
        Corr = []
        for window in a.slide(window_length=wlen, step=stp):
            tmpa = window.copy()
            stime = tmpa.stats.starttime
            etime = tmpa.stats.endtime
            tmpb = b.slice(starttime=stime, endtime=etime, nearest_sample=True)

            # Cross-correlation between a_i and b_j windows in the freq. domain:
            a_i = tmpa.data
            b_i = tmpb.data

            cc = correlate(a_i,
                           b_i,
                           normalize=True,
                           domain='time',
                           shift=2,
                           demean=True)
            shift, value = xcorr_max(cc)
            value = np.around(value, 3)

            # clip negative correlation values to zero
            value = max(value, 0.0)

            dec_inx = 1.0 - value  # decorrelation index
            cc_inx = value         # correlation index

            Decorr.append(dec_inx)
            Corr.append(cc_inx)

        return Decorr, Corr
Example #9
 def test_correlate_template_eqcorrscan_time(self):
     """
     Test full normalization for method='direct'.
     """
     result = [
         -2.24548906e-01, 7.10350871e-02, 2.68642932e-01, 2.75941312e-01,
         1.66854098e-01, 1.66086946e-02, -1.29057273e-01, -1.96172655e-01,
         -1.41613603e-01, -6.83271606e-03, 1.45768464e-01, 2.42143899e-01,
         1.98310092e-01, 2.16377302e-04, -2.41576880e-01, -4.00586188e-01,
         -4.32240069e-01, -2.88735539e-01, 1.26461715e-01, 7.09268868e-01,
         9.99999940e-01, 7.22769439e-01, 1.75955653e-01, -2.46459037e-01,
         -4.34027880e-01, -4.32590246e-01, -2.67131507e-01, -6.78363896e-04,
         2.08171085e-01, 2.32197508e-01, 8.64804164e-02, -1.14158235e-01,
         -2.53621429e-01, -2.62945205e-01, -1.40505865e-01, 3.35594788e-02,
         1.77415669e-01, 2.72263527e-01, 2.81718552e-01, 1.38080209e-01,
         -1.27307668e-01]
     data = read()[0].data
     template = data[400:600]
     data = data[380:620]
     cc = correlate_template(data, template, method='direct')
     np.testing.assert_allclose(cc, result, atol=1e-7)
     shift, corr = xcorr_max(cc)
     self.assertAlmostEqual(corr, 1.0)
     self.assertEqual(shift, 0)
Example #10
def calculate_apriori(st, t_before, t_after, plot=False):
    '''
    Calculate the apriori dt based on the earliest and latest
    cross-correlation. It cuts each trace between t_before and t_after,
    computes the cross-correlation between both traces and returns the
    shift in time between them. It assumes that the center of the traces
    is the zero time.

    Parameters
    ----------
    st : obspy.Stream()
        Obspy stream with cross-correlations from the same station pair.
        Each trace needs an average_date attribute so that the earliest
        and latest cross-correlation can be identified.
    t_before : float
        Can be negative or positive. Take into account that the middle
        of the trace is assumed to be the zero time.
    t_after : float
        Can be negative or positive. Take into account that the middle
        of the trace is assumed to be the zero time.
    plot : bool

    Returns
    -------
    apriori_dt : list of apriori dt estimates, one per trace in st.

    '''
    avg_dates = [tr.stats.average_date for tr in st]
    earliest_date = avg_dates[0]
    latest_date = avg_dates[0]
    earliest_i = 0
    latest_i = 0
    for i, t in enumerate(avg_dates):
        if t <= earliest_date:
            earliest_date = t
            earliest_i = i
        if t >= latest_date:
            latest_date = t
            latest_i = i

    # Now we calculate the cross-correlation between both traces and the shift.
    # This value will be used for the apriori estimates.
    tr1 = st[earliest_i]
    tr2 = st[latest_i]
    t1, data1 = trim_correltation_trace(tr1, t_before, t_after)
    t2, data2 = trim_correltation_trace(tr2, t_before, t_after)

    cc = correlate(tr1, tr2, 1000)
    shift, value = xcorr_max(cc)

    if plot:
        f, (ax1, ax2) = plt.subplots(2, 1, sharey=True, figsize=(8, 6))
        ax1.plot(t1, data1)
        ax1.plot(t2, data2)
        ax2.plot(t1, data1)
        ax2.plot(t2 + shift / tr1.stats.sampling_rate, data2)
        plt.show()
    # The shift is negated because we are shifting the first trace, but
    # actually the trace that was shifted is the one with the older
    # average date.
    time_shift = -shift / tr1.stats.sampling_rate

    delta_t = (latest_date - earliest_date)
    shift_rate = time_shift / delta_t

    apriori_dt = []
    for t in avg_dates:
        dt = (t - earliest_date) * shift_rate
        apriori_dt.append(dt)
    return apriori_dt
Example #11
 def test_correlate_different_length_of_signals(self):
     # Signals are aligned around the middle
     cc = correlate(self.a, self.c, 50)
     shift, _ = xcorr_max(cc)
     self.assertEqual(shift, -5 - (len(self.a) - len(self.c)) // 2)
Example #12
def run_xcorr(ds, tag, trace_num, post_lens, ncomp=0, plot_best_post=True):
    """Runs cross correlation on short traces focused around the first pick for a trc_num and returns the weights and shifts for the best window.
    :param ds: LDS object in use
    :param tag: tag of the ds
    :param trace_num: trace number within the tag
    :param post_lens: list of window lengths beyond the pick to test for the best xcorr result
    :param ncomp: arrival-sorted stn to use as reference trace
    :param plot_best_post: bool to plot and save the shifted traces for the auto-selected window
    """
    # cross-correlate to first arrival, get weight and shift for each trace
    # get very short, pick-windowed traces for this part
    trs_pre, xcorr_start = [
        200,
        100,
    ]  # the extra 100 leave room for shifting more easily
    trs = ds.get_traces(
        tag, trace_num, pre=trs_pre, tot_len=600
    )  # TODO: add params to the trc_dict so pre is documented?
    pks = ds.get_picks(tag, trace_num)
    good_pks = {s: pks[s] for s in pks if pks[s] > 0}
    seq = sorted(good_pks.keys(), key=lambda s: good_pks[s][0])
    astn = seq.pop(ncomp)  # get stn to autocorrelate
    weights, shifts, spreads, shifted_trs = [{}, {}, {}, {}]
    for post in post_lens:
        cut = slice(xcorr_start, trs_pre + post)
        azd = trs[astn][cut] - trs[astn][xcorr_start]  # zero the trace
        acorr = signal.correlate(azd, azd, mode="same")
        sh, aval = cross_correlation.xcorr_max(acorr)
        weights[post] = {astn: 1}
        shifts[post] = {astn: 0}
        shifted_trs[post] = {astn: trs[astn][cut]}
        for stn in seq:
            szd = trs[stn][cut] - trs[stn][xcorr_start]
            xcorr = signal.correlate(azd, szd, mode="same")
            sh, cval = cross_correlation.xcorr_max(xcorr)
            shifts[post][stn] = int(sh)
            weights[post][stn] = aval / cval
            # calculate spread for this post
            sh_cut = slice(max(cut.start - int(sh), 0), cut.stop - int(sh))
            adj = weights[post][stn] * trs[stn][sh_cut]
            shifted_trs[post][stn] = adj - adj[0]
        # check spread of adjusted traces if more than one post_len was provided
        if len(post_lens) > 1:
            # get polarity of shifted_trs[post] (they all match astn)
            is_up_first = abs(max(shifted_trs[post][astn])) > abs(
                min(shifted_trs[post][astn])
            )  # better to sample ~20pts past pick?
            # set spread check point at half the max amplitude # TODO: probably multiple points is better
            if is_up_first:
                check = np.max(shifted_trs[post][astn]) / 2
                hits = [np.argwhere(shifted_trs[post][astn] > check)[0][0]]
            else:
                check = np.min(shifted_trs[post][astn]) / 2
                hits = [np.argwhere(shifted_trs[post][astn] < check)[0][0]]
                # TODO: now works for any polarity but seems misplaced. Probably just forcing positive polarity would be better
            # now add check points for remaining stations
            for stn in seq:
                hits.append(np.argwhere(shifted_trs[post][stn] < check)[0][0])
            hits.sort()
            spreads[post] = hits[-1] - hits[0]

    if len(post_lens) > 1:
        # get best post_len based on shifted trace spreads
        best_post = min(spreads, key=spreads.get)
    else:
        best_post = post_lens[0]

    # plot shifted traces from best post for visual confirmation of no funny business
    if plot_best_post:
        plt.figure()
        plt.plot(shifted_trs[best_post][astn])
        for stn in seq:
            plt.plot(shifted_trs[best_post][stn])
        plt.show()

    return weights[best_post], shifts[best_post], best_post
Example #13
def calc_year_sta(year, sta):
    net, sta = sta.split('.')
    stime = UTCDateTime(str(year) + '-001T00:00:00')
    etime = UTCDateTime(str(year+1) + '-001T00:00')
    ctime = stime
    times, shifts, vals = [],[], []
    while ctime < etime:
        print(ctime)
        #try:
        if True:
            rand = np.random.randint(0,60*60*24)
            cnt = 0
            while cnt <= 4:
                try:
                #if True:
                    st = client.get_waveforms(net, sta,'*', 'BHZ', ctime + rand, ctime + 60 + rand, attach_response=True)
                    st.remove_response()
                    break
                except:
                    cnt += 1
            fig = plt.figure(1, figsize=(10,14))
            plt.subplot(3,1,1)
            plt.plot(st[0].times(), st[0].data*10**6)
            plt.ylabel(r'Velocity ($\mu m/s$)')
            plt.text(-9, np.max(st[0].data*10**6), '(a)')
            plt.xlim(min(st[0].times()), max(st[0].times()))
            plt.xlabel('Time (s)')
            plt.subplot(3,1,2)
            plt.plot(st[1].times(), st[1].data*10**6)
            plt.xlim(min(st[1].times()), max(st[1].times()))
            plt.ylabel(r'Velocity ($\mu m/s$)')
            plt.xlabel('Time (s)')
            plt.text(-9, np.max(st[1].data*10**6), '(b)')
            st.filter('bandpass', freqmax=1/4., freqmin=1./8.)
            st.merge(fill_value=0)
            st.resample(1000)
            st.sort()
            print(st)
            tr1 = st.select(location='00')[0]
            tr2 = st.select(location='10')[0]
            cc = correlate(tr1.data, tr2.data, 500)
            plt.subplot(3,1,3)
            plt.plot((np.arange(len(cc))-500)/1000., cc)
            plt.xlim((min((np.arange(len(cc))-500)/1000.), max((np.arange(len(cc))-500)/1000.) ))
            plt.ylabel('Correlation')
            plt.xlabel('Lag (s)')
            plt.text(-.65, 1., '(c)')
            #plt.savefig('example.png', format='PNG')
            plt.savefig('example.pdf', format='PDF', dpi=400)
            shift, val = xcorr_max(cc)
            shifts.append(shift)

            vals.append(val)
            times.append(str(ctime.year) + ', ' + str(ctime.julday) + ', ' + str(ctime.hour) + ', ' + str(ctime.minute) + ', ' + str((ctime + rand).second))
        #except:
        #    pass
        ctime += 24*60*60

    with open(net + '_' + sta + '_' + str(year) + '.pickle2', 'wb') as f:
        pickle.dump([shifts, vals, times], f)
    return
Example #14
for i in range(n_events):

    for j in range(i, n_events):

        xcorrij = xcorr(events[i], events[j], shift, full_xcorr=True)

        if xcorrij[1] < 0.:
            # absolute max XC is negative: store it as the negative
            # extremum and take the highest positive XC separately
            xcorr_lags_pos[i, j], xcorr_vals_pos[i, j] = xcorr_max(
                xcorrij[2], abs_max=False)
            xcorr_lags_neg[i, j], xcorr_vals_neg[i, j] = xcorr_max(
                xcorrij[2], abs_max=True)
        else:
            # absolute max XC is positive: take the minimum as the
            # negative extremum and the positive max via xcorr_max
            xcorr_vals_neg[i, j] = xcorrij[2].min()
            xcorr_lags_neg[i, j] = np.where(
                xcorrij[2] == xcorrij[2].min())[0][0] - shift
            xcorr_lags_pos[i, j], xcorr_vals_pos[i, j] = xcorr_max(
                xcorrij[2], abs_max=False)
#xcorrij = xcorr(events[i], events[j], 250, full_xcorr=True)
#xcorrij = correlate(signal1_t, signal2_t, 10, domain='time', demean=False)
Example #15
def filter(date, time, north, east, up, eq, showme=False):
    """
    @author: Cedric Twardzik
    @contact: cedric.twardz(at)gmail.com
    @inputs: date   [type datetime]: dates of the positions
             time   [type float]: times of the positions (s)
             north  [type float]: position in the north component
             east   [type float]: position in the east component
             up     [type float]: position in the vertical component
             eq     [type datetime]: date of the mainshock 
             showme [type logical]: show the stacks and the filter 
    """

    # Find the sampling interval
    dt = time[1] - time[0]

    # Remove the coseismic offset
    ib = date < eq
    ia = date > eq
    offset_n = north[ia][0] - north[ib][-1]
    offset_e = east[ia][0] - east[ib][-1]
    offset_u = up[ia][0] - up[ib][-1]
    north[ia] -= offset_n
    east[ia] -= offset_e
    up[ia] -= offset_u

    # Create the full time series
    full_time = np.arange(time[0], time[-1] + dt, dt)
    full_date = np.array([
        date[0] + datetime.timedelta(seconds=i * dt)
        for i in range(full_time.size)
    ])
    full_north = np.full(full_time.size, np.nan)
    full_east = np.full(full_time.size, np.nan)
    full_up = np.full(full_time.size, np.nan)
    ij, i, j = np.intersect1d(time,
                              full_time,
                              assume_unique=True,
                              return_indices=True)
    full_north[j] = north[i]
    full_east[j] = east[i]
    full_up[j] = up[i]

    # Sidereal day (Choi et al. 2004)
    sidereal_shift = datetime.datetime(
        2000, 1, 2, 0, 0, 0) - datetime.datetime(2000, 1, 1, 23, 56, 4, 0)
    sidereal_shift = int(sidereal_shift.total_seconds() / dt + 0.5001)

    # Maximum sidereal shift allowed
    ndays = (full_date[-1] - full_date[0]).days
    sidereal_shift *= (ndays * 4)

    # Remove the gaps in the time series using simple linear interpolation
    nans = np.isnan(full_north)
    full_north[nans] = np.interp(full_time[nans], full_time[~nans],
                                 full_north[~nans])
    full_east[nans] = np.interp(full_time[nans], full_time[~nans],
                                full_east[~nans])
    full_up[nans] = np.interp(full_time[nans], full_time[~nans],
                              full_up[~nans])

    # Extract the first day
    tstart = full_date[0]
    tstop = full_date[0] + datetime.timedelta(days=1)
    index = (tstart <= full_date) & (full_date < tstop)
    stack_north = full_north[index] * full_north[index].std()
    stack_east = full_east[index] * full_east[index].std()
    stack_up = full_up[index] * full_up[index].std()
    stack_time = full_time[index]
    stack_size = stack_time.size
    ntrace_north = full_north[index].std()
    ntrace_east = full_east[index].std()
    ntrace_up = full_up[index].std()

    # Show the stacks
    if showme:
        plt.close()
        fig, ax = plt.subplots(3, 1, sharex='col')
        ax[0].plot(stack_time,
                   stack_north / ntrace_north,
                   'k-',
                   lw=1.0,
                   alpha=0.5)
        ax[1].plot(stack_time,
                   stack_east / ntrace_east,
                   'k-',
                   lw=1.0,
                   alpha=0.5)
        ax[2].plot(stack_time, stack_up / ntrace_up, 'k-', lw=1.0, alpha=0.5)

    # Initialize the day counter
    iday = 1

    # Stack the days before the earthquake
    while True:

        # Extract the following day
        tstart = tstop
        tstop = tstart + datetime.timedelta(days=1)
        index = (tstart - datetime.timedelta(hours=1.0) <= full_date) & (
            full_date < tstop + datetime.timedelta(hours=1.0))
        i0 = int(3600.0 / dt + 0.5001)

        # Ensure that we are not getting any data from after the mainshock
        if np.any(full_date[index] >= eq): break

        # Select the relevant data
        n = full_north[index]
        e = full_east[index]
        u = full_up[index]

        # Cross-correlation between the current day and the stack (position)
        #cc_north = xcorr.correlate(stack_north/ntrace_north, n, stack_size)
        #cc_east  = xcorr.correlate(stack_east /ntrace_east , e, stack_size)
        #cc_up    = xcorr.correlate(stack_up   /ntrace_up   , u, stack_size)

        # Cross-correlation between the current day and the stack (velocity)
        cc_north = xcorr.correlate(np.diff(stack_north / ntrace_north),
                                   np.diff(n), stack_size - 1)
        cc_east = xcorr.correlate(np.diff(stack_east / ntrace_east),
                                  np.diff(e), stack_size - 1)
        cc_up = xcorr.correlate(np.diff(stack_up / ntrace_up), np.diff(u),
                                stack_size - 1)

        # Find the optimal shift to maximize the cross-correlation
        shift_north, ccmax_north = xcorr.xcorr_max(cc_north, abs_max=False)
        shift_east, ccmax_east = xcorr.xcorr_max(cc_east, abs_max=False)
        shift_up, ccmax_up = xcorr.xcorr_max(cc_up, abs_max=False)

        # Show the stacks
        if showme:
            ax[0].plot(stack_time,
                       n[i0 - shift_north:i0 + stack_size - shift_north],
                       'k-',
                       lw=1.0,
                       alpha=0.5)
            ax[1].plot(stack_time,
                       e[i0 - shift_east:i0 + stack_size - shift_east],
                       'k-',
                       lw=1.0,
                       alpha=0.5)
            ax[2].plot(stack_time,
                       u[i0 - shift_up:i0 + stack_size - shift_up],
                       'k-',
                       lw=1.0,
                       alpha=0.5)

        # Add the trace to the stack
        stack_north += n[i0 - shift_north:i0 + stack_size -
                         shift_north] * n[i0 - shift_north:i0 + stack_size -
                                          shift_north].std()
        stack_east += e[i0 - shift_east:i0 + stack_size -
                        shift_east] * e[i0 - shift_east:i0 + stack_size -
                                        shift_east].std()
        stack_up += u[i0 - shift_up:i0 + stack_size -
                      shift_up] * u[i0 - shift_up:i0 + stack_size -
                                    shift_up].std()

        # Update the normalization factor
        ntrace_north += n[i0 - shift_north:i0 + stack_size - shift_north].std()
        ntrace_east += e[i0 - shift_east:i0 + stack_size - shift_east].std()
        ntrace_up += u[i0 - shift_up:i0 + stack_size - shift_up].std()

        # Update the day counter
        iday += 1

    # Normalize the final stack
    stack_north /= ntrace_north
    stack_east /= ntrace_east
    stack_up /= ntrace_up

    # Show the final stack
    if showme:
        ax[0].plot(stack_time, stack_north, 'r-', lw=2.0, alpha=0.9)
        ax[1].plot(stack_time, stack_east, 'r-', lw=2.0, alpha=0.9)
        ax[2].plot(stack_time, stack_up, 'r-', lw=2.0, alpha=0.9)
        plt.show()

    # Initialize the sidereal filter
    filter_date = full_date.copy()
    filter_time = full_time.copy()
    filter_north = np.full(filter_time.size, np.nan)
    filter_east = np.full(filter_time.size, np.nan)
    filter_up = np.full(filter_time.size, np.nan)

    # Initialize the day counter
    iday = 0

    # Build the sidereal filter
    while True:

        # Extract the relevant dates
        tstart = full_date[0] + datetime.timedelta(days=iday)
        tstop = full_date[0] + datetime.timedelta(days=iday + 1)
        index = (tstart <= full_date) & (full_date < tstop)

        # Ensure we have a full day of data
        if index.sum() != 2880: break

        # Select the relevant data
        n = full_north[index]
        e = full_east[index]
        u = full_up[index]

        # Cross-correlation between the current day and the stack (position)
        #cc_north = xcorr.correlate(stack_north, n, stack_size)
        #cc_east  = xcorr.correlate(stack_east , e, stack_size)
        #cc_up    = xcorr.correlate(stack_up   , u, stack_size)

        # Cross-correlation between the current day and the stack (velocity)
        cc_north = xcorr.correlate(np.diff(stack_north), np.diff(n),
                                   stack_size - 1)
        cc_east = xcorr.correlate(np.diff(stack_east), np.diff(e),
                                  stack_size - 1)
        cc_up = xcorr.correlate(np.diff(stack_up), np.diff(u), stack_size - 1)

        # Find the optimal shift to maximize the cross-correlation
        shift_north, ccmax_north = xcorr.xcorr_max(cc_north, abs_max=False)
        shift_east, ccmax_east = xcorr.xcorr_max(cc_east, abs_max=False)
        shift_up, ccmax_up = xcorr.xcorr_max(cc_up, abs_max=False)

        # Insert the stack
        filter_north[index] = np.roll(
            stack_north * signal.tukey(stack_size, 0.05), -shift_north)
        filter_east[index] = np.roll(
            stack_east * signal.tukey(stack_size, 0.05), -shift_east)
        filter_up[index] = np.roll(stack_up * signal.tukey(stack_size, 0.05),
                                   -shift_up)

        # Update the day counter
        iday += 1

    # Remove the mean of the filter
    filter_north -= np.nanmean(filter_north)
    filter_east -= np.nanmean(filter_east)
    filter_up -= np.nanmean(filter_up)

    # Remove the sidereal filter
    full_north -= filter_north
    full_east -= filter_east
    full_up -= filter_up

    # Get the relevant part of the filter
    ij, i, j = np.intersect1d(time,
                              full_time,
                              assume_unique=True,
                              return_indices=True)
    north[i] = full_north[j]
    east[i] = full_east[j]
    up[i] = full_up[j]

    # Add the coseismic offset
    north[ia] += offset_n
    east[ia] += offset_e
    up[ia] += offset_u

    # All done
    return north, east, up
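For orientation, the sidereal correction used above is the roughly 236 s difference between the solar day (86400 s) and the sidereal day (~86164 s); at the 30 s sampling implied by the 2880-samples-per-day check, that is about 8 samples per day. A hedged numeric check:

import datetime

dt = 30.0  # sampling interval in seconds, inferred from the 2880-sample day
sidereal = datetime.datetime(2000, 1, 2) - datetime.datetime(2000, 1, 1, 23, 56, 4)
print(int(sidereal.total_seconds() / dt + 0.5001))  # -> 8 samples per day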
Example #16
def calculate_first_apriori_dt(clock_drift_object,
                               correlations,
                               plot=False,
                               **kwargs):
    '''
    Calculates the apriori estimate given several correlation files of the
    same station pair, provided that the correlation was performed in the
    same order for all files (i.e., station 1 and station 2 are the same
    throughout).

    Parameters
    ----------
    clock_drift_object : Clock_drift()
        DESCRIPTION.
    correlations : list
        List of Correlation objects. You can use the following function
        to retrieve all the correlations for a given station pair:
        correlations = Clock_drift.get_correlations_of_stationpair(
            station1_code, station2_code)
    plot : bool
        If set to True, provide min_t and max_t as kwargs to trim the
        correlation to the time window you want to check.

    Returns
    -------
    None.

    '''
    freqmin = clock_drift_object.processing_parameters.freqmin
    freqmax = clock_drift_object.processing_parameters.freqmax
    if len(correlations) < 2:
        msg = "There should be at least two correlations to use this method"
        raise Exception(msg)

    sta1 = list(
        set([correlation.station1_code for correlation in correlations]))
    sta2 = list(
        set([correlation.station2_code for correlation in correlations]))
    if len(sta1) != 1 or len(sta2) != 1:
        msg = "The first and second station in the correlations are not the "
        msg += "same for all the correlations."
        raise Exception(msg)

    avg_dates = [correlation.average_date for correlation in correlations]

    # Read the correlation of the earliest date
    earliest_date = min(avg_dates)
    earliest_index = avg_dates.index(earliest_date)
    earliest_path2file = correlations[earliest_index].file_path
    earliest_tr = read_correlation_file(path2file=earliest_path2file)
    earliest_tr = earliest_tr.filter('bandpass',
                                     freqmin=freqmin,
                                     freqmax=freqmax,
                                     corners=4,
                                     zerophase=True)

    # Read the correlation with the latest date.
    latest_date = max(avg_dates)
    latest_index = avg_dates.index(latest_date)
    latest_path2file = correlations[latest_index].file_path
    latest_tr = read_correlation_file(path2file=latest_path2file)
    latest_tr = latest_tr.filter('bandpass',
                                 freqmin=freqmin,
                                 freqmax=freqmax,
                                 corners=4,
                                 zerophase=True)

    cc = correlate(earliest_tr.data, latest_tr.data, 1000)
    shift, value = xcorr_max(cc)
    time_shift = shift / earliest_tr.stats.sampling_rate

    delta_t = (latest_date - earliest_date)
    shift_rate = time_shift / delta_t

    for correlation in correlations:
        t = correlation.average_date
        dt = (t - earliest_date) * shift_rate
        if clock_drift_object.get_station(
                correlation.station1_code).needs_correction:
            correlation.first_apriori_dt1 = dt
            correlation.first_apriori_dt2 = 0
        elif clock_drift_object.get_station(
                correlation.station2_code).needs_correction:
            correlation.first_apriori_dt1 = 0
            correlation.first_apriori_dt2 = dt
        else:
            raise Exception("Neither station of the pair needs correction.")

    if plot:
        min_t = kwargs['min_t']
        max_t = kwargs['max_t']
        t1, data1 = trim_correlation_trace(earliest_tr, min_t, max_t)
        t2, data2 = trim_correlation_trace(latest_tr, min_t, max_t)
        f, (ax1, ax2) = plt.subplots(2, 1, sharey=True, figsize=(8, 6))
        ax1.set_title('Before correction ' + earliest_tr.stats.station_pair)
        ax2.set_title('After correction ' + earliest_tr.stats.station_pair)
        ax1.plot(t1, data1, label=earliest_tr.stats.average_date)
        ax1.plot(t2, data2, label=latest_tr.stats.average_date)
        ax2.plot(t1, data1, label=earliest_tr.stats.average_date)
        ax2.plot(t2 + time_shift, data2, label=latest_tr.stats.average_date)
        ax2.set_xlabel('Time [s]')
        ax2.set_ylabel('Amplitudes')
        ax1.set_ylabel('Amplitudes')
        plt.tight_layout()
        ax1.legend(loc='best')
        ax2.legend(loc='best')
        plt.show()
Example #17
import numpy as np
import matplotlib.pyplot as plt
import obspy.signal.cross_correlation as cc


def SHIFT(np_array, time_shift):
    # Signature and buffer allocation reconstructed from the call below
    # (the original snippet starts mid-function); treat them as assumptions.
    new_array = np.zeros_like(np_array)
    if time_shift < 0:
        new_array[-time_shift:] = np_array[:time_shift]
    elif time_shift == 0:
        new_array[:] = np_array[:]
    else:
        new_array[:-time_shift] = np_array[time_shift:]
    return new_array


a = np.ones(5)
trace_a = np.hstack((np.zeros(100), a, np.zeros(100)))
b = np.ones(5)
trace_b = np.hstack((np.zeros(100), b, np.zeros(100)))
trace_b = np.roll(trace_b, 5)

cc_obspy = cc.correlate(trace_b, trace_a, 5)
shift_centered, MAX_CC = cc.xcorr_max(cc_obspy, abs_max=False)

b_shift = SHIFT(trace_b, shift_centered)

plt.figure()
plt.subplot(111)
plt.plot(trace_a, label='Observed')
plt.plot(trace_b, label='Synthetic')
plt.plot(b_shift, linestyle=':', label='Synthetic shifted')
plt.legend()
plt.show()

Example #18
def doCorrelation(net, sta, chan, start, end, duration, interval,
                  keep_response, outfilename, resp_filepath, be_verbose):
    stime = UTCDateTime(start)
    etime = UTCDateTime(end)
    ctime = stime
    # seconds to skip between windows; set with the --interval <minutes>
    # option (the default corresponds to 10 days)
    skiptime = interval * 60

    # location constants
    LOC00 = '00'
    LOC10 = '10'

    # True to calculate values, False to read them from a pickle file
    # this might be desirable when debugging the plotting code piece
    calc = True

    print(net, sta, LOC00, LOC10, duration, interval, stime, etime,
          keep_response, resp_filepath)
    if calc:
        times, shifts, vals = [], [], []
        while ctime < etime:
            cnt = 1
            attach_response = True

            if resp_filepath:
                inv00 = read_inventory(
                    f'{resp_filepath}/RESP.{net}.{sta}.{LOC00}.{chan}', 'RESP')
                inv10 = read_inventory(
                    f'{resp_filepath}/RESP.{net}.{sta}.{LOC10}.{chan}', 'RESP')
                attach_response = False

            st00 = getStream(net, sta, LOC00, chan, ctime, duration,
                             be_verbose, attach_response)
            st10 = getStream(net, sta, LOC10, chan, ctime, duration,
                             be_verbose, attach_response)

            if len(st00) == 0:
                if be_verbose:
                    print("no traces returned for {} {} {} {} {}".format(
                        net, sta, LOC00, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if len(st10) == 0:
                if be_verbose:
                    print("no traces returned for {} {} {} {} {}".format(
                        net, sta, LOC10, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if len(st00) > 1:
                if be_verbose:
                    print("gap(s) found in segment for {} {} {} {} {}".format(
                        net, sta, LOC00, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if len(st10) > 1:
                if be_verbose:
                    print("gap(s) found in segment for {} {} {} {} {}".format(
                        net, sta, LOC10, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if ((st00[0].stats.endtime - st00[0].stats.starttime) <
                (duration - 1.0 / st00[0].stats.sampling_rate)):
                if be_verbose:
                    print("skipping short segment in {} {} {} {} {}".format(
                        net, sta, LOC00, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if ((st10[0].stats.endtime - st10[0].stats.starttime) <
                (duration - 1.0 / st10[0].stats.sampling_rate)):
                if be_verbose:
                    print("skipping short segment in {} {} {} {} {}".format(
                        net, sta, LOC10, chan, ctime),
                          file=sys.stderr)
                ctime += skiptime
                continue

            if not attach_response:
                st00.attach_response(inv00)
                st10.attach_response(inv10)

            if not keep_response:
                st00.remove_response()
                st10.remove_response()

            # apply a bandpass filter and merge before resampling
            st00.filter('bandpass',
                        freqmax=1 / 4.,
                        freqmin=1. / 8.,
                        zerophase=True)
            st00.resample(1000)

            st10.filter('bandpass',
                        freqmax=1 / 4.,
                        freqmin=1. / 8.,
                        zerophase=True)
            st10.resample(1000)

            # get the traces from the stream for each location
            try:
                tr1 = st00.select(location=LOC00)[0]
            except Exception as err:
                print(err, file=sys.stderr)
            try:
                tr2 = st10.select(location=LOC10)[0]
            except Exception as err:
                print(err, file=sys.stderr)

            # trim sample to start and end at the same times
            trace_start = max(tr1.stats.starttime, tr2.stats.starttime)
            trace_end = min(tr1.stats.endtime, tr2.stats.endtime)

            # debug
            if be_verbose:
                print("Before trim", file=sys.stderr)
                print("tr1 start: {} tr2 start: {}".format(
                    tr1.stats.starttime, tr2.stats.starttime),
                      file=sys.stderr)
                print("tr1 end: {} tr2 end: {}".format(tr1.stats.endtime,
                                                       tr2.stats.endtime),
                      file=sys.stderr)
                print("max trace_start: {} min trace_end {}".format(
                    trace_start, trace_end),
                      file=sys.stderr)
            tr1.trim(trace_start, trace_end)
            tr2.trim(trace_start, trace_end)
            # debug
            if be_verbose:
                print("After trim", file=sys.stderr)
                print("tr1 start: {} tr2 start: {}".format(
                    tr1.stats.starttime, tr2.stats.starttime),
                      file=sys.stderr)
                print("tr1 end: {} tr2 end: {}".format(tr1.stats.endtime,
                                                       tr2.stats.endtime),
                      file=sys.stderr)

            # calculate time offset
            time_offset = tr1.stats.starttime - tr2.stats.starttime
            cc = correlate(tr1.data, tr2.data, 500)

            # xcorr_max returns the shift and value of the maximum of the cross-correlation function
            shift, val = xcorr_max(cc)
            # append to lists for plotting
            shifts.append(shift)
            vals.append(val)
            times.append(ctime.year + ctime.julday / 365.25)

            print("duration: {} to {} offset: {}\tshift: {} value: {}".format(
                ctime, ctime + duration, time_offset, shift, val))

            # advance ctime for the next window
            if be_verbose:
                print("ctime: {}".format(ctime), file=sys.stderr)
            ctime += skiptime

        # persist the data in a pickle file
        if outfilename:
            with open(outfilename + '.pickle', 'wb') as f:
                pickle.dump([shifts, vals, times], f)
        else:
            with open(net + '_' + sta + '_' + net + '_' + sta + '.pickle',
                      'wb') as f:
                pickle.dump([shifts, vals, times], f)
    else:
        # retrieve the data from the pickle file
        if outfilename:
            with open(outfilename + '.pickle', 'rb') as f:
                shifts, vals, times = pickle.load(f)
        else:
            with open(net + '_' + sta + '_' + net + '_' + sta + '.pickle',
                      'rb') as f:
                shifts, vals, times = pickle.load(f)

    mpl.rc('font', serif='Times')
    mpl.rc('font', size=16)

    fig = plt.figure(1, figsize=(10, 10))

    plt.subplot(2, 1, 1)
    plt.title(net + ' ' + sta + ' ' + LOC00 + ' compared to ' + net + ' ' +
              sta + ' ' + LOC10)
    plt.plot(times, shifts, '.')
    plt.ylabel('Time Shift (ms)')

    plt.subplot(2, 1, 2)
    plt.plot(times, vals, '.')
    #plt.ylim((0.8, 1.0))
    plt.ylim((0, 1.0))
    plt.xlabel('Time (year)')
    plt.ylabel('Correlation')

    if outfilename:
        plt.savefig(outfilename + '.PDF', format='PDF')
    else:
        plt.savefig(net + '_' + sta + '_' + net + '_' + sta + '.PDF',
                    format='PDF')
Example #19
 n = fullnorth[(fulldate >= t0 - dt.timedelta(hours=1))
               & (fulldate < t1 + dt.timedelta(hours=1))]
 e = fulleast[(fulldate >= t0 - dt.timedelta(hours=1))
              & (fulldate < t1 + dt.timedelta(hours=1))]
 u = fullup[(fulldate >= t0 - dt.timedelta(hours=1))
            & (fulldate < t1 + dt.timedelta(hours=1))]
 # ------------------------------------------------------------------------------------------ #
 # Cross-correlation between the current day and the stack and maximize the cross-correlation #
 # ------------------------------------------------------------------------------------------ #
 nmax = nstack.size
 ncc = xcorr.correlate(nstack / ntrace[0], n, nmax)
 ecc = xcorr.correlate(estack / ntrace[1], e, nmax)
 ucc = xcorr.correlate(ustack / ntrace[2], u, nmax)
 nnoiz, enoiz, unoiz = np.std(ncc), np.std(ecc), np.std(ucc)
 nshift, n_val = xcorr.xcorr_max(ncc, abs_max=False)
 eshift, e_val = xcorr.xcorr_max(ecc, abs_max=False)
 ushift, u_val = xcorr.xcorr_max(ucc, abs_max=False)
 # ---------------------------------------- #
 # Plot the result of the cross-correlation #
 # ---------------------------------------- #
 if showme:
     time = np.arange(n.size) * delta - 3600.0
     plt.subplot(311)
     plt.plot(time + nshift * delta, n, 'k', alpha=0.5)
     plt.subplot(312)
     plt.plot(time + eshift * delta, e, 'k', alpha=0.5)
     plt.subplot(313)
     plt.plot(time + ushift * delta, u, 'k', alpha=0.5)
     plt.pause(1)
 # ------------------------------------- #
Example #20
# set master event for correlation
masterEvent = waveforms[11]
#masterEvent = waveforms[122]

# open file for output
outFile = h5py.File(path + type + "_correlations.h5", "w")

# make some arrays for storing output
shifts = np.zeros((len(waveforms)))
corrCoefs = np.zeros((len(waveforms)))

for i in range(len(waveforms)):

    # correlate master event and waveform i
    corr = correlate_template(masterEvent, waveforms[i])
    shift, corrCoef = xcorr_max(corr)

    # save output
    shifts[i] = shift
    corrCoefs[i] = corrCoef

    # give the user some output
    print("Correlated master event with " +
          str(round(i / len(waveforms) * 100)) + "% of events")

# write output to file
outFile.create_dataset("corrCoefs", data=corrCoefs)
outFile.create_dataset("shifts", data=shifts)

# close output file
outFile.close()
Example #21
def plot_similarity_matrix():

    # base_dir = '/Users/nunn/lunar_data/PDART'
    base_dir = '/Users/nunn/Google Drive/for_Katja/PDART2'
    catalog_file = '../LunarCatalog_Nakamura_1981_and_updates_v1/LunarCatalog_Nakamura_1981_and_updates_v1_A01.xml'
    catalog = read_events(catalog_file)
    stream1 = Stream(traces=[])
    stream2 = Stream(traces=[])
    i = 0
    j = 0
    k = 0
    values = []

    nx = 24
    ny = 24
    x = range(0, nx + 1)
    y = range(0, ny + 1)

    X, Y = np.meshgrid(x, y, indexing='xy')
    correlation = np.zeros(shape=(len(x), len(y)))

    for ev in catalog:
        picks = ev.picks
        for pick in picks:
            pick_time = pick.time
            t01 = pick.time
            startday = UTCDateTime(year=pick_time.year,
                                   julday=pick_time.julday)
            year = pick.time.year
            julday = startday.julday
            station = pick.waveform_id.station_code
            channel = pick.waveform_id.channel_code
            if station == 'S12' and year == 1973 and channel == 'MHZ':
                try:
                    filename = 'XA.%s..MHZ.%s.%03d.gz' % (station, str(year),
                                                          julday)
                    filepath = os.path.join(base_dir, str(year), 'XA', station,
                                            'MHZ', filename)
                except Exception as e:
                    print(e)

                st1 = read(filepath)
                st1 = st1.trim(starttime=pick_time - 1200,
                               endtime=pick_time + 3600)
                #    st1 = st1.merge()
                tra = st1.select(component="Z")[0]
                tr1 = tra.copy()
                tr1.detrend()
                tr1.filter("bandpass", freqmin=0.3, freqmax=0.9, corners=3)
                i = i + 1
                print('    i = {0}'.format(i))
                print(tr1)
                stream1.append(tr1)
                stream2.append(tr1)

    print(('    STREAM = {0}'.format(stream1)))

    for trj in stream1:
        for trk in stream2:
            cc = correlate(trj, trk, 31802)
            shift, value = xcorr_max(cc)
            values.append(value)
            j = j + 1

    # fill the correlation matrix (only one half required because
    # it is symmetric)
    for x1 in x:
        for y1 in y[x1:]:
            k = k + 1
            if x1 == y1:
                correlation[x1, y1] = 1
            else:
                correlation[x1, y1] = abs(values[k - 1])
            # copy to the bottom half of the matrix
            correlation[y1, x1] = correlation[x1, y1]

    fig, (ax0, ax1) = plt.subplots(ncols=2)

    im = ax0.pcolormesh(X, Y, correlation)
    fig.colorbar(im, ax=ax0)
    ax0.set_title('Similarity')
    ax0.set_aspect('equal')

    dissimilarity = distance.squareform(1 - correlation)
    threshold = 0.3
    linkage = hierarchy.linkage(dissimilarity, method="single")
    clusters = hierarchy.fcluster(linkage, threshold, criterion="distance")

    ax1 = plt.subplot(122)
    ax1.set_title('Dendrogram')
    # hierarchy.dendrogram(linkage, color_threshold=0.3)
    hierarchy.dendrogram(linkage)
    plt.xlabel("Event number")
    plt.ylabel("Dissimilarity")
    plt.show()
Example #22
    def test_shift(self):
        tr = read()[0]
        dt = tr.stats.delta
        t = tr.stats.starttime = UTC('2018-01-01T00:00:10.000000Z')
        tr2 = tr.copy()
        _downsample_and_shift(tr2)
        self.assertEqual(tr2, tr)

        tr2 = tr.copy()
        tr2.stats.starttime = t + 0.1 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)

        tr2 = tr.copy()
        tr2.stats.starttime = t - 0.1 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)

        tr2 = tr.copy()
        tr2.stats.starttime = t - 0.49 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)

        tr2 = tr.copy()
        tr2.stats.starttime = t - 0.0001 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)

        # shift cumulatively by +1 sample
        tr2 = tr.copy()
        tr2.stats.starttime += 0.3 * dt
        _downsample_and_shift(tr2)
        tr2.stats.starttime += 0.3 * dt
        _downsample_and_shift(tr2)
        tr2.stats.starttime += 0.4 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)
        np.testing.assert_allclose(tr2.data[201:-200],
                                   tr.data[200:-201],
                                   rtol=1e-2,
                                   atol=1)
        cc = correlate(tr2.data, tr.data, 1000)
        shift, cc_max = xcorr_max(cc)
        self.assertEqual(shift, 1)
        self.assertGreater(cc_max, 0.995)

        # shift cumulatively by -1 sample
        tr2 = tr.copy()
        tr2.stats.starttime -= 0.3 * dt
        _downsample_and_shift(tr2)
        tr2.stats.starttime -= 0.3 * dt
        _downsample_and_shift(tr2)
        tr2.stats.starttime -= 0.4 * dt
        _downsample_and_shift(tr2)
        self.assertEqual(tr2.stats.starttime, t)
        np.testing.assert_allclose(tr2.data[200:-201],
                                   tr.data[201:-200],
                                   rtol=1e-2,
                                   atol=2)
        cc = correlate(tr2.data, tr.data, 1000)
        shift, cc_max = xcorr_max(cc)
        self.assertEqual(shift, -1)
        self.assertGreater(cc_max, 0.995)
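
The lag returned by xcorr_max is counted in samples, so dividing by the
sampling rate converts it to seconds. A small sketch on synthetic data; the
100 Hz rate and the 4-sample delay are assumptions for illustration:

import numpy as np
from obspy.signal.cross_correlation import correlate, xcorr_max

sr = 100.0  # assumed sampling rate in Hz
rng = np.random.default_rng(1)
sig = rng.standard_normal(500)
delayed = np.roll(sig, 4)  # sig delayed by 4 samples (wraps at the edge)

cc = correlate(delayed, sig, 50)
lag, value = xcorr_max(cc)
print(lag, lag / sr)  # -> 4 samples, i.e. 0.04 s
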
Example No. 26
    #    print(sta)
    #    print(stb)

    ta = UTCDateTime("1973-01-06T05:39:12.209122Z") + j - start_time
    tb = UTCDateTime("1973-03-01T07:18:03.829105Z") - 1.020545 + j - start_time

    trc = tra.copy()
    trd = trb.copy()

    trc.trim(starttime=ta, endtime=ta + k)
    trd.trim(starttime=tb, endtime=tb + k)

    t_mid = j + k / 2  # midpoint of the correlation window

    cc2 = correlate(trc, trd, 2)
    shift, value2 = xcorr_max(cc2)

    plt.subplot(212, sharex=ax1)
    plt.bar(t_mid, value2, width=9)
    plt.grid(which='major', axis='both')
    plt.axvline(x=start_time,
                color='k',
                linestyle="--",
                marker=None,
                linewidth=1.0)
    plt.legend(bbox_to_anchor=(0., 1.02, 1., .102),
               loc=2,
               ncol=2,
               mode="expand",
               borderaxespad=0.)
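
Because correlate demeans and normalizes by default, value2 above is a
correlation coefficient in [-1, 1], and a signal correlated with itself peaks
at exactly (0, 1.0). A quick sanity check on a synthetic signal:

import numpy as np
from obspy.signal.cross_correlation import correlate, xcorr_max

sig = np.sin(np.linspace(0, 20, 400))
cc = correlate(sig, sig, 100)
print(xcorr_max(cc))  # -> (0, 1.0): normalized autocorrelation
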
Example No. 27
    def run_misfit(self, phases: [str], st_obs: _obspy.Stream,
                   st_syn: _obspy.Stream, variances: [float]):
        assert (len(st_obs) == len(st_syn)) and (len(st_obs[0].data) == len(
            st_syn[0].data)), (
                "st_obs and st_syn should have equal amount"
                " of traces AND the trace lengths should be the same")
        # TODO: fix the cross-correlation function
        shift_CC = [None] * len(phases)
        misfit_CC = [None] * len(phases)

        import numpy as np
        import obspy.signal.cross_correlation as cc

        for iphase in range(len(phases)):
            # Nothing else worked, so both signals are zero-padded here;
            # the cross-correlation result is still suspect, no clue why.
            pad = 25
            a = st_obs[iphase].data
            resulta = np.zeros(len(a) + 2 * pad)
            resulta[pad:pad + len(a)] = a

            b = st_syn[iphase].data
            resultb = np.zeros(len(a) + 2 * pad)
            resultb[pad:pad + len(b)] = b

            # this padded correlation is immediately superseded below
            cc_obspy = cc.correlate(resultb, resulta, len(resulta))

            max_shift = 1.0
            dt = st_obs[iphase].stats.delta
            max_shift_sample = int(max_shift / dt)

            cc_obspy = cc.correlate(st_syn[iphase].data, st_obs[iphase].data,
                                    max_shift_sample)
            shift_centered, MAX_CC = cc.xcorr_max(cc_obspy, abs_max=False)

            # full-length correlation; the first argument is the trace
            # that would be shifted in the end
            c = cc.correlate(st_syn[iphase].data, st_obs[iphase].data,
                             len(st_syn[iphase].data))
            shift, Max_c = cc.xcorr_max(c, abs_max=False)

            corr_array = _correlate(
                st_syn[iphase].data,
                st_obs[iphase].data,
                domain="time",
                shift=self.shift,
            )
            shift_CC[iphase], misfit_CC[iphase] = _xcorr_max(corr_array,
                                                             abs_max=False)

            misfit = corr_array[(len(corr_array) - 1) // 2 +
                                int(shift_CC[iphase])]

            mid = (len(corr_array) - 1) / 2
            if len(corr_array) % 2 == 1:
                mid = int(mid)
            t = np.linspace(
                0, len(corr_array), len(corr_array), endpoint=False) - mid
            import matplotlib.pyplot as plt

            plt.close()
            plt.plot(t, corr_array)
            # plt.plot(st_syn[iphase].times(), st_syn[iphase].data)
            # plt.plot(st_obs[iphase].times(), st_obs[iphase].data)
            plt.show()
            plt.close()

        return misfit_CC
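
The manual zero-padding above can be written more directly with np.pad; a
hedged equivalent sketch (25 zeros on each side, matching the pad width used
above; pad_zeros is a name introduced here for illustration):

import numpy as np

def pad_zeros(x, pad=25):
    # symmetric zero padding, equivalent to the manual insertion above
    return np.pad(x, pad, mode="constant")

# e.g. resulta = pad_zeros(st_obs[iphase].data)
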
Example No. 28
 def test_correlate_different_length_of_signals(self, state):
     # Signals are aligned around the middle
     cc = correlate(state.a, state.c, 50)
     shift, _ = xcorr_max(cc)
     assert shift == -5 - (len(state.a) - len(state.c)) // 2
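
The expected value in the assertion encodes the convention that signals of
different length are aligned around their midpoints. A minimal sketch with
synthetic spikes (lengths and lag window are illustrative): a spike equally
offset from each midpoint correlates at zero lag.

import numpy as np
from obspy.signal.cross_correlation import correlate, xcorr_max

a = np.zeros(100)
a[50] = 1.0  # spike half a sample past the midpoint (49.5)
c = np.zeros(80)
c[40] = 1.0  # spike half a sample past this midpoint (39.5)
cc = correlate(a, c, 50)
shift, _ = xcorr_max(cc)
print(shift)  # -> 0: equal offsets from the midpoints, so zero lag
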
Example No. 29
def process(dir_a,
            dir_b,
            min_period=10.,
            max_period=30.,
            peak_amplitude=False,
            cross_correlate=True,
            color_a="darkorange",
            color_b="mediumblue",
            show=True,
            save=False):
    """
    Read and process the data
    """
    linespecs()

    # Get all the specfem files from directory A, sort them alphabetically
    semd_files_a = glob.glob(os.path.join(dir_a, '*.sem?'))
    fids, stations = [], []
    for semd in semd_files_a:
        fids.append(os.path.basename(semd))
        stations.append(os.path.basename(semd).split('.')[1])

    # Set the tag template for glob to search for stations
    # (takes the extension from the last file of the loop above)
    _, _, _, ext = os.path.basename(semd).split('.')

    # Remove duplicate stations for search
    stations_fids = np.array([fids, stations])
    unique_stations = np.unique(stations_fids[1, :])

    # Loop through all of the specfem files and read them in
    throwaway_time = UTCDateTime('2000-01-01T00:00:00')
    for sta in unique_stations:
        print(sta, end=" ")
        tag_template = f"??.{sta}.???.{ext}"
        stations_a = glob.glob(os.path.join(dir_a, tag_template))
        stations_b = glob.glob(os.path.join(dir_b, tag_template))

        # Make sure that there are files found
        if len(stations_a) != len(stations_b):
            print("skipped")
            continue

        # Make sure the loop finds the correct components
        stations_a.sort()
        stations_b.sort()

        # Convert the data to stream objects
        st_a = Stream()
        st_b = Stream()
        for i in range(len(stations_a)):
            st_a += read_sem(stations_a[i], throwaway_time)
            st_b += read_sem(stations_b[i], throwaway_time)

        if not st_a or not st_b:
            print("skipped")
            continue

        # Create common time axes
        t_a = np.linspace(st_a[0].stats.time_offset,
                          st_a[0].stats.endtime - st_a[0].stats.starttime,
                          st_a[0].stats.npts)
        t_b = np.linspace(st_b[0].stats.time_offset,
                          st_b[0].stats.endtime - st_b[0].stats.starttime,
                          st_b[0].stats.npts)

        # Filter the data and taper to remove any spurious signals
        for st in [st_a, st_b]:
            st.taper(max_percentage=0.05)
            if min_period:
                st.filter('bandpass',
                          freqmin=1 / max_period,
                          freqmax=1 / min_period)
                st.taper(max_percentage=0.05)

        # Plot each component on a Grid
        gs = gridspec.GridSpec(3, 1, hspace=0)
        for i in range(len(st_a)):
            ax = plt.subplot(gs[i])
            pretty_grids(ax, scitick=True)
            anno = ""

            # Plot the peak amplitude differences, and time differences
            time_peak_a, peak_a = peak_amplitudes(st_a[i],
                                                  t_a,
                                                  ax,
                                                  c=color_a,
                                                  plot=peak_amplitude)
            time_peak_b, peak_b = peak_amplitudes(st_b[i],
                                                  t_b,
                                                  ax,
                                                  c=color_b,
                                                  plot=peak_amplitude)
            if peak_amplitude:
                anno = "delta_amp={:.2E}\ndelta_t={:.1f}s\n".format(
                    float(peak_a - peak_b), float(time_peak_a - time_peak_b))

            # Cross correlate the two traces and annotate the cc information
            if cross_correlate:
                common_sr = min(
                    [st_a[i].stats.sampling_rate, st_b[i].stats.sampling_rate])
                tr_a = st_a[i].copy()
                tr_b = st_b[i].copy()
                for tr in [tr_a, tr_b]:
                    tr.resample(common_sr)

                cc = correlate(tr_a.data,
                               tr_b.data,
                               shift=int(common_sr * 50),
                               domain="freq")

                f_shift, value = xcorr_max(cc)
                t_shift = f_shift / common_sr
                anno += f"cc={value:.2f}\ntshift={t_shift:.2f}s"

            ax.annotate(anno,
                        xy=(t_a[int(len(t_a) / 2)],
                            -1 * st_a[i].data.max() / 2),
                        fontsize=8,
                        bbox=dict(fc="w", boxstyle="round", alpha=0.5))

            # Plot the two traces
            ax.plot(t_a, st_a[i].data, color=color_a)
            ax.plot(t_b, st_b[i].data, color=color_b)
            # plt.xlim([0, 100])

            # Set the title with important information
            if i == 0:
                plt.title(f"2018p130600 {st_a[i].get_id()}\n"
                          f"{os.path.basename(dir_a)} ({color_a}) / "
                          f"{os.path.basename(dir_b)} ({color_b})\n"
                          f"{min_period} - {max_period}s")
            # Put the ylabel on the middle plot
            if i == len(st_a) // 2:
                plt.ylabel(f"amplitude [m]\n{st_a[i].get_id().split('.')[-1]}")
            else:
                plt.ylabel(f"{st_a[i].get_id().split('.')[-1]}")
            # Put the xlabel on the bottom plot
            if i == len(st_a) - 1:
                plt.xlabel("time [s]")
            # Remove tick labels from all but the last plot
            if i != len(st_a) - 1:
                plt.setp(ax.get_xticklabels(), visible=False)

        if save:
            fid_out = "./figures/{}.png".format(st_a[i].get_id())
            # figure size must be set at figure creation; savefig has no
            # figsize keyword (A4 landscape was intended: 11.69 x 8.27 in)
            plt.savefig(fid_out, dpi=100)
        if show:
            plt.show()

        print("")
Example No. 30
            'station': 'SIG2',
            'location': '',
            'channel': 'BHZ',
            'npts': len(sig2),
            'sampling_rate': 40.,
            'mseed': {
                'dataquality': 'D'
            }
        }

        stats2['starttime'] = UTCDateTime()
        tr2 = Trace(data=sig2, header=stats2)
        tr1 = Trace(data=sig1, header=stats1)

        cc1 = correlate(sig1, sig2, 50)
        shift, val = xcorr_max(cc1)
        print(shift)
        print(val)

        tr1.resample(1000.)
        tr2.resample(1000.)

        cc2 = correlate(tr1.data, tr2.data, 500)
        shift, val = xcorr_max(cc2)
        print(shift)
        print('IMPORTANT')
        print(val)
        vals.append(shift)

    plt.plot(phaseangs, vals, '.', label=str(per) + ' s ', alpha=0.7)
plt.legend(loc=9, ncol=5)
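
Resampling both traces to 1000 Hz before correlating, as done above, refines
the lag grid from one sample at 40 Hz (25 ms) to 1 ms. A compact sketch of
the idea; the traces here are identical synthetics, so the expected lag is
zero:

import numpy as np
from obspy import Trace
from obspy.signal.cross_correlation import correlate, xcorr_max

tr1 = Trace(np.sin(np.linspace(0, 30, 1200)),
            header={'sampling_rate': 40.})
tr2 = tr1.copy()
tr1.resample(1000.)
tr2.resample(1000.)

cc = correlate(tr1.data, tr2.data, 500)
shift, val = xcorr_max(cc)
print(shift / 1000.)  # lag in seconds, now on a 1 ms grid
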
Example No. 31
def cross_net(stream, env=False, master=False):
    """
    Generate picks using a simple envelope cross-correlation.

    Picks are made for each channel based on optimal moveout defined by
    maximum cross-correlation with master trace.  Master trace will be the
    first trace in the stream if not set.  Requires good inter-station
    coherence.

    :type stream: obspy.core.stream.Stream
    :param stream: Stream to pick
    :type env: bool
    :param env: To compute cross-correlations on the envelope or not.
    :type master: obspy.core.trace.Trace
    :param master:
        Trace to use as master, if False, will use the first trace in stream.

    :returns: :class:`obspy.core.event.event.Event`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import cross_net
    >>> st = read()
    >>> event = cross_net(st, env=True)
    >>> print(event.creation_info.author)
    EQcorrscan

    .. warning::
        This routine is not designed for accurate picking, rather it can be
        used for a first-pass at picks to obtain simple locations. Based on
        the waveform-envelope cross-correlation method.
    """
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='cross_net'))
    samp_rate = stream[0].stats.sampling_rate
    if not env:
        Logger.info('Using the raw data')
        st = stream.copy()
        st.resample(samp_rate)
    else:
        st = stream.copy()
        Logger.info('Computing envelope')
        for tr in st:
            tr.resample(samp_rate)
            tr.data = envelope(tr.data)
    if not master:
        master = st[0]
    master.data = np.nan_to_num(master.data)
    for i, tr in enumerate(st):
        tr.data = np.nan_to_num(tr.data)
        Logger.debug('Comparing {0} with the master'.format(tr.id))
        shift_len = int(0.3 * len(tr))
        Logger.debug('Shift length is set to ' + str(shift_len) + ' samples')
        corr_fun = correlate(master, tr, shift_len)
        index, cc = xcorr_max(corr_fun)
        wav_id = WaveformStreamID(station_code=tr.stats.station,
                                  channel_code=tr.stats.channel,
                                  network_code=tr.stats.network)
        event.picks.append(
            Pick(time=tr.stats.starttime + (index / tr.stats.sampling_rate),
                 waveform_id=wav_id,
                 phase_hint='S',
                 onset='emergent'))
        Logger.debug(event.picks[i])
    # set the origin time arbitrarily to 1 s before the earliest pick
    event.origins[0].time = min([pick.time for pick in event.picks]) - 1
    # event.origins[0].latitude = float('nan')
    # event.origins[0].longitude = float('nan')
    del st
    return event
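
The moveout measurement inside the loop boils down to: envelope both traces,
cross-correlate, read off the lag of the maximum. A standalone sketch with a
synthetic arrival; the trace length and the 120-sample delay are assumptions
for illustration:

import numpy as np
from obspy.signal.cross_correlation import correlate, xcorr_max
from obspy.signal.filter import envelope

t = np.arange(2000, dtype=float)
master = np.exp(-((t - 800) / 40.0) ** 2)  # Gaussian arrival at sample 800
other = np.exp(-((t - 920) / 40.0) ** 2)   # same arrival, 120 samples later

cc = correlate(envelope(master), envelope(other), 600)  # 0.3 * len, as above
index, val = xcorr_max(cc)
print(index)  # -> -120: the later trace needs a negative lag to line up
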
Example No. 32
    def test_correlate(self):
        stream = read()
        stream2 = stream.copy()
        stream3 = stream.copy()
        for tr in stream2:
            tr.id = 'GR.FUR..BH' + tr.stats.channel[-1]
            tr.stats.sampling_rate = 80.
        for tr in stream3:
            tr.id = 'GR.WET..BH' + tr.stats.channel[-1]
            tr.stats.sampling_rate = 50.
        stream = stream + stream2 + stream3
        day = UTC('2018-01-02')
        for tr in stream:
            tr.stats.starttime = day
        # create some gaps
        stream = stream.cutout(day + 0.01, day + 10)
        stream = stream.cutout(day + 14, day + 16.05)

        # prepare mock objects for call to yam_correlate
        def data(starttime, endtime, **kwargs):
            return stream.select(**kwargs).slice(starttime, endtime)
        io = {'data': data, 'data_format': None, 'inventory': read_inventory()}
        res = yam_correlate(io, day, 'outkey', keep_correlations=True)
        self.assertEqual(len(res['corr']), 6)
        # by default only 'ZZ' combinations
        for tr in res['corr']:
            self.assertEqual(tr.stats.station[-1], 'Z')
            self.assertEqual(tr.stats.channel[-1], 'Z')
            if len(set(tr.id.split('.'))) == 2:  # autocorr
                np.testing.assert_allclose(xcorr_max(tr.data), (0, 1.))

        res = yam_correlate(
                  io, day, 'outkey',
                  station_combinations=('GR.FUR-GR.WET', 'RJOB-RJOB'),
                  component_combinations=('ZZ', 'NE', 'NR'),
                  keep_correlations=True,
                  stack='1d', njobs=self.njobs)
        self.assertEqual(len(res['corr']), 7)
        self.assertEqual(len(res['stack']), 7)
        ids = ['RJOB.EHE.RJOB.EHN', 'RJOB.EHZ.RJOB.EHZ',
               'FUR.BHE.WET.BHN', 'FUR.BHN.WET.BHE',
               'FUR.BHR.WET.BHN', 'FUR.BHN.WET.BHR',
               'FUR.BHZ.WET.BHZ']
        for tr in res['corr']:
            self.assertIn(tr.id, ids)
            if len(set(tr.id.split('.'))) == 2:  # autocorr
                np.testing.assert_allclose(xcorr_max(tr.data), (0, 1.))

        res = yam_correlate(
                  io, day, 'outkey', only_auto_correlation=True,
                  station_combinations=('GR.FUR-GR.WET', 'RJOB-RJOB'),
                  component_combinations=['ZN', 'RT'], njobs=self.njobs,
                  keep_correlations=True,
                  remove_response=True)
        self.assertEqual(len(res['corr']), 1)
        tr = res['corr'][0]
        self.assertEqual(tr.stats.station[-1], 'N')
        self.assertEqual(tr.stats.channel[-1], 'Z')

        stream.traces = [tr for tr in stream if tr.stats.channel[-1] != 'N']
        res = yam_correlate(
                  io, day, 'outkey',
                  station_combinations=('GR.FUR-GR.WET', 'RJOB-RJOB'),
                  component_combinations=('NT', 'NR'), discard=0.0,
                  keep_correlations=True)
        self.assertEqual(res, None)
Example No. 33
    def CC_BW(self, BW_obs, BW_syn, Full_P_shift, Full_S_shift, plot=False):
        p_obs = BW_obs.P_stream.copy()
        p_syn = BW_syn.P_stream.copy()
        s_obs = BW_obs.S_stream.copy()
        s_syn = BW_syn.S_stream.copy()

        # Apply the full trace shifts specified in the input file
        # p_syn.traces[0].data = self.shift(p_syn.traces[0].data, Full_P_shift)
        # p_syn.traces[1].data = self.shift(p_syn.traces[1].data, Full_P_shift)
        # s_syn.traces[0].data = self.shift(s_syn.traces[0].data, Full_S_shift)
        # s_syn.traces[1].data = self.shift(s_syn.traces[1].data, Full_S_shift)
        # s_syn.traces[2].data = self.shift(s_syn.traces[2].data, Full_S_shift)

        dt = s_obs[0].meta.delta
        misfit = np.array([])
        misfit_obs = np.array([])
        time_shift = np.array([], dtype=int)
        amplitude = np.array([])
        Norms = np.array([])

        ## Maximum cross-correlation shift allowed:
        max_shift = 1.0
        max_shift_sample = int(max_shift / dt)
        if plot:
            fig = plt.figure(figsize=(10, 12))
        else:
            fig = 1

        # S - correlations:
        # Calculate Shift based on T component
        len_S_obs = len(s_obs[2].data)

        cc_obspy = cc.correlate(s_syn[2].data, s_obs[2].data, max_shift_sample)
        shift_centered, MAX_CC = cc.xcorr_max(cc_obspy, abs_max=False)

        shift = np.argmax(cc_obspy)  # raw (uncentered) index of the maximum
        time_shift = np.append(time_shift, shift_centered)

        # mu_s = np.array([0.7, 0.7, 0.9]) # S_T can be shifted to 0.95 at some point
        # sigma_s = np.array([0.3, 0.3, 0.1])

        mu_s = np.array([1., 1.,
                         1.])  # S_T can be shifted to 0.95 at some point
        sigma_s = np.array([0.1, 0.1, 0.1])

        for i in range(len(s_obs)):
            delta = s_obs[i].stats.delta
            cc_obspy = cc.correlate(s_syn[i].data, s_obs[i].data,
                                    max_shift_sample)
            CC_s = cc_obspy[shift]

            misfit = np.append(misfit, ((CC_s - mu_s[i])**2) /
                               (2 * (sigma_s[i])**2))  # + np.abs(shift))

            Norms = np.append(
                Norms, (np.sum(np.abs(s_obs[i].data))) /
                (np.sum(np.abs(s_syn[i].data))))  # Normalization Factor SZ

            if plot:
                s_syn_shift_obspy = self.shift(s_syn[i].data, shift_centered)
                if i == 0:
                    time_array = np.arange(len(s_obs[i].data)) * delta
                start = 0  # alt: int((p_start_obs.timestamp - or_time.timestamp - 10) / delta)
                end = len(s_syn_shift_obspy) + 500  # alt: int((p_start_obs.timestamp - or_time.timestamp + 30) / delta)

                ax1 = plt.subplot2grid((5, 1), (i + 2, 0))

                plt.plot(time_array[start:end],
                         self.normalize(s_obs[i][start:end]),
                         'b',
                         label='Observed')
                plt.plot(time_array[start:end],
                         self.normalize(s_syn[i][start:end]),
                         'r',
                         linewidth=0.3,
                         label='Synthetic')
                plt.plot(time_array[start:end],
                         self.normalize(s_syn_shift_obspy[start:end]),
                         'r',
                         label='Synthetic')
                ymin, ymax = ax1.get_ylim()
                xmin, xmax = ax1.get_xlim()
                # if i == 0:
                #     plt.text(xmax - 30, ymax / 1.7, "S-Z - %.4f, %.4f " % (misfit[i] , CC_s), fontsize=20, color='b')
                # elif i == 1:
                #     plt.text(xmax - 30, ymax / 1.7, "S-R - %.4f, %.4f " % (misfit[i] , CC_s), fontsize=20, color='b')
                # elif i == 2:
                #     plt.text(xmax - 30, ymax / 1.7, "S-T - %.4f, %.4f " % (misfit[i] , CC_s), fontsize=20, color='b')
                ax1.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
                ax1.tick_params(axis='x', labelsize=18)
                ax1.tick_params(axis='y', labelsize=18)
                plt.tight_layout()

        ## P - correlation based on Z component
        len_P_obs = len(p_obs[0].data)
        cc_obspy = cc.correlate(p_syn[0].data, p_obs[0].data, max_shift_sample)
        shift_centered, MAX_CC = cc.xcorr_max(cc_obspy, abs_max=False)

        shift = np.argmax(cc_obspy)  # raw (uncentered) index, as for S above
        time_shift = np.append(time_shift, shift_centered)

        # mu_p = np.array([0.95, 0.9])
        # sigma_p = np.array([0.1, 0.2])

        mu_p = np.array([1., 1.])
        sigma_p = np.array([0.1, 0.1])

        Norm_Pz = (np.sum(np.abs(p_obs[0].data))) / (np.sum(
            np.abs(p_syn[0].data)))  # normalization factor PZ

        p_syn_shift_obspy = self.shift(p_syn[0].data, shift_centered)

        # P- correlation
        for i in range(len(p_obs)):
            len_P_obs = len(p_obs[i].data)

            cc_obspy = cc.correlate(p_syn[i].data, p_obs[i].data,
                                    max_shift_sample)

            if i == 0:
                p_syn_shift_obspy = self.shift(p_syn[i].data, shift_centered)
                A = (np.dot(p_obs[i].data, p_syn_shift_obspy) /
                     np.dot(p_obs[i].data, p_obs[i].data))
                amplitude = np.append(amplitude, abs(A))

            CC_p = cc_obspy[shift]

            misfit = np.append(misfit, ((CC_p - mu_p[i])**2) /
                               (2 * (sigma_p[i])**2))  #+ np.abs(shift))

            Norms = np.append(Norms, (np.sum(np.abs(p_obs[i].data))) /
                              (np.sum(np.abs(p_syn[i].data))))

            if plot:
                p_syn_shift_obspy = self.shift(p_syn[i].data, shift_centered)
                start = 0  # alt: int((p_start_obs.timestamp - or_time.timestamp - 10) / delta)
                end = len(p_syn_shift_obspy) + 500  # alt: int((p_start_obs.timestamp - or_time.timestamp + 30) / delta)
                delta = p_obs[i].stats.delta
                if i == 0:
                    time_array = np.arange(len(p_obs[i].data)) * delta
                # time_array = np.arange(len_P_obs) * delta

                ax1 = plt.subplot2grid((5, 1), (i, 0))
                plt.plot(time_array[start:end],
                         self.normalize(p_obs[i][start:end]),
                         'b',
                         label='Observed')
                # plt.plot(time_array[start:end],self.normalize(p_syn[i][start:end]),'r', linewidth = 0.3, label = 'Synthetic')
                plt.plot(time_array[start:end],
                         self.normalize(p_syn_shift_obspy[start:end]),
                         'r',
                         label='Synthetic')
                ymin, ymax = ax1.get_ylim()
                xmin, xmax = ax1.get_xlim()
                # if i == 0:
                #     plt.text(xmax - 30, ymax / 1.7, "P-Z - %.4f - %.4f " % (misfit[i+3] , CC_p), fontsize=20, color='b')
                # elif i == 1:
                #     plt.text(xmax - 30, ymax / 1.7, "P-R - %.4f - %.4f " % (misfit[i+3] , CC_p), fontsize=20, color='b')
                ax1.ticklabel_format(style="sci", axis='y', scilimits=(-2, 2))
                ax1.tick_params(axis='x', labelsize=18)
                ax1.tick_params(axis='y', labelsize=18)
                plt.tight_layout()

            if i == 0 and plot:
                # plt.title('Depth: 90000 (m)')
                plt.legend(loc='lower left', fontsize=15)
        # plt.show()
        # sum_misfit = np.sum(misfit)

        # misfit_Amp = (np.abs(np.log10(Norm_Pz) - np.log10(Norm_St)) / np.log(2))**2
        return misfit, Norms, amplitude, time_shift, fig
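
Each term appended to misfit above is the negative log of an unnormalized
Gaussian in the correlation coefficient, (CC - mu)**2 / (2 * sigma**2). A
worked check with the values used above (mu = 1.0, sigma = 0.1):

mu, sigma = 1.0, 0.1
for CC in (1.0, 0.9, 0.8):
    print(CC, (CC - mu) ** 2 / (2 * sigma ** 2))
# -> CC = 1.0 contributes 0.0, 0.9 contributes 0.5, 0.8 contributes 2.0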