Example #1
    def test_get_geometry(self):
        """
        test get_geometry() in array_analysis.py
        """
        ll = np.array([[24.5797167, 121.4842444, 385.106],
                       [24.5797611, 121.4842333, 384.893],
                       [24.5796694, 121.4842556, 385.106]])

        la = get_geometry(ll)

        np.testing.assert_almost_equal(la[:, 0].sum(), 0., decimal=8)
        np.testing.assert_almost_equal(la[:, 1].sum(), 0., decimal=8)
        np.testing.assert_almost_equal(la[:, 2].sum(), 0., decimal=8)

        ll = np.array([[10., 10., 10.], [0., 5., 5.], [0., 0., 0.]])

        la = get_geometry(ll, coordsys='xy')

        np.testing.assert_almost_equal(la[:, 0].sum(), 0., decimal=8)
        np.testing.assert_almost_equal(la[:, 1].sum(), 0., decimal=8)
        np.testing.assert_almost_equal(la[:, 2].sum(), 0., decimal=8)
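
The assertions above hold because get_geometry() returns station offsets relative to the array center, so each coordinate column sums to zero. A minimal stand-alone sketch of that property, assuming obspy.signal.array_analysis.get_geometry (the function under test):

import numpy as np
from obspy.signal.array_analysis import get_geometry

# Three hypothetical stations given as x/y/z offsets in km ('xy' coordinates).
stations = np.array([[10.0, 10.0, 10.0],
                     [0.0, 5.0, 5.0],
                     [0.0, 0.0, 0.0]])

geometry = get_geometry(stations, coordsys='xy')

# get_geometry() re-centers the array, so the mean offset is (0, 0, 0).
assert np.allclose(geometry.mean(axis=0), 0.0)
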
Example #2
    def test_get_geometry(self):
        """
        test get_geometry() in array_analysis.py
        """
        ll = np.array(
            [[24.5797167, 121.4842444, 385.106], [24.5797611, 121.4842333, 384.893], [24.5796694, 121.4842556, 385.106]]
        )

        la = get_geometry(ll)

        np.testing.assert_almost_equal(la[:, 0].sum(), 0.0, decimal=8)
        np.testing.assert_almost_equal(la[:, 1].sum(), 0.0, decimal=8)
        np.testing.assert_almost_equal(la[:, 2].sum(), 0.0, decimal=8)

        ll = np.array([[10.0, 10.0, 10.0], [0.0, 5.0, 5.0], [0.0, 0.0, 0.0]])

        la = get_geometry(ll, coordsys="xy")

        np.testing.assert_almost_equal(la[:, 0].sum(), 0.0, decimal=8)
        np.testing.assert_almost_equal(la[:, 1].sum(), 0.0, decimal=8)
        np.testing.assert_almost_equal(la[:, 2].sum(), 0.0, decimal=8)
Example #3
def beamform_spherical(st, slim, sstep, freqlow, freqhigh, win_len, minbeampow, percdiv, stepdiv, Dmin, Dmax, Dstep, stime=None, etime=None, win_frac=0.05, outfolder=None, coordsys='xy', verbose=False):
    """
    Uses plane wave beamforming to approximate answer, then searches in finer grid around answer from that for spherical wave best solution
    Almendros et al 1999 methods
    """
    if outfolder is None:
        outfolder = os.getcwd()

    def dump(pow_map, apow_map, i):
        """Example function to use with `store` kwarg in
        :func:`~obspy.signal.array_analysis.array_processing`.
        """
        np.savez(outfolder+'/pow_map_%d.npz' % i, pow_map)

    if stime is None:
        stime = st[0].stats.starttime
    if etime is None:
        etime = st[0].stats.endtime

    kwargs = dict(
        # slowness grid: X min, X max, Y min, Y max, Slow Step
        sll_x=-slim, slm_x=slim, sll_y=-slim, slm_y=slim, sl_s=sstep,
        # sliding window properties
        win_len=win_len, win_frac=win_frac,
        # frequency properties
        frqlow=freqlow, frqhigh=freqhigh, prewhiten=0,
        # restrict output
        semb_thres=-1e9, vel_thres=-1e9,
        stime=stime,
        etime=etime, coordsys=coordsys, store=None)

    t, rel_power, abs_power, baz, slow = array_processing(st, **kwargs)

    # Will need this for next step
    geometry = get_geometry(st, coordsys=coordsys)

    # Initiate zero result matrix for entire possible area (sparse?) - Will be

    # Generate time shift table for entire area for all stations (This would be 4D)

    # Filter seismograms to freqlims

    # Pull out just the data from the stream into an array

    for i, t1 in enumerate(t):
        pass
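
The docstring describes a two-stage search: a coarse plane-wave beamforming pass followed by a finer search around that preliminary answer. Below is a minimal sketch of how such a refinement grid could be derived from the coarse (back-azimuth, slowness) result; refine_grid and its step sizes are illustrative assumptions, not part of the function above:

import numpy as np

def refine_grid(baz_coarse, slow_coarse, dbaz=5.0, dslow=0.05, nsteps=11):
    """Build a finer (back-azimuth, slowness) grid centered on a coarse
    plane-wave beamforming result. Purely illustrative helper."""
    baz_grid = np.mod(baz_coarse + np.linspace(-dbaz, dbaz, nsteps), 360.0)
    slow_grid = np.clip(slow_coarse + np.linspace(-dslow, dslow, nsteps),
                        0.0, None)
    return baz_grid, slow_grid

# Example: refine around the time window with the largest relative beam power
# returned by array_processing(), e.g.
#   baz_grid, slow_grid = refine_grid(baz[rel_power.argmax()],
#                                     slow[rel_power.argmax()])
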
Example #4
def beamform_spherical(st,
                       slim,
                       sstep,
                       freqlow,
                       freqhigh,
                       win_len,
                       minbeampow,
                       percdiv,
                       stepdiv,
                       Dmin,
                       Dmax,
                       Dstep,
                       stime=None,
                       etime=None,
                       win_frac=0.05,
                       outfolder=None,
                       coordsys='xy',
                       verbose=False):
    """
    Uses plane wave beamforming to approximate answer, then searches in finer grid around answer from that for spherical wave best solution
    Almendros et al 1999 methods
    """
    if outfolder is None:
        outfolder = os.getcwd()

    def dump(pow_map, apow_map, i):
        """Example function to use with `store` kwarg in
        :func:`~obspy.signal.array_analysis.array_processing`.
        """
        np.savez(outfolder + '/pow_map_%d.npz' % i, pow_map)

    if stime is None:
        stime = st[0].stats.starttime
    if etime is None:
        etime = st[0].stats.endtime

    kwargs = dict(
        # slowness grid: X min, X max, Y min, Y max, Slow Step
        sll_x=-slim,
        slm_x=slim,
        sll_y=-slim,
        slm_y=slim,
        sl_s=sstep,
        # sliding window properties
        win_len=win_len,
        win_frac=win_frac,
        # frequency properties
        frqlow=freqlow,
        frqhigh=freqhigh,
        prewhiten=0,
        # restrict output
        semb_thres=-1e9,
        vel_thres=-1e9,
        stime=stime,
        etime=etime,
        coordsys=coordsys,
        store=None)

    t, rel_power, abs_power, baz, slow = array_processing(st, **kwargs)

    # Will need this for next step
    geometry = get_geometry(st, coordsys=coordsys)

    # Initiate zero result matrix for entire possible area (sparse?) - Will be

    # Generate time shift table for entire area for all stations (This would be 4D)

    # Filter seismograms to freqlims

    # Pull out just the data from the stream into an array

    for i, t1 in enumerate(t):
        pass
Example #5
    def call(self):
        try:
            from obspy.core import UTCDateTime, stream
            from obspy.signal import array_analysis
            from obspy.imaging.cm import obspy_sequential as cmap
        except ImportError as _import_error:
            self.fail('ImportError:\n%s' % _import_error)

        from matplotlib.colorbar import ColorbarBase
        from matplotlib.colors import Normalize
        import matplotlib.dates as mdates
        self.cleanup()
        viewer = self.get_viewer()

        if viewer.lowpass is None or viewer.highpass is None:
            self.fail('highpass and lowpass in viewer must be set!')

        traces = []
        for trs in self.chopper_selected_traces(fallback=True):
            for tr in trs:
                tr.lowpass(2, viewer.lowpass)
                tr.highpass(2, viewer.highpass)

            traces.extend(trs)

        if not traces:
            self.fail('no traces selected')

        if self.downresample == 'resample':
            dt_want = min([t.deltat for t in traces])
            for t in traces:
                t.resample(dt_want)

        elif self.downresample == 'downsample':
            dt_want = max([t.deltat for t in traces])
            for t in traces:
                t.downsample_to(dt_want)

        elif self.downresample == 'downsample to "target dt"':
            for t in traces:
                t.downsample_to(float(self.target_dt))

        tmin = max([t.tmin for t in traces])
        tmax = min([t.tmax for t in traces])
        try:
            obspy_traces = [
                p2o_trace(tr, viewer.get_station(viewer.station_key(tr)))
                for tr in traces
            ]

        except KeyError:
            self.fail('station information missing')

        st = stream.Stream(traces=obspy_traces)
        center = array_analysis.get_geometry(st, return_center=True)
        center_lon, center_lat, center_ele = center[len(center) - 1]

        # Execute sonic
        kwargs = dict(sll_x=-self.smax,
                      slm_x=self.smax,
                      sll_y=-self.smax,
                      slm_y=self.smax,
                      sl_s=self.smax / self.divisor,
                      win_len=self.window_lenth,
                      win_frac=self.win_frac,
                      frqlow=viewer.highpass,
                      frqhigh=viewer.lowpass,
                      prewhiten=0,
                      semb_thres=-1.0e9,
                      vel_thres=-1.0e9,
                      verbose=True,
                      timestamp='mlabday',
                      stime=UTCDateTime(tmin),
                      etime=UTCDateTime(tmax))

        try:
            out = array_analysis.array_processing(st, **kwargs)
        except AttributeError:
            from obspy.signal.array_analysis import sonic
            out = sonic(st, **kwargs)

        pi = num.pi

        # make output human readable, adjust backazimuth to values between 0
        # and 360
        t, rel_power, abs_power, baz, slow = out.T
        baz[baz < 0.0] += 360.

        # choose number of fractions in plot (desirably 360 degree/N is an
        # integer!)
        N = int(self.numberOfFraction)
        abins = num.arange(N + 1) * 360. / N
        sbins = num.linspace(0., self.smax, N + 1)

        # sum rel power in bins given by abins and sbins
        hist, baz_edges, sl_edges = num.histogram2d(baz,
                                                    slow,
                                                    bins=[abins, sbins],
                                                    weights=rel_power)

        # transform to gradient
        baz_edges = baz_edges / 180. * pi

        fig = self.pylab(get='figure')
        cax = fig.add_axes([0.85, 0.2, 0.05, 0.5])
        ax = fig.add_axes([0.10, 0.1, 0.70, 0.7], polar=True)
        ax.grid(False)

        dh = abs(sl_edges[1] - sl_edges[0])
        dw = abs(baz_edges[1] - baz_edges[0])

        # circle through backazimuth
        for i, row in enumerate(hist):
            ax.bar(left=(pi / 2 - (i + 1) * dw) * num.ones(N),
                   height=dh * num.ones(N),
                   width=dw,
                   bottom=dh * num.arange(N),
                   color=cmap(row / hist.max()))

        ax.set_xticks([pi / 2, 0, 3. / 2 * pi, pi])
        ax.set_xticklabels(['N', 'E', 'S', 'W'])
        ax.set_ylim(0., self.smax)
        ColorbarBase(cax,
                     cmap=cmap,
                     norm=Normalize(vmin=hist.min(), vmax=hist.max()))

        fig2 = self.pylab(get='figure')
        labels = ['rel.power', 'abs.power', 'baz', 'slow']
        xlocator = mdates.AutoDateLocator()
        ax = None
        for i, lab in enumerate(labels):
            ax = fig2.add_subplot(4, 1, i + 1, sharex=ax)
            ax.scatter(out[:, 0],
                       out[:, i + 1],
                       c=out[:, 1],
                       alpha=0.6,
                       edgecolors='none',
                       cmap=cmap)
            ax.set_ylabel(lab)
            ax.set_xlim(out[0, 0], out[-1, 0])
            ax.set_ylim(out[:, i + 1].min(), out[:, i + 1].max())
            ax.xaxis.set_tick_params(which='both', direction='in')
            ax.xaxis.set_major_locator(xlocator)
            ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(xlocator))
            if i != 3:
                ax.set_xticklabels([])
        fig2.subplots_adjust(hspace=0.)
        fig2.canvas.draw()
        fig.canvas.draw()

        print('Center of Array at latitude %s and longitude %s' %
              (center_lat, center_lon))
Example #6
    def call(self):
        try:
            from obspy.core import UTCDateTime, stream
            from obspy.signal import array_analysis
            from obspy.imaging.cm import obspy_sequential as cmap
        except ImportError as _import_error:
            self.fail('ImportError:\n%s' % _import_error)

        from matplotlib.colorbar import ColorbarBase
        from matplotlib.colors import Normalize
        import matplotlib.dates as mdates
        self.cleanup()
        viewer = self.get_viewer()

        if viewer.lowpass is None or viewer.highpass is None:
            self.fail('highpass and lowpass in viewer must be set!')

        traces = []
        for trs in self.chopper_selected_traces(fallback=True):
            for tr in trs:
                tr.lowpass(2, viewer.lowpass)
                tr.highpass(2, viewer.highpass)

            traces.extend(trs)

        if not traces:
            self.fail('no traces selected')

        if self.downresample == 'resample':
            dt_want = min([t.deltat for t in traces])
            for t in traces:
                t.resample(dt_want)

        elif self.downresample == 'downsample':
            dt_want = max([t.deltat for t in traces])
            for t in traces:
                t.downsample_to(dt_want)

        elif self.downresample == 'downsample to "target dt"':
            for t in traces:
                t.downsample_to(float(self.target_dt))

        tmin = max([t.tmin for t in traces])
        tmax = min([t.tmax for t in traces])
        try:
            obspy_traces = [p2o_trace(
                tr, viewer.get_station(viewer.station_key(tr)))
                            for tr in traces]

        except KeyError:
            self.fail('station information missing')

        st = stream.Stream(traces=obspy_traces)
        center = array_analysis.get_geometry(st, return_center=True)
        center_lon, center_lat, center_ele = center[len(center)-1]

        # Execute sonic
        kwargs = dict(
            sll_x=-self.smax, slm_x=self.smax, sll_y=-self.smax,
            slm_y=self.smax, sl_s=self.smax/self.divisor,
            win_len=self.window_lenth, win_frac=self.win_frac,
            frqlow=viewer.highpass, frqhigh=viewer.lowpass, prewhiten=0,
            semb_thres=-1.0e9, vel_thres=-1.0e9, verbose=True,
            timestamp='mlabday', stime=UTCDateTime(tmin),
            etime=UTCDateTime(tmax)
        )

        try:
            out = array_analysis.array_processing(st, **kwargs)
        except AttributeError:
            from obspy.signal.array_analysis import sonic
            out = sonic(st, **kwargs)

        pi = num.pi

        # make output human readable, adjust backazimuth to values between 0
        # and 360
        t, rel_power, abs_power, baz, slow = out.T
        baz[baz < 0.0] += 360.

        # choose number of fractions in plot (desirably 360 degree/N is an
        # integer!)
        N = int(self.numberOfFraction)
        abins = num.arange(N + 1) * 360. / N
        sbins = num.linspace(0., self.smax, N + 1)

        # sum rel power in bins given by abins and sbins
        hist, baz_edges, sl_edges = num.histogram2d(
            baz, slow, bins=[abins, sbins], weights=rel_power)

        # transform to gradient
        baz_edges = baz_edges / 180. * pi

        fig = self.pylab(get='figure')
        cax = fig.add_axes([0.85, 0.2, 0.05, 0.5])
        ax = fig.add_axes([0.10, 0.1, 0.70, 0.7], polar=True)
        ax.grid(False)

        dh = abs(sl_edges[1] - sl_edges[0])
        dw = abs(baz_edges[1] - baz_edges[0])

        # circle through backazimuth
        for i, row in enumerate(hist):
            ax.bar(left=(pi / 2 - (i + 1) * dw) * num.ones(N),
                   height=dh * num.ones(N), width=dw,
                   bottom=dh * num.arange(N), color=cmap(row / hist.max()))

        ax.set_xticks([pi / 2, 0, 3. / 2 * pi, pi])
        ax.set_xticklabels(['N', 'E', 'S', 'W'])
        ax.set_ylim(0., self.smax)
        ColorbarBase(cax, cmap=cmap,
                     norm=Normalize(vmin=hist.min(), vmax=hist.max()))

        fig2 = self.pylab(get='figure')
        labels = ['rel.power', 'abs.power', 'baz', 'slow']
        xlocator = mdates.AutoDateLocator()
        ax = None
        for i, lab in enumerate(labels):
            ax = fig2.add_subplot(4, 1, i + 1, sharex=ax)
            ax.scatter(out[:, 0], out[:, i + 1], c=out[:, 1], alpha=0.6,
                       edgecolors='none', cmap=cmap)
            ax.set_ylabel(lab)
            ax.set_xlim(out[0, 0], out[-1, 0])
            ax.set_ylim(out[:, i + 1].min(), out[:, i + 1].max())
            ax.xaxis.set_tick_params(which='both', direction='in')
            ax.xaxis.set_major_locator(xlocator)
            ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(xlocator))
            if i != 3:
                ax.set_xticklabels([])
        fig2.subplots_adjust(hspace=0.)
        fig2.canvas.draw()
        fig.canvas.draw()

        print('Center of Array at latitude %s and longitude %s' %
              (center_lat, center_lon))
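
Before drawing the polar histogram, the plugin sums relative beam power into (back-azimuth, slowness) bins. A small self-contained sketch of that binning step with synthetic values (array sizes and random inputs are arbitrary):

import numpy as np

smax = 0.5                               # maximum slowness (s/km)
N = 36                                   # number of fractions; 360/N integer
rng = np.random.default_rng(0)

baz = rng.uniform(-180.0, 180.0, 200)    # back-azimuths from beamforming
slow = rng.uniform(0.0, smax, 200)       # slowness values
rel_power = rng.uniform(0.0, 1.0, 200)   # relative beam power per window

baz[baz < 0.0] += 360.0                  # map back-azimuth to [0, 360)

abins = np.arange(N + 1) * 360.0 / N     # back-azimuth bin edges (degrees)
sbins = np.linspace(0.0, smax, N + 1)    # slowness bin edges (s/km)

# Sum relative power in each (back-azimuth, slowness) bin.
hist, baz_edges, sl_edges = np.histogram2d(
    baz, slow, bins=[abins, sbins], weights=rel_power)

assert hist.shape == (N, N)
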
Example #7
    def __vespa_az(self, st):
        def find_nearest(array, value):

            idx, val = min(enumerate(array), key=lambda x: abs(x[1] - value))
            return idx, val

        sides = 'onesided'
        pi = math.pi
        st.sort()
        n = len(st)
        for i in range(n):
            coords = self.inv.get_coordinates(st[i].id)
            st[i].stats.coordinates = AttribDict({
                'latitude':
                coords['latitude'],
                'elevation':
                coords['elevation'],
                'longitude':
                coords['longitude']
            })

        coord = get_geometry(st, coordsys='lonlat', return_center=True)

        tr = st[0]
        win = len(tr.data)
        if (win % 2) == 0:
            nfft = win / 2 + 1
        else:
            nfft = (win + 1) / 2

        nr = st.count()  # number of stations
        delta = st[0].stats.delta
        fs = 1 / delta
        fn = fs / 2
        freq = np.arange(0, fn, fn / nfft)

        value1, freq1 = find_nearest(freq, self.linf)
        value2, freq2 = find_nearest(freq, self.lsup)
        df = value2 - value1
        m = np.zeros((win, nr))

        WW = np.hamming(int(win))
        WW = np.transpose(WW)
        for i in range(nr):
            tr = st[i]
            if self.method == "FK":
                m[:, i] = (tr.data - np.mean(tr.data)) * WW
            else:
                m[:, i] = (tr.data - np.mean(tr.data))
        pdata = np.transpose(m)

        # --- Coherence ---
        NW = 2  # the time-bandwidth product; a good choice is 2-3
        K = 2 * NW - 1
        tapers, eigs = alg.dpss_windows(win, NW, K)
        tdata = tapers[None, :, :] * pdata[:, None, :]
        tspectra = fftpack.fft(tdata)

        w = np.empty((nr, int(K), int(nfft)))
        for i in range(nr):
            w[i], _ = utils.adaptive_weights(tspectra[i], eigs, sides=sides)

        Cx = np.ones((nr, nr, df), dtype=np.complex128)

        if self.method == "MTP.COHERENCE":
            for i in range(nr):
                for j in range(nr):
                    sxy = alg.mtm_cross_spectrum(tspectra[i], (tspectra[j]),
                                                 (w[i], w[j]),
                                                 sides='onesided')
                    sxx = alg.mtm_cross_spectrum(tspectra[i],
                                                 tspectra[i],
                                                 w[i],
                                                 sides='onesided')
                    syy = alg.mtm_cross_spectrum(tspectra[j],
                                                 tspectra[j],
                                                 w[j],
                                                 sides='onesided')
                    s = sxy / np.sqrt((sxx * syy))
                    cxcohe = s[value1:value2]
                    Cx[i, j, :] = cxcohe

        # Calculates conventional FK power (without normalization)
        if self.method == "FK":
            for i in range(nr):
                for j in range(nr):
                    A = np.fft.rfft(m[:, i])
                    B = np.fft.rfft(m[:, j])
                    #Power
                    #out = A * np.conjugate(B)

                    #Relative Power
                    den = np.absolute(A) * np.absolute(np.conjugate(B))
                    out = (A * np.conjugate(B)) / den

                    cxcohe = out[value1:value2]
                    Cx[i, j, :] = cxcohe

        r = np.zeros((nr, 2))
        S = np.zeros((1, 2))
        Pow = np.zeros((360, df))
        for n in range(nr):
            r[n, :] = coord[n][0:2]

        freq = freq[value1:value2]

        rad = np.pi / 180

        slow_range = np.linspace(0, self.slow, 360)

        for j in range(360):

            ang = self.azimuth2mathangle(self.baz)
            S[0, 0] = slow_range[j] * np.cos(rad * ang)
            S[0, 1] = slow_range[j] * np.sin(rad * ang)

            k = (S * r)
            K = np.sum(k, axis=1)
            n = 0
            for f in freq:
                A = np.exp(-1j * 2 * pi * f * K)
                B = np.conjugate(np.transpose(A))
                D = np.matmul(B, Cx[:, :, n]) / nr
                P = np.matmul(D, A) / nr
                Pow[j, n] = np.abs(P)
                n = n + 1

        Pow = np.mean(Pow, axis=1)

        return Pow
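
For every trial slowness, the loop above evaluates the beam power P = |a^H C a| / nr^2, where a = exp(-i 2 pi f k) is the steering vector built from the station delays k = s . r and C is the cross-spectral matrix at frequency f. A compact sketch of one such evaluation with made-up inputs:

import numpy as np

nr = 5                                    # number of stations
rng = np.random.default_rng(1)

r = rng.normal(size=(nr, 2))              # station offsets (x, y) in km
s = np.array([0.1, 0.2])                  # trial slowness vector (s/km)
f = 1.5                                   # frequency (Hz)

# A Hermitian cross-spectral matrix for this frequency (synthetic data).
X = rng.normal(size=nr) + 1j * rng.normal(size=nr)
Cx = np.outer(X, X.conj())

K = r @ s                                 # per-station delay k = s . r
A = np.exp(-1j * 2 * np.pi * f * K)       # steering vector
B = A.conj()                              # conjugate transpose of a 1-D vector

# Beam power, normalized by nr twice as in the loop above.
P = np.abs(B @ Cx @ A) / nr**2
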
Example #8
    def FKCoherence(self, st, inv, DT, linf, lsup, slim, win_len, sinc,
                    method):
        def find_nearest(array, value):

            idx, val = min(enumerate(array), key=lambda x: abs(x[1] - value))
            return idx, val

        sides = 'onesided'
        pi = math.pi

        smax = slim
        smin = -1 * smax
        Sx = np.arange(smin, smax, sinc)[np.newaxis]
        Sy = np.arange(smin, smax, sinc)[np.newaxis]
        nx = ny = len(Sx[0])
        Sy = np.fliplr(Sy)

        # Convert start time DT from a Gregorian ordinal to an actual date
        Time = DT
        Time = Time - int(Time)
        d = date.fromordinal(int(DT))
        date1 = d.isoformat()
        H = (Time * 24)
        H1 = int(H)  # hours
        minutes = (H - int(H)) * 60
        minutes1 = int(minutes)
        seconds = (minutes - int(minutes)) * 60
        H1 = str(H1).zfill(2)
        minutes1 = str(minutes1).zfill(2)
        seconds = "%.2f" % seconds
        seconds = str(seconds).zfill(2)
        DATE = date1 + "T" + str(H1) + minutes1 + seconds
        t1 = UTCDateTime(DATE)
        # End of date conversion

        st.trim(starttime=t1, endtime=t1 + win_len)
        st.sort()
        n = len(st)
        for i in range(n):
            coords = inv.get_coordinates(st[i].id)
            st[i].stats.coordinates = AttribDict({
                'latitude':
                coords['latitude'],
                'elevation':
                coords['elevation'],
                'longitude':
                coords['longitude']
            })

        coord = get_geometry(st, coordsys='lonlat', return_center=True)

        tr = st[0]
        win = len(tr.data)
        if (win % 2) == 0:
            nfft = win / 2 + 1
        else:
            nfft = (win + 1) / 2

        nr = st.count()  # number of stations
        delta = st[0].stats.delta
        fs = 1 / delta
        fn = fs / 2
        freq = np.arange(0, fn, fn / nfft)
        value1, freq1 = find_nearest(freq, linf)
        value2, freq2 = find_nearest(freq, lsup)
        df = value2 - value1
        m = np.zeros((win, nr))

        WW = np.hamming(int(win))
        WW = np.transpose(WW)
        for i in range(nr):
            tr = st[i]
            if method == "FK":
                m[:, i] = (tr.data - np.mean(tr.data)) * WW
            else:
                m[:, i] = (tr.data - np.mean(tr.data))
        pdata = np.transpose(m)

        # --- Coherence ---
        NW = 2  # the time-bandwidth product; a good choice is 2-3
        K = 2 * NW - 1
        tapers, eigs = alg.dpss_windows(win, NW, K)
        tdata = tapers[None, :, :] * pdata[:, None, :]
        tspectra = fftpack.fft(tdata)

        w = np.empty((nr, int(K), int(nfft)))
        for i in range(nr):
            w[i], _ = utils.adaptive_weights(tspectra[i], eigs, sides=sides)

        nseq = nr
        L = int(nfft)
        #csd_mat = np.zeros((nseq, nseq, L), 'D')
        #psd_mat = np.zeros((2, nseq, nseq, L), 'd')
        coh_mat = np.zeros((nseq, nseq, L), 'd')
        #coh_var = np.zeros_like(coh_mat)
        Cx = np.ones((nr, nr, df), dtype=np.complex128)

        if method == "MTP.COHERENCE":
            for i in range(nr):
                for j in range(nr):
                    sxy = alg.mtm_cross_spectrum(tspectra[i], (tspectra[j]),
                                                 (w[i], w[j]),
                                                 sides='onesided')
                    sxx = alg.mtm_cross_spectrum(tspectra[i],
                                                 tspectra[i],
                                                 w[i],
                                                 sides='onesided')
                    syy = alg.mtm_cross_spectrum(tspectra[j],
                                                 tspectra[j],
                                                 w[j],
                                                 sides='onesided')
                    s = sxy / np.sqrt((sxx * syy))
                    cxcohe = s[value1:value2]
                    Cx[i, j, :] = cxcohe

        # Calculates Conventional FK-power
        if method == "FK":
            for i in range(nr):
                for j in range(nr):
                    A = np.fft.rfft(m[:, i])
                    B = np.fft.rfft(m[:, j])
                    #Relative Power
                    den = np.absolute(A) * np.absolute(np.conjugate(B))
                    out = (A * np.conjugate(B)) / den
                    cxcohe = out[value1:value2]
                    Cx[i, j, :] = cxcohe

        r = np.zeros((nr, 2), dtype=np.complex128)
        S = np.zeros((1, 2), dtype=np.complex128)
        Pow = np.zeros((len(Sx[0]), len(Sy[0]), df))
        for n in range(nr):
            r[n, :] = coord[n][0:2]

        freq = freq[value1:value2]

        for i in range(ny):
            for j in range(nx):
                S[0, 0] = Sx[0][j]
                S[0, 1] = Sy[0][i]
                k = (S * r)
                K = np.sum(k, axis=1)
                n = 0
                for f in freq:
                    A = np.exp(-1j * 2 * pi * f * K)
                    B = np.conjugate(np.transpose(A))
                    D = np.matmul(B, Cx[:, :, n]) / nr
                    P = np.matmul(D, A) / nr
                    Pow[i, j, n] = np.abs(P)
                    n = n + 1
        Pow = np.mean(Pow, axis=2)
        #Pow = Pow / len(freq)
        Pow = np.fliplr(Pow)
        x = y = np.linspace(smin, smax, nx)

        nn = len(x)
        maximum_power = np.where(Pow == np.amax(Pow))
        Sxpow = (maximum_power[1] - nn / 2) * sinc
        Sypow = (maximum_power[0] - nn / 2) * sinc

        return Pow, Sxpow, Sypow, coord
def plotFK(st,
           startTime,
           endTime,
           frqlow,
           frqhigh,
           sll_x=-3.6,
           slm_x=3.6,
           sll_y=-3.6,
           slm_y=3.6,
           sl_s=0.18,
           beam='bartlett',
           prewhiten=0,
           coordsys='lonlat',
           verbose=False,
           plot=True,
           normalize=True,
           cmap='inferno_r',
           sl_circle=True,
           interpolation=None,
           vmin=None,
           vmax=None,
           plot_normalize=False,
           sl_corr=[0., 0.]):
    '''
    Modified from Stephen Arrowsmith's ROSES 2020 class.

    Computes and displays an FK plot for an ObsPy Stream object, st, given
    a start time and end time (as UTCDateTime objects) and a frequency band
    defined by frqlow and frqhigh. The slowness grid is defined by optional
    parameters (in s/km).

    This function adapts code directly from ObsPy, optimized here for simply
    plotting the FK spectrum.

    It includes the option to normalize the data in the time window before
    running FK, and the option to apply a slowness correction defined by
    sl_corr.
    '''

    stream = st.copy()
    stream = stream.trim(startTime, endTime)
    nstat = len(stream)

    fk_methods = dict(bartlett=0, capon=1)

    if nstat > 0:
        if normalize:
            for ms in stream:
                ms.data = ms.data / np.max(np.abs(ms.data))

        grdpts_x = int(((slm_x - sll_x) / sl_s + 0.5) + 1)
        grdpts_y = int(((slm_y - sll_y) / sl_s + 0.5) + 1)

        geometry = get_geometry(stream, coordsys=coordsys, verbose=verbose)

        time_shift_table = get_timeshift(geometry, sll_x, sll_y, sl_s,
                                         grdpts_x, grdpts_y)

        fs = stream[0].stats.sampling_rate
        nsamp = stream[0].stats.npts

        # generate plan for rfftr
        nfft = next_pow_2(nsamp)
        deltaf = fs / float(nfft)
        nlow = int(frqlow / float(deltaf) + 0.5)
        nhigh = int(frqhigh / float(deltaf) + 0.5)
        nlow = max(1, nlow)  # avoid using the offset
        nhigh = min(nfft // 2 - 1, nhigh)  # avoid using nyquist
        nf = nhigh - nlow + 1  # include upper and lower frequency

        # to speed up the routine a bit we estimate all steering vectors in advance
        steer = np.empty((nf, grdpts_x, grdpts_y, nstat), dtype=np.complex128)
        clibsignal.calcSteer(nstat, grdpts_x, grdpts_y, nf, nlow, deltaf,
                             time_shift_table, steer)
        _r = np.empty((nf, nstat, nstat), dtype=np.complex128)
        ft = np.empty((nstat, nf), dtype=np.complex128)

        # 0.22 matches 0.2 of historical C bbfk.c
        tap = cosine_taper(nsamp, p=0.22)
        relpow_map = np.empty((grdpts_x, grdpts_y), dtype=np.float64)
        abspow_map = np.empty((grdpts_x, grdpts_y), dtype=np.float64)

        for i, tr in enumerate(stream):
            dat = tr.data
            dat = (dat - dat.mean()) * tap
            ft[i, :] = np.fft.rfft(dat, nfft)[nlow:nlow + nf]

        ft = np.ascontiguousarray(ft, np.complex128)
        relpow_map.fill(0.)
        abspow_map.fill(0.)

        # computing the covariances of the signal at different receivers
        dpow = 0.
        for i in range(nstat):
            for j in range(i, nstat):
                _r[:, i, j] = ft[i, :] * ft[j, :].conj()
                if i != j:
                    _r[:, j, i] = _r[:, i, j].conjugate()
                else:
                    dpow += np.abs(_r[:, i, j].sum())
        dpow *= nstat

        clibsignal.generalizedBeamformer(relpow_map, abspow_map, steer, _r,
                                         nstat, prewhiten, grdpts_x, grdpts_y,
                                         nf, dpow, fk_methods[beam])
        fisher_map = (nstat - 1) * relpow_map / (1 - relpow_map)

        (ix, iy) = np.unravel_index(relpow_map.argmax(), relpow_map.shape)

        # here we compute baz, slow
        slow_x = sll_x + ix * sl_s
        slow_y = sll_y + iy * sl_s

        # ---------
        slow_x = slow_x - sl_corr[0]
        slow_y = slow_y - sl_corr[1]
        #print(slow_x, slow_y)
        # ---------

        slow = np.sqrt(slow_x**2 + slow_y**2)
        if slow < 1e-8:
            slow = 1e-8
        azimut = 180 * math.atan2(slow_x, slow_y) / math.pi
        baz = azimut % -360 + 180

        if plot:
            n_frames = 3
            (fig, ax) = plt.subplots(1,
                                     n_frames,
                                     sharey=True,
                                     figsize=(8, 3.5),
                                     constrained_layout=True)

            extent = [sll_x, slm_x + sl_s, sll_y, slm_y + sl_s]

            # FK power
            i = 0
            H = np.flipud(np.fliplr(abspow_map.T))
            if plot_normalize:
                H = H / H.max()
            im = ax[i].imshow(H,
                              extent=extent,
                              origin='lower',
                              aspect='auto',
                              cmap=cmap,
                              interpolation=interpolation)
            plt.colorbar(im,
                         ax=ax[i],
                         orientation="horizontal",
                         label='FK Power')

            # Semblance
            i += 1
            H = np.flipud(np.fliplr(relpow_map.T))
            if plot_normalize:
                H = H / H.max()
            im = ax[i].imshow(H,
                              extent=extent,
                              origin='lower',
                              aspect='auto',
                              cmap=cmap,
                              interpolation=interpolation)
            plt.colorbar(im,
                         ax=ax[i],
                         orientation="horizontal",
                         label='Semblance')

            # Fisher ratio
            i += 1
            H = np.flipud(np.fliplr(fisher_map.T))
            if plot_normalize:
                H = H / H.max()
            im = ax[i].imshow(H,
                              extent=extent,
                              origin='lower',
                              aspect='auto',
                              cmap=cmap,
                              interpolation=interpolation)
            plt.colorbar(im,
                         ax=ax[i],
                         orientation="horizontal",
                         label='Fisher ratio')

            for i in range(0, n_frames):
                if sl_circle:
                    angles = np.deg2rad(np.arange(0., 360, 1.))
                    # reference apparent velocities in km/s; each circle is
                    # drawn at the corresponding slowness 1/velocity
                    slowness = dict(seismic_P=6.0,
                                    Rayleigh=3.0,
                                    infrasound=0.34)
                    for (key, radius) in slowness.items():
                        x_circle = np.sin(angles) / radius
                        y_circle = np.cos(angles) / radius
                        ax[i].plot(x_circle,
                                   y_circle,
                                   linestyle='solid',
                                   label=key,
                                   alpha=0.6)

                ax[i].plot(0, 0, 'k+')
                ax[i].plot(-slow_x, -slow_y, 'w+')
                ax[i].set_xlabel('x-slowness [s/km]')

            ax[0].set_ylabel('y-slowness [s/km]')

            baz_max = round(baz % 360., 2)
            appvel_max = round(1 / slow, 2)
            title_str = (f'Peak semblance at {baz_max:.2f} deg. '
                         f'and {appvel_max:.2f} km/s '
                         f'between [ {frqlow:.2f} - {frqhigh:.2f} ] Hz')
            fig.suptitle(title_str)

            return fig, ax

        # # only flipping left-right, when using imshow to plot the matrix is takes
        # # points top to bottom points are now starting at top-left in row major
        # return np.fliplr(relpow_map.T), baz % 360, 1. / slow

    else:
        print(f'No data present for timerange {startTime} - {endTime}')
        return
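
A minimal usage sketch for plotFK; the file path, station coordinates, and time window below are placeholders, and each trace must carry coordinates (here attached by hand) so that get_geometry() can work with coordsys='lonlat':

from obspy import read, UTCDateTime
from obspy.core.util import AttribDict

st = read('/path/to/array_data.mseed')        # placeholder path

# Placeholder station coordinates attached to the first three traces.
coords = [(24.5797, 121.4842, 385.1),
          (24.5798, 121.4842, 384.9),
          (24.5797, 121.4843, 385.1)]
for tr, (lat, lon, elev) in zip(st, coords):
    tr.stats.coordinates = AttribDict(latitude=lat, longitude=lon,
                                      elevation=elev)

t0 = UTCDateTime('2020-01-01T00:00:00')       # placeholder window
fig, ax = plotFK(st, t0, t0 + 60.0, frqlow=1.0, frqhigh=5.0,
                 coordsys='lonlat', plot=True)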