Example #1
def correlation_random(rec0=0, rec1=1, verbose=0, plot=0, save=0):
    """
	cct,cct_proc,t,ccf,ccf_proc,f = correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=1)

	Compute and plot correlation function based on random source summation.

	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			plot when 1.
	verbose:		give screen output when 1.
	save:			store individual correlations to OUTPUT/correlations_individual

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].
	
	Last updated: 19 May 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    #- Start time.
    t1 = time.time()

    #- Input parameters.
    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    #- Compute number of samples as power of 2.
    n = int(2.0**np.round(np.log2(p.Twindow / p.dt)))

    #- Frequency axis.
    df = 1.0 / (n * p.dt)
    f = np.arange(0.0, 1.0 / p.dt, df)
    omega = 2.0 * np.pi * f

    #- Compute time axis
    t = np.arange(-0.5 * n * p.dt, 0.5 * n * p.dt, p.dt)

    #- Compute instrument response and natural source spectrum.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)

    #- Issue some information if wanted.
    if verbose == 1:

        print('number of samples: ' + str(n))
        print('maximum time: ' + str(np.max(t)) + ' s')
        print('maximum frequency: ' + str(np.max(f)) + ' Hz')

    #- Warnings.
    if (p.fmax > 1.0 / p.dt):
        print('WARNING: maximum bandpass frequency cannot be represented with this time step!')

    if (p.fmin < 1.0 / (n * p.dt)):
        print('WARNING: minimum bandpass frequency cannot be represented with this window length!')

    #==============================================================================
    #- March through source locations and compute raw frequency-domain noise traces.
    #==============================================================================

    #- Set a specific random seed to make simulation repeatable, e.g. for different receiver pair.
    np.random.seed(p.seed)

    #- Initialise frequency-domain wavefields.
    u1 = np.zeros([n, p.Nwindows], dtype=complex)
    u2 = np.zeros([n, p.Nwindows], dtype=complex)
    G1 = np.zeros([n, p.Nwindows], dtype=complex)
    G2 = np.zeros([n, p.Nwindows], dtype=complex)

    #- Regularise zero-frequency to avoid singularity in Green function.
    omega[0] = 0.01 * 2.0 * np.pi * df

    #- March through source indices.
    for k in indeces:

        #- Green function for a specific source point.
        G1[:, 0] = g.green_input(p.x[rec0], p.y[rec0], x[k], y[k], omega, p.dx,
                                 p.dy, p.rho, p.v, p.Q)
        G2[:, 0] = g.green_input(p.x[rec1], p.y[rec1], x[k], y[k], omega, p.dx,
                                 p.dy, p.rho, p.v, p.Q)

        #- Apply instrument response and source spectrum
        G1[:, 0] = G1[:, 0] * instrument * np.sqrt(natural)
        G2[:, 0] = G2[:, 0] * instrument * np.sqrt(natural)

        #- Copy this Green function to all time intervals.
        for i in range(p.Nwindows):

            G1[:, i] = G1[:, 0]
            G2[:, i] = G2[:, 0]

        #- Random phase matrix, frequency steps times time windows.
        phi = 2.0 * np.pi * (np.random.rand(n, p.Nwindows) - 0.5)
        ff = np.exp(1j * phi)

        #- Matrix of random frequency-domain wavefields.
        u1 += S[k] * ff * G1
        u2 += S[k] * ff * G2

    #- March through time windows to add earthquakes.
    for win in range(p.Nwindows):

        neq = len(p.eq_t[win])

        for i in range(neq):

            G1 = g.green_input(p.x[rec0], p.y[rec0], p.eq_x[win][i],
                               p.eq_y[win][i], omega, p.dx, p.dy, p.rho, p.v,
                               p.Q)
            G2 = g.green_input(p.x[rec1], p.y[rec1], p.eq_x[win][i],
                               p.eq_y[win][i], omega, p.dx, p.dy, p.rho, p.v,
                               p.Q)

            G1 = G1 * instrument * np.sqrt(natural)
            G2 = G2 * instrument * np.sqrt(natural)

            u1[:, win] += p.eq_m[win][i] * G1 * np.exp(
                -1j * omega * p.eq_t[win][i])
            u2[:, win] += p.eq_m[win][i] * G2 * np.exp(
                -1j * omega * p.eq_t[win][i])

    #==============================================================================
    #- Processing.
    #==============================================================================

    #- Apply single-station processing.
    u1_proc, u2_proc = proc.processing_single_station(u1, u2, f, verbose)

    #- Compute correlation function, raw and processed.
    ccf = u1 * np.conj(u2)
    ccf_proc = u1_proc * np.conj(u2_proc)

    #- Apply correlation processing.
    ccf_proc = proc.processing_correlation(ccf_proc, f, verbose)

    #==============================================================================
    #- Apply the standard bandpass.
    #==============================================================================

    bandpass = np.zeros(np.shape(f))

    Nminmax = int(np.round(p.bp_fmin / df))
    Nminmin = int(np.round((p.bp_fmin - p.bp_width) / df))
    Nmaxmin = int(np.round(p.bp_fmax / df))
    Nmaxmax = int(np.round((p.bp_fmax + p.bp_width) / df))

    bandpass[Nminmin:Nminmax] = np.linspace(0.0, 1.0, Nminmax - Nminmin)
    bandpass[Nmaxmin:Nmaxmax] = np.linspace(1.0, 0.0, Nmaxmax - Nmaxmin)
    bandpass[Nminmax:Nmaxmin] = 1.0

    for i in range(p.Nwindows):

        ccf[:, i] = bandpass * ccf[:, i]
        ccf_proc[:, i] = bandpass * ccf_proc[:, i]

    #==============================================================================
    #- Time-domain correlation function.
    #==============================================================================

    #- Some care has to be taken here with the inverse FFT convention of numpy.

    cct = np.zeros([n, p.Nwindows], dtype=float)
    cct_proc = np.zeros([n, p.Nwindows], dtype=float)

    dummy = np.real(np.fft.ifft(ccf, axis=0) / p.dt)
    cct[n // 2:n, :] = dummy[0:n // 2, :]
    cct[0:n // 2, :] = dummy[n // 2:n, :]

    dummy = np.real(np.fft.ifft(ccf_proc, axis=0) / p.dt)
    cct_proc[n // 2:n, :] = dummy[0:n // 2, :]
    cct_proc[0:n // 2, :] = dummy[n // 2:n, :]

    #==============================================================================
    #- Save results if wanted.
    #==============================================================================

    if save == 1:

        #- Store frequency and time axes.

        fid = open('OUTPUT/correlations_individual/f', 'wb')
        np.save(fid, f)
        fid.close()

        fid = open('OUTPUT/correlations_individual/t', 'wb')
        np.save(fid, t)
        fid.close()

        #- Store raw and processed correlations in the frequency domain.

        fn = 'OUTPUT/correlations_individual/ccf_' + str(rec0) + '_' + str(
            rec1)
        fid = open(fn, 'wb')
        np.save(fid, ccf)
        fid.close()

        fn = 'OUTPUT/correlations_individual/ccf_proc_' + str(
            rec0) + '_' + str(rec1)
        fid = open(fn, 'wb')
        np.save(fid, ccf_proc)
        fid.close()

    #==============================================================================
    #- Plot results if wanted.
    #==============================================================================

    if plot == 1:

        #- Noise traces for first window.
        plt.subplot(2, 1, 1)
        plt.plot(t, np.real(np.fft.ifft(u1[:, 0])) / p.dt, 'k')
        plt.ylabel('u1(t) [N/m^2]')
        plt.title('recordings for first time window')
        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(np.fft.ifft(u2[:, 0])) / p.dt, 'k')
        plt.ylabel('u2(t) [N/m^2]')
        plt.xlabel('t [s]')

        plt.show()

        #- Spectrum of the pressure wavefield for first window.
        plt.subplot(2, 1, 1)
        plt.plot(f, np.abs(np.sqrt(bandpass) * u1[:, 0]), 'k', linewidth=2)
        plt.plot(f, np.real(np.sqrt(bandpass) * u1[:, 0]), 'b', linewidth=1)
        plt.plot(f, np.imag(np.sqrt(bandpass) * u1[:, 0]), 'r', linewidth=1)
        plt.ylabel('u1(f) [Ns/m^2]')
        plt.title(
            'raw and processed spectra for first window (abs=black, real=blue, imag=red)'
        )

        plt.subplot(2, 1, 2)
        plt.plot(f,
                 np.abs(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'k',
                 linewidth=2)
        plt.plot(f,
                 np.real(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'b',
                 linewidth=1)
        plt.plot(f,
                 np.imag(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'r',
                 linewidth=1)
        plt.ylabel('u1_proc(f) [?]')
        plt.xlabel('f [Hz]')

        plt.show()

        #- Raw time- and frequency-domain correlation for first window.
        plt.subplot(2, 1, 1)
        plt.semilogy(f, np.abs(ccf[:, 0]), 'k', linewidth=2)
        plt.title('raw frequency-domain correlation for first window')
        plt.ylabel('correlation [N^2 s^2 / m^4]')
        plt.xlabel('f [Hz]')

        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(cct[:, 0]), 'k')
        plt.title('raw time-domain correlation for first window')
        plt.ylabel('correlation [N^2 s / m^4]')
        plt.xlabel('t [s]')

        plt.show()

        #- Processed time- and frequency-domain correlation for first window.
        plt.subplot(2, 1, 1)
        plt.semilogy(f, np.abs(ccf_proc[:, 0]), 'k', linewidth=2)
        plt.title('processed frequency-domain correlation for first window')
        plt.ylabel('correlation [N^2 s^2 / m^4]*unit(T)')
        plt.xlabel('f [Hz]')

        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(cct_proc[:, 0]))
        plt.title('processed time-domain correlation for first window')
        plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
        plt.xlabel('t [s]')

        plt.show()

        #- Raw and processed ensemble correlations.
        plt.plot(t, np.sum(cct, 1) / np.max(cct), 'k')
        plt.plot(t, np.sum(cct_proc, 1) / np.max(cct_proc), 'r')
        plt.title(
            'ensemble time-domain correlation (black=raw, red=processed)')
        plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
        plt.xlabel('t [s]')

        plt.show()

    #- End time.
    t2 = time.time()

    if verbose == 1:
        print('elapsed time: ' + str(t2 - t1) + ' s')

    #==============================================================================
    #- Output.
    #==============================================================================

    return cct, cct_proc, t, ccf, ccf_proc, f
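
A minimal usage sketch for the function above (assumption: the listing is importable from a module named correlation_random; the module name is not confirmed by the listing, only numpy and matplotlib are standard):

import numpy as np
import matplotlib.pyplot as plt

# Assumption: the function above lives in correlation_random.py.
from correlation_random import correlation_random

# Raw and processed correlations for receivers 0 and 1, without plotting or saving.
cct, cct_proc, t, ccf, ccf_proc, f = correlation_random(rec0=0, rec1=1,
                                                        verbose=1, plot=0, save=0)

# Stack the per-window time-domain correlations into an ensemble correlation,
# as done in the plotting section of the function itself.
cct_ensemble = np.sum(cct, axis=1)

plt.plot(t, cct_ensemble, 'k')
plt.xlabel('t [s]')
plt.ylabel('correlation [N^2 s / m^4]')
plt.show()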
Example #2
def source_kernel(cct,
                  t,
                  rec0=0,
                  rec1=1,
                  measurement='cctime',
                  effective=0,
                  plot=0):
    """
	x,y,K = source_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', effective=0, plot=0):

	Compute source kernel for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	plot:			When plot=1, plot source kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K:				Source kernel [unit of measurement * m^2 / Pa^4 s^2].

	Last updated: 27 May 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)

    nx = len(x_line)
    ny = len(y_line)

    x, y = np.meshgrid(1.5 * x_line, 1.5 * y_line)

    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    K_source = np.zeros(np.shape(x))

    #- Read propagation corrector if needed. --------------------------------------

    if (effective == 1):

        gf = gpc.get_propagation_corrector(rec0, rec1, plot=0)

    else:

        gf = np.ones(len(f), dtype=complex)

    #- Compute number of grid points corresponding to the minimum wavelength. -----

    L = int(np.ceil(p.v / (p.fmax * p.dx)))

    #==============================================================================
    #- Compute the adjoint source.
    #==============================================================================

    a = adsrc.adsrc(cct, t, measurement, plot)

    #==============================================================================
    #- Compute kernel.
    #==============================================================================

    for k in range(len(omega)):

        #- Green functions.
        G1 = g.green_input(x, y, p.x[rec0], p.y[rec0], omega[k], p.dx, p.dy,
                           p.rho, p.v, p.Q)
        G2 = g.green_input(x, y, p.x[rec1], p.y[rec1], omega[k], p.dx, p.dy,
                           p.rho, p.v, p.Q)

        #- Compute kernel.
        K_source += 2.0 * np.real(gf[k] * G1 * np.conj(G2) * a[k])

    #==============================================================================
    #- Smooth over minimum wavelength.
    #==============================================================================

    for k in range(L):
        K_source[1:ny -
                 2, :] = (K_source[1:ny - 2, :] + K_source[0:ny - 3, :] +
                          K_source[2:ny - 1, :]) / 3.0

    for k in range(L):
        K_source[:,
                 1:nx - 2] = (K_source[:, 1:nx - 2] + K_source[:, 0:nx - 3] +
                              K_source[:, 2:nx - 1]) / 3.0

    #==============================================================================
    #- Visualise if wanted.
    #==============================================================================

    if plot == 1:

        cmap = plt.get_cmap('RdBu')
        plt.pcolormesh(x, y, K_source, cmap=cmap, shading='interp')
        plt.clim(-np.max(np.abs(K_source)) * 0.15,
                 np.max(np.abs(K_source)) * 0.15)
        plt.axis('image')
        plt.colorbar()
        plt.title('Source kernel [unit of measurement s^2 / m^2]')
        plt.xlabel('x [km]')
        plt.ylabel('y [km]')

        plt.plot(p.x[rec0], p.y[rec0], 'ro')
        plt.plot(p.x[rec1], p.y[rec1], 'ro')

        plt.show()

    return x, y, K_source
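
A short usage sketch chaining the two functions above (assumption: module names correlation_random and source_kernel; only the function names appear in the listings):

# Assumed module layout; adjust the import paths to the actual project.
from correlation_random import correlation_random
from source_kernel import source_kernel

# Time-domain correlation for the receiver pair (0, 1).
cct, cct_proc, t, ccf, ccf_proc, f = correlation_random(rec0=0, rec1=1)

# Source kernel for a cross-correlation time-shift ('cctime') measurement,
# with the built-in plot enabled.
x, y, K = source_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', plot=1)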
Example #3
def structure_kernel(cct,
                     t,
                     rec0=0,
                     rec1=1,
                     measurement='cctime',
                     dir_forward='OUTPUT/',
                     effective=0,
                     plot=0):
    """
	x,y,K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0):

	Compute structure kernel K_kappa for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	dir_forward:	Location of the forward interferometric fields from rec0 and rec1. Must exist.
	plot:			When plot=1, plot structure kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K:				Structure kernel [unit of measurement * 1/N].

	Last updated: 11 July 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)

    x, y = np.meshgrid(x_line, y_line)

    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    df = f[1] - f[0]
    omega = 2.0 * np.pi * f

    K_kappa = np.zeros(np.shape(x))

    C1 = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)
    C2 = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)

    nx = len(x_line)
    ny = len(y_line)

    kappa = p.rho * (p.v**2)

    #- Frequency- and space distribution of the source. ---------------------------

    S, indeces = s.space_distribution(plot=0)
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    #- Compute the adjoint source. ------------------------------------------------

    a = adsrc.adsrc(cct, t, measurement, plot)

    #- Compute number of grid points corresponding to the minimum wavelength. -----

    L = int(np.ceil(p.v / (p.fmax * p.dx)))

    #- Read propagation corrector if needed. --------------------------------------

    if (effective == 1):

        gf = gpc.get_propagation_corrector(rec0, rec1, plot=0)

    else:

        gf = np.ones(len(f), dtype=complex)

    #==============================================================================
    #- Load forward interferometric wavefields.
    #==============================================================================

    fn = dir_forward + '/cf_' + str(rec0)
    fid = open(fn, 'rb')
    C1 = np.load(fid)
    fid.close()

    fn = dir_forward + '/cf_' + str(rec1)
    fid = open(fn, 'rb')
    C2 = np.load(fid)
    fid.close()

    #==============================================================================
    #- Loop over frequencies.
    #==============================================================================

    for k in range(len(omega)):

        w = omega[k]

        #- Adjoint fields. --------------------------------------------------------
        G1 = -w**2 * g.green_input(x, y, p.x[rec0], p.y[rec0], w, p.dx, p.dy,
                                   p.rho, p.v, p.Q) * gf[k]
        G2 = -w**2 * g.green_input(x, y, p.x[rec1], p.y[rec1], w, p.dx, p.dy,
                                   p.rho, p.v, p.Q) * gf[k]

        #- Multiplication with adjoint fields. ------------------------------------
        K_kappa = K_kappa - 2.0 * np.real(G2 * C1[:, :, k] * np.conj(a[k]) +
                                          G1 * C2[:, :, k] * a[k])

    K_kappa = K_kappa / kappa

    #==============================================================================
    #- Smooth over minimum wavelength.
    #==============================================================================

    for k in range(L):
        K_kappa[1:ny - 2, :] = (K_kappa[1:ny - 2, :] + K_kappa[0:ny - 3, :] +
                                K_kappa[2:ny - 1, :]) / 3.0

    for k in range(L):
        K_kappa[:, 1:nx - 2] = (K_kappa[:, 1:nx - 2] + K_kappa[:, 0:nx - 3] +
                                K_kappa[:, 2:nx - 1]) / 3.0

    #==============================================================================
    #- Visualise if wanted.
    #==============================================================================

    if plot == 1:

        cmap = plt.get_cmap('RdBu')
        plt.pcolormesh(x, y, K_kappa, cmap=cmap, shading='interp')
        plt.clim(-np.max(np.abs(K_kappa)) * 0.25,
                 np.max(np.abs(K_kappa)) * 0.25)
        plt.axis('image')
        plt.colorbar()
        plt.title('Structure (kappa) kernel [unit of measurement / m^2]')
        plt.xlabel('x [km]')
        plt.ylabel('y [km]')

        plt.plot(p.x[rec0], p.y[rec0], 'ro')
        plt.plot(p.x[rec1], p.y[rec1], 'ro')

        plt.show()

    return x, y, K_kappa
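
A usage sketch for the structure kernel (assumption: module names correlation_random, precompute and structure_kernel; as the docstring requires, the forward interferometric fields must already exist in dir_forward):

# Assumed module layout; adjust the import paths to the actual project.
from correlation_random import correlation_random
from precompute import precompute
from structure_kernel import structure_kernel

# The structure kernel needs the forward interferometric wavefields of both
# receivers; precompute() writes them to OUTPUT/ (see the precompute listing below).
precompute(rec=0)
precompute(rec=1)

# Correlation and kappa kernel for the receiver pair (0, 1).
cct, cct_proc, t, ccf, ccf_proc, f = correlation_random(rec0=0, rec1=1)
x, y, K_kappa = structure_kernel(cct, t, rec0=0, rec1=1,
                                 measurement='cctime',
                                 dir_forward='OUTPUT/', plot=1)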
Example #4
def precompute(rec=0, verbose=False, mode='individual'):
    """
	precompute(rec=0,verbose=False,mode='individual')

	Compute the correlation wavefield in the frequency domain and store it in OUTPUT/ for re-use in snapshot kernel computation.

	INPUT:
	------

	rec:		index of reference receiver.
	verbose:	give screen output when True.
	mode:		'individual' sums over individual sources. This is very efficient when there are only a few sources. This mode requires that the indices array returned by source.space_distribution is not empty.
				'random' performs a randomised, down-sampled integration over a quasi-continuous distribution of sources. This is more efficient for widely distributed and rather smooth sources.
				'combined' is the sum of 'individual' and 'random'. This is efficient when a few point sources are super-imposed on a quasi-continuous distribution.

	OUTPUT:
	-------

	Frequency-domain interferometric wavefield stored in /OUTPUT.

	Last updated: 18 July 2019.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    nx = len(x_line)
    ny = len(y_line)

    #- Frequency line.
    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    #- Power-spectral density.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    C = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)

    #==============================================================================
    #- Compute correlation field by summing over individual sources.
    #==============================================================================

    if (mode == 'individual'):

        #- March through the spatial grid. ----------------------------------------
        for idx in range(nx):

            if verbose:
                print(str(100 * float(idx) / float(len(x_line))) + ' %')

            for idy in range(ny):

                #- March through all sources.
                for k in indeces:

                    C[idy, idx, :] += S[k] * filt * g.conjG1_times_G2(
                        x[idy, idx], y[idy, idx], p.x[rec], p.y[rec], x[k],
                        y[k], omega, p.dx, p.dy, p.rho, p.v, p.Q)

        #- Normalisation.
        C = np.conj(C) * p.dx * p.dy

    #==============================================================================
    #- Compute correlation field by random integration over all sources
    #==============================================================================

    downsampling_factor = 5.0
    n_samples = int(np.floor(float(nx * ny) / downsampling_factor))

    if (mode == 'random'):

        #- March through frequencies. ---------------------------------------------

        for idf in range(0, len(f), 3):

            if verbose: print('f=', f[idf], ' Hz')

            if (filt[idf] > 0.05 * np.max(filt)):

                #- March through downsampled spatial grid. ------------------------

                t0 = time.time()

                for idx in range(0, nx, 3):
                    for idy in range(0, ny, 3):

                        samples_x = np.random.randint(0, nx, n_samples)
                        samples_y = np.random.randint(0, ny, n_samples)

                        G1 = g.green_input(x[samples_y, samples_x],
                                           y[samples_y,
                                             samples_x], x_line[idx],
                                           y_line[idy], omega[idf], p.dx, p.dy,
                                           p.rho, p.v, p.Q)
                        G2 = g.green_input(x[samples_y,
                                             samples_x], y[samples_y,
                                                           samples_x],
                                           p.x[rec], p.y[rec], omega[idf],
                                           p.dx, p.dy, p.rho, p.v, p.Q)

                        C[idy, idx,
                          idf] = downsampling_factor * filt[idf] * np.sum(
                              S[samples_y, samples_x] * G1 * np.conj(G2))

                t1 = time.time()
                if verbose: print('time per frequency: ', t1 - t0, 's')

        #- Normalisation. ---------------------------------------------------------

        C = C * p.dx * p.dy

        #- Spatial interpolation. -------------------------------------------------

        for idx in range(0, nx - 3, 3):
            C[:, idx + 1, :] = 0.67 * C[:, idx, :] + 0.33 * C[:, idx + 3, :]
            C[:, idx + 2, :] = 0.33 * C[:, idx, :] + 0.67 * C[:, idx + 3, :]

        for idy in range(0, ny - 3, 3):
            C[idy + 1, :, :] = 0.67 * C[idy, :, :] + 0.33 * C[idy + 3, :, :]
            C[idy + 2, :, :] = 0.33 * C[idy, :, :] + 0.67 * C[idy + 3, :, :]

        #- Frequency interpolation. -----------------------------------------------

        for idf in range(0, len(f) - 3, 3):
            C[:, :, idf + 1] = 0.67 * C[:, :, idf] + 0.33 * C[:, :, idf + 3]
            C[:, :, idf + 2] = 0.33 * C[:, :, idf] + 0.67 * C[:, :, idf + 3]

    #==============================================================================
    #- Compute correlation field by random integration over all sources + individual sources
    #==============================================================================

    downsampling_factor = 5.0
    n_samples = int(np.floor(float(nx * ny) / downsampling_factor))

    if (mode == 'combined'):

        #--------------------------------------------------------------------------
        #- March through frequencies for random sampling. -------------------------

        for idf in range(0, len(f), 3):

            if verbose: print('f=', f[idf], ' Hz')

            if (filt[idf] > 0.05 * np.max(filt)):

                #- March through downsampled spatial grid. ------------------------

                t0 = time.time()

                for idx in range(0, nx, 3):
                    for idy in range(0, ny, 3):

                        samples_x = np.random.randint(0, nx, n_samples)
                        samples_y = np.random.randint(0, ny, n_samples)

                        G1 = g.green_input(x[samples_y, samples_x],
                                           y[samples_y,
                                             samples_x], x_line[idx],
                                           y_line[idy], omega[idf], p.dx, p.dy,
                                           p.rho, p.v, p.Q)
                        G2 = g.green_input(x[samples_y,
                                             samples_x], y[samples_y,
                                                           samples_x],
                                           p.x[rec], p.y[rec], omega[idf],
                                           p.dx, p.dy, p.rho, p.v, p.Q)

                        C[idy, idx,
                          idf] = downsampling_factor * filt[idf] * np.sum(
                              S[samples_y, samples_x] * G1 * np.conj(G2))

                t1 = time.time()
                if verbose: print('time per frequency: ', t1 - t0, 's')

        #- Spatial interpolation. -------------------------------------------------

        for idx in range(0, nx - 3, 3):
            C[:, idx + 1, :] = 0.67 * C[:, idx, :] + 0.33 * C[:, idx + 3, :]
            C[:, idx + 2, :] = 0.33 * C[:, idx, :] + 0.67 * C[:, idx + 3, :]

        for idy in range(0, ny - 3, 3):
            C[idy + 1, :, :] = 0.67 * C[idy, :, :] + 0.33 * C[idy + 3, :, :]
            C[idy + 2, :, :] = 0.33 * C[idy, :, :] + 0.67 * C[idy + 3, :, :]

        #- Frequency interpolation. -----------------------------------------------

        for idf in range(0, len(f) - 3, 3):
            C[:, :, idf + 1] = 0.67 * C[:, :, idf] + 0.33 * C[:, :, idf + 3]
            C[:, :, idf + 2] = 0.33 * C[:, :, idf] + 0.67 * C[:, :, idf + 3]

        #--------------------------------------------------------------------------
        #- March through the spatial grid for individual sources. -----------------

        for idx in range(nx):

            if verbose:
                print(str(100 * float(idx) / float(len(x_line))) + ' %')

            for idy in range(ny):

                #- March through all sources.
                for k in indeces:

                    C[idy, idx, :] += S[k] * filt * np.conj(
                        g.conjG1_times_G2(x[idy, idx], y[idy, idx], p.x[rec],
                                          p.y[rec], x[k], y[k], omega, p.dx,
                                          p.dy, p.rho, p.v, p.Q))

        #- Normalisation. ---------------------------------------------------------

        C = C * p.dx * p.dy

    #==============================================================================
    #- Save interferometric wavefield.
    #==============================================================================

    fn = 'OUTPUT/cf_' + str(rec)
    np.save(fn, C)
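
A usage sketch illustrating the mode choice described in the docstring above (assumption: the function is importable from a module named precompute; the calls are alternatives, one per source-distribution scenario):

# Assumed module layout; adjust the import path to the actual project.
from precompute import precompute

# A few discrete point sources: exact summation over the individual source indices.
precompute(rec=0, verbose=True, mode='individual')

# Widely distributed, smooth source distribution: randomised, down-sampled integration.
precompute(rec=0, verbose=True, mode='random')

# Point sources superimposed on a quasi-continuous background: both contributions.
precompute(rec=0, verbose=True, mode='combined')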
Example #5
def precompute(rec=0,verbose=0,mode='individual'):

	"""
	precompute(rec=0, verbose=0, mode='individual')

	Compute the correlation wavefield in the frequency domain and store it in OUTPUT/ for re-use in kernel computation.

	INPUT:
	------

	rec:		index of reference receiver.
	verbose:	give screen output when 1.
	mode:		'individual' sums over individual sources. This is very efficient when there are only a few sources. This mode requires that the indices array returned by source.space_distribution is not empty.
				'random' performs a randomised, down-sampled integration over a quasi-continuous distribution of sources. This is more efficient for widely distributed and rather smooth sources.
				'combined' is the sum of 'individual' and 'random'. This is efficient when a few point sources are super-imposed on a quasi-continuous distribution.

	OUTPUT:
	-------

	Frequency-domain interferometric wavefield stored in /OUTPUT.

	Last updated: 13 July 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	nx=len(x_line)
	ny=len(y_line)

	#- Frequency line.
	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	#- Power-spectral density.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	C=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)

	#==============================================================================
	#- Compute correlation field by summing over individual sources.
	#==============================================================================

	if (mode=='individual'):

		#- March through the spatial grid. ----------------------------------------
		for idx in range(nx):

			if (verbose==1): print str(100*float(idx)/float(len(x_line)))+' %'

			for idy in range(ny):

				#- March through all sources.
				for k in indeces:

					C[idy,idx,:]+=S[k]*filt*g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)
					
		#- Normalisation.
		C=np.conj(C)*p.dx*p.dy

	#==============================================================================
	#- Compute correlation field by random integration over all sources
	#==============================================================================

	downsampling_factor=5.0
	n_samples=np.floor(float(nx*ny)/downsampling_factor)

	if (mode=='random'):

		#- March through frequencies. ---------------------------------------------

		for idf in range(0,len(f),3):

			if verbose==1: print 'f=', f[idf], ' Hz'

			if (filt[idf]>0.05*np.max(filt)):

				#- March through downsampled spatial grid. ------------------------

				t0=time.time()

				for idx in range(0,nx,3):
					for idy in range(0,ny,3):

						samples_x=np.random.randint(0,nx,n_samples)
						samples_y=np.random.randint(0,ny,n_samples)
						
						G1=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],x_line[idx],y_line[idy],omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
						G2=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],p.x[rec],   p.y[rec],   omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
				
						C[idy,idx,idf]=downsampling_factor*filt[idf]*np.sum(S[samples_y,samples_x]*G1*np.conj(G2))
					
				t1=time.time()
				if verbose==1: print 'time per frequency: ', t1-t0, 's'

		#- Normalisation. ---------------------------------------------------------

		C=C*p.dx*p.dy

		#- Spatial interpolation. -------------------------------------------------

		for idx in range(0,nx-3,3):
			C[:,idx+1,:]=0.67*C[:,idx,:]+0.33*C[:,idx+3,:]
			C[:,idx+2,:]=0.33*C[:,idx,:]+0.67*C[:,idx+3,:]

		for idy in range(0,ny-3,3):
			C[idy+1,:,:]=0.67*C[idy,:,:]+0.33*C[idy+3,:,:]
			C[idy+2,:,:]=0.33*C[idy,:,:]+0.67*C[idy+3,:,:]

		#- Frequency interpolation. -----------------------------------------------

		for idf in range(0,len(f)-3,3):
			C[:,:,idf+1]=0.67*C[:,:,idf]+0.33*C[:,:,idf+3]
			C[:,:,idf+2]=0.33*C[:,:,idf]+0.67*C[:,:,idf+3]

	#==============================================================================
	#- Compute correlation field by random integration over all sources + individual sources
	#==============================================================================

	downsampling_factor=5.0
	n_samples=np.floor(float(nx*ny)/downsampling_factor)

	if (mode=='combined'):

		#--------------------------------------------------------------------------
		#- March through frequencies for random sampling. -------------------------

		for idf in range(0,len(f),3):

			if verbose==1: print 'f=', f[idf], ' Hz'

			if (filt[idf]>0.05*np.max(filt)):

				#- March through downsampled spatial grid. ------------------------

				t0=time.time()

				for idx in range(0,nx,3):
					for idy in range(0,ny,3):

						samples_x=np.random.randint(0,nx,n_samples)
						samples_y=np.random.randint(0,ny,n_samples)
						
						G1=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],x_line[idx],y_line[idy],omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
						G2=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],p.x[rec],   p.y[rec],   omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
				
						C[idy,idx,idf]=downsampling_factor*filt[idf]*np.sum(S[samples_y,samples_x]*G1*np.conj(G2))
					
				t1=time.time()
				if verbose==1: print 'time per frequency: ', t1-t0, 's'


		#- Spatial interpolation. -------------------------------------------------

		for idx in range(0,nx-3,3):
			C[:,idx+1,:]=0.67*C[:,idx,:]+0.33*C[:,idx+3,:]
			C[:,idx+2,:]=0.33*C[:,idx,:]+0.67*C[:,idx+3,:]

		for idy in range(0,ny-3,3):
			C[idy+1,:,:]=0.67*C[idy,:,:]+0.33*C[idy+3,:,:]
			C[idy+2,:,:]=0.33*C[idy,:,:]+0.67*C[idy+3,:,:]

		#- Frequency interpolation. -----------------------------------------------

		for idf in range(0,len(f)-3,3):
			C[:,:,idf+1]=0.67*C[:,:,idf]+0.33*C[:,:,idf+3]
			C[:,:,idf+2]=0.33*C[:,:,idf]+0.67*C[:,:,idf+3]


		#--------------------------------------------------------------------------
		#- March through the spatial grid for individual sources. -----------------
		
		for idx in range(nx):

			if (verbose==1): print str(100*float(idx)/float(len(x_line)))+' %'

			for idy in range(ny):

				#- March through all sources.
				for k in indeces:

					C[idy,idx,:]+=S[k]*filt*np.conj(g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q))
					
		
		#- Normalisation. ---------------------------------------------------------

		C=C*p.dx*p.dy

	#==============================================================================
	#- Save interferometric wavefield.
	#==============================================================================

	fn='OUTPUT/cf_'+str(rec)
	fid=open(fn,'w')
	np.save(fid,C)
	fid.close()
Example #6
def correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=0):

	"""
	cct,cct_proc,t,ccf,ccf_proc,f = correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=1)

	Compute and plot correlation function based on random source summation.

	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			plot when 1.
	verbose:		give screen output when 1.
	save:			store individual correlations to OUTPUT/correlations_individual

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].
	
	Last updated: 19 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	#- Start time.
	t1=time.time()

	#- Input parameters.
	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	#- Compute number of samples as power of 2.
	n=2.0**np.round(np.log2((p.Twindow)/p.dt))

	#- Frequency axis.
	df=1.0/(n*p.dt)
	f=np.arange(0.0,1.0/p.dt,df)
	omega=2.0*np.pi*f

	#- Compute time axis
	t=np.arange(-0.5*n*p.dt,0.5*n*p.dt,p.dt)

	#- Compute instrument response and natural source spectrum.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)

	#- Issue some information if wanted.
	if verbose==1:

		print 'number of samples: '+str(n)
		print 'maximum time: '+str(np.max(t))+' s'
		print 'maximum frequency: '+str(np.max(f))+' Hz'

	#- Warnings.
	if (p.fmax>1.0/p.dt):
		print 'WARNING: maximum bandpass frequency cannot be represented with this time step!'

	if (p.fmin<1.0/(n*p.dt)):
		print 'WARNING: minimum bandpass frequency cannot be represented with this window length!'

	#==============================================================================
	#- March through source locations and compute raw frequency-domain noise traces.
	#==============================================================================

	#- Set a specific random seed to make simulation repeatable, e.g. for different receiver pair.
	np.random.seed(p.seed)

	#- Initialise frequency-domain wavefields.
	u1=np.zeros([n,p.Nwindows],dtype=complex)
	u2=np.zeros([n,p.Nwindows],dtype=complex)
	G1=np.zeros([n,p.Nwindows],dtype=complex)
	G2=np.zeros([n,p.Nwindows],dtype=complex)

	#- Regularise zero-frequency to avoid singularity in Green function.
	omega[0]=0.01*2.0*np.pi*df

	#- March through source indices.
	for k in indeces:

		#- Green function for a specific source point.
		G1[:,0]=g.green_input(p.x[rec0],p.y[rec0],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)
		G2[:,0]=g.green_input(p.x[rec1],p.y[rec1],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)

		#- Apply instrument response and source spectrum
		G1[:,0]=G1[:,0]*instrument*np.sqrt(natural)
		G2[:,0]=G2[:,0]*instrument*np.sqrt(natural)

		#- Copy this Green function to all time intervals.
		for i in range(p.Nwindows):

			G1[:,i]=G1[:,0]
			G2[:,i]=G2[:,0]

		#- Random phase matrix, frequency steps times time windows.
		phi=2.0*np.pi*(np.random.rand(n,p.Nwindows)-0.5)
		ff=np.exp(1j*phi)

		#- Matrix of random frequency-domain wavefields.
		u1+=S[k]*ff*G1
		u2+=S[k]*ff*G2

	#- March through time windows to add earthquakes.
	for win in range(p.Nwindows):

		neq=len(p.eq_t[win])

		for i in range(neq):

			G1=g.green_input(p.x[rec0],p.y[rec0],p.eq_x[win][i],p.eq_y[win][i],omega,p.dx,p.dy,p.rho,p.v,p.Q)
			G2=g.green_input(p.x[rec1],p.y[rec1],p.eq_x[win][i],p.eq_y[win][i],omega,p.dx,p.dy,p.rho,p.v,p.Q)

			G1=G1*instrument*np.sqrt(natural)
			G2=G2*instrument*np.sqrt(natural)

			u1[:,win]+=p.eq_m[win][i]*G1*np.exp(-1j*omega*p.eq_t[win][i])
			u2[:,win]+=p.eq_m[win][i]*G2*np.exp(-1j*omega*p.eq_t[win][i])

	#==============================================================================
	#- Processing.
	#==============================================================================

	#- Apply single-station processing.
	u1_proc,u2_proc=proc.processing_single_station(u1,u2,f,verbose)

	#- Compute correlation function, raw and processed.
	ccf=u1*np.conj(u2)
	ccf_proc=u1_proc*np.conj(u2_proc)

	#- Apply correlation processing.
	ccf_proc=proc.processing_correlation(ccf_proc,f,verbose)

	#==============================================================================
	#- Apply the standard bandpass.
	#==============================================================================

	bandpass=np.zeros(np.shape(f))

	Nminmax=np.round((p.bp_fmin)/df)
	Nminmin=np.round((p.bp_fmin-p.bp_width)/df)
	Nmaxmin=np.round((p.bp_fmax)/df)
	Nmaxmax=np.round((p.bp_fmax+p.bp_width)/df)

	bandpass[Nminmin:Nminmax]=np.linspace(0.0,1.0,Nminmax-Nminmin)
	bandpass[Nmaxmin:Nmaxmax]=np.linspace(1.0,0.0,Nmaxmax-Nmaxmin)
	bandpass[Nminmax:Nmaxmin]=1.0

	for i in range(p.Nwindows):

		ccf[:,i]=bandpass*ccf[:,i]
		ccf_proc[:,i]=bandpass*ccf_proc[:,i]

	#==============================================================================
	#- Time-domain correlation function.
	#==============================================================================

	#- Some care has to be taken here with the inverse FFT convention of numpy.

	cct=np.zeros([n,p.Nwindows],dtype=float)
	cct_proc=np.zeros([n,p.Nwindows],dtype=float)

	dummy=np.real(np.fft.ifft(ccf,axis=0)/p.dt)
	cct[0.5*n:n,:]=dummy[0:0.5*n,:]
	cct[0:0.5*n,:]=dummy[0.5*n:n,:]

	dummy=np.real(np.fft.ifft(ccf_proc,axis=0)/p.dt)
	cct_proc[0.5*n:n,:]=dummy[0:0.5*n,:]
	cct_proc[0:0.5*n,:]=dummy[0.5*n:n,:]

	#==============================================================================
	#- Save results if wanted.
	#==============================================================================

	if save==1:

		#- Store frequency and time axes.

		fid=open('OUTPUT/correlations_individual/f','w')
		np.save(fid,f)
		fid.close()

		fid=open('OUTPUT/correlations_individual/t','w')
		np.save(fid,t)
		fid.close()

		#- Store raw and processed correlations in the frequency domain.

		fn='OUTPUT/correlations_individual/ccf_'+str(rec0)+'_'+str(rec1)
		fid=open(fn,'w')
		np.save(fid,ccf)
		fid.close()

		fn='OUTPUT/correlations_individual/ccf_proc_'+str(rec0)+'_'+str(rec1)
		fid=open(fn,'w')
		np.save(fid,ccf_proc)
		fid.close()

	#==============================================================================
	#- Plot results if wanted.
	#==============================================================================

	if plot==1:

		#- Noise traces for first window.
		plt.subplot(2,1,1)
		plt.plot(t,np.real(np.fft.ifft(u1[:,0]))/p.dt,'k')
		plt.ylabel('u1(t) [N/m^2]')
		plt.title('recordings for first time window')
		plt.subplot(2,1,2)
		plt.plot(t,np.real(np.fft.ifft(u2[:,0]))/p.dt,'k')
		plt.ylabel('u2(t) [N/m^2]')
		plt.xlabel('t [s]')

		plt.show()

		#- Spectrum of the pressure wavefield for first window.
		plt.subplot(2,1,1)
		plt.plot(f,np.abs(np.sqrt(bandpass)*u1[:,0]),'k',linewidth=2)
		plt.plot(f,np.real(np.sqrt(bandpass)*u1[:,0]),'b',linewidth=1)
		plt.plot(f,np.imag(np.sqrt(bandpass)*u1[:,0]),'r',linewidth=1)
		plt.ylabel('u1(f) [Ns/m^2]')
		plt.title('raw and processed spectra for first window (abs=black, real=blue, imag=red)')
		
		plt.subplot(2,1,2)
		plt.plot(f,np.abs(np.sqrt(bandpass)*u1_proc[:,0]),'k',linewidth=2)
		plt.plot(f,np.real(np.sqrt(bandpass)*u1_proc[:,0]),'b',linewidth=1)
		plt.plot(f,np.imag(np.sqrt(bandpass)*u1_proc[:,0]),'r',linewidth=1)
		plt.ylabel('u1_proc(f) [?]')
		plt.xlabel('f [Hz]')

		plt.show()

		#- Raw time- and frequency-domain correlation for first window.
		plt.subplot(2,1,1)
		plt.semilogy(f,np.abs(ccf[:,0]),'k',linewidth=2)
		plt.title('raw frequency-domain correlation for first window')
		plt.ylabel('correlation [N^2 s^2 / m^4]')
		plt.xlabel('f [Hz]')
		
		plt.subplot(2,1,2)
		plt.plot(t,np.real(cct[:,0]),'k')
		plt.title('raw time-domain correlation for first window')
		plt.ylabel('correlation [N^2 s / m^4]')
		plt.xlabel('t [s]')

		plt.show()

		#- Processed time- and frequency-domain correlation for first window.
		plt.subplot(2,1,1)
		plt.semilogy(f,np.abs(ccf_proc[:,0]),'k',linewidth=2)
		plt.title('processed frequency-domain correlation for first window')
		plt.ylabel('correlation [N^2 s^2 / m^4]*unit(T)')
		plt.xlabel('f [Hz]')
		
		plt.subplot(2,1,2)
		plt.plot(t,np.real(cct_proc[:,0]))
		plt.title('processed time-domain correlation for first window')
		plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
		plt.xlabel('t [s]')

		plt.show()

		#- Raw and processed ensemble correlations. 
		plt.plot(t,np.sum(cct,1)/np.max(cct),'k')
		plt.plot(t,np.sum(cct_proc,1)/np.max(cct_proc),'r')
		plt.title('ensemble time-domain correlation (black=raw, red=processed)')
		plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
		plt.xlabel('t [s]')

		plt.show()

	
	#- End time.
	t2=time.time()

	if verbose==1:
		print 'elapsed time: '+str(t2-t1)+' s'

	#==============================================================================
	#- Output.
	#==============================================================================

	return cct,cct_proc,t,ccf,ccf_proc,f
Example #7
def structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0):
	"""
	x,y,K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0):

	Compute structure kernel K_kappa for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	dir_forward:	Location of the forward interferometric fields from rec0 and rec1. Must exist.
	plot:			When plot=1, plot structure kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K:				Structure kernel [unit of measurement * 1/N].

	Last updated: 11 July 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)

	x,y=np.meshgrid(x_line,y_line)

	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	df=f[1]-f[0]
	omega=2.0*np.pi*f

	K_kappa=np.zeros(np.shape(x))

	C1=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)
	C2=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)

	nx=len(x_line)
	ny=len(y_line)

	kappa=p.rho*(p.v**2)

	#- Frequency- and space distribution of the source. ---------------------------

	S,indeces=s.space_distribution(plot=0)
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	#- Compute the adjoint source. ------------------------------------------------

	a=adsrc.adsrc(cct, t, measurement, plot)

	#- Compute number of grid points corresponding to the minimum wavelength. -----

	L=int(np.ceil(p.v/(p.fmax*p.dx)))

	#- Read propagation corrector if needed. --------------------------------------

	if (effective==1):

		gf=gpc.get_propagation_corrector(rec0,rec1,plot=0)

	else:

		gf=np.ones(len(f),dtype=complex)

	#==============================================================================
	#- Load forward interferometric wavefields.
	#==============================================================================

	fn=dir_forward+'/cf_'+str(rec0)
	fid=open(fn,'r')
	C1=np.load(fid)
	fid.close()

	fn=dir_forward+'/cf_'+str(rec1)
	fid=open(fn,'r')
	C2=np.load(fid)
	fid.close()

	#==============================================================================
	#- Loop over frequencies.
	#==============================================================================

	for k in range(len(omega)):

		w=omega[k]

		#- Adjoint fields. --------------------------------------------------------
		G1=-w**2*g.green_input(x,y,p.x[rec0],p.y[rec0],w,p.dx,p.dy,p.rho,p.v,p.Q)*gf[k]
		G2=-w**2*g.green_input(x,y,p.x[rec1],p.y[rec1],w,p.dx,p.dy,p.rho,p.v,p.Q)*gf[k]

		#- Multiplication with adjoint fields. ------------------------------------
		K_kappa=K_kappa-2.0*np.real(G2*C1[:,:,k]*np.conj(a[k])+G1*C2[:,:,k]*a[k])

	K_kappa=K_kappa/kappa

	#==============================================================================
	#- Smooth over minimum wavelength.
	#==============================================================================

	for k in range(L):
		K_kappa[1:ny-2,:]=(K_kappa[1:ny-2,:]+K_kappa[0:ny-3,:]+K_kappa[2:ny-1,:])/3.0

	for k in range(L):
		K_kappa[:,1:nx-2]=(K_kappa[:,1:nx-2]+K_kappa[:,0:nx-3]+K_kappa[:,2:nx-1])/3.0

	#==============================================================================
	#- Visualise if wanted.
	#==============================================================================

	if plot==1:

		cmap = plt.get_cmap('RdBu')
		plt.pcolormesh(x,y,K_kappa,cmap=cmap,shading='interp')
		plt.clim(-np.max(np.abs(K_kappa))*0.25,np.max(np.abs(K_kappa))*0.25)
		plt.axis('image')
		plt.colorbar()
		plt.title('Structure (kappa) kernel [unit of measurement / N]')
		plt.xlabel('x [km]')
		plt.ylabel('y [km]')

		plt.plot(p.x[rec0],p.y[rec0],'ro')
		plt.plot(p.x[rec1],p.y[rec1],'ro')

		plt.show()


	return x,y,K_kappa
Example #8
def source_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', effective=0, plot=0):
	"""
	x,y,K = source_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', effective=0, plot=0):

	Compute source kernel for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	plot:			When plot=1, plot source kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K:				Source kernel [unit of measurement * m^2 / Pa^4 s^2].

	Last updated: 27 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)

	nx=len(x_line)
	ny=len(y_line)

	x,y=np.meshgrid(1.5*x_line,1.5*y_line)

	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	K_source=np.zeros(np.shape(x))

	#- Read propagation corrector if needed. --------------------------------------

	if (effective==1):

		gf=gpc.get_propagation_corrector(rec0,rec1,plot=0)

	else:

		gf=np.ones(len(f),dtype=complex)

	#- Compute number of grid points corresponding to the minimum wavelength. -----

	L=int(np.ceil(p.v/(p.fmax*p.dx)))

	#==============================================================================
	#- Compute the adjoint source.
	#==============================================================================

	a=adsrc.adsrc(cct, t, measurement, plot)

	#==============================================================================
	#- Compute kernel.
	#==============================================================================

	for k in range(len(omega)):

		#- Green functions.
		G1=g.green_input(x,y,p.x[rec0],p.y[rec0],omega[k],p.dx,p.dy,p.rho,p.v,p.Q)
		G2=g.green_input(x,y,p.x[rec1],p.y[rec1],omega[k],p.dx,p.dy,p.rho,p.v,p.Q)

		#- Compute kernel.
		K_source+=2.0*np.real(gf[k]*G1*np.conj(G2)*a[k])

	#==============================================================================
	#- Smooth over minimum wavelength.
	#==============================================================================

	for k in range(L):
		K_source[1:ny-2,:]=(K_source[1:ny-2,:]+K_source[0:ny-3,:]+K_source[2:ny-1,:])/3.0

	for k in range(L):
		K_source[:,1:nx-2]=(K_source[:,1:nx-2]+K_source[:,0:nx-3]+K_source[:,2:nx-1])/3.0


	#==============================================================================
	#- Visualise if wanted.
	#==============================================================================

	if plot==1:

		cmap = plt.get_cmap('RdBu')
		plt.pcolormesh(x,y,K_source,cmap=cmap,shading='interp')
		plt.clim(-np.max(np.abs(K_source))*0.15,np.max(np.abs(K_source))*0.15)
		plt.axis('image')
		plt.colorbar()
		plt.title('Source kernel [unit of measurement s^2 / m^2]')
		plt.xlabel('x [km]')
		plt.ylabel('y [km]')

		plt.plot(p.x[rec0],p.y[rec0],'ro')
		plt.plot(p.x[rec1],p.y[rec1],'ro')

		plt.show()

	return x,y,K_source