Example #1
def correlation_random(rec0=0, rec1=1, verbose=0, plot=0, save=0):
    """
	cct,cct_proc,t,ccf,ccf_proc,f = correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=1)

	Compute and plot correlation function based on random source summation.

	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			plot when 1.
	verbose:		give screen output when 1.
	save:			store individual correlations to OUTPUT/correlations_individual

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].
	
	Last updated: 19 May 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    #- Start time.
    t1 = time.time()

    #- Input parameters.
    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    #- Compute number of samples as an integer power of 2 (needed for array sizes and slicing).
    n = int(2**np.round(np.log2(p.Twindow / p.dt)))

    #- Frequency axis.
    df = 1.0 / (n * p.dt)
    f = np.arange(0.0, 1.0 / p.dt, df)
    omega = 2.0 * np.pi * f

    #- Compute time axis
    t = np.arange(-0.5 * n * p.dt, 0.5 * n * p.dt, p.dt)

    #- Compute instrument response and natural source spectrum.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)

    #- Issue some information if wanted.
    if verbose == 1:

        print('number of samples: ' + str(n))
        print('maximum time: ' + str(np.max(t)) + ' s')
        print('maximum frequency: ' + str(np.max(f)) + ' Hz')

    #- Warnings.
    if p.fmax > 1.0 / p.dt:
        print('WARNING: maximum bandpass frequency cannot be represented with this time step!')

    if p.fmin < 1.0 / (n * p.dt):
        print('WARNING: minimum bandpass frequency cannot be represented with this window length!')

    #==============================================================================
    #- March through source locations and compute raw frequency-domain noise traces.
    #==============================================================================

    #- Set a specific random seed to make simulation repeatable, e.g. for different receiver pair.
    np.random.seed(p.seed)

    #- Initialise frequency-domain wavefields.
    u1 = np.zeros([n, p.Nwindows], dtype=complex)
    u2 = np.zeros([n, p.Nwindows], dtype=complex)
    G1 = np.zeros([n, p.Nwindows], dtype=complex)
    G2 = np.zeros([n, p.Nwindows], dtype=complex)

    #- Regularise zero-frequency to avoid singularity in Green function.
    omega[0] = 0.01 * 2.0 * np.pi * df

    #- March through source indices.
    for k in indeces:

        #- Green function for a specific source point.
        G1[:, 0] = g.green_input(p.x[rec0], p.y[rec0], x[k], y[k], omega, p.dx,
                                 p.dy, p.rho, p.v, p.Q)
        G2[:, 0] = g.green_input(p.x[rec1], p.y[rec1], x[k], y[k], omega, p.dx,
                                 p.dy, p.rho, p.v, p.Q)

        #- Apply instrument response and source spectrum
        G1[:, 0] = G1[:, 0] * instrument * np.sqrt(natural)
        G2[:, 0] = G2[:, 0] * instrument * np.sqrt(natural)

        #- Copy this Green function to all time intervals.
        for i in range(p.Nwindows):

            G1[:, i] = G1[:, 0]
            G2[:, i] = G2[:, 0]

        #- Random phase matrix, frequency steps times time windows.
        phi = 2.0 * np.pi * (np.random.rand(n, p.Nwindows) - 0.5)
        ff = np.exp(1j * phi)

        #- Matrix of random frequency-domain wavefields.
        u1 += S[k] * ff * G1
        u2 += S[k] * ff * G2

    #- March through time windows to add earthquakes.
    for win in range(p.Nwindows):

        neq = len(p.eq_t[win])

        for i in range(neq):

            G1 = g.green_input(p.x[rec0], p.y[rec0], p.eq_x[win][i],
                               p.eq_y[win][i], omega, p.dx, p.dy, p.rho, p.v,
                               p.Q)
            G2 = g.green_input(p.x[rec1], p.y[rec1], p.eq_x[win][i],
                               p.eq_y[win][i], omega, p.dx, p.dy, p.rho, p.v,
                               p.Q)

            G1 = G1 * instrument * np.sqrt(natural)
            G2 = G2 * instrument * np.sqrt(natural)

            u1[:, win] += p.eq_m[win][i] * G1 * np.exp(
                -1j * omega * p.eq_t[win][i])
            u2[:, win] += p.eq_m[win][i] * G2 * np.exp(
                -1j * omega * p.eq_t[win][i])

    #==============================================================================
    #- Processing.
    #==============================================================================

    #- Apply single-station processing.
    u1_proc, u2_proc = proc.processing_single_station(u1, u2, f, verbose)

    #- Compute correlation function, raw and processed.
    ccf = u1 * np.conj(u2)
    ccf_proc = u1_proc * np.conj(u2_proc)

    #- Apply correlation processing.
    ccf_proc = proc.processing_correlation(ccf_proc, f, verbose)

    #==============================================================================
    #- Apply the standard bandpass.
    #==============================================================================

    bandpass = np.zeros(np.shape(f))

    Nminmax = int(np.round(p.bp_fmin / df))
    Nminmin = int(np.round((p.bp_fmin - p.bp_width) / df))
    Nmaxmin = int(np.round(p.bp_fmax / df))
    Nmaxmax = int(np.round((p.bp_fmax + p.bp_width) / df))

    bandpass[Nminmin:Nminmax] = np.linspace(0.0, 1.0, Nminmax - Nminmin)
    bandpass[Nmaxmin:Nmaxmax] = np.linspace(1.0, 0.0, Nmaxmax - Nmaxmin)
    bandpass[Nminmax:Nmaxmin] = 1.0

    for i in range(p.Nwindows):

        ccf[:, i] = bandpass * ccf[:, i]
        ccf_proc[:, i] = bandpass * ccf_proc[:, i]

    #==============================================================================
    #- Time-domain correlation function.
    #==============================================================================

    #- Some care has to be taken here with the inverse FFT convention of numpy.
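    #- numpy's ifft returns zero lag in the first sample; the half-swaps below
    #- (equivalent to np.fft.fftshift along axis 0 for even n) centre the
    #- correlation on the symmetric time axis t.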

    cct = np.zeros([n, p.Nwindows], dtype=float)
    cct_proc = np.zeros([n, p.Nwindows], dtype=float)

    dummy = np.real(np.fft.ifft(ccf, axis=0) / p.dt)
    cct[n // 2:n, :] = dummy[0:n // 2, :]
    cct[0:n // 2, :] = dummy[n // 2:n, :]

    dummy = np.real(np.fft.ifft(ccf_proc, axis=0) / p.dt)
    cct_proc[n // 2:n, :] = dummy[0:n // 2, :]
    cct_proc[0:n // 2, :] = dummy[n // 2:n, :]

    #==============================================================================
    #- Save results if wanted.
    #==============================================================================

    if save == 1:

        #- Store frequency and time axes.

        fid = open('OUTPUT/correlations_individual/f', 'wb')
        np.save(fid, f)
        fid.close()

        fid = open('OUTPUT/correlations_individual/t', 'wb')
        np.save(fid, t)
        fid.close()

        #- Store raw and processed correlations in the frequency domain.

        fn = 'OUTPUT/correlations_individual/ccf_' + str(rec0) + '_' + str(rec1)
        fid = open(fn, 'wb')
        np.save(fid, ccf)
        fid.close()

        fn = 'OUTPUT/correlations_individual/ccf_proc_' + str(rec0) + '_' + str(rec1)
        fid = open(fn, 'wb')
        np.save(fid, ccf_proc)
        fid.close()

    #==============================================================================
    #- Plot results if wanted.
    #==============================================================================

    if plot == 1:

        #- Noise traces for first window.
        plt.subplot(2, 1, 1)
        plt.plot(t, np.real(np.fft.ifft(u1[:, 0])) / p.dt, 'k')
        plt.ylabel('u1(t) [N/m^2]')
        plt.title('recordings for first time window')
        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(np.fft.ifft(u2[:, 0])) / p.dt, 'k')
        plt.ylabel('u2(t) [N/m^2]')
        plt.xlabel('t [s]')

        plt.show()

        #- Spectrum of the pressure wavefield for first window.
        plt.subplot(2, 1, 1)
        plt.plot(f, np.abs(np.sqrt(bandpass) * u1[:, 0]), 'k', linewidth=2)
        plt.plot(f, np.real(np.sqrt(bandpass) * u1[:, 0]), 'b', linewidth=1)
        plt.plot(f, np.imag(np.sqrt(bandpass) * u1[:, 0]), 'r', linewidth=1)
        plt.ylabel('u1(f) [Ns/m^2]')
        plt.title(
            'raw and processed spectra for first window (abs=black, real=blue, imag=red)'
        )

        plt.subplot(2, 1, 2)
        plt.plot(f,
                 np.abs(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'k',
                 linewidth=2)
        plt.plot(f,
                 np.real(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'b',
                 linewidth=1)
        plt.plot(f,
                 np.imag(np.sqrt(bandpass) * u1_proc[:, 0]),
                 'r',
                 linewidth=1)
        plt.ylabel('u1_proc(f) [?]')
        plt.xlabel('f [Hz]')

        plt.show()

        #- Raw time- and frequency-domain correlation for first window.
        plt.subplot(2, 1, 1)
        plt.semilogy(f, np.abs(ccf[:, 0]), 'k', linewidth=2)
        plt.title('raw frequency-domain correlation for first window')
        plt.ylabel('correlation [N^2 s^2 / m^4]')
        plt.xlabel('f [Hz]')

        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(cct[:, 0]), 'k')
        plt.title('raw time-domain correlation for first window')
        plt.ylabel('correlation [N^2 s / m^4]')
        plt.xlabel('t [s]')

        plt.show()

        #- Processed time- and frequency-domain correlation for first window.
        plt.subplot(2, 1, 1)
        plt.semilogy(f, np.abs(ccf_proc[:, 0]), 'k', linewidth=2)
        plt.title('processed frequency-domain correlation for first window')
        plt.ylabel('correlation [N^2 s^2 / m^4]*unit(T)')
        plt.xlabel('f [Hz]')

        plt.subplot(2, 1, 2)
        plt.plot(t, np.real(cct_proc[:, 0]))
        plt.title('processed time-domain correlation for first window')
        plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
        plt.xlabel('t [s]')

        plt.show()

        #- Raw and processed ensemble correlations.
        plt.plot(t, np.sum(cct, 1) / np.max(cct), 'k')
        plt.plot(t, np.sum(cct_proc, 1) / np.max(cct_proc), 'r')
        plt.title(
            'ensemble time-domain correlation (black=raw, red=processed)')
        plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
        plt.xlabel('t [s]')

        plt.show()

    #- End time.
    t2 = time.time()

    if verbose == 1:
        print('elapsed time: ' + str(t2 - t1) + ' s')

    #==============================================================================
    #- Output.
    #==============================================================================

    return cct, cct_proc, t, ccf, ccf_proc, f
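
A minimal usage sketch for the function above (hypothetical driver, assuming numpy is imported as np in the surrounding module, as in these examples): run the random-summation correlation for one receiver pair and stack the windowed correlations into an ensemble estimate.

# Hypothetical driver code, not part of the original example.
cct, cct_proc, t, ccf, ccf_proc, f = correlation_random(rec0=0, rec1=1, verbose=1)

# Stack raw and processed correlations over all time windows.
cct_stack = np.sum(cct, axis=1)
cct_proc_stack = np.sum(cct_proc, axis=1)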
Example #2
def correlation_function(rec0=0, rec1=1, effective=0, plot=0):
    """
	cct, t, ccf, f = correlation_function(rec0=0,rec1=1,effective=0,plot=0)

	Compute time- and frequency-domain correlation functions. 


	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			When plot=1, the source distribution and the time- and frequency-domain correlation functions are plotted.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].

	Last updated: 27 May 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)

    x, y = np.meshgrid(x_line, y_line)

    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    t = np.arange(p.tmin, p.tmax, p.dt)

    #- Frequency- and space distribution of the source. ---------------------------

    S, indices = s.space_distribution(plot)
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    #- Read propagation corrector if needed. --------------------------------------

    if (effective == 1):

        gf = gpc.get_propagation_corrector(rec0, rec1, plot=0)

    else:

        gf = np.ones(len(f), dtype=complex)

    #==============================================================================
    #- Compute inter-station correlation function.
    #==============================================================================

    cct = np.zeros(np.shape(t), dtype=float)
    ccf = np.zeros(np.shape(f), dtype=complex)

    for idf in range(len(omega)):

        P = g.conjG1_times_G2(p.x[rec0], p.y[rec0], p.x[rec1], p.y[rec1], x, y,
                              omega[idf], p.dx, p.dy, p.rho, p.v, p.Q)
        ccf[idf] = gf[idf] * np.conj(np.sum(P * S))

        cct = cct + np.real(filt[idf] * ccf[idf] * np.exp(1j * omega[idf] * t))

    cct = cct * p.dx * p.dy * p.df

    #==============================================================================
    #- Plot result.
    #==============================================================================

    if (plot == 1):

        #- Frequency domain.
        plt.semilogy(f, np.abs(ccf), 'k')
        plt.semilogy(f, np.real(ccf), 'b')
        plt.title(
            'frequency-domain correlation function (black=abs, blue=real)')
        plt.xlabel('frequency [Hz]')
        plt.ylabel('correlation [N^2 s^2/m^4]')
        plt.show()

        #- Time domain.

        tt = np.sqrt((p.x[rec0] - p.x[rec1])**2 +
                     (p.y[rec0] - p.y[rec1])**2) / p.v
        cct_max = np.max(np.abs(cct))

        plt.plot(t, cct, 'k', linewidth=2.0)
        plt.plot([tt, tt], [-1.1 * cct_max, 1.1 * cct_max],
                 '--',
                 color=(0.5, 0.5, 0.5),
                 linewidth=1.5)
        plt.plot([-tt, -tt], [-1.1 * cct_max, 1.1 * cct_max],
                 '--',
                 color=(0.5, 0.5, 0.5),
                 linewidth=1.5)

        plt.ylim((-1.1 * cct_max, 1.1 * cct_max))
        plt.title('correlation function')
        plt.xlabel('time [s]')
        plt.ylabel('correlation [N^2 s/m^4]')
        plt.show()

    #==============================================================================
    #- Return.
    #==============================================================================

    return cct, t, ccf, f
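
A minimal usage sketch (hypothetical; p mirrors the parameters.Parameters() instance used inside the function): compute the correlation and compare the lag of its maximum with the direct-wave travel time between the two receivers, the same quantity the function marks with dashed lines when plot=1.

# Hypothetical driver code, not part of the original example.
cct, t, ccf, f = correlation_function(rec0=0, rec1=1, effective=0, plot=0)

p = parameters.Parameters()
tt = np.sqrt((p.x[0] - p.x[1])**2 + (p.y[0] - p.y[1])**2) / p.v
print('direct-wave travel time: %.2f s, correlation peak at: %.2f s' % (tt, t[np.argmax(np.abs(cct))]))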
Example #3
def snapshot(rec=0,
             t=1.0,
             minvalplot=0.0,
             maxvalplot=0.0,
             plot=False,
             save=False,
             verbose=False,
             dir_precomputed='OUTPUT/'):
    """
	
	snapshot(rec=0,t=1.0, minvalplot=0.0, maxvalplot=0.0, plot=False, save=False, verbose=False, dir_precomputed='OUTPUT/')

	Compute and plot correlation wavefield for a specific time t. This requires that the frequency-domain wavefield has been precomputed using the precompute function below.

	INPUT:
	------

	rec:				index of receiver.
	t: 					time in s.
	minvalplot:			minimum of colour scale, ignored when 0.
	maxvalplot: 		maximum of colour scale, ignored when 0.
	plot:				plot when True.
	save:				save as pdf when True.
	verbose:			give screen output when True.
	dir_precomputed:	directory where precomputed correlation field is located.

	OUTPUT:
	-------

	C:		2D time-domain correlation wavefield [N^2 s / m^4].
	x,y:	2D axes [m].

	Last updated: 18 July 2019.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    #- Frequency line.
    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    #- Power-spectral density.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    C = np.zeros(np.shape(x))

    #==============================================================================
    #- Load forward interferometric wavefields in the frequency domain.
    #==============================================================================

    fn = dir_precomputed + 'cf_' + str(rec) + '.npy'
    Cfull = np.load(fn)

    #==============================================================================
    #- Compute correlation field for a specific time.
    #==============================================================================

    for idx in range(len(x_line)):
        for idy in range(len(y_line)):
            C[idy, idx] = np.real(
                np.sum(Cfull[idy, idx, :] * np.exp(1j * omega * t))) * p.df

    #==============================================================================
    #- Plot.
    #==============================================================================

    if (plot or save):

        plt.rcParams["font.family"] = "serif"
        plt.rcParams.update({'font.size': 10})

        if (minvalplot == 0.0 and maxvalplot == 0.0):
            maxvalplot = 0.8 * np.max(np.abs(C))
            minvalplot = -maxvalplot

        #- Plot interferometric wavefield. ----------------------------------------

        #plt.pcolor(x/1000.0,y/1000.0,C,cmap='RdBu',vmin=minvalplot,vmax=maxvalplot)
        plt.pcolor(x / 1000.0,
                   y / 1000.0,
                   np.abs(C),
                   cmap='Greys',
                   vmin=0.0,
                   vmax=maxvalplot)

        #- Plot receiver positions. -----------------------------------------------

        for k in range(p.Nreceivers):

            plt.plot(p.x[k] / 1000.0, p.y[k] / 1000.0, 'ko', markersize=5)
            plt.text(p.x[k] / 1000.0 + 4.0 * p.dx / 1000.0,
                     p.y[k] / 1000.0 + 4.0 * p.dx / 1000.0, str(k))

        plt.plot(p.x[rec] / 1000.0, p.y[rec] / 1000.0, 'kx')
        plt.text(p.x[rec] / 1000.0 + 4.0 * p.dx / 1000.0,
                 p.y[rec] / 1000.0 + 4.0 * p.dx / 1000.0, str(rec))

        #- Embellish the plot. ----------------------------------------------------

        plt.colorbar()
        plt.axis('image')
        plt.title('correlation field, t=' + str(t) + ' s')
        plt.xlim((p.xmin / 1000.0, p.xmax / 1000.0))
        plt.ylim((p.ymin / 1000.0, p.ymax / 1000.0))
        plt.xlabel('x [km]')
        plt.ylabel('y [km]')

        if plot:
            plt.show()
        if save:
            fn = 'OUTPUT/' + str(t) + '.pdf'
            plt.savefig(fn, format='pdf')
            plt.clf()

    #==============================================================================
    #- Return.
    #==============================================================================

    return C, x, y
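
A minimal usage sketch (hypothetical; it assumes the frequency-domain field OUTPUT/cf_0.npy has already been written by the precompute function of Example #6):

# Hypothetical driver code: save a few snapshots of the precomputed correlation field as pdf.
for t_snap in [50.0, 100.0, 150.0]:
    C, x, y = snapshot(rec=0, t=t_snap, plot=False, save=True, dir_precomputed='OUTPUT/')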
Example #4
def structure_kernel(cct,
                     t,
                     rec0=0,
                     rec1=1,
                     measurement='cctime',
                     dir_forward='OUTPUT/',
                     effective=0,
                     plot=0):
    """
	x,y,K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0):

	Compute structure kernel K_kappa for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	dir_forward:	Location of the forward interferometric fields from rec0 and rec1. Must exist.
	plot:			When plot=1, plot structure kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K:				Structure kernel [unit of measurement * 1/N].

	Last updated: 11 July 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)

    x, y = np.meshgrid(x_line, y_line)

    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    df = f[1] - f[0]
    omega = 2.0 * np.pi * f

    K_kappa = np.zeros(np.shape(x))

    C1 = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)
    C2 = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)

    nx = len(x_line)
    ny = len(y_line)

    kappa = p.rho * (p.v**2)

    #- Frequency- and space distribution of the source. ---------------------------

    S, indeces = s.space_distribution(plot=0)
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    #- Compute the adjoint source. ------------------------------------------------

    a = adsrc.adsrc(cct, t, measurement, plot)

    #- Compute number of grid points corresponding to the minimum wavelength. -----

    L = int(np.ceil(p.v / (p.fmax * p.dx)))

    #- Read propagation corrector if needed. --------------------------------------

    if (effective == 1):

        gf = gpc.get_propagation_corrector(rec0, rec1, plot=0)

    else:

        gf = np.ones(len(f), dtype=complex)

    #==============================================================================
    #- Load forward interferometric wavefields.
    #==============================================================================

    fn = dir_forward + '/cf_' + str(rec0)
    fid = open(fn, 'rb')
    C1 = np.load(fid)
    fid.close()

    fn = dir_forward + '/cf_' + str(rec1)
    fid = open(fn, 'rb')
    C2 = np.load(fid)
    fid.close()

    #==============================================================================
    #- Loop over frequencies.
    #==============================================================================

    for k in range(len(omega)):

        w = omega[k]

        #- Adjoint fields. --------------------------------------------------------
        G1 = -w**2 * g.green_input(x, y, p.x[rec0], p.y[rec0], w, p.dx, p.dy,
                                   p.rho, p.v, p.Q) * gf[k]
        G2 = -w**2 * g.green_input(x, y, p.x[rec1], p.y[rec1], w, p.dx, p.dy,
                                   p.rho, p.v, p.Q) * gf[k]

        #- Multiplication with adjoint fields. ------------------------------------
        K_kappa = K_kappa - 2.0 * np.real(G2 * C1[:, :, k] * np.conj(a[k]) +
                                          G1 * C2[:, :, k] * a[k])

    K_kappa = K_kappa / kappa

    #==============================================================================
    #- Smooth over minimum wavelength.
    #==============================================================================

    for k in range(L):
        K_kappa[1:ny - 2, :] = (K_kappa[1:ny - 2, :] + K_kappa[0:ny - 3, :] +
                                K_kappa[2:ny - 1, :]) / 3.0

    for k in range(L):
        K_kappa[:, 1:nx - 2] = (K_kappa[:, 1:nx - 2] + K_kappa[:, 0:nx - 3] +
                                K_kappa[:, 2:nx - 1]) / 3.0

    #==============================================================================
    #- Visualise if wanted.
    #==============================================================================

    if plot == 1:

        cmap = plt.get_cmap('RdBu')
        plt.pcolormesh(x, y, K_kappa, cmap=cmap, shading='gouraud')
        plt.clim(-np.max(np.abs(K_kappa)) * 0.25, np.max(np.abs(K_kappa)) * 0.25)
        plt.axis('image')
        plt.colorbar()
        plt.title('Structure (kappa) kernel [unit of measurement / m^2]')
        plt.xlabel('x [m]')
        plt.ylabel('y [m]')

        plt.plot(p.x[rec0], p.y[rec0], 'ro')
        plt.plot(p.x[rec1], p.y[rec1], 'ro')

        plt.show()

    return x, y, K_kappa
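
A minimal sketch of the intended workflow (hypothetical; it assumes the forward interferometric fields cf_<rec> already exist in dir_forward, e.g. written by the precompute function of Example #8):

# Hypothetical driver code: correlation first, then the structure kernel for the same receiver pair.
cct, t, ccf, f = correlation_function(rec0=0, rec1=1, effective=0, plot=0)
x, y, K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime',
                                 dir_forward='OUTPUT/', effective=0, plot=1)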
Example #5
def snapshot(rec=0,t=1.0, mg_level=5, mg_tol=0.05, minvalplot=0.0, maxvalplot=0.0, plot=0, save=0, verbose=0, precomputed=0, dir_precomputed='OUTPUT/'):

	"""
	
	snapshot(rec=0,t=1.0, mg_level=5, mg_tol=0.05, minvalplot=0.0, maxvalplot=0.0, plot=0, save=0, verbose=0, precomputed=0, dir_precomputed='OUTPUT/')

	Compute and plot correlation wavefield.

	INPUT:
	------

	rec:				index of receiver.
	t: 					time in s.
	mg_level:			level for multi-grid solver.
	mg_tol:				tolerance for multi-grid solver.
	minvalplot:			minimum of colour scale, ignored when 0.
	maxvalplot: 		maximum of colour scale, ignored when 0.
	plot:				plot when 1.
	save:				save as png when 1.
	verbose:			give screen output when 1.
	precomputed:		set to 1 if precomputed frequency-domain correlation field available (see precompute() below).
	dir_precomputed:	directory where precomputed correlation field is located.

	OUTPUT:
	-------

	C:		2D time-domain correlation wavefield [N^2 s / m^4].
	x,y:	2D axes [m].

	Last updated: 27 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	#- Frequency line.
	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	#- Power-spectral density.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	C=np.zeros(np.shape(x))

	#==============================================================================
	#- Load forward interferometric wavefields.
	#==============================================================================

	if precomputed==1:

		fn=dir_precomputed+'/cf_'+str(rec)
		fid=open(fn,'r')
		Cfull=np.load(fid)
		fid.close()

		for idx in range(len(x_line)):
			for idy in range(len(y_line)):

				C[idy,idx]=np.real(np.sum(Cfull[idy,idx,:]*np.exp(1j*omega*t)))*p.df


	#==============================================================================
	#- Compute correlation field for a specific time.
	#==============================================================================

	else:

		#- First multi-grid stage. ------------------------------------------------

		if (verbose==1): print('First multi-grid stage')

		#- March through the spatial grid.
		for idx in range(0,len(x_line),mg_level):

			if (verbose==1): print(str(100*float(idx)/float(len(x_line)))+' %')

			for idy in range(0,len(y_line),mg_level):

				C_proto=np.zeros(len(omega),dtype=complex)

				#- March through all sources.
				for k in indeces:

					C_proto+=g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)*S[k]

				#- Conjugate the summed field once (consistent with precompute below), then transform to time domain.
				C_proto=np.conj(C_proto)
				C[idy,idx]=np.real(np.sum(filt*C_proto*np.exp(1j*omega*t)))

		#- Second multi-grid stage. -----------------------------------------------

		if (verbose==1): print('Second multi-grid stage')

		c_max=np.max(np.abs(C))

		#- March through the spatial grid.
		for idx in range(mg_level,len(x_line)-mg_level):

			if (verbose==1): print(str(100*float(idx)/float(len(x_line)))+' %')

			for idy in range(mg_level,len(y_line)-mg_level):

				if (np.max(np.abs(C[(idy-mg_level):(idy+mg_level),(idx-mg_level):(idx+mg_level)]))>mg_tol*c_max):

					C_proto=np.zeros(len(omega),dtype=complex)

					#- March through all sources.
					for k in indeces:

						C_proto+=g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)*S[k]

					#- Conjugate the summed field once, then transform to time domain.
					C_proto=np.conj(C_proto)
					C[idy,idx]=np.real(np.sum(filt*C_proto*np.exp(1j*omega*t)))

		#- Normalisation.
		C=C*p.dx*p.dy*p.df

	#==============================================================================
	#- Plot.
	#==============================================================================

	if (plot==1 or save==1):

		if (minvalplot==0.0 and maxvalplot==0.0):
			maxvalplot=0.8*np.max(np.abs(C))
			minvalplot=-maxvalplot

		font = {'size'   : 14,}

		#- Plot interferometric wavefield. ----------------------------------------

		plt.pcolor(x,y,C,cmap='RdBu',vmin=minvalplot,vmax=maxvalplot)

		#- Plot receiver positions. -----------------------------------------------

		for k in range(p.Nreceivers):

			plt.plot(p.x[k],p.y[k],'kx')
			plt.text(p.x[k]+3.0*p.dx,p.y[k]+3.0*p.dx,str(k),fontdict=font)

		plt.plot(p.x[rec],p.y[rec],'ro')
		plt.text(p.x[rec]+3.0*p.dx,p.y[rec]+3.0*p.dx,str(rec),fontdict=font)

		#- Embellish the plot. ----------------------------------------------------

		plt.colorbar()
		plt.axis('image')
		plt.title('correlation field, t='+str(t)+' s')
		plt.xlim((p.xmin,p.xmax))
		plt.ylim((p.ymin,p.ymax))
		plt.xlabel('x [m]')
		plt.ylabel('y [m]')

		if (plot==1):
			plt.show()
		if (save==1):
			fn='OUTPUT/'+str(t)+'.png'
			plt.savefig(fn)
			plt.clf()

	#==============================================================================
	#- Return.
	#==============================================================================
	
	return C, x, y
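
A minimal usage sketch (hypothetical values; mg_level and mg_tol control the coarse first pass and the refinement threshold described in the docstring):

# Hypothetical driver code: on-the-fly snapshot at t = 100 s without a precomputed field.
C, x, y = snapshot(rec=0, t=100.0, mg_level=5, mg_tol=0.05, plot=1, save=0, verbose=1, precomputed=0)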
Example #6
def precompute(rec=0, verbose=False, mode='individual'):
    """
	precompute(rec=0,verbose=False,mode='individual')

	Compute correlation wavefield in the frequency domain and store it in OUTPUT/ for re-use in snapshot and kernel computation.

	INPUT:
	------

	rec:		index of reference receiver.
	verbose:	give screen output when True.
	mode:		'individual' sums over individual sources. This is very efficient when there are only a few sources. This mode requires that the indeces array returned by source.space_distribution is not empty.
				'random' performs a randomised, down-sampled integration over a quasi-continuous distribution of sources. This is more efficient for widely distributed and rather smooth sources.
				'combined' is the sum of 'individual' and 'random'. This is efficient when a few point sources are super-imposed on a quasi-continuous distribution.

	OUTPUT:
	-------

	Frequency-domain interferometric wavefield stored in /OUTPUT.

	Last updated: 18 July 2019.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    nx = len(x_line)
    ny = len(y_line)

    #- Frequency line.
    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    #- Power-spectral density.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    C = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)

    #==============================================================================
    #- Compute correlation field by summing over individual sources.
    #==============================================================================

    if (mode == 'individual'):

        #- March through the spatial grid. ----------------------------------------
        for idx in range(nx):

            if verbose:
                print(str(100 * float(idx) / float(len(x_line))) + ' %')

            for idy in range(ny):

                #- March through all sources.
                for k in indeces:

                    C[idy, idx, :] += S[k] * filt * g.conjG1_times_G2(
                        x[idy, idx], y[idy, idx], p.x[rec], p.y[rec], x[k],
                        y[k], omega, p.dx, p.dy, p.rho, p.v, p.Q)

        #- Normalisation.
        C = np.conj(C) * p.dx * p.dy

    #==============================================================================
    #- Compute correlation field by random integration over all sources
    #==============================================================================

    downsampling_factor = 5.0
    n_samples = int(np.floor(float(nx * ny) / downsampling_factor))

    if (mode == 'random'):

        #- March through frequencies. ---------------------------------------------

        for idf in range(0, len(f), 3):

            if verbose: print('f=', f[idf], ' Hz')

            if (filt[idf] > 0.05 * np.max(filt)):

                #- March through downsampled spatial grid. ------------------------

                t0 = time.time()

                for idx in range(0, nx, 3):
                    for idy in range(0, ny, 3):

                        samples_x = np.random.randint(0, nx, n_samples)
                        samples_y = np.random.randint(0, ny, n_samples)

                        G1 = g.green_input(x[samples_y, samples_x],
                                           y[samples_y,
                                             samples_x], x_line[idx],
                                           y_line[idy], omega[idf], p.dx, p.dy,
                                           p.rho, p.v, p.Q)
                        G2 = g.green_input(x[samples_y,
                                             samples_x], y[samples_y,
                                                           samples_x],
                                           p.x[rec], p.y[rec], omega[idf],
                                           p.dx, p.dy, p.rho, p.v, p.Q)

                        C[idy, idx,
                          idf] = downsampling_factor * filt[idf] * np.sum(
                              S[samples_y, samples_x] * G1 * np.conj(G2))

                t1 = time.time()
                if verbose: print('time per frequency: ', t1 - t0, 's')

        #- Normalisation. ---------------------------------------------------------

        C = C * p.dx * p.dy

        #- Spatial interpolation. -------------------------------------------------

        for idx in range(0, nx - 3, 3):
            C[:, idx + 1, :] = 0.67 * C[:, idx, :] + 0.33 * C[:, idx + 3, :]
            C[:, idx + 2, :] = 0.33 * C[:, idx, :] + 0.67 * C[:, idx + 3, :]

        for idy in range(0, ny - 3, 3):
            C[idy + 1, :, :] = 0.67 * C[idy, :, :] + 0.33 * C[idy + 3, :, :]
            C[idy + 2, :, :] = 0.33 * C[idy, :, :] + 0.67 * C[idy + 3, :, :]

        #- Frequency interpolation. -----------------------------------------------

        for idf in range(0, len(f) - 3, 3):
            C[:, :, idf + 1] = 0.67 * C[:, :, idf] + 0.33 * C[:, :, idf + 3]
            C[:, :, idf + 2] = 0.33 * C[:, :, idf] + 0.67 * C[:, :, idf + 3]

    #==============================================================================
    #- Compute correlation field by random integration over all sources + individual sources
    #==============================================================================

    downsampling_factor = 5.0
    n_samples = int(np.floor(float(nx * ny) / downsampling_factor))

    if (mode == 'combined'):

        #--------------------------------------------------------------------------
        #- March through frequencies for random sampling. -------------------------

        for idf in range(0, len(f), 3):

            if verbose: print('f=', f[idf], ' Hz')

            if (filt[idf] > 0.05 * np.max(filt)):

                #- March through downsampled spatial grid. ------------------------

                t0 = time.time()

                for idx in range(0, nx, 3):
                    for idy in range(0, ny, 3):

                        samples_x = np.random.randint(0, nx, n_samples)
                        samples_y = np.random.randint(0, ny, n_samples)

                        G1 = g.green_input(x[samples_y, samples_x],
                                           y[samples_y,
                                             samples_x], x_line[idx],
                                           y_line[idy], omega[idf], p.dx, p.dy,
                                           p.rho, p.v, p.Q)
                        G2 = g.green_input(x[samples_y,
                                             samples_x], y[samples_y,
                                                           samples_x],
                                           p.x[rec], p.y[rec], omega[idf],
                                           p.dx, p.dy, p.rho, p.v, p.Q)

                        C[idy, idx,
                          idf] = downsampling_factor * filt[idf] * np.sum(
                              S[samples_y, samples_x] * G1 * np.conj(G2))

                t1 = time.time()
                if verbose: print('time per frequency: ', t1 - t0, 's')

        #- Spatial interpolation. -------------------------------------------------

        for idx in range(0, nx - 3, 3):
            C[:, idx + 1, :] = 0.67 * C[:, idx, :] + 0.33 * C[:, idx + 3, :]
            C[:, idx + 2, :] = 0.33 * C[:, idx, :] + 0.67 * C[:, idx + 3, :]

        for idy in range(0, ny - 3, 3):
            C[idy + 1, :, :] = 0.67 * C[idy, :, :] + 0.33 * C[idy + 3, :, :]
            C[idy + 2, :, :] = 0.33 * C[idy, :, :] + 0.67 * C[idy + 3, :, :]

        #- Frequency interpolation. -----------------------------------------------

        for idf in range(0, len(f) - 3, 3):
            C[:, :, idf + 1] = 0.67 * C[:, :, idf] + 0.33 * C[:, :, idf + 3]
            C[:, :, idf + 2] = 0.33 * C[:, :, idf] + 0.67 * C[:, :, idf + 3]

        #--------------------------------------------------------------------------
        #- March through the spatial grid for individual sources. -----------------

        for idx in range(nx):

            if verbose:
                print(str(100 * float(idx) / float(len(x_line))) + ' %')

            for idy in range(ny):

                #- March through all sources.
                for k in indeces:

                    C[idy, idx, :] += S[k] * filt * np.conj(
                        g.conjG1_times_G2(x[idy, idx], y[idy, idx], p.x[rec],
                                          p.y[rec], x[k], y[k], omega, p.dx,
                                          p.dy, p.rho, p.v, p.Q))

        #- Normalisation. ---------------------------------------------------------

        C = C * p.dx * p.dy

    #==============================================================================
    #- Save interferometric wavefield.
    #==============================================================================

    fn = 'OUTPUT/cf_' + str(rec)
    np.save(fn, C)
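
A minimal sketch of the precompute-then-visualise workflow (hypothetical; the snapshot function of Example #3 reads exactly the OUTPUT/cf_<rec>.npy file written here):

# Hypothetical driver code, not part of the original example.
precompute(rec=0, verbose=True, mode='individual')
C, x, y = snapshot(rec=0, t=100.0, plot=True, dir_precomputed='OUTPUT/')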
Example #7
def movie(time_axis, minvalplot=0.0, maxvalplot=0.0, verbose=0):

	"""
	movie(time_axis, minvalplot=0.0, maxvalplot=0.0, verbose=0)

	Compute correlation wavefield and save png figures to /OUTPUT.

	INPUT:
	------

	time_axis: 	array containing time values in s for which figures will be saved.
	minvalplot:	minimum of colour scale, ignored when 0.
	maxvalplot: maximum of colour scale, ignored when 0.
	verbose:	give screen output when 1.

	OUTPUT:
	-------

	Snapshots of the interferometric wavefield saved as png files to /OUTPUT

	Last updated: 5 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	#- Frequency line.
	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	#- Power-spectral density.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	#- Check if the indeces are actually available. If not, interrupt.
	if len(indeces)==0:
		print('Correlation field cannot be computed because source index array is empty.')
		return

	#==============================================================================
	#- Compute correlation field for specific times and store.
	#==============================================================================

	C=np.zeros((len(y_line),len(x_line),len(time_axis)))

	#- March through the spatial grid. --------------------------------------------
	for idx in range(len(x_line)):

		if (verbose==1):
			print(str(100*float(idx)/float(len(x_line)))+' %')

		for idy in range(len(y_line)):

			C_proto=np.zeros(len(omega),dtype=complex)

			#- March through all sources.
			for k in indeces:

				C_proto+=g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[1],p.y[1],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)*S[k]

			#- Conjugate the summed field once (consistent with precompute).
			C_proto=np.conj(C_proto)

			#- Transform to time domain.

			for t in range(len(time_axis)):

				C[idy,idx,t]=np.real(np.sum(filt*C_proto*np.exp(1j*omega*time_axis[t])))


	#- Normalisation.
	C=C*p.dx*p.dy*p.df

	#==============================================================================
	#- Save images.
	#==============================================================================

	for t in range(len(time_axis)):

		if (minvalplot==0.0 and maxvalplot==0.0):
			maxvalplot=0.8*np.max(np.abs(C[:,:,t]))
			minvalplot=-maxvalplot

		plt.pcolor(x,y,C[:,:,t],cmap='RdBu',vmin=minvalplot,vmax=maxvalplot)

		font = {'family' : 'sans-serif', 'color'  : 'darkred', 'weight' : 'normal', 'size'   : 14,}
		plt.plot(p.x[0],p.y[0],'ko')
		plt.plot(p.x[1],p.y[1],'ko')
		plt.text(0.9*p.x[0],p.y[0],'1',fontdict=font)
		plt.text(1.1*p.x[1],p.y[1],'2',fontdict=font)
		
		plt.colorbar()
		plt.axis('image')
		plt.title('correlation field, t='+str(time_axis[t])+' s')
		plt.xlabel('x [m]')
		plt.ylabel('y [m]')

		fn='OUTPUT/'+str(time_axis[t])+'.png'
		plt.savefig(fn)
		plt.clf()
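
A minimal usage sketch (the time values are purely illustrative):

# Hypothetical driver code: save one frame every 10 s between 0 and 200 s to OUTPUT/.
movie(np.arange(0.0, 200.0, 10.0), verbose=1)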
Example #8
def precompute(rec=0,verbose=0,mode='individual'):

	"""
	precompute(rec=0, verbose=0, mode='individual')

	Compute correlation wavefield in the frequency domain and store it in OUTPUT/ for re-use in kernel computation.

	INPUT:
	------

	rec:		index of reference receiver.
	verbose:	give screen output when 1.
	mode:		'individual' sums over individual sources. This is very efficient when there are only a few sources. This mode requires that the indeces array returned by source.space_distribution is not empty.
				'random' performs a randomised, down-sampled integration over a quasi-continuous distribution of sources. This is more efficient for widely distributed and rather smooth sources.
				'combined' is the sum of 'individual' and 'random'. This is efficient when a few point sources are super-imposed on a quasi-continuous distribution.

	OUTPUT:
	-------

	Frequency-domain interferometric wavefield stored in /OUTPUT.

	Last updated: 13 July 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	nx=len(x_line)
	ny=len(y_line)

	#- Frequency line.
	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	#- Power-spectral density.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	C=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)

	#==============================================================================
	#- Compute correlation field by summing over individual sources.
	#==============================================================================

	if (mode=='individual'):

		#- March through the spatial grid. ----------------------------------------
		for idx in range(nx):

			if (verbose==1): print(str(100*float(idx)/float(len(x_line)))+' %')

			for idy in range(ny):

				#- March through all sources.
				for k in indeces:

					C[idy,idx,:]+=S[k]*filt*g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)
					
		#- Normalisation.
		C=np.conj(C)*p.dx*p.dy

	#==============================================================================
	#- Compute correlation field by random integration over all sources
	#==============================================================================

	downsampling_factor=5.0
	n_samples=int(np.floor(float(nx*ny)/downsampling_factor))

	if (mode=='random'):

		#- March through frequencies. ---------------------------------------------

		for idf in range(0,len(f),3):

			if verbose==1: print('f=', f[idf], ' Hz')

			if (filt[idf]>0.05*np.max(filt)):

				#- March through downsampled spatial grid. ------------------------

				t0=time.time()

				for idx in range(0,nx,3):
					for idy in range(0,ny,3):

						samples_x=np.random.randint(0,nx,n_samples)
						samples_y=np.random.randint(0,ny,n_samples)
						
						G1=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],x_line[idx],y_line[idy],omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
						G2=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],p.x[rec],   p.y[rec],   omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
				
						C[idy,idx,idf]=downsampling_factor*filt[idf]*np.sum(S[samples_y,samples_x]*G1*np.conj(G2))
					
				t1=time.time()
				if verbose==1: print('time per frequency: ', t1-t0, 's')

		#- Normalisation. ---------------------------------------------------------

		C=C*p.dx*p.dy

		#- Spatial interpolation. -------------------------------------------------

		for idx in range(0,nx-3,3):
			C[:,idx+1,:]=0.67*C[:,idx,:]+0.33*C[:,idx+3,:]
			C[:,idx+2,:]=0.33*C[:,idx,:]+0.67*C[:,idx+3,:]

		for idy in range(0,ny-3,3):
			C[idy+1,:,:]=0.67*C[idy,:,:]+0.33*C[idy+3,:,:]
			C[idy+2,:,:]=0.33*C[idy,:,:]+0.67*C[idy+3,:,:]

		#- Frequency interpolation. -----------------------------------------------

		for idf in range(0,len(f)-3,3):
			C[:,:,idf+1]=0.67*C[:,:,idf]+0.33*C[:,:,idf+3]
			C[:,:,idf+2]=0.33*C[:,:,idf]+0.67*C[:,:,idf+3]

	#==============================================================================
	#- Compute correlation field by random integration over all sources + individual sources
	#==============================================================================

	downsampling_factor=5.0
	n_samples=int(np.floor(float(nx*ny)/downsampling_factor))

	if (mode=='combined'):

		#--------------------------------------------------------------------------
		#- March through frequencies for random sampling. -------------------------

		for idf in range(0,len(f),3):

			if verbose==1: print('f=', f[idf], ' Hz')

			if (filt[idf]>0.05*np.max(filt)):

				#- March through downsampled spatial grid. ------------------------

				t0=time.time()

				for idx in range(0,nx,3):
					for idy in range(0,ny,3):

						samples_x=np.random.randint(0,nx,n_samples)
						samples_y=np.random.randint(0,ny,n_samples)
						
						G1=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],x_line[idx],y_line[idy],omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
						G2=g.green_input(x[samples_y,samples_x],y[samples_y,samples_x],p.x[rec],   p.y[rec],   omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
				
						C[idy,idx,idf]=downsampling_factor*filt[idf]*np.sum(S[samples_y,samples_x]*G1*np.conj(G2))
					
				t1=time.time()
				if verbose==1: print('time per frequency: ', t1-t0, 's')


		#- Spatial interpolation. -------------------------------------------------

		for idx in range(0,nx-3,3):
			C[:,idx+1,:]=0.67*C[:,idx,:]+0.33*C[:,idx+3,:]
			C[:,idx+2,:]=0.33*C[:,idx,:]+0.67*C[:,idx+3,:]

		for idy in range(0,ny-3,3):
			C[idy+1,:,:]=0.67*C[idy,:,:]+0.33*C[idy+3,:,:]
			C[idy+2,:,:]=0.33*C[idy,:,:]+0.67*C[idy+3,:,:]

		#- Frequency interpolation. -----------------------------------------------

		for idf in range(0,len(f)-3,3):
			C[:,:,idf+1]=0.67*C[:,:,idf]+0.33*C[:,:,idf+3]
			C[:,:,idf+2]=0.33*C[:,:,idf]+0.67*C[:,:,idf+3]


		#--------------------------------------------------------------------------
		#- March through the spatial grid for individual sources. -----------------
		
		for idx in range(nx):

			if (verbose==1): print(str(100*float(idx)/float(len(x_line)))+' %')

			for idy in range(ny):

				#- March through all sources.
				for k in indeces:

					C[idy,idx,:]+=S[k]*filt*np.conj(g.conjG1_times_G2(x[idy,idx],y[idy,idx],p.x[rec],p.y[rec],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q))
					
		
		#- Normalisation. ---------------------------------------------------------

		C=C*p.dx*p.dy

	#==============================================================================
	#- Save interferometric wavefield.
	#==============================================================================

	fn='OUTPUT/cf_'+str(rec)
	fid=open(fn,'wb')
	np.save(fid,C)
	fid.close()
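
A minimal sketch of reading the stored field back (hypothetical; it mirrors the binary open/np.load convention used by structure_kernel in Example #4):

# Hypothetical code: reload the frequency-domain correlation field saved above for receiver 0.
fid = open('OUTPUT/cf_0', 'rb')
Cfull = np.load(fid)
fid.close()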
Example #9
def correlation_function(rec0=0,rec1=1,effective=0,plot=0):

	"""
	cct, t, ccf, f = correlation_function(rec0=0,rec1=1,effective=0,plot=0)

	Compute time- and frequency-domain correlation functions. 


	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			When plot=1, the source distribution and the time- and frequency-domain correlation functions are plotted.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].

	Last updated: 27 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)

	x,y=np.meshgrid(x_line,y_line)

	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	omega=2.0*np.pi*f

	t=np.arange(p.tmin,p.tmax,p.dt)

	#- Frequency- and space distribution of the source. ---------------------------

	S,indices=s.space_distribution(plot)
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	#- Read propagation corrector if needed. --------------------------------------

	if (effective==1):

		gf=gpc.get_propagation_corrector(rec0,rec1,plot=0)

	else:

		gf=np.ones(len(f),dtype=complex)

	#==============================================================================
	#- Compute inter-station correlation function.
	#==============================================================================

	cct=np.zeros(np.shape(t),dtype=float)
	ccf=np.zeros(np.shape(f),dtype=complex)

	for idf in range(len(omega)):

		P=g.conjG1_times_G2(p.x[rec0],p.y[rec0],p.x[rec1],p.y[rec1],x,y,omega[idf],p.dx,p.dy,p.rho,p.v,p.Q)
		ccf[idf]=gf[idf]*np.conj(np.sum(P*S))

		cct=cct+np.real(filt[idf]*ccf[idf]*np.exp(1j*omega[idf]*t))

	cct=cct*p.dx*p.dy*p.df

	#==============================================================================
	#- Plot result.
	#==============================================================================

	if (plot==1):

		#- Frequency domain.
		plt.semilogy(f,np.abs(ccf),'k')
		plt.semilogy(f,np.real(ccf),'b')
		plt.title('frequency-domain correlation function (black=abs, blue=real)')
		plt.xlabel('frequency [Hz]')
		plt.ylabel('correlation [N^2 s^2/m^4]')
		plt.show()

		#- Time domain.

		tt=np.sqrt((p.x[rec0]-p.x[rec1])**2+(p.y[rec0]-p.y[rec1])**2)/p.v
		cct_max=np.max(np.abs(cct))

		plt.plot(t,cct,'k',linewidth=2.0)
		plt.plot([tt,tt],[-1.1*cct_max,1.1*cct_max],'--',color=(0.5,0.5,0.5),linewidth=1.5)
		plt.plot([-tt,-tt],[-1.1*cct_max,1.1*cct_max],'--',color=(0.5,0.5,0.5),linewidth=1.5)

		plt.ylim((-1.1*cct_max,1.1*cct_max))
		plt.title('correlation function')
		plt.xlabel('time [s]')
		plt.ylabel('correlation [N^2 s/m^4]')
		plt.show()

	#==============================================================================
	#- Return.
	#==============================================================================

	return cct, t, ccf, f
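
A minimal sketch of the effective-correlation variant (hypothetical; it assumes propagation correctors have already been stored in OUTPUT/correctors, as required by the docstring):

# Hypothetical driver code, not part of the original example.
cct_eff, t, ccf_eff, f = correlation_function(rec0=0, rec1=1, effective=1, plot=1)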
Example #10
def correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=0):

	"""
	cct,cct_proc,t,ccf,ccf_proc,f = correlation_random(rec0=0,rec1=1,verbose=0,plot=0,save=1)

	Compute and plot correlation function based on random source summation.

	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			plot when 1.
	verbose:		give screen output when 1.
	save:			store individual correlations to OUTPUT/correlations_individual

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].
	
	Last updated: 19 May 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	#- Start time.
	t1=time.time()

	#- Input parameters.
	p=parameters.Parameters()

	#- Spatial grid.
	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)
	x,y=np.meshgrid(x_line,y_line)

	#- Compute number of samples as an integer power of 2 (needed for array sizes and slicing).
	n=int(2**np.round(np.log2(p.Twindow/p.dt)))

	#- Frequency axis.
	df=1.0/(n*p.dt)
	f=np.arange(0.0,1.0/p.dt,df)
	omega=2.0*np.pi*f

	#- Compute time axis
	t=np.arange(-0.5*n*p.dt,0.5*n*p.dt,p.dt)

	#- Compute instrument response and natural source spectrum.
	S,indeces=s.space_distribution()
	instrument,natural=s.frequency_distribution(f)

	#- Issue some information if wanted.
	if verbose==1:

		print('number of samples: '+str(n))
		print('maximum time: '+str(np.max(t))+' s')
		print('maximum frequency: '+str(np.max(f))+' Hz')

	#- Warnings.
	if (p.fmax>1.0/p.dt):
		print('WARNING: maximum bandpass frequency cannot be represented with this time step!')

	if (p.fmin<1.0/(n*p.dt)):
		print('WARNING: minimum bandpass frequency cannot be represented with this window length!')

	#==============================================================================
	#- March through source locations and compute raw frequency-domain noise traces.
	#==============================================================================

	#- Set a specific random seed to make simulation repeatable, e.g. for different receiver pair.
	np.random.seed(p.seed)

	#- Initialise frequency-domain wavefields.
	u1=np.zeros([n,p.Nwindows],dtype=complex)
	u2=np.zeros([n,p.Nwindows],dtype=complex)
	G1=np.zeros([n,p.Nwindows],dtype=complex)
	G2=np.zeros([n,p.Nwindows],dtype=complex)

	#- Regularise zero-frequency to avoid singularity in Green function.
	omega[0]=0.01*2.0*np.pi*df

	#- March through source indices.
	for k in indeces:

		#- Green function for a specific source point.
		G1[:,0]=g.green_input(p.x[rec0],p.y[rec0],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)
		G2[:,0]=g.green_input(p.x[rec1],p.y[rec1],x[k],y[k],omega,p.dx,p.dy,p.rho,p.v,p.Q)

		#- Apply instrument response and source spectrum
		G1[:,0]=G1[:,0]*instrument*np.sqrt(natural)
		G2[:,0]=G2[:,0]*instrument*np.sqrt(natural)

		#- Copy this Green function to all time intervals.
		for i in range(p.Nwindows):

			G1[:,i]=G1[:,0]
			G2[:,i]=G2[:,0]

		#- Random phase matrix, frequency steps times time windows.
		phi=2.0*np.pi*(np.random.rand(n,p.Nwindows)-0.5)
		ff=np.exp(1j*phi)

		#- Matrix of random frequency-domain wavefields.
		u1+=S[k]*ff*G1
		u2+=S[k]*ff*G2

	#- March through time windows to add earthquakes.
	for win in range(p.Nwindows):

		neq=len(p.eq_t[win])

		for i in range(neq):

			G1=g.green_input(p.x[rec0],p.y[rec0],p.eq_x[win][i],p.eq_y[win][i],omega,p.dx,p.dy,p.rho,p.v,p.Q)
			G2=g.green_input(p.x[rec1],p.y[rec1],p.eq_x[win][i],p.eq_y[win][i],omega,p.dx,p.dy,p.rho,p.v,p.Q)

			G1=G1*instrument*np.sqrt(natural)
			G2=G2*instrument*np.sqrt(natural)

			u1[:,win]+=p.eq_m[win][i]*G1*np.exp(-1j*omega*p.eq_t[win][i])
			u2[:,win]+=p.eq_m[win][i]*G2*np.exp(-1j*omega*p.eq_t[win][i])

	#==============================================================================
	#- Processing.
	#==============================================================================

	#- Apply single-station processing.
	u1_proc,u2_proc=proc.processing_single_station(u1,u2,f,verbose)

	#- Compute correlation function, raw and processed.
	ccf=u1*np.conj(u2)
	ccf_proc=u1_proc*np.conj(u2_proc)

	#- Apply correlation processing.
	ccf_proc=proc.processing_correlation(ccf_proc,f,verbose)

	#==============================================================================
	#- Apply the standard bandpass.
	#==============================================================================

	bandpass=np.zeros(np.shape(f))

	Nminmax=int(np.round(p.bp_fmin/df))
	Nminmin=int(np.round((p.bp_fmin-p.bp_width)/df))
	Nmaxmin=int(np.round(p.bp_fmax/df))
	Nmaxmax=int(np.round((p.bp_fmax+p.bp_width)/df))

	bandpass[Nminmin:Nminmax]=np.linspace(0.0,1.0,Nminmax-Nminmin)
	bandpass[Nmaxmin:Nmaxmax]=np.linspace(1.0,0.0,Nmaxmax-Nmaxmin)
	bandpass[Nminmax:Nmaxmin]=1.0
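	#- The result is a trapezoidal filter: zero outside [bp_fmin-bp_width, bp_fmax+bp_width],
	#- one inside [bp_fmin, bp_fmax], with linear tapers of width bp_width on both sides.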

	for i in range(p.Nwindows):

		ccf[:,i]=bandpass*ccf[:,i]
		ccf_proc[:,i]=bandpass*ccf_proc[:,i]

	#==============================================================================
	#- Time-domain correlation function.
	#==============================================================================

	#- Some care has to be taken here with the inverse FFT convention of numpy.
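	#- np.fft.ifft includes a 1/n factor; dividing by p.dt rescales the result to approximate the
	#- continuous inverse transform, and swapping the two halves moves zero lag to the centre of
	#- the time axis. For even n this swap is the same as np.fft.fftshift along axis 0.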

	cct=np.zeros([n,p.Nwindows],dtype=float)
	cct_proc=np.zeros([n,p.Nwindows],dtype=float)

	dummy=np.real(np.fft.ifft(ccf,axis=0)/p.dt)
	cct[n//2:n,:]=dummy[0:n//2,:]
	cct[0:n//2,:]=dummy[n//2:n,:]

	dummy=np.real(np.fft.ifft(ccf_proc,axis=0)/p.dt)
	cct_proc[n//2:n,:]=dummy[0:n//2,:]
	cct_proc[0:n//2,:]=dummy[n//2:n,:]

	#==============================================================================
	#- Save results if wanted.
	#==============================================================================

	if save==1:

		#- Store frequency and time axes.

		fid=open('OUTPUT/correlations_individual/f','w')
		np.save(fid,f)
		fid.close()

		fid=open('OUTPUT/correlations_individual/t','w')
		np.save(fid,t)
		fid.close()

		#- Store raw and processed correlations in the frequency domain.

		fn='OUTPUT/correlations_individual/ccf_'+str(rec0)+'_'+str(rec1)
		fid=open(fn,'w')
		np.save(fid,ccf)
		fid.close()

		fn='OUTPUT/correlations_individual/ccf_proc_'+str(rec0)+'_'+str(rec1)
		fid=open(fn,'w')
		np.save(fid,ccf_proc)
		fid.close()

	#==============================================================================
	#- Plot results if wanted.
	#==============================================================================

	if plot==1:

		#- Noise traces for first window.
		plt.subplot(2,1,1)
		plt.plot(t,np.real(np.fft.ifft(u1[:,0]))/p.dt,'k')
		plt.ylabel('u1(t) [N/m^2]')
		plt.title('recordings for first time window')
		plt.subplot(2,1,2)
		plt.plot(t,np.real(np.fft.ifft(u2[:,0]))/p.dt,'k')
		plt.ylabel('u2(t) [N/m^2]')
		plt.xlabel('t [s]')

		plt.show()

		#- Spectrum of the pressure wavefield for first window.
		plt.subplot(2,1,1)
		plt.plot(f,np.abs(np.sqrt(bandpass)*u1[:,0]),'k',linewidth=2)
		plt.plot(f,np.real(np.sqrt(bandpass)*u1[:,0]),'b',linewidth=1)
		plt.plot(f,np.imag(np.sqrt(bandpass)*u1[:,0]),'r',linewidth=1)
		plt.ylabel('u1(f) [Ns/m^2]')
		plt.title('raw and processed spectra for first window (abs=black, real=blue, imag=red)')
		
		plt.subplot(2,1,2)
		plt.plot(f,np.abs(np.sqrt(bandpass)*u1_proc[:,0]),'k',linewidth=2)
		plt.plot(f,np.real(np.sqrt(bandpass)*u1_proc[:,0]),'b',linewidth=1)
		plt.plot(f,np.imag(np.sqrt(bandpass)*u1_proc[:,0]),'r',linewidth=1)
		plt.ylabel('u1_proc(f) [?]')
		plt.xlabel('f [Hz]')

		plt.show()

		#- Raw time- and frequency-domain correlation for first window.
		plt.subplot(2,1,1)
		plt.semilogy(f,np.abs(ccf[:,0]),'k',linewidth=2)
		plt.title('raw frequency-domain correlation for first window')
		plt.ylabel('correlation [N^2 s^2 / m^4]')
		plt.xlabel('f [Hz]')
		
		plt.subplot(2,1,2)
		plt.plot(t,np.real(cct[:,0]),'k')
		plt.title('raw time-domain correlation for first window')
		plt.ylabel('correlation [N^2 s / m^4]')
		plt.xlabel('t [s]')

		plt.show()

		#- Processed time- and frequency-domain correlation for first window.
		plt.subplot(2,1,1)
		plt.semilogy(f,np.abs(ccf_proc[:,0]),'k',linewidth=2)
		plt.title('processed frequency-domain correlation for first window')
		plt.ylabel('correlation [N^2 s^2 / m^4]*unit(T)')
		plt.xlabel('f [Hz]')
		
		plt.subplot(2,1,2)
		plt.plot(t,np.real(cct_proc[:,0]))
		plt.title('processed time-domain correlation for first window')
		plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
		plt.xlabel('t [s]')

		plt.show()

		#- Raw and processed ensemble correlations. 
		plt.plot(t,np.sum(cct,1)/np.max(cct),'k')
		plt.plot(t,np.sum(cct_proc,1)/np.max(cct_proc),'r')
		plt.title('ensemble time-domain correlation (black=raw, red=processed)')
		plt.ylabel('correlation [N^2 s / m^4]*unit(T)')
		plt.xlabel('t [s]')

		plt.show()

	
	#- End time.
	t2=time.time()

	if verbose==1:
		print 'elapsed time: '+str(t2-t1)+' s'

	#==============================================================================
	#- Output.
	#==============================================================================

	return cct,cct_proc,t,ccf,ccf_proc,f
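
#- The linear-taper bandpass assembled above is rebuilt in the same way in several of the examples
#- below. A minimal sketch of how it could be factored out, assuming numpy is imported as np as in
#- the surrounding code; the helper name and signature are assumptions, not part of the original.
def trapezoidal_bandpass(f, fmin, fmax, width):
	"""Trapezoidal bandpass on a regular frequency axis f: 1 inside [fmin, fmax], 0 outside
	[fmin-width, fmax+width], with linear tapers of length width on both sides."""
	df=f[1]-f[0]
	bandpass=np.zeros(np.shape(f))
	Nminmin=int(np.round((fmin-width)/df))
	Nminmax=int(np.round(fmin/df))
	Nmaxmin=int(np.round(fmax/df))
	Nmaxmax=int(np.round((fmax+width)/df))
	bandpass[Nminmin:Nminmax]=np.linspace(0.0,1.0,Nminmax-Nminmin)
	bandpass[Nmaxmin:Nmaxmax]=np.linspace(1.0,0.0,Nmaxmax-Nmaxmin)
	bandpass[Nminmax:Nmaxmin]=1.0
	return bandpass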
Exemplo n.º 11
0
def propagation(rec0=0, rec1=1, average=True):
    """
	propagation(rec0=0,rec1=1,average=True)

	Plot time- and frequency-domain propagation corrector, and time- and frequency-domain effective
	Green function. Requires propagation corrector files located in OUTPUT/correctors.

	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	average: 		plot average over all receiver pairs (True or False)
	

	OUTPUT:
	-------
	none
	
	Last updated: 24 March 2016.
	"""

    #==============================================================================
    #- Input.
    #==============================================================================

    #- Load frequency and time axes. ----------------------------------------------

    fn = 'OUTPUT/correctors/f'
    fid = open(fn, 'r')
    f = np.load(fid)
    fid.close()

    df = f[1] - f[0]
    f[0] = 0.01 * f[1]

    n = len(f)

    fn = 'OUTPUT/correctors/t'
    fid = open(fn, 'r')
    t = np.load(fid)
    fid.close()

    dt = t[1] - t[0]

    #- Read receiver positions and compute frequency-domain Green function. -------

    p = parameters.Parameters()
    G = g.green(p.x[rec0], p.y[rec0], p.x[rec1], p.y[rec1], 2.0 * np.pi * f)

    d = np.sqrt((p.x[rec0] - p.x[rec1])**2 + (p.y[rec0] - p.y[rec1])**2)

    #- Load frequency-domain propagation corrector. -------------------------------

    fn = 'OUTPUT/correctors/g_' + str(rec0) + '_' + str(rec1)
    fid = open(fn, 'r')
    gf = np.load(fid)
    fid.close()

    #- Load all frequency-domain propagation correctors to compute average. -------

    if average == True:

        gf_all = np.zeros(np.shape(gf), dtype=complex)

        for i in range(p.Nreceivers):
            for k in range(i, p.Nreceivers):

                fn = 'OUTPUT/correctors/g_' + str(i) + '_' + str(k)
                fid = open(fn, 'r')
                dummy = np.load(fid)
                fid.close()

                gf_all += dummy

        gf_all = gf_all / float((p.Nreceivers * (p.Nreceivers - 1)))

    #- Bandpass, instrument and natural spectra. ----------------------------------

    bandpass = np.zeros(np.shape(f))

    Nminmax = int(np.round(p.bp_fmin / df))
    Nminmin = int(np.round((p.bp_fmin - p.bp_width) / df))
    Nmaxmin = int(np.round(p.bp_fmax / df))
    Nmaxmax = int(np.round((p.bp_fmax + p.bp_width) / df))

    bandpass[Nminmin:Nminmax] = np.linspace(0.0, 1.0, Nminmax - Nminmin)
    bandpass[Nmaxmin:Nmaxmax] = np.linspace(1.0, 0.0, Nmaxmax - Nmaxmin)
    bandpass[Nminmax:Nmaxmin] = 1.0

    instrument, natural = s.frequency_distribution(f)

    #==============================================================================
    #- Time-domain corrector.
    #==============================================================================

    dummy = np.real(np.fft.ifft(gf) / dt)
    gt = np.zeros(np.shape(dummy))
    gt[n // 2:n] = dummy[0:n // 2]
    gt[0:n // 2] = dummy[n // 2:n]

    if average == True:

        dummy = np.real(np.fft.ifft(gf_all) / dt)
        gt_all = np.zeros(np.shape(dummy))
        gt_all[n // 2:n] = dummy[0:n // 2]
        gt_all[0:n // 2] = dummy[n // 2:n]

    #==============================================================================
    #- Time-domain Green functions.
    #==============================================================================

    dummy = np.real(np.fft.ifft(instrument * np.sqrt(natural) * G) / dt)
    Gt = np.zeros(np.shape(dummy))
    Gt[n // 2:n] = dummy[0:n // 2]
    Gt[0:n // 2] = dummy[n // 2:n]

    dummy = np.real(np.fft.ifft(instrument * np.sqrt(natural) * G * gf) / dt)
    Gt_corr = np.zeros(np.shape(dummy))
    Gt_corr[n // 2:n] = dummy[0:n // 2]
    Gt_corr[0:n // 2] = dummy[n // 2:n]

    #==============================================================================
    #- Plot results.
    #==============================================================================

    #- Plot time-domain corrector. ------------------------------------------------

    plt.subplot(2, 1, 1)
    if average == True:
        plt.plot(t, gt_all, '--', color=(0.7, 0.7, 0.7), linewidth=2)
    plt.plot(t, gt, 'k', linewidth=2)
    plt.xlim((0.25 * np.min(t), 0.25 * np.max(t)))

    plt.title('time-domain propagation corrector in [1/s]*unit(T)')
    plt.xlabel('time [s]')

    #- Plot frequency-domain corrector. ------------------------------------------

    plt.subplot(2, 1, 2)
    if average == True:
        plt.plot(f, np.abs(gf_all), '--', color=(0.7, 0.7, 0.7), linewidth=2)
    plt.plot(f, np.abs(gf), 'k', linewidth=2)
    plt.plot(f, np.imag(gf), 'r', linewidth=1)
    plt.xlim((0.0, 0.2 * np.max(f)))

    plt.title('frequency-domain propagation corrector in unit(T)')
    plt.xlabel('frequency [Hz]')

    plt.show()

    #- Plot time-domain Green functions. ------------------------------------------

    scale_eff = np.max(np.abs(Gt_corr))
    scale = np.max(np.abs(Gt))

    plt.subplot(2, 1, 1)
    plt.plot(t, Gt_corr / scale_eff, 'r', linewidth=2)
    plt.plot(t, Gt / scale, 'k', linewidth=2)
    plt.plot([d / p.v, d / p.v], [-1.1, 1.1], 'k--')

    plt.text(d / p.v + 200.0,
             0.7,
             'x %0.2g' % scale,
             color=(0.7, 0.7, 0.7),
             fontsize=14)
    plt.text(d / p.v + 200.0,
             0.5,
             'x %0.2g' % scale_eff,
             color=(0.9, 0.2, 0.2),
             fontsize=14)

    plt.xlim((d / p.v - 700.0, d / p.v + 700.0))
    plt.ylim((-1.1, 1.1))

    plt.title(
        'scaled time-domain Green functions (black=original, red=effective)')
    plt.xlabel('time [s]')
    plt.ylabel('Green function [s/kg]*unit(T)')

    #- Plot frequency-domain Green functions. -------------------------------------

    scale = np.max(np.abs(instrument * np.sqrt(natural) * G))
    scale_eff = np.max(np.abs(instrument * np.sqrt(natural) * G * gf))

    plt.subplot(2, 1, 2)
    plt.plot(f,
             np.real(instrument * np.sqrt(natural) * G * gf) / scale_eff,
             'r',
             linewidth=2)
    plt.plot(f,
             np.abs(instrument * np.sqrt(natural) * G * gf) / scale_eff,
             'r--',
             color=(0.9, 0.2, 0.2),
             linewidth=2)
    plt.plot(f,
             np.real(instrument * np.sqrt(natural) * G) / scale,
             'k',
             linewidth=2)
    plt.plot(f,
             np.abs(instrument * np.sqrt(natural) * G) / scale,
             '--',
             color=(0.3, 0.3, 0.3),
             linewidth=2)

    plt.text(0.15 * np.max(f),
             0.7,
             'x %0.2g' % scale,
             color=(0.7, 0.7, 0.7),
             fontsize=14)
    plt.text(0.15 * np.max(f),
             0.5,
             'x %0.2g' % scale_eff,
             color=(0.9, 0.2, 0.2),
             fontsize=14)

    plt.xlim((0.0, 0.2 * np.max(f)))
    plt.ylim((-1.1, 1.1))

    plt.title(
        'scaled frequency-domain Green functions, real part and absolute value (black=original, red=effective)'
    )
    plt.ylabel('Green function [s^2/kg]*unit(T)')
    plt.xlabel('frequency [Hz]')

    plt.show()

    return f
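
#- The swap of array halves applied after each inverse FFT above is repeated three times. For even n
#- it is equivalent to numpy's fftshift; a minimal sketch of a reusable helper, assuming numpy is
#- imported as np (the helper name is an assumption, not part of the original code):
def to_time_domain(spectrum, dt):
    #- Inverse FFT, rescaled by 1/dt to approximate the continuous transform, with zero lag
    #- moved to the centre of the time axis.
    return np.fft.fftshift(np.real(np.fft.ifft(spectrum)) / dt)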
Exemplo n.º 12
0
def geometric_spreading(freq=0.015):
    """
	geometric_spreading(freq=0.015)

	Plot effective geometric spreading for a specific frequency.

	INPUT:
	------
	freq:				frequency [Hz].
	
	OUTPUT:
	-------
	none
	
	Last updated: 19 May 2016.
	"""

    #==============================================================================
    #- Input and initialisation.
    #==============================================================================

    #- Load frequency axis. -------------------------------------------------------

    fn = 'OUTPUT/correctors/f'
    fid = open(fn, 'r')
    f = np.load(fid)
    fid.close()

    df = f[1] - f[0]
    f[0] = 0.01 * f[1]

    n = len(f)

    p = parameters.Parameters()

    idx = int(np.round((freq - np.min(f)) / df))
    print idx, f[idx], len(f)

    #- Bandpass, instrument and natural spectra. ----------------------------------

    bandpass = np.zeros(np.shape(f))

    Nminmax = int(np.round(p.bp_fmin / df))
    Nminmin = int(np.round((p.bp_fmin - p.bp_width) / df))
    Nmaxmin = int(np.round(p.bp_fmax / df))
    Nmaxmax = int(np.round((p.bp_fmax + p.bp_width) / df))

    bandpass[Nminmin:Nminmax] = np.linspace(0.0, 1.0, Nminmax - Nminmin)
    bandpass[Nmaxmin:Nmaxmax] = np.linspace(1.0, 0.0, Nmaxmax - Nmaxmin)
    bandpass[Nminmax:Nmaxmin] = 1.0

    instrument, natural = s.frequency_distribution(f)

    #==============================================================================
    #- March through all receiver pairs.
    #==============================================================================

    d = np.zeros(p.Nreceivers * (p.Nreceivers + 1) / 2)
    a = np.zeros(p.Nreceivers * (p.Nreceivers + 1) / 2)
    a_eff = np.zeros(p.Nreceivers * (p.Nreceivers + 1) / 2)

    count = 0
    average = 0.0
    count_average = 0.0

    for i in range(p.Nreceivers):
        for k in range(i, p.Nreceivers):

            #- Read propagation corrector. ----------------------------------------

            fn = 'OUTPUT/correctors/g_' + str(i) + '_' + str(k)
            fid = open(fn, 'r')
            g_ik = np.load(fid)
            fid.close()

            #- Compute effective Green function. ----------------------------------

            G = g.green(p.x[i], p.y[i], p.x[k], p.y[k], 2.0 * np.pi * f)
            G_eff = G * g_ik

            d[count] = np.sqrt((p.x[i] - p.x[k])**2 + (p.y[i] - p.y[k])**2)
            a_eff[count] = np.abs(G_eff[idx])
            a[count] = np.abs(G[idx])

            #- Compute scaling factor excluding auto-correlations. ----------------

            if d[count] > 0.0:
                average += a_eff[count] / a[count]
                count_average += 1.0

            count += 1

    scale = average / count_average
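    #- scale is the mean ratio of effective to original Green-function amplitudes at the chosen
    #- frequency, computed over all receiver pairs with non-zero separation, so that the original
    #- amplitude-distance curve can be plotted on the same scale as the effective one.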

    #==============================================================================
    #- Plot.
    #==============================================================================

    plt.semilogy(d, a_eff, 'ro')
    plt.semilogy(d, a * scale, 'ko')
    plt.xlim((-0.1 * np.max(d), 1.1 * np.max(d)))
    plt.ylabel('Green function unit(T) [s/kg]')
    plt.xlabel('inter-station distance')
    plt.title('amplitude vs. distance (effective=red, original scaled=black)')
    plt.show()
Exemplo n.º 13
0
def precompute(rec=0, verbose=0):
    """
	precompute(rec=0, verbose=0)

	Compute the correlation wavefield in the frequency domain and store it in OUTPUT/ for re-use in kernel computation.

	INPUT:
	------

	rec:		index of reference receiver.
	verbose:	give screen output when 1.

	OUTPUT:
	-------

	Frequency-domain interferometric wavefield stored in OUTPUT/.

	Last updated: 16 May 2016.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    #- Spatial grid.
    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)
    x, y = np.meshgrid(x_line, y_line)

    #- Frequency line.
    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    #- Power-spectral density.
    S, indeces = s.space_distribution()
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument

    #- Check whether the source indices are actually available. If not, interrupt.
    if len(indeces) == 0:
        print 'Correlation field cannot be computed because source index array is empty.'
        return

    C = np.zeros((len(y_line), len(x_line), len(omega)), dtype=complex)

    #==============================================================================
    #- Compute correlation field for all frequencies and store.
    #==============================================================================

    #- March through the spatial grid. --------------------------------------------
    for idx in range(len(x_line)):

        if (verbose == 1):
            print str(100 * float(idx) / float(len(x_line))) + ' %'
        t1 = time.time()

        for idy in range(len(y_line)):

            #- March through all sources.
            for k in indeces:

                C[idy, idx, :] += S[k] * filt * g.conjG1_times_G2(
                    x[idy, idx], y[idy, idx], p.x[rec], p.y[rec], x[k], y[k],
                    omega, p.dx, p.dy, p.rho, p.v, p.Q)

            #- Conjugate once after summing over all sources.
            C[idy, idx, :] = np.conj(C[idy, idx, :])

        t2 = time.time()
        if (verbose == 1):
            print 'elapsed time for this x-gridline: ' + str(t2 - t1) + ' s'

    #- Normalisation.
    C = C * p.dx * p.dy
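    #- The factor p.dx*p.dy turns the sum over source grid points into an approximation of the
    #- integral over the source area.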

    #==============================================================================
    #- Save interferometric wavefield.
    #==============================================================================

    fn = 'OUTPUT/cf_' + str(rec)
    fid = open(fn, 'w')
    np.save(fid, C)
    fid.close()
Exemplo n.º 14
0
def correlation_function(rec0=0,
                         rec1=1,
                         effective=False,
                         plot=False,
                         save=False):
    """
	cct, t, ccf, f = correlation_function(rec0=0,rec1=1,effective=False,plot=False,save=False)

	Compute time- and frequency-domain correlation functions. 


	INPUT:
	------
	rec0, rec1:		indices of the receivers used in the correlation.
	plot:			When plot=True, the source distribution and the time- and frequency-domain correlation functions are plotted.
	save:			Save pdf figures to OUTPUT when True.
	effective:		When effective==True, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.

	OUTPUT:
	-------
	cct, t:		Time-domain correlation function and time axis [N^2 s / m^4],[s].
	ccf, f:		Frequency-domain correlation function and frequency axis [N^2 s^2 / m^4],[1/s].

	Last updated: 16 July 2019.
	"""

    #==============================================================================
    #- Initialisation.
    #==============================================================================

    p = parameters.Parameters()

    x_line = np.arange(p.xmin, p.xmax, p.dx)
    y_line = np.arange(p.ymin, p.ymax, p.dy)

    x, y = np.meshgrid(x_line, y_line)

    f = np.arange(p.fmin - p.fwidth, p.fmax + p.fwidth, p.df)
    omega = 2.0 * np.pi * f

    t = np.arange(p.tmin, p.tmax, p.dt)

    #- Frequency- and space distribution of the source. ---------------------------

    S, indices = s.space_distribution(plot=plot, save=save)
    instrument, natural = s.frequency_distribution(f)
    filt = natural * instrument * instrument
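    #- filt combines the natural source spectrum with the instrument response applied at both
    #- receivers, which is presumably why instrument enters twice.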

    #- Read propagation corrector if needed. --------------------------------------

    if effective:

        gf = gpc.get_propagation_corrector(rec0, rec1, plot=False)

    else:

        gf = np.ones(len(f), dtype=complex)

    #==============================================================================
    #- Compute inter-station correlation function.
    #==============================================================================

    cct = np.zeros(np.shape(t), dtype=float)
    ccf = np.zeros(np.shape(f), dtype=complex)

    for idf in range(len(omega)):

        P = g.conjG1_times_G2(p.x[rec0], p.y[rec0], p.x[rec1], p.y[rec1], x, y,
                              omega[idf], p.dx, p.dy, p.rho, p.v, p.Q)
        ccf[idf] = gf[idf] * np.conj(np.sum(P * S))

        cct = cct + np.real(filt[idf] * ccf[idf] * np.exp(1j * omega[idf] * t))

    cct = cct * p.dx * p.dy * p.df
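    #- The loop above evaluates the inverse Fourier transform of the correlation spectrum directly;
    #- p.dx*p.dy approximates the integral over the source area and p.df the integral over frequency.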

    #==============================================================================
    #- Plot result.
    #==============================================================================

    if (plot or save):

        plt.rcParams["font.family"] = "serif"
        plt.rcParams.update({'font.size': 10})

        #- Frequency domain.
        plt.semilogy(f, np.abs(ccf), 'k')
        plt.semilogy(f, np.real(ccf), 'b')
        plt.title(
            'frequency-domain correlation function (black=abs, blue=real)')
        plt.xlabel('frequency [Hz]')
        plt.ylabel(r'correlation [N$^2$ s$^2$/m$^4$]')

        if plot:
            plt.show()
        else:
            fn = 'OUTPUT/correlations_computed/c_frequency_domain_' + str(
                rec0) + '-' + str(rec1) + '.pdf'
            plt.savefig(fn, format='pdf')
            plt.clf()

        #- Time domain.

        tt = np.sqrt((p.x[rec0] - p.x[rec1])**2 +
                     (p.y[rec0] - p.y[rec1])**2) / p.v
        cct_max = np.max(np.abs(cct))

        plt.plot(t, cct, 'k', linewidth=2.0)
        plt.plot([tt, tt], [-1.1 * cct_max, 1.1 * cct_max],
                 '--',
                 color=(0.5, 0.5, 0.5),
                 linewidth=1.5)
        plt.plot([-tt, -tt], [-1.1 * cct_max, 1.1 * cct_max],
                 '--',
                 color=(0.5, 0.5, 0.5),
                 linewidth=1.5)

        plt.ylim((-1.1 * cct_max, 1.1 * cct_max))
        plt.title('correlation function')
        plt.xlabel('time [s]')
        plt.ylabel(r'correlation [N$^2$ s/m$^4$]')

        if plot:
            plt.show()
        else:
            fn = 'OUTPUT/correlations_computed/c_time_domain_' + str(
                rec0) + '-' + str(rec1) + '.pdf'
            plt.savefig(fn, format='pdf')
            plt.clf()

    #==============================================================================
    #- Save results if wanted.
    #==============================================================================

    if save:

        #- Store frequency and time axes.
        fn = 'OUTPUT/correlations_computed/f'
        np.save(fn, f)

        fn = 'OUTPUT/correlations_computed/t'
        np.save(fn, t)

        #- Store computed correlations in the time and frequency domain.
        fn = 'OUTPUT/correlations_computed/cct_' + str(rec0) + '-' + str(rec1)
        np.save(fn, cct)

        fn = 'OUTPUT/correlations_computed/ccf_' + str(rec0) + '-' + str(rec1)
        np.save(fn, ccf)

    #==============================================================================
    #- Return.
    #==============================================================================

    return cct, t, ccf, f
Exemplo n.º 15
0
def structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0):
	"""
	x,y,K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', dir_forward='OUTPUT/', effective=0, plot=0)

	Compute structure kernel K_kappa for a frequency-independent source power-spectral density.

	INPUT:
	------

	cct, t:			Time-domain correlation function and time axis as obtained from correlation_function.py.
	rec0, rec1:		Indices of the receivers used in the correlation.
	measurement:	Type of measurement used to compute the adjoint source. See adsrc.py for options.
	dir_forward:	Location of the forward interferometric fields from rec0 and rec1. Must exist.
	plot:			When plot=1, plot structure kernel.
	effective:		When effective==1, effective correlations are computed using the propagation correctors stored in OUTPUT/correctors.
					The source power-spectral density is then interpreted as the effective one.


	OUTPUT:
	-------
	x,y:			Space coordinates.
	K_kappa:		Structure kernel [unit of measurement * 1/N].

	Last updated: 11 July 2016.
	"""

	#==============================================================================
	#- Initialisation.
	#==============================================================================

	p=parameters.Parameters()

	x_line=np.arange(p.xmin,p.xmax,p.dx)
	y_line=np.arange(p.ymin,p.ymax,p.dy)

	x,y=np.meshgrid(x_line,y_line)

	f=np.arange(p.fmin-p.fwidth,p.fmax+p.fwidth,p.df)
	df=f[1]-f[0]
	omega=2.0*np.pi*f

	K_kappa=np.zeros(np.shape(x))

	C1=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)
	C2=np.zeros((len(y_line),len(x_line),len(omega)),dtype=complex)

	nx=len(x_line)
	ny=len(y_line)

	kappa=p.rho*(p.v**2)

	#- Frequency- and space distribution of the source. ---------------------------

	S,indeces=s.space_distribution(plot=0)
	instrument,natural=s.frequency_distribution(f)
	filt=natural*instrument*instrument

	#- Compute the adjoint source. ------------------------------------------------

	a=adsrc.adsrc(cct, t, measurement, plot)

	#- Compute number of grid points corresponding to the minimum wavelength. -----

	L=int(np.ceil(p.v/(p.fmax*p.dx)))
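	#- L is roughly the number of grid points per minimum wavelength; it sets the number of
	#- smoothing passes applied to the kernel below.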

	#- Read propagation corrector if needed. --------------------------------------

	if (effective==1):

		gf=gpc.get_propagation_corrector(rec0,rec1,plot=0)

	else:

		gf=np.ones(len(f),dtype=complex)

	#==============================================================================
	#- Load forward interferometric wavefields.
	#==============================================================================

	fn=dir_forward+'/cf_'+str(rec0)
	fid=open(fn,'r')
	C1=np.load(fid)
	fid.close()

	fn=dir_forward+'/cf_'+str(rec1)
	fid=open(fn,'r')
	C2=np.load(fid)
	fid.close()

	#==============================================================================
	#- Loop over frequencies.
	#==============================================================================

	for k in range(len(omega)):

		w=omega[k]

		#- Adjoint fields. --------------------------------------------------------
		G1=-w**2*g.green_input(x,y,p.x[rec0],p.y[rec0],w,p.dx,p.dy,p.rho,p.v,p.Q)*gf[k]
		G2=-w**2*g.green_input(x,y,p.x[rec1],p.y[rec1],w,p.dx,p.dy,p.rho,p.v,p.Q)*gf[k]

		#- Multiplication with adjoint fields. ------------------------------------
		K_kappa=K_kappa-2.0*np.real(G2*C1[:,:,k]*np.conj(a[k])+G1*C2[:,:,k]*a[k])

	K_kappa=K_kappa/kappa

	#==============================================================================
	#- Smooth over minimum wavelength.
	#==============================================================================

	for k in range(L):
		K_kappa[1:ny-2,:]=(K_kappa[1:ny-2,:]+K_kappa[0:ny-3,:]+K_kappa[2:ny-1,:])/3.0

	for k in range(L):
		K_kappa[:,1:nx-2]=(K_kappa[:,1:nx-2]+K_kappa[:,0:nx-3]+K_kappa[:,2:nx-1])/3.0
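	#- Each pass is a three-point moving average, applied L times along y and then L times along x,
	#- which smooths the kernel over a length of the order of the minimum wavelength.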

	#==============================================================================
	#- Visualise if wanted.
	#==============================================================================

	if plot==1:

		cmap = plt.get_cmap('RdBu')
		plt.pcolormesh(x,y,K_kappa,cmap=cmap,shading='gouraud')
		plt.clim(-np.max(np.abs(K_kappa))*0.25,np.max(np.abs(K_kappa))*0.25)
		plt.axis('image')
		plt.colorbar()
		plt.title('Structure (kappa) kernel [unit of measurement / N]')
		plt.xlabel('x [km]')
		plt.ylabel('y [km]')

		plt.plot(p.x[rec0],p.y[rec0],'ro')
		plt.plot(p.x[rec1],p.y[rec1],'ro')

		plt.show()


	return x,y,K_kappa
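
#- A minimal usage sketch (assumed workflow, not part of the original example): the forward fields
#- for both receivers must first be written to OUTPUT/ with precompute(), and cct, t come from
#- correlation_function():
#
#   cct, t, ccf, f = correlation_function(rec0=0, rec1=1)
#   x, y, K_kappa = structure_kernel(cct, t, rec0=0, rec1=1, measurement='cctime', plot=1)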