import numpy as np
import pytest
from math import floor

# geographical_distances, is_land, geograph_to_geocent, len_deg_lat and
# len_deg_lon are the geographic utility functions of the package under test.


def test_geostuff():
    grid = np.zeros((2, 5))
    location = [45.0, 45.0]
    dist = geographical_distances(grid, location)

    assert isinstance(dist, np.ndarray)
    assert pytest.approx(dist[0]) == 6662472.7182103
    assert is_land(location, location)[0]
    assert floor(geograph_to_geocent(location[0])) == 44
    assert pytest.approx(len_deg_lat(location[0])) == 111131.779
    assert pytest.approx(len_deg_lon(location[0])) == 78582.91976
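# A minimal standalone sketch of the geodetic-to-geocentric latitude
# conversion that geograph_to_geocent is expected to perform, assuming the
# WGS84 flattening. The package's own implementation may use slightly
# different constants; this only illustrates why
# floor(geograph_to_geocent(45.0)) == 44 in the test above.
def geodetic_to_geocentric_deg(lat_deg, flattening=1.0 / 298.257223563):
    # tan(theta_geocentric) = (1 - f)^2 * tan(phi_geodetic)
    phi = np.radians(lat_deg)
    theta = np.arctan((1.0 - flattening) ** 2 * np.tan(phi))
    return np.degrees(theta)


# At 45 degrees geodetic latitude this gives roughly 44.8 degrees.
print(geodetic_to_geocentric_deg(45.0))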
def g1g2_kern(wf1str, wf2str, kernel, adjt, src, source_conf, insta):

    measr_conf = json.load(
        open(os.path.join(source_conf['source_path'], 'measr_config.json')))
    bandpass = measr_conf['bandpass']

    if bandpass is None:
        filtcnt = 1
    elif type(bandpass) == list:
        if type(bandpass[0]) != list:
            filtcnt = 1
        else:
            filtcnt = len(bandpass)

    ntime, n, n_corr, Fs = get_ns(wf1str, source_conf, insta)

    # use a one-sided taper: The seismogram probably has a non-zero end,
    # being cut off wherever the solver stopped running.
    taper = cosine_taper(ntime, p=0.01)
    taper[0:ntime // 2] = 1.0

    ########################################################################
    # Prepare filenames and adjoint sources
    ########################################################################
    filenames = []
    adjt_srcs = []
    adjt_srcs_cnt = 0

    for ix_f in range(filtcnt):
        filename = kernel + '.{}.npy'.format(ix_f)
        filenames.append(filename)
        # if os.path.exists(filename):
        #     continue

        f = Stream()
        for a in adjt:
            adjtfile = a + '*.{}.sac'.format(ix_f)
            adjtfile = glob(adjtfile)
            try:
                f += read(adjtfile[0])[0]
                f[-1].data = my_centered(f[-1].data, n_corr)
                adjt_srcs_cnt += 1
            except IndexError:
                print('No adjoint source found: {}\n'.format(a))
                break
        adjt_srcs.append(f)

    ########################################################################
    # Compute the kernels
    ########################################################################
    with NoiseSource(src) as nsrc:

        ntraces = nsrc.src_loc[0].shape[0]

        if insta:
            # open database
            dbpath = json.load(
                open(os.path.join(source_conf['project_path'],
                                  'config.json')))['wavefield_path']
            # open and determine Fs, nt
            db = instaseis.open_db(dbpath)
            # get receiver locations
            lat1 = geograph_to_geocent(float(wf1str[2]))
            lon1 = float(wf1str[3])
            rec1 = instaseis.Receiver(latitude=lat1, longitude=lon1)
            lat2 = geograph_to_geocent(float(wf2str[2]))
            lon2 = float(wf2str[3])
            rec2 = instaseis.Receiver(latitude=lat2, longitude=lon2)
        else:
            wf1 = WaveField(wf1str)
            wf2 = WaveField(wf2str)

        kern = np.zeros((filtcnt, ntraces, len(adjt)))

        ####################################################################
        # Loop over locations
        ####################################################################
        for i in range(ntraces):

            # noise source spectrum at this location
            # For the kernel, this contains only the basis functions of the
            # spectrum without weights; might still be location-dependent,
            # for example when constraining sensitivity to ocean
            S = nsrc.get_spect(i)

            if S.sum() == 0.:
                # The spectrum has 0 phase, so only checking the absolute
                # value here
                continue

            ################################################################
            # Get synthetics
            ################################################################
            if insta:
                # get source locations
                lat_src = geograph_to_geocent(nsrc.src_loc[1, i])
                lon_src = nsrc.src_loc[0, i]
                fsrc = instaseis.ForceSource(latitude=lat_src,
                                             longitude=lon_src,
                                             f_r=1.e12)
                s1 = np.ascontiguousarray(db.get_seismograms(
                    source=fsrc, receiver=rec1,
                    dt=1. / source_conf['sampling_rate'])[0].data * taper)
                s2 = np.ascontiguousarray(db.get_seismograms(
                    source=fsrc, receiver=rec2,
                    dt=1. / source_conf['sampling_rate'])[0].data * taper)
            else:
                s1 = np.ascontiguousarray(wf1.data[i, :] * taper)
                s2 = np.ascontiguousarray(wf2.data[i, :] * taper)

            spec1 = np.fft.rfft(s1, n)
            spec2 = np.fft.rfft(s2, n)

            g1g2_tr = np.multiply(np.conjugate(spec1), spec2)
            c = np.multiply(g1g2_tr, S)

            ################################################################
            # Get kernel at that location
            ################################################################
            corr_temp = my_centered(np.fft.ifftshift(np.fft.irfft(c, n)),
                                    n_corr)

            ################################################################
            # Apply the 'adjoint source'
            ################################################################
            for ix_f in range(filtcnt):
                f = adjt_srcs[ix_f]
                if f is None:
                    continue
                for j in range(len(f)):
                    delta = f[j].stats.delta
                    kern[ix_f, i, j] = np.dot(corr_temp, f[j].data) * delta

                    # elif measr_conf['mtype'] in ['envelope']:
                    #     if j == 0:
                    #         corr_temp_h = corr_temp
                    #         print(corr_temp_h)
                    #     if j == 1:
                    #         corr_temp_h = hilbert(corr_temp)
                    #         print(corr_temp_h)
                    #     kern[ix_f, i, j] = np.dot(corr_temp,
                    #                               f[j].data) * delta

            if i % 50000 == 0:
                print("Finished {} source locations.".format(i))

    if not insta:
        wf1.file.close()
        wf2.file.close()

    for ix_f in range(filtcnt):
        filename = filenames[ix_f]
        if kern[ix_f, :, :].sum() != 0:
            np.save(filename, kern[ix_f, :, :])
    return ()
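# Minimal sketch of the kernel projection used above: one kernel entry per
# source location is the discretized inner product between the correlation
# computed for that location (corr_temp) and the adjoint source time series,
# scaled by the sample spacing delta. All names below are local stand-ins,
# not part of the package.
import numpy as np

n_corr = 101
delta = 1.0                                        # sample spacing in seconds
lags = (np.arange(n_corr) - n_corr // 2) * delta
corr_temp = np.exp(-0.5 * (lags / 10.0) ** 2)      # toy correlation
adjt_data = np.gradient(corr_temp, delta)          # toy adjoint source

# Discrete approximation of the lag integral of corr_temp * adjt_data:
kernel_entry = np.dot(corr_temp, adjt_data) * delta
print(kernel_entry)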
def compute_correlation(input_files, all_conf, nsrc, all_ns, taper,
                        insta=False):
    """
    Compute noise cross-correlations from two .h5 'wavefield' files.
    Noise source distribution and spectrum are given by starting_model.h5
    It is assumed that noise sources are delta-correlated in space.

    Metainformation: Include the reference station names for both stations
    from the wavefield files, if possible. Do not include geographic
    information from the .csv file as this might be error-prone. Just add
    the geographic info later if needed.
    """

    wf1, wf2 = input_files
    ntime, n, n_corr, Fs = all_ns
    ntraces = nsrc.src_loc[0].shape[0]
    correlation = np.zeros(n_corr)

    if insta:
        # open database
        dbpath = all_conf.config['wavefield_path']
        # open
        db = instaseis.open_db(dbpath)
        # get receiver locations
        station1 = wf1[0]
        station2 = wf2[0]
        lat1 = geograph_to_geocent(float(wf1[2]))
        lon1 = float(wf1[3])
        rec1 = instaseis.Receiver(latitude=lat1, longitude=lon1)
        lat2 = geograph_to_geocent(float(wf2[2]))
        lon2 = float(wf2[3])
        rec2 = instaseis.Receiver(latitude=lat2, longitude=lon2)
    else:
        wf1 = WaveField(wf1)
        wf2 = WaveField(wf2)
        station1 = wf1.stats['reference_station']
        station2 = wf2.stats['reference_station']

        # Make sure all is consistent
        if False in (wf1.sourcegrid[1, 0:10] == wf2.sourcegrid[1, 0:10]):
            raise ValueError("Wave fields not consistent.")

        if False in (wf1.sourcegrid[1, -10:] == wf2.sourcegrid[1, -10:]):
            raise ValueError("Wave fields not consistent.")

        if False in (wf1.sourcegrid[0, -10:] == nsrc.src_loc[0, -10:]):
            raise ValueError("Wave field and source not consistent.")

    # Loop over source locations
    print_each_n = max(5, round(max(ntraces // 5, 1), -1))
    for i in range(ntraces):

        # noise source spectrum at this location
        S = nsrc.get_spect(i)

        if S.sum() == 0.:
            # If amplitude is 0, continue. (Spectrum has 0 phase anyway.)
            continue

        if insta:
            # get source locations
            lat_src = geograph_to_geocent(nsrc.src_loc[1, i])
            lon_src = nsrc.src_loc[0, i]
            fsrc = instaseis.ForceSource(latitude=lat_src,
                                         longitude=lon_src, f_r=1.e12)
            Fs = all_conf.config['wavefield_sampling_rate']
            s1 = db.get_seismograms(source=fsrc, receiver=rec1,
                                    dt=1. / Fs)[0].data * taper
            s2 = db.get_seismograms(source=fsrc, receiver=rec2,
                                    dt=1. / Fs)[0].data * taper
            s1 = np.ascontiguousarray(s1)
            s2 = np.ascontiguousarray(s2)
            spec1 = np.fft.rfft(s1, n)
            spec2 = np.fft.rfft(s2, n)
        else:
            if not wf1.fdomain:
                # read Green's functions
                s1 = np.ascontiguousarray(wf1.data[i, :] * taper)
                s2 = np.ascontiguousarray(wf2.data[i, :] * taper)
                # Fourier transform for greater ease of convolution
                spec1 = np.fft.rfft(s1, n)
                spec2 = np.fft.rfft(s2, n)
            else:
                spec1 = np.ascontiguousarray(wf1.data[i, :])
                spec2 = np.ascontiguousarray(wf2.data[i, :])

        # correlate the Green's functions
        g1g2_tr = np.multiply(np.conjugate(spec1), spec2)
        # multiply by the noise source spectrum (convolution in time domain)
        c = np.multiply(g1g2_tr, S)

        # transform back
        correlation += my_centered(np.fft.fftshift(np.fft.irfft(c, n)),
                                   n_corr) * nsrc.surf_area[i]

        # occasional info
        if i % print_each_n == 0 and all_conf.config['verbose']:
            print("Finished {} of {} source locations.".format(i, ntraces))
    # end of loop over all source locations ################################

    return (correlation, station1, station2)
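# Minimal numerical check of the frequency-domain correlation step used
# above: conj(FFT(s1)) * FFT(s2), inverse-transformed and fftshifted,
# reproduces the time-domain cross-correlation. Variable names are local
# stand-ins; n is simply chosen as 2 * ntime here for the zero padding.
import numpy as np

rng = np.random.default_rng(0)
ntime = 64
n = 2 * ntime
s1 = rng.standard_normal(ntime)
s2 = rng.standard_normal(ntime)

c = np.conjugate(np.fft.rfft(s1, n)) * np.fft.rfft(s2, n)
corr_fd = np.fft.fftshift(np.fft.irfft(c, n))    # zero lag at index n // 2

corr_td = np.correlate(s2, s1, mode='full')      # lags -(ntime-1)..(ntime-1)
assert np.allclose(corr_fd[1:], corr_td)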
def compute_kernel(input_files, output_file, all_conf, nsrc, all_ns, taper,
                   insta=False):

    ntime, n, n_corr, Fs = all_ns
    wf1, wf2, adjt = input_files

    ########################################################################
    # Prepare filenames and adjoint sources
    ########################################################################
    adjt_srcs = open_adjoint_sources(all_conf, adjt, n_corr)
    if None in adjt_srcs:
        return None
    else:
        if all_conf.config["verbose"]:
            print("========================================")
            print("Computing: " + output_file)

    # Uniform spatial weights. (The current model is in the adjoint source.)
    nsrc.distr_basis = np.ones(nsrc.distr_basis.shape)
    ntraces = nsrc.src_loc[0].shape[0]

    # [comp1, comp2] = [wf1, wf2]  # keep these strings in case they need
    # to be rotated

    if insta:
        # open database
        dbpath = all_conf.config['wavefield_path']
        # open and determine Fs, nt
        db = instaseis.open_db(dbpath)
        # get receiver locations
        lat1 = geograph_to_geocent(float(wf1[2]))
        lon1 = float(wf1[3])
        rec1 = instaseis.Receiver(latitude=lat1, longitude=lon1)
        lat2 = geograph_to_geocent(float(wf2[2]))
        lon2 = float(wf2[3])
        rec2 = instaseis.Receiver(latitude=lat2, longitude=lon2)
    else:
        wf1 = WaveField(wf1)
        wf2 = WaveField(wf2)

        # Make sure all is consistent
        if False in (wf1.sourcegrid[1, 0:10] == wf2.sourcegrid[1, 0:10]):
            raise ValueError("Wave fields not consistent.")

        if False in (wf1.sourcegrid[1, -10:] == wf2.sourcegrid[1, -10:]):
            raise ValueError("Wave fields not consistent.")

        if False in (wf1.sourcegrid[0, -10:] == nsrc.src_loc[0, -10:]):
            raise ValueError("Wave field and source not consistent.")

    kern = np.zeros((nsrc.spect_basis.shape[0],
                     all_conf.filtcnt, ntraces, len(adjt)))

    if all_conf.source_config["rotate_horizontal_components"]:
        tempfile = output_file + ".h5_temp"
        temp = wf1.copy_setup(tempfile, ntraces=ntraces, nt=n_corr)
        map_temp_datasets = {0: temp.data}
        for ix_spec in range(1, nsrc.spect_basis.shape[0]):
            dtmp = temp.file.create_dataset('data{}'.format(ix_spec),
                                            temp.data.shape,
                                            dtype=np.float32)
            map_temp_datasets[ix_spec] = dtmp

    # Loop over locations
    print_each_n = max(5, round(max(ntraces // 5, 1), -1))

    # preload wavefield and spectrum
    S_all = nsrc.get_spect_all()
    wf1_data = np.asarray(wf1.data)
    wf2_data = np.asarray(wf2.data)

    for i in range(ntraces):

        # noise source spectrum at this location
        # For the kernel, this contains only the basis functions of the
        # spectrum without weights; might still be location-dependent,
        # for example when constraining sensitivity to ocean
        # S = nsrc.get_spect(i)
        S = S_all[i, :]

        if S.sum() == 0.:
            # The spectrum has 0 phase, so only checking the
            # absolute value here
            continue

        if insta:
            # get source locations
            lat_src = geograph_to_geocent(nsrc.src_loc[1, i])
            lon_src = nsrc.src_loc[0, i]
            fsrc = instaseis.ForceSource(latitude=lat_src,
                                         longitude=lon_src,
                                         f_r=1.e12)
            dt = 1. / all_conf.source_config['sampling_rate']
            s1 = db.get_seismograms(source=fsrc, receiver=rec1,
                                    dt=dt)[0].data * taper
            s1 = np.ascontiguousarray(s1)
            s2 = db.get_seismograms(source=fsrc, receiver=rec2,
                                    dt=dt)[0].data * taper
            s2 = np.ascontiguousarray(s2)
            spec1 = np.fft.rfft(s1, n)
            spec2 = np.fft.rfft(s2, n)
        else:
            if not wf1.fdomain:
                s1 = np.ascontiguousarray(wf1_data[i, :] * taper)
                s2 = np.ascontiguousarray(wf2_data[i, :] * taper)
                # if horizontal component rotation: perform it here;
                # more convenient before the FFT to avoid additional FFTs
                spec1 = np.fft.rfft(s1, n)
                spec2 = np.fft.rfft(s2, n)
            else:
                spec1 = wf1_data[i, :]
                spec2 = wf2_data[i, :]

        g1g2_tr = np.multiply(np.conjugate(spec1), spec2)

        # loop over spectral basis functions
        for ix_spec in range(nsrc.spect_basis.shape[0]):
            c = np.multiply(g1g2_tr, nsrc.spect_basis[ix_spec, :])

            ################################################################
            # Get kernel at that location
            ################################################################
            ctemp = np.fft.fftshift(np.fft.irfft(c, n))
            corr_temp = my_centered(ctemp, n_corr)
            if all_conf.source_config["rotate_horizontal_components"]:
                map_temp_datasets[ix_spec][i, :] = corr_temp

            ################################################################
            # Apply the 'adjoint source'
            ################################################################
            for ix_f in range(all_conf.filtcnt):
                f = adjt_srcs[ix_f]
                if f is None:
                    continue
                for j in range(len(f)):
                    delta = f[j].stats.delta
                    kern[ix_spec, ix_f, i, j] = np.dot(corr_temp,
                                                       f[j].data) * delta

        if i % print_each_n == 0 and all_conf.config['verbose']:
            print("Finished {} of {} source locations.".format(i, ntraces))

    if not insta:
        wf1.file.close()
        wf2.file.close()

    if all_conf.source_config["rotate_horizontal_components"]:
        temp.file.close()

    return kern
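# The np.asarray(wf1.data) calls above pull the whole HDF5 dataset into
# memory once, so the per-trace loop indexes a NumPy array rather than
# hitting the file on every iteration. A minimal sketch of that pattern with
# plain h5py (file name and dataset layout are illustrative only):
import h5py
import numpy as np

with h5py.File("greens_example.h5", "w") as f:
    f.create_dataset("data",
                     data=np.random.rand(10, 256).astype(np.float32))

with h5py.File("greens_example.h5", "r") as f:
    data_in_memory = np.asarray(f["data"])    # one bulk read from disk
    for i in range(data_in_memory.shape[0]):
        row = data_in_memory[i, :]            # cheap in-memory slicing
        _ = row @ row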
startindex = 0
f_out = h5py.File(f_out_name, "w")

# DATASET NR 1: STATS
stats = f_out.create_dataset('stats', data=(0,))
stats.attrs['reference_station'] = '{}.{}'.format(net, sta)
stats.attrs['data_quantity'] = config['synt_data']
stats.attrs['ntraces'] = ntraces
stats.attrs['Fs'] = Fs
stats.attrs['nt'] = int(ntimesteps)

# DATASET NR 2: Source grid
sources = f_out.create_dataset('sourcegrid', data=f_sources[0:2])

lat1 = geograph_to_geocent(float(lat))
lon1 = float(lon)
rec1 = instaseis.Receiver(latitude=lat1, longitude=lon1)

# DATASET NR 3: The seismograms themselves
traces = f_out.create_dataset('data', (ntraces, ntimesteps),
                              dtype=np.float32)

if channel[-1] == 'Z':
    c_index = 0
elif channel[-1] == 'R':
    c_index = 1
elif channel[-1] == 'T':
    c_index = 2
else:
    f_out = h5py.File(f_out_name, "r+")
    startindex = len(f_out['data'])
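# Hedged sketch of how the file written above could be inspected afterwards;
# it relies only on the datasets and attributes created in this excerpt
# ('stats', 'sourcegrid', 'data'), with f_out_name as defined above.
import h5py

with h5py.File(f_out_name, "r") as f:
    print(dict(f['stats'].attrs))     # reference_station, Fs, nt, ntraces...
    print(f['sourcegrid'].shape)      # (2, n_sources): row 0 lon, row 1 lat
    print(f['data'].shape)            # (ntraces, ntimesteps)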
def g1g2_corr(wf1, wf2, corr_file, src, source_conf, insta):
    """
    Compute noise cross-correlations from two .h5 'wavefield' files.
    Noise source distribution and spectrum are given by starting_model.h5
    It is assumed that noise sources are delta-correlated in space.
    """

    # ToDo: check whether to include autocorrs from user (now hardcoded off)
    # ToDo: Parallel loop(s)
    # ToDo: tests

    # Metainformation: Include the reference station names for both stations
    # from the wavefield files, if possible. Do not include geographic
    # information from the .csv file as this might be error-prone. Just add
    # the geographic info later if needed.

    with NoiseSource(src) as nsrc:

        ntime, n, n_corr, Fs = get_ns(wf1, source_conf, insta)

        # use a one-sided taper: The seismogram probably has a non-zero end,
        # being cut off wherever the solver stopped running.
        taper = cosine_taper(ntime, p=0.01)
        taper[0:ntime // 2] = 1.0
        ntraces = nsrc.src_loc[0].shape[0]
        print(taper.shape)
        correlation = np.zeros(n_corr)

        if insta:
            # open database
            dbpath = json.load(
                open(os.path.join(source_conf['project_path'],
                                  'config.json')))['wavefield_path']
            # open and determine Fs, nt
            db = instaseis.open_db(dbpath)
            # get receiver locations
            lat1 = geograph_to_geocent(float(wf1[2]))
            lon1 = float(wf1[3])
            rec1 = instaseis.Receiver(latitude=lat1, longitude=lon1)
            lat2 = geograph_to_geocent(float(wf2[2]))
            lon2 = float(wf2[3])
            rec2 = instaseis.Receiver(latitude=lat2, longitude=lon2)
        else:
            wf1 = WaveField(wf1)
            wf2 = WaveField(wf2)

        # Loop over source locations
        for i in range(ntraces):

            # noise source spectrum at this location
            S = nsrc.get_spect(i)

            if S.sum() == 0.:
                # If amplitude is 0, continue. (Spectrum has 0 phase anyway.)
                continue

            if insta:
                # get source locations
                lat_src = geograph_to_geocent(nsrc.src_loc[1, i])
                lon_src = nsrc.src_loc[0, i]
                fsrc = instaseis.ForceSource(latitude=lat_src,
                                             longitude=lon_src, f_r=1.e12)
                s1 = np.ascontiguousarray(db.get_seismograms(
                    source=fsrc, receiver=rec1,
                    dt=1. / source_conf['sampling_rate'])[0].data * taper)
                s2 = np.ascontiguousarray(db.get_seismograms(
                    source=fsrc, receiver=rec2,
                    dt=1. / source_conf['sampling_rate'])[0].data * taper)
            else:
                # read Green's functions
                s1 = np.ascontiguousarray(wf1.data[i, :] * taper)
                s2 = np.ascontiguousarray(wf2.data[i, :] * taper)

            # Fourier transform for greater ease of convolution
            spec1 = np.fft.rfft(s1, n)
            spec2 = np.fft.rfft(s2, n)

            # correlate the Green's functions
            g1g2_tr = np.multiply(np.conjugate(spec1), spec2)
            # multiply by the noise source spectrum
            c = np.multiply(g1g2_tr, S)

            # transform back
            correlation += my_centered(np.fft.ifftshift(np.fft.irfft(c, n)),
                                       n_corr) * nsrc.surf_area[i]

            # occasional info
            if i % 50000 == 0:
                print("Finished {} source locations.".format(i))
        # end of loop over all source locations ############################

        if not insta:
            wf1.file.close()
            wf2.file.close()

        # save output
        trace = Trace()
        trace.stats.sampling_rate = Fs
        trace.data = correlation

        # try to add some metadata
        try:
            sta1 = wf1.stats['reference_station']
            sta2 = wf2.stats['reference_station']
            trace.stats.station = sta1.split('.')[1]
            trace.stats.network = sta1.split('.')[0]
            trace.stats.location = sta1.split('.')[2]
            trace.stats.channel = sta1.split('.')[3]
            trace.stats.sac = {}
            trace.stats.sac['kuser0'] = sta2.split('.')[1]
            trace.stats.sac['kuser1'] = sta2.split('.')[0]
            trace.stats.sac['kuser2'] = sta2.split('.')[2]
            trace.stats.sac['kevnm'] = sta2.split('.')[3]
        except Exception:
            pass

        trace.write(filename=corr_file, format='SAC')
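# Hedged sketch of reading the SAC correlation written by g1g2_corr back in
# with ObsPy and recovering the station pair from the headers set above
# (station/network/location/channel for station 1, kuser0/kuser1/kuser2/kevnm
# for station 2). The file name is illustrative only.
from obspy import read

tr = read("example_correlation.sac")[0]
sta1 = "{}.{}.{}.{}".format(tr.stats.network, tr.stats.station,
                            tr.stats.location, tr.stats.channel)
sac = tr.stats.sac
sta2 = "{}.{}.{}.{}".format(sac.kuser1, sac.kuser0, sac.kuser2, sac.kevnm)
print(sta1, sta2)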
def green_from_instaseis(self, station, channel):

    # set some parameters
    lat_sta = station['lat']
    lon_sta = station['lon']
    lat_sta = geograph_to_geocent(float(lat_sta))
    lon_sta = float(lon_sta)
    rec = instaseis.Receiver(latitude=lat_sta, longitude=lon_sta)
    point_f = float(self.config['wavefield_point_force'])

    station_id = station['net'] + '.' + station['sta'] + '..MX' + channel

    if self.config['verbose']:
        print(station_id)

    if channel not in ['Z', 'N', 'E']:
        raise ValueError("Unknown channel: %s, choose E, N, Z" % channel)

    # initialize the file
    f_out = os.path.join(self.wf_path, station_id + '.h5')

    with h5py.File(f_out, "w") as f:

        # DATASET NR 1: STATS
        stats = f.create_dataset('stats', data=(0,))
        stats.attrs['reference_station'] = station['sta']
        stats.attrs['data_quantity'] = self.data_quantity
        stats.attrs['ntraces'] = self.ntraces
        stats.attrs['Fs'] = self.Fs
        stats.attrs['nt'] = int(self.npts)
        stats.attrs['npad'] = self.npad
        if self.fdomain:
            stats.attrs['fdomain'] = True
        else:
            stats.attrs['fdomain'] = False

        # DATASET NR 2: Source grid
        f.create_dataset('sourcegrid', data=self.sourcegrid)

        # DATASET NR 3: The seismograms themselves
        if self.fdomain:
            traces = f.create_dataset('data',
                                      (self.ntraces, self.npts + 1),
                                      dtype=np.complex64)
        else:
            traces = f.create_dataset('data',
                                      (self.ntraces, self.npts),
                                      dtype=np.float32)

        for i in range(self.ntraces):
            if i % 1000 == 0 and i > 0 and self.config['verbose']:
                print('Converted %g of %g traces' % (i, self.ntraces))

            lat_src = geograph_to_geocent(self.sourcegrid[1, i])
            lon_src = self.sourcegrid[0, i]

            fsrc = instaseis.ForceSource(latitude=lat_src,
                                         longitude=lon_src,
                                         f_r=point_f)

            if self.config['synt_data'] == 'DIS':
                values = self.db.get_seismograms(source=fsrc,
                                                 receiver=rec,
                                                 dt=1. / self.Fs)
            elif self.config['synt_data'] == 'VEL':
                values = self.db.get_seismograms(source=fsrc,
                                                 receiver=rec,
                                                 dt=1. / self.Fs,
                                                 kind='velocity')
            elif self.config['synt_data'] == 'ACC':
                values = self.db.get_seismograms(source=fsrc,
                                                 receiver=rec,
                                                 dt=1. / self.Fs,
                                                 kind='acceleration')
            else:
                raise ValueError('Unknown data quantity. '
                                 'Choose DIS, VEL or ACC in configuration.')

            trace = values.select(component=channel)[0].data
            if self.filter is not None:
                trace = lfilter(*self.filter, x=trace)

            if self.fdomain:
                trace_fd = np.fft.rfft(trace[0:self.npts], n=self.npad)
                traces[i, :] = trace_fd
            else:
                traces[i, :] = trace[0:self.npts]
    return ()
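# The optional filtering above applies scipy.signal.lfilter with coefficients
# unpacked from self.filter, which implies self.filter holds an IIR (b, a)
# coefficient pair. A hedged sketch of building and applying such a filter;
# the corner frequency, order and sampling rate are made up for illustration:
import numpy as np
from scipy.signal import butter, lfilter

Fs = 1.0                                    # illustrative sampling rate in Hz
b, a = butter(4, 0.2 / (0.5 * Fs), btype='lowpass')
trace = np.random.randn(1000)
filtered = lfilter(b, a, x=trace)           # same call pattern as above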