def multiprocess_fft(inlist):
    """Worker wrapper so ``noise.noisecorr`` can be used with ``Pool.map``.

    Parameters
    ----------
    inlist : sequence
        Packed arguments ``(tr1, tr2, window_length, overlap)`` — a single
        argument because multiprocessing map functions take one picklable item.

    Returns
    -------
    Whatever ``noise.noisecorr`` returns, or ``None`` if the correlation
    raised for any reason.
    """
    # unpack the packed argument list (extra trailing items are ignored,
    # matching the original index-based access)
    tr1, tr2, window_length, overlap = inlist[:4]
    try:
        return noise.noisecorr(tr1, tr2, window_length, overlap)
    except Exception:
        # A failed day/pair must not kill the whole pool; signal the failure
        # with None. (Was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit — narrowed to Exception.)
        return None
# Example script: single-day ambient-noise cross-correlation between two
# stations (SULZ and VDL), followed by phase-velocity extraction.
import numpy as np  # was missing: `np` is used below but never imported
import matplotlib.pyplot as plt
from obspy import read
from obspy.geodetics.base import gps2dist_azimuth

import noise

# reference dispersion curve used to guide the phase-velocity picking
ref_curve = np.loadtxt("Average_phase_velocity_rayleigh")

tr1 = read("preprocessed_data/SULZ.LHZ.CH.2013.219.processed.SAC")[0]
tr2 = read("preprocessed_data/VDL.LHZ.CH.2013.219.processed.SAC")[0]
# bad example with only one day of correlation

# inter-station distance (m), azimuth and back azimuth from the SAC headers
dist, az, baz = gps2dist_azimuth(tr1.stats.sac.stla, tr1.stats.sac.stlo,
                                 tr2.stats.sac.stla, tr2.stats.sac.stlo)

freq, xcorr, n_corr_wins = noise.noisecorr(tr1, tr2,
                                           window_length=3600., overlap=0.5)

# suppress energy outside a plausible surface-wave velocity band
# (distance is passed in km, hence dist/1000)
smoothed = noise.velocity_filter(freq, xcorr, dist / 1000.,
                                 velband=(6.0, 5.0, 1.5, 0.5),
                                 return_all=False)

crossings, phase_vel = noise.extract_phase_velocity(
    freq, smoothed, dist / 1000., ref_curve,
    freqmin=0.004, freqmax=0.25, min_vel=1.5, max_vel=5.0, min_amp=0.0,
    horizontal_polarization=False, smooth_spectrum=False, plotting=True)

plt.figure(figsize=(16, 10))
plt.subplot(2, 2, 1)
plt.plot(freq, np.real(xcorr), label='original')
# Example script: cross-correlate two local MiniSEED traces and write the
# time-domain correlation back out as MiniSEED.
import numpy as np  # was missing: `np` is used below but never imported
import matplotlib.pyplot as plt
from obspy import read, Trace
from obspy.geodetics.base import gps2dist_azimuth

import noise  # was missing: `noise.noisecorr` is called below

ref_curve = np.loadtxt("avg.txt")

tr1 = read("/Users/tzompantli/Desktop/tr1.mseed")[0]
tr2 = read("/Users/tzompantli/Desktop/tr2.mseed")[0]
# bad example with only one day of correlation

# station coordinates are not available for these traces, so the geodetic
# distance computation is disabled and a fixed distance is used instead
#dist,az,baz = gps2dist_azimuth(tr1.stats.sac.stla,tr1.stats.sac.stlo,
#                               tr2.stats.sac.stla,tr2.stats.sac.stlo)
dist = 150

freq, xcorr = noise.noisecorr(tr1, tr2, window_length=30., overlap=0.3)

# velocity filter expects the distance in km, hence dist/1000
smoothed = noise.velocity_filter(freq, xcorr, dist / 1000.,
                                 cmin=.1, cmax=1.0, return_all=False)

# back to the time domain: real part of the inverse FFT, zero lag centered
cc = np.real(np.fft.fftshift(np.fft.ifft(smoothed)))

tr = Trace(data=cc)
tr.stats.sampling_rate = 50
tr.write('test.mseed', format='MSEED')
# (removed stray trailing token `dkjd`, which was a NameError waiting to happen)
def process_noise(stream, pair, comp_correlations, window_length, overlap,
                  year, julday, flog):
    """Cross-correlate one day of noise data for one station pair and append
    the result to the on-disk correlation database.

    Parameters
    ----------
    stream : obspy.Stream
        Day-long traces for (at least) the two stations of ``pair``.
    pair : tuple of str
        Station ids ``("NET.STA", "NET.STA")``; also the key into the module
        globals ``pairdict`` / ``existing_corrdays``.
    comp_correlations : iterable of str
        Two-character component codes to correlate, e.g. ``("ZZ","RR","TT")``.
    window_length : float
        Correlation window length in seconds.
    overlap : float
        Fractional overlap between consecutive windows.
    year, julday : int
        Day being processed; used for bookkeeping of already-done days.
    flog : file-like
        Open log file for diagnostic messages.

    Side effects: reads/writes pickle files located via ``getfilepath`` and
    relies on module globals (``statdict``, ``save_monthly``, ``pairdict``,
    ``existing_corrdays``) as well as ``whiten``/``onebit`` — presumably set
    by the surrounding module; TODO confirm.
    """
    global statdict
    global save_monthly
    global pairdict
    global existing_corrdays
    #print(datetime.datetime.now(),">>>>>> Processing pair",stat1,stat2,file=flog)
    if len(stream) == 0:
        print("empty stream")
        return
    stat1 = pair[0]
    stat2 = pair[1]
    net1, sta1 = stat1.split(".")
    net2, sta2 = stat2.split(".")
    # sort the correlation components, so that the unrotated ones are processed
    # first, and afterwards the rotated (unrotated traces are no longer acces-
    # sible after rotation)
    unrotated_components = []
    rotated_components = []
    for comp in comp_correlations:
        if comp[0] in 'RT' or comp[1] in 'RT':
            rotated_components.append(comp)
        else:
            unrotated_components.append(comp)
    component_list = unrotated_components + rotated_components
    # results will be saved to this dictionary
    corr_list = {}
    for components in component_list:
        corr_list[components] = {}
        corr_list[components]['spec'] = []
        corr_list[components]['no_windows'] = []
    rotated = False
    for components in component_list:
        if ((components[0] in 'NE' and components[1] in 'RT') or
                (components[0] in 'RT' and components[1] in 'NE')):
            print("mixed correlations of rotated (RT) and unrotated (NE) components is currently not supported.")
            continue
        if len(components) != 2:
            print("correlation", components, "is not valid, skipping.")
            continue
        # skip days that are already in the database for this pair/component
        if (year, julday) in existing_corrdays[pair][components]:
            continue
        # correlations of unrotated components (ZZ,NN,EE,ZN,...)
        if components[0] in 'ZNE' and components[1] in 'ZNE':
            st1 = stream.select(network=net1, station=sta1,
                                component=components[0])
            st2 = stream.select(network=net2, station=sta2,
                                component=components[1])
            if len(st1) == 0 or len(st2) == 0:
                continue
            st1, st2 = noise.adapt_timespan(st1, st2,
                                            min_overlap=window_length,
                                            interpolate=True, copystreams=True)
            if len(st1) == 0 or len(st2) == 0:
                continue
        # if R or T is in the correlation components, rotate streams
        # (only once: later rotated components reuse st1/st2 from this branch)
        elif not rotated:
            st1z = stream.select(network=net1, station=sta1, component='Z')
            st1n = stream.select(network=net1, station=sta1, component='N')
            st1e = stream.select(network=net1, station=sta1, component='E')
            st2z = stream.select(network=net2, station=sta2, component='Z')
            st2n = stream.select(network=net2, station=sta2, component='N')
            st2e = stream.select(network=net2, station=sta2, component='E')
            if (len(st1n) == 0 or len(st2n) == 0 or
                    len(st1e) == 0 or len(st2e) == 0):
                return  # abort, will not be possible to rotate to RT coords
            st1, st2 = noise.adapt_timespan((st1z + st1n + st1e),
                                            (st2z + st2n + st2e),
                                            min_overlap=window_length,
                                            interpolate=True, copystreams=True)
            if len(st1) < 2 or len(st2) < 2:
                return  # abort, will not be possible to rotate to RT coords
            # check that the time span is really the same
            if (st1[0].stats.starttime != st1[1].stats.starttime or
                    st1[0].stats.starttime != st2[0].stats.starttime or
                    st1[0].stats.starttime != st2[1].stats.starttime or
                    st1[0].stats.endtime != st1[1].stats.endtime or
                    st1[0].stats.endtime != st2[0].stats.endtime or
                    st1[0].stats.endtime != st2[1].stats.endtime or
                    st1[0].stats.endtime - st1[0].stats.starttime < window_length):
                raise Exception("this should not be possible!")
            # az = azimuth from station1 -> station2
            # baz = azimuth from station2 -> station1
            # for stream2 the back azimuth points in direction of station1
            # for stream1 the azimuth points in direction of station2
            # BUT 180. degree shift is needed so that the radial components
            # point in the same direction! otherwise they point towards each
            # other => transverse comp would be also opposed
            try:
                st1.rotate('NE->RT',
                           back_azimuth=(pairdict[pair]['az'] + 180.) % 360.)
            except Exception:
                # was a bare `except:`; also removed an unreachable `continue`
                # that followed this raise
                print("Error rotating stream", file=flog)
                print(st1, file=flog)
                raise Exception("Error rotating stream")
            try:
                st2.rotate('NE->RT', back_azimuth=pairdict[pair]['baz'])
            except Exception:
                # was a bare `except:`; also removed an unreachable `continue`
                # that followed this raise
                print("Error rotating stream", file=flog)
                print(st2, file=flog)
                raise Exception("Error rotating stream")
            rotated = True
        # timewindows that have overlapping data windows longer than window_length
        windows = []
        for trace in st1:
            window = (trace.stats.starttime, trace.stats.endtime)
            if window in windows:
                continue
            windows.append(window)
        for timewin in windows:
            corrstream1 = st1.select(component=components[0]).slice(
                starttime=timewin[0], endtime=timewin[1])
            corrstream2 = st2.select(component=components[1]).slice(
                starttime=timewin[0], endtime=timewin[1])
            # check for nan/inf in data
            data_errors = False
            for tr in (corrstream1 + corrstream2):
                if tr.stats.endtime - tr.stats.starttime < window_length:
                    # trace too short
                    data_errors = True
                if np.std(tr.data) == 0.:
                    print("data all zero", file=flog)
                    print(tr, file=flog)
                    data_errors = True
                if np.isnan(tr.data).any() or np.isinf(tr.data).any():
                    print("nan/inf in data", file=flog)
                    print(tr, file=flog)
                    data_errors = True
            if data_errors:
                continue
            # check that the time span is really the same
            if (corrstream1[0].stats.starttime != corrstream2[0].stats.starttime or
                    corrstream1[0].stats.endtime != corrstream2[0].stats.endtime):
                raise Exception("this should not be possible!")
            # finally, do the correlation
            try:
                freq, spec, wincount = noise.noisecorr(
                    corrstream1[0], corrstream2[0],
                    window_length, overlap, whiten=whiten, onebit=onebit)
            except Exception:  # was a bare `except:`
                print("could not correlate", corrstream1[0].stats.id,
                      corrstream2[0].stats.id, components)
                continue
            corr_list[components]['spec'].append(spec)
            corr_list[components]['no_windows'].append(wincount)
    # finished correlating. saving the results to a file
    # (this comment had lost its leading `#` in the original)
    for components in component_list:
        if len(corr_list[components]['spec']) == 0:
            continue
        # window-count-weighted average of all correlation windows of this day
        corr_spectrum = np.average(
            np.array(corr_list[components]['spec']), axis=0,
            weights=corr_list[components]['no_windows'])
        filepath = getfilepath(stat1, stat2, components,
                               pairdict[pair]['dist'], overlap)
        if os.path.isfile(filepath):
            # merge today's spectrum into the existing database entry
            with open(filepath, "rb") as f:
                corr_dict = pickle.load(f)
            if (year, julday) in corr_dict['corrdays']:
                print("correlation day already in database!",
                      filepath, year, julday)
                continue
            else:
                corr_dict['corrdays'].append((year, julday))
                corr_dict['spectrum'] = np.average(
                    [corr_spectrum, corr_dict['spectrum']], axis=0,
                    weights=[np.sum(corr_list[components]['no_windows']),
                             corr_dict['no_wins']])
                corr_dict['no_wins'] += np.sum(
                    corr_list[components]['no_windows'])
        else:
            # first correlation day for this pair/component: create the entry
            corr_dict = {}
            corr_dict['corrdays'] = [(year, julday)]
            corr_dict['spectrum'] = corr_spectrum
            corr_dict['freq'] = freq
            corr_dict['no_wins'] = np.sum(corr_list[components]['no_windows'])
            corr_dict['dist'] = pairdict[pair]['dist']
            corr_dict['az'] = pairdict[pair]['az']
            corr_dict['baz'] = pairdict[pair]['baz']
            corr_dict['component'] = components
            corr_dict['station1'] = statdict[stat1]
            corr_dict['station2'] = statdict[stat2]
            corr_dict['station1']['id'] = stat1
            corr_dict['station2']['id'] = stat2
        if save_monthly:
            # keep an additional per-month stack alongside the total stack
            month = str(year) + "." + str(
                UTCDateTime(year=year, julday=julday).month)
            if 'spectrum.' + month not in corr_dict:
                corr_dict['spectrum.' + month] = corr_spectrum
                corr_dict['no_wins.' + month] = np.sum(
                    corr_list[components]['no_windows'])
            else:
                corr_dict['spectrum.' + month] = np.average(
                    [corr_spectrum, corr_dict['spectrum.' + month]], axis=0,
                    weights=[np.sum(corr_list[components]['no_windows']),
                             corr_dict['no_wins.' + month]])
                corr_dict['no_wins.' + month] += np.sum(
                    corr_list[components]['no_windows'])
        with open(filepath, "wb") as f:
            pickle.dump(corr_dict, f)
    #print("successfully correlated",stat1,stat2,"comp:",corrcomps,"day:",year,julday)
    return