def main():
    """
    define main function
    :return:
    """
    rffile = '/g/data/ha3/am7399/shared/OA-ZRT-R-cleaned.h5'
    output_folder = '/g/data/ha3/am7399/shared/OA_piercing'

    s = read_rf(rffile, 'H5')
    g = Geometry(start_lat_lon=(-17.4, 132.9), azimuth=80,
                 lengthkm=1000, nx=100, widthkm=450, ny=45, depthkm=75, nz=375, debug=False)
    #g = Geometry(start_lat_lon=(-18.35, 138.45), azimuth=90,
    #             lengthkm=450, nx=45, widthkm=350, ny=35, depthkm=100, nz=500, debug=False)
    m = Migrate(geometry=g, stream=s, debug=False, output_folder=output_folder)
    m.execute()

    return
def test_slice2(self):
    stream = read_rf()
    endtimes = [tr.stats.endtime for tr in stream]
    stream = stream.slice2(-50, -20, 'endtime')
    for t0, tr in zip(endtimes, stream):
        self.assertEqual(tr.stats.starttime - t0, -50)
        self.assertEqual(tr.stats.endtime - t0, -20)
def test_trim2(self):
    stream = read_rf()
    starttimes = [tr.stats.starttime for tr in stream]
    stream.trim2(50, 100, 'starttime')
    for t0, tr in zip(starttimes, stream):
        self.assertEqual(tr.stats.starttime - t0, 50)
        self.assertEqual(tr.stats.endtime - t0, 100)
def test_io_header(testcase, stream, ignore=()):
    for format in FORMATS:
        stream1 = stream.copy()
        suffix = '.' + format.upper()
        if format == 'sh':
            format = 'q'
            suffix = '.QHD'
        elif format == 'h5':
            for tr in stream1:
                tr.stats.pop('sac', None)
        with NamedTemporaryFile(suffix=suffix) as ft:
            fname = ft.name
            stream1.write(fname, format.upper())
            stream2 = read_rf(fname)
        st1 = stream1[0].stats
        st2 = stream2[0].stats
        for head in _HEADERS:
            if head in st1 and head not in ignore:
                testcase.assertIn(head, st2)
                msg = ("AssertionError for header '%s' with format '%s': "
                       "%s and %s not equal within 2 places")
                testcase.assertAlmostEqual(
                    st1[head], st2[head], 2,
                    msg=msg % (head, format, st1[head], st2[head]))
        if len(ignore) == 0 or format != 'q':
            testcase.assertEqual(stream1[0].id, stream2[0].id)
def read_h5_rf(src_file, network=None, station=None, loc='', root='/waveforms'):
    """Helper function to load data from an hdf5 file generated by the rf library or by
    the script `rf_quality_filter.py`. For faster loading time, a particular network
    and station may be specified.

    :param src_file: File from which to load data
    :type src_file: str or Path
    :param network: Specific network to load, defaults to None
    :type network: str, optional
    :param station: Specific station to load, defaults to None
    :type station: str, optional
    :param loc: Location code, defaults to '' (empty)
    :type loc: str, optional
    :param root: Root path in hdf5 file where to start looking for data, defaults to '/waveforms'
    :type root: str, optional
    :return: All the loaded data in a rf.RFStream container.
    :rtype: rf.RFStream
    """
    logger = logging.getLogger(__name__)
    if (network is None and station is not None) or (network is not None and station is None):
        logger.warning("network and station should both be specified - IGNORING incomplete specification")
        group = root
    elif network and station:
        group = root + '/{}.{}.{}'.format(network.upper(), station.upper(), loc.upper())
    else:
        group = root
    # end if
    rf_data = rf.read_rf(src_file, format='h5', group=group)
    return rf_data
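# Usage sketch for read_h5_rf above. The file name, network and station codes are
# hypothetical placeholders for illustration, not values from the original source.
import logging
logging.basicConfig(level=logging.INFO)

rf_stream = read_h5_rf('OA_rf_data.h5', network='OA', station='BT23')  # reads group '/waveforms/OA.BT23.'
print('Loaded {} traces'.format(len(rf_stream)))

# Omitting both network and station loads everything under the '/waveforms' root group;
# specifying only one of the two logs a warning and also falls back to the full group.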
def main(rf_h5_file, output_folder, start_lat_lon, azimuth, dimensions, num_cells, debug):
    """Perform 3D migration of RFs to volumetric space, stacking RF amplitudes in each cell.

    Example usage:
        python rf_3dmigrate.py --start-lat-lon -17.4 132.9 --azimuth 80 --dimensions 1000 450 75 \
            --num-cells 100 45 375 /g/data/ha3/am7399/shared/OA-ZRT-R-cleaned.h5 \
            /g/data/ha3/am7399/shared/OA_piercing

    The script produces text data files which are converted to visualization using experimental
    ipython notebook `sandbox/plot_3dmigrate.ipynb`.

    :param rf_h5_file: Source file containing receiver functions
    :type rf_h5_file: str or Path
    :param output_folder: Folder in which to output results
    :type output_folder: str or Path
    """
    s = read_rf(rf_h5_file, 'H5')
    g = Geometry(start_lat_lon=start_lat_lon, azimuth=azimuth,
                 lengthkm=dimensions[0], nx=num_cells[0],
                 widthkm=dimensions[1], ny=num_cells[1],
                 depthkm=dimensions[2], nz=num_cells[2], debug=debug)
    m = Migrate(geometry=g, stream=s, debug=False, output_folder=output_folder)
    m.execute()
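# A direct-call sketch equivalent to the command-line example in the docstring above,
# using the same values. This assumes main() is a plain function as shown here; any CLI
# decorator wiring from the original script is not reproduced.
if __name__ == '__main__':
    main(rf_h5_file='/g/data/ha3/am7399/shared/OA-ZRT-R-cleaned.h5',
         output_folder='/g/data/ha3/am7399/shared/OA_piercing',
         start_lat_lon=(-17.4, 132.9),
         azimuth=80,
         dimensions=(1000, 450, 75),
         num_cells=(100, 45, 375),
         debug=False)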
def plot_RF_profile(profilefileloc, destination="./",
                    trimrange=(int(inpRFdict['rf_display_settings']['trim_min']),
                               int(inpRFdict['rf_display_settings']['trim_max']))):
    logger = logging.getLogger(__name__)
    logger.info("--> Plotting the RF profile")
    # plt.style.use('classic')
    for azimuth in [0, 90]:
        inpfiles = glob.glob(
            profilefileloc +
            f"{str(inpRFdict['filenames']['rfprofile_compute_result_prefix'])}{azimuth}_*.h5")
        if len(inpfiles):
            for inpfile in inpfiles:
                logger.info(f"----> RF profile {inpfile}")
                pstream = read_rf(inpfile)
                divparam = inpfile.split("_")[-4:-1]
                divsuffix = inpfile.split("_")[-1].split(".")[0]
                pstream.trim2(trimrange[0], trimrange[1], 'onset')
                for chn in ['L', 'Q']:
                    outputimage = destination + f"{chn}_{azimuth}_{divsuffix}_profile.png"
                    if not os.path.exists(outputimage):
                        plt.figure()
                        pstream.select(channel='??' + chn).normalize().plot_profile(
                            scale=1.5, top='hist', fillcolors=('r', 'b'))
                        plt.gcf().set_size_inches(15, 10)
                        plt.title(f'Channel: {chn} Azimuth {azimuth}')
                        plt.savefig(outputimage, dpi=200, bbox_inches='tight')
                        logger.info(f"------> Output image: {outputimage}")
                        plt.close('all')
def test_moveout_vs_XY(self):
    stream = read_rf()[:1]
    stream._write_test_header()
    stream.decimate(10)
    N = len(stream[0])
    t = np.linspace(0, 20 * np.pi, N)
    stream[0].data = np.sin(t) * np.exp(-0.04 * t)
    stream[0].stats.slowness = 4.0
    stream1 = stream.copy()
    stream2 = stream.copy()
    stream3 = stream.copy()
    stream3[0].stats.slowness = 9.0
    stream4 = stream3.copy()
    stream5 = stream.copy()
    stream6 = stream.copy()
    stream7 = stream.copy()
    stream8 = stream.copy()
    stream9 = stream.copy()
    stream10 = stream.copy()
    stream1.moveout()
    stream3.moveout()
    stream5.moveout(phase='Ppps')
    stream7.moveout(phase='Ppss')
    stream9.moveout(phase='Psss')
    # stream2._moveout_xy()
    # print(repr(stream2[0].data))
    # stream4._moveout_xy()
    # print(repr(stream4[0].data))
    # stream6._moveout_xy(phase='Ppps')
    # print(repr(stream6[0].data))
    # stream8._moveout_xy(phase='Ppss')
    # print(repr(stream8[0].data))
    # stream10._moveout_xy(phase='Psss')
    # print(repr(stream10[0].data))
    stream2[0].data = XY_PSMOUT_REF4
    stream4[0].data = XY_PSMOUT_REF9
    stream6[0].data = XY_PPPSMOUT_REF4
    stream8[0].data = XY_PPSSMOUT_REF4
    stream10[0].data = XY_PSSSMOUT_REF4
    np.testing.assert_array_almost_equal(stream1[0].data, stream2[0].data, decimal=2)
    np.testing.assert_array_almost_equal(stream3[0].data, stream4[0].data, decimal=2)
    np.testing.assert_array_almost_equal(stream5[0].data, stream6[0].data, decimal=2)
    np.testing.assert_array_almost_equal(stream7[0].data, stream8[0].data, decimal=2)
    np.testing.assert_array_almost_equal(stream9[0].data, stream10[0].data, decimal=2)
def plot_RF(dataRFfileloc, destImg, fig_frmt="png"):
    logger = logging.getLogger(__name__)
    logger.info("--> Plotting the receiver functions")
    rffiles = glob.glob(
        dataRFfileloc + f"*-{str(inpRFdict['filenames']['rf_compute_data_suffix'])}.h5")
    for i, rffile in enumerate(rffiles):
        stream = read_rf(rffile, 'H5')
        outfigname1 = destImg + f"{stream[0].stats.station}" + '_L.' + fig_frmt
        outfigname2 = destImg + f"{stream[0].stats.station}" + '_Q.' + fig_frmt
        if not os.path.exists(outfigname1) and not os.path.exists(outfigname2):
            kw = {
                'trim': (int(inpRFdict['rf_display_settings']['trim_min']),
                         int(inpRFdict['rf_display_settings']['trim_max'])),
                'fillcolors': ('black', 'gray'),
                'trace_height': float(inpRFdict['rf_display_settings']['trace_height'])
            }
            if str(inpRFdict['rf_display_settings']['rf_info']) == "default":
                kw['info'] = (('back_azimuth', u'baz (°)', 'C0'),
                              ('distance', u'dist (°)', 'C3'))
            else:
                kw['info'] = None
            num_trace = len(stream.select(component='L',
                                          station=stream[0].stats.station).sort(['back_azimuth']))
            if num_trace > 0:
                try:
                    stream.select(component='L',
                                  station=stream[0].stats.station).sort(['back_azimuth']).plot_rf(**kw)
                    plt.savefig(destImg + f"{stream[0].stats.station}" + '_L.' + fig_frmt)
                    plt.close('all')
                    stream.select(component='Q',
                                  station=stream[0].stats.station).sort(['back_azimuth']).plot_rf(**kw)
                    plt.savefig(destImg + f"{stream[0].stats.station}" + '_Q.' + fig_frmt)
                    plt.close('all')
                    logger.info("----> Plotting RF {}/{}, {}-{} Traces: {}".format(
                        i + 1, len(rffiles), stream[0].stats.network,
                        stream[0].stats.station, num_trace))
                except Exception as e:
                    logger.error("Unexpected error", exc_info=True)
            else:
                logger.info("----> {} traces for {}-{}".format(
                    num_trace, stream[0].stats.network, stream[0].stats.station))
def test_io_format(format):
    stream1 = stream.copy()
    suffix = '.' + format.upper()
    if format == 'sh':
        format = 'q'
        suffix = '.QHD'
    with NamedTemporaryFile(suffix=suffix) as ft:
        fname = ft.name
        stream1.write(fname, format.upper())
        stream2 = read_rf(fname)
    st2 = stream2[0].stats
    self.assertNotIn('event_time', st2)
def test_polarity_R_component(self):
    """issue #4"""
    stream = read_rf()
    rfstats(stream)
    stream.filter('bandpass', freqmin=0.5, freqmax=2)
    stream.trim2(10, 110, reftime='starttime')
    stream.rf(rotate='NE->RT')
    for tr in stream.select(component='R'):
        onset = tr.stats.onset - tr.stats.starttime
        dt = tr.stats.delta
        self.assertAlmostEqual(tr.data.argmax() * dt - onset, 0, delta=0.01)
def main(rf_file, output_file, start_latlon, end_latlon, width, spacing, max_depth,
         stacked_scale, channels, title=None):
    # rf_file is the clean H5 file of ZRT receiver functions, generated by rf_quality_filter.py
    channels = channels.split(',')

    # Range of stacked amplitude for imshow to get best contrast
    vmin, vmax = (-stacked_scale, stacked_scale)

    output_file_base, ext = os.path.splitext(output_file)
    if ext != ".png":
        output_file += ".png"

    print("Reading HDF5 file...")
    stream = rf.read_rf(rf_file, 'H5')

    matrix_norm, sample_density, length, stn_params = \
        ccp_generate(stream, start_latlon, end_latlon, width=width, spacing=spacing,
                     max_depth=max_depth, channels=channels,
                     station_map_file=output_file_base + '_MAP.png')

    if matrix_norm is not None:
        plot_ccp(matrix_norm, length, max_depth, spacing, ofile=output_file,
                 vlims=(vmin, vmax), metadata=stn_params, title=title)

    if sample_density is not None:
        sample_density_file = output_file_base + '_SAMPLE_DENSITY.png'
        # Use median of number of events per station to set the scale range.
        sc = sorted([s['event_count'] for s in stn_params.values() if s is not None])
        median_samples = sc[len(sc) // 2]
        plot_ccp(sample_density, length, max_depth, spacing, ofile=sample_density_file,
                 vlims=(0, median_samples), metadata=stn_params,
                 title=title + ' [sample density]' if title else None)
def test_io_format(format):
    stream1 = stream.copy()
    suffix = '.' + format.upper()
    if format == 'sh':
        format = 'q'
        suffix = '.QHD'
    with NamedTemporaryFile(suffix=suffix) as ft:
        fname = ft.name
        stream1.write(fname, format.upper())
        stream2 = read_rf(fname)
    st1 = stream1[0].stats
    st2 = stream2[0].stats
    for head in HEADERS:
        self.assertAlmostEqual(st1[head], st2[head], 4, msg=head)
    self.assertEqual(stream1[0].id, stream2[0].id)
def main():
    """
    define main function
    :return:
    """
    #rffile = '/home/rakib/work/pst/rf/notebooks/rf_pt15_to5Hz.h5'
    rffile = '/media/data/work/GA/rf/rf_pt15_to5Hz.h5'

    s = read_rf(rffile, 'H5')
    g = Geometry(start_lat_lon=(-18.75, 138.15), azimuth=80,
                 lengthkm=450, nx=45, widthkm=350, ny=35, depthkm=100, nz=500, debug=False)
    #g = Geometry(start_lat_lon=(-18.35, 138.45), azimuth=90,
    #             lengthkm=450, nx=45, widthkm=350, ny=35, depthkm=100, nz=500, debug=False)
    m = Migrate(geometry=g, stream=s, debug=False)
    m.execute()

    return
def test_io_header(self):
    def test_io_format(format):
        stream1 = stream.copy()
        suffix = '.' + format.upper()
        if format == 'sh':
            format = 'q'
            suffix = '.QHD'
        with NamedTemporaryFile(suffix=suffix) as ft:
            fname = ft.name
            stream1.write(fname, format.upper())
            stream2 = read_rf(fname)
        st1 = stream1[0].stats
        st2 = stream2[0].stats
        for head in HEADERS:
            self.assertAlmostEqual(st1[head], st2[head], 4, msg=head)
        self.assertEqual(stream1[0].id, stream2[0].id)

    stream = read_rf()[:1]
    for tr in stream:
        tr.stats.location = '11'
    stream._write_test_header()
    for format in FORMATHEADERS:
        test_io_format(format)
def test_deconvolution(self):
    ms = rf.read_rf()
    ms.decimate(10)
    for i in range(len(ms)):
        ms[i].stats.channel = ms[i].stats.channel[:2] + 'LQT'[i]
    t = np.linspace(0, 30, len(ms[0]))
    hann1 = get_window('hann', 10)
    hann2 = get_window('hann', 50)
    ms[0].data[:] = 0
    ms[0].data[40:50] = hann1
    ms[0].data[50:60] = -hann1
    ms[1].data[:] = 0
    ms[1].data[100:150] = hann2
    ms[1].data[240:290] = hann2
    ms_orig = ms.copy()
    data3 = convolve(ms[1].data, ms[0].data, 'full')[50:350] / np.sum(np.abs(ms[0].data))
    ms[1].data = data3
    ms[2].data = -ms[1].data
    for tr in ms:
        tr.stats.sampling_rate = 1
        tr.stats.onset = tr.stats.starttime + 40
    ms.deconvolve(deconvolve_method='time')
def main(rf_file, output_file, start_latlon, end_latlon, width, spacing, max_depth,
         channels, background_model, stacked_scale=None, title=None):
    # rf_file is the clean H5 file of ZRT receiver functions, generated by rf_quality_filter.py
    print("Reading HDF5 file...")
    stream = rf.read_rf(rf_file, 'H5')

    output_file_base, ext = os.path.splitext(output_file)
    if ext != ".png":
        output_file += ".png"
    # endif

    colormap = 'jet'
    fig, fig_map, _ = run(stream, start_latlon, end_latlon, width, spacing, max_depth,
                          channels, background_model, stacked_scale, title, colormap=colormap)

    if fig is not None:
        fig.savefig(output_file, dpi=300)
        plt.close(fig)
    # endif

    if fig_map is not None:
        station_map_file = output_file_base + '_MAP.png'
        fig_map.savefig(station_map_file, dpi=300)
        plt.close(fig_map)
#-------------Main---------------------------------
if __name__ == '__main__':
    '''
    This program composes vespagrams to identify RF converted phases and their multiples
    please refer to Tian et al. GRL 2005 VOL. 32, L08301, doi:10.1029/2004GL021885 for good examples

    input - H5 file with receiver functions
    output - PDF files to print

    Dependencies - rf and obspy packages beside other standard python packages
    The image is composed using triangulation. It gives good results but block median or mean
    must be implemented at some stage to reduce size of PDF.
    '''
    stream = rf.read_rf('/g/data/ha3/am7399/shared/OA-ZRT-R-cleaned.h5', 'H5')
    rf_type = 'LQT-Q '
    filter_type = 'bandpass'
    freqmin = 0.03
    freqmax = 0.5
    # we use a zero-phase-shift band-pass filter using 2 corners. This is done in two runs
    # forward and backward, so we end up with 4 corners de facto.

    # Lets assume we have LQT orientation
    selected_stream = stream.select(component='Q').filter(
        filter_type, freqmin=freqmin, freqmax=freqmax, corners=2, zerophase=True).interpolate(10)

    # if none lets try ZRT
    if len(selected_stream) <= 0:
        selected_stream = stream.select(component='R').filter(filter_type, freqmin=freqmin, freqmax=freqmax,
# AG
import os.path
# import matplotlib.pyplot as plt
# import numpy as np
from rf import read_rf, RFStream
from rf import IterMultipleComponents
# from rf.imaging import plot_profile_map
# from rf.profile import profile
from tqdm import tqdm

data = read_rf('DATA/7X-event_waveforms_for_rf.h5', 'H5')

# exclude bad stations
inc_set = list(set([tr.stats.inclination for tr in data]))
data_filtered = RFStream([tr for tr in data if tr.stats.inclination in inc_set
                          and tr.stats.station not in ['MIJ2', 'MIL2']])

stream = RFStream()
for stream3c in tqdm(IterMultipleComponents(data, 'onset', 3)):
    stream3c.detrend('linear').resample(100)
    stream3c.taper(0.01)
    stream3c.filter('bandpass', freqmin=0.01, freqmax=15)
    if len(stream3c) != 3:
        continue
    a1 = stream3c[0].stats['asdf']
    a2 = stream3c[1].stats['asdf']
    a3 = stream3c[2].stats['asdf']
    stream3c[0].stats['asdf'] = []
    stations = []
    for station in stream:
        stations.append([station.stats.station.encode('utf-8'),
                         station.stats.station_longitude,
                         station.stats.station_latitude])
    return np.unique(np.array(stations), axis=0)


#-------------Main---------------------------------
if __name__ == '__main__':
    """
    It is an example of how to plot nice maps
    """
    import rf

    streams = rf.read_rf('DATA/7X-LQT-Q-cleaned.h5', 'H5')

    # Lets see intersection of rays at 50km depth
    ppoints = streams.ppoints(50.)

    # initialization of map
    m = plot_map(ppoints)

    # RF package uses lat,lon meanwhile others use lon,lat notion
    lon, lat = m(ppoints[:, 1], ppoints[:, 0])
    plt.plot(lon, lat, 'bx', markersize=5, markeredgewidth=0.1)

    # Now lets plot stations coordinates
    coordinates = get_stations(streams)
    if coordinates.ndim == 1:
import rf
import geopy
from geopy.distance import VincentyDistance, distance
from shapely.geometry import LineString, Point
from rf_tools import *
from latexify import latexify

# IMPORT PASSIVE SEISMIC DATA
#eqdir='/Users/brookkeats/Dropbox/Documents/Oxford_DPhil/Misc_data_files/RF_data/'
eqdir = '/Users/brookkeats/Documents/DPhil/Data/RF_data/'
#eqfile='PI_netw_M5.5+v2_raw_data.h5'
#eqfile='PI_netw_M5.5+v2_instr_corr_data.h5'
eqfile = 'NCMS_ASU_M5.5+instr_corr_data.h5'

eq_st = rf.read_rf(eqdir + eqfile)

# PRE PROCESS DATA
eq_st.filter('bandpass', freqmin=1. / 20, freqmax=4., corners=2, zerophase=True)
#eq_st.filter('lowpass', freq=4., corners=2, zerophase=True)

# FILTER EVENTS BY SNR
SNR_llim = 2
snr_eq_st = SNR_st_filter(eq_st, SNR_llim=SNR_llim)

# CALCULATE RECEIVER FUNCTIONS
fmax = 1
def test_read_rf(self):
    self.assertIsInstance(read_rf(), RFStream)
    # stream3c[1].data=stream3c[0].data*(amax['amax']/np.amax(stream3c[0].data))

    amax = {'amax': np.amax(stream3c[2].data)}
    stream3c[2].stats['asdf'] = a3
    stream3c[2].stats.update(amax)
    # stream3c[2].filter('bandpass', freqmin=0.03, freqmax=1.00, corners=2, zerophase=True)
    # stream3c[2].data=stream3c[0].data*(amax['amax']/np.amax(stream3c[0].data))

    stream3c.trim2(-25, 75, 'onset')
    # print np.max(stream3c[0].data), np.max(stream3c[1].data), np.max(stream3c[2].data)
    return stream3c


print "Lets start the show..."
#data = read_rf('DATA/7X-event_waveforms_for_rf.h5', 'H5')
data = read_rf('DATA/7X-MA12.h5', 'H5')
print "Data in..."

'''
# we can exclude bad stations
inc_set = list(set([tr.stats.inclination for tr in data]))
data_filtered = RFStream([tr for tr in data if tr.stats.inclination in inc_set
                          and tr.stats.station not in ['MIJ2', 'MIL2']])
'''

stream = RFStream()

rf_streams = Parallel(n_jobs=-1, verbose=1)(map(delayed(do_rf),
                                                IterMultipleComponents(data, 'onset', 3)))

for i, rf in enumerate(rf_streams):
    event_id = {'event_id': 0}
def calc_h_kappa(vp=6.3, p=0.06, w1=0.75, w2=0.25, outfile="h-kappa-values.txt",
                 data_dir_loc="../results/dataRF", outloc="./"):
    f = open(outloc + outfile, 'w')
    data_files = glob.glob(data_dir_loc + "/*-rf_profile_rfs.h5")
    for data in data_files:
        network = ntpath.basename(data).split('-')[0]
        station = ntpath.basename(data).split('-')[1]
        st = read_rf(data)
        st = st.select(component="L")
        len_trace_list = []
        for tr in st:
            lentr = tr.stats.npts
            len_trace_list.append(lentr)
        if len(set(len_trace_list)) > 1:
            continue
        st = st.stack()
        for index, trace in enumerate(st):
            errorphase = False
            nbphase = 0
            [xpeaks, ypeaks] = [], []
            trace.filter('bandpass', freqmin=0.005, freqmax=2)
            t = trace.stats.starttime
            pps = trace.stats.sampling_rate
            trace.trim(t + 24, t + 44)
            xpeaks, ypeaks = find_peaks(trace, height=0.02, distance=50)
            if len(xpeaks) > 2:
                if len(xpeaks) < 5:
                    # print('nb of peaks =', len(xpeaks))
                    plt.plot(trace)
                    plt.plot(xpeaks, trace[xpeaks], "x")
                    plt.plot(np.zeros_like(trace), "--", color="gray")
                    t0 = xpeaks[0] / pps
                    t1 = xpeaks[1] / pps
                    t2 = xpeaks[2] / pps
                    t3 = None  # t3 is only defined when a fourth peak exists
                    if len(xpeaks) > 3:
                        t3 = xpeaks[3] / pps
                    if t0 < 2.5 and t0 > 0:
                        t0 = t0
                    else:
                        t0 = np.NaN
                        errorphase = True
                    if t1 < 7.0 and t1 > 2.6:
                        t1 = t1
                    else:
                        t1 = np.NaN
                        errorphase = True
                    if t2 < 14.0 and t2 > 7.5:
                        t2 = t2
                    else:
                        if t3 and t3 < 14.0 and t2 > 7.5:
                            t2 = t3
                        else:
                            t2 = np.NaN
                            errorphase = True
                    try:
                        if w1 + w2 != 1:
                            raise ValueError('Weights are not properly defined')
                    except ValueError as e:
                        exit(str(e))

                    # Measure the difference between theory and data:
                    if not errorphase:
                        numpoints = 1000
                        hs = np.linspace(20, 40, numpoints)
                        Kappas = np.linspace(1.5, 2.5, numpoints)
                        H, K = np.meshgrid(hs, Kappas)
                        depth1 = (t1 - t0) / (np.sqrt((K / vp)**2 - (p)**2) -
                                              np.sqrt((1 / vp)**2 - (p)**2))
                        depth2 = (t2 - t0) / (np.sqrt((K / vp)**2 - (p)**2) +
                                              np.sqrt((1 / vp)**2 - (p)**2))
                        deltas = np.absolute((w1 * depth1 + w2 * depth2) - H)

                        ## VISUALIZATION
                        # fig, ax = plt.subplots(2, 2, figsize=(8, 6), gridspec_kw={"width_ratios": [1, 0.05]})
                        fig = plt.figure()
                        delta_lvs = np.linspace(np.amin(deltas), np.amax(deltas), 30)
                        fig, axes = plt.subplots(nrows=2, ncols=1)
                        cmap = plt.get_cmap('rainbow_r')
                        CS = axes[0].contourf(H, K, deltas, levels=delta_lvs, cmap=cmap)
                        result = np.where(deltas == np.amin(deltas))
                        axes[0].plot(H[result], K[result], 'ko')
                        f.write(
                            f"{network},{station},{trace.stats.station_latitude:.4f},{trace.stats.station_longitude:.4f},{H[result][0]:.2f},{K[result][0]:.2f}\n")
                        # axes[0].clabel(CS, inline=1, fontsize=10, fmt='%2.1f', colors='w')
                        axes[0].set_title(r'$H$-$\kappa$ grid search')
                        axes[0].set_xlabel('H')
                        axes[0].set_ylabel(r'$\kappa$')
                        times_data = np.arange(0, len(trace.data)) / pps
                        axes[1].plot(times_data, trace.data)
                        axes[1].plot(xpeaks / pps, trace[xpeaks], "x")
                        axes[1].annotate('P', (t0 + 0.2, trace.data[np.where(times_data == t0)]),
                                         textcoords='data', size=10)
                        axes[1].annotate('PS', (t1, trace.data[np.where(times_data == t1)]),
                                         textcoords='data', size=10)
                        axes[1].annotate('PpPs', (t2, trace.data[np.where(times_data == t2)]),
                                         textcoords='data', size=10)
                        plt.tight_layout()
                        fig.subplots_adjust(right=0.82)
                        cbar_ax = fig.add_axes([0.85, 0.56, 0.03, 0.36])
                        fig.colorbar(CS, cax=cbar_ax)
                        plt.savefig(outloc + f'H-K_{network}-{station}-h-k_outfile-{index}.png')
                # else:
                #     print('bad peaks')
    f.close()
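# Note on the grid search in calc_h_kappa above: for a single-layer crust with P velocity vp,
# ray parameter p (s/km) and Vp/Vs ratio kappa, the Ps and PpPs delay times relative to the
# direct P arrival each predict a crustal thickness
#
#     H_Ps   = t_Ps   / (sqrt((kappa/vp)**2 - p**2) - sqrt((1/vp)**2 - p**2))
#     H_PpPs = t_PpPs / (sqrt((kappa/vp)**2 - p**2) + sqrt((1/vp)**2 - p**2))
#
# and the function picks the (H, kappa) cell where the weighted combination
# w1*H_Ps + w2*H_PpPs best matches H. A minimal standalone sketch of that prediction,
# using the same algebra as depth1/depth2 above (function and argument names are
# illustrative only, not from the original source):
import numpy as np

def predicted_thickness(t_ps, t_ppps, vp=6.3, p=0.06, kappa=1.8):
    """Predicted crustal thickness (km) from Ps and PpPs delay times (s)."""
    slow_s = np.sqrt((kappa / vp) ** 2 - p ** 2)   # vertical S slowness
    slow_p = np.sqrt((1.0 / vp) ** 2 - p ** 2)     # vertical P slowness
    h_ps = t_ps / (slow_s - slow_p)
    h_ppps = t_ppps / (slow_s + slow_p)
    return h_ps, h_ppps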
def main(input_h5_file, output_pdf_file):
    '''
    This program composes vespagrams to identify RF converted phases and their multiples
    please refer to Tian et al. GRL 2005 VOL. 32, L08301, doi:10.1029/2004GL021885 for good examples

    input - H5 file with receiver functions
    output - PDF files to print

    Dependencies - rf and obspy packages beside other standard python packages
    The image is composed using triangulation. It gives good results but block median or mean
    must be implemented at some stage to reduce size of PDF.
    '''
    stream = rf.read_rf(input_h5_file, 'H5')
    rf_type = 'LQT-Q '
    filter_type = 'bandpass'
    freqmin = 0.03
    freqmax = 0.5
    # we use a zero-phase-shift band-pass filter using 2 corners. This is done in two runs
    # forward and backward, so we end up with 4 corners de facto.

    # Lets assume we have LQT orientation
    selected_stream = stream.select(component='Q').filter(
        filter_type, freqmin=freqmin, freqmax=freqmax, corners=2, zerophase=True).interpolate(10)

    # if none lets try ZRT
    if not selected_stream:
        selected_stream = stream.select(component='R').filter(
            filter_type, freqmin=freqmin, freqmax=freqmax, corners=2, zerophase=True).interpolate(10)
        rf_type = 'ZRT-R '
    # end if

    if not selected_stream:
        print("Tried Q and R components but neither found, quitting...")
        exit(-1)
    # end if

    station_list = []

    # here we collect station names but maybe ID is more appropriate in case of having
    # the same station names in different deployments
    for tr in selected_stream:
        station_list.append(tr.stats.station)
        net = tr.stats.network
    # end for

    pdf = PdfPages(output_pdf_file)
    case_description = rf_type + filter_type + ' ' + str(freqmin) + '-' + str(freqmax) + ' Hz'
    pdf.attach_note(case_description)
    d = pdf.infodict()
    d['Title'] = rf_type + 'RF vespagrams of ' + net + ' network'
    d['Keywords'] = case_description

    station_list = np.unique(np.array(station_list))
    print("Gathered ", len(station_list), " stations")

    # Define layout of the page outer_grid
    columns = 3
    rows = 2
    frame = 0
    figure = 1

    # ------------------------------------------
    # Main loop here over all stations
    for i, station in enumerate(station_list):
        if frame == 0:
            printed = False
            fig = plt.figure(figsize=(11.69, 8.27))
            outer_grid = gridspec.GridSpec(columns, rows, wspace=0.2, hspace=0.2)
        # end if

        print("Station ", station, i + 1, " of ", station_list.shape[0])
        traces = selected_stream.select(station=station)
        print('Contains: ', len(traces), ' events')

        # we choose short RF to simplify and speed up the processing
        # from -5 to 20 seconds and slowness range from 5 to 9 s/deg
        # its enough to see multiples and possible LAB conversion at ~19 sec (~160km)
        traces = traces.trim2(-5, 20, 'onset')
        moved = []
        slow = []
        for tr in traces:
            tr.normalize()
            # This 'if' block is designed to check correct data placement on vespagram to
            # trace the logic (debugging purposes)
            DEBUG_PLACEMENT = False
            if DEBUG_PLACEMENT and (tr.stats.slowness > 6.) and (tr.stats.slowness < 7.):
                print('altered')
                data = tr.data.copy()
                print(data.shape, tr.stats.delta)
                # 500 below corresponds to 0 with sampling rate of 100Hz
                # TODO: !Change these array indices to be computed, not magic numbers!
                data[500:800] = 1.
                moved.append(data)
            else:
                moved.append(tr.data.copy() / np.max(np.abs(tr.data)))
                slow.append(tr.stats.slowness)
            # end if
        # end for

        print("Slowness min and max: ", np.min(slow), np.max(slow))
        slow.append(np.min(slow) - 0.1)
        moved.append(np.zeros(traces[0].data.shape))
        slow.append(np.max(slow) + 0.1)
        moved.append(np.zeros(traces[0].data.shape))
        slow = np.array(slow)
        idx = np.argsort(slow)
        moved = np.nan_to_num(np.array(moved))
        # moved = np.array(moved)
        slow = slow[idx]
        moved = moved[idx, :]
        z = moved.copy()

        # Some simple stacking to reduce data size on the image, this block can be safely commented out
        idx = []
        idx.append(True)  # first row with zeroes
        slo_cum = 0.
        elements = 1
        for j in xrange(1, slow.shape[0] - 2):
            if np.abs(slow[j + 1] - slow[j]) < 0.1 and slo_cum < 0.2:
                slow[j + 1] = (slow[j] + slow[j + 1]) / 2.
                moved[j, :] = moved[j, :] * elements
                moved[j + 1, :] = np.sum(moved[j:j + 2, :], axis=0) / (elements + 1)
                elements = elements + 1
                idx.append(False)
                slo_cum = slo_cum + np.abs(slow[j + 1] - slow[j])
            else:
                idx.append(True)
                slo_cum = 0
                elements = 1
            # end if
        # end for

        idx.append(True)  # before last
        idx.append(True)  # last row with zeroes
        idx = np.array(idx)
        print(idx.shape, slow.shape, moved.shape)
        slow = slow[idx]
        moved = moved[idx, :]
        z = moved.copy()
        # ------------------------------ end of stacking ------------------------

        # print('minmax', np.min(z), np.max(z))
        x = np.array(list(range(moved.shape[1]))) * traces[0].stats.delta - 5.
        x = np.repeat([x], moved.shape[0], axis=0)
        y = np.ones((moved.shape[0], moved.shape[1]))

        phase_Ps = []
        phase_Pms = []
        phase_PpPmS = []
        phase_PpSmS = []
        phase_slow = []
        # basin part
        phase_Pbs = []
        phase_PpPbs = []

        for j in xrange(slow.shape[0]):
            y[j, :] = y[j, :] * slow[j]
            phase_Ps.append(simple_model.calculate_delay_times(slow[j], phase='PS'))
            phase_Pms.append(simple_model.calculate_delay_times(slow[j], phase='PmS'))
            phase_PpPmS.append(simple_model.calculate_delay_times(slow[j], phase='PpPmS'))
            phase_PpSmS.append(simple_model.calculate_delay_times(slow[j], phase='PpSmS'))
            phase_slow.append(np.ones(phase_Ps[-1].shape[0]) * slow[j])
            # basin, we will use reflection at the top layer only
            if zb.size > 0:
                phase_Pbs.append(basin_model.calculate_delay_times(slow[j], phase='PS'))
                phase_PpPbs.append(basin_model.calculate_delay_times(slow[j], phase='PpPmS'))
            # end if
        # end for

        xi = np.linspace(-5, 20, 200)
        yi = np.linspace(0, 9, 400)

        # Gridding the data using triangulation. standard gridding doesn't work well here
        triang = tri.Triangulation(x.flatten(), y.flatten())
        interpolator = tri.LinearTriInterpolator(triang, z.flatten())
        xi, yi = np.meshgrid(xi, yi)
        zi = interpolator(xi, yi)

        # Define two plots as inner_grid to place them inside one cell of outer_grid
        inner_grid = gridspec.GridSpecFromSubplotSpec(2, 1, subplot_spec=outer_grid[frame],
                                                      wspace=0.5, hspace=0.)
        ax1 = plt.Subplot(fig, inner_grid[0])
        ax2 = plt.Subplot(fig, inner_grid[1], sharex=ax1)

        lim = np.max(np.abs(zi[zi < 0]) * 0.5)
        levels = np.linspace(-lim, lim, 15)
        # print("Levels ", -lim, lim)
        cmap = plt.cm.jet
        cs = ax1.contourf(xi, yi, zi, levels=levels, extend='both', cmap=cmap)
        cs.cmap.set_under('k')
        cs.cmap.set_over('k')
        ax1.set_ylim(5, 9)
        ax1.set_xlim(-5, 20)
        ax1.plot(phase_Ps, slow, color='black')       # direct conversion, positive amplitude
        ax1.plot(phase_PpPmS, slow, color='crimson')  # multiples, positive amplitude
        ax1.plot(phase_PpSmS, slow, color='purple')   # multiples, negative amplitude

        ax1.annotate('Pms', xy=(phase_Ps[-1][0], 9.1), xycoords='data', ha='center', va='bottom',
                     rotation=0., annotation_clip=False, fontsize=7)
        ax1.annotate('Ps LAB', xy=(phase_Ps[-1][-1], 9.1), xycoords='data', ha='center', va='bottom',
                     rotation=0., annotation_clip=False, fontsize=7)
        if phase_Pbs:
            ax1.annotate('Pbs', xy=(phase_Pbs[-1][0], 9.1), xycoords='data', ha='center', va='bottom',
                         rotation=0., annotation_clip=False, fontsize=7)
            ax1.plot(phase_Pbs, slow, color='black')
            ax1.plot(phase_PpPbs, slow, color='crimson')
        # end if

        ax1.spines['bottom'].set_visible(False)
        ax1.tick_params(labelbottom='off')
        ax1.spines['bottom'].set_visible(False)
        ax1.yaxis.tick_right()
        ax1.yaxis.set_label_position("right")

        xlabels = ax1.get_xticklabels()
        ylabels = ax1.get_yticklabels()
        for label in xlabels:
            label.set_rotation(90)
            label.set_fontsize(7)
        # end for
        for label in ylabels:
            label.set_rotation(90)
            label.set_fontsize(7)
        # end for

        ax1.annotate(station, xy=(-0.08, 0), ha='left', va='center', xycoords='axes fraction',
                     textcoords='offset points', rotation=90.)
        start, end = ax1.get_ylim()
        ax1.yaxis.set_ticks(np.arange(start + 1, end + 1, 1))

        cs = ax2.contourf(xi, -1. * yi, zi, levels=levels, extend='both', cmap=cmap)
        cs.cmap.set_under('k')
        cs.cmap.set_over('k')
        ax2.spines['top'].set_visible(False)
        ax2.set_ylim(-9, -5)
        ax2.set_xlim(-5, 20)
        ax2.yaxis.tick_right()
        ax2.yaxis.set_label_position("right")
        ylabels = ax2.get_yticklabels()

        ax2.plot(phase_Ps, -slow, color='black')
        ax2.plot(phase_PpPmS, -slow, color='crimson')
        ax2.plot(phase_PpSmS, -slow, color='purple')

        ax2.annotate('+PpPms', xy=(phase_PpPmS[-1][0], -9.1), xycoords='data', ha='center', va='top',
                     rotation=0., annotation_clip=False, fontsize=7, color='crimson')
        ax2.annotate('-PpSms', xy=(phase_PpSmS[-1][0], -9.1), xycoords='data', ha='center', va='top',
                     rotation=0., annotation_clip=False, fontsize=7, color='purple')
        if phase_PpPbs:
            ax2.annotate('+PpPbs', xy=(phase_PpPbs[-1][0], -9.1), xycoords='data', ha='center', va='top',
                         rotation=0., annotation_clip=False, fontsize=7, color='crimson')
            ax2.plot(phase_PpPbs, -slow, color='crimson')
            ax2.plot(phase_Pbs, -slow, color='black')
        # end if

        for label in ylabels:
            label.set_rotation(90)
            label.set_fontsize(7)
        # end for

        if frame > 3:
            xlabels = ax2.get_xticklabels()
            for label in xlabels:
                label.set_rotation(90)
                label.set_fontsize(7)
            ax2.set_xlabel('Time (sec.)')
        else:
            ax2.set_xticklabels([])
        # end if

        if (frame % 2) != 0:
            ax2.annotate('Slowness s/deg', xy=(1.2, 1), ha='left', va='center',
                         xycoords='axes fraction', textcoords='offset points', rotation=90.)
        # end if

        start, end = ax2.get_ylim()
        ax2.yaxis.set_ticks(np.arange(start, end, 1))

        traces.moveout()
        x = np.array(list(range(traces[0].data.shape[0]))) * traces[0].stats.delta - 5.
        y = traces.stack()
        # Some amplitude scaling to have nice plot
        y = y[0].data / 1.5 - 5.
        ax2.plot(x, y, clip_on=False, linewidth=3, color='white')
        ax2.plot(x, y, clip_on=False, linewidth=1)

        fig.add_subplot(ax1)
        fig.add_subplot(ax2)
        frame = frame + 1
        print('frame', frame)
        if frame >= rows * columns:
            cb_ax = fig.add_axes([0.25, 0.98, 0.5, 0.02])
            labels = fig.colorbar(cs, cax=cb_ax, ticks=[np.min(zi), 0, np.max(zi)],
                                  orientation='horizontal', extend='neither',
                                  extendfrac=0.00001, extendrect=True, drawedges=False)
            # labels.set_ticks([np.min(zi), 0, np.max(zi)])
            # labels.set_ticklabels(['-', '0', '+'])
            cb_ax.set_xticks([np.min(zi), 0, np.max(zi)])
            cb_ax.set_xticklabels(['-', '0', '+'])
            # labels.ax.set_yticklabels(['-', '0', '+'])
            pdf.savefig()
            figure += 1
            frame = 0
            printed = True
            # plt.show()
            plt.close()
        # end if
    # end for

    if not printed:
        cb_ax = fig.add_axes([0.25, 0.95, 0.5, 0.02])
        labels = fig.colorbar(cs, cax=cb_ax, ticks=[-1, 0, 1], orientation='horizontal',
                              extend='neither', extendfrac=0.00001, extendrect=True, drawedges=False)
        labels.ax.set_xticklabels(['-', '0', '+', ''])
        pdf.savefig()
        plt.close()
    # end if

    pdf.close()
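# The docstring of main() above notes that a block median or mean should eventually be
# applied to shrink the gridded image before it goes into the PDF. A minimal sketch of
# such a step (illustrative only, not part of the original script) that block-averages a
# 2D grid such as the interpolated `zi` by integer factors before contouring:
import numpy as np

def block_mean(grid, fy, fx):
    """Downsample a 2D array by averaging non-overlapping fy x fx blocks."""
    ny, nx = grid.shape
    ny_trim, nx_trim = (ny // fy) * fy, (nx // fx) * fx   # drop ragged edges
    trimmed = grid[:ny_trim, :nx_trim]
    return trimmed.reshape(ny_trim // fy, fy, nx_trim // fx, fx).mean(axis=(1, 3))

# e.g. zi_small = block_mean(np.asarray(zi), 2, 2); apply the same factors to xi and yi
# so the three arrays keep matching shapes for contourf.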
            trig[2] >= best_trig_margin and trig[4] >= best_upper:
                mintrigdiff = abs(trace.stats.event_time + mean_parrival -
                                  trace.stats.starttime - trig[0][0])
                besttrig = trig[0][0]
                best_trig_margin = trig[2]
                best_band = trig[3]
                best_upper = trig[4]

        return (best_band, best_trig_margin, best_upper)
    else:
        print('Something went wrong ... ')
        return None, None, None


in_streamfile = 'data/7X-rf_profile_data-15deg.h5'
out_streamfile = 'data/7X-rf_profile_data-15deg-out.h5'
st = read_rf(in_streamfile, 'H5')
stz = [tr for tr in st if tr.stats.channel.endswith('Z')]
stn = [tr for tr in st if tr.stats.channel.endswith('N')]
ste = [tr for tr in st if tr.stats.channel.endswith('E')]

output_from_z = []
for trz, trn, tre in zip(stz, stn, ste):
    band, margin, upper = extract_filter_params(trz)
    if band and margin and upper:
        for tr in [trz, trn, tre]:
            clean_trace(tr, tr.stats.starttime, tr.stats.endtime,
                        freqmin=band[0], freqmax=band[1])
        print('Plot the cleaned trace here')
#simple_model=rf.simple_model.load_model(fname='iasp91')

#-------------Main---------------------------------
if __name__ == '__main__':
    '''
    This program composes vespagrams to identify RF converted phases and their multiples
    please refer to Tian et al. GRL 2005 VOL. 32, L08301, doi:10.1029/2004GL021885 for good examples

    input - H5 file with receiver functions
    output - PDF files to print

    Dependencies - rf and obspy packages beside other standard python packages
    The image is composed using triangulation. It gives good results but block median or mean
    must be implemented at some stage to reduce size of PDF.
    '''
    stream = rf.read_rf('DATA/7X-rf_zrt.h5', 'H5')
    rf_type = 'LQT-Q '
    filter_type = 'bandpass'
    freqmin = 0.03
    freqmax = 0.5
    # we use a zero-phase-shift band-pass filter using 2 corners. This is done in two runs
    # forward and backward, so we end up with 4 corners de facto.

    # Lets assume we have LQT orientation
    selected_stream = stream.select(component='Q').filter(
        filter_type, freqmin=freqmin, freqmax=freqmax, corners=2, zerophase=True).interpolate(10)

    # if none lets try ZRT
    'station_latlon': (-25, 140),
    'layerprops': [upper_crust, lower_crust, mantle]
}
raw_file = 'synth_events_2L.h5'
synthesize_dataset('propmatrix', raw_file, 'SY', 'AAA', src_latlon, fs, time_window,
                   **generator_args)

# Generate receiver function
rf_file = 'synth_rf_2L.h5'
resample_rate = 6.25  # Hz
event_waveforms_to_rf(raw_file, rf_file, resample_rate,
                      taper_limit=0.05, filter_band=(0.02, 1.0),
                      trim_start_time=-5.0 - 275 * resample_rate, trim_end_time=150,
                      rotation_type='ZRT', deconv_domain='iter')

# Read in RF and convert to text format
rf_dat_file = os.path.splitext(rf_file)[0] + '.dat'
rf_data = rf.read_rf(rf_file, format='h5')
rf_data.trim2(-5.0, 25.0, reftime='onset', nearest_sample=True)
rf_data = rf_data.select(component='R')[0]
times = rf_data.times() - (rf_data.stats.onset - rf_data.stats.starttime)
with open(rf_dat_file, 'w') as f:
    for t, d in zip(times, rf_data.data):
        f.write('{:2.2f}, {:.8f}\n'.format(t, d))
# end with
import os
import sys

from obspy import read, read_inventory, Stream  # read() is needed by convert_ms_to_ascii
from rf import read_rf
from eqcorrscan.utils.clustering import cluster
from eqcorrscan.utils.stacking import linstack, PWS_stack
import pandas as pd

stream = read_rf('data/7X-rf_profile_rfs-cleaned.h5', 'H5')
inv = read_inventory('data/7X-inventory.xml')


def convert_ms_to_ascii(msfile_path, outfile_folder):
    msf = read(msfile_path)
    # forward to the stream writer (the group_len argument is assumed here to be the
    # number of traces read from the miniSEED file)
    convert_stream_to_ascii(msf, outfile_folder, len(msf))


def convert_stream_to_ascii(stream, outfile_folder, group_len):
    df = pd.DataFrame(columns=['offset_from_onset', 'value'])
    offset = -5.0
    for i in range(len(stream[0].data)):
        df.loc[i] = [offset, stream[0].data[i]]
        offset += stream[0].stats.delta
    df.to_csv(outfile_folder + '/' + stream[0].stats.network + '.' +
              stream[0].stats.station + '..' + stream[0].stats.channel + '.' +
              str(group_len) + '.dat', sep=' ', index=False, header=False)
def compute_rf(dataRFfileloc):
    logger = logging.getLogger(__name__)
    all_rfdatafile = glob.glob(
        dataRFfileloc + f"*-{str(inpRFdict['filenames']['data_rf_suffix'])}.h5")
    for jj, rfdatafile in enumerate(all_rfdatafile):
        network = rfdatafile.split("-")[0]
        station = rfdatafile.split("-")[1]
        rffile = f"{network}-{station}-{str(inpRFdict['filenames']['rf_compute_data_suffix'])}.h5"
        datatmp = read_rf(rfdatafile, 'H5')
        if not os.path.exists(rffile):
            logger.info(f"--> Computing RF for {rfdatafile}, {jj+1}/{len(all_rfdatafile)}")
            data = read_rf(rfdatafile, 'H5')
            stream = RFStream()
            for stream3c in tqdm.tqdm(IterMultipleComponents(data, 'onset', 3)):
                if len(stream3c) != 3:
                    continue

                ## check if the length of all three traces are equal
                lenphase = 100
                for tr in stream3c:
                    lentr = tr.stats.npts
                    lengt = tr.stats.sampling_rate * lenphase
                    if lentr != lengt:
                        if tr.stats.sampling_rate < 20:
                            logger.warning(
                                f"Sampling rate too low: {tr.stats.sampling_rate}, required >= 20Hz")
                            stream3c.remove(tr)
                            continue
                        elif tr.stats.sampling_rate >= 20:
                            if tr.stats.sampling_rate % 20 == 0:
                                factor = int(tr.stats.sampling_rate / 20)
                                tr.decimate(factor, strict_length=False, no_filter=True)
                                if tr.stats.npts > tr.stats.sampling_rate * lenphase:
                                    t = tr.stats.starttime
                                    tr.trim(t, t + lenphase - (1 / tr.stats.sampling_rate))
                                continue
                            else:
                                tr.resample(20.0)
                                if tr.stats.npts > tr.stats.sampling_rate * lenphase:
                                    t = tr.stats.starttime
                                    tr.trim(t, t + lenphase - (1 / tr.stats.sampling_rate))
                                continue
                        else:
                            pass

                test_npts = []
                for tr in stream3c:
                    lentr = tr.stats.npts
                    test_npts.append(lentr)
                if len(set(test_npts)) > 1:
                    continue

                stream3c.filter('bandpass',
                                freqmin=float(inpRFdict['rf_filter_settings']['minfreq']),
                                freqmax=float(inpRFdict['rf_filter_settings']['maxfreq']))

                try:
                    stream3c.rf()
                except Exception as e:
                    logger.warning("Problem applying rf method", exc_info=True)

                stream3c.moveout()
                stream.extend(stream3c)
            stream.write(rffile, 'H5')
        else:
            # logger.info(f"--> {rffile} already exists!, {jj}/{len(all_rfdatafile)}")
            logger.info(f"--> Verifying RF computation {jj+1}/{len(all_rfdatafile)}")