def _sim_WA(trace, PAZ, seedresp, water_level):
    """
    Remove the instrument response from a trace and return a de-meaned,
    de-trended, Wood Anderson simulated trace in its place.

    Works in-place on the data and will destroy your original data; copy
    the trace before giving it to this function!

    :type trace: obspy.Trace
    :param trace: A standard obspy trace, generally should be given without
        pre-filtering. If given with pre-filtering for use with amplitude
        determination for magnitudes you will need to worry about how you
        cope with the response of this filter yourself.
    :type PAZ: dict
    :param PAZ: Dictionary containing lists of poles and zeros, the gain and
        the sensitivity.  If falsy, *seedresp* is used instead.
    :type seedresp: dict
    :param seedresp: SEED RESP-style response passed through to seisSim;
        only used when *PAZ* is falsy.
    :type water_level: int
    :param water_level: Water level for the simulation.

    :returns: obspy.Trace
    """
    import warnings
    from obspy.signal import seisSim
    # Note Wood anderson sensitivity is 2080 as per Uhrhammer & Collins 1990
    PAZ_WA = {
        'poles': [-6.283 + 4.7124j, -6.283 - 4.7124j],
        'zeros': [0 + 0j],
        'gain': 1.0,
        'sensitivity': 2080
    }
    # De-trend data
    trace.detrend('simple')
    # Simulate Wood Anderson
    if PAZ:
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=PAZ, paz_simulate=PAZ_WA,
                             water_level=water_level,
                             remove_sensitivity=True)
    elif seedresp:
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=None, paz_simulate=PAZ_WA,
                             water_level=water_level, seedresp=seedresp)
    else:
        # BUG FIX: the original built a UserWarning object without emitting
        # it; actually warn so the caller sees the fallback happened.
        warnings.warn('No response given to remove, will just simulate WA')
        # BUG FIX: 'samplng_rate' -> 'sampling_rate' (raised AttributeError
        # whenever this branch was taken).
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=None, water_level=water_level)
    return trace
def test_simulate(self):
    """
    Tests if calling simulate of trace gives the same result as using
    seisSim manually, including the recorded processing history.
    """
    tr = read()[0]
    paz_sts2 = {'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j,
                          -251.33 + 0j, -131.04 - 467.29j,
                          -131.04 + 467.29j],
                'zeros': [0j, 0j], 'gain': 60077000.0,
                'sensitivity': 2516778400.0}
    paz_le3d1s = {'poles': [-4.440 + 4.440j, -4.440 - 4.440j,
                            -1.083 + 0.0j],
                  'zeros': [0.0 + 0.0j, 0.0 + 0.0j, 0.0 + 0.0j],
                  'gain': 0.4, 'sensitivity': 1.0}
    data = seisSim(tr.data, tr.stats.sampling_rate, paz_remove=paz_sts2,
                   paz_simulate=paz_le3d1s, remove_sensitivity=True,
                   simulate_sensitivity=True)
    try:
        proc_info = tr.stats.processing
    except (KeyError, AttributeError):
        # BUG FIX: a missing 'processing' entry surfaces as AttributeError
        # on attribute-style stats access; catching only KeyError let the
        # test error out instead of starting with an empty history.
        proc_info = []
    proc_info.append("simulate:inverse:%s:sensitivity=True" % paz_sts2)
    proc_info.append("simulate:forward:%s:sensitivity=True" % paz_le3d1s)
    tr.simulate(paz_remove=paz_sts2, paz_simulate=paz_le3d1s)
    np.testing.assert_array_equal(tr.data, data)
    self.assertEqual(tr.stats.processing, proc_info)
def test_simulate(self):
    """
    Check that Trace.simulate matches a manual seisSim call, including
    the processing-history entries recorded on the trace stats.
    """
    trace = read()[0]
    paz_sts2 = {
        'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j,
                  -251.33 + 0j, -131.04 - 467.29j, -131.04 + 467.29j],
        'zeros': [0j, 0j],
        'gain': 60077000.0,
        'sensitivity': 2516778400.0,
    }
    paz_le3d1s = {
        'poles': [-4.440 + 4.440j, -4.440 - 4.440j, -1.083 + 0.0j],
        'zeros': [0.0 + 0.0j, 0.0 + 0.0j, 0.0 + 0.0j],
        'gain': 0.4,
        'sensitivity': 1.0,
    }
    # Reference result computed the manual way.
    expected = seisSim(trace.data, trace.stats.sampling_rate,
                       paz_remove=paz_sts2, paz_simulate=paz_le3d1s,
                       remove_sensitivity=True, simulate_sensitivity=True)
    # Build the processing history we expect simulate() to record; the
    # trace may or may not already carry a 'processing' list.
    try:
        expected_processing = trace.stats.processing
    except (KeyError, AttributeError):
        expected_processing = []
    expected_processing.append(
        "simulate:inverse:%s:sensitivity=True" % paz_sts2)
    expected_processing.append(
        "simulate:forward:%s:sensitivity=True" % paz_le3d1s)
    trace.simulate(paz_remove=paz_sts2, paz_simulate=paz_le3d1s)
    np.testing.assert_array_equal(trace.data, expected)
    self.assertEqual(trace.stats.processing, expected_processing)
def test_simulate(self):
    """
    Verify that Trace.simulate produces exactly the same samples as
    calling seisSim directly with identical poles-and-zeros dicts.
    """
    tr = read()[0]
    # Response to remove (STS-2) and response to simulate (LE-3D/1s).
    sts2_response = {'poles': [-0.037004 + 0.037016j,
                               -0.037004 - 0.037016j,
                               -251.33 + 0j,
                               -131.04 - 467.29j,
                               -131.04 + 467.29j],
                     'zeros': [0j, 0j],
                     'gain': 60077000.0,
                     'sensitivity': 2516778400.0}
    le3d1s_response = {'poles': [-4.440 + 4.440j,
                                 -4.440 - 4.440j,
                                 -1.083 + 0.0j],
                       'zeros': [0.0 + 0.0j] * 3,
                       'gain': 0.4,
                       'sensitivity': 1.0}
    reference = seisSim(tr.data, tr.stats.sampling_rate,
                        paz_remove=sts2_response,
                        paz_simulate=le3d1s_response,
                        remove_sensitivity=True,
                        simulate_sensitivity=True)
    tr.simulate(paz_remove=sts2_response, paz_simulate=le3d1s_response)
    np.testing.assert_array_equal(tr.data, reference)
def test_FDSN_ARC_IC():
    """
    End-to-end check: request and correct FDSN waveforms, then compare
    each corrected trace against the locally stored Wilber reference.
    """
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Override the parsed defaults with the parameters of this test request.
    input_dics['min_date'] = '2011-03-01'
    input_dics['max_date'] = '2011-03-20'
    input_dics['min_mag'] = 8.9
    input_dics['datapath'] = 'test_%s' % dir_name
    input_dics['net'] = 'TA'
    input_dics['sta'] = 'Z3*'
    input_dics['cha'] = 'BHZ'
    input_dics['req_parallel'] = 'Y'
    input_dics['req_np'] = 4
    FDSN_ARC_IC(input_dics, input_dics['fdsn_base_url'])
    corrected = read(os.path.join(input_dics['datapath'],
                                  '2011-03-01_2011-03-20', '20110311_1',
                                  'BH', '*'))
    assert len(corrected) == 7
    wilber = read(os.path.join('tests', 'fdsn_waveforms', 'TA*'))
    # Poles and zeros shared by the three Z3?A stations.
    paz_35 = {'gain': 5.714000e+08,
              'sensitivity': 6.309070e+08,
              'zeros': (0.0, 0.0, 0.0),
              'poles': (-3.701000e-02 + 3.701000e-02j,
                        -3.701000e-02 - 3.701000e-02j,
                        -1.131000e+03 + 0.000000e+00j,
                        -1.005000e+03 + 0.000000e+00j,
                        -5.027000e+02 + 0.000000e+00j)}
    for station in ['Z35A', 'Z37A', 'Z39A']:
        tr_cor = corrected.select(station=station)[0]
        tr_wilber = wilber.select(station=station)[0]
        tr_ref = tr_wilber.copy()
        tr_ref.detrend()
        # The correction is applied to the original (un-detrended) data,
        # as in the reference processing; seisSim handles mean/taper.
        tr_ref.data = seisSim(tr_wilber.data,
                              tr_wilber.stats.sampling_rate,
                              paz_remove=paz_35, paz_simulate=None,
                              remove_sensitivity=True,
                              simulate_sensitivity=False,
                              water_level=600., zero_mean=True,
                              taper=True, taper_fraction=0.05,
                              pre_filt=(0.008, 0.012, 3.0, 4.0),
                              pitsasim=False, sacsim=True)
        residual = abs(tr_cor.data - tr_ref.data)
        # amplitude of the traces is in the order of 1e6 or so
        assert max(residual) < 0.00001
def _sim_WA(trace, PAZ, seedresp, water_level):
    """
    Remove the instrument response from a trace and return a de-meaned,
    de-trended, Wood Anderson simulated trace in its place.

    Works in-place on the data and will destroy your original data; copy
    the trace before giving it to this function!

    :type trace: obspy.Trace
    :param trace: A standard obspy trace, generally should be given without
        pre-filtering. If given with pre-filtering for use with amplitude
        determination for magnitudes you will need to worry about how you
        cope with the response of this filter yourself.
    :type PAZ: dict
    :param PAZ: Dictionary containing lists of poles and zeros, the gain and
        the sensitivity.  If falsy, *seedresp* is used instead.
    :type seedresp: dict
    :param seedresp: SEED RESP-style response passed through to seisSim;
        only used when *PAZ* is falsy.
    :type water_level: int
    :param water_level: Water level for the simulation.

    :returns: obspy.Trace
    """
    import warnings
    from obspy.signal import seisSim
    # Note Wood anderson sensitivity is 2080 as per Uhrhammer & Collins 1990
    PAZ_WA = {'poles': [-6.283 + 4.7124j, -6.283 - 4.7124j],
              'zeros': [0 + 0j],
              'gain': 1.0,
              'sensitivity': 2080}
    # De-trend data
    trace.detrend('simple')
    # Simulate Wood Anderson
    if PAZ:
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=PAZ, paz_simulate=PAZ_WA,
                             water_level=water_level,
                             remove_sensitivity=True)
    elif seedresp:
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=None, paz_simulate=PAZ_WA,
                             water_level=water_level, seedresp=seedresp)
    else:
        # BUG FIX: the original created a UserWarning without emitting it.
        warnings.warn('No response given to remove, will just simulate WA')
        # BUG FIX: 'samplng_rate' -> 'sampling_rate' (AttributeError before).
        trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                             paz_remove=None, water_level=water_level)
    return trace
def removeInstrument(st, args): if (args.sim == 'PZs'): # prefilters f = args.flim.split() f0 = eval(f[0]) f1 = eval(f[1]) f2 = eval(f[2]) f3 = eval(f[3]) toPurge = [] # station to purge if no Paz found for i in range(len(st)): # attach poles and zeros instrument if (args.dva == '1'): try: attach_paz(st[i], st[i].stats.PZs_file, todisp=False) except: print "No appropriate PZs file found for station " + st[ i].stats.station, st[i].stats.channel, st[ i].stats.network toPurge.append(st[i].stats.station) else: try: attach_paz(st[i], st[i].stats.PZs_file, tovel=True) except: print "No appropriate PZs file found for station " + st[ i].stats.station, st[i].stats.channel, st[ i].stats.network toPurge.append(st[i].stats.station) # remove stations if len(toPurge>0) if len(toPurge) > 0: st = purgeListStation(st, toPurge, 'r') print "Check if station/channel/network/location of the PZs files and the same string within loaded binary files " print "do correspond. It may occour for instance that the headers strings of the waveform files (e.g. sac, fseed) " print "do not agrees with the same strings of the PZs name files. For instance the name of the network. " print "If these strings do not correspond, modify the name of the PZs files or the header values of the waveforms" print "You may also choose to remove this station using the option --purge (see help for details)" # now do remove for i in range(len(st)): # remove instrument to displacement # st[i].data=detrend(st[i].data) st[i].data = seisSim(st[i].data,st[i].stats.sampling_rate,paz_remove=st[i].stats.paz, \ taper=True, taper_fraction=0.050, pre_filt=(f0,f1,f2,f3)) #,water_level=60.0) # from meters to centimeters st[i].data = st[i].data * 100 return st
def correct_responce(st, paz_orig, paz_desidered):
    """Apply a response correction to every trace of a stream.

    :param st: obspy Stream to correct (modified in place).
    :param paz_orig: poles-and-zeros dict passed as ``inst_sim``.
    :param paz_desidered: poles-and-zeros dict passed positionally to
        seisSim.
    :raises InputError: if *st* is not an obspy Stream.
    :returns: the corrected stream.

    NOTE(review): the paz_orig/paz_desidered argument roles look swapped
    relative to their names -- confirm against seisSim's signature before
    relying on them; behavior of the call itself is left unchanged.
    """
    from obspy.signal import seisSim
    if not isinstance(st, Stream):
        raise InputError("'st' must be a 'obspy.core.stream.Stream' object")
    for k in range(st.count()):
        # BUG FIX: the loop previously corrected st[0] on every iteration,
        # writing the same corrected data into all traces; use index k.
        st[k].data = seisSim(st[k].data, st[k].stats.sampling_rate,
                             paz_desidered, inst_sim=paz_orig)
    return st
def removeInstrument(st,args): if(args.sim == 'PZs'): # prefilters f = args.flim.split() f0 = eval(f[0]) f1 = eval(f[1]) f2 = eval(f[2]) f3 = eval(f[3]) toPurge= [] # station to purge if no Paz found for i in range(len(st)): # attach poles and zeros instrument if(args.dva=='1'): try: attach_paz(st[i], st[i].stats.PZs_file,todisp=False) except: print "No appropriate PZs file found for station " + st[i].stats.station,st[i].stats.channel,st[i].stats.network toPurge.append(st[i].stats.station) else: try: attach_paz(st[i], st[i].stats.PZs_file,tovel=True) except: print "No appropriate PZs file found for station " + st[i].stats.station,st[i].stats.channel,st[i].stats.network toPurge.append(st[i].stats.station) # remove stations if len(toPurge>0) if len(toPurge) > 0: st = purgeListStation(st,toPurge,'r') print "Check if station/channel/network/location of the PZs files and the same string within loaded binary files " print "do correspond. It may occour for instance that the headers strings of the waveform files (e.g. sac, fseed) " print "do not agrees with the same strings of the PZs name files. For instance the name of the network. " print "If these strings do not correspond, modify the name of the PZs files or the header values of the waveforms" print "You may also choose to remove this station using the option --purge (see help for details)" # now do remove for i in range(len(st)): # remove instrument to displacement # st[i].data=detrend(st[i].data) st[i].data = seisSim(st[i].data,st[i].stats.sampling_rate,paz_remove=st[i].stats.paz, \ taper=True, taper_fraction=0.050, pre_filt=(f0,f1,f2,f3)) #,water_level=60.0) # from meters to centimeters st[i].data = st[i].data * 100 return st
def test_simulate(self):
    """
    Trace.simulate must reproduce a manual seisSim call bit for bit.
    """
    trace = read()[0]
    remove_paz = {'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j,
                            -251.33 + 0j, -131.04 - 467.29j,
                            -131.04 + 467.29j],
                  'zeros': [0j, 0j],
                  'gain': 60077000.0,
                  'sensitivity': 2516778400.0}
    simulate_paz = {'poles': [-4.440 + 4.440j, -4.440 - 4.440j,
                              -1.083 + 0.0j],
                    'zeros': [0.0 + 0.0j, 0.0 + 0.0j, 0.0 + 0.0j],
                    'gain': 0.4,
                    'sensitivity': 1.0}
    # Manual simulation used as the reference result.
    manual = seisSim(trace.data, trace.stats.sampling_rate,
                     paz_remove=remove_paz, paz_simulate=simulate_paz,
                     remove_sensitivity=True, simulate_sensitivity=True)
    trace.simulate(paz_remove=remove_paz, paz_simulate=simulate_paz)
    np.testing.assert_array_equal(trace.data, manual)
def convolution_automatic(self,stream_data,parser_data): #st=stream_data.values().copy() #for tr in st and key in parser_data: inst2hz = cornFreq2Paz(float(self.inst2hz.getvalue())) #what the hell is this? waterLevel=float(self.water_level.getvalue()) parser_keys=parser_data.keys() for key, st in stream_data.items(): for pr_key in parser_data.keys(): if pr_key.find(key[0:2]): pr=parser_data.get(pr_key) for tr in st: paz=pr.getPAZ(tr.stats) df = tr.stats.sampling_rate tr.data = seisSim(tr.data, df, paz_remove=paz, paz_simulate=inst2hz, water_level=waterLevel) stream_data[key+'_converted']=tr.copy() print "Try the manual option" return stream_data
# NOTE(review): fragment of a per-trace trigger loop (Python 2); the
# enclosing function and the names nfft, paz, inst, f, last_id,
# last_endtime, trId and s2p are defined outside this chunk.
print "Cannot process station %s, no RESP file given" % tr.stats.station
continue
# Cannot process a whole day file, split it in smaller junks
overlap = s2p(30.0, tr)  # overlap between junks, in samples
olap = overlap
samp = 0  # sample cursor into the trace
df = tr.stats.sampling_rate
# Start a fresh buffer when the trace id changed or there is a gap of
# more than one sample to the previous trace.
if trId(tr.stats)[1] != last_id or tr.stats.starttime - last_endtime > 1.0 / df:
    data_buf = np.array([], dtype='float64')
    olap = 0
while samp < tr.stats.npts:
    data = tr.data[samp:samp + nfft - olap].astype('float64')
    data = np.concatenate((data_buf, data))
    data = detrend(data)
    # Correct for frequency response of instrument
    data = seisSim(data, tr.stats.sampling_rate, paz, inst_sim=inst)
    data /= (paz['sensitivity'] / 1e9)  #V/nm/s correct for overall sensitivity
    data = recStalta(data, s2p(2.5, tr), s2p(10.0, tr))
    picked_values = triggerOnset(data, 3.0, 0.5, max_len=overlap)
    #
    for i, j in picked_values:
        # convert sample offsets back to absolute times and log as CSV
        begin = tr.stats.starttime + float(i + samp - olap) / df
        end = tr.stats.starttime + float(j + samp - olap) / df
        f.write("%s,%s,%s\n" % (str(begin), str(end), tr.stats.station))
    olap = overlap  # only needed for first time in loop
    samp += nfft - overlap
    data_buf = data[-overlap:]  # keep the tail as overlap for the next junk
    print '.',  # Progress Bar
last_endtime, last_id = trId(tr.stats)
f.close()
def test_FDSN_update():
    """
    Test FDSN_update followed by instrument correction: re-request the
    T40A waveform, correct all four stations and compare each against
    the locally stored Wilber reference trace.  A per-station PNG of
    both traces and their difference is saved for inspection.
    """
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Changing the input_dics values for testing
    input_dics['min_date'] = '2011-03-01'
    input_dics['max_date'] = '2011-03-20'
    input_dics['min_mag'] = 8.9
    input_dics['datapath'] = 'test_%s' % dir_name
    input_dics['net'] = 'TA'
    input_dics['sta'] = 'T40A'
    input_dics['cha'] = 'BHZ'
    input_dics['req_parallel'] = 'N'
    input_dics['ic_parallel'] = 'Y'
    input_dics['ic_np'] = 4
    input_dics['fdsn_update'] = input_dics['datapath']
    FDSN_update(input_dics, address=input_dics['fdsn_update'])
    FDSN_ARC_IC(input_dics, input_dics['fdsn_base_url'])
    st_cor = read(os.path.join(input_dics['datapath'],
                               '2011-03-01_2011-03-20', '20110311_1',
                               'BH', '*'))
    assert len(st_cor) == 8
    st_wilber = read(os.path.join('tests', 'fdsn_waveforms', 'TA*'))
    # Poles and zeros used for station T40A.
    paz_t40 = {'gain': 3.484620e+17,
               'sensitivity': 6.271920e+08,
               'zeros': (+0.000000e+00+0.000000e+00j,
                         +0.000000e+00+0.000000e+00j,
                         +0.000000e+00+0.000000e+00j,
                         -4.631000e+02+4.305000e+02j,
                         -4.631000e+02-4.305000e+02j,
                         -1.766000e+02+0.000000e+00j,
                         -1.515000e+01+0.000000e+00j),
               'poles': (-1.330000e+04+0.000000e+00j,
                         -1.053000e+04+1.005000e+04j,
                         -1.053000e+04-1.005000e+04j,
                         -5.203000e+02+0.000000e+00j,
                         -3.748000e+02+0.000000e+00j,
                         -9.734000e+01+4.007000e+02j,
                         -9.734000e+01-4.007000e+02j,
                         -1.564000e+01+0.000000e+00j,
                         -3.700000e-02+3.700000e-02j,
                         -3.700000e-02-3.700000e-02j,
                         -2.551000e+02+0.000000e+00j)}
    # Poles and zeros shared by the Z3?A stations.
    paz_35 = {'gain': 5.714000e+08,
              'sensitivity': 6.309070e+08,
              'zeros': (0.0, 0.0, 0.0),
              'poles': (-3.701000e-02+3.701000e-02j,
                        -3.701000e-02-3.701000e-02j,
                        -1.131000e+03+0.000000e+00j,
                        -1.005000e+03+0.000000e+00j,
                        -5.027000e+02+0.000000e+00j)}
    for sta in ['T40A', 'Z35A', 'Z37A', 'Z39A']:
        # pick the response matching the station
        if sta not in ['T40A']:
            paz_req = paz_35
        else:
            paz_req = paz_t40
        tr_cor = st_cor.select(station=sta)[0]
        tr_wilber = st_wilber.select(station=sta)[0]
        tr_wilber_corr = tr_wilber.copy()
        tr_wilber_corr.detrend()
        # NOTE(review): the correction is applied to the un-detrended
        # tr_wilber.data; zero_mean/taper inside seisSim do the
        # pre-processing, and the result overwrites the copy's data.
        corr_wilber = seisSim(tr_wilber.data,
                              tr_wilber.stats.sampling_rate,
                              paz_remove=paz_req,
                              paz_simulate=None,
                              remove_sensitivity=True,
                              simulate_sensitivity=False,
                              water_level=600.,
                              zero_mean=True,
                              taper=True,
                              taper_fraction=0.05,
                              pre_filt=(0.008, 0.012, 3.0, 4.0),
                              pitsasim=False,
                              sacsim=True)
        tr_wilber_corr.data = corr_wilber
        tr_diff = abs(tr_cor.data - tr_wilber_corr.data)
        # Save a comparison plot for visual inspection.
        plt.figure()
        plt.clf()
        plt.subplot(2, 1, 1)
        plt.plot(tr_cor.data, 'b')
        plt.plot(tr_wilber_corr.data, 'r')
        plt.subplot(2, 1, 2)
        plt.plot(tr_diff)
        plt.savefig(os.path.join(input_dics['datapath'],
                                 '%s.png' % sta),
                    format='png')
        # amplitude of the traces is in the order of 1e13 or so
        assert max(tr_diff) < 0.00001
# NOTE(review): fragment of a per-trace trigger loop; the enclosing
# function and the names nfft, inst, LOW, HIGH, STA, LTA, ON, OFF,
# summary, trigger_list, trId and s2p live outside this chunk.
summary.append("found no metadata for %s. skipping!" % tr.id)
continue
# Cannot process a whole day file, split it in smaller junks
overlap = s2p(30.0, tr)  # overlap between junks, in samples
olap = overlap
samp = 0  # sample cursor into the trace
df = tr.stats.sampling_rate
# Start a fresh buffer when the trace id changed or there is a gap of
# more than one sample to the previous trace.
if trId(tr.stats)[1] != last_id or tr.stats.starttime - last_endtime > 1.0 / df:
    data_buf = np.array([], dtype='float64')
    olap = 0
while samp < tr.stats.npts:
    data = tr.data[samp:samp + nfft - olap].astype('float64')
    data = np.concatenate((data_buf, data))
    data = detrend(data)
    # Correct for frequency response of instrument
    data = seisSim(data, df, paz_remove=tr.stats.paz, paz_simulate=inst,
                   remove_sensitivity=True)
    # XXX is removed in seisSim... ?!
    # XXX data /= (paz['sensitivity'] / 1e9) #V/nm/s correct for overall sensitivity
    data = bandpass(data, LOW, HIGH, df)
    data = recStalta(data, s2p(STA, tr), s2p(LTA, tr))
    picked_values = triggerOnset(data, ON, OFF, max_len=overlap)
    #
    for i, j in picked_values:
        # convert sample offsets back to absolute trigger times
        begin = tr.stats.starttime + float(i + samp - olap) / df
        end = tr.stats.starttime + float(j + samp - olap) / df
        trigger_list.append((begin.timestamp, end.timestamp,
                             tr.stats.station))
    olap = overlap  # only needed for first time in loop
    samp += nfft - overlap
    data_buf = data[-overlap:]  # keep the tail as overlap for the next junk
last_endtime, last_id = trId(tr.stats)
import matplotlib.pyplot as plt # Define poles zeros and gain le3d = { 'poles': [-4.21000 + 4.66000j, -4.21000 - 4.66000j, -2.105000 + 0.00000j], 'zeros': [0.0 + 0.0j] * 3, # add or remove zeros here 'gain': 0.4 } # Read in the data tr = read("loc_RJOB20050831023349.z")[0] # Do the instrument correction data_corr = seisSim(tr.data, tr.stats.sampling_rate, le3d, inst_sim=PAZ_WOOD_ANDERSON, water_level=60.0) # Just for visualization, calculate transferfuction in frequency domain trans, freq = pazToFreqResp(le3d['poles'], le3d['zeros'], le3d['gain'], 1. / tr.stats.sampling_rate, 2**12, freq=True) # # The plotting part # time = np.arange(0, tr.stats.npts) / tr.stats.sampling_rate
from obspy.core import read
from obspy.signal import seisSim, cornFreq2Paz

# NOTE(review): script chunk; np and plt are imported outside this chunk.
# Response of a 1 Hz corner-frequency instrument to simulate.
onehzinst = cornFreq2Paz(1.0, damp=0.707)  # 1Hz instrument
# Fetch an example trace and de-mean it.
tr = read("http://examples.obspy.org/RJOB20090824.ehz")[0]
tr.data = tr.data - tr.data.mean()
# STS-2 response to be removed.
sts2 = {'gain': 60077000.0,
        'poles': [(-0.037004000000000002+0.037016j),
                  (-0.037004000000000002-0.037016j),
                  (-251.33000000000001+0j),
                  (-131.03999999999999-467.29000000000002j),
                  (-131.03999999999999+467.29000000000002j)],
        'sensitivity': 2516778400.0,
        'zeros': [0j, 0j]}
# Remove the STS-2 response, simulate the 1 Hz instrument.
data2 = seisSim(tr.data, tr.stats.sampling_rate, sts2,
                inst_sim=onehzinst, water_level=600.0)
data2 = data2 / sts2["sensitivity"]  # correct for overall sensitivity

# The plotting, plain matplotlib
t = np.arange(tr.stats.npts) / tr.stats.sampling_rate
plt.subplot(211)
plt.plot(t, tr.data, 'k')
plt.ylabel('STS-2 [counts]')
#
plt.subplot(212)
plt.plot(t, data2, 'k')
plt.ylabel('1Hz Instrument [m/s]')
plt.xlabel('Time [s]')
plt.savefig('sts2onehz.pdf')
#plt.show()
paz = sp.getPAZ(tr.stats.channel) # Uncomment the following for: # Integrate by adding a zero at the position zero # As for the simulation the poles and zeros are inverted and convolved # in the frequency domain this is basically mutliplying by 1/jw which # is an integration in the frequency domain # See "Of Poles and Zeros", Frank Scherbaum, Springer 2007 #paz['zeros'].append(0j) # preprocessing tr.data = tr.data.astype('float64') #convert data to float tr.data = detrend(tr.data, 'linear') #detrend tr.data *= cosTaper(tr.stats.npts, 0.10) #costaper 5% at start and end # correct for instrument, play with water_level # this will results to unit of XSEEDs tag stage_signal_output_units # most common for seed is m/s, write xseed by sp.writeXSEED('xs.txt') tr.data = seisSim(tr.data, tr.stats.sampling_rate, paz, inst_sim=None, water_level=60.0) tr.data = tr.data/paz['sensitivity'] # You need to do postprocessing the low freq are most likely artefacts (result from # dividing the freqresp / to high water_level), use a highpass to get # rid of the artefacts, e.g. highpass at e.g. 2.0Hz #tr.data = highpass(tr.data, 2.0, df=tr.stats.sampling_rate, corners=2) # # the plotting part # m = stream.count() for i, tr in enumerate(stream): plt.subplot(m, 1, i+1) plt.plot(tr.data) plt.ylabel(tr.stats.channel)
# NOTE(review): fragment of a per-trace trigger loop; the enclosing
# function and the names nfft, inst, LOW, HIGH, STA, LTA, ON, OFF,
# trigger_list, trId and s2p live outside this chunk.
olap = overlap
samp = 0  # sample cursor into the trace
df = tr.stats.sampling_rate
# Start a fresh buffer when the trace id changed or there is a gap of
# more than one sample to the previous trace.
if trId(tr.stats)[1] != last_id or tr.stats.starttime - last_endtime > 1.0 / df:
    data_buf = np.array([], dtype='float64')
    olap = 0
while samp < tr.stats.npts:
    data = tr.data[samp:samp + nfft - olap].astype('float64')
    data = np.concatenate((data_buf, data))
    data = detrend(data)
    # Correct for frequency response of instrument
    data = seisSim(data, df, paz_remove=tr.stats.paz, paz_simulate=inst,
                   remove_sensitivity=True)
    # XXX is removed in seisSim... ?!
    # XXX data /= (paz['sensitivity'] / 1e9) #V/nm/s correct for overall sensitivity
    data = bandpass(data, LOW, HIGH, df)
    data = recStalta(data, s2p(STA, tr), s2p(LTA, tr))
    picked_values = triggerOnset(data, ON, OFF, max_len=overlap)
    #
    for i, j in picked_values:
        # convert sample offsets back to absolute trigger times
        begin = tr.stats.starttime + float(i + samp - olap) / df
        end = tr.stats.starttime + float(j + samp - olap) / df
        trigger_list.append(
            (begin.timestamp, end.timestamp, tr.stats.station))
    olap = overlap  # only needed for first time in loop
    samp += nfft - overlap
# Cannot process a whole day file, split it in smaller junks overlap = s2p(30.0, tr) olap = overlap samp = 0 df = tr.stats.sampling_rate if trId( tr.stats )[1] != last_id or tr.stats.starttime - last_endtime > 1.0 / df: data_buf = np.array([], dtype='float64') olap = 0 while samp < tr.stats.npts: data = tr.data[samp:samp + nfft - olap].astype('float64') data = np.concatenate((data_buf, data)) data = detrend(data) # Correct for frequency response of instrument data = seisSim(data, tr.stats.sampling_rate, paz, inst_sim=inst) data /= (paz['sensitivity'] / 1e9 ) #V/nm/s correct for overall sensitivity data = recStalta(data, s2p(2.5, tr), s2p(10.0, tr)) picked_values = triggerOnset(data, 3.0, 0.5, max_len=overlap) # for i, j in picked_values: begin = tr.stats.starttime + float(i + samp - olap) / df end = tr.stats.starttime + float(j + samp - olap) / df f.write("%s,%s,%s\n" % (str(begin), str(end), tr.stats.station)) olap = overlap # only needed for first time in loop samp += nfft - overlap data_buf = data[-overlap:] print '.', # Progress Bar last_endtime, last_id = trId(tr.stats)
from obspy.arclink import Client
from obspy.signal import cornFreq2Paz, seisSim

# NOTE(review): script chunk (Python 2 -- dict .values() indexing below);
# UTCDateTime, np and plt come from imports outside this chunk.
# Retrieve data via ArcLink
# please provide a valid email address for the keyword user
client = Client(user="******")
t = UTCDateTime("2009-08-24 00:20:03")
st = client.getWaveform('BW', 'RJOB', '', 'EHZ', t, t + 30)
paz = client.getPAZ('BW', 'RJOB', '', 'EHZ', t)
paz = paz.values()[0]  # take the first (only) channel response

# 1Hz instrument
one_hertz = cornFreq2Paz(1.0)
# Correct for frequency response of the instrument
res = seisSim(st[0].data.astype('float32'), st[0].stats.sampling_rate,
              paz, inst_sim=one_hertz)
# Correct for overall sensitivity
res = res / paz['sensitivity']

# Plot the seismograms
sec = np.arange(len(res)) / st[0].stats.sampling_rate
plt.subplot(211)
plt.plot(sec, st[0].data, 'k')
plt.title("%s %s" % (st[0].stats.station, t))
plt.ylabel('STS-2')
plt.subplot(212)
plt.plot(sec, res, 'k')
plt.xlabel('Time [s]')
plt.ylabel('1Hz CornerFrequency')
plt.show()
import matplotlib.pyplot as plt # Define poles zeros and gain le3d = { 'poles': [-4.21000 + 4.66000j, - 4.21000 - 4.66000j, - 2.105000 + 0.00000j], 'zeros': [0.0 + 0.0j] * 3, # add or remove zeros here 'gain' : 0.4 } # Read in the data tr = read("loc_RJOB20050831023349.z")[0] # Do the instrument correction data_corr = seisSim(tr.data, tr.stats.sampling_rate, le3d, inst_sim=PAZ_WOOD_ANDERSON, water_level=60.0) # Just for visualization, calculate transferfuction in frequency domain trans, freq = pazToFreqResp(le3d['poles'], le3d['zeros'], le3d['gain'], 1./tr.stats.sampling_rate, 2**12, freq=True) # # The plotting part # time = np.arange(0,tr.stats.npts)/tr.stats.sampling_rate plt.figure() plt.subplot(211) plt.plot(time, tr.data, label="Original Data") plt.legend() plt.subplot(212) plt.plot(time, data_corr, label="Wood Anderson Simulated Data")