def findEvents(condir='ContinousWaveForms', stakey='StationKey.csv', chan='Z',
               trigDir='Trigs', startbuff=25, endbuff=200, trigBuff=2000):
    """Scan a continuous-waveform archive and save coincidence-triggered events.

    Parameters
    ----------
    condir : str
        Root directory of the continuous waveform archive.
    stakey : str
        Path to the station key CSV (read with pandas).
    chan : str
        Channel component used for triggering (e.g. ``'Z'``). Previously
        this argument was ignored and ``'*Z'`` was hard-coded; the default
        preserves the old behavior.
    trigDir : str
        Output directory for triggered waveform snippets (created if missing).
    startbuff, endbuff : float
        Seconds of data kept before the trigger time / after trigger end.
    trigBuff : float
        Buffer (samples/seconds, per ``trimTrig``'s contract) used when
        trimming the raw trigger list.

    Raises
    ------
    RuntimeError
        If slicing the full stream around a trigger fails (replaces the old
        debug-globals + ``sys.exit(1)`` pattern, which killed the process
        and hid the underlying exception).
    """
    stations = pd.read_csv(stakey)
    if not os.path.isdir(trigDir):
        os.makedirs(trigDir)
    years, juldays = getConRange(stations, condir)
    for yi, year in enumerate(years):
        for julday in juldays[yi]:
            STfull = makeStream(stations, year, julday, condir)
            STfull.sort()
            # Trigger only on the requested component; keep STfull intact so
            # the saved snippet contains all channels.
            st = STfull.copy().select(channel='*' + chan)
            trig = coincidenceTrigger("recstalta", 5, 1.0, st, 7, sta=0.5,
                                      lta=15, details=True,
                                      trigger_off_extension=20)
            trig = trimTrig(trig, trigBuff)
            for c, event in enumerate(trig):
                # trimTrig may blank out entries; skip those explicitly.
                if event is None:
                    continue
                try:
                    snippet = STfull.slice(
                        starttime=event['time'] - startbuff,
                        endtime=event['time'] + event['duration'] + endbuff)
                except Exception as err:
                    raise RuntimeError(
                        "failed to slice trigger %d at %s" % (c, event['time'])
                    ) from err
                saveTrace(snippet, trigDir, event['time'])
# Post-process the previously fetched waveforms, run the network coincidence
# trigger, plot each detection and build a text summary.
# NOTE(review): `st`, `summary`, `PAR`, `T1`, `T2`, `PLOTDIR`, `num_stations`
# and `SUMMARY` are defined elsewhere in the full script -- confirm there.
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        # taper 1% at each end to suppress filter edge artifacts
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1,
              zerophase=True)
    st.trim(T1, T2)
    # trigger on the un-normalized copy; the normalized stream is only
    # used for plotting below
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger(
        "recstalta", PAR.ON, PAR.OFF, st_trigger,
        thr_coincidence_sum=PAR.MIN_STATIONS,
        max_trigger_length=PAR.MAXLEN,
        trigger_off_extension=PAR.ALLOWANCE, details=True,
        sta=PAR.STA, lta=PAR.LTA)
    for t in trig:
        # one summary line per trigger: time, duration, weighted-mean CFT
        # peak, and the list of triggering stations
        info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"),
                                 ("%.1f" % t['duration']).rjust(4),
                                 ("%i" % t['cft_peak_wmean']).rjust(3),
                                 "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (
            PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"),
            t['duration'], t['cft_peak_wmean'], len(t['stations']),
            num_stations, "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
        # collect "-a <file>" attachment arguments -- presumably consumed
        # by a mutt mail command later in the script; TODO confirm
        mutt += ("-a", outfilename)
summary.append("#" * 79)
summary = "\n".join(summary)
summary += "\n" + "\n".join(("%s=%s" % (k, v) for k, v in PAR.items()))
#print summary
# append this run's summary to the persistent summary file
open(SUMMARY, "at").write(summary + "\n")
def test_coincidenceTriggerWithSimilarityChecking(self):
    """
    Test network coincidence trigger with cross correlation similarity
    checking of given event templates.
    """
    st = Stream()
    files = [
        "BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH3._.SHN.D.2010.147.cut.slist.gz",
        "BW.UH3._.SHE.D.2010.147.cut.slist.gz",
        "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"
    ]
    for filename in files:
        filename = os.path.join(self.path, filename)
        st += read(filename)
    # some prefiltering used for UH network
    st.filter('bandpass', freqmin=10, freqmax=20)
    # set up template event streams: two templates on UH3 (all three
    # components), one on UH1
    times = ["2010-05-27T16:24:33.095000", "2010-05-27T16:27:30.370000"]
    templ = {}
    for t in times:
        t = UTCDateTime(t)
        st_ = st.select(station="UH3").slice(t, t + 2.5).copy()
        templ.setdefault("UH3", []).append(st_)
    times = ["2010-05-27T16:27:30.574999"]
    for t in times:
        t = UTCDateTime(t)
        st_ = st.select(station="UH1").slice(t, t + 2.5).copy()
        templ.setdefault("UH1", []).append(st_)
    trace_ids = {
        "BW.UH1..SHZ": 1,
        "BW.UH2..SHZ": 1,
        "BW.UH3..SHZ": 1,
        "BW.UH4..EHZ": 1
    }
    similarity_thresholds = {"UH1": 0.8, "UH3": 0.7}
    trig = coincidenceTrigger("classicstalta", 5, 1, st.copy(), 4, sta=0.5,
                              lta=10, trace_ids=trace_ids,
                              event_templates=templ,
                              similarity_threshold=similarity_thresholds)
    # check floats in resulting dictionary separately
    # (note: pop() removes them so the final dict comparison below only
    # covers the exactly-comparable fields)
    self.assertAlmostEqual(trig[0].pop('duration'), 3.9600000381469727)
    self.assertAlmostEqual(trig[1].pop('duration'), 1.9900000095367432)
    self.assertAlmostEqual(trig[2].pop('duration'), 1.9200000762939453)
    self.assertAlmostEqual(trig[3].pop('duration'), 3.9200000762939453)
    self.assertAlmostEqual(trig[0]['similarity'].pop('UH1'), 0.94149447384)
    self.assertAlmostEqual(trig[0]['similarity'].pop('UH3'), 1)
    self.assertAlmostEqual(trig[1]['similarity'].pop('UH1'), 0.65228204570)
    self.assertAlmostEqual(trig[1]['similarity'].pop('UH3'), 0.72679293429)
    self.assertAlmostEqual(trig[2]['similarity'].pop('UH1'), 0.89404458774)
    self.assertAlmostEqual(trig[2]['similarity'].pop('UH3'), 0.74581409371)
    self.assertAlmostEqual(trig[3]['similarity'].pop('UH1'), 1)
    self.assertAlmostEqual(trig[3]['similarity'].pop('UH3'), 1)
    # after the pops above, the 'similarity' sub-dicts are empty
    remaining_results = \
        [{'coincidence_sum': 4.0,
          'similarity': {},
          'stations': ['UH3', 'UH2', 'UH1', 'UH4'],
          'time': UTCDateTime(2010, 5, 27, 16, 24, 33, 210000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH2..SHZ', 'BW.UH1..SHZ',
                        'BW.UH4..EHZ']},
         {'coincidence_sum': 3.0,
          'similarity': {},
          'stations': ['UH3', 'UH1', 'UH2'],
          'time': UTCDateTime(2010, 5, 27, 16, 25, 26, 710000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH1..SHZ', 'BW.UH2..SHZ']},
         {'coincidence_sum': 3.0,
          'similarity': {},
          'stations': ['UH2', 'UH1', 'UH3'],
          'time': UTCDateTime(2010, 5, 27, 16, 27, 2, 260000),
          'trace_ids': ['BW.UH2..SHZ', 'BW.UH1..SHZ', 'BW.UH3..SHZ']},
         {'coincidence_sum': 4.0,
          'similarity': {},
          'stations': ['UH3', 'UH2', 'UH1', 'UH4'],
          'time': UTCDateTime(2010, 5, 27, 16, 27, 30, 510000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH2..SHZ', 'BW.UH1..SHZ',
                        'BW.UH4..EHZ']}]
    self.assertTrue(trig == remaining_results)
def test_coincidenceTrigger(self):
    """
    Test network coincidence trigger.

    Runs seven scenarios over the same four-station UH-network day chunk:
    plain triggering, trace-id selection, weighting, max trigger length,
    very sensitive settings, a gappy stream, and ``details=True`` output.
    """
    st = Stream()
    files = [
        "BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
        "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"
    ]
    for filename in files:
        filename = os.path.join(self.path, filename)
        st += read(filename)
    # some prefiltering used for UH network
    st.filter('bandpass', freqmin=10, freqmax=20)
    # 1. no weighting, no stations specified, good settings
    # => 3 events, no false triggers
    # for the first test we make some additional tests regarding types
    res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5,
                             lta=10)
    self.assertTrue(isinstance(res, list))
    self.assertTrue(len(res) == 3)
    expected_keys = [
        'time', 'coincidence_sum', 'duration', 'stations', 'trace_ids'
    ]
    expected_types = [UTCDateTime, float, float, list, list]
    for item in res:
        self.assertTrue(isinstance(item, dict))
        for key, _type in zip(expected_keys, expected_types):
            self.assertTrue(key in item)
            self.assertTrue(isinstance(item[key], _type))
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[0]['coincidence_sum'] == 4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['coincidence_sum'] == 3)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[2]['coincidence_sum'] == 4)
    # 2. no weighting, station selection
    # => 2 events, no false triggers
    trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
    # ignore UserWarnings (traces not covered by trace_ids)
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('ignore', UserWarning)
        re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3,
                                trace_ids=trace_ids, sta=0.5, lta=10)
        self.assertTrue(len(re) == 2)
        self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
        self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
        self.assertTrue(4.2 < re[0]['duration'] < 4.8)
        self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
        self.assertTrue(re[0]['coincidence_sum'] == 3)
        self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
        self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
        self.assertTrue(4.2 < re[1]['duration'] < 4.4)
        self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
        self.assertTrue(re[1]['coincidence_sum'] == 3)
    # 3. weighting, station selection
    # => 3 events, no false triggers
    trace_ids = {
        'BW.UH1..SHZ': 0.4,
        'BW.UH2..SHZ': 0.35,
        'BW.UH3..SHZ': 0.4,
        'BW.UH4..EHZ': 0.25
    }
    res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.0,
                             trace_ids=trace_ids, sta=0.5, lta=10)
    self.assertTrue(len(res) == 3)
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[0]['coincidence_sum'] == 1.4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['coincidence_sum'] == 1.15)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[2]['coincidence_sum'] == 1.4)
    # 4. weighting, station selection, max_len
    # => 2 events, no false triggers, small event does not overlap anymore
    trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
    # ignore UserWarnings
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('ignore', UserWarning)
        re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.2,
                                trace_ids=trace_ids,
                                max_trigger_length=0.13, sta=0.5, lta=10)
        self.assertTrue(len(re) == 2)
        self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
        self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
        self.assertTrue(0.2 < re[0]['duration'] < 0.3)
        self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
        self.assertTrue(re[0]['coincidence_sum'] == 1.2)
        self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
        self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
        self.assertTrue(0.18 < re[1]['duration'] < 0.2)
        self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
        self.assertTrue(re[1]['coincidence_sum'] == 1.2)
    # 5. station selection, extremely sensitive settings
    # => 4 events, 1 false triggers
    res = coincidenceTrigger("recstalta", 2.5, 1, st.copy(), 2,
                             trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                             sta=0.3, lta=5)
    self.assertTrue(len(res) == 5)
    self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
    self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
    self.assertTrue(1.5 < res[3]['duration'] < 1.7)
    self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
    self.assertTrue(res[3]['coincidence_sum'] == 2.0)
    # 6. same as 5, gappy stream
    # => same as 5 (almost, duration of 1 event changes by 0.02s)
    st2 = st.copy()
    tr1 = st2.pop(0)
    t1 = tr1.stats.starttime
    t2 = tr1.stats.endtime
    td = t2 - t1
    # cut a gap into the middle of the first trace
    tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
    tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
    st2.insert(1, tr1a)
    st2.insert(3, tr1b)
    res = coincidenceTrigger("recstalta", 2.5, 1, st2, 2,
                             trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                             sta=0.3, lta=5)
    self.assertTrue(len(res) == 5)
    self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
    self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
    self.assertTrue(1.5 < res[3]['duration'] < 1.7)
    self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
    self.assertTrue(res[3]['coincidence_sum'] == 2.0)
    # 7. same as 3 but modify input trace ids and check output of trace_ids
    # and other additional information with ``details=True``
    st2 = st.copy()
    st2[0].stats.network = "XX"
    st2[1].stats.location = "99"
    st2[1].stats.network = ""
    # NOTE(review): the line below repeats the location assignment two
    # statements up; redundant but harmless
    st2[1].stats.location = "99"
    st2[1].stats.channel = ""
    st2[2].stats.channel = "EHN"
    st2[3].stats.network = ""
    st2[3].stats.channel = ""
    st2[3].stats.station = ""
    trace_ids = {
        'XX.UH1..SHZ': 0.4,
        '.UH2.99.': 0.35,
        'BW.UH3..EHN': 0.4,
        '...': 0.25
    }
    res = coincidenceTrigger("recstalta", 3.5, 1, st2, 1.0,
                             trace_ids=trace_ids, details=True, sta=0.5,
                             lta=10)
    self.assertTrue(len(res) == 3)
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
    self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
    self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
    self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
    self.assertTrue(res[0]['coincidence_sum'] == 1.4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
    self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
    self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[1]['coincidence_sum'] == 1.15)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
    self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
    self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
    self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
    self.assertTrue(res[2]['coincidence_sum'] == 1.4)
    expected_keys = [
        'cft_peak_wmean', 'cft_std_wmean', 'cft_peaks', 'cft_stds'
    ]
    expected_types = [float, float, list, list]
    for item in res:
        for key, _type in zip(expected_keys, expected_types):
            self.assertTrue(key in item)
            self.assertTrue(isinstance(item[key], _type))
    # check some of the detailed info
    ev = res[-1]
    self.assertAlmostEqual(ev['cft_peak_wmean'], 18.101139518271076)
    self.assertAlmostEqual(ev['cft_std_wmean'], 4.800051726246676)
    self.assertAlmostEqual(ev['cft_peaks'][0], 18.985548683223936)
    self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
    self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
    self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
    self.assertAlmostEqual(ev['cft_stds'][0], 4.8909448258821362)
    self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
    self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
    self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
def cjc_trigger_routine(startdate, enddate, dataloc, trigloc, routype):
    """
    Run the obspy STA/LTA energy-based coincidence trigger over day files.

    :param startdate: start date string formatted as 'YYYY/MM/DD'
        (the code splits on '/', so a string -- not a UTCDateTime -- is
        required; the old docstring was wrong about this)
    :param enddate: end date string formatted as 'YYYY/MM/DD'
    :param dataloc: path of the continuous input data archive,
        organised as dataloc/YYYY/MM/YYYY-MM-DD*
    :param trigloc: output path for triggered multiplexed miniseed files
    :param routype: detection routine to use, either 'classic'
        (recursive STA/LTA) or anything else for 'carl' (carlSTAtrig)

    Trigger parameters are taken from the rt2detection ``par.trigger_par``
    module. Returns the list of waveform filenames written out.
    """
    # Import parameter settings from the rt2detection package
    import sys
    sys.path.insert(0, "/home/calumch/my_programs/Building/rt2detection")
    from par import trigger_par as defaults
    print(defaults.stalen)
    # Format dates: split 'YYYY/MM/DD' strings into components
    startyear, startmonth, startday = startdate.split('/')
    endyear, endmonth, endday = enddate.split('/')
    # Import modules
    from obspy import read as obsread
    from obspy import UTCDateTime
    import glob
    import os
    import numpy as np
    from obspy.signal import coincidenceTrigger
    # Generate list of days to check through
    lengthinseconds = UTCDateTime(endyear + ' ' + endmonth + ' ' + endday) - \
        UTCDateTime(startyear + ' ' + startmonth + ' ' + startday)
    lendays = lengthinseconds / 86400
    dfiles = []
    dates = []
    for i in range(0, int(lendays) + 1):
        dates.append(UTCDateTime(startyear + ' ' + startmonth + ' ' +
                                 startday) + (i * 86400))
        dfiles.extend(glob.glob(dataloc + '/' + str(dates[i].year) + '/' +
                                str(dates[i].month).zfill(2) + '/' +
                                str(dates[i].year) + '-' +
                                str(dates[i].month).zfill(2) + '-' +
                                str(dates[i].day).zfill(2) + '*'))
    print(len(dfiles))
    wavelist = []  # Initialize list of written waveform files
    # Read in data
    for hfile in dfiles:
        print('Working on file: ' + hfile)
        st = obsread(hfile)
        st1 = st.copy()
        if not defaults.comp == 'all':
            st1 = st1.select(channel='*' + defaults.comp)
    # De-mean data
    # NOTE(review): demeaning is applied to the original `st` (written out
    # below) while the filtered copy `st1` used for triggering is taken
    # beforehand -- confirm this asymmetry is intentional
        for tr in st:
            tr.data = tr.data - np.mean(tr.data)
    # Filter data
        st1.filter('bandpass', freqmin=defaults.lowcut,
                   freqmax=defaults.highcut)
    # Use the obspy triggering routine
        trig = []
        if routype == 'classic':
            trig = coincidenceTrigger("recstalta", defaults.trigon,
                                      defaults.trigoff, st1,
                                      defaults.netsum,
                                      sta=defaults.stalen,
                                      lta=defaults.ltalen,
                                      delete_long_trigger=True,
                                      trigger_off_extension=defaults.netwin)
        else:
            try:
                trig = coincidenceTrigger("carlstatrig", defaults.trigon,
                                          defaults.trigoff, st1,
                                          defaults.netsum,
                                          sta=defaults.stalen,
                                          lta=defaults.ltalen,
                                          ratio=defaults.crat,
                                          quiet=defaults.cquite,
                                          delete_long_trigger=True)
            except Exception:
                # best effort: keep scanning the remaining day files
                print('Triggering routine failed, suggest altering '
                      'parameters')
    # Cut data and write out in multiplexed miniseed files
        if trig and defaults.trigout == 'Y':
            for event in trig:
                stout = st.slice(event['time'] - defaults.precut,
                                 event['time'] + defaults.postcut)
                stt = stout[0].stats.starttime
                filename = str(stt.year) + '-' + \
                    str(stt.month).zfill(2) + '-' + \
                    str(stt.day).zfill(2) + '-' + \
                    str(stt.hour).zfill(2) + \
                    str(stt.minute).zfill(2) + '-' + \
                    str(stt.second).zfill(2) + '.' + \
                    defaults.net + '_' + str(len(stout)).zfill(3) + '_00'
                # ensure the year/month output directory tree exists
                outdir = trigloc + '/' + str(stt.year) + '/' + \
                    str(stt.month).zfill(2)
                if not os.path.isdir(outdir):
                    os.makedirs(outdir)
                filename = outdir + '/' + filename
                wavelist.append(filename)
                try:
                    stout.write(filename, format="MSEED",
                                encoding="STEIM2")
                except Exception:
                    # Cope with dtype issues: STEIM2 needs int32 samples
                    for tr in stout:
                        tr.data = np.array(tr.data, dtype=np.int32)
                    stout.write(filename, format='MSEED', encoding='STEIM2')
                print('Written triggered file as: ' + filename)
        elif defaults.trigout == 'N':
            # str() is required here: the old code concatenated an int and
            # raised a TypeError whenever this branch ran
            print('Triggers will not be written out but I made ' +
                  str(len(trig)) + ' detections')
        elif not trig:
            print('No triggers were detected')
    return wavelist
stations = ["AIGLE", "SENIN", "DIX", "LAUCH", "MMK", "SIMPL"] st = Stream() for station in stations: try: tmp = client.getWaveform("CH", station, "", "[EH]HZ", t, t2, metadata=True) except: print station, "---" continue st += tmp st.taper() st.filter("bandpass", freqmin=1, freqmax=20) triglist = coincidenceTrigger("recstalta", 10, 2, st, 4, sta=0.5, lta=10) print len(triglist), "events triggered." for trig in triglist: closest_sta = trig['stations'][0] tr = st.select(station=closest_sta)[0] trig['latitude'] = tr.stats.coordinates.latitude trig['longitude'] = tr.stats.coordinates.longitude paz_wa = {'sensitivity': 2800, 'zeros': [0j], 'gain': 1, 'poles': [-6.2832-4.7124j, -6.2832+4.7124j]} for trig in triglist: t = trig['time'] print "#" * 80 print "Trigger time:", t
def test_coincidenceTriggerWithSimilarityChecking(self):
    """
    Test network coincidence trigger with cross correlation similarity
    checking of given event templates.
    """
    st = Stream()
    files = ["BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH3._.SHN.D.2010.147.cut.slist.gz",
             "BW.UH3._.SHE.D.2010.147.cut.slist.gz",
             "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"]
    for filename in files:
        filename = os.path.join(self.path, filename)
        st += read(filename)
    # some prefiltering used for UH network
    st.filter('bandpass', freqmin=10, freqmax=20)
    # set up template event streams: two templates on UH3 (all three
    # components), one on UH1
    times = ["2010-05-27T16:24:33.095000", "2010-05-27T16:27:30.370000"]
    templ = {}
    for t in times:
        t = UTCDateTime(t)
        st_ = st.select(station="UH3").slice(t, t + 2.5).copy()
        templ.setdefault("UH3", []).append(st_)
    times = ["2010-05-27T16:27:30.574999"]
    for t in times:
        t = UTCDateTime(t)
        st_ = st.select(station="UH1").slice(t, t + 2.5).copy()
        templ.setdefault("UH1", []).append(st_)
    trace_ids = {"BW.UH1..SHZ": 1,
                 "BW.UH2..SHZ": 1,
                 "BW.UH3..SHZ": 1,
                 "BW.UH4..EHZ": 1}
    similarity_thresholds = {"UH1": 0.8, "UH3": 0.7}
    with warnings.catch_warnings(record=True) as w:
        # avoid getting influenced by the warning filters getting set up
        # differently in obspy-runtests.
        # (e.g. depending on options "-v" and "-q")
        warnings.resetwarnings()
        trig = coincidenceTrigger(
            "classicstalta", 5, 1, st.copy(), 4, sta=0.5, lta=10,
            trace_ids=trace_ids, event_templates=templ,
            similarity_threshold=similarity_thresholds)
    # two warnings get raised
    self.assertEqual(len(w), 2)
    # check floats in resulting dictionary separately
    # (pop() removes them so the final dict comparison below only covers
    # the exactly-comparable fields)
    self.assertAlmostEqual(trig[0].pop('duration'), 3.9600000381469727)
    self.assertAlmostEqual(trig[1].pop('duration'), 1.9900000095367432)
    self.assertAlmostEqual(trig[2].pop('duration'), 1.9200000762939453)
    self.assertAlmostEqual(trig[3].pop('duration'), 3.9200000762939453)
    self.assertAlmostEqual(trig[0]['similarity'].pop('UH1'), 0.94149447384)
    self.assertAlmostEqual(trig[0]['similarity'].pop('UH3'), 1)
    self.assertAlmostEqual(trig[1]['similarity'].pop('UH1'), 0.65228204570)
    self.assertAlmostEqual(trig[1]['similarity'].pop('UH3'), 0.72679293429)
    self.assertAlmostEqual(trig[2]['similarity'].pop('UH1'), 0.89404458774)
    self.assertAlmostEqual(trig[2]['similarity'].pop('UH3'), 0.74581409371)
    self.assertAlmostEqual(trig[3]['similarity'].pop('UH1'), 1)
    self.assertAlmostEqual(trig[3]['similarity'].pop('UH3'), 1)
    # after the pops above, the 'similarity' sub-dicts are empty
    remaining_results = \
        [{'coincidence_sum': 4.0,
          'similarity': {},
          'stations': ['UH3', 'UH2', 'UH1', 'UH4'],
          'time': UTCDateTime(2010, 5, 27, 16, 24, 33, 210000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH2..SHZ', 'BW.UH1..SHZ',
                        'BW.UH4..EHZ']},
         {'coincidence_sum': 3.0,
          'similarity': {},
          'stations': ['UH3', 'UH1', 'UH2'],
          'time': UTCDateTime(2010, 5, 27, 16, 25, 26, 710000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH1..SHZ', 'BW.UH2..SHZ']},
         {'coincidence_sum': 3.0,
          'similarity': {},
          'stations': ['UH2', 'UH1', 'UH3'],
          'time': UTCDateTime(2010, 5, 27, 16, 27, 2, 260000),
          'trace_ids': ['BW.UH2..SHZ', 'BW.UH1..SHZ', 'BW.UH3..SHZ']},
         {'coincidence_sum': 4.0,
          'similarity': {},
          'stations': ['UH3', 'UH2', 'UH1', 'UH4'],
          'time': UTCDateTime(2010, 5, 27, 16, 27, 30, 510000),
          'trace_ids': ['BW.UH3..SHZ', 'BW.UH2..SHZ', 'BW.UH1..SHZ',
                        'BW.UH4..EHZ']}]
    self.assertTrue(trig == remaining_results)
def test_coincidenceTrigger(self):
    """
    Test network coincidence trigger.

    Runs seven scenarios over the same four-station UH-network day chunk:
    plain triggering, trace-id selection, weighting, max trigger length,
    very sensitive settings, a gappy stream, and ``details=True`` output.
    """
    st = Stream()
    files = ["BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
             "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"]
    for filename in files:
        filename = os.path.join(self.path, filename)
        st += read(filename)
    # some prefiltering used for UH network
    st.filter('bandpass', freqmin=10, freqmax=20)
    # 1. no weighting, no stations specified, good settings
    # => 3 events, no false triggers
    # for the first test we make some additional tests regarding types
    res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5,
                             lta=10)
    self.assertTrue(isinstance(res, list))
    self.assertTrue(len(res) == 3)
    expected_keys = ['time', 'coincidence_sum', 'duration', 'stations',
                     'trace_ids']
    expected_types = [UTCDateTime, float, float, list, list]
    for item in res:
        self.assertTrue(isinstance(item, dict))
        for key, _type in zip(expected_keys, expected_types):
            self.assertTrue(key in item)
            self.assertTrue(isinstance(item[key], _type))
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[0]['coincidence_sum'] == 4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['coincidence_sum'] == 3)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[2]['coincidence_sum'] == 4)
    # 2. no weighting, station selection
    # => 2 events, no false triggers
    trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
    # ignore UserWarnings (traces not covered by trace_ids)
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('ignore', UserWarning)
        re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3,
                                trace_ids=trace_ids, sta=0.5, lta=10)
        self.assertTrue(len(re) == 2)
        self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
        self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
        self.assertTrue(4.2 < re[0]['duration'] < 4.8)
        self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
        self.assertTrue(re[0]['coincidence_sum'] == 3)
        self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
        self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
        self.assertTrue(4.2 < re[1]['duration'] < 4.4)
        self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
        self.assertTrue(re[1]['coincidence_sum'] == 3)
    # 3. weighting, station selection
    # => 3 events, no false triggers
    trace_ids = {'BW.UH1..SHZ': 0.4,
                 'BW.UH2..SHZ': 0.35,
                 'BW.UH3..SHZ': 0.4,
                 'BW.UH4..EHZ': 0.25}
    res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.0,
                             trace_ids=trace_ids, sta=0.5, lta=10)
    self.assertTrue(len(res) == 3)
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[0]['coincidence_sum'] == 1.4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['coincidence_sum'] == 1.15)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
    self.assertTrue(res[2]['coincidence_sum'] == 1.4)
    # 4. weighting, station selection, max_len
    # => 2 events, no false triggers, small event does not overlap anymore
    trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
    # ignore UserWarnings
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('ignore', UserWarning)
        re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.2,
                                trace_ids=trace_ids,
                                max_trigger_length=0.13, sta=0.5, lta=10)
        self.assertTrue(len(re) == 2)
        self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
        self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
        self.assertTrue(0.2 < re[0]['duration'] < 0.3)
        self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
        self.assertTrue(re[0]['coincidence_sum'] == 1.2)
        self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
        self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
        self.assertTrue(0.18 < re[1]['duration'] < 0.2)
        self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
        self.assertTrue(re[1]['coincidence_sum'] == 1.2)
    # 5. station selection, extremely sensitive settings
    # => 4 events, 1 false triggers
    res = coincidenceTrigger("recstalta", 2.5, 1, st.copy(), 2,
                             trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                             sta=0.3, lta=5)
    self.assertTrue(len(res) == 5)
    self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
    self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
    self.assertTrue(1.5 < res[3]['duration'] < 1.7)
    self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
    self.assertTrue(res[3]['coincidence_sum'] == 2.0)
    # 6. same as 5, gappy stream
    # => same as 5 (almost, duration of 1 event changes by 0.02s)
    st2 = st.copy()
    tr1 = st2.pop(0)
    t1 = tr1.stats.starttime
    t2 = tr1.stats.endtime
    td = t2 - t1
    # cut a gap into the middle of the first trace
    tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
    tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
    st2.insert(1, tr1a)
    st2.insert(3, tr1b)
    res = coincidenceTrigger("recstalta", 2.5, 1, st2, 2,
                             trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                             sta=0.3, lta=5)
    self.assertTrue(len(res) == 5)
    self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
    self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
    self.assertTrue(1.5 < res[3]['duration'] < 1.7)
    self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
    self.assertTrue(res[3]['coincidence_sum'] == 2.0)
    # 7. same as 3 but modify input trace ids and check output of trace_ids
    # and other additional information with ``details=True``
    st2 = st.copy()
    st2[0].stats.network = "XX"
    st2[1].stats.location = "99"
    st2[1].stats.network = ""
    # NOTE(review): the line below repeats the location assignment two
    # statements up; redundant but harmless
    st2[1].stats.location = "99"
    st2[1].stats.channel = ""
    st2[2].stats.channel = "EHN"
    st2[3].stats.network = ""
    st2[3].stats.channel = ""
    st2[3].stats.station = ""
    trace_ids = {'XX.UH1..SHZ': 0.4,
                 '.UH2.99.': 0.35,
                 'BW.UH3..EHN': 0.4,
                 '...': 0.25}
    res = coincidenceTrigger("recstalta", 3.5, 1, st2, 1.0,
                             trace_ids=trace_ids, details=True, sta=0.5,
                             lta=10)
    self.assertTrue(len(res) == 3)
    self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
    self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
    self.assertTrue(4.2 < res[0]['duration'] < 4.8)
    self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
    self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
    self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
    self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
    self.assertTrue(res[0]['coincidence_sum'] == 1.4)
    self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
    self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
    self.assertTrue(3.2 < res[1]['duration'] < 3.7)
    self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
    self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
    self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
    self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[1]['coincidence_sum'] == 1.15)
    self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
    self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
    self.assertTrue(4.2 < res[2]['duration'] < 4.4)
    self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
    self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
    self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
    self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
    self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
    self.assertTrue(res[2]['coincidence_sum'] == 1.4)
    expected_keys = ['cft_peak_wmean', 'cft_std_wmean', 'cft_peaks',
                     'cft_stds']
    expected_types = [float, float, list, list]
    for item in res:
        for key, _type in zip(expected_keys, expected_types):
            self.assertTrue(key in item)
            self.assertTrue(isinstance(item[key], _type))
    # check some of the detailed info
    ev = res[-1]
    self.assertAlmostEqual(ev['cft_peak_wmean'], 18.101139518271076)
    self.assertAlmostEqual(ev['cft_std_wmean'], 4.800051726246676)
    self.assertAlmostEqual(ev['cft_peaks'][0], 18.985548683223936)
    self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
    self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
    self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
    self.assertAlmostEqual(ev['cft_stds'][0], 4.8909448258821362)
    self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
    self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
    self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
# Fetch vertical-component data for a list of Swiss stations, run a network
# coincidence trigger, and attach the closest station's coordinates to each
# trigger. (`client`, `t`, `t2` are defined earlier in the full script.)
stations = ["AIGLE", "SENIN", "DIX", "LAUCH", "MMK", "SIMPL"]
st = Stream()
for station in stations:
    try:
        tmp = client.getWaveform("CH", station, "", "[EH]HZ", t, t2,
                                 metadata=True)
    except Exception:
        # best effort: a station with no data for this window is skipped.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        print(station, "---")
        continue
    st += tmp
st.taper()
st.filter("bandpass", freqmin=1, freqmax=20)
triglist = coincidenceTrigger("recstalta", 10, 2, st, 4, sta=0.5, lta=10)
print(len(triglist), "events triggered.")
for trig in triglist:
    # first entry in 'stations' is taken as the closest station; copy its
    # coordinates (from the fetched metadata) onto the trigger dict
    closest_sta = trig['stations'][0]
    tr = st.select(station=closest_sta)[0]
    trig['latitude'] = tr.stats.coordinates.latitude
    trig['longitude'] = tr.stats.coordinates.longitude
# Wood-Anderson instrument response -- presumably used for local magnitude
# estimation further down in the full script; TODO confirm
paz_wa = {'sensitivity': 2800, 'zeros': [0j], 'gain': 1,
          'poles': [-6.2832 - 4.7124j, -6.2832 + 4.7124j]}
for trig in triglist:
    t = trig['time']
    print("#" * 80)
    print("Trigger time:", t)