def test_space_cluster(self):
    """Test the wrapper around dist_mat_km."""
    from eqcorrscan.utils.clustering import space_cluster
    from obspy.clients.fdsn import Client
    from obspy import UTCDateTime

    fdsn_client = Client("IRIS")
    day_start = UTCDateTime("2002-01-01")
    day_end = UTCDateTime("2002-01-02")
    catalog = fdsn_client.get_events(
        starttime=day_start, endtime=day_end, minmagnitude=6,
        catalog="ISC")
    groups = space_cluster(catalog=catalog, d_thresh=1000, show=False)
    # Clustering must partition the catalog: every event appears in
    # exactly one group, so the group sizes sum to the catalog size.
    n_grouped = sum(len(group) for group in groups)
    self.assertEqual(n_grouped, len(catalog))
def get_test_data():
    """
    Generate a set of waveforms from GeoNet for use in subspace testing.

    :return: Tuple of (design_set, st), where design_set is a list of cut
        template streams with no filters applied, and st is a one-hour
        continuous stream to detect within.
    :rtype: tuple
    """
    from obspy import UTCDateTime
    from eqcorrscan.utils.catalog_utils import filter_picks
    from eqcorrscan.utils.clustering import space_cluster
    from obspy.clients.fdsn import Client

    client = Client("GEONET")
    cat = client.get_events(
        minlatitude=-40.98, maxlatitude=-40.85, minlongitude=175.4,
        maxlongitude=175.5, starttime=UTCDateTime(2016, 5, 11),
        endtime=UTCDateTime(2016, 5, 13))
    cat = filter_picks(catalog=cat, top_n_picks=5)
    stachans = list(set([
        (pick.waveform_id.station_code, pick.waveform_id.channel_code)
        for event in cat for pick in event.picks]))
    clusters = space_cluster(catalog=cat, d_thresh=2, show=False)
    # Work on the largest spatial cluster only.
    cluster = sorted(clusters, key=lambda c: len(c))[-1]
    # NOTE: the original code constructed a second, redundant
    # Client('GEONET') here; we reuse the one created above.
    design_set = []
    bulk_info = []
    for event in cluster:
        # Request slightly more than needed (15.1 s) so the later trim to
        # exactly 15 s is not short of data.
        t1 = event.origins[0].time + 5
        t2 = t1 + 15.1
        for station, channel in stachans:
            bulk_info.append(
                ('NZ', station, '*', channel[0:2] + '?', t1, t2))
    st = client.get_waveforms_bulk(bulk=bulk_info)
    for event in cluster:
        t1 = event.origins[0].time + 5
        t2 = t1 + 15
        design_set.append(st.copy().trim(t1, t2))
    # One hour of continuous data to run detections in.
    t1 = UTCDateTime(2016, 5, 11, 19)
    t2 = UTCDateTime(2016, 5, 11, 20)
    bulk_info = [('NZ', stachan[0], '*', stachan[1][0:2] + '?', t1, t2)
                 for stachan in stachans]
    st = client.get_waveforms_bulk(bulk_info)
    st.merge().detrend('simple').trim(starttime=t1, endtime=t2)
    return design_set, st
within these distance clustered groups. Finally, clustering based on waveform cross correlation will be applied within these groups. """ import sys sys.path.insert(0, '/home/chet/EQcorrscan') from eqcorrscan.utils import clustering, plotting from obspy import read_events # Read in whole damn catalog cat = read_events('/home/chet/data/mrp_data/sherburn_catalog/quake-ml/' + 'rotnga/final_cat/bbox_final_QML.xml') # Cluster events by distance groups = clustering.space_cluster(cat, d_thresh=2.0) # Eliminate groups with size below a certain threshold real_groups = [g for g in refined_groups if len(g) > 1] group_lengths = [len(g) for g in real_groups] # print('At corr_thresh: ' + str(corr_thresh)) print('Total number of groups: %d' % len(real_groups)) print('Total number of events: %d' % sum(group_lengths)) # Here we can write the groups (which are Catalogs) to qml/shapefile/xyz for i, group_cat in enumerate(real_groups): file_names = '/media/chet/hdd/seismic/NZ/catalogs/qml/space_groups/' +\ 'nlloc_thresh_%.02f_group_%03d' % (d_thresh, i) # Write shapefile first group_cat.write(file_names + '.shp', format="SHAPEFILE") # Now qml
def run_tutorial(plot=False, multiplex=True, return_streams=False):
    """
    Run the subspace detection tutorial.

    :param plot: Whether to plot the detector and alignment.
    :param multiplex: Whether to multiplex channels when constructing the
        subspace detector.
    :param return_streams: If True, also return the extracted detection
        streams alongside the detections.
    :return: detections, or (detections, det_streams) when return_streams
        is True.
    """
    # We are going to use data from the GeoNet (New Zealand) catalogue. GeoNet
    # do not implement the full FDSN system yet, so we have a hack to get
    # around this. It is not strictly part of EQcorrscan, so we haven't
    # included it here, but you can find it in the tutorials directory of the
    # github repository
    import obspy
    # obspy moved its FDSN client between major versions; pick the right path.
    if int(obspy.__version__.split('.')[0]) >= 1:
        from obspy.clients.fdsn import Client
    else:
        from obspy.fdsn import Client
    from eqcorrscan.tutorials.get_geonet_events import get_geonet_events
    from obspy import UTCDateTime
    from eqcorrscan.utils.catalog_utils import filter_picks
    from eqcorrscan.utils.clustering import space_cluster
    from eqcorrscan.core import subspace

    cat = get_geonet_events(minlat=-40.98, maxlat=-40.85, minlon=175.4,
                            maxlon=175.5, startdate=UTCDateTime(2016, 5, 1),
                            enddate=UTCDateTime(2016, 5, 20))
    # This gives us a catalog of events - it takes a while to download all
    # the information, so give it a bit!
    # We will generate a five station, multi-channel detector.
    cat = filter_picks(catalog=cat, top_n_picks=5)
    stachans = list(
        set([(pick.waveform_id.station_code, pick.waveform_id.channel_code)
             for event in cat for pick in event.picks]))
    # In this tutorial we will only work on one cluster, defined spatially.
    # You can work on multiple clusters, or try to whole set.
    clusters = space_cluster(catalog=cat, d_thresh=2, show=False)
    # We will work on the largest cluster
    cluster = sorted(clusters, key=lambda c: len(c))[-1]
    # This cluster contains 32 events, we will now download a trim the
    # waveforms. Note that each chanel must start at the same time and be the
    # same length for multiplexing. If not multiplexing EQcorrscan will
    # maintain the individual differences in time between channels and delay
    # the detection statistics by that amount before stacking and detection.
    client = Client('GEONET')
    design_set = []
    bulk_info = []
    # One bulk request covering a 25 s window after each event origin, for
    # every (station, channel) pair in the picked catalog.
    for event in cluster:
        t1 = event.origins[0].time
        t2 = t1 + 25
        for station, channel in stachans:
            bulk_info.append(('NZ', station, '*', channel[0:2] + '?',
                              t1, t2))
    st = client.get_waveforms_bulk(bulk=bulk_info)
    # Cut one design stream per event from the downloaded data.
    for event in cluster:
        t1 = event.origins[0].time
        t2 = t1 + 25
        design_set.append(st.copy().trim(t1, t2))
    # Construction of the detector will process the traces, then align them,
    # before multiplexing.
    detector = subspace.Detector()
    detector.construct(streams=design_set, lowcut=2.0, highcut=9.0,
                       filt_order=4, sampling_rate=20, multiplex=multiplex,
                       name='Wairarapa1', align=True, reject=0.2,
                       shift_len=6, plot=plot).partition(9)
    if plot:
        detector.plot()
    # We also want the continuous stream to detect in.
    t1 = UTCDateTime(2016, 5, 11, 19)
    t2 = UTCDateTime(2016, 5, 11, 20)
    # We are going to look in a single hour just to minimize cost, but you
    # can run for much longer.
    bulk_info = [('NZ', stachan[0], '*',
                  stachan[1][0] + '?' + stachan[1][-1],
                  t1, t2) for stachan in detector.stachans]
    st = client.get_waveforms_bulk(bulk_info)
    st.merge().detrend('simple').trim(starttime=t1, endtime=t2)
    # We set a very low threshold because the detector is not that great, we
    # haven't aligned it particularly well - however, at this threshold we
    # make two real detections.
    detections, det_streams = detector.detect(st=st, threshold=0.005,
                                              trig_int=2,
                                              extract_detections=True)
    if return_streams:
        return detections, det_streams
    else:
        return detections
def test_space_cluster(self):
    """Test the wrapper around dist_mat_km."""
    groups = space_cluster(catalog=self.cat, d_thresh=1000, show=False)
    # The groups must partition the catalog: summed group sizes equal
    # the number of events in the input catalog.
    total_events = sum(len(group) for group in groups)
    self.assertEqual(total_events, len(self.cat))
def run_tutorial(plot=False, multiplex=True, return_streams=False):
    """
    Run the subspace detection tutorial.

    :param plot: Whether to plot the detector and alignment.
    :param multiplex: Whether to multiplex channels when constructing the
        subspace detector.
    :param return_streams: If True, also return the extracted detection
        streams alongside the detections.
    :return: detections, or (detections, det_streams) when return_streams
        is True.
    """
    client = Client("GEONET")
    cat = client.get_events(
        minlatitude=-40.98, maxlatitude=-40.85, minlongitude=175.4,
        maxlongitude=175.5, starttime=UTCDateTime(2016, 5, 1),
        endtime=UTCDateTime(2016, 5, 20))
    print("Downloaded a catalog of %i events" % len(cat))
    # This gives us a catalog of events - it takes a while to download all
    # the information, so give it a bit!
    # We will generate a five station, multi-channel detector.
    cat = filter_picks(catalog=cat, top_n_picks=5)
    stachans = list(set([
        (pick.waveform_id.station_code, pick.waveform_id.channel_code)
        for event in cat for pick in event.picks]))
    # In this tutorial we will only work on one cluster, defined spatially.
    # You can work on multiple clusters, or try the whole set.
    clusters = space_cluster(catalog=cat, d_thresh=2, show=False)
    # We will work on the largest cluster
    cluster = sorted(clusters, key=lambda c: len(c))[-1]
    # This cluster contains 32 events, we will now download and trim the
    # waveforms. Note that each chanel must start at the same time and be the
    # same length for multiplexing. If not multiplexing EQcorrscan will
    # maintain the individual differences in time between channels and delay
    # the detection statistics by that amount before stacking and detection.
    # NOTE: the original code constructed a second, redundant
    # Client('GEONET') here; we reuse the one created above.
    design_set = []
    bulk_info = []
    for event in cluster:
        t1 = event.origins[0].time
        # Have to download extra data, otherwise GeoNet will trim
        # wherever suits.
        t2 = t1 + 25.1
        t1 -= 0.1
        for station, channel in stachans:
            bulk_info.append(('NZ', station, '*', channel[0:2] + '?',
                              t1, t2))
    print("Downloading data for %i events" % len(cluster))
    st = client.get_waveforms_bulk(bulk=bulk_info)
    # Cut one design stream per event from the downloaded data.
    for event in cluster:
        t1 = event.origins[0].time
        t2 = t1 + 25
        design_set.append(st.copy().trim(t1, t2))
    # Construction of the detector will process the traces, then align them,
    # before multiplexing.
    detector = subspace.Detector()
    detector.construct(streams=design_set, lowcut=2.0, highcut=9.0,
                       filt_order=4, sampling_rate=20, multiplex=multiplex,
                       name='Wairarapa1', align=True, reject=0.2,
                       shift_len=6, plot=plot).partition(9)
    print("Constructed Detector")
    if plot:
        detector.plot()
    # We also want the continuous stream to detect in.
    t1 = UTCDateTime(2016, 5, 11, 19)
    t2 = UTCDateTime(2016, 5, 11, 20)
    # We are going to look in a single hour just to minimize cost, but you
    # can run for much longer.
    bulk_info = [('NZ', stachan[0], '*',
                  stachan[1][0] + '?' + stachan[1][-1],
                  t1, t2) for stachan in detector.stachans]
    print("Downloading continuous data")
    st = client.get_waveforms_bulk(bulk_info)
    st.merge().detrend('simple').trim(starttime=t1, endtime=t2)
    # We set a very low threshold because the detector is not that great, we
    # haven't aligned it particularly well - however, at this threshold we
    # make two real detections.
    print("Computing detections")
    detections, det_streams = detector.detect(st=st, threshold=0.3,
                                              trig_int=2,
                                              extract_detections=True)
    if return_streams:
        return detections, det_streams
    else:
        return detections