def test_warn_zeros(self):
     st = self.st.copy()
     template = template_gen(self.picks, st.copy(), 10)
     self.assertTrue('LABE' in [tr.stats.station for tr in template])
     st.select(station='LABE', channel='SN')[0].data = np.zeros(10000)
     template = template_gen(self.picks, st, 10)
     self.assertFalse('LABE' in [tr.stats.station for tr in template])
Example #2
 def test_not_delayed(self):
     """Test the method of template_gen without applying delays to
     channels."""
     client = Client("GEONET")
     cat = client.get_events(minlatitude=-40.98,
                             maxlatitude=-40.85,
                             minlongitude=175.4,
                             maxlongitude=175.5,
                             starttime=UTCDateTime(2016, 5, 1),
                             endtime=UTCDateTime(2016, 5, 2))
     cat = filter_picks(catalog=cat, top_n_picks=5)
     template = template_gen(method="from_client",
                             catalog=cat,
                             client_id='GEONET',
                             lowcut=None,
                             highcut=None,
                             samp_rate=100.0,
                             filt_order=4,
                             length=10.0,
                             prepick=0.5,
                             swin='all',
                             process_len=3600,
                             plot=False,
                             delayed=False)[0]
     for tr in template:
         tr.stats.starttime.precision = 6
     starttime = template[0].stats.starttime
     length = template[0].stats.npts
     self.assertEqual(len(template), 5)
     for tr in template:
         self.assertTrue(
             abs((tr.stats.starttime - starttime)) <= tr.stats.delta)
         self.assertEqual(tr.stats.npts, length)
     template = template_gen(method="from_client",
                             catalog=cat,
                             client_id='GEONET',
                             lowcut=None,
                             highcut=None,
                             samp_rate=100.0,
                             filt_order=4,
                             length=10.0,
                             prepick=0.5,
                             swin='P_all',
                             process_len=3600,
                             plot=False,
                             delayed=False)[0]
     for tr in template:
         tr.stats.starttime.precision = 6
     starttime = template[0].stats.starttime
     length = template[0].stats.npts
     self.assertEqual(len(template), 15)
     for tr in template:
         self.assertTrue(
             abs((tr.stats.starttime - starttime)) <= tr.stats.delta)
         self.assertEqual(tr.stats.npts, length)
Example #3
    def test_sac_template_gen(self):
        """Test template generation."""
        samp_rate = 20
        length = 8

        for event in ['2014p611252', 'No_head']:
            test_files = os.path.join(
                os.path.abspath(os.path.dirname(__file__)), 'test_data', 'SAC',
                event, '*')
            # Test with various input types
            filelist = glob.glob(test_files)
            streamlist = [read(f) for f in glob.glob(test_files)]
            stream = read(test_files)
            for sac_files in [filelist, streamlist, stream]:
                templates = template_gen(method="from_sac",
                                         sac_files=sac_files,
                                         lowcut=2.0,
                                         highcut=8.0,
                                         samp_rate=samp_rate,
                                         filt_order=4,
                                         length=length,
                                         swin='all',
                                         prepick=0.1,
                                         plot=False)
                self.assertEqual(len(templates), 1)
                template = templates[0]
                self.assertEqual(len(template), len(sactoevent(stream).picks))
                for tr in template:
                    self.assertEqual(len(tr.data), length * samp_rate)
Example #4
 def test_save_progress(self):
     """ Test template creation with progress saving """
     client = Client('GEONET')
     catalog = client.get_events(starttime=UTCDateTime(2016, 1, 4, 0, 50),
                                 endtime=UTCDateTime(2016, 1, 4, 1, 20))
     # Gets a catalog of 2 events separated by 127s
     # Need a bigger gap to allow moveouts
     catalog[0].origins[0].time -= 600
     for pick in catalog[0].picks:
         pick.time -= 600
     catalog = filter_picks(catalog=catalog, top_n_picks=5)
     templates = template_gen(method="from_client",
                              catalog=catalog,
                              client_id="GEONET",
                              lowcut=2,
                              highcut=5,
                              samp_rate=20,
                              filt_order=4,
                              prepick=0.4,
                              process_len=600,
                              swin="P",
                              save_progress=True,
                              length=2)
     assert (os.path.isdir("eqcorrscan_temporary_templates"))
     saved_templates = [
         read(f)
         for f in sorted(glob.glob("eqcorrscan_temporary_templates/*.ms"))
     ]
     # Writing to miniseed adds miniseed stats dict
     for saved_template, template in zip(saved_templates, templates):
         for saved_tr in saved_template:
             tr = template.select(id=saved_tr.id)[0]
             assert (np.allclose(saved_tr.data, tr.data, atol=0.01))
     shutil.rmtree("eqcorrscan_temporary_templates")
Example #5
 def test_bad_client(self):
     """Ensure passing a non-client raises."""
     client = _StreamTestClient()
     cat = client.get_default_catalog()
     with self.assertRaises(NotImplementedError):
         template_gen('from_client',
                      client_id=cat,
                      catalog=cat,
                      highcut=None,
                      lowcut=None,
                      filt_order=4,
                      samp_rate=100,
                      prepick=0.1,
                      length=10,
                      process_len=20,
                      data_pad=5)
Example #6
 def test_download_various_methods(self):
     """
     Download data from a server and store it in various databases,
     then create templates using the various methods.
     """
     client = Client('GEONET')
     # get the events
     catalog = client.get_events(eventid='2016p008194')
     # Select 3 channels to use and download
     sta_chans = [(pick.waveform_id.station_code,
                   pick.waveform_id.channel_code)
                  for pick in catalog[0].picks[0:3]]
     t1 = UTCDateTime(catalog[0].origins[0].time.date)
     t2 = t1 + 86400
     bulk = [('NZ', sta_chan[0], '*', sta_chan[1], t1, t2)
             for sta_chan in sta_chans]
     continuous_st = client.get_waveforms_bulk(bulk)
     continuous_st.merge(fill_value=0)
     # Test template generation via from_meta_file with an in-memory catalog
     kwargs = {
         "process": False,
         "lowcut": None,
         "highcut": None,
         "filt_order": None,
         "swin": "all",
         "prepick": 0.05,
         "all_horiz": False,
         "delayed": True,
         "plot": False,
         "return_event": False,
         "min_snr": None,
         "samp_rate": continuous_st[0].stats.sampling_rate
     }
     templates = template_gen(method="from_meta_file",
                              meta_file=catalog,
                              st=continuous_st,
                              length=3,
                              **kwargs)
     self.assertEqual(len(templates), 1)
     # Test without an event
     templates = template_gen(method="from_meta_file",
                              meta_file=Catalog(),
                              st=continuous_st,
                              length=3,
                              **kwargs)
     self.assertEqual(len(templates), 0)
Example #7
def mktemplates(
        network_code='GEONET',
        plot=True,
        publicIDs=['2016p008122', '2016p008353', '2016p008155',
                   '2016p008194']):
    """Functional wrapper to make templates"""

    client = Client(network_code)
    # We want to download a few events from an earthquake sequence, these are
    # identified by publicID numbers, given as arguments

    catalog = Catalog()
    for publicID in publicIDs:
        try:
            catalog += client.get_events(eventid=publicID,
                                         includearrivals=True)
        except TypeError:
            # Cope with some FDSN services not implementing includearrivals
            catalog += client.get_events(eventid=publicID)

    # Let's plot the catalog to see what we have
    if plot:
        catalog.plot(projection='local', resolution='h')

    # We don't need all the picks, let's take the information from the
    # five most used stations - note that this is done to reduce computational
    # costs.
    catalog = filter_picks(catalog, top_n_picks=5)
    # We only want the P picks in this example, but you can use others or all
    # picks if you want.
    for event in catalog:
        # Filter rather than removing picks in-place: removing items from a
        # list while iterating over it skips elements.
        event.picks = [pick for pick in event.picks
                       if pick.phase_hint != 'S']

    # Now we can generate the templates
    templates = template_gen.template_gen(method='from_client',
                                          catalog=catalog,
                                          client_id=network_code,
                                          lowcut=2.0,
                                          highcut=9.0,
                                          samp_rate=20.0,
                                          filt_order=4,
                                          length=3.0,
                                          prepick=0.15,
                                          swin='all',
                                          process_len=3600,
                                          plot=plot)

    # We now have a series of templates! Using ObsPy's Stream.write() method we
    # can save these to disk for later use.  We will do that now for use in the
    # following tutorials.
    for i, template in enumerate(templates):
        template.write('tutorial_template_' + str(i) + '.ms', format='MSEED')
        # Note that this will warn you about data types.  As we don't care
        # at the moment, whatever ObsPy chooses is fine.
    return
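A minimal driver for the wrapper above might look like the following sketch. It assumes the GeoNet requests succeed and that all four default publicIDs yield a template; the file names mirror the write loop at the end of mktemplates.

# Hedged usage sketch: run the tutorial wrapper, then reload the templates
# it wrote to disk.  Assumes network access to GeoNet.
from obspy import read

mktemplates(plot=False)
templates = [read('tutorial_template_' + str(i) + '.ms') for i in range(4)]
for template in templates:
    print(template)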
Example #8
    def test_seishub(self):
        """Test the seishub method, use obspy default seishub client."""
        import sys
        if sys.version_info.major == 2:
            from future.backports.urllib.request import URLError
        else:
            from urllib.request import URLError
        t = UTCDateTime(2009, 9, 3)
        test_cat = Catalog()
        test_cat.append(Event())
        test_cat[0].origins.append(Origin())
        test_cat[0].origins[0].time = t
        test_cat[0].origins[0].latitude = 45
        test_cat[0].origins[0].longitude = 45
        test_cat[0].origins[0].depth = 5000
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHZ',
                                              network_code='BW'),
                 phase_hint='PG',
                 time=t + 2000))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHN',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHE',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005.5))

        test_url = "http://teide.geophysik.uni-muenchen.de:8080"

        if sys.version_info.major == 3:
            try:
                template = template_gen(method="from_seishub",
                                        catalog=test_cat,
                                        url=test_url,
                                        lowcut=1.0,
                                        highcut=5.0,
                                        samp_rate=20,
                                        filt_order=4,
                                        length=3,
                                        prepick=0.5,
                                        swin='all',
                                        process_len=300)
            except URLError:
                pass
        else:
            pass
        if 'template' in locals():
            self.assertEqual(len(template), 3)
Example #9
 def test_extract_from_stack(self):
     length = 3
     stack = self.st.copy()
     template = template_gen(self.picks, self.st.copy(), 2)
     extracted = extract_from_stack(stack,
                                    template,
                                    length=length,
                                    pre_pick=0.3,
                                    pre_pad=45)
     self.assertEqual(len(template), len(extracted))
     for tr in extracted:
         self.assertEqual(tr.stats.endtime - tr.stats.starttime, length)
Example #10
def mktemplates(network_code='GEONET',
                publicIDs=['2016p008122', '2016p008353', '2016p008155',
                           '2016p008194'], plot=True):
    """Functional wrapper to make templates"""
    # We want to download some QuakeML files from the New Zealand GeoNet
    # network. GeoNet currently doesn't support FDSN event queries, so we
    # work around this by downloading QuakeML from their quakeml.geonet site.

    client = Client(network_code)
    # We want to download a few events from an earthquake sequence, these are
    # identified by publicID numbers, given as arguments

    catalog = Catalog()
    for publicID in publicIDs:
        if network_code == 'GEONET':
            data_stream = client._download(
                'http://quakeml.geonet.org.nz/quakeml/1.2/' + publicID)
            data_stream.seek(0, 0)
            catalog += read_events(data_stream, format="quakeml")
            data_stream.close()
        else:
            catalog += client.get_events(
                eventid=publicID, includearrivals=True)

    # Let's plot the catalog to see what we have
    if plot:
        catalog.plot(projection='local', resolution='h')

    # We don't need all the picks, let's take the information from the
    # five most used stations - note that this is done to reduce computational
    # costs.
    catalog = filter_picks(catalog, top_n_picks=5)
    # We only want the P picks in this example, but you can use others or all
    # picks if you want.
    for event in catalog:
        # Filter rather than removing picks in-place: removing items from a
        # list while iterating over it skips elements.
        event.picks = [pick for pick in event.picks
                       if pick.phase_hint != 'S']

    # Now we can generate the templates
    templates = template_gen.template_gen(
        method='from_client', catalog=catalog, client_id=network_code,
        lowcut=2.0, highcut=9.0, samp_rate=20.0, filt_order=4, length=3.0,
        prepick=0.15, swin='all', process_len=3600, debug=0, plot=plot)

    # We now have a series of templates! Using ObsPy's Stream.write() method we
    # can save these to disk for later use.  We will do that now for use in the
    # following tutorials.
    for i, template in enumerate(templates):
        template.write('tutorial_template_' + str(i) + '.ms', format='MSEED')
        # Note that this will warn you about data types.  As we don't care
        # at the moment, whatever ObsPy chooses is fine.
    return
Example #11
 def test_day_long_processing(self):
     templates = template_gen(method='from_meta_file',
                              meta_file=self.cat,
                              st=self.st,
                              lowcut=2.0,
                              highcut=9.0,
                              samp_rate=20.0,
                              filt_order=3,
                              length=2,
                              prepick=0.1,
                              swin='P',
                              all_horiz=True)
     self.assertEqual(len(templates), 1)
     self.assertEqual(len(templates[0]), 3)
Example #12
 def test_extract_from_stack_and_process(self):
     length = 3
     stack = self.st.copy()
     template = template_gen(self.picks, self.st.copy(), 2)
     extracted = extract_from_stack(stack,
                                    template,
                                    length=length,
                                    pre_pick=0.3,
                                    pre_pad=45,
                                    pre_processed=False,
                                    samp_rate=20,
                                    lowcut=2,
                                    highcut=8)
     self.assertEqual(len(template), len(extracted))
     for tr in extracted:
         self.assertEqual(tr.stats.endtime - tr.stats.starttime, length)
Example #13
 def test_triggered_data(self):
     client = Client("GEONET")
     catalog = client.get_events(eventid="1481730")
     templates = template_gen("from_client",
                              lowcut=2.,
                              highcut=15.,
                              samp_rate=40.,
                              swin="all",
                              filt_order=4,
                              prepick=0.2,
                              catalog=catalog,
                              length=3.0,
                              client_id="GEONET",
                              all_horiz=True,
                              process_len=600,
                              min_snr=5.,
                              skip_short_chans=True)
     self.assertEqual(len(templates), 0)
Example #14
 def test_from_client(self):
     """Test for using a waveform client not related to obspy's Clients."""
     client = _StreamTestClient()
     cat = client.get_default_catalog()
     temps = template_gen('from_client',
                          client_id=client,
                          catalog=cat,
                          highcut=None,
                          lowcut=None,
                          filt_order=4,
                          samp_rate=100,
                          prepick=0.1,
                          length=10,
                          process_len=20,
                          data_pad=5)
     self.assertEqual(len(temps), 1)  # there should be one template stream
     self.assertEqual(len(temps[0]), 1)  # with one trace
     self.assertGreater(len(temps[0][0].data), 1)  # with some data
Example #15
 def test_missing_waveform_id(self):
     testing_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'test_data')
     quakeml = os.path.join(testing_path,
                            '20130901T041115_missingwavid.xml')
     st = read(
         os.path.join(testing_path, 'WAV', 'TEST_',
                      '2013-09-01-0410-35.DFDPC_024_00'))
     templates = template_gen(method="from_meta_file",
                              meta_file=quakeml,
                              st=st,
                              lowcut=2.0,
                              highcut=9.0,
                              samp_rate=20.0,
                              filt_order=3,
                              length=2,
                              prepick=0.1,
                              swin='S')
     self.assertEqual(len(templates), 1)
Example #16
 def test_undefined_phase_type(self):
     with self.assertRaises(IOError):
         template_gen(picks=self.picks,
                      st=self.st.copy(),
                      length=2,
                      swin='bob')
Example #17
 def test_missing_data(self):
     picks = copy.deepcopy(self.picks)
     picks.append(picks[-1])
     picks[-1].waveform_id.station_code = 'DUMMY'
     template = template_gen(picks, self.st.copy(), 10)
     self.assertFalse('DUMMY' in [tr.stats.station for tr in template])
Example #18
 def test_no_matched_picks(self):
     picks = [copy.deepcopy(self.picks[0])]
     picks[0].waveform_id.station_code = 'DUMMY'
     template = template_gen(picks, self.st.copy(), 10)
     self.assertFalse(template)
Example #19
 def test_all_phase_methods(self):
     sfile = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'test_data', 'REA', 'TEST_',
                          '01-0411-15L.S201309')
     catalog = read_events(sfile)
     p_stations = list(
         set([
             pick.waveform_id.station_code for pick in catalog[0].picks
             if pick.phase_hint == 'P'
         ]))
     s_stations = list(
         set([
             pick.waveform_id.station_code for pick in catalog[0].picks
             if pick.phase_hint == 'S'
         ]))
     st = read(
         os.path.join(os.path.dirname(os.path.abspath(__file__)),
                      'test_data', 'WAV', 'TEST_',
                      '2013-09-01-0410-35.DFDPC_024_00'))
     templates = template_gen(method="from_meta_file",
                              meta_file=sfile,
                              st=st,
                              lowcut=2,
                              highcut=20,
                              samp_rate=100,
                              filt_order=4,
                              length=6,
                              prepick=0.2,
                              swin='P_all')
     self.assertEqual(len(templates), 1)
     self.assertEqual(len(templates[0]), len(p_stations) * 3)
     for tr in templates[0]:
         pick = [
             p for p in catalog[0].picks
             if p.waveform_id.station_code == tr.stats.station
             and p.phase_hint.upper() == 'P'
         ][0]
         print(tr)
         print(pick)
         self.assertLess(abs(tr.stats.starttime - (pick.time - 0.2)),
                         tr.stats.delta)
     templates = template_gen(method="from_meta_file",
                              meta_file=sfile,
                              st=st,
                              lowcut=2,
                              highcut=20,
                              samp_rate=100,
                              filt_order=4,
                              length=6,
                              prepick=0.2,
                              swin='S_all')
     self.assertEqual(len(templates), 1)
     self.assertEqual(len(templates[0]), len(s_stations) * 3)
     for tr in templates[0]:
         pick = [
             p for p in catalog[0].picks
             if p.waveform_id.station_code == tr.stats.station
             and p.phase_hint.upper() == 'S'
         ][0]
         print(tr)
         print(pick)
         self.assertLess(abs(tr.stats.starttime - (pick.time - 0.2)),
                         tr.stats.delta)
Example #20
def mseed_2_templates(wav_dirs, cat, outdir, length, prepick,
                      highcut=None, lowcut=None, f_order=None,
                      samp_rate=None, min_snr=2.,
                      start=None, end=None, miniseed=True,
                      asdf_file=False, debug=1):
    """
    Function to generate individual mseed files for each event in a catalog
    from a pyasdf file or continuous data.
    :param asdf_file: ASDF file with waveforms and stations
    :param cat: path to xml of Catalog of events for which we'll create
        templates
    :param outdir: output directory for miniseed files
    :param length: length of templates in seconds
    :param prepick: prepick time for waveform trimming
    :param highcut: Filter highcut (if desired)
    :param lowcut: Filter lowcut (if desired)
    :param f_order: Filter order
    :param samp_rate: Sampling rate for the templates
    :param start: start date as %Y/%m/%d if desired
    :param end: same as above. Defaults to full length of catalog.
    :return:
    """

    # Establish date range for template creation
    cat.events.sort(key=lambda x: x.origins[-1].time)
    if start:
        cat_start = datetime.datetime.strptime(start, '%d/%m/%Y')
        cat_end = datetime.datetime.strptime(end, '%d/%m/%Y')
    else:
        cat_start = cat[0].origins[-1].time.date
        cat_end = cat[-1].origins[-1].time.date
    for date in date_generator(cat_start, cat_end):
        dto = UTCDateTime(date)
        print('Processing templates for: %s' % str(dto))
        q_start = dto - 10
        q_end = dto + 86410
        # Establish which events are in this day
        sch_str_start = 'time >= %s' % str(dto)
        sch_str_end = 'time <= %s' % str(dto + 86400)
        tmp_cat = cat.filter(sch_str_start, sch_str_end)
        if len(tmp_cat) == 0:
            print('No events on: %s' % str(dto))
            continue
        # Which station-channel pairs do we have?
        stachans = {pk.waveform_id.station_code: [] for ev in tmp_cat
                    for pk in ev.picks}
        for ev in tmp_cat:
            for pk in ev.picks:
                chan_code = pk.waveform_id.channel_code
                if chan_code not in stachans[pk.waveform_id.station_code]:
                    stachans[pk.waveform_id.station_code].append(chan_code)
        wav_read_start = timer()
        # Be sure to go +/- 10 sec to account for imprecise GeoNet timing
        if asdf_file:
            with pyasdf.ASDFDataSet(asdf_file) as ds:
                st = Stream()
                for sta, chans in iter(stachans.items()):
                    for station in ds.ifilter(ds.q.station == sta,
                                              ds.q.channel == chans,
                                              ds.q.starttime >= q_start,
                                              ds.q.endtime <= q_end):
                        st += station.raw_recording
        elif miniseed:
            wav_ds = ['%s%d' % (d, dto.year) for d in wav_dirs]
            st = grab_day_wavs(wav_ds, dto, stachans)
        wav_read_stop = timer()
        print('Reading waveforms took %.3f seconds' % (wav_read_stop
                                                       - wav_read_start))
        print('Looping through stachans to merge/resamp')
        stachans = [(tr.stats.station, tr.stats.channel) for tr in st]
        for stachan in list(set(stachans)):
            tmp_st = st.select(station=stachan[0], channel=stachan[1])
            if len(tmp_st) > 1 and len(
                    set(tr.stats.sampling_rate for tr in tmp_st)) > 1:
                print('Traces from %s.%s have differing samp rates'
                      % (stachan[0], stachan[1]))
                for tr in tmp_st:
                    st.remove(tr)
                tmp_st.resample(sampling_rate=samp_rate)
                st += tmp_st
        st.merge(fill_value='interpolate')
        resamp_stop = timer()
        print('Resample/merge took %s secs' % str(resamp_stop - wav_read_stop))
        print('Preprocessing...')
        # Process the stream
        try:
            st1 = pre_processing.dayproc(st, lowcut=lowcut, highcut=highcut,
                                         filt_order=f_order, samp_rate=samp_rate,
                                         starttime=dto, debug=debug, ignore_length=True,
                                         num_cores=4)
        # A bare `except A or B` only catches A; NotImplementedError is a
        # subclass of Exception, so a single clause covers both.
        except Exception as e:
            print('Found error in dayproc, noting date and continuing')
            print(e)
            with open('%s/dayproc_errors.txt' % outdir, mode='a') as fo:
                fo.write('%s\n%s\n' % (str(date), e))
            continue
        print('Feeding stream to template_gen...')
        for event in tmp_cat:
            print('Copying stream to keep away from the trim...')
            trim_st = copy.deepcopy(st1)
            ev_name = str(event.resource_id).split('/')[-1]
            pk_stachans = ['%s.%s' % (pk.waveform_id.station_code,
                                      pk.waveform_id.channel_code)
                           for pk in event.picks]
            # Run check to ensure that there is only one pick for each channel
            dups = [pk for pk, count
                    in collections.Counter(pk_stachans).items() if count > 1]
            if len(dups) > 0:
                print('Event %s still has dup picks. Skipping' % ev_name)
                continue
            template = template_gen(event.picks, trim_st, length=length,
                                    prepick=prepick, min_snr=min_snr)
            if len([tr for tr in template
                    if tr.stats.channel[-1] == 'Z']) < 6:
                print('Skipping template with fewer than 6 Z-comp traces')
                continue
            # temp_list.append(template)
            print('Writing event %s to file...' % ev_name)
            template.write('%s/%s.mseed' % (outdir, ev_name),
                           format="MSEED")
            del trim_st
        del tmp_cat, st1, st
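A hypothetical invocation of the utility above, with placeholder paths; note that the function appends the year to each entry in wav_dirs when building the day-volume search directories.

# Placeholder inputs: 'my_catalog.xml' and the waveform root are assumptions.
from obspy import read_events

cat = read_events('my_catalog.xml')
mseed_2_templates(wav_dirs=['/data/waveforms_'], cat=cat, outdir='templates',
                  length=4.0, prepick=0.5, highcut=20.0, lowcut=2.0,
                  f_order=4, samp_rate=50.0, start='01/01/2015',
                  end='31/12/2015')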
Example #21
def brightness(stations,
               nodes,
               lags,
               stream,
               threshold,
               thresh_type,
               template_length,
               template_saveloc,
               coherence_thresh,
               coherence_stations=['all'],
               coherence_clip=False,
               gap=2.0,
               clip_level=100,
               instance=0,
               pre_pick=0.2,
               plotvar=False,
               plotsave=True,
               cores=1,
               debug=0,
               mem_issue=False):
    """
    Calculate the brightness function for a single day.

    Written to calculate the brightness function for a single day of data,
    using moveouts from a 3D travel-time grid.

    .. Note::
        Data in stream must be all of the same length and have the same
        sampling rates, see :func:`eqcorrscan.utils.pre_processing.dayproc`

    :type stations: list
    :param stations:
        List of station names, where stations[i] refers to nodes[i][:] and
        lags[i][:]
    :type nodes: list
    :param nodes:
        List of node points where nodes[i] refers to stations[i] and
        nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is longitude in
        degrees, nodes[:][:][2] is depth in km.
    :type lags: numpy.ndarray
    :param lags:
        Array of arrays where lags[i][:] refers to stations[i]. lags[i][j]
        should be the delay to the nodes[i][j] for stations[i] in seconds.
    :type stream: obspy.core.stream.Stream
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold:
        Threshold value for detection of template within the brightness
        function.
    :type thresh_type: str
    :param thresh_type:
        Either MAD or abs where MAD is the Median Absolute Deviation and abs
        is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple
    :param coherence_thresh:
            Threshold for removing incoherent peaks in the network response,
            those below this will not be used as templates. Must be in the
            form of (a,b) where the coherence is given by: :math:`a-kchan/b`
            where kchan is the number of channels used to compute the
            coherence.
    :type coherence_stations: list
    :param coherence_stations:
        List of stations to use in the coherence thresholding - defaults to
        `all` which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip:
        Start and end in seconds of data to window around, defaults to False,
        which uses all the data given.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections.
    :type clip_level: float
    :param clip_level:
        Multiplier applied to the mean deviation of the energy as an upper
        limit, used to remove spikes (earthquakes, lightning, electrical
        spikes) from the energy stack.
    :type instance: int
    :param instance:
        Optional, used for tracking when using a distributed computing system.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotvar: bool
    :param plotvar: Turn plotting on or off
    :type plotsave: bool
    :param plotsave:
        Save or show plots, if `False` will try and show the plots on screen -
        as this is designed for bulk use this is set to `True` to save any
        plots rather than show them if you create them - changes the backend
        of matplotlib, so if it is set to `False` you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type debug: int
    :param debug: Debug level from 0-5, higher is more output.
    :type mem_issue: bool
    :param mem_issue:
        Set to True to write temporary variables to disk rather than store in
        memory, slow.

    :return: list of templates as :class:`obspy.core.stream.Stream` objects
    :rtype: list
    """
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    from eqcorrscan.utils import plotting
    from eqcorrscan.utils.debug_log import debug_print
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            # Append the name; `+=` would extend the list with the string's
            # individual characters
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high-gain -
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise BrightnessError(
            'Too many streams: either re-code to cope with more memory usage '
            'or less precision, or reduce the data volume')
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    # Parallel run
    num_cores = cores
    if num_cores > len(nodes):
        num_cores = len(nodes)
    if num_cores > cpu_count():
        num_cores = cpu_count()
    if mem_issue and not os.path.isdir('tmp' + str(instance)):
        os.makedirs('tmp' + str(instance))
    pool = Pool(processes=num_cores)
    results = [
        pool.apply_async(
            _node_loop, (stations, ), {
                'lags': lags[:, i],
                'stream': stream,
                'i': i,
                'clip_level': clip_level,
                'mem_issue': mem_issue,
                'instance': instance
            }) for i in range(len(nodes))
    ]
    pool.close()
    if not mem_issue:
        print('Computing the cumulative network response from memory')
        energy = [p.get() for p in results]
        pool.join()
        energy.sort(key=lambda tup: tup[0])
        energy = [node[1] for node in energy]
        energy = np.concatenate(energy, axis=0)
        print(energy.shape)
    else:
        pool.join()
        del results
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indices = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indices.shape)
        cum_net_resp = np.array([np.nan] * len(indices))
        cum_net_resp[0] = energy[indices[0]][0]
        peak_nodes = [nodes[indices[0]]]
        for i in range(1, len(indices)):
            cum_net_resp[i] = energy[indices[i]][i]
            peak_nodes.append(nodes[indices[i]])
        del energy, indices
    else:
        print('Reading the temp files and computing network response')
        node_splits = int(len(nodes) // num_cores)
        print(node_splits)
        indices = []
        for i in range(num_cores):
            indices.append(
                list(np.arange(node_splits * i, node_splits * (i + 1))))
        indices[-1] += list(np.arange(node_splits * (i + 1), len(nodes)))
        # results = [_cum_net_resp(node_lis=indices[i], instance=instance)
        #            for i in range(num_cores)]
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indices[i], instance))
            for i in range(num_cores)
        ]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indices = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indices = np.argmax(cum_net_resp, axis=0)
        print(indices.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indices[i]][i] for i in range(len(indices))])
        peak_nodes = [
            nodes[node_indices[indices[i]][i]] for i in range(len(indices))
        ]
        del indices, node_indices
    if plotvar:
        cum_net_trace = Stream(
            Trace(data=cum_net_resp,
                  header=Stats({
                      'station': 'NR',
                      'channel': '',
                      'network': 'Z',
                      'location': '',
                      'starttime': stream[0].stats.starttime,
                      'sampling_rate': stream[0].stats.sampling_rate
                  })))
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])

    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections into templates')
        # Generate a catalog of detections
        # detections_cat = Catalog()
        for j, detection in enumerate(detections):
            debug_print(
                'Converting for detection %i of %i' % (j, len(detections)), 3,
                debug)
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            # Look up node in nodes and find the associated lags
            index = nodes.index(
                (float(node[0]), float(node[1]), float(node[2])))
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code=tr.stats.network)
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            debug_print('Generating template for detection: %i' % j, 0, debug)
            template = template_gen(picks=event.picks,
                                    st=copy_of_stream,
                                    length=template_length,
                                    swin='all')
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                debug_print(
                    'Template was incoherent, coherence level: ' +
                    str(temp_coher), 0, debug)
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                debug_print('No template for you', 0, debug)
            # detections_cat += event
    if plotvar:
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             save=True,
                             savefile=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
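To make the docstring's shape conventions concrete: stations[i] pairs with lags[i, :], and lags[i][j] is the moveout of nodes[j] at stations[i]. A hypothetical call with a tiny two-station, two-node grid follows; the data file name is a placeholder, and real use needs a proper 3D travel-time grid plus day-long, uniformly sampled, pre-processed data (see eqcorrscan.utils.pre_processing.dayproc).

# Hypothetical sketch only - synthetic grid and placeholder data file.
import numpy as np
from obspy import read

stations = ['STA1', 'STA2']          # stations[i] pairs with lags[i, :]
nodes = [(45.0, 45.0, 5.0),          # (latitude, longitude, depth_km)
         (45.1, 45.1, 6.0)]
lags = np.array([[1.2, 1.4],         # lags[i, j]: delay in seconds at
                 [0.9, 1.1]])        # stations[i] for nodes[j]
st = read('day_long_preprocessed.ms')  # placeholder day-long Stream
templates, used_nodes = brightness(
    stations, nodes, lags, stream=st, threshold=8.0, thresh_type='MAD',
    template_length=3.0, template_saveloc='.', coherence_thresh=(0.5, 10.0))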
Example #22
    def construct(self,
                  method,
                  name,
                  lowcut,
                  highcut,
                  samp_rate,
                  filt_order,
                  length,
                  prepick,
                  swin="all",
                  process_len=86400,
                  all_horiz=False,
                  delayed=True,
                  plot=False,
                  plotdir=None,
                  min_snr=None,
                  parallel=False,
                  num_cores=False,
                  skip_short_chans=False,
                  **kwargs):
        """
        Construct a template using a given method.

        :param method:
            Method to make the template, the only available method is:
            `from_sac`. For all other methods (`from_seishub`, `from_client`
            and `from_meta_file`) use `Tribe.construct()`.
        :type method: str
        :type name: str
        :param name: Name for the template
        :type lowcut: float
        :param lowcut:
            Low cut (Hz), if set to None will not apply a lowcut
        :type highcut: float
        :param highcut:
            High cut (Hz), if set to None will not apply a highcut.
        :type samp_rate: float
        :param samp_rate:
            New sampling rate in Hz.
        :type filt_order: int
        :param filt_order:
            Filter level (number of corners).
        :type length: float
        :param length: Length of template waveform in seconds.
        :type prepick: float
        :param prepick: Pre-pick time in seconds
        :type swin: str
        :param swin:
            P, S, P_all, S_all or all, defaults to all: see note in
            :func:`eqcorrscan.core.template_gen.template_gen`
        :type process_len: int
        :param process_len: Length of data in seconds to download and process.
        :type all_horiz: bool
        :param all_horiz:
            To use both horizontal channels even if there is only a pick on
            one of them.  Defaults to False.
        :type delayed: bool
        :param delayed: If True, each channel will begin relative to its own
            pick-time, if set to False, each channel will begin at the same
            time.
        :type plot: bool
        :param plot: Plot templates or not.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type min_snr: float
        :param min_snr:
            Minimum signal-to-noise ratio for a channel to be included in the
            template, where signal-to-noise ratio is calculated as the ratio
            of the maximum amplitude in the template window to the rms
            amplitude in the whole window given.
        :type parallel: bool
        :param parallel: Whether to process data in parallel or not.
        :type num_cores: int
        :param num_cores:
            Number of cores to try and use, if False and parallel=True,
            will use either all your cores, or as many traces as in the data
            (whichever is smaller).
        :type skip_short_chans: bool
        :param skip_short_chans:
            Whether to ignore channels that have insufficient length data or
            not. Useful when the quality of data is not known, e.g. when
            downloading old, possibly triggered data from a datacentre

        .. note::

            `method=from_sac` requires the following kwarg(s):
            :param list sac_files:
                obspy.core.stream.Stream of SAC waveforms, or list of paths to
                SAC waveforms.
            .. note::
                See `eqcorrscan.utils.sac_util.sactoevent` for details on
                how pick information is collected.

        .. rubric:: Example

        >>> # Get the path to the test data
        >>> import eqcorrscan
        >>> import os, glob
        >>> TEST_PATH = (
        ...     os.path.dirname(eqcorrscan.__file__) + '/tests/test_data')
        >>> sac_files = glob.glob(TEST_PATH + '/SAC/2014p611252/*')
        >>> template = Template().construct(
        ...     method='from_sac', name='test', lowcut=2.0, highcut=8.0,
        ...     samp_rate=20.0, filt_order=4, prepick=0.1, swin='all',
        ...     length=2.0, sac_files=sac_files)
        >>> print(template) # doctest: +NORMALIZE_WHITESPACE
        Template test:
         12 channels;
         lowcut: 2.0 Hz;
         highcut: 8.0 Hz;
         sampling rate 20.0 Hz;
         filter order: 4;
         process length: 300.0 s


        This will raise an error if the method is unsupported:

        >>> template = Template().construct(
        ...     method='from_meta_file', name='test', lowcut=2.0, highcut=8.0,
        ...     samp_rate=20.0, filt_order=4, prepick=0.1, swin='all',
        ...     length=2.0) # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        NotImplementedError: Method is not supported, use \
        Tribe.construct instead.

        """
        if method in [
                'from_meta_file', 'from_seishub', 'from_client',
                'multi_template_gen'
        ]:
            raise NotImplementedError('Method is not supported, '
                                      'use Tribe.construct instead.')
        streams, events, process_lengths = template_gen.template_gen(
            method=method,
            lowcut=lowcut,
            highcut=highcut,
            length=length,
            filt_order=filt_order,
            samp_rate=samp_rate,
            prepick=prepick,
            return_event=True,
            swin=swin,
            process_len=process_len,
            all_horiz=all_horiz,
            delayed=delayed,
            plot=plot,
            plotdir=plotdir,
            min_snr=min_snr,
            parallel=parallel,
            num_cores=num_cores,
            skip_short_chans=skip_short_chans,
            **kwargs)
        self.name = name
        st = streams[0]
        event = events[0]
        process_length = process_lengths[0]
        for tr in st:
            if not np.any(tr.data.astype(np.float16)):
                Logger.warning('Data are zero in float16, missing data,'
                               ' will not use: {0}'.format(tr.id))
                st.remove(tr)
        self.st = st
        self.lowcut = lowcut
        self.highcut = highcut
        self.filt_order = filt_order
        self.samp_rate = samp_rate
        self.process_length = process_length
        self.prepick = prepick
        self.event = event
        return self
Example #23
 def test_debug_levels(self):
     template = template_gen(self.picks, self.st.copy(), 10, debug=3)
     self.assertEqual(len(template), len(self.picks))
Example #24
    def construct(self, method, lowcut, highcut, samp_rate, filt_order,
                  length, prepick, swin="all", process_len=86400,
                  all_horiz=False, delayed=True, plot=False, plotdir=None,
                  min_snr=None, parallel=False, num_cores=False,
                  skip_short_chans=False, save_progress=False, **kwargs):
        """
        Generate a Tribe of Templates.

        :type method: str
        :param method:
            Method of Tribe generation. Possible options are: `from_client`,
            `from_seishub`, `from_meta_file`.  See below on the additional
            required arguments for each method.
        :type lowcut: float
        :param lowcut:
            Low cut (Hz), if set to None will not apply a lowcut
        :type highcut: float
        :param highcut:
            High cut (Hz), if set to None will not apply a highcut.
        :type samp_rate: float
        :param samp_rate:
            New sampling rate in Hz.
        :type filt_order: int
        :param filt_order:
            Filter level (number of corners).
        :type length: float
        :param length: Length of template waveform in seconds.
        :type prepick: float
        :param prepick: Pre-pick time in seconds
        :type swin: str
        :param swin:
            P, S, P_all, S_all or all, defaults to all: see note in
            :func:`eqcorrscan.core.template_gen.template_gen`
        :type process_len: int
        :param process_len: Length of data in seconds to download and process.
        :type all_horiz: bool
        :param all_horiz:
            To use both horizontal channels even if there is only a pick on
            one of them.  Defaults to False.
        :type delayed: bool
        :param delayed: If True, each channel will begin relative to its own
            pick-time, if set to False, each channel will begin at the same
            time.
        :type plot: bool
        :param plot: Plot templates or not.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type min_snr: float
        :param min_snr:
            Minimum signal-to-noise ratio for a channel to be included in the
            template, where signal-to-noise ratio is calculated as the ratio
            of the maximum amplitude in the template window to the rms
            amplitude in the whole window given.
        :type parallel: bool
        :param parallel: Whether to process data in parallel or not.
        :type num_cores: int
        :param num_cores:
            Number of cores to try and use, if False and parallel=True,
            will use either all your cores, or as many traces as in the data
            (whichever is smaller).
        :type save_progress: bool
        :param save_progress:
            Whether to save the resulting template set at every data step or
            not. Useful for long-running processes.
        :type skip_short_chans: bool
        :param skip_short_chans:
            Whether to ignore channels that have insufficient length data or
            not. Useful when the quality of data is not known, e.g. when
            downloading old, possibly triggered data from a datacentre

        .. note::
            *Method specific arguments:*

            - `from_client` requires:
                :param str client_id:
                    string passable by obspy to generate Client, or any object
                    with a `get_waveforms` method, including a Client instance.
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_seishub` requires:
                :param str url: url to seishub database
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_meta_file` requires:
                :param str meta_file:
                    Path to obspy-readable event file, or an obspy Catalog
                :param `obspy.core.stream.Stream` st:
                    Stream containing waveform data for template. Note that
                    this should be the same length of stream as you will use
                    for the continuous detection, e.g. if you detect in
                    day-long files, give this a day-long file!
                :param bool process:
                    Whether to process the data or not, defaults to True.

        .. Note::
            Method: `from_sac` is not supported by Tribe.construct and must
            use Template.construct.

        .. Note:: Templates will be named according to their start-time.
        """
        templates, catalog, process_lengths = template_gen.template_gen(
            method=method, lowcut=lowcut, highcut=highcut, length=length,
            filt_order=filt_order, samp_rate=samp_rate, prepick=prepick,
            return_event=True, save_progress=save_progress, swin=swin,
            process_len=process_len, all_horiz=all_horiz, plotdir=plotdir,
            delayed=delayed, plot=plot, min_snr=min_snr, parallel=parallel,
            num_cores=num_cores, skip_short_chans=skip_short_chans,
            **kwargs)
        for template, event, process_len in zip(templates, catalog,
                                                process_lengths):
            t = Template()
            for tr in template:
                if not np.any(tr.data.astype(np.float16)):
                    Logger.warning('Data are zero in float16, missing data,'
                                   ' will not use: {0}'.format(tr.id))
                    template.remove(tr)
            if len(template) == 0:
                Logger.error('Empty Template')
                continue
            t.st = template
            t.name = template.sort(['starttime'])[0]. \
                stats.starttime.strftime('%Y_%m_%dt%H_%M_%S')
            t.lowcut = lowcut
            t.highcut = highcut
            t.filt_order = filt_order
            t.samp_rate = samp_rate
            t.process_length = process_len
            t.prepick = prepick
            event.comments.append(Comment(
                text="eqcorrscan_template_" + t.name,
                creation_info=CreationInfo(agency_id='eqcorrscan',
                                           author=getpass.getuser())))
            t.event = event
            self.templates.append(t)
        return self
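The method-specific arguments listed in the note above can be hard to line up; the following sketch shows the from_meta_file route. 'events.xml' and 'day_long.ms' are placeholder inputs, and the stream should match the length of data you will later run detections on.

# Hypothetical usage of Tribe.construct with method="from_meta_file".
from obspy import read
from eqcorrscan.core.match_filter import Tribe

tribe = Tribe().construct(
    method='from_meta_file', meta_file='events.xml', st=read('day_long.ms'),
    lowcut=2.0, highcut=9.0, samp_rate=20.0, filt_order=4, length=3.0,
    prepick=0.15, swin='all')
print(tribe)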
Example #25
def run_tutorial(min_magnitude=2, shift_len=0.2, num_cores=4, min_cc=0.5):
    """Functional, tested example script for running the lag-calc tutorial."""
    if num_cores > cpu_count():
        num_cores = cpu_count()
    client = Client('NCEDC')
    t1 = UTCDateTime(2004, 9, 28)
    t2 = t1 + 86400
    print('Downloading catalog')
    catalog = client.get_events(starttime=t1,
                                endtime=t2,
                                minmagnitude=min_magnitude,
                                minlatitude=35.7,
                                maxlatitude=36.1,
                                minlongitude=-120.6,
                                maxlongitude=-120.2,
                                includearrivals=True)
    # We don't need all the picks, let's take the information from the
    # five most used stations - note that this is done to reduce computational
    # costs.
    catalog = catalog_utils.filter_picks(catalog,
                                         channels=['EHZ'],
                                         top_n_picks=5)
    # There is a duplicate pick in event 3 in the catalog - this has the effect
    # of reducing our detections - check it yourself.
    for pick in catalog[3].picks:
        if pick.waveform_id.station_code == 'PHOB' and \
                        pick.onset == 'emergent':
            catalog[3].picks.remove(pick)
    print('Generating templates')
    templates = template_gen.template_gen(method="from_client",
                                          catalog=catalog,
                                          client_id='NCEDC',
                                          lowcut=2.0,
                                          highcut=9.0,
                                          samp_rate=50.0,
                                          filt_order=4,
                                          length=3.0,
                                          prepick=0.15,
                                          swin='all',
                                          process_len=3600)
    # In this section we generate a series of chunks of data.
    start_time = UTCDateTime(2004, 9, 28, 17)
    end_time = UTCDateTime(2004, 9, 28, 20)
    process_len = 3600
    chunks = []
    chunk_start = start_time
    while chunk_start < end_time:
        chunk_end = chunk_start + process_len
        if chunk_end > end_time:
            chunk_end = end_time
        chunks.append((chunk_start, chunk_end))
        chunk_start += process_len

    all_detections = []
    picked_catalog = Catalog()
    template_names = [
        template[0].stats.starttime.strftime("%Y%m%d_%H%M%S")
        for template in templates
    ]
    for t1, t2 in chunks:
        print('Downloading and processing for start-time: %s' % t1)
        # Download and process the data
        bulk_info = [(tr.stats.network, tr.stats.station, '*',
                      tr.stats.channel, t1, t2) for tr in templates[0]]
        # Just downloading a chunk of data
        try:
            st = client.get_waveforms_bulk(bulk_info)
        except FDSNException:
            st = Stream()
            for _bulk in bulk_info:
                st += client.get_waveforms(*_bulk)
        st.merge(fill_value='interpolate')
        st = pre_processing.shortproc(st,
                                      lowcut=2.0,
                                      highcut=9.0,
                                      filt_order=4,
                                      samp_rate=50.0,
                                      num_cores=num_cores)
        detections = match_filter.match_filter(template_names=template_names,
                                               template_list=templates,
                                               st=st,
                                               threshold=8.0,
                                               threshold_type='MAD',
                                               trig_int=6.0,
                                               plotvar=False,
                                               plotdir='.',
                                               cores=num_cores)
        # Extract unique detections from set.
        unique_detections = []
        for master in detections:
            keep = True
            for slave in detections:
                if not master == slave and\
                   abs(master.detect_time - slave.detect_time) <= 1.0:
                    # If the events are within 1s of each other then test which
                    # was the 'best' match, strongest detection
                    if not master.detect_val > slave.detect_val:
                        keep = False
                        break
            if keep:
                unique_detections.append(master)
        all_detections += unique_detections

        picked_catalog += lag_calc.lag_calc(detections=unique_detections,
                                            detect_data=st,
                                            template_names=template_names,
                                            templates=templates,
                                            shift_len=shift_len,
                                            min_cc=min_cc,
                                            interpolate=False,
                                            plot=False)
    # Return all of this so that we can use this function for testing.
    return all_detections, picked_catalog, templates, template_names
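As with the template-generation tutorial, a small guard makes this script directly runnable; the call below simply restates the function defaults.

# Minimal entry point for the lag-calc tutorial above.
if __name__ == '__main__':
    all_detections, picked_catalog, templates, template_names = run_tutorial()
    print('Made %i detections' % len(all_detections))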