Пример #1
0
 def test_read_wavename(self):
     """Check that wavefile names are read and any path parts are stripped."""
     sfile_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
     self.assertEqual(len(readwavename(sfile_path)), 1)
     # Build a single-event catalog for the write/read round trip.
     catalog = Catalog()
     catalog += full_test_event()
     # Single wavefile with a path, then multiple wavefiles: in both cases
     # readwavename should return bare file names, in order.
     for wav_list, expected in [(['walrus/test'], ['test']),
                                (['walrus/test', 'albert'],
                                 ['test', 'albert'])]:
         with TemporaryWorkingDirectory():
             written = _write_nordic(catalog[0], filename=None,
                                     userid='TEST', evtype='L', outdir='.',
                                     wavefiles=wav_list, explosion=True,
                                     overwrite=True)
             self.assertEqual(readwavename(written), expected)
Пример #2
0
 def setUpClass(cls):
     """Load the shared test event, its wavefile names and waveform stream."""
     here = os.path.abspath(os.path.dirname(__file__))
     cls.testing_path = os.path.join(here, 'test_data')
     s_file = os.path.join(cls.testing_path, 'REA', 'TEST_',
                           '01-0411-15L.S201309')
     cls.event = read_events(s_file)[0]
     cls.wavfiles = readwavename(s_file)
     cls.datapath = os.path.join(cls.testing_path, 'WAV', 'TEST_')
     # Read the waveform referenced by the first wavefile name.
     cls.st = read(os.path.join(cls.datapath, cls.wavfiles[0]))
     cls.respdir = cls.testing_path
Пример #3
0
 def test_read_wavename(self):
     """Check wavefile names are read and directory components are dropped."""
     wave_names = readwavename(
         os.path.join(self.testing_path, '01-0411-15L.S201309'))
     self.assertEqual(len(wave_names), 1)
     # Write a test event out and make sure paths are stripped on read-back.
     cat = Catalog()
     cat += full_test_event()
     with TemporaryWorkingDirectory():
         out = _write_nordic(cat[0], filename=None, userid='TEST',
                             evtype='L', outdir='.',
                             wavefiles=['walrus/test'], explosion=True,
                             overwrite=True)
         self.assertEqual(readwavename(out), ['test'])
     # Multiple wavefiles should all be returned, in order.
     with TemporaryWorkingDirectory():
         out = _write_nordic(cat[0], filename=None, userid='TEST',
                             evtype='L', outdir='.',
                             wavefiles=['walrus/test', 'albert'],
                             explosion=True, overwrite=True)
         self.assertEqual(readwavename(out), ['test', 'albert'])
Пример #4
0
 def setUpClass(cls):
     """Build a catalog and matching streams with two-character channels."""
     base = os.path.dirname(os.path.abspath(__file__))
     cls.catalog = Catalog()
     cls.streams = []
     for s_file in glob.glob(
             os.path.join(base, 'test_data/REA/TEST_/*.S??????')):
         cls.catalog += read_events(s_file)
         wave_name = readwavename(s_file)[0]
         st = read(os.path.join(base, 'test_data/WAV/TEST_', wave_name))
         # Seisan-style two-character channel codes: keep first and last.
         for trace in st:
             trace.stats.channel = (
                 trace.stats.channel[0] + trace.stats.channel[-1])
         cls.streams.append(st)
Пример #5
0
 def test_read_latin1(self):
     """Check that we can read dos formatted, latin1 encoded files."""
     dos_file = os.path.join(self.testing_path, 'dos-file.sfile')
     self.assertTrue(_is_sfile(dos_file))
     # Header, full catalog and wavename readers should all cope.
     self.assertEqual(readheader(dos_file).origins[0].latitude, 60.328)
     self.assertEqual(
         read_events(dos_file)[0].origins[0].latitude, 60.328)
     self.assertEqual(readwavename(dos_file)[0], "90121311.0851W41")
     spectra = read_spectral_info(dos_file)
     self.assertEqual(len(spectra.keys()), 10)
     self.assertEqual(spectra[('AVERAGE', '')]['stress_drop'], 27.7)
     # Forcing an ASCII read of latin-1 content must fail loudly.
     with self.assertRaises(UnicodeDecodeError):
         readheader(dos_file, 'ASCII')
Пример #6
0
 def test_read_latin1(self):
     """Check that we can read dos formatted, latin1 encoded files."""
     dos_file = os.path.join(self.testing_path, 'dos-file.sfile')
     self.assertTrue(_is_sfile(dos_file))
     header_event = readheader(dos_file)
     self.assertEqual(header_event.origins[0].latitude, 60.328)
     catalog = read_events(dos_file)
     self.assertEqual(catalog[0].origins[0].latitude, 60.328)
     wave_names = readwavename(dos_file)
     self.assertEqual(wave_names[0], "90121311.0851W41")
     spectral = read_spectral_info(dos_file)
     self.assertEqual(len(spectral.keys()), 10)
     self.assertEqual(spectral[('AVERAGE', '')]['stress_drop'], 27.7)
     # Reading latin-1 bytes with an ASCII codec should raise.
     with self.assertRaises(UnicodeDecodeError):
         readheader(dos_file, 'ASCII')
Пример #7
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.
     """
     # Build a single-event catalog, write it to an s-file and read it back.
     test_cat = Catalog()
     test_cat += full_test_event()
     with TemporaryWorkingDirectory():
         sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test',
                               explosion=True, overwrite=True)
         self.assertEqual(readwavename(sfile), ['test'])
         read_cat = Catalog()
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             read_cat += read_nordic(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks round-trip; only the last channel character is preserved.
     for got, want in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(got.time, want.time)
         self.assertEqual(got.backazimuth, want.backazimuth)
         self.assertEqual(got.onset, want.onset)
         self.assertEqual(got.phase_hint, want.phase_hint)
         self.assertEqual(got.polarity, want.polarity)
         self.assertEqual(got.waveform_id.station_code,
                          want.waveform_id.station_code)
         self.assertEqual(got.waveform_id.channel_code[-1],
                          want.waveform_id.channel_code[-1])
     # Resource ids are regenerated in memory, so are not compared.
     # Note that time_residual_RMS is not a quakeML format.
     for attr in ('time', 'longitude', 'latitude', 'depth'):
         self.assertEqual(getattr(read_ev.origins[0], attr),
                          getattr(test_ev.origins[0], attr))
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].mag,
                          test_ev.magnitudes[i].mag)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].creation_info,
                          test_ev.magnitudes[i].creation_info)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].magnitude_type,
                          test_ev.magnitudes[i].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     for i in (0, 2):
         self.assertEqual(read_ev.amplitudes[i].period,
                          test_ev.amplitudes[i].period)
         self.assertEqual(read_ev.amplitudes[i].snr,
                          test_ev.amplitudes[i].snr)
     # Check the coda magnitude pick.  Resource/pick ids are regenerated in
     # memory and snr is not supported in an s-file, so none of those are
     # compared here.
     got_amp = read_ev.amplitudes[1]
     want_amp = test_ev.amplitudes[1]
     self.assertEqual(got_amp.type, want_amp.type)
     self.assertEqual(got_amp.unit, want_amp.unit)
     self.assertEqual(got_amp.generic_amplitude, want_amp.generic_amplitude)
     self.assertEqual(got_amp.waveform_id.station_code,
                      want_amp.waveform_id.station_code)
     self.assertEqual(got_amp.waveform_id.channel_code,
                      want_amp.waveform_id.channel_code[0] +
                      want_amp.waveform_id.channel_code[-1])
     self.assertEqual(got_amp.magnitude_hint, want_amp.magnitude_hint)
     self.assertEqual(got_amp.category, want_amp.category)
Пример #8
0
 def test_read_wavename(self):
     """Exactly one wavefile should be referenced by the test s-file."""
     s_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
     self.assertEqual(len(readwavename(s_path)), 1)
Пример #9
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.
     """
     test_event = full_test_event()
     # Sort the magnitudes - they are sorted on writing and we need to check
     # like-for-like
     test_event.magnitudes.sort(key=lambda obj: obj['mag'], reverse=True)
     test_cat = Catalog()
     test_cat += test_event
     # Write the event out as an s-file, then read it straight back in.
     with TemporaryWorkingDirectory():
         sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test',
                               explosion=True, overwrite=True)
         self.assertEqual(readwavename(sfile), ['test'])
         read_cat = Catalog()
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             read_cat += read_nordic(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks round-trip; only the last channel character is preserved.
     for got, want in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(got.time, want.time)
         self.assertEqual(got.backazimuth, want.backazimuth)
         self.assertEqual(got.onset, want.onset)
         self.assertEqual(got.phase_hint, want.phase_hint)
         self.assertEqual(got.polarity, want.polarity)
         self.assertEqual(got.waveform_id.station_code,
                          want.waveform_id.station_code)
         self.assertEqual(got.waveform_id.channel_code[-1],
                          want.waveform_id.channel_code[-1])
     # Resource ids are regenerated in memory, so are not compared.
     # Note that time_residual_RMS is not a quakeML format.
     for attr in ('time', 'longitude', 'latitude', 'depth'):
         self.assertEqual(getattr(read_ev.origins[0], attr),
                          getattr(test_ev.origins[0], attr))
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].mag,
                          test_ev.magnitudes[i].mag)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].creation_info,
                          test_ev.magnitudes[i].creation_info)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].magnitude_type,
                          test_ev.magnitudes[i].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     for i in (0, 2):
         self.assertEqual(read_ev.amplitudes[i].period,
                          test_ev.amplitudes[i].period)
         self.assertEqual(read_ev.amplitudes[i].snr,
                          test_ev.amplitudes[i].snr)
     # Check the coda magnitude pick.  Resource/pick ids are regenerated in
     # memory and snr is not supported in an s-file, so none of those are
     # compared here.
     got_amp = read_ev.amplitudes[1]
     want_amp = test_ev.amplitudes[1]
     self.assertEqual(got_amp.type, want_amp.type)
     self.assertEqual(got_amp.unit, want_amp.unit)
     self.assertEqual(got_amp.generic_amplitude, want_amp.generic_amplitude)
     self.assertEqual(got_amp.waveform_id.station_code,
                      want_amp.waveform_id.station_code)
     self.assertEqual(got_amp.waveform_id.channel_code,
                      want_amp.waveform_id.channel_code[0] +
                      want_amp.waveform_id.channel_code[-1])
     self.assertEqual(got_amp.magnitude_hint, want_amp.magnitude_hint)
     self.assertEqual(got_amp.category, want_amp.category)
Пример #10
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.
     """
     test_cat = Catalog()
     test_cat += full_test_event()
     # Write out to an s-file, read it back, then remove the file.
     sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                           evtype='L', outdir='.',
                           wavefiles='test', explosion=True, overwrite=True)
     self.assertEqual(readwavename(sfile), ['test'])
     read_cat = Catalog()
     read_cat += read_nordic(sfile)
     os.remove(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks round-trip; only the last channel character is preserved.
     for got, want in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(got.time, want.time)
         self.assertEqual(got.backazimuth, want.backazimuth)
         self.assertEqual(got.onset, want.onset)
         self.assertEqual(got.phase_hint, want.phase_hint)
         self.assertEqual(got.polarity, want.polarity)
         self.assertEqual(got.waveform_id.station_code,
                          want.waveform_id.station_code)
         self.assertEqual(got.waveform_id.channel_code[-1],
                          want.waveform_id.channel_code[-1])
     # Resource ids are regenerated in memory, so are not compared.
     # Note that time_residual_RMS is not a quakeML format.
     for attr in ('time', 'longitude', 'latitude', 'depth'):
         self.assertEqual(getattr(read_ev.origins[0], attr),
                          getattr(test_ev.origins[0], attr))
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].mag,
                          test_ev.magnitudes[i].mag)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].creation_info,
                          test_ev.magnitudes[i].creation_info)
     for i in (0, 1, 2):
         self.assertEqual(read_ev.magnitudes[i].magnitude_type,
                          test_ev.magnitudes[i].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     self.assertEqual(read_ev.amplitudes[0].period,
                      test_ev.amplitudes[0].period)
     self.assertEqual(read_ev.amplitudes[0].snr,
                      test_ev.amplitudes[0].snr)
     # Check the coda magnitude pick.  Resource/pick ids are regenerated in
     # memory and snr is not supported in an s-file, so none of those are
     # compared here.
     got_amp = read_ev.amplitudes[1]
     want_amp = test_ev.amplitudes[1]
     self.assertEqual(got_amp.type, want_amp.type)
     self.assertEqual(got_amp.unit, want_amp.unit)
     self.assertEqual(got_amp.generic_amplitude, want_amp.generic_amplitude)
     self.assertEqual(got_amp.waveform_id.station_code,
                      want_amp.waveform_id.station_code)
     self.assertEqual(got_amp.waveform_id.channel_code,
                      want_amp.waveform_id.channel_code[0] +
                      want_amp.waveform_id.channel_code[-1])
     self.assertEqual(got_amp.magnitude_hint, want_amp.magnitude_hint)
     self.assertEqual(got_amp.category, want_amp.category)
Пример #11
0
 def test_read_wavename(self):
     """readwavename should find exactly one wavefile reference."""
     path = os.path.join(self.testing_path, '01-0411-15L.S201309')
     wave_names = readwavename(path)
     self.assertEqual(len(wave_names), 1)
Пример #12
0
def write_correlations(event_list,
                       wavbase,
                       extract_len,
                       pre_pick,
                       shift_len,
                       lowcut=1.0,
                       highcut=10.0,
                       max_sep=8,
                       min_link=8,
                       cc_thresh=0.0,
                       plotvar=False,
                       debug=0):
    """
    Write a dt.cc file for hypoDD input for a given list of events.

    Takes an input list of events and computes pick refinements by correlation.
    Outputs two files, dt.cc and dt.cc2, each provides a different weight,
    dt.cc uses weights of the cross-correlation, and dt.cc2 provides weights
    as the square of the cross-correlation.

    :type event_list: list
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type wavbase: str
    :param wavbase: Path to the seisan wave directory that the wavefiles in the
                    S-files are stored
    :type extract_len: float
    :param extract_len: Length in seconds to extract around the pick
    :type pre_pick: float
    :param pre_pick: Time before the pick to start the correlation window
    :type shift_len: float
    :param shift_len: Time to allow pick to vary
    :type lowcut: float
    :param lowcut: Lowcut in Hz - default=1.0
    :type highcut: float
    :param highcut: Highcut in Hz - default=10.0
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link: Minimum links for an event to be paired
    :type cc_thresh: float
    :param cc_thresh: Threshold to include cross-correlation results.
    :type plotvar: bool
    :param plotvar: To show the pick-correction plots, defaults to False.
    :type debug: int
    :param debug: Variable debug levels from 0-5, higher=more output.

    :raises IOError: If a master or slave wavefile cannot be found.

    .. warning:: This is not a fast routine!

    .. warning::
        In contrast to seisan's corr routine, but in accordance with the
        hypoDD manual, this outputs corrected differential time.

    .. note::
        Currently we have not implemented a method for taking
        unassociated event objects and wavefiles.  As such if you have events \
        with associated wavefiles you are advised to generate Sfiles for each \
        event using the sfile_util module prior to this step.

    .. note::
        There is no provision to taper waveforms within these functions, if you
        desire this functionality, you should apply the taper before calling
        this.  Note the :func:`obspy.Trace.taper` functions.
    """
    warnings.filterwarnings(action="ignore",
                            message="Maximum of cross correlation " +
                            "lower than 0.8: *")
    corr_list = []
    k_events = len(list(event_list))
    # Context managers guarantee dt.cc/dt.cc2 are closed even on error.
    with open('dt.cc', 'w') as f, open('dt.cc2', 'w') as f2:
        for i, master in enumerate(event_list):
            master_sfile = master[1]
            if debug > 1:
                print('Computing correlations for master: %s' % master_sfile)
            master_event_id = master[0]
            master_event = read_nordic(master_sfile)[0]
            master_picks = master_event.picks
            master_ori_time = master_event.origins[0].time
            master_location = (master_event.origins[0].latitude,
                               master_event.origins[0].longitude,
                               master_event.origins[0].depth / 1000.0)
            master_wavefiles = readwavename(master_sfile)
            masterpath = glob.glob(os.path.join(wavbase, master_wavefiles[0]))
            if not masterpath:
                # BUG FIX: a missing master wavefile used to leave
                # masterstream unbound (or stale from a previous master).
                raise IOError('No wavefile found: ' + master_wavefiles[0] +
                              ' ' + master_sfile)
            masterstream = read(masterpath[0])
            # BUG FIX: iterate the *extra* wavefiles only - the first one is
            # already loaded above; re-reading it duplicated its traces.
            for wavefile in master_wavefiles[1:]:
                try:
                    # BUG FIX: os.join does not exist (AttributeError);
                    # os.path.join is the correct call.  The unreachable
                    # ``continue`` after the raise was also removed.
                    masterstream += read(os.path.join(wavbase, wavefile))
                except Exception:
                    raise IOError("Couldn't find wavefile")
            for j in range(i + 1, k_events):
                # Use this tactic to only output unique event pairings
                slave_sfile = event_list[j][1]
                if debug > 2:
                    print('Comparing to event: %s' % slave_sfile)
                slave_event_id = event_list[j][0]
                slave_wavefiles = readwavename(slave_sfile)
                try:
                    slavestream = read(os.path.join(wavbase,
                                                    slave_wavefiles[0]))
                except Exception:
                    # Narrowed from a bare except clause.
                    raise IOError('No wavefile found: ' + slave_wavefiles[0] +
                                  ' ' + slave_sfile)
                # As above, only load the additional wavefiles.
                for wavefile in slave_wavefiles[1:]:
                    try:
                        slavestream += read(os.path.join(wavbase, wavefile))
                    except IOError:
                        print('No waveform found: %s' %
                              (wavbase + os.sep + wavefile))
                        continue
                # Write out the header line
                event_text = '#' + str(master_event_id).rjust(10) +\
                    str(slave_event_id).rjust(10) + ' 0.0   \n'
                event_text2 = '#' + str(master_event_id).rjust(10) +\
                    str(slave_event_id).rjust(10) + ' 0.0   \n'
                slave_event = read_nordic(slave_sfile)[0]
                slave_picks = slave_event.picks
                slave_ori_time = slave_event.origins[0].time
                slave_location = (slave_event.origins[0].latitude,
                                  slave_event.origins[0].longitude,
                                  slave_event.origins[0].depth / 1000.0)
                if dist_calc(master_location, slave_location) > max_sep:
                    if debug > 0:
                        print('Seperation exceeds max_sep: %s' %
                              (dist_calc(master_location, slave_location)))
                    continue
                links = 0
                phases = 0
                for pick in master_picks:
                    if not hasattr(pick, 'phase_hint') or \
                            len(pick.phase_hint) == 0:
                        warnings.warn('No phase-hint for pick:')
                        print(pick)
                        continue
                    if pick.phase_hint[0].upper() not in ['P', 'S']:
                        # Only use P and S picks, not amplitude or 'other'
                        warnings.warn('Will only use P or S phase picks')
                        print(pick)
                        continue
                    # Find station, phase pairs
                    # Added by Carolin
                    slave_matches = [
                        p for p in slave_picks if hasattr(p, 'phase_hint')
                        and p.phase_hint == pick.phase_hint and
                        p.waveform_id.station_code ==
                        pick.waveform_id.station_code
                    ]
                    if masterstream.select(
                            station=pick.waveform_id.station_code,
                            channel='*' +
                            pick.waveform_id.channel_code[-1]):
                        mastertr = masterstream.select(
                            station=pick.waveform_id.station_code,
                            channel='*' +
                            pick.waveform_id.channel_code[-1])[0]
                    else:
                        # BUG FIX: the break previously only happened when
                        # debug > 1, so at low debug levels a missing master
                        # trace left mastertr stale or unbound.
                        if debug > 1:
                            print('No waveform data for ' +
                                  pick.waveform_id.station_code + '.' +
                                  pick.waveform_id.channel_code)
                            print(pick.waveform_id.station_code + '.' +
                                  pick.waveform_id.channel_code + ' ' +
                                  slave_sfile + ' ' + master_sfile)
                        break
                    # Loop through the matches
                    for slave_pick in slave_matches:
                        if slavestream.select(
                                station=slave_pick.waveform_id.station_code,
                                channel='*' +
                                slave_pick.waveform_id.channel_code[-1]):
                            slavetr = slavestream.select(
                                station=slave_pick.waveform_id.station_code,
                                channel='*' + slave_pick.waveform_id.
                                channel_code[-1])[0]
                        else:
                            print('No slave data for ' +
                                  slave_pick.waveform_id.station_code + '.' +
                                  slave_pick.waveform_id.channel_code)
                            print(pick.waveform_id.station_code + '.' +
                                  pick.waveform_id.channel_code + ' ' +
                                  slave_sfile + ' ' + master_sfile)
                            break
                        # Correct the picks
                        try:
                            correction, cc = xcorr_pick_correction(
                                pick.time, mastertr, slave_pick.time,
                                slavetr, pre_pick, extract_len - pre_pick,
                                shift_len, filter="bandpass",
                                filter_options={'freqmin': lowcut,
                                                'freqmax': highcut},
                                plot=plotvar)
                            # Get the differential travel time using the
                            # corrected time.
                            # Check that the correction is within the allowed
                            # shift.  This can occur in the obspy routine when
                            # the correlation function is increasing at the
                            # end of the window.
                            if abs(correction) > shift_len:
                                warnings.warn('Shift correction too large, ' +
                                              'will not use')
                                continue
                            correction = (pick.time - master_ori_time) -\
                                (slave_pick.time + correction -
                                 slave_ori_time)
                            links += 1
                            if cc >= cc_thresh:
                                weight = cc
                                phases += 1
                                # added by Caro
                                event_text += pick.waveform_id.station_code.\
                                    ljust(5) + _cc_round(correction, 3).\
                                    rjust(11) + _cc_round(weight, 3).\
                                    rjust(8) + ' ' + pick.phase_hint + '\n'
                                event_text2 += pick.waveform_id.station_code\
                                    .ljust(5) + _cc_round(correction, 3).\
                                    rjust(11) +\
                                    _cc_round(weight * weight, 3).rjust(8) +\
                                    ' ' + pick.phase_hint + '\n'
                                if debug > 3:
                                    print(event_text)
                            else:
                                print('cc too low: %s' % cc)
                            corr_list.append(cc * cc)
                        except Exception:
                            # Narrowed from a bare except so KeyboardInterrupt
                            # and SystemExit are no longer swallowed.
                            msg = "Couldn't compute correlation correction"
                            warnings.warn(msg)
                            continue
                # Only keep well-linked pairs with at least one good phase.
                if links >= min_link and phases > 0:
                    f.write(event_text)
                    f2.write(event_text2)
    if plotvar:
        plt.hist(corr_list, 150)
        plt.show()
    return
Пример #13
0
 def setUpClass(cls):
     """Build templates, detections and channel delays shared by the tests.

     Reads four s-files, loads each referenced wavefile (trimming channel
     codes to two characters), builds one template per s-file via
     ``from_meta_file`` and creates two reference ``Detection`` objects.
     """
     # Paths to the Nordic s-files (REA) and waveforms (WAV) test data.
     cls.testing_path = os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'test_data', 'REA',
         'TEST_')
     cls.wave_path = os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'test_data', 'WAV',
         'TEST_')
     # Each entry becomes a class attribute called ``name`` holding the
     # template built from ``sfile``.
     key_dict = [{
         'name': 'template',
         'sfile': '21-1412-02L.S201309'
     }, {
         'name': 'detection',
         'sfile': '21-1759-04L.S201309'
     }, {
         'name': 'template_spicks',
         'sfile': '18-2120-53L.S201309'
     }, {
         'name': 'detection_spicks',
         'sfile': '18-2350-08L.S201309'
     }]
     for item in key_dict:
         # Read the waveform referenced by this s-file's wavename line.
         st = read(
             os.path.join(
                 cls.wave_path,
                 readwavename(os.path.join(cls.testing_path,
                                           item['sfile']))[0]))
         # Keep first and last channel characters (two-char convention).
         for tr in st:
             tr.stats.channel = tr.stats.channel[0] + tr.stats.channel[-1]
         # Replace the bare filename with the full path and attach the
         # stream, then build and store the template on the class.
         item.update({
             'st': st,
             'sfile': os.path.join(cls.testing_path, item['sfile'])
         })
         setattr(
             cls, item['name'],
             from_meta_file(meta_file=item['sfile'],
                            lowcut=5,
                            highcut=15,
                            samp_rate=40,
                            filt_order=4,
                            length=3,
                            swin='all',
                            prepick=0.05,
                            st=item['st'])[0])
     detection_event = read_events(
         os.path.join(cls.testing_path, '21-1759-04L.S201309'))[0]
     # NOTE(review): this reads 18-2350-07L while key_dict above uses
     # 18-2350-08L - confirm the mismatch is intentional.
     detection_spicks_event = read_events(
         os.path.join(cls.testing_path, '18-2350-07L.S201309'))[0]
     cls.detections = [
         Detection(detect_time=detection_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0),
         Detection(detect_time=detection_spicks_event.origins[0].time,
                   detect_val=2.0,
                   no_chans=5,
                   threshold=1.9,
                   typeofdet='corr',
                   event=detection_spicks_event,
                   template_name='test_template',
                   threshold_type='MAD',
                   threshold_input=8.0)
     ]
     # Per-channel start-time offsets of each template trace relative to
     # the earliest trace in the template.
     tstart = min(tr.stats.starttime for tr in cls.template)
     cls.delays = {}
     for tr in cls.template:
         cls.delays.update({
             tr.stats.station + '.' + tr.stats.channel:
             tr.stats.starttime - tstart
         })
     # Always raise warnings so tests can assert on them.
     warnings.simplefilter("always")
Пример #14
0
import os
import subprocess

import obspy.io.nordic.core as nordic
from obspy.core import *

# Directories holding the Nordic s-files and the referenced waveforms.
sfile_dir = "REA/EVENT/1996/06/"
wave_file_dir = "WAV/"

# BUG FIX: list the s-files with os.listdir instead of shelling out to
# ``ls`` via subprocess - portable (works on Windows), no subprocess
# overhead, and not dependent on the user's shell/aliases.  sorted() keeps
# the alphabetical order ``ls`` produced.
for sfile_name in sorted(os.listdir(sfile_dir)):
    # read_nordic returns a Catalog; readwavename the referenced wavefiles.
    catalog = nordic.read_nordic(sfile_dir + sfile_name)
    wave_names = nordic.readwavename(sfile_dir + sfile_name)
    stream = Stream()
    for wave in wave_names:
        stream += read(wave_file_dir + wave)
    stream.normalize()
    # Plot the 800 s window starting at the event origin time.
    start_time = catalog.events[0].origins[0].time
    stream.trim(start_time + 0, start_time + 800)
    stream.plot()