Esempio n. 1
0
 def test_read_picks_across_day_end(self):
     """Check that picks crossing midnight are read consistently."""
     sfile = os.path.join(self.testing_path, 'sfile_over_day')
     # Reading raises "UserWarning: AIN in header, currently unsupported"
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         event = read_nordic(sfile)[0]
     origin_time = event.origins[0].time
     pick_times = [p.time for p in event.picks]
     for p in event.picks:
         # Every pick must follow the origin and fall within 60 s of it.
         self.assertGreater(p.time, origin_time)
         self.assertLessEqual(p.time - origin_time, 60)
     # Zero-hour and 24-hour encodings must be handled identically.
     sfile = os.path.join(self.testing_path, 'sfile_over_day_zeros')
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         event_2 = read_nordic(sfile)[0]
     origin_time_2 = event_2.origins[0].time
     for p in event_2.picks:
         self.assertGreater(p.time, origin_time_2)
         self.assertLessEqual(p.time - origin_time_2, 60)
         # Each pick must match one from the first event.
         self.assertIn(p.time, pick_times)
     self.assertEqual(origin_time_2, origin_time)
Esempio n. 2
0
 def test_read_picks_across_day_end(self):
     """Picks recorded after midnight must stay after the origin time."""
     path_one = os.path.join(self.testing_path, 'sfile_over_day')
     # Suppress "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         first = read_nordic(path_one)[0]
     first_origin = first.origins[0].time
     first_times = [pk.time for pk in first.picks]
     for pk in first.picks:
         # Picks follow the origin and land within 60 s of it.
         self.assertGreater(pk.time, first_origin)
         self.assertLessEqual(pk.time - first_origin, 60)
     # Zero-hour picks must be read the same as 24-hour picks.
     path_two = os.path.join(self.testing_path, 'sfile_over_day_zeros')
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         second = read_nordic(path_two)[0]
     second_origin = second.origins[0].time
     for pk in second.picks:
         self.assertGreater(pk.time, second_origin)
         self.assertLessEqual(pk.time - second_origin, 60)
         # Every pick must also appear in the first event.
         self.assertTrue(pk.time in first_times)
     self.assertEqual(second_origin, first_origin)
Esempio n. 3
0
 def test_read_extra_header(self):
     """An extra header line must not change the parsed hypocentre."""
     extra = os.path.join(self.testing_path, 'Sfile_extra_header')
     plain = os.path.join(self.testing_path, '01-0411-15L.S201309')
     test_event = read_nordic(extra)[0]
     header_event = read_nordic(plain)[0]
     # Origin time and location must agree between the two files.
     for attr in ('time', 'latitude', 'longitude', 'depth'):
         self.assertEqual(getattr(test_event.origins[0], attr),
                          getattr(header_event.origins[0], attr))
Esempio n. 4
0
 def test_read_extra_header(self):
     """The extra-header s-file must parse like its plain counterpart."""
     with_extra = os.path.join(self.testing_path, 'Sfile_extra_header')
     without_extra = os.path.join(self.testing_path, '01-0411-15L.S201309')
     event_a = read_nordic(with_extra)[0]
     event_b = read_nordic(without_extra)[0]
     origin_a, origin_b = event_a.origins[0], event_b.origins[0]
     # The hypocentre must be unaffected by the extra header line.
     self.assertEqual(origin_a.time, origin_b.time)
     self.assertEqual(origin_a.latitude, origin_b.latitude)
     self.assertEqual(origin_a.longitude, origin_b.longitude)
     self.assertEqual(origin_a.depth, origin_b.depth)
Esempio n. 5
0
 def test_read_moment(self):
     """Test the reading of seismic moment from the s-file."""
     event = read_nordic(os.path.join(self.testing_path, 'automag.out'))[0]
     # Exactly one moment magnitude, with value 0.7, is expected.
     mw = [m for m in event.magnitudes if m.magnitude_type == 'MW']
     self.assertEqual(len(mw), 1)
     self.assertEqual(mw[0].mag, 0.7)
Esempio n. 6
0
 def test_read_many_events(self):
     """All 50 events in select.out must be read."""
     sfile = os.path.join(self.testing_path, 'select.out')
     # The file triggers "UserWarning: AIN in header, currently
     # unsupported"; silence it while reading.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         cat = read_nordic(sfile)
     self.assertEqual(len(cat), 50)
Esempio n. 7
0
 def test_read_moment(self):
     """Test the reading of seismic moment from the s-file."""
     path = os.path.join(self.testing_path, 'automag.out')
     event = read_nordic(path)[0]
     moment_mags = [mag for mag in event.magnitudes
                    if mag.magnitude_type == 'MW']
     # One MW magnitude of 0.7 is expected in automag.out.
     self.assertEqual(len(moment_mags), 1)
     self.assertEqual(moment_mags[0].mag, 0.7)
Esempio n. 8
0
 def _get_nordic_wavefile_name(self, database_filename):
     """Return the full path of the single wavefile named in an s-file."""
     log(f'database filename = {database_filename}', 'verbose')
     cat, wav_names = read_nordic(database_filename, return_wavnames=True)
     assert len(wav_names) == 1, 'More than one wav_name in database file'
     wav_name = wav_names[0][0]
     # The first two '-'-separated fields of the wavefile name select the
     # subdirectories under the base path (presumably year/month — TODO
     # confirm against the archive layout).
     parts = wav_name.split('-')
     return str(self.wav_base_path / parts[0] / parts[1] / wav_name)
Esempio n. 9
0
 def test_read_many_events(self):
     """A multi-event select.out file should yield 50 events."""
     select_file = os.path.join(self.testing_path, 'select.out')
     # The AIN header field is unsupported and raises a UserWarning.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         events = read_nordic(select_file)
     self.assertEqual(len(events), 50)
Esempio n. 10
0
 def test_read_extra_header(self):
     """Origins must be identical with and without an extra header line."""
     extra_path = os.path.join(self.testing_path, 'Sfile_extra_header')
     normal_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
     # Both files raise "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         extra_event = read_nordic(extra_path)[0]
         normal_event = read_nordic(normal_path)[0]
     for field in ('time', 'latitude', 'longitude', 'depth'):
         self.assertEqual(getattr(extra_event.origins[0], field),
                          getattr(normal_event.origins[0], field))
Esempio n. 11
0
 def _get_nordic_wavefile_name(self, database_filename):
     """Return the full path of the single wavefile named in an s-file."""
     log(f'database filename = {database_filename}', level='verbose')
     cat, wav_names = read_nordic(database_filename, return_wavnames=True)
     assert len(wav_names) == 1, 'More than one wav_name in database file'
     wav_name = wav_names[0][0]
     # The first two '-'-separated fields of the wavefile name select the
     # subdirectories below the wavefile base path.
     fields = wav_name.split('-')
     return os.path.join(self.wav_base_path, fields[0], fields[1], wav_name)
Esempio n. 12
0
 def test_read_extra_header(self):
     """Check the extra-header s-file parses like the plain one."""
     paths = [os.path.join(self.testing_path, name)
              for name in ('Sfile_extra_header', '01-0411-15L.S201309')]
     # Silence "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         first_origin = read_nordic(paths[0])[0].origins[0]
         second_origin = read_nordic(paths[1])[0].origins[0]
     self.assertEqual(first_origin.time, second_origin.time)
     self.assertEqual(first_origin.latitude, second_origin.latitude)
     self.assertEqual(first_origin.longitude, second_origin.longitude)
     self.assertEqual(first_origin.depth, second_origin.depth)
Esempio n. 13
0
 def test_round_len(self):
     """Every pick line written back out must be exactly 80 characters."""
     sfile = os.path.join(self.testing_path, 'round_len_undef.sfile')
     # Reading emits "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         event = read_nordic(sfile)[0]
     for line in nordpick(event):
         self.assertEqual(len(line), 80)
Esempio n. 14
0
 def test_inaccurate_picks(self):
     """Badly formatted picks must still serialize to 80-char lines."""
     bad_picks = os.path.join(self.testing_path, 'bad_picks.sfile')
     # Parsing raises "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         catalog = read_nordic(bad_picks)
     for line in nordpick(catalog[0]):
         self.assertEqual(len(line), 80)
Esempio n. 15
0
 def test_inaccurate_picks(self):
     """Each pick line from bad_picks.sfile is 80 columns wide."""
     path = os.path.join(self.testing_path, 'bad_picks.sfile')
     # Suppress the unsupported-AIN UserWarning emitted on read.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         events = read_nordic(path)
     pick_lines = nordpick(events[0])
     for pick_line in pick_lines:
         self.assertEqual(len(pick_line), 80)
Esempio n. 16
0
 def test_round_len(self):
     """Pick strings from a file with undefined lengths stay 80 chars."""
     path = os.path.join(self.testing_path, 'round_len_undef.sfile')
     # Ignore the unsupported-AIN UserWarning raised while parsing.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         parsed = read_nordic(path)[0]
     lines = nordpick(parsed)
     self.assertTrue(all(len(line) == 80 for line in lines))
Esempio n. 17
0
    def import_sfile(self, input_path):
        """Read a Nordic-format s-file and append its events to self.events.

        :param input_path: path to the s-file to read.

        Side effects: builds one Event (with its Picks) per catalog entry
        and appends it to ``self.events``. Exits the process when the file
        cannot be read or contains no events.
        """
        try:
            obspyCatalogMeta = read_nordic(input_path, return_wavnames=True)
            #If return_wavnames not specified it returns directly the events, otherwise:
            #obspyCatalogMeta[0] contains the events
            #obspyCatalogMeta[1] contains the waveform files
        except Exception as e:
            # Any read failure is fatal: report it and abort the run.
            print(
                "[preprocessing metadata] \033[91m ERROR!!\033[0m Error reading Nordic Format file: "
                + str(e))
            track = traceback.format_exc()
            print(track)
            sys.exit()

        # An s-file with no events is also treated as fatal.
        if len(obspyCatalogMeta[0].events) == 0:
            print(
                "[preprocessing metadata] \033[91m ERROR!!\033[0m No events found in "
                + input_path)
            sys.exit(0)

        #For testing we can add an event generated by code
        #using the function full_test_event
        #https://github.com/obspy/obspy/blob/master/obspy/io/nordic/tests/test_nordic.py
        #obspyCatalogMeta.events.append(full_test_event())
        #write_select (obspyCatalogMeta[0], "select_debug.out")

        #wave files can be read alone with:
        #print(readwavename(input_path))

        eventsCatalog = obspyCatalogMeta[0]
        waveform_files = obspyCatalogMeta[1]

        for i, event in enumerate(eventsCatalog.events):
            print("Processing event " + str(i))
            # Only the first origin of each event is used here.
            eventOriginTime = event.origins[0].time
            lat = event.origins[0].latitude
            lon = event.origins[0].longitude
            depth = event.origins[0].depth
            if len(event.magnitudes) > 0:
                mag = event.magnitudes[0].mag
            else:
                # Missing magnitude is only a warning; fall back to 0.
                print(
                    "[preprocessing metadata] \033[91m WARNING!!\033[0m Magnitude not found in event number"
                    + str(i))
                mag = 0
                #sys.exit()

            eventid = event.resource_id.id
            # Waveform files are matched to events positionally (index i).
            e = Event(eventOriginTime, lat, lon, depth, mag, eventid,
                      waveform_files[i])
            self.events.append(e)

            for pick in event.picks:
                station_code = pick.waveform_id.station_code
                d = Pick(station_code, pick.time, pick.phase_hint)
                e.picks.append(d)
Esempio n. 18
0
 def test_read_empty_header(self):
     """
     Check a known issue, empty header info S-file: bug found by
     Dominic Evanzia.
     """
     sfile = os.path.join(self.testing_path, 'Sfile_no_location')
     test_event = read_nordic(sfile)[0]
     # With no location in the header, these fields should all be unset.
     origin = test_event.origins[0]
     self.assertFalse(origin.latitude)
     self.assertFalse(origin.longitude)
     self.assertFalse(origin.depth)
Esempio n. 19
0
 def test_read_event(self):
     """
     Test the wrapper.
     """
     sfile = os.path.join(self.testing_path, '01-0411-15L.S201309')
     # Ignore "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         first_event = read_nordic(sfile)[0]
     # A single-event file must produce exactly one origin.
     self.assertEqual(len(first_event.origins), 1)
Esempio n. 20
0
 def test_read_event(self):
     """Smoke-test the single-event read wrapper."""
     path = os.path.join(self.testing_path, '01-0411-15L.S201309')
     # The AIN header field is unsupported and raises a UserWarning.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         parsed = read_nordic(path)
     self.assertEqual(len(parsed[0].origins), 1)
Esempio n. 21
0
 def test_read_empty_header(self):
     """
     Known issue (reported by Dominic Evanzia): an s-file whose header
     carries no location must yield empty origin fields.
     """
     event = read_nordic(os.path.join(self.testing_path,
                                      'Sfile_no_location'))[0]
     for field in ('latitude', 'longitude', 'depth'):
         self.assertFalse(getattr(event.origins[0], field))
Esempio n. 22
0
 def test_read_picks_across_day_end(self):
     """Picks after midnight must still follow the origin time."""
     day_file = os.path.join(self.testing_path, 'sfile_over_day')
     event = read_nordic(day_file)[0]
     origin = event.origins[0].time
     pick_times = [p.time for p in event.picks]
     for p in event.picks:
         # After the origin, and within 60 s of it.
         self.assertGreater(p.time, origin)
         self.assertLessEqual(p.time - origin, 60)
     # Zero hours and 24-hour picks must be handled the same.
     zeros_file = os.path.join(self.testing_path, 'sfile_over_day_zeros')
     event_2 = read_nordic(zeros_file)[0]
     origin_2 = event_2.origins[0].time
     for p in event_2.picks:
         self.assertGreater(p.time, origin_2)
         self.assertLessEqual(p.time - origin_2, 60)
         # Must match a pick from the first event.
         self.assertIn(p.time, pick_times)
     self.assertEqual(origin_2, origin)
Esempio n. 23
0
 def test_read_picks_across_day_end(self):
     """Zero-hour and 24-hour pick encodings must read identically."""
     over_day = os.path.join(self.testing_path, 'sfile_over_day')
     first = read_nordic(over_day)[0]
     first_origin = first.origins[0].time
     first_pick_times = [pk.time for pk in first.picks]
     for pk in first.picks:
         # A pick comes after the origin and within a minute of it.
         self.assertGreater(pk.time, first_origin)
         self.assertLessEqual(pk.time - first_origin, 60)
     over_day_zeros = os.path.join(self.testing_path,
                                   'sfile_over_day_zeros')
     second = read_nordic(over_day_zeros)[0]
     second_origin = second.origins[0].time
     for pk in second.picks:
         self.assertGreater(pk.time, second_origin)
         self.assertLessEqual(pk.time - second_origin, 60)
         # Every pick must correspond to one in the first event.
         self.assertTrue(pk.time in first_pick_times)
     self.assertEqual(second_origin, first_origin)
Esempio n. 24
0
 def test_read_moment(self):
     """
     Test the reading of seismic moment from the s-file.
     """
     automag = os.path.join(self.testing_path, 'automag.out')
     # Ignore "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         event = read_nordic(automag)[0]
     moment = [m for m in event.magnitudes if m.magnitude_type == 'MW']
     # Exactly one MW magnitude of 0.7 is expected.
     self.assertEqual(len(moment), 1)
     self.assertEqual(moment[0].mag, 0.7)
Esempio n. 25
0
 def test_read_moment(self):
     """
     Test the reading of seismic moment from the s-file.
     """
     path = os.path.join(self.testing_path, 'automag.out')
     # Parsing raises "UserWarning: AIN in header, currently unsupported".
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
         parsed = read_nordic(path)[0]
     mw_mags = [mag for mag in parsed.magnitudes
                if mag.magnitude_type == 'MW']
     self.assertEqual(len(mw_mags), 1)
     self.assertEqual(mw_mags[0].mag, 0.7)
Esempio n. 26
0
def _parse_nordic(filepath: str):
    """Flatten all picks of a Nordic file into a pandas DataFrame.

    One row per pick, with the event magnitude repeated on each of its
    picks. NOTE: the 'magnitud' spelling is part of the output schema
    and is preserved deliberately.
    """
    catalog = read_nordic(filepath)
    columns = ('magnitud', 'time', 'network', 'station', 'channel')
    picks = {name: [] for name in columns}
    for event in catalog:
        event_mag = _get_magnitude(event)
        for pick in event.picks:
            wid = pick.waveform_id
            picks['magnitud'].append(event_mag)
            picks['time'].append(pick.time)
            picks['network'].append(wid.network_code)
            picks['station'].append(wid.station_code)
            picks['channel'].append(wid.channel_code)
    return pd.DataFrame(picks)
Esempio n. 27
0
    def test_read_empty_header(self):
        """
        Known issue (bug found by Dominic Evanzia): an s-file with empty
        header info must leave the origin location fields unset.
        """
        # Ignore "UserWarning: AIN in header, currently unsupported".
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            event = read_nordic(os.path.join(self.testing_path,
                                             'Sfile_no_location'))[0]
        origin = event.origins[0]
        self.assertFalse(origin.latitude)
        self.assertFalse(origin.longitude)
        self.assertFalse(origin.depth)
Esempio n. 28
0
    def test_read_empty_header(self):
        """
        Function to check a known issue, empty header info S-file: bug
        found by Dominic Evanzia.
        """
        sfile = os.path.join(self.testing_path, 'Sfile_no_location')
        # Ignore "UserWarning: AIN in header, currently unsupported".
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            event = read_nordic(sfile)[0]
        # No location in the header -> all three fields evaluate falsy.
        for value in (event.origins[0].latitude,
                      event.origins[0].longitude,
                      event.origins[0].depth):
            self.assertFalse(value)
Esempio n. 29
0
File: io.py Project: jyyjqq/SeisNN
def get_event(filename):
    """Collect events from an iterable of Nordic s-file paths.

    Each pick's waveform_id is tagged with the wavefile names read from
    its file. Returns the list of events, or None when reading any file
    fails (the original best-effort contract is preserved).

    :param filename: iterable of s-file paths.
    """
    import warnings
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        try:
            events = []
            for file in filename:
                catalog, wavename = read_nordic(file, return_wavnames=True)
                for event in catalog.events:
                    for pick in event.picks:
                        pick.waveform_id.wavename = wavename
                    events.append(event)
            return events
        except Exception:
            # Was a bare `except:` — narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed. Still yields
            # None on failure, as before.
            return None
Esempio n. 30
0
def import_sfile(input_path):
    """Read a Nordic-format s-file and return its (catalog, wavefiles) tuple.

    :param input_path: path to the s-file to read.
    :return: tuple as returned by ``read_nordic(..., return_wavnames=True)``:
             element 0 is the event catalog, element 1 the waveform names.

    Exits the process when the file cannot be read or contains no events.
    """
    try:
        obspyCatalog = read_nordic(input_path, return_wavnames=True)
        #If return_wavnames not specified it returns directly the events, otherwise:
        #obspyCatalogMeta[0] contains the events
        #obspyCatalogMeta[1] contains the waveform files
    except Exception as e:
        # Any read failure is fatal: report it and abort the whole run.
        print ("[preprocessing metadata] \033[91m ERROR!!\033[0m Error reading Nordic Format file: "+str(e))
        track = traceback.format_exc()
        print(track)
        sys.exit()

    # An s-file without events is also treated as fatal.
    if len(obspyCatalog[0].events) == 0 :
        print ("[preprocessing metadata] \033[91m ERROR!!\033[0m No events found in "+input_path)
        sys.exit(0)

    return obspyCatalog
Esempio n. 31
0
    def test_fullrun_NORDIC(self):
        """
        Test Read in catalog from NORDIC and run with a tt_stations_1D file
        """
        # Travel-time lookup database used by the associator.
        db_tt = 'sqlite:///' + os.path.join(self.testing_path, 'tt_lsv_1D.db')
        assoc_params = dict(max_km=80,
                            aggregation=1,
                            aggr_norm='L2',
                            assoc_ot_uncert=1,
                            nsta_declare=2,
                            cutoff_outlier=10,
                            loc_uncert_thresh=0.1)
        catalog_file = os.path.join(self.testing_path, 'test_catalog.nordic')
        db_assoc_file = 'assoc_1D.db'
        db_assoc_url = 'sqlite:///' + db_assoc_file
        # read_nordic(..., True) returns (catalog, wavefile names).
        events, wavefiles = read_nordic(catalog_file, True)
        txt = ''
        for event, wavefile in zip(events, wavefiles):
            # Start from a fresh association database for every event.
            if os.path.exists(db_assoc_file):
                os.remove(db_assoc_file)
            dbsession = make_assoc_session(db_assoc_url)
            for pick in event.picks:
                my_pick = tables1D.Pick.from_obspy(pick)
                dbsession.add(my_pick)
                dbsession.commit()

            assoc = assoc1D.LocalAssociator(db_assoc_url, db_tt,
                                            **assoc_params)
            assoc.id_candidate_events()
            assoc.associate_candidates()
            if assoc.count_associated():
                assoc.single_phase()
            # Accumulate the associator's text form for later comparison.
            txt += str(assoc) + '\n'
        with open('temp.txt', 'w') as f:
            f.write(txt)
        # Compare against the stored reference output, then clean up.
        self.assertTextFilesEqual(
            'temp.txt', os.path.join(self.testing_path,
                                     'test_catalog_out.txt'))
        os.remove('temp.txt')
        os.remove('assoc_1D.db')
Esempio n. 32
0
def get_event_stations(reading_path, output_level=0):
    """
    Reads S-file and gets all stations from it
    :param reading_path:    string  path to REA database
    :param output_level:    int     0 - min output, 5 - max output, default - 0
    :return: list of station codes, or -1 if the file could not be parsed
    """
    if output_level >= 5:
        logging.info('Reading file: ' + reading_path)

    try:
        # Events tuple: (event.Catalog, [waveforms file names])
        events = nordic_reader.read_nordic(reading_path, True)
    except (nordic_reader.NordicParsingError, ValueError,
            AttributeError) as error:
        # The three known failure modes were handled by three identical
        # except clauses; collapsed into one. Same logging, same -1
        # sentinel, so callers are unaffected.
        if output_level >= 2:
            logging.warning('In ' + reading_path + ': ' + str(error))
        return -1

    stations = []
    for event in events[0].events:
        try:
            # Only events with picks contribute station data; iterating
            # an empty pick list is a no-op, so no length guard needed.
            for pick in event.picks:
                stations.append(pick.waveform_id.station_code)
        except ValueError as error:
            if output_level >= 2:
                logging.warning('In ' + reading_path + ': ' + str(error))
            continue

    return stations
Esempio n. 33
0
def get_single_picks_stations_data(nordic_path):
    """
    Returns all picks for stations with corresponding pick time in format: [(UTC start time, Station name)]
    :param nordic_path: string  path to REA database
    :return: list of (pick time, station code) tuples, or -1 on read error
    """
    try:
        # Events tuple: (event.Catalog, [waveforms file names])
        events = nordic_reader.read_nordic(nordic_path, True)
    except (nordic_reader.NordicParsingError, ValueError,
            AttributeError) as error:
        # Three identical except clauses collapsed into one tuple clause;
        # logging and the legacy -1 sentinel are unchanged for callers.
        if config.output_level >= 2:
            logging.warning('In ' + nordic_path + ': ' + str(error))
        return -1

    slices = []
    # (The old `index` counter was incremented but never read; removed.)
    for event in events[0].events:
        try:
            # Only events with picks contribute data; an empty pick list
            # simply yields nothing.
            for pick in event.picks:
                slices.append((pick.time, pick.waveform_id.station_code))
        except ValueError as error:
            if config.output_level >= 2:
                logging.warning('In ' + nordic_path + ': ' + str(error))
            continue

    return slices
Esempio n. 34
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.

     Round-trips a synthetic event through _write_nordic/read_nordic and
     checks that picks, origin, magnitudes and amplitudes survive.
     """
     # Set-up a test event
     test_event = full_test_event()
     # Add the event to a catalogue which can be used for QuakeML testing
     test_cat = Catalog()
     test_cat += test_event
     # Check the read-write s-file functionality
     with TemporaryWorkingDirectory():
         sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test',
                               explosion=True, overwrite=True)
         self.assertEqual(readwavename(sfile), ['test'])
         read_cat = Catalog()
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             read_cat += read_nordic(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks must round-trip unchanged (channel code: only the last
     # character is compared, as written by the s-file format).
     for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(read_pick.time, test_pick.time)
         self.assertEqual(read_pick.backazimuth, test_pick.backazimuth)
         self.assertEqual(read_pick.onset, test_pick.onset)
         self.assertEqual(read_pick.phase_hint, test_pick.phase_hint)
         self.assertEqual(read_pick.polarity, test_pick.polarity)
         self.assertEqual(read_pick.waveform_id.station_code,
                          test_pick.waveform_id.station_code)
         self.assertEqual(read_pick.waveform_id.channel_code[-1],
                          test_pick.waveform_id.channel_code[-1])
     # assert read_ev.origins[0].resource_id ==\
     #     test_ev.origins[0].resource_id
     self.assertEqual(read_ev.origins[0].time,
                      test_ev.origins[0].time)
     # Note that time_residual_RMS is not a quakeML format
     self.assertEqual(read_ev.origins[0].longitude,
                      test_ev.origins[0].longitude)
     self.assertEqual(read_ev.origins[0].latitude,
                      test_ev.origins[0].latitude)
     self.assertEqual(read_ev.origins[0].depth,
                      test_ev.origins[0].depth)
     # All three magnitudes must round-trip with value, creation info
     # and type intact.
     self.assertEqual(read_ev.magnitudes[0].mag,
                      test_ev.magnitudes[0].mag)
     self.assertEqual(read_ev.magnitudes[1].mag,
                      test_ev.magnitudes[1].mag)
     self.assertEqual(read_ev.magnitudes[2].mag,
                      test_ev.magnitudes[2].mag)
     self.assertEqual(read_ev.magnitudes[0].creation_info,
                      test_ev.magnitudes[0].creation_info)
     self.assertEqual(read_ev.magnitudes[1].creation_info,
                      test_ev.magnitudes[1].creation_info)
     self.assertEqual(read_ev.magnitudes[2].creation_info,
                      test_ev.magnitudes[2].creation_info)
     self.assertEqual(read_ev.magnitudes[0].magnitude_type,
                      test_ev.magnitudes[0].magnitude_type)
     self.assertEqual(read_ev.magnitudes[1].magnitude_type,
                      test_ev.magnitudes[1].magnitude_type)
     self.assertEqual(read_ev.magnitudes[2].magnitude_type,
                      test_ev.magnitudes[2].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     # assert read_ev.amplitudes[0].resource_id ==\
     #     test_ev.amplitudes[0].resource_id
     self.assertEqual(read_ev.amplitudes[0].period,
                      test_ev.amplitudes[0].period)
     self.assertEqual(read_ev.amplitudes[0].snr,
                      test_ev.amplitudes[0].snr)
     self.assertEqual(read_ev.amplitudes[2].period,
                      test_ev.amplitudes[2].period)
     self.assertEqual(read_ev.amplitudes[2].snr,
                      test_ev.amplitudes[2].snr)
     # Check coda magnitude pick
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].resource_id,
     #                  test_ev.amplitudes[1].resource_id)
     self.assertEqual(read_ev.amplitudes[1].type,
                      test_ev.amplitudes[1].type)
     self.assertEqual(read_ev.amplitudes[1].unit,
                      test_ev.amplitudes[1].unit)
     self.assertEqual(read_ev.amplitudes[1].generic_amplitude,
                      test_ev.amplitudes[1].generic_amplitude)
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].pick_id,
     #                  test_ev.amplitudes[1].pick_id)
     self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                      test_ev.amplitudes[1].waveform_id.station_code)
     # The s-file keeps only the first and last channel-code characters.
     self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[0] +
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[-1])
     self.assertEqual(read_ev.amplitudes[1].magnitude_hint,
                      test_ev.amplitudes[1].magnitude_hint)
     # snr is not supported in s-file
     # self.assertEqual(read_ev.amplitudes[1].snr,
     #                  test_ev.amplitudes[1].snr)
     self.assertEqual(read_ev.amplitudes[1].category,
                      test_ev.amplitudes[1].category)
Esempio n. 35
0
 def test_inaccurate_picks(self):
     """Pick lines built from bad_picks.sfile must be 80 chars wide."""
     bad_file = os.path.join(self.testing_path, 'bad_picks.sfile')
     catalog = read_nordic(bad_file)
     for line in nordpick(catalog[0]):
         self.assertEqual(len(line), 80)
Esempio n. 36
0
def slice_from_reading(reading_path,
                       waveforms_path,
                       slice_duration=5,
                       archive_definitions=None,
                       output_level=0):
    """
    Reads S-file on reading_path and slice relevant waveforms in waveforms_path
    :param reading_path:        string    path to S-file
    :param waveforms_path:      string    path to folder with waveform files
                                          (currently unused; slices are read
                                          from config.archives_path)
    :param slice_duration:      int       duration of the slice in seconds
    :param archive_definitions: list      list of archive definition tuples (see utils/seisan_reader.py)
    :param output_level:        int       0 - min output, 5 - max output, default - 0
    :return: -1                                  -    corrupted file
             [(obspy.core.trace.Trace, string)]  -    list of slice tuples: (slice, name of waveform file)
    """
    # Avoid the shared-mutable-default-argument pitfall.
    if archive_definitions is None:
        archive_definitions = []
    if output_level >= 5:
        logging.info('Reading file: ' + reading_path)

    try:
        # Events tuple: (event.Catalog, [waveforms file names])
        events = nordic_reader.read_nordic(reading_path, True)
    except (nordic_reader.NordicParsingError, ValueError,
            AttributeError) as error:
        if output_level >= 2:
            logging.warning('In ' + reading_path + ': ' + str(error))
        return -1

    index = -1
    slices = []
    picks_line = "STAT SP IPHASW"
    for event in events[0].events:
        index += 1

        # Re-scan the raw S-file to recover fields obspy does not expose:
        # epicentral distances, high-precision pick seconds and the ID line.
        # Use a context manager so the handle is closed every iteration
        # (the original leaked one file handle per event).
        with open(reading_path) as sfile:
            raw_lines = [line.strip() for line in sfile]

        id_str = None  # stays None if the file has no "ACTION ... ID:" line
        picks_started = False
        picks_amount = len(event.picks)
        picks_read = 0
        picks_distance = []
        if config.seconds_high_precision:
            start_seconds = []
        for line in raw_lines:
            # NOTE(review): picks_read is never incremented, so this guard
            # only checks that the event has picks at all - kept as-is to
            # preserve the original parsing behaviour.
            if picks_started and picks_read < picks_amount and len(line) >= 74:
                try:
                    dist = float(line[70:74])  # epicentral distance column
                except ValueError:
                    dist = None
                picks_distance.append(dist)

                if config.seconds_high_precision:
                    try:
                        seconds = float(line[21:27])  # seconds incl. fraction
                    except ValueError:
                        seconds = None
                    start_seconds.append(seconds)

            if len(line) > 73:
                title = line[0:6]
                if title == "ACTION":
                    id_title = line[56:59]
                    if id_title == "ID:":
                        id_str = line[59:73]

            if len(line) > 25:
                if line[0:len(picks_line)] == picks_line:
                    picks_started = True

        # Min magnitude check
        if len(event.magnitudes) > 0:
            if event.magnitudes[0].mag < config.min_magnitude:
                continue

        # Max depth check
        if len(event.origins) > 0:
            if event.origins[0].depth is None:
                continue
            if event.origins[0].depth > config.max_depth:
                continue

        try:
            if len(event.picks) > 0:  # Only for files with picks
                if output_level >= 3:
                    logging.info('File: ' + reading_path + ' Event #' +
                                 str(index) + ' Picks: ' +
                                 str(len(event.picks)))

                picks_index = -1
                for pick in event.picks:
                    if output_level >= 3:
                        logging.info('\t' + str(pick))

                    picks_index += 1
                    if config.seconds_high_precision:
                        if picks_index < len(start_seconds):
                            start_seconds_pick = start_seconds[picks_index]
                        else:
                            start_seconds_pick = pick.time.second
                            print("OUT OF BOUNDS START SECONDS PICK")
                            print("FILE: " + reading_path)
                            print("PICKS: ")
                            for pick_print in event.picks:
                                print(str(pick_print))
                    else:
                        # BUG FIX: UTCDateTime has .second, not .seconds -
                        # the original raised AttributeError on this path.
                        start_seconds_pick = pick.time.second
                    pick_time = UTCDateTime(pick.time.year, pick.time.month,
                                            pick.time.day, pick.time.hour,
                                            pick.time.minute,
                                            start_seconds_pick)

                    if picks_index < len(picks_distance) and picks_distance[
                            picks_index] is not None:
                        if picks_distance[picks_index] > config.max_dist:
                            continue

                    # Check phase
                    if pick.phase_hint != 'S' and pick.phase_hint != 'P':
                        logging.info('\t' + 'Neither P nor S phase. Skipping.')
                        continue

                    if output_level >= 3:
                        logging.info('\t' + 'Slices:')

                    # Checking archives
                    found_archive = False
                    if len(archive_definitions) > 0:
                        station = pick.waveform_id.station_code
                        station_archives = seisan.station_archives(
                            archive_definitions, station)

                        channel_slices = []
                        for x in station_archives:
                            # x: (station, channel, network, location,
                            #     start, end) - presumably; verify against
                            # utils/seisan_reader.py
                            if x[4] <= pick_time:
                                if x[5] is not None and pick_time > x[5]:
                                    continue
                                else:
                                    archive_file_path = seisan.archive_path(
                                        x, pick_time.year, pick_time.julday,
                                        config.archives_path, output_level)

                                    if os.path.isfile(archive_file_path):
                                        try:
                                            arch_st = read(archive_file_path)
                                        except TypeError as error:
                                            if output_level >= 2:
                                                logging.warning(
                                                    'In ' + archive_file_path +
                                                    ': ' + str(error))
                                            return -1

                                        for trace in arch_st:
                                            # Skip traces that do not fully
                                            # cover the requested slice.
                                            if trace.stats.starttime > pick_time or pick_time + slice_duration >= trace.stats.endtime:
                                                logging.info(
                                                    '\t\tArchive ' +
                                                    archive_file_path +
                                                    ' does not cover required slice interval'
                                                )
                                                continue

                                            shifted_time = pick_time - config.static_slice_offset
                                            end_time = shifted_time + slice_duration

                                            found_archive = True

                                            trace_slice = trace.slice(
                                                shifted_time, end_time)
                                            if output_level >= 3:
                                                logging.info('\t\t' +
                                                             str(trace_slice))

                                            trace_file = x[0] + str(
                                                x[4].year) + str(
                                                    x[4].julday
                                                ) + x[1] + x[2] + x[3]
                                            event_id = x[0] + str(
                                                x[4].year) + str(
                                                    x[4].julday) + x[2] + x[3]
                                            # id_str may be None if the
                                            # S-file lacked an ID line
                                            # (the original raised
                                            # NameError in that case).
                                            slice_name_station_channel = (
                                                trace_slice, trace_file, x[0],
                                                x[1], event_id,
                                                pick.phase_hint, id_str)

                                            # Require a minimum number of
                                            # samples for a usable slice.
                                            if len(trace_slice.data) >= 400:
                                                channel_slices.append(
                                                    slice_name_station_channel)

                    # Read and slice waveform
                    if found_archive:
                        if len(channel_slices) > 0:
                            slices.append(channel_slices)
                        continue

        except ValueError as error:
            if output_level >= 2:
                logging.warning('In ' + reading_path + ': ' + str(error))
            continue

    return sort_slices(slices)
Esempio n. 37
0
 def test_round_len(self):
     """Events with undefined rounding lengths must still write 80-char lines."""
     sfile = os.path.join(self.testing_path, 'round_len_undef.sfile')
     event = read_nordic(sfile)[0]
     # The Nordic pick format is strictly fixed-width.
     for line in nordpick(event):
         self.assertEqual(len(line), 80)
Esempio n. 38
0
def write_catalog(event_list, max_sep=8, min_link=8, debug=0):
    """
    Generate a dt.ct for hypoDD for a series of events.

    Takes input event list from
    :func:`eqcorrscan.utils.catalog_to_dd.write_event` as a list of tuples of
    event id and sfile.  It will read the pick information from the seisan
    formated s-file using the sfile_util utilities.

    Writes three files in the current working directory: ``dt.ct``,
    ``dt.ct2`` and ``phase.dat``.

    :type event_list: list
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link:
        Minimum links for an event to be paired, e.g. minimum number of picks
        from the same station and channel (and phase) that are shared between
        two events for them to be paired.
    :type debug: int
    :param debug: Debug output level.

    :returns: list of stations that have been used in this catalog

    .. note::
        We have not yet implemented a method for taking unassociated event
        objects and wavefiles.  As such if you have events with associated
        wavefiles you are advised to generate Sfiles for each event using
        the :mod:`eqcorrscan.utils.sfile_util` module prior to this step.
    """
    # Cope with possibly being passed a zip in python 3.x
    event_list = list(event_list)
    stations = []
    evcount = 0
    # Context managers guarantee the output files are closed even if
    # read_nordic or pick processing raises (the original leaked all three
    # handles in that case).
    with open('dt.ct', 'w') as f, \
            open('dt.ct2', 'w') as f2, \
            open('phase.dat', 'w') as fphase:
        for i, master in enumerate(event_list):
            master_sfile = master[1]
            master_event_id = master[0]
            master_event = read_nordic(master_sfile)[0]
            master_ori_time = master_event.origins[0].time
            master_location = (master_event.origins[0].latitude,
                               master_event.origins[0].longitude,
                               master_event.origins[0].depth / 1000)
            if len(master_event.magnitudes) > 0:
                # NOTE(review): a magnitude of exactly 0.0 is falsy and is
                # written as ' ' - kept as-is to preserve file output.
                master_magnitude = master_event.magnitudes[0].mag or ' '
            else:
                master_magnitude = ' '
            header = '# ' + \
                master_ori_time.strftime('%Y  %m  %d  %H  %M  %S.%f') +\
                ' ' + str(master_location[0]).ljust(8) + ' ' +\
                str(master_location[1]).ljust(8) + ' ' +\
                str(master_location[2]).ljust(4) + ' ' +\
                str(master_magnitude).ljust(4) + ' 0.0 0.0 0.0' +\
                str(master_event_id).rjust(4)
            fphase.write(header + '\n')
            for pick in master_event.picks:
                if not hasattr(pick, 'phase_hint') or \
                        len(pick.phase_hint) == 0:
                    warnings.warn('No phase-hint for pick:')
                    print(pick)
                    continue
                if pick.phase_hint[0].upper() in ['P', 'S']:
                    # NOTE(review): assumes every pick has a matching
                    # arrival; raises IndexError otherwise - confirm
                    # upstream guarantees this.
                    weight = [
                        arrival.time_weight
                        for arrival in master_event.origins[0].arrivals
                        if arrival.pick_id == pick.resource_id
                    ][0]
                    # Convert seisan weight to hypoDD 0-1 weights
                    if weight == 0:
                        weight = 1.0
                    elif weight == 9:
                        weight = 0.0
                    else:
                        weight = 1 - weight / 4.0
                    fphase.write(pick.waveform_id.station_code + '  ' +
                                 _cc_round(pick.time - master_ori_time,
                                           3).rjust(6) + '   ' +
                                 str(weight).ljust(5) + pick.phase_hint +
                                 '\n')
            for j in range(i + 1, len(event_list)):
                # Use this tactic to only output unique event pairings
                slave_sfile = event_list[j][1]
                slave_event_id = event_list[j][0]
                # Write out the header line
                event_text = '#' + str(master_event_id).rjust(10) +\
                    str(slave_event_id).rjust(10) + '\n'
                event_text2 = '#' + str(master_event_id).rjust(10) +\
                    str(slave_event_id).rjust(10) + '\n'
                slave_event = read_nordic(slave_sfile)[0]
                slave_ori_time = slave_event.origins[0].time
                slave_location = (slave_event.origins[0].latitude,
                                  slave_event.origins[0].longitude,
                                  slave_event.origins[0].depth / 1000)
                if dist_calc(master_location, slave_location) > max_sep:
                    continue
                links = 0  # Count the number of linkages
                for pick in master_event.picks:
                    if not hasattr(pick, 'phase_hint') or\
                                    len(pick.phase_hint) == 0:
                        continue
                    if pick.phase_hint[0].upper() not in ['P', 'S']:
                        continue
                        # Only use P and S picks, not amplitude or 'other'
                    # Added by Carolin
                    slave_matches = [
                        p for p in slave_event.picks
                        if hasattr(p, 'phase_hint') and p.phase_hint ==
                        pick.phase_hint and
                        p.waveform_id.station_code.upper() ==
                        pick.waveform_id.station_code.upper()
                    ]
                    # Loop through the matches
                    for slave_pick in slave_matches:
                        links += 1
                        master_weight = [
                            arrival.time_weight
                            for arrival in master_event.origins[0].arrivals
                            if arrival.pick_id == pick.resource_id
                        ][0]
                        slave_weight = [
                            arrival.time_weight
                            for arrival in slave_event.origins[0].arrivals
                            if arrival.pick_id == slave_pick.resource_id
                        ][0]
                        master_weight = str(int(master_weight))
                        slave_weight = str(int(slave_weight))
                        event_text += \
                            pick.waveform_id.station_code.ljust(5) +\
                            _cc_round(pick.time - master_ori_time,
                                      3).rjust(11) +\
                            _cc_round(slave_pick.time - slave_ori_time,
                                      3).rjust(8) +\
                            _av_weight(master_weight,
                                       slave_weight).rjust(7) +\
                            ' ' + pick.phase_hint + '\n'
                        # Added by Carolin
                        event_text2 += \
                            pick.waveform_id.station_code.ljust(5) +\
                            _cc_round(pick.time - master_ori_time,
                                      3).rjust(11) +\
                            _cc_round(slave_pick.time - slave_ori_time,
                                      3).rjust(8) +\
                            _av_weight(master_weight,
                                       slave_weight).rjust(7) +\
                            ' ' + pick.phase_hint + '\n'
                        stations.append(pick.waveform_id.station_code)
                if links >= min_link:
                    f.write(event_text)
                    f2.write(event_text2)
                    evcount += 1
    print('You have ' + str(evcount) + ' links')
    return list(set(stations))
Esempio n. 39
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.

     Writes a synthetic event to an s-file, reads it back, and checks
     field-by-field that picks, origin, magnitudes, descriptions and
     amplitudes survive the round trip (within the format's limits).
     """
     # Set-up a test event
     test_event = full_test_event()
     # Sort the magnitudes - they are sorted on writing and we need to check
     # like-for-like
     test_event.magnitudes.sort(key=lambda obj: obj['mag'], reverse=True)
     # Add the event to a catalogue which can be used for QuakeML testing
     test_cat = Catalog()
     test_cat += test_event
     # Check the read-write s-file functionality
     with TemporaryWorkingDirectory():
         sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test',
                               explosion=True, overwrite=True)
         self.assertEqual(readwavename(sfile), ['test'])
         read_cat = Catalog()
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             read_cat += read_nordic(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     # Picks should round-trip; only the last char of the channel code is
     # guaranteed (s-files store two-character channel codes).
     for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(read_pick.time, test_pick.time)
         self.assertEqual(read_pick.backazimuth, test_pick.backazimuth)
         self.assertEqual(read_pick.onset, test_pick.onset)
         self.assertEqual(read_pick.phase_hint, test_pick.phase_hint)
         self.assertEqual(read_pick.polarity, test_pick.polarity)
         self.assertEqual(read_pick.waveform_id.station_code,
                          test_pick.waveform_id.station_code)
         self.assertEqual(read_pick.waveform_id.channel_code[-1],
                          test_pick.waveform_id.channel_code[-1])
     # assert read_ev.origins[0].resource_id ==\
     #     test_ev.origins[0].resource_id
     self.assertEqual(read_ev.origins[0].time,
                      test_ev.origins[0].time)
     # Note that time_residual_RMS is not a quakeML format
     self.assertEqual(read_ev.origins[0].longitude,
                      test_ev.origins[0].longitude)
     self.assertEqual(read_ev.origins[0].latitude,
                      test_ev.origins[0].latitude)
     self.assertEqual(read_ev.origins[0].depth,
                      test_ev.origins[0].depth)
     # All three magnitudes should round-trip (value, creation info, type).
     self.assertEqual(read_ev.magnitudes[0].mag,
                      test_ev.magnitudes[0].mag)
     self.assertEqual(read_ev.magnitudes[1].mag,
                      test_ev.magnitudes[1].mag)
     self.assertEqual(read_ev.magnitudes[2].mag,
                      test_ev.magnitudes[2].mag)
     self.assertEqual(read_ev.magnitudes[0].creation_info,
                      test_ev.magnitudes[0].creation_info)
     self.assertEqual(read_ev.magnitudes[1].creation_info,
                      test_ev.magnitudes[1].creation_info)
     self.assertEqual(read_ev.magnitudes[2].creation_info,
                      test_ev.magnitudes[2].creation_info)
     self.assertEqual(read_ev.magnitudes[0].magnitude_type,
                      test_ev.magnitudes[0].magnitude_type)
     self.assertEqual(read_ev.magnitudes[1].magnitude_type,
                      test_ev.magnitudes[1].magnitude_type)
     self.assertEqual(read_ev.magnitudes[2].magnitude_type,
                      test_ev.magnitudes[2].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     # assert read_ev.amplitudes[0].resource_id ==\
     #     test_ev.amplitudes[0].resource_id
     self.assertEqual(read_ev.amplitudes[0].period,
                      test_ev.amplitudes[0].period)
     self.assertEqual(read_ev.amplitudes[0].snr,
                      test_ev.amplitudes[0].snr)
     self.assertEqual(read_ev.amplitudes[2].period,
                      test_ev.amplitudes[2].period)
     self.assertEqual(read_ev.amplitudes[2].snr,
                      test_ev.amplitudes[2].snr)
     # Check coda magnitude pick
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].resource_id,
     #                  test_ev.amplitudes[1].resource_id)
     self.assertEqual(read_ev.amplitudes[1].type,
                      test_ev.amplitudes[1].type)
     self.assertEqual(read_ev.amplitudes[1].unit,
                      test_ev.amplitudes[1].unit)
     self.assertEqual(read_ev.amplitudes[1].generic_amplitude,
                      test_ev.amplitudes[1].generic_amplitude)
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].pick_id,
     #                  test_ev.amplitudes[1].pick_id)
     self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                      test_ev.amplitudes[1].waveform_id.station_code)
     # The s-file stores only the first and last character of the channel
     # code, so compare against that reconstruction.
     self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[0] +
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[-1])
     self.assertEqual(read_ev.amplitudes[1].magnitude_hint,
                      test_ev.amplitudes[1].magnitude_hint)
     # snr is not supported in s-file
     # self.assertEqual(read_ev.amplitudes[1].snr,
     #                  test_ev.amplitudes[1].snr)
     self.assertEqual(read_ev.amplitudes[1].category,
                      test_ev.amplitudes[1].category)
Esempio n. 40
0
def write_correlations(event_list,
                       wavbase,
                       extract_len,
                       pre_pick,
                       shift_len,
                       lowcut=1.0,
                       highcut=10.0,
                       max_sep=8,
                       min_link=8,
                       cc_thresh=0.0,
                       plotvar=False,
                       debug=0):
    """
    Write a dt.cc file for hypoDD input for a given list of events.

    Takes an input list of events and computes pick refinements by correlation.
    Outputs two files, dt.cc and dt.cc2, each provides a different weight,
    dt.cc uses weights of the cross-correlation, and dt.cc2 provides weights
    as the square of the cross-correlation.

    :type event_list: list
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type wavbase: str
    :param wavbase: Path to the seisan wave directory that the wavefiles in the
                    S-files are stored
    :type extract_len: float
    :param extract_len: Length in seconds to extract around the pick
    :type pre_pick: float
    :param pre_pick: Time before the pick to start the correlation window
    :type shift_len: float
    :param shift_len: Time to allow pick to vary
    :type lowcut: float
    :param lowcut: Lowcut in Hz - default=1.0
    :type highcut: float
    :param highcut: Highcut in Hz - default=10.0
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link: Minimum links for an event to be paired
    :type cc_thresh: float
    :param cc_thresh: Threshold to include cross-correlation results.
    :type plotvar: bool
    :param plotvar: To show the pick-correction plots, defualts to False.
    :type debug: int
    :param debug: Variable debug levels from 0-5, higher=more output.

    .. warning:: This is not a fast routine!

    .. warning::
        In contrast to seisan's corr routine, but in accordance with the
        hypoDD manual, this outputs corrected differential time.

    .. note::
        Currently we have not implemented a method for taking
        unassociated event objects and wavefiles.  As such if you have events \
        with associated wavefiles you are advised to generate Sfiles for each \
        event using the sfile_util module prior to this step.

    .. note::
        There is no provision to taper waveforms within these functions, if you
        desire this functionality, you should apply the taper before calling
        this.  Note the :func:`obspy.Trace.taper` functions.
    """
    warnings.filterwarnings(action="ignore",
                            message="Maximum of cross correlation " +
                            "lower than 0.8: *")
    corr_list = []
    f = open('dt.cc', 'w')
    f2 = open('dt.cc2', 'w')
    k_events = len(list(event_list))
    for i, master in enumerate(event_list):
        master_sfile = master[1]
        if debug > 1:
            print('Computing correlations for master: %s' % master_sfile)
        master_event_id = master[0]
        master_event = read_nordic(master_sfile)[0]
        master_picks = master_event.picks
        master_ori_time = master_event.origins[0].time
        master_location = (master_event.origins[0].latitude,
                           master_event.origins[0].longitude,
                           master_event.origins[0].depth / 1000.0)
        master_wavefiles = readwavename(master_sfile)
        masterpath = glob.glob(wavbase + os.sep + master_wavefiles[0])
        if masterpath:
            masterstream = read(masterpath[0])
        if len(master_wavefiles) > 1:
            for wavefile in master_wavefiles:
                try:
                    masterstream += read(os.join(wavbase, wavefile))
                except:
                    raise IOError("Couldn't find wavefile")
                    continue
        for j in range(i + 1, k_events):
            # Use this tactic to only output unique event pairings
            slave_sfile = event_list[j][1]
            if debug > 2:
                print('Comparing to event: %s' % slave_sfile)
            slave_event_id = event_list[j][0]
            slave_wavefiles = readwavename(slave_sfile)
            try:
                slavestream = read(wavbase + os.sep + slave_wavefiles[0])
            except:
                raise IOError('No wavefile found: ' + slave_wavefiles[0] +
                              ' ' + slave_sfile)
            if len(slave_wavefiles) > 1:
                for wavefile in slave_wavefiles:
                    try:
                        slavestream += read(wavbase + os.sep + wavefile)
                    except IOError:
                        print('No waveform found: %s' %
                              (wavbase + os.sep + wavefile))
                        continue
            # Write out the header line
            event_text = '#' + str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10) + ' 0.0   \n'
            event_text2 = '#' + str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10) + ' 0.0   \n'
            slave_event = read_nordic(slave_sfile)[0]
            slave_picks = slave_event.picks
            slave_ori_time = slave_event.origins[0].time
            slave_location = (slave_event.origins[0].latitude,
                              slave_event.origins[0].longitude,
                              slave_event.origins[0].depth / 1000.0)
            if dist_calc(master_location, slave_location) > max_sep:
                if debug > 0:
                    print('Seperation exceeds max_sep: %s' %
                          (dist_calc(master_location, slave_location)))
                continue
            links = 0
            phases = 0
            for pick in master_picks:
                if not hasattr(pick, 'phase_hint') or \
                                len(pick.phase_hint) == 0:
                    warnings.warn('No phase-hint for pick:')
                    print(pick)
                    continue
                if pick.phase_hint[0].upper() not in ['P', 'S']:
                    warnings.warn('Will only use P or S phase picks')
                    print(pick)
                    continue
                    # Only use P and S picks, not amplitude or 'other'
                # Find station, phase pairs
                # Added by Carolin
                slave_matches = [
                    p for p in slave_picks if hasattr(p, 'phase_hint')
                    and p.phase_hint == pick.phase_hint and
                    p.waveform_id.station_code == pick.waveform_id.station_code
                ]

                if masterstream.select(station=pick.waveform_id.station_code,
                                       channel='*' +
                                       pick.waveform_id.channel_code[-1]):
                    mastertr = masterstream.\
                        select(station=pick.waveform_id.station_code,
                               channel='*' +
                               pick.waveform_id.channel_code[-1])[0]
                elif debug > 1:
                    print('No waveform data for ' +
                          pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code)
                    print(pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code + ' ' + slave_sfile +
                          ' ' + master_sfile)
                    break
                # Loop through the matches
                for slave_pick in slave_matches:
                    if slavestream.select(
                            station=slave_pick.waveform_id.station_code,
                            channel='*' +
                            slave_pick.waveform_id.channel_code[-1]):
                        slavetr = slavestream.\
                            select(station=slave_pick.waveform_id.station_code,
                                   channel='*' + slave_pick.waveform_id.
                                   channel_code[-1])[0]
                    else:
                        print('No slave data for ' +
                              slave_pick.waveform_id.station_code + '.' +
                              slave_pick.waveform_id.channel_code)
                        print(pick.waveform_id.station_code + '.' +
                              pick.waveform_id.channel_code + ' ' +
                              slave_sfile + ' ' + master_sfile)
                        break
                    # Correct the picks
                    try:
                        correction, cc =\
                            xcorr_pick_correction(
                                pick.time, mastertr, slave_pick.time,
                                slavetr, pre_pick, extract_len - pre_pick,
                                shift_len, filter="bandpass",
                                filter_options={'freqmin': lowcut,
                                                'freqmax': highcut},
                                plot=plotvar)
                        # Get the differential travel time using the
                        # corrected time.
                        # Check that the correction is within the allowed shift
                        # This can occur in the obspy routine when the
                        # correlation function is increasing at the end of the
                        # window.
                        if abs(correction) > shift_len:
                            warnings.warn('Shift correction too large, ' +
                                          'will not use')
                            continue
                        correction = (pick.time - master_ori_time) -\
                            (slave_pick.time + correction - slave_ori_time)
                        links += 1
                        if cc >= cc_thresh:
                            weight = cc
                            phases += 1
                            # added by Caro
                            event_text += pick.waveform_id.station_code.\
                                ljust(5) + _cc_round(correction, 3).\
                                rjust(11) + _cc_round(weight, 3).rjust(8) +\
                                ' ' + pick.phase_hint + '\n'
                            event_text2 += pick.waveform_id.station_code\
                                .ljust(5) + _cc_round(correction, 3).\
                                rjust(11) +\
                                _cc_round(weight * weight, 3).rjust(8) +\
                                ' ' + pick.phase_hint + '\n'
                            if debug > 3:
                                print(event_text)
                        else:
                            print('cc too low: %s' % cc)
                        corr_list.append(cc * cc)
                    except:
                        msg = "Couldn't compute correlation correction"
                        warnings.warn(msg)
                        continue
            if links >= min_link and phases > 0:
                f.write(event_text)
                f2.write(event_text2)
    if plotvar:
        plt.hist(corr_list, 150)
        plt.show()
    # f.write('\n')
    f.close()
    f2.close()
    return
Esempio n. 41
0
 def test_read_many_events(self):
     """Reading a multi-event select.out file should yield all 50 events."""
     select_file = os.path.join(self.testing_path, 'select.out')
     cat = read_nordic(select_file)
     self.assertEqual(len(cat), 50)
Esempio n. 42
0
 def test_round_len(self):
     """An s-file with undefined round-length must still write 80-char picks."""
     sfile = os.path.join(self.testing_path, 'round_len_undef.sfile')
     first_event = read_nordic(sfile)[0]
     # Nordic pick lines are fixed-width: every line must be exactly 80 chars.
     for line in nordpick(first_event):
         self.assertEqual(len(line), 80)
Esempio n. 43
0
import os

import obspy.io.nordic.core as nordic
from obspy.core import read, Stream

# Script: read every Nordic s-file in a month directory, load the waveforms
# it references, and plot an 800-second normalized window from origin time.
sfileDir = "REA/EVENT/1996/06/"
waveFileDir = "WAV/"

# os.listdir is portable and avoids spawning an external ``ls`` process;
# sort for a deterministic processing order.
for sfile_name in sorted(os.listdir(sfileDir)):
    sfile_path = os.path.join(sfileDir, sfile_name)
    # read_nordic returns a Catalog; attach the referenced wavefile names.
    catalog = nordic.read_nordic(sfile_path)
    catalog.wavename = nordic.readwavename(sfile_path)
    stream = Stream()
    for wave in catalog.wavename:
        stream += read(os.path.join(waveFileDir, wave))
    stream.normalize()
    start_time = catalog.events[0].origins[0].time
    # Trim to the first 800 s after the (first) origin time before plotting.
    stream.trim(start_time + 0, start_time + 800)
    stream.plot()
Esempio n. 44
0
 def test_inaccurate_picks(self):
     """Badly formatted picks should still round-trip to 80-char lines."""
     bad_path = os.path.join(self.testing_path, 'bad_picks.sfile')
     catalog = read_nordic(bad_path)
     for line in nordpick(catalog[0]):
         self.assertEqual(len(line), 80)
Esempio n. 45
0
 def test_read_many_events(self):
     """A select.out catalogue is expected to contain exactly 50 events."""
     path = os.path.join(self.testing_path, 'select.out')
     events = read_nordic(path)
     self.assertEqual(len(events), 50)
Esempio n. 46
0
 def test_read_event(self):
     """Test the wrapper: a single s-file should produce one origin."""
     sfile = os.path.join(self.testing_path, '01-0411-15L.S201309')
     first_event = read_nordic(sfile)[0]
     self.assertEqual(len(first_event.origins), 1)
Esempio n. 47
0
 def test_read_event(self):
     """Exercise the read_nordic wrapper on one event; expect one origin."""
     path_to_sfile = os.path.join(self.testing_path, '01-0411-15L.S201309')
     ev = read_nordic(path_to_sfile)[0]
     self.assertEqual(len(ev.origins), 1)
Esempio n. 48
0
 def test_read_write(self):
     """
     Function to test the read and write capabilities of sfile_util.
     """
     # Set-up a test event
     test_event = full_test_event()
     # Add the event to a catalogue which can be used for QuakeML testing
     test_cat = Catalog()
     test_cat += test_event
     # Check the read-write s-file functionality
     sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                           evtype='L', outdir='.',
                           wavefiles='test', explosion=True, overwrite=True)
     self.assertEqual(readwavename(sfile), ['test'])
     read_cat = Catalog()
     read_cat += read_nordic(sfile)
     os.remove(sfile)
     read_ev = read_cat[0]
     test_ev = test_cat[0]
     for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
         self.assertEqual(read_pick.time, test_pick.time)
         self.assertEqual(read_pick.backazimuth, test_pick.backazimuth)
         self.assertEqual(read_pick.onset, test_pick.onset)
         self.assertEqual(read_pick.phase_hint, test_pick.phase_hint)
         self.assertEqual(read_pick.polarity, test_pick.polarity)
         self.assertEqual(read_pick.waveform_id.station_code,
                          test_pick.waveform_id.station_code)
         self.assertEqual(read_pick.waveform_id.channel_code[-1],
                          test_pick.waveform_id.channel_code[-1])
     # assert read_ev.origins[0].resource_id ==\
     #     test_ev.origins[0].resource_id
     self.assertEqual(read_ev.origins[0].time,
                      test_ev.origins[0].time)
     # Note that time_residual_RMS is not a quakeML format
     self.assertEqual(read_ev.origins[0].longitude,
                      test_ev.origins[0].longitude)
     self.assertEqual(read_ev.origins[0].latitude,
                      test_ev.origins[0].latitude)
     self.assertEqual(read_ev.origins[0].depth,
                      test_ev.origins[0].depth)
     self.assertEqual(read_ev.magnitudes[0].mag,
                      test_ev.magnitudes[0].mag)
     self.assertEqual(read_ev.magnitudes[1].mag,
                      test_ev.magnitudes[1].mag)
     self.assertEqual(read_ev.magnitudes[2].mag,
                      test_ev.magnitudes[2].mag)
     self.assertEqual(read_ev.magnitudes[0].creation_info,
                      test_ev.magnitudes[0].creation_info)
     self.assertEqual(read_ev.magnitudes[1].creation_info,
                      test_ev.magnitudes[1].creation_info)
     self.assertEqual(read_ev.magnitudes[2].creation_info,
                      test_ev.magnitudes[2].creation_info)
     self.assertEqual(read_ev.magnitudes[0].magnitude_type,
                      test_ev.magnitudes[0].magnitude_type)
     self.assertEqual(read_ev.magnitudes[1].magnitude_type,
                      test_ev.magnitudes[1].magnitude_type)
     self.assertEqual(read_ev.magnitudes[2].magnitude_type,
                      test_ev.magnitudes[2].magnitude_type)
     self.assertEqual(read_ev.event_descriptions,
                      test_ev.event_descriptions)
     # assert read_ev.amplitudes[0].resource_id ==\
     #     test_ev.amplitudes[0].resource_id
     self.assertEqual(read_ev.amplitudes[0].period,
                      test_ev.amplitudes[0].period)
     self.assertEqual(read_ev.amplitudes[0].snr,
                      test_ev.amplitudes[0].snr)
     # Check coda magnitude pick
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].resource_id,
     #                  test_ev.amplitudes[1].resource_id)
     self.assertEqual(read_ev.amplitudes[1].type,
                      test_ev.amplitudes[1].type)
     self.assertEqual(read_ev.amplitudes[1].unit,
                      test_ev.amplitudes[1].unit)
     self.assertEqual(read_ev.amplitudes[1].generic_amplitude,
                      test_ev.amplitudes[1].generic_amplitude)
     # Resource ids get overwritten because you can't have two the same in
     # memory
     # self.assertEqual(read_ev.amplitudes[1].pick_id,
     #                  test_ev.amplitudes[1].pick_id)
     self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                      test_ev.amplitudes[1].waveform_id.station_code)
     self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[0] +
                      test_ev.amplitudes[1].
                      waveform_id.channel_code[-1])
     self.assertEqual(read_ev.amplitudes[1].magnitude_hint,
                      test_ev.amplitudes[1].magnitude_hint)
     # snr is not supported in s-file
     # self.assertEqual(read_ev.amplitudes[1].snr,
     #                  test_ev.amplitudes[1].snr)
     self.assertEqual(read_ev.amplitudes[1].category,
                      test_ev.amplitudes[1].category)