Example #1
def interev_mag_sfiles(sfiles):
    r"""Function to plot interevent-time versus magnitude for series of events.
    **thin** Wrapper for interev_mag.

    :type sfiles: list
    :param sfiles: List of sfiles to read from
    """
    from eqcorrscan.utils import sfile_util
    times = [sfile_util.readheader(sfile)[0].origins[0].time
             for sfile in sfiles]
    mags = [sfile_util.readheader(sfile)[0].magnitudes[0].mag
            for sfile in sfiles]
    interev_mag(times, mags)
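
A minimal usage sketch for this wrapper; the REA directory path and glob pattern below are hypothetical and should be replaced with your own Seisan archive:

import glob

# Collect Seisan s-files (hypothetical path) and plot interevent time vs magnitude.
sfiles = sorted(glob.glob('/path/to/REA/TEST_/*L.S??????'))
interev_mag_sfiles(sfiles)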
Example #2
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan events

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    event_list = []
    sort_list = [(sfile_util.readheader(sfile).origins[0].time, sfile)
                 for sfile in sfile_list]
    sort_list.sort(key=lambda tup: tup[0])
    sfile_list = [sfile[1] for sfile in sort_list]
    catalog = Catalog()
    for i, sfile in enumerate(sfile_list):
        event_list.append((i, sfile))
        catalog.append(sfile_util.readheader(sfile))
    # Hand off to sister function
    write_event(catalog)
    return event_list
Example #3
def interev_mag_sfiles(sfiles, save=False, savefile=None):
    r"""Function to plot interevent-time versus magnitude for series of events.
    **thin** Wrapper for interev_mag.

    :type sfiles: list
    :param sfiles: List of sfiles to read from
    :type save: bool
    :param save: If False (default), plot to screen; if True, save the plot \
        to savefile and do not show it on screen.
    :type savefile: str
    :param savefile: Filename to save to, required for save=True
    """
    _check_save_args(save, savefile)
    from eqcorrscan.utils import sfile_util
    times = [
        sfile_util.readheader(sfile)[0].origins[0].time for sfile in sfiles
    ]
    mags = [
        sfile_util.readheader(sfile)[0].magnitudes[0].mag for sfile in sfiles
    ]
    interev_mag(times, mags, save, savefile)
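
For the save-to-file variant, a hedged usage sketch (the output filename is illustrative); savefile is required whenever save=True:

# Save the figure instead of showing it on screen.
interev_mag_sfiles(sfiles, save=True, savefile='interevent_vs_magnitude.png')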
Example #4
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan s-files.

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    from obspy.core.event import Catalog
    event_list = []
    sort_list = [(sfile_util.readheader(sfile).origins[0].time, sfile)
                 for sfile in sfile_list]
    sort_list.sort(key=lambda tup: tup[0])
    sfile_list = [sfile[1] for sfile in sort_list]
    catalog = Catalog()
    for i, sfile in enumerate(sfile_list):
        event_list.append((i, sfile))
        catalog.append(sfile_util.readheader(sfile))
    # Hand off to sister function
    write_event(catalog)
    return event_list
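
A minimal usage sketch, assuming a Seisan REA-style directory of s-files (the path is hypothetical):

import glob

sfile_list = glob.glob('/path/to/REA/TEST_/*L.S??????')
event_list = sfiles_to_event(sfile_list)
# event.dat is written to the current directory; event_list maps sequential
# integer event IDs to the time-sorted s-file names.
for event_id, sfile in event_list:
    print(event_id, sfile)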
Example #5
    def test_write_event(self):
        """Simple test function to test the writing of events.
        """
        from eqcorrscan.utils.catalog_to_dd import sfiles_to_event
        from eqcorrscan.utils import sfile_util
        import os
        import glob

        testing_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                    'test_data', 'REA', 'TEST_')
        sfile_list = glob.glob(os.path.join(testing_path, '*L.S??????'))
        event_list = sfiles_to_event(sfile_list)
        # Check that we have written a file
        self.assertTrue(os.path.isfile('event.dat'))
        f = open('event.dat', 'r')
        for line, event in zip(f, event_list):
            header = sfile_util.readheader(event[1])
            event_id_input = event[0]
            output_event_info = line.strip().split()
            # Check that the event id's match
            self.assertEqual(event_id_input, int(output_event_info[-1]))
            time_string = str(header.origins[0].time.year) +\
                str(header.origins[0].time.month).zfill(2) +\
                str(header.origins[0].time.day).zfill(2)+'  ' +\
                str(header.origins[0].time.hour).rjust(2) +\
                str(header.origins[0].time.minute).zfill(2) +\
                str(header.origins[0].time.second).zfill(2) +\
                str(header.origins[0].time.microsecond)[0:2].zfill(2)
            self.assertEqual(output_event_info[0:2], time_string.split())
            self.assertEqual(header.origins[0].latitude,
                             float(output_event_info[2]))
            self.assertEqual(header.origins[0].longitude,
                             float(output_event_info[3]))
            self.assertEqual(header.origins[0].depth / 1000,
                             float(output_event_info[4]))
            if header.magnitudes[0]:
                self.assertEqual(header.magnitudes[0].mag,
                                 float(output_event_info[5]))
            if header.origins[0].time_errors.Time_Residual_RMS:
                self.assertEqual(header.origins[0].time_errors.
                                 Time_Residual_RMS,
                                 float(output_event_info[-2]))
        f.close()
        os.remove('event.dat')
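
For reference, a hedged sketch of unpacking one event.dat line into the fields this test checks (whitespace-delimited: date, time, latitude, longitude, depth, magnitude, ..., RMS residual, event ID); parse_event_dat_line is a hypothetical helper, not part of EQcorrscan:

def parse_event_dat_line(line):
    """Split one event.dat line into the fields checked by the test above."""
    fields = line.strip().split()
    return {
        'date': fields[0],             # yyyymmdd
        'time': fields[1],             # hhmmssss, seconds to two decimal places
        'latitude': float(fields[2]),
        'longitude': float(fields[3]),
        'depth_km': float(fields[4]),
        'magnitude': float(fields[5]),
        'rms': float(fields[-2]),
        'event_id': int(fields[-1]),
    }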
Example #6
 def test_write_catalog(self):
     """
      Simple testing function for the write_catalog function in \
      catalog_to_dd.
     """
     from eqcorrscan.utils.catalog_to_dd import write_catalog
     from eqcorrscan.utils.mag_calc import dist_calc
     from eqcorrscan.utils import sfile_util
     import glob
     import os
     # Set forced variables
      maximum_separation = 1  # Maximum inter-event separation in km
     minimum_links = 8  # Minimum inter-event links to generate a pair
     # We have to make an event list first
     testing_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'test_data', 'REA', 'TEST_')
     sfile_list = glob.glob(os.path.join(testing_path, '*L.S??????'))
     event_ids = list(range(len(sfile_list)))
     event_list = zip(event_ids, sfile_list)
     write_catalog(event_list=event_list,
                    max_sep=maximum_separation,
                   min_link=minimum_links)
     self.assertTrue(os.path.isfile('dt.ct'))
     # Check dt.ct file, should contain only a few linked events
     dt_file_out = open('dt.ct', 'r')
     event_pairs = []
     for i, line in enumerate(dt_file_out):
         if line[0] == '#':
             if i != 0:
                 # Check the number of links
                 self.assertTrue(len(event_links) >= minimum_links)
                 # Check the distance between events
                 event_1_name = [event[1] for event in event_list
                                 if event[0] ==
                                 int(event_pair.split()[1])][0]
                 event_2_name = [event[1] for event in event_list
                                 if event[0] ==
                                 int(event_pair.split()[2])][0]
                 event_1 = sfile_util.readheader(event_1_name)
                 event_2 = sfile_util.readheader(event_2_name)
                 event_1_location = (event_1.origins[0].latitude,
                                     event_1.origins[0].longitude,
                                     event_1.origins[0].depth / 1000)
                 event_2_location = (event_2.origins[0].latitude,
                                     event_2.origins[0].longitude,
                                     event_2.origins[0].depth / 1000)
                  hypocentral_separation = dist_calc(event_1_location,
                                                    event_2_location)
                  self.assertTrue(hypocentral_separation <
                                  maximum_separation)
                 # Check that the differential times are accurate
                 event_1_picks = sfile_util.readpicks(event_1_name).picks
                 event_2_picks = sfile_util.readpicks(event_2_name).picks
                 for pick_pair in event_links:
                     station = pick_pair.split()[0]
                     event_1_travel_time_output = pick_pair.split()[1]
                     event_2_travel_time_output = pick_pair.split()[2]
                     weight = pick_pair.split()[3]
                     phase = pick_pair.split()[4]
                     # Extract the relevant pick information from the
                     # two sfiles
                     for pick in event_1_picks:
                         if pick.waveform_id.station_code == station:
                             if pick.phase_hint[0].upper() == phase:
                                 event_1_pick = pick
                     for pick in event_2_picks:
                         if pick.waveform_id.station_code == station:
                             if pick.phase_hint[0].upper() == phase:
                                 event_2_pick = pick
                     # Calculate the travel-time
                     event_1_travel_time_input = event_1_pick.time -\
                         event_1.origins[0].time
                     event_2_travel_time_input = event_2_pick.time -\
                         event_2.origins[0].time
                     self.assertEqual(event_1_travel_time_input,
                                      float(event_1_travel_time_output))
                     self.assertEqual(event_2_travel_time_input,
                                      float(event_2_travel_time_output))
             event_pair = line
             event_pairs.append(line)
             event_links = []
         else:
             event_links.append(line)
     self.assertTrue(os.path.isfile('phase.dat'))
     dt_file_out.close()
     os.remove('phase.dat')
     os.remove('dt.ct')
     if os.path.isfile('dt.ct2'):
         os.remove('dt.ct2')
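
Based on the fields these tests read back, a hedged sketch of walking a dt.ct file; iter_dt_ct_pairs is a hypothetical helper that mirrors the grouping logic of the loop above:

def iter_dt_ct_pairs(filename='dt.ct'):
    """Yield (event_id_1, event_id_2, observation_lines) for each event pair."""
    pair_ids, observations = None, []
    with open(filename, 'r') as f:
        for line in f:
            if line.startswith('#'):
                if pair_ids is not None:
                    yield pair_ids[0], pair_ids[1], observations
                parts = line.split()
                pair_ids = (int(parts[1]), int(parts[2]))
                observations = []
            else:
                # Each observation line: station, tt_event_1, tt_event_2, weight, phase
                observations.append(line.split())
    if pair_ids is not None:
        yield pair_ids[0], pair_ids[1], observations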
Example #7
def from_contbase(sfile, contbase_list, lowcut, highcut, samp_rate, filt_order,
                  length, prepick, swin, debug=0, plot=False):
    r"""Function to read in picks from sfile then generate the template from \
    the picks within this and the wavefiles from the continous database of \
    day-long files.  Included is a section to sanity check that the files are \
    daylong and that they start at the start of the day.  You should ensure \
    this is the case otherwise this may alter your data if your data are \
    daylong but the headers are incorrectly set.

    :type sfile: string
    :param sfile: sfilename must be the path to a seisan nordic type s-file \
            containing waveform and pick information, all other arguments can \
            be numbers save for swin which must be either P, S or all \
            (case-sensitive).
    :type contbase_list: List of tuple of string
    :param contbase_list: List of tuples of the form \
        ['path', 'type', 'network'].  Where path is the path to the \
        continuous database, type is the directory structure, which can be \
        either Yyyyy/Rjjj.01, which is the standard IRIS Year, julian day \
        structure, or, yyyymmdd which is a single directory for every day.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will look in template \
            defaults file
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will look in template \
            defaults file
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz, if set to None will look in \
            template defaults file
    :type filt_order: int
    :param filt_order: Filter level, if set to None will look in \
            template defaults file
    :type length: float
    :param length: Extract length in seconds, if None will look in template \
            defaults file.
    :type prepick: float
    :param prepick: Pre-pick time in seconds
    :type swin: str
    :param swin: Either 'all', 'P' or 'S', to select which phases to output.
    :type debug: int
    :param debug: Level of debugging output, higher=more
    :type plot: bool
    :param plot: Turns template plotting on or off.

    :returns: obspy.Stream Newly cut template
    """
    # Perform some checks first
    import os
    if not os.path.isfile(sfile):
        raise IOError('sfile does not exist')

    # import some things
    from eqcorrscan.utils import pre_processing
    from eqcorrscan.utils import sfile_util
    import glob
    from obspy import read as obsread

    # Read in the header of the sfile
    event = sfile_util.readheader(sfile)
    day = event.origins[0].time

    # Read in pick info
    catalog = sfile_util.readpicks(sfile)
    picks = catalog[0].picks
    print("I have found the following picks")
    pick_chans = []
    used_picks = []
    for pick in picks:
        station = pick.waveform_id.station_code
        channel = pick.waveform_id.channel_code
        phase = pick.phase_hint
        pcktime = pick.time
        if station + channel not in pick_chans and phase in ['P', 'S']:
            pick_chans.append(station + channel)
            used_picks.append(pick)
            print(pick)
            # #########Left off here
            for contbase in contbase_list:
                if contbase[1] == 'yyyy/mm/dd':
                    daydir = os.path.join(str(day.year),
                                          str(day.month).zfill(2),
                                          str(day.day).zfill(2))
                elif contbase[1] == 'Yyyyy/Rjjj.01':
                    daydir = os.path.join('Y' + str(day.year),
                                          'R' + str(day.julday).zfill(3) +
                                          '.01')
                elif contbase[1] == 'yyyymmdd':
                    daydir = day.datetime.strftime('%Y%m%d')
                if 'wavefiles' not in locals():
                    wavefiles = glob.glob(os.path.join(contbase[0], daydir,
                                                       '*' + station + '.*'))
                else:
                    wavefiles += glob.glob(os.path.join(contbase[0], daydir,
                                                        '*' + station + '.*'))
        elif phase in ['P', 'S']:
            print(' '.join(['Duplicate pick', station, channel,
                            phase, str(pcktime)]))
        elif phase == 'IAML':
            print(' '.join(['Amplitude pick', station, channel,
                            phase, str(pcktime)]))
    picks = used_picks
    wavefiles = list(set(wavefiles))

    # Read in waveform file
    wavefiles.sort()
    for wavefile in wavefiles:
        print("I am going to read waveform data from: " + wavefile)
        if 'st' not in locals():
            st = obsread(wavefile)
        else:
            st += obsread(wavefile)
    # Process waveform data
    st.merge(fill_value='interpolate')
    for tr in st:
        tr = pre_processing.dayproc(tr, lowcut, highcut, filt_order,
                                    samp_rate, debug, day)
    # Cut and extract the templates
    st1 = _template_gen(picks, st, length, swin, prepick=prepick, plot=plot)
    return st1
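
A hedged usage sketch; the continuous-database path, network code and processing parameters below are illustrative only:

# Continuous database laid out as one yyyymmdd directory per day (hypothetical path).
contbase_list = [('/data/continuous', 'yyyymmdd', 'NZ')]
template = from_contbase(sfile='/path/to/REA/TEST_/01-0000-00L.S201501',
                         contbase_list=contbase_list,
                         lowcut=2.0, highcut=10.0, samp_rate=50.0,
                         filt_order=4, length=6.0, prepick=0.5,
                         swin='all', debug=0, plot=False)
template.write('template.ms', format='MSEED')  # write the cut template out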
Example #8
def write_correlations(event_list, wavbase, extract_len, pre_pick, shift_len,
                       lowcut=1.0, highcut=10.0, max_sep=4, min_link=8,
                       coh_thresh=0.0, coherence_weight=True, plotvar=False):
    """
    Function to write a dt.cc file for hypoDD input - takes an input list of
    events and computes pick refinements by correlation.

    :type event_list: list of tuple
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type wavbase: str
    :param wavbase: Path to the Seisan wave directory in which the wavefiles
                    named in the S-files are stored
    :type extract_len: float
    :param extract_len: Length in seconds to extract around the pick
    :type pre_pick: float
    :param pre_pick: Time before the pick to start the correlation window
    :type shift_len: float
    :param shift_len: Time to allow pick to vary
    :type lowcut: float
    :param lowcut: Lowcut in Hz - default=1.0
    :type highcut: float
    :param highcut: Highcut in Hz - default=10.0
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link: Minimum links for an event to be paired
    :type coh_thresh: float
    :param coh_thresh: Minimum coherence (squared cross-correlation) \
        needed to include an observation in the output.
    :type coherence_weight: bool
    :param coherence_weight: If True (default), weight the dt.cc file by the \
        coherence (squared cross-correlation); if False, use the raw \
        cross-correlation value.
    :type plotvar: bool
    :param plotvar: To show the pick-correction plots, defaults to False.

    .. warning:: This is not a fast routine!

    .. warning:: In contrast to seisan's \
        corr routine, but in accordance with the hypoDD manual, this outputs \
        corrected differential time.

    .. note:: Currently we have not implemented a method for taking \
        unassociated event objects and wavefiles.  As such if you have events \
        with associated wavefiles you are advised to generate Sfiles for each \
        event using the sfile_util module prior to this step.
    """
    import obspy
    if int(obspy.__version__.split('.')[0]) > 0:
        from obspy.signal.cross_correlation import xcorr_pick_correction
    else:
        from obspy.signal.cross_correlation import xcorrPickCorrection \
            as xcorr_pick_correction
    import matplotlib.pyplot as plt
    from obspy import read
    from eqcorrscan.utils.mag_calc import dist_calc
    import glob
    import os
    import warnings
    from eqcorrscan.utils import sfile_util

    corr_list = []
    f = open('dt.cc', 'w')
    f2 = open('dt.cc2', 'w')
    for i, master in enumerate(event_list):
        master_sfile = master[1]
        master_event_id = master[0]
        master_picks = sfile_util.readpicks(master_sfile).picks
        master_event = sfile_util.readheader(master_sfile)
        master_ori_time = master_event.origins[0].time
        master_location = (master_event.origins[0].latitude,
                           master_event.origins[0].longitude,
                           master_event.origins[0].depth)
        master_wavefiles = sfile_util.readwavename(master_sfile)
        masterpath = glob.glob(wavbase + os.sep + master_wavefiles[0])
        if masterpath:
            masterstream = read(masterpath[0])
        if len(master_wavefiles) > 1:
            for wavefile in master_wavefiles:
                try:
                    masterstream += read(os.path.join(wavbase, wavefile))
                except:
                    warnings.warn("Couldn't find wavefile: " + wavefile)
                    continue
        for j in range(i+1, len(event_list)):
            # Use this tactic to only output unique event pairings
            slave_sfile = event_list[j][1]
            slave_event_id = event_list[j][0]
            slave_wavefiles = sfile_util.readwavename(slave_sfile)
            try:
                # slavestream=read(wavbase+'/*/*/'+slave_wavefiles[0])
                slavestream = read(wavbase + os.sep + slave_wavefiles[0])
            except:
                # print(slavestream)
                raise IOError('No wavefile found: '+slave_wavefiles[0]+' ' +
                              slave_sfile)
            if len(slave_wavefiles) > 1:
                for wavefile in slave_wavefiles:
                    # slavestream+=read(wavbase+'/*/*/'+wavefile)
                    try:
                        slavestream += read(wavbase+'/'+wavefile)
                    except:
                        continue
            # Write out the header line
            event_text = '#'+str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10)+' 0.0   \n'
            event_text2 = '#'+str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10)+' 0.0   \n'
            slave_picks = sfile_util.readpicks(slave_sfile).picks
            slave_event = sfile_util.readheader(slave_sfile)
            slave_ori_time = slave_event.origins[0].time
            slave_location = (slave_event.origins[0].latitude,
                              slave_event.origins[0].longitude,
                              slave_event.origins[0].depth)
            if dist_calc(master_location, slave_location) > max_sep:
                continue
            links = 0
            phases = 0
            for pick in master_picks:
                if pick.phase_hint[0].upper() not in ['P', 'S']:
                    continue
                    # Only use P and S picks, not amplitude or 'other'
                # Find station, phase pairs
                # Added by Carolin
                slave_matches = [p for p in slave_picks
                                 if p.phase_hint == pick.phase_hint
                                 and p.waveform_id.station_code ==
                                 pick.waveform_id.station_code]

                if masterstream.select(station=pick.waveform_id.station_code,
                                       channel='*' +
                                       pick.waveform_id.channel_code[-1]):
                    mastertr = masterstream.\
                        select(station=pick.waveform_id.station_code,
                               channel='*' +
                               pick.waveform_id.channel_code[-1])[0]
                else:
                    print('No waveform data for ' +
                          pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code)
                    print(pick.waveform_id.station_code +
                          '.' + pick.waveform_id.channel_code +
                          ' ' + slave_sfile+' ' + master_sfile)
                    break
                # Loop through the matches
                for slave_pick in slave_matches:
                    if slavestream.select(station=slave_pick.waveform_id.
                                          station_code,
                                          channel='*'+slave_pick.waveform_id.
                                          channel_code[-1]):
                        slavetr = slavestream.\
                            select(station=slave_pick.waveform_id.station_code,
                                   channel='*'+slave_pick.waveform_id.
                                   channel_code[-1])[0]
                    else:
                        print('No slave data for ' +
                              slave_pick.waveform_id.station_code + '.' +
                              slave_pick.waveform_id.channel_code)
                        print(pick.waveform_id.station_code +
                              '.' + pick.waveform_id.channel_code +
                              ' ' + slave_sfile + ' ' + master_sfile)
                        break
                    # Correct the picks
                    try:
                        correction, cc =\
                            xcorr_pick_correction(pick.time, mastertr,
                                                  slave_pick.time,
                                                  slavetr, pre_pick,
                                                  extract_len - pre_pick,
                                                  shift_len, filter="bandpass",
                                                  filter_options={'freqmin':
                                                                  lowcut,
                                                                  'freqmax':
                                                                  highcut},
                                                  plot=plotvar)
                        # Get the differential travel time using the
                        # corrected time.
                        # Check that the correction is within the allowed shift
                        # This can occur in the obspy routine when the
                        # correlation function is increasing at the end of the
                        # window.
                        if abs(correction) > shift_len:
                            warnings.warn('Shift correction too large, ' +
                                          'will not use')
                            continue
                        correction = (pick.time - master_ori_time) -\
                            (slave_pick.time + correction - slave_ori_time)
                        links += 1
                        if cc * cc >= coh_thresh:
                            if coherence_weight:
                                weight = cc * cc
                            else:
                                weight = cc
                            phases += 1
                            # added by Caro
                            event_text += pick.waveform_id.station_code.\
                                ljust(5) + _cc_round(correction, 3).\
                                rjust(11) + _cc_round(weight, 3).rjust(8) +\
                                ' '+pick.phase_hint+'\n'
                            event_text2 += pick.waveform_id.station_code\
                                .ljust(5).upper() +\
                                _cc_round(correction, 3).rjust(11) +\
                                _cc_round(weight, 3).rjust(8) +\
                                ' '+pick.phase_hint+'\n'

                            # links+=1
                        corr_list.append(cc*cc)
                    except:
                        # Should warn here
                        msg = "Couldn't compute correlation correction"
                        warnings.warn(msg)
                        continue
            if links >= min_link and phases > 0:
                f.write(event_text)
                f2.write(event_text2)
    if plotvar:
        plt.hist(corr_list, 150)
        plt.show()
    # f.write('\n')
    f.close()
    f2.close()
    return
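
The observation lines assembled above follow a fixed-width layout; a small sketch of that formatting, assuming _cc_round simply rounds to the given number of decimal places and returns a string:

def format_dt_cc_header(master_id, slave_id):
    """Mirror the '#' header line that opens each event pair."""
    return '#' + str(master_id).rjust(10) + str(slave_id).rjust(10) + ' 0.0   \n'

def format_dt_cc_observation(station, correction, weight, phase):
    """Mirror the fixed-width observation line appended to event_text."""
    return (station.ljust(5) + ('%0.3f' % correction).rjust(11) +
            ('%0.3f' % weight).rjust(8) + ' ' + phase + '\n')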
Example #9
 def test_write_catalog(self):
     """
      Simple testing function for the write_catalog function in \
      catalog_to_dd.
     """
     self.assertTrue(os.path.isfile('dt.ct'))
     # Check dt.ct file, should contain only a few linked events
     dt_file_out = open('dt.ct', 'r')
     event_pairs = []
     event_links = []
     event_pair = ''
     for i, line in enumerate(dt_file_out):
         if line[0] == '#':
             if i != 0:
                 # Check the number of links
                 self.assertTrue(len(event_links) >= self.minimum_links)
                 # Check the distance between events
                 event_1_name = [
                     event[1] for event in self.event_list
                     if event[0] == int(event_pair.split()[1])
                 ][0]
                 event_2_name = [
                     event[1] for event in self.event_list
                     if event[0] == int(event_pair.split()[2])
                 ][0]
                 event_1 = sfile_util.readheader(event_1_name)
                 event_2 = sfile_util.readheader(event_2_name)
                 event_1_location = (event_1.origins[0].latitude,
                                     event_1.origins[0].longitude,
                                     event_1.origins[0].depth / 1000)
                 event_2_location = (event_2.origins[0].latitude,
                                     event_2.origins[0].longitude,
                                     event_2.origins[0].depth / 1000)
                  hypocentral_separation = dist_calc(event_1_location,
                                                    event_2_location)
                 self.assertTrue(
                      hypocentral_separation < self.maximum_separation)
                 # Check that the differential times are accurate
                 event_1_picks = sfile_util.readpicks(event_1_name).picks
                 event_2_picks = sfile_util.readpicks(event_2_name).picks
                 for pick_pair in event_links:
                     station = pick_pair.split()[0]
                     event_1_travel_time_output = pick_pair.split()[1]
                     event_2_travel_time_output = pick_pair.split()[2]
                     # weight = pick_pair.split()[3]
                     phase = pick_pair.split()[4]
                     # Extract the relevant pick information from the
                     # two sfiles
                     for pick in event_1_picks:
                         if pick.waveform_id.station_code == station:
                             if pick.phase_hint[0].upper() == phase:
                                 event_1_pick = pick
                     for pick in event_2_picks:
                         if pick.waveform_id.station_code == station:
                             if pick.phase_hint[0].upper() == phase:
                                 event_2_pick = pick
                     # Calculate the travel-time
                     event_1_travel_time_input = event_1_pick.time -\
                         event_1.origins[0].time
                     event_2_travel_time_input = event_2_pick.time -\
                         event_2.origins[0].time
                     self.assertEqual(event_1_travel_time_input,
                                      float(event_1_travel_time_output))
                     self.assertEqual(event_2_travel_time_input,
                                      float(event_2_travel_time_output))
             event_pair = line
             event_pairs.append(line)
             event_links = []
         else:
             event_links.append(line)
     self.assertTrue(os.path.isfile('phase.dat'))
     dt_file_out.close()
Example #10
def from_contbase(sfile, contbase_list, lowcut, highcut, samp_rate, filt_order,
                  length, prepick, swin, debug=0, plot=False):
    r"""Function to read in picks from sfile then generate the template from \
    the picks within this and the wavefiles from the continous database of \
    day-long files.  Included is a section to sanity check that the files are \
    daylong and that they start at the start of the day.  You should ensure \
    this is the case otherwise this may alter your data if your data are \
    daylong but the headers are incorrectly set.

    :type sfile: string
    :param sfile: sfilename must be the path to a seisan nordic type s-file \
            containing waveform and pick information, all other arguments can \
            be numbers save for swin which must be either P, S or all \
            (case-sensitive).
    :type contbase_list: List of tuple of string
    :param contbase_list: List of tuples of the form \
        ['path', 'type', 'network'].  Where path is the path to the \
        continuous database, type is the directory structure, which can be \
        either Yyyyy/Rjjj.01, which is the standard IRIS Year, julian day \
        structure, or, yyyymmdd which is a single directory for every day.
    :type lowcut: float
    :param lowcut: Low cut (Hz), if set to None will look in template \
            defaults file
    :type highcut: float
    :param highcut: High cut (Hz), if set to None will look in template \
            defaults file
    :type samp_rate: float
    :param samp_rate: New sampling rate in Hz, if set to None will look in \
            template defaults file
    :type filt_order: int
    :param filt_order: Filter level, if set to None will look in \
            template defaults file
    :type length: float
    :param length: Extract length in seconds, if None will look in template \
            defaults file.
    :type prepick: float
    :param prepick: Pre-pick time in seconds
    :type swin: str
    :param swin: Either 'all', 'P' or 'S', to select which phases to output.
    :type debug: int
    :param debug: Level of debugging output, higher=more
    :type plot: bool
    :param plot: Turns template plotting on or off.

    :returns: obspy.Stream Newly cut template
    """
    # Perform some checks first
    import os
    if not os.path.isfile(sfile):
        raise IOError('sfile does not exist')

    # import some things
    from eqcorrscan.utils import pre_processing
    from eqcorrscan.utils import sfile_util
    import glob
    from obspy import read as obsread

    # Read in the header of the sfile
    event = sfile_util.readheader(sfile)
    day = event.origins[0].time

    # Read in pick info
    catalog = sfile_util.readpicks(sfile)
    picks = catalog[0].picks
    print("I have found the following picks")
    pick_chans = []
    used_picks = []
    for pick in picks:
        station = pick.waveform_id.station_code
        channel = pick.waveform_id.channel_code
        phase = pick.phase_hint
        pcktime = pick.time
        if station + channel not in pick_chans and phase in ['P', 'S']:
            pick_chans.append(station + channel)
            used_picks.append(pick)
            print(pick)
            # #########Left off here
            for contbase in contbase_list:
                if contbase[1] == 'yyyy/mm/dd':
                    daydir = os.path.join(str(day.year),
                                          str(day.month).zfill(2),
                                          str(day.day).zfill(2))
                elif contbase[1] == 'Yyyyy/Rjjj.01':
                    daydir = os.path.join('Y' + str(day.year),
                                          'R' + str(day.julday).zfill(3) +
                                          '.01')
                elif contbase[1] == 'yyyymmdd':
                    daydir = day.datetime.strftime('%Y%m%d')
                if 'wavefiles' not in locals():
                    wavefiles = glob.glob(os.path.join(contbase[0], daydir,
                                                       '*' + station + '.*'))
                else:
                    wavefiles += glob.glob(os.path.join(contbase[0], daydir,
                                                        '*' + station + '.*'))
        elif phase in ['P', 'S']:
            print(' '.join(['Duplicate pick', station, channel,
                            phase, str(pcktime)]))
        elif phase == 'IAML':
            print(' '.join(['Amplitude pick', station, channel,
                            phase, str(pcktime)]))
    picks = used_picks
    wavefiles = list(set(wavefiles))

    # Read in waveform file
    wavefiles.sort()
    for wavefile in wavefiles:
        print("I am going to read waveform data from: " + wavefile)
        if 'st' not in locals():
            st = obsread(wavefile)
        else:
            st += obsread(wavefile)
    # Process waveform data
    st.merge(fill_value='interpolate')
    for tr in st:
        tr = pre_processing.dayproc(tr, lowcut, highcut, filt_order,
                                    samp_rate, debug, day)
    # Cut and extract the templates
    st1 = _template_gen(picks, st, length, swin, prepick=prepick, plot=plot,
                        debug=debug)
    return st1
Example #11
def write_correlations(event_list,
                       wavbase,
                       extract_len,
                       pre_pick,
                       shift_len,
                       lowcut=1.0,
                       highcut=10.0,
                       max_sep=8,
                       min_link=8,
                       cc_thresh=0.0,
                       plotvar=False,
                       debug=0):
    """
    Write a dt.cc file for hypoDD input for a given list of events.

    Takes an input list of events and computes pick refinements by correlation.
    Outputs two files, dt.cc and dt.cc2, each provides a different weight,
    dt.cc uses weights of the cross-correlation, and dt.cc2 provides weights
    as the square of the cross-correlation.

    :type event_list: list
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type wavbase: str
    :param wavbase: Path to the Seisan wave directory in which the wavefiles
                    named in the S-files are stored
    :type extract_len: float
    :param extract_len: Length in seconds to extract around the pick
    :type pre_pick: float
    :param pre_pick: Time before the pick to start the correlation window
    :type shift_len: float
    :param shift_len: Time to allow pick to vary
    :type lowcut: float
    :param lowcut: Lowcut in Hz - default=1.0
    :type highcut: float
    :param highcut: Highcut in Hz - default=10.0
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link: Minimum links for an event to be paired
    :type cc_thresh: float
    :param cc_thresh: Threshold to include cross-correlation results.
    :type plotvar: bool
    :param plotvar: To show the pick-correction plots, defaults to False.
    :type debug: int
    :param debug: Variable debug levels from 0-5, higher=more output.

    .. warning:: This is not a fast routine!

    .. warning::
        In contrast to seisan's corr routine, but in accordance with the
        hypoDD manual, this outputs corrected differential time.

    .. note::
        Currently we have not implemented a method for taking
        unassociated event objects and wavefiles.  As such if you have events \
        with associated wavefiles you are advised to generate Sfiles for each \
        event using the sfile_util module prior to this step.

    .. note::
        There is no provision to taper waveforms within these functions, if you
        desire this functionality, you should apply the taper before calling
        this.  Note the :func:`obspy.Trace.taper` functions.
    """
    from obspy.signal.cross_correlation import xcorr_pick_correction
    warnings.filterwarnings(action="ignore",
                            message="Maximum of cross correlation " +
                            "lower than 0.8: *")
    corr_list = []
    f = open('dt.cc', 'w')
    f2 = open('dt.cc2', 'w')
    k_events = len(list(event_list))
    for i, master in enumerate(event_list):
        master_sfile = master[1]
        if debug > 1:
            print('Computing correlations for master: %s' % master_sfile)
        master_event_id = master[0]
        master_picks = sfile_util.readpicks(master_sfile).picks
        master_event = sfile_util.readheader(master_sfile)
        master_ori_time = master_event.origins[0].time
        master_location = (master_event.origins[0].latitude,
                           master_event.origins[0].longitude,
                           master_event.origins[0].depth / 1000.0)
        master_wavefiles = sfile_util.readwavename(master_sfile)
        masterpath = glob.glob(wavbase + os.sep + master_wavefiles[0])
        if masterpath:
            masterstream = read(masterpath[0])
        if len(master_wavefiles) > 1:
            for wavefile in master_wavefiles:
                try:
                    masterstream += read(os.path.join(wavbase, wavefile))
                except:
                    raise IOError("Couldn't find wavefile")
                    continue
        for j in range(i + 1, k_events):
            # Use this tactic to only output unique event pairings
            slave_sfile = event_list[j][1]
            if debug > 2:
                print('Comparing to event: %s' % slave_sfile)
            slave_event_id = event_list[j][0]
            slave_wavefiles = sfile_util.readwavename(slave_sfile)
            try:
                slavestream = read(wavbase + os.sep + slave_wavefiles[0])
            except:
                raise IOError('No wavefile found: ' + slave_wavefiles[0] +
                              ' ' + slave_sfile)
            if len(slave_wavefiles) > 1:
                for wavefile in slave_wavefiles:
                    try:
                        slavestream += read(wavbase + os.sep + wavefile)
                    except IOError:
                        print('No waveform found: %s' %
                              (wavbase + os.sep + wavefile))
                        continue
            # Write out the header line
            event_text = '#' + str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10) + ' 0.0   \n'
            event_text2 = '#' + str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10) + ' 0.0   \n'
            slave_picks = sfile_util.readpicks(slave_sfile).picks
            slave_event = sfile_util.readheader(slave_sfile)
            slave_ori_time = slave_event.origins[0].time
            slave_location = (slave_event.origins[0].latitude,
                              slave_event.origins[0].longitude,
                              slave_event.origins[0].depth / 1000.0)
            if dist_calc(master_location, slave_location) > max_sep:
                if debug > 0:
                    print('Separation exceeds max_sep: %s' %
                          (dist_calc(master_location, slave_location)))
                continue
            links = 0
            phases = 0
            for pick in master_picks:
                if not hasattr(pick, 'phase_hint') or \
                                len(pick.phase_hint) == 0:
                    warnings.warn('No phase-hint for pick:')
                    print(pick)
                    continue
                if pick.phase_hint[0].upper() not in ['P', 'S']:
                    warnings.warn('Will only use P or S phase picks')
                    print(pick)
                    continue
                    # Only use P and S picks, not amplitude or 'other'
                # Find station, phase pairs
                # Added by Carolin
                slave_matches = [
                    p for p in slave_picks if hasattr(p, 'phase_hint')
                    and p.phase_hint == pick.phase_hint and
                    p.waveform_id.station_code == pick.waveform_id.station_code
                ]

                if masterstream.select(station=pick.waveform_id.station_code,
                                       channel='*' +
                                       pick.waveform_id.channel_code[-1]):
                    mastertr = masterstream.\
                        select(station=pick.waveform_id.station_code,
                               channel='*' +
                               pick.waveform_id.channel_code[-1])[0]
                elif debug > 1:
                    print('No waveform data for ' +
                          pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code)
                    print(pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code + ' ' + slave_sfile +
                          ' ' + master_sfile)
                    break
                # Loop through the matches
                for slave_pick in slave_matches:
                    if slavestream.select(
                            station=slave_pick.waveform_id.station_code,
                            channel='*' +
                            slave_pick.waveform_id.channel_code[-1]):
                        slavetr = slavestream.\
                            select(station=slave_pick.waveform_id.station_code,
                                   channel='*' + slave_pick.waveform_id.
                                   channel_code[-1])[0]
                    else:
                        print('No slave data for ' +
                              slave_pick.waveform_id.station_code + '.' +
                              slave_pick.waveform_id.channel_code)
                        print(pick.waveform_id.station_code + '.' +
                              pick.waveform_id.channel_code + ' ' +
                              slave_sfile + ' ' + master_sfile)
                        break
                    # Correct the picks
                    try:
                        correction, cc =\
                            xcorr_pick_correction(
                                pick.time, mastertr, slave_pick.time,
                                slavetr, pre_pick, extract_len - pre_pick,
                                shift_len, filter="bandpass",
                                filter_options={'freqmin': lowcut,
                                                'freqmax': highcut},
                                plot=plotvar)
                        # Get the differential travel time using the
                        # corrected time.
                        # Check that the correction is within the allowed shift
                        # This can occur in the obspy routine when the
                        # correlation function is increasing at the end of the
                        # window.
                        if abs(correction) > shift_len:
                            warnings.warn('Shift correction too large, ' +
                                          'will not use')
                            continue
                        correction = (pick.time - master_ori_time) -\
                            (slave_pick.time + correction - slave_ori_time)
                        links += 1
                        if cc >= cc_thresh:
                            weight = cc
                            phases += 1
                            # added by Caro
                            event_text += pick.waveform_id.station_code.\
                                ljust(5) + _cc_round(correction, 3).\
                                rjust(11) + _cc_round(weight, 3).rjust(8) +\
                                ' ' + pick.phase_hint + '\n'
                            event_text2 += pick.waveform_id.station_code\
                                .ljust(5) + _cc_round(correction, 3).\
                                rjust(11) +\
                                _cc_round(weight * weight, 3).rjust(8) +\
                                ' ' + pick.phase_hint + '\n'
                            if debug > 3:
                                print(event_text)
                        else:
                            print('cc too low: %s' % cc)
                        corr_list.append(cc * cc)
                    except:
                        msg = "Couldn't compute correlation correction"
                        warnings.warn(msg)
                        continue
            if links >= min_link and phases > 0:
                f.write(event_text)
                f2.write(event_text2)
    if plotvar:
        plt.hist(corr_list, 150)
        plt.show()
    # f.write('\n')
    f.close()
    f2.close()
    return
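
A hedged end-to-end sketch chaining the helpers shown in these examples; paths and parameter values are illustrative only:

import glob

sfile_list = glob.glob('/path/to/REA/TEST_/*L.S??????')
event_list = sfiles_to_event(sfile_list)   # also writes event.dat
write_correlations(event_list, wavbase='/path/to/WAV/TEST_',
                   extract_len=2.0, pre_pick=0.5, shift_len=0.2,
                   lowcut=2.0, highcut=10.0, max_sep=8, min_link=8,
                   cc_thresh=0.5, plotvar=False, debug=1)
# dt.cc and dt.cc2 are written to the current directory.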
Example #12
def write_correlations(event_list,
                       wavbase,
                       extract_len,
                       pre_pick,
                       shift_len,
                       lowcut=1.0,
                       highcut=10.0,
                       max_sep=4,
                       min_link=8,
                       coh_thresh=0.0,
                       coherence_weight=True,
                       plotvar=False):
    """
    Function to write a dt.cc file for hypoDD input - takes an input list of
    events and computes pick refinements by correlation.

    :type event_list: list of tuple
    :param event_list: List of tuples of event_id (int) and sfile (String)
    :type wavbase: str
    :param wavbase: Path to the Seisan wave directory in which the wavefiles
                    named in the S-files are stored
    :type extract_len: float
    :param extract_len: Length in seconds to extract around the pick
    :type pre_pick: float
    :param pre_pick: Time before the pick to start the correlation window
    :type shift_len: float
    :param shift_len: Time to allow pick to vary
    :type lowcut: float
    :param lowcut: Lowcut in Hz - default=1.0
    :type highcut: float
    :param highcut: Highcut in Hz - default=10.0
    :type max_sep: float
    :param max_sep: Maximum separation between event pairs in km
    :type min_link: int
    :param min_link: Minimum links for an event to be paired
    :type coh_thresh: float
    :param coh_thresh: Minimum coherence (squared cross-correlation) \
        needed to include an observation in the output.
    :type coherence_weight: bool
    :param coherence_weight: If True (default), weight the dt.cc file by the \
        coherence (squared cross-correlation); if False, use the raw \
        cross-correlation value.
    :type plotvar: bool
    :param plotvar: To show the pick-correction plots, defaults to False.

    .. warning:: This is not a fast routine!

    .. warning:: In contrast to seisan's \
        corr routine, but in accordance with the hypoDD manual, this outputs \
        corrected differential time.

    .. note:: Currently we have not implemented a method for taking \
        unassociated event objects and wavefiles.  As such if you have events \
        with associated wavefiles you are advised to generate Sfiles for each \
        event using the sfile_util module prior to this step.
    """
    import obspy
    if int(obspy.__version__.split('.')[0]) > 0:
        from obspy.signal.cross_correlation import xcorr_pick_correction
    else:
        from obspy.signal.cross_correlation import xcorrPickCorrection \
            as xcorr_pick_correction
    import matplotlib.pyplot as plt
    from obspy import read
    from eqcorrscan.utils.mag_calc import dist_calc
    import glob
    import os
    import warnings
    from eqcorrscan.utils import sfile_util

    corr_list = []
    f = open('dt.cc', 'w')
    f2 = open('dt.cc2', 'w')
    for i, master in enumerate(event_list):
        master_sfile = master[1]
        master_event_id = master[0]
        master_picks = sfile_util.readpicks(master_sfile).picks
        master_event = sfile_util.readheader(master_sfile)
        master_ori_time = master_event.origins[0].time
        master_location = (master_event.origins[0].latitude,
                           master_event.origins[0].longitude,
                           master_event.origins[0].depth)
        master_wavefiles = sfile_util.readwavename(master_sfile)
        masterpath = glob.glob(wavbase + os.sep + master_wavefiles[0])
        if masterpath:
            masterstream = read(masterpath[0])
        if len(master_wavefiles) > 1:
            for wavefile in master_wavefiles:
                try:
                    masterstream += read(os.path.join(wavbase, wavefile))
                except:
                    warnings.warn("Couldn't find wavefile: " + wavefile)
                    continue
        for j in range(i + 1, len(event_list)):
            # Use this tactic to only output unique event pairings
            slave_sfile = event_list[j][1]
            slave_event_id = event_list[j][0]
            slave_wavefiles = sfile_util.readwavename(slave_sfile)
            try:
                # slavestream=read(wavbase+'/*/*/'+slave_wavefiles[0])
                slavestream = read(wavbase + os.sep + slave_wavefiles[0])
            except:
                # print(slavestream)
                raise IOError('No wavefile found: ' + slave_wavefiles[0] +
                              ' ' + slave_sfile)
            if len(slave_wavefiles) > 1:
                for wavefile in slave_wavefiles:
                    # slavestream+=read(wavbase+'/*/*/'+wavefile)
                    try:
                        slavestream += read(wavbase + '/' + wavefile)
                    except:
                        continue
            # Write out the header line
            event_text = '#'+str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10)+' 0.0   \n'
            event_text2 = '#'+str(master_event_id).rjust(10) +\
                str(slave_event_id).rjust(10)+' 0.0   \n'
            slave_picks = sfile_util.readpicks(slave_sfile).picks
            slave_event = sfile_util.readheader(slave_sfile)
            slave_ori_time = slave_event.origins[0].time
            slave_location = (slave_event.origins[0].latitude,
                              slave_event.origins[0].longitude,
                              slave_event.origins[0].depth)
            if dist_calc(master_location, slave_location) > max_sep:
                continue
            links = 0
            phases = 0
            for pick in master_picks:
                if pick.phase_hint[0].upper() not in ['P', 'S']:
                    continue
                    # Only use P and S picks, not amplitude or 'other'
                # Find station, phase pairs
                # Added by Carolin
                slave_matches = [
                    p for p in slave_picks
                    if p.phase_hint == pick.phase_hint and
                    p.waveform_id.station_code == pick.waveform_id.station_code
                ]

                if masterstream.select(station=pick.waveform_id.station_code,
                                       channel='*' +
                                       pick.waveform_id.channel_code[-1]):
                    mastertr = masterstream.\
                        select(station=pick.waveform_id.station_code,
                               channel='*' +
                               pick.waveform_id.channel_code[-1])[0]
                else:
                    print('No waveform data for ' +
                          pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code)
                    print(pick.waveform_id.station_code + '.' +
                          pick.waveform_id.channel_code + ' ' + slave_sfile +
                          ' ' + master_sfile)
                    break
                # Loop through the matches
                for slave_pick in slave_matches:
                    if slavestream.select(
                            station=slave_pick.waveform_id.station_code,
                            channel='*' +
                            slave_pick.waveform_id.channel_code[-1]):
                        slavetr = slavestream.\
                            select(station=slave_pick.waveform_id.station_code,
                                   channel='*'+slave_pick.waveform_id.
                                   channel_code[-1])[0]
                    else:
                        print('No slave data for ' +
                              slave_pick.waveform_id.station_code + '.' +
                              slave_pick.waveform_id.channel_code)
                        print(pick.waveform_id.station_code + '.' +
                              pick.waveform_id.channel_code + ' ' +
                              slave_sfile + ' ' + master_sfile)
                        break
                    # Correct the picks
                    try:
                        correction, cc =\
                            xcorr_pick_correction(pick.time, mastertr,
                                                  slave_pick.time,
                                                  slavetr, pre_pick,
                                                  extract_len - pre_pick,
                                                  shift_len, filter="bandpass",
                                                  filter_options={'freqmin':
                                                                  lowcut,
                                                                  'freqmax':
                                                                  highcut},
                                                  plot=plotvar)
                        # Get the differential travel time using the
                        # corrected time.
                        # Check that the correction is within the allowed shift
                        # This can occur in the obspy routine when the
                        # correlation function is increasing at the end of the
                        # window.
                        if abs(correction) > shift_len:
                            warnings.warn('Shift correction too large, ' +
                                          'will not use')
                            continue
                        correction = (pick.time - master_ori_time) -\
                            (slave_pick.time + correction - slave_ori_time)
                        links += 1
                        if cc * cc >= coh_thresh:
                            if coherence_weight:
                                weight = cc * cc
                            else:
                                weight = cc
                            phases += 1
                            # added by Caro
                            event_text += pick.waveform_id.station_code.\
                                ljust(5) + _cc_round(correction, 3).\
                                rjust(11) + _cc_round(weight, 3).rjust(8) +\
                                ' '+pick.phase_hint+'\n'
                            event_text2 += pick.waveform_id.station_code\
                                .ljust(5).upper() +\
                                _cc_round(correction, 3).rjust(11) +\
                                _cc_round(weight, 3).rjust(8) +\
                                ' '+pick.phase_hint+'\n'

                            # links+=1
                        corr_list.append(cc * cc)
                    except:
                        # Should warn here
                        msg = "Couldn't compute correlation correction"
                        warnings.warn(msg)
                        continue
            if links >= min_link and phases > 0:
                f.write(event_text)
                f2.write(event_text2)
    if plotvar:
        plt.hist(corr_list, 150)
        plt.show()
    # f.write('\n')
    f.close()
    f2.close()
    return