Example #1
1
File: print.py Project: 3rdcycle/obspy
def main(argv=None):
    parser = ArgumentParser(prog='obspy-print', description=__doc__.strip())
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f', '--format', choices=ENTRY_POINTS['waveform'],
                        help='Waveform format (slightly faster if specified).')
    parser.add_argument('-n', '--no-merge', action='store_false',
                        dest='merge', help='Switch off cleanup merge.')
    parser.add_argument('--no-sorting', action='store_false',
                        dest='sort', help='Switch off sorting of traces.')
    parser.add_argument('-g', '--print-gaps', action='store_true',
                        help='Switch on printing of gap information.')
    parser.add_argument('files', nargs='+',
                        help='Files to process.')

    # Deprecated arguments
    action = _get_deprecated_argument_action(
        '--nomerge', '--no-merge', real_action='store_false')
    parser.add_argument('--nomerge', nargs=0, action=action, dest='merge',
                        help=SUPPRESS)

    args = parser.parse_args(argv)

    st = Stream()
    for f in args.files:
        st += read(f, format=args.format)
    if args.merge:
        st.merge(-1)
    if args.sort:
        st.sort()
    print(st.__str__(extended=True))
    if args.print_gaps:
        print()
        st.print_gaps()
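Example #1 depends on a _get_deprecated_argument_action helper that the snippet does not show. A minimal sketch of such an argparse action factory, assuming all it needs to do is warn about the old flag and then mimic the named store action, might look like this (hypothetical, not ObsPy's actual implementation):

import argparse
import warnings

def _get_deprecated_argument_action(old_name, new_name, real_action='store'):
    # Build an argparse.Action that warns about the deprecated flag, then
    # behaves like the requested store action (sketch only).
    class _DeprecatedAction(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            warnings.warn('Option %s is deprecated, use %s instead.' %
                          (old_name, new_name))
            if real_action == 'store_true':
                setattr(namespace, self.dest, True)
            elif real_action == 'store_false':
                setattr(namespace, self.dest, False)
            else:
                setattr(namespace, self.dest, values)
    return _DeprecatedAction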
Example #2
0
def main(argv=None):
    parser = ArgumentParser(prog='obspy-print', description=__doc__.strip())
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('-f', '--format', choices=ENTRY_POINTS['waveform'],
                        help='Waveform format (slightly faster if specified).')
    parser.add_argument('-n', '--no-merge', action='store_false',
                        dest='merge', help='Switch off cleanup merge.')
    parser.add_argument('--no-sorting', action='store_false',
                        dest='sort', help='Switch off sorting of traces.')
    parser.add_argument('-g', '--print-gaps', action='store_true',
                        help='Switch on printing of gap information.')
    parser.add_argument('files', nargs='+',
                        help='Files to process.')

    args = parser.parse_args(argv)

    st = Stream()
    for f in args.files:
        st += read(f, format=args.format)
    if args.merge:
        st.merge(-1)
    if args.sort:
        st.sort()
    print(st.__str__(extended=True))
    if args.print_gaps:
        print()
        st.print_gaps()
Example #3
0
File: print.py Project: jmfee-usgs/obspy
def main(argv=None):
    parser = ArgumentParser(prog="obspy-print", description=__doc__.strip())
    parser.add_argument("-V", "--version", action="version", version="%(prog)s " + __version__)
    parser.add_argument(
        "-f", "--format", choices=ENTRY_POINTS["waveform"], help="Waveform format (slightly faster if specified)."
    )
    parser.add_argument("-n", "--no-merge", action="store_false", dest="merge", help="Switch off cleanup merge.")
    parser.add_argument("--no-sorting", action="store_false", dest="sort", help="Switch off sorting of traces.")
    parser.add_argument("-g", "--print-gaps", action="store_true", help="Switch on printing of gap information.")
    parser.add_argument("files", nargs="+", help="Files to process.")

    args = parser.parse_args(argv)

    st = Stream()
    for f in args.files:
        st += read(f, format=args.format)
    if args.merge:
        st.merge(-1)
    if args.sort:
        st.sort()
    print(st.__str__(extended=True))
    if args.print_gaps:
        print()
        st.print_gaps()
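Any of the variants above can be exercised directly by passing an argv list instead of relying on sys.argv, which is handy in tests; the file name below is hypothetical:

# Hypothetical invocation: print merged, sorted traces plus gap information.
main(['--print-gaps', 'data.mseed'])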
Example #4
0
                        tr.trim(starttime=s_t, endtime=e_t)
                        # deduct theoretical traveltime and start_buf from starttime
                        if rel_time == 1:
                            tr.stats.starttime = tr.stats.starttime - atime
                        st_pickalign2 += tr
                    except Exception:  # skip traces that fail to trim or align
                        pass
print('After alignment and range selection - event 1: ' +
      str(len(st_pickalign1)) + ' traces')
print('After alignment and range selection - event 2: ' +
      str(len(st_pickalign2)) + ' traces')

#%%
#print(st) # at length
if verbose:
    print(st1.__str__(extended=True))
    print(st2.__str__(extended=True))
    if rel_time == 1:
        print(st_pickalign1.__str__(extended=True))
        print(st_pickalign2.__str__(extended=True))

#%%  detrend, taper, filter
st_pickalign1.detrend(type='simple')
st_pickalign2.detrend(type='simple')
st_pickalign1.taper(taper_frac)
st_pickalign2.taper(taper_frac)
st_pickalign1.filter('bandpass',
                     freqmin=freq_min,
                     freqmax=freq_max,
                     corners=2,
                     zerophase=True)
Example #5
0
def read_local(data_dir,
               coord_file,
               network,
               station,
               location,
               channel,
               starttime,
               endtime,
               merge=True):
    """
    Read in waveforms from "local" 1-hour, IRIS-compliant miniSEED files, and
    output a Stream object with station/element coordinates attached.

    NOTE 1:
        The expected naming convention for the miniSEED files is:
        <network>.<station>.<location>.<channel>.<year>.<julian_day>.<hour>

    NOTE 2:
        This function assumes that the response has been removed from the
        waveforms in the input miniSEED files.

    Args:
        data_dir: Directory containing miniSEED files
        coord_file: JSON file containing coordinates for local stations (full
                    path required)
        network: SEED network code [wildcards (*, ?) accepted]
        station: SEED station code [wildcards (*, ?) accepted]
        location: SEED location code [wildcards (*, ?) accepted]
        channel: SEED channel code [wildcards (*, ?) accepted]
        starttime: Start time for data request (UTCDateTime)
        endtime: End time for data request (UTCDateTime)
        merge: Toggle merging of Traces with identical IDs (default: True)

    Returns:
        st_out: Stream containing gathered waveforms
    """

    print('-----------------------------')
    print('GATHERING LOCAL MINISEED DATA')
    print('-----------------------------')

    # Take (hour) floor of starttime
    starttime_hr = UTCDateTime(starttime.year, starttime.month, starttime.day,
                               starttime.hour)

    # Take (hour) floor of endtime - this ensures we check this miniSEED file
    endtime_hr = UTCDateTime(endtime.year, endtime.month, endtime.day,
                             endtime.hour)

    # Define filename template
    template = f'{network}.{station}.{location}.{channel}.{{}}.{{}}.{{}}'

    # Initialize Stream object
    st_out = Stream()

    # Initialize the starting hour
    tmp_time = starttime_hr

    # Cycle forward in time, advancing hour by hour through miniSEED files
    while tmp_time <= endtime_hr:

        pattern = template.format(tmp_time.strftime('%Y'),
                                  tmp_time.strftime('%j'),
                                  tmp_time.strftime('%H'))

        files = glob.glob(os.path.join(data_dir, pattern))

        for file in files:
            st_out += read(file)

        tmp_time += HR2SEC  # advance one hour (HR2SEC, defined elsewhere, is 3600 s)

    if merge:
        st_out.merge()  # Merge Traces with the same ID
    st_out.sort()

    # If the Stream is empty, then we can stop here
    if st_out.count() == 0:
        print('No data downloaded.')
        return st_out

    # Otherwise, show what the Stream contains
    print(st_out.__str__(extended=True))  # This syntax prints the WHOLE Stream

    # Add zeros to ensure all Traces have same length
    st_out.trim(starttime, endtime, pad=True, fill_value=0)

    print('Assigning coordinates...')

    # Assign coordinates by searching through user-supplied JSON file
    local_coords = load_json_file(coord_file)
    for tr in st_out:
        try:
            tr.stats.latitude, tr.stats.longitude,\
                tr.stats.elevation = local_coords[tr.stats.station]
        except KeyError:
            print(f'No coordinates available for {tr.id}. Stopping.')
            raise

    print('Done')

    # Return the Stream with coordinates attached
    return st_out
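A hedged usage sketch for read_local; the archive directory, JSON file, and SEED codes below are hypothetical, and the HR2SEC constant used above is assumed to be defined elsewhere in the module:

from obspy import UTCDateTime

# Hypothetical archive layout and codes -- adjust to your own data.
st = read_local(data_dir='/data/mseed',
                coord_file='/data/station_coords.json',
                network='AV', station='*', location='*', channel='BDF',
                starttime=UTCDateTime(2019, 6, 21, 3, 0),
                endtime=UTCDateTime(2019, 6, 21, 5, 0))
print(st)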
Example #6
0
def gather_waveforms(source,
                     network,
                     station,
                     location,
                     channel,
                     starttime,
                     endtime,
                     time_buffer=0,
                     merge_fill_value=0,
                     trim_fill_value=0,
                     remove_response=False,
                     return_failed_stations=False,
                     watc_url=None,
                     watc_username=None,
                     watc_password=None):
    """
    Gather seismic/infrasound waveforms from IRIS or WATC FDSN, or AVO Winston,
    and output a :class:`~obspy.core.stream.Stream` with station/element
    coordinates attached. Optionally remove the sensitivity.

    **NOTE**

    Usual RTM usage is to specify a starttime/endtime that brackets the
    estimated source origin time. Then time_buffer is used to download enough
    extra data to account for the time required for an infrasound signal to
    propagate to the farthest station.

    Args:
        source (str): Which source to gather waveforms from. Options are:

            * `'IRIS'` – IRIS FDSN
            * `'WATC'` – WATC FDSN
            * `'AVO'` – AVO Winston

        network (str): SEED network code [wildcards (``*``, ``?``) accepted]
        station (str): SEED station code [wildcards (``*``, ``?``) accepted]
        location (str): SEED location code [wildcards (``*``, ``?``) accepted]
        channel (str): SEED channel code [wildcards (``*``, ``?``) accepted]
        starttime (:class:`~obspy.core.utcdatetime.UTCDateTime`): Start time for
            data request
        endtime (:class:`~obspy.core.utcdatetime.UTCDateTime`): End time for
            data request
        time_buffer (int or float): Extra amount of data to download after
            `endtime` [s]
        merge_fill_value (bool, int, float, str, or None): Controls merging of
            :class:`~obspy.core.trace.Trace` objects with identical IDs. If
            `False`, no merging is performed. Otherwise, a merge is performed
            with the ``fill_value`` provided to this parameter. For details,
            see the docstring of :meth:`obspy.core.stream.Stream.merge`
        trim_fill_value (bool, int, float, or None): Controls trimming of the
            output :class:`~obspy.core.stream.Stream`, useful if precisely
            uniform start and end times are desired. If `False`, no trimming is
            performed. Otherwise, a trim is performed with the ``fill_value``
            provided to this parameter. For details, see the docstring of
            :meth:`obspy.core.stream.Stream.trim`
        remove_response (bool): Toggle response removal via
            :meth:`~obspy.core.trace.Trace.remove_sensitivity` or a simple
            scalar multiplication
        return_failed_stations (bool): If `True`, returns a list of station
            codes that were requested but not downloaded. This disables the
            standard failed station warning message
        watc_url (str): URL for WATC FDSN server
        watc_username (str): Username for WATC FDSN server
        watc_password (str): Password for WATC FDSN server

    Returns:
        :class:`~obspy.core.stream.Stream` containing gathered waveforms. If
        `return_failed_stations` is `True`, additionally returns a list
        containing station codes that were requested but not downloaded
    """

    # Check for issues with fill value args
    if merge_fill_value is True or trim_fill_value is True:
        raise ValueError('Cannot provide True to fill value parameters.')

    print('--------------')
    print('GATHERING DATA')
    print('--------------')

    # IRIS FDSN
    if source == 'IRIS':

        client = FDSN_Client('IRIS')
        print('Reading data from IRIS FDSN...')
        try:
            st_out = client.get_waveforms(network,
                                          station,
                                          location,
                                          channel,
                                          starttime,
                                          endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # WATC FDSN
    elif source == 'WATC':

        print('Connecting to WATC FDSN...')
        client = FDSN_Client(base_url=watc_url,
                             user=watc_username,
                             password=watc_password)
        print('Successfully connected. Reading data from WATC FDSN...')
        try:
            st_out = client.get_waveforms(network,
                                          station,
                                          location,
                                          channel,
                                          starttime,
                                          endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # AVO Winston
    elif source == 'AVO':

        client = EW_Client('pubavo1.wr.usgs.gov',
                           port=16023)  # 16023 is long-term
        print('Reading data from AVO Winston...')
        st_out = Stream()  # Make empty Stream object to populate

        # Brute-force "dynamic grid search" over network/station/channel/location codes
        for nw in _restricted_matching('network', network, client):
            for sta in _restricted_matching('station',
                                            station,
                                            client,
                                            network=nw):
                for cha in _restricted_matching('channel',
                                                channel,
                                                client,
                                                network=nw,
                                                station=sta):
                    for loc in _restricted_matching('location',
                                                    location,
                                                    client,
                                                    network=nw,
                                                    station=sta,
                                                    channel=cha):
                        try:
                            st_out += client.get_waveforms(
                                nw, sta, loc, cha, starttime,
                                endtime + time_buffer)
                        except KeyError:
                            pass

    else:
        raise ValueError('Unrecognized source. Valid options are \'IRIS\', '
                         '\'WATC\', or \'AVO\'.')

    # Merge, if specified
    if merge_fill_value is not False:
        st_out.merge(fill_value=merge_fill_value)  # Merge Traces with same ID
        warnings.warn(f'Merging with "fill_value={merge_fill_value}"',
                      CollectionWarning)

    st_out.sort()

    # Check that all requested stations are present in Stream
    requested_stations = station.split(',')
    downloaded_stations = [tr.stats.station for tr in st_out]
    failed_stations = []
    for sta in requested_stations:
        # The below check works with wildcards, but obviously cannot detect if
        # ALL stations corresponding to a given wildcard (e.g., O??K) were
        # downloaded. Thus, if careful station selection is desired, specify
        # each station explicitly and the below check will then be effective.
        if not fnmatch.filter(downloaded_stations, sta):
            if not return_failed_stations:
                # If we're not returning the failed stations, then show this
                # warning message to alert the user
                warnings.warn(
                    f'Station {sta} not downloaded from {source} '
                    'server for this time period.', CollectionWarning)
            failed_stations.append(sta)

    # If the Stream is empty, then we can stop here
    if st_out.count() == 0:
        print('No data downloaded.')
        if return_failed_stations:
            return st_out, failed_stations
        else:
            return st_out

    # Otherwise, show what the Stream contains
    print(st_out.__str__(extended=True))  # This syntax prints the WHOLE Stream

    # Trim, if specified
    if trim_fill_value is not False:
        st_out.trim(starttime,
                    endtime + time_buffer,
                    pad=True,
                    fill_value=trim_fill_value)
        warnings.warn(f'Trimming with "fill_value={trim_fill_value}"',
                      CollectionWarning)

    print('Assigning coordinates...')

    # Use IRIS inventory info for AVO data source
    if source == 'AVO':
        client = FDSN_Client('IRIS')

    try:
        inv = client.get_stations(network=network,
                                  station=station,
                                  location=location,
                                  channel=channel,
                                  starttime=starttime,
                                  endtime=endtime + time_buffer,
                                  level='channel')
    except FDSNNoDataException:
        inv = Inventory()  # Make an empty inv
        warnings.warn('Creating empty inventory.', CollectionWarning)

    for tr in st_out:
        try:
            coords = inv.get_coordinates(tr.id)
            tr.stats.longitude = coords['longitude']
            tr.stats.latitude = coords['latitude']
            tr.stats.elevation = coords['elevation']
        except Exception as e:
            if str(e) == 'No matching channel metadata found.':
                warnings.warn(f'No metadata for {tr.id} found in inventory.',
                              CollectionWarning)
            else:
                raise

    # Check if any Trace did NOT get coordinates assigned, and try to use JSON
    # coordinates if available
    for tr in st_out:
        try:
            tr.stats.longitude, tr.stats.latitude, tr.stats.elevation
        except AttributeError:
            try:
                tr.stats.latitude, tr.stats.longitude,\
                    tr.stats.elevation = AVO_COORDS[tr.id]
                warnings.warn(f'Using coordinates from JSON file for {tr.id}.',
                              CollectionWarning)
            except KeyError:
                print(f'No coordinates available for {tr.id}. Stopping.')
                raise

    # Remove sensitivity
    if remove_response:

        print('Removing sensitivity...')

        for tr in st_out:
            try:
                # Just removing sensitivity for now. remove_response() can lead
                # to errors. This should be sufficient for now. Plus some
                # IRIS-AVO responses are wonky.
                tr.remove_sensitivity()
            except ValueError:  # No response information found
                # This is only set up for infrasound calibration values
                try:
                    calib = AVO_INFRA_CALIBS[tr.id]
                    tr.data = tr.data * calib
                    warnings.warn(
                        'Using calibration value from JSON file for '
                        f'{tr.id}.', CollectionWarning)
                except KeyError:
                    print(f'No calibration value available for {tr.id}. '
                          'Stopping.')
                    raise

    print('Done')

    # Return the Stream with coordinates attached (and responses removed if
    # specified)
    if return_failed_stations:
        return st_out, failed_stations
    else:
        return st_out
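A hedged usage sketch for gather_waveforms; the station codes and times below are hypothetical:

from obspy import UTCDateTime

# Hypothetical IRIS request with a 2-minute propagation buffer; also
# collect the list of stations that failed to download.
st, failed = gather_waveforms(source='IRIS', network='AV',
                              station='DLL,RDWB', location='*', channel='BDF',
                              starttime=UTCDateTime(2019, 6, 20, 23, 10),
                              endtime=UTCDateTime(2019, 6, 20, 23, 20),
                              time_buffer=120, return_failed_stations=True)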
Example #7
def pro3pair(eq_num1,
             eq_num2,
             stat_corr=1,
             simple_taper=0,
             skip_SNR=0,
             dphase='PKIKP',
             dphase2='PKiKP',
             dphase3='PKIKP',
             dphase4='PKiKP',
             rel_time=1,
             start_buff=-200,
             end_buff=500,
             plot_scale_fac=0.05,
             qual_threshold=0,
             corr_threshold=0.5,
             freq_min=1,
             freq_max=3,
             min_dist=0,
             max_dist=180,
             auto_dist=True,
             alt_statics=0,
             statics_file='nothing',
             ARRAY=0,
             ref_loc=False,
             ref_rad=0.4,
             max_taper_length=5.,
             no_plots=False,
             taper_frac=0.05):

    #%% Import functions
    from obspy import UTCDateTime
    from obspy import Stream
    from obspy import read
    from obspy.geodetics import gps2dist_azimuth
    import numpy as np
    import os
    from obspy.taup import TauPyModel
    import matplotlib.pyplot as plt
    import time
    model = TauPyModel(model='iasp91')

    import sys  # don't show any warnings
    import warnings
    from termcolor import colored

    if not sys.warnoptions:
        warnings.simplefilter("ignore")

    print(colored('Running pro3a_sort_plot_pair', 'cyan'))
    start_time_wc = time.time()

    #%%  Set some parameters
    verbose = 0  # more output
    #    rel_time = 1          # timing is relative to a chosen phase, otherwise relative to OT
    # taper_frac = 0.05      # Fraction of window tapered on both ends
    signal_dur = 5.  # signal length used in SNR calculation
    plot_tt = 1  # plot the traveltimes?
    do_decimate = 0  # 0 if no decimation desired
    # if ref_loc ==true,  use ref_rad        to filter station distance
    # if ref_loc ==false, use earthquake loc to filter station distance
    #    ref_rad = 0.4    # ° radius (°) set by input or at top
    if ARRAY == 0:
        ref_lat = 36.3  # °N, around middle of Japan
        ref_lon = 138.5  # °E
    if ARRAY == 1:
        ref_lat = 46.7  # °N keep only inner rings A-D if radius is 0.4°
        ref_lon = -106.22  # °E
    if ARRAY == 2:
        ref_lat = 38  # °N
        ref_lon = 104.5  # °E

    if rel_time == 0:  # SNR requirement not implemented for unaligned traces
        qual_threshold = 0

    # Plot with reduced velocity?
    red_plot = 0
    red_dist = 55
    red_time = 300
    red_slow = 7.2  # seconds per degree
    #%% Get saved event info, also used to name files
    #  event 2016-05-28T09:47:00.000 -56.241 -26.935 78
    print('Opening locations for events ' + str(eq_num1) + ' and ' +
          str(eq_num2))
    fname1 = f'/Users/vidale/Documents/Research/IC/EvLocs/event{eq_num1}.txt'
    fname2 = f'/Users/vidale/Documents/Research/IC/EvLocs/event{eq_num2}.txt'
    with open(fname1, 'r') as file1:
        lines1 = file1.readlines()
    with open(fname2, 'r') as file2:
        lines2 = file2.readlines()
    split_line1 = lines1[0].split()
    split_line2 = lines2[0].split()
    #            ids.append(split_line[0])  ignore label for now
    t1 = UTCDateTime(split_line1[1])
    t2 = UTCDateTime(split_line2[1])
    date_label1 = split_line1[1][0:10]
    date_label2 = split_line2[1][0:10]
    year1 = split_line1[1][0:4]
    year2 = split_line2[1][0:4]
    ev_lat1 = float(split_line1[2])
    ev_lat2 = float(split_line2[2])
    ev_lon1 = float(split_line1[3])
    ev_lon2 = float(split_line2[3])
    ev_depth1 = float(split_line1[4])
    ev_depth2 = float(split_line2[4])
    print('1st event: date_label ' + date_label1 + ' time ' + str(t1) +
          ' lat ' + str(ev_lat1) + ' lon ' + str(ev_lon1) + ' depth ' +
          str(ev_depth1))
    print('2nd event: date_label ' + date_label2 + ' time ' + str(t2) +
          ' lat ' + str(ev_lat2) + ' lon ' + str(ev_lon2) + ' depth ' +
          str(ev_depth2))

    #%% Get station location file
    if stat_corr == 1:  # load static terms, only applies to Hinet and LASA
        if ARRAY == 0:
            if alt_statics == 0:  # standard set
                sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_hinet.txt'
            else:  # custom set made by this event for this event
                print('probably needs fixing')
                sta_file = (
                    '/Users/vidale/Documents/PyCode/Hinet/Array_codes/Files/' +
                    'HA' + date_label1[:10] + 'pro4_' + dphase + '.statics')
        elif ARRAY == 1:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_LASA.txt'
        elif ARRAY == 2:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_ch.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_dist = []
        st_lats = []
        st_lons = []
        st_shift = []
        st_corr = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_dist.append(split_line[1])
            st_lats.append(split_line[2])
            st_lons.append(split_line[3])
            st_shift.append(split_line[4])
            st_corr.append(split_line[5])
    else:  # no static terms, always true for NORSAR
        if ARRAY == 0:  # Hinet set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_hinet.txt'
        elif ARRAY == 1:  #         LASA set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_LASA.txt'
        elif ARRAY == 2:  #         China set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_ch.txt'
        else:  #         NORSAR set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_NORSAR.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_lats = []
        st_lons = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_lats.append(split_line[1])
            st_lons.append(split_line[2])
    if ARRAY == 0:  # truncate Hi-net station names and upper-case them to match the station list
        for ii in station_index:
            this_name = st_names[ii]
            this_name_truc = this_name[0:5]
            st_names[ii] = this_name_truc.upper()

#%% Is taper too long compared to noise estimation window?
    totalt = end_buff - start_buff
    noise_time_skipped = taper_frac * totalt
    noise_time_skipped = min(noise_time_skipped, 10.0)  # cap the taper length at 10 s
    if simple_taper == 0:
        if noise_time_skipped >= 0.5 * (-start_buff):
            print('Specified taper of ' + str(taper_frac * totalt) +
                  ' is too long compared to the available noise estimation window ' +
                  str(-start_buff - noise_time_skipped) + '. May not work well.')
            old_taper_frac = taper_frac
            taper_frac = -0.5 * start_buff / totalt
            print('Taper reset from ' + str(old_taper_frac * totalt) + ' to ' +
                  str(taper_frac * totalt) + ' seconds.')

#%% Load waveforms and decimate to 10 sps
    fname1 = '/Users/vidale/Documents/GitHub/LASA_data/HD' + date_label1 + '.mseed'
    fname2 = '/Users/vidale/Documents/GitHub/LASA_data/HD' + date_label2 + '.mseed'
    st1 = read(fname1)
    st2 = read(fname2)

    if do_decimate != 0:
        st1.decimate(do_decimate, no_filter=True)
        st2.decimate(do_decimate, no_filter=True)

    print(
        f'1st trace for event 1 has {len(st1[0].data)} time pts which is {len(st1[0].data)*st1[0].stats.delta:.1f} s'
    )
    print(
        f'1st trace for event 2 has {len(st2[0].data)} time pts which is {len(st2[0].data)*st2[0].stats.delta:.1f} s'
    )
    print('st1 has ' + str(len(st1)) + ' traces')
    print('st2 has ' + str(len(st2)) + ' traces')
    print('1st trace starts at ' + str(st1[0].stats.starttime) +
          ', event at ' + str(t1))
    print('2nd trace starts at ' + str(st2[0].stats.starttime) +
          ', event at ' + str(t2))

    #%% Select by distance, window and adjust start time to align picked times
    st_pickalign1 = Stream()
    st_pickalign2 = Stream()
    tra1_in_range = 0
    tra1_sta_found = 0
    nodata1 = 0
    tra2_in_range = 0
    tra2_sta_found = 0
    nodata2 = 0
    min_dist_auto = 180
    max_dist_auto = 0
    min_time_plot = 1000000
    max_time_plot = -1000000

    # not used in all cases, but printed out below
    # only used if rel_slow == 1, preserves 0 slowness, otherwise 0 is set to phase slowness
    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, ev_lat1, ev_lon1)
    ref1_dist = ref_distance[0] / (1000 * 111)
    dist_minus = ref1_dist - 0.5
    dist_plus = ref1_dist + 0.5
    arrivals_ref = model.get_travel_times(source_depth_in_km=ev_depth1,
                                          distance_in_degree=ref1_dist,
                                          phase_list=[dphase])
    arrivals_minus = model.get_travel_times(source_depth_in_km=ev_depth1,
                                            distance_in_degree=dist_minus,
                                            phase_list=[dphase])
    arrivals_plus = model.get_travel_times(source_depth_in_km=ev_depth1,
                                           distance_in_degree=dist_plus,
                                           phase_list=[dphase])
    atime_ref = arrivals_ref[0].time  # phase arrival time at reference distance
    ref_slow = arrivals_plus[0].time - arrivals_minus[0].time  # dt over 1° at ref distance

    for tr in st1:  # find lat-lon from list, chop, statics, traces one by one
        if float(year1) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            tra1_sta_found += 1

            if stat_corr != 1 or float(st_corr[ii]) > corr_threshold:
                # if using statics, reject low correlations
                stalat = float(st_lats[ii])  # look up lat & lon again to find distance
                stalon = float(st_lons[ii])

                distance = gps2dist_azimuth(stalat, stalon, ev_lat1, ev_lon1)  # recompute distance rather than storing it
                tr.stats.distance = distance[0]  # distance in m
                dist = distance[0] / (1000 * 111)

                in_range = 0  # flag for whether this trace goes into stack
                if ref_loc == False:  # check whether trace is in distance range from earthquake
                    if min_dist < dist and dist < max_dist:
                        in_range = 1
                        tra1_in_range += 1
                elif ref_loc == True:  # alternately, check whether trace is close enough to ref_location
                    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, stalat,
                                                    stalon)
                    ref2_dist = ref_distance[0] / (1000 * 111)
                    if ref2_dist < ref_rad:
                        in_range = 1
                        tra1_in_range += 1
                if in_range == 1:  # trace fulfills the specified criteria for being in range
                    s_t = t1 + start_buff
                    e_t = t1 + end_buff
                    if stat_corr == 1:  # apply static station corrections
                        tr.stats.starttime -= float(st_shift[ii])
                    if rel_time == 0:  #  don't adjust absolute time
                        tr.trim(starttime=s_t, endtime=e_t)
                    else:  # shift relative to a chosen phase
                        arrivals_each = model.get_travel_times(
                            source_depth_in_km=ev_depth1,
                            distance_in_degree=dist,
                            phase_list=[dphase])
                        atime_each = arrivals_each[0].time
                        if rel_time == 1:  # each window has a shift proportional to ref_dist at phase slowness at ref_dist
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_each - (
                                dist - ref1_dist) * ref_slow
                        elif rel_time == 2:  # each window has a distinct shift, but offset is common to all stations
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_ref
                        elif rel_time == 3:  # each station has an individual, chosen-phase shift, phase arrival set to zero
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_each
                        elif rel_time == 4:  # use same window around chosen phase for all stations, phase arrival set to zero
                            s_t += atime_ref
                            e_t += atime_ref
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_ref
                        else:
                            print('invalid rel_time, must be integer 0 to 4')
                            sys.exit()
                    if len(tr.data) > 0:
                        st_pickalign1 += tr
                    else:
                        nodata1 += 1
        else:
            print(tr.stats.station + ' not found in station list')
#            sys.exit()

    for tr in st2:  # find lat-lon from list, chop, statics, traces one by one
        if float(year2) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            tra2_sta_found += 1

            if stat_corr != 1 or float(st_corr[ii]) > corr_threshold:
                # if using statics, reject low correlations
                stalat = float(st_lats[ii])  # look up lat & lon again to find distance
                stalon = float(st_lons[ii])

                distance = gps2dist_azimuth(stalat, stalon, ev_lat2, ev_lon2)  # recompute distance rather than storing it
                tr.stats.distance = distance[0]  # distance in m
                dist = distance[0] / (1000 * 111)

                in_range = 0  # flag for whether this trace goes into stack

                if ref_loc == False:  # check whether trace is in distance range from earthquake
                    if min_dist < dist and dist < max_dist:
                        in_range = 1
                        tra2_in_range += 1
                elif ref_loc == True:  # alternately, check whether trace is close enough to ref_location
                    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, stalat,
                                                    stalon)
                    ref2_dist = ref_distance[0] / (1000 * 111)
                    if ref2_dist < ref_rad:
                        in_range = 1
                        tra2_in_range += 1

                if in_range == 1:  # trace fulfills the specified criteria for being in range
                    s_t = t2 + start_buff
                    e_t = t2 + end_buff
                    if stat_corr == 1:  # apply static station corrections
                        tr.stats.starttime -= float(st_shift[ii])
                    if rel_time == 0:  #  don't adjust absolute time
                        tr.trim(starttime=s_t, endtime=e_t)
                    else:  # shift relative to a chosen phase
                        arrivals_each = model.get_travel_times(
                            source_depth_in_km=ev_depth2,
                            distance_in_degree=dist,
                            phase_list=[dphase])
                        atime_each = arrivals_each[0].time
                        if rel_time == 1:  # each window has a shift proportional to ref_dist at phase slowness at ref_dist
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_each - (
                                dist - ref1_dist) * ref_slow
                        elif rel_time == 2:  # each window has a distinct shift, but offset is common to all stations
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_ref
                        elif rel_time == 3:  # each station has an individual, chosen-phase shift, phase arrival set to zero
                            s_t += atime_each
                            e_t += atime_each
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_each
                        elif rel_time == 4:  # use same window around chosen phase for all stations, phase arrival set to zero
                            s_t += atime_ref
                            e_t += atime_ref
                            tr.trim(starttime=s_t, endtime=e_t)
                            tr.stats.starttime -= atime_ref
                        else:
                            print('invalid rel_time, must be integer 0 to 4')
                            sys.exit()
                    if len(tr.data) > 0:
                        st_pickalign2 += tr
                    else:
                        nodata2 += 1
        else:
            print(tr.stats.station + ' not found in station list')

    print('After alignment + range and correlation selection')
    print('1st event, Traces found: ' + str(tra1_sta_found) +
          ' Traces in range: ' + str(tra1_in_range) +
          ' Traces with no data: ' + str(nodata1))
    print('2nd event, Traces found: ' + str(tra2_sta_found) +
          ' Traces in range: ' + str(tra2_in_range) +
          ' Traces with no data: ' + str(nodata2))

    print(
        f'ref1_distance  {ref1_dist:.3f}  relative start time  {atime_ref:.3f}'
    )
    if ref_loc == True:
        print(
            f'ref2_distance  {ref2_dist:.3f}  relative start time  {atime_ref:.3f}'
        )
        print('ref_loc == True, ref_lat: ' + str(ref_lat) + ' ref_lon: ' +
              str(ref_lon))
    print(
        f'last station: distance {dist:.3f}  last station lat: {stalat:.3f}   last station lon: {stalon:.3f}'
    )

    #%%
    #print(st) # at length
    if verbose:
        print(st1.__str__(extended=True))
        print(st2.__str__(extended=True))
        if rel_time == 1:
            print(st_pickalign1.__str__(extended=True))
            print(st_pickalign2.__str__(extended=True))

#%%  Detrend, taper, filter
    print('Taper fraction is ' + str(taper_frac) + ' bandpass is ' +
          str(freq_min) + ' to ' + str(freq_max))
    st_pickalign1.detrend(type='simple')
    st_pickalign2.detrend(type='simple')
    st_pickalign1.taper(taper_frac, max_length=max_taper_length)
    st_pickalign2.taper(taper_frac, max_length=max_taper_length)
    st_pickalign1.filter('bandpass',
                         freqmin=freq_min,
                         freqmax=freq_max,
                         corners=4,
                         zerophase=True)
    st_pickalign2.filter('bandpass',
                         freqmin=freq_min,
                         freqmax=freq_max,
                         corners=4,
                         zerophase=True)
    st_pickalign1.taper(taper_frac, max_length=max_taper_length)
    st_pickalign2.taper(taper_frac, max_length=max_taper_length)

    #%%  Cull further by imposing SNR threshold on both traces
    st1good = Stream()
    st2good = Stream()
    for tr1 in st_pickalign1:
        for tr2 in st_pickalign2:
            if ((tr1.stats.network == tr2.stats.network) &
                (tr1.stats.station == tr2.stats.station)):
                if skip_SNR == 1:
                    st1good += tr1
                    st2good += tr2
                else:
                    # estimate median noise
                    t_noise1_start = int(len(tr1.data) * taper_frac)
                    t_noise2_start = int(len(tr2.data) * taper_frac)
                    t_noise1_end = int(
                        len(tr1.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_noise2_end = int(
                        len(tr2.data) * (-start_buff) /
                        (end_buff - start_buff))
                    noise1 = np.median(
                        abs(tr1.data[t_noise1_start:t_noise1_end]))
                    noise2 = np.median(
                        abs(tr2.data[t_noise2_start:t_noise2_end]))
                    # estimate median signal
                    t_signal1_start = int(
                        len(tr1.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_signal2_start = int(
                        len(tr2.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_signal1_end = t_signal1_start + int(
                        len(tr1.data) * signal_dur / (end_buff - start_buff))
                    t_signal2_end = t_signal2_start + int(
                        len(tr2.data) * signal_dur / (end_buff - start_buff))
                    signal1 = np.median(
                        abs(tr1.data[t_signal1_start:t_signal1_end]))
                    signal2 = np.median(
                        abs(tr2.data[t_signal2_start:t_signal2_end]))
                    #            test SNR
                    SNR1 = signal1 / noise1
                    SNR2 = signal2 / noise2
                    if (SNR1 > qual_threshold and SNR2 > qual_threshold):
                        st1good += tr1
                        st2good += tr2
    if skip_SNR == 1:
        print('Matches (no SNR test): ' + str(len(st1good)) + ' traces')
    else:
        print('Match and above SNR threshold: ' + str(len(st1good)) +
              ' traces')

    #%%  get station lat-lon, compute distance for plot
    min_dist_auto = 180
    max_dist_auto = 0
    min_time_plot = 1000000
    max_time_plot = -1000000

    for tr in st1good:
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            stalon = float(st_lons[ii])  # look up lat & lon again to find distance
            stalat = float(st_lats[ii])
            distance = gps2dist_azimuth(stalat, stalon, ev_lat1, ev_lon1)
            tr.stats.distance = distance[0] / (1000 * 111)  # epicentral distance in degrees
            if tr.stats.distance < min_dist_auto:
                min_dist_auto = tr.stats.distance
            if tr.stats.distance > max_dist_auto:
                max_dist_auto = tr.stats.distance
            if tr.stats.starttime - t1 < min_time_plot:
                min_time_plot = tr.stats.starttime - t1
            if ((tr.stats.starttime - t1) +
                ((len(tr.data) - 1) * tr.stats.delta)) > max_time_plot:
                max_time_plot = ((tr.stats.starttime - t1) +
                                 ((len(tr.data) - 1) * tr.stats.delta))
    for tr in st2good:
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            stalon = float(st_lons[ii])  # look up lat & lon again to find distance
            stalat = float(st_lats[ii])
            distance = gps2dist_azimuth(stalat, stalon, ev_lat2, ev_lon2)
            tr.stats.distance = distance[0] / (1000 * 111)  # epicentral distance in degrees

    print(
        f'Min distance is   {min_dist_auto:.3f}   Max distance is {max_dist_auto:.3f}'
    )
    print(
        f'Min time is   {min_time_plot:.2f}   Max time is {max_time_plot:.2f}')
    if min_time_plot > start_buff:
        print(f'Min time {min_time_plot:.2f} > start_buff {start_buff:.2f}')
        print(
            colored('Write zero-filling into pro3 for this code to work',
                    'red'))
        sys.exit(-1)
    if max_time_plot < end_buff:
        print(f'Max time {max_time_plot:.2f} < end_buff {end_buff:.2f}')
        print(
            colored('Write zero-filling into pro3 for this code to work',
                    'red'))
        sys.exit(-1)

    #%%
    # plot traces
    fig_index = 3
    plt.close(fig_index)
    plt.figure(fig_index, figsize=(8, 8))
    plt.xlim(start_buff, end_buff)

    if auto_dist == True:
        dist_diff = max_dist_auto - min_dist_auto  # add space at extremes
        plt.ylim(min_dist_auto - 0.1 * dist_diff,
                 max_dist_auto + 0.1 * dist_diff)
    else:
        plt.ylim(min_dist, max_dist)

    for tr in st1good:
        dist_offset = tr.stats.distance  # trying for approx degrees
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t1)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                 (tr.data.max() - tr.data.min()) + dist_offset,
                 color='green')

    for tr in st2good:
        dist_offset = tr.stats.distance  # trying for approx degrees
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t2)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                 (tr.data.max() - tr.data.min()) + dist_offset,
                 color='red')
    print('And made it to here.')

    #%% Plot traveltime curves
    if rel_time != 1:
        if plot_tt:
            # first traveltime curve
            line_pts = 50
            dist_vec = np.arange(min_dist_auto, max_dist_auto,
                                 (max_dist_auto - min_dist_auto) /
                                 line_pts)  # distance grid
            time_vec1 = np.arange(
                min_dist_auto, max_dist_auto, (max_dist_auto - min_dist_auto) /
                line_pts)  # time grid of the same length; entries overwritten below
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth1,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase:
                        time_vec1[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec1[i] = np.nan
            # second traveltime curve
            if dphase2 != 'no':
                time_vec2 = np.arange(
                    min_dist_auto, max_dist_auto,
                    (max_dist_auto - min_dist_auto) / line_pts
                )  # time grid of the same length; entries overwritten below
                for i in range(0, line_pts):
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth1,
                        distance_in_degree=dist_vec[i],
                        phase_list=[dphase2])
                    num_arrivals = len(arrivals)
                    found_it = 0
                    for j in range(0, num_arrivals):
                        if arrivals[j].name == dphase2:
                            time_vec2[i] = arrivals[j].time
                            found_it = 1
                    if found_it == 0:
                        time_vec2[i] = np.nan
                if rel_time == 3 or rel_time == 4:
                    time_vec2 = time_vec2 - time_vec1
                elif rel_time == 2:
                    time_vec2 = time_vec2 - atime_ref
                plt.plot(time_vec2, dist_vec, color='orange')
            # third traveltime curve
            if dphase3 != 'no':
                time_vec3 = np.arange(
                    min_dist_auto, max_dist_auto,
                    (max_dist_auto - min_dist_auto) / line_pts
                )  # time grid of the same length; entries overwritten below
                for i in range(0, line_pts):
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth1,
                        distance_in_degree=dist_vec[i],
                        phase_list=[dphase3])
                    num_arrivals = len(arrivals)
                    found_it = 0
                    for j in range(0, num_arrivals):
                        if arrivals[j].name == dphase3:
                            time_vec3[i] = arrivals[j].time
                            found_it = 1
                    if found_it == 0:
                        time_vec3[i] = np.nan
                if rel_time == 3 or rel_time == 4:
                    time_vec3 = time_vec3 - time_vec1
                elif rel_time == 2:
                    time_vec3 = time_vec3 - atime_ref
                plt.plot(time_vec3, dist_vec, color='yellow')
            # fourth traveltime curve
            # if dphase4 != 'no':
            #     time_vec4 = np.arange(min_dist, max_dist_auto, (max_dist_auto - min_dist)/line_pts) # empty time grid of same length (filled with -1000)
            #     for i in range(0,line_pts):
            #         arrivals = model.get_travel_times(source_depth_in_km=ev_depth1,distance_in_degree
            #                                     =dist_vec[i],phase_list=[dphase4])
            #         num_arrivals = len(arrivals)
            #         found_it = 0
            #         for j in range(0,num_arrivals):
            #             if arrivals[j].name == dphase4:
            #                 time_vec4[i] = arrivals[j].time
            #                 found_it = 1
            #         if found_it == 0:
            #             time_vec4[i] = np.nan
            #     if   rel_time == 3 or rel_time == 4:
            #         time_vec2 = time_vec2 - time_vec1
            #     elif rel_time == 2:
            #         time_vec2 = time_vec2 - atime_ref
            #     plt.plot(time_vec4,dist_vec, color = 'purple')

            # if   rel_time == 3 or rel_time == 4:
            #     time_vec1 = time_vec1 - time_vec1
            # elif rel_time == 2:
            #     time_vec1 = time_vec1 - atime_ref
            # plt.plot(time_vec1,dist_vec, color = 'blue')
            # if no_plots == False:
            #     plt.show()

    plt.xlabel('Time (s)')
    plt.ylabel('Epicentral distance from event (°)')
    plt.title(dphase + ' for ' + fname1[43:53] + ' vs ' + fname2[43:53])
    if no_plots == False:
        plt.show()

#%%  Save processed files
    fname1 = '/Users/vidale/Documents/Research/IC/Pro_Files/HD' + date_label1 + 'sel.mseed'
    fname2 = '/Users/vidale/Documents/Research/IC/Pro_Files/HD' + date_label2 + 'sel.mseed'
    st1good.write(fname1, format='MSEED')
    st2good.write(fname2, format='MSEED')

    elapsed_time_wc = time.time() - start_time_wc
    print(f'This job took   {elapsed_time_wc:.1f}   seconds')
    os.system('say "Done"')
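A hedged invocation sketch for pro3pair; the event numbers are hypothetical and must match event<N>.txt files and HD<date>.mseed archives at the hard-coded paths above:

# Hypothetical pair of core-phase events recorded at LASA (ARRAY=1).
pro3pair(eq_num1=101, eq_num2=102, ARRAY=1, stat_corr=1,
         dphase='PKIKP', dphase2='PKiKP',
         start_buff=-100, end_buff=300, freq_min=1, freq_max=2)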
Example #8
0
    ev_time = init_time + det_times[kk]
    start_time = ev_time - wtime_before
    end_time = ev_time + wtime_after
    print(ev_time, start_time, end_time)
    if (diff_times[kk] > wtime_after): # special case: unusually long delay between start and end times
        end_time = ev_time + diff_times[kk] + wtime_after
    jday_start = start_time.julday
    jday_end = end_time.julday
    if (jday_start != jday_end):
        print "Warning: start and end day not equal", kk, jday_start, jday_end

    stalist = []
    st = Stream()
    if int(peaksum[kk]) < 150:
        continue
    for s, sta in zip(ss[:, kk], stations):
        if np.isnan(s):
            continue
        st += read(ts_dir + '%03d/*%s*.mseed' % (jday_start, sta), format='MSEED')

    #st = read(ts_dir+'%03d/*.mseed'%jday_start, format='MSEED')
    print(len(st))
    print(st.__str__(extended=True))

    st_slice = st.slice(start_time, end_time)

    out_file = ('event_rank' + format(kk, '05d') +
                '_nsta' + str(int(num_sta[kk])) +
                '_peaksum' + str(int(peaksum[kk])) +
                '_ind' + str(int(det_start_ind[kk])) +
                '_time' + str(det_times[kk]) +
                '_' + ev_time.strftime('%Y-%m-%dT%H:%M:%S.%f'))
    st_slice.write(os.path.join(output_eq_dir, out_file + '.mseed'), format='MSEED')
    if plot:
        st_slice.plot(equal_scale=False, size=(out_width,out_height), outfile=out_file)
Example #9
def pro3singlet(eq_file,
                stat_corr=0,
                rel_time=1,
                rel_slow=1,
                simple_taper=0,
                skip_SNR=0,
                dphase='PKiKP',
                dphase2='PKKP',
                dphase3='PKIKP',
                dphase4='PPP',
                start_buff=-10,
                end_buff=30,
                plot_scale_fac=0.05,
                qual_threshold=0,
                corr_threshold=0,
                freq_min=0.25,
                freq_max=1,
                do_filt=1,
                min_dist=0,
                max_dist=180,
                auto_dist=0,
                do_decimate=0,
                alt_statics=0,
                statics_file='nothing',
                ARRAY=0,
                ref_loc=0,
                ref_rad=0.4,
                verbose=0,
                fig_index=101,
                event_no=0):
    # 0 is Hinet, 1 is LASA, 2 is NORSAR

    #%% Import functions
    from obspy import UTCDateTime
    from obspy import Stream
    from obspy import read
    from obspy.geodetics import gps2dist_azimuth
    import numpy as np
    import os
    from obspy.taup import TauPyModel
    import matplotlib.pyplot as plt
    import time
    model = TauPyModel(model='iasp91')

    #	import sys # don't show any warnings
    #	import warnings
    #
    #	if not sys.warnoptions:
    #	    warnings.simplefilter("ignore")

    print('Running pro3b_sort_plot_singlet')
    start_time_wc = time.time()

    #%% Get saved event info, also used to name files
    #  input event data with 1-line file of format
    #  event 2016-05-28T09:47:00.000 -56.241 -26.935 78
    if ARRAY == 0:
        fname = eq_file
    elif ARRAY == 1:
        fname = 'EvLocs/' + eq_file
    with open(fname, 'r') as file:
        lines = file.readlines()
    split_line = lines[0].split()
    #			ids.append(split_line[0])  ignore label for now
    t = UTCDateTime(split_line[1])
    date_label = split_line[1][0:10]
    year = split_line[1][0:4]
    ev_lat = float(split_line[2])
    ev_lon = float(split_line[3])
    ev_depth = float(split_line[4])
    print('date_label ' + date_label + ' time ' + str(t) + ' lat ' +
          str(ev_lat) + ' lon ' + str(ev_lon) + ' depth ' + str(ev_depth))

    #%% Get station location file
    if stat_corr == 1:  # load static terms, only applies to Hinet and LASA
        if ARRAY == 0:
            if alt_statics == 0:  # standard set
                sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta_statics.txt'
            else:  # custom statics made by pro4 for this event
                sta_file = (
                    '/Users/vidale/Documents/PyCode/Hinet/Array_codes/Files/' +
                    'HA' + date_label[:10] + 'pro4_' + dphase + '.statics')
        elif ARRAY == 1:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/L_sta_statics.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_dist = []
        st_lats = []
        st_lons = []
        st_shift = []
        st_corr = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_dist.append(split_line[1])
            st_lats.append(split_line[2])
            st_lons.append(split_line[3])
            st_shift.append(split_line[4])
            st_corr.append(split_line[5])
    else:  # no static terms, always true for NORSAR
        if ARRAY == 0:  # Hinet set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta.txt'
        elif ARRAY == 1:  #         LASA set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/LASA_sta.txt'
        else:  #         NORSAR set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/NORSAR_sta.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_lats = []
        st_lons = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_lats.append(split_line[1])
            st_lons.append(split_line[2])

#%%  Set some parameters
#	fig_index = 101
#	stat_corr = 1 # apply station static corrections
#	rel_time = 1          # timing is relative to a chosen phase, otherwise relative to OT
#	dphase  = 'PKIKP'       # phase to be aligned
#	dphase2 = 'PKiKP'      # another phase to have traveltime plotted
#	dphase3 = 'PKP'        # another phase to have traveltime plotted
#	dphase4 = 'pP'        # another phase to have traveltime plotted
    taper_frac = .05  #Fraction of window tapered on both ends
    signal_dur = 10.  # signal length used in SNR calculation
    #	plot_scale_fac = 0.5    #  Bigger numbers make each trace amplitude bigger on plot
    #	qual_threshold =  2 # minimum SNR
    #	corr_threshold = 0.7  # minimum correlation in measuring shift to use station
    plot_tt = 1  # plot the traveltimes?
    #	ref_loc = 0  # 1 if selecting stations within ref_rad of ref_lat and ref_lon
    # 0 if selecting stations by distance from earthquake
    if ARRAY == 0:
        ref_lat = 36.3  # °N, around middle of Japan
        ref_lon = 138.5  # °E
        ref_rad = 1.5  # ° radius (°)
    elif ARRAY == 1:
        ref_lat = 46.7  # °N keep only inner rings A-D
        ref_lon = -106.22  # °E
#		ref_rad = 0.4    # ° radius (°) set by input or at top

#%% Is taper too long compared to noise estimation window?
    totalt = end_buff - start_buff
    noise_time_skipped = taper_frac * totalt
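    # e.g. with the defaults start_buff=-10 and end_buff=30: totalt = 40 s, so a
    # 5% taper skips 2 s of the 10 s leader; 2 < 5 (= -0.5 * start_buff), so the
    # check below passes and the taper is left unchanged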
    if simple_taper == 0:
        if noise_time_skipped >= -0.5 * start_buff:
            print(
                'Specified taper of ' + str(taper_frac * totalt) +
                ' is too long for the available noise estimation window of ' +
                str(-start_buff - noise_time_skipped) +
                ' s. May not work well.')
            old_taper_frac = taper_frac
            taper_frac = -0.5 * start_buff / totalt
            if start_buff > 0:
                taper_frac = 0.05  # fall back to a minimal taper if there is no leader
            print('Taper reset from ' + str(old_taper_frac * totalt) + ' to ' +
                  str(taper_frac * totalt) + ' seconds.')

    if rel_time == 0:  # SNR requirement not implemented for unaligned traces
        qual_threshold = 0

    # Plot with reduced velocity?
    red_plot = 0
    red_dist = 55
    red_time = 300
    red_slow = 7.2  # seconds per degree
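    # if red_plot were enabled, a trace at 60 degrees would be shifted by
    # red_time + (60 - red_dist) * red_slow = 300 + 5 * 7.2 = 336 s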

    #%% In case one wants to manually enter data here
    #	date_label = '2018-04-02' # date for filename
    #	ev_lon   = -63.006
    #	ev_lat   = -20.659
    #	ev_depth = 559
    #	t        = UTCDateTime('2018-04-02T13:40:34.840')

    #%% Load waveforms and decimate to 10 sps
    st = Stream()
    if ARRAY == 0:
        fname = 'HD' + date_label + '.mseed'
    elif ARRAY == 1:
        fname = 'Mseed/HD' + date_label + '.mseed'
    st = read(fname)
    if do_decimate != 0:
        st.decimate(do_decimate)

    print('Read in: ' + str(len(st)) + ' traces')
    print('First trace has : ' + str(len(st[0].data)) + ' time pts ')
    print('Start time : ' + str(st[0].stats.starttime) + '  event time : ' +
          str(t))
    print('After decimation: ' + str(len(st)) + ' traces')
    nt = len(st[0].data)
    dt = st[0].stats.delta
    print('First trace has : ' + str(nt) + ' time pts, time sampling of ' +
          str(dt) + ' and thus duration of ' + str((nt - 1) * dt))
    #	print(f'Sta lat-lon {stalat:.4f}  {stalon:.4f}')

    #%% Select by distance, window and adjust start time to align picked times
    st_pickalign = Stream()

    tra_located = 0
    tra_in_range = 0
    tra_sta_found = 0
    if auto_dist != 0:  # set plot limit automatically
        min_dist_auto = 180
        max_dist_auto = 0
#	for ii in station_index:
#		print('Station name of index ' + str(ii) + ' is ' + str(st_names[ii])) # enumerate stations
#	for tr in st: # traces one by one, find lat-lon by searching entire inventory.  Inefficient
#		print('Station name of tr ' + str(tr.stats.station)) # enumerate stations
    for tr in st:  # traces one by one, find lat-lon by searching entire inventory.  Inefficient
        if float(
                year
        ) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
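            # e.g. a mis-tagged start time of '2069-05-14T12:00:00' is rewritten
            # to '1969-05-14T12:00:00' by overwriting the century digits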
        for ii in station_index:
            if ARRAY == 0:  # have to chop off last letter, always 'h'
                this_name = st_names[ii]
                this_name_truc = this_name[0:5]
                name_truc_cap = this_name_truc.upper()
            elif ARRAY == 1:
                name_truc_cap = st_names[ii]
            if (tr.stats.station == name_truc_cap
                ):  # find station in inventory
                tra_sta_found += 1
                if stat_corr != 1 or float(
                        st_corr[ii]
                ) > corr_threshold:  # if using statics, reject low correlations
                    stalat = float(st_lats[ii])
                    stalon = float(
                        st_lons[ii]
                    )  # look up lat & lon again to find distance

                    # only used if rel_slow == 1:
                    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, ev_lat,
                                                    ev_lon)
                    ref1_dist = ref_distance[0] / (1000 * 111)
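                    # gps2dist_azimuth returns (distance in m, azimuth, back azimuth);
                    # dividing by 1000 * 111 converts meters to degrees (1 degree is ~111 km)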

                    distance = gps2dist_azimuth(
                        stalat, stalon, ev_lat,
                        ev_lon)  # Get traveltimes again, hard to store
                    tr.stats.distance = distance[0]  # distance in km
                    dist = distance[0] / (1000 * 111)

                    if ref_loc != 1:
                        tra_located += 1
                        if min_dist < dist and dist < max_dist:  # select distance range from earthquake
                            tra_in_range += 1
                            try:
                                if rel_slow == 0:
                                    arrivals = model.get_travel_times(
                                        source_depth_in_km=ev_depth,
                                        distance_in_degree=dist,
                                        phase_list=[dphase])
                                else:
                                    arrivals = model.get_travel_times(
                                        source_depth_in_km=ev_depth,
                                        distance_in_degree=ref1_dist,
                                        phase_list=[dphase])
                                atime = arrivals[0].time
                                if stat_corr == 1:  # apply static station corrections
                                    tr.stats.starttime -= float(st_shift[ii])
                                if rel_time == 1:
                                    s_t = t + atime + start_buff
                                    e_t = t + atime + end_buff
                                else:
                                    s_t = t + start_buff
                                    e_t = t + end_buff
                                tr.trim(starttime=s_t, endtime=e_t)
                                # deduct theoretical traveltime and start_buf from starttime
                                if rel_time == 1:
                                    tr.stats.starttime -= atime
                                st_pickalign += tr
                            except:
                                pass
                    elif ref_loc == 1:
                        # only used if ref_loc == 1:
                        ref_distance = gps2dist_azimuth(
                            ref_lat, ref_lon, stalat, stalon)
                        ref2_dist = ref_distance[0] / (1000 * 111)
                        #						print('ref_rad ' + str(ref_rad) + ' ref2_dist ' + str(ref2_dist))
                        if ref2_dist < ref_rad:  # alternatively, select based on distance from ref location
                            try:
                                if rel_slow == 0:
                                    arrivals = model.get_travel_times(
                                        source_depth_in_km=ev_depth,
                                        distance_in_degree=dist,
                                        phase_list=[dphase])
                                else:
                                    arrivals = model.get_travel_times(
                                        source_depth_in_km=ev_depth,
                                        distance_in_degree=ref1_dist,
                                        phase_list=[dphase])
                                atime = arrivals[0].time
                                if stat_corr == 1:  # apply static station corrections
                                    tr.stats.starttime -= float(st_shift[ii])
                                if rel_time == 1:
                                    s_t = t + atime + start_buff
                                    e_t = t + atime + end_buff
                                else:
                                    s_t = t + start_buff
                                    e_t = t + end_buff
                                tr.trim(starttime=s_t, endtime=e_t)
                                # deduct theoretical traveltime and start_buf from starttime
                                if rel_time == 1:
                                    tr.stats.starttime -= atime
                                st_pickalign += tr
                            except:
                                pass
    print('After alignment + range and correlation selection - event: ' +
          str(len(st_pickalign)) + ' traces')
    print('Traces found: ' + str(tra_sta_found) +
          ' traces with range examined: ' + str(tra_located) +
          ' traces in range: ' + str(tra_in_range))
    print('distance: ' + str(dist) + ' ref1_distance: ' + str(ref1_dist) +
          ' atime: ' + str(atime))
    print('ref_lat: ' + str(ref_lat) + ' ref_lon: ' + str(ref_lon))
    print('stalat: ' + str(stalat) + ' stalon: ' + str(stalon))

    #print(st) # at length
    if verbose:
        print(st.__str__(extended=True))
        if rel_time == 1:
            print(st_pickalign.__str__(extended=True))

#%%  Detrend, taper, filter
    st_pickalign.detrend(type='simple')
    print('taper_frac is ' + str(taper_frac))
    st_pickalign.taper(taper_frac)
    if do_filt == 1:
        st_pickalign.filter('bandpass',
                            freqmin=freq_min,
                            freqmax=freq_max,
                            corners=4,
                            zerophase=True)
    st_pickalign.taper(taper_frac)

    #%%  Cull further by imposing SNR threshold on both traces
    if skip_SNR == 1:
        stgood = st_pickalign.copy()
    else:
        stgood = Stream()
        for tr in st_pickalign:
            # estimate median noise
            t_noise_start = int(len(tr.data) * taper_frac)
            t_noise_end = int(
                len(tr.data) * start_buff / (start_buff - end_buff))
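            # e.g. with the defaults start_buff=-10 and end_buff=30, the noise
            # window spans 5% (taper end) to 25% (expected arrival) of the trace,
            # and the signal window below spans 25% to 50% (10 s of the 40 s window)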
            noise = np.median(abs(tr.data[t_noise_start:t_noise_end]))
            # estimate median signal
            t_signal_start = int(
                len(tr.data) * start_buff / (start_buff - end_buff))
            t_signal_end = t_signal_start + int(
                len(tr.data) * signal_dur / (end_buff - start_buff))
            signal = np.median(abs(tr.data[t_signal_start:t_signal_end]))
            #			test SNR
            SNR = signal / noise
            if (SNR > qual_threshold):
                stgood += tr

    print('Above SNR threshold: ' + str(len(stgood)) + ' traces')
    if verbose:
        for tr in stgood:
            print('Distance is ' + str(tr.stats.distance / (1000 * 111)) +
                  ' for station ' + tr.stats.station)

    #%%  get station lat-lon, compute distance for plot
    for tr in stgood:
        for ii in station_index:
            if (tr.stats.station == st_names[ii]):  # find station in inventory
                stalon = float(
                    st_lons[ii])  # look up lat & lon again to find distance
                stalat = float(st_lats[ii])
                distance = gps2dist_azimuth(stalat, stalon, ev_lat, ev_lon)
                tr.stats.distance = distance[0] / (1000 * 111
                                                   )  # distance in degrees
                if auto_dist != 0:
                    if tr.stats.distance < min_dist_auto:
                        min_dist_auto = tr.stats.distance
                    if tr.stats.distance > max_dist_auto:
                        max_dist_auto = tr.stats.distance

#%%  This section causes a crash in Spyder
# plot traces
    plt.close(fig_index)
    plt.figure(fig_index, figsize=(10, 10))
    plt.xlim(start_buff, end_buff)

    if auto_dist == 1:
        dist_diff = max_dist_auto - min_dist_auto  # add space at extremes
        plt.ylim(min_dist_auto - 0.1 * dist_diff,
                 max_dist_auto + 0.1 * dist_diff)
    else:
        plt.ylim(min_dist, max_dist)

    for tr in stgood:
        dist_offset = tr.stats.distance
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                 (tr.data.max() - tr.data.min()) + dist_offset,
                 color='black')
#%% Plot traveltime curves
    if plot_tt:
        # first traveltime curve
        line_pts = 50
        dist_vec = np.arange(min_dist, max_dist,
                             (max_dist - min_dist) / line_pts)  # distance grid
        time_vec1 = np.arange(
            min_dist, max_dist, (max_dist - min_dist) /
            line_pts)  # placeholder time grid of same length (values overwritten below)
        for i in range(0, line_pts):
            arrivals = model.get_travel_times(source_depth_in_km=ev_depth,
                                              distance_in_degree=dist_vec[i],
                                              phase_list=[dphase])
            num_arrivals = len(arrivals)
            found_it = 0
            for j in range(0, num_arrivals):
                if arrivals[j].name == dphase:
                    time_vec1[i] = arrivals[j].time
                    found_it = 1
            if found_it == 0:
                time_vec1[i] = np.nan
        # second traveltime curve
        if dphase2 != 'no':
            time_vec2 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # placeholder time grid of same length (values overwritten below)
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase2])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase2:
                        time_vec2[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec2[i] = np.nan
            if rel_time == 1:
                time_vec2 = time_vec2 - time_vec1
            plt.plot(time_vec2, dist_vec, color='orange')
        # third traveltime curve
        if dphase3 != 'no':
            time_vec3 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # placeholder time grid of same length (values overwritten below)
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase3])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase3:
                        time_vec3[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec3[i] = np.nan
            if rel_time == 1:
                time_vec3 = time_vec3 - time_vec1
            plt.plot(time_vec3, dist_vec, color='yellow')
        # fourth traveltime curve
        if dphase4 != 'no':
            time_vec4 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # placeholder time grid of same length (values overwritten below)
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase4])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase4:
                        time_vec4[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec4[i] = np.nan
            if rel_time == 1:
                time_vec4 = time_vec4 - time_vec1
            plt.plot(time_vec4, dist_vec, color='purple')

        if rel_time == 1:
            time_vec1 = time_vec1 - time_vec1
        plt.plot(time_vec1, dist_vec, color='blue')
        plt.show()

    plt.xlabel('Time (s)')
    plt.ylabel('Epicentral distance from event (°)')
    if ARRAY == 0:
        plt.title(dphase + ' for ' + fname)
    elif ARRAY == 1:
        plt.title(dphase + ' for ' + fname[8:18] + ' event # ' + str(event_no))
    os.chdir('/Users/vidale/Documents/PyCode/LASA/Quake_results/Plots')
    #	plt.savefig(date_label + '_' + str(event_no) + '_raw.png')
    plt.show()

    #%%  Save processed files
    if ARRAY == 0:
        fname3 = '/Users/vidale/Documents/PyCode/LASA/HD' + date_label + 'sel.mseed'
    elif ARRAY == 1:
        fname3 = '/Users/vidale/Documents/PyCode/LASA/Pro_Files/HD' + date_label + 'sel.mseed'

    stgood.write(fname3, format='MSEED')

    elapsed_time_wc = time.time() - start_time_wc
    print('This job took ' + str(elapsed_time_wc) + ' seconds')
    os.system('say "Done"')
Example #10
0
                            "latitude"] = station.latitude
                        st[y].stats["coordinates"][
                            "longitude"] = station.longitude
                        distance = locations2degrees(EQLAT, EQLON,
                                                     station.latitude,
                                                     station.longitude)
                        st[y].stats["distance"] = distance
# sort the traces by distance and subsample them, leaving at least SEPARATION between them and copying the desired traces into st2
st.sort(keys=['distance'])  # sort the traces by distance from the epicentre
st2 = Stream()  # set up a blank stream object for data
last_distance = -10  # variable to record the last distance copied to the stream for the plot
for trace in st:
    if last_distance + SEPARATION <= trace.stats.distance:  # only copy traces that are >= SEPARATION from the previous copied trace
        st2 += trace.copy()
        last_distance = trace.stats.distance
print(st2.__str__(extended=True))
# filter
st2.filter("bandpass", freqmin=F1, freqmax=F2, corners=2, zerophase=True)
# Create the section plot
fig = plt.figure(figsize=(16, 12), dpi=80)
plt.title('Section plot for ' + EQNAME + " " + str(START_TIME.date) + " " +
          str(START_TIME.time) + " lat:" + str(EQLAT) + " lon:" + str(EQLON),
          fontsize=12,
          y=1.07)
# plot the data
st2.plot(size=(960, 720),
         type='section',
         recordlength=DURATION,
         linewidth=1.5,
         grid_linewidth=.5,
         show=False,
Example #11
0
def pro3singlet(eq_num,
                stat_corr=1,
                rel_time=1,
                max_taper_length=5.,
                simple_taper=0,
                skip_SNR=0,
                dphase='P',
                dphase2='',
                dphase3='',
                dphase4='',
                start_buff=-10,
                end_buff=10,
                start_beam=0,
                end_beam=0,
                plot_scale_fac=0.2,
                qual_threshold=0,
                corr_threshold=0,
                freq_min=0.25,
                freq_max=1,
                do_filt=1,
                min_dist=0,
                max_dist=180,
                auto_dist=True,
                do_decimate=0,
                alt_statics=0,
                statics_file='nothing',
                ARRAY=0,
                JST=0,
                ref_loc=0,
                ref_rad=0.4,
                verbose=0,
                fig_index=101,
                event_no=0):
    # 0 is Hinet, 1 is LASA, 2 is NORSAR
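    # Hypothetical usage (eq_num selects an existing event file; values illustrative only):
    #   pro3singlet(eq_num=1, ARRAY=1, dphase='P', start_buff=-10, end_buff=10)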

    #%% Import functions
    from obspy import UTCDateTime
    from obspy import Stream
    from obspy import read
    from obspy.geodetics import gps2dist_azimuth
    import numpy as np
    import os
    import sys
    from obspy.taup import TauPyModel
    import matplotlib.pyplot as plt
    import time
    from termcolor import colored
    model = TauPyModel(model='iasp91')

    #    import sys # don't show any warnings
    #    import warnings
    #
    #    if not sys.warnoptions:
    #        warnings.simplefilter("ignore")

    print(colored('Running pro3b_sort_plot_singlet', 'cyan'))
    start_time_wc = time.time()

    #%% Get saved event info, also used to name files
    #  input event data with 1-line file of format
    #  event 2016-05-28T09:47:00.000 -56.241 -26.935 78
    fname = '/Users/vidale/Documents/Research/IC/EvLocs/event' + str(
        eq_num) + '.txt'
    print('Opening ' + fname)
    with open(fname, 'r') as file:
        lines = file.readlines()
    split_line = lines[0].split()
    #            ids.append(split_line[0])  ignore label for now
    t = UTCDateTime(split_line[1])
    date_label = split_line[1][0:10]
    year_label = split_line[1][0:4]
    year_short_label = split_line[1][2:4]
    month_label = split_line[1][5:7]
    day_label = split_line[1][8:10]
    hour_label = split_line[1][11:13]
    minute_label = split_line[1][14:16]
    print(date_label + ' year_label ' + year_label + ' hour_label ' +
          hour_label + ' min_label ' + minute_label)
    ev_lat = float(split_line[2])
    ev_lon = float(split_line[3])
    ev_depth = float(split_line[4])
    print('        date_label ' + date_label + ' time ' + str(t) + ' lat ' +
          str(ev_lat) + ' lon ' + str(ev_lon) + ' depth ' + str(ev_depth))

    #%% Get station location file
    if stat_corr == 1:  # load static terms, only applies to Hinet, LASA, and China
        if ARRAY == 0:
            if alt_statics == 0:  # standard set
                sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_hinet.txt'
            else:  # custom statics made by pro4 for this event
                sta_file = (
                    '/Users/vidale/Documents/PyCode/Hinet/Array_codes/Files/' +
                    'HA' + date_label[:10] + 'pro4_' + dphase + '.statics')
        elif ARRAY == 1:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_LASA.txt'
        elif ARRAY == 2:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_statics_ch.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print('    ' + str(len(lines)) + ' coarse station statics read from ' +
              sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_dist = []
        st_lats = []
        st_lons = []
        st_shift = []
        st_corr = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            if ARRAY == 0 or ARRAY == 1:
                st_dist.append(split_line[1])
                st_lats.append(split_line[2])
                st_lons.append(split_line[3])
                st_shift.append(split_line[4])
                st_corr.append(split_line[5])
            elif ARRAY == 2:
                st_lats.append(split_line[1])
                st_lons.append(split_line[2])
                st_shift.append(split_line[3])
                st_corr.append(split_line[4])  # but really std dev

    else:  # no static terms, always true for NORSAR
        if ARRAY == 0:  # Hinet set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_hinet.txt'
        elif ARRAY == 1:  #         LASA set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_LASA.txt'
        elif ARRAY == 2:  #         China set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_ch.txt'
        else:  #         NORSAR set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/sta_NORSAR.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print('    ' + str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_lats = []
        st_lons = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_lats.append(split_line[1])
            st_lons.append(split_line[2])
    if ARRAY == 0:  # shorten and make upper case Hi-net station names to match station list
        for ii in station_index:
            this_name = st_names[ii]
            this_name_truc = this_name[0:5]
            st_names[ii] = this_name_truc.upper()

#%%  Set some parameters
#    fig_index = 101
#    stat_corr = 1 # apply station static corrections
#    rel_time = 1          # timing is relative to a chosen phase, otherwise relative to OT
#    dphase  = 'PKIKP'       # phase to be aligned
#    dphase2 = 'PKiKP'      # another phase to have traveltime plotted
#    dphase3 = 'PKP'        # another phase to have traveltime plotted
#    dphase4 = 'pP'        # another phase to have traveltime plotted
    taper_frac = .05  # Fraction of window tapered on both ends
    noise_win_max = 20  # maximum length of noise window for SNR estimation, seconds
    #    plot_scale_fac = 0.5    #  Bigger numbers make each trace amplitude bigger on plot
    #    qual_threshold =  2 # minimum SNR
    #    corr_threshold = 0.7  # minimum correlation in measuring shift to use station
    plot_tt = 1  # plot the traveltimes?
    # if ref_loc ==true,  use ref_rad        to filter station distance
    # if ref_loc ==false, use earthquake loc to filter station distance
    #    ref_rad = 0.4    # ° radius (°) set by input or at top
    if ARRAY == 0:
        ref_lat = 36.3  # °N, around middle of Japan
        ref_lon = 138.5  # °E
    if ARRAY == 1:
        ref_lat = 46.7  # °N keep only inner rings A-D if radius is 0.4°
        ref_lon = -106.22  # °E
    if ARRAY == 2:
        ref_lat = 38  # °N
        ref_lon = 104.5  # °E
#        ref_rad = 0.4    # ° radius (°) set by input or at top

#%% Is taper too long compared to noise estimation window?
    totalt = end_buff - start_buff
    noise_time_skipped = taper_frac * totalt
    noise_time_skipped = min(noise_time_skipped,
                             10.0)  # set max of 10s to taper length
    if simple_taper == 0:
        if noise_time_skipped >= -0.5 * start_buff:
            print(
                '        ' + 'Specified taper of ' + str(taper_frac * totalt) +
                ' is too long for the available noise estimation window of ' +
                str(-start_buff - noise_time_skipped) +
                ' s. May not work well.')
            old_taper_frac = taper_frac
            taper_frac = -0.5 * start_buff / totalt
            if start_buff > 0:
                taper_frac = 0.05  # fall back to a minimal taper if there is no leader
            print('        ' + 'Taper reset from ' +
                  str(old_taper_frac * totalt) + ' to ' +
                  str(taper_frac * totalt) + ' seconds.')

    if rel_time == 0:  # SNR requirement not implemented for unaligned traces
        qual_threshold = 0

    # Plot with reduced velocity?
    red_plot = 0
    red_dist = 55
    red_time = 300
    red_slow = 7.2  # seconds per degree

    #%% Load waveforms and decimate to 10 sps, if not already decimated
    st = Stream()
    # fname     = '/Users/vidale/Documents/GitHub/LASA_data/HD' + date_label + '.mseed'
    mseed_name = year_short_label + month_label + day_label + '_' + hour_label + minute_label
    fname = '/Users/vidale/Documents/Research/IC/Mseed/L' + mseed_name + '.mseed'
    print('file name attempt: ' + fname)
    st = read(fname)
    if do_decimate != 0:
        st.decimate(do_decimate, no_filter=True)

    print('        ' + fname)
    print('    ' + str(len(st)) + '  traces read in')
    if len(st) == 0:
        sys.exit('No traces read in')
    print('        First trace has : ' + str(len(st[0].data)) + ' time pts ')
    print('        Start time : ' + str(st[0].stats.starttime) +
          '  event time : ' + str(t))
    # print('    ' + str(len(st)) + '  traces after decimation')
    nt = len(st[0].data)
    dt = st[0].stats.delta
    print('        First trace has : ' + str(nt) +
          ' time pts, time sampling of ' + str(dt) + ' and thus duration of ' +
          str((nt - 1) * dt))

    #%% Select by distance, window and adjust start time to align picked times
    st_pickalign = Stream()

    tra_in_range = 0
    tra_sta_found = 0
    nodata = 0
    min_dist_auto = 180
    max_dist_auto = 0
    min_time_plot = 1000000
    max_time_plot = -1000000

    # not used in all cases, but printed out below
    # only used if rel_slow == 1, preserves 0 slowness, otherwise 0 is set to phase slowness
    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, ev_lat, ev_lon)
    ref1_dist = ref_distance[0] / (1000 * 111)
    dist_minus = ref1_dist - 0.5
    dist_plus = ref1_dist + 0.5
    arrivals_ref = model.get_travel_times(source_depth_in_km=ev_depth,
                                          distance_in_degree=ref1_dist,
                                          phase_list=[dphase])
    if (len(arrivals_ref) == 0 and ref1_dist < 10 and dphase
            == 'P'):  # in case first arrival is upgoing P, which is 'p'
        arrivals_ref = model.get_travel_times(source_depth_in_km=ev_depth,
                                              distance_in_degree=ref1_dist,
                                              phase_list='p')
    arrivals_minus = model.get_travel_times(source_depth_in_km=ev_depth,
                                            distance_in_degree=dist_minus,
                                            phase_list=[dphase])
    if (len(arrivals_minus) == 0 and dist_minus < 10 and dphase == 'P'):
        arrivals_minus = model.get_travel_times(source_depth_in_km=ev_depth,
                                                distance_in_degree=dist_minus,
                                                phase_list='p')
    arrivals_plus = model.get_travel_times(source_depth_in_km=ev_depth,
                                           distance_in_degree=dist_plus,
                                           phase_list=[dphase])
    if (len(arrivals_plus) == 0 and dist_plus < 10 and dphase == 'P'):
        arrivals_plus = model.get_travel_times(source_depth_in_km=ev_depth,
                                               distance_in_degree=dist_plus,
                                               phase_list='p')
    if (len(arrivals_ref) == 0 or len(arrivals_minus) == 0
            or len(arrivals_plus) == 0):
        print('model.get_travel_times failed: dist, phase  ' + str(ref1_dist) +
              '   ' + dphase)

    atime_ref = arrivals_ref[
        0].time  # phase arrival time at reference distance
    ref_slow = arrivals_plus[0].time - arrivals_minus[
        0].time  # dt over 1 degree at ref distance

    for tr in st:  # traces one by one, find lat-lon
        if float(
                year_label
        ) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
        if JST == 1:  # if necessary, convert JST -> UTC, time in Greenwich 9 hours earlier than Japan
            tr.stats.starttime = tr.stats.starttime - 9 * 60 * 60
#            temp_t = str(tr.stats.starttime)
#            temp_tt = '19' + temp_t[2:]
#            tr.stats.starttime = UTCDateTime(temp_tt)
#            times.append(   tr.stats.starttime.strptime(split_line[1], dtformat) - dt.timedelta(seconds=offset))
#            timesUTC.append(dt.datetime.strptime(split_line[1], dtformat)) # keep UTC version for output
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            tra_sta_found += 1

            corr = 1
            if stat_corr == 1:
                corr = float(st_corr[ii])

            if corr > corr_threshold:  # if using statics, reject low correlations
                stalat = float(
                    st_lats[ii])  # look up lat & lon again to find distance
                stalon = float(st_lons[ii])

                distance = gps2dist_azimuth(
                    stalat, stalon, ev_lat,
                    ev_lon)  # Get traveltimes again, hard to store
                tr.stats.distance = distance[0]  # distance in km
                dist = distance[0] / (1000 * 111)

                in_range = 0  # flag for whether this trace goes into stack
                rejector = False  # flag in case model.get_travel_times fails

                if not ref_loc:  # check whether trace is in distance range from earthquake
                    if min_dist < dist and dist < max_dist:
                        in_range = 1
                        tra_in_range += 1
                else:  # alternately, check whether trace is close enough to ref_location
                    ref_distance = gps2dist_azimuth(ref_lat, ref_lon, stalat,
                                                    stalon)
                    ref2_dist = ref_distance[0] / (1000 * 111)
                    if ref2_dist < ref_rad:
                        in_range = 1
                        tra_in_range += 1
                if in_range == 1:  # trace fulfills the specified criteria for being in range
                    s_t = t + start_buff
                    e_t = t + end_buff
                    if stat_corr == 1:  # apply static station corrections
                        tr.stats.starttime -= float(st_shift[ii])
                    if rel_time == 0:  #  don't adjust absolute time
                        tr.trim(starttime=s_t, endtime=e_t)
                    else:  # shift relative to a chosen phase
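                        # rel_time modes, as implemented in the branches below:
                        #   1 - window follows the phase; zero time is a moveout line at ref_slow
                        #   2 - window follows the phase; common offset atime_ref for all stations
                        #   3 - window follows the phase; each trace's phase arrival set to zero
                        #   4 - same window (at the reference distance) for all; arrival at zero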
                        arrivals_each = model.get_travel_times(
                            source_depth_in_km=ev_depth,
                            distance_in_degree=dist,
                            phase_list=[dphase])
                        if (len(arrivals_each) == 0):
                            if (
                                    dist < 10 and dphase == 'P'
                            ):  # in case first arrival is upgoing P, try 'p'
                                arrivals_each = model.get_travel_times(
                                    source_depth_in_km=ev_depth,
                                    distance_in_degree=dist,
                                    phase_list='p')
                        if (len(arrivals_each) == 0):  # did it still fail?
                            print(
                                'model.get_travel_times failed: dist, depth, phase  '
                                + tr.stats.station + '   ' + str(ref1_dist) +
                                '   ' + '   ' + str(ev_depth) + '   ' + dphase)
                            tra_in_range -= 1  # don't count this trace after all
                            rejector = True
                        else:
                            atime_each = arrivals_each[0].time
                            if rel_time == 1:  # each window has a shift proportional to ref_dist at phase slowness at ref_dist
                                s_t += atime_each
                                e_t += atime_each
                                tr.trim(starttime=s_t, endtime=e_t)
                                tr.stats.starttime -= atime_each - (
                                    dist - ref1_dist) * ref_slow
                            elif rel_time == 2:  # each window has a distinct shift, but offset is common to all stations
                                s_t += atime_each
                                e_t += atime_each
                                tr.trim(starttime=s_t, endtime=e_t)
                                tr.stats.starttime -= atime_ref
                            elif rel_time == 3:  # each station has an individual, chosen-phase shift, phase arrival set to zero
                                s_t += atime_each
                                e_t += atime_each
                                tr.trim(starttime=s_t, endtime=e_t)
                                tr.stats.starttime -= atime_each
                            elif rel_time == 4:  # use same window around chosen phase for all stations, phase arrival set to zero
                                s_t += atime_ref
                                e_t += atime_ref
                                tr.trim(starttime=s_t, endtime=e_t)
                                tr.stats.starttime -= atime_ref
                            else:
                                sys.exit(
                                    'Invalid rel_time parameter, must be integer 0 to 4'
                                )
                    if len(tr.data) > 0 and not rejector:
                        st_pickalign += tr
                    else:
                        nodata += 1
        else:
            print(tr.stats.station + ' not found in station list with statics')
    print('    ' + str(tra_in_range) + '  traces in range')
    print('    ' + str(len(st_pickalign)) +
          '  traces after alignment and correlation selection')
    print('    ' + str(nodata) + '  traces with no data')

    #print(st) # at length
    if verbose:
        print(st.__str__(extended=True))
        if rel_time == 1:
            print(st_pickalign.__str__(extended=True))

#%%  Detrend, taper, filter
    st_pickalign.detrend(type='simple')
    st_pickalign.taper(taper_frac, max_length=max_taper_length)
    if do_filt == 1:
        st_pickalign.filter('bandpass',
                            freqmin=freq_min,
                            freqmax=freq_max,
                            corners=4,
                            zerophase=True)
    st_pickalign.taper(taper_frac, max_length=max_taper_length)

    #%%  Cull further by imposing SNR threshold
    if skip_SNR == 1:
        stgood = st_pickalign.copy()
    else:
        stgood = Stream()
        for tr in st_pickalign:
            # estimate median noise
            time_to_beam_start = (start_beam - start_buff)
            if time_to_beam_start - taper_frac * (
                    end_buff -
                    start_buff) < noise_win_max:  # noise window < max length
                t_noise_start = int(len(tr.data) *
                                    taper_frac)  # start just after taper
                t_noise_end = int(
                    len(tr.data) * time_to_beam_start /
                    (end_buff - start_buff))  # end at beam start
            else:  # plenty of leader, set noise window to max length
                time_to_noise_start = time_to_beam_start - noise_win_max
                t_noise_start = int(
                    len(tr.data) * time_to_noise_start /
                    (end_buff - start_buff))  # start just after taper
                t_noise_end = int(
                    len(tr.data) * time_to_beam_start /
                    (end_buff - start_buff))  # end at beam start
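            # e.g. with the defaults start_buff=-10, end_buff=10, start_beam=0:
            # time_to_beam_start = 10 s and the 5% taper spans 1 s, so 10 - 1 = 9 s
            # < noise_win_max and the first branch above (taper end to beam start) is used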
            noise = np.median(abs(tr.data[t_noise_start:t_noise_end]))

            # estimate median signal
            t_signal_start = t_noise_end
            t_signal_end = int(
                len(tr.data) * (end_beam - start_buff) /
                (end_buff - start_buff))
            # old            t_signal_start = int(len(tr.data) * start_buff/(start_buff-end_buff))
            # old            t_signal_end   = t_signal_start + int(len(tr.data) * signal_dur/(end_buff - start_buff))
            signal = np.median(abs(tr.data[t_signal_start:t_signal_end]))

            # test SNR
            SNR = signal / noise
            #            print('set noise window to max length: ' + str(t_noise_start) + ' start   ' + str(t_noise_end) + ' end')
            #            print('set signal window: ' + str(t_signal_start) + ' start   ' + str(t_signal_end) + ' end')
            #            print('SNR: ' + str(SNR))
            if (SNR > qual_threshold):
                stgood += tr

    print('    ' + str(len(stgood)) + '  traces above SNR threshold')
    if verbose:
        for tr in stgood:
            print('        Distance is ' + str(tr.stats.distance /
                                               (1000 * 111)) +
                  ' for station ' + tr.stats.station)

    #%%  get station lat-lon, compute distance and time limits for plot
    for tr in stgood:
        if tr.stats.station in st_names:  # find station in station list
            ii = st_names.index(tr.stats.station)
            stalon = float(
                st_lons[ii])  # look up lat & lon again to find distance
            stalat = float(st_lats[ii])
            distance = gps2dist_azimuth(stalat, stalon, ev_lat, ev_lon)
            tr.stats.distance = distance[0] / (1000 * 111
                                               )  # distance in degrees
            if tr.stats.distance < min_dist_auto:
                min_dist_auto = tr.stats.distance
            if tr.stats.distance > max_dist_auto:
                max_dist_auto = tr.stats.distance
            if tr.stats.starttime - t < min_time_plot:
                min_time_plot = tr.stats.starttime - t
            if ((tr.stats.starttime - t) +
                ((len(tr.data) - 1) * tr.stats.delta)) > max_time_plot:
                max_time_plot = ((tr.stats.starttime - t) +
                                 ((len(tr.data) - 1) * tr.stats.delta))
    print(
        f'        Min distance is   {min_dist_auto:.3f}   Max distance is {max_dist_auto:.3f}'
    )
    print(
        f'        Min time is   {min_time_plot:.2f}   Max time is {max_time_plot:.2f}'
    )
    if min_time_plot > start_buff:
        print(f'Min time {min_time_plot:.2f} > start_buff {start_buff:.2f}')
        print(
            colored('Write zero-filling into pro3 for this code to work',
                    'red'))
        sys.exit(-1)
    if max_time_plot < end_buff:
        print(f'Max time {max_time_plot:.2f} < end_buff {end_buff:.2f}')
        print(
            colored('Write zero-filling into pro3 for this code to work',
                    'red'))
        sys.exit(-1)

#%%  Plot traces
    plt.close(fig_index)
    plt.figure(fig_index, figsize=(10, 10))
    plt.xlim(min_time_plot, max_time_plot)

    if auto_dist:
        dist_diff = max_dist_auto - min_dist_auto  # add space at extremes
        plt.ylim(min_dist_auto - 0.1 * dist_diff,
                 max_dist_auto + 0.1 * dist_diff)
        max_dist = max_dist_auto
        min_dist = min_dist_auto
    else:
        plt.ylim(min_dist, max_dist)

    for tr in stgood:
        dist_offset = tr.stats.distance
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        if len(tr.data) > 0:
            if tr.data.max() - tr.data.min() > 0:
                plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                         (tr.data.max() - tr.data.min()) + dist_offset,
                         color='black')
            else:
                print('Max ' + str(tr.data.max()) + ' equals min ' +
                      str(tr.data.min()) + ', skip plotting')
        else:
            nodata += 1
            print('Trace ' + tr.stats.station + ' has : ' + str(len(tr.data)) +
                  ' time pts, skip plotting')
#%% Plot traveltime curves
    if rel_time != 100:
        if plot_tt:
            # first traveltime curve
            line_pts = 50
            dist_vec = np.arange(min_dist_auto, max_dist_auto,
                                 (max_dist_auto - min_dist_auto) /
                                 line_pts)  # distance grid
            time_vec1 = np.arange(
                min_dist_auto, max_dist_auto, (max_dist_auto - min_dist_auto) /
                line_pts)  # placeholder time grid of same length (values overwritten below)
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase])
                if (len(arrivals) == 0 and dist_vec[i] < 10 and dphase == 'P'
                    ):  # in case first arrival is upgoing P, which is 'p'
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth,
                        distance_in_degree=dist_vec[i],
                        phase_list='p')
                    if (len(arrivals) == 0):
                        print('model.get_travel_times failed: dist, phase  ' +
                              str(dist_vec[i]) + '   ' + dphase)
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase:
                        time_vec1[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec1[i] = np.nan
            # second traveltime curve
            if dphase2 != 'no':
                time_vec2 = np.arange(
                    min_dist_auto, max_dist_auto,
                    (max_dist_auto - min_dist_auto) / line_pts
                )  # placeholder time grid of same length (values overwritten below)
                for i in range(0, line_pts):
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth,
                        distance_in_degree=dist_vec[i],
                        phase_list=[dphase2])
                    if (len(arrivals) == 0 and dist_vec[i] < 10
                            and dphase2 == 'P'
                        ):  # in case first arrival is upgoing P, which is 'p'
                        arrivals = model.get_travel_times(
                            source_depth_in_km=ev_depth,
                            distance_in_degree=dist_vec[i],
                            phase_list='p')
                        if (len(arrivals) == 0):
                            print(
                                'model.get_travel_times failed: dist, phase  '
                                + str(dist_vec[i]) + '   ' + dphase2)
                    num_arrivals = len(arrivals)
                    found_it = 0
                    for j in range(0, num_arrivals):
                        if arrivals[j].name == dphase2:
                            time_vec2[i] = arrivals[j].time
                            found_it = 1
                    if found_it == 0:
                        time_vec2[i] = np.nan
                if rel_time == 3 or rel_time == 4:
                    time_vec2 = time_vec2 - time_vec1
                elif rel_time == 2:
                    time_vec2 = time_vec2 - atime_ref
                plt.plot(time_vec2, dist_vec, color='orange')
            # third traveltime curve
            if dphase3 != 'no':
                time_vec3 = np.arange(
                    min_dist_auto, max_dist_auto,
                    (max_dist_auto - min_dist_auto) / line_pts
                )  # placeholder time grid of same length (values overwritten below)
                for i in range(0, line_pts):
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth,
                        distance_in_degree=dist_vec[i],
                        phase_list=[dphase3])
                    if (len(arrivals) == 0 and dist_vec[i] < 10
                            and dphase3 == 'P'
                        ):  # in case first arrival is upgoing P, which is 'p'
                        arrivals = model.get_travel_times(
                            source_depth_in_km=ev_depth,
                            distance_in_degree=dist_vec[i],
                            phase_list='p')
                        if (len(arrivals) == 0):
                            print(
                                'model.get_travel_times failed: dist, phase  '
                                + str(dist_vec[i]) + '   ' + dphase3)
                    num_arrivals = len(arrivals)
                    found_it = 0
                    for j in range(0, num_arrivals):
                        if arrivals[j].name == dphase3:
                            time_vec3[i] = arrivals[j].time
                            found_it = 1
                    if found_it == 0:
                        time_vec3[i] = np.nan
                if rel_time == 3 or rel_time == 4:
                    time_vec3 = time_vec3 - time_vec1
                elif rel_time == 2:
                    time_vec3 = time_vec3 - atime_ref
                plt.plot(time_vec3, dist_vec, color='yellow')
            # fourth traveltime curve
            if dphase4 != 'no':
                time_vec4 = np.arange(
                    min_dist_auto, max_dist_auto,
                    (max_dist_auto - min_dist_auto) / line_pts
                )  # placeholder time grid of same length (values overwritten below)
                for i in range(0, line_pts):
                    arrivals = model.get_travel_times(
                        source_depth_in_km=ev_depth,
                        distance_in_degree=dist_vec[i],
                        phase_list=[dphase4])
                    if (len(arrivals) == 0 and dist_vec[i] < 10
                            and dphase4 == 'P'
                        ):  # in case first arrival is upgoing P, which is 'p'
                        arrivals = model.get_travel_times(
                            source_depth_in_km=ev_depth,
                            distance_in_degree=dist_vec[i],
                            phase_list='p')
                        if (len(arrivals) == 0):
                            print(
                                'model.get_travel_times failed: dist, phase  '
                                + str(dist_vec[i]) + '   ' + dphase4)
                    num_arrivals = len(arrivals)
                    found_it = 0
                    for j in range(0, num_arrivals):
                        if arrivals[j].name == dphase4:
                            time_vec4[i] = arrivals[j].time
                            found_it = 1
                    if found_it == 0:
                        time_vec4[i] = np.nan
                if rel_time == 3 or rel_time == 4:
                    time_vec4 = time_vec4 - time_vec1
                elif rel_time == 2:
                    time_vec4 = time_vec4 - atime_ref
                plt.plot(time_vec4, dist_vec, color='purple')

            if rel_time == 3 or rel_time == 4:
                time_vec1 = time_vec1 - time_vec1
            elif rel_time == 2:
                time_vec1 = time_vec1 - atime_ref
            plt.plot(time_vec1, dist_vec, color='blue')
            plt.xlabel('Time (s)')
            plt.ylabel('Epicentral distance from event (°)')
            plt.title(date_label + ' event #' + str(eq_num))
            #    os.chdir('/Users/vidale/Documents/PyCode/Plots')
            #    plt.savefig(date_label + '_' + str(event_no) + '_raw.png')
            plt.show()

#%%  Save processed files
    fname3 = '/Users/vidale/Documents/Research/IC/Pro_Files/HD' + date_label + 'sel.mseed'

    stgood.write(fname3, format='MSEED')

    elapsed_time_wc = time.time() - start_time_wc
    print(f'    This job took   {elapsed_time_wc:.1f}   seconds')
    os.system('say "Done"')
Example #12
0
def gather_waveforms(source,
                     network,
                     station,
                     location,
                     channel,
                     starttime,
                     endtime,
                     time_buffer=0,
                     merge=True,
                     remove_response=False,
                     return_failed_stations=False,
                     watc_url=None,
                     watc_username=None,
                     watc_password=None):
    """
    Gather seismic/infrasound waveforms from IRIS or WATC FDSN, or AVO Winston,
    and output a Stream object with station/element coordinates attached.
    Optionally remove the sensitivity.

    NOTE 1:
        Usual RTM usage is to specify a starttime/endtime that brackets the
        estimated source origin time. Then time_buffer is used to download
        enough extra data to account for the time required for an infrasound
        signal to propagate to the farthest station.

    Args:
        source: Which source to gather waveforms from - options are:
                'IRIS' <-- IRIS FDSN
                'WATC' <-- WATC FDSN
                'AVO'  <-- AVO Winston
        network: SEED network code
        station: SEED station code
        location: SEED location code
        channel: SEED channel code
        starttime: Start time for data request (UTCDateTime)
        endtime: End time for data request (UTCDateTime)
        time_buffer: [s] Extra amount of data to download after endtime
                     (default: 0)
        merge: Toggle merging of Traces with identical IDs (default: True)
        remove_response: Toggle response removal via remove_sensitivity() or a
                         simple scalar multiplication (default: False)
        return_failed_stations: If True, returns a list of station codes that
                                were requested but not downloaded. This
                                disables the standard failed station warning
                                message (default: False)
        watc_url: URL for WATC FDSN server (default: None)
        watc_username: Username for WATC FDSN server (default: None)
        watc_password: Password for WATC FDSN server (default: None)
    Returns:
        st_out: Stream containing gathered waveforms
        failed_stations: (Optional) List containing station codes that were
                         requested but not downloaded
    """

    print('--------------')
    print('GATHERING DATA')
    print('--------------')

    # IRIS FDSN
    if source == 'IRIS':

        client = FDSN_Client('IRIS')
        print('Reading data from IRIS FDSN...')
        try:
            st_out = client.get_waveforms(network,
                                          station,
                                          location,
                                          channel,
                                          starttime,
                                          endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # WATC FDSN
    elif source == 'WATC':

        print('Connecting to WATC FDSN...')
        client = FDSN_Client(base_url=watc_url,
                             user=watc_username,
                             password=watc_password)
        print('Successfully connected. Reading data from WATC FDSN...')
        try:
            st_out = client.get_waveforms(network,
                                          station,
                                          location,
                                          channel,
                                          starttime,
                                          endtime + time_buffer,
                                          attach_response=True)
        except FDSNNoDataException:
            st_out = Stream()  # Just create an empty Stream object

    # AVO Winston
    elif source == 'AVO':

        client = EW_Client('pubavo1.wr.usgs.gov',
                           port=16023)  # 16023 is long-term
        print('Reading data from AVO Winston...')
        st_out = Stream()  # Make empty Stream object to populate

        # Brute-force "dynamic grid search" over network/station/channel/location codes
        for nw in _restricted_matching('network', network, client):
            for sta in _restricted_matching('station',
                                            station,
                                            client,
                                            network=nw):
                for cha in _restricted_matching('channel',
                                                channel,
                                                client,
                                                network=nw,
                                                station=sta):
                    for loc in _restricted_matching('location',
                                                    location,
                                                    client,
                                                    network=nw,
                                                    station=sta,
                                                    channel=cha):
                        try:
                            st_out += client.get_waveforms(
                                nw, sta, loc, cha, starttime,
                                endtime + time_buffer)
                        except KeyError:
                            pass

    else:
        raise ValueError('Unrecognized source. Valid options are \'IRIS\', '
                         '\'WATC\', or \'AVO\'.')

    if merge:
        st_out.merge()  # Merge Traces with the same ID
    st_out.sort()

    # Check that all requested stations are present in Stream
    requested_stations = station.split(',')
    downloaded_stations = [tr.stats.station for tr in st_out]
    failed_stations = []
    for sta in requested_stations:
        # The below check works with wildcards, but obviously cannot detect if
        # ALL stations corresponding to a given wildcard (e.g., O??K) were
        # downloaded. Thus, if careful station selection is desired, specify
        # each station explicitly and the below check will then be effective.
        if not fnmatch.filter(downloaded_stations, sta):
            if not return_failed_stations:
                # If we're not returning the failed stations, then show this
                # warning message to alert the user
                warnings.warn(
                    f'Station {sta} not downloaded from {source} '
                    'server for this time period.', CollectionWarning)
            failed_stations.append(sta)

    # If the Stream is empty, then we can stop here
    if st_out.count() == 0:
        print('No data downloaded.')
        if return_failed_stations:
            return st_out, failed_stations
        else:
            return st_out

    # Otherwise, show what the Stream contains
    print(st_out.__str__(extended=True))  # This syntax prints the WHOLE Stream

    # Add zeros to ensure all Traces have same length
    st_out.trim(starttime, endtime + time_buffer, pad=True, fill_value=0)

    print('Assigning coordinates...')

    # Use IRIS inventory info for AVO data source
    if source == 'AVO':
        client = FDSN_Client('IRIS')

    try:
        inv = client.get_stations(network=network,
                                  station=station,
                                  location=location,
                                  channel=channel,
                                  starttime=starttime,
                                  endtime=endtime + time_buffer,
                                  level='channel')
    except FDSNNoDataException:
        inv = []

    for tr in st_out:
        for nw in inv:
            for sta in nw:
                for cha in sta:
                    # Being very thorough to check if everything matches!
                    if (tr.stats.network == nw.code
                            and tr.stats.station == sta.code
                            and tr.stats.location == cha.location_code
                            and tr.stats.channel == cha.code):

                        tr.stats.longitude = cha.longitude
                        tr.stats.latitude = cha.latitude
                        tr.stats.elevation = cha.elevation

    # Check if any Trace did NOT get coordinates assigned, and try to use JSON
    # coordinates if available
    for tr in st_out:
        try:
            tr.stats.longitude, tr.stats.latitude, tr.stats.elevation
        except AttributeError:
            try:
                tr.stats.latitude, tr.stats.longitude,\
                    tr.stats.elevation = AVO_COORDS[tr.id]
                warnings.warn(f'Using coordinates from JSON file for {tr.id}.',
                              CollectionWarning)
            except KeyError:
                print(f'No coordinates available for {tr.id}. Stopping.')
                raise

    # Remove sensitivity
    if remove_response:

        print('Removing sensitivity...')

        for tr in st_out:
            try:
                # Just remove sensitivity for now, since remove_response()
                # can lead to errors and some IRIS-AVO responses are wonky.
                # This should be sufficient for our purposes.
                tr.remove_sensitivity()
            except ValueError:  # No response information found
                # This is only set up for infrasound calibration values
                try:
                    calib = AVO_INFRA_CALIBS[tr.id]
                    tr.data = tr.data * calib
                    warnings.warn(
                        'Using calibration value from JSON file for '
                        f'{tr.id}.', CollectionWarning)
                except KeyError:
                    print(f'No calibration value available for {tr.id}. '
                          'Stopping.')
                    raise

    print('Done')

    # Return the Stream with coordinates attached (and responses removed if
    # specified)
    if return_failed_stations:
        return st_out, failed_stations
    else:
        return st_out
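
For reference, a hypothetical call to gather_waveforms() as defined above; the network/station codes and times are placeholders for illustration, not values from the original project:

from obspy import UTCDateTime

st, failed = gather_waveforms(source='IRIS',
                              network='AV',            # placeholder SEED codes
                              station='O20K,O22K',
                              location='*',
                              channel='BDF',
                              starttime=UTCDateTime(2019, 6, 20, 23, 10),
                              endtime=UTCDateTime(2019, 6, 20, 23, 50),
                              time_buffer=60,
                              return_failed_stations=True)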
Example #13
0
    def calculate_onsets(self, data, log=True, timespan=None):
        """
        Calculate onset functions for the requested stations and phases.

        Returns a stacked array of onset functions for the requested phases,
        and an :class:`~quakemigrate.signal.onsets.base.OnsetData` object
        containing all outputs from the onset function calculation: a dict of
        the onset functions, a Stream containing the pre-processed input
        waveforms, and a dict of availability info describing which of the
        requested onset functions could be calculated (depending on data
        availability and data quality checks).

        Parameters
        ----------
        data : :class:`~quakemigrate.io.data.WaveformData` object
            Light class encapsulating data returned by an archive query.
        log : bool
            Calculate log(onset) if True, otherwise calculate the raw onset.
        timespan : float or None, optional
            If the timespan for which the onsets are being generated is
            provided, this will be used to calculate the tapered window of data
            at the start and end of the onset function which should be
            disregarded. This is necessary to accurately set the pick threshold
            in GaussianPicker, for example.

        Returns
        -------
        onsets : `numpy.ndarray` of float
            Stacked onset functions served up for migration,
            shape(nonsets, nsamples).
        onset_data : :class:`~quakemigrate.signal.onsets.base.OnsetData` object
            Light class encapsulating data generated during onset calculation.

        """

        onsets = []
        onsets_dict = {}
        filtered_waveforms = Stream()
        availability = {}

        # Loop through phases, pre-process data, and calculate onsets.
        for phase in self.phases:
            # Select traces based on channel map for this phase
            phase_waveforms = data.waveforms.select(
                channel=self.channel_maps[phase])

            # Convert sta window, lta window lengths from seconds to samples.
            stw, ltw = self.sta_lta_windows[phase]
            stw = util.time2sample(stw, self.sampling_rate) + 1
            ltw = util.time2sample(ltw, self.sampling_rate) + 1

            # Pre-process the data. The ObsPy functions operate by trace, so
            # will not break on gappy data (we haven't checked availability
            # yet)
            filtered_phase_waveforms = pre_process(
                phase_waveforms, self.sampling_rate, data.resample,
                data.upfactor, self.bandpass_filters[phase], data.starttime,
                data.endtime)

            # Loop through stations, check data availability for this phase,
            # and store this info, filtered waveforms and calculated onsets
            for station in data.stations:
                waveforms = filtered_phase_waveforms.select(station=station)

                available, av_dict = data.check_availability(
                    waveforms,
                    all_channels=self.all_channels,
                    n_channels=self.channel_counts[phase],
                    allow_gaps=self.allow_gaps,
                    full_timespan=self.full_timespan,
                    check_sampling_rate=True,
                    sampling_rate=self.sampling_rate)
                availability[f"{station}_{phase}"] = available

                # If no data available, skip
                if available == 0:
                    logging.info(f"\t\tNo {phase} onset for {station}.")
                    continue

                # Check that all channels met the availability criteria. If
                # not, remove this channel from the stream.
                for key, available in av_dict.items():
                    if available == 0:
                        to_remove = waveforms.select(id=key)
                        for tr in to_remove:
                            waveforms.remove(tr)

                # Pad with tiny floats so onset will be the correct length.
                # Note: this will only have an effect if allow_gaps=True or
                # full_timespan=False. Otherwise, there will be no gaps to pad.
                if self.allow_gaps or not self.full_timespan:
                    # Square root to avoid floating point errors when value
                    # is squared to compute the energy trace
                    tiny = np.sqrt(np.finfo(float).tiny)
                    # Apply another taper to remove transients from filtering -
                    # this is within the pre- and post-pad for continuous data
                    waveforms.taper(type="cosine", max_percentage=0.05)
                    # Fill gaps
                    waveforms.merge(method=1, fill_value=tiny)
                    # Pad start/end; delta of +/-0.00001 is to avoid
                    # occasional obspy weirdness. `nearest_sample` is
                    # appropriate as data is at uniform sampling rate with
                    # off-sample data corrected by util.shift_to_sample()
                    waveforms.trim(starttime=data.starttime - 0.00001,
                                   endtime=data.endtime + 0.00001,
                                   pad=True,
                                   fill_value=tiny,
                                   nearest_sample=False)

                # Calculate onset and add to WaveForm data object; add filtered
                # waveforms that have passed the availability check to
                # WaveformData.filtered_waveforms
                onsets_dict.setdefault(station, {}).update(
                    {phase: self._onset(waveforms, stw, ltw, log, timespan)})
                onsets.append(onsets_dict[station][phase])
                filtered_waveforms += waveforms

        logging.debug(filtered_waveforms.__str__(extended=True))

        onsets = np.stack(onsets, axis=0)
        onset_data = OnsetData(onsets_dict, self.phases, self.channel_maps,
                               filtered_waveforms, availability,
                               data.starttime, data.endtime,
                               self.sampling_rate)

        return onsets, onset_data
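
The gap-padding idiom in the middle of this method stands alone. A sketch, under the assumption that st is a possibly gappy ObsPy Stream and t0/t1 bracket the processing window (here taken from ObsPy's bundled demo data):

import numpy as np
from obspy import read

st = read()  # ObsPy's bundled demo Stream
t0, t1 = st[0].stats.starttime, st[0].stats.endtime

tiny = np.sqrt(np.finfo(float).tiny)          # sqrt so squaring can't underflow
st.taper(type='cosine', max_percentage=0.05)  # damp filter transients at the edges
st.merge(method=1, fill_value=tiny)           # fill internal gaps
st.trim(starttime=t0 - 0.00001, endtime=t1 + 0.00001,
        pad=True, fill_value=tiny, nearest_sample=False)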
Example #14
0
                            s_t = t - start_buff
                            e_t = t + end_buff
                        tr.trim(starttime=s_t, endtime=e_t)
                        # deduct theoretical traveltime and start_buf from starttime
                        if rel_time == 1:
                            tr.stats.starttime = tr.stats.starttime - atime
                        st_pickalign += tr
                    except Exception:
                        pass
print('After alignment and range selection - event: ' +
      str(len(st_pickalign)) + ' traces')

#%%
#print(st) # at length
if verbose:
    print(st.__str__(extended=True))
    if rel_time == 1:
        print(st_pickalign.__str__(extended=True))

#%%  detrend, taper, filter
st_pickalign.detrend(type='simple')
st_pickalign.taper(taper_frac)
st_pickalign.filter('bandpass',
                    freqmin=freq_min,
                    freqmax=freq_max,
                    corners=2,
                    zerophase=True)
st_pickalign.taper(taper_frac)

#%%  Cull further by imposing SNR threshold on both traces
stgood = Stream()
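
The SNR culling that follows this fragment (shown in full in the next example) reduces to a small helper. A sketch, assuming each trace has been trimmed to run from start_buff (negative, pre-arrival) to end_buff with the aligned arrival at t = 0:

import numpy as np

def trace_snr(tr, start_buff, end_buff, taper_frac, signal_dur):
    # Window indices as fractions of trace length, mirroring the
    # arithmetic used in these scripts.
    n = len(tr.data)
    total = end_buff - start_buff
    i_noise0 = int(n * taper_frac)              # skip the tapered samples
    i_arrival = int(n * (-start_buff) / total)  # sample index of the arrival
    i_sig1 = i_arrival + int(n * signal_dur / total)
    noise = np.median(np.abs(tr.data[i_noise0:i_arrival]))
    signal = np.median(np.abs(tr.data[i_arrival:i_sig1]))
    return signal / noise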
Example #15
0
def pro3pair(eq_file1,
             eq_file2,
             stat_corr=1,
             simple_taper=0,
             skip_SNR=0,
             dphase='PKIKP',
             dphase2='PKiKP',
             dphase3='PKIKP',
             dphase4='PKiKP',
             rel_time=1,
             start_buff=-200,
             end_buff=500,
             plot_scale_fac=0.05,
             qual_threshold=0,
             corr_threshold=0.5,
             freq_min=1,
             freq_max=3,
             min_dist=0,
             max_dist=180,
             alt_statics=0,
             statics_file='nothing',
             ARRAY=0,
             ref_loc=0):

    # Parameters
    #	ARRAY 0 is Hinet, 1 is LASA, 2 is NORSAR
    #	start_buff = -50       # plots start X s after PKIKP
    #	end_buff   = 200       # plots end X s after PKIKP
    #	plot_scale_fac = 0.5  #  Bigger numbers make each trace amplitude bigger on plot
    #	stat_corr = 1 # apply station static corrections
    #	qual_threshold =  0.2   # minimum SNR
    #	corr_threshold = 0.7  # minimum correlation in measuring shift to use station in static construction

    #	dphase  = 'PKIKP'       # phase to be aligned
    #	dphase2 = 'PKiKP'      # another phase to have traveltime plotted
    #	dphase3 = 'pPKiKP'        # another phase to have traveltime plotted
    #	dphase4 = 'pPKIKP'        # another phase to have traveltime plotted

    #%%  Set some parameters
    verbose = 0  # more output
    #	rel_time = 1          # timing is relative to a chosen phase, otherwise relative to OT
    taper_frac = .05  # fraction of window tapered on both ends
    signal_dur = 5.  # signal length used in SNR calculation
    plot_tt = 1  # plot the traveltimes?
    do_decimate = 0  # 0 if no decimation desired
    # ref_loc = 1 selects stations within ref_rad of (ref_lat, ref_lon);
    # ref_loc = 0 selects stations by distance from the earthquake
    if ref_loc == 1:
        if ARRAY == 0:
            ref_lat = 36.3  # °N, around middle of Japan
            ref_lon = 138.5  # °E
            ref_rad = 1.5  # radius (°)
        elif ARRAY == 1:
            ref_lat = 46.7  # °N; keep only inner rings A-D
            ref_lon = -106.22  # °E
            ref_rad = 0.4  # radius (°)

    if rel_time == 0:  # SNR requirement not implemented for unaligned traces
        qual_threshold = 0

    # Plot with reduced velocity?
    red_plot = 0
    red_dist = 55
    red_time = 300
    red_slow = 7.2  # seconds per degree

    #%% Import functions
    from obspy import UTCDateTime
    from obspy import Stream
    from obspy import read
    from obspy.geodetics import gps2dist_azimuth
    import numpy as np
    import os
    from obspy.taup import TauPyModel
    import matplotlib.pyplot as plt
    import time
    model = TauPyModel(model='iasp91')

    # Don't show any warnings
    import sys
    import warnings

    if not sys.warnoptions:
        warnings.simplefilter("ignore")

    print('Running pro3a_sort_plot_pair')
    start_time_wc = time.time()

    #%% Get saved event info, also used to name files
    #  event 2016-05-28T09:47:00.000 -56.241 -26.935 78
    print('Opening ' + eq_file1)
    if ARRAY == 0:
        file = open(eq_file1, 'r')
    elif ARRAY == 1:
        file = open('EvLocs/' + eq_file1, 'r')
    lines = file.readlines()
    split_line = lines[0].split()
    #			ids.append(split_line[0])  ignore label for now
    t1 = UTCDateTime(split_line[1])
    date_label1 = split_line[1][0:10]
    year1 = split_line[1][0:4]
    ev_lat1 = float(split_line[2])
    ev_lon1 = float(split_line[3])
    ev_depth1 = float(split_line[4])
    print('1st event: date_label ' + date_label1 + ' time ' + str(t1) +
          ' lat ' + str(ev_lat1) + ' lon ' + str(ev_lon1) + ' depth ' +
          str(ev_depth1))

    print('Opening ' + eq_file2)
    if ARRAY == 0:
        file = open(eq_file2, 'r')
    elif ARRAY == 1:
        file = open('EvLocs/' + eq_file2, 'r')
    lines = file.readlines()
    split_line = lines[0].split()
    #			ids.append(split_line[0])  ignore label for now
    t2 = UTCDateTime(split_line[1])
    date_label2 = split_line[1][0:10]
    year2 = split_line[1][0:4]
    ev_lat2 = float(split_line[2])
    ev_lon2 = float(split_line[3])
    ev_depth2 = float(split_line[4])
    print('2nd event: date_label ' + date_label2 + ' time ' + str(t2) +
          ' lat ' + str(ev_lat2) + ' lon ' + str(ev_lon2) + ' depth ' +
          str(ev_depth2))

    #%% Get station location file
    if stat_corr == 1:  # load static terms, only applies to Hinet and LASA
        if ARRAY == 0:
            if alt_statics == 0:  # standard set
                sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta_statics.txt'
            else:  # custom set made by this event for this event
                sta_file = (
                    '/Users/vidale/Documents/GitHub/Array_codes/Files/' +
                    'HA' + date_label1[:10] + 'pro4_' + dphase + '.statics')
        elif ARRAY == 1:
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/L_sta_statics.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_dist = []
        st_lats = []
        st_lons = []
        st_shift = []
        st_corr = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_dist.append(split_line[1])
            st_lats.append(split_line[2])
            st_lons.append(split_line[3])
            st_shift.append(split_line[4])
            st_corr.append(split_line[5])
    else:  # no static terms; always the case for NORSAR
        if ARRAY == 0:  # Hinet set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta.txt'
        elif ARRAY == 1:  #         LASA set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/LASA_sta.txt'
        else:  #         NORSAR set
            sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/NORSAR_sta.txt'
        with open(sta_file, 'r') as file:
            lines = file.readlines()
        print(str(len(lines)) + ' stations read from ' + sta_file)
        # Load station coords into arrays
        station_index = range(len(lines))
        st_names = []
        st_lats = []
        st_lons = []
        for ii in station_index:
            line = lines[ii]
            split_line = line.split()
            st_names.append(split_line[0])
            st_lats.append(split_line[1])
            st_lons.append(split_line[2])

#%% Is taper too long compared to noise estimation window?
    totalt = end_buff - start_buff
    noise_time_skipped = taper_frac * totalt
    if simple_taper == 0:
        if noise_time_skipped >= 0.5 * (-start_buff):
            print(
                'Specified taper of ' + str(taper_frac * totalt) +
                ' s is too long for the available noise estimation window of '
                + str(-start_buff - noise_time_skipped) +
                ' s. May not work well.')
            old_taper_frac = taper_frac
            taper_frac = -0.5 * start_buff / totalt
            print('Taper reset from ' + str(old_taper_frac * totalt) + ' to ' +
                  str(taper_frac * totalt) + ' seconds.')

#%% Load waveforms and decimate to 10 sps
    st1 = Stream()
    st2 = Stream()
    if ARRAY == 0:
        fname1 = 'HD' + date_label1 + '.mseed'
        fname2 = 'HD' + date_label2 + '.mseed'
    elif ARRAY == 1:
        fname1 = 'Mseed/HD' + date_label1 + '.mseed'
        fname2 = 'Mseed/HD' + date_label2 + '.mseed'
    st1 = read(fname1)
    st2 = read(fname2)
    if do_decimate != 0:
        st1.decimate(do_decimate)
        st2.decimate(do_decimate)

    print('1st trace has: ' + str(len(st1[0].data)) + ' time pts')
    print('st1 has ' + str(len(st1)) + ' traces')
    print('st2 has ' + str(len(st2)) + ' traces')
    print('1st trace starts at ' + str(st1[0].stats.starttime) +
          ', event at ' + str(t1))
    print('2nd trace starts at ' + str(st2[0].stats.starttime) +
          ', event at ' + str(t2))

    #%% Select by distance, window and adjust start time to align picked times
    st_pickalign1 = Stream()
    st_pickalign2 = Stream()

    for tr in st1:  # traces one by one, find lat-lon by searching entire inventory.  Inefficient
        if float(year1) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
        for ii in station_index:
            if ARRAY == 0:  # have to chop off last letter, always 'h'
                this_name = st_names[ii]
                this_name_truc = this_name[0:5]
                name_truc_cap = this_name_truc.upper()
            elif ARRAY == 1:
                name_truc_cap = st_names[ii]
            if tr.stats.station == name_truc_cap:  # find station in inventory
                # if using statics, reject low correlations
                if stat_corr != 1 or float(st_corr[ii]) > corr_threshold:
                    stalat = float(st_lats[ii])
                    stalon = float(st_lons[ii])  # look up lat & lon again to find distance
                    if ref_loc == 1:
                        ref_distance = gps2dist_azimuth(
                            stalat, stalon, ref_lat, ref_lon)
                        ref_dist = ref_distance[0] / (1000 * 111)
                    distance = gps2dist_azimuth(
                        stalat, stalon, ev_lat1,
                        ev_lon1)  # recomputed each time; hard to store
                    tr.stats.distance = distance[0]  # distance in m
                    dist = distance[0] / (1000 * 111)  # m to approximate degrees
                    if ref_loc != 1 and min_dist < dist and dist < max_dist:  # select distance range from earthquake
                        try:
                            #							print('Phase ' + dphase + ', depth ' + str(ev_depth1) + ' distance ' + str(dist))
                            arrivals = model.get_travel_times(
                                source_depth_in_km=ev_depth1,
                                distance_in_degree=dist,
                                phase_list=[dphase])
                            atime = arrivals[0].time
                            #							print(dphase + ' arrival time is ' + str(atime))
                            if stat_corr == 1:  # apply static station corrections
                                tr.stats.starttime -= float(st_shift[ii])
                            if rel_time == 1:
                                s_t = t1 + atime + start_buff
                                e_t = t1 + atime + end_buff
                            else:
                                s_t = t1 + start_buff
                                e_t = t1 + end_buff
                            tr.trim(starttime=s_t, endtime=e_t)
                            # deduct theoretical traveltime and start_buf from starttime
                            if rel_time == 1:
                                tr.stats.starttime -= atime
                            st_pickalign1 += tr
                        except Exception:
                            pass
                    elif ref_loc == 1:
                        if ref_dist < ref_rad:  # alternatively, select based on distance from ref location
                            try:
                                arrivals = model.get_travel_times(
                                    source_depth_in_km=ev_depth1,
                                    distance_in_degree=dist,
                                    phase_list=[dphase])
                                atime = arrivals[0].time
                                if stat_corr == 1:  # apply static station corrections
                                    tr.stats.starttime -= float(st_shift[ii])
                                if rel_time == 1:
                                    s_t = t1 + atime + start_buff
                                    e_t = t1 + atime + end_buff
                                else:
                                    s_t = t1 + start_buff
                                    e_t = t1 + end_buff
                                tr.trim(starttime=s_t, endtime=e_t)
                                # deduct theoretical traveltime and start_buf from starttime
                                if rel_time == 1:
                                    tr.stats.starttime -= atime
                                st_pickalign1 += tr
                            except Exception:
                                pass
    #				if len(tr.data) == 0:
    #					print('Event 1 - empty window.  Trace starts at ' + str(tr.stats.starttime) + ', event at ' + str(t1))

    for tr in st2:  # traces one by one
        if float(year2) < 1970:  # fix the damn 1969 -> 2069 bug in Gibbon's LASA data
            temp_t = str(tr.stats.starttime)
            temp_tt = '19' + temp_t[2:]
            tr.stats.starttime = UTCDateTime(temp_tt)
        for ii in station_index:
            if ARRAY == 0:  # have to chop off last letter, always 'h'
                this_name = st_names[ii]
                this_name_truc = this_name[0:5]
                name_truc_cap = this_name_truc.upper()
            elif ARRAY == 1:
                name_truc_cap = st_names[ii]
            if tr.stats.station == name_truc_cap:  # find station in inventory
                # if using statics, reject low correlations
                if stat_corr != 1 or float(st_corr[ii]) > corr_threshold:
                    stalat = float(st_lats[ii])
                    stalon = float(st_lons[ii])
                    if ref_loc == 1:
                        ref_distance = gps2dist_azimuth(
                            stalat, stalon, ref_lat, ref_lon)
                        ref_dist = ref_distance[0] / (1000 * 111)
                    distance = gps2dist_azimuth(
                        stalat, stalon, ev_lat2,
                        ev_lon2)  # recomputed each time; hard to store
                    tr.stats.distance = distance[0]  # distance in m
                    dist = distance[0] / (1000 * 111)  # m to approximate degrees
                    if ref_loc != 1 and min_dist < dist and dist < max_dist:  # select distance range from earthquake
                        try:
                            arrivals = model.get_travel_times(
                                source_depth_in_km=ev_depth2,
                                distance_in_degree=dist,
                                phase_list=[dphase])
                            atime = arrivals[0].time
                            if stat_corr == 1:  # apply static station corrections
                                tr.stats.starttime -= float(st_shift[ii])
                            if rel_time == 1:
                                s_t = t2 + atime + start_buff
                                e_t = t2 + atime + end_buff
                            else:
                                s_t = t2 + start_buff
                                e_t = t2 + end_buff
                            tr.trim(starttime=s_t, endtime=e_t)
                            # deduct theoretical traveltime and start_buf from starttime
                            if rel_time == 1:
                                tr.stats.starttime -= atime
                            st_pickalign2 += tr
                        except Exception:
                            pass
                    elif ref_loc == 1:
                        if ref_dist < ref_rad:  # alternatively, select based on distance from ref location
                            try:
                                arrivals = model.get_travel_times(
                                    source_depth_in_km=ev_depth2,
                                    distance_in_degree=dist,
                                    phase_list=[dphase])
                                atime = arrivals[0].time
                                if stat_corr == 1:  # apply static station corrections
                                    tr.stats.starttime -= float(st_shift[ii])
                                if rel_time == 1:
                                    s_t = t2 + atime + start_buff
                                    e_t = t2 + atime + end_buff
                                else:
                                    s_t = t2 + start_buff
                                    e_t = t2 + end_buff
                                tr.trim(starttime=s_t, endtime=e_t)
                                # deduct theoretical traveltime and start_buf from starttime
                                if rel_time == 1:
                                    tr.stats.starttime -= atime
                                st_pickalign2 += tr
                            except Exception:
                                pass
    #				if len(tr.data) == 0:
    #					print('Event 2 - empty window.  Trace starts at ' + str(tr.stats.starttime) + ', event at ' + str(t2))

    print('After alignment and range selection: ' + str(len(st_pickalign1)) +
          ' traces')

    #%%
    #print(st) # at length
    if verbose:
        print(st1.__str__(extended=True))
        print(st2.__str__(extended=True))
        if rel_time == 1:
            print(st_pickalign1.__str__(extended=True))
            print(st_pickalign2.__str__(extended=True))

#%%  Detrend, taper, filter
    st_pickalign1.detrend(type='simple')
    st_pickalign2.detrend(type='simple')
    st_pickalign1.taper(taper_frac)
    st_pickalign2.taper(taper_frac)
    st_pickalign1.filter('bandpass',
                         freqmin=freq_min,
                         freqmax=freq_max,
                         corners=4,
                         zerophase=True)
    st_pickalign2.filter('bandpass',
                         freqmin=freq_min,
                         freqmax=freq_max,
                         corners=4,
                         zerophase=True)
    st_pickalign1.taper(taper_frac)
    st_pickalign2.taper(taper_frac)

    #%%  Cull further by imposing SNR threshold on both traces
    st1good = Stream()
    st2good = Stream()
    for tr1 in st_pickalign1:
        for tr2 in st_pickalign2:
            if ((tr1.stats.network == tr2.stats.network) &
                (tr1.stats.station == tr2.stats.station)):
                if skip_SNR == 1:
                    st1good += tr1
                    st2good += tr2
                else:
                    # estimate median noise
                    t_noise1_start = int(len(tr1.data) * taper_frac)
                    t_noise2_start = int(len(tr2.data) * taper_frac)
                    t_noise1_end = int(
                        len(tr1.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_noise2_end = int(
                        len(tr2.data) * (-start_buff) /
                        (end_buff - start_buff))
                    noise1 = np.median(
                        abs(tr1.data[t_noise1_start:t_noise1_end]))
                    noise2 = np.median(
                        abs(tr2.data[t_noise2_start:t_noise2_end]))
                    # estimate median signal
                    t_signal1_start = int(
                        len(tr1.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_signal2_start = int(
                        len(tr2.data) * (-start_buff) /
                        (end_buff - start_buff))
                    t_signal1_end = t_signal1_start + int(
                        len(tr1.data) * signal_dur / (end_buff - start_buff))
                    t_signal2_end = t_signal2_start + int(
                        len(tr2.data) * signal_dur / (end_buff - start_buff))
                    signal1 = np.median(
                        abs(tr1.data[t_signal1_start:t_signal1_end]))
                    signal2 = np.median(
                        abs(tr2.data[t_signal2_start:t_signal2_end]))
                    #			test SNR
                    SNR1 = signal1 / noise1
                    SNR2 = signal2 / noise2
                    if (SNR1 > qual_threshold and SNR2 > qual_threshold):
                        st1good += tr1
                        st2good += tr2
    if skip_SNR == 1:
        print('Matches (no SNR test): ' + str(len(st1good)) + ' traces')
    else:
        print('Match and above SNR threshold: ' + str(len(st1good)) +
              ' traces')

    #%%  get station lat-lon, compute distance for plot
    for tr in st1good:
        for ii in station_index:
            if (tr.stats.station == st_names[ii]):  # find station in inventory
                stalon = float(
                    st_lons[ii])  # look up lat & lon again to find distance
                stalat = float(st_lats[ii])
                distance = gps2dist_azimuth(stalat, stalon, ev_lat1, ev_lon1)
                tr.stats.distance = distance[0]  # distance in m
    for tr in st2good:
        for ii in station_index:
            if (tr.stats.station == st_names[ii]):  # find station in inventory
                stalon = float(
                    st_lons[ii])  # look up lat & lon again to find distance
                stalat = float(st_lats[ii])
                distance = gps2dist_azimuth(stalat, stalon, ev_lat2, ev_lon2)
                tr.stats.distance = distance[0]  # distance in m

    print('Made it to here.')
    #%%
    # plot traces
    fig_index = 3
    plt.close(fig_index)
    plt.figure(fig_index, figsize=(8, 8))
    plt.xlim(start_buff, end_buff)
    plt.ylim(min_dist, max_dist)
    for tr in st1good:
        dist_offset = tr.stats.distance / (1000 * 111)  # m to approximate degrees
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t1)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        # These lines used to cause a crash in Spyder
        plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                 (tr.data.max() - tr.data.min()) + dist_offset,
                 color='green')
    #plt.title(fname1)

    print('And made it to here?')

    for tr in st2good:
        dist_offset = tr.stats.distance / (1000 * 111)  # m to approximate degrees
        ttt = np.arange(len(tr.data)) * tr.stats.delta + (tr.stats.starttime -
                                                          t2)
        if red_plot == 1:
            shift = red_time + (dist_offset - red_dist) * red_slow
            ttt = ttt - shift
        # These lines used to cause a crash in Spyder
        plt.plot(ttt, (tr.data - np.median(tr.data)) * plot_scale_fac /
                 (tr.data.max() - tr.data.min()) + dist_offset,
                 color='red')
    print('And made it to here.')

    #%% Plot traveltime curves
    if plot_tt:
        # first traveltime curve
        line_pts = 50
        dist_vec = np.arange(min_dist, max_dist,
                             (max_dist - min_dist) / line_pts)  # distance grid
        time_vec1 = np.arange(
            min_dist, max_dist, (max_dist - min_dist) /
            line_pts)  # time grid of same length; values overwritten below
        for i in range(0, line_pts):
            arrivals = model.get_travel_times(source_depth_in_km=ev_depth1,
                                              distance_in_degree=dist_vec[i],
                                              phase_list=[dphase])
            num_arrivals = len(arrivals)
            found_it = 0
            for j in range(0, num_arrivals):
                if arrivals[j].name == dphase:
                    time_vec1[i] = arrivals[j].time
                    found_it = 1
            if found_it == 0:
                time_vec1[i] = np.nan
        # second traveltime curve
        if dphase2 != 'no':
            time_vec2 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # time grid of same length; values overwritten below
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth1,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase2])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase2:
                        time_vec2[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec2[i] = np.nan
            if rel_time == 1:
                time_vec2 = time_vec2 - time_vec1
            plt.plot(time_vec2, dist_vec, color='orange')
        # third traveltime curve
        if dphase3 != 'no':
            time_vec3 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # time grid of same length; values overwritten below
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth1,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase3])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase3:
                        time_vec3[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec3[i] = np.nan
            if rel_time == 1:
                time_vec3 = time_vec3 - time_vec1
            plt.plot(time_vec3, dist_vec, color='yellow')
        # fourth traveltime curve
        if dphase4 != 'no':
            time_vec4 = np.arange(
                min_dist, max_dist, (max_dist - min_dist) /
                line_pts)  # time grid of same length; values overwritten below
            for i in range(0, line_pts):
                arrivals = model.get_travel_times(
                    source_depth_in_km=ev_depth1,
                    distance_in_degree=dist_vec[i],
                    phase_list=[dphase4])
                num_arrivals = len(arrivals)
                found_it = 0
                for j in range(0, num_arrivals):
                    if arrivals[j].name == dphase4:
                        time_vec4[i] = arrivals[j].time
                        found_it = 1
                if found_it == 0:
                    time_vec4[i] = np.nan
            if rel_time == 1:
                time_vec4 = time_vec4 - time_vec1
            plt.plot(time_vec4, dist_vec, color='purple')

        if rel_time == 1:
            time_vec1 = time_vec1 - time_vec1
        plt.plot(time_vec1, dist_vec, color='blue')

    plt.xlabel('Time (s)')
    plt.ylabel('Epicentral distance from event (°)')
    if ARRAY == 0:
        plt.title(dphase + ' for ' + fname1 + ' vs ' + fname2)
    elif ARRAY == 1:
        plt.title(dphase + ' for ' + fname1[8:18] + ' vs ' + fname2[8:18])
    plt.show()

    #%%  Save processed files
    if ARRAY == 0:
        fname1 = 'HD' + date_label1 + 'sel.mseed'
        fname2 = 'HD' + date_label2 + 'sel.mseed'
    elif ARRAY == 1:
        fname1 = 'Pro_Files/HD' + date_label1 + 'sel.mseed'
        fname2 = 'Pro_Files/HD' + date_label2 + 'sel.mseed'
    st1good.write(fname1, format='MSEED')
    st2good.write(fname2, format='MSEED')

    elapsed_time_wc = time.time() - start_time_wc
    print('This job took ' + str(elapsed_time_wc) + ' seconds')
    os.system('say "Done"')
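
The red_plot branch above applies a standard velocity-reduction transform to the time axis. As a standalone sketch using the defaults set earlier in this function (red_dist = 55°, red_time = 300 s, red_slow = 7.2 s/°):

def reduce_time(t, dist_deg, red_dist=55, red_time=300, red_slow=7.2):
    # Shift times so a phase with slowness red_slow (s/deg) passing through
    # (red_dist, red_time) plots at reduced time zero.
    return t - (red_time + (dist_deg - red_dist) * red_slow)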
Example #16
0
def read_local(data_dir,
               coord_file,
               network,
               station,
               location,
               channel,
               starttime,
               endtime,
               merge=True):
    """
    Read in waveforms from "local" 1-hour, IRIS-compliant miniSEED files, and
    output a :class:`~obspy.core.stream.Stream` with station/element coordinates
    attached.

    **NOTE 1**

    The expected naming convention for the miniSEED files is

    ``<network>.<station>.<location>.<channel>.<year>.<julian_day>.<hour>``

    **NOTE 2**

    This function assumes that the response has been removed from the waveforms
    in the input miniSEED files.

    Args:
        data_dir (str): Directory containing miniSEED files
        coord_file (str): JSON file containing coordinates for local stations
            (full path required)
        network (str): SEED network code [wildcards (``*``, ``?``) accepted]
        station (str): SEED station code [wildcards (``*``, ``?``) accepted]
        location (str): SEED location code [wildcards (``*``, ``?``) accepted]
        channel (str): SEED channel code [wildcards (``*``, ``?``) accepted]
        starttime (:class:`~obspy.core.utcdatetime.UTCDateTime`): Start time for
            data request
        endtime (:class:`~obspy.core.utcdatetime.UTCDateTime`): End time for
            data request
        merge (bool): Toggle merging of :class:`~obspy.core.trace.Trace` objects
            with identical IDs

    Returns:
        :class:`~obspy.core.stream.Stream` containing gathered waveforms
    """

    print('-----------------------------')
    print('GATHERING LOCAL MINISEED DATA')
    print('-----------------------------')

    # Take (hour) floor of starttime
    starttime_hr = UTCDateTime(starttime.year, starttime.month, starttime.day,
                               starttime.hour)

    # Take (hour) floor of endtime - this ensures the file containing endtime is checked
    endtime_hr = UTCDateTime(endtime.year, endtime.month, endtime.day,
                             endtime.hour)

    # Define filename template
    template = f'{network}.{station}.{location}.{channel}.{{}}.{{}}.{{}}'

    # Initialize Stream object
    st_out = Stream()

    # Initialize the starting hour
    tmp_time = starttime_hr

    # Cycle forward in time, advancing hour by hour through miniSEED files
    while tmp_time <= endtime_hr:

        pattern = template.format(tmp_time.strftime('%Y'),
                                  tmp_time.strftime('%j'),
                                  tmp_time.strftime('%H'))

        files = glob.glob(os.path.join(data_dir, pattern))

        for file in files:
            st_out += read(file)

        tmp_time += HR2SEC  # Add an hour!

    if merge:
        st_out.merge()  # Merge Traces with the same ID
    st_out.sort()

    # If the Stream is empty, then we can stop here
    if st_out.count() == 0:
        print('No data downloaded.')
        return st_out

    # Otherwise, show what the Stream contains
    print(st_out.__str__(extended=True))  # This syntax prints the WHOLE Stream

    # Add zeros to ensure all Traces have same length
    st_out.trim(starttime, endtime, pad=True, fill_value=0)

    # Replace numerical outliers with zeroes
    for tr in st_out:
        d0 = np.where(tr.data > OUTLIER_THRESHOLD)[0]
        if d0.any():
            print(f'{len(d0)} data points in {tr.id} were outliers with '
                  f'values > {OUTLIER_THRESHOLD} and are now set to 0')
        tr.data[d0] = 0

    print('Assigning coordinates...')

    # Assign coordinates by searching through user-supplied JSON file
    local_coords = load_json_file(coord_file)
    for tr in st_out:
        try:
            tr.stats.latitude, tr.stats.longitude,\
                tr.stats.elevation = local_coords[tr.stats.station]
        except KeyError:
            print(f'No coordinates available for {tr.id}. Stopping.')
            raise

    print('Done')

    # Return the Stream with coordinates attached
    return st_out
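
A hypothetical call to read_local() as defined above; the directory, JSON file, and SEED codes are placeholders:

from obspy import UTCDateTime

st = read_local(data_dir='/data/mseed',
                coord_file='/data/station_coords.json',
                network='AV', station='*', location='*', channel='BDF',
                starttime=UTCDateTime(2019, 7, 15, 6, 0),
                endtime=UTCDateTime(2019, 7, 15, 8, 0))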