Example #1
import obspy
import numpy as np
import matplotlib.pyplot as plt

import circ_array as c
from circ_beam import pws_stack_baz_slow, linear_stack_baz_slow

phase = 'SKS'
phases = ['SKS', 'SKKS', 'ScS', 'Sdiff', 'sSKS', 'sSKKS', 'PS']

st = obspy.read('./data/19970525/*SAC')

# get array metadata
event_time = c.get_eventtime(st)
geometry = c.get_geometry(st)
distances = c.get_distances(st, type='deg')
mean_dist = np.mean(distances)
stations = c.get_stations(st)

# get travel time information and define a window
Target_phase_times, time_header_times = c.get_predicted_times(st, phase)

avg_target_time = np.mean(Target_phase_times)
min_target_time = int(np.nanmin(Target_phase_times, axis=0))
max_target_time = int(np.nanmax(Target_phase_times, axis=0))

stime = event_time + min_target_time
etime = event_time + max_target_time + 30

print(stime)
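
A hedged continuation of the snippet above: with stime and etime defined, the stream can be cut and normalised around the target phase, exactly as the later examples do with ObsPy's trim() and normalize(). st_win is an assumed name for the windowed copy.

# cut a copy of the stream to the window around the predicted arrivals
# and normalise the amplitudes; the original stream is left untouched
st_win = st.copy().trim(starttime=stime, endtime=etime)
st_win = st_win.normalize()
print(st_win)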
    def create_newlines(
        self,
        st,
        file_path,
        phase,
        window,
        Boots,
        epsilon,
        slow_vec_error=3,
        Filter=False,
    ):
        """
        This function will create a list of lines with all relevant information to be stored in
        the results file. The function write_to_cluster_file() will write these lines to a new
        file and replace any of the lines with the same array location and target phase.

        Parameters
        ----------
        st : Obspy stream object
             Obspy stream object of sac files with event, arrival time and
             station headers populated.

        file_path : string
                    Path to the results file to check the contents of.

        phase : string
                Target phase (e.g. SKS)

        window : list of floats
                 tmin and tmax describing the relative time window.

        Boots : int
                Number of bootstrap samples.
        epsilon : float
                  Epsilon value used to find the clusters.

        slow_vec_error : float
                         Maximum slowness vector deviation between the predicted
                         and observed arrival. Arrival with larger deviations will
                         be removed if Filter = True (below). Default is 3.

        Filter : bool
                 Do you want to filter out the arrivals (default = False)

        Returns
        -------
            newlines: list of strings of the contents to write to the results file.

        """

        from scipy.spatial import distance
        from sklearn.neighbors import KDTree
        import os
        from circ_beam import shift_traces
        from obspy.taup import TauPyModel

        model = TauPyModel(model='prem')

        newlines = []
        header = (
            "Name evla evlo evdp reloc_evla reloc_evlo "
            "stla_mean stlo_mean slow_pred slow_max slow_diff "
            "slow_std_dev baz_pred baz_max baz_diff baz_std_dev "
            "slow_x_pred slow_x_obs del_x_slow x_std_dev slow_y_pred slow_y_obs "
            "del_y_slow y_std_dev az az_std mag mag_std time_obs time_pred time_diff time_std_dev "
            "error_ellipse_area ellispe_width ellispe_height "
            "ellispe_theta ellipse_rel_density multi phase no_stations "
            "stations t_window_start t_window_end Boots\n")
        event_time = c.get_eventtime(st)
        geometry = c.get_geometry(st)
        distances = c.get_distances(st, type="deg")
        mean_dist = np.mean(distances)
        stations = c.get_stations(st)
        no_stations = len(stations)
        sampling_rate = st[0].stats.sampling_rate
        stlo_mean, stla_mean = np.mean(geometry[:, 0]), np.mean(geometry[:, 1])
        evdp = st[0].stats.sac.evdp
        evlo = st[0].stats.sac.evlo
        evla = st[0].stats.sac.evla
        t_min = window[0]
        t_max = window[1]
        Target_phase_times, time_header_times = c.get_predicted_times(
            st, phase)

        # the traces need to be trimmed to the same start and end time
        # for the shifting and clipping of the traces to work (see below).
        min_target = int(np.nanmin(Target_phase_times, axis=0)) + (t_min)
        max_target = int(np.nanmax(Target_phase_times, axis=0)) + (t_max)

        stime = event_time + min_target
        etime = event_time + max_target

        # trim the stream
        # Normalise and cut seismogram around defined window
        st = st.copy().trim(starttime=stime, endtime=etime)
        st = st.normalize()

        # get predicted slownesses and backazimuths
        predictions = c.pred_baz_slow(stream=st,
                                      phases=[phase],
                                      one_eighty=True)

        # find the line with the predictions for the phase of interest
        row = np.where((predictions == phase))[0]

        (
            P,
            S,
            BAZ,
            PRED_BAZ_X,
            PRED_BAZ_Y,
            PRED_AZ_X,
            PRED_AZ_Y,
            DIST,
            TIME,
        ) = predictions[row, :][0]
        PRED_BAZ_X = float(PRED_BAZ_X)
        PRED_BAZ_Y = float(PRED_BAZ_Y)
        S = float(S)
        BAZ = float(BAZ)

        name = (str(event_time.year) + f"{event_time.month:02d}" +
                f"{event_time.day:02d}" + "_" + f"{event_time.hour:02d}" +
                f"{event_time.minute:02d}" + f"{event_time.second:02d}")

        traces = c.get_traces(st)
        shifted_traces = shift_traces(traces=traces,
                                      geometry=geometry,
                                      abs_slow=float(S),
                                      baz=float(BAZ),
                                      distance=float(mean_dist),
                                      centre_x=float(stlo_mean),
                                      centre_y=float(stla_mean),
                                      sampling_rate=sampling_rate)

        # predict arrival time
        arrivals = model.get_travel_times(source_depth_in_km=evdp,
                                          distance_in_degree=mean_dist,
                                          phase_list=[phase])

        pred_time = arrivals[0].time

        # get point of the predicted arrival time
        pred_point = int(sampling_rate * (pred_time - min_target))

        # get points to clip window
        point_before = int(pred_point + (t_min * sampling_rate))
        point_after = int(pred_point + (t_max * sampling_rate))

        # clip the traces
        cut_shifted_traces = shifted_traces[:, point_before:point_after]

        # get the min time of the traces
        min_time = pred_time + t_min

        no_clusters = np.amax(self.labels) + 1
        means_xy, means_baz_slow = self.cluster_means()
        bazs_std, slows_std, slow_xs_std, slow_ys_std, azs_std, mags_std = self.cluster_std_devs(
            pred_x=PRED_BAZ_X, pred_y=PRED_BAZ_Y)
        ellipse_areas = self.cluster_ellipse_areas(std_dev=2)
        ellipse_properties = self.cluster_ellipse_properties(std_dev=2)
        points_clusters = self.group_points_clusters()
        arrival_times = self.estimate_travel_times(traces=cut_shifted_traces,
                                                   tmin=min_time,
                                                   sampling_rate=sampling_rate,
                                                   geometry=geometry,
                                                   distance=mean_dist,
                                                   pred_x=PRED_BAZ_X,
                                                   pred_y=PRED_BAZ_Y)

        # Option to filter arrivals based on their slowness-vector deviation from the prediction
        if Filter == True:
            try:

                distances = distance.cdist(np.array([[PRED_BAZ_X,
                                                      PRED_BAZ_Y]]),
                                           means_xy,
                                           metric="euclidean")
                number_arrivals_slow_space = np.where(
                    distances < slow_vec_error)[0].shape[0]

                number_arrivals = number_arrivals_slow_space
            except Exception:
                multi = "t"
                number_arrivals = 0

        elif Filter == False:
            number_arrivals = no_clusters

        else:
            print("Filter needs to be True or False")
            exit()

        if number_arrivals > 1:
            multi = "y"

        elif number_arrivals == 0:
            print("no usable arrivals")
            # exit()
            multi = "t"
        elif number_arrivals == 1:
            multi = "n"

        else:
            print("something went wrong in the error estimates")
            # exit()
            multi = "t"

        # array to hold the mean slowness vectors of the usable arrivals
        usable_means = np.empty((number_arrivals, 2))

        # set a counter to zero; it will be used to label the arrivals as first, second, etc.
        usable_arrivals = 0

        if no_clusters != 0:
            for i in range(no_clusters):

                # get the observed backazimuth and slowness information for this cluster
                baz_obs = means_baz_slow[i, 0]

                baz_diff = baz_obs - float(BAZ)

                slow_obs = means_baz_slow[i, 1]
                slow_diff = slow_obs - float(S)

                slow_x_obs = c.myround(means_xy[i, 0])
                slow_y_obs = c.myround(means_xy[i, 1])

                del_x_slow = slow_x_obs - PRED_BAZ_X
                del_y_slow = slow_y_obs - PRED_BAZ_Y

                az = np.degrees(np.arctan2(del_y_slow, del_x_slow))
                mag = np.sqrt(del_x_slow**2 + del_y_slow**2)

                if az < 0:
                    az += 360

                # slowness-vector deviation of this cluster from the prediction
                # (equal to mag above); named to avoid shadowing the imported
                # scipy.spatial distance module
                vec_deviation = np.sqrt(del_x_slow**2 + del_y_slow**2)

                baz_std_dev = bazs_std[i]
                slow_std_dev = slows_std[i]

                x_std_dev = slow_xs_std[i]
                y_std_dev = slow_ys_std[i]

                az_std_dev = azs_std[i]
                mag_std_dev = mags_std[i]

                error_ellipse_area = ellipse_areas[i]

                width = ellipse_properties[i, 1]
                height = ellipse_properties[i, 2]
                theta = ellipse_properties[i, 3]

                # relocated event location
                reloc_evla, reloc_evlo = c.relocate_event_baz_slow(
                    evla=evla,
                    evlo=evlo,
                    evdp=evdp,
                    stla=stla_mean,
                    stlo=stlo_mean,
                    baz=baz_obs,
                    slow=slow_obs,
                    phase=phase,
                    mod='prem')

                times = arrival_times[i]
                mean_time = np.mean(times)
                time_diff = mean_time - pred_time
                times_std_dev = np.std(times)

                # if error_ellipse_area <= error_criteria_area and error_ellipse_area > 1.0:
                #     multi = 'm'

                if Filter == True:
                    if vec_deviation < slow_vec_error:

                        usable_means[usable_arrivals] = np.array(
                            [slow_x_obs, slow_y_obs])

                        points_cluster = points_clusters[i]

                        tree = KDTree(points_cluster,
                                      leaf_size=int(self.points.shape[0] * 1.5))
                        points_rad = tree.query_radius(points_cluster,
                                                       r=epsilon,
                                                       count_only=True)
                        densities = points_rad / (np.pi * (epsilon**2))
                        mean_density = np.mean(densities)

                        # update the usable arrivals count
                        usable_arrivals += 1
                        name_label = name + "_" + str(usable_arrivals)

                        # define the newline to be added to the file
                        newline = (
                            f"{name_label} {evla:.2f} {evlo:.2f} {evdp:.2f} {reloc_evla:.2f} "
                            f"{reloc_evlo:.2f} {stla_mean:.2f} {stlo_mean:.2f} {S:.2f} {slow_obs:.2f} "
                            f"{slow_diff:.2f} {slow_std_dev:.2f} {BAZ:.2f} {baz_obs:.2f} {baz_diff:.2f} "
                            f"{baz_std_dev:.2f} {PRED_BAZ_X:.2f} {slow_x_obs:.2f} "
                            f"{del_x_slow:.2f} {x_std_dev:.2f} {PRED_BAZ_Y:.2f} {slow_y_obs:.2f} "
                            f"{del_y_slow:.2f} {y_std_dev:.2f} {az:.2f} {az_std_dev:.2f} {mag:.2f} {mag_std_dev:.2f} "
                            f"{mean_time:.2f} {pred_time:.2f} {time_diff:.2f} {times_std_dev:.2f} "
                            f"{error_ellipse_area:.2f} {width:.2f} {height:.2f} {theta:.2f} {mean_density:.2f} "
                            f"{multi} {phase} {no_stations} {','.join(stations)} "
                            f"{window[0]:.2f} {window[1]:.2f} {Boots}\n")

                        # there will be multiple lines so add these to this list.
                        newlines.append(newline)

                    else:
                        print(
                            "The error for this arrival is too large, not analysing this any further"
                        )
                        ## mark this cluster's points as noise (-1)
                        updated_labels = np.where(self.labels == i, -1,
                                                  self.labels)

                        newline = ""
                        newlines.append(newline)

                elif Filter == False:

                    usable_means[usable_arrivals] = np.array(
                        [slow_x_obs, slow_y_obs])

                    points_cluster = points_clusters[i]

                    tree = KDTree(points_cluster,
                                  leaf_size=int(self.points.shape[0] * 1.5))
                    points_rad = tree.query_radius(points_cluster,
                                                   r=epsilon,
                                                   count_only=True)
                    densities = points_rad / (np.pi * (epsilon**2))
                    mean_density = np.mean(densities)

                    # update the usable arrivals count
                    usable_arrivals += 1
                    name_label = name + "_" + str(usable_arrivals)

                    # define the newline to be added to the file
                    newline = (
                        f"{name_label} {evla:.2f} {evlo:.2f} {evdp:.2f} {reloc_evla:.2f} "
                        f"{reloc_evlo:.2f} {stla_mean:.2f} {stlo_mean:.2f} {S:.2f} {slow_obs:.2f} "
                        f"{slow_diff:.2f} {slow_std_dev:.2f} {BAZ:.2f} {baz_obs:.2f} {baz_diff:.2f} "
                        f"{baz_std_dev:.2f} {PRED_BAZ_X:.2f} {slow_x_obs:.2f} "
                        f"{del_x_slow:.2f} {x_std_dev:.2f} {PRED_BAZ_Y:.2f} {slow_y_obs:.2f} "
                        f"{del_y_slow:.2f} {y_std_dev:.2f} {az:.2f} {az_std_dev:.2f} {mag:.2f} {mag_std_dev:.2f} "
                        f"{mean_time:.2f} {pred_time:.2f} {time_diff:.2f} {times_std_dev:.2f} "
                        f"{error_ellipse_area:.2f} {width:.2f} {height:.2f} {theta:.2f} {mean_density:.2f} "
                        f"{multi} {phase} {no_stations} {','.join(stations)} "
                        f"{window[0]:.2f} {window[1]:.2f} {Boots}\n")

                    # there will be multiple lines so add these to this list.
                    newlines.append(newline)

                else:
                    print("Filter needs to be True or False")
                    exit()

        else:
            newline = ""
            newlines.append(newline)

        ## Write to file!

        # loop over the existing file to see if this observation is already recorded
        found = False
        added = False  # avoid writing the new lines twice if several lines match
        ## write the header to the file if it does not exist
        line_list = []
        if os.path.exists(file_path):
            with open(file_path, "r") as Multi_file:
                for line in Multi_file:
                    if name in line and phase in line and f"{stla_mean:.2f}" in line:
                        print("name and phase and stla in line, replacing")
                        if added == False:
                            line_list.extend(newlines)
                            added = True
                        else:
                            print("already added to file")
                        found = True
                    else:
                        line_list.append(line)
        else:
            with open(file_path, "w") as Multi_file:
                Multi_file.write(header)
                line_list.append(header)

        if not found:
            print("name or phase or stla not in line. Adding to the end.")
            line_list.extend(newlines)

        with open(file_path, "w") as Multi_file2:
            Multi_file2.write("".join(line_list))

        return newlines
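
A hypothetical usage sketch of the method above. The class that owns create_newlines is not shown in this example, so cluster_result below is an assumed instance of it (with its points, labels and cluster_* helpers already populated); the file path, window, bootstrap count and epsilon are placeholder values only.

# assumed: cluster_result is an instance of the (unshown) clustering-results class
new_lines = cluster_result.create_newlines(
    st=st,                                       # stream with SAC headers populated
    file_path='./results/cluster_results.txt',   # placeholder results file
    phase='SKS',
    window=[-10, 30],                            # relative window [tmin, tmax] in seconds
    Boots=1000,                                  # placeholder bootstrap count
    epsilon=0.5,                                 # placeholder clustering epsilon
    slow_vec_error=3,
    Filter=True,
)
print(new_lines)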
Example #3
stime = event_time + min_target
etime = event_time + max_target

# trim the stream
# Normalise and cut seismogram around defined window
st = st.copy().trim(starttime=stime, endtime=etime)
st = st.normalize()

evla = st[0].stats.sac.evla
evlo = st[0].stats.sac.evlo
evdp = st[0].stats.sac.evdp

distances = ca.get_distances(st, type="deg")
mean_dist = np.mean(distances)
geometry = ca.get_geometry(st)
centre_lo, centre_la = np.mean(geometry[:, 0]), np.mean(geometry[:, 1])
sampling_rate = st[0].stats.sampling_rate

mean_lo = (evlo + centre_lo) / 2
mean_la = (evla + centre_la) / 2

# get predicted slownesses and backazimuths
predictions = ca.pred_baz_slow(stream=st, phases=phases, one_eighty=True)

# find the line with the predictions for the phase of interest
row = np.where((predictions == phase))[0]
P, S, BAZ, PRED_BAZ_X, PRED_BAZ_Y, PRED_AZ_X, PRED_AZ_Y, DIST, TIME = predictions[
    row, :][0]
PRED_BAZ_X = float(PRED_BAZ_X)
PRED_BAZ_Y = float(PRED_BAZ_Y)
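
As a side note on the values just unpacked: under the common convention that x points east, y points north and the backazimuth is measured clockwise from north, the slowness-vector components relate to the scalar slowness and backazimuth via p_x = |p|*sin(baz) and p_y = |p|*cos(baz). The check below assumes that convention for pred_baz_slow, which this snippet does not confirm, so treat it purely as an illustration.

# illustrative consistency check (assumes an x-east / y-north frame with the
# backazimuth measured clockwise from north; pred_baz_slow's convention may differ)
S = float(S)
BAZ = float(BAZ)
check_x = S * np.sin(np.radians(BAZ))
check_y = S * np.cos(np.radians(BAZ))
print(check_x, PRED_BAZ_X)
print(check_y, PRED_BAZ_Y)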
"""

# import packages

import obspy
import circ_array as c

# read in data as an obspy stream

st = obspy.read('./data/19970525/*SAC')

stations = c.get_stations(stream=st)

geometry_degrees_absolute = c.get_geometry(stream=st,
                                           return_center=False,
                                           distance='degrees',
                                           verbose='False',
                                           relative='False')
geometry_degrees_relative = c.get_geometry(stream=st,
                                           return_center=False,
                                           distance='degrees',
                                           verbose='False',
                                           relative='True')

geometry_kilometres_absolute = c.get_geometry(stream=st,
                                              return_center=False,
                                              distance='km',
                                              verbose='False',
                                              relative='False')
geometry_kilometres_relative = c.get_geometry(stream=st,
                                              return_center=False,