Example #1
File: test_tau.py  Project: Keita1/obspy
    def test_p_iasp91_geo_fallback_manual(self):
        """
        Manual test for P phase in IASP91 given geographical input.

        This version of the test checks that things still work when
        geographiclib is not installed.
        """
        has_geographiclib_real = geodetics.HAS_GEOGRAPHICLIB
        geodetics.HAS_GEOGRAPHICLIB = False
        m = TauPyModel(model="iasp91")
        arrivals = m.get_travel_times_geo(source_depth_in_km=10.0,
                                          source_latitude_in_deg=20.0,
                                          source_longitude_in_deg=33.0,
                                          receiver_latitude_in_deg=55.0,
                                          receiver_longitude_in_deg=33.0,
                                          phase_list=["P"])
        geodetics.HAS_GEOGRAPHICLIB = has_geographiclib_real
        self.assertEqual(len(arrivals), 1)
        p_arrival = arrivals[0]

        self.assertEqual(p_arrival.name, "P")
        self.assertAlmostEqual(p_arrival.time, 412.43, 2)
        self.assertAlmostEqual(p_arrival.ray_param_sec_degree, 8.613, 3)
        self.assertAlmostEqual(p_arrival.takeoff_angle, 26.74, 2)
        self.assertAlmostEqual(p_arrival.incident_angle, 26.70, 2)
        self.assertAlmostEqual(p_arrival.purist_distance, 35.00, 2)
        self.assertEqual(p_arrival.purist_name, "P")
Example #2
File: test_tau.py  Project: htxu007/obspy
    def test_pierce_p_iasp91_geo(self):
        """
        Test single pierce point against output from TauP using geo data.

        This version of the test is used when geographiclib is installed
        """
        m = TauPyModel(model="iasp91")
        arrivals = m.get_pierce_points_geo(source_depth_in_km=10.0,
                                           source_latitude_in_deg=-45.0,
                                           source_longitude_in_deg=-50.0,
                                           receiver_latitude_in_deg=-80.0,
                                           receiver_longitude_in_deg=-50.0,
                                           phase_list=["P"])
        self.assertEqual(len(arrivals), 1)
        p_arr = arrivals[0]

        # Open test file.
        filename = os.path.join(DATA,
                                "taup_pierce_-mod_isp91_ph_P_-h_10_-evt_" +
                                "-45_-50_-sta_-80_-50")

        expected = np.genfromtxt(filename, skip_header=1)

        np.testing.assert_almost_equal(expected[:, 0],
                                       np.degrees(p_arr.pierce['dist']), 2)
        np.testing.assert_almost_equal(expected[:, 1],
                                       p_arr.pierce['depth'], 1)
        np.testing.assert_almost_equal(expected[:, 2],
                                       p_arr.pierce['time'], 1)
        np.testing.assert_almost_equal(expected[:, 3],
                                       p_arr.pierce['lat'], 1)
        np.testing.assert_almost_equal(expected[:, 4],
                                       p_arr.pierce['lon'], 1)
Example #3
File: test_tau.py  Project: QuLogic/obspy
    def test_regional_models(self):
        """
        Tests small regional models as this used to not work.

        Note: It looks like too much work to get a 1-layer model working.
        The problem is first in finding the moho, and second in coarsely
        sampling slowness. Also, why bother.
        """
        model_names = ["2_layer_model", "5_layer_model"]
        expected_results = [
            [("p", 18.143), ("Pn", 19.202), ("PcP", 19.884), ("sP", 22.054),
             ("ScP", 23.029), ("PcS", 26.410), ("s", 31.509), ("Sn", 33.395),
             ("ScS", 34.533)],
            [("Pn", 17.358), ("P", 17.666), ("p", 17.804), ("P", 17.869),
             ("PcP", 18.039), ("ScP", 19.988), ("sP", 22.640), ("sP", 22.716),
             ("sP", 22.992), ("PcS", 23.051), ("sP", 24.039), ("sP", 24.042),
             ("Sn", 30.029), ("S", 30.563), ("s", 30.801), ("S", 30.913),
             ("ScS", 31.208)]]

        for model_name, expects in zip(model_names, expected_results):
            with TemporaryWorkingDirectory():
                folder = os.path.abspath(os.curdir)
                build_taup_model(
                    filename=os.path.join(DATA, os.path.pardir,
                                          model_name + ".tvel"),
                    output_folder=folder, verbose=False)
                model = TauPyModel(os.path.join(folder, model_name + ".npz"))

            arrivals = model.get_ray_paths(source_depth_in_km=18.0,
                                           distance_in_degree=1.0)

            self.assertEqual(len(arrivals), len(expects))
            for arrival, expect in zip(arrivals, expects):
                self.assertEqual(arrival.name, expect[0])
                self.assertAlmostEqual(arrival.time, expect[1], 3)
Example #4
File: utils.py  Project: NeilWilkins/vespa
def get_first_arrival(st, model='ak135'):
    '''
    Returns first-arrival information for a particular stream, computed with a theoretical velocity model (ak135 or iasp91).
    
    Output is phase name, arrival time (in s after origin) and slowness (in s / km).
        
    Parameters
    ----------
    st : ObsPy Stream object
        Stream of SAC format seismograms for the seismic array
    model : string
        Model to use for the travel times, either 'ak135' or 'iasp91'.

    Returns
    -------
    phase : Phase object
        Phase object containing the phase name, arrival time, and slowness of the first arrival
    '''

    # Read event depth and great circle distance from SAC header
    depth = st[0].stats.sac.evdp
    delta = st[0].stats.sac.gcarc

    taup = TauPyModel(model)
    first_arrival = taup.get_travel_times(depth, delta)[0]

    phase = Phase(first_arrival.name, first_arrival.time, first_arrival.ray_param_sec_degree / G_KM_DEG)

    return phase
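A hypothetical usage of get_first_arrival, assuming st is a Stream of SAC seismograms whose headers already carry evdp and gcarc (the path is a placeholder):

from obspy import read

st = read('/path/to/array/*.sac')  # placeholder path to SAC files
first = get_first_arrival(st, model='ak135')
print(first)  # Phase with name, arrival time (s), slowness (s/km)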
Example #5
File: utils.py  Project: NeilWilkins/vespa
def get_arrivals(st, model='ak135'):
    '''
    Returns complete arrival information for a particular stream, computed with a theoretical velocity model (ak135 or iasp91).
    
    Output is phase name, arrival time (in s after origin) and slowness (in s / km).
        
    Parameters
    ----------
    st : ObsPy Stream object
        Stream of SAC format seismograms for the seismic array
    model : string
        Model to use for the travel times, either 'ak135' or 'iasp91'.

    Returns
    -------
    phase_list : list
        List containing Phase objects containing the phase name, arrival time, and slowness of each arrival
    '''

    # Read event depth and great circle distance from SAC header
    depth = st[0].stats.sac.evdp
    delta = st[0].stats.sac.gcarc

    tau_model = TauPyModel(model)
    arrivals = tau_model.get_travel_times(depth, delta)
    
    phase_list = []
    
    for arrival in arrivals:
        phase = Phase(arrival.name, arrival.time, arrival.ray_param_sec_degree / G_KM_DEG)
        phase_list.append(phase)

    return phase_list
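Both helpers above divide ray_param_sec_degree by G_KM_DEG to convert slowness from s/deg to s/km. A minimal sketch of that conversion, assuming G_KM_DEG is the vespa package's kilometres-per-degree constant (2*pi*R/360 for R = 6371 km, about 111.195 km/deg):

# Sketch only: G_KM_DEG and the sample slowness are assumptions.
G_KM_DEG = 2 * 3.14159265 * 6371.0 / 360.0  # ~111.195 km per degree

slowness_s_per_deg = 8.613          # e.g. a P arrival's ray_param_sec_degree
slowness_s_per_km = slowness_s_per_deg / G_KM_DEG
print(round(slowness_s_per_km, 4))  # ~0.0775 s/km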
Example #6
File: test_tau.py  Project: Keita1/obspy
    def test_surface_wave_ttimes(self):
        """
        Tests the calculation of surface ttimes.

        Tested against a reference output from the Java TauP version.
        """
        for model, table in [("iasp91", "iasp91_surface_waves_table.txt"),
                             ("ak135", "ak135_surface_waves_table.txt")]:
            m = TauPyModel(model=model)
            filename = os.path.join(DATA, table)

            with open(filename, "rt") as fh:
                for line in fh:
                    _, distance, depth, phase, time, ray_param, _, _ \
                        = line.split()
                    distance, depth, time, ray_param = \
                        map(float, [distance, depth, time, ray_param])

                    arrivals = m.get_travel_times(
                        source_depth_in_km=depth, distance_in_degree=distance,
                        phase_list=[phase])

                    self.assertTrue(len(arrivals) > 0)

                    # Potentially multiple arrivals. Get the one closest in
                    # time and closest in ray parameter.
                    arrivals = sorted(
                        arrivals,
                        key=lambda x: (abs(x.time - time),
                                       abs(x.ray_param_sec_degree -
                                           ray_param)))
                    arrival = arrivals[0]
                    self.assertEqual(round(arrival.time, 2), round(time, 2))
                    self.assertEqual(round(arrival.ray_param_sec_degree, 2),
                                     round(ray_param, 2))
Example #7
File: test_tau.py  Project: Keita1/obspy
    def test_different_models(self):
        """
        Open all included models and make sure that they can produce
        reasonable travel times.
        """
        models = ["1066a", "1066b", "ak135", "herrin", "iasp91", "prem",
                  "sp6", "jb", "pwdk", "ak135f_no_mud"]
        for model in models:
            m = TauPyModel(model=model)

            # Get a p phase.
            arrivals = m.get_travel_times(
                source_depth_in_km=10.0, distance_in_degree=50.0,
                phase_list=["P"])
            # AK135 travel time.
            expected = 534.4
            self.assertTrue(abs(arrivals[0].time - expected) < 5)

            # Get an s phase.
            arrivals = m.get_travel_times(
                source_depth_in_km=10.0, distance_in_degree=50.0,
                phase_list=["S"])
            # AK135 travel time.
            expected = 965.1
            # Some models do produce s-waves but they are very far from the
            # AK135 value.
            self.assertTrue(abs(arrivals[0].time - expected) < 50)
Example #8
def main():
                                      #           MAIN PROGRAM BODY
    OT,stlat,stlon,evlat,evlon,depth = getoptions()
    origin_time = UTCDateTime(str(OT))
    result = client.distaz(stalat=stlat, stalon=stlon, evtlat=evlat,evtlon=evlon)
    model = TauPyModel(model="AK135")
    arrivals = model.get_travel_times(source_depth_in_km=depth,distance_in_degree=result['distance'])#,
                                     #phase_list = ['P','PcP','PP','PKiKP','S','SS','ScS','SKiKS'])
    print "Distance = {0:.1f} arc degrees.".format(result['distance'])
    print "{0:.0f} Km distance.".format(result['distance']*111.25)
    print "{0:.0f} deg back Azimuth.".format(result['backazimuth'])
    table = client.traveltime(evloc=(evlat,evlon),staloc=[(stlat,stlon)],evdepth=depth)
    print "Selected phase list:\n"
    print (table.decode())

                   #   Print the phases, travel time and forecasted arrival time.
    phasename = []
    phasetime = []
    arrivaltime = []
    print "For origin time {}, ".format(origin_time)
    print "TauP big list of phases and arrival times:"
    for i in range(0,len(arrivals)):
        phasename.append(arrivals[i].name)
        phasetime.append(arrivals[i].time)
        at = origin_time+(arrivals[i].time)
        arrivaltime.append(at)
        print('Phase: {0} \t arrives in {1:.2f} sec. at time {2:02.0f}:{3:02.0f}:{4:02.0f}.{5:02.0f}'
              .format(arrivals[i].name,arrivals[i].time,at.hour,at.minute,at.second,at.microsecond/10000))
    arrivalpaths = model.get_ray_paths(source_depth_in_km=depth,distance_in_degree=result['distance'])#,\
                                #        phase_list = ['P','PcP','PP','PKiKP','S','SS','ScS','SKiKS'])
    arrivalpaths.plot()
Example #9
def get_station_delays(station_coords,sources,velmod='PREM',phase_list=['s','S']):
    '''
    Given an array of station coordinates and sources, calculate the travel
    time from each source to each site.

    velmod is the FULL path to the .npz file used by TauPy
    '''
    
    from obspy.taup import TauPyModel
    from numpy import zeros
    from obspy.geodetics.base import locations2degrees

    model = TauPyModel(model=velmod)
    
    # Initialize output variable
    delay_time=zeros((len(sources),len(station_coords)))
    
    #Loop over sources
    for ksource in range(len(sources)):
        
        print('Working on source %d of %d ' % (ksource,len(sources)))
        
        #loop over sites
        for ksite in range(len(station_coords)):
        
            distance_in_degrees=locations2degrees(station_coords[ksite,1],station_coords[ksite,0],
                                                      sources[ksource,2],sources[ksource,1])
            
            arrivals = model.get_travel_times(source_depth_in_km=sources[ksource,3],
                                  distance_in_degree=distance_in_degrees,phase_list=phase_list)
            
            delay_time[ksource,ksite]=arrivals[0].time
            
    return delay_time
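A hypothetical call to get_station_delays; the array layouts below are inferred from the indexing above (station_coords columns are lon, lat; sources columns are id, lon, lat, depth in km), and the model path is a placeholder:

import numpy as np

station_coords = np.array([[-120.0, 36.0],
                           [-121.5, 37.2]])      # columns: lon, lat
sources = np.array([[0, -122.0, 38.0, 15.0],
                    [1, -121.0, 36.5, 25.0]])    # columns: id, lon, lat, depth_km

# velmod must be the FULL path to a TauPy .npz file, per the docstring.
delays = get_station_delays(station_coords, sources,
                            velmod='/path/to/model.npz', phase_list=['s', 'S'])
print(delays.shape)  # (n_sources, n_stations)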
Example #10
File: test_tau.py  Project: Fran89/obspy
    def test_single_path_ak135(self):
        """
        Test the raypath for a single phase. This time for model AK135.
        """
        filename = os.path.join(
            DATA, "taup_path_-o_stdout_-h_10_-ph_P_-deg_35_-mod_ak135")
        expected = np.genfromtxt(filename, comments='>')

        m = TauPyModel(model="ak135")
        arrivals = m.get_ray_paths(source_depth_in_km=10.0,
                                   distance_in_degree=35.0, phase_list=["P"])
        self.assertEqual(len(arrivals), 1)

        # Interpolate both paths to 100 samples and make sure they are
        # approximately equal.
        sample_points = np.linspace(0, 35, 100)

        interpolated_expected = np.interp(
            sample_points,
            expected[:, 0],
            expected[:, 1])

        interpolated_actual = np.interp(
            sample_points,
            np.round(np.degrees(arrivals[0].path['dist']), 2),
            np.round(6371 - arrivals[0].path['depth'], 2))

        self.assertTrue(np.allclose(interpolated_actual,
                                    interpolated_expected, rtol=1E-4, atol=0))
Example #11
File: test_tau.py  Project: htxu007/obspy
    def test_single_path_geo_iasp91(self):
        """
        Test the raypath for a single phase given geographical input.

        This tests the case when geographiclib is installed.
        """
        filename = os.path.join(DATA,
                                "taup_path_-mod_iasp91_-o_stdout_-h_10_" +
                                "-ph_P_-sta_-45_-60_evt_-80_-60")
        expected = np.genfromtxt(filename, comments='>')

        m = TauPyModel(model="iasp91")
        arrivals = m.get_ray_paths_geo(source_depth_in_km=10.0,
                                       source_latitude_in_deg=-80.0,
                                       source_longitude_in_deg=-60.0,
                                       receiver_latitude_in_deg=-45.0,
                                       receiver_longitude_in_deg=-60.0,
                                       phase_list=["P"])
        self.assertEqual(len(arrivals), 1)

        # Interpolate both paths to 100 samples and make sure they are
        # approximately equal.
        sample_points = np.linspace(0, 35, 100)

        interpolated_expected_depth = np.interp(
            sample_points,
            expected[:, 0],
            expected[:, 1])
        interpolated_expected_lat = np.interp(
            sample_points,
            expected[:, 0],
            expected[:, 2])
        interpolated_expected_lon = np.interp(
            sample_points,
            expected[:, 0],
            expected[:, 3])

        interpolated_actual_depth = np.interp(
            sample_points,
            np.round(np.degrees(arrivals[0].path['dist']), 2),
            np.round(6371 - arrivals[0].path['depth'], 2))
        interpolated_actual_lat = np.interp(
            sample_points,
            np.round(np.degrees(arrivals[0].path['dist']), 2),
            np.round(arrivals[0].path['lat'], 2))
        interpolated_actual_lon = np.interp(
            sample_points,
            np.round(np.degrees(arrivals[0].path['dist']), 2),
            np.round(arrivals[0].path['lon'], 2))

        np.testing.assert_allclose(interpolated_actual_depth,
                                   interpolated_expected_depth,
                                   rtol=1E-4, atol=0)
        np.testing.assert_allclose(interpolated_actual_lat,
                                   interpolated_expected_lat,
                                   rtol=1E-4, atol=0)
        np.testing.assert_allclose(interpolated_actual_lon,
                                   interpolated_expected_lon,
                                   rtol=1E-4, atol=0)
Example #12
def get_taupy_points(center_lat, center_lon, ev_lat, ev_lon, ev_depth, stime,
                     etime, mini, maxi, ev_otime, phase_shift, sll, slm):

    distance = locations2degrees(center_lat, center_lon, ev_lat, ev_lon)
    # print(distance)

    model = TauPyModel(model="ak135")
    arrivals = model.get_pierce_points(ev_depth, distance)
    # arrivals = earthmodel.get_pierce_points(ev_depth,distance,phase_list=('PP','P^410P'))

    # compute the vespagram window
    start_vespa = stime - mini
    end_vespa = etime - maxi

    # compare the arrival times with the time window
    count = 0
    k = 0
    phase_name_info = []
    phase_slowness_info = []
    phase_time_info = []

    for i_elem in arrivals:
        # print(i_elem)
        dummy_phase = arrivals[count]
        # print(dummy_phase)
        # phase time in seconds
        taup_phase_time = dummy_phase.time
        # print(taup_phase_time)
        # slowness of the phase
        taup_phase_slowness = dummy_phase.ray_param_sec_degree
        # compute the UTC travel phase time
        taup_phase_time2 = ev_otime + taup_phase_time + phase_shift

        # print(start_vespa)
        # print(end_vespa)
        # print(taup_phase_time2)

        if start_vespa <= taup_phase_time2 <= end_vespa:  # time window
            if sll <= taup_phase_slowness <= slm:  # slowness window

                # seconds inside the vespagram
                taup_mark = taup_phase_time2 - start_vespa
                # store the information
                phase_name_info.append(dummy_phase.name)
                phase_slowness_info.append(dummy_phase.ray_param_sec_degree)
                phase_time_info.append(taup_mark)
                # print(phases_info[k])
                k += 1

        count += 1

    # print(phase_name_info)

    phase_slowness_info = np.array(phase_slowness_info)
    phase_time_info = np.array(phase_time_info)

    return phase_name_info, phase_slowness_info, phase_time_info
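A hypothetical invocation of get_taupy_points; the coordinates, window bounds, and slowness limits (in s/deg) are placeholders, and ev_otime is assumed to be the event origin time as a UTCDateTime:

from obspy import UTCDateTime

ev_otime = UTCDateTime("2016-01-01T12:00:00")
names, slows, times = get_taupy_points(
    35.0, 139.0,            # array center lat, lon
    36.2, 140.1, 30.0,      # event lat, lon, depth (km)
    stime=ev_otime + 60, etime=ev_otime + 900,
    mini=0.0, maxi=0.0, ev_otime=ev_otime,
    phase_shift=0.0, sll=4.0, slm=10.0)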
Example #13
   def migrate(self,plot=False):
      import geopy
      from geopy.distance import VincentyDistance

      '''
      This is a rewritten function that combines the functions find_pierce_coor and
      migrate_1d so that it's more efficient.  Still in testing stages. RM 2/6/16
      '''

      depth_range = np.arange(50,800,5)        #set range of pierce points
      value       = np.zeros((len(depth_range)))

      #geodetic info
      bearing     = self.az
      lon_s = self.ses3d_seismogram.sy
      lat_s = 90.0-self.ses3d_seismogram.sx
      lon_r = self.ses3d_seismogram.ry
      lat_r = 90.0-self.ses3d_seismogram.rx
      origin      = geopy.Point(lat_s, lon_s)

      #find how far away the pierce point is
      model  = TauPyModel(model='pyrolite_5km')

      for i in range(0,len(depth_range)):
         phase = 'P'+str(depth_range[i])+'s'
         pierce = model.get_pierce_points(self.eq_depth,self.delta_deg,phase_list=[phase])
         tt     = model.get_travel_times(self.eq_depth,self.delta_deg,phase_list=['P',phase])

         #in case there's duplicate phase arrivals
         for j in range(0,len(tt)):
            if tt[j].name == 'P':
               p_arr = tt[j].time           
            elif tt[j].name == phase:
               phase_arr = tt[j].time

         #determine value 
         Pds_time = phase_arr - p_arr
         i_start  = int((0.0 - self.window_start)/self.ses3d_seismogram.dt)
         i_t      = int(Pds_time/self.ses3d_seismogram.dt) + i_start
         value[i] = self.prf[i_t]

         points = pierce[0].pierce
         for j in range(0,len(points)):
            if points[j]['depth'] == depth_range[i] and points[j]['dist']*(180.0/np.pi) > 20.0:
               prc_dist = points[j]['dist']*(180.0/np.pi)
               d_km = prc_dist * ((2*np.pi*6371.0/360.0))
               destination = VincentyDistance(kilometers=d_km).destination(origin,bearing)
               lat = destination[0]
               lon = destination[1]
               row = {'depth':depth_range[i],'dist':prc_dist,'lat':lat,'lon':lon,'value':value[i]}
               self.pierce_dict.append(row)

      if plot == True:
         plt.plot(value,depth_range)
         plt.gca().invert_yaxis()
         plt.show()

      return value,depth_range
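Note that VincentyDistance is removed in geopy 2.x; an equivalent destination computation with the maintained geodesic class looks like this (a sketch, not part of the original method):

from geopy.distance import geodesic

# Same pierce-point projection as above, using geodesic instead of Vincenty.
destination = geodesic(kilometers=d_km).destination(origin, bearing)
lat, lon = destination.latitude, destination.longitude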
Example #14
File: test_tau.py  Project: Fran89/obspy
 def test_p_ak135(self):
     """
     Test P phase arrival against TauP output in model AK135.
     """
     m = TauPyModel(model="ak135")
     arrivals = m.get_travel_times(source_depth_in_km=10.0,
                                   distance_in_degree=35.0,
                                   phase_list=["P"])
     self._compare_arrivals_with_file(
         arrivals, "taup_time_-h_10_-ph_ttall_-deg_35_-mod_ak135")
Example #15
File: test_tau.py  Project: Keita1/obspy
    def test_buried_receiver(self):
        """
        Simple test for a buried receiver.
        """
        m = TauPyModel(model="iasp91")
        arrivals = m.get_travel_times(
            source_depth_in_km=10.0, distance_in_degree=90.0,
            receiver_depth_in_km=50,
            phase_list=["P", "PP", "S"])

        self._compare_arrivals_with_file(arrivals, "buried_receivers.txt")
Example #16
File: test_tau.py  Project: Fran89/obspy
 def test_ak135(self):
     """
     Test travel times for lots of phases against output from TauP in model
     AK135.
     """
     m = TauPyModel(model="ak135")
     arrivals = m.get_travel_times(source_depth_in_km=10.0,
                                   distance_in_degree=35.0,
                                   phase_list=["ttall"])
     self._compare_arrivals_with_file(
         arrivals, "taup_time_-h_10_-ph_ttall_-deg_35_-mod_ak135")
Example #17
File: test_tau.py  Project: QuLogic/obspy
 def test_p_pwdk(self):
     """
     Test P phase arrival against TauP output in model pwdk
     with different cache values to test `TauModel.load_from_depth_cache`
     """
     for cache in self.caches:
         m = TauPyModel(model="pwdk", cache=cache)
         arrivals = m.get_travel_times(source_depth_in_km=10.0,
                                       distance_in_degree=35.0,
                                       phase_list=["P"])
         self._compare_arrivals_with_file(
             arrivals, "taup_time_-h_10_-ph_P_-deg_35_-mod_pwdk")
Example #18
 def test_ppointvsobspytaup_S2P(self):
     slowness = 12.33
     evdep = 12.4
     evdist = 67.7
     pp1 = self.model.ppoint_distance(200, slowness, phase="P")
     model = TauPyModel(model="iasp91")
     arrivals = model.get_ray_paths(evdep, evdist, ("S250p",))
     arrival = arrivals[0]
     index = np.searchsorted(arrival.path["depth"][::-1], 200)
     pdist = arrival.path["dist"]
     pp2 = degrees2kilometers((pdist[-1] - pdist[-index - 1]) * 180 / np.pi)
     self.assertLess(abs(pp1 - pp2) / pp2, 0.2)
Example #19
def zero_phase(tr,phase,rf_window=[-10,120],**kwargs):
##############################################################################
   '''
   Finds predicted PP arrival and zeros a window centered on the arrival
   args--------------------------------------------------------------------------
   tr: obspy trace
   phase: phase to zero
   rf_window: receiver function time window [start, end] in seconds

   kwargs---------------------------------------------------------------------
   window_half_dur : half duration of zero window (default = 2.5 s)
   taup_model
   '''
   window_half_dur = kwargs.get('window_half_dur',2.5)
   taup_model = kwargs.get('taup_model','none')
   if taup_model == 'none':
      taup_model = TauPyModel('prem')

   arrs = taup_model.get_travel_times(source_depth_in_km=tr.stats.evdp,
                                      distance_in_degree=tr.stats.gcarc,
                                      phase_list=['P',phase])
   print('arrs = ', arrs)
   P_arr  = 'none'
   phase_arr = 'none'

   for arr in arrs:
      if arr.name == 'P':
         P_arr = arr
      elif arr.name == phase:
         phase_arr = arr

   if P_arr == 'none' or phase_arr == 'none':
      raise ValueError('problem occurred in function "zero_phase", no matching arrivals found')
   else:
      P_time = P_arr.time
      phase_time = phase_arr.time
      delay_time = phase_time - P_time
      zero_window_center = -1.0*rf_window[0] + delay_time
      zero_window_start  = zero_window_center - window_half_dur
      zero_window_end    = zero_window_center + window_half_dur
      zero_window_start_index = int(zero_window_start/tr.stats.delta)
      zero_window_end_index   = int(zero_window_end/tr.stats.delta)

      #case 1: entire window is in range
      if zero_window_start_index >= 0 and zero_window_end_index <= len(tr.data):
         tr.data[zero_window_start_index:zero_window_end_index] = 0.0
      #case 2: end of window is out of range
      if zero_window_start_index >= 0 and zero_window_end_index >= len(tr.data):
         tr.data[zero_window_start_index:] = 0.0
      #case 3: entire window is out of range
      if zero_window_start_index >= len(tr.data):
         print "PP arrives outside the receiver function window"
Example #20
File: test_tau.py  Project: QuLogic/obspy
 def test_ak135(self):
     """
     Test travel times for lots of phases against output from TauP in model
     AK135 with different cache values to test
     `TauModel.load_from_depth_cache`
     """
     for cache in self.caches:
         m = TauPyModel(model="ak135", cache=cache)
         arrivals = m.get_travel_times(source_depth_in_km=10.0,
                                       distance_in_degree=35.0,
                                       phase_list=["ttall"])
         self._compare_arrivals_with_file(
             arrivals, "taup_time_-h_10_-ph_ttall_-deg_35_-mod_ak135")
Example #21
 def test_ppointvsobspytaup_P2S(self):
     slowness = 6.28
     evdep = 12.4
     evdist = 67.7
     depth = 200
     pp1 = self.model.ppoint_distance(depth, slowness)
     model = TauPyModel(model="iasp91")
     arrivals = model.get_ray_paths(evdep, evdist, ("P250s",))
     arrival = arrivals[0]
     index = np.searchsorted(arrival.path["depth"][::-1], depth)
     pdist = arrival.path["dist"]
     pp2 = degrees2kilometers((pdist[-1] - pdist[-index - 1]) * 180 / np.pi)
     self.assertLess(abs(pp1 - pp2) / pp2, 0.1)
Example #22
File: test_tau.py  Project: Keita1/obspy
    def test_single_path_geo_fallback_iasp91(self):
        """
        Test the raypath for a single phase given geographical input.

        This version of the test checks that things still work when
        geographiclib is not installed.
        """
        has_geographiclib_real = geodetics.HAS_GEOGRAPHICLIB
        geodetics.HAS_GEOGRAPHICLIB = False
        filename = os.path.join(DATA,
                                "taup_path_-o_stdout_-h_10_-ph_P_-deg_35")
        expected = np.genfromtxt(filename, comments='>')

        m = TauPyModel(model="iasp91")
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            arrivals = m.get_ray_paths_geo(source_depth_in_km=10.0,
                                           source_latitude_in_deg=-80.0,
                                           source_longitude_in_deg=-60.0,
                                           receiver_latitude_in_deg=-45.0,
                                           receiver_longitude_in_deg=-60.0,
                                           phase_list=["P"])
            geodetics.HAS_GEOGRAPHICLIB = has_geographiclib_real
            assert issubclass(w[-1].category, UserWarning)

        self.assertEqual(len(arrivals), 1)

        # Interpolate both paths to 100 samples and make sure they are
        # approximately equal.
        sample_points = np.linspace(0, 35, 100)

        interpolated_expected = np.interp(
            sample_points,
            expected[:, 0],
            expected[:, 1])

        interpolated_actual = np.interp(
            sample_points,
            np.round(np.degrees(arrivals[0].path['dist']), 2),
            np.round(6371 - arrivals[0].path['depth'], 2))

        np.testing.assert_allclose(interpolated_actual, interpolated_expected,
                                   rtol=1E-4, atol=0)

        # NB: we do not check path['lat'] and path['lon'] here, as these
        # are not calculated when geographiclib is not installed. We check
        # that they are not present.
        with self.assertRaises(ValueError):
            arrivals[0].path["lat"]
        with self.assertRaises(ValueError):
            arrivals[0].path["lon"]
Example #23
   def find_pierce_coor(self,plot='False'):
      import geopy
      from geopy.distance import VincentyDistance

      '''
      Given an instance of the receiver function class, this function
      returns the latitude and longitude of all receiver-side pierce points
      of Pds in a given depth range (the default range is 50-800 km).
      NOTE:
      be careful: ses3d typically uses colatitude, while
      this function returns latitude '''

      depth_range = np.arange(50,800,5)        #set range of pierce points

      #geodetic info
      bearing     = self.az
      lon_s = self.ses3d_seismogram.sy
      lat_s = 90.0-self.ses3d_seismogram.sx
      lon_r = self.ses3d_seismogram.ry
      lat_r = 90.0-self.ses3d_seismogram.rx
      origin      = geopy.Point(lat_s, lon_s)

      #find how far away the pierce point is
      model  = TauPyModel(model='pyrolite_5km')

      for i in range(0,len(depth_range)):
         phase = 'P'+str(depth_range[i])+'s'
         pierce = model.get_pierce_points(self.eq_depth,self.delta_deg,phase_list=[phase])
         points = pierce[0].pierce
         for j in range(0,len(points)):
            if points[j]['depth'] == depth_range[i] and points[j]['dist']*(180.0/np.pi) > 25.0:
               prc_dist = points[j]['dist']*(180.0/np.pi)
               d_km = prc_dist * ((2*np.pi*6371.0/360.0))
               destination = VincentyDistance(kilometers=d_km).destination(origin,bearing)
               lat = destination[0]
               lon = destination[1]
               value = 0
               row = {'depth':depth_range[i],'dist':prc_dist,'lat':lat,'lon':lon,'value':value}
               self.pierce_dict.append(row)

      if plot=='True':
         m = Basemap(projection='hammer',lon_0=0)
         m.drawmapboundary()
         m.drawcoastlines()
         m.drawgreatcircle(lon_s,lat_s,lon_r,lat_r,linewidth=1,color='b',alpha=0.5)

         for i in range(len(self.pierce_dict)):
            x,y = m(self.pierce_dict[i]['lon'],self.pierce_dict[i]['lat'])
            m.scatter(x,y,5,marker='o',color='r')
         plt.show()
Example #24
File: test_tau.py  Project: Keita1/obspy
    def test_underside_reflections(self):
        """
        Tests the calculation of a couple of underside reflection phases.
        """
        m = TauPyModel(model="iasp91")
        # If an interface that is not in the model is used for a phase name,
        # it should snap to the next closest interface. This is reflected in
        # the purist name of the arrivals.
        arrivals = m.get_travel_times(
            source_depth_in_km=10.0, distance_in_degree=90.0,
            phase_list=["P", "PP", "P^410P", "P^660P", "P^300P", "P^400P",
                        "P^500P", "P^600P"])

        self._compare_arrivals_with_file(arrivals, "underside_reflections.txt")
Example #25
def pds_time(evdp,gcarc,depth,model_1d):
##################################################################################
   '''
   Calculates the travel time of a Pds phase based on the spherical travel time 
   equation (e.g., Eager et al. 2010)

   args--------------------------------------------------------------------------
   evdp: event depth
   gcarc: great circle distance
   depth: the conversion depth
   model_1d: velocity model
   '''
   taup_model = TauPyModel(model_1d)
   p_arrs = taup_model.get_travel_times(evdp,gcarc,['P'])
Example #26
def plot_expected_arrivals(source_depth, distance, ref_model, phase_list, seislist):
###############################################################################
   '''
   Plot expected arrival times based on the seismic reference model ref_model.
   ref_model must be a string. Source depth in km, distance in degrees.
   phase_list is the list of phase names you want to plot. seislist is a list
   of file names, each holding a seismic trace as a two-column array.
   See the obspy taup manual.
   '''

   model = TauPyModel(model=ref_model)
   arrivals = model.get_travel_times(source_depth_in_km=source_depth, \
           distance_in_degree=distance)

   seis_number = len(seislist)
   #seis = seislist
   #seis_array = np.loadtxt(seis) 
   #max_value = seis_array[:,1].max()

   arrive_list = str(arrivals).strip().split('\n')
   arrive_array = [ii.split() for ii in arrive_list]
   phase_dict = dict()
   for ii in range(1,len(arrive_array)):
      phase_dict[arrive_array[ii][0]] = float(arrive_array[ii][4])

   print('Phases Available: \n')
   print(list(phase_dict.keys()))

   for jj in range(0,len(seislist)):
      plt.subplot(seis_number,1,(jj+1))
      seis_array = np.loadtxt(seislist[jj])
      max_value = seis_array[:,1].max()*0.9

      #Remove y lables
      ax = plt.gca()
      ax.set_yticklabels([])
      #Label plot
      plt.title(seislist[jj])
      plt.xlabel('Seconds')
      #change limits
      plt.xlim(seis_array[0,0],seis_array[len(seis_array)-1,0])
      
      for ii in phase_list:
         plt.axvline(x=phase_dict[ii], ymin=-1.0, ymax = 1.0, \
                linewidth=2, color='gold')
         plt.text(phase_dict[ii],max_value,ii)
         plt.text(0,max_value,'Vel. model '+ref_model)
      plt.plot(seis_array[:,0],seis_array[:,1])

   plt.show()
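Parsing str(arrivals) as done above is fragile; the same phase-to-time mapping can be read directly from the Arrival attributes. A sketch of the alternative (not part of the original function):

# Equivalent to the string-parsing above, one entry per phase name.
phase_dict = {arr.name: arr.time for arr in arrivals}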
Example #27
 def test_many_identically_named_phases(self):
     """
     Regression test to make sure obspy.taup works with models that
      produce many identically named seismic phases.
     """
     with TemporaryWorkingDirectory():
         folder = os.path.abspath(os.curdir)
         model_name = "smooth_geodynamic_model"
         build_taup_model(
             filename=os.path.join(DATA, model_name + ".tvel"),
             output_folder=folder, verbose=False)
         m = TauPyModel(os.path.join(folder, model_name + ".npz"))
     arr = m.get_ray_paths(172.8000, 46.762440693494824, ["SS"])
     self.assertGreater(len(arr), 10)
Example #28
def get_travel_times(station, earthquake):
    """
    Calculate travel times for phases using obspy.

    Return a dictionary with phase name as key and arrival time as the value.
    """
    dist = locations2degrees(station[0], station[1],
                             earthquake[0], earthquake[1])
    model = TauPyModel(model='iasp91')
    arrivals = model.get_travel_times(source_depth_in_km=earthquake[2],
                                      distance_in_degree=dist)
    travel_times = {}
    for arrival in arrivals:
        travel_times[arrival.name] = arrival.time
    return travel_times
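A hypothetical call to the helper above; station is (lat, lon) and earthquake is (lat, lon, depth in km):

travel_times = get_travel_times(station=(35.0, 139.0),
                                earthquake=(36.2, 140.1, 30.0))
print(travel_times.get("P"), travel_times.get("S"))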
Example #29
File: test_tau.py  Project: junlysky/obspy
    def test_arrivals_class(self):
        """
        Tests list operations on the Arrivals class.

        See #1518.
        """
        model = TauPyModel(model='iasp91')
        arrivals = model.get_ray_paths(source_depth_in_km=0,
                                       distance_in_degree=1,
                                       phase_list=['Pn', 'PmP'])
        self.assertEqual(len(arrivals), 2)
        # test copy
        self.assertTrue(isinstance(arrivals.copy(), Arrivals))
        # test sum
        self.assertTrue(isinstance(arrivals + arrivals, Arrivals))
        self.assertTrue(isinstance(arrivals + arrivals[0], Arrivals))
        # test multiplying
        self.assertTrue(isinstance(arrivals * 2, Arrivals))
        arrivals *= 3
        self.assertEqual(len(arrivals), 6)
        self.assertTrue(isinstance(arrivals, Arrivals))
        # test slicing
        self.assertTrue(isinstance(arrivals[2:5], Arrivals))
        # test appending
        arrivals.append(arrivals[0])
        self.assertEqual(len(arrivals), 7)
        self.assertTrue(isinstance(arrivals, Arrivals))
        # test assignment
        arrivals[0] = arrivals[-1]
        self.assertTrue(isinstance(arrivals, Arrivals))
        arrivals[2:5] = arrivals[1:4]
        self.assertTrue(isinstance(arrivals, Arrivals))
        # test assignment with wrong type
        with self.assertRaises(TypeError):
            arrivals[0] = 10.
        with self.assertRaises(TypeError):
            arrivals[2:5] = [0, 1, 2]
        with self.assertRaises(TypeError):
            arrivals.append(arrivals)
        # test add and mul with wrong type
        with self.assertRaises(TypeError):
            arrivals + [2, ]
        with self.assertRaises(TypeError):
            arrivals += [2, ]
        with self.assertRaises(TypeError):
            arrivals * [2, ]
        with self.assertRaises(TypeError):
            arrivals *= [2, ]
Example #30
    def exclude_by_local_catalog(self,catalogue):

        model = TauPyModel(model="iasp91")
        
        for tr in self.stream:
            tr.detrend('demean')


        t_total = 0.0
        for trace in self.stream:
            t_total += trace.stats.npts
            
            
        for event in catalogue:
            # get origin time
            t0 = event.origins[0].time
            lon0 = event.origins[0].longitude
            lat0 = event.origins[0].latitude
            depth0 = event.origins[0].depth/1000.
            coords = self.inv.get_coordinates(self.ids[0])
            data_start = self.stream[0].stats.starttime
            if t0 < data_start-24*60*60.:
                continue
            data_end = self.stream[-1].stats.endtime
            if t0 > data_end:
                continue
            dist = gps2dist_azimuth(lat0,lon0,
                    coords["latitude"],coords["longitude"])[0]/1000.
            p_arrival = model.get_travel_times(source_depth_in_km=depth0,
                                  distance_in_degree=dist/111.19,phase_list=["P"])
            if len(p_arrival)==0:
                tcut1 = t0
            else:
                tcut1 = t0 + p_arrival[0].time - 10.0 #10s before p arrival
            if tcut1<t0:
                tcut1 = t0
            tcut2 = t0 + dist/1.0 + 60. #slowest surface-wave arrival plus one minute
            self.stream.cutout(starttime=tcut1,endtime=tcut2)


        t_kept = 0.0
        for trace in self.stream:
            t_kept += trace.stats.npts
        

        print('* Excluded all events in local catalogue.', file=self.ofid)
        print('* Lost %g percent of original traces' %((t_total-t_kept)/t_total*100), file=self.ofid)
        return
Example #31
class Client(object):
    def __init__(self, stationinfo, mseeddir, sacdir, model='prem'):
        self.mseeddir = mseeddir
        self.sacdir = sacdir
        self.stations = self._read_stations(stationinfo)
        self.model = TauPyModel(model=model)

    def _read_stations(self, stationinfo):
        """
        Read station information from station metadata file.

        Format of station information:

            NET.STA  latitude  longitude  elevation
        """
        stations = {} 
        with open(stationinfo, "r") as f:
            for line in f:
                name, stla, stlo, stel, starttime, endtime = line.split()[0:6]

                # handle the time
                try:
                    starttime = UTCDateTime(starttime)
                    endtime   = UTCDateTime(endtime)
                except Exception:
                    starttime = UTCDateTime("20090101")
                    endtime   = UTCDateTime("20500101")

                if name not in stations.keys():
                    station = {
                               name: [{
                                        "name": name,
                                        "stla": float(stla),
                                        "stlo": float(stlo),
                                        "stel": float(stel) / 1000.0,
                                        "starttime": starttime,
                                        "endtime": endtime
                                      }]
                              }
                    stations.update(station)
                else:
                    subdict=  {
                                "name": name,
                                "stla": float(stla),
                                "stlo": float(stlo),
                                "stel": float(stel) / 1000.0,
                                "starttime": starttime,
                                "endtime": endtime
                              }
                    stations[name].append(subdict)

        logger.info("%d stations in database.", len(stations))
        return stations

    def _get_dirname(self, starttime, endtime):
        """
        Get directory names based on starttime and endtime.
        """
        # mseed data are stored according to BJT not UTC
        starttime_in_bjt = starttime + timedelta(hours=8)
        endtime_in_bjt = endtime + timedelta(hours=8)

        if starttime_in_bjt.date == endtime_in_bjt.date:  # one day
            return [starttime_in_bjt.strftime("%Y%m%d")]
        else:  # two days
            return [starttime_in_bjt.strftime("%Y%m%d"),
                    endtime_in_bjt.strftime("%Y%m%d")]

    def _read_mseed(self, station, dirnames, starttime, endtime):
        """
        Read waveform in specified time window.
        """
        # obtain event waveform
        pattern = station['name'] + ".*.*.*.mseed"
        if not 1 <= len(dirnames) <= 2:  # zero or more than two days
            logger.error("Cannot trim waveform duration span %s day(s)",
                         len(dirnames))
            return

        # loop over to read all mseed in
        st = Stream()
        for dirname in dirnames:
            mseedname = os.path.join(self.mseeddir, dirname, pattern)
            try:
                st += read(mseedname)
            except FileNotFoundError:
                logger.warning("File not exist: %s", mseedname)
            except Exception as e:
                logger.error("Error in reading: %s", e)

        # Merge data
        try:
            st.merge(fill_value=0)
        except Exception:
            logger.error("Error in merging %s", station['name'])
            return None

        # check if st contains data
        if not st:
            logger.warning("No data for %s", station['name'])
            return None

        st.trim(starttime, endtime)
        return st

    def _writesac(self, stream, event, station, outdir):
        """
        Write data with SAC format with event and station information.
        """
        for trace in stream:  # loop over 3-component traces
            # transfer obspy trace to sac trace
            sac_trace = SACTrace.from_obspy_trace(trace=trace)

            # set station related headers
            sac_trace.stla = station["stla"]
            sac_trace.stlo = station["stlo"]
            sac_trace.stel = station["stel"]

            if trace.stats.channel[-1] == "E":
                sac_trace.cmpaz = 90
                sac_trace.cmpinc = 90
            elif trace.stats.channel[-1] == "N":
                sac_trace.cmpaz = 0
                sac_trace.cmpinc = 90
            elif trace.stats.channel[-1] == "Z":
                sac_trace.cmpaz = 0
                sac_trace.cmpinc = 0
            else:
                logger.warning("Not E|N|Z component")

            # set event related headers
            sac_trace.evla = event["latitude"]
            sac_trace.evlo = event["longitude"]
            sac_trace.evdp = event["depth"]
            sac_trace.mag = event["magnitude"]

            # 1. SACTrace.from_obspy_trace automatically set Trace starttime
            #    as the reference time of SACTrace, when converting Trace to
            #    SACTrace. Thus in SACTrace, b = 0.0.
            # 2. Set SACTrace.o as the time difference in seconds between
            #    event origin time and reference time (a.k.a. starttime).
            # 3. Set SACTrace.iztype to 'io' change the reference time to
            #    event origin time (determined by SACTrace.o) and also
            #    automatically change other time-related headers
            #    (e.g. SACTrace.b).

            # 1.from_obspy_trace
            #   o
            #   |
            #   b----------------------e
            #   |=>   shift  <=|
            # reftime          |
            #               origin time
            #
            # 2.sac_trace.o = shift
            #   o:reset to be zero
            #   |
            #   b---------------------e
            #   |            |
            #   | refer(origin) time
            # -shift
            sac_trace.o = event["origin"] - sac_trace.reftime
            sac_trace.iztype = 'io'
            sac_trace.lcalda = True

            # SAC file location
            sac_flnm = ".".join([event["origin"].strftime("%Y.%j.%H.%M.%S"),
                                 "0000", trace.id, "M", "SAC"])
            sac_fullname = os.path.join(outdir, sac_flnm)
            sac_trace.write(sac_fullname)
        return

    def _get_window(self, event, station=None, by_event=None, by_phase=None):
        """
        Determine the starttime and endtime.

        Parameters
        ----------

        event: dict
            Contains event information
        station: dict
            Contains station information
        """

        if by_event:
            starttime = event['origin'] + by_event['start_offset']
            endtime = starttime + by_event['duration']
            return starttime, endtime

        # by phase
        dist = locations2degrees(event["latitude"], event["longitude"],
                                 station["stla"], station["stlo"])

        start_ref_phase = by_phase['start_ref_phase']
        end_ref_phase = by_phase['end_ref_phase']
        start_offset = by_phase['start_offset']
        end_offset = by_phase['end_offset']

        # TauPyModel.get_travel_times always returns arrivals sorted by time
        start_arrivals = self.model.get_travel_times(
            source_depth_in_km=event['depth'],
            distance_in_degree=dist,
            phase_list=start_ref_phase)
        if not start_arrivals:  # no phase available, skip this data
            return None, None  # starttime and endtime are None

        end_arrivals = self.model.get_travel_times(
            source_depth_in_km=event['depth'],
            distance_in_degree=dist,
            phase_list=end_ref_phase)

        # determine starttime and endtime
        starttime = event['origin'] + start_arrivals[0].time + start_offset
        endtime = event['origin'] + end_arrivals[-1].time + end_offset
        return starttime, endtime

    def get_waveform(self, event, by_event=None, by_phase=None, epicenter=None):
        """
        Trim waveform from dataset of CGRM

        Parameters
        ----------
        event: dict
            Event information container
        by_event: dict
            Determine waveform window by event origin time
        by_phase: dict
            Determine waveform window by phase arrival times
        epicenter: dict
            Select station location
        """
        # check the destination
        eventdir = event['origin'].strftime("%Y%m%d%H%M%S")
        outdir = os.path.join(self.sacdir, eventdir)
        if not os.path.exists(outdir):
            os.makedirs(outdir, exist_ok=True)

        if by_event:
            starttime, endtime = self._get_window(event=event,
                                                  by_event=by_event)
            dirnames = self._get_dirname(starttime, endtime)
            logger.debug("dirnames: %s", dirnames)

        # loop over all stations
        for key, stationlist in self.stations.items():
            station = find_station(stationlist, event['origin'])
            logger.debug("station: %s", key)
            if not by_event:
                starttime, endtime = self._get_window(event=event,
                                                      station=station,
                                                      by_phase=by_phase)
                dirnames = self._get_dirname(starttime, endtime)
                logger.debug("dirnames: %s", dirnames)
            if epicenter:
                dist = locations2degrees(event["latitude"], event["longitude"],
                                              station["stla"], station["stlo"])
                if dist < epicenter['minimum'] or dist > epicenter['maximum']:
                    continue

            st = self._read_mseed(station, dirnames, starttime, endtime)
            if not st:
                continue
            self._writesac(st, event, station, outdir)
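A hypothetical driver for the Client class above; the paths are placeholders, and the event dict keys and by_event window settings follow the usage inside get_waveform:

from obspy import UTCDateTime

client = Client(stationinfo='station.info', mseeddir='/data/mseed',
                sacdir='/data/sac', model='prem')
event = {'origin': UTCDateTime('2016-01-01T12:00:00'),
         'latitude': 36.2, 'longitude': 140.1,
         'depth': 30.0, 'magnitude': 6.0}
client.get_waveform(event, by_event={'start_offset': -60, 'duration': 600})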
Example #32
client = Client("IRIS")
cat = client.get_events(starttime=UTCDateTime(2004, 1, 1, 0, 0, 0),
                        endtime=UTCDateTime(2016, 4, 30, 23, 59, 59),
                        latitude=36.80060501882054,
                        longitude=137.6569971141782,
                        mindepth=60,
                        minmagnitude=6,
                        minradius=30,
                        maxradius=90)
print(cat)

# Calculate travel time for the accurate picking of onset from teleseismic waveform
p_tttable = []
s_tttable = []
sta_lat, sta_long = 35.5038, 136.7939  # latitude and longitude of specified station (N.TKTH)
model = TauPyModel(model="iasp91")
for i in range(len(cat)):
    distance = locations2degrees(sta_lat, sta_long, cat[i].origins[0].latitude,
                                 cat[i].origins[0].longitude)
    parrivals = model.get_travel_times(
        source_depth_in_km=cat[i].origins[0].depth / 1000,
        distance_in_degree=distance,
        phase_list=['P'])
    sarrivals = model.get_travel_times(
        source_depth_in_km=cat[i].origins[0].depth / 1000,
        distance_in_degree=distance,
        phase_list=['S'])
    p_time = parrivals[0].time
    s_time = sarrivals[0].time
    p_tttable.append(p_time)
    s_tttable.append(s_time)
Example #33
relative_ptimes = [
    5.831, 65.7839, 5.4050, 67.8967, 5.3254, 6.5378, 5.7208, 63.1667, 5.8395,
    5.9275, 5.8311, 5.825, 5.66964, 5.22, 5.39
]
rootpath = '/Users/dmelgar/Amatrice2016/strong_motion/sac/'

lonlat = genfromtxt(
    '/Users/dmelgar/Amatrice2016/strong_motion/stations/latest.sta',
    usecols=[1, 2])
station_catalogue = genfromtxt(
    '/Users/dmelgar/Amatrice2016/strong_motion/stations/latest.sta',
    usecols=[0],
    dtype='S')

#velmod = TauPyModel(model="/Users/dmelgar/FakeQuakes/Cascadia/structure/cascadia")
velmod = TauPyModel(model='aci')

predictedP = 9999 * ones(len(stations))
#Get predicted arrivals
for ksta in range(len(stations)):
    st = read(rootpath + stations[ksta] + '.HNZ.sac')

    #Find coordinates
    i = where(station_catalogue == stations[ksta])[0]
    lon_sta = lonlat[i, 0]
    lat_sta = lonlat[i, 1]

    # locations2degrees expects (lat1, lon1, lat2, lon2); epicenter is
    # assumed here to be (lon, lat, depth_km).
    deg = locations2degrees(lat_sta, lon_sta, epicenter[1], epicenter[0])
    arrivals = velmod.get_travel_times(source_depth_in_km=epicenter[2],
                                       distance_in_degree=deg,
                                       phase_list=['P', 'Pn', 'p'])
Example #34
File: mail.py  Project: xumi1993/bqmail
from bqmail.query import Query
from obspy import UTCDateTime
from obspy.taup import TauPyModel
from obspy.clients.iris import Client
import smtplib
from email.mime.text import MIMEText
from email.header import Header
from .distaz import distaz
model = TauPyModel()
cld = Client()


def connectsmtp(server, port, sender, password):
    smtpObj = smtplib.SMTP_SSL(server, port)
    smtpObj.login(sender, password)
    return smtpObj


def loginmail(sender, server='localhost', password='', port=465, test_num=5):
    test_it = 1
    while test_it <= test_num:
        if test_it > 1:
            print('Trying to connect to {} (attempt {} of {})'.format(server, test_it, test_num))
        try:
            smtpObj = connectsmtp(server, port, sender, password)
        except:
            test_it += 1
            continue
        else:
            break
    if test_it > test_num:
Example #35
import sys  # needed for sys.path.append below

from obspy.taup import TauPyModel
import matplotlib.pyplot as plt
import os.path
import glob
import numpy as np
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import math
from skimage import measure
from skimage.draw import ellipsoid
from geographiclib.geodesic import Geodesic as geo
from scipy.interpolate import interp1d
from mpl_toolkits.mplot3d.axes3d import Axes3D
from mpl_toolkits.mplot3d import proj3d

taupmodel = TauPyModel(model='ak135')  # could change model to set up in accordance with BBAFRP19

# from matplotlib import rc
# rc('font',size=28)
# rc('font',family='serif')
# rc('axes',labelsize=32)

home = os.path.expanduser('~')  # assumed: 'home' is not defined in this snippet
sys.path.append(home + '/Google_Drive/GITHUB_AB/3D_corrections/PLOTTING')
import Africa_BBAFRP19


def init_model_BBAFRP19():
    # Read crustal and mantle models
    global mod
    mod = Africa_BBAFRP19.BBAFRP19_model()
Example #36
#sta='LUTZ'
#lonsta=-121.8652
#latsta= 37.2869

#sta='MILP'
#lonsta=-121.8340
#latsta=37.4491

path = '/Users/dmelgar/FakeQuakes/M6_validation_pwave/output/waveforms/M6.000000/'

#taup
zs = 8.0
g = Geod(ellps='WGS84')
azimuth, baz, dist = g.inv(-121.753508, 37.332028, lonsta, latsta)
dist_in_degs = kilometer2degrees(dist / 1000.)
velmod = TauPyModel(
    '/Users/dmelgar/FakeQuakes/M6_validation_pwave/structure/bbp_norcal.npz')
Ppaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['P', 'p'])
p = Ppaths[0].time
Spaths = velmod.get_ray_paths(zs, dist_in_degs, phase_list=['S', 's'])
s = Spaths[0].time

nlf = read(path + sta + '.LYN.sac')
elf = read(path + sta + '.LYE.sac')
zlf = read(path + sta + '.LYZ.sac')

nbb = read(path + sta + '.bb.HNN.sac')
ebb = read(path + sta + '.bb.HNE.sac')
zbb = read(path + sta + '.bb.HNZ.sac')

nhf = read(path + sta + '.HNN.sac')
ehf = read(path + sta + '.HNE.sac')
Example #37
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np  # needed for np.linspace below
plt.rcParams['figure.figsize'] = (13, 8) if False else (10, 6)

from obspy.taup.taup import getTravelTimes
from obspy.core.util.geodetics import gps2DistAzimuth
from obspy.taup import TauPyModel

from seismon.eqmon import ampRf, shoot

degrees = np.linspace(1, 180, 180)
distances = degrees * (np.pi / 180) * 6370000
depths = np.linspace(1, 100, 100)

model = TauPyModel(model="iasp91")
#model = TauPyModel(model="1066a")

fwd = 0
back = 0

eqlat, eqlon = 35.6895, 139.6917

GPS = 0
magnitude = 6.0
depth = 20.0
Rf0 = 76.44
Rfs = 1.37
cd = 440.68
rs = 1.57
Example #38
relative_ptimes = [
    5.831, 65.7839, 5.4050, 67.8967, 5.3254, 6.5378, 5.7208, 63.1667, 5.8395,
    5.9275, 5.8311, 5.825, 5.66964
]
rootpath = '/Users/dmelgar/Amatrice2016/strong_motion/sac/'

lonlat = genfromtxt(
    '/Users/dmelgar/Amatrice2016/strong_motion/stations/latest.sta',
    usecols=[1, 2])
station_catalogue = genfromtxt(
    '/Users/dmelgar/Amatrice2016/strong_motion/stations/latest.sta',
    usecols=[0],
    dtype='S')

#velmod = TauPyModel(model="/Users/dmelgar/FakeQuakes/Cascadia/structure/cascadia")
velmod = TauPyModel()

lon_grid = arange(13.17, 13.37, 0.01)
lat_grid = arange(42.68, 42.75, 0.01)
z_grid = arange(2, 10, 0.5)
Npts = len(lon_grid) * len(lat_grid) * len(z_grid) * len(time_offsets) * len(
    stations)

predictedP = ones((Nsta, Npts)) * 9999
arrival_times = zeros((Nsta, Npts))
error = zeros((Nsta, Npts))

lon_out = zeros(Npts)
lat_out = zeros(Npts)
z_out = zeros(Npts)
offsets_out = zeros(Npts)
Example #39
from collections import OrderedDict
from os.path import split as psplit

import matplotlib.pyplot as plt
import numpy as np
from obspy.taup import TauPyModel
from obspy.taup.taup_create import build_taup_model

RADIUS_MARS = 3389.5  # km; module scope so the plotting code below can rely on it

# get_dist (the S-P distance search) is defined elsewhere in the original module.


def main(fnam_nd: str,
         times: tuple,
         phase_list=("P", "S"),
         depth=40.,
         plot_rays=False):
    """
    Compute distance of an event, given S-P time
    :param fnam_nd: name of TauP compatible model file
    :param times: list of S-P time
    :param phase_list: list of phases between which the time difference is measured (usually P and S)
    :param depth: assumed depth of event
    :param plot_rays: create a plot with the ray paths
    """
    fnam_npz = "./taup_tmp/" \
               + psplit(fnam_nd)[-1][:-3] + ".npz"
    build_taup_model(fnam_nd, output_folder="./taup_tmp")
    cache = OrderedDict()
    model = TauPyModel(model=fnam_npz, cache=cache)

    if plot_rays:
        fig, ax = plt.subplots(1, 1)

    for itime, tSmP in enumerate(times):
        dist = get_dist(model, tSmP=tSmP, depth=depth, phase_list=phase_list)
        if dist is None:
            print(f"{fnam_nd}, S-P time: {tSmP:5.1f}: NO SOLUTION FOUND!")
        else:
            print(f"{fnam_nd}, S-P time: {tSmP:5.1f}, "
                  f"taup_distance: {dist:5.1f}")

        if plot_rays:
            if dist is None:
                ax.plot((-200), (-200),
                        label="%4.1f sec, NO SOLUTION" % (tSmP),
                        c="white",
                        lw=0.0)
            else:
                arrivals = model.get_ray_paths(distance_in_degree=dist,
                                               source_depth_in_km=depth,
                                               phase_list=["P", "S"])

                already_plotted = dict(P=False, S=False)
                ls = dict(P="solid", S="dashed")
                label = dict(P="%4.1f sec, %5.1f°" % (tSmP, dist), S=None)
                for arr in arrivals:
                    if not already_plotted[arr.name]:
                        already_plotted[arr.name] = True
                        x = (RADIUS_MARS - arr.path["depth"]) * \
                            np.sin(arr.path["dist"])
                        y = (RADIUS_MARS - arr.path["depth"]) * \
                            np.cos(arr.path["dist"])
                        ax.plot(x,
                                y,
                                c="C%d" % itime,
                                ls=ls[arr.name],
                                label=label[arr.name],
                                lw=1.2)

    if plot_rays:
        for layer_depth in model.model.get_branch_depths():
            angles = np.linspace(0, 2 * np.pi, 1000)
            x_circle = (RADIUS_MARS - layer_depth) * np.sin(angles)
            y_circle = (RADIUS_MARS - layer_depth) * np.cos(angles)
            ax.plot(x_circle, y_circle, c="k", ls="dashed", lw=0.5, zorder=-1)
        for layer_depth in (model.model.cmb_depth, 0.0):
            angles = np.linspace(0, 2 * np.pi, 1000)
            x_circle = (RADIUS_MARS - layer_depth) * np.sin(angles)
            y_circle = (RADIUS_MARS - layer_depth) * np.cos(angles)
            ax.plot(x_circle, y_circle, c="k", ls="solid", lw=1.0)

        #  for layer_depth in [1100.0]:
        #      angles = np.linspace(0, 2 * np.pi, 1000)
        #      x_circle = (RADIUS_MARS - layer_depth) * np.sin(angles)
        #      y_circle = (RADIUS_MARS - layer_depth) * np.cos(angles)
        #      ax.plot(x_circle, y_circle, c="k", ls="dotted", lw=0.3)

        ax.set_xlim(-100, RADIUS_MARS + 100)
        ax.set_ylim(1000, RADIUS_MARS + 100)
        ax.set_xlabel("radius / km")
        ax.set_ylabel("radius / km")
        ax.set_title("Ray path for model %s" % fnam_nd)
        ax.set_aspect("equal", "box")
        ax.legend(loc="lower left")
        plt.show()
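The helper `get_dist` is not included in this excerpt. A minimal sketch of one way it could work, assuming SciPy is available: root-search for the distance whose predicted S-P time matches the measurement (real code would need more robust bracketing):

import numpy as np
from scipy.optimize import brentq


def get_dist(model, tSmP, depth, phase_list=("P", "S")):
    """Return the distance (degrees) whose predicted S-P time equals tSmP."""

    def misfit(dist):
        arrivals = model.get_travel_times(source_depth_in_km=depth,
                                          distance_in_degree=dist,
                                          phase_list=phase_list)
        t = {}
        for arr in arrivals:
            t.setdefault(arr.name, arr.time)  # first arrival of each phase
        if phase_list[0] not in t or phase_list[1] not in t:
            return np.nan
        return (t[phase_list[1]] - t[phase_list[0]]) - tSmP

    try:
        return brentq(misfit, 1.0, 100.0, xtol=0.01)
    except ValueError:  # no sign change (or NaN) inside the bracket
        return None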
Example #40
# Assumed imports for this standalone snippet
import os

import numpy as np
from scipy import interpolate

tempdata = par['tempdata']  # 'par' and the grid bounds below come from the full script
#phasesl = phasel.split(',')

#ndep = 96
#ndis = 111
dep = np.linspace(mindep, maxdep, ndep)
dis = np.linspace(mindis, maxdis, ndis)
filetele = './tables/' + firstarrtt
#filepcp = './tables/'+cmbreftt
if os.path.isfile(filetele):
    telep = np.load(filetele)
#    pcpdp = np.load(filepcp)
else:
    print('constructing tt table')
    from obspy.taup import TauPyModel
    mod = TauPyModel(model='ak135')
    telep = np.zeros((ndep, ndis))
    #    pcpdp = np.zeros((ndep,ndis))
    for ii in range(ndep):
        for jj in range(ndis):
            arr = mod.get_travel_times(source_depth_in_km=dep[ii],
                                       distance_in_degree=dis[jj],
                                       phase_list=phasesl)
            #pcpdp[ii,jj] = arr[-1].time-arr[0].time
            telep[ii, jj] = arr[0].time
    np.save(filetele, telep)
ftelep = interpolate.interp2d(dis, dep, telep, kind='linear')
#fpcpdp = interpolate.interp2d(dis, dep, pcpdp, kind='linear')

#pPdP = np.load('./tables/teleP50to600.npy')
#ndep = 551
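Once the table exists, `ftelep` gives fast first-arrival lookups. A usage sketch with illustrative values (interp2d returns an array, hence `.item()`; interp2d is also deprecated in recent SciPy in favor of RegularGridInterpolator):

dist_query = 60.0   # epicentral distance in degrees
dep_query = 100.0   # source depth in km
tt_first = ftelep(dist_query, dep_query).item()
print('predicted first arrival: %.2f s' % tt_first)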
Example #41
    def __init__(self, sta, event, gacmin=30., gacmax=90., phase='P'):

        from obspy.geodetics.base import gps2dist_azimuth as epi
        from obspy.geodetics import kilometer2degrees as k2d
        from obspy.taup import TauPyModel

        # Extract event 4D parameters
        self.time = event.origins[0].time
        self.lon = event.origins[0].longitude
        self.lat = event.origins[0].latitude
        self.dep = event.origins[0].depth

        # Check if depth is valid type
        if self.dep is not None:
            if self.dep > 1000.:
                self.dep = self.dep / 1000.
        else:
            self.dep = 10.

        # Magnitude
        self.mag = event.magnitudes[0].mag
        if self.mag is None:
            self.mag = -9.

        # Calculate epicentral distance
        self.epi_dist, self.az, self.baz = epi(self.lat, self.lon,
                                               sta.latitude, sta.longitude)
        self.epi_dist /= 1000
        self.gac = k2d(self.epi_dist)

        if self.gac > gacmin and self.gac < gacmax:

            # Get travel time info
            tpmodel = TauPyModel(model='iasp91')

            # Get travel times (dep was converted to km above if needed)
            arrivals = tpmodel.get_travel_times(distance_in_degree=self.gac,
                                                source_depth_in_km=self.dep,
                                                phase_list=[phase])
            if len(arrivals) > 1:
                print("arrival has many entries: ", len(arrivals))
            elif len(arrivals) == 0:
                print("no arrival found")
                self.accept = False
                return

            arrival = arrivals[0]

            # Attributes from parameters
            self.ttime = arrival.time
            self.slow = arrival.ray_param_sec_degree / 111.
            self.inc = arrival.incident_angle
            self.phase = phase
            self.accept = True
        else:
            self.ttime = None
            self.slow = None
            self.inc = None
            self.phase = None
            self.accept = False

        # Defaults for attributes not tied to station-event geometry
        self.vp = 6.0
        self.vs = 3.5
        self.align = 'ZRT'

        # Attributes that get updated as analysis progresses
        self.rotated = False
        self.snr = None
        self.snrh = None
        self.cc = None
Example #42
            transform=transform,
            zorder=10,
            fontsize=8,
            color='red')

    # print out the filters that have been used
    plt.text(0, DURATION * 1.05, filtertext1)
    plt.text(0, DURATION * 1.07, filtertext2)

    # Print the coloured phases over the seismic section
    textlist = []  # list of text on plot, to avoid over-writing
    for j, color in enumerate(COLORS):
        phase = PHASES[j]
        x = []
        y = []
        model = TauPyModel(model=MODEL)
        for dist in range(
                MIN_DIST, MAX_DIST + 1,
                1):  # calculate and plot one point for each degree from 0-180
            arrivals = model.get_travel_times(source_depth_in_km=EVT_Z,
                                              distance_in_degree=dist,
                                              phase_list=[phase])
            printed = False
            for i in range(len(arrivals)):
                instring = str(arrivals[i])
                phaseline = instring.split(" ")
                if phaseline[0] == phase and printed == False and int(
                        dist) > 0 and int(dist) < 180 and float(
                            phaseline[4]) > 0 and float(
                                phaseline[4]) < DURATION:
                    x.append(int(dist))
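String-parsing str(arrivals[i]) as above is fragile; the same filter can read the Arrival attributes directly. A sketch of the loop body (the snippet is cut off above, so what gets appended to y is an assumption):

            for arrival in arrivals:
                if (arrival.name == phase and 0 < dist < 180
                        and 0 < arrival.time < DURATION):
                    x.append(int(dist))
                    y.append(arrival.time)  # assumed: y collects travel times
                    break  # keep only the first matching arrival per distance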
Example #43
Last Modified : Mon 17 Apr 2017 05:02:10 PM EDT
Created By : Samuel M. Haugland

==============================================================================
'''

import numpy as np
from matplotlib import pyplot as plt
from subprocess import call
from os import listdir
import h5py
import obspy
import seispy
from obspy.taup import TauPyModel

model = TauPyModel(model='prem50')


def main():
    homedir = '/home/samhaug/work1/SP_brazil_sims/SVaxi/full_shareseismos/shareseismos/'
    prem_st = read_prem(homedir)

    fig, ax = setup_figure()
    thickness_compare(homedir, prem_st, ax[0][0])
    dvs_compare(homedir, prem_st, ax[1][0])
    dvp_compare(homedir, prem_st, ax[1][1])
    drho_compare(homedir, prem_st, ax[0][1])
    angle_compare(homedir, prem_st, ax[0][2])
    dist_compare(homedir, prem_st, ax[1][2])

    plt.figtext(0.3, 0.93, '(a)', size=10)
Example #44
#!/usr/bin/env python

import h5py
import numpy as np
import obspy
from geopy.distance import great_circle
from obspy.taup import TauPyModel
model = TauPyModel(model="prem")
from matplotlib import pyplot as plt
import glob


def read_ed_list():
    dir_list = glob.glob('*PKIKP')
    ed_list = []
    for ii in dir_list:
        f = h5py.File(ii + '/Processed/env.h5', 'r')
        env = f['env'][...]
        num = f['num'][...]
        ed_list.append([env, num])
        f.close()
    return ed_list


def sum_env(ed_list):
    def matrix_sum(ed_list):
        norm = np.zeros(ed_list[0][0].shape)
        for ii in ed_list:
            num = np.flipud(ii[1][:, 1])
            env = ii[0].copy()
            for jj in range(0, env.shape[0]):
Example #45
def pro5stack(eq_file,
              plot_scale_fac=0.05,
              slowR_lo=-0.1,
              slowR_hi=0.1,
              slow_delta=0.0005,
              start_buff=-50,
              end_buff=50,
              ref_lat=36.3,
              ref_lon=138.5,
              envelope=1,
              plot_dyn_range=1000,
              log_plot=1,
              norm=1,
              global_norm_plot=1,
              color_plot=1,
              fig_index=401,
              ARRAY=0):

    #%% Import functions
    import obspy
    import obspy.signal
    from obspy import UTCDateTime
    from obspy import Stream, Trace
    from obspy import read
    from obspy.geodetics import gps2dist_azimuth
    import numpy as np
    import os
    from obspy.taup import TauPyModel
    import obspy.signal as sign
    import matplotlib.pyplot as plt
    from matplotlib.colors import LogNorm
    model = TauPyModel(model='iasp91')
    from scipy.signal import hilbert
    import math
    import time

    #	import sys # don't show any warnings
    #	import warnings

    print('Running pro5a_stack')

    #%% Get saved event info, also used to name files
    start_time_wc = time.time()

    if ARRAY == 0:
        file = open(eq_file, 'r')
    elif ARRAY == 1:
        file = open('EvLocs/' + eq_file, 'r')
    lines = file.readlines()
    split_line = lines[0].split()
    #			ids.append(split_line[0])  ignore label for now
    t = UTCDateTime(split_line[1])
    date_label = split_line[1][0:10]
    ev_lat = float(split_line[2])
    ev_lon = float(split_line[3])
    ev_depth = float(split_line[4])

    #if not sys.warnoptions:
    #    warnings.simplefilter("ignore")

    #%% Get station location file
    if ARRAY == 0:  # Hinet set and center
        sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/hinet_sta.txt'
        ref_lat = 36.3
        ref_lon = 138.5
    elif ARRAY == 1:  # LASA set and center
        sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/LASA_sta.txt'
        ref_lat = 46.69
        ref_lon = -106.22
    else:  # NORSAR set and center if 2
        sta_file = '/Users/vidale/Documents/GitHub/Array_codes/Files/NORSAR_sta.txt'
        ref_lat = 61
        ref_lon = 11
    with open(sta_file, 'r') as file:
        lines = file.readlines()
    print(str(len(lines)) + ' stations read from ' + sta_file)
    # Load station coords into arrays
    station_index = range(len(lines))
    st_names = []
    st_lats = []
    st_lons = []
    for ii in station_index:
        line = lines[ii]
        split_line = line.split()
        st_names.append(split_line[0])
        st_lats.append(split_line[1])
        st_lons.append(split_line[2])

#%% Name file, read data
# date_label = '2018-04-02' # date for filename
    if ARRAY == 0:
        fname = 'HD' + date_label + 'sel.mseed'
    elif ARRAY == 1:
        fname = 'Pro_Files/HD' + date_label + 'sel.mseed'
    st = Stream()
    print('reading ' + fname)
    st = read(fname)
    print('Read in: ' + str(len(st)) + ' traces')
    nt = len(st[0].data)
    dt = st[0].stats.delta
    print('First trace has : ' + str(nt) + ' time pts, time sampling of ' +
          str(dt) + ' and thus duration of ' + str((nt - 1) * dt))

    #%% Build Stack arrays
    stack = Stream()
    tr = Trace()
    tr.stats.delta = dt
    tr.stats.network = 'stack'
    tr.stats.channel = 'BHZ'
    slow_n = int(1 +
                 (slowR_hi - slowR_lo) / slow_delta)  # number of slownesses
    stack_nt = int(1 + ((end_buff - start_buff) / dt))  # number of time points
    # In English, stack_slows = range(slow_n) * slow_delta - slowR_lo
    a1 = range(slow_n)
    stack_slows = [(x * slow_delta + slowR_lo) for x in a1]
    print(str(slow_n) + ' slownesses.')
    tr.stats.starttime = t + start_buff
    tr.data = np.zeros(stack_nt)
    done = 0
    for stack_one in stack_slows:
        tr1 = tr.copy()
        tr1.stats.station = str(int(done))
        stack.extend([tr1])
        done += 1
    #	stack.append([tr])
    #	stack += tr

    #  Only need to compute ref location to event distance once
    ref_distance = gps2dist_azimuth(ev_lat, ev_lon, ref_lat, ref_lon)

    #%% Select traces by distance, window and adjust start time to align picked times
    done = 0
    for tr in st:  # traces one by one, find lat-lon by searching entire inventory.  Inefficient but cheap
        for ii in station_index:
            if ARRAY == 0:  # for hi-net, have to chop off last letter, always 'h'
                this_name = st_names[ii]
                this_name_truc = this_name[0:5]
                name_truc_cap = this_name_truc.upper()
            elif ARRAY == 1:
                name_truc_cap = st_names[ii]
            if (tr.stats.station == name_truc_cap
                ):  # find station in inventory
                if norm == 1:
                    tr.normalize()
#					tr.normalize(norm= -len(st)) # mystery command or error
                stalat = float(st_lats[ii])
                stalon = float(
                    st_lons[ii])  # look up lat & lon again to find distance
                distance = gps2dist_azimuth(
                    stalat, stalon, ev_lat,
                    ev_lon)  # Get traveltimes again, hard to store
                tr.stats.distance = distance[0]  # distance in m
                del_dist = (ref_distance[0] - distance[0]) / (1000)  # in km
                # ALSO NEEDS distance station - hypocenter calculation
                #isolate components of distance in radial and transverse directions, ref_distR & ref_distT
                # FIX ref_distR = distance*cos(azi-backazi)
                # FIX ref_distT = distance*sin(azi-backazi)
                #			for(k=0;k<nslow;k++){
                #				slow = 110.*(LOWSLOW + k*DELTASLOW);
                for slow_i in range(
                        slow_n):  # for this station, loop over slownesses
                    time_lag = -del_dist * stack_slows[
                        slow_i]  # time shift due to slowness, flipped to match 2D
                    #					start_offset = tr.stats.starttime - t
                    #					time_correction = (start_buff - (start_offset + time_lag))/dt
                    time_correction = ((t - tr.stats.starttime) +
                                       (time_lag + start_buff)) / dt
                    #				print('Time lag ' + str(time_lag) + ' for slowness ' + str(stack_slows[slow_i]) + ' and distance ' + str(del_dist) + ' time sample correction is ' + str(time_correction))
                    for it in range(stack_nt):  # check points one at a time
                        it_in = int(it + time_correction)
                        if it_in >= 0 and it_in < nt - 1:  # does data lie within seismogram?
                            stack[slow_i].data[it] += tr[it_in]
                done += 1
                if done % 50 == 0:
                    print('Done stacking ' + str(done) + ' out of ' +
                          str(len(st)) + ' stations.')
#%% Plot traces
    global_max = 0
    for slow_i in range(
            slow_n):  # find global max, and if requested, take envelope
        if len(stack[slow_i].data) == 0:
            print('Trace %d has zero length' % slow_i)
        if envelope == 1 or color_plot == 1:
            stack[slow_i].data = np.abs(hilbert(stack[slow_i].data))
        local_max = max(abs(stack[slow_i].data))
        if local_max > global_max:
            global_max = local_max
    if global_max <= 0:
        print('global_max ' + str(global_max) + ' slow_n ' + str(slow_n))

    # create time axis (x-axis), use of slow_i here is arbitrary, oops
    ttt = (np.arange(len(stack[slow_i].data)) * stack[slow_i].stats.delta +
           (stack[slow_i].stats.starttime - t))  # in units of seconds

    # Plotting
    if color_plot == 1:  # 2D color plot
        stack_array = np.zeros((slow_n, stack_nt))

        #	stack_array = np.random.rand(int(slow_n),int(stack_nt))  # test with random numbers
        min_allowed = global_max / plot_dyn_range
        if log_plot == 1:
            for it in range(stack_nt):  # check points one at a time
                for slow_i in range(
                        slow_n):  # for this station, loop over slownesses
                    num_val = stack[slow_i].data[it]
                    if num_val < min_allowed:
                        num_val = min_allowed
                    stack_array[
                        slow_i,
                        it] = math.log10(num_val) - math.log10(min_allowed)
        else:
            for it in range(stack_nt):  # check points one at a time
                for slow_i in range(
                        slow_n):  # for this station, loop over slownesses
                    stack_array[slow_i,
                                it] = stack[slow_i].data[it] / global_max
        y, x = np.mgrid[slice(stack_slows[0], stack_slows[-1] + slow_delta,
                              slow_delta),
                        slice(ttt[0], ttt[-1] + dt,
                              dt)]  # make underlying x-y grid for plot
        #	y, x = np.mgrid[ stack_slows , time ]  # make underlying x-y grid for plot
        plt.close(fig_index)

        fig, ax = plt.subplots(1, figsize=(9, 2))
        fig.subplots_adjust(bottom=0.3)
        #		c = ax.pcolormesh(x, y, stack_array, cmap=plt.cm.gist_yarg)
        #		c = ax.pcolormesh(x, y, stack_array, cmap=plt.cm.gist_rainbow_r)
        c = ax.pcolormesh(x, y, stack_array, cmap=plt.cm.binary)
        ax.axis([x.min(), x.max(), y.min(), y.max()])
        fig.colorbar(c, ax=ax)
        plt.figure(fig_index, figsize=(6, 8))
        plt.close(fig_index)
    else:  # line plot
        for slow_i in range(slow_n):
            dist_offset = stack_slows[slow_i]  # in units of slowness
            if global_norm_plot != 1:
                plt.plot(
                    ttt,
                    stack[slow_i].data * plot_scale_fac /
                    (stack[slow_i].data.max() - stack[slow_i].data.min()) +
                    dist_offset,
                    color='black')
            else:
                plt.plot(ttt,
                         stack[slow_i].data * plot_scale_fac /
                         (global_max - stack[slow_i].data.min()) + dist_offset,
                         color='black')
        plt.ylim(slowR_lo, slowR_hi)
        plt.xlim(start_buff, end_buff)
    plt.xlabel('Time (s)')
    plt.ylabel('Slowness (s/km)')
    plt.title(date_label)
    plt.show()

    #%% Save processed files
    print('Stack has ' + str(len(stack)) + ' traces')
    if ARRAY == 0:
        fname = 'HD' + date_label + '_1dstack.mseed'
    elif ARRAY == 1:
        fname = 'Pro_Files/HD' + date_label + '_1dstack.mseed'
    stack.write(fname, format='MSEED')

    elapsed_time_wc = time.time() - start_time_wc
    print('This job took ' + str(elapsed_time_wc) + ' seconds')
    os.system('say "Done"')
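The innermost per-sample loop above dominates the run time. With an integer sample shift, the same shift-and-sum can be done per trace with numpy slicing; a sketch under that assumption (not the author's code):

def shift_and_add(stack_data, trace_data, time_correction, stack_nt, nt):
    """Vectorized equivalent of the per-sample stacking loop, assuming the
    time correction truncates to an integer number of samples."""
    shift = int(time_correction)
    lo = max(0, -shift)                 # first stack sample that receives data
    hi = min(stack_nt, nt - 1 - shift)  # one past the last such sample
    if hi > lo:
        stack_data[lo:hi] += trace_data[lo + shift:hi + shift]

Calling shift_and_add(stack[slow_i].data, tr.data, time_correction, stack_nt, nt) would replace the inner for-it loop.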
Example #46
import numpy as np
import scipy.signal
import copy
import numexpr as npr
import obspy
import os
import matplotlib.pylab as plb
import matplotlib.pyplot as plt
from obspy.taup import TauPyModel
import warnings
from numba import jit
# from pyproj import Geod
# 
# geodist = Geod(ellps='WGS84')
taupmodel = TauPyModel(model="iasp91")
stretchbackdatafname = './strechback.data'

def _gaussFilter( dt, nft, f0 ):
    """
    Compute a gaussian filter in the freq domain which is unit area in time domain
    private function for IterDeconv
    ================================================================================
    Input:
    dt  - sampling time interval
    nft - number freq points
    f0  - width of filter
    
    Output:
    gauss  - Gaussian filter array (numpy)
    filter has the form: exp( - (0.5*w/f0)^2 ) the units of the filter are 1/s
Example #47
    def test_deep_source(self):
        # Regression test -- check if deep sources are ok
        model = TauPyModel("ak135")
        arrivals = model.get_ray_paths(2000.0, 60.0, ["P"])
        assert abs(arrivals[0].time - 480.32) < 1e-2
Example #48
File: analysis.py  Project: shineusn/MudPy
def dump_picks(event_log,vel_model,gf_list,out_file):
    '''  
    Dump P and S picks to a file
    '''
    
    from obspy.taup import TauPyModel
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.geodetics import locations2degrees
    from numpy import genfromtxt,zeros,array,ones
    
    
    #Read station locations
    sta=genfromtxt(gf_list,usecols=0,dtype='S')
    lonlat=genfromtxt(gf_list,usecols=[1,2])
    
    #Load velocity model for ray tracing
    velmod = TauPyModel(vel_model)
    
    # Get hypocenter
    f=open(event_log,'r')
    loop_go=True
    while loop_go:
        line=f.readline()
        if 'Hypocenter (lon,lat,z[km])' in line:
            s=line.split(':')[-1].replace('(','').replace(')','')
            hypo=array(s.split(',')).astype('float')
            loop_go=False

    #compute station to hypo distances
    d=zeros(len(lonlat))
    for k in range(len(lonlat)):
        d[k],az,baz=gps2dist_azimuth(lonlat[k,1],lonlat[k,0],hypo[1],hypo[0])
        d[k]=d[k]/1000
        

    f=open(out_file,'w')
    f.write('# sta,lon,lat,ptime(s),stime(s)\n')
    
    for k in range(len(sta)):
        
        
        # Ray trace
        deg=locations2degrees(hypo[1],hypo[0],lonlat[k,1],lonlat[k,0])
        try:
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2],distance_in_degree=deg,phase_list=['P','Pn','S','Sn','p','s'])
        except Exception:
            # retry with a slightly perturbed depth (the solver can fail when the source sits exactly on a model discontinuity)
            arrivals = velmod.get_travel_times(source_depth_in_km=hypo[2]-1.056,distance_in_degree=deg,phase_list=['P','Pn','S','Sn','p','s'])

        ptime=1e6
        stime=1e6
            
        #Determine P and S arrivals
        for kphase in range(len(arrivals)):
            if 'P' == arrivals[kphase].name or 'p' == arrivals[kphase].name or 'Pn' == arrivals[kphase].name:
                if arrivals[kphase].time<ptime:
                    ptime=arrivals[kphase].time
            if 'S' == arrivals[kphase].name or 's' == arrivals[kphase].name or 'Sn' == arrivals[kphase].name:
                if arrivals[kphase].time<stime:
                    stime=arrivals[kphase].time
            
        lon=lonlat[k,0]
        lat=lonlat[k,1] 
        station=sta[k]       
        line='%s\t%.4f\t%.4f\t%10.4f\t%10.4f\n' % (station,lon,lat,ptime,stime)
        f.write(line)
        
    f.close()
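The earliest-arrival selection above can be written more compactly; an equivalent sketch using min() over the returned arrivals (1e6 kept as the 'no pick' sentinel):

p_names = {'P', 'p', 'Pn'}
s_names = {'S', 's', 'Sn'}
ptime = min((a.time for a in arrivals if a.name in p_names), default=1e6)
stime = min((a.time for a in arrivals if a.name in s_names), default=1e6)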
Example #49
def fetch_rf_data(network, location, channel, data_directory, output_units,
                  minimum_magnitude, maximum_magnitude, station):

    # Track execution time for logging purposes
    t1 = time.time()

    ntwk = network
    stat = station
    loc = location
    chan = channel

    # Define the client that hosts the desired data
    client = Client("IRIS")

    # Define directory where seismic data will be saved as SAC files
    if output_units == 'counts':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_COUNTS/'
    elif output_units == 'displacement':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_DISP/'
    elif output_units == 'velocity':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_VEL/'
    elif output_units == 'acceleration':
        sac_dir = data_directory + ntwk + '/' + stat + '/' + loc + '/RFQUAKES_ACC/'
    else:
        print(
            'ERROR: Invalid output units. Acceptable options are \'counts,\' '
            '\'displacement,\' \'velocity,\' or \'acceleration\''
        )
        quit()

    # For now: delete the directory if it exists...
    if os.path.exists(sac_dir):
        print('Directory exists. Terminating process...')
        quit()
        # shutil.rmtree(sac_dir)

    if not os.path.exists(sac_dir):
        os.makedirs(sac_dir)

    # Define amount of data desired (minutes)
    duration = 60

    # Log potential errors to a .log file
    logFileName = sac_dir + ntwk + '.' + stat + '.log'

    # Fetch station information for data retrieval
    if loc == "NULL":
        loc = ""
        try:
            inv = client.get_stations(network=ntwk,
                                      station=stat,
                                      channel=chan,
                                      level="response")
        except Exception as error:
            with open(logFileName, "a") as log:
                log.write(str(error))
                log.write(
                    'Error fetching station information with the IRIS client...'
                )
            return
    else:
        try:
            inv = client.get_stations(network=ntwk,
                                      station=stat,
                                      loc=loc,
                                      channel=chan,
                                      level="response")
        except Exception as error:
            with open(logFileName, "a") as log:
                log.write(str(error))
                log.write(
                    'Error fetching station information with the IRIS client...'
                )
            return

    # Save the pole zero files
    nstats = len(inv.networks[0])
    resp_t0 = []
    resp_tf = []
    pre_filt = []
    for i in range(0, nstats):
        nresp = len(inv.networks[0].stations[i].channels)
        # Tag the PZ files and SAC files with a number indicating the period of operation
        for j in range(0, nresp):
            fileName = sac_dir + "SAC_PZs_" + ntwk + '_' + stat + '_' + inv.networks[0].stations[i].channels[j].code + \
                       '.' + str(j)
            with open(fileName, "a") as pzFile:
                pzFile.write('* **********************************\n')
                pzFile.write('* NETWORK   (KNETWK): ' + inv.networks[0].code +
                             '\n')
                pzFile.write('* STATION    (KSTNM): ' +
                             inv.networks[0].stations[i].code + '\n')
                pzFile.write(
                    '* LOCATION   (KHOLE): ' +
                    inv.networks[0].stations[i].channels[j].location_code +
                    '\n')
                pzFile.write('* CHANNEL   (KCMPNM): ' +
                             inv.networks[0].stations[i].channels[j].code +
                             '\n')
                pzFile.write('* CREATED           : ' +
                             str(UTCDateTime.now()).split('.')[0] + '\n')
                pzFile.write('* START             : ' +
                             str(inv.networks[0].stations[i].channels[j].
                                 start_date).split('.')[0] + '\n')
                pzFile.write('* END               : ' +
                             str(inv.networks[0].stations[i].channels[j].
                                 end_date).split('.')[0] + '\n')
                pzFile.write('* DESCRIPTION       : ' +
                             inv.networks[0].stations[i].site.name + '\n')
                pzFile.write('* LATITUDE          : %0.6f\n' %
                             inv.networks[0].stations[i].latitude)
                pzFile.write('* LONGITUDE         : %0.6f\n' %
                             inv.networks[0].stations[i].longitude)
                pzFile.write('* ELEVATION         : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].elevation)
                pzFile.write('* DEPTH             : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].depth)
                pzFile.write(
                    '* DIP               : %0.1f\n' %
                    (90.0 -
                     np.abs(inv.networks[0].stations[i].channels[j].dip)))
                pzFile.write('* AZIMUTH           : %0.1f\n' %
                             inv.networks[0].stations[i].channels[j].azimuth)
                pzFile.write(
                    '* SAMPLE RATE       : %0.1f\n' %
                    inv.networks[0].stations[i].channels[j].sample_rate)
                pzFile.write('* INPUT UNIT        : M\n')
                pzFile.write('* OUTPUT UNIT       : COUNTS\n')
                pzFile.write('* INSTTYPE          : ' + inv.networks[0].
                             stations[i].channels[j].sensor.description + '\n')
                pzFile.write('* INSTGAIN          : %e (M/S)\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             get_paz().stage_gain)
                pzFile.write('* COMMENT           : \n')
                pzFile.write('* SENSITIVITY       : %e (M/S)\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             instrument_sensitivity.value)
                pzFile.write('* A0                : %e\n' %
                             inv.networks[0].stations[i].channels[j].response.
                             get_paz().normalization_factor)
                pzFile.write('* **********************************\n')

                # Save the poles, zeros, and constant
                nzeros = 3
                zeros = inv.networks[0].stations[i].channels[
                    j].response.get_paz().zeros
                nz = np.nonzero(zeros)
                pzFile.write('ZEROS   ' + str(len(nz[0]) + nzeros) + '\n')
                pzFile.write("        %+e   %+e\n" % (0, 0))
                pzFile.write("        %+e   %+e\n" % (0, 0))
                pzFile.write("        %+e   %+e\n" % (0, 0))
                if len(nz[0]) != 0:
                    for k in range(0, len(nz[0])):
                        pzFile.write("        %+e   %+e\n" % (np.real(
                            zeros[nz[0][k]]), np.imag(zeros[nz[0][k]])))

                poles = inv.networks[0].stations[i].channels[
                    j].response.get_paz().poles
                pzFile.write('POLES   ' + str(len(poles)) + '\n')
                for k in range(0, len(poles)):
                    pzFile.write(
                        "        %+e   %+e\n" %
                        (np.real(inv.networks[0].stations[i].channels[j].
                                 response.get_paz().poles[k]),
                         np.imag(inv.networks[0].stations[i].channels[j].
                                 response.get_paz().poles[k])))

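                # SAC pole-zero convention: CONSTANT = A0 (normalization factor) * overall sensitivity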
                pzFile.write(
                    'CONSTANT        %e' %
                    (inv.networks[0].stations[i].channels[j].response.get_paz(
                    ).normalization_factor * inv.networks[0].stations[i].
                     channels[j].response.instrument_sensitivity.value))
                # pzFile.write(inv.networks[0].stations[i].channels[j].response.get_sacpz())

    # Loop over time-periods during which the station was operational and fetch data
    for i in range(0, nstats):
        for j in range(0, nresp):
            if inv.networks[0].stations[i].channels[
                    j].end_date > UTCDateTime.now():
                t0 = inv.networks[0].stations[i].channels[j].start_date
                tf = UTCDateTime.now()
            else:
                t0 = inv.networks[0].stations[i].channels[j].start_date
                tf = inv.networks[0].stations[i].channels[j].end_date
            # Get station coordinates for event selection
            stla = inv.networks[0].stations[i].latitude
            stlo = inv.networks[0].stations[i].longitude
            # Fetch relevant events in time-window during which station was operational
            try:
                catalog = client.get_events(starttime=t0,
                                            endtime=tf,
                                            minmagnitude=minimum_magnitude,
                                            maxmagnitude=maximum_magnitude,
                                            latitude=stla,
                                            longitude=stlo,
                                            minradius=30,
                                            maxradius=90)
            except Exception as error:
                with open(logFileName, "a") as log:
                    log.write(str(error))
                    log.write('Error fetching event catalog...')
                continue

            nEvents = len(catalog.events)
            # Initialize list of events used for bulk request
            bulk = []
            # Fill 'bulk' with desired event information
            for k in range(0, nEvents):
                teq = catalog.events[k].origins[0].time
                chan = inv.networks[0].stations[i].channels[j].code
                bulk.append((ntwk, stat, loc, chan, teq, teq + duration * 60))

            # Fetch the data!
            if output_units == 'counts':
                try:
                    st = client.get_waveforms_bulk(bulk)
                except Exception as error:
                    with open(logFileName, "a") as log:
                        log.write(str(error))
                        log.write('Unable to complete fetch request for: ' +
                                  stat + '.' + loc + '.' + chan)
                    continue
            else:
                try:
                    st = client.get_waveforms_bulk(bulk, attach_response=True)
                except Exception as error:
                    with open(logFileName, "a") as log:
                        log.write(str(error))
                        log.write('Unable to complete fetch request for: ' +
                                  stat + '.' + loc + '.' + chan)
                    continue

            # Do some file-formatting and optional minor pre-processing
            for k in range(0, len(st)):
                teq = st[k].meta.starttime

                # Optional instrument response removal goes here...

                # Prepare filename for saving
                evchan = st[k].meta.channel
                evid = st[k].meta.starttime.isoformat().replace(
                    '-', '.').replace('T', '.').replace(':',
                                                        '.').split('.')[:-1]
                evid.extend([ntwk, stat, loc, evchan, str(j), 'SAC'])
                evid = ".".join(evid)
                # Add station specific metadata to SAC files
                st[k].stats.sac = {}
                st[k].stats.sac.stla = stla
                st[k].stats.sac.stlo = stlo
                # Channel orientation (CMPAZ)
                azid = [ntwk, stat, loc, evchan]
                azid = ".".join(azid)
                st[k].stats.sac.cmpaz = inv.get_orientation(azid,
                                                            teq)["azimuth"]

                # Add event-specific metadata to SAC files (surely there must be a faster way to do this...?)
                for l in range(0, nEvents):
                    if catalog.events[l].origins[0].time - 5 <= st[k].meta.starttime <= \
                            catalog.events[l].origins[0].time + 5:
                        st[k].stats.sac.evla = catalog.events[l].origins[
                            0].latitude
                        if st[k].stats.sac.evla is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event latitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.evla = 0.0
                        st[k].stats.sac.evlo = catalog.events[l].origins[
                            0].longitude
                        if st[k].stats.sac.evlo is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event longitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.evlo = 0.0
                        st[k].stats.sac.evdp = catalog.events[l].origins[
                            0].depth
                        if st[k].stats.sac.evdp is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event depth for: " +
                                          evid + '\n')
                            st[k].stats.sac.evdp = 0.0
                        st[k].stats.sac.mag = catalog.events[l].magnitudes[
                            0].mag
                        if st[k].stats.sac.mag is None:
                            with open(logFileName, "a") as log:
                                log.write("Couldn't find event magnitude for: " +
                                          evid + '\n')
                            st[k].stats.sac.mag = 0.0
                        # Calculate great circle distance and back-azimuth
                        gcarc, baz = su.haversine(stla, stlo,
                                                  st[k].stats.sac.evla,
                                                  st[k].stats.sac.evlo)
                        st[k].stats.sac.gcarc = gcarc
                        st[k].stats.sac.baz = baz
                        # Get theoretical P arrival time, and assign to header 'T0'
                        model = TauPyModel(model="iasp91")
                        phases = ["P"]
                        arrivals = model.get_travel_times(
                            source_depth_in_km=st[k].stats.sac.evdp / 1000.0,
                            distance_in_degree=gcarc,
                            phase_list=phases)
                        st[k].stats.sac.t0 = arrivals[0].time

                        # Save the Pole Zero file index in 'USER0' Header
                        st[k].stats.sac.user0 = j

                        # Save the P-wave ray parameter in 'USER9' Header
                        st[k].stats.sac.user9 = arrivals[0].ray_param * (
                            np.pi / 180)

                        # Write the data to a SAC file
                        st[k].write(sac_dir + evid, format='SAC')

    elapsed = time.time() - t1
    with open(logFileName, "a") as log:
        log.write('Time required to complete fetch request: ' + str(elapsed))
Example #50
# Assumed imports for this standalone snippet
from os import listdir as lsdir

import instaseis
from obspy.taup import TauPyModel

import SS_MTI  # project-specific module

mnt_folder = "/mnt/marshost/"

if not lsdir(mnt_folder):
    print(f"{mnt_folder} is still empty, mounting now...")
    SS_MTI.DataGetter.mnt_remote_folder(
        host_ip="marshost.ethz.ch",
        host_usr="******",
        remote_folder="/data/",
        mnt_folder=mnt_folder,
    )

npz_file = "/home/nienke/Documents/Research/Data/npz_files/TAYAK_BKE.npz"
# npz_file = "/home/nienke/Data_2020/npz_files/TAYAK_BKE.npz"
# npz_file = "/home/nienke/Documents/Research/Data/npz_files/TAYAK.npz"
model = TauPyModel(npz_file)

db_path = "/mnt/marshost/instaseis2/databases/TAYAK_15s_BKE"
# db_path = "/opt/databases/TAYAK_15s_BKE"
# db_path = "http://instaseis.ethz.ch/blindtest_1s/TAYAK_1s/"
db = instaseis.open_db(db_path)

# SS_MTI.DataGetter.unmnt_remote_folder(mnt_folder=mnt_folder)

lat_rec = 4.502384
lon_rec = 135.623447

strike = 90
dip = 90  # 45
rake = 0  # -90
focal_mech = [strike, dip, rake]
Example #51
Last Modified : Mon 30 Apr 2018 03:58:10 PM EDT
Created By : Samuel M. Haugland

==============================================================================
'''

from matplotlib import pyplot as plt
import numpy as np
import h5py
import obspy
from sys import argv
from obspy.taup import TauPyModel
import argparse
from subprocess import call
from scipy.signal import correlate
model = TauPyModel(model='prem')


def main():
    parser = argparse.ArgumentParser(description='Clip/write reverb intervals')
    parser.add_argument('-s','--synth', metavar='H5_FILE',type=str,
                        help='h5 obspy stream synth file')
    parser.add_argument('-d','--data', metavar='H5_FILE',type=str,
                        help='h5 obspy stream data file')
    args = parser.parse_args()
    try:
        h5f_d = h5py.File('peg_data_reverb.h5','w',driver='core')
    except IOError:
        call('rm peg_data_reverb.h5',shell=True)
        h5f_d = h5py.File('peg_data_reverb.h5','w',driver='core')
    try:
Example #52
"""
from obspy.clients.fdsn import Client
from obspy import UTCDateTime, Stream, read, read_inventory
from obspy.taup import TauPyModel
from obspy.geodetics.base import locations2degrees
from matplotlib.transforms import blended_transform_factory
from os import path
import matplotlib.pyplot as plt
from geopy.geocoders import Nominatim
import numpy as np
from matplotlib.cm import get_cmap
from obspy.geodetics import gps2dist_azimuth
DATA_PROVIDER = "RASPISHAKE"
MODEL = 'iasp91'  # Velocity model to predict travel-times through
#MODEL = 'ak135'  # Velocity model to predict travel-times through
model = TauPyModel(model=MODEL)

client = Client(DATA_PROVIDER)

# Event details
URL = 'https://earthquake.usgs.gov/earthquakes/eventpage/us7000c7y0/executive'
EQNAME = 'M7 15 km NNE of Néon Karlovásion, Greece'
EQLAT = 37.9175
EQLON = 26.7901
EQZ = 25.7859016291  # depth in km
EQTIME = '2020-10-30 11:51:28'
FILE_STEM = 'Turkey-2020-10-30'
MAGNITUDE = 'M7'

RESP = "DISP"  # DISP, VEL or ACC
WINDOW = 20  # displacement plus/minus window
Example #53
def input_chen_tele_body(tensor_info, data_prop):
    """We write some text files, which are based on teleseismic body wave data,
    as inputs for Chen's scripts.

    :param tensor_info: dictionary with moment tensor information
    :param data_prop: dictionary with properties of waveform data
    :type tensor_info: dict
    :type data_prop: dict

    .. warning::

        Make sure the filters of teleseismic data agree with the values in
        sampling_filter.json!
    """
    if not os.path.isfile('tele_waves.json'):
        return
    traces_info = json.load(open('tele_waves.json'))
    date_origin = tensor_info['date_origin']
    dt = traces_info[0]['dt']
    dt = round(dt, 1)
    filtro = data_prop['tele_filter']
    low_freq = filtro['low_freq']
    high_freq = filtro['high_freq']

    with open('filtro_tele', 'w') as outfile:
        outfile.write('Corners: {} {}\n'.format(low_freq, high_freq))
        outfile.write('dt: {}'.format(dt))

    nsta = len(traces_info)
    model = TauPyModel(model="ak135f_no_mud")
    depth = tensor_info['depth']
    event_lat = tensor_info['lat']
    event_lon = tensor_info['lon']

    string = '{0:2d}   FAR GDSN {1:>6} {1:>6}BHZ.DAT {2:5.2f} {3:6.2f} '\
        '{4:5.2f} {5:6.2f} {6:6.2f} 0 0  {7}  {8} {9}  1 0\n'
    def sin_fun(p):
        return p * 3.6 / 111.12

    def angle_fun(p):
        return np.arctan2(sin_fun(p),
                          np.sqrt(1 - sin_fun(p)**2)) * 180.0 / np.pi

    def string_fun1(i, name, dist, az, lat, lon, p_slowness, disp_or_vel):
        return string.format(i, name, dist, az, lat, lon,
                             angle_fun(p_slowness), disp_or_vel, 1.0, 0)

    def string_fun2(i, name, dist, az, lat, lon, s_slowness, disp_or_vel):
        return string.format(i, name, dist, az, lat, lon,
                             angle_fun(s_slowness), disp_or_vel, 4.0, 2)

    with open('Readlp.das', 'w') as outfile:
        outfile.write('30 30 30 0 0 0 0 0 0 1.1e+20\n')
        outfile.write('3 10 {}\n{}{}{}{}{}{}.{}\n{}\n'.format(
            dt, date_origin.year, date_origin.month, date_origin.day,
            date_origin.hour, date_origin.minute, date_origin.second,
            date_origin.microsecond, nsta))
        i = 0
        for file in traces_info:
            name = file['name']
            channel = file['component']
            lat, lon = file['location']
            dist, az, back_azimuth = mng._distazbaz(lat, lon, event_lat,
                                                    event_lon)
            dist = kilometers2degrees(dist)
            derivative = file.get('derivative', False)
            derivative = int(derivative)
            arrivals = mng.theoretic_arrivals(model, dist, depth)
            p_slowness = arrivals['p_slowness']
            s_slowness = arrivals['s_slowness']
            if channel == 'BHZ':
                outfile.write(
                    string_fun1(i + 1, name, dist, az, lat, lon, p_slowness,
                                derivative))
            else:
                outfile.write(
                    string_fun2(i + 1, name, dist, az, lat, lon, s_slowness,
                                derivative))
            i = i + 1

    with open('Wave.tele', 'w') as file1, open('Obser.tele', 'w') as file2:
        write_files_wavelet_observed(file1, file2, dt, data_prop, traces_info)


    # instrumental response common to all body waves
    string2 = '\n3\n' + '0. 0.\n' * 3 + '4\n-6.17E-03  6.17E-03\n'\
              '-6.17E-03 -6.17E-03\n-39.18    49.12\n-39.18   '\
              '-49.12\n3948\n'
    with open('instrumental_response', 'w') as outfile:
        outfile.write('{}\n'.format(nsta))
        outfile.write(string2 * len(traces_info))

    write_wavelet_freqs(dt, 'Wavelets_tele_body')

    with open('Weight', 'w') as outfile:
        for info in traces_info:
            sta = info['name']
            channel = info['component']
            weight = info['trace_weight']
            outfile.write('{} {} {}\n'.format(weight, sta, channel))
    return 'tele_body'
Example #54
def arrival(ot, depth, distance_in_degree):
    model = TauPyModel(model="iasp91")
    arrivals = model.get_travel_times(source_depth_in_km=depth,
                                      distance_in_degree=distance_in_degree)
    arrival_time = ot + arrivals[0].time
    return arrival_time
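A usage sketch for this helper, with illustrative values (imports assumed, since the snippet omits them):

from obspy import UTCDateTime
from obspy.taup import TauPyModel  # required by arrival() above

ot = UTCDateTime('2020-10-30T11:51:28')
print(arrival(ot, depth=25.0, distance_in_degree=40.0))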
Example #55
# #### Step 4: Calculate Theoretical Arrivals
#
# ```python
# from obspy.taup import TauPyModel
# m = TauPyModel(model="ak135")
# arrivals = m.get_ray_paths(...)
# arrivals.plot()
# ```

# + {"tags": ["exercise"]}


# + {"tags": ["solution"]}
from obspy.taup import TauPyModel

m = TauPyModel(model="ak135")

arrivals = m.get_ray_paths(
    distance_in_degree=distance,
    source_depth_in_km=origin.depth / 1000.0)

arrivals.plot();
# -

# #### Step 5: Calculate absolute time of the first arrivals at the station

# + {"tags": ["exercise"]}


# + {"tags": ["solution"]}
first_arrival = origin.time + arrivals[0].time
Example #56
    def get_sp(self, epi, depth_m):
        model = TauPyModel(model=self.veloc_model)
        tt = model.get_travel_times(source_depth_in_km=depth_m / 1000,
                                    distance_in_degree=epi,
                                    phase_list=['sP'])
        return tt[0].time
Example #57
    def __init__(self, stationinfo, mseeddir, sacdir, model='prem'):
        self.mseeddir = mseeddir
        self.sacdir = sacdir
        self.stations = self._read_stations(stationinfo)
        self.model = TauPyModel(model=model)
Example #58
def run_parallel_generate_ruptures(home,project_name,run_name,fault_name,slab_name,mesh_name,
        load_distances,distances_name,UTM_zone,tMw,model_name,hurst,Ldip,Lstrike,
        num_modes,Nrealizations,rake,buffer_factor,rise_time_depths0,rise_time_depths1,time_epi,max_slip,
        source_time_function,lognormal,slip_standard_deviation,scaling_law,ncpus,force_magnitude,
        force_area,mean_slip_name,hypocenter,slip_tol,force_hypocenter,
        no_random,shypo,use_hypo_fraction,shear_wave_fraction,rank,size):
    
    '''
    Depending on user selected flags parse the work out to different functions
    '''
    
    from numpy import load,save,genfromtxt,log10,cos,sin,deg2rad,savetxt,zeros,where
    from time import gmtime, strftime
    from numpy.random import shuffle
    from mudpy import fakequakes
    from obspy import UTCDateTime
    from obspy.taup import TauPyModel
    import warnings

    #I don't condone it but this cleans up the warnings
    warnings.filterwarnings("ignore")
    
    # Fix input formats
    rank=int(rank)
    size=int(size)
    if time_epi=='None':
        time_epi=None
    else:
        time_epi=UTCDateTime(time_epi)
    rise_time_depths=[rise_time_depths0,rise_time_depths1]
    #hypocenter=[hypocenter_lon,hypocenter_lat,hypocenter_dep]
    tMw=tMw.split(',')
    target_Mw=zeros(len(tMw))
    for rMw in range(len(tMw)):
        target_Mw[rMw]=float(tMw[rMw])

    #Should I calculate or load the distances?
    if load_distances==1:  
        Dstrike=load(home+project_name+'/data/distances/'+distances_name+'.strike.npy')
        Ddip=load(home+project_name+'/data/distances/'+distances_name+'.dip.npy')
    else:
        Dstrike,Ddip=fakequakes.subfault_distances_3D(home,project_name,fault_name,slab_name,UTM_zone)
        save(home+project_name+'/data/distances/'+distances_name+'.strike.npy',Dstrike)
        save(home+project_name+'/data/distances/'+distances_name+'.dip.npy',Ddip)
    

    #Read fault and prepare output variable
    whole_fault=genfromtxt(home+project_name+'/data/model_info/'+fault_name)
    
    #Get structure model
    vel_mod_file=home+project_name+'/structure/'+model_name
    
    #Get TauPyModel
    velmod = TauPyModel(model=home+project_name+'/structure/'+model_name.split('.')[0])

    #Now loop over the number of realizations
    realization=0
    if rank==0:
        print('Generating rupture scenarios')
    for kmag in range(len(target_Mw)):
        if rank==0:
            print('... Calculating ruptures for target magnitude Mw = '+str(target_Mw[kmag]))
        for kfault in range(Nrealizations):
            if kfault%1==0 and rank==0:
                print('... ... working on ruptures '+str(ncpus*realization)+' to ' + str(ncpus*(realization+1)-1) + ' of '+str(Nrealizations*size*len(target_Mw)))
                #print '... ... working on ruptures '+str(ncpus*realization+rank)+' of '+str(Nrealizations*size-1)
            
            #Prepare output
            fault_out=zeros((len(whole_fault),14))
            fault_out[:,0:8]=whole_fault[:,0:8]
            fault_out[:,10:12]=whole_fault[:,8:]   
            
            #Success criterion
            success=False
            while success==False:
                #Select only a subset of the faults based on magnitude scaling
                current_target_Mw=target_Mw[kmag]
                ifaults,hypo_fault,Lmax,Wmax,Leff,Weff=fakequakes.select_faults(whole_fault,Dstrike,Ddip,current_target_Mw,buffer_factor,num_modes,scaling_law,force_area,no_shallow_epi=False,no_random=no_random,subfault_hypocenter=shypo)
                fault_array=whole_fault[ifaults,:]
                Dstrike_selected=Dstrike[ifaults,:][:,ifaults]
                Ddip_selected=Ddip[ifaults,:][:,ifaults]
                
                #Determine correlation lengths from effective length.width Leff and Weff
                if Lstrike=='auto': #Use scaling
                    #Ls=10**(-2.43+0.49*target_Mw)
                    Ls=2.0+(1./3)*Leff
                elif Lstrike=='MH2019':
                    Ls=17.7+0.34*Leff
                else:
                    Ls=Lstrike
                if Ldip=='auto': #Use scaling
                    #Ld=10**(-1.79+0.38*target_Mw)
                    Ld=1.0+(1./3)*Weff
                elif Ldip=='MH2019':
                    Ld=6.8+0.4*Weff
                else:
                    Ld=Ldip
                
                #Get the mean uniform slip for the target magnitude
                if mean_slip_name==None:
                    mean_slip,mu=fakequakes.get_mean_slip(target_Mw[kmag],fault_array,vel_mod_file)
                else:
                    foo,mu=fakequakes.get_mean_slip(target_Mw[kmag],fault_array,vel_mod_file)
                    mean_fault=genfromtxt(mean_slip_name)
                    mean_slip=(mean_fault[:,8]**2+mean_fault[:,9]**2)**0.5
                    
                    #keep only faults that have mean slip inside the selected fault_array faults
                    mean_slip=mean_slip[ifaults]
                    
                    #get the area in those selected faults
                    area=fault_array[:,-2]*fault_array[:,-1]
                    
                    #get the moment in those selected faults
                    moment_on_selected=(area*mu*mean_slip).sum()
                    
                    #target moment
                    target_moment=10**(1.5*target_Mw[kmag]+9.1)
                    
                    #How much do I need to upscale?
                    scale_factor=target_moment/moment_on_selected
                    
                    #rescale the slip
                    mean_slip = mean_slip*scale_factor
                    
                    
                    #Make sure mean_slip has no zero slip faults
                    izero=where(mean_slip==0)[0]
                    mean_slip[izero]=slip_tol
                
                #Get correlation matrix
                C=fakequakes.vonKarman_correlation(Dstrike_selected,Ddip_selected,Ls,Ld,hurst)
                
                # Lognormal or not?
                if lognormal==False:
                    #Get covariance matrix
                    C_nonlog=fakequakes.get_covariance(mean_slip,C,target_Mw[kmag],fault_array,vel_mod_file,slip_standard_deviation) 
                    #Get eigen values and eigenvectors
                    eigenvals,V=fakequakes.get_eigen(C_nonlog)
                    #Generate fake slip pattern
                    rejected=True
                    while rejected==True:
#                        slip_unrectified,success=make_KL_slip(fault_array,num_modes,eigenvals,V,mean_slip,max_slip,lognormal=False,seed=kfault)
                        slip_unrectified,success=fakequakes.make_KL_slip(fault_array,num_modes,eigenvals,V,mean_slip,max_slip,lognormal=False,seed=None)
                        slip,rejected,percent_negative=fakequakes.rectify_slip(slip_unrectified,percent_reject=13)
                        if rejected==True:
                            print('... ... ... negative slip threshold exceeded with %d%% negative slip. Recomputing...' % (percent_negative))
                else:
                    #Get lognormal values
                    C_log,mean_slip_log=fakequakes.get_lognormal(mean_slip,C,target_Mw[kmag],fault_array,vel_mod_file,slip_standard_deviation)               
                    #Get eigen values and eigenvectors
                    eigenvals,V=fakequakes.get_eigen(C_log)
                    #Generate fake slip pattern
#                    slip,success=make_KL_slip(fault_array,num_modes,eigenvals,V,mean_slip_log,max_slip,lognormal=True,seed=kfault)
                    slip,success=fakequakes.make_KL_slip(fault_array,num_modes,eigenvals,V,mean_slip_log,max_slip,lognormal=True,seed=None)
            
                #Slip pattern successfully made, moving on.
                #Rigidities
                foo,mu=fakequakes.get_mean_slip(target_Mw[kmag],whole_fault,vel_mod_file)
                fault_out[:,13]=mu
                
                #Calculate moment and magnitude of fake slip pattern
                M0=sum(slip*fault_out[ifaults,10]*fault_out[ifaults,11]*mu[ifaults])
                Mw=(2./3)*(log10(M0)-9.1)
                
                #Force to target magnitude
                if force_magnitude==True:
                    M0_target=10**(1.5*target_Mw[kmag]+9.1)
                    M0_ratio=M0_target/M0
                    #Multiply slip by ratio
                    slip=slip*M0_ratio
                    #Recalculate
                    M0=sum(slip*fault_out[ifaults,10]*fault_out[ifaults,11]*mu[ifaults])
                    Mw=(2./3)*(log10(M0)-9.1)
                    
                #check max_slip again
                if slip.max() > max_slip:
                    success=False
                    print('... ... ... max slip condition violated due to force_magnitude=True, recalculating...')
            
            
            #Get stochastic rake vector
            stoc_rake=fakequakes.get_stochastic_rake(rake,len(slip))
            
            #Place slip values in output variable
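            #(decompose total slip into strike-slip and dip-slip components:
            # ss = slip*cos(rake), ds = slip*sin(rake), with rake in degrees)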
            fault_out[ifaults,8]=slip*cos(deg2rad(stoc_rake))
            fault_out[ifaults,9]=slip*sin(deg2rad(stoc_rake))
            
            #Move hypocenter to somewhere with a substantial fraction of peak slip
#            slip_fraction=0.25
#            islip=where(slip>slip.max()*slip_fraction)[0]
#            shuffle(islip) #randomize
#            hypo_fault=ifaults[islip[0]] #select first from randomized vector
            
            #Calculate and scale rise times
            rise_times=fakequakes.get_rise_times(M0,slip,fault_array,rise_time_depths,stoc_rake)
            
            #Place rise_times in output variable
            fault_out[:,7]=0
            fault_out[ifaults,7]=rise_times
            
            #Calculate rupture onset times
            if force_hypocenter==False: #Use random hypo, otherwise force hypo to user-specified location
                hypocenter=whole_fault[hypo_fault,1:4]

            t_onset=fakequakes.get_rupture_onset(home,project_name,slip,fault_array,model_name,hypocenter,rise_time_depths,M0,velmod)
            fault_out[:,12]=0
            fault_out[ifaults,12]=t_onset
            
            #Calculate location of moment centroid
            centroid_lon,centroid_lat,centroid_z=fakequakes.get_centroid(fault_out)
            
            #Write to file
            run_number=str(ncpus*realization+rank).rjust(6,'0')
            outfile=home+project_name+'/output/ruptures/'+run_name+'.'+run_number+'.rupt'
            savetxt(outfile,fault_out,fmt='%d\t%10.6f\t%10.6f\t%8.4f\t%7.2f\t%7.2f\t%4.1f\t%5.2f\t%5.2f\t%5.2f\t%10.2f\t%10.2f\t%5.2f\t%.6e',header='No,lon,lat,z(km),strike,dip,rise,dura,ss-slip(m),ds-slip(m),ss_len(m),ds_len(m),rupt_time(s),rigidity(Pa)')
            
            #Write log file
            logfile=home+project_name+'/output/ruptures/'+run_name+'.'+run_number+'.log'
            f=open(logfile,'w')
            f.write('Scenario calculated at '+strftime("%Y-%m-%d %H:%M:%S", gmtime())+' GMT\n')
            f.write('Project name: '+project_name+'\n')
            f.write('Run name: '+run_name+'\n')
            f.write('Run number: '+run_number+'\n')
            f.write('Velocity model: '+model_name+'\n')
            f.write('No. of KL modes: '+str(num_modes)+'\n')
            f.write('Hurst exponent: '+str(hurst)+'\n')
            f.write('Corr. length used Lstrike: %.2f km\n' % Ls)
            f.write('Corr. length used Ldip: %.2f km\n' % Ld)
            f.write('Slip std. dev.: %.3f\n' % slip_standard_deviation)
            f.write('Maximum length Lmax: %.2f km\n' % Lmax)
            f.write('Maximum width Wmax: %.2f km\n' % Wmax)
            f.write('Effective length Leff: %.2f km\n' % Leff)
            f.write('Effective width Weff: %.2f km\n' % Weff)
            f.write('Target magnitude: Mw %.4f\n' % target_Mw[kmag])
            f.write('Actual magnitude: Mw %.4f\n' % Mw)
            f.write('Hypocenter (lon,lat,z[km]): (%.6f,%.6f,%.2f)\n' %(hypocenter[0],hypocenter[1],hypocenter[2]))
            f.write('Hypocenter time: %s\n' % time_epi)
            f.write('Centroid (lon,lat,z[km]): (%.6f,%.6f,%.2f)\n' %(centroid_lon,centroid_lat,centroid_z))
            f.write('Source time function type: %s\n' % source_time_function)
            f.close()
            
            realization+=1
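
# A minimal sketch (not part of the source above) of reading back one of the
# .rupt files written by savetxt(); the column layout follows the header string
# above, and the file path here is hypothetical.
import numpy as np
rupt = np.genfromtxt('project/output/ruptures/run_name.000000.rupt')
total_slip = np.sqrt(rupt[:, 8]**2 + rupt[:, 9]**2)  # ss-slip(m), ds-slip(m)
print('Peak slip: %.2f m, mean rigidity: %.2e Pa'
      % (total_slip.max(), rupt[:, 13].mean()))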
Example #59
import matplotlib.pyplot as plt
import os.path
import time
import glob
import shutil
import numpy as np
import scipy
from obspy.io.xseed import Parser
from obspy.clients.arclink import Client as ARCLINKClient
from obspy.clients.fdsn import Client as IRISClient
from subprocess import call
import subprocess
from obspy.taup.taup import getTravelTimes  # legacy API removed in modern ObsPy; TauPyModel (below) is the replacement
import sys
from obspy.taup import TauPyModel
model=TauPyModel(model="prem")

# Find list of stations directories
stations = glob.glob('DataRF/*')

# Add phase names as additional arguments (these are the TauP phase names)
phase=[]
for i in range(1,len(sys.argv)):
    phase.append(sys.argv[i])
count=0
# Loop through stations
for stadir in stations:
        print("STATION:", stadir)
        stalist=glob.glob(stadir+'/*PICKLE') 
        # make directory for processed data
        direc= stadir+'/Travel_time_added'
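        # (snippet truncated in the source; a plausible continuation, assuming
        # the directory should exist before writing processed files:)
        if not os.path.exists(direc):
            os.makedirs(direc)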
Example #60
"""
:copyright:
    2012-2021 Claudio Satriano <*****@*****.**>
:license:
    CeCILL Free Software License Agreement, Version 2.1
    (http://www.cecill.info/index.en.html)
"""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import os
from glob import glob
import logging
import warnings
from sourcespec.ssp_setup import ssp_exit
from obspy.taup import TauPyModel
model = TauPyModel(model='iasp91')
logger = logging.getLogger(__name__.split('.')[-1])
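
# For context, a minimal sketch (hypothetical helper, not part of sourcespec)
# of how the module-level iasp91 TauPyModel could supply a theoretical arrival
# when no NLL grid is available; get_travel_times() is the standard
# obspy.taup call.
def _theoretical_arrival_sketch(origin_time, depth_km, distance_deg, phase='P'):
    """Return an absolute arrival time from the iasp91 model, or None."""
    arrivals = model.get_travel_times(source_depth_in_km=depth_km,
                                      distance_in_degree=distance_deg,
                                      phase_list=[phase])
    if not arrivals:
        return None
    return origin_time + arrivals[0].time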


def _wave_arrival_nll(trace, phase, NLL_time_dir):
    """Arrival time using a NLL grid."""
    if trace.stats.hypo.origin_time is None:
        return
    if NLL_time_dir is None:
        return
    try:
        from nllgrid import NLLGrid
    except ImportError:
        logger.error('Error: the "nllgrid" python module is required '
                     'for "NLL_time_dir".')
        ssp_exit()