Example No. 1
def points_on_sphere(dx,xmin=-180.,xmax=180.,ymin=-89.999,ymax=89.999,c_centr=None,\
radius=None):
    """
    Calculate a more or less equally spaced grid on spherical Earth's surface.
    :param dx: spacing in latitudinal and longitudinal direction in meter
    :type c_centr: Tuple
    :param c_centr: Specify a central location
    :type radius: float
    :param radius: Radius around central location in m; no sources beyond this will be included
    :returns: np.array(latitude, longitude) of grid points, where -180 <= lon < 180 and -90 <= lat < 90
    """

    if xmax <= xmin or ymax <= ymin:
        msg = 'Lower bounds must be lower than upper bounds.'
        raise ValueError(msg)

    gridx = []
    gridy = []

    if ymin == -90.:
        ymin = -89.999
        warn("Resetting lat_min to -89.999 degree")
    lat = ymin

    while lat <= ymax:
        d_lat = dx / len_deg_lat(lat)
        d_lon = dx / len_deg_lon(lat)

        lon = xmin + np.random.rand(1)[0] * d_lon

        while lon <= xmax:

            gridx.append(lon)
            gridy.append(lat)

            if c_centr and radius:
                if gps2dist_azimuth(lat, lon, c_centr[0],
                                    c_centr[1])[0] > radius:
                    print(
                        lat, lon,
                        gps2dist_azimuth(lat, lon, c_centr[0], c_centr[1])[0])
                    if abs(lat) != 90.:
                        d_lon = dx / len_deg_lon(lat)
                        lon += d_lon
                        continue
                    else:
                        break

            if abs(lat) == 90:
                # length of a degree longitude will be 0.
                break
            else:
                d_lon = dx / len_deg_lon(lat)
                lon += d_lon
        lat += d_lat  # do not start at pole or zero division will raise...

    # return values sorted by longitude, basemap likes it.
    grid = list(zip(*sorted(zip(gridx, gridy), key=lambda it: it[0])))
    return list((gridx, gridy))  #grid
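
The snippet above assumes helper functions len_deg_lat and len_deg_lon that are not shown. A minimal sketch of what they might look like, using standard per-degree arc-length approximations (not taken from the original source), is:

import numpy as np

def len_deg_lat(lat):
    # approximate length of one degree of latitude in metres (series expansion)
    rlat = np.radians(lat)
    return 111132.954 - 559.822 * np.cos(2.0 * rlat) + 1.175 * np.cos(4.0 * rlat)

def len_deg_lon(lat):
    # approximate length of one degree of longitude in metres at latitude `lat`
    # (spherical approximation; goes to 0 at the poles)
    return 111319.5 * np.cos(np.radians(lat))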
Example No. 3
 def test_gps_2_dist_azimuth_bug150(self):
     """
     Test case for #150: UserWarning will only be raised if geographiclib is
     not installed.
     """
     # this raises UserWarning
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('error', UserWarning)
         with pytest.raises(UserWarning):
             gps2dist_azimuth(0, 0, 0, 180)
Example No. 4
def get_geoinf(x1, y1, x2, y2, inp='coord'):

    if inp == 'coord':
        try:
            dist = gps2DistAzimuth(x1, y1, x2, y2)[0]
            az = gps2DistAzimuth(x1, y1, x2, y2)[1]
            baz = gps2DistAzimuth(x1, y1, x2, y2)[2]
        except NameError:
            dist = gps2dist_azimuth(x1, y1, x2, y2)[0]
            az = gps2dist_azimuth(x1, y1, x2, y2)[1]
            baz = gps2dist_azimuth(x1, y1, x2, y2)[2]

    return (x1, y1, x2, y2, dist, az, baz)
Example No. 6
    def initialize(self):
        ''' Initialize the node.

        '''
        super(QuarryBlastClassification, self).initialize()
        # Compute the neighbor stations.
        nearest_station_name = self.pref_manager.get_value('nearest_station')[0]
        nearest_station = self.project.geometry_inventory.get_station(name = nearest_station_name)[0]
        stations = self.project.geometry_inventory.get_station()
        stations = [x for x in stations if x.name != nearest_station.name]

        station_dist = []
        src_lonlat = nearest_station.get_lon_lat()
        for cur_station in stations:
            dst_lonlat = cur_station.get_lon_lat()
            dist, az1, az2 = geodetics.gps2dist_azimuth(lon1 = src_lonlat[0], lat1 = src_lonlat[1],
                                                        lon2 = dst_lonlat[0], lat2 = dst_lonlat[1])
            station_dist.append((cur_station, dist))

        self.nearest_station = nearest_station
        self.neighbor_stations = sorted(station_dist, key = lambda x: x[1])

        # Get the desired event type.
        event_types = self.load_event_types()
        dst_event_type = self.pref_manager.get_value('event_type')[0]
        self.event_type = [x for x in event_types if x.name == dst_event_type][0]
Example No. 7
 def test_issue_375(self):
     """
     Test for #375.
     """
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 + 1, 10 + 1)
     assert round(azim, 0) == 32
     assert round(bazim, 0) == 213
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 + 1, 10 - 1)
     assert round(azim, 0) == 328
     assert round(bazim, 0) == 147
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 - 1, 10 + 1)
     assert round(azim, 0) == 147
     assert round(bazim, 0) == 327
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 - 1, 10 - 1)
     assert round(azim, 0) == 213
     assert round(bazim, 0) == 33
Example No. 8
    def magnitude_coda(self):
        Mc = []
        # values for california
        a = 2.0
        b = 0.0035
        c = -0.87
        #
        for key in self.chop['Coda']:
            magnitude_dict = self.chop['Coda'][key]
            coords = get_coordinates_from_metadata(self.inventory,
                                                   magnitude_dict[0])
            dist, _, _ = gps2dist_azimuth(coords.Latitude, coords.Longitude,
                                          self.event.latitude,
                                          self.event.longitude)
            dist = dist / 1000
            #data = np.array(magnitude_dict[2])
            pick_time = UTCDateTime(mdt.num2date(magnitude_dict[1][0]))
            end_time = UTCDateTime(mdt.num2date(magnitude_dict[1][-1]))
            t_coda = end_time - pick_time
            Mc_value = a * np.log10(t_coda) + b * dist + c
            Mc.append(Mc_value)

        Mcs = np.array(Mc)
        Mc_mean = Mcs.mean()
        Mc_deviation = Mcs.std()

        print("Coda Magnitude", Mc_mean, "Variance", Mc_deviation)
        label = "Mc"
        self.plot_histograms(Mc, label)
        self.Mcs = Mcs
        self.Mc = Mc_mean
        self.Mc_std = Mc_deviation
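
For a rough sense of the coda-magnitude relation Mc = a*log10(t_coda) + b*dist + c used above, a standalone sketch with made-up numbers (60 s coda duration at 25 km epicentral distance, same California coefficients) would be:

import numpy as np

a, b, c = 2.0, 0.0035, -0.87   # California coefficients from the snippet above
t_coda = 60.0                  # made-up coda duration in seconds
dist = 25.0                    # made-up epicentral distance in km
print(a * np.log10(t_coda) + b * dist + c)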
Example No. 9
 def test_compute_differential_times(self):
     max_sep = 8.
     diff_times, mapper = compute_differential_times(catalog=self.catalog,
                                                     correlation=False,
                                                     event_id_mapper=None,
                                                     max_sep=max_sep,
                                                     min_link=8)
     reverse_mapper = {value: key for key, value in mapper.items()}
     self.assertEqual(len(self.catalog), len(diff_times))
     for key, links in diff_times.items():
         master_id = mapper[key]
         master_event = [
             e for e in self.catalog if e.resource_id.id == key
         ][0]
         for link in links:
             self.assertEqual(master_id, link.event_id_1)
             linked_event = [
                 e for e in self.catalog
                 if e.resource_id.id == reverse_mapper[link.event_id_2]
             ][0]
             dist, _, _ = gps2dist_azimuth(
                 lat1=master_event.preferred_origin().latitude,
                 lon1=master_event.preferred_origin().longitude,
                 lat2=linked_event.preferred_origin().latitude,
                 lon2=linked_event.preferred_origin().longitude)
             self.assertLess(dist / 1000, max_sep)
Example No. 10
def are_duplicates(tr1, tr2, max_dist_tolerance):
    """
    Determines whether two StationTraces are duplicates by checking the
    station, channel codes, and the distance between them.

    Args:
        tr1 (StationTrace):
            1st trace.
        tr2 (StationTrace):
            2nd trace.
        max_dist_tolerance (float):
            Maximum distance tolerance for determining whether two streams
            are at the same location (in meters).

    Returns:
        bool. True if traces are duplicates, False otherwise.
    """

    # First, check if the ids match (net.sta.loc.cha)
    if tr1.id == tr2.id:
        return True
    # If not matching IDs, check the station, instrument code, and distance
    else:
        distance = gps2dist_azimuth(
            tr1.stats.coordinates.latitude, tr1.stats.coordinates.longitude,
            tr2.stats.coordinates.latitude, tr2.stats.coordinates.longitude)[0]
        if (tr1.stats.station == tr2.stats.station and
            tr1.stats.location == tr2.stats.location and
            tr1.stats.channel == tr2.stats.channel and
           distance < max_dist_tolerance):
            return True
        else:
            return False
Example No. 11
def azimuth(coordinates, x0, y0, x1, y1):
    """
    Returns the azimuth between two coordinate sets.

    :type coordinates: str
    :param coordinates: {'DEG', 'UTM', 'MIX'}
    :type x0: float
    :param x0: X coordinate of station 1
    :type y0: float
    :param y0: Y coordinate of station 1
    :type x1: float
    :param x1: X coordinate of station 2
    :type y1: float
    :param y1: Y coordinate of station 2

    :rtype: float
    :returns: The azimuth in degrees
    """
    if coordinates == "DEG":
        dist, azim, bazim = gps2dist_azimuth(y0, x0, y1, x1)
        return azim
    elif coordinates == 'UTM':
        azim = 90. - np.arctan2((y1 - y0), (x1 - x0)) * 180. / np.pi
        return azim
    else:
        print("Please consider having a single coordinate system for\
            all stations")
        return 0
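
For the 'DEG' branch, the work is done entirely by gps2dist_azimuth, which returns (distance in m, azimuth A->B, azimuth B->A); a minimal standalone check with made-up coordinates (latitude is passed first, hence the y0, x0 order above) could be:

from obspy.geodetics import gps2dist_azimuth

# made-up station coordinates: (lat, lon) = (46.0, 6.0) and (47.0, 7.5)
dist_m, az, baz = gps2dist_azimuth(46.0, 6.0, 47.0, 7.5)
print(dist_m / 1000.0, az, baz)   # distance in km, azimuth and back azimuth in degrees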
Example No. 12
def get_interstation_distance(station1, station2, coordinates="DEG"):
    """Returns the distance in km between `station1` and `station2`.

    .. warning:: Currently the coordinate systems of the two stations have to be the same!

    :type station1: :class:`~msnoise.msnoise_table_def.Station`
    :param station1: A Station object
    :type station2: :class:`~msnoise.msnoise_table_def.Station`
    :param station2: A Station object
    :type coordinates: str
    :param coordinates: The coordinates system. "DEG" is WGS84 latitude/
        longitude in degrees. "UTM" is expressed in meters.



    :rtype: float
    :returns: The interstation distance in km
    """

    if coordinates == "DEG":
        dist, azim, bazim = gps2dist_azimuth(station1.Y, station1.X,
                                            station2.Y, station2.X)
        return dist / 1.e3
    else:
        dist = np.hypot(float(station1.X - station2.X),
                        float(station1.Y - station2.Y)) / 1.e3
        return dist
Example No. 14
def azimuth(coordinates, x0, y0, x1, y1):
    """
    Returns the azimuth between two coordinate sets.

    :type coordinates: str
    :param coordinates: {'DEG', 'UTM', 'MIX'}
    :type x0: float
    :param x0: X coordinate of station 1
    :type y0: float
    :param y0: Y coordinate of station 1
    :type x1: float
    :param x1: X coordinate of station 2
    :type y1: float
    :param y1: Y coordinate of station 2

    :rtype: float
    :returns: The azimuth in degrees
    """
    if coordinates == "DEG":
        dist, azim, bazim = gps2dist_azimuth(y0, x0, y1, x1)
        return azim
    elif coordinates == 'UTM':
        if (np.isclose(y0, y1) & np.isclose(x0, x1)):
            return 0
        else:
            azim = 90. - np.arctan2((y1 - y0), (x1 - x0)) * 180. / np.pi
            return azim % 360
    else:
        logging.warning("Please consider having a single coordinate system for\
            all stations")
        return 0
Example No. 15
def get_geoinf(id1, id2):

    inv1 = '{}.{}.xml'.format(*id1.split('.')[0:2])
    inv2 = '{}.{}.xml'.format(*id2.split('.')[0:2])

    inv1 = read_inventory(os.path.join('meta', 'stationxml', inv1))
    inv2 = read_inventory(os.path.join('meta', 'stationxml', inv2))

    # Replace 'radial' and 'transverse' by 'N' and 'E'
    id1 = re.sub(r'\.??R$', 'N', id1)
    id2 = re.sub(r'\.??R$', 'N', id2)
    id1 = re.sub(r'\.??T$', 'E', id1)
    id2 = re.sub(r'\.??T$', 'E', id2)

    c1 = inv1.get_coordinates(id1)
    c2 = inv2.get_coordinates(id2)

    lat1, lon1, lat2, lon2 = (
        c1['latitude'],
        c1['longitude'],
        c2['latitude'],
        c2['longitude'])

    dist, az, baz = gps2dist_azimuth(lat1, lon1, lat2, lon2)

    return lat1, lon1, lat2, lon2, dist, az, baz
Example No. 16
def hypo_dist(trace):
    """Compute hypocentral and epicentral distance (in km) for a trace."""
    try:
        coords = trace.stats.coords
        hypo = trace.stats.hypo
    except (KeyError, AttributeError):
        return None
    if None in (coords, hypo):
        return None
    stla = coords.latitude
    stlo = coords.longitude
    stel = coords.elevation
    evla = hypo.latitude
    evlo = hypo.longitude
    evdp = hypo.depth
    if None in (stla, stlo, stel, evla, evlo, evdp):
        return None
    epi_dist, az, baz = gps2dist_azimuth(
        hypo.latitude, hypo.longitude,
        trace.stats.coords.latitude, trace.stats.coords.longitude)
    epi_dist /= 1e3   # in km
    gcarc = kilometers2degrees(epi_dist)
    hypo_dist = math.sqrt(epi_dist**2 + (stel+evdp)**2)
    trace.stats.azimuth = az
    trace.stats.back_azimuth = baz
    trace.stats.epi_dist = epi_dist
    trace.stats.hypo_dist = hypo_dist
    trace.stats.gcarc = gcarc
    return hypo_dist
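
A compact stand-alone version of the same distance bookkeeping, with made-up coordinates and assuming (as the snippet appears to) that station elevation and event depth are both expressed in km, might look like:

import math
from obspy.geodetics import gps2dist_azimuth, kilometers2degrees

# made-up event (38.0N, 22.0E, 10 km deep) and station (38.3N, 22.4E, 0.4 km elevation)
epi_dist = gps2dist_azimuth(38.0, 22.0, 38.3, 22.4)[0] / 1e3      # km
gcarc = kilometers2degrees(epi_dist)                              # degrees
hypo_dist = math.sqrt(epi_dist ** 2 + (0.4 + 10.0) ** 2)          # km
print(epi_dist, gcarc, hypo_dist)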
Example No. 17
def calc_detection(netset, conf, session, dd=0.2):
    """ Calculate the magnitude of detection at a delta degrees of dd"""
    stalist = get_stations(netset, session)
    stalist2csv(stalist, netset)
    fh = open("GIS/%s/%s_mdetect.csv" % (netset['label'], netset['label']),
              'w')
    fh.write("longitude,latitude,Mdetect\n")
    for lat in np.arange(conf['region']['minlatitude'],
                         conf['region']['maxlatitude'], dd):
        for lon in np.arange(conf['region']['minlongitude'],
                             conf['region']['maxlongitude'], dd):
            dv = []  # Store detection values to all stations
            for sta in stalist:
                d, azm, baz = gps2dist_azimuth(sta.latitude, sta.longitude,
                                               lat, lon)
                threshold_db = sta.meanp25 + conf['source'][
                    'nsigma'] * sta.stdp25 + conf['source']['snr_db']
                f = np.array([4, 6, 8])
                mw = noise2mw(f,
                              threshold_db,
                              Q=conf['source']['Q'],
                              rho=conf['source']['rho'],
                              vel=conf['source']['alpha'],
                              delta=d / 1000.,
                              depth=conf['source']['depth'],
                              stressdrop=conf['source']['stressdrop'],
                              phase='P')
                dv.append(mw)
            dv = np.sort(dv)
            fh.write("%.2f,%.2f,%.2f\n" %
                     (lon, lat, dv[conf['source']['nstations'] - 1]))
    fh.close()
Example No. 18
def retrieve_waveform(client,net,stn,t1,t2,stats_dict=None,cha="BHE,BHN,BHZ",attach_response=False,loc="",pharr=None, phasenm = 'P'):  
    try:  
        st = client.get_waveforms(net, stn, loc, cha, t1, t2,attach_response=attach_response)
    except Exception:
        return False
    # print("Retrieving")
    if phasenm == 'P':
        # filter_traces(st,lenphase=int(t2-t1))
        filter_traces_rf(st,pharr = pharr)
    elif phasenm == 'SKS':
        filter_traces_sks(st,pharr = pharr)

    if len(st) != 3:
        # print(f"All three components not available: {len(st)}")
        return False
    if stats_dict:
        dist, baz, _ = gps2dist_azimuth(stats_dict['station_latitude'],stats_dict['station_longitude'],stats_dict['event_latitude'],stats_dict['event_longitude'])
        for tr in st:
            for key, value in stats_dict.items():
                tr.stats[key] = value
            tr.stats['distance'] = dist / 1000 / DEG2KM
            tr.stats['back_azimuth'] = baz
    
    st.merge()
    # if all 3 components present and no gap or overlap in data
    if len(st) == 3 and not any(isinstance(tr.data, np.ma.masked_array) for tr in st):
        # print(f"Stream obtained {len(st)}")
        return RFStream(st)
    elif not any(isinstance(tr.data, np.ma.masked_array) for tr in st):
        # print("--------> There's a gap/overlap in the data")
        return False
Example No. 19
 def test_issue_375(self):
     """
     Test for #375.
     """
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 + 1, 10 + 1)
     self.assertEqual(round(azim, 0), 32)
     self.assertEqual(round(bazim, 0), 213)
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 + 1, 10 - 1)
     self.assertEqual(round(azim, 0), 328)
     self.assertEqual(round(bazim, 0), 147)
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 - 1, 10 + 1)
     self.assertEqual(round(azim, 0), 147)
     self.assertEqual(round(bazim, 0), 327)
     _, azim, bazim = gps2dist_azimuth(50, 10, 50 - 1, 10 - 1)
     self.assertEqual(round(azim, 0), 213)
     self.assertEqual(round(bazim, 0), 33)
Example No. 20
def _azimuth(lat1, lon1, lat2, lon2):
    """
    The azimuth (unit: degree) starting from point 1 to
    point 2 on the sphere
    """
    _, azi, _ = gps2dist_azimuth(lat1, lon1, lat2, lon2)
    return azi
Example No. 21
def compute_ellipticity_corr(arrival_phase, ev_latitude, ev_longitude,
                             ev_depth_km, sta_latitude, sta_longitude,
                             degrees_to_source):
    """
    Utility function to compute ellipticity correction.

    :param arrival_phase: P or S
    :param ev_latitude:  event lat
    :param ev_longitude: event long
    :param ev_depth_km: event depth in km
    :param sta_latitude: station lat
    :param sta_longitude: station long
    :param degrees_to_source: degree to source
    :return: ellipticity correction float value
    """
    myazim = gps2dist_azimuth(ev_latitude, ev_longitude, sta_latitude,
                              sta_longitude)[1]  # index [1] is the azimuth
    # see https://docs.obspy.org/_modules/obspy/geodetics/base.html#gps2dist_azimuth
    # this function returns 3 values: (great_circle_distance_in_m, azimuth_A->B_in_degrees, azimuth_B->A_in_degrees)

    log.debug("Check input params to ellipticity_corr = %s, %s, %s, %s, %s",
              arrival_phase, degrees_to_source, ev_depth_km, 90 - ev_latitude,
              myazim)

    ellipticity_corr = ellipcorr.ellipticity_corr(
        phase=arrival_phase,
        edist=degrees_to_source,
        edepth=ev_depth_km,
        ecolat=90 - ev_latitude,  # conversion to co-latitude
        azim=myazim)

    log.debug("ellipticity_corr = %s", ellipticity_corr)

    return ellipticity_corr
Example No. 22
    def test_lcalda(self):
        """
        Test that distances are set when geographic information is present and
        lcalda is True, and that they're not set when geographic information
        is missing or lcalda is false.
        """
        stla, stlo, evla, evlo = -35.0, 100, 42.5, -37.5
        meters, az, baz = gps2dist_azimuth(evla, evlo, stla, stlo)
        km = meters / 1000.0
        gcarc = kilometer2degrees(km)

        # distances are set when lcalda True and all distance values set
        sac = SACTrace(lcalda=True, stla=stla, stlo=stlo, evla=evla, evlo=evlo)
        self.assertAlmostEqual(sac.az, az, places=4)
        self.assertAlmostEqual(sac.baz, baz, places=4)
        self.assertAlmostEqual(sac.dist, km, places=4)
        self.assertAlmostEqual(sac.gcarc, gcarc, places=4)
        # distances are not set when lcalda False and all distance values set
        sac = SACTrace(lcalda=False, stla=stla, stlo=stlo, evla=evla,
                       evlo=evlo)
        self.assertIs(sac.az, None)
        self.assertIs(sac.baz, None)
        self.assertIs(sac.dist, None)
        self.assertIs(sac.gcarc, None)
        # distances are not set when lcalda True, not all distance values set
        sac = SACTrace(lcalda=True, stla=stla)
        self.assertIs(sac.az, None)
        self.assertIs(sac.baz, None)
        self.assertIs(sac.dist, None)
        self.assertIs(sac.gcarc, None)
        # exception raised when set_distances is forced but not all distances
        # values are set. NOTE: still have a problem when others are "None".
        sac = SACTrace(lcalda=True, stla=stla)
        self.assertRaises(SacHeaderError, sac._set_distances, force=True)
Example No. 23
def calc_steer(slats, slons):
    """
    Compute the steering vector

    :type slats: :class:`numpy.ndarray`
    :param slats: station latitudes
    :type slons: :class:`numpy.ndarray`
    :param slons: station longitudes
    """
    theta = arange(0, 362, 2)
    theta = theta.reshape((theta.size, 1))
    sta_origin_dist = array([])
    sta_origin_bearing = array([])
    meanlat = slats.mean()
    meanlon = slons.mean()
    for lat, lon in zip(slats, slons):
        #get dist between each sta and mean xy of array
        dist, az, baz = gps2dist_azimuth(meanlat, meanlon, lat, lon)
        sta_origin_dist = append(sta_origin_dist, dist)
        sta_origin_bearing = append(sta_origin_bearing, az)
    # bring dist back to x and y components (dist*cos(az) and dist*sin(az))
    sta_origin_x = sta_origin_dist * cos(sta_origin_bearing * pi / 180.)
    sta_origin_y = sta_origin_dist * sin(sta_origin_bearing * pi / 180.)
    # compute this for many different theta, since we later look for the best one
    zeta_x = -cos(theta * pi / 180.)
    zeta_y = -sin(theta * pi / 180.)
    # dot product between zeta and x: d = -x.cos(theta)-y.sin(theta)
    zetax = zeta_x * sta_origin_x + zeta_y * sta_origin_y
    return zetax, theta, sta_origin_x, sta_origin_y
Example No. 27
 def magnitude_local(self):
     ML = []
     for key in self.chop['Body waves']:
         magnitude_dict = self.chop['Body waves'][key]
         coords = get_coordinates_from_metadata(self.inventory,
                                                magnitude_dict[0])
         dist, _, _ = gps2dist_azimuth(coords.Latitude, coords.Longitude,
                                       self.event.latitude,
                                       self.event.longitude)
         dist = dist / 1000
         data = np.array(
             magnitude_dict[2]
         )  # already converted Wood Anderson (Gain in mm 2800 +-60)
         max_amplitude = np.max(data) * 1e3  # convert to  mm --> nm
         ML_value = np.log10(
             max_amplitude) + 1.11 * np.log10(dist) + 0.00189 * dist - 2.09
         ML.append(ML_value)
         MLs = np.array(ML)
         ML_mean = MLs.mean()
         ML_deviation = MLs.std()
     print("Local Magnitude", ML_mean, "Variance", ML_deviation)
     label = "ML"
     self.MLs = MLs
     self.plot_histograms(ML, label)
     self.ML = ML_mean
     self.ML_std = ML_deviation
Example No. 28
 def aperture(self):
     """The array aperture is the largest inter-station distance."""
     aperture = 0
     for ele1, ele2 in combinations(self.elements, 2):
         dist = gps2dist_azimuth(ele1.latitude, ele1.longitude,
                                 ele2.latitude, ele2.longitude)[0]
         aperture = max(aperture, dist)
     return aperture
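
A self-contained sketch of the same idea, with made-up coordinate pairs standing in for the self.elements objects assumed above:

from itertools import combinations
from obspy.geodetics import gps2dist_azimuth

# made-up (lat, lon) pairs standing in for array elements
elements = [(48.00, 11.00), (48.10, 11.20), (47.95, 11.10)]
aperture = max(gps2dist_azimuth(a[0], a[1], b[0], b[1])[0]
               for a, b in combinations(elements, 2))
print(aperture)   # largest inter-element distance in metres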
Example No. 29
def kernel(event_pair, stations=None):
    """
    kernel: the core function to get the information.
    """
    model = TauPyModel(model="ak135")
    # for each station, we calculate the travel time
    result = {}
    evla = event_pair.lat
    evlo = event_pair.lon
    evdp = event_pair.dep / 1000
    event_time = event_pair.time

    for row in stations:
        result_template = {
            "event_time": event_time,
            "evla": evla,
            "evlo": evlo,
            "evdp": evdp,
            "gcarc": None,
            "az": None,
            "baz": None,
            "S": None,
            "sS": None,
            "SS": None,
            "P": None,
            "pP": None,
            "sP": None,
            "PP": None,
            "3.3kmps": None,
            "4.6kmps": None,
            "ScS": None
        }
        station = row[0]
        network = row[1]
        net_sta = f"{network}.{station}"
        stla = float(row[2])
        stlo = float(row[3])
        arrivals = model.get_travel_times_geo(evdp, evla, evlo, stla, stlo,
                                              PHASE_LIST)

        gcarc_m, az, baz = gps2dist_azimuth(evla, evlo, stla, stlo)
        gcarc = kilometers2degrees(gcarc_m / 1000)
        result_template["gcarc"] = gcarc
        result_template["az"] = az
        result_template["baz"] = baz

        for each_arrival in arrivals:
            name = each_arrival.name
            time = each_arrival.time
            if name in PHASE_LIST:
                if name == "p":
                    name = "P"
                if name == "s":
                    name = "S"
                if result_template[name] is None:
                    result_template[name] = time
        result[net_sta] = result_template
    return result
Example No. 30
def calculate_epicentral_distance(event_lat, event_lon, sta_lat, sta_lon):

    from obspy.geodetics import gps2dist_azimuth

    epi_dist, az, baz = gps2dist_azimuth(event_lat, event_lon, sta_lat,
                                         sta_lon)
    epi_dist = epi_dist / 1000  # Convert to km

    return epi_dist
Example No. 31
def _join_unconnected_areas(areas, max_distance, inventory):
    # At the moment, areas are joined by setting the site response of
    # the station pair with smallest distance to 1
    # Often this works, but sometimes it produces undesired results
    coordinates = _collect_station_coordinates(inventory)
    station_by_coordinate = {c: sta for sta, c in coordinates.items()}
    # reduce number of coordinates in each area
    hulls = {}
    for name in areas:
        points = np.array([coordinates[sta] for sta in areas[name]])
        hull = scipy.spatial.ConvexHull(points)
        hulls[name] = {
            station_by_coordinate[tuple(p)]
            for p in points[hull.vertices, :]
        }
    # calculate distances between unconnected areas
    distance = {}
    for a1 in areas:
        for a2 in areas:
            name = frozenset((a1, a2))
            if name in distance or a1 == a2:
                continue
            dists = {}
            for sta1 in hulls[a1]:
                for sta2 in hulls[a2]:
                    args = coordinates[sta1] + coordinates[sta2]
                    dist = gps2dist_azimuth(*args)[0]
                    dists[(sta1, sta2)] = dist
            mink = min(dists, key=dists.get)
            distance[name] = (dists[mink] / 1e3, mink)
    # join unconnected regions if distance is smaller than max_distance
    near_stations = {}
    while len(distance) > 0:
        nearest_pair = min(distance, key=distance.get)
        dist = distance[nearest_pair][0]
        if dist > max_distance:
            break
        s1, s2 = distance[nearest_pair][1]
        near_stations[s1] = s2
        near_stations[s2] = s1
        a1, a2 = tuple(nearest_pair)
        msg = 'connect areas %s and %s with distance %.1fkm'
        log.debug(msg, a1, a2, dist)
        distance.pop(nearest_pair)
        areas[a1] |= areas.pop(a2)
        hulls[a1] |= hulls.pop(a2)
        for a3 in areas:
            if a3 in (a1, a2):
                continue
            pair1 = frozenset((a1, a3))
            pair2 = frozenset((a2, a3))
            dist1 = distance[pair1]
            dist2 = distance.pop(pair2)
            if dist2[0] < dist1[0]:
                distance[pair1] = dist2
    return areas, near_stations
Example No. 32
    def analyse_earthquake(self, event_obj):
        # Get event catalogue
        self.event_cat = self.ds.events
        comp_list = ['*Z', '*N', '*E']


        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self, sta_list=self.ds.waveforms.list())
        if sel_dlg.exec_():
            select_sta, bool_comp = sel_dlg.getSelected()
            query_comp = list(itertools.compress(comp_list, bool_comp))

            # Open up a new stream object
            self.st = Stream()

            # use the ifilter functionality to extract desired streams to visualize
            for station in self.ds.ifilter(self.ds.q.station == map(lambda el: el.split('.')[1], select_sta),
                                           self.ds.q.channel == query_comp,
                                           self.ds.q.event == event_obj):
                for filtered_id in station.list():
                    if filtered_id == 'StationXML':
                        continue
                    self.st += station[filtered_id]

            if self.st.__nonzero__():
                # Get quake origin info
                origin_info = event_obj.preferred_origin() or event_obj.origins[0]

                # Iterate through traces
                for tr in self.st:
                    # Run Java Script to highlight all selected stations in station view
                    js_call = "highlightStation('{station}')".format(station=tr.stats.network + '.' +tr.stats.station)
                    self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)


                    # Get inventory for trace
                    inv = self.ds.waveforms[tr.stats.network + '.' +tr.stats.station].StationXML
                    sta_coords = inv.get_coordinates(tr.get_id())

                    dist, baz, _ = gps2dist_azimuth(sta_coords['latitude'],
                                                    sta_coords['longitude'],
                                                    origin_info.latitude,
                                                    origin_info.longitude)
                    dist_deg = kilometer2degrees(dist/1000.0)
                    tt_model = TauPyModel(model='iasp91')
                    arrivals = tt_model.get_travel_times(origin_info.depth/1000.0, dist_deg, ['P'])

                    # Write info to trace header
                    tr.stats.distance = dist
                    tr.stats.ptt = arrivals[0]

                # Sort the st by distance from quake
                self.st.sort(keys=['distance'])


                self.update_waveform_plot()
Example No. 33
 def setloc_source_centered(self, dist, azimuth, flattening):
     self.dist = dist
     self.azimuth = azimuth
     xsrc = thetaphi2xyz(dist, azimuth)
     xglb = SurfaceStation.src_rmat.dot(xsrc)
     theta, phi = xyz2thetaphi(xglb)
     self.lat, self.lon = thetaphi2latlon(theta, phi, flattening)
     d, az, baz = gps2dist_azimuth(SurfaceStation.src_lat, SurfaceStation.src_lon, 
         self.lat, self.lon, a=1., f=flattening)
     self.baz = np.radians(baz)
Example No. 34
def get_event_params(eq_lat,eq_lon):
   dist_az = gps2dist_azimuth(eq_lat,eq_lon,0,0,a=6371000.0,f=0.0)
   dist_km = dist_az[0]/1000.0
   dist_deg = kilometer2degrees(dist_km)
   az = dist_az[1]
   baz = dist_az[2]
   rotation_angle = -1.0*((baz-180) -90.0)
   #rotation_angle = -1.0*(az-90.0)

   return dist_deg,rotation_angle
Example No. 35
 def setloc_geographic(self, lat, lon, flattening):
     self.lat = lat
     self.lon = lon
     theta, phi = latlon2thetaphi(lat, lon, flattening)
     xglb = thetaphi2xyz(theta, phi)
     xsrc = SurfaceStation.src_rmat.T.dot(xglb)
     self.dist, self.azimuth = xyz2thetaphi(xsrc)
     d, az, baz = gps2dist_azimuth(SurfaceStation.src_lat, SurfaceStation.src_lon, 
         self.lat, self.lon, a=1., f=flattening)
     self.baz = np.radians(baz)
Example No. 36
def chiapas_BAz():
    """case study of chiapas, return backazimuth values for rotating seismograms
    """
    from obspy.geodetics import gps2dist_azimuth
    event_latlon = (15.38,-94.66)
    wpuk_latlon = (-40.06,176.44)
    gcd, Az, BAz = gps2dist_azimuth(lat1=event_latlon[0], lon1=event_latlon[1],
                                    lat2=wpuk_latlon[0], lon2=wpuk_latlon[1])

    return BAz
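
The return ordering this relies on is (great-circle distance in m, azimuth A->B in degrees, azimuth B->A in degrees), so with the event passed first the back azimuth is the third element. A quick standalone check (a sketch, not from the original source):

from obspy.geodetics import gps2dist_azimuth

dist, az, baz = gps2dist_azimuth(15.38, -94.66, -40.06, 176.44)
# az is measured at the event (first point), baz at the station (second point)
print(dist / 1000.0, az, baz)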
Example No. 37
def distance(station, origin):
    if hasattr(station, 'latitude') and\
        hasattr(station, 'longitude'):
        distance_in_m, _, _, = gps2dist_azimuth(station.latitude,
                                                station.longitude,
                                                origin.latitude,
                                                origin.longitude)
        return distance_in_m / 1000.

    else:
        raise Exception
Example No. 38
 def azimuthal_distribution_coefficient(self):
     azimuths = []
     for arrival in self.arrivals:
         d, abaz, baaz = gps2dist_azimuth(self.lat, self.lon,
                                          arrival.station.lat,
                                          arrival.station.lon)
         if abaz not in azimuths:
             azimuths += [abaz]
     hist = np.histogram(azimuths, bins=18, range=(0., 360.))
     return sum([1 for count in hist[0] if count > 0]) /\
         float(len(hist[1]) - 1)
Example No. 39
  def single_phase(self):
    
    events = self.assoc_db.query(Associated).all()
    for event in events:

      event_id = event.id
      ot = event.ot
      #print event_id,ot
    
    
      # Pick phases that are between origintime and origintime+max_tt
      sta_assoc = []
      for sta, in self.assoc_db.query(PickModified.sta).filter(PickModified.assoc_id==event_id).distinct().all():  # only associated single phase from stations not contribute p and s pairs
        sta_assoc.append(sta)    
    
      # associate single phase
      for sta, in self.assoc_db.query(PickModified.sta).filter(PickModified.assoc_id==None).filter(PickModified.time>ot).filter(PickModified.time<=(ot+timedelta(seconds=self.max_tt))).distinct().all():

        station = self.tt_stations_db_1D.query(Station1D).filter(Station1D.sta==sta).first()
        #print event.latitude,event.longitude,sta,station.latitude,station.longitude
        d_km = gps2dist_azimuth(event.latitude,event.longitude,station.latitude,station.longitude)[0]/1000.
    
        if (d_km < self.max_km) and (sta not in sta_assoc): # only associated single phase from stations not contribute p and s pairs
          tt,d_diff = tt_km(self.tt_stations_db_1D,d_km)
        
          picks_p = self.assoc_db.query(PickModified).filter(PickModified.sta==sta).filter(PickModified.time>=(ot+timedelta(seconds=tt.p_tt-0.5*self.aggr_window))).filter(PickModified.time<=(ot+timedelta(seconds=tt.p_tt+0.5*self.aggr_window))).all()
          #print 'picks_p: ',picks_p, 'tt.p_tt: ',tt.p_tt
          # if there are more than one modified pick in the aggr_window range, only associate the first modified pick
          if picks_p:
            modi_pick = picks_p[0] # the first modified pick
            modi_pick.phase = 'P'
            modi_pick.assoc_id = event.id
            modi_pick.locate_flag = False
            # Associated all the picks contribute to this single modified picks with assoc_id and phase
            picks=self.assoc_db.query(Pick).filter(Pick.modified_id==modi_pick.id).all()
            for pick in picks:
              pick.phase='P'
              pick.assoc_id=event.id
              pick.locate_flag = False

          picks_s = self.assoc_db.query(PickModified).filter(PickModified.sta==sta).filter(PickModified.time>=(ot+timedelta(seconds=tt.s_tt-0.5*self.aggr_window))).filter(PickModified.time<=(ot+timedelta(seconds=tt.s_tt+0.5*self.aggr_window))).all()  
          # if there are more than one modified pick in the aggr_window range, only associate the first modified pick
          if picks_s:
            modi_pick = picks_s[0] # the first modified pick
            modi_pick.phase = 'S'
            modi_pick.assoc_id = event.id
            modi_pick.locate_flag = None
            # Associated all the picks contribute to this single modified picks with assoc_id and phase
            picks=self.assoc_db.query(Pick).filter(Pick.modified_id==modi_pick.id).all()
            for pick in picks:
              pick.phase='S'
              pick.assoc_id=event.id
              pick.locate_flag = None
      self.assoc_db.commit()
Example No. 40
def dist_baz_az2(eve, sta):
    global StaDict
    global AllEveDict
    gcarcinfo = gps2dist_azimuth(StaDict[sta]['stla'], StaDict[sta]['stlo'],
                                 AllEveDict[eve]['EVLA'],
                                 AllEveDict[eve]['EVLO'])
    gcarc = kilometers2degrees(gcarcinfo[0] / 1000)
    dist = gcarcinfo[0] / 1000
    baz = gcarcinfo[1]
    az = gcarcinfo[2]

    return f"{gcarc:.3f} {dist:.3f} {baz:.3f} {az:.3f}"
Example No. 41
def Get_location(la_s, lo_s, la_r, lo_r, radius=3389.5, flattening=0):
    """
    Get the epicentral distance, azimuth and backazimuth
    """
    dist, az, baz = gps2dist_azimuth(lat1=la_s,
                                     lon1=lo_s,
                                     lat2=la_r,
                                     lon2=lo_r,
                                     a=radius,
                                     f=flattening)
    epi = kilometer2degrees(dist, radius=radius)
    return epi, az, baz
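
Note that when a custom radius is passed via a (here in km rather than the default metres), the returned distance is in the same unit as a, which is why it can be fed directly to kilometer2degrees. A minimal sketch for a spherical Earth under the same assumption:

from obspy.geodetics import gps2dist_azimuth, kilometer2degrees

# made-up source/receiver pair on a 6371 km sphere (a in km, flattening 0)
dist, az, baz = gps2dist_azimuth(lat1=10.0, lon1=20.0, lat2=-5.0, lon2=140.0,
                                 a=6371.0, f=0.0)
epi = kilometer2degrees(dist, radius=6371.0)
print(epi, az, baz)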
Example No. 42
def residuals_minimum(location, args):
    # from obspy.core.util import gps2DistAzimuth
    L = len(args)
    residuals = 0
    for i in range(L):
        residuals = residuals + (gps2dist_azimuth(location[1], location[0],
                                 args[i][2], args[i][1])[0] / 1000 * 180 /
                                 (np.pi * 6371) - args[i][4]) ** 2
    return np.sqrt(residuals / L)
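
The factor /1000*180/(np.pi*6371) simply converts the metre distance to degrees on a 6371 km sphere, i.e. the same conversion kilometers2degrees performs. A small check with made-up points:

import numpy as np
from obspy.geodetics import gps2dist_azimuth, kilometers2degrees

dist_m = gps2dist_azimuth(10.0, 20.0, 12.0, 22.0)[0]    # made-up points
manual = dist_m / 1000 * 180 / (np.pi * 6371)
print(manual, kilometers2degrees(dist_m / 1000.0))      # the two should agree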
Example No. 43
 def __init__(self, disp_file):
     data, self.lon1, self.lat1, self.lon2, self.lat2 = \
         readDisp(disp_file)
     self.filename = os.path.basename(disp_file)
     self.T = data[:, 0]
     self.disp = data[:, 1]
     self.err = data[:, 2]
     self.valid = data[:, 3]
     self.dist = gps2dist_azimuth(self.lat1, self.lon1, self.lat2,
                                  self.lon2)[0] / 1000.0
     mask = self.valid == 0
     self.masked_disp = np.ma.masked_array(self.disp, mask=mask)
Example No. 44
def calculate_baz(elat, elon, slat, slon):
    """
    Calculate back azimuth (station to event azimuth)

    :param elat: event latitude
    :param elon: event longitude
    :param slat: station latitude
    :param slon: station longitude
    :return: back azimuth
    """
    _, _, baz = gps2dist_azimuth(elat, elon, slat, slon)

    return baz
Example No. 45
def horiz_rotate(st,event):
    '''This function rotates horizontal components into radial and
    transverse. Returns the radial component as a trace.
    '''
    # Calculate the back azimuth (direction from the station towards the event).
    # With the station coordinates passed first, gps2dist_azimuth returns this
    # as the second element (azimuth A->B).
    dist, az, baz = gps2dist_azimuth(st[0].stats.coordinates.latitude,
            st[0].stats.coordinates.longitude, event.origins[0].latitude,
            event.origins[0].longitude)

    # Rotate the stream
    st = st.rotate(method='NE->RT', back_azimuth=az)

    return st.select(channel="??R")[0]
Example No. 46
def locating(guess, *args):
    # from obspy.core.util import gps2DistAzimuth
    # gps2DistAzimuth(lat1, lon1, lat2, lon2) returns:
    # (great circle distance in m, azimuth A->B in degrees, azimuth B->A in degrees)
    L = len(args)
    residuals = 0
    for i in range(L):
        residuals = residuals + (gps2dist_azimuth(guess[1], guess[0],
                                 args[i][2], args[i][1])[0] / 1000 * 180 /
                                 (np.pi * 6371) - args[i][4]) ** 2
        # alternative (cartesian) misfit:
        # np.sqrt((guess[0]-args[i][1])**2+(guess[1]-args[i][2])**2)-args[i][4])**2
    return np.sqrt(residuals / L)
Example No. 47
def bin_filter(st,bin_lat0,bin_lon0,bin_radius):
   '''
   Removes traces which lie outside of a circular bin.
   bin_radius must be given in degrees.
   '''

   # iterate over a copy of the stream: removing traces from `st` while
   # iterating over it directly would skip entries
   for tr in list(st):
      dist = gps2dist_azimuth(tr.stats.sac['stla'],tr.stats.sac['stlo'],
                              bin_lat0, bin_lon0)
      dist_m   = dist[0]
      dist_deg = kilometer2degrees(dist_m/1000.0)

      if dist_deg > bin_radius:
         st.remove(tr)
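
An alternative, non-destructive variant of the same filter (a sketch, not part of the original source) builds a new Stream instead of removing traces in place:

from obspy import Stream
from obspy.geodetics import gps2dist_azimuth, kilometer2degrees

def bin_filter_copy(st, bin_lat0, bin_lon0, bin_radius):
   '''Return a new Stream containing only traces inside the circular bin.'''
   kept = [tr for tr in st
           if kilometer2degrees(gps2dist_azimuth(tr.stats.sac['stla'],
                                                 tr.stats.sac['stlo'],
                                                 bin_lat0, bin_lon0)[0] / 1000.0)
           <= bin_radius]
   return Stream(kept)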
Example No. 48
def get_station_info(stats):

    sta1 = '{}.{}.{}.{}'.format(stats.network, stats.station, stats.location,
                                stats.channel)
    sta2 = '{}.{}.{}.{}'.format(stats.sac.kuser0.strip(), stats.sac.kevnm.strip(),
                                stats.sac.kuser1.strip(), stats.sac.kuser2.strip())
    lat1 = stats.sac.stla
    lon1 = stats.sac.stlo
    lat2 = stats.sac.evla
    lon2 = stats.sac.evlo
    dist = stats.sac.dist
    az = gps2dist_azimuth(lat1,lon1,lat2,lon2)[2]


    return([sta1,sta2,lat1,lon1,lat2,lon2,dist,az])
Example No. 49
 def test_gps2DistAzimuthWithGeographiclib(self):
     """
     Testing gps2dist_azimuth function using the module geographiclib.
     """
     # nearly antipodal points
     result = gps2dist_azimuth(15.26804251, 2.93007342, -14.80522806,
                               -177.2299081)
     self.assertAlmostEqual(result[0], 19951425.048688546)
     self.assertAlmostEqual(result[1], 8.65553241932755)
     self.assertAlmostEqual(result[2], 351.36325485132306)
     # out of bounds
     self.assertRaises(ValueError, gps2dist_azimuth, 91, 0, 0, 0)
     self.assertRaises(ValueError, gps2dist_azimuth, -91, 0, 0, 0)
     self.assertRaises(ValueError, gps2dist_azimuth, 0, 0, 91, 0)
     self.assertRaises(ValueError, gps2dist_azimuth, 0, 0, -91, 0)
Example No. 50
    def exclude_by_local_catalog(self,catalogue):

        model = TauPyModel(model="iasp91")
        
        for tr in self.stream:
            tr.detrend('demean')


        t_total = 0.0
        for trace in self.stream:
            t_total += trace.stats.npts
            
            
        for event in catalogue:
            # get origin time
            t0 = event.origins[0].time
            lon0 = event.origins[0].longitude
            lat0 = event.origins[0].latitude
            depth0 = event.origins[0].depth/1000.
            coords = self.inv.get_coordinates(self.ids[0])
            data_start = self.stream[0].stats.starttime
            if t0 < data_start-24*60*60.:
                continue
            data_end = self.stream[-1].stats.endtime
            if t0 > data_end:
                continue
            dist = gps2dist_azimuth(lat0,lon0,
                    coords["latitude"],coords["longitude"])[0]/1000.
            p_arrival = model.get_travel_times(source_depth_in_km=depth0,
                                  distance_in_degree=dist/111.19,phase_list=["P"])
            if len(p_arrival)==0:
                tcut1 = t0
            else:
                tcut1 = t0 + p_arrival[0].time - 10.0 #10s before p arrival
            if tcut1<t0:
                tcut1 = t0
            tcut2 = t0 + dist/1.0 + 60. #slowest surface-wave arrival plus one minute
            self.stream.cutout(starttime=tcut1,endtime=tcut2)


        t_kept = 0.0
        for trace in self.stream:
            t_kept += trace.stats.npts
        

        print('* Excluded all events in local catalogue.', file=self.ofid)
        print('* Lost %g percent of original traces' %((t_total-t_kept)/t_total*100), file=self.ofid)
        return()
Example No. 51
    def prepare_array(self):
        # station
        self.sta_lat = [window.latitude for window in self.trwins]
        self.sta_lon = [window.longitude for window in self.trwins]

        for sta_lat, sta_lon in zip(self.sta_lat, self.sta_lon):
            dist, az, baz = gps2dist_azimuth(self.cmtsource.latitude,
                                             self.cmtsource.longitude,
                                             sta_lat, sta_lon)
            self.sta_azi.append(az)
            self.sta_theta.append(az / 180.0 * np.pi)
            if self.mode == "regional":
                # if regional, then use original distance(in km)
                self.sta_dist.append(dist / 1000.0)
            elif self.mode == "global":
                # if global, then use degree as unit
                self.sta_dist.append(dist/EARTH_HC)
Example No. 52
def data_request(client_name, cat_client_name, start, end, minmag, net=None, scode="*", channels="*", minlat=None,
                 maxlat=None,minlon=None,maxlon=None, station_minlat=None,
                 station_maxlat=None, station_minlon=None, station_maxlon=None, mindepth=None, maxdepth=None, 
                 radialcenterlat=None, radialcenterlon=None, minrad=None, maxrad=None,
                 station_radialcenterlat=None, station_radialcenterlon=None, station_minrad=None, station_maxrad=None,
                 azimuth=None, baz=False, t_before_first_arrival=1, t_after_first_arrival=9, savefile=False, file_format='SAC'):
	"""
	Searches in a given Database for seismic data. Restrictions in terms of starttime, endtime, network etc can be made.
	If data is found it returns a stream variable, with the waveforms, an inventory with all station and network information
	and a catalog with the event information.

	:param client_name: Name of desired fdsn client, for a list of all clients see: 
		                https://docs.obspy.org/tutorial/code_snippets/retrieving_data_from_datacenters.html
	:type  client_name:  string

	:param cat_client_name: Name of Event catalog

	:type  cat_client_name: string

	:param start, end: starttime, endtime
	:type : UTCDateTime

	:param minmag: Minimum magnitude of event
	:type  minmag: float

	:param net: Network code for which to search data for
	:type  net: string

	:param scode: Station code for which to search data for
	:type  scode: string

	:param channels: Used channels of stations 
	:type  channels: string

	:param minlat, maxlat, minlon, maxlon: Coordinate-window of interest
	:type : float

	:param mindepth, maxdepth: depth information of event in km
	:type : float

	:param radialcenterlat, radialcenterlon: Centercoordinates of a radialsearch, if radialsearch=True
	:type : float

	:param minrad, maxrad: Minimum and maximum radii for radialsearch
	:type : float

	:param azimuth: Desired range of azimuths of event, station couples in deg as a list [minimum azimuth, maximum azimuth]
	:type  azimuth: list

	:param baz: Desired range of back-azimuths of event, station couples in deg as a list [minimum back azimuth, maximum back azimuth]
	:type  baz: list

	:param t_before_first_arrival, t_after_first_arrival: Length of the seismograms, i.e. minutes before the 1st arrival
		and minutes after the 1st arrival.
	:type  t_before_first_arrival, t_after_first_arrival: float, int
	
	:param savefile: if True, Stream, Inventory and Catalog will be saved local, in the current directory.
	:type  savefile: bool

	:param format: File-format of the data, for supported formats see: https://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.write.html#obspy.core.stream.Stream.write
	:type  format: string
	
	returns

	:param: list_of_stream, Inventory, Catalog
	:type: list, obspy, obspy 



	### Example 1 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	list_of_stream, inventory, cat = data_request('IRIS', start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	### Example 2 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	client = 'IRIS'
	cat_client = 'globalcmt'
	list_of_stream, inventory, cat = data_request(client, cat_client, start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	"""

	data =[]
	stream = Stream()
	streamall = []
	

	#build in different approach for catalog search, using urllib

	if cat_client_name == 'globalcmt':
		catalog = request_gcmt(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)
		client = Client(client_name)
	else:	
		client = Client(client_name)
		try:
			catalog = client.get_events(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, latitude=radialcenterlat, longitude=radialcenterlon, minradius=minrad, maxradius=maxrad,minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)

		except:
			print("No events found for given parameters.")
			return

	print("Following events found: \n")
	print(catalog)
	m = TauPyModel(model="ak135")
	Plist = ["P", "Pdiff", "p"]
	for event in catalog:
		print("\n")
		print("########################################")
		print("Looking for available data for event: \n")
		print(event.short_str())
		print("\n")

		origin_t = event.origins[0].time
		station_stime = UTCDateTime(origin_t - 3600*24)
		station_etime = UTCDateTime(origin_t + 3600*24)

		try:
			inventory = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
			print("Inventory found.")
		except:
			print("No Inventory found for given parameters")
			return
		
		for network in inventory:

			elat = event.origins[0].latitude
			elon = event.origins[0].longitude
			depth = event.origins[0].depth/1000.

			array_fits = True
			if azimuth or baz:
				cog=center_of_gravity(network)
				slat = cog['latitude']
				slon = cog['longitude']			
				epidist = locations2degrees(slat,slon,elat,elon)
				arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
							                        phase_list=Plist)

				P_arrival_time = arrivaltime[0]

				Ptime = P_arrival_time.time
				tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
				tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)


				center = geometrical_center(inv)
				clat = center['latitude']
				clon = center['longitude']
				if azimuth:
					print("Looking for events in the azimuth range of %f to %f" % (azimuth[0], azimuth[1]) )
					center_az = gps2dist_azimuth(clat, clon, elat, elon)[1]
					if center_az > azimuth[1] and center_az < azimuth[0]: 
						print("Geometrical center of Array out of azimuth bounds, \ncheking if single stations fit")
						array_fits = False

				elif baz:
					print("Looking for events in the back azimuth range of %f to %f" %(baz[0], baz[1]))
					center_baz = gps2dist_azimuth(clat, clon, elat, elon)[2]
					if center_baz > baz[1] and center_baz < baz[0]: 
						print("Geometrical center of Array out of back azimuth bounds, \ncheking if single stations fit")
						array_fits = False

			# If array fits to azimuth/back azimuth or no azimuth/back azimuth is given
			no_of_stations = 0
			if array_fits:

				for station in network:

					epidist = locations2degrees(station.latitude,station.longitude,elat,elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
								                        phase_list=Plist)

					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					try:
						streamreq = client.get_waveforms(network=network.code, station=station.code, location='*', channel=channels, starttime=tstart, endtime=tend, attach_response=True)
						no_of_stations += 1
						print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r' )
						sys.stdout.flush()
						stream += streamreq
						try:
							if inventory_used:
								inventory_used += client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
						except NameError:
							inventory_used = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
					except:
						continue


			# If the array center does not fit, check each station individually.
			else:
				for station in network:
					epidist = locations2degrees(station.latitude,station.longitude,elat,elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
								                        phase_list=Plist)


					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					fit = False
					if azimuth:
						stat_az = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[1]
						if azimuth[0] <= stat_az <= azimuth[1]: fit = True
					elif baz:
						stat_baz = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[2]
						if baz[0] <= stat_baz <= baz[1]: fit = True
					if fit:
						try:
							streamreq = client.get_waveforms(network=network.code, station=station.code, location='*', channel=channels, starttime=tstart, endtime=tend, attach_response=True)
							no_of_stations += 1
							print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r' )
							sys.stdout.flush()
							stream += streamreq
							try:
								if inventory_used:
									inventory_used += client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
							except NameError:
								inventory_used = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
						except:
							continue

		try:
			if invall:
				invall += inventory
		except NameError:
			invall = inventory

		attach_network_to_traces(stream, inventory)
		attach_coordinates_to_traces(stream, inventory, event)
		streamall.append(stream)
		stream = Stream()

	if savefile:
		stname = str(origin_t).split('.')[0] + ".MSEED"
		invname = stname + "_inv.xml"
		catname = stname + "_cat.xml"
		stream.write(stname, format=file_format)
		inventory.write(invname, format="STATIONXML")
		catalog.write(catname, format="QUAKEML")

	plt.ion()
	#invall.plot()
	#catalog.plot()
	plt.ioff()
	inventory = invall
	list_of_stream = streamall
	return list_of_stream, inventory, catalog
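A small helper makes the azimuth/back-azimuth acceptance test used above explicit. This is only a sketch: the name within_window is introduced here for illustration, the coordinates are made up, and ranges wrapping across 0°/360° are not handled.

from obspy.geodetics import gps2dist_azimuth

def within_window(value, bounds):
    # True if value lies inside [bounds[0], bounds[1]]; assumes bounds[0] <= bounds[1]
    return bounds[0] <= value <= bounds[1]

# e.g. keep a station only if the station-to-event azimuth falls in the requested range
az = gps2dist_azimuth(46.0, 7.0, 35.0, 140.0)[1]
keep = within_window(az, (30.0, 90.0))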
Exemplo n.º 53
def get_rupture_onset(home,project_name,slip,fault_array,model_name,hypocenter,rise_time_depths,M0):
    '''
    Using a custom built tvel file ray trace from hypocenter to determine rupture
    onset times
    '''
        
    from numpy import genfromtxt,zeros,arctan,sin,r_,where,log10,isnan
    from obspy.geodetics import gps2dist_azimuth
    
    #Load velocity model
    vel=genfromtxt(home+project_name+'/structure/'+model_name)
        
    # Convert from layer thickness to depth to the top of each layer
    depth_to_top=r_[0,vel[:,0].cumsum()[0:-1]]
    
    #Get rupture speed shear-wave multipliers
    rupture_multiplier=zeros(len(vel))
    # Shallow 
    i=where(depth_to_top<=rise_time_depths[0])[0]
    rupture_multiplier[i]=0.56
    # Deep 
    i=where(depth_to_top>=rise_time_depths[1])[0]
    rupture_multiplier[i]=0.8
    # Transition 
    i=where((depth_to_top<rise_time_depths[1]) & (depth_to_top>rise_time_depths[0]))[0]
    slope=(0.8-0.56)/(rise_time_depths[1]-rise_time_depths[0])
    intercept=0.8-slope*rise_time_depths[1]
    rupture_multiplier[i]=slope*depth_to_top[i]+intercept
        
    #Loop over all faults
    t_onset=zeros(len(slip))
    for kfault in range(len(slip)):
        D,az,baz=gps2dist_azimuth(hypocenter[1],hypocenter[0],fault_array[kfault,2],fault_array[kfault,1])
        D=D/1000
        #Start and stop depths
        if fault_array[kfault,3]<hypocenter[2]:
            zshallow=fault_array[kfault,3]
            zdeep=hypocenter[2]
        else:
            zdeep=fault_array[kfault,3]
            zshallow=hypocenter[2]
        #Get angle between depths
        theta=arctan((zdeep-zshallow)/D)
        # get hypotenuse distance on all layers
        delta_ray=vel[:,0]/sin(theta)
        # Calculate distance in each layer
        depth1=0
        depth2=vel[0,0]
        length_ray=zeros(len(vel))
        for klayer in range(len(vel)):
            if zshallow>depth1 and zdeep<depth2: #both points in same layer
                length_ray[klayer]=abs(zshallow-zdeep)/sin(theta)
            elif zshallow>depth1 and zshallow<depth2: #This is the top
                length_ray[klayer]=abs(depth2-zshallow)/sin(theta)
            elif zdeep>depth1 and zdeep<depth2: #This is the bottom
                length_ray[klayer]=abs(depth1-zdeep)/sin(theta)
            elif depth1>zshallow and depth2<zdeep: #Use full layer thickness for ray path length
                length_ray[klayer]=delta_ray[klayer]
            else: #Some other layer, do nothing
                pass
            #Update reference depths
            if klayer<len(vel)-1: # not the last layer
                depth1=depth2
                depth2=depth2+vel[klayer+1,0]
            else:
                depth1=depth2
                depth2=1e6
        
        #Now multiply ray path length times rupture velocity
        ray_times=length_ray/(vel[:,1]*rupture_multiplier)
        t_onset[kfault]=ray_times.sum()   
        
    #Now perturb onset times according to Graves-Pitarka eq 5 and 6
    delta_t=((M0*1e7)**(1./3))*1.8e-9
    slip_average=slip.mean()
    i=where(slip>0.05*slip_average)[0]
    perturbation=(log10(slip)-log10(slip_average))/(log10(slip.max())-log10(slip_average))
    t_onset_final=t_onset.copy()
    t_onset_final[i]=t_onset[i]-delta_t*perturbation[i]
    #Check for negative times
    i=where(t_onset_final<0)[0]
    t_onset_final[i]=t_onset[i]
    #Check for nan times
    i=where(isnan(t_onset_final)==True)[0]
    t_onset_final[i]=0
    
    return t_onset_final      
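The depth-dependent rupture-speed multiplier used above (0.56 of the shear-wave speed for layers shallower than the first transition depth, 0.8 below the deeper one, and a linear ramp in between) can be isolated into a small function. A minimal sketch; the name rupture_speed_multiplier and the toy layer depths are mine.

import numpy as np

def rupture_speed_multiplier(depth_to_top, rise_time_depths,
                             shallow=0.56, deep=0.8):
    # Linear ramp from `shallow` to `deep` between the two transition depths
    mult = np.zeros_like(depth_to_top, dtype=float)
    z1, z2 = rise_time_depths
    slope = (deep - shallow) / (z2 - z1)
    intercept = deep - slope * z2
    mult[depth_to_top <= z1] = shallow
    mult[depth_to_top >= z2] = deep
    mid = (depth_to_top > z1) & (depth_to_top < z2)
    mult[mid] = slope * depth_to_top[mid] + intercept
    return mult

# e.g. layer tops at 0, 5, 12 and 20 km with transition depths of 10 and 15 km
print(rupture_speed_multiplier(np.array([0., 5., 12., 20.]), (10., 15.)))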
Exemplo n.º 54
def f(lat, lon, location):
    return abs(gps2dist_azimuth(lat, lon, location[0], location[1])[0])
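One possible use of such a distance helper, with made-up coordinates, is picking whichever of several candidate locations lies closest to a station:

from obspy.geodetics import gps2dist_azimuth

def f(lat, lon, location):
    return abs(gps2dist_azimuth(lat, lon, location[0], location[1])[0])

candidates = [(46.2, 7.7), (47.0, 8.5), (45.5, 6.9)]   # hypothetical locations
station = (46.5, 7.9)                                  # hypothetical station
closest = min(candidates, key=lambda loc: f(station[0], station[1], loc))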
Exemplo n.º 55
        stats.attrs['ntraces'] = ntraces
        stats.attrs['Fs'] = Fs
        stats.attrs['nt'] = int(npts)

        # DATASET NR 2: Source grid
        sources = f_out.create_dataset('sourcegrid',data=srcgrid)

        # DATASET NR 3: the seismograms themselves
        traces = f_out.create_dataset('data',(ntraces,npts),dtype=np.float32)

        for k in range(ntraces):
            # for each location, determine the distance
            lat = srcgrid[1,k]
            lon = srcgrid[0,k]

            r = gps2dist_azimuth(lat,lon,lat_sta,lon_sta)[0]

            # evaluate the Green's function
            g1 = green_membrane(r)

            # apply the freq. domain taper
            taper = np.zeros(freq.shape)
            i0 = np.argmin(np.abs(freq-filt[0]))
            i1 = np.argmin(np.abs(freq-filt[1]))

            taper[i0:i1] = hann(i1-i0)

            # transform back to time domain
            g1_td = np.fft.irfft(g1)[0:3600]
            g1_td_taper = np.fft.irfft(taper*g1)[0:3600]
            #if k % 30000 == 0:
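The frequency-domain tapering step in this example can be shown on its own: a Hann window is built between two corner frequencies on the rfft frequency axis and applied before transforming back to the time domain. A sketch with an arbitrary sampling rate, trace length and corner frequencies, and a flat spectrum standing in for the Green's function:

import numpy as np
from scipy.signal.windows import hann

fs = 1.0                     # sampling rate in Hz (arbitrary)
npts = 3600
freq = np.fft.rfftfreq(npts, d=1.0 / fs)
spectrum = np.ones(len(freq), dtype=complex)   # stand-in for the Green's function spectrum

filt = (0.05, 0.2)           # passband corners in Hz (arbitrary)
taper = np.zeros(freq.shape)
i0 = np.argmin(np.abs(freq - filt[0]))
i1 = np.argmin(np.abs(freq - filt[1]))
taper[i0:i1] = hann(i1 - i0)

g_td = np.fft.irfft(taper * spectrum)[0:npts]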
Exemplo n.º 56
def get_rupture_onset(home,project_name,slip,fault_array,model_name,hypocenter,
        rise_time_depths,M0,sigma_rise_time=0.2):
    '''
    Using a custom built tvel file ray trace from hypocenter to determine rupture
    onset times
    '''
        
    from numpy import genfromtxt,zeros,arctan2,sin,r_,where,log10,isnan,argmin,setxor1d,exp
    from numpy.random import rand, randn
    from obspy.geodetics import gps2dist_azimuth
    
    #Load velocity model
    vel=genfromtxt(home+project_name+'/structure/'+model_name)
        
    # Convert from layer thickness to depth to the top of each layer
    depth_to_top=r_[0,vel[:,0].cumsum()[0:-1]]
    
#    #Get rupture speed shear-wave multipliers
#    rupture_multiplier=zeros(len(vel))
#    # Shallow 
#    i=where(depth_to_top<=rise_time_depths[0])[0]
#    rupture_multiplier[i]=0.56
#    # Deep 
#    i=where(depth_to_top>=rise_time_depths[1])[0]
#    rupture_multiplier[i]=0.8
#    # Transition 
#    i=where((depth_to_top<rise_time_depths[1]) & (depth_to_top>rise_time_depths[0]))[0]
#    slope=(0.8-0.56)/(rise_time_depths[1]-rise_time_depths[0])
#    intercept=0.8-slope*rise_time_depths[1]
#    rupture_multiplier[i]=slope*depth_to_top[i]+intercept
    
    
    #Get rupture speed shear-wave multipliers
    rupture_multiplier=zeros(len(vel))
    # Shallow 
    i=where(depth_to_top<=rise_time_depths[0])[0]
    rupture_multiplier[i]=0.49
    # Deep 
    i=where(depth_to_top>=rise_time_depths[1])[0]
    rupture_multiplier[i]=0.7
    # Transition 
    i=where((depth_to_top<rise_time_depths[1]) & (depth_to_top>rise_time_depths[0]))[0]
    slope=(0.7-0.49)/(rise_time_depths[1]-rise_time_depths[0])
    intercept=0.7-slope*rise_time_depths[1]
    rupture_multiplier[i]=slope*depth_to_top[i]+intercept
    
    
    
    # Perturb depths so that subfaults at the same depth as the hypocenter do not get exactly zero onset times
    delta=0.00001
    i_same_as_hypo=where(fault_array[:,3]==hypocenter[2])[0]
    dist=((fault_array[:,1]-hypocenter[0])**2+(fault_array[:,2]-hypocenter[1])**2)**0.5
    i_hypo=argmin(dist)
    #Get faults at same depth that are NOT the hypo
    i_same_as_hypo=setxor1d(i_same_as_hypo,i_hypo)
    #perturb
    R=rand(1)
    fault_array[i_hypo,3]=fault_array[i_hypo,3]-delta*R
    hypocenter[2]=hypocenter[2]-delta*R
    R=rand(len(i_same_as_hypo))
    fault_array[i_same_as_hypo,3]=fault_array[i_same_as_hypo,3]+delta*R
    
         
    #Loop over all faults
    t_onset=zeros(len(slip))
    #Perturb all subfault depths a tiny amount by some random number so that they NEVER lie on a layer interface
    z_perturb=(rand(len(fault_array))-0.5)*1e-6
    fault_array[:,3]=fault_array[:,3]+z_perturb
    for kfault in range(len(slip)):
        D,az,baz=gps2dist_azimuth(hypocenter[1],hypocenter[0],fault_array[kfault,2],fault_array[kfault,1])
        D=D/1000
        #Start and stop depths
        if fault_array[kfault,3]<=hypocenter[2]:
            zshallow=fault_array[kfault,3]
            zdeep=hypocenter[2]
        else:
            zdeep=fault_array[kfault,3]
            zshallow=hypocenter[2]
        #Get angle between depths
        theta=arctan2(zdeep-zshallow,D)
        # get hypotenuse distance on all layers
        delta_ray=vel[:,0]/sin(theta)
        # Calculate distance in each layer
        depth1=0
        depth2=vel[0,0]
        length_ray=zeros(len(vel))
        for klayer in range(len(vel)):
            if zshallow>depth1 and zdeep<depth2: #both points in same layer
                length_ray[klayer]=abs(zshallow-zdeep)/sin(theta)
            elif zshallow>depth1 and zshallow<depth2: #This is the top
                length_ray[klayer]=abs(depth2-zshallow)/sin(theta)
            elif zdeep>depth1 and zdeep<depth2: #This is the bottom
                length_ray[klayer]=abs(depth1-zdeep)/sin(theta)
            elif depth1>zshallow and depth2<zdeep: #Use full layer thickness for ray path length
                length_ray[klayer]=delta_ray[klayer]
            else: #Some other layer, do nothing
                pass
            #Update reference depths
            if klayer<len(vel)-1: # not the last layer
                depth1=depth2
                depth2=depth2+vel[klayer+1,0]
            else:
                depth1=depth2
                depth2=1e6
        
        #Now multiply ray path length times rupture velocity
        ray_times=length_ray/(vel[:,1]*rupture_multiplier)
        t_onset[kfault]=ray_times.sum()   
        
    #Now perturb onset times according to Graves-Pitarka eq 5 and 6 (assumes 1:1 correlation with slip)
    delta_t0=((M0*1e7)**(1./3))*1.8e-9
    
    #GP 2015 extra perturbation to destroy the 1:1 correlation with slip
    rand_numb=randn()
    delta_t=delta_t0*exp(sigma_rise_time*rand_numb)
    
    #Now apply total perturbation
    slip_average=slip.mean()
    i=where(slip>0.05*slip_average)[0] #perturbation is applied only to subfaults with significant slip
    perturbation=(log10(slip)-log10(slip_average))/(log10(slip.max())-log10(slip_average))
    t_onset_final=t_onset.copy()
    t_onset_final[i]=t_onset[i]-delta_t*perturbation[i]
    
    #Check for negative times
    i=where(t_onset_final<0)[0]
    t_onset_final[i]=t_onset[i]
    #Reassign subfaults within the "nucleation zone" to be their original, unperturbed onsets    
    #nu=some-relation-to-M0
    i=where(t_onset<5.5)[0]
    t_nucleation_edge=max(t_onset[i])
    t_onset_final=t_onset_final+t_nucleation_edge
    t_onset_final[i]=t_onset[i]
    #Check for nan times
    i=where(isnan(t_onset_final)==True)[0]
    t_onset_final[i]=0
    
    return t_onset_final      
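Stripped of the ray tracing, the Graves-Pitarka style onset perturbation applied in the two versions above reduces to a few lines. A sketch with toy slip and onset arrays; only the scaling constants and the form of the perturbation come from the code.

import numpy as np

M0 = 1e19                                 # seismic moment in N*m (toy value)
slip = np.array([0.2, 1.0, 2.5, 4.0])     # slip per subfault in m (toy values)
t_onset = np.array([1.0, 2.0, 3.0, 4.0])  # unperturbed onset times in s (toy values)
sigma_rise_time = 0.2

delta_t0 = ((M0 * 1e7) ** (1. / 3)) * 1.8e-9                       # eq. 5-type scaling (moment in dyne*cm)
delta_t = delta_t0 * np.exp(sigma_rise_time * np.random.randn())   # extra randomisation as in the newer version

slip_average = slip.mean()
perturbation = (np.log10(slip) - np.log10(slip_average)) / \
               (np.log10(slip.max()) - np.log10(slip_average))

t_final = t_onset.copy()
i = np.where(slip > 0.05 * slip_average)[0]   # only subfaults with significant slip
t_final[i] = t_onset[i] - delta_t * perturbation[i]
neg = np.where(t_final < 0)[0]                # negative onsets fall back to the raw value
t_final[neg] = t_onset[neg]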
Exemplo n.º 57
    sta2 = os.path.splitext(os.path.basename(t))[0].split('--')[1]
    
    datafile = os.path.join(data_dir,os.path.basename(t))
    tr = read(t)[0]
    
    i = tr.stats.npts // 2 + 1 
    tr.data[0:i] *= amps[cnt]
    cnt += 1
    tr.data[i+1:] *= amps[cnt]
    cnt += 1
    
    tr.stats.network = sta1.split('.')[0]
    tr.stats.station = sta1.split('.')[1]
    tr.stats.location = ''
    tr.stats.channel = sta1.split('.')[3]
    tr.stats.sac={}
    tr.stats.sac.kuser0 = sta2.split('.')[0]
    tr.stats.sac.kevnm = sta2.split('.')[1]
    tr.stats.sac.kuser1 = ''
    tr.stats.sac.kuser2 = sta2.split('.')[3]
    tr.stats.sac.stla = coords[sta1][0]
    tr.stats.sac.stlo = coords[sta1][1]
    tr.stats.sac.evla = coords[sta2][0]
    tr.stats.sac.evlo = coords[sta2][1]
    tr.stats.sac.dist = gps2dist_azimuth(coords[sta1][0],coords[sta1][1],coords[sta2][0],coords[sta2][1])[0]
    tr.write(datafile,format='SAC')
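A similar SAC-header assignment can be reproduced on a synthetic trace. The sketch below uses made-up coordinates and only fills the distance header; note that gps2dist_azimuth returns metres, while SAC's DIST header is conventionally given in kilometres.

import numpy as np
from obspy import Trace
from obspy.core.util import AttribDict
from obspy.geodetics import gps2dist_azimuth

tr = Trace(data=np.zeros(100, dtype=np.float32))
tr.stats.network, tr.stats.station, tr.stats.channel = 'XX', 'STA1', 'BHZ'

stla, stlo = 46.0, 7.0   # receiver coordinates (made up)
evla, evlo = 45.0, 6.0   # "source" station coordinates (made up)

tr.stats.sac = AttribDict()
tr.stats.sac.stla, tr.stats.sac.stlo = stla, stlo
tr.stats.sac.evla, tr.stats.sac.evlo = evla, evlo
dist_m = gps2dist_azimuth(stla, stlo, evla, evlo)[0]
tr.stats.sac.dist = dist_m / 1000.0   # SAC's DIST header is conventionally in km

tr.write('pair.sac', format='SAC')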
   



Exemplo n.º 58
from collections import defaultdict
import math
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from mpl_toolkits.basemap import Basemap
from matplotlib.patches import Rectangle
from obspy.geodetics import gps2dist_azimuth
from obspy.imaging.beachball import beach

from . import logger
from .util import get_cmt_par, get_trwin_tag
from .measure import _envelope

# earth half circle
EARTH_HC, _, _ = gps2dist_azimuth(0, 0, 0, 180)


def _plot_new_seismogram_sub(trwin, outputdir, cmtsource, figure_format):
    obsd = trwin.datalist['obsd']
    synt = trwin.datalist['synt']
    new_synt = trwin.datalist['new_synt']

    station = obsd.stats.station
    network = obsd.stats.network
    channel = obsd.stats.channel
    location = obsd.stats.location
    outputfig = os.path.join(outputdir, "%s.%s.%s.%s.%s" % (
        network, station, location, channel, figure_format))

    if cmtsource is None:
Exemplo n.º 59
    st.simulate(paz_remove="self", paz_simulate=paz_wa, water_level=10)
    st.trim(t, t + 50)

    tr_n = st.select(component="N")[0]
    ampl_n = max(abs(tr_n.data))
    tr_e = st.select(component="E")[0]
    ampl_e = max(abs(tr_e.data))
    ampl = max(ampl_n, ampl_e)

    sta_lat = st[0].stats.coordinates.latitude
    sta_lon = st[0].stats.coordinates.longitude
    event_lat = 46.218
    event_lon = 7.706

    epi_dist, az, baz = gps2dist_azimuth(event_lat, event_lon, sta_lat, sta_lon)
    epi_dist = epi_dist / 1000

    if epi_dist < 60:
        a = 0.018
        b = 2.17
    else:
        a = 0.0038
        b = 3.02
    ml = log10(ampl * 1000) + a * epi_dist + b
    print(station, ml)
    mags.append(ml)

net_mag = np.median(mags)
print("Network magnitude:", net_mag)
Exemplo n.º 60
    tr[0].stats.sac.kuser0 = meta[meta['sta']==sta2].iloc[0]['net']
    
    tr[0].stats.sac.kevnm = sta2
    tr[0].stats.sac.kuser1 = ''
    try:
        tr[0].stats.sac.kuser2 = os.path.basename(t).split('.')[7] #os.path.basename(t).split('--')[1].split('.')[3]
    except IndexError:
        sta2 = os.path.basename(t).split('.')[7]
    tr[0].stats.sac.user0 = 100.   
    #print(lat1 > -90.)
    #print(lat1 < 90.)
    #print(type(lat1))
    #print(float(lat1))
    #print(lat1,lon1,lat2,lon2)
    
    geoinf = gps2dist_azimuth(lat1,lon1,lat2,lon2)
    tr[0].stats.sac.dist = geoinf[0]
    tr[0].stats.sac.az = geoinf[1]
    tr[0].stats.sac.baz = geoinf[2]
    tr[0].stats['distance'] = geoinf[0]   # add stats.distance for section plot
    #print(tr[0].stats.keys())

    tr.write(t,format='SAC')
    #tr.plot()



# Back to my code

# Check the metadata again
ext = '*.sac'