Code example #1
def _compute_offsets(self):
    """x, y offsets from array center in meters."""
    c_lon, c_lat, c_elev = self.center
    for i, ele in enumerate(self.elements):
        ele.x, ele.y = np.array(
            util_geo_km(c_lon, c_lat, ele.longitude, ele.latitude)) * 1e3
        ele.z = ele.elevation - c_elev
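The examples on this page call obspy.signal.util.util_geo_km(orig_lon, orig_lat, lon, lat), which returns the (x, y) offset in km of the point (lon, lat) relative to the origin (orig_lon, orig_lat). A minimal sketch of the conversion used by the method above, with made-up coordinates:

import numpy as np
from obspy.signal.util import util_geo_km

# hypothetical array center and element coordinates (decimal degrees)
center_lon, center_lat = -116.455, 33.610
ele_lon, ele_lat = -116.450, 33.615

# util_geo_km returns the (east, north) offset in km; scale to meters
# exactly as _compute_offsets() does above
offset_km = util_geo_km(center_lon, center_lat, ele_lon, ele_lat)
x_m, y_m = np.array(offset_km) * 1e3
print(x_m, y_m)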
Code example #2
def read_locationTremor(infile, hour, lat_or, lon_or, depth=35.):
    f = open(infile, 'r')
    vardict = {
        'yyyy': 0,
        'mm': 1,
        'dd': 2,
        'hh': 3,
        'lat_e': 4,
        'lon_e': 5,
        'energy': 6,
        'n_eve': 7
    }

    epicenter = dict.fromkeys(vardict.keys())

    for key in epicenter.keys():
        epicenter[key] = []

    for line in f:
        data = [float(x) for x in line.split()]
        for var, index in vardict.items():
            epicenter[var].append(data[index])
    f.close()

    XX = []
    YY = []
    ZZ = []
    hours_dic = epicenter.get('hh')
    for i, hh in enumerate(hours_dic):
        if hh == float(hour):
            xeq, yeq = util_geo_km(lon_or, lat_or, epicenter['lon_e'][i],
                                   epicenter['lat_e'][i])
            XX.append(xeq)
            YY.append(yeq)
            ZZ.append(depth)
    return XX, YY, ZZ
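A hypothetical call to the parser above, assuming a small catalogue file with the whitespace-separated columns yyyy mm dd hh lat_e lon_e energy n_eve defined in vardict (the file name and values below are invented for illustration):

# write one made-up tremor line in the expected column order
with open("tremor_example.txt", "w") as out:
    out.write("2010 3 15 7 48.35 -123.10 1.2 5\n")

# offsets (km) of each event at hour 7 from the origin (lon_or, lat_or)
XX, YY, ZZ = read_locationTremor("tremor_example.txt", hour=7,
                                 lat_or=48.0, lon_or=-123.0)
print(XX, YY, ZZ)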
Code example #3
def read_locationEQ(infile, day, hours, lat_zero, lon_zero, depth=35.):
    f = open(infile, 'r')
    vardict = {
        'yyyy': 0,
        'mm': 1,
        'dd': 2,
        'hh': 3,
        'min': 4,
        'sec': 5,
        'es': 6,
        'lat_e': 7,
        'e_lat': 8,
        'lon_e': 9,
        'e_lon': 10,
        'depth_e': 11,
        'e_depth': 12,
        'mag': 13
    }
    epicenter = dict.fromkeys(vardict.keys())

    for key in epicenter.keys():
        epicenter[key] = []

    for line in f:
        data = [float(x) for x in line.split()]
        for var, index in vardict.items():
            epicenter[var].append(data[index])
    f.close()

    xx = []
    yy = []
    zz = []

    hours_dic = epicenter.get('hh')
    for i, hh in enumerate(hours_dic):
        year = str(epicenter['yyyy'][i])[2:4] + '0' + \
               str(int(epicenter['mm'][i])) + str(int(epicenter['dd'][i]))
        if year == day and str(int(hh)) == hours:
            xeq, yeq = util_geo_km(lon_zero, lat_zero, epicenter['lon_e'][i],
                                   epicenter['lat_e'][i])
            xx.append(xeq)
            yy.append(yeq)
            zz.append(epicenter['depth_e'][i])
    return xx, yy, zz
Code example #4
def calcEpiHypo(wf):
    """
       Uses Obspy's distance converter tool to calulate the distance delta
       between site and eq; then converts to km from deci degs. 
       Takes in waveform dictionary object returned by the readKiknet() 
       function. Better to use Obspy's func because it uses elliptical earth 
       model = accurate distances.
    """
    # extract relevant params
    slat, slon = wf["sitelatlon"]
    eqlat, eqlon = wf["eqlatlon"]
    eqdepth = wf["eqdepth"]
    sheight = wf["station height"] / 1000  #convert to km
    #do the delta and conversion to km
    dx, dy = util_geo_km(eqlon, eqlat, slon, slat)
    #calc Repi (dx**2 + dy**2)**0.5 and Rhyp (dx**2 + dy**2 + dz**2)**0.5
    Repi = np.sqrt(dx**2 + dy**2)
    Rhyp = np.sqrt(dx**2 + dy**2 + (eqdepth + sheight)**2)

    return (Repi, Rhyp)
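A sketch of how calcEpiHypo() might be called, with a hand-built dictionary standing in for the one returned by readKiknet(); the keys match those the function reads, the numbers are illustrative only:

# hypothetical waveform dictionary (station height in m, depth in km)
wf = {
    "sitelatlon": (36.20, 140.10),
    "eqlatlon": (36.50, 140.60),
    "eqdepth": 40.0,
    "station height": 120.0,
}
Repi, Rhyp = calcEpiHypo(wf)   # epicentral and hypocentral distance in km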
Code example #5
def get_offsets(inv):
    """
    Calculate the offset of each channel in an inventory object relative
    to the array center.

    Parameters
    ----------
    inv : :class:`~obspy.core.inventory.inventory.Inventory` or str
        An :class:`~obspy.core.inventory.inventory.Inventory` object or
        a path (relative or absolute) to a StationXML file with the
        array elements.

    Returns
    -------
    x, y, z : list
        Offsets relative to array center coordinate.
    """
    if isinstance(inv, str):
        inv = read_inventory(inv)

    inv[0].stations.sort(key=lambda x: x.code)

    center_lon, center_lat, center_elev = get_center(inv)
    net = inv[0]
    x = []
    y = []
    z = []
    for sta in net:
        for cha in sta:
            x_, y_ = np.array(
                util_geo_km(center_lon, center_lat,
                            cha.longitude, cha.latitude)) * 1e3
            z_ = cha.elevation - center_elev

            x.append(x_)
            y.append(y_)
            z.append(z_)
    return x, y, z
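Possible usage, assuming "array.xml" is a StationXML file describing the array elements and that get_center() is defined alongside get_offsets() in the same module:

from obspy import read_inventory

inv = read_inventory("array.xml")   # or simply pass the path as a string
x, y, z = get_offsets(inv)          # per-channel offsets in meters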
Code example #6
def getXY_array(stream):
    '''
    Returns the site coordinates for a specific array in the format required
    by fk
    '''
    lo = []
    la = []
    for tr in stream:
        # use per-trace coordinates if present, otherwise the SAC header
        if hasattr(tr.stats, 'coordinates'):
            lo.append(tr.stats.coordinates.longitude)
            la.append(tr.stats.coordinates.latitude)
        else:
            lo.append(tr.stats.sac['stlo'])
            la.append(tr.stats.sac['stla'])
    lo = np.asarray(lo)
    la = np.asarray(la)
    X = []
    Y = []
    for i in range(len(la)):
        try:
            # utlGeoKm is the legacy name of the converter in old ObsPy
            tempDX, tempDY = utlGeoKm(np.mean(lo), np.mean(la), lo[i], la[i])
        except NameError:
            from obspy.signal.util import util_geo_km
            tempDX, tempDY = util_geo_km(np.mean(lo), np.mean(la), lo[i],
                                         la[i])
        X.append(tempDX)
        Y.append(tempDY)
    return np.asarray(X), np.asarray(Y)
Code example #7
File: array_analysis.py, Project: junlysky/obspy
def get_geometry(stream, coordsys='lonlat', return_center=False,
                 verbose=False):
    """
    Method to calculate the array geometry and the center coordinates in km

    :param stream: Stream object, the trace.stats dict like class must
        contain an :class:`~obspy.core.util.attribdict.AttribDict` with
        'latitude', 'longitude' (in degrees) and 'elevation' (in km), or 'x',
        'y', 'elevation' (in km) items/attributes. See param ``coordsys``
    :param coordsys: valid values: 'lonlat' and 'xy', choose which stream
        attributes to use for coordinates
    :param return_center: Returns the center coordinates as extra tuple
    :return: Returns the geometry of the stations as a 2d
            :class:`numpy.ndarray`. The first dimension is the station
            index, in the same order as the traces in the stream object.
            The second dimension holds the [lon, lat, elev] values in km.
            If return_center is true, the last row contains the center
            [lon, lat, elev] in degrees and km.
    """
    nstat = len(stream)
    center_lat = 0.
    center_lon = 0.
    center_h = 0.
    geometry = np.empty((nstat, 3))

    if isinstance(stream, Stream):
        for i, tr in enumerate(stream):
            if coordsys == 'lonlat':
                geometry[i, 0] = tr.stats.coordinates.longitude
                geometry[i, 1] = tr.stats.coordinates.latitude
                geometry[i, 2] = tr.stats.coordinates.elevation
            elif coordsys == 'xy':
                geometry[i, 0] = tr.stats.coordinates.x
                geometry[i, 1] = tr.stats.coordinates.y
                geometry[i, 2] = tr.stats.coordinates.elevation
    elif isinstance(stream, np.ndarray):
        geometry = stream.copy()
    else:
        raise TypeError('only Stream or numpy.ndarray allowed')

    if verbose:
        print("coordsys = " + coordsys)

    if coordsys == 'lonlat':
        center_lon = geometry[:, 0].mean()
        center_lat = geometry[:, 1].mean()
        center_h = geometry[:, 2].mean()
        for i in np.arange(nstat):
            x, y = util_geo_km(center_lon, center_lat, geometry[i, 0],
                               geometry[i, 1])
            geometry[i, 0] = x
            geometry[i, 1] = y
            geometry[i, 2] -= center_h
    elif coordsys == 'xy':
        geometry[:, 0] -= geometry[:, 0].mean()
        geometry[:, 1] -= geometry[:, 1].mean()
        geometry[:, 2] -= geometry[:, 2].mean()
    else:
        raise ValueError("Coordsys must be one of 'lonlat', 'xy'")

    if return_center:
        return np.c_[geometry.T,
                     np.array((center_lon, center_lat, center_h))].T
    else:
        return geometry
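A sketch of calling get_geometry() on a synthetic Stream; per the docstring, each trace needs an AttribDict with latitude/longitude in degrees and elevation in km attached to its stats (the coordinates below are made up):

import numpy as np
from obspy import Stream, Trace
from obspy.core.util import AttribDict

st = Stream()
for lon, lat, elev in [(-116.455, 33.610, 1.20), (-116.450, 33.615, 1.25)]:
    tr = Trace(data=np.zeros(100))
    tr.stats.coordinates = AttribDict(
        {"longitude": lon, "latitude": lat, "elevation": elev})
    st.append(tr)

geometry = get_geometry(st, coordsys='lonlat')              # (nstat, 3), km
geometry_c = get_geometry(st, coordsys='lonlat', return_center=True)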
Code example #8
File: sncast.py, Project: moellhoff/Jupyter-Notebooks
def minML(filename,
          dir_in='./',
          lon0=-12,
          lon1=-4,
          lat0=50.5,
          lat1=56.6,
          dlon=0.33,
          dlat=0.2,
          stat_num=4,
          snr=3,
          foc_depth=0,
          region='CAL',
          mag_min=-3.0,
          mag_delta=0.1):
    """
    This routine calculates the geographic distribution of the minimum
    detectable local magnitude ML for a given seismic network. Required
    input is a file containing four comma-separated columns, one line per
    seismic station:

         longitude, latitude, noise [nm], station name
    e.g.: -7.5100, 55.0700, 0.53, IDGL

    The output file *.grd lists in ASCII xyz format: longitude, latitude, ML
  
    Optional parameters are:

    :param  dir_in:	full path to input and output file
    :param  lon0:	minimum longitude of search grid
    :param  lon1:	maximum longitude of search grid
    :param  lat0:	minimum latitude of search grid
    :param  lat1:	maximum latitude of search grid
    :param  dlon:	longitude increment of search grid
    :param  dlat:	latitude increment of search grid
    :param  stat_num:	required number of station detections
    :param  snr:	required signal-to-noise ratio for detection
    :param  foc_depth:  assumed focal event depth
    :param  region:	locality for assumed ML scale parameters ('UK' or 'CAL')
    :param  mag_min:	minimum ML value for grid search
    :param  mag_delta:  ML increment used in grid search
    """
    # region specific ML = log(ampl) + a*log(hypo-dist) + b*hypo_dist + c
    if region == 'UK':  # UK scale, Ottemöller and Sargeant (2013), BSSA, doi:10.1785/0120130085
        a = 0.95
        b = 0.00183
        c = -1.76
    elif region == 'CAL':  # South. California scale, IASPEI (2005),
        # www.iaspei.org/commissions/CSOI/summary_of_WG_recommendations_2005.pdf
        a = 1.11
        b = 0.00189
        c = -2.09

    # read in data, file format: "LON, LAT, NOISE [nm], STATION"
    array_in = np.genfromtxt('%s/%s' % (dir_in, filename),
                             encoding='ASCII',
                             dtype=None,
                             delimiter=",")
    lon = ([t[0] for t in array_in])
    lat = [t[1] for t in array_in]
    noise = [t[2] for t in array_in]
    stat = [t[3] for t in array_in]
    # grid size
    nx = int((lon1 - lon0) / dlon) + 1
    ny = int((lat1 - lat0) / dlat) + 1
    # open output file:
    f = open(
        '%s/%s-stat%s-foc%s-snr%s-%s.grd' %
        (dir_in, filename, stat_num, foc_depth, snr, region), 'w')
    mag = []

    for ix in range(nx):  # loop through longitude increments
        ilon = lon0 + ix * dlon
        for iy in range(ny):  # loop through latitude increments
            ilat = lat0 + iy * dlat
            j = 0
            for jstat in stat:  # loop through stations
                # calculate hypocentral distance in km
                dx, dy = util_geo_km(ilon, ilat, lon[j], lat[j])
                hypo_dist = sqrt(dx**2 + dy**2 + foc_depth**2)
                # find smallest detectable magnitude
                ampl = 0.0
                m = mag_min - mag_delta
                while ampl < snr * noise[j]:
                    m = m + mag_delta
                    ampl = pow(10,
                               (m - a * log10(hypo_dist) - b * hypo_dist - c))
                mag.append(m)
                j = j + 1
            # sort magnitudes in ascending order
            mag = sorted(mag)
            # write out longitude, latitude and smallest detectable magnitude
            f.write("".join(
                str(ilon) + " " + str(ilat) + " " + str(mag[stat_num - 1]) +
                "\n"))
            del mag[:]
    f.close()
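A hypothetical run of minML(): the first station line is the one quoted in the docstring, the other three are made up so that the default stat_num=4 can be satisfied; file names are illustrative only:

# station file in the documented "longitude, latitude, noise [nm], name" format
with open("example_net.dat", "w") as out:
    out.write("-7.5100, 55.0700, 0.53, IDGL\n")
    out.write("-8.9500, 52.5100, 0.61, STA2\n")
    out.write("-6.3500, 54.3000, 0.47, STA3\n")
    out.write("-9.0400, 53.2500, 0.55, STA4\n")

minML("example_net.dat", dir_in=".", stat_num=4, snr=3, region='CAL')
# writes ./example_net.dat-stat4-foc0-snr3-CAL.grd in ASCII xyz format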
Code example #9
def compute_offsets(cha, ref):
    (x, y) = util_geo_km(ref.longitude, ref.latitude, cha.longitude,
                         cha.latitude)
    return (x, y)
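compute_offsets() only needs two objects exposing .longitude and .latitude attributes, such as ObsPy Channel objects; a self-contained sketch with made-up coordinates:

from types import SimpleNamespace

ref = SimpleNamespace(longitude=-116.455, latitude=33.610)   # array center
cha = SimpleNamespace(longitude=-116.450, latitude=33.615)   # one channel
x, y = compute_offsets(cha, ref)   # offset of the channel in km east/north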