Example #1
    def test_equality(self):
        sctx1 = SitesContext()
        sctx1.vs30 = numpy.array([500., 600., 700.])
        sctx1.vs30measured = True
        sctx1.z1pt0 = numpy.array([40., 50., 60.])
        sctx1.z2pt5 = numpy.array([1, 2, 3])

        sctx2 = SitesContext()
        sctx2.vs30 = numpy.array([500., 600., 700.])
        sctx2.vs30measured = True
        sctx2.z1pt0 = numpy.array([40., 50., 60.])
        sctx2.z2pt5 = numpy.array([1, 2, 3])

        self.assertTrue(sctx1 == sctx2)

        sctx2 = SitesContext()
        sctx2.vs30 = numpy.array([500., 600.])
        sctx2.vs30measured = True
        sctx2.z1pt0 = numpy.array([40., 50., 60.])
        sctx2.z2pt5 = numpy.array([1, 2, 3])

        self.assertTrue(sctx1 != sctx2)

        sctx2 = SitesContext()
        sctx2.vs30 = numpy.array([500., 600., 700.])
        sctx2.vs30measured = False
        sctx2.z1pt0 = numpy.array([40., 50., 60.])
        sctx2.z2pt5 = numpy.array([1, 2, 3])

        self.assertTrue(sctx1 != sctx2)

        sctx2 = SitesContext()
        sctx2.vs30 = numpy.array([500., 600., 700.])
        sctx2.vs30measured = True
        sctx2.z1pt0 = numpy.array([40., 50., 60.])

        self.assertTrue(sctx1 != sctx2)

        rctx = RuptureContext()
        rctx.mag = 5.
        self.assertTrue(sctx1 != rctx)
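
The assertions above imply that context equality is attribute-wise: two contexts compare equal only when they are the same type, carry the same set of attributes, and every array or scalar value matches. A rough, self-contained illustration of that behaviour (not the OpenQuake implementation, just the comparison the test exercises) using plain namespace objects:

import numpy
from types import SimpleNamespace

def contexts_equal(a, b):
    # Illustrative only: mirrors the behaviour exercised by test_equality above.
    if type(a) is not type(b):                    # a SitesContext never equals a RuptureContext
        return False
    if vars(a).keys() != vars(b).keys():          # a missing attribute (e.g. z2pt5) breaks equality
        return False
    return all(numpy.array_equal(getattr(a, k), getattr(b, k)) for k in vars(a))

c1 = SimpleNamespace(vs30=numpy.array([500., 600., 700.]), vs30measured=True)
c2 = SimpleNamespace(vs30=numpy.array([500., 600., 700.]), vs30measured=True)
c3 = SimpleNamespace(vs30=numpy.array([500., 600.]), vs30measured=True)
print(contexts_equal(c1, c2), contexts_equal(c1, c3))  # True False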
Example #2
File: sites.py Project: jnf0910/shakemap
    def getSitesContext(self):
        """
        :returns:
            SitesContext object.
        """
        sctx = SitesContext()
        sctx.vs30 = self._Vs30.getData().copy()
        sctx.z1pt0 = self._Z1Pt0
        sctx.z2pt5 = self._Z2Pt5
        sctx.backarc = self._backarc  # zoneconfig might have this info
        if self._vs30measured_grid is None:  # If we don't know, then use false
            sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool)
        else:
            sctx.vs30measured = self._vs30measured_grid
        sctx.lons = self._lons
        sctx.lats = self._lats
        return sctx
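
A minimal sketch of how the returned context is typically consumed. The GMPE class, the IMT, and the rupture/distance contexts below are illustrative placeholders (see Example #4 for a fuller setup), not part of the original code:

# Illustrative only: assumes `sites` is an already-built Sites object and that
# rctx (RuptureContext) and dctx (DistancesContext) were populated elsewhere.
from openquake.hazardlib import imt, const
from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014

sctx = sites.getSitesContext()
gmpe = AbrahamsonEtAl2014()
lnmean, stddevs = gmpe.get_mean_and_stddevs(
    sctx, rctx, dctx, imt.PGA(), [const.StdDev.TOTAL])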
Example #3
File: sites.py Project: jnf0910/shakemap
    def sampleFromSites(self, lats, lons, vs30measured_grid=None):
        """
        Create a SitesContext object by sampling the current Sites object.

        :param lats:
            Sequence of latitudes.
        :param lons:
            Sequence of longitudes.
        :param vs30measured_grid:
            Sequence of booleans of the same shape as lats/lons indicating 
            whether the vs30 values are measured or inferred.
        :returns:
            SitesContext object where data are sampled from the current Sites
            object.
        :raises ShakeMapException:
             When lat/lon input sequences do not share dimensionality.
        """
        lats = np.array(lats)
        lons = np.array(lons)
        latshape = lats.shape
        lonshape = lons.shape
        if latshape != lonshape:
            msg = 'Input lat/lon arrays must have the same dimensions'
            raise ShakeMapException(msg)

        site = SitesContext()
        # use default vs30 if outside grid
        site.vs30 = self._Vs30.getValue(lats, lons, default=self._defaultVs30)
        site.lats = lats
        site.lons = lons
        site.z1pt0 = _calculate_z1p0(site.vs30)
        site.z2pt5 = _calculate_z2p5(site.z1pt0)
        if vs30measured_grid is None:  # If we don't know, then use false
            site.vs30measured = np.zeros_like(lons, dtype=bool)
        else:
            site.vs30measured = vs30measured_grid
        site.backarc = self._backarc

        return site
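
A short usage sketch, assuming `sites` is an existing Sites instance; the coordinates are made-up values:

# Illustrative only.
station_lats = [34.05, 34.10]
station_lons = [-118.25, -118.30]
sctx = sites.sampleFromSites(station_lats, station_lons)
# sctx.vs30 holds the sampled (or default) Vs30 values; because no
# vs30measured_grid was passed, sctx.vs30measured is all False.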
Example #4
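This fragment is taken mid-script: it assumes the contexts, the GMPE instance (ASK14), and the intensity measure (IMT) were created earlier. A hedged sketch of the kind of setup it presumes (names and import paths inferred from the code and from the older OpenQuake API these snippets use; not part of the original):

# Presumed setup, illustrative only.
# import numpy as np
# from openquake.hazardlib import imt, const
# from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014
# from openquake.hazardlib.gsim.base import SitesContext, RuptureContext, DistancesContext
#
# ASK14 = AbrahamsonEtAl2014()
# IMT = imt.PGA()
# sctx, rctx, dctx = SitesContext(), RuptureContext(), DistancesContext()
# rctx.mag, rctx.rake, rctx.ztor, rctx.dip = 6.0, 0.0, 2.0, 90.0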
rctx.width = 10.0
rctx.hypo_depth = 8.0

#dctx.rrup = np.logspace(1,np.log10(200),100)
dctx.rrup = np.logspace(np.log10(10),np.log10(10.0),1)


# Assuming average ztor, get rjb:
dctx.rjb = np.sqrt(dctx.rrup**2 - rctx.ztor**2)
dctx.rhypo = dctx.rrup
dctx.rx = dctx.rjb
dctx.ry0 = dctx.rx

sctx.vs30 = np.ones_like(dctx.rrup) * 760.0
sctx.vs30measured = np.full_like(dctx.rrup, False, dtype='bool')
sctx.z1pt0 = np.ones_like(dctx.rrup) * 0.05

lmean_ask14, sd_ask14 = ASK14.get_mean_and_stddevs(
    sctx, rctx, dctx, IMT, [const.StdDev.TOTAL])
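
get_mean_and_stddevs returns the mean of the natural logarithm of the intensity measure, so the median prediction is recovered by exponentiating (an added note, not part of the original script):

median_ask14 = np.exp(lmean_ask14)  # median ground motion in the IMT's native units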





###############################################################################
#
### Now try using mien which uses this... ##
#
## Read in annemarie's ask:
#ask_data_ab = np.genfromtxt('/Users/vsahakian/anza/data/coeffs/ASK_for_Valerie.txt',skip_header=1)
#
def signal_end(st, event_time, event_lon, event_lat, event_mag,
               method=None, vmin=None, floor=None,
               model=None, epsilon=2.0):
    """
    Estimate end of signal by using a model of the 5-95% significant
    duration, and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a p-wave picker since
    the velocity method often ends up with split times that are well before
    signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_mag (float):
            Event magnitude.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model".

    Returns:
        StationStream with each trace's stats dict updated to include a
        stats['processing_parameters']['signal_end'] dictionary.

    """
    # Load openquake stuff if method="model"
    if method == "model":
        dmodel = load_model(model)

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        sctx = SitesContext()
        sctx.vs30 = np.array([180.0])
        sctx.z1pt0 = np.array([0.51])
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        dur_imt = imt.from_string('RSD595')
        stddev_types = [const.StdDev.INTRA_EVENT]

    for tr in st:
        if not tr.hasParameter('signal_split'):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            dctx = DistancesContext()
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            dctx.rrup = np.array([epi_dist])
            lnmu, lnstd = dmodel.get_mean_and_stddevs(
                sctx, rctx, dctx, dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter('signal_split')['split_time']
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            'end_time': end_time,
            'method': method,
            'vsplit': vmin,
            'floor': floor,
            'model': model,
            'epsilon': epsilon
        }
        tr.setParameter('signal_end', end_params)

    return st
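
A second copy of signal_end follows. The two are identical except for how the duration model is resolved: the version above delegates to a load_model() helper (not shown in this snippet), while the version below reads gmprocess/data/modules.yml directly and instantiates the model class via import_module.
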
def signal_end(st, event_time, event_lon, event_lat, event_mag,
               method=None, vmin=None, floor=None,
               model=None, epsilon=2.0):
    """
    Estimate end of signal by using a model of the 5-95% significant
    duration, and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a p-wave picker since
    the velocity method often ends up with split times that are well before
    signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_mag (float):
            Event magnitude.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model".

    Returns:
        StationStream with each trace's stats dict updated to include a
        stats['processing_parameters']['signal_end'] dictionary.

    """
    # Load openquake stuff if method="model"
    if method == "model":
        mod_file = pkg_resources.resource_filename(
            'gmprocess', os.path.join('data', 'modules.yml'))
        with open(mod_file, 'r') as f:
            mods = yaml.load(f)

        # Import module
        cname, mpath = mods['modules'][model]
        dmodel = getattr(import_module(mpath), cname)()

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        sctx = SitesContext()
        sctx.vs30 = np.array([180.0])
        sctx.z1pt0 = np.array([0.51])
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        dur_imt = imt.from_string('RSD595')
        stddev_types = [const.StdDev.INTRA_EVENT]

    for tr in st:
        if not tr.hasParameter('signal_split'):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            dctx = DistancesContext()
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            dctx.rrup = np.array([epi_dist])
            lnmu, lnstd = dmodel.get_mean_and_stddevs(
                sctx, rctx, dctx, dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter('signal_split')['split_time']
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            'end_time': end_time,
            'method': method,
            'vsplit': vmin,
            'floor': floor,
            'model': model,
            'epsilon': epsilon
        }
        tr.setParameter('signal_end', end_params)

    return st
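
A hedged usage sketch for the model-based method; the stream, the origin values, and the duration-model short name below are placeholders, not taken from the original:

# Illustrative only: `stream` is a StationStream whose traces already carry a
# 'signal_split' parameter (e.g. from a P-wave picker), `origin_time` is a
# UTCDateTime, and 'AS16' stands in for a short name defined in
# gmprocess/data/modules.yml.
stream = signal_end(
    stream, event_time=origin_time, event_lon=-117.6, event_lat=33.9,
    event_mag=5.4, method='model', model='AS16', epsilon=2.0)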