Example #1
    def test_mag_greater_8pt5(self):
        gmpe = SadighEtAl1997()

        sctx = SitesContext()
        rctx = RuptureContext()
        dctx = DistancesContext()

        rctx.rake = 0.0
        dctx.rrup = numpy.array([0., 1.])
        sctx.vs30 = numpy.array([800., 800.])

        rctx.mag = 9.0
        mean_rock_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(),
                                                   [StdDev.TOTAL])
        rctx.mag = 8.5
        mean_rock_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(),
                                                      [StdDev.TOTAL])
        numpy.testing.assert_allclose(mean_rock_9, mean_rock_8pt5)

        sctx.vs30 = numpy.array([300., 300.])
        rctx.mag = 9.0
        mean_soil_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(),
                                                   [StdDev.TOTAL])
        rctx.mag = 8.5
        mean_soil_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(),
                                                      [StdDev.TOTAL])
        numpy.testing.assert_allclose(mean_soil_9, mean_soil_8pt5)
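
The snippets on this page leave their imports implicit. Below is a rough sketch of the shared imports, assuming the older openquake.hazardlib API in which the context classes live in gsim.base (the _parse_csv_line docstring further down the page references exactly those paths), followed by a standalone repeat of the saturation check above; treat the exact import paths as assumptions, since later hazardlib releases moved the context classes to openquake.hazardlib.contexts.

# Shared imports assumed by the fragments on this page (paths per the older
# openquake.hazardlib API).
import numpy
import numpy as np   # both spellings appear across the snippets
from openquake.hazardlib import const, imt as imt_module
from openquake.hazardlib.const import StdDev
from openquake.hazardlib.imt import PGA, PGV, SA
from openquake.hazardlib.gsim.base import (
    SitesContext, RuptureContext, DistancesContext)
from openquake.hazardlib.gsim.sadigh_1997 import SadighEtAl1997

# Quick standalone check mirroring the test above: Sadigh et al. (1997)
# PGA medians saturate above magnitude 8.5.
gmpe = SadighEtAl1997()
sctx, rctx, dctx = SitesContext(), RuptureContext(), DistancesContext()
rctx.rake = 0.0
dctx.rrup = numpy.array([0., 1.])
sctx.vs30 = numpy.array([800., 800.])
rctx.mag = 9.0
mean_9, _ = gmpe.get_mean_and_stddevs(
    sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
rctx.mag = 8.5
mean_8pt5, _ = gmpe.get_mean_and_stddevs(
    sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
numpy.testing.assert_allclose(mean_9, mean_8pt5)
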
 def test_get_mean_and_stddevs_good(self):
     """
     Tests the full execution of the GMPE tables for valid data
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     rctx.rake = 90.0
     dctx = DistancesContext()
      # Test values at the given distances and those outside the range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.array([20.0, 20.0, 10.0, 5.0, 1.0E-19])
     # PGA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGA(),
                                             stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.25 * np.ones(5), 5)
     # SA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.SA(1.0),
                                             stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
Example #3
def calculate_total_std(gsim_list, imts, vs30):
    std_total = {}
    std_inter = {}
    std_intra = {}
    for gsim in gsim_list:
        rctx = RuptureContext()
        # The calculator needs these inputs but they are not used
        # in the std calculation
        rctx.mag = 5
        rctx.rake = 0
        rctx.hypo_depth = 0
        dctx = DistancesContext()
        dctx.rjb = np.copy(np.array([1]))  # I do not care about the distance
        dctx.rrup = np.copy(np.array([1]))  # I do not care about the distance
        sctx = SitesContext()
        sctx.vs30 = vs30 * np.ones_like(np.array([0]))
        for imt in imts:
            gm_table, [
                gm_stddev_inter, gm_stddev_intra
            ] = (gsim.get_mean_and_stddevs(
                sctx, rctx, dctx, imt,
                [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT]))
            std_total[gsim, imt] = (np.sqrt(gm_stddev_inter[0]**2 +
                                            gm_stddev_intra[0]**2))
            std_inter[gsim, imt] = gm_stddev_inter[0]
            std_intra[gsim, imt] = gm_stddev_intra[0]
    return (std_total, std_inter, std_intra)
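
calculate_total_std returns three dictionaries keyed by (gsim, imt) tuples. A hedged usage sketch follows; the two GSIMs are arbitrary choices that only need Rjb, Vs30, magnitude and rake, which is all the helper fills in (a GSIM requiring further parameters such as rx or ztor would raise here).

from openquake.hazardlib.gsim.boore_2014 import BooreEtAl2014
from openquake.hazardlib.gsim.akkar_2014 import AkkarEtAlRjb2014
from openquake.hazardlib.imt import PGA, SA

gsims = [BooreEtAl2014(), AkkarEtAlRjb2014()]
imts = [PGA(), SA(1.0)]
std_total, std_inter, std_intra = calculate_total_std(gsims, imts, vs30=760.0)
# e.g. std_total[gsims[0], imts[0]] is the total sigma of BooreEtAl2014 at PGA
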
 def test_get_mean_and_stddevs(self):
     """
     Tests mean and standard deviations without amplification
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
      # Test values at the given distances and those outside the range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20])
     # PGA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGA(),
                                             stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
     # SA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.SA(1.0),
                                             stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5)
     # PGV
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGV(),
                                             stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean),
                                          10. * expected_mean,
                                          5)
     np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
Example #7
 def test_get_mean_and_stddevs_good_amplified(self):
     """
     Tests the full execution of the GMPE tables for valid data with
     amplification
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
      # Test values at the given distances and those outside the range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     sctx.vs30 = 100. * np.ones(5)
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.array([20., 20., 10., 5., 1.0E-19])
     expected_sigma = 0.25 * np.ones(5)
     # PGA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGA(), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
     # SA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.SA(1.0), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
Example #8
    def test_dist_not_in_increasing_order(self):
        sctx = SitesContext()
        rctx = RuptureContext()
        dctx = DistancesContext()

        rctx.mag = 5.
        dctx.rhypo = numpy.array([150, 100])
        mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])

        dctx.rhypo = numpy.array([100, 150])
        mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])
        self.assertAlmostEqual(mean_150_100[1], mean_100_150[0])
        self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
Example #9
    def get_gsim_contexts(self):
        """
        Returns a comprehensive set of GMPE context objects
        """
        assert isinstance(self.rupture, Rupture)
        assert isinstance(self.target_sites, SiteCollection)
        # Distances
        dctx = DistancesContext()
        # Rupture distance
        setattr(dctx, 'rrup',
                self.rupture.surface.get_min_distance(self.target_sites.mesh))
        # Rx
        setattr(dctx, 'rx',
                self.rupture.surface.get_rx_distance(self.target_sites.mesh))
        # Rjb
        setattr(
            dctx, 'rjb',
            self.rupture.surface.get_joyner_boore_distance(
                self.target_sites.mesh))
        # Rhypo
        setattr(
            dctx, 'rhypo',
            self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh))
        # Repi
        setattr(
            dctx, 'repi',
            self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh,
                                                     with_depths=False))
        # Ry0
        setattr(dctx, 'ry0',
                self.rupture.surface.get_ry0_distance(self.target_sites.mesh))
        # Rcdpp - ignored at present
        setattr(dctx, 'rcdpp', None)
        # Azimuth - ignored at present
        setattr(dctx, 'azimuth', None)
        setattr(dctx, 'hanging_wall', None)
        # Rvolc
        setattr(dctx, "rvolc", np.zeros_like(self.target_sites.mesh.lons))
        # Sites
        sctx = SitesContext()
        key_list = ['_vs30', '_vs30measured', '_z1pt0', '_z2pt5', '_backarc']
        for key in key_list:
            setattr(sctx, key[1:], getattr(self.target_sites, key))
        for key in ['lons', 'lats']:
            setattr(sctx, key, getattr(self.target_sites, key))

        # Rupture
        rctx = RuptureContext()
        setattr(rctx, 'mag', self.magnitude)
        setattr(rctx, 'strike', self.strike)
        setattr(rctx, 'dip', self.dip)
        setattr(rctx, 'rake', self.rake)
        setattr(rctx, 'ztor', self.ztor)
        setattr(rctx, 'hypo_depth', self.rupture.hypocenter.depth)
        setattr(rctx, 'hypo_lat', self.rupture.hypocenter.latitude)
        setattr(rctx, 'hypo_lon', self.rupture.hypocenter.longitude)
        setattr(rctx, 'hypo_loc', self.hypo_loc)
        setattr(rctx, 'width', self.rupture.surface.get_width())
        return sctx, rctx, dctx
Example #11
 def _get_poes(self, **kwargs):
     default_kwargs = dict(sctx=SitesContext(),
                           rctx=RuptureContext(),
                           dctx=DistancesContext(),
                           imt=self.DEFAULT_IMT(),
                           imls=[1.0, 2.0, 3.0],
                           truncation_level=1.0)
     default_kwargs.update(kwargs)
     kwargs = default_kwargs
     return self.gsim.get_poes(**kwargs)
 def test_get_mean_stddevs_unsupported_stddev(self):
     """
     Tests the execution of the GMPE with an unsupported standard deviation
     type
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
      # Test values at the given distances and those outside the range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     sctx.vs30 = 1000. * np.ones(5)
     stddevs = [const.StdDev.TOTAL, const.StdDev.INTER_EVENT]
     with self.assertRaises(ValueError) as ve:
         gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(),
                                   stddevs)
     self.assertEqual(str(ve.exception),
                      "Standard Deviation type Inter event not supported")
 def test_get_amplification_factors(self):
     """
     Tests the amplification tables
     """
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
     # Takes distances at the values found in the table (not checking
     # distance interpolation)
     dctx.rjb = np.copy(self.amp_table.distances[:, 0, 0])
      # The test Vs30 of 700.0 m/s is the midpoint between the 400 m/s and
      # 1000 m/s values specified in the table
     sctx = SitesContext()
     sctx.vs30 = 700.0 * np.ones_like(dctx.rjb)
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.ones_like(dctx.rjb)
     expected_sigma = np.ones_like(dctx.rjb)
     # Check PGA and PGV
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.PGA(), sctx, rctx, dctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 1.5) * expected_mean)
     np.testing.assert_array_almost_equal(
         sigma_amp[0],
         0.9 * expected_mean)
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.PGV(), sctx, rctx, dctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 0.5) * expected_mean)
     np.testing.assert_array_almost_equal(
         sigma_amp[0],
         0.9 * expected_mean)
     # Sa (0.5)
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.SA(0.5), sctx, rctx, dctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 2.0) * expected_mean)
     np.testing.assert_array_almost_equal(
         sigma_amp[0],
         0.9 * expected_mean)
Example #16
 def setUp(self):
     """
      Set up with a set of distances and site parameters
     """
     self.imts = [PGA(), SA(0.1), SA(0.2), SA(0.5), SA(1.0), SA(2.0)]
     self.mags = [4.5, 5.5, 6.5, 7.5]
     self.rakes = [-90., 0., 90.]
     self.dctx = DistancesContext()
     self.dctx.rhypo = np.array([5., 10., 20., 50., 100.])
     self.sctx = SitesContext()
     self.sctx.vs30 = 800.0 * np.ones(5)
Example #17
 def get_response_spectrum(self, magnitude, distance, periods, rake=90, vs30=800, damping=0.05):
     """
     """
     responses = np.zeros((len(periods),))
     p_damping = damping * 100
     rup = RuptureContext()
     rup.mag = magnitude
     rup.rake = rake
     dists = DistancesContext()
     dists.rjb = np.array([distance])
     sites = SitesContext()
     sites.vs30 = np.array([vs30])
     stddev_types = [StdDev.TOTAL]
     for i, period in enumerate(periods):
         if period == 0:
             imt = _PGA()
         else:
             imt = _SA(period, p_damping)
         responses[i] = np.exp(self._gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)[0][0])
     return ResponseSpectrum(periods, responses, unit='g', damping=damping)
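
get_response_spectrum above relies on an already-configured self._gmpe. A minimal standalone sketch of the same loop, assuming the older hazardlib API used throughout this page; the GMPE choice (BooreEtAl2014) is illustrative only.

import numpy as np
from openquake.hazardlib.gsim.base import (
    SitesContext, RuptureContext, DistancesContext)
from openquake.hazardlib.gsim.boore_2014 import BooreEtAl2014
from openquake.hazardlib.imt import SA
from openquake.hazardlib.const import StdDev

gmpe = BooreEtAl2014()
rup = RuptureContext()
rup.mag, rup.rake = 6.5, 90.0
dists = DistancesContext()
dists.rjb = np.array([20.0])
sites = SitesContext()
sites.vs30 = np.array([760.0])

periods = [0.1, 0.2, 0.5, 1.0, 2.0]
# Median spectral accelerations (in g) at each period
spectrum = [np.exp(gmpe.get_mean_and_stddevs(sites, rup, dists, SA(p),
                                             [StdDev.TOTAL])[0][0])
            for p in periods]
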
 def test_rhypo_smaller_than_15(self):
     # test the calculation in case of rhypo distances less than 15 km
     # (for rhypo=0 the distance term has a singularity). In this case the
     # method should return values equal to the ones obtained by clipping
     # distances at 15 km.
     sctx = SitesContext()
     sctx.vs30 = numpy.array([800.0, 800.0, 800.0])
     rctx = RuptureContext()
     rctx.mag = 5.0
     rctx.rake = 0
     dctx = DistancesContext()
     dctx.rhypo = numpy.array([0.0, 10.0, 16.0])
     dctx.rhypo.flags.writeable = False
     mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs(
         sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
     setattr(dctx, 'rhypo', numpy.array([15.0, 15.0, 16.0]))
     mean_15, stds_15 = self.GSIM_CLASS().get_mean_and_stddevs(
         sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
     numpy.testing.assert_array_equal(mean_0, mean_15)
     numpy.testing.assert_array_equal(stds_0, stds_15)
Example #19
 def setUp(self):
     """
     """
     self.gsim = TromansEtAl2019SigmaMu
     self.rctx = RuptureContext()
     self.rctx.mag = 6.5
     self.rctx.rake = 0.
     self.dctx = DistancesContext()
     self.dctx.rjb = np.array([5., 10., 20., 50., 100.])
     self.sctx = SitesContext()
     self.sctx.vs30 = 500. * np.ones(5)
Example #21
    def test_mag_dist_outside_range(self):
        sctx = SitesContext()
        rctx = RuptureContext()
        dctx = DistancesContext()

        # rupture with Mw = 3 (Mblg=2.9434938048208452) at rhypo = 1 must give
        # same mean as rupture with Mw = 4.4 (Mblg=4.8927897867183798) at
        # rhypo = 10
        rctx.mag = 2.9434938048208452
        dctx.rhypo = numpy.array([1])
        mean_mw3_d1, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])

        rctx.mag = 4.8927897867183798
        dctx.rhypo = numpy.array([10])
        mean_mw4pt4_d10, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])

        self.assertAlmostEqual(float(mean_mw3_d1), float(mean_mw4pt4_d10))

        # rupture with Mw = 9 (Mblg = 8.2093636421088814) at rhypo = 1500 km
        # must give same mean as rupture with Mw = 8.2
        # (Mblg = 7.752253535347597) at rhypo = 1000
        rctx.mag = 8.2093636421088814
        dctx.rhypo = numpy.array([1500.])
        mean_mw9_d1500, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])

        rctx.mag = 7.752253535347597
        dctx.rhypo = numpy.array([1000.])
        mean_mw8pt2_d1000, _ = self.GSIM_CLASS().get_mean_and_stddevs(
            sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL])

        self.assertAlmostEqual(mean_mw9_d1500, mean_mw8pt2_d1000)
Example #22
 def _disaggregate_poe(self, **kwargs):
     default_kwargs = dict(
         sctx=SitesContext(),
         rctx=RuptureContext(),
         dctx=DistancesContext(),
         imt=self.DEFAULT_IMT(),
         iml=2.0,
         truncation_level=1.0,
         n_epsilons=3,
     )
     default_kwargs.update(kwargs)
     kwargs = default_kwargs
     return self.gsim.disaggregate_poe(**kwargs)
Example #24
 def test_zero_distance(self):
     # test the calculation in case of zero rrup distance (for rrup=0
     # the equations have a singularity). In this case the
     # method should return values equal to the ones obtained by
     # replacing 0 values with 1
     sctx = SitesContext()
     rctx = RuptureContext()
     dctx = DistancesContext()
     setattr(sctx, 'vs30', numpy.array([500.0, 2500.0]))
     setattr(rctx, 'mag', 5.0)
     setattr(dctx, 'rrup', numpy.array([0.0, 0.2]))
     mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs(
         sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
     setattr(dctx, 'rrup', numpy.array([1.0, 0.2]))
     mean_01, stds_01 = self.GSIM_CLASS().get_mean_and_stddevs(
         sctx, rctx, dctx, PGA(), [StdDev.TOTAL])
     numpy.testing.assert_array_equal(mean_0, mean_01)
     numpy.testing.assert_array_equal(stds_0, stds_01)
Example #25
 def _get_distances_context_event(self, idx):
     """
     Returns the distance contexts for a specific event
     """
     dctx = DistancesContext()
     rrup = []
     rjb = []
     repi = []
     rhypo = []
     r_x = []
     ry0 = []
     for idx_j in idx:
         # Distance parameters
         rup = self.records[idx_j]
         repi.append(rup.distance.repi)
         rhypo.append(rup.distance.rhypo)
         # TODO Setting Rjb == Repi and Rrup == Rhypo when missing value
         # is a hack! Need feedback on how to fix
         if rup.distance.rjb is not None:
             rjb.append(rup.distance.rjb)
         else:
             rjb.append(rup.distance.repi)
         if rup.distance.rrup is not None:
             rrup.append(rup.distance.rrup)
         else:
             rrup.append(rup.distance.rhypo)
         if rup.distance.r_x is not None:
             r_x.append(rup.distance.r_x)
         else:
              r_x.append(rup.distance.repi)
         if ("ry0" in dir(rup.distance)) and rup.distance.ry0 is not None:
             ry0.append(rup.distance.ry0)
     setattr(dctx, 'repi', np.array(repi))
     setattr(dctx, 'rhypo', np.array(rhypo))
     if len(rjb) > 0:
         setattr(dctx, 'rjb', np.array(rjb))
     if len(rrup) > 0:
         setattr(dctx, 'rrup', np.array(rrup))
     if len(r_x) > 0:
         setattr(dctx, 'rx', np.array(r_x))
     if len(ry0) > 0:
         setattr(dctx, 'ry0', np.array(ry0))
     return dctx
Example #26
bssa14 = BooreEtAl2014()

## Set the empty arrays:
median_zhao2006 = np.array([])
median_travasarou = np.array([])
median_bssa14 = np.array([])

sd_zhao2006 = np.array([])
sd_travasarou = np.array([])
sd_bssa14 = np.array([])

## Run per recording:
for recording_i in range(len(station)):
    ## Make the rupture and distance contexts:
    i_rctx = RuptureContext()
    i_dctx = DistancesContext()
    i_sctx = SitesContext()

    # Zhao & Travasarou want rrup, but we don't have that, so set it to rhypo;
    # BSSA14 wants rjb, but we don't have it, so set it to repi:
    i_dctx.rrup = rhypo[recording_i]
    i_dctx.rjb = repi[recording_i]

    ## Site:
    i_sctx.vs30 = np.array([vs30_data[recording_i]])

    ## Rupture - USE THE PGD MAGNITUDE!
    i_rctx.rake = rake[recording_i]
    i_rctx.mag = mw_pgd[recording_i]
    i_rctx.hypo_depth = hypodepth[recording_i]
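
    ## Hedged continuation sketch -- the original snippet stops above. The
    ## loop presumably evaluates each GMPE and appends the median (exp of the
    ## log mean) and its total sigma; only the BSSA14 call is shown, and the
    ## PGA()/StdDev.TOTAL choices (plus their imports) are assumptions.
    ln_median_i, sd_i = bssa14.get_mean_and_stddevs(
        i_sctx, i_rctx, i_dctx, PGA(), [StdDev.TOTAL])
    median_bssa14 = np.append(median_bssa14, np.exp(ln_median_i))
    sd_bssa14 = np.append(sd_bssa14, sd_i[0])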
Example #27
def evaluate_model(site_params, rup_params, df, npts, azimuth, moveout, mod,
                   imt):
    sx = SitesContext()
    rx = RuptureContext()
    dx = DistancesContext()

    # TODO: some site parameters can be pulled from the dataframe so we don't
    # have to use the defaults (vs30, azimuth, etc.)
    if not moveout:
        npts = df.shape[0]
    for param in site_params.keys():
        setattr(sx, param, np.full(npts, site_params[param]))

    rx.__dict__.update(rup_params)
    rx.mag = df['EarthquakeMagnitude'].iloc[0]
    rx.hypo_depth = df['EarthquakeDepth'].iloc[0]

    if moveout:
        dx.rjb = np.linspace(0, df['JoynerBooreDistance'].max(), npts)
        dx.rrup = np.sqrt(dx.rjb**2 + df['EarthquakeDepth'].iloc[0]**2)
        dx.rhypo = dx.rrup
        dx.repi = dx.rjb
    else:
        dx.rjb = df['JoynerBooreDistance']
        dx.rrup = df['RuptureDistance']
        dx.rhypo = df['HypocentralDistance']
        dx.repi = df['EpicentralDistance']

    # TODO: some of these distances can be pulled from the dataframe
    dx.ry0 = dx.rjb
    dx.rx = np.full_like(dx.rjb, -1)
    dx.azimuth = np.full_like(npts, azimuth)
    dx.rcdpp = dx.rjb
    dx.rvolc = dx.rjb

    try:
        mean, sd = MODELS_DICT[mod]().get_mean_and_stddevs(
            sx, rx, dx,
            manage_imts(imt)[0], [StdDev.TOTAL])
        mean = convert_units(mean, imt)
        if moveout:
            return mean, dx
        else:
            return mean, sd[0]
    except Exception:
        return
def signal_end(st, event_time, event_lon, event_lat, event_mag,
               method=None, vmin=None, floor=None,
               model=None, epsilon=2.0):
    """
    Estimate the end of the signal by using a model of the 5-95% significant
    duration, and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a P-wave picker, since
    the velocity method often ends up with split times that are well before
    the signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        event_mag (float):
            Event magnitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model".

    Returns:
        stream with the stats dict of each trace updated to include a
        stats['processing_parameters']['signal_end'] dictionary.

    """
    # Load openquake stuff if method="model"
    if method == "model":
        mod_file = pkg_resources.resource_filename(
            'gmprocess', os.path.join('data', 'modules.yml'))
        with open(mod_file, 'r') as f:
            mods = yaml.load(f)

        # Import module
        cname, mpath = mods['modules'][model]
        dmodel = getattr(import_module(mpath), cname)()

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        sctx = SitesContext()
        sctx.vs30 = np.array([180.0])
        sctx.z1pt0 = np.array([0.51])
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        dur_imt = imt.from_string('RSD595')
        stddev_types = [const.StdDev.INTRA_EVENT]

    for tr in st:
        if not tr.hasParameter('signal_split'):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            dctx = DistancesContext()
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            dctx.rrup = np.array([epi_dist])
            lnmu, lnstd = dmodel.get_mean_and_stddevs(
                sctx, rctx, dctx, dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter('signal_split')['split_time']
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            'end_time': end_time,
            'method': method,
            'vsplit': vmin,
            'floor': floor,
            'model': model,
            'epsilon': epsilon
        }
        tr.setParameter('signal_end', end_params)

    return st
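
The "model" branch above boils down to a single duration-GMPE evaluation. A standalone sketch of that core step with the same conservative inputs follows; the choice of AfshariStewart2016 is an assumption about what modules.yml might map to, not something stated in the code.

import numpy as np
from openquake.hazardlib import const, imt
from openquake.hazardlib.gsim.base import (
    SitesContext, RuptureContext, DistancesContext)
from openquake.hazardlib.gsim.afshari_stewart_2016 import AfshariStewart2016

dmodel = AfshariStewart2016()    # assumed duration model
sctx = SitesContext()
sctx.vs30 = np.array([180.0])    # "conservative" soft-soil value, as above
sctx.z1pt0 = np.array([0.51])
rctx = RuptureContext()
rctx.mag = 6.5
rctx.rake = -90.0
dctx = DistancesContext()
dctx.rrup = np.array([30.0])     # epicentral distance stands in for rrup
lnmu, lnstd = dmodel.get_mean_and_stddevs(
    sctx, rctx, dctx, imt.from_string('RSD595'),
    [const.StdDev.INTRA_EVENT])
duration = np.exp(lnmu + 2.0 * lnstd[0])    # epsilon = 2.0, the default above
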
Example #29
def test_scr_rlme():
    old_gmpe = set_gmpe('stable_continental_nshmp2014_rlme')
    spec_file = pkg_resources.resource_filename(
        'scenarios', os.path.join('data', 'configspec.conf'))
    validator = get_custom_validator()
    config = ConfigObj(os.path.join(os.path.expanduser('~'), 'scenarios.conf'),
                       configspec=spec_file)
    tmp = pkg_resources.resource_filename(
        'scenarios', os.path.join('..', 'data', 'gmpe_sets.conf'))
    config.merge(ConfigObj(tmp, configspec=spec_file))
    tmp = pkg_resources.resource_filename(
        'scenarios', os.path.join('..', 'data', 'modules.conf'))
    config.merge(ConfigObj(tmp, configspec=spec_file))
    results = config.validate(validator)
    if results != True:
        config_error(config, results)

    # MultiGMPE from config
    config = config.dict()
    gmpe = MultiGMPE.from_config(config)

    # Input stuff
    IMT = imt.SA(1.0)
    rctx = RuptureContext()
    dctx = DistancesContext()
    sctx = SitesContext()

    rctx.rake = 0.0
    rctx.dip = 90.0
    rctx.ztor = 0.0
    rctx.mag = 8.0
    rctx.width = 10.0
    rctx.hypo_depth = 8.0

    dctx.rjb = np.logspace(1, np.log10(800), 100)
    dctx.rrup = dctx.rjb
    dctx.rhypo = dctx.rjb
    dctx.rx = dctx.rjb
    dctx.ry0 = dctx.rjb

    sctx.vs30 = np.ones_like(dctx.rjb) * 275.0
    sctx.vs30measured = np.full_like(dctx.rjb, False, dtype='bool')
    sctx = MultiGMPE.set_sites_depth_parameters(sctx, gmpe)

    # Evaluate
    conf_lmean, dummy = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, IMT,
                                                  [const.StdDev.TOTAL])

    target_lmean = np.array([
        0.10556736, 0.0839267, 0.06189444, 0.03945984, 0.01661264, -0.006657,
        -0.03035844, -0.05450058, -0.07909179, -0.10413995, -0.1296524,
        -0.15563655, -0.1821091, -0.20909381, -0.23661405, -0.26469259,
        -0.29335086, -0.32257956, -0.35232905, -0.38254639, -0.41317807,
        -0.44417017, -0.47549552, -0.5071888, -0.53929293, -0.57185042,
        -0.60490345, -0.63848027, -0.67255251, -0.70707712, -0.74201096,
        -0.77731091, -0.81293906, -0.84889737, -0.88520644, -0.92188724,
        -0.95899471, -0.99699613, -1.03583184, -1.07530664, -1.11531737,
        -1.15576129, -1.19653696, -1.23757689, -1.2772327, -1.2915098,
        -1.30576498, -1.32001713, -1.33429606, -1.3486727, -1.36322545,
        -1.37803346, -1.39317668, -1.40677752, -1.42081409, -1.43538898,
        -1.45056417, -1.46640223, -1.48327111, -1.50656497, -1.53368548,
        -1.56645985, -1.59991327, -1.63399401, -1.66867278, -1.7039438,
        -1.73980246, -1.77624473, -1.81326727, -1.85087166, -1.889066,
        -1.92784814, -1.96721442, -2.0071855, -2.04779304, -2.08909259,
        -2.13114448, -2.17401045, -2.21775376, -2.26243406, -2.30808979,
        -2.35475487, -2.40246494, -2.4512575, -2.50117075, -2.55223495,
        -2.60447754, -2.65792811, -2.71261851, -2.61732716, -2.67007323,
        -2.72399057, -2.77918054, -2.83574666, -2.89379416, -2.95340501,
        -3.01462691, -3.07750731, -3.14209631, -3.20844679
    ])

    np.testing.assert_allclose(conf_lmean, target_lmean, atol=1e-6)

    # Redo for 3 sec so some GMPEs are filtered out
    IMT = imt.SA(3.0)
    gmpe = MultiGMPE.from_config(config, filter_imt=IMT)
    conf_lmean, dummy = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, IMT,
                                                  [const.StdDev.TOTAL])

    target_lmean = np.array([
        -1.26636973, -1.289514, -1.31300386, -1.33683936, -1.36102084,
        -1.38554902, -1.41042497, -1.43565015, -1.46122642, -1.48715602,
        -1.51344154, -1.54008586, -1.56709215, -1.59446375, -1.62220409,
        -1.65031664, -1.6788048, -1.70767178, -1.7369205, -1.76655351,
        -1.79657287, -1.82698005, -1.85777587, -1.88896039, -1.92053288,
        -1.95249175, -1.98483453, -2.01755788, -2.05065755, -2.08412844,
        -2.11796463, -2.15215943, -2.18670547, -2.22159473, -2.25681869,
        -2.29236835, -2.32823441, -2.36453464, -2.40140834, -2.43883442,
        -2.47679132, -2.51525752, -2.55421156, -2.59363211, -2.63112832,
        -2.63336521, -2.63582817, -2.6385319, -2.64147962, -2.64466761,
        -2.64809268, -2.65175214, -2.6556438, -2.65976592, -2.66411721,
        -2.66869673, -2.67350386, -2.67853821, -2.68413311, -2.69604497,
        -2.7124745, -2.73590549, -2.75964098, -2.78367044, -2.80798539,
        -2.8325853, -2.85746998, -2.88263948, -2.90809408, -2.93383429,
        -2.95986073, -2.98617306, -3.01275705, -3.03961495, -3.06675608,
        -3.09419043, -3.12192861, -3.14998191, -3.17836228, -3.20708239,
        -3.23615561, -3.26559604, -3.29541858, -3.32563888, -3.35627343,
        -3.38733956, -3.41885548, -3.4508403, -3.48331409, -3.56476842,
        -3.59987076, -3.63573296, -3.67238872, -3.70987332, -3.74822369,
        -3.78747847, -3.82767809, -3.86886488, -3.91108308, -3.95437899
    ])

    np.testing.assert_allclose(conf_lmean, target_lmean, atol=1e-6)

    # Clean up
    set_gmpe(old_gmpe)
Example #30
def _parse_csv_line(headers, values):
    """
    Parse a single line from data file.

    :param headers:
        A list of header names, the strings from the first line of csv file.
    :param values:
        A list of values of a single row to parse.
    :returns:
        A tuple of the following values (in specified order):

        sctx
            An instance of :class:`openquake.hazardlib.gsim.base.SitesContext`
            with attributes populated by the information from in row in a form
            of single-element numpy arrays.
        rctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.RuptureContext`.
        dctx
            An instance of
            :class:`openquake.hazardlib.gsim.base.DistancesContext`.
        stddev_types
            An empty list, if the ``result_type`` column says "MEAN"
            for that row, otherwise it is a list with one item --
            a requested standard deviation type.
        expected_results
            A dictionary mapping IMT-objects to one-element arrays of expected
            result values. Those results represent either standard deviation
            or mean value of corresponding IMT depending on ``result_type``.
        result_type
            A string literal, one of ``'STDDEV'`` or ``'MEAN'``. Value
            is taken from column ``result_type``.
    """
    rctx = RuptureContext()
    sctx = SitesContext()
    dctx = DistancesContext()
    expected_results = {}
    stddev_types = result_type = damping = None

    for param, value in zip(headers, values):
        if param == 'result_type':
            value = value.upper()
            if value.endswith('_STDDEV'):
                # the row defines expected stddev results
                result_type = 'STDDEV'
                stddev_types = [getattr(const.StdDev,
                                        value[:-len('_STDDEV')])]
            else:
                # the row defines expected exponents of mean values
                assert value == 'MEAN'
                stddev_types = []
                result_type = 'MEAN'
        elif param == 'damping':
            damping = float(value)
        elif param.startswith('site_'):
            # value is sites context object attribute
            if (param == 'site_vs30measured') or (param == 'site_backarc'):
                value = float(value) != 0
            else:
                value = float(value)
            setattr(sctx, param[len('site_'):], numpy.array([value]))
        elif param.startswith('dist_'):
            # value is a distance measure
            value = float(value)
            setattr(dctx, param[len('dist_'):], numpy.array([value]))
        elif param.startswith('rup_'):
            # value is a rupture context attribute
            value = float(value)
            setattr(rctx, param[len('rup_'):], value)
        elif param == 'component_type':
            pass
        else:
            # value is the expected result (of result_type type)
            value = float(value)
            if param == 'pga':
                imt = PGA()
            elif param == 'pgv':
                imt = PGV()
            elif param == 'pgd':
                imt = PGD()
            elif param == 'cav':
                imt = CAV()
            else:
                period = float(param)
                assert damping is not None
                imt = SA(period, damping)

            expected_results[imt] = numpy.array([value])

    assert result_type is not None
    return sctx, rctx, dctx, stddev_types, expected_results, result_type
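
For orientation, a hypothetical header/value pair (the column names follow the parser's prefix conventions but are not taken from any real verification table) would be handled like this:

headers = ['rup_mag', 'rup_rake', 'dist_rhypo', 'site_vs30', 'result_type',
           'pga']
values = ['6.0', '0.0', '10.0', '800.0', 'MEAN', '0.25']
sctx, rctx, dctx, stddev_types, expected, result_type = _parse_csv_line(
    headers, values)
# rctx.mag == 6.0, dctx.rhypo == array([10.]), sctx.vs30 == array([800.]),
# stddev_types == [], expected[PGA()] == array([0.25]), result_type == 'MEAN'
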
Example #31
stds = [
    pga_std, sa0_1_std, sa0_2_std, sa0_3_std, sa0_5_std, sa1_std, sa2_std,
    sa3_std, sa5_std
]

# Separate data by event.
for event in eventids:
    partial_df = df.loc[df['USGS_eventID'] == event]

    # Data directory.
    data_dir = '/Users/tnye/PROJECTS/Duration/data/'
    # Event directory.
    event_dir = os.path.join(data_dir, 'events', event)

    # Distance context.
    dx = DistancesContext()
    dx.rjb = np.array(partial_df['rjb'])
    dx.rrup = np.array(partial_df['rrup'])
    dx.rx = np.array(partial_df['rx'])
    dx.ry0 = np.array(partial_df['ry'])

    # Sites context.
    sx = SitesContext()
    sx.vs30 = np.array(partial_df['Vs30(m/s)'])
    sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe)
    sx.vs30measured = np.zeros_like(sx.vs30, dtype=bool)

    # Rupture context.
    rx = RuptureContext()
    rx.mag = np.array(partial_df['magnitude'])[0]
    rx.rake = np.array(partial_df['rake_angle'])[0]
Example #32
    def _get_distances_context_event(self, idx):
        """
        Returns the distance contexts for a specific event
        """
        dctx = DistancesContext()
        rrup = []
        rjb = []
        repi = []
        rhypo = []
        r_x = []
        ry0 = []
        rcdpp = []
        azimuth = []
        hanging_wall = []
        rvolc = []
        for idx_j in idx:
            # Distance parameters
            rup = self.records[idx_j]
            repi.append(rup.distance.repi)
            rhypo.append(rup.distance.rhypo)
            # TODO Setting Rjb == Repi and Rrup == Rhypo when missing value
            # is a hack! Need feedback on how to fix
            if rup.distance.rjb is not None:
                rjb.append(rup.distance.rjb)
            else:
                rjb.append(rup.distance.repi)
            if rup.distance.rrup is not None:
                rrup.append(rup.distance.rrup)
            else:
                rrup.append(rup.distance.rhypo)
            if rup.distance.r_x is not None:
                r_x.append(rup.distance.r_x)
            else:
                r_x.append(rup.distance.repi)
            if ("ry0" in dir(rup.distance)) and rup.distance.ry0 is not None:
                ry0.append(rup.distance.ry0)
            if ("rcdpp" in dir(rup.distance)) and\
                rup.distance.rcdpp is not None:
                rcdpp.append(rup.distance.rcdpp)
            if rup.distance.azimuth is not None:
                azimuth.append(rup.distance.azimuth)
            if rup.distance.hanging_wall is not None:
                hanging_wall.append(rup.distance.hanging_wall)
            if "rvolc" in dir(rup.distance) and\
                rup.distance.rvolc is not None:
                rvolc.append(rup.distance.rvolc)

        setattr(dctx, 'repi', np.array(repi))
        setattr(dctx, 'rhypo', np.array(rhypo))
        if len(rjb) > 0:
            setattr(dctx, 'rjb', np.array(rjb))
        if len(rrup) > 0:
            setattr(dctx, 'rrup', np.array(rrup))
        if len(r_x) > 0:
            setattr(dctx, 'rx', np.array(r_x))
        if len(ry0) > 0:
            setattr(dctx, 'ry0', np.array(ry0))
        if len(rcdpp) > 0:
            setattr(dctx, 'rcdpp', np.array(rcdpp))
        if len(azimuth) > 0:
            setattr(dctx, 'azimuth', np.array(azimuth))
        if len(hanging_wall) > 0:
            setattr(dctx, 'hanging_wall', np.array(hanging_wall))
        if len(rvolc) > 0:
            setattr(dctx, 'rvolc', np.array(rvolc))
        return dctx
Example #33
def _get_extent_from_multigmpe(rupture, config=None):
    """
    Use MultiGMPE to determine extent
    """
    (clon, clat) = _rupture_center(rupture)
    origin = rupture.getOrigin()
    if config is not None:
        gmpe = MultiGMPE.from_config(config)
        gmice = get_object_from_config('gmice', 'modeling', config)
        if imt.SA in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.SA(1.0)
        elif imt.PGV in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.PGV()
        else:
            default_imt = imt.PGA()
    else:
        # Put in some default values for conf
        config = {
            'extent': {
                'mmi': {
                    'threshold': 4.5,
                    'mindist': 100,
                    'maxdist': 1000
                }
            }
        }

        # Generic GMPEs choices based only on active vs stable
        # as defaults...
        stable = is_stable(origin.lon, origin.lat)
        if not stable:
            ASK14 = AbrahamsonEtAl2014()
            CB14 = CampbellBozorgnia2014()
            CY14 = ChiouYoungs2014()
            gmpes = [ASK14, CB14, CY14]
            site_gmpes = None
            weights = [1/3.0, 1/3.0, 1/3.0]
            gmice = WGRW12()
        else:
            Fea96 = FrankelEtAl1996MwNSHMP2008()
            Tea97 = ToroEtAl1997MwNSHMP2008()
            Sea02 = SilvaEtAl2002MwNSHMP2008()
            C03 = Campbell2003MwNSHMP2008()
            TP05 = TavakoliPezeshk2005MwNSHMP2008()
            AB06p = AtkinsonBoore2006Modified2011()
            Pea11 = PezeshkEtAl2011()
            Atk08p = Atkinson2008prime()
            Sea01 = SomervilleEtAl2001NSHMP2008()
            gmpes = [Fea96, Tea97, Sea02, C03,
                     TP05, AB06p, Pea11, Atk08p, Sea01]
            site_gmpes = [AB06p]
            weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0]
            gmice = AK07()

        gmpe = MultiGMPE.from_list(
            gmpes, weights, default_gmpes_for_site=site_gmpes)
        default_imt = imt.SA(1.0)

    min_mmi = config['extent']['mmi']['threshold']
    sd_types = [const.StdDev.TOTAL]

    # Distance context
    dx = DistancesContext()
    # This imposes minimum/ maximum distances of:
    #   80 and 800 km; could make this configurable
    d_min = config['extent']['mmi']['mindist']
    d_max = config['extent']['mmi']['maxdist']
    dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000)
    # Details don't matter for this; assuming vertical surface rupturing fault
    # with epicenter at the surface.
    dx.rrup = dx.rjb
    dx.rhypo = dx.rjb
    dx.repi = dx.rjb
    dx.rx = np.zeros_like(dx.rjb)
    dx.ry0 = np.zeros_like(dx.rjb)
    dx.rvolc = np.zeros_like(dx.rjb)

    # Sites context
    sx = SitesContext()
    # Set to soft soil conditions
    sx.vs30 = np.full_like(dx.rjb, 180)
    sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe)
    sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool)
    sx = Sites._addDepthParameters(sx)
    sx.backarc = np.full_like(sx.vs30, False, dtype=bool)

    # Rupture context
    rx = RuptureContext()
    rx.mag = origin.mag
    rx.rake = 0.0
    # From WC94...
    rx.width = 10**(-0.76 + 0.27*rx.mag)
    rx.dip = 90.0
    rx.ztor = origin.depth
    rx.hypo_depth = origin.depth

    gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs(
        sx, rx, dx, default_imt, sd_types)

    # Convert to MMI
    gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt)

    # Minimum distance that exceeds threshold MMI?
    dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi]
    if len(dists_exceed_mmi):
        mindist_km = np.max(dists_exceed_mmi)
    else:
        mindist_km = d_min

    # Get a projection
    proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4)
    if isinstance(rupture, (QuadRupture, EdgeRupture)):
        ruptx, rupty = proj(
            rupture.lons[~np.isnan(rupture.lons)],
            rupture.lats[~np.isnan(rupture.lats)]
        )
    else:
        ruptx, rupty = proj(clon, clat)

    xmin = np.nanmin(ruptx) - mindist_km
    ymin = np.nanmin(rupty) - mindist_km
    xmax = np.nanmax(ruptx) + mindist_km
    ymax = np.nanmax(rupty) + mindist_km

    # Put a limit on range of aspect ratio
    dx = xmax - xmin
    dy = ymax - ymin
    ar = dy / dx
    if ar > 1.2:
        # Inflate x
        dx_target = dy / 1.2
        ddx = dx_target - dx
        xmax = xmax + ddx / 2
        xmin = xmin - ddx / 2
    if ar < 0.83:
        # Inflate y
        dy_target = dx * 0.83
        ddy = dy_target - dy
        ymax = ymax + ddy / 2
        ymin = ymin - ddy / 2

    lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True)
    lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True)

    #
    # Round coordinates to the nearest minute -- that should make the
    # output grid register with common grid resolutions (60c, 30c,
    # 15c, 7.5c)
    #
    logging.debug("Extent: %f, %f, %f, %f" %
                  (lonmin, lonmax, latmin, latmax))
    return _round_coord(lonmin[0]), _round_coord(lonmax[0]), \
        _round_coord(latmin[0]), _round_coord(latmax[0])
Example #34
from openquake.hazardlib.gsim.base import DistancesContext
from openquake.hazardlib.gsim.base import SitesContext
from openquake.hazardlib.gsim.base import RuptureContext
from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014
from openquake.hazardlib import imt
import numpy as np
import gmpe as gm
import matplotlib.pyplot as plt

fig_dir = '/Users/vsahakian/anza/models/statistics/misc/oq_vs_matlab/'


## This all works..... ##

ASK14 = AbrahamsonEtAl2014()

IMT = imt.PGA()
rctx = RuptureContext()
dctx = DistancesContext()
sctx = SitesContext()
sctx_rock = SitesContext()

rctx.rake = 0.0
rctx.dip = 90.0
rctx.ztor = 7.13
rctx.mag = 3.0
#rctx.mag = np.linspace(0.1,5.)
rctx.width = 10.0
rctx.hypo_depth = 8.0

#dctx.rrup = np.logspace(1,np.log10(200),100)
dctx.rrup = np.logspace(np.log10(10),np.log10(10.0),1)
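
The fragment above stops before evaluating ASK14. A hedged completion sketch follows; AbrahamsonEtAl2014 also needs the distance and site parameters set below, and every numeric value chosen here is an illustrative assumption.

from openquake.hazardlib.const import StdDev

dctx.rjb = np.copy(dctx.rrup)
dctx.rx = np.copy(dctx.rrup)
dctx.ry0 = np.copy(dctx.rrup)

sctx.vs30 = 500.0 * np.ones_like(dctx.rrup)
sctx.vs30measured = np.zeros_like(dctx.rrup, dtype=bool)
sctx.z1pt0 = 50.0 * np.ones_like(dctx.rrup)   # placeholder basin depth

ln_pga, [total_sigma] = ASK14.get_mean_and_stddevs(
    sctx, rctx, dctx, IMT, [StdDev.TOTAL])
median_pga = np.exp(ln_pga)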

Example #35
def build_gmpe_table(matrixMagsMin, matrixMagsMax, matrixMagsStep,
                     matrixDistsMin, matrixDistsMax, matrixDistsStep,
                     imt_filtering, limitIM, gsim_list, limit_max_mag,
                     limit_min_mag):
    # Define the magnitude range of interest, 5.0 - 9.0 every 0.1
    mags = np.arange(matrixMagsMin, matrixMagsMax, matrixMagsStep)
    # Define the distance range of interest, 0.0 - 300.0 every 1 km
    dists = np.arange(matrixDistsMin, matrixDistsMax, matrixDistsStep)
    # Define the Vs30 range of interest, 180.0 - 1000.0 every 1 m/s
    vs30s = np.arange(180.0, 181., 1.)
    gm_table = np.zeros([len(dists), len(mags), len(vs30s)])
    stddevs = [const.StdDev.TOTAL]
    gsim_tables = []
    for gsim in gsim_list:
        for i, mag in enumerate(mags):
            for j, vs30 in enumerate(vs30s):
                # The RuptureContext object holds all of the
                # rupture related attributes (e.g. mag, rake, ztor, hypo_depth)
                rctx = RuptureContext()
                rctx.mag = mag
                rctx.rake = 0.0
                rctx.hypo_depth = 10
                # The DistancesContext object holds all of the distance
                # calculations (e.g. rjb, rrup, rx, ry0)
                # OQ GMPEs are vectorised by distance - so this needs
                # to be an array
                dctx = DistancesContext()
                dctx.rjb = np.copy(dists)
                dctx.rrup = np.copy(dists)
                # dctx.rhypo = np.copy(dists)
                # The SitesContext object holds all of the site
                # attributes - also an array
                sctx = SitesContext()
                # The attributes of the site array must be of the
                # same size as the distances
                sctx.vs30 = vs30 * np.ones_like(dists)
                # The GMPE produces two outputs: the means (strictly, their
                # natural logarithms) and the standard deviations
                gm_table[:, i, j], gm_stddevs = gsim.get_mean_and_stddevs(
                                        sctx, rctx, dctx, imt_filtering,
                                        stddevs)
        gm_table_exp = np.exp(gm_table)
        gsim_tables.append(gm_table_exp)

    if len(gsim_list) == 1:
        gm_table_final = gsim_tables[0]
    else:
        gm_table_final = np.maximum(gsim_tables[0], gsim_tables[1])
    # These "if" blocks exclude all ruptures above the maximum and below the
    # minimum magnitude limits
    if limit_max_mag < matrixMagsMax:
        indexMag = int((limit_max_mag - matrixMagsMin) / matrixMagsStep)
        list_mag_to_exclude = np.arange(indexMag+1, len(mags))
        gm_table_final[:, list_mag_to_exclude, 0] = 0.001
    
    if limit_min_mag > matrixMagsMin:
        indexMinMag = int((limit_min_mag - matrixMagsMin) / matrixMagsStep)
        list_min_mag_to_exclude = np.arange(0, indexMinMag)
        gm_table_final[:, list_min_mag_to_exclude, 0] = 0.001

    gm_mask = gm_table_final >= limitIM
    GMPEmatrix = gm_mask[:, :, 0]
    return GMPEmatrix
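
For orientation, an illustrative call of build_gmpe_table (all numeric choices are assumptions; using a single GSIM keeps the two-model maximum branch out of play):

from openquake.hazardlib.gsim.boore_2014 import BooreEtAl2014
from openquake.hazardlib.imt import PGA

# Boolean (distance, magnitude) matrix flagging cells whose median PGA at
# Vs30 = 180 m/s reaches at least 0.05 g.
gmpe_matrix = build_gmpe_table(
    matrixMagsMin=5.0, matrixMagsMax=9.0, matrixMagsStep=0.1,
    matrixDistsMin=0.0, matrixDistsMax=300.0, matrixDistsStep=1.0,
    imt_filtering=PGA(), limitIM=0.05,
    gsim_list=[BooreEtAl2014()], limit_max_mag=8.0, limit_min_mag=5.5)
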
def trim_multiple_events(st, origin, catalog, travel_time_df, pga_factor,
                         pct_window_reject, gmpe, site_parameters,
                         rupture_parameters):
    """
    Uses a catalog (list of ScalarEvents) to handle cases where a trace might
    contain signals from multiple events. The catalog should contain events
    down to a low enough magnitude in relation to the events of interest.
    Overall, the algorithm is as follows:

    1) For each earthquake in the catalog, get the P-wave travel time
       and estimated PGA at this station.

    2) Compute the PGA (of the as-recorded horizontal channels).

    3) Select the P-wave arrival times across all events for this record
       that are (a) within the signal window, and (b) the predicted PGA is
        greater than pga_factor times the recorded PGA from step #2.

    4) If any P-wave arrival times match the above criteria, then if any of
        the arrival times fall within the first pct_window_reject*100%
       of the signal window, then reject the record. Otherwise, trim the
       record such that the end time does not include any of the arrivals
       selected in step #3.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
            ScalarEvent object associated with the StationStream.
        catalog (list):
            List of ScalarEvent objects.
        travel_time_df (DataFrame):
            A pandas DataFrame that contains the travel time information
            (obtained from
            gmprocess.waveform_processing.phase.create_travel_time_dataframe).
            The columns in the DataFrame are the station ids and the indices
            are the earthquake ids.
        pga_factor (float):
            A decimal factor used to determine whether the predicted PGA
            from an event arrival is significant enough that it should be
            considered for removal.
        pct_window_reject (float):
            A decimal from 0.0 to 1.0 used to determine if an arrival should
            be trimmed from the record, or if the entire record should be
            rejected. If the arrival falls within the first
            pct_window_reject * 100% of the signal window, then the entire
            record will be rejected. Otherwise, the record will be trimmed
            appropriately.
        gmpe (str):
            Short name of the GMPE to use. Must be defined in the modules
            file.
        site_parameters (dict):
            Dictionary of site parameters to input to the GMPE.
        rupture_parameters (dict):
            Dictionary of rupture parameters to input to the GMPE.

    Returns:
        StationStream: Processed stream.

    """

    if not st.passed:
        return st

    # Check that we know the signal split for each trace in the stream
    for tr in st:
        if not tr.hasParameter('signal_split'):
            return st

    signal_window_starttime = st[0].getParameter('signal_split')['split_time']

    arrivals = travel_time_df[st[0].stats.network + '.' + st[0].stats.station]
    arrivals = arrivals.sort_values()

    # Filter by any arrival times that appear in the signal window
    arrivals = arrivals[
        (arrivals > signal_window_starttime) &
        (arrivals < st[0].stats.endtime)]

    # Make sure we remove the arrival that corresponds to the event of interest
    if origin.id in arrivals.index:
        arrivals.drop(index=origin.id, inplace=True)

    if arrivals.empty:
        return st

    # Calculate the recorded PGA for this record
    stasum = StationSummary.from_stream(st, ['ROTD(50.0)'], ['PGA'])
    recorded_pga = stasum.get_pgm('PGA', 'ROTD(50.0)')

    # Load the GMPE model
    gmpe = load_model(gmpe)

    # Set site parameters
    sx = SitesContext()

    # Make sure that site parameter values are converted to numpy arrays
    site_parameters_copy = site_parameters.copy()
    for k, v in site_parameters_copy.items():
        site_parameters_copy[k] = np.array([v])
    sx.__dict__.update(site_parameters_copy)

    # Filter by arrivals that have significant expected PGA using GMPE
    is_significant = []
    for eqid, arrival_time in arrivals.items():
        event = next(event for event in catalog if event.id == eqid)

        # Set rupture parameters
        rx = RuptureContext()
        rx.__dict__.update(rupture_parameters)
        rx.mag = event.magnitude

        # TODO: distances should be calculated when we refactor to be
        # able to import distance calculations
        dx = DistancesContext()
        dx.repi = np.array([
            gps2dist_azimuth(
                st[0].stats.coordinates.latitude,
                st[0].stats.coordinates.longitude,
                event.latitude, event.longitude)[0] / 1000])
        dx.rjb = dx.repi
        dx.rhypo = np.sqrt(dx.repi**2 + event.depth_km**2)
        dx.rrup = dx.rhypo

        pga, sd = gmpe.get_mean_and_stddevs(sx, rx, dx, imt.PGA(), [])

        # Convert from ln(g) to %g
        predicted_pga = 100 * np.exp(pga[0])
        if predicted_pga > (pga_factor * recorded_pga):
            is_significant.append(True)
        else:
            is_significant.append(False)

    significant_arrivals = arrivals[is_significant]
    if significant_arrivals.empty:
        return st

    # Check if any of the significant arrivals occur within the first
    # pct_window_reject fraction of the signal window; if so, reject the
    # record rather than trimming it
    signal_length = st[0].stats.endtime - signal_window_starttime
    cutoff_time = signal_window_starttime + pct_window_reject * signal_length
    if (significant_arrivals < cutoff_time).any():
        for tr in st:
            tr.fail('A significant arrival from another event occurs within '
                    'the first %s percent of the signal window' %
                    (100 * pct_window_reject))

    # Otherwise, trim the stream at the first significant arrival
    else:
        for tr in st:
            signal_end = tr.getParameter('signal_end')
            signal_end['end_time'] = significant_arrivals[0]
            signal_end['method'] = 'Trimming before arrival of another event'
            tr.setParameter('signal_end', signal_end)
        cut(st)

    return st
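
# --- Illustrative sketch (not from gmprocess) -------------------------------
# A standalone toy version of the reject-or-trim decision described in steps
# 3-4 of the docstring above: an arrival is "significant" when its predicted
# PGA exceeds pga_factor times the recorded PGA, and the record is rejected
# only when a significant arrival falls in the first pct_window_reject
# fraction of the signal window. All times and PGA values are invented.
import pandas as pd

recorded_pga = 10.0           # %g, as recorded
pga_factor = 0.2
pct_window_reject = 0.25

signal_start, signal_stop = 0.0, 200.0                    # seconds
arrivals = pd.Series({'evt_a': 30.0, 'evt_b': 150.0})     # P arrivals (s)
predicted_pga = pd.Series({'evt_a': 1.0, 'evt_b': 5.0})   # %g from a GMPE

significant = arrivals[predicted_pga > pga_factor * recorded_pga]
cutoff = signal_start + pct_window_reject * (signal_stop - signal_start)

if significant.empty:
    action = 'keep record unchanged'
elif (significant < cutoff).any():
    action = 'reject record'
else:
    action = 'trim record at %.1f s' % significant.iloc[0]
print(action)  # -> 'trim record at 150.0 s' with these numbers
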
def signal_end(st, event_time, event_lon, event_lat, event_mag,
               method=None, vmin=None, floor=None,
               model=None, epsilon=2.0):
    """
    Estimate the end of the signal by using a model of the 5-95% significant
    duration and adding this value to the "signal_split" time. This probably
    only works well when the split is estimated with a P-wave picker, since
    the velocity method often ends up with split times that are well before
    the signal actually starts.

    Args:
        st (StationStream):
            Stream of data.
        event_time (UTCDateTime):
            Event origin time.
        event_mag (float):
            Event magnitude.
        event_lon (float):
            Event longitude.
        event_lat (float):
            Event latitude.
        method (str):
            Method for estimating signal end time. Either 'velocity'
            or 'model'.
        vmin (float):
            Velocity (km/s) for estimating end of signal. Only used if
            method="velocity".
        floor (float):
            Minimum duration (sec) applied along with vmin.
        model (str):
            Short name of duration model to use. Must be defined in the
            gmprocess/data/modules.yml file.
        epsilon (float):
            Number of standard deviations; if epsilon is 1.0, then the signal
            window duration is the mean Ds + 1 standard deviation. Only used
            for method="model". (A short standalone sketch of this computation
            appears after the function.)

    Returns:
        StationStream: stream with each trace's stats dict updated to include
        a stats['processing_parameters']['signal_end'] dictionary.

    """
    # Load the OpenQuake duration model if method="model"
    if method == "model":
        dmodel = load_model(model)

        # Set some "conservative" inputs (in that they will tend to give
        # larger durations).
        sctx = SitesContext()
        sctx.vs30 = np.array([180.0])
        sctx.z1pt0 = np.array([0.51])
        rctx = RuptureContext()
        rctx.mag = event_mag
        rctx.rake = -90.0
        dur_imt = imt.from_string('RSD595')
        stddev_types = [const.StdDev.INTRA_EVENT]

    for tr in st:
        if not tr.hasParameter('signal_split'):
            continue
        if method == "velocity":
            if vmin is None:
                raise ValueError('Must specify vmin if method is "velocity".')
            if floor is None:
                raise ValueError('Must specify floor if method is "velocity".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            end_time = event_time + max(floor, epi_dist / vmin)
        elif method == "model":
            if model is None:
                raise ValueError('Must specify model if method is "model".')
            epi_dist = gps2dist_azimuth(
                lat1=event_lat,
                lon1=event_lon,
                lat2=tr.stats['coordinates']['latitude'],
                lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0
            dctx = DistancesContext()
            # Repi >= Rrup, so substitution here should be conservative
            # (leading to larger durations).
            dctx.rrup = np.array([epi_dist])
            lnmu, lnstd = dmodel.get_mean_and_stddevs(
                sctx, rctx, dctx, dur_imt, stddev_types)
            duration = np.exp(lnmu + epsilon * lnstd[0])
            # Get split time
            split_time = tr.getParameter('signal_split')['split_time']
            end_time = split_time + float(duration)
        else:
            raise ValueError('method must be either "velocity" or "model".')
        # Update trace params
        end_params = {
            'end_time': end_time,
            'method': method,
            'vsplit': vmin,
            'floor': floor,
            'model': model,
            'epsilon': epsilon
        }
        tr.setParameter('signal_end', end_params)

    return st
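
# --- Illustrative sketch (assumed model, not necessarily the one used) ------
# The "model" branch above computes the signal end as the split time plus a
# significant-duration estimate of exp(ln(mean) + epsilon * sigma). The
# duration GSIM below (AfshariStewart2016 with the RSD595 IMT) and all input
# values are illustrative assumptions; any OpenQuake duration model exposing
# get_mean_and_stddevs could be substituted.
import numpy as np
from openquake.hazardlib import const, imt
from openquake.hazardlib.gsim.afshari_stewart_2016 import AfshariStewart2016
from openquake.hazardlib.gsim.base import (SitesContext, RuptureContext,
                                           DistancesContext)

dmodel = AfshariStewart2016()
sctx = SitesContext()
sctx.vs30 = np.array([180.0])    # soft-soil values, as in signal_end above
sctx.z1pt0 = np.array([0.51])
rctx = RuptureContext()
rctx.mag = 6.5                   # assumed event magnitude
rctx.rake = -90.0
dctx = DistancesContext()
dctx.rrup = np.array([50.0])     # epicentral distance used as a proxy (km)

lnmu, lnstd = dmodel.get_mean_and_stddevs(
    sctx, rctx, dctx, imt.from_string('RSD595'),
    [const.StdDev.INTRA_EVENT])
epsilon = 2.0
duration = np.exp(lnmu + epsilon * lnstd[0])   # seconds
# end_time would then be split_time + float(duration)
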
Example #38
0
def get_extent(rupture=None, config=None):
    """
    Method to compute map extent from rupture. There are numerous methods for
    getting the extent:
        - It can be specified directly in the config file,
        - it can be hard coded for specific magnitude ranges in the config
          file, or
        - it can be based on the MultiGMPE for the event.

    All methods except for the first require a rupture object.

    If no config is provided, then a rupture is required and the extent is
    based on a generic set of GMPEs for active or stable tectonic regions.
    (A small synthetic sketch of the extent logic appears after the function.)

    Args:
        rupture (Rupture): A ShakeMap Rupture instance.
        config (ConfigObj): ShakeMap config object.

    Returns:
        tuple: lonmin, lonmax, latmin, latmax rounded to the nearest
        arc-minute.

    """

    # -------------------------------------------------------------------------
    # Check to see what parameters are specified in the extent config
    # -------------------------------------------------------------------------
    spans = {}
    bounds = []
    if config is not None:
        if 'extent' in config:
            if 'magnitude_spans' in config['extent']:
                if len(config['extent']['magnitude_spans']):
                    if isinstance(config['extent']['magnitude_spans'], dict):
                        spans = config['extent']['magnitude_spans']
            if 'bounds' in config['extent']:
                if 'extent' in config['extent']['bounds']:
                    if config['extent']['bounds']['extent'][0] != -999.0:
                        bounds = config['extent']['bounds']['extent']

    # -------------------------------------------------------------------------
    # Simplest option: extent was specified in the config, use that and exit.
    # -------------------------------------------------------------------------
    if len(bounds):
        xmin, ymin, xmax, ymax = bounds
        return (xmin, xmax, ymin, ymax)

    if not rupture or not isinstance(rupture, Rupture):
        raise TypeError('get_extent() requires a rupture object if the extent '
                        'is not specified in the config object.')

    # Find the central point
    origin = rupture.getOrigin()
    if isinstance(rupture, (QuadRupture, EdgeRupture)):
        # For an extended rupture, it is the midpoint of the extent of the
        # vertices
        lats = rupture.lats
        lons = rupture.lons

        # Remove nans
        lons = lons[~np.isnan(lons)]
        lats = lats[~np.isnan(lats)]

        clat = 0.5 * (np.nanmax(lats) + np.nanmin(lats))
        clon = 0.5 * (np.nanmax(lons) + np.nanmin(lons))
    else:
        # For a point source, it is just the epicenter
        clat = origin.lat
        clon = origin.lon

    mag = origin.mag

    # -------------------------------------------------------------------------
    # Second simplest option: spans are hardcoded based on magnitude
    # -------------------------------------------------------------------------
    if len(spans):
        xmin = None
        xmax = None
        ymin = None
        ymax = None
        for spankey, span in spans.items():
            if mag > span[0] and mag <= span[1]:
                ymin = clat - span[2] / 2
                ymax = clat + span[2] / 2
                xmin = clon - span[3] / 2
                xmax = clon + span[3] / 2
                break
        if xmin is not None:
            return (xmin, xmax, ymin, ymax)

    # -------------------------------------------------------------------------
    # Use MultiGMPE to get spans
    # -------------------------------------------------------------------------
    if config is not None:
        gmpe = MultiGMPE.from_config(config)
        gmice = get_object_from_config('gmice', 'modeling', config)
    else:
        # Put in some default values for conf
        config = {
            'extent': {
                'mmi': {
                    'threshold': 4.5,
                    'mindist': 100,
                    'maxdist': 1000
                }
            }
        }

        # Generic GMPE choices based only on active vs. stable
        # tectonic regime, used as defaults...
        stable = is_stable(origin.lon, origin.lat)
        if not stable:
            ASK14 = AbrahamsonEtAl2014()
            CB14 = CampbellBozorgnia2014()
            CY14 = ChiouYoungs2014()
            gmpes = [ASK14, CB14, CY14]
            site_gmpes = None
            weights = [1 / 3.0, 1 / 3.0, 1 / 3.0]
            gmice = WGRW12()
        else:
            Fea96 = FrankelEtAl1996MwNSHMP2008()
            Tea97 = ToroEtAl1997MwNSHMP2008()
            Sea02 = SilvaEtAl2002MwNSHMP2008()
            C03 = Campbell2003MwNSHMP2008()
            TP05 = TavakoliPezeshk2005MwNSHMP2008()
            AB06p = AtkinsonBoore2006Modified2011()
            Pea11 = PezeshkEtAl2011()
            Atk08p = Atkinson2008prime()
            Sea01 = SomervilleEtAl2001NSHMP2008()
            gmpes = [
                Fea96, Tea97, Sea02, C03, TP05, AB06p, Pea11, Atk08p, Sea01
            ]
            site_gmpes = [AB06p]
            weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0]
            gmice = AK07()

        gmpe = MultiGMPE.from_list(gmpes,
                                   weights,
                                   default_gmpes_for_site=site_gmpes)

    min_mmi = config['extent']['mmi']['threshold']
    default_imt = imt.SA(1.0)
    sd_types = [const.StdDev.TOTAL]

    # Distance context
    dx = DistancesContext()
    # Minimum/maximum distances come from the config
    # (mindist/maxdist; 100 and 1000 km with the defaults above)
    d_min = config['extent']['mmi']['mindist']
    d_max = config['extent']['mmi']['maxdist']
    dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000)
    # Details don't matter for this; assuming vertical surface rupturing fault
    # with epicenter at the surface.
    dx.rrup = dx.rjb
    dx.rhypo = dx.rjb
    dx.repi = dx.rjb
    dx.rx = np.zeros_like(dx.rjb)
    dx.ry0 = np.zeros_like(dx.rjb)
    dx.rvolc = np.zeros_like(dx.rjb)

    # Sites context
    sx = SitesContext()
    # Set to soft soil conditions
    sx.vs30 = np.full_like(dx.rjb, 180)
    sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe)
    sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool)
    sx = Sites._addDepthParameters(sx)
    sx.backarc = np.full_like(sx.vs30, False, dtype=bool)

    # Rupture context
    rx = RuptureContext()
    rx.mag = origin.mag
    rx.rake = 0.0
    # Rupture width from the Wells and Coppersmith (1994) scaling relation
    rx.width = 10**(-0.76 + 0.27 * rx.mag)
    rx.dip = 90.0
    rx.ztor = origin.depth
    rx.hypo_depth = origin.depth

    gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs(sx, rx, dx, default_imt,
                                                 sd_types)

    # Convert to MMI
    gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt)

    # Largest distance at which the predicted MMI still exceeds the threshold;
    # this sets the minimum extent distance from the rupture
    dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi]
    if len(dists_exceed_mmi):
        mindist_km = np.max(dists_exceed_mmi)
    else:
        mindist_km = d_min

    # Get a projection
    proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4)
    if isinstance(rupture, (QuadRupture, EdgeRupture)):
        ruptx, rupty = proj(lons, lats)
    else:
        ruptx, rupty = proj(clon, clat)

    xmin = np.nanmin(ruptx) - mindist_km
    ymin = np.nanmin(rupty) - mindist_km
    xmax = np.nanmax(ruptx) + mindist_km
    ymax = np.nanmax(rupty) + mindist_km

    # Put a limit on the range of the aspect ratio; use new names here so we
    # do not shadow the DistancesContext "dx" above
    span_x = xmax - xmin
    span_y = ymax - ymin
    ar = span_y / span_x
    if ar > 1.2:
        # Inflate x
        target_span_x = span_y / 1.2
        ddx = target_span_x - span_x
        xmax = xmax + ddx / 2
        xmin = xmin - ddx / 2
    if ar < 0.83:
        # Inflate y
        target_span_y = span_x * 0.83
        ddy = target_span_y - span_y
        ymax = ymax + ddy / 2
        ymin = ymin - ddy / 2

    lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True)
    lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True)

    #
    # Round coordinates to the nearest minute -- that should make the
    # output grid register with common grid resolutions (60c, 30c,
    # 15c, 7.5c)
    #
    logging.debug("Extent: %f, %f, %f, %f" % (lonmin, lonmax, latmin, latmax))
    return _round_coord(lonmin[0]), _round_coord(lonmax[0]), \
        _round_coord(latmin[0]), _round_coord(latmax[0])
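
# --- Illustrative sketch (synthetic numbers, no ShakeMap imports) -----------
# Two ideas at the heart of get_extent(): (1) the extent distance is the
# largest distance at which the predicted MMI still exceeds the threshold,
# and (2) the resulting box is inflated so its aspect ratio stays within
# roughly 0.83-1.2. The MMI-vs-distance decay below is invented purely so
# the snippet runs on its own.
import numpy as np

d_min, d_max, min_mmi = 100.0, 1000.0, 4.5
dists = np.logspace(np.log10(d_min), np.log10(d_max), 2000)
mmi = 9.5 - 2.0 * np.log10(dists)          # fake log-linear MMI decay

exceed = dists[mmi > min_mmi]
extent_km = np.max(exceed) if len(exceed) else d_min   # ~316 km here

# Start from a deliberately elongated box around the source at (0, 0)
xmin, xmax = -extent_km, extent_km
ymin, ymax = -2.0 * extent_km, 2.0 * extent_km
span_x, span_y = xmax - xmin, ymax - ymin
ar = span_y / span_x
if ar > 1.2:                                # too tall: widen x
    ddx = span_y / 1.2 - span_x
    xmin, xmax = xmin - ddx / 2, xmax + ddx / 2
if ar < 0.83:                               # too wide: heighten y
    ddy = span_x * 0.83 - span_y
    ymin, ymax = ymin - ddy / 2, ymax + ddy / 2
print(round((ymax - ymin) / (xmax - xmin), 2))   # -> 1.2
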
Example #39
0
import numpy as np
import pandas as pd

# Local imports.
from openquake.hazardlib.gsim.travasarou_2003 import TravasarouEtAl2003
from openquake.hazardlib.imt import IA
from openquake.hazardlib.gsim.base import SitesContext
from openquake.hazardlib.gsim.base import DistancesContext
from openquake.hazardlib.gsim.base import RuptureContext
from openquake.hazardlib import const

# Create an instance of the gmpe and input contexts.
trav2003 = TravasarouEtAl2003()
sx = SitesContext()
rx = RuptureContext()
dx = DistancesContext()

# Import data frame.
df = pd.read_csv(
    '/Users/tnye/PROJECTS/Duration/data/dataframes/select_data.csv')

# Define IMTs and standard deviations.
imt = IA()
sd_types = [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT]

# List of event IDs.
eventids = [
    'usp000a1b0', 'usp000d6vk', 'usp000fg9t', 'usp000g9h6', 'us2000gge9',
    'us1000etmq', 'us2000dwh6', 'nc30228270', 'nc72282711', 'ci14383980',
    'ci14607652', 'usp0009eq0'
]