def _get_event_context(self, idx, nodal_plane_index=1):
    """
    Returns the event contexts for a specific event
    """
    idx = idx[0]
    rctx = RuptureContext()
    rup = self.records[idx]
    setattr(rctx, 'mag', rup.event.magnitude.value)
    if nodal_plane_index == 2:
        setattr(rctx, 'strike',
                rup.event.mechanism.nodal_planes.nodal_plane_2['strike'])
        setattr(rctx, 'dip',
                rup.event.mechanism.nodal_planes.nodal_plane_2['dip'])
        setattr(rctx, 'rake',
                rup.event.mechanism.nodal_planes.nodal_plane_2['rake'])
    else:
        setattr(rctx, 'strike', 0.0)
        setattr(rctx, 'dip', 90.0)
        rctx.rake = rup.event.mechanism.get_rake_from_mechanism_type()
    if rup.event.rupture.surface:
        setattr(rctx, 'ztor', rup.event.rupture.surface.get_top_edge_depth())
        setattr(rctx, 'width', rup.event.rupture.surface.width)
        setattr(rctx, 'hypo_loc',
                rup.event.rupture.surface.get_hypo_location(1000))
    else:
        setattr(rctx, 'ztor', rup.event.depth)
        # Use the PeerMSR to define the area and, assuming an aspect ratio
        # of 1, derive the width
        setattr(rctx, 'width',
                np.sqrt(DEFAULT_MSR.get_median_area(rctx.mag, 0)))
        # Default hypocentre location to the middle of the rupture
        setattr(rctx, 'hypo_loc', (0.5, 0.5))
    setattr(rctx, 'hypo_depth', rup.event.depth)
    setattr(rctx, 'hypo_lat', rup.event.latitude)
    setattr(rctx, 'hypo_lon', rup.event.longitude)
    return rctx
def test_get_mean_and_stddevs_good(self): """ Tests the full execution of the GMPE tables for valid data """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 rctx.rake = 90.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() stddevs = [const.StdDev.TOTAL] expected_mean = np.array([20.0, 20.0, 10.0, 5.0, 1.0E-19]) # PGA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.25 * np.ones(5), 5) # SA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
def test_get_mean_table(self, idx=0): """ Test the retrieval of the mean amplification tables for a given magnitude and IMT """ rctx = RuptureContext() rctx.mag = 6.0 # PGA expected_table = np.ones([10, 2]) expected_table[:, self.IDX] *= 1.5 np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.PGA(), rctx), expected_table) # SA expected_table[:, self.IDX] = 2.0 * np.ones(10) np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.SA(0.5), rctx), expected_table) # SA (period interpolation) interpolator = interp1d(np.log10(self.amp_table.periods), np.log10(np.array([1.5, 2.0, 0.5]))) period = 0.3 expected_table[:, self.IDX] = ( 10.0 ** interpolator(np.log10(period))) * np.ones(10.) np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.SA(period), rctx), expected_table)
def test_get_mean_and_stddevs_good_amplified(self): """ Tests the full execution of the GMPE tables for valid data with amplification """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() sctx.vs30 = 100. * np.ones(5) stddevs = [const.StdDev.TOTAL] expected_mean = np.array([20., 20., 10., 5., 1.0E-19]) expected_sigma = 0.25 * np.ones(5) # PGA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5) # SA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
def test_get_mean_and_stddevs(self): """ Tests mean and standard deviations without amplification """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() stddevs = [const.StdDev.TOTAL] expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20]) # PGA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5) # SA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5) # PGV mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGV(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
def test_get_mean_table(self, idx=0): """ Test the retrieval of the mean amplification tables for a given magnitude and IMT """ ctx = RuptureContext() ctx.mag = 6.0 # PGA expected_table = np.ones([10, 2]) expected_table[:, self.IDX] *= 1.5 np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.PGA(), ctx), expected_table) # SA expected_table[:, self.IDX] = 2.0 * np.ones(10) np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.SA(0.5), ctx), expected_table) # SA (period interpolation) interpolator = interp1d(np.log10(self.amp_table.periods), np.log10(np.array([1.5, 2.0, 0.5]))) period = 0.3 expected_table[:, self.IDX] = (10.0**interpolator( np.log10(period))) * np.ones(10) np.testing.assert_array_almost_equal( self.amp_table.get_mean_table(imt_module.SA(period), ctx), expected_table)
def _get_event_context(self, idx, nodal_plane_index=1): """ Returns the event contexts for a specific event """ idx = idx[0] rctx = RuptureContext() rup = self.records[idx] setattr(rctx, 'mag', rup.event.magnitude.value) if nodal_plane_index == 2: setattr(rctx, 'dip', rup.event.mechanism.nodal_planes.nodal_plane_2['dip']) setattr(rctx, 'rake', rup.event.mechanism.nodal_planes.nodal_plane_2['rake']) else: setattr(rctx, 'dip', rup.event.mechanism.nodal_planes.nodal_plane_1['dip']) setattr(rctx, 'rake', rup.event.mechanism.nodal_planes.nodal_plane_1['rake']) if not rctx.rake: rctx.rake = rup.event.mechanism.get_rake_from_mechanism_type() if rup.event.rupture: setattr(rctx, 'ztor', rup.event.rupture.depth) setattr(rctx, 'width', rup.event.rupture.width) setattr(rctx, 'hypo_depth', rup.event.depth) return rctx
def test_get_amplification_factors(self): """ Tests the amplification tables """ ctx = RuptureContext() ctx.rake = 45.0 ctx.mag = 6.0 # Takes distances at the values found in the table (not checking # distance interpolation) ctx.rjb = np.copy(self.amp_table.distances[:, 0, 0]) # Test Vs30 is 700.0 m/s midpoint between the 400 m/s and 1000 m/s # specified in the table stddevs = [const.StdDev.TOTAL] expected_mean = np.ones_like(ctx.rjb) # Check PGA and PGV mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGA(), ctx, ctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 1.5) * expected_mean) np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGV(), ctx, ctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 0.5) * expected_mean) np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean) # Sa (0.5) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.SA(0.5), ctx, ctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 2.0) * expected_mean) np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
def test_get_mean_and_stddevs(self): """ Tests mean and standard deviations without amplification """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) ctx = RuptureContext() ctx.mag = 6.0 # Test values at the given distances and those outside range ctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) ctx.sids = np.arange(5) stddevs = [const.StdDev.TOTAL] expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20]) # PGA mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5) # SA mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5) # PGV mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imt_module.PGV(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
def calculate_total_std(gsim_list, imts, vs30): std_total = {} std_inter = {} std_intra = {} for gsim in gsim_list: rctx = RuptureContext() # The calculator needs these inputs but they are not used # in the std calculation rctx.mag = 5 rctx.rake = 0 rctx.hypo_depth = 0 dctx = DistancesContext() dctx.rjb = np.copy(np.array([1])) # I do not care about the distance dctx.rrup = np.copy(np.array([1])) # I do not care about the distance sctx = SitesContext() sctx.vs30 = vs30 * np.ones_like(np.array([0])) for imt in imts: gm_table, [ gm_stddev_inter, gm_stddev_intra ] = (gsim.get_mean_and_stddevs( sctx, rctx, dctx, imt, [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT])) std_total[gsim, imt] = (np.sqrt(gm_stddev_inter[0]**2 + gm_stddev_intra[0]**2)) std_inter[gsim, imt] = gm_stddev_inter[0] std_intra[gsim, imt] = gm_stddev_intra[0] return (std_total, std_inter, std_intra)
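# Minimal usage sketch for calculate_total_std above (an illustration, not part
# of the original module): the GMPE classes and import paths below are standard
# openquake.hazardlib ones, while the particular model list, IMTs and vs30
# value are assumed example inputs.
from openquake.hazardlib import imt as imt_module
from openquake.hazardlib.gsim.boore_atkinson_2008 import BooreAtkinson2008
from openquake.hazardlib.gsim.akkar_bommer_2010 import AkkarBommer2010

example_gsims = [BooreAtkinson2008(), AkkarBommer2010()]   # assumed GMPE set
example_imts = [imt_module.PGA(), imt_module.SA(1.0)]      # assumed IMT set
std_total, std_inter, std_intra = calculate_total_std(
    example_gsims, example_imts, vs30=760.0)
for (gsim, im), sigma in std_total.items():
    print(gsim, im, sigma)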
def _get_event_context(self, idx, nodal_plane_index=1): """ Returns the event contexts for a specific event """ idx = idx[0] rctx = RuptureContext() rup = self.records[idx] setattr(rctx, 'mag', rup.event.magnitude.value) if nodal_plane_index == 2: setattr(rctx, 'strike', rup.event.mechanism.nodal_planes.nodal_plane_2['strike']) setattr(rctx, 'dip', rup.event.mechanism.nodal_planes.nodal_plane_2['dip']) setattr(rctx, 'rake', rup.event.mechanism.nodal_planes.nodal_plane_2['rake']) else: setattr(rctx, 'strike', 0.0) setattr(rctx, 'dip', 90.0) rctx.rake = rup.event.mechanism.get_rake_from_mechanism_type() if rup.event.rupture: setattr(rctx, 'ztor', rup.event.rupture.surface.get_top_edge_depth()) setattr(rctx, 'width', rup.event.rupture.surface.width) setattr(rctx, 'hypo_loc', rup.event.rupture.surface.get_hypo_location(1000)) else: setattr(rctx, 'ztor', rup.event.depth) setattr(rctx, 'hypo_depth', rup.event.depth) setattr(rctx, 'hypo_lat', rup.event.latitude) setattr(rctx, 'hypo_lon', rup.event.longitude) return rctx
def test_dist_not_in_increasing_order(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.mag = 5. dctx.rhypo = numpy.array([150, 100]) mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) dctx.rhypo = numpy.array([100, 150]) mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(mean_150_100[1], mean_100_150[0]) self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
def get_gsim_contexts(self):
    """
    Returns a comprehensive set of GMPE context objects
    """
    assert isinstance(self.rupture, Rupture)
    assert isinstance(self.target_sites, SiteCollection)
    # Distances
    dctx = DistancesContext()
    # Rupture distance
    setattr(dctx, 'rrup',
            self.rupture.surface.get_min_distance(self.target_sites.mesh))
    # Rx
    setattr(dctx, 'rx',
            self.rupture.surface.get_rx_distance(self.target_sites.mesh))
    # Rjb
    setattr(dctx, 'rjb',
            self.rupture.surface.get_joyner_boore_distance(
                self.target_sites.mesh))
    # Rhypo
    setattr(dctx, 'rhypo',
            self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh))
    # Repi
    setattr(dctx, 'repi',
            self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh,
                                                     with_depths=False))
    # Ry0
    setattr(dctx, 'ry0',
            self.rupture.surface.get_ry0_distance(self.target_sites.mesh))
    # Rcdpp - ignored at present
    setattr(dctx, 'rcdpp', None)
    # Azimuth - ignored at present
    setattr(dctx, 'azimuth', None)
    setattr(dctx, 'hanging_wall', None)
    # Rvolc
    setattr(dctx, "rvolc", np.zeros_like(self.target_sites.mesh.lons))
    # Sites
    sctx = SitesContext()
    key_list = ['_vs30', '_vs30measured', '_z1pt0', '_z2pt5', '_backarc']
    for key in key_list:
        setattr(sctx, key[1:], getattr(self.target_sites, key))
    for key in ['lons', 'lats']:
        setattr(sctx, key, getattr(self.target_sites, key))
    # Rupture
    rctx = RuptureContext()
    setattr(rctx, 'mag', self.magnitude)
    setattr(rctx, 'strike', self.strike)
    setattr(rctx, 'dip', self.dip)
    setattr(rctx, 'rake', self.rake)
    setattr(rctx, 'ztor', self.ztor)
    setattr(rctx, 'hypo_depth', self.rupture.hypocenter.depth)
    setattr(rctx, 'hypo_lat', self.rupture.hypocenter.latitude)
    setattr(rctx, 'hypo_lon', self.rupture.hypocenter.longitude)
    setattr(rctx, 'hypo_loc', self.hypo_loc)
    setattr(rctx, 'width', self.rupture.surface.get_width())
    return sctx, rctx, dctx
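# Hedged sketch (not from the original class) of how the three contexts
# returned by get_gsim_contexts might be consumed. `configurator` stands for an
# already-built instance of the class that defines the method above, and the
# GMPE/IMT choices are arbitrary illustrations.
from openquake.hazardlib import const, imt
from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014

sctx, rctx, dctx = configurator.get_gsim_contexts()
gmpe = AbrahamsonEtAl2014()
mean, stddevs = gmpe.get_mean_and_stddevs(
    sctx, rctx, dctx, imt.SA(0.2), [const.StdDev.TOTAL])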
def evaluate_model(site_params, rup_params, df, npts, azimuth, moveout, mod,
                   imt):
    sx = SitesContext()
    rx = RuptureContext()
    dx = DistancesContext()
    # TODO: some site parameters can be pulled from the dataframe so we don't
    # have to use the defaults (vs30, azimuth, etc.)
    if not moveout:
        npts = df.shape[0]
    for param in site_params.keys():
        setattr(sx, param, np.full(npts, site_params[param]))
    rx.__dict__.update(rup_params)
    rx.mag = df['EarthquakeMagnitude'].iloc[0]
    rx.hypo_depth = df['EarthquakeDepth'].iloc[0]
    if moveout:
        dx.rjb = np.linspace(0, df['JoynerBooreDistance'].max(), npts)
        dx.rrup = np.sqrt(dx.rjb**2 + df['EarthquakeDepth'].iloc[0]**2)
        dx.rhypo = dx.rrup
        dx.repi = dx.rjb
    else:
        dx.rjb = df['JoynerBooreDistance']
        dx.rrup = df['RuptureDistance']
        dx.rhypo = df['HypocentralDistance']
        dx.repi = df['EpicentralDistance']
    # TODO: some of these distances can be pulled from the dataframe
    dx.ry0 = dx.rjb
    dx.rx = np.full_like(dx.rjb, -1)
    # Build the azimuth as an array of length npts (np.full rather than
    # np.full_like, which would treat the integer npts as a template array)
    dx.azimuth = np.full(npts, azimuth)
    dx.rcdpp = dx.rjb
    dx.rvolc = dx.rjb
    try:
        mean, sd = MODELS_DICT[mod]().get_mean_and_stddevs(
            sx, rx, dx, manage_imts(imt)[0], [StdDev.TOTAL])
        mean = convert_units(mean, imt)
        if moveout:
            return mean, dx
        else:
            return mean, sd[0]
    except Exception:
        return
def test_dist_not_in_increasing_order(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.mag = 5. dctx.rhypo = numpy.array([150, 100]) mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) dctx.rhypo = numpy.array([100, 150]) mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) self.assertAlmostEqual(mean_150_100[1], mean_100_150[0]) self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
def test_mag_greater_8pt5(self): gmpe = SadighEtAl1997() sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.rake = 0.0 dctx.rrup = numpy.array([0., 1.]) sctx.vs30 = numpy.array([800., 800.]) rctx.mag = 9.0 mean_rock_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) rctx.mag = 8.5 mean_rock_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_allclose(mean_rock_9, mean_rock_8pt5) sctx.vs30 = numpy.array([300., 300.]) rctx.mag = 9.0 mean_soil_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) rctx.mag = 8.5 mean_soil_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_allclose(mean_soil_9, mean_soil_8pt5)
def test_get_sigma_table(self): """ Test the retrieval of the standard deviation modification tables for a given magnitude and IMT """ ctx = RuptureContext() ctx.mag = 6.0 # PGA expected_table = np.ones([10, 2]) expected_table[:, self.IDX] *= 0.8 stddevs = [const.StdDev.TOTAL] pga_table = self.amp_table.get_sigma_tables(imt_module.PGA(), ctx, stddevs)[0] np.testing.assert_array_almost_equal(pga_table, expected_table) # SA (for coverage) sa_table = self.amp_table.get_sigma_tables(imt_module.SA(0.3), ctx, stddevs)[0] np.testing.assert_array_almost_equal(sa_table, expected_table)
def test_get_mean_stddevs_unsupported_stddev(self): """ Tests the execution of the GMPE with an unsupported standard deviation type """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() sctx.vs30 = 1000. * np.ones(5) stddevs = [const.StdDev.TOTAL, const.StdDev.INTER_EVENT] with self.assertRaises(ValueError) as ve: gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) self.assertEqual(str(ve.exception), "Standard Deviation type Inter event not supported")
def test_get_amplification_factors(self): """ Tests the amplification tables """ rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Takes distances at the values found in the table (not checking # distance interpolation) dctx.rjb = np.copy(self.amp_table.distances[:, 0, 0]) # Test Vs30 is 700.0 m/s midpoint between the 400 m/s and 1000 m/s # specified in the table sctx = SitesContext() sctx.vs30 = 700.0 * np.ones_like(dctx.rjb) stddevs = [const.StdDev.TOTAL] expected_mean = np.ones_like(dctx.rjb) expected_sigma = np.ones_like(dctx.rjb) # Check PGA and PGV mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGA(), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 1.5) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGV(), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 0.5) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean) # Sa (0.5) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.SA(0.5), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 2.0) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean)
def _get_poes(self, **kwargs): default_kwargs = dict(sctx=SitesContext(), rctx=RuptureContext(), dctx=DistancesContext(), imt=self.DEFAULT_IMT(), imls=[1.0, 2.0, 3.0], truncation_level=1.0) default_kwargs.update(kwargs) kwargs = default_kwargs return self.gsim.get_poes(**kwargs)
def check_gmpe_adjustments(self, adj_gmpe_set, original_gmpe):
    """
    Takes a set of three adjusted GMPEs representing the "low", "middle"
    and "high" stress drop adjustments for Germany and compares them
    against the original "target" GMPE for a variety of magnitudes and
    styles of faulting.
    """
    low_gsim, mid_gsim, high_gsim = adj_gmpe_set
    tot_std = [const.StdDev.TOTAL]
    for imt in self.imts:
        for mag in self.mags:
            for rake in self.rakes:
                rctx = RuptureContext()
                rctx.mag = mag
                rctx.rake = rake
                rctx.hypo_depth = 10.
                # Get "original" values
                mean = original_gmpe.get_mean_and_stddevs(self.sctx, rctx,
                                                          self.dctx, imt,
                                                          tot_std)[0]
                mean = np.exp(mean)
                # Get "low" adjustments (0.75 times the original)
                low_mean = low_gsim.get_mean_and_stddevs(self.sctx, rctx,
                                                         self.dctx, imt,
                                                         tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(low_mean) / mean, 0.75 * np.ones_like(low_mean))
                # Get "middle" adjustments (1.25 times the original)
                mid_mean = mid_gsim.get_mean_and_stddevs(self.sctx, rctx,
                                                         self.dctx, imt,
                                                         tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(mid_mean) / mean, 1.25 * np.ones_like(mid_mean))
                # Get "high" adjustments (1.5 times the original)
                high_mean = high_gsim.get_mean_and_stddevs(self.sctx, rctx,
                                                           self.dctx, imt,
                                                           tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(high_mean) / mean, 1.5 * np.ones_like(high_mean))
def test_rhypo_smaller_than_15(self): # test the calculation in case of rhypo distances less than 15 km # (for rhypo=0 the distance term has a singularity). In this case the # method should return values equal to the ones obtained by clipping # distances at 15 km. sctx = SitesContext() sctx.vs30 = numpy.array([800.0, 800.0, 800.0]) rctx = RuptureContext() rctx.mag = 5.0 rctx.rake = 0 dctx = DistancesContext() dctx.rhypo = numpy.array([0.0, 10.0, 16.0]) dctx.rhypo.flags.writeable = False mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) setattr(dctx, 'rhypo', numpy.array([15.0, 15.0, 16.0])) mean_15, stds_15 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_15) numpy.testing.assert_array_equal(stds_0, stds_15)
def test_get_sigma_table(self): """ Test the retrieval of the standard deviation modification tables for a given magnitude and IMT """ rctx = RuptureContext() rctx.mag = 6.0 # PGA expected_table = np.ones([10, 2]) expected_table[:, self.IDX] *= 0.8 stddevs = ["Total"] pga_table = self.amp_table.get_sigma_tables(imt_module.PGA(), rctx, stddevs)[0] np.testing.assert_array_almost_equal(pga_table, expected_table) # SA (for coverage) sa_table = self.amp_table.get_sigma_tables(imt_module.SA(0.3), rctx, stddevs)[0] np.testing.assert_array_almost_equal(sa_table, expected_table)
def setUp(self): """ """ self.gsim = TromansEtAl2019SigmaMu self.rctx = RuptureContext() self.rctx.mag = 6.5 self.rctx.rake = 0. self.dctx = DistancesContext() self.dctx.rjb = np.array([5., 10., 20., 50., 100.]) self.sctx = SitesContext() self.sctx.vs30 = 500. * np.ones(5)
def get_response_spectrum(self, magnitude, distance, periods, rake=90, vs30=800, damping=0.05): """ """ responses = np.zeros((len(periods),)) p_damping = damping * 100 rup = RuptureContext() rup.mag = magnitude rup.rake = rake dists = DistancesContext() dists.rjb = np.array([distance]) sites = SitesContext() sites.vs30 = np.array([vs30]) stddev_types = [StdDev.TOTAL] for i, period in enumerate(periods): if period == 0: imt = _PGA() else: imt = _SA(period, p_damping) responses[i] = np.exp(self._gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)[0][0]) return ResponseSpectrum(periods, responses, unit='g', damping=damping)
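# Hedged usage sketch for get_response_spectrum above: `spectrum_builder`
# stands for an instance of the (unnamed) class that owns the method and wraps
# a GMPE in self._gmpe; the magnitude, distance and period values are purely
# illustrative assumptions.
import numpy as np

example_periods = np.concatenate([[0.0], np.logspace(-2.0, 1.0, 30)])
response_spectrum = spectrum_builder.get_response_spectrum(
    magnitude=6.5, distance=20.0, periods=example_periods,
    rake=90, vs30=760, damping=0.05)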
def setUp(self):
    """
    Setup with a set of distances and site parameters
    """
    self.imts = [PGA(), SA(0.1), SA(0.2), SA(0.5), SA(1.0), SA(2.0)]
    self.mags = [4.5, 5.5, 6.5, 7.5]
    self.rakes = [-90., 0., 90.]
    self.ctx = RuptureContext()
    self.ctx.sids = np.arange(5)
    self.ctx.rhypo = np.array([5., 10., 20., 50., 100.])
    self.ctx.vs30 = 800.0 * np.ones(5)
def test_zero_distance(self): # test the calculation in case of zero rrup distance (for rrup=0 # the slab correction term has a singularity). In this case the # method should return values equal to the ones obtained by # replacing 0 values with 0.1 ctx = RuptureContext() ctx.sids = [0, 1] ctx.vs30 = numpy.array([800.0, 800.0]) ctx.mag = 5.0 ctx.rake = 0.0 ctx.hypo_depth = 0.0 ctx.rrup = numpy.array([0.0, 0.2]) ctx.occurrence_rate = .0001 mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) ctx.rrup = numpy.array([0.1, 0.2]) mean_01, stds_01 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_01) numpy.testing.assert_array_equal(stds_0, stds_01)
def test_equality(self): sctx1 = SitesContext() sctx1.vs30 = numpy.array([500., 600., 700.]) sctx1.vs30measured = True sctx1.z1pt0 = numpy.array([40., 50., 60.]) sctx1.z2pt5 = numpy.array([1, 2, 3]) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 == sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 != sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = False sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 != sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) self.assertTrue(sctx1 != sctx2) rctx = RuptureContext() rctx.mag = 5. self.assertTrue(sctx1 != rctx)
def test_get_mean_and_stddevs_good(self): """ Tests the full execution of the GMPE tables for valid data """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) ctx = RuptureContext() ctx.mag = 6.0 # Test values at the given distances and those outside range ctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) ctx.vs30 = 1000. * np.ones(5) ctx.sids = np.arange(5) stddevs = [const.StdDev.TOTAL] expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20]) expected_sigma = 0.25 * np.ones(5) imts = [imt_module.PGA(), imt_module.SA(1.0), imt_module.PGV()] # PGA mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[0], stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5) # SA mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[1], stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5) # PGV mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[2], stddevs) np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5) # StdDev.ALL check contexts.get_mean_stds([gsim], ctx, imts)
def test_mag_greater_8pt5(self): gmpe = SadighEtAl1997() sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.rake = 0.0 dctx.rrup = numpy.array([0., 1.]) sctx.vs30 = numpy.array([800., 800.]) rctx.mag = 9.0 mean_rock_9, _ = gmpe.get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL] ) rctx.mag = 8.5 mean_rock_8pt5, _ = gmpe.get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL] ) numpy.testing.assert_allclose(mean_rock_9, mean_rock_8pt5) sctx.vs30 = numpy.array([300., 300.]) rctx.mag = 9.0 mean_soil_9, _ = gmpe.get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL] ) rctx.mag = 8.5 mean_soil_8pt5, _ = gmpe.get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL] ) numpy.testing.assert_allclose(mean_soil_9, mean_soil_8pt5)
def test_mag_dist_outside_range(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() # rupture with Mw = 3 (Mblg=2.9434938048208452) at rhypo = 1 must give # same mean as rupture with Mw = 4.4 (Mblg=4.8927897867183798) at # rhypo = 10 rctx.mag = 2.9434938048208452 dctx.rhypo = numpy.array([1]) mean_mw3_d1, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) rctx.mag = 4.8927897867183798 dctx.rhypo = numpy.array([10]) mean_mw4pt4_d10, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(float(mean_mw3_d1), float(mean_mw4pt4_d10)) # rupture with Mw = 9 (Mblg = 8.2093636421088814) at rhypo = 1500 km # must give same mean as rupture with Mw = 8.2 # (Mblg = 7.752253535347597) at rhypo = 1000 rctx.mag = 8.2093636421088814 dctx.rhypo = numpy.array([1500.]) mean_mw9_d1500, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) rctx.mag = 7.752253535347597 dctx.rhypo = numpy.array([1000.]) mean_mw8pt2_d1000, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(mean_mw9_d1500, mean_mw8pt2_d1000)
def _disaggregate_poe(self, **kwargs): default_kwargs = dict( sctx=SitesContext(), rctx=RuptureContext(), dctx=DistancesContext(), imt=self.DEFAULT_IMT(), iml=2.0, truncation_level=1.0, n_epsilons=3, ) default_kwargs.update(kwargs) kwargs = default_kwargs return self.gsim.disaggregate_poe(**kwargs)
def test_mag_dist_outside_range(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() # rupture with Mw = 3 (Mblg=2.9434938048208452) at rhypo = 1 must give # same mean as rupture with Mw = 4.4 (Mblg=4.8927897867183798) at # rhypo = 10 rctx.mag = 2.9434938048208452 dctx.rhypo = numpy.array([1]) mean_mw3_d1, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) rctx.mag = 4.8927897867183798 dctx.rhypo = numpy.array([10]) mean_mw4pt4_d10, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) self.assertAlmostEqual(float(mean_mw3_d1), float(mean_mw4pt4_d10)) # rupture with Mw = 9 (Mblg = 8.2093636421088814) at rhypo = 1500 km # must give same mean as rupture with Mw = 8.2 # (Mblg = 7.752253535347597) at rhypo = 1000 rctx.mag = 8.2093636421088814 dctx.rhypo = numpy.array([1500.]) mean_mw9_d1500, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) rctx.mag = 7.752253535347597 dctx.rhypo = numpy.array([1000.]) mean_mw8pt2_d1000, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL] ) self.assertAlmostEqual(mean_mw9_d1500, mean_mw8pt2_d1000)
def test_recarray_conversion(self): # automatic recarray conversion for backward compatibility imt = PGA() gsim = AbrahamsonGulerce2020SInter() ctx = RuptureContext() ctx.mag = 5. ctx.sids = [0, 1] ctx.vs30 = [760., 760.] ctx.rrup = [100., 110.] ctx.occurrence_rate = .000001 mean, _stddevs = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imt, []) numpy.testing.assert_allclose(mean, [-5.81116004, -6.00192455])
def check_gmpe_adjustments(self, adj_gmpe_set, original_gmpe):
    """
    Takes a set of three adjusted GMPEs representing the "low", "middle"
    and "high" stress drop adjustments for Germany and compares them
    against the original "target" GMPE for a variety of magnitudes and
    styles of faulting.
    """
    low_gsim, mid_gsim, high_gsim = adj_gmpe_set
    tot_std = [const.StdDev.TOTAL]
    for imt in self.imts:
        for mag in self.mags:
            for rake in self.rakes:
                rctx = RuptureContext()
                rctx.mag = mag
                rctx.rake = rake
                rctx.hypo_depth = 10.
                # Get "original" values
                mean = original_gmpe.get_mean_and_stddevs(
                    self.sctx, rctx, self.dctx, imt, tot_std)[0]
                mean = np.exp(mean)
                # Get "low" adjustments (0.75 times the original)
                low_mean = low_gsim.get_mean_and_stddevs(
                    self.sctx, rctx, self.dctx, imt, tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(low_mean) / mean, 0.75 * np.ones_like(low_mean))
                # Get "middle" adjustments (1.25 times the original)
                mid_mean = mid_gsim.get_mean_and_stddevs(
                    self.sctx, rctx, self.dctx, imt, tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(mid_mean) / mean, 1.25 * np.ones_like(mid_mean))
                # Get "high" adjustments (1.5 times the original)
                high_mean = high_gsim.get_mean_and_stddevs(
                    self.sctx, rctx, self.dctx, imt, tot_std)[0]
                np.testing.assert_array_almost_equal(
                    np.exp(high_mean) / mean, 1.5 * np.ones_like(high_mean))
def test_rhypo_smaller_than_15(self): # test the calculation in case of rhypo distances less than 15 km # (for rhypo=0 the distance term has a singularity). In this case the # method should return values equal to the ones obtained by clipping # distances at 15 km. ctx = RuptureContext() ctx.sids = [0, 1, 2] ctx.vs30 = numpy.array([800.0, 800.0, 800.0]) ctx.mag = 5.0 ctx.rake = 0 ctx.occurrence_rate = .0001 ctx.rhypo = numpy.array([0.0, 10.0, 16.0]) ctx.rhypo.flags.writeable = False mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) mean_15, stds_15 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_15) numpy.testing.assert_array_equal(stds_0, stds_15)
def test_zero_distance(self): # test the calculation in case of zero rrup distance (for rrup=0 # the equations have a singularity). In this case the # method should return values equal to the ones obtained by # replacing 0 values with 1 sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() setattr(sctx, 'vs30', numpy.array([500.0, 2500.0])) setattr(rctx, 'mag', 5.0) setattr(dctx, 'rrup', numpy.array([0.0, 0.2])) mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) setattr(dctx, 'rrup', numpy.array([1.0, 0.2])) mean_01, stds_01 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_01) numpy.testing.assert_array_equal(stds_0, stds_01)
def test_dist_not_in_increasing_order(self): ctx = RuptureContext() ctx.mag = 5. ctx.sids = [0, 1] ctx.rhypo = numpy.array([150, 100]) mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL]) ctx.rhypo = numpy.array([100, 150]) mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(mean_150_100[1], mean_100_150[0]) self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
def test_zero_distance(self): # test the calculation in case of zero rrup distance (for rrup=0 # the equations have a singularity). In this case the # method should return values equal to the ones obtained by # replacing 0 values with 1 ctx = RuptureContext() ctx.sids = [0, 1] ctx.vs30 = numpy.array([500.0, 2500.0]) ctx.mag = 5.0 ctx.rrup = numpy.array([0.0, 0.2]) mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) ctx.rrup = numpy.array([1.0, 0.2]) mean_01, stds_01 = self.GSIM_CLASS().get_mean_and_stddevs( ctx, ctx, ctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_01) numpy.testing.assert_array_equal(stds_0, stds_01)
def trim_multiple_events(
    st,
    origin,
    catalog,
    travel_time_df,
    pga_factor,
    pct_window_reject,
    gmpe,
    site_parameters,
    rupture_parameters,
):
    """
    Uses a catalog (list of ScalarEvents) to handle cases where a trace might
    contain signals from multiple events. The catalog should contain events
    down to a low enough magnitude in relation to the events of interest.
    Overall, the algorithm is as follows:

    1) For each earthquake in the catalog, get the P-wave travel time
       and estimated PGA at this station.

    2) Compute the PGA (of the as-recorded horizontal channels).

    3) Select the P-wave arrival times across all events for this record
       that are (a) within the signal window, and (b) the predicted PGA is
       greater than pga_factor times the PGA from step #2.

    4) If any P-wave arrival times match the above criteria, then if any of
       the arrival times fall within the first pct_window_reject*100% of the
       signal window, then reject the record. Otherwise, trim the record such
       that the end time does not include any of the arrivals selected in
       step #3.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
            ScalarEvent object associated with the StationStream.
        catalog (list):
            List of ScalarEvent objects.
        travel_time_df (DataFrame):
            A pandas DataFrame that contains the travel time information
            (obtained from
            gmprocess.waveform_processing.phase.create_travel_time_dataframe).
            The columns in the DataFrame are the station ids and the indices
            are the earthquake ids.
        pga_factor (float):
            A decimal factor used to determine whether the predicted PGA from
            an event arrival is significant enough that it should be
            considered for removal.
        pct_window_reject (float):
            A decimal from 0.0 to 1.0 used to determine if an arrival should
            be trimmed from the record, or if the entire record should be
            rejected. If the arrival falls within the first
            pct_window_reject * 100% of the signal window, then the entire
            record will be rejected. Otherwise, the record will be trimmed
            appropriately.
        gmpe (str):
            Short name of the GMPE to use. Must be defined in the modules
            file.
        site_parameters (dict):
            Dictionary of site parameters to input to the GMPE.
        rupture_parameters:
            Dictionary of rupture parameters to input to the GMPE.

    Returns:
        StationStream: Processed stream.
    """
    if not st.passed:
        return st

    # Check that we know the signal split for each trace in the stream
    for tr in st:
        if not tr.hasParameter("signal_split"):
            return st

    signal_window_starttime = st[0].getParameter("signal_split")["split_time"]

    arrivals = travel_time_df[st[0].stats.network + "." + st[0].stats.station]
    arrivals = arrivals.sort_values()

    # Filter by any arrival times that appear in the signal window
    arrivals = arrivals[(arrivals > signal_window_starttime)
                        & (arrivals < st[0].stats.endtime)]

    # Make sure we remove the arrival that corresponds to the event of
    # interest
    if origin.id in arrivals.index:
        arrivals.drop(index=origin.id, inplace=True)

    if arrivals.empty:
        return st

    # Calculate the recorded PGA for this record
    stasum = StationSummary.from_stream(st, ["ROTD(50.0)"], ["PGA"])
    recorded_pga = stasum.get_pgm("PGA", "ROTD(50.0)")

    # Load the GMPE model
    gmpe = load_model(gmpe)

    # Generic context
    rctx = RuptureContext()

    # Make sure that site parameter values are converted to numpy arrays
    site_parameters_copy = site_parameters.copy()
    for k, v in site_parameters_copy.items():
        site_parameters_copy[k] = np.array([site_parameters_copy[k]])
    rctx.__dict__.update(site_parameters_copy)

    # Filter by arrivals that have significant expected PGA using GMPE
    is_significant = []
    for eqid, arrival_time in arrivals.items():
        event = next(event for event in catalog if event.id == eqid)

        # Set rupture parameters
        rctx.__dict__.update(rupture_parameters)
        rctx.mag = event.magnitude

        # TODO: distances should be calculated when we refactor to be
        # able to import distance calculations
        rctx.repi = np.array([
            gps2dist_azimuth(
                st[0].stats.coordinates.latitude,
                st[0].stats.coordinates.longitude,
                event.latitude,
                event.longitude,
            )[0] / 1000
        ])
        rctx.rjb = rctx.repi
        rctx.rhypo = np.sqrt(rctx.repi**2 + event.depth_km**2)
        rctx.rrup = rctx.rhypo
        rctx.sids = np.array(range(np.size(rctx.rrup)))
        pga, sd = gmpe.get_mean_and_stddevs(rctx, rctx, rctx, imt.PGA(), [])

        # Convert from ln(g) to %g
        predicted_pga = 100 * np.exp(pga[0])
        if predicted_pga > (pga_factor * recorded_pga):
            is_significant.append(True)
        else:
            is_significant.append(False)

    significant_arrivals = arrivals[is_significant]
    if significant_arrivals.empty:
        return st

    # Check if any of the significant arrivals occur within the first portion
    # of the signal window; if so, reject the whole record
    signal_length = st[0].stats.endtime - signal_window_starttime
    cutoff_time = signal_window_starttime + pct_window_reject * (signal_length)
    if (significant_arrivals < cutoff_time).any():
        for tr in st:
            tr.fail("A significant arrival from another event occurs within "
                    "the first %s percent of the signal window"
                    % (100 * pct_window_reject))

    # Otherwise, trim the stream at the first significant arrival
    else:
        for tr in st:
            signal_end = tr.getParameter("signal_end")
            signal_end["end_time"] = significant_arrivals[0]
            signal_end["method"] = "Trimming before arrival of another event"
            tr.setParameter("signal_end", signal_end)
        cut(st)

    return st
from openquake.hazardlib.gsim.base import RuptureContext
from openquake.hazardlib.gsim.base import DistancesContext
from openquake.hazardlib.gsim.base import SitesContext
from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014
from openquake.hazardlib import imt
import numpy as np
import gmpe as gm
import matplotlib.pyplot as plt

fig_dir = '/Users/vsahakian/anza/models/statistics/misc/oq_vs_matlab/'

# Set up the ASK14 GMPE, the intensity measure type, and the contexts
ASK14 = AbrahamsonEtAl2014()
IMT = imt.PGA()

rctx = RuptureContext()
dctx = DistancesContext()
sctx = SitesContext()
sctx_rock = SitesContext()

rctx.rake = 0.0
rctx.dip = 90.0
rctx.ztor = 7.13
rctx.mag = 3.0
#rctx.mag = np.linspace(0.1,5.)
rctx.width = 10.0
rctx.hypo_depth = 8.0

#dctx.rrup = np.logspace(1,np.log10(200),100)
dctx.rrup = np.logspace(np.log10(10),np.log10(10.0),1)
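# A plausible continuation of the fragment above (a sketch, not part of the
# original script): fill in the remaining site and distance parameters that
# ASK14 requires and evaluate the model for the single rupture distance. The
# vs30, z1pt0 and vs30measured values chosen here are illustrative assumptions.
from openquake.hazardlib.const import StdDev

sctx.vs30 = np.array([760.0])
sctx.vs30measured = np.array([False])
sctx.z1pt0 = np.array([50.0])
dctx.rjb = dctx.rrup
dctx.rx = dctx.rrup
dctx.ry0 = dctx.rrup

mean, stddevs = ASK14.get_mean_and_stddevs(sctx, rctx, dctx, IMT,
                                           [StdDev.TOTAL])
print(np.exp(mean), stddevs[0])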
def _parse_csv_line(headers, values): """ Parse a single line from data file. :param headers: A list of header names, the strings from the first line of csv file. :param values: A list of values of a single row to parse. :returns: A tuple of the following values (in specified order): sctx An instance of :class:`openquake.hazardlib.gsim.base.SitesContext` with attributes populated by the information from in row in a form of single-element numpy arrays. rctx An instance of :class:`openquake.hazardlib.gsim.base.RuptureContext`. dctx An instance of :class:`openquake.hazardlib.gsim.base.DistancesContext`. stddev_types An empty list, if the ``result_type`` column says "MEAN" for that row, otherwise it is a list with one item -- a requested standard deviation type. expected_results A dictionary mapping IMT-objects to one-element arrays of expected result values. Those results represent either standard deviation or mean value of corresponding IMT depending on ``result_type``. result_type A string literal, one of ``'STDDEV'`` or ``'MEAN'``. Value is taken from column ``result_type``. """ rctx = RuptureContext() sctx = SitesContext() dctx = DistancesContext() expected_results = {} stddev_types = result_type = damping = None for param, value in zip(headers, values): if param == 'result_type': value = value.upper() if value.endswith('_STDDEV'): # the row defines expected stddev results result_type = 'STDDEV' stddev_types = [getattr(const.StdDev, value[:-len('_STDDEV')])] else: # the row defines expected exponents of mean values assert value == 'MEAN' stddev_types = [] result_type = 'MEAN' elif param == 'damping': damping = float(value) elif param.startswith('site_'): # value is sites context object attribute if (param == 'site_vs30measured') or (param == 'site_backarc'): value = float(value) != 0 else: value = float(value) setattr(sctx, param[len('site_'):], numpy.array([value])) elif param.startswith('dist_'): # value is a distance measure value = float(value) setattr(dctx, param[len('dist_'):], numpy.array([value])) elif param.startswith('rup_'): # value is a rupture context attribute value = float(value) setattr(rctx, param[len('rup_'):], value) elif param == 'component_type': pass else: # value is the expected result (of result_type type) value = float(value) if param == 'pga': imt = PGA() elif param == 'pgv': imt = PGV() elif param == 'pgd': imt = PGD() elif param == 'cav': imt = CAV() else: period = float(param) assert damping is not None imt = SA(period, damping) expected_results[imt] = numpy.array([value]) assert result_type is not None return sctx, rctx, dctx, stddev_types, expected_results, result_type
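# A short sketch (hypothetical names flagged inline) showing how the tuple
# returned by _parse_csv_line can drive a check of one verification-table row;
# `headers`, `values` and `gsim` are assumed to come from the surrounding test
# harness and are not defined here.
sctx, rctx, dctx, stddev_types, expected, result_type = _parse_csv_line(
    headers, values)
for im, expected_value in expected.items():
    mean, stddevs = gsim.get_mean_and_stddevs(sctx, rctx, dctx, im,
                                              stddev_types)
    if result_type == 'MEAN':
        # verification tables list exp(mean) rather than ln values
        numpy.testing.assert_allclose(numpy.exp(mean), expected_value,
                                      rtol=1e-4)
    else:
        numpy.testing.assert_allclose(stddevs[0], expected_value, rtol=1e-4)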
def signal_end(st, event_time, event_lon, event_lat, event_mag, method=None, vmin=None, floor=None, model=None, epsilon=2.0): """ Estimate end of signal by using a model of the 5-95% significant duration, and adding this value to the "signal_split" time. This probably only works well when the split is estimated with a p-wave picker since the velocity method often ends up with split times that are well before signal actually starts. Args: st (StationStream): Stream of data. event_time (UTCDateTime): Event origin time. event_mag (float): Event magnitude. event_lon (float): Event longitude. event_lat (float): Event latitude. method (str): Method for estimating signal end time. Either 'velocity' or 'model'. vmin (float): Velocity (km/s) for estimating end of signal. Only used if method="velocity". floor (float): Minimum duration (sec) applied along with vmin. model (str): Short name of duration model to use. Must be defined in the gmprocess/data/modules.yml file. epsilon (float): Number of standard deviations; if epsilon is 1.0, then the signal window duration is the mean Ds + 1 standard deviation. Only used for method="model". Returns: trace with stats dict updated to include a stats['processing_parameters']['signal_end'] dictionary. """ # Load openquake stuff if method="model" if method == "model": mod_file = pkg_resources.resource_filename( 'gmprocess', os.path.join('data', 'modules.yml')) with open(mod_file, 'r') as f: mods = yaml.load(f) # Import module cname, mpath = mods['modules'][model] dmodel = getattr(import_module(mpath), cname)() # Set some "conservative" inputs (in that they will tend to give # larger durations). sctx = SitesContext() sctx.vs30 = np.array([180.0]) sctx.z1pt0 = np.array([0.51]) rctx = RuptureContext() rctx.mag = event_mag rctx.rake = -90.0 dur_imt = imt.from_string('RSD595') stddev_types = [const.StdDev.INTRA_EVENT] for tr in st: if not tr.hasParameter('signal_split'): continue if method == "velocity": if vmin is None: raise ValueError('Must specify vmin if method is "velocity".') if floor is None: raise ValueError('Must specify floor if method is "velocity".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 end_time = event_time + max(floor, epi_dist / vmin) elif method == "model": if model is None: raise ValueError('Must specify model if method is "model".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 dctx = DistancesContext() # Repi >= Rrup, so substitution here should be conservative # (leading to larger durations). dctx.rrup = np.array([epi_dist]) lnmu, lnstd = dmodel.get_mean_and_stddevs( sctx, rctx, dctx, dur_imt, stddev_types) duration = np.exp(lnmu + epsilon * lnstd[0]) # Get split time split_time = tr.getParameter('signal_split')['split_time'] end_time = split_time + float(duration) else: raise ValueError('method must be either "velocity" or "model".') # Update trace params end_params = { 'end_time': end_time, 'method': method, 'vsplit': vmin, 'floor': floor, 'model': model, 'epsilon': epsilon } tr.setParameter('signal_end', end_params) return st
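# Hedged usage sketch for signal_end above; the stream and origin variables
# come from the surrounding processing workflow, and the duration-model key
# "AS16" is an assumed entry in gmprocess/data/modules.yml, not something
# verified here.
st = signal_end(
    st,                              # StationStream with 'signal_split' set
    event_time=origin.time,
    event_lon=origin.longitude,
    event_lat=origin.latitude,
    event_mag=origin.magnitude,
    method="model",
    model="AS16",                    # assumed duration-model short name
    epsilon=2.0)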