def test1(self):
    site1_pga_poe_expected = [0.0639157, 0.03320212, 0.02145989]
    site2_pga_poe_expected = [0.06406232, 0.02965879, 0.01864331]
    site1_pgd_poe_expected = [0.16146619, 0.1336553]
    site2_pgd_poe_expected = [0.15445961, 0.13437589]

    curves = hazard_curves_poissonian(self.sources, self.sites, self.imts,
                                      self.time_span, self.gsims,
                                      self.truncation_level)
    self.assertIsInstance(curves, dict)
    self.assertEqual(set(curves.keys()), set([imt.PGA(), imt.PGD()]))

    pga_curves = curves[imt.PGA()]
    self.assertIsInstance(pga_curves, numpy.ndarray)
    self.assertEqual(pga_curves.shape, (2, 3))  # two sites, three IMLs
    site1_pga_poe, site2_pga_poe = pga_curves
    self.assertTrue(numpy.allclose(site1_pga_poe, site1_pga_poe_expected),
                    str(site1_pga_poe))
    self.assertTrue(numpy.allclose(site2_pga_poe, site2_pga_poe_expected),
                    str(site2_pga_poe))

    pgd_curves = curves[imt.PGD()]
    self.assertIsInstance(pgd_curves, numpy.ndarray)
    self.assertEqual(pgd_curves.shape, (2, 2))  # two sites, two IMLs
    site1_pgd_poe, site2_pgd_poe = pgd_curves
    self.assertTrue(numpy.allclose(site1_pgd_poe, site1_pgd_poe_expected),
                    str(site1_pgd_poe))
    self.assertTrue(numpy.allclose(site2_pgd_poe, site2_pgd_poe_expected),
                    str(site2_pgd_poe))
def test_retreival_tables_good_no_interp(self):
    """
    Tests the retrieval of the IML tables for 'good' conditions without
    applying magnitude interpolations
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    # PGA
    np.testing.assert_array_almost_equal(
        gsim._return_tables(6.0, imt_module.PGA(), "IMLs"),
        np.array([2., 1., 0.5]))
    # PGV
    np.testing.assert_array_almost_equal(
        gsim._return_tables(6.0, imt_module.PGV(), "IMLs"),
        np.array([20., 10., 5.]), 5)
    # SA(1.0)
    np.testing.assert_array_almost_equal(
        gsim._return_tables(6.0, imt_module.SA(1.0), "IMLs"),
        np.array([2.0, 1., 0.5]))
    # Also for standard deviations
    np.testing.assert_array_almost_equal(
        gsim._return_tables(6.0, imt_module.PGA(), "Total"),
        0.5 * np.ones(3))
    np.testing.assert_array_almost_equal(
        gsim._return_tables(6.0, imt_module.SA(1.0), "Total"),
        0.8 * np.ones(3))
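# For magnitudes that fall between the tabulated values, GMPETable
# interpolates the IMLs between the bracketing magnitude levels. A minimal
# sketch of that idea, assuming plain linear interpolation in magnitude with
# illustrative table values (the real implementation inside GMPETable may
# differ in detail):
import numpy as np

mags = np.array([5.5, 6.5])          # hypothetical tabulated magnitudes
imls = np.array([[1.0, 0.5, 0.25],   # IMLs at Mw 5.5 (illustrative)
                 [2.0, 1.0, 0.5]])   # IMLs at Mw 6.5 (illustrative)


def interp_imls(mag):
    # Interpolate each IML column linearly in magnitude
    return np.array([np.interp(mag, mags, imls[:, j])
                     for j in range(imls.shape[1])])


print(interp_imls(6.0))  # -> [1.5, 0.75, 0.375], midway between the rows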
def test_get_mean_and_stddevs_good(self):
    """
    Tests the full execution of the GMPE tables for valid data
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    ctx = RuptureContext()
    ctx.mag = 6.0
    # Test values at the given distances and those outside range
    ctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
    ctx.vs30 = 1000. * np.ones(5)
    ctx.sids = np.arange(5)
    stddevs = [const.StdDev.TOTAL]
    expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20])
    expected_sigma = 0.25 * np.ones(5)
    imts = [imt_module.PGA(), imt_module.SA(1.0), imt_module.PGV()]
    # PGA
    mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[0], stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
    # SA
    mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[1], stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
    # PGV
    mean, sigma = gsim.get_mean_and_stddevs(ctx, ctx, ctx, imts[2], stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
    # StdDev.ALL check
    contexts.get_mean_stds([gsim], ctx, imts)
def test_get_mean_and_stddevs(self):
    """
    Tests mean and standard deviations without amplification
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    rctx = RuptureContext()
    rctx.mag = 6.0
    dctx = DistancesContext()
    # Test values at the given distances and those outside range
    dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
    sctx = SitesContext()
    stddevs = [const.StdDev.TOTAL]
    expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20])
    # PGA
    mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                            imt_module.PGA(), stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
    # SA
    mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                            imt_module.SA(1.0), stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5)
    # PGV
    mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                            imt_module.PGV(), stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
def test_get_mean_and_stddevs_good_amplified(self):
    """
    Tests the full execution of the GMPE tables for valid data with
    amplification
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    rctx = RuptureContext()
    rctx.mag = 6.0
    dctx = DistancesContext()
    # Test values at the given distances and those outside range
    dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
    sctx = SitesContext()
    sctx.vs30 = 100. * np.ones(5)
    stddevs = [const.StdDev.TOTAL]
    expected_mean = np.array([20., 20., 10., 5., 1.0E-19])
    expected_sigma = 0.25 * np.ones(5)
    # PGA
    mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                            imt_module.PGA(), stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
    # SA
    mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                            imt_module.SA(1.0), stddevs)
    np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
    np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
def test_get_mean_table(self, idx=0):
    """
    Test the retrieval of the mean amplification tables for a given
    magnitude and IMT
    """
    rctx = RuptureContext()
    rctx.mag = 6.0
    # PGA
    expected_table = np.ones([10, 2])
    expected_table[:, self.IDX] *= 1.5
    np.testing.assert_array_almost_equal(
        self.amp_table.get_mean_table(imt_module.PGA(), rctx),
        expected_table)
    # SA
    expected_table[:, self.IDX] = 2.0 * np.ones(10)
    np.testing.assert_array_almost_equal(
        self.amp_table.get_mean_table(imt_module.SA(0.5), rctx),
        expected_table)
    # SA (period interpolation)
    interpolator = interp1d(np.log10(self.amp_table.periods),
                            np.log10(np.array([1.5, 2.0, 0.5])))
    period = 0.3
    expected_table[:, self.IDX] = (
        10.0 ** interpolator(np.log10(period))) * np.ones(10)
    np.testing.assert_array_almost_equal(
        self.amp_table.get_mean_table(imt_module.SA(period), rctx),
        expected_table)
def test_get_amplification_factors(self):
    """
    Tests the amplification tables
    """
    ctx = RuptureContext()
    ctx.rake = 45.0
    ctx.mag = 6.0
    # Take distances at the values found in the table (not checking
    # distance interpolation)
    ctx.rjb = np.copy(self.amp_table.distances[:, 0, 0])
    # Test a Vs30 of 700.0 m/s, the midpoint between the 400 m/s and
    # 1000 m/s specified in the table
    ctx.vs30 = 700.0 * np.ones_like(ctx.rjb)
    stddevs = [const.StdDev.TOTAL]
    expected_mean = np.ones_like(ctx.rjb)
    # Check PGA and PGV
    mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
        imt_module.PGA(), ctx, ctx.rjb, stddevs)
    np.testing.assert_array_almost_equal(
        mean_amp, midpoint(1.0, 1.5) * expected_mean)
    np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
    mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
        imt_module.PGV(), ctx, ctx.rjb, stddevs)
    np.testing.assert_array_almost_equal(
        mean_amp, midpoint(1.0, 0.5) * expected_mean)
    np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
    # SA(0.5)
    mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
        imt_module.SA(0.5), ctx, ctx.rjb, stddevs)
    np.testing.assert_array_almost_equal(
        mean_amp, midpoint(1.0, 2.0) * expected_mean)
    np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
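# A Vs30 of 700 m/s sits halfway between the 400 m/s and 1000 m/s table
# levels, so each amplification factor above is the midpoint of the two
# tabulated values. A minimal sketch of that linear interpolation, using the
# illustrative PGA factors from the test:
import numpy as np

vs30_levels = np.array([400.0, 1000.0])
pga_amps = np.array([1.0, 1.5])  # tabulated PGA amplification at each level

amp = np.interp(700.0, vs30_levels, pga_amps)
print(amp)  # -> 1.25, i.e. midpoint(1.0, 1.5)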
def test_retreival_tables_outside_mag_range(self):
    """
    Tests that an error is raised when inputting a magnitude value
    outside the supported range
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    with self.assertRaises(ValueError) as ve:
        gsim._return_tables(7.5, imt_module.PGA(), "IMLs")
    self.assertEqual(
        str(ve.exception),
        "Magnitude 7.50 outside of supported range (5.00 to 7.00)")
def test_calculation_mean(self):
    DATA_FILE = data / "AMERI2017/A17_Rjb_Homoscedastic_MEAN.csv"
    stddev_types = [const.StdDev.TOTAL]  # Unused here
    sctx = gsim.base.SitesContext()
    rctx = gsim.base.RuptureContext()
    dctx = gsim.base.DistancesContext()
    with open(DATA_FILE, 'r') as f:
        # Read periods from header:
        header = f.readline()
        periods = header.strip().split(',')[7:]  # Periods in s.
        for line in f:
            arr_str = line.strip().split(',')
            # Replace result_type string by any float-convertible value
            arr_str[5] = "9999"
            arr = np.float_(arr_str)
            # Setting ground-motion attributes:
            setattr(rctx, 'mag', arr[0])
            setattr(rctx, 'rake', arr[1])
            stress_drop = arr[2]
            setattr(dctx, 'rjb', arr[3])
            setattr(sctx, 'vs30', np.array([arr[4]]))
            damp = arr[6]
            # Compute ground motion:
            gmpe = gsim.ameri_2017.AmeriEtAl2017RjbStressDrop(
                norm_stress_drop=stress_drop)
            for k in range(len(periods)):
                per = periods[k]
                # Convert value to ln(SA) with SA in units of g
                value = np.log(arr[7 + k])
                if per == 'pga':
                    P = imt.PGA()
                else:
                    P = imt.SA(period=np.float_(per), damping=damp)
                mean = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, P,
                                                 stddev_types)[0]
                np.testing.assert_almost_equal(mean, value,
                                               decimal=MEAN_DECIMAL)
def test_get_sigma_table(self):
    """
    Test the retrieval of the standard deviation modification tables
    for a given magnitude and IMT
    """
    rctx = RuptureContext()
    rctx.mag = 6.0
    # PGA
    expected_table = np.ones([10, 2])
    expected_table[:, self.IDX] *= 0.8
    stddevs = ["Total"]
    pga_table = self.amp_table.get_sigma_tables(imt_module.PGA(), rctx,
                                                stddevs)[0]
    np.testing.assert_array_almost_equal(pga_table, expected_table)
    # SA (for coverage)
    sa_table = self.amp_table.get_sigma_tables(imt_module.SA(0.3), rctx,
                                               stddevs)[0]
    np.testing.assert_array_almost_equal(sa_table, expected_table)
def setUp(self):
    self.orig_make_contexts = ContextMaker.make_contexts
    ContextMaker.make_contexts = lambda self, sites, rupture: (
        FakeSiteContext(sites), rupture, None)
    self.truncation_level = 3.4
    self.imts = {'PGA': [1, 2, 3], 'PGD': [2, 4]}
    self.time_span = 49.2

    rup11 = self.FakeRupture(0.23, const.TRT.ACTIVE_SHALLOW_CRUST)
    rup12 = self.FakeRupture(0.15, const.TRT.ACTIVE_SHALLOW_CRUST)
    rup21 = self.FakeRupture(0.04, const.TRT.VOLCANIC)
    self.source1 = self.FakeSource(1, [rup11, rup12], self.time_span,
                                   const.TRT.ACTIVE_SHALLOW_CRUST)
    self.source2 = self.FakeSource(2, [rup21], self.time_span,
                                   const.TRT.VOLCANIC)
    self.sources = [self.source1, self.source2]

    site1 = Site(Point(10, 20), 1, True, 2, 3)
    site2 = Site(Point(20, 30), 2, False, 4, 5)
    self.sites = SiteCollection([site1, site2])

    gsim1 = self.FakeGSIM(self.truncation_level, self.imts, poes={
        (site1.location.latitude, rup11, imt.PGA()): [0.1, 0.05, 0.03],
        (site2.location.latitude, rup11, imt.PGA()): [0.11, 0.051, 0.034],
        (site1.location.latitude, rup12, imt.PGA()): [0.12, 0.052, 0.035],
        (site2.location.latitude, rup12, imt.PGA()): [0.13, 0.053, 0.036],
        (site1.location.latitude, rup11, imt.PGD()): [0.4, 0.33],
        (site2.location.latitude, rup11, imt.PGD()): [0.39, 0.331],
        (site1.location.latitude, rup12, imt.PGD()): [0.38, 0.332],
        (site2.location.latitude, rup12, imt.PGD()): [0.37, 0.333],
    })
    gsim2 = self.FakeGSIM(self.truncation_level, self.imts, poes={
        (site1.location.latitude, rup21, imt.PGA()): [0.5, 0.3, 0.2],
        (site2.location.latitude, rup21, imt.PGA()): [0.4, 0.2, 0.1],
        (site1.location.latitude, rup21, imt.PGD()): [0.24, 0.08],
        (site2.location.latitude, rup21, imt.PGD()): [0.14, 0.09],
    })
    self.gsims = {const.TRT.ACTIVE_SHALLOW_CRUST: gsim1,
                  const.TRT.VOLCANIC: gsim2}
def test_calculation_std_inter(self):
    DATA_FILE = (data /
                 "AMERI2017/A17_Repi_Homoscedastic_INTER_EVENT_STDDEV.csv")
    stddev_types = [const.StdDev.INTER_EVENT]
    ctx = gsim.base.RuptureContext()
    with open(DATA_FILE, 'r') as f:
        # Read periods from header:
        header = f.readline()
        periods = header.strip().split(',')[7:]  # Periods in s.
        for line in f:
            arr_str = line.strip().split(',')
            # Replace result_type string by any float-convertible value
            arr_str[5] = "9999"
            arr = np.float_(arr_str)
            # Setting ground-motion attributes:
            setattr(ctx, 'mag', arr[0])
            setattr(ctx, 'rake', arr[1])
            stress_drop = arr[2]
            setattr(ctx, 'repi', arr[3])
            setattr(ctx, 'vs30', np.array([arr[4]]))
            damp = arr[6]
            ctx.sids = [0]
            # Compute ground motion:
            gmpe = gsim.ameri_2017.AmeriEtAl2017RepiStressDrop(
                norm_stress_drop=stress_drop)
            for k in range(len(periods)):
                per = periods[k]
                value = arr[7 + k]
                if per == 'pga':
                    P = imt.PGA()
                else:
                    P = imt.SA(period=np.float_(per), damping=damp)
                std = gmpe.get_mean_and_stddevs(ctx, ctx, ctx, P,
                                                stddev_types)[1][0]
                np.testing.assert_almost_equal(std, value,
                                               decimal=STDDEV_DECIMAL)
def test_get_mean_stddevs_unsupported_stddev(self):
    """
    Tests the execution of the GMPE with an unsupported standard
    deviation type
    """
    gsim = GMPETable(gmpe_table=self.TABLE_FILE)
    rctx = RuptureContext()
    rctx.mag = 6.0
    dctx = DistancesContext()
    # Test values at the given distances and those outside range
    dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
    sctx = SitesContext()
    sctx.vs30 = 1000. * np.ones(5)
    stddevs = [const.StdDev.TOTAL, const.StdDev.INTER_EVENT]
    with self.assertRaises(ValueError) as ve:
        gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(),
                                  stddevs)
    self.assertEqual(
        str(ve.exception),
        "Standard Deviation type Inter event not supported")
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    # List of GMPE weights, which is the product of the branch weights
    # for the seed models vs the NGA East resampled models, as well as
    # the weights for the individual GMPEs as defined by Petersen
    # et al. (2019).
    #
    # Note that the NGA East resampled models are a function of spectral
    # period.
    #
    # NGA East Seeds (1/3)
    # ├── B_bca10d (0.06633), wts = 0.333 * 0.06633 = 0.02208789
    # ├── B_ab95 (0.02211), wts = 0.333 * 0.02211 = 0.00736263
    # ...
    # NGA East Resampled or "USGS" (2/3)
    # ├── Model 1 (0.1009 for PGA), wts = 0.667 * 0.1009 = 0.0673003
    # ├── Model 2 (0.1606 for PGA), wts = 0.667 * 0.1606 = 0.1071202
    # ...
    wts = [0] * len(self.gmpes)

    # Is IMT PGA or PGV?
    is_pga = imt == IMT.PGA()
    is_pgv = imt == IMT.PGV()

    # Is magnitude less than 4? If so, we will need to set it to 4.0 and
    # then extrapolate the tables at the end.
    if rup.mag < 4.0:
        is_small_mag = True
        delta_mag = rup.mag - 4.0
        rup.mag = 4.0
    else:
        is_small_mag = False

    for i, tp in enumerate(self.ALL_TABLE_PATHS):
        if 'usgs' in tp:
            # Get model number from i-th path using regex
            mod_num = int(re.search(r'\d+', tp).group())
            coefs = np.array(self.NGA_EAST_USGS.iloc[mod_num - 1])
            # Is the IMT PGA, PGV, or SA?
            if is_pga:
                iweight = coefs[-2]
            elif is_pgv:
                iweight = coefs[-1]
            else:
                # For SA, need to interpolate; we'll use log-period and
                # linear-weight interpolation.
                iweight = np.interp(
                    np.log(imt.period),
                    np.log(self.per_array),
                    coefs[self.per_idx_start:self.per_idx_end])
            wts[i] = self.NGA_EAST_USGS_WEIGHT * iweight
        else:
            # Strip off the cruft to get the string we need to match
            str_match = tp.replace('nga_east_', '').replace('.hdf5', '')
            matched = self.NGA_EAST_SEEDS[
                self.NGA_EAST_SEEDS['model'] == str_match]
            if len(matched):
                iweight = matched.iloc[0, 1]
                wts[i] = self.NGA_EAST_SEED_WEIGHT * iweight

    total_gmpe_weights = self.sigma_weights * wts
    if not np.allclose(np.sum(total_gmpe_weights), 1.0):
        raise ValueError('Weights must sum to 1.0.')

    mean = np.full_like(sites.vs30, 0)
    stddevs = []
    for i in range(len(stddev_types)):
        stddevs.append(np.full_like(sites.vs30, 0))

    # Apply max distance to dists.rrup
    # (NB: np.clip does not modify rrup in place here)
    np.clip(dists.rrup, 0, MAX_RRUP)

    # Since we will be dropping the models that don't have PGV, we now
    # also need to track the total sum of weights for when the IMT is
    # PGV so that we can redistribute the weights.
    if is_pgv:
        twts = []

    # Loop over GMPEs
    for i, gm in enumerate(self.gmpes):
        if is_pgv:
            # Is PGV and also not available for gm?
            try:
                gm._return_tables(rup.mag, imt, "IMLs")
            except KeyError:
                continue
            except Exception:
                logging.error("Unexpected error: %s", sys.exc_info()[0])
        tmean, tstddevs = gm.get_mean_and_stddevs(
            sites, rup, dists, imt, stddev_types)
        mean += tmean * total_gmpe_weights[i]
        for j, sd in enumerate(tstddevs):
            stddevs[j] += sd * total_gmpe_weights[i]
        if is_pgv:
            twts.append(total_gmpe_weights[i])

    if is_pgv:
        # Rescale the PGV weights so that they sum to 1 after dropping
        # the models that are not defined for PGV.
        mean = mean / np.sum(twts)
        for j, sd in enumerate(stddevs):
            stddevs[j] = stddevs[j] / np.sum(twts)

    # Zero out values at distances beyond the range for which NGA East
    # was defined.
    mean[dists.rrup > MAX_RRUP] = -999.0

    # Do we need to extrapolate for the small-magnitude factor?
    if is_small_mag:
        if is_pga:
            slopes = np.interp(np.log(dists.rrup),
                               np.log(self.SMALL_M_DIST),
                               self.SMALL_M_SLOPE_PGA)
        elif is_pgv:
            slopes = np.interp(np.log(dists.rrup),
                               np.log(self.SMALL_M_DIST),
                               self.SMALL_M_SLOPE_PGV)
        else:
            interp_obj = RectBivariateSpline(
                np.log(self.SMALL_M_DIST), np.log(self.SMALL_M_PER),
                self.SMALL_M_SLOPE, kx=1, ky=1)
            slopes = interp_obj.ev(np.log(dists.rrup), np.log(imt.period))
        mean = mean + slopes * delta_mag

    return mean, stddevs
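# The small-magnitude adjustment above is linear in magnitude: the tabulated
# slopes (interpolated in log distance, and in log period for SA) are scaled
# by delta_mag = mag - 4.0, which is negative here, pulling the ln-mean down.
# A minimal numeric sketch of the PGA branch, with illustrative slope-table
# values (not the real SMALL_M_* data):
import numpy as np

small_m_dist = np.array([1.0, 10.0, 100.0, 1000.0])
small_m_slope_pga = np.array([0.8, 0.9, 1.0, 1.1])

rrup = np.array([5.0, 50.0])
delta_mag = 3.5 - 4.0  # rupture magnitude below the Mw 4 table floor

slopes = np.interp(np.log(rrup), np.log(small_m_dist), small_m_slope_pga)
adjusted_mean = -1.0 + slopes * delta_mag  # a ln-mean of -1.0 shifted down
print(adjusted_mean)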
def test_pickeable(self):
    for imt in (imt_module.PGA(), imt_module.SA(0.2)):
        imt_pik = pickle.dumps(imt, pickle.HIGHEST_PROTOCOL)
        self.assertEqual(pickle.loads(imt_pik), imt)
def compute_cs(t_cs, bgmpe, sctx, rctx, dctx, im_type, t_star, rrup, mag,
               avg_periods, corr_type, im_star, gmpe_input):
    """
    Compute the conditional spectrum according to the procedure outlined
    in Baker JW, Lee C. An Improved Algorithm for Selecting Ground Motions
    to Match a Conditional Spectrum. J Earthq Eng 2018;22:708-23.
    https://doi.org/10.1080/13632469.2016.1264334.

    When the IM and the GMM are defined for the maximum of the two
    horizontal components, the Boore and Kishida (2017) relationship is
    applied to convert the maximum of the two horizontal components into
    `RotD50`. This is done only for `PGA` and `SA`.
    """
    import numpy as np
    import sys

    from openquake.hazardlib import imt, const, gsim

    from .compute_avgSA import compute_rho_avgsa
    from .modified_akkar_correlation_model import \
        ModifiedAkkarCorrelationModel

    # Use the same periods as the available spectra to construct the
    # conditional spectrum
    p = []
    s = [const.StdDev.TOTAL]
    if im_type == 'AvgSA':
        _ = gsim.get_available_gsims()
        p = imt.AvgSA()
        mgmpe = gsim.mgmpe.generic_gmpe_avgsa.GenericGmpeAvgSA(
            gmpe_name=gmpe_input, avg_periods=avg_periods,
            corr_func=corr_type)
        mu_im_cond, sigma_im_cond = mgmpe.get_mean_and_stddevs(
            sctx, rctx, dctx, p, s)
    else:
        if im_type == 'PGA':
            p = imt.PGA()
        else:
            p = imt.SA(t_star)
        s = [const.StdDev.TOTAL]
        mu_im_cond, sigma_im_cond = bgmpe().get_mean_and_stddevs(
            sctx, rctx, dctx, p, s)
    sigma_im_cond = sigma_im_cond[0]

    if (bgmpe.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT ==
            'Greater of two horizontal'):
        if im_type == 'PGA' or im_type == 'SA':
            from shakelib.conversions.imc.boore_kishida_2017 import \
                BooreKishida2017

            bk17 = BooreKishida2017(const.IMC.GREATER_OF_TWO_HORIZONTAL,
                                    const.IMC.RotD50)
            mu_im_cond = bk17.convertAmps(p, mu_im_cond, rrup, float(mag))
            sigma_im_cond = bk17.convertSigmas(p, sigma_im_cond[0])
        else:
            sys.exit('Error: conversion between intensity measures is not '
                     'possible for AvgSA')

    # Compute how many standard deviations the PSHA differs from
    # the GMPE value
    epsilon = (np.log(im_star) - mu_im_cond) / sigma_im_cond

    mu_im = np.zeros(len(t_cs))
    sigma_im = np.zeros(len(t_cs))
    rho_t_tstar = np.zeros(len(t_cs))
    mu_im_im_cond = np.zeros(len(t_cs))

    for i in range(len(t_cs)):
        # Get the GMPE output for a rupture scenario
        if t_cs[i] == 0.:
            p = imt.PGA()
        else:
            p = imt.SA(t_cs[i])
        s = [const.StdDev.TOTAL]
        mu0, sigma0 = bgmpe().get_mean_and_stddevs(sctx, rctx, dctx, p, s)

        if (bgmpe.DEFINED_FOR_INTENSITY_MEASURE_COMPONENT ==
                'Greater of two horizontal'):
            if im_type == 'PGA' or im_type == 'SA':
                from shakelib.conversions.imc.boore_kishida_2017 import \
                    BooreKishida2017

                bk17 = BooreKishida2017(const.IMC.GREATER_OF_TWO_HORIZONTAL,
                                        const.IMC.RotD50)
                mu0 = bk17.convertAmps(p, mu0, rrup, float(mag))
                sigma0 = bk17.convertSigmas(p, sigma0[0])

        mu_im[i] = mu0[0]
        sigma_im[i] = sigma0[0][0]
        rho = None
        if im_type == 'AvgSA':
            rho = compute_rho_avgsa(t_cs[i], avg_periods, sctx, rctx,
                                    dctx, sigma_im_cond, bgmpe, corr_type)
            rho = rho[0]
        else:
            if corr_type == 'baker_jayaram':
                rho = gsim.mgmpe.generic_gmpe_avgsa.\
                    BakerJayaramCorrelationModel([t_cs[i], t_star])(0, 1)
            if corr_type == 'akkar':
                rho = ModifiedAkkarCorrelationModel([t_cs[i], t_star])(0, 1)
        rho_t_tstar[i] = rho
        # Get the value of the CMS
        mu_im_im_cond[i] = \
            mu_im[i] + rho_t_tstar[i] * epsilon[0] * sigma_im[i]

    # Compute covariances and correlations at all periods
    cov = np.zeros((len(t_cs), len(t_cs)))
    for i in np.arange(len(t_cs)):
        for j in np.arange(len(t_cs)):
            var1 = sigma_im[i] ** 2
            var2 = sigma_im[j] ** 2
            var_tstar = sigma_im_cond ** 2

            sigma_corr = []
            if corr_type == 'baker_jayaram':
                sigma_corr = gsim.mgmpe.generic_gmpe_avgsa.\
                    BakerJayaramCorrelationModel(
                        [t_cs[i], t_cs[j]])(0, 1) * np.sqrt(var1 * var2)
            if corr_type == 'akkar':
                sigma_corr = ModifiedAkkarCorrelationModel(
                    [t_cs[i], t_cs[j]])(0, 1) * np.sqrt(var1 * var2)

            sigma11 = np.matrix([[var1, sigma_corr], [sigma_corr, var2]])
            sigma22 = np.array(var_tstar)
            sigma12 = np.array([
                rho_t_tstar[i] * np.sqrt(var1 * var_tstar),
                rho_t_tstar[j] * np.sqrt(var_tstar * var2)
            ])
            sigma_cond = sigma11 - sigma12 * 1. / (sigma22) * sigma12.T
            cov[i, j] = sigma_cond[0, 1]

    # Find covariance values of zero and set them to a small number
    # so that random number generation can be performed
    cov[np.absolute(cov) < 1e-10] = 1e-10
    stdevs = np.sqrt(np.diagonal(cov))

    return mu_im_im_cond, cov, stdevs
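# The core of the conditional mean spectrum computed above is the update
# mu_CMS(T) = mu(T) + rho(T, T*) * epsilon * sigma(T), where epsilon measures
# how many standard deviations the target IM lies from the GMPE ln-median at
# the conditioning period T*. A standalone sketch with illustrative numbers:
import numpy as np

mu_tstar, sigma_tstar = np.log(0.20), 0.60  # ln-mean and sigma at T*
mu_t, sigma_t = np.log(0.35), 0.65          # ln-mean and sigma at T
rho = 0.7                                   # correlation rho(T, T*)
im_star = 0.30                              # target IM (e.g. from PSHA), in g

epsilon = (np.log(im_star) - mu_tstar) / sigma_tstar
mu_cms = mu_t + rho * epsilon * sigma_t     # conditional ln-mean at T
print(np.exp(mu_cms))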
def trim_multiple_events(
    st,
    origin,
    catalog,
    travel_time_df,
    pga_factor,
    pct_window_reject,
    gmpe,
    site_parameters,
    rupture_parameters,
):
    """
    Uses a catalog (list of ScalarEvents) to handle cases where a trace
    might contain signals from multiple events. The catalog should contain
    events down to a low enough magnitude in relation to the events of
    interest.

    Overall, the algorithm is as follows:

    1) For each earthquake in the catalog, get the P-wave travel time
       and estimated PGA at this station.

    2) Compute the PGA (of the as-recorded horizontal channels).

    3) Select the P-wave arrival times across all events for this record
       that are (a) within the signal window, and (b) the predicted PGA is
       greater than pga_factor times the PGA from step #1.

    4) If any P-wave arrival times match the above criteria, then if any
       of the arrival times fall within the first pct_window_reject*100%
       of the signal window, then reject the record. Otherwise, trim the
       record such that the end time does not include any of the arrivals
       selected in step #3.

    Args:
        st (StationStream):
            Stream of data.
        origin (ScalarEvent):
            ScalarEvent object associated with the StationStream.
        catalog (list):
            List of ScalarEvent objects.
        travel_time_df (DataFrame):
            A pandas DataFrame that contains the travel time information
            (obtained from
            gmprocess.waveform_processing.phase.create_travel_time_dataframe).
            The columns in the DataFrame are the station ids and the
            indices are the earthquake ids.
        pga_factor (float):
            A decimal factor used to determine whether the predicted PGA
            from an event arrival is significant enough that it should be
            considered for removal.
        pct_window_reject (float):
            A decimal from 0.0 to 1.0 used to determine if an arrival
            should be trimmed from the record, or if the entire record
            should be rejected. If the arrival falls within the first
            pct_window_reject * 100% of the signal window, then the entire
            record will be rejected. Otherwise, the record will be trimmed
            appropriately.
        gmpe (str):
            Short name of the GMPE to use. Must be defined in the modules
            file.
        site_parameters (dict):
            Dictionary of site parameters to input to the GMPE.
        rupture_parameters:
            Dictionary of rupture parameters to input to the GMPE.

    Returns:
        StationStream: Processed stream.
    """
    if not st.passed:
        return st

    # Check that we know the signal split for each trace in the stream
    for tr in st:
        if not tr.hasParameter("signal_split"):
            return st

    signal_window_starttime = st[0].getParameter("signal_split")["split_time"]

    arrivals = travel_time_df[st[0].stats.network + "." +
                              st[0].stats.station]
    arrivals = arrivals.sort_values()

    # Filter by any arrival times that appear in the signal window
    arrivals = arrivals[(arrivals > signal_window_starttime)
                        & (arrivals < st[0].stats.endtime)]

    # Make sure we remove the arrival that corresponds to the event of
    # interest
    if origin.id in arrivals.index:
        arrivals.drop(index=origin.id, inplace=True)

    if arrivals.empty:
        return st

    # Calculate the recorded PGA for this record
    stasum = StationSummary.from_stream(st, ["ROTD(50.0)"], ["PGA"])
    recorded_pga = stasum.get_pgm("PGA", "ROTD(50.0)")

    # Load the GMPE model
    gmpe = load_model(gmpe)

    # Generic context
    rctx = RuptureContext()

    # Make sure that site parameter values are converted to numpy arrays
    site_parameters_copy = site_parameters.copy()
    for k, v in site_parameters_copy.items():
        site_parameters_copy[k] = np.array([v])
    rctx.__dict__.update(site_parameters_copy)

    # Filter by arrivals that have significant expected PGA using the GMPE
    is_significant = []
    for eqid, arrival_time in arrivals.items():
        event = next(event for event in catalog if event.id == eqid)

        # Set rupture parameters
        rctx.__dict__.update(rupture_parameters)
        rctx.mag = event.magnitude

        # TODO: distances should be calculated when we refactor to be
        # able to import distance calculations
        rctx.repi = np.array([
            gps2dist_azimuth(
                st[0].stats.coordinates.latitude,
                st[0].stats.coordinates.longitude,
                event.latitude,
                event.longitude,
            )[0] / 1000
        ])
        rctx.rjb = rctx.repi
        rctx.rhypo = np.sqrt(rctx.repi ** 2 + event.depth_km ** 2)
        rctx.rrup = rctx.rhypo
        rctx.sids = np.array(range(np.size(rctx.rrup)))
        pga, sd = gmpe.get_mean_and_stddevs(rctx, rctx, rctx, imt.PGA(), [])

        # Convert from ln(g) to %g
        predicted_pga = 100 * np.exp(pga[0])
        if predicted_pga > (pga_factor * recorded_pga):
            is_significant.append(True)
        else:
            is_significant.append(False)

    significant_arrivals = arrivals[is_significant]
    if significant_arrivals.empty:
        return st

    # Check if any of the significant arrivals occur within the first
    # part of the signal window
    signal_length = st[0].stats.endtime - signal_window_starttime
    cutoff_time = signal_window_starttime + \
        pct_window_reject * signal_length
    if (significant_arrivals < cutoff_time).any():
        for tr in st:
            tr.fail("A significant arrival from another event occurs "
                    "within the first %s percent of the signal window"
                    % (100 * pct_window_reject))

    # Otherwise, trim the stream at the first significant arrival
    else:
        for tr in st:
            signal_end = tr.getParameter("signal_end")
            signal_end["end_time"] = significant_arrivals[0]
            signal_end["method"] = "Trimming before another event"
            tr.setParameter("signal_end", signal_end)
        cut(st)

    return st
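# The reject-or-trim decision in steps 3-4 of the docstring reduces to
# comparing each significant arrival time against a cutoff placed
# pct_window_reject of the way into the signal window. A minimal sketch of
# that rule with hypothetical times (in seconds from the signal start):
import numpy as np

significant_arrivals = np.array([42.0, 95.0])  # arrivals of other events
signal_start, signal_end = 0.0, 120.0
pct_window_reject = 0.5

cutoff = signal_start + pct_window_reject * (signal_end - signal_start)
if (significant_arrivals < cutoff).any():
    action = "reject record"  # an arrival falls too early in the window
else:
    action = "trim at %.1f s" % significant_arrivals.min()
print(action)  # -> "reject record" (42.0 s < 60.0 s cutoff)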
def oq_run(
    model: Enum,
    tect_type: Enum,
    rupture_df: pd.DataFrame,
    im: str,
    periods: Sequence[Union[int, float]] = None,
    **kwargs,
):
    """Run an OpenQuake model with a dataframe.

    model: Enum
        OQ model name
    tect_type: Enum
        One of the tectonic types from
        ACTIVE_SHALLOW, SUBDUCTION_SLAB and SUBDUCTION_INTERFACE
    rupture_df: Rupture DF
        Columns for properties, e.g., vs30, z1pt0, rrup, rjb, mag, rake,
        dip.... Rows are the separate site-fault pairs, but the site
        information must be identical across the rows; only the faults
        can differ.
    im: string
        intensity measure
    periods: Sequence[Union[int, float]]
        for spectral acceleration; openquake tables automatically
        interpolate values between specified values, and fail outside
        the range
    kwargs: pass extra (model specific) parameters to models
    """
    model = (
        OQ_MODELS[model][tect_type](**kwargs)
        if not model.name.endswith("_NZ")
        else OQ_MODELS[model][tect_type](region="NZL", **kwargs)
    )

    # Check the given tect_type against the model's tectonic region type
    trt = model.DEFINED_FOR_TECTONIC_REGION_TYPE
    if trt == const.TRT.SUBDUCTION_INTERFACE:
        assert tect_type == TectType.SUBDUCTION_INTERFACE
    elif trt == const.TRT.SUBDUCTION_INTRASLAB:
        assert tect_type == TectType.SUBDUCTION_SLAB
    elif trt == const.TRT.ACTIVE_SHALLOW_CRUST:
        assert tect_type == TectType.ACTIVE_SHALLOW
    else:
        raise ValueError("unknown tectonic region: " + trt)

    stddev_types = [
        std for std in SPT_STD_DEVS
        if std in model.DEFINED_FOR_STANDARD_DEVIATION_TYPES
    ]

    # Make a copy in case the original rupture_df is used with other
    # functions
    rupture_df = rupture_df.copy()

    # Check if the df contains what the model requires
    rupture_ctx_properties = set(rupture_df.columns.values)
    extra_site_parameters = set(
        model.REQUIRES_SITES_PARAMETERS).difference(rupture_ctx_properties)
    if len(extra_site_parameters) > 0:
        raise ValueError("unknown site property: " +
                         " ".join(extra_site_parameters))

    extra_rup_properties = set(
        model.REQUIRES_RUPTURE_PARAMETERS).difference(rupture_ctx_properties)
    if len(extra_rup_properties) > 0:
        raise ValueError("unknown rupture property: " +
                         " ".join(extra_rup_properties))

    extra_dist_properties = set(
        model.REQUIRES_DISTANCES).difference(rupture_ctx_properties)
    if len(extra_dist_properties) > 0:
        raise ValueError("unknown distance property: " +
                         " ".join(extra_dist_properties))

    # Convert z1pt0 from km to m
    rupture_df["z1pt0"] *= 1000
    # OQ's single new-style context which contains all site, distance and
    # rupture information
    rupture_ctx = contexts.RuptureContext(
        tuple([
            # OpenQuake requires the occurrence_rate attribute to exist
            ("occurrence_rate", None),
            # sids is the number of sites provided (OQ term)
            # This term needs to be repeated for the number of rows in
            # the df
            ("sids", [1] * rupture_df.shape[0]),
            *(
                (column, rupture_df.loc[:, column].values)
                for column in rupture_df.columns.values
            ),
        ])
    )

    if periods is not None:
        assert imt.SA in model.DEFINED_FOR_INTENSITY_MEASURE_TYPES
        # use sorted instead of max for the full list
        avail_periods = np.asarray([
            im.period
            for im in (
                model.COEFFS.sa_coeffs.keys()
                if not isinstance(
                    model,
                    (
                        gsim.zhao_2006.ZhaoEtAl2006Asc,
                        gsim.zhao_2006.ZhaoEtAl2006SSlab,
                        gsim.zhao_2006.ZhaoEtAl2006SInter,
                    ),
                )
                else model.COEFFS_ASC.sa_coeffs.keys()
            )
        ])
        max_period = max(avail_periods)
        if not hasattr(periods, "__len__"):
            periods = [periods]
        results = []
        for period in periods:
            im = imt.SA(period=min(period, max_period))
            try:
                result = oq_mean_stddevs(model, rupture_ctx, im,
                                         stddev_types)
            except KeyError as ke:
                cause = ke.args[0]
                # Make sure the KeyError is about a missing pSA period
                if (isinstance(cause, imt.IMT)
                        and str(cause).startswith("SA")
                        and cause.period > 0.0):
                    # Period is smaller than the model's supported
                    # min_period, e.g., ZA_06.
                    # Interpolate between PGA(0.0) and the model's
                    # min_period
                    low_result = oq_mean_stddevs(
                        model, rupture_ctx, imt.PGA(), stddev_types)
                    high_period = avail_periods[period <= avail_periods][0]
                    high_result = oq_mean_stddevs(
                        model, rupture_ctx, imt.SA(period=high_period),
                        stddev_types)
                    result = interpolate_with_pga(
                        period, high_period, low_result, high_result)
                else:
                    # KeyError that we cannot handle
                    logging.exception(ke)
                    raise
            except Exception as e:
                # Any other exceptions that we cannot handle
                logging.exception(e)
                raise

            # Extrapolate the pSA value up based on the maximum available
            # period
            if period > max_period:
                result.loc[:, result.columns.str.endswith("mean")] += \
                    2 * np.log(max_period / period)
                # Update the period from max_period to the given period,
                # e.g., with ZA_06, replace 5.0 with period > 5.0
                result.columns = result.columns.str.replace(
                    str(max_period), str(period), regex=False)
            results.append(result)

        return pd.concat(results, axis=1)
    else:
        imc = getattr(imt, im)
        assert imc in model.DEFINED_FOR_INTENSITY_MEASURE_TYPES
        return oq_mean_stddevs(model, rupture_ctx, imc(), stddev_types)
def _get_extent_from_multigmpe(rupture, config=None):
    """
    Use MultiGMPE to determine extent
    """
    (clon, clat) = _rupture_center(rupture)
    origin = rupture.getOrigin()
    if config is not None:
        gmpe = MultiGMPE.from_config(config)
        gmice = get_object_from_config('gmice', 'modeling', config)
        if imt.SA in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.SA(1.0)
        elif imt.PGV in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.PGV()
        else:
            default_imt = imt.PGA()
    else:
        # Put in some default values for config
        config = {
            'extent': {
                'mmi': {
                    'threshold': 4.5,
                    'mindist': 100,
                    'maxdist': 1000
                }
            }
        }

        # Generic GMPE choices based only on active vs stable,
        # as defaults...
        stable = is_stable(origin.lon, origin.lat)
        if not stable:
            ASK14 = AbrahamsonEtAl2014()
            CB14 = CampbellBozorgnia2014()
            CY14 = ChiouYoungs2014()
            gmpes = [ASK14, CB14, CY14]
            site_gmpes = None
            weights = [1 / 3.0, 1 / 3.0, 1 / 3.0]
            gmice = WGRW12()
        else:
            Fea96 = FrankelEtAl1996MwNSHMP2008()
            Tea97 = ToroEtAl1997MwNSHMP2008()
            Sea02 = SilvaEtAl2002MwNSHMP2008()
            C03 = Campbell2003MwNSHMP2008()
            TP05 = TavakoliPezeshk2005MwNSHMP2008()
            AB06p = AtkinsonBoore2006Modified2011()
            Pea11 = PezeshkEtAl2011()
            Atk08p = Atkinson2008prime()
            Sea01 = SomervilleEtAl2001NSHMP2008()
            gmpes = [Fea96, Tea97, Sea02, C03, TP05, AB06p, Pea11,
                     Atk08p, Sea01]
            site_gmpes = [AB06p]
            weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0]
            gmice = AK07()

        gmpe = MultiGMPE.from_list(
            gmpes, weights, default_gmpes_for_site=site_gmpes)
        default_imt = imt.SA(1.0)

    min_mmi = config['extent']['mmi']['threshold']
    sd_types = [const.StdDev.TOTAL]

    # Distance context
    dx = DistancesContext()
    # This imposes minimum/maximum distances of 80 and 800 km;
    # could make this configurable
    d_min = config['extent']['mmi']['mindist']
    d_max = config['extent']['mmi']['maxdist']
    dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000)
    # Details don't matter for this; assuming a vertical surface-rupturing
    # fault with the epicenter at the surface.
    dx.rrup = dx.rjb
    dx.rhypo = dx.rjb
    dx.repi = dx.rjb
    dx.rx = np.zeros_like(dx.rjb)
    dx.ry0 = np.zeros_like(dx.rjb)
    dx.rvolc = np.zeros_like(dx.rjb)

    # Sites context
    sx = SitesContext()
    # Set to soft soil conditions
    sx.vs30 = np.full_like(dx.rjb, 180)
    sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe)
    sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool)
    sx = Sites._addDepthParameters(sx)
    sx.backarc = np.full_like(sx.vs30, False, dtype=bool)

    # Rupture context
    rx = RuptureContext()
    rx.mag = origin.mag
    rx.rake = 0.0
    # From WC94...
    rx.width = 10 ** (-0.76 + 0.27 * rx.mag)
    rx.dip = 90.0
    rx.ztor = origin.depth
    rx.hypo_depth = origin.depth

    gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs(
        sx, rx, dx, default_imt, sd_types)

    # Convert to MMI
    gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt)

    # Minimum distance that exceeds threshold MMI?
    dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi]
    if len(dists_exceed_mmi):
        mindist_km = np.max(dists_exceed_mmi)
    else:
        mindist_km = d_min

    # Get a projection
    proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4)
    if isinstance(rupture, (QuadRupture, EdgeRupture)):
        ruptx, rupty = proj(
            rupture.lons[~np.isnan(rupture.lons)],
            rupture.lats[~np.isnan(rupture.lats)])
    else:
        ruptx, rupty = proj(clon, clat)

    xmin = np.nanmin(ruptx) - mindist_km
    ymin = np.nanmin(rupty) - mindist_km
    xmax = np.nanmax(ruptx) + mindist_km
    ymax = np.nanmax(rupty) + mindist_km

    # Put a limit on the range of the aspect ratio
    dx = xmax - xmin
    dy = ymax - ymin
    ar = dy / dx
    if ar > 1.2:
        # Inflate x
        dx_target = dy / 1.2
        ddx = dx_target - dx
        xmax = xmax + ddx / 2
        xmin = xmin - ddx / 2
    if ar < 0.83:
        # Inflate y
        dy_target = dx * 0.83
        ddy = dy_target - dy
        ymax = ymax + ddy / 2
        ymin = ymin - ddy / 2

    lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True)
    lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True)

    #
    # Round coordinates to the nearest minute -- that should make the
    # output grid register with common grid resolutions (60c, 30c,
    # 15c, 7.5c)
    #
    logging.debug("Extent: %f, %f, %f, %f" %
                  (lonmin, lonmax, latmin, latmax))
    return _round_coord(lonmin[0]), _round_coord(lonmax[0]), \
        _round_coord(latmin[0]), _round_coord(latmax[0])
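# The aspect-ratio clamp above keeps dy/dx between 0.83 and 1.2 by
# symmetrically inflating the short dimension. A tiny numeric sketch with
# hypothetical bounds (km):
xmin, xmax, ymin, ymax = 0.0, 100.0, 0.0, 200.0
dx, dy = xmax - xmin, ymax - ymin
ar = dy / dx  # 2.0 > 1.2, so inflate x
if ar > 1.2:
    ddx = dy / 1.2 - dx
    xmin, xmax = xmin - ddx / 2, xmax + ddx / 2
print(xmin, xmax)  # -> (-33.3..., 133.3...); now dy/dx == 1.2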
def get_mean_and_stddevs(self, sites, rx, dists, imt, stddev_types):
    # List of GMPE weights, which is the product of the branch weights
    # for the seed models vs the NGA East resampled models, as well as
    # the weights for the individual GMPEs as defined by Petersen
    # et al. (2019).
    #
    # Note that the NGA East resampled models are a function of spectral
    # period.
    #
    # NGA East Seeds (1/3)
    # ├── B_bca10d (0.06633), wts = 0.333 * 0.06633 = 0.02208789
    # ├── B_ab95 (0.02211), wts = 0.333 * 0.02211 = 0.00736263
    # ...
    # NGA East Resampled or "USGS" (2/3)
    # ├── Model 1 (0.1009 for PGA), wts = 0.667 * 0.1009 = 0.0673003
    # ├── Model 2 (0.1606 for PGA), wts = 0.667 * 0.1606 = 0.1071202
    # ...
    wts = [0] * len(self.gmpes)

    # Is IMT PGA or PGV?
    is_pga = imt == IMT.PGA()
    is_pgv = imt == IMT.PGV()

    # Is magnitude less than 4? If so, we will need to set it to 4.0 and
    # then extrapolate the tables at the end.
    # But... in the brave new world of new OpenQuake, sites, rx, and
    # dists are all exactly the same object, so when we copy rx to
    # rup, and change it, as well as when we change the distances
    # below, we need to do it all in rup, then pass rup as all three
    # contexts when we call gmpe.get_mean_and_stddevs()
    rup = copy.deepcopy(rx)
    if rup.mag < 4.0:
        is_small_mag = True
        delta_mag = rup.mag - 4.0
        rup.mag = 4.0
    else:
        is_small_mag = False

    for i, tp in enumerate(self.ALL_TABLE_PATHS):
        if 'usgs' in tp:
            # Get model number from i-th path using regex
            mod_num = int(re.search(r'\d+', tp).group())
            coefs = np.array(self.NGA_EAST_USGS.iloc[mod_num - 1])
            # Is the IMT PGA, PGV, or SA?
            if is_pga:
                iweight = coefs[-2]
            elif is_pgv:
                iweight = coefs[-1]
            else:
                # For SA, need to interpolate; we'll use log-period and
                # linear-weight interpolation.
                iweight = np.interp(
                    np.log(imt.period),
                    np.log(self.per_array),
                    coefs[self.per_idx_start:self.per_idx_end])
            wts[i] = self.NGA_EAST_USGS_WEIGHT * iweight
        else:
            # Strip off the cruft to get the string we need to match
            str_match = tp.replace('nga_east_', '').replace('.hdf5', '')
            matched = self.NGA_EAST_SEEDS[
                self.NGA_EAST_SEEDS['model'] == str_match]
            if len(matched):
                iweight = matched.iloc[0, 1]
                wts[i] = self.NGA_EAST_SEED_WEIGHT * iweight

    total_gmpe_weights = self.sigma_weights * wts
    if not np.allclose(np.sum(total_gmpe_weights), 1.0):
        raise ValueError('Weights must sum to 1.0.')

    mean = np.full_like(sites.vs30, 0)
    stddevs = []
    for i in range(len(stddev_types)):
        stddevs.append(np.full_like(sites.vs30, 0))

    # Apply max distance to dists.rrup -->> now rup.rrup
    # (NB: np.clip does not modify rup.rrup in place here)
    np.clip(rup.rrup, 0, MAX_RRUP)

    #
    # Some models don't have PGV terms, so we will make PSA for them
    # and then use the conditional conversion to get PGV
    #
    if is_pgv:
        ab2020 = AbrahamsonBhasin2020(rup.mag)
        vimt = IMT.SA(ab2020.getTref())

    # Loop over GMPEs
    for i, gm in enumerate(self.gmpes):
        if is_pgv:
            # Is PGV and also not available for gm?
            try:
                _ = _return_tables(gm, rup.mag, imt, "IMLs")
            except KeyError:
                #
                # No table for PGV; compute vimt, then convert to PGV
                #
                vmean, vstddevs = gm.get_mean_and_stddevs(
                    rup, rup, rup, vimt, stddev_types)
                tmean, tstddevs = ab2020.getPGVandSTDDEVS(
                    vmean, vstddevs, stddev_types, rup.rrup, rup.vs30)
            except Exception:
                logging.error("Unexpected error: %s", sys.exc_info()[0])
            else:
                #
                # Table exists for PGV, proceed normally
                #
                tmean, tstddevs = gm.get_mean_and_stddevs(
                    rup, rup, rup, imt, stddev_types)
        else:
            tmean, tstddevs = gm.get_mean_and_stddevs(
                rup, rup, rup, imt, stddev_types)

        mean += tmean * total_gmpe_weights[i]
        for j, sd in enumerate(tstddevs):
            stddevs[j] += sd * total_gmpe_weights[i]

    # Zero out values at distances beyond the range for which NGA East
    # was defined. -->> was dists.rrup, now rup.rrup
    mean[rup.rrup > MAX_RRUP] = -999.0

    # Do we need to extrapolate for the small-magnitude factor?
    if is_small_mag:
        if is_pga:
            slopes = np.interp(np.log(rup.rrup),
                               np.log(self.SMALL_M_DIST),
                               self.SMALL_M_SLOPE_PGA)
        elif is_pgv:
            slopes = np.interp(np.log(rup.rrup),
                               np.log(self.SMALL_M_DIST),
                               self.SMALL_M_SLOPE_PGV)
        else:
            interp_obj = RectBivariateSpline(
                np.log(self.SMALL_M_DIST), np.log(self.SMALL_M_PER),
                self.SMALL_M_SLOPE, kx=1, ky=1)
            slopes = interp_obj.ev(np.log(rup.rrup), np.log(imt.period))
        mean = mean + slopes * delta_mag

    return mean, stddevs
from shakelib.gmpe.nga_east import NGAEast

home_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(home_dir, 'nga_east_data')

stddev_types = [StdDev.TOTAL]

gmpe = NGAEast()

dx = base.DistancesContext()
dx.rrup = np.logspace(-1, np.log10(2000), 100)
rx = base.RuptureContext()
sx = base.SitesContext()

IMTS = [imt.PGA(), imt.PGV(), imt.SA(0.3), imt.SA(1.0), imt.SA(3.0)]
MAGS = [3, 5, 6, 7]
VS30 = [180, 380, 760, 2000]


def update_results():
    # To build the data for testing
    result = {}
    for i in IMTS:
        ikey = i.__str__()
        result[ikey] = {}
        for mag in MAGS:
            rx.mag = mag
            result[ikey][str(mag)] = {}
# Print output
print(out)
print(err)

log = str(out) + str(err)

## Read in the output file:
vs30_data = np.genfromtxt('tmp2', usecols=2)
pd.DataFrame(vs30_data).to_csv(
    '/home/eking/Documents/internship/data/Kappa/vs30.csv')

kappa_data = fullfile[' tstar(s) ']

############# GMPEs ###################
imt_pga = imt.PGA()
imt_pgv = imt.PGV()
imt_arias = imt.IA()
uncertaintytype = const.StdDev.TOTAL

## Set GMPEs:
zhao2006 = ZhaoEtAl2006SInter()
travasarou = TravasarouEtAl2003()
bssa14 = BooreEtAl2014()

## Set the empty arrays:
median_zhao2006 = np.array([])
median_travasarou = np.array([])
median_bssa14 = np.array([])

sd_zhao2006 = np.array([])