def test_get_mean_and_stddevs_good_amplified(self): """ Tests the full execution of the GMPE tables for valid data with amplification """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() sctx.vs30 = 100. * np.ones(5) stddevs = [const.StdDev.TOTAL] expected_mean = np.array([20., 20., 10., 5., 1.0E-19]) expected_sigma = 0.25 * np.ones(5) # PGA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5) # SA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
def test_mag_greater_8pt5(self): gmpe = SadighEtAl1997() sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.rake = 0.0 dctx.rrup = numpy.array([0., 1.]) sctx.vs30 = numpy.array([800., 800.]) rctx.mag = 9.0 mean_rock_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) rctx.mag = 8.5 mean_rock_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_allclose(mean_rock_9, mean_rock_8pt5) sctx.vs30 = numpy.array([300., 300.]) rctx.mag = 9.0 mean_soil_9, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) rctx.mag = 8.5 mean_soil_8pt5, _ = gmpe.get_mean_and_stddevs(sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_allclose(mean_soil_9, mean_soil_8pt5)
def calculate_total_std(gsim_list, imts, vs30): std_total = {} std_inter = {} std_intra = {} for gsim in gsim_list: rctx = RuptureContext() # The calculator needs these inputs but they are not used # in the std calculation rctx.mag = 5 rctx.rake = 0 rctx.hypo_depth = 0 dctx = DistancesContext() dctx.rjb = np.copy(np.array([1])) # I do not care about the distance dctx.rrup = np.copy(np.array([1])) # I do not care about the distance sctx = SitesContext() sctx.vs30 = vs30 * np.ones_like(np.array([0])) for imt in imts: gm_table, [ gm_stddev_inter, gm_stddev_intra ] = (gsim.get_mean_and_stddevs( sctx, rctx, dctx, imt, [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT])) std_total[gsim, imt] = (np.sqrt(gm_stddev_inter[0]**2 + gm_stddev_intra[0]**2)) std_inter[gsim, imt] = gm_stddev_inter[0] std_intra[gsim, imt] = gm_stddev_intra[0] return (std_total, std_inter, std_intra)
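# A minimal usage sketch for calculate_total_std; the GSIMs and IMTs below are
# illustrative assumptions, not taken from the original script.
from openquake.hazardlib import imt as imt_module
from openquake.hazardlib.gsim.akkar_bommer_2010 import AkkarBommer2010
from openquake.hazardlib.gsim.boore_atkinson_2008 import BooreAtkinson2008

example_gsims = [BooreAtkinson2008(), AkkarBommer2010()]
example_imts = [imt_module.PGA(), imt_module.SA(0.2), imt_module.SA(1.0)]
std_total, std_inter, std_intra = calculate_total_std(
    example_gsims, example_imts, vs30=760.0)
# Each returned dictionary is keyed by (gsim, imt), e.g.
# std_total[example_gsims[0], example_imts[0]]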
def test_mag_dist_outside_range(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() # rupture with Mw = 3 (Mblg=2.9434938048208452) at rhypo = 1 must give # same mean as rupture with Mw = 4.4 (Mblg=4.8927897867183798) at # rhypo = 10 rctx.mag = 2.9434938048208452 dctx.rhypo = numpy.array([1]) mean_mw3_d1, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) rctx.mag = 4.8927897867183798 dctx.rhypo = numpy.array([10]) mean_mw4pt4_d10, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(float(mean_mw3_d1), float(mean_mw4pt4_d10)) # rupture with Mw = 9 (Mblg = 8.2093636421088814) at rhypo = 1500 km # must give same mean as rupture with Mw = 8.2 # (Mblg = 7.752253535347597) at rhypo = 1000 rctx.mag = 8.2093636421088814 dctx.rhypo = numpy.array([1500.]) mean_mw9_d1500, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) rctx.mag = 7.752253535347597 dctx.rhypo = numpy.array([1000.]) mean_mw8pt2_d1000, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(mean_mw9_d1500, mean_mw8pt2_d1000)
def test_get_mean_and_stddevs(self): """ Tests mean and standard deviations without amplification """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() stddevs = [const.StdDev.TOTAL] expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20]) # PGA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5) # SA mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.SA(1.0), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5) # PGV mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGV(), stddevs) np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean, 5) np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
def get_gsim_contexts(self): """ Returns a comprehensive set of GMPE context objects """ assert isinstance(self.rupture, Rupture) assert isinstance(self.target_sites, SiteCollection) # Distances dctx = DistancesContext() # Rupture distance setattr(dctx, 'rrup', self.rupture.surface.get_min_distance(self.target_sites.mesh)) # Rx setattr(dctx, 'rx', self.rupture.surface.get_rx_distance(self.target_sites.mesh)) # Rjb setattr( dctx, 'rjb', self.rupture.surface.get_joyner_boore_distance( self.target_sites.mesh)) # Rhypo setattr( dctx, 'rhypo', self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh)) # Repi setattr( dctx, 'repi', self.rupture.hypocenter.distance_to_mesh(self.target_sites.mesh, with_depths=False)) # Ry0 setattr(dctx, 'ry0', self.rupture.surface.get_ry0_distance(self.target_sites.mesh)) # Rcdpp - ignored at present setattr(dctx, 'rcdpp', None) # Azimuth - ignored at present setattr(dctx, 'azimuth', None) setattr(dctx, 'hanging_wall', None) # Rvolc setattr(dctx, "rvolc", np.zeros_like(self.target_sites.mesh.lons)) # Sites sctx = SitesContext() key_list = ['_vs30', '_vs30measured', '_z1pt0', '_z2pt5', '_backarc'] for key in key_list: setattr(sctx, key[1:], getattr(self.target_sites, key)) for key in ['lons', 'lats']: setattr(sctx, key, getattr(self.target_sites, key)) # Rupture rctx = RuptureContext() setattr(rctx, 'mag', self.magnitude) setattr(rctx, 'strike', self.strike) setattr(rctx, 'dip', self.dip) setattr(rctx, 'rake', self.rake) setattr(rctx, 'ztor', self.ztor) setattr(rctx, 'hypo_depth', self.rupture.hypocenter.depth) setattr(rctx, 'hypo_lat', self.rupture.hypocenter.latitude) setattr(rctx, 'hypo_lon', self.rupture.hypocenter.longitude) setattr(rctx, 'hypo_loc', self.hypo_loc) setattr(rctx, 'width', self.rupture.surface.get_width()) return sctx, rctx, dctx
def getSitesContext(self): """ :returns: SitesContext object. """ sctx = SitesContext() sctx.vs30 = self._Vs30.getData().copy() sctx.z1pt0 = self._Z1Pt0 sctx.z2pt5 = self._Z2Pt5 sctx.backarc = self._backarc # zoneconfig might have this info if self._vs30measured_grid is None: # If we don't know, then use false sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool) else: sctx.vs30measured = self._vs30measured_grid sctx.lons = self._lons sctx.lats = self._lats return sctx
def _get_poes(self, **kwargs): default_kwargs = dict(sctx=SitesContext(), rctx=RuptureContext(), dctx=DistancesContext(), imt=self.DEFAULT_IMT(), imls=[1.0, 2.0, 3.0], truncation_level=1.0) default_kwargs.update(kwargs) kwargs = default_kwargs return self.gsim.get_poes(**kwargs)
def test_get_amplification_factors(self): """ Tests the amplification tables """ rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Takes distances at the values found in the table (not checking # distance interpolation) dctx.rjb = np.copy(self.amp_table.distances[:, 0, 0]) # Test Vs30 is 700.0 m/s midpoint between the 400 m/s and 1000 m/s # specified in the table sctx = SitesContext() sctx.vs30 = 700.0 * np.ones_like(dctx.rjb) stddevs = [const.StdDev.TOTAL] expected_mean = np.ones_like(dctx.rjb) expected_sigma = np.ones_like(dctx.rjb) # Check PGA and PGV mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGA(), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 1.5) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.PGV(), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 0.5) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean) # Sa (0.5) mean_amp, sigma_amp = self.amp_table.get_amplification_factors( imt_module.SA(0.5), sctx, rctx, dctx.rjb, stddevs) np.testing.assert_array_almost_equal( mean_amp, midpoint(1.0, 2.0) * expected_mean) np.testing.assert_array_almost_equal( sigma_amp[0], 0.9 * expected_mean)
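# The test above relies on a small helper called ``midpoint``; a plausible
# definition (an assumption, not copied from the original test module) is the
# value halfway between the two amplification factors being interpolated:
def midpoint(low, high):
    """Return the value halfway between ``low`` and ``high``."""
    return 0.5 * (low + high)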
def test_get_mean_stddevs_unsupported_stddev(self): """ Tests the execution of the GMPE with an unsupported standard deviation type """ gsim = GMPETable(gmpe_table=self.TABLE_FILE) rctx = RuptureContext() rctx.mag = 6.0 dctx = DistancesContext() # Test values at the given distances and those outside range dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0]) sctx = SitesContext() sctx.vs30 = 1000. * np.ones(5) stddevs = [const.StdDev.TOTAL, const.StdDev.INTER_EVENT] with self.assertRaises(ValueError) as ve: gsim.get_mean_and_stddevs(sctx, rctx, dctx, imt_module.PGA(), stddevs) self.assertEqual(str(ve.exception), "Standard Deviation type Inter event not supported")
def _get_sites_context_event(self, idx): """ Returns the site context for a particular event """ sctx = SitesContext() longs = [] lats = [] depths = [] vs30 = [] vs30_measured = [] z1pt0 = [] z2pt5 = [] backarc = [] azimuth = [] hanging_wall = [] for idx_j in idx: # Site parameters rup = self.records[idx_j] longs.append(rup.site.longitude) lats.append(rup.site.latitude) if rup.site.altitude: depths.append(rup.site.altitude * -1.0E-3) else: depths.append(0.0) vs30.append(rup.site.vs30) if rup.site.vs30_measured is not None: vs30_measured.append(rup.site.vs30_measured) else: vs30_measured.append(0) if rup.site.z1pt0 is not None: z1pt0.append(rup.site.z1pt0) else: z1pt0.append(vs30_to_z1pt0_cy14(rup.site.vs30)) if rup.site.z2pt5 is not None: z2pt5.append(rup.site.z2pt5) else: z2pt5.append(vs30_to_z2pt5_cb14(rup.site.vs30)) if ("backarc" in dir(rup.site)) and rup.site.backarc is not None: backarc.append(rup.site.backarc) setattr(sctx, 'vs30', np.array(vs30)) if len(longs) > 0: setattr(sctx, 'lons', np.array(longs)) if len(lats) > 0: setattr(sctx, 'lats', np.array(lats)) if len(depths) > 0: setattr(sctx, 'depths', np.array(depths)) if len(vs30_measured) > 0: setattr(sctx, 'vs30measured', np.array(vs30_measured)) if len(z1pt0) > 0: setattr(sctx, 'z1pt0', np.array(z1pt0)) if len(z2pt5) > 0: setattr(sctx, 'z2pt5', np.array(z2pt5)) if len(backarc) > 0: setattr(sctx, 'backarc', np.array(backarc)) return sctx
def setUp(self): """ Setup with a set of distances and site parameters """ self.imts = [PGA(), SA(0.1), SA(0.2), SA(0.5), SA(1.0), SA(2.0)] self.mags = [4.5, 5.5, 6.5, 7.5] self.rakes = [-90., 0., 90.] self.dctx = DistancesContext() self.dctx.rhypo = np.array([5., 10., 20., 50., 100.]) self.sctx = SitesContext() self.sctx.vs30 = 800.0 * np.ones(5)
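# A hedged sketch of a test method that could exercise the fixtures defined in
# setUp above; ``self.gsim``, the StdDev/numpy imports and the assertions are
# assumptions made for illustration, not part of the original test case.
def test_means_and_stddevs_are_finite(self):
    rctx = RuptureContext()
    rctx.hypo_depth = 10.0
    for mag in self.mags:
        for rake in self.rakes:
            rctx.mag = mag
            rctx.rake = rake
            for imt in self.imts:
                mean, [sigma] = self.gsim.get_mean_and_stddevs(
                    self.sctx, rctx, self.dctx, imt, [StdDev.TOTAL])
                self.assertTrue(np.all(np.isfinite(mean)))
                self.assertTrue(np.all(sigma > 0))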
def test_rhypo_smaller_than_15(self): # test the calculation in case of rhypo distances less than 15 km # (for rhypo=0 the distance term has a singularity). In this case the # method should return values equal to the ones obtained by clipping # distances at 15 km. sctx = SitesContext() sctx.vs30 = numpy.array([800.0, 800.0, 800.0]) rctx = RuptureContext() rctx.mag = 5.0 rctx.rake = 0 dctx = DistancesContext() dctx.rhypo = numpy.array([0.0, 10.0, 16.0]) dctx.rhypo.flags.writeable = False mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) setattr(dctx, 'rhypo', numpy.array([15.0, 15.0, 16.0])) mean_15, stds_15 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_15) numpy.testing.assert_array_equal(stds_0, stds_15)
def setUp(self): """ """ self.gsim = TromansEtAl2019SigmaMu self.rctx = RuptureContext() self.rctx.mag = 6.5 self.rctx.rake = 0. self.dctx = DistancesContext() self.dctx.rjb = np.array([5., 10., 20., 50., 100.]) self.sctx = SitesContext() self.sctx.vs30 = 500. * np.ones(5)
def get_response_spectrum(self, magnitude, distance, periods, rake=90, vs30=800, damping=0.05): """ """ responses = np.zeros((len(periods),)) p_damping = damping * 100 rup = RuptureContext() rup.mag = magnitude rup.rake = rake dists = DistancesContext() dists.rjb = np.array([distance]) sites = SitesContext() sites.vs30 = np.array([vs30]) stddev_types = [StdDev.TOTAL] for i, period in enumerate(periods): if period == 0: imt = _PGA() else: imt = _SA(period, p_damping) responses[i] = np.exp(self._gmpe.get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)[0][0]) return ResponseSpectrum(periods, responses, unit='g', damping=damping)
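# A hedged usage sketch: build a 5%-damped response spectrum for an assumed
# Mw 6.5 strike-slip event at 20 km on rock; ``calculator`` stands in for an
# instance of the enclosing class and the period set is illustrative.
periods = np.concatenate(([0.0], np.logspace(np.log10(0.05), np.log10(4.0), 30)))
spectrum = calculator.get_response_spectrum(
    magnitude=6.5, distance=20.0, periods=periods, rake=0, vs30=760.0)
# the returned ResponseSpectrum holds the ordinates in g at the requested periods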
def sampleFromSites(self, lats, lons, vs30measured_grid=None): """ Create a SitesContext object by sampling the current Sites object. :param lats: Sequence of latitudes. :param lons: Sequence of longitudes. :param vs30measured_grid: Sequence of booleans of the same shape as lats/lons indicating whether the vs30 values are measured or inferred. :returns: SitesContext object where data are sampled from the current Sites object. :raises ShakeMapException: When lat/lon input sequences do not share dimensionality. """ lats = np.array(lats) lons = np.array(lons) latshape = lats.shape lonshape = lons.shape if latshape != lonshape: msg = 'Input lat/lon arrays must have the same dimensions' raise ShakeMapException(msg) site = SitesContext() # use default vs30 if outside grid site.vs30 = self._Vs30.getValue(lats, lons, default=self._defaultVs30) site.lats = lats site.lons = lons site.z1pt0 = _calculate_z1p0(site.vs30) site.z2pt5 = _calculate_z2p5(site.z1pt0) if vs30measured_grid is None: # If we don't know, then use false site.vs30measured = np.zeros_like(lons, dtype=bool) else: site.vs30measured = vs30measured_grid site.backarc = self._backarc return site
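# Hedged usage sketch: sample the Sites object at a few points; ``sites`` is
# an assumed instance of the enclosing class and the coordinates are
# illustrative only.
sample_lats = [34.00, 34.05, 34.10]
sample_lons = [-118.20, -118.25, -118.30]
sampled_ctx = sites.sampleFromSites(sample_lats, sample_lons)
# sampled_ctx.vs30, sampled_ctx.z1pt0 and sampled_ctx.z2pt5 are aligned with
# the requested latitudes/longitudes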
def _disaggregate_poe(self, **kwargs): default_kwargs = dict( sctx=SitesContext(), rctx=RuptureContext(), dctx=DistancesContext(), imt=self.DEFAULT_IMT(), iml=2.0, truncation_level=1.0, n_epsilons=3, ) default_kwargs.update(kwargs) kwargs = default_kwargs return self.gsim.disaggregate_poe(**kwargs)
def test_dist_not_in_increasing_order(self): sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() rctx.mag = 5. dctx.rhypo = numpy.array([150, 100]) mean_150_100, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) dctx.rhypo = numpy.array([100, 150]) mean_100_150, _ = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, SA(0.1, 5), [StdDev.TOTAL]) self.assertAlmostEqual(mean_150_100[1], mean_100_150[0]) self.assertAlmostEqual(mean_150_100[0], mean_100_150[1])
def evaluate_model(site_params, rup_params, df, npts, azimuth, moveout, mod, imt): sx = SitesContext() rx = RuptureContext() dx = DistancesContext() # TODO: some site parameters can be pulled from the dataframe so we don't # have to use the defaults (vs30, azimuth, etc.) if not moveout: npts = df.shape[0] for param in site_params.keys(): setattr(sx, param, np.full(npts, site_params[param])) rx.__dict__.update(rup_params) rx.mag = df['EarthquakeMagnitude'].iloc[0] rx.hypo_depth = df['EarthquakeDepth'].iloc[0] if moveout: dx.rjb = np.linspace(0, df['JoynerBooreDistance'].max(), npts) dx.rrup = np.sqrt(dx.rjb**2 + df['EarthquakeDepth'].iloc[0]**2) dx.rhypo = dx.rrup dx.repi = dx.rjb else: dx.rjb = df['JoynerBooreDistance'] dx.rrup = df['RuptureDistance'] dx.rhypo = df['HypocentralDistance'] dx.repi = df['EpicentralDistance'] # TODO: some of these distances can be pulled from the dataframe dx.ry0 = dx.rjb dx.rx = np.full_like(dx.rjb, -1) dx.azimuth = np.full_like(npts, azimuth) dx.rcdpp = dx.rjb dx.rvolc = dx.rjb try: mean, sd = MODELS_DICT[mod]().get_mean_and_stddevs( sx, rx, dx, manage_imts(imt)[0], [StdDev.TOTAL]) mean = convert_units(mean, imt) if moveout: return mean, dx else: return mean, sd[0] except Exception: return
def test_zero_distance(self): # test the calculation in case of zero rrup distance (for rrup=0 # the equations have a singularity). In this case the # method should return values equal to the ones obtained by # replacing 0 values with 1 sctx = SitesContext() rctx = RuptureContext() dctx = DistancesContext() setattr(sctx, 'vs30', numpy.array([500.0, 2500.0])) setattr(rctx, 'mag', 5.0) setattr(dctx, 'rrup', numpy.array([0.0, 0.2])) mean_0, stds_0 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) setattr(dctx, 'rrup', numpy.array([1.0, 0.2])) mean_01, stds_01 = self.GSIM_CLASS().get_mean_and_stddevs( sctx, rctx, dctx, PGA(), [StdDev.TOTAL]) numpy.testing.assert_array_equal(mean_0, mean_01) numpy.testing.assert_array_equal(stds_0, stds_01)
def __init__(self, vs30grid, vs30measured_grid=None, backarc=False, defaultVs30=686.0): """ Construct a Sites object. :param vs30grid: MapIO Grid2D object containing Vs30 values. :param vs30measured_grid: Boolean grid indicating whether Vs30 values were measured or derived (i.e., from slope) :param backarc: Boolean indicating whether event is on the backarc as defined here: http://earthquake.usgs.gov/learn/glossary/?term=backarc :param defaultVs30: Default Vs30 value to use in locations where Vs30Grid is not specified. """ self.Vs30 = vs30grid self.defaultVs30 = defaultVs30 self.GeoDict = vs30grid.getGeoDict().copy() lons = np.linspace(self.GeoDict.xmin, self.GeoDict.xmax, self.GeoDict.nx) lats = np.linspace(self.GeoDict.ymin, self.GeoDict.ymax, self.GeoDict.ny) self.Z1Pt0 = calculate_z1p0(self.Vs30.getData()) self.Z2Pt5 = calculate_z2p5(self.Z1Pt0) self.SitesContext = SitesContext() self.SitesContext.vs30 = self.Vs30.getData().copy() self.SitesContext.z1pt0 = self.Z1Pt0 self.SitesContext.z2pt5 = self.Z2Pt5 self.SitesContext.backarc = backarc #zoneconfig might have this info if vs30measured_grid is None: # If we don't know, then use false self.SitesContext.vs30measured = np.zeros_like( self.SitesContext.vs30, dtype=bool) else: self.SitesContext.vs30measured = vs30measured_grid self.SitesContext.lons = lons self.SitesContext.lats = lats
def signal_end(st, event_time, event_lon, event_lat, event_mag, method=None, vmin=None, floor=None, model=None, epsilon=2.0): """ Estimate end of signal by using a model of the 5-95% significant duration, and adding this value to the "signal_split" time. This probably only works well when the split is estimated with a p-wave picker since the velocity method often ends up with split times that are well before signal actually starts. Args: st (StationStream): Stream of data. event_time (UTCDateTime): Event origin time. event_mag (float): Event magnitude. event_lon (float): Event longitude. event_lat (float): Event latitude. method (str): Method for estimating signal end time. Either 'velocity' or 'model'. vmin (float): Velocity (km/s) for estimating end of signal. Only used if method="velocity". floor (float): Minimum duration (sec) applied along with vmin. model (str): Short name of duration model to use. Must be defined in the gmprocess/data/modules.yml file. epsilon (float): Number of standard deviations; if epsilon is 1.0, then the signal window duration is the mean Ds + 1 standard deviation. Only used for method="model". Returns: trace with stats dict updated to include a stats['processing_parameters']['signal_end'] dictionary. """ # Load openquake stuff if method="model" if method == "model": dmodel = load_model(model) # Set some "conservative" inputs (in that they will tend to give # larger durations). sctx = SitesContext() sctx.vs30 = np.array([180.0]) sctx.z1pt0 = np.array([0.51]) rctx = RuptureContext() rctx.mag = event_mag rctx.rake = -90.0 dur_imt = imt.from_string('RSD595') stddev_types = [const.StdDev.INTRA_EVENT] for tr in st: if not tr.hasParameter('signal_split'): continue if method == "velocity": if vmin is None: raise ValueError('Must specify vmin if method is "velocity".') if floor is None: raise ValueError('Must specify floor if method is "velocity".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 end_time = event_time + max(floor, epi_dist / vmin) elif method == "model": if model is None: raise ValueError('Must specify model if method is "model".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 dctx = DistancesContext() # Repi >= Rrup, so substitution here should be conservative # (leading to larger durations). dctx.rrup = np.array([epi_dist]) lnmu, lnstd = dmodel.get_mean_and_stddevs( sctx, rctx, dctx, dur_imt, stddev_types) duration = np.exp(lnmu + epsilon * lnstd[0]) # Get split time split_time = tr.getParameter('signal_split')['split_time'] end_time = split_time + float(duration) else: raise ValueError('method must be either "velocity" or "model".') # Update trace params end_params = { 'end_time': end_time, 'method': method, 'vsplit': vmin, 'floor': floor, 'model': model, 'epsilon': epsilon } tr.setParameter('signal_end', end_params) return st
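# Hedged usage sketch for signal_end with the simple velocity method; the
# stream, origin values, vmin and floor are illustrative assumptions.
st = signal_end(st,
                event_time=origin_time,   # assumed UTCDateTime of the origin
                event_lon=-117.5, event_lat=35.7, event_mag=6.4,
                method='velocity', vmin=1.0, floor=120.0)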
from openquake.hazardlib.gsim.base import SitesContext, RuptureContext, DistancesContext from openquake.hazardlib.gsim.abrahamson_2014 import AbrahamsonEtAl2014 from openquake.hazardlib import imt import numpy as np import gmpe as gm import matplotlib.pyplot as plt fig_dir = '/Users/vsahakian/anza/models/statistics/misc/oq_vs_matlab/' ## This all works..... ## ASK14 = AbrahamsonEtAl2014() IMT = imt.PGA() rctx = RuptureContext() dctx = DistancesContext() sctx = SitesContext() sctx_rock = SitesContext() rctx.rake = 0.0 rctx.dip = 90.0 rctx.ztor = 7.13 rctx.mag = 3.0 #rctx.mag = np.linspace(0.1,5.) rctx.width = 10.0 rctx.hypo_depth = 8.0 #dctx.rrup = np.logspace(1,np.log10(200),100) dctx.rrup = np.logspace(np.log10(10),np.log10(10.0),1) # Assuming average ztor, get rjb:
def build_gmpe_table(matrixMagsMin, matrixMagsMax, matrixMagsStep, matrixDistsMin, matrixDistsMax, matrixDistsStep, imt_filtering, limitIM, gsim_list, limit_max_mag, limit_min_mag): # Define the magnitude range of interest, 5.0 - 9.0 every 0.1 mags = np.arange(matrixMagsMin, matrixMagsMax, matrixMagsStep) # Define the distance range of interest, 0.0 - 300.0 every 1 km dists = np.arange(matrixDistsMin, matrixDistsMax, matrixDistsStep) # Define the Vs30 range of interest, 180.0 - 1000.0 every 1 m/s vs30s = np.arange(180.0, 181., 1.) gm_table = np.zeros([len(dists), len(mags), len(vs30s)]) stddevs = [const.StdDev.TOTAL] gsim_tables = [] for gsim in gsim_list: for i, mag in enumerate(mags): for j, vs30 in enumerate(vs30s): # The RuptureContext object holds all of the # rupture related attributes (e.g. mag, rake, ztor, hypo_depth) rctx = RuptureContext() rctx.mag = mag rctx.rake = 0.0 rctx.hypo_depth = 10 # The DistancesContext object holds all of the distance # calculations (e.g. rjb, rrup, rx, ry0) # OQ GMPEs are vectorised by distance - so this needs # to be an array dctx = DistancesContext() dctx.rjb = np.copy(dists) dctx.rrup = np.copy(dists) # dctx.rhypo = np.copy(dists) # The SitesContext object holds all of the site # attributes - also an array sctx = SitesContext() # The attributes of the site array must be of the # same size as the distances sctx.vs30 = vs30 * np.ones_like(dists) # GMPE produces 2 outputs, the means (well their # natural logarithm) and standard deviations gm_table[:, i, j], gm_stddevs = gsim.get_mean_and_stddevs( sctx, rctx, dctx, imt_filtering, stddevs) gm_table_exp = np.exp(gm_table) gsim_tables.append(gm_table_exp) if len(gsim_list) == 1: gm_table_final = gsim_tables[0] else: gm_table_final = np.maximum(gsim_tables[0], gsim_tables[1]) # These "if" exclude all ruptures above and below the limit magnitude if limit_max_mag < matrixMagsMax: indexMag = int((limit_max_mag - matrixMagsMin) / matrixMagsStep) list_mag_to_exclude = np.arange(indexMag+1, len(mags)) gm_table_final[:, list_mag_to_exclude, 0] = 0.001 if limit_min_mag > matrixMagsMin: indexMinMag = int((limit_min_mag - matrixMagsMin) / matrixMagsStep) list_min_mag_to_exclude = np.arange(0, indexMinMag) gm_table_final[:, list_min_mag_to_exclude, 0] = 0.001 gm_mask = gm_table_final >= limitIM GMPEmatrix = gm_mask[:, :, 0] return GMPEmatrix
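# Hedged usage sketch for build_gmpe_table; the GSIM, the filtering IMT, the
# intensity threshold and the magnitude limits are illustrative assumptions.
from openquake.hazardlib import imt as imt_module
from openquake.hazardlib.gsim.boore_atkinson_2008 import BooreAtkinson2008

gmpe_matrix = build_gmpe_table(
    matrixMagsMin=5.0, matrixMagsMax=9.0, matrixMagsStep=0.1,
    matrixDistsMin=0.0, matrixDistsMax=300.0, matrixDistsStep=1.0,
    imt_filtering=imt_module.PGA(), limitIM=0.05,
    gsim_list=[BooreAtkinson2008()],
    limit_max_mag=8.0, limit_min_mag=5.5)
# gmpe_matrix is a boolean (distance x magnitude) mask flagging ruptures whose
# median ground motion exceeds limitIM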
def test_equality(self): sctx1 = SitesContext() sctx1.vs30 = numpy.array([500., 600., 700.]) sctx1.vs30measured = True sctx1.z1pt0 = numpy.array([40., 50., 60.]) sctx1.z2pt5 = numpy.array([1, 2, 3]) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 == sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 != sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = False sctx2.z1pt0 = numpy.array([40., 50., 60.]) sctx2.z2pt5 = numpy.array([1, 2, 3]) self.assertTrue(sctx1 != sctx2) sctx2 = SitesContext() sctx2.vs30 = numpy.array([500., 600., 700.]) sctx2.vs30measured = True sctx2.z1pt0 = numpy.array([40., 50., 60.]) self.assertTrue(sctx1 != sctx2) rctx = RuptureContext() rctx.mag = 5. self.assertTrue(sctx1 != rctx)
def _parse_csv_line(headers, values): """ Parse a single line from data file. :param headers: A list of header names, the strings from the first line of csv file. :param values: A list of values of a single row to parse. :returns: A tuple of the following values (in specified order): sctx An instance of :class:`openquake.hazardlib.gsim.base.SitesContext` with attributes populated by the information from in row in a form of single-element numpy arrays. rctx An instance of :class:`openquake.hazardlib.gsim.base.RuptureContext`. dctx An instance of :class:`openquake.hazardlib.gsim.base.DistancesContext`. stddev_types An empty list, if the ``result_type`` column says "MEAN" for that row, otherwise it is a list with one item -- a requested standard deviation type. expected_results A dictionary mapping IMT-objects to one-element arrays of expected result values. Those results represent either standard deviation or mean value of corresponding IMT depending on ``result_type``. result_type A string literal, one of ``'STDDEV'`` or ``'MEAN'``. Value is taken from column ``result_type``. """ rctx = RuptureContext() sctx = SitesContext() dctx = DistancesContext() expected_results = {} stddev_types = result_type = damping = None for param, value in zip(headers, values): if param == 'result_type': value = value.upper() if value.endswith('_STDDEV'): # the row defines expected stddev results result_type = 'STDDEV' stddev_types = [getattr(const.StdDev, value[:-len('_STDDEV')])] else: # the row defines expected exponents of mean values assert value == 'MEAN' stddev_types = [] result_type = 'MEAN' elif param == 'damping': damping = float(value) elif param.startswith('site_'): # value is sites context object attribute if (param == 'site_vs30measured') or (param == 'site_backarc'): value = float(value) != 0 else: value = float(value) setattr(sctx, param[len('site_'):], numpy.array([value])) elif param.startswith('dist_'): # value is a distance measure value = float(value) setattr(dctx, param[len('dist_'):], numpy.array([value])) elif param.startswith('rup_'): # value is a rupture context attribute value = float(value) setattr(rctx, param[len('rup_'):], value) elif param == 'component_type': pass else: # value is the expected result (of result_type type) value = float(value) if param == 'pga': imt = PGA() elif param == 'pgv': imt = PGV() elif param == 'pgd': imt = PGD() elif param == 'cav': imt = CAV() else: period = float(param) assert damping is not None imt = SA(period, damping) expected_results[imt] = numpy.array([value]) assert result_type is not None return sctx, rctx, dctx, stddev_types, expected_results, result_type
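# A hedged sketch of the kind of row _parse_csv_line expects; the headers and
# values are invented for illustration.
example_headers = ['rup_mag', 'rup_rake', 'dist_rjb', 'site_vs30',
                   'damping', 'result_type', 'pga', '0.2']
example_values = ['6.5', '0.0', '10.0', '760.0', '5', 'MEAN', '0.25', '0.5']
sctx, rctx, dctx, stddev_types, expected, result_type = _parse_csv_line(
    example_headers, example_values)
# result_type == 'MEAN'; expected maps PGA() and SA(0.2, damping=5) to the two
# trailing columns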
partial_df = df.loc[df['USGS_eventID'] == event] # Data directory. data_dir = '/Users/tnye/PROJECTS/Duration/data/' # Event directory. event_dir = os.path.join(data_dir, 'events', event) # Distance context. dx = DistancesContext() dx.rjb = np.array(partial_df['rjb']) dx.rrup = np.array(partial_df['rrup']) dx.rx = np.array(partial_df['rx']) dx.ry0 = np.array(partial_df['ry']) # Sites context. sx = SitesContext() sx.vs30 = np.array(partial_df['Vs30(m/s)']) sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe) sx.vs30measured = np.zeros_like(sx.vs30, dtype=bool) # Rupture context. rx = RuptureContext() rx.mag = np.array(partial_df['magnitude'])[0] rx.rake = np.array(partial_df['rake_angle'])[0] rx.dip = np.array(partial_df['dip'])[0] rx.hypo_depth = np.array(partial_df['depth(km)'])[0] # Determine if there is a fault file. rupt_dir = os.path.join(event_dir, 'rupture_info') os.chdir(rupt_dir) rupture_file = []
def _get_extent_from_multigmpe(rupture, config=None): """ Use MultiGMPE to determine extent """ (clon, clat) = _rupture_center(rupture) origin = rupture.getOrigin() if config is not None: gmpe = MultiGMPE.from_config(config) gmice = get_object_from_config('gmice', 'modeling', config) if imt.SA in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES: default_imt = imt.SA(1.0) elif imt.PGV in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES: default_imt = imt.PGV() else: default_imt = imt.PGA() else: # Put in some default values for conf config = { 'extent': { 'mmi': { 'threshold': 4.5, 'mindist': 100, 'maxdist': 1000 } } } # Generic GMPEs choices based only on active vs stable # as defaults... stable = is_stable(origin.lon, origin.lat) if not stable: ASK14 = AbrahamsonEtAl2014() CB14 = CampbellBozorgnia2014() CY14 = ChiouYoungs2014() gmpes = [ASK14, CB14, CY14] site_gmpes = None weights = [1/3.0, 1/3.0, 1/3.0] gmice = WGRW12() else: Fea96 = FrankelEtAl1996MwNSHMP2008() Tea97 = ToroEtAl1997MwNSHMP2008() Sea02 = SilvaEtAl2002MwNSHMP2008() C03 = Campbell2003MwNSHMP2008() TP05 = TavakoliPezeshk2005MwNSHMP2008() AB06p = AtkinsonBoore2006Modified2011() Pea11 = PezeshkEtAl2011() Atk08p = Atkinson2008prime() Sea01 = SomervilleEtAl2001NSHMP2008() gmpes = [Fea96, Tea97, Sea02, C03, TP05, AB06p, Pea11, Atk08p, Sea01] site_gmpes = [AB06p] weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0] gmice = AK07() gmpe = MultiGMPE.from_list( gmpes, weights, default_gmpes_for_site=site_gmpes) default_imt = imt.SA(1.0) min_mmi = config['extent']['mmi']['threshold'] sd_types = [const.StdDev.TOTAL] # Distance context dx = DistancesContext() # This imposes minimum/ maximum distances of: # 80 and 800 km; could make this configurable d_min = config['extent']['mmi']['mindist'] d_max = config['extent']['mmi']['maxdist'] dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000) # Details don't matter for this; assuming vertical surface rupturing fault # with epicenter at the surface. dx.rrup = dx.rjb dx.rhypo = dx.rjb dx.repi = dx.rjb dx.rx = np.zeros_like(dx.rjb) dx.ry0 = np.zeros_like(dx.rjb) dx.rvolc = np.zeros_like(dx.rjb) # Sites context sx = SitesContext() # Set to soft soil conditions sx.vs30 = np.full_like(dx.rjb, 180) sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe) sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool) sx = Sites._addDepthParameters(sx) sx.backarc = np.full_like(sx.vs30, False, dtype=bool) # Rupture context rx = RuptureContext() rx.mag = origin.mag rx.rake = 0.0 # From WC94... rx.width = 10**(-0.76 + 0.27*rx.mag) rx.dip = 90.0 rx.ztor = origin.depth rx.hypo_depth = origin.depth gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs( sx, rx, dx, default_imt, sd_types) # Convert to MMI gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt) # Minimum distance that exceeds threshold MMI? 
dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi] if len(dists_exceed_mmi): mindist_km = np.max(dists_exceed_mmi) else: mindist_km = d_min # Get a projection proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4) if isinstance(rupture, (QuadRupture, EdgeRupture)): ruptx, rupty = proj( rupture.lons[~np.isnan(rupture.lons)], rupture.lats[~np.isnan(rupture.lats)] ) else: ruptx, rupty = proj(clon, clat) xmin = np.nanmin(ruptx) - mindist_km ymin = np.nanmin(rupty) - mindist_km xmax = np.nanmax(ruptx) + mindist_km ymax = np.nanmax(rupty) + mindist_km # Put a limit on range of aspect ratio dx = xmax - xmin dy = ymax - ymin ar = dy / dx if ar > 1.2: # Inflate x dx_target = dy / 1.2 ddx = dx_target - dx xmax = xmax + ddx / 2 xmin = xmin - ddx / 2 if ar < 0.83: # Inflate y dy_target = dx * 0.83 ddy = dy_target - dy ymax = ymax + ddy / 2 ymin = ymin - ddy / 2 lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True) lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True) # # Round coordinates to the nearest minute -- that should make the # output grid register with common grid resolutions (60c, 30c, # 15c, 7.5c) # logging.debug("Extent: %f, %f, %f, %f" % (lonmin, lonmax, latmin, latmax)) return _round_coord(lonmin[0]), _round_coord(lonmax[0]), \ _round_coord(latmin[0]), _round_coord(latmax[0])
def trim_multiple_events(st, origin, catalog, travel_time_df, pga_factor, pct_window_reject, gmpe, site_parameters, rupture_parameters): """ Uses a catalog (list of ScalarEvents) to handle cases where a trace might contain signals from multiple events. The catalog should contain events down to a low enough magnitude in relation to the events of interest. Overall, the algorithm is as follows: 1) For each earthquake in the catalog, get the P-wave travel time and estimated PGA at this station. 2) Compute the PGA (of the as-recorded horizontal channels). 3) Select the P-wave arrival times across all events for this record that are (a) within the signal window, and (b) the predicted PGA is greater than pga_factor times the PGA from step #1. 4) If any P-wave arrival times match the above criteria, then if any of the arrival times fall within in the first pct_window_reject*100% of the signal window, then reject the record. Otherwise, trim the record such that the end time does not include any of the arrivals selected in step #3. Args: st (StationStream): Stream of data. origin (ScalarEvent): ScalarEvent object associated with the StationStream. catalog (list): List of ScalarEvent objects. travel_time_df (DataFrame): A pandas DataFrame that contains the travel time information (obtained from gmprocess.waveform_processing.phase.create_travel_time_dataframe). The columns in the DataFrame are the station ids and the indices are the earthquake ids. pga_factor (float): A decimal factor used to determine whether the predicted PGA from an event arrival is significant enough that it should be considered for removal. pct_window_reject (float): A decimal from 0.0 to 1.0 used to determine if an arrival should be trimmed from the record, or if the entire record should be rejected. If the arrival falls within the first pct_window_reject * 100% of the signal window, then the entire record will be rejected. Otherwise, the record will be trimmed appropriately. gmpe (str): Short name of the GMPE to use. Must be defined in the modules file. site_parameters (dict): Dictionary of site parameters to input to the GMPE. rupture_parameters: Dictionary of rupture parameters to input to the GMPE. Returns: StationStream: Processed stream. """ if not st.passed: return st # Check that we know the signal split for each trace in the stream for tr in st: if not tr.hasParameter('signal_split'): return st signal_window_starttime = st[0].getParameter('signal_split')['split_time'] arrivals = travel_time_df[st[0].stats.network + '.' 
+ st[0].stats.station] arrivals = arrivals.sort_values() # Filter by any arrival times that appear in the signal window arrivals = arrivals[ (arrivals > signal_window_starttime) & (arrivals < st[0].stats.endtime)] # Make sure we remove the arrival that corresponds to the event of interest if origin.id in arrivals.index: arrivals.drop(index=origin.id, inplace=True) if arrivals.empty: return st # Calculate the recorded PGA for this record stasum = StationSummary.from_stream(st, ['ROTD(50.0)'], ['PGA']) recorded_pga = stasum.get_pgm('PGA', 'ROTD(50.0)') # Load the GMPE model gmpe = load_model(gmpe) # Set site parameters sx = SitesContext() # Make sure that site parameter values are converted to numpy arrays site_parameters_copy = site_parameters.copy() for k, v in site_parameters_copy.items(): site_parameters_copy[k] = np.array([site_parameters_copy[k]]) sx.__dict__.update(site_parameters_copy) # Filter by arrivals that have significant expected PGA using GMPE is_significant = [] for eqid, arrival_time in arrivals.items(): event = next(event for event in catalog if event.id == eqid) # Set rupture parameters rx = RuptureContext() rx.__dict__.update(rupture_parameters) rx.mag = event.magnitude # TODO: distances should be calculated when we refactor to be # able to import distance calculations dx = DistancesContext() dx.repi = np.array([ gps2dist_azimuth( st[0].stats.coordinates.latitude, st[0].stats.coordinates.longitude, event.latitude, event.longitude)[0] / 1000]) dx.rjb = dx.repi dx.rhypo = np.sqrt(dx.repi**2 + event.depth_km**2) dx.rrup = dx.rhypo pga, sd = gmpe.get_mean_and_stddevs(sx, rx, dx, imt.PGA(), []) # Convert from ln(g) to %g predicted_pga = 100 * np.exp(pga[0]) if predicted_pga > (pga_factor * recorded_pga): is_significant.append(True) else: is_significant.append(False) significant_arrivals = arrivals[is_significant] if significant_arrivals.empty: return st # Check if any of the significant arrivals occur within the signal_length = st[0].stats.endtime - signal_window_starttime cutoff_time = signal_window_starttime + pct_window_reject * (signal_length) if (significant_arrivals < cutoff_time).any(): for tr in st: tr.fail('A significant arrival from another event occurs within ' 'the first %s percent of the signal window' % (100 * pct_window_reject)) # Otherwise, trim the stream at the first significant arrival else: for tr in st: signal_end = tr.getParameter('signal_end') signal_end['end_time'] = significant_arrivals[0] signal_end['method'] = ('Trimming before right another event') tr.setParameter('signal_end', signal_end) cut(st) return st
# Third party imports. import numpy as np import pandas as pd # Local imports. from openquake.hazardlib.gsim.travasarou_2003 import TravasarouEtAl2003 from openquake.hazardlib.imt import IA from openquake.hazardlib.gsim.base import SitesContext from openquake.hazardlib.gsim.base import DistancesContext from openquake.hazardlib.gsim.base import RuptureContext from openquake.hazardlib import const # Create an instance of the gmpe and input contexts. trav2003 = TravasarouEtAl2003() sx = SitesContext() rx = RuptureContext() dx = DistancesContext() # Import data frame. df = pd.read_csv( '/Users/tnye/PROJECTS/Duration/data/dataframes/select_data.csv') # Define IMTs and standard deviations. imt = IA() sd_types = [const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT] # List of event IDs. eventids = [ 'usp000a1b0', 'usp000d6vk', 'usp000fg9t', 'usp000g9h6', 'us2000gge9', 'us1000etmq', 'us2000dwh6', 'nc30228270', 'nc72282711', 'ci14383980',
def signal_end(st, event_time, event_lon, event_lat, event_mag, method=None, vmin=None, floor=None, model=None, epsilon=2.0): """ Estimate end of signal by using a model of the 5-95% significant duration, and adding this value to the "signal_split" time. This probably only works well when the split is estimated with a p-wave picker since the velocity method often ends up with split times that are well before signal actually starts. Args: st (StationStream): Stream of data. event_time (UTCDateTime): Event origin time. event_mag (float): Event magnitude. event_lon (float): Event longitude. event_lat (float): Event latitude. method (str): Method for estimating signal end time. Either 'velocity' or 'model'. vmin (float): Velocity (km/s) for estimating end of signal. Only used if method="velocity". floor (float): Minimum duration (sec) applied along with vmin. model (str): Short name of duration model to use. Must be defined in the gmprocess/data/modules.yml file. epsilon (float): Number of standard deviations; if epsilon is 1.0, then the signal window duration is the mean Ds + 1 standard deviation. Only used for method="model". Returns: trace with stats dict updated to include a stats['processing_parameters']['signal_end'] dictionary. """ # Load openquake stuff if method="model" if method == "model": mod_file = pkg_resources.resource_filename( 'gmprocess', os.path.join('data', 'modules.yml')) with open(mod_file, 'r') as f: mods = yaml.load(f) # Import module cname, mpath = mods['modules'][model] dmodel = getattr(import_module(mpath), cname)() # Set some "conservative" inputs (in that they will tend to give # larger durations). sctx = SitesContext() sctx.vs30 = np.array([180.0]) sctx.z1pt0 = np.array([0.51]) rctx = RuptureContext() rctx.mag = event_mag rctx.rake = -90.0 dur_imt = imt.from_string('RSD595') stddev_types = [const.StdDev.INTRA_EVENT] for tr in st: if not tr.hasParameter('signal_split'): continue if method == "velocity": if vmin is None: raise ValueError('Must specify vmin if method is "velocity".') if floor is None: raise ValueError('Must specify floor if method is "velocity".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 end_time = event_time + max(floor, epi_dist / vmin) elif method == "model": if model is None: raise ValueError('Must specify model if method is "model".') epi_dist = gps2dist_azimuth( lat1=event_lat, lon1=event_lon, lat2=tr.stats['coordinates']['latitude'], lon2=tr.stats['coordinates']['longitude'])[0] / 1000.0 dctx = DistancesContext() # Repi >= Rrup, so substitution here should be conservative # (leading to larger durations). dctx.rrup = np.array([epi_dist]) lnmu, lnstd = dmodel.get_mean_and_stddevs( sctx, rctx, dctx, dur_imt, stddev_types) duration = np.exp(lnmu + epsilon * lnstd[0]) # Get split time split_time = tr.getParameter('signal_split')['split_time'] end_time = split_time + float(duration) else: raise ValueError('method must be either "velocity" or "model".') # Update trace params end_params = { 'end_time': end_time, 'method': method, 'vsplit': vmin, 'floor': floor, 'model': model, 'epsilon': epsilon } tr.setParameter('signal_end', end_params) return st
def get_extent(rupture=None, config=None): """ Method to compute map extent from rupture. There are numerous methods for getting the extent: - It can be specified directly in the config file, - it can be hard coded for specific magnitude ranges in the config file, or - it can be based on the MultiGMPE for the event. All methods except for the first requires a rupture object. If no config is provided then a rupture is required and the extent is based on a generic set of active/stable. Args: rupture (Rupture): A ShakeMap Rupture instance. config (ConfigObj): ShakeMap config object. Returns: tuple: lonmin, lonmax, latmin, latmax rounded to the nearest arc-minute.. """ # ------------------------------------------------------------------------- # Check to see what parameters are specified in the extent config # ------------------------------------------------------------------------- spans = {} bounds = [] if config is not None: if 'extent' in config: if 'magnitude_spans' in config['extent']: if len(config['extent']['magnitude_spans']): if isinstance(config['extent']['magnitude_spans'], dict): spans = config['extent']['magnitude_spans'] if 'bounds' in config['extent']: if 'extent' in config['extent']['bounds']: if config['extent']['bounds']['extent'][0] != -999.0: bounds = config['extent']['bounds']['extent'] # ------------------------------------------------------------------------- # Simplest option: extent was specified in the config, use that and exit. # ------------------------------------------------------------------------- if len(bounds): xmin, ymin, xmax, ymax = bounds return (xmin, xmax, ymin, ymax) if not rupture or not isinstance(rupture, Rupture): raise TypeError('get_extent() requires a rupture object if the extent ' 'is not specified in the config object.') # Find the central point origin = rupture.getOrigin() if isinstance(rupture, (QuadRupture, EdgeRupture)): # For an extended rupture, it is the midpoint between the extent of the # verticies lats = rupture.lats lons = rupture.lons # Remove nans lons = lons[~np.isnan(lons)] lats = lats[~np.isnan(lats)] clat = 0.5 * (np.nanmax(lats) + np.nanmin(lats)) clon = 0.5 * (np.nanmax(lons) + np.nanmin(lons)) else: # For a point source, it is just the epicenter clat = origin.lat clon = origin.lon mag = origin.mag # ------------------------------------------------------------------------- # Second simplest option: spans are hardcoded based on magnitude # ------------------------------------------------------------------------- if len(spans): xmin = None xmax = None ymin = None ymax = None for spankey, span in spans.items(): if mag > span[0] and mag <= span[1]: ymin = clat - span[2] / 2 ymax = clat + span[2] / 2 xmin = clon - span[3] / 2 xmax = clon + span[3] / 2 break if xmin is not None: return (xmin, xmax, ymin, ymax) # ------------------------------------------------------------------------- # Use MultiGMPE to get spans # ------------------------------------------------------------------------- if config is not None: gmpe = MultiGMPE.from_config(config) gmice = get_object_from_config('gmice', 'modeling', config) else: # Put in some default values for conf config = { 'extent': { 'mmi': { 'threshold': 4.5, 'mindist': 100, 'maxdist': 1000 } } } # Generic GMPEs choices based only on active vs stable # as defaults... 
stable = is_stable(origin.lon, origin.lat) if not stable: ASK14 = AbrahamsonEtAl2014() CB14 = CampbellBozorgnia2014() CY14 = ChiouYoungs2014() gmpes = [ASK14, CB14, CY14] site_gmpes = None weights = [1 / 3.0, 1 / 3.0, 1 / 3.0] gmice = WGRW12() else: Fea96 = FrankelEtAl1996MwNSHMP2008() Tea97 = ToroEtAl1997MwNSHMP2008() Sea02 = SilvaEtAl2002MwNSHMP2008() C03 = Campbell2003MwNSHMP2008() TP05 = TavakoliPezeshk2005MwNSHMP2008() AB06p = AtkinsonBoore2006Modified2011() Pea11 = PezeshkEtAl2011() Atk08p = Atkinson2008prime() Sea01 = SomervilleEtAl2001NSHMP2008() gmpes = [ Fea96, Tea97, Sea02, C03, TP05, AB06p, Pea11, Atk08p, Sea01 ] site_gmpes = [AB06p] weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0] gmice = AK07() gmpe = MultiGMPE.from_list(gmpes, weights, default_gmpes_for_site=site_gmpes) min_mmi = config['extent']['mmi']['threshold'] default_imt = imt.SA(1.0) sd_types = [const.StdDev.TOTAL] # Distance context dx = DistancesContext() # This imposes minimum/ maximum distances of: # 80 and 800 km; could make this configurable d_min = config['extent']['mmi']['mindist'] d_max = config['extent']['mmi']['maxdist'] dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000) # Details don't matter for this; assuming vertical surface rupturing fault # with epicenter at the surface. dx.rrup = dx.rjb dx.rhypo = dx.rjb dx.repi = dx.rjb dx.rx = np.zeros_like(dx.rjb) dx.ry0 = np.zeros_like(dx.rjb) dx.rvolc = np.zeros_like(dx.rjb) # Sites context sx = SitesContext() # Set to soft soil conditions sx.vs30 = np.full_like(dx.rjb, 180) sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe) sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool) sx = Sites._addDepthParameters(sx) sx.backarc = np.full_like(sx.vs30, False, dtype=bool) # Rupture context rx = RuptureContext() rx.mag = origin.mag rx.rake = 0.0 # From WC94... rx.width = 10**(-0.76 + 0.27 * rx.mag) rx.dip = 90.0 rx.ztor = origin.depth rx.hypo_depth = origin.depth gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs(sx, rx, dx, default_imt, sd_types) # Convert to MMI gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt) # Minimum distance that exceeds threshold MMI? dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi] if len(dists_exceed_mmi): mindist_km = np.max(dists_exceed_mmi) else: mindist_km = d_min # Get a projection proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4) if isinstance(rupture, (QuadRupture, EdgeRupture)): ruptx, rupty = proj(lons, lats) else: ruptx, rupty = proj(clon, clat) xmin = np.nanmin(ruptx) - mindist_km ymin = np.nanmin(rupty) - mindist_km xmax = np.nanmax(ruptx) + mindist_km ymax = np.nanmax(rupty) + mindist_km # Put a limit on range of aspect ratio dx = xmax - xmin dy = ymax - ymin ar = dy / dx if ar > 1.2: # Inflate x dx_target = dy / 1.2 ddx = dx_target - dx xmax = xmax + ddx / 2 xmin = xmin - ddx / 2 if ar < 0.83: # Inflate y dy_target = dx * 0.83 ddy = dy_target - dy ymax = ymax + ddy / 2 ymin = ymin - ddy / 2 lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True) lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True) # # Round coordinates to the nearest minute -- that should make the # output grid register with common grid resolutions (60c, 30c, # 15c, 7.5c) # logging.debug("Extent: %f, %f, %f, %f" % (lonmin, lonmax, latmin, latmax)) return _round_coord(lonmin[0]), _round_coord(lonmax[0]), \ _round_coord(latmin[0]), _round_coord(latmax[0])
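# Hedged usage sketch: compute a map extent from a rupture with the default
# (no-config) behaviour; ``rupture`` is an assumed ShakeMap Rupture instance.
lonmin, lonmax, latmin, latmax = get_extent(rupture)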
def getSitesContext(self, lldict=None, rock_vs30=None): """ Create a SitesContext object by sampling the current Sites object. Args: lldict: Either - None, in which case the SitesContext for the complete Sites grid is returned, or - A location dictionary (elements are 'lats' and 'lons' and each is a numpy array). Each element must have the same shape. In this case the SitesContext for these locaitons is returned. rock_vs30: Either - None, in which case the SitesContext will reflect the Vs30 grid in the Sites instance, or - A float for the rock Vs30 value, in which case the SitesContext will be constructed for this constant Vs30 value. Returns: SitesContext object. Raises: ShakeLibException: When lat/lon input sequences do not share dimensionality. """ # noqa sctx = SitesContext() if lldict is not None: lats = lldict['lats'] lons = lldict['lons'] latshape = lats.shape lonshape = lons.shape if latshape != lonshape: msg = 'Input lat/lon arrays must have the same dimensions' raise ShakeLibException(msg) if rock_vs30 is not None: tmp = self._Vs30.getValue( lats, lons, default=self._defaultVs30) sctx.vs30 = np.ones_like(tmp) * rock_vs30 else: sctx.vs30 = self._Vs30.getValue( lats, lons, default=self._defaultVs30) sctx.lats = lats sctx.lons = lons else: sctx.lats = self._lats.copy() sctx.lons = self._lons.copy() if rock_vs30 is not None: sctx.vs30 = np.full_like(self._Vs30.getData(), rock_vs30) else: sctx.vs30 = self._Vs30.getData().copy() sctx = Sites._addDepthParameters(sctx) # For ShakeMap purposes, vs30 measured is always Fales sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool) # Backarc should be a numpy array if lldict is not None: backarcgrid = Grid2D(self._backarc, self._Vs30.getGeoDict()) sctx.backarc = backarcgrid.getValue(lats, lons, default=False) else: sctx.backarc = self._backarc.copy() return sctx
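# Hedged usage sketch: request SitesContexts for a handful of points, once on
# the native Vs30 grid and once for a rock reference; ``sites`` is an assumed
# Sites instance and the coordinates are illustrative.
lldict = {'lats': np.array([34.0, 34.1]),
          'lons': np.array([-118.2, -118.3])}
soil_ctx = sites.getSitesContext(lldict)
rock_ctx = sites.getSitesContext(lldict, rock_vs30=760.0)
# rock_ctx.vs30 is uniformly 760 m/s; soil_ctx.vs30 is sampled from the grid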
def getSitesContext(self, lldict=None, rock_vs30=None): """ Create a SitesContext object by sampling the current Sites object. Args: lldict: Either - None, in which case the SitesContext for the complete Sites grid is returned, or - A location dictionary (elements are 'lats' and 'lons' and each is a numpy array). Each element must have the same shape. In this case the SitesContext for these locaitons is returned. rock_vs30: Either - None, in which case the SitesContext will reflect the Vs30 grid in the Sites instance, or - A float for the rock Vs30 value, in which case the SitesContext will be constructed for this constant Vs30 value. Returns: SitesContext object. Raises: ShakeLibException: When lat/lon input sequences do not share dimensionality. """ # noqa sctx = SitesContext() if lldict is not None: lats = lldict['lats'] lons = lldict['lons'] latshape = lats.shape lonshape = lons.shape if latshape != lonshape: msg = 'Input lat/lon arrays must have the same dimensions' raise ShakeLibException(msg) if rock_vs30 is not None: tmp = self._Vs30.getValue( lats, lons, default=self._defaultVs30) sctx.vs30 = np.ones_like(tmp) * rock_vs30 else: sctx.vs30 = self._Vs30.getValue( lats, lons, default=self._defaultVs30) sctx.lats = lats sctx.lons = lons else: sctx.lats = self._lats.copy() sctx.lons = self._lons.copy() if rock_vs30 is not None: sctx.vs30 = np.ones_like(self._Vs30.getData()) * rock_vs30 else: sctx.vs30 = self._Vs30.getData().copy() sctx = Sites._addDepthParameters(sctx) # For ShakeMap purposes, vs30 measured is always Fales sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool) # Backarc should be a numpy array if lldict is not None: backarcgrid = Grid2D(self._backarc, self._Vs30.getGeoDict()) sctx.backarc = backarcgrid.getValue(lats, lons, default=False) else: sctx.backarc = self._backarc.copy() return sctx