def test_expand_1d(self):
    col = SiteCollection(self.SITES)
    col = col.filter(numpy.array([1, 0, 1, 1]))
    data_condensed = numpy.array([5, 6, 7])
    data_expanded = col.expand(data_condensed, total_sites=4, placeholder=100)
    data_expanded_expected = numpy.array([5, 100, 6, 7])
    numpy.testing.assert_array_equal(data_expanded, data_expanded_expected)

def test_expand_no_filtering(self):
    col = SiteCollection(self.SITES)
    data_condensed = numpy.array([3, 2, 1, 0])
    data_expanded = col.expand(data_condensed, total_sites=4, placeholder=100)
    data_expanded_expected = data_condensed
    numpy.testing.assert_array_equal(data_expanded, data_expanded_expected)

def test_expand_2d(self):
    col = SiteCollection(self.SITES)
    col.indices = numpy.array([1, 3, 5, 6])
    data_condensed = numpy.array([[1, 2, 3],
                                  [5, 6, 7],
                                  [10, 11, 12],
                                  [15, 16, 17]])
    data_expanded = col.expand(data_condensed, total_sites=8, placeholder=-1)
    data_expanded_expected = numpy.array([[-1, -1, -1],
                                          [1, 2, 3],
                                          [-1, -1, -1],
                                          [5, 6, 7],
                                          [-1, -1, -1],
                                          [10, 11, 12],
                                          [15, 16, 17],
                                          [-1, -1, -1]])
    numpy.testing.assert_array_equal(data_expanded, data_expanded_expected)

def test_double_filter(self):
    col = SiteCollection(self.SITES)
    filtered = col.filter(numpy.array([True, False, True, True]))
    filtered2 = filtered.filter(numpy.array([False, True, False]))
    arreq = numpy.testing.assert_array_equal
    arreq(filtered2.vs30, [2])
    arreq(filtered2.vs30measured, [True])
    arreq(filtered2.z1pt0, [9])
    arreq(filtered2.z2pt5, [17])
    arreq(filtered2.mesh.lons, [0])
    arreq(filtered2.mesh.lats, [2])
    self.assertIs(filtered2.mesh.depths, None)
    arreq(filtered.indices, [0, 2, 3])
    arreq(filtered2.indices, [2])

    filtered2 = filtered.filter(numpy.array([True, False, True]))
    arreq(filtered2.indices, [0, 3])

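# A minimal standalone sketch (an assumption drawn from the tests above, not
# part of the original suite) of the filter()/expand() round trip they
# exercise: ``filter(mask)`` keeps the selected sites and records their
# positions in the original collection in ``indices``, while ``expand(data,
# total_sites, placeholder)`` scatters condensed per-site values back into an
# array of length ``total_sites``, writing ``placeholder`` into the slots of
# filtered-out sites. ``_expand_sketch`` is a hypothetical helper mimicking
# that behaviour with plain numpy; it is not the library implementation.
def _expand_sketch(data_condensed, indices, total_sites, placeholder):
    expanded_shape = (total_sites,) + data_condensed.shape[1:]
    data_expanded = numpy.empty(expanded_shape, data_condensed.dtype)
    data_expanded.fill(placeholder)
    data_expanded[indices] = data_condensed
    return data_expanded
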
def test_no_correlation_mean_and_intra_respected(self):
    mean1 = 10
    mean2 = 14
    inter = 1e-300
    intra1 = 0.2
    intra2 = 1.6
    p1 = Point(0, 0)
    p2 = Point(0, 0.3)
    sites = [Site(p1, mean1, False, inter, intra1),
             Site(p2, mean2, False, inter, intra2)]
    self.sites = SiteCollection(sites)

    numpy.random.seed(41)
    cormo = JB2009CorrelationModel(vs30_clustering=False)
    lt_corma = cormo.get_lower_triangle_correlation_matrix(self.sites,
                                                           self.imt1)
    s1_intensity, s2_intensity = ground_motion_fields(
        self.rupture, self.sites, [self.imt1], self.gsim,
        truncation_level=None, realizations=6000,
        lt_correlation_matrices={self.imt1: lt_corma}
    )[self.imt1]

    self.assertAlmostEqual(s1_intensity.mean(), mean1, delta=1e-3)
    self.assertAlmostEqual(s2_intensity.mean(), mean2, delta=1e-3)
    self.assertAlmostEqual(s1_intensity.std(), intra1, delta=2e-3)
    self.assertAlmostEqual(s2_intensity.std(), intra2, delta=1e-2)

def setUp(self):
    super(PointSourceSourceFilterTestCase, self).setUp()
    self.sitecol = SiteCollection(self.SITES)
    self.source1 = make_point_source(
        mfd=EvenlyDiscretizedMFD(min_mag=5, bin_width=1,
                                 occurrence_rates=[1]),
        rupture_aspect_ratio=1.9,
        upper_seismogenic_depth=0,
        lower_seismogenic_depth=18.5,
        magnitude_scaling_relationship=PeerMSR(),
        nodal_plane_distribution=PMF([
            (0.5, NodalPlane(strike=1, dip=2, rake=3)),
            (0.5, NodalPlane(strike=1, dip=20, rake=3)),
        ]),
        location=Point(2.0, 0.0),
    )
    self.source2 = make_point_source(
        mfd=EvenlyDiscretizedMFD(min_mag=6.5, bin_width=1,
                                 occurrence_rates=[1]),
        rupture_aspect_ratio=0.5,
        upper_seismogenic_depth=0,
        lower_seismogenic_depth=18.5,
        magnitude_scaling_relationship=PeerMSR(),
        nodal_plane_distribution=PMF([
            (0.5, NodalPlane(strike=1, dip=10, rake=3)),
            (0.5, NodalPlane(strike=1, dip=20, rake=3)),
        ]),
        location=Point(2.0, 0.0),
    )

def test(self):
    s1 = Site(location=Point(10, 20, 30), vs30=1.2, vs30measured=True,
              z1pt0=3.4, z2pt5=5.6)
    s2 = Site(location=Point(-1.2, -3.4, -5.6), vs30=55.4,
              vs30measured=False, z1pt0=66.7, z2pt5=88.9)
    cll = SiteCollection([s1, s2])
    self.assertTrue((cll.vs30 == [1.2, 55.4]).all())
    self.assertTrue((cll.vs30measured == [True, False]).all())
    self.assertTrue((cll.z1pt0 == [3.4, 66.7]).all())
    self.assertTrue((cll.z2pt5 == [5.6, 88.9]).all())
    self.assertTrue((cll.mesh.lons == [10, -1.2]).all())
    self.assertTrue((cll.mesh.lats == [20, -3.4]).all())
    self.assertIs(cll.mesh.depths, None)
    for arr in (cll.vs30, cll.z1pt0, cll.z2pt5):
        self.assertIsInstance(arr, numpy.ndarray)
        self.assertEqual(arr.flags.writeable, False)
        self.assertEqual(arr.dtype, float)
    self.assertIsInstance(cll.vs30measured, numpy.ndarray)
    self.assertEqual(cll.vs30measured.flags.writeable, False)
    self.assertEqual(cll.vs30measured.dtype, bool)
    self.assertEqual(len(cll), 2)

def test_unknown_distance_error(self):
    self.gsim_class.REQUIRES_DISTANCES.add('jump height')
    err = "FakeGSIM requires unknown distance measure 'jump height'"
    sites = SiteCollection([self.site1])
    self._assert_value_error(self.gsim.make_contexts, err,
                             site_collection=sites, rupture=self.rupture)

def test_unknown_rupture_param_error(self):
    self.gsim_class.REQUIRES_RUPTURE_PARAMETERS.add('stuff')
    err = "FakeGSIM requires unknown rupture parameter 'stuff'"
    sites = SiteCollection([self.site1])
    self._assert_value_error(self.gsim.make_contexts, err,
                             site_collection=sites, rupture=self.rupture)

def test_source_filter_filter_all_out(self):
    col = SiteCollection([Site(Point(10, 10), 1, True, 2, 3),
                          Site(Point(11, 12), 2, True, 2, 3),
                          Site(Point(13, 14), 1, True, 2, 3)])
    for int_dist in (0, 1, 10, 100, 1000):
        filtered = self.source.filter_sites_by_distance_to_source(
            integration_distance=int_dist, sites=col
        )
        self.assertIs(filtered, None)

def test_case_11(self):
    hypocenter_probability = (
        Decimal(1) / len(test_data.SET1_CASE11_HYPOCENTERS)
    )
    hypocenter_pmf = PMF(
        [(hypocenter_probability, hypocenter)
         for hypocenter in test_data.SET1_CASE11_HYPOCENTERS]
    )
    # apart from the hypocenter PMF, this repeats case 10
    sources = [AreaSource(
        source_id='area', name='area',
        tectonic_region_type=const.TRT.ACTIVE_SHALLOW_CRUST,
        mfd=test_data.SET1_CASE11_MFD,
        nodal_plane_distribution=PMF([(1, NodalPlane(0.0, 90.0, 0.0))]),
        hypocenter_distribution=hypocenter_pmf,
        upper_seismogenic_depth=0.0,
        lower_seismogenic_depth=10.0,
        magnitude_scaling_relationship=PeerMSR(),
        rupture_aspect_ratio=test_data.SET1_RUPTURE_ASPECT_RATIO,
        polygon=test_data.SET1_CASE11_SOURCE_POLYGON,
        area_discretization=30.0,
        rupture_mesh_spacing=10.0
    )]
    sites = SiteCollection([
        test_data.SET1_CASE11_SITE1, test_data.SET1_CASE11_SITE2,
        test_data.SET1_CASE11_SITE3, test_data.SET1_CASE11_SITE4
    ])
    gsims = {const.TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
    truncation_level = 0
    time_span = 1.0
    imts = {test_data.IMT: test_data.SET1_CASE11_IMLS}

    curves = hazard_curves(sources, sites, imts, time_span, gsims,
                           truncation_level)
    s1hc, s2hc, s3hc, s4hc = curves[test_data.IMT]

    assert_hazard_curve_is(self, s1hc, test_data.SET1_CASE11_SITE1_POES,
                           tolerance=2e-3)
    assert_hazard_curve_is(self, s2hc, test_data.SET1_CASE11_SITE2_POES,
                           tolerance=2e-3)
    assert_hazard_curve_is(self, s3hc, test_data.SET1_CASE11_SITE3_POES,
                           tolerance=2e-3)
    assert_hazard_curve_is(self, s4hc, test_data.SET1_CASE11_SITE4_POES,
                           tolerance=2e-3)

class JB2009LowerTriangleCorrelationMatrixTestCase(unittest.TestCase):
    SITECOL = SiteCollection([Site(Point(2, -40), 1, True, 1, 1),
                              Site(Point(2, -40.1), 1, True, 1, 1),
                              Site(Point(2, -39.9), 1, True, 1, 1)])

    def test(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        lt = cormo.get_lower_triangle_correlation_matrix(self.SITECOL, PGA())
        aaae(lt, [[1.0, 0.0, 0.0],
                  [1.97514806e-02, 9.99804920e-01, 0.0],
                  [1.97514806e-02, 5.42206860e-20, 9.99804920e-01]])

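# A minimal sketch (an assumption, not asserted by the test above) of the
# relationship being exercised: the "lower triangle" matrix is expected to be
# the Cholesky factor L of the site-to-site correlation matrix C, so that
# L dot L.T reproduces C and correlated residuals can be drawn as L dot eps,
# with eps a vector of independent standard normals. The helper name below is
# hypothetical and only illustrates that round trip with plain numpy.
def _cholesky_roundtrip_sketch(correlation_matrix):
    lower = numpy.linalg.cholesky(correlation_matrix)
    numpy.testing.assert_allclose(numpy.dot(lower, lower.T),
                                  correlation_matrix)
    return lower
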
def test_filter(self):
    col = SiteCollection(self.SITES)
    filtered = col.filter(numpy.array([True, False, True, False]))
    self.assertIsInstance(filtered, SiteCollection)
    arreq = numpy.testing.assert_array_equal
    arreq(filtered.vs30, [1.2, 2])
    arreq(filtered.vs30measured, [True, True])
    arreq(filtered.z1pt0, [3, 9])
    arreq(filtered.z2pt5, [5, 17])
    arreq(filtered.mesh.lons, [10, 0])
    arreq(filtered.mesh.lats, [20, 2])
    self.assertIs(filtered.mesh.depths, None)

    filtered = col.filter(numpy.array([False, True, True, True]))
    self.assertIsInstance(filtered, SiteCollection)
    arreq(filtered.vs30, [55.4, 2, 4])
    arreq(filtered.vs30measured, [False, True, False])
    arreq(filtered.z1pt0, [6, 9, 22])
    arreq(filtered.z2pt5, [8, 17, 11])
    arreq(filtered.mesh.lons, [11, 0, 1])
    arreq(filtered.mesh.lats, [12, 2, 1])
    self.assertIs(filtered.mesh.depths, None)

def setUp(self):
    class FakeSource(SeismicSource):
        iter_ruptures = None
        get_rupture_enclosing_polygon = None

    self.source_class = FakeSource
    mfd = EvenlyDiscretizedMFD(min_mag=3, bin_width=1,
                               occurrence_rates=[5, 6, 7])
    self.source = FakeSource('source_id', 'name', const.TRT.VOLCANIC,
                             mfd=mfd, rupture_mesh_spacing=2,
                             magnitude_scaling_relationship=PeerMSR(),
                             rupture_aspect_ratio=1)
    self.sitecol = SiteCollection(self.SITES)

def test_some_values(self):
    self.gsim_class.REQUIRES_DISTANCES = set('rjb rx'.split())
    self.gsim_class.REQUIRES_RUPTURE_PARAMETERS = set('mag rake'.split())
    self.gsim_class.REQUIRES_SITES_PARAMETERS = set('vs30 z1pt0'.split())
    sites = SiteCollection([self.site1, self.site2])
    sctx, rctx, dctx = self.gsim.make_contexts(sites, self.rupture)
    self.assertEqual((rctx.mag, rctx.rake), (123.45, 123.56))
    self.assertTrue((sctx.vs30 == (456, 1456)).all())
    self.assertTrue((sctx.z1pt0 == (12.1, 112.1)).all())
    self.assertTrue((dctx.rx == (4, 5)).all())
    self.assertFalse(hasattr(rctx, 'dip'))
    self.assertFalse(hasattr(sctx, 'vs30measured'))
    self.assertFalse(hasattr(sctx, 'z2pt5'))
    self.assertFalse(hasattr(dctx, 'rrup'))
    self.assertFalse(hasattr(dctx, 'ztor'))
    self.assertEqual(self.fake_surface.call_counts,
                     {'get_rx_distance': 1,
                      'get_joyner_boore_distance': 1})

def test_array_instead_of_matrix(self):
    mean = 10
    inter = 1e-300
    intra = 1
    points = [Point(0, 0), Point(0, 0.23)]
    sites = [Site(point, mean, False, inter, intra) for point in points]
    self.sites = SiteCollection(sites)

    numpy.random.seed(43)
    cormo = JB2009CorrelationModel(vs30_clustering=False)
    corma = cormo.get_correlation_matrix(self.sites, self.imt1)
    lt_corma = cormo.get_lower_triangle_correlation_matrix(self.sites,
                                                           self.imt1)
    gmfs = ground_motion_fields(
        self.rupture, self.sites, [self.imt1], self.gsim,
        truncation_level=None, realizations=6000,
        lt_correlation_matrices={self.imt1: lt_corma.A}
    )

    sampled_corma = numpy.corrcoef(gmfs[self.imt1])
    assert_allclose(corma, sampled_corma, rtol=0, atol=0.02)

def test_all_values(self):
    self.gsim_class.REQUIRES_DISTANCES = set(
        'rjb rx rrup repi rhypo'.split())
    self.gsim_class.REQUIRES_RUPTURE_PARAMETERS = set(
        'mag rake dip ztor hypo_depth'.split())
    self.gsim_class.REQUIRES_SITES_PARAMETERS = set(
        'vs30 vs30measured z1pt0 z2pt5'.split())
    sites = SiteCollection([self.site1, self.site2])
    sctx, rctx, dctx = self.gsim.make_contexts(sites, self.rupture)
    self.assertIsInstance(sctx, SitesContext)
    self.assertIsInstance(rctx, RuptureContext)
    self.assertIsInstance(dctx, DistancesContext)
    self.assertEqual(rctx.mag, 123.45)
    self.assertEqual(rctx.rake, 123.56)
    self.assertEqual(rctx.dip, 45.4545)
    self.assertEqual(rctx.ztor, 30)
    self.assertEqual(rctx.hypo_depth, 40)
    self.assertTrue((sctx.vs30 == [456, 1456]).all())
    self.assertTrue((sctx.vs30measured == [False, True]).all())
    self.assertTrue((sctx.z1pt0 == [12.1, 112.1]).all())
    self.assertTrue((sctx.z2pt5 == [15.1, 115.1]).all())
    self.assertTrue((dctx.rjb == [6, 7]).all())
    self.assertTrue((dctx.rx == [4, 5]).all())
    self.assertTrue((dctx.rrup == [10, 11]).all())
    numpy.testing.assert_almost_equal(dctx.rhypo,
                                      [162.18749272, 802.72247682])
    numpy.testing.assert_almost_equal(dctx.repi,
                                      [157.17755181, 801.72524895])
    self.assertEqual(self.fake_surface.call_counts,
                     {'get_top_edge_depth': 1,
                      'get_rx_distance': 1,
                      'get_joyner_boore_distance': 1,
                      'get_dip': 1,
                      'get_min_distance': 1})

class JB2009CorrelationMatrixTestCase(unittest.TestCase):
    SITECOL = SiteCollection([Site(Point(2, -40), 1, True, 1, 1),
                              Site(Point(2, -40.1), 1, True, 1, 1),
                              Site(Point(2, -40), 1, True, 1, 1),
                              Site(Point(2, -39.9), 1, True, 1, 1)])

    def test_no_clustering(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        imt = SA(period=0.1, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.03823366, 1, 0.03823366],
                     [0.03823366, 1, 0.03823366, 0.00146181],
                     [1, 0.03823366, 1, 0.03823366],
                     [0.03823366, 0.00146181, 0.03823366, 1]])

        imt = SA(period=0.95, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.26107857, 1, 0.26107857],
                     [0.26107857, 1, 0.26107857, 0.06816202],
                     [1, 0.26107857, 1, 0.26107857],
                     [0.26107857, 0.06816202, 0.26107857, 1]])

    def test_clustered(self):
        cormo = JB2009CorrelationModel(vs30_clustering=True)
        imt = SA(period=0.001, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.44046654, 1, 0.44046654],
                     [0.44046654, 1, 0.44046654, 0.19401077],
                     [1, 0.44046654, 1, 0.44046654],
                     [0.44046654, 0.19401077, 0.44046654, 1]])

        imt = SA(period=0.5, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.36612758, 1, 0.36612758],
                     [0.36612758, 1, 0.36612758, 0.1340494],
                     [1, 0.36612758, 1, 0.36612758],
                     [0.36612758, 0.1340494, 0.36612758, 1]])

    def test_period_one_and_above(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        cormo2 = JB2009CorrelationModel(vs30_clustering=True)
        imt = SA(period=1.0, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.2730787, 1, 0.2730787],
                     [0.2730787, 1, 0.2730787, 0.07457198],
                     [1, 0.2730787, 1, 0.2730787],
                     [0.2730787, 0.07457198, 0.2730787, 1]])
        corma2 = cormo2.get_correlation_matrix(self.SITECOL, imt)
        self.assertTrue((corma == corma2).all())

        imt = SA(period=10.0, damping=5)
        corma = cormo.get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.56813402, 1, 0.56813402],
                     [0.56813402, 1, 0.56813402, 0.32277627],
                     [1, 0.56813402, 1, 0.56813402],
                     [0.56813402, 0.32277627, 0.56813402, 1]])
        corma2 = cormo2.get_correlation_matrix(self.SITECOL, imt)
        self.assertTrue((corma == corma2).all())

    def test_pga(self):
        sa = SA(period=1e-50, damping=5)
        pga = PGA()

        cormo = JB2009CorrelationModel(vs30_clustering=False)
        corma = cormo.get_correlation_matrix(self.SITECOL, sa)
        corma2 = cormo.get_correlation_matrix(self.SITECOL, pga)
        self.assertTrue((corma == corma2).all())

        cormo = JB2009CorrelationModel(vs30_clustering=True)
        corma = cormo.get_correlation_matrix(self.SITECOL, sa)
        corma2 = cormo.get_correlation_matrix(self.SITECOL, pga)
        self.assertTrue((corma == corma2).all())

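# A minimal sketch (an assumption based on Jayaram & Baker, 2009; not part of
# the original tests) of the correlation model behind the values above:
# rho(h) = exp(-3 * h / b), where h is the site separation in km and the range
# b depends on spectral period T and on vs30 clustering (b = 8.5 + 17.2 * T
# without clustering and b = 40.7 - 15.0 * T with clustering for T < 1 s;
# b = 22.0 + 3.7 * T for T >= 1 s). For example, two sites 0.1 degrees of
# latitude apart (roughly 11.1 km) at T = 0.1 s without clustering give
# exp(-3 * 11.1 / 10.22) ~= 0.038, consistent with the 0.03823366 entries
# asserted in test_no_clustering. The helper name is hypothetical.
def _jb2009_rho_sketch(distance_km, period, vs30_clustering=False):
    import math
    if period < 1.0:
        b = 40.7 - 15.0 * period if vs30_clustering else 8.5 + 17.2 * period
    else:
        b = 22.0 + 3.7 * period
    return math.exp(-3.0 * distance_km / b)
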
def test_filter_all_in(self):
    col = SiteCollection(self.SITES)
    filtered = col.filter(numpy.ones(len(self.SITES), bool))
    self.assertIs(filtered, col)

def test_filter_all_out(self):
    col = SiteCollection(self.SITES)
    filtered = col.filter(numpy.zeros(len(self.SITES), bool))
    self.assertIs(filtered, None)

def test_case_5(self):
    # only the mfd differs from case 2
    sources = [SimpleFaultSource(
        source_id='fault1', name='fault1',
        tectonic_region_type=const.TRT.ACTIVE_SHALLOW_CRUST,
        mfd=test_data.SET1_CASE5_MFD,
        rupture_mesh_spacing=1.0,
        magnitude_scaling_relationship=PeerMSR(),
        rupture_aspect_ratio=test_data.SET1_RUPTURE_ASPECT_RATIO,
        upper_seismogenic_depth=test_data.SET1_CASE1TO9_UPPER_SEISMOGENIC_DEPTH,
        lower_seismogenic_depth=test_data.SET1_CASE1TO9_LOWER_SEISMOGENIC_DEPTH,
        fault_trace=test_data.SET1_CASE1TO9_FAULT_TRACE,
        dip=test_data.SET1_CASE1TO9_DIP,
        rake=test_data.SET1_CASE1TO9_RAKE
    )]
    sites = SiteCollection([
        test_data.SET1_CASE1TO9_SITE1, test_data.SET1_CASE1TO9_SITE2,
        test_data.SET1_CASE1TO9_SITE3, test_data.SET1_CASE1TO9_SITE4,
        test_data.SET1_CASE1TO9_SITE5, test_data.SET1_CASE1TO9_SITE6,
        test_data.SET1_CASE1TO9_SITE7
    ])
    gsims = {const.TRT.ACTIVE_SHALLOW_CRUST: SadighEtAl1997()}
    truncation_level = 0
    time_span = 1.0
    imts = {test_data.IMT: test_data.SET1_CASE5_IMLS}

    curves = hazard_curves(sources, sites, imts, time_span, gsims,
                           truncation_level)
    s1hc, s2hc, s3hc, s4hc, s5hc, s6hc, s7hc = curves[test_data.IMT]

    assert_hazard_curve_is(self, s1hc, test_data.SET1_CASE5_SITE1_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s2hc, test_data.SET1_CASE5_SITE2_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s3hc, test_data.SET1_CASE5_SITE3_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s4hc, test_data.SET1_CASE5_SITE4_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s5hc, test_data.SET1_CASE5_SITE5_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s6hc, test_data.SET1_CASE5_SITE6_POES,
                           tolerance=1e-3)
    assert_hazard_curve_is(self, s7hc, test_data.SET1_CASE5_SITE7_POES,
                           tolerance=1e-3)

def _collect_bins_data(sources, site, imt, iml, gsims, tom,
                       truncation_level, n_epsilons,
                       source_site_filter, rupture_site_filter):
    """
    Extract values of magnitude, distance, closest point, tectonic region
    types and PoE distribution.

    This function processes the source model (generates ruptures) and
    collects all needed parameters into arrays. It also defines the sequence
    of tectonic region type bins.
    """
    mags = []
    dists = []
    lons = []
    lats = []
    tect_reg_types = []
    joint_probs = []
    sitecol = SiteCollection([site])
    sitemesh = sitecol.mesh

    _next_trt_num = 0
    trt_nums = {}

    sources_sites = ((source, sitecol) for source in sources)
    # here we ignore the filtered site collection because either it is the
    # same as the original one (with one site), or the source/rupture is
    # filtered out and doesn't show up in the filter's output
    for source, s_sites in source_site_filter(sources_sites):
        tect_reg = source.tectonic_region_type
        gsim = gsims[tect_reg]
        if tect_reg not in trt_nums:
            trt_nums[tect_reg] = _next_trt_num
            _next_trt_num += 1
        tect_reg = trt_nums[tect_reg]

        ruptures_sites = ((rupture, s_sites)
                          for rupture in source.iter_ruptures(tom))
        for rupture, r_sites in rupture_site_filter(ruptures_sites):
            # extract rupture parameters of interest
            mags.append(rupture.mag)
            [jb_dist] = rupture.surface.get_joyner_boore_distance(sitemesh)
            dists.append(jb_dist)
            [closest_point] = rupture.surface.get_closest_points(sitemesh)
            lons.append(closest_point.longitude)
            lats.append(closest_point.latitude)
            tect_reg_types.append(tect_reg)

            # compute the conditional probability of exceeding iml given
            # the current rupture and each epsilon level, that is
            # ``P(IMT >= iml | rup, epsilon_bin)`` for each epsilon bin
            sctx, rctx, dctx = gsim.make_contexts(sitecol, rupture)
            [poes_given_rup_eps] = gsim.disaggregate_poe(
                sctx, rctx, dctx, imt, iml, truncation_level, n_epsilons
            )

            # compute the probability of the rupture occurring once,
            # that is ``P(rup)``
            p_rup = rupture.get_probability_one_occurrence()

            # compute the joint probability of rupture occurrence and
            # iml exceedance for the different epsilon levels
            joint_probs.append(poes_given_rup_eps * p_rup)

    mags = numpy.array(mags, float)
    dists = numpy.array(dists, float)
    lons = numpy.array(lons, float)
    lats = numpy.array(lats, float)
    tect_reg_types = numpy.array(tect_reg_types, int)
    joint_probs = numpy.array(joint_probs, float)

    trt_bins = [
        trt for (num, trt) in sorted((num, trt)
                                     for (trt, num) in trt_nums.items())
    ]

    return mags, dists, lons, lats, joint_probs, tect_reg_types, trt_bins

def setUp(self):
    self.mean1 = 1
    self.mean2 = 5
    self.mean3 = 10
    self.mean4567 = 10
    self.inter1 = 0.4
    self.inter2 = 1
    self.inter3 = 1.4
    self.inter45 = 1e-300
    self.inter67 = 1
    self.intra1 = 0.7
    self.intra2 = 2
    self.intra3 = 0.3
    self.intra45 = 1
    self.intra67 = 1e-300

    self.stddev1 = (self.inter1 ** 2 + self.intra1 ** 2) ** 0.5
    self.stddev2 = (self.inter2 ** 2 + self.intra2 ** 2) ** 0.5
    self.stddev3 = (self.inter3 ** 2 + self.intra3 ** 2) ** 0.5
    self.stddev45 = (self.inter45 ** 2 + self.intra45 ** 2) ** 0.5
    self.stddev67 = (self.inter67 ** 2 + self.intra67 ** 2) ** 0.5

    p = [Point(0, 0), Point(0, 0.1), Point(0, 0.2), Point(0, 0.3),
         Point(0, 0.4), Point(0, 0.5), Point(0, 0.6)]
    sites = [Site(p[0], self.mean1, False, self.inter1, self.intra1),
             Site(p[1], self.mean2, True, self.inter2, self.intra2),
             Site(p[2], self.mean3, False, self.inter3, self.intra3),
             Site(p[3], self.mean4567, True, self.inter45, self.intra45),
             Site(p[4], self.mean4567, False, self.inter45, self.intra45),
             Site(p[5], self.mean4567, True, self.inter67, self.intra67),
             Site(p[6], self.mean4567, False, self.inter67, self.intra67)]
    self.sites = SiteCollection(sites)

    self.rupture = object()
    self.imt1 = SA(10, 5)
    self.imt2 = PGV()

    class FakeGSIM(object):
        expect_stddevs = True
        expect_same_sitecol = True

        def make_contexts(gsim, sites, rupture):
            if gsim.expect_same_sitecol:
                self.assertIs(sites, self.sites)
            else:
                self.assertIsNot(sites, self.sites)
            self.assertIs(rupture, self.rupture)
            return sites.vs30, sites.z1pt0, sites.z2pt5

        def get_mean_and_stddevs(gsim, mean, std_inter, std_intra,
                                 imt, stddev_types):
            assert imt is self.imt1 or imt is self.imt2
            if gsim.expect_stddevs:
                self.assertEqual(stddev_types, [const.StdDev.INTER_EVENT,
                                                const.StdDev.INTRA_EVENT])
                # + 10 is needed to make sure that to_imt_unit_values()
                # is called on the result of gmf calc
                return mean + 10, [std_inter, std_intra]
            else:
                self.assertEqual(stddev_types, [])
                return mean + 10, []

        def to_imt_unit_values(gsim, intensities):
            return intensities - 10.

    def rupture_site_filter(rupture_site_gen):
        [(rupture, sites)] = rupture_site_gen
        assert rupture is self.rupture
        assert sites is self.sites
        yield rupture, sites.filter(sites.vs30measured)

    self.rupture_site_filter = rupture_site_filter
    self.gsim = FakeGSIM()

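# A minimal standalone sketch (an assumption, not part of the original setUp)
# of why the stddev* attributes above are sqrt(inter**2 + intra**2): the
# ground-motion residual is modelled as the sum of independent inter-event
# and intra-event normal terms, so their variances add. The helper name is
# hypothetical; it returns the sampled and the analytical total stddev.
def _total_stddev_sketch(inter, intra, realizations=1000000, seed=42):
    rng = numpy.random.RandomState(seed)
    residuals = (inter * rng.standard_normal(realizations) +
                 intra * rng.standard_normal(realizations))
    return residuals.std(), (inter ** 2 + intra ** 2) ** 0.5
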
def setUp(self):
    super(PointSourceRuptureFilterTestCase, self).setUp()
    self.hypocenter = Point(2, 0, 50)
    self.sitecol = SiteCollection(self.SITES)