def test_filter(self):
    """Source and rupture filters must restrict the generated event set."""
    # Source filter keeping only the first source; old-style API taking
    # sources and sites as separate arguments and yielding
    # (source, filtered_sites) pairs — here sites are not filtered (None).
    def extract_first_source(sources, sites):
        for source in sources:
            yield source, None
            break
    # fake site collection; only its presence matters for the filters
    fake_sites = [1, 2, 3]
    # with only source1 kept, the SES contains only its ruptures
    ses = list(
        stochastic_event_set(
            [self.source1, self.source2],
            fake_sites,
            extract_first_source
        ))
    self.assertEqual(ses, [self.r1_1, self.r1_2, self.r1_2])

    # Rupture filter keeping only the first rupture of each source,
    # same (item, sites) pair convention as above.
    def extract_first_rupture(ruptures, sites):
        for rupture in ruptures:
            yield rupture, None
            break
    # with both filters active only the first rupture of source1 survives
    ses = list(
        stochastic_event_set(
            [self.source1, self.source2],
            fake_sites,
            extract_first_source,
            extract_first_rupture
        ))
    self.assertEqual(ses, [self.r1_1])
    # NOTE(review): these reassignments after the last assertion look like
    # leftover fixture-restoring code — confirm they are intentional
    self.source1 = self.FakeSource(1, [self.r1_1, self.r1_0, self.r1_2])
    self.source2 = self.FakeSource(2, [self.r2_1])
def test_filter(self):
    """Source and rupture filters must restrict the generated event set."""
    fake_sites = [1, 2, 3]  # dummy site collection, content is irrelevant

    def first_source_only(sources_sites):
        # keep only the first (source, sites) pair; sites are dropped
        for src, _ in sources_sites:
            yield src, None
            return

    # with only source1 kept, the SES contains only its ruptures
    result = list(
        stochastic_event_set(
            [self.source1, self.source2],
            fake_sites,
            first_source_only))
    self.assertEqual(result, [self.r1_1, self.r1_2, self.r1_2])

    def first_rupture_only(ruptures_sites):
        # keep only the first (rupture, sites) pair; sites are dropped
        for rup, _ in ruptures_sites:
            yield rup, None
            return

    # with both filters active only the first rupture of source1 survives
    result = list(
        stochastic_event_set(
            [self.source1, self.source2],
            fake_sites,
            first_source_only,
            first_rupture_only))
    self.assertEqual(result, [self.r1_1])
    # restore the fixture sources
    self.source1 = self.FakeSource(1, [self.r1_1, self.r1_0, self.r1_2])
    self.source2 = self.FakeSource(2, [self.r2_1])
def test_nankai(self):
    """Sample ruptures from a group of 15 mutually exclusive sources."""
    # source model for the Nankai region provided by M. Pagani
    source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
    # it has a single group containing 15 mutex sources
    [group] = nrml.to_python(source_model)
    # sanity check on the mutex weights read from the XML (sum to 1)
    aae([src.mutex_weight for src in group],
        [0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125,
         0.0125, 0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1])
    seed = 42
    # one serial number per rupture in the whole group, starting at `seed`
    rup_serial = numpy.arange(seed, seed + group.tot_ruptures,
                              dtype=numpy.uint32)
    start = 0
    # assign ids and a contiguous slice of serials to each source
    for i, src in enumerate(group):
        src.id = i
        nr = src.num_ruptures
        src.serial = rup_serial[start:start + nr]
        start += nr
    lonlat = 135.68, 35.68
    # single site close to the sources; 800 presumably is vs30 — confirm
    site = Site(geo.Point(*lonlat), 800, z1pt0=100., z2pt5=1.)
    s_filter = SourceFilter(SiteCollection([site]), {})
    param = dict(ses_per_logic_tree_path=10, filter_distance='rjb',
                 gsims=[SiMidorikawa1999SInter()])
    # merge the dictionaries produced by sample_ruptures
    dic = sum(sample_ruptures(group, param, s_filter), {})
    self.assertEqual(len(dic['rup_array']), 5)
    self.assertEqual(len(dic['calc_times']), 15)  # mutex sources
    # test no filtering 1
    ruptures = list(stochastic_event_set(group))
    self.assertEqual(len(ruptures), 19)
    # test no filtering 2
    ruptures = sum(sample_ruptures(group, param), {})['rup_array']
    self.assertEqual(len(ruptures), 5)
def test_ses_generation_from_parametric_source_with_filtering(self):
    """
    Build a SES from two area sources with a 100 km source-site filter.

    The single site is co-located with the center of area1, whose radius
    is exactly 100 km, while area2 is centered at (5., 5.), roughly
    500 km away: the filter must drop area2, so the rates extracted from
    the SES must match (approximately) the MFD of area1 alone.
    """
    numpy.random.seed(123)  # make the stochastic sampling reproducible
    site_coll = SiteCollection([
        Site(location=Point(0., 0.), vs30=760, vs30measured=True,
             z1pt0=40., z2pt5=2.)])
    event_set = stochastic_event_set(
        [self.area1, self.area2],
        sites=site_coll,
        source_site_filter=filters.SourceFilter(site_coll, 100.))
    observed = self._extract_rates(
        event_set, time_span=self.time_span,
        bins=numpy.arange(5., 6.6, 0.1))
    expected = numpy.array(
        [rate for _mag, rate in self.mfd.get_annual_occurrence_rates()])
    numpy.testing.assert_allclose(observed, expected, rtol=0, atol=1e-4)
def test_nankai(self):
    """Sample ruptures from a group of 15 mutually exclusive sources."""
    # source model for the Nankai region provided by M. Pagani
    source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
    # it has a single group containing 15 mutex sources
    [group] = nrml.to_python(source_model)
    # assign sequential ids and a common group id to the sources
    for i, src in enumerate(group):
        src.id = i
        src.grp_id = 0
    # sanity check on the mutex weights read from the XML (sum to 1)
    aae([src.mutex_weight for src in group], [
        0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125,
        0.0125, 0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1
    ])
    param = dict(ses_per_logic_tree_path=10, ses_seed=42, imtls={})
    cmaker = contexts.ContextMaker('*', [SiMidorikawa1999SInter()], param)
    # merge the dictionaries produced by sample_ruptures
    dic = sum(sample_ruptures(group, cmaker), {})
    self.assertEqual(len(dic['rup_array']), 8)
    self.assertEqual(len(dic['calc_times']), 15)  # mutex sources
    # test no filtering 1
    ruptures = list(stochastic_event_set(group))
    self.assertEqual(len(ruptures), 19)
    # test no filtering 2: sampling again must give the same count
    ruptures = sum(sample_ruptures(group, cmaker), {})['rup_array']
    self.assertEqual(len(ruptures), 8)
def test_nankai(self):
    """Sample ruptures from a group of 15 mutually exclusive sources."""
    # source model for the Nankai region provided by M. Pagani
    source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
    # it has a single group containing 15 mutex sources
    [group] = nrml.to_python(source_model)
    # sanity check on the mutex weights read from the XML (sum to 1)
    aae([src.mutex_weight for src in group],
        [0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125,
         0.0125, 0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1])
    seed = 42
    start = 0
    # assign ids and a per-source serial offset
    # NOTE(review): `start + seed` is a scalar offset, unlike older
    # versions that assigned an array slice of serials — confirm intended
    for i, src in enumerate(group):
        src.id = i
        nr = src.num_ruptures
        src.serial = start + seed
        start += nr
    param = dict(ses_per_logic_tree_path=10, filter_distance='rjb',
                 gsims=[SiMidorikawa1999SInter()])
    # trivial filter: no site collection, no distance limits
    sf = calc.filters.SourceFilter(None, {})
    # merge the dictionaries produced by sample_ruptures
    dic = sum(sample_ruptures(group, sf, param), {})
    self.assertEqual(len(dic['rup_array']), 5)
    self.assertEqual(len(dic['calc_times']), 15)  # mutex sources
    # test no filtering 1
    ruptures = list(stochastic_event_set(group))
    self.assertEqual(len(ruptures), 19)
    # test no filtering 2: sampling again must give the same count
    ruptures = sum(sample_ruptures(group, sf, param), {})['rup_array']
    self.assertEqual(len(ruptures), 5)
def test_ses_generation_from_parametric_source_with_filtering(self):
    """
    Build a SES from two area sources with a 100 km source-site filter.

    The single site sits at the center of area1, whose radius is exactly
    100 km, while area2 is centered at (5., 5.), roughly 500 km away:
    the filter must drop area2, so the rates extracted from the SES must
    match (approximately) the MFD of area1 alone.
    """
    numpy.random.seed(123)  # make the stochastic sampling reproducible
    site_coll = SiteCollection([
        Site(location=Point(0., 0.), vs30=760, vs30measured=True,
             z1pt0=40., z2pt5=2.)])
    srcfilter = filters.SourceFilter(
        site_coll, filters.MagDepDistance.new('100'))
    event_set = stochastic_event_set(
        [self.area1, self.area2], srcfilter)
    observed = self._extract_rates(
        event_set, time_span=self.time_span,
        bins=numpy.arange(5., 6.6, 0.1))
    expected = numpy.array(
        [rate for _mag, rate in self.mfd.get_annual_occurrence_rates()])
    numpy.testing.assert_allclose(observed, expected, rtol=0, atol=1e-4)
def test_nankai(self):
    """Sample ruptures from a group of 15 mutually exclusive sources."""
    # source model for the Nankai region provided by M. Pagani
    source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
    # it has a single group containing 15 mutex sources
    [group] = nrml.to_python(source_model)
    # sanity check on the mutex weights read from the XML (sum to 1)
    aae([src.mutex_weight for src in group],
        [0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125,
         0.0125, 0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1])
    seed = 42
    start = 0
    # assign ids and a per-source serial offset
    # NOTE(review): `start + seed` is a scalar offset, unlike older
    # versions that assigned an array slice of serials — confirm intended
    for i, src in enumerate(group):
        src.id = i
        nr = src.num_ruptures
        src.serial = start + seed
        start += nr
    param = dict(ses_per_logic_tree_path=10, filter_distance='rjb',
                 gsims=[SiMidorikawa1999SInter()])
    # trivial filter: no site collection, no distance limits
    sf = calc.filters.SourceFilter(None, {})
    # merge the dictionaries produced by sample_ruptures
    dic = sum(sample_ruptures(group, sf, param), {})
    self.assertEqual(len(dic['rup_array']), 6)
    self.assertEqual(len(dic['calc_times']), 15)  # mutex sources
    # test no filtering 1
    ruptures = list(stochastic_event_set(group))
    self.assertEqual(len(ruptures), 19)
    # test no filtering 2: sampling again must give the same count
    ruptures = sum(sample_ruptures(group, sf, param), {})['rup_array']
    self.assertEqual(len(ruptures), 6)
def test_source_errors(self):
    """
    A source failing during SES generation must raise a ValueError
    whose message reports the id of the offending source.
    """
    bad_source = self.FailSource(2, [self.r2_1])
    with self.assertRaises(ValueError) as ctx:
        # force full iteration so the failing source is actually reached
        list(stochastic_event_set([self.source1, bad_source]))
    self.assertEqual(
        'An error occurred with source id=2. Error: Something bad happened',
        str(ctx.exception))
def test_ses_generation_from_parametric_source(self):
    """
    Build a SES from one area source with a known magnitude frequency
    distribution (MFD). The histogram of sampled magnitudes, normalized
    by the total duration of the event set, must approximately
    reproduce the original MFD.
    """
    numpy.random.seed(123)  # make the stochastic sampling reproducible
    event_set = stochastic_event_set([self.area1])
    mag_bins = numpy.arange(5., 6.6, 0.1)
    observed = self._extract_rates(
        event_set, time_span=self.time_span, bins=mag_bins)
    expected = numpy.array(
        [rate for _mag, rate in self.mfd.get_annual_occurrence_rates()])
    numpy.testing.assert_allclose(observed, expected, rtol=0, atol=1e-4)
def test_ses_generation_from_non_parametric_source(self):
    """
    Check the occurrence statistics of a non-parametric source.

    np_src contains two ruptures: rup1 (mag 5) with P(0 occurrences)=0.7
    and P(1)=0.3, and rup2 (mag 6) with P(0)=0.7, P(1)=0.2, P(2)=0.1.
    Many SESs are generated and the empirical probabilities of 0, 1 and
    2 occurrences are compared with the expected values.
    """
    numpy.random.seed(123)  # make the stochastic sampling reproducible
    num_sess = 10000
    sess = [stochastic_event_set([self.np_src]) for _ in range(num_sess)]
    # per-SES occurrence counts for each magnitude (one entry per SES);
    # each generator in `sess` is consumed exactly once
    counts_mag5 = []
    counts_mag6 = []
    for ses in sess:
        c5 = c6 = 0
        for rup in ses:
            if rup.mag == 5.:
                c5 += 1
            elif rup.mag == 6.:
                c6 += 1
        counts_mag5.append(c5)
        counts_mag6.append(c6)
    occs5 = numpy.array(counts_mag5)
    occs6 = numpy.array(counts_mag6)
    # empirical probabilities of 0/1/2 occurrences per SES
    self.assertAlmostEqual((occs5 == 0).sum() / num_sess, 0.70, places=2)
    self.assertAlmostEqual((occs5 == 1).sum() / num_sess, 0.30, places=2)
    self.assertAlmostEqual((occs6 == 0).sum() / num_sess, 0.70, places=2)
    self.assertAlmostEqual((occs6 == 1).sum() / num_sess, 0.20, places=2)
    self.assertAlmostEqual((occs6 == 2).sum() / num_sess, 0.10, places=2)
def test_ses_generation_from_non_parametric_source(self):
    """
    Check the occurrence statistics of a non-parametric source.

    np_src contains two ruptures: rup1 (mag 5) with P(0 occurrences)=0.7
    and P(1)=0.3, and rup2 (mag 6) with P(0)=0.7, P(1)=0.2, P(2)=0.1.
    Many SESs are generated and the empirical probabilities of 0, 1 and
    2 occurrences are compared with the expected values.
    """
    numpy.random.seed(123)  # make the stochastic sampling reproducible
    num_sess = 10000
    sess = [stochastic_event_set([self.np_src]) for _ in range(num_sess)]
    # per-SES occurrence counts for each magnitude (one entry per SES);
    # each generator in `sess` is consumed exactly once
    counts5 = []
    counts6 = []
    for ses in sess:
        n5 = n6 = 0
        for rup in ses:
            if rup.mag == 5.:
                n5 += 1
            if rup.mag == 6.:
                n6 += 1
        counts5.append(n5)
        counts6.append(n6)

    def prob(counts, k):
        # empirical probability of exactly k occurrences per SES
        return counts.count(k) / num_sess

    self.assertAlmostEqual(prob(counts5, 0), 0.7, places=2)
    self.assertAlmostEqual(prob(counts5, 1), 0.3, places=2)
    self.assertAlmostEqual(prob(counts6, 0), 0.7, places=2)
    self.assertAlmostEqual(prob(counts6, 1), 0.2, places=2)
    self.assertAlmostEqual(prob(counts6, 2), 0.1, places=2)
def test_nankai(self):
    """Sample ruptures from a group of 15 mutually exclusive sources."""
    # source model for the Nankai region provided by M. Pagani
    source_model = os.path.join(os.path.dirname(__file__), 'nankai.xml')
    # it has a single group containing 15 mutex sources
    [group] = nrml.to_python(source_model)
    # sanity check on the mutex weights read from the XML (sum to 1)
    aae(group.srcs_weights, [
        0.0125, 0.0125, 0.0125, 0.0125, 0.1625, 0.1625, 0.0125,
        0.0125, 0.025, 0.025, 0.05, 0.05, 0.325, 0.025, 0.1
    ])
    # one serial number per rupture in the whole group
    rup_serial = numpy.arange(group.tot_ruptures, dtype=numpy.uint32)
    start = 0
    # assign ids and a contiguous slice of serials to each source
    for i, src in enumerate(group):
        src.id = i
        nr = src.num_ruptures
        src.serial = rup_serial[start:start + nr]
        start += nr
    lonlat = 135.68, 35.68
    # single site close to the sources; the positional 800 and True are
    # presumably vs30 and vs30measured — confirm against Site signature
    site = Site(geo.Point(*lonlat), 800, True, z1pt0=100., z2pt5=1.)
    s_filter = SourceFilter(SiteCollection([site]), {})
    param = dict(ses_per_logic_tree_path=10, seed=42,
                 filter_distance='rjb', samples=1)
    gsims = [SiMidorikawa1999SInter()]
    dic = sample_ruptures(group, s_filter, gsims, param)
    self.assertEqual(dic['num_ruptures'], 19)  # total ruptures
    self.assertEqual(dic['num_events'], 16)
    self.assertEqual(len(dic['eb_ruptures']), 8)
    self.assertEqual(len(dic['calc_times']), 15)  # mutex sources
    # test export
    mesh = numpy.array([lonlat], [('lon', float), ('lat', float)])
    ebr = dic['eb_ruptures'][0]
    ebr.export(mesh)
    # test no filtering 1
    ruptures = list(stochastic_event_set(group))
    self.assertEqual(len(ruptures), 19)
    # test no filtering 2
    ruptures = sample_ruptures(group)['eb_ruptures']
    self.assertEqual(len(ruptures), 2)
def test(self):
    """Without filters the SES contains every sampled rupture, in order."""
    events = list(stochastic_event_set([self.source1, self.source2]))
    expected = [self.r1_1, self.r1_2, self.r1_2, self.r2_1]
    self.assertEqual(events, expected)
def test(self):
    """The unfiltered SES is exactly the sampled ruptures of both sources."""
    generated = stochastic_event_set([self.source1, self.source2])
    self.assertEqual(
        list(generated),
        [self.r1_1, self.r1_2, self.r1_2, self.r2_1])