def test_get_2d_grid(self):
    '''
    Tests the module to count the events across a 2D grid, both when all
    events fall inside the grid and when some fall outside it.
    '''
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    comp_table = np.array([[1960., 4.0]])

    # Case 1 - all events in grid (including borderline cases)
    event_lons = np.arange(35.0, 41.0, 1.0)
    event_lats = np.arange(40.0, 46.0, 1.0)
    event_mags = np.full(6, 5.0)
    event_years = np.full(6, 2000.)
    expected_counts = np.zeros(100, dtype=int)
    expected_counts[[9, 28, 46, 64, 82, 90]] = 1
    observed_counts = self.model.create_2D_grid_simple(
        event_lons, event_lats, event_years, event_mags, comp_table)
    np.testing.assert_array_almost_equal(expected_counts, observed_counts)
    self.assertEqual(np.sum(expected_counts), 6)

    # Case 2 - some events outside grid; counts must be unchanged
    event_lons = np.arange(35.0, 42.0, 1.0)
    event_lats = np.arange(40.0, 47.0, 1.0)
    event_mags = np.full(7, 5.0)
    event_years = np.full(7, 2000.)
    observed_counts = self.model.create_2D_grid_simple(
        event_lons, event_lats, event_years, event_mags, comp_table)
    np.testing.assert_array_almost_equal(expected_counts, observed_counts)
    self.assertEqual(np.sum(expected_counts), 6)
def test_analysis_Frankel_comparison(self):
    '''
    To test the run_analysis function we compare test results with those
    from Frankel's fortran implementation, under the same conditions
    '''
    # Grid over the western US: [xmin, xmax, xspc, ymin, ymax, yspc,
    # zmin, zmax, zspc]
    self.grid_limits = [-128., -113.0, 0.2, 30., 43.0, 0.2, 0., 100., 100.]
    # Time-varying completeness: (completeness year, minimum magnitude)
    comp_table = np.array([[1933., 4.0],
                           [1900., 5.0],
                           [1850., 6.0],
                           [1850., 7.0]])
    config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
    self.catalogue = Catalogue()
    # Catalogue columns in the Frankel fixture file: magnitude, longitude,
    # latitude, depth, year (in that order)
    frankel_catalogue = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
    self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
    self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
    self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
    self.catalogue.data['depth'] = frankel_catalogue[:, 3]
    self.catalogue.data['year'] = frankel_catalogue[:, 4]
    self.catalogue.end_year = 2006
    # NOTE(review): frankel_results is loaded but never compared against
    # below — presumably a point-by-point comparison was intended; confirm
    frankel_results = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_OUTPUT_FILE))
    # Run analysis
    output_data = self.model.run_analysis(
        self.catalogue, config,
        completeness_table=comp_table,
        smoothing_kernel=IsotropicGaussian())
    # Smoothing should conserve the total event rate between the last two
    # output columns (observed vs smoothed), to within one event
    self.assertTrue(
        fabs(np.sum(output_data[:, -1]) - np.sum(output_data[:, -2])) < 1.0)
    # Total smoothed rate matches the Frankel reference total (~390)
    self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
def test_instantiation(self):
    '''
    Tests the instantiation of the class
    '''
    # Test 1: Good Grid Limits
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
    limits_dict = dict(xmin=35.0, xmax=40.0, xspc=0.5,
                       ymin=40.0, ymax=45.0, yspc=0.5,
                       zmin=0.0, zmax=40.0, zspc=20.0)
    expected_dict = dict(beta=None,
                         bval=None,
                         catalogue=None,
                         data=None,
                         grid=None,
                         grid_limits=limits_dict,
                         kernel=None,
                         use_3d=False)
    self.model = SmoothedSeismicity(self.grid_limits)
    self.assertDictEqual(self.model.__dict__, expected_dict)
    # Test 2 - with b-value set
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    expected_dict['bval'] = 1.0
    expected_dict['beta'] = np.log(10.)
    self.assertDictEqual(self.model.__dict__, expected_dict)
def test_instantiation(self): ''' Tests the instantiation of the class ''' # Test 1: Good Grid Limits self.grid_limits = Grid.make_from_list( [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.]) expected_dict = { 'beta': None, 'bval': None, 'catalogue': None, 'data': None, 'grid': None, 'grid_limits': { 'xmax': 40.0, 'xmin': 35.0, 'xspc': 0.5, 'ymax': 45.0, 'ymin': 40.0, 'yspc': 0.5, 'zmax': 40.0, 'zmin': 0.0, 'zspc': 20.0 }, 'kernel': None, 'use_3d': False } self.model = SmoothedSeismicity(self.grid_limits) self.assertDictEqual(self.model.__dict__, expected_dict) # Test 2 - with b-value set self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0) expected_dict['bval'] = 1.0 expected_dict['beta'] = np.log(10.) self.assertDictEqual(self.model.__dict__, expected_dict)
def test_csv_writer(self):
    '''
    Short test of consistency of the csv writer: write the model data to
    CSV, read it back with numpy, and check the round trip is lossless.
    '''
    self.grid_limits = [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.]
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    self.model.data = np.array([[1.0, 1.0, 10.0, 4.0, 4.0, 1.0],
                                [2.0, 2.0, 20.0, 8.0, 8.0, 1.0]])
    self.model.write_to_csv(OUTPUT_FILE)
    try:
        # skip_header=1 drops the column-name row written by write_to_csv
        return_data = np.genfromtxt(OUTPUT_FILE, delimiter=',',
                                    skip_header=1)
        np.testing.assert_array_almost_equal(return_data, self.model.data)
    finally:
        # os.remove is portable and shell-safe (the original shelled out to
        # 'rm'), and the finally block guarantees the temporary file is
        # deleted even when the assertion above fails
        os.remove(OUTPUT_FILE)
def test_get_3d_grid(self):
    '''
    Tests the module to count the events in a 3D grid
    '''
    comp_table = np.array([[1960., 4.0]])
    self.catalogue = Catalogue()
    # Twelve events: the same six epicentres twice, once at 10 km depth
    # and once at 30 km depth
    self.catalogue.data['longitude'] = np.hstack([
        np.arange(35., 41.0, 1.0), np.arange(35., 41.0, 1.0)])
    self.catalogue.data['latitude'] = np.hstack([
        np.arange(40., 46.0, 1.0), np.arange(40., 46.0, 1.0)])
    self.catalogue.data['depth'] = np.hstack([10.0 * np.ones(6),
                                              30.0 * np.ones(6)])
    self.catalogue.data['magnitude'] = 4.5 * np.ones(12)
    self.catalogue.data['year'] = 1990. * np.ones(12)
    # Case 1 - one depth layer (zspc = 40 spans the whole 0-40 km range,
    # so both depth sets collapse into the same cells with count 2)
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40.0, 45., 0.5, 0., 40., 40.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    # Cell-centre coordinates of the 10 x 10 grid
    [gx, gy] = np.meshgrid(np.arange(35.25, 40., 0.5),
                           np.arange(40.25, 45., 0.5))
    ngp = np.shape(gx)[0] * np.shape(gy)[1]
    gx = np.reshape(gx, [ngp, 1])
    gy = np.reshape(gy, [ngp, 1])
    gz = 20. * np.ones(ngp)
    expected_count = np.zeros(ngp, dtype=float)
    expected_count[[9, 28, 46, 64, 82, 90]] = 2.0
    # flipud reverses the latitude column - presumably matching
    # create_3D_grid's north-to-south row ordering; confirm against the
    # implementation
    expected_result = np.column_stack([gx, np.flipud(gy), gz,
                                       expected_count])
    self.model.create_3D_grid(self.catalogue, comp_table)
    np.testing.assert_array_almost_equal(expected_result, self.model.data)
    # Case 2 - multiple depth layers (zspc = 20 gives two layers, so the
    # counts split 1 + 1 between the 10 km and 30 km layers)
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40., 45., 0.5, 0., 40., 20.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    expected_result = np.vstack([expected_result, expected_result])
    expected_count = np.zeros(200)
    expected_count[[9, 28, 46, 64, 82, 90,
                    109, 128, 146, 164, 182, 190]] = 1.0
    expected_result[:, -1] = expected_count
    expected_result[:, 2] = np.hstack([10. * np.ones(100),
                                       30. * np.ones(100)])
    self.model.create_3D_grid(self.catalogue, comp_table)
    np.testing.assert_array_almost_equal(expected_result, self.model.data)
def test_analysis_Frankel_comparison(self):
    '''
    To test the run_analysis function we compare test results with those
    from Frankel's fortran implementation, under the same conditions
    '''
    # Grid over the western US: [xmin, xmax, xspc, ymin, ymax, yspc,
    # zmin, zmax, zspc]
    self.grid_limits = [-128., -113.0, 0.2, 30., 43.0, 0.2, 0., 100., 100.]
    # Time-varying completeness: (completeness year, minimum magnitude)
    comp_table = np.array([[1933., 4.0],
                           [1900., 5.0],
                           [1850., 6.0],
                           [1850., 7.0]])
    config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
    self.catalogue = Catalogue()
    # Fixture columns: magnitude, longitude, latitude, depth, year
    frankel_catalogue = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
    self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
    self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
    self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
    self.catalogue.data['depth'] = frankel_catalogue[:, 3]
    self.catalogue.data['year'] = frankel_catalogue[:, 4]
    self.catalogue.end_year = 2006
    # The original version loaded FRANKEL_OUTPUT_FILE into an unused local
    # (frankel_results); the dead load is removed since the assertions
    # below only check rate conservation and the expected total (~390)
    # Run analysis
    output_data = self.model.run_analysis(
        self.catalogue, config,
        completeness_table=comp_table,
        smoothing_kernel=IsotropicGaussian())
    # Smoothing conserves the total rate between the last two columns
    self.assertTrue(
        fabs(np.sum(output_data[:, -1]) - np.sum(output_data[:, -2])) < 1.0)
    # Total smoothed rate matches the Frankel reference total
    self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
def test_get_2d_grid(self):
    '''
    Tests the module to count the events across a grid
    '''
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    # Case 1 - all events in grid (including borderline cases)
    comp_table = np.array([[1960., 4.0]])
    lons = np.arange(35.0, 41.0, 1.0)
    lats = np.arange(40.0, 46.0, 1.0)
    mags = 5.0 * np.ones(6)
    years = 2000. * np.ones(6)
    # One event per cell at the six expected flat indices of the
    # 10 x 10 (= 100 cell) grid
    expected_result = np.zeros(100, dtype=int)
    expected_result[[9, 28, 46, 64, 82, 90]] = 1
    np.testing.assert_array_almost_equal(
        expected_result,
        self.model.create_2D_grid_simple(lons, lats, years, mags,
                                         comp_table))
    self.assertEqual(np.sum(expected_result), 6)
    # Case 2 - some events outside grid: the extra (7th) event must be
    # dropped, leaving the counts identical to Case 1
    lons = np.arange(35.0, 42.0, 1.0)
    lats = np.arange(40.0, 47.0, 1.0)
    mags = 5.0 * np.ones(7)
    years = 2000. * np.ones(7)
    np.testing.assert_array_almost_equal(
        expected_result,
        self.model.create_2D_grid_simple(lons, lats, years, mags,
                                         comp_table))
    self.assertEqual(np.sum(expected_result), 6)
def test_get_3d_grid(self):
    '''
    Tests the module to count the events in a 3D grid
    '''
    comp_table = np.array([[1960., 4.0]])
    self.catalogue = Catalogue()
    # Twelve events: the same six epicentres twice, at 10 km and 30 km
    self.catalogue.data['longitude'] = np.hstack(
        [np.arange(35., 41.0, 1.0), np.arange(35., 41.0, 1.0)])
    self.catalogue.data['latitude'] = np.hstack(
        [np.arange(40., 46.0, 1.0), np.arange(40., 46.0, 1.0)])
    self.catalogue.data['depth'] = np.hstack(
        [10.0 * np.ones(6), 30.0 * np.ones(6)])
    self.catalogue.data['magnitude'] = 4.5 * np.ones(12)
    self.catalogue.data['year'] = 1990. * np.ones(12)
    # Case 1 - one depth layer (zspc = 40 covers 0-40 km, so events at
    # both depths land in the same cells with count 2)
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40.0, 45., 0.5, 0., 40., 40.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    # Cell-centre coordinates of the 10 x 10 grid
    [gx, gy] = np.meshgrid(np.arange(35.25, 40., 0.5),
                           np.arange(40.25, 45., 0.5))
    ngp = np.shape(gx)[0] * np.shape(gy)[1]
    gx = np.reshape(gx, [ngp, 1])
    gy = np.reshape(gy, [ngp, 1])
    gz = 20. * np.ones(ngp)
    expected_count = np.zeros(ngp, dtype=float)
    expected_count[[9, 28, 46, 64, 82, 90]] = 2.0
    # flipud reverses latitude ordering - presumably to match
    # create_3D_grid's north-to-south row order; confirm at source
    expected_result = np.column_stack(
        [gx, np.flipud(gy), gz, expected_count])
    self.model.create_3D_grid(self.catalogue, comp_table)
    np.testing.assert_array_almost_equal(expected_result, self.model.data)
    # Case 2 - multiple depth layers (zspc = 20 gives two layers, so each
    # cell count splits 1 + 1 between the 10 km and 30 km layers)
    self.grid_limits = Grid.make_from_list(
        [35.0, 40., 0.5, 40., 45., 0.5, 0., 40., 20.])
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
    expected_result = np.vstack([expected_result, expected_result])
    expected_count = np.zeros(200)
    expected_count[[9, 28, 46, 64, 82, 90,
                    109, 128, 146, 164, 182, 190]] = 1.0
    expected_result[:, -1] = expected_count
    expected_result[:, 2] = np.hstack(
        [10. * np.ones(100), 30. * np.ones(100)])
    self.model.create_3D_grid(self.catalogue, comp_table)
    np.testing.assert_array_almost_equal(expected_result, self.model.data)
# res, spc = 0.2, 250 # model # [xmin, xmax, spcx, ymin, ymax, spcy, zmin, spcz] map_config = {"min_lon": -95.0, "max_lon": -25.0, "min_lat": -65.0, "max_lat": 25.0, "resolution": "l"} # _l = [ 118.5, 124, res, 20.0, 26.5, res, 0, 300, 300] # _l = [ -95., -25, res, -65, 25, res, 0, 800, 800] _l = [-80, -30, res, -37, 13, res, 0, 30, 30] grid_limits = Grid.make_from_list(_l) nx = round((_l[1] - _l[0]) / _l[2], 0) ny = round((_l[4] - _l[3]) / _l[5], 0) grid_shape = (nx, ny) print grid_shape model = SmoothedSeismicity(grid_limits, bvalue=1.0) # Time-varying completeness comp_table = np.array([[1980.0, 3.5], [1970.0, 4.5], [1960.0, 5.0]]) if model_name == "hmtk_sa3": comp_table = np.array( [ [1986, 3.0], [1986, 3.5], [1986, 4.0], [1960, 4.5], [1958, 5.0], [1958, 5.5], [1927, 6.0],
# In[ ]: from hmtk.seismicity.smoothing.smoothed_seismicity import SmoothedSeismicity smoothing_config = {"BandWidth": 50., "Length_Limit": 3., "increment": 0.1} #bvalue = 0.819 #bvalue = 0.835 #upper #bvalue = 0.747 #bvalue= 0.727 #bvalue= 1.062 #lower #bvalue = 0.892 #bvalue = 0.944 #bvalue = 1.355 smoother = SmoothedSeismicity([100., 160., 0.1, -45., -5, 0.1, 0., 20., 20.], bvalue=bvalue) #smoothed_grid = smoother.run_analysis(source_model.sources[0].catalogue, smoothing_config, completeness_table=completeness_table_a) print 'Running smoothing' smoothed_grid = smoother.run_analysis(source.catalogue, smoothing_config, completeness_table=completeness_table_a) smoother_filename = 'smoothed_%i_%i_mmin_%.1f_%.3f_0.1.csv' % ( smoothing_config["BandWidth"], smoothing_config["Length_Limit"], completeness_table_a[0][-1], bvalue) smoother.write_to_csv(smoother_filename) # In[ ]: #smoother_filename = 'smoothed_%i_%i_mmin_%.1f_0.1.csv' % \ ## (smoothing_config["BandWidth"], smoothing_config["Length_Limit"], # completeness_table_a[0][-1])
class TestSmoothedSeismicity(unittest.TestCase):
    '''
    Class to test the implementation of the smoothed seismicity algorithm
    '''
    def setUp(self):
        # Each test rebuilds its own fixtures; start from an empty state
        self.grid_limits = []
        self.model = None

    def test_instantiation(self):
        '''
        Tests the instantiation of the class
        '''
        # Test 1: Good Grid Limits
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
        # Expected attribute state with no b-value supplied
        expected_dict = {'beta': None,
                         'bval': None,
                         'catalogue': None,
                         'data': None,
                         'grid': None,
                         'grid_limits': {'xmax': 40.0,
                                         'xmin': 35.0,
                                         'xspc': 0.5,
                                         'ymax': 45.0,
                                         'ymin': 40.0,
                                         'yspc': 0.5,
                                         'zmax': 40.0,
                                         'zmin': 0.0,
                                         'zspc': 20.0},
                         'kernel': None,
                         'use_3d': False}
        self.model = SmoothedSeismicity(self.grid_limits)
        self.assertDictEqual(self.model.__dict__, expected_dict)
        # Test 2 - with b-value set; beta is expected to be b * ln(10)
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        expected_dict['bval'] = 1.0
        expected_dict['beta'] = np.log(10.)
        self.assertDictEqual(self.model.__dict__, expected_dict)

    def test_get_2d_grid(self):
        '''
        Tests the module to count the events across a grid
        '''
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        # Case 1 - all events in grid (including borderline cases)
        comp_table = np.array([[1960., 4.0]])
        lons = np.arange(35.0, 41.0, 1.0)
        lats = np.arange(40.0, 46.0, 1.0)
        mags = 5.0 * np.ones(6)
        years = 2000. * np.ones(6)
        # One event per cell at six flat indices of the 100-cell grid
        expected_result = np.zeros(100, dtype=int)
        expected_result[[9, 28, 46, 64, 82, 90]] = 1
        np.testing.assert_array_almost_equal(
            expected_result,
            self.model.create_2D_grid_simple(lons, lats, years, mags,
                                             comp_table))
        self.assertEqual(np.sum(expected_result), 6)
        # Case 2 - some events outside grid: the 7th event is dropped and
        # the counts stay identical to Case 1
        lons = np.arange(35.0, 42.0, 1.0)
        lats = np.arange(40.0, 47.0, 1.0)
        mags = 5.0 * np.ones(7)
        years = 2000. * np.ones(7)
        np.testing.assert_array_almost_equal(
            expected_result,
            self.model.create_2D_grid_simple(lons, lats, years, mags,
                                             comp_table))
        self.assertEqual(np.sum(expected_result), 6)

    def test_get_3d_grid(self):
        '''
        Tests the module to count the events in a 3D grid
        '''
        comp_table = np.array([[1960., 4.0]])
        self.catalogue = Catalogue()
        # Twelve events: the same six epicentres at 10 km and at 30 km
        self.catalogue.data['longitude'] = np.hstack([
            np.arange(35., 41.0, 1.0), np.arange(35., 41.0, 1.0)])
        self.catalogue.data['latitude'] = np.hstack([
            np.arange(40., 46.0, 1.0), np.arange(40., 46.0, 1.0)])
        self.catalogue.data['depth'] = np.hstack([10.0 * np.ones(6),
                                                  30.0 * np.ones(6)])
        self.catalogue.data['magnitude'] = 4.5 * np.ones(12)
        self.catalogue.data['year'] = 1990. * np.ones(12)
        # Case 1 - one depth layer (zspc = 40 spans 0-40 km, so both depth
        # sets fall in the same cells with count 2)
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40.0, 45., 0.5, 0., 40., 40.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        [gx, gy] = np.meshgrid(np.arange(35.25, 40., 0.5),
                               np.arange(40.25, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gy)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        gz = 20. * np.ones(ngp)
        expected_count = np.zeros(ngp, dtype=float)
        expected_count[[9, 28, 46, 64, 82, 90]] = 2.0
        # flipud reverses the latitude column - presumably matching
        # create_3D_grid's north-to-south row ordering; confirm at source
        expected_result = np.column_stack([gx, np.flipud(gy), gz,
                                           expected_count])
        self.model.create_3D_grid(self.catalogue, comp_table)
        np.testing.assert_array_almost_equal(expected_result,
                                             self.model.data)
        # Case 2 - multiple depth layers (zspc = 20: counts split 1 + 1
        # between the 10 km and 30 km layers)
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45., 0.5, 0., 40., 20.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        expected_result = np.vstack([expected_result, expected_result])
        expected_count = np.zeros(200)
        expected_count[[9, 28, 46, 64, 82, 90,
                        109, 128, 146, 164, 182, 190]] = 1.0
        expected_result[:, -1] = expected_count
        expected_result[:, 2] = np.hstack([10. * np.ones(100),
                                           30. * np.ones(100)])
        self.model.create_3D_grid(self.catalogue, comp_table)
        np.testing.assert_array_almost_equal(expected_result,
                                             self.model.data)

    def test_csv_writer(self):
        '''
        Short test of consistency of the csv writer
        '''
        self.grid_limits = [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.]
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        self.model.data = np.array([[1.0, 1.0, 10.0, 4.0, 4.0, 1.0],
                                    [2.0, 2.0, 20.0, 8.0, 8.0, 1.0]])
        self.model.write_to_csv(OUTPUT_FILE)
        # Round trip: re-read the file and compare with the source data
        return_data = np.genfromtxt(OUTPUT_FILE, delimiter=',',
                                    skip_header=1)
        np.testing.assert_array_almost_equal(return_data, self.model.data)
        # NOTE(review): shelling out to 'rm' is non-portable and skipped
        # if the assertion fails - consider os.remove in a finally block
        os.system('rm ' + OUTPUT_FILE)

    def test_analysis_Frankel_comparison(self):
        '''
        To test the run_analysis function we compare test results with
        those from Frankel's fortran implementation, under the same
        conditions
        '''
        self.grid_limits = [-128., -113.0, 0.2,
                            30., 43.0, 0.2,
                            0., 100., 100.]
        # Time-varying completeness: (completeness year, minimum magnitude)
        comp_table = np.array([[1933., 4.0],
                               [1900., 5.0],
                               [1850., 6.0],
                               [1850., 7.0]])
        config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
        self.catalogue = Catalogue()
        # Fixture columns: magnitude, longitude, latitude, depth, year
        frankel_catalogue = np.genfromtxt(os.path.join(
            BASE_PATH, FRANKEL_TEST_CATALOGUE))
        self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
        self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
        self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
        self.catalogue.data['depth'] = frankel_catalogue[:, 3]
        self.catalogue.data['year'] = frankel_catalogue[:, 4]
        self.catalogue.end_year = 2006
        # NOTE(review): loaded but never compared against below
        frankel_results = np.genfromtxt(os.path.join(
            BASE_PATH, FRANKEL_OUTPUT_FILE))
        # Run analysis
        output_data = self.model.run_analysis(
            self.catalogue, config,
            completeness_table=comp_table,
            smoothing_kernel=IsotropicGaussian())
        # Smoothing conserves the total rate between the last two columns
        self.assertTrue(fabs(np.sum(output_data[:, -1]) -
                             np.sum(output_data[:, -2])) < 1.0)
        # Total smoothed rate matches the Frankel reference total (~390)
        self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
class TestSmoothedSeismicity(unittest.TestCase):
    '''
    Class to test the implementation of the smoothed seismicity algorithm
    '''
    def setUp(self):
        # Fresh, empty fixtures for every test
        self.grid_limits = []
        self.model = None

    def test_instantiation(self):
        '''
        Tests the instantiation of the class
        '''
        # Test 1: Good Grid Limits
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
        # Expected attribute state with no b-value supplied
        expected_dict = {
            'beta': None,
            'bval': None,
            'catalogue': None,
            'data': None,
            'grid': None,
            'grid_limits': {
                'xmax': 40.0,
                'xmin': 35.0,
                'xspc': 0.5,
                'ymax': 45.0,
                'ymin': 40.0,
                'yspc': 0.5,
                'zmax': 40.0,
                'zmin': 0.0,
                'zspc': 20.0
            },
            'kernel': None,
            'use_3d': False
        }
        self.model = SmoothedSeismicity(self.grid_limits)
        self.assertDictEqual(self.model.__dict__, expected_dict)
        # Test 2 - with b-value set; beta is expected to be b * ln(10)
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        expected_dict['bval'] = 1.0
        expected_dict['beta'] = np.log(10.)
        self.assertDictEqual(self.model.__dict__, expected_dict)

    def test_get_2d_grid(self):
        '''
        Tests the module to count the events across a grid
        '''
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        # Case 1 - all events in grid (including borderline cases)
        comp_table = np.array([[1960., 4.0]])
        lons = np.arange(35.0, 41.0, 1.0)
        lats = np.arange(40.0, 46.0, 1.0)
        mags = 5.0 * np.ones(6)
        years = 2000. * np.ones(6)
        # One event per cell at six flat indices of the 100-cell grid
        expected_result = np.zeros(100, dtype=int)
        expected_result[[9, 28, 46, 64, 82, 90]] = 1
        np.testing.assert_array_almost_equal(
            expected_result,
            self.model.create_2D_grid_simple(lons, lats, years, mags,
                                             comp_table))
        self.assertEqual(np.sum(expected_result), 6)
        # Case 2 - some events outside grid: the 7th event is dropped and
        # the counts stay identical to Case 1
        lons = np.arange(35.0, 42.0, 1.0)
        lats = np.arange(40.0, 47.0, 1.0)
        mags = 5.0 * np.ones(7)
        years = 2000. * np.ones(7)
        np.testing.assert_array_almost_equal(
            expected_result,
            self.model.create_2D_grid_simple(lons, lats, years, mags,
                                             comp_table))
        self.assertEqual(np.sum(expected_result), 6)

    def test_get_3d_grid(self):
        '''
        Tests the module to count the events in a 3D grid
        '''
        comp_table = np.array([[1960., 4.0]])
        self.catalogue = Catalogue()
        # Twelve events: the same six epicentres at 10 km and at 30 km
        self.catalogue.data['longitude'] = np.hstack(
            [np.arange(35., 41.0, 1.0), np.arange(35., 41.0, 1.0)])
        self.catalogue.data['latitude'] = np.hstack(
            [np.arange(40., 46.0, 1.0), np.arange(40., 46.0, 1.0)])
        self.catalogue.data['depth'] = np.hstack(
            [10.0 * np.ones(6), 30.0 * np.ones(6)])
        self.catalogue.data['magnitude'] = 4.5 * np.ones(12)
        self.catalogue.data['year'] = 1990. * np.ones(12)
        # Case 1 - one depth layer (zspc = 40 spans 0-40 km, so both depth
        # sets fall in the same cells with count 2)
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40.0, 45., 0.5, 0., 40., 40.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        [gx, gy] = np.meshgrid(np.arange(35.25, 40., 0.5),
                               np.arange(40.25, 45., 0.5))
        ngp = np.shape(gx)[0] * np.shape(gy)[1]
        gx = np.reshape(gx, [ngp, 1])
        gy = np.reshape(gy, [ngp, 1])
        gz = 20. * np.ones(ngp)
        expected_count = np.zeros(ngp, dtype=float)
        expected_count[[9, 28, 46, 64, 82, 90]] = 2.0
        # flipud reverses the latitude column - presumably matching
        # create_3D_grid's north-to-south row ordering; confirm at source
        expected_result = np.column_stack(
            [gx, np.flipud(gy), gz, expected_count])
        self.model.create_3D_grid(self.catalogue, comp_table)
        np.testing.assert_array_almost_equal(expected_result,
                                             self.model.data)
        # Case 2 - multiple depth layers (zspc = 20: counts split 1 + 1
        # between the 10 km and 30 km layers)
        self.grid_limits = Grid.make_from_list(
            [35.0, 40., 0.5, 40., 45., 0.5, 0., 40., 20.])
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        expected_result = np.vstack([expected_result, expected_result])
        expected_count = np.zeros(200)
        expected_count[[9, 28, 46, 64, 82, 90,
                        109, 128, 146, 164, 182, 190]] = 1.0
        expected_result[:, -1] = expected_count
        expected_result[:, 2] = np.hstack(
            [10. * np.ones(100), 30. * np.ones(100)])
        self.model.create_3D_grid(self.catalogue, comp_table)
        np.testing.assert_array_almost_equal(expected_result,
                                             self.model.data)

    def test_csv_writer(self):
        '''
        Short test of consistency of the csv writer
        '''
        self.grid_limits = [35.0, 40., 0.5, 40., 45.0, 0.5, 0., 40., 20.]
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=1.0)
        self.model.data = np.array([[1.0, 1.0, 10.0, 4.0, 4.0, 1.0],
                                    [2.0, 2.0, 20.0, 8.0, 8.0, 1.0]])
        self.model.write_to_csv(OUTPUT_FILE)
        # Round trip: re-read the file and compare with the source data
        return_data = np.genfromtxt(OUTPUT_FILE, delimiter=',',
                                    skip_header=1)
        np.testing.assert_array_almost_equal(return_data, self.model.data)
        # NOTE(review): shelling out to 'rm' is non-portable and skipped
        # if the assertion fails - consider os.remove in a finally block
        os.system('rm ' + OUTPUT_FILE)

    def test_analysis_Frankel_comparison(self):
        '''
        To test the run_analysis function we compare test results with
        those from Frankel's fortran implementation, under the same
        conditions
        '''
        self.grid_limits = [-128., -113.0, 0.2,
                            30., 43.0, 0.2,
                            0., 100., 100.]
        # Time-varying completeness: (completeness year, minimum magnitude)
        comp_table = np.array([[1933., 4.0],
                               [1900., 5.0],
                               [1850., 6.0],
                               [1850., 7.0]])
        config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
        self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
        self.catalogue = Catalogue()
        # Fixture columns: magnitude, longitude, latitude, depth, year
        frankel_catalogue = np.genfromtxt(
            os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
        self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
        self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
        self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
        self.catalogue.data['depth'] = frankel_catalogue[:, 3]
        self.catalogue.data['year'] = frankel_catalogue[:, 4]
        self.catalogue.end_year = 2006
        # NOTE(review): loaded but never compared against below
        frankel_results = np.genfromtxt(
            os.path.join(BASE_PATH, FRANKEL_OUTPUT_FILE))
        # Run analysis
        output_data = self.model.run_analysis(
            self.catalogue, config,
            completeness_table=comp_table,
            smoothing_kernel=IsotropicGaussian())
        # Smoothing conserves the total rate between the last two columns
        self.assertTrue(
            fabs(np.sum(output_data[:, -1]) -
                 np.sum(output_data[:, -2])) < 1.0)
        # Total smoothed rate matches the Frankel reference total (~390)
        self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
def run_smoothing(grid_lims, smoothing_config, catalogue, completeness_table,
                  map_config, run, overwrite=True):
    """Run all the smoothing: purge the catalogue to the completeness
    period, smooth it, write the rates to CSV, build an NRML point-source
    model from the smoothed grid, and plot the rates on a basemap.

    NOTE(review): relies on module-level globals (`bvalue`, `np`, `os`,
    `sys`, plotting/hazardlib names) defined elsewhere in the script.
    """
    ystart = completeness_table[-1][0]
    yend = catalogue.end_year
    catalogue_comp = deepcopy(catalogue)
    # Ensuring that catalogue is cleaned of earthquakes outside of
    # completeness period
    index = catalogue_comp.data['year'] >= ystart
    catalogue_comp.purge_catalogue(index)
    # Encode the completeness table into the output filenames
    completeness_string = 'comp'
    for ym in completeness_table:
        completeness_string += '_%i_%.1f' % (ym[0], ym[1])
    smoother_filename = 'Australia_Fixed_%i_%i_b%.3f_mmin_%.1f_0.1%s.csv' % (
        smoothing_config["BandWidth"], smoothing_config["Length_Limit"],
        bvalue, completeness_table[0][1], completeness_string)
    filename = smoother_filename[:-4] + '.xml'
    if os.path.exists(filename) and not overwrite:
        print '%s already created, not overwriting!' % filename
        return
    # Fixed grid over Australia: [xmin, xmax, xspc, ymin, ymax, yspc,
    # zmin, zmax, zspc]
    smoother = SmoothedSeismicity(
        [105., 160., 0.1, -47., -5, 0.1, 0., 20., 20.],
        bvalue=smoothing_config['bvalue'])
    print 'Running smoothing'
    smoothed_grid = smoother.run_analysis(
        catalogue_comp, smoothing_config,
        completeness_table=completeness_table)
    smoother.write_to_csv(smoother_filename)
    from openquake.hazardlib.nrml import SourceModelParser, write, NAMESPACE
    from openquake.baselib.node import Node
    from openquake.hazardlib import nrml
    from openquake.hazardlib.sourcewriter import obj_to_node
    # Build nrml input file of point sources
    source_list = []
    #i=0
    min_mag = 4.5
    max_mag = 7.8
    bval = bvalue  # just define as 1 for time being
    # Read in data again to solve number formatting issue in smoother.data
    # For some reason it just returns 0 for all a values
    try:
        data = np.genfromtxt(smoother_filename, delimiter=',',
                             skip_header=1)
    except ValueError:
        print 'Something wrong with file %s' % smoother_filename
        sys.exit()
    tom = PoissonTOM(
        50)  # Dummy temporal occurrence model for building pt sources
    msr = Leonard2014_SCR()
    # One point source per smoothed-grid cell
    for j in range(len(data[:, 4])):
        # print smoother.data[j,:]
        identifier = 'FSS' + str(j) + '_' + str(run)
        name = 'Frankel' + str(j) + '_' + str(run)
        point = Point(data[j, 0], data[j, 1], data[j, 2])
        # Convert the cell count to an annual rate over the learning period
        annual_rate = data[j, 4] / (yend - ystart + 1)
        aval = np.log10(annual_rate) + smoothing_config[
            'bvalue'] * completeness_table[0][1]
        mfd = TruncatedGRMFD(min_mag, max_mag, 0.1, aval, bval)
        hypo_depth_dist = PMF([(0.5, 10.0),
                               (0.25, 5.0),
                               (0.25, 15.0)])
        # Four strike orientations, all dipping 30 with rake 90
        nodal_plane_dist = PMF([(0.3, NodalPlane(0, 30, 90)),
                                (0.2, NodalPlane(90, 30, 90)),
                                (0.3, NodalPlane(180, 30, 90)),
                                (0.2, NodalPlane(270, 30, 90))])
        point_source = PointSource(identifier, name, 'Non_cratonic',
                                   mfd, 2, msr, 2.0, tom, 0.1, 20.0,
                                   point, nodal_plane_dist,
                                   hypo_depth_dist)
        source_list.append(point_source)
    nodes = list(map(obj_to_node, sorted(source_list)))
    source_model = Node("sourceModel", {"name": name}, nodes=nodes)
    with open(filename, 'wb') as f:
        nrml.write([source_model], f, '%s', xmlns=NAMESPACE)
    # Creating a basemap - input a configuration and (if desired) a title
    title = 'Smoothed seismicity rate for learning \nperiod %i 2017, Mmin = %.1f' % (
        completeness_table[0][0], completeness_table[0][1])
    basemap1 = HMTKBaseMap(map_config, 'Smoothed seismicity rate')
    # Adding the smoothed grid to the basemap
    sym = (2., 3., 'cx')
    x, y = basemap1.m(smoother.data[:, 0], smoother.data[:, 1])
    basemap1.m.scatter(x, y, marker='s', c=np.log10(smoother.data[:, 4]),
                       cmap=plt.cm.coolwarm, zorder=10, lw=0,
                       vmin=-6.5, vmax=1.5)
    basemap1.m.drawcoastlines(linewidth=1, zorder=50)  # Add coastline on top
    # NOTE(review): lat bounds feed drawmeridians and lon bounds feed
    # drawparallels here - looks swapped; confirm intent
    basemap1.m.drawmeridians(
        np.arange(map_config['min_lat'], map_config['max_lat'], 5))
    basemap1.m.drawparallels(
        np.arange(map_config['min_lon'], map_config['max_lon'], 5))
    plt.colorbar(label='log10(Smoothed rate per cell)')
    plt.legend()
    figname = smoother_filename[:-4] + '_smoothed_rates_map.png'
    plt.savefig(figname)
    # Continuation of a map-configuration dict opened earlier in the file
    'min_lon': -95.0,
    'max_lon': -25.0,
    'min_lat': -65.0,
    'max_lat': 25.0,
    'resolution': 'l'
}
#_l = [ 118.5, 124, res, 20.0, 26.5, res, 0, 300, 300]
#_l = [ -95., -25, res, -65, 25, res, 0, 800, 800]
# NOTE(review): `res` and `model_name` are defined elsewhere - this
# fragment assumes they are in scope
_l = [-80, -30, res, -37, 13, res, 0, 30, 30]
grid_limits = Grid.make_from_list(_l)
# Number of grid cells along each axis
nx = round((_l[1] - _l[0]) / _l[2], 0)
ny = round((_l[4] - _l[3]) / _l[5], 0)
grid_shape = (nx, ny)
print grid_shape
model = SmoothedSeismicity(grid_limits, bvalue=1.0)
# Time-varying completeness: (completeness year, minimum magnitude)
comp_table = np.array([[1980., 3.5],
                       [1970., 4.5],
                       [1960., 5.0]])
if model_name == 'hmtk_sa3':
    comp_table = np.array([[1986, 3.],
                           [1986, 3.5],
                           [1986, 4.],
                           [1960, 4.5],
                           [1958, 5.],
                           [1958, 5.5],
                           [1927, 6.],
                           [1898, 6.5],
                           [1885, 7.],
                           [1885, 7.5],
                           [1885, 8.]])
else:
    comp_table = np.array([[1980, 3.],
                           [1975, 3.5],
                           [1975, 4.],
                           [1965, 4.5],
                           [1965, 5.],
                           [1860, 5.5],
                           [1860, 6.]])
#config
# Smoothing configuration dict (truncated in this view)
config = {
    'Length_Limit': 3.,
#bvalue = 0.835 #upper #bvalue = 0.747 #bvalue= 0.727 #bvalue= 1.062 #lower #bvalue = 0.892 #bvalue = 0.944 #bvalue = 1.355 llcrnrlat = 35 urcrnrlat = 83.5 llcrnrlon = -160 urcrnrlon = -51 smoother = SmoothedSeismicity([llcrnrlon,urcrnrlon,0.1,llcrnrlat,urcrnrlat,0.1,0.,20., 20.], bvalue = bvalue) #smoothed_grid = smoother.run_analysis(source_model.sources[0].catalogue, smoothing_config, completeness_table=completeness_table_a) print 'Running smoothing' smoothed_grid = smoother.run_analysis(source.catalogue, smoothing_config, completeness_table=completeness_table_a) smoother_filename = 'smoothed_%i_%i_mmin_%.1f_%.3f_0.1.csv' % (smoothing_config["BandWidth"], smoothing_config["Length_Limit"], completeness_table_a[0][-1], bvalue) print 'Writing to file' smoother.write_to_csv(smoother_filename) """ from openquake.hazardlib.nrml import SourceModelParser, write, NAMESPACE from openquake.baselib.node import Node from openquake.hazardlib import nrml from openquake.hazardlib.sourcewriter import obj_to_node