def test_select_within_depth_range(self):
    # Tests the function to select within the depth range
    # Setup function
    self.catalogue = Catalogue()
    self.catalogue.data['depth'] = np.array([5., 15., 25., 35., 45.])
    selector0 = CatalogueSelector(self.catalogue)

    # Test case 1: No limits specified - all catalogue valid
    test_cat_1 = selector0.within_depth_range()
    np.testing.assert_array_almost_equal(test_cat_1.data['depth'],
                                         self.catalogue.data['depth'])

    # Test case 2: Lower depth limit specified only
    test_cat_1 = selector0.within_depth_range(lower_depth=30.)
    np.testing.assert_array_almost_equal(test_cat_1.data['depth'],
                                         np.array([5., 15., 25.]))

    # Test case 3: Upper depth limit specified only
    test_cat_1 = selector0.within_depth_range(upper_depth=20.)
    np.testing.assert_array_almost_equal(test_cat_1.data['depth'],
                                         np.array([25., 35., 45.]))

    # Test case 4: Both depth limits specified
    test_cat_1 = selector0.within_depth_range(upper_depth=20.,
                                              lower_depth=40.)
    np.testing.assert_array_almost_equal(test_cat_1.data['depth'],
                                         np.array([25., 35.]))
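# Note on the convention exercised above: depth increases downwards, so
# "upper_depth" is the shallow bound (the smaller number) and
# "lower_depth" is the deep bound. The stand-alone sketch below
# reproduces the selection logic implied by the assertions; it is an
# illustration, not the hmtk implementation.
import numpy as np

def within_depth_range_sketch(depths, lower_depth=None, upper_depth=None):
    # Keep events with upper_depth <= depth <= lower_depth
    lower = lower_depth if lower_depth is not None else np.inf
    upper = upper_depth if upper_depth is not None else -np.inf
    return depths[np.logical_and(depths >= upper, depths <= lower)]

# Matches test case 4 above: only 25 km and 35 km lie between 20 and 40
assert list(within_depth_range_sketch(
    np.array([5., 15., 25., 35., 45.]),
    lower_depth=40., upper_depth=20.)) == [25., 35.]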
def setUp(self):
    """
    Sets up the test catalogues and the verification tables
    """
    # Read initial dataset
    filename = os.path.join(self.BASE_DATA_PATH,
                            'completeness_test_cat.csv')
    test_data = np.genfromtxt(filename, delimiter=',', skip_header=1)
    # Create catalogue A
    self.catalogueA = Catalogue.make_from_dict(
        {'year': test_data[:, 3], 'magnitude': test_data[:, 17]})
    # Read initial dataset
    filename = os.path.join(self.BASE_DATA_PATH,
                            'recurrence_test_cat_B.csv')
    test_data = np.genfromtxt(filename, delimiter=',', skip_header=1)
    # Create catalogue B
    self.catalogueB = Catalogue.make_from_dict(
        {'year': test_data[:, 3], 'magnitude': test_data[:, 17]})
    # Read the verification table A
    filename = os.path.join(self.BASE_DATA_PATH,
                            'recurrence_table_test_A.csv')
    self.true_tableA = np.genfromtxt(filename, delimiter=',')
    # Read the verification table B
    filename = os.path.join(self.BASE_DATA_PATH,
                            'recurrence_table_test_B.csv')
    self.true_tableB = np.genfromtxt(filename, delimiter=',')
def test_select_catalogue_rrup(self):
    """
    Tests catalogue selection with rupture distance
    """
    self.fault = mtkActiveFault(
        '001', 'A Fault', self.simple_fault,
        [(5., 0.5), (7., 0.5)], 0., None,
        msr_sigma=[(-1.5, 0.15), (0., 0.7), (1.5, 0.15)])
    cat1 = Catalogue()
    cat1.data = {"eventID": ["001", "002", "003", "004"],
                 "longitude": np.array([30.1, 30.1, 30.5, 31.5]),
                 "latitude": np.array([30.0, 30.25, 30.4, 30.5]),
                 "depth": np.array([5.0, 250.0, 10.0, 10.0])}
    selector = CatalogueSelector(cat1)
    # Select within 50 km of the fault
    self.fault.select_catalogue(selector, 50.0,
                                distance_metric="rupture")
    np.testing.assert_array_almost_equal(
        self.fault.catalogue.data["longitude"],
        np.array([30.1, 30.5]))
    np.testing.assert_array_almost_equal(
        self.fault.catalogue.data["latitude"],
        np.array([30.0, 30.4]))
    np.testing.assert_array_almost_equal(
        self.fault.catalogue.data["depth"],
        np.array([5.0, 10.0]))
class TestMagnitudeTimeDistribution(unittest.TestCase):
    """
    Simple class to test the magnitude-time density distribution
    """
    def setUp(self):
        self.catalogue = Catalogue()
        x, y = np.meshgrid(np.arange(1915., 2010., 10.),
                           np.arange(5.5, 9.0, 1.0))
        nx, ny = np.shape(x)
        self.catalogue.data['magnitude'] = (y.reshape([nx * ny, 1])).flatten()
        x = (x.reshape([nx * ny, 1])).flatten()
        self.catalogue.data['year'] = x.astype(int)
        self.catalogue.data['month'] = np.ones_like(x, dtype=int)
        self.catalogue.data['day'] = np.ones_like(x, dtype=int)
        self.catalogue.data['hour'] = np.ones_like(x, dtype=int)
        self.catalogue.data['minute'] = np.ones_like(x, dtype=int)
        self.catalogue.data['second'] = np.ones_like(x, dtype=float)

    def test_magnitude_time_distribution_no_uncertainties(self):
        # Tests the magnitude-time distribution without uncertainties
        mag_range = np.arange(5., 10., 1.)
        time_range = np.arange(1910., 2020., 10.)
        # Without normalisation
        expected_array = np.ones(
            [len(time_range) - 1, len(mag_range) - 1], dtype=float)
        np.testing.assert_array_almost_equal(
            expected_array,
            self.catalogue.get_magnitude_time_distribution(
                mag_range, time_range))
        # With normalisation
        np.testing.assert_array_almost_equal(
            expected_array / np.sum(expected_array),
            self.catalogue.get_magnitude_time_distribution(
                mag_range, time_range, normalisation=True))
def test_select_within_magnitude_range(self):
    '''
    Tests the function to select within the magnitude range
    '''
    # Setup function
    self.catalogue = Catalogue()
    self.catalogue.data['magnitude'] = np.array([4., 5., 6., 7., 8.])
    selector0 = CatalogueSelector(self.catalogue)

    # Test case 1: No limits specified - all catalogue valid
    test_cat_1 = selector0.within_magnitude_range()
    np.testing.assert_array_almost_equal(test_cat_1.data['magnitude'],
                                         self.catalogue.data['magnitude'])

    # Test case 2: Lower magnitude limit specified only
    test_cat_1 = selector0.within_magnitude_range(lower_mag=5.5)
    np.testing.assert_array_almost_equal(test_cat_1.data['magnitude'],
                                         np.array([6., 7., 8.]))

    # Test case 3: Upper magnitude limit specified only
    test_cat_1 = selector0.within_magnitude_range(upper_mag=5.5)
    np.testing.assert_array_almost_equal(test_cat_1.data['magnitude'],
                                         np.array([4., 5.]))

    # Test case 4: Both magnitude limits specified
    test_cat_1 = selector0.within_magnitude_range(upper_mag=7.5,
                                                  lower_mag=5.5)
    np.testing.assert_array_almost_equal(test_cat_1.data['magnitude'],
                                         np.array([6., 7.]))
def select_catalogue(self, valid_id):
    '''
    Method to post-process the catalogue based on the selection options

    :param numpy.ndarray valid_id:
        Boolean vector indicating whether each event is selected (True)
        or not (False)

    :returns:
        Catalogue of selected events as instance of
        openquake.hmtk.seismicity.catalogue.Catalogue class
    '''
    if not np.any(valid_id):
        # No events selected - create clean instance of class
        output = Catalogue()
        output.processes = self.catalogue.processes
    elif np.all(valid_id):
        if self.copycat:
            output = deepcopy(self.catalogue)
        else:
            output = self.catalogue
    else:
        if self.copycat:
            output = deepcopy(self.catalogue)
        else:
            output = self.catalogue
        output.purge_catalogue(valid_id)
    return output
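# A hypothetical usage sketch of the method above (catalogue contents
# are illustrative): build a boolean vector over the events and let
# select_catalogue return the purged catalogue. Assumes the
# CatalogueSelector is constructed as in the tests in this file.
import numpy as np

catalogue = Catalogue()
catalogue.data['eventID'] = np.array([0, 1, 2])
catalogue.data['magnitude'] = np.array([4.0, 5.5, 6.2])
selector = CatalogueSelector(catalogue)
valid_id = catalogue.data['magnitude'] >= 5.0  # keep M >= 5 events
subset = selector.select_catalogue(valid_id)   # events 1 and 2 remain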
def test_load_from_array(self):
    # Tests the creation of a catalogue from an array and a key list
    cat = Catalogue()
    cat.load_from_array(['year', 'magnitude'], self.data_array)
    np.testing.assert_allclose(cat.data['magnitude'],
                               self.data_array[:, 1])
    np.testing.assert_allclose(cat.data['year'],
                               self.data_array[:, 0].astype(int))
def test_hypocentres_as_mesh(self):
    # Tests the function to render the hypocentres to a
    # hazardlib.geo.mesh.Mesh object
    cat = Catalogue()
    cat.data['longitude'] = np.array([2., 3.])
    cat.data['latitude'] = np.array([2., 3.])
    cat.data['depth'] = np.array([2., 3.])
    self.assertIsInstance(cat.hypocentres_as_mesh(), Mesh)
def test_select_within_fault_distance(self):
    # Tests the selection of events within a distance of the fault
    # Set up catalogue
    self.catalogue = Catalogue()
    self.catalogue.data['longitude'] = np.arange(0., 5.5, 0.5)
    self.catalogue.data['latitude'] = np.arange(0., 5.5, 0.5)
    self.catalogue.data['depth'] = np.zeros(11, dtype=float)
    self.catalogue.data['eventID'] = np.arange(0, 11, 1)
    self.fault_source = mtkSimpleFaultSource('101', 'A simple fault')
    trace_as_line = line.Line([point.Point(2.0, 3.0),
                               point.Point(3.0, 2.0)])
    self.fault_source.create_geometry(trace_as_line, 30., 0., 30.)
    selector0 = CatalogueSelector(self.catalogue)

    # Test 1 - simple case: Joyner-Boore distance
    self.fault_source.select_catalogue(selector0, 40.)
    np.testing.assert_array_almost_equal(
        np.array([2., 2.5]),
        self.fault_source.catalogue.data['longitude'])
    np.testing.assert_array_almost_equal(
        np.array([2., 2.5]),
        self.fault_source.catalogue.data['latitude'])

    # Test 2 - simple case: rupture distance
    self.fault_source.catalogue = None
    self.fault_source.select_catalogue(selector0, 40., 'rupture')
    np.testing.assert_array_almost_equal(
        np.array([2.5]),
        self.fault_source.catalogue.data['longitude'])
    np.testing.assert_array_almost_equal(
        np.array([2.5]),
        self.fault_source.catalogue.data['latitude'])

    # Test 3 - for a vertical fault ensure that the Joyner-Boore
    # distance behaviour is the same as for rupture distance
    fault1 = mtkSimpleFaultSource('102', 'A vertical fault')
    fault1.create_geometry(trace_as_line, 90., 0., 30.)
    self.fault_source.create_geometry(trace_as_line, 90., 0., 30.)
    # Joyner-Boore
    self.fault_source.select_catalogue(selector0, 40.)
    # Rupture
    fault1.select_catalogue(selector0, 40., 'rupture')
    np.testing.assert_array_almost_equal(
        self.fault_source.catalogue.data['longitude'],
        fault1.catalogue.data['longitude'])
    np.testing.assert_array_almost_equal(
        self.fault_source.catalogue.data['latitude'],
        fault1.catalogue.data['latitude'])

    # The usual test to ensure an error is raised when no events are in
    # the catalogue
    self.catalogue = Catalogue()
    selector0 = CatalogueSelector(self.catalogue)
    with self.assertRaises(ValueError) as ver:
        self.fault_source.select_catalogue(selector0, 40.0)
    self.assertEqual(str(ver.exception), 'No events found in catalogue!')
def test_select_catalogue(self):
    # Tests the select_catalogue function - essentially a wrapper to the
    # two selection functions
    self.point_source = mtkPointSource('101', 'A Point Source')
    simple_point = Point(4.5, 4.5)
    self.point_source.create_geometry(simple_point, 0., 30.)

    # Bad case - no events in catalogue
    self.catalogue = Catalogue()
    selector0 = CatalogueSelector(self.catalogue)
    with self.assertRaises(ValueError) as ver:
        self.point_source.select_catalogue(selector0, 100.)
    self.assertEqual(str(ver.exception), 'No events found in catalogue!')

    # Create a catalogue
    self.catalogue = Catalogue()
    self.catalogue.data['eventID'] = np.arange(0, 7, 1)
    self.catalogue.data['longitude'] = np.arange(4.0, 7.5, 0.5)
    self.catalogue.data['latitude'] = np.arange(4.0, 7.5, 0.5)
    self.catalogue.data['depth'] = np.ones(7, dtype=float)
    selector0 = CatalogueSelector(self.catalogue)

    # To ensure that the square function is called - compare against
    # direct instance
    # First implementation - compare select within distance
    self.point_source.select_catalogue_within_distance(selector0, 100.,
                                                       'epicentral')
    expected_catalogue = deepcopy(self.point_source.catalogue)
    self.point_source.catalogue = None  # Reset catalogue
    self.point_source.select_catalogue(selector0, 100., 'circle')
    np.testing.assert_array_equal(
        self.point_source.catalogue.data['eventID'],
        expected_catalogue.data['eventID'])

    # Second implementation - compare select within cell
    expected_catalogue = None
    self.point_source.select_catalogue_within_cell(selector0, 150.)
    expected_catalogue = deepcopy(self.point_source.catalogue)
    self.point_source.catalogue = None  # Reset catalogue
    self.point_source.select_catalogue(selector0, 150., 'square')
    np.testing.assert_array_equal(
        self.point_source.catalogue.data['eventID'],
        expected_catalogue.data['eventID'])

    # Finally ensure an error is raised when the input is neither
    # 'circle' nor 'square'
    with self.assertRaises(ValueError) as ver:
        self.point_source.select_catalogue(selector0, 100., 'bad input')
    self.assertEqual(str(ver.exception),
                     'Unrecognised selection type for point source!')
def test_get_bounding_box(self):
    """
    Tests the method to return the bounding box of a catalogue
    """
    cat1 = Catalogue()
    cat1.data["longitude"] = np.array([10.0, 20.0])
    cat1.data["latitude"] = np.array([40.0, 50.0])
    bbox = cat1.get_bounding_box()
    self.assertAlmostEqual(bbox[0], 10.0)
    self.assertAlmostEqual(bbox[1], 20.0)
    self.assertAlmostEqual(bbox[2], 40.0)
    self.assertAlmostEqual(bbox[3], 50.0)
def test_hypocentres_to_cartesian(self):
    # Tests the function to render the hypocentres to a cartesian
    # array. The invoked function
    # hazardlib.geo.utils.spherical_to_cartesian is tested as part of
    # the hazardlib suite. The test here is included for coverage
    cat = Catalogue()
    cat.data['longitude'] = np.array([2., 3.])
    cat.data['latitude'] = np.array([2., 3.])
    cat.data['depth'] = np.array([2., 3.])
    expected_data = spherical_to_cartesian(cat.data['longitude'],
                                           cat.data['latitude'],
                                           cat.data['depth'])
    model_output = cat.hypocentres_to_cartesian()
    np.testing.assert_array_almost_equal(expected_data, model_output)
def setUp(self):
    cat1 = Catalogue()
    cat1.end_year = 2000
    cat1.start_year = 1900
    cat1.data['eventID'] = [1.0, 2.0, 3.0]
    cat1.data['magnitude'] = np.array([1.0, 2.0, 3.0])

    cat2 = Catalogue()
    cat2.end_year = 1990
    cat2.start_year = 1910
    cat2.data['eventID'] = [1.0, 2.0, 3.0]
    cat2.data['magnitude'] = np.array([1.0, 2.0, 3.0])

    self.cat1 = cat1
    self.cat2 = cat2
def test_input_checks_sets_magnitude_interval(self):
    fake_completeness_table = 0.0
    catalogue = Catalogue.make_from_dict({'year': [1900]})
    config = {'magnitude_interval': 0.1}
    cmag, ctime, ref_mag, dmag, _ = rec_utils.input_checks(
        catalogue, config, fake_completeness_table)
    self.assertEqual(0.1, dmag)
def test_kijko_smit_set_reference_magnitude(self):
    completeness_table = np.array([[1900, 1.0]])
    catalogue = Catalogue.make_from_dict(
        {'magnitude': np.array([5.0, 6.0]),
         'year': np.array([2000, 2000])})
    config = {'reference_magnitude': 0.0}
    self.ks_ml.calculate(catalogue, config, completeness_table)
def test_input_checks_use_reference_magnitude(self):
    fake_completeness_table = 0.0
    catalogue = Catalogue.make_from_dict({'year': [1900]})
    config = {'reference_magnitude': 3.0}
    cmag, ctime, ref_mag, dmag, _ = rec_utils.input_checks(
        catalogue, config, fake_completeness_table)
    self.assertEqual(3.0, ref_mag)
def test_select_events_within_cell(self):
    # Tests the selection of events within a cell centred on the point
    self.point_source = mtkPointSource('101', 'A Point Source')
    simple_point = Point(4.5, 4.5)
    self.point_source.create_geometry(simple_point, 0., 30.)
    self.catalogue = Catalogue()
    self.catalogue.data['eventID'] = np.arange(0, 7, 1)
    self.catalogue.data['longitude'] = np.arange(4.0, 7.5, 0.5)
    self.catalogue.data['latitude'] = np.arange(4.0, 7.5, 0.5)
    self.catalogue.data['depth'] = np.ones(7, dtype=float)
    selector0 = CatalogueSelector(self.catalogue)

    # Simple case - 200 km by 200 km cell centred on the point
    self.point_source.select_catalogue_within_cell(selector0, 100.)
    np.testing.assert_array_almost_equal(
        np.array([4., 4.5, 5.]),
        self.point_source.catalogue.data['longitude'])
    np.testing.assert_array_almost_equal(
        np.array([4., 4.5, 5.]),
        self.point_source.catalogue.data['latitude'])
    np.testing.assert_array_almost_equal(
        np.array([1., 1., 1.]),
        self.point_source.catalogue.data['depth'])
def test_get_even_magnitude_completeness(self):
    '''
    Tests the function to render an evenly spaced completeness table
    at 0.1 magnitude interval spacing
    '''
    # Common case - many rows
    self.catalogue = Catalogue()
    self.catalogue.data['magnitude'] = np.array([4.5, 5.0])
    comp_table = np.array([[1990., 4.0],
                           [1960., 4.5],
                           [1900., 4.8]])
    expected_table = np.array([[1990., 4.0],
                               [1990., 4.1],
                               [1990., 4.2],
                               [1990., 4.3],
                               [1990., 4.4],
                               [1960., 4.5],
                               [1960., 4.6],
                               [1960., 4.7],
                               [1900., 4.8],
                               [1900., 4.9],
                               [1900., 5.0]])
    np.testing.assert_array_almost_equal(
        expected_table,
        utils.get_even_magnitude_completeness(comp_table,
                                              self.catalogue)[0])

    # Common case - only one value
    comp_table = np.array([[1990., 4.0]])
    np.testing.assert_array_almost_equal(
        np.array([[1990., 4.0]]),
        utils.get_even_magnitude_completeness(comp_table,
                                              self.catalogue)[0])
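# The expected_table above shows the behaviour being tested: each
# 0.1-magnitude bin inherits the completeness year of the largest
# table threshold not exceeding it, up to the catalogue's maximum
# magnitude. A minimal sketch of that rule (inferred from the
# assertions, not taken from the hmtk source):
import numpy as np

def even_completeness_sketch(comp_table, mmax, dmag=0.1):
    # comp_table rows are (year, magnitude), magnitudes ascending
    mags = np.arange(comp_table[0, 1], mmax + dmag / 2., dmag)
    years = [comp_table[comp_table[:, 1] <= m + 1e-9][-1, 0]
             for m in mags]
    return np.column_stack([years, mags])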
def setUp(self):
    """
    This generates a minimum data-set to be used for the regression.
    """
    # Test A: Generates a data set assuming b=1 and a=8, i.e.
    # N(m >= 4.0) = 10^4 events
    self.dmag = 0.1
    mext = np.arange(4.0, 7.01, 0.1)
    self.mval = mext[0:-1] + self.dmag / 2.0
    self.bval = 1.0
    self.numobs = np.flipud(
        np.diff(np.flipud(10.0 ** (-self.bval * mext + 8.0))))
    # Test B: Generate a completely artificial catalogue using the
    # Gutenberg-Richter distribution defined above
    numobs = np.around(self.numobs)
    size = int(np.sum(self.numobs))
    magnitude = np.zeros(size)
    lidx = 0
    for mag, nobs in zip(self.mval, numobs):
        uidx = int(lidx + nobs)
        magnitude[lidx:uidx] = mag + 0.01
        lidx = uidx
    year = np.ones(size) * 1999
    self.catalogue = Catalogue.make_from_dict(
        {'magnitude': magnitude, 'year': year})
    # Create the seismicity occurrence calculator
    self.aki_ml = AkiMaxLikelihood()
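# Sanity check of the construction above (not part of the original
# test): with log10 N(>= m) = 8.0 - 1.0 * m, the flipud/diff/flipud
# idiom turns the cumulative counts into incremental counts per
# 0.1-magnitude bin, i.e. N(>= m_low) - N(>= m_high).
import numpy as np

mext = np.arange(4.0, 7.01, 0.1)
cumulative = 10.0 ** (-1.0 * mext + 8.0)
incremental = np.flipud(np.diff(np.flipud(cumulative)))
assert np.isclose(incremental.sum(), cumulative[0] - cumulative[-1])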
def test_analysis_Frankel_comparison(self):
    '''
    To test the run_analysis function we compare test results with
    those from Frankel's Fortran implementation, under the same
    conditions
    '''
    self.grid_limits = [-128., -113.0, 0.2, 30., 43.0, 0.2,
                        0., 100., 100.]
    comp_table = np.array([[1933., 4.0],
                           [1900., 5.0],
                           [1850., 6.0],
                           [1850., 7.0]])
    config = {'Length_Limit': 3., 'BandWidth': 50., 'increment': 0.1}
    self.model = SmoothedSeismicity(self.grid_limits, bvalue=0.8)
    self.catalogue = Catalogue()
    frankel_catalogue = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_TEST_CATALOGUE))
    self.catalogue.data['magnitude'] = frankel_catalogue[:, 0]
    self.catalogue.data['longitude'] = frankel_catalogue[:, 1]
    self.catalogue.data['latitude'] = frankel_catalogue[:, 2]
    self.catalogue.data['depth'] = frankel_catalogue[:, 3]
    self.catalogue.data['year'] = frankel_catalogue[:, 4]
    self.catalogue.end_year = 2006
    frankel_results = np.genfromtxt(
        os.path.join(BASE_PATH, FRANKEL_OUTPUT_FILE))
    # Run analysis
    output_data = self.model.run_analysis(
        self.catalogue, config, completeness_table=comp_table,
        smoothing_kernel=IsotropicGaussian())
    self.assertTrue(
        fabs(np.sum(output_data[:, -1]) - np.sum(output_data[:, -2]))
        < 1.0)
    self.assertTrue(fabs(np.sum(output_data[:, -1]) - 390.) < 1.0)
def test_select_within_distance(self):
    '''
    Tests the selection of earthquakes within a distance of the fault
    '''
    # Create fault
    self.fault_source = mtkComplexFaultSource('101', 'A complex fault')
    # Test case when the input is a list of hazardlib.geo.line.Line
    self.fault_source.create_geometry(self.trace_line, mesh_spacing=2.0)
    self.assertIsInstance(self.fault_source.geometry,
                          ComplexFaultSurface)

    # Create simple catalogue
    self.catalogue.data['longitude'] = np.arange(0., 4.1, 0.1)
    self.catalogue.data['latitude'] = np.arange(0., 4.1, 0.1)
    self.catalogue.data['depth'] = np.ones(41, dtype=float)
    self.catalogue.data['eventID'] = np.arange(0, 41, 1)
    selector0 = CatalogueSelector(self.catalogue)

    # Test when considering Joyner-Boore distance
    self.fault_source.select_catalogue(selector0, 50.)
    np.testing.assert_array_equal(
        self.fault_source.catalogue.data['eventID'],
        np.arange(2, 14, 1))

    # Test when considering rupture distance
    self.fault_source.select_catalogue(selector0, 50., 'rupture')
    np.testing.assert_array_equal(
        self.fault_source.catalogue.data['eventID'],
        np.arange(2, 12, 1))

    # The usual test to ensure an error is raised when no events are in
    # the catalogue
    self.catalogue = Catalogue()
    selector0 = CatalogueSelector(self.catalogue)
    with self.assertRaises(ValueError) as ver:
        self.fault_source.select_catalogue(selector0, 40.0)
    self.assertEqual(str(ver.exception), 'No events found in catalogue!')
def setUp(self):
    """
    Sets up the test catalogue to be used for the Weichert algorithm
    """
    cat_file = os.path.join(BASE_DATA_PATH, "synthetic_test_cat1.csv")
    raw_data = np.genfromtxt(cat_file, delimiter=",")
    self.catalogue = Catalogue.make_from_dict({
        "eventID": raw_data[:, 0].astype(int),
        "year": raw_data[:, 1].astype(int),
        "dtime": raw_data[:, 2],
        "longitude": raw_data[:, 3],
        "latitude": raw_data[:, 4],
        "magnitude": raw_data[:, 5],
        "depth": raw_data[:, 6]})
    self.config = {"reference_magnitude": 3.0}
    self.completeness = np.array([[1990., 3.0],
                                  [1975., 4.0],
                                  [1960., 5.0],
                                  [1930., 6.0],
                                  [1910., 7.0]])
def test_catalogue_writer_only_mag_table_purging(self):
    '''
    Tests the writer purging only according to the magnitude table
    '''
    # Write to file
    writer = CsvCatalogueWriter(self.output_filename)
    writer.write_file(self.catalogue,
                      magnitude_table=self.magnitude_table)
    parser = CsvCatalogueParser(self.output_filename)
    cat2 = parser.read_file()

    expected_catalogue = Catalogue()
    expected_catalogue.data['eventID'] = ['1', '3', '5']
    expected_catalogue.data['magnitude'] = np.array([5.6, 4.8, 5.0])
    expected_catalogue.data['year'] = np.array([1960, 1970, 1990])
    expected_catalogue.data['ErrorStrike'] = np.array(
        [np.nan, np.nan, np.nan])
    self.check_catalogues_are_equal(expected_catalogue, cat2)
def test_catalogue_writer_only_flag_purging(self):
    '''
    Tests the writer purging only according to the flag vector
    '''
    # Write to file
    writer = CsvCatalogueWriter(self.output_filename)
    writer.write_file(self.catalogue, flag_vector=self.flag)
    parser = CsvCatalogueParser(self.output_filename)
    cat2 = parser.read_file()

    expected_catalogue = Catalogue()
    expected_catalogue.data['eventID'] = ['1', '2', '3', '4']
    expected_catalogue.data['magnitude'] = np.array([5.6, 5.4, 4.8, 4.3])
    expected_catalogue.data['year'] = np.array([1960, 1965, 1970, 1980])
    expected_catalogue.data['ErrorStrike'] = np.array(
        [np.nan, np.nan, np.nan, np.nan])
    self.check_catalogues_are_equal(expected_catalogue, cat2)
def test_input_checks_simple_input(self):
    completeness_table = [[1900, 2.0]]
    catalogue = Catalogue.make_from_dict(
        {'magnitude': [5.0, 6.0], 'year': [2000, 2000]})
    config = {}
    rec_utils.input_checks(catalogue, config, completeness_table)
def from_df(df, end_year=None):
    """
    Converts a dataframe into a
    :class:`openquake.hmtk.seismicity.catalogue.Catalogue` instance

    :param df:
        The dataframe with the catalogue
    :returns:
        The catalogue instance
    """
    cat = Catalogue()
    for column in df:
        if (column in Catalogue.FLOAT_ATTRIBUTE_LIST or
                column in Catalogue.INT_ATTRIBUTE_LIST):
            cat.data[column] = df[column].to_numpy()
        else:
            cat.data[column] = df[column]
    cat.end_year = np.max(df.year) if end_year is None else end_year
    return cat
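# A hypothetical round trip for from_df: build a small DataFrame with
# column names matching the Catalogue attribute lists and convert it.
# The values are illustrative only.
import pandas as pd

df = pd.DataFrame({'year': [1990, 2005],
                   'magnitude': [5.5, 6.1],
                   'longitude': [10.0, 11.0],
                   'latitude': [45.0, 46.0]})
cat = from_df(df)
assert cat.end_year == 2005  # taken from max(df.year) when not given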
def get_catalogue_from_ses(fname, duration):
    """
    Converts a set of ruptures into an instance of
    :class:`openquake.hmtk.seismicity.catalogue.Catalogue`.

    :param fname:
        Name of the .csv file
    :param float duration:
        Duration [in years] of the SES
    :returns:
        A :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        instance
    """
    # Read the set of ruptures
    ses = pd.read_csv(fname, sep='\t', skiprows=1)
    if len(ses.columns) < 2:
        ses = pd.read_csv(fname, sep=',', skiprows=1)

    # Create an empty catalogue
    cat = Catalogue()

    # Set catalogue data
    cnt = 0
    year = []
    eventids = []
    mags = []
    lons = []
    lats = []
    deps = []
    for i in range(len(ses)):
        nevents = ses['multiplicity'][i]
        for _ in range(nevents):
            eventids.append('{:d}'.format(cnt))
            mags.append(ses['mag'].values[i])
            lons.append(ses['centroid_lon'].values[i])
            lats.append(ses['centroid_lat'].values[i])
            deps.append(ses['centroid_depth'].values[i])
            cnt += 1
            # Assign each event a random year within the SES duration
            year.append(numpy.random.randint(1, int(duration) + 1))

    data = {}
    year = numpy.array(year, dtype=int)
    data['year'] = year
    data['month'] = numpy.ones_like(year, dtype=int)
    data['day'] = numpy.ones_like(year, dtype=int)
    data['hour'] = numpy.zeros_like(year, dtype=int)
    data['minute'] = numpy.zeros_like(year, dtype=int)
    data['second'] = numpy.zeros_like(year, dtype=float)
    data['magnitude'] = numpy.array(mags)
    data['longitude'] = numpy.array(lons)
    data['latitude'] = numpy.array(lats)
    data['depth'] = numpy.array(deps)
    data['eventID'] = eventids
    cat.data = data
    cat.end_year = duration
    cat.start_year = 0
    cat.data['dtime'] = cat.get_decimal_time()
    return cat
def test_catalogue_mt_filter(self):
    # Tests the catalogue magnitude-time filter
    cat = Catalogue()
    cat.load_from_array(['year', 'magnitude'], self.data_array)
    cat.data['eventID'] = np.arange(0, 7)
    cat.catalogue_mt_filter(self.mt_table)
    mag = np.array([7.0, 5.5, 5.01, 6.99])
    yea = np.array([1920, 1970, 1960, 1960])
    np.testing.assert_allclose(cat.data['magnitude'], mag)
    np.testing.assert_allclose(cat.data['year'], yea)
def build_catalogue_from_file(filename):
    """
    Creates a "minimal" catalogue from a raw csv file
    """
    raw_data = np.genfromtxt(filename, delimiter=",")
    return Catalogue.make_from_dict({
        "eventID": raw_data[:, 0].astype(int),
        "year": raw_data[:, 1].astype(int),
        "dtime": raw_data[:, 2],
        "longitude": raw_data[:, 3],
        "latitude": raw_data[:, 4],
        "magnitude": raw_data[:, 5],
        "depth": raw_data[:, 6]})
def test_update_start_end_year(self):
    # Tests the correct usage of the update start and end year methods
    cat1 = Catalogue()
    cat1.data['year'] = np.array([1900, 1950, 2000])
    # Update start year
    cat1.update_start_year()
    self.assertEqual(cat1.start_year, 1900)
    # Update end year
    cat1.update_end_year()
    self.assertEqual(cat1.end_year, 2000)
def test_get_decimal_time(self):
    # Tests the decimal time function. The function itself is tested in
    # tests.seismicity.utils so only minimal testing is undertaken here
    # to ensure coverage
    time_dict = {'year': np.array([1990, 2000]),
                 'month': np.array([3, 9]),
                 'day': np.ones(2, dtype=int),
                 'hour': np.ones(2, dtype=int),
                 'minute': np.ones(2, dtype=int),
                 'second': np.ones(2, dtype=float)}
    expected_dec_time = decimal_time(time_dict['year'],
                                     time_dict['month'],
                                     time_dict['day'],
                                     time_dict['hour'],
                                     time_dict['minute'],
                                     time_dict['second'])
    cat = Catalogue()
    for key in ['year', 'month', 'day', 'hour', 'minute', 'second']:
        cat.data[key] = np.copy(time_dict[key])
    np.testing.assert_array_almost_equal(expected_dec_time,
                                         cat.get_decimal_time())
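# For reference, "decimal time" expresses each event as a fractional
# year. The hmtk decimal_time utility is the authoritative
# implementation; the rough stand-alone version below only illustrates
# the idea at day resolution.
import datetime

def approx_decimal_year(year, month, day):
    start = datetime.date(year, 1, 1)
    days_in_year = (datetime.date(year + 1, 1, 1) - start).days
    elapsed = (datetime.date(year, month, day) - start).days
    return year + elapsed / days_in_year

# 1 March 1990 falls roughly 16% of the way through the year
assert abs(approx_decimal_year(1990, 3, 1) - 1990.1616) < 0.001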
def setUp(self):
    warnings.simplefilter("ignore")
    self.catalogue = Catalogue()
    self.fault_source = None
    self.trace_line = [line.Line([point.Point(1.0, 0.0, 1.0),
                                  point.Point(0.0, 1.0, 0.9)])]
    self.trace_line.append(line.Line([point.Point(1.2, 0.0, 40.),
                                      point.Point(1.0, 1.0, 45.),
                                      point.Point(0.0, 1.3, 42.)]))
    self.trace_array = [np.array([[1.0, 0.0, 1.0],
                                  [0.0, 1.0, 0.9]])]
    self.trace_array.append(np.array([[1.2, 0.0, 40.],
                                      [1.0, 1.0, 45.],
                                      [0.0, 1.3, 42.]]))
def test_generate_synthetic_catalogues(self):
    '''
    Tests the openquake.hmtk.seismicity.occurrence.utils function
    generate_synthetic_magnitudes
    '''
    bvals = []
    # Generate a set of synthetic catalogues
    for _ in range(0, 100):
        cat1 = rec_utils.generate_synthetic_magnitudes(4.5, 1.0, 4.0,
                                                       8.0, 1000)
        bvals.append(self.occur.calculate(
            Catalogue.make_from_dict(cat1))[0])
    bvals = np.array(bvals)
    self.assertAlmostEqual(np.mean(bvals), 1.0, 1)
def test_generate_magnitudes(self):
    '''
    Tests the openquake.hmtk.seismicity.occurrence.utils function
    generate_trunc_gr_magnitudes
    '''
    bvals = []
    # Generate a set of synthetic catalogues
    for _ in range(0, 100):
        mags = rec_utils.generate_trunc_gr_magnitudes(1.0, 4.0, 8.0,
                                                      1000)
        cat = Catalogue.make_from_dict(
            {'magnitude': mags,
             'year': np.zeros(len(mags), dtype=int)})
        bvals.append(self.occur.calculate(cat)[0])
    bvals = np.array(bvals)
    self.assertAlmostEqual(np.mean(bvals), 1.0, 1)
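# The generator exercised above draws magnitudes from a truncated
# Gutenberg-Richter (exponential) distribution. A common construction
# - and a plausible sketch of what generate_trunc_gr_magnitudes does,
# though not taken from its source - is inverse-transform sampling:
import numpy as np

def sample_trunc_gr(bval, mmin, mmax, nsamples, seed=None):
    rng = np.random.default_rng(seed)
    beta = bval * np.log(10.0)
    u = rng.random(nsamples)
    # Invert F(m) = (1 - exp(-beta (m - mmin))) /
    #               (1 - exp(-beta (mmax - mmin)))
    scale = 1.0 - np.exp(-beta * (mmax - mmin))
    return mmin - np.log(1.0 - u * scale) / beta

mags = sample_trunc_gr(1.0, 4.0, 8.0, 1000, seed=42)
assert mags.min() >= 4.0 and mags.max() <= 8.0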
class TestGetDistributions(unittest.TestCase):
    """
    Class to test the openquake.hmtk.seismicity.catalogue.Catalogue
    methods to determine depth distribution, magnitude-depth
    distribution, and magnitude-time distribution
    """
    def setUp(self):
        self.catalogue = Catalogue()

    def test_depth_distribution_no_depth_error(self):
        # Ensure an error is raised when no depths are found in the
        # catalogue
        depth_bins = np.arange(0., 60., 10.)
        self.catalogue.data['depth'] = np.array([])
        with self.assertRaises(ValueError) as ae:
            self.catalogue.get_depth_distribution(depth_bins)
        self.assertEqual(str(ae.exception),
                         'Depths missing in catalogue')

    def test_depth_distribution_simple(self):
        # Tests the calculation of the depth histogram with no
        # uncertainties
        # Without normalisation
        self.catalogue.data['depth'] = np.arange(5., 50., 5.)
        depth_bins = np.arange(0., 60., 10.)
        expected_array = np.array([1., 2., 2., 2., 2.])
        np.testing.assert_array_almost_equal(
            expected_array,
            self.catalogue.get_depth_distribution(depth_bins))
        # With normalisation
        np.testing.assert_array_almost_equal(
            expected_array / np.sum(expected_array),
            self.catalogue.get_depth_distribution(depth_bins,
                                                  normalisation=True))

    def test_depth_distribution_uncertainties(self):
        # Tests the depth distribution with uncertainties
        # Without normalisation
        self.catalogue.data['depth'] = np.arange(5., 50., 5.)
        self.catalogue.data['depthError'] = 3. * np.ones_like(
            self.catalogue.data['depth'])
        depth_bins = np.arange(-10., 70., 10.)
        expected_array = np.array([0., 1.5, 2., 2., 2., 1.5, 0.])
        hist_array = self.catalogue.get_depth_distribution(
            depth_bins, bootstrap=1000)
        array_diff = np.round(hist_array, 1) - expected_array
        self.assertTrue(np.all(np.fabs(array_diff) < 0.2))
        # With normalisation
        expected_array = np.array([0., 0.16, 0.22, 0.22, 0.22, 0.16,
                                   0.01])
        hist_array = self.catalogue.get_depth_distribution(
            depth_bins, normalisation=True, bootstrap=1000)
        array_diff = np.round(hist_array, 2) - expected_array
        self.assertTrue(np.all(np.fabs(array_diff) < 0.03))
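# The bootstrap keyword used in the uncertainty tests above smears each
# depth by its reported error before binning. A minimal sketch of that
# idea - an assumption consistent with the smeared expected arrays in
# the assertions, not taken from the hmtk source - resamples the depths
# from a normal distribution and averages the histograms:
import numpy as np

def bootstrap_depth_histogram(depths, errors, bins, nboot=1000,
                              seed=None):
    rng = np.random.default_rng(seed)
    counts = np.zeros(len(bins) - 1)
    for _ in range(nboot):
        counts += np.histogram(rng.normal(depths, errors),
                               bins=bins)[0]
    return counts / nboot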
def setUp(self):
    """
    This generates a catalogue to be used for the regression.
    """
    # Generates a data set assuming b=1
    self.dmag = 0.1
    mext = np.arange(4.0, 7.01, 0.1)
    self.mval = mext[0:-1] + self.dmag / 2.0
    self.bval = 1.0
    numobs = np.flipud(
        np.diff(np.flipud(10.0 ** (-self.bval * mext + 7.0))))
    # Define completeness window
    numobs[0:6] *= 10
    numobs[6:13] *= 20
    numobs[13:22] *= 50
    numobs[22:] *= 100
    compl = np.array([[1900, 1950, 1980, 1990],
                      [6.34, 5.44, 4.74, 3.0]])
    self.compl = compl.transpose()
    numobs = np.around(numobs)
    magnitude = np.zeros(int(np.sum(numobs)))
    year = np.zeros(int(np.sum(numobs)))
    lidx = 0
    # For each magnitude bin, draw occurrence years uniformly within
    # the completeness window for that magnitude
    for mag, nobs in zip(self.mval, numobs):
        uidx = int(lidx + nobs)
        magnitude[lidx:uidx] = mag + 0.01
        year_low = compl[0, np.min(np.nonzero(compl[1, :] < mag)[0])]
        year[lidx:uidx] = (year_low +
                           np.random.rand(uidx - lidx) *
                           (2000 - year_low))
        lidx = uidx
    self.catalogue = Catalogue.make_from_dict(
        {'magnitude': magnitude, 'year': year})
    self.b_ml = BMaxLikelihood()
    self.config = {'Average Type': 'Weighted'}
def test_purge_catalogue(self):
    # Tests the function to purge the catalogue of invalid events
    cat1 = Catalogue()
    cat1.data['eventID'] = np.array([100, 101, 102], dtype=int)
    cat1.data['magnitude'] = np.array([4., 5., 6.], dtype=float)
    cat1.data['Agency'] = ['XXX', 'YYY', 'ZZZ']
    flag_vector = np.array([False, True, False])
    cat1.purge_catalogue(flag_vector)
    np.testing.assert_array_almost_equal(cat1.data['magnitude'],
                                         np.array([5.]))
    np.testing.assert_array_equal(cat1.data['eventID'],
                                  np.array([101]))
    self.assertListEqual(cat1.data['Agency'], ['YYY'])
def test_create_cluster_set(self):
    """
    Tests the splitting of a catalogue into a set of cluster
    catalogues from a cluster-index vector
    """
    # Setup function
    self.catalogue = Catalogue()
    self.catalogue.data["EventID"] = np.array([1, 2, 3, 4, 5, 6])
    self.catalogue.data["magnitude"] = np.array(
        [7.0, 5.0, 5.0, 5.0, 4.0, 4.0])
    selector0 = CatalogueSelector(self.catalogue)
    vcl = np.array([0, 1, 1, 1, 2, 2])
    cluster_set = selector0.create_cluster_set(vcl)
    np.testing.assert_array_equal(cluster_set[0].data["EventID"],
                                  np.array([1]))
    np.testing.assert_array_almost_equal(
        cluster_set[0].data["magnitude"], np.array([7.0]))
    np.testing.assert_array_equal(cluster_set[1].data["EventID"],
                                  np.array([2, 3, 4]))
    np.testing.assert_array_almost_equal(
        cluster_set[1].data["magnitude"], np.array([5.0, 5.0, 5.0]))
    np.testing.assert_array_equal(cluster_set[2].data["EventID"],
                                  np.array([5, 6]))
    np.testing.assert_array_almost_equal(
        cluster_set[2].data["magnitude"], np.array([4.0, 4.0]))
def test_input_checks_use_a_float_for_completeness(self):
    fake_completeness_table = 0.0
    catalogue = Catalogue.make_from_dict({'year': [1900]})
    config = {}
    rec_utils.input_checks(catalogue, config, fake_completeness_table)