def test_read_eq_catalog(self):
    """Read the job catalog: ten events, and the first one matches exactly.

    NOTE(review): this test is defined twice in this module; the later
    identical definition shadows this one — consider removing one copy.
    """
    first_entry = dict(
        eventID=1, Agency='AAA', Identifier=20000102034913,
        year=2000, month=1, day=2, hour=3, minute=49, second=13.0,
        timeError=0.02,
        longitude=7.282, latitude=44.368,
        SemiMajor90=2.43, SemiMinor90=1.01, ErrorStrike=298.0,
        depth=9.3, depthError=0.5,
        Mw=1.71, sigmaMw=0.355,
        Ms='', sigmaMs='',
        mb='', sigmamb='',
        ML=1.7, sigmaML=0.1)
    read_eq_catalog(self.context_jobs)
    self.assertEqual(10, len(self.context_jobs.eq_catalog))
    self.assertEqual(first_entry, self.context_jobs.eq_catalog[0])
def test_stepp(self):
    """Stepp completeness analysis yields the expected magnitude/year table.

    The same result is expected whether or not the catalog is declustered
    first with Gardner-Knopoff.
    """
    self.context.config['eq_catalog_file'] = get_data_path(
        'completeness_input_test.csv', DATA_DIR)
    stepp_cfg = self.context.config['Stepp']
    stepp_cfg['time_window'] = 5
    stepp_cfg['magnitude_windows'] = 0.1
    stepp_cfg['sensitivity'] = 0.2
    stepp_cfg['increment_lock'] = True
    read_eq_catalog(self.context)
    create_catalog_matrix(self.context)
    # Completeness years by magnitude bin: 4.0-4.4 -> 1994,
    # 4.5-5.4 -> 1964, 5.5-7.3 -> 1919 (0.1 magnitude steps).
    # round(..., 1) reproduces the exact double of each decimal literal.
    expected_completeness = np.array(
        [[round(4.0 + 0.1 * i, 1), 1994.] for i in range(5)] +
        [[round(4.5 + 0.1 * i, 1), 1964.] for i in range(10)] +
        [[round(5.5 + 0.1 * i, 1), 1919.] for i in range(19)])
    stepp(self.context)
    self.assertTrue(np.allclose(expected_completeness,
                                self.context.completeness_table))
    # Declustering first must not change the completeness result.
    gardner_knopoff(self.context)
    stepp(self.context)
    self.assertTrue(np.allclose(expected_completeness,
                                self.context.completeness_table))
def test_gardner_knopoff(self):
    """Gardner-Knopoff declustering flags rows 4, 10 and 19 as dependent."""
    self.context.config['eq_catalog_file'] = get_data_path(
        'declustering_input_test.csv', DATA_DIR)
    gk_cfg = self.context.config['GardnerKnopoff']
    gk_cfg['time_dist_windows'] = 'GardnerKnopoff'
    gk_cfg['foreshock_time_window'] = 0.5
    read_eq_catalog(self.context)
    create_catalog_matrix(self.context)
    dependent_rows = [4, 10, 19]
    # Main-shock catalog: the input matrix minus the dependent rows.
    expected_mainshocks = np.delete(
        self.context.catalog_matrix, dependent_rows, 0)
    # Cluster ids: non-zero only at the dependent events.
    expected_vcl = np.zeros(20, dtype=int)
    expected_vcl[dependent_rows] = [3, 2, 6]
    # Flag vector: 1 marks each dependent event.
    expected_flags = np.zeros(20, dtype=int)
    expected_flags[dependent_rows] = 1
    gardner_knopoff(self.context)
    self.assertTrue(np.array_equal(expected_mainshocks,
                                   self.context.catalog_matrix))
    self.assertTrue(np.array_equal(expected_vcl, self.context.vcl))
    self.assertTrue(np.array_equal(expected_flags,
                                   self.context.flag_vector))
def test_store_catalog_in_csv_after_preprocessing(self):
    """Stored preprocessed catalog matches the expected CSV fixture.

    NOTE(review): this test is defined twice in this module; this earlier
    copy is shadowed by the later identical one — consider removing one.
    """
    self.context_jobs.selected_eq_vector = np.array(
        [0, 0, 0, 1, 1, 0, 1, 0, 1, 0])
    read_eq_catalog(self.context_jobs)
    self.context_jobs.catalog_matrix = self.context_jobs.eq_catalog
    store_preprocessed_catalog(self.context_jobs)
    result_file = self.context_jobs.config['pprocessing_result_file']
    self.assertTrue(
        filecmp.cmp(self.expected_preprocessed_catalogue, result_file))
def test_store_catalog_in_csv_after_preprocessing(self):
    """Stored preprocessed catalog is byte-identical to the fixture file.

    NOTE(review): duplicate of an earlier, identically named test in this
    module; this definition shadows the earlier one — remove one copy.
    """
    selection = np.array([0, 0, 0, 1, 1, 0, 1, 0, 1, 0])
    self.context_jobs.selected_eq_vector = selection
    read_eq_catalog(self.context_jobs)
    self.context_jobs.catalog_matrix = self.context_jobs.eq_catalog
    store_preprocessed_catalog(self.context_jobs)
    self.assertTrue(filecmp.cmp(
        self.expected_preprocessed_catalogue,
        self.context_jobs.config['pprocessing_result_file']))
def test_read_eq_catalog(self):
    """Catalog reader returns ten events; the first entry is spot-checked.

    NOTE(review): duplicate of an earlier, identically named test in this
    module; this definition shadows the earlier one — remove one copy.
    """
    expected_first_eq_entry = {
        # identification
        'eventID': 1, 'Agency': 'AAA', 'Identifier': 20000102034913,
        # origin time
        'year': 2000, 'month': 1, 'day': 2,
        'hour': 3, 'minute': 49, 'second': 13.0, 'timeError': 0.02,
        # location and its uncertainty
        'longitude': 7.282, 'latitude': 44.368,
        'SemiMajor90': 2.43, 'SemiMinor90': 1.01, 'ErrorStrike': 298.0,
        'depth': 9.3, 'depthError': 0.5,
        # magnitudes (empty string means "not reported")
        'Mw': 1.71, 'sigmaMw': 0.355,
        'Ms': '', 'sigmaMs': '',
        'mb': '', 'sigmamb': '',
        'ML': 1.7, 'sigmaML': 0.1,
    }
    read_eq_catalog(self.context_jobs)
    self.assertEqual(10, len(self.context_jobs.eq_catalog))
    self.assertEqual(expected_first_eq_entry,
                     self.context_jobs.eq_catalog[0])
def test_parameters_gardner_knopoff(self):
    """gardner_knopoff forwards its configured parameters to the mapped
    scientific function.

    A mock replaces the mapped function and asserts it receives the
    window type and foreshock time window set in the config.
    """
    self.context.config['eq_catalog_file'] = get_data_path(
        'declustering_input_test.csv', DATA_DIR)
    self.context.config['GardnerKnopoff']['time_dist_windows'] = \
        'GardnerKnopoff'
    self.context.config['GardnerKnopoff']['foreshock_time_window'] = 0.5
    read_eq_catalog(self.context)
    create_catalog_matrix(self.context)

    def mock(data, time_dist_windows, foreshock_time_window):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual instead.
        self.assertEqual("GardnerKnopoff", time_dist_windows)
        self.assertEqual(0.5, foreshock_time_window)
        # Match the real function's three-value return shape.
        return None, None, None

    self.context.map_sc['gardner_knopoff'] = mock
    gardner_knopoff(self.context)
def test_parameters_stepp(self):
    """stepp forwards its configured parameters to the mapped scientific
    function.

    A mock replaces the mapped function and asserts it receives the
    time window, magnitude window, sensitivity and increment lock set
    in the config.
    """
    self.context.config['eq_catalog_file'] = get_data_path(
        'completeness_input_test.csv', DATA_DIR)
    stepp_cfg = self.context.config['Stepp']
    stepp_cfg['time_window'] = 5
    stepp_cfg['magnitude_windows'] = 0.1
    stepp_cfg['sensitivity'] = 0.2
    stepp_cfg['increment_lock'] = True
    read_eq_catalog(self.context)
    create_catalog_matrix(self.context)

    def mock(year, mw, magnitude_windows, time_window, sensitivity, iloc):
        self.assertEqual(5, time_window)
        self.assertEqual(0.1, magnitude_windows)
        self.assertEqual(0.2, sensitivity)
        self.assertTrue(iloc)

    self.context.map_sc['stepp'] = mock
    stepp(self.context)