def test_complete_stepp_analysis_basic(self):
    """
    Basic test of the entire completeness analysis using a synthetic
    test catalogue with in-built completeness periods.
    """
    reader = CsvCatalogueParser(INPUT_FILE_1)
    self.catalogue = reader.read_file()
    self.config = {'magnitude_bin': 0.5,
                   'time_bin': 5.0,
                   'increment_lock': True,
                   'filename': None}
    # Expected (completeness year, magnitude) pairs for the synthetic data
    expected_completeness_table = np.array([[1990., 4.0],
                                            [1962., 4.5],
                                            [1959., 5.0],
                                            [1906., 5.5],
                                            [1906., 6.0],
                                            [1904., 6.5],
                                            [1904., 7.0]])
    observed = self.process.completeness(self.catalogue, self.config)
    np.testing.assert_array_almost_equal(expected_completeness_table,
                                         observed)
def setUp(self):
    """Load the completeness test catalogue and a default configuration."""
    catalogue_file = os.path.join(BASE_DATA_PATH,
                                  'completeness_test_cat.csv')
    self.catalogue = CsvCatalogueParser(catalogue_file).read_file()
    self.config = {'algorithm': None, 'number_bootstraps': None}
    self.model = CumulativeMoment()
def setUp(self):
    """
    Read the sample catalogue
    """
    catalogue_file = os.path.join(self.BASE_DATA_PATH,
                                  'gardner_knopoff_test_catalogue.csv')
    self.cat = CsvCatalogueParser(catalogue_file).read_file()
def setUp(self):
    """Load the test catalogue and the non-parametric Gaussian config."""
    catalogue_file = os.path.join(BASE_DATA_PATH,
                                  'completeness_test_cat.csv')
    self.catalogue = CsvCatalogueParser(catalogue_file).read_file()
    self.config = {'maximum_iterations': 1000,
                   'number_earthquakes': 100,
                   'number_samples': 51,
                   'tolerance': 0.05}
    self.model = KijkoNonParametricGaussian()
def setUp(self):
    """
    Read the sample catalogue
    """
    catalogue_file = os.path.join(self.BASE_DATA_PATH,
                                  'afteran_test_catalogue.csv')
    self.cat = CsvCatalogueParser(catalogue_file).read_file()
    self.dec = Afteran()
def setUp(self):
    """Load the test catalogue and the Kijko-Sellevol-Bayes configuration."""
    catalogue_file = os.path.join(BASE_DATA_PATH,
                                  'completeness_test_cat.csv')
    self.catalogue = CsvCatalogueParser(catalogue_file).read_file()
    self.config = {'b-value': 1.0,
                   'sigma-b': 0.05,
                   'input_mmin': 5.0,
                   'input_mmax': None,
                   'input_mmax_uncertainty': None,
                   'tolerance': 0.001,
                   'maximum_iterations': 1000}
    self.model = KijkoSellevolBayes()
def load_catalogue(catalogue_fname):
    """
    Load an earthquake catalogue from a pickle (.pkl/.p) or hmtk-csv
    (.csv/.hmtk) file and return it sorted chronologically.

    :param str catalogue_fname:
        Path to the catalogue file
    :returns:
        The catalogue instance, sorted chronologically
    :raises ValueError:
        If the file extension is not one of the supported types
    """
    ext = pathlib.Path(catalogue_fname).suffix
    if ext in ('.pkl', '.p'):
        # Load a previously serialised catalogue; the context manager
        # ensures the file handle is closed (the original leaked it)
        with open(catalogue_fname, 'rb') as fle:
            cat = pickle.load(fle)
    elif ext in ('.csv', '.hmtk'):
        # Load an hmtk-formatted csv file
        parser = CsvCatalogueParser(catalogue_fname)
        cat = parser.read_file()
    else:
        # Previously an unknown extension fell through to an
        # UnboundLocalError on `cat`; fail with an explicit message instead
        raise ValueError(
            'Unsupported catalogue file extension: {:s}'.format(ext))
    cat.sort_catalogue_chronologically()
    return cat
def setUp(self):
    '''
    Set up test class
    '''
    catalogue_file = os.path.join(BASE_DATA_PATH,
                                  'completeness_test_cat.csv')
    self.catalogue = CsvCatalogueParser(catalogue_file).read_file()
    self.config = {'b-value': 1.0,
                   'input_mmin': 5.0,
                   'input_mmax': None,
                   'tolerance': 0.001,
                   'maximum_iterations': 1000}
    self.model = KijkoSellevolFixedb()
def test_convert_to_decimal_2(self):
    """
    Decimal-time conversion after missing time fields are replaced
    with sensible defaults (day -> 1, hour/minute -> 0, second -> 0.0).
    """
    fname = os.path.join(DATA_DIR, 'test_cat_02.csv')
    cat = CsvCatalogueParser(fname).read_file()
    # Fill NaN entries of each time component with its default value
    defaults = {'day': 1, 'hour': 0, 'minute': 0, 'second': 0.0}
    for lab, default in defaults.items():
        missing = numpy.isnan(cat.data[lab])
        cat.data[lab][missing] = default
    computed = cat.get_decimal_time()
    expected = numpy.array([2015., 1963.65205479, 1963.65217088,
                            1963.58082192, 1999.62793753])
    numpy.testing.assert_almost_equal(computed, expected)
def _load_catalogue(catalogue_fname):
    """
    Load an earthquake catalogue from a pickle (.pkl/.p) or hmtk-csv
    (.csv/.hmtk) file.

    :param str catalogue_fname:
        Path to the catalogue file
    :returns:
        The catalogue instance, or None when a csv/hmtk file contains
        fewer than two non-empty lines (i.e. a header with no events)
    :raises ValueError:
        If the file extension is not one of the supported types
    """
    ext = pathlib.Path(catalogue_fname).suffix
    if ext in ('.pkl', '.p'):
        # Load a previously serialised catalogue; the context manager
        # ensures the file handle is closed (the original leaked it)
        with open(catalogue_fname, 'rb') as fle:
            cat = pickle.load(fle)
    elif ext in ('.csv', '.hmtk'):
        # A catalogue with only a header line (or nothing) has no events
        with open(catalogue_fname, "r") as fle:
            nonempty_lines = [line.strip("\n") for line in fle
                              if line != "\n"]
        if len(nonempty_lines) < 2:
            return None
        # Load hmtk-formatted csv file
        cat = CsvCatalogueParser(catalogue_fname).read_file()
    else:
        # Previously an unknown extension fell through to an
        # UnboundLocalError on `cat`; fail with an explicit message instead
        raise ValueError(
            'Unsupported catalogue file extension: {:s}'.format(ext))
    return cat
def get_catalogue(catalogue_filename, force_csv=False):
    """
    Load a catalogue from csv/hmtk, pickle or NDK format; csv and ndk
    catalogues are additionally cached as a pickle file next to the source.

    :param str catalogue_filename:
        Path to the catalogue file
    :param bool force_csv:
        If True, parse the file as csv regardless of its extension
    :returns:
        The catalogue instance
    :raises ValueError:
        If the file extension is not one of the supported types
    """
    ext = Path(catalogue_filename).suffix
    path, name = os.path.split(catalogue_filename)
    cat_pickle_filename = os.path.join(path, Path(name).stem + '.pkl')
    if ext in ('.csv', '.hmtk') or force_csv:
        catalogue = CsvCatalogueParser(catalogue_filename).read_file()
        # Cache the parsed catalogue for faster subsequent loads; the
        # context managers close the handles the original left open
        with open(cat_pickle_filename, 'wb') as fle:
            pickle.dump(catalogue, fle)
    elif ext in ('.pkl', '.p'):
        with open(catalogue_filename, 'rb') as fle:
            catalogue = pickle.load(fle)
    elif ext == '.ndk':
        parser = ParseNDKtoGCMT(catalogue_filename)
        catalogue = parser.read_file()
        with open(cat_pickle_filename, 'wb') as fle:
            pickle.dump(catalogue, fle)
    else:
        # Fixed typo in the error message: 'unkown' -> 'unknown'
        raise ValueError('File with an unknown extension')
    return catalogue
def test_IDL_02(self):
    """
    Check that smoothing works across the International Date Line by
    smoothing a test catalogue onto a mesh straddling the IDL.
    """
    # Set bounds crossing the IDL
    boundary_lons = [-179.5, 179.5, 179.5, -179.5, -179.5]
    boundary_lats = [-15.5, -15.5, -16.0, -16.0, -15.5]
    mesh = setUpMesh(boundary_lons, boundary_lats)
    # Read in the test catalogue
    cat_filename = TESTDIR / 'data/tools/idl_test_catalogue.csv'
    cat = CsvCatalogueParser(cat_filename).read_file()
    # Smooth the catalogue onto the mesh grid
    smooth = Smoothing(cat, mesh, 20)
    values = smooth.gaussian(50, 20)
    # Smoothed values must sum to the number of events in the catalogue
    self.assertAlmostEqual(sum(values), len(cat.data['depth']), 5)
    # The Gaussian distribution must be symmetric across the IDL:
    # the maximum percentage difference has to stay below 1
    self.assertLess(check_symmetry(mesh, values), 1)
def test_convert_to_decimal_1(self):
    """get_decimal_time must raise a ValueError for this test catalogue."""
    fname = os.path.join(DATA_DIR, 'test_cat_01.csv')
    cat = CsvCatalogueParser(fname).read_file()
    with self.assertRaises(ValueError):
        _ = cat.get_decimal_time()