def testcase01(self):
    """
    Decluster a catalogue with the Gardner-Knopoff algorithm, writing one
    subcatalogue per requested label, and check that:
    - each produced subcatalogue contains exactly one mainshock with the
      expected magnitude;
    - a label with no classified events produces no output file.
    """
    config = {'time_distance_window': 'GardnerKnopoffWindow',
              'fs_time_prop': 0.9}
    decluster(catalogue_hmtk_fname=self.catalogue,
              declustering_meth='GardnerKnopoffType1',
              declustering_params=config,
              output_path=self.tmp,
              labels=['a', 'b'],
              tr_fname=self.classification,
              subcatalogues=True,
              fmat='pkl')
    #
    # Read first mainshock catalogue
    a_fname = os.path.join(self.tmp, 'catalogue_dec__a.pkl')
    self.assertTrue(os.path.exists(a_fname))
    cat = _load_catalogue(a_fname)
    # BUG FIX: the original wrote `assertTrue(len(cat.data['magnitude'] == 1))`
    # which checks the truthiness of the length of a boolean array — always
    # true for any non-empty catalogue. Assert the actual event count.
    self.assertEqual(len(cat.data['magnitude']), 1)
    self.assertAlmostEqual(cat.data['magnitude'][0], 6.0)
    #
    # Read second mainshock catalogue
    b_fname = os.path.join(self.tmp, 'catalogue_dec__b.pkl')
    self.assertTrue(os.path.exists(b_fname))
    cat = _load_catalogue(b_fname)
    self.assertEqual(len(cat.data['magnitude']), 1)
    self.assertAlmostEqual(cat.data['magnitude'][0], 6.1)
    #
    # Check that the third mainshock catalogue does not exist
    c_fname = os.path.join(self.tmp, 'catalogue_dec__c.pkl')
    self.assertFalse(os.path.exists(c_fname))
def main(config_fname, *, root=None):
    """
    Decluster a catalogue and create subcatalogues.

    :param config_fname:
        Path to a .toml configuration file containing a `main` section
        plus any number of sections whose names start with `method`
        (declustering method name, params, label) and `case` (regions
        and catalogue label).
    :param root:
        Folder used to resolve the relative paths in the configuration.
        Defaults to the current working directory.
    """
    if root is None:
        root = os.getcwd()

    print('\nReading:', config_fname)
    config = toml.load(config_fname)

    fname_cat = os.path.join(root, config['main']['catalogue'])
    fname_reg = os.path.join(root, config['main']['tr_file'])
    fname_out = os.path.join(root, config['main']['output'])
    create_sc = config['main']['create_subcatalogues']
    save_afrs = config['main']['save_aftershocks']
    add_deflt = config['main']['catalogue_add_defaults']

    # Validate inputs explicitly: the original used bare `assert`, which is
    # silently stripped when Python runs with the -O flag.
    for path in (fname_cat, fname_reg, fname_out):
        if not os.path.exists(path):
            raise FileNotFoundError('Path does not exist: {:s}'.format(path))

    # Collect the declustering methods declared in the configuration
    methods = []
    for key in config:
        if re.search('^method', key):
            method = config[key]['name']
            params = config[key]['params']
            label = config[key]['label']
            methods.append([method, params, label])

    # Run every declustering method on every case
    for key in config:
        if re.search('^case', key):
            print('\n Case {:s}'.format(key))
            regions = config[key]['regions']
            cat_lab = config[key]['label']
            for meth in methods:
                print('')
                # Deep-copy the params so a run cannot mutate the shared
                # configuration for the following cases
                params = copy.deepcopy(meth[1])
                _ = decluster(fname_cat, meth[0], params, fname_out,
                              regions, fname_reg, create_sc, 'csv',
                              meth[2], save_afrs, cat_lab, add_deflt)
def catalogue_declustering(fname: str, output_folder: str, *,
                           subcatalogues: bool = False):
    """
    Decluster a catalogue with the Gardner-Knopoff (type 1) algorithm,
    once per time-distance window (Gardner-Knopoff, Uhrhammer, Gruenthal).
    Results are written to `output_folder` with a window-specific suffix.

    :param fname:
        Path to the catalogue file to decluster.
    :param output_folder:
        Folder where the declustered catalogues are written (created if
        missing).
    :param subcatalogues:
        When True, also create one subcatalogue per classification label.
    """
    create_folder(output_folder)
    create_folder('./tmp')

    # Create a fake file with the classification. We use a fake classification
    # since earthquakes in this analysis are just from stable crust
    tr_fname = './tmp/fake.hdf5'
    cat = _load_catalogue(fname)
    label = np.ones_like(np.array(cat['magnitude']))
    # Context manager guarantees the HDF5 file is closed even on error
    with h5py.File(tr_fname, 'w') as f:
        _ = f.create_dataset("undef", data=label)
    labels = ['undef']

    # Declustering with the classical GK algorithm, run once per window.
    # (The original repeated the same call three times verbatim.)
    declustering_meth = 'GardnerKnopoffType1'
    windows = [
        ('GardnerKnopoffWindow', '_gk'),
        ('UhrhammerWindow', '_uh'),
        ('GruenthalWindow', '_gr'),
    ]
    for window, olab in windows:
        declustering_params = {
            'time_distance_window': window,
            'fs_time_prop': 0.9
        }
        _ = decluster(fname, declustering_meth, declustering_params,
                      output_folder, labels=labels, tr_fname=tr_fname,
                      subcatalogues=subcatalogues, olab=olab,
                      save_af=True, fix_defaults=True)