def test_count_picks(self):
    """Verify pick and modified-pick counts in an example associator database."""
    tt_db_url = 'sqlite:///' + os.path.join(self.testing_path, 'tt_lsv_1D.db')
    assoc_db_url = 'sqlite:///' + os.path.join(self.testing_path,
                                               'assoc7_example_1D.db')
    associator = assoc1D.LocalAssociator(assoc_db_url, tt_db_url, max_km=80)

    # Raw picks in the example database.
    self.assertEqual(associator.count_picks(), 15)

    # (args, kwargs, expected count) for count_pick_modifieds().
    expectations = [
        ((), {}, 12),
        (('associated',), {}, 7),
        (('associated',), {'phase': 'P'}, 3),
        (('associated',), {'phase': 'S'}, 4),
        (('matched',), {}, 4),
        (('mismatched',), {}, 0),
        (('single-phase',), {}, 3),
    ]
    for args, kwargs, expected in expectations:
        self.assertEqual(associator.count_pick_modifieds(*args, **kwargs),
                         expected)
def test_fullrun_NORDIC(self):
    """Read a catalog from NORDIC and run the associator with a
    tt_stations_1D file, comparing the accumulated text output against a
    reference file.

    Fix over the original: the scratch files (``temp.txt`` and the
    per-event ``assoc_1D.db``) were only removed *after* the assertion,
    so a failing comparison left them behind and broke reruns. Cleanup
    now happens in a ``finally`` block.
    """
    db_tt = 'sqlite:///' + os.path.join(self.testing_path, 'tt_lsv_1D.db')
    assoc_params = dict(max_km=80, aggregation=1, aggr_norm='L2',
                        assoc_ot_uncert=1, nsta_declare=2,
                        cutoff_outlier=10, loc_uncert_thresh=0.1)
    catalog_file = os.path.join(self.testing_path, 'test_catalog.nordic')
    db_assoc_file = 'assoc_1D.db'
    db_assoc_url = 'sqlite:///' + db_assoc_file
    events, wavefiles = read_nordic(catalog_file, True)
    txt = ''
    try:
        for event, wavefile in zip(events, wavefiles):
            # Start each event from a fresh associator database.
            if os.path.exists(db_assoc_file):
                os.remove(db_assoc_file)
            dbsession = make_assoc_session(db_assoc_url)
            for pick in event.picks:
                dbsession.add(tables1D.Pick.from_obspy(pick))
            dbsession.commit()
            assoc = assoc1D.LocalAssociator(db_assoc_url, db_tt,
                                            **assoc_params)
            assoc.id_candidate_events()
            assoc.associate_candidates()
            if assoc.count_associated():
                assoc.single_phase()
            txt += str(assoc) + '\n'
        with open('temp.txt', 'w') as f:
            f.write(txt)
        self.assertTextFilesEqual(
            'temp.txt',
            os.path.join(self.testing_path, 'test_catalog_out.txt'))
    finally:
        # Remove scratch files even on assertion failure so reruns start
        # from a clean state.
        for scratch in ('temp.txt', db_assoc_file):
            if os.path.exists(scratch):
                os.remove(scratch)
# NOTE(review): this fragment relies on names defined above/outside this
# view: `tr` (an obspy Trace, presumably the loop variable of an enclosing
# loop), `picker`, `session`, `db_assoc`, and `db_tt` — confirm in context.
tr.detrend('linear')  # remove linear trend before picking
# picks/polarity/snr/uncert are parallel lists, one entry per pick.
scnl, picks, polarity, snr, uncert = picker.picks(tr)
t_create = datetime.utcnow()  # Record the time we made the picks
# Add each pick to the database
for i in range(len(picks)):
    new_pick = tables1D.Pick(scnl, picks[i].datetime, polarity[i], snr[i],
                             uncert[i], t_create)
    session.add(new_pick)  # Add pick i to the database
session.commit()  # Commit the pick to the database

# Define the associator
# NOTE(review): max_km=5000 effectively disables the distance cutoff
# compared with the 350 km used elsewhere — confirm this is intentional.
assocOK = assoc1D.LocalAssociator(db_assoc, db_tt, max_km=5000,
                                  aggregation=1, aggr_norm='L2',
                                  cutoff_outlier=1, assoc_ot_uncert=7,
                                  nsta_declare=3, loc_uncert_thresh=0.5)
# Identify candidate events (Pick Aggregation)
assocOK.id_candidate_events()
# Associate events
assocOK.associate_candidates()
# Add singles stations to events
assocOK.single_phase()
# Plot example event
def test_1dassociater(random_filename):
    """End-to-end test of the 1D associator: pick example waveforms,
    store the picks in a fresh SQLite database, aggregate/associate
    candidates, then assert exactly one 3-station event is declared.

    Fixes over the original: parallel-list indexing via
    ``range(len(picks))`` replaced with ``zip``; removed the dead
    ``rms_sort = []`` pre-assignment, a block of commented-out code,
    and the duplicated unlabeled prints of the event coordinates.
    """
    db_assoc = random_filename(ext='.db')
    db_tt_test = random_filename(ext='.db')
    # Our SQLite databases are:
    db_assoc = 'sqlite:///' + db_assoc
    # Work on a copy so the canonical travel-time database stays pristine.
    shutil.copy(db_tt, db_tt_test)
    db_tt_test = 'sqlite:///' + db_tt_test  # Traveltime database

    # Connect and create the tables required to run the 1D associator.
    engine_assoc = create_engine(db_assoc, echo=False)
    tables1D.Base.metadata.create_all(engine_assoc)
    Session = sessionmaker(bind=engine_assoc)
    session = Session()

    # Find all waveform data in the data directory.
    file_list = glob.glob(os.path.join(EX_DATA, '*.msd'))

    # Define our picker instance.
    picker = fbpicker.FBPicker(t_long=5, freqmin=0.5, mode='rms', t_ma=20,
                               nsigma=3, t_up=0.78, nr_len=2, nr_coeff=2,
                               pol_len=10, pol_coeff=10, uncert_coeff=3)

    st = Stream()
    for f in file_list:
        st += obspy_read(f)

    # Pick each trace individually; st.merge() would cause issues if
    # there is a data gap.
    for s in st:
        s.detrend('linear')
        scnl, picks, polarity, snr, uncert = picker.picks(s)
        t_create = datetime.utcnow()  # Record the time we made the picks
        # Add each pick to the database; zip keeps the parallel lists
        # (picks/polarity/snr/uncert) in step.
        for i, (pk, pol, s2n, unc) in enumerate(
                zip(picks, polarity, snr, uncert)):
            log.debug('st = {} Pick = {} {} {} scnl = {}'.format(
                s, i, pk, pk.datetime, scnl))
            session.add(tables1D.Pick(scnl, pk.datetime, pol, s2n, unc,
                                      t_create))
        session.commit()  # Commit the picks to the database
    log.debug('Wrote picks')

    # Define the associator.
    assocOK = assoc1D.LocalAssociator(db_assoc, db_tt_test, max_km=350,
                                      aggregation=1, aggr_norm='L2',
                                      cutoff_outlier=30, assoc_ot_uncert=7,
                                      nsta_declare=3,
                                      loc_uncert_thresh=0.2)
    # Identify candidate events (Pick Aggregation).
    assocOK.id_candidate_events()
    # Associate events.
    assocOK.associate_candidates()

    print("Unit Testing for 1Dassociator ...............")
    # -----------------------------------------------------------------
    # Function testing: rms sort list
    # -----------------------------------------------------------------
    # Each entry: (station, lon, lat, distance_km?, value?, index) —
    # exact column semantics defined by the associator internals.
    radius = [
        ('SIO', -137.26, 68.992, 83.5, 0.7514, 0),
        ('U32A', -137.26, 68.992, 203.0, 1.8268, 1),
        ('W35A', -137.26, 68.992, 42.5, 0.3825, 2),
        ('OKCFA', -137.26, 68.992, 33.0, 0.297, 3),
        ('X34A', -137.26, 68.992, 122.0, 1.0979, 4),
        ('FNO', -137.26, 68.992, 36.5, 0.3285, 5),
    ]
    lon = -137.26
    lat = 68.992
    st_declare = 3  # number of stations required to declare an event
    rms_sort, cb = assocOK._LocalAssociator__accumulate_rms_sort(
        radius, lon, lat, st_declare)
    print("")
    print('rms = {}'.format(rms_sort))
    print('Combinations Stations = {}'.format(cb))
    print("")
    assert len(rms_sort) > 0

    # Add single stations to events.
    assocOK.single_phase()

    # Exactly one event, declared by three stations, should result.
    events = assocOK.assoc_db.query(Associated).all()
    assert len(events) == 1
    event = events[0]
    assert event.nsta == 3
    print('event.longitude = ', event.longitude)
    print('event.latitude = ', event.latitude)