def test_seconds_overflow(self):
    """
    #2348 indicates that SEISAN sometimes overflows seconds into column 29.
    """
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        cat = read_events(
            os.path.join(self.testing_path, "sfile_seconds_overflow"))
    event = cat[0]
    pick_times = {
        "LSb2": UTCDateTime(2009, 7, 2, 6, 49) + 100.24}
    for key, value in pick_times.items():
        pick = [p for p in event.picks
                if p.waveform_id.station_code == key]
        self.assertEqual(len(pick), 1)
        self.assertEqual(pick[0].time, value)
    with NamedTemporaryFile(suffix=".out") as tf:
        write_select(cat, filename=tf.name)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            cat_back = read_events(tf.name)
        self.assertEqual(len(cat_back), 1)
        for key, value in pick_times.items():
            pick = [p for p in cat_back[0].picks
                    if p.waveform_id.station_code == key]
            self.assertEqual(len(pick), 1)
            self.assertEqual(pick[0].time, value)
def test_write_select(self):
    cat = read_events()
    with NamedTemporaryFile(suffix='.out') as tf:
        # raises "UserWarning: mb is not convertible"
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            write_select(cat, filename=tf.name)
        cat_back = read_events(tf.name)
        for event_1, event_2 in zip(cat, cat_back):
            _assert_similarity(event_1=event_1, event_2=event_2)
def test_read_write_over_day(self):
    """
    Nordic picks are relative to origin time - check that this works
    over day boundaries.
    """
    event = full_test_event()
    event.origins[0].time -= 3600
    self.assertGreater(
        event.picks[0].time.date, event.origins[0].time.date)
    with NamedTemporaryFile(suffix=".out") as tf:
        write_select(Catalog([event]), filename=tf.name)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            event_back = read_events(tf.name)[0]
        _assert_similarity(event, event_back)
def test_high_precision_read_write(self):
    """
    Test that high-precision writing works.
    """
    # raises "UserWarning: AIN in header, currently unsupported"
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        cat = read_events(
            os.path.join(self.testing_path, "sfile_high_precision_picks"))
    event = cat[0]
    pick_times = {
        "LSd1": UTCDateTime(2010, 11, 26, 1, 28, 46.859),
        "LSd3": UTCDateTime(2010, 11, 26, 1, 28, 48.132),
        "LSd2": UTCDateTime(2010, 11, 26, 1, 28, 48.183),
        "LSd4": UTCDateTime(2010, 11, 26, 1, 28, 49.744)}
    for key, value in pick_times.items():
        pick = [p for p in event.picks
                if p.waveform_id.station_code == key]
        self.assertEqual(len(pick), 1)
        self.assertEqual(pick[0].time, value)
    with NamedTemporaryFile(suffix=".out") as tf:
        write_select(cat, filename=tf.name)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            cat_back = read_events(tf.name)
        self.assertEqual(len(cat_back), 1)
        for key, value in pick_times.items():
            pick = [p for p in cat_back[0].picks
                    if p.waveform_id.station_code == key]
            self.assertEqual(len(pick), 1)
            self.assertEqual(pick[0].time, value)
    # Check that writing to standard accuracy just gives a rounded version
    with NamedTemporaryFile(suffix=".out") as tf:
        cat.write(format="NORDIC", filename=tf.name, high_accuracy=False)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            cat_back = read_events(tf.name)
        self.assertEqual(len(cat_back), 1)
        for key, value in pick_times.items():
            pick = [p for p in cat_back[0].picks
                    if p.waveform_id.station_code == key]
            self.assertEqual(len(pick), 1)
            rounded_pick_time = UTCDateTime(
                value.year, value.month, value.day, value.hour, value.minute)
            rounded_pick_time += round(
                value.second + (value.microsecond / 1e6), 2)
            self.assertEqual(pick[0].time, rounded_pick_time)
def test_long_phase_name(self):
    """
    Nordic format supports 8 char phase names, sometimes.
    """
    # raises "UserWarning: AIN in header, currently unsupported"
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        cat = read_events(
            os.path.join(self.testing_path, "sfile_long_phase"))
    # This file has one event with one pick
    pick = cat[0].picks[0]
    arrival = cat[0].origins[0].arrivals[0]
    self.assertEqual(pick.phase_hint, "PKiKP")
    self.assertEqual(arrival.time_weight, 1)
    with NamedTemporaryFile(suffix=".out") as tf:
        write_select(cat, filename=tf.name)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            cat_back = read_events(tf.name)
        pick = cat_back[0].picks[0]
        arrival = cat_back[0].origins[0].arrivals[0]
        self.assertEqual(pick.phase_hint, "PKiKP")
        self.assertEqual(arrival.time_weight, 1)
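# Hedged usage sketch (not part of the test suite above): the tests check
# that a Catalog survives a round trip through the Nordic "select" format.
# A minimal standalone version of that round trip is given below; the helper
# name _example_nordic_roundtrip and the file name "example_select.out" are
# illustrative assumptions, not part of the original code.
def _example_nordic_roundtrip():
    import warnings

    from obspy import read_events
    from obspy.io.nordic.core import write_select

    cat = read_events()  # ObsPy's bundled demo catalog
    with warnings.catch_warnings():
        # As noted in test_write_select, writing the demo catalog warns that
        # some magnitudes (e.g. mb) are not convertible.
        warnings.simplefilter('ignore', UserWarning)
        write_select(cat, filename="example_select.out")
    cat_back = read_events("example_select.out")  # format is auto-detected
    assert len(cat_back) == len(cat)
    return cat_back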
def get_nordic(egf_table, t_event, xml_cat):
    cat = Catalog()
    wave_names = []
    # Station lists per network - the simplest way to map each station to
    # its waveform archive.
    SAMBA_list = [
        'COSA', 'COVA', 'EORO', 'FRAN', 'GOVA', 'LABE', 'LARB', 'MTBA',
        'MTFO', 'POCR2', 'SOLU', 'WHAT2', 'WHYM']
    ND_list = [
        'MOSQ', 'BELL', 'JACK', 'HURA', 'SHOT', 'MORG', 'TURI2', 'BRUN',
        'GLOR']
    SD_list = [
        'OLIV', 'CASC', 'DATA', 'GORG', 'BARN', 'RICH', 'DELT', 'THOM',
        'MONK', 'POMM']
    COSA_list = [
        'TEPE', 'HUVA', 'STBA', 'NOBU', 'ASPR', 'MORV', 'KING', 'HAAS']
    Alfa_list = [
        'BURA2', 'CAMEL', 'CASH', 'FBLA2', 'HATT', 'MTHA2', 'TURI', 'UMAT']
    Wizard_list = [
        'WZ01', 'WZ02', 'WZ03', 'WZ04', 'WZ05', 'WZ06', 'WZ07', 'WZ08',
        'WZ09', 'WZ10', 'WZ11', 'WZ12', 'WZ13', 'WZ14', 'WZ15', 'WZ16',
        'WZ17', 'WZ18', 'WZ19', 'WZ20']
    GeoN_list = [
        'JCZ', 'WKZ', 'MSZ', 'MLZ', 'RPZ', 'LBZ', 'EAZ', 'FOZ', 'WVZ', 'INZ',
        'LTZ', 'DSZ', 'THZ', 'OXZ', 'NSBS', 'HDWS', 'MECS', 'LPLS', 'WNPS',
        'GLNS', 'QTPS', 'TAFS', 'TWAS', 'MCNS', 'FGPS', 'FJDS', 'WHFS',
        'WHAS', 'HAFS', 'KOKS', 'APPS', 'HMCS', 'GMFS', 'ARPS', 'IFPS',
        'SJFS', 'GLWS', 'TKAS', 'RDCS', 'WBCS', 'INGS', 'CSHS']
    # Alfa_list and Wizard_list are kept for reference but are not used below.

    # Output locations are derived from the first 14 characters of the
    # target-event name.
    base_dir = ('/Users/home/juarezilma/Master/NETWORKS/DWARFS_S/'
                'Seisan_files/Target_Events/' + t_event[0:14])

    for index1, egf in egf_table.iterrows():
        st = Stream()
        date = datetime.datetime(
            int(egf['qyr']), int(egf['qmon']), int(egf['qdy']),
            int(egf['qhr']), int(egf['qmn']), int(egf['qsc']))
        JDay = date.strftime('%j')
        # Convert to UTCDateTime
        eventdt = UTCDateTime(date)
        print('-' * 80)
        print('Working on the EGF event: ', eventdt)
        print('-' * 80)

        # Time to get the waveforms
        print('===== Now reading the network data =====')
        egf_event = find_event(xml_cat, eventdt)
        stat_list = {p.waveform_id.station_code for p in egf_event.picks}
        print(stat_list)
        for stat in stat_list:
            if stat in SD_list or stat in ND_list:
                print('Getting DWARFS data')
                try:
                    st += read(
                        '/Volumes/GeoPhysics_11/users-data/chambeca/DWARFS/'
                        'DWARFS_archive/Y' + eventdt.strftime('%Y') +
                        '/R' + JDay + '.01/' + stat + '*',
                        starttime=eventdt, endtime=eventdt + 50)
                except Exception:
                    print('NO DATA')
            elif stat in COSA_list:
                print('Getting COSA data')
                try:
                    st += read(
                        '/Volumes/GeoPhysics_11/users-data/chambeca/'
                        'COSA_archive/Y' + eventdt.strftime('%Y') +
                        '/R' + JDay + '.01/' + stat + '*',
                        starttime=eventdt, endtime=eventdt + 50)
                except Exception:
                    pass
            elif stat in SAMBA_list:
                print('Getting SAMBA data')
                st += read(
                    '/Volumes/GeoPhysics_09/users-data/chambeca/'
                    'SAMBA_archive/day_volumes_S/Y' + eventdt.strftime('%Y') +
                    '/R' + JDay + '.01/' + stat + '*',
                    starttime=eventdt, endtime=eventdt + 50)
            elif stat in GeoN_list:
                print('Getting GeoNet data')
                client = FDSN_Client("GEONET")
                # Near-real-time client, currently unused
                client_nrt = FDSN_Client("https://service-nrt.geonet.org.nz")
                try:
                    st += client.get_waveforms(
                        network="NZ", station=stat, location='*',
                        channel='HH*', starttime=eventdt,
                        endtime=eventdt + 50)
                except Exception:
                    pass
        print('-' * 80)
        print('The traces are:')
        print(st)
        print('-' * 80)
        for tr in st:
            tr.data = tr.data.astype(np.int32)
        for tr in st:
            if isinstance(tr.data, np.ma.core.MaskedArray):
                try:
                    print('Masked data found for ' + tr.stats.station + ' ' +
                          tr.stats.channel +
                          ': padding empty spaces with zeros')
                    tr.data = tr.split().detrend('simple').merge(
                        fill_value=0)[0].data
                except Exception:
                    st.remove(tr)
        wave_name = eventdt.strftime('%Y%m%d%H%M%S') + '_SD_multiplexed'
        st.write(base_dir + '/Wav_files/' + wave_name, format='MSEED')
        _write_nordic(
            event=egf_event,
            outdir=base_dir + '/S_files/',
            filename=None,
            userid="ICJG",
            evtype="L",
            wavefiles=base_dir + '/Wav_files/' + wave_name)
        # Collect names to create the select.out file
        wave_names.append(wave_name)
        cat.append(egf_event)
    # End of loop over EGF events
    write_select(
        catalog=cat,
        filename=base_dir + '/select_' + t_event[0:14] + '.out',
        userid='ICJG',
        evtype='L',
        wavefiles=wave_names)
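# Hedged usage sketch: get_nordic expects a pandas DataFrame of EGF origin
# times with the integer columns 'qyr', 'qmon', 'qdy', 'qhr', 'qmn' and 'qsc'
# used above, a target-event string whose first 14 characters name the output
# folder, and an ObsPy Catalog to pull picks from via find_event() (which is
# not defined in this script and must be provided elsewhere). The file names
# and the target-event tag below are illustrative assumptions.
if __name__ == '__main__':
    import pandas as pd
    from obspy import read_events

    egf_table = pd.read_csv('egf_events.csv')    # assumed table of EGF times
    xml_cat = read_events('egf_catalogue.xml')   # assumed QuakeML catalogue
    t_event = '20150101000000'                   # assumed YYYYMMDDHHMMSS tag
    get_nordic(egf_table, t_event, xml_cat)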