def test_write_empty(self):
    """Check that writing a blank event fails until it has a timed origin."""
    kwargs = dict(filename=None, userid='TEST', evtype='L', outdir='.',
                  wavefiles='test')
    event = Event()
    # An event with no origin at all cannot be written.
    with self.assertRaises(NordicParsingError):
        _write_nordic(event, **kwargs)
    # Nor can one whose origin lacks a time.
    event.origins.append(Origin())
    with self.assertRaises(NordicParsingError):
        _write_nordic(event, **kwargs)
    # Once the origin has a time, writing succeeds.
    event.origins[0].time = UTCDateTime()
    with TemporaryWorkingDirectory():
        sfile = _write_nordic(event, **kwargs)
        self.assertTrue(os.path.isfile(sfile))
def test_read_wavename(self):
    """Check wavefile names are read back with directory parts stripped."""
    sfile_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
    self.assertEqual(len(readwavename(sfile_path)), 1)
    # Build a single-event catalog to round-trip through an s-file.
    catalog = Catalog()
    catalog += full_test_event()
    # Full paths should be reduced to basenames; multiple wavefiles kept.
    cases = [(['walrus/test'], ['test']),
             (['walrus/test', 'albert'], ['test', 'albert'])]
    for wavefiles, expected in cases:
        with TemporaryWorkingDirectory():
            sfile = _write_nordic(
                catalog[0], filename=None, userid='TEST', evtype='L',
                outdir='.', wavefiles=wavefiles, explosion=True,
                overwrite=True)
            self.assertEqual(readwavename(sfile), expected)
def test_multi_writing(self):
    """Writing the same event repeatedly without overwrite eventually fails."""
    event = full_test_event()
    with TemporaryWorkingDirectory():
        # 59 suffixed filenames are available for one origin time; all
        # of them should be written without error.
        sfiles = [_write_nordic(event=event, filename=None, overwrite=False)
                  for _ in range(59)]
        # The next attempt has no free name left and must raise.
        with self.assertRaises(NordicParsingError):
            _write_nordic(event=event, filename=None, overwrite=False)
def test_multi_writing(self):
    """Writing the same event repeatedly without overwrite eventually fails.

    Fix: the written s-files are now removed in a ``finally`` block, so a
    failing assertion (or the expected-exception check not raising) no
    longer leaves up to 59 files behind in the working directory.
    """
    event = full_test_event()
    sfiles = []
    try:
        # 59 suffixed filenames are available for one origin time; all
        # of them should be written without error.
        for _i in range(59):
            sfiles.append(_write_nordic(event=event, filename=None,
                                        overwrite=False))
        # The next attempt has no free name left and must raise.
        with self.assertRaises(NordicParsingError):
            _write_nordic(event=event, filename=None, overwrite=False)
    finally:
        # Clean up regardless of test outcome.
        for sfile in sfiles:
            os.remove(sfile)
def test_write_empty(self):
    """
    Function to check that writing a blank event works as it should.

    Fix: the written s-file is now removed in a ``finally`` block, so a
    failing assertion no longer leaves the file behind in the working
    directory.
    """
    test_event = Event()
    # An event with no origin at all cannot be written.
    with self.assertRaises(NordicParsingError):
        _write_nordic(test_event, filename=None, userid='TEST', evtype='L',
                      outdir='.', wavefiles='test')
    # Nor can one whose origin lacks a time.
    test_event.origins.append(Origin())
    with self.assertRaises(NordicParsingError):
        _write_nordic(test_event, filename=None, userid='TEST', evtype='L',
                      outdir='.', wavefiles='test')
    # Once the origin has a time, writing succeeds.
    test_event.origins[0].time = UTCDateTime()
    test_sfile = _write_nordic(test_event, filename=None, userid='TEST',
                               evtype='L', outdir='.', wavefiles='test')
    try:
        self.assertTrue(os.path.isfile(test_sfile))
    finally:
        os.remove(test_sfile)
def picks_to_sfile(database, from_time, to_time, duration_time=300):
    """Write picks from a database to Nordic s-files in fixed-size chunks.

    The span ``from_time``..``to_time`` is split into chunks of
    ``duration_time`` seconds; events and their wavefile references are
    fetched per chunk via ``write_events`` and written as s-files.

    :param database: name of the seisnn SQL database to read picks from.
    :param from_time: start of the time span (UTCDateTime-parsable string).
    :param to_time: end of the time span (UTCDateTime-parsable string).
    :param duration_time: chunk length in seconds (default 300).

    .. note:: assumes ``write_events(db, start, end)`` returns
       ``(event, wavefile_list)`` — inferred from usage; confirm against
       its definition.
    """
    db = seisnn.sql.Client(database=database)
    total_time = UTCDateTime(to_time) - UTCDateTime(from_time)
    # Number of full chunks; the loop runs one extra time for the tail.
    n_chunks = int(total_time / duration_time)
    start = from_time
    for i in range(n_chunks + 1):
        if i == n_chunks:
            # BUG FIX: the original compared ``i`` against ``n_chunks + 1``,
            # a value range() never yields, so the final (possibly partial)
            # chunk was never clamped to the requested end time.
            end = to_time
        else:
            # Advance by one chunk; keep the original string round-trip so
            # the timestamp format matches what write_events expects.
            end = str(datetime.strptime(
                str(UTCDateTime(start) + duration_time),
                '%Y-%m-%dT%H:%M:%S.%fZ'))
        ev, ARC = write_events(db, start, end)
        start = end
        # Only write an s-file when the chunk actually produced wavefiles.
        if ARC:
            core._write_nordic(ev, filename=None, wavefiles=ARC,
                               outdir='/home/andy/predict_sfile')
def test_read_wavename(self):
    """Wavefile names in an s-file come back as basenames, in order."""
    path = os.path.join(self.testing_path, '01-0411-15L.S201309')
    self.assertEqual(len(readwavename(path)), 1)
    # Round-trip a synthetic event through an s-file.
    cat = Catalog()
    cat += full_test_event()
    # Single wavefile with a path, then multiple wavefiles.
    for wavefiles, expected in (
            (['walrus/test'], ['test']),
            (['walrus/test', 'albert'], ['test', 'albert'])):
        with TemporaryWorkingDirectory():
            written = _write_nordic(
                cat[0], filename=None, userid='TEST', evtype='L',
                outdir='.', wavefiles=wavefiles, explosion=True,
                overwrite=True)
            self.assertEqual(readwavename(written), expected)
def test_read_write(self):
    """
    Function to test the read and write capabilities of sfile_util.
    """
    # Set-up a test event hosted in a catalog
    test_cat = Catalog()
    test_cat += full_test_event()
    # Check the read-write s-file functionality
    with TemporaryWorkingDirectory():
        sfile = _write_nordic(
            test_cat[0], filename=None, userid='TEST', evtype='L',
            outdir='.', wavefiles='test', explosion=True, overwrite=True)
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        # raises "UserWarning: AIN in header, currently unsupported"
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            read_cat += read_nordic(sfile)
        read_ev = read_cat[0]
        test_ev = test_cat[0]
        # Picks should round-trip; only the last character of the channel
        # code is guaranteed to survive the Nordic format.
        for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
            for attr in ('time', 'backazimuth', 'onset', 'phase_hint',
                         'polarity'):
                self.assertEqual(getattr(read_pick, attr),
                                 getattr(test_pick, attr))
            self.assertEqual(read_pick.waveform_id.station_code,
                             test_pick.waveform_id.station_code)
            self.assertEqual(read_pick.waveform_id.channel_code[-1],
                             test_pick.waveform_id.channel_code[-1])
        # Origin resource_id is rewritten on read and time_residual_RMS is
        # not a QuakeML attribute, so compare the basic hypocentre only.
        for attr in ('time', 'longitude', 'latitude', 'depth'):
            self.assertEqual(getattr(read_ev.origins[0], attr),
                             getattr(test_ev.origins[0], attr))
        # Magnitudes round-trip value, author and type.
        for attr in ('mag', 'creation_info', 'magnitude_type'):
            for i in range(3):
                self.assertEqual(getattr(read_ev.magnitudes[i], attr),
                                 getattr(test_ev.magnitudes[i], attr))
        self.assertEqual(read_ev.event_descriptions,
                         test_ev.event_descriptions)
        # Amplitude resource_ids get overwritten because you can't have two
        # the same in memory, so compare value attributes only.
        for i in (0, 2):
            self.assertEqual(read_ev.amplitudes[i].period,
                             test_ev.amplitudes[i].period)
            self.assertEqual(read_ev.amplitudes[i].snr,
                             test_ev.amplitudes[i].snr)
        # Check coda magnitude pick: pick_id/resource_id are rewritten and
        # snr is not supported in the s-file format, so skip those.
        for attr in ('type', 'unit', 'generic_amplitude', 'magnitude_hint',
                     'category'):
            self.assertEqual(getattr(read_ev.amplitudes[1], attr),
                             getattr(test_ev.amplitudes[1], attr))
        self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                         test_ev.amplitudes[1].waveform_id.station_code)
        # Channel code is compressed to first + last character on write.
        test_chan = test_ev.amplitudes[1].waveform_id.channel_code
        self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                         test_chan[0] + test_chan[-1])
def test_fail_writing(self):
    """ Test a deliberate fail. """
    test_cat = Catalog()
    test_cat += full_test_event()
    test_ev = test_cat[0]
    # A second event makes the catalog unwritable as a single s-file.
    test_cat.append(full_test_event())
    base_kwargs = dict(filename=None, userid='TEST', evtype='L', outdir='.',
                       wavefiles='test', explosion=True, overwrite=True)
    # Each case overrides one argument to trigger a distinct failure.
    failures = [
        (test_cat, {}),                      # multiple events in catalog
        (test_ev, {'userid': 'TESTICLE'}),   # too long userid
        (test_ev, {'evtype': 'U'}),          # unrecognised event type
        (test_ev, {'outdir': 'albatross'}),  # no output directory
    ]
    for event, overrides in failures:
        with self.assertRaises(NordicParsingError):
            _write_nordic(event, **dict(base_kwargs, **overrides))
    # An event without any origin cannot be written.
    invalid_origin = test_ev.copy()
    invalid_origin.origins = []
    with self.assertRaises(NordicParsingError):
        _write_nordic(invalid_origin, **base_kwargs)
    # Nor can one whose origin has no time.
    invalid_origin = test_ev.copy()
    invalid_origin.origins[0].time = None
    with self.assertRaises(NordicParsingError):
        _write_nordic(invalid_origin, **base_kwargs)
    # Write a near empty origin: no hypocentre is still acceptable.
    valid_origin = test_ev.copy()
    valid_origin.origins[0].latitude = None
    valid_origin.origins[0].longitude = None
    valid_origin.origins[0].depth = None
    with NamedTemporaryFile() as tf:
        _write_nordic(valid_origin, **dict(base_kwargs, filename=tf.name))
        self.assertTrue(os.path.isfile(tf.name))
def test_read_write(self):
    """
    Function to test the read and write capabilities of sfile_util.
    """
    # Set-up a test event
    test_event = full_test_event()
    # Magnitudes are sorted on writing, so sort the input the same way
    # (descending by value) to compare like-for-like.
    test_event.magnitudes.sort(key=lambda obj: obj['mag'], reverse=True)
    # Host the event in a catalog
    test_cat = Catalog()
    test_cat += test_event
    # Check the read-write s-file functionality
    with TemporaryWorkingDirectory():
        sfile = _write_nordic(
            test_cat[0], filename=None, userid='TEST', evtype='L',
            outdir='.', wavefiles='test', explosion=True, overwrite=True)
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        # raises "UserWarning: AIN in header, currently unsupported"
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            read_cat += read_nordic(sfile)
        read_ev = read_cat[0]
        test_ev = test_cat[0]
        # Picks should round-trip; only the last character of the channel
        # code is guaranteed to survive the Nordic format.
        for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
            for attr in ('time', 'backazimuth', 'onset', 'phase_hint',
                         'polarity'):
                self.assertEqual(getattr(read_pick, attr),
                                 getattr(test_pick, attr))
            self.assertEqual(read_pick.waveform_id.station_code,
                             test_pick.waveform_id.station_code)
            self.assertEqual(read_pick.waveform_id.channel_code[-1],
                             test_pick.waveform_id.channel_code[-1])
        # Origin resource_id is rewritten on read and time_residual_RMS is
        # not a QuakeML attribute, so compare the basic hypocentre only.
        for attr in ('time', 'longitude', 'latitude', 'depth'):
            self.assertEqual(getattr(read_ev.origins[0], attr),
                             getattr(test_ev.origins[0], attr))
        # Magnitudes round-trip value, author and type.
        for attr in ('mag', 'creation_info', 'magnitude_type'):
            for i in range(3):
                self.assertEqual(getattr(read_ev.magnitudes[i], attr),
                                 getattr(test_ev.magnitudes[i], attr))
        self.assertEqual(read_ev.event_descriptions,
                         test_ev.event_descriptions)
        # Amplitude resource_ids get overwritten because you can't have two
        # the same in memory, so compare value attributes only.
        for i in (0, 2):
            self.assertEqual(read_ev.amplitudes[i].period,
                             test_ev.amplitudes[i].period)
            self.assertEqual(read_ev.amplitudes[i].snr,
                             test_ev.amplitudes[i].snr)
        # Check coda magnitude pick: pick_id/resource_id are rewritten and
        # snr is not supported in the s-file format, so skip those.
        for attr in ('type', 'unit', 'generic_amplitude', 'magnitude_hint',
                     'category'):
            self.assertEqual(getattr(read_ev.amplitudes[1], attr),
                             getattr(test_ev.amplitudes[1], attr))
        self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                         test_ev.amplitudes[1].waveform_id.station_code)
        # Channel code is compressed to first + last character on write.
        test_chan = test_ev.amplitudes[1].waveform_id.channel_code
        self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                         test_chan[0] + test_chan[-1])
def test_read_write(self):
    """
    Function to test the read and write capabilities of sfile_util.
    """
    # Set-up a test event hosted in a catalog
    test_cat = Catalog()
    test_cat += full_test_event()
    # Check the read-write s-file functionality
    sfile = _write_nordic(test_cat[0], filename=None, userid='TEST',
                          evtype='L', outdir='.', wavefiles='test',
                          explosion=True, overwrite=True)
    self.assertEqual(readwavename(sfile), ['test'])
    read_cat = Catalog()
    read_cat += read_nordic(sfile)
    os.remove(sfile)
    read_ev = read_cat[0]
    test_ev = test_cat[0]
    # Picks should round-trip; only the last character of the channel
    # code is guaranteed to survive the Nordic format.
    for read_pick, test_pick in zip(read_ev.picks, test_ev.picks):
        for attr in ('time', 'backazimuth', 'onset', 'phase_hint',
                     'polarity'):
            self.assertEqual(getattr(read_pick, attr),
                             getattr(test_pick, attr))
        self.assertEqual(read_pick.waveform_id.station_code,
                         test_pick.waveform_id.station_code)
        self.assertEqual(read_pick.waveform_id.channel_code[-1],
                         test_pick.waveform_id.channel_code[-1])
    # Origin resource_id is rewritten on read and time_residual_RMS is
    # not a QuakeML attribute, so compare the basic hypocentre only.
    for attr in ('time', 'longitude', 'latitude', 'depth'):
        self.assertEqual(getattr(read_ev.origins[0], attr),
                         getattr(test_ev.origins[0], attr))
    # Magnitudes round-trip value, author and type.
    for attr in ('mag', 'creation_info', 'magnitude_type'):
        for i in range(3):
            self.assertEqual(getattr(read_ev.magnitudes[i], attr),
                             getattr(test_ev.magnitudes[i], attr))
    self.assertEqual(read_ev.event_descriptions,
                     test_ev.event_descriptions)
    # Amplitude resource_ids get overwritten because you can't have two
    # the same in memory, so compare value attributes only.
    self.assertEqual(read_ev.amplitudes[0].period,
                     test_ev.amplitudes[0].period)
    self.assertEqual(read_ev.amplitudes[0].snr,
                     test_ev.amplitudes[0].snr)
    # Check coda magnitude pick: pick_id/resource_id are rewritten and
    # snr is not supported in the s-file format, so skip those.
    for attr in ('type', 'unit', 'generic_amplitude', 'magnitude_hint',
                 'category'):
        self.assertEqual(getattr(read_ev.amplitudes[1], attr),
                         getattr(test_ev.amplitudes[1], attr))
    self.assertEqual(read_ev.amplitudes[1].waveform_id.station_code,
                     test_ev.amplitudes[1].waveform_id.station_code)
    # Channel code is compressed to first + last character on write.
    test_chan = test_ev.amplitudes[1].waveform_id.channel_code
    self.assertEqual(read_ev.amplitudes[1].waveform_id.channel_code,
                     test_chan[0] + test_chan[-1])
def get_nordic(egf_table, t_event, xml_cat):
    """Build multiplexed waveform files and Nordic s-files for EGF events.

    For every row of ``egf_table`` the matching event is looked up in
    ``xml_cat`` (via ``find_event``), waveforms for its picked stations are
    gathered from local day-volume archives or GeoNet FDSN, written out as
    one multiplexed MSEED file, and an s-file referencing that file is
    produced.  Finally a ``select.out`` file is written for the whole
    collected catalogue.

    :param egf_table: table with integer origin-time columns
        qyr/qmon/qdy/qhr/qmn/qsc — presumably a pandas DataFrame (it is
        iterated with ``.iterrows()``); one row per EGF event.
    :param t_event: target-event identifier string; its first 14 characters
        name the output directories.
    :param xml_cat: catalog searched for the event matching each row.
    """
    cat = Catalog()
    wave_names = []
    # The easiest way to solve this ----------------------------------------------------------------------------------------------------------
    # Station membership lists: decide which archive each station is read from.
    SAMBA_list = [
        'COSA', 'COVA', 'EORO', 'FRAN', 'GOVA', 'LABE', 'LARB', 'MTBA',
        'MTFO', 'POCR2', 'SOLU', 'WHAT2', 'WHYM'
    ]
    ND_list = [
        'MOSQ', 'BELL', 'JACK', 'HURA', 'SHOT', 'MORG', 'TURI2', 'BRUN',
        'GLOR'
    ]
    SD_list = [
        'OLIV', 'CASC', 'DATA', 'GORG', 'BARN', 'RICH', 'DELT', 'THOM',
        'MONK', 'POMM'
    ]
    COSA_list = [
        'TEPE', 'HUVA', 'STBA', 'NOBU', 'ASPR', 'MORV', 'KING', 'HAAS'
    ]
    # NOTE(review): Alfa_list and Wizard_list are defined but never used below.
    Alfa_list = [
        'BURA2', 'CAMEL', 'CASH', 'FBLA2', 'HATT', 'MTHA2', 'TURI', 'UMAT'
    ]
    Wizard_list = [
        'WZ01', 'WZ02', 'WZ03', 'WZ04', 'WZ05', 'WZ06', 'WZ07', 'WZ08',
        'WZ09', 'WZ10', 'WZ11', 'WZ12', 'WZ13', 'WZ14', 'WZ15', 'WZ16',
        'WZ17', 'WZ18', 'WZ19', 'WZ20'
    ]
    # NOTE(review): 'WHFS' appears twice in GeoN_list.
    GeoN_list = [
        'JCZ', 'WKZ', 'MSZ', 'MLZ', 'RPZ', 'LBZ', 'EAZ', 'FOZ', 'WVZ',
        'INZ', 'LTZ', 'DSZ', 'THZ', 'OXZ', 'NSBS', 'HDWS', 'MECS', 'LPLS',
        'WNPS', 'GLNS', 'QTPS', 'TAFS', 'TWAS', 'MCNS', 'FGPS', 'FJDS',
        'WHFS', 'WHAS', 'WHFS', 'HAFS', 'KOKS', 'APPS', 'HMCS', 'GMFS',
        'ARPS', 'IFPS', 'SJFS', 'GLWS', 'TKAS', 'RDCS', 'WBCS', 'INGS',
        'CSHS'
    ]
    # ----------------------------------------------------------------------------------------------------------------------------------------
    for index1, egf in egf_table.iterrows():
        st = Stream()
        # Build the origin time from the table's integer date/time columns.
        date = datetime.datetime(int(egf['qyr']), int(egf['qmon']),
                                 int(egf['qdy']), int(egf['qhr']),
                                 int(egf['qmn']), int(egf['qsc']))
        # Julian day, used to name the day-volume directories below.
        JDay = date.strftime('%j')
        #Change to UTCDateTime
        eventdt = UTCDateTime(date)
        print(
            '----------------------------------------------------------------------------------'
        )
        print(
            '----------------------------------------------------------------------------------'
        )
        print('Working in the EGF event: ', eventdt)
        print(
            '----------------------------------------------------------------------------------'
        )
        print(
            '----------------------------------------------------------------------------------'
        )
        # Time to get the waveforms
        print(
            '========================================== NOW IM READING THE NETWORKS DATA ======================================='
        )
        egf_event = find_event(xml_cat, eventdt)
        # Unique stations that have picks for this event.
        stat_list = []
        for j in range(len(egf_event.picks)):
            stat_list.append(egf_event.picks[j].waveform_id.station_code)
        stat_list = set(stat_list)
        print(stat_list)
        for stat in stat_list:
            # Route each station to its archive; reads are best-effort —
            # failures are deliberately swallowed so one missing day
            # volume does not abort the whole event.
            if stat in SD_list or stat in ND_list:
                print('G E T T I N G S O U T H D W A R F S DATA')
                try:
                    st += read(
                        '/Volumes/GeoPhysics_11/users-data/chambeca/DWARFS/DWARFS_archive/Y'
                        + eventdt.strftime('%Y') + '/R' + JDay + '.01/' +
                        stat + '*',
                        starttime=eventdt,
                        endtime=eventdt + 50)
                except:
                    print('NO DATA')
                    pass
            elif stat in COSA_list:
                print('G E T T I N G C O S A DATA')
                try:
                    st += read(
                        '/Volumes/GeoPhysics_11/users-data/chambeca/COSA_archive/Y'
                        + eventdt.strftime('%Y') + '/R' + JDay + '.01/' +
                        stat + '*',
                        starttime=eventdt,
                        endtime=eventdt + 50)
                except:
                    pass
            elif stat in SAMBA_list:
                print('G E T T I N G S A M B A DATA')
                # NOTE(review): unlike the other branches this read is not
                # wrapped in try/except, so a missing SAMBA day volume
                # raises — confirm whether that is intended.
                st += read(
                    '/Volumes/GeoPhysics_09/users-data/chambeca/SAMBA_archive/day_volumes_S/Y'
                    + eventdt.strftime('%Y') + '/R' + JDay + '.01/' + stat +
                    '*',
                    starttime=eventdt,
                    endtime=eventdt + 50)
            elif stat in GeoN_list:
                print('G E T T I N G G E O N E T DATA')
                client = FDSN_Client("GEONET")
                # NOTE(review): client_nrt is created but never used.
                client_nrt = FDSN_Client("https://service-nrt.geonet.org.nz")
                try:
                    st += client.get_waveforms(network="NZ",
                                               station=stat,
                                               location='*',
                                               channel='HH*',
                                               starttime=eventdt,
                                               endtime=eventdt + 50)
                except:
                    pass
        print(
            '----------------------------------------------------------------------------------'
        )
        print('The traces are:')
        print(st)
        print(
            '----------------------------------------------------------------------------------'
        )
        # Cast samples to int32 so all traces can be multiplexed together.
        for tr in st:
            tr.data = tr.data.astype(np.int32)
        # Gappy (masked) traces are zero-filled; if even that fails the
        # trace is dropped from the stream.
        for tr in st:
            if type(tr.data) == np.ma.core.MaskedArray:
                try:
                    print('Masked data found for ' + tr.stats.station + ' ' +
                          tr.stats.channel +
                          ': padding empty spaces with zeros')
                    tr.data = tr.split().detrend('simple').merge(
                        fill_value=0)[0].data
                except:
                    st.remove(tr)
        # Write the multiplexed waveform file named by the event time.
        st.write(
            '/Users/home/juarezilma/Master/NETWORKS/DWARFS_S/Seisan_files/Target_Events/'
            + t_event[0:14] + '/Wav_files/' + eventdt.strftime('%Y') +
            eventdt.strftime('%m') + eventdt.strftime('%d') +
            eventdt.strftime('%H') + eventdt.strftime('%M') +
            eventdt.strftime('%S') + '_SD_multiplexed',
            format='MSEED')
        # Write the s-file referencing the waveform file just written.
        _write_nordic(
            event=egf_event,
            outdir=
            '/Users/home/juarezilma/Master/NETWORKS/DWARFS_S/Seisan_files/Target_Events/'
            + t_event[0:14] + '/S_files/',
            filename=None,
            userid="ICJG",
            evtype="L",
            wavefiles=
            '/Users/home/juarezilma/Master/NETWORKS/DWARFS_S/Seisan_files/Target_Events/'
            + t_event[0:14] + '/Wav_files/' + eventdt.strftime('%Y') +
            eventdt.strftime('%m') + eventdt.strftime('%d') +
            eventdt.strftime('%H') + eventdt.strftime('%M') +
            eventdt.strftime('%S') + '_SD_multiplexed')
        # To creat the select.out file
        wave_list = eventdt.strftime('%Y') + eventdt.strftime(
            '%m') + eventdt.strftime('%d') + eventdt.strftime(
                '%H') + eventdt.strftime('%M') + eventdt.strftime(
                    '%S') + '_SD_multiplexed'
        wave_names.append(wave_list)
        cat.append(egf_event)
    #END FOT EGF
    write_select(
        catalog=cat,
        filename=
        '/Users/home/juarezilma/Master/NETWORKS/DWARFS_S/Seisan_files/Target_Events/'
        + t_event[0:14] + '/select_' + t_event[0:14] + '.out',
        userid='ICJG',
        evtype='L',
        wavefiles=wave_names)