def test_missing_header(self):
    """Ensure a NordicParsingError is raised for a file with no header."""
    sfile = os.path.join(self.testing_path, 'Sfile_no_header')
    with self.assertRaises(NordicParsingError):
        # readheader also emits an AIN UserWarning for this fixture;
        # silence it so only the parsing error propagates.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            readheader(sfile)
def test_corrupt_header(self):
    """Check that a NordicParsingError is raised for a corrupt header.

    Copies a known-good s-file while truncating every line to 78
    characters (which destroys the fixed-width Nordic header layout,
    and drops the newline on full-width lines), then verifies that
    ``readheader`` refuses to parse the result.
    """
    filename = os.path.join(self.testing_path, '01-0411-15L.S201309')
    with NamedTemporaryFile(suffix='.sfile') as tmp_file:
        # Context managers close both handles even if the copy loop
        # raises part-way; the original bare open()/close() pairs
        # leaked the handles on error.
        with open(filename, 'r') as f, open(tmp_file.name, 'w') as fout:
            for line in f:
                fout.write(line[0:78])
        with self.assertRaises(NordicParsingError):
            readheader(tmp_file.name)
def test_header_mapping(self):
    """The public and private header readers must produce similar events."""
    sfile = os.path.join(self.testing_path, '01-0411-15L.S201309')
    head_1 = readheader(sfile)
    with open(sfile, 'r') as f:
        head_2 = _readheader(f=f)
    self.assertTrue(test_similarity(head_1, head_2))
def test_header_mapping(self):
    """Reading a header by path or by open file handle must agree."""
    path = os.path.join(self.testing_path, '01-0411-15L.S201309')
    # Public entry point: takes a filename.
    head_1 = readheader(path)
    # Private entry point: takes an already-open file object.
    with open(path, 'r') as f:
        head_2 = _readheader(f=f)
    self.assertTrue(test_similarity(head_1, head_2))
def test_read_latin1(self):
    """
    Check that we can read dos formatted, latin1 encoded files.
    """
    dos_file = os.path.join(self.testing_path, 'dos-file.sfile')
    self.assertTrue(_is_sfile(dos_file))
    # Header-only read
    event = readheader(dos_file)
    self.assertEqual(event.origins[0].latitude, 60.328)
    # Full catalog read
    cat = read_events(dos_file)
    self.assertEqual(cat[0].origins[0].latitude, 60.328)
    # Waveform file names referenced by the s-file
    wavefiles = readwavename(dos_file)
    self.assertEqual(wavefiles[0], "90121311.0851W41")
    # Spectral parameters
    spectral_info = read_spectral_info(dos_file)
    self.assertEqual(len(spectral_info.keys()), 10)
    self.assertEqual(
        spectral_info[('AVERAGE', '')]['stress_drop'], 27.7)
    # Forcing a strict ASCII codec must fail on the latin1 bytes
    with self.assertRaises(UnicodeDecodeError):
        readheader(dos_file, 'ASCII')
def test_header_mapping(self):
    """Public and private header readers must yield similar events."""
    sfile = os.path.join(self.testing_path, '01-0411-15L.S201309')
    head_1 = readheader(sfile)
    with open(sfile, 'r') as f:
        # _readheader raises "UserWarning: AIN in header, currently
        # unsupported" for this fixture — ignore it.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            head_2 = _readheader(f=f)
    _assert_similarity(head_1, head_2)
def test_header_mapping(self):
    """Compare the file-path and file-handle header readers."""
    path = os.path.join(self.testing_path, '01-0411-15L.S201309')
    head_1 = readheader(path)
    with open(path, 'r') as f:
        # Suppress "UserWarning: AIN in header, currently unsupported"
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            head_2 = _readheader(f=f)
    self.assertTrue(test_similarity(head_1, head_2))
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan events

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    # Read each header exactly once and sort chronologically by origin
    # time (the original re-read every s-file a second time when
    # building the catalog).
    headers = [(readheader(sfile), sfile) for sfile in sfile_list]
    headers.sort(key=lambda pair: pair[0].origins[0].time)
    event_list = []
    catalog = Catalog()
    for i, (event, sfile) in enumerate(headers):
        event_list.append((i, sfile))
        catalog.append(event)
    # Hand off to sister function
    write_event(catalog)
    return event_list
def test_header_mapping(self):
    """Compare whole-file and tagged-line header parsing paths."""
    path = os.path.join(self.testing_path, '01-0411-15L.S201309')
    # readheader raises "UserWarning: Lines of type I..." — ignore it.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        head_1 = readheader(path)
    with open(path, 'r') as f:
        # _get_line_tags raises "UserWarning: AIN in header, currently
        # unsupported" — ignore it too.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', UserWarning)
            tagged_lines = _get_line_tags(f=f)
            head_2 = _readheader(head_lines=tagged_lines['1'])
    _assert_similarity(head_1, head_2)
def test_write_event(self):
    """
    Simple test function to test the writing of events.

    Runs sfiles_to_event over the test REA directory, then checks each
    line of the resulting event.dat against the header of the s-file it
    was generated from.
    """
    testing_path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'test_data', 'REA',
        'TEST_')
    sfile_list = glob.glob(os.path.join(testing_path, '*L.S??????'))
    event_list = sfiles_to_event(sfile_list)
    # Check that we have written a file
    self.assertTrue(os.path.isfile('event.dat'))
    with open('event.dat', 'r') as f:
        for line, event in zip(f, event_list):
            header = readheader(event[1])
            event_id_input = event[0]
            output_event_info = line.strip().split()
            # Check that the event id's match
            self.assertEqual(event_id_input, int(output_event_info[-1]))
            origin_time = header.origins[0].time
            # Rebuild the fixed-width date/time string the writer emits.
            # NOTE(review): microsecond is truncated to its first two
            # digits — this mirrors the writer's format, not rounding.
            time_string = (
                str(origin_time.year) +
                str(origin_time.month).zfill(2) +
                str(origin_time.day).zfill(2) + ' ' +
                str(origin_time.hour).rjust(2) +
                str(origin_time.minute).zfill(2) +
                str(origin_time.second).zfill(2) +
                str(origin_time.microsecond)[0:2].zfill(2))
            self.assertEqual(output_event_info[0:2], time_string.split())
            self.assertEqual(header.origins[0].latitude,
                             float(output_event_info[2]))
            self.assertEqual(header.origins[0].longitude,
                             float(output_event_info[3]))
            self.assertEqual(header.origins[0].depth / 1000,
                             float(output_event_info[4]))
            # Guard on the list itself: the original indexed
            # header.magnitudes[0] unconditionally, which raised
            # IndexError for events without a magnitude.
            if header.magnitudes:
                self.assertEqual(header.magnitudes[0].mag,
                                 float(output_event_info[5]))
            if header.origins[0].quality.standard_error:
                self.assertEqual(header.origins[0].quality.standard_error,
                                 float(output_event_info[-2]))
    os.remove('event.dat')
def test_missing_header(self):
    """A file without a header line must raise NordicParsingError."""
    sfile = os.path.join(self.testing_path, 'Sfile_no_header')
    with self.assertRaises(NordicParsingError):
        readheader(sfile)
def test_write_catalog(self):
    """
    Simple testing function for the write_catalogue function in \
    catalog_to_dd.
    """
    self.assertTrue(os.path.isfile('dt.ct'))
    # Check dt.ct file, should contain only a few linked events.
    # NOTE(review): the handle is closed at the end of the method but
    # leaks if an assertion fails — a `with` block would be safer.
    dt_file_out = open('dt.ct', 'r')
    event_pairs = []
    event_links = []
    event_pair = ''
    for i, line in enumerate(dt_file_out):
        # Lines starting with '#' open a new event pair; the links for
        # the PREVIOUS pair (accumulated in event_links) are validated
        # before resetting state for the new pair.
        if line[0] == '#':
            if i != 0:
                # Check the number of links
                self.assertTrue(len(event_links) >= self.minimum_links)
                # Check the distance between events: resolve the two
                # event ids on the pair header back to their s-files.
                event_1_name = [
                    event[1] for event in self.event_list
                    if event[0] == int(event_pair.split()[1])
                ][0]
                event_2_name = [
                    event[1] for event in self.event_list
                    if event[0] == int(event_pair.split()[2])
                ][0]
                event_1 = readheader(event_1_name)
                event_2 = readheader(event_2_name)
                # (lat, lon, depth-in-km) tuples for dist_calc
                event_1_location = (event_1.origins[0].latitude,
                                    event_1.origins[0].longitude,
                                    event_1.origins[0].depth / 1000)
                event_2_location = (event_2.origins[0].latitude,
                                    event_2.origins[0].longitude,
                                    event_2.origins[0].depth / 1000)
                hypocentral_seperation = dist_calc(event_1_location,
                                                   event_2_location)
                self.assertTrue(
                    hypocentral_seperation < self.maximum_separation)
                # Check that the differential times are accurate
                event_1_picks = read_events(event_1_name)[0].picks
                event_2_picks = read_events(event_2_name)[0].picks
                for pick_pair in event_links:
                    # Link line layout: station, tt1, tt2, weight, phase
                    station = pick_pair.split()[0]
                    event_1_travel_time_output = pick_pair.split()[1]
                    event_2_travel_time_output = pick_pair.split()[2]
                    # weight = pick_pair.split()[3]
                    phase = pick_pair.split()[4]
                    # Extract the relevant pick information from the
                    # two sfiles (last matching pick wins if several
                    # share station and phase).
                    for pick in event_1_picks:
                        if pick.waveform_id.station_code == station:
                            if pick.phase_hint[0].upper() == phase:
                                event_1_pick = pick
                    for pick in event_2_picks:
                        if pick.waveform_id.station_code == station:
                            if pick.phase_hint[0].upper() == phase:
                                event_2_pick = pick
                    # Calculate the travel-time
                    event_1_travel_time_input = event_1_pick.time -\
                        event_1.origins[0].time
                    event_2_travel_time_input = event_2_pick.time -\
                        event_2.origins[0].time
                    self.assertEqual(event_1_travel_time_input,
                                     float(event_1_travel_time_output))
                    self.assertEqual(event_2_travel_time_input,
                                     float(event_2_travel_time_output))
            # Start accumulating the next pair.
            event_pair = line
            event_pairs.append(line)
            event_links = []
        else:
            event_links.append(line)
    self.assertTrue(os.path.isfile('phase.dat'))
    dt_file_out.close()