def test_read_file_with_non_valid_blocks_in_between(self): """ Test reading MiniSEED files that have some non-valid blocks in-between. """ # This file has two 4096 bytes records. filename = os.path.join(self.path, 'data', 'test.mseed') with io.open(filename, "rb") as fh: rec1 = fh.read(4096) rec2 = fh.read(4096) reference = _read_mseed(filename) del reference[0].stats.mseed # Fill with zero bytes. for length in (128, 256, 512, 1024, 2048, 4096, 8192): with io.BytesIO() as buf: buf.write(rec1) buf.write(b'\x00' * length) buf.write(rec2) buf.seek(0, 0) # This will raise 1 warning per 128 bytes. with WarningsCapture() as w: st = _read_mseed(buf) self.assertEqual(len(w), length // 128) # Also explicitly test the first warning message which should be # identical for all cases. self.assertEqual( w[0].message.args[0], "readMSEEDBuffer(): Not a SEED record. Will skip bytes " "4096 to 4223.") # Should always be two records. self.assertEqual(st[0].stats.mseed.number_of_records, 2) # Remove things like file-size and what not. del st[0].stats.mseed self.assertEqual(reference, st) # Try the same thing but fill with random bytes. # The seed is not really needed but hopefully guards against the # very very rare case of random bytes making up a valid SEED record. np.random.seed(34980) for length in (128, 256, 512, 1024, 2048, 4096, 8192): with io.BytesIO() as buf: buf.write(rec1) buf.write(np.random.bytes(length)) buf.write(rec2) buf.seek(0, 0) # This will raise 1 warning per 128 bytes. with WarningsCapture() as w: st = _read_mseed(buf) self.assertEqual(len(w), length // 128) # Should always be two records. self.assertEqual(st[0].stats.mseed.number_of_records, 2) # Remove things like file-size and what not. del st[0].stats.mseed self.assertEqual(reference, st)
def test_reading_less_than_128_bytes(self):
    """
    128 bytes is the smallest possible MiniSEED record. Reading
    anything smaller should result in an error.
    """
    filename = os.path.join(self.path, 'data',
                            'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    with io.open(filename, 'rb') as fh:
        data = fh.read()

    # Exactly 128 bytes: the record is truncated, so no traces come back
    # but a truncation warning must be raised.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        with io.BytesIO(data[:128]) as buf:
            st = _read_mseed(buf)
        # Nothing is read here.
        self.assertEqual(len(st), 0)
        self.assertGreaterEqual(len(caught), 1)
        self.assertIs(caught[-1].category, InternalMSEEDWarning)
        self.assertEqual(
            "readMSEEDBuffer(): Unexpected end of file when "
            "parsing record starting at offset 0. The rest of "
            "the file will not be read.",
            caught[-1].message.args[0])

    # Anything shorter than 128 bytes cannot even be a record and
    # therefore raises.
    with self.assertRaises(ObsPyMSEEDFilesizeTooSmallError) as e:
        with io.BytesIO(data[:127]) as buf:
            _read_mseed(buf)
    self.assertEqual(
        e.exception.args[0],
        "The smallest possible mini-SEED record is made up of 128 bytes. "
        "The passed buffer or file contains only 127.")
def test_reading_less_than_128_bytes(self):
    """
    128 bytes is the smallest possible MiniSEED record. Reading
    anything smaller should result in an error.
    """
    path = os.path.join(self.path, 'data',
                        'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    with io.open(path, 'rb') as fh:
        raw = fh.read()

    expected_warning = (
        "readMSEEDBuffer(): Unexpected end of file when "
        "parsing record starting at offset 0. The rest of "
        "the file will not be read.")
    # A single truncated 128 byte record yields no traces, only a
    # truncation warning.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        with io.BytesIO(raw[:128]) as buf:
            st = _read_mseed(buf)
        self.assertEqual(len(st), 0)  # nothing is read here.
        self.assertGreaterEqual(len(w), 1)
        self.assertIs(w[-1].category, InternalMSEEDWarning)
        self.assertEqual(expected_warning, w[-1].message.args[0])

    expected_error = (
        "The smallest possible mini-SEED record is made up of 128 bytes. "
        "The passed buffer or file contains only 127.")
    # One byte below the minimum record size must raise an exception.
    with self.assertRaises(ObsPyMSEEDFilesizeTooSmallError) as e:
        with io.BytesIO(raw[:127]) as buf:
            _read_mseed(buf)
    self.assertEqual(e.exception.args[0], expected_error)
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data: skip the 128 byte header and
    # Steim2-decode the remaining payload (5980 samples).
    d = np.fromfile(file, dtype=np.uint8)[128:]
    data = util._unpack_steim_2(d, 5980, swapflag=self.swap, verbose=0)

    # test readMSTraces. Will raise internal warnings.
    with WarningsCapture() as w:
        data_record = _read_mseed(file)[0].data
    # This will raise 18 (!) warnings. It will skip 17 * 128 bytes due
    # to it not being a SEED record and then complain that the remaining
    # 30 bytes are not enough to constitute a full SEED record.
    self.assertEqual(len(w), 18)
    # Warning categories are classes: check by identity with assertIs,
    # consistent with the other tests in this file.
    self.assertIs(w[0].category, InternalMSEEDWarning)

    # Test against reference data.
    self.assertEqual(len(data_record), 5980)
    last10samples = [2862, 2856, 2844, 2843, 2851, 2853, 2853, 2854,
                     2857, 2863]
    np.testing.assert_array_equal(data_record[-10:], last10samples)
    # Also test against independently unpacked data.
    np.testing.assert_allclose(data_record, data)
def test_bug_write_read_float32_seed_win32(self):
    """
    Test case for issue #64.
    """
    # Build a single-trace stream of float32 samples.
    samples = np.array(
        [395.07809448, 395.0782, 1060.28112793, -1157.37487793,
         -1236.56237793, 355.07028198, -1181.42175293],
        dtype=np.float32)
    stream = Stream([Trace(data=samples)])
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        _write_mseed(stream, tempfile, format="MSEED")
        # Read the temporary file directly, bypassing libmseed: the
        # samples start 56 bytes in and are stored big-endian float32.
        dtype = np.dtype(native_str('>f4'))
        with open(tempfile, 'rb') as fp:
            fp.seek(56)
            bin_data = from_buffer(fp.read(7 * dtype.itemsize),
                                   dtype=dtype)
        np.testing.assert_array_equal(samples, bin_data)
        # Reading back via ObsPy must yield the same samples as well.
        st2 = _read_mseed(tempfile)
        np.testing.assert_array_equal(samples, st2[0].data)
def test_reading_truncated_miniseed_files_case_2(self):
    """
    Second test in the same vain as
    test_reading_truncated_miniseed_files. Previously forgot a `<=`
    test.
    """
    filename = os.path.join(self.path, 'data',
                            'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    # Chop off the last 256 bytes so the final record is incomplete.
    with io.open(filename, 'rb') as fh:
        data = fh.read()[:-256]
    # Offset of the record that must later show up in the warning.
    self.assertEqual(len(data) - 256, 4608)

    # The file now lacks information at the end: reading stops at that
    # point and a warning about the missing rest is raised.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        with io.BytesIO(data) as buf:
            st = _read_mseed(buf)
        self.assertEqual(len(st), 1)
        self.assertEqual(len(w), 1)
        self.assertIs(w[0].category, InternalMSEEDWarning)
        self.assertEqual(
            "readMSEEDBuffer(): Unexpected end of file when "
            "parsing record starting at offset 4608. The rest of "
            "the file will not be read.",
            w[0].message.args[0])
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data, 128 Bytes header.
    d = np.fromfile(file, dtype=np.uint8)[128:]
    data = util._unpack_steim_2(d, 5980, swapflag=self.swap, verbose=0)

    # test readMSTraces. Will raise an internal warning.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        data_record = _read_mseed(file)[0].data
        self.assertEqual(len(w), 1)
        # Warning categories are classes: check by identity with
        # assertIs, consistent with the other tests in this file.
        self.assertIs(w[0].category, InternalMSEEDReadingWarning)

    # Test against reference data.
    self.assertEqual(len(data_record), 5980)
    last10samples = [2862, 2856, 2844, 2843, 2851, 2853, 2853, 2854,
                     2857, 2863]
    np.testing.assert_array_equal(data_record[-10:], last10samples)
    # Also test against independently unpacked data.
    np.testing.assert_allclose(data_record, data)
def test_reading_truncated_miniseed_files_case_2(self):
    """
    Second test in the same vain as
    test_reading_truncated_miniseed_files. Previously forgot a `<=`
    test.
    """
    filename = os.path.join(self.path, 'data',
                            'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    with io.open(filename, 'rb') as fh:
        data = fh.read()
    # Remove the last 256 bytes so the file ends mid-record.
    data = data[:-256]
    # This is the offset of the record that later has to be recorded in
    # the warning.
    self.assertEqual(len(data) - 256, 4608)

    expected = ("readMSEEDBuffer(): Unexpected end of file when "
                "parsing record starting at offset 4608. The rest of "
                "the file will not be read.")
    # Reading stops at the truncation point and warns about the rest.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        with io.BytesIO(data) as buf:
            st = _read_mseed(buf)
        self.assertEqual(len(st), 1)
        self.assertEqual(len(w), 1)
        self.assertIs(w[0].category, InternalMSEEDWarning)
        self.assertEqual(expected, w[0].message.args[0])
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data, 128 Bytes header.
    d = np.fromfile(file, dtype=np.uint8)[128:]
    data = util._unpack_steim_2(d, 5980, swapflag=self.swap, verbose=0)

    # test readMSTraces. Will raise internal warnings.
    with WarningsCapture() as w:
        data_record = _read_mseed(file)[0].data
    # This will raise 18 (!) warnings. It will skip 17 * 128 bytes due
    # to it not being a SEED record and then complain that the remaining
    # 30 bytes are not enough to constitute a full SEED record.
    self.assertEqual(len(w), 18)
    # Warning categories are classes: check by identity with assertIs,
    # consistent with the other tests in this file.
    self.assertIs(w[0].category, InternalMSEEDWarning)

    # Test against reference data.
    self.assertEqual(len(data_record), 5980)
    last10samples = [2862, 2856, 2844, 2843, 2851, 2853, 2853, 2854,
                     2857, 2863]
    np.testing.assert_array_equal(data_record[-10:], last10samples)
    # Also test against independently unpacked data.
    np.testing.assert_allclose(data_record, data)
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data, 128 Bytes header.
    d = np.fromfile(file, dtype=np.uint8)[128:]
    data = util._unpack_steim_2(d, 5980, swapflag=self.swap, verbose=0)

    # test readMSTraces. Will raise an internal warning.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        data_record = _read_mseed(file)[0].data
        self.assertEqual(len(w), 1)
        # Warning categories are classes: check by identity with
        # assertIs, consistent with the other tests in this file.
        self.assertIs(w[0].category, InternalMSEEDWarning)

    # Test against reference data.
    self.assertEqual(len(data_record), 5980)
    last10samples = [2862, 2856, 2844, 2843, 2851, 2853, 2853, 2854,
                     2857, 2863]
    np.testing.assert_array_equal(data_record[-10:], last10samples)
    # Also test against independently unpacked data.
    np.testing.assert_allclose(data_record, data)
def test_time_shifting(self):
    """
    Tests the shift_time_of_file() function.

    Shifts files by various amounts (the timeshift argument is given in
    units of 0.0001 seconds) and checks that the shifted file equals the
    original stream with a correspondingly adjusted starttime. The five
    originally copy-pasted shift/read/compare sequences are factored
    into one local helper.
    """
    with NamedTemporaryFile() as tf:
        output_filename = tf.name

        def _assert_shift(input_filename, timeshift, seconds):
            # Shift the file, read both versions and compare after
            # adjusting the expected starttime by ``seconds``.
            util.shift_time_of_file(input_filename, output_filename,
                                    timeshift)
            st_before = _read_mseed(input_filename)
            st_after = _read_mseed(output_filename)
            st_before[0].stats.starttime += seconds
            self.assertEqual(st_before, st_after)

        # Test a normal file first.
        filename = os.path.join(
            self.path, 'data',
            "BW.BGLD.__.EHE.D.2008.001.first_10_records")
        # Shift by one second.
        _assert_shift(filename, 10000, 1)
        # Shift by 22 seconds in the other direction.
        _assert_shift(filename, -220000, -22)
        # Shift by 11.33 seconds.
        _assert_shift(filename, 113300, 11.33)

        # Test a special case with the time correction applied flag set
        # but no actual time correction in the field.
        filename = os.path.join(
            self.path, 'data',
            "one_record_time_corr_applied_but_time_corr_is_zero.mseed")
        # Positive shift.
        _assert_shift(filename, 22000, 2.2)
        # Negative shift.
        _assert_shift(filename, -333000, -33.3)
def test_reading_file_larger_than_2048_mib(self, getsize_mock):
    """
    ObsPy can currently not directly read files that are larger than
    2^31 bytes. This raises an exception with a description of how to
    get around it.
    """
    # Pretend the file is one byte over the 2^31 byte limit.
    getsize_mock.return_value = 2 ** 31 + 1
    filename = os.path.join(self.path, 'data',
                            'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    # Reading must refuse with a helpful message.
    with self.assertRaises(ObsPyMSEEDFilesizeTooLargeError) as e:
        _read_mseed(filename)
    msg = (
        "ObsPy can currently not directly read mini-SEED files that are "
        "larger than 2^31 bytes (2048 MiB). To still read it, please "
        "read the file in chunks as documented here: "
        "https://github.com/obspy/obspy/pull/1419#issuecomment-221582369")
    self.assertEqual(e.exception.args[0], msg)
def test_reading_file_larger_than_2048_mib(self, getsize_mock):
    """
    ObsPy can currently not directly read files that are larger than
    2^31 bytes. This raises an exception with a description of how to
    get around it.
    """
    # Simulate a file size just past the 2^31 byte limit.
    getsize_mock.return_value = 2 ** 31 + 1
    path = os.path.join(self.path, 'data',
                        'BW.BGLD.__.EHE.D.2008.001.first_10_records')
    # The reader must refuse and point the user at the workaround.
    with self.assertRaises(ObsPyMSEEDFilesizeTooLargeError) as ctx:
        _read_mseed(path)
    self.assertEqual(
        ctx.exception.args[0],
        "ObsPy can currently not directly read mini-SEED files that are "
        "larger than 2^31 bytes (2048 MiB). To still read it, please "
        "read the file in chunks as documented here: "
        "https://github.com/obspy/obspy/pull/1419#issuecomment-221582369")
def test_time_shifting_special_case(self):
    """
    Sometimes actually changing the time value is necessary. This works
    but is considered experimental and thus emits a warning. Therefore
    Python >= 2.6 only.
    """
    with NamedTemporaryFile() as tf:
        output_filename = tf.name
        # This file was created only for testing purposes.
        filename = os.path.join(
            self.path, 'data',
            "one_record_already_applied_time_correction.mseed")
        with warnings.catch_warnings(record=True):
            # Turn the UserWarning into an error to assert it is raised.
            warnings.simplefilter('error', UserWarning)
            self.assertRaises(UserWarning, util.shift_time_of_file,
                              input_file=filename,
                              output_file=output_filename,
                              timeshift=123400)
            # Now ignore the warnings and test the default values.
            warnings.simplefilter('ignore', UserWarning)
            util.shift_time_of_file(input_file=filename,
                                    output_file=output_filename,
                                    timeshift=123400)
        # timeshift is in units of 0.0001 s: 123400 -> 12.34 seconds.
        st_before = _read_mseed(filename)
        st_after = _read_mseed(output_filename)
        st_before[0].stats.starttime += 12.34
        self.assertEqual(st_before, st_after)

        # Test negative shifts.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('ignore', UserWarning)
            util.shift_time_of_file(input_file=filename,
                                    output_file=output_filename,
                                    timeshift=-22222)
        st_before = _read_mseed(filename)
        st_after = _read_mseed(output_filename)
        st_before[0].stats.starttime -= 2.2222
        self.assertEqual(st_before, st_after)
def test_unpackSteim2(self):
    """
    Test decompression of Steim2 strings. Remove 128 Bytes of header
    by hand, see SEEDManual_V2.4.pdf page 100.
    """
    steim2_file = os.path.join(self.path, 'data', 'steim2.mseed')
    # Strip the 128 byte header and decode the payload manually.
    with open(steim2_file, 'rb') as fp:
        payload = fp.read()[128:]
    unpacked = util._unpack_steim_2(payload, 5980, swapflag=self.swap,
                                    verbose=0)
    # The manual decoding must match what the regular reader returns.
    via_reader = _read_mseed(steim2_file)[0].data
    np.testing.assert_array_equal(unpacked, via_reader)
def test_unpackSteim1(self):
    """
    Test decompression of Steim1 strings. Remove 64 Bytes of header
    by hand, see SEEDManual_V2.4.pdf page 100.
    """
    steim1_file = os.path.join(self.path, 'data',
                               'BW.BGLD.__.EHE.D.2008.001.first_record')
    # Strip the 64 byte header and decode the 412 samples manually.
    with open(steim1_file, 'rb') as fp:
        payload = fp.read()[64:]
    unpacked = util._unpack_steim_1(payload, 412, swapflag=self.swap,
                                    verbose=0)
    # The manual decoding must match what the regular reader returns.
    via_reader = _read_mseed(steim1_file)[0].data
    np.testing.assert_array_equal(unpacked, via_reader)
def test_brokenLastRecord(self):
    """
    Test if Libmseed is able to read files with broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Decode the payload independently; the first 128 bytes are header.
    with open(file, 'rb') as fp:
        payload = fp.read()[128:]
    expected = util._unpack_steim_2(payload, 5980, swapflag=self.swap,
                                    verbose=0)
    # Compare against the regular reading path (readMSTraces).
    actual = _read_mseed(file)[0].data
    np.testing.assert_array_equal(expected, actual)
def test_getStartAndEndTime(self):
    """
    Tests getting the start- and endtime of a file.

    The values are compared with the results of reading the full files.
    """
    for name in ('BW.BGLD.__.EHE.D.2008.001.first_10_records',
                 'test.mseed', 'timingquality.mseed'):
        filename = os.path.join(self.path, 'data', name)
        # Fast path: only extract start/end from the file.
        start, end = util.get_start_and_end_time(filename)
        # Slow path: parse the whole file and compare.
        stream = _read_mseed(filename)
        self.assertEqual(start, stream[0].stats.starttime)
        self.assertEqual(end, stream[0].stats.endtime)
def test_write_with_date_time_before_1970(self):
    """
    Write an stream via libmseed with a datetime before 1970.

    This test depends on the platform specific localtime()/gmtime()
    function.
    """
    # Build a trace starting before the epoch.
    trace = Trace(data=np.empty(1000))
    trace.stats.starttime = UTCDateTime("1969-01-01T00:00:00")
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        _write_mseed(Stream([trace]), tempfile, format="MSEED")
        # Reading it back must produce a consistent stream.
        stream = _read_mseed(tempfile)
        stream.verify()
def test_one_sample_overlap(self):
    """
    Both methods readMSTraces and readMSTracesViaRecords should
    recognize a single sample overlap.
    """
    # Two traces whose time ranges overlap by exactly one sample: the
    # first covers t=0..999, the second starts at t=999.
    first = Trace(data=np.zeros(1000))
    second = Trace(data=np.zeros(10))
    second.stats.starttime = UTCDateTime(999)
    st = Stream([first, second])
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        _write_mseed(st, tempfile, format="MSEED")
        # The overlap must keep the traces separate when reading back.
        new_stream = _read_mseed(tempfile)
        self.assertEqual(len(new_stream), 2)
def test_getStartAndEndTime(self):
    """
    Tests getting the start- and endtime of a file.

    The values are compared with the results of reading the full files.
    """
    mseed_filenames = ['BW.BGLD.__.EHE.D.2008.001.first_10_records',
                       'test.mseed', 'timingquality.mseed']
    for basename in mseed_filenames:
        filename = os.path.join(self.path, 'data', basename)
        # Quick metadata-only query ...
        start, end = util.get_start_and_end_time(filename)
        # ... must agree with a full parse of the file.
        st = _read_mseed(filename)
        self.assertEqual(start, st[0].stats.starttime)
        self.assertEqual(end, st[0].stats.endtime)
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data; the first 128 bytes are header.
    with open(file, "rb") as fp:
        data_string = fp.read()[128:]
    data = util._unpack_steim_2(data_string, 5980, swapflag=self.swap,
                                verbose=0)

    # test readMSTraces. Will raise an internal warning.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        data_record = _read_mseed(file)[0].data
        self.assertEqual(len(w), 1)
        # Warning categories are classes: check by identity with
        # assertIs, consistent with the other tests in this file.
        self.assertIs(w[0].category, InternalMSEEDReadingWarning)

    np.testing.assert_array_equal(data, data_record)
def test_broken_last_record(self):
    """
    Test if libmseed is able to read files with a broken last record.
    Use both methods, readMSTracesViaRecords and readMSTraces.
    """
    file = os.path.join(self.path, "data", "brokenlastrecord.mseed")
    # Independent reading of the data; the first 128 bytes are header.
    with open(file, 'rb') as fp:
        data_string = fp.read()[128:]
    data = util._unpack_steim_2(data_string, 5980, swapflag=self.swap,
                                verbose=0)

    # test readMSTraces. Will raise an internal warning.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        data_record = _read_mseed(file)[0].data
        self.assertEqual(len(w), 1)
        # Warning categories are classes: check by identity with
        # assertIs, consistent with the other tests in this file.
        self.assertIs(w[0].category, InternalMSEEDReadingWarning)

    np.testing.assert_array_equal(data, data_record)
def test_bug_write_read_float32_seed_win32(self):
    """
    Test case for issue #64.
    """
    # create stream object
    values = [395.07809448, 395.0782, 1060.28112793, -1157.37487793,
              -1236.56237793, 355.07028198, -1181.42175293]
    data = np.array(values, dtype=np.float32)
    st = Stream([Trace(data=data)])
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        _write_mseed(st, tempfile, format="MSEED")
        # Read the raw samples back without libmseed: 56 header bytes,
        # then seven big-endian float32 values.
        dtype = np.dtype(native_str('>f4'))
        with open(tempfile, 'rb') as fp:
            fp.seek(56)
            raw = fp.read(7 * dtype.itemsize)
        bin_data = from_buffer(raw, dtype=dtype)
        np.testing.assert_array_equal(data, bin_data)
        # Reading via ObsPy must give the same samples.
        st2 = _read_mseed(tempfile)
        np.testing.assert_array_equal(data, st2[0].data)