def test_in_process_start(self):
    """
    Test starting the parser in a state in the middle of processing,
    in the middle of a block.
    """
    log.debug('Starting test_in_process_start')
    # [2818:2982] contains the first WE SIO header
    new_state = {StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0], [4059, 4673, 18, 0]],
                 StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
                 StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        # first remaining record of the partially-processed first block
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)

        # drain the rest of the first block in a single call
        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_1b)
        self.assertEqual(result[1], self.particle_1c)
        self.assert_state([[4059, 4673, 18, 0]], [[4058, 5000]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_1c)

        # first record of the second block
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
def test_simple(self):
    """
    Read test data and pull out data particles one at a time.
    Assert that the results are those we expected.
    """
    # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
    # just 5000 bytes, so even though the file is longer, it only reads the first
    # 5000
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 0]], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
def test_set_state(self):
    """
    Test changing to a new state after initializing the parser and
    reading data, as if new data has been found and the state has changed
    """
    log.debug('-------------------------------------------------Starting test_set_state')
    self.state = {StateKey.UNPROCESSED_DATA: [[4059, 4673]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    new_state = {StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 4059], [4673, 5000]],
                 StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0]],
                 StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        # only 18 left in file at this point. Drain them, and make sure the next fetch fails
        result = self.parser.get_records(17)
        self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
        result = self.parser.get_records(1)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        # point the parser at the newly-available earlier block
        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1a)
def test_bad_data(self):
    """
    Ensure that a bad record (in this case a corrupted status message)
    causes a sample exception.
    """
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' closes the handle; the original never closed this file at all.
    with open(os.path.join(RESOURCE_PATH, 'node58p1_BADFLAGS.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)
        result = self.parser.get_records(1)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
def test_bad_e_record(self):
    """
    Ensure that the bad record causes a sample exception. The file
    'bad_e_record.dat' includes a record containing one byte less than the
    expected 30 for the flord_l_wfp_sio_mule. The 'Number of Data Bytes'
    and the 'CRC Checksum' values in the SIO Mule header have been
    modified accordingly.
    """
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' closes the handle; the original never closed this file at all.
    with open(os.path.join(RESOURCE_PATH, 'bad_e_record.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)
        result = self.parser.get_records(1)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
def test_set_state(self):
    """
    Test changing to a new state after initializing the parser and
    reading data, as if new data has been found and the state has changed
    """
    log.debug("-------------------------------------------------Starting test_set_state")
    self.state = {StateKey.UNPROCESSED_DATA: [[4059, 4673]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    new_state = {
        StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 4059], [4673, 5000]],
        StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0]],
        StateKey.FILE_SIZE: [],
    }
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, "node58p1.dat")) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )

        # only 18 left in file at this point. Drain them, and make sure the next fetch fails
        result = self.parser.get_records(17)
        self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
        result = self.parser.get_records(1)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        # point the parser at the newly-available earlier block
        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1a)
def test_simple(self):
    """
    Read test data and pull out data particles one at a time.
    Assert that the results are those we expected.
    """
    # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
    # just 5000 bytes, so even though the file is longer, it only reads the first
    # 5000
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, "node58p1.dat")) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )

        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
            [[2818, 2982], [4058, 5000]], self.particle_1a
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 2], [4059, 4673, 18, 0]],
            [[2818, 2982], [4058, 5000]], self.particle_1b
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 0]], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
def test_in_process_start(self):
    """
    Test starting the parser in a state in the middle of processing,
    in the middle of a block.
    """
    log.debug("Starting test_in_process_start")
    # [2818:2982] contains the first WE SIO header
    new_state = {
        StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0], [4059, 4673, 18, 0]],
        StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
        StateKey.FILE_SIZE: [],
    }
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, "node58p1.dat")) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(
            self.config, new_state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )

        # first remaining record of the partially-processed first block
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
            [[2818, 2982], [4058, 5000]], self.particle_1a
        )

        # drain the rest of the first block in a single call
        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_1b)
        self.assertEqual(result[1], self.particle_1c)
        self.assert_state([[4059, 4673, 18, 0]], [[4058, 5000]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_1c)

        # first record of the second block
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
def test_update(self):
    """
    Test a file that has a section of data replaced by 0s, as if a block of
    data has not yet been received.
    """
    log.debug('------------------------------------------------------Starting test_update')
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # this file has first block of WE data replaced by 0s.
    # 'with' guarantees each handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1_1stWE0d.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]],
                           [[2818, 2982], [4058, 5000]], self.particle_2a)

    next_state = self.parser._state
    # reopen the full file, as if the previously-zeroed block has now arrived
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, next_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)
        # there are 18 valid records in the second WE chunk. We read one above,
        # now we need to drain the remaining 17 to trigger the reparsing of the
        # earlier block
        for _ in range(17):
            result = self.parser.get_records(1)
        # the next fetch should find the now-replaced earlier data
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1b)
def test_bad_data(self):
    """
    Ensure that a bad record (in this case a corrupted status message)
    causes a sample exception.
    """
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' closes the handle; the original never closed this file at all.
    with open(os.path.join(RESOURCE_PATH, "node58p1_BADFLAGS.dat")) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
def test_mid_state_start(self):
    """
    Test starting the parser in a state in the middle of processing,
    no in_process_data.
    """
    log.debug('Starting test_mid_state_start')
    new_state = {StateKey.IN_PROCESS_DATA: [],
                 StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
                 StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]],
                           [[2818, 2982], [4058, 5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
def test_long_stream(self):
    """
    Test a long stream
    """
    log.debug("Start of test_long_stream.")
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.stream_handle.seek(0)
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(12)
        self.assertEqual(result[0], self.particle_1a)
        self.assertEqual(result[1], self.particle_1b)
        self.assertEqual(result[2], self.particle_1c)
        self.assertEqual(result[3], self.particle_2a)
        self.assertEqual(result[-2], self.particle_1k)
        self.assertEqual(result[-1], self.particle_1l)
        self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
        self.assertEqual(self.publish_callback_value[-1], self.particle_1l)
        self.assert_state([[4059, 4673, 18, 9]], [[4058, 5000]])
def test_bad_e_record(self):
    """
    Ensure that the bad record causes a sample exception. The file
    'bad_e_record.dat' includes a record containing one byte less than the
    expected 30 for the flord_l_wfp_sio_mule. The 'Number of Data Bytes'
    and the 'CRC Checksum' values in the SIO Mule header have been
    modified accordingly.
    """
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' closes the handle; the original never closed this file at all.
    with open(os.path.join(RESOURCE_PATH, "bad_e_record.dat")) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
def _build_parser(self, parser_state, infile):
    """
    Build and return the parser
    @param parser_state starting parser state to pass to parser
    @param infile handle of open file to pass to parser
    """
    # Copy before updating: the original bound config to self._parser_config
    # and then mutated that shared dict in place on every call.
    config = dict(self._parser_config)
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flord_l_wfp_sio_mule',
        DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlordLWfpSioMuleParserDataParticle'
    })
    log.debug("My Config: %s", config)
    self._parser = FlordLWfpSioMuleParser(
        config,
        parser_state,
        infile,
        self._save_parser_state,
        self._data_callback,
        self._sample_exception_callback
    )
    return self._parser
def _build_telemetered_parser(self, parser_state, stream_in):
    """
    Build and return the telemetered parser
    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    """
    # Copy before updating: the original mutated the dict stored inside
    # self._parser_config in place on every call.
    config = dict(self._parser_config.get(DataSourceKey.FLORD_L_WFP_SIO_MULE))
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flord_l_wfp_sio_mule',
        DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlordLWfpSioMuleParserDataParticle'
    })
    log.debug("My Config: %s", config)
    parser = FlordLWfpSioMuleParser(
        config,
        parser_state,
        stream_in,
        lambda state: self._save_parser_state(state, DataSourceKey.FLORD_L_WFP_SIO_MULE),
        self._data_callback,
        self._sample_exception_callback
    )
    return parser
def test_get_many(self):
    """
    Read test data and pull out multiple data particles at one time.
    Assert that the results are those we expected.
    """
    # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
    # just 5000 bytes, so even though the file is longer, it only reads the first
    # 5000
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, 'node58p1.dat')) as self.stream_handle:
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(4)
        self.assertEqual(result, [self.particle_1a, self.particle_1b,
                                  self.particle_1c, self.particle_2a])
        self.assertEqual(self.publish_callback_value[0], self.particle_1a)
        self.assertEqual(self.publish_callback_value[1], self.particle_1b)
        self.assertEqual(self.publish_callback_value[2], self.particle_1c)
        self.assertEqual(self.publish_callback_value[3], self.particle_2a)
        self.assert_state([[4059, 4673, 18, 1]], [[4058, 5000]])
def test_long_stream(self):
    """
    Test a long stream
    """
    log.debug("Start of test_long_stream.")
    self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                  StateKey.IN_PROCESS_DATA: [],
                  StateKey.FILE_SIZE: []}
    # 'with' guarantees the handle is closed even if an assertion fails
    # (the original leaked the handle on any test failure).
    with open(os.path.join(RESOURCE_PATH, "node58p1.dat")) as self.stream_handle:
        self.stream_handle.seek(0)
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback
        )

        result = self.parser.get_records(12)
        self.assertEqual(result[0], self.particle_1a)
        self.assertEqual(result[1], self.particle_1b)
        self.assertEqual(result[2], self.particle_1c)
        self.assertEqual(result[3], self.particle_2a)
        self.assertEqual(result[-2], self.particle_1k)
        self.assertEqual(result[-1], self.particle_1l)
        self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
        self.assertEqual(self.publish_callback_value[-1], self.particle_1l)
        self.assert_state([[4059, 4673, 18, 9]], [[4058, 5000]])
class FlordLWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    flord_l_wfp_sio_mule Parser unit test suite
    """

    def state_callback(self, state):
        """ Call back method to watch what comes in via the position callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Call back method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        """Build the parser config and the expected fixture particles."""
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: "mi.dataset.parser.flord_l_wfp_sio_mule",
            DataSetDriverConfigKeys.PARTICLE_CLASS: "FlordLWfpSioMuleParserDataParticle",
        }

        # Define test data particles and their associated timestamps which will be
        # compared with returned results

        # First 'WE' SIO header in noe58p1.dat, first record.
        self.timestamp_1a = self.timestamp_to_ntp("Q\xf2W.")
        log.debug("Converted ntp timestamp 1a: %s", self.timestamp_1a)
        self.particle_1a = FlordLWfpSioMuleParserDataParticle(
            b"Q\xf2W.\x00\x00\x00\x00A9Y"
            "\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x009\x00e\x02:",
            internal_timestamp=self.timestamp_1a,
        )

        # First 'WE' SIO header in noe58p1.dat, second record.
        self.timestamp_1b = self.timestamp_to_ntp("Q\xf2Xq")
        log.debug("Converted ntp timestamp 1b: %s", self.timestamp_1b)
        self.particle_1b = FlordLWfpSioMuleParserDataParticle(
            b"Q\xf2XqB\x8f\x83DA5\x1e\xb8D"
            "\xfd\x85qB\x82\x83\x12?\xf9\xba^\x009\x00d\x028",
            internal_timestamp=self.timestamp_1b,
        )

        # First 'WE' SIO header in noe58p1.dat, third record.
        self.timestamp_1c = self.timestamp_to_ntp("Q\xf2Z\xd3")
        log.debug("Converted ntp timestamp 1c: %s", self.timestamp_1c)
        self.particle_1c = FlordLWfpSioMuleParserDataParticle(
            b"Q\xf2Z\xd3B\x84\x06GA2\x9a\xd4E"
            "\t\xd3\xd7B\x9b\xdc)?\xec\xac\x08\x00:\x00d\x027",
            internal_timestamp=self.timestamp_1c,
        )

        # Second 'WE' SIO header in noe58p1.dat, first record.
        self.timestamp_2a = self.timestamp_to_ntp("Q\xf2\x8fn")
        log.debug("Converted ntp timestamp 2a: %s", self.timestamp_2a)
        self.particle_2a = FlordLWfpSioMuleParserDataParticle(
            b"Q\xf2\x8fn\x00\x00\x00\x00A7\xd5f"
            "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x008\x00a\x02=",
            internal_timestamp=self.timestamp_2a,
        )

        # Last 'WE' SIO header in node58p1.dat, last record (when reading 12).
        self.timestamp_1l = self.timestamp_to_ntp("Q\xf2\x99q")
        log.debug("Converted ntp timestamp 1l: %s", self.timestamp_1l)
        self.particle_1l = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2\x99qC"\t\xceA/\x9alEM\x07\\C'
            "\x07\xd7\n?\xc3\x95\x81\x007\x00_\x02;",
            internal_timestamp=self.timestamp_1l,
        )

        # Last 'WE' SIO header in node58p1.dat[0:300000], second to last record.
        self.timestamp_1k = self.timestamp_to_ntp("Q\xf2\x981")
        log.debug("Converted ntp timestamp 1k: %s", self.timestamp_1k)
        self.particle_1k = FlordLWfpSioMuleParserDataParticle(
            b"Q\xf2\x981C\x10\xe5kA/\xe4&EG\x8c\x00C"
            "\x04\xc2\x8f?\xc4\xfd\xf4\x006\x00_\x02;",
            internal_timestamp=self.timestamp_1k,
        )

        # Last record of second 'WE' SIO header, the last record when pulling 5000 bytes.
        self.timestamp_m = self.timestamp_to_ntp("Q\xf2\xa5\xc9")
        log.debug("Converted ntp timestamp m: %s", self.timestamp_m)

        # callback capture slots, populated by the parser during tests
        self.state_callback_value = None
        self.publish_callback_value = None
        self.exception_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data, particle):
        """Verify the single returned particle, the parser state, and publication."""
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data)
        self.assert_(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data):
        """Verify both the parser's internal state and the state reported via callback."""
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA], unprocessed_data)

    def timestamp_to_ntp(self, hex_timestamp):
        """Unpack a big-endian 4-byte seconds value and convert it to NTP time."""
        fields = struct.unpack(">I", hex_timestamp)
        timestamp = float(fields[0])
        return ntplib.system_to_ntp_time(timestamp)

    def convert_utc_to_zulu(self, hex_timestamp):
        """Unpack a big-endian 4-byte seconds value and format it as a Zulu time string."""
        fields = struct.unpack(">I", hex_timestamp)
        timestamp = float(fields[0])
        zulu_time = datetime.utcfromtimestamp(timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        return zulu_time

    def test_simple(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
        # just 5000 bytes, so even though the file is longer, it only reads the first
        # 5000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]], [[2818, 2982], [4058, 5000]], self.particle_1a
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 2], [4059, 4673, 18, 0]], [[2818, 2982], [4058, 5000]], self.particle_1b
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 0]], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
        # just 5000 bytes, so even though the file is longer, it only reads the first
        # 5000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(4)
        self.assertEqual(result, [self.particle_1a, self.particle_1b, self.particle_1c, self.particle_2a])
        self.assertEqual(self.publish_callback_value[0], self.particle_1a)
        self.assertEqual(self.publish_callback_value[1], self.particle_1b)
        self.assertEqual(self.publish_callback_value[2], self.particle_1c)
        self.assertEqual(self.publish_callback_value[3], self.particle_2a)
        self.assert_state([[4059, 4673, 18, 1]], [[4058, 5000]])
        self.stream_handle.close()

    def test_long_stream(self):
        """
        Test a long stream
        """
        log.debug("Start of test_long_stream.")
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(12)
        self.assertEqual(result[0], self.particle_1a)
        self.assertEqual(result[1], self.particle_1b)
        self.assertEqual(result[2], self.particle_1c)
        self.assertEqual(result[3], self.particle_2a)
        self.assertEqual(result[-2], self.particle_1k)
        self.assertEqual(result[-1], self.particle_1l)
        self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
        self.assertEqual(self.publish_callback_value[-1], self.particle_1l)
        self.assert_state([[4059, 4673, 18, 9]], [[4058, 5000]])
        self.stream_handle.close()

    def test_mid_state_start(self):
        """
        Test starting the parser in a state in the middle of processing,
        no in_process_data.
        """
        log.debug("Starting test_mid_state_start")
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
            StateKey.FILE_SIZE: [],
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        self.parser = FlordLWfpSioMuleParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]], [[2818, 2982], [4058, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]], [[2818, 2982], [4058, 5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_bad_data(self):
        """
        Ensure that the bad record ( in this case a currupted status message )
        causes a sample exception
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1_BADFLAGS.dat"))
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))

    def test_bad_e_record(self):
        """
        Ensure that the bad record causes a sample exception. The file
        'bad_e_record.dat' includes a record containing one byte less than
        the expected 30 for the flord_l_wfp_sio_mule. The 'Number of Data
        Bytes' and the 'CRC Checksum' values in the SIO Mule header have
        been modified accordingly.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "bad_e_record.dat"))
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))

    def test_in_process_start(self):
        """
        Test starting the parser in a state in the middle of processing,
        in the middle of a block.
        """
        log.debug("Starting test_in_process_start")
        # [2818:2982] contains the first WE SIO header
        new_state = {
            StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0], [4059, 4673, 18, 0]],
            StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
            StateKey.FILE_SIZE: [],
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        self.parser = FlordLWfpSioMuleParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]], [[2818, 2982], [4058, 5000]], self.particle_1a
        )
        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_1b)
        self.assertEqual(result[1], self.particle_1c)
        self.assert_state([[4059, 4673, 18, 0]], [[4058, 5000]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has changed
        """
        log.debug("-------------------------------------------------Starting test_set_state")
        self.state = {StateKey.UNPROCESSED_DATA: [[4059, 4673]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        new_state = {
            StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 4059], [4673, 5000]],
            StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0]],
            StateKey.FILE_SIZE: [],
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        # only 18 left in file at this point. Drain them, and make sure the next fetch fails
        result = self.parser.get_records(17)
        self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
        result = self.parser.get_records(1)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])
        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]], [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1a)
        self.stream_handle.close()

    def test_update(self):
        """
        Test a file that has a section of data replaced by 0s, as if a block of
        data has not yet been received.
        """
        log.debug("------------------------------------------------------Starting test_update")
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]], StateKey.IN_PROCESS_DATA: [], StateKey.FILE_SIZE: []}
        # this file has first block of WE data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1_1stWE0d.dat"))
        self.parser = FlordLWfpSioMuleParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]], [[2818, 2982], [4058, 5000]], self.particle_2a)
        self.stream_handle.close()
        next_state = self.parser._state
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node58p1.dat"))
        self.parser = FlordLWfpSioMuleParser(
            self.config, next_state, self.stream_handle, self.state_callback, self.pub_callback, self.exception_callback
        )
        # there are 18 valid records in the second WE chunk. We read one above, now we need
        # to drain the remaining 17 to trigger the reparsing of the earlier block
        for kk in range(0, 17):
            result = self.parser.get_records(1)
        # the next fetch should find the now-replaced earlier data
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]], [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]], [[2818, 2982], [4058, 4059], [4673, 5000]], self.particle_1b)
        self.stream_handle.close()
class FlordLWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    flord_l_wfp_sio_mule Parser unit test suite
    """

    def state_callback(self, state):
        """ Call back method to watch what comes in via the position callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Call back method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flord_l_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlordLWfpSioMuleParserDataParticle'
        }

        # Define test data particles and their associated timestamps which will be
        # compared with returned results

        # First 'WE' SIO header in noe58p1.dat, first record.
        self.timestamp_1a = self.timestamp_to_ntp('Q\xf2W.')
        log.debug("Converted ntp timestamp 1a: %s", self.timestamp_1a)
        self.particle_1a = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2W.\x00\x00\x00\x00A9Y'
            b'\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x009\x00e\x02:',
            internal_timestamp=self.timestamp_1a)

        # First 'WE' SIO header in noe58p1.dat, second record.
        self.timestamp_1b = self.timestamp_to_ntp('Q\xf2Xq')
        log.debug("Converted ntp timestamp 1b: %s", self.timestamp_1b)
        self.particle_1b = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2XqB\x8f\x83DA5\x1e\xb8D'
            b'\xfd\x85qB\x82\x83\x12?\xf9\xba^\x009\x00d\x028',
            internal_timestamp=self.timestamp_1b)

        # First 'WE' SIO header in noe58p1.dat, third record.
        self.timestamp_1c = self.timestamp_to_ntp('Q\xf2Z\xd3')
        log.debug("Converted ntp timestamp 1c: %s", self.timestamp_1c)
        self.particle_1c = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2Z\xd3B\x84\x06GA2\x9a\xd4E'
            b'\t\xd3\xd7B\x9b\xdc)?\xec\xac\x08\x00:\x00d\x027',
            internal_timestamp=self.timestamp_1c)

        # Second 'WE' SIO header in noe58p1.dat, first record.
        self.timestamp_2a = self.timestamp_to_ntp('Q\xf2\x8fn')
        log.debug("Converted ntp timestamp 2a: %s", self.timestamp_2a)
        self.particle_2a = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2\x8fn\x00\x00\x00\x00A7\xd5f'
            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x008\x00a\x02=',
            internal_timestamp=self.timestamp_2a)

        # Last 'WE' SIO header in node58p1.dat, last record (when reading 12).
        self.timestamp_1l = self.timestamp_to_ntp('Q\xf2\x99q')
        log.debug("Converted ntp timestamp 1l: %s", self.timestamp_1l)
        self.particle_1l = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2\x99qC"\t\xceA/\x9alEM\x07\\C'
            b'\x07\xd7\n?\xc3\x95\x81\x007\x00_\x02;',
            internal_timestamp=self.timestamp_1l)

        # Last 'WE' SIO header in node58p1.dat[0:300000], second to last record.
        self.timestamp_1k = self.timestamp_to_ntp('Q\xf2\x981')
        log.debug("Converted ntp timestamp 1k: %s", self.timestamp_1k)
        self.particle_1k = FlordLWfpSioMuleParserDataParticle(
            b'Q\xf2\x981C\x10\xe5kA/\xe4&EG\x8c\x00C'
            b'\x04\xc2\x8f?\xc4\xfd\xf4\x006\x00_\x02;',
            internal_timestamp=self.timestamp_1k)

        # Last record of second 'WE' SIO header, the last record when pulling 5000 bytes.
        self.timestamp_m = self.timestamp_to_ntp('Q\xf2\xa5\xc9')
        log.debug("Converted ntp timestamp m: %s", self.timestamp_m)

        self.state_callback_value = None
        self.publish_callback_value = None
        self.exception_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data, particle):
        """ Assert that get_records returned exactly [particle], and that parser
        state and callbacks agree with the expected in-process/unprocessed data. """
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data)
        # assertTrue replaces the deprecated assert_ alias
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data):
        """ Assert both the parser's internal state and the state reported via
        the state callback match the expected values. """
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA], unprocessed_data)

    def timestamp_to_ntp(self, hex_timestamp):
        """ Convert a 4-byte big-endian POSIX timestamp into an NTP timestamp. """
        fields = struct.unpack('>I', hex_timestamp)
        timestamp = float(fields[0])
        return ntplib.system_to_ntp_time(timestamp)

    def convert_utc_to_zulu(self, hex_timestamp):
        """ Convert a 4-byte big-endian POSIX timestamp into a zulu time string. """
        fields = struct.unpack('>I', hex_timestamp)
        timestamp = float(fields[0])
        zulu_time = datetime.utcfromtimestamp(timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
        return zulu_time

    def test_simple(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
        # just 5000 bytes, so even though the file is longer, it only reads the first
        # 5000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 0]],
                           [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]],
                           [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        # NOTE: using the unprocessed data state of 0,5000 limits the file to reading
        # just 5000 bytes, so even though the file is longer, it only reads the first
        # 5000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(4)
        self.assertEqual(result, [self.particle_1a, self.particle_1b,
                                  self.particle_1c, self.particle_2a])
        self.assertEqual(self.publish_callback_value[0], self.particle_1a)
        self.assertEqual(self.publish_callback_value[1], self.particle_1b)
        self.assertEqual(self.publish_callback_value[2], self.particle_1c)
        self.assertEqual(self.publish_callback_value[3], self.particle_2a)
        self.assert_state([[4059, 4673, 18, 1]], [[4058, 5000]])
        self.stream_handle.close()

    def test_long_stream(self):
        """
        Test a long stream
        """
        log.debug("Start of test_long_stream.")
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(12)
        self.assertEqual(result[0], self.particle_1a)
        self.assertEqual(result[1], self.particle_1b)
        self.assertEqual(result[2], self.particle_1c)
        self.assertEqual(result[3], self.particle_2a)
        self.assertEqual(result[-2], self.particle_1k)
        self.assertEqual(result[-1], self.particle_1l)
        self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
        self.assertEqual(self.publish_callback_value[-1], self.particle_1l)
        self.assert_state([[4059, 4673, 18, 9]], [[4058, 5000]])
        self.stream_handle.close()

    def test_mid_state_start(self):
        """
        Test starting the parser in a state in the middle of processing, no
        in_process_data.
        """
        log.debug('Starting test_mid_state_start')
        new_state = {StateKey.IN_PROCESS_DATA: [],
                     StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
                     StateKey.FILE_SIZE: []}
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]],
                           [[2818, 2982], [4058, 5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[4058, 5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]],
                           [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_bad_data(self):
        """
        Ensure that the bad record ( in this case a currupted status message )
        causes a sample exception
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1_BADFLAGS.dat'))
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
        # close the handle like the other tests do (was leaked previously)
        self.stream_handle.close()

    def test_bad_e_record(self):
        """
        Ensure that the bad record causes a sample exception. The file
        'bad_e_record.dat' includes a record containing one byte less than the
        expected 30 for the flord_l_wfp_sio_mule. The 'Number of Data Bytes'
        and the 'CRC Checksum' values in the SIO Mule header have been modified
        accordingly.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'bad_e_record.dat'))
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assertTrue(isinstance(self.exception_callback_value, UnexpectedDataException))
        # close the handle like the other tests do (was leaked previously)
        self.stream_handle.close()

    def test_in_process_start(self):
        """
        Test starting the parser in a state in the middle of processing, in the
        middle of a block.
        """
        log.debug('Starting test_in_process_start')
        # [2818:2982] contains the first WE SIO header
        new_state = {StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0], [4059, 4673, 18, 0]],
                     StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
                     StateKey.FILE_SIZE: []}
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                           [[2818, 2982], [4058, 5000]], self.particle_1a)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_1b)
        self.assertEqual(result[1], self.particle_1c)
        self.assert_state([[4059, 4673, 18, 0]], [[4058, 5000]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_1c)

        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]],
                           [[4058, 5000]], self.particle_2a)
        self.stream_handle.close()

    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and reading
        data, as if new data has been found and the state has changed
        """
        log.debug('-------------------------------------------------Starting test_set_state')
        self.state = {StateKey.UNPROCESSED_DATA: [[4059, 4673]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        new_state = {StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 4059], [4673, 5000]],
                     StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0]],
                     StateKey.FILE_SIZE: []}

        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        # only 18 left in file at this point. Drain them, and make sure the next fetch fails
        result = self.parser.get_records(17)
        self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
        result = self.parser.get_records(1)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1a)
        self.stream_handle.close()

    def test_update(self):
        """
        Test a file that has a section of data replaced by 0s, as if a block of
        data has not yet been received.
        """
        log.debug('------------------------------------------------------Starting test_update')
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 5000]],
                      StateKey.IN_PROCESS_DATA: [],
                      StateKey.FILE_SIZE: []}
        # this file has first block of WE data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1_1stWE0d.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059, 4673, 18, 1]],
                           [[2818, 2982], [4058, 5000]], self.particle_2a)
        self.stream_handle.close()

        # Re-open with the complete file, carrying the state forward, as if the
        # zeroed-out block has now arrived.
        next_state = self.parser._state
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, next_state, self.stream_handle,
                                             self.state_callback, self.pub_callback,
                                             self.exception_callback)

        # there are 18 valid records in the second WE chunk. We read one above, now we need
        # to drain the remaining 17 to trigger the reparsing of the earlier block
        for _ in range(0, 17):
            result = self.parser.get_records(1)

        # the next fetch should find the now-replaced earlier data
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818, 2982, 3, 2]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1b)
        self.stream_handle.close()