def test_update(self):
    """
    Test a file which has had a section of data replaced by 0s, as if a
    block of data has not been received yet, then using the returned state
    make a new parser with the test data that has the 0s filled in.
    """
    log.debug('------------------------------------------------------Starting test_update')
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    # this file has first block of WE data replaced by 0s
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1_1stWE0d.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[4059, 4673, 18, 1]],
                       [[2818, 2982], [4058, 5000]],
                       self.particle_2a)
    self.stream_handle.close()

    # re-open with the complete file, carrying the state forward
    next_state = self.parser._state
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, next_state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # first get the old 'in process' records; once those are done, the
    # unprocessed data will be checked.  There are 18 valid records in the
    # second WE chunk — one was read above, so drain the remaining 17 to
    # trigger the reparsing of the earlier block
    for _ in range(17):
        self.parser.get_records(1)

    # now the next fetch should find the now-replaced earlier data
    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[2818, 2982, 3, 1]],
                       [[2818, 2982], [4058, 4059], [4673, 5000]],
                       self.particle_1a)

    # this should be the second of the newly filled in particles
    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[2818, 2982, 3, 2]],
                       [[2818, 2982], [4058, 4059], [4673, 5000]],
                       self.particle_1b)
    self.stream_handle.close()
def test_set_state(self):
    """
    Test changing to a new state after initializing the parser and reading
    data, as if new data has been found and the state has changed.
    """
    log.debug('-------------------------------------------------Starting test_set_state')
    self.state = {
        StateKey.UNPROCESSED_DATA: [[4059, 4673]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    new_state = {
        StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 4059], [4673, 5000]],
        StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0]],
        StateKey.FILE_SIZE: 1939566
    }

    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # only 18 records left in the file at this point: read 17, then the
    # last one, then make sure the following fetch comes back empty
    result = self.parser.get_records(17)
    self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
    result = self.parser.get_records(1)
    result = self.parser.get_records(1)
    self.assertEqual(result, [])

    # now push in the new state and confirm parsing resumes there
    self.parser.set_state(new_state)
    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[2818, 2982, 3, 1]],
                       [[2818, 2982], [4058, 4059], [4673, 5000]],
                       self.particle_1a)
    self.stream_handle.close()
def test_in_process_start(self):
    """
    Test starting a parser with a state in the middle of processing.
    """
    log.debug('-------------------------------------------------------------Starting test_in_process_start')
    # [2818:2982] contains the first WE SIO header
    new_state = {
        StateKey.IN_PROCESS_DATA: [[2818, 2982, 3, 0], [4059, 4673, 18, 0]],
        StateKey.UNPROCESSED_DATA: [[2818, 2982], [4058, 5000]],
        StateKey.FILE_SIZE: 1939566
    }
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # first particle comes from the first in-process block
    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                       [[2818, 2982], [4058, 5000]],
                       self.particle_1a)

    # the next two drain the rest of that block
    result = self.parser.get_records(2)
    self.assertEqual(result[0], self.particle_1b)
    self.assertEqual(result[1], self.particle_1c)
    self.assert_state([[4059, 4673, 18, 0]], [[4058, 5000]])
    self.assertEqual(self.publish_callback_value[-1], self.particle_1c)

    # then the second in-process block begins
    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[4059, 4673, 18, 1]],
                       [[4058, 5000]],
                       self.particle_2a)
    self.stream_handle.close()
def test_long_stream(self):
    """
    Test a long stream.
    """
    log.debug('---------------------------------------------------------Starting test_long_stream')
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.stream_handle.seek(0)
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # pull a dozen particles at once and spot-check the ends of the batch
    result = self.parser.get_records(12)
    self.assertEqual(result[0], self.particle_1a)
    self.assertEqual(result[1], self.particle_1b)
    self.assertEqual(result[2], self.particle_1c)
    self.assertEqual(result[-2], self.particle_1k)
    self.assertEqual(result[-1], self.particle_1l)
    self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
    self.assertEqual(self.publish_callback_value[-1], self.particle_1l)
    self.assert_state([[4059, 4673, 18, 9]], [[4058, 5000]])
    self.stream_handle.close()
def test_get_many(self):
    """
    Read test data from the file and pull out multiple data particles at
    one time.  Assert that the results are those we expected.
    """
    log.debug('--------------------------------------------------------Starting test_get_many')
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    result = self.parser.get_records(4)
    expected = [self.particle_1a, self.particle_1b, self.particle_1c, self.particle_2a]
    self.assertEqual(result, expected)
    # each particle should also have been published, in order
    for idx, particle in enumerate(expected):
        self.assertEqual(self.publish_callback_value[idx], particle)
    self.assert_state([[4059, 4673, 18, 1]], [[4058, 5000]])
    self.stream_handle.close()
def test_mid_state_start(self):
    """
    Test starting the parser in a state in the middle of processing.
    """
    log.debug('-----------------------------------------------------------Starting test_mid_state_start')
    new_state = {
        StateKey.IN_PROCESS_DATA: [],
        StateKey.UNPROCESSED_DATA: [[2818, 2982]],
        StateKey.FILE_SIZE: 1939566
    }
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    self.parser = DostaLnWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # the single unprocessed block yields three particles, one per fetch
    result = self.parser.get_records(1)
    self.assert_result(result, [[2818, 2982, 3, 1]], [[2818, 2982]], self.particle_1a)
    result = self.parser.get_records(1)
    self.assert_result(result, [[2818, 2982, 3, 2]], [[2818, 2982]], self.particle_1b)
    result = self.parser.get_records(1)
    self.assert_result(result, [], [], self.particle_1c)
    self.stream_handle.close()
def test_bad_data(self):
    """
    Ensure that the bad record (in this case a corrupted status message)
    causes a sample exception.
    """
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1_BADFLAGS.dat'))
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    log.debug('-------------------------------------------------------------Starting test_bad_data')
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # one fetch is enough to hit the corrupted record
    result = self.parser.get_records(1)
    self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))
def test_bad_e_record(self):
    """
    Ensure that the bad record causes a sample exception.

    The file 'bad_e_record.dat' includes a record containing one byte less
    than the expected 30 for the flord_l_wfp_sio_mule.  The 'Number of Data
    Bytes' and the 'CRC Checksum' values in the SIO Mule header have been
    modified accordingly.
    """
    file_path = os.path.join(RESOURCE_PATH, 'bad_e_record.dat')
    self.stream_handle = open(file_path)
    # FIX: FILE_SIZE was previously initialized to an empty list, which is
    # inconsistent with every other test in this file (they all use the
    # integer size of the input file).  Use the real size of the test file.
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: os.path.getsize(file_path)
    }
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    # one fetch is enough to hit the truncated record
    result = self.parser.get_records(1)
    self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))
def test_simple(self):
    """
    Read test data from the file and pull out data particles one at a
    time.  Assert that the results are those we expected.
    """
    log.debug('------------------------------------------------------Starting test_simple')
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
    # NOTE: using the unprocessed data state of 0,5000 limits the file to
    # reading just 5000 bytes, so even though the file is longer it only
    # reads the first 5000
    self.state = {
        StateKey.UNPROCESSED_DATA: [[0, 5000]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.FILE_SIZE: 1939566
    }
    self.parser = DostaLnWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                          self.state_callback, self.pub_callback,
                                          self.exception_callback)

    result = self.parser.get_records(1)
    log.debug("IN_PROCESS_DATA: %s", self.parser._state[StateKey.IN_PROCESS_DATA])
    log.debug("Unprocessed: %s", self.parser._state[StateKey.UNPROCESSED_DATA])
    # An extra byte exists between SIO headers ([4058:4059] and [7423,7424])
    self.assert_result(result,
                       [[2818, 2982, 3, 1], [4059, 4673, 18, 0]],
                       [[2818, 2982], [4058, 5000]],
                       self.particle_1a)

    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[2818, 2982, 3, 2], [4059, 4673, 18, 0]],
                       [[2818, 2982], [4058, 5000]],
                       self.particle_1b)

    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[4059, 4673, 18, 0]],
                       [[4058, 5000]],
                       self.particle_1c)

    result = self.parser.get_records(1)
    self.assert_result(result,
                       [[4059, 4673, 18, 1]],
                       [[4058, 5000]],
                       self.particle_2a)
    self.stream_handle.close()
def _build_telemetered_parser(self, parser_state, stream_in):
    """
    Build and return the telemetered parser.

    @param parser_state starting parser state to pass to parser
    @param stream_in handle of open file to pass to parser
    """
    # FIX: copy before updating so the config dict stored in
    # self._parser_config is not mutated in place as a side effect of
    # building a parser (the original called update() on the shared dict).
    config = dict(self._parser_config.get(DataSourceKey.DOSTA_LN_WFP_SIO_MULE))
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.dosta_ln_wfp_sio_mule',
        DataSetDriverConfigKeys.PARTICLE_CLASS: 'DostaLnWfpSioMuleParserDataParticle'
    })
    log.debug("My Config: %s", config)
    parser = DostaLnWfpSioMuleParser(
        config,
        parser_state,
        stream_in,
        lambda state: self._save_parser_state(state, DataSourceKey.DOSTA_LN_WFP_SIO_MULE),
        self._data_callback,
        self._sample_exception_callback)
    return parser