def test_get_many(self):
    """
    Read test data and pull out multiple data particles at one time.
    Assert that the results are those we expected.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # Note: since the recovered and telemetered parser and particles are common
    # to each other, testing one is sufficient, will be completely tested
    # in driver tests

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        # try to get 2000 particles, there are only 194 data records
        # so should get 195 including the meta data
        particles = parser.get_records(2000)

        log.debug("*** test_get_many Num particles %s", len(particles))
        self.assertEqual(len(particles), 195)
def test_get_many(self):
    """
    Read test data and pull out multiple data particles at one time.
    Assert that the results are those we expected.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # Note: since the recovered and telemetered parser and particles are common
    # to each other, testing one is sufficient, will be completely tested
    # in driver tests

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        # try to get 2000 particles, there are only 194 data records
        # so should get 195 including the meta data
        particles = parser.get_records(2000)

        log.debug("*** test_get_many Num particles %s", len(particles))
        self.assertEqual(len(particles), 195)
def test_simple(self):
    """
    Read test data and pull out data particles.
    Assert that the results are those we expected.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # Note: since the recovered and telemetered parser and particles are common
    # to each other, testing one is sufficient, will be completely tested
    # in driver tests

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        particles = parser.get_records(20)

        log.debug("*** test_simple Num particles %s", len(particles))

        # load a dictionary from the yml file
        test_data = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

        # check all the values against expected results.
        for i in range(len(particles)):
            self.assert_result(test_data['data'][i], particles[i])
def test_simple(self):
    """
    Read test data and pull out data particles.
    Assert that the results are those we expected.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # Note: since the recovered and telemetered parser and particles are common
    # to each other, testing one is sufficient, will be completely tested
    # in driver tests

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        particles = parser.get_records(20)

        log.debug("*** test_simple Num particles %s", len(particles))

        # load a dictionary from the yml file
        test_data = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

        # check all the values against expected results.
        for i in range(len(particles)):
            self.assert_result(test_data['data'][i], particles[i])
def test_set_state(self):
    """
    Test changing to a new state after initializing the parser and
    reading data, as if new data has been found and the state has changed.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # 11079364_PPB_PARS_recov.yml has the metadata and the first 19
    # instrument particles in it
    expected_results = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        self.assertTrue(len(particles) == 2)

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # position 1067 is the byte at the start of the 18th data record
        new_state = {StateKey.POSITION: 1067,
                     StateKey.METADATA_EXTRACTED: True}

        parser.set_state(new_state)

        particles = parser.get_records(2)

        self.assertTrue(len(particles) == 2)

        # offset in the expected results, into the 18th result
        offset = 18
        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset],
                               particles[i])
def test_additional_column(self):
    """
    Ensure that additional column of data will cause an exception.
    """
    file_path = os.path.join(RESOURCE_PATH, "11079364_PPB_PARS_ADDED_COLUMN.txt")

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, O_MODE) as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(self._recovered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)
def test_set_state(self):
    """
    Test changing to a new state after initializing the parser and
    reading data, as if new data has been found and the state has changed.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # 11079364_PPB_PARS_recov.yml has the metadata and the first 19
    # instrument particles in it
    expected_results = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'r') as stream_handle:
        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        self.assertTrue(len(particles) == 2)

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # position 1067 is the byte at the start of the 18th data record
        new_state = {StateKey.POSITION: 1067,
                     StateKey.METADATA_EXTRACTED: True}

        parser.set_state(new_state)

        particles = parser.get_records(2)

        self.assertTrue(len(particles) == 2)

        # offset in the expected results, into the 18th result
        offset = 18
        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset],
                               particles[i])
def test_bad_data(self):
    """
    Ensure that bad data is skipped when it exists and a
    RecoverableSampleException is thrown.
    """
    file_path = os.path.join(RESOURCE_PATH, "11079364_BAD_PPB_PARS.txt")

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, O_MODE) as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(self._recovered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)

        # 14 bad records
        self.assertEqual(len(self.exception_callback_value), 14)
def test_simple(self):
    """
    Read test data and pull out data particles.
    Assert that the results are those we expected.
    """
    # Recovered
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, O_MODE) as stream_handle:
        parser = ParadJCsppParser(self._recovered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        particles = parser.get_records(20)

        log.debug("*** test_simple Num particles %s", len(particles))

        # check all the values against expected results.
        self.assert_particles(particles, "11079364_PPB_PARS_recov.yml",
                              RESOURCE_PATH)

    # Telemetered
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt')

    with open(file_path, O_MODE) as stream_handle:
        parser = ParadJCsppParser(self._telemetered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        particles = parser.get_records(20)

        log.debug("*** test_simple Num particles %s", len(particles))

        # check all the values against expected results.
        self.assert_particles(particles, "11079364_PPD_PARS_telem.yml",
                              RESOURCE_PATH)
def test_bad_data(self):
    """
    Ensure that bad data is skipped when it exists and a
    RecoverableSampleException is thrown.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, O_MODE) as stream_handle:
        log.debug(self.exception_callback_value)

        parser = ParadJCsppParser(self._recovered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        parser.get_records(1)

        log.debug("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)

        # 14 bad records
        self.assertEqual(len(self.exception_callback_value), 14)
def test_additional_column(self):
    """
    Ensure that additional column of data will cause an exception.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS_ADDED_COLUMN.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, O_MODE) as stream_handle:
        log.debug(self.exception_callback_value)

        parser = ParadJCsppParser(self._recovered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        parser.get_records(1)

        log.debug("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)
def test_additional_column(self):
    """
    Ensure that additional column of data will cause an exception.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS_ADDED_COLUMN.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)
def create_yml(self):
    """
    This utility creates a yml file from the telemetered test input.
    """
    # with-statement guarantees the handle is closed even if parsing fails
    with open(os.path.join(RESOURCE_PATH, "11079364_PPD_PARS.txt"), O_MODE) as stream_handle:
        parser = ParadJCsppParser(self._telemetered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        particles = parser.get_records(20)

        self.particle_to_yml(particles, "11079364_PPD_PARS_telem.yml")
def test_bad_data(self):
    """
    Ensure that bad data is skipped when it exists and a
    RecoverableSampleException is thrown.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)

        # 14 bad records
        self.assertEqual(self.count, 14)
def test_additional_column(self):
    """
    Ensure that additional column of data will cause an exception.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS_ADDED_COLUMN.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)
def test_bad_data(self):
    """
    Ensure that bad data is skipped when it exists and a
    RecoverableSampleException is thrown.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        log.info(self.exception_callback_value)

        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        parser.get_records(1)

        log.info("Exception callback value: %s", self.exception_callback_value)

        self.assertTrue(self.exception_callback_value is not None)

        # 14 bad records
        self.assertEqual(self.count, 14)
def create_yml(self):
    """
    This utility creates a yml file from the telemetered test input.
    """
    # with-statement guarantees the handle is closed even if parsing fails
    with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'), O_MODE) as stream_handle:
        parser = ParadJCsppParser(self._telemetered_parser_config,
                                  stream_handle,
                                  self.exception_callback)

        particles = parser.get_records(20)

        self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
def create_yml(self):
    """
    This utility creates a yml file from the telemetered test input.
    """
    # with-statement guarantees the handle is closed even if parsing fails
    with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'), 'r') as stream_handle:
        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_TELEMETERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        particles = parser.get_records(20)

        self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
def create_yml(self):
    """
    This utility creates a yml file from the telemetered test input.
    """
    # with-statement guarantees the handle is closed even if parsing fails
    with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'), 'r') as stream_handle:
        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_TELEMETERED),
            None, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        particles = parser.get_records(20)

        self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
def test_mid_state_start(self):
    """
    This test makes sure that we retrieve the correct particles upon
    starting with an offset state.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # position 315 is the end of the first data record, which would have produced the
    # metadata particle and the first instrument particle
    initial_state = {StateKey.POSITION: 315,
                     StateKey.METADATA_EXTRACTED: True}

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        parser = ParadJCsppParser(
            self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
            initial_state, stream_handle,
            self.state_callback, self.pub_callback,
            self.exception_callback)

        # expect to get the 2nd and 3rd instrument particles next
        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        self.assertTrue(len(particles) == 2)

        expected_results = self.get_dict_from_yml('mid_state_start.yml')

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # now expect the state to be the end of the 4 data record and metadata sent
        the_new_state = {StateKey.POSITION: 409,
                         StateKey.METADATA_EXTRACTED: True}
        log.debug("********** expected state: %s", the_new_state)
        log.debug("******** new parser state: %s", parser._state)
        self.assertTrue(parser._state == the_new_state)
def test_mid_state_start(self):
    """
    This test makes sure that we retrieve the correct particles upon
    starting with an offset state.
    """
    file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

    # position 315 is the end of the first data record, which would have produced the
    # metadata particle and the first instrument particle
    initial_state = {StateKey.POSITION: 315,
                     StateKey.METADATA_EXTRACTED: True}

    # with-statement guarantees the handle is closed even if an assertion fails
    with open(file_path, 'rb') as stream_handle:
        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  initial_state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        # expect to get the 2nd and 3rd instrument particles next
        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        self.assertTrue(len(particles) == 2)

        expected_results = self.get_dict_from_yml('mid_state_start.yml')

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # now expect the state to be the end of the 4 data record and metadata sent
        the_new_state = {StateKey.POSITION: 409,
                         StateKey.METADATA_EXTRACTED: True}
        log.debug("********** expected state: %s", the_new_state)
        log.debug("******** new parser state: %s", parser._state)
        self.assertTrue(parser._state == the_new_state)