def _build_eng_telemetered_parser(self, parser_state, infile, data_key):
    """
    Build and return the engineering telemetered parser indicated by data_key.

    @param parser_state previously saved parser state to resume from
    @param infile open handle to the glider file being parsed
    @param data_key key selecting which parser configuration to use
    @retval a configured GliderEngineeringParser instance
    """
    # all engineering particle classes this parser can emit
    particle_classes = [EngineeringMetadataDataParticle,
                        EngineeringTelemeteredDataParticle,
                        EngineeringScienceTelemeteredDataParticle]

    parser_config = self._parser_config.get(data_key)
    parser_config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.glider',
        DataSetDriverConfigKeys.PARTICLE_CLASS: particle_classes
    })

    log.trace("EngineeringDataSetDriver._build_eng_telemetered_parser(): "
              "parser_state= %s, input file= %s, data_key= %s",
              parser_state, infile, data_key)

    def save_state(state, ingested):
        # bind data_key so the driver files the state under the right key
        self._save_parser_state(state, data_key, ingested)

    return GliderEngineeringParser(parser_config,
                                   parser_state,
                                   infile,
                                   save_state,
                                   self._data_callback,
                                   self._sample_exception_callback)
def process(self):
    """
    Process a file: open it, build a GliderEngineeringParser around it,
    and drive a DataSetDriver over the whole stream.

    @retval the particle data handler object populated during processing
    """
    log = get_logger()

    def exception_callback(exception):
        # flag the failure on the handler; processing continues
        log.debug("Exception %s", exception)
        self._particle_data_hdlr_obj.setParticleDataCaptureFailure()

    with open(self._source_file_path, "rb") as stream:
        # state/data callbacks are stubbed with no-op lambdas so the parser
        # doesn't complain about missing arguments; this driver does not
        # track parser state or publish samples itself
        parser = GliderEngineeringParser(self._parser_config,
                                         None,
                                         stream,
                                         lambda state, ingested: None,
                                         lambda data: None,
                                         exception_callback)

        # hand the parser to a driver and let it consume the file
        DataSetDriver(parser, self._particle_data_hdlr_obj).processFileStream()

        return self._particle_data_hdlr_obj
def reset_eng_parser(self, state=None):
    """
    Clear the captured callback values and build a fresh
    GliderEngineeringParser over the current test data.

    @param state optional initial parser state; a fresh empty dict is
                 created per call when omitted (the original `state={}`
                 default was a shared mutable default argument, so state
                 mutated by one parser leaked into later calls)
    """
    if state is None:
        state = {}
    self.state_callback_values = []
    self.publish_callback_values = []
    self.error_callback_values = []
    self.parser = GliderEngineeringParser(self.config,
                                          state,
                                          self.test_data,
                                          self.state_callback,
                                          self.pub_callback,
                                          self.error_callback)
class GliderParserUnitTestCase(ParserUnitTestCase):
    """
    Glider Parser unit test base class and common tests.

    Captures parser callbacks (state, publish, error) into lists so tests
    can assert on what the parser emitted.
    """
    # parser configuration; subclasses populate this before reset_parser()
    config = {}

    def state_callback(self, state, file_ingested):
        """ Call back method to watch what comes in via the state callback """
        self.state_callback_values.append(state)
        self.file_ingested = file_ingested

    def pub_callback(self, particle):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_values.append(particle)

    def error_callback(self, error):
        """ Call back method to watch what comes in via the error callback """
        self.error_callback_values.append(error)

    def setUp(self):
        ParserUnitTestCase.setUp(self)

    def set_data(self, *args):
        """
        Accept strings of data in args[] joined together and then a file
        handle to the concatenated string is returned.
        """
        io = StringIO()
        for data in args:
            io.write(data)
        #log.debug("Test data file: %s", io.getvalue())
        io.seek(0)
        self.test_data = io

    def set_data_file(self, filename):
        """
        Set test to read from a file.

        NOTE(review): the handle is never explicitly closed; acceptable in
        test code but worth confirming no test relies on it being closed.
        """
        self.test_data = open(filename, "r")

    def reset_parser(self, state=None):
        """
        Clear captured callback values and build a fresh GliderParser.

        @param state optional initial parser state; a fresh empty dict is
                     created per call when omitted (the original `state={}`
                     was a shared mutable default argument)
        """
        if state is None:
            state = {}
        self.state_callback_values = []
        self.publish_callback_values = []
        self.error_callback_values = []
        self.parser = GliderParser(self.config,
                                   state,
                                   self.test_data,
                                   self.state_callback,
                                   self.pub_callback,
                                   self.error_callback)

    def reset_eng_parser(self, state=None):
        """
        Clear captured callback values and build a fresh
        GliderEngineeringParser.

        @param state optional initial parser state; a fresh empty dict is
                     created per call when omitted (the original `state={}`
                     was a shared mutable default argument)
        """
        if state is None:
            state = {}
        self.state_callback_values = []
        self.publish_callback_values = []
        self.error_callback_values = []
        self.parser = GliderEngineeringParser(self.config,
                                              state,
                                              self.test_data,
                                              self.state_callback,
                                              self.pub_callback,
                                              self.error_callback)

    def get_published_value(self):
        """ Pop and return the oldest published value. """
        return self.publish_callback_values.pop(0)

    def get_state_value(self):
        """ Pop and return the oldest state callback value. """
        return self.state_callback_values.pop(0)

    def assert_state(self, expected_position):
        """
        Verify the parser's internal read state position matches
        expected_position.
        """
        state = self.parser._read_state
        log.debug("Current state: %s", state)

        position = state.get(StateKey.POSITION)
        self.assertEqual(position, expected_position)

    def assert_no_more_data(self):
        """
        Verify we don't find any other records in the data file.
        """
        records = self.parser.get_records(1)
        self.assertEqual(len(records), 0)

    def assert_generate_particle(self, particle_type, values_dict=None, expected_position=None):
        """
        Verify that we can generate a particle of the correct type and that
        the state is set properly.
        @param particle_type type of particle we are producing (kept for
               interface compatibility; not checked here)
        @param values_dict key value pairs to test in the particle.
        @param expected_position upon publication of the particle, what
               should the state position indicate.
        """
        # ensure the callback queues are empty before we start
        self.assertEqual(len(self.publish_callback_values), 0)
        self.assertEqual(len(self.state_callback_values), 0)

        records = self.parser.get_records(1)

        self.assertIsNotNone(records)
        self.assertIsInstance(records, list)
        self.assertEqual(len(records), 1)

        # exactly one publish and one state callback per record
        self.assertEqual(len(self.publish_callback_values), 1)
        self.assertEqual(len(self.state_callback_values), 1)

        particles = self.get_published_value()
        self.assertEqual(len(particles), 1)

        # Verify the data
        if values_dict:
            self.assert_particle_values(particles[0], values_dict)

        # Verify the parser state
        state = self.get_state_value()
        log.debug("Published state: %s", state)

        if expected_position:
            position = state.get(StateKey.POSITION)
            self.assertEqual(position, expected_position)

    def assert_particle_values(self, particle, expected_values):
        """
        Verify the data in expected values is the data in the particle.

        Fails when an expected value_id does not appear in the particle at
        all; previously a missing key was silently ignored, letting typo'd
        expectations pass vacuously.
        """
        data_dict = particle.generate_dict()
        log.debug("Data in particle: %s", data_dict)
        log.debug("Expected Data: %s", expected_values)

        for key in expected_values.keys():
            matched = False
            for value in data_dict['values']:
                if value['value_id'] == key:
                    self.assertEqual(value['value'], expected_values[key])
                    matched = True
            if not matched:
                self.fail("Expected value_id '%s' not found in particle" % key)

    def assert_type(self, records, particle_type):
        """ Verify every particle in records reports the given type. """
        for particle in records:
            str_of_type = particle.type()
            self.assertEqual(particle_type, str_of_type)

    def assert_timestamp(self, ntp_timestamp, unix_timestamp):
        """
        Verify ntp_timestamp is (approximately) the NTP equivalent of
        unix_timestamp; np.allclose absorbs float conversion error.
        """
        ntp_stamp = ntplib.system_to_ntp_time(unix_timestamp)
        assertion = np.allclose(ntp_timestamp, ntp_stamp)
        self.assertTrue(assertion)

    def test_init(self):
        """
        Verify we can initialize
        """
        self.set_data(HEADER)
        self.reset_parser()
        self.assert_state(1003)

        self.set_data(HEADER2)
        self.reset_parser()
        self.assert_state(1004)

    def test_exception(self):
        # junk data that is not a valid glider header must raise
        with self.assertRaises(SampleException):
            self.set_data("Foo")
            self.reset_parser()

    def test_chunker(self):
        """
        Verify the chunker is returning values we expect.
        """
        self.set_data(HEADER, CHUNKER_TEST)
        self.reset_parser()

        records = CHUNKER_TEST.strip("\n").split("\n")
        log.debug("Expected Records: %s", records)
        self.assertEqual(len(records), 2)

        # Load all data into the chunker
        self.parser.get_block(1024)
        self.assertEqual(CHUNKER_TEST.strip("\n"),
                         self.parser._chunker.buffer.strip("\n"))

        (timestamp, data_record, start, end) = self.parser._chunker.get_next_data_with_index()
        log.debug("Data Record: %s", data_record)
        self.assertEqual(records[0] + "\n", data_record)

        (timestamp, data_record, start, end) = self.parser._chunker.get_next_data_with_index()
        self.assertEqual(records[1] + "\n", data_record)