def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in.
        """
        # Start with the entire 6300-byte range unprocessed and no metadata sent yet.
        state = {StateKey.UNPROCESSED_DATA:[[0, 6300]],
            StateKey.IN_PROCESS_DATA:[], StateKey.METADATA_SENT: False, StateKey.FILE_SIZE: 9400}
        # this file has a block of DO data replaced by 0s
        stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_replaced.dat'))
        self.parser = DostadParser(self.config, state, stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        # 0-69 contains an incomplete block (end of a sample)
        # 390-570 is the zeroed block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [[197,314,2,1], [637,754,1,0], [6131,6248,1,0]],
                           [[0,69], [197,314], [390,507], [637,754], [1329,1332], [2294,2363],
                            [4092,4161], [4351, 4927], [6131,6300]],
                            self.particle_metadata)
        result = self.parser.get_records(1)
        self.assert_result(result, [[637,754,1,0], [6131,6248,1,0]],
                           [[0,69], [390,507], [637,754], [1329,1332], [2294,2363],
                            [4092,4161], [4351, 4927], [6131,6300]],
                            self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6131,6248,1,0]],
                           [[0,69], [390,507], [1329,1332], [2294,2363],
                            [4092,4161], [4351, 4927], [6131,6300]],
                            self.particle_c)
        stream_handle.close()

        # Carry the parser state over to a second parser reading the file in
        # which the previously-zeroed block is filled in.
        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = DostadParser(self.config, next_state, stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # first get the old 'in process' records from 6131-6248
        # Once those are done, the un processed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [390,507], [1329,1332], [2294,2363],
                            [4092,4161], [4351, 4927], [6248,6300]],
                            self.particle_d)

        # this should be the first of the newly filled in particles from 390-507
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [1329,1332], [2294,2363],
                            [4092,4161], [4351, 4927], [6248,6300]],
                           self.particle_b)
        stream_handle.close()
        # no exceptions should have been raised while parsing
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 2 ----
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a
        block of data has not been received yet, then using the returned state
        make a new parser with the test data that has the 0s filled in.
        """
        # Start with the entire 6300-byte range unprocessed.
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 6300]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_replaced.dat'))
        self.parser = DostadParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(1)
        self.assert_result(
            result, [[637, 754, 1, 0, 1], [6150, 6267, 1, 0, 1]],
            [[0, 69], [390, 507], [637, 754], [944, 2370], [2560, 2947],
             [3137, 4173], [4363, 5437], [5683, 6072], [6150, 6300]],
            self.timestamp4, self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[6150, 6267, 1, 0, 1]],
            [[0, 69], [390, 507], [944, 2370], [2560, 2947], [3137, 4173],
             [4363, 5437], [5683, 6072], [6150, 6300]], self.timestamp4,
            self.particle_c_new)
        self.stream_handle.close()

        # Carry the parser state over to a second parser reading the file in
        # which the previously-zeroed block is filled in.
        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config, next_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        # first get the old 'in process' records
        # Once those are done, the un processed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(
            result, [],
            [[0, 69], [390, 507], [944, 2370], [2560, 2947], [3137, 4173],
             [4363, 5437], [5683, 6072], [6267, 6300]], self.timestamp4,
            self.particle_d)

        # this should be the first of the newly filled in particles from
        # the 390-507 block
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072], [6267, 6300]],
                           self.timestamp2, self.particle_b_new)
        self.stream_handle.close()
# ---- Example 3 ----
    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at
        one time, then assert that the results are those we expected.
        """
        initial_state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.METADATA_SENT: False,
            StateKey.FILE_SIZE: 9400
        }
        file_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config.get(DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED),
            initial_state, file_handle, self.state_callback,
            self.pub_callback, self.exception_callback)

        particles = self.parser.get_records(4)
        file_handle.close()

        expected = [self.particle_metadata, self.particle_a,
                    self.particle_b, self.particle_c]
        self.assertEqual(particles, expected)
        # 0-69 contains an incomplete block (end of a sample)
        self.assert_state([], [[0, 69], [944, 1000]])
        # Every particle should also have been published, in order.
        for index, particle in enumerate(expected):
            self.assertEqual(self.publish_callback_value[index], particle)
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 4 ----
    def test_set_state(self):
        """
        Test changing the state after initializing.
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        # State to jump to after the initial records have been read.
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6300]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: self.timestamp2
        }

        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        # asking for more records than remain should return an empty list
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072], [6267, 6300]],
                           self.timestamp4, self.particle_d)
# ---- Example 5 ----
    def test_in_process_start(self):
        """
        Test starting a parser with a state in the middle of processing.
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[390, 507, 1, 0, 0],
                                       [637, 754, 1, 0, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 69], [390, 507], [637, 754],
                                        [944, 6300]],
            StateKey.TIMESTAMP:
            self.timestamp3
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)

        # even though the state says this particle is not a new sequence, since it is the
        # first after setting the state it will be new
        self.assert_result(result, [[637, 754, 1, 0, 0]],
                           [[0, 69], [637, 754], [944, 6300]], self.timestamp2,
                           self.particle_b_new)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072], [6267, 6300]],
                          self.timestamp4)
        # the most recently published particle should be the last one parsed
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
# ---- Example 6 ----
 def test_mid_state_start(self):
     """
     test starting a parser with a state in the middle of processing
     """
     # Nothing in process; bytes 69-314 have already been handled, and the
     # metadata particle has already been sent.
     new_state = {
         StateKey.IN_PROCESS_DATA: [],
         StateKey.UNPROCESSED_DATA: [[0, 69], [314, 6300]],
         StateKey.METADATA_SENT: True,
         StateKey.FILE_SIZE: 9400
     }
     stream_handle = open(
         os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
     self.parser = DostadParser(
         self.config.get(DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED),
         new_state, stream_handle, self.state_callback, self.pub_callback,
         self.exception_callback)
     result = self.parser.get_records(1)
     # 0-69 contains an incomplete block (end of a sample)
     # 1329-1332 there are 3 extra \n's between sio blocks
     # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
     # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
     # the final \x03
     self.assert_result(result, [[637, 754, 1, 0], [6131, 6248, 1, 0]],
                        [[0, 69], [637, 754], [1329, 1332], [2294, 2363],
                         [4092, 4161], [4351, 4927], [6131, 6300]],
                        self.particle_b)
     result = self.parser.get_records(2)
     self.assertEqual(result[0], self.particle_c)
     self.assertEqual(result[1], self.particle_d)
     self.assert_state([],
                       [[0, 69], [1329, 1332], [2294, 2363], [4092, 4161],
                        [4351, 4927], [6248, 6300]])
     stream_handle.close()
     # no exceptions should have been raised while parsing
     self.assertEqual(self.exception_callback_value, None)
# ---- Example 7 ----
    def test_long_stream(self):
        """
        Parse the whole shorter file in one call and verify the first and last
        particles plus the final parser state.
        """
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # Read the file once just to determine its length for the state,
        # then rewind so the parser starts from the beginning.
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, data_len]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = DostadParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072], [8273, 9400]],
                          self.timestamp6)
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)
# ---- Example 8 ----
    def test_in_process_start(self):
        """
        Test starting a parser with a state in the middle of processing.

        Fixes: the original closed the stream only after the assertions (so
        the handle leaked on an assertion failure) and asserted
        exception_callback_value twice.
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[390, 507, 1, 0], [637, 754, 1, 0],
                                       [6131, 6248, 1, 0]],
            StateKey.UNPROCESSED_DATA: [[390, 6300]],
            StateKey.METADATA_SENT:
            True,
            StateKey.FILE_SIZE:
            9400
        }
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config.get(DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED),
            new_state, stream_handle, self.state_callback, self.pub_callback,
            self.exception_callback)
        result = self.parser.get_records(1)

        self.assert_result(result, [[637, 754, 1, 0], [6131, 6248, 1, 0]],
                           [[507, 6300]], self.particle_b)

        result = self.parser.get_records(2)
        # close before asserting so the handle is released even if an
        # assertion below fails
        stream_handle.close()
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([], [[507, 637], [754, 6131], [6248, 6300]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 9 ----
    def test_long_stream(self):
        """
        Read the whole shorter file starting from no state and verify all
        seven particles and the final parser state.
        """
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config.get(DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED), None,
            stream_handle, self.state_callback, self.pub_callback,
            self.exception_callback)

        result = self.parser.get_records(7)
        stream_handle.close()
        expected = [self.particle_metadata, self.particle_a, self.particle_b,
                    self.particle_c, self.particle_d, self.particle_e,
                    self.particle_f]
        for index, particle in enumerate(expected):
            self.assertEqual(result[index], particle)
        # 0-69 contains an incomplete block (end of a sample)
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_state([],
                          [[0, 69], [1329, 1332], [2294, 2363], [4092, 4161],
                           [4351, 4927], [9020, 9374]])
        self.assertEqual(self.publish_callback_value[5], self.particle_e)
        self.assertEqual(self.publish_callback_value[6], self.particle_f)
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 10 ----
    def _build_parser(self, parser_state, stream_in, data_key=None):
        """
        Build and return the parser matching data_key.

        @param parser_state starting parser state to pass to the parser
        @param stream_in handle of the open file to parse
        @param data_key DataTypeKey value selecting recovered vs telemetered
        @raises ConfigurationException if data_key is not a known key
        """
        config = self._parser_config.get(data_key)

        # DOSTA_ABCDJM_SIO_RECOVERED: build the recovered parser.
        if data_key == DataTypeKey.DOSTA_ABCDJM_SIO_RECOVERED:
            particle_classes = {
                METADATA_PARTICLE_CLASS_KEY:
                DostadParserRecoveredMetadataDataParticle,
                DATA_PARTICLE_CLASS_KEY: DostadParserRecoveredDataParticle
            }
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.dostad',
                DataSetDriverConfigKeys.PARTICLE_CLASS: None,
                DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT:
                particle_classes
            })
            # The recovered state-saver also records the ingested flag.
            return DostadRecoveredParser(
                config, parser_state, stream_in,
                lambda state, ingested: self._save_parser_state(
                    state, data_key, ingested), self._data_callback,
                self._sample_exception_callback)

        # DOSTA_ABCDJM_SIO_TELEMETERED: build the SIO mule parser.
        if data_key == DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED:
            particle_classes = {
                METADATA_PARTICLE_CLASS_KEY:
                DostadParserTelemeteredMetadataDataParticle,
                DATA_PARTICLE_CLASS_KEY:
                DostadParserTelemeteredDataParticle
            }
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.dostad',
                DataSetDriverConfigKeys.PARTICLE_CLASS: None,
                DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT:
                particle_classes
            })
            return DostadParser(
                config, parser_state, stream_in,
                lambda state: self._save_parser_state(state, data_key),
                self._data_callback, self._sample_exception_callback)

        # Any other key is a configuration error; refuse to build a parser.
        raise ConfigurationException(
            "Invalid data_key supplied to build parser")
# ---- Example 11 ----
    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        # Fix: the original body mixed tab and space indentation, which is a
        # TabError under Python 3; indentation is normalized to spaces only.
        stream_handle = open(os.path.join(RESOURCE_PATH,
                                          'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,6300 limits the file to reading
        # just 6300 bytes, so even though the file is longer it only reads the first
        # 6300
        state = {StateKey.UNPROCESSED_DATA: [[0, 6300]],
                 StateKey.IN_PROCESS_DATA: [],
                 StateKey.METADATA_SENT: False,
                 StateKey.FILE_SIZE: 9400}
        self.parser = DostadParser(self.config, state, stream_handle,
                                   self.state_callback, self.pub_callback,
                                   self.exception_callback)

        # the metadata particle and first particle come from the same block of data, first get
        # the metadata particle (this is why there are 2 particles parsed in the first in process state)
        # after that there are 3 more dosta samples parsed from 390-507, 637-754, and 6131-6248
        # 0-69 contains an incomplete block (end of a sample)
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        result = self.parser.get_records(1)
        in_process = [[197, 314, 2, 1], [390, 507, 1, 0], [637, 754, 1, 0],
                      [6131, 6248, 1, 0]]
        unprocessed = [[0, 69], [197, 314], [390, 507], [637, 754],
                       [1329, 1332], [2294, 2363], [4092, 4161],
                       [4351, 4927], [6131, 6300]]
        self.assert_result(result, in_process, unprocessed,
                           self.particle_metadata)
        self.assertEqual(self.parser._state[StateKey.METADATA_SENT], True)

        # then get the first dosta data particle, this clears out the block from 197-314
        result = self.parser.get_records(1)
        in_process = [[390, 507, 1, 0], [637, 754, 1, 0], [6131, 6248, 1, 0]]
        unprocessed = [[0, 69], [390, 507], [637, 754], [1329, 1332],
                       [2294, 2363], [4092, 4161], [4351, 4927], [6131, 6300]]
        self.assert_result(result, in_process, unprocessed, self.particle_a)
        self.assertEqual(self.parser._state[StateKey.METADATA_SENT], True)

        result = self.parser.get_records(1)
        in_process = [[637, 754, 1, 0], [6131, 6248, 1, 0]]
        unprocessed = [[0, 69], [637, 754], [1329, 1332], [2294, 2363],
                       [4092, 4161], [4351, 4927], [6131, 6300]]
        self.assert_result(result, in_process, unprocessed, self.particle_b)

        result = self.parser.get_records(1)
        in_process = [[6131, 6248, 1, 0]]
        unprocessed = [[0, 69], [1329, 1332], [2294, 2363], [4092, 4161],
                       [4351, 4927], [6131, 6300]]
        self.assert_result(result, in_process, unprocessed, self.particle_c)

        result = self.parser.get_records(1)
        in_process = []
        unprocessed = [[0, 69], [1329, 1332], [2294, 2363], [4092, 4161],
                       [4351, 4927], [6248, 6300]]
        self.assert_result(result, in_process, unprocessed, self.particle_d)

        stream_handle.close()
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 12 ----
 def _build_parser(self, parser_state, infile):
     """
     Build and return a DostadParser for this driver.

     @param parser_state starting parser state to pass to the parser
     @param infile handle of the open file to parse
     """
     # Fill in the blanks of the base config with the particle info.
     parser_config = self._parser_config
     parser_config.update({
         'particle_module': 'mi.dataset.parser.dostad',
         'particle_class': 'DostadParserDataParticle'
     })
     log.debug("My Config: %s", parser_config)
     self._parser = DostadParser(parser_config, parser_state, infile,
                                 self._save_parser_state,
                                 self._data_callback)
     return self._parser
# ---- Example 13 ----
    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,6300 limits the file to reading
        # just 6300 bytes, so even though the file is longer it only reads the first
        # 6300
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 6300]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = DostadParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback)

        # Each get_records(1) call should return the next particle and shrink
        # the in-process / unprocessed ranges accordingly.
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[390, 507, 1, 0, 0], [637, 754, 1, 0, 0], [6150, 6267, 1, 0, 1]],
            [[0, 69], [390, 507], [637, 754], [944, 2370], [2560, 2947],
             [3137, 4173], [4363, 5437], [5683, 6072], [6150, 6300]],
            self.timestamp4, self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[637, 754, 1, 0, 0], [6150, 6267, 1, 0, 1]],
            [[0, 69], [637, 754], [944, 2370], [2560, 2947], [3137, 4173],
             [4363, 5437], [5683, 6072], [6150, 6300]], self.timestamp4,
            self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6150, 6267, 1, 0, 1]],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072], [6150, 6300]],
                           self.timestamp4, self.particle_c)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072], [6267, 6300]],
                           self.timestamp4, self.particle_d)
        self.stream_handle.close()
# ---- Example 14 ----
    def test_set_state(self):
        """
        Test changing the state after initializing.
        """
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.METADATA_SENT: False,
            StateKey.FILE_SIZE: 9400
        }
        # State to jump to after the initial records have been read.
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6300]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.METADATA_SENT: True,
            StateKey.FILE_SIZE: 9400
        }

        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config.get(DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED), state,
            stream_handle, self.state_callback, self.pub_callback,
            self.exception_callback)
        # there should only be 4 records, make sure we stop there
        result = self.parser.get_records(4)
        # 0-69 contains an incomplete block (end of a sample)
        self.assert_state([], [[0, 69], [944, 1000]])
        # asking for more records than remain should return an empty list
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        stream_handle.close()
        # 0-69 contains an incomplete block (end of a sample)
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [],
                           [[0, 69], [1329, 1332], [2294, 2363], [4092, 4161],
                            [4351, 4927], [6248, 6300]], self.particle_d)
        self.assertEqual(self.exception_callback_value, None)
# ---- Example 15 ----
 def _build_telemetered_parser(self, parser_state, stream_in):
     """
     Build and return the telemetered parser
     @param parser_state starting parser state to pass to parser
     @param stream_in Handle of open file to pass to parser
     """
     parser_config = self._parser_config[DataSourceKey.DOSTA_ABCDJM_SIO_TELEMETERED]
     # Fill in blanks with particle info
     parser_config.update({
         'particle_module': 'mi.dataset.parser.dostad',
         'particle_class': ['DostadParserDataParticle',
                            'DostadMetadataDataParticle']
     })
     log.debug("My Config: %s", parser_config)
     # Saver tags the state with the telemetered data-source key.
     state_saver = lambda state: self._save_parser_state(
         state, DataSourceKey.DOSTA_ABCDJM_SIO_TELEMETERED)
     return DostadParser(
         parser_config,
         parser_state,
         stream_in,
         state_saver,
         self._data_callback,
         self._sample_exception_callback
     )
# ---- Example 16 ----
    def test_mid_state_start(self):
        """
        Test starting a parser with a state in the middle of processing.
        """
        # Nothing in process; bytes 69-314 have already been handled.
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[0, 69], [314, 1000]],
            StateKey.TIMESTAMP: self.timestamp1
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)
        self.assert_result(result, [
            [637, 754, 1, 0, 0],
        ], [[0, 69], [637, 754], [944, 1000]], self.timestamp3,
                           self.particle_b_new)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3,
                           self.particle_c)
        self.stream_handle.close()
# ---- Example 17 ----
    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at
        one time. Assert that the results are those we expected.
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = DostadParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        # each particle should also have been published, in order
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)