Example #1
    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[314, 390, 1, 0, 0],
                                       [561, 637, 1, 0, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 69], [314, 390], [561, 637],
                                        [944, 6150]],
            StateKey.TIMESTAMP:
            self.timestamp3
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)

        # even though the state says this particle is not a new sequence, since it is the
        # first after setting the state it will be new
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 6150]], self.timestamp2,
                           self.particle_b_new)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072]], self.timestamp4)
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
Example #2
    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
                           [[0, 69], [314, 390], [561, 637], [944, 1000]],
                           self.timestamp3, self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 1000]], self.timestamp3,
                           self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3,
                           self.particle_c)

        self.stream_handle.close()
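
The NOTE above is the key to reading these byte ranges: the parser only ever looks inside the UNPROCESSED_DATA windows, so shrinking a window is how a test clamps how much of the file gets read. A minimal sketch of the same clamp at a different, purely illustrative size:

    # Sketch only: limit parsing to the first 500 bytes (500 is an arbitrary value)
    state = {
        StateKey.UNPROCESSED_DATA: [[0, 500]],
        StateKey.IN_PROCESS_DATA: [],
        StateKey.TIMESTAMP: 0.0
    }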
Example #3
    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: self.timestamp2
        }

        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072]], self.timestamp4,
                           self.particle_d)
Example #4
    def test_long_stream(self):
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, data_len]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072], [8273, 9400]],
                          self.timestamp6)
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)
Example #5
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 6150]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_replaced.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[561, 637, 1, 0, 1], [6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [561, 637], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6150]],
            self.timestamp4,
            self.particle_a,
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6150]],
            self.timestamp4,
            self.particle_c_new,
        )
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, next_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        # first get the old 'in process' records
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [],
            [[0, 69], [314, 390], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp4,
            self.particle_d,
        )

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp2,
            self.particle_b_new,
        )
        self.stream_handle.close()
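
The carry-over pattern this test exercises is worth spelling out: parse a file with a gap, keep the parser's returned state, then hand that state to a fresh parser over the fuller file so only the still-unprocessed byte ranges are re-read. A minimal sketch of that flow, reusing names from the examples above (it is illustrative, not the library's documented API):

    # Sketch only: resume parsing from a saved state (fixtures as defined above)
    stream = open(os.path.join(RESOURCE_PATH, 'node59p1_replaced.dat'))
    parser = FlortdParser(self.config, self.state, stream,
                          self.state_callback, self.pub_callback)
    parser.get_records(2)          # consume what the gappy file can provide
    saved_state = parser._state    # unprocessed byte ranges live in here
    stream.close()

    stream = open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
    parser = FlortdParser(self.config, saved_state, stream,
                          self.state_callback, self.pub_callback)
    parser.get_records(2)          # picks up the newly filled-in block
    stream.close()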
Example #6
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 6150]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_replaced.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0], [6053,6131,1,0]],
                           [[0,69],[314,390],[561,637],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_a)
        result = self.parser.get_records(1)
        # 0-69, 6131-6150 contains an incomplete block
        # 314-390 is the zeroed block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [[6053,6131,1,0]],
                           [[0,69],[314,390],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_c)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, next_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # first get the old 'in process' records from 6053-6131
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [314,390], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_b)
        self.stream_handle.close()
Example #7
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_replaced.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 1], [6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [561, 637], [944, 2370], [2560, 2947],
             [3137, 4173], [4363, 5437], [5683, 6150]], self.timestamp4,
            self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6072, 6150, 1, 0, 1]],
                           [[0, 69], [314, 390], [944, 2370], [2560, 2947],
                            [3137, 4173], [4363, 5437], [5683, 6150]],
                           self.timestamp4, self.particle_c_new)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, next_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        # first get the old 'in process' records
        # Once those are done, the un processed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [314, 390], [944, 2370], [2560, 2947],
                            [3137, 4173], [4363, 5437], [5683, 6072]],
                           self.timestamp4, self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072]], self.timestamp2,
                           self.particle_b_new)
        self.stream_handle.close()
Example #8
    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: self.timestamp2,
        }

        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(
            result,
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp4,
            self.particle_d,
        )
Example #9
    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA: [[0, data_len]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state(
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072], [8273, 9400]],
            self.timestamp6,
        )
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)
Example #10
    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA:[[0, data_len]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: data_len}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[3], self.particle_d)
        self.assertEqual(result[4], self.particle_e)
        self.assertEqual(result[5], self.particle_f)
        # 0-69 contains an incomplete block (end of a sample)
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_state([],
            [[0, 69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [9020,9400]])
        self.assertEqual(self.publish_callback_value[4], self.particle_e)
        self.assertEqual(self.publish_callback_value[5], self.particle_f)
Example #11
    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[[314,390,1,0], [561,637,1,0]],
            StateKey.UNPROCESSED_DATA:[[0,69],[314,390],[561,637],[944,6150]],
            StateKey.FILE_SIZE: 9400}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)

        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,6150]],
                           self.particle_b)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_state([],
            [[0,69],[1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
Example #12
    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]], StateKey.IN_PROCESS_DATA:[]}
        new_state = {StateKey.UNPROCESSED_DATA:[[0,69],[944,6150]],
            StateKey.IN_PROCESS_DATA:[]}

        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([],
            [[0,69],[944,1000]])
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]],
                           self.particle_d)
Example #13
    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        new_state = {StateKey.UNPROCESSED_DATA:[[0,69],[944,6150]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}

        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([],
            [[0,69],[944,1000]])
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]],
                           self.particle_d)
Example #14
    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback)

        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
            [[0, 69], [314, 390], [561, 637], [944, 1000]],
            self.timestamp3,
            self.particle_a,
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 1000]], self.timestamp3, self.particle_b
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3, self.particle_c)

        self.stream_handle.close()
Example #15
    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 69], [314, 390], [561, 637], [944, 6150]],
            StateKey.TIMESTAMP: self.timestamp3,
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        result = self.parser.get_records(1)

        # even though the state says this particle is not a new sequence, since it is the
        # first after setting the state it will be new
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 6150]], self.timestamp2, self.particle_b_new
        )

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state(
            [], [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]], self.timestamp4
        )
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
Example #16
    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        # 0-69, 944-1000 are incomplete samples, 314-390 and 561-637 are samples that have
        # been parsed but not yet returned (in_process)
        self.assert_result(result,
                           [[314,390,1,0], [561,637,1,0]],
                           [[0,69],[314,390],[561,637],[944,1000]], self.particle_a)
        result = self.parser.get_records(1)
        # 0-69, 944-1000 are incomplete samples, 561-637 is parsed but not yet 
        # returned (in_process)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]], self.particle_b)
        result = self.parser.get_records(1)
        # all three samples that were parsed have been returned, no more in process
        self.assert_result(result, [],
                           [[0,69],[944,1000]], self.particle_c)

        self.stream_handle.close()
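
Reading these assertions is easier once the shape of the state entries is clear: an UNPROCESSED_DATA entry is a plain [start, end] byte range, while an IN_PROCESS_DATA entry such as [314, 390, 1, 0] pairs a byte range with what the comments above describe as sample bookkeeping. The field names below are an assumption inferred from those comments, not taken from the parser's source:

    # Hedged helper: unpack an in-process entry (field meanings assumed)
    def describe_in_process(entry):
        start, end, parsed, returned = entry[:4]
        return ('bytes %d-%d: %d sample(s) parsed, %d already returned'
                % (start, end, parsed, returned))

    print(describe_in_process([314, 390, 1, 0]))
    # -> bytes 314-390: 1 sample(s) parsed, 0 already returned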
Example #17
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 6150]],
            StateKey.IN_PROCESS_DATA:[]}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_replaced.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0], [6053,6131,1,0]],
                           [[0,69],[314,390],[561,637],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6053,6131,1,0]],
                           [[0,69],[314,390],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_c)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, next_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # first get the old 'in process' records
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [314,390], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_b)
        self.stream_handle.close()
Example #18
    def build_telem_parser(self, state=None):
        """
        Build a telemetered parser, storing it in self.parser
        @param state initial parser state defaults to None
        """
        if self.stream_handle is None:
            self.fail(
                "Must set stream handle before building telemetered parser")
        self.parser = FlortdParser(self.telem_config, state,
                                   self.stream_handle, self.state_callback,
                                   self.pub_callback, self.exception_callback)
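
A short usage sketch for the helper above (the resource file name is borrowed from the other tests on this page; the flow is illustrative):

    # Sketch only: the handle must be set before the helper is called
    self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
    self.build_telem_parser()          # fails the test if the handle were None
    result = self.parser.get_records(1)
    self.stream_handle.close()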
Example #19
    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[0, 69], [197, 1000]],
            StateKey.TIMESTAMP: self.timestamp1
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 1000]], self.timestamp3,
                           self.particle_b_new)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3,
                           self.particle_c)

        self.stream_handle.close()
Example #20
    def _build_parser(self, parser_state, infile):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.flortd',
            'particle_class': 'FlortdParserDataParticle'
        })
        log.debug("My Config: %s", config)
        self._parser = FlortdParser(config, parser_state, infile,
                                    self._save_parser_state,
                                    self._data_callback)
        return self._parser
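
As a sketch, a harness holding an open file would call this builder and then drain records from the returned parser (the file name is borrowed from the tests above; this is illustrative, not the driver's actual control flow):

    # Sketch only: drive the builder above from a harness
    with open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat')) as infile:
        parser = self._build_parser(parser_state=None, infile=infile)
        particles = parser.get_records(5)   # state is saved via _save_parser_state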
Example #21
    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)
Example #22
    def test_dash(self):
        """
        Test that the particle with a field replaced by dashes is found
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_dash.dat'))
        self.parser = FlortdParser(self.config, None, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[313,389,1,0]],
                           [[0,69],[313,499]], self.particle_a_dash)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69],[389,499]], self.particle_b)
Example #23
    def _build_parser(self, parser_state, stream_in, data_key):
        """
        Build the requested parser based on the data key
        @param parser_state starting parser state to pass to parser
        @param stream_in Handle of open file to pass to parser
        @param data_key Key to determine which parser type is built
        """

        if data_key == DataSourceKey.FLORT_DJ_SIO_TELEMETERED:
            config = self._parser_config.get(
                DataSourceKey.FLORT_DJ_SIO_TELEMETERED)
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flortd',
                DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlortdParserDataParticle'
            })
            # build the telemetered parser
            parser = FlortdParser(
                config, parser_state, stream_in,
                lambda state: self._save_parser_state(
                    state, DataSourceKey.FLORT_DJ_SIO_TELEMETERED),
                self._data_callback, self._sample_exception_callback)

        elif data_key == DataSourceKey.FLORT_DJ_SIO_RECOVERED:
            config = self._parser_config.get(
                DataSourceKey.FLORT_DJ_SIO_RECOVERED)
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flortd',
                DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlortdRecoveredParserDataParticle'
            })
            # build the recovered parser
            parser = FlortdRecoveredParser(
                config, parser_state, stream_in,
                lambda state, ingested: self._save_parser_state(
                    state, DataSourceKey.FLORT_DJ_SIO_RECOVERED, ingested),
                self._data_callback, self._sample_exception_callback)

        else:
            raise ConfigurationException(
                'Tried to build parser for unknown data source key %s' %
                data_key)

        return parser
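
A brief sketch of how the two branches get exercised; telem_file and recov_file are hypothetical open handles, and the wiring is illustrative only:

    # Sketch only: one builder, two parser types selected by the data source key
    telem = self._build_parser(None, telem_file,
                               DataSourceKey.FLORT_DJ_SIO_TELEMETERED)
    recov = self._build_parser(None, recov_file,
                               DataSourceKey.FLORT_DJ_SIO_RECOVERED)
    # any other key raises ConfigurationException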
Example #24
    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result, [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)
Example #25
    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[],
            StateKey.UNPROCESSED_DATA:[[0,69], [197,1000]]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]],
                           self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69],[944,1000]],
                           self.particle_c)

        self.stream_handle.close()
Example #26
    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[0, 69], [197, 1000]],
            StateKey.TIMESTAMP: self.timestamp1,
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 1000]], self.timestamp3, self.particle_b_new
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3, self.particle_c)

        self.stream_handle.close()
Example #27
    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        # 0-69, 944-1000 are incomplete samples
        self.assert_state([],
                        [[0,69],[944,1000]])
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)
Example #28
    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[[314,390,1,0], [561,637,1,0]],
            StateKey.UNPROCESSED_DATA:[[0,69],[314,390],[561,637],[944,6150]]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)

        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,6150]],
                           self.particle_b)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([],
            [[0,69],[1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)
Example #29
    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA:[[0, data_len]],
            StateKey.IN_PROCESS_DATA:[]}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state([],
            [[0, 69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [9020,9400]])
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)
Example #30
class FlortdParserUnitTestCase(ParserUnitTestCase):

    def state_callback(self, state):
        """ Call back method to watch what comes in via the position callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Call back method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flortd',
            DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortdParserDataParticle'
            }

        # first FL tag
        self.particle_a = FlortdParserDataParticle(
            '51EF0E7507/23/13\t23:15:06\t700\t50\t695\t50\t460\t53\t545')
        self.particle_a_dash = FlortdParserDataParticle(
            '51EF0E7507/23/13\t23:15:06\t700\t50\t--\t50\t460\t53\t545')
        self.particle_b = FlortdParserDataParticle(
            '51EF190107/24/13\t00:00:06\t700\t85\t695\t50\t460\t51\t548')
        self.particle_c = FlortdParserDataParticle(
            '51EF6D6107/24/13\t06:00:05\t700\t78\t695\t72\t460\t51\t553')
        self.particle_d = FlortdParserDataParticle(
            '51EFC1C207/24/13\t12:00:06\t700\t169\t695\t127\t460\t58\t553')
        self.particle_e = FlortdParserDataParticle(
            '51F0162207/24/13\t18:00:06\t700\t262\t695\t84\t460\t55\t555')
        self.particle_f = FlortdParserDataParticle(
            '51F06A8207/25/13\t00:00:06\t700\t159\t695\t95\t460\t59\t554')

        self.state_callback_value = None
        self.publish_callback_value = None
        self.exception_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data, particle):
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data)
        self.assert_(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data):
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA], unprocessed_data)

    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        # 0-69, 944-1000 are incomplete samples, 314-390 and 561-637 are samples that have
        # been parsed but not yet returned (in_process)
        self.assert_result(result,
                           [[314,390,1,0], [561,637,1,0]],
                           [[0,69],[314,390],[561,637],[944,1000]], self.particle_a)
        result = self.parser.get_records(1)
        # 0-69, 944-1000 are incomplete samples, 561-637 is parsed but not yet 
        # returned (in_process)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]], self.particle_b)
        result = self.parser.get_records(1)
        # all three samples that were parsed have been returned, no more in process
        self.assert_result(result, [],
                           [[0,69],[944,1000]], self.particle_c)

        self.stream_handle.close()

    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        # 0-69, 944-1000 are incomplete samples
        self.assert_state([],
                        [[0,69],[944,1000]])
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)

    def test_dash(self):
        """
        Test that the particle with a field replaced by dashes is found
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_dash.dat'))
        self.parser = FlortdParser(self.config, None, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[313,389,1,0]],
                           [[0,69],[313,499]], self.particle_a_dash)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69],[389,499]], self.particle_b)

    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA:[[0, data_len]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: data_len}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[3], self.particle_d)
        self.assertEqual(result[4], self.particle_e)
        self.assertEqual(result[5], self.particle_f)
        # 0-69 contains an incomplete block (end of a sample)
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_state([],
            [[0, 69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [9020,9400]])
        self.assertEqual(self.publish_callback_value[4], self.particle_e)
        self.assertEqual(self.publish_callback_value[5], self.particle_f)

    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[],
            StateKey.UNPROCESSED_DATA:[[0,69], [197,1000]],
            StateKey.FILE_SIZE: 9400}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        # 0-69, 944-1000 are incomplete samples
        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]],
                           self.particle_b)
        result = self.parser.get_records(1)
        # 0-69, 944-1000 are incomplete samples
        self.assert_result(result, [],
                           [[0,69],[944,1000]],
                           self.particle_c)

        self.stream_handle.close()

    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[[314,390,1,0], [561,637,1,0]],
            StateKey.UNPROCESSED_DATA:[[0,69],[314,390],[561,637],[944,6150]],
            StateKey.FILE_SIZE: 9400}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)

        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,6150]],
                           self.particle_b)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_state([],
            [[0,69],[1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)

    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        new_state = {StateKey.UNPROCESSED_DATA:[[0,69],[944,6150]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}

        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([],
            [[0,69],[944,1000]])
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]],
                           self.particle_d)

    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 6150]],
            StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE: 9400}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_replaced.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0], [6053,6131,1,0]],
                           [[0,69],[314,390],[561,637],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_a)
        result = self.parser.get_records(1)
        # 0-69, 6131-6150 contains an incomplete block
        # 314-390 is the zeroed block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [[6053,6131,1,0]],
                           [[0,69],[314,390],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_c)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, next_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # first get the old 'in process' records from 6053-6131
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [314,390], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        # 0-69, 6131-6150 contains an incomplete block
        # 1329-1332 there are 3 extra \n's between sio blocks
        # 2294-2363, and 4092-4161 contains an error text string in between two sio blocks
        # 4351-4927 has a bad AD then CT message where the size from the header does not line up with
        # the final \x03
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_b)
        self.stream_handle.close()
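
# A minimal, standalone sketch (not part of the original example) of the state
# hand-off this test exercises: a harness persists the dict the parser reports
# through state_callback, then seeds a fresh FlortdParser with it so parsing
# resumes where it left off. The file name 'parser_state.json' is hypothetical,
# and the sketch assumes the StateKey constants are plain strings so the dict
# is JSON-serializable.
import json


def save_state(state, path='parser_state.json'):
    # persist the state dict delivered via state_callback
    with open(path, 'w') as handle:
        json.dump(state, handle)


def load_state(path='parser_state.json'):
    # restore the persisted dict; pass it as the 'state' argument of FlortdParser
    with open(path) as handle:
        return json.load(handle)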
Example No. 31
class FlortdParserUnitTestCase(ParserUnitTestCase):

    def state_callback(self, state):
        """ Callback method to watch what comes in via the state callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Callback method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Callback method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flortd',
            DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortdParserDataParticle'
            }

        # first FL tag
        self.timestamp1 = 3583610106.0
        self.particle_a = FlortdParserDataParticle(
            '51EF0E7507/23/13\t23:15:06\t700\t50\t695\t50\t460\t53\t545')
        self.timestamp2 = 3583612806.0
        self.particle_b = FlortdParserDataParticle(
            '51EF190107/24/13\t00:00:06\t700\t85\t695\t50\t460\t51\t548')
        self.timestamp3 = 3583634405.0
        self.particle_c = FlortdParserDataParticle(
            '51EF6D6107/24/13\t06:00:05\t700\t78\t695\t72\t460\t51\t553')
        self.timestamp4 = 3583656006.0
        self.particle_d = FlortdParserDataParticle(
            '51EFC1C207/24/13\t12:00:06\t700\t169\t695\t127\t460\t58\t553')
        self.timestamp5 = 3583677606.0
        self.particle_e = FlortdParserDataParticle(
            '51F0162207/24/13\t18:00:06\t700\t262\t695\t84\t460\t55\t555')
        self.timestamp6 = 3583699206.0
        self.particle_f = FlortdParserDataParticle(
            '51F06A8207/25/13\t00:00:06\t700\t159\t695\t95\t460\t59\t554')
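        # An inference, not stated in the original: the leading 8 hex characters
        # of each raw record are a POSIX timestamp, e.g. 0x51EF0E75 == 1374621301,
        # which is 2013-07-23 23:15:01 UTC, a few seconds before the 23:15:06
        # date string that follows it in particle_a.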

        self.state_callback_value = None
        self.publish_callback_value = None
        self.exception_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data, particle):
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data)
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data):
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA], unprocessed_data)
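        # These two helpers (comment added for clarity) check both the parser's
        # internal _state and the copy delivered through state_callback, so every
        # test verifies the two stay in sync after each get_records() call.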

    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[]}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[314,390,1,0], [561,637,1,0]],
                           [[0,69],[314,390],[561,637],[944,1000]], self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]], self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69],[944,1000]], self.particle_c)

        self.stream_handle.close()
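        # A reading of the asserts above (not stated in the original): each
        # IN_PROCESS_DATA entry appears to be [block start byte, block end byte,
        # particles in block, particles returned], so [561, 637, 1, 0] is a
        # 76-byte block holding one particle that has not been returned yet.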

    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]],
            StateKey.IN_PROCESS_DATA:[]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([],
                        [[0,69],[944,1000]])
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)

    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA:[[0, data_len]],
            StateKey.IN_PROCESS_DATA:[]}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state([],
            [[0, 69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [9020,9400]])
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)
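        # Side note, a sketch rather than a required change: the length used to
        # seed UNPROCESSED_DATA can also be obtained without reading the file in,
        # e.g. os.path.getsize(os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'));
        # reading the data first, as above, works just as well.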

    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[],
            StateKey.UNPROCESSED_DATA:[[0,69], [197,1000]]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,1000]],
                           self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69],[944,1000]],
                           self.particle_c)

        self.stream_handle.close()

    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {StateKey.IN_PROCESS_DATA:[[314,390,1,0], [561,637,1,0]],
            StateKey.UNPROCESSED_DATA:[[0,69],[314,390],[561,637],[944,6150]]}
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)

        self.assert_result(result, [[561,637,1,0]],
                           [[0,69],[561,637],[944,6150]],
                           self.particle_b)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([],
            [[0,69],[1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)

    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 1000]], StateKey.IN_PROCESS_DATA:[]}
        new_state = {StateKey.UNPROCESSED_DATA:[[0,69],[944,6150]],
            StateKey.IN_PROCESS_DATA:[]}

        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([],
            [[0,69],[944,1000]])
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927], [6131,6150]],
                           self.particle_d)

    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 6150]],
            StateKey.IN_PROCESS_DATA:[]}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_replaced.dat'))
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[561,637,1,0], [6053,6131,1,0]],
                           [[0,69],[314,390],[561,637],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6053,6131,1,0]],
                           [[0,69],[314,390],[1329,1332],[2294,2363],[4092,4161],[4351,4927],[6053,6150]],
                           self.particle_c)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node59p1_shorter.dat'))
        self.parser = FlortdParser(self.config, next_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # first get the old 'in process' records
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [314,390], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0,69], [1329,1332],[2294,2363],[4092,4161],[4351,4927],[6131,6150]],
                           self.particle_b)
        self.stream_handle.close()
Example No. 32
class FlortdParserUnitTestCase(ParserUnitTestCase):
    def state_callback(self, state):
        """ Callback method to watch what comes in via the state callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Callback method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def setUp(self):
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: "mi.dataset.parser.flortd",
            DataSetDriverConfigKeys.PARTICLE_CLASS: "FlortdParserDataParticle",
        }

        # first FL tag
        self.timestamp1 = 3583610106.0
        self.particle_a = FlortdParserDataParticle(
            "07/23/13	23:15:06	700	50	695	50	460	53	545", internal_timestamp=self.timestamp1, new_sequence=True
        )

        self.timestamp2 = 3583612806.0
        self.particle_b = FlortdParserDataParticle(
            "07/24/13	00:00:06	700	85	695	50	460	51	548", internal_timestamp=self.timestamp2, new_sequence=False
        )
        self.particle_b_new = FlortdParserDataParticle(
            "07/24/13	00:00:06	700	85	695	50	460	51	548", internal_timestamp=self.timestamp2, new_sequence=True
        )

        self.timestamp3 = 3583634405.0
        self.particle_c = FlortdParserDataParticle(
            "07/24/13	06:00:05	700	78	695	72	460	51	553", internal_timestamp=self.timestamp3, new_sequence=False
        )
        self.particle_c_new = FlortdParserDataParticle(
            "07/24/13	06:00:05	700	78	695	72	460	51	553", internal_timestamp=self.timestamp3, new_sequence=True
        )

        self.timestamp4 = 3583656006.0
        self.particle_d = FlortdParserDataParticle(
            "07/24/13	12:00:06	700	169	695	127	460	58	553", internal_timestamp=self.timestamp4, new_sequence=True
        )

        self.timestamp5 = 3583677606.0
        self.particle_e = FlortdParserDataParticle(
            "07/24/13	18:00:06	700	262	695	84	460	55	555", internal_timestamp=self.timestamp5, new_sequence=False
        )

        self.timestamp6 = 3583699206.0
        self.particle_f = FlortdParserDataParticle(
            "07/25/13	00:00:06	700	159	695	95	460	59	554", internal_timestamp=self.timestamp6, new_sequence=False
        )
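        # An inference from the date strings above, not stated in the original:
        # the timestamp constants look like NTP-era values (seconds since
        # 1900-01-01); e.g. 3583612806.0 - 2208988800 = 1374624006, which is
        # 2013-07-24 00:00:06 UTC, matching particle_b's date string.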

        self.state_callback_value = None
        self.publish_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data, timestamp, particle):
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data, timestamp)
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data, timestamp):
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA], in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA], unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.TIMESTAMP], timestamp)

    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.parser = FlortdParser(self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback)

        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
            [[0, 69], [314, 390], [561, 637], [944, 1000]],
            self.timestamp3,
            self.particle_a,
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 1000]], self.timestamp3, self.particle_b
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3, self.particle_c)

        self.stream_handle.close()

    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result, [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)

    def test_long_stream(self):
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA: [[0, data_len]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state(
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072], [8273, 9400]],
            self.timestamp6,
        )
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)

    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[0, 69], [197, 1000]],
            StateKey.TIMESTAMP: self.timestamp1,
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 1000]], self.timestamp3, self.particle_b_new
        )
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3, self.particle_c)

        self.stream_handle.close()
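        # As the comment in test_in_process_start below notes, the first particle
        # parsed after seeding a fresh state is treated as the start of a new
        # sequence, which is why particle_b_new rather than particle_b is expected.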

    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 69], [314, 390], [561, 637], [944, 6150]],
            StateKey.TIMESTAMP: self.timestamp3,
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        result = self.parser.get_records(1)

        # even though the state says this particle is not a new sequence, since it is the
        # first after setting the state it will be new
        self.assert_result(
            result, [[561, 637, 1, 0, 0]], [[0, 69], [561, 637], [944, 6150]], self.timestamp2, self.particle_b_new
        )

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state(
            [], [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]], self.timestamp4
        )
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)

    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 1000]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: self.timestamp2,
        }

        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(
            result,
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp4,
            self.particle_d,
        )

    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data
        has not been received yet, then using the returned state make a new parser with
        the test data that has the 0s filled in
        """
        self.state = {StateKey.UNPROCESSED_DATA: [[0, 6150]], StateKey.IN_PROCESS_DATA: [], StateKey.TIMESTAMP: 0.0}
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_replaced.dat"))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[561, 637, 1, 0, 1], [6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [561, 637], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6150]],
            self.timestamp4,
            self.particle_a,
        )
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [[6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6150]],
            self.timestamp4,
            self.particle_c_new,
        )
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(os.path.join(RESOURCE_PATH, "node59p1_shorter.dat"))
        self.parser = FlortdParser(
            self.config, next_state, self.stream_handle, self.state_callback, self.pub_callback
        )  # last one is the link to the data source

        # first get the old 'in process' records
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [],
            [[0, 69], [314, 390], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp4,
            self.particle_d,
        )

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(
            result,
            [],
            [[0, 69], [944, 2370], [2560, 2947], [3137, 4173], [4363, 5437], [5683, 6072]],
            self.timestamp2,
            self.particle_b_new,
        )
        self.stream_handle.close()
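
# A standalone sketch, illustrative only and not taken from the original source,
# of the bookkeeping the UNPROCESSED_DATA lists imply: carving a parsed
# [start, end] chunk out of a list of outstanding byte ranges.


def remove_chunk(ranges, chunk):
    """Return `ranges` with the overlap of `chunk` ([start, end]) removed."""
    start, end = chunk
    result = []
    for r_start, r_end in ranges:
        if end <= r_start or start >= r_end:
            result.append([r_start, r_end])  # no overlap, keep untouched
            continue
        if r_start < start:
            result.append([r_start, start])  # keep the piece before the chunk
        if end < r_end:
            result.append([end, r_end])      # keep the piece after the chunk
    return result


# Example: parsing bytes 314-390 out of [[0, 1000]] leaves the flanking ranges.
assert remove_chunk([[0, 1000]], [314, 390]) == [[0, 314], [390, 1000]]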
Example No. 33
class FlortdParserUnitTestCase(ParserUnitTestCase):
    def state_callback(self, state):
        """ Callback method to watch what comes in via the state callback """
        self.state_callback_value = state

    def pub_callback(self, pub):
        """ Callback method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def setUp(self):
        ParserUnitTestCase.setUp(self)
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.flortd',
            DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortdParserDataParticle'
        }

        # first FL tag
        self.timestamp1 = 3583610106.0
        self.particle_a = FlortdParserDataParticle(
            '07/23/13	23:15:06	700	50	695	50	460	53	545',
            internal_timestamp=self.timestamp1,
            new_sequence=True)

        self.timestamp2 = 3583612806.0
        self.particle_b = FlortdParserDataParticle(
            '07/24/13	00:00:06	700	85	695	50	460	51	548',
            internal_timestamp=self.timestamp2,
            new_sequence=False)
        self.particle_b_new = FlortdParserDataParticle(
            '07/24/13	00:00:06	700	85	695	50	460	51	548',
            internal_timestamp=self.timestamp2,
            new_sequence=True)

        self.timestamp3 = 3583634405.0
        self.particle_c = FlortdParserDataParticle(
            '07/24/13	06:00:05	700	78	695	72	460	51	553',
            internal_timestamp=self.timestamp3,
            new_sequence=False)
        self.particle_c_new = FlortdParserDataParticle(
            '07/24/13	06:00:05	700	78	695	72	460	51	553',
            internal_timestamp=self.timestamp3,
            new_sequence=True)

        self.timestamp4 = 3583656006.0
        self.particle_d = FlortdParserDataParticle(
            '07/24/13	12:00:06	700	169	695	127	460	58	553',
            internal_timestamp=self.timestamp4,
            new_sequence=True)

        self.timestamp5 = 3583677606.0
        self.particle_e = FlortdParserDataParticle(
            '07/24/13	18:00:06	700	262	695	84	460	55	555',
            internal_timestamp=self.timestamp5,
            new_sequence=False)

        self.timestamp6 = 3583699206.0
        self.particle_f = FlortdParserDataParticle(
            '07/25/13	00:00:06	700	159	695	95	460	59	554',
            internal_timestamp=self.timestamp6,
            new_sequence=False)

        self.state_callback_value = None
        self.publish_callback_value = None

    def assert_result(self, result, in_process_data, unprocessed_data,
                      timestamp, particle):
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data, timestamp)
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def assert_state(self, in_process_data, unprocessed_data, timestamp):
        self.assertEqual(self.parser._state[StateKey.IN_PROCESS_DATA],
                         in_process_data)
        self.assertEqual(self.parser._state[StateKey.UNPROCESSED_DATA],
                         unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA],
                         in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA],
                         unprocessed_data)
        self.assertEqual(self.state_callback_value[StateKey.TIMESTAMP],
                         timestamp)

    def test_simple(self):
        """
        Read test data from the file and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        # NOTE: using the unprocessed data state of 0,1000 limits the file to reading
        # just 1000 bytes, so even though the file is longer it only reads the first
        # 1000
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = FlortdParser(self.config, self.state, self.stream_handle,
                                   self.state_callback, self.pub_callback)

        result = self.parser.get_records(1)
        self.assert_result(result, [[314, 390, 1, 0, 0], [561, 637, 1, 0, 0]],
                           [[0, 69], [314, 390], [561, 637], [944, 1000]],
                           self.timestamp3, self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 1000]], self.timestamp3,
                           self.particle_b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3,
                           self.particle_c)

        self.stream_handle.close()

    def test_get_many(self):
        """
        Read test data from the file and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(3)
        self.stream_handle.close()
        self.assertEqual(result,
                         [self.particle_a, self.particle_b, self.particle_c])
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        self.assertEqual(self.publish_callback_value[0], self.particle_a)
        self.assertEqual(self.publish_callback_value[1], self.particle_b)
        self.assertEqual(self.publish_callback_value[2], self.particle_c)
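        # A reading of the assertions above: with get_records(3) the publish
        # callback appears to receive the whole batch as one list, so indexing
        # elements 0 through 2 checks ordering within a single publish call.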

    def test_long_stream(self):
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        data = self.stream_handle.read()
        data_len = len(data)
        self.stream_handle.seek(0)
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, data_len]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(6)
        self.stream_handle.close()
        self.assertEqual(result[0], self.particle_a)
        self.assertEqual(result[1], self.particle_b)
        self.assertEqual(result[2], self.particle_c)
        self.assertEqual(result[-3], self.particle_d)
        self.assertEqual(result[-2], self.particle_e)
        self.assertEqual(result[-1], self.particle_f)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072], [8273, 9400]],
                          self.timestamp6)
        self.assertEqual(self.publish_callback_value[-2], self.particle_e)
        self.assertEqual(self.publish_callback_value[-1], self.particle_f)

    def test_mid_state_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[0, 69], [197, 1000]],
            StateKey.TIMESTAMP: self.timestamp1
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 1000]], self.timestamp3,
                           self.particle_b_new)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 69], [944, 1000]], self.timestamp3,
                           self.particle_c)

        self.stream_handle.close()

    def test_in_process_start(self):
        """
        test starting a parser with a state in the middle of processing
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[314, 390, 1, 0, 0],
                                       [561, 637, 1, 0, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 69], [314, 390], [561, 637],
                                        [944, 6150]],
            StateKey.TIMESTAMP:
            self.timestamp3
        }
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, new_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        result = self.parser.get_records(1)

        # even though the state says this particle is not a new sequence, since it is the
        # first after setting the state it will be new
        self.assert_result(result, [[561, 637, 1, 0, 0]],
                           [[0, 69], [561, 637], [944, 6150]], self.timestamp2,
                           self.particle_b_new)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_c)
        self.assertEqual(result[1], self.particle_d)
        self.assert_state([],
                          [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                           [4363, 5437], [5683, 6072]], self.timestamp4)
        self.assertEqual(self.publish_callback_value[-1], self.particle_d)

    def test_set_state(self):
        """
        test changing the state after initializing
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 1000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 69], [944, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: self.timestamp2
        }

        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source
        # there should only be 3 records, make sure we stop there
        result = self.parser.get_records(3)
        self.assert_state([], [[0, 69], [944, 1000]], self.timestamp3)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.stream_handle.close()
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072]], self.timestamp4,
                           self.particle_d)

    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data
        has not been received yet, then using the returned state make a new parser with
        the test data that has the 0s filled in
        """
        self.state = {
            StateKey.UNPROCESSED_DATA: [[0, 6150]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.TIMESTAMP: 0.0
        }
        # this file has a block of FL data replaced by 0s
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_replaced.dat'))
        self.parser = FlortdParser(
            self.config, self.state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        result = self.parser.get_records(1)
        self.assert_result(
            result, [[561, 637, 1, 0, 1], [6072, 6150, 1, 0, 1]],
            [[0, 69], [314, 390], [561, 637], [944, 2370], [2560, 2947],
             [3137, 4173], [4363, 5437], [5683, 6150]], self.timestamp4,
            self.particle_a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[6072, 6150, 1, 0, 1]],
                           [[0, 69], [314, 390], [944, 2370], [2560, 2947],
                            [3137, 4173], [4363, 5437], [5683, 6150]],
                           self.timestamp4, self.particle_c_new)
        self.stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        self.stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = FlortdParser(
            self.config, next_state, self.stream_handle, self.state_callback,
            self.pub_callback)  # last one is the link to the data source

        # first get the old 'in process' records
        # Once those are done, the unprocessed data will be checked
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [314, 390], [944, 2370], [2560, 2947],
                            [3137, 4173], [4363, 5437], [5683, 6072]],
                           self.timestamp4, self.particle_d)

        # this should be the first of the newly filled in particles from 314-390
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 69], [944, 2370], [2560, 2947], [3137, 4173],
                            [4363, 5437], [5683, 6072]], self.timestamp2,
                           self.particle_b_new)
        self.stream_handle.close()