Example #1
    def test_update(self):
        """
        Test a file which has had a section of data replaced by 0s, as if a block of data has not been received yet,
        then using the returned state make a new parser with the test data that has the 0s filled in
        """
        log.debug('Starting test_update')
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 14700]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 17600
        }
        # this file has a block of data replaced by 0s
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_replaced.dat'))
        self.parser = PhsenParser(self.config, state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        result = self.parser.get_records(3)
        self.assertEqual(result,
                         [self.particle_b, self.particle_c, self.particle_d])
        self.assert_state(
            [[14142, 14646, 1, 0]],
            [[0, 172], [367, 911], [4100, 4171], [5899, 5968], [7697, 7764],
             [9654, 9723], [11451, 11520], [14142, 14700]])
        # was b and c
        stream_handle.close()

        next_state = self.parser._state
        # this file has the block of data that was missing in the previous file
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = PhsenParser(self.config, next_state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        # get last in process record
        result = self.parser.get_records(1)
        self.assert_result(
            result, [],
            [[0, 172], [367, 911], [4100, 4171], [5899, 5968], [7697, 7764],
             [9654, 9723], [11451, 11520], [14646, 14700]], self.particle_e)
        # now get the filled-in record
        result = self.parser.get_records(1)
        self.assert_result(
            result, [[367, 911, 2, 1]],
            [[0, 172], [367, 911], [4100, 4171], [5899, 5968], [7697, 7764],
             [9654, 9723], [11451, 11520], [14646, 14700]],
            self.particle_control)
        result = self.parser.get_records(1)
        self.assert_result(result, [],
                           [[0, 172], [4100, 4171], [5899, 5968], [7697, 7764],
                            [9654, 9723], [11451, 11520], [14646, 14700]],
                           self.particle_a)
        stream_handle.close()
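
A note on the state layout these tests manipulate: UNPROCESSED_DATA holds two-element [start, end] byte ranges that have not yet been fully parsed, while the IN_PROCESS_DATA entries appear to be four-element [start, end, expected_particles, returned_particles] records. That reading is inferred from how the assertions above use the values, not from the parser source, so treat the following self-contained sketch (with a stand-in StateKey) as illustrative only:

class StateKey(object):
    # Stand-in constants so this sketch runs on its own; the real parser
    # module defines its own StateKey values.
    UNPROCESSED_DATA = 'unprocessed_data'
    IN_PROCESS_DATA = 'in_process_data'
    FILE_SIZE = 'file_size'


def summarize_state(state):
    """Return (unprocessed_bytes, pending_particles) for a parser state dict."""
    unprocessed = sum(end - start
                      for start, end in state[StateKey.UNPROCESSED_DATA])
    # Each in-process entry is read as [start, end, expected, returned].
    pending = sum(expected - returned
                  for _start, _end, expected, returned in state[StateKey.IN_PROCESS_DATA])
    return unprocessed, pending


example_state = {
    StateKey.UNPROCESSED_DATA: [[0, 172], [14142, 14700]],
    StateKey.IN_PROCESS_DATA: [[14142, 14646, 1, 0]],
    StateKey.FILE_SIZE: 17600,
}
print(summarize_state(example_state))  # prints (730, 1)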
Example #2
    def test_in_process_start(self):
        """
        Test starting the parser with a state that is in the middle of processing.
        """
        new_state = {
            StateKey.IN_PROCESS_DATA: [[1804, 2308, 1, 0]],
            StateKey.UNPROCESSED_DATA: [[0, 172], [1804, 2308], [4100, 4171],
                                        [5899, 5968], [7697, 7764],
                                        [8636, 16000]],
            StateKey.FILE_SIZE: 17600
        }
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = PhsenParser(self.config, new_state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[0, 172], [4100, 4171], [5899, 5968],
                                        [7697, 7764], [8636, 16000]],
                           self.particle_c)

        result = self.parser.get_records(1)
        self.assert_result(
            result, [[14142, 14646, 1, 0], [14839, 15343, 1, 0]],
            [[0, 172], [4100, 4171], [5899, 5968], [7697, 7764], [9654, 9723],
             [11451, 11520], [14142, 14646], [14839, 15343], [15536, 16000]],
            self.particle_d)
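
A resume state like the one above only works if it survives between runs. Since every value shown in these examples is a plain list or int, such a state is JSON-serializable; the sketch below shows persisting and restoring one under that assumption, using stand-in string keys rather than the real StateKey constants:

import json

resume_state = {
    'in_process_data': [[1804, 2308, 1, 0]],
    'unprocessed_data': [[0, 172], [1804, 2308], [4100, 4171],
                         [5899, 5968], [7697, 7764], [8636, 16000]],
    'file_size': 17600,
}

# Persist the state between driver runs, then load it back to restart parsing
# mid-file, as test_in_process_start does with new_state.
with open('phsen_parser_state.json', 'w') as state_file:
    json.dump(resume_state, state_file)

with open('phsen_parser_state.json') as state_file:
    restored = json.load(state_file)

assert restored == resume_state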
Example #3
    def test_simple(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 9000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 17600
        }
        self.parser = PhsenParser(self.config, state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        result = self.parser.get_records(1)
        in_process = [[367, 911, 2, 1], [1106, 1610, 1, 0], [1804, 2308, 1, 0]]
        unprocessed = [[0, 172], [367, 911], [1106, 1610], [1804, 2308],
                       [4100, 4171], [5899, 5968], [7697, 7764], [8636, 9000]]
        self.assert_result(result, in_process, unprocessed,
                           self.particle_control)

        result = self.parser.get_records(1)
        in_process = [[1106, 1610, 1, 0], [1804, 2308, 1, 0]]
        unprocessed = [[0, 172], [1106, 1610], [1804, 2308], [4100, 4171],
                       [5899, 5968], [7697, 7764], [8636, 9000]]
        self.assert_result(result, in_process, unprocessed, self.particle_a)

        result = self.parser.get_records(1)
        in_process = [[1804, 2308, 1, 0]]
        unprocessed = [[0, 172], [1804, 2308], [4100, 4171], [5899, 5968],
                       [7697, 7764], [8636, 9000]]
        self.assert_result(result, in_process, unprocessed, self.particle_b)
        stream_handle.close()
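
The assert_result and assert_state helpers come from a test base class that is not shown in these examples. The sketch below is a hypothetical reconstruction of what such helpers typically check; the attribute names and exact assertions are assumptions inferred from how the tests use them, not the real base class:

class ParserAssertMixin(object):
    """
    Hypothetical reconstruction of the base-class helpers used in these tests.
    Assumes the test case records the last reported state in
    state_callback_value and published particles in publish_callback_value.
    """

    def assert_state(self, in_process_data, unprocessed_data):
        # Compare the parser's most recently reported state to the expected ranges.
        self.assertEqual(self.state_callback_value[StateKey.IN_PROCESS_DATA],
                         in_process_data)
        self.assertEqual(self.state_callback_value[StateKey.UNPROCESSED_DATA],
                         unprocessed_data)

    def assert_result(self, result, in_process_data, unprocessed_data, particle):
        # Exactly one particle returned, state updated, and that particle published.
        self.assertEqual(result, [particle])
        self.assert_state(in_process_data, unprocessed_data)
        self.assertEqual(self.publish_callback_value[-1], particle)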
Example #4
    def _build_parser(self, parser_state, infile):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            'particle_module': 'mi.dataset.parser.phsen',
            'particle_class': 'PhsenParserDataParticle'
        })
        log.debug("My Config: %s", config)
        self._parser = PhsenParser(
            config,
            parser_state,
            infile,
            self._save_parser_state,
            self._data_callback
        )
        return self._parser
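
One design note on the builder above: config = self._parser_config aliases the driver's config dict, so the subsequent config.update(...) mutates that shared object on every build. Because the same keys are written each time this is usually harmless, but a copy-first variant avoids the side effect entirely. The sketch below is an alternative method body, not the driver's actual code, and assumes import copy at module level:

    def _build_parser(self, parser_state, infile):
        """
        Build and return the parser without mutating the shared driver config.
        """
        config = copy.deepcopy(self._parser_config)
        config.update({
            'particle_module': 'mi.dataset.parser.phsen',
            'particle_class': 'PhsenParserDataParticle'
        })
        log.debug("My Config: %s", config)
        self._parser = PhsenParser(
            config,
            parser_state,
            infile,
            self._save_parser_state,
            self._data_callback
        )
        return self._parser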
Example #5
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and reading
        data, as if new data has been found and the state has changed.
        """
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 9000]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 17600
        }
        new_state = {
            StateKey.UNPROCESSED_DATA: [[0, 172], [4100, 4171], [5899, 5968],
                                        [7697, 7764], [8636, 14700]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 17600
        }

        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = PhsenParser(self.config, state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)
        # there should be only 4 records; make sure we stop there
        result = self.parser.get_records(4)
        self.assert_state(
            [],
            [[0, 172], [4100, 4171], [5899, 5968], [7697, 7764], [8636, 9000]])
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        stream_handle.close()
        self.assert_result(result, [[14142, 14646, 1, 0]],
                           [[0, 172], [4100, 4171], [5899, 5968], [7697, 7764],
                            [9654, 9723], [11451, 11520], [14142, 14700]],
                           self.particle_d)
Example #6
    def _build_telemetered_parser(self, parser_state, stream_in):
        """
        Build and return the telemetered parser
        @param parser_state starting parser state to pass to parser
        @param stream_in Handle of open file to pass to parser
        """
        config = self._parser_config.get(DataSourceKey.PHSEN_ABCDEF_SIO_MULE)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.phsen',
            DataSetDriverConfigKeys.PARTICLE_CLASS: ['PhsenParserDataParticle',
                                                     'PhsenControlDataParticle']
        })
        log.debug("My Config: %s", config)
        parser = PhsenParser(
            config,
            parser_state,
            stream_in,
            lambda state: self._save_parser_state(state, DataSourceKey.PHSEN_ABCDEF_SIO_MULE),
            self._data_callback,
            self._sample_exception_callback
        )
        return parser
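
The lambda passed as the state callback binds DataSourceKey.PHSEN_ABCDEF_SIO_MULE so that the shared _save_parser_state can file the state under the right data source when a driver manages more than one input stream. A self-contained toy version of that routing pattern is sketched below; the class, key strings, and state dict are made up for illustration and are not the real driver's bookkeeping:

class ToyDriver(object):
    """Toy illustration of per-source state routing, not the real driver."""

    def __init__(self):
        # One saved-state slot per data source key.
        self._saved_states = {'phsen_abcdef_sio_mule': None,
                              'other_source': None}

    def _save_parser_state(self, parser_state, data_source_key):
        # Route the parser's reported state to the slot for its source.
        self._saved_states[data_source_key] = parser_state


driver = ToyDriver()
# The parser only sees a one-argument callback, so the driver hands it a
# lambda with the key already bound, as _build_telemetered_parser does above.
save_state = lambda state: driver._save_parser_state(state, 'phsen_abcdef_sio_mule')
save_state({'position': 1024})
print(driver._saved_states['phsen_abcdef_sio_mule'])  # {'position': 1024}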
Example #7
    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 17600]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 17600
        }
        stream_handle = open(
            os.path.join(RESOURCE_PATH, 'node59p1_shorter.dat'))
        self.parser = PhsenParser(self.config, state, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        result = self.parser.get_records(7)
        stream_handle.close()
        self.assertEqual(result, [
            self.particle_control, self.particle_a, self.particle_b,
            self.particle_c, self.particle_d, self.particle_e, self.particle_f
        ])
        # the remaining in process data is actually a particle with a bad sample
        in_process = [[15536, 16040, 1, 0], [16301, 16805, 1, 0],
                      [16998, 17502, 1, 0]]
        unprocessed = [[0, 172], [4100, 4171], [5899, 5968], [7697, 7764],
                       [9654, 9723], [11451, 11520], [15536, 16040],
                       [16301, 16805], [16998, 17600]]
        self.assert_state(in_process, unprocessed)
        self.assertEqual(self.publish_callback_value[0], self.particle_control)
        self.assertEqual(self.publish_callback_value[1], self.particle_a)
        self.assertEqual(self.publish_callback_value[2], self.particle_b)
        self.assertEqual(self.publish_callback_value[3], self.particle_c)
        self.assertEqual(self.publish_callback_value[4], self.particle_d)
        self.assertEqual(self.publish_callback_value[5], self.particle_e)
        self.assertEqual(self.publish_callback_value[6], self.particle_f)
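
test_get_many indexes into self.publish_callback_value, which implies the test's pub_callback keeps the most recent batch of published particles, and the state assertions read back whatever the state callback recorded. The sketch below is a hypothetical set of callback stubs consistent with that usage; the signatures and attribute names are assumptions, not the real test base class:

class CallbackStubMixin(object):
    """
    Hypothetical callbacks wired into PhsenParser by these unit tests; the
    signatures and stored attribute names are inferred from the assertions
    (e.g. publish_callback_value[0] above), not taken from the base class.
    """

    def state_callback(self, state, file_ingested=False):
        # Remember the parser's most recently reported state.
        self.state_callback_value = state
        self.file_ingested_value = file_ingested

    def pub_callback(self, particles):
        # Keep the most recent batch of published particles so that
        # publish_callback_value[i] can be compared against the expected particle.
        self.publish_callback_value = particles

    def exception_callback(self, exception):
        # Record any exception the parser reports while parsing.
        self.exception_callback_value = exception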