Example #1
    def test_mid_state_start(self):
        """
        Test starting the parser in a state in the middle of processing
        """
        log.debug(
            '-----------------------------------------------------------Starting test_mid_state_start'
        )

        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[174, 290]],
            StateKey.FILE_SIZE: 7
        }

        stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))

        self._parser = SioEngSioMuleParser(self.telem_config, new_state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(1)

        self.assert_result(result, [[232, 290, 1, 0]], [[232, 290]],
                           self.particle_d)

        result = self._parser.get_records(1)

        self.assert_result(result, [], [], self.particle_e)

        stream_handle.close()
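
The callback helpers passed into the parser above (self.state_callback, self.pub_callback, self.exception_callback) are not part of this excerpt. A minimal sketch of the kind of recording stubs such a test class typically defines; the attribute names they set are assumptions, not taken from the source:

    def state_callback(self, state):
        """Hypothetical stub: remember the most recent state reported by the parser."""
        self.state_callback_value = state

    def pub_callback(self, particles):
        """Hypothetical stub: collect the particles the parser publishes."""
        self.publish_callback_value = particles

    def exception_callback(self, exception):
        """Hypothetical stub: record any exception raised while parsing."""
        self.exception_callback_value = exception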
Example #2
    def test_long_stream(self):
        """
        Test a long stream of data, verifying particles at the start and end of the results
        """
        stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))
        # NOTE: using the unprocessed data state of 0,700 limits the file to reading
        # just 700 bytes, so even though the file is longer it only reads the
        # first 700

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 700]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        self._parser = SioEngSioMuleParser(self.telem_config, state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(12)

        self.assert_particle(result[0], self.particle_a)
        self.assert_particle(result[1], self.particle_b)
        self.assert_particle(result[2], self.particle_c)
        self.assert_particle(result[3], self.particle_d)
        self.assert_particle(result[4], self.particle_e)
        self.assert_particle(result[5], self.particle_f)

        self.assert_particle(result[-2], self.particle_11)
        self.assert_particle(result[-1], self.particle_12)

        self.assert_state([], [[696, 700]])

        stream_handle.close()
Example #3
    def test_simple(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """

        stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))
        # NOTE: using the unprocessed data state of 0,200 limits the file to reading
        # just 200 bytes, so even though the file is longer it only reads the first
        # 200. FILE_SIZE is also ignored but must be present, so a dummy value is set
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 200]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }
        self._parser = SioEngSioMuleParser(self.telem_config, state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(1)
        self.assert_result(result, [[58, 116, 1, 0], [116, 174, 1, 0]],
                           [[58, 200]], self.particle_a)

        result = self._parser.get_records(1)
        self.assert_result(result, [[116, 174, 1, 0]], [[116, 200]],
                           self.particle_b)

        result = self._parser.get_records(1)
        self.assert_result(result, [], [[174, 200]], self.particle_c)

        stream_handle.close()
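
For reference when reading the offsets in the asserts above: the state appears to track UNPROCESSED_DATA as a list of [start, end] byte ranges not yet examined, IN_PROCESS_DATA as [start, end, particle_count, particles_returned] entries for chunks that have been found but not fully returned, and FILE_SIZE as a required but unused key. A minimal sketch of such a state, with the meaning of each field inferred from these tests rather than confirmed by the parser source:

    # Illustrative only: field meanings below are inferred from the asserts in test_simple.
    example_state = {
        # byte ranges of the file that have not been examined yet
        StateKey.UNPROCESSED_DATA: [[58, 200]],
        # [start, end, particles_in_chunk, particles_returned] for partially handled chunks
        StateKey.IN_PROCESS_DATA: [[58, 116, 1, 0], [116, 174, 1, 0]],
        # must be present, but the value is not used by these tests
        StateKey.FILE_SIZE: 7
    }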
Example #4
    def test_in_process_start(self):
        """
        Test starting a parser with a state in the middle of processing
        """
        log.debug(
            '-------------------------------------------------------------Starting test_in_process_start'
        )
        new_state = {
            StateKey.IN_PROCESS_DATA: [[174, 232, 1, 0], [232, 290, 1, 0],
                                       [290, 348, 1, 0]],
            StateKey.UNPROCESSED_DATA: [[174, 600]],
            StateKey.FILE_SIZE: 7
        }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))
        self.parser = SioEngSioMuleParser(self.config, new_state,
                                          self.stream_handle,
                                          self.state_callback,
                                          self.pub_callback,
                                          self.exception_callback)

        result = self.parser.get_records(1)
        self.assertEqual(result, [self.particle_d])

        self.assert_result(result, [[232, 290, 1, 0], [290, 348, 1, 0]],
                           [[232, 600]], self.particle_d)

        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_e)
        self.assertEqual(result[1], self.particle_f)
        log.debug('raw data in result:::::: %s', result[1].raw_data)

        self.assert_state([], [[348, 600]])

        self.stream_handle.close()
Example #5
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed
        """
        log.debug(
            '-------------------------------------------------------------Starting test_set_state'
        )

        stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))

        # NOTE: using the unprocessed data state of 0,700 limits the file to reading
        # just 700 bytes, so even though the file is longer it only reads the first
        # 700. Also, FILE_SIZE must exist but is unused so a dummy value is inserted

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 700]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        self._parser = SioEngSioMuleParser(self.telem_config, state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(1)

        self.assert_particle(result[0], self.particle_a)

        new_state2 = {
            StateKey.IN_PROCESS_DATA: [[174, 232, 1, 0], [232, 290, 1, 0],
                                       [290, 348, 1, 0]],
            StateKey.UNPROCESSED_DATA: [[174, 600]],
            StateKey.FILE_SIZE: 7
        }

        log.debug("----------------- Setting State!------------")
        log.debug("New_state: %s", new_state2)
        self._parser.set_state(new_state2)

        result = self._parser.get_records(2)
        self.assert_particle(result[0], self.particle_d)
        self.assert_particle(result[1], self.particle_e)
        self.assert_state([[290, 348, 1, 0]], [[290, 600]])

        stream_handle.close()
Example #6
    def test_simple2(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """

        stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1.dat'))

        # A second test simple was written to use the node59p1.dat file instead
        # of the smaller STA15908.DAT. Unprocessed data was set to two sections
        # of the file so a reasonable number of particles would be created while
        # assuring the parser could read a larger file. FILE_SIZE is also ignored
        # but must be present, so a dummy value is set

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 5000], [7800, 8800]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        self._parser = SioEngSioMuleParser(self.telem_config, state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(1)

        self.assert_result(
            result, [[4190, 4244, 1, 0]],
            [[4190, 4244], [4336, 4394], [4853, 5000], [7800, 8800]],
            self.particle_AA)

        result = self._parser.get_records(1)
        self.assert_result(result, [],
                           [[4336, 4394], [4853, 5000], [7800, 8800]],
                           self.particle_BB)

        result = self._parser.get_records(1)
        self.assert_result(
            result, [],
            [[4336, 4394], [4853, 5000], [7800, 8664], [8792, 8800]],
            self.particle_CC)

        stream_handle.close()
Example #7
    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """

        stream_handle = open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'))

        # NOTE: using the unprocessed data state of 0,600 limits the file to reading
        # just 600 bytes, so even though the file is longer it only reads the first
        # 600

        log.debug(
            '--------------------------------------------------------Starting test_get_many'
        )

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 600]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        self._parser = SioEngSioMuleParser(self.telem_config, state,
                                           stream_handle, self.state_callback,
                                           self.pub_callback,
                                           self.exception_callback)

        result = self._parser.get_records(6)

        self.assert_state([[348, 406, 1, 0], [406, 464, 1, 0],
                           [464, 522, 1, 0], [522, 580, 1, 0]], [[348, 600]])

        self.assert_particle(result[0], self.particle_a)
        self.assert_particle(result[1], self.particle_b)
        self.assert_particle(result[2], self.particle_c)
        self.assert_particle(result[3], self.particle_d)
        self.assert_particle(result[4], self.particle_e)
        self.assert_particle(result[5], self.particle_f)

        stream_handle.close()
    def _build_telemetered_parser(self, parser_state, stream_in):
        """
        Build and return the telemetered parser
        @param parser_state starting parser state to pass to parser
        @param stream_in Handle of open file to pass to parser
        """

        config = self._parser_config[
            DataSourceKey.SIO_ENG_SIO_MULE_TELEMETERED]
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.sio_eng_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
            'SioEngSioMuleParserDataParticle'
        })
        log.debug("My Config in _build_telemetered_parser: %s", config)
        parser = SioEngSioMuleParser(
            config, parser_state, stream_in,
            lambda state: self._save_parser_state(
                state, DataSourceKey.SIO_ENG_SIO_MULE_TELEMETERED),
            self._data_callback, self._sample_exception_callback)
        log.debug("_build_parser::::   Built parser, returning %s",
                  type(parser))
        return parser
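
A sketch of how a driver might use _build_telemetered_parser when a new mule file arrives; the method name, the saved-state lookup, and the draining loop below are illustrative assumptions, not taken from the driver source:

    def _hypothetical_process_telemetered_file(self, file_path):
        """Illustrative only: run the telemetered parser over one mule file."""
        # Assumed: the driver keeps the last saved parser state per data source key.
        saved_state = self._saved_parser_state.get(
            DataSourceKey.SIO_ENG_SIO_MULE_TELEMETERED)
        with open(file_path, 'rb') as stream_in:
            parser = self._build_telemetered_parser(saved_state, stream_in)
            # Particles are delivered through self._data_callback as they are parsed;
            # keep pulling records until the parser has nothing left to return.
            while parser.get_records(100):
                pass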