Exemplo n.º 1
0
    def test_mid_state_start_recov(self):
        """
        Test starting the recovered parser in a state in the middle of
        processing a file.

        The parser starts with unprocessed data [174, 290] and no
        in-process chunks; the two records pulled should match
        particle_dr then particle_er, after which the window is fully
        consumed.
        """
        log.debug(
            '-----------------------------------------------------------Starting test_mid_state_start'
        )

        new_state = {
            StateKey.IN_PROCESS_DATA: [],
            StateKey.UNPROCESSED_DATA: [[174, 290]],
            # FILE_SIZE must be present but its value is unused here
            StateKey.FILE_SIZE: 7
        }

        # 'with' guarantees the handle is closed even if an assertion
        # below fails (the original leaked it on test failure)
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT')) as stream_handle:
            self._parser = SioEngSioRecoveredParser(self.recov_config, new_state,
                                                    stream_handle,
                                                    self.state_callback_recovered,
                                                    self.pub_callback,
                                                    self.exception_callback)

            result = self._parser.get_records(1)

            self.assert_result(result, [[232, 290, 1, 0]], [[232, 290]],
                               self.particle_dr,
                               recov_flag=True)

            result = self._parser.get_records(1)

            self.assert_result(result, [], [], self.particle_er, recov_flag=True)
Exemplo n.º 2
0
    def test_set_state_recov(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed.
        """
        # stray trailing tab removed from the original log message
        log.debug(
            '-------------------------------------------------------------Starting test_set_state'
        )

        # NOTE: using the unprocessed data state of 0,700 limits the file to reading
        # just 700 bytes, so even though the file is longer it only reads the first
        # 700. Also, FILE_SIZE must exist but is unused so a dummy value is inserted

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 700]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        # 'with' guarantees the handle is closed even if an assertion
        # below fails (the original leaked it on test failure)
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT')) as stream_handle:
            self._parser = SioEngSioRecoveredParser(self.recov_config, state,
                                                    stream_handle,
                                                    self.state_callback_recovered,
                                                    self.pub_callback,
                                                    self.exception_callback)

            result = self._parser.get_records(1)

            self.assert_particle(result[0], self.particle_ar)

            # jump to a state with three in-process chunks pending
            new_state2 = {
                StateKey.IN_PROCESS_DATA: [[174, 232, 1, 0], [232, 290, 1, 0],
                                           [290, 348, 1, 0]],
                StateKey.UNPROCESSED_DATA: [[174, 600]],
                StateKey.FILE_SIZE: 7
            }

            log.debug("----------------- Setting State!------------")
            log.debug("New_state: %s", new_state2)
            self._parser.set_state(new_state2)

            # the first two pending chunks should be returned, leaving one
            result = self._parser.get_records(2)
            self.assert_particle(result[0], self.particle_d)
            self.assert_particle(result[1], self.particle_e)
            self.assert_state([[290, 348, 1, 0]], [[290, 600]], recov_flag=True)
Exemplo n.º 3
0
    def test_get_many_recov(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """

        # NOTE: using the unprocessed data state of 0,600 limits the file to reading
        # just 600 bytes, so even though the file is longer it only reads the first
        # 600

        log.debug(
            '--------------------------------------------------------Starting test_get_many'
        )

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 600]],
            StateKey.IN_PROCESS_DATA: [],
            # FILE_SIZE must be present but its value is unused here
            StateKey.FILE_SIZE: 7
        }

        # 'with' guarantees the handle is closed even if an assertion
        # below fails (the original leaked it on test failure)
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT')) as stream_handle:
            self._parser = SioEngSioRecoveredParser(self.recov_config, state,
                                                    stream_handle,
                                                    self.state_callback_recovered,
                                                    self.pub_callback,
                                                    self.exception_callback)

            result = self._parser.get_records(6)

            # no more in process or unprocessed data
            self.assert_state([[348, 406, 1, 0], [406, 464, 1, 0],
                               [464, 522, 1, 0], [522, 580, 1, 0]], [[348, 600]],
                              recov_flag=True)

            self.assert_particle(result[0], self.particle_ar)
            self.assert_particle(result[1], self.particle_br)
            self.assert_particle(result[2], self.particle_cr)
            self.assert_particle(result[3], self.particle_dr)
            self.assert_particle(result[4], self.particle_er)
            self.assert_particle(result[5], self.particle_fr)
Exemplo n.º 4
0
    def create_recov_yml(self):
        """
        This utility creates a yml file of expected results from the
        recovered parser.
        Be sure to verify the results by eye before trusting!
        """
        # 'with' ensures the handle is closed even if parsing raises;
        # the redundant fid/stream_handle alias from the original is gone
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT'), 'r') as stream_handle:
            parser = SioEngSioRecoveredParser(self.recov_config, None,
                                              stream_handle,
                                              self.state_callback_recovered,
                                              self.pub_callback,
                                              self.exception_callback)

            particles = parser.get_records(30)

            self.particle_to_yml(particles, 'STA15908.yml')
Exemplo n.º 5
0
    def _build_recovered_parser(self, parser_state, stream_in):
        """
        Build and return the recovered parser
        @param parser_state starting parser state to pass to parser
        @param stream_in Handle of open file to pass to parser
        @retval a configured SioEngSioRecoveredParser instance
        """
        # NOTE(review): this mutates the dict stored in
        # self._parser_config in place (no copy), so later reads of that
        # entry see the added keys — confirm that is intended
        config = self._parser_config[DataSourceKey.SIO_ENG_SIO_MULE_RECOVERED]
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.sio_eng_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
            'SioEngSioRecoveredDataParticle'
        })
        log.debug("My Config: %s", config)

        # the state callback closes over the recovered data-source key so
        # the driver saves state under the correct stream
        parser = SioEngSioRecoveredParser(
            config, parser_state, stream_in,
            lambda state, ingested: self._save_parser_state(
                state, DataSourceKey.SIO_ENG_SIO_MULE_RECOVERED, ingested),
            self._data_callback, self._sample_exception_callback)
        return parser
Exemplo n.º 6
0
    def test_simple_recov(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """

        # NOTE: using the unprocessed data state of 0,200 limits the file to reading
        # just 200 bytes, so even though the file is longer it only reads the first
        # 200. FILE_SIZE is also ignored but must be present, so a dummy value is set
        state = {
            StateKey.UNPROCESSED_DATA: [[0, 200]],
            StateKey.IN_PROCESS_DATA: [],
            StateKey.FILE_SIZE: 7
        }

        # 'with' guarantees the handle is closed even if an assertion
        # below fails (the original leaked it on test failure)
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT')) as stream_handle:
            self._parser = SioEngSioRecoveredParser(self.recov_config, state,
                                                    stream_handle,
                                                    self.state_callback_recovered,
                                                    self.pub_callback,
                                                    self.exception_callback)

            # each call returns one particle and shrinks the pending windows
            result = self._parser.get_records(1)
            self.assert_result(result, [[58, 116, 1, 0], [116, 174, 1, 0]],
                               [[58, 200]],
                               self.particle_ar,
                               recov_flag=True)

            result = self._parser.get_records(1)
            self.assert_result(result, [[116, 174, 1, 0]], [[116, 200]],
                               self.particle_br,
                               recov_flag=True)

            result = self._parser.get_records(1)
            self.assert_result(result, [], [[174, 200]],
                               self.particle_cr,
                               recov_flag=True)
Exemplo n.º 7
0
    def test_long_stream_recov(self):
        """
        Test a long stream of records pulled in a single get_records call.
        """
        # NOTE: using the unprocessed data state of 0,700 limits the file to
        # reading just 700 bytes, so even though the file is longer it only
        # reads the first 700 (the original comment incorrectly said 1000)

        state = {
            StateKey.UNPROCESSED_DATA: [[0, 700]],
            StateKey.IN_PROCESS_DATA: [],
            # FILE_SIZE must be present but its value is unused here
            StateKey.FILE_SIZE: 7
        }

        # 'with' guarantees the handle is closed even if an assertion
        # below fails (the original leaked it on test failure)
        with open(os.path.join(RESOURCE_PATH, 'STA15908.DAT')) as stream_handle:
            self._parser = SioEngSioRecoveredParser(self.recov_config, state,
                                                    stream_handle,
                                                    self.state_callback_recovered,
                                                    self.pub_callback,
                                                    self.exception_callback)

            result = self._parser.get_records(12)

            self.assert_particle(result[0], self.particle_ar)
            self.assert_particle(result[1], self.particle_br)
            self.assert_particle(result[2], self.particle_cr)
            self.assert_particle(result[3], self.particle_dr)
            self.assert_particle(result[4], self.particle_er)
            self.assert_particle(result[5], self.particle_fr)

            # spot-check the tail of the batch as well
            self.assert_particle(result[-2], self.particle_11r)
            self.assert_particle(result[-1], self.particle_12r)

            self.assert_state([], [[696, 700]], recov_flag=True)