def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists.

        The first useful record in this file is corrupted and will be
        ignored.  We expect to get the metadata particle with the
        timestamp from the 2nd data record, all of the valid engineering
        data records, and a RecoverableSampleException delivered via the
        exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_BAD_DBG_PDBG.txt')

        # context manager guarantees the file is closed even if an
        # assertion below fails
        with open(file_path, 'r') as stream_handle:

            log.info(self.exception_callback_value)

            parser = DbgPdbgCsppParser(self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                                       stream_handle,
                                       self.exception_callback)

            # 7 particles requested (the previous "18 particles" comment did
            # not match the requested record count)
            particles = parser.get_records(7)

            self.assert_particles(particles, 'DBG_PDBG_bad_data_records.yml', RESOURCE_PATH)

        # the corrupted record must have been reported as recoverable;
        # assertIsInstance replaces the deprecated assert_(isinstance(...))
        self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
    def test_simple_telem(self):
        """
        Read test data, pull out data particles, and assert that the
        results are those we expected.

        Because most of these files are ignored and there are only a few
        records of useful data in each one test_simple is the primary test
        There is no need for a test_get_many or other tests to get more particles
        """
        # Note: since the recovered and telemetered parser and particles are common
        # to each other, testing one is sufficient, will be completely tested
        # in driver tests
        in_file = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        telem_config = self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_TELEMETERED)
        parser = DbgPdbgCsppParser(telem_config, in_file, self.exception_callback)

        particles = parser.get_records(8)
        log.debug("*** test_simple Num particles %s", len(particles))

        self.assert_particles(particles, '01554008_DBG_PDBG_telem.yml', RESOURCE_PATH)

        in_file.close()
    # Example #3
    def test_simple_telem(self):
        """
        Read test data, pull out data particles, and verify them against
        the expected results.

        Because most of these files are ignored and there are only a few
        records of useful data in each one test_simple is the primary test
        There is no need for a test_get_many or other tests to get more particles
        """
        stream = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        # Note: since the recovered and telemetered parser and particles are common
        # to each other, testing one is sufficient, will be completely tested
        # in driver tests
        parser = DbgPdbgCsppParser(
            self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_TELEMETERED),
            stream,
            self.exception_callback)

        result = parser.get_records(8)

        log.debug("*** test_simple Num particles %s", len(result))

        self.assert_particles(result, '01554008_DBG_PDBG_telem.yml',
                              RESOURCE_PATH)

        stream.close()
    def test_simple(self):
        """
        Read test data, pull out data particles, and assert that the
        results are those we expected.

        Because most of these files are ignored and there are only a few
        records of useful data in each one test_simple is the primary test
        There is no need for a test_get_many or other tests to get more particles
        """
        handle = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        # Note: since the recovered and telemetered parser and particles are common
        # to each other, testing one is sufficient, will be completely tested
        # in driver tests
        parser = DbgPdbgCsppParser(self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                                   None, handle,
                                   self.state_callback, self.pub_callback,
                                   self.exception_callback)

        particles = parser.get_records(8)

        log.debug("*** test_simple Num particles %s", len(particles))

        expected = self.get_dict_from_yml('01554008_DBG_PDBG_recov.yml')

        # check all the values against expected results
        for index, particle in enumerate(particles):
            self.assert_result(expected['data'][index], particle)

        handle.close()
    # Example #5
    def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists.

        The first useful record in this file is corrupted and will be
        ignored.  We expect to get the metadata particle with the
        timestamp from the 2nd data record, all of the valid engineering
        data records, and a RecoverableSampleException delivered via the
        exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_BAD_DBG_PDBG.txt')

        # context manager closes the file even when an assertion fails
        with open(file_path, 'r') as stream_handle:

            log.info(self.exception_callback_value)

            parser = DbgPdbgCsppParser(
                self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                stream_handle, self.exception_callback)

            # 7 particles requested (the previous "18 particles" comment did
            # not match the requested record count)
            particles = parser.get_records(7)

            self.assert_particles(particles, 'DBG_PDBG_bad_data_records.yml',
                                  RESOURCE_PATH)

        # the corrupted record must have been reported as recoverable;
        # assertIsInstance replaces the deprecated assert_(isinstance(...))
        self.assertIsInstance(self.exception_callback_value[0],
                              RecoverableSampleException)
    def test_simple(self):
        """
        Read test data, pull out data particles, and assert the results
        are those we expected.

        Because most of these files are ignored and there are only a few
        records of useful data in each one test_simple is the primary test
        There is no need for a test_get_many or other tests to get more particles
        """
        data_file = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        # Note: since the recovered and telemetered parser and particles are common
        # to each other, testing one is sufficient, will be completely tested
        # in driver tests
        parser = DbgPdbgCsppParser(
            self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED), None,
            data_file, self.state_callback, self.pub_callback,
            self.exception_callback)

        particles = parser.get_records(8)

        log.debug("*** test_simple Num particles %s", len(particles))

        expected = self.get_dict_from_yml('01554008_DBG_PDBG_recov.yml')

        # check all the values against expected results
        for index, particle in enumerate(particles):
            self.assert_result(expected['data'][index], particle)

        data_file.close()
    def test_mid_state_start(self):
        """
        Verify that the correct particles are retrieved when the parser
        starts with an offset (mid-file) state.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt')
        stream_handle = open(file_path, 'r')

        # position 5032 is the end of the 3rd data record, which would have produced the
        # metadata particle and the first 3 engineering particles
        initial_state = {
            StateKey.POSITION: 5032,
            StateKey.METADATA_EXTRACTED: True
        }

        parser = DbgPdbgCsppParser(
            self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
            initial_state, stream_handle, self.state_callback,
            self.pub_callback, self.exception_callback)

        # expect to get the 4th and 5th engineering particles next
        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        # assertEqual reports both values on failure, unlike assertTrue(a == b)
        self.assertEqual(len(particles), 2)

        expected_results = self.get_dict_from_yml(
            '01554008_DBG_PDBG_recov.yml')

        # skip the first 4 particles in the yml file due to mid state start
        offset = 4

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset],
                               particles[i])

        # now expect the state to be the end of the 5th data record and metadata sent
        the_new_state = {
            StateKey.POSITION: 10807,
            StateKey.METADATA_EXTRACTED: True
        }
        log.debug("********** expected state: %s", the_new_state)
        log.debug("******** new parser state: %s", parser._state)
        self.assertEqual(parser._state, the_new_state)

        stream_handle.close()
    def create_yml(self):
        """
        This utility creates a yml file
        Be sure to verify the results by eye before trusting!
        """
        fid = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        # the redundant stream_handle alias was dropped; fid is passed directly
        parser = DbgPdbgCsppParser(self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                                   None, fid,
                                   self.state_callback, self.pub_callback,
                                   self.exception_callback)

        records = parser.get_records(20)

        self.particle_to_yml(records, '01554008_DBG_PDBG_recov.yml')
        fid.close()
    def create_yml(self):
        """
        This utility creates a yml file
        Be sure to verify the results by eye before trusting!
        """
        source = open(os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt'), 'r')

        parser = DbgPdbgCsppParser(
            self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED), None,
            source, self.state_callback, self.pub_callback,
            self.exception_callback)

        self.particle_to_yml(parser.get_records(20),
                             '01554008_DBG_PDBG_recov.yml')
        source.close()
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt')
        stream_handle = open(file_path, 'r')

        expected_results = self.get_dict_from_yml('01554008_DBG_PDBG_recov.yml')

        parser = DbgPdbgCsppParser(self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                                   None, stream_handle,
                                   self.state_callback, self.pub_callback,
                                   self.exception_callback)

        # read all 8 particles from the file
        particles = parser.get_records(8)

        log.debug("Num particles: %s", len(particles))

        # assertEqual reports both values on failure, unlike assertTrue(a == b)
        self.assertEqual(len(particles), 8)

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # position 1528 is the byte at the start of the 2nd data record
        new_state = {StateKey.POSITION: 1528, StateKey.METADATA_EXTRACTED: True}

        parser.set_state(new_state)

        particles = parser.get_records(2)

        self.assertEqual(len(particles), 2)

        # offset in the expected results
        # should not get the first 2 status particles or the metadata particle
        offset = 3
        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset], particles[i])

        stream_handle.close()
    def test_mid_state_start(self):
        """
        Verify that the correct particles are retrieved when the parser
        starts with an offset (mid-file) state.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt')
        stream_handle = open(file_path, 'r')

        # position 5032 is the end of the 3rd data record, which would have produced the
        # metadata particle and the first 3 engineering particles
        initial_state = {StateKey.POSITION: 5032, StateKey.METADATA_EXTRACTED: True}

        parser = DbgPdbgCsppParser(self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED),
                                   initial_state, stream_handle,
                                   self.state_callback, self.pub_callback,
                                   self.exception_callback)

        # expect to get the 4th and 5th engineering particles next
        particles = parser.get_records(2)

        log.debug("Num particles: %s", len(particles))

        # assertEqual reports both values on failure, unlike assertTrue(a == b)
        self.assertEqual(len(particles), 2)

        expected_results = self.get_dict_from_yml('01554008_DBG_PDBG_recov.yml')

        # skip the first 4 particles in the yml file due to mid state start
        offset = 4

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset], particles[i])

        # now expect the state to be the end of the 5th data record and metadata sent
        the_new_state = {StateKey.POSITION: 10807, StateKey.METADATA_EXTRACTED: True}
        log.debug("********** expected state: %s", the_new_state)
        log.debug("******** new parser state: %s", parser._state)
        self.assertEqual(parser._state, the_new_state)

        stream_handle.close()
    def _build_parser(self, stream_handle):
        """Build a telemetered DBG/PDBG CSPP parser reading from stream_handle."""

        # one metadata particle class plus the two telemetered data classes
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY: DbgPdbgMetadataTelemeteredDataParticle,
            BATTERY_STATUS_CLASS_KEY: DbgPdbgTelemeteredBatteryParticle,
            GPS_ADJUSTMENT_CLASS_KEY: DbgPdbgTelemeteredGpsParticle,
        }
        parser_config = {
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: particle_classes,
        }

        return DbgPdbgCsppParser(parser_config, stream_handle,
                                 self._exception_callback)
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed.
        """
        file_path = os.path.join(RESOURCE_PATH, '01554008_DBG_PDBG.txt')
        stream_handle = open(file_path, 'r')

        expected_results = self.get_dict_from_yml(
            '01554008_DBG_PDBG_recov.yml')

        parser = DbgPdbgCsppParser(
            self.config.get(DbgPdbgDataTypeKey.DBG_PDBG_CSPP_RECOVERED), None,
            stream_handle, self.state_callback, self.pub_callback,
            self.exception_callback)

        # read all 8 particles from the file
        particles = parser.get_records(8)

        log.debug("Num particles: %s", len(particles))

        # assertEqual reports both values on failure, unlike assertTrue(a == b)
        self.assertEqual(len(particles), 8)

        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i], particles[i])

        # position 1528 is the byte at the start of the 2nd data record
        new_state = {
            StateKey.POSITION: 1528,
            StateKey.METADATA_EXTRACTED: True
        }

        parser.set_state(new_state)

        particles = parser.get_records(2)

        self.assertEqual(len(particles), 2)

        # offset in the expected results
        # should not get the first 2 status particles or the metadata particle
        offset = 3
        for i in range(len(particles)):
            self.assert_result(expected_results['data'][i + offset],
                               particles[i])

        stream_handle.close()