def test_simple(self):
        """
        Read test data and pull out data particles
        Assert that the results are those we expected.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')
        stream_handle = open(file_path, 'r')

        # Note: since the recovered and telemetered parser and particles are common
        # to each other, testing one is sufficient, will be completely tested
        # in driver tests

        parser = ParadJCsppParser(self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                                  None, stream_handle,
                                  self.state_callback, self.pub_callback,
                                  self.exception_callback)

        particles = parser.get_records(20)

        log.debug("*** test_simple Num particles %s", len(particles))

        # load a dictionary from the yml file
        test_data = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

        # check all the values against expected results.

        for i in range(len(particles)):

            self.assert_result(test_data['data'][i], particles[i])

        stream_handle.close()
# Exemplo n.º 2 (score: 0) — scraped-page separator; next method is from a different example file
    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # Note: since the recovered and telemetered parser and particles are
        # common to each other, testing one is sufficient; it will be
        # completely tested in driver tests.

        # 'with' guarantees the file is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            # try to get 2000 particles; there are only 194 data records,
            # so we should get 195 including the metadata particle
            particles = parser.get_records(2000)

            log.debug("*** test_get_many Num particles %s", len(particles))
            self.assertEqual(len(particles), 195)
# Exemplo n.º 3 (score: 0) — scraped-page separator; next method is from a different example file
    def test_simple(self):
        """
        Read test data and pull out data particles.
        Assert that the results are those we expected.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # Note: since the recovered and telemetered parser and particles are
        # common to each other, testing one is sufficient; it will be
        # completely tested in driver tests.

        # 'with' ensures the handle is closed even on assertion failure
        # (the original left it open when a check failed).
        with open(file_path, 'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            particles = parser.get_records(20)

            log.debug("*** test_simple Num particles %s", len(particles))

            # load the expected results from the yml file
            test_data = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

            # check all the values against expected results
            for i, particle in enumerate(particles):
                self.assert_result(test_data['data'][i], particle)
    def test_get_many(self):
        """
        Read test data and pull out multiple data particles at one time.
        Assert that the results are those we expected.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # Note: since the recovered and telemetered parser and particles are
        # common to each other, testing one is sufficient; it will be
        # completely tested in driver tests.

        # 'with' ensures the handle is closed even on assertion failure
        # (the original left it open when the count check failed).
        with open(file_path, 'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                None, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            # try to get 2000 particles; there are only 194 data records,
            # so we should get 195 including the metadata particle
            particles = parser.get_records(2000)

            log.debug("*** test_get_many Num particles %s", len(particles))
            self.assertEqual(len(particles), 195)
# Exemplo n.º 5 (score: 0) — scraped-page separator; next method is from a different example file
    def create_yml(self):
        """
        Utility that generates the expected-results yml file from test data.
        """
        # 'with' guarantees the handle is closed even if parsing raises
        # (the original leaked the file descriptor on error).
        with open(os.path.join(RESOURCE_PATH, "11079364_PPD_PARS.txt"),
                  O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._telemetered_parser_config,
                                      stream_handle, self.exception_callback)

            particles = parser.get_records(20)

            self.particle_to_yml(particles, "11079364_PPD_PARS_telem.yml")
# Exemplo n.º 6 (score: 0) — scraped-page separator; next method is from a different example file
    def create_yml(self):
        """
        Utility that generates the expected-results yml file from test data.
        """
        # 'with' guarantees the handle is closed even if parsing raises
        # (the original leaked the file descriptor on error).
        with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'),
                  O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._telemetered_parser_config,
                                      stream_handle, self.exception_callback)

            particles = parser.get_records(20)

            self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
    def create_yml(self):
        """
        Utility that generates the expected-results yml file from test data.
        """
        # 'with' guarantees the handle is closed even if parsing raises
        # (the original leaked the file descriptor on error).
        with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'),
                  'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_TELEMETERED),
                None, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            particles = parser.get_records(20)

            self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
# Exemplo n.º 8 (score: 0) — scraped-page separator; next method is from a different example file
    def create_yml(self):
        """
        Utility that generates the expected-results yml file from test data.
        """
        # 'with' guarantees the handle is closed even if parsing raises
        # (the original leaked the file descriptor on error).
        with open(os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt'),
                  'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_TELEMETERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            particles = parser.get_records(20)

            self.particle_to_yml(particles, '11079364_PPD_PARS_telem.yml')
# Exemplo n.º 9 (score: 0) — scraped-page separator; next method is from a different example file
    def test_additional_column(self):
        """
        Ensure that an additional column of data will cause an exception,
        reported through the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH,
                                 "11079364_PPB_PARS_ADDED_COLUMN.txt")

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle, self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
# Exemplo n.º 10 (score: 0) — scraped-page separator; next method is from a different example file
    def test_mid_state_start(self):
        """
        Make sure that we retrieve the correct particles when starting with
        an offset state.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # position 315 is the end of the first data record, which would have
        # produced the metadata particle and the first instrument particle
        initial_state = {
            StateKey.POSITION: 315,
            StateKey.METADATA_EXTRACTED: True
        }

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                initial_state, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            # expect to get the 2nd and 3rd instrument particles next
            particles = parser.get_records(2)

            log.debug("Num particles: %s", len(particles))

            # assertEqual gives a clearer failure message than assertTrue(==)
            self.assertEqual(len(particles), 2)

            expected_results = self.get_dict_from_yml('mid_state_start.yml')

            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i], particle)

            # now expect the state to be the end of the 4th data record with
            # the metadata already sent
            the_new_state = {
                StateKey.POSITION: 409,
                StateKey.METADATA_EXTRACTED: True
            }
            log.debug("********** expected state: %s", the_new_state)
            log.debug("******** new parser state: %s", parser._state)
            self.assertEqual(parser._state, the_new_state)
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # 11079364_PPB_PARS_recov.yml has the metadata and the first 19
        # instrument particles in it
        expected_results = self.get_dict_from_yml('11079364_PPB_PARS_recov.yml')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                None, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            particles = parser.get_records(2)

            log.debug("Num particles: %s", len(particles))

            # assertEqual gives a clearer failure message than assertTrue(==)
            self.assertEqual(len(particles), 2)

            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i], particle)

            # position 1067 is the byte at the start of the 18th data record
            new_state = {StateKey.POSITION: 1067,
                         StateKey.METADATA_EXTRACTED: True}

            parser.set_state(new_state)

            particles = parser.get_records(2)

            self.assertEqual(len(particles), 2)

            # offset in the expected results, into the 18th result
            offset = 18
            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i + offset],
                                   particle)
# Exemplo n.º 12 (score: 0) — scraped-page separator; next method is from a different example file
    def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists and a
        RecoverableSampleException is reported via the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, "11079364_BAD_PPB_PARS.txt")

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle, self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
            # 14 bad records
            self.assertEqual(len(self.exception_callback_value), 14)
# Exemplo n.º 13 (score: 0) — scraped-page separator; next method is from a different example file
    def test_simple(self):
        """
        Read recovered and telemetered test data and pull out data particles.
        Assert that the results are those we expected.
        """

        # Recovered
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')
        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle,
                                      self.exception_callback)

            particles = parser.get_records(20)

            log.debug("*** test_simple Num particles %s", len(particles))

            # check all the values against expected results
            self.assert_particles(particles, "11079364_PPB_PARS_recov.yml",
                                  RESOURCE_PATH)

        # Telemetered
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt')
        with open(file_path, O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._telemetered_parser_config,
                                      stream_handle,
                                      self.exception_callback)

            particles = parser.get_records(20)

            log.debug("*** test_simple Num particles %s", len(particles))

            # check all the values against expected results
            self.assert_particles(particles, "11079364_PPD_PARS_telem.yml",
                                  RESOURCE_PATH)
# Exemplo n.º 14 (score: 0) — scraped-page separator; next method is from a different example file
    def _build_parser(self, parser_state, infile, data_key=None):
        """
        Build and return the parser for the given data key.

        Raises ConfigurationException for an unrecognized data_key.
        """
        # Map each supported key to its (metadata, instrument) particle
        # classes; an unknown key is rejected before touching the config.
        particle_classes = {
            DataTypeKey.PARAD_J_CSPP_RECOVERED: (
                ParadJCsppMetadataRecoveredDataParticle,
                ParadJCsppInstrumentRecoveredDataParticle),
            DataTypeKey.PARAD_J_CSPP_TELEMETERED: (
                ParadJCsppMetadataTelemeteredDataParticle,
                ParadJCsppInstrumentTelemeteredDataParticle),
        }

        if data_key not in particle_classes:
            raise ConfigurationException(
                "Invalid data_key (%s) supplied to build parser" % data_key)

        metadata_class, data_class = particle_classes[data_key]

        config = self._parser_config.get(data_key)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.parad_j_cspp',
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                METADATA_PARTICLE_CLASS_KEY: metadata_class,
                DATA_PARTICLE_CLASS_KEY: data_class,
            }
        })

        return ParadJCsppParser(
            config, parser_state, infile,
            lambda state, ingested: self._save_parser_state(
                state, data_key, ingested), self._data_callback,
            self._sample_exception_callback)
# Exemplo n.º 15 (score: 0) — scraped-page separator; next method is from a different example file
    def test_additional_column(self):
        """
        Ensure that an additional column of data will cause an exception,
        reported through the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH,
                                 '11079364_PPB_PARS_ADDED_COLUMN.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            log.debug(self.exception_callback_value)

            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle,
                                      self.exception_callback)

            parser.get_records(1)

            log.debug("Exception callback value: %s",
                      self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
    def test_additional_column(self):
        """
        Ensure that an additional column of data will cause an exception,
        reported through the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH,
                                 '11079364_PPB_PARS_ADDED_COLUMN.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                None, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
# Exemplo n.º 17 (score: 0) — scraped-page separator; next method is from a different example file
    def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists and a
        RecoverableSampleException is reported via the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            log.debug(self.exception_callback_value)

            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle,
                                      self.exception_callback)

            parser.get_records(1)

            log.debug("Exception callback value: %s",
                      self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
            # 14 bad records
            self.assertEqual(len(self.exception_callback_value), 14)
# Exemplo n.º 18 (score: 0) — scraped-page separator; next method is from a different example file
    def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists and a
        RecoverableSampleException is reported via the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
            # 14 bad records
            self.assertEqual(self.count, 14)
# Exemplo n.º 19 (score: 0) — scraped-page separator; next method is from a different example file
    def test_additional_column(self):
        """
        Ensure that an additional column of data will cause an exception,
        reported through the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH,
                                 '11079364_PPB_PARS_ADDED_COLUMN.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
    def test_bad_data(self):
        """
        Ensure that bad data is skipped when it exists and a
        RecoverableSampleException is reported via the exception callback.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_BAD_PPB_PARS.txt')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            log.info(self.exception_callback_value)

            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                None, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            parser.get_records(1)

            log.info("Exception callback value: %s",
                     self.exception_callback_value)

            # assertIsNotNone gives a clearer failure message than
            # assertTrue(... is not None)
            self.assertIsNotNone(self.exception_callback_value)
            # 14 bad records
            self.assertEqual(self.count, 14)
    def test_mid_state_start(self):
        """
        Make sure that we retrieve the correct particles when starting with
        an offset state.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # position 315 is the end of the first data record, which would have
        # produced the metadata particle and the first instrument particle
        initial_state = {StateKey.POSITION: 315,
                         StateKey.METADATA_EXTRACTED: True}

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'rb') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED),
                initial_state, stream_handle,
                self.state_callback, self.pub_callback,
                self.exception_callback)

            # expect to get the 2nd and 3rd instrument particles next
            particles = parser.get_records(2)

            log.debug("Num particles: %s", len(particles))

            # assertEqual gives a clearer failure message than assertTrue(==)
            self.assertEqual(len(particles), 2)

            expected_results = self.get_dict_from_yml('mid_state_start.yml')

            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i], particle)

            # now expect the state to be the end of the 4th data record with
            # the metadata already sent
            the_new_state = {StateKey.POSITION: 409,
                             StateKey.METADATA_EXTRACTED: True}
            log.debug("********** expected state: %s", the_new_state)
            log.debug("******** new parser state: %s", parser._state)
            self.assertEqual(parser._state, the_new_state)
# Exemplo n.º 22 (score: 0) — scraped-page separator; next method is from a different example file
    def _build_parser(self, stream_handle):
        """
        Construct a recovered-stream ParadJCsppParser for the given handle.
        """
        # Both particle classes come from the recovered stream.
        classes = {
            METADATA_PARTICLE_CLASS_KEY: ParadJCsppMetadataRecoveredDataParticle,
            DATA_PARTICLE_CLASS_KEY: ParadJCsppInstrumentRecoveredDataParticle,
        }

        parser_config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.parad_j_cspp',
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: classes,
        }

        return ParadJCsppParser(parser_config,
                                stream_handle,
                                self._exception_callback)
# Exemplo n.º 23 (score: 0) — scraped-page separator; next method is from a different example file
    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and
        reading data, as if new data has been found and the state has
        changed.
        """
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')

        # 11079364_PPB_PARS_recov.yml has the metadata and the first 19
        # instrument particles in it
        expected_results = self.get_dict_from_yml(
            '11079364_PPB_PARS_recov.yml')

        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, 'r') as stream_handle:
            parser = ParadJCsppParser(
                self.config.get(DataTypeKey.PARAD_J_CSPP_RECOVERED), None,
                stream_handle, self.state_callback, self.pub_callback,
                self.exception_callback)

            particles = parser.get_records(2)

            log.debug("Num particles: %s", len(particles))

            # assertEqual gives a clearer failure message than assertTrue(==)
            self.assertEqual(len(particles), 2)

            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i], particle)

            # position 1067 is the byte at the start of the 18th data record
            new_state = {
                StateKey.POSITION: 1067,
                StateKey.METADATA_EXTRACTED: True
            }

            parser.set_state(new_state)

            particles = parser.get_records(2)

            self.assertEqual(len(particles), 2)

            # offset in the expected results, into the 18th result
            offset = 18
            for i, particle in enumerate(particles):
                self.assert_result(expected_results['data'][i + offset],
                                   particle)
# Exemplo n.º 24 (score: 0) — scraped-page separator; next method is from a different example file
    def test_simple(self):
        """
        Read recovered and telemetered test data and pull out data particles.
        Assert that the results are those we expected.
        """

        # Recovered
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPB_PARS.txt')
        # 'with' guarantees the handle is closed even if an assertion fails
        # (the original leaked the handle on test failure).
        with open(file_path, O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._recovered_parser_config,
                                      stream_handle, self.exception_callback)

            particles = parser.get_records(20)

            log.debug("*** test_simple Num particles %s", len(particles))

            # check all the values against expected results
            self.assert_particles(particles, "11079364_PPB_PARS_recov.yml",
                                  RESOURCE_PATH)

        # Telemetered
        file_path = os.path.join(RESOURCE_PATH, '11079364_PPD_PARS.txt')
        with open(file_path, O_MODE) as stream_handle:
            parser = ParadJCsppParser(self._telemetered_parser_config,
                                      stream_handle, self.exception_callback)

            particles = parser.get_records(20)

            log.debug("*** test_simple Num particles %s", len(particles))

            # check all the values against expected results
            self.assert_particles(particles, "11079364_PPD_PARS_telem.yml",
                                  RESOURCE_PATH)