Exemplo n.º 1
0
    def test_build_yml_file(self):
        """
        Testing helper, not a real parser test: parse the big data file and
        write the resulting particles to a .yml results file.
        """
        log.debug('CAG TEST: START BUILDING YML FILE')
        # Resolve the input relative to RESOURCE_PATH (used by the other
        # helpers in this file) instead of a hard-coded developer home
        # directory, so the helper runs on any checkout.
        stream_handle = open(os.path.join(RESOURCE_PATH, 'BIG_DATA_FILE.dat'), 'rb')
        try:
            self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                                   self.state_callback, self.pub_callback,
                                                   self.exception_callback)
            result = self.parser.get_records(50000)
            self.particle_to_yml(result, 'BIG_DATA_FILE.yml')
        finally:
            # Close the input even if parsing fails (it was leaked before).
            stream_handle.close()

        log.debug('CAG TEST: FINISHED BUILDING YML FILE')
Exemplo n.º 2
0
 def test_simple_with_wrong_header(self):
     """
     Read test data whose header is wrong for this instrument.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: FILE HAS THE WRONG HEADER')
     stream_handle = StringIO(TEST_DATA_wwh)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(1)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT WRONG HEADER')
     else:
         log.debug('CAG TEST: WRONG HEADER DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
Exemplo n.º 3
0
 def test_simple_with_no_data_recs(self):
     """
     Read test data. Should detect that there is no data between the header
     and footer. Data out should be a metadata particle only.
     """
     # NOTE(review): this test only logs the outcome, and it treats an
     # empty result as success even though the docstring says a metadata
     # particle is expected -- confirm which behavior is intended.
     log.debug('CAG TEST: NO DATA RECORDS')
     stream_handle = StringIO(TEST_DATA_ndr)
     self.parser = CtdpfCklWfpSioMuleParser(self.config,
                                            None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # retrieve one record from the parser
     result = self.parser.get_records(1)
     if not result:
         log.debug('CAG TEST: NO DATA RECORDS DETECTED')
     else:
         log.debug('CAG TEST: FAILED TO DETECT NO DATA RECORDS CASE')
Exemplo n.º 4
0
 def test_simple_with_input_too_short(self):
     """
     Read test data that is shorter than a valid record.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: FILE IS TOO SHORT')
     stream_handle = StringIO(TEST_DATA_wts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(1)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT FILE IS TOO SHORT CASE')
     else:
         log.debug('CAG TEST: FILE IS TOO SHORT DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
Exemplo n.º 5
0
 def test_simple_with_no_eop(self):
     """
     Read test data. Should detect that the End of Profile (eop) is missing.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: MISSING END OF PROFILE')
     stream_handle = StringIO(TEST_DATA_neop)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT MISSING END OF PROFILE')
     else:
         log.debug('CAG TEST: MISSING END OF PROFILE DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
Exemplo n.º 6
0
 def test_simple_with_no_time_stamp(self):
     """
     Read test data. Should detect that the data is missing the time stamp.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: NO TIME STAMP')
     stream_handle = StringIO(TEST_DATA_nts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT NO TIME STAMP')
     else:
         log.debug('CAG TEST: NO TIME STAMP DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
Exemplo n.º 7
0
 def test_simple_with_incorrect_header(self):
     """
     Read test data. Should detect that the header is NOT for a WC SIO block.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: INCORRECT HEADER')
     stream_handle = StringIO(TEST_DATA_bts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT INCORRECT HEADER')
     else:
         log.debug('CAG TEST: INCORRECT HEADER DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
Exemplo n.º 8
0
    def test_simple_with_no_decimation_factor(self):
        """
        Read test data. Should detect that there is NO decimation factor in
        the data and that the particles match the expected results.
        """
        log.debug('CAG TEST: FILE HAS NO DECIMATION FACTOR')
        stream_handle = StringIO(TEST_DATA_ndf)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # pull all four particles (metadata + three data records)
        result = self.parser.get_records(4)

        expected = [self.particle_meta_ndf,
                    self.particle_a,
                    self.particle_b,
                    self.particle_c]
        self.assertEqual(result, expected)
        log.debug('CAG TEST: NO DECIMATION FACTOR TEST PASSES')
    def test_build_yml_file(self):
        """
        Testing helper, not a real parser test: parse the big data file and
        write the resulting particles to a .yml results file.
        """
        log.debug('CAG TEST: START BUILDING YML FILE')
        # Resolve the input relative to RESOURCE_PATH (used by the other
        # helpers in this file) instead of a hard-coded developer home
        # directory, so the helper runs on any checkout.
        stream_handle = open(os.path.join(RESOURCE_PATH, 'BIG_DATA_FILE.dat'), 'rb')
        try:
            self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                                   self.state_callback, self.pub_callback,
                                                   self.exception_callback)
            result = self.parser.get_records(50000)
            self.particle_to_yml(result, 'BIG_DATA_FILE.yml')
        finally:
            # Close the input even if parsing fails (it was leaked before).
            stream_handle.close()

        log.debug('CAG TEST: FINISHED BUILDING YML FILE')
 def test_simple_with_wrong_header(self):
     """
     Read test data whose header is wrong for this instrument.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: FILE HAS THE WRONG HEADER')
     stream_handle = StringIO(TEST_DATA_wwh)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                            self.state_callback, self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(1)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT WRONG HEADER')
     else:
         log.debug('CAG TEST: WRONG HEADER DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
 def test_simple_with_input_too_short(self):
     """
     Read test data that is shorter than a valid record.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: FILE IS TOO SHORT')
     stream_handle = StringIO(TEST_DATA_wts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                            self.state_callback, self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(1)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT FILE IS TOO SHORT CASE')
     else:
         log.debug('CAG TEST: FILE IS TOO SHORT DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
 def test_simple_with_no_eop(self):
     """
     Read test data. Should detect that the End of Profile (eop) is missing.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: MISSING END OF PROFILE')
     stream_handle = StringIO(TEST_DATA_neop)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                            self.state_callback, self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT MISSING END OF PROFILE')
     else:
         log.debug('CAG TEST: MISSING END OF PROFILE DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
 def test_simple_with_no_data_recs(self):
     """
     Read test data. Should detect that there is no data between the header
     and footer. Data out should be a metadata particle only.
     """
     # NOTE(review): this test only logs the outcome, and it treats an
     # empty result as success even though the docstring says a metadata
     # particle is expected -- confirm which behavior is intended.
     log.debug('CAG TEST: NO DATA RECORDS')
     stream_handle = StringIO(TEST_DATA_ndr)
     self.parser = CtdpfCklWfpSioMuleParser(self.config,
                                            None,
                                            stream_handle,
                                            self.state_callback,
                                            self.pub_callback,
                                            self.exception_callback)
     # retrieve one record from the parser
     result = self.parser.get_records(1)
     if not result:
         log.debug('CAG TEST: NO DATA RECORDS DETECTED')
     else:
         log.debug('CAG TEST: FAILED TO DETECT NO DATA RECORDS CASE')
 def test_simple_with_no_time_stamp(self):
     """
     Read test data. Should detect that the data is missing the time stamp.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: NO TIME STAMP')
     stream_handle = StringIO(TEST_DATA_nts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                            self.state_callback, self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT NO TIME STAMP')
     else:
         log.debug('CAG TEST: NO TIME STAMP DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
 def test_simple_with_incorrect_header(self):
     """
     Read test data. Should detect that the header is NOT for a WC SIO block.
     The data stream should be rejected, producing no particles.
     """
     log.debug('CAG TEST: INCORRECT HEADER')
     stream_handle = StringIO(TEST_DATA_bts)
     self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                            self.state_callback, self.pub_callback,
                                            self.exception_callback)
     # next get records
     result = self.parser.get_records(4)
     if result:
         log.debug('CAG TEST: FAILED TO DETECT INCORRECT HEADER')
     else:
         log.debug('CAG TEST: INCORRECT HEADER DETECTED')
     # The original test only logged the outcome and could never fail;
     # assert the rejection so the test actually verifies the behavior.
     self.assertEqual(result, [])
    def test_simple_with_no_decimation_factor(self):
        """
        Read test data. Should detect that there is NO decimation factor in
        the data and that the particles match the expected results.
        """
        log.debug('CAG TEST: FILE HAS NO DECIMATION FACTOR')
        stream_handle = StringIO(TEST_DATA_ndf)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # pull all four particles (metadata + three data records)
        result = self.parser.get_records(4)

        expected = [self.particle_meta_ndf,
                    self.particle_a,
                    self.particle_b,
                    self.particle_c]
        self.assertEqual(result, expected)
        log.debug('CAG TEST: NO DECIMATION FACTOR TEST PASSES')
Exemplo n.º 17
0
class CtdpfCklWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    ctdpf_ckl_wfp_sio_mule Parser unit test suite
    """
    def state_callback(self, file_ingested):
        """ Call back method to watch what comes in via the position callback """
        self.file_ingested_value = file_ingested

    def pub_callback(self, pub):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Callback method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        """Set the parser configuration and reset the captured callback values."""
        ParserUnitTestCase.setUp(self)

        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS: ['CtdpfCklWfpSioMuleDataParticle',
                                                     'CtdpfCklWfpSioMuleMetadataParticle']
            }

        self.file_ingested_value = None
        self.state_callback_value = None
        self.publish_callback_value = None

    def calc_timestamp(self, start, increment, sample_idx):
        """Return the NTP time of the sample at index sample_idx."""
        new_time = start + (increment * sample_idx)
        return float(ntplib.system_to_ntp_time(new_time))

    def assert_result(self, result, particle, ingested):
        """Assert result is exactly [particle], the ingested flag matches,
        and the particle was published."""
        self.assertEqual(result, [particle])
        self.assertEqual(self.file_ingested_value, ingested)
        self.assert_(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def particle_to_yml(self, particles, filename, mode='w'):
        """
        This is added as a testing helper, not actually as part of the parser tests. Since the same particles
        will be used for the driver test it is helpful to write them to .yml in the same form they need in the
        results.yml fids here.
        """
        # open write append, if you want to start from scratch manually delete this fid
        fid = open(os.path.join(RESOURCE_PATH, filename), mode)
        try:
            fid.write('header:\n')
            fid.write("    particle_object: 'MULTIPLE'\n")
            fid.write("    particle_type: 'MULTIPLE'\n")
            fid.write('data:\n')

            for i in range(0, len(particles)):
                particle_dict = particles[i].generate_dict()

                fid.write('  - _index: %d\n' % (i + 1))

                fid.write('    particle_object: %s\n' % particles[i].__class__.__name__)
                fid.write('    particle_type: %s\n' % particle_dict.get('stream_name'))
                fid.write('    internal_timestamp: %f\n' % particle_dict.get('internal_timestamp'))

                for val in particle_dict.get('values'):
                    if isinstance(val.get('value'), float):
                        fid.write('    %s: %16.16f\n' % (val.get('value_id'), val.get('value')))
                    else:
                        fid.write('    %s: %s\n' % (val.get('value_id'), val.get('value')))
        finally:
            # Close the output even if a particle fails to serialize.
            fid.close()

    def test_build_yml_file(self):
        """
        Testing helper, not a real parser test: parse the big data file and
        write the resulting particles to a .yml results file.
        """
        log.debug('CAG TEST: START BUILDING YML FILE')
        # Resolve the input relative to RESOURCE_PATH instead of a
        # hard-coded developer home directory, so it runs on any checkout.
        stream_handle = open(os.path.join(RESOURCE_PATH, 'BIG_DATA_FILE.dat'), 'rb')
        try:
            self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                                   self.state_callback, self.pub_callback,
                                                   self.exception_callback)
            result = self.parser.get_records(50000)
            self.particle_to_yml(result, 'BIG_DATA_FILE.yml')
        finally:
            # Close the input even if parsing fails (it was leaked before).
            stream_handle.close()

        log.debug('CAG TEST: FINISHED BUILDING YML FILE')

    def test_build_esc_free(self):
        """
        Testing helper: strip the SIO escape sequences from the big data file
        and write the un-escaped copy into the test resource directory.
        """
        log.debug('Remove ESC sequences')
        # Read via RESOURCE_PATH instead of a hard-coded absolute path, and
        # close the handle when done (it was leaked before).
        with open(os.path.join(RESOURCE_PATH, 'BIG_DATA_FILE.dat'), 'rb') as f:
            input_buffer = f.read()

        log.debug('BUFFER BEFORE %d', len(input_buffer))
        # Un-escape the SIO sequences: 0x18 0x6b -> 0x2b, 0x18 0x58 -> 0x18
        input_buffer = input_buffer.replace(b'\x18\x6b', b'\x2b')
        input_buffer = input_buffer.replace(b'\x18\x58', b'\x18')
        log.debug('BUFFER AFTER %d', len(input_buffer))

        # BUG FIX: the output was opened in text mode ('w') and never closed.
        # The buffer is raw bytes, so write in binary mode and let the
        # context manager flush and close the file.
        with open(os.path.join(RESOURCE_PATH, 'escBIG_DATA_FILE.dat'), 'wb') as fid:
            fid.write(input_buffer)
class CtdpfCklWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    ctdpf_ckl_wfp_sio_mule Parser unit test suite
    """
    def state_callback(self, file_ingested):
        """ Call back method to watch what comes in via the position callback """
        self.file_ingested_value = file_ingested

    def pub_callback(self, pub):
        """ Call back method to watch what comes in via the publish callback """
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """ Callback method to watch what comes in via the exception callback """
        self.exception_callback_value = exception

    def setUp(self):
        """Build the parser config and the expected particles/timestamps."""
        ParserUnitTestCase.setUp(self)

        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS: ['CtdpfCklWfpSioMuleDataParticle',
                                                     'CtdpfCklWfpSioMuleMetadataParticle']
            }

        # Define test data particles and their associated timestamps which will be
        # compared with returned results.
        # NOTE(review): the second argument is a Python 2 str literal; under
        # Python 3 struct.unpack requires bytes (b'...') -- confirm the
        # target interpreter before porting.
        timefields = struct.unpack('>II', '\x52\x4e\x75\x82\x52\x4e\x76\x9a')
        start_time = int(timefields[0])
        end_time = int(timefields[1])

        # As there are only three records in the test data, divide by 3.
        time_increment = float(end_time - start_time) / 3.0

        self.start_timestamp = self.calc_timestamp(start_time, time_increment, 0)
        self.particle_meta = CtdpfCklWfpSioMuleMetadataParticle(EXPECTED_TIME_STAMP,
                                                                internal_timestamp=self.start_timestamp)
        self.particle_meta_ndf = CtdpfCklWfpSioMuleMetadataParticle(EXPECTED_TIME_STAMP_ndf,
                                                                    internal_timestamp=self.start_timestamp)
        self.particle_a = CtdpfCklWfpSioMuleDataParticle(EXPECTED_VALUES_1,
                                                         internal_timestamp=self.start_timestamp)

        self.timestamp_2 = self.calc_timestamp(start_time, time_increment, 1)
        self.particle_b = CtdpfCklWfpSioMuleDataParticle(EXPECTED_VALUES_2,
                                                         internal_timestamp=self.timestamp_2)

        timestamp_3 = self.calc_timestamp(start_time, time_increment, 2)
        self.particle_c = CtdpfCklWfpSioMuleDataParticle(EXPECTED_VALUES_3,
                                                         internal_timestamp=timestamp_3)

        # uncomment to generate yml
        #self.particle_to_yml(self.particle_meta)
        #self.particle_to_yml(self.particle_a)
        #self.particle_to_yml(self.particle_b)
        #self.particle_to_yml(self.particle_c)

        self.file_ingested_value = None
        self.state_callback_value = None
        self.publish_callback_value = None

    def calc_timestamp(self, start, increment, sample_idx):
        """Return the NTP time of the sample at index sample_idx."""
        new_time = start + (increment * sample_idx)
        return float(ntplib.system_to_ntp_time(new_time))

    def assert_result(self, result, particle, ingested):
        """Assert result is exactly [particle], the ingested flag matches,
        and the particle was published."""
        self.assertEqual(result, [particle])
        self.assertEqual(self.file_ingested_value, ingested)
        self.assert_(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def particle_to_yml(self, particle):
        """
        This is added as a testing helper, not actually as part of the parser tests. Since the same particles
        will be used for the driver test it is helpful to write them to .yml in the same form they need in the
        results.yml files here.
        """
        # BUG FIX: this method was decorated @staticmethod while still taking
        # ``self``, so the documented calls self.particle_to_yml(particle)
        # would bind the particle to ``self`` and raise a missing-argument
        # TypeError. It is an ordinary instance method.
        particle_dict = particle.generate_dict()
        # open write append, if you want to start from scratch manually delete this file
        fid = open('particle.yml', 'a')
        try:
            fid.write('  - _index: 1\n')
            fid.write('    internal_timestamp: %f\n' % particle_dict.get('internal_timestamp'))
            fid.write('    particle_object: %s\n' % particle.__class__.__name__)
            fid.write('    particle_type: %s\n' % particle_dict.get('stream_name'))
            for val in particle_dict.get('values'):
                if isinstance(val.get('value'), float):
                    fid.write('    %s: %16.20f\n' % (val.get('value_id'), val.get('value')))
                else:
                    fid.write('    %s: %s\n' % (val.get('value_id'), val.get('value')))
        finally:
            # Close the output even if a value fails to serialize.
            fid.close()

    def test_simple_with_decimation_factor(self):
        """
        Read test data. Should detect that there is a decimation factor in the data.
        Check that the data matches the expected results.
        """
        log.debug('CAG TEST: FILE HAS DECIMATION FACTOR')
        stream_handle = StringIO(TEST_DATA_wdf)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)

        self.assertEqual(result, [self.particle_meta,
                                  self.particle_a,
                                  self.particle_b,
                                  self.particle_c])
        log.debug('CAG TEST: DECIMATION FACTOR TEST PASSES')

    def test_simple_with_no_decimation_factor(self):
        """
        Read test data. Should detect that there is NO decimation factor in the data.
        Check that the data matches the expected results.
        """
        log.debug('CAG TEST: FILE HAS NO DECIMATION FACTOR')
        stream_handle = StringIO(TEST_DATA_ndf)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)

        self.assertEqual(result, [self.particle_meta_ndf,
                                  self.particle_a,
                                  self.particle_b,
                                  self.particle_c])
        log.debug('CAG TEST: NO DECIMATION FACTOR TEST PASSES')

    def test_simple_with_incorrect_header(self):
        """
        Read test data. Should detect that the header is NOT for a WC SIO block.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: INCORRECT HEADER')
        stream_handle = StringIO(TEST_DATA_bts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT INCORRECT HEADER')
        else:
            log.debug('CAG TEST: INCORRECT HEADER DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_bad_time_stamp(self):
        """
        Read test data. Should detect that the data has a bad time stamp (only 7 bytes).
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: BAD TIME STAMP')
        # NOTE(review): uses TEST_DATA_bts, the same input as
        # test_simple_with_incorrect_header -- confirm this is intended and
        # not a copy/paste of the wrong constant.
        stream_handle = StringIO(TEST_DATA_bts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT BAD TIME STAMP')
        else:
            log.debug('CAG TEST: BAD TIME STAMP DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_no_time_stamp(self):
        """
        Read test data. Should detect that the data is missing the time stamp.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: NO TIME STAMP')
        stream_handle = StringIO(TEST_DATA_nts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT NO TIME STAMP')
        else:
            log.debug('CAG TEST: NO TIME STAMP DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_bad_eop(self):
        """
        Read test data. Should detect that the End of Profile (eop) is not all "F"s.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: BAD END OF PROFILE')
        stream_handle = StringIO(TEST_DATA_beop)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT BAD END OF PROFILE')
        else:
            log.debug('CAG TEST: BAD END OF PROFILE DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_no_eop(self):
        """
        Read test data. Should detect that the End of Profile (eop) is missing.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: MISSING END OF PROFILE')
        stream_handle = StringIO(TEST_DATA_neop)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT MISSING END OF PROFILE')
        else:
            log.debug('CAG TEST: MISSING END OF PROFILE DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_no_data_recs(self):
        """
        Read test data. Should detect that there is no data between the header and footer.
        Data out should be a metadata particle only.
        """
        # NOTE(review): this test treats an empty result as success even
        # though the docstring says a metadata particle is expected --
        # confirm which behavior is intended before asserting either way.
        log.debug('CAG TEST: NO DATA RECORDS')
        stream_handle = StringIO(TEST_DATA_ndr)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(1)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT NO DATA RECORDS CASE')
        else:
            log.debug('CAG TEST: NO DATA RECORDS DETECTED')

    def test_simple_with_input_too_short(self):
        """
        Read test data that is shorter than a valid record.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: FILE IS TOO SHORT')
        stream_handle = StringIO(TEST_DATA_wts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(1)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT FILE IS TOO SHORT CASE')
        else:
            log.debug('CAG TEST: FILE IS TOO SHORT DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])

    def test_simple_with_wrong_header(self):
        """
        Read test data whose header is wrong for this instrument.
        The data stream should be rejected, producing no particles.
        """
        log.debug('CAG TEST: FILE HAS THE WRONG HEADER')
        stream_handle = StringIO(TEST_DATA_wwh)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(1)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT WRONG HEADER')
        else:
            log.debug('CAG TEST: WRONG HEADER DETECTED')
        # The original test only logged the outcome and could never fail;
        # assert the rejection so the test actually verifies the behavior.
        self.assertEqual(result, [])
class CtdpfCklWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    ctdpf_ckl_wfp_sio_mule Parser unit test suite
    """
    def state_callback(self, file_ingested):
        """Capture the file-ingested flag delivered by the position callback."""
        self.file_ingested_value = file_ingested

    def pub_callback(self, pub):
        """Capture whatever arrives via the publish callback for later checks."""
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """Capture any exception routed through the exception callback."""
        self.exception_callback_value = exception

    def setUp(self):
        """Build the parser config and reset the callback-capture attributes."""
        ParserUnitTestCase.setUp(self)

        particle_classes = ['CtdpfCklWfpSioMuleDataParticle',
                            'CtdpfCklWfpSioMuleMetadataParticle']
        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS: particle_classes,
        }

        # Reset the values recorded by the various callbacks.
        self.file_ingested_value = None
        self.state_callback_value = None
        self.publish_callback_value = None

    def calc_timestamp(self, start, increment, sample_idx):
        """Return the NTP timestamp of sample *sample_idx* in a profile."""
        return float(ntplib.system_to_ntp_time(start + increment * sample_idx))

    def assert_result(self, result, particle, ingested):
        """
        Assert that *result* is exactly [particle], that the file-ingested
        flag matches *ingested*, and that the particle was also published.
        """
        self.assertEqual(result, [particle])
        self.assertEqual(self.file_ingested_value, ingested)
        # assertTrue replaces the deprecated unittest assert_ alias.
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def particle_to_yml(self, particles, filename, mode='w'):
        """
        Testing helper, not part of the parser tests proper: write *particles*
        to a .yml file under RESOURCE_PATH in the form the driver-test
        results.yml fids expect.

        @param particles list of particles to serialize
        @param filename name of the .yml file to create in RESOURCE_PATH
        @param mode file open mode; pass 'a' to append, 'w' to start fresh
        """
        # The with-statement guarantees the fid is closed even if a particle
        # fails to serialize (the original leaked the handle on error).
        with open(os.path.join(RESOURCE_PATH, filename), mode) as fid:
            fid.write('header:\n')
            fid.write("    particle_object: 'MULTIPLE'\n")
            fid.write("    particle_type: 'MULTIPLE'\n")
            fid.write('data:\n')

            # enumerate replaces the manual range(len(...)) index loop; the
            # yml index is 1-based.
            for index, particle in enumerate(particles, start=1):
                particle_dict = particle.generate_dict()

                fid.write('  - _index: %d\n' % index)
                fid.write('    particle_object: %s\n' % particle.__class__.__name__)
                fid.write('    particle_type: %s\n' % particle_dict.get('stream_name'))
                fid.write('    internal_timestamp: %f\n' % particle_dict.get('internal_timestamp'))

                for val in particle_dict.get('values'):
                    if isinstance(val.get('value'), float):
                        fid.write('    %s: %16.16f\n' % (val.get('value_id'), val.get('value')))
                    else:
                        fid.write('    %s: %s\n' % (val.get('value_id'), val.get('value')))

    def test_build_yml_file(self):
        """
        Helper "test": parse a large data file and dump every resulting
        particle to a .yml file for use as expected driver-test results.
        """
        log.debug('CAG TEST: START BUILDING YML FILE')
        stream_handle = open('/home/cgoodrich/Workspace/code/marine-integrations/mi/dataset/driver/ctdpf_ckl/wfp_sio_mule/resource/BIG_DATA_FILE.dat', 'rb')
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None, stream_handle,
                                               self.state_callback, self.pub_callback,
                                               self.exception_callback)
        # Pull effectively "all" records in one call, then serialize them.
        particles = self.parser.get_records(50000)
        self.particle_to_yml(particles, 'BIG_DATA_FILE.yml')
        log.debug('CAG TEST: FINISHED BUILDING YML FILE')

    def test_build_esc_free(self):
        """
        Helper that strips SIO escape sequences from a raw data file and
        writes the unescaped bytes to escBIG_DATA_FILE.dat in RESOURCE_PATH.
        """
        log.debug('Remove ESC sequences')
        FILENAME = '/home/cgoodrich/Workspace/code/marine-integrations/mi/dataset/driver/ctdpf_ckl/wfp_sio_mule/resource/BIG_DATA_FILE.dat'
        f = open (FILENAME, "rb")

        input_buffer = f.read()
        log.debug('BUFFER BEFORE %d', len(input_buffer))
        # Un-escape the SIO sequences: 0x18 0x6b -> 0x2b and 0x18 0x58 -> 0x18.
        input_buffer = input_buffer.replace(b'\x18\x6b', b'\x2b')
        input_buffer = input_buffer.replace(b'\x18\x58', b'\x18')
        log.debug('BUFFER AFTER %d', len(input_buffer))

        # NOTE(review): the output is opened in text mode 'w' although the
        # buffer is binary, and neither f nor fid is closed in the visible
        # code -- this method appears truncated here; confirm against the
        # full source before relying on it.
        fid = open(os.path.join(RESOURCE_PATH, 'escBIG_DATA_FILE.dat'), 'w')
        fid.write(input_buffer)
Exemplo n.º 20
0
    def _build_parser(self, parser_state, infile, data_key=None):
        """
        Build and return the parser matching data_key.

        @param parser_state previously saved parser state to restore (or None)
        @param infile open file handle the parser will read from
        @param data_key DataTypeKey selecting recovered vs telemetered parser
        @retval the constructed parser
        @raise ConfigurationException if data_key is not a recognized key
        """
        # Default the parser to None
        parser = None

        config = self._parser_config.get(data_key)

        #
        # If the key is CTDPF_CKL_WFP, build the ctdpf_ckl_wfp parser and
        # provide a config that includes the specific recovered particle types.
        #
        if data_key == DataTypeKey.CTDPF_CKL_WFP:
            log.debug('CAG DRIVER - build parser for %s. State is %s',
                      data_key, parser_state)
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.ctdpf_ckl_wfp_particles',
                DataSetDriverConfigKeys.PARTICLE_CLASS: None,
                DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                    INSTRUMENT_DATA_PARTICLE_CLASS:
                    CtdpfCklWfpRecoveredDataParticle,
                    METADATA_PARTICLE_CLASS:
                    CtdpfCklWfpRecoveredMetadataParticle
                }
            })

            # The recovered parser additionally needs the file size.
            parser = CtdpfCklWfpParser(
                config, parser_state, infile,
                lambda state, ingested: self._save_parser_state(
                    state, data_key, ingested), self._data_callback,
                self._sample_exception_callback, os.path.getsize(infile.name))
        #
        # If the key is CTDPF_CKL_WFP_SIO_MULE, build the ctdpf_ckl_wfp_sio_mule parser and
        # provide a config that includes the specific telemetered particle types.
        #
        elif data_key == DataTypeKey.CTDPF_CKL_WFP_SIO_MULE:
            log.debug('CAG DRIVER - build parser for %s. State is %s',
                      data_key, parser_state)
            config.update({
                DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
                DataSetDriverConfigKeys.PARTICLE_CLASS: None,
                DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                    INSTRUMENT_DATA_PARTICLE_CLASS:
                    CtdpfCklWfpSioMuleDataParticle,
                    METADATA_PARTICLE_CLASS: CtdpfCklWfpSioMuleMetadataParticle
                }
            })

            parser = CtdpfCklWfpSioMuleParser(
                config, parser_state, infile,
                lambda state: self._save_parser_state(
                    state, DataTypeKey.CTDPF_CKL_WFP_SIO_MULE),
                self._data_callback, self._sample_exception_callback)
        else:
            # BUG FIX: the format string and its argument were passed as two
            # separate exception args, so the '%s' was never substituted into
            # the message; format the message explicitly instead.
            raise ConfigurationException(
                'Bad Configuration: %s - Failed to build ctdpf_ckl_wfp parser'
                % config)

        return parser
Exemplo n.º 21
0
class CtdpfCklWfpSioMuleParserUnitTestCase(ParserUnitTestCase):
    """
    ctdpf_ckl_wfp_sio_mule Parser unit test suite
    """
    def state_callback(self, file_ingested):
        """Capture the file-ingested flag delivered by the position callback."""
        self.file_ingested_value = file_ingested

    def pub_callback(self, pub):
        """Capture whatever arrives via the publish callback for later checks."""
        self.publish_callback_value = pub

    def exception_callback(self, exception):
        """Capture any exception routed through the exception callback."""
        self.exception_callback_value = exception

    def setUp(self):
        """Build the parser config and the expected particles/timestamps."""
        ParserUnitTestCase.setUp(self)

        self.config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.ctdpf_ckl_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS: [
                'CtdpfCklWfpSioMuleDataParticle',
                'CtdpfCklWfpSioMuleMetadataParticle'
            ]
        }

        # Decode the packed big-endian profile start/end times; these drive
        # the expected timestamps compared against the parser's output.
        start_time, end_time = struct.unpack('>II', '\x52\x4e\x75\x82\x52\x4e\x76\x9a')
        start_time = int(start_time)
        end_time = int(end_time)

        # As there are only three records in the test data, divide by 3.
        time_increment = float(end_time - start_time) / 3.0

        self.start_timestamp = self.calc_timestamp(start_time, time_increment, 0)
        self.particle_meta = CtdpfCklWfpSioMuleMetadataParticle(
            EXPECTED_TIME_STAMP, internal_timestamp=self.start_timestamp)
        self.particle_meta_ndf = CtdpfCklWfpSioMuleMetadataParticle(
            EXPECTED_TIME_STAMP_ndf, internal_timestamp=self.start_timestamp)
        self.particle_a = CtdpfCklWfpSioMuleDataParticle(
            EXPECTED_VALUES_1, internal_timestamp=self.start_timestamp)

        self.timestamp_2 = self.calc_timestamp(start_time, time_increment, 1)
        self.particle_b = CtdpfCklWfpSioMuleDataParticle(
            EXPECTED_VALUES_2, internal_timestamp=self.timestamp_2)

        timestamp_3 = self.calc_timestamp(start_time, time_increment, 2)
        self.particle_c = CtdpfCklWfpSioMuleDataParticle(
            EXPECTED_VALUES_3, internal_timestamp=timestamp_3)

        # Uncomment to generate yml from these expected particles:
        #self.particle_to_yml(self.particle_meta)
        #self.particle_to_yml(self.particle_a)
        #self.particle_to_yml(self.particle_b)
        #self.particle_to_yml(self.particle_c)

        # Reset the values recorded by the callbacks.
        self.file_ingested_value = None
        self.state_callback_value = None
        self.publish_callback_value = None

    def calc_timestamp(self, start, increment, sample_idx):
        """Return the NTP timestamp of sample *sample_idx* in a profile."""
        return float(ntplib.system_to_ntp_time(start + increment * sample_idx))

    def assert_result(self, result, particle, ingested):
        """
        Assert that *result* is exactly [particle], that the file-ingested
        flag matches *ingested*, and that the particle was also published.
        """
        self.assertEqual(result, [particle])
        self.assertEqual(self.file_ingested_value, ingested)
        # assertTrue replaces the deprecated unittest assert_ alias.
        self.assertTrue(isinstance(self.publish_callback_value, list))
        self.assertEqual(self.publish_callback_value[0], particle)

    def particle_to_yml(self, particle):
        """
        Testing helper, not part of the parser tests proper: append *particle*
        to particle.yml in the form the driver-test results.yml files expect.

        @param particle the particle to serialize
        """
        # BUG FIX: the original was decorated @staticmethod while still taking
        # 'self', so any self.particle_to_yml(particle) call bound the particle
        # to 'self' and raised TypeError for the missing 'particle' argument.
        # Dropping the decorator restores the instance-method call form used
        # by the (commented-out) callers in setUp.
        particle_dict = particle.generate_dict()
        # open write append; to start from scratch manually delete this file
        with open('particle.yml', 'a') as fid:
            fid.write('  - _index: 1\n')
            fid.write('    internal_timestamp: %f\n' %
                      particle_dict.get('internal_timestamp'))
            fid.write('    particle_object: %s\n' % particle.__class__.__name__)
            fid.write('    particle_type: %s\n' % particle_dict.get('stream_name'))
            for val in particle_dict.get('values'):
                if isinstance(val.get('value'), float):
                    fid.write('    %s: %16.20f\n' %
                              (val.get('value_id'), val.get('value')))
                else:
                    fid.write('    %s: %s\n' %
                              (val.get('value_id'), val.get('value')))

    def test_simple_with_decimation_factor(self):
        """
        Parse data that contains a decimation factor and verify the metadata
        particle plus all three data particles come back as expected.
        """
        log.debug('CAG TEST: FILE HAS DECIMATION FACTOR')
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               StringIO(TEST_DATA_wdf),
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        particles = self.parser.get_records(4)

        expected = [self.particle_meta, self.particle_a,
                    self.particle_b, self.particle_c]
        self.assertEqual(particles, expected)
        log.debug('CAG TEST: DECIMATION FACTOR TEST PASSES')

    def test_simple_with_no_decimation_factor(self):
        """
        Parse data without a decimation factor and verify the metadata
        particle plus all three data particles come back as expected.
        """
        log.debug('CAG TEST: FILE HAS NO DECIMATION FACTOR')
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               StringIO(TEST_DATA_ndf),
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        particles = self.parser.get_records(4)

        expected = [self.particle_meta_ndf, self.particle_a,
                    self.particle_b, self.particle_c]
        self.assertEqual(particles, expected)
        log.debug('CAG TEST: NO DECIMATION FACTOR TEST PASSES')

    def test_simple_with_incorrect_header(self):
        """
        Read test data whose header is NOT for a WC SIO block.

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: INCORRECT HEADER')
        # NOTE(review): this uses TEST_DATA_bts, the same fixture as the
        # bad-time-stamp test -- confirm an incorrect-header fixture exists.
        stream_handle = StringIO(TEST_DATA_bts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT INCORRECT HEADER')
        else:
            log.debug('CAG TEST: INCORRECT HEADER DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_bad_time_stamp(self):
        """
        Read test data whose time stamp is bad (only 7 bytes).

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: BAD TIME STAMP')
        stream_handle = StringIO(TEST_DATA_bts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT BAD TIME STAMP')
        else:
            log.debug('CAG TEST: BAD TIME STAMP DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_no_time_stamp(self):
        """
        Read test data that is missing the time stamp.

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: NO TIME STAMP')
        stream_handle = StringIO(TEST_DATA_nts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT NO TIME STAMP')
        else:
            log.debug('CAG TEST: NO TIME STAMP DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_bad_eop(self):
        """
        Read test data whose End of Profile (eop) marker is not all "F"s.

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: BAD END OF PROFILE')
        stream_handle = StringIO(TEST_DATA_beop)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT BAD END OF PROFILE')
        else:
            log.debug('CAG TEST: BAD END OF PROFILE DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_no_eop(self):
        """
        Read test data whose End of Profile (eop) marker is missing.

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: MISSING END OF PROFILE')
        stream_handle = StringIO(TEST_DATA_neop)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(4)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT MISSING END OF PROFILE')
        else:
            log.debug('CAG TEST: MISSING END OF PROFILE DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_no_data_recs(self):
        """
        Read test data with no data records between the header and footer.

        NOTE(review): the original docstring says the output "should be a
        metadata particle only", yet the code below treats ANY returned
        particle as a failure -- confirm the intended expectation before
        turning this into a hard assertion.
        """
        log.debug('CAG TEST: NO DATA RECORDS')
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               StringIO(TEST_DATA_ndr),
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        records = self.parser.get_records(1)
        if records:
            log.debug('CAG TEST: FAILED TO DETECT NO DATA RECORDS CASE')
        else:
            log.debug('CAG TEST: NO DATA RECORDS DETECTED')

    def test_simple_with_input_too_short(self):
        """
        Read test data whose input stream is truncated (too short).

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: FILE IS TOO SHORT')
        stream_handle = StringIO(TEST_DATA_wts)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(1)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT FILE IS TOO SHORT CASE')
        else:
            log.debug('CAG TEST: FILE IS TOO SHORT DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])

    def test_simple_with_wrong_header(self):
        """
        Read test data that carries the wrong header.

        The parser should reject the data stream and produce no particles.
        """
        log.debug('CAG TEST: FILE HAS THE WRONG HEADER')
        stream_handle = StringIO(TEST_DATA_wwh)
        self.parser = CtdpfCklWfpSioMuleParser(self.config, None,
                                               stream_handle,
                                               self.state_callback,
                                               self.pub_callback,
                                               self.exception_callback)
        # next get records
        result = self.parser.get_records(1)
        if result:
            log.debug('CAG TEST: FAILED TO DETECT WRONG HEADER')
        else:
            log.debug('CAG TEST: WRONG HEADER DETECTED')
        # The original only logged the outcome, so this test could never fail.
        # Assert the documented expectation: the stream is rejected.
        self.assertEqual(result, [])