    def test_in_process_start(self):
        """
        Test starting the parser in a state in the middle of processing, in the middle of a block.
        """
        log.debug('Starting test_in_process_start')
        # [2818:2982] contains the first WE SIO header
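        # IN_PROCESS_DATA entries are [block start, block end, number of samples
        # in the block, number of samples already returned]; field meanings are
        # inferred from the state transitions asserted below, so this state
        # resumes a block of 3 samples with none returned yet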
        new_state = {StateKey.IN_PROCESS_DATA:[[2818,2982,3,0], [4059,4673,18,0]],
            StateKey.UNPROCESSED_DATA:[[2818,2982], [4058,5000]], StateKey.FILE_SIZE:[] }
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                        self.state_callback, self.pub_callback, self.exception_callback)
        
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818,2982,3,1], [4059,4673,18,0]],
                           [[2818,2982], [4058,5000]], self.particle_1a) 
        
        result = self.parser.get_records(2)
        self.assertEqual(result[0], self.particle_1b)
        self.assertEqual(result[1], self.particle_1c)
        self.assert_state([[4059,4673,18,0]], [[4058,5000]])
        self.assertEqual(self.publish_callback_value[-1], self.particle_1c)

        result = self.parser.get_records(1)
        self.assert_result(result, [[4059,4673,18,1]],
                           [[4058,5000]], self.particle_2a)

        self.stream_handle.close()

    def test_set_state(self):
        """
        Test changing to a new state after initializing the parser and reading data,
        as if new data has been found and the state has changed.
        """
        log.debug('-------------------------------------------------Starting test_set_state')
        self.state = {StateKey.UNPROCESSED_DATA:[[4059, 4673]], StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE:[]}
        new_state = {StateKey.UNPROCESSED_DATA:[[2818, 2982], [4058, 4059], [4673, 5000]],
            StateKey.IN_PROCESS_DATA:[[2818, 2982, 3, 0]], StateKey.FILE_SIZE:[]}

        self.stream_handle = open(os.path.join(RESOURCE_PATH,'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        
        # Only 18 records are left in the file at this point. Drain them, then
        # make sure the next fetch returns nothing.
        result = self.parser.get_records(17)
        self.assert_state([[4059, 4673, 18, 17]], [[4059, 4673]])
        result = self.parser.get_records(1)
        result = self.parser.get_records(1)
        self.assertEqual(result, [])

        self.parser.set_state(new_state)
        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1a)

        self.stream_handle.close()

    def test_simple(self):
        """
        Read test data and pull out data particles one at a time.
        Assert that the results are those we expected.
        """
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        # NOTE: an unprocessed data state of [0, 5000] limits parsing to the
        # first 5000 bytes, so even though the file is longer, only the first
        # 5000 bytes are read
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 5000]], StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE:[]}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback) 
        result = self.parser.get_records(1)
   
        self.assert_result(result, [[2818,2982,3,1], [4059,4673,18,0]],
                           [[2818,2982], [4058,5000]], self.particle_1a)
        
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818,2982,3,2], [4059,4673,18,0]],
                           [[2818,2982], [4058,5000]], self.particle_1b)
        
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059,4673,18,0]],
                           [[4058,5000]], self.particle_1c)
                
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059,4673,18,1]],
                           [[4058,5000]], self.particle_2a)

        self.stream_handle.close()

    def test_update(self):
        """
        Test a file that has a section of data replaced by 0s, as if a block of
        data has not yet been received.
        """
        log.debug('------------------------------------------------------Starting test_update')
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 5000]], StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE:[]}
        # this file has first block of WE data replaced by 0s
        self.stream_handle = open(os.path.join(RESOURCE_PATH,
                                               'node58p1_1stWE0d.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[4059,4673,18,1]],
                           [[2818, 2982], [4058, 5000]],
                           self.particle_2a)    
        self.stream_handle.close()
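        # Save the parser's state and reopen the complete file, simulating the
        # zeroed-out block having since been filled in with real data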

        next_state = self.parser._state
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))        
        self.parser = FlordLWfpSioMuleParser(self.config, next_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)

        # There are 18 valid records in the second WE chunk. We read one above;
        # now drain the remaining 17 to trigger reparsing of the earlier block.
        for kk in range(0, 17):
            result = self.parser.get_records(1)

        # The next fetch should find the now-replaced earlier data.
        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[2818, 2982, 3, 1]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1a)

        result = self.parser.get_records(1)
        self.assert_result(result,
                           [[2818, 2982, 3, 2]],
                           [[2818, 2982], [4058, 4059], [4673, 5000]],
                           self.particle_1b)

        self.stream_handle.close()

    def test_bad_data(self):
        """
        Ensure that the bad record ( in this case a currupted status message ) causes a sample exception
        """
	self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1_BADFLAGS.dat'))
	self.state = {StateKey.UNPROCESSED_DATA:[[0, 5000]],
	    StateKey.IN_PROCESS_DATA:[], StateKey.FILE_SIZE:[]}
       
	self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)
	self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))
    def test_bad_e_record(self):
        """
        Ensure that the bad record causes a sample exception. The file 'bad_e_record.dat'
	includes a record containing one byte less than the expected 30 for the
	flord_l_wfp_sio_mule. The 'Number of Data Bytes' and the 'CRC Checksum' values in the
	SIO Mule header have been modified accordingly.
        """
	self.stream_handle = open(os.path.join(RESOURCE_PATH, 'bad_e_record.dat'))
	self.state = {StateKey.UNPROCESSED_DATA:[[0, 5000]],
	    StateKey.IN_PROCESS_DATA:[], StateKey.FILE_SIZE:[]}
       
	self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)
	self.assert_(isinstance(self.exception_callback_value, UnexpectedDataException))
    def _build_parser(self, parser_state, infile):
        """
        Build and return the parser
        """
        config = self._parser_config
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.flord_l_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
            'FlordLWfpSioMuleParserDataParticle'
        })

        log.debug("My Config: %s", config)
        self._parser = FlordLWfpSioMuleParser(config, parser_state, infile,
                                              self._save_parser_state,
                                              self._data_callback,
                                              self._sample_exception_callback)
        return self._parser
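
    # A minimal sketch of how this builder might be exercised directly; the
    # file name and the None starting state are assumptions, not taken from
    # this driver:
    #
    #     with open('node58p1.dat', 'rb') as infile:
    #         parser = self._build_parser(None, infile)
    #         particles = parser.get_records(4)
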
    def _build_telemetered_parser(self, parser_state, stream_in):
        """
        Build and return the telemetered parser
        @param parser_state starting parser state to pass to parser
        @param stream_in Handle of open file to pass to parser
        """
        config = self._parser_config.get(DataSourceKey.FLORD_L_WFP_SIO_MULE)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.flord_l_wfp_sio_mule',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
            'FlordLWfpSioMuleParserDataParticle'
        })
        log.debug("My Config: %s", config)
        parser = FlordLWfpSioMuleParser(
            config, parser_state, stream_in,
            lambda state: self._save_parser_state(
                state, DataSourceKey.FLORD_L_WFP_SIO_MULE),
            self._data_callback, self._sample_exception_callback)
        return parser
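
    # Design note: the state-save callback is wrapped in a lambda so that any
    # saved parser state is filed under DataSourceKey.FLORD_L_WFP_SIO_MULE,
    # letting a driver that handles multiple data sources track each parser's
    # position independently.
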
    def test_mid_state_start(self):
        """
        Test starting the parser in a state in the middle of processing, no in_process_data.
        """
        log.debug('Starting test_mid_state_start')
        new_state = {StateKey.IN_PROCESS_DATA:[],
            StateKey.UNPROCESSED_DATA:[[2818,2982], [4058,5000]], StateKey.FILE_SIZE:[]}

        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.parser = FlordLWfpSioMuleParser(self.config, new_state, self.stream_handle,
                                  self.state_callback, self.pub_callback, self.exception_callback)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818,2982,3,1]],
                           [[2818,2982], [4058,5000]], self.particle_1a)
        result = self.parser.get_records(1)
        self.assert_result(result, [[2818,2982,3,2]],
                           [[2818,2982], [4058,5000]], self.particle_1b)
        result = self.parser.get_records(1)
        self.assert_result(result, [], [[4058,5000]], self.particle_1c)
        result = self.parser.get_records(1)
        self.assert_result(result, [[4059,4673,18,1]], [[4058,5000]], self.particle_2a)
        self.stream_handle.close()

    def test_long_stream(self):
        """
        Test retrieving a long stream of records in a single get_records call.
        """
        log.debug("Start of test_long_stream.")
        self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node58p1.dat'))
        self.stream_handle.seek(0)
        self.state = {StateKey.UNPROCESSED_DATA:[[0, 5000]], StateKey.IN_PROCESS_DATA:[],
            StateKey.FILE_SIZE:[]}
        self.parser = FlordLWfpSioMuleParser(self.config, self.state, self.stream_handle,
            self.state_callback, self.pub_callback, self.exception_callback)

        result = self.parser.get_records(12) 
        self.assertEqual(result[0], self.particle_1a)
        self.assertEqual(result[1], self.particle_1b)
        self.assertEqual(result[2], self.particle_1c)
        self.assertEqual(result[3], self.particle_2a)
        self.assertEqual(result[-2], self.particle_1k)
        self.assertEqual(result[-1], self.particle_1l)
        self.assertEqual(self.publish_callback_value[-2], self.particle_1k)
        self.assertEqual(self.publish_callback_value[-1], self.particle_1l)   
        self.assert_state([[4059,4673,18,9]], [[4058,5000]])
        self.stream_handle.close()