def test_sequences(self):
    """
    Test that new sequence flags are set correctly when gaps appear in
    the data file, and that a driver restarted with no memento re-reads
    the whole file.
    """
    self.clean_file()

    self.driver.start_sampling()
    self.clear_async_data()

    # step 2 contains 2 blocks, start with this and get both since we used them
    # separately in other tests (no new sequences)
    # NOTE: the original code called clear_async_data() a second time here;
    # the redundant duplicate call has been removed.
    self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_1-2.txt.result.yml',
                     count=2, timeout=10)

    # This file has had a section of data replaced with 0s, this should start a new
    # sequence for the data following the missing data
    self.clear_async_data()
    self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_3.txt.result.yml',
                     count=1, timeout=10)

    # Now fill in the zeroed section from step3, this should just return the new
    # data with a new sequence flag
    self.clear_async_data()
    self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_4.txt.result.yml',
                     count=1, timeout=10)

    # start over now, using step 4, make sure sequence flags just account for
    # missing data in the file (there are some sections of bad data that don't
    # match in headers)
    self.driver.stop_sampling()

    # Reset the driver with no memento
    self.memento = None
    self.driver = MflmPHSENDataSetDriver(
        self._driver_config()['startup_config'],
        self.memento,
        self.data_callback,
        self.state_callback,
        self.event_callback,
        self.exception_callback)

    self.driver.start_sampling()
    self.clear_async_data()
    self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_1-4.txt.result.yml',
                     count=4, timeout=10)
def test_stop_resume(self):
    """
    Verify the driver can resume mid-file when constructed with a
    previously stored state (memento).
    """
    self.clean_file()

    # Remembered harvester position: 911 bytes of the step-1 file already
    # seen; the parser still owes us bytes [0, 172].
    harvester_state = {'last_filesize': 911,
                       'last_checksum': '8b7cf73895eded0198b3f3621f962abc'}
    parser_state = {'in_process_data': [],
                    'unprocessed_data': [[0, 172]],
                    'timestamp': 3583699199.0}
    self.memento = {DataSourceConfigKey.HARVESTER: harvester_state,
                    DataSourceConfigKey.PARSER: parser_state}

    self.driver = MflmPHSENDataSetDriver(
        self._driver_config()['startup_config'],
        self.memento,
        self.data_callback,
        self.state_callback,
        self.exception_callback)

    # Append step-2 data so there is something new past the stored position.
    self.clear_async_data()
    self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

    self.driver.start_sampling()

    # Only the newly appended record should be produced.
    self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                     count=1, timeout=10)
def test_get(self):
    """
    Read samples from the single telemetered file and verify that
    sampling can be started, stopped, and restarted cleanly.
    """
    self.clean_file()

    # Begin sampling; harvester/parser failures surface via callbacks.
    self.driver.start_sampling()

    self.clear_async_data()
    self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_1.txt.result.yml',
                     count=1, timeout=10)

    # All data flows through one file: copying step 2 over node59p1.dat
    # 'appends' bytes to it, and only the appended records are expected
    # (the _step1 records were already consumed above).
    self.clear_async_data()
    self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                     count=1, timeout=10)

    # Append the remainder of the data and check only the particle count.
    self.clear_async_data()
    self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, count=2, timeout=10)

    self.driver.stop_sampling()

    # Rebuild the driver with no memento and confirm it starts over
    # from the beginning of the file.
    self.memento = None
    self.driver = MflmPHSENDataSetDriver(
        self._driver_config()['startup_config'],
        self.memento,
        self.data_callback,
        self.state_callback,
        self.event_callback,
        self.exception_callback)

    self.driver.start_sampling()

    self.clear_async_data()
    self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
    self.assert_data(PhsenParserDataParticle, count=1, timeout=10)
def test_stop_resume(self):
    """
    Verify the driver resumes mid-file when started from a stored
    DriverStateKey-style state.
    """
    self.clean_file()
    self.create_sample_data("node59p1_step1.dat", "node59p1.dat")

    # The stored state must carry the real modification time of the
    # file on disk, so fetch it from the harvester's configured path.
    harvester_config = self._driver_config()['startup_config']['harvester']
    data_file = os.path.join(harvester_config['directory'],
                             harvester_config['pattern'])
    mod_time = os.path.getmtime(data_file)

    # Remembered position: 911 bytes (step 1) seen by the harvester,
    # bytes [0, 172] still unprocessed by the parser.
    parser_state = {'in_process_data': [],
                    'unprocessed_data': [[0, 172]],
                    'timestamp': 3583699199.0}
    self.memento = {DriverStateKey.FILE_SIZE: 911,
                    DriverStateKey.FILE_CHECKSUM: '8b7cf73895eded0198b3f3621f962abc',
                    DriverStateKey.FILE_MOD_DATE: mod_time,
                    DriverStateKey.PARSER_STATE: parser_state}

    self.driver = MflmPHSENDataSetDriver(
        self._driver_config()['startup_config'],
        self.memento,
        self.data_callback,
        self.state_callback,
        self.event_callback,
        self.exception_callback)

    # Append step-2 data so there is fresh data beyond the stored position.
    self.clear_async_data()
    self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

    self.driver.start_sampling()

    # Only the newly appended record should be produced.
    self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                     count=1, timeout=10)
def test_stop_resume(self):
    """
    Test the ability to stop and restart the process from a saved state.
    """
    self.clean_file()
    self.create_sample_data("node59p1_step1.dat", "node59p1.dat")

    startup = self._driver_config()['startup_config']
    target = os.path.join(startup['harvester']['directory'],
                          startup['harvester']['pattern'])
    last_mod = os.path.getmtime(target)

    # Saved state: step-1 file (911 bytes) fully harvested, with
    # bytes [0, 172] still awaiting the parser.
    self.memento = {
        DriverStateKey.FILE_SIZE: 911,
        DriverStateKey.FILE_CHECKSUM: '8b7cf73895eded0198b3f3621f962abc',
        DriverStateKey.FILE_MOD_DATE: last_mod,
        DriverStateKey.PARSER_STATE: {
            'in_process_data': [],
            'unprocessed_data': [[0, 172]],
            'timestamp': 3583699199.0,
        },
    }

    self.driver = MflmPHSENDataSetDriver(
        startup,
        self.memento,
        self.data_callback,
        self.state_callback,
        self.event_callback,
        self.exception_callback)

    # create some data to parse
    self.clear_async_data()
    self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

    self.driver.start_sampling()

    # verify data is produced
    self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                     count=1, timeout=10)
class IntegrationTest(DataSetIntegrationTestCase):
    # Integration tests for the MFLM PHSEN dataset driver.  All tests read
    # from the single telemetered file node59p1.dat; the node59p1_stepN.dat
    # fixtures are progressively larger snapshots of that file.

    def clean_file(self):
        # remove just the file we are using
        driver_config = self._driver_config()['startup_config']
        log.debug('startup config %s', driver_config)
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        if os.path.exists(fullfile):
            os.remove(fullfile)

    def test_get(self):
        """
        Test that we can get data from files.  Verify that the driver
        sampling can be started and stopped
        """
        self.clean_file()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1.txt.result.yml',
                         count=1, timeout=10)

        # there is only one file we read from, this example 'appends' data to
        # the end of the node59p1.dat file, and the data from the new append
        # is returned (not including the original data from _step1)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

        # now 'appends' the rest of the data and just check if we get the right number
        self.clear_async_data()
        self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, count=2, timeout=10)

        self.driver.stop_sampling()
        # reset the parser and harvester states so sampling restarts from
        # the beginning of the file
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, count=1, timeout=10)

    def test_harvester_new_file_exception(self):
        """
        Test an exception raised after the driver is started during
        the file read.  Should call the exception callback.
        """
        self.clean_file()

        # create the file so that it is unreadable
        # NOTE(review): 000 is a Python 2 octal literal (permission bits 0);
        # under Python 3 this would need to be written 0o000
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat", mode=000)

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.assert_exception(IOError)

        # At this point the harvester thread is dead.  The agent
        # exception handle should handle this case.

    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()

        # Create and store the new driver state: 911 bytes (step 1) already
        # harvested, bytes [0, 172] still unprocessed by the parser
        self.memento = {DataSourceConfigKey.HARVESTER:
                            {'last_filesize': 911,
                             'last_checksum': '8b7cf73895eded0198b3f3621f962abc'},
                        DataSourceConfigKey.PARSER:
                            {'in_process_data': [],
                             'unprocessed_data': [[0, 172]],
                             'timestamp': 3583699199.0}}
        self.driver = MflmPHSENDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

    def test_sequences(self):
        """
        Test new sequence flags are set correctly
        """
        self.clean_file()

        self.driver.start_sampling()
        self.clear_async_data()

        # step 2 contains 2 blocks, start with this and get both since we used them
        # separately in other tests (no new sequences)
        # NOTE(review): this second clear_async_data() call looks redundant
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1-2.txt.result.yml',
                         count=2, timeout=10)

        # This file has had a section of data replaced with 0s, this should start a new
        # sequence for the data following the missing data
        self.clear_async_data()
        self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_3.txt.result.yml',
                         count=1, timeout=10)

        # Now fill in the zeroed section from step3, this should just return the new
        # data with a new sequence flag
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_4.txt.result.yml',
                         count=1, timeout=10)

        # start over now, using step 4, make sure sequence flags just account for
        # missing data in file (there are some sections of bad data that don't
        # match in headers)
        self.driver.stop_sampling()
        # reset the parser and harvester states
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1-4.txt.result.yml',
                         count=4, timeout=10)
# Driver, parser, and agent-framework imports for the MFLM PHSEN dataset
# driver tests.
from mi.dataset.dataset_driver import DriverParameter
from mi.dataset.driver.mflm.phsen.driver import MflmPHSENDataSetDriver
from mi.dataset.parser.phsen import PhsenParserDataParticle

from pyon.agent.agent import ResourceAgentState
from interface.objects import ResourceAgentErrorEvent
from interface.objects import ResourceAgentConnectionLostErrorEvent

# Fill in driver details: registers this driver with the test framework and
# points the harvester at the single telemetered file /tmp/dsatest/node59p1.dat
DataSetTestCase.initialize(
    driver_module='mi.dataset.driver.mflm.phsen.driver',
    driver_class='MflmPHSENDataSetDriver',
    agent_resource_id = '123xyz',
    agent_name = 'Agent007',
    agent_packet_config = MflmPHSENDataSetDriver.stream_config(),
    startup_config = {
        'harvester':
        {
            'directory': '/tmp/dsatest',
            'pattern': 'node59p1.dat',
            'frequency': 1,  # poll the file once per second
        },
        'parser': {}
    }
)

# Stream name the parsed PHSEN particles are published on
SAMPLE_STREAM = 'phsen_parsed'

###############################################################################
#                            INTEGRATION TESTS                                #
from mi.dataset.parser.phsen import DataParticleType from mi.dataset.parser.phsen_abcdef import PhsenRecoveredInstrumentDataParticle, \ PhsenRecoveredMetadataDataParticle, StateKey from mi.dataset.parser.phsen_abcdef import DataParticleType as RecoveredDataParticleType from mi.dataset.parser.sio_mule_common import StateKey as SioMuleStateKey TELEM_DIR = '/tmp/dsatest' RECOVERED_DIR = '/tmp/recoveredtest' # Fill in driver details DataSetTestCase.initialize( driver_module='mi.dataset.driver.mflm.phsen.driver', driver_class='MflmPHSENDataSetDriver', agent_resource_id='123xyz', agent_name='Agent007', agent_packet_config=MflmPHSENDataSetDriver.stream_config(), startup_config={ DataSourceConfigKey.HARVESTER: { DataSourceKey.PHSEN_ABCDEF_SIO_MULE: { DataSetDriverConfigKeys.DIRECTORY: TELEM_DIR, DataSetDriverConfigKeys.PATTERN: 'node59p1.dat', DataSetDriverConfigKeys.FREQUENCY: 1, DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2, }, DataSourceKey.PHSEN_ABCDEF: { DataSetDriverConfigKeys.DIRECTORY: RECOVERED_DIR, DataSetDriverConfigKeys.PATTERN: 'SAMI_*.txt', DataSetDriverConfigKeys.FREQUENCY: 1, DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2, } },
class IntegrationTest(DataSetIntegrationTestCase):
    # Integration tests for the MFLM PHSEN dataset driver.  All tests read
    # from the single telemetered file node59p1.dat; the node59p1_stepN.dat
    # fixtures are progressively larger snapshots of that file.

    def clean_file(self):
        # remove just the file we are using
        driver_config = self._driver_config()['startup_config']
        log.debug('startup config %s', driver_config)
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        if os.path.exists(fullfile):
            os.remove(fullfile)

    def test_get(self):
        """
        Test that we can get data from files.  Verify that the driver
        sampling can be started and stopped
        """
        self.clean_file()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1.txt.result.yml',
                         count=1, timeout=10)

        # there is only one file we read from, this example 'appends' data to
        # the end of the node59p1.dat file, and the data from the new append
        # is returned (not including the original data from _step1)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

        # now 'appends' the rest of the data and just check if we get the right number
        self.clear_async_data()
        self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, count=2, timeout=10)

        self.driver.stop_sampling()

        # Reset the driver with no memento so it starts over from the
        # beginning of the file
        self.memento = None
        self.driver = MflmPHSENDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)

        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, count=1, timeout=10)

    def test_harvester_new_file_exception(self):
        """
        Test an exception raised after the driver is started during
        the file read.  Should call the exception callback.
        """
        self.clean_file()

        # create the file so that it is unreadable
        # NOTE(review): 000 is a Python 2 octal literal (permission bits 0);
        # under Python 3 this would need to be written 0o000.  Also note the
        # sibling version of this test asserts IOError here — confirm which
        # exception this framework version actually raises.
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat", mode=000)

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.assert_exception(ValueError)

        # At this point the harvester thread is dead.  The agent
        # exception handle should handle this case.

    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")

        driver_config = self._driver_config()['startup_config']
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        mod_time = os.path.getmtime(fullfile)

        # Create and store the new driver state: 911 bytes (step 1) already
        # harvested, bytes [0, 172] still unprocessed by the parser
        self.memento = {
            DriverStateKey.FILE_SIZE: 911,
            DriverStateKey.FILE_CHECKSUM: '8b7cf73895eded0198b3f3621f962abc',
            DriverStateKey.FILE_MOD_DATE: mod_time,
            DriverStateKey.PARSER_STATE: {
                'in_process_data': [],
                'unprocessed_data': [[0, 172]],
                'timestamp': 3583699199.0
            }
        }
        self.driver = MflmPHSENDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(PhsenParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

    def test_sequences(self):
        """
        Test new sequence flags are set correctly
        """
        self.clean_file()

        self.driver.start_sampling()
        self.clear_async_data()

        # step 2 contains 2 blocks, start with this and get both since we used them
        # separately in other tests (no new sequences)
        # NOTE(review): this second clear_async_data() call looks redundant
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1-2.txt.result.yml',
                         count=2, timeout=10)

        # This file has had a section of data replaced with 0s, this should start a new
        # sequence for the data following the missing data
        self.clear_async_data()
        self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_3.txt.result.yml',
                         count=1, timeout=10)

        # Now fill in the zeroed section from step3, this should just return the new
        # data with a new sequence flag
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_4.txt.result.yml',
                         count=1, timeout=10)

        # start over now, using step 4, make sure sequence flags just account for
        # missing data in file (there are some sections of bad data that don't
        # match in headers)
        self.driver.stop_sampling()

        # Reset the driver with no memento
        self.memento = None
        self.driver = MflmPHSENDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)

        self.driver.start_sampling()
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(PhsenParserDataParticle, 'test_data_1-4.txt.result.yml',
                         count=4, timeout=10)