Example #1
    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()

        # Create and store the new driver state
        self.memento = {
            DataSourceConfigKey.HARVESTER: {
                'last_filesize': 300,
                'last_checksum': 'a640fd577c65ed07ed67f1d2e73d34e2'
            },
            DataSourceConfigKey.PARSER: {
                'in_process_data': [],
                'unprocessed_data': [[0, 69], [197, 300]],
                'timestamp': 3583610106.0
            }
        }
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'], self.memento,
            self.data_callback, self.state_callback, self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(FlortdParserDataParticle,
                         'test_data_2.txt.result.yml',
                         count=1,
                         timeout=10)
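
Every example below leans on the create_sample_data(source, dest) helper from the integration test base class. For orientation only, here is a minimal sketch of what that step is assumed to amount to: copy a canned resource file into the harvester directory named in the startup config, under the file name the harvester pattern matches. RESOURCE_PATH and the function name are hypothetical placeholders, not part of the original test framework.

import os
import shutil

# Hypothetical location of the canned .dat files; the real base class resolves
# its own resource directory.
RESOURCE_PATH = '/tmp/flort_test_resources'

def create_sample_data_sketch(driver_config, source_name, dest_name):
    """Copy a canned sample file into the harvester's watched directory.

    Assumption: the harvester polls driver_config['harvester']['directory'] for
    files matching driver_config['harvester']['pattern'] (e.g. node59p1.dat),
    so dropping a new copy there simulates appended instrument data.
    """
    harvester_dir = driver_config['harvester']['directory']
    if not os.path.exists(harvester_dir):
        os.makedirs(harvester_dir)
    shutil.copy(os.path.join(RESOURCE_PATH, source_name),
                os.path.join(harvester_dir, dest_name))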
Example #2
    def test_sequences(self):
        """
        Test new sequence flags are set correctly
        """
        self.clean_file()

        self.driver.start_sampling()

        self.clear_async_data()

        # step 2 contains 2 blocks, start with this and get both since we used them
        # separately in other tests (no new sequences)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1-2.txt.result.yml',
                         count=2, timeout=10)

        # This file has had a section of FL data replaced with 0s, this should start a new
        # sequence for the data following the missing AD data
        self.clear_async_data()
        self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_3.txt.result.yml',
                         count=3, timeout=10)

        # Now fill in the zeroed section from step3, this should just return the new
        # data with a new sequence flag
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_4.txt.result.yml',
                         count=1, timeout=10)

        # start over now, using step 4, and make sure sequence flags just account for
        # missing data in the file (there are some sections of bad data that don't
        # match in the headers)
        self.driver.stop_sampling()
        self.memento = None
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1-4.txt.result.yml',
                         count=6, timeout=10)
Example #3
    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()

        # Create and store the new driver state
        self.memento = {
            DataSourceConfigKey.HARVESTER: {
                'last_filesize': 300,
                'last_checksum': 'a640fd577c65ed07ed67f1d2e73d34e2'
            },
            DataSourceConfigKey.PARSER: {
                'in_process_data': [],
                'unprocessed_data': [[0, 69], [197, 300]],
                'timestamp': 3583610106.0
            }
        }
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(FlortdParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)
Example #4
    def test_get(self):
        """
        Test that we can get data from files.  Verify that the driver
        sampling can be started and stopped
        """
        self.clean_file()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1.txt.result.yml',
                         count=1, timeout=10)

        # there is only one file we read from, this example 'appends' data to
        # the end of the node59p1.dat file, and the data from the new append
        # is returned (not including the original data from _step1)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

        # now 'append' the rest of the data and just check that we get the right number
        self.clear_async_data()
        self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=4, timeout=10)

        self.driver.stop_sampling()
        # Reset the driver with no memento
        self.memento = None
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=1, timeout=10)
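
For orientation, a check against one of the *.result.yml files above conceptually reduces to loading the expected particle values and comparing them, in order, with what the data callback captured. This is only an illustrative sketch: the YAML layout and the generate_dict() call are assumptions, not the framework's actual assert_data implementation.

import yaml

def check_particles_against_yml(received_particles, result_yml_path, count):
    """Compare captured particles against an expected-results YAML file.

    Assumptions: the YAML carries a 'data' list of expected particle dicts, and
    each received particle can render itself to a plain dict via generate_dict().
    """
    with open(result_yml_path) as stream:
        expected = yaml.safe_load(stream)['data']

    assert len(received_particles) == count
    for particle, expected_values in zip(received_particles, expected):
        actual = particle.generate_dict()
        for key, value in expected_values.items():
            # Only the fields spelled out in the YAML are compared.
            assert actual.get(key) == value, '%s: %r != %r' % (key, actual.get(key), value)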
Example #5
    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        driver_config = self._driver_config()['startup_config']
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        mod_time = os.path.getmtime(fullfile)

        # Create and store the new driver state
        self.memento = {
            DriverStateKey.FILE_SIZE: 300,
            DriverStateKey.FILE_CHECKSUM: 'a640fd577c65ed07ed67f1d2e73d34e2',
            DriverStateKey.FILE_MOD_DATE: mod_time,
            DriverStateKey.PARSER_STATE: {
                'in_process_data': [],
                'unprocessed_data': [[0, 69], [197, 300]],
                'timestamp': 3583610106.0
            }
        }

        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.event_callback,
            self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(FlortdParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)
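
The literal file size and checksum values used in these mementos were captured from the node59p1 sample data. If equivalent values ever had to be regenerated, a sketch along these lines would do it, assuming the checksum is a plain MD5 over the file contents (the dict keys here are illustrative strings, not the DriverStateKey constants).

import hashlib
import os

def file_state_for_memento(path):
    """Return the size / checksum / modification-time triple a memento records.

    Assumption: the driver's checksum is an MD5 over the raw file bytes; the
    values hard-coded in the tests above (300, 'a640fd57...') came from the
    300-byte node59p1_step1.dat sample and are not recomputed here.
    """
    with open(path, 'rb') as stream:
        contents = stream.read()
    return {
        'file_size': len(contents),
        'file_checksum': hashlib.md5(contents).hexdigest(),
        'file_mod_date': os.path.getmtime(path),
    }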
Example #6
from mi.dataset.dataset_driver import DriverParameter, DriverStateKey

from mi.dataset.driver.mflm.flort.driver import MflmFLORTDDataSetDriver, DataSourceKey
from mi.dataset.parser.flortd import FlortdParserDataParticle, \
                                     FlortdRecoveredParserDataParticle, DataParticleType
from mi.dataset.parser.sio_mule_common import StateKey

TELEM_DIR = '/tmp/dsatest1'
RECOV_DIR = '/tmp/dsatest2'

DataSetTestCase.initialize(
    driver_module='mi.dataset.driver.mflm.flort.driver',
    driver_class='MflmFLORTDDataSetDriver',
    agent_resource_id='123xyz',
    agent_name='Agent007',
    agent_packet_config=MflmFLORTDDataSetDriver.stream_config(),
    startup_config={
        DataSourceConfigKey.HARVESTER: {
            DataSourceKey.FLORT_DJ_SIO_TELEMETERED: {
                DataSetDriverConfigKeys.DIRECTORY: TELEM_DIR,
                DataSetDriverConfigKeys.PATTERN: 'node59p1.dat',
                DataSetDriverConfigKeys.FREQUENCY: 1,
                DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2,
            },
            DataSourceKey.FLORT_DJ_SIO_RECOVERED: {
                DataSetDriverConfigKeys.DIRECTORY: RECOV_DIR,
                DataSetDriverConfigKeys.PATTERN: 'FLO*.DAT',
                DataSetDriverConfigKeys.FREQUENCY: 1,
                DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2,
            },
        },
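
The startup_config above nests every harvester setting twice: first under DataSourceConfigKey.HARVESTER, then under the specific data source key. As a quick orientation, reading one setting back out would look like this; it assumes the constants imported by the test module (including DataSourceConfigKey and DataSetDriverConfigKeys, whose imports fall above the excerpt shown) are in scope, and that the keyword dict passed to DataSetTestCase.initialize() is held in a local variable named startup_config.

# Illustrative lookup only; in the tests the same dict is reached through
# self._driver_config()['startup_config'].
telem = startup_config[DataSourceConfigKey.HARVESTER][DataSourceKey.FLORT_DJ_SIO_TELEMETERED]
assert telem[DataSetDriverConfigKeys.DIRECTORY] == TELEM_DIR        # '/tmp/dsatest1'
assert telem[DataSetDriverConfigKeys.PATTERN] == 'node59p1.dat'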
Example #7
from mi.dataset.driver.mflm.flort.driver import MflmFLORTDDataSetDriver, DataSourceKey
from mi.dataset.parser.flortd import FlortdParserDataParticle, \
                                     FlortdRecoveredParserDataParticle, DataParticleType
from mi.dataset.parser.sio_mule_common import StateKey


TELEM_DIR = '/tmp/dsatest1'
RECOV_DIR = '/tmp/dsatest2'

DataSetTestCase.initialize(
    driver_module='mi.dataset.driver.mflm.flort.driver',
    driver_class='MflmFLORTDDataSetDriver',
    agent_resource_id='123xyz',
    agent_name='Agent007',
    agent_packet_config=MflmFLORTDDataSetDriver.stream_config(),
    startup_config={
        DataSourceConfigKey.HARVESTER: {
            DataSourceKey.FLORT_DJ_SIO_TELEMETERED: {
                DataSetDriverConfigKeys.DIRECTORY: TELEM_DIR,
                DataSetDriverConfigKeys.PATTERN: 'node59p1.dat',
                DataSetDriverConfigKeys.FREQUENCY: 1,
                DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2,
            },
            DataSourceKey.FLORT_DJ_SIO_RECOVERED: {
                DataSetDriverConfigKeys.DIRECTORY: RECOV_DIR,
                DataSetDriverConfigKeys.PATTERN: 'FLO*.DAT',
                DataSetDriverConfigKeys.FREQUENCY: 1,
                DataSetDriverConfigKeys.FILE_MOD_WAIT_TIME: 2,
            },
Example #8
class IntegrationTest(DataSetIntegrationTestCase):
    def clean_file(self):
        # remove just the file we are using
        driver_config = self._driver_config()['startup_config']
        log.debug('startup config %s', driver_config)
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        if os.path.exists(fullfile):
            os.remove(fullfile)

    def test_get(self):
        """
        Test that we can get data from files.  Verify that the driver
        sampling can be started and stopped
        """
        self.clean_file()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_1.txt.result.yml',
                         count=1,
                         timeout=10)

        # there is only one file we read from, this example 'appends' data to
        # the end of the node59p1.dat file, and the data from the new append
        # is returned (not including the original data from _step1)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_2.txt.result.yml',
                         count=1,
                         timeout=10)

        # now 'append' the rest of the data and just check that we get the right number
        self.clear_async_data()
        self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=4, timeout=10)

        self.driver.stop_sampling()
        # reset the parser and harvester states
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=1, timeout=10)

    def test_harvester_new_file_exception(self):
        """
        Test an exception raised after the driver is started during
        the file read.  Should call the exception callback.
        """
        self.clean_file()

        # create the file so that it is unreadable
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat", mode=000)

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.assert_exception(IOError)

        # At this point the harvester thread is dead.  The agent
        # exception handle should handle this case.

    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()

        # Create and store the new driver state
        self.memento = {
            DataSourceConfigKey.HARVESTER: {
                'last_filesize': 300,
                'last_checksum': 'a640fd577c65ed07ed67f1d2e73d34e2'
            },
            DataSourceConfigKey.PARSER: {
                'in_process_data': [],
                'unprocessed_data': [[0, 69], [197, 300]],
                'timestamp': 3583610106.0
            }
        }
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'], self.memento,
            self.data_callback, self.state_callback, self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(FlortdParserDataParticle,
                         'test_data_2.txt.result.yml',
                         count=1,
                         timeout=10)

    def test_sequences(self):
        """
        Test new sequence flags are set correctly
        """
        self.clean_file()

        self.driver.start_sampling()

        self.clear_async_data()

        # step 2 contains 2 blocks, start with this and get both since we used them
        # separately in other tests (no new sequences)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_1-2.txt.result.yml',
                         count=2,
                         timeout=10)

        # This file has had a section of FL data replaced with 0s, this should start a new
        # sequence for the data following the missing AD data
        self.clear_async_data()
        self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_3.txt.result.yml',
                         count=3,
                         timeout=10)

        # Now fill in the zeroed section from step3, this should just return the new
        # data with a new sequence flag
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_4.txt.result.yml',
                         count=1,
                         timeout=10)

        # start over now, using step 4, and make sure sequence flags just account for
        # missing data in the file (there are some sections of bad data that don't
        # match in the headers)
        self.driver.stop_sampling()
        # reset the parser and harvester states
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle,
                         'test_data_1-4.txt.result.yml',
                         count=6,
                         timeout=10)
Example #9
class IntegrationTest(DataSetIntegrationTestCase):

    def clean_file(self):
        # remove just the file we are using
        driver_config = self._driver_config()['startup_config']
        log.debug('startup config %s', driver_config)
        fullfile = os.path.join(driver_config['harvester']['directory'],
                                driver_config['harvester']['pattern'])
        if os.path.exists(fullfile):
            os.remove(fullfile)

    def test_get(self):
        """
        Test that we can get data from files.  Verify that the driver
        sampling can be started and stopped
        """
        self.clean_file()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1.txt.result.yml',
                         count=1, timeout=10)

        # there is only one file we read from, this example 'appends' data to
        # the end of the node59p1.dat file, and the data from the new append
        # is returned (not including the original data from _step1)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

        # now 'append' the rest of the data and just check that we get the right number
        self.clear_async_data()
        self.create_sample_data("node59p1_step4.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=4, timeout=10)

        self.driver.stop_sampling()
        # reset the parser and harvester states
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, count=1, timeout=10)

    def test_harvester_new_file_exception(self):
        """
        Test an exception raised after the driver is started during
        the file read.  Should call the exception callback.
        """
        self.clean_file()

        # create the file so that it is unreadable
        self.create_sample_data("node59p1_step1.dat", "node59p1.dat", mode=000)

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.assert_exception(IOError)

        # At this point the harvester thread is dead.  The agent
        # exception handle should handle this case.

    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        self.clean_file()

        # Create and store the new driver state
        self.memento = {
            DataSourceConfigKey.HARVESTER: {
                'last_filesize': 300,
                'last_checksum': 'a640fd577c65ed07ed67f1d2e73d34e2'
            },
            DataSourceConfigKey.PARSER: {
                'in_process_data': [],
                'unprocessed_data': [[0, 69], [197, 300]],
                'timestamp': 3583610106.0
            }
        }
        self.driver = MflmFLORTDDataSetDriver(
            self._driver_config()['startup_config'],
            self.memento,
            self.data_callback,
            self.state_callback,
            self.exception_callback)

        # create some data to parse
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")

        self.driver.start_sampling()

        # verify data is produced
        self.assert_data(FlortdParserDataParticle, 'test_data_2.txt.result.yml',
                         count=1, timeout=10)

    def test_sequences(self):
        """
        Test new sequence flags are set correctly
        """
        self.clean_file()

        self.driver.start_sampling()

        self.clear_async_data()

        # step 2 contains 2 blocks, start with this and get both since we used them
        # separately in other tests (no new sequences)
        self.clear_async_data()
        self.create_sample_data("node59p1_step2.dat", "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1-2.txt.result.yml',
                         count=2, timeout=10)

        # This file has had a section of FL data replaced with 0s, this should start a new
        # sequence for the data following the missing AD data
        self.clear_async_data()
        self.create_sample_data('node59p1_step3.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_3.txt.result.yml',
                         count=3, timeout=10)

        # Now fill in the zeroed section from step3, this should just return the new
        # data with a new sequence flag
        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_4.txt.result.yml',
                         count=1, timeout=10)

        # start over now, using step 4, and make sure sequence flags just account for
        # missing data in the file (there are some sections of bad data that don't
        # match in the headers)
        self.driver.stop_sampling()
        # reset the parser and harvester states
        self.driver.clear_states()
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data('node59p1_step4.dat', "node59p1.dat")
        self.assert_data(FlortdParserDataParticle, 'test_data_1-4.txt.result.yml',
                         count=6, timeout=10)