Example no. 1
    @mock.patch.object(SessionSuspendHelper, '_repr_DiskJobResult')
    def test_repr_JobResult_with_DiskJobResult(self, mocked_repr):
        """
        verify that _repr_JobResult() called with DiskJobResult
        calls _repr_DiskJobResult
        """
        result = DiskJobResult({})
        helper = SessionSuspendHelper()
        helper._repr_JobResult(result)
        mocked_repr.assert_called_once_with(result)
Example no. 2
    def checkpoint(self):
        """
        Create a checkpoint of the session.

        After calling this method you can later reopen the same session with
        :meth:`SessionManager.open_session()`.
        """
        logger.debug("SessionManager.checkpoint()")
        data = SessionSuspendHelper().suspend(self.state)
        logger.debug("Saving %d bytes of checkpoint data to %r", len(data),
                     self.storage.location)
        try:
            self.storage.save_checkpoint(data)
        except LockedStorageError:
            # Saving failed because the storage was still locked;
            # break the stale lock and retry the save once.
            self.storage.break_lock()
            self.storage.save_checkpoint(data)
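
For reference, the data written by checkpoint() is the gzip-compressed JSON document produced by SessionSuspendHelper.suspend() (see the suspend tests in Example no. 6 below). A minimal inspection sketch using only the standard library; inspect_checkpoint() is a hypothetical helper written for illustration, not part of plainbox:

import gzip
import json


def inspect_checkpoint(data):
    # 'data' is the bytes object that checkpoint() passes to
    # storage.save_checkpoint(); it decompresses to a JSON document
    # with a 'version' field (currently 1) and a 'session' field.
    document = json.loads(gzip.decompress(data).decode("UTF-8"))
    assert document["version"] == 1
    return document["session"]
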
Example no. 3
    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "name": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "name": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'name:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult({
            "io_log": [(0.0, 'stdout', b'name:generated')]
        })
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState([
            self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list([
            self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(
            self.category_job, self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above worked as intended.
        self.assertEqual(
            self.generator_job.via, self.category_job.get_checksum())
        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job
        # Use partial to pass this test case as 'self' to the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(
            self.generator_job, self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper()
        self.data = self.helper._repr_SessionState(self.session_state)
Example no. 4
class GeneratedJobSuspendTests(TestCase):
    """
    Tests that check how SessionSuspendHelper behaves when faced with
    generated jobs. These tests set up the following job hierarchy:

        __category__
           \-> generator
                \-> generated

    The "__category__" job is a typical "catter" job that cats an existing
    job from somewhere else in the filesystem. This type of generated job
    is often used for category assignment.

    The "generator" job is a typical non-catter job that actually creates
    new jobs in some way. In these tests it generates a job called "generated".
    """

    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "name": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "name": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'name:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult({
            "io_log": [(0.0, 'stdout', b'name:generated')]
        })
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState([
            self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list([
            self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(
            self.category_job, self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above worked as intended.
        self.assertEqual(
            self.generator_job.via, self.category_job.get_checksum())
        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job
        # Use partial to pass this test case as 'self' to the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(
            self.generator_job, self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper()
        self.data = self.helper._repr_SessionState(self.session_state)

    def test_state_tracked_for_all_jobs(self):
        """
        verify that the suspend data keeps track of all three jobs
        """
        self.assertIn(self.category_job.name, self.data['jobs'])
        self.assertIn(self.generator_job.name, self.data['jobs'])
        self.assertIn(self.generated_job.name, self.data['jobs'])

    def test_category_job_result_is_saved(self):
        """
        verify that the 'category' job result was saved
        """
        # This result is essential to re-create the association
        # with the 'generator' job. In theory we could get it from
        # the 'via' attribute but that is only true for category assignment
        # where the child job already exists and is defined on the
        # filesystem. This would not work in the case of truly generated jobs
        # so for consistency it is done the same way.
        self.assertEqual(
            self.data['results']['__category__'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                    [0.1, 'stdout', 'bmFtZTpnZW5lcmF0b3IK']
                ]
            }]
        )

    def test_generator_job_result_is_saved(self):
        """
        verify that the 'generator' job result was saved
        """
        self.assertEqual(
            self.data['results']['generator'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'bmFtZTpnZW5lcmF0ZWQ='],
                ]
            }]
        )

    def test_generated_job_result_is_saved(self):
        """
        verify that the 'generated' job result was saved
        """
        # This is the implicit "empty" result that all jobs have
        self.assertEqual(
            self.data['results']['generated'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': []
            }]
        )

    def test_sanity_check(self):
        """
        verify that the whole suspend data looks right
        """
        # This test is pretty much an "eyeball" inspection test
        # where we can see everything at a glance without having to
        # deduce what each part looks like from the tests above.
        #
        # All the data below is a verbatim copy of the generated suspend data
        # that was created when this test was written. The only modification
        # was wrapping the checksums in parentheses so that they break across
        # lines and the file stays PEP-8 clean.
        self.maxDiff = None
        self.assertEqual(self.data, {
            'jobs': {
                '__category__': (
                    '5267192a5eac9288d144242d800b981eeca476c17e0'
                    'dd32a09c4b3ea0a14f955'),
                'generator': (
                    '7e67e23b7e7a6a5803721a9f282c0e88c7f40bae470'
                    '950f880e419bb9c7665d8'),
                'generated': (
                    'bfee8c57b6adc9f0f281b59fe818de2ed98b6affb78'
                    '9cf4fbf282d89453190d3'),
            },
            'results': {
                '__category__': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                        [0.1, 'stdout', 'bmFtZTpnZW5lcmF0b3IK']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generator': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'bmFtZTpnZW5lcmF0ZWQ=']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generated': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [],
                    'outcome': None,
                    'return_code': None,
                }]
            },
            'desired_job_list': ['__category__', 'generator'],
            'metadata': {
                'flags': [],
                'running_job_name': None,
                'title': None
            },
        })
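
The io_log entries in the suspend data above are simply base64-encoded copies of the byte strings recorded in the MemoryJobResult objects from setUp(). A quick standalone check with the standard library (not part of the test class, shown only to decode the literals used above):

import base64

assert base64.b64encode(b"plugin:local\n") == b"cGx1Z2luOmxvY2FsCg=="
assert base64.b64encode(b"name:generator\n") == b"bmFtZTpnZW5lcmF0b3IK"
assert base64.b64encode(b"name:generated") == b"bmFtZTpnZW5lcmF0ZWQ="
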
Example no. 5
    def setUp(self):
        self.helper = SessionSuspendHelper()
Example no. 6
class SessionSuspendHelperTests(TestCase):
    """
    Tests for various methods of SessionSuspendHelper
    """

    def setUp(self):
        self.helper = SessionSuspendHelper()

    def test_repr_IOLogRecord(self):
        """
        verify that the representation of IOLogRecord is okay
        """
        record = IOLogRecord(0.0, "stdout", b"binary data")
        data = self.helper._repr_IOLogRecord(record)
        self.assertEqual(data, [0.0, "stdout", "YmluYXJ5IGRhdGE="])

    @mock.patch.object(SessionSuspendHelper, '_repr_MemoryJobResult')
    def test_repr_JobResult_with_MemoryJobResult(self, mocked_repr):
        """
        verify that _repr_JobResult() called with MemoryJobResult
        calls _repr_MemoryJobResult
        """
        result = MemoryJobResult({})
        helper = SessionSuspendHelper()
        helper._repr_JobResult(result)
        mocked_repr.assert_called_once_with(result)

    @mock.patch.object(SessionSuspendHelper, '_repr_DiskJobResult')
    def test_repr_JobResult_with_DiskJobResult(self, mocked_repr):
        """
        verify that _repr_JobResult() called with DiskJobResult
        calls _repr_DiskJobResult
        """
        result = DiskJobResult({})
        helper = SessionSuspendHelper()
        helper._repr_JobResult(result)
        mocked_repr.assert_called_once_with(result)

    def test_repr_JobResult_with_junk(self):
        """
        verify that _repr_JobResult() raises TypeError when
        called with something other than JobResult instances
        """
        with self.assertRaises(TypeError):
            self.helper._repr_JobResult(None)

    def test_repr_SessionMetaData_empty_metadata(self):
        """
        verify that representation of empty SessionMetaData is okay
        """
        # all defaults with empty values
        data = self.helper._repr_SessionMetaData(SessionMetaData())
        self.assertEqual(data, {
            'title': None,
            'flags': [],
            'running_job_name': None
        })

    def test_repr_SessionMetaData_typical_metadata(self):
        """
        verify that representation of typical SessionMetaData is okay
        """
        # no surprises here, just the same data copied over
        data = self.helper._repr_SessionMetaData(SessionMetaData(
            title='USB Testing session',
            flags=['incomplete'],
            running_job_name='usb/detect'
        ))
        self.assertEqual(data, {
            'title': 'USB Testing session',
            'flags': ['incomplete'],
            'running_job_name': 'usb/detect',
        })

    def test_repr_SessionState_empty_session(self):
        """
        verify that representation of empty SessionState is okay
        """
        data = self.helper._repr_SessionState(SessionState([]))
        self.assertEqual(data, {
            'jobs': {},
            'results': {},
            'desired_job_list': [],
            'metadata': {
                'title': None,
                'flags': [],
                'running_job_name': None,
            },
        })

    def test_json_repr_has_version_field(self):
        """
        verify that the json representation has the 'version' field
        """
        data = self.helper._json_repr(SessionState([]))
        self.assertIn("version", data)

    def test_json_repr_current_version(self):
        """
        verify that the 'version' field has the expected current value
        """
        data = self.helper._json_repr(SessionState([]))
        self.assertEqual(data['version'], 1)

    def test_json_repr_stores_session_state(self):
        """
        verify that the json representation has the 'session' field
        """
        data = self.helper._json_repr(SessionState([]))
        self.assertIn("session", data)

    def test_suspend(self):
        """
        verify that the suspend() method returns gzipped JSON representation
        """
        data = self.helper.suspend(SessionState([]))
        # XXX: we cannot really test what the compressed data looks like
        # because apparently python3.2 gzip output is non-deterministic.
        # It seems to be an instance of the gzip bug that was fixed a few
        # years ago.
        #
        # I've filed a bug on python3.2 in Ubuntu and Python upstream project
        # https://bugs.launchpad.net/ubuntu/+source/python3.2/+bug/871083
        #
        # In the meantime we can only test that we got bytes out
        self.assertIsInstance(data, bytes)
        # And that we can gzip uncompress them and get what we expected
        self.assertEqual(gzip.decompress(data), (
            b'{"session":{"desired_job_list":[],"jobs":{},"metadata":'
            b'{"flags":[],"running_job_name":null,"title":null},"results":{}'
            b'},"version":1}'))