Example #1
    def test_1388055(self):
        """
        https://bugs.launchpad.net/plainbox/+bug/1388055
        """
        # This bug is about being able to resume a session despite job database
        # modification. Let's assume the following session first:
        # - desired job list: [a]
        # - run list [a_dep, a] (computed)
        # - job_repr: {a_dep: checksum}
        job_a = make_job(id='a', depends='a_dep')
        job_a_dep = make_job(id='a_dep')
        state = SessionState([job_a, job_a_dep])
        state.update_desired_job_list([job_a])
        self.assertEqual(state.run_list, [job_a_dep, job_a])
        self.assertEqual(state.desired_job_list, [job_a])
        helper = SessionSuspendHelper4()
        session_dir = None
        # Mock away the meta-data as we're not testing that
        with mock.patch.object(helper, '_repr_SessionMetaData') as m:
            m.return_value = 'mocked'
            actual = helper._repr_SessionState(state, session_dir)
        expected = {
            'jobs': {
                job_a_dep.id: job_a_dep.checksum,
                job_a.id: job_a.checksum,
            },
            'desired_job_list': [job_a.id],
            'mandatory_job_list': [],
            'results': {},
            'metadata': 'mocked'
        }
        self.assertEqual(expected, actual)
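
The test above only checks the suspend-side representation. As a hedged sketch of the resume side of the same scenario (assuming that SessionSuspendHelper4.suspend() accepts the state and session directory, and reusing the SessionResumeHelper call shape shown later in load_session()), something like this could be appended; the modified dependency is a placeholder:

        # Sketch only, not part of the original test.
        blob = helper.suspend(state, session_dir)  # assumed signature
        modified_dep = make_job(id='a_dep')  # stands in for an edited job
        resumed = SessionResumeHelper(
            [job_a, modified_dep], None, session_dir).resume(blob, None)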
Example #2
    def _build_SessionState(self, session_repr, early_cb=None):
        """
        Reconstruct the session state object.

        This method creates a fresh SessionState instance and restores
        jobs, results, meta-data and desired job list using helper methods.
        """
        # Construct a fresh session object.
        session = SessionState(self.job_list)
        logger.debug(_("Constructed new session for resume %r"), session)
        # Give early_cb a chance to see the session before we start resuming.
        # This way applications can see, among other things, generated jobs
        # as they are added to the session, by registering appropriate signal
        # handlers on the freshly-constructed session instance.
        if early_cb is not None:
            logger.debug(_("Invoking early callback %r"), early_cb)
            new_session = early_cb(session)
            if new_session is not None:
                logger.debug(_("Using different session for resume: %r"),
                             new_session)
                session = new_session
        # Restore bits and pieces of state
        logger.debug(_("Starting to restore jobs and results to %r..."),
                     session)
        self._restore_SessionState_jobs_and_results(session, session_repr)
        logger.debug(_("Starting to restore metadata..."))
        self._restore_SessionState_metadata(session, session_repr)
        logger.debug(_("Starting to restore desired job list..."))
        self._restore_SessionState_desired_job_list(session, session_repr)
        logger.debug(_("Starting to restore job list..."))
        self._restore_SessionState_job_list(session, session_repr)
        # Return whatever we've got
        logger.debug(_("Resume complete!"))
        return session
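
A hedged illustration of the early_cb hook described above (the handler name is hypothetical): the callback can register signal handlers on the freshly constructed session before any state is restored, or return a replacement session object.

    # Sketch only; 'track_generated_job' is a hypothetical handler.
    def early_cb(session):
        session.on_job_added.connect(track_generated_job)
        return None  # returning a SessionState here would replace 'session'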
Example #3
    def create_with_unit_list(cls, unit_list=None, repo=None):
        """
        Create a session manager with a fresh session.

        This method populates the session storage with all of the well known
        directories (using :meth:`WellKnownDirsHelper.populate()`)

        :param unit_list:
            If specified then this will be the initial list of units known by
            the session state object.
        :param repo:
            If specified then this particular repository will be used to create
            the storage for this session. If left out, a new repository is
            constructed with the default location.
        :ptype repo:
            :class:`~plainbox.impl.session.storage.SessionStorageRepository`.
        :return:
            fresh :class:`SessionManager` instance
        """
        logger.debug("SessionManager.create_with_unit_list()")
        if unit_list is None:
            unit_list = []
        state = SessionState(unit_list)
        if repo is None:
            repo = SessionStorageRepository()
        storage = SessionStorage.create(repo.location)
        context = SessionDeviceContext(state)
        WellKnownDirsHelper(storage).populate()
        return cls([context], storage)
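
A hedged usage sketch for the classmethod above (the unit objects are placeholders; omitting repo falls back to the default repository location):

    # Sketch only: create a manager wrapping a brand new session.
    manager = SessionManager.create_with_unit_list(unit_list=[unit_a, unit_b])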
Example #4
    def load_session(cls, unit_list, storage, early_cb=None, flags=None):
        """
        Load a previously checkpointed session.

        This method allows one to re-open a session that was previously
        created by :meth:`SessionManager.checkpoint()`

        :param unit_list:
            List of all known units. This argument is used to reconstruct the
            session from a dormant state. Since the suspended data cannot
            capture implementation details of each unit reliably, actual units
            need to be provided externally. Unlike in :meth:`create_session()`,
            this list really needs to be complete; it must also include any
            generated units.
        :param storage:
            The storage that should be used for this particular session.
            The storage object holds references to existing directories
            in the file system. When restoring an existing dormant session
            it is important to use the correct storage object, the one that
            corresponds to the file system location used by the session
            before it was saved.
        :ptype storage:
            :class:`~plainbox.impl.session.storage.SessionStorage`
        :param early_cb:
            A callback that allows the caller to "see" the session object
            early, before the bulk of the resume operation happens. The
            callback can be used to register signal handlers on the new
            session before this method returns. It accepts one argument, the
            session being resumed, and is passed directly to
            :meth:`plainbox.impl.session.resume.SessionResumeHelper.resume()`
        :param flags:
            An optional set of flags that may influence the resume process.
            Currently this is an internal implementation detail and no "public"
            flags are provided. Passing None here is a safe equivalent of using
            this API before it was introduced.
        :raises:
            Anything that can be raised by
            :meth:`~plainbox.impl.session.storage.SessionStorage.
            load_checkpoint()` and :meth:`~plainbox.impl.session.suspend.
            SessionResumeHelper.resume()`
        :returns:
            Fresh instance of :class:`SessionManager`
        """
        logger.debug("SessionManager.load_session()")
        try:
            data = storage.load_checkpoint()
        except IOError as exc:
            if exc.errno == errno.ENOENT:
                state = SessionState(unit_list)
            else:
                raise
        else:
            state = SessionResumeHelper(unit_list, flags,
                                        storage.location).resume(
                                            data, early_cb)
        context = SessionDeviceContext(state)
        return cls([context], storage)
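
A hedged usage sketch for load_session() (the storage and unit objects are placeholders); the early callback mirrors the on_job_added pattern used by the tests later in this document:

    # Sketch only: re-open a checkpointed session and watch for jobs that are
    # re-generated while old results are replayed.
    def early_cb(session):
        session.on_job_added.connect(lambda job: print("resumed job:", job.id))

    manager = SessionManager.load_session(all_units, storage, early_cb=early_cb)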
Example #5
    def test_repr_SessionState_typical_session(self):
        """
        verify the representation of a SessionState with some unused jobs

        Unused jobs should simply have no representation: their checksums
        should not be mentioned and their (empty) results should be
        ignored.
        """
        used_job = JobDefinition({
            "plugin": "shell",
            "id": "used",
            "command": "echo 'hello world'",
        })
        unused_job = JobDefinition({
            "plugin": "shell",
            "id": "unused",
            "command": "echo 'hello world'",
        })
        used_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'hello world\n'),
            ],
            'outcome': IJobResult.OUTCOME_PASS
        })
        session_state = SessionState([used_job, unused_job])
        session_state.update_desired_job_list([used_job])
        session_state.update_job_result(used_job, used_result)
        data = self.helper._repr_SessionState(session_state, self.session_dir)
        self.assertEqual(
            data, {
                'jobs': {
                    'used':
                    ('8c393c19fdfde1b6afc5b79d0a1617ecf7531cd832a16450dc'
                     '2f3f50d329d373')
                },
                'results': {
                    'used': [{
                        'comments': None,
                        'execution_duration': None,
                        'io_log': [[0.0, 'stdout', 'aGVsbG8gd29ybGQK']],
                        'outcome': 'pass',
                        'return_code': None
                    }]
                },
                'desired_job_list': ['used'],
                'mandatory_job_list': [],
                'metadata': {
                    'title': None,
                    'flags': [],
                    'running_job_name': None,
                    'app_blob': '',
                    'app_id': None,
                    'custom_joblist': False,
                    'rejected_jobs': []
                },
            })
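
As the expected data above shows, io_log records in the suspend representation are base64-encoded copies of the raw stream bytes; a quick check (not part of the test):

    import base64
    assert base64.b64decode('aGVsbG8gd29ybGQK') == b'hello world\n'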
Example #6
    def test_repr_SessionState_empty_session(self):
        """
        verify that the representation of an empty SessionState is okay
        """
        data = self.helper._repr_SessionState(SessionState([]))
        self.assertEqual(
            data, {
                'jobs': {},
                'results': {},
                'desired_job_list': [],
                'metadata': {
                    'title': None,
                    'flags': [],
                    'running_job_name': None,
                },
            })
Example #7
    def test_suspend(self):
        """
        verify that the suspend() method returns a gzipped JSON representation
        """
        data = self.helper.suspend(SessionState([]))
        # XXX: we cannot really test what the compressed data looks like
        # because apparently python3.2 gzip output is non-deterministic.
        # It seems to be an instance of the gzip bug that was fixed a few
        # years ago.
        #
        # I've filed a bug on python3.2 in Ubuntu and Python upstream project
        # https://bugs.launchpad.net/ubuntu/+source/python3.2/+bug/871083
        #
        # In the meantime we can only test that we got bytes out
        self.assertIsInstance(data, bytes)
        # And that we can gzip-decompress it and get what we expected
        self.assertEqual(gzip.decompress(data), (
            b'{"session":{"desired_job_list":[],"jobs":{},"metadata":'
            b'{"app_blob":null,"flags":[],"running_job_name":null,"title":null'
            b'},"results":{}},"version":2}'))
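
A hedged alternative to the byte-for-byte comparison above (not in the original test): decompressing and parsing the JSON sidesteps both the gzip non-determinism mentioned in the comment and any key-ordering differences.

    import gzip
    import json
    parsed = json.loads(gzip.decompress(data).decode('UTF-8'))
    assert parsed['version'] == 2
    assert parsed['session']['desired_job_list'] == []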
Example #8
    def create_with_unit_list(cls, unit_list=None):
        """
        Create a session manager with a fresh session.

        This method populates the session storage with all of the well known
        directories (using :meth:`WellKnownDirsHelper.populate()`)

        :param unit_list:
            If specified then this will be the initial list of units known by
            the session state object.
        :return:
            fresh :class:`SessionManager` instance
        """
        logger.debug("SessionManager.create_with_unit_list()")
        if unit_list is None:
            unit_list = []
        state = SessionState(unit_list)
        storage = SessionStorage.create()
        context = SessionDeviceContext(state)
        return cls([context], storage)
Example #9
    def test_repr_SessionState_empty_session(self):
        """
        verify that the representation of an empty SessionState is okay
        """
        data = self.helper._repr_SessionState(SessionState([]),
                                              self.session_dir)
        self.assertEqual(
            data, {
                'jobs': {},
                'results': {},
                'desired_job_list': [],
                'mandatory_job_list': [],
                'metadata': {
                    'title': None,
                    'flags': [],
                    'running_job_name': None,
                    'app_blob': '',
                    'app_id': None,
                    'custom_joblist': False,
                    'rejected_jobs': []
                },
            })
Example #10
    def create_session(cls, job_list=None, repo=None, legacy_mode=False):
        """
        Create a session manager with a fresh session.

        This method populates the session storage with all of the well known
        directories (using :meth:`WellKnownDirsHelper.populate()`)

        :param job_list:
            If specified then this will be the initial list of jobs known
            by the session state object. This can be specified for convenience
            but is really optional since the application can always add more
            jobs to an existing session.
        :ptype job_list:
            list of :class:`~plainbox.abc.IJobDefinition`.
        :param repo:
            If specified then this particular repository will be used to create
            the storage for this session. If left out, a new repository is
            constructed with the default location.
        :ptype repo:
            :class:`~plainbox.impl.session.storage.SessionStorageRepository`.
        :param legacy_mode:
            Propagated to
            :meth:`~plainbox.impl.session.storage.SessionStorage.create()`
            to ensure that legacy (single session) mode is used.
        :ptype legacy_mode:
            bool
        :return:
            fresh :class:`SessionManager` instance
        """
        logger.debug("SessionManager.create_session()")
        if job_list is None:
            job_list = []
        state = SessionState(job_list)
        if repo is None:
            repo = SessionStorageRepository()
        storage = SessionStorage.create(repo.location, legacy_mode)
        WellKnownDirsHelper(storage).populate()
        return cls(state, storage)
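
A hedged usage sketch for the legacy create_session() API above (the job objects are placeholders):

    # Sketch only: a fresh session seeded with two job definitions.
    manager = SessionManager.create_session(job_list=[job_a, job_b])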
Example #11
    def test_json_repr_has_version_field(self):
        """
        verify that the json representation has the 'version' field
        """
        data = self.helper._json_repr(SessionState([]))
        self.assertIn("version", data)
Example #12
    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "id": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "id": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'id:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult({
            "io_log": [(0.0, 'stdout', b'id:generated')]
        })
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState([
            self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list([
            self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(
            self.category_job, self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above has no typos or anything.
        self.assertEqual(
            self.generator_job.via, self.category_job.checksum)
        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job
        # Use partial to supply 'self' from the class into the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(
            self.generator_job, self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper1()
        self.data = self.helper._repr_SessionState(self.session_state)
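
A hedged aside on the partial(job_added, self) trick above: because setUp() already has self in scope, the same capture can usually be written as a plain closure, assuming the signal keeps a strong reference to the connected callable:

        # Sketch only: equivalent capture without functools.partial.
        def job_added(job):
            self.generated_job = job

        self.session_state.on_job_added.connect(job_added)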
Example #13
class GeneratedJobSuspendTests(TestCase):
    """
    Tests that check how SessionSuspendHelper behaves when faced with
    generated jobs. This test sets up the following job hierarchy:

        __category__
           \-> generator
                \-> generated

    The "__category__" job is a typical "catter" job that cats an existing
    job from somewhere else in the filesystem. This type of generated job
    is used often for category assignment.

    The "generator" job is a typical non-catter job that actually creates
    new jobs in some way. In this test it generates a job called "generated".
    """

    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "id": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "id": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'id:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult({
            "io_log": [(0.0, 'stdout', b'id:generated')]
        })
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState([
            self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list([
            self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(
            self.category_job, self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above has no typos or anything.
        self.assertEqual(
            self.generator_job.via, self.category_job.checksum)
        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job
        # Use partial to supply 'self' from the class into the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(
            self.generator_job, self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper1()
        self.data = self.helper._repr_SessionState(self.session_state)

    def test_state_tracked_for_all_jobs(self):
        """
        verify that 'state' keeps track of all three jobs
        """
        self.assertIn(self.category_job.id, self.data['jobs'])
        self.assertIn(self.generator_job.id, self.data['jobs'])
        self.assertIn(self.generated_job.id, self.data['jobs'])

    def test_category_job_result_is_saved(self):
        """
        verify that the 'category' job result was saved
        """
        # This result is essential to re-create the association
        # with the 'generator' job. In theory we could get it from
        # the 'via' attribute but that is only true for category assignment
        # where the child job already exists and is defined on the
        # filesystem. This would not work in the case of truly generated jobs
        # so for consistency it is done the same way.
        self.assertEqual(
            self.data['results']['__category__'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                    [0.1, 'stdout', 'aWQ6Z2VuZXJhdG9yCg==']
                ]
            }]
        )

    def test_generator_job_result_is_saved(self):
        """
        verify that the 'generator' job result was saved
        """
        self.assertEqual(
            self.data['results']['generator'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'aWQ6Z2VuZXJhdGVk'],
                ]
            }]
        )

    def test_generated_job_result_is_saved(self):
        """
        verify that the 'generated' job result was saved
        """
        # This is the implicit "empty" result that all jobs have
        self.assertEqual(
            self.data['results']['generated'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': []
            }]
        )

    def test_sanity_check(self):
        """
        verify that the whole suspend data looks right
        """
        # This test is pretty much an "eyeball" inspection test
        # where we can see everything at a glance and don't have to
        # deduce what each part looks like from the tests above.
        #
        # All the data below is a verbatim copy of the generated suspend data
        # that was created when this test was written. The only modification
        # was wrapping the checksums in ( ) so that they wrap correctly and
        # the file can stay PEP-8 clean.
        self.maxDiff = None
        self.assertEqual(self.data, {
            'jobs': {
                '__category__': (
                    'e2475434e4c0b2c825541430e526fe0565780dfeb67'
                    '050f3b7f3453aa3cc439b'),
                'generator': (
                    'b2aa7b7c4298678cebfdbe30f4aae5be97d320910a5'
                    'b4dd312606099f35c03b6'),
                'generated': (
                    '57b395e91bb4af94143eb19586bd18e4013efc5e60d'
                    '6050d9ec0bea15dd19489'),
            },
            'results': {
                '__category__': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                        [0.1, 'stdout', 'aWQ6Z2VuZXJhdG9yCg==']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generator': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'aWQ6Z2VuZXJhdGVk']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generated': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [],
                    'outcome': None,
                    'return_code': None,
                }]
            },
            'desired_job_list': ['__category__', 'generator'],
            'metadata': {
                'flags': [],
                'running_job_name': None,
                'title': None
            },
        })
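
Note that the dictionary asserted above has no 'version' key; as the _json_repr() tests earlier in this document show, the version and the 'session' wrapper are added one level up. A hedged sketch, assuming the single-argument _json_repr() form applies to this helper:

    # Sketch only: the complete suspend document produced by the helper.
    doc = self.helper._json_repr(self.session_state)
    # doc == {'version': ..., 'session': <the dictionary asserted above>}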
Example #14
class GeneratedJobSuspendTests(TestCase):
    """
    Tests that check how SessionSuspendHelper behaves when faced with
    generated jobs. This test sets up the following job hierarchy:

        __category__
           \-> generator
                \-> generated

    The "__category__" job is a typical "catter" job that cats an existing
    job from somewhere else in the filesystem. This type of generated job
    is used often for category assignment.

    The "generator" job is a typical non-catter job that actually creates
    new jobs in some way. In this test it generates a job called "generated".
    """
    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "id": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "id": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'id:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult(
            {"io_log": [(0.0, 'stdout', b'id:generated')]})
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState(
            [self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list(
            [self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(self.category_job,
                                             self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above has no typos or anything.
        self.assertEqual(self.generator_job.via, self.category_job.checksum)

        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job

        # Use partial to supply 'self' from the class into the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(self.generator_job,
                                             self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper1()
        self.data = self.helper._repr_SessionState(self.session_state)

    def test_state_tracked_for_all_jobs(self):
        """
        verify that 'state' keeps track of all three jobs
        """
        self.assertIn(self.category_job.id, self.data['jobs'])
        self.assertIn(self.generator_job.id, self.data['jobs'])
        self.assertIn(self.generated_job.id, self.data['jobs'])

    def test_category_job_result_is_saved(self):
        """
        verify that the 'category' job result was saved
        """
        # This result is essential to re-create the association
        # with the 'generator' job. In theory we could get it from
        # the 'via' attribute but that is only true for category assignment
        # where the child job already exists and is defined on the
        # filesystem. This would not work in the case of truly generated jobs
        # so for consistency it is done the same way.
        self.assertEqual(self.data['results']['__category__'], [{
            'comments': None,
            'execution_duration': None,
            'outcome': None,
            'return_code': None,
            'io_log': [[0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                       [0.1, 'stdout', 'aWQ6Z2VuZXJhdG9yCg==']]
        }])

    def test_generator_job_result_is_saved(self):
        """
        verify that the 'generator' job result was saved
        """
        self.assertEqual(self.data['results']['generator'],
                         [{
                             'comments': None,
                             'execution_duration': None,
                             'outcome': None,
                             'return_code': None,
                             'io_log': [
                                 [0.0, 'stdout', 'aWQ6Z2VuZXJhdGVk'],
                             ]
                         }])

    def test_generated_job_result_is_saved(self):
        """
        verify that the 'generated' job result was saved
        """
        # This is the implicit "empty" result that all jobs have
        self.assertEqual(self.data['results']['generated'],
                         [{
                             'comments': None,
                             'execution_duration': None,
                             'outcome': None,
                             'return_code': None,
                             'io_log': []
                         }])

    def test_sanity_check(self):
        """
        verify that the whole suspend data looks right
        """
        # This test is pretty much an "eyeball" inspection test
        # where we can see everything at a glance and don't have to
        # deduce what each part looks like from the tests above.
        #
        # All the data below is a verbatim copy of the generated suspend data
        # that was created when this test was written. The only modification
        # was wrapping the checksums in ( ) so that they wrap correctly and
        # the file can stay PEP-8 clean.
        self.maxDiff = None
        self.assertEqual(
            self.data, {
                'jobs': {
                    '__category__':
                    ('e2475434e4c0b2c825541430e526fe0565780dfeb67'
                     '050f3b7f3453aa3cc439b'),
                    'generator': ('b2aa7b7c4298678cebfdbe30f4aae5be97d320910a5'
                                  'b4dd312606099f35c03b6'),
                    'generated': ('57b395e91bb4af94143eb19586bd18e4013efc5e60d'
                                  '6050d9ec0bea15dd19489'),
                },
                'results': {
                    '__category__': [{
                        'comments': None,
                        'execution_duration': None,
                        'io_log': [[0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                                   [0.1, 'stdout', 'aWQ6Z2VuZXJhdG9yCg==']],
                        'outcome': None,
                        'return_code': None,
                    }],
                    'generator': [
                        {
                            'comments': None,
                            'execution_duration': None,
                            'io_log': [[0.0, 'stdout', 'aWQ6Z2VuZXJhdGVk']],
                            'outcome': None,
                            'return_code': None,
                        }
                    ],
                    'generated': [{
                        'comments': None,
                        'execution_duration': None,
                        'io_log': [],
                        'outcome': None,
                        'return_code': None,
                    }]
                },
                'desired_job_list': ['__category__', 'generator'],
                'metadata': {
                    'flags': [],
                    'running_job_name': None,
                    'title': None
                },
            })
Example #15
    def test_json_repr_current_version(self):
        """
        verify the current value of the version field
        """
        data = self.helper._json_repr(SessionState([]), self.session_dir)
        self.assertEqual(data['version'], 6)
Example #16
    def test_json_repr_stores_session_state(self):
        """
        verify that the json representation has the 'session' field
        """
        data = self.helper._json_repr(SessionState([]), self.session_dir)
        self.assertIn("session", data)
Example #17
    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "id": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "id": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'id:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult(
            {"io_log": [(0.0, 'stdout', b'id:generated')]})
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState(
            [self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list(
            [self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(self.category_job,
                                             self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above has no typos or anything.
        self.assertEqual(self.generator_job.via, self.category_job.checksum)

        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job

        # Use partial to supply 'self' from the class into the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(self.generator_job,
                                             self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper1()
        self.data = self.helper._repr_SessionState(self.session_state)
Example #18
class GeneratedJobSuspendTests(TestCase):
    """
    Tests that check how SessionSuspendHelper behaves when faced with
    generated jobs. This test sets up the following job hierarchy:

        __category__
           \-> generator
                \-> generated

    The "__category__" job is a typical "catter" job that cats an existing
    job from somewhere else in the filesystem. This type of generated job
    is used often for category assignment.

    The "generator" job is a typical non-catter job that actually creates
    new jobs in some way. In this test it generates a job called "generated".
    """

    def setUp(self):
        # Create a "__category__" job
        self.category_job = JobDefinition({
            "plugin": "local",
            "name": "__category__"
        })
        # Create a "generator" job
        self.generator_job = JobDefinition({
            "plugin": "local",
            "name": "generator"
        })
        # Keep a variable for the (future) generated job
        self.generated_job = None
        # Create a result for the "__category__" job.
        # It must define a verbatim copy of the "generator" job
        self.category_result = MemoryJobResult({
            "io_log": [
                (0.0, "stdout", b'plugin:local\n'),
                (0.1, "stdout", b'name:generator\n'),
            ]
        })
        # Create a result for the "generator" job.
        # It will define the "generated" job
        self.generator_result = MemoryJobResult({
            "io_log": [(0.0, 'stdout', b'name:generated')]
        })
        # Create a session that knows about the two jobs that exist
        # directly as files (__category__ and generator)
        self.session_state = SessionState([
            self.category_job, self.generator_job])
        # Select both of them for execution.
        self.session_state.update_desired_job_list([
            self.category_job, self.generator_job])
        # "execute" the "__category__" job by showing the session the result
        self.session_state.update_job_result(
            self.category_job, self.category_result)
        # Ensure that the generator job gained the "via" attribute
        # This is how we know the code above has no typos or anything.
        self.assertEqual(
            self.generator_job.via, self.category_job.get_checksum())
        # "execute" the "generator" job by showing the session the result.
        # Connect the 'on_job_added' signal to a helper function that
        # extracts the "generated" job

        def job_added(self, job):
            self.generated_job = job
        # Use partial to supply 'self' from the class into the function above
        self.session_state.on_job_added.connect(partial(job_added, self))
        # Show the result of the "generator" job to the session,
        # this will define the "generated" job, fire the signal
        # and call our callback
        self.session_state.update_job_result(
            self.generator_job, self.generator_result)
        # Ensure that we got the generated_job variable assigned
        # (by the event/signal handled above)
        self.assertIsNot(self.generated_job, None)
        # Now the stage is set for testing. Let's create the suspend helper
        # and use the data we've defined so far to create JSON-friendly
        # description of the session state.
        self.helper = SessionSuspendHelper()
        self.data = self.helper._repr_SessionState(self.session_state)

    def test_state_tracked_for_all_jobs(self):
        """
        verify that 'state' keeps track of all three jobs
        """
        self.assertIn(self.category_job.name, self.data['jobs'])
        self.assertIn(self.generator_job.name, self.data['jobs'])
        self.assertIn(self.generated_job.name, self.data['jobs'])

    def test_category_job_result_is_saved(self):
        """
        verify that the 'category' job result was saved
        """
        # This result is essential to re-create the association
        # with the 'generator' job. In theory we could get it from
        # the 'via' attribute but that is only true for category assignment
        # where the child job already exists and is defined on the
        # filesystem. This would not work in the case of truly generated jobs
        # so for consistency it is done the same way.
        self.assertEqual(
            self.data['results']['__category__'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                    [0.1, 'stdout', 'bmFtZTpnZW5lcmF0b3IK']
                ]
            }]
        )

    def test_generator_job_result_is_saved(self):
        """
        verify that the 'generator' job result was saved
        """
        self.assertEqual(
            self.data['results']['generator'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': [
                    [0.0, 'stdout', 'bmFtZTpnZW5lcmF0ZWQ='],
                ]
            }]
        )

    def test_generated_job_result_is_saved(self):
        """
        verify that the 'generated' job result was saved
        """
        # This is the implicit "empty" result that all jobs have
        self.assertEqual(
            self.data['results']['generated'], [{
                'comments': None,
                'execution_duration': None,
                'outcome': None,
                'return_code': None,
                'io_log': []
            }]
        )

    def test_sanity_check(self):
        """
        verify that the whole suspend data looks right
        """
        # This test is pretty much an "eyeball" inspection test
        # where we can see everything at a glance and don't have to
        # deduce what each part looks like from the tests above.
        #
        # All the data below is a verbatim copy of the generated suspend data
        # that was created when this test was written. The only modification
        # was wrapping the checksums in ( ) so that they wrap correctly and
        # the file can stay PEP-8 clean.
        self.maxDiff = None
        self.assertEqual(self.data, {
            'jobs': {
                '__category__': (
                    '5267192a5eac9288d144242d800b981eeca476c17e0'
                    'dd32a09c4b3ea0a14f955'),
                'generator': (
                    '7e67e23b7e7a6a5803721a9f282c0e88c7f40bae470'
                    '950f880e419bb9c7665d8'),
                'generated': (
                    'bfee8c57b6adc9f0f281b59fe818de2ed98b6affb78'
                    '9cf4fbf282d89453190d3'),
            },
            'results': {
                '__category__': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'cGx1Z2luOmxvY2FsCg=='],
                        [0.1, 'stdout', 'bmFtZTpnZW5lcmF0b3IK']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generator': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [
                        [0.0, 'stdout', 'bmFtZTpnZW5lcmF0ZWQ=']],
                    'outcome': None,
                    'return_code': None,
                }],
                'generated': [{
                    'comments': None,
                    'execution_duration': None,
                    'io_log': [],
                    'outcome': None,
                    'return_code': None,
                }]
            },
            'desired_job_list': ['__category__', 'generator'],
            'metadata': {
                'flags': [],
                'running_job_name': None,
                'title': None
            },
        })