Example 1
    def _run_jobs(self, ns, job_list, exporter, transport=None):
        # Compute the run list; this can notify us about problems in
        # the selected jobs. Currently we just display each problem.
        matching_job_list = self._get_matching_job_list(ns, job_list)
        print("[ Analyzing Jobs ]".center(80, '='))
        # Create a session that handles most of the stuff needed to run jobs
        try:
            session = SessionState(job_list)
        except DependencyDuplicateError as exc:
            # Handle possible DependencyDuplicateError that can happen if
            # someone is using plainbox for job development.
            print("The job database you are currently using is broken")
            print("At least two jobs contend for the name {0}".format(
                exc.job.name))
            print("First job defined in: {0}".format(exc.job.origin))
            print("Second job defined in: {0}".format(
                exc.duplicate_job.origin))
            raise SystemExit(exc)
        with session.open():
            if session.previous_session_file():
                if self.ask_for_resume():
                    session.resume()
                else:
                    session.clean()
            self._update_desired_job_list(session, matching_job_list)
            if (sys.stdin.isatty() and sys.stdout.isatty() and not
                    ns.not_interactive):
                outcome_callback = self.ask_for_outcome
            else:
                outcome_callback = None
            runner = JobRunner(
                session.session_dir,
                session.jobs_io_log_dir,
                outcome_callback=outcome_callback,
                dry_run=ns.dry_run
            )
            self._run_jobs_with_session(ns, session, runner)
            # Get a stream with exported session data.
            exported_stream = io.BytesIO()
            data_subset = exporter.get_session_data_subset(session)
            exporter.dump(data_subset, exported_stream)
            exported_stream.seek(0)  # rewind the stream before reading it back
            # Write the stream to file if requested
            self._save_results(ns.output_file, exported_stream)
            # Invoke the transport?
            if transport:
                exported_stream.seek(0)
                try:
                    transport.send(exported_stream.read())
                except InvalidSchema as exc:
                    print("Invalid destination URL: {0}".format(exc))
                except ConnectionError as exc:
                    print(("Unable to connect "
                           "to destination URL: {0}").format(exc))
                except HTTPError as exc:
                    print(("Server returned an error when "
                           "receiving or processing: {0}").format(exc))

        # FIXME: sensible return value
        return 0
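
The export-and-submit step in Example 1 follows a small, reusable pattern: serialize the session into an in-memory stream, rewind it, then optionally hand the bytes to a transport. Below is a minimal sketch of just that pattern; the helper name export_session is illustrative, and exporter/transport are assumed to expose the same get_session_data_subset(), dump() and send() calls shown above.

import io

def export_session(session, exporter, transport=None):
    # Serialize the exported session data into an in-memory byte stream.
    stream = io.BytesIO()
    data = exporter.get_session_data_subset(session)
    exporter.dump(data, stream)
    # Rewind so the next reader sees the serialized data from the start.
    stream.seek(0)
    if transport is not None:
        # Hand the raw bytes to the transport, as in the example above.
        transport.send(stream.read())
        stream.seek(0)
    return stream
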
Example 2
    def _run_jobs(self, ns, job_list, exporter):
        # Compute the run list; this can notify us about problems in
        # the selected jobs. Currently we just display each problem.
        matching_job_list = self._get_matching_job_list(ns, job_list)
        print("[ Analyzing Jobs ]".center(80, '='))
        # Create a session that handles most of the stuff needed to run jobs
        session = SessionState(job_list)
        self._update_desired_job_list(session, matching_job_list)
        with session.open():
            if (sys.stdin.isatty() and sys.stdout.isatty() and not
                    ns.not_interactive):
                outcome_callback = self.ask_for_outcome
            else:
                outcome_callback = None
            runner = JobRunner(self.checkbox, session.session_dir,
                               session.jobs_io_log_dir,
                               outcome_callback=outcome_callback)
            self._run_jobs_with_session(ns, session, runner)
            self._save_results(ns, session, exporter)
        # FIXME: sensible return value
        return 0
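
Examples 1 through 3 share the same interactivity check before choosing an outcome callback. Pulled out on its own, the logic looks like the sketch below; pick_outcome_callback is a hypothetical helper name, and ns.not_interactive is assumed to be the argparse flag used in the examples.

import sys

def pick_outcome_callback(ns, ask_for_outcome):
    # Prompt the user only when both stdin and stdout are attached to a
    # terminal and a non-interactive run was not explicitly requested.
    if sys.stdin.isatty() and sys.stdout.isatty() and not ns.not_interactive:
        return ask_for_outcome
    return None
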
Example 3
File: box.py Project: zyga/plainbox
    def _run_jobs(self, ns, job_list):
        # Compute the run list; this can notify us about problems in
        # the selected jobs. Currently we just display each problem.
        matching_job_list = self._get_matching_job_list(ns, job_list)
        print("[ Analyzing Jobs ]".center(80, '='))
        # Create a session that handles most of the stuff needed to run jobs
        session = SessionState(job_list)
        self._update_desired_job_list(session, matching_job_list)
        with session.open():
            if (sys.stdin.isatty() and sys.stdout.isatty() and not
                    ns.not_interactive):
                outcome_callback = self.ask_for_outcome
            else:
                outcome_callback = None
            runner = JobRunner(self._checkbox, session.session_dir,
                               outcome_callback=outcome_callback)
            self._run_jobs_with_session(ns, session, runner)
        print("[ Results ]".center(80, '='))
        for job_name in sorted(session.job_state_map):
            job_state = session.job_state_map[job_name]
            if job_state.result.outcome != JobResult.OUTCOME_NONE:
                print("{}: {}".format(job_name, job_state.result.outcome))
Example 4
class _SRUInvocation:
    """
    Helper class that performs a single invocation of the sru command.
    Unlike the SRU command object itself, a new instance is created for
    each invocation.
    """

    def __init__(self, ns, config):
        self.ns = ns
        self.checkbox = CheckBox()
        self.config = config
        self.whitelist = WhiteList.from_file(os.path.join(
            self.checkbox.whitelists_dir, "sru.whitelist"))
        self.job_list = self.checkbox.get_builtin_jobs()
        # XXX: maybe allow specifying system_id from command line?
        self.exporter = XMLSessionStateExporter(system_id=None)
        self.session = None
        self.runner = None

    def run(self):
        # Create a session that handles most of the stuff needed to run jobs.
        try:
            self.session = SessionState(self.job_list)
        except DependencyDuplicateError as exc:
            # Handle possible DependencyDuplicateError that can happen if
            # someone is using plainbox for job development.
            print("The job database you are currently using is broken")
            print("At least two jobs contend for the name {0}".format(
                exc.job.name))
            print("First job defined in: {0}".format(exc.job.origin))
            print("Second job defined in: {0}".format(
                exc.duplicate_job.origin))
            raise SystemExit(exc)
        with self.session.open():
            self._set_job_selection()
            self.runner = JobRunner(
                self.session.session_dir,
                self.session.jobs_io_log_dir,
                command_io_delegate=self,
                outcome_callback=None,  # SRU runs are never interactive
                dry_run=self.ns.dry_run
            )
            self._run_all_jobs()
            if self.config.fallback_file is not Unset:
                self._save_results()
            self._submit_results()
        # FIXME: sensible return value
        return 0

    def _set_job_selection(self):
        desired_job_list = get_matching_job_list(self.job_list, self.whitelist)
        problem_list = self.session.update_desired_job_list(desired_job_list)
        if problem_list:
            logger.warning("There were some problems with the selected jobs")
            for problem in problem_list:
                logger.warning("- %s", problem)
            logger.warning("Problematic jobs will not be considered")

    def _save_results(self):
        print("Saving results to {0}".format(self.config.fallback_file))
        data = self.exporter.get_session_data_subset(self.session)
        with open(self.config.fallback_file, "wt", encoding="UTF-8") as stream:
            translating_stream = ByteStringStreamTranslator(stream, "UTF-8")
            self.exporter.dump(data, translating_stream)

    def _submit_results(self):
        print("Submitting results to {0} for secure_id {1}".format(
              self.config.c3_url, self.config.secure_id))
        options_string = "secure_id={0}".format(self.config.secure_id)
        # Create the transport object
        try:
            transport = CertificationTransport(
                self.config.c3_url, options_string, self.config)
        except InvalidSecureIDError as exc:
            print(exc)
            return False
        # Prepare the data for submission
        data = self.exporter.get_session_data_subset(self.session)
        with tempfile.NamedTemporaryFile(mode='w+b') as stream:
            # Dump the data to the temporary file
            self.exporter.dump(data, stream)
            # Flush and rewind
            stream.flush()
            stream.seek(0)
            try:
                # Send the data, reading from the temporary file
                result = transport.send(stream)
                if 'url' in result:
                    print("Successfully sent, submission status at {0}".format(
                          result['url']))
                else:
                    print("Successfully sent, server response: {0}".format(
                          result))

            except InvalidSchema as exc:
                print("Invalid destination URL: {0}".format(exc))
            except ConnectionError as exc:
                print("Unable to connect to destination URL: {0}".format(exc))
            except HTTPError as exc:
                print(("Server returned an error when "
                       "receiving or processing: {0}").format(exc))
            except IOError as exc:
                print("Problem reading a file: {0}".format(exc))

    def _run_all_jobs(self):
        again = True
        while again:
            again = False
            for job in self.session.run_list:
                # Skip jobs that already have a result; this matters only when
                # we run over the job list again, after discovering new jobs
                # via the local job output.
                result = self.session.job_state_map[job.name].result
                if result.outcome is not None:
                    continue
                self._run_single_job(job)
                self.session.persistent_save()
                if job.plugin == "local":
                    # After each local job runs, rebuild the list of matching
                    # jobs and run everything again.
                    self._set_job_selection()
                    again = True
                    break

    def _run_single_job(self, job):
        print("- {}:".format(job.name), end=' ')
        job_state, job_result = run_job_if_possible(
            self.session, self.runner, self.config, job)
        print("{0}".format(job_result.outcome))
        if job_result.comments is not None:
            print("comments: {0}".format(job_result.comments))
        if job_state.readiness_inhibitor_list:
            print("inhibitors:")
        for inhibitor in job_state.readiness_inhibitor_list:
            print("  * {}".format(inhibitor))
        self.session.update_job_result(job, job_result)
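
_submit_results() above dumps the export into a temporary file before sending it, so the transport receives a flushed, rewound, seekable stream. The core of that pattern as a sketch: send_exported_data is an illustrative name, and the exporter/transport interfaces are assumed to be the ones used in Example 4.

import tempfile

def send_exported_data(exporter, session, transport):
    # Dump the exported data to a temporary file so the transport can
    # read it back as a seekable binary stream.
    data = exporter.get_session_data_subset(session)
    with tempfile.NamedTemporaryFile(mode='w+b') as stream:
        exporter.dump(data, stream)
        stream.flush()  # make sure everything reached the file
        stream.seek(0)  # rewind so the transport reads from the start
        return transport.send(stream)
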
Example 5
class SessionStateLocalStorageTests(TestCase):

    def setUp(self):
        # Session data are kept in XDG_CACHE_HOME/plainbox/.session.
        # To avoid resuming a real session, we have to select a temporary
        # location instead.
        self._sandbox = tempfile.mkdtemp()
        # Take a copy of the environment so tearDown() can restore it;
        # binding os.environ directly would not preserve the original values.
        self._env = dict(os.environ)
        os.environ['XDG_CACHE_HOME'] = self._sandbox

    def job_state(self, name):
        # A helper function to avoid overly long expressions
        return self.session.job_state_map[name]

    def test_persistent_save(self):
        self.job_A = make_job("A")
        self.job_list = [self.job_A]
        self.session = SessionState(self.job_list)
        result_A = JobResult({
            'job': self.job_A,
            'outcome': JobResult.OUTCOME_PASS,
            'comments': 'All good',
            'return_code': 0,
            'io_log': ((0, 'stdout', "Success !\n"),)
        })
        session_json_text = """{
            "_job_state_map": {
                "A": {
                    "_job": {
                        "data": {
                            "name": "A",
                            "plugin": "dummy",
                            "requires": null,
                            "depends": null
                        },
                        "_class_id": "JOB_DEFINITION"
                    },
                    "_result": {
                        "data": {
                            "job": {
                                "data": {
                                    "name": "A",
                                    "plugin": "dummy",
                                    "requires": null,
                                    "depends": null
                                },
                                "_class_id": "JOB_DEFINITION"
                            },
                            "outcome": "pass",
                            "return_code": 0,
                            "comments": "All good",
                            "io_log": [
                                [
                                    0,
                                    "stdout",
                                    "Success !\\n"
                                ]
                            ]
                        },
                        "_class_id": "JOB_RESULT"
                    },
                    "_class_id": "JOB_STATE"
                }
            },
            "_desired_job_list": [
                {
                    "data": {
                        "name": "A",
                        "plugin": "dummy",
                        "requires": null,
                        "depends": null
                    },
                    "_class_id": "JOB_DEFINITION"
                }
            ],
            "_class_id": "SESSION_STATE"
        }"""
        self.session.open()
        self.session.update_desired_job_list([self.job_A])
        self.session.update_job_result(self.job_A, result_A)
        self.session.persistent_save()
        session_file = self.session.previous_session_file()
        self.session.close()
        self.assertIsNotNone(session_file)
        with open(session_file) as f:
            raw_json = json.load(f)
            self.maxDiff = None
            self.assertEqual(raw_json, json.loads(session_json_text))

    def test_resume_session(self):
        # This test uses a single session with four jobs: job A depends on a
        # resource provided by job R, which has no dependencies of its own.
        # Both job X and job Y depend on job A.
        #
        # A -(resource dependency)-> R
        #
        # X -(direct dependency) -> A
        #
        # Y -(direct dependency) -> A
        self.job_A = make_job("A", requires="R.attr == 'value'")
        self.job_A_expr = self.job_A.get_resource_program().expression_list[0]
        self.job_R = make_job("R", plugin="resource")
        self.job_X = make_job("X", depends='A')
        self.job_Y = make_job("Y", depends='A')
        self.job_list = [self.job_A, self.job_R, self.job_X, self.job_Y]
        # Create a new session (session_dir is empty)
        self.session = SessionState(self.job_list)
        result_R = JobResult({
            'job': self.job_R,
            'io_log': make_io_log(((0, 'stdout', b"attr: value\n"),),
                                  self._sandbox)
        })
        result_A = JobResult({
            'job': self.job_A,
            'outcome': JobResult.OUTCOME_PASS
        })
        result_X = JobResult({
            'job': self.job_X,
            'outcome': JobResult.OUTCOME_PASS
        })
        # Job Y can't start as it requires job A
        self.assertFalse(self.job_state('Y').can_start())
        self.session.update_desired_job_list([self.job_X, self.job_Y])
        self.session.open()
        self.session.update_job_result(self.job_R, result_R)
        self.session.update_job_result(self.job_A, result_A)
        self.session.update_job_result(self.job_X, result_X)
        self.session.persistent_save()
        self.session.close()
        # Create a new session (session_dir should contain session data)
        self.session = SessionState(self.job_list)
        self.session.open()
        # Resume the previous session
        self.session.resume()
        # This time job Y can start
        self.assertTrue(self.job_state('Y').can_start())
        self.session.close()

    def tearDown(self):
        shutil.rmtree(self._sandbox)
        os.environ.clear()
        os.environ.update(self._env)
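
The setUp()/tearDown() pair above sandboxes session storage by redirecting XDG_CACHE_HOME to a temporary directory. Below is a self-contained sketch of that environment sandbox on its own, with a hypothetical class name and no plainbox imports required.

import os
import shutil
import tempfile
import unittest

class SandboxedCacheHomeTests(unittest.TestCase):
    # Illustrative sketch: point XDG_CACHE_HOME at a throwaway directory
    # for the duration of each test and restore the original value after.

    def setUp(self):
        self._sandbox = tempfile.mkdtemp()
        self._old_cache_home = os.environ.get('XDG_CACHE_HOME')
        os.environ['XDG_CACHE_HOME'] = self._sandbox

    def tearDown(self):
        shutil.rmtree(self._sandbox)
        if self._old_cache_home is None:
            os.environ.pop('XDG_CACHE_HOME', None)
        else:
            os.environ['XDG_CACHE_HOME'] = self._old_cache_home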