Example #1
    def test_job_keyvals(self):
        keyval_dict = {'mykey': 'myvalue'}
        job_id = rpc_interface.create_job(name='test', priority='Medium',
                                          control_file='foo',
                                          control_type='Client',
                                          hosts=['host1'],
                                          keyvals=keyval_dict)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['keyvals'], keyval_dict)

    def test_test_retry(self):
        # CLIENT is a control-type constant defined or imported elsewhere in
        # the test module (the previous test passes the string 'Client').
        job_id = rpc_interface.create_job(name='flake',
                                          priority='Medium',
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          test_retry=10)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['test_retry'], 10)
Example #5
    def test_get_jobs_filters(self):
        HqeStatus = models.HostQueueEntry.Status

        def create_two_host_job():
            return self._create_job(hosts=[1, 2])

        def set_hqe_statuses(job, first_status, second_status):
            entries = job.hostqueueentry_set.all()
            entries[0].update_object(status=first_status)
            entries[1].update_object(status=second_status)

        queued = create_two_host_job()

        queued_and_running = create_two_host_job()
        set_hqe_statuses(queued_and_running, HqeStatus.QUEUED,
                         HqeStatus.RUNNING)

        running_and_complete = create_two_host_job()
        set_hqe_statuses(running_and_complete, HqeStatus.RUNNING,
                         HqeStatus.COMPLETED)

        complete = create_two_host_job()
        set_hqe_statuses(complete, HqeStatus.COMPLETED, HqeStatus.COMPLETED)

        started_but_inactive = create_two_host_job()
        set_hqe_statuses(started_but_inactive, HqeStatus.QUEUED,
                         HqeStatus.COMPLETED)

        parsing = create_two_host_job()
        set_hqe_statuses(parsing, HqeStatus.PARSING, HqeStatus.PARSING)

        def check_job_ids(actual_job_dicts, expected_jobs):
            self.assertEquals(
                    set(job_dict['id'] for job_dict in actual_job_dicts),
                    set(job.id for job in expected_jobs))

        # Filter semantics exercised below: not_yet_run matches jobs whose
        # entries are all still queued, running matches jobs that have started
        # but are not yet fully finished, and finished matches jobs whose
        # entries have all completed.
        check_job_ids(rpc_interface.get_jobs(not_yet_run=True), [queued])
        check_job_ids(rpc_interface.get_jobs(running=True),
                      [queued_and_running, running_and_complete,
                       started_but_inactive, parsing])
        check_job_ids(rpc_interface.get_jobs(finished=True), [complete])

    def test_get_jobs_type_filters(self):
        # The suite/sub/standalone filters are mutually exclusive; requesting
        # more than one at a time is rejected.
        self.assertRaises(AssertionError,
                          rpc_interface.get_jobs,
                          suite=True,
                          sub=True)
        self.assertRaises(AssertionError,
                          rpc_interface.get_jobs,
                          suite=True,
                          standalone=True)
        self.assertRaises(AssertionError,
                          rpc_interface.get_jobs,
                          standalone=True,
                          sub=True)

        # As exercised below: suite selects the parent job (it has children),
        # sub selects jobs created with a parent_job_id, and standalone
        # selects jobs with neither relationship.
        parent_job = self._create_job(hosts=[1])
        child_jobs = self._create_job(hosts=[1, 2],
                                      parent_job_id=parent_job.id)
        standalone_job = self._create_job(hosts=[1])

        self._check_job_ids(rpc_interface.get_jobs(suite=True), [parent_job])
        self._check_job_ids(rpc_interface.get_jobs(sub=True), [child_jobs])
        self._check_job_ids(rpc_interface.get_jobs(standalone=True),
                            [standalone_job])
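
The second test calls a _check_job_ids helper that is not shown in this excerpt; it presumably lives on the test case class and mirrors the local check_job_ids closure defined in test_get_jobs_filters above. A minimal sketch under that assumption:

    def _check_job_ids(self, actual_job_dicts, expected_jobs):
        # Compare by id only: get_jobs() returns dictionaries, while the
        # expected values are the model objects returned by _create_job().
        self.assertEquals(
                set(job_dict['id'] for job_dict in actual_job_dicts),
                set(job.id for job in expected_jobs))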