Example #1
    def runTest(self):
        "test a serial job"

        taskmaster = Taskmaster(num_tasks, self, RandomTask)
        jobs = SCons.Job.Jobs(1, taskmaster)
        jobs.run()

        self.failUnless(taskmaster.tasks_were_serial(),
                        "the tasks were not executed in series")
        self.failUnless(taskmaster.all_tasks_are_executed(),
                        "all the tasks were not executed")
        self.failUnless(taskmaster.all_tasks_are_iterated(),
                        "all the tasks were not iterated over")
        self.failUnless(taskmaster.all_tasks_are_postprocessed(),
                        "all the tasks were not postprocessed")
        self.failIf(taskmaster.num_failed,
                    "some task(s) failed to execute")
Example #2
    def runTest(self):
        "test handling lack of parallel support"

        def NoParallel(tm, num, stack_size):
            raise NameError

        save_Parallel = SCons.Job.Parallel
        SCons.Job.Parallel = NoParallel
        try:
            taskmaster = Taskmaster(num_tasks, self, RandomTask)
            jobs = SCons.Job.Jobs(2, taskmaster)
            self.failUnless(jobs.num_jobs == 1,
                            "unexpected number of jobs %d" % jobs.num_jobs)
            jobs.run()
            self.failUnless(taskmaster.tasks_were_serial(),
                            "the tasks were not executed in series")
            self.failUnless(taskmaster.all_tasks_are_executed(),
                            "all the tasks were not executed")
            self.failUnless(taskmaster.all_tasks_are_iterated(),
                            "all the tasks were not iterated over")
            self.failUnless(taskmaster.all_tasks_are_postprocessed(),
                            "all the tasks were not postprocessed")
            self.failIf(taskmaster.num_failed,
                        "some task(s) failed to execute")
        finally:
            SCons.Job.Parallel = save_Parallel
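
The try/finally block above is a hand-rolled monkey patch: it swaps SCons.Job.Parallel for a stub so Jobs has to fall back to its serial path, then restores the original. On a Python with unittest.mock available, the same save-and-restore can be expressed with patch.object; a minimal sketch, reusing the fixtures assumed earlier:

from unittest import mock

import SCons.Job

def NoParallel(tm, num, stack_size):
    # Stub that behaves as if the Parallel job class cannot be constructed.
    raise NameError

def check_serial_fallback(test_case):
    # patch.object restores the original SCons.Job.Parallel on exit,
    # even if the body raises, so no explicit try/finally is needed.
    with mock.patch.object(SCons.Job, 'Parallel', NoParallel):
        taskmaster = Taskmaster(num_tasks, test_case, RandomTask)
        jobs = SCons.Job.Jobs(2, taskmaster)
        test_case.assertEqual(jobs.num_jobs, 1)
        jobs.run()
    test_case.assertTrue(taskmaster.tasks_were_serial())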
Example #3
    def runTest(self):
        "test parallel jobs with tasks that raise exceptions"

        taskmaster = Taskmaster(num_tasks, self, ExceptionTask)
        jobs = SCons.Job.Jobs(num_jobs, taskmaster)
        jobs.run()

        self.failIf(taskmaster.num_executed, "a task was executed")
        self.failUnless(taskmaster.num_iterated >= 1,
                        "one or more task should have been iterated")
        self.failUnless(taskmaster.num_failed >= 1,
                        "one or more tasks should have failed")
        self.failUnless(taskmaster.num_postprocessed >= 1,
                        "one or more tasks should have been postprocessed")
Example #4
    def runTest(self):
        "test a serial job with tasks that raise exceptions"

        taskmaster = Taskmaster(num_tasks, self, ExceptionTask)
        jobs = SCons.Job.Jobs(1, taskmaster)
        jobs.run()

        self.failIf(taskmaster.num_executed, "a task was executed")
        self.failUnless(taskmaster.num_iterated == 1,
                        "exactly one task should have been iterated")
        self.failUnless(taskmaster.num_failed == 1,
                        "exactly one task should have failed")
        self.failUnless(taskmaster.num_postprocessed == 1,
                        "exactly one task should have been postprocessed")
Example #5
    def runTest(self):
        "test parallel jobs"

        try:
            import threading
        except ImportError:
            raise NoThreadsException()

        taskmaster = Taskmaster(num_tasks, self, RandomTask)
        jobs = SCons.Job.Jobs(num_jobs, taskmaster)
        jobs.run()

        self.failUnless(not taskmaster.tasks_were_serial(),
                        "the tasks were not executed in parallel")
        self.failUnless(taskmaster.all_tasks_are_executed(),
                        "all the tasks were not executed")
        self.failUnless(taskmaster.all_tasks_are_iterated(),
                        "all the tasks were not iterated over")
        self.failUnless(taskmaster.all_tasks_are_postprocessed(),
                        "all the tasks were not postprocessed")
        self.failIf(taskmaster.num_failed, "some task(s) failed to execute")

        # Verify that parallel jobs will pull all of the completed tasks
        # out of the queue at once, instead of one by one.  We do this by
        # replacing the default ThreadPool class with one that records the
        # order in which tasks are put() and get() to/from the pool, and
        # which sleeps a little bit before calling get() to let the initial
        # tasks complete and get their notifications on the resultsQueue.

        class SleepTask(Task):
            def _do_something(self):
                time.sleep(0.1)

        global SaveThreadPool
        SaveThreadPool = SCons.Job.ThreadPool

        class WaitThreadPool(SaveThreadPool):
            def put(self, task):
                ThreadPoolCallList.append('put(%s)' % task.i)
                return SaveThreadPool.put(self, task)

            def get(self):
                time.sleep(0.5)
                result = SaveThreadPool.get(self)
                ThreadPoolCallList.append('get(%s)' % result[0].i)
                return result

        SCons.Job.ThreadPool = WaitThreadPool

        try:
            taskmaster = Taskmaster(3, self, SleepTask)
            jobs = SCons.Job.Jobs(2, taskmaster)
            jobs.run()

            # The key here is that we get(1) and get(2) from the
            # resultsQueue before we put(3), but get(1) and get(2) can
            # be in either order depending on how the first two parallel
            # tasks get scheduled by the operating system.
            expect = [
                ['put(1)', 'put(2)', 'get(1)', 'get(2)', 'put(3)', 'get(3)'],
                ['put(1)', 'put(2)', 'get(2)', 'get(1)', 'put(3)', 'get(3)'],
            ]
            assert ThreadPoolCallList in expect, ThreadPoolCallList

        finally:
            SCons.Job.ThreadPool = SaveThreadPool
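
A note on the assertion style: failUnless and failIf are the old unittest names; they have long been deprecated aliases of assertTrue and assertFalse, and newer Python releases drop them entirely. With the current names, the serial check reads, for example:

import unittest

import SCons.Job

class SerialJobModernAsserts(unittest.TestCase):
    # Same serial check as the first example, spelled with the
    # non-deprecated assertion names; Taskmaster, RandomTask and
    # num_tasks are the fixtures assumed earlier.
    def runTest(self):
        taskmaster = Taskmaster(num_tasks, self, RandomTask)
        jobs = SCons.Job.Jobs(1, taskmaster)
        jobs.run()

        self.assertTrue(taskmaster.tasks_were_serial(),
                        "the tasks were not executed in series")
        self.assertTrue(taskmaster.all_tasks_are_executed(),
                        "all the tasks were not executed")
        self.assertFalse(taskmaster.num_failed,
                         "some task(s) failed to execute")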