Example 1
def c_task_instance(**kwargs):
    """ Creates a TaskInstance for testing; any field can be overridden via kwargs """
    task_instance = TaskInstance()
    task_instance.task_key = 'foo.bar'
    task_instance.status = STATUS_STOPPED
    task_instance.__dict__.update(kwargs)
    task_instance.save()
    return task_instance
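A minimal usage sketch for the factory above, assuming c_task_instance, TaskInstance and STATUS_STOPPED are importable from the test module shown and that a test database is configured so that .save() succeeds; the task key and field names are the ones used elsewhere in these examples.

# Sketch only: the import and the overridden fields mirror what the examples use.
from datetime import datetime

instance = c_task_instance()                        # defaults: 'foo.bar', STATUS_STOPPED
running = c_task_instance(task_key='demo.demo_task.TestTask',
                          started=datetime.now())   # any model field can be overridden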
Example 2
    def setUp(self):
        TaskManagerTestCaseMixIn.setUp(self)

        self.completion = {}
        for task in self.tasks:
            self.completion[task] = None

        self.task_instances = []
        for task in self.tasks:
            # queued task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.save()
            self.task_instances.append(task_instance)

            # running task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            task_instance.save()
            self.task_instances.append(task_instance)

            # finished task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            completed_time = datetime.now()
            task_instance.completed = completed_time
            task_instance.save()
            self.completion[task] = completed_time
            self.task_instances.append(task_instance)

            # failed task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            task_instance.status = -1
            task_instance.save()
            self.task_instances.append(task_instance)
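Because setUp above records the completion time for each task, a companion test can assert against it directly. The following is only a sketch; the test name is hypothetical, it relies solely on the attributes set in setUp, and it assumes the class also derives from unittest.TestCase so that assertEqual is available.

    def test_completion_times_recorded(self):
        """ Sketch: each task key maps to the completed time saved in setUp """
        for task in self.tasks:
            completed = [i.completed for i in self.task_instances
                         if i.task_key == task and i.completed is not None]
            self.assertEqual(1, len(completed))
            self.assertEqual(self.completion[task], completed[0])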
Example 3
    def setUp(self):
        self.tasks = [
                'demo.demo_task.TestTask',
                'demo.demo_task.TestContainerTask',
                'demo.demo_task.TestParallelTask'
                ]
        self.completion = {}
        for task in self.tasks:
            self.completion[task] = None

        # set up the manager with an internal cache we can alter
        self.manager = TaskManager(None, lazy_init=True)
        pydra_settings.TASK_DIR_INTERNAL = '/var/lib/pydra/test_tasks_internal'

        # use one known task package ('demo') for testing
        self.package = 'demo'
        self.package_dir = '%s/%s' % (self.manager.tasks_dir_internal, self.package)

        self.task_instances = []
        for task in self.tasks[:2]:
            # queued task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.save()
            self.task_instances.append(task_instance)

            # running task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            task_instance.save()
            self.task_instances.append(task_instance)

            # finished task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            completed_time = datetime.now()
            task_instance.completed = completed_time
            task_instance.save()
            self.completion[task] = completed_time
            self.task_instances.append(task_instance)

            # failed task
            task_instance = TaskInstance()
            task_instance.task_key = task
            task_instance.started = datetime.now()
            task_instance.status = -1
            task_instance.save()
            self.task_instances.append(task_instance)
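Both setUp methods above repeat the same four blocks per task. A small helper could remove that duplication; the sketch below is one possible refactoring, under the assumption that only the fields shown (task_key, started, completed, status) matter to the tests.

    def _make_instance(self, task, **fields):
        """ Sketch of a helper: build, populate and save one TaskInstance """
        task_instance = TaskInstance()
        task_instance.task_key = task
        for name, value in fields.items():
            setattr(task_instance, name, value)
        task_instance.save()
        self.task_instances.append(task_instance)
        return task_instance

    # The "finished task" block would then collapse to:
    #     completed_time = datetime.now()
    #     self._make_instance(task, started=datetime.now(), completed=completed_time)
    #     self.completion[task] = completed_time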
Example 4
    def _queue_task(self, task_key, args={}, priority=5):
        """
        Adds a (root) task that is to be run.

        Under the hood, the scheduler creates a task instance for the task, puts
        it into the queue, and then tries to advance the queue.
        """
        logger.info('Queued Task: %s - Args:  %s' % (task_key, args))

        task_instance = TaskInstance()
        task_instance.task_key = task_key
        task_instance.priority = priority
        task_instance.args = simplejson.dumps(args)
        task_instance.queued = datetime.now()
        task_instance.status = STATUS_STOPPED
        task_instance.save()
        
        # Queue the root task as the first work request.  This lets the queue
        # advancement logic function the same way for a root task as for a subtask.
        task_instance.queue_worker_request(task_instance)

        with self._queue_lock:
            heappush(self._queue, [task_instance.compute_score(), task_instance])
            # cache this task
            self._active_tasks[task_instance.id] = task_instance

        threads.deferToThread(self._schedule)        
        return task_instance
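A usage sketch for the method above: the task key is taken from the earlier examples, while the scheduler variable and the contents of args are purely illustrative.

# Sketch only: 'scheduler' stands for whatever object defines _queue_task, and the
# keys inside args are invented for illustration.
instance = scheduler._queue_task('demo.demo_task.TestTask',
                                 args={'iterations': 10},
                                 priority=3)
# The returned TaskInstance has been saved with status STATUS_STOPPED; the deferred
# call to _schedule is what eventually starts it.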