def attach(self, controller):
    # Here `Task.attach` is the invocation of the superclass'
    # `attach` method (which attaches *this* object to a controller),
    # while `self.task.attach` is the propagation of the `attach`
    # method to the wrapped task.  (Same for `detach` below.)
    Task.attach(self, controller)
    self.task.attach(controller)
def test_standard_session_iterator_for_tasks(self):
    self.sess.add(Task(jobname='task-1'))
    self.sess.add(Task(jobname='task-2'))
    self.sess.add(Task(jobname='task-3'))
    assert (set(('task-1', 'task-2', 'task-3'))
            == set(job.jobname for job in self.sess))
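The session-iteration tests here and below rely on a `self.sess` fixture that is not shown. A minimal sketch of how such a fixture might look, assuming only the public `gc3libs.session.Session` constructor; the class name and temporary-directory handling are illustrative, not the actual test scaffolding:

import tempfile

from gc3libs.session import Session

class TestSessionIteration(object):
    # hypothetical fixture class; the real test class is not shown here
    def setup_method(self):
        # create a fresh, empty session in a throw-away directory
        self.tmpdir = tempfile.mkdtemp()
        self.sess = Session(self.tmpdir)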
def attach(self, controller): """ Use the given Controller interface for operations on the job associated with this task. """ if self._current_task is not None: self.tasks[self._current_task].attach(controller) Task.attach(self, controller)
def attach(self, controller): """ Use the given Controller interface for operations on the job associated with this task. """ for task in self.tasks: if not task._attached: task.attach(controller) Task.attach(self, controller)
def __init__(self, task, max_retries=0, **extra_args):
    """
    Wrap `task` and resubmit it until `self.retry()` returns `False`.

    :param Task task: A `Task` instance that should be retried.

    :param int max_retries: Maximum number of times `task` should be
        re-submitted; use 0 for 'no limit'.
    """
    self.max_retries = max_retries
    self.retried = 0
    self.task = task
    Task.__init__(self, **extra_args)
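The docstring says the wrapped task is resubmitted until `retry()` returns `False`. A minimal sketch of overriding that hook in a subclass; the exit-code check is an assumption made for illustration, not necessarily the library's default policy:

from gc3libs.workflow import RetryableTask

class RetryOnFailure(RetryableTask):
    # hypothetical subclass, shown only to illustrate the `retry()` hook
    def retry(self):
        # stop as soon as the wrapped task has terminated successfully
        if self.task.execution.exitcode == 0:
            return False
        # otherwise re-submit while retries remain (0 means 'no limit')
        return (self.max_retries == 0
                or self.retried < self.max_retries)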
def test_workflow_iterator_for_session(self):
    coll = TaskCollection(
        jobname='collection',
        tasks=[Task(jobname='task-%d' % i) for i in range(3)])
    coll2 = TaskCollection(
        jobname='collection-1',
        tasks=[Task(jobname='task-1-%d' % i) for i in range(3)])
    coll.tasks.append(coll2)
    self.sess.add(coll)
    assert (['collection', 'task-0', 'task-1', 'task-2',
             'collection-1', 'task-1-0', 'task-1-1', 'task-1-2']
            == [job.jobname for job in self.sess.iter_workflow()])
def __init__(self, task, max_retries=0, **extra_args):
    """
    Wrap `task` and resubmit it until `self.retry()` returns `False`.

    :param Task task: A `Task` instance that should be retried.

    :param int max_retries: Maximum number of times `task` should be
        re-submitted; use 0 for 'no limit'.
    """
    self.max_retries = max_retries
    self.retried = 0
    self.task = task
    self.would_output = self.task.would_output
    Task.__init__(self, **extra_args)
def test_standard_session_iterator_for_tasks_and_task_collections(self):
    coll = TaskCollection(jobname='collection',
                          tasks=[Task() for i in range(3)])
    self.sess.add(coll)
    assert (['collection'] == [job.jobname for job in self.sess])
def test_persist_urlkeydict(self):
    """
    Test that we can persist GC3Pie's `UrlKeyDict` class.
    """
    a = Task(attr=UrlKeyDict({'/tmp/1': 1, '/tmp/2': 2}))
    id_ = self.store.save(a)
    b = self.store.load(id_)
    assert b.attr == a.attr
def test_persist_urlvaluedict(self):
    """
    Test that we can persist GC3Pie's `UrlValueDict` class.
    """
    a = Task(attr=UrlValueDict({'foo': '/tmp/1', 'bar': '/tmp/2'}))
    id_ = self.store.save(a)
    b = self.store.load(id_)
    assert b.attr == a.attr
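Both dict classes come from `gc3libs.url`. A short sketch of why the post-load `b.attr == a.attr` assertions above are meaningful, under the assumption that string keys (resp. values) are coerced to `Url` objects on insertion:

from gc3libs.url import UrlKeyDict, UrlValueDict

# assumption: `UrlKeyDict` coerces keys (and `UrlValueDict` coerces
# values) to `gc3libs.url.Url` on insertion, so two dicts built from
# equal strings compare equal -- including after a save/load round-trip
a = UrlKeyDict({'/tmp/1': 1, '/tmp/2': 2})
b = UrlKeyDict({'/tmp/2': 2, '/tmp/1': 1})
assert a == b

v = UrlValueDict({'foo': '/tmp/1', 'bar': '/tmp/2'})
assert sorted(v.keys()) == ['bar', 'foo']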
def __init__(self, tasks=None, **extra_args):
    if tasks is None:
        self.tasks = []
    else:
        self.tasks = tasks
    Task.__init__(self, **extra_args)
@pytest.mark.parametrize(
    "task",
    (Task(),
     gc3libs.workflow.TaskCollection(tasks=[Task(), Task()]),
     gc3libs.workflow.SequentialTaskCollection([Task(), Task()]),
     MyStagedTaskCollection(),
     gc3libs.workflow.ParallelTaskCollection(tasks=[Task(), Task()]),
     MyChunkedParameterSweep(1, 20, 1, 5),
     gc3libs.workflow.RetryableTask(Task())))
def test_task_objects_buggy(task):
    """
    Test that all `Task`-like objects are persistable.
    """
    with NamedTemporaryFile(prefix='gc3libs.', suffix='.tmp') as tmp:
        store = make_store("sqlite://%s" % tmp.name)
        id_ = store.save(task)
        store.load(id_)
def detach(self):
    for task in self.tasks:
        task.detach()
    Task.detach(self)
def detach(self):
    # see comment in `attach` above
    Task.detach(self)
    self.task.detach()
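In normal use the controller is an `Engine`, and attachment happens indirectly when a task is added to it. A minimal usage sketch, assuming a default GC3Pie configuration is available; the `max_retries` value is arbitrary:

import gc3libs
from gc3libs.workflow import RetryableTask

# hypothetical usage: `Engine.add` is what ends up calling `attach`
# (and, via the override above, attaches the wrapped task as well)
engine = gc3libs.create_engine()
task = RetryableTask(gc3libs.Task(), max_retries=3)
engine.add(task)
engine.progress()   # one submit/update/fetch pass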
# check that the ID stored in the container matches, and that the
# contained object got its own, distinct ID
assert container_id == container.persistent_id
objid = container[0].persistent_id
assert objid != container_id

# check that loading the container re-creates the same contained object
del container
container = self.store.load(container_id)
obj = self.store.load(objid)
assert obj == container[0]

# return objects for further testing
return (container_id, objid)
def new_task(self, param, **extra_args):
    return Task(**extra_args)
def stage0(self):
    return Task()
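The `MyStagedTaskCollection` and `MyChunkedParameterSweep` helpers used by the parametrized persistence test above are presumably built from the two overrides just shown. A hedged reconstruction, with the base-class constructor signature inferred only from the call `MyChunkedParameterSweep(1, 20, 1, 5)`:

from gc3libs import Task
from gc3libs.workflow import ChunkedParameterSweep, StagedTaskCollection

class MyStagedTaskCollection(StagedTaskCollection):
    # `StagedTaskCollection` runs `stage0`, `stage1`, ... in sequence;
    # one trivial stage is enough to exercise persistence
    def stage0(self):
        return Task()

class MyChunkedParameterSweep(ChunkedParameterSweep):
    # one trivial task per parameter value in the sweep
    def new_task(self, param, **extra_args):
        return Task(**extra_args)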