Esempio n. 1
0
 def test_iterate_eager(self):
     """Iterating a GroupResult of eager results yields each stored value."""
     first = EagerResult(uuid(), 42, states.SUCCESS)
     second = EagerResult(uuid(), 42, states.SUCCESS)
     group_result = self.app.GroupResult(uuid(), [first, second])
     results = group_result.iterate()
     for _ in range(2):
         self.assertEqual(next(results), 42)
Esempio n. 2
0
 def test_iterate_eager(self):
     """Iterating a TaskSetResult of eager results yields each value."""
     ar1 = EagerResult(gen_unique_id(), 42, states.SUCCESS)
     ar2 = EagerResult(gen_unique_id(), 42, states.SUCCESS)
     ts = TaskSetResult(gen_unique_id(), [ar1, ar2])
     it = iter(ts)
     # next(it) works on Python 2.6+ and Python 3; it.next() is Py2-only
     # and raises AttributeError on Python 3.
     self.assertEqual(next(it), 42)
     self.assertEqual(next(it), 42)
Esempio n. 3
0
 def test_get_sync_subtask_option(self, task_join_will_block):
     """A blocked join raises RuntimeError unless sync subtasks are allowed."""
     task_join_will_block.return_value = True
     task_id = uuid()
     eager_res = EagerResult(task_id, 'x', 'x', states.SUCCESS)
     with pytest.raises(RuntimeError):
         eager_res.get()
     # Explicitly opting out of the guard must let the call through.
     eager_res.get(disable_sync_subtasks=False)
Esempio n. 4
0
 def done(self):
     """Return whether the tracked task has finished; None if no task id."""
     if not self.task_id:
         return None
     if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
         # Eager mode has no real backend; fabricate a finished result.
         result = EagerResult(self.task_id, None, 'SUCCESS')
     else:
         result = AsyncResult(self.task_id)
     return result.ready()
Esempio n. 5
0
 def test_iterate_eager(self):
     """Iterating a GroupResult of eager results yields each value."""
     ar1 = EagerResult(uuid(), 42, states.SUCCESS)
     ar2 = EagerResult(uuid(), 42, states.SUCCESS)
     ts = GroupResult(uuid(), [ar1, ar2])
     it = iter(ts)
     # next(it) is the version-portable spelling; it.next() exists
     # only on Python 2 and fails with AttributeError on Python 3.
     self.assertEqual(next(it), 42)
     self.assertEqual(next(it), 42)
Esempio n. 6
0
 def done(self):
     """Return whether the associated Celery task has completed.

     Returns ``result.ready()`` for the tracked task, or ``None``
     (implicitly) when no ``task_id`` has been recorded yet.
     """
     if self.task_id:
         if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
             # Eager mode stores no backend state; fake a finished result.
             result = EagerResult(self.task_id, None, 'SUCCESS')
         else:
             result = AsyncResult(self.task_id)
         return result.ready()
Esempio n. 7
0
 def test_iterate_eager(self):
     """GroupResult.iterate() warns as deprecated but still yields values."""
     members = [EagerResult(uuid(), 42, states.SUCCESS) for _ in range(2)]
     ts = self.app.GroupResult(uuid(), members)
     with pytest.warns(CPendingDeprecationWarning):
         it = ts.iterate()
     for _ in members:
         assert next(it) == 42
Esempio n. 8
0
    def test_propagates_for_parent(self):
        """get(propagate=True) re-raises a failed parent's exception.

        A FAILURE parent must short-circuit before the backend is
        consulted; a SUCCESS parent lets the backend wait proceed.
        """
        x = self.app.AsyncResult(uuid())
        x.backend = Mock()
        x.parent = EagerResult(uuid(), KeyError('foo'), states.FAILURE)
        with self.assertRaises(KeyError):
            x.get(propagate=True)
        # The failure propagated before any backend wait happened.
        self.assertFalse(x.backend.wait_for.called)

        x.parent = EagerResult(uuid(), 42, states.SUCCESS)
        x.get(propagate=True)
        self.assertTrue(x.backend.wait_for.called)
Esempio n. 9
0
    def test_propagates_for_parent(self):
        """get(propagate=True) re-raises a failed parent's exception.

        With a FAILURE parent the error propagates before any backend
        wait; with a SUCCESS parent the backend is consulted and its
        pending value (84) is returned.
        """
        x = self.app.AsyncResult(uuid())
        x.backend = Mock(name='backend')
        x.backend.get_task_meta.return_value = {}
        x.backend.wait_for_pending.return_value = 84
        x.parent = EagerResult(uuid(), KeyError('foo'), states.FAILURE)
        with pytest.raises(KeyError):
            x.get(propagate=True)
        # Propagation must short-circuit before the backend wait.
        x.backend.wait_for_pending.assert_not_called()

        x.parent = EagerResult(uuid(), 42, states.SUCCESS)
        assert x.get(propagate=True) == 84
        x.backend.wait_for_pending.assert_called()
Esempio n. 10
0
    def test_propagates_for_parent(self):
        """get(propagate=True) re-raises a failed parent's exception.

        With a FAILURE parent the error propagates before any backend
        wait; with a SUCCESS parent the backend's pending value (84)
        is returned.
        """
        x = self.app.AsyncResult(uuid())
        x.backend = Mock(name='backend')
        x.backend.get_task_meta.return_value = {}
        x.backend.wait_for_pending.return_value = 84
        x.parent = EagerResult(uuid(), KeyError('foo'), states.FAILURE)
        with self.assertRaises(KeyError):
            x.get(propagate=True)
        # Propagation must short-circuit before the backend wait.
        self.assertFalse(x.backend.wait_for_pending.called)

        x.parent = EagerResult(uuid(), 42, states.SUCCESS)
        self.assertEqual(x.get(propagate=True), 84)
        self.assertTrue(x.backend.wait_for_pending.called)
Esempio n. 11
0
    def apply_async(self, args=None, kwargs=None, **options):
        """
        Queues a task, raises an exception by default if already queued.

        :param \*args: positional arguments passed on to the task.
        :param \*\*kwargs: keyword arguments passed on to the task.
        :keyword \*\*once: (optional)
            :param: graceful: (optional)
                If True, wouldn't raise an exception if already queued.
                Instead will return none.
            :param: timeout: (optional)
                An `int' number of seconds after which the lock will expire.
                If not set, defaults to 1 hour.
            :param: keys: (optional)

        """
        once_options = options.get('once', {})
        once_graceful = once_options.get(
            'graceful', self.once.get('graceful', False))
        once_timeout = once_options.get(
            'timeout', self.once.get('timeout', self.default_timeout))

        key = self.get_key(args, kwargs)
        try:
            self.raise_or_lock(key, once_timeout)
        except self.AlreadyQueued as e:
            if once_graceful:
                return EagerResult(None, None, states.REJECTED)
            raise e
        return super(QueueOnce, self).apply_async(args, kwargs, **options)
Esempio n. 12
0
    def resume(self, direct=True):
        """
        Resumes the process step by running all pending child steps and tasks

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            The executed workflow if direct is true, the workflow non-executed
            otherwise
        """

        logger.debug('Resuming step {} ({})'.format(self.name, self.pk))
        child_steps = self.get_children()

        # Inspect the whole subtree: the step is resumable only if some
        # descendant task is still PENDING, not undone and not an undo task.
        step_descendants = self.get_descendants(include_self=True)
        recursive_tasks = ProcessTask.objects.filter(
            processstep__in=step_descendants,
            undone__isnull=True,
            undo_type=False,
            status=celery_states.PENDING,
        )

        if not recursive_tasks.exists():
            if direct:
                # Nothing to resume: report immediate success.
                return EagerResult(self.celery_id, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        tasks = self.tasks(manager='by_step_pos').filter(
            undone__isnull=True, undo_type=False, status=celery_states.PENDING)

        return self.run_children(tasks, child_steps, direct)
Esempio n. 13
0
    def apply(self, args=None, kwargs=None,
              link=None, link_error=None, **options):
        """Execute this task locally, by blocking until the task returns.

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.
        :keyword throw: Re-raise task exceptions.  Defaults to
                        the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS`
                        setting.

        :rtype :class:`celery.result.EagerResult`:

        """
        # trace imports Task, so need to import inline.
        from celery.app.trace import eager_trace_task

        app = self._get_app()
        args = args or ()
        # add 'self' if this is a bound method.
        if self.__self__ is not None:
            args = (self.__self__, ) + tuple(args)
        kwargs = kwargs or {}
        task_id = options.get('task_id') or uuid()
        retries = options.get('retries', 0)
        throw = app.either('CELERY_EAGER_PROPAGATES_EXCEPTIONS',
                           options.pop('throw', None))

        # Make sure we get the task instance, not class.
        task = app._tasks[self.name]

        # Fake the request context a worker would normally supply.
        request = {'id': task_id,
                   'retries': retries,
                   'is_eager': True,
                   'logfile': options.get('logfile'),
                   'loglevel': options.get('loglevel', 0),
                   'callbacks': maybe_list(link),
                   'errbacks': maybe_list(link_error),
                   'delivery_info': {'is_eager': True}}
        if self.accept_magic_kwargs:
            # Legacy "magic kwargs": feed task metadata to run(), but only
            # the keywords its signature actually accepts.
            default_kwargs = {'task_name': task.name,
                              'task_id': task_id,
                              'task_retries': retries,
                              'task_is_eager': True,
                              'logfile': options.get('logfile'),
                              'loglevel': options.get('loglevel', 0),
                              'delivery_info': {'is_eager': True}}
            supported_keys = fun_takes_kwargs(task.run, default_kwargs)
            extend_with = dict((key, val)
                               for key, val in items(default_kwargs)
                               if key in supported_keys)
            kwargs.update(extend_with)

        tb = None
        retval, info = eager_trace_task(task, task_id, args, kwargs,
                                        app=self._get_app(),
                                        request=request, propagate=throw)
        if isinstance(retval, ExceptionInfo):
            # Unpack a failure so the result carries exception + traceback.
            retval, tb = retval.exception, retval.traceback
        state = states.SUCCESS if info is None else info.state
        return EagerResult(task_id, retval, state, traceback=tb)
Esempio n. 14
0
 def test_children(self):
     """Cached children are exposed via the ``children`` property."""
     result = self.app.AsyncResult('1')
     cached = [EagerResult(str(n), n, states.SUCCESS) for n in range(3)]
     result._cache = {'children': cached, 'status': states.SUCCESS}
     result.backend = Mock()
     self.assertTrue(result.children)
     self.assertEqual(len(result.children), 3)
Esempio n. 15
0
    def apply(self,
              args=None,
              kwargs=None,
              link=None,
              link_error=None,
              **options):
        """Execute this task locally, by blocking until the task returns.

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.
        :keyword throw: Re-raise task exceptions.  Defaults to
                        the :setting:`task_eager_propagates`
                        setting.

        :rtype :class:`celery.result.EagerResult`:

        """
        # trace imports Task, so need to import inline.
        from celery.app.trace import build_tracer

        app = self._get_app()
        args = args or ()
        # add 'self' if this is a bound method.
        if self.__self__ is not None:
            args = (self.__self__, ) + tuple(args)
        kwargs = kwargs or {}
        task_id = options.get('task_id') or uuid()
        retries = options.get('retries', 0)
        throw = app.either('task_eager_propagates', options.pop('throw', None))

        # Make sure we get the task instance, not class.
        task = app._tasks[self.name]

        # Fake the request context a worker would normally supply.
        request = {
            'id': task_id,
            'retries': retries,
            'is_eager': True,
            'logfile': options.get('logfile'),
            'loglevel': options.get('loglevel', 0),
            'callbacks': maybe_list(link),
            'errbacks': maybe_list(link_error),
            'headers': options.get('headers'),
            'delivery_info': {
                'is_eager': True
            }
        }
        tb = None
        # Build an eager tracer and run the task synchronously in-process.
        tracer = build_tracer(
            task.name,
            task,
            eager=True,
            propagate=throw,
            app=self._get_app(),
        )
        ret = tracer(task_id, args, kwargs, request)
        retval = ret.retval
        if isinstance(retval, ExceptionInfo):
            # Unpack a failure so the result carries exception + traceback.
            retval, tb = retval.exception, retval.traceback
        state = states.SUCCESS if ret.info is None else ret.info.state
        return EagerResult(task_id, retval, state, traceback=tb)
Esempio n. 16
0
    def apply(self, args=None, kwargs=None,
              link=None, link_error=None,
              task_id=None, retries=None, throw=None,
              logfile=None, loglevel=None, headers=None, **options):
        """Execute this task locally, by blocking until the task returns.

        Arguments:
            args (Tuple): positional arguments passed on to the task.
            kwargs (Dict): keyword arguments passed on to the task.
            throw (bool): Re-raise task exceptions.
                Defaults to the :setting:`task_eager_propagates` setting.

        Returns:
            celery.result.EagerResult: pre-evaluated result.
        """
        # trace imports Task, so need to import inline.
        from celery.app.trace import build_tracer

        app = self._get_app()
        args = args or ()
        kwargs = kwargs or {}
        task_id = task_id or uuid()
        retries = retries or 0
        if throw is None:
            throw = app.conf.task_eager_propagates

        # Make sure we get the task instance, not class.
        task = app._tasks[self.name]

        # Fake the request context a worker would normally supply.
        request = {
            'id': task_id,
            'retries': retries,
            'is_eager': True,
            'logfile': logfile,
            'loglevel': loglevel or 0,
            'hostname': gethostname(),
            'callbacks': maybe_list(link),
            'errbacks': maybe_list(link_error),
            'headers': headers,
            'ignore_result': options.get('ignore_result', False),
            'delivery_info': {
                'is_eager': True,
                'exchange': options.get('exchange'),
                'routing_key': options.get('routing_key'),
                'priority': options.get('priority'),
            },
        }
        tb = None
        # Build an eager tracer and run the task synchronously in-process.
        tracer = build_tracer(
            task.name, task, eager=True,
            propagate=throw, app=self._get_app(),
        )
        ret = tracer(task_id, args, kwargs, request)
        retval = ret.retval
        if isinstance(retval, ExceptionInfo):
            # Unpack a failure so the result carries exception + traceback.
            retval, tb = retval.exception, retval.traceback
        if isinstance(retval, Retry) and retval.sig is not None:
            # An eager retry re-applies the replacement signature directly.
            return retval.sig.apply(retries=retries + 1)
        state = states.SUCCESS if ret.info is None else ret.info.state
        return EagerResult(task_id, retval, state, traceback=tb)
Esempio n. 17
0
 def test_children(self):
     """Children fetched from the backend are exposed via ``children``."""
     result = self.app.AsyncResult('1')
     fetched = [EagerResult(str(n), n, states.SUCCESS) for n in range(3)]
     result.backend = Mock()
     result.backend.get_children.return_value = fetched
     result.backend.READY_STATES = states.READY_STATES
     self.assertTrue(result.children)
     self.assertEqual(len(result.children), 3)
Esempio n. 18
0
    def retry(self, direct=True):
        """
        Retries the process step by first retrying all child steps and then all
        failed tasks.

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            none
        """
        def create_sub_task(t):
            # Build a celery signature for the retry copy of a task,
            # carrying the bookkeeping metadata in params['_options'].
            t = t.create_retry_obj()
            t.params['_options'] = {
                'args': t.args,
                'responsible': t.responsible_id,
                'ip': t.information_package_id,
                'step': self.id,
                'step_pos': t.processstep_pos,
                'hidden': t.hidden,
                'result_params': t.result_params,
            }
            created = self._create_task(t.name)
            return created.si(*t.args, **t.params).set(task_id=str(t.pk),
                                                       queue=created.queue)

        child_steps = self.child_steps.all()

        # Only previously undone tasks that have not been retried yet.
        tasks = self.tasks(manager='by_step_pos').filter(
            undone__isnull=False,
            retried__isnull=True).order_by('processstep_pos')

        if not tasks.exists() and not child_steps.exists():
            if direct:
                # Nothing to retry: report immediate success.
                return EagerResult(self.pk, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain

        step_canvas = func(s.retry(direct=False) for s in child_steps)
        task_canvas = func(create_sub_task(t) for t in tasks)

        if not child_steps:
            workflow = task_canvas
        elif not tasks:
            workflow = step_canvas
        else:
            workflow = (step_canvas | task_canvas)

        if direct:
            if self.eager:
                return workflow.apply()
            else:
                return workflow.apply_async()
        else:
            return workflow
Esempio n. 19
0
    def run(self, direct=True):
        """
        Runs the process step by first running the child steps and then the
        tasks.

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            The executed chain consisting of potential child steps followed by
            tasks if called directly. The chain "non-executed" if direct is
            false
        """
        def create_sub_task(t):
            # Build a celery signature for a task, carrying the
            # bookkeeping metadata in params['_options'].
            created = self._create_task(t.name)
            t.params['_options'] = {
                'args': t.args,
                'responsible': t.responsible_id,
                'ip': t.information_package_id,
                'step': self.id,
                'step_pos': t.processstep_pos,
                'hidden': t.hidden,
                'result_params': t.result_params,
            }
            return created.si(*t.args, **t.params).set(task_id=str(t.pk),
                                                       queue=created.queue)

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain

        child_steps = self.child_steps.all()
        tasks = self.tasks(manager='by_step_pos').all()

        if not tasks.exists() and not child_steps.exists():
            if direct:
                # Nothing to run: report immediate success.
                return EagerResult(self.pk, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        step_canvas = func(s.run(direct=False)
                           for s in child_steps) if child_steps else chain()
        task_canvas = func(create_sub_task(t) for t in tasks)

        if not child_steps:
            workflow = task_canvas
        elif not tasks:
            workflow = step_canvas
        else:
            workflow = (step_canvas | task_canvas)

        if direct:
            if self.eager:
                return workflow.apply()
            else:
                return workflow.apply_async()
        else:
            return workflow
Esempio n. 20
0
 def status(self):
     """Return the Celery state of the tracked task, or "PENDING"."""
     if not self.task_id:
         return "PENDING"
     if getattr(settings, 'CELERY_ALWAYS_EAGER', False):
         # Eager mode has no real backend; fabricate a finished result.
         result = EagerResult(self.task_id, None, 'SUCCESS')
     else:
         result = AsyncResult(self.task_id)
     return result.state
Esempio n. 21
0
    def resume(self, direct=True):
        """
        Resumes the process step by running all pending child steps and tasks

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            The executed workflow if direct is true, the workflow non-executed
            otherwise
        """
        def create_sub_task(t):
            # Build a celery signature for a task, carrying the
            # bookkeeping metadata in params['_options'].
            created = self._create_task(t.name)
            t.params['_options'] = {
                'args': t.args,
                'responsible': t.responsible_id,
                'ip': t.information_package_id,
                'step': self.id,
                'step_pos': t.processstep_pos,
                'hidden': t.hidden,
                'result_params': t.result_params
            }
            return created.si(*t.args, **t.params).set(task_id=str(t.pk),
                                                       queue=created.queue)

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain

        # Only children that still contain PENDING tasks need to run again.
        child_steps = self.child_steps.filter(
            tasks__status=celery_states.PENDING)
        tasks = self.tasks(manager='by_step_pos').filter(
            undone__isnull=True, undo_type=False, status=celery_states.PENDING)

        if not tasks.exists() and not child_steps.exists():
            # Nothing pending anywhere: report immediate success.
            return EagerResult(self.pk, [], celery_states.SUCCESS)

        step_canvas = func(s.run(direct=False) for s in child_steps)
        task_canvas = func(create_sub_task(t) for t in tasks)

        if not child_steps:
            workflow = task_canvas
        elif not tasks:
            workflow = step_canvas
        else:
            workflow = (step_canvas | task_canvas)

        if direct:
            if self.eager:
                return workflow.apply()
            else:
                return workflow.apply_async()
        else:
            return workflow
Esempio n. 22
0
 def test_get_sync_subtask_option(self, task_join_will_block):
     """get() inside a task raises unless sync subtasks are allowed."""
     task_join_will_block.return_value = True
     tid = uuid()
     res_subtask_async = EagerResult(tid, 'x', 'x', states.SUCCESS)
     # Joining from within a task is blocked by default.
     with pytest.raises(RuntimeError):
         res_subtask_async.get()
     # Explicitly disabling the guard lets the call through.
     res_subtask_async.get(disable_sync_subtasks=False)
Esempio n. 23
0
File: base.py Project: frac/celery
    def apply(self, args=None, kwargs=None, **options):
        """Execute this task locally, by blocking until the task
        returns.

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.
        :keyword throw: Re-raise task exceptions.  Defaults to
                        the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS`
                        setting.

        :rtype :class:`celery.result.EagerResult`:

        """
        args = args or []
        kwargs = kwargs or {}
        task_id = options.get("task_id") or gen_unique_id()
        retries = options.get("retries", 0)
        throw = self.app.either("CELERY_EAGER_PROPAGATES_EXCEPTIONS",
                                options.pop("throw", None))

        # Make sure we get the task instance, not class.
        task = tasks[self.name]

        # Fake the request context a worker would normally supply.
        request = {"id": task_id,
                   "retries": retries,
                   "is_eager": True,
                   "logfile": options.get("logfile"),
                   "loglevel": options.get("loglevel", 0),
                   "delivery_info": {"is_eager": True}}
        if self.accept_magic_kwargs:
            # Legacy "magic kwargs": feed task metadata to run(), but only
            # the keywords its signature actually accepts.
            default_kwargs = {"task_name": task.name,
                              "task_id": task_id,
                              "task_retries": retries,
                              "task_is_eager": True,
                              "logfile": options.get("logfile"),
                              "loglevel": options.get("loglevel", 0),
                              "delivery_info": {"is_eager": True}}
            supported_keys = fun_takes_kwargs(task.run, default_kwargs)
            extend_with = dict((key, val)
                                    for key, val in default_kwargs.items()
                                        if key in supported_keys)
            kwargs.update(extend_with)

        # Run the task synchronously through the tracer and wrap the
        # outcome (value or exception) in an EagerResult.
        trace = TaskTrace(task.name, task_id, args, kwargs,
                          task=task, request=request, propagate=throw)
        retval = trace.execute()
        if isinstance(retval, ExceptionInfo):
            retval = retval.exception
        return EagerResult(task_id, retval, trace.status,
                           traceback=trace.strtb)
Esempio n. 24
0
def test_status_view_none_return():
    '''test status return content with uuids and status'''
    task_id = '12345678-1234-1234-1234-123456781234'
    url = reverse('task-status', args=(task_id, ))
    client = Client()
    # Patch the view's AsyncResult so it sees a pre-built PENDING result
    # whose return value is None.
    with mock.patch('web.views.AsyncResult') as asyncresult_class:
        asyncresult_class.return_value = EagerResult(id=task_id,
                                                     ret_value=None,
                                                     state='PENDING')
        response = client.get(path=url)
    # The view renders "<h1>Result of task <id></h1><br><result>".
    match_res = re.match(
        r'<h1>Result of task 12345678-1234-1234-'
        r'1234-123456781234</h1><br>(.*)', response.content.decode('utf-8'))
    assert match_res, 'Task status regex not found in GET response'
    assert match_res.group(1) == 'None', \
        "Task status return not NONE result in response"
Esempio n. 25
0
    def run_children(self, tasks, steps, direct=True):
        """Build (and, when direct, execute) the celery workflow for the
        given pending tasks and child steps of this step."""
        tasks = tasks.filter(status=celery_states.PENDING, )

        if not tasks.exists() and not steps.exists():
            if direct:
                # Nothing to run: report immediate success.
                return EagerResult(self.celery_id, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain
        # Interleave child steps and tasks in their recorded order.
        result_list = sorted(itertools.chain(steps, tasks),
                             key=lambda x: (x.get_pos(), x.time_created))

        # Optional error callbacks attached to every task in the workflow.
        on_error_tasks = self.on_error(manager='by_step_pos').all()
        if on_error_tasks.exists():
            on_error_group = group(
                create_sub_task(t, self, immutable=False)
                for t in on_error_tasks)
        else:
            on_error_group = None

        if direct:
            logger.debug('Creating celery workflow')
        else:
            logger.debug('Creating partial celery workflow')

        # Skip empty sub-canvases (groups with no tasks) while expanding
        # steps recursively and wrapping tasks as signatures.
        workflow = func(
            y
            for y in (x.resume(direct=False) if isinstance(x, ProcessStep) else
                      create_sub_task(x, self, link_error=on_error_group)
                      for x in result_list)
            if not hasattr(y, 'tasks') or len(y.tasks))

        if direct:
            logger.info('Celery workflow created')
        else:
            logger.info('Partial celery workflow created')

        if direct:
            if self.eager:
                logger.info('Running workflow eagerly')
                return workflow.apply(link_error=on_error_group)
            else:
                logger.info('Running workflow non-eagerly')
                return workflow.apply_async(link_error=on_error_group)
        else:
            return workflow
Esempio n. 26
0
    def undo(self, only_failed=False, direct=True):
        """
        Undos the process step by first undoing all tasks and then the
        child steps.

        Args:
            only_failed: If true, only undo the failed tasks,
                undo all tasks otherwise

        Returns:
            AsyncResult/EagerResult if there is atleast one task or child
            steps, otherwise None
        """

        child_steps = self.child_steps.all()
        tasks = self.tasks(manager='by_step_pos').all()

        if only_failed:
            tasks = tasks.filter(status=celery_states.FAILURE)

        # Never undo tasks twice, and never undo the undo tasks themselves.
        tasks = tasks.filter(undo_type=False, undone__isnull=True)

        if not tasks.exists() and not child_steps.exists():
            if direct:
                # Nothing to undo: report immediate success.
                return EagerResult(self.pk, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain

        # Undo in reverse of the original execution order.
        result_list = sorted(itertools.chain(child_steps, tasks),
                             key=lambda x: (x.get_pos(), x.time_created),
                             reverse=True)
        workflow = func(
            x.undo(only_failed=only_failed, direct=False) if isinstance(
                x, ProcessStep) else create_sub_task(x.create_undo_obj(), self)
            for x in result_list)

        if direct:
            if self.eager:
                return workflow.apply()
            else:
                return workflow.apply_async()
        else:
            return workflow
Esempio n. 27
0
    def resume(self, direct=True):
        """
        Resumes the process step by running all pending child steps and tasks

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            The executed workflow if direct is true, the workflow non-executed
            otherwise
        """

        logger.debug('Resuming step {} ({})'.format(self.name, self.pk))
        # Reset every PENDING/FAILURE descendant task so it can run afresh.
        ProcessTask.objects.filter(
            processstep__in=self.get_descendants(include_self=True),
            status__in=[celery_states.PENDING, celery_states.FAILURE],
        ).update(
            status=celery_states.PENDING,
            time_started=None,
            time_done=None,
            traceback='',
            exception='',
            progress=0,
            result=None,
        )
        child_steps = self.get_children()

        # After the reset, anything still PENDING in the subtree is runnable.
        step_descendants = self.get_descendants(include_self=True)
        recursive_tasks = ProcessTask.objects.filter(
            processstep__in=step_descendants,
            status=celery_states.PENDING,
        )

        if not recursive_tasks.exists():
            if direct:
                # Nothing to resume: report immediate success.
                return EagerResult(self.celery_id, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        tasks = self.tasks(manager='by_step_pos').filter(
            status=celery_states.PENDING
        )

        return self.run_children(tasks, child_steps, direct)
Esempio n. 28
0
    def test_build_graph_get_leaf_collect(self):
        """graph/get_leaf/collect traverse the dependency chain in order."""
        x = self.app.AsyncResult('1')
        x.backend._cache['1'] = {'status': states.SUCCESS, 'result': None}
        c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)]
        x.iterdeps = Mock()
        x.iterdeps.return_value = ((None, x), (x, c[0]), (c[0], c[1]), (c[1],
                                                                        c[2]))
        x.backend.READY_STATES = states.READY_STATES
        assert x.graph
        # Compare by equality: ``is`` with an int literal only passes via
        # CPython's small-int cache and raises SyntaxWarning on 3.8+.
        assert x.get_leaf() == 2

        it = x.collect()
        assert list(it) == [
            (x, None),
            (c[0], 0),
            (c[1], 1),
            (c[2], 2),
        ]
Esempio n. 29
0
    def test_build_graph_get_leaf_collect(self):
        """graph/get_leaf/collect traverse the dependency chain in order."""
        x = AsyncResult("1")
        x.backend._cache["1"] = {"status": states.SUCCESS, "result": None}
        c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)]
        x.iterdeps = Mock()
        x.iterdeps.return_value = ((None, x), (x, c[0]), (c[0], c[1]), (c[1],
                                                                        c[2]))
        x.backend.READY_STATES = states.READY_STATES
        self.assertTrue(x.graph)

        # assertEqual, not assertIs: identity with an int only passes via
        # CPython's small-int cache; equality is the portable, correct check.
        self.assertEqual(x.get_leaf(), 2)

        it = x.collect()
        self.assertListEqual(list(it), [
            (x, None),
            (c[0], 0),
            (c[1], 1),
            (c[2], 2),
        ])
Esempio n. 30
0
 def test_iterdeps(self):
     """iterdeps() yields (parent, node) pairs, and an unready result
     without a cache raises IncompleteStream unless intermediate=True."""
     x = self.app.AsyncResult('1')
     c = [EagerResult(str(i), i, states.SUCCESS) for i in range(3)]
     x._cache = {'status': states.SUCCESS, 'result': None, 'children': c}
     for child in c:
         child.backend = Mock()
         child.backend.get_children.return_value = []
     it = x.iterdeps()
     # Root appears first with no parent, then each direct child.
     self.assertListEqual(list(it), [
         (None, x),
         (x, c[0]),
         (x, c[1]),
         (x, c[2]),
     ])
     # Without a cache and with ready() False, a strict walk must fail...
     x._cache = None
     x.ready = Mock()
     x.ready.return_value = False
     with self.assertRaises(IncompleteStream):
         list(x.iterdeps())
     # ...but intermediate=True tolerates the incomplete stream.
     list(x.iterdeps(intermediate=True))
Esempio n. 31
0
    def retry(self, direct=True):
        """
        Retries the process step by first retrying all child steps and then all
        failed tasks.

        Args:
            direct: False if the step is called from a parent step,
                    true otherwise

        Returns:
            none
        """

        child_steps = self.child_steps.all()

        # Only tasks that have not already been retried.
        tasks = self.tasks(manager='by_step_pos').filter(
            retried__isnull=True
        ).order_by('processstep_pos')

        if not tasks.exists() and not child_steps.exists():
            if direct:
                # Nothing to retry: report immediate success.
                return EagerResult(self.celery_id, [], celery_states.SUCCESS)

            # A parent step expects a canvas object; give it an empty group.
            return group()

        # Parallel steps fan out as a group; sequential steps chain.
        func = group if self.parallel else chain

        # Interleave child steps and tasks in their recorded order.
        result_list = sorted(itertools.chain(child_steps, tasks), key=lambda x: (x.get_pos(), x.time_created))
        workflow = func(
            x.retry(direct=False) if isinstance(x, ProcessStep) else create_sub_task(
                x.create_retry_obj(), self
            ) for x in result_list
        )

        if direct:
            if self.eager:
                return workflow.apply()
            else:
                return workflow.apply_async()
        else:
            return workflow
Esempio n. 32
0
def apply(task, args, kwargs, **options):
    """Apply the task locally.

    :keyword throw: Re-raise task exceptions. Defaults to
        the :setting:`CELERY_EAGER_PROPAGATES_EXCEPTIONS` setting.

    This will block until the task completes, and returns a
    :class:`celery.result.EagerResult` instance.

    """
    args = args or []
    kwargs = kwargs or {}
    task_id = options.get("task_id") or gen_unique_id()
    retries = options.get("retries", 0)
    throw = options.pop("throw", conf.EAGER_PROPAGATES_EXCEPTIONS)

    task = tasks[task.name]  # make sure we get the instance, not class.

    # "Magic kwargs": feed task metadata to run(), but only the
    # keywords its signature actually accepts.
    default_kwargs = {
        "task_name": task.name,
        "task_id": task_id,
        "task_retries": retries,
        "task_is_eager": True,
        "logfile": options.get("logfile"),
        "delivery_info": {
            "is_eager": True
        },
        "loglevel": options.get("loglevel", 0)
    }
    supported_keys = fun_takes_kwargs(task.run, default_kwargs)
    extend_with = dict((key, val) for key, val in default_kwargs.items()
                       if key in supported_keys)
    kwargs.update(extend_with)

    # Run the task synchronously; on failure either re-raise (throw)
    # or wrap the exception in the returned EagerResult.
    trace = TaskTrace(task.name, task_id, args, kwargs, task=task)
    retval = trace.execute()
    if isinstance(retval, ExceptionInfo):
        if throw:
            raise retval.exception
        retval = retval.exception
    return EagerResult(task_id, retval, trace.status, traceback=trace.strtb)
Esempio n. 33
0
 def test_wait(self):
     """wait() on a RETRY eager result leaves its state untouched."""
     result = EagerResult('x', 'x', states.RETRY)
     result.wait()
     for attr in ('state', 'status'):
         assert getattr(result, attr) == states.RETRY
Esempio n. 34
0
 def test_forget(self):
     """forget() must not raise for an eager (local-only) result."""
     result = EagerResult("x", "x", states.RETRY)
     result.forget()
Esempio n. 35
0
 def test_wait(self):
     """Waiting on a RETRY result returns without changing its state."""
     res = EagerResult('x', 'x', states.RETRY)
     res.wait()
     # RETRY is not a ready state, but wait() must not alter it.
     self.assertEqual(res.state, states.RETRY)
     self.assertEqual(res.status, states.RETRY)
Esempio n. 36
0
 def test_forget(self):
     """forget() must not raise for an eager (local-only) result."""
     res = EagerResult('x', 'x', states.RETRY)
     res.forget()