def apply_async(self, connection=None, connect_timeout=None,
                publisher=None, taskset_id=None):
    """Apply TaskSet."""
    app = self.app
    if app.conf.CELERY_ALWAYS_EAGER:
        return self.apply(taskset_id=taskset_id)
    with app.default_connection(connection, connect_timeout) as conn:
        setid = taskset_id or uuid()
        pub = publisher or self.Publisher(connection=conn)
        try:
            results = self._async_results(setid, pub)
        finally:
            if not publisher:  # created by us.
                pub.close()
        result = app.TaskSetResult(setid, results)
        parent = get_current_task()
        if parent:
            parent.request.children.append(result)
        return result
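# Illustrative usage sketch (not part of the original source): building a
# TaskSet and applying it as above.  Assumes a Celery 3.0-style app and
# placeholder broker/backend URLs; the `add` task exists only for this example.
from celery import Celery
from celery.task.sets import TaskSet

celery = Celery(broker='amqp://', backend='amqp://')

@celery.task
def add(x, y):
    return x + y

ts = TaskSet(tasks=[add.subtask((i, i)) for i in range(4)])
result = ts.apply_async()      # the app.TaskSetResult returned above
print(result.join())           # [0, 2, 4, 6] once every subtask has run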
def run(self, tasks, result, setid):
    app = self.app
    result = from_serializable(result)
    if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
        return app.TaskSetResult(result.id,
                                 [subtask(task).apply(taskset_id=setid)
                                     for task in tasks])
    with app.default_producer() as pub:
        [subtask(task).apply_async(taskset_id=setid, publisher=pub)
            for task in tasks]
    parent = get_current_task()
    if parent:
        parent.request.children.append(result)
    return result
def run(self, tasks, result):
    app = self.app
    result = from_serializable(result)
    with app.pool.acquire(block=True) as conn:
        with app.amqp.TaskPublisher(conn) as publisher:
            res_ = [subtask(task).apply_async(
                        taskset_id=self.request.taskset,
                        publisher=publisher)
                    for task in tasks]
    parent = get_current_task()
    if parent:
        parent.request.children.append(result)
    if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
        return app.TaskSetResult(result.id, res_)
    return result
def apply_async(self, connection=None, connect_timeout=None,
                publisher=None, taskset_id=None):
    """Apply TaskSet."""
    app = self.app
    if app.conf.CELERY_ALWAYS_EAGER:
        return self.apply(taskset_id=taskset_id)
    with app.default_connection(connection, connect_timeout) as conn:
        setid = taskset_id or uuid()
        pub = publisher or self.Publisher(conn)
        results = self._async_results(setid, pub)
        result = app.TaskSetResult(setid, results)
        parent = get_current_task()
        if parent:
            parent.request.children.append(result)
        return result
def run(self, tasks, result):
    app = self.app
    result = from_serializable(result)
    if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
        return app.TaskSetResult(result.id,
            [subtask(task).apply(taskset_id=self.request.taskset)
                for task in tasks])
    with app.pool.acquire(block=True) as conn:
        with app.amqp.TaskPublisher(conn) as publisher:
            [subtask(task).apply_async(taskset_id=self.request.taskset,
                                       publisher=publisher)
                for task in tasks]
    parent = get_current_task()
    if parent:
        parent.request.children.append(result)
    return result
def apply_async(self, args=None, kwargs=None,
                task_id=None, publisher=None, connection=None,
                router=None, queues=None, link=None, link_error=None,
                **options):
    """Apply tasks asynchronously by sending a message.

    :keyword args: The positional arguments to pass on to the
                   task (a :class:`list` or :class:`tuple`).

    :keyword kwargs: The keyword arguments to pass on to the
                     task (a :class:`dict`)

    :keyword countdown: Number of seconds into the future that the
                        task should execute.  Defaults to immediate
                        execution (do not confuse with the `immediate`
                        flag, as they are unrelated).

    :keyword eta: A :class:`~datetime.datetime` object describing the
                  absolute time and date of when the task should be
                  executed.  May not be specified if `countdown` is also
                  supplied.  (Do not confuse this with the `immediate`
                  flag, as they are unrelated).

    :keyword expires: Either a :class:`int`, describing the number of
                      seconds, or a :class:`~datetime.datetime` object
                      that describes the absolute time and date of when
                      the task should expire.  The task will not be
                      executed after the expiration time.

    :keyword connection: Re-use existing broker connection instead of
                         establishing a new one.

    :keyword retry: If enabled sending of the task message will be
                    retried in the event of connection loss or failure.
                    Default is taken from the
                    :setting:`CELERY_TASK_PUBLISH_RETRY` setting.  Note
                    you need to handle the publisher/connection manually
                    for this to work.

    :keyword retry_policy: Override the retry policy used.  See the
                           :setting:`CELERY_TASK_PUBLISH_RETRY` setting.

    :keyword routing_key: The routing key used to route the task to a
                          worker server.  Defaults to the
                          :attr:`routing_key` attribute.

    :keyword exchange: The named exchange to send the task to.
                       Defaults to the :attr:`exchange` attribute.

    :keyword exchange_type: The exchange type to initialize the exchange
                            if not already declared.  Defaults to the
                            :attr:`exchange_type` attribute.

    :keyword immediate: Request immediate delivery.  Will raise an
                        exception if the task cannot be routed to a
                        worker immediately.  (Do not confuse this
                        parameter with the `countdown` and `eta`
                        settings, as they are unrelated).  Defaults to
                        the :attr:`immediate` attribute.

    :keyword mandatory: Mandatory routing.  Raises an exception if
                        there's no running workers able to take on this
                        task.  Defaults to the :attr:`mandatory`
                        attribute.

    :keyword priority: The task priority, a number between 0 and 9.
                       Defaults to the :attr:`priority` attribute.

    :keyword serializer: A string identifying the default serialization
                         method to use.  Can be `pickle`, `json`,
                         `yaml`, `msgpack` or any custom serialization
                         method that has been registered with
                         :mod:`kombu.serialization.registry`.  Defaults
                         to the :attr:`serializer` attribute.

    :keyword compression: A string identifying the compression method
                          to use.  Can be one of ``zlib``, ``bzip2``, or
                          any custom compression methods registered with
                          :func:`kombu.compression.register`.  Defaults
                          to the :setting:`CELERY_MESSAGE_COMPRESSION`
                          setting.

    :keyword link: A single, or a list of subtasks to apply if the task
                   exits successfully.

    :keyword link_error: A single, or a list of subtasks to apply if an
                         error occurs while executing the task.

    .. note::
        If the :setting:`CELERY_ALWAYS_EAGER` setting is set, it will be
        replaced by a local :func:`apply` call instead.
""" app = self._get_app() router = app.amqp.Router(queues) conf = app.conf if conf.CELERY_ALWAYS_EAGER: return self.apply(args, kwargs, task_id=task_id, **options) options = dict(extract_exec_options(self), **options) options = router.route(options, self.name, args, kwargs) publish = publisher or app.amqp.publisher_pool.acquire(block=True) evd = None if conf.CELERY_SEND_TASK_SENT_EVENT: evd = app.events.Dispatcher(channel=publish.channel, buffer_while_offline=False) try: task_id = publish.delay_task(self.name, args, kwargs, task_id=task_id, event_dispatcher=evd, callbacks=maybe_list(link), errbacks=maybe_list(link_error), **options) finally: if not publisher: publish.release() result = self.AsyncResult(task_id) parent = get_current_task() if parent: parent.request.children.append(result) return result