Example #1
    def __init__(self, loglevel=None, hostname=None, ready_callback=noop,
            queues=None, app=None, pidfile=None, **kwargs):
        self.app = app_or_default(app or self.app)

        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace="celeryd")
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        self.use_eventloop = (detect_environment() == "default" and
                              self.app.broker_connection().is_evented)

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
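Every example in this collection leans on the same fallback pattern: use the app that was passed in, otherwise fall back to the process-wide default. A minimal sketch of that pattern (simplified; the real celery.app.app_or_default also honors the thread-local current app and a trace mode):

def app_or_default(app=None):
    # return the explicit app when given, otherwise the
    # module-level default application (simplified sketch)
    if app is None:
        from celery._state import default_app
        return default_app
    return app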
Example #2
 def __init__(self, channel, handlers=None, routing_key='#',
              node_id=None, app=None, queue_prefix=None,
              accept=None, queue_ttl=None, queue_expires=None):
     self.app = app_or_default(app or self.app)
     self.channel = maybe_channel(channel)
     self.handlers = {} if handlers is None else handlers
     self.routing_key = routing_key
     self.node_id = node_id or uuid()
     self.queue_prefix = queue_prefix or self.app.conf.event_queue_prefix
     self.exchange = get_exchange(
         self.connection or self.app.connection_for_write())
     if queue_ttl is None:
         queue_ttl = self.app.conf.event_queue_ttl
     if queue_expires is None:
         queue_expires = self.app.conf.event_queue_expires
     self.queue = Queue(
         '.'.join([self.queue_prefix, self.node_id]),
         exchange=self.exchange,
         routing_key=self.routing_key,
         auto_delete=True, durable=False,
         message_ttl=queue_ttl,
         expires=queue_expires,
     )
     self.clock = self.app.clock
     self.adjust_clock = self.clock.adjust
     self.forward_clock = self.clock.forward
     if accept is None:
         accept = {self.app.conf.event_serializer, 'json'}
     self.accept = accept
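A rough usage sketch for a receiver like the one above, based on the public app.events.Receiver API (the broker URL is a placeholder):

from celery import Celery

app = Celery(broker='amqp://guest@localhost//')

def on_event(event):
    # each event is a dict carrying at least a 'type' key
    print(event['type'], event.get('hostname'))

with app.connection_for_read() as connection:
    recv = app.events.Receiver(connection, handlers={'*': on_event})
    recv.capture(limit=10)  # returns after 10 events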
Example #3
def migrate_tasks(source, dest, timeout=1.0, app=None,
        migrate=None, callback=None):
    state = State()
    app = app_or_default(app)

    def update_state(body, message):
        state.count += 1

    producer = app.amqp.TaskPublisher(dest)
    if migrate is None:
        migrate = partial(migrate_task, producer)
    consumer = app.amqp.get_task_consumer(source)
    consumer.register_callback(update_state)
    if callback is not None:
        # only register the user callback when one was given
        callback = partial(callback, state)
        consumer.register_callback(callback)
    consumer.register_callback(migrate)

    # declare all queues on the new broker.
    for queue in consumer.queues:
        queue(producer.channel).declare()
        try:
            _, mcount, _ = queue(consumer.channel).queue_declare(passive=True)
            if mcount:
                state.total_apx += mcount
        except source.channel_errors + (StdChannelError, ):
            pass

    # start migrating messages.
    with consumer:
        try:
            for _ in eventloop(source, timeout=timeout):
                pass
        except socket.timeout:
            return
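A hedged call sketch for the migration helper above (broker URLs are placeholders); the callback receives the running State via the partial above:

from kombu import Connection

def on_migrate(state, body, message):
    print('migrated %s/%s' % (state.count, state.total_apx))

with Connection('amqp://old-broker//') as source:
    with Connection('amqp://new-broker//') as dest:
        migrate_tasks(source, dest, timeout=1.0, callback=on_migrate)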
Example #4
def policy_main(app=None, loglevel=0, logfile=None, **kwargs):
    """ Policy manager entry-point function. """
    import logging
    import sys
    # Set process name that appears in logging messages.
    import multiprocessing
    multiprocessing.current_process().name = 'cm-policy-manager'
    
    app = app_or_default(app)
    if not isinstance(loglevel, int):
        loglevel = LOG_LEVELS[loglevel.upper()]
    logger = app.log.setup_logger(loglevel=loglevel,
                                  logfile=logfile,
                                  name="cm.policy")
    orig_ios = (sys.stdout, sys.stderr)
    logger.warning('-> cmpolicy: Loading policy manager...')
    conn = app.broker_connection()
    try:
        try:
            with PolicyMain(conn, logger, app=app) as pmain:
                pmain.loop()
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            import traceback
            tb = traceback.format_exc()
            logger.error('\n'+tb)
            raise
    finally:
        conn.close()
        logger.warning('-> cmpolicy: Policy manager shut down.\n')
        sys.stdout, sys.stderr = orig_ios
Example #5
def redis(app=None):
    app = app_or_default(app)

    if not hasattr(app, "redbeat_redis") or app.redbeat_redis is None:
        app.redbeat_redis = StrictRedis.from_url(app.conf.REDBEAT_REDIS_URL, decode_responses=True)

    return app.redbeat_redis
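A brief usage sketch (the URL is a placeholder); the client is cached on the app, so repeated calls share one connection:

from celery import Celery

app = Celery()
app.conf.REDBEAT_REDIS_URL = 'redis://localhost:6379/1'
client = redis(app)          # first call creates the client
assert client is redis(app)  # later calls reuse app.redbeat_redis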
Example #6
 def handle(self, *args, **options):
     w = Worker(app_or_default())
     # note: the command-line **options are discarded; fixed debug
     # settings are used instead
     options = {
         'loglevel': 'DEBUG',
         'traceback': True
     }
     w.run(**options)
Example #7
def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
        logfile=None, app=None):
    app = app_or_default(app)
    if not isinstance(loglevel, int):
        loglevel = LOG_LEVELS[loglevel.upper()]
    logger = app.log.setup_logger(loglevel=loglevel,
                                  logfile=logfile,
                                  name="celery.evcam")
    logger.info(
        "-> evcam: Taking snapshots with %s (every %s secs.)\n" % (
            camera, freq))
    state = State()
    cam = instantiate(camera, state, app=app,
                      freq=freq, maxrate=maxrate, logger=logger)
    cam.install()
    conn = app.broker_connection()
    recv = EventReceiver(conn, app=app, handlers={"*": state.event})
    try:
        try:
            recv.capture(limit=None)
        except KeyboardInterrupt:
            raise SystemExit
    finally:
        cam.cancel()
        conn.close()
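To illustrate, the camera argument is a dotted path that gets instantiated, and celery ships a Polaroid base class for snapshot cameras, so a hedged call could look like this (it blocks until interrupted):

evcam('celery.events.snapshot.Polaroid', freq=2.0, loglevel='INFO')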
Example #8
 def __init__(self, app=None, *args, **kwargs):
     super(CeleryProc, self).__init__(*args, **kwargs)
     if app is not None:
         self.app = app
     self.app = app_or_default(self.app)
     self.connection = self.app.connection()
     self.channel = self.connection.channel()
Example #9
    def __init__(self, concurrency=None, loglevel=None, logfile=None,
            hostname=None, discard=False, run_clockservice=False,
            schedule=None, task_time_limit=None, task_soft_time_limit=None,
            max_tasks_per_child=None, queues=None, events=False, db=None,
            include=None, app=None, pidfile=None,
            redirect_stdouts=None, redirect_stdouts_level=None,
            autoscale=None, scheduler_cls=None, pool=None, **kwargs):
        self.app = app = app_or_default(app)
        self.concurrency = (concurrency or
                            app.conf.CELERYD_CONCURRENCY or
                            multiprocessing.cpu_count())
        self.loglevel = loglevel or app.conf.CELERYD_LOG_LEVEL
        self.logfile = logfile or app.conf.CELERYD_LOG_FILE

        self.hostname = hostname or socket.gethostname()
        self.discard = discard
        self.run_clockservice = run_clockservice
        if self.app.IS_WINDOWS and self.run_clockservice:
            self.die("-B option does not work on Windows.  "
                     "Please run celerybeat as a separate service.")
        self.schedule = schedule or app.conf.CELERYBEAT_SCHEDULE_FILENAME
        self.scheduler_cls = scheduler_cls or app.conf.CELERYBEAT_SCHEDULER
        self.events = events
        self.task_time_limit = (task_time_limit or
                                app.conf.CELERYD_TASK_TIME_LIMIT)
        self.task_soft_time_limit = (task_soft_time_limit or
                                     app.conf.CELERYD_TASK_SOFT_TIME_LIMIT)
        self.max_tasks_per_child = (max_tasks_per_child or
                                    app.conf.CELERYD_MAX_TASKS_PER_CHILD)
        self.redirect_stdouts = (redirect_stdouts or
                                 app.conf.CELERY_REDIRECT_STDOUTS)
        self.redirect_stdouts_level = (redirect_stdouts_level or
                                       app.conf.CELERY_REDIRECT_STDOUTS_LEVEL)
        self.pool = (pool or app.conf.CELERYD_POOL)
        self.db = db
        self.use_queues = queues or []
        self.queues = None
        self.include = include or []
        self.pidfile = pidfile
        self.autoscale = None
        if autoscale:
            max_c, _, min_c = partition(autoscale, ",")
            self.autoscale = [int(max_c), min_c and int(min_c) or 0]
        self._isatty = sys.stdout.isatty()

        self.colored = app.log.colored(self.logfile)

        if isinstance(self.use_queues, basestring):
            self.use_queues = self.use_queues.split(",")
        if isinstance(self.include, basestring):
            self.include = self.include.split(",")

        if not isinstance(self.loglevel, int):
            try:
                self.loglevel = LOG_LEVELS[self.loglevel.upper()]
            except KeyError:
                self.die("Unknown level %r. Please use one of %s." % (
                            self.loglevel,
                            "|".join(l for l in LOG_LEVELS.keys()
                                        if isinstance(l, basestring))))
Example #10
def get_backend_cls(backend, loader=None):
    """Get backend class by name/alias"""
    loader = loader or app_or_default().loader
    if backend not in _backend_cache:
        aliases = dict(BACKEND_ALIASES, **loader.override_backends)
        _backend_cache[backend] = get_cls_by_name(backend, aliases)
    return _backend_cache[backend]
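A hedged usage sketch, assuming 'cache' is one of the entries in the BACKEND_ALIASES table referenced above:

Backend = get_backend_cls('cache')  # resolved via aliases, then memoized
backend = Backend()                 # configured from the loader/app conf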
Example #11
def move_tasks(conn, predicate, exchange, routing_key, app=None, **kwargs):
    """Find tasks by filtering them and move the tasks to a new queue.

    :param conn: Connection to use.
    :param predicate: Filter function with signature ``(body, message)``.
    :param exchange: Destination exchange.
    :param routing_key: Destination routing key.

    Also supports the same keyword arguments as :func:`start_filter`.

    To demonstrate, the :func:`move_task_by_id` operation can be implemented
    like this:

    .. code-block:: python

        def is_wanted_task(body, message):
            if body['id'] == wanted_id:
                return True

        move_tasks(conn, is_wanted_task, exchange, routing_key)

    """
    app = app_or_default(app)
    producer = app.amqp.TaskProducer(conn)

    def on_task(body, message):
        if predicate(body, message):
            republish(producer, message,
                      exchange=exchange, routing_key=routing_key)
            message.ack()

    return start_filter(app, conn, on_task, **kwargs)
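An end-to-end sketch of the docstring example (broker URL, destination exchange and routing key are placeholders):

from kombu import Connection, Exchange

wanted_id = '...'  # id of the task to move (placeholder)

def is_wanted_task(body, message):
    return body['id'] == wanted_id

with Connection('amqp://guest@localhost//') as conn:
    move_tasks(conn, is_wanted_task, Exchange('moved'), 'moved.tasks')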
Example #12
def main():
    def print_event(event):
        type = event.pop("type")
        s = "Event: {0}\n".format(type)
        keys = event.keys()
        keys.sort()
        for k in keys:
            v = event[k]
            s += "    {0}: {1}\n".format(k, v)
        print s

    print "Initializing event listener."
    app = app_or_default(None)
    conn = app.broker_connection()
    recv = EventReceiver(conn, handlers={"*": print_event}, app=app)

    try:
        try:
            print "Listening for events...  (use Ctrl-C to exit)"
            recv.capture(limit=None)
        except (KeyboardInterrupt, SystemExit):
            raise SystemExit
        except Exception:
            import traceback

            print "Exception while listening for events:\n"
            traceback.print_exc()
    finally:
        # make sure the broker connection is closed on exit
        conn.close()
Example #13
    def __init__(self, max_interval=None, app=None,
                 socket_timeout=30, pidfile=None, no_color=None,
                 loglevel=None, logfile=None, schedule=None,
                 scheduler_cls=None, redirect_stdouts=None,
                 redirect_stdouts_level=None, **kwargs):
        """Starts the beat task scheduler."""
        self.app = app = app_or_default(app or self.app)
        self.loglevel = self._getopt('log_level', loglevel)
        self.logfile = self._getopt('log_file', logfile)
        self.schedule = self._getopt('schedule_filename', schedule)
        self.scheduler_cls = self._getopt('scheduler', scheduler_cls)
        self.redirect_stdouts = self._getopt(
            'redirect_stdouts', redirect_stdouts,
        )
        self.redirect_stdouts_level = self._getopt(
            'redirect_stdouts_level', redirect_stdouts_level,
        )

        self.max_interval = max_interval
        self.socket_timeout = socket_timeout
        self.no_color = no_color
        self.colored = app.log.colored(
            self.logfile,
            enabled=not no_color if no_color is not None else no_color,
        )
        self.pidfile = pidfile

        if not isinstance(self.loglevel, int):
            self.loglevel = LOG_LEVELS[self.loglevel.upper()]
Example #14
File: job.py Project: brainy/celery
    def __init__(self, task_name, task_id, args, kwargs,
            on_ack=noop, retries=0, delivery_info=None, hostname=None,
            email_subject=None, email_body=None, logger=None,
            eventer=None, eta=None, expires=None, app=None,
            taskset_id=None, **opts):
        self.app = app_or_default(app)
        self.task_name = task_name
        self.task_id = task_id
        self.taskset_id = taskset_id
        self.retries = retries
        self.args = args
        self.kwargs = kwargs
        self.eta = eta
        self.expires = expires
        self.on_ack = on_ack
        self.delivery_info = delivery_info or {}
        self.hostname = hostname or socket.gethostname()
        self.logger = logger or self.app.log.get_default_logger()
        self.eventer = eventer
        self.email_subject = email_subject or self.email_subject
        self.email_body = email_body or self.email_body

        self.task = tasks[self.task_name]
        self._store_errors = True
        if self.task.ignore_result:
            self._store_errors = self.task.store_errors_even_if_ignored
Example #15
def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
          logfile=None, pidfile=None, timer=None, app=None):
    """Start snapshot recorder."""
    app = app_or_default(app)

    if pidfile:
        platforms.create_pidlock(pidfile)

    app.log.setup_logging_subsystem(loglevel, logfile)

    print('-> evcam: Taking snapshots with {0} (every {1} secs.)'.format(
        camera, freq))
    state = app.events.State()
    cam = instantiate(camera, state, app=app, freq=freq,
                      maxrate=maxrate, timer=timer)
    cam.install()
    conn = app.connection_for_read()
    recv = app.events.Receiver(conn, handlers={'*': state.event})
    try:
        try:
            recv.capture(limit=None)
        except KeyboardInterrupt:
            raise SystemExit
    finally:
        cam.cancel()
        conn.close()
Example #16
    def test_send_email(self):
        app = app_or_default()
        old_mail_admins = app.mail_admins
        old_enable_mails = mytask.send_error_emails
        mail_sent = [False]

        def mock_mail_admins(*args, **kwargs):
            mail_sent[0] = True

        app.mail_admins = mock_mail_admins
        mytask.send_error_emails = True
        try:
            tw = TaskRequest(mytask.name, gen_unique_id(), [1], {"f": "x"})
            try:
                raise KeyError("moofoobar")
            except KeyError:
                einfo = ExceptionInfo(sys.exc_info())

            tw.on_failure(einfo)
            self.assertTrue(mail_sent[0])

            mail_sent[0] = False
            mytask.send_error_emails = False
            tw.on_failure(einfo)
            self.assertFalse(mail_sent[0])

        finally:
            app.mail_admins = old_mail_admins
            mytask.send_error_emails = old_enable_mails
Example #17
File: base.py Project: kertz/celery
    def __init__(self, app=None, serializer=None, max_cached_results=None, **kwargs):
        from celery.app import app_or_default

        self.app = app_or_default(app)
        self.serializer = serializer or self.app.conf.CELERY_RESULT_SERIALIZER
        (self.content_type, self.content_encoding, self.encoder) = serialization.registry._encoders[self.serializer]
        self._cache = LRUCache(limit=max_cached_results or self.app.conf.CELERY_MAX_CACHED_RESULTS)
Example #18
 def __init__(self, connection=None, hostname=None, enabled=True,
              channel=None, buffer_while_offline=True, app=None,
              serializer=None, groups=None, delivery_mode=1):
     self.app = app_or_default(app or self.app)
     self.connection = connection
     self.channel = channel
     self.hostname = hostname or anon_nodename()
     self.buffer_while_offline = buffer_while_offline
     self.mutex = threading.Lock()
     self.producer = None
     self._outbound_buffer = deque()
     self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER
     self.on_enabled = set()
     self.on_disabled = set()
     self.groups = set(groups or [])
     self.tzoffset = [-time.timezone, -time.altzone]
     self.clock = self.app.clock
     self.delivery_mode = delivery_mode
     if not connection and channel:
         self.connection = channel.connection.client
     self.enabled = enabled
     conninfo = self.connection or self.app.connection()
     self.exchange = get_exchange(conninfo)
     if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS:
         self.enabled = False
     if self.enabled:
         self.enable()
     self.headers = {'hostname': self.hostname}
     self.pid = os.getpid()
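A hedged usage sketch for the dispatcher above (the event type and extra field are illustrative; send() forwards arbitrary fields):

with app.connection_for_write() as connection:
    dispatcher = EventDispatcher(connection, app=app, enabled=True)
    dispatcher.send('worker-custom-status', status='warming-up')
    dispatcher.close()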
Example #19
    def __init__(self, connection=None, hostname=None, enabled=True,
                 channel=None, buffer_while_offline=True, app=None,
                 serializer=None):
        self.app = app_or_default(app or self.app)
        self.connection = connection
        self.channel = channel
        self.hostname = hostname or socket.gethostname()
        self.buffer_while_offline = buffer_while_offline
        self.mutex = threading.Lock()
        self.producer = None
        self._outbound_buffer = deque()
        self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER
        self.on_enabled = set()
        self.on_disabled = set()

        if not connection and channel:
            self.connection = channel.connection.client
        self.enabled = enabled
        conninfo = self.connection or self.app.connection()
        self.exchange = get_exchange(conninfo)
        if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS:
            self.enabled = False
        if self.enabled:
            self.enable()
Example #20
File: job.py Project: ronnix/celery
    def __init__(
        self,
        body,
        on_ack=noop,
        hostname=None,
        eventer=None,
        app=None,
        connection_errors=None,
        request_dict=None,
        delivery_info=None,
        task=None,
        **opts
    ):
        self.app = app_or_default(app)
        name = self.name = body["task"]
        self.id = body["id"]
        self.args = body.get("args", [])
        self.kwargs = body.get("kwargs", {})
        try:
            self.kwargs.items
        except AttributeError:
            raise exceptions.InvalidTaskError("Task keyword arguments is not a mapping")
        if NEEDS_KWDICT:
            self.kwargs = kwdict(self.kwargs)
        eta = body.get("eta")
        expires = body.get("expires")
        utc = body.get("utc", False)
        self.on_ack = on_ack
        self.hostname = hostname or socket.gethostname()
        self.eventer = eventer
        self.connection_errors = connection_errors or ()
        self.task = task or self.app.tasks[name]
        self.acknowledged = self._already_revoked = False
        self.time_start = self.worker_pid = self._terminate_on_ack = None
        self._tzlocal = None

        # timezone means the message is timezone-aware, and the only timezone
        # supported at this point is UTC.
        if eta is not None:
            tz = tz_utc if utc else self.tzlocal
            self.eta = tz_to_local(maybe_iso8601(eta), self.tzlocal, tz)
        else:
            self.eta = None
        if expires is not None:
            tz = tz_utc if utc else self.tzlocal
            self.expires = tz_to_local(maybe_iso8601(expires), self.tzlocal, tz)
        else:
            self.expires = None

        delivery_info = {} if delivery_info is None else delivery_info
        self.delivery_info = {
            "exchange": delivery_info.get("exchange"),
            "routing_key": delivery_info.get("routing_key"),
            "priority": delivery_info.get("priority"),
        }

        # the amqplib transport adds the channel here for some reason,
        # so we need to remove it.
        self.delivery_info.pop("channel", None)
        self.request_dict = body
Example #21
def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
        logfile=None, pidfile=None, timer=None, app=None):
    app = app_or_default(app)

    if pidfile:
        platforms.create_pidlock(pidfile)

    app.log.setup_logging_subsystem(loglevel, logfile)

    logger.info(
        "-> evcam: Taking snapshots with %s (every %s secs.)\n" % (
            camera, freq))
    state = app.events.State()
    cam = instantiate(camera, state, app=app, freq=freq,
                      maxrate=maxrate, timer=timer)
    cam.install()
    conn = app.broker_connection()
    recv = app.events.Receiver(conn, handlers={"*": state.event})
    try:
        try:
            recv.capture(limit=None)
        except KeyboardInterrupt:
            raise SystemExit
    finally:
        cam.cancel()
        conn.close()
Example #22
def task(*args, **kwargs):
    """Decorator to create a task class out of any callable.

    .. admonition:: Examples

        .. code-block:: python

            @task()
            def refresh_feed(url):
                return Feed.objects.get(url=url).refresh()

        With setting extra options and using retry.

        .. code-block:: python

            @task(exchange="feeds")
            def refresh_feed(url, **kwargs):
                try:
                    return Feed.objects.get(url=url).refresh()
                except socket.error, exc:
                    refresh_feed.retry(args=[url], kwargs=kwargs, exc=exc)

        Calling the resulting task:

            >>> refresh_feed("http://example.com/rss") # Regular
            <Feed: http://example.com/rss>
            >>> refresh_feed.delay("http://example.com/rss") # Async
            <AsyncResult: 8998d0f4-da0b-4669-ba03-d5ab5ac6ad5d>
    """
    kwargs.setdefault("accept_magic_kwargs", False)
    return app_or_default().task(*args, **kwargs)
Example #23
    def test_init_queues(self):
        app = app_or_default()
        c = app.conf
        p, app.amqp.queues = app.amqp.queues, {
                "celery": {"exchange": "celery",
                           "binding_key": "celery"},
                "video": {"exchange": "video",
                           "binding_key": "video"}}
        try:
            worker = self.Worker(queues=["video"])
            worker.init_queues()
            self.assertIn("video", worker.queues)
            self.assertNotIn("celery", worker.queues)

            c.CELERY_CREATE_MISSING_QUEUES = False
            self.assertRaises(ImproperlyConfigured,
                    self.Worker(queues=["image"]).init_queues)
            c.CELERY_CREATE_MISSING_QUEUES = True
            worker = self.Worker(queues=["image"])
            worker.init_queues()
            self.assertIn("image", worker.queues)
            self.assertDictContainsSubset({"exchange": "image",
                                           "routing_key": "image",
                                           "binding_key": "image",
                                           "exchange_type": "direct"},
                                            worker.queues["image"])
        finally:
            app.amqp.queues = p
Example #24
 def __init__(self, channel, handlers=None, routing_key='#',
              node_id=None, app=None, queue_prefix=None,
              accept=None, queue_ttl=None, queue_expires=None):
     self.app = app_or_default(app or self.app)
     self.channel = maybe_channel(channel)
     self.handlers = {} if handlers is None else handlers
     self.routing_key = routing_key
     self.node_id = node_id or uuid()
     self.queue_prefix = (queue_prefix or
                          self.app.conf.CELERY_EVENT_QUEUE_PREFIX)
     self.exchange = get_exchange(self.connection or self.app.connection())
     self.queue = Queue(
         '.'.join([self.queue_prefix, self.node_id]),
         exchange=self.exchange,
         routing_key=self.routing_key,
         auto_delete=True, durable=False,
         queue_arguments=self._get_queue_arguments(
             ttl=queue_ttl, expires=queue_expires,
         ),
     )
     self.clock = self.app.clock
     self.adjust_clock = self.clock.adjust
     self.forward_clock = self.clock.forward
     if accept is None:
         accept = {self.app.conf.CELERY_EVENT_SERIALIZER, 'json'}
     self.accept = accept
Example #25
 def test_missing_dburi_raises_ImproperlyConfigured(self):
     conf = app_or_default().conf
     prev, conf.CELERY_RESULT_DBURI = conf.CELERY_RESULT_DBURI, None
     try:
         self.assertRaises(ImproperlyConfigured, DatabaseBackend)
     finally:
         conf.CELERY_RESULT_DBURI = prev
Example #26
 def test_lookup_takes_first(self):
     R = routes.prepare(({"celery.ping": {"queue": "bar"}},
                         {"celery.ping": {"queue": "foo"}}))
     router = routes.Router(R, app_or_default().conf.CELERY_QUEUES)
     self.assertDictContainsSubset(b_queue,
             router.route({}, "celery.ping",
                 args=[1, 2], kwargs={}))
Example #27
    def __init__(self, loglevel=None, hostname=None, ready_callback=noop,
                 queues=None, app=None, pidfile=None, use_eventloop=None,
                 **kwargs):
        self.app = app_or_default(app or self.app)

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace='celeryd')
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        # this connection is not established, only used for params
        self._conninfo = self.app.connection()
        self.use_eventloop = (
            self.should_use_eventloop() if use_eventloop is None
            else use_eventloop
        )

        # Update celery_include to have all known task modules, so that we
        # ensure all task modules are imported in case an execv happens.
        task_modules = set(task.__class__.__module__
                           for task in self.app.tasks.itervalues())
        self.app.conf.CELERY_INCLUDE = tuple(
            set(self.app.conf.CELERY_INCLUDE) | task_modules,
        )

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
Example #28
 def __init__(self, tasks=None, app=None, Publisher=None):
     self.app = app_or_default(app or self.app)
     super(TaskSet, self).__init__(
         maybe_signature(t, app=self.app) for t in tasks or []
     )
     self.Publisher = Publisher or self.app.amqp.Producer
     self.total = len(self)  # XXX compat
Example #29
 def __init__(self, *args, **kwargs):
     app = self.app = app_or_default(kwargs.get("app"))
     kwargs["exchange"] = kwargs.get("exchange") or \
                             app.conf.CELERY_BROADCAST_EXCHANGE
     kwargs["exchange_type"] = kwargs.get("exchange_type") or \
                             app.conf.CELERY_BROADCAST_EXCHANGE_TYPE
     super(BroadcastPublisher, self).__init__(*args, **kwargs)
Example #30
 def handle(self, *args, **options):
     b = Beat(app_or_default())
      # note: the command-line **options are discarded; fixed debug
      # settings are used instead
      options = {
         'loglevel': 'DEBUG',
         'traceback': True
     }
     b.run(**options)
Example #31
def redis(app=None):
    app = app_or_default(app)
    conf = ensure_conf(app)
    if not hasattr(app, 'redbeat_redis') or app.redbeat_redis is None:
        redis_options = conf.app.conf.get(
            'REDBEAT_REDIS_OPTIONS',
            conf.app.conf.get('BROKER_TRANSPORT_OPTIONS', {}))
        if conf.redis_url.startswith(
                'redis-sentinel') and 'sentinels' in redis_options:
            from redis.sentinel import Sentinel
            sentinel = Sentinel(
                redis_options['sentinels'],
                socket_timeout=redis_options.get('socket_timeout'),
                password=redis_options.get('password'),
                decode_responses=True)
            app.redbeat_redis = sentinel.master_for(
                redis_options.get('service_name', 'master'))
        else:
            app.redbeat_redis = StrictRedis.from_url(conf.redis_url,
                                                     decode_responses=True)

    return app.redbeat_redis
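A configuration sketch for the sentinel branch above, following the documented RedBeat settings (hosts and names are placeholders):

app.conf.REDBEAT_REDIS_URL = 'redis-sentinel://'
app.conf.REDBEAT_REDIS_OPTIONS = {
    'sentinels': [('192.168.1.1', 26379), ('192.168.1.2', 26379)],
    'service_name': 'master',
    'socket_timeout': 0.1,
}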
Example #32
    def __init__(self,
                 task_name,
                 task_id,
                 args,
                 kwargs,
                 on_ack=noop,
                 retries=0,
                 delivery_info=None,
                 hostname=None,
                 email_subject=None,
                 email_body=None,
                 logger=None,
                 eventer=None,
                 eta=None,
                 expires=None,
                 app=None,
                 taskset_id=None,
                 **opts):
        self.app = app_or_default(app)
        self.task_name = task_name
        self.task_id = task_id
        self.taskset_id = taskset_id
        self.retries = retries
        self.args = args
        self.kwargs = kwargs
        self.eta = eta
        self.expires = expires
        self.on_ack = on_ack
        self.delivery_info = delivery_info or {}
        self.hostname = hostname or socket.gethostname()
        self.logger = logger or self.app.log.get_default_logger()
        self.eventer = eventer
        self.email_subject = email_subject or self.email_subject
        self.email_body = email_body or self.email_body

        self.task = tasks[self.task_name]
        self._store_errors = True
        if self.task.ignore_result:
            self._store_errors = self.task.store_errors_even_if_ignored
Example #33
    def __init__(self,
                 max_interval=None,
                 app=None,
                 socket_timeout=30,
                 pidfile=None,
                 no_color=None,
                 **kwargs):
        """Starts the celerybeat task scheduler."""
        self.app = app = app_or_default(app or self.app)
        self.setup_defaults(kwargs, namespace='celerybeat')

        self.max_interval = max_interval
        self.socket_timeout = socket_timeout
        self.no_color = no_color
        self.colored = app.log.colored(
            self.logfile,
            enabled=not no_color if no_color is not None else no_color,
        )
        self.pidfile = pidfile

        if not isinstance(self.loglevel, int):
            self.loglevel = LOG_LEVELS[self.loglevel.upper()]
Example #34
def migrate_tasks(source,
                  dest,
                  timeout=1.0,
                  app=None,
                  migrate=None,
                  callback=None):
    state = State()
    app = app_or_default(app)

    def update_state(body, message):
        state.count += 1

    producer = app.amqp.TaskProducer(dest)
    if migrate is None:
        migrate = partial(migrate_task, producer)
    consumer = app.amqp.TaskConsumer(source)
    consumer.register_callback(update_state)
    if callback is not None:
        callback = partial(callback, state)
        consumer.register_callback(callback)
    consumer.register_callback(migrate)

    # declare all queues on the new broker.
    for queue in consumer.queues:
        queue(producer.channel).declare()
        try:
            _, mcount, _ = queue(consumer.channel).queue_declare(passive=True)
            if mcount:
                state.total_apx += mcount
        except source.channel_errors + (StdChannelError, ):
            pass

    # start migrating messages.
    with consumer:
        try:
            for _ in eventloop(source, timeout=timeout):  # pragma: no cover
                pass
        except socket.timeout:
            return
Example #35
 def __init__(self,
              channel,
              handlers=None,
              routing_key='#',
              node_id=None,
              app=None,
              queue_prefix=None,
              accept=None,
              queue_ttl=None,
              queue_expires=None):
     self.app = app_or_default(app or self.app)
     self.channel = maybe_channel(channel)
     self.handlers = {} if handlers is None else handlers
     self.routing_key = routing_key
     self.node_id = node_id or uuid()
     self.queue_prefix = queue_prefix or self.app.conf.event_queue_prefix
     self.exchange = get_exchange(self.connection
                                  or self.app.connection_for_write(),
                                  name=self.app.conf.event_exchange)
     if queue_ttl is None:
         queue_ttl = self.app.conf.event_queue_ttl
     if queue_expires is None:
         queue_expires = self.app.conf.event_queue_expires
     self.queue = Queue(
         '.'.join([self.queue_prefix, self.node_id]),
         exchange=self.exchange,
         routing_key=self.routing_key,
         auto_delete=True,
         durable=False,
         message_ttl=queue_ttl,
         expires=queue_expires,
     )
     self.clock = self.app.clock
     self.adjust_clock = self.clock.adjust
     self.forward_clock = self.clock.forward
     if accept is None:
         accept = {self.app.conf.event_serializer, 'json'}
     self.accept = accept
Example #36
def evcam(
    camera,
    freq=1.0,
    maxrate=None,
    loglevel=0,
    logfile=None,
    pidfile=None,
    timer=None,
    app=None,
):
    """Start snapshot recorder."""
    app = app_or_default(app)

    if pidfile:
        platforms.create_pidlock(pidfile)

    app.log.setup_logging_subsystem(loglevel, logfile)

    print("-> evcam: Taking snapshots with {0} (every {1} secs.)".format(
        camera, freq))
    state = app.events.State()
    cam = instantiate(camera,
                      state,
                      app=app,
                      freq=freq,
                      maxrate=maxrate,
                      timer=timer)
    cam.install()
    conn = app.connection_for_read()
    recv = app.events.Receiver(conn, handlers={"*": state.event})
    try:
        try:
            recv.capture(limit=None)
        except KeyboardInterrupt:
            raise SystemExit
    finally:
        cam.cancel()
        conn.close()
Example #37
    def _test_on_failure(self, exception):
        app = app_or_default()
        tid = uuid()
        tw = TaskRequest(mytask.name, tid, [4], {"f": "x"})
        try:
            raise exception
        except Exception:
            exc_info = ExceptionInfo(sys.exc_info())

            logfh = WhateverIO()
            tw.logger.handlers = []
            tw.logger = setup_logger(logfile=logfh, loglevel=logging.INFO,
                                     root=False)

            app.conf.CELERY_SEND_TASK_ERROR_EMAILS = True

            tw.on_failure(exc_info)
            logvalue = logfh.getvalue()
            self.assertIn(mytask.name, logvalue)
            self.assertIn(tid, logvalue)
            self.assertIn("ERROR", logvalue)

            app.conf.CELERY_SEND_TASK_ERROR_EMAILS = False
Example #38
    def __init__(self,
                 connection=None,
                 hostname=None,
                 enabled=True,
                 channel=None,
                 buffer_while_offline=True,
                 app=None,
                 serializer=None):
        self.app = app_or_default(app)
        self.connection = connection
        self.channel = channel
        self.hostname = hostname or socket.gethostname()
        self.buffer_while_offline = buffer_while_offline
        self.mutex = threading.Lock()
        self.publisher = None
        self._outbound_buffer = deque()
        self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER
        self.on_enabled = set()
        self.on_disabled = set()

        self.enabled = enabled
        if self.enabled:
            self.enable()
Example #39
    def __init__(self,
                 loglevel=None,
                 hostname=None,
                 ready_callback=noop,
                 queues=None,
                 app=None,
                 pidfile=None,
                 **kwargs):
        self.app = app_or_default(app or self.app)
        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)
        self.app.finalize()
        trace._tasks = self.app._tasks

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace="celeryd")
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        self.use_eventloop = (detect_environment() == "default"
                              and self.app.broker_connection().is_evented)

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
Example #40
def evtop(app=None):
    sys.stderr.write("-> evtop: starting capture...\n")
    app = app_or_default(app)
    state = app.events.State()
    conn = app.broker_connection()
    recv = app.events.Receiver(conn, handlers={"*": state.event})
    capture = recv.itercapture()
    consumer = capture.next()
    display = CursesMonitor(state, app=app)
    display.init_screen()
    refresher = DisplayThread(display)
    refresher.start()
    try:
        capture.next()
    except Exception:
        refresher.shutdown = True
        refresher.join()
        display.resetscreen()
        raise
    except (KeyboardInterrupt, SystemExit):
        conn and conn.close()
        refresher.shutdown = True
        refresher.join()
        display.resetscreen()
Example #41
def get_redis(app=None):
    app = app_or_default(app)
    conf = ensure_conf(app)
    if not hasattr(app, 'redbeat_redis') or app.redbeat_redis is None:
        redis_options = conf.app.conf.get(
            'REDBEAT_REDIS_OPTIONS',
            conf.app.conf.get('BROKER_TRANSPORT_OPTIONS', {}))
        retry_period = redis_options.get('retry_period')
        if conf.redis_url.startswith(
                'redis-sentinel') and 'sentinels' in redis_options:
            from redis.sentinel import Sentinel
            sentinel = Sentinel(
                redis_options['sentinels'],
                socket_timeout=redis_options.get('socket_timeout'),
                password=redis_options.get('password'),
                db=redis_options.get('db', 0),
                decode_responses=True)
            connection = sentinel.master_for(
                redis_options.get('service_name', 'master'))
        elif conf.redis_url.startswith('rediss'):
            ssl_options = {'ssl_cert_reqs': ssl.CERT_REQUIRED}
            if isinstance(conf.redis_use_ssl, dict):
                ssl_options.update(conf.redis_use_ssl)
            connection = StrictRedis.from_url(conf.redis_url,
                                              decode_responses=True,
                                              **ssl_options)
        else:
            connection = StrictRedis.from_url(conf.redis_url,
                                              decode_responses=True)

        if retry_period is None:
            app.redbeat_redis = connection
        else:
            app.redbeat_redis = RetryingConnection(retry_period, connection)

    return app.redbeat_redis
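A configuration sketch for the TLS branch above, assuming conf.redis_url and conf.redis_use_ssl are populated from the corresponding REDBEAT settings (URL and certificate path are placeholders):

app.conf.REDBEAT_REDIS_URL = 'rediss://:secret@redis.example.com:6380/0'
app.conf.REDBEAT_REDIS_USE_SSL = {'ssl_ca_certs': '/etc/ssl/redis-ca.pem'}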
Example #42
def evcam(camera,
          freq=1.0,
          maxrate=None,
          loglevel=0,
          logfile=None,
          pidfile=None,
          timer=None,
          app=None):
    app = app_or_default(app)

    if pidfile:
        pidlock = platforms.create_pidlock(pidfile).acquire()
        atexit.register(pidlock.release)

    app.log.setup_logging_subsystem(loglevel, logfile)

    logger.info("-> evcam: Taking snapshots with %s (every %s secs.)\n" %
                (camera, freq))
    state = app.events.State()
    cam = instantiate(camera,
                      state,
                      app=app,
                      freq=freq,
                      maxrate=maxrate,
                      timer=timer)
    cam.install()
    conn = app.broker_connection()
    recv = app.events.Receiver(conn, handlers={"*": state.event})
    try:
        try:
            recv.capture(limit=None)
        except KeyboardInterrupt:
            raise SystemExit
    finally:
        cam.cancel()
        conn.close()
Example #43
 def __init__(self,
              connection=None,
              hostname=None,
              enabled=True,
              channel=None,
              buffer_while_offline=True,
              app=None,
              serializer=None,
              groups=None,
              delivery_mode=1):
     self.app = app_or_default(app or self.app)
     self.connection = connection
     self.channel = channel
     self.hostname = hostname or anon_nodename()
     self.buffer_while_offline = buffer_while_offline
     self.mutex = threading.Lock()
     self.producer = None
     self._outbound_buffer = deque()
     self.serializer = serializer or self.app.conf.CELERY_EVENT_SERIALIZER
     self.on_enabled = set()
     self.on_disabled = set()
     self.groups = set(groups or [])
     self.tzoffset = [-time.timezone, -time.altzone]
     self.clock = self.app.clock
     self.delivery_mode = delivery_mode
     if not connection and channel:
         self.connection = channel.connection.client
     self.enabled = enabled
     conninfo = self.connection or self.app.connection()
     self.exchange = get_exchange(conninfo)
     if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS:
         self.enabled = False
     if self.enabled:
         self.enable()
     self.headers = {'hostname': self.hostname}
     self.pid = os.getpid()
Example #44
 def __init__(self, task_id, backend=None, task_name=None, app=None):
     app = app_or_default(app)
     backend = backend or app.backend
     super(AsyncResult, self).__init__(task_id, backend,
                                       task_name=task_name, app=app)
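A short usage sketch (the task id is a placeholder):

result = AsyncResult('d9078da5-9915-40a0-bfa1-392c7bde42ed')
if result.ready():  # True once the task has finished
    print(result.get(timeout=1.0))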
Example #45
from __future__ import absolute_import
import six

from django.core.urlresolvers import reverse
from django.views.generic import DetailView, TemplateView
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.template.defaultfilters import escape

from celery import app, states

celery_app = app.app_or_default()

from django.contrib.contenttypes.models import ContentType

from ..models import WooeyJob, WooeyFile, Favorite
from .. import settings as wooey_settings
from ..backend.utils import valid_user, get_file_previews
from ..django_compat import JsonResponse
from django.db.models import Q
from django.views.generic import ListView
from django.template.loader import render_to_string

SPANBASE = "<span title='{}' class='glyphicon {}'></span> "
MAXIMUM_JOBS_NAVBAR = 10
STATE_MAPPER = {
    #  Default Primary Success Info Warning Danger
    WooeyJob.COMPLETED: SPANBASE.format(_('Success'), 'success glyphicon-ok'),
    WooeyJob.RUNNING: SPANBASE.format(_('Executing'), 'success glyphicon-refresh spinning'),
    states.PENDING: SPANBASE.format(_('Queued'), 'glyphicon-time'),
    states.REVOKED: SPANBASE.format(_('Halted'), 'danger glyphicon-stop'),
Example #46
 def test_parse_options(self):
     cmd = beat_bin.beat()
     cmd.app = app_or_default()
     options, args = cmd.parse_options('celery beat', ['-s', 'foo'])
     self.assertEqual(options.schedule, 'foo')
Example #47
 def test_detach(self):
     cmd = beat_bin.beat()
     cmd.app = app_or_default()
     cmd.run(detach=True)
     self.assertTrue(MockDaemonContext.opened)
     self.assertTrue(MockDaemonContext.closed)
Example #48
 def setUp(self):
     app = app_or_default()
     self.i = Control(app=app).inspect()
Example #49
 def setUp(self):
     self.app = app_or_default()
     self.app.events = self.MockEvents()
Example #50
 def __init__(self, ready_queue, callback, app=None, **kw):
     self.app = app_or_default(app)
     self.ready_queue = ready_queue
     self.callback = callback
     self._does_debug = logger.isEnabledFor(logging.DEBUG)
     super(Mediator, self).__init__()
Example #51
 def setUp(self):
     app = self.app = app_or_default()
     self.c = Control(app=app)
     self.prev, app.control = app.control, self.c
     self.i = self.c.inspect()
Example #52
    def __init__(self,
                 handle_task,
                 init_callback=noop,
                 hostname=None,
                 pool=None,
                 app=None,
                 timer=None,
                 controller=None,
                 hub=None,
                 amqheartbeat=None,
                 worker_options=None,
                 disable_rate_limits=False,
                 **kwargs):
        self.app = app_or_default(app)
        self.controller = controller
        self.init_callback = init_callback
        self.hostname = hostname or socket.gethostname()
        self.pid = os.getpid()
        self.pool = pool
        self.timer = timer or default_timer
        self.strategies = {}
        conninfo = self.app.connection()
        self.connection_errors = conninfo.connection_errors
        self.channel_errors = conninfo.channel_errors
        self._restart_state = restart_state(maxR=5, maxT=1)

        self._does_info = logger.isEnabledFor(logging.INFO)
        self.handle_task = handle_task
        self.amqheartbeat_rate = self.app.conf.BROKER_HEARTBEAT_CHECKRATE
        self.disable_rate_limits = disable_rate_limits

        # this contains a tokenbucket for each task type by name, used for
        # rate limits, or None if rate limits are disabled for that task.
        self.task_buckets = defaultdict(lambda: None)
        self.reset_rate_limits()

        if hub:
            self.amqheartbeat = amqheartbeat
            if self.amqheartbeat is None:
                self.amqheartbeat = self.app.conf.BROKER_HEARTBEAT
            self.hub = hub
            self.hub.on_init.append(self.on_poll_init)
        else:
            self.hub = None
            self.amqheartbeat = 0

        if not hasattr(self, 'loop'):
            self.loop = loops.asynloop if hub else loops.synloop

        if _detect_environment() == 'gevent':
            # there's a gevent bug that causes timeouts to not be reset,
            # so if the connection timeout is exceeded once, it can NEVER
            # connect again.
            self.app.conf.BROKER_CONNECTION_TIMEOUT = None

        self.steps = []
        self.blueprint = self.Blueprint(
            app=self.app,
            on_close=self.on_close,
        )
        self.blueprint.apply(self, **dict(worker_options or {}, **kwargs))
Example #53
    def __init__(self,
                 concurrency=None,
                 loglevel=None,
                 logfile=None,
                 hostname=None,
                 discard=False,
                 run_clockservice=False,
                 schedule=None,
                 task_time_limit=None,
                 task_soft_time_limit=None,
                 max_tasks_per_child=None,
                 queues=None,
                 events=None,
                 db=None,
                 include=None,
                 app=None,
                 pidfile=None,
                 redirect_stdouts=None,
                 redirect_stdouts_level=None,
                 autoscale=None,
                 scheduler_cls=None,
                 pool=None,
                 **kwargs):
        self.app = app = app_or_default(app)
        conf = app.conf
        self.concurrency = (concurrency or conf.CELERYD_CONCURRENCY
                            or cpu_count())
        self.loglevel = loglevel or conf.CELERYD_LOG_LEVEL
        self.logfile = logfile or conf.CELERYD_LOG_FILE

        self.hostname = hostname or socket.gethostname()
        self.discard = discard
        self.run_clockservice = run_clockservice
        if self.app.IS_WINDOWS and self.run_clockservice:
            self.die("-B option does not work on Windows.  "
                     "Please run celerybeat as a separate service.")
        self.schedule = schedule or conf.CELERYBEAT_SCHEDULE_FILENAME
        self.scheduler_cls = scheduler_cls or conf.CELERYBEAT_SCHEDULER
        self.events = events if events is not None else conf.CELERY_SEND_EVENTS
        self.task_time_limit = (task_time_limit
                                or conf.CELERYD_TASK_TIME_LIMIT)
        self.task_soft_time_limit = (task_soft_time_limit
                                     or conf.CELERYD_TASK_SOFT_TIME_LIMIT)
        self.max_tasks_per_child = (max_tasks_per_child
                                    or conf.CELERYD_MAX_TASKS_PER_CHILD)
        self.redirect_stdouts = (redirect_stdouts
                                 or conf.CELERY_REDIRECT_STDOUTS)
        self.redirect_stdouts_level = (redirect_stdouts_level
                                       or conf.CELERY_REDIRECT_STDOUTS_LEVEL)
        self.pool = pool or conf.CELERYD_POOL
        self.db = db
        self.use_queues = [] if queues is None else queues
        self.queues = None
        self.include = [] if include is None else include
        self.pidfile = pidfile
        self.autoscale = None
        if autoscale:
            max_c, _, min_c = autoscale.partition(",")
            self.autoscale = [int(max_c), min_c and int(min_c) or 0]
        self._isatty = sys.stdout.isatty()

        self.colored = app.log.colored(self.logfile)

        if isinstance(self.use_queues, basestring):
            self.use_queues = self.use_queues.split(",")
        if isinstance(self.include, basestring):
            self.include = self.include.split(",")

        if not isinstance(self.loglevel, int):
            try:
                self.loglevel = LOG_LEVELS[self.loglevel.upper()]
            except KeyError:
                self.die(
                    "Unknown level %r. Please use one of %s." %
                    (self.loglevel, "|".join(l for l in LOG_LEVELS.keys()
                                             if isinstance(l, basestring))))
Example #54
 def __init__(self, taskset_id, subtasks, app=None):
     self.taskset_id = taskset_id
     self.subtasks = subtasks
     self.app = app_or_default(app)
Example #55
 def setUp(self):
     self.app = app_or_default()
     self.state = self.app.events.State()
Example #56
 def __init__(self, app=None, **kwargs):
     from celery.app import app_or_default
     self.app = app_or_default(app)
     self.task_modules = set()
Example #57
 def __init__(self, app=None, no_color=False):
     self.app = app_or_default(app)
     self.colored = term.colored(enabled=not no_color)
Example #58
 def setUp(self):
     self.app = app_or_default()
     self.control = Control(app=self.app)
     self.app.control = self.control
Example #59
def move(predicate, connection=None, exchange=None, routing_key=None,
         source=None, app=None, callback=None, limit=None, transform=None,
         **kwargs):
    """Find tasks by filtering them and move the tasks to a new queue.

    Arguments:
        predicate (Callable): Filter function used to decide the messages
            to move.  Must accept the standard signature of ``(body, message)``
            used by Kombu consumer callbacks.  If the predicate wants the
            message to be moved it must return either:

                1) a tuple of ``(exchange, routing_key)``, or

                2) a :class:`~kombu.entity.Queue` instance, or

                3) any other true value means the specified
                    ``exchange`` and ``routing_key`` arguments will be used.
        connection (kombu.Connection): Custom connection to use.
        source (List[Union[str, kombu.Queue]]): Optional list of source
            queues to use instead of the default (queues
            in :setting:`task_queues`).  This list can also contain
            :class:`~kombu.entity.Queue` instances.
        exchange (str, kombu.Exchange): Default destination exchange.
        routing_key (str): Default destination routing key.
        limit (int): Limit number of messages to filter.
        callback (Callable): Callback called after message moved,
            with signature ``(state, body, message)``.
        transform (Callable): Optional function to transform the return
            value (destination) of the filter function.

    Also supports the same keyword arguments as :func:`start_filter`.

    To demonstrate, the :func:`move_task_by_id` operation can be implemented
    like this:

    .. code-block:: python

        def is_wanted_task(body, message):
            if body['id'] == wanted_id:
                return Queue('foo', exchange=Exchange('foo'),
                             routing_key='foo')

        move(is_wanted_task)

    or with a transform:

    .. code-block:: python

        def transform(value):
            if isinstance(value, str):
                return Queue(value, Exchange(value), value)
            return value

        move(is_wanted_task, transform=transform)

    Note:
        The predicate may also return a tuple of ``(exchange, routing_key)``
        to specify the destination to where the task should be moved,
        or a :class:`~kombu.entity.Queue` instance.
        Any other true value means that the task will be moved to the
        default exchange/routing_key.
    """
    app = app_or_default(app)
    queues = [_maybe_queue(app, queue) for queue in source or []] or None
    with app.connection_or_acquire(connection, pool=False) as conn:
        producer = app.amqp.Producer(conn)
        state = State()

        def on_task(body, message):
            ret = predicate(body, message)
            if ret:
                if transform:
                    ret = transform(ret)
                if isinstance(ret, Queue):
                    maybe_declare(ret, conn.default_channel)
                    ex, rk = ret.exchange.name, ret.routing_key
                else:
                    ex, rk = expand_dest(ret, exchange, routing_key)
                republish(producer, message,
                          exchange=ex, routing_key=rk)
                message.ack()

                state.filtered += 1
                if callback:
                    callback(state, body, message)
                if limit and state.filtered >= limit:
                    raise StopFiltering()

        return start_filter(app, conn, on_task, consume_from=queues, **kwargs)
Example #60
 def __init__(self, task_id, backend, task_name=None, app=None):
     self.task_id = task_id
     self.backend = backend
     self.task_name = task_name
     self.app = app_or_default(app)