def __init__(self, connection=None, hostname=None, enabled=True,
             channel=None, buffer_while_offline=True, app=None,
             serializer=None, groups=None, delivery_mode=1,
             buffer_group=None, buffer_limit=24, on_send_buffered=None):
    self.app = app_or_default(app or self.app)
    self.connection = connection
    self.channel = channel
    self.hostname = hostname or anon_nodename()
    self.buffer_while_offline = buffer_while_offline
    self.buffer_group = buffer_group or frozenset()
    self.buffer_limit = buffer_limit
    self.on_send_buffered = on_send_buffered
    self._group_buffer = defaultdict(list)
    self.mutex = threading.Lock()
    self.producer = None
    self._outbound_buffer = deque()
    self.serializer = serializer or self.app.conf.event_serializer
    self.on_enabled = set()
    self.on_disabled = set()
    self.groups = set(groups or [])
    self.tzoffset = [-time.timezone, -time.altzone]
    self.clock = self.app.clock
    self.delivery_mode = delivery_mode
    if not connection and channel:
        self.connection = channel.connection.client
    self.enabled = enabled
    conninfo = self.connection or self.app.connection_for_write()
    self.exchange = get_exchange(conninfo)
    if conninfo.transport.driver_type in self.DISABLED_TRANSPORTS:
        self.enabled = False
    if self.enabled:
        self.enable()
    self.headers = {'hostname': self.hostname}
    self.pid = os.getpid()
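
This constructor matches Celery's event dispatcher (celery.events.dispatcher.EventDispatcher); note how hostname falls back to anon_nodename(). A minimal usage sketch, assuming that context; the app name and the in-memory broker are placeholders chosen purely for illustration:

from celery import Celery

app = Celery('sketch', broker='memory://')  # placeholder app for the sketch

# app.events.Dispatcher is the app-bound EventDispatcher class; constructing it
# runs the __init__ above, so hostname defaults to anon_nodename() ('gen<pid>@<host>').
dispatcher = app.events.Dispatcher(enabled=True)
dispatcher.send('worker-heartbeat', freq=2.0)
dispatcher.close()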
Example #2
def _start_worker_thread(app,
                         concurrency=1,
                         pool='solo',
                         loglevel=WORKER_LOGLEVEL,
                         logfile=None,
                         WorkController=TestWorkController,
                         perform_ping_check=True,
                         shutdown_timeout=10.0,
                         **kwargs):
    # type: (Celery, int, str, Union[str, int], str, Any, **Any) -> Iterable
    """Start Celery worker in a thread.

    Yields:
        celery.worker.Worker: worker instance.
    """
    setup_app_for_worker(app, loglevel, logfile)
    if perform_ping_check:
        assert 'celery.ping' in app.tasks
    # Make sure we can connect to the broker; merely accessing
    # default_channel is enough to force the connection to be established.
    with app.connection(hostname=os.environ.get('TEST_BROKER')) as conn:
        conn.default_channel.queue_declare

    worker = WorkController(
        app=app,
        concurrency=concurrency,
        hostname=anon_nodename(),
        pool=pool,
        loglevel=loglevel,
        logfile=logfile,
        # not allowed to override TestWorkController.on_consumer_ready
        ready_callback=None,
        without_heartbeat=kwargs.pop("without_heartbeat", True),
        without_mingle=True,
        without_gossip=True,
        **kwargs)

    t = threading.Thread(target=worker.start, daemon=True)
    t.start()
    worker.ensure_started()
    _set_task_join_will_block(False)

    try:
        yield worker
    finally:
        from celery.worker import state
        state.should_terminate = 0
        t.join(shutdown_timeout)
        if t.is_alive():
            raise RuntimeError(
                "Worker thread failed to exit within the allocated timeout. "
                "Consider raising `shutdown_timeout` if your tasks take longer "
                "to execute.")
        state.should_terminate = None
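
Celery's testing helper exposes this generator through a context manager (celery.contrib.testing.worker.start_worker). A sketch of driving the generator above directly, under the assumption that my_app is a configured Celery app and add is a registered task (both placeholder names):

from contextlib import contextmanager

# Hedged sketch: wrap the plain generator the way the testing helper does.
start_worker = contextmanager(_start_worker_thread)

with start_worker(my_app, perform_ping_check=False, shutdown_timeout=30.0) as worker:
    # any registered task works here; add is only illustrative
    assert add.delay(2, 2).get(timeout=10) == 4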
Example #3
def _start_worker_thread(
    app,
    concurrency=1,
    pool="solo",
    loglevel=WORKER_LOGLEVEL,
    logfile=None,
    WorkController=TestWorkController,
    perform_ping_check=True,
    **kwargs
):
    # type: (Celery, int, str, Union[str, int], str, Any, **Any) -> Iterable
    """Start Celery worker in a thread.

    Yields:
        celery.worker.Worker: worker instance.
    """
    setup_app_for_worker(app, loglevel, logfile)
    if perform_ping_check:
        assert "celery.ping" in app.tasks
    # Make sure we can connect to the broker
    with app.connection(hostname=os.environ.get("TEST_BROKER")) as conn:
        conn.default_channel.queue_declare

    worker = WorkController(
        app=app,
        concurrency=concurrency,
        hostname=anon_nodename(),
        pool=pool,
        loglevel=loglevel,
        logfile=logfile,
        # not allowed to override TestWorkController.on_consumer_ready
        ready_callback=None,
        without_heartbeat=True,
        without_mingle=True,
        without_gossip=True,
        **kwargs
    )

    t = threading.Thread(target=worker.start)
    t.start()
    worker.ensure_started()
    _set_task_join_will_block(False)

    yield worker

    from celery.worker import state

    state.should_terminate = 0
    t.join(10)
    state.should_terminate = None
Example #4
def _start_worker_thread(app,
                         concurrency=1,
                         pool='solo',
                         loglevel=WORKER_LOGLEVEL,
                         logfile=None,
                         WorkController=TestWorkController,
                         **kwargs):
    # type: (Celery, int, str, Union[str, int], str, Any, **Any) -> Iterable
    """Start Celery worker in a thread.

    Yields:
        celery.worker.Worker: worker instance.
    """
    setup_app_for_worker(app, loglevel, logfile)
    assert 'celery.ping' in app.tasks
    # Make sure we can connect to the broker
    with app.connection() as conn:
        conn.default_channel.queue_declare

    worker = WorkController(
        app=app,
        concurrency=concurrency,
        hostname=anon_nodename(),
        pool=pool,
        loglevel=loglevel,
        logfile=logfile,
        # not allowed to override TestWorkController.on_consumer_ready
        ready_callback=None,
        without_heartbeat=True,
        without_mingle=True,
        without_gossip=True,
        **kwargs)

    t = threading.Thread(target=worker.start)
    t.start()
    worker.ensure_started()
    _set_task_join_will_block(False)

    yield worker

    from celery.worker import state
    state.should_terminate = 0
    t.join(10)
    state.should_terminate = None
Example #5
def _start_worker_thread(app: Celery,
                         concurrency: int = None,
                         pool: str = None,
                         loglevel: Union[str, int] = None,
                         logfile: str = None,
                         work_controller: Any = WorkController,
                         **kwargs: Any) -> Iterable:
    """Start Celery worker in a thread.

    Yields:
        celery.worker.Worker: worker instance.
    """
    setup_app_for_worker(app, loglevel, logfile)
    assert 'celery.ping' in app.tasks

    # Make sure we can connect to the broker
    with app.connection() as conn:
        conn.default_channel.queue_declare

    _worker = work_controller(
        app=app,
        concurrency=concurrency,
        hostname=anon_nodename(),
        pool=pool,
        loglevel=loglevel,
        logfile=logfile,
        # not allowed to override WorkController.on_consumer_ready
        ready_callback=None,
        without_heartbeat=True,
        without_mingle=True,
        without_gossip=True,
        **kwargs)

    t = threading.Thread(target=_worker.start)
    t.start()
    _worker.ensure_started()
    _set_task_join_will_block(False)

    yield _worker

    from celery.worker import state
    state.should_terminate = 0
    # t.join(10)
    state.should_terminate = None
Example #6
def _start_worker_thread(app,
                         concurrency=1,
                         pool='solo',
                         loglevel=log_level,
                         logfile=None,
                         WorkController=TestWorkController,
                         **kwargs):
    from celery.utils.nodenames import anon_nodename
    from celery.result import _set_task_join_will_block
    import threading

    setup_app_for_worker(app, loglevel, logfile)

    worker = WorkController(app=app,
                            concurrency=concurrency,
                            hostname=anon_nodename(),
                            pool=pool,
                            loglevel=loglevel,
                            logfile=logfile,
                            ready_callback=None,
                            without_heartbeat=True,
                            without_mingle=True,
                            without_gossip=True,
                            **kwargs)

    t = threading.Thread(target=worker.start)
    t.start()
    worker.ensure_started()
    _set_task_join_will_block(False)

    yield worker

    from celery.worker import state
    state.should_terminate = 0
    t.join(10)
    state.should_terminate = None
Example #7
    def as_task_v2(self, task_id, name, args=None, kwargs=None,
                   countdown=None, eta=None, group_id=None,
                   expires=None, retries=0, chord=None,
                   callbacks=None, errbacks=None, reply_to=None,
                   time_limit=None, soft_time_limit=None,
                   create_sent_event=False, root_id=None, parent_id=None,
                   shadow=None, chain=None, now=None, timezone=None,
                   origin=None, argsrepr=None, kwargsrepr=None):
        args = args or ()
        kwargs = kwargs or {}
        if not isinstance(args, (list, tuple)):
            raise TypeError('task args must be a list or tuple')
        if not isinstance(kwargs, Mapping):
            raise TypeError('task keyword arguments must be a mapping')
        if countdown:  # convert countdown to ETA
            self._verify_seconds(countdown, 'countdown')
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            eta = maybe_make_aware(
                now + timedelta(seconds=countdown), tz=timezone,
            )
        if isinstance(expires, numbers.Real):
            self._verify_seconds(expires, 'expires')
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            expires = maybe_make_aware(
                now + timedelta(seconds=expires), tz=timezone,
            )
        eta = eta and eta.isoformat()
        expires = expires and expires.isoformat()

        if argsrepr is None:
            argsrepr = saferepr(args, self.argsrepr_maxsize)
        if kwargsrepr is None:
            kwargsrepr = saferepr(kwargs, self.kwargsrepr_maxsize)

        if JSON_NEEDS_UNICODE_KEYS:  # pragma: no cover
            if callbacks:
                callbacks = [utf8dict(callback) for callback in callbacks]
            if errbacks:
                errbacks = [utf8dict(errback) for errback in errbacks]
            if chord:
                chord = utf8dict(chord)

        return task_message(
            headers={
                'lang': 'py',
                'task': name,
                'id': task_id,
                'eta': eta,
                'expires': expires,
                'group': group_id,
                'retries': retries,
                'timelimit': [time_limit, soft_time_limit],
                'root_id': root_id,
                'parent_id': parent_id,
                'argsrepr': argsrepr,
                'kwargsrepr': kwargsrepr,
                'origin': origin or anon_nodename()
            },
            properties={
                'correlation_id': task_id,
                'reply_to': reply_to or '',
            },
            body=(
                args, kwargs, {
                    'callbacks': callbacks,
                    'errbacks': errbacks,
                    'chain': chain,
                    'chord': chord,
                },
            ),
            sent_event={
                'uuid': task_id,
                'root_id': root_id,
                'parent_id': parent_id,
                'name': name,
                'args': argsrepr,
                'kwargs': kwargsrepr,
                'retries': retries,
                'eta': eta,
                'expires': expires,
            } if create_sent_event else None,
        )
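
In Celery this method lives on app.amqp and builds the protocol 2 task message; nothing here talks to the broker, so its output can be inspected directly. A small sketch under that assumption (the task name and the generated id are illustrative only):

import uuid
from celery import Celery

app = Celery('sketch')  # no broker needed just to build a message

msg = app.amqp.as_task_v2(str(uuid.uuid4()), 'tasks.add', args=(2, 2))
# task_message is a namedtuple of headers, properties, body and sent_event
print(msg.headers['origin'])             # falls back to anon_nodename()
print(msg.properties['correlation_id'])  # same value as the task id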
Example #8
    def as_task_v2(self,
                   task_id,
                   name,
                   args=None,
                   kwargs=None,
                   countdown=None,
                   eta=None,
                   group_id=None,
                   expires=None,
                   retries=0,
                   chord=None,
                   callbacks=None,
                   errbacks=None,
                   reply_to=None,
                   time_limit=None,
                   soft_time_limit=None,
                   create_sent_event=False,
                   root_id=None,
                   parent_id=None,
                   shadow=None,
                   chain=None,
                   now=None,
                   timezone=None,
                   origin=None,
                   argsrepr=None,
                   kwargsrepr=None):
        args = args or ()
        kwargs = kwargs or {}
        if not isinstance(args, (list, tuple)):
            raise TypeError('task args must be a list or tuple')
        if not isinstance(kwargs, Mapping):
            raise TypeError('task keyword arguments must be a mapping')
        if countdown:  # convert countdown to ETA
            self._verify_seconds(countdown, 'countdown')
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            eta = maybe_make_aware(
                now + timedelta(seconds=countdown),
                tz=timezone,
            )
        if isinstance(expires, numbers.Real):
            self._verify_seconds(expires, 'expires')
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            expires = maybe_make_aware(
                now + timedelta(seconds=expires),
                tz=timezone,
            )
        if not isinstance(eta, string_t):
            eta = eta and eta.isoformat()
        # If we retry a task `expires` will already be ISO8601-formatted.
        if not isinstance(expires, string_t):
            expires = expires and expires.isoformat()

        if argsrepr is None:
            argsrepr = saferepr(args, self.argsrepr_maxsize)
        if kwargsrepr is None:
            kwargsrepr = saferepr(kwargs, self.kwargsrepr_maxsize)

        if JSON_NEEDS_UNICODE_KEYS:  # pragma: no cover
            if callbacks:
                callbacks = [utf8dict(callback) for callback in callbacks]
            if errbacks:
                errbacks = [utf8dict(errback) for errback in errbacks]
            if chord:
                chord = utf8dict(chord)

        if not root_id:  # empty root_id defaults to task_id
            root_id = task_id

        return task_message(
            headers={
                'lang': 'py',
                'task': name,
                'id': task_id,
                'shadow': shadow,
                'eta': eta,
                'expires': expires,
                'group': group_id,
                'retries': retries,
                'timelimit': [time_limit, soft_time_limit],
                'root_id': root_id,
                'parent_id': parent_id,
                'argsrepr': argsrepr,
                'kwargsrepr': kwargsrepr,
                'origin': origin or anon_nodename()
            },
            properties={
                'correlation_id': task_id,
                'reply_to': reply_to or '',
            },
            body=(
                args,
                kwargs,
                {
                    'callbacks': callbacks,
                    'errbacks': errbacks,
                    'chain': chain,
                    'chord': chord,
                },
            ),
            sent_event={
                'uuid': task_id,
                'root_id': root_id,
                'parent_id': parent_id,
                'name': name,
                'args': argsrepr,
                'kwargs': kwargsrepr,
                'retries': retries,
                'eta': eta,
                'expires': expires,
            } if create_sent_event else None,
        )
Example #9
import os
import re
from datetime import datetime, timedelta
from time import sleep
from unittest.mock import ANY

import pytest

from celery.utils.nodenames import anon_nodename

from .tasks import add, sleeping

NODENAME = anon_nodename()

_flaky = pytest.mark.flaky(reruns=5, reruns_delay=2)
_timeout = pytest.mark.timeout(timeout=300)


def flaky(fn):
    return _timeout(_flaky(fn))


@pytest.fixture()
def inspect(manager):
    return manager.app.control.inspect()


class test_Inspect:
    """Integration tests fo app.control.inspect() API"""
    @flaky
    def test_ping(self, inspect):
        # Minimal body (the original example is cut off here): a ping reply
        # maps each responding worker's nodename to {'ok': 'pong'}.
        ret = inspect.ping()
        assert ret[NODENAME] == {'ok': 'pong'}
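
NODENAME is computed at import time so assertions can address the reply from the test worker, which the fixtures start in the same process and which therefore reports the same anonymous nodename. A quick sketch of the format anon_nodename() produces, per celery.utils.nodenames:

import os
from celery.utils.nodenames import anon_nodename

# '<prefix><pid>@<hostname>' with the default prefix 'gen'
assert anon_nodename().startswith(f'gen{os.getpid()}@')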
Example #10
    def as_task_v2(
        self,
        task_id,
        name,
        args=None,
        kwargs=None,
        countdown=None,
        eta=None,
        group_id=None,
        group_index=None,
        expires=None,
        retries=0,
        chord=None,
        callbacks=None,
        errbacks=None,
        reply_to=None,
        time_limit=None,
        soft_time_limit=None,
        create_sent_event=False,
        root_id=None,
        parent_id=None,
        shadow=None,
        chain=None,
        now=None,
        timezone=None,
        origin=None,
        argsrepr=None,
        kwargsrepr=None,
    ):
        args = args or ()
        kwargs = kwargs or {}
        if not isinstance(args, (list, tuple)):
            raise TypeError("task args must be a list or tuple")
        if not isinstance(kwargs, Mapping):
            raise TypeError("task keyword arguments must be a mapping")
        if countdown:  # convert countdown to ETA
            self._verify_seconds(countdown, "countdown")
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            eta = maybe_make_aware(
                now + timedelta(seconds=countdown),
                tz=timezone,
            )
        if isinstance(expires, numbers.Real):
            self._verify_seconds(expires, "expires")
            now = now or self.app.now()
            timezone = timezone or self.app.timezone
            expires = maybe_make_aware(
                now + timedelta(seconds=expires),
                tz=timezone,
            )
        if not isinstance(eta, string_t):
            eta = eta and eta.isoformat()
        # If we retry a task `expires` will already be ISO8601-formatted.
        if not isinstance(expires, string_t):
            expires = expires and expires.isoformat()

        if argsrepr is None:
            argsrepr = saferepr(args, self.argsrepr_maxsize)
        if kwargsrepr is None:
            kwargsrepr = saferepr(kwargs, self.kwargsrepr_maxsize)

        if JSON_NEEDS_UNICODE_KEYS:  # pragma: no cover
            if callbacks:
                callbacks = [utf8dict(callback) for callback in callbacks]
            if errbacks:
                errbacks = [utf8dict(errback) for errback in errbacks]
            if chord:
                chord = utf8dict(chord)

        if not root_id:  # empty root_id defaults to task_id
            root_id = task_id

        return task_message(
            headers={
                "lang": "py",
                "task": name,
                "id": task_id,
                "shadow": shadow,
                "eta": eta,
                "expires": expires,
                "group": group_id,
                "group_index": group_index,
                "retries": retries,
                "timelimit": [time_limit, soft_time_limit],
                "root_id": root_id,
                "parent_id": parent_id,
                "argsrepr": argsrepr,
                "kwargsrepr": kwargsrepr,
                "origin": origin or anon_nodename(),
            },
            properties={
                "correlation_id": task_id,
                "reply_to": reply_to or "",
            },
            body=(
                args,
                kwargs,
                {
                    "callbacks": callbacks,
                    "errbacks": errbacks,
                    "chain": chain,
                    "chord": chord,
                },
            ),
            sent_event={
                "uuid": task_id,
                "root_id": root_id,
                "parent_id": parent_id,
                "name": name,
                "args": argsrepr,
                "kwargs": kwargsrepr,
                "retries": retries,
                "eta": eta,
                "expires": expires,
            } if create_sent_event else None,
        )