Example #1
    def apply_async(self, target, args=None, kwargs=None, callbacks=None,
            errbacks=None, accept_callback=None, timeout_callback=None,
            **compat):
        """Equivalent of the :func:``apply`` built-in function.

        All ``callbacks`` and ``errbacks`` should complete immediately since
        otherwise the thread which handles the result will get blocked.

        """
        args = args or []
        kwargs = kwargs or {}
        callbacks = callbacks or []
        errbacks = errbacks or []

        on_ready = curry(self.on_ready, callbacks, errbacks)
        on_worker_error = curry(self.on_worker_error, errbacks)

        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" % (
            target, args, kwargs))

        return self._pool.apply_async(target, args, kwargs,
                                      callback=on_ready,
                                      accept_callback=accept_callback,
                                      timeout_callback=timeout_callback,
                                      error_callback=on_worker_error,
                                      waitforslot=self.putlocks)
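Here `curry(self.on_ready, callbacks, errbacks)` is plain partial application: it pre-binds the callback and errback lists so the pool only has to supply the task result. A minimal, self-contained sketch of that pattern, using functools.partial as a stand-in for curry (the handler and variable names are illustrative, not Celery's API). Example #2 below is the same method with a different line layout.

from functools import partial

def on_ready(callbacks, errbacks, result):
    # callbacks and errbacks were pre-bound; only `result` comes from the pool
    for callback in callbacks:
        callback(result)

collected = []
handler = partial(on_ready, [collected.append], [])  # what curry() builds above
handler(42)        # the pool calls this with the task's return value
print(collected)   # [42]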
Example #2
    def apply_async(self,
                    target,
                    args=None,
                    kwargs=None,
                    callbacks=None,
                    errbacks=None,
                    accept_callback=None,
                    timeout_callback=None,
                    **compat):
        """Equivalent of the :func:``apply`` built-in function.

        All ``callbacks`` and ``errbacks`` should complete immediately since
        otherwise the thread which handles the result will get blocked.

        """
        args = args or []
        kwargs = kwargs or {}
        callbacks = callbacks or []
        errbacks = errbacks or []

        on_ready = curry(self.on_ready, callbacks, errbacks)
        on_worker_error = curry(self.on_worker_error, errbacks)

        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" %
                          (target, args, kwargs))

        return self._pool.apply_async(target,
                                      args,
                                      kwargs,
                                      callback=on_ready,
                                      accept_callback=accept_callback,
                                      timeout_callback=timeout_callback,
                                      error_callback=on_worker_error,
                                      waitforslot=self.putlocks)
Example #3
def find_nearest_pickleable_exception(exc):
    """With an exception instance, iterate over its super classes (by mro)
    and find the first super exception that is pickleable. It does
    not go below :exc:`Exception` (i.e. it skips :exc:`Exception`,
    :class:`BaseException` and :class:`object`). If that happens
    you should use :exc:`UnpickleableException` instead.

    :param exc: An exception instance.

    :returns: the nearest exception if it's not :exc:`Exception` or below;
        if it is, ``None`` is returned.

    :rtype: :exc:`Exception`

    """

    unwanted = (Exception, BaseException, object)
    is_unwanted = lambda exc: any(map(curry(operator.is_, exc), unwanted))

    mro_ = getattr(exc.__class__, "mro", lambda: [])
    for supercls in mro_():
        if is_unwanted(supercls):
            # only BaseException and object, from here on down,
            # we don't care about these.
            return None
        try:
            exc_args = getattr(exc, "args", [])
            superexc = supercls(*exc_args)
            pickle.dumps(superexc)
        except:
            pass
        else:
            return superexc
    return None
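A hedged usage sketch: an exception that pickle cannot round-trip (because of a lambda attribute) falls back to its nearest pickleable ancestor when walked by the function above. The class names below are invented for illustration.

import pickle

class HttpError(Exception):
    pass

class CustomError(HttpError):
    def __init__(self, msg, callback=lambda: None):
        super(CustomError, self).__init__(msg)
        self.callback = callback        # lambdas cannot be pickled

pickle.dumps(HttpError("boom"))         # fine: the plain ancestor round-trips
# pickle.dumps(CustomError("boom")) would raise a PicklingError, so
# find_nearest_pickleable_exception(CustomError("boom")) walks the MRO
# (CustomError -> HttpError -> Exception -> ...) and returns an
# HttpError("boom") instance, the first ancestor that pickles cleanly.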
Example #4
def fun_takes_kwargs(fun, kwlist=[]):
    """With a function, and a list of keyword arguments, returns arguments
    in the list which the function takes.

    If the object has an ``argspec`` attribute, that is used instead
    of the :func:`inspect.getargspec` introspection.

    :param fun: The function to inspect arguments of.
    :param kwlist: The list of keyword arguments.

    Examples

        >>> def foo(self, x, y, logfile=None, loglevel=None):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel"]

        >>> def foo(self, x, y, **kwargs):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel", "task_id"]

    """
    argspec = getattr(fun, "argspec", getargspec(fun))
    args, _varargs, keywords, _defaults = argspec
    if keywords is not None:
        return kwlist
    return filter(curry(operator.contains, args), kwlist)
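The same filtering idea as a self-contained Python 3 sketch, with functools.partial standing in for curry and inspect.getfullargspec replacing getargspec (removed in Python 3.11):

import operator
from functools import partial
from inspect import getfullargspec

def takes_kwargs(fun, kwlist):
    spec = getfullargspec(fun)
    if spec.varkw is not None:          # **kwargs accepts any keyword
        return list(kwlist)
    return list(filter(partial(operator.contains, spec.args), kwlist))

def foo(self, x, y, logfile=None, loglevel=None):
    return x * y

print(takes_kwargs(foo, ["logfile", "loglevel", "task_id"]))
# ['logfile', 'loglevel']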
Example #5
def fun_takes_kwargs(fun, kwlist=[]):
    """With a function, and a list of keyword arguments, returns arguments
    in the list which the function takes.

    If the object has an ``argspec`` attribute, that is used instead
    of the :func:`inspect.getargspec` introspection.

    :param fun: The function to inspect arguments of.
    :param kwlist: The list of keyword arguments.

    Examples

        >>> def foo(self, x, y, logfile=None, loglevel=None):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel"]

        >>> def foo(self, x, y, **kwargs):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel", "task_id"]

    """
    argspec = getattr(fun, "argspec", getargspec(fun))
    args, _varargs, keywords, _defaults = argspec
    if keywords is not None:
        return kwlist
    return filter(curry(operator.contains, args), kwlist)
Example #6
    def apply_async(self,
                    target,
                    args=None,
                    kwargs=None,
                    callbacks=None,
                    errbacks=None,
                    accept_callback=None,
                    **compat):
        args = args or []
        kwargs = kwargs or {}
        callbacks = callbacks or []
        errbacks = errbacks or []

        on_ready = curry(self.on_ready, callbacks, errbacks)

        self.logger.debug("ThreadPool: Apply %s (args:%s kwargs:%s)" %
                          (target, args, kwargs))

        req = WorkRequest(do_work,
                          (target, args, kwargs, on_ready, accept_callback))
        self._pool.putRequest(req)
        # threadpool also has callback support,
        # but for some reason the callback is not triggered
        # before you've collected the results.
        # Clear the results (if any), so it doesn't grow too large.
        self._pool._results_queue.queue.clear()
        return req
Example #7
    def test_apply_raises_404_on_unregistered_task(self):
        conf.ALWAYS_EAGER = True
        try:
            name = "xxx.does.not.exist"
            action = curry(self.client.get, task_apply(kwargs={
                        "task_name": name}) + "?x=4&y=4")
            self.assertRaises(TemplateDoesNotExist, action)
        finally:
            conf.ALWAYS_EAGER = False
Example #8
    def test_apply_raises_404_on_unregistered_task(self):
        conf.ALWAYS_EAGER = True
        try:
            name = "xxx.does.not.exist"
            action = curry(self.client.get,
                           task_apply(kwargs={"task_name": name}) + "?x=4&y=4")
            self.assertRaises(TemplateDoesNotExist, action)
        finally:
            conf.ALWAYS_EAGER = False
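In these two tests curry only packages the GET request into a zero-argument callable so assertRaises can invoke it later. The same deferred-call pattern as a stdlib-only sketch (functools.partial standing in for curry; the test name and values are made up):

import unittest
from functools import partial

class DeferredCallTest(unittest.TestCase):

    def test_deferred_call(self):
        # partial() packages the call; assertRaises invokes it and
        # verifies that the expected exception is raised.
        action = partial(int, "not-a-number")
        self.assertRaises(ValueError, action)

if __name__ == "__main__":
    unittest.main()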
Example #9
    def apply_async(self, target, args=None, kwargs=None, callbacks=None,
            errbacks=None, accept_callback=None, **compat):
        args = args or []
        kwargs = kwargs or {}
        callbacks = callbacks or []
        errbacks = errbacks or []

        on_ready = curry(self.on_ready, callbacks, errbacks)

        self.logger.debug("ThreadPool: Apply %s (args:%s kwargs:%s)" % (
            target, args, kwargs))

        req = WorkRequest(do_work, (target, args, kwargs, on_ready,
                                    accept_callback))
        self._pool.putRequest(req)
        # threadpool also has callback support,
        # but for some reason the callback is not triggered
        # before you've collected the results.
        # Clear the results (if any), so it doesn't grow too large.
        self._pool._results_queue.queue.clear()
        return req
Example #10
    def apply_async(self, target, args=None, kwargs=None, callbacks=None,
            errbacks=None, on_ack=noop):
        """Equivalent of the :func:``apply`` built-in function.

        All ``callbacks`` and ``errbacks`` should complete immediately since
        otherwise the thread which handles the result will get blocked.

        """
        args = args or []
        kwargs = kwargs or {}
        callbacks = callbacks or []
        errbacks = errbacks or []

        on_ready = curry(self.on_ready, callbacks, errbacks, on_ack)

        self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" % (
            target, args, kwargs))

        self.replace_dead_workers()

        return self._pool.apply_async(target, args, kwargs,
                                        callback=on_ready)
Example #11
def fun_takes_kwargs(fun, kwlist=[]):
    """With a function, and a list of keyword arguments, returns arguments
    in the list which the function takes.

    :param fun: The function to inspect arguments of.
    :param kwlist: The list of keyword arguments.

    Examples

        >>> def foo(self, x, y, logfile=None, loglevel=None):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel"]

        >>> def foo(self, x, y, **kwargs):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel", "task_id"]

    """
    args, _varargs, keywords, _defaults = getargspec(fun)
    if keywords is not None:
        return kwlist
    return filter(curry(operator.contains, args), kwlist)
Example #12
from celery.backends import default_backend
from celery.exceptions import RetryTaskError
from celery.datastructures import ExceptionInfo
from celery.decorators import task
from celery.utils import gen_unique_id, get_full_cls_name
from celery.utils.functional import curry

from djcelery.views import task_webhook
from djcelery.tests.req import MockRequest


def reversestar(name, **kwargs):
    return reverse(name, kwargs=kwargs)


task_is_successful = curry(reversestar, "celery-is_task_successful")
task_status = curry(reversestar, "celery-task_status")
task_apply = curry(reverse, "celery-apply")
registered_tasks = curry(reverse, "celery-tasks")
scratch = {}


@task()
def mytask(x, y):
    ret = scratch["result"] = int(x) * int(y)
    return ret


def create_exception(name, base=Exception):
    return type(name, (base, ), {})
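The curried helpers above only pre-bind the URL name; each call then supplies the view kwargs. A sketch with a stand-in for Django's reverse (the task_id keyword and the printed format are assumptions for illustration):

from functools import partial

def reversestar(name, **kwargs):
    # stand-in for django reverse(name, kwargs=kwargs)
    return "reverse(%r, kwargs=%r)" % (name, kwargs)

task_status = partial(reversestar, "celery-task_status")
print(task_status(task_id="1234"))
# reverse('celery-task_status', kwargs={'task_id': '1234'})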
Example #13
import os
import sys

sys.path.insert(0, os.getcwd())
import time
import unittest2 as unittest
from itertools import chain, izip


from celery.task.base import Task
from celery.utils import timeutils
from celery.utils import gen_unique_id
from celery.utils.functional import curry
from celery.worker import buckets
from celery.registry import TaskRegistry

from celery.tests.utils import skip_if_environ

skip_if_disabled = curry(skip_if_environ("SKIP_RLIMITS"))


class MockJob(object):

    def __init__(self, task_id, task_name, args, kwargs):
        self.task_id = task_id
        self.task_name = task_name
        self.args = args
        self.kwargs = kwargs

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return bool(self.task_id == other.task_id \
                    and self.task_name == other.task_name \
                    and self.args == other.args \
                    and self.kwargs == other.kwargs)
Example #14
from celery.utils.functional import curry

from celery import conf
from celery import states
from celery.utils import gen_unique_id, get_full_cls_name
from celery.backends import default_backend
from celery.exceptions import RetryTaskError
from celery.decorators import task
from celery.datastructures import ExceptionInfo


def reversestar(name, **kwargs):
    return reverse(name, kwargs=kwargs)


task_is_successful = curry(reversestar, "celery-is_task_successful")
task_status = curry(reversestar, "celery-task_status")
task_apply = curry(reverse, "celery-apply")

scratch = {}


@task()
def mytask(x, y):
    ret = scratch["result"] = int(x) * int(y)
    return ret


def create_exception(name, base=Exception):
    return type(name, (base, ), {})
Example #15
def get_backend_cls(backend):
    """Get backend class by name/alias"""
    if backend not in _backend_cache:
        aliases = dict(BACKEND_ALIASES, **current_loader().override_backends)
        _backend_cache[backend] = get_cls_by_name(backend, aliases)
    return _backend_cache[backend]


"""
.. function:: get_default_backend_cls()

    Get the backend class specified in the ``CELERY_RESULT_BACKEND`` setting.

"""
get_default_backend_cls = curry(get_backend_cls, conf.RESULT_BACKEND)


"""
.. class:: DefaultBackend

    The default backend class used for storing task results and status,
    specified in the ``CELERY_RESULT_BACKEND`` setting.

"""
DefaultBackend = get_default_backend_cls()

"""
.. data:: default_backend

    An instance of :class:`DefaultBackend`.

"""
default_backend = DefaultBackend()
Example #16
import sys
import time
import unittest2 as unittest

from itertools import chain, izip

from celery.registry import TaskRegistry
from celery.task.base import Task
from celery.utils import timeutils
from celery.utils import gen_unique_id
from celery.utils.functional import curry
from celery.worker import buckets

from celery.tests.utils import skip_if_environ

skip_if_disabled = curry(skip_if_environ("SKIP_RLIMITS"))


class MockJob(object):
    def __init__(self, task_id, task_name, args, kwargs):
        self.task_id = task_id
        self.task_name = task_name
        self.args = args
        self.kwargs = kwargs

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return bool(self.task_id == other.task_id \
                    and self.task_name == other.task_name \
                    and self.args == other.args \
                    and self.kwargs == other.kwargs)
Example #17
def get_backend_cls(backend):
    """Get backend class by name/alias"""
    if backend not in _backend_cache:
        aliases = dict(BACKEND_ALIASES, **current_loader().override_backends)
        _backend_cache[backend] = get_cls_by_name(backend, aliases)
    return _backend_cache[backend]


"""
.. function:: get_default_backend_cls()

    Get the backend class specified in the ``CELERY_RESULT_BACKEND`` setting.

"""
get_default_backend_cls = curry(get_backend_cls, conf.RESULT_BACKEND)
"""
.. class:: DefaultBackend

    The default backend class used for storing task results and status,
    specified in the ``CELERY_RESULT_BACKEND`` setting.

"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend

    An instance of :class:`DefaultBackend`.

"""
default_backend = DefaultBackend()
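get_default_backend_cls is again just partial application over a memoizing lookup. An isolated sketch of that pattern; the alias table and default name below are invented, not Celery's configuration:

from functools import partial

_backend_cache = {}
BACKEND_ALIASES = {"dict": "DictBackend"}       # hypothetical alias table

def get_backend_cls(backend):
    if backend not in _backend_cache:
        _backend_cache[backend] = BACKEND_ALIASES.get(backend, backend)
    return _backend_cache[backend]

get_default_backend_cls = partial(get_backend_cls, "dict")
print(get_default_backend_cls())                # 'DictBackend'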
Example #18
def apply_async(task, args=None, kwargs=None, countdown=None, eta=None,
        routing_key=None, exchange=None, task_id=None,
        immediate=None, mandatory=None, priority=None, connection=None,
        connect_timeout=AMQP_CONNECTION_TIMEOUT, serializer=None, **opts):
    """Run a task asynchronously by the celery daemon(s).

    :param task: The task to run (a callable object, or a :class:`Task`
        instance).

    :param args: The positional arguments to pass on to the task (a ``list``).

    :param kwargs: The keyword arguments to pass on to the task (a ``dict``).

    :param countdown: Number of seconds into the future that the task should
        execute. Defaults to immediate delivery (Do not confuse that with
        the ``immediate`` setting, they are unrelated).

    :param eta: A :class:`datetime.datetime` object that describes the
        absolute time when the task should execute. May not be specified
        if ``countdown`` is also supplied. (Do not confuse this with the
        ``immediate`` setting, they are unrelated).

    :keyword routing_key: The routing key used to route the task to a worker
        server.

    :keyword exchange: The named exchange to send the task to. Defaults to
        :attr:`celery.task.base.Task.exchange`.

    :keyword immediate: Request immediate delivery. Will raise an exception
        if the task cannot be routed to a worker immediately.
        (Do not confuse this parameter with the ``countdown`` and ``eta``
        settings, as they are unrelated).

    :keyword mandatory: Mandatory routing. Raises an exception if there are
        no running workers able to take on this task.

    :keyword connection: Re-use existing AMQP connection.
        The ``connect_timeout`` argument is not respected if this is set.

    :keyword connect_timeout: The timeout in seconds, before we give up
        on establishing a connection to the AMQP server.

    :keyword priority: The task priority, a number between ``0`` and ``9``.

    :keyword serializer: A string identifying the default serialization
        method to use. Defaults to the ``CELERY_TASK_SERIALIZER`` setting.
        Can be ``pickle``, ``json``, ``yaml``, or any custom serialization
        method that has been registered with
        :mod:`carrot.serialization.registry`.

    """
    args = args or []
    kwargs = kwargs or {}
    routing_key = routing_key or getattr(task, "routing_key", None)
    exchange = exchange or getattr(task, "exchange", None)
    if immediate is None:
        immediate = getattr(task, "immediate", None)
    if mandatory is None:
        mandatory = getattr(task, "mandatory", None)
    if priority is None:
        priority = getattr(task, "priority", None)
    serializer = serializer or getattr(task, "serializer", None)
    taskset_id = opts.get("taskset_id")
    publisher = opts.get("publisher")
    retries = opts.get("retries", 0)
    if countdown:
        eta = datetime.now() + timedelta(seconds=countdown)

    from celery.conf import ALWAYS_EAGER
    if ALWAYS_EAGER:
        return apply(task, args, kwargs)

    need_to_close_connection = False
    if not publisher:
        if not connection:
            connection = DjangoBrokerConnection(
                            connect_timeout=connect_timeout)
            need_to_close_connection = True
        publisher = TaskPublisher(connection=connection)

    delay_task = publisher.delay_task
    if taskset_id:
        delay_task = curry(publisher.delay_task_in_set, taskset_id)

    task_id = delay_task(task.name, args, kwargs,
                         task_id=task_id, retries=retries,
                         routing_key=routing_key, exchange=exchange,
                         mandatory=mandatory, immediate=immediate,
                         serializer=serializer, priority=priority,
                         eta=eta)

    if need_to_close_connection:
        publisher.close()
        connection.close()

    return AsyncResult(task_id)
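The taskset branch is the interesting use of curry here: binding taskset_id to publisher.delay_task_in_set gives delay_task the same call signature in both branches, so the publishing call stays identical either way. A sketch with stub publisher methods (the method names mirror the snippet; the bodies and ids are invented):

from functools import partial

class StubPublisher(object):

    def delay_task(self, name, args, kwargs, **options):
        return "sent %s" % name

    def delay_task_in_set(self, taskset_id, name, args, kwargs, **options):
        return "sent %s as part of taskset %s" % (name, taskset_id)

publisher = StubPublisher()
taskset_id = "a1b2"                      # hypothetical taskset id

delay_task = publisher.delay_task
if taskset_id:
    delay_task = partial(publisher.delay_task_in_set, taskset_id)

print(delay_task("tasks.add", [2, 2], {}, task_id=None))
# sent tasks.add as part of taskset a1b2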