Example #1
def start_worker(
        app,
        concurrency=1,
        pool='solo',
        loglevel=log_level,
        logfile=None,
        **kwargs):
    from celery.utils.dispatch import Signal

    test_worker_starting = Signal(
        name='test_worker_starting',
        providing_args={},
    )
    test_worker_stopped = Signal(
        name='test_worker_stopped',
        providing_args={'worker'},
    )
    test_worker_starting.send(sender=app)

    with _start_worker_thread(app,
                              concurrency=concurrency,
                              pool=pool,
                              loglevel=loglevel,
                              logfile=logfile,
                              **kwargs) as worker:
        yield worker

    test_worker_stopped.send(sender=app, worker=worker)
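A minimal usage sketch, not part of the snippet above: it assumes start_worker is exposed as a context manager in celery.contrib.testing.worker (as in upstream Celery) and that an in-memory broker is acceptable for the test app.

from celery import Celery
from celery.contrib.testing.worker import start_worker, test_worker_stopped
import celery.contrib.testing.tasks  # noqa: registers the ping task used by the readiness check

app = Celery('integration-tests', broker='memory://', backend='cache+memory://')

@test_worker_stopped.connect
def on_stopped(sender=None, worker=None, **kwargs):
    # Fires after the embedded worker thread has shut down.
    print('embedded worker stopped:', worker)

with start_worker(app) as worker:
    ...  # drive integration-test tasks against the embedded worker here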
Example #2
File: snapshot.py  Project: jokar/minion
class Polaroid(object):
    shutter_signal = Signal(providing_args=("state", ))
    cleanup_signal = Signal()
    clear_after = False

    _tref = None
    _ctref = None

    def __init__(self, state, freq=1.0, maxrate=None,
                 cleanup_freq=3600.0, logger=None):
        self.state = state
        self.freq = freq
        self.cleanup_freq = cleanup_freq
        self.logger = logger or log.get_default_logger(name="celery.cam")
        self.maxrate = maxrate and TokenBucket(rate(maxrate))

    def install(self):
        self._tref = timer2.apply_interval(self.freq * 1000.0,
                                           self.capture)
        self._ctref = timer2.apply_interval(self.cleanup_freq * 1000.0,
                                            self.cleanup)

    def on_shutter(self, state):
        pass

    def on_cleanup(self):
        pass

    def cleanup(self):
        self.debug("Cleanup: Running...")
        self.cleanup_signal.send(None)
        self.on_cleanup()

    def debug(self, msg):
        if self.logger:
            self.logger.debug(msg)

    def shutter(self):
        if self.maxrate is None or self.maxrate.can_consume():
            self.debug("Shutter: %s" % (self.state, ))
            self.shutter_signal.send(self.state)
            self.on_shutter(self.state)

    def capture(self):
        self.state.freeze_while(self.shutter, clear_after=self.clear_after)

    def cancel(self):
        if self._tref:
            self._tref()
            self._tref.cancel()
        if self._ctref:
            self._ctref.cancel()

    def __enter__(self):
        self.install()
        return self

    def __exit__(self, *exc_info):
        self.cancel()
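A hedged sketch of hooking into the class-level signal; the receiver is ours and `state` stands for whatever monitor object the caller records.

def on_shutter(sender, **kwargs):
    # `sender` is the object passed positionally to shutter_signal.send(self.state).
    print('snapshot taken for:', sender)

Polaroid.shutter_signal.connect(on_shutter)

# The class is a context manager: __enter__() installs the interval timers and
# __exit__() cancels them, so a camera is typically driven as
#     with Polaroid(state, freq=2.0):
#         ...  # capture() then fires every `freq` seconds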
Example #3
class Polaroid(object):
    timer = timer2
    shutter_signal = Signal(providing_args=('state', ))
    cleanup_signal = Signal()
    clear_after = False

    _tref = None
    _ctref = None

    def __init__(self, state, freq=1.0, maxrate=None,
                 cleanup_freq=3600.0, timer=None, app=None):
        self.app = app_or_default(app)
        self.state = state
        self.freq = freq
        self.cleanup_freq = cleanup_freq
        self.timer = timer or self.timer
        self.logger = logger
        self.maxrate = maxrate and TokenBucket(rate(maxrate))

    def install(self):
        self._tref = self.timer.apply_interval(self.freq * 1000.0,
                                               self.capture)
        self._ctref = self.timer.apply_interval(self.cleanup_freq * 1000.0,
                                                self.cleanup)

    def on_shutter(self, state):
        pass

    def on_cleanup(self):
        pass

    def cleanup(self):
        logger.debug('Cleanup: Running...')
        self.cleanup_signal.send(None)
        self.on_cleanup()

    def shutter(self):
        if self.maxrate is None or self.maxrate.can_consume():
            logger.debug('Shutter: %s', self.state)
            self.shutter_signal.send(self.state)
            self.on_shutter(self.state)

    def capture(self):
        self.state.freeze_while(self.shutter, clear_after=self.clear_after)

    def cancel(self):
        if self._tref:
            self._tref()  # flush all received events.
            self._tref.cancel()
        if self._ctref:
            self._ctref.cancel()

    def __enter__(self):
        self.install()
        return self

    def __exit__(self, *exc_info):
        self.cancel()
Example #4
class Camera(object):
    clear_after = False
    _tref = None
    shutter_signal = Signal(providing_args=('state', ))

    def __init__(self, state, config):
        self.state = state
        self.timer = Timer()
        self.freq = config.getfloat('camera', 'frequency')

    def install(self):
        self._tref = self.timer.call_repeatedly(self.freq, self.capture)

    def on_shutter(self, monitor):
        pass

    def after_shutter(self):
        pass

    def capture(self):
        self.state.freeze_while(self.shutter, clear_after=self.clear_after)
        self.after_shutter()

    def shutter(self):
        self.shutter_signal.send(self.state)
        self.on_shutter(self.state)

    def cancel(self):
        if self._tref:
            self._tref()  # flush all received events.
            self._tref.cancel()

    def __enter__(self):
        self.install()
        return self

    def __exit__(self, *exc_info):
        self.cancel()
        return True
Example #5
"""cyme.branch.signals"""

from __future__ import absolute_import

from celery.utils.dispatch import Signal

#: Sent when the http server is ready to accept requests.
#: Arguments:
#:
#:     :sender: the :class:`~cyme.httpd.HttpServer` instance.
#:     :addrport: the ``(hostname, port)`` tuple.
#:     :handler: the WSGI handler used.
#:     :sock: the socket used.
httpd_ready = Signal(providing_args=['addrport', 'handler', 'sock'])

#: Sent when the supervisor is ready.
#: Arguments:
#:
#:     :sender: is the :class:`~cyme.supervisor.Supervisor` instance.
supervisor_ready = Signal()

#: Sent when a controller is ready.
#:
#: Arguments:
#:     :sender: is the :class:`~cyme.controller.Controller` instance.
controller_ready = Signal()

#: Sent when the branch and all its components are ready to serve.
#:
#: Arguments:
#:     :sender: is the :class:`~cyme.branch.Branch` instance.
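A hedged sketch of subscribing to one of the signals above; the receiver name is ours.

from cyme.branch import signals

def announce_httpd(sender=None, addrport=None, handler=None, sock=None, **kwargs):
    # `sender` is the HttpServer instance, `addrport` the (hostname, port) tuple.
    host, port = addrport
    print('HTTP server ready on %s:%s' % (host, port))

signals.httpd_ready.connect(announce_httpd)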
Example #6
    def __init__(self,
                 main=None,
                 loader=None,
                 backend=None,
                 amqp=None,
                 events=None,
                 log=None,
                 control=None,
                 set_as_current=True,
                 tasks=None,
                 broker=None,
                 include=None,
                 changes=None,
                 config_source=None,
                 fixups=None,
                 task_cls=None,
                 autofinalize=True,
                 **kwargs):
        self.clock = LamportClock()
        self.main = main
        self.amqp_cls = amqp or self.amqp_cls
        self.events_cls = events or self.events_cls
        self.loader_cls = loader or self.loader_cls
        self.log_cls = log or self.log_cls
        self.control_cls = control or self.control_cls
        self.task_cls = task_cls or self.task_cls
        self.set_as_current = set_as_current
        self.registry_cls = symbol_by_name(self.registry_cls)
        self.user_options = defaultdict(set)
        self.steps = defaultdict(set)
        self.autofinalize = autofinalize

        self.configured = False
        self._config_source = config_source
        self._pending_defaults = deque()
        self._pending_periodic_tasks = deque()

        self.finalized = False
        self._finalize_mutex = threading.Lock()
        self._pending = deque()
        self._tasks = tasks
        if not isinstance(self._tasks, TaskRegistry):
            self._tasks = TaskRegistry(self._tasks or {})

        # If the class defines a custom __reduce_args__ we need to use
        # the old way of pickling apps, which is pickling a list of
        # args instead of the new way that pickles a dict of keywords.
        self._using_v1_reduce = app_has_custom(self, '__reduce_args__')

        # these options are moved to the config to
        # simplify pickling of the app object.
        self._preconf = changes or {}
        if broker:
            self._preconf['BROKER_URL'] = broker
        if backend:
            self._preconf['CELERY_RESULT_BACKEND'] = backend
        if include:
            self._preconf['CELERY_IMPORTS'] = include

        # - Apply fixups.
        self.fixups = set(self.builtin_fixups) if fixups is None else fixups
        # ...store fixup instances in _fixups to keep weakrefs alive.
        self._fixups = [symbol_by_name(fixup)(self) for fixup in self.fixups]

        if self.set_as_current:
            self.set_current()

        # Signals
        if self.on_configure is None:
            # used to be a method pre 3.2
            self.on_configure = Signal()
        self.on_after_configure = Signal()
        self.on_after_finalize = Signal()

        self.on_init()
        _register_app(self)
Example #7
from celery.utils.dispatch import Signal

subtask_success = Signal(
    name='subtask_success',
    providing_args=['root_id', 'task_id', 'retval'],
)
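A sketch of how such a custom signal is typically wired up; the receiver and the send() call below are illustrative, not taken from the source.

@subtask_success.connect
def log_subtask_success(sender=None, root_id=None, task_id=None,
                        retval=None, **kwargs):
    print('subtask %s (root %s) returned %r' % (task_id, root_id, retval))

# Publisher side, wherever a subtask finishes:
subtask_success.send(sender=None, root_id='root-uuid', task_id='task-uuid',
                     retval=42)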
Example #8
    def __init__(self,
                 main=None,
                 loader=None,
                 backend=None,
                 amqp=None,
                 events=None,
                 log=None,
                 control=None,
                 set_as_current=True,
                 tasks=None,
                 broker=None,
                 include=None,
                 changes=None,
                 config_source=None,
                 fixups=None,
                 task_cls=None,
                 autofinalize=True,
                 namespace=None,
                 strict_typing=True,
                 **kwargs):
        self.clock = LamportClock()
        self.main = main
        self.amqp_cls = amqp or self.amqp_cls
        self.events_cls = events or self.events_cls
        self.loader_cls = loader or self._get_default_loader()
        self.log_cls = log or self.log_cls
        self.control_cls = control or self.control_cls
        self.task_cls = task_cls or self.task_cls
        self.set_as_current = set_as_current
        self.registry_cls = symbol_by_name(self.registry_cls)
        self.user_options = defaultdict(set)
        self.steps = defaultdict(set)
        self.autofinalize = autofinalize
        self.namespace = namespace
        self.strict_typing = strict_typing

        self.configured = False
        self._config_source = config_source
        self._pending_defaults = deque()
        self._pending_periodic_tasks = deque()

        self.finalized = False
        self._finalize_mutex = threading.Lock()
        self._pending = deque()
        self._tasks = tasks
        if not isinstance(self._tasks, TaskRegistry):
            self._tasks = self.registry_cls(self._tasks or {})

        # If the class defines a custom __reduce_args__ we need to use
        # the old way of pickling apps: pickling a list of
        # args instead of the new way that pickles a dict of keywords.
        self._using_v1_reduce = app_has_custom(self, '__reduce_args__')

        # these options are moved to the config to
        # simplify pickling of the app object.
        self._preconf = changes or {}
        self._preconf_set_by_auto = set()
        self.__autoset('broker_url', broker)
        self.__autoset('result_backend', backend)
        self.__autoset('include', include)
        self._conf = Settings(
            PendingConfiguration(self._preconf, self._finalize_pending_conf),
            prefix=self.namespace,
            keys=(_old_key_to_new, _new_key_to_old),
        )

        # - Apply fix-ups.
        self.fixups = set(self.builtin_fixups) if fixups is None else fixups
        # ...store fixup instances in _fixups to keep weakrefs alive.
        self._fixups = [symbol_by_name(fixup)(self) for fixup in self.fixups]

        if self.set_as_current:
            self.set_current()

        # Signals
        if self.on_configure is None:
            # used to be a method pre 4.0
            self.on_configure = Signal(name='app.on_configure')
        self.on_after_configure = Signal(
            name='app.on_after_configure',
            providing_args={'source'},
        )
        self.on_after_finalize = Signal(name='app.on_after_finalize')
        self.on_after_fork = Signal(name='app.on_after_fork')

        self.on_init()
        _register_app(self)
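The instance-level signals created at the end are what application code hooks into; a minimal sketch using the decorator form (the periodic task is a placeholder, not from the source).

from celery import Celery

app = Celery('proj')

@app.task
def some_task():
    return 'ok'

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # `sender` is the app instance; fires once the configuration has been loaded.
    sender.add_periodic_task(10.0, some_task.s(), name='run every 10 seconds')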
Example #9
"""Definitions of custom :doc:`Celery signals <celery:userguide/signals>`
related to VOEvents.

These signals allow us to keep the VOEvent broker code decoupled from any
GCN-specific logic. Notably, it allows us to keep all of the details of
the GCN-specific "Notice Type" concept out of :mod:`gwcelery.voevent`.
"""
from celery.utils.dispatch import Signal

lvalert_received = Signal(name='lvalert_received',
                          providing_args=('node', 'payload'))
"""Fired whenever a VOEvent is received.

Parameters
----------
xml_document : :class:`comet.utility.xml.xml_document`
    The XML document that was received. The raw file contents are available as
    ``xml_document.raw_bytes``. The ``lxml.etree`` representation of the
    document is available as ``xml_document.element``.
"""
Example #10
"""Embedded workers for integration tests."""
from __future__ import absolute_import, unicode_literals
import os
import threading
from contextlib import contextmanager
from celery import worker
from celery.result import allow_join_result, _set_task_join_will_block
from celery.utils.dispatch import Signal
from celery.utils.nodenames import anon_nodename

WORKER_LOGLEVEL = os.environ.get('WORKER_LOGLEVEL', 'error')

test_worker_starting = Signal(
    name='test_worker_starting',
    providing_args={},
)
test_worker_started = Signal(
    name='test_worker_started',
    providing_args={'worker', 'consumer'},
)
test_worker_stopped = Signal(
    name='test_worker_stopped',
    providing_args={'worker'},
)


class TestWorkController(worker.WorkController):
    """Worker that can synchronize on being fully started."""
    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        self._on_started = threading.Event()
Example #11
File: celery.py  Project: petli/catalog
             broker=config.CATALOG_BROKER_URL)

# Use configuration provided by user
app.config_from_object(config)

# And set some technical stuff that the user shouldn't be allowed to touch
app.conf.update(
    CELERY_TASK_SERIALIZER='json',
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_RESULT_SERIALIZER='json',
    CELERY_RESULT_BACKEND='amqp',
    CELERY_TASK_RESULT_EXPIRES=30,
    CELERY_TASK_RESULT_DURABLE=False,
)

on_create_work = Signal(providing_args=('timestamp', 'user_uri', 'work_uri',
                                        'work_data'))
on_update_work = Signal(providing_args=('timestamp', 'user_uri', 'work_uri',
                                        'work_data'))
on_delete_work = Signal(providing_args=('timestamp', 'user_uri', 'work_uri'))
on_create_work_source = Signal(providing_args=('timestamp', 'user_uri',
                                               'work_uri', 'source_uri',
                                               'source_data'))
on_create_stock_source = Signal(providing_args=('timestamp', 'user_uri',
                                                'source_uri', 'source_data'))
on_update_source = Signal(providing_args=('timestamp', 'user_uri',
                                          'source_uri', 'source_data'))
on_delete_source = Signal(providing_args=('timestamp', 'user_uri',
                                          'source_uri'))
on_create_post = Signal(providing_args=('timestamp', 'user_uri', 'work_uri',
                                        'post_uri', 'post_data'))
on_update_post = Signal(providing_args=('timestamp', 'user_uri', 'post_uri',
Example #12
"""Definitions of custom :doc:`Celery signals <celery:userguide/signals>`
related to emails.

These signals allow us to keep the VOEvent validation code decoupled from the
email client itself.
"""
from celery.utils.dispatch import Signal

email_received = Signal(name='email_received', providing_args=('rfc822', ))
"""Fired whenever an email message is received.

Parameters
----------
rfc822 : bytes
    The :rfc:`822` contents of the message.

Examples
--------

Register an email listener like this::

    import email
    import email.policy

    @email_received.connect
    def on_email_received(rfc822, **kwargs):
        # Parse the RFC822 email.
        message = email.message_from_bytes(rfc822, policy=email.policy.default)
        # Print some of the message headers.
        print('Subject:', message['Subject'])
        print('From:', message['From'])
Example #13
"""Definitions of custom :doc:`Celery signals <celery:userguide/signals>`
related to VOEvents.

These signals allow us to keep the VOEvent broker code decoupled from any
GCN-specific logic. Notably, it allows us to keep all of the details of
the GCN-specific "Notice Type" concept out of :mod:`gwcelery.voevent`.
"""
from celery.utils.dispatch import Signal

voevent_received = Signal(name='voevent_received',
                          providing_args=('xml_document', ))
"""Fired whenever a VOEvent is received.

Parameters
----------
xml_document : :class:`comet.utility.xml.xml_document`
    The XML document that was received. The raw file contents are available as
    ``xml_document.raw_bytes``. The ``lxml.etree`` representation of the
    document is available as ``xml_document.element``.
"""
Example #14
    'beat_init',
    'beat_embedded_init',
    'heartbeat_sent',
    'eventlet_pool_started',
    'eventlet_pool_preshutdown',
    'eventlet_pool_postshutdown',
    'eventlet_pool_apply',
)

# - Task
before_task_publish = Signal(
    name='before_task_publish',
    providing_args={
        'body',
        'exchange',
        'routing_key',
        'headers',
        'properties',
        'declare',
        'retry_policy',
    },
)
after_task_publish = Signal(
    name='after_task_publish',
    providing_args={'body', 'exchange', 'routing_key'},
)
task_prerun = Signal(
    name='task_prerun',
    providing_args={'task_id', 'task', 'args', 'kwargs'},
)
task_postrun = Signal(
    name='task_postrun',
Example #15
from celery.utils.dispatch import Signal

task_sent = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "eta", "taskset"])

task_prerun = Signal(providing_args=["task_id", "task", "args", "kwargs"])

task_postrun = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "retval"])

worker_init = Signal(providing_args=[])
worker_process_init = Signal(providing_args=[])
worker_ready = Signal(providing_args=[])
worker_shutdown = Signal(providing_args=[])
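Assuming these definitions live in the public celery.signals module, receivers are typically attached with the decorator form; the handler names here are ours.

from celery.signals import task_prerun, task_postrun

@task_prerun.connect
def log_task_start(sender=None, task_id=None, task=None, args=None,
                   kwargs=None, **extra):
    print('starting %s[%s]' % (task, task_id))

@task_postrun.connect
def log_task_done(sender=None, task_id=None, task=None, retval=None, **extra):
    print('finished %s[%s] -> %r' % (task, task_id, retval))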
Example #16
import logging

import requests
from celery import task
from celery.signals import celeryd_init
from celery.utils.dispatch import Signal
from .models import SensorManager

from .constants import SENSOR_DATA_URI, WEATHER_DATA_URI, EXTERNAL_API_TIMEOUT

sensor_fetched = Signal(providing_args=['sensor_id', 'timestamp', 'value'])
weather_fetched = Signal(providing_args=['temperature'])


def write_sensor_event(sender, sensor_id: str, timestamp: int, value: float,
                       *args, **kwargs):
    SensorManager.write_sensor_event(sensor_id, timestamp, value)


def update_statistics(sender, sensor_id: str, timestamp, value: float, *args,
                      **kwargs):
    SensorManager.update_sensor_statistics(sensor_id, value)


def update_helsinki_temperature(sender, temperature: float, *args, **kwargs):
    SensorManager.update_helsinki_temperature(temperature)


@celeryd_init.connect
def init_signals(*args, **kwargs):
    sensor_fetched.connect(write_sensor_event,
Example #17
        gc.collect()


def receiver_1_arg(val, **kwargs):
    return val


class Callable(object):
    def __call__(self, val, **kwargs):
        return val

    def a(self, val, **kwargs):
        return val


a_signal = Signal(providing_args=["val"])


class DispatcherTests(Case):
    """Test suite for dispatcher (barely started)"""
    def _testIsClean(self, signal):
        """Assert that everything has been cleaned up automatically"""
        self.assertEqual(signal.receivers, [])

        # force cleanup just in case
        signal.receivers = []

    def testExact(self):
        a_signal.connect(receiver_1_arg, sender=self)
        expected = [(receiver_1_arg, "test")]
        result = a_signal.send(sender=self, val="test")
Example #18
class Polaroid:
    """Record event snapshots."""

    timer = None
    shutter_signal = Signal(name='shutter_signal', providing_args={'state'})
    cleanup_signal = Signal(name='cleanup_signal')
    clear_after = False

    _tref = None
    _ctref = None

    def __init__(self,
                 state,
                 freq=1.0,
                 maxrate=None,
                 cleanup_freq=3600.0,
                 timer=None,
                 app=None):
        self.app = app_or_default(app)
        self.state = state
        self.freq = freq
        self.cleanup_freq = cleanup_freq
        self.timer = timer or self.timer or Timer()
        self.logger = logger
        self.maxrate = maxrate and TokenBucket(rate(maxrate))

    def install(self):
        self._tref = self.timer.call_repeatedly(self.freq, self.capture)
        self._ctref = self.timer.call_repeatedly(
            self.cleanup_freq,
            self.cleanup,
        )

    def on_shutter(self, state):
        pass

    def on_cleanup(self):
        pass

    def cleanup(self):
        logger.debug('Cleanup: Running...')
        self.cleanup_signal.send(sender=self.state)
        self.on_cleanup()

    def shutter(self):
        if self.maxrate is None or self.maxrate.can_consume():
            logger.debug('Shutter: %s', self.state)
            self.shutter_signal.send(sender=self.state)
            self.on_shutter(self.state)

    def capture(self):
        self.state.freeze_while(self.shutter, clear_after=self.clear_after)

    def cancel(self):
        if self._tref:
            self._tref()  # flush all received events.
            self._tref.cancel()
        if self._ctref:
            self._ctref.cancel()

    def __enter__(self):
        self.install()
        return self

    def __exit__(self, *exc_info):
        self.cancel()
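The intended extension point is subclassing and overriding on_shutter(); a hedged sketch (DumpCam and its body are ours) assuming `state` is a celery.events.state.State snapshot.

class DumpCam(Polaroid):
    clear_after = True  # reset event counts after each shot

    def on_shutter(self, state):
        print('workers: %d, tasks: %d' % (len(state.workers), len(state.tasks)))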
Example #19
"""Embedded workers for integration tests."""
from __future__ import absolute_import, unicode_literals

import os
import threading

from contextlib import contextmanager

from celery import worker
from celery.result import allow_join_result, _set_task_join_will_block
from celery.utils.dispatch import Signal
from celery.utils.nodenames import anon_nodename

test_worker_starting = Signal(providing_args=[])
test_worker_started = Signal(providing_args=['worker', 'consumer'])
test_worker_stopped = Signal(providing_args=['worker'])

WORKER_LOGLEVEL = os.environ.get('WORKER_LOGLEVEL', 'error')


class TestWorkController(worker.WorkController):
    """Worker that can synchronize on being fully started."""
    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        self._on_started = threading.Event()
        super(TestWorkController, self).__init__(*args, **kwargs)

    def on_consumer_ready(self, consumer):
        # type: (celery.worker.consumer.Consumer) -> None
        """Callback called when the Consumer blueprint is fully started."""
        self._on_started.set()
Example #20
File: signals.py  Project: WoLpH/celery
    The level of the logging object.

* logfile
    The name of the logfile.

* format
    The log format string.

* colorize
    Specify if log messages are colored or not.


"""
from celery.utils.dispatch import Signal

task_sent = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "eta", "taskset"])

task_prerun = Signal(providing_args=["task_id", "task", "args", "kwargs"])

task_postrun = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "retval"])

task_failure = Signal(providing_args=[
    "task_id", "exception", "args", "kwargs", "traceback", "einfo"
])

worker_init = Signal(providing_args=[])
worker_process_init = Signal(providing_args=[])
worker_ready = Signal(providing_args=[])
worker_shutdown = Signal(providing_args=[])
Example #21
        gc.collect()


def receiver_1_arg(val, **kwargs):
    return val


class Callable(object):
    def __call__(self, val, **kwargs):
        return val

    def a(self, val, **kwargs):
        return val


a_signal = Signal(providing_args=['val'])


class DispatcherTests(Case):
    """Test suite for dispatcher (barely started)"""
    def _testIsClean(self, signal):
        """Assert that everything has been cleaned up automatically"""
        self.assertEqual(signal.receivers, [])

        # force cleanup just in case
        signal.receivers = []

    def testExact(self):
        a_signal.connect(receiver_1_arg, sender=self)
        expected = [(receiver_1_arg, 'test')]
        result = a_signal.send(sender=self, val='test')
Example #22
"""Embedded workers for integration tests."""
from __future__ import absolute_import, unicode_literals

import os
import threading
from contextlib import contextmanager

from celery import worker
from celery.result import _set_task_join_will_block, allow_join_result
from celery.utils.dispatch import Signal
from celery.utils.nodenames import anon_nodename

WORKER_LOGLEVEL = os.environ.get("WORKER_LOGLEVEL", "error")

test_worker_starting = Signal(
    name="test_worker_starting",
    providing_args={},
)
test_worker_started = Signal(
    name="test_worker_started",
    providing_args={"worker", "consumer"},
)
test_worker_stopped = Signal(
    name="test_worker_stopped",
    providing_args={"worker"},
)


class TestWorkController(worker.WorkController):
    """Worker that can synchronize on being fully started."""

    def __init__(self, *args, **kwargs):
Example #23
        gc.collect()


def receiver_1_arg(val, **kwargs):
    return val


class Callable(object):
    def __call__(self, val, **kwargs):
        return val

    def a(self, val, **kwargs):
        return val


a_signal = Signal(providing_args=['val'], use_caching=False)


class test_Signal:
    """Test suite for dispatcher (barely started)"""
    def _testIsClean(self, signal):
        """Assert that everything has been cleaned up automatically"""
        assert not signal.has_listeners()
        assert signal.receivers == []

    def test_exact(self):
        a_signal.connect(receiver_1_arg, sender=self)
        try:
            expected = [(receiver_1_arg, 'test')]
            result = a_signal.send(sender=self, val='test')
            assert result == expected
Example #24
File: signals.py  Project: jokar/minion
from celery.utils.dispatch import Signal

task_sent = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "eta", "taskset"])

task_prerun = Signal(providing_args=["task_id", "task", "args", "kwargs"])

task_postrun = Signal(
    providing_args=["task_id", "task", "args", "kwargs", "retval"])

worker_init = Signal(providing_args=[])
worker_process_init = Signal(providing_args=[])
worker_ready = Signal(providing_args=[])
worker_shutdown = Signal(providing_args=[])

setup_logging = Signal(
    providing_args=["loglevel", "logfile", "format", "colorize"])