Example #1
def process_initializer(app, hostname):
    """Pool child process initializer.

    This will initialize a child pool process to ensure the correct
    app instance is used and that things like logging work.

    """
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title('celeryd', hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.loader.init_worker()
    app.loader.init_worker_process()
    app.log.setup(int(os.environ.get('CELERY_LOG_LEVEL', 0) or 0),
                  os.environ.get('CELERY_LOG_FILE') or None,
                  bool(os.environ.get('CELERY_LOG_REDIRECT', False)),
                  str(os.environ.get('CELERY_LOG_REDIRECT_LEVEL')))
    if os.environ.get('FORKED_BY_MULTIPROCESSING'):
        # pool did execv after fork
        trace.setup_worker_optimizations(app)
    else:
        app.set_current()
        set_default_app(app)
        app.finalize()
        trace._tasks = app._tasks  # enables fast_trace_task optimization.
    # rebuild execution handler for all tasks.
    from celery.app.trace import build_tracer
    for name, task in items(app.tasks):
        task.__trace__ = build_tracer(name, task, app.loader, hostname,
                                      app=app)
    signals.worker_process_init.send(sender=None)
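
Note: the initializer above is not called directly; it is passed to the worker pool as an initializer callback so that it runs once inside every child process. A minimal sketch of that wiring, assuming the process_initializer defined above is in scope and using the stdlib multiprocessing.Pool (Celery itself uses its billiard fork, which accepts the same initializer/initargs parameters):

import multiprocessing

from celery import Celery

app = Celery('proj')  # assumed application instance

if __name__ == '__main__':
    # Each child runs process_initializer(app, hostname) exactly once on start-up.
    pool = multiprocessing.Pool(
        processes=4,
        initializer=process_initializer,
        initargs=(app, 'worker1@example.com'),  # illustrative nodename
    )
    pool.close()
    pool.join()
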
Example #2
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks."""
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.maybe_patch_process_group()
    platforms.set_mp_process_title('celeryd', hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.log.setup(int(os.environ.get('CELERY_LOG_LEVEL', 0)),
                  os.environ.get('CELERY_LOG_FILE') or None,
                  bool(os.environ.get('CELERY_LOG_REDIRECT', False)),
                  str(os.environ.get('CELERY_LOG_REDIRECT_LEVEL')))
    app.loader.init_worker()
    app.loader.init_worker_process()
    if os.environ.get('FORKED_BY_MULTIPROCESSING'):
        # pool did execv after fork
        trace.setup_worker_optimizations(app)
    else:
        app.set_current()
        set_default_app(app)
        app.finalize()
        trace._tasks = app._tasks  # enables fast_trace_task optimization.
    from celery.task.trace import build_tracer
    for name, task in items(app.tasks):
        task.__trace__ = build_tracer(name, task, app.loader, hostname)
    signals.worker_process_init.send(sender=None)
Example #3
def process_initializer(app, hostname):
    """Pool child process initializer."""
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.loader.init_worker()
    app.loader.init_worker_process()
    app.log.setup(
        int(os.environ.get("CELERY_LOG_LEVEL", 0)),
        os.environ.get("CELERY_LOG_FILE") or None,
        bool(os.environ.get("CELERY_LOG_REDIRECT", False)),
        str(os.environ.get("CELERY_LOG_REDIRECT_LEVEL")),
    )
    if os.environ.get("FORKED_BY_MULTIPROCESSING"):
        # pool did execv after fork
        trace.setup_worker_optimizations(app)
    else:
        app.set_current()
        set_default_app(app)
        app.finalize()
        trace._tasks = app._tasks  # enables fast_trace_task optimization.
    from celery.task.trace import build_tracer

    for name, task in items(app.tasks):
        task.__trace__ = build_tracer(name, task, app.loader, hostname)
    signals.worker_process_init.send(sender=None)
Example #4
def execute_and_trace(task_name, *args, **kwargs):
    """This is a pickleable method used as a target when applying to pools.

    It's the same as::

        >>> WorkerTaskTrace(task_name, *args, **kwargs).execute_safe()

    """
    platforms.set_mp_process_title("celeryd", info=task_name)
    try:
        return WorkerTaskTrace(task_name, *args, **kwargs).execute_safe()
    finally:
        platforms.set_mp_process_title("celeryd")
Example #5
File: job.py Project: tobych/celery
def execute_and_trace(task_name, *args, **kwargs):
    """This is a pickleable method used as a target when applying to pools.

    It's the same as::

        >>> WorkerTaskTrace(task_name, *args, **kwargs).execute_safe()

    """
    hostname = kwargs.get("hostname")
    platforms.set_mp_process_title("celeryd", task_name, hostname=hostname)
    try:
        return WorkerTaskTrace(task_name, *args, **kwargs).execute_safe()
    finally:
        platforms.set_mp_process_title("celeryd", "-idle-", hostname)
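
As both docstrings note, the point of execute_and_trace is that it is picklable, so it can be shipped to a pool child as the target callable. A rough, illustrative sketch of such a dispatch (the exact argument layout Celery uses internally is not shown here):

# Illustrative only: submit the picklable trace function to a pool child.
# `pool` is any multiprocessing.Pool-compatible pool created by the worker.
result = pool.apply_async(
    execute_and_trace,
    args=('proj.tasks.add', 4, 4),             # task name followed by task arguments (assumed layout)
    kwds={'hostname': 'worker1@example.com'},  # picked up for the process title in Example #5
)
print(result.get(timeout=10))
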
Example #6
def process_initializer(app, hostname):
    """Pool child process initializer.

    Initialize the child pool process to ensure the correct
    app instance is used and that things like logging work.
    """
    _set_task_join_will_block(True)
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork().  Note that init_worker makes sure it's only
    # run once per process.
    app.loader.init_worker()
    app.loader.init_worker_process()
    logfile = os.environ.get("CELERY_LOG_FILE") or None
    if logfile and "%i" in logfile.lower():
        # logfile path will differ so need to set up logging again.
        app.log.already_setup = False
    app.log.setup(
        int(os.environ.get("CELERY_LOG_LEVEL", 0) or 0),
        logfile,
        bool(os.environ.get("CELERY_LOG_REDIRECT", False)),
        str(os.environ.get("CELERY_LOG_REDIRECT_LEVEL")),
        hostname=hostname,
    )
    if os.environ.get("FORKED_BY_MULTIPROCESSING"):
        # pool did execv after fork
        trace.setup_worker_optimizations(app, hostname)
    else:
        app.set_current()
        set_default_app(app)
        app.finalize()
        trace._tasks = app._tasks  # enables fast_trace_task optimization.
    # rebuild execution handler for all tasks.
    from celery.app.trace import build_tracer

    for name, task in items(app.tasks):
        task.__trace__ = build_tracer(name,
                                      task,
                                      app.loader,
                                      hostname,
                                      app=app)
    from celery.worker import state as worker_state

    worker_state.reset_state()
    signals.worker_process_init.send(sender=None)
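
The %i check above matters because a per-child placeholder in the log file name forces each child to redo logging setup instead of inheriting the parent's. For reference, the environment keys the initializer reads back are the ones a parent process would export before spawning; a sketch with illustrative values:

import logging
import os

# Keys are exactly the ones process_initializer reads; values are illustrative.
os.environ['CELERY_LOG_LEVEL'] = str(logging.INFO)
os.environ['CELERY_LOG_FILE'] = '/var/log/celery/worker-%i.log'  # %i differs per child
os.environ['CELERY_LOG_REDIRECT'] = '1'
os.environ['CELERY_LOG_REDIRECT_LEVEL'] = 'WARNING'
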
Example #7
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    app = app_or_default(app)
    [platforms.reset_signal(signal) for signal in WORKER_SIGRESET]
    [platforms.ignore_signal(signal) for signal in WORKER_SIGIGNORE]
    platforms.set_mp_process_title("celeryd", hostname=hostname)

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.loader.init_worker()

    signals.worker_process_init.send(sender=None)
Example #8
def process_initializer():
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    map(platforms.reset_signal, WORKER_SIGRESET)
    map(platforms.ignore_signal, WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd")

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #9
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks."""
    app = app_or_default(app)
    app.set_current()
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.log.setup(int(os.environ.get("CELERY_LOG_LEVEL", 0)),
                  os.environ.get("CELERY_LOG_FILE") or None,
                  bool(os.environ.get("CELERY_LOG_REDIRECT", False)),
                  str(os.environ.get("CELERY_LOG_REDIRECT_LEVEL")))
    app.loader.init_worker()
    app.loader.init_worker_process()
    signals.worker_process_init.send(sender=None)
Example #10
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks."""
    app = app_or_default(app)
    app.set_current()
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.log.setup(int(os.environ.get("CELERY_LOG_LEVEL", 0)),
                  os.environ.get("CELERY_LOG_FILE") or None,
                  bool(os.environ.get("CELERY_LOG_REDIRECT", False)),
                  str(os.environ.get("CELERY_LOG_REDIRECT_LEVEL")))
    app.loader.init_worker()
    app.loader.init_worker_process()
    signals.worker_process_init.send(sender=None)
Example #11
def process_initializer(hostname):
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    map(platforms.reset_signal, WORKER_SIGRESET)
    map(platforms.ignore_signal, WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #12
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    app = app_or_default(app)
    app.set_current()
    [platforms.reset_signal(signal) for signal in WORKER_SIGRESET]
    [platforms.ignore_signal(signal) for signal in WORKER_SIGIGNORE]
    platforms.set_mp_process_title("celeryd", hostname=hostname)

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.loader.init_worker()

    signals.worker_process_init.send(sender=None)
Example #13
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks."""
    app.set_current()
    set_default_app(app)
    trace._tasks = app._tasks  # make sure this optimization is set.
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title('celeryd', hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.log.setup(int(os.environ.get('CELERY_LOG_LEVEL', 0)),
                  os.environ.get('CELERY_LOG_FILE') or None,
                  bool(os.environ.get('CELERY_LOG_REDIRECT', False)),
                  str(os.environ.get('CELERY_LOG_REDIRECT_LEVEL')))
    app.loader.init_worker()
    app.loader.init_worker_process()
    app.finalize()

    from celery.task.trace import build_tracer
    for name, task in app.tasks.iteritems():
        task.__trace__ = build_tracer(name, task, app.loader, hostname)
    signals.worker_process_init.send(sender=None)
Example #14
def process_initializer(app, hostname):
    """Initializes the process so it can be used to process tasks."""
    app.set_current()
    set_default_app(app)
    trace._tasks = app._tasks  # make sure this optimization is set.
    platforms.signals.reset(*WORKER_SIGRESET)
    platforms.signals.ignore(*WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)
    # This is for Windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    app.log.setup(int(os.environ.get("CELERY_LOG_LEVEL", 0)),
                  os.environ.get("CELERY_LOG_FILE") or None,
                  bool(os.environ.get("CELERY_LOG_REDIRECT", False)),
                  str(os.environ.get("CELERY_LOG_REDIRECT_LEVEL")))
    app.loader.init_worker()
    app.loader.init_worker_process()
    app.finalize()

    from celery.task.trace import build_tracer
    for name, task in app.tasks.iteritems():
        task.__trace__ = build_tracer(name, task, app.loader, hostname)
    signals.worker_process_init.send(sender=None)
Example #15
def set_process_status(info):
    arg_start = "manage" in sys.argv[0] and 2 or 1
    if sys.argv[arg_start:]:
        info = "%s (%s)" % (info, " ".join(sys.argv[arg_start:]))
    return platforms.set_mp_process_title("celeryd", info=info)
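
Example #15 inlines the argv-trimming that the later method versions delegate to platforms.strargv. A rough reconstruction of that helper from the logic above (a sketch, not necessarily Celery's exact implementation):

def strargv(argv):
    # Skip the sub-command as well for 'manage.py celeryd'-style invocations,
    # otherwise only skip the program name itself.
    arg_start = 2 if 'manage' in argv[0] else 1
    if len(argv) > arg_start:
        return ' '.join(argv[arg_start:])
    return ''

# e.g. strargv(['celeryd', '-l', 'info']) -> '-l info'
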
Example #16
def set_process_status(self, info):
    return platforms.set_mp_process_title('celeryd',
            info='%s (%s)' % (info, platforms.strargv(sys.argv)),
            hostname=self.hostname)
Example #17
def test_mp_hostname(self, current_process, set_process_title):
    current_process().name = 'Foo'
    set_mp_process_title('foo', hostname='worker@example.com', info='hello')
    set_process_title.assert_called_with('foo: worker@example.com:Foo', info='hello')
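
The assertion pins down the string set_mp_process_title composes: the hostname is appended to the program name, then the current child process name follows after a colon. A simplified reconstruction of just that formatting (the real function also passes the info argument through and relies on setproctitle when available):

def mp_title(progname, proc_name, hostname=None):
    # Reconstructed from the test's expectation above; proc_name stands in
    # for current_process().name.
    if hostname:
        progname = '{0}: {1}'.format(progname, hostname)
    return '{0}:{1}'.format(progname, proc_name)

assert mp_title('foo', 'Foo', hostname='worker@example.com') == 'foo: worker@example.com:Foo'
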
Example #18
def set_process_status(self, info):
    return platforms.set_mp_process_title(
        'celeryd',
        info='%s (%s)' % (info, platforms.strargv(sys.argv)),
        hostname=self.hostname)
Example #19
def set_process_status(self, info):
    return platforms.set_mp_process_title(
        'celeryd',
        info=f'{info} ({platforms.strargv(sys.argv)})',
        hostname=self.hostname,
    )
Example #20
def set_process_status(self, info):
    info = "%s (%s)" % (info, platforms.strargv(sys.argv))
    return platforms.set_mp_process_title("celeryd",
                                          info=info,
                                          hostname=self.hostname)
Example #21
def set_process_status(self, info):
    info = "%s (%s)" % (info, platforms.strargv(sys.argv))
    return platforms.set_mp_process_title("celeryd",
                                          info=info,
                                          hostname=self.hostname)
Example #22
def test_mp_hostname(self, current_process, set_process_title):
    current_process().name = 'Foo'
    set_mp_process_title('foo', hostname='worker@example.com', info='hello')
    set_process_title.assert_called_with('foo: worker@example.com:Foo', info='hello')
Example #23
def set_process_status(self, info):
    return platforms.set_mp_process_title(
        'celeryd',
        info='{0} ({1})'.format(info, platforms.strargv(sys.argv)),
        hostname=self.hostname,
    )
Example #24
def test_mp_hostname(self, current_process, set_process_title):
    current_process().name = "Foo"
    set_mp_process_title("foo", hostname="worker@example.com", info="hello")
    set_process_title.assert_called_with("foo: worker@example.com:Foo", info="hello")