Example #1
def setup_worker_optimizations(app):
    global _tasks
    global trace_task_ret

    # make sure custom Task.__call__ methods that call super
    # will not mess up the request/task stack.
    _install_stack_protection()

    # all new threads start without a current app, so if an app is not
    # passed on to the thread it will fall back to the "default app",
    # which then could be the wrong app.  So for the worker
    # we set this to always return our app.  This is a hack,
    # and means that only a single app can be used for workers
    # running in the same process.
    app.set_current()
    set_default_app(app)

    # evaluate all task classes by finalizing the app.
    app.finalize()

    # set fast shortcut to task registry
    _tasks = app._tasks

    trace_task_ret = _fast_trace_task
    from celery.worker import job as job_module
    job_module.trace_task_ret = _fast_trace_task
    job_module.__optimize__()
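A hedged usage sketch of how this hook is invoked at worker startup; the app name, broker URL, and task below are illustrative, and the import path varies across Celery versions (celery.app.trace in current releases):

from celery import Celery
from celery.app.trace import setup_worker_optimizations

app = Celery('demo', broker='memory://')  # illustrative app

@app.task
def add(x, y):
    return x + y

# Swap the module-level trace function for the pre-bound fast path so
# each incoming request avoids re-resolving the app and task registry.
setup_worker_optimizations(app)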
Example #2
def setup_worker_optimizations(app, hostname=None):
    global trace_task_ret

    hostname = hostname or socket.gethostname()

    # make sure custom Task.__call__ methods that call super
    # will not mess up the request/task stack.
    _install_stack_protection()

    # all new threads start without a current app, so if an app is not
    # passed on to the thread it will fall back to the "default app",
    # which then could be the wrong app.  So for the worker
    # we set this to always return our app.  This is a hack,
    # and means that only a single app can be used for workers
    # running in the same process.
    app.set_current()
    set_default_app(app)

    # evaluate all task classes by finalizing the app.
    app.finalize()

    # set fast shortcut to task registry
    _localized[:] = [
        app._tasks,
        prepare_accept_content(app.conf.CELERY_ACCEPT_CONTENT),
        hostname,
    ]

    trace_task_ret = _fast_trace_task
    from celery.worker import request as request_module
    request_module.trace_task_ret = _fast_trace_task
    request_module.__optimize__()
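The _localized list is a module-global cache: per-worker lookups are resolved once here, and the fast trace path reads them instead of recomputing per task. A self-contained sketch of the same pattern, with illustrative names:

_localized = []  # filled once at setup, read on every task

def setup(registry, accept_content, hostname):
    # Slice assignment mutates the list in place, so code that grabbed
    # a reference to _localized before setup still sees the new values.
    _localized[:] = [registry, accept_content, hostname]

def hot_path():
    registry, accept_content, hostname = _localized
    return registry, accept_content, hostname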
Example #3
File: trace.py Project: frol/celery
def setup_worker_optimizations(app):
    global _tasks
    global trace_task_ret

    # make sure custom Task.__call__ methods that call super
    # will not mess up the request/task stack.
    _install_stack_protection()

    # all new threads start without a current app, so if an app is not
    # passed on to the thread it will fall back to the "default app",
    # which then could be the wrong app.  So for the worker
    # we set this to always return our app.  This is a hack,
    # and means that only a single app can be used for workers
    # running in the same process.
    app.set_current()
    set_default_app(app)

    # evaluate all task classes by finalizing the app.
    app.finalize()

    # set fast shortcut to task registry
    _tasks = app._tasks

    trace_task_ret = _fast_trace_task
    try:
        job = sys.modules['celery.worker.job']
    except KeyError:
        pass
    else:
        job.trace_task_ret = _fast_trace_task
        job.__optimize__()
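Unlike Example #1, this variant patches celery.worker.job only if something else has already imported it, so enabling the optimization never imports the module as a side effect. The guard in isolation, with illustrative names:

import sys

def patch_if_loaded(module_name, attr, value):
    # Look the module up rather than importing it: patch when it is
    # already loaded, silently skip otherwise.
    try:
        module = sys.modules[module_name]
    except KeyError:
        pass
    else:
        setattr(module, attr, value)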
Example #4
def setup_worker_optimizations(app, hostname=None):
    global trace_task_ret

    hostname = hostname or gethostname()

    # make sure custom Task.__call__ methods that call super
    # won't mess up the request/task stack.
    _install_stack_protection()

    # all new threads start without a current app, so if an app is not
    # passed on to the thread it will fall back to the "default app",
    # which then could be the wrong app.  So for the worker
    # we set this to always return our app.  This is a hack,
    # and means that only a single app can be used for workers
    # running in the same process.
    app.set_current()
    set_default_app(app)

    # evaluate all task classes by finalizing the app.
    app.finalize()

    # set fast shortcut to task registry
    _localized[:] = [
        app._tasks,
        prepare_accept_content(app.conf.accept_content),
        hostname,
    ]

    trace_task_ret = _fast_trace_task
    from celery.worker import request as request_module
    request_module.trace_task_ret = _fast_trace_task
    request_module.__optimize__()
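Same setup as Example #2, but reading the lowercase accept_content setting (the Celery 4.x name for CELERY_ACCEPT_CONTENT) and importing gethostname directly. A sketch of tolerating either key name; the helper is hypothetical, since real Celery aliases old keys internally:

def get_accept_content(conf):
    # Prefer the new-style lowercase key and fall back to the pre-4.0
    # uppercase key; hypothetical helper, for illustration only.
    value = getattr(conf, 'accept_content', None)
    return value if value is not None else conf.CELERY_ACCEPT_CONTENT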
Example #5
    def __init__(self,
                 loglevel=None,
                 hostname=None,
                 ready_callback=noop,
                 queues=None,
                 app=None,
                 **kwargs):
        self.app = app_or_default(app or self.app)

        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)

        self._shutdown_complete = threading.Event()
        self.setup_defaults(kwargs, namespace="celeryd")
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self._finalize_db = None

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
Example #6
    def __init__(self, loglevel=None, hostname=None, ready_callback=noop,
            queues=None, app=None, pidfile=None, **kwargs):
        self.app = app_or_default(app or self.app)

        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace="celeryd")
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        self.use_eventloop = (detect_environment() == "default" and
                              self.app.broker_connection().is_evented)

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
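The use_eventloop flag gates the asynchronous event loop on the execution environment and the broker transport; extracted as a standalone predicate it might read as below (detect_environment lives in kombu, though its import path varies by version, and the helper name is illustrative):

from kombu.syn import detect_environment  # 'default', 'eventlet' or 'gevent'

def can_use_eventloop(app):
    # The async event loop requires the plain threading environment
    # and a broker transport that supports evented I/O.
    return (detect_environment() == 'default' and
            app.broker_connection().is_evented)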
Example #7
def bugreport(app=None):
    try:
        import celery
        from celery.app import set_default_app
        import tornado
        import babel

        set_default_app(app)

        return 'flower   -> flower:%s tornado:%s babel:%s%s' % (
            __version__, tornado.version, babel.__version__,
            celery.bugreport())
    except (ImportError, AttributeError):
        return 'Unknown Celery version'
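A sketch of exercising this Flower helper; the app below is illustrative, and when any import or attribute lookup fails the function falls back to the fixed string:

from celery import Celery

app = Celery('report', broker='memory://')
print(bugreport(app))
# e.g. 'flower   -> flower:<ver> tornado:<ver> babel:<ver>...' on success,
# or 'Unknown Celery version' when an import fails.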
Example #8
    def __init__(self, app=None, hostname=None, **kwargs):
        self.app = app_or_default(app or self.app)
        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)
        self.app.finalize()
        trace._tasks = self.app._tasks   # optimization
        self.hostname = hostname or socket.gethostname()
        self.on_before_init(**kwargs)

        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self._shutdown_complete = Event()
        self.setup_instance(**self.prepare_args(**kwargs))
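The Finalize call registers self.stop to run at interpreter shutdown. A minimal standalone sketch using multiprocessing.util.Finalize; Celery actually uses billiard's copy, which has the same interface:

from multiprocessing.util import Finalize

class Service(object):
    def __init__(self):
        # Register stop() to run during interpreter shutdown; entries
        # with a higher exitpriority run earlier.
        self._finalize = Finalize(self, self.stop, exitpriority=1)

    def stop(self):
        print('service stopped')

service = Service()  # prints 'service stopped' at interpreter exit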
Example #9
    def __init__(self,
                 loglevel=None,
                 hostname=None,
                 ready_callback=noop,
                 queues=None,
                 app=None,
                 pidfile=None,
                 **kwargs):
        self.app = app_or_default(app or self.app)
        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)
        self.app.finalize()
        trace._tasks = self.app._tasks

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace='celeryd')
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        self.use_eventloop = (detect_environment() == 'default'
                              and self.app.broker_connection().is_evented
                              and not self.app.IS_WINDOWS)

        # Update celery_include to have all known task modules, so that we
        # ensure all task modules are imported in case an execv happens.
        task_modules = set(task.__class__.__module__
                           for task in self.app.tasks.itervalues())
        self.app.conf.CELERY_INCLUDE = tuple(
            set(self.app.conf.CELERY_INCLUDE) | task_modules, )

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)
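This variant additionally folds the module of every registered task into CELERY_INCLUDE so an execv-based restart re-imports them all; itervalues() marks it as Python 2 code. The same set arithmetic in Python 3 form, as a hypothetical helper:

def include_task_modules(app):
    # Union the configured include list with the defining module of
    # every registered task, so an execv restart imports them again.
    task_modules = {type(task).__module__ for task in app.tasks.values()}
    app.conf.CELERY_INCLUDE = tuple(set(app.conf.CELERY_INCLUDE) | task_modules)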
Example #10
    def __init__(self, loglevel=None, hostname=None, ready_callback=noop,
            queues=None, app=None, pidfile=None, **kwargs):
        self.app = app_or_default(app or self.app)
        # all new threads start without a current app, so if an app is not
        # passed on to the thread it will fall back to the "default app",
        # which then could be the wrong app.  So for the worker
        # we set this to always return our app.  This is a hack,
        # and means that only a single app can be used for workers
        # running in the same process.
        set_default_app(self.app)
        self.app.finalize()
        trace._tasks = self.app._tasks

        self._shutdown_complete = Event()
        self.setup_defaults(kwargs, namespace='celeryd')
        self.app.select_queues(queues)  # select queues subset.

        # Options
        self.loglevel = loglevel or self.loglevel
        self.hostname = hostname or socket.gethostname()
        self.ready_callback = ready_callback
        self._finalize = Finalize(self, self.stop, exitpriority=1)
        self.pidfile = pidfile
        self.pidlock = None
        self.use_eventloop = (detect_environment() == 'default' and
                              self.app.broker_connection().is_evented and
                              not self.app.IS_WINDOWS)

        # Update celery_include to have all known task modules, so that we
        # ensure all task modules are imported in case an execv happens.
        task_modules = set(task.__class__.__module__
                           for task in self.app.tasks.itervalues())
        self.app.conf.CELERY_INCLUDE = tuple(
            set(self.app.conf.CELERY_INCLUDE) | task_modules,
        )

        # Initialize boot steps
        self.pool_cls = _concurrency.get_implementation(self.pool_cls)
        self.components = []
        self.namespace = Namespace(app=self.app).apply(self, **kwargs)