Example #1
File: __init__.py  Project: kmike/celery
def process_initializer():
    # There seems to be a bug in multiprocessing (backport?)
    # when detached, where the worker gets EOFErrors from time to time
    # and the logger is left from the parent process causing a crash.
    _hijack_multiprocessing_logger()

    platform.reset_signal("SIGTERM")
    platform.set_mp_process_title("celeryd")

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()
Example #2
File: beat.py  Project: B-Rich/playdoh-lib
    def init_loader(self):
        # Run the worker init handler.
        # (Usually imports task modules and such.)
        from celery.loaders import current_loader

        self.loader = current_loader()
        self.loader.init_worker()
Example #3
 def init_loader(self):
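     # Grab the current loader and settings up front and refuse to start
     # celeryd if the loader has not been configured.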
     from celery.loaders import current_loader, load_settings
     self.loader = current_loader()
     self.settings = load_settings()
     if not self.loader.configured:
         raise ImproperlyConfigured(
             "Celery needs to be configured to run celeryd.")
Example #4
def get_registered_tasks():
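    # Import the default task modules so the registry is populated, then
    # return the names of the registered tasks that are BPMTask instances.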
    from celery.loaders import current_loader
    from ws.tasks import BPMTask
    loader = current_loader()
    loader.import_default_modules()
    return [key for key, value in task_registry.items()
            if isinstance(value, BPMTask)]
Example #5
    def test_send_email(self):
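        # Verify that on_failure() mails the admins via the loader only
        # while error e-mails are enabled for the task.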
        from celery.loaders import current_loader
        loader = current_loader()
        old_mail_admins = loader.mail_admins
        old_enable_mails = mytask.send_error_emails
        mail_sent = [False]

        def mock_mail_admins(*args, **kwargs):
            mail_sent[0] = True

        loader.mail_admins = mock_mail_admins
        mytask.send_error_emails = True
        try:
            tw = TaskRequest(mytask.name, gen_unique_id(), [1], {"f": "x"})
            try:
                raise KeyError("moofoobar")
            except:
                einfo = ExceptionInfo(sys.exc_info())

            tw.on_failure(einfo)
            self.assertTrue(mail_sent[0])

            mail_sent[0] = False
            mytask.send_error_emails = False
            tw.on_failure(einfo)
            self.assertFalse(mail_sent[0])

        finally:
            loader.mail_admins = old_mail_admins
            mytask.send_error_emails = old_enable_mails
Example #6
def get_registered_tasks():
    from celery.loaders import current_loader
    from ws.tasks import BPMTask
    loader = current_loader()
    loader.import_default_modules()
    return [key for key, value in task_registry.items()
            if isinstance(value, BPMTask)]
Example #7
 def init_loader(self):
     from celery.loaders import current_loader, load_settings
     self.loader = current_loader()
     self.settings = load_settings()
     if not self.loader.configured:
         raise ImproperlyConfigured(
                 "Celery needs to be configured to run celeryd.")
Example #8
    def test_send_email(self):
        from celery.loaders import current_loader
        loader = current_loader()
        old_mail_admins = loader.mail_admins
        old_enable_mails = mytask.send_error_emails
        mail_sent = [False]

        def mock_mail_admins(*args, **kwargs):
            mail_sent[0] = True

        loader.mail_admins = mock_mail_admins
        mytask.send_error_emails = True
        try:
            tw = TaskRequest(mytask.name, gen_unique_id(), [1], {"f": "x"})
            try:
                raise KeyError("moofoobar")
            except:
                einfo = ExceptionInfo(sys.exc_info())

            tw.on_failure(einfo)
            self.assertTrue(mail_sent[0])

            mail_sent[0] = False
            mytask.send_error_emails = False
            tw.on_failure(einfo)
            self.assertFalse(mail_sent[0])

        finally:
            loader.mail_admins = old_mail_admins
            mytask.send_error_emails = old_enable_mails
Example #9
File: job.py  Project: maximbo/celery
    def __init__(self, *args, **kwargs):
        self.loader = kwargs.pop("loader", current_loader())
        super(WorkerTaskTrace, self).__init__(*args, **kwargs)

        self._store_errors = True
        if self.task.ignore_result:
            self._store_errors = conf.STORE_ERRORS_EVEN_IF_IGNORED
        self.super = super(WorkerTaskTrace, self)
Example #10
    def __init__(self, *args, **kwargs):
        self.loader = kwargs.pop("loader", current_loader())
        super(WorkerTaskTrace, self).__init__(*args, **kwargs)

        self._store_errors = True
        if self.task.ignore_result:
            self._store_errors = conf.STORE_ERRORS_EVEN_IF_IGNORED
        self.super = super(WorkerTaskTrace, self)
Example #11
File: __init__.py  Project: jokar/minion
def process_initializer(hostname):
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    map(platforms.reset_signal, WORKER_SIGRESET)
    map(platforms.ignore_signal, WORKER_SIGIGNORE)
    platforms.set_mp_process_title("celeryd", hostname=hostname)

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #12
File: __init__.py  Project: clayg/celery
def process_initializer():
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    map(platform.reset_signal, WORKER_SIGRESET)
    map(platform.ignore_signal, WORKER_SIGIGNORE)
    platform.set_mp_process_title("celeryd")

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #13
 def test_load_settings(self):
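     # load_settings() should populate the module-level cache and keep
     # returning the same object, which is also the loader's conf.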
     loader = loaders.current_loader()
     loaders._settings = None
     settings = loaders.load_settings()
     self.assertTrue(loaders._settings)
     settings = loaders.load_settings()
     self.assertIs(settings, loaders._settings)
     self.assertIs(settings, loader.conf)
Example #14
    def test_current_loader(self):
        warnings.resetwarnings()
        with catch_warnings(record=True) as log:
            self.assertIs(loaders.current_loader(), self.app.loader)
            warning = log[0].message

            self.assertIsInstance(warning, CPendingDeprecationWarning)
            self.assertIn("deprecation", warning.args[0])
Example #15
    def test_current_loader(self):
        warnings.resetwarnings()
        with catch_warnings(record=True) as log:
            self.assertIs(loaders.current_loader(), self.app.loader)
            warning = log[0].message

            self.assertIsInstance(warning, CPendingDeprecationWarning)
            self.assertIn("deprecation", warning.args[0])
Example #16
File: job.py  Project: jokar/minion
    def __init__(self, *args, **kwargs):
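        # Fall back to the current loader and the local hostname when the
        # caller does not pass them in explicitly.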
        self.loader = kwargs.get("loader") or current_loader()
        self.hostname = kwargs.get("hostname") or socket.gethostname()
        super(WorkerTaskTrace, self).__init__(*args, **kwargs)

        self._store_errors = True
        if self.task.ignore_result:
            self._store_errors = self.task.store_errors_even_if_ignored
        self.super = super(WorkerTaskTrace, self)
Example #17
File: celerybeat.py  Project: kmike/celery
def run_clockservice(loglevel=conf.CELERYBEAT_LOG_LEVEL,
        logfile=conf.CELERYBEAT_LOG_FILE,
        schedule=conf.CELERYBEAT_SCHEDULE_FILENAME, **kwargs):
    """Starts the celerybeat clock server."""

    print("celerybeat %s is starting." % celery.__version__)

    # Setup logging
    if not isinstance(loglevel, int):
        loglevel = conf.LOG_LEVELS[loglevel.upper()]

    # Run the worker init handler.
    # (Usually imports task modules and such.)
    from celery.loaders import current_loader
    current_loader().init_worker()


    # Dump configuration to screen so we have some basic information
    # when users send e-mails.

    print(STARTUP_INFO_FMT % {
            "conninfo": info.format_broker_info(),
            "logfile": logfile or "@stderr",
            "loglevel": conf.LOG_LEVELS[loglevel],
            "schedule": schedule,
    })

    print("celerybeat has started.")
    arg_start = "manage" in sys.argv[0] and 2 or 1
    platform.set_process_title("celerybeat",
                               info=" ".join(sys.argv[arg_start:]))

    def _run_clock():
        from celery.log import setup_logger
        logger = setup_logger(loglevel, logfile)
        clockservice = ClockService(logger=logger, schedule_filename=schedule)

        try:
            install_sync_handler(clockservice)
            clockservice.start()
        except Exception, e:
            emergency_error(logfile,
                    "celerybeat raised exception %s: %s\n%s" % (
                            e.__class__, e, traceback.format_exc()))
Example #18
def configure():
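    # Configure Django settings from the Celery loader's configuration,
    # but only if they have not been configured already.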
    from celery.loaders import current_loader
    from django.conf import settings

    if not settings.configured:
        settings_module = os.environ.get("CELERY_CONFIG_MODULE",
                                         "celeryconfig")
        settings.configure(SETTINGS_MODULE=settings_module,
                           **dict(DEFAULTS, **current_loader().conf))
        settings.DEBUG = True
Example #19
File: job.py  Project: jokar/minion
 def send_error_email(self, task, context, exc,
         whitelist=None, enabled=False, fail_silently=True):
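     # Mail the admins only when error e-mails are enabled for this task
     # and the exception type matches the whitelist (if one is given).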
     if enabled and not task.disable_error_emails:
         if whitelist:
             if not isinstance(exc, tuple(whitelist)):
                 return
         subject = self.email_subject.strip() % context
         body = self.email_body.strip() % context
         return current_loader().mail_admins(subject, body,
                                             fail_silently=fail_silently)
Example #20
def configure():
    from celery.loaders import current_loader
    from django.conf import settings

    if not settings.configured:
        settings_module = os.environ.get("CELERY_CONFIG_MODULE",
                                         "celeryconfig")
        settings.configure(SETTINGS_MODULE=settings_module,
                           **dict(DEFAULTS,
                                  **current_loader().conf))
        settings.DEBUG = True
Example #21
def process_initializer():
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    # There seems to be a bug in multiprocessing (backport?)
    # when detached, where the worker gets EOFErrors from time to time
    # and the logger is left from the parent process causing a crash.
    _hijack_multiprocessing_logger()

    platform.reset_signal("SIGTERM")
    platform.ignore_signal("SIGINT")
    platform.set_mp_process_title("celeryd")

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #22
def process_initializer():
    """Initializes the process so it can be used to process tasks.

    Used for multiprocessing environments.

    """
    # There seems to be a bug in multiprocessing (backport?)
    # when detached, where the worker gets EOFErrors from time to time
    # and the logger is left from the parent process causing a crash.
    _hijack_multiprocessing_logger()

    map(platform.reset_signal, WORKER_SIGRESET)
    map(platform.ignore_signal, WORKER_SIGIGNORE)
    platform.set_mp_process_title("celeryd")

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader
    current_loader().init_worker()

    signals.worker_process_init.send(sender=None)
Example #23
File: __init__.py  Project: washeck/celery
def process_initializer():
    # There seems to be a bug in multiprocessing (backport?)
    # when detached, where the worker gets EOFErrors from time to time
    # and the logger is left from the parent process causing a crash.
    _hijack_multiprocessing_logger()

    platform.reset_signal("SIGTERM")
    platform.set_mp_process_title("celeryd")

    # On Windows we need to run a dummy command 'celeryinit'
    # for django to fully initialize after fork()
    if not callable(getattr(os, "fork", None)):
        from django.core.management import execute_manager

        settings_mod = os.environ.get("DJANGO_SETTINGS_MODULE", "settings")
        project_settings = __import__(settings_mod, {}, {}, [""])
        execute_manager(project_settings, argv=["manage.py", "celeryinit"])

    # This is for windows and other platforms not supporting
    # fork(). Note that init_worker makes sure it's only
    # run once per process.
    from celery.loaders import current_loader

    current_loader().init_worker()
Example #24
 def test_current_loader(self):
     with self.assertWarnsRegex(CPendingDeprecationWarning, r'deprecation'):
         self.assertIs(loaders.current_loader(), self.app.loader)
Example #25
File: __init__.py  Project: jokar/minion
def get_backend_cls(backend):
    """Get backend class by name/alias"""
    if backend not in _backend_cache:
        aliases = dict(BACKEND_ALIASES, **current_loader().override_backends)
        _backend_cache[backend] = get_cls_by_name(backend, aliases)
    return _backend_cache[backend]
Example #26
def get_backend_cls(backend):
    """Get backend class by name/alias"""
    if backend not in _backend_cache:
        aliases = dict(BACKEND_ALIASES, **current_loader().override_backends)
        _backend_cache[backend] = get_cls_by_name(backend, aliases)
    return _backend_cache[backend]
Example #27
 def test_current_loader(self):
     with self.assertPendingDeprecation():
         self.assertIs(loaders.current_loader(), self.app.loader)
Example #28
 def test_current_loader(self):
     with self.assertPendingDeprecation():
         self.assertIs(loaders.current_loader(), self.app.loader)
Example #29
 def test_current_loader(self):
     self.assertIs(loaders.current_loader(), self.app.loader)
Example #30
 def test_current_loader(self):
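     # current_loader() should always hand back the same cached loader
     # instance, stored in loaders._loader.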
     loader1 = loaders.current_loader()
     loader2 = loaders.current_loader()
     self.assertIs(loader1, loader2)
     self.assertIs(loader2, loaders._loader)
Example #31
File: celeryd.py  Project: maximbo/celery
def run_worker(concurrency=conf.CELERYD_CONCURRENCY,
        loglevel=conf.CELERYD_LOG_LEVEL, logfile=conf.CELERYD_LOG_FILE,
        hostname=None,
        discard=False, run_clockservice=False, events=False, **kwargs):
    """Starts the celery worker server."""

    hostname = hostname or socket.gethostname()

    print("celery@%s v%s is starting." % (hostname, celery.__version__))

    from celery.loaders import current_loader, load_settings
    loader = current_loader()
    settings = load_settings()

    if not concurrency:
        concurrency = multiprocessing.cpu_count()

    if conf.CELERY_BACKEND == "database" \
            and settings.DATABASE_ENGINE == "sqlite3" and \
            concurrency > 1:
        import warnings
        warnings.warn("The sqlite3 database engine doesn't support "
                "concurrency. We'll be using a single process only.",
                UserWarning)
        concurrency = 1

    # Setup logging
    if not isinstance(loglevel, int):
        loglevel = conf.LOG_LEVELS[loglevel.upper()]

    if discard:
        discarded_count = discard_all()
        what = discarded_count > 1 and "messages" or "message"
        print("discard: Erased %d %s from the queue.\n" % (
                discarded_count, what))

    # Run the worker init handler.
    # (Usually imports task modules and such.)
    loader.on_worker_init()

    # Dump configuration to screen so we have some basic information
    # when users send e-mails.

    tasklist = ""
    if loglevel <= logging.INFO:
        from celery.registry import tasks
        tasklist = tasks.keys()
        if not loglevel <= logging.DEBUG:
            tasklist = filter(lambda s: not s.startswith("celery."), tasklist)
        tasklist = TASK_LIST_FMT % "\n".join("        . %s" % task
                                                for task in sorted(tasklist))

    print(STARTUP_INFO_FMT % {
            "conninfo": info.format_broker_info(),
            "queues": info.format_routing_table(indent=8),
            "concurrency": concurrency,
            "loglevel": conf.LOG_LEVELS[loglevel],
            "logfile": logfile or "[stderr]",
            "celerybeat": run_clockservice and "ON" or "OFF",
            "events": events and "ON" or "OFF",
            "tasks": tasklist,
            "loader": loader.__class__.__module__,
    })

    print("Celery has started.")
    set_process_status("Running...")

    def run_worker():
        worker = WorkController(concurrency=concurrency,
                                loglevel=loglevel,
                                logfile=logfile,
                                hostname=hostname,
                                embed_clockservice=run_clockservice,
                                send_events=events)

        # Install signal handler so SIGHUP restarts the worker.
        install_worker_restart_handler(worker)

        from celery import signals
        signals.worker_init.send(sender=worker)

        try:
            worker.start()
        except Exception, e:
            emergency_error(logfile, "celeryd raised exception %s: %s\n%s" % (
                            e.__class__, e, traceback.format_exc()))
Example #32
 def test_loader_is_configured(self):
     from celery.loaders import current_loader, load_settings
     loader = current_loader()
     self.assertIsInstance(loader, celery.FlaskLoader)
     settings = load_settings()
     self.assertTrue(loader.configured)
Example #33
 def init_loader(self):
     # Run the worker init handler.
     # (Usually imports task modules and such.)
     from celery.loaders import current_loader
     current_loader().init_worker()
Example #34
 def test_current_loader(self):
     with self.assertWarnsRegex(CPendingDeprecationWarning, r"deprecation"):
         self.assertIs(loaders.current_loader(), self.app.loader)
Example #35
 def init_loader(self):
     from celery.loaders import current_loader, load_settings
     self.loader = current_loader()
     self.settings = load_settings()
Example #36
 def test_current_loader(self):
     self.assertIs(loaders.current_loader(), self.app.loader)
Example #37
File: apvyd.py  Project: jasonbaker/apvy
def main():
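    # Initialize the worker environment (task modules etc.) before
    # connecting to the server and starting the reactor loop.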
    from celery.loaders import current_loader
    current_loader().init_worker()
    connect_to_server()
    reactor.run()
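
The deprecation tests above (e.g. Examples #14, #24, #27 and #36) show what current_loader() resolves to in app-based Celery versions: the loader attached to the current application. A minimal sketch of that relationship, assuming a Celery release where both the app API and the deprecated celery.loaders.current_loader are still importable; the app name "example" is purely illustrative:

from celery import Celery
from celery.loaders import current_loader

app = Celery("example")  # a newly created app registers itself as the current app by default

# current_loader() is a deprecated alias for the current app's loader,
# so both expressions refer to the same object.
assert current_loader() is app.loader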