Example #1
 def tasklist(self, include_builtins=True):
     from celery.registry import tasks
     tasklist = tasks.keys()
     if not include_builtins:
         # Built-in tasks all live in the "celery." namespace.
         tasklist = filter(lambda s: not s.startswith("celery."),
                           tasklist)
     return "\n".join("  . %s" % task for task in sorted(tasklist))
Example #2
 def tasklist(self, include_builtins=True):
     from celery.registry import tasks
     tasklist = tasks.keys()
     if not include_builtins:
         tasklist = filter(lambda s: not s.startswith("celery."), tasklist)
     return TASK_LIST_FMT % "\n".join("\t. %s" % task
                                      for task in sorted(tasklist))
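TASK_LIST_FMT is a module-level format string that the snippet does not
show. A plausible stand-in, assuming it merely puts a heading above the
joined task list (the real constant may use different decoration):

# Hypothetical definition; the real constant is defined alongside the
# snippet above.
TASK_LIST_FMT = "Registered tasks:\n%s"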
Example #3
def dump_tasks(panel, **kwargs):
    # Remote-control command: log every registered task, together with
    # any extra info listed in TASK_INFO_FIELDS.
    def _extract_info(task):
        fields = dict((field, str(getattr(task, field, None)))
                      for field in TASK_INFO_FIELDS
                      if getattr(task, field, None) is not None)
        info = map("=".join, fields.items())
        if not info:
            return task.name
        return "%s [%s]" % (task.name, " ".join(info))

    info = map(_extract_info, (tasks[task] for task in sorted(tasks.keys())))
    panel.logger.debug("* Dump of currently registered tasks:\n%s" %
                       ("\n".join(info)))

    return info
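Note that this snippet assumes Python 2, where map() returns a list, so
both the truthiness test (if not info) and the later "\n".join(info) work.
Under Python 3, map() returns a one-shot iterator that is always truthy,
so the "no info" branch would never fire. A sketch of _extract_info that
behaves the same on both, with TASK_INFO_FIELDS filled in as an assumption
(the real tuple is defined next to dump_tasks):

TASK_INFO_FIELDS = ("exchange", "routing_key", "rate_limit")  # assumed

def _extract_info(task):
    fields = dict((field, str(getattr(task, field, None)))
                  for field in TASK_INFO_FIELDS
                  if getattr(task, field, None) is not None)
    # list() forces evaluation so the emptiness test also works on Python 3.
    info = list(map("=".join, fields.items()))
    if not info:
        return task.name
    return "%s [%s]" % (task.name, " ".join(info))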
Example #4
def dump_tasks(panel, **kwargs):
    def _extract_info(task):
        fields = dict(
            (field, str(getattr(task, field, None)))
            for field in TASK_INFO_FIELDS
            if getattr(task, field, None) is not None
        )
        info = map("=".join, fields.items())
        if not info:
            return task.name
        return "%s [%s]" % (task.name, " ".join(info))

    info = map(_extract_info, (tasks[task] for task in sorted(tasks.keys())))
    panel.logger.warn("* Dump of currently registered tasks:\n%s" %
                      ("\n".join(info)))

    return info
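The only differences from Example #3 are the log level (warn instead of
debug) and line wrapping. The panel argument only needs a logger attribute
here, so a minimal stub is enough to exercise either variant; the stub and
the empty registry below are made up for illustration:

import logging

class StubPanel(object):
    # Stand-in for the worker's control panel object.
    logger = logging.getLogger("celery.control")

logging.basicConfig(level=logging.DEBUG)
tasks = {}                      # stand-in for celery.registry.tasks
print(dump_tasks(StubPanel()))  # logs and returns the (empty) task info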
Example #5
def run_worker(concurrency=conf.CELERYD_CONCURRENCY,
               loglevel=conf.CELERYD_LOG_LEVEL,
               logfile=conf.CELERYD_LOG_FILE,
               hostname=None, discard=False,
               run_clockservice=False, events=False, **kwargs):
    """Starts the celery worker server."""

    hostname = hostname or socket.gethostname()

    print("celery@%s v%s is starting." % (hostname, celery.__version__))

    from celery.loaders import current_loader, load_settings
    loader = current_loader()
    settings = load_settings()

    if not concurrency:
        concurrency = multiprocessing.cpu_count()

    if (conf.CELERY_BACKEND == "database"
            and settings.DATABASE_ENGINE == "sqlite3"
            and concurrency > 1):
        import warnings
        warnings.warn("The sqlite3 database engine doesn't support "
                      "concurrency. We'll be using a single process only.",
                      UserWarning)
        concurrency = 1

    # Setup logging
    if not isinstance(loglevel, int):
        loglevel = conf.LOG_LEVELS[loglevel.upper()]

    if discard:
        discarded_count = discard_all()
        what = discarded_count != 1 and "messages" or "message"
        print("discard: Erased %d %s from the queue.\n" % (
                discarded_count, what))

    # Run the worker init handler.
    # (Usually imports task modules and such.)
    loader.on_worker_init()

    # Dump configuration to screen so we have some basic information
    # when users send e-mails.

    tasklist = ""
    if loglevel <= logging.INFO:
        from celery.registry import tasks
        tasklist = tasks.keys()
        if loglevel > logging.DEBUG:
            # Hide the built-in "celery.*" tasks unless logging at DEBUG.
            tasklist = filter(lambda s: not s.startswith("celery."), tasklist)
        tasklist = TASK_LIST_FMT % "\n".join("        . %s" % task
                                             for task in sorted(tasklist))

    print(STARTUP_INFO_FMT % {
            "conninfo": info.format_broker_info(),
            "queues": info.format_routing_table(indent=8),
            "concurrency": concurrency,
            "loglevel": conf.LOG_LEVELS[loglevel],
            "logfile": logfile or "[stderr]",
            "celerybeat": run_clockservice and "ON" or "OFF",
            "events": events and "ON" or "OFF",
            "tasks": tasklist,
            "loader": loader.__class__.__module__,
    })

    print("Celery has started.")
    set_process_status("Running...")

    def run_worker():
        worker = WorkController(concurrency=concurrency,
                                loglevel=loglevel,
                                logfile=logfile,
                                hostname=hostname,
                                embed_clockservice=run_clockservice,
                                send_events=events)

        # Install signal handler so SIGHUP restarts the worker.
        install_worker_restart_handler(worker)

        from celery import signals
        signals.worker_init.send(sender=worker)

        try:
            worker.start()
        except Exception, e:
            emergency_error(logfile, "celeryd raised exception %s: %s\n%s" % (
                            e.__class__, e, traceback.format_exc()))
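The snippet ends inside the exception handler; the inner run_worker is
defined but never called in what is shown, so the original module
presumably goes on to invoke it (directly, or wrapped in a supervisor).
A minimal invocation, assuming the surrounding celery imports are in
place:

if __name__ == "__main__":
    # Hypothetical entry point; the real celeryd binary parses
    # command-line options and forwards them to run_worker().
    run_worker(concurrency=2, loglevel="INFO", discard=False)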