def __init__(self, app, *args, **kwargs):
    """Constructor."""
    self.app = app

    # Here we can also change application settings. E.g.
    # changing the task time limit:
    self.app.conf.task_queues = (
        Queue('celery', Exchange('celery', type='direct'),
              routing_key='celery'),
        Broadcast('broadcast_tasks')
    )
    self.app.conf.task_routes = {
        'gwvolman.tasks.shutdown_container': {'queue': 'broadcast_tasks'}
    }
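With this routing in place, a single call to the routed task is copied by the fanout exchange to every worker consuming the broadcast queue. A minimal dispatch sketch, not taken from gwvolman, assuming `app` is the application configured above and ignoring the real task's arguments:

# One send_task() call is fanned out to every worker that was started
# with `-Q broadcast_tasks,celery` (workers must opt in to the queue).
app.send_task('gwvolman.tasks.shutdown_container')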
class Config:
    CELERY_BROKER_URL = config.get('bamsi', 'CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = config.get('bamsi', 'CELERY_RESULT_BACKEND')
    MASTER_IP = config.get('bamsi', 'MASTER_IP')
    MASTER_PORT = config.get('bamsi', 'MASTER_PORT')
    DATA_PATH = config.get('bamsi', 'DATA_PATH')
    CELERY_QUEUES = (
        Queue('default', Exchange('default'), routing_key='default'),
        Broadcast('q1'),
    )
    CELERY_ROUTES = {
        'tapp.readBAM': 'default',
        'tapp.readMock': {'queue': 'q1'}
    }
    NO_OF_ROWS = 3000
from home.job import JobDescription
from rec_core.worker import RecommendationWorker
from home.models import News
import celery
import logging
logging.basicConfig(level=logging.DEBUG)
from kombu.common import Broadcast, Queue

# Celery Task wrapper
app = celery.Celery(
    'most-cb',
    backend='rpc://',
    broker='pyamqp://{}:{}@{}:{}//'.format(
        settings.RABBITMQ_USER, settings.RABBITMQ_PWD,
        settings.RABBITMQ_HOST, settings.RABBITMQ_PORT))
app.config_from_object('celeryconfig')
app.conf.task_queues = (Broadcast('most_broadcast_tasks'),
                        Queue("most_recommend_tasks"))
app.conf.task_routes = {
    'most.append_news': {
        # 'queue': 'broadcast_tasks',
        'exchange': 'most_broadcast_tasks'
    },
    'most.contentbased': {
        'queue': 'most_recommend_tasks'
    },
}

worker = RecommendationWorker()


@app.task
def contentbased(job_description):
    if job_description.task == "get_browsed_embedding":
# assets building options
ASSETS_DEBUG = DEBUG  # noqa: F405
ASSETS_AUTO_BUILD = DEBUG  # noqa: F405

##
# Celery options
##

# load custom kombu encoder
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERY_RESULT_BACKEND = CELERY_BROKER_URL  # noqa: F405
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"

CELERY_TASK_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Broadcast('broadcast_tasks'),
)

CELERY_TASK_ROUTES = {
    'inboxen.tasks.force_garbage_collection': {'queue': 'broadcast_tasks'}
}

CELERY_TASK_DEFAULT_QUEUE = 'default'
CELERY_TASK_DEFAULT_EXCHANGE = 'default'
CELERY_TASK_DEFAULT_ROUTING_KEY = 'default'

CELERY_BEAT_SCHEDULE = {
    'statistics': {
        'task': 'inboxen.tasks.statistics',
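The default-queue settings above mean any task without an explicit route lands on 'default', while the routed task is copied to every worker consuming 'broadcast_tasks'. An illustrative call site, not taken from inboxen, assuming the two task functions named in the config are importable from inboxen.tasks:

from inboxen.tasks import force_garbage_collection, statistics

statistics.delay()                 # unrouted -> 'default' queue
force_garbage_collection.delay()   # routed -> fanned out via 'broadcast_tasks'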
from celery.schedules import crontab

CELERY_TIMEZONE = 'UTC'

CELERYBEAT_SCHEDULE = {
    'collect-articles': {
        'task': 'argos.tasks.periodic.collect',
        'schedule': crontab(minute='*/10')
    },
    'cluster-articles': {
        'task': 'argos.tasks.periodic.cluster_articles',
        'schedule': crontab(minute='*/10')
    }
}

from kombu.common import Broadcast, Queue

# Create a broadcast queue so the tasks are sent to
# *all* workers (that are listening to that queue).
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (Queue('default'), Broadcast('broadcast_tasks'), Queue('clustering'))
CELERY_ROUTES = {
    'argos.tasks.periodic.collect': {
        'queue': 'broadcast_tasks'
    },
    'argos.tasks.periodic.cluster_articles': {
        'queue': 'clustering'
    }
}
import logging
import logging.config

from celery import Celery
from celery.signals import setup_logging
from kombu.common import Broadcast, Queue

from . import config

_log = logging.getLogger(__name__)

RELOAD_CACHE_EXCHANGE_NAME = 'fmn.tasks.reload_cache'


@setup_logging.connect
def configure_logging(**kwargs):
    """
    Signal sent by Celery when logging needs to be setup for a worker.

    Arguments are unused.
    """
    logging.config.dictConfig(config.app_conf['logging'])
    _log.info('Logging successfully configured for Celery')


#: The celery application object
app = Celery('FMN')
app.conf.task_queues = (
    Broadcast(RELOAD_CACHE_EXCHANGE_NAME),
    Queue('fmn.tasks.unprocessed_messages'),
)
app.conf.update(**config.app_conf['celery'])
from kombu.common import Broadcast, Exchange, Queue

# Exchanges setup
plugins_generic = Exchange("plugins.generic", type="fanout")
plugins = Exchange("plugins", arguments={"alternate-exchange": "plugins.generic"})

# Queues setup
task_queues = (
    Broadcast("collector"),
    Queue("plugins.generic", exchange=plugins_generic),
    Queue("plugins.windows", exchange=plugins, routing_key="plugins.windows"),
    Queue("plugins.linux", exchange=plugins, routing_key="plugins.linux"),
    Queue("plugins.macos", exchange=plugins, routing_key="plugins.macos"),
    Queue("plugins.sandbox", exchange=plugins, routing_key="plugins.sandbox"),
    Queue("manager"),
    Queue("store"),
)

# Celery Routing
task_routes = ([
    ("aleph.collectors.tasks.*", {"queue": "collector"}),
    ("aleph.storages.tasks.*", {"queue": "store"}),
    ("aleph.datastores.tasks.*", {"queue": "store"}),
    ("aleph.tasks.*", {"queue": "manager"
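The `alternate-exchange` argument is a RabbitMQ feature: a message the `plugins` exchange cannot route is handed to `plugins.generic`, whose fanout binding delivers it to the generic plugins queue. A rough sketch of the effect, assuming `app` is the Celery application configured with these queues; the task name `aleph.plugins.tasks.process` and the `sample` argument are placeholders:

# Routing key matches a bound queue -> delivered to plugins.windows.
app.send_task('aleph.plugins.tasks.process', args=[sample],
              exchange='plugins', routing_key='plugins.windows')

# No queue is bound for this key, so RabbitMQ republishes the message to the
# alternate exchange 'plugins.generic' and it ends up in the generic queue.
app.send_task('aleph.plugins.tasks.process', args=[sample],
              exchange='plugins', routing_key='plugins.freebsd')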
DB_address, DB_user, DB_password, DB_name, redis_info = config()

# Connect to the Redis broker
# app = Celery('openstack', broker="redis://:[email protected]:6379/1")
app = Celery('openstack', broker=redis_info)

# Allow running tasks as root
platforms.C_FORCE_ROOT = True

# Celery task queues
app.conf.update(
    task_queues=(
        Queue("CREATEVM", Exchange("CREATEVM"), routing_key="CREATEVM"),
        Queue("DELETEVM", Exchange("DELETEVM"), routing_key="DELETEVM"),
        Queue("SHUTDOWNVM", Exchange("SHUTDOWNVM"), routing_key="SHUTDOWNVM"),
        Queue("BOOTVM", Exchange("BOOTVM"), routing_key="BOOTVM"),
        Queue("SUSPENDVM", Exchange("SUSPENDVM"), routing_key="SUSPENDVM"),
        Queue("RESUMEVM", Exchange("RESUMEVM"), routing_key="RESUMEVM"),
        Queue("UPDATESYSINFO", Exchange("UPDATESYSINFO"), routing_key="UPDATESYSINFO"),
    )
)


def connect_db():
    """Connect to the database."""
    try:
        db = MySQLdb.connect(host=DB_address,
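Only task_queues is defined here, with no task_routes, so producers name the destination queue at the call site. An illustrative dispatch, not from this project; the task name `tasks.create_vm` and the `vm_spec` argument are assumptions:

# Send a (hypothetical) VM-creation task straight to its dedicated queue;
# without task_routes, the queue is chosen explicitly by the caller.
app.send_task('tasks.create_vm', args=[vm_spec], queue='CREATEVM')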