Example #1
0
 def __init__(self, app, *args, **kwargs):
     """Constructor.

     Keeps a reference to the Celery *app* and configures its queues
     and routing so that container-shutdown tasks are broadcast to
     every worker instead of being consumed by a single one.
     """
     self.app = app
     # Other application settings (e.g. the task time limit) could be
     # adjusted here as well.
     queues = (
         Queue('celery', Exchange('celery', type='direct'),
               routing_key='celery'),
         Broadcast('broadcast_tasks'),
     )
     routes = {
         'gwvolman.tasks.shutdown_container': {'queue': 'broadcast_tasks'},
     }
     self.app.conf.task_queues = queues
     self.app.conf.task_routes = routes
Example #2
0
class Config:
    """Application/Celery settings read from the ``bamsi`` section of the
    external ``config`` parser (defined elsewhere in this module)."""

    # Broker and result-backend connection URLs for Celery.
    CELERY_BROKER_URL = config.get('bamsi', 'CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = config.get('bamsi', 'CELERY_RESULT_BACKEND')
    # Master node address — presumably where workers report back;
    # NOTE(review): confirm against the code that consumes these.
    MASTER_IP = config.get('bamsi', 'MASTER_IP')
    MASTER_PORT = config.get('bamsi', 'MASTER_PORT')
    DATA_PATH = config.get('bamsi', 'DATA_PATH')
    # One regular direct queue plus a broadcast (fan-out) queue 'q1'
    # that delivers a copy of each message to every worker.
    CELERY_QUEUES = (
        Queue('default', Exchange('default'), routing_key='default'),
        Broadcast('q1'),
    )
    # Route readMock to the broadcast queue; readBAM stays on 'default'
    # (a bare string route value is shorthand for {'queue': <name>}).
    CELERY_ROUTES = {
        'tapp.readBAM': 'default',
        'tapp.readMock': {
            'queue': 'q1'
        }
    }
    # Batch size used when reading rows.
    NO_OF_ROWS = 3000
Example #3
0
# assets building options
ASSETS_DEBUG = DEBUG  # noqa: F405
ASSETS_AUTO_BUILD = DEBUG  # noqa: F405

##
# Celery options
##

# load custom kombu encoder
# Email a report whenever a task raises an unhandled exception, and
# keep results on the same connection the broker uses.
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERY_RESULT_BACKEND = CELERY_BROKER_URL  # noqa: F405

# Everything on the wire is JSON.
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"

# One ordinary work queue plus a fan-out queue that reaches every worker.
CELERY_TASK_QUEUES = (
    Queue("default", Exchange("default"), routing_key="default"),
    Broadcast("broadcast_tasks"),
)

# Garbage collection has to run on all workers, hence the broadcast queue.
CELERY_TASK_ROUTES = {
    "inboxen.tasks.force_garbage_collection": {"queue": "broadcast_tasks"},
}

# Any task without an explicit route lands on the default queue.
CELERY_TASK_DEFAULT_QUEUE = "default"
CELERY_TASK_DEFAULT_EXCHANGE = "default"
CELERY_TASK_DEFAULT_ROUTING_KEY = "default"


CELERY_BEAT_SCHEDULE = {
    'statistics': {
        'task': 'inboxen.tasks.statistics',
Example #4
0
from kombu.common import Broadcast, Exchange, Queue

# Exchanges setup.
# The "plugins" exchange declares "plugins.generic" as its
# alternate-exchange: messages whose routing key matches no platform
# binding fall through to the generic fan-out exchange.
plugins_generic = Exchange("plugins.generic", type="fanout")
plugins = Exchange(
    "plugins", arguments={"alternate-exchange": "plugins.generic"})

# Queues setup: one broadcast queue for collectors, a catch-all generic
# plugin queue, one queue per supported platform, plus manager/store.
_PLUGIN_PLATFORMS = ("windows", "linux", "macos", "sandbox")
task_queues = (
    Broadcast("collector"),
    Queue("plugins.generic", exchange=plugins_generic),
) + tuple(
    Queue("plugins." + platform, exchange=plugins,
          routing_key="plugins." + platform)
    for platform in _PLUGIN_PLATFORMS
) + (
    Queue("manager"),
    Queue("store"),
)
# Celery Routing
task_routes = ([
    ("aleph.collectors.tasks.*", {
        "queue": "collector"
    }),
    ("aleph.storages.tasks.*", {
        "queue": "store"
    }),
    ("aleph.datastores.tasks.*", {
        "queue": "store"
    }),
    ("aleph.tasks.*", {
        "queue": "manager"
Example #5
0
DB_address, DB_user, DB_password, DB_name, redis_info = config()


# Connect to the Redis broker.
# app = Celery('openstack', broker= "redis://:[email protected]:6379/1")
app = Celery('openstack', broker=redis_info)


# Allow Celery workers to run as root.
platforms.C_FORCE_ROOT = True

# Celery task queues: one direct queue per VM lifecycle operation.
# BUGFIX: the original passed ``route_key=`` to Queue(), but kombu's
# Queue takes ``routing_key=`` — so the intended per-queue routing keys
# were never applied. Each queue uses its own name as exchange name and
# routing key.
_VM_QUEUE_NAMES = (
    "CREATEVM",
    "DELETEVM",
    "SHUTDOWNVM",
    "BOOTVM",
    "SUSPENDVM",
    "RESUMEVM",
    "UPDATESYSINFO",
)
app.conf.update(
    task_queues=tuple(
        Queue(name, Exchange(name), routing_key=name)
        for name in _VM_QUEUE_NAMES
    ),
)

def connect_db():
    """
    连接数据库
    """
    try:
        db = MySQLdb.connect(host=DB_address,