Example 1
    def test_arguments(self):
        q = Broadcast(name='test_Broadcast')
        self.assertTrue(q.name.startswith('bcast.'))
        self.assertEqual(q.alias, 'test_Broadcast')
        self.assertTrue(q.auto_delete)
        self.assertEqual(q.exchange.name, 'test_Broadcast')
        self.assertEqual(q.exchange.type, 'fanout')

        q = Broadcast('test_Broadcast', 'explicit_queue_name')
        self.assertEqual(q.name, 'explicit_queue_name')
        self.assertEqual(q.exchange.name, 'test_Broadcast')
Example 2
    def test_arguments(self):
        q = Broadcast(name="test_Broadcast")
        self.assertTrue(q.name.startswith("bcast."))
        self.assertEqual(q.alias, "test_Broadcast")
        self.assertTrue(q.auto_delete)
        self.assertEqual(q.exchange.name, "test_Broadcast")
        self.assertEqual(q.exchange.type, "fanout")

        q = Broadcast("test_Broadcast", "explicit_queue_name")
        self.assertEqual(q.name, "explicit_queue_name")
        self.assertEqual(q.exchange.name, "test_Broadcast")
Example 3
    def test_arguments(self):
        q = Broadcast(name='test_Broadcast')
        assert q.name.startswith('bcast.')
        assert q.alias == 'test_Broadcast'
        assert q.auto_delete
        assert q.exchange.name == 'test_Broadcast'
        assert q.exchange.type == 'fanout'

        q = Broadcast('test_Broadcast', 'explicit_queue_name')
        assert q.name.startswith('explicit_queue_name.')
        assert q.exchange.name == 'test_Broadcast'

        q2 = q(Mock())
        assert q2.name.split('.')[0] == q.name.split('.')[0]
Example 4
class CeleryConf:
    BROKER_URL = conf.rabbitmq.url  # message broker
    BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 180000}

    CELERYD_MAX_TASKS_PER_CHILD = 200
    CELERY_IGNORE_RESULT = True
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_ACCEPT_CONTENT = ['json']
    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_ENABLE_UTC = False
    CELERY_ACKS_LATE = True
    CELERYD_PREFETCH_MULTIPLIER = 1

    # Define the exchanges
    EX_MQ = Exchange('mq', type='direct')
    EX_FAN = Exchange('fans', type='fanout')
    EX_TOPIC = Exchange('topic', type='topic')

    # Define the queues
    CELERY_QUEUES = (
        Queue('mq', exchange=EX_MQ, routing_key='mq'),
        Queue('fan-1', exchange=EX_FAN),
        Queue('fan-2', exchange=EX_FAN),  # queues bound to the same fanout exchange all receive the message
        Broadcast(name='fan-3',
                  exchange=EX_FAN),  # broadcast mode: every worker listening on the same Broadcast queue receives the message
        Queue('topic-1', exchange=EX_TOPIC, routing_key='topic.*'),
        Queue('topic-2', exchange=EX_TOPIC, routing_key='#.job'))

    def queue_name_list(self):
        return [q.name for q in self.CELERY_QUEUES]
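
For context, a minimal sketch of how this configuration fans messages out, assuming the CeleryConf class above is importable and using a hypothetical ping task (not part of the original code):

from celery import Celery

app = Celery('demo')
app.config_from_object(CeleryConf)  # loads BROKER_URL and CELERY_QUEUES above

@app.task(name='ping')
def ping():
    return 'pong'

# One publish to the fanout exchange 'fans' is copied into 'fan-1', 'fan-2'
# and the Broadcast queue 'fan-3', so every bound consumer receives it.
ping.apply_async(exchange='fans')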
Example 5
class CeleryConfigMixin(object):
    CELERY_TASK_DEFAULT_QUEUE = 'default'
    CELERY_TASK_DEFAULT_EXCHANGE = 'default'
    CELERY_TASK_DEFAULT_ROUTING_KEY = 'default'

    CELERY_TASK_QUEUES = (
        Queue('default', Exchange('default'), routing_key='default'),
        Broadcast('broadcast', routing_key='broadcast'),
    )

    CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://*****:*****@localhost/plexlib')
    CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', '')

    CELERY_BEAT_SCHEDULE = {
        'check-volumes': {
            'task': 'plexlib.tasks.check_video_volumes',
            'schedule': timedelta(minutes=30),
            'options': {'queue': 'broadcast'}
        }
    }

    def __init__(self, *args, **kwargs):
        process_environment(self, 'CELERY_')

        super(CeleryConfigMixin, self).__init__(*args, **kwargs)
Example 6
    def test_send_task_message__broadcast_without_exchange(self):
        from kombu.common import Broadcast
        evd = Mock(name='evd')
        self.app.amqp.send_task_message(
            Mock(), 'foo', self.simple_message, retry=False,
            routing_key='xyz', queue=Broadcast('abc'),
            event_dispatcher=evd,
        )
        evd.publish.assert_called()
        event = evd.publish.call_args[0][1]
        assert event['routing_key'] == 'xyz'
        assert event['exchange'] == 'abc'
Example 7
    def __init__(self, app, *args, **kwargs):
        """Constructor."""
        self.app = app
        # Here we can also change application settings, e.g.
        # the task time limit.
        self.app.conf.task_queues = (
            Queue('celery', Exchange('celery', type='direct'),
                  routing_key='celery'),
            Broadcast('broadcast_tasks')
        )
        self.app.conf.task_routes = {
            'gwvolman.tasks.shutdown_container': {'queue': 'broadcast_tasks'}
        }
Example 8
    def test_arguments(self):
        with patch('kombu.common.uuid', return_value='test') as uuid_mock:
            q = Broadcast(name='test_Broadcast')
            uuid_mock.assert_called_with()
            assert q.name == 'bcast.test'
            assert q.alias == 'test_Broadcast'
            assert q.auto_delete
            assert q.exchange.name == 'test_Broadcast'
            assert q.exchange.type == 'fanout'

        q = Broadcast('test_Broadcast', 'explicit_queue_name')
        assert q.name == 'explicit_queue_name'
        assert q.exchange.name == 'test_Broadcast'

        q2 = q(Mock())
        assert q2.name == q.name

        with patch('kombu.common.uuid', return_value='test') as uuid_mock:
            q = Broadcast('test_Broadcast', 'explicit_queue_name', unique=True)
            uuid_mock.assert_called_with()
            assert q.name == 'explicit_queue_name.test'

            q2 = q(Mock())
            assert q2.name.split('.')[0] == q.name.split('.')[0]
Example 9
def init_app(self, app: Flask):
    self.conf.task_result_expires = 18000  # 5 hours.
    self.conf.task_default_queue = 'nerd'
    self.conf.task_queues = (Broadcast('nerd_broadcast'), )
    self.conf.task_routes = {
        'nerd.tasks.corpus.reload': {
            'queue': 'nerd_broadcast'
        }
    }
    self.conf.update(app.config)

    class ContextTask(self.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    self.Task = ContextTask
Example 10
File: tapp.py Project: NGDSG/BAMSI
class Config:
    CELERY_BROKER_URL = config.get('bamsi', 'CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = config.get('bamsi', 'CELERY_RESULT_BACKEND')
    MASTER_IP = config.get('bamsi', 'MASTER_IP')
    MASTER_PORT = config.get('bamsi', 'MASTER_PORT')
    DATA_PATH = config.get('bamsi', 'DATA_PATH')
    CELERY_QUEUES = (
        Queue('default', Exchange('default'), routing_key='default'),
        Broadcast('q1'),
    )
    CELERY_ROUTES = {
        'tapp.readBAM': 'default',
        'tapp.readMock': {
            'queue': 'q1'
        }
    }
    NO_OF_ROWS = 3000
Example 11
def create_queue(queue):
    name = queue[CELERY_QUEUE_NAME]
    exchange_type = queue[CELERY_QUEUE_EXCHANGE]
    routing_key = queue[CELERY_QUEUE_ROUTING_KEY]
    durable = queue.get(CELERY_QUEUES_DURABLE, True)
    if exchange_type == 'fanout':
        # Note: the original passed a misspelled `exchnage=` keyword and a
        # routing_key to Exchange (which has no such attribute); both fixed.
        return Broadcast(name,
                         exchange=Exchange(type=exchange_type),
                         durable=durable,
                         routing_key=routing_key,
                         auto_delete=True)
    else:
        return Queue(name,
                     exchange=Exchange(type=exchange_type),
                     durable=durable,
                     routing_key=routing_key)
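
A hedged usage sketch for create_queue: the CELERY_QUEUE_* constants are not shown in the excerpt, so the key strings below are assumptions, not the project's actual values.

# Hypothetical key constants (assumed, not from the original source):
CELERY_QUEUE_NAME = 'name'
CELERY_QUEUE_EXCHANGE = 'exchange'
CELERY_QUEUE_ROUTING_KEY = 'routing_key'
CELERY_QUEUES_DURABLE = 'durable'

# A 'fanout' definition yields a Broadcast queue; anything else a plain Queue.
bcast = create_queue({'name': 'events', 'exchange': 'fanout',
                      'routing_key': 'events'})
plain = create_queue({'name': 'work', 'exchange': 'direct',
                      'routing_key': 'work', 'durable': False})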
Example 12
File: celery.py Project: mavit/fmn
from celery import Celery
from celery.signals import setup_logging
from kombu.common import Broadcast, Queue

from . import config

_log = logging.getLogger(__name__)

RELOAD_CACHE_EXCHANGE_NAME = 'fmn.tasks.reload_cache'


@setup_logging.connect
def configure_logging(**kwargs):
    """
    Signal sent by Celery when logging needs to be setup for a worker.

    Arguments are unused.
    """
    logging.config.dictConfig(config.app_conf['logging'])
    _log.info('Logging successfully configured for Celery')


#: The celery application object
app = Celery('FMN')
app.conf.task_queues = (
    Broadcast(RELOAD_CACHE_EXCHANGE_NAME),
    Queue('fmn.tasks.unprocessed_messages'),
)
app.conf.update(**config.app_conf['celery'])
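
A hedged dispatch sketch: the task name below is an assumption, but routing by the Broadcast queue's name is enough to fan a message out to every worker consuming it.

# Hypothetical task name; queue= resolves the Broadcast defined above, so the
# message reaches every worker consuming RELOAD_CACHE_EXCHANGE_NAME.
app.send_task('fmn.tasks.reload_cache', queue=RELOAD_CACHE_EXCHANGE_NAME)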
Example 13
class CeleryConf:
    CELERY_ACCEPT_CONTENT = ['json']
    CELERY_IMPORTS = ()
    CELERY_QUEUES = (Broadcast('agent_broadcast_tasks'), )
Example 14
enable_utc = True
task_acks_late = True
worker_prefetch_multiplier = 1
worker_max_tasks_per_child = 200

# Do not modify or delete anything in this file

# Queue('sqlmap', Exchange('hunter_raw_traffic', type='fanout'), routing_key='hunter'),
# Queue('xssfork', Exchange('hunter_raw_traffic', type='fanout'), routing_key='hunter'),
# Queue('hunter', Exchange('hunter_raw_traffic', type='fanout'), routing_key='hunter'),
# Broadcast(exchange=Exchange('hunter_system_notice', type='fanout'), routing_key='system'),

task_queues = {
    Queue('xsseye', Exchange('hunter_raw_traffic', type='fanout'), routing_key='hunter'),
    Queue('task', Exchange('hunter_task_notice', type='fanout'), routing_key='task'),
    Broadcast(exchange=Exchange('hunter_system_notice', type='fanout'), routing_key='system'),
}


class MyRouter(object):
    def route_for_task(self, task, args=None, kwargs=None):
        if task.startswith('hunter_celery.scan_celery'):
            return {
                'exchange': 'hunter_raw_traffic', "routing_key": "hunter"
            }
        elif task.startswith('hunter_celery.system_notice_celery'):
            return {
                'exchange': 'hunter_system_notice', "routing_key": "system"
            }
        elif task.startswith('hunter_celery.task_notice_celery'):
            return {
Example 15
import pandas as pd

from celery import Celery
from kombu.common import Broadcast
from config import file, es

# pip install celery==4.3.0
# pip install elasticsearch==7.8.0

app = Celery(__name__,
             broker='amqp://*****:*****@192.168.199.142:5672/units')
app.conf.task_queues = (Broadcast('units_collector', ), )
app.conf.task_routes = {
    'units_collector': {
        'queue': 'units_collector',
        'exchange_type': 'fanout',
        'exchange': 'units_collector',
    },
}


def query(unit_name):
    response = es.search(
        index='units',
        body={"query": {
            "term": {
                "unit_name.keyword": unit_name
            }
        }})
    return response['hits']['total']['value'] > 0
Example 16
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dva.settings')

from django.conf import settings  # noqa

app = Celery('dva')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.conf.update(
    CELERYD_PREFETCH_MULTIPLIER=1,
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_RESULT_BACKEND='django-db',
)
app.conf.task_queue_max_priority = 10
app.conf.task_queues = (Broadcast('qmanager'),)
app.conf.task_routes = {
    'manage_host': {'queue': 'qmanager'},
}
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)



@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example 17
from home.job import JobDescription
from rec_core.worker import RecommendationWorker
from home.models import News
import celery
import logging
logging.basicConfig(level=logging.DEBUG)
from kombu.common import Broadcast, Queue


# Celery Task wrapper
app = celery.Celery('most-cb', backend='rpc://',
                    broker='pyamqp://{}:{}@{}:{}//'.format(settings.RABBITMQ_USER,
                                                           settings.RABBITMQ_PWD,
                                                           settings.RABBITMQ_HOST,
                                                           settings.RABBITMQ_PORT))
app.config_from_object('celeryconfig')
app.conf.task_queues = (Broadcast('most_broadcast_tasks'), Queue("most_recommend_tasks"))
app.conf.task_routes = {
    'most.append_news': {
        # 'queue': 'broadcast_tasks',
        'exchange': 'most_broadcast_tasks'
    },
    'most.contentbased': {
        'queue': 'most_recommend_tasks'
    },
}
worker = RecommendationWorker()


@app.task
def contentbased(job_description):
    if job_description.task == "get_browsed_embedding":
Example 18
from celery.schedules import crontab

CELERY_TIMEZONE = 'UTC'
CELERYBEAT_SCHEDULE = {
    'collect-articles': {
        'task': 'argos.tasks.periodic.collect',
        'schedule': crontab(minute='*/10')
    },
    'cluster-articles': {
        'task': 'argos.tasks.periodic.cluster_articles',
        'schedule': crontab(minute='*/10')
    }
}

from kombu.common import Broadcast, Queue

# Create a broadcast queue so the tasks are sent to
# *all* workers (that are listening to that queue).
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (Queue('default'), Broadcast('broadcast_tasks'),
                 Queue('clustering'))
CELERY_ROUTES = {
    'argos.tasks.periodic.collect': {
        'queue': 'broadcast_tasks'
    },
    'argos.tasks.periodic.cluster_articles': {
        'queue': 'clustering'
    }
}
Example 19
import os
import sys
from celery import Celery
from kombu.common import Broadcast

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "conf.settings")

from django.conf import settings

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
APPS_DIR = os.path.join(BASE_DIR, 'apps')
sys.path.insert(2, APPS_DIR)

# @todo add expire time to messages
app = Celery('GridScaleWorker')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.task_queues = (Broadcast('broadcast_tasks'), )
app.conf.task_routes = {
    'tasks.reload_foreign_key': {
        'queue': 'broadcast_tasks',
        'exchange': 'broadcast_tasks'
    }
}


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example 20
ASSETS_DEBUG = DEBUG  # noqa: F405
ASSETS_AUTO_BUILD = DEBUG  # noqa: F405
##
# Celery options
##

# load custom kombu encoder
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERY_RESULT_BACKEND = CELERY_BROKER_URL  # noqa: F405
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = "json"

CELERY_TASK_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Broadcast('broadcast_tasks'),
)
CELERY_TASK_ROUTES = {
    'inboxen.tasks.force_garbage_collection': {
        'queue': 'broadcast_tasks'
    }
}

CELERY_TASK_DEFAULT_QUEUE = 'default'
CELERY_TASK_DEFAULT_EXCHANGE = 'default'
CELERY_TASK_DEFAULT_ROUTING_KEY = 'default'

CELERY_BEAT_SCHEDULE = {
    'statistics': {
        'task': 'inboxen.tasks.statistics',
        'schedule': datetime.timedelta(days=1),
Example 21
from kombu import Exchange, Queue
from kombu.common import Broadcast

broker_url = 'amqp://*****:*****@localhost:5672//'

task_queues = (Broadcast('sport_news', routing_key='sport_news'), )

task_create_missing_queues = False
Example 22
import random

from celery import Celery
from kombu import Exchange
from kombu.common import Broadcast


app = Celery(broker='amqp://guest@localhost//', backend='rpc://')

bc_exchange = Exchange('bq', type='fanout')
bc_queue = 'bq'

app.conf.task_queues = (
    Broadcast(name=bc_queue, exchange=bc_exchange),
)


@app.task
def add(x, y):
    print(x, y)
    print(random.random())
    return x + y
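
A hedged usage sketch: publishing through the fanout exchange makes every worker consuming the 'bq' Broadcast queue execute the task; the arguments here are arbitrary.

# Fan the call out: all consumers of the 'bq' Broadcast queue run add(2, 3).
add.apply_async(args=(2, 3), exchange='bq')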
Example 23
from celery.exceptions import WorkerShutdown

logger = get_logger(__name__)

os.environ.setdefault('FORKED_BY_MULTIPROCESSING', '1')

app = Celery('ap_domain_worker')
# app.config_from_object('tbd.celeryconfig')
app.conf.update(
    broker_url='amqp://*****:*****@192.168.1.245/',
    result_backend='rpc://',
    task_serializer='json',
    accept_content=['json'],  # Ignore other content
    result_serializer='json',
    result_expires=1*60,
    task_queues=(Broadcast('broadcast_tasks'), Queue('celery')),
    task_routes={
        'tbd.tasks.upgrade': {
            'queue': 'broadcast_tasks',
            'exchange': 'broadcast_tasks'
        }
    }
)

@celeryd_after_setup.connect
def capture_worker_name(sender, instance, **kwargs):
    os.environ["CELERY_WORKER_NAME"] = sender

@task_prerun.connect
def init_task(sender=None, task=None, task_id=None, **kwargs):
    print('worker {0!r} task {1!s} is running with request: {2}'.format(task.app.Worker, task_id, task.request))
Example 24
from celery.schedules import crontab
from lost.settings import LOST_CONFIG
from lost.logic.pipeline.worker import send_life_sign
from kombu.common import Broadcast

CELERY_IMPORTS = ('lost.logic.tasks', 'lost.api.pipeline.tasks')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_QUEUES = (Broadcast('worker_status'), )

CELERYBEAT_SCHEDULE = {
    'exec_pipe': {
        'task': 'lost.logic.tasks.exec_pipe',
        'schedule': int(LOST_CONFIG.pipe_schedule)
    },
    'worker_life_sign': {
        'task': 'lost.logic.pipeline.worker.send_life_sign',
        'schedule': int(LOST_CONFIG.worker_beat),
        'options': {
            'queue': 'worker_status',
            'exchange': 'worker_status'
        }
    },
    'release_annos': {
        'task': 'lost.logic.tasks.release_annos',
        'schedule': int(LOST_CONFIG.session_timeout) * 60
    }
Example 25
CELERY_TASK_RESULT_EXPIRES = 30  # in seconds

CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (
    Queue('default', routing_key='default.#'),
    Queue('nlp', routing_key='nlp.#'),
    Queue('pe', routing_key='pe.#'),
    Queue('te', routing_key='te.#'),
    Queue('update_engines', routing_key='update.#'),
    Queue('feed', routing_key='feed.#'),
    Queue('consumers', routing_key='consumers.#'),
    Queue('altmetric', routing_key='altmetric.#'),
    Queue('library', routing_key='library.#'),
    Queue('beat', routing_key='beat.#'),
    Queue('test', routing_key='test.#'),
    Broadcast('broadcast_pe_tasks'),
    Broadcast('broadcast_te_tasks'),
)
CELERY_DEFAULT_EXCHANGE = 'tasks'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_DEFAULT_ROUTING_KEY = 'default'

CELERY_ROUTES = ('config.routers.MyRouter', )

CELERYBEAT_SCHEDULE = {
    'update-altmetric': {
        'task': 'etalia.altmetric_app.tasks.update_altmetric_periodic',
        'schedule': crontab(minute=0, hour=0, day_of_week='mon,wed,fri'),
        'options': {
            'queue': 'beat'
        }
Example 26
# delivery_mode: 1 = messages are not written to disk; 2 (default) = messages are persisted to disk
default_exchange = Exchange('celery-demo', delivery_mode=1)
broadcast_exchange = Exchange(
    'celery-demo-broadcast',
    # type='fanout',
    delivery_mode=1)

task_queues = (
    # durable: Boolean, whether the queue survives a broker restart
    Queue('celery-demo',
          default_exchange,
          routing_key='default',
          auto_delete=True,
          durable=True),
    Broadcast('broadcast_tasks', exchange=broadcast_exchange),

    # broadcasting does not seem to trigger task1
    # Queue('task1', broadcast_exchange),
    # Queue('task2', default_exchange, routing_key='task2'),
)

# Configure the routes
task_routes = {
    'broadcast_task': {
        'queue': 'broadcast_tasks',
        'exchange': 'broadcast_tasks'
    },
    'task1': {
        'queue': 'task1',
    },
Example 27
CELERY_BROKER_URL = 'amqp://*****:*****@localhost:5672//'
CELERY_EVENT_QUEUE_TTL = 5
CELERY_TASK_DEFAULT_QUEUE = 'tower'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT = None
CELERY_TASK_SOFT_TIME_LIMIT = None
CELERY_WORKER_POOL_RESTARTS = True
CELERY_BEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERY_BEAT_MAX_LOOP_INTERVAL = 60
CELERY_RESULT_BACKEND = 'django-db'
CELERY_IMPORTS = ('awx.main.scheduler.tasks', )
CELERY_TASK_QUEUES = (Queue('tower', Exchange('tower'), routing_key='tower'),
                      Broadcast('tower_broadcast_all'))
CELERY_TASK_ROUTES = {}

CELERY_BEAT_SCHEDULE = {
    'tower_scheduler': {
        'task': 'awx.main.tasks.awx_periodic_scheduler',
        'schedule': timedelta(seconds=30),
        'options': {
            'expires': 20,
        }
    },
    'admin_checks': {
        'task': 'awx.main.tasks.run_administrative_checks',
        'schedule': timedelta(days=30)
    },
    'cluster_heartbeat': {
Example 28
IGNORE_CELERY_INSPECTOR = True
CELERY_RDBSIG = 1
CELERY_ALWAYS_EAGER = True
CELERY_BROKER_URL = BROKER_URL
CELERY_BROKER_POOL_LIMIT = None
CELERY_EVENT_QUEUE_TTL = 5
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TRACK_STARTED = True
CELERYD_TASK_TIME_LIMIT = 86400
CELERYD_TASK_SOFT_TIME_LIMIT = None
CELERYD_POOL_RESTARTS = True
CELERY_RESULT_BACKEND = 'django-db'
CELERY_IMPORTS = ('cyborgbackup.main.utils.tasks', 'cyborgbackup.main.tasks')
CELERY_QUEUES = (
    Queue('cyborgbackup', Exchange('cyborgbackup'), routing_key='cyborgbackup'),
    Broadcast('cyborgbackup_broadcast_all')
)
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERY_ROUTES = {}
CELERY_BEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERY_BEAT_SCHEDULE_FILENAME = os.path.join(
    BASE_DIR,
    'var',
    'run',
    'celerybeat-schedule')
CELERY_BEAT_MAX_LOOP_INTERVAL = 60
CELERY_BEAT_SCHEDULE = {
    'cyborgbackup_notify_daily': {
Example 29
from celery import Celery
from celery.utils.log import get_task_logger

from kombu import Queue, Exchange
from kombu.common import Broadcast

import endpoints
import sys

from custom_logging import logger
from settings import BROKER_URL, BROKER_QUEUE, BROKER_TASK

app = Celery('api.tasks', broker=BROKER_URL)
app.conf.update(
    CELERY_DEFAULT_QUEUE=BROKER_QUEUE,
    CELERY_QUEUES=(Broadcast(BROKER_QUEUE), ),
)


@app.task(name=BROKER_TASK)
def on_measurement_received(measurement_json):
    logger.debug('[opentele] Measurement received: %s' % (measurement_json))

    try:
        endpoints.process_measurement(measurement_json)
    except Exception as e:
        logger.error('[opentele] Error processing measurement: %s' % (e))
Example 30
# SECURE_HSTS_INCLUDE_SUBDOMAINS = True
# SECURE_BROWSER_XSS_FILTER = True
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = True
# SECURE_CONTENT_TYPE_NOSNIFF = True
# X_FRAME_OPTIONS = 'DENY'

CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_ENABLE_UTC = True
CELERY_DEFAULT_QUEUE = 'host_ctrl'
CELERY_QUEUES = (
    Queue('host_ctrl', Exchange('host_ctrl'), routing_key='host_ctrl.#'),
    Broadcast(name='every_host', queue='every_host', routing_key='every_host.#')
)
CELERY_ROUTES = {'host.dockerStatus': {'queue': 'every_host'}}

CELERYBEAT_SCHEDULE = {
    'docker-status-every-10-seconds': {
        'task': 'host.dockerStatus',
        'schedule': timedelta(seconds=10),
        'args': (),
    },
}

if 'REDIS_URL' in os.environ:
    REDIS_URL = os.environ['REDIS_URL']
elif os.path.exists('/var/run/redis/redis.sock'):
    REDIS_URL = 'unix:///var/run/redis/redis.sock'