Example #1
        vhost=os.environ.get('RABBIT_ENV_VHOST', ''))

# We don't want dead connections piling up on RabbitMQ, so we negotiate heartbeats with the broker
BROKER_HEARTBEAT = '?heartbeat=30'
if not BROKER_URL.endswith(BROKER_HEARTBEAT):
    BROKER_URL += BROKER_HEARTBEAT

BROKER_POOL_LIMIT = 1
BROKER_CONNECTION_TIMEOUT = 10

# Celery configuration

# configure queues, currently we have only one
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
)

# Sensible settings for celery
CELERY_ALWAYS_EAGER = False
CELERY_ACKS_LATE = True
CELERY_TASK_PUBLISH_RETRY = True
CELERY_DISABLE_RATE_LIMITS = False

# By default we ignore task results.
# If you want to see results and try out tasks interactively, change it to False,
# or override this setting at the task level.
CELERY_IGNORE_RESULT = True
CELERY_SEND_TASK_ERROR_EMAILS = False
CELERY_TASK_RESULT_EXPIRES = 600
Example #2
                'propagate': False,
            },
        },
    }
# redis celery/message broker config
from kombu import Exchange, Queue
import djcelery
# BROKER_URL with AWS ElastiCache redis looks something like:
# 'redis://xx-yy-zzrr0aax9a.ntmprk.0001.usw2.cache.amazonaws.com:6379/1'
BROKER_URL = 'redis://127.0.0.1:6379/1'
BROKER_HOST = '127.0.0.1'
CELERY_DEFAULT_QUEUE = 'seed-dev'
CELERY_QUEUES = (
    Queue(
        CELERY_DEFAULT_QUEUE,
        Exchange(CELERY_DEFAULT_QUEUE),
        routing_key=CELERY_DEFAULT_QUEUE
    ),
)
djcelery.setup_loader()

try:
    INSTALLED_APPS += (
        'lettuce.django',
        'salad',
    )
    TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
    NOSE_PLUGINS = [
        'nose_exclude.NoseExclude',
    ]
    NOSE_ARGS = ['--exclude-dir=data_importer']
Example #3
# app = Celery(__name__)
# app.conf.update({
#     'broker_url': 'amqp://*****:*****@localhost/subtleseeker_vhost',
#     # 'imports': (
#     #     'tasks',
#     # ),
#     # 'task_routes': ('task_router.TaskRouter'),
#     # 'task_serializer': 'json',
#     # 'result_serializer': 'json',
#     # 'accept_content': ['json']
#     })

default_exchange = Exchange(default_exchange_name, type='direct')
default_queue = Queue(default_queue_name,
                      default_exchange,
                      routing_key=default_routing_key,
                      queue_arguments={
                          'x-dead-letter-exchange': deadletter_exchange_name,
                          'x-dead-letter-routing-key': deadletter_routing_key
                      })

app.conf.task_queues = (default_queue, )

# Add a bootstep to workers so they declare the DLX and DLQ if they don't exist (sketched below)
app.steps['worker'].add(DeclareDLXnDLQ)

app.conf.task_default_queue = default_queue_name
app.conf.task_default_exchange = default_exchange_name
app.conf.task_default_routing_key = default_routing_key
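This example registers a DeclareDLXnDLQ worker step but does not show its definition. Below is a minimal sketch of how such a bootstep is commonly written, assuming the deadletter_* names used above (the values below are hypothetical stand-ins); the project's real class may differ.

from celery import bootsteps
from kombu import Exchange, Queue

# Hypothetical stand-ins for the deadletter_* names defined in the snippet above.
deadletter_exchange_name = 'deadletter'
deadletter_queue_name = 'deadletter'
deadletter_routing_key = 'deadletter'


class DeclareDLXnDLQ(bootsteps.StartStopStep):
    """Worker bootstep that declares the dead-letter exchange and queue."""

    requires = {'celery.worker.components:Pool'}

    def start(self, worker):
        dlx = Exchange(deadletter_exchange_name, type='direct')
        dead_letter_queue = Queue(deadletter_queue_name, dlx,
                                  routing_key=deadletter_routing_key)
        # Declare the DLX and DLQ over a broker connection from the worker's pool.
        with worker.app.pool.acquire() as conn:
            dead_letter_queue.bind(conn).declare()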
"""Module with Celery configurations to Euromillions Results worker."""
from kombu import Queue

task_acks_late = True

worker_prefetch_multiplier = 1

task_queues = [Queue(name="euro")]

result_expires = 60 * 60 * 48  # 48 hours in seconds
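This module uses Celery's lowercase setting names; a minimal sketch of how a worker app could load it, assuming the module is importable as 'celeryconfig' (a hypothetical path standing in for wherever it actually lives):

from celery import Celery

app = Celery('euromillions_results')
# 'celeryconfig' is a placeholder for the settings module shown above.
app.config_from_object('celeryconfig')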
Example #5
        "LOCATION": REDIS_URL,
        "TIMEOUT": 60 * 60 * 24 * 3,
        "OPTIONS": {
            "MAX_ENTRIES": None,
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        }
    }
}

# CELERY
CELERY_BROKER_URL = REDIS_URL
CELERY_RESULT_BACKEND = 'django-db'
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
from celery_app import app  # pylint: disable=wrong-import-position,unused-import
CELERY_TASK_QUEUES = [
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('perm', Exchange('perm'), routing_key='perm'),
    Queue('dept', Exchange('dept'), routing_key='dept'),
    Queue('group', Exchange('group'), routing_key='group'),
    Queue('sql_ldap', Exchange('sql_ldap'), routing_key='sql_ldap'),
]
CELERY_TASK_DEFAULT_QUEUE = 'default'
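The extra queues above only receive work if tasks are routed to them; a hedged sketch of what matching routes could look like (the dotted task paths are hypothetical, and anything unrouted falls back to CELERY_TASK_DEFAULT_QUEUE):

# Hypothetical task paths; the project's real task modules are not shown here.
CELERY_TASK_ROUTES = {
    'perms.tasks.sync_permissions': {'queue': 'perm', 'routing_key': 'perm'},
    'orgs.tasks.sync_departments': {'queue': 'dept', 'routing_key': 'dept'},
    'orgs.tasks.sync_groups': {'queue': 'group', 'routing_key': 'group'},
    'sync.tasks.sql_to_ldap': {'queue': 'sql_ldap', 'routing_key': 'sql_ldap'},
}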

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

STATIC_URL = '/static/'

STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')

STATICFILES_DIRS = [
Example #6
    def new_missing(self, name):
        return Queue(name, self.autoexchange(name), name)
Example #7
    def queue(self):
        queue = Queue(name="queue")
        return queue
Example #8
#: celery worker config
CELERY_WORKER_DISABLE_RATE_LIMITS = True
CELERY_WORKER_TASK_SOFT_TIME_LIMIT = 300
CELERY_WORKER_LOG_FORMAT = '%(message)s level=%(levelname)s process=%(processName)s'
CELERY_WORKER_TASK_LOG_FORMAT = ' '.join(
    [CELERY_WORKER_LOG_FORMAT, 'task=%(task_name)s task_id=%(task_id)s'])
CELERY_WORKER_CONCURRENCY = env('CELERY_WORKER_CONCURRENCY') or None

#: celery routing config
CELERY_TASK_DEFAULT_QUEUE = celery_queue('default')
CELERY_TASK_DEFAULT_EXCHANGE = celery_queue('default')
CELERY_TASK_DEFAULT_ROUTING_KEY = 'default'

CELERY_TASK_QUEUES = (
    Queue(celery_queue('default'),
          Exchange(celery_queue('default')),
          routing_key='default'),
    Queue(celery_queue('expiry'),
          Exchange(celery_queue('expiry'), type='topic'),
          routing_key='expiry.#'),
    Queue(celery_queue('legal'),
          Exchange(celery_queue('legal'), type='topic'),
          routing_key='legal.#'),
    Queue(celery_queue('publish'),
          Exchange(celery_queue('publish'), type='topic'),
          routing_key='publish.#'),
)

CELERY_TASK_ROUTES = {
    'apps.archive.content_expiry': {
        'queue': celery_queue('expiry'),
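This example (and a later one) calls a celery_queue() helper that is not shown. A minimal sketch of such a name-prefixing helper, assuming it exists to namespace queues per deployment; the environment variable name is an assumption:

import os

# Prefix queue/exchange names so several deployments can share one broker.
# CELERY_QUEUE_PREFIX is a hypothetical variable name.
_QUEUE_PREFIX = os.environ.get('CELERY_QUEUE_PREFIX', '')


def celery_queue(name):
    """Return the queue or exchange name with the deployment prefix applied."""
    return _QUEUE_PREFIX + name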
Example #9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
"""periodic_task_settings -- settings for celery periodic tasks."""

from datetime import timedelta

from celery.schedules import crontab
from kombu import Exchange, Queue

CELERY_QUEUES = (
    Queue('celery', routing_key='celery'),
    Queue('feeds', routing_key='feeds'),
)

CELERYBEAT_SCHEDULE = {
    'expire-tasks': {
        'task': 'teams.tasks.expire_tasks',
        'schedule': crontab(minute=0, hour=7),
    },
    'add_videos_notification_daily': {
        'task': 'teams.tasks.add_videos_notification_daily',
        'schedule': crontab(minute=0, hour=23),
    },
    'add_videos_notification_hourly': {
        'task': 'teams.tasks.add_videos_notification_hourly',
        'schedule': crontab(minute=0),
Example #10
from manager.tables_accessors import add_answerset, get_question_by_id, get_qgraph_id_by_question_id
from manager.logging_config import set_up_main_logger, clear_log_handlers, add_task_id_based_handler  # set up the logger

logger = logging.getLogger(__name__)

# set up Celery
celery = Celery(app.name)
celery.conf.update(
    broker_url=os.environ["CELERY_BROKER_URL"],
    result_backend=os.environ["CELERY_RESULT_BACKEND"],
    task_track_started=True,
)
task_exchange = Exchange('manager', type='topic')
celery.conf.task_queues = (
    Queue('manager_answer',
          exchange=task_exchange,
          routing_key='manager.answer'),
    Queue('manager_update',
          exchange=task_exchange,
          routing_key='manager.update'),
    Queue('manager_pubmed',
          exchange=task_exchange,
          routing_key='manager.pubmed'),
)


@signals.after_task_publish.connect()
def initialize_task(headers, body, exchange, routing_key, **kwargs):
    '''
        Runs when a task is published to the Celery queue.
        We use this to save initial values for update and answer tasks
Example #11
CELERY_TASK_RESULT_EXPIRES = 1
CELERY_DISABLE_RATE_LIMITS = True
CELERY_DEFAULT_QUEUE = "default"
CELERY_DEFAULT_EXCHANGE = "default"
CELERY_DEFAULT_EXCHANGE_TYPE = "direct"
CELERY_DEFAULT_ROUTING_KEY = "default"
CELERY_CREATE_MISSING_QUEUES = True
CELERY_IMPORTS = (
    'geonode.tasks.deletion',
    'geonode.tasks.update',
    'geonode.tasks.email'
)


CELERY_QUEUES = [
    Queue('default', routing_key='default'),
    Queue('cleanup', routing_key='cleanup'),
    Queue('update', routing_key='update'),
    Queue('email', routing_key='email'),
]


# AWS S3 Settings

S3_STATIC_ENABLED = os.environ.get('S3_STATIC_ENABLED', False)
S3_MEDIA_ENABLED = os.environ.get('S3_MEDIA_ENABLED', False)

# Required to run Sync Media to S3
AWS_BUCKET_NAME = os.environ.get('S3_BUCKET_NAME', '')

AWS_STORAGE_BUCKET_NAME = os.environ.get('S3_BUCKET_NAME', '')
Example #12
    # Celery the same as our old NSQ worker:
    task_acks_late=True,
    worker_disable_rate_limits=True,
    task_ignore_result=True,
    imports=("h.tasks.admin", "h.tasks.cleanup", "h.tasks.indexer",
             "h.tasks.mailer"),
    task_routes={
        "h.tasks.indexer.add_annotation": "indexer",
        "h.tasks.indexer.delete_annotation": "indexer",
        "h.tasks.indexer.reindex_user_annotations": "indexer",
    },
    task_serializer="json",
    task_queues=[
        Queue(
            "celery",
            durable=True,
            routing_key="celery",
            exchange=Exchange("celery", type="direct", durable=True),
        ),
        Queue(
            "indexer",
            durable=True,
            routing_key="indexer",
            exchange=Exchange("indexer", type="direct", durable=True),
        ),
    ],
    # Only accept one task at a time. This also probably isn't what we want
    # (especially not for, say, a search indexer task) but it makes the
    # behaviour consistent with the previous NSQ-based worker:
    worker_prefetch_multiplier=1,
)
Example #13
    ),
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
}

DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 * 10

# celery
CELERY_DEFAULT_QUEUE = 'default'

CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
BROKER_URL = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'

CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('sequential_queue', Exchange('long'), routing_key='sequential_queue')
)
CELERY_ROUTES = {
    'giscube.tasks.async_haystack_rebuild_index': {
        'queue': 'sequential_queue'
    }
}

CELERY_RESULT_BACKEND = 'django-db'
CELERY_CACHE_BACKEND = 'django-cache'
CELERY_TASK_TRACK_STARTED = True

USER_ASSETS_STORAGE_CLASS = 'django.core.files.storage.FileSystemStorage'

LAYERSERVER_FILE_STORAGE_CLASS = 'django.core.files.storage.FileSystemStorage'
Example #14
"""

Example producer that sends a single message and exits.

You can use `complete_receive.py` to receive the message sent.

"""
from __future__ import with_statement

from kombu import Connection, Producer, Exchange, Queue

#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange('kombu_demo', type='direct')
queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')


with Connection('amqp://*****:*****@localhost:5672//') as connection:

    #: Producers are used to publish messages.
    #: A default exchange and routing key can also be specified
    #: as arguments to the Producer, but we'd rather specify this explicitly
    #: at the publish call.
    producer = Producer(connection)

    #: Publish the message using the json serializer (which is the default),
    #: and zlib compression.  The kombu consumer will automatically detect
    #: encoding, serialization and compression used and decode accordingly.
    producer.publish({'hello': 'world'},
                     exchange=exchange,
                     routing_key='kombu_demo',
                     serializer='json',
                     compression='zlib')
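The docstring above points at complete_receive.py; a matching consumer might look roughly like this (the broker credentials are placeholders):

from kombu import Connection, Consumer, Exchange, Queue

exchange = Exchange('kombu_demo', type='direct')
queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')


def on_message(body, message):
    print('received: %r' % (body,))
    message.ack()


with Connection('amqp://guest:guest@localhost:5672//') as connection:
    with Consumer(connection, queues=queue, callbacks=[on_message]):
        # Wait up to 10 seconds for the published message to arrive.
        connection.drain_events(timeout=10)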
Example #15
    def __init__(self, connection, queue_name):
        self.connection = connection
        self.queues = [Queue(queue_name, durable=False)]
Example #16
CELERY_SEND_EVENTS = True
CELERY_TASK_RESULT_EXPIRES = 3 * 60 * 60  # Store results for 3 hours
CELERYD_MAX_TASKS_PER_CHILD = 10
CELERYD_LOG_FORMAT = "[%(asctime)s: %(name)s-%(levelname)s"\
    "/%(processName)s [PID:%(process)d]"\
    " @ %(pathname)s on %(lineno)d] %(message)s"
CELERYD_TASK_LOG_FORMAT = "[%(asctime)s: %(name)s-%(levelname)s"\
    "/%(processName)s [PID:%(process)d]"\
    " [%(task_name)s(%(task_id)s)] "\
    "@ %(pathname)s on %(lineno)d] %(message)s"
# To use Manual Routing:
# - 1. Create an Exchange,
# - 2. Create a Queue,
# - 3. Bind Queue to Exchange
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('email', Exchange('default'), routing_key='email.sending'),
    Queue('ssh_deploy', Exchange('deployment'), routing_key='long.deployment'),
    Queue('fast_deploy',
          Exchange('deployment'),
          routing_key='short.deployment'),
    Queue('imaging', Exchange('imaging'), routing_key='imaging'),
    Queue('periodic', Exchange('periodic'), routing_key='periodic'),
)
CELERY_DEFAULT_QUEUE = 'default'
CELERY_DEFAULT_ROUTING_KEY = "default"
CELERY_DEFAULT_EXCHANGE = 'default'
CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
# NOTE: We are using atmosphere's celery_router as an interim solution (a sketch of such a router follows below).
CELERY_ROUTES = ('atmosphere.celery_router.CloudRouter', )
# # Django-Celery Development settings
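A minimal sketch of what a custom router like the referenced CloudRouter might look like under the old-style (Celery 3.x) router API; the task name patterns are made up and the real atmosphere router is not shown:

class CloudRouter(object):
    """Hypothetical router mapping tasks onto the queues defined above."""

    def route_for_task(self, task, args=None, kwargs=None):
        # Task name patterns here are purely illustrative.
        if 'deploy' in task:
            return {'queue': 'ssh_deploy', 'routing_key': 'long.deployment'}
        if task.endswith('send_email'):
            return {'queue': 'email', 'routing_key': 'email.sending'}
        return None  # fall through to CELERY_DEFAULT_QUEUE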
Example #17
class Config:
    # Enable protection against cross-site request forgery (CSRF)
    SECRET_KEY = os.environ.get('SECRET_KEY') or os.urandom(24)
    """SQLALCHEMY配置"""
    SQLALCHEMY_COMMIT_ON_TEARDOWN = False
    SQLALCHEMY_RECORD_QUERIES = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    """配置上传文件相关"""

    UPLOAD_FOLDER = os.path.abspath(os.path.join(os.getcwd(), "..", "disk"))
    # UPLOAD_FOLDER = os.path.join(os.path.dirname(__file__), 'uploads')
    ALLOWED_EXTENSIONS = ('txt', 'png', 'jpg', 'jpeg')
    """Flask Uploads 配置"""
    UPLOADED_PHOTOS_DEST = UPLOAD_FOLDER
    UPLOADS_DEFAULT_DEST = UPLOAD_FOLDER
    MAX_CONTENT_LENGTH = 10 * 1024 * 1024
    """Flask Security 配置"""
    SECURITY_PASSWORD_SALT = "saltValue"
    SECURITY_PASSWORD_HASH = "sha512_crypt"
    """ Logging 设置 """
    LOGGING_FORMATTER = "%(asctime)-15s %(levelname)s %(filename)s %(lineno)d %(process)d %(message)s"
    LOGGING_DATE_FORMATTER = "%a %d %b %Y %H:%M:%S"
    LOGGING_DIR = os.path.join(root_dir, 'logs')
    """Celery 配置"""
    from datetime import timedelta
    from kombu import Exchange, Queue
    # Import the modules where the tasks live: every function decorated with
    # celery.task must have its module listed here. The test functions we created
    # earlier live in handlers.async_tasks and handlers.schedules, so those module
    # paths would be given as dotted strings, collected into a tuple and assigned
    # to CELERY_IMPORTS; Celery then finds these modules on startup and imports
    # the tasks inside them.
    CELERY_IMPORTS = ('celery_tasks.tasks',)

    # Define several queues for Celery. CELERY_QUEUES is a tuple whose elements
    # are Queue instances; each Queue is created with a name and a routing_key,
    # where name is the queue name.
    CELERY_QUEUES = (
        Queue('default', routing_key='task.#'),
        Queue('web_task', routing_key='web.#'),
        Queue('debet_task', routing_key='debet.#',
              delivery_mode=1),  # transient ("burn after reading") delivery
    )
    # Default exchange name
    CELERY_DEFAULT_EXCHANGE = 'tasks'
    # Default exchange type
    CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
    # Default routing key
    CELERY_DEFAULT_ROUTING_KEY = 'task.default'
    # Finally, assign each task to a queue. Build a dict whose keys are task
    # names, i.e. the task's module path plus the function name: async_send_email
    # defined in handlers.async_tasks has the task name
    # handlers.async_tasks.async_send_email. Each value is itself a dict giving
    # the target queue name and the matching routing_key; both must agree exactly
    # with what CELERY_QUEUES defines.
    CELERY_ROUTES = {
        'celery_tasks.tasks.async_email_to': {
            'queue': 'web_task',
            'routing_key': 'task.email'
        },
        'celery_tasks.tasks.async_parser_feed': {
            'queue': 'debet_task',
            'routing_key': 'task.parser'
        },
        'celery_tasks.tasks.report_local_ip': {
            'queue': 'debet_task',
            'routing_key': 'task.report.ip'
        },
        'celery_tasks.tasks.parse_rsses': {
            'queue': 'debet_task',
            'routing_key': 'task.parse.rss'
        },
    }

    CELERY_RESULT_BACKEND = 'redis://localhost:6379'

    BROKER_URL = 'redis://localhost:6379/0'

    CELERY_TIMEZONE = 'Asia/Shanghai'

    CELERY_TASK_SERIALIZER = 'json'

    CELERY_RESULT_SERIALIZER = 'json'

    CELERY_ACCEPT_CONTENT = ['json']
    # Whether to ignore task results
    CELERY_IGNORE_RESULT = False

    # Define the periodic tasks
    CELERYBEAT_SCHEDULE = {
        'celery_tasks.tasks.parse_rsses': {
            'task': 'celery_tasks.tasks.parse_rsses',
            'schedule': timedelta(seconds=60 * 60 * 4),
            'args': ()
        }
    }

    # Rate-limit these task types to one run per minute
    CELERY_ANNOTATIONS = {
        'celery_tasks.tasks.report_local_ip': {
            'rate_limit': '1/m'
        },
        'celery_tasks.tasks.parse_rsses': {
            'rate_limit': '1/m'
        },
    }

    @classmethod
    def init_app(cls, *args, **kwargs):
        log_level_configs = {
            logging.DEBUG: 'debug.log',
            logging.ERROR: 'error.log'
        }
        for level, file_name in log_level_configs.items():
            level: str = logging.getLevelName(level)
            filename = os.path.join(Config.LOGGING_DIR, file_name)
            if not os.path.exists(Config.LOGGING_DIR):
                os.makedirs(Config.LOGGING_DIR)

            if not os.path.exists(filename):
                open(filename, 'w').close()

            logging.basicConfig(filename=filename, level=level)
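A minimal sketch of how the Config above might be wired into a Flask app and a Celery instance; the factory name is hypothetical and it assumes the Config class defined above is in scope:

from celery import Celery
from flask import Flask


def create_app_and_celery(config_class=Config):
    """Hypothetical factory tying the Config above to Flask and Celery."""
    flask_app = Flask(__name__)
    flask_app.config.from_object(config_class)
    config_class.init_app(flask_app)

    celery = Celery(flask_app.import_name, broker=config_class.BROKER_URL)
    # Push the CELERY_* settings (queues, routes, schedule) into Celery.
    celery.conf.update(flask_app.config)
    return flask_app, celery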
Example #18
from django.core.management.base import BaseCommand
from kombu import Connection, Queue
from kombu.mixins import ConsumerProducerMixin

from app.settings import CELERY_BROKER_URL

queue = Queue('celery-default', routing_key='celery-default', no_declare=True)
dead_letter_queue = Queue('celery-default-XQ',
                          routing_key='celery-default-XQ',
                          no_declare=True)


class Command(BaseCommand):
    def handle(self, *args, **options):
        with Connection(CELERY_BROKER_URL) as connection:
            worker = Worker(connection, dead_letter_queue, queue, 1)
            worker.run()


class Worker(ConsumerProducerMixin):
    def __init__(self, connection, queue_from, queue_to, read_limit=1000):
        self.connection = connection
        self.queue_from = queue_from
        self.queue_to = queue_to
        self.read_limit = read_limit
        self.read_count = 0

    def get_consumers(self, Consumer, channel):
        return [
            Consumer(
                queues=[self.queue_from],
Example #19
from nameko.amqp.publish import Publisher as PublisherCore
from nameko.amqp.publish import get_producer
from nameko.constants import AMQP_URI_CONFIG_KEY, HEARTBEAT_CONFIG_KEY
from nameko.containers import WorkerContext
from nameko.exceptions import ContainerBeingKilled
from nameko.messaging import (Consumer, HeaderDecoder, HeaderEncoder,
                              Publisher, QueueConsumer, consume)
from nameko.testing.services import dummy, entrypoint_hook, entrypoint_waiter
from nameko.testing.utils import (ANY_PARTIAL, DummyProvider, get_extension,
                                  unpack_mock_call, wait_for_call)
from nameko.testing.waiting import wait_for_call as patch_wait

from test import skip_if_no_toxiproxy

foobar_ex = Exchange('foobar_ex', durable=False)
foobar_queue = Queue('foobar_queue', exchange=foobar_ex, durable=False)

CONSUME_TIMEOUT = 1.2  # a bit more than 1 second


@pytest.yield_fixture
def patch_maybe_declare():
    with patch('nameko.messaging.maybe_declare', autospec=True) as patched:
        yield patched


def test_consume_provider(mock_container):

    container = mock_container
    container.shared_extensions = {}
    container.service_name = "service"
Example #20
CELERY_IMPORTS = (
    'sentry.tasks.beacon',
    'sentry.tasks.clear_expired_snoozes',
    'sentry.tasks.check_auth',
    'sentry.tasks.deletion',
    'sentry.tasks.digests',
    'sentry.tasks.email',
    'sentry.tasks.merge',
    'sentry.tasks.store',
    'sentry.tasks.options',
    'sentry.tasks.ping',
    'sentry.tasks.post_process',
    'sentry.tasks.process_buffer',
)
CELERY_QUEUES = [
    Queue('default', routing_key='default'),
    Queue('alerts', routing_key='alerts'),
    Queue('auth', routing_key='auth'),
    Queue('cleanup', routing_key='cleanup'),
    Queue('search', routing_key='search'),
    Queue('events', routing_key='events'),
    Queue('update', routing_key='update'),
    Queue('email', routing_key='email'),
    Queue('options', routing_key='options'),
    Queue('digests.delivery', routing_key='digests.delivery'),
    Queue('digests.scheduling', routing_key='digests.scheduling'),
]

for queue in CELERY_QUEUES:
    queue.durable = False
Example #21
        class Service(object):
            name = "consume"

            @consume(Queue("test_queue"))
            def recv(self, payload):
                tracker("recv", payload)
Example #22
from boto import connect_s3
from boto.s3.key import Key
from kombu import Exchange, Queue
from otpsetup.shortcuts import DjangoBrokerConnection
from otpsetup.shortcuts import check_for_running_instance
from otpsetup.client.models import GtfsFile
from otpsetup import settings
from shutil import copyfileobj
from tempfile import TemporaryFile
from urllib2 import urlopen

import uuid

print "Starting Transloader"

exchange = Exchange("amq.direct", type="direct", durable=True)
queue = Queue("transload", exchange=exchange, routing_key="transload")


def s3_bucket(cache={}):
    # The mutable default argument is used intentionally as a memoization cache.
    if 'bucket' not in cache:
        connection = connect_s3(settings.AWS_ACCESS_KEY_ID,
                                settings.AWS_SECRET_KEY)
        cache['bucket'] = connection.get_bucket(settings.S3_BUCKET)
    return cache['bucket']


def s3_key(bucket, gtfsfile):
Example #23
#!/usr/bin/env python

from kombu import Connection, Exchange, Queue


def process_message(body, message):
    print body
    message.ack()


nova_exchange = Exchange('nova', 'topic', durable=False)
notifications_queue = Queue('notification-listener',
                            exchange=nova_exchange,
                            routing_key='notifications.info')
conn = Connection('amqp://*****:*****@192.168.0.10//')
consumer = conn.Consumer(notifications_queue, callbacks=[process_message])
consumer.consume()

while True:
    conn.drain_events()
Example #24
#from flask import current_app
from kombu import Queue
import os
import json
from sqlalchemy.orm import exc as ormexc

# ============================= INITIALIZATION ==================================== #

proj_home = os.path.realpath(os.path.join(os.path.dirname(__file__), '../'))
app = app_module.myADSCelery('myADS-pipeline',
                             proj_home=proj_home,
                             local_config=globals().get('local_config', {}))
logger = app.logger

app.conf.CELERY_QUEUES = (Queue('process', app.exchange,
                                routing_key='process'), )

# ============================= TASKS ============================================= #


@app.task(queue='process')
def task_process_myads(message):
    """
    Process the myADS notifications for a given user

    :param message: contains the message inside the packet
        {
         'userid': adsws user ID,
         'frequency': 'daily' or 'weekly',
         'force': Boolean (if present, we'll reprocess myADS notifications for the user,
            even if they were already processed today)
Example #25
# Specific Celery settings. Modify as needed or leave as default.
CELERY_ALWAYS_EAGER = False
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_IGNORE_RESULT = False
CELERY_SEND_EVENTS = True
CELERY_TASK_RESULT_EXPIRES = 24 * 3600
CELERY_DISABLE_RATE_LIMITS = True
CELERY_DEFAULT_QUEUE = "default"
CELERY_DEFAULT_EXCHANGE = "default"
CELERY_DEFAULT_EXCHANGE_TYPE = "direct"
CELERY_DEFAULT_ROUTING_KEY = "default"
CELERY_CREATE_MISSING_QUEUES = True

# Define Celery queues to avoid clashes between tasks. Leave as default.
CELERY_QUEUES = [
    Queue('default', routing_key='default'),
    Queue('cleanup', routing_key='cleanup'),
    Queue('update', routing_key='update'),
    Queue('email', routing_key='email'),
    Queue('inasafe-headless', routing_key='inasafe-headless'),
    Queue('geosafe', routing_key='geosafe'),
]

# Schedule for periodic tasks
CELERYBEAT_SCHEDULE = {
    # executes every night at 0:0 AM
    'clean-impact-nightly': {
        'task': 'geosafe.tasks.analysis.clean_impact_result',
        'schedule': crontab(hour='0', minute='0')
    }
}
Example #26
CELERY_WORKER_TASK_LOG_FORMAT = " ".join(
    [CELERY_WORKER_LOG_FORMAT, "task=%(task_name)s task_id=%(task_id)s"])
CELERY_WORKER_CONCURRENCY = env("CELERY_WORKER_CONCURRENCY") or None
CELERY_WORKER_PREFETCH_MULTIPLIER = 1

#: celery routing config
CELERY_TASK_DEFAULT_QUEUE = celery_queue("default")
CELERY_TASK_DEFAULT_EXCHANGE = celery_queue("default")
CELERY_TASK_DEFAULT_ROUTING_KEY = "default"

HIGH_PRIORITY_QUEUE = celery_queue("publish_priority")
HIGH_PRIORITY_QUEUE_ENABLED = False

CELERY_TASK_QUEUES = (
    Queue(celery_queue("default"),
          Exchange(celery_queue("default")),
          routing_key="default"),
    Queue(celery_queue("expiry"),
          Exchange(celery_queue("expiry"), type="topic"),
          routing_key="expiry.#"),
    Queue(celery_queue("legal"),
          Exchange(celery_queue("legal"), type="topic"),
          routing_key="legal.#"),
    Queue(celery_queue("ingest"),
          Exchange(celery_queue("ingest"), type="topic"),
          routing_key="ingest.#"),
    Queue(celery_queue("publish"),
          Exchange(celery_queue("publish"), type="topic"),
          routing_key="publish.#"),
    Queue(
        HIGH_PRIORITY_QUEUE,
Example #27
    'sentry.tasks.email',
    'sentry.tasks.merge',
    'sentry.tasks.options',
    'sentry.tasks.ping',
    'sentry.tasks.post_process',
    'sentry.tasks.process_buffer',
    'sentry.tasks.reports',
    'sentry.tasks.reprocessing',
    'sentry.tasks.scheduler',
    'sentry.tasks.store',
    'sentry.tasks.unmerge',
    'sentry.tasks.symcache_update',
    'sentry.tasks.servicehooks',
)
CELERY_QUEUES = [
    Queue('alerts', routing_key='alerts'),
    Queue('auth', routing_key='auth'),
    Queue('commits', routing_key='commits'),
    Queue('cleanup', routing_key='cleanup'),
    Queue('default', routing_key='default'),
    Queue('digests.delivery', routing_key='digests.delivery'),
    Queue('digests.scheduling', routing_key='digests.scheduling'),
    Queue('email', routing_key='email'),
    Queue('events.preprocess_event', routing_key='events.preprocess_event'),
    Queue('events.reprocessing.preprocess_event',
          routing_key='events.reprocessing.preprocess_event'),
    Queue('events.process_event', routing_key='events.process_event'),
    Queue('events.reprocessing.process_event',
          routing_key='events.reprocessing.process_event'),
    Queue('events.reprocess_events', routing_key='events.reprocess_events'),
    Queue('events.save_event', routing_key='events.save_event'),
Example #28
    def new_missing(self, name):
        return Queue(name, Exchange(name), name)
Example #29
    def test_add_default_exchange(self):
        ex = Exchange('fff', 'fanout')
        q = Queues(default_exchange=ex)
        q.add(Queue('foo'))
        self.assertEqual(q['foo'].exchange, ex)
Example #30
        logging.exception(
            "Error while scheduling the crawl job with default project")


class Worker(ConsumerMixin):
    def __init__(self, connection, queues):
        self.connection = connection
        self.queues = queues

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=self.queues, callbacks=[self.on_message])]

    def on_message(self, body, message):
        schedule_crawl_job(format(body))
        message.ack()


exchange = Exchange("example-exchange", type="direct")
queues = [Queue("example-queue", exchange, routing_key="BOB")]

with Connection(rabbit_url, heartbeat=10) as conn:
    try:
        print("Connection started")
        worker = Worker(conn, queues)
        worker.run()
    except KeyboardInterrupt:
        print("Goodbye")
        exit(0)
    except:
        raise