Ejemplo n.º 1
0
Archivo: case.py Proyecto: xacce/celery
def UnitApp(name=None, set_as_current=False, log=UnitLogging, **kwargs):
    """Create a Celery app preconfigured for unit tests.

    The test defaults are deep-copied in so individual tests cannot
    mutate the shared ``CELERY_TEST_CONFIG`` mapping.
    """
    unit_app = Celery(
        name or 'celery.tests',
        set_as_current=set_as_current,
        log=log,
        **kwargs)
    unit_app.add_defaults(deepcopy(CELERY_TEST_CONFIG))
    return unit_app
Ejemplo n.º 2
0
    def test_add_defaults(self):
        """add_defaults() is lazy and its defaults do not survive pickling."""
        app = Celery(set_as_current=False)

        self.assertFalse(app.configured)
        initial = {'FOO': 300}

        def defaults_callback():
            return initial

        app.add_defaults(defaults_callback)
        # Queuing defaults must not configure the app yet...
        self.assertIn(defaults_callback, app._pending_defaults)
        self.assertFalse(app.configured)
        # ...but the first config attribute access does.
        self.assertEqual(app.conf.FOO, 300)
        self.assertTrue(app.configured)
        self.assertFalse(app._pending_defaults)

        # defaults not pickled
        appr = loads(dumps(app))
        with self.assertRaises(AttributeError):
            appr.conf.FOO

        # add more defaults after configured
        late = {'FOO': 'BAR'}
        app.add_defaults(late)
        self.assertEqual(app.conf.FOO, 'BAR')

        self.assertIn(initial, app.conf.defaults)
        self.assertIn(late, app.conf.defaults)
Ejemplo n.º 3
0
def TestApp(name=None,
            config=None,
            enable_logging=False,
            set_as_current=False,
            log=UnitLogging,
            backend=None,
            broker=None,
            **kwargs):
    """Build a Celery app preconfigured for the test suite.

    Explicit ``broker``/``backend`` arguments take precedence over the
    matching entries of the default test configuration.
    """
    from . import tasks  # noqa

    merged = deepcopy(DEFAULT_TEST_CONFIG)
    merged.update(config or {})
    # An explicitly passed transport/backend supersedes the configured URL.
    if broker is not None:
        merged.pop("broker_url", None)
    if backend is not None:
        merged.pop("result_backend", None)
    if enable_logging:
        log = None
    test_app = Celery(name or "celery.tests",
                      set_as_current=set_as_current,
                      log=log,
                      broker=broker,
                      backend=backend,
                      **kwargs)
    test_app.add_defaults(merged)
    return test_app
Ejemplo n.º 4
0
def UnitApp(name=None, set_as_current=False, log=UnitLogging,
            broker='memory://', backend='cache+memory://', **kwargs):
    """Return a Celery app bound to in-memory broker/backend for unit tests."""
    test_app = Celery(
        name or 'celery.tests',
        set_as_current=set_as_current,
        log=log,
        broker=broker,
        backend=backend,
        **kwargs)
    # Deep copy keeps tests from mutating the shared default config.
    test_app.add_defaults(deepcopy(CELERY_TEST_CONFIG))
    return test_app
Ejemplo n.º 5
0
def UnitApp(name=None, set_as_current=False, log=UnitLogging,
            broker='memory://', backend='cache+memory://', **kwargs):
    """Construct a throwaway Celery app for unit testing.

    Broker and result backend default to in-memory transports so that no
    external services are needed to run the tests.
    """
    created = Celery(name or 'celery.tests', set_as_current=set_as_current,
                     log=log, broker=broker, backend=backend, **kwargs)
    created.add_defaults(deepcopy(CELERY_TEST_CONFIG))
    return created
Ejemplo n.º 6
0
def TestApp(name=None,
            set_as_current=False,
            log=UnitLogging,
            broker='memory://',
            backend=None,
            **kwargs):
    """App used for testing."""
    # Fall back to the in-memory cache backend when none is given.
    result_backend = backend or 'cache+memory://'
    app = Celery(name or 'celery.tests',
                 set_as_current=set_as_current,
                 log=log,
                 broker=broker,
                 backend=result_backend,
                 **kwargs)
    app.add_defaults(deepcopy(CELERY_TEST_CONFIG))
    return app
Ejemplo n.º 7
0
def build_app(config=None):
    """Create a Celery app from the scheduler configuration.

    Entries in ``config`` override the hard-coded defaults; queue names
    listed under ``task_queues`` are appended as direct-exchange queues.
    """
    # DEFAULT_CONFIG maps key -> (type, default); keep only the default value.
    defaults = {key: default for key, (_, default) in DEFAULT_CONFIG.items()}
    config = merge_configs(defaults, config or {})

    extra_queues = [
        Queue(name, Exchange(name), routing_key=name)
        for name in config.get("task_queues", ())
    ]
    config["task_queues"] = CELERY_QUEUES + extra_queues

    logger.debug("Creating a Celery app with %s", config)

    # Instantiate the Celery app with the project's task class.
    app = Celery(broker=config["task_broker"], task_cls="swh.scheduler.task:SWHTask")
    app.add_defaults(CELERY_DEFAULT_CONFIG)
    app.add_defaults(config)
    return app
Ejemplo n.º 8
0
def TestApp(name=None, config=None, enable_logging=False, set_as_current=False,
            log=UnitLogging, backend=None, broker=None, **kwargs):
    """App used for testing."""
    from . import tasks  # noqa
    effective = deepcopy(DEFAULT_TEST_CONFIG)
    effective.update(config or {})
    # Drop configured URLs that an explicit argument supersedes.
    for override, key in ((broker, 'broker_url'), (backend, 'result_backend')):
        if override is not None:
            effective.pop(key, None)
    if enable_logging:
        log = None
    app = Celery(
        name or 'celery.tests',
        set_as_current=set_as_current,
        log=log,
        broker=broker,
        backend=backend,
        **kwargs)
    app.add_defaults(effective)
    return app
Ejemplo n.º 9
0
    Load settings into celery as late as possible.
    '''
    config_updates = {
        'BROKER_URL': settings.broker_url,
        'CELERY_ROUTES': {
            'yalp.pipeline.tasks.process_message': {
                'queue': settings.parser_queue,
            },
        },
    }
    config_updates.update(settings.celery_advanced)
    return config_updates


# The app starts unconfigured; passing a callable to add_defaults() defers
# reading the settings until the configuration is first accessed.
app = Celery()
app.add_defaults(lazy_update_app_config)


class PipelineTask(Task):
    '''
    Pipeline celery task.
    '''
    abstract = True
    _config = None
    _parsers = None
    _outputers = None
    _output_queue = None
    _output_exchange = None

    @property
    def config(self):
Ejemplo n.º 10
0
# Authors: Nikolay Melnik <*****@*****.**>

import logging
from logging import config as logging_config

from flask.ext.admin import Admin
from celery import Celery

from app import create_app
from rest_api import Api

__version__ = '2016.08.08'

# Build the Flask app first, then a Celery instance that lazily pulls its
# settings from the Flask config (the lambda defers the lookup until the
# Celery configuration is first accessed).
app = create_app()
celery = Celery('cloudml')
celery.add_defaults(lambda: app.config)

# REST API and the admin UI are both mounted on the same Flask app.
api = Api(app)
admin = Admin(app, 'CloudML Admin Interface')

logging_config.dictConfig(app.config['LOGGING'])

# Note: to see sqlalchemy SQL statements, uncomment the following,
# and watch out for the logs will be huge
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

if app.config.get('SEND_ERROR_EMAILS'):
    from logging.handlers import SMTPHandler
    mail_handler = SMTPHandler(
        mailhost=(app.config['EMAIL_HOST'], app.config['EMAIL_PORT']),
        fromaddr=app.config['SERVER_EMAIL'],
Ejemplo n.º 11
0
        }

    # Final Celery settings
    return {
        "beat_schedule": schedule,
        "broker_url": config.celery_broker_url,
        "timezone": config.schedule_timezone,
    }


# =============================================================================
# The Celery app
# =============================================================================

# The app starts unconfigured; settings are computed lazily by
# get_celery_settings_dict() the first time the configuration is read.
celery_app = Celery()
celery_app.add_defaults(get_celery_settings_dict)
# celery_app.autodiscover_tasks([CELERY_APP_NAME],
#                               related_name=CELERY_TASKS_MODULE)

# Disabled debugging hooks, kept for reference; assigning the block to a
# string keeps this code inert.
_ = '''

@celery_app.on_configure.connect
def _app_on_configure(**kwargs) -> None:
    log.critical("@celery_app.on_configure: {!r}", kwargs)


@celery_app.on_after_configure.connect
def _app_on_after_configure(**kwargs) -> None:
    log.critical("@celery_app.on_after_configure: {!r}", kwargs)

'''
Ejemplo n.º 12
0
import os
from celery import Celery
import django
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'powersong.settings')

django.setup()

app = Celery('powersong')

# Read CELERY_*-prefixed settings from the Django settings module.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Leave the root logger configuration to Django.
app.add_defaults({
    'CELERYD_HIJACK_ROOT_LOGGER': False,
})

app.autodiscover_tasks()

# Throttle the per-provider API tasks (broadcast to running workers).
app.control.rate_limit('powersong.tasks.strava_task', '40/m')
app.control.rate_limit('powersong.tasks.lastfm_task', '250/m')
app.control.rate_limit('powersong.tasks.spotify_task', '250/m')


@app.task(bind=True)
def debug_task(self):
    """Print the current task request; useful for verifying worker wiring."""
    request_info = 'Request: {0!r}'.format(self.request)
    print(request_info)
Ejemplo n.º 13
0
from operator import StackOperatorRedis

# allow the usage of ssh config file by fabric
env.use_ssh_config = True
env.forward_agent = True

# load config from file via environ variable
config = os.environ.get('GACHETTE_SETTINGS', './config.rc')
dd = imp.new_module('config')

# Execute the settings file inside the synthetic module's namespace.
# SECURITY NOTE(review): this exec()s arbitrary code from the path given in
# GACHETTE_SETTINGS — only point it at trusted files.
with open(config) as config_file:
    dd.__file__ = config_file.name
    exec(compile(config_file.read(), config_file.name, 'exec'), dd.__dict__)

# Celery reads its defaults straight off the settings module object.
celery = Celery()
celery.add_defaults(dd)

# get settings
key_filename = None if not hasattr(dd, "BUILD_KEY_FILENAME") else dd.BUILD_KEY_FILENAME
host = dd.BUILD_HOST

MAGIC_DOMAIN = "main"

def send_notification(data):
    """Broadcast *data* to every listener on the Redis "all" channel."""
    connection = Redis(dd.REDIS_HOST, int(dd.REDIS_PORT))
    connection.publish("all", ['publish', data])

Ejemplo n.º 14
0
from . import sentry
from . import voevent

__all__ = ('app', )

# Version string comes from the vendored versioneer helper; drop the helper
# once it has been read.
__version__ = get_versions()['version']
del get_versions

# Use redis broker, because it supports locks (and thus singleton tasks).
# autofinalize=False lets us keep registering tasks/config until finalize().
app = Celery(__name__, broker='redis://', autofinalize=False)
"""Celery application object."""

# Register LVAlert and VOEvent subsystems.
lvalert.install(app)
voevent.install(app)

# Register all tasks.
app.autodiscover_tasks([__name__])

# Add default configuration.
app.add_defaults(playground)
app.finalize()

# Customize configuration from environment variable.
app.config_from_envvar('CELERY_CONFIG_MODULE', silent=True)

# Use the same URL for both the result backend and the broker.
app.conf['result_backend'] = app.conf.broker_url

# Initialize error reporting.
sentry.configure()
Ejemplo n.º 15
0
from __future__ import absolute_import

import os

from celery import Celery

from django.conf import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')

app = Celery('cyano')

# Leave the root logger configuration to Django.
app.add_defaults({
    'CELERYD_HIJACK_ROOT_LOGGER': False,
})

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

@app.task(bind=True)
def debug_task(self):
    """Dump the bound task's request object, for debugging the worker setup."""
    formatted = 'Request: {0!r}'.format(self.request)
    print(formatted)
Ejemplo n.º 16
0
import os
import random
import logging

logger = logging.getLogger(__name__)

from celery import Celery

# Broker lives on DB 0 of the Redis host named by the environment;
# results go to DB 1 of the same host.
redis_hostname = os.getenv('REDIS_HOSTNAME')
app = Celery('hello', broker='redis://{}/0'.format(redis_hostname))
# Force JSON for task payloads and results (no pickle).
app.add_defaults(
    dict(CELERY_RESULT_BACKEND='redis://{}/1'.format(redis_hostname),
         CELERY_ACCEPT_CONTENT=['json'],
         CELERY_TASK_SERIALIZER='json',
         CELERY_RESULT_SERIALIZER='json'))


@app.task(bind=True)
def simple(self, something=None):
    """Log the received argument and return a fixed marker string.

    Args:
        something: arbitrary payload to echo into the warning log.

    Returns:
        The literal string ``'simple return'``.
    """
    # Lazy %-style args instead of eager .format(): the message is only
    # rendered if the record is actually emitted; output is identical.
    logger.warning('SIMPLE simple %s', something)
    return 'simple return'
Ejemplo n.º 17
0
from flask import Flask
from flask.ext.pymongo import PyMongo

from flask_debugtoolbar import DebugToolbarExtension
from raven.contrib.flask import Sentry
from celery import Celery

from utils import (configure_error_handlers, configure_app,
    configure_template_filters)


# Flask application wired with config, error handlers, template filters,
# a debug toolbar, and Mongo access.
app = Flask(__name__)
configure_app(app, 'config.DevelopmentConfig')
configure_error_handlers(app)
# configure_middleware_handlers(app)
configure_template_filters(app)
toolbar = DebugToolbarExtension(app)
mongo = PyMongo(app)


# Only report errors to Sentry outside of debug mode.
if not app.config.get('DEBUG'):
    sentry = Sentry(app)

# Celery reuses the Flask configuration object for its defaults.
celery = Celery()
celery.add_defaults(app.config)

# Importing the views registers the routes on ``app``.
from core.views import *

if __name__ == '__main__':
    app.run()