Example #1
def test_custom_serializer(container_factory, rabbit_config,
                           sniffer_queue_factory):

    def encode(value):
        value = json.dumps(value)
        return value.upper()

    def decode(value):
        value = value.lower()
        return json.loads(value)

    register("upperjson", encode, decode, "application/x-upper-json", "utf-8")

    class Service(object):
        name = "service"

        @rpc
        def echo(self, arg):
            return arg

    rabbit_config[SERIALIZER_CONFIG_KEY] = "upperjson"
    container = container_factory(Service, rabbit_config)
    container.start()

    get_messages = sniffer_queue_factory('nameko-rpc')

    # verify RPC works end-to-end
    with ServiceRpcProxy('service', rabbit_config) as proxy:
        assert proxy.echo("hello") == "hello"

    # verify sniffed messages serialized as expected
    msg = get_messages()[0]
    assert '"RESULT": "HELLO"' in msg['payload']
    assert msg['properties']['content_type'] == "application/x-upper-json"
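A quick aside (not part of the test above): once a serializer is registered, kombu can round-trip payloads by name through kombu.serialization.dumps/loads. A minimal sketch, reusing the same encode/decode pair:

import json
from kombu.serialization import register, dumps, loads

def encode(value):
    return json.dumps(value).upper()

def decode(value):
    return json.loads(value.lower())

register("upperjson", encode, decode,
         content_type="application/x-upper-json",
         content_encoding="utf-8")

# dumps() returns the triple that ends up on the wire
content_type, content_encoding, body = dumps({"result": "hello"}, serializer="upperjson")
assert content_type == "application/x-upper-json"
# body is the uppercased JSON text: '{"RESULT": "HELLO"}'
assert loads(body, content_type, content_encoding) == {"result": "hello"}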
Example #2
def make_celery(app, celery):
    """ From http://flask.pocoo.org/docs/0.10/patterns/celery/ """
    # Register our custom serializer type before updating the configuration.
    from kombu.serialization import register
    from doorman.celery_serializer import djson_dumps, djson_loads

    register(
        'djson', djson_dumps, djson_loads,
        content_type='application/x-djson',
        content_encoding='utf-8'
    )

    # Actually update the config
    celery.config_from_object(app.config)

    # Register Sentry client
    if 'SENTRY_DSN' in app.config and app.config['SENTRY_DSN']:
        client = Client(app.config['SENTRY_DSN'])
        # register a custom filter to filter out duplicate logs
        register_logger_signal(client)
        # hook into the Celery error handler
        register_signal(client)

    TaskBase = celery.Task

    class ContextTask(TaskBase):

        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
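For the 'djson' registration to have any effect, the config loaded by config_from_object above must also point Celery at that name. A sketch of the relevant settings (assumed, not shown in the example; these are the older uppercase setting names, and the exact keys depend on the Celery version):

CELERY_TASK_SERIALIZER = 'djson'       # serialize task messages with djson
CELERY_RESULT_SERIALIZER = 'djson'     # and task results
CELERY_ACCEPT_CONTENT = ['djson']      # refuse everything else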
Example #3
def make_celery(app, celery):
    """ From http://flask.pocoo.org/docs/0.10/patterns/celery/ """
    # Register our custom serializer type before updating the configuration.
    from kombu.serialization import register
    from doorman.celery_serializer import djson_dumps, djson_loads

    register(
        'djson', djson_dumps, djson_loads,
        content_type='application/x-djson',
        content_encoding='utf-8'
    )

    # Actually update the config
    celery.config_from_object(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):

        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Example #4
def register_kombu_serializers():
    """
    Register our custom pickle serializer which knows how to handle UTF-8
    (non-ascii) messages.

    The default kombu pickle de-serializer calls .encode() on the bytes object
    without providing an encoding. This means it defaults to "ascii" and fails
    with a UnicodeDecodeError.

    https://github.com/celery/kombu/blob/3.0/kombu/utils/encoding.py#L47
    """
    def pickle_dumps(obj, dumper=pickle.dumps):
        return dumper(obj, protocol=pickle_protocol)

    if six.PY3:
        def str_to_bytes(s):
            if isinstance(s, str):
                return s.encode('utf-8')
            return s

        def unpickle(s):
            return pickle_loads(str_to_bytes(s))
    else:
        def str_to_bytes(s):                # noqa
            if isinstance(s, unicode):
                return s.encode('utf-8')
            return s
        unpickle = pickle_loads  # noqa

    register('pickle', pickle_dumps, unpickle,
             content_type='application/x-python-serialize',
             content_encoding='binary')
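A standalone Python 3 sketch of the same idea (an assumed equivalent, not the module above), showing the UTF-8-safe round trip through kombu:

import pickle
from kombu.serialization import register, dumps, loads

def utf8_safe_unpickle(s):
    # some transports hand back str; re-encode as UTF-8 before unpickling
    return pickle.loads(s.encode('utf-8') if isinstance(s, str) else s)

register('pickle', lambda obj: pickle.dumps(obj, protocol=2), utf8_safe_unpickle,
         content_type='application/x-python-serialize',
         content_encoding='binary')

ctype, cenc, body = dumps({'text': u'héllo wörld'}, serializer='pickle')
# pickle is on kombu's untrusted list, so force decoding in this local test
assert loads(body, ctype, cenc, force=True) == {'text': u'héllo wörld'}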
Example #5
def register_kombu_custom_serializer():
    register(
        'customjson',
        custom_dumps,
        custom_loads,
        content_type='application/x-customjson',
        content_encoding='utf-8'
    )
Example #6
    def _register_mistral_serialization():
        """Adds mistral serializer to available serializers in kombu.

        :return: None
        """
        serialization.register(
            'mistral_serialization',
            encoder=serializers.KombuSerializer.serialize,
            decoder=serializers.KombuSerializer.deserialize,
            content_type='application/json'
        )
Example #7
def register_serializer():
    """
    This is needed for the Kombu entry point to load encoders and decoders
    into the registry. Celery depends on Kombu for low-level serialization
    of tasks.
    """
    from kombu.serialization import register
    register(
        'tryson',
        functools.partial(json.dumps, cls=JSONEncoder),
        functools.partial(json.loads, object_hook=JSONDecoder()),
        content_type='application/x-tryson',
        content_encoding='binary',
    )
Example #8
    def __init__(self, name, connection, deployment, durable, queue_arguments,
                 exchange, topics):
        self.connection = connection
        self.deployment = deployment
        self.durable = durable
        self.queue_arguments = queue_arguments
        self.name = name
        self.last_time = None
        self.pmi = None
        self.processed = 0
        self.total_processed = 0
        self.topics = topics
        self.exchange = exchange
        signal.signal(signal.SIGTERM, self._shutdown)

        # register(name, encoder, decoder, ...): dumps encodes outgoing
        # messages; the buffer-aware loads decodes incoming ones.
        register('bufferjson', anyjson.dumps, self.loads,
                 content_type='application/json',
                 content_encoding='binary')
Example #9
    def register_changes_json():
        from kombu.serialization import register
        from kombu.utils.encoding import bytes_t
        from json import dumps, loads
        from uuid import UUID

        def _loads(obj):
            if isinstance(obj, UUID):
                obj = obj.hex
            elif isinstance(obj, bytes_t):
                obj = obj.decode()
            elif isinstance(obj, buffer):  # noqa -- Python 2 buffer type
                obj = bytes(obj).decode()
            return loads(obj)

        register('changes_json', dumps, _loads,
                 content_type='application/json',
                 content_encoding='utf-8')
Example #10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<*****@*****.**>
#         http://binux.me
# Created on 2015-05-22 20:54:01

import time
import umsgpack
from kombu import Connection, enable_insecure_serializers
from kombu.serialization import register
from kombu.exceptions import ChannelError
from six.moves import queue as BaseQueue

register('umsgpack', umsgpack.packb, umsgpack.unpackb, 'application/x-msgpack')
enable_insecure_serializers(['umsgpack'])
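# (enable_insecure_serializers whitelists 'umsgpack' for decoding: kombu
# refuses to deserialize content types it considers untrusted unless they
# are enabled like this or listed in the consumer's accept set.)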


class KombuQueue(object):
    """
    kombu is a high-level interface for multiple message queue backends.

    KombuQueue is built on top of the kombu API.
    """

    Empty = BaseQueue.Empty
    Full = BaseQueue.Full
    max_timeout = 0.3

    def __init__(self, name, url="amqp://", maxsize=0, lazy_limit=True):
        """
Example #11
from django.conf import settings

app = Celery('phase')


# We need a custom serializer to handle date and datetime objects.
class JSONSerializer(json.JSONEncoder):
    def default(self, data):
        if isinstance(data, (date, datetime)):
            return data.isoformat()
        elif isinstance(data, Decimal):
            return float(data)
        raise TypeError("Unable to serialize %r (type: %s)" %
                        (data, type(data)))


def my_dumps(obj):
    return json.dumps(obj, cls=JSONSerializer)


register('betterjson',
         my_dumps,
         json.loads,
         content_type='application/x-myjson',
         content_encoding='utf-8')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
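Note the asymmetry above: my_dumps flattens dates to ISO-8601 strings, but the decoder is plain json.loads, so values come back as strings rather than date objects. A small sketch, assuming the 'betterjson' registration above has run:

from datetime import date
from kombu.serialization import dumps, loads

ctype, cenc, body = dumps({'day': date(2020, 1, 1)}, serializer='betterjson')
assert loads(body, ctype, cenc) == {'day': '2020-01-01'}  # a str, not a date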
Example #12
from celery.signals import task_revoked, user_preload_options
from kombu.serialization import register
from pyramid.paster import bootstrap
from pyramid.request import Request
from pyramid.scripting import prepare
from pyramid.settings import asbool
from pyramid.threadlocal import get_current_request

from testscaffold.celery.encoders import json_dumps, json_loads

log = logging.getLogger(__name__)

register(
    "date_json",
    json_dumps,
    json_loads,
    content_type="application/x-date_json",
    content_encoding="utf-8",
)

CELERY_CONFIG = {
    "CELERY_IMPORTS": ("testscaffold.celery.tasks",),
    "CELERYD_TASK_TIME_LIMIT": 300,
    "CELERYD_MAX_TASKS_PER_CHILD": 1000,
    "CELERY_IGNORE_RESULT": True,
    "CELERY_ACCEPT_CONTENT": ("date_json",),
    "CELERY_TASK_SERIALIZER": "date_json",
    "CELERY_RESULT_SERIALIZER": "date_json",
    "BROKER_URL": None,
    "CELERYD_CONCURRENCY": None,
    "CELERY_TIMEZONE": None,
Example #13
PROCESS_STATES_LOADING_MUTATION = """
    mutation updateProcessStates($name: String, $loading: Boolean) {
        update_view_processes(where: {name: {_like: $name}}, _set: {loading: $loading}) {
        affected_rows
        returning {
          name
          loading
        }
      }
    }
"""

serialization.register(
    "ujson",
    json.dumps,
    json.loads,
    content_type="application/x-ujson",
    content_encoding="utf-8",
)


class TLSSMTPHandler(SMTPHandler):
    def emit(self, record):
        """
        Emit a record.
        Format the record and send it to the specified addressees.
        """
        try:
            import smtplib

            try:
Example #14
from kombu.serialization import register
from bson.json_util import dumps, loads
from celery import signals

from fame.common.config import fame_config
from fame.core import fame_init

register('json_util',
         dumps,
         loads,
         content_type='application/json',
         content_encoding='utf-8')

MONGO = 'mongodb://{}:{}/{}'.format(fame_config.mongo_host,
                                    fame_config.mongo_port,
                                    fame_config.mongo_db)
if fame_config.mongo_user and fame_config.mongo_password:
    MONGO = 'mongodb://{}:{}@{}:{}/{}'.format(fame_config.mongo_user,
                                              fame_config.mongo_password,
                                              fame_config.mongo_host,
                                              fame_config.mongo_port,
                                              fame_config.mongo_db)

BROKER_URL = MONGO
CELERY_RESULT_BACKEND = MONGO
CELERY_ACCEPT_CONTENT = ['json_util']
CELERY_TASK_SERIALIZER = 'json_util'

CELERY_IMPORTS = ('fame.core.analysis', 'fame.core.repository')

Example #15
            return Website(obj['url'])
    return obj


def w_dumps(obj):
    return json.dumps(obj, cls=WEncoder)


def w_loads(obj):
    return json.loads(obj, object_hook=w_decoder)


register(
    name='myjson',
    encoder=w_dumps,
    decoder=w_loads,
    content_type='application/x-myjson',
    content_encoding='utf-8'
)


def loads(s):
    return s


def dumps(s):
    return s


register(
    'mem_serializer', dumps, loads,
Example #16
def register_uuid_json():
    """Register a encoder/decoder for UUID compatable JSON serialization."""
    register('uuid_json', _dumps, _loads,
                      content_type='application/json',
                      content_encoding='utf-8')
Example #17
import os

# Register custom serializer for Celery that allows for encoding and decoding
# Python datetime objects (and potentially other ones)
from kombu.serialization import register
from serializers import encoder, decoder

register('ocd_serializer',
         encoder,
         decoder,
         content_encoding='binary',
         content_type='application/ocd-msgpack')

CELERY_CONFIG = {
    'BROKER_URL': 'redis://127.0.0.1:6379/0',
    'CELERY_ACCEPT_CONTENT': ['ocd_serializer'],
    'CELERY_TASK_SERIALIZER': 'ocd_serializer',
    'CELERY_RESULT_SERIALIZER': 'ocd_serializer',
    'CELERY_RESULT_BACKEND':
    'ocd_backend.result_backends:OCDRedisBackend+redis://127.0.0.1:6379/0',
    'CELERY_IGNORE_RESULT': True,
    'CELERY_DISABLE_RATE_LIMITS': True,
    # Expire results after 30 minutes; otherwise Redis will keep
    # claiming memory for a day
    'CELERY_TASK_RESULT_EXPIRES': 1800
}

LOGGING = {
    'version': 1,
    'formatters': {
        'console': {
Example #18
            return o
        klass = o[TYPE]
        if klass == DATETIME:
            return date_parser.parse(o[VALUE])
        return o

    @staticmethod
    def loads(text):
        return json.loads(text, cls=JSONDecoder)


class StaticFilesStorage(ManifestStaticFilesStorage):
    """
    Exactly the same as ManifestStaticFilesStorage from the Django contrib
    package, except this one optionally changes the manifest file name
    based on the value of STATICFILES_MANIFEST in settings.
    """

    manifest_name = getattr(settings, "STATICFILES_MANIFEST",
                            "staticfiles.json")


# register serializers for JSON that handle UUIDs and datetime objects
register(
    name=getattr(settings, "EDD_SERIALIZE_NAME", "edd-json"),
    encoder=JSONEncoder.dumps,
    decoder=JSONDecoder.loads,
    content_type="application/x-edd-json",
    content_encoding="UTF-8",
)
Example #19
import logging.config
import os
import pickle

from bugsnag.handlers import BugsnagHandler
from kombu.serialization import register
from pythonjsonlogger import jsonlogger
from version import __version__, __version_info__

register('ocd_serializer', pickle.dumps, pickle.loads,
         content_encoding='binary',
         content_type='application/x-pickle2')

APP_VERSION = __version__
MAJOR_VERSION = __version_info__[0]
MINOR_VERSION = __version_info__[1]

BUGSNAG_APIKEY = os.getenv('BUGSNAG_APIKEY')

RELEASE_STAGE = os.getenv('RELEASE_STAGE', 'production')

REDIS_HOST = os.getenv('REDIS_HOST', "redis")
REDIS_PORT = os.getenv('REDIS_PORT', "6379")
REDIS_URL = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)

ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_PATH = os.path.dirname(ROOT_PATH)
LOCAL_DUMPS_DIR = os.path.join(PROJECT_PATH, 'local_dumps')
DUMPS_DIR = os.path.join(PROJECT_PATH, 'dumps')

# Use this timezone as default for timezone unaware dates
Example #20
            return CashFlowStatement(**o["kwargs"])
    return o


def dumps(o):
    return json.dumps(o, cls=Encoder)


def loads(s):
    return json.loads(s, object_hook=decoder)


register(
    "fiberjson",
    dumps,
    loads,
    content_type="application/x-fiberjson",
    content_encoding="utf-8",
)

app = Celery("tasks",
             backend="amqp://redis",
             broker="amqp://rabbitmq",
             include="example.tasks")

app.conf.update(
    accept_content=["fiberjson"],
    task_serializer="fiberjson",
    result_serializer="fiberjson",
)
Example #21
#
# Copyright (C) Zenoss, Inc. 2009-2019 all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################

from __future__ import absolute_import

from celery import Celery
from kombu.serialization import register

from .serialization import without_unicode


# Register custom serializer
register(
    "without-unicode",
    without_unicode.dump,
    without_unicode.load,
    content_type="application/x-without-unicode",
    content_encoding="utf-8",
)

app = Celery(
    "zenjobs",
    config_source="Products.Jobber.config:Celery",
    task_cls="Products.Jobber.task:ZenTask",
)
Example #22
 def setUp(self):
     unittest.TestCase.setUp(self)
     serialization.register('adsmsg', *serializer.register_args)
Example #23
# Register your new serializer methods into kombu
from kombu.serialization import register

import openpathsampling.netcdfplus.dictify as dfy

_ops_to = dfy.CachedUUIDObjectJSON()
# _ops_from = dfy.CachedUUIDObjectJSON()

register('opsjson',
         _ops_to.to_json,
         _ops_to.from_json,
         content_type='application/x-opsjson',
         content_encoding='utf-8')

# Tell celery to use your new serializer:
CELERY_ACCEPT_CONTENT = ['opsjson']
CELERY_TASK_SERIALIZER = 'opsjson'
CELERY_RESULT_SERIALIZER = 'opsjson'
Example #24
from ichnaea.log import configure_raven, configure_stats

CELERY_QUEUES = (
    Queue("celery_default", routing_key="celery_default"),
    Queue("celery_export", routing_key="celery_export"),
    Queue("celery_incoming", routing_key="celery_incoming"),
    Queue("celery_insert", routing_key="celery_insert"),
    Queue("celery_monitor", routing_key="celery_monitor"),
    Queue("celery_reports", routing_key="celery_reports"),
    Queue("celery_upload", routing_key="celery_upload"),
)  #: List of :class:`kombu.Queue` instances.

register(
    "internal_json",
    customjson.kombu_dumps,
    customjson.kombu_loads,
    content_type="application/x-internaljson",
    content_encoding="utf-8",
)


def configure_celery(celery_app):
    """
    Configure the celery app stored in :data:`ichnaea.async.app.celery_app`.
    This is executed both inside the master worker process and once in
    each forked worker process.

    This parses the application ini and reads in the
    :mod:`ichnaea.async.settings`.
    """
Example #25
from .vpp_mock import (
    activate_izitru_mock, activate_tineye_mock, activate_incandescent_mock
)
from .incandescent import (
    get_incandescent_results, get_incandescent_results_callback
)
from .tineye import get_tineye_results
from .izitru import get_izitru_results


# @TODO: for debug purpose
from pprint import pprint  # noqa
from .logging import debug, print_task_exception   # noqa


register('dill', dill.dumps, dill.loads, content_type='application/x-binary-data', content_encoding='binary')


def get_original_image(item, resource):
    if 'renditions' in item:
        driver = app.data.mongo
        px = driver.current_mongo_prefix(resource)
        _fs = GridFS(driver.pymongo(prefix=px).db)
        for k, v in item['renditions'].items():
            if k == 'original':
                _file = _fs.get(ObjectId(v['media']))
                content = _file.read()
                href = v['href']
                return (href, content)
    raise ImageNotFoundException()
Example #26
import os
from uuid import uuid4

from celery import Celery
from celery.exceptions import TaskRevokedError
from celery.states import SUCCESS
from kombu.serialization import register

from flagger.json import dumps, loads

register('flagger-json',
         dumps,
         loads,
         content_type='application/x-flagger-json',
         content_encoding='utf-8')
app = Celery('flagger.workers.workers')
config = {
    'accept_content': ['flagger-json'],
    'broker_url':
    os.environ.get('broker_url'),
    'result_backend':
    os.environ.get('result_backend', 'db+sqlite:///../flagger.sqlite'),
    'result_serializer':
    'flagger-json',
    'task_serializer':
    'flagger-json',
    # 'worker_concurrency': os.environ.get('worker_concurrency', 16),
}
app.config_from_object(config)

Example #27
        default=encode_datetime,
    )


def unpack(s):
    return msgpack.unpackb(
        s,
        encoding="utf-8",
        unicode_errors="ignore",
        object_hook=decode_datetime,
    )


register(
    "unicode-msgpack-with-dates",
    pack,
    unpack,
    content_type=MESSAGE_CONTENT_TYPE,
    content_encoding=MESSAGE_CONTENT_ENCODING,
)

# This is around for compatibility reasons (so that we're able to decode any messages
# that are already in queues, with the old/non-date-aware content_type).
register(
    'unicode-msgpack',
    pack,
    unpack,
    content_type='application/x-unicode-msgpack',
    content_encoding='binary'
)
Example #28
def register_datecompatible_serializer():  # pragma: nocover
    from kombu.serialization import register
    register('nameko-serializer', *register_args)
Example #29
CELERY_QUEUES = (
    Queue('celery_cell', routing_key='celery_cell'),
    Queue('celery_default', routing_key='celery_default'),
    Queue('celery_export', routing_key='celery_export'),
    Queue('celery_incoming', routing_key='celery_incoming'),
    Queue('celery_monitor', routing_key='celery_monitor'),
    Queue('celery_ocid', routing_key='celery_ocid'),
    Queue('celery_reports', routing_key='celery_reports'),
    Queue('celery_upload', routing_key='celery_upload'),
    Queue('celery_wifi', routing_key='celery_wifi'),
)  #: List of :class:`kombu.Queue` instances.

register('internal_json',
         internaljson.internal_dumps,
         internaljson.internal_loads,
         content_type='application/x-internaljson',
         content_encoding='utf-8')


def configure_celery(celery_app):
    """
    Configure the celery app stored in :data:`ichnaea.async.app.celery_app`.
    This is executed both inside the master worker process and once in
    each forked worker process.

    This parses the application ini and reads in the
    :mod:`ichnaea.async.settings`.
    """

    conf = read_config()
Example #30
class JSONDecoder(json.JSONDecoder):
    """
    Complement of JSONEncoder, translates encoded datetime objects back to real datetime.
    """
    def __init__(self, *args, **kwargs):
        super(JSONDecoder, self).__init__(object_hook=self.object_hook,
                                          *args,
                                          **kwargs)

    def object_hook(self, o):
        if TYPE not in o:
            return o
        klass = o[TYPE]
        if klass == 'datetime':
            return parser.parse(o[VALUE])
        return o

    @staticmethod
    def loads(text):
        return json.loads(text, cls=JSONDecoder)


# register serializers for JSON that handle UUIDs and datetime objects
register(
    name=getattr(settings, 'EDD_SERIALIZE_NAME', 'edd-json'),
    encoder=JSONEncoder.dumps,
    decoder=JSONDecoder.loads,
    content_type='application/x-edd-json',
    content_encoding='UTF-8',
)
Example #31
SERVER_EMAIL = '*****@*****.**'

CELERYD_MAX_TASKS_PER_CHILD = 1

# Default queue
CELERY_DEFAULT_QUEUE = 'seed-common'
CELERY_QUEUES = (
    Queue(
        CELERY_DEFAULT_QUEUE,
        Exchange(CELERY_DEFAULT_QUEUE),
        routing_key=CELERY_DEFAULT_QUEUE
    ),
)

# Register our custom JSON serializer so we can serialize datetime objects in celery.
register('seed_json', CeleryDatetimeSerializer.seed_dumps, CeleryDatetimeSerializer.seed_loads,
         content_type='application/json', content_encoding='utf-8')

CELERY_ACCEPT_CONTENT = ['seed_json']
CELERY_TASK_SERIALIZER = 'seed_json'
CELERY_RESULT_SERIALIZER = 'seed_json'
CELERY_TASK_RESULT_EXPIRES = 18000  # 5 hours
CELERY_MESSAGE_COMPRESSION = 'gzip'

BROKER_URL = 'amqp://*****:*****@localhost:5672//'

LOG_FILE = join(SITE_ROOT, '../logs/py.log/')

# Set translation languages for i18n
LANGUAGES = (
    ('en', 'English'),
)
Example #32
 def test_register(self):
     register(None, None, None, None)
Example #33
    configure_stats,
)

CELERY_QUEUES = (
    Queue('celery_default', routing_key='celery_default'),
    Queue('celery_export', routing_key='celery_export'),
    Queue('celery_incoming', routing_key='celery_incoming'),
    Queue('celery_insert', routing_key='celery_insert'),
    Queue('celery_monitor', routing_key='celery_monitor'),
    Queue('celery_reports', routing_key='celery_reports'),
    Queue('celery_upload', routing_key='celery_upload'),
)
EXPORT_QUEUE_PREFIX = 'queue_export_'

register('internal_json', customjson.kombu_dumps, customjson.kombu_loads,
         content_type='application/x-internaljson',
         content_encoding='utf-8')


def configure_celery(celery_app):
    conf = read_config()
    if conf.has_section('celery'):
        section = conf.get_map('celery')
    else:  # pragma: no cover
        # happens while building docs locally and on rtfd.org
        return

    # testing settings
    always_eager = bool(os.environ.get('CELERY_ALWAYS_EAGER', False))
    redis_uri = os.environ.get('REDIS_URI', 'redis://localhost:6379/1')
Example #34
class NumpyKombuJSONEncoder(_json.JSONEncoder):
    """ Special json encoder for numpy types """
    def default(self, obj):
        res = convert_simple_numpy_type(obj)
        if res is not None:
            return res

        return super(NumpyKombuJSONEncoder, self).default(obj)

# Encoder function
def my_dumps(obj):
    return _json.dumps(obj, cls=NumpyKombuJSONEncoder)

# Register your new serializer methods into kombu
from kombu.serialization import register

register('myjson', my_dumps, _json.loads,
    content_type='application/json',
    content_encoding='utf-8')

# Tell celery to use your new serializer:
#celeryApp.conf.accept_content = ['myjson']
celeryApp.conf.task_serializer = 'myjson'
celeryApp.conf.result_serializer = 'myjson'

# Monkey-patch the startup timeout, since our workers take longer than the
# default 4.0 seconds to start up.
from celery.concurrency import asynpool
asynpool.PROC_ALIVE_TIMEOUT = 60.0

Example #35
from . import commonconfig, commandconfig, monitorconfig
from kombu.serialization import register
import json


def oid_safe_dumps(obj):
    return json.dumps(obj, default=str)


def oid_safe_loads(obj):
    return json.loads(obj)


register('oid_safe_json',
         oid_safe_dumps,
         oid_safe_loads,
         content_type='application/x-oid_safe_json',
         content_encoding='utf-8')

_includes = [
    'cumulus.ansible.tasks.cluster', 'cumulus.ansible.tasks.volume',
    'cumulus.tasks.cluster', 'cumulus.tasks.job', 'cumulus.ssh.tasks.key',
    'cumulus.aws.ec2.tasks.key'
]

taskflow_modules = find_taskflow_modules()
_includes += taskflow_modules

# Route short tasks to their own queue
_routes = {
    'cumulus.tasks.job.monitor_job': {
Example #36
 def register(cls):
     from kombu.serialization import register
     register(cls.name, cls.model_encode, cls.model_decode, 'application/json', 'utf-8')
Example #37
import json

from flask import Flask
from flask.ext.login import LoginManager
from kombu.serialization import register

from timetracker.utils import mongo_encoder


register('mongo_json', mongo_encoder, json.loads, 'application/json')
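# (with content_encoding omitted, register() defaults it to 'utf-8')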


app = Flask(__name__)

app.config['SECRET_KEY'] = '123456790'
app.config['MONGODB_SETTINGS'] = {'DB': 'timetracker'}
app.config['CELERY_BROKER_URL'] = 'amqp://'
app.config['UPLOAD_PATH'] = '/tmp/'

login_manager = LoginManager()
login_manager.init_app(app)

import timetracker.models
import timetracker.api
import timetracker.admin
Example #38
import os
from os import walk
from os.path import join, dirname, abspath
import sys
from dive.base.serialization import pjson_dumps, pjson_loads
from kombu.serialization import register, registry
env = os.environ.get
base_dir_path = lambda x: abspath(join(dirname(__file__), x))

# Register custom PJSON to celery
register('pjson',
         pjson_dumps,
         pjson_loads,
         content_type='application/x-pjson',
         content_encoding='utf-8')

registry.enable('application/x-pjson')


class BaseConfig(object):
    # General
    SITE_URL = 'localhost::8081'
    SITE_TITLE = 'dive'
    SECRET_KEY = 'dive'
    PREFERRED_URL_SCHEME = 'http'
    SECURITY_PASSWORD_SALT = 'nacl'

    # Flask
    HOST = '0.0.0.0'
    DEBUG = True
    PORT = 8081
Example #39
from __future__ import absolute_import
from celery.schedules import crontab
from kombu.serialization import register
from datetime import timedelta
import os
from futuschedule import tasks

from futuschedule.myjson import my_dumps, my_loads

register('myjson', my_dumps, my_loads,
    content_type='application/x-myjson',
    content_encoding='utf-8')

BROKER_URL = os.getenv('BROKER_URL', "redis://127.0.0.1:6379/0")
CELERY_RESULT_BACKEND = os.getenv('RESULT_BACKEND', "redis://127.0.0.1/0")

CELERYD_PREFETCH_MULTIPLIER = 6
CELERY_ACCEPT_CONTENT = ['myjson']
CELERY_TASK_SERIALIZER = 'myjson'
CELERY_RESULT_SERIALIZER = CELERY_TASK_SERIALIZER
CELERY_ACCEPT_CONTENT = [CELERY_TASK_SERIALIZER, ]

CELERYBEAT_SCHEDULE = {
    'refresh-users': {
        'task': 'futuschedule.tasks.refresh_users',
        'schedule': crontab(minute=0),
    },
    'update-meeting-rooms': {
        'task': 'futuschedule.tasks.update_meeting_rooms',
        'schedule': crontab(minute=0),
    }
Example #40
from kombu.serialization import register
from .decoders import data_dumps, data_loads


register('myjson', data_dumps, data_loads,
         content_type='application/x-myjson',
         content_encoding='utf-8')

# Tell celery to use your new serializer:
accept_content = ['myjson']
task_serializer = 'myjson'
result_serializer = 'myjson'
Example #41
        if isinstance(v, list):
            kwargs[k] = [try_cast(val) for val in v]

        if isinstance(v, dict):
            cast_item(v)

    return o


def dumps(o):
    with superdesk.app.app_context():
        return MongoJSONEncoder().encode(o)


register("eve/json", dumps, loads, content_type="application/json")


class AppContextTask(TaskBase):
    abstract = True
    serializer = "eve/json"

    def __call__(self, *args, **kwargs):
        with superdesk.app.app_context():
            try:
                return super().__call__(*args, **kwargs)
            except werkzeug.exceptions.InternalServerError as e:
                superdesk.app.sentry.captureException()
                logger.exception(e)

    def on_failure(self, exc, task_id, args, kwargs, einfo):
Example #42
"""Configure Celery."""
import os

from celery.utils.log import get_task_logger
from json_tricks.nonp import dumps
from json_tricks.nonp import loads
from kombu.serialization import register

logger = get_task_logger(__name__)

# Register json-tricks as json encoder
register(
    'json_tricks.nonp',
    lambda obj: dumps(obj, conv_str_byte=True),
    lambda obj: loads(obj, conv_str_byte=True),
    content_type='application/x-json-tricks',
    content_encoding='utf-8',
)

# Global configuration.
accept_content = ['application/x-json-tricks', 'application/json']
imports = ('api.celery.tasks', )
timezone = 'America/Chicago'

# Beat configuration.
beat_max_loop_interval = 5

# Broker configuration.
broker_url = os.environ.get('CELERY_BROKER_URL', 'rpc://')

# Result configuration.
Example #43
import os
from os import walk
from os.path import join, dirname, abspath
import sys
from dive.base.serialization import pjson_dumps, pjson_loads
from kombu.serialization import register, registry
env = os.environ.get
base_dir_path = lambda x: abspath(join(dirname(__file__), x))


# Register custom PJSON to celery
register('pjson', pjson_dumps, pjson_loads,
    content_type='application/x-pjson',
    content_encoding='utf-8')

registry.enable('application/x-pjson')


class BaseConfig(object):
    # General
    SITE_URL = 'localhost:3009'
    SITE_TITLE = 'dive'
    SECRET_KEY = 'dive'
    PREFERRED_URL_SCHEME = 'http'
    SECURITY_PASSWORD_SALT = 'nacl'

    # Flask
    HOST = '0.0.0.0'
    DEBUG = True
    PORT = 8081
    COMPRESS = True
Example #44
"""Configure Celery."""
import os

from celery.schedules import crontab
from json_tricks.nonp import dumps
from json_tricks.nonp import loads
from kombu.serialization import register

# Register json-tricks as json encoder
register('json_tricks.nonp', dumps, loads, content_type='application/x-json-tricks', content_encoding='utf-8')

# Global configuration.
accept_content = ['application/json', 'application/x-json-tricks']
imports = ('{{ cookiecutter.project_name }}.celery.tasks', )
timezone = 'America/Chicago'

# Beat configuration.
beat_max_loop_interval = 5

# Broker configuration.
broker_url = os.environ.get('CELERY_BROKER_URL', 'rpc://')

# Result configuration.
result_backend = os.environ.get('CELERY_RESULT_BACKEND', 'redis://')
result_serializer = 'json_tricks.nonp'

# Task configuration.
task_serializer = 'json_tricks.nonp'

# Worker configuration.
worker_concurrency = 1
Example #45
import logging.config
import os
import pickle

from bugsnag.handlers import BugsnagHandler
from kombu.serialization import register
from pythonjsonlogger import jsonlogger
from version import __version__, __version_info__

register('ocd_serializer', pickle.dumps, pickle.loads,
         content_encoding='binary',
         content_type='application/x-pickle2')

APP_VERSION = __version__
MAJOR_VERSION = __version_info__[0]
MINOR_VERSION = __version_info__[1]

BUGSNAG_APIKEY = os.getenv('BUGSNAG_APIKEY')

RELEASE_STAGE = os.getenv('RELEASE_STAGE', 'development')

REDIS_HOST = os.getenv('REDIS_SERVER_HOST', "redis")
REDIS_PORT = os.getenv('REDIS_SERVER_PORT', "6379")
REDIS_URL = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT)

ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_PATH = os.path.dirname(ROOT_PATH)
LOCAL_DUMPS_DIR = os.path.join(PROJECT_PATH, 'local_dumps')
DUMPS_DIR = os.path.join(PROJECT_PATH, 'dumps')

# Use this timezone as default for timezone unaware dates
Example #46
CELERYD_MAX_TASKS_PER_CHILD = 1

# Default queue
CELERY_DEFAULT_QUEUE = 'seed-common'
CELERY_QUEUES = (
    Queue(
        CELERY_DEFAULT_QUEUE,
        Exchange(CELERY_DEFAULT_QUEUE),
        routing_key=CELERY_DEFAULT_QUEUE
    ),
)

# Register our custom JSON serializer so we can serialize datetime objects in celery.
register('seed_json', CeleryDatetimeSerializer.seed_dumps,
         CeleryDatetimeSerializer.seed_loads,
         content_type='application/json', content_encoding='utf-8')

CELERY_ACCEPT_CONTENT = ['seed_json']
CELERY_TASK_SERIALIZER = 'seed_json'
CELERY_RESULT_SERIALIZER = 'seed_json'
CELERY_TASK_RESULT_EXPIRES = 18000  # 5 hours
CELERY_MESSAGE_COMPRESSION = 'gzip'

BROKER_URL = 'amqp://*****:*****@localhost:5672//'

LOG_FILE = join(SITE_ROOT, '../logs/py.log/')

# Set translation languages for i18n
LANGUAGES = (
    ('en', 'English'),
Example #47
from flask.ext.babel import Babel
from flask.ext.pymongo import PyMongo
from flask_socketio import SocketIO

from kombu.serialization import register
from kombu import serialization

from .utils import memt_dumps, memt_loads

# Flask extensions
babel = Babel()
mongo = PyMongo()

socketio = SocketIO()

# Celery config
celery = Celery(__name__,
                broker=Config.CELERY_BROKER_URL,
                backend=Config.CELERY_RESULT_BACKEND)

register('memtjson', memt_dumps, memt_loads,
    content_type='application/x-memtjson',
    content_encoding='utf-8')
serialization.registry._decoders.pop("application/x-python-serialize")


if not os.environ.get('PRODUCTION'):
    from flask_debugtoolbar import DebugToolbarExtension
    toolbar = DebugToolbarExtension()

Example #48
import os

# Register custom serializer for Celery that allows for encoding and decoding
# Python datetime objects (and potentially other ones)
from kombu.serialization import register
from serializers import encoder, decoder

register('ocd_serializer', encoder, decoder, content_encoding='binary',
         content_type='application/ocd-msgpack')

CELERY_CONFIG = {
    'BROKER_URL': 'redis://127.0.0.1:6379/0',
    'CELERY_ACCEPT_CONTENT': ['ocd_serializer'],
    'CELERY_TASK_SERIALIZER': 'ocd_serializer',
    'CELERY_RESULT_SERIALIZER': 'ocd_serializer',
    'CELERY_RESULT_BACKEND': 'ocd_backend.result_backends:OCDRedisBackend+redis://127.0.0.1:6379/0',
    'CELERY_IGNORE_RESULT': True,
    'CELERY_DISABLE_RATE_LIMITS': True,
    # Expire results after 30 minutes; otherwise Redis will keep
    # claiming memory for a day
    'CELERY_TASK_RESULT_EXPIRES': 1800
}

LOGGING = {
    'version': 1,
    'formatters': {
        'console': {
            'format': '[%(asctime)s] [%(name)s] [%(levelname)s] - %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        }
    },
Example #49
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<*****@*****.**>
#         http://binux.me
# Created on 2015-05-22 20:54:01

import time
import umsgpack
from kombu import Connection, enable_insecure_serializers
from kombu.serialization import register
from kombu.exceptions import ChannelError
from pyspider.libs.queue import Queue as BaseQueue


register('umsgpack', umsgpack.packb, umsgpack.unpackb, 'application/x-msgpack')
enable_insecure_serializers(['umsgpack'])


class KombuQueue(object):
    """
    kombu is a high-level interface for multiple message queue backends.

    KombuQueue is built on top of the kombu API.
    """

    Empty = BaseQueue.Empty
    Full = BaseQueue.Full
    max_timeout = 0.3

    def __init__(self, name, url="amqp://", maxsize=0, lazy_limit=True):
Example #50
                typ = obj.pop('__type__')
                mod = import_module(obj.pop('__module__'))
                klass = getattr(mod, typ)
                return klass(**obj)
            except KeyError:
                pass
        return obj

    return simplejson.loads(string, object_hook=decode)


# Register a custom serializer. We do this so we can conveniently
# transfer objects without resorting to pickling.
serialization.register('custom_json',
                       custom_dumps,
                       custom_loads,
                       content_type='application/x-bioconda-json',
                       content_encoding='utf8')

# Instantiate Celery app, setting our AsyncTask as default
# task class and loading the tasks from tasks.py
capp = Celery(  # pylint: disable=invalid-name
    task_cls=AsyncTask,
    include=['bioconda_utils.bot.tasks'])

# Celery must be configured at module level to catch worker as well
# Settings are suggestions from CloudAMPQ
capp.conf.update(
    # Set the URL to the AMQP broker using environment variable
    broker_url=os.environ.get('CLOUDAMQP_URL'),
Example #51
# KPIit is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Default configuration for KPIit."""

import os

from celery.schedules import crontab
from kombu.serialization import register

from kpiit.config import config
from kpiit.json import metric_dumps, metric_loads

# Register new JSON serializer
register(config['celery']['serializer'],
         metric_dumps,
         metric_loads,
         content_type='application/x-metricjson',
         content_encoding='utf-8')

# Default schedule crontabs
SCHEDULE_DOI_MONTHLY = crontab(**config['celery']['schedules']['doi'])
SCHEDULE_REPO_DAILY = crontab(**config['celery']['schedules']['repo'])

#: URL of message broker for Celery (default is Redis).
broker_url = config['celery']['broker_url']
#: URL of backend for result storage (default is Redis).
result_backend = config['celery']['result_backend']
#: List of modules to import when the Celery worker starts.
imports = ['kpiit.tasks']

#: Scheduled tasks configuration (aka cronjobs).
Example #52
def register_custom_serializer() -> None:
    from kombu.serialization import register
    register('customjson', custom_json_dumps, json.loads,
             content_type='application/x-customjson',
             content_encoding='utf-8')
Example #53
__author__ = "Nils Tobias Schmidt"
__email__ = "schmidt89 at informatik.uni-marburg.de"

import pickle

from celery import Celery
from kombu.serialization import register

from androlyze.Constants import PROJECT_NAME
from androlyze.celery.celerysettings import settings
from androlyze.settings import *
from androlyze.util import Util
from androlyze.analyze.distributed.tasks.AnalyzeTask import AnalyzeTask
from celery.registry import tasks

# worker has to import androguard too
Util.set_androguard_path(settings)

# pin pickle to a specific protocol (2)
register('pickle', lambda s: pickle.dumps(s, 2), lambda s: pickle.loads(s),
        content_type='application/x-python-serialize',
        content_encoding='binary')

app = Celery(PROJECT_NAME)

# load config
app.config_from_object(CELERY_CONF)

if __name__ == '__main__':
    app.start()
Example #54
    worker_sent_task_events = True
    # Prefetching is only necessary with high-latency brokers, and apparently can cause problems
    worker_prefetch_multiplier = 1


app = Celery(task_cls=CredentialedGirderTask, config_source=CeleryAppConfig)


@app.on_after_configure.connect
def setupPeriodicTasks(sender, **kwargs):
    from isic_archive.tasks import maybeSendIngestionNotifications
    sender.add_periodic_task(
        30,
        maybeSendIngestionNotifications.s(),
        name='Send any necessary notifications for ingested batches.')


@worker_process_init.connect
def addMailTemplates(sender, **kwargs):
    """Perform the necessary steps from IsicArchive.load."""
    mail_utils.addTemplateDirectory(pkg_resources.resource_filename(
        'isic_archive', 'mail_templates'),
                                    prepend=True)


register('jsonpickle',
         jsonpickle.encode,
         jsonpickle.decode,
         content_type='application/json',
         content_encoding='utf-8')
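jsonpickle can reconstruct arbitrary Python objects from JSON, which is what lets it stand in as a task serializer here. A hedged sanity sketch, assuming the registration above has run:

from kombu.serialization import dumps, loads

ctype, cenc, body = dumps({'n': 1}, serializer='jsonpickle')
assert ctype == 'application/json'
assert loads(body, ctype, cenc) == {'n': 1}

Because it registers under content_type='application/json', it also replaces kombu's stock JSON decoder for that content type, which is worth keeping in mind.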
Example #55
    Queue('celery_blue', routing_key='celery_blue'),
    Queue('celery_cell', routing_key='celery_cell'),
    Queue('celery_content', routing_key='celery_content'),
    Queue('celery_default', routing_key='celery_default'),
    Queue('celery_export', routing_key='celery_export'),
    Queue('celery_incoming', routing_key='celery_incoming'),
    Queue('celery_monitor', routing_key='celery_monitor'),
    Queue('celery_ocid', routing_key='celery_ocid'),
    Queue('celery_reports', routing_key='celery_reports'),
    Queue('celery_upload', routing_key='celery_upload'),  # BBB
    Queue('celery_wifi', routing_key='celery_wifi'),
)  #: List of :class:`kombu.Queue` instances.

register('internal_json',
         simplejson.dumps,
         simplejson.loads,
         content_type='application/x-internaljson',
         content_encoding='utf-8')


def configure_celery(celery_app):
    """
    Configure the celery app stored in :data:`ichnaea.async.app.celery_app`.
    This is executed both inside the master worker process and once in
    each forked worker process.

    This parses the application ini and reads in the
    :mod:`ichnaea.async.settings`.
    """

    # This happens at module import time and depends on a properly
Example #56
        if isinstance(v, list):
            kwargs[k] = [try_cast(val) for val in v]

        if isinstance(v, dict):
            cast_item(v)

    return o


def dumps(o):
    with superdesk.app.app_context():
        return MongoJSONEncoder().encode(o)


register('eve/json', dumps, loads, content_type='application/json')


def handle_exception(exc):
    """Log exception to logger and sentry."""
    logger.exception(exc)
    superdesk.app.sentry.captureException()


class AppContextTask(TaskBase):
    abstract = True
    serializer = 'eve/json'
    app_errors = (
        SuperdeskError,
        werkzeug.exceptions.InternalServerError,  # mongo layer err
    )
Example #57
 def test_register(self):
     register(None, None, None, None)
Example #58
        return data


# Encoder function
def _dumps(obj):
    return json.dumps(obj, cls=NautobotKombuJSONEncoder)


# Decoder function
def _loads(obj):
    return json.loads(obj, object_hook=nautobot_kombu_json_loads_hook)


# Register the custom serialization type
register("nautobot_json",
         _dumps,
         _loads,
         content_type="application/x-nautobot-json",
         content_encoding="utf-8")

#
# nautobot_task
#
# By exposing `shared_task` within our own namespace, we leave the door open to
# extending and expanding the usage and meaning of shared_task without having
# to undergo further refactoring of task decorators. We could also transparently
# swap out shared_task to a custom base task.
#

nautobot_task = shared_task
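A sketch of how the alias might be consumed (the import path is hypothetical, purely for illustration):

from nautobot.core.celery import nautobot_task  # assumed module path

@nautobot_task
def count_devices():
    ...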
Example #59
        if isinstance(v, list):
            kwargs[k] = [try_cast(val) for val in v]

        if isinstance(v, dict):
            cast_item(v)

    return o


def dumps(o):
    with superdesk.app.app_context():
        return MongoJSONEncoder().encode(o)


register('eve/json', dumps, loads, content_type='application/json')


def handle_exception(exc):
    """Log exception to logger and sentry."""
    logger.exception(exc)
    superdesk.app.sentry.captureException()


class AppContextTask(TaskBase):
    abstract = True
    serializer = 'eve/json'
    app_errors = (
        SuperdeskError,
        werkzeug.exceptions.InternalServerError,  # mongo layer err
    )
Example #60
    def filter(self, rec):
        if not hij_log_filter:
            return True
        for filter_entry in hij_log_filter:
            for filter_entry_key in filter_entry:
                if rec.__dict__[filter_entry_key] == filter_entry[filter_entry_key]:
                    return True
        return False


mail_log.addFilter(HijackLogFilter())
hij_log.addFilter(HijackLogFilter())

# additional serializer for pg-amqp messages
serialization.register(
    "txtjson", json.dumps, json.loads, content_type="text", content_encoding="utf-8"
)


class Detection:
    """
    Detection Service.
    """

    def __init__(self):
        self.worker = None
        signal.signal(signal.SIGTERM, self.exit)
        signal.signal(signal.SIGINT, self.exit)
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)

    def run(self) -> NoReturn: