Example #1
def test_enable(self):
    registry._disabled_content_types.add('application/json')
    registry.enable('json')
    assert 'application/json' not in registry._disabled_content_types
    registry._disabled_content_types.add('application/json')
    registry.enable('application/json')
    assert 'application/json' not in registry._disabled_content_types
Example #2
def test_enable(self):
    registry._disabled_content_types.add('application/json')
    registry.enable('json')
    self.assertNotIn('application/json', registry._disabled_content_types)
    registry._disabled_content_types.add('application/json')
    registry.enable('application/json')
    self.assertNotIn('application/json', registry._disabled_content_types)
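A minimal standalone sketch of the behaviour these tests check (assuming only that kombu is installed): registry.enable() accepts either the short serializer name or the full content type, and either form clears the entry from the registry's disabled set.

from kombu.serialization import registry

registry.disable('json')
assert 'application/json' in registry._disabled_content_types

registry.enable('json')                    # short serializer name
assert 'application/json' not in registry._disabled_content_types

registry.disable('application/json')
registry.enable('application/json')        # full content type
assert 'application/json' not in registry._disabled_content_types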
Example #5
def _eager_serialize_result(self, result, **kwargs_):
    app = self._app
    producer = kwargs_.get('producer') if kwargs_ else None
    result_value = result._result
    with app.producer_or_acquire(producer) as producer:
        serializer = kwargs_.get('serializer', producer.serializer) or self._task_serializer
        registry.enable(serializer)
        dtype, encoding, data = registry.dumps(result_value, serializer)
        result._result = registry.loads(data, dtype, encoding)
    return result
Example #6
def _eager_serialize_args(self, args=None, kwargs=None, **kwargs_):
    # Perform a no-op serialization round-trip to assert that args and
    # kwargs will be serialized appropriately when an async call through
    # kombu is actually performed. This is done to make sure we catch
    # serialization errors in our test suite, which runs with the
    # CELERY_ALWAYS_EAGER setting set to True. See the following Celery
    # issue for details: https://github.com/celery/celery/issues/4008.
    app = self._app
    producer = kwargs_.get('producer') if kwargs_ else None
    with app.producer_or_acquire(producer) as producer:
        serializer = kwargs_.get('serializer', producer.serializer) or self._task_serializer
        registry.enable(serializer)
        args_content_type, args_content_encoding, args_data = registry.dumps(args, serializer)
        kwargs_content_type, kwargs_content_encoding, kwargs_data = registry.dumps(kwargs, serializer)
        args = registry.loads(args_data, args_content_type, args_content_encoding)
        kwargs = registry.loads(kwargs_data, kwargs_content_type, kwargs_content_encoding)
    return args, kwargs
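A hedged sketch of the same round-trip idea outside of a class (the helper name and sample payload are illustrative, not the project's code): pushing a value through registry.dumps()/registry.loads() surfaces serialization errors even when tasks run eagerly and never reach kombu.

from kombu.serialization import registry

def assert_serializable(obj, serializer='json'):
    # Round-trip obj through kombu's serializer registry; raises if obj
    # cannot be serialized with the chosen serializer.
    registry.enable(serializer)
    content_type, content_encoding, data = registry.dumps(obj, serializer)
    return registry.loads(data, content_type, content_encoding)

assert_serializable({'user_id': 42, 'tags': ['a', 'b']})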
Example #7
File: __init__.py  Project: dalma6/tutti
from kombu.serialization import registry
registry.enable('application/json')

Example #8
from kombu.serialization import registry

CELERY_ACCEPT_CONTENT = ['json', 'application/text']
CELERYD_POOL_RESTARTS = True  # Required for /worker/pool/restart API
registry.enable('json')
registry.enable('application/text')
Example #9
import os
from os import walk
from os.path import join, dirname, abspath
import sys
from dive.base.serialization import pjson_dumps, pjson_loads
from kombu.serialization import register, registry
env = os.environ.get
base_dir_path = lambda x: abspath(join(dirname(__file__), x))


# Register custom PJSON to celery
register('pjson', pjson_dumps, pjson_loads,
    content_type='application/x-pjson',
    content_encoding='utf-8')

registry.enable('application/x-pjson')


class BaseConfig(object):
    # General
    SITE_URL = 'localhost:3009'
    SITE_TITLE = 'dive'
    SECRET_KEY = 'dive'
    PREFERRED_URL_SCHEME = 'http'
    SECURITY_PASSWORD_SALT = 'nacl'

    # Flask
    HOST = '0.0.0.0'
    DEBUG = True
    PORT = 8081
    COMPRESS = True
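As a rough sketch of what the registration above enables (using the stdlib json functions in place of the project's pjson_dumps/pjson_loads, with a made-up payload), the custom content type can then round-trip through the registry:

import json

from kombu.serialization import register, registry

register('pjson', json.dumps, json.loads,
         content_type='application/x-pjson',
         content_encoding='utf-8')
registry.enable('application/x-pjson')

content_type, encoding, payload = registry.dumps({'x': 1}, 'pjson')
assert registry.loads(payload, content_type, encoding) == {'x': 1}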
Example #10
from kombu import Exchange, Queue
from kombu.serialization import registry

task_default_queue = 'default'
task_queues = (
    Queue('userpaper', routing_key='expertise.service.celery_tasks.run_userpaper'),
    Queue('expertise', routing_key='expertise.service.celery_tasks.run_expertise')
)
# CELERY_IMPORTS = ('tasks')
task_ignore_result = False
# broker_url = 'redis://localhost:6379/0'
broker_url = 'redis://localhost:6379/10'
result_backend = 'redis://localhost:6379/10'
# CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
task_serializer = 'pickle'
result_serializer = 'pickle'
accept_content = ['pickle', 'application/x-python-serialize']
task_create_missing_queues = True
# result_backend = 'redis://localhost:6379/0'

registry.enable('pickle')
registry.enable('application/x-python-serialize')
Example #11
import os
print(os.getcwd())
# Celery is broken on windows
import sys
sys.setrecursionlimit(10000)
from celery import Celery
from kombu.serialization import registry

registry.enable('pickle')
celery = Celery('tasks')
celery.config_from_object('redditrepostsleuth.core.celery.celeryconfig')

if __name__ == '__main__':
    celery.start()
Example #12
def disable_untrusted_serializers(whitelist=None):
    for name in set(registry._decoders) - set(whitelist or []):
        registry.disable(name)
    for name in whitelist or []:
        registry.enable(name)
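A possible usage sketch for the helper above (the whitelist value is illustrative, and the call assumes disable_untrusted_serializers from the example is in scope): keep only JSON trusted and disable every other registered content type.

from kombu.serialization import registry

disable_untrusted_serializers(whitelist=['json'])
assert 'application/json' not in registry._disabled_content_types
assert 'application/x-python-serialize' in registry._disabled_content_types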
Example #13
from __future__ import absolute_import
from celery import Celery
from celery import bootsteps
from kombu import Consumer, Exchange, Queue, Connection
from kombu.serialization import registry
import json

# accept plain text content type
CELERY_ACCEPT_CONTENT = ['text/plain']
registry.enable('text/plain')

my_queue = Queue('scdf', Exchange('scdf'), 'routing_key')

BROKER_URL = 'amqp://*****:*****@rabbitmq/scdf-vhost'
result_backend = 'db+postgres://postgres:password@postgres'

app = Celery(broker=BROKER_URL, backend=result_backend, include=['task'])

# If True, the task will report its status as “started” when it is executed by a worker.
# The following allows the status of a task to be tracked once it has started, not just when it succeeds.
# Disabled, as it may cause bugs in Postgres due to excessive inserting and updating of the result table.
app.conf.task_track_started = False

# building a custom message consumer to convert postgres-rabbitmq messages to a format that celery understands


class MyConsumerStep(bootsteps.ConsumerStep):
    def get_consumers(self, channel):
        return [
            Consumer(channel,
                     queues=[my_queue],
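The snippet above is cut off mid-call. As a separate, hedged sketch (the callback and the accept list are assumptions, not this project's code), a ConsumerStep that accepts plain-text messages typically ends up looking like this:

from celery import bootsteps
from kombu import Consumer, Exchange, Queue

plain_queue = Queue('scdf', Exchange('scdf'), 'routing_key')


class PlainTextConsumerStep(bootsteps.ConsumerStep):
    def get_consumers(self, channel):
        return [
            Consumer(channel,
                     queues=[plain_queue],
                     callbacks=[self.handle_message],
                     accept=['text/plain']),
        ]

    def handle_message(self, body, message):
        # body is the raw text payload; ack so the broker drops the message
        print('received: {0!r}'.format(body))
        message.ack()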
Example #14
CELERY_BROKER_URL = 'amqp://localhost'
# CELERY_RESULT_BACKEND = 'amqp://localhost' # todo remove this url and add it to env vars
CELERY_RESULT_BACKEND = config('DJANGO_CELERY_RESULT_BACKEND')

# Celery Data Format
CELERY_ACCEPT_CONTENT = ['pickle', 'application/x-python-serialize', 'json']
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_RESULT_SERIALIZER = 'pickle'
CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_CREATE_MISSING_QUEUES = True
CELERY_RESULT_EXPIRES = 3600
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'

from kombu.serialization import registry

registry.enable('json')
registry.enable('application/text')
registry.enable('application/x-python-serialize')

# task_routes = {
#     'path.to.the.new_task': {
#         'queue': 'tg_queue',
#         'routing_key': 'tg_queue',
#     },
#     'path.to.the.slow_task': {
#         'queue': 'slow_queue',
#         'routing_key': 'slow_queue'
#     }
# }

# task_annotations = {
Example #16
# Celery refuses to deserialize content from my custom serializer, throwing a ContentDisallowed exception
from kombu.serialization import registry
registry.enable('pickle')
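A hedged sketch of the usual pairing behind fixes like this (the app name and broker URL are placeholders): enable the content type in the kombu registry and also list it in Celery's accept_content, because a worker only trusts content types in its accept list.

from celery import Celery
from kombu.serialization import registry

registry.enable('pickle')
registry.enable('application/x-python-serialize')

app = Celery('tasks', broker='redis://localhost:6379/0')
app.conf.accept_content = ['json', 'pickle', 'application/x-python-serialize']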