def test_getattr__setattr(self):
    """Attribute access mirrors item access on AttributeDict."""
    subject = AttributeDict({"foo": "bar"})
    # Item lookup sees the initial mapping.
    self.assertEqual(subject["foo"], "bar")
    # Missing keys surface as AttributeError on attribute access.
    with self.assertRaises(AttributeError):
        subject.bar
    # Setting an attribute becomes visible through item lookup.
    subject.bar = "foo"
    self.assertEqual(subject["bar"], "foo")
def test_getattr__setattr(self):
    """AttributeDict exposes items as attributes and vice versa."""
    mapping = AttributeDict({'foo': 'bar'})
    self.assertEqual(mapping['foo'], 'bar')
    # An unknown attribute must raise rather than return None.
    with self.assertRaises(AttributeError):
        mapping.bar
    # Attribute assignment is reflected in item lookup.
    mapping.bar = 'foo'
    self.assertEqual(mapping['bar'], 'foo')
def setup_settings(self, settingsdict):
    """Merge *settingsdict* over DEFAULT_SETTINGS and resolve the
    error-whitelist entries from dotted names to actual classes."""
    merged = AttributeDict(DEFAULT_SETTINGS, **settingsdict)
    resolved = []
    for fqn in merged.CELERY_TASK_ERROR_WHITELIST:
        mod, cls = fqn.rsplit('.', 1)
        resolved.append(getattr(import_module(mod), cls))
    merged.CELERY_TASK_ERROR_WHITELIST = tuple(resolved)
    return merged
def setup_settings(self, settingsdict):
    """Merge *settingsdict* over DEFAULT_SETTINGS, union INSTALLED_APPS
    from both sides, and resolve whitelist dotted names to classes."""
    merged = AttributeDict(DEFAULT_SETTINGS, **settingsdict)
    # Union of default and user-supplied app lists (order unspecified,
    # as in the original set-based merge).
    combined_apps = set(
        list(DEFAULT_SETTINGS["INSTALLED_APPS"]) +
        list(merged.INSTALLED_APPS))
    merged.INSTALLED_APPS = tuple(combined_apps)
    resolved = []
    for fqn in merged.CELERY_TASK_ERROR_WHITELIST:
        mod, cls = fqn.rsplit('.', 1)
        resolved.append(getattr(import_module(mod), cls))
    merged.CELERY_TASK_ERROR_WHITELIST = tuple(resolved)
    return merged
def test_process_initializer(self, set_mp_process_title, _signals):
    # Integration-style check of process_initializer(): after forking a
    # pool worker it must configure signal handlers, initialise the
    # loader, fire worker_process_init, bind the current app and set the
    # process title.
    from celery import Celery
    from celery import signals
    from celery._state import _tls
    from celery.concurrency.processes import process_initializer
    from celery.concurrency.processes import (WORKER_SIGRESET,
                                              WORKER_SIGIGNORE)

    # Receiver records that the worker_process_init signal fired.
    def on_worker_process_init(**kwargs):
        on_worker_process_init.called = True
    on_worker_process_init.called = False
    signals.worker_process_init.connect(on_worker_process_init)

    loader = Mock()
    loader.override_backends = {}
    app = Celery(loader=loader, set_as_current=False)
    app.loader = loader
    app.conf = AttributeDict(DEFAULTS)
    process_initializer(app, 'awesome.worker.com')
    # The worker signal sets must be ignored/reset respectively.
    _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
    _signals.reset.assert_any_call(*WORKER_SIGRESET)
    self.assertTrue(app.loader.init_worker.call_count)
    self.assertTrue(on_worker_process_init.called)
    # The forked child must adopt `app` as the current app.
    self.assertIs(_tls.current_app, app)
    set_mp_process_title.assert_called_with(
        'celeryd', hostname='awesome.worker.com',
    )

    # When FORKED_BY_MULTIPROCESSING is set, the initializer must also
    # install the worker optimizations.
    with patch('celery.app.trace.setup_worker_optimizations') as swo:
        os.environ['FORKED_BY_MULTIPROCESSING'] = "1"
        try:
            process_initializer(app, 'luke.worker.com')
            swo.assert_called_with(app)
        finally:
            os.environ.pop('FORKED_BY_MULTIPROCESSING', None)
def test_process_initializer(self, set_mp_process_title, _signals):
    # Variant of the pool-initializer test that asserts directly on the
    # signal mocks' call_args_list instead of assert_any_call.
    from celery import Celery
    from celery import signals
    from celery.app import _tls
    from celery.concurrency.processes import process_initializer
    from celery.concurrency.processes import (WORKER_SIGRESET,
                                              WORKER_SIGIGNORE)

    # Receiver records that worker_process_init fired in the child.
    def on_worker_process_init(**kwargs):
        on_worker_process_init.called = True
    on_worker_process_init.called = False
    signals.worker_process_init.connect(on_worker_process_init)

    loader = Mock()
    app = Celery(loader=loader, set_as_current=False)
    app.conf = AttributeDict(DEFAULTS)
    process_initializer(app, "awesome.worker.com")
    # Each (args, kwargs) pair must appear among the recorded calls.
    self.assertIn((tuple(WORKER_SIGIGNORE), {}),
                  _signals.ignore.call_args_list)
    self.assertIn((tuple(WORKER_SIGRESET), {}),
                  _signals.reset.call_args_list)
    self.assertTrue(app.loader.init_worker.call_count)
    self.assertTrue(on_worker_process_init.called)
    # The forked child must adopt `app` as the current app.
    self.assertIs(_tls.current_app, app)
    set_mp_process_title.assert_called_with("celeryd",
                                            hostname="awesome.worker.com")
def test_process_initializer(self, set_mp_process_title, _signals):
    # Checks that process_initializer() prepares a forked pool worker:
    # signal handling, loader init, signal dispatch, current-app binding
    # and process title.
    from celery import Celery
    from celery import signals
    from celery._state import _tls
    from celery.concurrency.processes import process_initializer
    from celery.concurrency.processes import (WORKER_SIGRESET,
                                              WORKER_SIGIGNORE)

    # Receiver records that the worker_process_init signal fired.
    def on_worker_process_init(**kwargs):
        on_worker_process_init.called = True
    on_worker_process_init.called = False
    signals.worker_process_init.connect(on_worker_process_init)

    loader = Mock()
    loader.override_backends = {}
    app = Celery(loader=loader, set_as_current=False)
    app.loader = loader
    app.conf = AttributeDict(DEFAULTS)
    process_initializer(app, 'awesome.worker.com')
    # The worker signal sets must be ignored/reset respectively.
    _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
    _signals.reset.assert_any_call(*WORKER_SIGRESET)
    self.assertTrue(app.loader.init_worker.call_count)
    self.assertTrue(on_worker_process_init.called)
    # The forked child must adopt `app` as the current app.
    self.assertIs(_tls.current_app, app)
    set_mp_process_title.assert_called_with(
        'celeryd', hostname='awesome.worker.com',
    )
def __init__(self, c):
    """Bind this pidbox to consumer *c* and create its mailbox node."""
    self.c = c
    self.hostname = c.hostname
    # State shared with every remote-control command handler.
    handler_state = AttributeDict(app=c.app, hostname=c.hostname,
                                  consumer=c)
    self.node = c.app.control.mailbox.Node(
        c.hostname,
        handlers=control.Panel.data,
        state=handler_state,
    )
def test_conf_raises_KeyError(self):
    """Backend construction must tolerate a sparse configuration."""
    minimal_conf = {
        'result_serializer': 'json',
        'result_cache_max': 1,
        'result_expires': None,
        'accept_content': ['json'],
    }
    self.app.conf = AttributeDict(minimal_conf)
    self.Backend(app=self.app)
def test_conf_raises_KeyError(self):
    """new_join Backend must construct from a conf missing some keys."""
    sparse_conf = {
        'CELERY_RESULT_SERIALIZER': 'json',
        'CELERY_MAX_CACHED_RESULTS': 1,
        'CELERY_ACCEPT_CONTENT': ['json'],
        'CELERY_TASK_RESULT_EXPIRES': None,
    }
    self.app.conf = AttributeDict(sparse_conf)
    self.Backend(app=self.app, new_join=True)
def test_conf_raises_KeyError(self):
    """MockBackend must construct even when conf lacks expected keys."""
    sparse = {'CELERY_RESULT_SERIALIZER': 'json',
              'CELERY_MAX_CACHED_RESULTS': 1,
              'CELERY_TASK_RESULT_EXPIRES': None}
    # Swap in the sparse conf, always restoring the original afterwards.
    prev = current_app.conf
    current_app.conf = AttributeDict(sparse)
    try:
        self.MockBackend()
    finally:
        current_app.conf = prev
def __init__(self, c):
    """Attach to consumer *c*: remember the hostname, build the mailbox
    node and cache the logical-clock forward callable."""
    self.c = c
    self.hostname = c.hostname
    # State handed to every remote-control command handler.
    handler_state = AttributeDict(app=c.app, hostname=c.hostname,
                                  consumer=c)
    self.node = c.app.control.mailbox.Node(
        safe_str(c.hostname),
        handlers=control.Panel.data,
        state=handler_state,
    )
    self._forward_clock = self.c.app.clock.forward
def __init__(self, ready_queue, init_callback=noop, send_events=False,
             hostname=None, initial_prefetch_count=2, pool=None, app=None,
             timer=None, controller=None, hub=None, amqheartbeat=None,
             **kwargs):
    """Initialize the consumer.

    Wires together the ready queue, the pidbox (remote-control) node,
    the broker's error classes, and optional event-hub / AMQP-heartbeat
    integration.  Actual connections are established later, not here.
    """
    self.app = app_or_default(app)
    # Connection/consumer objects are created lazily on start.
    self.connection = None
    self.task_consumer = None
    self.controller = controller
    self.broadcast_consumer = None
    self.ready_queue = ready_queue
    self.send_events = send_events
    self.init_callback = init_callback
    self.hostname = hostname or socket.gethostname()
    self.initial_prefetch_count = initial_prefetch_count
    self.event_dispatcher = None
    self.heart = None
    self.pool = pool
    self.timer = timer or timer2.default_timer
    # State shared with remote-control command handlers.
    pidbox_state = AttributeDict(app=self.app,
                                 hostname=self.hostname,
                                 listener=self,     # pre 2.2
                                 consumer=self)
    self.pidbox_node = self.app.control.mailbox.Node(self.hostname,
                                                     state=pidbox_state,
                                                     handlers=Panel.data)
    # Cache the transport's error classes for later retry handling.
    conninfo = self.app.connection()
    self.connection_errors = conninfo.connection_errors
    self.channel_errors = conninfo.channel_errors

    self._does_info = logger.isEnabledFor(logging.INFO)
    self.strategies = {}
    if hub:
        hub.on_init.append(self.on_poll_init)
    self.hub = hub
    self._quick_put = self.ready_queue.put

    self.amqheartbeat = amqheartbeat
    if self.amqheartbeat is None:
        self.amqheartbeat = self.app.conf.BROKER_HEARTBEAT
    # Heartbeats require the async hub; disabled otherwise.
    if not hub:
        self.amqheartbeat = 0

    if _detect_environment() == 'gevent':
        # there's a gevent bug that causes timeouts to not be reset,
        # so if the connection timeout is exceeded once, it can NEVER
        # connect again.
        self.app.conf.BROKER_CONNECTION_TIMEOUT = None
def test_conf_raises_KeyError(self):
    """MockBackend must cope with an app conf missing expected keys."""
    sparse = {
        'CELERY_RESULT_SERIALIZER': 'json',
        'CELERY_MAX_CACHED_RESULTS': 1,
        'CELERY_ACCEPT_CONTENT': ['json'],
        'CELERY_TASK_RESULT_EXPIRES': None,
    }
    # Swap in the sparse conf, restoring the original on the way out.
    prev = self.app.conf
    self.app.conf = AttributeDict(sparse)
    try:
        self.MockBackend(app=self.app)
    finally:
        self.app.conf = prev
def __init__(self, body, on_ack=noop, hostname=None, eventer=None,
             app=None, connection_errors=None, request_dict=None,
             delivery_info=None, task=None, Context=Context, **opts):
    """Build a task request from a raw message *body*.

    NOTE(review): `utc`, `NEEDS_KWDICT`, `tz_utc` and `self.tzlocal`
    are not defined in this block — presumably module/class level in
    the full file; verify there.
    """
    self.app = app
    self.name = body['task']
    self.id = body['id']
    self.args = body['args']
    try:
        self.kwargs = body['kwargs']
        if NEEDS_KWDICT:
            # NOTE(review): kwdict presumably normalizes kwarg keys for
            # **-expansion on older Pythons — confirm in its definition.
            self.kwargs = kwdict(self.kwargs)
    except KeyError:
        self.kwargs = {}
    try:
        self.flags = body['flags']
    except KeyError:
        pass
    self.on_ack = on_ack
    self.hostname = hostname
    self.eventer = eventer
    self.connection_errors = connection_errors or ()
    self.task = task or self.app._tasks[self.name]
    # eta/expires are parsed from ISO-8601 and converted to local time;
    # source tz depends on the module-level `utc` flag.
    if 'eta' in body:
        eta = body['eta']
        tz = tz_utc if utc else self.tzlocal
        self.eta = tz_to_local(maybe_iso8601(eta), self.tzlocal, tz)
    if 'expires' in body:
        expires = body['expires']
        tz = tz_utc if utc else self.tzlocal
        self.expires = tz_to_local(maybe_iso8601(expires),
                                   self.tzlocal, tz)
    if delivery_info:
        self.delivery_info = {
            'exchange': delivery_info.get('exchange'),
            'routing_key': delivery_info.get('routing_key'),
        }
    # Execution context defaults; message body fields override them.
    self.request_dict = AttributeDict(
        {'called_directly': False,
         'callbacks': [],
         'errbacks': [],
         'chord': None}, **body)
def __init__(self, body, on_ack=noop, hostname=None, eventer=None,
             app=None, connection_errors=None, request_dict=None,
             delivery_info=None, task=None, Context=Context, **opts):
    """Build a task request from a raw message *body*.

    NOTE(review): `utc`, `NEEDS_KWDICT`, `tz_utc` and `self.tzlocal`
    are not defined in this block — presumably module/class level in
    the full file; verify there.
    """
    self.app = app
    self.name = body["task"]
    self.id = body["id"]
    self.args = body["args"]
    try:
        self.kwargs = body["kwargs"]
        if NEEDS_KWDICT:
            # NOTE(review): kwdict presumably normalizes kwarg keys for
            # **-expansion on older Pythons — confirm in its definition.
            self.kwargs = kwdict(self.kwargs)
    except KeyError:
        self.kwargs = {}
    try:
        self.flags = body["flags"]
    except KeyError:
        pass
    self.on_ack = on_ack
    self.hostname = hostname
    self.eventer = eventer
    self.connection_errors = connection_errors or ()
    self.task = task or self.app._tasks[self.name]
    # eta/expires are parsed from ISO-8601 and converted to local time;
    # source tz depends on the module-level `utc` flag.
    if "eta" in body:
        eta = body["eta"]
        tz = tz_utc if utc else self.tzlocal
        self.eta = tz_to_local(maybe_iso8601(eta), self.tzlocal, tz)
    if "expires" in body:
        expires = body["expires"]
        tz = tz_utc if utc else self.tzlocal
        self.expires = tz_to_local(maybe_iso8601(expires),
                                   self.tzlocal, tz)
    if delivery_info:
        self.delivery_info = {
            "exchange": delivery_info.get("exchange"),
            "routing_key": delivery_info.get("routing_key"),
        }
    # Execution context defaults; message body fields override them.
    self.request_dict = AttributeDict(
        {"called_directly": False,
         "callbacks": [],
         "errbacks": [],
         "chord": None}, **body)
def test_process_initializer(self, set_mp_process_title, _signals):
    # Prefork-pool variant: verifies process_initializer() configures a
    # forked worker (signals, loader, current app, process title) and
    # handles the FORKED_BY_MULTIPROCESSING / CELERY_LOG_FILE env vars.
    with restore_logging():
        from celery import signals
        from celery._state import _tls
        from celery.concurrency.prefork import (
            process_initializer, WORKER_SIGRESET, WORKER_SIGIGNORE,
        )

        # Receiver records that worker_process_init fired.
        def on_worker_process_init(**kwargs):
            on_worker_process_init.called = True
        on_worker_process_init.called = False
        signals.worker_process_init.connect(on_worker_process_init)

        # Loader factory producing a Mock with just enough attributes.
        def Loader(*args, **kwargs):
            loader = Mock(*args, **kwargs)
            loader.conf = {}
            loader.override_backends = {}
            return loader

        with self.Celery(loader=Loader) as app:
            app.conf = AttributeDict(DEFAULTS)
            process_initializer(app, 'awesome.worker.com')
            # The worker signal sets must be ignored/reset.
            _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
            _signals.reset.assert_any_call(*WORKER_SIGRESET)
            self.assertTrue(app.loader.init_worker.call_count)
            self.assertTrue(on_worker_process_init.called)
            # The forked child must adopt `app` as the current app.
            self.assertIs(_tls.current_app, app)
            set_mp_process_title.assert_called_with(
                'celeryd', hostname='awesome.worker.com',
            )

            # FORKED_BY_MULTIPROCESSING triggers worker optimizations.
            with patch('celery.app.trace.setup_worker_optimizations') \
                    as S:
                os.environ['FORKED_BY_MULTIPROCESSING'] = "1"
                try:
                    process_initializer(app, 'luke.worker.com')
                    S.assert_called_with(app, 'luke.worker.com')
                finally:
                    os.environ.pop('FORKED_BY_MULTIPROCESSING', None)

            # CELERY_LOG_FILE with a %I placeholder must not break
            # logging setup in the child.
            os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
            app.log.setup = Mock(name='log_setup')
            try:
                process_initializer(app, 'luke.worker.com')
            finally:
                os.environ.pop('CELERY_LOG_FILE', None)
def create_state(self, **kwargs):
    """Wrap *kwargs* as an AttributeDict with app/hostname defaults."""
    defaults = (('app', self.app), ('hostname', hostname))
    for key, value in defaults:
        kwargs.setdefault(key, value)
    return AttributeDict(kwargs)
# NOTE(review): the five assignments below are class attributes — the
# tail of a _LOG subclass whose `class` header lies outside this chunk.
    id = 141
    format = _(u'Reply by reviewer on {addon} {version}.')
    short = _(u'Reviewer Reply')
    keep = True
    review_queue = True


# Collect every _LOG subclass defined in this module into the registry.
LOGS = [
    x for x in vars().values()
    if isclass(x) and issubclass(x, _LOG) and x != _LOG
]
# Make sure there's no duplicate IDs.
assert len(LOGS) == len(set(log.id for log in LOGS))

LOG_BY_ID = dict((l.id, l) for l in LOGS)
# Attribute-style access to log classes by their class name.
LOG = AttributeDict((l.__name__, l) for l in LOGS)
# Id lists derived from marker attributes present on each log class.
LOG_ADMINS = [l.id for l in LOGS if hasattr(l, 'admin_event')]
LOG_KEEP = [l.id for l in LOGS if hasattr(l, 'keep')]
LOG_EDITORS = [l.id for l in LOGS if hasattr(l, 'editor_event')]
LOG_REVIEW_QUEUE = [l.id for l in LOGS if hasattr(l, 'review_queue')]
LOG_EDITOR_REVIEW_ACTION = [
    l.id for l in LOGS if hasattr(l, 'editor_review_action')
]
# Is the user emailed the message?
LOG_REVIEW_EMAIL_USER = [l.id for l in LOGS
                         if hasattr(l, 'review_email_user')]
# Logs *not* to show to the developer.
LOG_HIDE_DEVELOPER = [
    l.id for l in LOGS
    if (getattr(l, 'hide_developer', False) or l.id in LOG_ADMINS)
]
def test_defaults(self):
    """Command exposes truthy defaults whether given or not."""
    # With defaults=None the command must fall back to built-ins.
    implicit = Command(defaults=None)
    self.assertTrue(implicit.defaults)
    # Explicitly supplied defaults are also kept truthy.
    explicit = Command(defaults=AttributeDict({"foo": "bar"}))
    self.assertTrue(explicit.defaults)
def setup_settings(self, settingsdict):
    """Expose *settingsdict* with attribute-style access."""
    settings = AttributeDict(settingsdict)
    return settings
def create_state(self, **kwargs):
    """Return handler state as an AttributeDict, defaulting logger/app."""
    # Default logger is evaluated unconditionally, as in setdefault's
    # eager argument evaluation.
    default_logger = self.app.log.get_default_logger()
    kwargs.setdefault("logger", default_logger)
    kwargs.setdefault("app", self.app)
    return AttributeDict(kwargs)
# Human-readable labels for the two notification groups.
NOTIFICATION_GROUPS = {'dev': _('Developer'),
                       'user': _('User Notifications')}

# App-specific notification classes and lookups derived from them.
APP_NOTIFICATIONS = [app_reply, app_new_review, app_reviewed,
                     app_individual_contact, app_surveys, app_regions]
APP_NOTIFICATIONS_BY_ID = dict((l.id, l) for l in APP_NOTIFICATIONS)
APP_NOTIFICATIONS_DEFAULT = [l.id for l in APP_NOTIFICATIONS]
APP_NOTIFICATIONS_CHOICES = [(l.id, l.label) for l in APP_NOTIFICATIONS]
APP_NOTIFICATIONS_CHOICES_NOT_DEV = [(l.id, l.label)
                                     for l in APP_NOTIFICATIONS
                                     if l.group != 'dev']

# Collect every non-app _NOTIFICATION subclass defined in this module.
NOTIFICATIONS = [x for x in vars().values()
                 if isclass(x) and issubclass(x, _NOTIFICATION)
                 and x != _NOTIFICATION and not getattr(x, 'app', False)]
NOTIFICATIONS_BY_ID = dict((l.id, l) for l in NOTIFICATIONS)

# Combined lookups covering both app and non-app notifications.
ALL_NOTIFICATIONS_BY_ID = dict(
    (l.id, l) for l in NOTIFICATIONS + APP_NOTIFICATIONS)
NOTIFICATIONS_BY_SHORT = dict(
    (l.short, l) for l in NOTIFICATIONS + APP_NOTIFICATIONS)
# Attribute-style access to notification classes by class name.
NOTIFICATION = AttributeDict((l.__name__, l) for l in NOTIFICATIONS)

NOTIFICATIONS_DEFAULT = [l.id for l in NOTIFICATIONS if l.default_checked]
NOTIFICATIONS_CHOICES = [(l.id, l.label) for l in NOTIFICATIONS]
NOTIFICATIONS_CHOICES_NOT_DEV = [(l.id, l.label) for l in NOTIFICATIONS
                                 if l.group != 'dev']
# Benchmark fragment: push 100k executions straight through
# trace_task_ret and print the elapsed time.
# NOTE(review): T, tid, P, hostname, Message, Consumer and current_app
# are defined elsewhere in this file — verify before running standalone.
task = {'task': T.name, 'args': (), 'kwargs': {}, 'id': tid, 'flags': 0}
app = current_app._get_current_object()

def on_task(req):
    # Execute each accepted request on the pool `P`.
    req.execute_using_pool(P)

def on_ack(*a):
    pass

m = Message(None, {}, {}, task)
x = Consumer(on_task, hostname=hostname, app=app)
x.update_strategies()
name = T.name
ts = time()
from celery.datastructures import AttributeDict
from celery.app.trace import trace_task_ret
# Minimal request context expected by the trace machinery; message
# fields from `task` override the defaults.
request = AttributeDict(
    {'called_directly': False,
     'callbacks': [],
     'errbacks': [],
     'chord': None}, **task)
for i in range(100000):
    trace_task_ret(T, tid, (), {}, request)
print(time() - ts)
def create_state(self, **kwargs):
    """Return pidbox handler state as an AttributeDict, defaulting app."""
    fallback_app = self.app
    kwargs.setdefault("app", fallback_app)
    return AttributeDict(kwargs)
# Benchmark fragment (older Python 2 variant, note `xrange`): push 100k
# executions through trace_task_ret and print the elapsed time.
# NOTE(review): T, tid, P, task, hostname, app, Message, Consumer and
# Queue are defined elsewhere in this file — verify before running.
ready_queue = Queue()

def on_put(req):
    # Execute requests immediately on pool `P` instead of queueing.
    req.execute_using_pool(P)

def on_ack(*a):
    pass

m = Message(None, {}, {}, task)
ready_queue.put = on_put
x = Consumer(ready_queue, hostname=hostname, app=app)
x.update_strategies()
name = T.name
ts = time()
from celery.datastructures import AttributeDict
from celery.task.trace import trace_task_ret
# Minimal request context expected by the trace machinery; message
# fields from `task` override the defaults.
request = AttributeDict(
    {"called_directly": False,
     "callbacks": [],
     "errbacks": [],
     "chord": None}, **task)
for i in xrange(100000):
    trace_task_ret(T, tid, (), {}, request)
print(time() - ts)