def test_task_registry(self):
    r = TaskRegistry()
    self.assertIsInstance(r, dict, "TaskRegistry is mapping")

    self.assertRegisterUnregisterCls(r, MockTask)
    self.assertRegisterUnregisterCls(r, MockPeriodicTask)

    r.register(MockPeriodicTask)
    r.unregister(MockPeriodicTask.name)
    self.assertNotIn(MockPeriodicTask, r)
    r.register(MockPeriodicTask)

    tasks = dict(r)
    self.assertIsInstance(tasks.get(MockTask.name), MockTask)
    self.assertIsInstance(tasks.get(MockPeriodicTask.name), MockPeriodicTask)

    self.assertIsInstance(r[MockTask.name], MockTask)
    self.assertIsInstance(r[MockPeriodicTask.name], MockPeriodicTask)

    r.unregister(MockTask)
    self.assertNotIn(MockTask.name, r)
    r.unregister(MockPeriodicTask)
    self.assertNotIn(MockPeriodicTask.name, r)

    self.assertTrue(MockTask().run())
    self.assertTrue(MockPeriodicTask().run())
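For context, a minimal sketch of task fixtures that would satisfy this test. The test only needs classes with a name attribute and a run() that returns a truthy value; the bodies, the old-style celery.task base classes, and the "c.unittest.*" names below are illustrative assumptions, not the project's actual fixtures.

from datetime import timedelta

from celery.task import Task, PeriodicTask


class MockTask(Task):
    # Hypothetical fixture: a name and a run() are all the registry test needs.
    name = "c.unittest.mock_task"

    def run(self, **kwargs):
        return True


class MockPeriodicTask(PeriodicTask):
    # Hypothetical fixture: periodic tasks additionally declare a schedule.
    name = "c.unittest.mock_periodic_task"
    run_every = timedelta(hours=1)

    def run(self, **kwargs):
        return True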
def tasks_no_install(self):
    tr = TaskRegistry()
    # Use an explicit loop rather than map() so registration also happens
    # eagerly on Python 3, where map() is lazy.
    for task in self._internal_iter():
        tr.register(task)

    from scanworker.masterworker import MasterWorkerTask
    tr.register(MasterWorkerTask)

    from scanworker.result import (ScanRunResultHandlerTask,
                                   ScanRunErrorHandlerTask,
                                   ScanRunFinalizerTask)
    for task in (ScanRunErrorHandlerTask, ScanRunResultHandlerTask,
                 ScanRunFinalizerTask, EngineUpdateTask):
        tr.register(task)

    return tr
class test_TaskBucket(Case):

    def setUp(self):
        self.registry = TaskRegistry()
        self.task_classes = (TaskA, TaskB, TaskC)
        for task_cls in self.task_classes:
            self.registry.register(task_cls)

    @skip_if_disabled
    def test_get_nowait(self):
        x = buckets.TaskBucket(task_registry=self.registry)
        with self.assertRaises(buckets.Empty):
            x.get_nowait()

    @patch("celery.worker.buckets.sleep")
    def test_get_block(self, sleep):
        x = buckets.TaskBucket(task_registry=self.registry)
        x.not_empty = Mock()
        get = x._get = Mock()
        remaining = [0]

        def effect():
            if get.call_count == 1:
                raise Empty()
            rem = remaining[0]
            remaining[0] = 0
            return rem, Mock()
        get.side_effect = effect

        with mock_context(Mock()) as context:
            x.not_empty = context
            x.wait = Mock()
            x.get(block=True)

            get.reset()
            remaining[0] = 1
            x.get(block=True)

    def test_get_raises_rate(self):
        x = buckets.TaskBucket(task_registry=self.registry)
        x.buckets = {1: Mock()}
        x.buckets[1].get_nowait.side_effect = buckets.RateLimitExceeded()
        x.buckets[1].expected_time.return_value = 0
        x._get()

    @skip_if_disabled
    def test_refresh(self):
        reg = {}
        x = buckets.TaskBucket(task_registry=reg)
        reg["foo"] = "something"
        x.refresh()
        self.assertIn("foo", x.buckets)
        self.assertTrue(x.get_bucket_for_type("foo"))

    @skip_if_disabled
    def test__get_queue_for_type(self):
        x = buckets.TaskBucket(task_registry={})
        x.buckets["foo"] = buckets.TokenBucketQueue(fill_rate=1)
        self.assertIs(x._get_queue_for_type("foo"), x.buckets["foo"].queue)
        x.buckets["bar"] = buckets.FastQueue()
        self.assertIs(x._get_queue_for_type("bar"), x.buckets["bar"])

    @skip_if_disabled
    def test_update_bucket_for_type(self):
        bucket = buckets.TaskBucket(task_registry=self.registry)
        b = bucket._get_queue_for_type(TaskC.name)
        self.assertIs(bucket.update_bucket_for_type(TaskC.name).queue, b)
        self.assertIs(bucket.buckets[TaskC.name].queue, b)

    @skip_if_disabled
    def test_auto_add_on_missing_put(self):
        reg = {}
        b = buckets.TaskBucket(task_registry=reg)
        reg["nonexisting.task"] = "foo"

        b.put(MockJob(uuid(), "nonexisting.task", (), {}))
        self.assertIn("nonexisting.task", b.buckets)

    @skip_if_disabled
    def test_auto_add_on_missing(self):
        b = buckets.TaskBucket(task_registry=self.registry)
        for task_cls in self.task_classes:
            self.assertIn(task_cls.name, b.buckets.keys())
        self.registry.register(TaskD)
        self.assertTrue(b.get_bucket_for_type(TaskD.name))
        self.assertIn(TaskD.name, b.buckets.keys())
        self.registry.unregister(TaskD)

    @skip_if_disabled
    def test_has_rate_limits(self):
        b = buckets.TaskBucket(task_registry=self.registry)
        self.assertEqual(b.buckets[TaskA.name]._bucket.fill_rate, 10)
        self.assertIsInstance(b.buckets[TaskB.name], buckets.Queue)
        self.assertEqual(b.buckets[TaskC.name]._bucket.fill_rate, 1)
        self.registry.register(TaskD)
        b.init_with_registry()
        try:
            self.assertEqual(b.buckets[TaskD.name]._bucket.fill_rate, 1000 / 60.0)
        finally:
            self.registry.unregister(TaskD)

    @skip_if_disabled
    def test_on_empty_buckets__get_raises_empty(self):
        b = buckets.TaskBucket(task_registry=self.registry)
        with self.assertRaises(buckets.Empty):
            b.get(block=False)
        self.assertEqual(b.qsize(), 0)

    @skip_if_disabled
    def test_put__get(self):
        b = buckets.TaskBucket(task_registry=self.registry)
        job = MockJob(uuid(), TaskA.name, ["theqbf"], {"foo": "bar"})
        b.put(job)
        self.assertEqual(b.get(), job)

    @skip_if_disabled
    def test_fill_rate(self):
        b = buckets.TaskBucket(task_registry=self.registry)

        cjob = lambda i: MockJob(uuid(), TaskA.name, [i], {})
        jobs = [cjob(i) for i in xrange(20)]
        [b.put(job) for job in jobs]

        self.assertEqual(b.qsize(), 20)

        # 20 items should take at least one second to complete
        time_start = time.time()
        for i, job in enumerate(jobs):
            sys.stderr.write(".")
            self.assertEqual(b.get(), job)
        self.assertGreater(time.time() - time_start, 1.5)

    @skip_if_disabled
    def test__very_busy_queue_doesnt_block_others(self):
        b = buckets.TaskBucket(task_registry=self.registry)

        cjob = lambda i, t: MockJob(uuid(), t.name, [i], {})
        ajobs = [cjob(i, TaskA) for i in xrange(10)]
        bjobs = [cjob(i, TaskB) for i in xrange(20)]
        jobs = list(chain(*izip(bjobs, ajobs)))
        for job in jobs:
            b.put(job)

        got_ajobs = 0
        for job in (b.get() for i in xrange(20)):
            if job.name == TaskA.name:
                got_ajobs += 1

        self.assertGreater(got_ajobs, 2)

    @skip_if_disabled
    def test_thorough__multiple_types(self):
        self.registry.register(TaskD)
        try:
            b = buckets.TaskBucket(task_registry=self.registry)

            cjob = lambda i, t: MockJob(uuid(), t.name, [i], {})

            ajobs = [cjob(i, TaskA) for i in xrange(10)]
            bjobs = [cjob(i, TaskB) for i in xrange(10)]
            cjobs = [cjob(i, TaskC) for i in xrange(10)]
            djobs = [cjob(i, TaskD) for i in xrange(10)]

            # Spread the jobs around.
            jobs = list(chain(*izip(ajobs, bjobs, cjobs, djobs)))

            [b.put(job) for job in jobs]
            for i, job in enumerate(jobs):
                sys.stderr.write(".")
                self.assertTrue(b.get(), job)
            self.assertEqual(i + 1, len(jobs))
        finally:
            self.registry.unregister(TaskD)

    @skip_if_disabled
    def test_empty(self):
        x = buckets.TaskBucket(task_registry=self.registry)
        self.assertTrue(x.empty())
        x.put(MockJob(uuid(), TaskC.name, [], {}))
        self.assertFalse(x.empty())
        x.clear()
        self.assertTrue(x.empty())

    @skip_if_disabled
    def test_items(self):
        x = buckets.TaskBucket(task_registry=self.registry)
        x.buckets[TaskA.name].put(1)
        x.buckets[TaskB.name].put(2)
        x.buckets[TaskC.name].put(3)
        self.assertEqual(sorted(x.items), [1, 2, 3])
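For reference, a hedged sketch of task fixtures consistent with the rate limits this suite asserts: TaskA maps to a fill_rate of 10, TaskB to a plain (unlimited) queue, TaskC to 1, and TaskD to 1000/60. The class names match the tests above, but the bodies and "c.unittest.*" names are assumptions.

from celery.task import Task


class TaskA(Task):
    # "10/s" corresponds to the asserted fill_rate of 10.
    name = "c.unittest.TaskA"
    rate_limit = "10/s"


class TaskB(Task):
    # No rate limit, so the bucket falls back to a plain Queue.
    name = "c.unittest.TaskB"
    rate_limit = None


class TaskC(Task):
    # "1/s" corresponds to the asserted fill_rate of 1.
    name = "c.unittest.TaskC"
    rate_limit = "1/s"


class TaskD(Task):
    # "1000/m" corresponds to the asserted fill_rate of 1000 / 60.0.
    name = "c.unittest.TaskD"
    rate_limit = "1000/m"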
from __future__ import absolute_import

from celery import Celery
from celery.app.registry import TaskRegistry

from celery_module.celery_task_class import baseclass
from celery_module.config import Config

registry = TaskRegistry()
for model in Config.models:
    registry.register(baseclass(problem=model["problem"]))

app = Celery(
    "celery_module",
    broker="redis://",  # e.g. redis://192.168.38.6:6379/0
    backend="redis://",
    tasks=registry)

if __name__ == '__main__':
    app.start()
"""A task queue for machine learning model deployment.""" # this is imported here so that the python path is set correctly before importing Celery when executing outside of a virtual environment import model_task_queue import os from celery import Celery from celery.app.registry import TaskRegistry from model_task_queue import __name__ from model_task_queue.ml_model_task import MLModelPredictionTask from model_task_queue.config import Config # creating a TaskRegistry in order to be able to instantiate a dynamic number of task in the celery app registry = TaskRegistry() # instantiating the MLModelPredictionTask objects and adding them to a TaskRegistry object for model in Config.models: registry.register( MLModelPredictionTask(module_name=model["module_name"], class_name=model["class_name"])) # instantiating the Celery app object app = Celery(__name__, tasks=registry) # importing the connection settings app.config_from_object("model_task_queue.config:{}".format( os.environ['APP_SETTINGS']))
from celery.app.registry import TaskRegistry

from friendship.contrib.suggestions.backends import importers
from friendship.contrib.suggestions.backends.runners import AsyncRunner
from friendship.contrib.suggestions.settings import RUNNER

if issubclass(RUNNER, AsyncRunner):
    tasks = TaskRegistry()
    tasks.register(importers.GoogleImporter)
    tasks.register(importers.FacebookImporter)
    tasks.register(importers.TwitterImporter)
    tasks.register(importers.YahooImporter)
    tasks.register(importers.LinkedInImporter)
from __future__ import absolute_import

import os

from celery import Celery
from celery.app.registry import TaskRegistry
from django.apps import apps

from core.tasks import sample_task

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings")

app = Celery("core")
app.config_from_object("django.conf:settings", namespace="CELERY")

# Tell Celery to autodiscover the tasks.py modules in all installed app folders.
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])

registration = TaskRegistry()
registration.register(sample_task)
from celery.app.registry import TaskRegistry

from weather_forecast.tasks.receive_weather_forecast import ReceiveWeatherForecastTask
from weather_forecast.tasks.receive_weather_forecast_failure import ReceiveWeatherForecastFailureTask

task_registry = TaskRegistry()
task_registry.register(ReceiveWeatherForecastTask)
task_registry.register(ReceiveWeatherForecastFailureTask)
        profile.url,
        profile.url2,
        profile.dob,
        profile.ssn,
        profile.spouse,
        profile.department,
        profile.education,
        profile.student,
        profile.remember_login,
        profile.exported,
        profile.direct_mail,
        profile.notes,
        profile.admin_notes,
        profile.referral_source,
        profile.hide_in_search,
        profile.hide_address,
        profile.hide_email,
        profile.hide_phone,
        profile.first_responder,
        profile.agreed_to_tos,
        profile.original_username,
        '\n',
    ]
    data_row = [escape_csv(value) for value in data_row]
    data_rows.append(data_row)

    return render_excel(filename, field_list, data_rows, '.csv')


TaskRegistry.register(ExportProfilesTask)
from __future__ import absolute_import

import os

from celery import Celery
from celery.app.registry import TaskRegistry
from django.apps import apps

from drfelasticsearchdsl.tasks import (
    task_reindex_new_added_enterprise_in_background,
    task_reindex_new_added_skill_in_background,
    task_create_enterprise_saved_searches_in_background,
)

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drfelasticsearchdsl.settings")

app = Celery("drfelasticsearchdsl")

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings")

# Tell Celery to autodiscover the tasks.py modules in all installed app folders.
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])

registration = TaskRegistry()
registration.register(task_reindex_new_added_enterprise_in_background)
registration.register(task_reindex_new_added_skill_in_background)
registration.register(task_create_enterprise_saved_searches_in_background)
from celery.app.registry import TaskRegistry

from integration_open_weather.tasks.open_weather_forecast import OpenWeatherForecastTask

task_registry = TaskRegistry()
task_registry.register(OpenWeatherForecastTask)
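As in the earlier snippets, a registry built this way only takes effect once it is handed to a Celery app. A minimal sketch under that assumption follows; the app name and broker URL are placeholders, and the `tasks` keyword is the same mechanism used in the celery_module and model_task_queue examples above.

from celery import Celery

# Hypothetical wiring: pass the populated registry to the app via the
# `tasks` argument so the registered task is addressable by its name.
app = Celery("integration_open_weather", broker="redis://", tasks=task_registry)

if __name__ == "__main__":
    app.start()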