def test_worker_int_handler_only_stop_MainProcess(self):
    try:
        import _multiprocessing  # noqa
    except ImportError:
        raise SkipTest("only relevant for multiprocessing")
    process = current_process()
    name, process.name = process.name, "OtherProcess"
    # More than one active thread: SIGINT only requests a warm shutdown
    # by setting ``state.should_stop``.
    with patch("celery.apps.worker.active_thread_count") as c:
        c.return_value = 3
        try:
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_int_handler, worker)
            handlers["SIGINT"]("SIGINT", object())
            self.assertTrue(state.should_stop)
        finally:
            process.name = name
            state.should_stop = None
    # Single active thread: the handler raises WorkerShutdown directly.
    with patch("celery.apps.worker.active_thread_count") as c:
        c.return_value = 1
        try:
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_int_handler, worker)
            with self.assertRaises(WorkerShutdown):
                handlers["SIGINT"]("SIGINT", object())
        finally:
            process.name = name
            state.should_stop = None

def test_load(self):
    with patch('pkg_resources.iter_entry_points') as iterep:
        with patch('celery.bin.base.symbol_by_name') as symbyname:
            ep = Mock()
            ep.name = 'ep'
            ep.module_name = 'foo'
            ep.attrs = ['bar', 'baz']
            iterep.return_value = [ep]
            cls = symbyname.return_value = Mock()
            register = Mock()
            e = Extensions('unit', register)
            e.load()
            symbyname.assert_called_with('foo:bar')
            register.assert_called_with(cls, name='ep')

    with patch('celery.bin.base.symbol_by_name') as symbyname:
        symbyname.side_effect = SyntaxError()
        with patch('warnings.warn') as warn:
            e.load()
            self.assertTrue(warn.called)

    with patch('celery.bin.base.symbol_by_name') as symbyname:
        symbyname.side_effect = KeyError('foo')
        with self.assertRaises(KeyError):
            e.load()

def test_iterate_respects_subpolling_interval(self):
    r1 = self.app.AsyncResult(uuid())
    r2 = self.app.AsyncResult(uuid())
    backend = r1.backend = r2.backend = Mock()
    backend.subpolling_interval = 10

    ready = r1.ready = r2.ready = Mock()

    def se(*args, **kwargs):
        ready.side_effect = KeyError()
        return False
    ready.return_value = False
    ready.side_effect = se

    x = self.app.ResultSet([r1, r2])
    with self.dummy_copy():
        with patch('celery.result.time') as _time:
            with self.assertPendingDeprecation():
                with self.assertRaises(KeyError):
                    list(x.iterate())
                _time.sleep.assert_called_with(10)

        backend.subpolling_interval = 0
        with patch('celery.result.time') as _time:
            with self.assertPendingDeprecation():
                with self.assertRaises(KeyError):
                    ready.return_value = False
                    ready.side_effect = se
                    list(x.iterate())
                self.assertFalse(_time.sleep.called)

def test_startup_info(self, stdout, stderr):
    worker = self.Worker(app=self.app)
    worker.on_start()
    self.assertTrue(worker.startup_info())
    worker.loglevel = logging.DEBUG
    self.assertTrue(worker.startup_info())
    worker.loglevel = logging.INFO
    self.assertTrue(worker.startup_info())

    prev_loader = self.app.loader
    worker = self.Worker(
        app=self.app, queues='foo,bar,baz,xuzzy,do,re,mi',
    )
    with patch('celery.apps.worker.qualname') as qualname:
        qualname.return_value = 'acme.backed_beans.Loader'
        self.assertTrue(worker.startup_info())

    with patch('celery.apps.worker.qualname') as qualname:
        qualname.return_value = 'celery.loaders.Loader'
        self.assertTrue(worker.startup_info())

    from celery.loaders.app import AppLoader
    self.app.loader = AppLoader(app=self.app)
    self.assertTrue(worker.startup_info())

    self.app.loader = prev_loader
    worker.task_events = True
    self.assertTrue(worker.startup_info())

    # test when there are too few output lines
    # to draft the ascii art onto
    prev, cd.ARTLINES = cd.ARTLINES, ['the quick brown fox']
    try:
        self.assertTrue(worker.startup_info())
    finally:
        cd.ARTLINES = prev

def test_worker_term_hard_handler_only_stop_MainProcess(self):
    process = current_process()
    name, process.name = process.name, 'OtherProcess'
    try:
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 3
            worker = self._Worker()
            handlers = self.psig(
                cd.install_worker_term_hard_handler, worker)
            try:
                handlers['SIGQUIT']('SIGQUIT', object())
                self.assertTrue(state.should_terminate)
            finally:
                state.should_terminate = None
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 1
            worker = self._Worker()
            handlers = self.psig(
                cd.install_worker_term_hard_handler, worker)
            try:
                with self.assertRaises(WorkerTerminate):
                    handlers['SIGQUIT']('SIGQUIT', object())
            finally:
                state.should_terminate = None
    finally:
        process.name = name

def test_limit_task(self):
    c = self.get_consumer()

    # Token available: the request is reserved and handed to the pool.
    with patch('celery.worker.consumer.task_reserved') as reserved:
        bucket = Mock()
        request = Mock()
        bucket.can_consume.return_value = True

        c._limit_task(request, bucket, 3)
        bucket.can_consume.assert_called_with(3)
        reserved.assert_called_with(request)
        c.on_task_request.assert_called_with(request)

    # Token unavailable: the request is rescheduled via the timer.
    with patch('celery.worker.consumer.task_reserved') as reserved:
        bucket.can_consume.return_value = False
        bucket.expected_time.return_value = 3.33
        limit_order = c._limit_order
        c._limit_task(request, bucket, 4)
        self.assertEqual(c._limit_order, limit_order + 1)
        bucket.can_consume.assert_called_with(4)
        c.timer.call_after.assert_called_with(
            3.33, c._limit_move_to_pool, (request,),
            priority=c._limit_order,
        )
        bucket.expected_time.assert_called_with(4)
        self.assertFalse(reserved.called)

def test_worker_term_hard_handler_only_stop_MainProcess(self):
    try:
        import _multiprocessing  # noqa
    except ImportError:
        raise SkipTest('only relevant for multiprocessing')
    process = current_process()
    name, process.name = process.name, 'OtherProcess'
    try:
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 3
            worker = self._Worker()
            handlers = self.psig(
                cd.install_worker_term_hard_handler, worker)
            try:
                handlers['SIGQUIT']('SIGQUIT', object())
                self.assertTrue(state.should_terminate)
            finally:
                state.should_terminate = False
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 1
            worker = self._Worker()
            handlers = self.psig(
                cd.install_worker_term_hard_handler, worker)
            with self.assertRaises(WorkerTerminate):
                handlers['SIGQUIT']('SIGQUIT', object())
    finally:
        process.name = name

def test_compat_multi(self):
    with patch("celery.__main__.maybe_patch_concurrency") as mpc:
        with patch("celery.__main__._warn_deprecated") as depr:
            with patch("celery.bin.multi.main") as main:
                __main__._compat_multi()
                mpc.assert_called_with()
                depr.assert_called_with("celery multi")
                main.assert_called_with()

def test_compat_beat(self):
    with patch('celery.__main__.maybe_patch_concurrency') as mpc:
        with patch('celery.__main__._warn_deprecated') as depr:
            with patch('celery.bin.beat.main') as main:
                __main__._compat_beat()
                mpc.assert_called_with()
                depr.assert_called_with('celery beat')
                main.assert_called_with()

def test_compat_multi(self):
    with patch('celery.__main__.maybe_patch_concurrency') as mpc:
        with patch('celery.__main__._warn_deprecated') as depr:
            with patch('celery.bin.multi.main') as main:
                __main__._compat_multi()
                self.assertFalse(mpc.called)
                depr.assert_called_with('celery multi')
                main.assert_called_with()

def move_context(self, **kwargs):
    with patch('celery.contrib.migrate.start_filter') as start:
        with patch('celery.contrib.migrate.republish') as republish:
            pred = Mock(name='predicate')
            move(pred, app=self.app,
                 connection=self.app.connection(), **kwargs)
            self.assertTrue(start.called)
            callback = start.call_args[0][2]
            yield callback, pred, republish

def test_closes(self):
    with patch("os.close") as _close:
        with patch("celery.platforms.get_fdmax") as fdmax:
            fdmax.return_value = 3
            close_open_fds()
            _close.assert_has_calls([call(2), call(1), call(0)])
            _close.side_effect = OSError()
            _close.side_effect.errno = errno.EBADF
            close_open_fds()

def test_main__multi(self):
    with patch('celery.__main__.maybe_patch_concurrency') as mpc:
        with patch('celery.bin.celery.main') as main:
            prev, sys.argv = sys.argv, ['foo', 'multi']
            try:
                __main__.main()
                mpc.assert_not_called()
                main.assert_called_with()
            finally:
                sys.argv = prev

def test_process_shutdown_on_worker_shutdown(self):
    from celery.concurrency.prefork import process_destructor
    from celery.concurrency.asynpool import Worker
    with patch('celery.signals.worker_process_shutdown') as ws:
        with patch('os._exit') as _exit:
            worker = Worker(None, None, on_exit=process_destructor)
            worker._do_exit(22, 3.1415926)
            ws.send.assert_called_with(
                sender=None, pid=22, exitcode=3.1415926,
            )
            _exit.assert_called_with(3.1415926)

def test_closes(self):
    with patch('os.close') as _close:
        with patch('os.closerange', create=True) as closerange:
            with patch('celery.platforms.get_fdmax') as fdmax:
                fdmax.return_value = 3
                close_open_fds()
                if not closerange.called:
                    _close.assert_has_calls([call(2), call(1), call(0)])
                    _close.side_effect = OSError()
                    _close.side_effect.errno = errno.EBADF
                close_open_fds()

def test_apply_async_has__self__(self):
    @self.app.task(__self__='hello', shared=False)
    def aawsX():
        pass

    with patch('celery.app.amqp.AMQP.create_task_message') as create:
        with patch('celery.app.amqp.AMQP.send_task_message') as send:
            create.return_value = Mock(), Mock(), Mock(), Mock()
            aawsX.apply_async((4, 5))
            args = create.call_args[0][2]
            self.assertEqual(args, ('hello', 4, 5))
            self.assertTrue(send.called)

def test_process_shutdown_on_worker_shutdown(self):
    raise SkipTest("unstable test")
    from celery.concurrency.prefork import process_destructor
    from celery.concurrency.asynpool import Worker
    with patch("celery.signals.worker_process_shutdown") as ws:
        Worker._make_shortcuts = Mock()
        with patch("os._exit") as _exit:
            worker = Worker(None, None, on_exit=process_destructor)
            worker._do_exit(22, 3.1415926)
            ws.send.assert_called_with(
                sender=None, pid=22, exitcode=3.1415926,
            )
            _exit.assert_called_with(3.1415926)

def test_autodiscover_tasks(self):
    self.app.conf.CELERY_FORCE_BILLIARD_LOGGING = True
    with patch('celery.app.base.ensure_process_aware_logger') as ep:
        self.app.loader.autodiscover_tasks = Mock()
        self.app.autodiscover_tasks(['proj.A', 'proj.B'])
        ep.assert_called_with()
        self.app.loader.autodiscover_tasks.assert_called_with(
            ['proj.A', 'proj.B'], 'tasks',
        )

    with patch('celery.app.base.ensure_process_aware_logger') as ep:
        self.app.conf.CELERY_FORCE_BILLIARD_LOGGING = False
        self.app.autodiscover_tasks(['proj.A', 'proj.B'])
        self.assertFalse(ep.called)

def test_find_related_module(self):
    with patch('importlib.import_module') as imp:
        with patch('imp.find_module') as find:
            imp.return_value = Mock()
            imp.return_value.__path__ = 'foo'
            base.find_related_module(base, 'tasks')

            imp.side_effect = AttributeError()
            base.find_related_module(base, 'tasks')
            imp.side_effect = None

            find.side_effect = ImportError()
            base.find_related_module(base, 'tasks')

def test_run(self):
    with patch('celery.result.AsyncResult.get') as get:
        out = WhateverIO()
        r = result(app=self.app, stdout=out)
        get.return_value = 'Jerry'
        r.run('id')
        self.assertIn('Jerry', out.getvalue())

        get.return_value = 'Elaine'
        r.run('id', task=self.add.name)
        self.assertIn('Elaine', out.getvalue())

        with patch('celery.result.AsyncResult.traceback') as tb:
            r.run('id', task=self.add.name, traceback=True)
            self.assertIn(str(tb), out.getvalue())

def test_worker_term_hard_handler_when_single_threaded(self):
    with patch('celery.apps.worker.active_thread_count') as c:
        c.return_value = 1
        worker = self._Worker()
        handlers = self.psig(cd.install_worker_term_hard_handler, worker)
        with self.assertRaises(WorkerTerminate):
            handlers['SIGQUIT']('SIGQUIT', object())

def test_run(self, real):
    out = WhateverIO()
    i = inspect(app=self.app, stdout=out)
    with self.assertRaises(Error):
        i.run()
    with self.assertRaises(Error):
        i.run('help')
    with self.assertRaises(Error):
        i.run('xyzzybaz')

    i.run('ping')
    real.assert_called()
    i.run('ping', destination='foo,bar')
    self.assertEqual(real.call_args[1]['destination'], ['foo', 'bar'])
    self.assertEqual(real.call_args[1]['timeout'], 0.2)
    callback = real.call_args[1]['callback']

    callback({'foo': {'ok': 'pong'}})
    self.assertIn('OK', out.getvalue())

    with patch('celery.bin.celery.dumps') as dumps:
        i.run('ping', json=True)
        dumps.assert_called()

    instance = real.return_value = Mock()
    instance._request.return_value = None
    with self.assertRaises(Error):
        i.run('ping')

    out.seek(0)
    out.truncate()
    i.quiet = True
    i.say_chat('<-', 'hello')
    self.assertFalse(out.getvalue())

def test_cpu_count(self):
    with patch('celery.worker.cpu_count') as cpu_count:
        cpu_count.side_effect = NotImplementedError()
        w = self.app.Worker(concurrency=None)
        self.assertEqual(w.concurrency, 2)
    w = self.app.Worker(concurrency=5)
    self.assertEqual(w.concurrency, 5)

def test_run_from_argv(self):
    with patch('celery.bin.multi.MultiTool') as MultiTool:
        m = MultiTool.return_value = Mock()
        multi(self.app).run_from_argv('celery', ['arg'], command='multi')
        m.execute_from_commandline.assert_called_with(
            ['multi', 'arg'], 'celery',
        )

def test_on_worker_init(self):
    with self.fixup_context(self.app) as (f, _, _):
        with patch('celery.fixups.django.DjangoWorkerFixup') as DWF:
            f.on_worker_init()
            DWF.assert_called_with(f.app)
            DWF.return_value.install.assert_called_with()
            self.assertIs(f._worker_fixup, DWF.return_value)

def setup_election(self, g, c):
    g.start(c)
    g.clock = self.app.clock
    self.assertNotIn('idx', g.consensus_replies)
    self.assertIsNone(g.on_elect_ack({'id': 'idx'}))

    g.state.alive_workers.return_value = [
        '*****@*****.**', '*****@*****.**', '*****@*****.**',
    ]
    g.consensus_replies['id1'] = []
    g.consensus_requests['id1'] = []

    e1 = self.Event('id1', 1, '*****@*****.**')
    e2 = self.Event('id1', 2, '*****@*****.**')
    e3 = self.Event('id1', 3, '*****@*****.**')
    g.on_elect(e1)
    g.on_elect(e2)
    g.on_elect(e3)
    self.assertEqual(len(g.consensus_requests['id1']), 3)

    with patch('celery.worker.consumer.info'):
        g.on_elect_ack(e1)
        self.assertEqual(len(g.consensus_replies['id1']), 1)
        g.on_elect_ack(e2)
        self.assertEqual(len(g.consensus_replies['id1']), 2)
        g.on_elect_ack(e3)
        with self.assertRaises(KeyError):
            g.consensus_replies['id1']

def test_on_elect_ack_win_but_no_action(self):
    c = self.Consumer(hostname='*****@*****.**')  # I will win
    g = Gossip(c)
    g.election_handlers = {}
    with patch('celery.worker.consumer.error') as error:
        self.setup_election(g, c)
        self.assertTrue(error.called)

def test_warns_if_running_as_privileged_user(self, _exit):
    app = self.app
    if app.IS_WINDOWS:
        raise SkipTest('Not applicable on Windows')

    with patch('os.getuid') as getuid:
        getuid.return_value = 0
        self.app.conf.CELERY_ACCEPT_CONTENT = ['pickle']
        worker = self.Worker(app=self.app)
        worker.on_start()
        _exit.assert_called_with(1)

        from celery import platforms
        platforms.C_FORCE_ROOT = True
        try:
            with self.assertWarnsRegex(
                    RuntimeWarning, r'absolutely not recommended'):
                worker = self.Worker(app=self.app)
                worker.on_start()
        finally:
            platforms.C_FORCE_ROOT = False

        self.app.conf.CELERY_ACCEPT_CONTENT = ['json']
        with self.assertWarnsRegex(
                RuntimeWarning, r'absolutely not recommended'):
            worker = self.Worker(app=self.app)
            worker.on_start()

def test_shutdown(self):
    with patch('celery.worker.pidbox.ignore_errors') as eig:
        parent = Mock()
        pbox = Pidbox(parent)
        pbox._close_channel = Mock()
        self.assertIs(pbox.c, parent)
        pconsumer = pbox.consumer = Mock()
        cancel = pconsumer.cancel
        pbox.shutdown(parent)
        eig.assert_called_with(parent, cancel)
        pbox._close_channel.assert_called_with(parent)

def test_consume_raises_inner_block(self):
    with patch('kombu.connection.Connection.drain_events') as drain:

        def se(*args, **kwargs):
            drain.side_effect = ValueError()
            raise KeyError('foo')

        drain.side_effect = se
        b = AMQPBackend(self.app)
        with self.assertRaises(ValueError):
            next(b.consume('id1'))

def chord_context(self, size=1):
    with patch('celery.backends.redis.maybe_signature') as ms:
        tasks = [self.create_task() for i in range(size)]
        request = Mock(name='request')
        request.id = 'id1'
        request.group = 'gid1'
        callback = ms.return_value = Signature('add')
        callback.id = 'id1'
        callback['chord_size'] = size
        callback.delay = Mock(name='callback.delay')
        yield tasks, request, callback

def test_rdb(self, get_avail_port):
    sock = Mock()
    get_avail_port.return_value = (sock, 8000)
    sock.accept.return_value = (Mock(), ['helu'])
    out = WhateverIO()
    with Rdb(out=out) as rdb:
        self.assertTrue(get_avail_port.called)
        self.assertIn('helu', out.getvalue())

        # set_quit
        with patch('sys.settrace') as settrace:
            rdb.set_quit()
            settrace.assert_called_with(None)

        # set_trace
        with patch('celery.contrib.rdb.Pdb.set_trace') as pset:
            with patch('celery.contrib.rdb._frame'):
                rdb.set_trace()
                rdb.set_trace(Mock())
                pset.side_effect = SockErr
                pset.side_effect.errno = errno.ENOENT
                with self.assertRaises(SockErr):
                    rdb.set_trace()

        # _close_session
        rdb._close_session()
        rdb.active = True
        rdb._handle = None
        rdb._client = None
        rdb._sock = None
        rdb._close_session()

        # do_continue
        rdb.set_continue = Mock()
        rdb.do_continue(Mock())
        rdb.set_continue.assert_called_with()

        # do_quit
        rdb.set_quit = Mock()
        rdb.do_quit(Mock())
        rdb.set_quit.assert_called_with()

def test_worker_int_handler(self):
    worker = self._Worker()
    handlers = self.psig(cd.install_worker_int_handler, worker)
    next_handlers = {}
    state.should_stop = None
    state.should_terminate = None

    class Signals(platforms.Signals):

        def __setitem__(self, sig, handler):
            next_handlers[sig] = handler

    # With multiple active threads the first SIGINT requests a warm
    # shutdown and the replacement handler forces termination.
    with patch('celery.apps.worker.active_thread_count') as c:
        c.return_value = 3
        p, platforms.signals = platforms.signals, Signals()
        try:
            handlers['SIGINT']('SIGINT', object())
            self.assertTrue(state.should_stop)
            self.assertEqual(state.should_stop, EX_FAILURE)
        finally:
            platforms.signals = p
            state.should_stop = None

        try:
            next_handlers['SIGINT']('SIGINT', object())
            self.assertTrue(state.should_terminate)
            self.assertEqual(state.should_terminate, EX_FAILURE)
        finally:
            state.should_terminate = None

    # With a single active thread the exceptions are raised directly.
    with patch('celery.apps.worker.active_thread_count') as c:
        c.return_value = 1
        p, platforms.signals = platforms.signals, Signals()
        try:
            with self.assertRaises(WorkerShutdown):
                handlers['SIGINT']('SIGINT', object())
        finally:
            platforms.signals = p

        with self.assertRaises(WorkerTerminate):
            next_handlers['SIGINT']('SIGINT', object())

def test_get_connection_no_connection_mongodb_uri(self):
    with patch('pymongo.MongoClient') as mock_Connection:
        mongodb_uri = 'mongodb://%s:%d' % (MONGODB_HOST, MONGODB_PORT)
        self.backend._connection = None
        self.backend.host = mongodb_uri

        mock_Connection.return_value = sentinel.connection
        connection = self.backend._get_connection()
        mock_Connection.assert_called_once_with(
            host=mongodb_uri, **self.backend._prepare_client_options())
        self.assertEqual(sentinel.connection, connection)

def test_load_extensions_commands(self):
    with patch('celery.bin.celery.Extensions') as Ext:
        prev, mod.command_classes = list(mod.command_classes), Mock()
        try:
            ext = Ext.return_value = Mock(name='Extension')
            ext.load.return_value = ['foo', 'bar']
            x = CeleryCommand(app=self.app)
            x.load_extension_commands()
            mod.command_classes.append.assert_called_with(
                ('Extensions', ['foo', 'bar'], 'magenta'),
            )
        finally:
            mod.command_classes = prev

def test_autodiscover_tasks_lazy(self):
    with patch('celery.signals.import_modules') as import_modules:
        def lazy_list():
            return [1, 2, 3]
        self.app.autodiscover_tasks(lazy_list)
        self.assertTrue(import_modules.connect.called)
        prom = import_modules.connect.call_args[0][0]
        self.assertIsInstance(prom, promise)
        self.assertEqual(prom.fun, self.app._autodiscover_tasks)
        self.assertEqual(prom.args[0](), [1, 2, 3])

def test_max_restarts_exceeded(self):
    c = self.get_consumer()

    def se(*args, **kwargs):
        c.blueprint.state = CLOSE
        raise RestartFreqExceeded()
    c._restart_state.step.side_effect = se
    c.blueprint.start.side_effect = socket.error()

    with patch('celery.worker.consumer.sleep') as sleep:
        c.start()
        sleep.assert_called_with(1)

def test_get_connection_no_connection_host(self):
    with patch('pymongo.MongoClient') as mock_Connection:
        self.backend._connection = None
        self.backend.host = MONGODB_HOST
        self.backend.port = MONGODB_PORT
        mock_Connection.return_value = sentinel.connection

        connection = self.backend._get_connection()
        mock_Connection.assert_called_once_with(
            host='mongodb://localhost:27017',
            **self.backend._prepare_client_options())
        self.assertEqual(sentinel.connection, connection)

def test_crash(self):

    class T(bgThread):

        def body(self):
            raise KeyError()

    with patch('os._exit') as _exit:
        with override_stdouts():
            _exit.side_effect = ValueError()
            t = T()
            with self.assertRaises(ValueError):
                t.run()
            _exit.assert_called_with(1)

def test_open(self, dup2, open, close, closer, umask, chdir,
              _exit, setsid, fork):
    x = DaemonContext(workdir='/opt/workdir', umask=0o22)
    x.stdfds = [0, 1, 2]

    fork.return_value = 0
    with x:
        self.assertTrue(x._is_open)
        with x:
            pass
    self.assertEqual(fork.call_count, 2)
    setsid.assert_called_with()
    self.assertFalse(_exit.called)

    chdir.assert_called_with(x.workdir)
    umask.assert_called_with(0o22)
    self.assertTrue(dup2.called)

    fork.reset_mock()
    fork.return_value = 1
    x = DaemonContext(workdir='/opt/workdir')
    x.stdfds = [0, 1, 2]
    with x:
        pass
    self.assertEqual(fork.call_count, 1)
    _exit.assert_called_with(0)

    x = DaemonContext(workdir='/opt/workdir', fake=True)
    x.stdfds = [0, 1, 2]
    x._detach = Mock()
    with x:
        pass
    self.assertFalse(x._detach.called)

    x.after_chdir = Mock()
    with x:
        pass
    x.after_chdir.assert_called_with()

    # umask given as a string is parsed; both "0755" (octal) and
    # "493" (decimal) end up as 493.
    x = DaemonContext(workdir='/opt/workdir', umask="0755")
    self.assertEqual(x.umask, 493)
    x = DaemonContext(workdir='/opt/workdir', umask="493")
    self.assertEqual(x.umask, 493)

    x.redirect_to_null(None)

    with patch('celery.platforms.mputil') as mputil:
        x = DaemonContext(after_forkers=True)
        x.open()
        mputil._run_after_forkers.assert_called_with()
        x = DaemonContext(after_forkers=False)
        x.open()

def test_fixup(self):
    with patch('celery.fixups.django.DjangoFixup') as Fixup:
        with patch.dict(os.environ, DJANGO_SETTINGS_MODULE=''):
            fixup(self.app)
            self.assertFalse(Fixup.called)
        with patch.dict(os.environ, DJANGO_SETTINGS_MODULE='settings'):
            with mask_modules('django'):
                with self.assertWarnsRegex(UserWarning, 'but Django is'):
                    fixup(self.app)
                self.assertFalse(Fixup.called)
            with patch_modules('django'):
                fixup(self.app)
                self.assertTrue(Fixup.called)

def test_get_connection_no_connection_host(self):
    with patch('pymongo.MongoClient') as mock_Connection:
        self.backend._connection = None
        self.backend.host = MONGODB_HOST
        self.backend.port = MONGODB_PORT
        mock_Connection.return_value = sentinel.connection

        connection = self.backend._get_connection()
        mock_Connection.assert_called_once_with(
            host='mongodb://localhost:27017',
            ssl=False, max_pool_size=10,
            auto_start_request=False)
        self.assertEqual(sentinel.connection, connection)

def test_on_close_clears_semaphore_timer_and_reqs(self):
    with patch('celery.worker.consumer.consumer.reserved_requests') as res:
        c = self.get_consumer()
        c.on_close()
        c.controller.semaphore.clear.assert_called_with()
        c.timer.clear.assert_called_with()
        res.clear.assert_called_with()
        c.pool.flush.assert_called_with()

        c.controller = None
        c.timer = None
        c.pool = None
        c.on_close()

def test_install(self):
    self.app.conf = {'CELERY_DB_REUSE_MAX': None}
    self.app.loader = Mock()
    with self.fixup_context(self.app) as (f, _, _):
        with patch('celery.fixups.django.signals') as sigs:
            f.install()
            sigs.beat_embedded_init.connect.assert_called_with(
                f.close_database,
            )
            sigs.worker_ready.connect.assert_called_with(f.on_worker_ready)
            sigs.task_prerun.connect.assert_called_with(f.on_task_prerun)
            sigs.task_postrun.connect.assert_called_with(f.on_task_postrun)
            sigs.worker_process_init.connect.assert_called_with(
                f.on_worker_process_init,
            )

def test_fixup(self):
    with patch('celery.fixups.django.DjangoFixup') as Fixup:
        with patch.dict(os.environ, DJANGO_SETTINGS_MODULE=''):
            fixup(self.app)
            Fixup.assert_not_called()
        with patch.dict(os.environ, DJANGO_SETTINGS_MODULE='settings'):
            with mock.mask_modules('django'):
                with self.assertWarnsRegex(UserWarning, 'but Django is'):
                    fixup(self.app)
                Fixup.assert_not_called()
            with mock.module_exists('django'):
                fixup(self.app)
                Fixup.assert_called()

def test_purge(self):
    s = LimitedSet(maxlen=None)
    [s.add(i) for i in range(10)]
    s.maxlen = 2
    s.purge(1)
    self.assertEqual(len(s), 9)
    s.purge(None)
    self.assertEqual(len(s), 2)

    # expired
    s = LimitedSet(maxlen=None, expires=1)
    [s.add(i) for i in range(10)]
    s.maxlen = 2
    s.purge(1, now=lambda: time() + 100)
    self.assertEqual(len(s), 9)
    s.purge(None, now=lambda: time() + 100)
    self.assertEqual(len(s), 2)

    # not expired
    s = LimitedSet(maxlen=None, expires=1)
    [s.add(i) for i in range(10)]
    s.maxlen = 2
    s.purge(1, now=lambda: time() - 100)
    self.assertEqual(len(s), 10)
    s.purge(None, now=lambda: time() - 100)
    self.assertEqual(len(s), 10)

    s = LimitedSet(maxlen=None)
    [s.add(i) for i in range(10)]
    s.maxlen = 2
    with patch('celery.datastructures.heappop') as hp:
        hp.side_effect = IndexError()
        s.purge()
        hp.assert_called_with(s._heap)
    with patch('celery.datastructures.heappop') as hp:
        s._data = {i * 2: i * 2 for i in range(10)}
        s.purge()
        self.assertEqual(hp.call_count, 10)

def test_start(self):
    c = Mock()
    c.timer = Mock()
    c.event_dispatcher = Mock()

    with patch('celery.worker.heartbeat.Heart') as hcls:
        h = Heart(c)
        self.assertTrue(h.enabled)
        self.assertIsNone(c.heart)

        h.start(c)
        self.assertTrue(c.heart)
        hcls.assert_called_with(c.timer, c.event_dispatcher)
        c.heart.start.assert_called_with()

def test_limit_task(self):
    c = self.get_consumer()

    with patch('celery.worker.consumer.task_reserved') as reserved:
        bucket = Mock()
        request = Mock()
        bucket.can_consume.return_value = True

        c._limit_task(request, bucket, 3)
        bucket.can_consume.assert_called_with(3)
        reserved.assert_called_with(request)
        c.on_task_request.assert_called_with(request)

    with patch('celery.worker.consumer.task_reserved') as reserved:
        bucket.can_consume.return_value = False
        bucket.expected_time.return_value = 3.33
        c._limit_task(request, bucket, 4)
        bucket.can_consume.assert_called_with(4)
        c.timer.call_after.assert_called_with(
            3.33, c._limit_task, (request, bucket, 4),
        )
        bucket.expected_time.assert_called_with(4)
        self.assertFalse(reserved.called)

def test_get_connection_no_connection_mongodb_uri(self):
    with patch('pymongo.MongoClient') as mock_Connection:
        mongodb_uri = 'mongodb://%s:%d' % (MONGODB_HOST, MONGODB_PORT)
        self.backend._connection = None
        self.backend.host = mongodb_uri

        mock_Connection.return_value = sentinel.connection
        connection = self.backend._get_connection()
        mock_Connection.assert_called_once_with(
            host=mongodb_uri, max_pool_size=10,
            auto_start_request=False)
        self.assertEqual(sentinel.connection, connection)

def test_worker_term_handler_only_stop_MainProcess(self):
    try:
        import _multiprocessing  # noqa
    except ImportError:
        raise SkipTest('only relevant for multiprocessing')
    process = current_process()
    name, process.name = process.name, 'OtherProcess'
    try:
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 3
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_term_handler, worker)
            handlers['SIGTERM']('SIGTERM', object())
            self.assertEqual(state.should_stop, EX_OK)
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 1
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_term_handler, worker)
            with self.assertRaises(WorkerShutdown):
                handlers['SIGTERM']('SIGTERM', object())
    finally:
        process.name = name
        state.should_stop = None

def test_setgroups_raises_EPERM(self, hack, getgroups):
    with patch('os.sysconf') as sysconf:
        sysconf.side_effect = ValueError()
        eperm = OSError()
        eperm.errno = errno.EPERM
        hack.side_effect = eperm
        getgroups.return_value = list(range(400))
        setgroups(list(range(400)))
        getgroups.assert_called_with()

        getgroups.return_value = [1000]
        with self.assertRaises(OSError):
            setgroups(list(range(400)))
        getgroups.assert_called_with()

def test_sync_clock_and_purge(self):
    passthrough = Mock()
    passthrough.side_effect = lambda x: x
    with patch('celery.worker.state.revoked') as revoked:
        d = {'clock': 0}
        self.p.clock = Mock()
        self.p.clock.forward.return_value = 627
        self.p._dumps = passthrough
        self.p.compress = passthrough
        self.p._sync_with(d)
        revoked.purge.assert_called_with()
        self.assertEqual(d['clock'], 627)
        self.assertNotIn('revoked', d)
        self.assertIs(d['zrevoked'], revoked)

def test_process_initializer(self, set_mp_process_title, _signals):
    with restore_logging():
        from celery import signals
        from celery._state import _tls
        from celery.concurrency.prefork import (
            process_initializer, WORKER_SIGRESET, WORKER_SIGIGNORE,
        )

        def on_worker_process_init(**kwargs):
            on_worker_process_init.called = True
        on_worker_process_init.called = False
        signals.worker_process_init.connect(on_worker_process_init)

        def Loader(*args, **kwargs):
            loader = Mock(*args, **kwargs)
            loader.conf = {}
            loader.override_backends = {}
            return loader

        with self.Celery(loader=Loader) as app:
            app.conf = AttributeDict(DEFAULTS)
            process_initializer(app, 'awesome.worker.com')
            _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
            _signals.reset.assert_any_call(*WORKER_SIGRESET)
            self.assertTrue(app.loader.init_worker.call_count)
            self.assertTrue(on_worker_process_init.called)
            self.assertIs(_tls.current_app, app)
            set_mp_process_title.assert_called_with(
                'celeryd', hostname='awesome.worker.com',
            )

            with patch('celery.app.trace.setup_worker_optimizations') as S:
                os.environ['FORKED_BY_MULTIPROCESSING'] = "1"
                try:
                    process_initializer(app, 'luke.worker.com')
                    S.assert_called_with(app, 'luke.worker.com')
                finally:
                    os.environ.pop('FORKED_BY_MULTIPROCESSING', None)

            os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
            app.log.setup = Mock(name='log_setup')
            try:
                process_initializer(app, 'luke.worker.com')
            finally:
                os.environ.pop('CELERY_LOG_FILE', None)

def test_worker_int_handler_only_stop_MainProcess(self):
    process = current_process()
    name, process.name = process.name, 'OtherProcess'
    with patch('celery.apps.worker.active_thread_count') as c:
        c.return_value = 3
        try:
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_int_handler, worker)
            handlers['SIGINT']('SIGINT', object())
            self.assertTrue(state.should_stop)
        finally:
            process.name = name
            state.should_stop = None
    with patch('celery.apps.worker.active_thread_count') as c:
        c.return_value = 1
        try:
            worker = self._Worker()
            handlers = self.psig(cd.install_worker_int_handler, worker)
            with self.assertRaises(WorkerShutdown):
                handlers['SIGINT']('SIGINT', object())
        finally:
            process.name = name
            state.should_stop = None

def test_on_elect(self):
    c = self.Consumer()
    g = Gossip(c)
    g.start(c)

    event = self.Event('id1')
    g.on_elect(event)
    in_heap = g.consensus_requests['id1']
    self.assertTrue(in_heap)
    g.dispatcher.send.assert_called_with('worker-elect-ack', id='id1')

    event.pop('clock')
    with patch('celery.worker.consumer.error') as error:
        g.on_elect(event)
        self.assertTrue(error.called)

def test_startup_info(self, stdout, stderr):
    worker = self.Worker(app=self.app)
    worker.on_start()
    self.assertTrue(worker.startup_info())
    worker.loglevel = logging.DEBUG
    self.assertTrue(worker.startup_info())
    worker.loglevel = logging.INFO
    self.assertTrue(worker.startup_info())
    worker.autoscale = 13, 10
    self.assertTrue(worker.startup_info())

    prev_loader = self.app.loader
    worker = self.Worker(
        app=self.app, queues='foo,bar,baz,xuzzy,do,re,mi',
    )
    with patch('celery.apps.worker.qualname') as qualname:
        qualname.return_value = 'acme.backed_beans.Loader'
        self.assertTrue(worker.startup_info())

    with patch('celery.apps.worker.qualname') as qualname:
        qualname.return_value = 'celery.loaders.Loader'
        self.assertTrue(worker.startup_info())

    from celery.loaders.app import AppLoader
    self.app.loader = AppLoader(app=self.app)
    self.assertTrue(worker.startup_info())

    self.app.loader = prev_loader
    worker.send_events = True
    self.assertTrue(worker.startup_info())

    # test when there are too few output lines
    # to draft the ascii art onto
    prev, cd.ARTLINES = cd.ARTLINES, ['the quick brown fox']
    try:
        self.assertTrue(worker.startup_info())
    finally:
        cd.ARTLINES = prev

def test_fire_timers_raises(self):
    hub = Hub()
    eback = Mock()
    eback.side_effect = KeyError('foo')
    hub.timer = Mock()
    hub.scheduler = iter([(0, eback)])
    with self.assertRaises(KeyError):
        hub.fire_timers(propagate=(KeyError, ))

    eback.side_effect = ValueError('foo')
    hub.scheduler = iter([(0, eback)])
    with patch('kombu.async.hub.logger') as logger:
        with self.assertRaises(StopIteration):
            hub.fire_timers()
        self.assertTrue(logger.error.called)

def test_call_task(self):
    c = self.Consumer()
    c.app.connection_for_read = _amqp_connection()
    g = Gossip(c)
    g.start(c)

    signature = g.app.signature = Mock(name='app.signature')
    task = Mock()
    g.call_task(task)
    signature.assert_called_with(task)
    signature.return_value.apply_async.assert_called_with()

    signature.return_value.apply_async.side_effect = MemoryError()
    with patch('celery.worker.consumer.gossip.error') as error:
        g.call_task(task)
        error.assert_called()