def test_recycle(r): # set up the Sentinel list_key = 'test_recycle_test:q' async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) start_event = Event() stop_event = Event() # override settings Conf.RECYCLE = 2 Conf.WORKERS = 1 # set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, list_key=list_key) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) task_queue = Queue() result_queue = Queue() # push two tasks pusher(task_queue, stop_event, list_key=list_key, r=r) pusher(task_queue, stop_event, list_key=list_key, r=r) # worker should exit on recycle worker(task_queue, result_queue, Value('f', -1)) # check if the work has been done assert result_queue.qsize() == 2 # save_limit test Conf.SAVE_LIMIT = 1 result_queue.put('STOP') # run monitor monitor(result_queue) assert Success.objects.count() == Conf.SAVE_LIMIT r.delete(list_key)
def test_recycle(broker, monkeypatch): # set up the Sentinel broker.list_key = 'test_recycle_test:q' async('django_q.tests.tasks.multiply', 2, 2, broker=broker) async('django_q.tests.tasks.multiply', 2, 2, broker=broker) async('django_q.tests.tasks.multiply', 2, 2, broker=broker) start_event = Event() stop_event = Event() # override settings monkeypatch.setattr(Conf, 'RECYCLE', 2) monkeypatch.setattr(Conf, 'WORKERS', 1) # set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, broker=broker) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 async('django_q.tests.tasks.multiply', 2, 2, broker=broker) async('django_q.tests.tasks.multiply', 2, 2, broker=broker) task_queue = Queue() result_queue = Queue() # push two tasks pusher(task_queue, stop_event, broker=broker) pusher(task_queue, stop_event, broker=broker) # worker should exit on recycle worker(task_queue, result_queue, Value('f', -1)) # check if the work has been done assert result_queue.qsize() == 2 # save_limit test monkeypatch.setattr(Conf, 'SAVE_LIMIT', 1) result_queue.put('STOP') # run monitor monitor(result_queue) assert Success.objects.count() == Conf.SAVE_LIMIT broker.delete_queue()
def test_sentinel():
    """A Sentinel started with the stop event already set must report STOPPED."""
    start_event = Event()
    stop_event = Event()
    stop_event.set()
    sentinel = Sentinel(stop_event, start_event, list_key='sentinel_test:q')
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
def test_max_rss(broker, monkeypatch):
    """A worker whose RSS exceeds MAX_RSS is recycled; monitor honours SAVE_LIMIT."""
    # set up the Sentinel
    broker.list_key = "test_max_rss_test:q"
    async_task("django_q.tests.tasks.multiply", 2, 2, broker=broker)
    start_event = Event()
    stop_event = Event()
    cluster_id = uuidlib.uuid4()
    # override settings (restored automatically by monkeypatch)
    monkeypatch.setattr(Conf, "MAX_RSS", 40000)
    monkeypatch.setattr(Conf, "WORKERS", 1)
    # schedule the Sentinel shutdown
    threading.Timer(3, stop_event.set).start()
    sentinel = Sentinel(stop_event, start_event, cluster_id=cluster_id, broker=broker)
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
    assert sentinel.reincarnations == 1
    async_task("django_q.tests.tasks.multiply", 2, 2, broker=broker)
    task_queue = Queue()
    result_queue = Queue()
    # push the task
    pusher(task_queue, stop_event, broker=broker)
    # worker should exit once it goes over the rss limit
    worker(task_queue, result_queue, Value("f", -1))
    # the single task must have produced a result
    assert result_queue.qsize() == 1
    # save_limit test
    monkeypatch.setattr(Conf, "SAVE_LIMIT", 1)
    result_queue.put("STOP")
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    broker.delete_queue()
def test_recycle(r): # set up the Sentinel list_key = 'test_recycle_test:q' async ('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async ('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async ('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) start_event = Event() stop_event = Event() # override settings Conf.RECYCLE = 2 Conf.WORKERS = 1 # set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, list_key=list_key) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 async ('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) async ('django_q.tests.tasks.multiply', 2, 2, list_key=list_key, redis=r) task_queue = Queue() result_queue = Queue() # push two tasks pusher(task_queue, stop_event, list_key=list_key, r=r) pusher(task_queue, stop_event, list_key=list_key, r=r) # worker should exit on recycle worker(task_queue, result_queue, Value('f', -1)) # check if the work has been done assert result_queue.qsize() == 2 # save_limit test Conf.SAVE_LIMIT = 1 result_queue.put('STOP') # run monitor monitor(result_queue) assert Success.objects.count() == Conf.SAVE_LIMIT r.delete(list_key)
def test_recycle(broker, monkeypatch): # set up the Sentinel broker.list_key = 'test_recycle_test:q' async ('django_q.tests.tasks.multiply', 2, 2, broker=broker) async ('django_q.tests.tasks.multiply', 2, 2, broker=broker) async ('django_q.tests.tasks.multiply', 2, 2, broker=broker) start_event = Event() stop_event = Event() # override settings monkeypatch.setattr(Conf, 'RECYCLE', 2) monkeypatch.setattr(Conf, 'WORKERS', 1) # set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, broker=broker) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 async ('django_q.tests.tasks.multiply', 2, 2, broker=broker) async ('django_q.tests.tasks.multiply', 2, 2, broker=broker) task_queue = Queue() result_queue = Queue() # push two tasks pusher(task_queue, stop_event, broker=broker) pusher(task_queue, stop_event, broker=broker) # worker should exit on recycle worker(task_queue, result_queue, Value('f', -1)) # check if the work has been done assert result_queue.qsize() == 2 # save_limit test monkeypatch.setattr(Conf, 'SAVE_LIMIT', 1) result_queue.put('STOP') # run monitor monitor(result_queue) assert Success.objects.count() == Conf.SAVE_LIMIT broker.delete_queue()
def test_sentinel():
    """Sentinel constructed with a pre-set stop event starts and reports STOPPED."""
    start_event = Event()
    stop_event = Event()
    stop_event.set()
    sentinel = Sentinel(stop_event, start_event, broker=get_broker('sentinel_test:q'))
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
def test_sentinel():
    """Sentinel with an explicit cluster_id and pre-set stop event reports STOPPED."""
    start_event = Event()
    stop_event = Event()
    stop_event.set()
    cluster_id = uuidlib.uuid4()
    sentinel = Sentinel(stop_event, start_event, cluster_id=cluster_id,
                        broker=get_broker('sentinel_test:q'))
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
def test_timeout(r): # set up the Sentinel list_key = 'timeout_test:q' async('django_q.tests.tasks.count_forever', list_key=list_key) start_event = Event() stop_event = Event() # Set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, list_key=list_key, timeout=1) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1
def test_timeout(r): # set up the Sentinel list_key = 'timeout_test:q' async ('django_q.tests.tasks.count_forever', list_key=list_key) start_event = Event() stop_event = Event() # Set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, list_key=list_key, timeout=1) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1
def test_timeout_override(broker): # set up the Sentinel broker.list_key = 'timeout_override_test:q' async ('django_q.tests.tasks.count_forever', broker=broker, timeout=1) start_event = Event() stop_event = Event() # Set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, broker=broker, timeout=10) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 broker.delete_queue()
def test_timeout_override(broker): # set up the Sentinel broker.list_key = 'timeout_override_test:q' async('django_q.tests.tasks.count_forever', broker=broker, timeout=1) start_event = Event() stop_event = Event() # Set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, broker=broker, timeout=10) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 broker.delete_queue()
def test_timeout_task_finishes(broker, cluster_config_timeout, async_task_kwargs):
    """A task that completes within the timeout must not trigger a reincarnation."""
    # set up the Sentinel
    broker.list_key = 'timeout_test:q'
    broker.purge_queue()
    async_task('time.sleep', 3, broker=broker, **async_task_kwargs)
    start_event = Event()
    stop_event = Event()
    # stop the Sentinel only after the task has had time to finish
    threading.Timer(6, stop_event.set).start()
    sentinel = Sentinel(stop_event, start_event, broker=broker,
                        timeout=cluster_config_timeout)
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
    assert sentinel.reincarnations == 0
    broker.delete_queue()
def test_bad_secret(broker, monkeypatch):
    """After SECRET_KEY changes, old stats vanish and stale-signed tasks are dropped."""
    broker.list_key = "test_bad_secret:q"
    async_task("math.copysign", 1, -1, broker=broker)
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    cluster_id = uuidlib.uuid4()
    sentinel = Sentinel(stop_event, start_event, cluster_id=cluster_id,
                        broker=broker, start=False)
    Stat(sentinel).save()
    # change the SECRET
    monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS")
    stats = Stat.get_all()
    assert len(stats) == 0
    assert Stat.get(pid=sentinel.parent_pid, cluster_id=cluster_id) is None
    task_queue = Queue()
    pusher(task_queue, stop_event, broker=broker)
    result_queue = Queue()
    task_queue.put("STOP")
    # the worker must refuse the badly-signed task
    worker(task_queue, result_queue, Value("f", -1))
    assert result_queue.qsize() == 0
    broker.delete_queue()
def test_recycle(r): # set up the Sentinel list_key = 'test_recycle_test:q' async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key) async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key) async('django_q.tests.tasks.multiply', 2, 2, list_key=list_key) start_event = Event() stop_event = Event() # override settings Conf.RECYCLE = 2 Conf.WORKERS = 1 # Set a timer to stop the Sentinel threading.Timer(3, stop_event.set).start() s = Sentinel(stop_event, start_event, list_key=list_key) assert start_event.is_set() assert s.status() == Conf.STOPPED assert s.reincarnations == 1 r.delete(list_key)
def test_bad_broker(broker, mocker):
    """Stat.save() must close the broker connection when set_stat raises."""
    mocker.patch.object(broker, 'set_stat',
                        side_effect=Exception('Unusable connection'))
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    sentinel = Sentinel(stop_event, start_event, broker=broker, start=False)
    mock_close = mocker.patch.object(broker, 'close')
    Stat(sentinel).save()
    assert mock_close.called
def test_timeout(broker, cluster_config_timeout, async_task_kwargs):
    """A task running past the configured timeout gets its worker reincarnated."""
    # set up the Sentinel
    broker.list_key = "timeout_test:q"
    broker.purge_queue()
    async_task("time.sleep", 5, broker=broker, **async_task_kwargs)
    start_event = Event()
    stop_event = Event()
    cluster_id = uuidlib.uuid4()
    # stop the Sentinel before the task can finish
    threading.Timer(3, stop_event.set).start()
    sentinel = Sentinel(
        stop_event,
        start_event,
        cluster_id=cluster_id,
        broker=broker,
        timeout=cluster_config_timeout,
    )
    assert start_event.is_set()
    assert sentinel.status() == Conf.STOPPED
    assert sentinel.reincarnations == 1
    broker.delete_queue()
def test_bad_secret(broker, monkeypatch): broker.list_key = 'test_bad_secret:q' async('math.copysign', 1, -1, broker=broker) stop_event = Event() stop_event.set() start_event = Event() s = Sentinel(stop_event, start_event, broker=broker, start=False) Stat(s).save() # change the SECRET monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS") stat = Stat.get_all() assert len(stat) == 0 assert Stat.get(s.parent_pid) is None task_queue = Queue() pusher(task_queue, stop_event, broker=broker) result_queue = Queue() task_queue.put('STOP') worker(task_queue, result_queue, Value('f', -1), ) assert result_queue.qsize() == 0 broker.delete_queue()
def select_django_q_settings(self, queue):
    """
    Django Q doesn't allow us to have separate configs per queue.
    Update the Conf dict manually with our adjusted settings.
    """
    # Guard against a conflicting queue name coming in from the environment.
    env_queue = os.environ.get('Q_CLUSTER_QUEUE', queue) or queue
    if env_queue != queue:
        raise CommandError(
            'conflicting Q_CLUSTER_QUEUE env/option: {!r} != {!r}'.format(
                env_queue, queue))
    settings.Q_CLUSTER_QUEUE = queue
    settings_q = settings.Q_CLUSTER
    if queue == settings.Q_DUTREE_QUEUE:
        # Keep the live Conf class and the settings dict in sync.
        settings_q['workers'] = Conf.WORKERS = settings.Q_DUTREE_WORKERS
        settings_q['scheduler'] = Conf.SCHEDULER = False
    # Double check that the Sentinel picks up the values from our
    # updated Conf class.
    dummy_sentinel = Sentinel(None, None, None, start=False)
    assert dummy_sentinel.pool_size == settings_q['workers']