def test_disappeared_job(redis: Redis, random_queue_name: str):
    """A job whose Redis data expires before pickup makes the worker raise NoSuchJob."""
    enqueue(
        redis,
        random_queue_name,
        "minique_tests.jobs.sum_positive_values",
        job_ttl=1,
    )
    assert Queue(redis, random_queue_name).length == 1
    time.sleep(2)  # Let the 1-second TTL lapse before the worker looks at the job.
    w = TestWorker.for_queue_names(redis, random_queue_name)
    with pytest.raises(NoSuchJob):  # It's expired :(
        w.tick()
def test_duplicate_names(redis: Redis, random_queue_name: str):
    """Re-enqueueing with an id that is already in use raises DuplicateJob."""
    first = enqueue(redis, random_queue_name, "minique_tests.jobs.sum_positive_values")
    with pytest.raises(DuplicateJob):
        enqueue(
            redis,
            random_queue_name,
            "minique_tests.jobs.sum_positive_values",
            job_id=first.id,
        )
def test_unjsonable_retval(redis: Redis, random_queue_name: str, sentry_event_calls):
    """A return value that JSON cannot encode fails the job with a TypeError."""
    job = enqueue(redis, random_queue_name, job_with_unjsonable_retval)
    TestWorker.for_queue_names(redis, random_queue_name).tick()
    assert job.status == JobStatus.FAILED
    result = job.result
    assert result["exception_type"] == "TypeError"
    assert "not JSON serializable" in result["exception_value"]
    check_sentry_event_calls(sentry_event_calls, 1)
def test_cancel(redis: Redis, random_queue_name: str) -> None:
    """Canceling a queued job removes it from the queue before a worker can run it."""
    job = enqueue(redis, random_queue_name, "minique_tests.jobs.sum_positive_values")
    queue = Queue(redis, random_queue_name)
    assert queue.length == 1
    cancel_job(redis, job.id)
    # Canceling does remove the job from the queue
    assert queue.length == 0
    TestWorker.for_queue_names(redis, random_queue_name).tick()
def test_invalid_callable_name(
    redis: Redis, random_queue_name: str, sentry_event_calls
):
    """A callable outside the allowed set fails the job with InvalidJob."""
    job = enqueue(redis, random_queue_name, "os.system", {"command": "evil"})
    TestWorker.for_queue_names(redis, random_queue_name).tick()
    assert job.has_finished
    assert job.status == JobStatus.FAILED
    assert job.result["exception_type"] == "InvalidJob"
    check_sentry_event_calls(sentry_event_calls, 1)
def test_rerun_done_job(redis: Redis, random_queue_name: str, sentry_event_calls):
    """Picking up a job that already finished raises AlreadyAcquired."""
    job = enqueue(redis, random_queue_name, "minique_tests.jobs.sum_positive_values")
    worker = TestWorker.for_queue_names(redis, random_queue_name)
    worker.tick()
    assert job.has_finished
    # This should normally never be possible,
    # but let's re-enqueue the job anyway by touching some internals:
    redis.rpush(Queue(redis, random_queue_name).redis_key, job.id)
    with pytest.raises(AlreadyAcquired):
        worker.tick()
    check_sentry_event_calls(sentry_event_calls, 2)
def test_ensure_enqueued(redis: Redis, random_queue_name: str) -> None:
    """ensure_enqueued() reports (was_re-enqueued, position) and refuses finished jobs."""
    j1 = enqueue(redis, random_queue_name, "minique_tests.jobs.sum_positive_values")
    j2 = enqueue(redis, random_queue_name, "minique_tests.jobs.sum_positive_values")
    queue = j1.get_queue()
    assert queue.length == 2
    assert j1.ensure_enqueued() == (False, 0)  # Did not need to re-enqueue
    assert j2.ensure_enqueued() == (False, 1)  # Did not need to re-enqueue
    # pop first item, must be the first job
    assert redis.lpop(queue.redis_key) == j1.id.encode()
    assert queue.length == 1
    assert j1.ensure_enqueued() == (True, 1)  # Did re-enqueue in last position
    assert j2.ensure_enqueued() == (False, 0)  # Did not need to re-enqueue
    # Drain the queue one tick at a time.
    for remaining in (1, 0):
        TestWorker.for_queue_names(redis, queue.name).tick()
        assert queue.length == remaining
    for job in (j1, j2):
        with pytest.raises(Exception):  # Refuses to be enqueued after completion
            job.ensure_enqueued()
def test_job_runner_override(
    redis: Redis, random_queue_name: str, capsys, problem: bool
):
    """A worker with an overridden job runner honks for every job, alarmed on failure."""
    kwargs = {"a": "err", "b": -8} if problem else {"a": 10, "b": 15}
    job = enqueue(
        redis, random_queue_name, "minique_tests.jobs.sum_positive_values", kwargs
    )
    assert not job.has_finished
    worker = HonkWorker.for_queue_names(redis, [random_queue_name])
    assert worker.tick() == job  # we executed that particular job, right?
    assert job.has_finished
    captured = capsys.readouterr()[0]
    assert "Hooooooooonk." in captured
    assert ("Alarmed honk!" in captured) == problem
def test_job_message(redis: Redis, random_queue_name: str) -> None:
    """Messages set from inside a running job become visible via job.meta."""
    job = enqueue(redis, random_queue_name, job_with_a_message)
    runner = threading.Thread(target=run_synchronously, args=(job,))
    runner.start()

    def expect_meta(expected):
        # The job hands us a nonce via the outbox, then blocks until we echo
        # it back through the inbox — giving us a window to inspect job.meta.
        nonce = message_test_outbox.get()
        assert job.meta == expected
        message_test_inbox.put(nonce)

    expect_meta("oh, hello")
    expect_meta({"message": "progress occurs", "status": [1, 2, 3]})
    runner.join()
    assert job.result == 42
def test_custom_encoding(redis: Redis, random_queue_name: str):
    """A registered custom encoding round-trips values plain JSON cannot (a set)."""
    job = enqueue(
        redis,
        random_queue_name,
        callable="minique_tests.jobs.wrap_kwargs",
        kwargs={"foo": {1, 4, 8}},
        encoding_name="special_json",
    )
    run_synchronously(job)
    assert job.encoding_name == "special_json"
    assert job.result == {"foo": {8, 1, 4}}
    # The custom encoder must have tagged the set in the stored payload.
    assert b"$set$" in job.encoded_result
def test_basics(redis: Redis, success: bool, random_queue_name: str) -> None:
    """End-to-end success/failure path: enqueue, tick, then inspect the stored job."""
    kwargs = {"a": 10, "b": 15 if success else 0}
    job = enqueue(
        redis, random_queue_name, "minique_tests.jobs.sum_positive_values", kwargs
    )
    assert not job.has_finished
    assert job.kwargs == kwargs
    worker = TestWorker.for_queue_names(redis, [random_queue_name])
    assert worker.tick() == job  # we executed that particular job, right?
    # Check both the in-hand object and a fresh copy fetched from Redis.
    for j in (job, get_job(redis, job.id)):
        assert j.encoding_name == "json"
        assert j.has_finished
        assert j.acquisition_info["worker"] == worker.id
        assert j.duration > 0
        if success:
            assert j.status == JobStatus.SUCCESS
            assert j.result == 25
        else:
            assert j.status == JobStatus.FAILED
def test_unjsonable_arg(redis: Redis, random_queue_name: str):
    """Arguments the job encoding cannot serialize are rejected at enqueue time."""
    with pytest.raises(TypeError):
        enqueue(
            redis,
            random_queue_name,
            job_with_unjsonable_retval,
            {"phooey": uuid.uuid4()},  # UUIDs are not JSON-serializable by default
        )
def test_heartbeat_worker(redis: Redis, random_queue_name: str) -> None:
    """A heartbeat-enabled worker stamps a float heartbeat while running the job."""
    job = enqueue(redis, random_queue_name, reverse_job_id)
    assert job.heartbeat is None
    TestHeartbeatWorker.for_queue_names(redis, [random_queue_name]).tick()
    assert isinstance(job.heartbeat, float)
def test_job_object_access(redis: Redis, random_queue_name: str) -> None:
    """A running job can access its own Job object (here: returns its reversed id)."""
    j = enqueue(redis, random_queue_name, reverse_job_id)
    run_synchronously(j)
    assert j.result == j.id[::-1]