def test_queue_is_empty(redis):
    """Detecting empty queues."""
    q = Queue('example')
    assert (yield from q.is_empty())
    yield from redis.rpush('rq:queue:example', 'sentinel message')
    assert not (yield from q.is_empty())

def test_ttl_via_enqueue(redis):
    """Enqueue sets a custom TTL on the job."""
    ttl = 1
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(say_hello, ttl=ttl)
    assert job.get_ttl() == ttl

def test_custom_exc_handling(loop):
    """Custom exception handling."""

    @asyncio.coroutine
    def black_hole(job, *exc_info):
        # Don't fall through to default behaviour (moving to failed queue)
        return False

    q = Queue()
    failed_q = get_failed_queue()
    # Preconditions
    assert not (yield from failed_q.count)
    assert not (yield from q.count)
    # Action
    job = yield from q.enqueue(div_by_zero)
    assert (yield from q.count) == 1
    w = Worker([q], exception_handlers=black_hole)
    yield from w.work(burst=True, loop=loop)  # Should silently pass
    # Postconditions
    assert not (yield from q.count)
    assert not (yield from failed_q.count)
    # Check the job
    job = yield from Job.fetch(job.id)
    assert (yield from job.is_failed)

def test_work_is_unreadable(redis, loop):
    """Unreadable jobs are put on the failed queue."""
    q = Queue()
    failed_q = get_failed_queue()
    assert (yield from failed_q.count) == 0
    assert (yield from q.count) == 0
    # NOTE: We have to fake this enqueueing for this test case.
    # What we're simulating here is a call to a function that is not
    # importable from the worker process.
    job = Job.create(func=say_hello, args=(3,))
    yield from job.save()
    # NOTE: replacement and original strings must have the same length
    data = yield from redis.hget(job.key, 'data')
    invalid_data = data.replace(b'say_hello', b'fake_attr')
    assert data != invalid_data
    yield from redis.hset(job.key, 'data', invalid_data)
    # We use the low-level internal function to enqueue any data
    # (bypassing validity checks)
    yield from q.push_job_id(job.id)
    assert (yield from q.count) == 1
    # All set, we're going to process it
    w = Worker([q])
    yield from w.work(burst=True, loop=loop)  # Should silently pass
    assert (yield from q.count) == 0
    assert (yield from failed_q.count) == 1

def test_create_job_with_ttl_should_have_ttl_after_enqueued(redis):
    """Create a job with ttl and check that get_jobs returns it properly."""
    queue = Queue(connection=redis)
    yield from queue.enqueue(say_hello, job_id="1234", ttl=10)
    job = (yield from queue.get_jobs())[0]
    assert job.ttl == 10

def test_create_job_with_ttl_should_expire(redis):
    """A job created with ttl expires."""
    queue = Queue(connection=redis)
    yield from queue.enqueue(say_hello, job_id="1234", ttl=1)
    time.sleep(1)
    assert not len((yield from queue.get_jobs()))

def test_worker_sets_job_status(loop):
    """Ensure that worker correctly sets job status."""
    q = Queue()
    w = Worker([q])
    job = yield from q.enqueue(say_hello)
    assert (yield from job.get_status()) == JobStatus.QUEUED
    assert (yield from job.is_queued)
    assert not (yield from job.is_finished)
    assert not (yield from job.is_failed)
    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) == JobStatus.FINISHED
    assert not (yield from job.is_queued)
    assert (yield from job.is_finished)
    assert not (yield from job.is_failed)
    # Failed jobs should set status to "failed"
    job = yield from q.enqueue(div_by_zero, args=(1,))
    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) == JobStatus.FAILED
    assert not (yield from job.is_queued)
    assert not (yield from job.is_finished)
    assert (yield from job.is_failed)

def test_work_fails(loop):
    """Failing jobs are put on the failed queue."""
    q = Queue()
    failed_q = get_failed_queue()
    # Preconditions
    assert not (yield from failed_q.count)
    assert not (yield from q.count)
    # Action
    job = yield from q.enqueue(div_by_zero)
    assert (yield from q.count) == 1
    # keep for later
    enqueued_at_date = strip_microseconds(job.enqueued_at)
    w = Worker([q])
    yield from w.work(burst=True, loop=loop)  # Should silently pass
    # Postconditions
    assert not (yield from q.count)
    assert (yield from failed_q.count) == 1
    assert not (yield from w.get_current_job_id())
    # Check the job
    job = yield from Job.fetch(job.id)
    assert job.origin == q.name
    # Should be the original enqueued_at date, not the date of enqueueing
    # to the failed queue
    assert job.enqueued_at == enqueued_at_date
    assert job.exc_info  # should contain exc_info

def test_dequeue_deleted_jobs():
    """Dequeueing deleted jobs from queues doesn't blow the stack."""
    q = Queue()
    for _ in range(1, 1000):
        job = yield from q.enqueue(say_hello)
        yield from job.delete()
    yield from q.dequeue()

def test_get_call_string_unicode(redis):
    """Call string with unicode keyword arguments."""
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(
        echo, arg_with_unicode=UnicodeStringObject())
    assert job.get_call_string()
    yield from job.perform()

def test_work_via_string_argument(loop):
    """Worker processes work fed via string arguments."""
    q = Queue('foo')
    w = Worker([q])
    job = yield from q.enqueue('fixtures.say_hello', name='Frank')
    assert (yield from w.work(burst=True, loop=loop))
    assert (yield from job.result) == 'Hi there, Frank!'

def test_custom_default_timeout():
    """Override default timeout."""
    connection = object()
    q = Queue(connection)
    assert q.default_timeout == 180
    q = Queue(connection, default_timeout=500)
    assert q.default_timeout == 500

def test_empty_remove_jobs(redis):
    """Emptying a queue deletes the associated job objects."""
    q = Queue('example')
    job = yield from q.enqueue(lambda x: x)
    assert (yield from Job.exists(job.id))
    yield from q.empty()
    assert not (yield from Job.exists(job.id))

def test_work_and_quit(loop):
    """Worker processes work, then quits."""
    fooq, barq = Queue('foo'), Queue('bar')
    w = Worker([fooq, barq])
    assert not (yield from w.work(burst=True, loop=loop))
    yield from fooq.enqueue(say_hello, name='Frank')
    assert (yield from w.work(burst=True, loop=loop))

def test_queue_order():
    """Mathematical order of queues."""
    connection = object()
    q1 = Queue(connection, 'a')
    q2 = Queue(connection, 'b')
    q3 = Queue(connection, 'c')
    assert q1 < q2
    assert q3 > q2

def test_job_access_within_job_function():
    """The current job is accessible within the job function."""
    q = Queue()
    # access_self calls get_current_job() and asserts
    yield from q.enqueue(access_self)
    with SynchronousConnection():
        w = SynchronousWorker([SynchronousQueue()])
        w.work(burst=True)

def test_work_unicode_friendly(loop):
    """Worker processes work with unicode description, then quits."""
    q = Queue('foo')
    w = Worker([q])
    job = yield from q.enqueue(
        'fixtures.say_hello', name='Adam', description='你好 世界!')
    assert (yield from w.work(burst=True, loop=loop))
    assert (yield from job.result) == 'Hi there, Adam!'
    assert job.description == '你好 世界!'

def go():
    redis = yield from create_redis(('localhost', 6379))
    queue = Queue('my_async_queue', connection=redis)
    job = yield from queue.enqueue(
        http_client.fetch_page, 'https://www.python.org')
    yield from asyncio.sleep(5)
    result = yield from job.result
    assert '</html>' in result, 'Given content is not an HTML page'
    print('Well done, Turner!')
    redis.close()

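# A minimal sketch (not part of the original tests) of how the go() demo
# coroutine above might be driven. It assumes only the standard asyncio
# event-loop API and a Redis server listening on localhost:6379; a worker
# must be processing 'my_async_queue' for the job to finish within 5 seconds.
if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(go())  # run the demo coroutine to completion
    loop.close()
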
def test_empty_queue(redis):
    """Emptying queues."""
    q = Queue('example', connection=redis)
    yield from redis.rpush('rq:queue:example', 'foo')
    yield from redis.rpush('rq:queue:example', 'bar')
    assert not (yield from q.is_empty())
    yield from q.empty()
    assert (yield from q.is_empty())
    assert (yield from redis.lpop('rq:queue:example')) is None

def test_dequeue_any():
    """Fetching work from any given queue."""
    fooq = Queue('foo')
    barq = Queue('bar')
    assert not (yield from Queue.dequeue_any([fooq, barq], None))
    # Enqueue a single item
    yield from barq.enqueue(say_hello)
    job, queue = yield from Queue.dequeue_any([fooq, barq], None)
    assert job.func == say_hello
    assert queue == barq
    # Enqueue items on both queues
    yield from barq.enqueue(say_hello, 'for Bar')
    yield from fooq.enqueue(say_hello, 'for Foo')
    job, queue = yield from Queue.dequeue_any([fooq, barq], None)
    assert queue == fooq
    assert job.func == say_hello
    assert job.origin == fooq.name
    assert job.args[0] == 'for Foo', 'Foo should be dequeued first.'
    job, queue = yield from Queue.dequeue_any([fooq, barq], None)
    assert queue == barq
    assert job.func == say_hello
    assert job.origin == barq.name
    assert job.args[0] == 'for Bar', 'Bar should be dequeued second.'

def test_dequeue_class_method():
    """Dequeueing class method jobs from queues."""
    q = Queue()
    yield from q.enqueue(Number.divide, 3, 4)
    job = yield from q.dequeue()
    assert job.instance.__dict__ == Number.__dict__
    assert job.func.__name__ == 'divide'
    assert job.args == (3, 4)

def test_jobs():
    """Getting jobs out of a queue."""
    q = Queue('example')
    assert not (yield from q.jobs)
    job = yield from q.enqueue(say_hello)
    assert (yield from q.jobs) == [job]
    # Deleting job removes it from queue
    yield from job.delete()
    assert not (yield from q.job_ids)

def test_equality():
    """Mathematical equality of queues."""
    connection = object()
    q1 = Queue(connection, 'foo')
    q2 = Queue(connection, 'foo')
    q3 = Queue(connection, 'bar')
    assert q1 == q2
    assert q2 == q1
    assert q1 != q3
    assert q2 != q3

def test_dequeue_any_ignores_nonexisting_jobs():
    """Dequeuing (from any queue) silently ignores non-existing jobs."""
    q = Queue('low')
    uuid = '49f205ab-8ea3-47dd-a1b5-bfa186870fc8'
    yield from q.push_job_id(uuid)
    # Dequeue simply ignores the missing job and returns None
    assert (yield from q.count) == 1
    assert not (yield from Queue.dequeue_any([Queue(), Queue('low')], None))
    assert not (yield from q.count)

def test_create_job_with_id(redis):
    """Create jobs with a custom ID."""
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(say_hello, job_id="1234")
    assert job.id == "1234"
    yield from job.perform()
    with pytest.raises(TypeError):
        yield from queue.enqueue(say_hello, job_id=1234)

def test_delete(redis):
    """job.delete() deletes itself & dependents mapping from Redis."""
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(say_hello)
    job2 = Job.create(func=say_hello, depends_on=job)
    yield from job2.register_dependency()
    yield from job.delete()
    assert not (yield from redis.exists(job.key))
    assert not (yield from redis.exists(job.dependents_key))
    assert job.id not in (yield from queue.get_job_ids())

def test_get_current_job(redis):
    """Ensure worker.get_current_job() works properly."""
    q = Queue()
    worker = Worker([q])
    job = yield from q.enqueue_call(say_hello)
    assert not (yield from redis.hget(worker.key, 'current_job'))
    yield from worker.set_current_job_id(job.id)
    current_id = as_text((yield from redis.hget(worker.key, 'current_job')))
    assert (yield from worker.get_current_job_id()) == current_id
    assert (yield from worker.get_current_job()) == job

def test_remove():
    """Ensure queue.remove properly removes Job from queue."""
    q = Queue('example')
    job = yield from q.enqueue(say_hello)
    assert job.id in (yield from q.job_ids)
    yield from q.remove(job)
    assert job.id not in (yield from q.job_ids)

    job = yield from q.enqueue(say_hello)
    assert job.id in (yield from q.job_ids)
    yield from q.remove(job.id)
    assert job.id not in (yield from q.job_ids)

def test_enqueue_sets_metadata():
    """Enqueueing job onto queues modifies meta data."""
    q = Queue()
    job = Job.create(func=say_hello, args=('Nick',), kwargs=dict(foo='bar'))
    # Preconditions
    assert not job.enqueued_at
    # Action
    yield from q.enqueue_job(job)
    # Postconditions
    assert job.enqueued_at

def test_never_expire_during_execution(redis):
    """Test what happens when job expires during execution."""
    ttl = 1
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(long_running_job, args=(2,), ttl=ttl)
    assert job.get_ttl() == ttl
    yield from job.save()
    yield from job.perform()
    assert job.get_ttl() == -1
    assert (yield from job.exists(job.id))
    assert (yield from job.result) == 'Done sleeping...'

def test_pop_job_id():
    """Popping job IDs from queues."""
    # Set up
    q = Queue()
    uuid = '112188ae-4e9d-4a5b-a5b3-f26f2cb054da'
    yield from q.push_job_id(uuid)
    # Pop it off the queue...
    assert (yield from q.count)
    assert (yield from q.pop_job_id()) == uuid
    # ...and assert the queue count went down
    assert not (yield from q.count)

def test_dequeue_instance_method():
    """Dequeueing instance method jobs from queues."""
    q = Queue()
    n = Number(2)
    yield from q.enqueue(n.div, 4)
    job = yield from q.dequeue()
    # The instance has been pickled and unpickled, so it is now a
    # separate object. Test for equality using each object's __dict__
    # instead.
    assert job.instance.__dict__ == n.__dict__
    assert job.func.__name__ == 'div'
    assert job.args == (4,)

def test_compact(redis):
    """Queue.compact() removes non-existing jobs."""
    q = Queue()
    yield from q.enqueue(say_hello, 'Alice')
    yield from q.enqueue(say_hello, 'Charlie')
    yield from redis.lpush(q.key, '1', '2')
    assert (yield from q.count) == 4
    yield from q.compact()
    assert (yield from q.count) == 2
    with pytest.raises(RuntimeError):
        len(q)

def test_queue_magic_methods():
    """Test simple magic method behavior of the Queue class."""
    connection = object()
    q = Queue(connection)
    assert hash(q) == hash('default')
    assert str(q) == "<Queue 'default'>"
    assert repr(q) == "Queue('default')"

def test_store_connection():
    """Each queue stores the connection we give it."""
    connection = object()
    q = Queue(connection)
    assert q.connection is connection

def test_create_queue():
    """We can create a queue instance."""
    connection = object()
    q = Queue(connection)
    assert q.name == 'default'

def test_custom_job_string():
    """Ensure custom job string assignment works as expected."""
    connection = object()
    q = Queue(connection, job_class='fixtures.CustomJob')
    assert q.job_class == CustomJob

def test_custom_job_class():
    """Ensure custom job class assignment works as expected."""
    connection = object()
    q = Queue(connection, job_class=CustomJob)
    assert q.job_class == CustomJob

def test_create_named_queue():
    """We can create a named queue instance."""
    connection = object()
    q = Queue(connection, 'my-queue')
    assert q.name == 'my-queue'