def test_worker_sets_job_status(loop):
    """Ensure that worker correctly sets job status."""
    q = Queue()
    w = Worker([q])

    job = yield from q.enqueue(say_hello)
    assert (yield from job.get_status()) == JobStatus.QUEUED
    assert (yield from job.is_queued)
    assert not (yield from job.is_finished)
    assert not (yield from job.is_failed)

    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) == JobStatus.FINISHED
    assert not (yield from job.is_queued)
    assert (yield from job.is_finished)
    assert not (yield from job.is_failed)

    # Failed jobs should set status to "failed"
    job = yield from q.enqueue(div_by_zero, args=(1,))
    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) == JobStatus.FAILED
    assert not (yield from job.is_queued)
    assert not (yield from job.is_finished)
    assert (yield from job.is_failed)

def test_empty_remove_jobs(redis):
    """Emptying a queue deletes the associated job objects."""
    q = Queue('example')
    job = yield from q.enqueue(lambda x: x)
    assert (yield from Job.exists(job.id))
    yield from q.empty()
    assert not (yield from Job.exists(job.id))

def test_data_property_sets_job_properties():
    """Job tuple gets derived lazily from data property."""
    job = Job()
    job.data = dumps(('foo', None, (1, 2, 3), {'bar': 'qux'}))

    assert job.func_name == 'foo'
    assert not job.instance
    assert job.args == (1, 2, 3)
    assert job.kwargs == {'bar': 'qux'}

def test_job_properties_set_data_property():
    """Data property gets derived from the job tuple."""
    job = Job()
    job.func_name = 'foo'
    fname, instance, args, kwargs = loads(job.data)

    assert fname == job.func_name
    assert not instance
    assert args == ()
    assert kwargs == {}

def test_result_ttl_is_persisted(redis):
    """Ensure that job's result_ttl is set properly."""
    job = Job.create(func=say_hello, args=('Lionel',), result_ttl=10)
    yield from job.save()
    yield from Job.fetch(job.id, connection=redis)
    assert job.result_ttl == 10

    job = Job.create(func=say_hello, args=('Lionel',))
    yield from job.save()
    yield from Job.fetch(job.id, connection=redis)
    assert not job.result_ttl

def test_get_job_ttl():
    """Getting job TTL."""
    ttl = 1
    job = Job.create(func=say_hello, ttl=ttl)
    yield from job.save()
    assert job.get_ttl() == ttl

    job = Job.create(func=say_hello)
    yield from job.save()
    assert not job.get_ttl()

def test_custom_meta_is_persisted(redis):
    """Additional meta data on jobs is persisted correctly."""
    job = Job.create(func=say_hello, args=('Lionel',))
    job.meta['foo'] = 'bar'
    yield from job.save()

    raw_data = yield from redis.hget(job.key, 'meta')
    assert loads(raw_data)['foo'] == 'bar'

    job2 = yield from Job.fetch(job.id)
    assert job2.meta['foo'] == 'bar'

def test_store_then_fetch():
    """Store, then fetch."""
    job = Job.create(func=some_calculation, args=(3, 4), kwargs=dict(z=2))
    yield from job.save()

    job2 = yield from Job.fetch(job.id)
    assert job.func == job2.func
    assert job.args == job2.args
    assert job.kwargs == job2.kwargs

    # Jobs with the same id compare as equal
    assert job == job2

def test_description_is_persisted(redis):
    """Ensure that job's custom description is set properly."""
    job = Job.create(func=say_hello, args=('Lionel',),
                     description='Say hello!')
    yield from job.save()
    yield from Job.fetch(job.id, connection=redis)
    assert job.description == 'Say hello!'

    # Ensure job description is constructed from function call string
    job = Job.create(func=say_hello, args=('Lionel',))
    yield from job.save()
    yield from Job.fetch(job.id, connection=redis)
    assert job.description == "fixtures.say_hello('Lionel')"

def test_get_result_ttl():
    """Getting job result TTL."""
    job_result_ttl = 1
    default_ttl = 2

    job = Job.create(func=say_hello, result_ttl=job_result_ttl)
    yield from job.save()
    assert job.get_result_ttl(default_ttl=default_ttl) == job_result_ttl
    assert job.get_result_ttl() == job_result_ttl

    job = Job.create(func=say_hello)
    yield from job.save()
    assert job.get_result_ttl(default_ttl=default_ttl) == default_ttl
    assert not job.get_result_ttl()

def test_work_is_unreadable(redis, loop):
    """Unreadable jobs are put on the failed queue."""
    q = Queue()
    failed_q = get_failed_queue()

    assert (yield from failed_q.count) == 0
    assert (yield from q.count) == 0

    # NOTE: We have to fake this enqueueing for this test case.
    # What we're simulating here is a call to a function that is not
    # importable from the worker process.
    job = Job.create(func=say_hello, args=(3,))
    yield from job.save()

    # NOTE: replacement and original strings must have the same length
    data = yield from redis.hget(job.key, 'data')
    invalid_data = data.replace(b'say_hello', b'fake_attr')
    assert data != invalid_data
    yield from redis.hset(job.key, 'data', invalid_data)

    # We use the low-level internal function to enqueue any data
    # (bypassing validity checks)
    yield from q.push_job_id(job.id)
    assert (yield from q.count) == 1

    # All set, we're going to process it
    w = Worker([q])
    yield from w.work(burst=True, loop=loop)  # Should silently pass

    assert (yield from q.count) == 0
    assert (yield from failed_q.count) == 1

def test_job_dependency(loop):
    """Enqueue dependent jobs only if their parents don't fail."""
    q = Queue()
    w = Worker([q])

    parent_job = yield from q.enqueue(say_hello)
    job = yield from q.enqueue_call(say_hello, depends_on=parent_job)
    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) == JobStatus.FINISHED

    parent_job = yield from q.enqueue(div_by_zero)
    job = yield from q.enqueue_call(say_hello, depends_on=parent_job)
    yield from w.work(burst=True, loop=loop)
    job = yield from Job.fetch(job.id)
    assert (yield from job.get_status()) != JobStatus.FINISHED

def test_remove():
    """Ensure queue.remove properly removes Job from queue."""
    connection = object()
    sentinel = []

    class Protocol:
        @staticmethod
        @asyncio.coroutine
        def cancel_job(redis, name, id):
            assert redis is connection
            assert name == 'example'
            assert id == '56e6ba45-1aa3-4724-8c9f-51b7b0031cee'
            sentinel.append(1)

    class TestQueue(Queue):
        protocol = Protocol()

    q = TestQueue(connection, 'example')
    job = Job(connection=connection,
              id='56e6ba45-1aa3-4724-8c9f-51b7b0031cee',
              func=say_hello, args=(), kwargs={},
              description='fixtures.say_hello()', timeout=180,
              result_ttl=5000, origin='default',
              created_at=datetime(2016, 4, 5, 22, 40, 35))

    yield from q.remove(job)
    yield from q.remove(job.id)
    assert len(sentinel) == 2

def test_unicode():
    """Unicode in job description."""
    job = Job.create('myfunc', args=[12, "☃"],
                     kwargs=dict(snowman="☃", null=None))
    expected_string = "myfunc(12, '☃', null=None, snowman='☃')"
    assert job.description == expected_string

def test_custom_exc_handling(loop):
    """Custom exception handling."""
    @asyncio.coroutine
    def black_hole(job, *exc_info):
        # Don't fall through to default behaviour (moving to failed queue)
        return False

    q = Queue()
    failed_q = get_failed_queue()

    # Preconditions
    assert not (yield from failed_q.count)
    assert not (yield from q.count)

    # Action
    job = yield from q.enqueue(div_by_zero)
    assert (yield from q.count) == 1

    w = Worker([q], exception_handlers=black_hole)
    yield from w.work(burst=True, loop=loop)  # Should silently pass

    # Postconditions
    assert not (yield from q.count)
    assert not (yield from failed_q.count)

    # Check the job
    job = yield from Job.fetch(job.id)
    assert (yield from job.is_failed)

def test_work_fails(loop):
    """Failing jobs are put on the failed queue."""
    q = Queue()
    failed_q = get_failed_queue()

    # Preconditions
    assert not (yield from failed_q.count)
    assert not (yield from q.count)

    # Action
    job = yield from q.enqueue(div_by_zero)
    assert (yield from q.count) == 1

    # keep for later
    enqueued_at_date = strip_microseconds(job.enqueued_at)

    w = Worker([q])
    yield from w.work(burst=True, loop=loop)  # Should silently pass

    # Postconditions
    assert not (yield from q.count)
    assert (yield from failed_q.count) == 1
    assert not (yield from w.get_current_job_id())

    # Check the job
    job = yield from Job.fetch(job.id)
    assert job.origin == q.name

    # Should be the original enqueued_at date, not the date of enqueueing
    # to the failed queue
    assert job.enqueued_at == enqueued_at_date
    assert job.exc_info  # should contain exc_info

def test_create_job_from_callable_class():
    """Creation of jobs using a callable class specifier."""
    kallable = CallableObject()
    job = Job.create(func=kallable)

    assert job.func == kallable.__call__
    assert job.instance == kallable

def test_cleanup(redis):
    """Test that jobs and results are expired properly."""
    job = Job.create(func=say_hello)
    yield from job.save()

    # Jobs with negative TTLs don't expire
    yield from job.cleanup(ttl=-1)
    assert (yield from redis.ttl(job.key)) == -1

    # Jobs with positive TTLs are eventually deleted
    yield from job.cleanup(ttl=100)
    assert (yield from redis.ttl(job.key)) == 100

    # Jobs with 0 TTL are immediately deleted
    yield from job.cleanup(ttl=0)
    with pytest.raises(NoSuchJobError):
        yield from Job.fetch(job.id, redis)

def test_create_job_from_string_function():
    """Creation of jobs using string specifier."""
    job = Job.create(func='fixtures.say_hello', args=('World',))

    # Job data is set
    assert job.func == say_hello
    assert not job.instance
    assert job.args == ('World',)

def test_persistence_of_parent_job():
    """Storing jobs with parent job, either instance or key."""
    parent_job = Job.create(func=some_calculation)
    yield from parent_job.save()

    job = Job.create(func=some_calculation, depends_on=parent_job)
    yield from job.save()
    stored_job = yield from Job.fetch(job.id)
    assert stored_job._dependency_id == parent_job.id
    assert (yield from stored_job.dependency) == parent_job

    job = Job.create(func=some_calculation, depends_on=parent_job.id)
    yield from job.save()
    stored_job = yield from Job.fetch(job.id)
    assert stored_job._dependency_id == parent_job.id
    assert (yield from stored_job.dependency) == parent_job

def test_create_instance_method_job():
    """Creation of jobs for instance methods."""
    n = Number(2)
    job = Job.create(func=n.div, args=(4,))

    # Job data is set
    assert job.func == n.div
    assert job.instance == n
    assert job.args == (4,)

def test_delete(redis):
    """job.delete() deletes itself & dependents mapping from Redis."""
    queue = Queue(connection=redis)
    job = yield from queue.enqueue(say_hello)
    job2 = Job.create(func=say_hello, depends_on=job)
    yield from job2.register_dependency()

    yield from job.delete()
    assert not (yield from redis.exists(job.key))
    assert not (yield from redis.exists(job.dependents_key))
    assert job.id not in (yield from queue.get_job_ids())

def test_save(redis):
    """Storing jobs."""
    job = Job.create(func=some_calculation, args=(3, 4), kwargs=dict(z=2))

    # Saving creates a Redis hash
    assert not (yield from redis.exists(job.key))
    yield from job.save()
    assert (yield from redis.type(job.key)) == b'hash'

    # Saving writes pickled job data
    unpickled_data = loads((yield from redis.hget(job.key, 'data')))
    assert unpickled_data[0] == 'fixtures.some_calculation'

def test_register_dependency(redis):
    """Ensure dependency registration works properly."""
    origin = 'some_queue'
    registry = DeferredJobRegistry(origin, redis)

    job = Job.create(func=say_hello, origin=origin)
    job._dependency_id = 'id'
    yield from job.save()

    assert not (yield from registry.get_job_ids())
    yield from job.register_dependency()
    assert as_text((yield from redis.spop('rq:job:id:dependents'))) == job.id
    assert (yield from registry.get_job_ids()) == [job.id]

def test_enqueue_sets_metadata():
    """Enqueueing job onto queues modifies meta data."""
    q = Queue()
    job = Job.create(func=say_hello, args=('Nick',), kwargs=dict(foo='bar'))

    # Preconditions
    assert not job.enqueued_at

    # Action
    yield from q.enqueue_job(job)

    # Postconditions
    assert job.enqueued_at

def test_persistence_of_typical_jobs(redis):
    """Storing typical jobs."""
    job = Job.create(func=some_calculation, args=(3, 4), kwargs=dict(z=2))
    yield from job.save()

    expected_date = strip_microseconds(job.created_at)
    stored_date = (yield from redis.hget(job.key, 'created_at')) \
        .decode('utf-8')
    assert stored_date == utcformat(expected_date)

    # ... and no other keys are stored
    assert sorted((yield from redis.hkeys(job.key))) \
        == [b'created_at', b'data', b'description']

def test_fetching_unreadable_data(redis):
    """Fetching succeeds on unreadable data, but lazy props fail."""
    # Set up
    job = Job.create(func=some_calculation, args=(3, 4), kwargs=dict(z=2))
    yield from job.save()

    # Just replace the data hkey with some random noise
    yield from redis.hset(job.key, 'data', 'this is no pickle string')
    yield from job.refresh()

    for attr in ('func_name', 'instance', 'args', 'kwargs'):
        with pytest.raises(UnpickleError):
            getattr(job, attr)

def test_job_is_unimportable(redis):
    """Jobs that cannot be imported throw exception on access."""
    job = Job.create(func=say_hello, args=('Lionel',))
    yield from job.save()

    # Now slightly modify the job to make it unimportable (this is
    # equivalent to a worker not having the most up-to-date source code
    # and being unable to import the function)
    data = yield from redis.hget(job.key, 'data')
    unimportable_data = data.replace(b'say_hello', b'nay_hello')
    yield from redis.hset(job.key, 'data', unimportable_data)

    yield from job.refresh()
    with pytest.raises(AttributeError):
        job.func  # accessing the func property should fail

def test_fetch(redis):
    """Fetching jobs."""
    yield from redis.hset('rq:job:some_id', 'data',
                          "(S'fixtures.some_calculation'\n"
                          "N(I3\nI4\nt(dp1\nS'z'\nI2\nstp2\n.")
    yield from redis.hset('rq:job:some_id', 'created_at',
                          '2012-02-07T22:13:24Z')

    # Fetch returns a job
    job = yield from Job.fetch('some_id')
    assert job.id == 'some_id'
    assert job.func_name == 'fixtures.some_calculation'
    assert not job.instance
    assert job.args == (3, 4)
    assert job.kwargs == dict(z=2)
    assert job.created_at == datetime(2012, 2, 7, 22, 13, 24)

def test_create_typical_job():
    """Creation of jobs for function calls."""
    job = Job.create(func=some_calculation, args=(3, 4), kwargs=dict(z=2))

    # Jobs have a random UUID
    assert job.id
    assert job.created_at
    assert job.description
    assert not job.instance

    # Job data is set...
    assert job.func == some_calculation
    assert job.args == (3, 4)
    assert job.kwargs == {'z': 2}

    # ...but metadata is not
    assert not job.origin
    assert not job.enqueued_at
    assert not (yield from job.result)

def test_fetching_can_fail():
    """Fetching fails for non-existing jobs."""
    with pytest.raises(NoSuchJobError):
        yield from Job.fetch('b4a44d44-da16-4620-90a6-798e8cd72ca0')
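

# NOTE: the tests above rely on helper callables from a local ``fixtures``
# module (say_hello, div_by_zero, some_calculation, Number, CallableObject)
# that is not shown in this section. The sketch below is only an assumption
# of what those helpers roughly look like, inferred from how the tests call
# them; the real fixtures module may differ.

def say_hello(name=None):
    """Assumed job with an optional argument and a return value."""
    return 'Hi there, %s!' % (name or 'Stranger')


def div_by_zero(x):
    """Assumed job that always raises ZeroDivisionError."""
    return x / 0


def some_calculation(x, y, z=1):
    """Assumed job with positional and keyword arguments."""
    return x * y / z


class Number:
    """Assumed holder used for instance-method jobs, e.g. Number(2).div(4)."""

    def __init__(self, value):
        self.value = value

    def div(self, y):
        return self.value / y


class CallableObject:
    """Assumed callable instance used for callable-class jobs."""

    def __call__(self):
        return 'I was called'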