def task_with_deferred_arguments(url):
    """Demonstrate passing deferred results as arguments to another task.

    A task whose arguments include Deferred objects is not invoked until
    every deferred value has resolved. The worker substitutes the resolved
    values — not the Deferred wrappers — before calling ``sum``.
    """
    def func(arg):
        return arg

    broker = get_broker(url)
    for callee in (func, sum):
        broker.expose(callee)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        deferred_args = [queue.func(n) for n in (1, 2, 3)]
        total = queue.sum(deferred_args)
        assert total.wait(WAIT), repr(total)
        eq_(total.value, 6)
def more_deferred_arguments(url):
    """Chain deferred results through multiple dependent tasks.

    ``sum`` consumes three deferred ``func`` results, and ``add`` then
    consumes the deferred ``sum`` result plus one more deferred value.
    """
    from operator import add

    def func(arg):
        return arg

    broker = get_broker(url)
    broker.expose(func)
    broker.expose(sum)
    broker.expose(add)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        q = get_queue(url)
        sum_123 = q.sum([
            q.func(1),
            q.func(2),
            q.func(3),
        ])
        sum_1234 = q.add(sum_123, q.func(4))
        # Fix: the assertion message previously referenced the undefined
        # name ``res``, which would raise NameError on a wait timeout
        # instead of producing a useful failure message.
        assert sum_1234.wait(WAIT), repr(sum_1234)
        eq_(sum_1234.value, 10)
def test_Queue_len(url):
    # Verify len(queue) tracks the number of incomplete tasks, including
    # tasks that are waiting on deferred arguments of other tasks.
    lock = TimeoutLock(locked=True)  # lock-steps the worker with this test

    def func(arg=None):
        pass

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker, lock):
        q = get_queue(url)
        eq_(len(q), 0)  # nothing enqueued yet
        r0 = q.func()
        eq_(len(q), 1)
        r1 = q.func()
        r2 = q.func(r1)  # r2 takes r1's deferred result as its argument
        eq_(len(q), 3)
        eventually((lambda: lock.locked), True)
        lock.release()  # let the worker complete one task (r0)
        assert r0.wait(timeout=WAIT), repr(r0)
        eq_(len(q), 2)  # r1 and r2 still outstanding
        eventually((lambda: lock.locked), True)
        lock.release()  # let the worker complete r1 ...
        eventually((lambda: lock.locked), True)
        lock.release()  # ... then r2, which consumed r1's result
        assert r2.wait(timeout=WAIT), repr(r2)
        eq_(len(q), 0)  # queue drained
def task_with_failed_deferred_arguments(url):
    """A failed deferred argument arrives as a ``TaskFailure`` value.

    Normally a task fails outright when any of its deferred arguments
    fail. Constructing the ``Task`` with ``on_error=Task.PASS`` instead
    passes the ``TaskFailure`` through as that argument's value.
    """
    def func(arg):
        if arg == 0:
            raise Exception('zero fail!')
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        deferred_args = [queue.func(n) for n in (1, 0, 2)]
        pass_errors = Task(queue.func, on_error=Task.PASS)
        result = pass_errors(deferred_args)
        result.wait(timeout=WAIT)
        expected = TaskFailure(
            'func', 'default', deferred_args[1].id, 'Exception: zero fail!')
        eq_(result.value, [1, expected, 2])
def test_Queue_len(url):
    """len(queue) reflects the number of outstanding (incomplete) tasks."""
    # NOTE(review): this shadows an identical ``test_Queue_len`` defined
    # earlier in this file; only this later definition is collected/run.
    lock = TimeoutLock(locked=True)

    def func(arg=None):
        pass

    broker = get_broker(url)
    broker.expose(func)

    def let_worker_finish_one_task():
        # wait until the worker is blocked on the lock, then release it
        eventually((lambda: lock.locked), True)
        lock.release()

    with thread_worker(broker, lock):
        queue = get_queue(url)
        eq_(len(queue), 0)
        first = queue.func()
        eq_(len(queue), 1)
        second = queue.func()
        third = queue.func(second)  # blocked on second's deferred result
        eq_(len(queue), 3)
        let_worker_finish_one_task()
        assert first.wait(timeout=WAIT), repr(first)
        eq_(len(queue), 2)
        let_worker_finish_one_task()
        let_worker_finish_one_task()
        assert third.wait(timeout=WAIT), repr(third)
        eq_(len(queue), 0)
def task_with_failed_deferred_arguments(url):
    """A ``TaskFailure`` can be delivered to the consuming task.

    With ``on_error=Task.PASS`` a failed deferred argument does not fail
    the consuming task; the ``TaskFailure`` itself becomes the value.
    """
    # NOTE(review): duplicate of an identically-named function defined
    # earlier in this file; this later definition shadows the earlier one.
    def func(arg):
        if arg == 0:
            raise Exception('zero fail!')
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        one = queue.func(1)
        zero = queue.func(0)  # this one raises in the worker
        two = queue.func(2)
        tolerant = Task(queue.func, on_error=Task.PASS)
        outcome = tolerant([one, zero, two])
        outcome.wait(timeout=WAIT)
        failure = TaskFailure('func', 'default', zero.id,
                              'Exception: zero fail!')
        eq_(outcome.value, [1, failure, 2])
def dependency_graph(url):
    """Build and run a three-branch dependency graph of tasks.

    Each branch runs ``left`` and ``right``, joins the pair in ``catch``,
    and all three ``catch`` results are merged by ``combine``::

        left right   left right   left right
          \\ /          \\ /          \\ /
         catch        catch        catch
            \\___________|___________/
                        |
                     combine
    """
    ts = TaskSpace()

    @ts.task
    def left(num):
        return ('left', num)

    @ts.task
    def right(num):
        return ('right', num)

    @ts.task
    def catch(left, right, num):
        # joins one branch: [num, ('left', num), ('right', num)]
        return [num, left, right]

    @ts.task
    def combine(items):
        # {num: [('left', num), ('right', num)], ...}
        return {i[0]: i[1:] for i in items}

    broker = get_broker(url)
    broker.expose(ts)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        q = get_queue(url)
        catches = []
        for num in [1, 2, 3]:
            left = q.left(num)
            right = q.right(num)
            catch = q.catch(left, right, num)
            catches.append(catch)
        res = q.combine(catches)
        assert res.wait(WAIT), repr(res)
        eq_(
            res.value,
            {
                1: [('left', 1), ('right', 1)],
                2: [('left', 2), ('right', 2)],
                3: [('left', 3), ('right', 3)],
            })
def dependency_graph(url):
    """Run a fan-out/fan-in task graph: (left, right) -> catch -> combine.

    Three independent branches each compute ``left`` and ``right``, join
    them in ``catch``, and ``combine`` merges all branch results.
    """
    # NOTE(review): duplicate of dependency_graph defined earlier in this
    # file; this later definition shadows the earlier one.
    ts = TaskSpace()

    @ts.task
    def left(num):
        return ('left', num)

    @ts.task
    def right(num):
        return ('right', num)

    @ts.task
    def catch(left, right, num):
        return [num, left, right]

    @ts.task
    def combine(items):
        return {i[0]: i[1:] for i in items}

    broker = get_broker(url)
    broker.expose(ts)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        branches = [
            queue.catch(queue.left(num), queue.right(num), num)
            for num in (1, 2, 3)
        ]
        merged = queue.combine(branches)
        assert merged.wait(WAIT), repr(merged)
        expected = {
            1: [('left', 1), ('right', 1)],
            2: [('left', 2), ('right', 2)],
            3: [('left', 3), ('right', 3)],
        }
        eq_(merged.value, expected)
def test_WorkerPool_max_worker_tasks(url):
    """After max_worker_tasks=3 tasks, a new worker takes over.

    The assertions show per-worker task counters of [1, 2, 3, 1] and
    exactly two distinct worker identities across four tasks.
    """
    broker = get_broker(url)
    pool = WorkerPool(
        broker,
        WorkerPool_max_worker_tasks_init,
        workers=1,
        max_worker_tasks=3,
    )
    with start_pool(pool):
        queue = get_queue(url)
        outcome = queue.results([queue.func() for _ in range(4)])
        assert outcome.wait(WAIT), repr(outcome)
        records = outcome.value
        assert isinstance(records, list), records
        # counters: 1, 2, 3 from the first worker, then 1 from its successor
        eq_([record[1] for record in records], [1, 2, 3, 1])
        eq_(len({record[0] for record in records}), 2)
def result_status(url):
    """Query a deferred result for its task's current status.

    The ``TimeoutLock`` lock-steps the producer and the worker purely for
    illustration; this kind of coordination is not normally needed or
    even desired.
    """
    lock = TimeoutLock(locked=True)

    def func(arg):
        lock.acquire()
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker, lock):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        deferred = queue.func('arg')

        eventually((lambda: deferred.status), const.ENQUEUED)
        eq_(repr(deferred),
            "<Deferred func [default:%s] enqueued>" % deferred.id)

        lock.release()
        eventually((lambda: deferred.status), const.PROCESSING)
        eq_(repr(deferred),
            "<Deferred func [default:%s] processing>" % deferred.id)

        lock.release()
        assert deferred.wait(WAIT), repr(deferred)
        eq_(repr(deferred),
            "<Deferred func [default:%s] success>" % deferred.id)
        eq_(deferred.value, 'arg')
def result_status(url):
    """Walk a deferred result through enqueued -> processing -> success.

    A lock coordinates producer and worker for illustration only.
    """
    # NOTE(review): duplicate of result_status defined earlier in this
    # file; this later definition shadows the earlier one.
    lock = TimeoutLock(locked=True)

    def func(arg):
        lock.acquire()
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker, lock):
        # -- task-invoking code, usually another process --
        res = get_queue(url).func('arg')
        # check the two intermediate states, advancing the worker after each
        for status, word in [(const.ENQUEUED, 'enqueued'),
                             (const.PROCESSING, 'processing')]:
            eventually((lambda: res.status), status)
            eq_(repr(res), "<Deferred func [default:%s] %s>" % (res.id, word))
            lock.release()
        assert res.wait(WAIT), repr(res)
        eq_(repr(res), "<Deferred func [default:%s] success>" % res.id)
        eq_(res.value, 'arg')
def no_such_task(url):
    """Invoking an unexposed task name yields a failed deferred result."""
    broker = get_broker(url)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        outcome = queue.func('arg')
        assert outcome.wait(WAIT), repr(outcome)
        eq_(repr(outcome), '<Deferred func [default:%s] failed>' % outcome.id)
        expected = 'func [default:%s] no such task' % outcome.id
        with assert_raises(TaskFailure, expected):
            outcome.value
def test_Queue_default_options(url):
    """Queue-level defaults (ignore_result, on_error) apply to its tasks."""
    def func(arg=3):
        if isinstance(arg, int) and arg < 2:
            raise ValueError('too low')
        return str(arg)

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # ignore_result=True: invocation returns None, not a Deferred
        quiet = get_queue(url, ignore_result=True)
        eq_(quiet.func(), None)

        # on_error=Task.PASS: the failed deferred argument is passed
        # through as a TaskFailure, which func stringifies
        tolerant = get_queue(url, on_error=Task.PASS)
        failed = tolerant.func(1)
        outcome = tolerant.func(failed)
        assert outcome.wait(WAIT), repr(outcome)
        eq_(outcome.value, 'func [default:%s] ValueError: too low' % failed.id)
def task_error(url):
    """An exception raised in a task surfaces as a TaskFailure."""
    def func(arg):
        raise Exception('fail!')

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        outcome = queue.func('arg')
        assert outcome.wait(WAIT), repr(outcome)
        eq_(repr(outcome), '<Deferred func [default:%s] failed>' % outcome.id)
        message = 'func [default:%s] Exception: fail!' % outcome.id
        with assert_raises(TaskFailure, message):
            outcome.value
def test_worker_interrupted(url):
    """KeyboardInterrupt raised inside a task is reported as a failure."""
    def func(arg):
        raise KeyboardInterrupt()

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        deferred = queue.func('arg')
        finished = deferred.wait(WAIT)
        assert finished, repr(deferred)
        eq_(repr(deferred),
            '<Deferred func [default:%s] failed>' % deferred.id)
        # trailing space: KeyboardInterrupt carries an empty message
        expected = 'func [default:%s] KeyboardInterrupt: ' % deferred.id
        with assert_raises(TaskFailure, expected):
            deferred.value
def ignore_result(url):
    """Skip result tracking when the outcome of a task is not needed.

    Creating a ``Task`` with ``ignore_result=True`` makes invocation
    return ``None`` instead of a ``Deferred``, allowing more efficient
    queue operation.
    """
    state = []

    def func(arg):
        state.append(arg)

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        fire_and_forget = Task(queue.func, ignore_result=True)
        outcome = fire_and_forget(3)
        eq_(outcome, None)  # no deferred result was produced
        eventually((lambda: state), [3])  # but the task did run
def ignore_result(url):
    """Discard the task result; invocation returns None, not a Deferred."""
    # NOTE(review): duplicate of ignore_result defined earlier in this
    # file; this later definition shadows the earlier one.
    state = []

    def func(arg):
        state.append(arg)

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        q = get_queue(url)
        task = Task(q.func, ignore_result=True)
        eq_(task(3), None)  # verify that we did not get a deferred result
        eventually((lambda: state), [3])
def Broker_duplicate_task_id(url, identifier):
    """Enqueueing a task whose id is already in use raises DuplicateTask.

    The id is reserved both while the task is enqueued and while it is
    being processed; once the task completes the id may be reused.
    """
    lock = TimeoutLock(locked=True)
    # removed unused local ``state = []`` (never read or written)

    def func(arg):
        lock.acquire()
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker, lock):
        q = get_queue(url)
        task = Task(q.func, id=identifier)
        res = task(1)
        # the same message applies in both the enqueued and processing states
        msg = 'func [default:int] cannot enqueue task with duplicate id'
        eventually((lambda: res.status), const.ENQUEUED)
        with assert_raises(DuplicateTask, msg):
            task(2)  # still enqueued -> duplicate
        lock.release()
        eventually((lambda: res.status), const.PROCESSING)
        with assert_raises(DuplicateTask, msg):
            task(3)  # processing -> still a duplicate
        lock.release()
        assert res.wait(timeout=WAIT), repr(res)
        eq_(res.value, 1)
        # the first task completed, so its id is available again
        res = task(4)
        eventually((lambda: res.status), const.ENQUEUED)
        lock.release()
        eventually((lambda: res.status), const.PROCESSING)
        lock.release()
        assert res.wait(timeout=WAIT), repr(res)
        eq_(res.value, 4)
def Broker_duplicate_task_id(url, identifier):
    """Enqueueing a task whose id is already in use raises DuplicateTask.

    The id stays reserved while the task is enqueued and while it is
    processing; it becomes available again once the task completes.
    """
    # NOTE(review): duplicate of an identically-named function defined
    # earlier in this file; this later definition shadows the earlier one.
    lock = TimeoutLock(locked=True)
    # removed unused local ``state = []`` (never read or written);
    # lambda spacing normalized to match the rest of the file

    def func(arg):
        lock.acquire()
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker, lock):
        q = get_queue(url)
        task = Task(q.func, id=identifier)
        res = task(1)
        msg = 'func [default:int] cannot enqueue task with duplicate id'
        eventually((lambda: res.status), const.ENQUEUED)
        with assert_raises(DuplicateTask, msg):
            task(2)  # still enqueued -> duplicate
        lock.release()
        eventually((lambda: res.status), const.PROCESSING)
        with assert_raises(DuplicateTask, msg):
            task(3)  # processing -> still a duplicate
        lock.release()
        assert res.wait(timeout=WAIT), repr(res)
        eq_(res.value, 1)
        # the first task completed, so its id is available again
        res = task(4)
        eventually((lambda: res.status), const.ENQUEUED)
        lock.release()
        eventually((lambda: res.status), const.PROCESSING)
        lock.release()
        assert res.wait(timeout=WAIT), repr(res)
        eq_(res.value, 4)
def wait_for_result(url):
    """Efficiently block until a task result is available.

    Use this feature wisely: waiting for a result from within a WorQ
    task could deadlock the queue.
    """
    def func(arg):
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        # -- task-invoking code, usually another process --
        queue = get_queue(url)
        deferred = queue.func('arg')
        finished = deferred.wait(WAIT)
        assert finished, repr(deferred)
        eq_(deferred.value, 'arg')
        eq_(repr(deferred),
            "<Deferred func [default:%s] success>" % deferred.id)
def test_clear_Queue(url):
    """``del q[:]`` empties the queue; any partial slice is rejected."""
    queue = get_queue(url)
    eq_(len(queue), 0)
    for _ in range(2):
        queue.func()
    eq_(len(queue), 2)
    del queue[:]
    eq_(len(queue), 0)
    err = 'delitem is only valid with a full slice ([:])'
    with assert_raises(ValueError, msg=err):
        del queue[:2]
def test_completed_Deferred_as_argument(url):
    """An already-completed Deferred may be passed as a task argument.

    The worker substitutes the completed value (1), which ``func``
    verifies via ``eq_`` and returns.
    """
    def func(arg):
        eq_(arg, 1)
        return arg

    broker = get_broker(url)
    broker.expose(func)
    with thread_worker(broker):
        q = get_queue(url)
        eq_(len(q), 0)
        r0 = q.func(1)
        assert r0.wait(timeout=WAIT), repr(r0)
        eq_(r0.value, 1)
        r1 = q.func(r0)  # pass the completed deferred as the argument
        assert r1.wait(timeout=WAIT), repr(r1)
        # Fix: the final check previously re-asserted ``r0.value`` (already
        # verified above) and never inspected r1's value — check r1 instead.
        eq_(r1.value, 1)
def func(arg):
    """Verify the argument is exactly 1, then echo it back."""
    eq_(arg, 1)
    return arg