def test_failed_worker_without_warning(e, s, a, b):
    """A killed worker restarts (via its nanny) and the cluster recovers."""
    L = e.map(inc, range(10))
    yield _wait(L)

    # Kill the worker process and wait for the nanny to bring it back.
    a.process.terminate()
    start = time()
    while not a.process.is_alive():
        yield gen.sleep(0.01)
    assert time() - start < 10

    yield gen.sleep(0.5)

    # Wait until the scheduler sees both workers again.
    start = time()
    while len(s.ncores) < 2:
        yield gen.sleep(0.01)
    assert time() - start < 10

    yield _wait(L)

    L2 = e.map(inc, range(10, 20))
    yield _wait(L2)
    assert all(len(keys) > 0 for keys in s.has_what.values())
    ncores2 = s.ncores.copy()

    yield e._restart()

    L = e.map(inc, range(10))
    yield _wait(L)
    assert all(len(keys) > 0 for keys in s.has_what.values())

    assert not (set(ncores2) & set(s.ncores))  # no overlap
def test_restrictions_map(s, a, b):
    """``map`` honors worker restrictions, scalar or per-task."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    # A single restriction set applies to every task in the map.
    L = e.map(inc, range(5), workers={a.ip})
    yield _wait(L)

    assert set(a.data) == {x.key for x in L}
    assert not b.data
    for x in L:
        assert s.restrictions[x.key] == {a.ip}

    # A list of restriction sets applies element-wise.
    L = e.map(inc, [10, 11, 12], workers=[{a.ip}, {a.ip, b.ip}, {b.ip}])
    yield _wait(L)

    assert s.restrictions[L[0].key] == {a.ip}
    assert s.restrictions[L[1].key] == {a.ip, b.ip}
    assert s.restrictions[L[2].key] == {b.ip}

    # Mismatched lengths are rejected up front.
    with pytest.raises(ValueError):
        e.map(inc, [10, 11, 12], workers=[{a.ip}])

    yield e._shutdown()
def f(c, a, b):
    """Restrictions-in-map coroutine driven via the executor's IOLoop."""
    e = Executor((c.ip, c.port), start=False)
    IOLoop.current().spawn_callback(e._go)

    L = e.map(inc, range(5), workers={a.ip})
    yield _wait(L)

    assert set(a.data) == {x.key for x in L}
    assert not b.data
    for x in L:
        assert e.restrictions[x.key] == {a.ip}

    L = e.map(inc, [10, 11, 12], workers=[{a.ip}, {a.ip, b.ip}, {b.ip}])
    yield _wait(L)

    assert e.restrictions[L[0].key] == {a.ip}
    assert e.restrictions[L[1].key] == {a.ip, b.ip}
    assert e.restrictions[L[2].key] == {b.ip}

    with pytest.raises(ValueError):
        e.map(inc, [10, 11, 12], workers=[{a.ip}])

    yield e._shutdown()
def test_steal_cheap_data_slow_computation(e, s, a, b):
    """Slow tasks over cheap data should be stolen and balance out."""
    x = e.submit(slowinc, 100, delay=0.1)  # learn that slowinc is slow
    yield _wait([x])

    futures = e.map(slowinc, range(10), delay=0.01, workers=a.address,
                    allow_other_workers=True)
    yield _wait(futures)
    # Work stealing should spread the tasks roughly evenly.
    assert abs(len(a.data) - len(b.data)) < 3
def test_dont_steal_expensive_data_fast_computation(e, s, a, b):
    """Fast tasks over expensive data should stay with the data."""
    np = pytest.importorskip('numpy')
    x = e.submit(np.arange, 1000000, workers=a.address)
    yield _wait([x])
    future = e.submit(np.sum, [1], workers=a.address)  # learn that sum is fast
    yield _wait([future])

    cheap = [e.submit(np.sum, x, pure=False, workers=a.address,
                      allow_other_workers=True) for i in range(10)]
    yield _wait(cheap)
    # Moving the big array would cost more than running locally.
    assert len(b.data) == 0
    assert len(a.data) == 12
def test_AllProgress(e, s, a, b):
    """AllProgress tracks all/in-memory/released/erred keys per function."""
    x, y, z = e.map(inc, [1, 2, 3])
    xx, yy, zz = e.map(dec, [x, y, z])

    yield _wait([x, y, z])
    p = AllProgress(s)
    assert p.all['inc'] == {x.key, y.key, z.key}
    assert p.in_memory['inc'] == {x.key, y.key, z.key}
    assert p.released == {}
    assert p.erred == {}

    yield _wait([xx, yy, zz])
    assert p.all['dec'] == {xx.key, yy.key, zz.key}
    assert p.in_memory['dec'] == {xx.key, yy.key, zz.key}
    assert p.released == {}
    assert p.erred == {}

    # Releasing keys moves them from in_memory to released.
    s.client_releases_keys(client=e.id, keys=[x.key, y.key, z.key])
    assert p.released['inc'] == {x.key, y.key, z.key}
    assert p.all['inc'] == {x.key, y.key, z.key}
    assert p.all['dec'] == {xx.key, yy.key, zz.key}

    xxx = e.submit(div, 1, 0)
    yield _wait([xxx])
    assert p.erred == {'div': {xxx.key}}

    s.client_releases_keys(client=e.id, keys=[xx.key, yy.key, zz.key])
    for c in [p.all, p.in_memory, p.released, p.erred]:
        assert 'inc' not in c
        assert 'dec' not in c

    def f(x):
        return x

    # Only the last future is still referenced; earlier ones get collected.
    for i in range(4):
        future = e.submit(f, i)
    import gc
    gc.collect()

    yield gen.sleep(1)
    yield _wait([future])
    assert p.in_memory == {'f': {future.key}}

    # A restart clears every collection.
    yield e._restart()
    for c in [p.all, p.in_memory, p.released, p.erred]:
        assert not c

    x = e.submit(div, 1, 2)
    yield _wait([x])
    p.validate()
def test_steal_expensive_data_slow_computation(e, s, a, b):
    """Slow tasks justify moving even expensive data to idle workers."""
    np = pytest.importorskip('numpy')

    x = e.submit(slowinc, 100, delay=0.1, workers=a.address)
    yield _wait([x])  # learn that slowinc is slow

    x = e.submit(np.arange, 1000000, workers=a.address)  # put expensive data
    yield _wait([x])

    slow = [e.submit(slowinc, x, delay=0.1, pure=False) for i in range(4)]
    # BUG FIX: was `yield _wait([slow])`, which wraps the list of futures in
    # another list instead of waiting on the futures themselves.
    yield _wait(slow)

    assert b.data  # not empty
def test_AllProgress(e, s, a, b):
    """AllProgress plugin mirrors scheduler state through a task lifecycle."""
    x, y, z = e.map(inc, [1, 2, 3])
    xx, yy, zz = e.map(dec, [x, y, z])

    yield _wait([x, y, z])
    p = AllProgress(s)
    assert p.all['inc'] == {x.key, y.key, z.key}
    assert p.in_memory['inc'] == {x.key, y.key, z.key}
    assert p.released == {}
    assert p.erred == {}

    yield _wait([xx, yy, zz])
    assert p.all['dec'] == {xx.key, yy.key, zz.key}
    assert p.in_memory['dec'] == {xx.key, yy.key, zz.key}
    assert p.released == {}
    assert p.erred == {}

    s.client_releases_keys(client=e.id, keys=[x.key, y.key, z.key])
    assert p.released['inc'] == {x.key, y.key, z.key}
    assert p.all['inc'] == {x.key, y.key, z.key}
    assert p.all['dec'] == {xx.key, yy.key, zz.key}

    xxx = e.submit(div, 1, 0)
    yield _wait([xxx])
    assert p.erred == {'div': {xxx.key}}

    s.client_releases_keys(client=e.id, keys=[xx.key, yy.key, zz.key])
    for c in [p.all, p.in_memory, p.released, p.erred]:
        assert 'inc' not in c
        assert 'dec' not in c

    def f(x):
        return x

    # Keep only the last future alive; force collection of the rest.
    for i in range(4):
        future = e.submit(f, i)
    import gc; gc.collect()

    yield gen.sleep(1)
    yield _wait([future])
    assert p.in_memory == {'f': {future.key}}

    yield e._restart()
    for c in [p.all, p.in_memory, p.released, p.erred]:
        assert not c

    x = e.submit(div, 1, 2)
    yield _wait([x])
    p.validate()
def test_allow_restrictions(s, a, b):
    """``allow_other_workers`` loosens worker restrictions and is validated."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    # Hard restriction: task lands on the named worker, nothing is loose.
    x = e.submit(inc, 1, workers=a.ip)
    yield x._result()
    assert s.who_has[x.key] == {a.address}
    assert not s.loose_restrictions

    # Loose restriction: preferred worker, but key recorded as loose.
    x = e.submit(inc, 2, workers=a.ip, allow_other_workers=True)
    yield x._result()
    assert s.who_has[x.key] == {a.address}
    assert x.key in s.loose_restrictions

    L = e.map(inc, range(3, 13), workers=a.ip, allow_other_workers=True)
    yield _wait(L)
    assert all(s.who_has[f.key] == {a.address} for f in L)
    assert {f.key for f in L}.issubset(s.loose_restrictions)

    """
    x = e.submit(inc, 14, workers='127.0.0.3')
    with ignoring(gen.TimeoutError):
        yield gen.with_timeout(timedelta(seconds=0.1), x._result())
        assert False
    assert not s.who_has[x.key]
    assert x.key not in s.loose_restrictions
    """

    # Loose restriction to an unknown host still runs somewhere.
    x = e.submit(inc, 15, workers='127.0.0.3', allow_other_workers=True)
    yield x._result()
    assert s.who_has[x.key]
    assert x.key in s.loose_restrictions

    L = e.map(inc, range(15, 25), workers='127.0.0.3',
              allow_other_workers=True)
    yield _wait(L)
    assert all(s.who_has[f.key] for f in L)
    assert {f.key for f in L}.issubset(s.loose_restrictions)

    # allow_other_workers without workers= is meaningless.
    with pytest.raises(ValueError):
        e.submit(inc, 1, allow_other_workers=True)

    with pytest.raises(ValueError):
        e.map(inc, [1], allow_other_workers=True)

    # allow_other_workers must be a bool.
    with pytest.raises(TypeError):
        e.submit(inc, 20, workers='127.0.0.1', allow_other_workers='Hello!')

    with pytest.raises(TypeError):
        e.map(inc, [20], workers='127.0.0.1', allow_other_workers='Hello!')
def test_with_status(e, s, a, b):
    """The HTTP scheduler's tasks.json reflects task state counts."""
    ss = HTTPScheduler(s)
    ss.listen(0)

    client = AsyncHTTPClient()
    response = yield client.fetch('http://localhost:%d/tasks.json' % ss.port)
    out = json.loads(response.body.decode())
    assert out['total'] == 0
    assert out['processing'] == 0
    assert out['failed'] == 0
    assert out['in-memory'] == 0
    assert out['ready'] == 0
    assert out['waiting'] == 0

    L = e.map(div, range(10), range(10))
    yield _wait(L)

    client = AsyncHTTPClient()
    response = yield client.fetch('http://localhost:%d/tasks.json' % ss.port)
    out = json.loads(response.body.decode())
    assert out['failed'] == 1      # div(0, 0) errs
    assert out['in-memory'] == 9
    assert out['ready'] == 0
    assert out['total'] == 10
    assert out['waiting'] == 0

    ss.stop()
def test_workers(e, s, a, b):
    """The workers() report is JSON-serializable with sane metric ranges."""
    while 'latency' not in s.host_info[a.ip]:
        yield gen.sleep(0.01)
    d = workers(s)

    assert json.loads(json.dumps(d)) == d

    assert 0 <= d[a.ip]['cpu'] <= 100
    assert 0 <= d[a.ip]['latency'] <= 2
    assert 0 <= d[a.ip]['memory']
    assert 0 < d[a.ip]['memory-percent'] < 100
    assert set(map(int, d[a.ip]['ports'])) == {a.port, b.port}
    assert d[a.ip]['processing'] == {}
    assert d[a.ip]['last-seen'] > 0

    L = e.map(div, range(10), range(10))
    yield _wait(L)

    assert 0 <= d[a.ip]['cpu'] <= 100
    assert 0 <= d[a.ip]['latency'] <= 2
    assert 0 <= d[a.ip]['memory']
    assert 0 < d[a.ip]['memory-percent'] < 100
    assert set(map(int, d[a.ip]['ports'])) == {a.port, b.port}
    assert d[a.ip]['processing'] == {}
    assert 0 <= d[a.ip]['disk-read']
    assert 0 <= d[a.ip]['disk-write']
    assert 0 <= d[a.ip]['network-send']
    assert 0 <= d[a.ip]['network-recv']
def test_write_bytes(e, s, a, b):
    """write_bytes stores scattered futures to HDFS, globbed or directory."""
    with make_hdfs() as hdfs:
        data = [b'123', b'456', b'789']
        remote_data = yield e._scatter(data)

        # Glob-style target: one file per block.
        futures = write_bytes('/tmp/test/data/file.*.dat', remote_data,
                              hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data/')) == 3
        with hdfs.open('/tmp/test/data/file.1.dat') as f:
            assert f.read() == b'456'

        # Directory target works the same way.
        futures = write_bytes('/tmp/test/data2/', remote_data, hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data2/')) == 3
def test_with_data(s, a, b):
    """memory-load endpoints report per-worker and per-key byte totals."""
    ss = HTTPScheduler(s)
    ss.listen(0)
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    L = e.map(inc, [1, 2, 3])
    L2 = yield e._scatter(['Hello', 'world!'])
    yield _wait(L)

    client = AsyncHTTPClient()
    response = yield client.fetch('http://localhost:%s/memory-load.json'
                                  % ss.port)
    out = json.loads(response.body.decode())

    assert all(isinstance(v, int) for v in out.values())
    assert set(out) == {a.address_string, b.address_string}
    assert sum(out.values()) == sum(
        map(sys.getsizeof, [1, 2, 3, 'Hello', 'world!']))

    response = yield client.fetch(
        'http://localhost:%s/memory-load-by-key.json' % ss.port)
    out = json.loads(response.body.decode())
    assert set(out) == {a.address_string, b.address_string}
    assert all(isinstance(v, dict) for v in out.values())
    assert all(k in {'inc', 'data'} for d in out.values() for k in d)
    assert all(isinstance(v, int) for d in out.values() for v in d.values())

    assert sum(v for d in out.values() for v in d.values()) == \
        sum(map(sys.getsizeof, [1, 2, 3, 'Hello', 'world!']))

    ss.stop()
    yield e._shutdown()
def test_with_data(e, s, a, b):
    """Fixture-based variant: memory-load endpoints report byte totals."""
    ss = HTTPScheduler(s)
    ss.listen(0)

    L = e.map(inc, [1, 2, 3])
    L2 = yield e._scatter(['Hello', 'world!'])
    yield _wait(L)

    client = AsyncHTTPClient()
    response = yield client.fetch('http://localhost:%d/memory-load.json'
                                  % ss.port)
    out = json.loads(response.body.decode())

    assert all(isinstance(v, int) for v in out.values())
    assert set(out) == {a.address, b.address}
    assert sum(out.values()) == sum(map(getsizeof,
                                        [1, 2, 3, 'Hello', 'world!']))

    response = yield client.fetch(
        'http://localhost:%s/memory-load-by-key.json' % ss.port)
    out = json.loads(response.body.decode())
    assert set(out) == {a.address, b.address}
    assert all(isinstance(v, dict) for v in out.values())
    assert all(k in {'inc', 'data'} for d in out.values() for k in d)
    assert all(isinstance(v, int) for d in out.values() for v in d.values())

    assert sum(v for d in out.values() for v in d.values()) == \
        sum(map(getsizeof, [1, 2, 3, 'Hello', 'world!']))

    ss.stop()
def test_progress_stream(e, s, a, b):
    """progress_stream emits counts and progress_quads shapes plot data."""
    futures = e.map(div, [1] * 10, range(10))

    x = 1
    for i in range(5):
        x = do(inc)(x)
    future = e.compute(x)

    yield _wait(futures + [future])

    stream = yield progress_stream(s.address, interval=0.010)
    msg = yield read(stream)
    assert msg == {'all': {'div': 10, 'inc': 5, 'finalize': 1},
                   'erred': {'div': 1},
                   'in_memory': {'div': 9, 'finalize': 1},
                   'released': {'div': 1, 'inc': 5}}

    d = progress_quads(msg)

    assert d == {'name': ['div', 'inc', 'finalize'],
                 'all': [10, 5, 1],
                 'in_memory': [9, 0, 1],
                 'in_memory_right': [1, 1, 1],
                 'fraction': ['10 / 10', '5 / 5', '1 / 1'],
                 'erred': [1, 0, 0],
                 'erred_left': [0.9, 1, 1],
                 'released': [1, 5, 0],
                 'released_right': [0.1, 1, 0],
                 'top': [0.7, 1.7, 2.7],
                 'center': [0.5, 1.5, 2.5],
                 'bottom': [0.3, 1.3, 2.3]}

    stream.close()
def test_dont_steal_expensive_data_fast_computation(e, s, a, b):
    """Cheap computations stay with expensive data rather than moving it."""
    np = pytest.importorskip('numpy')
    x = e.submit(np.arange, 1000000, workers=a.address)
    yield _wait([x])
    future = e.submit(np.sum, [1], workers=a.address)  # learn that sum is fast
    yield _wait([future])

    cheap = [
        e.submit(np.sum, x, pure=False, workers=a.address,
                 allow_other_workers=True)
        for i in range(10)
    ]
    yield _wait(cheap)
    assert len(b.data) == 0
    assert len(a.data) == 12
def test_eventually_steal_unknown_functions(e, s, a, b):
    """Even before timing data exists, long-running tasks get stolen."""
    futures = e.map(slowinc, range(10), delay=0.1, workers=a.address,
                    allow_other_workers=True)
    yield _wait(futures)
    # Both workers should end up with a meaningful share of the results.
    assert len(a.data) >= 3
    assert len(b.data) >= 3
def test_eventstream(e, s, a, b):
    """EventStream plugin buffers one message per completed task."""
    es = EventStream()
    s.add_plugin(es)
    assert es.buffer == []

    futures = e.map(div, [1] * 10, range(10))
    yield _wait(futures)

    assert len(es.buffer) == 10
def test_worksteal_many_thieves(e, s, *workers):
    """Many idle workers each steal a fair slice of a large task batch."""
    x = e.submit(slowinc, -1, delay=0.1)
    yield x._result()

    xs = e.map(slowinc, [x] * 100, pure=False, delay=0.01)
    yield _wait(xs)

    # No worker should hoard or starve.
    for w, keys in s.has_what.items():
        assert 2 < len(keys) < 50
def f(c, a, b):
    """Submit several tasks and check both workers received some."""
    e = Executor((c.ip, c.port), start=False, loop=loop)
    yield e._start()

    L = [e.submit(inc, i) for i in range(10)]
    yield _wait(L)

    assert a.data and b.data
    yield e._shutdown()
def test_many_submits_spread_evenly(s, a, b):
    """Independent submits are distributed across both workers."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    L = [e.submit(inc, i) for i in range(10)]
    yield _wait(L)

    assert a.data and b.data
    yield e._shutdown()
def test_pragmatic_move_small_data_to_large_data(s, a, b):
    """Tasks run where their large input lives; the small input moves."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    lists = e.map(lambda n: list(range(n)), [10] * 10, pure=False)
    sums = e.map(sum, lists)
    total = e.submit(sum, sums)

    def f(x, y):
        return None

    results = e.map(f, lists, [total] * 10)
    yield _wait([total])
    yield _wait(results)

    # Each result should be computed on the worker holding its big list.
    for l, r in zip(lists, results):
        assert s.who_has[l.key] == s.who_has[r.key]

    yield e._shutdown()
def test_cancel_stress(e, s, *workers):
    """Repeatedly cancel an in-flight graph without wedging the scheduler."""
    da = pytest.importorskip('dask.array')
    x = da.random.random((40, 40), chunks=(1, 1))
    x = e.persist(x)
    yield _wait([x])
    y = (x.sum(axis=0) + x.sum(axis=1) + 1).std()
    for i in range(5):
        f = e.compute(y)
        # Cancel once roughly half of the new tasks are still waiting.
        while len(s.waiting) > (len(y.dask) - len(x.dask)) / 2:
            yield gen.sleep(0.01)
        yield e._cancel(f)
def test_active_holds_tasks(e, s, w):
    """Worker.active contains a key only while its task is executing."""
    future = e.submit(slowinc, 1, delay=0.2)
    yield gen.sleep(0.1)
    assert future.key in w.active
    yield future._result()
    assert future.key not in w.active

    # An erring task is also removed from the active set.
    future = e.submit(throws, 1)
    with ignoring(Exception):
        yield _wait([future])
    assert not w.active
def test_write_bytes(s, a, b):
    """Standalone-executor variant of the HDFS write_bytes round trip."""
    with make_hdfs() as hdfs:
        e = Executor((s.ip, s.port), start=False)
        yield e._start()

        data = [b'123', b'456', b'789']
        remote_data = yield e._scatter(data)

        futures = write_bytes('/tmp/test/data/file.*.dat', remote_data,
                              hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data/')) == 3
        with hdfs.open('/tmp/test/data/file.1.dat') as f:
            assert f.read() == b'456'

        futures = write_bytes('/tmp/test/data2/', remote_data, hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data2/')) == 3
def f(c, a, b):
    """Pragmatic-data-movement check driven through e.scheduler state."""
    e = Executor((c.ip, c.port), start=False, loop=loop)
    yield e._start()

    lists = e.map(lambda n: list(range(n)), [10] * 10, pure=False)
    sums = e.map(sum, lists)
    total = e.submit(sum, sums)

    def f(x, y):
        return None

    results = e.map(f, lists, [total] * 10)
    yield _wait([total])
    yield _wait(results)

    for l, r in zip(lists, results):
        assert e.scheduler.who_has[l.key] == e.scheduler.who_has[r.key]

    yield e._shutdown()
def test_steal_related_tasks(e, s, a, b, c):
    """Stealing keeps neighboring tasks together on the same worker."""
    futures = e.map(slowinc, range(20), delay=0.05, workers=a.address,
                    allow_other_workers=True)
    yield _wait(futures)

    nearby = 0
    for f1, f2 in sliding_window(2, futures):
        if s.who_has[f1.key] == s.who_has[f2.key]:
            nearby += 1

    assert nearby > 10
def test_write_binary(s, a, b):
    """write_binary stores scattered blocks to HDFS, globbed or directory."""
    with make_hdfs() as hdfs:
        e = Executor((s.ip, s.port), start=False)
        yield e._start()

        data = [b'123', b'456', b'789']
        remote_data = yield e._scatter(data)

        futures = write_binary('/tmp/test/data/file.*.dat', remote_data,
                               hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data/')) == 3
        with hdfs.open('/tmp/test/data/file.1.dat') as f:
            assert f.read() == b'456'

        futures = write_binary('/tmp/test/data2/', remote_data, hdfs=hdfs)
        yield _wait(futures)

        assert len(hdfs.ls('/tmp/test/data2/')) == 3
def test_dont_steal_fast_tasks(e, s, *workers):
    """Trivially fast tasks are not worth stealing away from their data."""
    np = pytest.importorskip('numpy')
    x = e.submit(np.random.random, 10000000, workers=workers[0].address)

    def do_nothing(x, y=None):
        pass

    futures = e.map(do_nothing, range(1000), y=x)
    yield _wait(futures)

    # All 1000 results plus the big array stay on the first worker.
    assert len(s.has_what[workers[0].address]) == 1001
def test_restart_cleared(e, s, a, b):
    """Executor restart empties every scheduler-side collection."""
    x = 2 * delayed(1) + 1
    f = e.compute(x)
    yield _wait([f])
    assert s.released

    yield e._restart()

    for coll in [s.tasks, s.dependencies, s.dependents, s.waiting,
                 s.waiting_data, s.who_has, s.restrictions,
                 s.loose_restrictions, s.released, s.priority,
                 s.exceptions, s.who_wants, s.exceptions_blame]:
        assert not coll
def test_spill_to_disk(e, s):
    """Over the memory limit, LRU data spills to disk and reloads on use."""
    np = pytest.importorskip('numpy')
    w = Worker(s.ip, s.port, loop=s.loop, memory_limit=1000)
    yield w._start()

    x = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='x')
    yield _wait(x)
    y = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='y')
    yield _wait(y)

    assert set(w.data) == {x.key, y.key}
    assert set(w.data.fast) == {x.key, y.key}

    # A third 500-byte array exceeds the 1000-byte limit: oldest spills.
    z = e.submit(np.random.randint, 0, 255, size=500, dtype='u1', key='z')
    yield _wait(z)
    assert set(w.data) == {x.key, y.key, z.key}
    assert set(w.data.fast) == {y.key, z.key}
    assert set(w.data.slow) == {x.key}

    # Touching x brings it back to fast storage, evicting y.
    yield x._result()
    assert set(w.data.fast) == {x.key, z.key}
    assert set(w.data.slow) == {y.key}
def test_AllProgress_lost_key(e, s, a, b, timeout=None):
    """Keys lost with dead workers disappear from in_memory tracking."""
    p = AllProgress(s)
    futures = e.map(inc, range(5))
    yield _wait(futures)
    assert len(p.in_memory['inc']) == 5

    yield a._close()
    yield b._close()

    start = time()
    while len(p.in_memory['inc']) > 0:
        yield gen.sleep(0.1)
        assert time() < start + 2
def test_update_state_respects_data_in_memory(e, s, a):
    """Re-submitting a graph must not release data it still depends on."""
    x = delayed(inc)(1)
    y = delayed(inc)(x)
    f = e.persist(y)
    yield _wait([f])
    assert s.released == {x.key}
    assert s.who_has == {y.key: {a.address}}

    z = delayed(add)(x, y)
    f2 = e.persist(z)
    # y must stay in memory the whole time z is being computed.
    while f2.key not in s.who_has:
        assert y.key in s.who_has
        yield gen.sleep(0.0001)
def test_task_table(e, s, a, b):
    """task_table_update fills the bokeh source with scheduler counts."""
    source, plot = task_table_plot()

    data = tasks(s)
    task_table_update(source, data)
    assert source.data['processing'] == [0]

    futures = e.map(inc, range(10))
    yield _wait(futures)

    data = tasks(s)
    task_table_update(source, data)
    assert source.data['processing'] == [0]
    assert source.data['total'] == [10]
def test_restart_cleared(e, s, a, b):
    """Duplicate of restart-clearing check: all state containers empty."""
    x = 2 * delayed(1) + 1
    f = e.compute(x)
    yield _wait([f])
    assert s.released

    yield e._restart()

    for coll in [
        s.tasks, s.dependencies, s.dependents, s.waiting, s.waiting_data,
        s.who_has, s.restrictions, s.loose_restrictions, s.released,
        s.priority, s.exceptions, s.who_wants, s.exceptions_blame
    ]:
        assert not coll
def test_transition_story(e, s, a, b):
    """transition_story returns the log subset relevant to given keys."""
    x = delayed(inc)(1)
    y = delayed(inc)(x)
    f = e.persist(y)
    yield _wait([f])

    assert s.transition_log

    story = s.transition_story(x.key)
    assert all(line in s.transition_log for line in story)
    assert len(story) < len(s.transition_log)
    assert all(x.key == line[0] or x.key in line[-1] for line in story)

    # Asking about two keys yields strictly more history.
    assert len(s.transition_story(x.key, y.key)) > len(story)
def test_progress_stream(e, s, a, b):
    """Duplicate progress_stream check with expanded dict literals."""
    futures = e.map(div, [1] * 10, range(10))

    x = 1
    for i in range(5):
        x = do(inc)(x)
    future = e.compute(x)

    yield _wait(futures + [future])

    stream = yield progress_stream(s.address, interval=0.010)
    msg = yield read(stream)
    assert msg == {
        'all': {'div': 10, 'inc': 5, 'finalize': 1},
        'erred': {'div': 1},
        'in_memory': {'div': 9, 'finalize': 1},
        'released': {'div': 1, 'inc': 5},
    }

    d = progress_quads(msg)

    assert d == {
        'name': ['div', 'inc', 'finalize'],
        'all': [10, 5, 1],
        'in_memory': [9, 0, 1],
        'in_memory_right': [1, 1, 1],
        'fraction': ['10 / 10', '5 / 5', '1 / 1'],
        'erred': [1, 0, 0],
        'erred_left': [0.9, 1, 1],
        'released': [1, 5, 0],
        'released_right': [0.1, 1, 0],
        'top': [0.7, 1.7, 2.7],
        'center': [0.5, 1.5, 2.5],
        'bottom': [0.3, 1.3, 2.3],
    }

    stream.close()
def f(c, a, b):
    """_wait returns (done, not_done) sets once all futures finish."""
    e = Executor((c.ip, c.port), start=False)
    IOLoop.current().spawn_callback(e._go)

    a = e.submit(inc, 1)
    b = e.submit(inc, 1)
    c = e.submit(inc, 2)

    done, not_done = yield _wait([a, b, c])

    assert done == {a, b, c}
    assert not_done == set()
    assert a.status == b.status == 'finished'

    yield e._shutdown()
def test_restrictions_submit(s, a, b):
    """submit(workers=...) pins each task's data to the named worker."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    x = e.submit(inc, 1, workers={a.ip})
    y = e.submit(inc, x, workers={b.ip})
    yield _wait([x, y])

    assert s.restrictions[x.key] == {a.ip}
    assert x.key in a.data

    assert s.restrictions[y.key] == {b.ip}
    assert y.key in b.data

    yield e._shutdown()
def test_wait(s, a, b):
    """_wait on finished futures reports all done, none pending."""
    e = Executor((s.ip, s.port), start=False)
    yield e._start()

    a = e.submit(inc, 1)
    b = e.submit(inc, 1)
    c = e.submit(inc, 2)

    done, not_done = yield _wait([a, b, c])

    assert done == {a, b, c}
    assert not_done == set()
    assert a.status == b.status == 'finished'

    yield e._shutdown()
def f(c, a, b):
    """Submit-level restrictions checked through executor-side state."""
    e = Executor((c.ip, c.port), start=False)
    IOLoop.current().spawn_callback(e._go)

    x = e.submit(inc, 1, workers={a.ip})
    y = e.submit(inc, x, workers={b.ip})
    yield _wait([x, y])

    assert e.restrictions[x.key] == {a.ip}
    assert x.key in a.data

    assert e.restrictions[y.key] == {b.ip}
    assert y.key in b.data

    yield e._shutdown()