def test_get_worker_name(client):
    """A client created inside a worker task shows up in the scheduler's
    client list under a name containing "worker"."""

    def submit_from_worker():
        # Creates (or reuses) a worker-side client, which registers with
        # the scheduler under a worker-derived client name.
        get_client().submit(inc, 1).result()

    client.run(submit_from_worker)

    def scheduler_client_names(dask_scheduler):
        return list(dask_scheduler.clients)

    # Poll until a worker-named client appears, up to a 10 s deadline.
    deadline = time() + 10
    while not any("worker" in n for n in client.run_on_scheduler(scheduler_client_names)):
        sleep(0.1)
        assert time() < deadline
def test_get_worker_name(client):
    """Submitting via a worker-side client registers a client whose name
    contains 'worker' on the scheduler."""

    def run_inc_on_worker():
        get_client().submit(inc, 1).result()

    client.run(run_inc_on_worker)

    def list_client_names(dask_scheduler):
        return list(dask_scheduler.clients)

    # Wait (bounded at 10 s) for the worker-named client to register.
    deadline = time() + 10
    while not any('worker' in name for name in client.run_on_scheduler(list_client_names)):
        sleep(0.1)
        assert time() < deadline
def test_compute_sync(client):
    """Actors work through dask.delayed + compute(actors=...), and actor
    state is released from workers afterwards."""

    @dask.delayed
    def bump(n, counter):
        # The counter argument must arrive as an Actor proxy on the worker.
        assert isinstance(counter, Actor), type(counter)
        for _ in range(n):
            counter.increment().result()

    @dask.delayed
    def read_count(counter, blanks):
        # `blanks` only forces the increments to run first.
        return counter.n

    counter = dask.delayed(Counter)()
    increments = [bump(i, counter) for i in range(5)]
    final = read_count(counter, increments)

    result = final.compute(actors=counter)
    assert result == 0 + 1 + 2 + 3 + 4

    def resident_objects(dask_worker):
        return len(dask_worker.data) + len(dask_worker.actors)

    # After compute, workers should drop all data and actors within 2 s.
    deadline = time() + 2
    while any(client.run(resident_objects).values()):
        sleep(0.01)
        assert time() < deadline
def test_compute_sync(client):
    """Delayed graph driving an actor: increments 0+1+2+3+4, then verifies
    workers release the actor and its data."""

    @dask.delayed
    def increment_many(n, counter):
        assert isinstance(counter, Actor), type(counter)
        for _ in range(n):
            counter.increment().result()

    @dask.delayed
    def final_value(counter, blanks):
        return counter.n

    counter = dask.delayed(Counter)()
    done = [increment_many(k, counter) for k in range(5)]
    result = final_value(counter, done).compute(actors=counter)
    assert result == 0 + 1 + 2 + 3 + 4

    def held_objects(dask_worker):
        return len(dask_worker.data) + len(dask_worker.actors)

    # Bounded wait (2 s) for worker-side cleanup.
    deadline = time() + 2
    while any(client.run(held_objects).values()):
        sleep(0.01)
        assert time() < deadline
def test_get_client_coroutine_sync(client, s, a, b):
    """client.run accepts a native coroutine; each worker awaits get_client
    and returns a computed result."""

    async def fetch_result():
        worker_client = await get_client()
        fut = worker_client.submit(inc, 10)
        return await fut

    results = client.run(fetch_result)
    assert results == {a["address"]: 11, b["address"]: 11}
def test_collections_get(client, optimize_graph, s, a, b):
    """Resource restrictions on a collection pin all its tasks to the worker
    that advertises the resource."""
    da = pytest.importorskip("dask.array")

    def give_resource_A(dask_worker):
        dask_worker.set_resources(**{"A": 1})

    # Only worker `a` advertises resource A.
    client.run(give_resource_A, workers=[a["address"]])

    x = da.random.random(100, chunks=(10,)) + 1
    x.compute(resources={tuple(x.dask): {"A": 1}}, optimize_graph=optimize_graph)

    def log_length(dask_worker):
        return len(dask_worker.log)

    activity = client.run(log_length)
    # All work landed on `a`; `b` stayed idle.
    assert activity[a["address"]]
    assert not activity[b["address"]]
def test_get_client_coroutine_sync(client, s, a, b):
    """Run a coroutine on every worker via client.run and collect results.

    Modernized from the deprecated Tornado ``@gen.coroutine`` / ``yield`` /
    ``raise gen.Return(...)`` style to a native ``async``/``await``
    coroutine, matching the native-coroutine variant of this test already
    present in the file. Behavior is unchanged: each worker creates a
    worker-side client, computes ``inc(10)``, and returns 11.
    """

    async def f():
        # get_client() inside a worker returns that worker's client.
        worker_client = await get_client()
        future = worker_client.submit(inc, 10)
        return await future

    results = client.run(f)
    assert results == {a["address"]: 11, b["address"]: 11}
def test_collections_get(client, optimize_graph, s, a, b):
    """compute(resources=...) routes every task of the collection to the
    single worker holding the required resource."""
    da = pytest.importorskip('dask.array')

    def set_resource(dask_worker):
        dask_worker.set_resources(**{'A': 1})

    client.run(set_resource, workers=[a['address']])

    x = da.random.random(100, chunks=(10,)) + 1
    x.compute(resources={tuple(x.dask): {'A': 1}}, optimize_graph=optimize_graph)

    def worker_log_size(dask_worker):
        return len(dask_worker.log)

    sizes = client.run(worker_log_size)
    # Worker `a` did the work; worker `b` logged nothing.
    assert sizes[a['address']]
    assert not sizes[b['address']]
def test_find_random_open_port(client):
    """_find_random_open_port returns distinct, currently-bindable ports
    across workers, repeatedly."""
    for _ in range(5):
        address_to_port = client.run(lgb.dask._find_random_open_port)
        ports = list(address_to_port.values())
        # Ports must differ even when workers share one host (LocalCluster).
        assert len(set(ports)) == len(ports)
        # Each reported port should still be free to bind.
        for port in ports:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.bind(('', port))
    client.close(timeout=CLIENT_CLOSE_TIMEOUT)
def test_get_client_coroutine_sync(client, s, a, b):
    """Run a coroutine on every worker via client.run and collect results.

    Modernized from the deprecated Tornado ``@gen.coroutine`` / ``yield`` /
    ``raise gen.Return(...)`` idiom to a native ``async``/``await``
    coroutine, matching the native-coroutine variant of this test already
    present in the file. Behavior is unchanged: every worker computes
    ``inc(10)`` through a worker-side client and returns 11.
    """

    async def f():
        # get_client() inside a worker returns that worker's client.
        worker_client = await get_client()
        future = worker_client.submit(inc, 10)
        return await future

    results = client.run(f)
    assert results == {a['address']: 11, b['address']: 11}