def recover2(loom_env, n):
    """Repeatedly submit a plan containing failing tasks and verify that
    the server with *n* workers keeps serving healthy plans afterwards."""

    @tasks.py_task()
    def slow_task1():
        import time
        time.sleep(0.4)
        raise Exception("FAIL_2")

    @tasks.py_task()
    def slow_task2():
        import time
        time.sleep(0.4)
        return b"ABC"

    @tasks.py_task()
    def fail_task():
        import time
        time.sleep(0.10)
        raise Exception("FAIL")

    loom_env.start(n)

    # A merge whose inputs contain two failing tasks -- gathering it must
    # always raise TaskFailed, no matter how often we retry.
    failing_plan = tasks.merge((slow_task1(), slow_task2(), fail_task()))

    # An independent, healthy plan that must still succeed afterwards.
    left = tasks.const("XYZ")
    right = tasks.const("12345")
    healthy_plan = tasks.merge((left, right))

    for _ in range(6):
        with pytest.raises(client.TaskFailed):
            loom_env.submit_and_gather(failing_plan)
    assert loom_env.submit_and_gather(healthy_plan) == b"XYZ12345"
def test_checkpoint_load(loom_env):
    """Tasks with an existing checkpoint file must be loaded from disk
    instead of being recomputed; missing checkpoints are computed normally."""
    loom_env.start(1)

    path1 = os.path.join(LOOM_TEST_BUILD_DIR, "f1.txt")
    path2 = os.path.join(LOOM_TEST_BUILD_DIR, "f2.txt")
    path3 = os.path.join(LOOM_TEST_BUILD_DIR, "f3.txt")
    path4 = os.path.join(LOOM_TEST_BUILD_DIR, "f4.txt")
    path5 = os.path.join(LOOM_TEST_BUILD_DIR, "nonexisting")

    # Pre-create checkpoint files [1]..[4]; path5 deliberately does not exist.
    for index, checkpoint in enumerate((path1, path2, path3, path4)):
        with open(checkpoint, "w") as f:
            f.write("[{}]".format(index + 1))

    t1 = tasks.const("$t1$")
    t1.checkpoint_path = path1  # should load: [1]

    t2 = tasks.const("$t2$")
    t2.checkpoint_path = path2  # should load: [2]

    t3 = tasks.const("$t3$")
    t4 = tasks.const("$t4$")

    x1 = tasks.merge((t1, t2, t3))  # [1][2]$t3$

    x2 = tasks.merge((t1, x1))
    x2.checkpoint_path = path3  # loaded as [3]

    x3 = tasks.merge((t4, t4))
    x3.checkpoint_path = path4  # loaded as [4]

    x4 = tasks.merge((x3, x1, x2, t1, t2, t3))
    x4.checkpoint_path = path5  # no file -> computed

    assert loom_env.submit_and_gather(
        x4, load=True) == b'[4][1][2]$t3$[3][1][2]$t3$'
def test_future_reuse1(loom_env):
    """A future from a previous submit can be used as input of a new plan."""
    loom_env.start(1)

    first = tasks.const("abc")
    second = tasks.const("xyz")
    merged = tasks.merge((first, second))
    future = loom_env.client.submit_one(merged)

    # Reuse the already-submitted future inside a fresh merge.
    prefix = tasks.const("ijk")
    combined = tasks.merge((prefix, future))

    assert loom_env.submit_and_gather(combined) == b"ijkabcxyz"
    assert future.gather() == b"abcxyz"
    loom_env.check_final_state()
def test_more_same_results(loom_env):
    """Submitting the same task twice yields the same result twice."""
    loom_env.start(1)
    left = tasks.const("ABCDE")
    right = tasks.const("123")
    merged = tasks.merge((left, right))
    assert loom_env.submit_and_gather([merged, merged]) == [b"ABCDE123",
                                                           b"ABCDE123"]
    loom_env.check_final_state()
def test_single_result(loom_env):
    """A single merged task produces the concatenation of its inputs."""
    loom_env.start(1)
    left = tasks.const("ABCDE")
    right = tasks.const("123")
    merged = tasks.merge((left, right))
    assert loom_env.submit_and_gather(merged) == b"ABCDE123"
    loom_env.check_final_state()
def test_crash_clean_worker(loom_env):
    """Killing an idle worker must not prevent later plans from running."""
    loom_env.start(2)
    loom_env.kill_worker(0)  # crash a worker before any work is submitted
    left = tasks.const("ABCDE")
    right = tasks.const("123")
    merged = tasks.merge((left, right))
    assert loom_env.submit_and_gather(merged) == b"ABCDE123"
def test_open_and_merge(loom_env):
    """Opening two files and merging them concatenates their contents."""
    part1 = tasks.open(FILE1)
    part2 = tasks.open(FILE2)
    merged = tasks.merge((part1, part2))

    loom_env.start(1)
    result = loom_env.submit_and_gather(merged)

    # FILE1 holds a header line; FILE2 holds "Line 1".."Line 12".
    body = "\n".join("Line {}".format(i) for i in range(1, 13))
    expect = bytes("This is file 1\n" + body + "\n", "ascii")
    assert result == expect
def test_recover1(loom_env):
    """The server survives repeated failing run-tasks and still computes."""
    loom_env.start(1)

    # Ten consecutive failures must each raise TaskFailed.
    for _ in range(10):
        bad = tasks.run("ls /non-existing-dictionary")
        with pytest.raises(client.TaskFailed):
            loom_env.submit_and_gather(bad)

    # A healthy plan afterwards still works.
    left = tasks.const("XYZ")
    right = tasks.const("12345")
    merged = tasks.merge((left, right))
    assert loom_env.submit_and_gather(merged) == b"XYZ12345"
def test_checkpoint_basic(loom_env):
    """A checkpointed result is written to its path and the temporary
    file used during the write is cleaned up."""
    loom_env.start(1)

    part1 = tasks.const("abcd")
    part2 = tasks.const("XYZ")
    merged = tasks.merge((part1, part2))

    path = os.path.join(LOOM_TEST_BUILD_DIR, "test.data")
    merged.checkpoint_path = path

    assert loom_env.submit_and_gather(merged) == b"abcdXYZ"
    with open(path, "rb") as f:
        assert f.read() == b"abcdXYZ"
    # The writer must not leave its temporary file behind.
    assert not os.path.isfile(path + ".loom.tmp")
def test_reconnect2(loom_env):
    """Dropping a client while its task runs must not corrupt the server."""
    loom_env.start(1)

    # Submit a long-running task, then abandon the connection.
    pending = tasks.run("sleep 1")
    loom_env.client.submit_one(pending)
    loom_env.close_client()

    # A fresh client connection computes normally.
    t1 = tasks.const("abc")
    t2 = tasks.const("xyz")
    t3 = tasks.const("123")
    merged = tasks.merge((t1, t2, t3))
    assert loom_env.submit_and_gather(merged) == b"abcxyz123"
    loom_env.check_final_state()
def test_future_reuse2(loom_env):
    """Tasks that already have submitted futures can appear again inside
    a new plan; releasing the futures afterwards leaves a clean state."""
    loom_env.start(1)

    t_abc = tasks.const("abc")
    future_abc = loom_env.client.submit_one(t_abc)

    t_123 = tasks.const("123")
    future_123 = loom_env.client.submit_one(t_123)

    t_ijk = tasks.const("ijk")
    merged = tasks.merge((t_abc, t_123, t_ijk))

    assert loom_env.submit_and_gather((merged, t_ijk)) == [b"abc123ijk",
                                                           b"ijk"]
    future_abc.release()
    future_123.release()
    loom_env.check_final_state()
def test_future_reuse3(loom_env):
    """Many futures of the same command all yield the same payload."""
    loom_env.start(3)

    single = tasks.run("ls")
    single_future = loom_env.client.submit_one(single)

    COUNT = 1000
    batch = [tasks.run("ls") for _ in range(COUNT)]
    batch_futures = loom_env.client.submit(batch)

    expected_one = single_future.fetch()
    merged_all = loom_env.submit_and_gather(tasks.merge(batch_futures))

    loom_env.client.release(batch_futures)
    single_future.release()

    assert expected_one * COUNT == merged_all
    loom_env.check_final_state()
def test_reconnect(loom_env):
    """Killing external clients mid-computation, several times in a row,
    must leave the server able to serve a fresh connection."""
    loom_env.start(2)
    # Script run in a separate interpreter; it submits four sleeping tasks
    # and would block in gather() until we kill it.
    code = """
import sys
sys.path.insert(0, "{LOOM_PYTHON}")

from loom.client import Client, tasks

client = Client("localhost", {PORT})

a = tasks.run("sleep 1")
b = tasks.run("sleep 1")
c = tasks.run("sleep 1")
d = tasks.run("sleep 1")
fs = client.submit((a, b, c, d))
print(client.gather(fs))
""".format(LOOM_PYTHON=LOOM_PYTHON, PORT=loom_env.PORT)

    # Kill the external client while its tasks are still running.
    p = loom_env.independent_python(code)
    time.sleep(0.6)
    assert not p.poll()  # still alive, i.e. blocked in gather
    p.kill()

    p = loom_env.independent_python(code)
    time.sleep(0.6)
    assert not p.poll()
    p.kill()

    p = loom_env.independent_python(code)
    time.sleep(0.2)
    assert not p.poll()
    p.kill()

    time.sleep(0.2)

    # The server must now serve our own connection normally.
    listing = tasks.run("ls")
    assert loom_env.submit_and_gather(listing)
    loom_env.check_final_state()

    t1 = tasks.const("abc")
    t2 = tasks.const("xyz")
    t3 = tasks.const("123")
    merged = tasks.merge((t1, t2, t3))
    assert loom_env.submit_and_gather(merged) == b"abcxyz123"

    listing = tasks.run("ls")
    assert loom_env.submit_and_gather(listing)
    loom_env.check_final_state()
def test_run_double_lines(loom_env):
    """Pipe large payloads through chained run-tasks on 1..3 workers.

    Fix: the repetition factor 20000 was duplicated as a literal in the
    const payloads even though COUNT was already defined for it; all
    occurrences now use COUNT so the sizes cannot drift apart.
    """
    COUNT = 20000

    # Chain "plus1" twice over the first payload: each char shifted by 2.
    a1 = tasks.const("afi" * COUNT)
    b1 = tasks.run(pytestprog(0.0, "plus1"), stdin=a1)
    c1 = tasks.run(pytestprog(0.0, "plus1"), stdin=b1)

    # Shift the second payload once, then pass it through unchanged.
    a2 = tasks.const("kkllmm" * COUNT)
    b2 = tasks.run(pytestprog(0.0, "plus1"), stdin=a2)
    c2 = tasks.run(pytestprog(0.0), stdin=b2)

    result = tasks.merge((c1, c2, a2))
    # "afi"+2 == "chk"; "kkllmm"+1 == "llmmnn"; a2 is untouched.
    expect = b"chk" * COUNT + b"llmmnn" * COUNT + b"kkllmm" * COUNT

    # Repeat with 1, 2 and 3 workers.
    for workers in range(1, 4):
        cleanup()
        loom_env.start(workers)
        assert loom_env.submit_and_gather(result) == expect
def test_cv_iris(loom_env):
    """Run a 15-fold SVM cross-validation over the iris data set."""
    CHUNKS = 15
    CHUNK_SIZE = 150 // CHUNKS  # There are 150 irises
    loom_env.start(4, 4)
    loom_env.info = False

    # Shuffle the data set, then cut it into CHUNKS equal slices of lines.
    data = tasks.open(IRIS_DATA)
    data = tasks.run(("sort", "--random-sort", "-"), [(data, None)])
    lines = tasks.split(data)
    chunks = [tasks.slice(lines, i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE)
              for i in range(CHUNKS)]

    # Fold i trains on every chunk except chunk i.
    trainsets = [tasks.merge(chunks[:i] + chunks[i + 1:])
                 for i in range(CHUNKS)]

    models = []
    for i, trainset in enumerate(trainsets):
        model = tasks.run("svm-train data", [(trainset, "data")],
                          ["data.model"])
        model.label = "svm-train: {}".format(i)
        models.append(model)

    # Evaluate each model on its held-out chunk.
    predict = []
    for chunk, model in zip(chunks, models):
        task = tasks.run("svm-predict testdata model out",
                         [(chunk, "testdata"), (model, "model")])
        task.label = "svm-predict"
        predict.append(task)

    loom_env.set_trace("mytrace")
    results = loom_env.submit_and_gather(predict)
    assert len(results) == CHUNKS
    for line in results:
        assert line.startswith(b"Accuracy = ")
def f(a, b):
    """Build and return a task merging the two given tasks."""
    return tasks.merge((a, b))
def test_merge_delimiter(loom_env):
    """Merging with a delimiter joins the parts with it in between."""
    loom_env.start(1)
    digits = [tasks.const(str(i)) for i in range(10)]
    merged = tasks.merge(digits, "abc")
    expected = bytes("abc".join(str(i) for i in range(10)), "ascii")
    assert loom_env.submit_and_gather(merged) == expected
def test_merge_w3(loom_env):
    """Merge works unchanged when three workers are available."""
    loom_env.start(3)
    left = tasks.const("ABCDE")
    right = tasks.const("123")
    merged = tasks.merge((left, right))
    assert loom_env.submit_and_gather(merged) == b"ABCDE123"
def test_merge_empty_with_delimiter(loom_env):
    """Merging no inputs yields an empty result even with a delimiter."""
    loom_env.start(1)
    merged = tasks.merge((), "abc")
    assert loom_env.submit_and_gather(merged) == b""