def test_cpu_resources4(test_env):
    """1cpu + 2cpu tasks on 3 cpu governor"""
    test_env.start(1, n_cpus=3)
    with test_env.client.new_session() as s:
        tasks.Sleep(blob("first"), 1.0)
        tasks.Sleep(blob("second"), 1.0, cpus=2)
        s.submit()
        test_env.assert_duration(0.9, 1.1, lambda: s.wait_all())

def test_cpu_resources3(test_env):
    """1cpu + 2cpu tasks on 2 cpu governor"""
    test_env.start(1, n_cpus=2)
    with test_env.client.new_session() as s:
        tasks.sleep(1.0, blob("first"))
        tasks.sleep(1.0, blob("second"), cpus=2)
        s.submit()
        test_env.assert_duration(1.9, 2.1, lambda: s.wait_all())

def test_cpu_resources1(test_env):
    """2x 1cpu tasks on 1 cpu governor"""
    test_env.start(1)
    with test_env.client.new_session() as s:
        tasks.Sleep(blob("first"), 1.0)
        tasks.Sleep(blob("second"), 1.0)
        s.submit()
        test_env.assert_duration(1.9, 2.1, lambda: s.wait_all())

def test_cpu_resources2(test_env):
    """2x 1cpu tasks on 2 cpu governor"""
    test_env.start(1, n_cpus=2)
    with test_env.client.new_session() as s:
        tasks.sleep(1.0, blob("first"))
        tasks.sleep(1.0, blob("second"))
        s.submit()
        test_env.assert_duration(0.9, 1.1, lambda: s.wait_all())

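# The four cpu_resources tests above exercise one scheduling rule: every task
# declares how many CPUs it needs (1 by default, or via `cpus=`), and the
# governor runs tasks concurrently only when enough CPUs are free, which is
# what the asserted wall-clock windows check. A minimal sketch of the pattern,
# reusing only helpers already used above (timing bounds are illustrative):
#
#     test_env.start(1, n_cpus=3)
#     with test_env.client.new_session() as s:
#         tasks.Sleep(blob("x"), 1.0)            # needs 1 cpu
#         tasks.Sleep(blob("y"), 1.0, cpus=2)    # needs 2 cpus
#         s.submit()
#         s.wait_all()   # both fit on 3 cpus at once -> about 1 second
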
def test_update(test_env):
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s:
        t1 = tasks.concat((blob("a"), blob("b")))
        s.submit()
        s.update((t1,))
        t1.wait()
        s.update((t1,))

def test_concat3(test_env):
    """Merge large blobs"""
    test_env.start(1)
    a = b"a123" * 1000000
    b = b"b43" * 2500000
    c = b"c" * 1000
    with test_env.client.new_session() as s:
        t1 = tasks.concat((blob(a), blob(c), blob(b), blob(c), blob(a)))
        t1.output.keep()
        s.submit()
        assert t1.output.fetch().get_bytes() == a + c + b + c + a

def test_task_wait(test_env):
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s.bind_only():
        t1 = tasks.concat((blob("a"), blob("b")))
        assert t1.state is None
        s.submit()
        assert t1.state == rpc.common.TaskState.notAssigned
        t1.wait()
        assert t1.state == rpc.common.TaskState.finished

def test_dataobj_wait(test_env):
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s:
        t1 = tasks.concat((blob("a"), blob("b")))
        o1 = t1.output
        assert t1.state is None
        s.submit()
        assert o1.state == rpc.common.DataObjectState.unfinished
        o1.wait()
        assert o1.state == rpc.common.DataObjectState.finished

def test_wait_all(test_env):
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s:
        t1 = tasks.concat((blob("a"), blob("b")))
        t2 = tasks.sleep(0.5, t1)
        s.submit()
        test_env.assert_duration(0.35, 0.65, lambda: s.wait_all())
        assert t1.state == rpc.common.TaskState.finished
        assert t2.state == rpc.common.TaskState.finished
        test_env.assert_max_duration(0.1, lambda: t2.wait())

def test_make_directory(test_env):
    test_env.start(1, delete_list_timeout=0)
    # TODO: EMPTY DIR
    os.mkdir("empty")
    os.mkdir("mydir3")
    with open("mydir3/file.txt", "w") as f:
        f.write("My data 4")
    with test_env.client.new_session() as s:
        b1 = blob(b"My data 1")
        b2 = blob(b"My data 2")
        b3 = blob(b"My data 3")
        d1 = directory("mydir3")
        # TODO: EMPTY DIR
        d2 = directory("empty")
        t0 = tasks.Execute(["/bin/cat", b1],
                           stdout=True,
                           input_paths=[InputDir("d1", dataobj=d1)],
                           output_paths=[OutputDir("d1")])
        r = tasks.MakeDirectory([
            ("myfile1", t0.outputs["stdout"]),
            ("mydir/mydir2/myfile2", b2),
            ("mydir/myfile3", b3),
            ("mydir/d1a", d1),
            # ("mydir/d2", d2),
            ("mydir/d1b", t0.outputs["d1"]),
        ])
        r.output.keep()
        s.submit()
        s.wait_all()
        r.output.fetch().write("rdir")

        with open(os.path.join(test_env.work_dir, "rdir", "myfile1")) as f:
            assert f.read() == "My data 1"
        with open(os.path.join(test_env.work_dir, "rdir",
                               "mydir", "mydir2", "myfile2")) as f:
            assert f.read() == "My data 2"
        with open(os.path.join(test_env.work_dir, "rdir",
                               "mydir", "myfile3")) as f:
            assert f.read() == "My data 3"
        with open(os.path.join(test_env.work_dir, "rdir",
                               "mydir", "d1a", "file.txt")) as f:
            assert f.read() == "My data 4"
        with open(os.path.join(test_env.work_dir, "rdir",
                               "mydir", "d1b", "file.txt")) as f:
            assert f.read() == "My data 4"

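# `tasks.MakeDirectory` assembles a directory data object from (path, object)
# pairs: the sources above mix a task's stdout, plain blobs, an uploaded
# directory (`directory("mydir3")`), and a directory produced as a task
# output; intermediate path components such as "mydir/mydir2" are created on
# demand, and `fetch().write("rdir")` materializes the result on disk.
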
def test_unkeep_unfinished(test_env):
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s:
        t1 = tasks.concat((blob("a"), blob("b")))
        t1_output = t1.output
        t1_output.keep()
        t2 = tasks.sleep(0.3, t1)
        s.submit()
        assert t1_output.is_kept() is True
        t1_output.unkeep()
        assert t1_output.is_kept() is False
        t2.wait()

def test_execute_termination(test_env):
    test_env.start(1)
    import time
    with test_env.client.new_session() as s:
        tasks.execute("sleep 5")
        s.submit()
        time.sleep(0.5)
    with test_env.client.new_session() as s:
        t1 = tasks.concat((blob("a"), blob("b")))
        t1.keep_outputs()
        s.submit()
        r = test_env.assert_max_duration(0.2, lambda: t1.output.fetch())
        assert b"ab" == r.get_bytes()

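# Closing the first session should terminate its still-running "sleep 5"
# command; the second session then checks the governor is free again by
# requiring the small concat result to be fetched within 0.2 s.
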
def test_submit(test_env):
    test_env.no_final_check()
    test_env.start(1)
    client = test_env.client
    s = client.new_session()
    with s:
        t1 = tasks.concat((blob("a"), blob("b")))
        t2 = tasks.sleep(1, t1)
        assert s.task_count == 2
        assert s.dataobj_count == 4  # "a", "b", "ab", "ab"
        s.submit()
        assert s.task_count == 0
        assert s.dataobj_count == 0
        assert t1.state == rpc.common.TaskState.notAssigned
        assert t2.state == rpc.common.TaskState.notAssigned

def test_rustsw_hello(test_env):
    test_env.start(1, executor="rusttester")
    with test_env.client.new_session() as s:
        t1 = hello(blob("world"))
        t1.keep_outputs()
        s.submit()
        assert t1.output.fetch().get_bytes() == b"Hello world!"

def test_task_export(test_env):
    import os.path
    test1 = os.path.join(test_env.work_dir, "TEST1")
    test2 = os.path.join(test_env.work_dir, "TEST2")
    test_env.start(1)
    with test_env.client.new_session() as s:
        a = blob("Hello ")
        b = blob("World!")
        tasks.Store(tasks.Concat((a, b)), test1)
        tasks.Store(tasks.Execute("ls /", stdout="output"), test2)
        s.submit()
        s.wait_all()
        with open(test1) as f:
            assert f.read() == "Hello World!"
        with open(test2) as f:
            assert "bin" in f.read()

def run_small_gridcat(session):
    BLOB_SIZE = 5000
    BLOB_COUNT = 10
    rnd = test_rnd()

    def random_string(rnd, length):
        return "".join(rnd.choice(CHARS) for i in range(length))

    cat = Program(("cat", Input("input1"), Input("input2")), stdout="output")
    md5sum = Program("md5sum", stdin="input", stdout="output")

    @remote()
    def take_first(ctx, data):
        return data.get_bytes().split()[0]

    consts = [blob(random_string(rnd, BLOB_SIZE)) for i in range(BLOB_COUNT)]
    ts = []
    for i in range(BLOB_COUNT):
        for j in range(BLOB_COUNT):
            t1 = cat(input1=consts[i], input2=consts[j])
            t2 = md5sum(input=t1)
            t3 = take_first(t2)
            ts.append(t3.output)
    result = md5sum(input=tasks.Concat(ts))
    result.output.keep()
    # session.pending_graph().write("/home/spirali/tmp/graph.dot")
    session.submit()
    assert result.output.fetch().get_bytes() == \
        b"0a9612a2e855278d336a9e1a1589478f  -\n"

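# `run_small_gridcat` builds a 10x10 grid of small pipelines: every pair of
# random blobs is concatenated by the external `cat` program, hashed by
# `md5sum`, trimmed to the bare hash by the `take_first` remote Python task,
# and the collected hashes are concatenated and hashed once more, so a single
# constant digest verifies the whole Program/remote-task composition.
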
def test_cpp_hello_mem(test_env):
    test_env.start(1, executor="cpptester")
    with test_env.client.new_session() as s:
        t1 = cpp_hello(blob("world"))
        t1.output.keep()
        s.submit()
        assert t1.output.fetch().get_bytes() == b"Hello world!"

def test_cpp_fail(test_env):
    test_env.start(1, executor="cpptester")
    with test_env.client.new_session() as s:
        t1 = cpp_fail(blob("ABCD"))
        s.submit()
        with pytest.raises(TaskException, match='ABCD'):
            t1.wait()

def test_remote_complex_args(test_env):
    @remote()
    def test(ctx, a, b, c={}, d=0, **kwargs):
        ret = (a, b.get_bytes(), c['a'].get_bytes(),
               c['b'][3].get_bytes(), d, kwargs['e'](4).get_bytes())
        return pickle.dumps(ret)

    @remote()
    def test2(ctx, a, *args):
        pass

    test_env.start(1)
    with test_env.client.new_session() as s:
        bs = [blob(str(i)) for i in range(5)]
        t0 = test([True], bs[0], {"a": bs[1], "b": bs},
                  d=42, e=lambda x: bs[x])
        t0.output.keep()
        s.submit()
        d = t0.output.fetch().get_bytes()
        assert pickle.loads(d) == ([True], b'0', b'1', b'3', 42, b'4')

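# The test above leans on `@remote` argument handling: plain Python values
# (lists, dicts, numbers, even a lambda) travel to the task as pickled data,
# while any data object embedded in them (the `bs` blobs) arrives inside the
# task body as a data instance exposing `get_bytes()`, which is why the remote
# function can mix both kinds of arguments freely.
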
def test_fail(self, test_env):
    self.start(test_env)
    with test_env.client.new_session() as s:
        t1 = self.task_fail(blob("ABCD"))
        s.submit()
        with pytest.raises(TaskException, match='ABCD'):
            t1.wait()

def test_hello_mem(self, test_env):
    self.start(test_env)
    with test_env.client.new_session() as s:
        t1 = self.task_hello(blob("world"))
        t1.output.keep()
        s.submit()
        assert t1.output.fetch().get_bytes() == b"Hello world!"

def test_cpp_invalid_outputs(test_env):
    test_env.start(1, executor="cpptester")
    with test_env.client.new_session() as s:
        obj = blob("WORLD")
        t1 = Task("cpptester/hello", inputs=(obj,), outputs=3)
        s.submit()
        with pytest.raises(TaskException, match='3'):
            t1.wait()

def test_fetch_removed_object_fails(test_env):
    test_env.start(1)
    with test_env.client.new_session() as s:
        t1 = tasks.sleep(0.01, blob("abc123456"))
        s.submit()
        with pytest.raises(RainException):
            t1.output.fetch()
        t1.wait()

def test_invalid_outputs(self, test_env):
    self.start(test_env)
    with test_env.client.new_session() as s:
        obj = blob("WORLD")
        t1 = self.task("hello", inputs=(obj,), outputs=3)
        s.submit()
        with pytest.raises(TaskException, match='3'):
            t1.wait()

def test_sleep3_last(test_env):
    test_env.start(1)
    with test_env.client.new_session() as s:
        t1 = tasks.Sleep(blob("b"), 0.2)
        t2 = tasks.Sleep(t1, 0.2)
        t3 = tasks.Sleep(t2, 0.2)
        s.submit()
        test_env.assert_duration(0.4, 0.8, lambda: t3.wait())

def test_blob_construction(fake_session):
    with fake_session as session:
        b1 = blob("abc")
        assert b1.session == session
        b2 = blob(b"xyz")
        assert b2.session == session
        assert b1.id != b2.id

        obj = [1, {'a': [4, 5]}]
        b3 = blob(obj, encode='pickle')
        assert pickle.loads(b3.data) == obj
        assert b3.content_type == 'pickle'

        b3b = pickled(obj)
        assert b3b.data == b3.data
        assert b3b.content_type == 'pickle'

        b4 = blob(obj, encode='json')
        assert json.loads(b4.data.decode()) == obj
        assert rain.common.content_type.decode_value(b4.data, "json") == obj

        txt = "asžčďďŠ"
        b5 = blob(txt, encode='text:latin2')
        assert b5.data.decode('latin2') == txt

        with pytest.raises(RainException):
            blob(123)

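# `blob()` accepts ready-made bytes/str payloads or an `encode=` hint
# (exercised above: 'pickle', 'json', 'text:latin2') that serializes the
# Python value and records the matching content type; values it cannot handle,
# such as a bare int, are rejected with RainException, as the final check
# verifies.
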
def test_program_input_file(test_env):
    """Setting input file for program"""
    test_env.start(1)
    program = Program(("/bin/grep", "ab", Input("in1")), stdout="output")
    with test_env.client.new_session() as s:
        t1 = program(in1=blob("abc\nNOTHING\nabab"))
        t1.output.keep()
        s.submit()
        assert t1.output.fetch().get_bytes() == b"abc\nabab\n"

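# `Program` is a reusable task template: `Input("in1")` marks an argument that
# will be materialized as a file, and calling the template with a data object
# (`program(in1=...)`) creates a concrete execute task whose stdout becomes
# its "output" data object.
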
def test_hello_chain(self, test_env):
    self.start(test_env)
    with test_env.client.new_session() as s:
        t1 = blob("world")
        for i in range(30):
            t1 = self.task_hello(t1).output
        t1.keep()
        s.submit()
        assert t1.fetch().get_bytes() == b"Hello " * 30 + b"world" + b"!" * 30

def test_concat1(test_env):
    """Merge several short blobs"""
    test_env.start(1)
    with test_env.client.new_session() as s:
        t1 = tasks.Concat(
            [blob(x) for x in ("Hello ", "", "", "world", "!", "")])
        t1.output.keep()
        s.submit()
        assert t1.output.fetch().get_bytes() == b"Hello world!"

def test_py_pass_through(test_env):
    @remote(outputs=("out1", "out2"))
    def test(ctx, data1, data2):
        return {"out1": data1, "out2": data2}

    test_env.start(1)
    cat = Program("/bin/cat input1", stdout="output",
                  input_paths=[Input("input1")])
    with test_env.client.new_session() as s:
        data = b"ABC" * 10000
        t0 = cat(input1=blob(data))
        t1 = test(t0, blob("Hello!"))
        t1.outputs["out1"].keep()
        t1.outputs["out2"].keep()
        s.submit()
        assert data == t1.outputs["out1"].fetch().get_bytes()
        assert b"Hello!" == t1.outputs["out2"].fetch().get_bytes()
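
# The pass-through test shows that a `@remote` task may return the data
# instances it received directly as named outputs (here "out1" and "out2")
# without materializing them itself; the client then fetches the original
# bytes intact, whether they came from an external program or another blob.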