def test_dag_execute() -> None:
    """Test execution of a _dag."""
    with Fun(MockServer(), defaults=options(distributed=False)):
        dat = put(b"bla bla")
        step1 = morph(lambda x: x.decode().upper().encode(), dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        output = step2.stdout

        # make queue
        execute(output)
        out = take(output)
        assert out == b"BLA BLAbla bla"

def test_raising_funsie() -> None:
    """Test funsie that raises an error.

    This test is specifically designed to catch the bug fixed in fa9af6a4 where
    funsies that raised did not release their locks, leading to a race condition.
    """

    def raising_fun(*inp: str) -> bytes:
        raise RuntimeError("this funsie raises.")

    with f.ManagedFun(nworkers=2):
        s0a = f.morph(lambda x: x, "bla blabla")
        s0b = f.morph(lambda x: x, "blala")
        s1 = f.reduce(raising_fun, "bla bla", s0a, s0b, strict=True)
        f.execute(s1)
        f.wait_for(s1, timeout=2)
        with pytest.raises(UnwrapError):
            _ = f.take(s1)

        s2 = f.morph(lambda x: x, s1)
        f.execute(s2)
        f.wait_for(s2, timeout=0.5)

def test_timeout_deadlock() -> None:
    """Test funsies that time out.

    Here we explicitly check if dependents are still enqueued or if the whole
    thing deadlocks.
    """

    def timeout_fun(*inp: str) -> bytes:
        time.sleep(3.0)
        return b"what"

    def cap(inp: bytes) -> bytes:
        return inp.capitalize()

    with f.ManagedFun(nworkers=2):
        # Test when python function times out
        s1 = f.reduce(timeout_fun, "bla bla", "bla bla", opt=f.options(timeout=1))
        s1b = f.morph(cap, s1)

        # Test when shell function times out
        s2 = f.shell("sleep 20", "echo 'bla bla'", opt=f.options(timeout=1))
        s2b = f.morph(cap, s2.stdouts[1])

        f.execute(s1b, s2b)

        # Check err for reduce
        f.wait_for(s1b, timeout=1.5)
        err = f.take(s1b, strict=False)
        assert isinstance(err, f.errors.Error)
        assert err.kind == f.errors.ErrorKind.JobTimedOut
        assert err.source == s1.parent

        # Check err for shell
        f.wait_for(s2b, timeout=1.5)
        err = f.take(s2b, strict=False)
        assert isinstance(err, f.errors.Error)
        assert err.kind == f.errors.ErrorKind.JobTimedOut
        assert err.source == s2.hash

def test_infer_errs() -> None:
    """Test inference applied to functions."""
    with f.Fun(MockServer()):
        a = f.put(b"bla bla")
        b = f.put(3)

        with pytest.raises(TypeError):
            f.py(lambda x, y, z: (x, y), a, a, b)

        # should NOT raise
        f.py(
            lambda x, y, z: (x, y),
            a,
            a,
            b,
            out=[types.Encoding.blob, types.Encoding.blob],
        )

        def i1o2(x: bytes) -> Tuple[bytes, bytes]:
            return x, x

        def i2o1(x: bytes, y: bytes) -> bytes:
            return x

        with pytest.raises(TypeError):
            out = f.morph(i1o2, a)  # type:ignore # noqa:F841

        with pytest.raises(TypeError):
            out = f.reduce(i1o2, a)  # type:ignore # noqa:F841

        with pytest.raises(TypeError):
            out = f.reduce(lambda x, y: x, a, b)  # type:ignore # noqa:F841

        # If we pass out= then the inference is skipped
        out = f.morph(i1o2, a, out=types.Encoding.blob)  # type:ignore # noqa:F841
        out = f.reduce(i1o2, a, out=types.Encoding.blob)  # type:ignore # noqa:F841

def test_dag_execute_same_root() -> None:
    """Test execution of two dags that share the same origin."""
    with Fun(MockServer(), defaults=options(distributed=False)):
        dat = put(b"bla bla")
        step1 = morph(lambda x: x.decode().upper().encode(), dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        step2b = shell("cat file1", inp=dict(file1=step1))

        execute(step2)
        out = take(step2.stdout)
        assert out == b"BLA BLAbla bla"

        execute(step2b)
        out = take(step2b.stdout)
        assert out == b"BLA BLA"

def test_data_race(nworkers: int) -> None:
    """Test a data race when execute calls are interleaved."""
    with f.ManagedFun(nworkers=nworkers):
        dat = f.put(b"bla bla")
        step1 = f.morph(lambda x: x.decode().upper().encode(), dat)
        step2 = f.shell(
            "cat file1 file2; grep 'bla' file2 file1 > file3; date >> file3",
            inp=dict(file1=step1, file2=dat),
            out=["file2", "file3"],
        )

        f.execute(step1)
        f.execute(step2)
        f.wait_for(step1, timeout=20.0)
        f.wait_for(step2, timeout=20.0)

def test_parametrize() -> None:
    """Test that parametrization works."""
    with Fun(MockServer(), options(distributed=False)) as db:
        dat = put(b"bla bla")
        dat2 = put(b"bla bla bla")
        step1 = morph(capitalize, dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        final = shell(
            "cat file1 file3", inp={"file1": step1, "file3": step2.stdout}
        )

        pinp = {"input": dat}
        pout = {"final.stdout": final.stdout, "step1": step1}
        new_inp = {"input": dat2}

        ops = _p._parametrize_subgraph(db, pinp, pout)
        edges = _p._subgraph_edges(db, ops)
        sorted_ops = _p._subgraph_toposort(ops, edges)
        pinp2 = dict([(k, v.hash) for k, v in pinp.items()])
        pout2 = dict([(k, v.hash) for k, v in pout.items()])
        new_out = _p._do_parametrize(db, sorted_ops, pinp2, pout2, new_inp)

        # re-run with dat2, check if the same.
        step1 = morph(capitalize, dat2)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat2))
        final = shell(
            "cat file1 file3", inp={"file1": step1, "file3": step2.stdout}
        )

        assert new_out["final.stdout"] == final.stdout
        assert new_out["step1"] == step1

def test_parametric() -> None:
    """Test that parametric DAGs work."""
    with Fun(MockServer(), options(distributed=False)) as db:
        dat = put(b"bla bla")
        step1 = morph(capitalize, dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        final = shell(
            "cat file1 file3", inp={"file1": step1, "file3": step2.stdout}
        )

        param = _p.make_parametric(
            db, "param", {"input": dat}, {"output": final.stdout}
        )
        param2 = _p.Parametric.grab(db, param.hash)
        assert param == param2

def make_data_output(
    structures: Sequence[Artefact[bytes]],
) -> Artefact[list[Any]]:
    """Take xyz structures from xtb and parse them into a list of dicts."""

    def to_dict(xyz: bytes) -> dict[str, Any]:
        as_str = xyz.decode().strip()
        energy = float(as_str.splitlines()[1].split()[1])
        return {"structure": as_str, "energy": energy}

    def sort_by_energy(*elements: dict[str, Any]) -> list[dict[str, Any]]:
        out = [el for el in elements]
        out = sorted(out, key=lambda x: x["energy"])  # type:ignore
        return out

    out = []
    for s in structures:
        out += [f.morph(to_dict, s, out=Encoding.json)]  # elements to dicts
    return f.reduce(sort_by_energy, *out)  # transform to a sorted list

def test_dag_large() -> None:
    """Test that DAG building doesn't do extra work for large operations."""
    with Fun(MockServer()) as db:
        outputs = []
        for i in range(100):
            dat = put(f"bla{i}".encode())
            step1 = morph(lambda x: x.decode().upper().encode(), dat)
            step2 = shell(
                "cat file1 file2",
                inp=dict(file1=step1, file2="something"),
                out=["file2"],
            )
            outputs += [concat(step1, step1, step2.stdout, join=b" ")]

        final = concat(*outputs, join=b"\n")
        _dag.build_dag(db, final.hash)
        assert len(_dag._dag_dependents(db, final.hash, hash_t("root"))) == 100

def test_dynamic_dump() -> None:
    """Test whether a dynamic DAG gets graphed properly."""

    def split(a: bytes, b: bytes) -> list[dict[str, int]]:
        # avoid re-binding the bytes arguments to lists
        la = a.split()
        lb = b.split()
        out = []
        for ia, ib in zip(la, lb):
            out += [
                {
                    "sum": int(ia.decode()) + int(ib.decode()),
                    "product": int(ia.decode()) * int(ib.decode()),
                }
            ]
        return out

    def apply(inp: Artefact[dict[str, Any]]) -> Artefact[str]:
        out = funsies.morph(lambda x: f"{x['sum']}//{x['product']}", inp)
        return out

    def combine(inp: Sequence[Artefact[str]]) -> Artefact[bytes]:
        def enc(inp: str) -> bytes:
            return inp.encode()

        out = [funsies.morph(enc, x, out=Encoding.blob) for x in inp]
        return funsies.utils.concat(*out)

    with funsies.ManagedFun(nworkers=1) as db:
        num1 = funsies.put(b"1 2 3 4 5")
        num2 = funsies.put(b"11 10 11 10 11")
        outputs = dynamic.sac(
            split,
            apply,
            combine,
            num1,
            num2,
            out=Encoding.blob,
        )
        outputs = funsies.morph(lambda x: x, outputs)

        nodes, artefacts, labels, links = _graphviz.export(db, [outputs.hash])
        assert len(artefacts) == 4  # not yet generated subdag parents
        print(len(artefacts))

        funsies.execute(outputs)
        funsies.wait_for(outputs, timeout=1.0)
        nodes, artefacts, labels, links = _graphviz.export(db, [outputs.hash])
        assert len(artefacts) == 22  # generated subdag parents
        assert funsies.take(outputs) == b"12//1112//2014//3314//4016//55"

def test_parametric_store_recall() -> None:
    """Test storing and recalling parametrics."""
    serv = MockServer()
    with Fun(serv, options(distributed=False)):
        a = put(3)
        b = put(4)
        s = reduce(lambda x, y: x + y, a, b)
        s2 = morph(lambda x: 3 * x, s)
        execute(s2)
        assert take(s2) == 21

        # parametrize
        p.commit("math", inp=dict(a=a, b=b), out=dict(s=s, s2=s2))

    with Fun(serv, options(distributed=False)):
        out = p.recall("math", inp=dict(a=5, b=8))
        execute(out["s2"])
        assert take(out["s2"]) == 39

def test_exec_all() -> None:
    """Test execute_all."""
    with Fun(MockServer(), defaults=options(distributed=False)):
        results = []

        def div_by(x: float) -> float:
            return 10.0 / x

        for i in range(10, -1, -1):
            val = put(float(i))
            results += [morph(div_by, val)]

        with pytest.raises(UnwrapError):
            take(results[0])

        err = utils.execute_all(results)
        print(take(results[0]))
        v = take(err, strict=False)
        assert isinstance(v, Error)
        assert v.kind == ErrorKind.ExceptionRaised

def test_dag_build() -> None:
    """Test simple DAG build."""
    with Fun(MockServer()) as db:
        dat = put(b"bla bla")
        step1 = morph(lambda x: x.decode().upper().encode(), dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        output = step2.stdout

        _dag.build_dag(db, output.hash)
        assert len(db.smembers(join(DAG_OPERATIONS, output.hash))) == 2

        # test deletion
        _dag.delete_all_dags(db)
        assert len(db.smembers(join(DAG_OPERATIONS, output.hash))) == 0

        # test new _dag
        _dag.build_dag(db, step1.hash)
        assert len(db.smembers(join(DAG_OPERATIONS, step1.hash))) == 1

        assert len(_dag.descendants(db, step1.parent)) == 1

def test_parametric_eval() -> None:
    """Test that parametrics evaluate properly."""
    with Fun(MockServer(), options(distributed=False)) as db:
        dat = put(b"bla bla")
        step1 = morph(capitalize, dat)
        step2 = shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
        final = shell(
            "cat file1 file3", inp={"file1": step1, "file3": step2.stdout}
        )
        execute(final.stdout)  # b'BLA BLABLA BLAbla bla'

        param = _p.make_parametric(
            db, "param", {"input": dat}, {"output": final.stdout}
        )
        dat2 = put(b"lol lol")
        out = param.evaluate(db, {"input": dat2})
        execute(out["output"])
        assert take(out["output"]) == b"LOL LOLLOL LOLlol lol"

def test_artefact_disk_distributed() -> None:
    """Test whether artefacts on disk work on different nodes."""
    # funsies
    import funsies as f

    with tempfile.TemporaryDirectory() as td:
        with f.ManagedFun(nworkers=1, data_url=f"file://{td}"):
            dat = f.put(b"bla bla")
            step1 = f.morph(lambda x: x.decode().upper().encode(), dat)
            step2 = f.shell("cat file1 file2", inp=dict(file1=step1, file2=dat))
            step2b = f.shell("cat file1", inp=dict(file1=step1))

            f.execute(step2)
            f.wait_for(step2, 1.0)
            out = f.take(step2.stdout)
            assert out == b"BLA BLAbla bla"

            f.execute(step2b)
            f.wait_for(step2b, 1.0)
            out = f.take(step2b.stdout)
            assert out == b"BLA BLA"

def test_error_tolerant() -> None:
    """Test error tolerant funsie."""

    def error_tolerant_fun(inp: Result[bytes]) -> bytes:
        if isinstance(inp, Error):
            return b"err"
        else:
            return b""

    with Fun(MockServer()):
        db, store = get_connection()
        s1 = funsies.shell("cp file1 file3", inp=dict(file1="bla"), out=["file2"])
        s2 = funsies.morph(error_tolerant_fun, s1.out["file2"], strict=False)

        with pytest.raises(RuntimeError):
            # Test operation not found
            run_op(db, store, s2.hash)

        run_op(db, store, s1.op)
        run_op(db, store, s2.parent)
        assert funsies.take(s2) == b"err"

def test_dag_efficient() -> None:
    """Test that DAG building doesn't do extra work."""
    with Fun(MockServer()) as db:
        dat = put(b"bla bla")
        step1 = morph(lambda x: x.decode().upper().encode(), dat)
        step2 = shell(
            "cat file1 file2", inp=dict(file1=step1, file2=dat), out=["file2"]
        )
        step2b = shell("echo 'not'", inp=dict(file1=step1))
        merge = shell(
            "cat file1 file2",
            inp=dict(file1=step1, file2=step2b.stdout),
            out=["file2"],
        )

        _dag.build_dag(db, step2.stdout.hash)
        # check that step2.stdout has no dependents of its own,
        # while step1's operation has exactly one
        assert len(_dag._dag_dependents(db, step2.stdout.hash, step2.hash)) == 0
        assert len(_dag._dag_dependents(db, step2.stdout.hash, step1.parent)) == 1

        _dag.build_dag(db, merge.hash)
        # in the merged DAG, however, step1's operation has two dependents
        assert len(_dag._dag_dependents(db, merge.hash, step1.parent)) == 2

def test_parametric_store_recall_optional() -> None:
    """Test storing a parametric with optional parameters."""
    serv = MockServer()
    with Fun(serv, options(distributed=False)):
        a = put(3)
        b = put("fun")
        s = reduce(lambda x, y: x * y, a, b)
        s2 = morph(lambda x: x.upper(), s)

        # parametrize
        p.commit("fun", inp=dict(a=a, b=b), out=dict(s=s2))

    with Fun(serv, options(distributed=False)):
        out = p.recall("fun", inp=dict(a=5))
        execute(out["s"])
        assert take(out["s"]) == "FUNFUNFUNFUNFUN"

        # nested
        out = p.recall("fun", inp=dict(b="lol"))
        out = p.recall("fun", inp=dict(b=out["s"], a=2))
        execute(out["s"])
        assert take(out["s"]) == "LOLLOLLOLLOLLOLLOL"

def combine(inp: Sequence[Artefact[str]]) -> Artefact[bytes]:
    def enc(inp: str) -> bytes:
        return inp.encode()

    out = [funsies.morph(enc, x, out=Encoding.blob) for x in inp]
    return funsies.utils.concat(*out)

def apply(inp: Artefact[dict[str, Any]]) -> Artefact[str]:
    out = funsies.morph(lambda x: f"{x['sum']}//{x['product']}", inp)
    return out

def map_reduce(inputs: Dict[str, bytes]) -> Dict[str, _graph.Artefact]:
    """Basic map reduce."""
    inp_data = inputs["inp"].split(b" ")
    outputs = []
    for el in inp_data:
        # map: uppercase every element
        outputs += [f.morph(lambda x: x.upper(), el, opt=options())]
    # reduce: join the mapped elements
    return {"out": f.utils.concat(*outputs, join=b"-")}

def combine_outer(inp: Sequence[Artefact]) -> Artefact:
    out = [
        funsies.morph(lambda y: f"{y}".encode(), x, out=Encoding.blob) for x in inp
    ]
    return funsies.utils.concat(*out, join=b",,")

def combine(inp: Sequence[Artefact]) -> Artefact:
    out = [funsies.morph(lambda y: y.encode(), x, out=Encoding.blob) for x in inp]
    return funsies.utils.concat(*out)

def test_integration(reference: str, nworkers: int) -> None:
    """Test full integration."""
    # make a temp file and copy reference database
    dir = tempfile.mkdtemp()
    if not make_reference:
        shutil.copy(os.path.join(ref_dir, reference, "appendonly.aof"), dir)
        shutil.copytree(
            os.path.join(ref_dir, reference, "data"), os.path.join(dir, "data")
        )
    shutil.copy(os.path.join(ref_dir, "redis.conf"), dir)

    # data url
    datadir = f"file://{os.path.join(dir, 'data')}"

    # Dictionary for test data
    test_data: dict[str, Any] = {}

    def update_data(a: dict[int, int], b: list[int]) -> dict[int, int]:
        for i in b:
            a[i] = a.get(i, 0) + 1
        return a

    def sum_data(x: dict[int, int]) -> int:
        return sum([int(k) * v for k, v in x.items()])

    def make_secret(x: int) -> str:
        return secrets.token_hex(x)

    # Start funsie script
    with ManagedFun(
        nworkers=nworkers,
        directory=dir,
        data_url=datadir,
        redis_args=["redis.conf"],
    ) as db:
        integers = put([5, 4, 8, 9, 9, 10, 1, 3])
        init_data = put({100: 9})
        test_data["init_data"] = init_data
        nbytes = put(4)
        s1 = reduce(update_data, init_data, integers)
        num = morph(sum_data, s1)
        date = shell("date").stdout
        test_data["date"] = date
        rand = morph(make_secret, nbytes)

        s4 = template(
            "date:{{date}}\n"
            + "some random bytes:{{random}}\n"
            + "a number: {{num}}\n"
            + "a string: {{string}}\n",
            {"date": date, "random": rand, "num": num, "string": "wazza"},
            name="a template",
        )
        test_data["s4"] = s4

        execute(s4)
        wait_for(s4, 5)

        # check that the db doesn't itself include data
        for k in db.keys():
            assert b"data" not in k

        if make_reference:
            folder = os.path.join(ref_dir, reference)
            os.makedirs(folder, exist_ok=True)

            for name, artefact in test_data.items():
                with open(os.path.join(folder, name), "wb") as f:
                    execute(artefact)
                    wait_for(artefact, 10.0)
                    out = take(artefact)
                    data2 = _serdes.encode(artefact.kind, out)
                    assert isinstance(data2, bytes)
                    f.write(data2)

            shutil.copy(
                os.path.join(dir, "appendonly.aof"),
                os.path.join(folder, "appendonly.aof"),
            )
            shutil.copytree(
                os.path.join(dir, "data"),
                os.path.join(folder, "data"),
            )
        else:
            # Test against reference dbs
            for name, artefact in test_data.items():
                execute(artefact)
                wait_for(artefact, 10.0)
                with open(os.path.join(ref_dir, reference, name), "rb") as f:
                    data = f.read()

                out = take(artefact)
                data_ref = _serdes.encode(artefact.kind, out)
                assert isinstance(data_ref, bytes)
                assert data == data_ref

    # delete tempdir
    shutil.rmtree(dir)

def apply(inp: Artefact) -> Artefact:
    out = funsies.morph(lambda x: f"{x['sum']}//{x['product']}", inp)
    return out

def test_integration(reference: str, nworkers: int) -> None:
    """Test full integration."""
    # make a temp file and copy reference database
    dir = tempfile.mkdtemp()
    if not make_reference:
        shutil.copy(os.path.join(ref_dir, reference, "appendonly.aof"), dir)
    shutil.copy(os.path.join(ref_dir, "redis.conf"), dir)

    # Dictionary for test data
    test_data = {}

    # Start funsie script
    with ManagedFun(nworkers=nworkers, directory=dir, redis_args=["redis.conf"]):
        dat = put(b"bla bla")
        step1 = morph(lambda x: x.decode().upper().encode(), dat)
        step2 = shell(
            "cat file1 file2; grep 'bla' file2 file1 > file3; date >> file3",
            inp=dict(file1=step1, file2=dat),
            out=["file2", "file3"],
        )
        echo = shell("sleep 1", "date")
        merge = reduce(
            join_bytes,
            step2.out["file3"],
            echo.stdouts[1],
            name="merger",
        )

        def tolist(x: bytes, y: bytes) -> Dict[int, str]:
            return {1: x.decode(), 8: y.decode()}

        A = py(tolist, merge, echo.stdouts[1])
        test_data["test1"] = A

        def raises(inp: bytes) -> bytes:
            raise RuntimeError("an error was raised")

        def error_count(*inp: Result[bytes]) -> bytes:
            out = utils.match_results(inp, lambda x: 0, lambda x: 1)
            return str(sum(out)).encode()

        err = morph(raises, dat)
        count = reduce(
            error_count, dat, dat, err, dat, err, err, echo.stdouts[0], strict=False
        )
        cat = utils.concat(merge, dat, err, count, echo.stdouts[1], strict=False)
        test_data["test2"] = cat

        execute(step1)
        wait_for(step1, timeout=10.0)
        execute(step2)
        wait_for(step2, timeout=10.0)
        assert take(step1) == b"BLA BLA"
        assert take(step2.stdout) == b"BLA BLAbla bla"

        if make_reference:
            folder = os.path.join(ref_dir, reference)
            os.makedirs(folder, exist_ok=True)

            for name, artefact in test_data.items():
                with open(os.path.join(folder, name), "wb") as f:
                    execute(artefact)
                    wait_for(artefact, 10.0)
                    out = take(artefact)
                    data2 = _serdes.encode(artefact.kind, out)
                    assert isinstance(data2, bytes)
                    f.write(data2)

            shutil.copy(
                os.path.join(dir, "appendonly.aof"),
                os.path.join(folder, "appendonly.aof"),
            )
        else:
            # Test against reference dbs
            for name, artefact in test_data.items():
                execute(artefact)
                wait_for(artefact, 10.0)
                with open(os.path.join(ref_dir, reference, name), "rb") as f:
                    data = f.read()

                out = take(artefact)
                data_ref = _serdes.encode(artefact.kind, out)
                assert isinstance(data_ref, bytes)
                assert data == data_ref

    shutil.rmtree(dir)