def test_recursive_concat():
    """Run a while-loop that grows an (Any, 1) tensor by concatenation.

    Relay pseudo-code of the program under test:

        fn @concat_loop(%i: int32, %st: (any, 1)) -> (any, 1) {
          if (%i < 10) {
            let %i_vec = reshape(%i, newshape=(1, 1))
            let %new_st = concatenate((%st, %i_vec), axis=0)
            concat_loop(%i + 1, %new_st)
          } else {
            %st
          }
        }

    The result for a start value of 0 is the column vector [0, 0..9].
    """
    # Loop-carried variables: the counter and the accumulated tensor.
    # The first dimension is relay.Any() because it grows each iteration.
    i = relay.var("i", shape=(), dtype="int32")
    st = relay.var("st", shape=(relay.Any(), 1), dtype="int32")

    def _cond(i, st):
        # Continue while i < 10; min() reduces the comparison to a scalar.
        return relay.op.min(relay.op.less(i, int32(10)))

    def _body(i, st):
        # Reshape the scalar counter to (1, 1) so it can be concatenated
        # onto the accumulator along axis 0.
        i_vec = relay.op.reshape(i, (1, 1))
        ret = relay.op.concatenate([st, i_vec], axis=0)
        return i + int32(1), ret

    loop = while_loop(_cond, [i, st], _body)
    start = relay.var("start", shape=(), dtype="int32")
    # Seed the accumulator with a single zero row; the loop returns a tuple
    # (counter, tensor) and we keep only the tensor.
    body = loop(start, relay.op.reshape(relay.const(0), newshape=(1, 1)))
    func = relay.Function([start], relay.TupleGetItem(body, 1))
    mod = tvm.IRModule()
    mod["main"] = func
    # Integer literal matches the declared int32 dtype (was 0.0, a float).
    data = np.array(0, dtype="int32")
    ref = np.array([0] + list(range(10))).reshape((11, 1)).astype("int32")
    check_result([data], mod, ref)
def test_loop_free_var():
    """Check while_loop bodies both with and without a captured free var.

    The first body sums the loop counter (0 + 1 + ... + 9 == 45); the
    second adds the free variable ``x`` on every one of the 10 iterations,
    so with x == 1 the result is 10.
    """
    x = relay.var("x", shape=(), dtype="int32")
    i = relay.var("i", shape=(), dtype="int32")
    s = relay.var("s", shape=(), dtype="int32")

    def cond(i, _):
        return i < relay.const(10, dtype="int32")

    def body_no_free_var(i, acc):
        incr = relay.const(1, "int32")
        return i + incr, acc + i

    def body_with_free_var(i, acc):
        incr = relay.const(1, "int32")
        # ``x`` is free here: it is not a loop variable, so it must be
        # picked up by free-variable analysis below.
        return i + incr, acc + x

    cases = zip(
        [[], [1]],
        [body_no_free_var, body_with_free_var],
        [45, 10],
    )
    for call_args, loop_body, expected in cases:
        loop = while_loop(cond, [i, s], loop_body)
        result_tuple = loop(
            relay.const(0, dtype="int32"),
            relay.zeros(shape=(), dtype="int32"),
        )
        accumulator = relay.TupleGetItem(result_tuple, 1)
        mod = tvm.IRModule()
        # free_vars makes the captured ``x`` (when present) a parameter.
        mod["main"] = relay.Function(relay.analysis.free_vars(accumulator), accumulator)
        check_result(call_args, expected, mod=mod)
def test_dynamic_concat(): """ fn @concat_loop(%i: int32, %st: (any, 1)) -> (any, 1) { if (%i < 10) { let %i = reshape(cast(i, "float32"), newshape=(1, )) let %new_st = concatenate((st, i), axis=0) concat_loop(%i + 1, ) } else { st } } """ # Initial Values. i = relay.var('i', shape=(), dtype='int32') st = relay.var('st', shape=(relay.Any(), 1), dtype='int32') def _cond(i, st): return relay.op.min(relay.op.less(i, int32(10))) def _body(i, st): i_vec = relay.op.reshape(i, (1, 1)) ret = relay.op.concatenate([st, i_vec], axis=0) return i + int32(1), ret loop = while_loop(_cond, [i, st], _body) start = relay.var('start', shape=(), dtype='int32') body = loop(start, relay.op.reshape(relay.const(0), newshape=(1, 1))) func = relay.Function([start], relay.TupleGetItem(body, 1)) func = infer_type(func)
def test_recursive_concat_with_wrong_annotation():
    """Type inference must reject a loop whose accumulator shape is fixed.

    The accumulator is annotated (1, 1), but each iteration concatenates a
    (1, 1) row onto it, producing a (2, 1) value that conflicts with the
    annotation — so ``infer_type`` is expected to raise.

    Relay text of the ill-typed program:

        v0.0.1
        fn (%start: int32) {
            %7 = {
            let %while_loop = fn (%i: int32, %st: Tensor[(1, 1), int32]) {
                %0 = less(%i, 10)
                %1 = min(%0)
                if (%1) {
                %2 = add(%i, 1)
                %3 = reshape(%i, newshape=[1, 1])
                %4 = (%st, %3)
                /* The result of concat should be 1,1 but it is 2, 1. */
                %5 = concatenate(%4)
                %while_loop(%2, %5)
                } else {
                (%i, %st)
                }
            }
            %6 = reshape(0, newshape=[1, 1])
            %while_loop(%start, %6)
            }
            %7.1
        }
    """
    # Loop variables — note the deliberately wrong fixed (1, 1) annotation.
    i = relay.var('i', shape=(), dtype='int32')
    st = relay.var('st', shape=(1, 1), dtype='int32')

    def _cond(i, st):
        return relay.op.min(relay.op.less(i, int32(10)))

    def _body(i, st):
        # Concatenating (1,1) + (1,1) yields (2,1), violating st's type.
        appended = relay.op.concatenate([st, relay.op.reshape(i, (1, 1))], axis=0)
        return i + int32(1), appended

    loop = while_loop(_cond, [i, st], _body)
    start = relay.var('start', shape=(), dtype='int32')
    init_st = relay.op.reshape(relay.const(0), newshape=(1, 1))
    result = loop(start, init_st)
    func = relay.Function([start], relay.TupleGetItem(result, 1))

    raised = False
    try:
        func = infer_type(func)
    except Exception as e:
        raised = True
        # The diagnostic must point at the dimension-0 mismatch.
        assert "in particular dimension 0 conflicts 2 does not match 1" in str(
            e)
    assert raised
# NOTE(review): this function duplicates the name of an earlier
# ``test_recursive_concat`` in this file, so only one of the two is
# collected/run — consider renaming one of them (e.g. with a ``_debug``
# suffix) to restore both tests.
def test_recursive_concat():
    """Execute the (Any, 1) concat loop on the debug executor.

    Relay pseudo-code of the program under test:

        fn @concat_loop(%i: int32, %st: (any, 1)) -> (any, 1) {
          if (%i < 10) {
            let %i_vec = reshape(%i, newshape=(1, 1))
            let %new_st = concatenate((%st, %i_vec), axis=0)
            concat_loop(%i + 1, %new_st)
          } else {
            %st
          }
        }

    For a start value of 0 the result is the column vector [0, 0..9].
    """
    # Loop-carried variables: the counter and the growing accumulator.
    i = relay.var('i', shape=(), dtype='int32')
    st = relay.var('st', shape=(relay.Any(), 1), dtype='int32')

    def _cond(i, st):
        # Continue while i < 10; min() reduces the comparison to a scalar.
        return relay.op.min(relay.op.less(i, int32(10)))

    def _body(i, st):
        # Append the counter as a new (1, 1) row along axis 0.
        i_vec = relay.op.reshape(i, (1, 1))
        ret = relay.op.concatenate([st, i_vec], axis=0)
        return i + int32(1), ret

    loop = while_loop(_cond, [i, st], _body)
    start = relay.var('start', shape=(), dtype='int32')
    body = loop(start, relay.op.reshape(relay.const(0), newshape=(1, 1)))
    func = relay.Function([start], relay.TupleGetItem(body, 1))
    mod = tvm.IRModule()
    mod["main"] = func
    # Integer literal matches the declared int32 dtype (was 0.0, a float).
    data = np.array(0, dtype='int32')
    ref = np.array([0] + list(range(10))).reshape((11, 1)).astype("int32")
    # TODO(@jroesch): After LambdaLift pass, TypeInfer pass will fail
    # so currently we cannot run this test case on VM
    for kind in ["debug"]:
        ex = relay.create_executor(kind, mod=mod, ctx=tvm.cpu(), target="llvm")
        result = ex.evaluate()(data)
        np.testing.assert_allclose(result.asnumpy(), ref)