Example #1
def test_triangle_number():
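    # dcpe (partial evaluation + dead code elimination) should fold the
    # recursive call f(10) = 10 + 9 + ... + 1 down to the constant 55.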
    t = relay.TensorType([], "int32")
    x = Var("x", t)
    f_var = Var("f")
    f = Function([x], If(op.equal(x, const(0)), const(0), x + f_var(x - const(1))))
    orig = run_infer_type(Let(f_var, f, f_var(const(10))))
    assert_alpha_equal(dcpe(orig), const(55))
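These snippets are taken from the TVM Relay test suite and rely on module-level imports and helpers that are not shown: run_infer_type, run_opt_pass, dcpe, gradient, and assert_alpha_equal (the latter lived in tvm.relay.analysis in older TVM releases and was later superseded by tvm.ir.assert_structural_equal). Below is a minimal sketch of the two pass-running helpers, assuming a reasonably recent TVM build; exact import paths vary between versions. In the same tests, dcpe roughly chains transform.PartialEvaluate() with transform.DeadCodeElimination(inline_once=True), applying gradient() first when called with grad=True.

import tvm
from tvm import relay
from tvm.relay import transform

def run_opt_pass(expr, passes):
    # Wrap the bare expression in an IRModule, run the given pass(es),
    # and return the transformed "main" function (or its body when the
    # input was not itself a Function).
    passes = passes if isinstance(passes, list) else [passes]
    mod = tvm.IRModule.from_expr(expr)
    with tvm.transform.PassContext(opt_level=3):
        mod = tvm.transform.Sequential(passes)(mod)
    entry = mod["main"]
    return entry if isinstance(expr, relay.Function) else entry.body

def run_infer_type(expr):
    # Type inference only; most examples call this before comparing IR.
    return run_opt_pass(expr, transform.InferType())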
Example #2
def test_tuple_match():
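    # Matching the constant tuple (1, 1) binds a and b, so partial
    # evaluation reduces a + b to const(2).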
    a = relay.Var("a")
    b = relay.Var("b")
    clause = relay.Clause(
        relay.PatternTuple([relay.PatternVar(a),
                            relay.PatternVar(b)]), a + b)
    x = relay.Match(relay.Tuple([relay.const(1), relay.const(1)]), [clause])
    assert_alpha_equal(dcpe(x), const(2))
Example #3
def test_let_polymorphism():
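    # The let-bound identity is used at two different types (int32 and the
    # empty tuple); the inferred result type should be (int32, ()).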
    id = relay.Var("id")
    xt = relay.TypeVar("xt")
    x = relay.Var("x", xt)
    body = relay.Tuple([id(relay.const(1)), id(relay.Tuple([]))])
    body = relay.Let(id, relay.Function([x], x, xt, [xt]), body)
    body = run_infer_type(body)
    int32 = relay.TensorType((), "int32")
    assert_alpha_equal(body.checked_type, relay.TupleType([int32, relay.TupleType([])]))
Example #4
def astext(p, unify_free_vars=False):
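    # Print p to the Relay text format, re-parse it, and check equivalence.
    # Expressions with free variables cannot be re-parsed, so for them only
    # the text is returned.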
    txt = p.astext()
    if isinstance(p, Expr) and free_vars(p):
        return txt
    x = relay.fromtext(txt)
    if unify_free_vars:
        assert_graph_equal(x, p)
    else:
        assert_alpha_equal(x, p)
    return txt
Example #5
def test_concat():
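    # Gradient of concatenate([x, x], axis=1): the transformed function maps
    # the (10, 10) input to the (10, 20) forward result paired with a
    # one-element tuple holding the (10, 10) gradient.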
    shape = (10, 10)
    dtype = 'float32'
    t = relay.TensorType(shape, dtype)
    rt = relay.TensorType((10, 20), dtype)
    x = relay.var("x", t)
    y = op.concatenate([x, x], axis=1)
    func = relay.Function([x], y)
    func = run_infer_type(func)
    back_func = run_infer_type(gradient(func))
    assert_alpha_equal(back_func.checked_type, relay.FuncType([t], relay.TupleType([rt, relay.TupleType([t])])))
Example #6
def test_recursion():
    """
    Program:
       let f(n: i32, data: f32) -> f32 = {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    orig = use_f(lambda f: relay.Call(f, [relay.const(2), relay.const(10000.0)]))
    dced = run_opt_pass(orig, transform.DeadCodeElimination())
    orig = run_opt_pass(orig, transform.InferType())
    assert_alpha_equal(dced, orig)
Example #7
def test_ad():
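    # Reverse-mode AD of f(d) = d * d followed by partial evaluation; the
    # expected program returns the forward value d * d together with a
    # gradient equal to 2 * d, written as two collapse_sum_like terms.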
    shape = (10, 10)
    dtype = "float32"
    t = TensorType(shape, dtype)
    d = Var("d", t)
    f = Function([d], d * d)
    g = dcpe(f, grad=True)
    m = d * d
    x = relay.Var("x")
    o = op.ones_like(x)
    x1 = relay.Var("x1")
    grad = op.zeros_like(d) + op.collapse_sum_like(x1 * d, d) + op.collapse_sum_like(x1 * d, d)
    body = Tuple([x, Tuple([grad])])
    body = relay.Let(x1, o, body)
    expected = Function([d], relay.Let(x, m, body))
    expected = run_opt_pass(expected, transform.InferType())
    assert_alpha_equal(g, expected)
Example #8
def test_concat():
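    # Concatenating two unknown inputs leaves nothing to evaluate, so dcpe
    # must return the function unchanged.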
    t = relay.TensorType([10], "float32")
    x = Var("x", t)
    y = Var("x", t)
    orig = run_infer_type(Function([x, y], op.concatenate([x, y], axis=0)))
    assert_alpha_equal(dcpe(orig), orig)
Example #9
def test_inline():
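    # e.a is dead and e.c is used exactly once, so dead code elimination
    # with inlining reduces the body to just the free variable e.d.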
    orig = relay.Let(e.a, e.b, relay.Let(e.c, e.d, e.c))
    orig = run_opt_pass(orig, transform.DeadCodeElimination(True))
    assert_alpha_equal(Function(free_vars(orig), orig), Function([e.d], e.d))
Example #10
def roundtrip(expr):
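    # Parse the printed expression back from text and check that it is
    # alpha-equal to the original.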
    x = relay.fromtext(str(expr))
    assert_alpha_equal(x, expr)
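For reference, assert_alpha_equal succeeds when two expressions differ only in the names of their bound variables. A small illustration, assuming the same TVM version and imports as the snippets above (newer releases expose the equivalent check as tvm.ir.assert_structural_equal):

a = relay.var("a", relay.TensorType((), "int32"))
b = relay.var("b", relay.TensorType((), "int32"))
# The two identity functions differ only in the parameter name hint,
# so they are considered alpha-equal.
assert_alpha_equal(relay.Function([a], a), relay.Function([b], b))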