def test_recursion():
    """
    Program:
       let f(n: i32, data: f32) -> f32 = {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    f = relay.Var("f")
    n = relay.Var("n", e.int32)
    data = relay.Var("data", e.float32)
    funcbody = relay.If(
        equal(n, relay.const(0)), data,
        relay.Call(f, [subtract(n, relay.const(1)),
                       log(data)]))
    value = relay.Function([n, data], funcbody, e.float32, [])
    orig = relay.Let(
        f, value,
        relay.Call(f,
                   [relay.const(2), relay.const(10000.0)]))
    assert alpha_equal(dead_code_elimination(orig), orig)
    assert alpha_equal(dead_code_elimination(relay.Let(f, value, e.three)),
                       e.three)
def test_recursion():
    """
    Program:
       let f(n: i32, data: f32) -> f32 = {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    f = relay.Var("f")
    f1 = relay.Var("f1")
    n = relay.Var("n", e.int32)
    data = relay.Var("data", e.float32)
    funcbody = relay.If(
        equal(n, relay.const(0)), data,
        relay.Call(f1, [subtract(n, relay.const(1)),
                        log(data)]))
    value = relay.Function([n, data], funcbody, e.float32, [])
    orig = relay.Let(f, value,
                     relay.Call(
                         f,
                         [relay.const(2), relay.const(10000.0)]))
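    # f is called in the Let body, so dead code elimination must keep the binding.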
    dced = run_opt_pass(orig, transform.DeadCodeElimination())
    orig = run_opt_pass(orig, transform.InferType())
    assert graph_equal(dced, orig)
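    # Here the Let body never uses f, so the dead binding is eliminated.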
    dced = run_opt_pass(relay.Let(f, value, e.three),
                        transform.DeadCodeElimination())
    assert alpha_equal(dced, e.three)
def test_recursion():
    """
    Program:
       let f(n: i32, data: f32) -> f32 = {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    f = relay.Var("f")
    n = relay.Var("n")
    np = relay.Param(n, e.int32)
    data = relay.Var("data")
    datap = relay.Param(data, e.float32)
    funcbody = relay.If(equal(n, convert(0)), data,
                        f(subtract(n, convert(1)), log(data)))
    value = relay.Function([np, datap], e.float32, funcbody, [])
    orig = relay.Let(f, value, f(convert(2), convert(10000.0)), e.float32)
    assert alpha_equal(dead_code_elimination(orig), orig)
    assert alpha_equal(
        dead_code_elimination(relay.Let(f, value, e.three, e.float32)),
        e.three)
Example #4
def test_single_op():
    "Program: fn (x : float32) { let t1 = f(x); t1 }"
    b = IRBuilder()
    with b.function(('x', 'float32')) as func:
        x, = func.param_ids()
        t1 = b.let('t1', log(x))
        b.ret(t1)
    assert_has_type(func.to_func(), func_type(['float32'], 'float32'))
Example #5
def use_f(func):
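    """Build the recursive f(n, data) -> f32 from the recursion examples,
    bind it with a Let, and let the caller construct the Let body via func(f)."""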
    f = relay.Var("f")
    n = relay.Var("n", e.int32)
    data = relay.Var("data", e.float32)
    funcbody = relay.If(
        equal(n, relay.const(0)), data,
        relay.Call(f, [subtract(n, relay.const(1)),
                       log(data)]))
    value = relay.Function([n, data], funcbody, e.float32, [])
    return relay.Let(f, value, func(f))
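
A minimal usage sketch (an assumption, not part of the scraped source; it reuses run_opt_pass, transform, graph_equal, alpha_equal, and e from the surrounding examples): a live call to f keeps the recursive binding, while a body that ignores f lets dead code elimination drop it.

def test_use_f_sketch():
    # Live use: the Let body calls f, so the binding must survive DCE.
    orig = use_f(lambda f: relay.Call(f, [relay.const(2), relay.const(10000.0)]))
    dced = run_opt_pass(orig, transform.DeadCodeElimination())
    orig = run_opt_pass(orig, transform.InferType())
    assert graph_equal(dced, orig)
    # Dead use: the body never mentions f, so only e.three remains.
    dced = run_opt_pass(use_f(lambda f: e.three), transform.DeadCodeElimination())
    assert alpha_equal(dced, e.three)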
Example #6
def test_decl():
    """Program:
       def f(x : Tensor[f32, (10, 10)]) {
           let lx = log(x);
           return lx;
       }
    """
    b = IRBuilder()
    x = b.param('x')
    with b.decl('f', x):
        lx = b.let('lx', log(x))
        b.ret(lx)
    _, env = b.get()
    assert_decl_has_type(env, 'f', func_type(['float32'], 'float32'))
Example #7
def aten_log(inputs, attributes, scope):
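    """Dispatch a log op to the active backend: a TensorRT unary LOG layer,
    the TVM relay log op, or a plain torch.log fallback."""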
    inp = inputs[0]
    ctx = current_context()
    net = ctx.network
    if ctx.is_tensorrt and has_trt_tensor(inputs):
        layer = net.add_unary(inp, trt.UnaryOperation.LOG)
        output = layer.get_output(0)
        output.name = scope
        layer.name = scope
        return [output]
    elif ctx.is_tvm and has_tvm_tensor(inputs):
        return [_op.log(inp)]

    return [torch.log(inp)]
Example #8
def test_dual_op():
    """Program: 
       fn (x : Tensor[f32, (10, 10)]) { 
         let t1 = log(x); 
         let t2 = add(t1, x); 
         return t1;
       }
    """
    b = IRBuilder()
    with b.function(('x', tensor_type(10, 10))) as func:
        x, = func.param_ids()
        t1 = b.let('t1', log(x))
        t2 = b.let('t2', add(t1, x))
        b.ret(t2)
    assert_has_type(func.to_func(), func_type(['float32'], 'float32'))
def test_recursion():
    """
    Program:
       let f(n: i32, data: f32) -> f32 = {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    f = relay.Var("f")
    n = relay.Var("n", e.int32)
    data = relay.Var("data", e.float32)
    funcbody = relay.If(equal(n, relay.const(0)),
                        data,
                        relay.Call(f, [subtract(n, relay.const(1)),
                                       log(data)]))
    value = relay.Function([n, data], funcbody, e.float32, [])
    orig = relay.Let(f, value, relay.Call(f, [relay.const(2), relay.const(10000.0)]))
    assert alpha_equal(dead_code_elimination(orig), orig)
    assert alpha_equal(dead_code_elimination(relay.Let(f, value, e.three)), e.three)
Example #10
def test_recursion():
    """
    Program:
       def f(n: i32, data: f32) -> f32 {
          if (n == 0) {
              return data;
          } else {
              return f(n - 1, log(data));
          }
       }
       f(2, 10000);
    """
    b = IRBuilder()
    f = b.global_var('f')
    n = b.param('n', ty='int32')
    data = b.param('data', ty='float32')
    with b.decl(f, n, data):
        with b.if_scope(equal(n, convert(0))):
            b.ret(data)
        with b.else_scope():
            b.ret(f(subtract(n, convert(1)), log(data)))
    b.ret(f(convert(2), convert(10000.0)))
    assert_decl_has_type(b.env, 'f', func_type(
        ['int32', 'float32'], 'float32'))
Example #11
def test_single_op():
    "Program: fn (x : float32) { let t1 = f(x); t1 }"
    x = relay.var('x', shape=[])
    func = relay.Function([x], op.log(x))
    ttype = relay.TensorType([], dtype='float32')
    assert_has_type(func, relay.FuncType([ttype], ttype))
Example #12
def test_single_op():
    "Program: fn (%x : float32) { let %t1 = f(%x); %t1 }"
    x = relay.var("x", shape=[])
    func = relay.Function([x], op.log(x))
    ttype = relay.TensorType([], dtype="float32")
    assert_has_type(func, relay.FuncType([ttype], ttype))
Example #13
def test_single_op():
    "Program: fn (%x : float32) { let %t1 = f(%x); %t1 }"
    x = relay.var('x', shape=[])
    func = relay.Function([x], op.log(x))
    ttype = relay.TensorType([], dtype='float32')
    assert_has_type(func, relay.FuncType([ttype], ttype))