def verify_lrn(ishape, size, axis, bias, alpha, beta):
    """Compile and run ``sym.lrn`` on every available target and compare the
    result against the NumPy reference ``topi.testing.lrn_python``.

    Parameters
    ----------
    ishape : tuple of int
        Input shape fed to the graph as variable ``"x"``.
    size, axis, bias, alpha, beta :
        LRN operator attributes, forwarded unchanged to both ``sym.lrn``
        and the reference implementation.
    """
    dtype = "float32"

    def _build_run_compare(out_sym, x_np, ref_np):
        # Shared build/run/compare sequence for each symbol under test
        # (was duplicated verbatim for the plain-LRN and LRN+relu cases).
        for target, ctx in ctx_list():
            graph, lib, _ = nnvm.compiler.build(out_sym, target, {"x": ishape})
            m = graph_runtime.create(graph, lib, ctx)
            m.run(x=x_np)
            out = m.get_output(0, tvm.nd.empty(ishape))
            np.testing.assert_allclose(out.asnumpy(), ref_np,
                                       atol=1e-5, rtol=1e-5)

    x = sym.Variable("x")
    y = sym.lrn(x, size=size, axis=axis, bias=bias, alpha=alpha, beta=beta)

    # Plain LRN.
    x_np = np.random.uniform(size=ishape).astype(dtype)
    out_np = topi.testing.lrn_python(x_np, size, axis, bias, alpha, beta)
    _build_run_compare(y, x_np, out_np)

    # Checking LRN op followed by elementwise op relu; negative inputs are
    # included so the relu actually clips something.
    z = sym.relu(y)
    x_np = np.random.uniform(low=-10.0, high=10.0, size=ishape).astype(dtype)
    out_np = topi.testing.lrn_python(x_np, size, axis, bias, alpha, beta)
    out_np = (out_np > 0) * out_np
    _build_run_compare(z, x_np, out_np)
def verify_lrn(ishape, size, axis, bias, alpha, beta):
    """Verify ``sym.lrn`` against ``topi.testing.lrn_python`` via
    ``check_function``, both alone and followed by an elementwise relu."""
    data = sym.Variable("x", shape=ishape)
    lrn_sym = sym.lrn(data, size=size, axis=axis,
                      bias=bias, alpha=alpha, beta=beta)

    def ref_lrn(x):
        # NumPy reference implementation of LRN.
        return topi.testing.lrn_python(x, size, axis, bias, alpha, beta)

    check_function(lrn_sym, ref_lrn)

    def ref_lrn_relu(x):
        out = ref_lrn(x)
        return (out > 0) * out

    #Checking LRN op followed by elementwise op relu
    check_function(sym.relu(lrn_sym), ref_lrn_relu,
                   in_range={'x': (-10.0, 10.0)})
def verify_lrn(ishape, size, axis, bias, alpha, beta):
    """Check ``sym.lrn`` against the reference ``topi.testing.lrn_python``,
    then check it again with an elementwise relu appended.

    NOTE(review): this definition duplicates an earlier ``verify_lrn`` in the
    same file and shadows it at import time — confirm whether one copy should
    be removed.
    """
    inp = sym.Variable("x", shape=ishape)
    out = sym.lrn(inp, size=size, axis=axis, bias=bias, alpha=alpha, beta=beta)

    def reference(arr):
        # Ground-truth LRN computed in NumPy.
        return topi.testing.lrn_python(arr, size, axis, bias, alpha, beta)

    check_function(out, reference)

    def reference_with_relu(arr):
        res = reference(arr)
        return (res > 0) * res

    #Checking LRN op followed by elementwise op relu
    check_function(sym.relu(out), reference_with_relu,
                   in_range={'x': (-10.0, 10.0)})