def verify_full(shape, dtype, fill_value):
    """Check topi.full and topi.full_like against numpy.full.

    Builds both operators, then for each enabled device compiles and runs
    them, comparing the results to a memoized numpy reference tensor.
    """
    placeholder = te.placeholder(shape, dtype=dtype, name="A")
    like_tensor = topi.full_like(placeholder, fill_value=fill_value)
    full_tensor = topi.full(shape=shape, dtype=dtype, fill_value=fill_value)
    sched_like = te.create_schedule([like_tensor.op])
    sched_full = te.create_schedule([full_tensor.op])

    @memoize("topi.tests.test_topi_full")
    def get_ref_data():
        # numpy ground truth, cached across runs
        return np.full(shape, fill_value, dtype)

    ref = get_ref_data()

    def check_device(device):
        if not tvm.runtime.enabled(device):
            print("Skip because %s is not enabled" % device)
            return
        ctx = tvm.context(device, 0)
        out = tvm.nd.array(np.zeros(shape, dtype=dtype), ctx)

        # full_like takes a (dummy) input tensor and an output buffer
        fn = tvm.build(sched_like, [placeholder, like_tensor], device, name="full_like")
        fn(tvm.nd.array(np.zeros(shape, dtype), ctx), out)
        tvm.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)

        # full takes only the output buffer
        fn = tvm.build(sched_full, [full_tensor], device, name="full")
        fn(out)
        tvm.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)

    for device in ["llvm"]:
        check_device(device)
def check_device(device, host="llvm"):
    """Build forward and gradient modules on `device` and validate gradients.

    Gradients of `out` w.r.t. `inputs` (with `args` held as extra tensor
    arguments) are compared either against the closure's `desired_grads`
    list, or against numerical differentiation of the forward module.
    NOTE(review): relies on enclosing-scope names `out`, `inputs`, `args`,
    `data_range`, `assert_no_jacobian`, `desired_grads`.
    """
    dev = tvm.context(device, 0)
    if not tvm.runtime.enabled(host):
        return
    if not dev.exist:
        print("skip because %s is not enabled.." % device)
        return

    # Forward module: out = f(inputs, args)
    fwd_sched = te.create_schedule(out.op)
    fwd_mod = tvm.build(fwd_sched, [out] + inputs + args)
    out_shape = get_const_tuple(out.shape)

    # Random test data drawn uniformly from the closure's data_range
    lo, hi = data_range
    in_nd = [
        tvm.nd.array(
            np.random.uniform(lo, hi, size=get_const_tuple(t.shape)).astype(t.dtype))
        for t in inputs
    ]
    arg_nd = [
        tvm.nd.array(
            np.random.uniform(lo, hi, size=get_const_tuple(t.shape)).astype(t.dtype))
        for t in args
    ]

    head = topi.full_like(out, 1.0)
    # we provide head to sum and reduce the output dimension,
    # which equals to grad(out.sum(), inputs)
    grads = te.gradient(out, inputs, head=head)
    bwd_sched = te.create_schedule([g.op for g in grads])
    bwd_mod = tvm.build(bwd_sched, list(grads) + inputs + args)

    if assert_no_jacobian:
        # TODO(yzhliu): it is better to visit the expression and do assertion
        ir_text = str(
            tvm.lower(bwd_sched, list(grads) + inputs + args, simple_mode=True))
        assert "jacobian" not in ir_text, ir_text

    grad_nd = [
        tvm.nd.empty(get_const_tuple(t.shape), g.dtype)
        for t, g in zip(inputs, grads)
    ]
    bwd_mod(*grad_nd, *in_nd, *arg_nd)
    grad_np = [g.asnumpy() for g in grad_nd]

    if desired_grads:
        # Exact expected gradients were supplied by the caller
        assert isinstance(desired_grads, list)
        for actual, desired in zip(grad_np, desired_grads):
            assert_allclose(actual, desired, rtol=0.1, atol=1e-2)
    else:
        # Fall back to numerical gradient checking of the scalar out.sum()
        def forward(*in_data):
            out_data = tvm.nd.empty(out_shape, out.dtype)
            fwd_mod(out_data, *[tvm.nd.array(d) for d in list(in_data)])
            return out_data.asnumpy().sum()

        check_numerical_grads(
            forward, [d.asnumpy() for d in in_nd + arg_nd], grad_np)
def ones_like_compute(attrs, inputs, output_type):
    """Compute for ones_like: a tensor of ones matching inputs[0]."""
    assert len(inputs) == 1
    return [topi.full_like(inputs[0], 1.0)]