def verify_split(src_shape, indices_or_sections, axis):
    """Check topi.split against numpy.split on every available backend.

    Builds a split over a placeholder of ``src_shape`` along ``axis``,
    compiles it for each backend returned by ``get_all_backend()``, and
    asserts the device results match ``np.split`` on random input.
    """
    placeholder = te.placeholder(shape=src_shape, name="A")
    split_outputs = topi.split(placeholder, indices_or_sections, axis=axis)

    def check_device(device):
        # Skip backends that were not compiled into this TVM build.
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)

        with tvm.target.create(device):
            schedule = tvm.topi.testing.get_injective_schedule(device)(split_outputs)
        compiled = tvm.build(
            schedule, [placeholder] + list(split_outputs), device, name="split"
        )

        # Reference result from numpy on the same random data.
        host_input = np.random.normal(size=src_shape).astype(placeholder.dtype)
        host_outputs = np.split(host_input, indices_or_sections, axis=axis)

        dev_input = tvm.nd.array(host_input, ctx)
        dev_outputs = [
            tvm.nd.empty(ref.shape, ctx=ctx, dtype=split_outputs[0].dtype)
            for ref in host_outputs
        ]
        compiled(*([dev_input] + dev_outputs))

        for produced, expected in zip(dev_outputs, host_outputs):
            tvm.testing.assert_allclose(produced.asnumpy(), expected)

    for device in get_all_backend():
        check_device(device)
def test_topi():
    """Smoke-test automatic differentiation (check_grad) across topi operators.

    Each section builds a small compute graph from topi ops and asks
    ``check_grad`` to verify gradients w.r.t. the listed inputs.
    """
    # --- conv2d, relu, broadcast_to ---
    X = te.placeholder((1, 2, 4, 4), name="X")
    W = te.placeholder((5, 2, 3, 3), name="W")
    W1 = te.placeholder((2, 5, 3, 3), name="W1")
    W2 = te.placeholder((1,), name="W2")

    R = topi.nn.conv2d(X, W, 1, 1, 1)
    check_grad(R, [X, W])

    R1 = topi.nn.conv2d(topi.nn.relu(R), W1, 1, 0, 1)
    check_grad(R1, [X, W, W1])

    R = topi.broadcast_to(W2, (5, 2, 3, 3))
    check_grad(R, [W2])

    R = topi.nn.conv2d(X, topi.broadcast_to(W2, (5, 2, 3, 3)), 1, 1, 1)
    check_grad(R, [X, W2])

    # --- pooling ---
    R = topi.nn.pool(X, [2, 2], [2, 2], [0, 0, 0, 0], "avg")
    check_grad(R, X)
    R = topi.nn.pool(X, [2, 2], [2, 2], [0, 0, 0, 0], "max")
    check_grad(R, X)

    # --- reshape ---
    X = te.placeholder((1, 2, 5, 5), name="X")
    R = topi.reshape(X, (1, 32))
    check_grad(R, [X])

    # --- stacked conv2d with activations over reshaped output ---
    X = te.placeholder((1, 2, 5, 5), name="X")
    W = te.placeholder((2, 2, 3, 3), name="W")

    S = topi.reshape(X, (1, 50))
    check_grad(S, [X])

    R = X + topi.nn.conv2d(X + topi.nn.conv2d(X, W, 1, 1, 1), W, 1, 1, 1)
    check_grad(R, [X, W])

    S = topi.nn.softmax(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.sigmoid(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.tanh(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.nn.log_softmax(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])
    # Gradient only w.r.t. W, treating X as a non-differentiable input.
    check_grad(S, [W], [X])

    # --- concatenate / split ---
    X = te.placeholder((1, 2, 3, 5), name="X")
    Y = te.placeholder((1, 2, 7, 5), name="Y")
    S = topi.concatenate((X, Y), 2)
    check_grad(S, [X, Y])

    X = te.placeholder((1, 2, 6, 5), name="X")
    (S, R) = topi.split(X, 2, 2)
    check_grad(S, [X])
    check_grad(R, [X])
    R1 = topi.concatenate((S, R), 2)
    check_grad(R1, [X])
    R2 = topi.concatenate((R, S), 2)
    check_grad(R2, [X])

    # --- take with integer indices (indices are non-differentiable) ---
    X = te.placeholder((4, 5), name="X")
    I = te.placeholder((100,), name="I", dtype="int32")
    R = topi.take(X, topi.abs(I))
    check_grad(R, [X], [I])

    # --- softmax built by hand from exp / sum / division ---
    W = te.placeholder((5, 5), name="W")
    exps = topi.exp(topi.nn.dense(X, W))
    sumexps = topi.sum(exps, axis=-1, keepdims=True)
    R = exps / sumexps
    check_grad(R, [X, W], data_range=(-1, 1))