Example 1
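A test helper from the TVM TOPI test suite: it builds topi.concatenate over a list of input shapes, picks a target-specific schedule when one is available, and checks the compiled result against numpy.concatenate on every enabled target.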
import numpy as np

import tvm
import tvm.testing
import tvm.topi.testing
from tvm import te, topi


def verify_concatenate(shapes, axis):
    def get_concat_schedule(target):
        # Prefer a target-specific concatenate schedule; fall back to the
        # generic injective schedule for targets that lack one.
        schedule_map = {
            "cpu": topi.x86.schedule_concatenate,
            "arm_cpu": topi.arm_cpu.schedule_concatenate,
        }
        if isinstance(target, str):
            target = tvm.target.Target(target)
        for key in target.keys:
            if key in schedule_map:
                return schedule_map[key]
        return tvm.topi.testing.get_injective_schedule(target)

    tensor_l = []
    for i, shape in enumerate(shapes):
        tensor_l.append(te.placeholder(shape, name="A" + str(i)))
    out_tensor = topi.concatenate(a_tuple=tensor_l, axis=axis)

    def check_device(device, ctx):
        print("Running on target: %s" % device)
        with tvm.target.Target(device):
            s = get_concat_schedule(device)(out_tensor)

        # Build the scheduled op and compare it against a numpy reference.
        foo = tvm.build(s, tensor_l + [out_tensor], device, name="concatenate")
        data_npys = [np.random.normal(size=shape).astype(tensor_l[0].dtype) for shape in shapes]
        out_npy = np.concatenate(data_npys, axis=axis)
        data_nds = [tvm.nd.array(data_npy, ctx) for data_npy in data_npys]
        out_nd = tvm.nd.empty(out_npy.shape, ctx=ctx, dtype=out_tensor.dtype)
        foo(*(data_nds + [out_nd]))
        tvm.testing.assert_allclose(out_nd.asnumpy(), out_npy)

    for device, ctx in tvm.testing.enabled_targets():
        check_device(device, ctx)
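Typical invocations look like the following (these shapes are illustrative, not from the original listing):

verify_concatenate([(2,), (2,)], 0)
verify_concatenate([(2, 3, 4), (2, 2, 4), (2, 5, 4)], 1)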
Example 2
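A gradient test for TOPI operators built on TVM's tensor-expression autodiff (te.gradient). The check_grad helper is defined elsewhere in the same test file; a hedged sketch of what such a helper can look like follows the listing.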
from tvm import te, topi

# check_grad is a gradient-checking helper defined elsewhere in the same test
# file; it is assumed to be in scope here.

def test_topi():
    X = te.placeholder((1, 2, 4, 4), name="X")
    W = te.placeholder((5, 2, 3, 3), name="W")
    W1 = te.placeholder((2, 5, 3, 3), name="W1")
    W2 = te.placeholder((1, ), name="W2")

    # conv2d: gradients w.r.t. both the data and the weight.
    R = topi.nn.conv2d(X, W, 1, 1, 1)
    check_grad(R, [X, W])

    R1 = topi.nn.conv2d(topi.nn.relu(R), W1, 1, 0, 1)
    check_grad(R1, [X, W, W1])

    R = topi.broadcast_to(W2, (5, 2, 3, 3))
    check_grad(R, [W2])

    R = topi.nn.conv2d(X, topi.broadcast_to(W2, (5, 2, 3, 3)), 1, 1, 1)
    check_grad(R, [X, W2])

    # Average and max pooling: 2x2 kernel, stride 2, no padding.
    R = topi.nn.pool(X, [2, 2], [2, 2], [0, 0, 0, 0], "avg")
    check_grad(R, X)

    R = topi.nn.pool(X, [2, 2], [2, 2], [0, 0, 0, 0], "max")
    check_grad(R, X)

    X = te.placeholder((1, 2, 4, 4), name="X")  # 32 elements, matching the reshape below
    R = topi.reshape(X, (1, 32))
    check_grad(R, [X])

    X = te.placeholder((1, 2, 5, 5), name="X")
    W = te.placeholder((2, 2, 3, 3), name="W")

    S = topi.reshape(X, (1, 50))
    check_grad(S, [X])

    R = X + topi.nn.conv2d(X + topi.nn.conv2d(X, W, 1, 1, 1), W, 1, 1, 1)
    check_grad(R, [X, W])

    S = topi.nn.softmax(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.sigmoid(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.tanh(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])

    S = topi.nn.log_softmax(topi.reshape(R, (1, 50)))
    check_grad(S, [X, W])
    # Differentiate w.r.t. W only; X is passed as a non-differentiated argument.
    check_grad(S, [W], [X])

    X = te.placeholder((1, 2, 3, 5), name="X")
    Y = te.placeholder((1, 2, 7, 5), name="Y")
    S = topi.concatenate((X, Y), 2)
    check_grad(S, [X, Y])

    X = te.placeholder((1, 2, 6, 5), name="X")
    (S, R) = topi.split(X, 2, 2)
    check_grad(S, [X])
    check_grad(R, [X])
    R1 = topi.concatenate((S, R), 2)
    check_grad(R1, [X])
    R2 = topi.concatenate((R, S), 2)
    check_grad(R2, [X])

    X = te.placeholder((4, 5), name="X")
    I = te.placeholder((100, ), name="I", dtype="int32")
    # The integer index tensor I is not differentiable, so it is passed as an
    # extra argument rather than as a gradient target.
    R = topi.take(X, topi.abs(I))
    check_grad(R, [X], [I])

    W = te.placeholder((5, 5), name="W")
    # Softmax written out by hand: exp(dense) normalized by its row sums.
    exps = topi.exp(topi.nn.dense(X, W))
    sumexps = topi.sum(exps, axis=-1, keepdims=True)
    R = exps / sumexps
    check_grad(R, [X, W], data_range=(-1, 1))
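check_grad itself does not appear in this listing. Below is a minimal sketch of what such a helper can look like, assuming the usual TVM autodiff flow; it is not the exact implementation from the test file. A head of ones makes te.gradient return the gradients of out.sum(), which are then compared against finite differences via tvm.testing.check_numerical_grads.

import numpy as np

import tvm
import tvm.testing
from tvm import te, topi


def check_grad_sketch(out, inputs, data_range=(-10, 10)):
    inputs = inputs if isinstance(inputs, list) else [inputs]

    # Adjoints of out.sum() with respect to each input.
    ones = topi.full_like(out, 1.0)
    grads = te.gradient(out, inputs, head=ones)

    # Compile the adjoint computation (gradient outputs first, then inputs).
    sched = te.create_schedule([g.op for g in grads])
    mgrad = tvm.build(sched, list(grads) + inputs)

    low, high = data_range
    data = [
        np.random.uniform(low, high, size=[int(d) for d in p.shape]).astype(p.dtype)
        for p in inputs
    ]
    grad_nds = [tvm.nd.empty([int(d) for d in g.shape], g.dtype) for g in grads]
    mgrad(*grad_nds, *[tvm.nd.array(d) for d in data])

    # Numerical reference: finite differences of out.sum() w.r.t. each input.
    mout = tvm.build(te.create_schedule(out.op), [out] + inputs)

    def forward(*in_data):
        out_nd = tvm.nd.empty([int(d) for d in out.shape], out.dtype)
        mout(out_nd, *[tvm.nd.array(d) for d in in_data])
        return out_nd.asnumpy().sum()

    tvm.testing.check_numerical_grads(forward, data, [g.asnumpy() for g in grad_nds])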
Example 3
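A compute callback in the FTVMCompute style used by TVM's Relay op machinery: given the op's attributes, input tensors, and output type, it returns the list of output tensors, here by lowering directly to topi.concatenate along the axis stored in the attributes.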
from tvm import topi

def compute_concat(attrs, inputs, output_type):
    return [topi.concatenate(inputs, attrs.axis)]
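For context, a minimal standalone sketch of what this compute produces (the shapes and names are illustrative assumptions):

from tvm import te, topi

a = te.placeholder((2, 3), name="a")
b = te.placeholder((2, 3), name="b")
out = topi.concatenate([a, b], axis=0)  # two (2, 3) tensors joined along axis 0
print(out.shape)  # [4, 3]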