Example #1
def template(x_shape, perm=None, description: str = ""):
    vx = np.random.rand(*x_shape)
    vy = np.transpose(vx, perm)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    if perm is None:
        operator = make_node("Transpose", ["x"], ["y"])
    else:
        operator = make_node("Transpose", ["x"], ["y"], perm=perm)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert list(vy.shape) == list(graph.outputs[0].shape)

    generate_kernel_test_case(
        description=f"[ONNX] Transpose {description}",
        graph=graph,
        backend=["webgpu", "webassembly", "webgl"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
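In the test suite, a template like this is driven by small test functions that pass concrete parameters. A minimal hypothetical sketch of such an invocation (the shapes, permutation, and function name below are illustrative, not taken from the original tests):

def test_transpose_example():
    # perm omitted: np.transpose and ONNX Transpose both default to reversing the axes
    template(x_shape=[2, 3, 4, 5], description="default perm")
    # explicit permutation of the four axes
    template(x_shape=[2, 3, 4, 5], perm=[0, 2, 3, 1], description="perm=[0, 2, 3, 1]")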
Example #2
def template(x_shape, axis, description: str = ""):
    np_axis = 1 if axis is None else axis
    vx = np.random.rand(*x_shape)
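    # Reference Hardmax: flatten to 2-D as [prod(shape[:axis]), prod(shape[axis:])],
    # then write a 1 at each row's argmax and 0 elsewhere.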
    new_shape = [mul(vx.shape[:np_axis]), mul(vx.shape[np_axis:])]
    max_i = np.argmax(vx.reshape(new_shape), axis=1)
    vy = np.zeros(new_shape)
    vy[np.arange(vy.shape[0]), max_i] = 1

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {}
    if axis is not None:
        kwargs["axis"] = axis
    operator = make_node("Hardmax", ["x"], ["y"], **kwargs)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert tuple(vy.shape) == tuple(
        graph.outputs[0].shape
    ), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] Hardmax {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #3
def template(x_shape, axes, keepdims=None, description: str = ""):
    vx = np.random.rand(*x_shape)
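    # ONNX ReduceMax keeps the reduced axes by default, hence keepdims=True when unspecified.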
    vy = np.max(vx,
                axis=tuple(axes),
                keepdims=True if keepdims is None else keepdims)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {"axes": axes}
    if keepdims is not None:
        kwargs["keepdims"] = keepdims
    operator = make_node("ReduceMax", ["x"], ["y"], **kwargs)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert tuple(vy.shape) == tuple(
        graph.outputs[0].shape
    ), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] ReduceMax {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #4
def template(x_shape, alpha, gamma, description: str = ""):
    np_alpha = 1.6732 if alpha is None else alpha
    np_gamma = 1.0507 if gamma is None else gamma
    vx = np.random.rand(*x_shape) - 0.5

    vy = vx.copy()
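    # SELU: y = gamma * alpha * (exp(x) - 1) for x <= 0, and y = gamma * x for x > 0.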
    vy[vx <= 0] = np_gamma * (np_alpha * np.exp(vx[vx <= 0]) - np_alpha)
    vy[vx > 0] = np_gamma * vx[vx > 0]

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {}
    if alpha is not None:
        kwargs["alpha"] = alpha
    if gamma is not None:
        kwargs["gamma"] = gamma
    operator = make_node("Selu", ["x"], ["y"], **kwargs)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(description=f"[ONNX] Selu {description}",
                              graph=graph,
                              inputs={graph.inputs[0]: vx},
                              expected={graph.outputs[0]: vy},
                              EPS=1e-2)
Example #5
def template(x_shape, blocksize, description: str = ""):
    N, C, H, W = x_shape
    vx = np.random.rand(*x_shape)
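    # Reference DepthToSpace (DCR layout): split C into blocksize*blocksize groups and
    # interleave them into the spatial dims, giving H*blocksize and W*blocksize.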

    vy = vx.reshape(
        [N, blocksize, blocksize, C // blocksize // blocksize, H, W])
    vy = vy.transpose([0, 3, 4, 1, 5, 2])
    vy = vy.reshape(
        [N, C // blocksize // blocksize, H * blocksize, W * blocksize])

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("DepthToSpace", ["x"], ["y"], blocksize=blocksize)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert list(vy.shape) == list(graph.outputs[0].shape)

    generate_kernel_test_case(
        description=f"[ONNX] DepthToSpace {description}",
        graph=graph,
        backend=["webgpu", "webassembly", "webgl"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #6
def template(n_x, x_shape, description: str = ""):
    vxs = [np.random.rand(*x_shape) for _ in range(n_x)]

    vys = list(vxs)
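    # Reference Max: fold the inputs pairwise with np.maximum until a single tensor remains.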
    while len(vys) > 1:
        vx1, vx2 = vys.pop(0), vys.pop(0)
        vy = np.maximum(vx1, vx2)
        vys.append(vy)
    vy = vys[0]

    xs = [
        make_tensor_value_info(f"x{i}", vx.shape) for i, vx in enumerate(vxs)
    ]
    y = make_tensor_value_info("y", vy.shape)

    operator = make_node("Max", [x.name for x in xs], ["y"])

    model = make_model([operator], xs, [y])

    graph = ONNXConverter().convert(model)

    assert tuple(vy.shape) == tuple(
        graph.outputs[0].shape
    ), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] Max {description}",
        graph=graph,
        inputs={graph.inputs[i]: vx
                for i, vx in enumerate(vxs)},
        expected={graph.outputs[0]: vy},
    )
Example #7
def template(x_shape, description: str = ""):
    vx = np.random.rand(*x_shape) + 1.0
    vy = np.sqrt(vx)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("Sqrt", ["x"], ["y"])

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(description=f"[ONNX] Sqrt {description}",
                              graph=graph,
                              inputs={graph.inputs[0]: vx},
                              expected={graph.outputs[0]: vy})
Example #8
def template(M=5,
             K=6,
             N=7,
             c_shape=None,
             transA=False,
             transB=False,
             broadcast=False,
             alpha=1.0,
             beta=0.0,
             description: str = ""):
    if c_shape is None:
        c_shape = [M, N]

    va = np.random.rand(*((K, M) if transA else (M, K)))
    vb = np.random.rand(*((N, K) if transB else (K, N)))
    vc = np.random.rand(*c_shape)
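    # Reference Gemm: d = alpha * op(A) @ op(B) + beta * C, where op transposes when transA/transB is set.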
    vd = (va.T if transA else va) @ (vb.T
                                     if transB else vb) * alpha + vc * beta

    kwargs = {
        "broadcast": broadcast,
        "transA": transA,
        "transB": transB,
        "alpha": alpha,
        "beta": beta
    }

    a = make_tensor_value_info("a", va.shape)
    b = make_tensor_value_info("b", vb.shape)
    c = make_tensor_value_info("c", vc.shape)
    d = make_tensor_value_info("d", vd.shape)

    operator = make_node("Gemm", ["a", "b", "c"], ["d"], **kwargs)
    model = make_model([operator], [a, b, c], [d])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] Gemm {description}",
        graph=graph,
        inputs={
            graph.inputs[0]: va,
            graph.inputs[1]: vb,
            graph.inputs[2]: vc
        },
        expected={graph.outputs[0]: vd},
    )
Example #9
def template(N=2, H=5, W=5, C=7, epsilon: float = 1e-5, description: str = ""):
    # spatial = 0 is not supported
    x_shape = [N, C, H, W]

    def expand(v):
        return v[np.newaxis, :, np.newaxis, np.newaxis]

    # computing the reference in float64 would give slightly different results, so use float32
    def randfloat(*shape):
        return np.random.rand(*shape).astype(np.float32)

    vx = randfloat(*x_shape)
    vscale = randfloat(C)
    vB = randfloat(C)
    vmean = randfloat(C)
    vvar = randfloat(C)
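    # Reference batch normalization: y = (x - mean) / sqrt(var + epsilon) * scale + B,
    # with the per-channel parameters broadcast over N, H, and W.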
    vy = (vx - expand(vmean)) / np.sqrt(expand(vvar) +
                                        epsilon) * expand(vscale) + expand(vB)

    x = make_tensor_value_info("x", x_shape)
    scale = make_tensor_value_info("scale", [C])
    B = make_tensor_value_info("B", [C])
    mean = make_tensor_value_info("mean", [C])
    var = make_tensor_value_info("var", [C])
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {"epsilon": epsilon}
    operator = make_node("BatchNormalization",
                         ["x", "scale", "B", "mean", "var"], ["y"], **kwargs)
    model = make_model([operator], [x, scale, B, mean, var], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] BatchNormalization {description}",
        graph=graph,
        inputs={
            graph.inputs[0]: vx,
            graph.inputs[1]: vscale,
            graph.inputs[2]: vB,
            graph.inputs[3]: vmean,
            graph.inputs[4]: vvar
        },
        expected={graph.outputs[0]: vy},
    )
Example #10
def template(x_shape, description: str = ""):
    vx = np.random.rand(*x_shape)
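    # Reference: average over all spatial axes (every dim after N and C).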
    vy = np.mean(vx, axis=tuple(range(2, vx.ndim)), keepdims=False)

    x = make_tensor_value_info("x", x_shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("GlobalAveragePool", ["x"], ["y"])
    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] GlobalAveragePool {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #11
def template(x_shape, alpha, description: str = ""):
    vx = np.random.rand(*x_shape) - 0.5
    vy = np.maximum(vx, vx * alpha)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("LeakyRelu", ["x"], ["y"], alpha=alpha)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] LeakyRelu {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #12
def template(N=2,
             H=5,
             W=5,
             C=7,
             KH=3,
             KW=3,
             SH=1,
             SW=1,
             PH=1,
             PW=1,
             DH=1,
             DW=1,
             description: str = ""):
    if DH != 1 or DW != 1:
        raise NotImplementedError

    x_shape = [N, C, H, W]

    vx = np.random.rand(*x_shape)
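    # Use chainer's max_pooling_2d as the reference implementation.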
    vy = chainer.functions.max_pooling_2d(vx,
                                          ksize=[KH, KW],
                                          stride=[SH, SW],
                                          pad=[PH, PW]).data

    x = make_tensor_value_info("x", x_shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {
        "kernel_shape": [KH, KW],
        "strides": [SH, SW],
        "dilations": [DH, DW],
        "pads": [PH, PH, PW, PW]
    }
    operator = make_node("MaxPool", ["x"], ["y"], **kwargs)
    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] MaxPool {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #13
def template(xs_shape, axis, description: str = ""):
    vxs = [np.random.rand(*x_shape) for x_shape in xs_shape]
    vy = np.concatenate(vxs, axis)

    xs = [make_tensor_value_info(f"x{i}", vx.shape) for i, vx in enumerate(vxs)]
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("Concat", [x.name for x in xs], ["y"], axis=axis)

    model = make_model([operator], xs, [y])

    graph = ONNXConverter().convert(model)

    assert tuple(vy.shape) == tuple(graph.outputs[0].shape), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] Concat {description}",
        graph=graph,
        inputs={graph.inputs[i]: vx for i, vx in enumerate(vxs)},
        expected={graph.outputs[0]: vy},
    )
Example #14
def template(x_shape, axis, description: str = ""):
    vx = np.random.rand(*x_shape) - 0.5
    vy = np.exp(vx) / np.sum(np.exp(vx), axis=axis, keepdims=True)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("Softmax", ["x"], ["y"], axis=axis)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] Softmax {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #15
def template(x_shape, axes, description: str = ""):
    vx = np.random.rand(*x_shape)
    vy = np.squeeze(vx, tuple(axes))

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("Squeeze", ["x"], ["y"], axes=axes)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert list(vy.shape) == list(graph.outputs[0].shape)

    generate_kernel_test_case(
        description=f"[ONNX] Squeeze {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #16
def template(x_shape, y_shape, description: str = ""):
    vx = np.random.rand(*x_shape)
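    # In ONNX Reshape, a 0 in the target shape means "keep the corresponding input dimension".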
    vy = np.reshape(vx, [x if y == 0 else y for x, y in zip(x_shape, y_shape)])

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)
    operator = make_node("Reshape", ["x"], ["y"], shape=y_shape)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert list(vy.shape) == list(graph.outputs[0].shape)

    generate_kernel_test_case(
        description=f"[ONNX] Reshape {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #17
def template(x_shape, axis, kwargs, description: str = ""):
    vx = np.random.rand(*x_shape)
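    # Flatten collapses the dims before axis into the first output dim and the remaining dims into the second.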
    y_shape = [np.prod(vx.shape[:axis]), np.prod(vx.shape[axis:])]
    vy = vx.reshape(y_shape)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    operator = make_node("Flatten", ["x"], ["y"], **kwargs)
    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert tuple(vy.shape) == tuple(
        graph.outputs[0].shape
    ), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
    generate_kernel_test_case(
        description=f"[ONNX] Flatten {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #18
def template(x_shape, split, axis, description: str = ""):
    vx = np.random.rand(*x_shape)
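    # np.split expects cut points, so convert the per-output sizes in "split" into cumulative boundaries.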
    sections = np.cumsum(split).tolist()[:-1]
    vys = np.split(vx, sections, axis=axis)

    x = make_tensor_value_info("x", vx.shape)
    ys = [make_tensor_value_info(f"y{i}", vy.shape) for i, vy in enumerate(vys)]
    operator = make_node("Split", ["x"], [y.name for y in ys], axis=axis, split=split)

    model = make_model([operator], [x], ys)

    graph = ONNXConverter().convert(model)

    for i, vy in enumerate(vys):
        assert tuple(vy.shape) == tuple(graph.outputs[i].shape), f"vys[{i}]: {vy.shape}, graph.outputs[{i}]: {graph.outputs[i].shape}"

    generate_kernel_test_case(
        description=f"[ONNX] Split {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[i]: vy for i, vy in enumerate(vys)},
    )
Example #19
def template(x0_shape, x1_shape, description: str = ""):
    vx0 = np.random.rand(*x0_shape)
    vx1 = np.random.rand(*x1_shape)
    vy = vx0**vx1

    x0 = make_tensor_value_info("x0", vx0.shape)
    x1 = make_tensor_value_info("x1", vx1.shape)
    y = make_tensor_value_info("y", vy.shape)

    operator = make_node("Pow", ["x0", "x1"], ["y"])

    model = make_model([operator], [x0, x1], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] Pow {description}",
        graph=graph,
        inputs={
            graph.inputs[0]: vx0,
            graph.inputs[1]: vx1
        },
        expected={graph.outputs[0]: vy},
    )
Example #20
def template(x0_shape,
             x1_shape,
             broadcast=0,
             axis=None,
             description: str = ""):
    vx0 = np.random.rand(*x0_shape)
    vx1 = np.random.rand(*x1_shape)
    if axis is not None:
        # ONNX-style broadcast: insert size-1 axes so that vx1 aligns with vx0 starting at the given axis
        vx1 = vx1[(None, ) * axis + (..., ) + (None, ) *
                  (vx0.ndim - vx1.ndim - axis)]

    vy = vx0 + vx1

    x0 = make_tensor_value_info("x0", x0_shape)
    x1 = make_tensor_value_info("x1", x1_shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {"broadcast": broadcast}
    if axis is not None:
        kwargs["axis"] = axis
    operator = make_node("Add", ["x0", "x1"], ["y"], **kwargs)

    model = make_model([operator], [x0, x1], [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] Add {description}",
        graph=graph,
        inputs={
            graph.inputs[0]: vx0,
            graph.inputs[1]: vx1
        },
        expected={graph.outputs[0]: vy},
    )
Example #21
def template(x_shape, num_input=2, description: str = ""):
    vxs = [np.random.rand(*x_shape) for _ in range(num_input)]
    vy = np.zeros(x_shape)
    for vx in vxs:
        vy += vx

    xs = [
        make_tensor_value_info(f"x{i}", vxs[i].shape) for i in range(num_input)
    ]
    y = make_tensor_value_info("y", vy.shape)

    operator = make_node("Sum", [x.name for x in xs], ["y"])

    model = make_model([operator], xs, [y])

    graph = ONNXConverter().convert(model)

    generate_kernel_test_case(
        description=f"[ONNX] Sum {description}",
        graph=graph,
        inputs={v: x
                for v, x in zip(graph.inputs, vxs)},
        expected={graph.outputs[0]: vy},
    )
Example #22
def template(x_shape, pads, mode, value=None, description: str = ""):
    vx = np.random.rand(*x_shape)
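    # ONNX "pads" is laid out as [begin_0, ..., begin_n, end_0, ..., end_n];
    # regroup it into numpy's per-axis [begin, end] pairs.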
    np_pads = [[b, e] for b, e in zip(pads[:vx.ndim], pads[vx.ndim:])]
    if mode == b"constant":
        vy = np.pad(vx, np_pads, mode="constant", constant_values=value)

    elif mode == b"reflect":
        vy = np.pad(vx, np_pads, mode="reflect")

    elif mode == b"edge":
        vy = np.pad(vx, np_pads, mode="symmetric")

    else:
        raise ValueError(mode)

    x = make_tensor_value_info("x", vx.shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {"pads": pads, "mode": mode}
    if value is not None:
        kwargs["value"] = value
    operator = make_node("Pad", ["x"], ["y"], **kwargs)

    model = make_model([operator], [x], [y])

    graph = ONNXConverter().convert(model)

    assert list(vy.shape) == list(graph.outputs[0].shape)

    generate_kernel_test_case(
        description=f"[ONNX] Pad {description}",
        graph=graph,
        backend=["webgpu", "webassembly", "webgl"],
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy},
    )
Example #23
def template(N=2,
             H=5,
             W=5,
             Cin=7,
             Cout=9,
             KH=3,
             KW=3,
             SH=1,
             SW=1,
             PH=1,
             PW=1,
             DH=1,
             DW=1,
             use_bias=False,
             description: str = ""):
    x_shape = [N, Cin, H, W]
    w_shape = [Cout, Cin, KH, KW]

    vx = np.random.rand(*x_shape)
    vw = np.random.rand(*w_shape)
    vb = np.random.rand(Cout) if use_bias else None

    if DH != 1 or DW != 1:
        vy = chainer.functions.dilated_convolution_2d(vx,
                                                      vw,
                                                      b=vb,
                                                      stride=[SH, SW],
                                                      pad=[PH, PW],
                                                      dilate=[DH, DW]).data
    else:
        vy = chainer.functions.convolution_2d(vx,
                                              vw,
                                              b=vb,
                                              stride=[SH, SW],
                                              pad=[PH, PW]).data

    x = make_tensor_value_info("x", x_shape)
    w = make_tensor_value_info("w", w_shape)
    y = make_tensor_value_info("y", vy.shape)

    kwargs = {
        "kernel_shape": [KH, KW],
        "strides": [SH, SW],
        "dilations": [DH, DW],
        "pads": [PH, PH, PW, PW]
    }
    if use_bias:
        b = make_tensor_value_info("b", vb.shape)
        operator = make_node("Conv", ["x", "w", "b"], ["y"], **kwargs)
        model = make_model([operator], [x, w, b], [y])

    else:
        operator = make_node("Conv", ["x", "w"], ["y"], **kwargs)
        model = make_model([operator], [x, w], [y])

    graph = ONNXConverter().convert(model)

    inputs = {graph.inputs[0]: vx, graph.inputs[1]: vw}

    if use_bias:
        inputs[graph.inputs[2]] = vb

    generate_kernel_test_case(
        description=f"[ONNX] Conv {description}",
        graph=graph,
        inputs=inputs,
        expected={graph.outputs[0]: vy},
    )