# Common imports assumed by the snippets below. The remaining helpers used
# throughout (generate_kernel_test_case, Placeholder, PlaceholderVariable,
# ConstantVariable, OrderNCHW, mul, default_order, generate_array, config,
# console, generate_descriptor) come from the WebDNN package and its test
# utilities, and SkipTest from the test framework; their exact module paths
# are not shown in this excerpt.
import chainer
import numpy as np

from webdnn.frontend.chainer import ChainerConverter


def template(N=2,
             H=14,
             W=15,
             C1=16,
             C2=17,
             ksize=3,
             stride=1,
             pad=1,
             nobias=True,
             description=""):
    link = chainer.links.Deconvolution2D(C1,
                                         C2,
                                         ksize=ksize,
                                         stride=stride,
                                         pad=pad,
                                         nobias=nobias)
    vx = chainer.Variable(
        np.arange(np.prod([N, C1, H, W])).reshape(N, C1, H, W).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2)
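The template above is only a parameterized helper; concrete tests would call it with the settings under test. A minimal sketch of such invocations (function names and parameter choices are illustrative, not taken from the original suite):

def test():
    template()

def test_stride_2():
    template(stride=2, description="stride=2")

def test_with_bias():
    template(nobias=False, description="with bias")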
Example #2
def template(description=""):
    if chainer.__version__ >= "3.":
        raise SkipTest("Since Chainer 3.0.0, L.BatchNormalization uses F.fixed_batch_normalization when 'chainer.config.train == False'.")

    link = chainer.links.BatchNormalization(size=4)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))

    if chainer.__version__ >= "2.":
        with chainer.using_config('train', False):
            vy = link(vx)

    else:
        vy = link(vx, test=True)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.BatchNormalization {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data}
    )
Example #3
def test_nobias():
    link = chainer.links.Deconvolution2D(4,
                                         10,
                                         ksize=3,
                                         stride=1,
                                         pad=1,
                                         nobias=True)

    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D(nobias=True)",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            x: ConstantVariable(vx.data, OrderNCHW).change_order(x.order).data
        },
        expected={
            y: ConstantVariable(vy.data, OrderNCHW).change_order(y.order).data
        })
Example #4
def template(ksize=2, stride=None, pad=0, description=""):
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = chainer.functions.max_pooling_2d(vx,
                                          ksize=ksize,
                                          stride=stride,
                                          pad=pad)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.max_pooling_2d {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example #5
def test_with_placeholder():
    vx = chainer.Variable(np.random.rand(2, 3, 4, 5).astype(np.float32))
    vy = chainer.functions.expand_dims(vx, axis=1)

    N = Placeholder(label="N")
    C = Placeholder(label="C")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, C, H, W])
    py = chainer.functions.expand_dims(px, axis=1)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 2
    C.value = 3
    H.value = 4
    W.value = 5
    generate_kernel_test_case(
        description=f"[chainer] F.expand_dims with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example #6
def template(train=False, description=""):
    link = chainer.links.BatchNormalization(size=4)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))

    if chainer.__version__ >= "2.":
        with chainer.using_config('train', train):
            vy = link(vx)
    else:
        vy = link(vx, test=not train)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.BatchNormalization {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
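As above, concrete BatchNormalization tests would simply instantiate the template for each mode; a hypothetical pair of invocations (names are my own, and train-mode conversion support is assumed):

def test_inference_mode():
    template(train=False)

def test_train_mode():
    template(train=True, description="train=True")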
Example #7
def template(axis=1, ndim=2, description: str = ""):
    shape = (np.arange(ndim) + 2).tolist()
    vx = chainer.Variable(
        np.arange(mul(shape)).reshape(shape).astype(np.float32))
    vy = chainer.functions.softmax(vx, axis)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.softmax {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(
                vx.data,
                [default_order[ndim].axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(
                vy.data,
                [default_order[ndim].axes_dict[a] for a in y.order.axes])
        },
    )
Example #8
def template(axis=1, ndim=2, description: str = ""):
    if chainer.__version__ < "1.24" and axis != 1:
        raise SkipTest(
            f"chainer.functions.softmax supports the \"axis\" parameter since v1.24; the currently installed version is {chainer.__version__}"
        )

    shape = (np.arange(ndim) + 2).tolist()
    vx = chainer.Variable(
        np.arange(mul(shape)).reshape(shape).astype(np.float32))

    if chainer.__version__ < "1.24":
        vy = chainer.functions.softmax(vx)

    else:
        vy = chainer.functions.softmax(vx, axis=axis)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.softmax {description}",
        graph=graph,
        inputs={x: vx.data},
        backend=["webgpu", "webassembly"],
        expected={y: vy.data},
    )
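Concrete softmax tests would parameterize the template over axis and rank; an illustrative sketch (not from the original suite):

def test_axis_0():
    template(axis=0, description="axis=0")

def test_ndim_4():
    template(axis=1, ndim=4, description="ndim=4")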
Example #9
def test_with_placeholder():
    link = chainer.links.Deconvolution2D(None, 16, ksize=3, stride=1, pad=1)
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    vy = link(vx)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 3, H, W])
    py = link(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2
    )
Example #10
def test():
    vx = chainer.Variable(np.random.rand(2, 8, 6, 12))
    vy1, vy2, vy3 = chainer.functions.split_axis(vx, [4, 10], 3)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy1, vy2, vy3])

    x = graph.inputs[0]
    y1 = graph.outputs[0]
    y2 = graph.outputs[1]
    y3 = graph.outputs[2]

    generate_kernel_test_case(
        description=f"[chainer] F.SplitAxis",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y1:
            np.transpose(vy1.data,
                         [OrderNCHW.axes_dict[a] for a in y1.order.axes]),
            y2:
            np.transpose(vy2.data,
                         [OrderNCHW.axes_dict[a] for a in y2.order.axes]),
            y3:
            np.transpose(vy3.data,
                         [OrderNCHW.axes_dict[a] for a in y3.order.axes])
        },
    )
Example #11
def test_with_placeholder():
    vx = chainer.Variable(np.random.rand(2, 20, 4, 5).astype(np.float32))
    vy1, vy2, vy3 = chainer.functions.split_axis(vx, [5, 15], 1)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 20, H, W])
    py1, py2, py3 = chainer.functions.split_axis(px, [5, 15], 1)

    graph = ChainerConverter().convert([px], [py1, py2, py3])

    N.value = 2
    H.value = 4
    W.value = 5
    generate_kernel_test_case(
        description=f"[chainer] F.split_axis with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={graph.inputs[0]: vx.data},
        expected={
            graph.outputs[0]: vy1.data,
            graph.outputs[1]: vy2.data,
            graph.outputs[2]: vy3.data
        },
    )
Example #12
def test():
    """
    Transpose test

    Chainer assumes variable order for convolution as NCHW.
    Reshape assumes no memory operation.
    However, WebDNN currently accepts only NHWC.
    Transpose have to be automatically inserted to work convolution and reshape correctly.

    Returns:

    """
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    conv1 = chainer.links.Convolution2D(4, 10, ksize=3)
    conv2 = chainer.links.Convolution2D(2, 4, ksize=5)
    linear1 = chainer.links.Linear(None, 5)
    h = conv1(vx)  # (2, 10, 4, 6)
    h = chainer.functions.reshape(h, (1, 2, 8, 30))
    h = conv2(h)  # (1, 4, 4, 26)
    h = chainer.functions.max_pooling_2d(h, ksize=2, stride=2)  # (1, 4, 2, 13)
    h = chainer.functions.reshape(h, (1, 2, 2, 26))
    # implicit reshape to (1, 2*2*26)
    vy = linear1(h)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] insertion of transposition",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
        backend=["webgpu", "webgl", "webassembly"])
Example #13
def test_with_placeholder():
    link = chainer.links.BatchNormalization(size=3)
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    with chainer.using_config('train', False):
        vy = link(vx)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 3, H, W])
    with chainer.using_config('train', False):
        py = link(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] L.FixedBatchNormalization with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example #14
def template(axis, keepdims, description: str = ""):
    if chainer.__version__ < "1.24" and keepdims:
        raise SkipTest(
            f"chainer.functions.sum supports the \"keepdims\" parameter since v1.24; the currently installed version is {chainer.__version__}"
        )

    vx = chainer.Variable(np.random.rand(2, 5, 6, 8).astype(np.float32))

    if chainer.__version__ < "1.24":
        vy = chainer.functions.sum(vx, axis=axis)

    else:
        vy = chainer.functions.sum(vx, axis=axis, keepdims=keepdims)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    assert list(vy.shape) == list(y.shape), f"{vy.shape}, {y.shape}"
    generate_kernel_test_case(
        description=f"[chainer] F.sum {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
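Concrete F.sum tests would fix axis and keepdims; an illustrative sketch assuming single-axis reductions (names and values are my own):

def test_axis_1():
    template(axis=1, keepdims=False, description="axis=1")

def test_axis_2_keepdims():
    template(axis=2, keepdims=True, description="axis=2, keepdims=True")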
Example #15
def template(ksize=2,
             stride=None,
             pad=0,
             shape=(2, 4, 6, 8),
             cover_all=False,
             description=""):
    vx = chainer.Variable(
        np.arange(np.prod(shape)).reshape(shape).astype(np.float32))
    vy = chainer.functions.max_pooling_2d(vx,
                                          ksize=ksize,
                                          stride=stride,
                                          pad=pad,
                                          cover_all=cover_all)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]
    assert list(vy.shape) == list(
        graph.outputs[0].shape
    ), f"(vy.shape)={vy.shape}, (graph.outputs[0].shape)={graph.outputs[0].shape}"

    generate_kernel_test_case(
        description=f"[chainer] F.max_pooling_2d {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
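Typical instantiations of this max-pooling template, shown only as a sketch (parameter choices are illustrative, not from the original suite):

def test():
    template()

def test_cover_all():
    template(ksize=3, stride=2, cover_all=True, description="cover_all=True")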
Example #16
def test_with_placeholder():
    vx1 = chainer.Variable(np.random.rand(2, 10, 4, 5).astype(np.float32))
    vx2 = chainer.Variable(np.random.rand(2, 15, 4, 5).astype(np.float32))
    vy = chainer.functions.concat([vx1, vx2], axis=1)

    N = Placeholder(label="N")
    C1 = Placeholder(label="C1")
    C2 = Placeholder(label="C2")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px1 = PlaceholderVariable([N, C1, H, W])
    px2 = PlaceholderVariable([N, C2, H, W])
    py = chainer.functions.concat([px1, px2], axis=1)

    graph = ChainerConverter().convert([px1, px2], [py])

    N.value = 2
    C1.value = 10
    C2.value = 15
    H.value = 4
    W.value = 5
    generate_kernel_test_case(
        description=f"[chainer] F.concat with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            graph.inputs[0]: vx1.data,
            graph.inputs[1]: vx2.data
        },
        expected={graph.outputs[0]: vy.data},
    )
Example #17
def template(n=2,
             c_in=4,
             h_in=6,
             w_in=8,
             c_out=10,
             ksize=3,
             stride=1,
             pad=0,
             nobias=True,
             EPS=1e-5,
             description=""):
    link = chainer.links.Convolution2D(c_in,
                                       c_out,
                                       ksize=ksize,
                                       stride=stride,
                                       pad=pad,
                                       nobias=nobias)
    link.W.data = np.ones(link.W.shape).astype(np.float32)
    vx = chainer.Variable(np.ones((n, c_in, h_in, w_in)).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Convolution2D {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=EPS)
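A hypothetical set of invocations for the Convolution2D template (names and values are illustrative, not taken from the original suite):

def test():
    template()

def test_stride_2_pad_1():
    template(stride=2, pad=1, description="stride=2, pad=1")

def test_with_bias():
    template(nobias=False, description="with bias")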
Example #18
def test():
    vx1 = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vx2 = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = vx1 / vx2

    graph = ChainerConverter().convert_from_inout_vars([vx1, vx2], [vy])

    x1 = graph.inputs[0]
    x2 = graph.inputs[1]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.Div",
        graph=graph,
        inputs={
            x1:
            np.transpose(vx1.data,
                         [OrderNCHW.axes_dict[a] for a in x1.order.axes]),
            x2:
            np.transpose(vx2.data,
                         [OrderNCHW.axes_dict[a] for a in x2.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example #19
def template(n=5, k=2.0, alpha=1e-4, beta=.75, description=""):
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = chainer.functions.local_response_normalization(vx,
                                                        n=n,
                                                        k=k,
                                                        alpha=alpha,
                                                        beta=beta)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.local_response_normalization {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
Example #20
def test_with_placeholder():
    vx1 = chainer.Variable(np.random.rand(10, 12).astype(np.float32) * 2 - 1)
    vx2 = chainer.Variable(np.random.rand(12, 14).astype(np.float32) * 2 - 1)
    vy = chainer.functions.matmul(vx1, vx2, False, False)

    M = Placeholder(label="M")
    K = Placeholder(label="K")
    N = Placeholder(label="N")
    px1 = PlaceholderVariable([M, K])
    px2 = PlaceholderVariable([K, N])
    py = chainer.functions.matmul(px1, px2, False, False)

    graph = ChainerConverter().convert([px1, px2], [py])

    M.value = 10
    K.value = 12
    N.value = 14
    generate_kernel_test_case(
        description=f"[chainer] F.matmul with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            graph.inputs[0]: vx1.data,
            graph.inputs[1]: vx2.data
        },
        expected={graph.outputs[0]: vy.data})
Example #21
def template(ksize=2,
             stride=None,
             pad=0,
             shape=(2, 4, 6, 8),
             cover_all=False,
             description=""):
    if cover_all:
        raise SkipTest(
            "The AveragePooling2D function in Chainer does not support cover_all=True."
        )

    vx = chainer.Variable(np.random.rand(*shape).astype(np.float32))
    vy = chainer.functions.average_pooling_2d(vx,
                                              ksize=ksize,
                                              stride=stride,
                                              pad=pad)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]
    assert list(vy.shape) == list(
        graph.outputs[0].shape
    ), f"(vy.shape)={vy.shape}, (graph.outputs[0].shape)={graph.outputs[0].shape}"

    generate_kernel_test_case(
        description=f"[chainer] F.average_pooling_2d {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
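An illustrative pair of invocations for the average-pooling template (my own sketch, not from the original suite):

def test():
    template()

def test_padding():
    template(ksize=3, stride=2, pad=1, description="ksize=3, stride=2, pad=1")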
Example #22
def test_convolution2d_large_HW():
    H1 = 64
    W1 = 64
    C1 = 3
    C2 = 64

    link = chainer.links.Convolution2D(C1,
                                       C2,
                                       ksize=3,
                                       stride=1,
                                       pad=1,
                                       nobias=True)
    link.W.data = generate_array(*link.W.shape)
    vx = chainer.Variable(generate_array(1, C1, H1, W1))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(description=f"test_convolution2d_large_HW",
                              graph=graph,
                              backend=["webgl"],
                              inputs={x: vx.data},
                              expected={y: vy.data})
Example #23
def test_with_placeholder():
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    vy = chainer.functions.local_response_normalization(vx)

    N = Placeholder(label="N")
    C = Placeholder(label="C")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, C, H, W])
    py = chainer.functions.local_response_normalization(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    C.value = 3
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] F.local_response_normalization with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example #24
def template(ksize=3, stride=1, pad=0, nobias=True, description=""):
    link = chainer.links.Deconvolution2D(4,
                                         10,
                                         ksize=ksize,
                                         stride=stride,
                                         pad=pad,
                                         nobias=nobias)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 11).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D {description}",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
        EPS=1e-2)
Example #25
def test_with_placeholder():
    vx0 = chainer.Variable(np.random.rand(10, 11, 12).astype(np.float32))
    vx1 = chainer.Variable(np.random.rand(10, 11, 12).astype(np.float32))
    vy = chainer.functions.maximum(vx0, vx1)

    A = Placeholder(label="A")
    B = Placeholder(label="B")
    C = Placeholder(label="C")
    px0 = PlaceholderVariable([A, B, C])
    px1 = PlaceholderVariable([A, B, C])
    py = chainer.functions.maximum(px0, px1)

    graph = ChainerConverter().convert([px0, px1], [py])

    A.value = 10
    B.value = 11
    C.value = 12
    generate_kernel_test_case(
        description=f"[chainer] F.maximum with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            graph.inputs[0]: vx0.data,
            graph.inputs[1]: vx1.data
        },
        expected={graph.outputs[0]: vy.data})
Example #26
def test_convolution2d_large_C1_2():
    # case where C1 exceeds WEBGL_MAX_TEXTURE_SIZE
    H1 = 8
    W1 = 8
    C1 = config.WEBGL_MAX_TEXTURE_SIZE
    C2 = 3

    link = chainer.links.Convolution2D(C1,
                                       C2,
                                       ksize=3,
                                       stride=1,
                                       pad=1,
                                       nobias=True)
    link.W.data = generate_array(*link.W.shape)
    vx = chainer.Variable(generate_array(1, C1, H1, W1).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(description=f"test_convolution2d_large_C1_2",
                              graph=graph,
                              backend=["webgl"],
                              inputs={x: vx.data},
                              expected={y: vy.data})
Example #27
def template(ksize=2,
             stride=None,
             pad=0,
             cover_all=True,
             shape=(2, 4, 6, 8),
             description=""):
    vx = chainer.Variable(np.random.rand(*shape).astype(np.float32))
    vy = chainer.functions.unpooling_2d(vx,
                                        ksize=ksize,
                                        stride=stride,
                                        pad=pad,
                                        cover_all=cover_all)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.unpooling_2d {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
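Illustrative invocations of the unpooling template (parameter choices are my own, not from the original suite):

def test():
    template()

def test_ksize_3_stride_2():
    template(ksize=3, stride=2, description="ksize=3, stride=2")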
Example #28
def template(shape=None, description=""):
    if shape is None:
        shape = [2, 4, 6, 8]

    vx = chainer.Variable(np.random.rand(2, 1, 1, 8).astype(np.float32))
    vy = chainer.functions.broadcast_to(vx, shape=shape)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.broadcast_to {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
Example #29
def template(r=2, description=""):
    vx = chainer.Variable(
        np.random.rand(2, 4 * r * r, 6, 8).astype(np.float32))
    vy = chainer.functions.depth2space(vx, r)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.Depth2Space {description}",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example #30
def main():
    sys.setrecursionlimit(10000)  # workaround for deep copying large graph

    parser = argparse.ArgumentParser()
    parser.add_argument("--model",
                        default="resnet50",
                        choices=["vgg16", "resnet50"])
    parser.add_argument("--backend", default="webgpu,webassembly,fallback")
    parser.add_argument("--encoding")
    parser.add_argument('--out',
                        '-o',
                        default='output_chainer',
                        help='Directory to output the graph descriptor')

    args = parser.parse_args()

    os.makedirs(args.out, exist_ok=True)

    sample_image = np.zeros((224, 224, 3),
                            dtype=np.uint8)  # PIL.Image.open("")
    if args.model == "vgg16":
        link = chainer.links.model.vision.vgg.VGG16Layers()
        prepared_image = chainer.links.model.vision.vgg.prepare(
            sample_image)  # BGR, CHW
        out_layer_name = "fc8"

    elif args.model == "resnet50":
        link = chainer.links.model.vision.resnet.ResNet50Layers()
        prepared_image = chainer.links.model.vision.resnet.prepare(
            sample_image)
        out_layer_name = "fc6"

    nn_input = chainer.Variable(np.array([prepared_image], dtype=np.float32))
    nn_output = link(nn_input, layers=[
        out_layer_name
    ])[out_layer_name]  # 'prob' is also possible (uses softmax)
    chainer_cg = chainer.computational_graph.build_computational_graph(
        [nn_output])
    converter = ChainerConverter()
    graph = converter.convert(chainer_cg, [nn_input],
                              [nn_output])  # type: Graph

    any_backend_failed = False
    last_backend_exception = None
    for backend in args.backend.split(","):
        try:
            graph_exec_data = generate_descriptor(
                backend, graph, constant_encoder_name=args.encoding)
            graph_exec_data.save(args.out)
        except Exception as ex:
            any_backend_failed = True
            last_backend_exception = ex
            console.error(
                f"Failed generating descriptor for backend {backend}: {str(ex)}\n"
            )

    if any_backend_failed:
        raise last_backend_exception
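Assuming this conversion script is executed directly (its surrounding imports are not shown in the excerpt), the conventional entry point would be:

if __name__ == "__main__":
    main()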