Example No. 1
def _convert_sum(converter: ChainerConverter, c_op: "chainer.functions.Sum"):
    x = converter.get_variable(c_op.inputs[0])
    for axis in list(x.order.axes) if c_op.axis is None else [
            x.order.axes[i] for i in c_op.axis
    ]:
        x, = Sum(None, axis=axis)(x)

        # chainer.functions.sum has supported the "keepdims" parameter since v1.24
        if not (chainer.__version__ >= "1.24" and c_op.keepdims and x.ndim > 1):
            x = x.squeeze(axis)

    converter.set_variable(c_op.outputs[0](), x)
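
The loop above reduces one axis at a time instead of all at once. A minimal NumPy sketch of the same idea (NumPy stands in for WebDNN's Sum operator here, purely for illustration; WebDNN tracks axes by name, so their positions never shift, while with plain NumPy indices we reduce in descending order to get the same effect):

import numpy as np

x = np.random.rand(2, 3, 4).astype(np.float32)

# Reduce one axis at a time, as the converter's loop does.
y = x
for axis in sorted((0, 2), reverse=True):
    y = y.sum(axis=axis)

assert np.allclose(y, x.sum(axis=(0, 2)))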
Example No. 2
def _convert_max_pooling2d(converter: ChainerConverter, c_op: "chainer.functions.MaxPooling2D"):
    if not c_op.cover_all:
        raise NotImplementedError("'cover_all=False' property in 'MaxPooling2D' is not supported.")

    x = converter.get_variable(c_op.inputs[0])
    unify_order(x.order, OrderNCHW)

    pool_opr = MaxPooling2D(None,
                            ksize=(c_op.kh, c_op.kw),
                            stride=(c_op.sy, c_op.sx),
                            padding=(c_op.ph, c_op.pw))

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
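
For reference, the output size Chainer computes per pooled dimension. The helper below, pool_outsize, is a hypothetical name; it is a sketch mirroring chainer.utils.conv.get_conv_outsize (reproduced from memory, so treat the exact rounding as an assumption):

def pool_outsize(size: int, k: int, s: int, p: int, cover_all: bool = True) -> int:
    # cover_all=True rounds up so every input pixel is covered by at least
    # one pooling window; cover_all=False rounds down.
    if cover_all:
        return (size + 2 * p - k + s - 1) // s + 1
    return (size + 2 * p - k) // s + 1

assert pool_outsize(6, k=2, s=2, p=0) == 3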
Example No. 3
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])  # type: ConstantVariable

    y, = Tensordot(None, axes=[x.order.axes[1:], w.order.axes[1]])(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        check_broadcast_constraints(y, b)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
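
For the common 2-D case, the Tensordot call above computes the same thing as this NumPy sketch (the shapes assume Chainer's weight layout of (out_features, in_features)):

import numpy as np

N, D, OUT = 2, 12, 5
x = np.random.rand(N, D).astype(np.float32)
W = np.random.rand(OUT, D).astype(np.float32)  # Chainer layout: (out, in)
b = np.random.rand(OUT).astype(np.float32)

# Contract x's feature axis against W's input axis, then broadcast-add the
# bias, which is what the converter does via Tensordot and `y + b`.
y = np.tensordot(x, W, axes=([1], [1])) + b

assert y.shape == (N, OUT)
assert np.allclose(y, x @ W.T + b, atol=1e-5)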
Example No. 4
def template(description=""):
    if chainer.__version__ >= "3.":
        raise SkipTest("Since Chainer 3.0.0, L.BatchNormalization use F.fixed_batch_normalization when 'chainer.config.train == False'.")

    link = chainer.links.BatchNormalization(size=4)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))

    if chainer.__version__ >= "2.":
        with chainer.using_config('train', False):
            vy = link(vx)

    else:
        vy = link(vx, test=True)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.BatchNormalization {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data}
    )
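
In inference mode ('train' == False) the link applies fixed batch normalization with its stored statistics. A minimal NumPy sketch of that computation for NCHW input (eps=2e-5 is Chainer's documented default; treat it as an assumption here):

import numpy as np

def fixed_batch_normalization(x, gamma, beta, mean, var, eps=2e-5):
    # Normalize over the channel axis of NCHW input using stored statistics.
    s = (1, -1, 1, 1)
    x_hat = (x - mean.reshape(s)) / np.sqrt(var.reshape(s) + eps)
    return gamma.reshape(s) * x_hat + beta.reshape(s)

x = np.random.rand(2, 4, 6, 8).astype(np.float32)
gamma, beta = np.ones(4, np.float32), np.zeros(4, np.float32)
y = fixed_batch_normalization(x, gamma, beta, x.mean(axis=(0, 2, 3)), x.var(axis=(0, 2, 3)))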
Example No. 5
def template(ksize=2,
             stride=None,
             pad=0,
             cover_all=True,
             shape=(2, 4, 6, 8),
             description=""):
    vx = chainer.Variable(np.random.rand(*shape).astype(np.float32))
    vy = chainer.functions.unpooling_2d(vx,
                                        ksize=ksize,
                                        stride=stride,
                                        pad=pad,
                                        cover_all=cover_all)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.unpooling_2d {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
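
The spatial size of the unpooled output follows Chainer's deconvolution size formula. The helper below, deconv_outsize, is a hypothetical name; it is a sketch mirroring chainer.utils.conv.get_deconv_outsize (reproduced from memory, so the cover_all branch in particular is an assumption):

def deconv_outsize(size: int, k: int, s: int, p: int, cover_all: bool = False) -> int:
    # Inverse of the pooling/convolution size formula: unpooling with
    # ksize=2, stride=2 roughly doubles each spatial dimension.
    if cover_all:
        return s * (size - 1) + k - s + 1 - 2 * p
    return s * (size - 1) + k - 2 * p

assert deconv_outsize(6, k=2, s=2, p=0) == 12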
Example No. 6
def _convert_local_response_normalization(
    converter: ChainerConverter, c_op:
    "chainer.functions.normalization.local_response_normalization.LocalResponseNormalization"
):
    x = converter.get_variable(c_op.inputs[0])
    unify_order(x.order, OrderNCHW)

    n_opr = LocalResponseNormalization(None,
                                       n=c_op.n,
                                       k=c_op.k,
                                       alpha=c_op.alpha,
                                       beta=c_op.beta)

    y, = n_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
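
What the operator computes, per Chainer's definition y = x / (k + alpha * sum_j x_j^2)^beta with the sum taken over a window of n neighboring channels; a NumPy sketch for NCHW input:

import numpy as np

def local_response_normalization(x, n=5, k=2.0, alpha=1e-4, beta=0.75):
    # x: NCHW. Each channel is scaled by a sum of squares over a window
    # of n neighboring channels (clipped at the channel boundaries).
    half = n // 2
    sq = x ** 2
    y = np.empty_like(x)
    for c in range(x.shape[1]):
        lo, hi = max(0, c - half), min(x.shape[1], c + half + 1)
        y[:, c] = x[:, c] / (k + alpha * sq[:, lo:hi].sum(axis=1)) ** beta
    return y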
Example No. 7
def template(train=False, description=""):
    link = chainer.links.BatchNormalization(size=4)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))

    if chainer.__version__ >= "2.":
        with chainer.using_config('train', train):
            vy = link(vx)
    else:
        vy = link(vx, test=not train)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.BatchNormalization {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
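Example No. 8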
def test_with_placeholder():
    link = chainer.links.Deconvolution2D(None, 16, ksize=3, stride=1, pad=1)
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    vy = link(vx)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 3, H, W])
    py = link(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2
    )
Example No. 9
def template(ksize=2,
             stride=None,
             pad=0,
             shape=(2, 4, 6, 8),
             cover_all=False,
             description=""):
    if cover_all:
        raise SkipTest(
            "AveragePooling2D function in Chainer does not support cover_all=True mode."
        )

    vx = chainer.Variable(np.random.rand(*shape).astype(np.float32))
    vy = chainer.functions.average_pooling_2d(vx,
                                              ksize=ksize,
                                              stride=stride,
                                              pad=pad)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]
    assert list(vy.shape) == list(y.shape), f"(vy.shape)={vy.shape}, (y.shape)={y.shape}"

    generate_kernel_test_case(
        description=f"[chainer] F.average_pooling_2d {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example No. 10
def test_with_placeholder():
    vx = chainer.Variable(np.random.rand(2, 20, 4, 5).astype(np.float32))
    vy1, vy2, vy3 = chainer.functions.split_axis(vx, [5, 15], 1)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 20, H, W])
    py1, py2, py3 = chainer.functions.split_axis(px, [5, 15], 1)

    graph = ChainerConverter().convert([px], [py1, py2, py3])

    N.value = 2
    H.value = 4
    W.value = 5
    generate_kernel_test_case(
        description=f"[chainer] F.split_axis with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={graph.inputs[0]: vx.data},
        expected={
            graph.outputs[0]: vy1.data,
            graph.outputs[1]: vy2.data,
            graph.outputs[2]: vy3.data
        },
    )
Example No. 11
def test():
    vx = chainer.Variable(np.random.rand(2, 8, 6, 12))
    vy1, vy2, vy3 = chainer.functions.split_axis(vx, [4, 10], 3)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy1, vy2, vy3])

    x = graph.inputs[0]
    y1 = graph.outputs[0]
    y2 = graph.outputs[1]
    y3 = graph.outputs[2]

    generate_kernel_test_case(
        description=f"[chainer] F.SplitAxis",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y1:
            np.transpose(vy1.data,
                         [OrderNCHW.axes_dict[a] for a in y1.order.axes]),
            y2:
            np.transpose(vy2.data,
                         [OrderNCHW.axes_dict[a] for a in y2.order.axes]),
            y3:
            np.transpose(vy3.data,
                         [OrderNCHW.axes_dict[a] for a in y3.order.axes])
        },
    )
Example No. 12
def test_with_placeholder():
    link = chainer.links.BatchNormalization(size=3)
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    with chainer.using_config('train', False):
        vy = link(vx)

    N = Placeholder(label="N")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, 3, H, W])
    with chainer.using_config('train', False):
        py = link(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] L.FixedBatchNormalization with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example No. 13
def test():
    """
    Transpose test

    Chainer assumes variable order for convolution as NCHW.
    Reshape assumes no memory operation.
    However, WebDNN currently accepts only NHWC.
    Transpose have to be automatically inserted to work convolution and reshape correctly.

    Returns:

    """
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    conv1 = chainer.links.Convolution2D(4, 10, ksize=3)
    conv2 = chainer.links.Convolution2D(2, 4, ksize=5)
    linear1 = chainer.links.Linear(None, 5)
    h = conv1(vx)  # (2, 10, 4, 6)
    h = chainer.functions.reshape(h, (1, 2, 8, 30))
    h = conv2(h)  # (1, 4, 4, 26)
    h = chainer.functions.max_pooling_2d(h, ksize=2, stride=2)  # (1, 4, 2, 13)
    h = chainer.functions.reshape(h, (1, 2, 2, 26))
    # implicit reshape to (1, 2*2*26)
    vy = linear1(h)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] insertion of transposition",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
        backend=["webgpu", "webgl", "webassembly"])
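
The inserted transposition is an ordinary axis permutation. A sketch of the NCHW-to-NHWC mapping that this test exercises:

import numpy as np

x_nchw = np.random.rand(2, 4, 6, 8).astype(np.float32)

# NCHW -> NHWC: move the channel axis to the end.
x_nhwc = np.transpose(x_nchw, (0, 2, 3, 1))

assert x_nhwc.shape == (2, 6, 8, 4)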
Example No. 14
def template(axis, keepdims, description: str = ""):
    if chainer.__version__ < "1.24" and keepdims:
        raise SkipTest(
            f"chainer.functions.sum support \"keepdims\" parameter since v1.24, current installed version is {chainer.__version__}"
        )

    vx = chainer.Variable(np.random.rand(2, 5, 6, 8).astype(np.float32))

    if chainer.__version__ < "1.24":
        vy = chainer.functions.sum(vx, axis=axis)

    else:
        vy = chainer.functions.sum(vx, axis=axis, keepdims=keepdims)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    assert list(vy.shape) == list(y.shape), f"{vy.shape}, {y.shape}"
    generate_kernel_test_case(
        description=f"[chainer] F.sum {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
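
The keepdims flag only affects the output shape, not the values; a quick NumPy sketch of the relation this test relies on:

import numpy as np

x = np.random.rand(2, 5, 6, 8).astype(np.float32)

y_keep = x.sum(axis=1, keepdims=True)   # shape (2, 1, 6, 8)
y_drop = x.sum(axis=1)                  # shape (2, 6, 8)

assert np.allclose(y_keep.squeeze(axis=1), y_drop)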
Example No. 15
def template(ksize=2,
             stride=None,
             pad=0,
             shape=(2, 4, 6, 8),
             cover_all=False,
             description=""):
    vx = chainer.Variable(
        np.arange(np.prod(shape)).reshape(shape).astype(np.float32))
    vy = chainer.functions.max_pooling_2d(vx,
                                          ksize=ksize,
                                          stride=stride,
                                          pad=pad,
                                          cover_all=cover_all)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]
    assert list(vy.shape) == list(y.shape), f"(vy.shape)={vy.shape}, (y.shape)={y.shape}"

    generate_kernel_test_case(
        description=f"[chainer] F.max_pooling_2d {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
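Example No. 16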
def template(n=5, k=2.0, alpha=1e-4, beta=.75, description=""):
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = chainer.functions.local_response_normalization(vx,
                                                        n=n,
                                                        k=k,
                                                        alpha=alpha,
                                                        beta=beta)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.local_response_normalization {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
Example No. 17
def template(axis=1, ndim=2, description: str = ""):
    if chainer.__version__ < "1.24" and axis != 1:
        raise SkipTest(
            f"chainer.functions.softmax support \"xis\" parameter since v1.24, current installed version is {chainer.__version__}"
        )

    shape = (np.arange(ndim) + 2).tolist()
    vx = chainer.Variable(
        np.arange(mul(shape)).reshape(shape).astype(np.float32))

    if chainer.__version__ < "1.24":
        vy = chainer.functions.softmax(vx)

    else:
        vy = chainer.functions.softmax(vx, axis=axis)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.softmax {description}",
        graph=graph,
        inputs={x: vx.data},
        backend=["webgpu", "webassembly"],
        expected={y: vy.data},
    )
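
For reference, a numerically stable softmax over an arbitrary axis, equivalent to what F.softmax computes:

import numpy as np

def softmax(x, axis=1):
    # Subtract the per-slice max before exponentiating for numerical
    # stability; the shift cancels out in the normalization.
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

assert np.allclose(softmax(np.random.rand(2, 3)).sum(axis=1), 1.0)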
Example No. 18
def test():
    vx1 = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vx2 = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = vx1 / vx2

    graph = ChainerConverter().convert_from_inout_vars([vx1, vx2], [vy])

    x1 = graph.inputs[0]
    x2 = graph.inputs[1]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.Div",
        graph=graph,
        inputs={
            x1:
            np.transpose(vx1.data,
                         [OrderNCHW.axes_dict[a] for a in x1.order.axes]),
            x2:
            np.transpose(vx2.data,
                         [OrderNCHW.axes_dict[a] for a in x2.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example No. 19
def _convert_max(converter: ChainerConverter, c_op: "chainer.functions.Max"):
    x = converter.get_variable(c_op.inputs[0])

    remove_axes = []

    for axis in list(x.order.axes) if c_op.axis is None else [
            x.order.axes[i] for i in c_op.axis
    ]:
        x, = Max(None, axis=axis)(x)
        if not c_op.keepdims and x.ndim > 1:
            remove_axes.append(axis)

    if not c_op.keepdims and x.ndim > 1:
        x = x.squeeze(remove_axes)

    converter.set_variable(c_op.outputs[0](), x)
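
Unlike the Sum converter in Example No. 1, this one collects the reduced axes and squeezes them all once at the end. An equivalent NumPy sketch keeps the reduced dimensions during the loop:

import numpy as np

x = np.random.rand(2, 3, 4).astype(np.float32)

# Reduce one axis at a time with keepdims=True so axis numbering stays
# stable, then drop all reduced axes in a single squeeze, as the converter
# does with remove_axes.
y = x
for axis in (0, 2):
    y = y.max(axis=axis, keepdims=True)
y = y.squeeze(axis=(0, 2))

assert np.allclose(y, x.max(axis=(0, 2)))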
Example No. 20
def test_with_placeholder():
    vx1 = chainer.Variable(np.random.rand(10, 12).astype(np.float32) * 2 - 1)
    vx2 = chainer.Variable(np.random.rand(12, 14).astype(np.float32) * 2 - 1)
    vy = chainer.functions.matmul(vx1, vx2, False, False)

    M = Placeholder(label="M")
    K = Placeholder(label="K")
    N = Placeholder(label="N")
    px1 = PlaceholderVariable([M, K])
    px2 = PlaceholderVariable([K, N])
    py = chainer.functions.matmul(px1, px2, False, False)

    graph = ChainerConverter().convert([px1, px2], [py])

    M.value = 10
    K.value = 12
    N.value = 14
    generate_kernel_test_case(
        description=f"[chainer] F.matmul with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            graph.inputs[0]: vx1.data,
            graph.inputs[1]: vx2.data
        },
        expected={graph.outputs[0]: vy.data})
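Example No. 21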
def test_with_placeholder():
    vx = chainer.Variable(np.random.rand(1, 3, 16, 16).astype(np.float32))
    vy = chainer.functions.local_response_normalization(vx)

    N = Placeholder(label="N")
    C = Placeholder(label="C")
    H = Placeholder(label="H")
    W = Placeholder(label="W")
    px = PlaceholderVariable([N, C, H, W])
    py = chainer.functions.local_response_normalization(px)

    graph = ChainerConverter().convert([px], [py])

    x = graph.inputs[0]
    y = graph.outputs[0]

    N.value = 1
    C.value = 3
    H.value = 16
    W.value = 16
    generate_kernel_test_case(
        description=f"[chainer] F.local_response_normalization with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
    )
Example No. 22
def test_convolution2d_large_HW():
    H1 = 64
    W1 = 64
    C1 = 3
    C2 = 64

    link = chainer.links.Convolution2D(C1,
                                       C2,
                                       ksize=3,
                                       stride=1,
                                       pad=1,
                                       nobias=True)
    link.W.data = generate_array(*link.W.shape)
    vx = chainer.Variable(generate_array(1, C1, H1, W1))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(description=f"test_convolution2d_large_HW",
                              graph=graph,
                              backend=["webgl"],
                              inputs={x: vx.data},
                              expected={y: vy.data})
Example No. 23
def test_with_placeholder():
    vx0 = chainer.Variable(np.random.rand(10, 11, 12).astype(np.float32))
    vx1 = chainer.Variable(np.random.rand(10, 11, 12).astype(np.float32))
    vy = chainer.functions.maximum(vx0, vx1)

    A = Placeholder(label="A")
    B = Placeholder(label="B")
    C = Placeholder(label="C")
    px0 = PlaceholderVariable([A, B, C])
    px1 = PlaceholderVariable([A, B, C])
    py = chainer.functions.maximum(px0, px1)

    graph = ChainerConverter().convert([px0, px1], [py])

    A.value = 10
    B.value = 11
    C.value = 12
    generate_kernel_test_case(
        description=f"[chainer] F.maximum with placeholder",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            graph.inputs[0]: vx0.data,
            graph.inputs[1]: vx1.data
        },
        expected={graph.outputs[0]: vy.data})
Example No. 24
def template(ksize=3, stride=1, pad=0, nobias=True, description=""):
    link = chainer.links.Deconvolution2D(4,
                                         10,
                                         ksize=ksize,
                                         stride=stride,
                                         pad=pad,
                                         nobias=nobias)
    vx = chainer.Variable(np.random.rand(2, 4, 6, 11).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D {description}",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
        EPS=1e-2)
Example No. 25
def template(axis=1, ndim=2, description: str = ""):
    shape = (np.arange(ndim) + 2).tolist()
    vx = chainer.Variable(
        np.arange(mul(shape)).reshape(shape).astype(np.float32))
    vy = chainer.functions.softmax(vx, axis)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.softmax {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(
                vx.data,
                [default_order[ndim].axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(
                vy.data,
                [default_order[ndim].axes_dict[a] for a in y.order.axes])
        },
    )
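Example No. 26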
def template(N=2,
             H=14,
             W=15,
             C1=16,
             C2=17,
             ksize=3,
             stride=1,
             pad=1,
             nobias=True,
             description=""):
    link = chainer.links.Deconvolution2D(C1,
                                         C2,
                                         ksize=ksize,
                                         stride=stride,
                                         pad=pad,
                                         nobias=nobias)
    vx = chainer.Variable(
        np.arange(np.prod([N, C1, H, W])).reshape(N, C1, H, W).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Deconvolution2D {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2)
Example No. 27
def template(ksize=2, stride=None, pad=0, description=""):
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = chainer.functions.max_pooling_2d(vx,
                                          ksize=ksize,
                                          stride=stride,
                                          pad=pad)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.max_pooling_2d {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example No. 28
def template(shape=None, description=""):
    if shape is None:
        shape = [2, 4, 6, 8]

    vx = chainer.Variable(np.random.rand(2, 1, 1, 8).astype(np.float32))
    vy = chainer.functions.broadcast_to(vx, shape=shape)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.broadcast_to {description}",
        graph=graph,
        inputs={
            x:
            np.transpose(vx.data,
                         [OrderNCHW.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y:
            np.transpose(vy.data,
                         [OrderNCHW.axes_dict[a] for a in y.order.axes])
        })
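
F.broadcast_to follows NumPy broadcasting rules: axes of size 1 are virtually repeated up to the target shape. A sketch:

import numpy as np

x = np.random.rand(2, 1, 1, 8).astype(np.float32)
y = np.broadcast_to(x, (2, 4, 6, 8))  # size-1 axes are repeated

assert y.shape == (2, 4, 6, 8)
assert np.allclose(y[:, 0], y[:, 3])  # broadcast copies are identical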
Example No. 29
def test_nobias():
    link = chainer.links.Convolution2D(4,
                                       10,
                                       ksize=3,
                                       stride=1,
                                       pad=1,
                                       nobias=True)

    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Convolution2D(nobias=True)",
        graph=graph,
        inputs={
            x: ConstantVariable(vx.data, OrderNCHW).change_order(x.order).data
        },
        expected={
            y: ConstantVariable(vy.data, OrderNCHW).change_order(y.order).data
        })
Example No. 30
def template(n=2,
             c_in=4,
             h_in=6,
             w_in=8,
             c_out=10,
             ksize=3,
             stride=1,
             pad=0,
             nobias=True,
             EPS=1e-5,
             description=""):
    link = chainer.links.Convolution2D(c_in,
                                       c_out,
                                       ksize=ksize,
                                       stride=stride,
                                       pad=pad,
                                       nobias=nobias)
    link.W.data = np.ones(link.W.shape).astype(np.float32)
    vx = chainer.Variable(np.ones((n, c_in, h_in, w_in)).astype(np.float32))
    vy = link(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] L.Convolution2D {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=EPS)
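
With all-ones input and weights and no padding, every output element of the convolution is just the window size, which makes the expected values easy to check by hand. A sketch of that invariant, run against Chainer directly (the shapes match the template's defaults):

import numpy as np
import chainer

link = chainer.links.Convolution2D(4, 10, ksize=3, nobias=True)
link.W.data = np.ones(link.W.shape).astype(np.float32)

vx = chainer.Variable(np.ones((2, 4, 6, 8)).astype(np.float32))
vy = link(vx)

# Each output element sums c_in * ksize * ksize ones: 4 * 3 * 3 = 36.
assert np.allclose(vy.data, 4 * 3 * 3)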