Example n. 1
0
    def check_forward_consistency(self, backend_config):
        """Check that convolution_nd gives the same result on CPU Variables
        and on arrays converted to the target backend."""
        inputs = self.generate_inputs()
        if self.nobias:
            (x, W), b = inputs, None
        else:
            x, W, b = inputs

        # Shared convolution hyper-parameters taken from the test case.
        conv_kwargs = dict(
            stride=self.stride, pad=self.pad,
            cover_all=self.cover_all, dilate=self.dilate,
            groups=self.groups)

        # Reference result computed from plain (CPU) Variables.
        y_expected = F.convolution_nd(
            chainer.Variable(x),
            chainer.Variable(W),
            chainer.Variable(b) if b is not None else None,
            **conv_kwargs)

        # Same computation on arrays moved to the backend under test.
        x = backend_config.get_array(x)
        W = backend_config.get_array(W)
        b = None if self.nobias else backend_config.get_array(b)
        with backend_config:
            y_actual = F.convolution_nd(x, W, b, **conv_kwargs)

        testing.assert_allclose(
            y_expected.array, y_actual.array, **self.check_forward_options)
Example n. 2
0
    def check_array_supplied(self, x_ary, W_ary, b_ary):
        """convolution_nd must accept raw arrays as well as Variables,
        and both forms must produce identical outputs."""
        y_from_arrays = functions.convolution_nd(x_ary, W_ary, b_ary)

        y_from_variables = functions.convolution_nd(
            chainer.Variable(x_ary),
            chainer.Variable(W_ary),
            chainer.Variable(b_ary))

        testing.assert_allclose(y_from_arrays.data, y_from_variables.data)
Example n. 3
0
    def check_forward_consistency(self, nobias=False, use_cudnn=False):
        """Check that the CPU and GPU forward passes agree."""
        def run(x, W, b):
            # One forward pass with the hyper-parameters of this test case.
            return functions.convolution_nd(
                x, W, b, stride=self.stride, pad=self.pad,
                use_cudnn=use_cudnn, cover_all=self.cover_all)

        y_cpu = run(
            chainer.Variable(self.x),
            chainer.Variable(self.W),
            None if nobias else chainer.Variable(self.b))

        y_gpu = run(
            chainer.Variable(cuda.to_gpu(self.x)),
            chainer.Variable(cuda.to_gpu(self.W)),
            None if nobias else chainer.Variable(cuda.to_gpu(self.b)))

        testing.assert_allclose(
            y_cpu.data, y_gpu.data, **self.check_forward_options)
Example n. 4
0
    def check_forward_consistency(self, nobias=False, use_cudnn='never'):
        """Check CPU vs. GPU agreement under a given cuDNN configuration."""
        # CPU reference result.
        y_cpu = F.convolution_nd(
            chainer.Variable(self.x),
            chainer.Variable(self.W),
            None if nobias else chainer.Variable(self.b),
            stride=self.stride, pad=self.pad, cover_all=self.cover_all)

        # GPU result under the requested cuDNN / autotune configuration.
        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if nobias else chainer.Variable(cuda.to_gpu(self.b))
        with chainer.using_config('use_cudnn', use_cudnn), \
                chainer.using_config('autotune', self.autotune):
            y_gpu = F.convolution_nd(
                x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
                cover_all=self.cover_all)

        testing.assert_allclose(
            y_cpu.data, y_gpu.data, **self.check_forward_options)
Example n. 5
0
 def test_2(self):
     # Regression test: backward must work when out_channels == 1.
     batch, in_channels, out_channels = 2, 3, 1  # out_channels == 1 is the important bit
     x = numpy.ones((batch, in_channels, 4), numpy.float32)
     w = numpy.ones((out_channels, in_channels, 3), numpy.float32)
     y = F.convolution_nd(x, chainer.Variable(w))
     z = F.sum(y)
     z.backward()
Example n. 6
0
 def forward_expected(self, inputs):
     """Reference forward computation used for comparison in tests."""
     b = None
     if self.nobias:
         x, W = inputs
     else:
         x, W, b = inputs
     out = F.convolution_nd(
         x, W, b, stride=self.stride, pad=self.pad,
         cover_all=self.cover_all, dilate=self.dilate,
         groups=self.groups)
     # Return a 1-tuple of raw arrays, as the test harness expects.
     return out.array,
Example n. 7
0
 def forward(self, inputs, device):
     """Run convolution_nd on the given inputs (bias is optional)."""
     b = None
     if self.nobias:
         x, W = inputs
     else:
         x, W, b = inputs
     result = F.convolution_nd(
         x, W, b, stride=self.stride, pad=self.pad,
         cover_all=self.cover_all, dilate=self.dilate,
         groups=self.groups)
     # The harness expects a tuple of outputs.
     return result,
Example n. 8
0
    def check_forward_consistency_regression(self, nobias=False):
        """convolution_nd on two spatial dims must match convolution_2d."""
        x = chainer.Variable(self.x)
        W = chainer.Variable(self.W)
        b = None if nobias else chainer.Variable(self.b)

        # Identical hyper-parameters for both implementations.
        common = dict(stride=self.stride, pad=self.pad,
                      use_cudnn=False, cover_all=self.cover_all)
        y_nd = functions.convolution_nd(x, W, b, **common)
        y_2d = functions.convolution_2d(x, W, b, **common)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.check_forward_options)
Example n. 9
0
    def check_forward_consistency_regression(self, nobias=False):
        """N-dimensional convolution should agree with the 2-D special case."""
        x = chainer.Variable(self.x)
        W = chainer.Variable(self.W)
        b = None if nobias else chainer.Variable(self.b)

        # Same hyper-parameters feed both implementations.
        kwargs = dict(stride=self.stride, pad=self.pad,
                      cover_all=self.cover_all, dilate=self.dilate,
                      groups=self.groups)
        with chainer.using_config('use_cudnn', 'never'):
            y_nd = F.convolution_nd(x, W, b, **kwargs)
            y_2d = F.convolution_2d(x, W, b, **kwargs)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.check_forward_options)
Example n. 10
0
    def check_forward_consistency_regression(self, backend_config):
        """convolution_nd must match convolution_2d on backend arrays."""
        inputs = self.generate_inputs()
        if self.nobias:
            x, W = inputs
            b = None
        else:
            x, W, b = inputs

        def as_var(arr):
            # Move the array to the target backend and wrap it in a Variable.
            return chainer.Variable(backend_config.get_array(arr))

        x, W = as_var(x), as_var(W)
        if b is not None:
            b = as_var(b)

        kwargs = dict(stride=self.stride, pad=self.pad,
                      cover_all=self.cover_all, dilate=self.dilate,
                      groups=self.groups)
        with chainer.using_config('use_cudnn', 'never'):
            y_nd = F.convolution_nd(x, W, b, **kwargs)
            y_2d = F.convolution_2d(x, W, b, **kwargs)

        testing.assert_allclose(
            y_nd.array, y_2d.array, **self.check_forward_options)
Example n. 11
0
 def f(*args):
     # Thin wrapper fixing the convolution hyper-parameters of this test case.
     return F.convolution_nd(*args, stride=self.stride, pad=self.pad,
                             cover_all=self.cover_all, dilate=self.dilate,
                             groups=self.groups)
Example n. 12
0
 def forward(self):
     # Bias-less convolution executed entirely on the GPU.
     gpu_x = chainer.Variable(cuda.to_gpu(self.x))
     gpu_W = chainer.Variable(cuda.to_gpu(self.W))
     return functions.convolution_nd(
         gpu_x, gpu_W, None, stride=self.stride, pad=self.pad,
         use_cudnn=self.use_cudnn)
Example n. 13
0
 def f(*args):
     # Square the output so the function under test is nonlinear.
     out = F.convolution_nd(
         *args, stride=self.stride, pad=self.pad,
         cover_all=self.cover_all, dilate=self.dilate,
         groups=self.groups)
     return out * out
Example n. 14
0
 def forward(self):
     # Bias-less GPU forward pass with default dilation/groups.
     gpu_x = chainer.Variable(cuda.to_gpu(self.x))
     gpu_W = chainer.Variable(cuda.to_gpu(self.W))
     return F.convolution_nd(
         gpu_x, gpu_W, None, stride=self.stride, pad=self.pad)
Example n. 15
0
 def f(*args):
     # Squared output makes the mapping nonlinear for gradient checks.
     out = F.convolution_nd(*args, stride=self.stride, pad=self.pad,
                            cover_all=self.cover_all)
     return out * out
Example n. 16
0
 def f(*args):
     # Fix stride/pad/cover_all from the test case; forward everything else.
     return F.convolution_nd(*args, stride=self.stride, pad=self.pad,
                             cover_all=self.cover_all)
Example n. 17
0
 def test_conv1d(self):
     # convolution_1d should be equivalent to convolution_nd on 1-D data.
     x, W, b = self._get_data(1)
     testing.assert_allclose(
         F.convolution_nd(x, W, b).data, F.convolution_1d(x, W, b).data)
Example n. 18
0
 def test_conv3d(self):
     # convolution_3d should be equivalent to convolution_nd on 3-D data.
     x, W, b = self._get_data(3)
     testing.assert_allclose(
         F.convolution_nd(x, W, b).data,
         F.convolution_3d(x, W, b).data)
Example n. 19
0
 def forward(self):
     # GPU forward pass without a bias term.
     inp = chainer.Variable(cuda.to_gpu(self.x))
     kernel = chainer.Variable(cuda.to_gpu(self.W))
     return F.convolution_nd(
         inp, kernel, None, stride=self.stride, pad=self.pad)