Example #1
    def check_forward_consistency(self, use_cudnn='always'):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_nd(x_cpu,
                                   W_cpu,
                                   b_cpu,
                                   stride=self.stride,
                                   pad=self.pad,
                                   outsize=self.outsize,
                                   dilate=self.dilate,
                                   groups=self.groups)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(cuda.to_gpu(self.b))
        with chainer.using_config('use_cudnn', use_cudnn):
            with chainer.using_config('autotune', self.autotune):
                y_gpu = F.deconvolution_nd(x_gpu,
                                           W_gpu,
                                           b_gpu,
                                           stride=self.stride,
                                           pad=self.pad,
                                           outsize=self.outsize,
                                           dilate=self.dilate,
                                           groups=self.groups)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(y_cpu.data, y_gpu.data.get(),
                                **self.test_forward_options)
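A note on the configuration used above: chainer.config.use_cudnn accepts the strings 'always', 'auto', and 'never'; the default argument 'always' requests cuDNN whenever it is available. A minimal sketch of forcing the non-cuDNN GPU path instead (assumes a CUDA-enabled setup; shapes are illustrative):

import numpy
import chainer
import chainer.functions as F
from chainer import cuda

x = cuda.to_gpu(numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32))
W = cuda.to_gpu(numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32))
with chainer.using_config('use_cudnn', 'never'):  # skip cuDNN kernels entirely
    y = F.deconvolution_nd(x, W, stride=1, pad=0)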
Example #2
 def test_estimated_outsize(self):
     x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
     W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
     stride = 1
     pad = 10
     with self.assertRaises(AssertionError):
         F.deconvolution_nd(x, W, stride=stride, pad=pad)
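When outsize is omitted, each output dimension is estimated as s * (n - 1) + k - 2 * p (see chainer.utils.conv.get_deconv_outsize), and the estimate must be positive; pad=10 above drives it negative, which is what triggers the AssertionError. A worked check of the arithmetic:

from chainer.utils import conv

# 1 * (4 - 1) + 2 - 2 * 10 = -15: not a valid output size
assert conv.get_deconv_outsize(4, 2, 1, 10) == -15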
Example #3
    def check_array_supplied(self, x_ary, W_ary, b_ary):
        y_ary = F.deconvolution_nd(x_ary, W_ary, b_ary)

        x_var = chainer.Variable(x_ary)
        W_var = chainer.Variable(W_ary)
        b_var = chainer.Variable(b_ary)
        y_var = F.deconvolution_nd(x_var, W_var, b_var)

        testing.assert_allclose(y_ary.data, y_var.data)
Example #4
    def test_bias(self):
        # dtype
        x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
        W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
        b = numpy.random.uniform(-1, 1, (2,)).astype(numpy.int32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W, b=b)

        # ndim
        x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
        W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
        b = numpy.random.uniform(-1, 1, (2, 2)).astype(numpy.float32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W, b=b)
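Both failures come from the same type check: b must be a one-dimensional array with the same dtype as x whose length equals the number of output channels, W.shape[1] (here 2, with the default groups=1). A passing counterpart, as a sketch:

import numpy
import chainer.functions as F

x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
b = numpy.random.uniform(-1, 1, (2,)).astype(numpy.float32)  # same dtype as x, length W.shape[1]
y = F.deconvolution_nd(x, W, b=b)  # passes the type check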
Example #5
 def f(*args):
     return F.deconvolution_nd(*args,
                               stride=self.stride,
                               pad=self.pad,
                               outsize=self.outsize,
                               dilate=self.dilate,
                               groups=self.groups)
Example #6
    def check_forward_consistency_regression(self,
                                             x_data,
                                             W_data,
                                             b_data,
                                             use_cudnn='always'):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(y_nd.data, y_2d.data,
                                **self.test_forward_options)
Example #7
    def check_forward_consistency_regression(self, backend_config):
        inputs = self.generate_inputs()
        if self.nobias:
            x, W = inputs
            b = None
        else:
            x, W, b = inputs
        x = chainer.Variable(backend_config.get_array(x))
        W = chainer.Variable(backend_config.get_array(W))
        if b is not None:
            b = chainer.Variable(backend_config.get_array(b))

        use_cudnn = backend_config.use_cudnn

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x,
                                      W,
                                      b,
                                      stride=self.stride,
                                      pad=self.pad,
                                      outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(y_nd.array, y_2d.array,
                                **self.check_forward_options)
Example #8
    def check_forward_consistency(self, use_cudnn=True):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
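        # Note: use_cudnn as a function argument is the pre-v2 Chainer API;
        # later versions control this via chainer.using_config('use_cudnn', ...).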
        y_cpu = F.deconvolution_nd(
            x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=use_cudnn)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(cuda.to_gpu(self.b))
        y_gpu = F.deconvolution_nd(
            x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, use_cudnn=use_cudnn)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(
            y_cpu.data, y_gpu.data.get(), **self.test_forward_options)
Example #9
 def forward(self, inputs, device):
     if self.nobias:
         x, W = inputs
         b = None
     else:
         x, W, b = inputs
     y = F.deconvolution_nd(
         x, W, b, stride=self.stride, pad=self.pad,
         outsize=self.outsize, dilate=self.dilate,
         groups=self.groups)
     return y,
Example #10
 def forward_expected(self, link, inputs):
     x, = inputs
     W = link.W
     b = link.b
     y = F.deconvolution_nd(x,
                            W,
                            b,
                            outsize=self.outsize,
                            stride=self.stride,
                            pad=self.pad,
                            groups=self.groups)
     return y.array,
Example #11
    def check_forward_consistency_regression(self, x_data, W_data, b_data,
                                             use_cudnn=True):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

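        # use_cudnn as a keyword argument is the pre-v2 Chainer API;
        # later versions use chainer.using_config('use_cudnn', ...) instead.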
        y_nd = F.deconvolution_nd(x, W, b, stride=self.stride, pad=self.pad,
                                  outsize=self.outsize, use_cudnn=use_cudnn)
        y_2d = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad,
                                  outsize=self.outsize, use_cudnn=use_cudnn)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.test_forward_options)
Example #12
    def __call__(self, x):
        """Applies N-dimensional convolution layer.

        Args:
            x (~chainer.Variable): Input image.

        Returns:
            ~chainer.Variable: Output of convolution.

        """
        return deconvolution_nd(
            convolution_nd(x, self.W, self.b, self.stride, self.pad), self.W,
            self.b, self.stride, self.pad)
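Because deconvolution with the same stride and pad inverts the shape arithmetic of the convolution, the round trip above maps an input back to its original spatial shape when the sizes are compatible. A standalone sketch of that shape identity (shapes are illustrative, not from the original):

import numpy
import chainer.functions as F

x = numpy.random.uniform(-1, 1, (1, 3, 8)).astype(numpy.float32)
# A (C, C, k) weight is valid for both convolution_nd and deconvolution_nd.
W = numpy.random.uniform(-1, 1, (3, 3, 3)).astype(numpy.float32)
h = F.convolution_nd(x, W, stride=1, pad=1)    # (8 + 2*1 - 3) + 1 = 8
y = F.deconvolution_nd(h, W, stride=1, pad=1)  # 1*(8 - 1) + 3 - 2*1 = 8
assert y.shape == x.shape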
Example #13
    def check_forward_consistency(self, use_cudnn='always'):
        x_cpu = chainer.Variable(self.x)
        W_cpu = chainer.Variable(self.W)
        b_cpu = None if self.nobias else chainer.Variable(self.b)
        y_cpu = F.deconvolution_nd(
            x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
            outsize=self.outsize, dilate=self.dilate,
            groups=self.groups)

        x_gpu = chainer.Variable(cuda.to_gpu(self.x))
        W_gpu = chainer.Variable(cuda.to_gpu(self.W))
        b_gpu = None if self.nobias else chainer.Variable(cuda.to_gpu(self.b))
        with chainer.using_config('use_cudnn', use_cudnn):
            with chainer.using_config('autotune', self.autotune):
                y_gpu = F.deconvolution_nd(
                    x_gpu, W_gpu, b_gpu, stride=self.stride, pad=self.pad,
                    outsize=self.outsize, dilate=self.dilate,
                    groups=self.groups)

        self.assertEqual(y_cpu.data.dtype, self.x_dtype)
        self.assertEqual(y_gpu.data.dtype, self.x_dtype)
        testing.assert_allclose(
            y_cpu.data, y_gpu.data.get(), **self.test_forward_options)
Example #14
    def check_forward_consistency_regression(self, x_data, W_data, b_data,
                                             use_cudnn='always'):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)

        with chainer.using_config('use_cudnn', use_cudnn):
            y_nd = F.deconvolution_nd(x, W, b, stride=self.stride,
                                      pad=self.pad, outsize=self.outsize,
                                      dilate=self.dilate)
            y_2d = F.deconvolution_2d(x, W, b, stride=self.stride,
                                      pad=self.pad, outsize=self.outsize,
                                      dilate=self.dilate)

        testing.assert_allclose(
            y_nd.data, y_2d.data, **self.test_forward_options)
Example #15
    def forward(self, x):
        if self.W.array is None:
            self._initialize_params(x.shape[1])

        # Pad only the spatial axes; batch and channel axes are left as-is.
        pad_width = [(0, 0), (0, 0)] + [(p, p) for p in self.pad]
        x = F.pad(x, pad_width, self.pad_mode)

        return F.deconvolution_nd(x,
                                  self.W,
                                  b=self.b,
                                  stride=self.stride,
                                  pad=0,
                                  outsize=self.outsize,
                                  dilate=self.dilate,
                                  groups=self.groups)
Example #16
 def forward_expected(self, inputs):
     """
     Current forward_expected implementation depends on
     F.deconvolution_nd itself and thus it's only capable
     of checking consistency between backends, not absolute
     correctness of computations
     """
     if self.nobias:
         x, W = inputs
         b = None
     else:
         x, W, b = inputs
     y_expected = F.deconvolution_nd(
         x, W, b, stride=self.stride, pad=self.pad,
         outsize=self.outsize, dilate=self.dilate,
         groups=self.groups)
     return y_expected.array,
Example #17
    def test_data_and_weight(self):
        # dtype of data
        x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.int32)
        W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W)

        # dtype of weight
        x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
        W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.int32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W)

        # ndim of weight
        x = numpy.random.uniform(-1, 1, (2, 3, 4, 4)).astype(numpy.float32)
        W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W)

        # shapes of data and weight
        x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
        W = numpy.random.uniform(-1, 1, (2, 2, 2)).astype(numpy.float32)
        with self.assertRaises(type_check.InvalidType):
            F.deconvolution_nd(x, W)
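For contrast, a call satisfying every constraint exercised above (floating-point dtypes, W.ndim == x.ndim, and W.shape[0] == x.shape[1]) might look like:

import numpy
import chainer.functions as F

x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
y = F.deconvolution_nd(x, W)
assert y.shape == (2, 2, 5)  # out = 1*(4 - 1) + 2 - 0 = 5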
Example #18
 def test_deconv3d(self):
     (x, W, b) = self._get_data(3)
     testing.assert_allclose(
         F.deconvolution_nd(x, W, b).data, F.deconvolution_3d(x, W, b).data)
Example #19
 def f(*args):
     y = F.deconvolution_nd(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
         dilate=self.dilate, groups=self.groups)
     return y * y  # make the function nonlinear
Example #20
 def test_supplied_outsize(self):
     x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
     W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
     outsize = (10,)
     with self.assertRaises(type_check.InvalidType):
         F.deconvolution_nd(x, W, outsize=outsize)
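A supplied outsize is type-checked for consistency: convolving an output of that size with the same ksize, stride, and pad must reproduce x's spatial size. With stride 1 the only valid value here is (5,); a sketch:

import numpy
import chainer.functions as F

x = numpy.random.uniform(-1, 1, (2, 3, 4)).astype(numpy.float32)
W = numpy.random.uniform(-1, 1, (3, 2, 2)).astype(numpy.float32)
y = F.deconvolution_nd(x, W, outsize=(5,))  # (5 - 2) // 1 + 1 == 4 == x.shape[2]
assert y.shape == (2, 2, 5)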
Example #21
 def f(*args):
     return F.deconvolution_nd(*args, stride=self.stride, pad=self.pad,
                               outsize=self.outsize)
Example #22
 def forward(self):
     x = chainer.Variable(self.x)
     W = chainer.Variable(self.W)
     return F.deconvolution_nd(x, W, None, stride=1, pad=1)
Example #23
 def f(*args):
     return F.deconvolution_nd(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
         dilate=self.dilate, groups=self.groups)
Example #24
 def f(*args):
     y = F.deconvolution_nd(
         *args, stride=self.stride, pad=self.pad, outsize=self.outsize)
     return y * y  # make the function nonlinear