Example #1
    def check_backward(self, x_data, W_data, b_data, y_grad):
        xp = cuda.get_array_module(x_data)

        if not self.c_contiguous:
            x_data = xp.asfortranarray(x_data)
            W_data = xp.asfortranarray(W_data)
            y_grad = xp.asfortranarray(y_grad)
            self.assertFalse(x_data.flags.c_contiguous)
            self.assertFalse(W_data.flags.c_contiguous)
            self.assertFalse(y_grad.flags.c_contiguous)
            if b_data is not None:
                # Build a strided view so b_data keeps its values but is
                # no longer C-contiguous (asfortranarray is a no-op on a
                # 1-D array).
                b = xp.empty((len(b_data) * 2, ), dtype=self.b.dtype)
                b[::2] = b_data
                b_data = b[::2]
                self.assertFalse(b_data.flags.c_contiguous)

        args = (x_data, W_data)
        if b_data is not None:
            args = args + (b_data, )

        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                gradient_check.check_backward(
                    convolution_2d.Convolution2DFunction(
                        self.stride, self.pad, self.cover_all), args, y_grad,
                    **self.check_backward_options)
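
The bias handling above is worth a note: for a 1-D array, asfortranarray
returns it unchanged (it is both C- and Fortran-contiguous), so the test
instead writes the values into every other slot of a double-length buffer
and re-slices, which yields a view with the same values but a non-unit
stride. A minimal standalone sketch, assuming plain NumPy:

    import numpy as np

    # Strided-view trick from check_backward above: same values, but the
    # view has a stride of two elements, so it is not C-contiguous.
    b_data = np.arange(4, dtype=np.float32)
    buf = np.empty((len(b_data) * 2,), dtype=b_data.dtype)
    buf[::2] = b_data                      # fill the even-indexed slots
    b_view = buf[::2]                      # strided view over the same buffer
    assert not b_view.flags.c_contiguous
    assert np.array_equal(b_view, b_data)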
Example #2
    def check_forward(self, x_data, W_data, b_data):
        # Summing a depthwise convolution's output (and its bias) over
        # the input-channel groups should reproduce convolution_2d.
        args1 = (x_data, W_data)
        args2 = (x_data, W_data)
        if b_data is not None:
            args1 = args1 + (b_data, )
            b_data = sum(numpy.split(b_data, W_data.shape[1]))
            args2 = args2 + (b_data, )

        f1 = depthwise_convolution_2d.DepthwiseConvolution2D(
            self.stride, self.pad)
        y1 = f1(*args1)
        arys = numpy.split(y1.data, self.W.shape[1], axis=1)
        y1 = sum(arys)

        f2 = convolution_2d.Convolution2DFunction(self.stride, self.pad)
        y2 = f2.apply(args2)[0].data
        testing.assert_allclose(y1, y2, **self.check_forward_options)
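
In functional form, the identity this test checks is that summing the
depthwise output over its C input-channel groups matches a plain
convolution with the same (M, C, kh, kw) filter bank. A sketch assuming
chainer.functions and the channel ordering the test relies on:

    import numpy as np
    import chainer.functions as F

    x = np.random.randn(2, 3, 7, 7).astype(np.float32)  # (N, C, H, W)
    W = np.random.randn(4, 3, 3, 3).astype(np.float32)  # (M, C, kh, kw)
    y_dw = F.depthwise_convolution_2d(x, W, stride=1, pad=1).data
    y_dw = sum(np.split(y_dw, 3, axis=1))                # sum the C groups
    y_conv = F.convolution_2d(x, W, stride=1, pad=1).data
    assert np.allclose(y_dw, y_conv, atol=1e-4)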
Example #3
    def check_forward(self, x_data, W_data, b_data):
        # If all the filters are the same,
        # the operation is equivalent to convolution_2d
        for i in moves.range(W_data.shape[1]):
            for j in moves.range(W_data.shape[2]):
                W_data[:, i, j, ...] = W_data[:, 0, 0, ...]
        args1 = (x_data, W_data)
        args2 = (x_data, W_data[:, 0, 0, ...])
        if b_data is not None:
            for i in moves.range(b_data.shape[1]):
                for j in moves.range(b_data.shape[2]):
                    b_data[:, i, j] = b_data[:, 0, 0]
            args1 = args1 + (b_data,)
            b_data2 = b_data[:, 0, 0]
            args2 = args2 + (b_data2,)

        f1 = local_convolution_2d.LocalConvolution2DFunction(self.stride)
        y1 = f1.apply(args1)[0].data

        f2 = convolution_2d.Convolution2DFunction(self.stride, 0)
        y2 = f2.apply(args2)[0].data
        testing.assert_allclose(y1, y2, **self.check_forward_options)
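
The weights of a local (unshared) convolution carry one filter per output
position, shape (c_O, h_O, w_O, c_I, kh, kw), and the bias one entry per
output channel and position, (c_O, h_O, w_O); when every position shares
one filter, the operation collapses to an ordinary unpadded convolution.
A sketch of that collapse, assuming Chainer's functional API:

    import numpy as np
    import chainer.functions as F

    x = np.random.randn(2, 3, 6, 6).astype(np.float32)
    W_one = np.random.randn(5, 3, 3, 3).astype(np.float32)  # (c_O, c_I, kh, kw)
    out_h = out_w = 4                                       # (6 - 3) // 1 + 1
    # Broadcast the single filter to every output position.
    W_local = np.broadcast_to(
        W_one[:, None, None], (5, out_h, out_w, 3, 3, 3)).copy()
    y_local = F.local_convolution_2d(x, W_local, stride=1).data
    y_conv = F.convolution_2d(x, W_one, stride=1, pad=0).data
    assert np.allclose(y_local, y_conv, atol=1e-4)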
Example #4
    def check_backward(self, x_data, W_data, b_data, y_grad):
        xp = cuda.get_array_module(x_data)

        # cuDNN < v3 does not support deterministic algorithms.
        # In that case, Chainer should raise errors.
        # As this behavior is tested by TestConvolution2DCudnnCall,
        # we simply skip the test here.
        should_raise_error = ((xp is not numpy) and self.use_cudnn
                              and self.cudnn_deterministic
                              and cuda.cudnn_enabled
                              and cuda.cudnn.cudnn.getVersion() < 3000)
        if should_raise_error:
            return

        if not self.c_contiguous:
            x_data = xp.asfortranarray(x_data)
            W_data = xp.asfortranarray(W_data)
            y_grad = xp.asfortranarray(y_grad)
            self.assertFalse(x_data.flags.c_contiguous)
            self.assertFalse(W_data.flags.c_contiguous)
            self.assertFalse(y_grad.flags.c_contiguous)
            if b_data is not None:
                b = xp.empty((len(b_data) * 2, ), dtype=self.b.dtype)
                b[::2] = b_data
                b_data = b[::2]
                self.assertFalse(b_data.flags.c_contiguous)

        args = (x_data, W_data)
        if b_data is not None:
            args = args + (b_data, )

        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                gradient_check.check_backward(
                    convolution_2d.Convolution2DFunction(
                        self.stride, self.pad, self.cover_all), args, y_grad,
                    **self.check_backward_options)
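
The only difference from Example #1 is the guard at the top: on GPU, with
cuDNN requested, deterministic mode on, and an installed cuDNN older than
v3, the function is expected to raise rather than compute, and that path
is asserted elsewhere, so the gradient check bails out early. Extracted as
a standalone predicate (a sketch assuming the same legacy cuda.cudnn
bindings, where getVersion() returns e.g. 2000 for cuDNN v2):

    import numpy
    from chainer import cuda

    # Hypothetical helper mirroring the early-return guard above.
    def _expects_cudnn_error(xp, use_cudnn, cudnn_deterministic):
        return ((xp is not numpy) and use_cudnn and cudnn_deterministic
                and cuda.cudnn_enabled
                and cuda.cudnn.cudnn.getVersion() < 3000)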
Example #5
    def check_backward(self, x_data, W_data, b_data, y_grad):
        if not self.c_contiguous:
            x_data = _asfortranarray(x_data)
            W_data = _asfortranarray(W_data)
            y_grad = _asfortranarray(y_grad)
            self.assertFalse(x_data.flags.c_contiguous)
            self.assertFalse(W_data.flags.c_contiguous)
            self.assertFalse(y_grad.flags.c_contiguous)
            if b_data is not None:
                xp = cuda.get_array_module(b_data)
                b = xp.empty((len(b_data) * 2, ), dtype=self.b.dtype)
                b[::2] = b_data
                b_data = b[::2]
                self.assertFalse(b_data.flags.c_contiguous)

        args = (x_data, W_data)
        if b_data is not None:
            args = args + (b_data, )

        gradient_check.check_backward(
            convolution_2d.Convolution2DFunction(
                self.stride, self.pad, self.use_cudnn),
            args, y_grad, eps=1e-2)
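
Two things distinguish this variant: use_cudnn is passed to the function
constructor rather than through chainer.using_config (the pre-v2 Chainer
API), and the Fortran-ordering goes through a module-level _asfortranarray
helper whose body is not shown here. A plausible minimal definition (an
assumption, not the verbatim original) just dispatches on the array module:

    from chainer import cuda

    # Assumed definition of the _asfortranarray helper used above; the
    # original may do more (e.g. special-case float16).
    def _asfortranarray(x):
        xp = cuda.get_array_module(x)
        return xp.asfortranarray(x)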