    def check_backward_consistency_regression(self, x_data, gy_data,
                                              use_cudnn='always'):
        # Regression test against the two-dimensional max pooling layer.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = cuda.get_array_module(x_data)

        # Backward computation for N-dimensional max pooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            func_nd = functions.MaxPoolingND(self.ndim, ksize, stride=stride,
                                             pad=pad, cover_all=self.cover_all)
            # Run forward and backward inside the config scope so that the
            # use_cudnn setting is actually in effect for both passes.
            y_nd = func_nd.apply((x_nd,))[0]
            y_nd.grad = gy_data
            y_nd.backward()

        # Backward computation for two-dimensional max pooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            func_2d = functions.MaxPooling2D(ksize, stride=stride, pad=pad,
                                             cover_all=self.cover_all)
            y_2d = func_2d.apply((x_2d,))[0]
            y_2d.grad = gy_data
            y_2d.backward()

        # Check that the gradients of the two layers are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad)
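
    # Illustrative drivers (an assumption, not part of the original file):
    # a typical Chainer test class would invoke the checker roughly like
    # this, with `attr.gpu` from chainer.testing marking the GPU variant
    # and `cuda.to_gpu` moving the fixture arrays onto the device.
    def test_consistency_regression_backward_cpu(self):
        self.check_backward_consistency_regression(
            self.x, self.gy, use_cudnn='never')

    @attr.gpu
    def test_consistency_regression_backward_gpu(self):
        self.check_backward_consistency_regression(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy))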

    def test_backward_cpu_more_than_once(self):
        # Backward may be called more than once on the same function object;
        # the max indexes computed in the forward pass must be retained.
        func = functions.MaxPoolingND(
            self.ndim, self.ksize, stride=self.stride, pad=self.pad,
            cover_all=self.cover_all)
        func.apply((self.x,))
        func.backward((self.x,), (self.gy,))
        func.backward((self.x,), (self.gy,))

    def check_backward(self, x_data, y_grad, use_cudnn='always'):
        with chainer.using_config('use_cudnn', use_cudnn):
            gradient_check.check_backward(
                functions.MaxPoolingND(
                    self.ndim, self.ksize, stride=self.stride, pad=self.pad,
                    cover_all=self.cover_all),
                x_data, y_grad, dtype='d', **self.check_backward_options)
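
    # A minimal sketch (assumed, not in the original file) of how
    # check_backward would be driven; `condition.retry` from
    # chainer.testing reruns a flaky numerical test before failing it.
    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy, use_cudnn='never')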