Example #1
    def check_backward_consistency_regression(self, x_data, gy_data,
                                              use_cudnn='always'):
        # Regression test against the two-dimensional average pooling layer.

        if len(self.dims) != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = cuda.get_array_module(x_data)

        # Backward computation for N-dimensional average pooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            func_nd = functions.AveragePoolingND(self.ndim, ksize,
                                                 stride=stride, pad=pad)
            y_nd = func_nd.apply((x_nd,))[0]
            y_nd.grad = gy_data
            y_nd.backward()

        # Backward computation for two-dimensional average pooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        with chainer.using_config('use_cudnn', use_cudnn):
            func_2d = functions.AveragePooling2D(ksize, stride=stride, pad=pad,
                                                 cover_all=False)
            y_2d = func_2d.apply((x_2d,))[0]
            y_2d.grad = gy_data
            y_2d.backward()

        # Test that the gradients of the two implementations are close enough.
        testing.assert_allclose(x_nd.grad, x_2d.grad)
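For reference, a minimal standalone sketch of the same N-D-versus-2-D consistency idea, here comparing forward outputs through the public average_pooling_nd and average_pooling_2d functions (the input shape and pooling parameters are illustrative assumptions, not taken from the test class above):

import numpy
from chainer import functions, testing

# Random 2-D input: batch of 2, 3 channels, 8x8 spatial extent (assumed shape).
x = numpy.random.uniform(-1, 1, (2, 3, 8, 8)).astype(numpy.float32)

# Forward pass through the N-dimensional and the two-dimensional implementations.
y_nd = functions.average_pooling_nd(x, ksize=2, stride=2, pad=0)
y_2d = functions.average_pooling_2d(x, ksize=2, stride=2, pad=0)

# The two implementations should also agree on the forward output.
testing.assert_allclose(y_nd.data, y_2d.data)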
Example #2
    def check_backward(self, x_data, y_grad, use_cudnn=True):
        # Old-style (pre-Chainer-v2) API: cuDNN use was selected with a
        # use_cudnn argument passed to the function's constructor.
        gradient_check.check_backward(
            functions.AveragePoolingND(self.ndim, self.ksize, self.stride,
                                       self.pad, use_cudnn=use_cudnn),
            x_data, y_grad, **self.check_backward_options)
    def check_backward(self, x_data, y_grad, use_cudnn='always'):
        # New-style API: cuDNN use is selected through the 'use_cudnn'
        # configuration key rather than a constructor argument.
        with chainer.using_config('use_cudnn', use_cudnn):
            gradient_check.check_backward(
                functions.AveragePoolingND(self.ndim, self.ksize, self.stride,
                                           self.pad),
                x_data, y_grad, dtype=numpy.float64,
                **self.check_backward_options)
    def test_cover_all_not_supported(self):
        # AveragePoolingND does not support cover_all, so requesting it must
        # raise ValueError at construction time.
        with self.assertRaises(ValueError):
            functions.AveragePoolingND(3, 3, cover_all=True)
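A self-contained sketch of the configuration-based pattern used in the second check_backward above; the shapes, tolerances, and the 'never' setting are illustrative assumptions rather than values from the test suite:

import numpy
import chainer
from chainer import functions, gradient_check

# Assumed toy case: 3-D average pooling over a 4x4x4 volume with ksize=stride=2.
x = numpy.random.uniform(-1, 1, (2, 3, 4, 4, 4)).astype(numpy.float32)
gy = numpy.random.uniform(-1, 1, (2, 3, 2, 2, 2)).astype(numpy.float32)

def f(x):
    # Functional interface; cuDNN use is taken from the surrounding config.
    return functions.average_pooling_nd(x, ksize=2, stride=2, pad=0)

# Post-v2 style: cuDNN behaviour is chosen via chainer.using_config instead of
# a use_cudnn constructor argument ('never' keeps this sketch on the CPU path).
with chainer.using_config('use_cudnn', 'never'):
    gradient_check.check_backward(f, x, gy, dtype=numpy.float64,
                                  atol=1e-4, rtol=1e-4)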