Example no. 1
    def check_backward_consistency_regression(self, x_data, gy_data):
        # Regression test against the two-dimensional unpooling layer.

        ndim = len(self.dims)
        if ndim != 2:
            return

        ksize = self.ksize
        stride = self.stride
        pad = self.pad
        xp = cuda.get_array_module(x_data)

        # Backward computation for N-dimensional unpooling layer.
        x_nd = chainer.Variable(xp.array(x_data))
        func_nd = functions.UnpoolingND(ndim, ksize, stride=stride,
                                        pad=pad, cover_all=self.cover_all)
        y_nd = func_nd(x_nd)
        y_nd.grad = gy_data
        y_nd.backward()

        # Backward computation for two-dimensional unpooling layer.
        x_2d = chainer.Variable(xp.array(x_data))
        func_2d = functions.Unpooling2D(ksize, stride=stride, pad=pad,
                                        cover_all=self.cover_all)
        y_2d = func_2d(x_2d)
        y_2d.grad = gy_data
        y_2d.backward()

        # Test that the two result gradients are close enough.
        opt = self.check_backward_options
        testing.assert_allclose(
            x_nd.grad, x_2d.grad, atol=opt['atol'], rtol=opt['rtol'])

    def check_backward(self, x_data, y_grad):
        # Numerical gradient check for the two-dimensional unpooling layer.
        gradient_check.check_backward(
            functions.Unpooling2D(self.ksize,
                                  outsize=self.outsize,
                                  cover_all=self.cover_all),
            x_data, y_grad,
            **self.check_backward_options)
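
For context, methods like the two above are usually driven by small test entry points in the same test class. The following is a minimal sketch, assuming the class stores the input array in self.x and the upstream gradient in self.gy, and that the usual Chainer test imports (chainer, cuda, functions, gradient_check, testing, and chainer.testing.attr) are in scope; these names are assumptions and do not appear in the excerpt above.

    def test_backward_cpu(self):
        # Run the numerical gradient check on CPU arrays.
        self.check_backward(self.x, self.gy)

    @attr.gpu
    def test_backward_gpu(self):
        # Repeat the same check on GPU arrays.
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))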