Example #1
    def check_negative(self, xp, func_name, input, eps, nout):
        # Should be differentiable
        func = getattr(self, '_func_{}'.format(func_name))
        grad_outputs = [
            xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
            for _ in range(nout)]

        def f():
            return func(input) * nout

        try:
            gradient_check.numerical_grad(
                f, (input,), grad_outputs, eps=eps,
                detect_nondifferentiable=True)
        except gradient_check.NondifferentiableError as e:
            raise AssertionError(
                'Function `{}` is expected to be differentiable, '
                'but determined to be non-differentiable.\n\n'
                'eps: {}\n'
                'input: {}\n'
                'xp: {}\n\n'
                '{}: {}'
                ''.format(
                    func_name, eps, input, xp.__name__,
                    e.__class__.__name__, e))
Example #2
    def check_backward(self, x_data, W_data, b_data, y_grad):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if b_data is None else chainer.Variable(b_data)
        y = functions.convolution_2d(x,
                                     W,
                                     b,
                                     stride=self.stride,
                                     pad=self.pad,
                                     use_cudnn=self.use_cudnn)

        y.grad = y_grad
        y.backward()

        func = y.creator
        if b is None:
            f = lambda: func.forward((x.data, W.data))
            gx, gW = gradient_check.numerical_grad(f, (x.data, W.data),
                                                   (y.grad, ),
                                                   eps=1e-2)
        else:
            f = lambda: func.forward((x.data, W.data, b.data))
            gx, gW, gb = gradient_check.numerical_grad(
                f, (x.data, W.data, b.data), (y.grad, ), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, W.grad)
        if b is not None:
            gradient_check.assert_allclose(gb, b.grad)
Example #3
    def check_backward(self, x_data, W_data, b_data, y_grad):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        if b_data is None:
            y = functions.maxout(x, W)
        else:
            b = chainer.Variable(b_data)
            y = functions.maxout(x, W, b)

        y.grad = y_grad
        y.backward()
        func = y.creator

        if b_data is None:
            f = lambda: func.forward((x.data, W.data))
            gx, gW = gradient_check.numerical_grad(f, (x.data, W.data),
                                                   (y.grad, ),
                                                   eps=1e-2)
        else:
            f = lambda: func.forward((x.data, W.data, b.data))
            gx, gW, gb = gradient_check.numerical_grad(
                f, (x.data, W.data, b.data), (y.grad, ), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad, atol=1e-2)
        gradient_check.assert_allclose(gW, W.grad, atol=1e-2)
        if b_data is not None:
            gradient_check.assert_allclose(gb, b.grad, atol=1e-2)
Example #4
    def check_negative(self, xp, func_name, inputs, eps, nout):
        # Should be differentiable
        func = getattr(self, '_func_{}'.format(func_name))
        grad_outputs = [
            xp.random.uniform(-1, 1, _.shape).astype(_.dtype) for _ in inputs
        ]

        def f():
            return func(*inputs) * nout

        try:
            gradient_check.numerical_grad(f,
                                          inputs,
                                          grad_outputs,
                                          eps=eps,
                                          detect_nondifferentiable=True)
        except gradient_check.NondifferentiableError as e:
            raise AssertionError(
                'Function `{}` is expected to be differentiable, '
                'but determined to be non-differentiable.\n\n'
                'eps: {}\n'
                'inputs: {}\n'
                'xp: {}\n\n'
                '{}: {}'
                ''.format(func_name, eps, inputs, xp.__name__,
                          e.__class__.__name__, e))
Example #5
    def check_positive(self, xp, func_name, input, eps, nout):
        # Should be non-differentiable
        func = getattr(self, '_func_{}'.format(func_name))
        grad_outputs = [
            xp.random.uniform(-1, 1, input.shape).astype(input.dtype)
            for _ in range(nout)
        ]

        def f():
            return func(input) * nout

        try:
            gradient_check.numerical_grad(f, (input, ),
                                          grad_outputs,
                                          eps=eps,
                                          detect_nondifferentiable=True)
        except gradient_check.NondifferentiableError:
            pass
        else:
            raise AssertionError(
                'Function `{}` is expected to be non-differentiable, '
                'but determined to be differentiable.\n\n'
                'eps: {}\n'
                'input: {}\n'
                'xp: {}\n'
                ''.format(func_name, eps, input, xp.__name__))
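Examples #1, #4, and #5 above exercise the detect_nondifferentiable option of gradient_check.numerical_grad. The standalone sketch below is not part of the quoted test suite; it is an assumption-level illustration of the same positive/negative pattern, probing a smooth function and a function with a kink at zero.

import numpy as np

from chainer import gradient_check

x = np.zeros((1,), dtype=np.float64)   # probe right at the kink of abs(x)
gy = np.ones_like(x)

def f_smooth():
    # Differentiable everywhere; detection should not raise.
    return x ** 2,

def f_kinked():
    # Not differentiable at 0; detection is expected to raise.
    return np.abs(x),

gradient_check.numerical_grad(
    f_smooth, (x,), (gy,), eps=1e-2, detect_nondifferentiable=True)

try:
    gradient_check.numerical_grad(
        f_kinked, (x,), (gy,), eps=1e-2, detect_nondifferentiable=True)
except gradient_check.NondifferentiableError:
    pass  # expected for the kinked function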
Example #6
    def check_backward(self, x0_data, x1_data, t_data):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        t = chainer.Variable(t_data)
        loss = functions.contrastive(x0, x1, t, self.margin)
        loss.backward()
        self.assertEqual(None, t.grad)

        func = loss.creator
        f = lambda: func.forward((x0.data, x1.data, t.data))
        gx0, = gradient_check.numerical_grad(f, (x0.data, ), (1, ))
        gx1, = gradient_check.numerical_grad(f, (x1.data, ), (1, ))

        gradient_check.assert_allclose(gx0, x0.grad, rtol=1e-4, atol=1e-4)
        gradient_check.assert_allclose(gx1, x1.grad, rtol=1e-4, atol=1e-4)
Example #7
    def check_backward(self, x0_data, x1_data, t_data, use_cudnn=True):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        t = chainer.Variable(t_data)
        loss = contrastive(x0, x1, t, use_cudnn)
        loss.backward()
        self.assertEqual(None, t.grad)

        func = loss.creator
        f = lambda: func.forward((x0.data, x1.data, t.data))
        gx0, = gradient_check.numerical_grad(f, (x0.data, ), (1, ))
        gx1, = gradient_check.numerical_grad(f, (x1.data, ), (1, ))

        gradient_check.assert_allclose(gx0, x0.grad)
        gradient_check.assert_allclose(gx1, x1.grad)
Example #8
    def check_backward(self, h_data, x_data, y_grad):
        h = chainer.Variable(h_data)
        x = chainer.Variable(x_data)
        y = self._forward(self.link, h, x)
        y.grad = y_grad
        y.backward()

        def f():
            return _gru(self.link, h_data, x_data),
        gx, = gradient_check.numerical_grad(f, (x.data,), (y_grad,))
        testing.assert_allclose(gx, x.grad, atol=1e-3)

        if isinstance(self.link, links.StatelessGRU):
            gh, = gradient_check.numerical_grad(f, (h.data,), (y_grad,))
            testing.assert_allclose(gh, h.grad, atol=1e-3)
Example #9
    def check_backward(self, h_data, x_data, y_grad):
        h = chainer.Variable(h_data)
        x = chainer.Variable(x_data)
        y = self._forward(self.link, h, x)
        y.grad = y_grad
        y.backward()

        def f():
            return _gru(self.link, h_data, x_data),
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))
        testing.assert_allclose(gx, x.grad, atol=1e-3)

        if isinstance(self.link, links.GRU):
            gh, = gradient_check.numerical_grad(f, (h.data,), (y.grad,))
            testing.assert_allclose(gh, h.grad, atol=1e-3)
Example #10
    def check_different_eps(self, x, y):
        def f():
            if -1 < x < 1:
                return x.copy(),
            elif -2 < x < 2:
                return 2 * x,
            else:
                return 0,

        gx, = gradient_check.numerical_grad(f, (x, ), (y, ), eps=0.5)
        self.assertEqual(gx, 1.)
        gx, = gradient_check.numerical_grad(f, (x, ), (y, ), eps=1.5)
        self.assertEqual(gx, 2.)
        gx, = gradient_check.numerical_grad(f, (x, ), (y, ), eps=2.5)
        self.assertEqual(gx, 0.)
Example #11
    def check_backward(self, x0_data, x1_data, t_data, use_cudnn=True):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        t = chainer.Variable(t_data)
        loss = contrastive(x0, x1, t, use_cudnn)
        loss.backward()
        self.assertEqual(None, t.grad)

        func = loss.creator
        f = lambda: func.forward((x0.data, x1.data, t.data))
        gx0, = gradient_check.numerical_grad(f, (x0.data,), (1,))
        gx1, = gradient_check.numerical_grad(f, (x1.data,), (1,))

        gradient_check.assert_allclose(gx0, x0.grad)
        gradient_check.assert_allclose(gx1, x1.grad)
Example #12
    def check_different_eps(self, x, y):
        def f():
            if -1 < x < 1:
                return x.copy(),
            elif -2 < x < 2:
                return 2 * x,
            else:
                return 0,

        gx, = gradient_check.numerical_grad(f, (x,), (y,), eps=0.5)
        self.assertEqual(gx, 1.)
        gx, = gradient_check.numerical_grad(f, (x,), (y,), eps=1.5)
        self.assertEqual(gx, 2.)
        gx, = gradient_check.numerical_grad(f, (x,), (y,), eps=2.5)
        self.assertEqual(gx, 0.)
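The expected gradients in Examples #10 and #12 follow from the central difference that numerical_grad computes, (f(x + eps) - f(x - eps)) / (2 * eps): the step size eps decides which branch of the piecewise function is sampled. A hand-rolled sketch (assuming the probed input sits at x = 0, which is not stated in the quoted tests) reproduces the 1 / 2 / 0 sequence:

def piecewise(v):
    if -1 < v < 1:
        return v
    elif -2 < v < 2:
        return 2 * v
    else:
        return 0.0

x = 0.0
for eps in (0.5, 1.5, 2.5):
    grad = (piecewise(x + eps) - piecewise(x - eps)) / (2 * eps)
    print(eps, grad)   # 0.5 -> 1.0, 1.5 -> 2.0, 2.5 -> 0.0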
Example #13
    def check_backward(self, x0_data, x1_data, t_data):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        t = chainer.Variable(t_data)
        loss = functions.contrastive(x0, x1, t, self.margin)
        loss.backward()
        self.assertEqual(None, t.grad)

        func = loss.creator
        f = lambda: func.forward((x0.data, x1.data, t.data))
        gx0, = gradient_check.numerical_grad(f, (x0.data,), (1,))
        gx1, = gradient_check.numerical_grad(f, (x1.data,), (1,))

        gradient_check.assert_allclose(gx0, x0.grad, rtol=1e-4, atol=1e-4)
        gradient_check.assert_allclose(gx1, x1.grad, rtol=1e-4, atol=1e-4)
Example #14
def check_backward(update, atom_data, adj_data, y_grad):
    """Check gradient of GGNNUpdate.

    This function is different from other backward tests.
    Because of GRU, the reset_state method has to be called explicitly
    before gradient calculation.

    Args:
        update (callable):
        atom_data (numpy.ndarray):
        adj_data (numpy.ndarray):
        y_grad (numpy.ndarray):
    """
    atom = chainer.Variable(atom_data)
    update.reset_state()
    y = update(atom, adj_data)
    y.grad = y_grad
    y.backward()

    def f():
        update.reset_state()
        return update(atom_data, adj_data).data,

    gx, = gradient_check.numerical_grad(f, (atom.data, ), (y.grad, ))
    numpy.testing.assert_allclose(cuda.to_cpu(gx),
                                  cuda.to_cpu(atom.grad),
                                  atol=1e-3,
                                  rtol=1e-3)
    return gx
Example #15
def check_backward(update, atom_data, adj_data, y_grad):
    # type: (MPNNUpdate, numpy.ndarray, numpy.ndarray, numpy.ndarray) -> None
    """Check gradient of MPNNUpdate.

    This function is different from other backward tests.
    Because of GRU, the reset_state method has to be called explicitly
    before gradient calculation.

    Args:
        update (callable):
        atom_data (numpy.ndarray):
        adj_data (numpy.ndarray):
        y_grad (numpy.ndarray):
    """
    atom = chainer.Variable(atom_data)
    adj = chainer.Variable(adj_data)
    update.reset_state()
    y = update(atom, adj)
    y.grad = y_grad
    y.backward()

    def f():
        # type: () -> numpy.ndarray
        update.reset_state()
        return update(atom_data, adj_data).data,

    gx, = gradient_check.numerical_grad(f, (atom.data, ), (y.grad, ))
    numpy.testing.assert_allclose(gx, atom.grad, atol=1e-3, rtol=1e-3)
Example #16
def check_backward(readout, atom_data, y_grad):
    # type: (Set2Set, numpy.ndarray, numpy.ndarray) -> None
    """Check gradient of Set2Set.

    This function is different from other backward tests.
    Because of LSTM, the reset_state method has to be called explicitly
    before gradient calculation.

    Args:
        readout:
        atom_data:
        y_grad:
    """
    atom = chainer.Variable(atom_data)
    readout.reset_state()
    y = readout(atom)
    y.grad = y_grad
    y.backward()

    def f():
        readout.reset_state()
        return readout(atom_data).data,

    gx, = gradient_check.numerical_grad(f, (atom.data, ), (y.grad, ))
    numpy.testing.assert_allclose(gx, atom.grad, atol=1e-2, rtol=1e-2)
Example #17
    def check_backward(self, t_data, xs_data, l_length, x_length, grad, gx):
        xs = tuple(chainer.Variable(x_data) for x_data in xs_data)
        t = chainer.Variable(t_data)

        loss = functions.connectionist_temporal_classification(
            xs,
            t,
            2,
            input_length=chainer.Variable(x_length),
            label_length=chainer.Variable(l_length))

        loss.grad = grad
        loss.backward()

        func = loss.creator
        xs_data = tuple(x.data for x in xs)
        f = lambda: func.forward((
            x_length,
            l_length,
            t.data,
        ) + xs_data)
        gx_0, gx_1, gx_2, gx_3 = gradient_check.numerical_grad(
            f, (xs_data), (gx, ))
        gradient_check.assert_allclose(xs[0].grad, gx_0, atol=1e-04)
        gradient_check.assert_allclose(xs[1].grad, gx_1, atol=1e-04)
        gradient_check.assert_allclose(xs[2].grad, gx_2, atol=1e-04)
        gradient_check.assert_allclose(xs[3].grad, gx_3, atol=1e-04)
Example #18
    def check_reference(self, x):
        # A returned value and an input refer to the same memory.
        # See issue #488
        def func():
            return x,
        gx, = gradient_check.numerical_grad(func, (x,), (1,))
        testing.assert_allclose(cuda.to_cpu(gx), 1)
Example #19
def check_backward(readout, atom_data, y_grad):
    # type: (Set2Set, numpy.ndarray, numpy.ndarray) -> None
    """Check gradient of Set2Set.

    This function is different from other backward tests.
    Because of LSTM, the reset_state method has to be called explicitly
    before gradient calculation.

    Args:
        readout:
        atom_data:
        y_grad:
    """
    atom = chainer.Variable(atom_data)
    readout.reset_state()
    y = readout(atom)
    y.grad = y_grad
    y.backward()

    def f():
        readout.reset_state()
        return readout(atom_data).data,

    gx, = gradient_check.numerical_grad(f, (atom.data, ), (y.grad, ))
    numpy.testing.assert_allclose(cuda.to_cpu(gx),
                                  cuda.to_cpu(atom.grad),
                                  atol=1e-2,
                                  rtol=1e-2)
Example #20
    def check_backward(self, x_data, t_data, y_grad):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        W = self.link.W

        y = self.link(x, t)
        y.grad = y_grad
        y.backward()

        # Fix the negative samples drawn in the forward pass so that the
        # numerical gradient below evaluates exactly the same function.
        negative_sampling.NegativeSamplingFunction.samples = y.creator.samples

        def f():
            return self.link(x, t).data,

        gx, gW = gradient_check.numerical_grad(f, (x.data, W.data), (y.grad, ),
                                               eps=1e-2)
        del negative_sampling.NegativeSamplingFunction.samples  # clean up

        gradient_check.assert_allclose(cuda.to_cpu(gx),
                                       cuda.to_cpu(x.grad),
                                       atol=1.e-4)
        gradient_check.assert_allclose(cuda.to_cpu(gW),
                                       cuda.to_cpu(W.grad),
                                       atol=1.e-4)
Example #21
    def check_reference(self, x):
        # A returned value and an input refer to the same memory.
        # See issue #488
        def func():
            return x,
        gx, = gradient_check.numerical_grad(func, (x,), (1,))
        testing.assert_allclose(cuda.to_cpu(gx), 1)
Example #22
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.link(x)
        y.grad = y_grad
        y.backward()

        f = lambda: (self.link(x).data,)
        gx, gW = gradient_check.numerical_grad(
            f, (x.data, self.link.W.data), (y.grad,), eps=1e-2)
        if not self.nobias:
            gb, = gradient_check.numerical_grad(
                f, (self.link.b.data,), (y.grad,), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, self.link.W.grad)
        if not self.nobias:
            gradient_check.assert_allclose(gb, self.link.b.grad)
Example #23
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.link(x)
        y.grad = y_grad
        y.backward()

        f = lambda: (self.link(x).data,)
        gW, = gradient_check.numerical_grad(f, (self.link.W.data,), (y.grad,))
        gradient_check.assert_allclose(gW, self.link.W.grad)
Example #24
    def check_backward(self, x_data, gy_data, use_cudnn=True):
        x = Variable(x_data)
        y = tanh(x, use_cudnn=use_cudnn)
        y.grad = gy_data
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,))
        assert_allclose(gx, x.grad)
Example #25
    def check_backward(self, x_data, gy_data, use_cudnn=True):
        x = chainer.Variable(x_data)
        y = functions.tanh(x, use_cudnn=use_cudnn)
        y.grad = gy_data
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = gradient_check.numerical_grad(f, (x.data, ), (y.grad, ))
        gradient_check.assert_allclose(gx, x.grad)
Example #26
    def check_backward(self, x_data, gy_data, use_cudnn=True):
        x = chainer.Variable(x_data)
        y = functions.tanh(x, use_cudnn=use_cudnn)
        y.grad = gy_data
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))
        gradient_check.assert_allclose(gx, x.grad)
Example #27
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.link(x)
        y.grad = y_grad
        y.backward()

        f = lambda: (self.link(x).data, )
        gW, = gradient_check.numerical_grad(f, (self.link.W.data, ),
                                            (y.grad, ))
        gradient_check.assert_allclose(gW, self.link.W.grad)
Example #28
    def check_backward(self, x_data, gy_data, use_cudnn=True):
        x = Variable(x_data)
        y = tanh(x, use_cudnn=use_cudnn)
        y.grad = gy_data
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,))
        assert_allclose(gx, x.grad)
Example #29
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.link(x)
        y.grad = y_grad
        y.backward()

        f = lambda: (self.link(x).data, )
        gx, gW = gradient_check.numerical_grad(f, (x.data, self.link.W.data),
                                               (y.grad, ),
                                               eps=1e-2)
        if not self.nobias:
            gb, = gradient_check.numerical_grad(f, (self.link.b.data, ),
                                                (y.grad, ),
                                                eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, self.link.W.grad)
        if not self.nobias:
            gradient_check.assert_allclose(gb, self.link.b.grad)
Example #30
    def check_backward(self, x_data, y_grad):
        x = Variable(x_data)
        y = self.func(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gW, = numerical_grad(f, (func.W,), (y.grad,))
        assert_allclose(gW, func.gW)
Example #31
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.func(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gW, = gradient_check.numerical_grad(f, (func.W, ), (y.grad, ))
        gradient_check.assert_allclose(gW, func.gW)
Example #32
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.expand_dims(x, self.axis)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x_data,))
        gx, = gradient_check.numerical_grad(f, (x_data,), (y_grad,))
        gradient_check.assert_allclose(cuda.to_cpu(x.grad),
                                       cuda.to_cpu(gx))
Example #33
    def check_backward(self, x_data, y_grad):
        x = Variable(x_data)
        y = leaky_relu(x, slope=self.slope)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = numerical_grad(f, (x.data, ), (y.grad, ))

        assert_allclose(gx, x.grad)
Example #34
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.leaky_relu(x, slope=self.slope)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))

        gradient_check.assert_allclose(gx, x.grad)
Example #35
    def check_backward(self, x_data, y_grad, use_cudnn=True):
        x = chainer.Variable(x_data)
        y = functions.max_pooling_2d(x, 3, stride=2, pad=1, cover_all=self.cover_all, use_cudnn=use_cudnn)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))

        gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x.grad))
Example #36
    def check_backward(self, op, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = op(x, self.value)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = gradient_check.numerical_grad(f, (x.data, ), (y.grad, ))

        gradient_check.assert_allclose(gx, x.grad)
Example #37
    def check_backward(self, x_data, y_grad, axis=None, keepdims=False):
        x = chainer.Variable(x_data)
        y = functions.max(x, axis=axis, keepdims=keepdims)

        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data.copy(),))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,), eps=1e-5)
        gradient_check.assert_allclose(gx, x.grad, rtol=1e-3, atol=1e-3)
Example #38
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self.link(x)
        y.grad = y_grad
        y.backward()

        f = lambda: (self.link(x).data, )
        if self.initial_bias is None:
            gx, gW = gradient_check.numerical_grad(
                f, (x.data, self.link.W.data),
                (y.grad, ), eps=1e-4)
        else:
            gx, gW, gb = gradient_check.numerical_grad(
                f, (x.data, self.link.W.data, self.link.b.data),
                (y.grad, ), eps=1e-4)

        gradient_check.assert_allclose(gx, x.grad, atol=1e-2)
        gradient_check.assert_allclose(gW, self.link.W.grad, atol=1e-2)
        if self.initial_bias is not None:
            gradient_check.assert_allclose(gb, self.link.b.grad, atol=1e-2)
Example #39
    def check_backward(self, op, x_data, y_grad):
        x = Variable(x_data)
        y = op(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = numerical_grad(f, (x.data, ), (y.grad, ))

        assert_allclose(gx, x.grad)
Example #40
    def check_backward(self, random_array, random_grad_array):
        x = chainer.Variable(random_array)
        y = functions.depth2space(x, 2)
        y.grad = random_grad_array
        y.backward()

        def func():
            return (functions.depth2space(x, 2).data,)
        gx, = gradient_check.numerical_grad(func, (x.data,), (y.grad,))

        testing.assert_allclose(x.grad, gx, rtol=0.0001)
Example #41
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.local_response_normalization(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,), eps=1)

        gradient_check.assert_allclose(gx, x.grad, atol=1e-3)
Example #42
    def check_backward(self, x_data, y_grad, use_cudnn=True):
        x = Variable(x_data)
        y = average_pooling_2d(x, 3, stride=2, pad=1, use_cudnn=use_cudnn)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = numerical_grad(f, (x.data, ), (y.grad, ), eps=1e-2)

        assert_allclose(to_cpu(gx), to_cpu(x.grad))
Example #43
    def check_backward(self, x_data, y_grad):
        x = Variable(x_data)
        y = local_response_normalization(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,), eps=1)

        assert_allclose(gx, x.grad, atol=1e-3)
Example #44
    def check_backward(self, data, grad):
        x = chainer.Variable(data)
        bx = functions.broadcast_to(x, self.out_shape)

        func = bx.creator
        f = lambda: func.forward((data,))

        bx.grad = grad
        bx.backward()
        gx, = gradient_check.numerical_grad(f, (data,), (bx.grad,))
        gradient_check.assert_allclose(gx, x.grad)
Example #45
    def check_backward(self, op, x_data, y_grad):
        x = Variable(x_data)
        y = op(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,))

        assert_allclose(gx, x.grad)
Example #46
    def check_backward(self, op, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = op(x, self.value)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))

        gradient_check.assert_allclose(gx, x.grad)
Example #47
    def test_backward_cpu(self):
        x = Variable(np.random.randn(3, 2).astype(np.float32))
        y = F.relu(x)
        y.grad = np.random.randn(3, 2).astype(np.float32)
        y.backward()

        def f():
            return F.relu(x).data,

        gx, = gradient_check.numerical_grad(f, (x.data, ), (y.grad, ))
        testing.assert_allclose(gx, x.grad)
Example #48
    def check_backward(self, x_data, W_data, b_data, y_grad):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if b_data is None else chainer.Variable(b_data)
        y = functions.linear(x, W, b)
        y.grad = y_grad
        y.backward()

        func = y.creator
        if b_data is None:
            f = lambda: func.forward((x.data, W.data))
            gx, gW = gradient_check.numerical_grad(f, (x.data, W.data), (y.grad,), eps=1e-2)
        else:
            f = lambda: func.forward((x.data, W.data, b.data))
            gx, gW, gb = gradient_check.numerical_grad(f, (x.data, W.data, b.data), (y.grad,), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad)
        gradient_check.assert_allclose(gW, W.grad)
        if b_data is not None:
            gradient_check.assert_allclose(gb, b.grad)
Example #49
    def check_backward(self, x_data, y_grad, use_cudnn=True):
        x = Variable(x_data)
        y = average_pooling_2d(x, 3, stride=2, pad=1, use_cudnn=use_cudnn)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,), eps=1e-2)

        assert_allclose(to_cpu(gx), to_cpu(x.grad))
Example #50
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.swapaxes(x, self.axis1, self.axis2)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data.copy(),))

        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,), eps=1e-5)
        gradient_check.assert_allclose(gx, x.grad, rtol=1e-5)
Example #51
    def check_backward(self, c_data, h_data, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self._forward(self.link, x)
        y.grad = y_grad
        y.backward()

        def f():
            c, y = _peephole(self.link, c_data, h_data, x_data)
            return y,
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))
        gradient_check.assert_allclose(gx, x.grad, atol=1e-3)
Example #52
    def check_backward(self, c_data, h_data, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = self._forward(self.link, x)
        y.grad = y_grad
        y.backward()

        def f():
            c, y = _peephole(self.link, c_data, h_data, x_data)
            return y,
        gx, = gradient_check.numerical_grad(f, (x.data,), (y_grad,))
        testing.assert_allclose(gx, x.grad, atol=1e-3)
Example #53
def _check_backward(e1, e2, y_grad, link, bias):
    e1 = chainer.Variable(e1)
    e2 = chainer.Variable(e2)
    y = link(e1, e2)
    y.grad = y_grad
    y.backward()
    f = lambda: (link(e1, e2).data, )

    ge1, ge2, gW = gradient_check.numerical_grad(
        f, (e1.data, e2.data, link.W.data), (y.grad, ), eps=1e-2)
    gradient_check.assert_allclose(ge1, e1.grad, rtol=1e-3)
    gradient_check.assert_allclose(ge2, e2.grad, rtol=1e-3)
    gradient_check.assert_allclose(gW, link.W.grad, rtol=1e-3)

    if bias:
        gV1, gV2, gb = gradient_check.numerical_grad(
            f, (link.V1.data, link.V2.data, link.b.data), (y.grad, ), eps=1e-2)
        gradient_check.assert_allclose(gV1, link.V1.grad, rtol=1e-3)
        gradient_check.assert_allclose(gV2, link.V2.grad, rtol=1e-3)
        gradient_check.assert_allclose(gb, link.b.grad, rtol=1e-3)
Example #54
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.leaky_relu(x, slope=self.slope)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data, ))
        gx, = gradient_check.numerical_grad(f, (x.data, ), (y.grad, ))

        gradient_check.assert_allclose(gx, x.grad)
Example #55
    def check_backward(self, x_data, y_grad):
        x = Variable(x_data)
        y = leaky_relu(x, slope=self.slope)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = numerical_grad(f, (x.data,), (y.grad,))

        assert_allclose(gx, x.grad)
Example #56
    def check_backward(self, x_data, y_grad, use_cudnn=True):
        x = chainer.Variable(x_data)
        y = functions.spatial_pyramid_pooling_2d(x, self.pyramid_height, self.pooling_class, use_cudnn=use_cudnn)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))

        gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x.grad), atol=1e-04)
Example #57
    def check_backward(self, x_data, y_grad):
        x = chainer.Variable(x_data)
        y = functions.dropout(x, self.ratio)
        creator = y.creator
        y.grad = y_grad
        y.backward()

        def f():
            y = _dropout(x_data, creator)
            return y,
        gx, = gradient_check.numerical_grad(f, (x_data, ), (y.grad, ), eps=0.1)
        testing.assert_allclose(gx, x.grad, **self.check_backward_options)
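The helper _dropout used in Example #57 is defined elsewhere in the quoted test module. A plausible minimal version, assuming chainer's Dropout keeps the sampled mask (including its 1 / (1 - ratio) scaling) on the creator node, simply reapplies that fixed mask so every numerical probe sees the same dropout pattern:

def _dropout(x, creator):
    # Reapply the mask recorded during the forward pass; keeping the mask
    # fixed makes the function deterministic for numerical_grad.
    return x * creator.mask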