Example #1
0
    def test_single_input(self):
        """vhp of a scalar-valued function of one tensor matches the
        finite-difference reference within (rtol, atol)."""

        def func(x):
            # Scalar objective: sum of x @ x.
            return paddle.sum(paddle.matmul(x, x))

        expected_output = func(self.x).numpy()
        expected_vhp = _compute_numerical_vhp(func, self.x, self.vx,
                                              self.numerical_delta,
                                              self.np_dtype)

        self.x.stop_gradient = False
        actual_output, actual_vhp = paddle.autograd.vhp(func, self.x, self.vx)
        assert np.allclose(actual_output.numpy(), expected_output, self.rtol,
                           self.atol)
        assert np.allclose(actual_vhp[0].numpy(), expected_vhp[0], self.rtol,
                           self.atol)
Example #2
0
    def test_v_default(self):
        """When v is omitted, vhp defaults to all-ones vectors; compare
        against the numerical reference computed with explicit ones."""

        def func(x, y):
            # Scalar objective over two inputs: sum of x @ y.
            return paddle.sum(paddle.matmul(x, y))

        expected_output = func(self.x, self.y).numpy()
        # Reproduce the implicit default v explicitly for the reference.
        ones_vx = paddle.ones(self.vx.shape, dtype=self.vx.dtype)
        ones_vy = paddle.ones(self.vy.shape, dtype=self.vy.dtype)
        expected_vhp = _compute_numerical_vhp(func, [self.x, self.y],
                                              [ones_vx, ones_vy],
                                              self.numerical_delta,
                                              self.np_dtype)

        self.x.stop_gradient = False
        self.y.stop_gradient = False
        actual_output, actual_vhp = paddle.autograd.vhp(func,
                                                        [self.x, self.y])
        assert np.allclose(actual_output.numpy(), expected_output, self.rtol,
                           self.atol)
        for idx, vhp_part in enumerate(actual_vhp):
            assert np.allclose(vhp_part.numpy(), expected_vhp[idx], self.rtol,
                               self.atol)
Example #3
0
    def test_allow_unused_true(self):
        """With allow_unused=True, an input the function never uses (y)
        yields a None entry in the vhp result instead of an error."""

        def func(x, y):
            # Deliberately ignores y so it is an "unused" input.
            return paddle.sum(paddle.matmul(x, x))

        expected_output = func(self.x, self.y).numpy()
        expected_vhp = _compute_numerical_vhp(func, [self.x, self.y],
                                              [self.vx, self.vy],
                                              self.numerical_delta,
                                              self.np_dtype)

        self.x.stop_gradient = False
        self.y.stop_gradient = False
        actual_output, actual_vhp = paddle.autograd.vhp(
            func, [self.x, self.y], [self.vx, self.vy], allow_unused=True)
        assert np.allclose(actual_output.numpy(), expected_output, self.rtol,
                           self.atol)
        # Used input: matches the numerical reference.
        assert np.allclose(actual_vhp[0].numpy(), expected_vhp[0], self.rtol,
                           self.atol)
        # Unused input: vhp entry must be None.
        assert actual_vhp[1] is None
Example #4
0
    def test_create_graph_true(self):
        """With create_graph=True the returned vhp stays in the autograd
        graph (stop_gradient is False), so a third-order gradient through
        it is possible."""

        def func(x):
            # Non-linear scalar objective so higher-order grads are non-trivial.
            return paddle.sum(F.sigmoid(x))

        numerical_func_output = func(self.x).numpy()
        numerical_vhp = _compute_numerical_vhp(func, self.x, self.vx,
                                               self.numerical_delta,
                                               self.np_dtype)

        self.x.stop_gradient = False
        func_output, vhp = paddle.autograd.vhp(func,
                                               self.x,
                                               self.vx,
                                               create_graph=True)
        assert np.allclose(func_output.numpy(), numerical_func_output,
                           self.rtol, self.atol)
        # Idiomatic boolean check (was `== False`, PEP 8 E712):
        # the result must remain differentiable.
        assert not vhp[0].stop_gradient
        assert np.allclose(vhp[0].numpy(), numerical_vhp[0], self.rtol,
                           self.atol)
        # Differentiating through vhp gives a third-order gradient.
        triple_grad = paddle.grad(vhp, self.x)
        assert triple_grad is not None
Example #5
0
    def test_create_graph_false(self):
        """By default (create_graph=False) the returned vhp is detached
        from the graph, and differentiating through it must fail."""

        def func(x):
            return paddle.sum(F.sigmoid(x))

        numerical_func_output = func(self.x).numpy()
        numerical_vhp = _compute_numerical_vhp(func, self.x, self.vx,
                                               self.numerical_delta,
                                               self.np_dtype)

        self.x.stop_gradient = False
        func_output, vhp = paddle.autograd.vhp(func, self.x, self.vx)
        assert np.allclose(func_output.numpy(), numerical_func_output,
                           self.rtol, self.atol)
        # Idiomatic boolean check (was `== True`, PEP 8 E712):
        # the result must be detached from the graph.
        assert vhp[0].stop_gradient
        assert np.allclose(vhp[0].numpy(), numerical_vhp[0], self.rtol,
                           self.atol)
        try:
            paddle.grad(vhp, self.x)
        except RuntimeError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("has no gradient") > 0
        else:
            # Bug fix: previously the test passed silently when no
            # exception was raised; now that path fails explicitly.
            raise AssertionError(
                "expected RuntimeError when differentiating a detached vhp")