Example #1
    def test_allow_unused_true(self):
        def func(x, y):
            return x * x

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x, self.y], self.numerical_delta, self.np_dtype)
        self.x.stop_gradient = False
        self.y.stop_gradient = False
        jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y],
                                                  allow_unused=True)

        assert np.allclose(jacobian[0].numpy(), numerical_jacobian[0][0],
                           self.rtol, self.atol)
        # y is unused in func, so with allow_unused=True its entry is None
        assert jacobian[1] is None
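
All six examples rely on fixtures (self.x, self.y, self.weight, the tolerances, and the _compute_numerical_batch_jacobian finite-difference helper) defined in a setUp method that is not shown. A minimal sketch of what that fixture might look like, with illustrative shapes and tolerances rather than the original values:

import unittest
import numpy as np
import paddle

class TestBatchJacobian(unittest.TestCase):
    def setUp(self):
        # Illustrative values; the original fixture is not shown here.
        self.np_dtype = np.float64
        self.numerical_delta = 1e-4
        self.rtol = 1e-3
        self.atol = 1e-3
        # Batch of 4 samples with 2 features each; weight is shaped so that
        # matmul(matmul(x, weight), y) is well defined in Examples #4/#5.
        self.x = paddle.rand([4, 2], dtype='float64')
        self.y = paddle.rand([4, 2], dtype='float64')
        self.weight = paddle.rand([2, 4], dtype='float64')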
Example #2
    def test_batch_multi_input_and_batch_multi_output(self):
        def func(x, y):
            return x * y, x * y

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x, self.y], self.numerical_delta, self.np_dtype)

        self.x.stop_gradient = False
        self.y.stop_gradient = False
        batch_jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y])

        for i in range(len(batch_jacobian)):
            assert np.allclose(batch_jacobian[i].numpy(), numerical_jacobian[i],
                               self.rtol, self.atol)
Example #3
    def test_create_graph_true(self):
        def func(x, y):
            return x * y

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x, self.y], self.numerical_delta, self.np_dtype)
        self.x.stop_gradient = False
        self.y.stop_gradient = False
        jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y],
                                                  create_graph=True)
        for j in range(len(jacobian)):
            assert not jacobian[j].stop_gradient
            assert np.allclose(jacobian[j].numpy(), numerical_jacobian[0][j],
                               self.rtol, self.atol)
        # create_graph=True keeps the graph alive, so the Jacobian itself
        # can be differentiated a second time
        double_grad = paddle.grad(jacobian[0], [self.x, self.y])
        assert double_grad is not None
Example #4
    def test_batch_single_input_and_batch_multi_output(self):
        def func(x):
            return paddle.matmul(paddle.matmul(x, self.weight), self.y), x * x

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x], self.numerical_delta, self.np_dtype)

        self.x.stop_gradient = False
        batch_jacobian = paddle.autograd.batch_jacobian(func, self.x)

        for i in range(len(batch_jacobian)):
            assert np.allclose(batch_jacobian[i].numpy(),
                               numerical_jacobian[i][0], self.rtol, self.atol)
Example #5
    def test_batch_single_input_and_batch_single_output(self):
        def func(x):
            return paddle.matmul(paddle.matmul(x, self.weight), self.y)

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x], self.numerical_delta, self.np_dtype)

        self.x.stop_gradient = False
        batch_jacobian = paddle.autograd.batch_jacobian(func, self.x)

        # Compare the arrays directly; calling .all() on each side first
        # would reduce them to booleans and make the check vacuous
        self.assertTrue(
            np.allclose(batch_jacobian.numpy(), numerical_jacobian[0][0],
                        self.rtol, self.atol))
Example #6
    def test_create_graph_false(self):
        def func(x, y):
            return x * y

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x, self.y], self.numerical_delta, self.np_dtype)
        self.x.stop_gradient = False
        self.y.stop_gradient = False
        jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y])
        for j in range(len(jacobian)):
            assert jacobian[j].stop_gradient
            assert np.allclose(jacobian[j].numpy(), numerical_jacobian[0][j],
                               self.rtol, self.atol)
        # create_graph defaults to False, so the Jacobian is detached and
        # differentiating it again must raise
        with self.assertRaises(RuntimeError) as ctx:
            paddle.grad(jacobian[0], [self.x, self.y])
        error_msg = cpt.get_exception_message(ctx.exception)
        assert error_msg.find("has no gradient") >= 0
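
For reference, here is a minimal, self-contained sketch of calling paddle.autograd.batch_jacobian outside a test class; the function and shapes are illustrative, not taken from the tests above:

import paddle

# Batch of 4 samples with 3 features each.
x = paddle.rand([4, 3], dtype='float64')
x.stop_gradient = False  # mark x as differentiable

def func(x):
    # Elementwise square; each sample's Jacobian block is diagonal.
    return x * x

jac = paddle.autograd.batch_jacobian(func, x)
print(jac.shape)  # inspect how the batch and input dimensions are laid out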