Code example #1
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_inner_product_hvp():
    for datatype in backends:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[3, 1])
        v = ad.Variable(name="v", shape=[3, 1])
        y = ad.sum(ad.einsum("ab,bc->ac", ad.transpose(x), x))

        grad_x, = ad.gradients(y, [x])
        Hv, = ad.hvp(output_node=y, node_list=[x], vector_list=[v])

        executor = ad.Executor([y, grad_x, Hv])
        x_val = T.tensor([[1.], [2.], [3.]])  # 3x1
        v_val = T.tensor([[1.], [2.], [3.]])  # 3x1
        y_val, grad_x_val, Hv_val = executor.run(feed_dict={
            x: x_val,
            v: v_val
        })

        expected_yval = T.sum(T.dot(T.transpose(x_val), x_val))
        expected_grad_x_val = 2 * x_val
        expected_hv_val = 2 * v_val

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
        assert T.array_equal(Hv_val, expected_hv_val)
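
Note (added for reference, not part of the original test file): for y = x^T x the gradient is dy/dx = 2x and the Hessian is the constant matrix 2I, so the Hessian-vector product is Hv = 2v; these are exactly the values checked against expected_grad_x_val and expected_hv_val above.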
Code example #2
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_add_mul_mix_2(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[3])
        x2 = ad.Variable(name="x2", shape=[3])
        x3 = ad.Variable(name="x3", shape=[3])
        x4 = ad.Variable(name="x4", shape=[3])
        y = ad.sum(x1 + x2 * x3 * x4)

        grad_x1, grad_x2, grad_x3, grad_x4 = ad.gradients(y, [x1, x2, x3, x4])

        executor = ad.Executor([y, grad_x1, grad_x2, grad_x3, grad_x4])
        x1_val = 1 * T.ones(3)
        x2_val = 2 * T.ones(3)
        x3_val = 3 * T.ones(3)
        x4_val = 4 * T.ones(3)
        y_val, grad_x1_val, grad_x2_val, grad_x3_val, grad_x4_val = executor.run(
            feed_dict={
                x1: x1_val,
                x2: x2_val,
                x3: x3_val,
                x4: x4_val
            })
        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(x1_val + x2_val * x3_val * x4_val))
        assert T.array_equal(grad_x1_val, T.ones_like(x1_val))
        assert T.array_equal(grad_x2_val, x3_val * x4_val)
        assert T.array_equal(grad_x3_val, x2_val * x4_val)
        assert T.array_equal(grad_x4_val, x2_val * x3_val)
Code example #3
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_add_jacobian_scalar(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[])
        x2 = ad.Variable(name="x2", shape=[])
        y = x1 + x2

        jacobian_x2, = ad.jacobians(y, [x2])

        executor = ad.Executor([y, jacobian_x2])

        x1_val = T.tensor(1.)
        x2_val = T.tensor(1.)
        y_val, jacobian_x2_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val
        })

        expected_jacobian_x2_val = T.tensor(1.)

        assert isinstance(y, ad.Node)
        assert isinstance(jacobian_x2, ad.Node)
        assert T.array_equal(y_val, x1_val + x2_val)
        assert T.array_equal(jacobian_x2_val, expected_jacobian_x2_val)
Code example #4
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_mul_jacobian_one_scalar(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[])
        x2 = ad.Variable(name="x2", shape=[2, 2])

        # test both cases of left and right multiply a scalar
        for y in [x1 * x2, x2 * x1]:

            jacobian_x1, jacobian_x2 = ad.jacobians(y, [x1, x2])
            executor = ad.Executor([y, jacobian_x1, jacobian_x2])

            x1_val = T.tensor(2.)
            x2_val = T.tensor([[5., 6.], [7., 8.]])
            y_val, jacobian_x1_val, jacobian_x2_val = executor.run(feed_dict={
                x1: x1_val,
                x2: x2_val
            })

            I = T.identity(2)
            expected_jacobian_x1_val = T.einsum("ai,bj,ij->ab", I, I, x2_val)
            expected_jacobian_x2_val = x1_val * T.einsum("ai,bj->abij", I, I)

            assert isinstance(y, ad.Node)
            assert T.array_equal(y_val, x1_val * x2_val)
            assert T.array_equal(jacobian_x1_val, expected_jacobian_x1_val)
            assert T.array_equal(jacobian_x2_val, expected_jacobian_x2_val)
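
Note (added for reference): for y = x1 * X2 with scalar x1, the Jacobian entries are dy[a,b]/dx1 = X2[a,b] and dy[a,b]/dX2[i,j] = x1 * delta(a,i) * delta(b,j); the two einsum expressions above assemble exactly these tensors from the 2x2 identity I.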
Code example #5
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_add_jacobian(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[2, 2])
        x2 = ad.Variable(name="x2", shape=[2, 2])
        y = x1 + x2

        jacobian_x2, = ad.jacobians(y, [x2])

        executor = ad.Executor([y, jacobian_x2])

        x1_val = T.tensor([[1, 1], [1, 1]])
        x2_val = T.tensor([[1, 1], [1, 1]])
        y_val, jacobian_x2_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val
        })

        I = T.identity(2)
        expected_jacobian_x2_val = T.einsum("ac,bd->abcd", I, I)

        assert isinstance(y, ad.Node)
        assert isinstance(jacobian_x2, ad.Node)
        assert T.array_equal(y_val, x1_val + x2_val)
        assert T.array_equal(jacobian_x2_val, expected_jacobian_x2_val)
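
Note (added for reference): the Jacobian of elementwise addition is the fourth-order identity tensor, d(X1 + X2)[a,b] / dX2[c,d] = delta(a,c) * delta(b,d), which is exactly what T.einsum("ac,bd->abcd", I, I) constructs.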
Code example #6
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_vjps():
    for datatype in backends:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2])
        A = ad.Variable(name="A", shape=[3, 2])
        v = ad.Variable(name="v", shape=[3])
        y = ad.einsum('ab, b->a', A, x)

        transposed_vjp_x, = ad.transposed_vjps(y, [x], v)

        executor = ad.Executor([y, transposed_vjp_x])
        x_val = T.tensor([1., 2.])  # shape [2]
        A_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])  # 3x2
        v_val = T.tensor([1., 2., 3.])

        y_val, transposed_vjp_x_val = executor.run(feed_dict={
            x: x_val,
            A: A_val,
            v: v_val
        })

        expected_yval = T.einsum('ab, b->a', A_val, x_val)
        expected_transposed_vjp_x_val = T.einsum('b, ba->a', v_val, A_val)

        assert isinstance(transposed_vjp_x, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(transposed_vjp_x_val,
                             expected_transposed_vjp_x_val)
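
Note (added for reference): for y = A x the Jacobian is A itself, so the transposed vector-Jacobian product with v is A^T v; the expected einsum 'b,ba->a' computes sum_b v[b] * A[b,a], i.e. exactly (A^T v)[a].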
Code example #7
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_three_mul_jacobian(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[2, 2])
        x2 = ad.Variable(name="x2", shape=[2, 2])
        x3 = ad.Variable(name="x3", shape=[2, 2])
        y = x1 * x2 * x3

        jacobian_x1, = ad.jacobians(y, [x1])
        executor = ad.Executor([y, jacobian_x1])

        x1_val = T.tensor([[1., 2.], [3., 4.]])
        x2_val = T.tensor([[5., 6.], [7., 8.]])
        x3_val = T.tensor([[9., 10.], [11., 12.]])

        y_val, jacobian_x1_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val,
            x3: x3_val
        })

        I = T.identity(2)
        expected_jacobian_x1_val = T.einsum("ai,bj,ij,ij->abij", I, I, x2_val,
                                            x3_val)

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, x1_val * x2_val * x3_val)
        assert T.array_equal(jacobian_x1_val, expected_jacobian_x1_val)
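
Note (added for reference): for the elementwise product Y = X1 * X2 * X3, dY[a,b]/dX1[i,j] = delta(a,i) * delta(b,j) * X2[i,j] * X3[i,j], a diagonal fourth-order tensor whose nonzero entries are X2 * X3; the einsum above builds it from two identity matrices.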
Code example #8
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_three_mul_jacobian_scalars(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[])
        x2 = ad.Variable(name="x2", shape=[])
        x3 = ad.Variable(name="x3", shape=[])
        y = x1 * x2 * x3

        jacobian_x1, = ad.jacobians(y, [x1])
        executor = ad.Executor([y, jacobian_x1])

        x1_val = T.tensor(1.)
        x2_val = T.tensor(2.)
        x3_val = T.tensor(3.)

        y_val, jacobian_x1_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val,
            x3: x3_val
        })

        expected_jacobian_x1_val = x2_val * x3_val

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, x1_val * x2_val * x3_val)
        assert T.array_equal(jacobian_x1_val, expected_jacobian_x1_val)
Code example #9
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_einsum():

    for datatype in backends:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3, 2])
        x3 = ad.Variable(name="x3", shape=[2, 3])
        matmul = ad.einsum('ik,kj->ij', x2, x3)
        y = ad.sum(matmul)

        grad_x2, grad_x3 = ad.gradients(y, [x2, x3])

        executor = ad.Executor([y, grad_x2, grad_x3])
        x2_val = T.tensor([[1, 2], [3, 4], [5, 6]])  # 3x2
        x3_val = T.tensor([[7, 8, 9], [10, 11, 12]])  # 2x3

        y_val, grad_x2_val, grad_x3_val = executor.run(feed_dict={
            x2: x2_val,
            x3: x3_val
        })

        expected_grad_sum = T.ones_like(T.dot(x2_val, x3_val))
        expected_yval = T.sum(T.dot(x2_val, x3_val))
        expected_grad_x2_val = T.dot(expected_grad_sum, T.transpose(x3_val))
        expected_grad_x3_val = T.dot(T.transpose(x2_val), expected_grad_sum)

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x2_val, expected_grad_x2_val)
        assert T.array_equal(grad_x3_val, expected_grad_x3_val)
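
Note (added for reference): with S = sum(X2 @ X3), standard matrix calculus gives dS/dX2 = G @ X3^T and dS/dX3 = X2^T @ G, where G is the all-ones matrix of the product's shape (expected_grad_sum above).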
Code example #10
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_jtjvps():
    for datatype in backends:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2])
        A = ad.Variable(name="A", shape=[3, 2])
        v = ad.Variable(name="v", shape=[2])
        y = ad.einsum('ab, b->a', A, x)

        jtjvp_x, = ad.jtjvps(y, [x], [v])

        executor = ad.Executor([y, jtjvp_x])
        x_val = T.tensor([1., 2.])
        A_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])
        v_val = T.tensor([3., 4.])

        y_val, jtjvp_x_val = executor.run(feed_dict={
            x: x_val,
            A: A_val,
            v: v_val
        })

        expected_yval = T.einsum('ab, b->a', A_val, x_val)
        expected_jtjvp_x_val = T.einsum('ba, ac->bc', T.transpose(A_val),
                                        A_val)
        expected_jtjvp_x_val = T.einsum('ab, b->a', expected_jtjvp_x_val,
                                        v_val)

        assert isinstance(jtjvp_x, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(jtjvp_x_val, expected_jtjvp_x_val)
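
A quick NumPy cross-check of expected_jtjvp_x_val (added here; it assumes NumPy and is independent of the AutoHOOT backend):

import numpy as np

A = np.array([[1., 2.], [3., 4.], [5., 6.]])
v = np.array([3., 4.])
# For y = A x the Jacobian is A, so the Gauss-Newton product is A^T A v.
print(A.T @ A @ v)  # -> [281. 356.]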
Code example #11
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_add_mul_mix_3(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3])
        x3 = ad.Variable(name="x3", shape=[3])
        z = x2 * x2 + x2 + x3 + 3
        y = ad.sum(z * z + x3)

        grad_x2, grad_x3 = ad.gradients(y, [x2, x3])

        executor = ad.Executor([y, grad_x2, grad_x3])
        x2_val = 2 * T.ones(3)
        x3_val = 3 * T.ones(3)
        y_val, grad_x2_val, grad_x3_val = executor.run(feed_dict={
            x2: x2_val,
            x3: x3_val
        })

        z_val = x2_val * x2_val + x2_val + x3_val + 3
        expected_yval = z_val * z_val + x3_val
        expected_grad_x2_val = 2 * z_val * (2 * x2_val + 1)
        expected_grad_x3_val = 2 * z_val + 1
        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(expected_yval))
        assert T.array_equal(grad_x2_val, expected_grad_x2_val)
        assert T.array_equal(grad_x3_val, expected_grad_x3_val)
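
Note (added for reference): with z = x2^2 + x2 + x3 + 3 and y = sum(z^2 + x3), the chain rule gives dy/dx2 = 2 * z * (2 * x2 + 1) and dy/dx3 = 2 * z + 1, applied elementwise; this is what the expected gradients compute.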
Code example #12
File: s2s_test.py  Project: LinjianMa/AutoHOOT
def test_s2s_hvp(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[3])
        H = ad.Variable(name="H", shape=[3, 3])
        v = ad.Variable(name="v", shape=[3])
        y = ad.einsum("a,ab,b->", x, H, x)

        grad_x, = ad.gradients(y, [x])
        Hv, = ad.hvp(output_node=y, node_list=[x], vector_list=[v])

        x_val = T.tensor([1., 2., 3.])  # 3
        v_val = T.tensor([1., 2., 3.])  # 3
        H_val = T.tensor([[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]])  # 3x3

        expected_yval = T.einsum("a,ab,b->", x_val, H_val, x_val)
        expected_grad_x_val = 2 * T.einsum("ab,b->a", H_val, x_val)
        expected_hv_val = T.tensor([4., 8., 12.])

        StS = SourceToSource()
        forward_str = StS.forward([y], backend=datatype)
        m = import_code(forward_str)
        y_val_s2s, = m.forward([x_val, H_val])
        grad_str = StS.gradients(y, [x], backend=datatype)
        m = import_code(grad_str)
        grad_x_val_s2s, = m.gradients([x_val, H_val])
        hvp_str = StS.hvp(y, [x], [v], backend=datatype)
        m = import_code(hvp_str)
        Hv_val_s2s, = m.hvp([x_val, H_val, v_val])

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val_s2s, expected_yval)
        assert T.array_equal(grad_x_val_s2s, expected_grad_x_val)
        assert T.array_equal(Hv_val_s2s, expected_hv_val)
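
Note (added for reference): for the quadratic form y = x^T H x the gradient is (H + H^T) x, which equals 2 H x for symmetric H, and the Hessian is H + H^T = 2 H; with H = 2 I and v = [1, 2, 3] the Hessian-vector product is 4 v = [4, 8, 12], matching expected_hv_val.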
Code example #13
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_jacobian_einsum(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[3, 3, 3])
        x2 = ad.Variable(name="x2", shape=[3, 3, 3])
        y = ad.einsum("ikl,jkl->ijk", x1, x2)

        jacobian_x1, jacobian_x2 = ad.jacobians(y, [x1, x2])
        executor = ad.Executor([y, jacobian_x1, jacobian_x2])

        x1_val = T.random((3, 3, 3))
        x2_val = T.random((3, 3, 3))
        y_val, jacobian_x1_val, jacobian_x2_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val,
        })

        I = T.identity(3)
        expected_jacobian_x1_val = T.einsum("im,kn,jno->ijkmno", I, I, x2_val)
        expected_jacobian_x2_val = T.einsum("jm,kn,ino->ijkmno", I, I, x1_val)

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.einsum("ikl,jkl->ijk", x1_val, x2_val))
        assert T.array_equal(jacobian_x1_val, expected_jacobian_x1_val)
        assert T.array_equal(jacobian_x2_val, expected_jacobian_x2_val)
Code example #14
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_sub_jacobian_w_chain(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[2, 2])
        x2 = ad.Variable(name="x2", shape=[2, 2])
        x3 = ad.Variable(name="x3", shape=[2, 2])
        y = x1 - x2
        z = x3 - y

        jacobian_x2, = ad.jacobians(z, [x2])

        executor = ad.Executor([z, jacobian_x2])

        x1_val = T.tensor([[1, 1], [1, 1]])
        x2_val = T.tensor([[1, 1], [1, 1]])
        x3_val = T.tensor([[1, 1], [1, 1]])
        z_val, jacobian_x2_val = executor.run(feed_dict={
            x1: x1_val,
            x2: x2_val,
            x3: x3_val
        })

        I = T.identity(2)
        expected_jacobian_x2_val = T.einsum("ac,bd->abcd", I, I)

        assert isinstance(z, ad.Node)
        assert isinstance(jacobian_x2, ad.Node)
        assert T.array_equal(z_val, x3_val - x1_val + x2_val)
        assert T.array_equal(jacobian_x2_val, expected_jacobian_x2_val)
Code example #15
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_hvp2(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[3, 1])
        H = ad.Variable(name="H", shape=[3, 3])
        v = ad.Variable(name="v", shape=[3, 1])
        y = ad.sum(
            ad.einsum("ab,bc->ac", ad.einsum("ab,bc->ac", ad.transpose(x), H),
                      x))

        grad_x, = ad.gradients(y, [x])
        Hv, = ad.hvp(output_node=y, node_list=[x], vector_list=[v])

        executor = ad.Executor([y, grad_x, Hv])
        x_val = T.tensor([[1.], [2.], [3.]])  # 3x1
        v_val = T.tensor([[1.], [2.], [3.]])  # 3x1
        H_val = T.tensor([[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]])  # 3x3
        y_val, grad_x_val, Hv_val = executor.run(feed_dict={
            x: x_val,
            H: H_val,
            v: v_val
        })
        Hx = T.dot(H_val, x_val)
        expected_yval = T.sum(T.dot(T.transpose(x_val), Hx))
        expected_grad_x_val = 2 * Hx
        expected_hv_val = T.tensor([[4.], [8.], [12.]])

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
        assert T.array_equal(Hv_val, expected_hv_val)
Code example #16
File: s2s_test.py  Project: LinjianMa/AutoHOOT
def test_s2s_jtjvp(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2])
        A = ad.Variable(name="A", shape=[3, 2])
        v = ad.Variable(name="v", shape=[2])
        y = ad.einsum("ab,b->a", A, x)

        jtjvp_x, = ad.jtjvps(y, [x], [v])

        x_val = T.tensor([1., 2.])
        A_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])
        v_val = T.tensor([3., 4.])

        expected_jtjvp_x_val = T.einsum("ba,bc,c->a", A_val, A_val, v_val)

        StS = SourceToSource()
        forward_str = StS.forward([jtjvp_x],
                                  function_name='jtjvp',
                                  backend=datatype)
        m = import_code(forward_str)
        jtjvp_x_val_s2s, = m.jtjvp([A_val, v_val])

        assert isinstance(jtjvp_x, ad.Node)
        assert T.array_equal(jtjvp_x_val_s2s, expected_jtjvp_x_val)
Code example #17
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_negative(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3])
        y = ad.sum(-x2)

        grad_x2, = ad.gradients(y, [x2])

        executor = ad.Executor([y, grad_x2])
        x2_val = 2 * T.ones(3)
        y_val, grad_x2_val = executor.run(feed_dict={x2: x2_val})

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(-x2_val))
        assert T.array_equal(grad_x2_val, -T.ones_like(x2_val))
Code example #18
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_jacobian_summation_einsum(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2, 2])
        x_sum = ad.einsum('ij->', x)

        grad_x, = ad.jacobians(x_sum, [x])

        executor = ad.Executor([x_sum, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.]])

        x_sum_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_x_sum_val = T.sum(x_val)
        expected_grad_x_val = T.ones_like(x_val)

        assert T.array_equal(x_sum_val, expected_x_sum_val)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
Code example #19
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_tensorinv_matrix():
    for datatype in backends:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[3, 3])
        inv_x = ad.tensorinv(x)
        executor = ad.Executor([inv_x])

        x_val = T.random([3, 3])
        inv_x_val, = executor.run(feed_dict={x: x_val})
        assert T.array_equal(inv_x_val, T.inv(x_val))
Code example #20
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_summation_einsum_2(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2, 2])
        y = ad.Variable(name="y", shape=[2, 2])
        out = ad.sum(ad.einsum('ij,ab->ab', x, y))

        grad_x, = ad.gradients(out, [x])
        executor = ad.Executor([out, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.]])
        y_val = T.tensor([[5., 6.], [7., 8.]])

        out_val, grad_x_val = executor.run(feed_dict={x: x_val, y: y_val})

        expected_out_val = T.sum(T.einsum('ij,ab->ab', x_val, y_val))
        expected_grad_x_val = T.sum(y_val) * T.ones_like(x_val)

        assert T.array_equal(out_val, expected_out_val)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
Code example #21
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_inner_product_einsum(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[3])
        x_inner = ad.einsum('i,i->', x, x)

        grad_x, = ad.gradients(x_inner, [x])

        executor = ad.Executor([x_inner, grad_x])
        x_val = T.tensor([1., 2., 3.])  # shape [3], matching x above

        y_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_yval = T.norm(x_val)**2
        expected_grad_x_val = 2 * x_val

        assert isinstance(x_inner, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
Code example #22
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_jacobian_trace_einsum(backendopt):
    for datatype in backendopt:
        if datatype == 'taco':
            continue  # Currently taco doesn't support same subscript in one operand.
        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2, 2])
        trace = ad.einsum('ii->', x)

        grad_x, = ad.jacobians(trace, [x])

        executor = ad.Executor([trace, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.]])

        trace_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_trace_val = T.einsum('ii->', x_val)
        expected_grad_x_val = T.identity(2)

        assert T.array_equal(trace_val, expected_trace_val)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
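
Note (added for reference): the trace derivative is d tr(X) / dX[i,j] = delta(i,j), i.e. the identity matrix, which is why expected_grad_x_val is T.identity(2) here and in the gradient version of this test in the next example.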
Code example #23
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_trace_einsum(backendopt):
    for datatype in backendopt:
        if datatype == 'taco':
            continue  # Currently taco doesn't support same subscript in one operand.

        T.set_backend(datatype)
        x = ad.Variable(name="x", shape=[2, 2])
        trace = ad.einsum('ii->', x)

        grad_x, = ad.gradients(trace, [x])

        executor = ad.Executor([trace, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.]])

        trace_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_trace_val = T.einsum('ii->', x_val)
        expected_grad_x_val = T.identity(2)

        assert T.array_equal(trace_val, expected_trace_val)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
Code example #24
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_transpose_einsum(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x = ad.Variable(name="x", shape=[3, 2])
        y = ad.sum(ad.einsum("ij->ji", x))

        grad_x, = ad.gradients(y, [x])

        executor = ad.Executor([y, grad_x])
        x_val = T.tensor([[1, 2], [3, 4], [5, 6]])  # 3x2

        y_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_yval = T.sum(T.transpose(x_val))
        expected_grad_x_val = T.ones_like(x_val)

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
Code example #25
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_jvps():
    for datatype in backends:
        T.set_backend(datatype)
        x1 = ad.Variable(name="x1", shape=[2])
        A1 = ad.Variable(name="A1", shape=[3, 2])
        x2 = ad.Variable(name="x2", shape=[2])
        A2 = ad.Variable(name="A2", shape=[3, 2])
        v1 = ad.Variable(name="v1", shape=[2])
        v2 = ad.Variable(name="v2", shape=[2])
        y = ad.einsum('ab, b->a', A1, x1) + ad.einsum('ab, b->a', A2, x2)

        transposed_vjp_x = ad.jvps(y, [x1, x2], [v1, v2])

        executor = ad.Executor([y, transposed_vjp_x])
        x1_val = T.tensor([1., 2.])
        A1_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])
        v1_val = T.tensor([3., 4.])
        x2_val = T.tensor([1., 2.])
        A2_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])
        v2_val = T.tensor([3., 4.])

        y_val, transposed_vjp_x_val = executor.run(feed_dict={
            x1: x1_val,
            A1: A1_val,
            v1: v1_val,
            x2: x2_val,
            A2: A2_val,
            v2: v2_val
        })

        expected_yval = T.einsum('ab, b->a', A1_val, x1_val) + T.einsum(
            'ab, b->a', A2_val, x2_val)

        expected_transposed_vjp_x_val = T.einsum(
            'ab, b->a', A1_val, v1_val) + T.einsum('ab, b->a', A2_val, v2_val)

        assert isinstance(transposed_vjp_x, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(transposed_vjp_x_val,
                             expected_transposed_vjp_x_val)
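
Note (added for reference): this is a forward-mode test; for y = A1 x1 + A2 x2 the Jacobian-vector product along directions (v1, v2) is A1 v1 + A2 v2, which is what expected_transposed_vjp_x_val computes (the variable name is simply reused from the reverse-mode tests).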
Code example #26
File: jacobian_test.py  Project: LinjianMa/AutoHOOT
def test_mul_const_jacobian(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[2, 2])
        jacobian_x1, = ad.jacobians(2 * x1, [x1])
        executor = ad.Executor([jacobian_x1])
        x1_val = T.tensor([[5., 6.], [7., 8.]])
        jacobian_x1_val, = executor.run(feed_dict={x1: x1_val})
        I = T.identity(2)
        expected_jacobian_x1_val = 2 * T.einsum("ai,bj->abij", I, I)
        assert T.array_equal(jacobian_x1_val, expected_jacobian_x1_val)
Code example #27
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_norm(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x = ad.Variable(name="x", shape=[3, 2])
        y = ad.norm(x)
        z = y**2

        grad_x, = ad.gradients(z, [x])

        executor = ad.Executor([z, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.], [5., 6.]])  # 3x2

        z_val, grad_x_val = executor.run(feed_dict={x: x_val})

        expected_zval = T.norm(x_val)**2
        expected_grad_x_val = 2 * x_val

        assert isinstance(z, ad.Node)
        assert T.array_equal(z_val, expected_zval)
        assert T.array_equal(grad_x_val, expected_grad_x_val)
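
Note (added for reference): z = norm(x)^2 is the sum of squared entries of x, so dz/dx = 2x, which matches expected_grad_x_val.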
Code example #28
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_einsum_3op(backendopt):

    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3, 2])
        x3 = ad.Variable(name="x3", shape=[2, 3])
        x4 = ad.Variable(name="x4", shape=[3, 2])
        matmul = ad.einsum('ik,kj,jl->il', x2, x3, x4)
        y = ad.sum(matmul)

        grad_x2, grad_x3, grad_x4 = ad.gradients(y, [x2, x3, x4])

        executor = ad.Executor([y, grad_x2, grad_x3, grad_x4])
        x2_val = T.tensor([[1, 2], [3, 4], [5, 6]])  # 3x2
        x3_val = T.tensor([[7, 8, 9], [10, 11, 12]])  # 2x3
        x4_val = T.tensor([[1, 2], [3, 4], [5, 6]])  # 3x2

        y_val, grad_x2_val, grad_x3_val, grad_x4_val = executor.run(feed_dict={
            x2: x2_val,
            x3: x3_val,
            x4: x4_val
        })

        expected_grad_sum = T.ones_like(T.dot(T.dot(x2_val, x3_val), x4_val))
        expected_yval = T.sum(T.dot(T.dot(x2_val, x3_val), x4_val))
        expected_grad_x2_val = T.einsum("il, kj, jl->ik", expected_grad_sum,
                                        x3_val, x4_val)
        expected_grad_x3_val = T.einsum("ik, il, jl->kj", x2_val,
                                        expected_grad_sum, x4_val)
        expected_grad_x4_val = T.einsum("ik, kj, il->jl", x2_val, x3_val,
                                        expected_grad_sum)

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, expected_yval)
        assert T.array_equal(grad_x2_val, expected_grad_x2_val)
        assert T.array_equal(grad_x3_val, expected_grad_x3_val)
        assert T.array_equal(grad_x4_val, expected_grad_x4_val)
Code example #29
File: autodiff_test.py  Project: LinjianMa/AutoHOOT
def test_tensorinv_odd_dim(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x = ad.Variable(name="x", shape=[24, 8, 3])
        inv_x = ad.tensorinv(x, ind=1)

        assert inv_x.shape == [8, 3, 24]
        assert inv_x.input_indices_length == 2

        executor = ad.Executor([inv_x])
        x_val = T.random([24, 8, 3])
        inv_x_val, = executor.run(feed_dict={x: x_val})
        assert T.array_equal(inv_x_val, T.tensorinv(x_val, ind=1))
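
Note (added; this assumes ad.tensorinv follows numpy.linalg.tensorinv semantics): with ind=1 the first dimension (24) is matched against the product of the remaining dimensions (8 * 3 = 24), the resulting 24x24 matrix is inverted, and the output is reshaped to x.shape[ind:] + x.shape[:ind] = [8, 3, 24], which is exactly what the shape assertion checks.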
Code example #30
File: s2s_test.py  Project: LinjianMa/AutoHOOT
def test_s2s_tensorinv(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)
        A = ad.Variable(name="A", shape=[2, 2])
        B = ad.tensorinv(A)

        A_val = T.tensor([[1., 0.], [0., 1.]])

        StS = SourceToSource()
        fwd_str = StS.forward([B], function_name='fwd', backend=datatype)
        m = import_code(fwd_str)
        out, = m.fwd([A_val])

        assert T.array_equal(A_val, out)