Example #1
 def test_tensor_used_in_backward(self):
     # if a tensor is used in backward, its ref_count should increase by 1
     a = flow.Tensor(4.1, require_grad=True)
     b = flow.Tensor(3.1, require_grad=True)
     d = a * b
     # one forward ref for the mul op; one backward ref because mul saves a to compute b's gradient
     assert a.forward_ref_count == 1
     assert a.backward_ref_count == 1
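The counting rule behind these assertions can be modelled without the library. A minimal sketch, assuming a hypothetical standalone Tensor and mul (only the two counter attributes are taken from the tests):

    class Tensor:
        def __init__(self, data, require_grad=False):
            self.data = data
            self.require_grad = require_grad
            self.forward_ref_count = 0   # ops that consume this tensor in forward
            self.backward_ref_count = 0  # backward closures that saved this tensor

    def mul(a, b):
        # each input is consumed once by the forward pass
        a.forward_ref_count += 1
        b.forward_ref_count += 1
        # mul's backward needs both inputs (grad_a = grad * b.data,
        # grad_b = grad * a.data), so each input is saved once for backward
        a.backward_ref_count += 1
        b.backward_ref_count += 1
        return Tensor(a.data * b.data,
                      require_grad=a.require_grad or b.require_grad)

    a = Tensor(4.1, require_grad=True)
    b = Tensor(3.1, require_grad=True)
    d = mul(a, b)
    assert a.forward_ref_count == 1 and a.backward_ref_count == 1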
Example #2
    def test_iadd(self):
        a = flow.Tensor(3)
        b = a
        a += flow.Tensor(5)
        assert np.allclose(a.data, b.data)

        a = flow.Tensor(3)
        b = a
        a = a + flow.Tensor(5)
        assert not np.allclose(a.data, b.data)
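The aliasing behaviour under test matches NumPy's: += mutates the buffer that both names share, while a + ... allocates a new object and rebinds only a. A quick NumPy check of the same pattern:

    import numpy as np

    a = np.array(3.0)
    b = a            # b aliases the same buffer as a
    a += 5           # in-place: mutates the shared buffer
    assert b == 8.0  # b observes the change

    a = np.array(3.0)
    b = a
    a = a + 5        # out-of-place: a is rebound to a fresh array
    assert b == 3.0  # b still holds the old value

The test_imul and test_isub cases below exercise the same distinction for *= and -=.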
Example #3
    def test_imul(self):
        a = flow.Tensor(3)
        b = a
        a *= flow.Tensor(5)
        assert np.allclose(a.data, b.data)

        a = flow.Tensor(3)
        b = a
        a = a * flow.Tensor(5)
        assert not np.allclose(a.data, b.data)
Example #4
    def test_isub(self):
        a = flow.Tensor(3)
        b = a
        a -= flow.Tensor(5)
        assert np.allclose(a.data, b.data)

        a = flow.Tensor(3)
        b = a
        a = a - flow.Tensor(5)
        assert not np.allclose(a.data, b.data)
Example #5
    def test_itrue_div(self):
        # in-place division changes the dtype from int to float, which raises an error,
        # so define a as a float (3.1) in the first place
        a = flow.Tensor(3.1)
        b = a
        a /= flow.Tensor(5)
        assert np.allclose(a.data, b.data)

        a = flow.Tensor(3)
        b = a
        a = a / flow.Tensor(5)
        assert not np.allclose(a.data, b.data)
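The dtype caveat in the comment is the same restriction NumPy enforces: true division of an integer buffer yields floats, which cannot be written back in place. For reference:

    import numpy as np

    a = np.array(3)          # integer dtype
    try:
        a /= 5               # would have to store floats into an int buffer
    except TypeError as err:
        print("in-place true divide on an int array fails:", err)

    a = np.array(3.1)        # float dtype: in-place divide works
    a /= 5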
Example #6
    def test_function_inside_function(self):
        # if an autograd function A is called inside an autograd function B,
        # function A should not increase the ref_count
        class TrivialFunction(flow.autograd.Function):
            @staticmethod
            def forward(ctx, tensor):
                return F.add(tensor, flow.Tensor(0))

            @staticmethod
            def backward(ctx, grad):
                return F.add(grad, flow.Tensor(0))

        a = flow.Tensor(4.1, require_grad=True)
        b = TrivialFunction.apply(a)
        assert a.forward_ref_count == 1
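One way to get this behaviour, sketched here as an assumption about the design rather than the library's actual implementation, is a module-level flag that apply() sets while forward() runs, so ops invoked inside it skip the ref-count bookkeeping. All names here (Tensor, record_ref, add, Function, Trivial) are hypothetical:

    class Tensor:
        def __init__(self, data):
            self.data = data
            self.forward_ref_count = 0

    _inside_apply = False  # hypothetical guard flag, not from the source

    def record_ref(t):
        # ops call this on each input; counting is suppressed inside apply()
        if not _inside_apply:
            t.forward_ref_count += 1

    def add(a, b):
        record_ref(a)
        record_ref(b)
        return Tensor(a.data + b.data)

    class Function:
        @classmethod
        def apply(cls, *tensors):
            global _inside_apply
            for t in tensors:
                record_ref(t)        # the outer apply() counts each input once
            _inside_apply = True     # anything forward() calls will not count
            try:
                return cls.forward(None, *tensors)
            finally:
                _inside_apply = False

    class Trivial(Function):
        @staticmethod
        def forward(ctx, t):
            return add(t, Tensor(0))  # this inner add does not bump the count

    a = Tensor(4.1)
    Trivial.apply(a)
    assert a.forward_ref_count == 1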
Example #7
 def __init__(self):
     super().__init__()
     self.a = Identity(flow.Tensor([[4.0, 5.0]], require_grad=True))
     self.b = Identity(flow.Tensor([[5.0], [6.0]], require_grad=True))
     self.c = Identity(
         flow.Tensor([[1.0, 2.0], [3.0, 4.0]], require_grad=True))
Example #8
 def test_ref_count(self):
     a = flow.Tensor(4.1, require_grad=True)
     b = a + 1
     c = a + 2
     d = a + 3
     # b, c, and d each consume a once, so a has three forward references
     assert a.forward_ref_count == 3
Example #9
 def backward(ctx, grad):
     # pass the incoming gradient through unchanged (add a zero tensor)
     return F.add(grad, flow.Tensor(0))
Example #10
 def forward(ctx, tensor):
     # identity forward: add a zero tensor so the work routes through F.add
     return F.add(tensor, flow.Tensor(0))