Example no. 1
    def test_sum_with_grad(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = t1.sum()

        t2.backward(Tensor(3))

        assert t1.grad.data.tolist() == [3, 3, 3]
Example no. 2
    def test_simple_sum(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = t1.sum()

        t2.backward()

        assert t1.grad.data.tolist() == [1, 1, 1]
Example no. 3
    def test_simple_sum(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = t1.sum()

        self.assertEqual(t2.data.tolist(), 6)

        t2.backward()

        self.assertEqual(t1.grad.data.tolist(), [1, 1, 1])
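Note: calling backward() with no argument uses an implicit upstream gradient of 1, while Example no. 1 passes an explicit gradient of 3; in both cases sum distributes the upstream gradient unchanged to every input element. A minimal NumPy sketch of this rule (an assumption for illustration; the library's Sum implementation is not shown here):

    import numpy as np

    # Hypothetical sketch: sum's backward broadcasts the upstream
    # scalar gradient to the input's shape.
    def sum_backward(grad, input_shape):
        return grad * np.ones(input_shape)

    print(sum_backward(1.0, (3,)).tolist())  # [1.0, 1.0, 1.0], Examples no. 2-3
    print(sum_backward(3.0, (3,)).tolist())  # [3.0, 3.0, 3.0], Example no. 1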
Example no. 4
    def test_mean_with_grad(self):
        """ Test mean of all elements of tensor, and input data to backward function
        """
        tensor1 = Tensor([1, 2, 3], requires_grad=True)
        tensor2 = tensor1.mean()

        tensor2.backward(Tensor(3.))

        assert tensor1.grad.data.tolist() == [1, 1, 1]
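Note: for mean each of the n input elements contributes with weight 1/n, so the upstream gradient 3.0 over n = 3 elements yields [1, 1, 1]. A quick NumPy check of the rule (a sketch, not the library's code):

    import numpy as np

    # Hypothetical sketch: mean's backward scales the upstream gradient
    # by 1/n and broadcasts it to the input's shape.
    def mean_backward(grad, input_shape):
        n = np.prod(input_shape)
        return (grad / n) * np.ones(input_shape)

    print(mean_backward(3.0, (3,)).tolist())  # [1.0, 1.0, 1.0]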
Example no. 5
    def test_sigmoid(self):
        """ Test relu activation function for tensors
        """
        tensor1 = Tensor([-100, 0, 10], requires_grad=True)
        tensor2 = Sigmoid()(tensor1)

        tensor2.backward(Tensor([1, 1, 1]))

        assert np.round(tensor2.data, 2).tolist() == [0, 0.5, 1]
        assert np.round(tensor1.grad.data, 2).tolist() == [0, 0.25, 0]
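Note: the expected gradients follow from the sigmoid derivative sigma'(x) = sigma(x) * (1 - sigma(x)), which peaks at 0.25 for x = 0 and saturates to 0 for large |x|. A standalone NumPy check:

    import numpy as np

    x = np.array([-100.0, 0.0, 10.0])
    s = 1.0 / (1.0 + np.exp(-x))              # sigmoid
    print(np.round(s * (1 - s), 2).tolist())  # [0.0, 0.25, 0.0]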
Example no. 6
    def test_softmax(self):
        """ Test relu activation function for tensors
        """
        tensor1 = Tensor([[-2, 4, -6]], requires_grad=True)
        tensor2 = Softmax()(tensor1)

        tensor2.backward(Tensor([[1, 1, 1]]))

        assert np.round(tensor2.data, 2).tolist() == [[0, 1, 0]]
        assert np.round(tensor1.grad.data, 2).tolist() == [[0, 0, 0]]
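Note: the all-zero gradient is correct, not a degenerate test. For a softmax output s and upstream gradient g, the Jacobian-vector product is s * (g - g·s); with g = [1, 1, 1] we get g·s = 1 because s sums to 1, so the gradient vanishes identically. A standalone NumPy check:

    import numpy as np

    x = np.array([[-2.0, 4.0, -6.0]])
    s = np.exp(x - x.max())
    s /= s.sum()                    # softmax, numerically stable
    g = np.ones_like(s)
    print(s * (g - (g * s).sum()))  # ~[[0. 0. 0.]]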
Example no. 7
    def test_relu(self):
        """ Test relu activation function for tensors
        """
        tensor1 = Tensor([-2, 4, -6], requires_grad=True)
        tensor2 = Relu()(tensor1)

        tensor2.backward(Tensor([1, 1, 1]))

        assert tensor2.data.tolist() == [0, 4, 0]
        assert np.round(tensor1.grad.data, 2).tolist() == [0, 1, 0]
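Note: ReLU's derivative is 1 where the input is positive and 0 elsewhere, so the upstream gradient of ones is simply masked:

    import numpy as np

    x = np.array([-2.0, 4.0, -6.0])
    print(np.where(x > 0, 1.0, 0.0).tolist())  # [0.0, 1.0, 0.0]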
Example no. 8
    def test_simple_mean(self):
        """ Test mean of all elements of tensor
        """
        tensor1 = Tensor([3, 6, 9], requires_grad=True)

        tensor2 = tensor1.mean()
        tensor2.backward()

        assert tensor2.data.tolist() == 6
        assert np.round(tensor1.grad.data, 2).tolist() == [0.33, 0.33, 0.33]
Example no. 9
    def test_simple_sum(self):
        """ Test sum of all elements of tensor
        """
        tensor1 = Tensor([1, 2, 3], requires_grad=True)

        tensor2 = tensor1.sum()
        tensor2.backward()

        assert tensor2.data.tolist() == 6
        assert tensor1.grad.data.tolist() == [1, 1, 1]
Example no. 10
    def test_simple_mul(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = Tensor([4, 5, 6], requires_grad=True)

        t3 = t1 * t2
        t3.backward(Tensor([-1, -2, -3]))

        assert t3.data.tolist() == [4, 10, 18]
        assert t1.grad.data.tolist() == [-4, -10, -18]
        assert t2.grad.data.tolist() == [-1, -4, -9]
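Note: these gradients are the elementwise product rule: for t3 = t1 * t2, t1.grad = grad * t2 and t2.grad = grad * t1. A standalone NumPy check:

    import numpy as np

    t1 = np.array([1.0, 2.0, 3.0])
    t2 = np.array([4.0, 5.0, 6.0])
    g = np.array([-1.0, -2.0, -3.0])
    print((g * t2).tolist())  # [-4.0, -10.0, -18.0] == t1.grad
    print((g * t1).tolist())  # [-1.0, -4.0, -9.0]   == t2.grad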
Example no. 11
    def test_tanh(self):
        """ Test tanh activation functions for tensors
        """
        tensor1 = Tensor([0, 1, 2], requires_grad=True)
        tensor2 = Tanh()(tensor1)

        tensor2.backward(Tensor([1, 1, 1]))

        assert np.round(tensor2.data, 2).tolist() == [0, 0.76, 0.96]
        assert np.round(tensor1.grad.data, 2).tolist() == [1, 0.42, 0.07]
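Note: d/dx tanh(x) = 1 - tanh(x)^2, which is 1 at x = 0 and decays quickly:

    import numpy as np

    x = np.array([0.0, 1.0, 2.0])
    print(np.round(1 - np.tanh(x) ** 2, 2).tolist())  # [1.0, 0.42, 0.07]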
Example no. 12
    def test_simple_add(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = Tensor([4, 5, 6], requires_grad=True)

        t3 = t1 + t2
        t3.backward(Tensor([-1, -2, -3]))

        assert t3.data.tolist() == [5, 7, 9]
        assert t1.grad.data.tolist() == [-1, -2, -3]
        assert t2.grad.data.tolist() == [-1, -2, -3]
Example no. 13
    def test_log(self):
        """ Test log function for tensors
        """
        tensor1 = Tensor([1, np.e, np.e**2], requires_grad=True)
        tensor2 = Log()(tensor1)

        tensor2.backward(Tensor([1, np.e, np.e**2]))

        assert tensor2.data.tolist() == [0, 1, 2]
        assert tensor1.grad.data.tolist() == [1, 1, 1]
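Note: d/dx log(x) = 1/x, so the upstream gradient [1, e, e^2] divided elementwise by the inputs [1, e, e^2] gives exactly [1, 1, 1]:

    import numpy as np

    x = np.array([1.0, np.e, np.e ** 2])
    g = np.array([1.0, np.e, np.e ** 2])
    print((g / x).tolist())  # [1.0, 1.0, 1.0]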
Example no. 14
    def test_broadcast_sub2(self):
        t1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)  # (2,3)
        t2 = Tensor([[7, 8, 9]], requires_grad=True)  # (1,3)

        t3 = t1 - t2
        assert t3.data.tolist() == [[-6, -6, -6], [-3, -3, -3]]

        t3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        assert t1.grad.data.tolist() == [[1, 1, 1], [1, 1, 1]]
        assert t2.grad.data.tolist() == [[-2, -2, -2]]
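Note: when an operand was broadcast, its gradient must be reduced back to the operand's shape by summing over the broadcast axes; that is why t2.grad is [[-2, -2, -2]] (two rows collapse into one). A common way to implement this reduction (a sketch of the usual unbroadcast logic, assumed here; the library's code is not shown):

    import numpy as np

    def unbroadcast(grad, shape):
        # drop leading axes that broadcasting added
        while grad.ndim > len(shape):
            grad = grad.sum(axis=0)
        # collapse axes that were size 1 in the original operand
        for axis, dim in enumerate(shape):
            if dim == 1:
                grad = grad.sum(axis=axis, keepdims=True)
        return grad

    g = -np.ones((2, 3))  # d(t1 - t2)/dt2 = -1 times the upstream ones
    print(unbroadcast(g, (1, 3)).tolist())  # [[-2.0, -2.0, -2.0]]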
Example no. 15
    def test_isub(self):
        """Test isub of tensor (tensor-=another_tensor) with the same shape
        """
        tensor1 = Tensor([2, 4, 6, 8], requires_grad=True)
        tensor2 = Tensor([5, 8, 1, 2])

        tensor2 -= tensor1
        assert tensor2.data.tolist() == [3, 4, -5, -6]

        tensor2 -= 1
        assert tensor2.data.tolist() == [2, 3, -6, -7]
Example no. 16
    def test_broadcast_mul2(self):
        t1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)  # (2,3)
        t2 = Tensor([[7, 8, 9]], requires_grad=True)  # (1,3)

        t3 = t1 * t2
        assert t3.data.tolist() == [[7, 16, 27], [28, 40, 54]]

        t3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        assert t1.grad.data.tolist() == [[7, 8, 9], [7, 8, 9]]
        assert t2.grad.data.tolist() == [[5, 7, 9]]
Example no. 17
    def test_broadcast_add2(self):
        t1 = Tensor([[1, 2, 3], [4, 5, 6]],
                    requires_grad=True)  # shape = (2, 3)
        t2 = Tensor([[7, 8, 9]], requires_grad=True)  # shape = (1, 3)

        t3 = t1 + t2  # shape (2, 3)
        t3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        assert t3.data.tolist() == [[8, 10, 12], [11, 13, 15]]
        assert t1.grad.data.tolist() == [[1, 1, 1], [1, 1, 1]]
        assert t2.grad.data.tolist() == [[2, 2, 2]]
Example no. 18
    def test_imul(self):
        """Test imul of tensor (tensor*=another_tensor) with the same shape
        """
        tensor1 = Tensor([2, 4, 6, 8], requires_grad=True)
        tensor2 = Tensor([5, 8, 1, 2])

        tensor2 *= tensor1
        assert tensor2.data.tolist() == [10, 32, 6, 16]

        tensor2 *= tensor1
        assert tensor2.data.tolist() == [20, 128, 36, 128]
Example no. 19
    def test_idiv(self):
        """Test idiv of tensor (tensor/=another_tensor) with the same shape
        """
        tensor1 = Tensor([2, 2, 2, 2], requires_grad=True)
        tensor2 = Tensor([4, 8, 12, 16])

        tensor2 /= tensor1
        assert tensor2.data.tolist() == [2, 4, 6, 8]

        tensor2 /= tensor1
        assert tensor2.data.tolist() == [1, 2, 3, 4]
Example no. 20
    def test_simple_div(self):
        """ Test div between two tensors with the same shape
        """
        tensor1 = Tensor([2, 2.5, 3], requires_grad=True)
        tensor2 = Tensor([4, 5, 6], requires_grad=True)

        tensor3 = tensor2 / tensor1
        tensor3.backward(Tensor([-1, -2, -3]))

        assert tensor3.data.tolist() == [2, 2, 2]
        assert tensor1.grad.data.tolist() == [1, 1.6, 2]
        assert tensor2.grad.data.tolist() == [-0.5, -0.8, -1]
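Note: for tensor3 = tensor2 / tensor1 the quotient rule gives tensor2.grad = grad / tensor1 and tensor1.grad = -grad * tensor2 / tensor1**2. A standalone NumPy check:

    import numpy as np

    a = np.array([2.0, 2.5, 3.0])    # tensor1 (denominator)
    b = np.array([4.0, 5.0, 6.0])    # tensor2 (numerator)
    g = np.array([-1.0, -2.0, -3.0])
    print((g / a).tolist())            # [-0.5, -0.8, -1.0] == tensor2.grad
    print((-g * b / a ** 2).tolist())  # [1.0, 1.6, 2.0]    == tensor1.grad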
Example no. 21
    def test_broadcast_mul1(self):
        t1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)  # (2, 3)
        t2 = Tensor([7, 8, 9], requires_grad=True)  # (3,)

        t3 = t1 * t2  # (2, 3)

        self.assertEqual(t3.data.tolist(), [[7, 16, 27], [28, 40, 54]])

        t3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        self.assertEqual(t1.grad.data.tolist(), [[7, 8, 9], [7, 8, 9]])
        self.assertEqual(t2.grad.data.tolist(), [5, 7, 9])
Example no. 22
    def test_simple_add(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = Tensor([4, 5, 6], requires_grad=True)

        t3 = t1 + t2

        self.assertEqual(t3.data.tolist(), [5, 7, 9])

        t3.backward(Tensor([-1, -2, -3]))

        self.assertEqual(t1.grad.data.tolist(), [-1, -2, -3])
        self.assertEqual(t2.grad.data.tolist(), [-1, -2, -3])
Example no. 23
    def test_broadcast_add2(self):
        t1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)  # (2, 3)
        t2 = Tensor([[7, 8, 9]], requires_grad=True)  # (1, 3)

        t3 = t1 + t2  # (2, 3)

        self.assertEqual(t3.data.tolist(), [[8, 10, 12], [11, 13, 15]])

        t3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        self.assertEqual(t1.grad.data.tolist(), [[1, 1, 1], [1, 1, 1]])
        self.assertEqual(t2.grad.data.tolist(), [[2, 2, 2]])
Example no. 24
    def test_simple_mul(self):
        t1 = Tensor([1, 2, 3], requires_grad=True)
        t2 = Tensor([4, 5, 6], requires_grad=True)

        t3 = t1 * t2

        self.assertEqual(t3.data.tolist(), [4, 10, 18])

        t3.backward(Tensor([-1, -2, -3]))

        self.assertEqual(t1.grad.data.tolist(), [-4, -10, -18])
        self.assertEqual(t2.grad.data.tolist(), [-1, -4, -9])
Example no. 25
    def test_simple_mul(self):
        """ Test product between two tensors with the same shape
        """
        tensor1 = Tensor([2, 4, 6], requires_grad=True)
        tensor2 = Tensor([4, 5, 6], requires_grad=True)

        tensor3 = tensor1 * tensor2
        tensor3.backward(Tensor([-1, -2, -3]))

        assert tensor3.data.tolist() == [8, 20, 36]
        assert tensor1.grad.data.tolist() == [-4, -10, -18]
        assert tensor2.grad.data.tolist() == [-2, -8, -18]
Example no. 26
    def test_broadcast_sub_diff_shapes(self):
        """ Test sub of tensors with same dim but different shape
        """
        tensor1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)
        tensor2 = Tensor([[7, 8, 9]], requires_grad=True)

        tensor3 = tensor1 - tensor2
        tensor3.backward(Tensor([[-1, -1, -1], [-1, -1, -1]]))

        assert tensor3.data.tolist() == [[-6, -6, -6], [-3, -3, -3]]
        assert tensor1.grad.data.tolist() == [[-1, -1, -1], [-1, -1, -1]]
        assert tensor2.grad.data.tolist() == [[2, 2, 2]]
Example no. 27
    def test_sub(self):
        """Test substraction of tensor with the same shape
        """
        tensor1 = Tensor([2, 4, 6, 8], requires_grad=True)
        tensor2 = Tensor([5, 8, 1, 2], requires_grad=True)

        tensor3 = tensor1 - tensor2
        tensor3.backward(Tensor([-1., -2., -3., -4.]))

        assert tensor3.data.tolist() == [-3, -4, 5, 6]
        assert tensor1.grad.data.tolist() == [-1, -2, -3, -4]
        assert tensor2.grad.data.tolist() == [1, 2, 3, 4]
Example no. 28
    def test_broadcast_sub(self):
        """ Test sub of tensors with 1 tensor of different dim (broadcasting)
        """
        tensor1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)
        tensor2 = Tensor([7, 8, 9], requires_grad=True)

        tensor3 = tensor1 - tensor2
        tensor3.backward(Tensor([[-1, -1, -1], [-1, -1, -1]]))

        assert tensor3.data.tolist() == [[-6, -6, -6], [-3, -3, -3]]
        assert tensor1.grad.data.tolist() == [[-1, -1, -1], [-1, -1, -1]]
        assert tensor2.grad.data.tolist() == [2, 2, 2]
Example no. 29
    def test_broadcast_mul_diff_shape(self):
        """ Test product of tensors with different shapes (broadcasting)
        """
        tensor1 = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)
        tensor2 = Tensor([[7, 8, 9]], requires_grad=True)

        tensor3 = tensor1 * tensor2
        tensor3.backward(Tensor([[1, 1, 1], [1, 1, 1]]))

        assert tensor3.data.tolist() == [[7, 16, 27], [28, 40, 54]]
        assert tensor1.grad.data.tolist() == [[7., 8., 9.], [7., 8., 9.]]
        assert tensor2.grad.data.tolist() == [[5., 7., 9.]]
Example no. 30
    def test_simple_matmul(self):
        t1 = Tensor([[1, 2], [3, 4], [5, 6]], requires_grad=True) # (3, 2)
        t2 = Tensor([[10], [20]], requires_grad=True)             # (2, 1)

        t3 = t1 @ t2                                              # (3, 1)
        assert t3.data.tolist() == [[50], [110], [170]]

        grad = Tensor([[1], [2], [3]])
        t3.backward(grad)

        # Check the gradients against the matmul formulas used in tensor.py
        np.testing.assert_allclose(t1.grad.data, grad.data @ t2.data.T)
        np.testing.assert_allclose(t2.grad.data, t1.data.T @ grad.data)
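Note: those are the standard matmul gradients: for t3 = t1 @ t2, t1.grad = grad @ t2.T and t2.grad = t1.T @ grad. A standalone NumPy check with the same values:

    import numpy as np

    A = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])  # t1, (3, 2)
    B = np.array([[10.0], [20.0]])                      # t2, (2, 1)
    G = np.array([[1.0], [2.0], [3.0]])                 # upstream grad, (3, 1)
    print((G @ B.T).tolist())  # t1.grad: [[10.0, 20.0], [20.0, 40.0], [30.0, 60.0]]
    print((A.T @ G).tolist())  # t2.grad: [[22.0], [28.0]]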