    def testNeg(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))

        t1 = Tensor(data1, requires_grad=True)
        t2 = -t1

        initialGrad = Tensor(np.ones_like(data1))
        t2.backward(initialGrad)

        assert_array_equal(t1.grad.data, -initialGrad.data)

    def testPow(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))
        initialGrad = Tensor(np.ones_like(data1))

        t1 = Tensor(data1, requires_grad=True)
        t2 = sn.pow(t1, 5)
        t2.backward(initialGrad)

        assert_array_equal(t1.grad.data, 5 * data1**4)
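A quick standalone check of the power rule asserted above (plain NumPy, independent of the library): the analytic gradient 5 * x**4 is compared against a central finite difference.

import numpy as np

x = np.random.uniform(0, 10, size=(5, 5))
eps = 1e-6
numeric = ((x + eps)**5 - (x - eps)**5) / (2 * eps)  # central difference
analytic = 5 * x**4                                  # d/dx x^5
assert np.allclose(numeric, analytic, rtol=1e-4)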
    def testSum(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))

        t1 = Tensor(data1, requires_grad=True)
        t2 = t1.sum()

        assert_array_equal(t2.data, data1.sum())
        assert t2.requires_grad

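The test above only checks the forward value; for the backward pass of sum, every element contributes to the output with weight 1, so the expected gradient is an array of ones. A quick numeric confirmation:

import numpy as np

x = np.random.uniform(0, 10, size=(5, 5))
eps = 1e-6
bumped = x.copy()
bumped[2, 3] += eps  # perturb any single entry
# the sum changes by exactly eps, so each partial derivative is 1
assert np.isclose((bumped.sum() - x.sum()) / eps, 1.0)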
    def testMatMul(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))
        data2 = np.random.uniform(0, 10, size=(5, 5))

        t1 = Tensor(data1, requires_grad=True)
        t2 = Tensor(data2, requires_grad=True)
        t3 = sn.matmul(t1, t2)

        assert_array_equal(t3.data, np.matmul(data1, data2))
        assert t3.requires_grad

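The backward rules a matmul needs are dL/dA = G @ B.T and dL/dB = A.T @ G for an upstream gradient G. A standalone NumPy spot check (L is linear in each entry of A, so the forward difference is exact up to rounding):

import numpy as np

A = np.random.uniform(0, 10, size=(5, 5))
B = np.random.uniform(0, 10, size=(5, 5))
G = np.ones((5, 5))  # upstream gradient, as in the tests above
eps = 1e-6

dA = G @ B.T  # analytic dL/dA for L = sum(G * (A @ B))
A_bump = A.copy()
A_bump[1, 2] += eps
numeric = (np.sum(G * (A_bump @ B)) - np.sum(G * (A @ B))) / eps
assert np.isclose(dA[1, 2], numeric, rtol=1e-3)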
    def testMul(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))
        data2 = np.random.uniform(0, 10, size=(5, 5))

        t1 = Tensor(data1, requires_grad=True)
        t2 = Tensor(data2, requires_grad=True)
        t3 = t1 * t2

        assert_array_equal(t3.data, data1 * data2)
        assert t3.requires_grad
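For the elementwise product, the corresponding rules are dL/dt1 = grad * t2.data and dL/dt2 = grad * t1.data; checked the same way:

import numpy as np

a = np.random.uniform(0, 10, size=(5, 5))
b = np.random.uniform(0, 10, size=(5, 5))
eps = 1e-6

a_bump = a.copy()
a_bump[0, 0] += eps
numeric = ((a_bump * b).sum() - (a * b).sum()) / eps
assert np.isclose(numeric, b[0, 0], rtol=1e-3, atol=1e-4)  # dL/da == b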
Example #6
    def testReLU(self):
        testData = Tensor(np.random.uniform(-10, 10, size=(5, 5)),
                          requires_grad=True)
        correctResult = np.where(testData.data > 0, testData.data, 0)
        y = ReLU(testData)

        initialGrad = Tensor(np.ones_like(testData.data))
        y.backward(initialGrad)

        assert_array_equal(y.data, correctResult)
        assert_array_equal(testData.grad.data,
                           np.where(testData.data > 0, 1, 0))
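The assertion pins ReLU's subgradient at 0 to 0 (the np.where above). Away from the kink, the derivative can be verified with a central difference; samples too close to 0 are nudged aside first:

import numpy as np

x = np.random.uniform(-10, 10, size=(5, 5))
x = np.where(np.abs(x) < 0.01, 0.5, x)  # keep samples away from the kink
eps = 1e-6
relu = lambda v: np.where(v > 0, v, 0.0)
numeric = (relu(x + eps) - relu(x - eps)) / (2 * eps)
assert np.allclose(numeric, np.where(x > 0, 1.0, 0.0))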
Example #7
    def testTanh(self):
        testData = Tensor(np.random.uniform(-10, 10, size=(5, 5)),
                          requires_grad=True)
        y = Tanh(testData)

        initialGrad = Tensor(np.ones_like(testData.data))
        y.backward(initialGrad)

        assert_array_equal(y.data, np.tanh(testData.data))
        assert_array_equal(testData.grad.data, 1 - np.tanh(testData.data)**2)

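The identity d/dx tanh(x) = 1 - tanh(x)^2 used in the assertion, confirmed numerically:

import numpy as np

x = np.random.uniform(-10, 10, size=(5, 5))
eps = 1e-6
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2 * eps)
assert np.allclose(numeric, 1 - np.tanh(x)**2)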
Example #8
def neg(t1: Tensor) -> Tensor:
    data = np.negative(t1.data)
    requires_grad = t1.requires_grad
    return Tensor(data, requires_grad)
Example #9
def mean(t1: Tensor) -> Tensor:
    data = np.mean(t1.data)
    requires_grad = t1.requires_grad
    return Tensor(data, requires_grad)
Example #10
def pow(t1: Tensor, power: Number) -> Tensor:
    data = t1.data ** power
    requires_grad = t1.requires_grad
    return Tensor(data, requires_grad)
Example #11
def sub(t1: Tensor, t2: Tensor) -> Tensor:
    data = t1.data - t2.data
    requires_grad = t1.requires_grad or t2.requires_grad
    return Tensor(data, requires_grad)
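As written, neg, mean, pow, and sub compute only the forward value; the returned Tensor keeps no reference to its input, so backward() would have nothing to propagate through. A minimal self-contained sketch of the usual pattern for recording a backward rule (TinyTensor and tiny_neg are illustrative names, not this library's API):

import numpy as np

class TinyTensor:
    """Toy stand-in for illustration only."""
    def __init__(self, data, parents=()):
        self.data = np.asarray(data, dtype=float)
        self.grad = None
        self.parents = parents  # pairs of (input tensor, local-gradient fn)

    def backward(self, grad):
        # grads overwrite rather than accumulate; fine for a single chain
        self.grad = grad
        for parent, local_grad in self.parents:
            parent.backward(local_grad(grad))

def tiny_neg(t):
    # d(-x)/dx = -1, so the upstream gradient is negated on the way back
    return TinyTensor(-t.data, parents=[(t, lambda g: -g)])

x = TinyTensor([1.0, 2.0])
y = tiny_neg(x)
y.backward(np.ones_like(y.data))
assert np.array_equal(x.grad, [-1.0, -1.0])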
Example #12
    def testMean(self):
        data1 = np.random.uniform(0, 10, size=(5, 5))

        t1 = Tensor(data1, requires_grad=True)
        t2 = t1.mean()
        t2.backward()

        assert_array_equal(t1.grad.data, np.ones_like(data1) / np.size(data1))
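The asserted gradient of mean, 1/n per element, matches a direct perturbation:

import numpy as np

x = np.random.uniform(0, 10, size=(5, 5))
eps = 1e-6
bumped = x.copy()
bumped[4, 4] += eps
assert np.isclose((bumped.mean() - x.mean()) / eps, 1.0 / x.size)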
Example #13

class NeuralNet(Model):
    def __init__(self):
        self.linear1 = Linear(10, 5)
        self.linear2 = Linear(5, 1)
        self.tanh = Tanh()

    def forward(self, x):
        # tanh activation after each linear layer
        x = self.tanh(self.linear1(x))
        return self.tanh(self.linear2(x))


sgd = SGD(lr=0.002)
model = NeuralNet()
model.compile(sgd, MSE())
model.summary()

testData = Tensor(np.random.uniform(-10, 10, size=(10, 1)))
output = model(testData)

# No backward pass has run yet, so both gradients print as None.
print(model.linear2.weights.grad)
print(model.linear1.weights.grad)

# Seed the backward pass with an upstream gradient of 1 for the scalar output.
output.backward(Tensor([[1.0]]))

print("\nno optimize")
# model.optimize()  # would apply the SGD update configured in compile()
print(model.linear2.weights.grad)
print(model.linear1.weights.grad)
print(model.linear1.weights.grad)
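The commented-out model.optimize() is presumably the step that applies the compiled SGD update. A hedged sketch of a training loop assembled only from the calls demonstrated above; the target value, epoch count, and the manually seeded MSE gradient are illustrative assumptions, not this library's documented API:

X = Tensor(np.random.uniform(-10, 10, size=(10, 1)))
target = np.array([[0.5]])  # hypothetical regression target

for epoch in range(100):
    pred = model(X)
    # Seed backward with dMSE/dpred = 2 * (pred - target) / n by hand,
    # mirroring how the snippet drives backward() with an explicit gradient.
    pred.backward(Tensor(2 * (pred.data - target) / target.size))
    model.optimize()  # assumed to apply the SGD step from compile()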