# Example 1
def test_max_ops():
    """Max reduction: check forward values and backward gradients, both
    for a global max (axis=None) and a per-column max (axis=0)."""
    source = Tensor([[1, 3, 5], [3, 7, -2]], requires_grad=True)
    global_max = ops.max(source, axis=None)
    column_max = ops.max(source, axis=0)
    assert global_max.values == 7
    assert column_max.values.tolist() == [3, 7, 5]

    # The gradient of a max routes the incoming gradient to the argmax
    # position only; every other entry stays zero.
    global_max.backward()
    assert source.grad.tolist() == [[0, 0, 0], [0, 1, 0]]
    source.zero_grad()
    column_max.backward([1, 1, 1])
    assert source.grad.tolist() == [[0, 0, 1], [1, 1, 0]]
# Example 2
def test_minimal_nn():
    """Train a one-layer linear model by plain gradient descent and
    assert that the squared-error loss decreases on every step."""
    inputs = Tensor(np.random.normal(0, 1.0, (100, 3)))
    # Synthetic linear targets; the model should be able to fit them.
    targets = inputs * 3.14 + 30

    weight = Tensor(np.random.normal(0, 1.0, (3, 3)), requires_grad=True)
    bias = Tensor(np.random.normal(0, 1.0, 3), requires_grad=True)

    # Sentinel larger than any realistic initial loss.
    last_loss = 1e10
    for _step in range(100):
        weight.zero_grad()
        bias.zero_grad()
        residual = (inputs @ weight + bias) - targets
        loss = (residual**2).sum()
        loss.backward()
        # Vanilla SGD update with a fixed learning rate.
        weight -= 0.001 * weight.grad
        bias -= 0.001 * bias.grad
        assert loss.values < last_loss
        last_loss = loss.values