Example 1
from autograd import Tensor

# Build the toy dataset y = 2 * x for x = 1..9
xs, ys = [], []
for i in range(1, 10):
    xs.append(Tensor([[i]]))
    ys.append(Tensor([[i * 2]]))

# Parameters of the linear model y_ = x * w + b
w = Tensor([[.1]], requires_grad=True)
b = Tensor([.0], requires_grad=True)

# Train with SGD: 9 passes over the 9 samples
for epoch in range(9):
    for j in range(9):
        x = xs[j]
        y = ys[j]
        print('x:')
        print(x.narray)
        print('y:')
        print(y.narray)
        # Forward pass: y_ = x @ w + b
        y_ = x.matmul(w).add(b)
        print('y_:')
        print(y_.narray)
        # Squared-error loss: (y_ - y)^2
        loss = y_.sub(y).pow(Tensor(2.))
        print('loss:')
        print(loss.narray)
        # Backpropagate to populate w.grad and b.grad
        loss.backward()

        # print(w.grad)
        # print(b.grad)

        # Gradient-descent update with learning rate 0.01
        w.narray = w.narray - (0.01 * w.grad.T)
        b.narray = b.narray - (0.01 * b.grad.T)

# Learned parameters; with enough epochs the fit should approach w = 2, b = 0 (the data is y = 2x)
print(w.narray)
print(b.narray)
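
For reference, here is a minimal plain-NumPy sketch of the same training loop. It is not part of the autograd library above; it assumes the library returns the standard gradients of the squared error, i.e. 2 * (y_ - y) * x for w and 2 * (y_ - y) for b.

import numpy as np

xs = np.arange(1, 10, dtype=float)
ys = 2.0 * xs

w, b = 0.1, 0.0
for epoch in range(9):
    for x, y in zip(xs, ys):
        y_ = w * x + b
        # Gradients of (y_ - y)^2 with respect to w and b
        grad_w = 2.0 * (y_ - y) * x
        grad_b = 2.0 * (y_ - y)
        # Same update rule as above, learning rate 0.01
        w -= 0.01 * grad_w
        b -= 0.01 * grad_b

print(w, b)  # w gets close to 2 within a few epochs; b decays toward 0 more slowly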
Example 2
from autograd import Tensor

# One training sample x with target y = 60
x = Tensor([[1., 2., 3.]])
w = Tensor([[2.], [3.], [4.]], requires_grad=True)
b = Tensor([.0], requires_grad=True)
# Forward pass: y_ = x @ w + b = 20
y_ = x.matmul(w).add(b)
y = Tensor([60.])
# Halved squared-error loss: (y_ - y)^2 / 2 = 800
loss = y_.sub(y).pow(Tensor(2.)).div(Tensor(2.))

print(loss.narray)

loss.backward()

print(w.grad)
print(b.grad)

# Gradient-descent step (learning rate 0.001): subtract the gradient, as in Example 1
w.narray = w.narray - (0.001 * w.grad.T)
b.narray = b.narray - (0.001 * b.grad.T)

print(w.narray)
print(b.narray)
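
The single forward/backward pass above can be checked by hand. A minimal plain-NumPy sketch, independent of the autograd library and assuming it returns the standard gradients of the halved squared error:

import numpy as np

x = np.array([[1., 2., 3.]])
w = np.array([[2.], [3.], [4.]])
b = np.array([[0.]])
y = np.array([[60.]])

y_ = x @ w + b                # 1*2 + 2*3 + 3*4 + 0 = 20
loss = (y_ - y) ** 2 / 2      # (20 - 60)^2 / 2 = 800
grad_w = ((y_ - y) * x).T     # (y_ - y) * x = [-40, -80, -120], as a column vector
grad_b = y_ - y               # -40

w = w - 0.001 * grad_w        # [[2.04], [3.08], [4.12]]
b = b - 0.001 * grad_b        # [[0.04]]
print(loss, w.ravel(), b.ravel())

After this step the prediction moves from 20 to about 20.6, i.e. toward the target of 60, so the loss decreases.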