def training(m, inputs, targets, batch_size, nb_epochs, lr):
    """
    Training function
    :param m: model
    :param inputs: input data
    :param targets: target data
    :param batch_size: size of each mini-batch (inputs.size(0) is assumed to be a multiple of it)
    :param nb_epochs: number of training epochs
    :param lr: learning rate
    :return: None
    """
    # MSELoss and SGD are provided by the custom framework defined elsewhere in the project
    criterion = MSELoss()
    optimizer = SGD(m, lr, momentum=0.9)
    for epoch in range(nb_epochs):
        for batch in range(0, inputs.size(0), batch_size):
            # Forward pass on the current mini-batch
            output = m.forward(inputs.narrow(0, batch, batch_size))
            loss = criterion.forward(output, targets.narrow(0, batch, batch_size))
            # Backward pass: propagate the loss gradient through the model, then update the parameters
            dl = criterion.backward()
            m.backward(dl)
            optimizer.step()
        if (epoch % 50 == 0) or (epoch == nb_epochs - 1):
            print('Epoch: {} Loss: {:.04f}'.format(epoch, loss.item()))
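A minimal sketch of how training() might be invoked. Sequential, Linear and ReLU are assumed to be exposed by the same from-scratch framework as MSELoss and SGD above (the names and constructor signatures are illustrative assumptions, not confirmed by the original), and the data are plain torch tensors, which supply the narrow()/size() calls used inside training().

import torch

# Hypothetical usage example; model classes and data generation are assumptions.
n = 1000
train_input = torch.rand(n, 2)                                  # n random 2-d points
train_target = (train_input.sum(1, keepdim=True) > 1).float()   # toy binary labels
model = Sequential([Linear(2, 25), ReLU(), Linear(25, 1)])      # assumed framework API
training(model, train_input, train_target, batch_size=50, nb_epochs=250, lr=1e-2)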
import numpy as np

from tensor import Tensor
from layers import Sequential, Linear
from activations import Tanh, Sigmoid
from optimizers import SGD
from losses import MSELoss

np.random.seed(0)

# Toy dataset: four 2-d points; the target is the second input feature
data = Tensor(np.array([[0, 0], [0, 1], [1, 0], [1, 1]]), autograd=True)
target = Tensor(np.array([[0], [1], [0], [1]]), autograd=True)

# Small network: 2 -> 3 -> 1 with Tanh and Sigmoid activations
model = Sequential([Linear(2, 3), Tanh(), Linear(3, 1), Sigmoid()])
criterion = MSELoss()
optim = SGD(parameters=model.get_parameters(), alpha=1)

for i in range(10):
    # Forward pass, loss, backward pass through the autograd graph, parameter update
    pred = model.forward(data)
    loss = criterion.forward(pred, target)
    loss.backward()
    optim.step()
    print(loss)