Example #1
0
 def train(self, data, numepochs):
     """Run ``numepochs`` epochs of MSE training on a single fixed batch.

     ``data`` is an ``(x, y)`` pair.  Each epoch performs one forward
     pass, backpropagates the MSE gradient through the network, and
     prints the current loss value.
     """
     from NN.loss import MSELoss
     criterion = MSELoss()
     for epoch in range(numepochs):
         x, y = data
         prediction = self.forward(x)
         # Gradient of the loss with respect to the network output.
         grad = criterion.loss_gradient(y, prediction)
         self.backward(grad)
         print(f"Epoch {epoch}, loss: {criterion.loss(y, prediction)}")
Example #2
0
def train(network, data, numepochs):
    """Run ``numepochs`` epochs of MSE training of ``network`` on one batch.

    ``data`` is an ``(x, y)`` pair.  ``network.forward`` is expected to
    return ``(prediction, cachelist)``; the cache list is handed back to
    ``network.backward`` together with the loss gradient.
    """
    from NN.loss import MSELoss
    criterion = MSELoss()
    for epoch in range(numepochs):
        x, y = data
        prediction, caches = network.forward(x)
        # Gradient of the loss with respect to the network output.
        grad = criterion.loss_gradient(y, prediction)
        network.backward(grad, caches)
        print(f"Epoch {epoch}, loss: {criterion.loss(y, prediction)}")
Example #3
0
def train(network, data, numepochs, loss=None, optim=None):
    """Train ``network`` on a fixed ``(x, y)`` batch for ``numepochs`` epochs.

    Parameters
    ----------
    network : object with ``forward(x) -> (yhat, cachelist)`` and
        ``backward(dldy, cachelist, optim)`` methods.
    data : tuple
        The ``(x, y)`` training batch.
    numepochs : int
        Number of passes over the batch.
    loss : optional
        Loss object exposing ``loss(y, yhat)`` and
        ``loss_gradient(y, yhat)``; defaults to ``MSELoss()``.
    optim : optional
        Optimiser passed through to ``network.backward``; defaults to
        ``sgd_optimiser(0.01)``.
    """
    # Use `is None` rather than truthiness so a falsy-but-valid loss or
    # optimiser object supplied by the caller is not silently replaced
    # by the defaults (PEP 8: comparisons to None use `is`).
    if loss is None:
        from NN.loss import MSELoss
        loss = MSELoss()
    if optim is None:
        optim = sgd_optimiser(0.01)
    for i in range(numepochs):
        x, y = data
        yhat, cachelist = network.forward(x)
        dldy = loss.loss_gradient(y, yhat)
        network.backward(dldy, cachelist, optim)
        print(f"Epoch {i}, loss: {loss.loss(y, yhat)}")
Example #4
0
def test_network():
    """Training for 10 epochs should reduce the MSE loss on a random batch.

    NOTE(review): this test uses unseeded random data/weights, so it is
    nondeterministic in principle — consider seeding if it ever flakes.
    """
    from NN.loss import MSELoss
    x = np.random.randn(100, 10)
    y = np.random.randn(100, 3)
    net = layer.Network(layer.FullyConnected(10, 20), layer.Tanh(),
                        layer.FullyConnected(20, 3), layer.Tanh())
    mse = MSELoss()
    yhat = net.forward(x, {})
    initloss = mse.loss(y, yhat)
    layer.train(net, (x, y), 10)
    yhat = net.forward(x, {})
    # Keep the (target, prediction) argument order consistent with the
    # initial-loss computation above; the original swapped the arguments
    # here, which only worked because MSE happens to be symmetric.
    finloss = mse.loss(y, yhat)

    assert initloss > finloss