# Example #1
# Mini-batch SGD training loop: each epoch draws random batches (with
# replacement, via resample), runs backprop through the graph, applies an
# SGD step to the trainable parameters, and reports the mean batch loss.

m = X_.shape[0]               # total number of training examples
batch_size = 11
steps_per_epoch = m // batch_size

graph = Graph(feed_dict)
sgd = SGD(1e-2)               # learning rate 1e-2

# Parameters updated by the optimizer each step.
trainables = [W1, b1, W2, b2]

print("Total number of examples = {}".format(m))

for epoch in range(epochs):
    epoch_loss = 0
    for _ in range(steps_per_epoch):
        # Step 1: randomly sample a batch of examples.
        X_batch, y_batch = resample(X_, y_, n_samples=batch_size)

        # Feed the batch into the graph's X and y Input nodes.
        X.output = X_batch
        y.output = y_batch

        # Forward + backward pass, then one optimizer step.
        graph.compute_gradients()
        sgd.update(trainables)

        epoch_loss += graph.loss()

    print("Epoch: {}, Loss: {:.3f}".format(epoch + 1, epoch_loss / steps_per_epoch))