Example #1
# Assumed imports (module paths are not shown in the original snippet):
#   import matplotlib.pyplot as plt
#   from <framework> import models, layers, losses, optimizers
#   from <framework>.datasets import mnist
def test_mnist():
    # Hold out the last 10,000 training samples as a validation set
    (train_x, train_y), (test_x, test_y) = mnist.load_data()
    val_x = train_x[50000:]
    val_y = train_y[50000:]
    train_x = train_x[:50000]
    train_y = train_y[:50000]
    batch_size = 200

    # Small fully connected classifier: two hidden layers, softmax output
    model = models.Sequential()
    model.add(layers.Linear(28, input_shape=(None, train_x.shape[1])))
    model.add(layers.ReLU())
    model.add(layers.Linear(10))
    model.add(layers.ReLU())
    model.add(layers.Linear(10))
    model.add(layers.Softmax())

    acc = losses.categorical_accuracy.__name__
    model.compile(losses.CrossEntropy(),
                  optimizers.SGD(lr=0.001),
                  metrics=[losses.categorical_accuracy])
    model.summary()
    history = model.train(train_x,
                          train_y,
                          batch_size,
                          epochs=32,
                          validation_data=(val_x, val_y))
    # Plot per-epoch training/validation curves from the returned history
    epochs = range(1, len(history["loss"]) + 1)
    plt.plot(epochs, history["loss"], 'ro', label="Training loss")
    plt.plot(epochs, history["val_loss"], 'go', label="Validation loss")
    plt.plot(epochs, history[acc], 'r', label="Training accuracy")
    plt.plot(epochs, history["val_" + acc], 'g', label="Validation accuracy")
    plt.title('Training/validation loss and accuracy')
    plt.xlabel('Epochs')
    plt.ylabel('Loss/Accuracy')
    plt.legend()
    plt.show(block=True)
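The plotting code above implies that model.train() returns a plain dict of per-epoch lists. A minimal sketch of that assumed shape, using only the keys the snippet actually reads (the lists are empty placeholders, not real results):

history = {
    "loss": [],                      # training loss, one entry per epoch
    "val_loss": [],                  # validation loss
    "categorical_accuracy": [],      # training accuracy (the `acc` key)
    "val_categorical_accuracy": [],  # validation accuracy ("val_" + acc)
}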
Example #2
    if not "sgd_momentum" in parameters["training"]:
        parameters["training"]['sgd_momentum']=0.9
    optimizer = optim.SGD(net.parameters(), lr=parameters["training"]['learning_rate'], momentum=parameters["training"]['sgd_momentum'])
else:
    optimizer = optim.Adam(net.parameters(), lr=parameters["training"]['learning_rate'])

# LOSS
if parameters["training"]["loss_function"]=="dice":

    if (not "dice_smooth" in parameters["training"]):
        parameters["training"]['dice_smooth']=0.001

    loss_function = losses.Dice(smooth=parameters["training"]['dice_smooth'])
    
else:
    loss_function = losses.CrossEntropy()

# LR SCHEDULE
if parameters["training"]["lr_schedule"]=="cosine":
    scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, parameters["training"]["nb_epochs"])

elif parameters["training"]["lr_schedule"]=="poly":
    if not "poly_schedule_p" in parameters["training"]:
        parameters["training"]['poly_schedule_p']=0.9

    lr_lambda = lambda epoch: (1-float(epoch)/parameters["training"]["nb_epochs"])**parameters["training"]["poly_schedule_p"]
    scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda)

else:
    scheduler = optim.lr_scheduler.LambdaLR(optimizer, lambda epoch: 1)
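Example #2 drives everything from a nested parameters dict. A minimal sketch of the assumed layout, covering only the keys the snippet touches (the values shown are illustrative, not from the source):

parameters = {
    "training": {
        "optimizer": "sgd",       # any other value falls through to Adam
        "learning_rate": 0.01,    # hypothetical value
        "loss_function": "dice",  # any other value falls through to CrossEntropy
        "lr_schedule": "poly",    # "cosine", "poly", or constant otherwise
        "nb_epochs": 100,         # hypothetical value
        # "sgd_momentum", "dice_smooth", and "poly_schedule_p" are optional;
        # the snippet fills in 0.9, 0.001, and 0.9 respectively when absent.
    }
}

In a standard PyTorch training loop, optimizer.step() runs once per batch while scheduler.step() runs once per epoch, which is consistent with the per-epoch lambda used by the poly schedule above.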