Example #1
def test_big_linear_batchnorm_relu_xeloss_momentum():
    np.random.seed(11785)
    mytorch_mlp = Sequential(Linear(10, 20), BatchNorm1d(20), ReLU(),
                             Linear(20, 30), BatchNorm1d(30), ReLU())
    mytorch_optimizer = SGD(mytorch_mlp.parameters(), momentum=0.9)
    mytorch_criterion = CrossEntropyLoss()
    test_step(mytorch_mlp,
              mytorch_optimizer,
              5,
              5,
              mytorch_criterion=mytorch_criterion)
    return True
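For reference, here is a minimal sketch of the momentum update this test exercises, written in plain numpy. The rule shown (v <- mu*v - lr*grad, then W <- W + v) and every name below are illustrative assumptions, not the autograder's API.

import numpy as np

# Minimal sketch, assuming classic momentum SGD: v <- mu*v - lr*grad, W <- W + v.
# `W`, `velocity`, and the random gradients are purely illustrative.
np.random.seed(0)
W = np.random.randn(10, 20)          # a single weight matrix being optimized
velocity = np.zeros_like(W)          # per-parameter momentum buffer
lr, mu = 0.1, 0.9

for step in range(5):
    grad = np.random.randn(*W.shape)     # stand-in for a real backward pass
    velocity = mu * velocity - lr * grad
    W = W + velocity                     # parameter update with momentum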
Example #2
def mnist(train_x, train_y, val_x, val_y):
    """Problem 3.1: Initialize objects and start training
    You won't need to call this function yourself.
    (Data is provided by autograder)
    
    Args:
        train_x (np.array): training data (55000, 784) 
        train_y (np.array): training labels (55000,) 
        val_x (np.array): validation data (5000, 784)
        val_y (np.array): validation labels (5000,)
    Returns:
        val_accuracies (list(float)): List of accuracies per validation round
                                      (num_epochs,)
    """
    # Initialize an MLP, optimizer, and criterion
    layers = [Linear(784, 20), BatchNorm1d(20), ReLU(), Linear(20, 10)]
    model = Sequential(*layers)
    criterion = CrossEntropyLoss()
    optimizer = SGD(model.parameters(), lr=0.1, momentum=0.9)

    # Call the training routine (defined below)
    val_accuracies = train(model,
                           optimizer,
                           criterion,
                           train_x,
                           train_y,
                           val_x,
                           val_y,
                           num_epochs=3)

    return val_accuracies
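The training routine itself is left for the student to write. Below is a minimal sketch of a `train` function with the signature used above, assuming a `Tensor` wrapper imported as in the handout layout (`from mytorch.tensor import Tensor`), `model.train()`/`model.eval()` toggles, a `.data` numpy attribute on outputs, and a batch size of 100. All of these are illustrative assumptions, not the reference solution.

import numpy as np
from mytorch.tensor import Tensor   # assumed handout layout; adjust if yours differs

def train(model, optimizer, criterion, train_x, train_y, val_x, val_y,
          num_epochs=3, batch_size=100):
    """Illustrative sketch: shuffled mini-batch SGD, one validation pass per epoch."""
    val_accuracies = []
    for epoch in range(num_epochs):
        model.train()
        order = np.random.permutation(len(train_x))   # reshuffle each epoch
        for start in range(0, len(train_x), batch_size):
            idx = order[start:start + batch_size]
            optimizer.zero_grad()
            out = model(Tensor(train_x[idx]))
            loss = criterion(out, Tensor(train_y[idx]))
            loss.backward()
            optimizer.step()
        val_accuracies.append(validate(model, val_x, val_y, batch_size))
    return val_accuracies

def validate(model, val_x, val_y, batch_size=100):
    """Accuracy over the validation set with the model in eval mode."""
    model.eval()
    correct = 0
    for start in range(0, len(val_x), batch_size):
        out = model(Tensor(val_x[start:start + batch_size]))
        preds = np.argmax(out.data, axis=1)    # .data: underlying numpy array (assumed)
        correct += int((preds == val_y[start:start + batch_size]).sum())
    return correct / len(val_x)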
Example #3
def test_linear_batchnorm_relu_train_eval():
    np.random.seed(11785)
    mytorch_mlp = Sequential(Linear(10, 20), BatchNorm1d(20), ReLU())
    mytorch_optimizer = SGD(mytorch_mlp.parameters())
    test_step(mytorch_mlp, mytorch_optimizer, 5, 5)
    return True
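This test switches the model between training and evaluation mode. For BatchNorm1d that means using batch statistics (and updating running averages) in train mode, versus using the stored running statistics in eval mode. A bare numpy sketch of that distinction follows; the PyTorch-style running-average form and the eps/momentum values are assumptions for illustration.

import numpy as np

# Minimal sketch of the train/eval behavior BatchNorm1d is expected to show.
# The running-average convention (PyTorch-style) and hyperparameters are assumed.
def batchnorm_forward(x, running_mean, running_var, training, eps=1e-5, momentum=0.1):
    if training:
        mean = x.mean(axis=0)        # statistics of the current batch
        var = x.var(axis=0)
        # update running statistics for later use in eval mode
        running_mean = (1 - momentum) * running_mean + momentum * mean
        running_var = (1 - momentum) * running_var + momentum * var
    else:
        mean, var = running_mean, running_var   # use stored statistics only
    x_hat = (x - mean) / np.sqrt(var + eps)     # normalized features (before gamma/beta)
    return x_hat, running_mean, running_var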
Example #4
def test_linear_batchnorm_relu_backward_train():
    np.random.seed(11785)
    mytorch_mlp = Sequential(Linear(10, 20), BatchNorm1d(20), ReLU())
    test_forward_backward(mytorch_mlp)
    return True