Example #1
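Trains LeNet-5 on MNIST (images resized to 32x32) with cross-entropy loss, SGD with Nesterov momentum, and a StepLR schedule, then plots the loss and accuracy curves and reports the best train/validation accuracy.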
from pynet.vision.data import mnist
from pynet.vision import Draw
# the Solver/models/nn/optim import paths below are inferred from how the names
# are used in this example and may differ from the actual pynet package layout
import pynet
from pynet import models, nn, optim

data_path = '~/data/decompress_mnist'

if __name__ == '__main__':
    x_train, x_test, y_train, y_test = mnist.load_mnist(data_path, dst_size=(32, 32), shuffle=True)

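    # scale pixel values from [0, 255] to [-0.5, 0.5]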
    x_train = x_train / 255 - 0.5
    x_test = x_test / 255 - 0.5

    data = {
        'X_train': x_train,
        'y_train': y_train,
        'X_val': x_test,
        'y_val': y_test
    }

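    # LeNet-5 (1 input channel, 10 classes) with dropout, trained with cross-entropy
    # loss, SGD with Nesterov momentum, and a StepLR schedule that multiplies the
    # learning rate by 0.1 every 5 steps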
    model = models.LeNet5(in_channels=1, out_channels=10, dropout=0.5)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.params, lr=1e-3, momentum=0.9, nesterov=True)
    stepLR = optim.StepLR(optimizer, 5, gamma=0.1)

    solver = pynet.Solver(model, data, criterion, optimizer,
                          lr_scheduler=stepLR, batch_size=128, num_epochs=10, print_every=1)
    solver.train()

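    # plot the loss history and the train/validation accuracy curves
    # (plot labels: '准确率' = accuracy, '迭代/次' = iterations)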
    plt = Draw()
    plt(solver.loss_history)
    plt.multi_plot((solver.train_acc_history, solver.val_acc_history), ('train', 'val'),
                   title='准确率', xlabel='迭代/次', ylabel='准确率', save_path='acc.png')
    print('best_train_acc: %f; best_val_acc: %f' % (solver.best_train_acc, solver.best_val_acc))
Example #2
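Trains a two-layer fully-connected network (784-200-10) on flattened MNIST images with cross-entropy loss and plain SGD, then plots the loss and accuracy curves.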
from pynet.vision.data import mnist
from pynet.vision import Draw
import pynet
from pynet import models, nn, optim  # import paths inferred from usage; exact layout may differ

data_path = '~/data/decompress_mnist'  # assumed: same MNIST path as in the other examples

if __name__ == '__main__':
    x_train, x_test, y_train, y_test = mnist.load_mnist(data_path,
                                                        shuffle=True,
                                                        is_flatten=True)

    x_train = x_train / 255 - 0.5
    x_test = x_test / 255 - 0.5

    data = {
        'X_train': x_train,
        'y_train': y_train,
        'X_val': x_test,
        'y_val': y_test
    }

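    # two-layer MLP: 784 inputs (a flattened 28x28 image) -> 200 hidden units -> 10 classes,
    # trained with cross-entropy loss and SGD at its default settings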
    model = models.TwoLayerNet(num_in=784, num_hidden=200, num_out=10)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.params)

    solver = pynet.Solver(model,
                          data,
                          criterion,
                          optimizer,
                          batch_size=128,
                          num_epochs=10)
    solver.train()

    plt = Draw()
    plt(solver.loss_history)
    plt.multi_plot((solver.train_acc_history, solver.val_acc_history),
                   ('train', 'val'),
                   title='准确率',
                   xlabel='迭代/次')
Example #3
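Trains a three-layer fully-connected network (784-1200-200-10) with dropout on flattened MNIST, using cross-entropy loss, SGD (lr=1e-3), and the solver's reg=1e-3 regularization term.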
from pynet.vision.data import mnist
from pynet.vision import Draw
import pynet
from pynet import models, nn, optim  # import paths inferred from usage; exact layout may differ

data_path = '~/data/decompress_mnist'

if __name__ == '__main__':
    x_train, x_test, y_train, y_test = mnist.load_mnist(data_path, shuffle=True, is_flatten=True)

    x_train = x_train / 255 - 0.5
    x_test = x_test / 255 - 0.5

    data = {
        'X_train': x_train,
        'y_train': y_train,
        'X_val': x_test,
        'y_val': y_test
    }

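    # three-layer MLP (784 -> 1200 -> 200 -> 10) with dropout=0.5; reg=1e-3 passed to
    # the solver below sets the regularization strength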
    model = models.ThreeLayerNet(num_in=784, num_h1=1200, num_h2=200, num_out=10, dropout=0.5)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.params, lr=1e-3)

    solver = pynet.Solver(model, data, criterion, optimizer, batch_size=256, num_epochs=10, print_every=1, reg=1e-3)
    solver.train()

    plt = Draw()
    plt(solver.loss_history)
    plt.multi_plot((solver.train_acc_history, solver.val_acc_history), ('train', 'val'),
                   title='准确率', xlabel='迭代/次', ylabel='准确率', save_path='acc.png')
    print('best_train_acc: %f; best_val_acc: %f' % (solver.best_train_acc, solver.best_val_acc))
Example #4
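Trains a three-layer fully-connected network (644-2000-800-40) on a 40-class dataset with cross-entropy loss, SGD (lr=2e-2), reg=1e-3, a batch size of 4, and 100 epochs.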
    x_train = x_train / 255 - 0.5
    x_test = x_test / 255 - 0.5

    data = {
        'X_train': x_train,
        'y_train': y_train,
        'X_val': x_test,
        'y_val': y_test
    }

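    # three-layer MLP: 644-dimensional inputs -> 2000 -> 800 hidden units -> 40 classes;
    # the small batch size and 100 epochs below suggest a fairly small training set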
    model = models.ThreeLayerNet(num_in=644,
                                 num_h1=2000,
                                 num_h2=800,
                                 num_out=40)
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.params, lr=2e-2)

    solver = pynet.Solver(model,
                          data,
                          criterion,
                          optimizer,
                          batch_size=4,
                          num_epochs=100,
                          reg=1e-3,
                          print_every=1)
    solver.train()

    plt = Draw()
    plt(solver.loss_history)
    plt.multi_plot((solver.train_acc_history, solver.val_acc_history),
                   ('train', 'val'))