Example #1
def tryParameters(test_name,
                  N_hidden,
                  lam,
                  l_rate,
                  decay,
                  mom,
                  epochs=50,
                  batch_size=250):
    # Build a one-hidden-layer network: input normalization, a hidden
    # linear layer with ReLU, and a softmax output layer.
    net = Net([
        BatchNorm(cifar.in_size, trainMean()),
        Linear(cifar.in_size, N_hidden, lam=lam),
        ReLU(N_hidden),
        Linear(N_hidden, cifar.out_size, lam=lam),
        Softmax(cifar.out_size)
    ], lam, l_rate, decay, mom)
    # Train with shuffled mini-batches, then report test accuracy and the
    # final train/validation accuracy and cost.
    results = net.trainMiniBatch(train, val, epochs, batch_size, shuffle=True)
    print('{} Test Accuracy: {:.2f}'.format(
        test_name, net.accuracy(test['one_hot'].T, test['images'].T)))
    print('Final train a/c, val a/c: {:.2f}/{:.2f}, {:.2f}/{:.2f}'.format(
        results['last_a_train'], results['last_c_train'],
        results['last_a_val'], results['last_c_val']))
    plotResults(test_name, results['a_train'], results['c_train'],
                results['a_val'], results['c_val'])
    #weights_plot(net, "plots/weights_vizualisation_{}.png".format(test_name), labels)
    return results
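A minimal usage sketch for Example #1. The run label and every hyperparameter value below are hypothetical placeholders, not settings from the original experiments; Net, cifar, and the data splits come from the snippet's surrounding module.

results = tryParameters('baseline_bn',  # hypothetical run label
                        N_hidden=50,    # hidden-layer width
                        lam=0.0005,     # L2 regularization strength
                        l_rate=0.01,    # initial learning rate
                        decay=0.95,     # learning-rate decay
                        mom=0.9)        # momentum coefficient
print('Final validation accuracy: {:.2f}'.format(results['last_a_val']))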
Example #2
def tryParameters(test_name,
                  lin_neurons,
                  with_BN,
                  lam,
                  l_rate,
                  decay,
                  mom,
                  epochs=50,
                  batch_size=250):

    layers = []

    # Stack the linear layers: every layer but the last is followed by an
    # optional BatchNorm and a ReLU; the last layer's width is forced to
    # cifar.out_size, so the final entry of lin_neurons only marks its place.
    for count, N in enumerate(lin_neurons):
        not_last_layer = count < (len(lin_neurons) - 1)
        layers.append(
            Linear(cifar.in_size if count == 0 else lin_neurons[count - 1],
                   N if not_last_layer else cifar.out_size,
                   lam=lam))
        if not_last_layer:
            if with_BN:
                layers.append(BatchNorm(N))
            layers.append(ReLU(N))
    # With a single linear layer, optionally normalize right before softmax.
    if len(lin_neurons) == 1 and with_BN:
        layers.append(BatchNorm(cifar.out_size))
    layers.append(Softmax(cifar.out_size))
    # init the network
    print(["{}:{},{}".format(l.name, l.in_size, l.out_size) for l in layers])
    net = Net(layers, lam=lam, l_rate=l_rate, decay=decay, mom=mom)
    results = net.trainMiniBatch(train, val, epochs, batch_size, shuffle=True)
    print('{} Test Accuracy: {:.2f}'.format(
        test_name, net.accuracy(test['one_hot'].T, test['images'].T)))
    print('Final train a/c, val a/c: {:.2f}/{:.2f}, {:.2f}/{:.2f}'.format(
        results['last_a_train'], results['last_c_train'],
        results['last_a_val'], results['last_c_val']))
    plotResults(test_name, results['a_train'], results['c_train'],
                results['a_val'], results['c_val'])
    #weights_plot(net, "plots/weights_vizualisation_{}.png".format(test_name), labels)
    return results
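A usage sketch for the variable-depth variant in Example #2, with hypothetical widths and hyperparameters. Because the last layer's output width is overwritten with cifar.out_size, the final entry of lin_neurons only determines how many layers are built:

# Hypothetical call: two hidden layers (50 and 30 units) with batch
# normalization; the last entry is a placeholder for the output layer.
tryParameters('deep_bn',
              lin_neurons=[50, 30, 10],
              with_BN=True,
              lam=0.0005,
              l_rate=0.01,
              decay=0.95,
              mom=0.9)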
Example #3
        # Compute gradients on the mini-batch and apply a single
        # optimizer update per iteration.
        grad = network.gradient(x_batch, t_batch)
        optimizer.update(network.params, grad)

        # Log the training loss once per iteration.
        loss = network.loss(x_batch, t_batch)
        train_loss_list.append(loss)
        print(f'train loss: {loss}')

        # Evaluate train/test accuracy once per epoch.
        if i % iter_per_epoch == 0:
            train_acc = network.accuracy(x_train, t_train)
            test_acc = network.accuracy(x_test, t_test)
            train_acc_list.append(train_acc)
            test_acc_list.append(test_acc)
            print(train_acc, test_acc)

    # Plot per-epoch accuracy for the train and test sets.
    markers = {'train': 'o', 'test': 's'}
    x = np.arange(epochs)
    plt.plot(x, train_acc_list, marker=markers['train'], label='train', markevery=2)
    plt.plot(x, test_acc_list, marker=markers['test'], label='test', markevery=2)
    plt.xlabel("epochs")
    plt.ylabel("accuracy")
    plt.ylim(0, 1.0)
    plt.legend(loc='lower right')
    plt.show()
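Example #3 is a fragment from inside a training loop. A sketch of the setup it appears to assume is below; the batch sizes, dummy data, and sampling scheme are illustrative guesses, and network/optimizer stand in for whatever classes the original script constructs.

import numpy as np
import matplotlib.pyplot as plt

# Bookkeeping names match those used in the fragment above.
train_loss_list, train_acc_list, test_acc_list = [], [], []
batch_size, epochs, learning_rate = 100, 20, 0.01   # hypothetical values

# Dummy stand-ins so the skeleton is self-contained; replace with the
# real dataset and one-hot labels from the original script.
x_train = np.random.rand(600, 784)
t_train = np.eye(10)[np.random.randint(0, 10, 600)]

train_size = x_train.shape[0]
iter_per_epoch = max(train_size // batch_size, 1)

for i in range(epochs * iter_per_epoch):
    # Sample a random mini-batch each iteration.
    batch_mask = np.random.choice(train_size, batch_size)
    x_batch = x_train[batch_mask]
    t_batch = t_train[batch_mask]
    # ... gradient step, loss logging, and per-epoch evaluation as above ...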