Example #1
def test_construct_nn():
    nn = NeuralNetwork(2)
    nn.add_layer(Dense(40))
    nn.add_layer(Activation('relu'))
    nn.add_layer(Dense(2))
    nn.add_layer(Activation('softmax'))

    assert len(nn.layers) == 4
    assert not hasattr(nn, 'train_fn')

    nn.compile(categorical_crossentropy, argmax)

    assert hasattr(nn, 'train_fn')
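For context, a minimal sketch of the interface this test exercises. It only mirrors the observable behaviour checked by the assertions (layers accumulate via add_layer, and compile() attaches a train_fn attribute); it is not the library's actual implementation, and the body of compile() here is a placeholder.

class Dense:
    def __init__(self, num_units):
        self.num_units = num_units

class Activation:
    def __init__(self, name):
        self.name = name

class NeuralNetwork:
    def __init__(self, num_inputs):
        self.num_inputs = num_inputs
        self.layers = []

    def add_layer(self, layer):
        self.layers.append(layer)

    def compile(self, loss_fn, pred_fn):
        # The real compile() would build the training function from loss_fn/pred_fn;
        # the test above only checks that a train_fn attribute exists afterwards.
        self.train_fn = lambda X, y: None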
Example #2
from time import time   # needed for the per-iteration timing below

num_hidden_nodes = 784
num_training_iterations = 1000
# num_layers, setting, and learning_rate are defined elsewhere in the original script.

# Get the MNIST dataset
print("Getting data...")
(X_train, y_train), (X_test, y_test) = data.get_mnist()

# Compile network
print("Compiling model...", end='')
nn = NeuralNetwork(X_train.shape[1])
# Build the hidden layers; get_layer_for_setting() presumably appends the
# activation layer chosen by `setting`.
for _ in range(num_layers):
    nn.add_layer(Dense(num_hidden_nodes))
    get_layer_for_setting(nn, setting)
nn.add_layer(Dense(10))
nn.add_layer(Activation('softmax'))
nn.compile(loss_fn='categorical_crossentropy', init_fn='lecun', pred_fn='argmax',
           learning_rate=learning_rate, use_normal=True)
print('finished!')

ap_loss = []
f = open('train_log.csv', 'w')   # training log; `f` is opened elsewhere in the original, file name assumed here
for i in range(1, num_training_iterations + 1):
    start = time()
    nn.train(X_train, y_train)
    time_elapsed = time() - start
    s = "{},{},{}".format(i, nn.get_accuracy(X_train, y_train), time_elapsed)
    print(s)
    f.write(s+'\n')
    #ap_loss.append()
    #print(ap_loss[-1])

# Plotting
#import matplotlib.pyplot as plt
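The plotting step is left commented out above. A minimal sketch of what it could look like, assuming the per-iteration "iteration,accuracy,seconds" lines are written to the train_log.csv file used in the loop above (the file name is an assumption, not from the source):

import matplotlib.pyplot as plt

iterations, accuracies = [], []
with open('train_log.csv') as log:            # file name assumed above
    for line in log:
        i, acc, _elapsed = line.strip().split(',')
        iterations.append(int(i))
        accuracies.append(float(acc))

plt.plot(iterations, accuracies)
plt.xlabel('Training iteration')
plt.ylabel('Training accuracy')
plt.show()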
Example #3
              1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
              2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
              2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
y = data.one_hot_encode(y)

ap_losses = []
relu_losses = []

# Networks that use an ActivationPool layer after each hidden Dense layer
for _ in range(trials):
    nn = NeuralNetwork(4)
    for _ in range(num_layers):
        nn.add_layer(Dense(num_hidden_nodes))
        nn.add_layer(ActivationPool(act_fns))
    nn.add_layer(Dense(3))
    nn.add_layer(Activation('softmax'))
    nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax', learning_rate=learning_rate)
    ap_losses.append(nn.get_loss(X, y))

# Baseline networks that use a plain ReLU activation instead
for _ in range(trials):
    nn = NeuralNetwork(4)
    for _ in range(num_layers):
        nn.add_layer(Dense(num_hidden_nodes))
        nn.add_layer(Activation('relu'))
    nn.add_layer(Dense(3))
    nn.add_layer(Activation('softmax'))
    nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax', learning_rate=learning_rate)
    relu_losses.append(nn.get_loss(X, y))

print(describe(ap_losses))
print(describe(relu_losses))
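This snippet is an excerpt: the start of the y label array is truncated above, and several names (X, trials, num_layers, num_hidden_nodes, learning_rate, act_fns, describe) are assumed to be defined earlier in the original script. A hypothetical preamble that would make it runnable; every value here is illustrative, not taken from the source:

import numpy as np
from scipy.stats import describe        # describe() as used in the prints above

# X (the 4-feature input matrix) and the full y array are loaded earlier in the original script.
trials = 30                             # illustrative values only
num_layers = 2
num_hidden_nodes = 16
learning_rate = 0.01
act_fns = ['relu', 'tanh', 'sigmoid']   # candidate activations for ActivationPool (assumed)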