# Example 1
z_plot_before = []
x_plot_after = []
y_plot_after = []
z_plot_after = []

nn = NeuralNetwork(4)
for x in range(num_layers):
    nn.add_layer(Dense(num_hidden_nodes))
    #nn.add_layer(Activation('relu'))
    nn.add_layer(ActivationPool([theano.tensor.nnet.relu, theano.tensor.nnet.softplus, theano.tensor.nnet.sigmoid]))
nn.add_layer(Dense(3))
nn.add_layer(Activation('softmax'))
nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax', learning_rate=learning_rate)

# Sweep W[0][0] and W[0][1] of layer 2 over a [0, 1) grid and record the
# loss at each point, sampling the loss surface before training.
weights = nn.layers[2].W.get_value()
initial_weights = weights.copy()  # snapshot so training can start from the untouched weights

# Fix: this loss is measured before any training, so label it "Initial",
# not "Final".
print("Initial loss: {}".format(nn.get_loss(X, y)))
for I in np.arange(0, 1, 0.05):
    for J in np.arange(0, 1, 0.05):
        weights[0][0] = I
        weights[0][1] = J
        nn.layers[2].W.set_value(weights)
        loss = nn.get_loss(X, y)
        x_plot_before.append(I)
        y_plot_before.append(J)
        z_plot_before.append(loss)

# Fix: restore the pre-sweep weights so subsequent training does not start
# from the last grid point (0.95, 0.95).
nn.layers[2].W.set_value(initial_weights)

# Fit the network, then re-read the trained weights of the same Dense
# layer for the post-training loss-surface sweep.
for _ in range(num_training_iterations):
    nn.train(X, y)

weights = nn.layers[2].W.get_value()
# Example 2
              1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
              2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
              2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
# One-hot encode the integer class labels for categorical cross-entropy.
y = data.one_hot_encode(y)

ap_losses = []
relu_losses = []

# Measure the initial (untrained) loss of `trials` freshly initialized
# networks whose hidden blocks use an ActivationPool.
# Fix: the original used `x` for both the outer and inner loop, so the
# inner loop shadowed/clobbered the outer variable; neither is used, so
# both become `_`.
for _ in range(trials):
    nn = NeuralNetwork(4)
    for _ in range(num_layers):
        nn.add_layer(Dense(num_hidden_nodes))
        nn.add_layer(ActivationPool(act_fns))
    nn.add_layer(Dense(3))
    nn.add_layer(Activation('softmax'))
    nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax',
               learning_rate=learning_rate)
    ap_losses.append(nn.get_loss(X, y))

# Same measurement with plain ReLU activations, for comparison.
for _ in range(trials):
    nn = NeuralNetwork(4)
    for _ in range(num_layers):
        nn.add_layer(Dense(num_hidden_nodes))
        nn.add_layer(Activation('relu'))
    nn.add_layer(Dense(3))
    nn.add_layer(Activation('softmax'))
    nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax',
               learning_rate=learning_rate)
    relu_losses.append(nn.get_loss(X, y))

# Summary statistics for both conditions (presumably scipy.stats.describe
# — verify the import in the cut-off file header).
print(describe(ap_losses))
print(describe(relu_losses))
# Example 3
# Stack `num_layers` hidden blocks, each a Dense layer followed by an
# MReLU activation (coefficients P_val / D_val, optionally trainable),
# then a 3-way softmax output head.
for _ in range(num_layers):
    nn.add_layer(Dense(num_hidden_nodes))
    nn.add_layer(MReLU(coefs=[P_val, D_val], trainable=trainable_mrelu))

nn.add_layer(Dense(3))
nn.add_layer(Activation('softmax'))
nn.compile(loss_fn='categorical_crossentropy', pred_fn='argmax',
           learning_rate=learning_rate)

# Sweep W[0][0] and W[0][1] of layer 2 over a [0, 1) grid and record the
# loss at each point, sampling the loss surface before training.
weights = nn.layers[2].W.get_value()
initial_weights = weights.copy()  # snapshot so training can start from the untouched weights

for I in np.arange(0, 1, 0.05):
    for J in np.arange(0, 1, 0.05):
        weights[0][0] = I
        weights[0][1] = J
        nn.layers[2].W.set_value(weights)
        loss = nn.get_loss(X, y)
        x_plot_before.append(I)
        y_plot_before.append(J)
        z_plot_before.append(loss)

# Fix: restore the pre-sweep weights so the training below does not start
# from the last grid point (0.95, 0.95).
nn.layers[2].W.set_value(initial_weights)

# Fit the network, then re-read the trained weights for the "after" sweep.
for _ in range(num_training_iterations):
    nn.train(X, y)

weights = nn.layers[2].W.get_value()

for I in np.arange(0, 1, 0.05):
    for J in np.arange(0, 1, 0.05):
        weights[0][0] = I
        weights[0][1] = J
        nn.layers[2].W.set_value(weights)
        loss = nn.get_loss(X, y)