Example #1
from numpy import array

# NeuronLayer, MultilayerPerceptron, parser and hidden_layer_1 are defined
# earlier in the original script.
layer2 = NeuronLayer(1, 5)

# Combine the layers to create a neural network
neural_network = MultilayerPerceptron([hidden_layer_1], layer2)

# print("Stage 1) Random starting synaptic weights: ")a

# The training set: 10 examples, each pairing a pixel pattern from
# parser.get_pixels() with 1 output value.
training_set_inputs = array(parser.get_pixels())
training_set_outputs = array([[0], [0], [1], [1], [1], [0], [1], [0], [0],
                              [0]])

# Train the neural network using the training set.
# Do it 100,000 times and make small adjustments each time.
neural_network.train(training_set_inputs, training_set_outputs, 100000)

# print("Stage 2) New synaptic weights after training: ")
# neural_network.print_weights()

# Test the neural network with a new situation.
# print("Stage 3) Considering a new situation [1, 1] -> ?: ")
output = neural_network.think(
    array([
        0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0,
        1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0
    ]))
# print('full output', output)
print('0 is prime with probability', output[-1])
print('\n')
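
For context, the NeuronLayer and MultilayerPerceptron classes are not shown in this excerpt. Below is a minimal sketch of what they could look like for this usage (layers holding random synaptic weights, sigmoid activations, plain backpropagation); the constructor signatures and the train/think behaviour are inferred from the calls above, not taken from the original project.

from numpy import exp, random, dot

class NeuronLayer:
    def __init__(self, number_of_neurons, number_of_inputs_per_neuron):
        # Random weights in [-1, 1), one column per neuron.
        self.synaptic_weights = 2 * random.random(
            (number_of_inputs_per_neuron, number_of_neurons)) - 1

class MultilayerPerceptron:
    def __init__(self, hidden_layers, output_layer):
        # The network is just an ordered list of layers.
        self.layers = list(hidden_layers) + [output_layer]

    @staticmethod
    def _sigmoid(x):
        return 1 / (1 + exp(-x))

    def think(self, inputs):
        # Forward pass: feed the activations through every layer.
        activations = inputs
        for layer in self.layers:
            activations = self._sigmoid(dot(activations, layer.synaptic_weights))
        return activations

    def train(self, inputs, outputs, iterations):
        # Plain backpropagation using the sigmoid derivative a * (1 - a).
        for _ in range(iterations):
            # Forward pass, remembering each layer's activations.
            activations = [inputs]
            for layer in self.layers:
                activations.append(
                    self._sigmoid(dot(activations[-1], layer.synaptic_weights)))
            # Backward pass: propagate the error layer by layer and adjust weights.
            error = outputs - activations[-1]
            for i in reversed(range(len(self.layers))):
                delta = error * activations[i + 1] * (1 - activations[i + 1])
                error = dot(delta, self.layers[i].synaptic_weights.T)
                self.layers[i].synaptic_weights += dot(activations[i].T, delta)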
Example #2
import numpy as np
import matplotlib.pyplot as plt

# train_data, test_data and num_training_examples are prepared earlier in the
# original script; column 0 holds the label, the remaining columns the pixels.
x_train = train_data[:num_training_examples, 1:]
y_train = train_data[:num_training_examples, [0]]

x_test = test_data[:, 1:]
y_test = test_data[:, [0]]

layers = [784, 25, 10]

normalize_data = True
max_iterations = 500
alpha = 0.1

multilayer_perceptron = MultilayerPerceptron(x_train, y_train, layers,
                                             normalize_data)
(thetas, costs) = multilayer_perceptron.train(max_iterations, alpha)
plt.plot(range(len(costs)), costs)
plt.xlabel('Gradient steps')
plt.ylabel('Cost')
plt.show()

y_train_predictions = multilayer_perceptron.predict(x_train)
y_test_predictions = multilayer_perceptron.predict(x_test)

train_p = np.sum(y_train_predictions == y_train) / y_train.shape[0] * 100
test_p = np.sum(y_test_predictions == y_test) / y_test.shape[0] * 100
print('Training set accuracy:', train_p)
print('Test set accuracy:', test_p)

numbers_to_display = 64
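
This snippet assumes train_data, test_data, num_training_examples and the MultilayerPerceptron class are prepared earlier in the script, and numbers_to_display suggests a grid of sample digits is drawn next. A hedged sketch of one plausible setup and display step, assuming MNIST-style CSV files with the label in column 0 followed by 784 pixel values (the file paths and the 28x28 reshape are assumptions, not the original code):

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Assumed layout: column 0 is the digit label, columns 1..784 are pixel values.
train_data = pd.read_csv('data/mnist-demo.csv').values
test_data = pd.read_csv('data/mnist-demo-test.csv').values
num_training_examples = 5000

# Draw the first numbers_to_display test digits in a square grid.
numbers_to_display = 64
grid_size = int(np.ceil(np.sqrt(numbers_to_display)))
plt.figure(figsize=(10, 10))
for i in range(numbers_to_display):
    digit = test_data[i]
    plt.subplot(grid_size, grid_size, i + 1)
    plt.imshow(digit[1:].reshape(28, 28), cmap='Greys')
    plt.title(int(digit[0]))
    plt.axis('off')
plt.show()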