Ejemplo n.º 1
0
# Train the remaining classifiers.
# NOTE: single-argument print() calls behave identically on Python 2 and 3;
# the original bare "print x" statement syntax is Python-2 only and a
# SyntaxError under Python 3 (the other snippets in this file use print()).
print("\tDecision Tree")
decision_tree.fit(X_train, y_train)
print("\tRandom Forest")
random_forest.fit(X_train, y_train)
print("\tSupport Vector Machine")
# SVM is fit against the rescaled labels (see the accuracy section below,
# which scores it against rescaled_y_test for the same reason).
support_vector_machine.fit(X_train, rescaled_y_train)

# .........
#  PREDICT
# .........
y_pred = {}
y_pred["Adaboost"] = adaboost.predict(X_test)
y_pred["Naive Bayes"] = naive_bayes.predict(X_test)
# This KNN implementation is lazy: it needs the training data at query time.
y_pred["K Nearest Neighbors"] = knn.predict(X_test, X_train, y_train)
y_pred["Logistic Regression"] = logistic_regression.predict(X_test)
y_pred["Multilayer Perceptron"] = mlp.predict(X_test)
y_pred["Perceptron"] = perceptron.predict(X_test)
y_pred["Decision Tree"] = decision_tree.predict(X_test)
y_pred["Random Forest"] = random_forest.predict(X_test)
y_pred["Support Vector Machine"] = support_vector_machine.predict(X_test)

# ..........
#  ACCURACY
# ..........
print("Accuracy:")
for clf in y_pred:
    # Adaboost and the SVM were trained on rescaled labels, so they must be
    # scored against the rescaled test labels; everyone else uses y_test.
    if clf in ("Adaboost", "Support Vector Machine"):
        truth = rescaled_y_test
    else:
        truth = y_test
    print("\t%-23s: %.5f" % (clf, accuracy_score(truth, y_pred[clf])))
Ejemplo n.º 2
0
    # Load the raw MNIST digits from CSV; `data` are the pixel vectors and
    # `labels` the corresponding digit classes.
    data, labels = load_mnist("./data/mnist_data.csv")
    print("done!")

    # Split: the first TEST_SET_PC fraction of rows becomes the test set,
    # the remainder the training set.
    i = int(len(data) * TEST_SET_PC)
    X_train, Y_train = data[i:], labels[i:]
    X_test, Y_test = data[:i], labels[:i]

    print("\nTraining set samples: %d (%d%%)" % (len(X_train), 100 *
                                                 (1 - TEST_SET_PC)))
    print("Test set samples: %d (%d%%)" % (len(X_test), 100 * TEST_SET_PC))

    # 784 inputs (presumably 28x28 pixel images — TODO confirm against
    # load_mnist) feeding HIDDEN_LAYERS plus a final 10-unit output layer,
    # one unit per digit class.
    mlp = MultilayerPerceptron(input_size=784,
                               layers_size=HIDDEN_LAYERS + [10],
                               layers_activation="sigmoid")
    # Baseline accuracy before any training (untrained weights).
    print("\nInitial accuracy (training set): %.2f%%" %
          (100 * accuracy(mlp.predict(X_train), Y_train)))
    print("Initial accuracy (test set): %.2f%%" %
          (100 * accuracy(mlp.predict(X_test), Y_test)))

    print("\nStarting training session...")
    # Mini-batch training with MSE loss and momentum; gradient checking is
    # disabled (it is a debugging aid and would slow training considerably).
    mlp.fit(
        data=X_train,
        labels=Y_train,
        cost_function=MeanSquaredError(),
        epochs=TRAINING_EPOCHS,
        learning_rate=LEARNING_RATE,
        batch_size=32,
        gradient_checking=False,
        momentum_term=MOMENTUM_TERM,
    )
Ejemplo n.º 3
0
# Train the remaining classifiers.
# NOTE: single-argument print() calls behave identically on Python 2 and 3;
# the original bare "print x" statement syntax is Python-2 only (and this
# snippet also mixed tab indentation, normalized to 4 spaces here).
print("\tDecision Tree")
decision_tree.fit(X_train, y_train)
print("\tRandom Forest")
random_forest.fit(X_train, y_train)
print("\tSupport Vector Machine")
# SVM is fit against the rescaled labels and is scored against
# rescaled_y_test in the accuracy section below.
support_vector_machine.fit(X_train, rescaled_y_train)

# .........
#  PREDICT
# .........
y_pred = {}
y_pred["Adaboost"] = adaboost.predict(X_test)
y_pred["Naive Bayes"] = naive_bayes.predict(X_test)
# This KNN implementation is lazy: it needs the training data at query time.
y_pred["K Nearest Neighbors"] = knn.predict(X_test, X_train, y_train)
y_pred["Logistic Regression"] = logistic_regression.predict(X_test)
y_pred["Multilayer Perceptron"] = mlp.predict(X_test)
y_pred["Perceptron"] = perceptron.predict(X_test)
y_pred["Decision Tree"] = decision_tree.predict(X_test)
y_pred["Random Forest"] = random_forest.predict(X_test)
y_pred["Support Vector Machine"] = support_vector_machine.predict(X_test)

# ..........
#  ACCURACY
# ..........
print("Accuracy:")
for clf in y_pred:
    # Adaboost and the SVM were trained on rescaled labels, so score them
    # against the rescaled test labels; everyone else uses y_test.
    if clf in ("Adaboost", "Support Vector Machine"):
        truth = rescaled_y_test
    else:
        truth = y_test
    print("\t%-23s: %.5f" % (clf, accuracy_score(truth, y_pred[clf])))
Ejemplo n.º 4
0
# Network topology: 784 inputs (28x28 pixel images), one hidden layer of
# 25 units, 10 outputs (one per digit class).
layers = [784, 25, 10]

normalize_data = True
max_iterations = 500
alpha = 0.1  # learning rate

multilayer_perceptron = MultilayerPerceptron(x_train, y_train, layers,
                                             normalize_data)
(thetas, costs) = multilayer_perceptron.train(max_iterations, alpha)
# Plot the training cost curve.
plt.plot(range(len(costs)), costs)
plt.xlabel('Gradient steps')  # fixed typo: was 'Grident steps'
# BUG FIX: the original called plt.xlabel() twice, clobbering the x-axis
# label and leaving the y-axis unlabeled; the second call must be ylabel.
plt.ylabel('costs')
plt.show()

y_train_predictions = multilayer_perceptron.predict(x_train)
y_test_predictions = multilayer_perceptron.predict(x_test)

# Percentage of correctly classified samples on each split.
train_p = np.sum(y_train_predictions == y_train) / y_train.shape[0] * 100
test_p = np.sum(y_test_predictions == y_test) / y_test.shape[0] * 100
print('训练集准确率:', train_p)
print('测试集准确率:', test_p)

numbers_to_display = 64

# Lay the displayed digits out on the smallest square grid that fits them.
num_cells = math.ceil(math.sqrt(numbers_to_display))

plt.figure(figsize=(15, 15))

for plot_index in range(numbers_to_display):
    # NOTE(review): the loop body appears truncated at this chunk boundary;
    # only the label extraction is visible here.
    digit_label = y_test[plot_index, 0]
Ejemplo n.º 5
0
# Train a small multilayer perceptron on MNIST (loaded through the
# TensorFlow tutorial helper) and predict labels for the test images.
from tensorflow.examples.tutorials.mnist import input_data

from multilayer_perceptron import MultilayerPerceptron

mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
train_images = mnist.train.images
train_labels = mnist.train.labels

# Dataset dimensions drive the network configuration: one-hot label width
# gives the class count, and batches are a tenth of the training set.
n_inputs = len(train_images)
n_classes = len(train_labels[0])
batch_size = n_inputs // 10

print('Num Inputs: {}'.format(n_inputs))
print('Num classes: {}'.format(n_classes))
print('Batch size: {}'.format(batch_size))

mlp = MultilayerPerceptron(n_inputs, n_classes, batch_size)
# Stack three layers of decreasing width; the final 10-unit layer matches
# the digit classes.
for layer_width in (50, 20, 10):
    mlp.add_layer(layer_width)

mlp.fit(train_images, train_labels)
pred = mlp.predict(mnist.test.images)