Example #1
decision_tree = DecisionTree()
random_forest = RandomForest(n_estimators=150)
support_vector_machine = SupportVectorMachine(C=1, kernel=rbf_kernel)

# ........
#  TRAIN
# ........
print "Training:"
print "\tAdaboost"
adaboost.fit(X_train, rescaled_y_train)
print "\tNaive Bayes"
naive_bayes.fit(X_train, y_train)
print "\tLogistic Regression"
logistic_regression.fit(X_train, y_train)
print "\tMultilayer Perceptron"
mlp.fit(X_train, y_train, n_iterations=20000, learning_rate=0.1)
print "\tPerceptron"
perceptron.fit(X_train, y_train)
print "\tDecision Tree"
decision_tree.fit(X_train, y_train)
print "\tRandom Forest"
random_forest.fit(X_train, y_train)
print "\tSupport Vector Machine"
support_vector_machine.fit(X_train, rescaled_y_train)

# .........
#  PREDICT
# .........
y_pred = {}
y_pred["Adaboost"] = adaboost.predict(X_test)
y_pred["Naive Bayes"] = naive_bayes.predict(X_test)
Example #2
# ........
#  TRAIN
# ........
print("Training:")
print("\tAdaboost")
adaboost.fit(X_train, rescaled_y_train)
print("\tDecision Tree")
decision_tree.fit(X_train, y_train)
print("\tGradient Boosting")
gbc.fit(X_train, y_train)
print("\tLDA")
lda.fit(X_train, y_train)
print("\tLogistic Regression")
logistic_regression.fit(X_train, y_train)
print("\tMultilayer Perceptron")
mlp.fit(X_train, y_train)
print("\tNaive Bayes")
naive_bayes.fit(X_train, y_train)
print("\tPerceptron")
perceptron.fit(X_train, y_train)
print("\tRandom Forest")
random_forest.fit(X_train, y_train)
print("\tSupport Vector Machine")
support_vector_machine.fit(X_train, rescaled_y_train)
print("\tXGBoost")
xgboost.fit(X_train, y_train)

# .........
#  PREDICT
# .........
y_pred = {}
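The snippet ends here. A minimal sketch of one way the prediction block could be filled in, looping over the fitted models instead of listing each call; rescaled_y_test (test labels in {-1, 1}) and scikit-learn's accuracy_score are assumptions, not part of the original script.

from sklearn.metrics import accuracy_score  # stand-in metric (assumption)

# Each model paired with the label encoding it was trained on.
classifiers = {
    "Adaboost": (adaboost, rescaled_y_test),
    "Decision Tree": (decision_tree, y_test),
    "Gradient Boosting": (gbc, y_test),
    "LDA": (lda, y_test),
    "Logistic Regression": (logistic_regression, y_test),
    "Multilayer Perceptron": (mlp, y_test),
    "Naive Bayes": (naive_bayes, y_test),
    "Perceptron": (perceptron, y_test),
    "Random Forest": (random_forest, y_test),
    "Support Vector Machine": (support_vector_machine, rescaled_y_test),
    "XGBoost": (xgboost, y_test),
}

print("Accuracy:")
for name, (clf, truth) in classifiers.items():
    y_pred[name] = clf.predict(X_test)
    print("\t%s: %.3f" % (name, accuracy_score(truth, y_pred[name])))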
Example #3
    print("Training set samples: %d (%d%%)" % (len(X_train), 100 * (1 - TEST_SET_PC)))
    print("Test set samples: %d (%d%%)" % (len(X_test), 100 * TEST_SET_PC))

    mlp = MultilayerPerceptron(input_size=784,
                               layers_size=HIDDEN_LAYERS + [10],
                               layers_activation="sigmoid")
    print("\nInitial accuracy (training set): %.2f%%" %
          (100 * accuracy(mlp.predict(X_train), Y_train)))
    print("Initial accuracy (test set): %.2f%%" %
          (100 * accuracy(mlp.predict(X_test), Y_test)))

    print("\nStarting training session...")
    mlp.fit(
        data=X_train,
        labels=Y_train,
        cost_function=MeanSquaredError(),
        epochs=TRAINING_EPOCHS,
        learning_rate=LEARNING_RATE,
        batch_size=32,
        gradient_checking=False,
        momentum_term=MOMENTUM_TERM,
    )

    print("\nAccuracy (training set): %.2f%%" %
          (100 * accuracy(mlp.predict(X_train), Y_train)))
    print("Accuracy (test set): %.2f%%\n" %
          (100 * accuracy(mlp.predict(X_test), Y_test)))

    print(
        "Opening evaluation window...\nTo select a new image, press SPACE.\n")
    evaluation_screen(X_test, Y_test, mlp.predict(X_test).transpose())
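The accuracy helper called above is not shown in this snippet. A plausible sketch, assuming predictions and labels are both arrays of shape (n_samples, n_classes); if the real arrays are transposed, the argmax axis would need to change.

import numpy as np

def accuracy(predictions, labels):
    # Fraction of samples whose highest-scoring class matches the one-hot
    # label. Assumes shape (n_samples, n_classes) for both arrays.
    return np.mean(np.argmax(predictions, axis=1) == np.argmax(labels, axis=1))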
Example #5
from tensorflow.examples.tutorials.mnist import input_data

from multilayer_perceptron import MultilayerPerceptron

# Load MNIST with one-hot encoded labels (requires TensorFlow 1.x; the
# tutorials module was removed in TensorFlow 2).
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
mlp = MultilayerPerceptron()
mlp.fit(mnist.train.images, mnist.train.labels)
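A short follow-up sketch for checking the fitted model on the held-out split that the same loader provides; it assumes mlp.predict returns one score per class per sample, which the snippet above does not guarantee.

import numpy as np

# mnist.test comes from the same read_data_sets call; labels are one-hot
# because of one_hot=True above.
test_scores = mlp.predict(mnist.test.images)
test_accuracy = np.mean(
    np.argmax(test_scores, axis=1) == np.argmax(mnist.test.labels, axis=1))
print("Test accuracy: %.2f%%" % (100 * test_accuracy))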
Example #6
# ........
#  TRAIN
# ........
print ("Training:")
print ("\tAdaboost")
adaboost.fit(X_train, rescaled_y_train)
print ("\tDecision Tree")
decision_tree.fit(X_train, y_train)
print ("\tGradient Boosting")
gbc.fit(X_train, y_train)
print ("\tLDA")
lda.fit(X_train, y_train)
print ("\tLogistic Regression")
logistic_regression.fit(X_train, y_train)
print ("\tMultilayer Perceptron")
mlp.fit(X_train, y_train)
print ("\tNaive Bayes")
naive_bayes.fit(X_train, y_train)
print ("\tPerceptron")
perceptron.fit(X_train, y_train)
print ("\tRandom Forest")
random_forest.fit(X_train, y_train)
print ("\tSupport Vector Machine")
support_vector_machine.fit(X_train, rescaled_y_train)
print ("\tXGBoost")
xgboost.fit(X_train, y_train)



# .........
#  PREDICT