Example #1
def main():
    # Initialize your parameters
    i = ld.load_iris_dataset(0.7)
    c = ld.load_congressional_dataset(0.7)
    m1 = ld.load_monks_dataset(1)
    m2 = ld.load_monks_dataset(2)
    m3 = ld.load_monks_dataset(3)

    # Initialize/instantiate your classifiers with their parameters

    euclide = lambda x, y: pow(
        (x - y), 2
    )  # No need to take the square root, since it does not change the neighbour ranking
    diff_binaire = lambda x, y: 0 if x == y else 1

    knn_i = Knn(train=i[0], train_labels=i[1], dist_equation=euclide)
    knn_c = Knn(train=c[0], train_labels=c[1], dist_equation=euclide)
    knn_m1 = Knn(train=m1[0], train_labels=m1[1], dist_equation=diff_binaire)
    knn_m2 = Knn(train=m2[0], train_labels=m2[1], dist_equation=diff_binaire)
    knn_m3 = Knn(train=m3[0], train_labels=m3[1], dist_equation=diff_binaire)

    bn_i = BayesNaifClassifier([1])
    bn_c = BayesNaifClassifier([0])
    bn_m1 = BayesNaifClassifier([2])
    bn_m2 = BayesNaifClassifier([2])
    bn_m3 = BayesNaifClassifier([2])

    # Train your classifier
    print("\n=============\nKNN train tests\n=============")
    knn_i.train_test(i[0], i[1], "Dataset: Iris, Training")
    knn_c.train_test(c[0], c[1], "Dataset: Congressional, Training")
    knn_m1.train_test(m1[0], m1[1], "Dataset: MONKS-1, Training")
    knn_m2.train_test(m2[0], m2[1], "Dataset: MONKS-2, Training")
    knn_m3.train_test(m3[0], m3[1], "Dataset: MONKS-3, Training")

    print("\n=============\nBayes Naif train tests\n=============")
    bn_i.train(i[0], i[1], "Dataset: Iris, Training")
    bn_c.train(c[0], c[1], "Dataset: Congressional, Training")
    bn_m1.train(m1[0], m1[1], "Dataset: MONKS-1, Training")
    bn_m2.train(m2[0], m2[1], "Dataset: MONKS-2, Training")
    bn_m3.train(m3[0], m3[1], "Dataset: MONKS-3, Training")

    print("\n=============\nKNN tests\n=============")
    # Test your classifier
    knn_i.train_test(i[2], i[3], "Dataset: Iris, Test")
    knn_c.train_test(c[2], c[3], "Dataset: Congressional, Test")
    knn_m1.train_test(m1[2], m1[3], "Dataset: MONKS-1, Test")
    knn_m2.train_test(m2[2], m2[3], "Dataset: MONKS-2, Test")
    knn_m3.train_test(m3[2], m3[3], "Dataset: MONKS-3, Test")

    print("\n=============\nBayes Naif tests\n=============")
    bn_i.test(i[2], i[3], "Dataset: Iris, Test")
    bn_c.test(c[2], c[3], "Dataset: Congressional, Test")
    bn_m1.test(m1[2], m1[3], "Dataset: MONKS-1, Test")
    bn_m2.test(m2[2], m2[3], "Dataset: MONKS-2, Test")
    bn_m3.test(m3[2], m3[3], "Dataset: MONKS-3, Test")
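The comment beside the euclide lambda skips the square root on the grounds that it does not change the neighbour ranking. A quick standalone check of that claim, using only numpy and made-up toy points (independent of the Knn class above):

import numpy as np

# Five made-up reference points and one query, just to illustrate the ordering argument.
rng = np.random.default_rng(0)
points = rng.random((5, 4))
query = rng.random(4)

squared = ((points - query) ** 2).sum(axis=1)  # squared Euclidean distance
exact = np.sqrt(squared)                       # true Euclidean distance

# The square root is monotonic, so both versions sort the neighbours identically,
# which is why k-NN can work directly with squared distances.
assert np.array_equal(np.argsort(squared), np.argsort(exact))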
Example #2
    print(f"Iris dataset classification: \n")
    startTime = time.time()

    iris_train, iris_train_labels, iris_test, iris_test_labels = load_datasets.load_iris_dataset(train_ratio)
    iris_nbc = NbcGaussian()  # Gaussian naive Bayes classifier for the continuous iris features

    iris_nbc.train(iris_train, iris_train_labels)
    iris_nbc.test(iris_test, iris_test_labels)

    print(f"\n --- Elapse time: {time.time() - startTime:.2f} s --- \n")

    print('-' * 175)
    print(f"Congressional dataset classification: \n")
    startTime = time.time()

    cong_train, cong_train_labels, cong_test, cong_test_labels = load_datasets.load_congressional_dataset(train_ratio)
    cong_nbc = Nbc()  # naive Bayes classifier for the categorical congressional features
    cong_nbc.train(cong_train, cong_train_labels)
    cong_nbc.test(cong_test, cong_test_labels)

    print(f"\n --- Elapse time: {time.time() - startTime:.2f} s --- \n")

    print('-' * 175)
    for i in range(3):
        print(f"Monks({i + 1}) dataset classification: \n")
        startTime = time.time()

        monks_train, monks_train_labels, monks_test, monks_test_labels = load_datasets.load_monks_dataset(i + 1)
        monks_nbc = Nbc()  # naive Bayes classifier for the categorical MONKS features
        monks_nbc.train(monks_train, monks_train_labels)
        monks_nbc.test(monks_test, monks_test_labels)
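The elapsed-time bookkeeping above is repeated for every dataset; a small standalone helper in the same spirit (a sketch only, not part of the assignment code) keeps the behaviour while removing the repetition:

import time
from contextlib import contextmanager

@contextmanager
def timed(label):
    # Print the wall-clock time spent in the wrapped block, mirroring the startTime pattern above.
    start = time.time()
    try:
        yield
    finally:
        print(f"{label}\n --- Elapsed time: {time.time() - start:.2f} s --- \n")

# Hypothetical usage with the classifiers above:
# with timed("Iris dataset classification"):
#     iris_nbc.train(iris_train, iris_train_labels)
#     iris_nbc.test(iris_test, iris_test_labels)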
Example #3
import numpy as np
import copy
import NeuralNet
import load_datasets

# Toy data (not used below: train and labels are reassigned from the loaded datasets)
# X = (hours sleeping, hours studying), y = score on test
X = ([2, 9], [1, 5], [3, 6])
y = np.array(([92], [86], [89]), dtype=float)

# XOR-style toy data, likewise unused below
X = [[0,0,1], [0,1,1], [1,0,1], [1,1,1]]

y = [0, 1, 1, 0]

n = 1

train_iris, train_labels_iris, test_iris, test_labels_iris = load_datasets.load_iris_dataset(0.03)
train_votes, train_labels_votes, test_votes, test_labels_votes = load_datasets.load_congressional_dataset(0.02)
train_monks, train_labels_monks, test_monks, test_labels_monks = load_datasets.load_monks_dataset(n)

train = train_votes
labels = train_labels_votes

NN = NeuralNet.NeuralNet(1, 2, len(train[0]), 1)
for i in range(1000):
    NN.train(train, labels)

print "Actual Output: \n" + str(labels)
print "Predicted Output: \n" + str(NN.forward(train).T[0])

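The NeuralNet class itself is not shown in this snippet; as a point of reference, here is a minimal self-contained sketch of the kind of two-layer network the XOR-style data above is meant to exercise (numpy only, sigmoid activations, plain gradient descent; every name and hyper-parameter here is illustrative, not the assignment's NeuralNet):

import numpy as np

# XOR-style data matching the toy example above: 3 binary inputs, the third acting as a bias-like feature.
X_xor = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]], dtype=float)
y_xor = np.array([[0], [1], [1], [0]], dtype=float)

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

rng = np.random.default_rng(42)
W1 = rng.normal(size=(3, 4))   # input -> hidden weights (4 hidden units, chosen arbitrarily)
W2 = rng.normal(size=(4, 1))   # hidden -> output weights
lr = 1.0

for _ in range(5000):
    # Forward pass.
    hidden = sigmoid(X_xor @ W1)
    output = sigmoid(hidden @ W2)
    # Backward pass: squared-error loss with sigmoid derivatives.
    d_output = (output - y_xor) * output * (1 - output)
    d_hidden = (d_output @ W2.T) * hidden * (1 - hidden)
    W2 -= lr * hidden.T @ d_output
    W1 -= lr * X_xor.T @ d_hidden

# Predictions should move toward 0, 1, 1, 0 as training progresses.
print("Predicted:", output.ravel().round(2), "Expected:", y_xor.ravel())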
Example #4
decision_tree_congress = DecisionTree.DecisionTree()
decision_tree_monks1 = DecisionTree.DecisionTree()
decision_tree_monks2 = DecisionTree.DecisionTree()
decision_tree_monks3 = DecisionTree.DecisionTree()

rn_iris = NeuralNet.NeuralNet()
rn_congress = NeuralNet.NeuralNet()
rn_monks1 = NeuralNet.NeuralNet()
rn_monks2 = NeuralNet.NeuralNet()
rn_monks3 = NeuralNet.NeuralNet()

# Load/read the datasets
(train_iris, train_labels_iris, test_iris,
 test_labels_iris) = load_datasets.load_iris_dataset(0.7)
(train_congress, train_labels_congress, test_congress,
 test_labels_congress) = load_datasets.load_congressional_dataset(0.7)
(train_monks1, train_labels_monks1, test_monks1,
 test_labels_monks1) = load_datasets.load_monks_dataset(1)
(train_monks2, train_labels_monks2, test_monks2,
 test_labels_monks2) = load_datasets.load_monks_dataset(2)
(train_monks3, train_labels_monks3, test_monks3,
 test_labels_monks3) = load_datasets.load_monks_dataset(3)

# Learning curve
# decision_tree_iris.learning_curve(train_iris, train_labels_iris, test_iris, test_labels_iris)
# decision_tree_congress.learning_curve(train_congress, train_labels_congress, test_congress, test_labels_congress)
# decision_tree_monks1.learning_curve(train_monks1, train_labels_monks1, test_monks1, test_labels_monks1)
# decision_tree_monks2.learning_curve(train_monks2, train_labels_monks2, test_monks2, test_labels_monks2)
# decision_tree_monks3.learning_curve(train_monks3, train_labels_monks3, test_monks3, test_labels_monks3)

# Train your classifier
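The learning_curve calls above are commented out and their implementation is not part of this snippet; a rough sketch of what such a helper could look like (the classifier_factory and per-instance predict interface are assumptions, not the assignment's actual API):

import numpy as np

def learning_curve_sketch(classifier_factory, train, train_labels, test, test_labels,
                          fractions=(0.2, 0.4, 0.6, 0.8, 1.0)):
    # Train on growing subsets of the training data and record test accuracy for each size.
    scores = []
    for frac in fractions:
        n = max(1, int(len(train) * frac))
        clf = classifier_factory()                    # fresh classifier for every point on the curve
        clf.train(train[:n], train_labels[:n])        # assumes the train(X, y) interface used above
        predictions = [clf.predict(x) for x in test]  # assumes a per-instance predict(x) method
        scores.append(np.mean(np.array(predictions) == np.array(test_labels)))
    return list(fractions), scores

# Hypothetical usage:
# sizes, accuracies = learning_curve_sketch(DecisionTree.DecisionTree,
#                                           train_iris, train_labels_iris,
#                                           test_iris, test_labels_iris)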
Example #5
"""

# Initialize the naive Bayes classifiers, telling each one which dataset it is for
iris_bayes = BayesNaif.BayesNaif("iris dataset")
congressional_bayes = BayesNaif.BayesNaif("congres dataset")
monks_bayes = BayesNaif.BayesNaif("monks dataset")
# Initialize the Knn classifiers, telling each one which dataset it is for
iris_knn = Knn.Knn("iris dataset")
congressional_knn = Knn.Knn("congres dataset")
monks_knn = Knn.Knn("monks dataset")

# Load the datasets; each loader returns 4 numpy matrices (train data, train labels, test data, test labels)
iris_train_dataset, iris_train_labels, iris_test_dataset, iris_test_labels =\
    load_datasets.load_iris_dataset(0.60)   # 60% of the instances are used for training
congressional_train_dataset, congressional_train_labels, congressional_test_dataset, congressional_test_labels =\
    load_datasets.load_congressional_dataset(0.60)  # 60% of the instances are used for training
monks_train_dataset, monks_train_labels, monks_test_dataset, monks_test_labels =\
    load_datasets.load_monks_dataset(2) # here we use MONKS set number 2

# Train the classifiers, then run the tests
iris_bayes.train(iris_train_dataset, iris_train_labels, "iris dataset")
iris_bayes.test(iris_test_dataset, iris_test_labels, "iris dataset")
iris_knn.train(iris_train_dataset, iris_train_labels, "iris dataset")
iris_knn.test(iris_test_dataset, iris_test_labels, "iris dataset")

congressional_bayes.train(congressional_train_dataset,
                          congressional_train_labels, "congressional dataset")
congressional_bayes.test(congressional_test_dataset, congressional_test_labels,
                         "congressional dataset")
congressional_knn.train(congressional_train_dataset,
                        congressional_train_labels, "congres dataset")
Example #6
class BestCase:
    # Holds the best configuration found so far for one dataset during the hyper-parameter search.
    def __init__(self):
        self.error = 1
        self.nbNodes = 0
        self.nbFolds = 0
        self.nbLayers = 1
        self.epoch = 0


datasetsNames = ["Iris", "Monks 1", "Monks 2", "Monks 3", "Congressional"]

datasets = [
    load_datasets.load_iris_dataset(0.65),
    load_datasets.load_monks_dataset(1),
    load_datasets.load_monks_dataset(2),
    load_datasets.load_monks_dataset(3),
    load_datasets.load_congressional_dataset(0.5)
]

best_cases = [BestCase(), BestCase(), BestCase(), BestCase(), BestCase()]

folds = 6

for datasetNo in range(len(datasets)):
    train, train_labels, test, test_labels = datasets[datasetNo]
    best_case = best_cases[datasetNo]
    # Drop the tail so the training data splits evenly into `folds` parts.
    train = train[:len(train) - len(train) % folds]
    train_labels = train_labels[:len(train_labels) - len(train_labels) % folds]

    train_fold = np.split(train, folds)
    train_labels_fold = np.split(train_labels, folds)
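The snippet stops right after building the folds; a sketch of how one cross-validation pass over them might look (the make_classifier and evaluate callables, and the train(X, y) interface, are assumptions based on the other examples):

import numpy as np

def cross_validate(train_fold, train_labels_fold, make_classifier, evaluate):
    # Hold out one fold at a time, train on the rest, and average the score over the folds.
    scores = []
    for held_out in range(len(train_fold)):
        X_val = train_fold[held_out]
        y_val = train_labels_fold[held_out]
        X_tr = np.concatenate([f for i, f in enumerate(train_fold) if i != held_out])
        y_tr = np.concatenate([f for i, f in enumerate(train_labels_fold) if i != held_out])
        clf = make_classifier()
        clf.train(X_tr, y_tr)                  # assumes the train(X, y) interface used elsewhere
        scores.append(evaluate(clf, X_val, y_val))
    return float(np.mean(scores))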
Example #7
# Initialize/instantiate your classifiers with their parameters
knn_iris = Knn.Knn()
knn_congress = Knn.Knn()
knn_monks1 = Knn.Knn()
knn_monks2 = Knn.Knn()
knn_monks3 = Knn.Knn()

bayesNaif_iris = BayesNaif.BayesNaif()
bayesNaif_congress = BayesNaif.BayesNaif()
bayesNaif_monks1 = BayesNaif.BayesNaif()
bayesNaif_monks2 = BayesNaif.BayesNaif()
bayesNaif_monks3 = BayesNaif.BayesNaif()

# Load/read the datasets
(train_iris, train_labels_iris, test_iris, test_labels_iris) = load_datasets.load_iris_dataset(0.5)
(train_congress, train_labels_congress, test_congress, test_labels_congress) = load_datasets.load_congressional_dataset(0.5)
(train_monks1, train_labels_monks1, test_monks1, test_labels_monks1) = load_datasets.load_monks_dataset(1)
(train_monks2, train_labels_monks2, test_monks2, test_labels_monks2) = load_datasets.load_monks_dataset(2)
(train_monks3, train_labels_monks3, test_monks3, test_labels_monks3) = load_datasets.load_monks_dataset(3)

# Train your classifier
knn_iris.train(train_iris, train_labels_iris)
knn_congress.train(train_congress, train_labels_congress)
knn_monks1.train(train_monks1, train_labels_monks1)
knn_monks2.train(train_monks2, train_labels_monks2)
knn_monks3.train(train_monks3, train_labels_monks3)

bayesNaif_iris.train(train_iris, train_labels_iris)
bayesNaif_congress.train(train_congress, train_labels_congress)
bayesNaif_monks1.train(train_monks1, train_labels_monks1)
bayesNaif_monks2.train(train_monks2, train_labels_monks2)
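The block above repeats the same train call for every classifier/dataset pair; for reference, the same workflow can be driven by a loop over (classifier, data, labels) tuples (a sketch reusing the names defined above; the tuple list itself is illustrative):

# Illustrative only: pair each classifier with its training data and loop instead of repeating the call.
training_pairs = [
    (knn_iris, train_iris, train_labels_iris),
    (knn_congress, train_congress, train_labels_congress),
    (knn_monks1, train_monks1, train_labels_monks1),
    (knn_monks2, train_monks2, train_labels_monks2),
    (knn_monks3, train_monks3, train_labels_monks3),
    (bayesNaif_iris, train_iris, train_labels_iris),
    (bayesNaif_congress, train_congress, train_labels_congress),
    (bayesNaif_monks1, train_monks1, train_labels_monks1),
    (bayesNaif_monks2, train_monks2, train_labels_monks2),
    (bayesNaif_monks3, train_monks3, train_labels_monks3),
]
for classifier, data, labels in training_pairs:
    classifier.train(data, labels)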
Example #8
classifieur_Knn_iris = Knn.Knn()
classifieur_Knn_congressional = Knn.Knn()
classifieur_Knn_monks_1 = Knn.Knn()
classifieur_Knn_monks_2 = Knn.Knn()
classifieur_Knn_monks_3 = Knn.Knn()

classifieur_bayes_naif_iris = BayesNaif.BayesNaif()
classifieur_bayes_naif_congressional = BayesNaif.BayesNaif()
classifieur_bayes_naif_monks_1 = BayesNaif.BayesNaif()
classifieur_bayes_naif_monks_2 = BayesNaif.BayesNaif()
classifieur_bayes_naif_monks_3 = BayesNaif.BayesNaif()

# Load/read the datasets

dataset_iris = load_datasets.load_iris_dataset(train_pourcentage_iris)
dataset_congressional = load_datasets.load_congressional_dataset(
    train_pourcentage_congressional)
dataset_monks_1 = load_datasets.load_monks_dataset(1)
dataset_monks_2 = load_datasets.load_monks_dataset(2)
dataset_monks_3 = load_datasets.load_monks_dataset(3)

# Train your classifier
print(
    "Training the KNN classifier on the Iris dataset with a training percentage of "
    + str(train_pourcentage_iris))
algo_starting_time = time()
classifieur_Knn_iris.train(dataset_iris[0], dataset_iris[1], num_datset_iris)
algo_end_time = time()
print("\nTemps d'exécution de l'algorithme : " +
      str(algo_end_time - algo_starting_time) + "\n")
print("---------------------------------------------------------\n")
Example #9
    Knn_clf.test(test, test_labels)
    print('KNN test time: ', time.time() - start)


    print('Naïve Bayes classifier:')
    BN_clf = BayesNaif.BayesNaif()
    BN_clf.train(train, train_labels, muted=False)

    start = time.time()
    BN_clf.test(test, test_labels)
    print('Naïve Bayes test time: ', time.time() - start)

    ### CONGRESSIONAL DATASET ###
    # Initialize your parameters
    print("CONGRESSIONAL DATASET:")
    train, train_labels, test, test_labels = load_datasets.load_congressional_dataset(0.8)

    # Initialize/instantiate your classifiers with their parameters
    kf = K_folds(n_splits=10)
    train_kf, train_label_kf = kf.split(train, train_labels)
    k_neighb = [1, 3, 5, 7, 9, 11]
    metrics = ['minkowski', 'manhattan', 'euclidean']
    best_k = 1
    best_metric = 'minkowski'
    best_score = 0
    for k in k_neighb:
        for metric in metrics:
            knn_clf = Knn.Knn(n_neighbors=k, metric=metric)
            # kf = K_folds(n_splits=10)
            # train_kf, train_label_kf = kf.split(train, train_labels)
            avg_score = 0
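The snippet is cut off inside the grid search; a standalone sketch of how the remainder of that search might be organised (averaging a per-fold score for each (k, metric) pair and keeping the best one; make_knn and score_fold stand in for the project's Knn constructor and fold-evaluation logic, which are not shown here):

import numpy as np

def grid_search_knn(train_kf, train_label_kf, k_values, metrics, make_knn, score_fold):
    # Try every (k, metric) pair, average the per-fold scores, and keep the best combination.
    best_k, best_metric, best_score = k_values[0], metrics[0], float('-inf')
    for k in k_values:
        for metric in metrics:
            fold_scores = []
            for held_out in range(len(train_kf)):
                clf = make_knn(n_neighbors=k, metric=metric)
                fold_scores.append(score_fold(clf, train_kf, train_label_kf, held_out))
            avg_score = float(np.mean(fold_scores))
            if avg_score > best_score:
                best_k, best_metric, best_score = k, metric, avg_score
    return best_k, best_metric, best_score

# Hypothetical usage with the variables defined above (my_fold_scoring_function is hypothetical):
# best_k, best_metric, best_score = grid_search_knn(
#     train_kf, train_label_kf, k_neighb, metrics,
#     make_knn=lambda **kwargs: Knn.Knn(**kwargs),
#     score_fold=my_fold_scoring_function)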