# Example of a Naive Bayes classifier on dataset G3.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training split (features/labels) and the testing split.
X_train, y_train, X_test, y_test = loadData('G3')

# Build and fit the classifier; GaussianNB needs no extra parameters here.
model = defineClassifier(['GaussianNB', ''])
model = trainClassifier(model, X_train, y_train)

# Predict on the held-out split and report the confusion matrix.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'Naive Bayes')
# Example of a Decision Tree classifier on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# Tree settings: limited depth and a minimum leaf size to curb overfitting;
# fixed random_state makes the fit reproducible.
model = defineClassifier([
    'DecisionTreeClassifier',
    'max_depth = 4, min_samples_leaf = 8,random_state = 0',
])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'Decision Tree')
# Example of a Neural Network (MLP) classifier on dataset G3.
# FIX: removed a stray dead tuple-expression statement
# ('MLPClassifier', 'solver="adam", alpha=1e-5,hidden_layer_sizes=(3,2), ...')
# left over from an earlier configuration — it was never assigned or used and
# its hidden_layer_sizes contradicted the active params below.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

(X, d, Xt, dt) = loadData('G3')            # load training and testing data

name = 'MLPClassifier'                     # name of the classifier
# parameters of the classifier (alternate configuration kept for experiments)
params = 'solver="adam", alpha=1e-5,hidden_layer_sizes=(7,2), random_state=1,max_iter=2000'
# params = 'solver="adam", alpha=1e-5,hidden_layer_sizes=(10,), random_state=1,max_iter=2000'

clf = defineClassifier([name, params])     # classifier definition
clf = trainClassifier(clf, X, d)           # classifier training
ds = testClassifier(clf, Xt)               # classifier testing
PrintConfusion(dt, ds)                     # confusion matrix
# PlotDecisionLines(clf,X)                 # decision lines (disabled by author)
PlotFeatures(X, d, 'Neural Network')       # feature space
# Example of an SVM classifier on dataset G3.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G3')

# Active kernel is linear; alternative kernels are kept for experimentation.
svm_params = 'kernel = "linear", gamma=0.2, C=0.1'
# svm_params = 'kernel = "poly" , gamma=0.2, C=0.1, degree = 2'
# svm_params = 'kernel = "rbf" , gamma=0.2,C=1'
# svm_params = 'kernel = "sigmoid", gamma=0.01, C=0.01'

# Build, fit, and evaluate the classifier.
model = defineClassifier(['SVC', svm_params])
model = trainClassifier(model, X_train, y_train)
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'SVM')
# Example of a KNN classifier (k = 3) on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# Build and fit a 3-nearest-neighbors classifier.
model = defineClassifier(['KNeighborsClassifier', 'n_neighbors=3'])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'KNN-3')
# Example of a KNN classifier on G2, comparing training vs. testing accuracy.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines
from sklearn.metrics import confusion_matrix, accuracy_score
import numpy as np

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

K = 11  # number of neighbors
model = defineClassifier(['KNeighborsClassifier', 'n_neighbors=' + str(K)])
model = trainClassifier(model, X_train, y_train)

# Predictions on both splits, then the two accuracies.
pred_test = testClassifier(model, X_test)
pred_train = testClassifier(model, X_train)
acc_test = accuracy_score(y_test, pred_test)    # testing accuracy
acc_train = accuracy_score(y_train, pred_train) # training accuracy

# Stack both splits so the decision-line plot covers all samples.
all_samples = np.concatenate((X_train, X_test), axis=0)

# Plot decision lines twice: once over the training view, once over testing.
PlotDecisionLines(model, all_samples)
PlotFeatures(X_train, y_train, 'Training (KNN-' + str(K) + ') Acc = ' + str(acc_train))
PlotDecisionLines(model, all_samples)
PlotFeatures(X_test, y_test, 'Testing (KNN-' + str(K) + ') Acc = ' + str(acc_test))
# Example of a minimum-distance (nearest-centroid) classifier on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# NearestCentroid takes no extra parameters here.
model = defineClassifier(['NearestCentroid', ''])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'dmin')
# Example of an AdaBoost classifier on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# Ensemble of 100 weak learners.
model = defineClassifier(['AdaBoostClassifier', 'n_estimators=100'])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'AdaBoost')
# Example of a Quadratic Discriminant Analysis classifier on dataset G4.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G4')

# QDA takes no extra parameters here.
model = defineClassifier(['QuadraticDiscriminantAnalysis', ''])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'QDA')
# Example of a Logistic Regression classifier on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# Strong regularization (small C) with the lbfgs solver.
model = defineClassifier(['LogisticRegression', 'C=0.1,solver="lbfgs"'])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'Logistic Regression')
# Example of a Random Forest classifier on dataset G2.
from clf_utils import loadData, defineClassifier, trainClassifier, testClassifier, PrintConfusion, PlotFeatures, PlotDecisionLines

# Load the training and testing splits.
X_train, y_train, X_test, y_test = loadData('G2')

# 20 trees; fixed random_state makes the fit reproducible.
model = defineClassifier(['RandomForestClassifier', 'n_estimators=20,random_state = 0'])
model = trainClassifier(model, X_train, y_train)

# Evaluate on the held-out split.
y_pred = testClassifier(model, X_test)
PrintConfusion(y_test, y_pred)

# Visualize decision boundaries and the training feature space.
PlotDecisionLines(model, X_train)
PlotFeatures(X_train, y_train, 'Random Forest')