# Shared imports assumed by the snippets below. Perceptron, LogisticRegressionGD,
# the combiner classes and the mesh-grid helper plot_decision_regions (and its
# plot_decision_regions_part variant) are user-defined elsewhere in the repository.
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder


def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    # One-vs-rest relabelling for the perceptron: +1 = target class, -1 = rest.
    y_train_01_subset = y_train.copy()
    y_train_02_subset = y_train.copy()
    y_train_01_subset[(y_train == 1) | (y_train == 2)] = -1
    y_train_01_subset[(y_train_01_subset == 0)] = 1
    y_train_02_subset[(y_train == 1) | (y_train == 0)] = -1
    y_train_02_subset[(y_train_02_subset == 2)] = 1

    ppn1 = Perceptron()
    ppn2 = Perceptron()
    ppn1.fit(X_train, y_train_01_subset)
    ppn2.fit(X_train, y_train_02_subset)

    classifier = Classifier(ppn1, ppn2)
    plot_decision_regions(X_train, y_train, classifier=classifier)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
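# `Classifier` is not defined in this snippet. Below is a minimal sketch of what
# such a two-model combiner could look like; the class name matches the call
# above, but the constructor arguments and the class-assignment rule (model 1
# detects class 0, model 2 detects class 2, everything else falls back to
# class 1) are assumptions, not the original code.
class Classifier:
    def __init__(self, model_0_vs_rest, model_2_vs_rest):
        self.model_0 = model_0_vs_rest
        self.model_2 = model_2_vs_rest

    def predict(self, X):
        # A positive prediction from the first model means "class 0", from the
        # second "class 2"; samples rejected by both get the middle class 1.
        p0 = self.model_0.predict(X)
        p2 = self.model_2.predict(X)
        return np.where(p0 == 1, 0, np.where(p2 == 1, 2, 1))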
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    # In logistic regression the class labels are 0 or 1 (the model outputs
    # class-membership probabilities).
    X_train_01_subset = X_train[(y_train == 0) | (y_train == 1)]
    y_train_01_subset = y_train[(y_train == 0) | (y_train == 1)]

    lrgd = LogisticRegressionGD(eta=0.05, n_iter=1000, random_state=1)
    lrgd.fit(X_train_01_subset, y_train_01_subset)

    plot_decision_regions(X=X_train_01_subset, y=y_train_01_subset, classifier=lrgd)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
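# `LogisticRegressionGD` is assumed to be the textbook full-batch gradient
# descent logistic regression (as in Raschka's "Python Machine Learning");
# a sketch consistent with the eta/n_iter/random_state arguments used above:
class LogisticRegressionGD:
    def __init__(self, eta=0.05, n_iter=100, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            output = self.activation(self.net_input(X))
            errors = y - output
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            # negative log-likelihood of the 0/1 labels
            cost = -y.dot(np.log(output)) - (1 - y).dot(np.log(1 - output))
            self.cost_.append(cost)
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, z):
        # logistic sigmoid, clipped to avoid overflow in exp
        return 1. / (1. + np.exp(-np.clip(z, -250, 250)))

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, 0)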
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    X_train_01_subset = X_train[(y_train == 0) | (y_train == 1)]
    y_train_01_subset = y_train[(y_train == 0) | (y_train == 1)]
    # In the perceptron the output is either 1 or -1.
    y_train_01_subset[(y_train_01_subset == 0)] = -1

    ppn = Perceptron(eta=0.1, n_iter=10)
    ppn.fit(X_train_01_subset, y_train_01_subset)

    plot_decision_regions(X=X_train_01_subset, y=y_train_01_subset, classifier=ppn)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
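# `Perceptron` with the eta/n_iter interface is assumed to be the classic
# Rosenblatt perceptron (Raschka-style); a sketch consistent with the calls in
# these snippets (the last snippet's learningRate/iterationsToStop/learn
# variant is presumably analogous):
class Perceptron:
    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.errors_ = []
        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                # update only when the sample is misclassified
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, -1)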
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    # In logistic regression the class labels are 0 or 1 (the model outputs
    # probabilities). First split: classes {0, 1} vs class 2; second split:
    # class 1 vs the rest.
    y_train_01_subset = y_train.copy()
    y_train_02_subset = y_train.copy()
    y_train_01_subset[(y_train == 0) | (y_train == 1)] = 1
    y_train_01_subset[(y_train_01_subset == 2)] = 0
    y_train_02_subset[(y_train == 2) | (y_train == 0)] = 0
    y_train_02_subset[(y_train_02_subset == 1)] = 1

    lrgd1 = LogisticRegressionGD()
    lrgd2 = LogisticRegressionGD()
    lrgd1.fit(X_train, y_train_01_subset)
    lrgd2.fit(X_train, y_train_02_subset)

    classifier = Classifier(lrgd1, lrgd2)
    # y_1_activation = classifier.activation(y_train_01_subset)
    # y_2_activation = classifier.activation(y_train_02_subset)
    y_3_activation = classifier.activation(X_train)
    # print(y_1_activation)
    # print(y_2_activation)
    print(y_3_activation)

    plot_decision_regions(X_train, y_train, classifier=classifier)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
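# This variant's `Classifier` also exposes `activation`, which the snippet
# prints for the training set. A hypothetical implementation, assuming the
# Raschka-style LogisticRegressionGD interface (net_input + sigmoid activation)
# and the label coding above ({0, 1} vs 2 for the first model, 1 vs rest for
# the second); every detail below is an assumption:
class Classifier:
    def __init__(self, model_a, model_b):
        self.model_a = model_a  # 1 = class 0 or 1, 0 = class 2
        self.model_b = model_b  # 1 = class 1, 0 = class 0 or 2

    def activation(self, X):
        # per-sample probabilities from the two binary models, side by side
        return np.column_stack([
            self.model_a.activation(self.model_a.net_input(X)),
            self.model_b.activation(self.model_b.net_input(X)),
        ])

    def predict(self, X):
        a = self.model_a.predict(X)
        b = self.model_b.predict(X)
        # rejected by the first model -> class 2; otherwise the second model
        # separates class 1 from class 0
        return np.where(a == 0, 2, np.where(b == 1, 1, 0))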
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    X_train_01_subset, y_train_01_subset, y_train_03_subset = prepare_class_subsets(
        X_train, y_train)
    print('01_subset ', y_train_01_subset)
    print('03_subset ', y_train_03_subset)

    ppn1 = LogisticRegressionGD(eta=0.55, n_iter=15000)
    ppn1.fit(X_train_01_subset, y_train_01_subset)
    ppn2 = LogisticRegressionGD(eta=0.55, n_iter=15000)
    ppn2.fit(X_train_01_subset, y_train_03_subset)

    calc_accuracy_total(X_train, ppn1, ppn2, y_train,
                        y_train_01_subset, y_train_03_subset)

    # In the perceptron the output is either 1 or -1.
    # y_train_01_subset[(y_train_01_subset == 0)] = -1
    clas = Classifier(ppn1, ppn2)
    plot_decision_regions(X_train, y_train, classifier=clas)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
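# `prepare_class_subsets` and `calc_accuracy_total` are not shown in this
# variant. Hypothetical versions consistent with how they are called above
# (0/1 labels, since the binary models are LogisticRegressionGD; the combining
# rule mirrors the perceptron variant that follows):
def prepare_class_subsets(X_train, y_train):
    # class 0 vs rest and class 2 vs rest, coded with 1 = target class
    y_01 = np.where(y_train == 0, 1, 0)
    y_03 = np.where(y_train == 2, 1, 0)
    return X_train.copy(), y_01, y_03


def calc_accuracy_total(X_train, model1, model2, y_train, y_01, y_03):
    p1 = model1.predict(X_train)
    p2 = model2.predict(X_train)
    acc1 = np.mean(p1 == y_01)
    acc2 = np.mean(p2 == y_03)
    print('acc1', acc1)
    print('acc2', acc2)
    # trust the more accurate binary model first; leftovers become class 1
    if acc1 > acc2:
        y_results = np.where(p1 == 1, 0, np.where(p2 == 1, 2, 1))
    else:
        y_results = np.where(p2 == 1, 2, np.where(p1 == 1, 0, 1))
    print('acc_total', np.mean(y_results == y_train))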
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    # One-vs-rest relabelling: class 0 vs rest and class 2 vs rest, +1/-1 coded.
    y_train_01_subset = y_train.copy()
    y_train_03_subset = y_train.copy()
    X_train_01_subset = X_train.copy()
    y_train_01_subset[(y_train == 1) | (y_train == 2)] = -1
    y_train_01_subset[(y_train_01_subset == 0)] = 1
    y_train_03_subset[(y_train == 1) | (y_train == 0)] = -1
    y_train_03_subset[(y_train_03_subset == 2)] = 1
    print('y_train_01_subset ', y_train_01_subset)
    print('y_train_03_subset ', y_train_03_subset)

    ppn1 = Perceptron(eta=0.1, n_iter=500)
    ppn1.fit(X_train_01_subset, y_train_01_subset)
    ppn2 = Perceptron(eta=0.1, n_iter=500)
    ppn2.fit(X_train_01_subset, y_train_03_subset)

    y1_predict = ppn1.predict(X_train)
    y3_predict = ppn2.predict(X_train)
    accuracy_1 = accuracy(ppn1.predict(X_train), y_train_01_subset)
    accuracy_3 = accuracy(ppn2.predict(X_train), y_train_03_subset)
    print("acc1", accuracy_1)
    print("acc2", accuracy_3)

    # Combine the binary predictions (+1 means "class detected"): trust the
    # more accurate perceptron first, assign the remaining samples to class 1.
    if accuracy_1 > accuracy_3:
        y_results = np.where(y1_predict == 1, 0, np.where(y3_predict == 1, 2, 1))
    else:
        y_results = np.where(y3_predict == 1, 2, np.where(y1_predict == 1, 0, 1))
    print("acc_total", accuracy(y_results, y_train))

    # In the perceptron the output is either 1 or -1.
    # y_train_01_subset[(y_train_01_subset == 0)] = -1
    clas = Classifier(ppn1, ppn2)
    plot_decision_regions(X_train, y_train, classifier=clas)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
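# The `accuracy` helper used above is not defined in this snippet; the assumed
# one-liner is simply the fraction of matching labels:
def accuracy(y_pred, y_true):
    return np.mean(y_pred == y_true)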
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    # print('y = ', y)
    x_train, x_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    y_train_03_subset = y_train.copy()
    y_train_01_subset = y_train.copy()
    x_train_01_subset = x_train.copy()

    clasyficationFactory = None
    clas = 'Perceptron'
    # clas = 'regLog'
    if clas == 'Perceptron':
        clasyficationFactory = Perceptron()
        # perceptron labels: +1 = target class, -1 = rest
        y_train_01_subset[(y_train == 1) | (y_train == 2)] = -1
        y_train_01_subset[(y_train_01_subset == 0)] = 1
        y_train_03_subset[(y_train == 1) | (y_train == 0)] = -1
        y_train_03_subset[(y_train_03_subset == 2)] = 1
    else:
        clasyficationFactory = LogisticRegressionGD()
        # logistic-regression labels: 0/1
        y_train_01_subset[(y_train == 1) | (y_train == 2)] = 1
        y_train_01_subset[(y_train_01_subset == 0)] = 0
        y_train_03_subset[(y_train == 1) | (y_train == 0)] = 1
        y_train_03_subset[(y_train_03_subset == 2)] = 0

    ppn1 = clasyficationFactory.startTrain(x_train_01_subset, y_train_01_subset)
    ppn3 = clasyficationFactory.startTrain(x_train_01_subset, y_train_03_subset)

    if clas == 'regLog':
        probabilityofLogicReggression(ppn1, ppn3, x_train_01_subset)

    classifier = clasyficationFactory.classifierFactory(ppn1, ppn3)
    plot_decision_regions(x_train, y_train, classifier=classifier)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
def main():
    iris = datasets.load_iris()
    x = iris.data[:, [2, 3]]
    y = iris.target
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=0.3, random_state=1, stratify=y)

    multi_classifier = MultiClassifier(x_train, y_train)

    plot_decision_regions(X=x_test, y=y_test, classifier=multi_classifier)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
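# `MultiClassifier` is not shown; it is built from the training data and then
# used directly as a classifier. A plausible sketch is a one-vs-rest wrapper
# that fits one binary LogisticRegressionGD per class in the constructor and
# predicts the class whose model produces the largest net input. The internals
# and hyperparameters below are assumptions.
class MultiClassifier:
    def __init__(self, x_train, y_train):
        self.classes_ = np.unique(y_train)
        self.models_ = []
        for cls in self.classes_:
            y_bin = np.where(y_train == cls, 1, 0)  # one-vs-rest recoding
            model = LogisticRegressionGD(eta=0.05, n_iter=1000, random_state=1)
            model.fit(x_train, y_bin)
            self.models_.append(model)

    def predict(self, x):
        # score each sample with every binary model and take the argmax
        scores = np.column_stack([m.net_input(x) for m in self.models_])
        return self.classes_[np.argmax(scores, axis=1)]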
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [1, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.3, random_state=1, stratify=y)

    multi = Multiclass()
    multi.fit(X_train, y_train)
    multi.predict(X_test)

    plot_decision_regions(X=X_test, y=y_test, classifier=multi)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
def main():
    iris = datasets.load_iris()
    print(iris.feature_names)
    X = iris.data[:, [1, 2]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=1, stratify=y)

    # One-vs-rest relabelling for logistic regression: 1 = target class, 0 = rest.
    y_train_logreg1 = y_train.copy()
    y_train_logreg2 = y_train.copy()
    X_train_logreg = X_train.copy()
    y_train_logreg1[(y_train == 1) | (y_train == 2)] = 0
    y_train_logreg1[(y_train == 0)] = 1
    y_train_logreg2[(y_train == 1) | (y_train == 0)] = 0
    y_train_logreg2[(y_train == 2)] = 1

    logreg1 = LogisticRegressionGD(eta=0.01)
    logreg2 = LogisticRegressionGD(eta=0.01)
    logreg1.fit(X_train_logreg, y_train_logreg1)
    logreg2.fit(X_train_logreg, y_train_logreg2)

    plot_decision_regions_part(X=X_train_logreg, y=y_train_logreg1, classifier=logreg1)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()

    plot_decision_regions_part(X=X_train_logreg, y=y_train_logreg2, classifier=logreg2)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()

    clsif = Classifier(logreg1, logreg2)

    # predicted 0's and 1's
    logreg1_pred = logreg1.predict(X_train)
    logreg2_pred = logreg2.predict(X_train)

    # partial accuracies
    ac_logreg1 = accuracy_score(y_train_logreg1, logreg1_pred)
    ac_logreg2 = accuracy_score(y_train_logreg2, logreg2_pred)

    # overall accuracy: trust the more accurate binary model first
    if ac_logreg1 > ac_logreg2:
        y_res = np.where(logreg1_pred == 1, 0, np.where(logreg2_pred == 1, 2, 1))
    else:
        y_res = np.where(logreg2_pred == 1, 2, np.where(logreg1_pred == 1, 0, 1))
    print('Overall accuracy:', round(accuracy_score(y_train, y_res), 3))

    plot_decision_regions(X=X_train_logreg, y=y_train, classifier=clsif)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.legend(loc='upper left')
    plt.show()
def main():
    iris = datasets.load_iris()
    X = iris.data[:, [2, 3]]
    y = iris.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=1, stratify=y)

    # One-vs-rest relabelling for the perceptron: +1 = target class, -1 = rest.
    y_train_perc_1 = y_train.copy()
    y_train_perc_2 = y_train.copy()
    X_train_perc = X_train.copy()
    y_train_perc_1[(y_train == 1) | (y_train == 2)] = -1
    y_train_perc_1[(y_train == 0)] = 1
    y_train_perc_2[(y_train == 1) | (y_train == 0)] = -1
    y_train_perc_2[(y_train == 2)] = 1

    perc1 = Perceptron(eta=0.1)
    perc2 = Perceptron(eta=0.01, n_iter=173)
    perc1.fit(X_train_perc, y_train_perc_1)
    perc2.fit(X_train_perc, y_train_perc_2)

    # Columns [2, 3] of the iris data are petal length and petal width.
    plot_decision_regions_part(X=X_train_perc, y=y_train_perc_1, classifier=perc1)
    plt.xlabel(r'petal length (cm)')
    plt.ylabel(r'petal width (cm)')
    plt.legend(loc='upper left')
    plt.title("Class division by first perceptron")
    plt.show()

    plot_decision_regions_part(X=X_train_perc, y=y_train_perc_2, classifier=perc2)
    plt.xlabel(r'petal length (cm)')
    plt.ylabel(r'petal width (cm)')
    plt.legend(loc='upper left')
    plt.title("Class division by second perceptron")
    plt.show()

    clsif = Classifier(perc1, perc2)

    # predicted -1's and 1's
    perc1_pred = perc1.predict(X_train)
    perc2_pred = perc2.predict(X_train)

    # partial accuracies
    ac_perc1 = accuracy_score(y_train_perc_1, perc1_pred)
    ac_perc2 = accuracy_score(y_train_perc_2, perc2_pred)

    # overall accuracy: trust the more accurate binary model first
    if ac_perc1 > ac_perc2:
        y_res = np.where(perc1_pred == 1, 0, np.where(perc2_pred == 1, 2, 1))
    else:
        y_res = np.where(perc2_pred == 1, 2, np.where(perc1_pred == 1, 0, 1))
    print('Overall accuracy:', round(accuracy_score(y_train, y_res), 3))
    print(iris.feature_names)

    plot_decision_regions(X=X_train_perc, y=y_train, classifier=clsif)
    plt.xlabel(r'petal length (cm)')
    plt.ylabel(r'petal width (cm)')
    plt.legend(loc='upper left')
    plt.title("Overall class division by multiperceptron model")
    plt.show()
# This snippet starts mid-function: X_st (presumably standardized features)
# and y are assumed to be prepared earlier.
onehotencoder = OneHotEncoder()  # one column of class labels -> one-hot matrix
y_st = np.reshape(y, (-1, 1))
y_st = onehotencoder.fit_transform(y_st).toarray()

# In the perceptron the output is either 1 or -1.
# y_train_01_subset[(y_train_01_subset == 0)] = -1
y_st[y_st == 0] = -1

X_train, X_test, y_train, y_test = train_test_split(
    X_st, y_st, test_size=0.3, random_state=1, stratify=y)

mclass = MultiClassPredict(eta=0.05, n_iter=10, classes=3)
mclass.fit(X_train, y_train)
predicted = mclass.predict(X_test)

# Decode the one-hot test labels back to class indices.
y_test[y_test == -1] = 0
y_test = np.argmax(y_test, axis=1)

# Fraction of correctly classified test samples (accuracy).
probability = y_test[y_test == predicted].shape[0] / y_test.shape[0]
print(probability)

from sklearn.metrics import confusion_matrix
cnf = confusion_matrix(y_test, predicted)
print(cnf)

plot_decision_regions(X=X_test, y=y_test, classifier=mclass)
plt.xlabel(r'$x_1$')
plt.ylabel(r'$x_2$')
plt.legend(loc='upper left')
plt.show()
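# `MultiClassPredict` is not shown. Judging from its use above (fitted on a
# -1/+1-coded one-hot target matrix, predictions compared against decoded class
# indices), a plausible sketch trains one perceptron per target column and
# predicts the argmax of the net inputs. All details below are assumptions.
class MultiClassPredict:
    def __init__(self, eta=0.05, n_iter=10, classes=3):
        self.eta = eta
        self.n_iter = n_iter
        self.classes = classes

    def fit(self, X, Y):
        # Y has shape (n_samples, classes) with -1/+1 entries
        self.models_ = []
        for k in range(self.classes):
            model = Perceptron(eta=self.eta, n_iter=self.n_iter)
            model.fit(X, Y[:, k])
            self.models_.append(model)
        return self

    def predict(self, X):
        # pick the class whose one-vs-rest perceptron is most confident
        scores = np.column_stack([m.net_input(X) for m in self.models_])
        return np.argmax(scores, axis=1)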
def main():
    iris = datasets.load_iris()
    irisData = iris.data[:, [2, 3]]
    irisClass = iris.target
    dataTrainingSet, dataTestSet, classTrainingSet, classTestSet = train_test_split(
        irisData, irisClass, test_size=0.3, random_state=1, stratify=irisClass)

    # =============== Perceptron ====================
    # Perceptron 1: class 0 (+1) vs class 1 (-1)
    classTrainingSubset1 = np.copy(classTrainingSet)
    classTrainingSubset1 = classTrainingSubset1[(classTrainingSubset1 != 2)]
    dataTrainingSubset1 = np.copy(dataTrainingSet)
    dataTrainingSubset1 = dataTrainingSubset1[(classTrainingSet != 2)]
    classTrainingSubset1[(classTrainingSubset1 != 0)] = -1
    classTrainingSubset1[(classTrainingSubset1 != -1)] = 1
    perceptron1 = Perceptron(learningRate=0.1, iterationsToStop=10)
    perceptron1.learn(dataTrainingSubset1, classTrainingSubset1)

    # Perceptron 2: class 2 (+1) vs class 0 (-1)
    classTrainingSubset2 = np.copy(classTrainingSet)
    classTrainingSubset2 = classTrainingSubset2[(classTrainingSubset2 != 1)]
    dataTrainingSubset2 = np.copy(dataTrainingSet)
    dataTrainingSubset2 = dataTrainingSubset2[(classTrainingSet != 1)]
    classTrainingSubset2[(classTrainingSubset2 != 2)] = -1
    classTrainingSubset2[(classTrainingSubset2 != -1)] = 1
    perceptron2 = Perceptron(learningRate=0.1, iterationsToStop=10)
    perceptron2.learn(dataTrainingSubset2, classTrainingSubset2)

    # Perceptron 3: class 1 (already labelled +1) vs class 2 (-1)
    classTrainingSubset3 = np.copy(classTrainingSet)
    classTrainingSubset3 = classTrainingSubset3[(classTrainingSubset3 != 0)]
    dataTrainingSubset3 = np.copy(dataTrainingSet)
    dataTrainingSubset3 = dataTrainingSubset3[(classTrainingSet != 0)]
    classTrainingSubset3[(classTrainingSubset3 != 1)] = -1
    perceptron3 = Perceptron(learningRate=0.35, iterationsToStop=850)
    perceptron3.learn(dataTrainingSubset3, classTrainingSubset3)

    multiPerceptron = MultiPerceptron(perceptron1, perceptron2, perceptron3)
    plot_decision_regions(X=dataTestSet, y=classTestSet, classifier=multiPerceptron)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.title('Perceptron')
    plt.legend(loc='upper left')
    plt.show()

    # =============== Logistic regression ====================
    # Reuse the pairwise subsets, recoded to 0/1 labels (-1 entries become 0).
    classTrainingSubset1[(classTrainingSubset1 != 1)] = 0
    logisticRegression1 = LogisticRegression(learningRate=0.05,
                                             iterationsToStop=1000,
                                             random_state=1)
    logisticRegression1.learn(dataTrainingSubset1, classTrainingSubset1)
    logisticRegression1.printProbability(dataTrainingSubset1)

    classTrainingSubset2[(classTrainingSubset2 != 1)] = 0
    logisticRegression2 = LogisticRegression(learningRate=0.05,
                                             iterationsToStop=1000,
                                             random_state=1)
    logisticRegression2.learn(dataTrainingSubset2, classTrainingSubset2)
    logisticRegression2.printProbability(dataTrainingSubset2)

    classTrainingSubset3[(classTrainingSubset3 != 1)] = 0
    logisticRegression3 = LogisticRegression(learningRate=0.15,
                                             iterationsToStop=1500,
                                             random_state=1)
    logisticRegression3.learn(dataTrainingSubset3, classTrainingSubset3)
    logisticRegression3.printProbability(dataTrainingSubset3)

    multiLogisticRegression = MultiLogisticRegression(logisticRegression1,
                                                      logisticRegression2,
                                                      logisticRegression3)
    plot_decision_regions(X=dataTestSet, y=classTestSet,
                          classifier=multiLogisticRegression)
    plt.xlabel(r'$x_1$')
    plt.ylabel(r'$x_2$')
    plt.title('Logistic regression')
    plt.legend(loc='lower right')
    plt.show()
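# `MultiPerceptron` (and, analogously, `MultiLogisticRegression`) is not shown.
# It combines three pairwise models (0 vs 1, 2 vs 0, 1 vs 2). A hypothetical
# majority-vote implementation, consistent with the label polarity used above:
class MultiPerceptron:
    def __init__(self, perceptron1, perceptron2, perceptron3):
        self.p1 = perceptron1  # +1 -> class 0, -1 -> class 1
        self.p2 = perceptron2  # +1 -> class 2, -1 -> class 0
        self.p3 = perceptron3  # +1 -> class 1, -1 -> class 2

    def predict(self, X):
        n = X.shape[0]
        votes = np.zeros((n, 3), dtype=int)
        rows = np.arange(n)
        # each pairwise model casts one vote per sample
        votes[rows, np.where(self.p1.predict(X) == 1, 0, 1)] += 1
        votes[rows, np.where(self.p2.predict(X) == 1, 2, 0)] += 1
        votes[rows, np.where(self.p3.predict(X) == 1, 1, 2)] += 1
        # a class can collect at most two of the three pairwise votes
        return np.argmax(votes, axis=1)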