def main():
    """Train AdaBoost classifiers for several iteration counts on
    hw2_data_1 and print the test error rate for each run.

    The original body repeated the identical train/evaluate sequence
    four times, differing only in the epoch count; the runs collapse
    into a single loop with no change in output.
    """
    data = DataHelper.Data()
    x_train, y_train, x_test, y_test, _ = data.loadData("hw2_data_1.txt", 2, 70)

    for epoch in (3, 5, 10, 20):
        # Uniform initial sample weights, re-initialized for every run
        # so each classifier starts from the same distribution.
        weights = np.ones(len(x_train)) / len(x_train)
        adaboost = Adaboost(weights, epoch, DecisionRule())
        adaboost.train(x_train, y_train)
        prediction = adaboost.predict(x_test)
        print("Error rate for %d iterations is %.2f%%"
              % (epoch, adaboost.evaluate(prediction, y_test)))
def main():
    """Run SVM experiments on hw2_data_2 with three kernel families
    (radial, sigmoid, polynomial) and display the resulting plots.
    """
    data = DataHelper.Data()
    x_train, y_train, x_test, y_test, _ = data.loadData(
        "hw2_data_2.txt", 20, 700)

    # Radial-basis kernel: gamma swept over a six-point log grid.
    svm_radial = SVM(x_train, y_train, x_test, y_test,
                     kernel="RADIAL",
                     gamma_range=np.logspace(-3, 2, 6))
    svm_radial.run()

    # Sigmoid kernel: same gamma sweep, built fresh for this run.
    svm_sigmoid = SVM(x_train, y_train, x_test, y_test,
                      kernel="SIGMOID",
                      gamma_range=np.logspace(-3, 2, 6))
    svm_sigmoid.run()

    # Polynomial kernel: sweep degrees 1 through 10 instead of gamma.
    svm_poly = SVM(x_train, y_train, x_test, y_test,
                   kernel="POLYNOMIAL",
                   degree_range=range(1, 11))
    svm_poly.run()

    plt.show()
def main():
    """Fit a gradient-boosting model on hw2_data_2."""
    loader = DataHelper.Data()
    x_train, y_train, x_test, y_test, attr_list = loader.loadData(
        "hw2_data_2.txt", 20, 700)
    booster = GradientBoosting(x_train, y_train, x_test, y_test, attr_list)
    booster.fit()
def main():
    """Fit a MARS classifier on hw2_data_2 and print its test error rate."""
    loader = DataHelper.Data()
    x_train, y_train, x_test, y_test, _ = loader.loadData(
        "hw2_data_2.txt", 20, 700)
    classifier = MARS(x_train, y_train, x_test, y_test)
    classifier.fit()
    print("The testing error rate for MARS classifier is: %.4f"
          % classifier.evaluate())
def main():
    """Train a perceptron on hw2_data_1 and report its test error rate."""
    loader = DataHelper.Data()
    x_train, y_train, x_test, y_test, _ = loader.loadData(
        "hw2_data_1.txt", 2, 70)  # load data

    epochs = 50
    # One weight per feature plus a bias term, all initialized to one.
    initial_weights = np.ones(x_train.shape[1] + 1)

    model = perceptron(initial_weights, learningRate=1, epoch=epochs)
    model.train(x_train, y_train)
    print("The error rate for perceptron after %i epochs is %.2f %%"
          % (epochs, model.evaluate(x_test, y_test)))