def testLinearRegressionOptimizer(self):
    algorithm = LinearRegression(
        optimizer=NumericGradientChecker(GradientDescent(learning_rate=0.1)))
    # Expect only some minor fp inaccuracy.
    self.runSingleLinearRegression(algorithm, max_mse=1e-8)
def main(num_samples=50, points_per_dimension=20):
    X, y = datasets.make_classification(
        n_samples=num_samples, n_features=2, n_informative=2, n_redundant=0,
        n_clusters_per_class=2, flip_y=0.1)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_proportion=0.2)

    logistic_reg = LogisticRegression(
        optimizer=GradientDescent(num_iterations=20000))
    logistic_reg.fit(X_train, y_train)
    decision_boundary_graph(X_test, y_test, logistic_reg, "Logistic Regression",
                            points_per_dimension=points_per_dimension)

    if svm_able_to_run:
        svm_linear = SVM(Kernel.linear_kernel(), C=1)
        svm_linear.fit(X_train, y_train)
        decision_boundary_graph(X_test, y_test, svm_linear, "SVM - Linear Kernel",
                                points_per_dimension=points_per_dimension)

        svm_gaussian = SVM(Kernel.gaussian_kernel(sigma=2), C=1)
        svm_gaussian.fit(X_train, y_train)
        decision_boundary_graph(X_test, y_test, svm_gaussian, "SVM - Gaussian Kernel",
                                points_per_dimension=points_per_dimension)
    else:
        print("WARNING: cvxopt not installed, SVM will not work.")

    knn_1 = KNN_Classification(k=1)
    knn_1.fit(X, y)
    knn_3 = KNN_Classification(k=3)
    knn_3.fit(X, y)
    decision_boundary_graph(X_test, y_test, knn_1, "KNN K=1",
                            points_per_dimension=points_per_dimension)
    decision_boundary_graph(X_test, y_test, knn_3, "KNN K=3",
                            points_per_dimension=points_per_dimension)
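# A minimal standalone sketch of what a Gaussian (RBF) kernel computes, for
# reference alongside Kernel.gaussian_kernel(sigma=2) above. The form assumed
# here is illustrative and is not the project's own kernel implementation.
import numpy as np

def gaussian_kernel_value(x1, x2, sigma=2.0):
    """k(x1, x2) = exp(-||x1 - x2||^2 / (2 * sigma^2))."""
    diff = np.asarray(x1, dtype=float) - np.asarray(x2, dtype=float)
    return np.exp(-np.dot(diff, diff) / (2.0 * sigma ** 2))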
def __init__(self, optimizer=None, eps=1e-3, acceptable_diff=1e-6,
             print_out_diff_gradient=True):
    if optimizer is None:
        optimizer = GradientDescent()
    self._optimizer = optimizer
    self._print_out_diff_gradient = print_out_diff_gradient
    self._eps = eps
    self._acceptable_diff = acceptable_diff
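# A minimal sketch of the check that the eps / acceptable_diff parameters above
# suggest: compare an analytic gradient against a central-difference estimate,
# (cost(theta + eps) - cost(theta - eps)) / (2 * eps), one coordinate at a time.
# The cost_fn / analytic_grad names are illustrative, not the project's API.
import numpy as np

def numeric_gradient_close(cost_fn, analytic_grad, theta, eps=1e-3,
                           acceptable_diff=1e-6):
    numeric_grad = np.zeros_like(theta, dtype=float)
    for i in range(theta.size):
        step = np.zeros_like(theta, dtype=float)
        step[i] = eps
        numeric_grad[i] = (cost_fn(theta + step) - cost_fn(theta - step)) / (2 * eps)
    return np.max(np.abs(numeric_grad - analytic_grad)) <= acceptable_diff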
def main(num_iterations=200, iterations_per_update=20):
    # Just has one feature to make it easy to graph.
    X, y = datasets.make_classification(
        n_samples=200, n_features=1, n_informative=1, n_redundant=0,
        n_clusters_per_class=1, flip_y=0.1)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_proportion=0.2)

    logistic_reg = LogisticRegression(optimizer=OptimizerCostGraph(
        GradientDescent(num_iterations=num_iterations),
        iterations_per_update=iterations_per_update))
    logistic_reg.fit(X_train, y_train)
def main():
    # Just has one feature to make it easy to graph.
    X, y = datasets.make_classification(
        n_samples=200, n_features=1, n_informative=1, n_redundant=0,
        n_clusters_per_class=1, flip_y=0.1)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_proportion=0.2)

    logistic_reg = LogisticRegression(optimizer=GradientDescent(num_iterations=20000))
    logistic_reg.fit(X_train, y_train)

    # Raw probabilities first, then predictions thresholded at 0.5.
    y_pred_probability = logistic_reg.predict(X_test)
    mse = mean_square_error(y_pred_probability, y_test)

    logistic_reg.set_classification_boundary(0.5)
    y_pred_classified = logistic_reg.predict(X_test)
    acc = accuracy(y_pred_classified, y_test)

    plt.figure()
    plt.scatter(X_test, y_test, color="Black", label="Actual")
    plt.scatter(X_test, y_pred_probability, color="Red",
                label="Classification Probability")
    plt.scatter(X_test, y_pred_classified, color="Blue", label="Rounded Prediction")
    plt.legend(loc='center right', fontsize=8)
    plt.title("Logistic Regression (%.2f MSE, %.2f%% Accuracy)" % (mse, acc * 100))
    plt.show()
def main():
    # Just has one feature to make it easy to graph.
    X, y = datasets.make_regression(n_samples=200, n_features=1,
                                    bias=random.uniform(-10, 10), noise=5)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_proportion=0.2)

    # Closed-form (normal equation) fit.
    linear_reg = LinearRegression()
    linear_reg.fit(X_train, y_train)
    y_pred = linear_reg.predict(X_test)
    mse = mean_square_error(y_pred, y_test)

    # Iterative fit with gradient descent.
    linear_reg_w_grad_desc = LinearRegression(
        optimizer=GradientDescent(num_iterations=2500))
    linear_reg_w_grad_desc.fit(X_train, y_train)
    y_pred_w_grad_desc = linear_reg_w_grad_desc.predict(X_test)
    mse_w_grad_desc = mean_square_error(y_pred_w_grad_desc, y_test)

    plt.figure()
    plt.scatter(X_test, y_test, color="Black", label="Actual")
    plt.plot(X_test, y_pred, label="Estimate")
    plt.plot(X_test, y_pred_w_grad_desc, label="Estimate using Optimizer")
    plt.legend(loc='lower right', fontsize=8)
    plt.title("Linear Regression (%.2f MSE Normal Eq, %.2f MSE Gradient Descent)"
              % (mse, mse_w_grad_desc))
    plt.show()
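# For reference, "Normal Eq" in the plot title above refers to the closed-form
# least-squares solution. A minimal NumPy sketch of that idea (bias handled via
# an appended column of ones); this mirrors the concept, not the project's code.
import numpy as np

def normal_equation_fit(X, y):
    """Return weights w minimising ||Xb @ w - y||^2, where Xb = [X, 1]."""
    Xb = np.hstack([X, np.ones((X.shape[0], 1))])
    # w = (Xb^T Xb)^-1 Xb^T y, computed with a linear solver for stability.
    return np.linalg.solve(Xb.T @ Xb, Xb.T @ y)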
def CreateDefaultLogisticRegression():
    return LogisticRegression(GradientDescent())
def _createLogisticRegression(self):
    return LogisticRegression(NumericGradientChecker(GradientDescent()))