def main(): print("-- Gradient Boosting Regression --") X, y = datasets.make_regression(n_features=1, n_samples=150, bias=0, noise=5) X_train, X_test, y_train, y_test = train_test_split(standardize(X), y, test_size=0.5) clf = GradientBoostingRegressor(debug=True) clf.fit(X_train, y_train) y_pred = clf.predict(X_test) mse = mean_squared_error(y_test, y_pred) print("Mean Squared Error:", mse) # Plot the results plt.scatter(X_test[:, 0], y_test, color='black') plt.scatter(X_test[:, 0], y_pred, color='green') plt.title("Gradient Boosting Regression (%.2f MSE)" % mse) plt.show()
def main(): print("-- Classification Tree --") data = datasets.load_iris() X = data.data y = data.target X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4) clf = ClassificationTree() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) accuracy = accuracy_score(y_test, y_pred) print("Accuracy:", accuracy) pca = PCA() pca.plot_in_2d(X_test, y_pred, title="Decision Tree", accuracy=accuracy, legend_labels=data.target_names) print("-- Regression Tree --") X, y = datasets.make_regression(n_features=1, n_samples=100, bias=0, noise=5) X_train, X_test, y_train, y_test = train_test_split(standardize(X), y, test_size=0.3) clf = RegressionTree() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) mse = mean_squared_error(y_test, y_pred) print("Mean Squared Error:", mse) # Plot the results plt.scatter(X_test[:, 0], y_test, color='black') plt.scatter(X_test[:, 0], y_pred, color='green') plt.title("Regression Tree (%.2f MSE)" % mse) plt.show()
def main(): print("-- Gradient Boosting Classification --") data = datasets.load_iris() X = data.data y = data.target X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4) clf = GradientBoostingClassifier(debug=True) clf.fit(X_train, y_train) y_pred = clf.predict(X_test) accuracy = accuracy_score(y_test, y_pred) print("Accuracy:", accuracy) Plot().plot_in_2d(X_test, y_pred, title="Gradient Boosting", accuracy=accuracy, legend_labels=data.target_names) print("-- Gradient Boosting Regression --") X, y = datasets.make_regression(n_features=1, n_samples=150, bias=0, noise=5) X_train, X_test, y_train, y_test = train_test_split(standardize(X), y, test_size=0.5) clf = GradientBoostingRegressor(debug=True) clf.fit(X_train, y_train) y_pred = clf.predict(X_test) mse = mean_squared_error(y_test, y_pred) print("Mean Squared Error:", mse) # Plot the results plt.scatter(X_test[:, 0], y_test, color='black') plt.scatter(X_test[:, 0], y_pred, color='green') plt.title("Gradient Boosting Regression (%.2f MSE)" % mse) plt.show()
def main(): print ("-- Classification Tree --") data = datasets.load_iris() X = data.data y = data.target X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4) clf = ClassificationTree() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) accuracy = accuracy_score(y_test, y_pred) print ("Accuracy:", accuracy) Plot().plot_in_2d(X_test, y_pred, title="Decision Tree", accuracy=accuracy, legend_labels=data.target_names) print ("-- Regression Tree --") X, y = datasets.make_regression(n_features=1, n_samples=100, bias=0, noise=5) X_train, X_test, y_train, y_test = train_test_split(standardize(X), y, test_size=0.3) clf = RegressionTree() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) mse = mean_squared_error(y_test, y_pred) print ("Mean Squared Error:", mse) # Plot the results plt.scatter(X_test[:, 0], y_test, color='black') plt.scatter(X_test[:, 0], y_pred, color='green') plt.title("Regression Tree (%.2f MSE)" % mse) plt.show()
def main(): print("-- Regression Tree --") # Load temperature data data = pd.read_csv('mlfromscratch/data/TempLinkoping2016.txt', sep="\t") time = np.atleast_2d(data["time"].as_matrix()).T temp = np.atleast_2d(data["temp"].as_matrix()).T X = standardize(time) # Time. Fraction of the year [0, 1] y = temp[:, 0] # Temperature. Reduce to one-dim X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3) clf = RegressionTree() clf.fit(X_train, y_train) y_pred = clf.predict(X_test) y_pred_line = clf.predict(X) # Color map cmap = plt.get_cmap('viridis') mse = mean_squared_error(y_test, y_pred) print("Mean Squared Error:", mse) # Plot the results # Plot the results m1 = plt.scatter(366 * X_train, y_train, color=cmap(0.9), s=10) m2 = plt.scatter(366 * X_test, y_test, color=cmap(0.5), s=10) m3 = plt.scatter(366 * X_test, y_pred, color='black', s=10) plt.suptitle("Regression Tree") plt.title("MSE: %.2f" % mse, fontsize=10) plt.xlabel('Day') plt.ylabel('Temperature in Celcius') plt.legend((m1, m2, m3), ("Training data", "Test data", "Prediction"), loc='lower right') plt.show()