from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split

# mla import paths below are assumed to match the package layout used
# elsewhere in this repo
from mla.linear_models import LinearRegression
from mla.metrics import mean_squared_error


def regression():
    # Generate a random regression problem
    X, y = make_regression(n_samples=10000, n_features=100, n_informative=75,
                           n_targets=1, noise=0.05, random_state=1111, bias=0.5)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,
                                                        random_state=1111)

    # Linear regression trained by gradient descent with an L2 penalty
    model = LinearRegression(lr=0.01, max_iters=2000, penalty='l2', C=0.03)
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    print('regression mse', mean_squared_error(y_test, predictions))
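# Usage sketch: the entry-point guard below is an assumption (it is not part
# of the original example) showing how the function would be run as a script.
if __name__ == '__main__':
    regression()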
def test_linear():
    # Assumes X_train, X_test, y_train, y_test exist at module level
    # (see the setup sketch below)
    model = LinearRegression(lr=0.01, max_iters=2000, penalty='l2', C=0.03)
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    assert mean_squared_error(y_test, predictions) < 0.25
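# A minimal sketch of the module-level setup the test above assumes; the
# exact fixture in the original test module is not shown, so this mirrors
# the regression() example's data generation and split.
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split

X, y = make_regression(n_samples=10000, n_features=100, n_informative=75,
                       n_targets=1, noise=0.05, random_state=1111, bias=0.5)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,
                                                    random_state=1111)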
import sys
sys.path.append("../")

import numpy as np

from mla.linear_models import LinearRegression, LinearRegression_GD
from mla.metrics import mean_squared_error

# Construct data: polynomial features of x, with target y = sqrt(x)
X = np.random.random(1000)[:, None]
Y = np.sqrt(X)
X = np.hstack([np.ones(X.shape), X, X**2, X**3, X**4])

# Linear regression with closed-form solution
model1 = LinearRegression()
model1.train(X, Y)
print("Closed Form LR MSE:", mean_squared_error(Y, model1.predict(X)))

# Linear regression with gradient descent
model2 = LinearRegression_GD()
model2.train(X, Y, 10000, 0.0001)
print("Gradient Descent LR MSE:", mean_squared_error(Y, model2.predict(X)))
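# For reference, a minimal NumPy sketch of what the two models presumably
# compute; this is an illustration under those assumptions, not the actual
# mla implementation.

# Closed form via the normal equation: w = (X^T X)^+ X^T y
w = np.linalg.pinv(X.T @ X) @ X.T @ Y
print("Manual closed form MSE:", mean_squared_error(Y, X @ w))

# Batch gradient descent on 0.5 * MSE, assuming the (10000, 0.0001)
# arguments above mean (n_iters, learning_rate)
w_gd = np.zeros((X.shape[1], 1))
lr, n_iters = 0.0001, 10000
for _ in range(n_iters):
    grad = X.T @ (X @ w_gd - Y) / X.shape[0]  # gradient of 0.5 * MSE
    w_gd -= lr * grad
print("Manual GD MSE:", mean_squared_error(Y, X @ w_gd))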