def update_plot(self, plot_state):
    data = plot_state['inputs']
    X, y = np.array(data['x']), np.array(data['y'])
    # Build a polynomial-basis linear regressor with an L2 (ridge) penalty
    # taken from the current widget state.
    regressor = LinearRegression(
        basis_function=ScalarBasisFunctions.Polynomial(
            plot_state['Polynomial Degree']),
        l2_cost=plot_state['L2 Weight Penalty'])
    regressor.fit(X, y)
    # Evaluate the fitted model on a dense grid and update the plotted line.
    inputs = np.linspace(*X_RANGE, self.PLOT_POINTS)
    self.fit_line.data_source.data = dict(x=inputs,
                                          y=regressor.predict(inputs))
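The LinearRegression, ScalarBasisFunctions, and l2_cost used above come from the surrounding project and are not shown here. A minimal NumPy sketch of polynomial-basis regression with an L2 (ridge) penalty, under the assumption that this is roughly what the fit does (the helper names below are made up for illustration):

import numpy as np

def polynomial_design(x, degree):
    # Vandermonde-style design matrix with columns x^0 .. x^degree.
    return np.vander(np.asarray(x, dtype=float), degree + 1, increasing=True)

def fit_ridge(x, y, degree, l2_penalty):
    # Closed-form ridge solution: (Phi^T Phi + lambda * I)^-1 Phi^T y.
    Phi = polynomial_design(x, degree)
    A = Phi.T @ Phi + l2_penalty * np.eye(Phi.shape[1])
    return np.linalg.solve(A, Phi.T @ y)

rng = np.random.default_rng(0)
x = np.linspace(-1, 1, 25)
y = x ** 3 - 0.5 * x + rng.normal(0, 0.05, x.shape)
w = fit_ridge(x, y, degree=3, l2_penalty=0.1)
y_hat = polynomial_design(x, 3) @ w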
Example #2
def generate_regression_predictions():
  X, Y = get_regression_training_data()
  test_X = get_regression_testing_data()

  lr = LinearRegression()
  lr.fit(X, Y)
  # Format each predicted finish time (in seconds) as an H:MM:SS string.
  predictions = [str(datetime.timedelta(seconds=int(s))) for s in lr.predict(test_X)]

  for i, x in enumerate(test_X):
    # Runners without a full-marathon record are marked with the sentinel -1.
    if x[2] == -1:
      predictions[i] = -1

  return predictions
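The only non-obvious step above is the datetime.timedelta formatting, which turns a second count into an H:MM:SS string:

import datetime

print(str(datetime.timedelta(seconds=9876)))    # '2:44:36'
print(str(datetime.timedelta(seconds=90000)))   # '1 day, 1:00:00'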
Example #3
def init_data():
    # Load the power-plant dataset and convert it to NumPy arrays.
    X, y = import_power_plant_data()
    X, y = X.to_numpy(), y.to_numpy()

    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, shuffle=True, random_state=1234)
    print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

    # Train with stochastic gradient descent; fit() returns the MSE per epoch.
    opt = SGD(lr=0.01)
    epoch = 10000
    regressor = LinearRegression(opt, epoch=epoch)
    x_plot = list(range(1, epoch + 1))
    all_mse = regressor.fit(X_train, y_train)

    # Evaluate on the held-out test set.
    predicted = regressor.predict(X_test)
    mse_value = Metrics.mse(y_test, predicted)
    print(mse_value)

    # Plot the training error curve.
    Plot.plot_time_series(x_plot, all_mse, "mse_plot", "number of iterations",
                          "Mean Square Error (MSE)", "MSE vs Number of iterations")

    plt.show()
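The SGD optimizer and the fit() call that returns one MSE value per epoch belong to the project above. A self-contained sketch of that behavior using plain full-batch gradient descent in NumPy (a stand-in, not the library's implementation):

import numpy as np

class GDLinearRegression:
    def __init__(self, lr=0.01, epochs=1000):
        self.lr, self.epochs = lr, epochs

    def fit(self, X, y):
        n, d = X.shape
        self.w, self.b = np.zeros(d), 0.0
        mse_history = []
        for _ in range(self.epochs):
            err = X @ self.w + self.b - y
            mse_history.append(float(np.mean(err ** 2)))
            # Gradient step on the mean squared-error loss.
            self.w -= self.lr * (2.0 / n) * (X.T @ err)
            self.b -= self.lr * 2.0 * err.mean()
        return mse_history

    def predict(self, X):
        return X @ self.w + self.b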
Example #4
def main():
    parser = argparse.ArgumentParser(description='Linear Regression test')
    parser.add_argument('-m',
                        '--method',
                        type=str,
                        default='ols',
                        help='model method: ols or grad_descent')
    parser.add_argument('-n',
                        '--n_iter',
                        type=int,
                        default=50,
                        help='number of iterations for grad_descent')
    args = parser.parse_args()
    method = args.method
    n_iter = args.n_iter

    X, y, m, bias = \
        generate_linear_data(n_samples=1000, n_features=10, bias=10)
    X_train, X_test, y_train, y_test = split_dataset(X, y)
    print("Training size: %s, Test size %s" % (len(X_train), len(X_test)))
    print("-" * 20)

    # Fit and predict
    model = LinearRegression(n_iter=n_iter)
    model.fit(X_train, y_train, method)
    y_pred = model.predict(X_test)
    print("-" * 20)

    # Scoring
    model.score(y_test, y_pred)
    print("-" * 20)
    print("True coefs: ", np.insert(m, 0, bias))
    print("Model coefs:", model.beta_hat)
    print("-" * 20)

    # Plotting
    plot_regression_residual(y_test, y_pred, bins=int(len(X_train) / 20))
    if method == 'grad_descent':
        plot_iteration_vs_cost(n_iter, model.cost_h)
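generate_linear_data, split_dataset, and the model's 'ols' path are defined elsewhere in the project. Under the assumption that 'ols' solves the least-squares problem with an intercept column, recovering the coefficients can be sketched directly in NumPy:

import numpy as np

def ols_fit(X, y):
    # Prepend an intercept column and solve min ||X_b @ beta - y||^2.
    X_b = np.hstack([np.ones((X.shape[0], 1)), X])
    beta_hat, *_ = np.linalg.lstsq(X_b, y, rcond=None)
    return beta_hat

rng = np.random.default_rng(0)
m, bias = rng.normal(size=10), 10.0
X = rng.normal(size=(1000, 10))
y = X @ m + bias + rng.normal(scale=0.1, size=1000)
print(ols_fit(X, y))    # close to np.insert(m, 0, bias)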
Example #5
import random

import matplotlib
import matplotlib.pyplot as plt
import numpy as np

from models.linear_regression import LinearRegression

# Use custom styling from file
matplotlib.rc_file('../plotstyle')

# Generate data
random.seed(0)
X = np.array([i for i in range(20)], dtype='float32')
X = np.reshape(X, (20, 1))
X = np.concatenate((np.ones((20, 1), dtype='float32'), X), axis=1)

y = np.array([(i + random.uniform(-2, 2)) for i in range(20)], dtype='float32')
y = np.reshape(y, (20, 1))

# Fit model to data
model = LinearRegression(data=X, labels=y)
weights = model.fit()

# Generate line of best fit
x_bf = np.linspace(0, 20, dtype='float32')
y_bf = np.array([(weights[0][0] + x * weights[1][0]) for x in x_bf],
                dtype='float32')

plt.scatter(X[:, 1], y, color='b', s=50, label='Samples')
plt.plot(x_bf, y_bf, color='r', label='Fitted Model')
plt.xlabel('$x$')
plt.ylabel('$y$')
plt.title('Linear Regression')
plt.legend()
plt.show()
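With the ones column already baked into X, the fit above presumably reduces to ordinary least squares; the same weights can be recovered directly with np.linalg.lstsq (a sketch with synthetic, noise-free data, not the model's actual code):

import numpy as np

# Same layout as above: a ones column for the intercept plus the raw inputs.
X_demo = np.hstack([np.ones((20, 1)), np.arange(20, dtype='float32').reshape(20, 1)])
y_demo = 1.0 + 2.0 * X_demo[:, 1:2]
weights_demo, *_ = np.linalg.lstsq(X_demo, y_demo, rcond=None)
print(weights_demo.ravel())    # approximately [1., 2.]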