Example #1
def sgd_simple_learning_schedule():
    """SGD use simple_learning_schedule."""
    n_epochs = 50
    t0, t1 = 5, 50  # learning schedule hyperparameters

    def learning_schedule(t):
        return t0 / (t + t1)

    theta = np.random.randn(2, 1)  # random initialization
    m = len(x_b)
    np.random.seed(42)  # rnd was undefined; seed NumPy's RNG for reproducible index sampling

    for epoch in range(n_epochs):
        for i in range(m):
            if epoch == 0 and i < 20:
                y_predict = x_new_b.dot(theta)
                style = "b-" if i > 0 else "r--"
                plt.plot(x_new, y_predict, style)
            random_index = np.random.randint(m)
            xi = x_b[random_index:random_index + 1]
            yi = y[random_index:random_index + 1]
            gradients = 2 * xi.T.dot(xi.dot(theta) - yi)
            eta = learning_schedule(epoch * m + i)
            theta = theta - eta * gradients

    logger.info('theta: {}'.format(theta))

    plt.plot(x, y, "b.")
    plt.xlabel("$x_1$", fontsize=18)
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.axis([0, 2, 0, 15])
    save_img_and_show(name="sgd_plot")
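This snippet relies on module-level objects defined elsewhere: x, y, x_b, x_new, x_new_b, plus the logger and save_img_and_show helpers. Below is a minimal sketch of a matching setup, assuming the linear dataset y = 4 + 3x + Gaussian noise referenced in Example #7; the exact original definitions are not shown, so treat these as assumptions:

import numpy as np
import matplotlib.pyplot as plt

np.random.seed(42)
m = 100
x = 2 * np.random.rand(m, 1)             # features in [0, 2], matching the plot axes
y = 4 + 3 * x + np.random.randn(m, 1)    # y = 4 + 3x + Gaussian noise
x_b = np.c_[np.ones((m, 1)), x]          # prepend the bias feature x0 = 1
x_new = np.array([[0], [2]])             # endpoints for drawing the fitted line
x_new_b = np.c_[np.ones((2, 1)), x_new]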
Example #2
def main():
    """Fit a degree-2 polynomial regression to a noisy quadratic dataset."""
    m = 100
    x = 6 * np.random.rand(m, 1) - 3
    y = 0.5 * x**2 + x + 2 + np.random.randn(m, 1)
    plt.plot(x, y, 'b.')
    plt.xlabel('$x_1$', fontsize=18)
    plt.ylabel('$y$', rotation=0, fontsize=18)
    plt.axis([-3, 3, 0, 10])
    save_img_and_show(name='Generated nonlinear and noisy dataset')

    # Fit with scikit-learn: expand the features, then an ordinary linear model
    poly_features = PolynomialFeatures(degree=2, include_bias=False)
    x_poly = poly_features.fit_transform(x)
    logger.info('x[0] {}'.format(x[0]))
    logger.info('x_poly[0] {}'.format(x_poly[0]))

    lin_reg = LinearRegression()
    lin_reg.fit(x_poly, y)
    logger.info('lin_reg.intercept_ {}'.format(lin_reg.intercept_))
    logger.info('lin_reg.coef_ {}'.format(lin_reg.coef_))

    x_new = np.linspace(-3, 3, 100).reshape(100, 1)
    x_new_poly = poly_features.transform(x_new)
    y_new = lin_reg.predict(x_new_poly)
    plt.plot(x, y, 'b.')
    plt.plot(x_new, y_new, 'r-', linewidth=2, label="Predictions")
    plt.xlabel('$x_1$', fontsize=18)
    plt.ylabel('$y$', rotation=0, fontsize=18)
    plt.legend(loc="upper left", fontsize=14)
    plt.axis([-3, 3, 0, 10])
    save_img_and_show(name="PolynomialRegressionModelPredictions")
Example #3
def polynomial_10degree():
    polynomial_regression = Pipeline([
        ('poly_features', PolynomialFeatures(degree=10, include_bias=False)),
        ('lin_reg', LinearRegression()),  # renamed from 'sgd_reg': the estimator is LinearRegression
    ])
    plot_learning_curves(polynomial_regression, x, y)
    plt.axis([0, 80, 0, 3])
    save_img_and_show(name='learning_curves_plot')
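plot_learning_curves is not defined in this snippet. Here is a minimal sketch of such a helper, following the usual recipe (fit on growing training subsets, plot train and validation RMSE); the split ratio and plot styling are assumptions:

import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

def plot_learning_curves(model, x, y):
    x_train, x_val, y_train, y_val = train_test_split(x, y, test_size=0.2)
    train_errors, val_errors = [], []
    for m in range(1, len(x_train)):
        model.fit(x_train[:m], y_train[:m])          # refit on the first m instances
        y_train_predict = model.predict(x_train[:m])
        y_val_predict = model.predict(x_val)
        train_errors.append(mean_squared_error(y_train[:m], y_train_predict))
        val_errors.append(mean_squared_error(y_val, y_val_predict))
    plt.plot(np.sqrt(train_errors), "r-+", linewidth=2, label="train")
    plt.plot(np.sqrt(val_errors), "b-", linewidth=3, label="val")
    plt.legend(loc="upper right", fontsize=14)
    plt.xlabel("Training set size", fontsize=14)
    plt.ylabel("RMSE", fontsize=14)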
Example #4
def ridge_regression():
    """Ridge Regression."""
    plt.figure(figsize=(8, 4))
    plt.subplot(121)
    plot_model(Ridge, polynomial=False, alphas=(0, 10, 100))
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.subplot(122)
    plot_model(Ridge, polynomial=True, alphas=(0, 10**-5, 1))

    save_img_and_show(name="ridge_regression_plot")
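plot_model is also assumed from elsewhere. A sketch under the assumption that it fits one model per alpha (falling back to plain LinearRegression for alpha=0, and wrapping the model in a degree-10 polynomial pipeline when polynomial=True) and plots predictions over the module-level x, y, x_new:

import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures, StandardScaler

def plot_model(model_class, polynomial, alphas, **model_kwargs):
    for alpha, style in zip(alphas, ("b-", "g--", "r:")):
        model = model_class(alpha, **model_kwargs) if alpha > 0 else LinearRegression()
        if polynomial:
            model = Pipeline([
                ("poly_features", PolynomialFeatures(degree=10, include_bias=False)),
                ("std_scaler", StandardScaler()),
                ("regul_reg", model),
            ])
        model.fit(x, y)
        y_new_regul = model.predict(x_new)
        plt.plot(x_new, y_new_regul, style, linewidth=2 if alpha > 0 else 1,
                 label=r"$\alpha = {}$".format(alpha))
    plt.plot(x, y, "b.", linewidth=3)
    plt.legend(loc="upper left", fontsize=14)
    plt.xlabel("$x_1$", fontsize=18)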
Example #5
def normal_equation_skl():
    """Linear regression model predictions use skl."""
    lin_reg = LinearRegression()
    lin_reg.fit(x, y)
    logger.info('intercept_ {}'.format(lin_reg.intercept_))
    logger.info('coef_ {}'.format(lin_reg.coef_))
    y_predict = lin_reg.predict(x_new)
    logger.info('predict {}'.format(y_predict))

    plt.plot(x_new, y_predict, "r-")
    plt.plot(x, y, "b.")
    plt.axis([0, 2, 0, 15])
    save_img_and_show(name='linear regression model predictions skl')
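A hypothetical sanity check (not in the original): predict() is just the learned affine map, so the same numbers fall out of intercept_ and coef_ directly.

# Inside normal_equation_skl, after the fit:
y_manual = lin_reg.intercept_ + x_new.dot(lin_reg.coef_.T)
assert np.allclose(y_manual, y_predict)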
Example #6
def main():
    """Main."""
    logger.info('Lasso Regression')
    plt.figure(figsize=(8, 4))
    plt.subplot(121)
    plot_model(Lasso, polynomial=False, alphas=(0, 0.1, 1))
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.subplot(122)
    # Show the effect of different alpha values on Lasso regression
    plot_model(Lasso, polynomial=True, alphas=(0, 10**-7, 1), tol=1)

    save_img_and_show(name="lasso_regression_plot")
    plt.show()
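For a standalone feel of the L1 penalty (hypothetical usage on the same module-level x, y): Lasso tends to drive the least useful weights all the way to zero, unlike Ridge, which only shrinks them.

from sklearn.linear_model import Lasso

lasso_reg = Lasso(alpha=0.1)
lasso_reg.fit(x, y.ravel())  # ravel: Lasso expects a 1-D target
print(lasso_reg.intercept_, lasso_reg.coef_)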
Example #7
def normal_equation():
    """normal_equation."""
    logger.debug(x)
    logger.debug(y)

    plt.scatter(x, y)
    save_img_and_show(name='randomly_generated_linear_dataset')

    theta_best = np.linalg.inv(x_b.T.dot(x_b)).dot(x_b.T).dot(y)
    logger.info('\nEquation y = 4 + 3*x1 + Gaussian noise. theta_best {}'.format(
        theta_best))

    y_predict = x_new_b.dot(theta_best)
    logger.info('y_predict {}'.format(y_predict))

    plt.plot(x_new, y_predict, "r-")
    plt.plot(x, y, "b.")
    plt.axis([0, 2, 0, 15])
    save_img_and_show(name='linear regression model predictions')
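Two numerically sturdier alternatives to the explicit matrix inverse, both standard NumPy calls that yield the same theta up to floating-point error:

theta_lstsq, residuals, rank, sv = np.linalg.lstsq(x_b, y, rcond=None)  # least squares via SVD
theta_pinv = np.linalg.pinv(x_b).dot(y)  # Moore-Penrose pseudoinverse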
Example #8
def high_degree_polynomial_regression():
    """High-degree Polynomial Regression."""
    for style, width, degree in (("g-", 1, 300), ("b--", 2, 2), ("r-+", 2, 1)):
        polybig_features = PolynomialFeatures(degree=degree,
                                              include_bias=False)
        std_scaler = StandardScaler()
        lin_reg = LinearRegression()
        polynomial_regression = Pipeline([
            ("poly_features", polybig_features),
            ("std_scaler", std_scaler),
            ("lin_reg", lin_reg),
        ])
        polynomial_regression.fit(x, y)
        y_newbig = polynomial_regression.predict(x_new)
        plt.plot(x_new, y_newbig, style, label=str(degree), linewidth=width)

    plt.plot(x, y, "b.", linewidth=3)
    plt.legend(loc="upper left")
    plt.xlabel("$x_1$", fontsize=18)
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.axis([-3, 3, 0, 10])
    save_img_and_show(name='high_degree_polynomials_plot')
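Note the StandardScaler step: degree-300 features span wildly different scales, and standardizing keeps the linear solve numerically manageable. In the resulting plot, the degree-300 curve wiggles to chase the noise (overfitting), degree 1 is too rigid (underfitting), and degree 2 tracks the quadratic data.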
Example #9
def learning_curves():
    lin_reg = LinearRegression()
    plot_learning_curves(lin_reg, x, y)
    plt.axis([0, 80, 0, 3])
    save_img_and_show(name='learning_curves')
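Since the data is quadratic, a straight-line model underfits: expect both the training and validation RMSE curves to plateau close together at a relatively high error, the classic learning-curve signature of underfitting.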