lr = LinearRegression(X, y, tolerance=1e-4)
lr.stochastic_gradient_descent(0.03, 1e3)
print('LinearRegression, theta of SGD: ', lr.theta.T, ', num of iterations: ', len(lr.loss_history))
show_result(lr, "Method: stochastic_gradient_descent")


# invoke the general Newton method (an illustrative step sketch follows this block)
lr = LinearRegression(X, y, tolerance=1e-4)
lr.newton_general()
print('LinearRegression, theta of newton_general: ', lr.theta.T, ', num of iterations: ', len(lr.loss_history))
show_result(lr, "Method: newton_general")


# invoke Newton's method with Armijo line search (the Armijo condition is sketched after this block)
lr = LinearRegression(X, y, tolerance=1e-4)
lr.newton_armijo()
print('LinearRegression, theta of newton_armijo: ', lr.theta.T, ', num of iterations: ', len(lr.loss_history))
show_result(lr, "Method: newton_armijo")

# locally weighted linear regression (a closed-form per-point sketch follows the loop)
taus = np.linspace(1, 0, 5, endpoint=False)
for tau in taus:
    _, y_estimate = lr.fit_local_weight_lr(X, tau)
    plt.title("local weight linear regression with tau=" + str(tau))
    plt.plot(lr.features[:, :-1], lr.labels, 'bo')
    plt.plot(X, y_estimate, 'r-')
    plt.show()
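
# fit_local_weight_lr presumably solves a weighted least-squares problem per query
# point; a minimal per-point sketch, where the Gaussian kernel, the bias column in
# X_b, and the use of pinv are assumptions rather than details taken from the class.
def _lwlr_point_sketch(X_b, y, x_query, tau):
    """Predict y at x_query via locally weighted least squares (illustrative only)."""
    # Gaussian weights: training points near x_query dominate the local fit
    w = np.exp(-np.sum((X_b - x_query) ** 2, axis=1) / (2.0 * tau ** 2))
    W = np.diag(w)
    theta = np.linalg.pinv(X_b.T @ W @ X_b) @ (X_b.T @ W @ y)
    return x_query @ theta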

# Test for Logistic Regression
# load data
data = pd.read_csv("./apple_juice.dat", header=None, sep=r"\s+")