plt.interactive(False)
    plt.show(block=True)
    plt.show()

# Test for Linear Regression
# Load the LSD dataset: whitespace-separated columns, no header row.
data = pd.read_csv("./lsd.dat", header=None, sep=r"\s+")
# DataFrame.as_matrix() was deprecated in pandas 0.23 and removed in 1.0;
# to_numpy() is the supported replacement and yields the same ndarray.
data = data.to_numpy()
X = data[:, 0: -1]  # feature matrix: every column except the last
y = data[:, -1]     # target vector: the last column


# Batch gradient descent: one update per pass over the full dataset.
# (Method name 'batch_gradient_decent' is the project API spelling.)
bgd_model = LinearRegression(X, y, tolerance=1e-4)
bgd_model.batch_gradient_decent(0.05, 1e5)  # learning rate 0.05, max 1e5 iterations
print('LinearRegression, theta of BGD: ', bgd_model.theta.T, ', num of iterations: ', len(bgd_model.loss_history))
show_result(bgd_model, "Method: batch_gradient_decent")


# Stochastic gradient descent: parameter update per training sample.
lr = LinearRegression(X, y, tolerance=1e-4)
lr.stochastic_gradient_descent(0.03, 1e3)  # learning rate 0.03, max 1e3 iterations
# Label fixed: original printed 'SGB', a typo for SGD (stochastic gradient descent).
print('LinearRegression, theta of SGD: ', lr.theta.T, ', num of iterations: ', len(lr.loss_history))
show_result(lr, "Method: stochastic_gradient_descent")


# Newton's method: second-order updates, no learning rate required.
# Keep the name 'lr' — code after this chunk may reference it.
lr = LinearRegression(X, y, tolerance=1e-4)
lr.newton_general()
fitted_theta = lr.theta.T
iteration_count = len(lr.loss_history)
print('LinearRegression, theta of newton_general: ', fitted_theta, ', num of iterations: ', iteration_count)