Example #1
print("R-Squared Score:", regressor.r_score(X_test, Y_test))

# In[] Visualize the Training Set (plain matplotlib version, kept commented out for reference; replaced by HappyML's model_drawer below)
#import matplotlib.pyplot as plt
#
#plt.scatter(X_train, Y_train, color="red")
#plt.plot(X_train, regressor.predict(X_train), color="blue")
#plt.title("Salary vs. Experience")
#plt.xlabel("Experience")
#plt.ylabel("Salary")
#plt.show()

from HappyML import model_drawer as md

sample_data = (X_train, Y_train)
model_data = (X_train, regressor.predict(X_train))
md.sample_model(sample_data=sample_data,
                model_data=model_data,
                title="訓練集樣本點 vs. 預測模型",   # "Training-set samples vs. fitted model"
                font="DFKai-sb")
md.sample_model(sample_data=(X_test, Y_test),
                model_data=(X_test, Y_pred),
                title="測試集樣本點 vs. 預測模型",   # "Test-set samples vs. fitted model"
                font="DFKai-sb")

# In[] Test for Linearity of Features
#from HappyML import model_drawer as md
#
#for i in range(X_train.shape[1]):
#    md.sample_model(sample_data=(X_train[:, i], Y_train), model_data=None, title="Linearity of Column {}".format(i))
Example #2
# Plot the baseline simple (degree-1) fit Y_simple from reg_simple against the raw data
md.sample_model(sample_data=(X, Y), model_data=(X, Y_simple))
print("R-Squared of Simple Regression:", reg_simple.r_score(x_test=X, y_test=Y))

# In[]
from sklearn.preprocessing import PolynomialFeatures

deg = 12
poly_reg = PolynomialFeatures(degree=deg)
X_poly = poly_reg.fit_transform(X)
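# PolynomialFeatures expands each sample x into the terms [1, x, x^2, ..., x^deg]
# (include_bias=True by default), so with one input feature and degree=12,
# X_poly has deg + 1 = 13 columns. A quick sanity check (sketch):
#print("X_poly shape:", X_poly.shape)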

# In[]
import pandas as pd

# Fit a linear regressor on the expanded polynomial features
# (SimpleRegressor comes from HappyML; its import is assumed to appear earlier in the full script)
regressor = SimpleRegressor()
regressor.fit(X_poly, Y)
Y_predict = regressor.predict(x_test=pd.DataFrame(X_poly))

md.sample_model(sample_data=(X, Y), model_data=(X, Y_predict))

# In[]
from HappyML.performance import rmse

print("Degree: {} RMSE:{:.4f}".format(deg, rmse(Y, Y_predict)))

# In[]
from HappyML.performance import rmse

rmse_linear = rmse(Y, Y_simple)
rmse_poly = rmse(Y, Y_predict)

if rmse_linear < rmse_poly:
    print("Simple Linear Regression fits better (RMSE {:.4f} vs. {:.4f})".format(rmse_linear, rmse_poly))
else:
    print("Polynomial Regression fits better (RMSE {:.4f} vs. {:.4f})".format(rmse_poly, rmse_linear))
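
# In[] Optional: sweep the polynomial degree (a sketch reusing the pipeline above;
# assumes PolynomialFeatures, SimpleRegressor, rmse and pd are available as in the cells above).
# Higher degrees push the training RMSE down but tend to overfit the sample points.
#for d in range(1, 13):
#    Xp = PolynomialFeatures(degree=d).fit_transform(X)
#    reg_d = SimpleRegressor()
#    reg_d.fit(Xp, Y)
#    print("Degree: {} RMSE: {:.4f}".format(d, rmse(Y, reg_d.predict(x_test=pd.DataFrame(Xp)))))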