# One-Hot Encoding for the categorical column, then drop one dummy
# column to avoid the dummy variable trap
X = pp.onehot_encoder(X, columns=[3])
X = pp.remove_columns(X, [3])
#X = pp.onehot_encoder(X, columns=[3], remove_trap=True)
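# The same encoding with plain pandas (a sketch; assumes X is a
# DataFrame whose categorical column is labeled 3). drop_first=True
# keeps k-1 dummies per category, which is what remove_trap=True
# appears to do in one call:
#import pandas as pd
#X = pd.get_dummies(X, columns=[3], drop_first=True)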

# Split Training vs. Testing Set
X_train, X_test, Y_train, Y_test = pp.split_train_test(X, Y, train_size=0.8)
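# The same split with scikit-learn directly (a sketch; HappyML's
# split_train_test is assumed to wrap something equivalent):
#from sklearn.model_selection import train_test_split
#X_train, X_test, Y_train, Y_test = train_test_split(X, Y, train_size=0.8)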

# Feature Scaling (optional)
#X_train, X_test = pp.feature_scaling(fit_ary=X_train, transform_arys=(X_train, X_test))
#Y_train, Y_test = pp.feature_scaling(fit_ary=Y_train, transform_arys=(Y_train, Y_test))
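# Feature scaling here means standardization, z = (x - mean) / std,
# fitted on the training set only. The sklearn equivalent (a sketch;
# HappyML's internals are assumed):
#from sklearn.preprocessing import StandardScaler
#scaler = StandardScaler().fit(X_train)
#X_train, X_test = scaler.transform(X_train), scaler.transform(X_test)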

# In[] Create Linear Regressor
from HappyML.regression import SimpleRegressor

simple_reg = SimpleRegressor()
Y_pred_simple = simple_reg.fit(X_train, Y_train).predict(X_test)

# R-Squared never decreases as features are added in multiple linear regression --> use Adjusted R-Squared instead
print("Goodness of Model (R-Squared Score):",
      simple_reg.r_score(X_test, Y_test))
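
# Adjusted R-Squared penalizes additional features:
#   Adj R2 = 1 - (1 - R2) * (n - 1) / (n - p - 1)
# A quick sketch from the score above (n = samples, p = features;
# requires n > p + 1):
n, p = X_test.shape
r2 = simple_reg.r_score(X_test, Y_test)
print("Adjusted R-Squared:", 1 - (1 - r2) * (n - 1) / (n - p - 1))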

# In[] Multiple Linear Regression

## Add a constant column X0 for the intercept C0
#import statsmodels.tools.tools as smtools
#X_train = smtools.add_constant(X_train)
#
## Trial-and-error feature selection with Backward Elimination
#import statsmodels.api as sm
#
#features = [0, 1, 2, 3, 4, 5]
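#
## A sketch of the elimination loop: refit OLS, drop the feature with
## the highest p-value, stop once all p-values <= 0.05 (the threshold
## is an assumption):
#while len(features) > 1:
#    ols = sm.OLS(endog=Y_train, exog=X_train.iloc[:, features]).fit()
#    if ols.pvalues.max() <= 0.05:
#        break
#    features.pop(int(ols.pvalues.argmax()))
#print("Selected features:", features)
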
Example #2

# Feature Scaling
Y_train, Y_test = pp.feature_scaling(fit_ary=Y_train,
                                     transform_arys=(Y_train, Y_test))

# In[] Fitting Simple Regressor
# from sklearn.linear_model import LinearRegression

# regressor = LinearRegression()
# regressor.fit(X_train, Y_train)
# Y_pred = regressor.predict(X_test)

# R_Score = regressor.score(X_test, Y_test)

from HappyML.regression import SimpleRegressor

regressor = SimpleRegressor()
Y_pred = regressor.fit(X_train, Y_train).predict(X_test)
print("R-Squared Score:", regressor.r_score(X_test, Y_test))

# In[] Visualize the Training Set
#import matplotlib.pyplot as plt
#
#plt.scatter(X_train, Y_train, color="red")
#plt.plot(X_train, regressor.predict(X_train), color="blue")
#plt.title("Salary vs. Experience")
#plt.xlabel("Experience")
#plt.ylabel("Salary")
#plt.show()

from HappyML import model_drawer as md

sample_data = (X_train, Y_train)
model_data = (X_train, regressor.predict(X_train))
md.sample_model(sample_data=sample_data, model_data=model_data)
Example #3

# Training / Testing Set
X_train, X_test, Y_train, Y_test = pp.split_train_test(x_ary=X,
                                                       y_ary=Y,
                                                       train_size=0.8)

# Feature Scaling
#X = pp.feature_scaling(fit_ary=X, transform_arys=(X,))
#Y = pp.feature_scaling(fit_ary=Y, transform_arys=(Y,))

# In[] Linear Regression as a baseline for comparison
from HappyML.regression import SimpleRegressor
import HappyML.model_drawer as md

reg_simple = SimpleRegressor()
# NOTE: fit on the full dataset here; the train/test split above is not used
Y_simple = reg_simple.fit(x_train=X, y_train=Y).predict(x_test=X)

md.sample_model(sample_data=(X, Y), model_data=(X, Y_simple))
print("R-Squared of Simple Regression:", reg_simple.r_score(x_test=X,
                                                            y_test=Y))

# In[] Polynomial Regression
#from sklearn.preprocessing import PolynomialFeatures
#from HappyML.performance import rmse
#import pandas as pd
#
#deg = 5
#poly_reg = PolynomialFeatures(degree=deg)
#X_poly = pd.DataFrame(poly_reg.fit_transform(X))
#
#regressor = SimpleRegressor()
#Y_poly = regressor.fit(X_poly, Y).predict(X_poly)
## rmse(y_real, y_pred) argument order is assumed here
#print("RMSE of Polynomial Regression:", rmse(Y, Y_poly))
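#
## RMSE by hand as a cross-check, RMSE = sqrt(mean((y - y_hat)^2));
## a sketch, assuming Y and Y_poly are array-like:
#import numpy as np
#err = np.ravel(Y) - np.ravel(Y_poly)
#print("RMSE (by hand):", float(np.sqrt(np.mean(err ** 2))))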
Example #4

# In[]
import HappyML.preprocessor as pp

dataset = pp.dataset("C:/Users/henry/Desktop/Python Training/Python機器學習/範例原始碼&「快樂版」函式庫/Ch05 Regression/Position_Salaries.csv")

X, Y = pp.decomposition(dataset, x_columns=[1], y_columns=[2])

X_train, X_test, Y_train, Y_test = pp.split_train_test(x_ary=X, y_ary=Y, train_size=0.8)

# In[]
from HappyML.regression import SimpleRegressor
import HappyML.model_drawer as md

reg_simple = SimpleRegressor()
# NOTE: fit on the full dataset here; the train/test split above is not used
Y_simple = reg_simple.fit(x_train=X, y_train=Y).predict(X)

md.sample_model(sample_data=(X, Y), model_data=(X, Y_simple))
print("R-Squared of Simple Regression:", reg_simple.r_score(x_test=X, y_test=Y))

# In[]
from sklearn.preprocessing import PolynomialFeatures
import pandas as pd

# Expand the single feature into polynomial terms [1, x, ..., x^deg]
deg = 12
poly_reg = PolynomialFeatures(degree=deg)
X_poly = pd.DataFrame(poly_reg.fit_transform(X))
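
# PolynomialFeatures turns the single feature x into the columns
# [1, x, x^2, ..., x^deg], so the linear regressor below actually fits
# a degree-12 polynomial in x. Quick shape check:
print("X_poly shape:", X_poly.shape)  # expected (n_samples, deg + 1)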

# In[]
regressor = SimpleRegressor()
Y_poly = regressor.fit(X_poly, Y).predict(X_poly)

md.sample_model(sample_data=(X, Y), model_data=(X, Y_poly))
print("R-Squared of Polynomial Regression:", regressor.r_score(x_test=X_poly, y_test=Y))
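
# In[] Degree selection (a sketch): with only a handful of rows in
# Position_Salaries.csv, degree=12 can interpolate the points almost
# exactly (R-Squared near 1), a classic sign of overfitting. Compare
# a few degrees before settling on one:
for d in (2, 3, 4, 12):
    Xp = pd.DataFrame(PolynomialFeatures(degree=d).fit_transform(X))
    reg_d = SimpleRegressor().fit(Xp, Y)
    print("degree", d, "R-Squared:", reg_d.r_score(x_test=Xp, y_test=Y))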