Example #1
# Fragment: the head of this call was cut off. A scatter of the fitted
# Lasso coefficients, colored by sign, is assumed:
plt.scatter(range(len(lasso.coef_)), lasso.coef_,
            c=np.sign(lasso.coef_),
            cmap="bwr_r")

######## Yellowbrick

from yellowbrick.regressor import AlphaSelection, ResidualsPlot, PredictionError
from sklearn.linear_model import LassoCV

### Find optimal alpha

alphas = np.logspace(-10, 1, 400)

lasso_alpha = LassoCV(alphas=alphas)
lasso_yb = AlphaSelection(lasso_alpha)
lasso_yb.fit(X, y)
lasso_yb.show()  # poof() was deprecated in Yellowbrick 1.0 in favor of show()
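
### Read the selected alpha (sketch, not in the original)

# After AlphaSelection.fit, the wrapped LassoCV is fitted, so its alpha_
# attribute holds the chosen penalty. The train/test split and the Lasso
# model used by the plots below are assumed here, since the original
# snippet does not define them.
print("selected alpha:", lasso_alpha.alpha_)

from sklearn.linear_model import Lasso
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
lasso = Lasso(alpha=lasso_alpha.alpha_).fit(X_train, y_train)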

### RVF plot

lasso_yb = ResidualsPlot(lasso, hist=True)
lasso_yb.fit(X_train, y_train)
lasso_yb.score(X_test, y_test)
lasso_yb.show()
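
### Capturing the test R^2 (sketch, not in the original)

# score, called above, returns the estimator's R^2 on the data it is given
# as well as drawing the residuals, so the value can also be kept:
r2 = lasso_yb.score(X_test, y_test)
print("test R^2:", r2)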

### Prediction Error

lasso_yb = PredictionError(lasso)  # PredictionError takes no hist parameter
lasso_yb.fit(X_train, y_train)
lasso_yb.score(X_test, y_test)
lasso_yb.show()
Example #2
# Fragment: the head of this call was cut off. A scatter of the fitted
# Ridge coefficients, colored by sign, is assumed:
plt.scatter(range(len(ridge.coef_)), ridge.coef_,
            c=np.sign(ridge.coef_),
            cmap="bwr_r")

######## Yellowbrick

from yellowbrick.regressor import AlphaSelection, ResidualsPlot, PredictionError
from sklearn.linear_model import RidgeCV

### Find optimal alpha

alphas = np.logspace(-10, 1, 400)

ridge_alpha = RidgeCV(alphas=alphas)
ridge_yb = AlphaSelection(ridge_alpha)
ridge_yb.fit(X, y)
ridge_yb.show()
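
### Read the selected alpha (sketch, not in the original)

# As in the Lasso example, the fitted RidgeCV exposes the chosen penalty.
# The Ridge model and split used by the plots below are assumed, since the
# original snippet does not define them.
print("selected alpha:", ridge_alpha.alpha_)

from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
ridge = Ridge(alpha=ridge_alpha.alpha_).fit(X_train, y_train)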

### RVF plot

ridge_yb = ResidualsPlot(ridge, hist=True)
ridge_yb.fit(X_train, y_train)
ridge_yb.score(X_test, y_test)
ridge_yb.show()

### Prediction Error

ridge_yb = PredictionError(ridge)  # PredictionError takes no hist parameter
ridge_yb.fit(X_train, y_train)
ridge_yb.score(X_test, y_test)
ridge_yb.show()
Example #3
# Fragment: alpha selection. visualizer is assumed to be an AlphaSelection
# wrapping a RidgeCV, built in the lines cut off above.
visualizer.fit(xtrain, ytrain)
visualizer.show()
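
# Rather than reading the optimum off the plot, the fitted CV estimator
# inside the visualizer exposes it directly (assuming an AlphaSelection
# over a RidgeCV, as noted above):
print("selected alpha:", visualizer.estimator.alpha_)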

# Optimal model
optimal_alpha = 4.103
ridge_reg = RidgeCV(alphas=np.array([optimal_alpha]))
ridge_reg.fit(xtrain, ytrain)  # fit returns self; no need to bind it to x
# print("Coefficients: ", ridge_reg.coef_)
y_pred = ridge_reg.predict(xtest)
err = mean_squared_error(ytest, y_pred)
print("MSE for optimal model: ", err)

# Yellowbrick Regressor - Plot error
visualizer = PredictionError(ridge_reg)
visualizer.fit(xtrain, ytrain)
visualizer.score(xtest, ytest)
visualizer.show()

# SHAP Values
explainer = shap.LinearExplainer(ridge_reg, xtrain)
shap_values = explainer.shap_values(xtest)
shap.summary_plot(shap_values, xtest, plot_type='bar')
feature_indices = [
    227, 5, 0, 228, 133, 101, 220, 208, 2, 70, 1, 40, 207, 229, 215, 79, 4,
    125, 100, 98
]
for i in feature_indices:
    print("feature ", i, ": ", xtrain_raw.columns[i])

# # Plot betas by lambda
# fig, ax = plt.subplots(figsize=(10, 5))
Example #4
# Fragment: heatmap of cross-validation results (res is assumed to be a
# grid of CV scores computed in the lines cut off above)
sns.heatmap(res, annot=True, cmap="YlGnBu")

######## Yellowbrick

from yellowbrick.regressor import AlphaSelection, ResidualsPlot, PredictionError
from sklearn.linear_model import ElasticNetCV

### Find optimal alpha

alphas = np.logspace(-10, 1, 400)

elastic_alpha = ElasticNetCV(alphas=alphas)
elastic_yb = AlphaSelection(elastic_alpha)
elastic_yb.fit(X, y)
elastic_yb.show()
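
### Read the selected parameters (sketch, not in the original)

# ElasticNetCV tunes only alpha here (l1_ratio defaults to 0.5), but both
# chosen values are available after the fit. The ElasticNet model and split
# used by the plots below are assumed, since the original snippet does not
# define them.
print("selected alpha:", elastic_alpha.alpha_)
print("selected l1_ratio:", elastic_alpha.l1_ratio_)

from sklearn.linear_model import ElasticNet
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
elastic = ElasticNet(alpha=elastic_alpha.alpha_,
                     l1_ratio=elastic_alpha.l1_ratio_).fit(X_train, y_train)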

### RVF plot

elastic_yb = ResidualsPlot(elastic, hist=True)
elastic_yb.fit(X_train, y_train)
elastic_yb.score(X_test, y_test)
elastic_yb.show()

### Prediction Error

elastic_yb = PredictionError(elastic)  # PredictionError takes no hist parameter
elastic_yb.fit(X_train, y_train)
elastic_yb.score(X_test, y_test)
elastic_yb.show()