Example #1
0
def deserialize_elastic_regressor(model_dict):
    """Rebuild a fitted ElasticNet regressor from its dict serialization.

    Parameters
    ----------
    model_dict : dict
        Expected keys: "params" (constructor keyword arguments), "coef_",
        "alpha", "n_iter_", "intercept_".

    Returns
    -------
    ElasticNet
        A regressor with the stored fitted attributes restored.
    """
    # BUG FIX: the params dict must be unpacked as keyword arguments;
    # passing it positionally would assign the entire dict to `alpha`.
    model = ElasticNet(**model_dict["params"])

    model.coef_ = np.array(model_dict["coef_"])
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` (float64 dtype) is the drop-in replacement.
    model.alpha = np.array(model_dict["alpha"]).astype(float)

    # n_iter_ and intercept_ may be serialized as a scalar or a list,
    # so restore whichever form was stored.
    if isinstance(model_dict["n_iter_"], list):
        model.n_iter_ = np.array(model_dict["n_iter_"])
    else:
        model.n_iter_ = int(model_dict["n_iter_"])

    if isinstance(model_dict["intercept_"], list):
        model.intercept_ = np.array(model_dict["intercept_"])
    else:
        model.intercept_ = float(model_dict["intercept_"])

    return model
def deserialize_elastic_regressor(model_dict):
    """Rebuild a fitted ElasticNet regressor from its dict serialization.

    NOTE(review): this is a duplicate of the definition above; at runtime
    this later definition shadows the earlier one. Consider removing one.

    Parameters
    ----------
    model_dict : dict
        Expected keys: 'params' (constructor keyword arguments), 'coef_',
        'alpha', 'n_iter_', 'intercept_'.

    Returns
    -------
    ElasticNet
        A regressor with the stored fitted attributes restored.
    """
    # BUG FIX: unpack the params dict as keyword arguments; passing it
    # positionally would assign the whole dict to `alpha`.
    model = ElasticNet(**model_dict['params'])

    model.coef_ = np.array(model_dict['coef_'])
    # BUG FIX: np.float was removed in NumPy 1.24; use the builtin float.
    model.alpha = np.array(model_dict['alpha']).astype(float)

    # These attributes may be stored as a scalar or a list.
    if isinstance(model_dict['n_iter_'], list):
        model.n_iter_ = np.array(model_dict['n_iter_'])
    else:
        model.n_iter_ = int(model_dict['n_iter_'])

    if isinstance(model_dict['intercept_'], list):
        model.intercept_ = np.array(model_dict['intercept_'])
    else:
        model.intercept_ = float(model_dict['intercept_'])

    return model







#########################################
# ELASTIC NET MODEL

from sklearn.linear_model import ElasticNet

# Configure the hyperparameters directly in the constructor instead of
# assigning attributes afterwards -- equivalent, since nothing is used
# between construction and fit().
regr = ElasticNet(alpha=1.9, l1_ratio=0.65, random_state=0)

regr.fit(X_train, y_train)

# score() on a regressor returns the coefficient of determination of the
# predictions against the held-out targets.
y_score = regr.score(X_test, y_test)

# The score is directly comparable to R-Square
print(y_score)

#########
Example #4
0
            label='Test data')
# Finish the residual plot started above: label the axes, add the legend,
# and draw a zero-residual reference line over the plotted x-range.
plt.xlabel('Predicted values')
plt.ylabel('Residuals')
plt.legend(loc='upper left')
# Horizontal line at residual == 0 marks a perfect prediction.
plt.hlines(y=0, xmin=-10, xmax=50, color='black', lw=2)
plt.xlim([-10, 50])
plt.show()

# Setup the array of alphas and lists to store scores
alpha_space = np.logspace(-4, 0, 50)
MSE3_test_scores = []
MSE3_train_scores = []
R23_test_scores = []
R23_train_scores = []
# Sweep the regularization strength and record train/test MSE and R^2.
for alpha in alpha_space:
    sl.alpha = alpha
    # NOTE(review): this fits on the full (X, y) but scores on the
    # train/test splits, which leaks test data into training -- confirm
    # whether sl.fit(X_train, y_train) was intended.
    sl.fit(X, y)
    y_train_pred3 = sl.predict(X_train)
    y_test_pred3 = sl.predict(X_test)
    MSE3_test_scores.append(mean_squared_error(y_test, y_test_pred3))
    MSE3_train_scores.append(mean_squared_error(y_train, y_train_pred3))
    R23_test_scores.append(r2_score(y_test, y_test_pred3))
    R23_train_scores.append(r2_score(y_train, y_train_pred3))

plt.plot(alpha_space, MSE3_test_scores)
plt.xlabel('alpha_space')
plt.ylabel('MSE3_test_scores')
plt.show()

plt.plot(alpha_space, MSE3_train_scores)
plt.xlabel('alpha_space')
plt.ylabel('MSE3_train_scores')
# BUG FIX: the next two statements were indented as if inside a block,
# which is an IndentationError at module level -- dedented to run after
# the plot is drawn. The filename label is also corrected: the swept
# parameter is alpha, not l1_ratio (str(alpha) here is the last value
# left over from the loop above).
plt.savefig('ElasticNet alpha=' + str(alpha) + '10_09.png', dpi=300)
plt.show()

# Sweep 50 alphas (log-spaced over [1e-4, 1]) and record the mean and
# standard deviation of 10-fold cross-validated scores for each.
alpha_space = np.logspace(-4, 0, 50)
elanet_scores = []
elanet_scores_std = []

# Create a ridge regressor: ridge
#elanet = ElasticNet(normalize=True)
# NOTE(review): the `normalize` parameter was deprecated in scikit-learn
# 1.0 and removed in 1.2 -- on modern versions this line raises; replace
# with a Pipeline(StandardScaler(), ElasticNet(...)). Confirm the target
# sklearn version before running.
elanet = ElasticNet(alpha=1.0, l1_ratio=0.5, normalize=True)

# Compute scores over range of alphas
for alpha in alpha_space:

    # Specify the alpha value to use: ridge.alpha
    elanet.alpha = alpha

    # Perform 10-fold CV: ridge_cv_scores
    elanet_cv_scores = cross_val_score(elanet, X_train, y_train, cv=10)

    # Append the mean of ridge_cv_scores to ridge_scores
    elanet_scores.append(np.mean(elanet_cv_scores))

    # Append the std of ridge_cv_scores to ridge_scores_std
    elanet_scores_std.append(np.std(elanet_cv_scores))

# Display the plot
# `display_plot` is a helper defined elsewhere in the project (presumably
# plots mean score with a std band over alpha_space -- verify).
display_plot(elanet_scores, elanet_scores_std, 'elanet')

# Author identification printout (assignment boilerplate).
print("My name is Yuchen Duan")
print("My NetID is: yuchend3")
    return mse


# %%
#To be looped for each fold
# 5-fold cross-validation over the regularization path: for each fold i,
# fit on the other four folds and record coefficients and MSE per lambda.
# Relies on `lambdas`, `train`, `MSE`, and `mse_summary` defined earlier
# in the file (outside this view).
for i in range(5):
    elastic_net = ElasticNet(l1_ratio=0.5)
    coefs = pd.DataFrame(index=lambdas, columns=train[0].columns)
    mse = pd.DataFrame(index=lambdas, columns=['MSE'])
    # Training sample: all folds except fold i, stacked row-wise.
    sample = pd.DataFrame(np.concatenate(train[:i] + train[i + 1:], axis=0))

    # NOTE(review): fitting ElasticNet on the covariance matrix against the
    # column means is unusual -- confirm this is the intended formulation.
    X = sample.cov()
    Y = sample.mean(axis=0)

    for j, l in enumerate(lambdas):
        elastic_net.alpha = l  # set the severity of the constraint
        elastic_net.fit(X, Y)
        coefs.iloc[j] = elastic_net.coef_
        mse.iloc[j] = MSE(X, Y, elastic_net)  # MSE helper defined elsewhere

    # Accumulate this fold's MSE column; mse_summary must already exist.
    mse_summary = pd.concat([mse_summary, mse], axis=1)

# %%
# Average the per-fold MSE curves once, then read off the minimum and the
# lambda at which it occurs.
mean_mse_by_lambda = mse_summary.mean(axis=1)
minimum_mse = mean_mse_by_lambda.min()
optimal_lambda = mean_mse_by_lambda.idxmin()
#The minimum mse
print(minimum_mse, '\n')
#The lambda that gives minimum mse
print(optimal_lambda, '\n')

# %%