Example no. 1
0
def _ml_Elastic_Net(X, y, tune=True, l1_ratios=None):
    """Build an (unfitted) Elastic Net estimator, optionally CV-tuned.

    Parameters
    ----------
    X, y : training data; not used here — kept for interface
        compatibility, the caller is expected to fit the returned model.
    tune : bool
        If True, return an ``ElasticNetCV`` that searches 100 alphas and
        the given ``l1_ratios`` with 10-fold shuffled CV; otherwise
        return a plain ``ElasticNet`` at a fixed ``l1_ratio``.
    l1_ratios : list of candidate l1_ratio values when tuning
        (default ``[0.1, 0.25, 0.5, 0.75, 1]``), or a single float when
        not tuning (default ``0.5``).

    Returns
    -------
    sklearn estimator (ElasticNetCV or ElasticNet), not yet fitted.
    """
    kfold = KFold(n_splits=10, shuffle=True, random_state=0)
    # TODO Standardize y to see if it changes something
    if tune:
        if l1_ratios is None:
            l1_ratios = [0.1, 0.25, 0.5, 0.75, 1]

        # TODO Less Alphas
        # Pass l1_ratio at construction instead of the original
        # set_params(**{'l1_ratio': ...}) call — same effect, one step.
        model = ElasticNetCV(cv=kfold, n_alphas=100, l1_ratio=l1_ratios)

    else:
        if l1_ratios is None:
            l1_ratios = 0.5

        model = ElasticNet(l1_ratio=l1_ratios)

    return model
Example no. 2
0
# Trace the coefficient paths over the alpha grid: refit the elastic net
# at each alpha and record the fitted coefficients for plotting.
for a in alphas:
    elastic.set_params(alpha=a)
    elastic.fit(scale(X_train), y_train)
    coefs.append(elastic.coef_)

ax = plt.gca()
# NOTE(review): alphas*2 doubles the x-coordinates on the log axis —
# confirm the shift is intentional and not a leftover experiment.
ax.plot(alphas*2, coefs)
ax.set_xscale('log')

plt.xlabel('Alpha')
plt.ylabel('Coefficients')
plt.axvline(model.alpha_, linestyle='--', color='k',label='alpha: CV estimate')
plt.title('Optimal Alpha Parameters')
plt.show()

# Refit the final model at the CV-selected alpha and report test error.
# BUG FIX: ElasticNet's parameter is 'alpha' (singular) — 'alphas' makes
# set_params raise ValueError (compare the loop above, which uses alpha=a).
# Assumes `en` is an ElasticNet, not an ElasticNetCV — confirm at its
# definition site.
en.set_params(alpha=model.alpha_)
en.fit(X_train, y_train)
mean_squared_error(y_test, en.predict(X_test))
print("Best for alphas:")
print(model.alpha_)
print("Coefficients:")
print(pd.Series(en.coef_, index=X.columns))
print(mean_squared_error(y_test, en.predict(X_test)))

# Per-feature contributions: elementwise product of the fitted
# coefficients with the inputs, then the sum of three named columns.
fit = en.coef_*X
comb = fit.Na + fit.Cl + fit.Water

plt.semilogy(originalenergy, y, color='0.65', label='Simulated Data', linewidth=2)