plt.xscale('log')
plt.show()

# ELASTICNET
from sklearn.linear_model import ElasticNet

# Candidate L1/L2 mixing ratios, log-spaced.
# FIX: the original used np.logspace(-2, 0.5, 5), whose upper values (~3.16)
# exceed ElasticNet's valid range 0 <= l1_ratio <= 1 and are rejected at fit
# time by modern scikit-learn; the sweep is restricted to (0.01, 1].
l1_ratio_space = np.logspace(-2, 0, 5)
#l1_ratio_space = [0,1, 0.5, 1, 2, 3,]
R2_test = []
R2_train = []
l1_ratio_serie = []

# FIX: `normalize=True` was deprecated in scikit-learn 1.0 and removed in 1.2.
# The inputs used below are already standardized (*_std), so the flag is
# dropped rather than replaced with a StandardScaler pipeline.
eln = ElasticNet(alpha=0.001)

for l1_ratio in l1_ratio_space:
    eln.l1_ratio = l1_ratio
    eln.fit(X_train_std, y_train_std)
    y_train_pred = eln.predict(X_train_std)
    y_test_pred = eln.predict(X_test_std)

    print('l1_ratio = %.4f' % l1_ratio)
    print('\tIntercept:\t%.3f' % eln.intercept_)
    for i in range(13):
        print('\tSlope #%.0f:\t%.3f' % (i + 1, eln.coef_[i]))
    # NOTE(review): the original source was truncated mid-statement here; the
    # closing arguments are reconstructed to match the train/test reporting
    # pattern used elsewhere in this file — confirm against the original.
    print('\tMSE train: %.3f, test: %.3f \tR^2 train: %.3f, test: %.3f' %
          (mean_squared_error(y_train_std, y_train_pred),
           mean_squared_error(y_test_std, y_test_pred),
           r2_score(y_train_std, y_train_pred),
           r2_score(y_test_std, y_test_pred)))
#########################################
# ELASTIC NET MODEL
from sklearn.linear_model import ElasticNet

# Elastic-net regressor with a fixed penalty strength and L1/L2 mix,
# fitted on the training split only.
regr = ElasticNet(random_state=0, alpha=1.9, l1_ratio=0.65)
regr.fit(X_train, y_train)

# score() compares predictions on the held-out set against the true targets;
# the value it returns is directly comparable to R-squared.
y_score = regr.score(X_test, y_test)
print(y_score)

#########
# Theil sen model
plt.ylabel('Residuals')
plt.legend(loc='upper left')
plt.hlines(y=0, xmin=-10, xmax=50, color='black', lw=2)
plt.xlim([-10, 50])
plt.show()

# Setup the array of l1_ratios and lists to store scores
l1_space = np.logspace(-4, 0, 50)
MSE4_test_scores = []
MSE4_train_scores = []
R24_test_scores = []
R24_train_scores = []

for l1_ratio in l1_space:
    # FIX: the original called predict() BEFORE updating l1_ratio and
    # refitting, so every iteration recorded scores from the previous
    # (stale) model — and the first iteration from whatever state `se`
    # was in before the loop.
    se.l1_ratio = l1_ratio
    # NOTE(review): fitting on the full (X, y) and then scoring on the
    # train/test splits leaks test data into training — presumably
    # fit(X_train, y_train) was intended; confirm before changing.
    se.fit(X, y)
    y_train_pred4 = se.predict(X_train)
    y_test_pred4 = se.predict(X_test)
    MSE4_test_scores.append(mean_squared_error(y_test, y_test_pred4))
    MSE4_train_scores.append(mean_squared_error(y_train, y_train_pred4))
    R24_test_scores.append(r2_score(y_test, y_test_pred4))
    R24_train_scores.append(r2_score(y_train, y_train_pred4))

# FIX: the x-axis must be the swept l1_space, not `alpha_space` (defined
# elsewhere and not necessarily the same length as the score lists).
plt.plot(l1_space, MSE4_test_scores)
plt.xlabel('l1_ratio')
plt.ylabel('MSE4_test_scores')
plt.show()

plt.plot(l1_space, MSE4_train_scores)
plt.xlabel('l1_ratio')
plt.ylabel('MSE4_train_scores')
plt.show()
plt.show()

# MSE
print(
    'ElasticNet regression MSE train: %.3f, test: %.3f' %
    (mean_squared_error(y_train, y_train_pred4),
     mean_squared_error(y_test, y_test_pred4)))
# R2
print('ElasticNet regression R^2 train: %.3f, test: %.3f' %
      (r2_score(y_train, y_train_pred4), r2_score(y_test, y_test_pred4)))

# Compare with different l1_ratio
l1_ratio_space = np.logspace(-4, 0, 50)
MSE_test_scores = []
MSE_train_scores = []
R2_test_scores = []
R2_train_scores = []

for l1_ratio in l1_ratio_space:
    elastic.l1_ratio = l1_ratio
    # NOTE(review): fitting on the full (X, y) and scoring on the train/test
    # splits leaks test data into training — consider fit(X_train, y_train).
    elastic.fit(X, y)
    # FIX: reshape(-1, 1) instead of the hard-coded (361, 1) / (91, 1), so
    # the code does not break when the train/test split sizes change.
    y_train_pred3 = elastic.predict(X_train).reshape(-1, 1)
    y_test_pred3 = elastic.predict(X_test).reshape(-1, 1)
    MSE_train_scores.append(mean_squared_error(y_train, y_train_pred3))
    MSE_test_scores.append(mean_squared_error(y_test, y_test_pred3))
    R2_train_scores.append(r2_score(y_train, y_train_pred3))
    R2_test_scores.append(r2_score(y_test, y_test_pred3))

plt.plot(l1_ratio_space, MSE_train_scores)
plt.xlabel('l1_ratio_space')
plt.ylabel('MSE_train_scores')
plt.show()

plt.plot(l1_ratio_space, MSE_test_scores)
plt.xlabel('l1_ratio_space')
plt.ylabel('MSE_test_scores')
plt.show()
# Display the plot
display_plot(lasso_scores, lasso_scores_std, 'lasso')

print('Elastic Net regression')
# Elastic Net regression: sweep the L1/L2 mixing parameter.
# NOTE(review): despite the names, the values in `alpha_space` are assigned
# to l1_ratio below, not to alpha — the original comments claimed
# "ridge.alpha", which was wrong. Names are kept because later code in this
# file appears to reference `alpha` after the loop.
alpha_space = np.logspace(-2, 0, 5)
elanet_scores = []
elanet_scores_std = []

# Create an elastic-net regressor with a fixed penalty strength.
elanet = ElasticNet(alpha=1.0)

for alpha in alpha_space:
    # The loop value is used as the L1/L2 mixing ratio (valid range [0, 1];
    # logspace(-2, 0, 5) stays inside it).
    elanet.l1_ratio = alpha
    elanet.fit(X_train, y_train)
    y_train_pred = elanet.predict(X_train)
    y_test_pred = elanet.predict(X_test)

    print('Slope:', elanet.coef_)
    print('Intercept: %.3f' % elanet.intercept_)
    print('MSE train: %.3f, test: %.3f' %
          (mean_squared_error(y_train, y_train_pred),
           mean_squared_error(y_test, y_test_pred)))
    print('R^2 train: %.3f, test: %.3f' %
          (r2_score(y_train, y_train_pred), r2_score(y_test, y_test_pred)))

# np.arange builds the same array as np.array(range(...)) without the
# intermediate Python range.
ary = np.arange(100000)
plt.legend(loc='upper left')
plt.hlines(y=0, xmin=-10, xmax=50, color='black', lw=2)
plt.xlim([-10, 50])
plt.show()
print('\n')

# Elastic Net
print('Elastic Net :')
elanet = ElasticNet(alpha=1.0)
# FIX: l1_ratio must lie in [0, 1]; the original list contained 2 and 2.5,
# which modern scikit-learn rejects with an error at fit time.
l1_ratio_space = [0.1, 0.25, 0.5, 0.75, 1]
for l1_ratio in l1_ratio_space:
    print('l1_ratio = ', l1_ratio)
    elanet.l1_ratio = l1_ratio
    elanet.fit(X_train, y_train)
    y_train_pred = elanet.predict(X_train)
    y_test_pred = elanet.predict(X_test)
    print('MSE train: %.3f, test: %.3f' %
          (mean_squared_error(y_train, y_train_pred),
           mean_squared_error(y_test, y_test_pred)))
    print('R^2 train: %.3f, test: %.3f' %
          (r2_score(y_train, y_train_pred), r2_score(y_test, y_test_pred)))
    print('Coefficient: %.3f' % elanet.coef_[0])
    print('Intercept: %.3f' % elanet.intercept_)
    print('\n')

# NOTE(review): the original source was truncated mid-call here; the remaining
# scatter arguments are reconstructed as a conventional residual plot matching
# the hlines/xlim styling above — confirm against the original.
plt.scatter(y_train_pred, y_train_pred - y_train, c='steelblue',
            marker='o', edgecolor='white', label='Training data')
plt.savefig('lasso alpha=' + str(alpha) + '10_09.png', dpi=300)
plt.show()

print('Elastic Net regression')
# Elastic Net regression: sweep the L1/L2 mixing ratio at fixed alpha.
ratio_space = np.logspace(-2, 0, 4)
elanet_scores = []
elanet_scores_std = []

# Create an elastic-net regressor with a fixed penalty strength.
# (The original comment said "ridge regressor: lasso", which was wrong.)
elanet = ElasticNet(alpha=1.0)

for ratio in ratio_space:
    # The swept value is the mixing ratio (l1_ratio), not the alpha penalty;
    # the original "ridge.alpha" comment was misleading.
    elanet.l1_ratio = ratio
    elanet.fit(X_train, y_train)
    y_train_pred = elanet.predict(X_train)
    y_test_pred = elanet.predict(X_test)

    print('Slope:', elanet.coef_)
    print('intercept:', elanet.intercept_)
    print('MSE train: %.3f, test: %.3f' %
          (mean_squared_error(y_train, y_train_pred),
           mean_squared_error(y_test, y_test_pred)))
    print('R^2 train: %.3f, test: %.3f' %
          (r2_score(y_train, y_train_pred), r2_score(y_test, y_test_pred)))

ary = np.arange(100000)
# NOTE(review): the title/scatter below may have belonged inside the loop (one
# plot per ratio) — the collapsed source makes the indentation ambiguous, so
# the textual order is preserved; confirm against the original.
plt.title('ratio=' + str(ratio))
# NOTE(review): the source was truncated mid-call here; the remaining scatter
# arguments are reconstructed as a conventional residual plot — confirm.
plt.scatter(y_train_pred, y_train_pred - y_train, c='steelblue',
            marker='o', edgecolor='white', label='Training data')