# Center and scale features and targets using training-set statistics only,
# so the test set is transformed with the same parameters.
std = X_train.std(axis=0)
mean = X_train.mean(axis=0)
X_train = (X_train - mean) / std
X_test = (X_test - mean) / std

std = y_train.std(axis=0)
mean = y_train.mean(axis=0)
y_train = (y_train - mean) / std
y_test = (y_test - mean) / std

gc.collect()
print "- benching ElasticNet"
clf = ElasticNet(alpha=alpha, rho=0.5, fit_intercept=False)
tstart = time()
clf.fit(X_train, y_train)
elnet_results[i, j, 0] = mean_square_error(clf.predict(X_test), y_test)
elnet_results[i, j, 1] = time() - tstart

gc.collect()
print "- benching SGD"
# Scale the number of epochs so that roughly 10 ** 4 samples are seen in total.
n_iter = np.ceil(10 ** 4.0 / n_train)
clf = SGDRegressor(alpha=alpha, fit_intercept=False, n_iter=n_iter,
                   learning_rate="invscaling", eta0=.01, power_t=0.25)
tstart = time()
clf.fit(X_train, y_train)
sgd_results[i, j, 0] = mean_square_error(clf.predict(X_test), y_test)
sgd_results[i, j, 1] = time() - tstart
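The fragment above assumes it runs inside nested loops over training-set sizes (index i) and regularization strengths (index j), with the data already split and the result arrays preallocated. That surrounding setup is not shown; the sketch below is a hypothetical, NumPy-only stand-in for it, and the list values, array shapes, and synthetic data generation are all invented for illustration.

import numpy as np

# Hypothetical grid of problem sizes and regularization strengths; the real
# benchmark's values are not shown in the fragment above.
list_n_train = [1000, 10000]
alphas = [0.01, 0.1, 1.0]

# One (MSE, wall-clock time) pair per (n_train, alpha) combination.
elnet_results = np.zeros((len(list_n_train), len(alphas), 2))
sgd_results = np.zeros((len(list_n_train), len(alphas), 2))

n_features, n_test = 100, 1000
rng = np.random.RandomState(0)

for i, n_train in enumerate(list_n_train):
    for j, alpha in enumerate(alphas):
        # Synthetic regression data standing in for the benchmark's dataset.
        w = rng.randn(n_features)
        X_train = rng.randn(n_train, n_features)
        y_train = np.dot(X_train, w) + 0.1 * rng.randn(n_train)
        X_test = rng.randn(n_test, n_features)
        y_test = np.dot(X_test, w) + 0.1 * rng.randn(n_test)
        # ... the standardization / fitting / timing code above goes here ...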
def test_losses():
    """test loss functions"""
    assert_equal(zero_one(y[half:], y_), 13)
    assert_almost_equal(mean_square_error(y[half:], y_), 12.999, 2)
    assert_almost_equal(explained_variance(y[half:], y_), -0.04, 2)
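The three helpers asserted on above come from the metrics module of this era of the library (they were later renamed). As a rough illustration of what each one computes, here is a minimal NumPy sketch of the standard definitions; the *_sketch functions and the toy arrays are invented for this example and are not the library implementations or the test fixtures.

import numpy as np

def zero_one_sketch(y_true, y_pred):
    # Number of positions where the prediction differs from the truth.
    return np.sum(y_true != y_pred)

def mean_square_error_sketch(y_true, y_pred):
    # Average of the squared residuals.
    return np.mean((y_true - y_pred) ** 2)

def explained_variance_sketch(y_true, y_pred):
    # 1 - Var(residuals) / Var(y_true); 1.0 is perfect, and values at or
    # below 0 mean the model explains no variance at all.
    return 1.0 - np.var(y_true - y_pred) / np.var(y_true)

# Toy usage with made-up data (not the fixtures used in the test above):
y_true = np.array([3.0, -0.5, 2.0, 7.0])
y_pred = np.array([2.5, 0.0, 2.0, 8.0])
print(mean_square_error_sketch(y_true, y_pred))    # 0.375
print(explained_variance_sketch(y_true, y_pred))   # ~0.957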