def test_weights():
    """Feature weights must be equivalent to rescaling the columns of X."""
    sparse_X = 1
    X, y = build_dataset(n_samples=30, n_features=50, sparse_X=sparse_X)
    np.random.seed(0)
    weights = np.abs(np.random.randn(X.shape[1]))

    tol = 1e-14
    params = {'n_alphas': 10, 'tol': tol}
    # solving the weighted problem on X ...
    alphas1, coefs1, gaps1 = celer_path(
        X, y, "lasso", weights=weights, verbose=1, **params)
    # ... must match solving the unweighted problem on the rescaled X:
    alphas2, coefs2, gaps2 = celer_path(
        X / weights[None, :], y, "lasso", **params)

    assert_allclose(alphas1, alphas2)
    assert_allclose(coefs1, coefs2 / weights[:, None], atol=1e-4, rtol=1e-3)
    assert_array_less(gaps1, tol * norm(y) ** 2 / len(y))
    assert_array_less(gaps2, tol * norm(y) ** 2 / len(y))

    # same equivalence through the estimator API, without intercept:
    alpha = 0.001
    clf1 = Lasso(alpha=alpha, weights=weights, fit_intercept=False).fit(X, y)
    clf2 = Lasso(alpha=alpha, fit_intercept=False).fit(X / weights, y)
    assert_allclose(clf1.coef_, clf2.coef_ / weights)

    # weights must be > 0
    clf1.weights[0] = 0.
    np.testing.assert_raises(ValueError, clf1.fit, X=X, y=y)
def test_convergence_warning():
    """An unreachable tolerance must trigger a ConvergenceWarning."""
    X, y = build_dataset(n_samples=10, n_features=10)
    # the duality gap cannot be negative, so tol=-1 can never be met and a
    # convergence warning should be raised
    tol = -1
    alpha_max = np.max(np.abs(X.T.dot(y))) / X.shape[0]
    clf = Lasso(alpha_max / 10, max_iter=1, max_epochs=100, tol=tol)

    with warnings.catch_warnings(record=True) as record:
        # make sure every warning is captured, not just the first occurrence
        warnings.simplefilter("always")
        clf.fit(X, y)
        assert len(record) == 1
        assert issubclass(record[-1].category, ConvergenceWarning)
def test_dropin_lasso(sparse_X):
    """Test that our Lasso class behaves as sklearn's Lasso."""
    # NOTE(review): a later definition of test_dropin_lasso in this file
    # shadows this one, so pytest never collects it — consider renaming.
    X, y, _, _ = build_dataset(n_samples=20, n_features=30, sparse_X=sparse_X)
    alpha_max = np.linalg.norm(X.T.dot(y), ord=np.inf) / X.shape[0]
    alpha = alpha_max / 2.

    ours = Lasso(alpha=alpha)
    ours.fit(X, y)
    theirs = sklearn_Lasso(alpha=alpha)
    theirs.fit(X, y)

    np.testing.assert_allclose(ours.coef_, theirs.coef_, rtol=1e-5)
    check_estimator(Lasso)
def test_dropin_lasso(sparse_X, fit_intercept):
    """Test that our Lasso class behaves as sklearn's Lasso."""
    X, y, _, _ = build_dataset(n_samples=20, n_features=30, sparse_X=sparse_X)
    alpha_max = np.linalg.norm(X.T.dot(y), ord=np.inf) / X.shape[0]

    # fit both estimators with identical hyperparameters
    params = dict(alpha=alpha_max / 2., fit_intercept=fit_intercept,
                  tol=1e-10, normalize=True)
    ours = Lasso(**params).fit(X, y)
    theirs = sklearn_Lasso(**params).fit(X, y)

    np.testing.assert_allclose(ours.coef_, theirs.coef_, rtol=1e-5)
    if fit_intercept:
        np.testing.assert_allclose(ours.intercept_, theirs.intercept_)
    check_estimator(Lasso)
def test_zero_iter():
    """With max_iter=0, every solver must return all-zero coefficients."""
    X, y = build_dataset(n_samples=30, n_features=50)

    # a convergence warning is raised because the solver reports -1 as gap
    with warnings.catch_warnings(record=True):
        assert_allclose(Lasso(max_iter=0).fit(X, y).coef_, 0)
        # binarize the target to +/-1 for the logistic solvers
        y = 2 * (y > 0) - 1
        assert_allclose(
            LogisticRegression(max_iter=0, solver="celer-pn").fit(X, y).coef_,
            0)
        assert_allclose(
            LogisticRegression(max_iter=0, solver="celer").fit(X, y).coef_, 0)
def test_Lasso(sparse_X, fit_intercept, positive):
    """Test that our Lasso class behaves as sklearn's Lasso."""
    X, y = build_dataset(n_samples=20, n_features=30, sparse_X=sparse_X)
    # under a positivity constraint only positive correlations matter,
    # so alpha_max is the largest (not largest-magnitude) entry of X^T y
    correl = X.T.dot(y)
    alpha_max = (correl.max() if positive
                 else norm(correl, ord=np.inf)) / X.shape[0]

    params = dict(alpha=alpha_max / 2., fit_intercept=fit_intercept,
                  tol=1e-10, positive=positive)
    ours = Lasso(**params).fit(X, y)
    theirs = sklearn_Lasso(**params).fit(X, y)

    assert_allclose(ours.coef_, theirs.coef_, rtol=1e-5)
    if fit_intercept:
        assert_allclose(ours.intercept_, theirs.intercept_)
def test_warm_start():
    """Refitting an already-fitted model with warm start must be nearly free."""
    X, y = build_dataset(n_samples=100, n_features=100, sparse_X=True)
    n_samples, n_features = X.shape
    alpha_max = np.max(np.abs(X.T.dot(y))) / n_samples
    n_alphas = 10
    alphas = alpha_max * np.logspace(0, -2, n_alphas)

    model = Lasso(tol=1e-6, warm_start=True, p0=10)
    model.coef_ = np.zeros(n_features)

    for alpha in alphas:
        model.set_params(alpha=alpha)
        model.fit(X, y)
        # refitting with warm start should take less than 2 iters:
        model.fit(X, y)
        # hack because assert_array_less does strict comparison...
        np.testing.assert_array_less(model.n_iter_, 2.01)