import numpy as np
from scipy import sparse
from numpy.testing import (assert_almost_equal, assert_array_almost_equal,
                           assert_equal)

# SparseLasso, DenseLasso and make_sparse_data are assumed to be provided
# elsewhere in this test module (not shown in this section).


def test_lasso_zero():
    """Check that the sparse lasso can handle zero data without crashing"""
    X = sparse.csc_matrix((3, 1))
    y = [0, 0, 0]
    T = np.array([[1], [2], [3]])
    clf = SparseLasso().fit(X, y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0])
    assert_array_almost_equal(pred, [0, 0, 0])
    assert_almost_equal(clf.dual_gap_, 0)
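
# The next test relies on a `make_sparse_data` helper whose definition is not
# part of this section. The sketch below is only a plausible stand-in, assuming
# the helper builds a ~50%-sparse random design matrix whose target depends on
# the first `n_informative` features; the actual helper in the test module may
# differ (e.g. in seeding or noise).
def make_sparse_data(n_samples=100, n_features=100, n_informative=10, seed=42):
    rng = np.random.RandomState(seed)
    # ground-truth weights: only the first n_informative features matter
    w = np.zeros(n_features)
    w[:n_informative] = rng.randn(n_informative)
    X = rng.randn(n_samples, n_features)
    X[rng.uniform(size=X.shape) > 0.5] = 0.0  # zero out roughly half the entries
    y = np.dot(X, w)
    return sparse.csc_matrix(X), y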

def test_sparse_lasso_not_as_toy_dataset():
    n_samples, n_features, max_iter = 100, 100, 1000
    n_informative = 10
    X, y = make_sparse_data(n_samples, n_features, n_informative)

    X_train, X_test = X[n_samples // 2:], X[:n_samples // 2]
    y_train, y_test = y[n_samples // 2:], y[:n_samples // 2]

    s_clf = SparseLasso(alpha=0.1, fit_intercept=False)
    s_clf.fit(X_train, y_train, max_iter=max_iter, tol=1e-7)
    assert_almost_equal(s_clf.dual_gap_, 0, 4)
    assert s_clf.score(X_test, y_test) > 0.85

    # check the convergence is the same as the dense version
    d_clf = DenseLasso(alpha=0.1, fit_intercept=False)
    d_clf.fit(X_train, y_train, max_iter=max_iter, tol=1e-7)
    assert_almost_equal(d_clf.dual_gap_, 0, 4)
    assert d_clf.score(X_test, y_test) > 0.85

    # check that the coefs are sparse
    assert_equal(np.sum(s_clf.coef_ != 0.0), n_informative)