"""
===============================================================
Run GroupLasso and GroupLasso CV for structured sparse recovery
===============================================================

The example runs the GroupLasso scikit-learn like estimators.
"""

import numpy as np
import matplotlib.pyplot as plt

from celer import GroupLassoCV, LassoCV
from celer.datasets import make_correlated_data
from celer.plot_utils import configure_plt

print(__doc__)
configure_plt(fontsize=16)

# Simulate X, y and a ground-truth coefficient vector made of 4 groups
# of 5 identical non-zero entries (three +1 groups, one -1 group).

n_samples, n_features = 100, 50

w_true = np.zeros(n_features)
for start, value in [(0, 1), (10, 1), (30, -1), (45, 1)]:
    w_true[start:start + 5] = value
X, y, w_true = make_correlated_data(
    n_samples, n_features, w_true=w_true, snr=5, random_state=0)

###############################################################################
# Get group Lasso's optimal alpha for prediction by cross validation
"""
Estimating a good dual stepsize for sparse recovery
=====================================================

From the data, a good dual stepsize can be estimated in the case of sparse
recovery.
"""

import numpy as np
from numpy.linalg import norm
import matplotlib.pyplot as plt
from sklearn.metrics import f1_score
from celer.datasets import make_correlated_data
from celer.plot_utils import configure_plt

from iterreg.sparse import dual_primal

configure_plt()

# Data for the experiment: an underdetermined problem (more features than
# samples) with mildly correlated columns and a 10%-dense true support.
n_samples, n_features = 200, 500

X, y, w_true = make_correlated_data(
    n_samples=n_samples, n_features=n_features, corr=0.2, density=0.1,
    snr=10, random_state=0)

###############################################################################
# In the L1 case, the Chambolle-Pock algorithm converges to the noisy Basis
# Pursuit solution, which has ``min(n_samples, n_features)`` non zero entries.