# Univariate filtering: keep the K features most correlated with y
# (SelectKBest / f_regression), and always keep the first column
# (the unpenalised intercept, since penalty_start=1 below).
filter_univ = SelectKBest(f_regression, k=K)
filter_univ.fit(Xtr, ytr)
filter_ = filter_univ.get_support()
filter_[0] = True
Xtr_filtered = Xtr[:, filter_]
Xval_filtered = Xval[:, filter_]

# Map from full to filtered feature indices; -1 means not selected.
map_full_to_filtered = -np.ones(Xtrain.shape[1], dtype=int)
map_full_to_filtered[filter_] = np.arange(filter_.sum())  # K or K+1 kept columns
groups_filtered = [map_full_to_filtered[g] for g in groups]
groups_filtered = [g[g != -1] for g in groups_filtered]
groups_filtered = [g for g in groups_filtered if len(g) >= 1]

# Group weights: sqrt of the (filtered) group sizes.
weights_filtered = [len(g) for g in groups_filtered]
weights_filtered = np.sqrt(np.asarray(weights_filtered))

Agl = gl.linear_operator_from_groups(Xval_filtered.shape[1],
                                     groups=groups_filtered,
                                     weights=weights_filtered,
                                     penalty_start=1)
enet_gl = estimators.LinearRegressionL1L2GL(l1=l1, l2=l2, gl=lgl, A=Agl,
                                            algorithm=algorithm,
                                            penalty_start=1)
# enet_gl2 = ElasticNet(alpha=l2, l1_ratio=l1)
enet_gl.fit(Xtr_filtered, ytr)
y_pred_test = enet_gl.predict(Xval_filtered)
test_acc = r2_score(yval, y_pred_test)
print(test_acc)
inner_param[(l1, l2, lgl)].append(test_acc)

# Mean validation R^2 over the inner folds for each (l1, l2, lgl) triple.
inner_param_mean = {key: np.mean(scores) for key, scores in inner_param.items()}
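# The winning hyper-parameter triple is the one with the highest mean inner
# R^2.  A minimal sketch of that selection step, assuming inner_param /
# inner_param_mean are filled as above; the names best_params, l1_best,
# l2_best and lgl_best are illustrative and not part of the original script.
best_params = max(inner_param_mean, key=inner_param_mean.get)
l1_best, l2_best, lgl_best = best_params
print("best (l1, l2, lgl):", best_params,
      "mean validation R^2:", inner_param_mean[best_params])
# The selected triple would then be refit on the whole outer training set
# (repeating the univariate filtering on Xtrain) before scoring on the
# held-out test fold.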
def test_nonoverlapping_smooth(self):
    # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
    import parsimony.utils.weights as weights

    np.random.seed(42)

    # Note that p must be even!
    n, p = 25, 20
    groups = [list(range(0, int(p / 2))), list(range(int(p / 2), p))]
    # weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups)  # , weights=weights)

    l = 0.0
    k = 0.0
    g = 0.9

    start_vector = weights.RandomUniformWeights(normalise=True)
    beta = start_vector.get_weights(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    mu_min = 5e-8
    X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                      mu=mu_min, snr=snr)

    eps = 1e-8
    max_iter = 18000

    beta_start = start_vector.get_weights(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=int(max_iter / len(mus)))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_loss(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(
            gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    try:
        import spams

        params = {"loss": "square",
                  "regul": "group-lasso-l2",
                  "groups": np.array([1] * (int(p / 2)) + [2] * (int(p / 2)),
                                     dtype=np.int32),
                  "lambda1": g,
                  "max_it": max_iter,
                  "tol": eps,
                  "ista": False,
                  "numThreads": -1,
                  }
        beta_spams, optim_info = \
            spams.fistaFlat(Y=np.asfortranarray(y),
                            X=np.asfortranarray(X),
                            W0=np.asfortranarray(beta_start),
                            return_optim_info=True,
                            **params)
        # print(beta_spams)

    except ImportError:
        # beta_spams = np.asarray([[15.56784201],
        #                          [39.51679274],
        #                          [30.42583205],
        #                          [24.8816362],
        #                          [6.48671072],
        #                          [6.48350546],
        #                          [2.41477318],
        #                          [36.00285723],
        #                          [24.98522184],
        #                          [29.43128643],
        #                          [0.85520539],
        #                          [40.31463542],
        #                          [34.60084146],
        #                          [8.82322513],
        #                          [7.55741642],
        #                          [7.62364398],
        #                          [12.64594707],
        #                          [21.81113869],
        #                          [17.95400007],
        #                          [12.10507338]])
        beta_spams = np.asarray([[-11.93855944],
                                 [42.889350930],
                                 [22.076438880],
                                 [9.3869208300],
                                 [-32.73310431],
                                 [-32.73509107],
                                 [-42.05298794],
                                 [34.844819990],
                                 [9.6210946300],
                                 [19.799892400],
                                 [-45.62041548],
                                 [44.716039010],
                                 [31.634706630],
                                 [-27.37416567],
                                 [-30.27711859],
                                 [-30.12673231],
                                 [-18.62803747],
                                 [2.3561952400],
                                 [-6.476922020],
                                 [-19.86630857]])

    berr = np.linalg.norm(beta_parsimony - beta_spams)
    # print(berr)
    assert berr < 5e-3

    f_parsimony = function.f(beta_parsimony)
    f_spams = function.f(beta_spams)
    ferr = abs(f_parsimony - f_spams)
    # print(ferr)
    assert ferr < 5e-6
def test_combo_overlapping_nonsmooth(self):
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
    import parsimony.utils.weights as weights

    np.random.seed(42)

    # Note that p must be divisible by 3!
    n, p = 25, 30
    groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
    group_weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups,
                                       weights=group_weights)

    l = 0.618
    k = 1.0 - l
    g = 2.718

    start_vector = weights.RandomUniformWeights(normalise=True)
    beta = start_vector.get_weights(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

    eps = 1e-8
    max_iter = 10000

    beta_start = start_vector.get_weights(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=int(max_iter / len(mus)))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_loss(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(functions.penalties.L2Squared(l=k))
        function.add_penalty(
            gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))
        function.add_prox(functions.penalties.L1(l=l))

        beta_parsimony = fista.run(function, beta_parsimony)

    berr = np.linalg.norm(beta_parsimony - beta_star)
    # print(berr)
    assert berr < 5e-3

    f_parsimony = function.f(beta_parsimony)
    f_star = function.f(beta_star)
    # print(abs(f_parsimony - f_star))
    assert abs(f_parsimony - f_star) < 5e-6
def test_combo_overlapping_nonsmooth(self):
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
    import parsimony.utils.start_vectors as start_vectors

    np.random.seed(42)

    # Note that p must be divisible by 3!
    n, p = 25, 30
    groups = [range(0, 2 * p / 3), range(p / 3, p)]
    weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups, weights=weights)

    l = 0.618
    k = 1.0 - l
    g = 2.718

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

    eps = 1e-8
    max_iter = 10000

    beta_start = start_vector.get_vector(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=max_iter / len(mus))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_function(functions.losses.LinearRegression(X, y,
                                                                mean=False))
        function.add_penalty(functions.penalties.L2Squared(l=k))
        function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                  penalty_start=0))
        function.add_prox(functions.penalties.L1(l=l))

        beta_parsimony = fista.run(function, beta_parsimony)

    berr = np.linalg.norm(beta_parsimony - beta_star)
    # print berr
    assert berr < 5e-3

    f_parsimony = function.f(beta_parsimony)
    f_star = function.f(beta_star)
    # print abs(f_parsimony - f_star)
    assert abs(f_parsimony - f_star) < 5e-6
# Rough estimate of the largest useful l1 penalty: 10% of max_i |x_i' y| / n.
s = [np.linalg.norm(np.dot(Xtr[:, i], ytr)) for i in range(Xtr.shape[1])]
l1_max = 0.1 * np.max(s) / Xtr.shape[0]
print("l1 max is", l1_max)

#################################
l1, l2, lgl = l1_max * np.array((0.1, 0.1, 0.01))

weights = [np.sqrt(len(group)) for group in groups]
weights = 1. / np.sqrt(np.asarray(weights))  # net effect: len(group) ** -0.25

Agl = gl.linear_operator_from_groups(p, groups=groups, weights=weights)
algorithm = algorithms.proximal.CONESTA(eps=consts.TOLERANCE, max_iter=15000)
enet_gl = estimators.LinearRegressionL1L2GL(l1, l2, lgl, Agl,
                                            algorithm=algorithm)
yte_pred_enetgl = enet_gl.fit(Xtr, ytr).predict(Xte)
print("r squared is", r2_score(yte, yte_pred_enetgl))

# Centre X (no scaling) and centre y before splitting into train/test.
Xnon_res = sklearn.preprocessing.scale(Xnon_res, axis=0,
                                       with_mean=True, with_std=False)
Ynon_res = Ynon_res - Ynon_res.mean()

n_train = int(X.shape[0] / 1.75)
Xtr_res = Xnon_res[:n_train, :]
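# The per-column loop above just computes |x_i' y| for each column; a
# vectorised equivalent (a sketch, assuming Xtr is (n, p) and ytr is a 1-D
# array or a single column):
corr = np.abs(np.dot(Xtr.T, ytr)).ravel()
l1_max_vec = 0.1 * corr.max() / Xtr.shape[0]
print("l1 max (vectorised) is", l1_max_vec)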
def test_nonoverlapping_smooth(self):
    # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
    import parsimony.utils.start_vectors as start_vectors

    np.random.seed(42)

    # Note that p must be even!
    n, p = 25, 20
    groups = [range(0, p / 2), range(p / 2, p)]
    # weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups)  # , weights=weights)

    l = 0.0
    k = 0.0
    g = 0.9

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    mu_min = 5e-8
    X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                      mu=mu_min, snr=snr)

    eps = 1e-8
    max_iter = 18000

    beta_start = start_vector.get_vector(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=max_iter / len(mus))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_function(functions.losses.LinearRegression(X, y,
                                                                mean=False))
        function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                  penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    try:
        import spams

        params = {"loss": "square",
                  "regul": "group-lasso-l2",
                  "groups": np.array([1] * (p / 2) + [2] * (p / 2),
                                     dtype=np.int32),
                  "lambda1": g,
                  "max_it": max_iter,
                  "tol": eps,
                  "ista": False,
                  "numThreads": -1,
                  }
        beta_spams, optim_info = spams.fistaFlat(
            Y=np.asfortranarray(y),
            X=np.asfortranarray(X),
            W0=np.asfortranarray(beta_start),
            return_optim_info=True,
            **params
        )
        # print beta_spams

    except ImportError:
        beta_spams = np.asarray([[15.56784201],
                                 [39.51679274],
                                 [30.42583205],
                                 [24.8816362],
                                 [6.48671072],
                                 [6.48350546],
                                 [2.41477318],
                                 [36.00285723],
                                 [24.98522184],
                                 [29.43128643],
                                 [0.85520539],
                                 [40.31463542],
                                 [34.60084146],
                                 [8.82322513],
                                 [7.55741642],
                                 [7.62364398],
                                 [12.64594707],
                                 [21.81113869],
                                 [17.95400007],
                                 [12.10507338]])

    berr = np.linalg.norm(beta_parsimony - beta_spams)
    # print berr
    assert berr < 5e-3

    f_parsimony = function.f(beta_parsimony)
    f_spams = function.f(beta_spams)
    ferr = abs(f_parsimony - f_spams)
    # print ferr
    assert ferr < 5e-6
def test_nonoverlapping_nonsmooth(self):
    # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
    import parsimony.utils.start_vectors as start_vectors

    np.random.seed(42)

    # Note that p must be even!
    n, p = 25, 20
    groups = [range(0, p / 2), range(p / 2, p)]
    # weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups)  # , weights=weights)

    l = 0.0
    k = 0.0
    g = 1.0

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

    eps = 1e-8
    max_iter = 8500

    beta_start = start_vector.get_vector(p)

    mus = [5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=max_iter / len(mus))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_function(functions.losses.LinearRegression(X, y,
                                                                mean=False))
        function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                  penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    try:
        import spams

        params = {"loss": "square",
                  "regul": "group-lasso-l2",
                  "groups": np.array([1] * (p / 2) + [2] * (p / 2),
                                     dtype=np.int32),
                  "lambda1": g,
                  "max_it": max_iter,
                  "tol": eps,
                  "ista": False,
                  "numThreads": -1,
                  }
        beta_spams, optim_info = spams.fistaFlat(
            Y=np.asfortranarray(y),
            X=np.asfortranarray(X),
            W0=np.asfortranarray(beta_start),
            return_optim_info=True,
            **params
        )

    except ImportError:
        beta_spams = np.asarray([[14.01111427],
                                 [35.56508563],
                                 [27.38245962],
                                 [22.39716553],
                                 [5.835744940],
                                 [5.841502910],
                                 [2.172209350],
                                 [32.40227785],
                                 [22.48364756],
                                 [26.48822401],
                                 [0.770391500],
                                 [36.28288883],
                                 [31.14118214],
                                 [7.938279340],
                                 [6.800713150],
                                 [6.862914540],
                                 [11.38161678],
                                 [19.63087584],
                                 [16.15855845],
                                 [10.89356615]])

    berr = np.linalg.norm(beta_parsimony - beta_spams)
    # print berr
    assert berr < 5e-2

    f_parsimony = function.f(beta_parsimony)
    f_spams = function.f(beta_spams)
    ferr = abs(f_parsimony - f_spams)
    # print ferr
    assert ferr < 5e-6
def test_overlapping_smooth(self):
    import numpy as np

    from parsimony.functions import CombinedFunction
    from parsimony.algorithms.proximal import FISTA
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
    import parsimony.utils.start_vectors as start_vectors

    np.random.seed(314)

    # Note that p must be divisible by 3!
    n, p = 25, 30
    groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
    weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups, weights=weights)

    l = 0.0
    k = 0.0
    g = 0.9

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    mu_min = 5e-8
    X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                      mu=mu_min, snr=snr)

    eps = 1e-8
    max_iter = 15000

    beta_start = start_vector.get_vector(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = FISTA(eps=eps, max_iter=int(max_iter / len(mus)))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_function(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(
            gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    berr = np.linalg.norm(beta_parsimony - beta_star)
    # print berr
    assert berr < 5e-2

    f_parsimony = function.f(beta_parsimony)
    f_star = function.f(beta_star)
    # print abs(f_parsimony - f_star)
    assert abs(f_parsimony - f_star) < 5e-7
def test_nonoverlapping_nonsmooth(self):
    # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf
    import numpy as np

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
    import parsimony.utils.start_vectors as start_vectors

    np.random.seed(42)

    # Note that p must be even!
    n, p = 25, 20
    groups = [list(range(0, int(p / 2))), list(range(int(p / 2), p))]
    # weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups)  # , weights=weights)

    l = 0.0
    k = 0.0
    g = 1.0

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

    eps = 1e-8
    max_iter = 8500

    beta_start = start_vector.get_vector(p)

    mus = [5e-2, 5e-4, 5e-6, 5e-8]
    fista = proximal.FISTA(eps=eps, max_iter=max_iter / len(mus))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_function(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(
            gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    try:
        import spams

        params = {"loss": "square",
                  "regul": "group-lasso-l2",
                  "groups": np.array([1] * (int(p / 2)) + [2] * (int(p / 2)),
                                     dtype=np.int32),
                  "lambda1": g,
                  "max_it": max_iter,
                  "tol": eps,
                  "ista": False,
                  "numThreads": -1,
                  }
        beta_spams, optim_info = \
            spams.fistaFlat(Y=np.asfortranarray(y),
                            X=np.asfortranarray(X),
                            W0=np.asfortranarray(beta_start),
                            return_optim_info=True,
                            **params)

    except ImportError:
        beta_spams = np.asarray([[14.01111427],
                                 [35.56508563],
                                 [27.38245962],
                                 [22.39716553],
                                 [5.835744940],
                                 [5.841502910],
                                 [2.172209350],
                                 [32.40227785],
                                 [22.48364756],
                                 [26.48822401],
                                 [0.770391500],
                                 [36.28288883],
                                 [31.14118214],
                                 [7.938279340],
                                 [6.800713150],
                                 [6.862914540],
                                 [11.38161678],
                                 [19.63087584],
                                 [16.15855845],
                                 [10.89356615]])

    berr = np.linalg.norm(beta_parsimony - beta_spams)
    # print berr
    assert berr < 5e-2

    f_parsimony = function.f(beta_parsimony)
    f_spams = function.f(beta_spams)
    ferr = abs(f_parsimony - f_spams)
    # print ferr
    assert ferr < 5e-6
def test_overlapping_smooth(self):
    import numpy as np

    from parsimony.functions import CombinedFunction
    from parsimony.algorithms.proximal import FISTA
    import parsimony.functions as functions
    import parsimony.functions.nesterov.gl as gl
    import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
    import parsimony.utils.weights as weights

    np.random.seed(314)

    # Note that p must be divisible by 3!
    n, p = 25, 30
    groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
    group_weights = [1.5, 0.5]

    A = gl.linear_operator_from_groups(p, groups=groups,
                                       weights=group_weights)

    l = 0.0
    k = 0.0
    g = 0.9

    start_vector = weights.RandomUniformWeights(normalise=True)
    beta = start_vector.get_weights(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0

    mu_min = 5e-8
    X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                      mu=mu_min, snr=snr)

    eps = 1e-8
    max_iter = 15000

    beta_start = start_vector.get_weights(p)

    mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
    fista = FISTA(eps=eps, max_iter=int(max_iter / len(mus)))

    beta_parsimony = beta_start
    for mu in mus:
        # function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
        #                                             A=A, mu=mu,
        #                                             penalty_start=0)

        function = CombinedFunction()
        function.add_loss(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(
            gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

        beta_parsimony = fista.run(function, beta_parsimony)

    berr = np.linalg.norm(beta_parsimony - beta_star)
    # print(berr)
    assert berr < 5e-2

    f_parsimony = function.f(beta_parsimony)
    f_star = function.f(beta_star)
    # print(abs(f_parsimony - f_star))
    assert abs(f_parsimony - f_star) < 5e-6
# Inner cross-validation loop of the nested CV: grid-search over
# (l1, l2, lgl) on the outer training set (Xtrain, ytrain).
cv_int = cross_validation.KFold(len(ytrain), n_folds=N_FOLDS_INT)

inner_param = dict()
inner_param_train = dict()
for l1, l2, lgl in itertools.product(L1, L2, LGL):
    inner_param[(l1, l2, lgl)] = []
    inner_param_train[(l1, l2, lgl)] = []

for tr, val in cv_int:
    Xtr = Xtrain[tr, :]
    Xval = Xtrain[val, :]
    ytr = ytrain[tr]
    yval = ytrain[val]
    # test_perm = list()
    for l1, l2, lgl in itertools.product(L1, L2, LGL):
        print(l1, l2, lgl)
        Agl = gl.linear_operator_from_groups(Xval.shape[1], groups=groups,
                                             weights=weights,
                                             penalty_start=1)
        enet_gl = estimators.LinearRegressionL1L2GL(
            l1=l1, l2=l2, gl=lgl, A=Agl, algorithm=algorithm,
            penalty_start=1)
        enet_gl.fit(Xtr, ytr)
        y_pred_test = enet_gl.predict(Xval)
        test_acc = r2_score(yval, y_pred_test)
        y_pred_train = enet_gl.predict(Xtr)
        train_acc = r2_score(ytr, y_pred_train)
        print('test_acc', test_acc)
        print('train_acc', train_acc)
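# sklearn.cross_validation was removed in scikit-learn 0.20.  If this script
# is run against a recent scikit-learn, the equivalent inner splitter is
# model_selection.KFold (a sketch, not part of the original script):
from sklearn.model_selection import KFold

cv_int = list(KFold(n_splits=N_FOLDS_INT).split(Xtrain))
for tr, val in cv_int:
    Xtr, Xval = Xtrain[tr, :], Xtrain[val, :]
    ytr, yval = ytrain[tr], ytrain[val]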
import numpy as np

import parsimony.estimators as estimators
import parsimony.algorithms as algorithms
import parsimony.functions.nesterov.gl as gl

# Shape of the (simulated) 3-D image; example value, adjust to your data.
shape = (4, 4, 4)

# The number of samples:
num_samples = 50
# The number of features per sample:
num_ft = shape[0] * shape[1] * shape[2]
# Define X randomly as simulated data.
X = np.random.rand(num_samples, num_ft)
# Define y as zeros or ones.
y = np.random.randint(0, 2, (num_samples, 1))

k = 0.0  # l2 ridge regression coefficient
l = 0.1  # l1 lasso coefficient

# Two overlapping groups: the first two thirds and the last two thirds
# of the features.
groups = [list(range(0, 2 * num_ft // 3)), list(range(num_ft // 3, num_ft))]
print(groups)
A = gl.linear_operator_from_groups(num_ft, groups)

lambdas = [1e-8, 1e-4, 1, 1e3, 1e10]
for g in lambdas:  # g: group lasso coefficient
    print(g)
    # g = 0.1  # group lasso coefficient
    estimator = estimators.LogisticRegressionL1L2GL(
        k, l, g, A=A,
        algorithm=algorithms.proximal.FISTA(),
        algorithm_params=dict(max_iter=1000))
    print(estimator)
    res = estimator.fit(X, y)
    # print("Estimated prediction rate =", estimator.score(X, y))
    print("Prediction error = ", estimator.score(X, y))
    # print(estimator.beta)
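# To see which of the two overlapping groups the last fitted model actually
# uses, one can inspect the group-wise norms of the estimated coefficients.
# A small sketch, assuming `estimator` is the last model fitted in the loop
# above (its `beta` attribute is the one referenced in the commented-out
# print line):
beta_hat = np.asarray(estimator.beta).ravel()
for idx, group in enumerate(groups):
    group_norm = np.linalg.norm(beta_hat[np.asarray(group)])
    print("group", idx, "size", len(group), "||beta_g||_2 =", group_norm)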