MODELS["2d_l1l2tv_static_conesta"] = \
    estimators.LogisticRegressionL1L2TV(
        l1, l2, tv, A,
        algorithm=algorithms.proximal.StaticCONESTA(),
        algorithm_params=algorithm_params)

MODELS["2d_l1l2tv_conesta"] = \
    estimators.LogisticRegressionL1L2TV(
        l1, l2, tv, A,
        algorithm=algorithms.proximal.CONESTA(),
        algorithm_params=algorithm_params)

MODELS["2d_l1l2tv_inter_conesta"] = \
    estimators.LogisticRegressionL1L2TV(
        l1, l2, tv, A, penalty_start=1,
        algorithm=algorithms.proximal.CONESTA(),
        algorithm_params=algorithm_params)

Al1tv = l1tv.linear_operator_from_shape(shape, np.prod(shape))

MODELS["2d_l1l2tv_inexactfista"] = \
    estimators.LogisticRegressionL1L2TVInexactFISTA(
        l1, l2, tv, Al1tv,
        algorithm_params=algorithm_params)

MODELS["2d_l1l2tv_inter_inexactfista"] = \
    estimators.LogisticRegressionL1L2TVInexactFISTA(
        l1, l2, tv, Al1tv, penalty_start=1,
        algorithm_params=algorithm_params)

## Get data structure from mesh
# Build a cylinder mesh with the same topology as the 2D grid.
xyz, tri = mesh.cylinder(shape[1], shape[0])
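# Hedged sketch (an assumption, not code from the original file): the
# cylinder mesh built above would typically be turned into a TV linear
# operator with parsimony's mesh helper, the mesh-based counterpart of
# the linear_operator_from_shape call used for the 2D grid. Variable
# names below are illustrative.
#
# import parsimony.functions.nesterov.tv as nesterov_tv
# Amesh = nesterov_tv.linear_operator_from_mesh(xyz, tri)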
def test_smoothed_l1tv(self):
    import numpy as np
    from nose.tools import assert_less

    from parsimony.functions import CombinedFunction
    import parsimony.algorithms.proximal as proximal
    import parsimony.functions as functions
    import parsimony.functions.penalties as penalties
    import parsimony.functions.nesterov.tv as tv
    import parsimony.functions.nesterov.l1tv as l1tv
    import parsimony.utils.start_vectors as start_vectors
    import parsimony.datasets.simulate as simulate

    np.random.seed(42)

    px = 10
    py = 1
    pz = 1
    shape = (pz, py, px)
    n, p = 5, np.prod(shape)

    l = 0.618   # L1 weight.
    k = 0.01    # Squared L2 (ridge) weight.
    g = 1.1     # Total variation weight.

    start_vector = start_vectors.RandomStartVector(normalise=True)
    beta = start_vector.get_vector(p)

    alpha = 1.0
    Sigma = alpha * np.eye(p, p) \
        + (1.0 - alpha) * np.random.randn(p, p)
    mean = np.zeros(p)
    M = np.random.multivariate_normal(mean, Sigma, n)
    e = np.random.randn(n, 1)

    snr = 100.0
    mu = 5e-3

    A = tv.linear_operator_from_shape(shape)

    # Simulate regression data with a known minimiser beta_star.
    funs = [simulate.grad.L1(l),
            simulate.grad.L2Squared(k),
            simulate.grad.TotalVariation(g, A)]
    lr = simulate.LinearRegressionData(funs, M, e, snr=snr,
                                       intercept=False)
    X, y, beta_star = lr.load(beta)

    eps = 1e-8
    max_iter = 810

    alg = proximal.FISTA(eps=eps, max_iter=max_iter)
    function = CombinedFunction()
    function.add_loss(functions.losses.LinearRegression(X, y, mean=False))
    function.add_penalty(penalties.L2Squared(l=k))
    A = l1tv.linear_operator_from_shape(shape, p)
    function.add_prox(l1tv.L1TV(l, g, A=A, mu=mu, penalty_start=0))
    # Equivalent formulation with a smoothed TV penalty and a separate
    # L1 proximal operator:
    # A = tv.linear_operator_from_shape(shape)
    # function.add_penalty(tv.TotalVariation(l=g, A=A, mu=mu,
    #                                        penalty_start=0))
    # function.add_prox(penalties.L1(l=l))

    beta_start = start_vector.get_vector(p)
    beta = alg.run(function, beta_start)

    berr = np.linalg.norm(beta - beta_star)
    assert_less(berr, 5e-1,
                "The found regression vector is not correct.")

    f_parsimony = function.f(beta)
    f_star = function.f(beta_star)
    ferr = abs(f_parsimony - f_star)
    assert_less(ferr, 5e-3,
                "The found regression vector does not give "
                "the correct function value.")
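# Hedged usage sketch (not part of the original test): the penalised model
# minimised above,
#   f(beta) = (1/2) ||X beta - y||^2 + (k/2) ||beta||^2
#             + l ||beta||_1 + g TV(beta),
# can also be fitted through parsimony's high-level estimator API, in the
# same style as the MODELS entries earlier in this file. The parameter
# values reuse the test's l, k and g; treat the exact call as an
# illustrative assumption rather than the canonical test code.
#
# import parsimony.estimators as estimators
# import parsimony.algorithms as algorithms
# import parsimony.functions.nesterov.tv as tv
#
# A = tv.linear_operator_from_shape(shape)
# model = estimators.LinearRegressionL1L2TV(
#     l, k, g, A,
#     algorithm=algorithms.proximal.FISTA(),
#     algorithm_params=dict(eps=1e-8, max_iter=810),
#     mean=False)
# model.fit(X, y)
# beta_hat = model.beta  # Estimated regression vector.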