    def test_smoothed(self):

        import numpy as np

        import parsimony.utils.consts as consts
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions.losses as losses
        import parsimony.functions.penalties as penalties
        import parsimony.functions.nesterov as nesterov
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate.l1_l2_tv as l1_l2_tv

        start_vector = start_vectors.RandomStartVector(normalise=True)

        np.random.seed(42)

        n, p = 75, 100

        penalty_start = 0

        alpha = 0.9
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        beta = start_vector.get_vector(p)
        beta[np.abs(beta) < 0.1] = 0.0

        l = 0.618
        k = 0.0
        g = 0.0

        mu_min = 0.001  # consts.TOLERANCE

        A = np.eye(p)
        A = [A, A, A]
        snr = 100.0
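        # The simulate module constructs (X, y) such that beta_star is the
        # known exact minimiser of the corresponding regularised problem.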
        X, y, beta_star = l1_l2_tv.load(l, k, g, beta, M, e, A, snr=snr)

        function = CombinedFunction()
        function.add_function(losses.LinearRegression(X, y,
                                                      # penalty_start=penalty_start,
                                                      mean=False))
        A = nesterov.l1.A_from_variables(p, penalty_start=penalty_start)
        function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu_min,
                                            penalty_start=penalty_start))
#        function.add_prox(penalties.L1(l, penalty_start=penalty_start))

        fista = proximal.FISTA(eps=mu_min, max_iter=20000)
        beta = fista.run(function, beta)

        assert np.linalg.norm(beta - beta_star) < 5e-2
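
    # A minimal sketch (not from the original tests) of the alternative that
    # the commented-out add_prox line above points at: handle the L1 term
    # exactly through its proximal operator instead of a Nesterov-smoothed
    # penalty. Names mirror those in test_smoothed above.
    #
    #     function = CombinedFunction()
    #     function.add_function(losses.LinearRegression(X, y, mean=False))
    #     function.add_prox(penalties.L1(l, penalty_start=penalty_start))
    #     beta = proximal.FISTA(eps=1e-8, max_iter=20000).run(function, beta)
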
    def test_combo_overlapping_nonsmooth(self):

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(42)

        # Note that p must be divisible by 3!
        n, p = 25, 30
        groups = [list(range(0, 2 * p // 3)), list(range(p // 3, p))]
        weights = [1.5, 0.5]

        A = gl.A_from_groups(p, groups=groups, weights=weights)

        l = 0.618
        k = 1.0 - l
        g = 2.718

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

        eps = 1e-8
        max_iter = 10000

        beta_start = start_vector.get_vector(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
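        # Continuation: solve a sequence of less and less smoothed problems,
        # warm-starting each FISTA run from the previous solution.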
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
#            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
#                                                        A=A, mu=mu,
#                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(functions.losses.LinearRegression(X, y,
                                                               mean=False))
            function.add_penalty(functions.penalties.L2Squared(l=k))
            function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                      penalty_start=0))
            function.add_prox(functions.penalties.L1(l=l))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
#        print berr
        assert berr < 5e-3

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
#        print abs(f_parsimony - f_star)
        assert abs(f_parsimony - f_star) < 5e-6

    def test_nonoverlapping_nonsmooth(self):
        # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(42)

        # Note that p must be even!
        n, p = 25, 20
        groups = [list(range(0, p // 2)), list(range(p // 2, p))]
#        weights = [1.5, 0.5]

        A = gl.A_from_groups(p, groups=groups)  # , weights=weights)
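        # A is a list of sparse linear operators, one per group, used by the
        # Nesterov smoothing of the group lasso penalty.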

        l = 0.0
        k = 0.0
        g = 1.0

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

        eps = 1e-8
        max_iter = 8500

        beta_start = start_vector.get_vector(p)

        mus = [5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
#            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
#                                                        A=A, mu=mu,
#                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(functions.losses.LinearRegression(X, y,
                                                               mean=False))
            function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                      penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        try:
            import spams

            params = {"loss": "square",
                      "regul": "group-lasso-l2",
                      "groups": np.array([1] * (p // 2) + [2] * (p // 2),
                                         dtype=np.int32),
                      "lambda1": g,
                      "max_it": max_iter,
                      "tol": eps,
                      "ista": False,
                      "numThreads": -1,
                     }
            beta_spams, optim_info = \
                    spams.fistaFlat(Y=np.asfortranarray(y),
                                    X=np.asfortranarray(X),
                                    W0=np.asfortranarray(beta_start),
                                    return_optim_info=True,
                                    **params)

        except ImportError:
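            # spams is not installed: fall back on a reference solution
            # precomputed with SPAMS using the parameters above.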
            beta_spams = np.asarray([[14.01111427],
                                     [35.56508563],
                                     [27.38245962],
                                     [22.39716553],
                                     [5.835744940],
                                     [5.841502910],
                                     [2.172209350],
                                     [32.40227785],
                                     [22.48364756],
                                     [26.48822401],
                                     [0.770391500],
                                     [36.28288883],
                                     [31.14118214],
                                     [7.938279340],
                                     [6.800713150],
                                     [6.862914540],
                                     [11.38161678],
                                     [19.63087584],
                                     [16.15855845],
                                     [10.89356615]])

        berr = np.linalg.norm(beta_parsimony - beta_spams)
#        print berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_spams = function.f(beta_spams)
        ferr = abs(f_parsimony - f_spams)
#        print ferr
        assert ferr < 5e-6

    def test_nonoverlapping_smooth(self):
        # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(42)

        # Note that p must be even!
        n, p = 25, 20
        groups = [list(range(0, p // 2)), list(range(p // 2, p))]
#        weights = [1.5, 0.5]

        A = gl.A_from_groups(p, groups=groups)  # , weights=weights)

        l = 0.0
        k = 0.0
        g = 0.9

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu_min = 5e-8
        X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                          mu=mu_min, snr=snr)

        eps = 1e-8
        max_iter = 18000

        beta_start = start_vector.get_vector(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
#            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
#                                                        A=A, mu=mu,
#                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(functions.losses.LinearRegression(X, y,
                                                               mean=False))
            function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                      penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        try:
            import spams

            params = {"loss": "square",
                      "regul": "group-lasso-l2",
                      "groups": np.array([1] * (p // 2) + [2] * (p // 2),
                                         dtype=np.int32),
                      "lambda1": g,
                      "max_it": max_iter,
                      "tol": eps,
                      "ista": False,
                      "numThreads": -1,
                     }
            beta_spams, optim_info = \
                    spams.fistaFlat(Y=np.asfortranarray(y),
                                    X=np.asfortranarray(X),
                                    W0=np.asfortranarray(beta_start),
                                    return_optim_info=True,
                                    **params)
#            print beta_spams

        except ImportError:
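            # spams is not installed: fall back on a reference solution
            # precomputed with SPAMS using the parameters above.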
            beta_spams = np.asarray([[15.56784201],
                                     [39.51679274],
                                     [30.42583205],
                                     [24.8816362],
                                     [6.48671072],
                                     [6.48350546],
                                     [2.41477318],
                                     [36.00285723],
                                     [24.98522184],
                                     [29.43128643],
                                     [0.85520539],
                                     [40.31463542],
                                     [34.60084146],
                                     [8.82322513],
                                     [7.55741642],
                                     [7.62364398],
                                     [12.64594707],
                                     [21.81113869],
                                     [17.95400007],
                                     [12.10507338]])

        berr = np.linalg.norm(beta_parsimony - beta_spams)
#        print berr
        assert berr < 5e-3

        f_parsimony = function.f(beta_parsimony)
        f_spams = function.f(beta_spams)
        ferr = abs(f_parsimony - f_spams)
#        print ferr
        assert ferr < 5e-6

    def test_combo_smooth(self):

        import numpy as np

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.tv as tv
        import parsimony.datasets.simulate.l1_l2_tvmu as l1_l2_tvmu
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(42)

        px = 4
        py = 4
        pz = 4
        shape = (pz, py, px)
        n, p = 50, np.prod(shape)

        l = 0.618
        k = 1.0 - l
        g = 1.1

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        A = tv.linear_operator_from_shape(shape)
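        # Sparse finite-difference operators for total variation over the
        # given 3D image shape.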
        mu_min = 5e-8
        X, y, beta_star = l1_l2_tvmu.load(l=l, k=k, g=g, beta=beta, M=M, e=e,
                                          A=A, mu=mu_min, snr=snr)

        eps = 1e-8
        max_iter = 5300

        beta_start = start_vector.get_vector(p)

        mus = [5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
#            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
#                                                        A=A, mu=mu,
#                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(functions.losses.LinearRegression(X, y,
                                                               mean=False))
            function.add_penalty(tv.TotalVariation(l=g, A=A, mu=mu,
                                                   penalty_start=0))
            function.add_penalty(functions.penalties.L2Squared(l=k))
            function.add_prox(functions.penalties.L1(l=l))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
#        print "berr:", berr
        assert berr < 5e-3

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
        ferr = abs(f_parsimony - f_star)
#        print "ferr:", ferr
        assert ferr < 5e-5
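
    # A minimal sketch (not from the original tests) of fitting the same
    # L1 + L2 + TV model through parsimony's estimator front-end instead of
    # assembling a CombinedFunction by hand.  Assumes this parsimony version
    # exposes estimators.LinearRegressionL1L2TV with this signature:
    #
    #     import parsimony.estimators as estimators
    #     est = estimators.LinearRegressionL1L2TV(
    #         l, k, g, A=A,
    #         algorithm=proximal.FISTA(),
    #         algorithm_params=dict(eps=eps, max_iter=max_iter))
    #     est.fit(X, y)
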
    def test_combo_overlapping_nonsmooth(self):

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
        import parsimony.utils.weights as weights

        np.random.seed(42)

        # Note that p must be divisible by 3!
        n, p = 25, 30
        groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
        group_weights = [1.5, 0.5]

        A = gl.linear_operator_from_groups(p,
                                           groups=groups,
                                           weights=group_weights)

        l = 0.618
        k = 1.0 - l
        g = 2.718

        start_vector = weights.RandomUniformWeights(normalise=True)
        beta = start_vector.get_weights(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

        eps = 1e-8
        max_iter = 10000

        beta_start = start_vector.get_weights(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            #            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
            #                                                        A=A, mu=mu,
            #                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_loss(
                functions.losses.LinearRegression(X, y, mean=False))
            function.add_penalty(functions.penalties.L2Squared(l=k))
            function.add_penalty(
                gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))
            function.add_prox(functions.penalties.L1(l=l))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
        #        print berr
        assert berr < 5e-3

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
        #        print abs(f_parsimony - f_star)
        assert abs(f_parsimony - f_star) < 5e-6

    def test_nonoverlapping_smooth(self):
        # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
        import parsimony.utils.weights as weights

        np.random.seed(42)

        # Note that p must be even!
        n, p = 25, 20
        groups = [list(range(0, int(p / 2))), list(range(int(p / 2), p))]
        #        weights = [1.5, 0.5]

        A = gl.linear_operator_from_groups(p,
                                           groups=groups)  # , weights=weights)

        l = 0.0
        k = 0.0
        g = 0.9

        start_vector = weights.RandomUniformWeights(normalise=True)
        beta = start_vector.get_weights(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu_min = 5e-8
        X, y, beta_star = l1_l2_glmu.load(l,
                                          k,
                                          g,
                                          beta,
                                          M,
                                          e,
                                          A,
                                          mu=mu_min,
                                          snr=snr)

        eps = 1e-8
        max_iter = 18000

        beta_start = start_vector.get_weights(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            #            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
            #                                                        A=A, mu=mu,
            #                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_loss(
                functions.losses.LinearRegression(X, y, mean=False))
            function.add_penalty(
                gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        try:
            import spams

            params = {"loss": "square",
                      "regul": "group-lasso-l2",
                      "groups": np.array([1] * int(p / 2) + [2] * int(p / 2),
                                         dtype=np.int32),
                      "lambda1": g,
                      "max_it": max_iter,
                      "tol": eps,
                      "ista": False,
                      "numThreads": -1,
                     }
            beta_spams, optim_info = \
                    spams.fistaFlat(Y=np.asfortranarray(y),
                                    X=np.asfortranarray(X),
                                    W0=np.asfortranarray(beta_start),
                                    return_optim_info=True,
                                    **params)


#            print beta_spams

        except ImportError:
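            # spams is not installed: fall back on a reference solution
            # precomputed with SPAMS using the parameters above.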
            #            beta_spams = np.asarray([[15.56784201],
            #                                     [39.51679274],
            #                                     [30.42583205],
            #                                     [24.8816362],
            #                                     [6.48671072],
            #                                     [6.48350546],
            #                                     [2.41477318],
            #                                     [36.00285723],
            #                                     [24.98522184],
            #                                     [29.43128643],
            #                                     [0.85520539],
            #                                     [40.31463542],
            #                                     [34.60084146],
            #                                     [8.82322513],
            #                                     [7.55741642],
            #                                     [7.62364398],
            #                                     [12.64594707],
            #                                     [21.81113869],
            #                                     [17.95400007],
            #                                     [12.10507338]])
            beta_spams = np.asarray([[-11.93855944], [42.889350930],
                                     [22.076438880], [9.3869208300],
                                     [-32.73310431], [-32.73509107],
                                     [-42.05298794], [34.844819990],
                                     [9.6210946300], [19.799892400],
                                     [-45.62041548], [44.716039010],
                                     [31.634706630], [-27.37416567],
                                     [-30.27711859], [-30.12673231],
                                     [-18.62803747], [2.3561952400],
                                     [-6.476922020], [-19.86630857]])

        berr = np.linalg.norm(beta_parsimony - beta_spams)
        #        print berr
        assert berr < 5e-3

        f_parsimony = function.f(beta_parsimony)
        f_spams = function.f(beta_spams)
        ferr = abs(f_parsimony - f_spams)
        #        print ferr
        assert ferr < 5e-6

    def test_overlapping_smooth(self):

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
        import parsimony.utils.weights as weights

        np.random.seed(314)

        # Note that p must be divisible by 3!
        n, p = 25, 30
        groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
        group_weights = [1.5, 0.5]

        A = gl.linear_operator_from_groups(p, groups=groups,
                                           weights=group_weights)

        l = 0.0
        k = 0.0
        g = 0.9

        start_vector = weights.RandomUniformWeights(normalise=True)
        beta = start_vector.get_weights(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu_min = 5e-8
        X, y, beta_star = l1_l2_glmu.load(l, k, g, beta, M, e, A,
                                          mu=mu_min, snr=snr)

        eps = 1e-8
        max_iter = 15000

        beta_start = start_vector.get_weights(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
#            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
#                                                        A=A, mu=mu,
#                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_loss(functions.losses.LinearRegression(X, y,
                                                                mean=False))
            function.add_penalty(gl.GroupLassoOverlap(l=g, A=A, mu=mu,
                                                      penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
#        print berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
#        print(abs(f_parsimony - f_star))
        assert abs(f_parsimony - f_star) < 5e-6
    def test_smoothed_l1tv(self):

        import numpy as np
        from nose.tools import assert_less

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.penalties as penalties
        import parsimony.functions.nesterov.tv as tv
        import parsimony.functions.nesterov.l1tv as l1tv
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate as simulate

        np.random.seed(42)

        px = 10
        py = 1
        pz = 1
        shape = (pz, py, px)
        n, p = 5, np.prod(shape)

        l = 0.618
        k = 0.01
        g = 1.1

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
            + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu = 5e-3

        A = tv.linear_operator_from_shape(shape)
#        X, y, beta_star = l1_l2_tvmu.load(l=l, k=k, g=g, beta=beta, M=M, e=e,
#                                        A=A, mu=mu, snr=snr)

        funs = [simulate.grad.L1(l),
                simulate.grad.L2Squared(k),
                simulate.grad.TotalVariation(g, A)]
        lr = simulate.LinearRegressionData(funs, M, e, snr=snr,
                                           intercept=False)

        X, y, beta_star = lr.load(beta)

        eps = 1e-8
        max_iter = 810

        alg = proximal.FISTA(eps=eps, max_iter=max_iter)

        function = CombinedFunction()
        function.add_loss(functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(penalties.L2Squared(l=k))
        A = l1tv.linear_operator_from_shape(shape, p)
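        # l1tv.L1TV handles the combined L1 + TV term through a single
        # smoothed proximal operator rather than two separate penalties.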
        function.add_prox(l1tv.L1TV(l, g, A=A, mu=mu, penalty_start=0))
#        A = tv.linear_operator_from_shape(shape)
#        function.add_penalty(tv.TotalVariation(l=g, A=A, mu=mu,
#                                               penalty_start=0))
#        function.add_prox(penalties.L1(l=l))

        beta_start = start_vector.get_vector(p)
        beta = alg.run(function, beta_start)

        berr = np.linalg.norm(beta - beta_star)
#        print "berr:", berr
#        assert berr < 5e-1
        assert_less(berr, 5e-1, "The found regression vector is not correct.")

        f_parsimony = function.f(beta)
        f_star = function.f(beta_star)
        ferr = abs(f_parsimony - f_star)
#        print "ferr:", ferr
#        assert ferr < 5e-3
        assert_less(ferr, 5e-3, "The found regression vector is not correct.")
    def test_nonsmooth(self):

        import numpy as np

        import parsimony.utils.consts as consts
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions.losses as losses
        import parsimony.functions.penalties as penalties
        import parsimony.functions.nesterov as nesterov
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate.l1_l2_tv as l1_l2_tv

        start_vector = start_vectors.RandomStartVector(normalise=True)

        np.random.seed(42)

        n, p = 75, 100

        alpha = 0.9
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        beta_start = start_vector.get_vector(p)
        beta_start[np.abs(beta_start) < 0.1] = 0.0

        l = 0.618
        k = 0.0
        g = 0.0

        A = np.eye(p)
        A = [A, A, A]
        snr = 100.0
        X, y, beta_star = l1_l2_tv.load(l, k, g, beta_start, M, e, A, snr=snr)

        beta = beta_start

        for mu in [5e-2, 5e-3, 5e-4, 5e-5]:
            function = CombinedFunction()
            function.add_function(losses.LinearRegression(X, y, mean=False))

            A = nesterov.l1.linear_operator_from_variables(p, penalty_start=0)
            function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu,
                                                penalty_start=0))

            fista = proximal.FISTA(eps=consts.TOLERANCE, max_iter=910)
            beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
        #        print "berr:", berr
        assert berr < 5e-2

        # Test proximal operator
        beta = beta_start
        function = CombinedFunction()
        function.add_function(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(p, penalty_start=0)
        #        function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu_min,
        #                                            penalty_start=penalty_start))
        function.add_prox(nesterov.l1.L1(l, A=A, mu=5e-5, penalty_start=0))

        fista = proximal.FISTA(eps=consts.TOLERANCE, max_iter=800)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
        #        print "berr:", berr
        assert berr < 5e-0
    def test_smoothed_l1tv(self):

        import numpy as np

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.penalties as penalties
        import parsimony.functions.nesterov.tv as tv
        import parsimony.functions.nesterov.l1tv as l1tv
        import parsimony.datasets.simulate.l1_l2_tvmu as l1_l2_tvmu
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate as simulate

        np.random.seed(42)

        px = 10
        py = 1
        pz = 1
        shape = (pz, py, px)
        n, p = 5, np.prod(shape)

        l = 0.618
        k = 0.01
        g = 1.1

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu = 5e-3

        A = tv.linear_operator_from_shape(shape)
        #        X, y, beta_star = l1_l2_tvmu.load(l=l, k=k, g=g, beta=beta, M=M, e=e,
        #                                        A=A, mu=mu, snr=snr)

        funs = [
            simulate.grad.L1(l),
            simulate.grad.L2Squared(k),
            simulate.grad.TotalVariation(g, A)
        ]
        lr = simulate.LinearRegressionData(funs,
                                           M,
                                           e,
                                           snr=snr,
                                           intercept=False)

        X, y, beta_star = lr.load(beta)

        eps = 1e-8
        max_iter = 810

        alg = proximal.FISTA(eps=eps, max_iter=max_iter)

        function = CombinedFunction()
        function.add_function(
            functions.losses.LinearRegression(X, y, mean=False))
        function.add_penalty(penalties.L2Squared(l=k))
        A = l1tv.linear_operator_from_shape(shape, p)
        function.add_prox(l1tv.L1TV(l, g, A=A, mu=mu, penalty_start=0))
        #        A = tv.linear_operator_from_shape(shape)
        #        function.add_penalty(tv.TotalVariation(l=g, A=A, mu=mu,
        #                                               penalty_start=0))
        #        function.add_prox(penalties.L1(l=l))

        beta_start = start_vector.get_vector(p)
        beta = alg.run(function, beta_start)

        berr = np.linalg.norm(beta - beta_star)
        #        print "berr:", berr
        assert berr < 5e-1

        f_parsimony = function.f(beta)
        f_star = function.f(beta_star)
        ferr = abs(f_parsimony - f_star)
        #        print "ferr:", ferr
        assert ferr < 5e-3

    def test_smoothed(self):

        import numpy as np
        import scipy.sparse
        from nose.tools import assert_less

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions.losses as losses
        import parsimony.functions.nesterov as nesterov
        import parsimony.utils.weights as weights
        import parsimony.datasets.simulate.l1_l2_tv as l1_l2_tv

        start_vector = weights.RandomUniformWeights(normalise=True)

        np.random.seed(42)

        n, p = 75, 100

        penalty_start = 0

        alpha = 0.9
        V = np.random.randn(p, p)
        Sigma = alpha * np.eye(p, p) \
            + (1.0 - alpha) * np.dot(V.T, V)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        beta = start_vector.get_weights(p)
        beta[np.abs(beta) < 0.1] = 0.0

        l = 0.618
        k = 0.0
        g = 0.0

        mu_min = 0.001  # consts.TOLERANCE

        A = scipy.sparse.eye(p)
        # A = np.eye(p)
        A = [A, A, A]
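        # Identity matrices stand in for the three TV difference operators;
        # with g = 0 the TV term vanishes from the simulated model.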
        snr = 100.0
        X, y, beta_star = l1_l2_tv.load(l, k, g, beta, M, e, A, snr=snr)

        function = CombinedFunction()
        function.add_loss(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(p,
                                                       penalty_start=penalty_start)
        function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu_min,
                                            penalty_start=penalty_start))
#        function.add_prox(penalties.L1(l, penalty_start=penalty_start))

        fista = proximal.FISTA(eps=mu_min, max_iter=23500)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
#        print "berr:", berr
#        assert berr < 5
        assert_less(berr, 5.0, "The found regression vector is not correct.")

        # Test proximal operator
        function = CombinedFunction()
        function.add_loss(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(p,
                                                       penalty_start=penalty_start)
        function.add_prox(nesterov.l1.L1(l, A=A, mu=mu_min,
                                         penalty_start=penalty_start))

        fista = proximal.FISTA(eps=mu_min, max_iter=20000)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
#        print "berr:", berr
#        assert berr < 0.1
        assert_less(berr, 0.1, "The found regression vector is not correct.")

    def test_overlapping_smooth(self):

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_glmu as l1_l2_glmu
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(314)

        # Note that p must be divisible by 3!
        n, p = 25, 30
        groups = [list(range(0, 2 * int(p / 3))), list(range(int(p / 3), p))]
        weights = [1.5, 0.5]

        A = gl.linear_operator_from_groups(p, groups=groups, weights=weights)

        l = 0.0
        k = 0.0
        g = 0.9

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        mu_min = 5e-8
        X, y, beta_star = l1_l2_glmu.load(l,
                                          k,
                                          g,
                                          beta,
                                          M,
                                          e,
                                          A,
                                          mu=mu_min,
                                          snr=snr)

        eps = 1e-8
        max_iter = 15000

        beta_start = start_vector.get_vector(p)

        mus = [5e-0, 5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            #            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
            #                                                        A=A, mu=mu,
            #                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(
                functions.losses.LinearRegression(X, y, mean=False))
            function.add_penalty(
                gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
        #        print berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
        #        print abs(f_parsimony - f_star)
        assert abs(f_parsimony - f_star) < 5e-7

    def test_nonoverlapping_nonsmooth(self):
        # Spams: http://spams-devel.gforge.inria.fr/doc-python/doc_spams.pdf

        import numpy as np
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.gl as gl
        import parsimony.datasets.simulate.l1_l2_gl as l1_l2_gl
        import parsimony.utils.start_vectors as start_vectors

        np.random.seed(42)

        # Note that p must be even!
        n, p = 25, 20
        groups = [list(range(0, int(p / 2))), list(range(int(p / 2), p))]
        #        weights = [1.5, 0.5]

        A = gl.linear_operator_from_groups(p,
                                           groups=groups)  # , weights=weights)

        l = 0.0
        k = 0.0
        g = 1.0

        start_vector = start_vectors.RandomStartVector(normalise=True)
        beta = start_vector.get_vector(p)

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        X, y, beta_star = l1_l2_gl.load(l, k, g, beta, M, e, A, snr=snr)

        eps = 1e-8
        max_iter = 8500

        beta_start = start_vector.get_vector(p)

        mus = [5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            #            function = functions.LinearRegressionL1L2GL(X, y, l, k, g,
            #                                                        A=A, mu=mu,
            #                                                        penalty_start=0)

            function = CombinedFunction()
            function.add_function(
                functions.losses.LinearRegression(X, y, mean=False))
            function.add_penalty(
                gl.GroupLassoOverlap(l=g, A=A, mu=mu, penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        try:
            import spams

            params = {"loss": "square",
                      "regul": "group-lasso-l2",
                      "groups": np.array([1] * int(p / 2) + [2] * int(p / 2),
                                         dtype=np.int32),
                      "lambda1": g,
                      "max_it": max_iter,
                      "tol": eps,
                      "ista": False,
                      "numThreads": -1,
                     }
            beta_spams, optim_info = \
                    spams.fistaFlat(Y=np.asfortranarray(y),
                                    X=np.asfortranarray(X),
                                    W0=np.asfortranarray(beta_start),
                                    return_optim_info=True,
                                    **params)

        except ImportError:
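            # spams is not installed: fall back on a reference solution
            # precomputed with SPAMS using the parameters above.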
            beta_spams = np.asarray(
                [[14.01111427], [35.56508563], [27.38245962], [22.39716553],
                 [5.835744940], [5.841502910], [2.172209350], [32.40227785],
                 [22.48364756], [26.48822401], [0.770391500], [36.28288883],
                 [31.14118214], [7.938279340], [6.800713150], [6.862914540],
                 [11.38161678], [19.63087584], [16.15855845], [10.89356615]])

        berr = np.linalg.norm(beta_parsimony - beta_spams)
        #        print berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_spams = function.f(beta_spams)
        ferr = abs(f_parsimony - f_spams)
        #        print ferr
        assert ferr < 5e-6
    def test_smoothed(self):

        import numpy as np
        from nose.tools import assert_less

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions.losses as losses
        import parsimony.functions.nesterov as nesterov
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate.l1_l2_tv as l1_l2_tv

        start_vector = start_vectors.RandomStartVector(normalise=True)

        np.random.seed(42)

        n, p = 75, 100

        penalty_start = 0

        alpha = 0.9
        V = np.random.randn(p, p)
        Sigma = alpha * np.eye(p, p) \
            + (1.0 - alpha) * np.dot(V.T, V)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        beta = start_vector.get_vector(p)
        beta[np.abs(beta) < 0.1] = 0.0

        l = 0.618
        k = 0.0
        g = 0.0

        mu_min = 0.001  # consts.TOLERANCE

        A = np.eye(p)
        A = [A, A, A]
        snr = 100.0
        X, y, beta_star = l1_l2_tv.load(l, k, g, beta, M, e, A, snr=snr)

        function = CombinedFunction()
        function.add_loss(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(
            p, penalty_start=penalty_start)
        function.add_penalty(
            nesterov.l1.L1(l, A=A, mu=mu_min, penalty_start=penalty_start))
        #        function.add_prox(penalties.L1(l, penalty_start=penalty_start))

        fista = proximal.FISTA(eps=mu_min, max_iter=23500)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
        #        print "berr:", berr
        #        assert berr < 5
        assert_less(berr, 5.0, "The found regression vector is not correct.")

        # Test proximal operator
        function = CombinedFunction()
        function.add_loss(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(
            p, penalty_start=penalty_start)
        function.add_prox(
            nesterov.l1.L1(l, A=A, mu=mu_min, penalty_start=penalty_start))

        fista = proximal.FISTA(eps=mu_min, max_iter=20000)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
        #        print "berr:", berr
        #        assert berr < 0.1
        assert_less(berr, 0.1, "The found regression vector is not correct.")

    def test_smooth_2D_l1(self):

        import numpy as np

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.grouptv as grouptv
        import parsimony.datasets.simulate.l1_l2_grouptvmu as l1_l2_grouptvmu
        import parsimony.utils.weights as weights

        np.random.seed(1337)

        n, p = 10, 18
        shape = (1, 3, 6)

        l = 0.618
        k = 0.0
        g = 1.618

        start_vector = weights.ZerosWeights()
        beta = start_vector.get_weights(p)

        rects = [[(0, 1), (0, 3)], [(1, 2), (3, 6)]]
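        # Each rect lists one (start, stop) range per spatial axis, defining
        # the rectangular region that forms a group for the group TV penalty.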

        beta = np.reshape(beta, shape[1:])
        beta[0:2, 0:4] = 1.0
        beta[1:3, 3:6] = 2.0
        beta[1, 3] = 1.5
        beta = np.reshape(beta, (p, 1))

        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        A = grouptv.linear_operator_from_rects(rects, shape)
        mu_min = 5e-8
        X, y, beta_star = l1_l2_grouptvmu.load(l=l, k=k, g=g, beta=beta,
                                               M=M, e=e, A=A, mu=mu_min,
                                               snr=snr)

        eps = 1e-5
        max_iter = 10000

        beta_start = start_vector.get_weights(p)

        mus = [5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            function = CombinedFunction()
            function.add_loss(functions.losses.LinearRegression(X, y,
                                                                mean=False))
            function.add_penalty(grouptv.GroupTotalVariation(l=g,
                                                             A=A, mu=mu,
                                                             penalty_start=0))

            function.add_prox(functions.penalties.L1(l=l, penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
#        print "berr:", berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
        ferr = abs(f_parsimony - f_star)
#        print "ferr:", ferr
        assert ferr < 5e-5

    def test_smooth_1D_l2(self):

        import numpy as np

        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions as functions
        import parsimony.functions.nesterov.grouptv as grouptv
        import parsimony.datasets.simulate.l1_l2_grouptvmu as l1_l2_grouptvmu
        import parsimony.utils.weights as weights

        np.random.seed(1337)

        n, p = 10, 15
        shape = (1, 1, p)

        l = 0.0
        k = 0.1  # Must have some regularisation for all variables.
        g = 0.9

        start_vector = weights.RandomUniformWeights(normalise=True)
        beta = start_vector.get_weights(p)

        rects = [[(0, 5)], [(4, 10)], [(13, 15)]]
                              # 0 [ 5 ] 0
                              # 1 [ 5 ] 0
                              # 2 [ 5 ] 0
                              # 3 [ 5 ] 0
                              # 4 [ 4 ] 0 / 1
        beta[:5, :] = 5.0     # 5 [ 3 ] 1
        beta[4, :] = 4.0      # 6 [ 3 ] 1
        beta[5:10, :] = 3.0   # 7 [ 3 ] 1
        beta[13:15, :] = 7.0  # 8 [ 3 ] 1
                              # 9 [ 3 ] 1
                              # 0 [ x ] -
                              # 1 [ x ] -
                              # 2 [ x ] -
                              # 3 [ 7 ] 2
                              # 4 [ 7 ] 2
        alpha = 1.0
        Sigma = alpha * np.eye(p, p) \
              + (1.0 - alpha) * np.random.randn(p, p)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        snr = 100.0

        A = grouptv.linear_operator_from_rects(rects, shape)
        mu_min = 5e-8
        X, y, beta_star = l1_l2_grouptvmu.load(l=l, k=k, g=g, beta=beta,
                                               M=M, e=e, A=A, mu=mu_min,
                                               snr=snr)

        eps = 1e-5
        max_iter = 12000

        beta_start = start_vector.get_weights(p)

        mus = [5e-2, 5e-4, 5e-6, 5e-8]
        fista = proximal.FISTA(eps=eps, max_iter=max_iter // len(mus))

        beta_parsimony = beta_start
        for mu in mus:
            function = CombinedFunction()
            function.add_loss(functions.losses.LinearRegression(X, y,
                                                                mean=False))
            function.add_penalty(grouptv.GroupTotalVariation(l=g,
                                                             A=A, mu=mu,
                                                             penalty_start=0))

            function.add_penalty(functions.penalties.L2Squared(l=k,
                                                             penalty_start=0))

            beta_parsimony = fista.run(function, beta_parsimony)

        berr = np.linalg.norm(beta_parsimony - beta_star)
#        print "berr:", berr
        assert berr < 5e-2

        f_parsimony = function.f(beta_parsimony)
        f_star = function.f(beta_star)
        ferr = abs(f_parsimony - f_star)
#        print "ferr:", ferr
        assert ferr < 5e-5
    def test_nonsmooth(self):

        import numpy as np
        from nose.tools import assert_less

        import parsimony.utils.consts as consts
        from parsimony.functions import CombinedFunction
        import parsimony.algorithms.proximal as proximal
        import parsimony.functions.losses as losses
        import parsimony.functions.nesterov as nesterov
        import parsimony.utils.start_vectors as start_vectors
        import parsimony.datasets.simulate.l1_l2_tv as l1_l2_tv

        start_vector = start_vectors.RandomStartVector(normalise=True)

        np.random.seed(42)

        n, p = 75, 100

        alpha = 0.9
        V = np.random.randn(p, p)
        Sigma = alpha * np.eye(p, p) \
            + (1.0 - alpha) * np.dot(V.T, V)
        mean = np.zeros(p)
        M = np.random.multivariate_normal(mean, Sigma, n)
        e = np.random.randn(n, 1)

        beta_start = start_vector.get_vector(p)
        beta_start[np.abs(beta_start) < 0.1] = 0.0

        l = 0.618
        k = 0.0
        g = 0.0

        A = np.eye(p)
        A = [A, A, A]
        snr = 100.0
        X, y, beta_star = l1_l2_tv.load(l, k, g, beta_start, M, e, A, snr=snr)

        beta = beta_start

        for mu in [5e-2, 5e-3, 5e-4, 5e-5]:
            function = CombinedFunction()
            function.add_loss(losses.LinearRegression(X, y, mean=False))

            A = nesterov.l1.linear_operator_from_variables(p, penalty_start=0)
            function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu,
                                                penalty_start=0))

            fista = proximal.FISTA(eps=consts.TOLERANCE, max_iter=2300)
            beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
#        print "berr:", berr
#        assert berr < 5e-2
        assert_less(berr, 5e-2, "The found regression vector is not correct.")

        # Test proximal operator
        beta = beta_start
        function = CombinedFunction()
        function.add_loss(losses.LinearRegression(X, y, mean=False))
        A = nesterov.l1.linear_operator_from_variables(p, penalty_start=0)
#        function.add_penalty(nesterov.l1.L1(l, A=A, mu=mu_min,
#                                            penalty_start=penalty_start))
        function.add_prox(nesterov.l1.L1(l, A=A, mu=5e-5, penalty_start=0))

        fista = proximal.FISTA(eps=consts.TOLERANCE, max_iter=2000)
        beta = fista.run(function, beta)

        berr = np.linalg.norm(beta - beta_star)
#        print "berr:", berr
#        assert berr < 5e-0
        assert_less(berr, 5e-0, "The found regression vector is not correct.")