Exemplo n.º 1
0
 def test_solver_agd(self):
     """...Check that the AGD solver handles a Logistic Regression with
     Ridge penalization
     """
     agd_solver = AGD(max_iter=100, verbose=False, linesearch=True)
     # Low precision (decimal=1) is enough: we only check convergence.
     self.check_solver(agd_solver, fit_intercept=True, model="logreg",
                       decimal=1)
Exemplo n.º 2
0
    def setUp(self):
        """Build a GD and an AGD solver whose histories are pre-filled
        with random objective values, so the plotting tests never run an
        actual optimization.
        """
        np.random.seed(238924)

        def _random_objectives(iterations):
            # One standard-normal draw per recorded iteration.
            return [np.random.normal() for _ in iterations]

        def _attach_history(solver, iterations, objectives):
            # Inject a synthetic History into the solver.
            history = History()
            history._set("values", {'n_iter': iterations, 'obj': objectives})
            solver._set("history", history)
            return solver

        # Draw obj1 before n_iter2/obj2 to keep the RNG sequence stable.
        self.n_iter1 = list(range(0, 30, 3))
        self.obj1 = _random_objectives(self.n_iter1)

        self.n_iter2 = list(range(2, 40, 2))
        self.obj2 = _random_objectives(self.n_iter2)

        self.solver1 = _attach_history(GD(), self.n_iter1, self.obj1)
        self.solver2 = _attach_history(AGD(), self.n_iter2, self.obj2)
Exemplo n.º 3
0
    def test_solver_gfb(self):
        """...Check GFB's solver for a Logistic Regression with ElasticNet
        penalization

        Notes
        -----
        Using GFB solver with l1 and l2 penalizations is obviously a bad
        idea as ElasticNet prox is meant to do this, but it allows us to
        compare with another algorithm.
        """
        n_samples, n_features = 200, 10
        y, X, w, c = Test.generate_logistic_data(n_features=n_features,
                                                 n_samples=n_samples)

        # ElasticNet(strength, ratio) decomposes into l1 and l2sq parts
        # with the exact strengths used below.
        strength, ratio = 1e-3, 0.3
        prox_enet = ProxElasticNet(strength, ratio)
        prox_l1 = ProxL1(strength * ratio)
        prox_l2sq = ProxL2Sq(strength * (1 - ratio))

        # GFB works on the two separate proxes...
        gfb = GFB(tol=1e-13, max_iter=1000, verbose=False, step=1)
        Test.prepare_solver(gfb, X, y, prox=None)
        gfb.set_prox([prox_l1, prox_l2sq])
        coeffs_gfb = gfb.solve()

        # ...while AGD uses the combined ElasticNet prox.
        agd = AGD(tol=1e-13, max_iter=1000, verbose=False, step=0.5,
                  linesearch=False)
        Test.prepare_solver(agd, X, y, prox=prox_enet)
        coeffs_agd = agd.solve()

        # Both algorithms must reach (almost) the same minimizer.
        np.testing.assert_almost_equal(coeffs_gfb, coeffs_agd, decimal=1)
Exemplo n.º 4
0
def run_solvers(model, l_l2sq):
    """Fit BFGS, SVRG, SDCA, GD and AGD on ``model`` with an L2 penalty
    of strength ``l_l2sq`` and return the five fitted solvers.

    BFGS is solved first; its solution serves as the reference
    minimizer/minimum recorded in every solver's history.
    """
    # Step sizes fall back to fixed constants when the model exposes no
    # Lipschitz estimate.
    try:
        svrg_step = 1. / model.get_lip_max()
    except AttributeError:
        svrg_step = 1e-3
    try:
        gd_step = 1. / model.get_lip_best()
    except AttributeError:
        gd_step = 1e-1

    # BFGS: solve once to obtain the reference solution, then re-solve so
    # that its own history is recorded relative to that reference too.
    bfgs = BFGS(verbose=False, tol=1e-13)
    bfgs.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    bfgs.solve()
    minimizer = bfgs.solution
    minimum = bfgs.objective(bfgs.solution)
    bfgs.history.set_minimizer(minimizer)
    bfgs.history.set_minimum(minimum)
    bfgs.solve()

    def _solve_with_reference(solver, prox):
        # Record the reference point before solving so the history tracks
        # the distance to the optimum.
        solver.set_model(model).set_prox(prox)
        solver.history.set_minimizer(minimizer)
        solver.history.set_minimum(minimum)
        solver.solve()
        return solver

    svrg = _solve_with_reference(
        SVRG(step=svrg_step, verbose=False, tol=1e-10, seed=seed),
        ProxL2Sq(l_l2sq))
    sdca = _solve_with_reference(
        SDCA(l_l2sq, verbose=False, seed=seed, tol=1e-10),
        ProxZero())
    gd = _solve_with_reference(
        GD(verbose=False, tol=1e-10, step=gd_step, linesearch=False),
        ProxL2Sq(l_l2sq))
    agd = _solve_with_reference(
        AGD(verbose=False, tol=1e-10, step=gd_step, linesearch=False),
        ProxL2Sq(l_l2sq))

    return bfgs, svrg, sdca, gd, agd
Exemplo n.º 5
0
from tick.optim.solver import GD, AGD, SGD, SVRG, SDCA
from tick.optim.model import ModelLogReg
from tick.optim.prox import ProxElasticNet, ProxL1
from tick.plot import plot_history

# Simulate a sparse logistic regression problem.
n_samples, n_features = 5000, 50
weights0 = weights_sparse_gauss(n_features, nnz=10)
intercept0 = 0.2
X, y = SimuLogReg(weights=weights0, intercept=intercept0,
                  n_samples=n_samples, seed=123, verbose=False).simulate()

# Model plus an ElasticNet prox restricted to the weights only
# (the intercept, last coefficient, is left unpenalized).
model = ModelLogReg(fit_intercept=True).fit(X, y)
prox = ProxElasticNet(strength=1e-3, ratio=0.5, range=(0, n_features))

solver_params = {'max_iter': 100, 'tol': 0., 'verbose': False}
x0 = np.zeros(model.n_coeffs)

# Batch solvers use Lipschitz-derived step sizes; SGD needs a
# hand-tuned one.
gd = GD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
gd.solve(x0, step=1 / model.get_lip_best())

agd = AGD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
agd.solve(x0, step=1 / model.get_lip_best())

sgd = SGD(**solver_params).set_model(model).set_prox(prox)
sgd.solve(x0, step=500.)

svrg = SVRG(**solver_params).set_model(model).set_prox(prox)
svrg.solve(x0, step=1 / model.get_lip_max())

# Compare convergence of all solvers on a log scale, relative to the
# best objective reached.
plot_history([gd, agd, sgd, svrg], log_scale=True, dist_min=True)
Exemplo n.º 6
0
 def create_solver():
     """Return a fresh AGD solver configured for a single iteration."""
     solver = AGD(max_iter=1, verbose=False)
     return solver
Exemplo n.º 7
0
class Test(unittest.TestCase):
    """Tests for plot_history rendering of solver/learner histories."""

    def setUp(self):
        """Build a GD and an AGD solver whose histories are pre-filled
        with random objective values, so no optimization actually runs.
        """
        np.random.seed(238924)

        def _random_objectives(iterations):
            # One standard-normal draw per recorded iteration.
            return [np.random.normal() for _ in iterations]

        def _attach_history(solver, iterations, objectives):
            # Inject a synthetic History into the solver.
            history = History()
            history._set("values", {'n_iter': iterations, 'obj': objectives})
            solver._set("history", history)
            return solver

        # Draw obj1 before n_iter2/obj2 to keep the RNG sequence stable.
        self.n_iter1 = list(range(0, 30, 3))
        self.obj1 = _random_objectives(self.n_iter1)

        self.n_iter2 = list(range(2, 40, 2))
        self.obj2 = _random_objectives(self.n_iter2)

        self.solver1 = _attach_history(GD(), self.n_iter1, self.obj1)
        self.solver2 = _attach_history(AGD(), self.n_iter2, self.obj2)

    def _check_line(self, ax, index, expected_x, expected_y, expected_label):
        """Assert that line ``index`` of ``ax`` carries the expected
        (x, y) data and label."""
        xs, ys = ax.lines[index].get_xydata().T
        np.testing.assert_array_equal(xs, expected_x)
        np.testing.assert_array_equal(ys, expected_y)
        self.assertEqual(ax.lines[index].get_label(), expected_label)

    def test_plot_history_solver(self):
        """...Test plot_history rendering given a list of solvers
        """
        labels = ['solver 1', 'solver 2']
        fig = plot_history([self.solver1, self.solver2], show=False,
                           labels=labels)
        ax = fig.axes[0]

        self._check_line(ax, 0, self.n_iter1, self.obj1, labels[0])
        self._check_line(ax, 1, self.n_iter2, self.obj2, labels[1])

    def test_plot_history_solver_dist_min(self):
        """...Test plot_history rendering with dist_min argument
        """
        fig = plot_history([self.solver1, self.solver2], show=False,
                           dist_min=True)
        ax = fig.axes[0]

        # dist_min plots the distance to the best objective reached.
        min_obj = min(min(self.obj1), min(self.obj2))

        self._check_line(ax, 0, self.n_iter1,
                         np.array(self.obj1) - min_obj, 'GD')
        self._check_line(ax, 1, self.n_iter2,
                         np.array(self.obj2) - min_obj, 'AGD')

    def test_plot_history_solver_log_scale(self):
        """...Test plot_history rendering on a log scale
        """
        fig = plot_history([self.solver1, self.solver2], show=False,
                           dist_min=True, log_scale=True)
        self.assertEqual(fig.axes[0].yaxis.get_scale(), 'log')

    def test_plot_history_learner(self):
        """...Test plot_history rendering given a list of learners
        """
        # Learners are labeled by their solver name, so graft the
        # synthetic histories onto learners with distinct solvers.
        learner1 = LogisticRegression(solver='svrg')
        learner1._solver_obj._set('history', self.solver1.history)
        learner2 = LogisticRegression(solver='agd')
        learner2._solver_obj._set('history', self.solver2.history)

        fig = plot_history([learner1, learner2], show=False)
        ax = fig.axes[0]

        self._check_line(ax, 0, self.n_iter1, self.obj1, 'SVRG')
        self._check_line(ax, 1, self.n_iter2, self.obj2, 'AGD')