def test_solver_gd(self):
    """...Check GD solver for Logistic Regression with Ridge penalization
    """
    # Linesearch lets GD pick its own step at every iteration, so no
    # explicit step size is required here.
    gd_solver = GD(max_iter=100, verbose=False, linesearch=True)
    self.check_solver(gd_solver, fit_intercept=True, model="logreg",
                      decimal=1)
def setUp(self):
    """Build two solvers (GD and AGD) whose histories hold deterministic,
    seeded pseudo-random objective values for plotting tests.
    """
    np.random.seed(238924)

    def _with_fake_history(solver, iterations):
        # Draw one objective value per recorded iteration and inject a
        # hand-built History into the solver.
        objectives = [np.random.normal() for _ in iterations]
        history = History()
        history._set("values", {'n_iter': iterations, 'obj': objectives})
        solver._set("history", history)
        return solver, objectives

    self.n_iter1 = list(range(0, 30, 3))
    self.solver1, self.obj1 = _with_fake_history(GD(), self.n_iter1)

    self.n_iter2 = list(range(2, 40, 2))
    self.solver2, self.obj2 = _with_fake_history(AGD(), self.n_iter2)
def _solve_and_track(solver, prox, minimizer, minimum, model):
    """Attach *model* and *prox* to *solver*, record the reference optimum
    in its history so convergence can be measured, then run it.

    Returns the solver itself for convenient chaining.
    """
    solver.set_model(model).set_prox(prox)
    solver.history.set_minimizer(minimizer)
    solver.history.set_minimum(minimum)
    solver.solve()
    return solver


def run_solvers(model, l_l2sq):
    """Run BFGS, SVRG, SDCA, GD and AGD on *model* with an L2 penalty of
    strength *l_l2sq*, recording each solver's distance to the optimum.

    The BFGS solution is used as the reference optimum for every history.

    Returns
    -------
    tuple
        ``(bfgs, svrg, sdca, gd, agd)``, each already solved.
    """
    # Step sizes come from the model's Lipschitz constants when available;
    # otherwise fall back to conservative fixed defaults.
    try:
        svrg_step = 1. / model.get_lip_max()
    except AttributeError:
        svrg_step = 1e-3
    try:
        gd_step = 1. / model.get_lip_best()
    except AttributeError:
        gd_step = 1e-1

    # BFGS provides the reference optimum: solve once to find it, record it
    # in the history, then solve again so BFGS's own history also tracks the
    # distance to that optimum.
    bfgs = BFGS(verbose=False, tol=1e-13)
    bfgs.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    bfgs.solve()
    bfgs.history.set_minimizer(bfgs.solution)
    bfgs.history.set_minimum(bfgs.objective(bfgs.solution))
    bfgs.solve()

    # Reference optimum shared by all remaining solvers (hoisted: the
    # original evaluated bfgs.objective once per solver).
    minimizer = bfgs.solution
    minimum = bfgs.objective(bfgs.solution)

    svrg = _solve_and_track(
        SVRG(step=svrg_step, verbose=False, tol=1e-10, seed=seed),
        ProxL2Sq(l_l2sq), minimizer, minimum, model)
    # SDCA takes the L2 strength in its constructor, hence ProxZero here.
    sdca = _solve_and_track(
        SDCA(l_l2sq, verbose=False, seed=seed, tol=1e-10),
        ProxZero(), minimizer, minimum, model)
    gd = _solve_and_track(
        GD(verbose=False, tol=1e-10, step=gd_step, linesearch=False),
        ProxL2Sq(l_l2sq), minimizer, minimum, model)
    agd = _solve_and_track(
        AGD(verbose=False, tol=1e-10, step=gd_step, linesearch=False),
        ProxL2Sq(l_l2sq), minimizer, minimum, model)

    return bfgs, svrg, sdca, gd, agd
def create_solver():
    """Return a quiet GD solver capped at a single iteration."""
    solver = GD(max_iter=1, verbose=False)
    return solver
def create_solver():
    """Return a quiet GD solver with a fixed step of 0.1 and 100 iterations."""
    solver = GD(max_iter=100, verbose=False, step=0.1)
    return solver
class Test(unittest.TestCase):
    def setUp(self):
        """Build a GD and an AGD solver carrying deterministic, seeded
        pseudo-random histories for plot_history tests.
        """
        np.random.seed(238924)

        self.n_iter1 = list(range(0, 30, 3))
        self.obj1 = [np.random.normal() for _ in self.n_iter1]
        self.n_iter2 = list(range(2, 40, 2))
        self.obj2 = [np.random.normal() for _ in self.n_iter2]

        self.solver1 = GD()
        hist1 = History()
        hist1._set("values", {'n_iter': self.n_iter1, 'obj': self.obj1})
        self.solver1._set("history", hist1)

        self.solver2 = AGD()
        hist2 = History()
        hist2._set("values", {'n_iter': self.n_iter2, 'obj': self.obj2})
        self.solver2._set("history", hist2)

    def _check_line(self, line, expected_x, expected_y, expected_label):
        """Assert a matplotlib line carries the given data and legend label."""
        xs, ys = line.get_xydata().T
        np.testing.assert_array_equal(xs, expected_x)
        np.testing.assert_array_equal(ys, expected_y)
        self.assertEqual(line.get_label(), expected_label)

    def test_plot_history_solver(self):
        """...Test plot_history rendering given a list of solvers
        """
        labels = ['solver 1', 'solver 2']
        fig = plot_history([self.solver1, self.solver2], show=False,
                           labels=labels)
        ax = fig.axes[0]
        self._check_line(ax.lines[0], self.n_iter1, self.obj1, labels[0])
        self._check_line(ax.lines[1], self.n_iter2, self.obj2, labels[1])

    def test_plot_history_solver_dist_min(self):
        """...Test plot_history rendering with dist_min argument
        """
        fig = plot_history([self.solver1, self.solver2], show=False,
                           dist_min=True)
        ax = fig.axes[0]
        # With dist_min, curves are shifted by the global minimum objective.
        global_min = min(min(self.obj1), min(self.obj2))
        self._check_line(ax.lines[0], self.n_iter1,
                         np.array(self.obj1) - global_min, 'GD')
        self._check_line(ax.lines[1], self.n_iter2,
                         np.array(self.obj2) - global_min, 'AGD')

    def test_plot_history_solver_log_scale(self):
        """...Test plot_history rendering on a log scale
        """
        fig = plot_history([self.solver1, self.solver2], show=False,
                           dist_min=True, log_scale=True)
        self.assertEqual(fig.axes[0].yaxis.get_scale(), 'log')

    def test_plot_history_learner(self):
        """...Test plot_history rendering given a list of learners
        """
        # Graft the prepared histories onto learners' internal solvers so
        # labels come from the learner's solver name.
        learner1 = LogisticRegression(solver='svrg')
        learner1._solver_obj._set('history', self.solver1.history)
        learner2 = LogisticRegression(solver='agd')
        learner2._solver_obj._set('history', self.solver2.history)

        fig = plot_history([learner1, learner2], show=False)
        ax = fig.axes[0]
        self._check_line(ax.lines[0], self.n_iter1, self.obj1, 'SVRG')
        self._check_line(ax.lines[1], self.n_iter2, self.obj2, 'AGD')
from tick.prox import ProxElasticNet, ProxL1
from tick.plot import plot_history

# Simulate a sparse logistic regression problem.
n_samples, n_features = 5000, 50
weights0 = weights_sparse_gauss(n_features, nnz=10)
intercept0 = 0.2
X, y = SimuLogReg(weights=weights0, intercept=intercept0,
                  n_samples=n_samples, seed=123, verbose=False).simulate()

model = ModelLogReg(fit_intercept=True).fit(X, y)
prox = ProxElasticNet(strength=1e-3, ratio=0.5, range=(0, n_features))

solver_params = {'max_iter': 100, 'tol': 0., 'verbose': False}
x0 = np.zeros(model.n_coeffs)

# Steps derived from the model's Lipschitz constants.
best_step = 1 / model.get_lip_best()
max_step = 1 / model.get_lip_max()

gd = GD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
gd.solve(x0, step=best_step)

agd = AGD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
agd.solve(x0, step=best_step)

sgd = SGD(**solver_params).set_model(model).set_prox(prox)
sgd.solve(x0, step=500.)

svrg = SVRG(**solver_params).set_model(model).set_prox(prox)
svrg.solve(x0, step=max_step)

# Compare convergence of all solvers on a common plot.
plot_history([gd, agd, sgd, svrg], log_scale=True, dist_min=True)