def test_ModelLogReg(self):
    """...Numerical consistency check of loss and gradient for Logistic
    Regression
    """
    np.random.seed(12)
    n_samples, n_features = 5000, 10
    w0 = np.random.randn(n_features)
    c0 = np.random.randn()

    # First check with intercept
    X, y = SimuLogReg(w0, c0, n_samples=n_samples,
                      verbose=False).simulate()
    X_spars = csr_matrix(X)
    model = ModelLogReg(fit_intercept=True).fit(X, y)
    model_spars = ModelLogReg(fit_intercept=True).fit(X_spars, y)
    self.run_test_for_glm(model, model_spars, 1e-5, 1e-4)
    self._test_glm_intercept_vs_hardcoded_intercept(model)

    # Then check without intercept
    X, y = SimuLogReg(w0, None, n_samples=n_samples,
                      verbose=False, seed=2038).simulate()
    X_spars = csr_matrix(X)
    model = ModelLogReg(fit_intercept=False).fit(X, y)
    model_spars = ModelLogReg(fit_intercept=False).fit(X_spars, y)
    self.run_test_for_glm(model, model_spars, 1e-5, 1e-4)
    self._test_glm_intercept_vs_hardcoded_intercept(model)

    # Test for the Lipschitz constants without intercept
    self.assertAlmostEqual(model.get_lip_best(), 0.67184209642814952)
    self.assertAlmostEqual(model.get_lip_mean(), 2.48961431697108)
    self.assertAlmostEqual(model.get_lip_max(), 13.706542412138093)
    self.assertAlmostEqual(model_spars.get_lip_mean(), model.get_lip_mean())
    self.assertAlmostEqual(model_spars.get_lip_max(), model.get_lip_max())

    # Test for the Lipschitz constants with intercept
    model = ModelLogReg(fit_intercept=True).fit(X, y)
    model_spars = ModelLogReg(fit_intercept=True).fit(X_spars, y)
    self.assertAlmostEqual(model.get_lip_best(), 0.671892096428)
    self.assertAlmostEqual(model.get_lip_mean(), 2.739614316971082)
    self.assertAlmostEqual(model.get_lip_max(), 13.956542412138093)
    self.assertAlmostEqual(model_spars.get_lip_mean(), model.get_lip_mean())
    self.assertAlmostEqual(model_spars.get_lip_max(), model.get_lip_max())
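
# A minimal sketch (not part of tick) of the standard Lipschitz bounds that the
# hard-coded constants above correspond to: the logistic loss is 1/4-smooth, so
# the per-sample constant is ||x_i||^2 / 4 (with a column of ones appended when
# there is an intercept, hence the 0.25 shift between the two blocks of
# assertions), the mean/max of these give lip_mean / lip_max, and the top
# eigenvalue of X^T X / (4 n) gives lip_best. The exact convention used by
# ModelLogReg.get_lip_* is an assumption here.
import numpy as np


def logreg_lip_constants(X, fit_intercept=False):
    """Lipschitz constants of the logistic-loss gradient for the rows of X."""
    if fit_intercept:
        X = np.hstack([X, np.ones((X.shape[0], 1))])
    row_norms = np.sum(X ** 2, axis=1)  # ||x_i||^2 for each sample
    lip_mean = row_norms.mean() / 4.
    lip_max = row_norms.max() / 4.
    # Smoothness constant of the full (averaged) gradient
    lip_best = np.linalg.eigvalsh(X.T @ X / (4. * X.shape[0])).max()
    return lip_best, lip_mean, lip_max
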
import numpy as np

from tick.optim.solver import GD, AGD, SGD, SVRG, SDCA
from tick.optim.model import ModelLogReg
from tick.optim.prox import ProxElasticNet, ProxL1
from tick.plot import plot_history
# SimuLogReg and weights_sparse_gauss are assumed to come from tick.simulation,
# matching the tick.optim-era API used above
from tick.simulation import SimuLogReg, weights_sparse_gauss

n_samples, n_features = 5000, 50
weights0 = weights_sparse_gauss(n_features, nnz=10)
intercept0 = 0.2

# Simulate labels from a sparse logistic regression model
X, y = SimuLogReg(weights=weights0, intercept=intercept0,
                  n_samples=n_samples, seed=123, verbose=False).simulate()

model = ModelLogReg(fit_intercept=True).fit(X, y)
# Elastic-net penalty on the weights only (the intercept is outside the range)
prox = ProxElasticNet(strength=1e-3, ratio=0.5, range=(0, n_features))

solver_params = {'max_iter': 100, 'tol': 0., 'verbose': False}
x0 = np.zeros(model.n_coeffs)

# Batch solvers: step 1/L with the global Lipschitz constant of the gradient
gd = GD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
gd.solve(x0, step=1 / model.get_lip_best())

agd = AGD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
agd.solve(x0, step=1 / model.get_lip_best())

# SGD uses a hand-tuned step size
sgd = SGD(**solver_params).set_model(model).set_prox(prox)
sgd.solve(x0, step=500.)

# SVRG uses the largest per-sample Lipschitz constant
svrg = SVRG(**solver_params).set_model(model).set_prox(prox)
svrg.solve(x0, step=1 / model.get_lip_max())

plot_history([gd, agd, sgd, svrg], log_scale=True, dist_min=True)
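
# A minimal sketch (not part of the example above) of the objective the four
# solvers are driving down: the average logistic loss plus the elastic-net
# penalty on the weights. The penalty normalization (ratio * l1 +
# (1 - ratio)/2 * l2^2, intercept unpenalized because of
# range=(0, n_features)) and labels in {-1, +1} are assumptions about the tick
# conventions; plot_history with dist_min=True plots the gap to the best
# objective value reached.
import numpy as np


def penalized_logreg_objective(coeffs, X, y, strength=1e-3, ratio=0.5):
    """Hand-rolled objective: mean logistic loss + elastic net on the weights."""
    w, b = coeffs[:-1], coeffs[-1]          # last coefficient is the intercept
    margins = y * (X.dot(w) + b)            # assumes labels y in {-1, +1}
    loss = np.mean(np.logaddexp(0., -margins))
    penalty = strength * (ratio * np.abs(w).sum()
                          + 0.5 * (1. - ratio) * np.dot(w, w))
    return loss + penalty


# For instance, to compare the solvers at their final iterates (assuming each
# solver exposes a `solution` attribute after solve):
# for solver in (gd, agd, sgd, svrg):
#     print(type(solver).__name__,
#           penalized_logreg_objective(solver.solution, X, y))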