def test_variance_reduction_setting(self):
    """...Test SAGA variance_reduction parameter is correctly set """
    # Map each public name to the enum value the C++ solver must report.
    expected = {
        'last': _SAGA.VarianceReductionMethod_Last,
        'rand': _SAGA.VarianceReductionMethod_Random,
        'avg': _SAGA.VarianceReductionMethod_Average,
    }

    def check(solver, name):
        # Both the Python attribute and the underlying solver must agree.
        self.assertEqual(solver.variance_reduction, name)
        self.assertEqual(solver._solver.get_variance_reduction(),
                         expected[name])

    # Default is 'last'.
    saga = SAGA()
    check(saga, 'last')

    # Constructor argument is honored.
    saga = SAGA(variance_reduction='rand')
    check(saga, 'rand')

    # The setter works for every supported name.
    for name in ('avg', 'rand', 'last'):
        saga.variance_reduction = name
        check(saga, name)

    # Unknown names are rejected.
    with self.assertRaises(ValueError):
        saga.variance_reduction = 'wrong_name'
def test_solver_saga(self):
    """...Check SAGA solver for a Logistic Regression with Ridge penalization"""
    saga = SAGA(step=1e-3, max_iter=100, verbose=False, tol=0)
    self.check_solver(saga, fit_intercept=True, model="logreg", decimal=1)
def test_set_model(self):
    """...Test set_model of saga, should only accept childs of ModelGeneralizedLinear"""
    msg = '^SAGA accepts only childs of `ModelGeneralizedLinear`$'

    def build_cox_model():
        # ModelCoxRegPartialLik is NOT a generalized linear model, so it
        # must be rejected by SAGA at both the Python and C++ layers.
        w = weights_sparse_gauss(n_weights=2, nnz=0)
        X, T, C = SimuCoxReg(w).simulate()
        return ModelCoxRegPartialLik().fit(X, T, C)

    # Python wrapper raises ValueError.
    with self.assertRaisesRegex(ValueError, msg):
        SAGA().set_model(build_cox_model())

    # Bypassing the wrapper, the C++ solver raises RuntimeError.
    with self.assertRaisesRegex(RuntimeError, msg):
        saga = SAGA()
        saga._solver.set_model(build_cox_model()._model)
def test_serializing_solvers(self):
    """...Test serialization of solvers """
    ratio = 0.5
    l_enet = 1e-2
    sd = ratio * l_enet
    solvers = [
        AdaGrad(step=1e-3, max_iter=100, verbose=False, tol=0),
        SGD(step=1e-3, max_iter=100, verbose=False, tol=0),
        SDCA(l_l2sq=sd, max_iter=100, verbose=False, tol=0),
        SAGA(step=1e-3, max_iter=100, verbose=False, tol=0),
        SVRG(step=1e-3, max_iter=100, verbose=False, tol=0)
    ]
    # NOTE(review): only the keys of this map are iterated below — the data
    # is always simulated with SimuLinReg regardless of the mapped simulator.
    model_map = {
        ModelLinReg: SimuLinReg,
        ModelLogReg: SimuLogReg,
        ModelPoisReg: SimuPoisReg,
        ModelHinge: SimuLogReg,
        ModelQuadraticHinge: SimuLogReg,
        ModelSmoothedHinge: SimuLogReg,
        ModelAbsoluteRegression: SimuLinReg,
        ModelEpsilonInsensitive: SimuLinReg,
        ModelHuber: SimuLinReg,
        ModelLinRegWithIntercepts: SimuLinReg,
        ModelModifiedHuber: SimuLogReg
    }

    for solver in solvers:
        for model_class in model_map:
            np.random.seed(12)
            n_samples, n_features = 100, 5
            w0 = np.random.randn(n_features)
            intercept0 = 50 * weights_sparse_gauss(
                n_weights=n_samples, nnz=30)
            c0 = None
            X, y = SimuLinReg(w0, c0, n_samples=n_samples, verbose=False,
                              seed=2038).simulate()

            has_intercepts = model_class == ModelLinRegWithIntercepts
            if has_intercepts:
                y += intercept0

            model = model_class(fit_intercept=False).fit(X, y)
            solver.set_model(model)
            solver.set_prox(ProxL1(2.))

            # Round-trip the solver through pickle and compare the C++
            # internals of solver, model and prox.
            recovered = pickle.loads(pickle.dumps(solver))
            self.assertTrue(solver._solver.compare(recovered._solver))
            self.assertTrue(
                solver.model._model.compare(recovered.model._model))
            self.assertTrue(
                solver.prox._prox.compare(recovered.prox._prox))

            # Loss must agree between the original model and the model
            # held by the (un-pickled) solver.
            if has_intercepts:
                test_vector = np.hstack((X[0], np.ones(n_samples)))
            else:
                test_vector = X[0]
            self.assertEqual(model.loss(test_vector),
                             solver.model.loss(test_vector))
def create_solver():
    """Build a SAGA solver configured with the shared stochastic test seed."""
    return SAGA(max_iter=100, verbose=False, step=0.01,
                seed=TestSolver.sto_seed)
def __init__(self, **kwargs):
    # Initialize both parent classes explicitly with the same kwargs.
    TS.__init__(self, **kwargs)
    SOLVER.__init__(self, **kwargs)
    # Attribute assignment goes through object.__setattr__, presumably
    # because the instance restricts normal attribute setting.
    # More than one thread selects the asynchronous variant (ASAGA).
    solver_name = "asaga" if self.n_threads > 1 else "saga"
    object.__setattr__(self, "_s_name", solver_name)
def test_asaga_solver(self):
    """...Check ASAGA solver for a Logistic Regression with Elastic net penalization """
    seed = 1398
    np.random.seed(seed)
    n_samples, n_features = 4000, 30

    weights = weights_sparse_gauss(n_features, nnz=3).astype(self.dtype)
    intercept = 0.2
    penalty_strength = 1e-3
    sparsity = 1e-4
    features = sparse.rand(n_samples, n_features, density=sparsity,
                           format='csr', random_state=8).astype(self.dtype)

    features, labels = SimuLogReg(
        weights, n_samples=n_samples, features=features, verbose=False,
        intercept=intercept, dtype=self.dtype).simulate()

    model = ModelLogReg(fit_intercept=True)
    model.fit(features, labels)
    prox = ProxElasticNet(penalty_strength, ratio=0.1,
                          range=(0, n_features))
    solver_step = 1. / model.get_lip_max()

    def fit_with_threads(n_threads):
        # Same configuration except for the thread count; >1 thread
        # makes the SAGA wrapper run its asynchronous (ASAGA) variant.
        solver = SAGA(step=solver_step, max_iter=100, tol=1e-10,
                      verbose=False, n_threads=n_threads, record_every=10,
                      seed=seed)
        solver.set_model(model).set_prox(prox)
        solver.solve()
        return solver.solution

    sequential_solution = fit_with_threads(1)
    parallel_solution = fit_with_threads(2)

    # Sequential and asynchronous runs must converge to the same point,
    # and the solution must be non-trivial (non-zero weights).
    np.testing.assert_array_almost_equal(sequential_solution,
                                         parallel_solution, decimal=4)
    self.assertGreater(np.linalg.norm(sequential_solution[:-1]), 0)
def test_variance_reduction_setting(self):
    """...SolverTest SAGA variance_reduction parameter is correctly set"""
    # Map each public name to the enum value the C++ solver must report.
    expected = {
        'last': SAGA_VarianceReductionMethod_Last,
        'rand': SAGA_VarianceReductionMethod_Random,
        'avg': SAGA_VarianceReductionMethod_Average,
    }

    coeffs0 = weights_sparse_gauss(20, nnz=5, dtype=self.dtype)
    interc0 = None
    X, y = SimuLogReg(coeffs0, interc0, n_samples=3000, verbose=False,
                      seed=123, dtype=self.dtype).simulate()
    model = ModelLogReg().fit(X, y)

    def check(solver, name):
        # Both the Python attribute and the underlying solver must agree.
        self.assertEqual(solver.variance_reduction, name)
        self.assertEqual(solver._solver.get_variance_reduction(),
                         expected[name])

    # Default is 'last'.
    saga = SAGA()
    saga.set_model(model)
    saga.astype(self.dtype)
    check(saga, 'last')

    # Constructor argument is honored.
    saga = SAGA(variance_reduction='rand')
    saga.set_model(model)
    saga.astype(self.dtype)
    check(saga, 'rand')

    # The setter works for every supported name.
    for name in ('avg', 'rand', 'last'):
        saga.variance_reduction = name
        check(saga, name)

    # Unknown names are rejected.
    with self.assertRaises(ValueError):
        saga.variance_reduction = 'wrong_name'
#!/usr/bin/python3
# expect tick first on PYTHONPATH
"""Fit an L2-penalized logistic regression on the URL dataset with ASAGA."""
from tick.array.build.array import (tick_double_array_from_file,
                                    tick_double_sparse2d_from_file)
from tick.linear_model import ModelLogReg
from tick.prox import ProxL2Sq
from tick.solver import SAGA

# Load the pre-serialized (cereal) feature matrix and label vector.
features = tick_double_sparse2d_from_file("url.features.cereal")
labels = tick_double_array_from_file("url.labels.cereal")
n_samples, n_features = features.shape[0], features.shape[1]

model = ModelLogReg(fit_intercept=False).fit(features, labels)
# Small additive constant keeps the penalty strictly positive.
prox = ProxL2Sq((1. / n_samples) + 1e-10, range=(0, n_features))

# n_threads > 1 selects the asynchronous (ASAGA) variant.
solver = SAGA(step=0.00257480411965, max_iter=200, tol=1e-10, verbose=False,
              n_threads=8, log_every_n_epochs=10)
solver.set_model(model).set_prox(prox)
solver.solve()
solver.print_history()