Example #1
    def test_step_type_setting(self):
        """...Test that SVRG step_type parameter behaves correctly
        """
        svrg = SVRG()

        coeffs0 = weights_sparse_gauss(20, nnz=5, dtype=self.dtype)
        interc0 = None

        X, y = SimuLogReg(coeffs0,
                          interc0,
                          n_samples=3000,
                          verbose=False,
                          seed=123,
                          dtype=self.dtype).simulate()

        model = ModelLogReg().fit(X, y)
        svrg.set_model(model)
        self.assertEqual(svrg.step_type, 'fixed')
        self.assertEqual(svrg._solver.get_step_type(), SVRG_StepType_Fixed)

        svrg = SVRG(step_type='bb')
        svrg.set_model(model)
        self.assertEqual(svrg.step_type, 'bb')
        self.assertEqual(svrg._solver.get_step_type(),
                         SVRG_StepType_BarzilaiBorwein)

        svrg.step_type = 'fixed'
        self.assertEqual(svrg.step_type, 'fixed')
        self.assertEqual(svrg._solver.get_step_type(), SVRG_StepType_Fixed)

        svrg.step_type = 'bb'
        self.assertEqual(svrg.step_type, 'bb')
        self.assertEqual(svrg._solver.get_step_type(),
                         SVRG_StepType_BarzilaiBorwein)
Example #2
    def compare_solver_sdca(self):
        """...Compare SDCA solution with SVRG solution
        """
        np.random.seed(12)
        n_samples = Test.n_samples
        n_features = Test.n_features

        for fit_intercept in [True, False]:
            y, X, coeffs0, interc0 = TestSolver.generate_logistic_data(
                n_features, n_samples)

            model = ModelLogReg(fit_intercept=fit_intercept).fit(X, y)
            ratio = 0.5
            l_enet = 1e-2

            # SDCA "elastic-net" formulation is different from elastic-net
            # implementation
            l_l2_sdca = ratio * l_enet
            l_l1_sdca = (1 - ratio) * l_enet
            sdca = SDCA(l_l2sq=l_l2_sdca, max_iter=100, verbose=False, tol=0,
                        seed=Test.sto_seed).set_model(model)
            prox_l1 = ProxL1(l_l1_sdca)
            sdca.set_prox(prox_l1)
            coeffs_sdca = sdca.solve()

            # Compare with SVRG
            svrg = SVRG(max_iter=100, verbose=False, tol=0,
                        seed=Test.sto_seed).set_model(model)
            prox_enet = ProxElasticNet(l_enet, ratio)
            svrg.set_prox(prox_enet)
            coeffs_svrg = svrg.solve(step=0.1)

            np.testing.assert_allclose(coeffs_sdca, coeffs_svrg)
Example #3
    def test_set_model(self):
        """...Test SVRG set_model
        """
        X, y = self.simu_linreg_data()
        _, model_spars = self.get_dense_and_sparse_linreg_model(X, y)
        svrg = SVRG(variance_reduction='avg')
        msg = "'avg' variance reduction cannot be used with sparse datasets. " \
              "Please change `variance_reduction` before passing sparse data."
        with catch_warnings(record=True) as w:
            simplefilter('always')

            svrg.set_model(model_spars)
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[0].category, UserWarning))
            self.assertEqual(str(w[0].message), msg)
Example #4
    def test_solver_svrg(self):
        """...Check SVRG solver for a Logistic Regression with Ridge
        penalization
        """
        solver = SVRG(step=1e-3, max_iter=100, verbose=False, tol=0)
        self.check_solver(solver, fit_intercept=True, model="logreg",
                          decimal=1)
Example #5
    def test_convergence_with_lags(self):
        """Test longitudinal multinomial model convergence."""
        n_intervals = 10
        n_lags = 3
        n_samples = 1500
        n_features = 3
        sim = SimuSCCS(n_samples, n_intervals, n_features, n_lags, None,
                       True, "short", seed=42, verbose=False)
        X, y, censoring, coeffs = sim.simulate()
        X = LongitudinalFeaturesLagger(n_lags=n_lags) \
            .fit_transform(X, censoring)
        model = ModelSCCS(n_intervals=n_intervals,
                          n_lags=n_lags).fit(X, y, censoring)
        solver = SVRG(max_iter=15, verbose=False)
        solver.set_model(model).set_prox(ProxZero())
        coeffs_svrg = solver.solve(step=1 / model.get_lip_max())
        np.testing.assert_almost_equal(coeffs, coeffs_svrg, decimal=1)
Example #6
    def test_dense_and_sparse_match(self):
        """...Test in SVRG that dense and sparse code matches in all possible
        settings
        """
        variance_reductions = ['last', 'rand']
        rand_types = ['perm', 'unif']
        seed = 123
        tol = 0.
        max_iter = 50

        n_samples = 500
        n_features = 20

        # Crazy prox examples
        proxs = [
            ProxTV(strength=1e-2, range=(5, 13), positive=True),
            ProxElasticNet(strength=1e-2, ratio=0.9),
            ProxEquality(range=(0, n_features)),
            ProxL1(strength=1e-3, range=(5, 17)),
            ProxL1w(strength=1e-3,
                    weights=np.arange(5, 17, dtype=np.double),
                    range=(5, 17)),
        ]

        for intercept in [-1, None]:
            X, y = self.simu_linreg_data(interc=intercept,
                                         n_features=n_features,
                                         n_samples=n_samples)

            fit_intercept = intercept is not None
            model_dense, model_spars = self.get_dense_and_sparse_linreg_model(
                X, y, fit_intercept=fit_intercept)
            step = 1 / model_spars.get_lip_max()

            for variance_reduction, rand_type, prox in product(
                    variance_reductions, rand_types, proxs):
                solver_sparse = SVRG(step=step, tol=tol, max_iter=max_iter,
                                     verbose=False,
                                     variance_reduction=variance_reduction,
                                     rand_type=rand_type, seed=seed) \
                    .set_model(model_spars) \
                    .set_prox(prox)

                solver_dense = SVRG(step=step, tol=tol, max_iter=max_iter,
                                    verbose=False,
                                    variance_reduction=variance_reduction,
                                    rand_type=rand_type, seed=seed) \
                    .set_model(model_dense) \
                    .set_prox(prox)

                solver_sparse.solve()
                solver_dense.solve()
                np.testing.assert_array_almost_equal(solver_sparse.solution,
                                                     solver_dense.solution, 7)
Example #7
    def _construct_solver_obj(step, max_iter, tol, print_every, record_every,
                              verbose, seed):
        # TODO: we might want to use SAGA also later... (might be faster here)
        # seed cannot be None in SVRG
        solver_obj = SVRG(step=step,
                          max_iter=max_iter,
                          tol=tol,
                          print_every=print_every,
                          record_every=record_every,
                          verbose=verbose,
                          seed=seed)

        return solver_obj
Example #8
    def test_convergence_with_lags(self):
        """Test longitudinal multinomial model convergence."""
        n_intervals = 10
        n_samples = 800
        n_features = 2
        n_lags = np.repeat(2, n_features).astype(dtype="uint64")
        sim = SimuSCCS(n_samples,
                       n_intervals,
                       n_features,
                       n_lags,
                       None,
                       "multiple_exposures",
                       seed=42)
        _, X, y, censoring, coeffs = sim.simulate()
        coeffs = np.hstack(coeffs)
        X, _, _ = LongitudinalFeaturesLagger(n_lags=n_lags) \
            .fit_transform(X, censoring)
        model = ModelSCCS(n_intervals=n_intervals,
                          n_lags=n_lags).fit(X, y, censoring)
        solver = SVRG(max_iter=15, verbose=False)
        solver.set_model(model).set_prox(ProxZero())
        coeffs_svrg = solver.solve(step=1 / model.get_lip_max())
        np.testing.assert_almost_equal(coeffs, coeffs_svrg, decimal=1)
Example #9
    def test_sdca_identity_poisreg(self):
        """...Test SDCA on specific case of Poisson regression with
        identity link
        """
        l_l2sq = 1e-3
        n_samples = 10000
        n_features = 3

        np.random.seed(123)
        weight0 = np.random.rand(n_features)
        features = np.random.rand(n_samples, n_features)

        for intercept in [None, 0.45]:
            if intercept is None:
                fit_intercept = False
            else:
                fit_intercept = True

            simu = SimuPoisReg(weight0, intercept=intercept, features=features,
                               n_samples=n_samples, link='identity',
                               verbose=False)
            features, labels = simu.simulate()

            model = ModelPoisReg(fit_intercept=fit_intercept, link='identity')
            model.fit(features, labels)

            sdca = SDCA(l_l2sq=l_l2sq, max_iter=100, verbose=False, tol=1e-14,
                        seed=Test.sto_seed)

            sdca.set_model(model).set_prox(ProxZero())
            start_dual = np.sqrt(sdca._rand_max * l_l2sq)
            start_dual = start_dual * np.ones(sdca._rand_max)

            sdca.solve(start_dual)

            # Check that duality gap is 0
            self.assertAlmostEqual(
                sdca.objective(sdca.solution),
                sdca.dual_objective(sdca.dual_solution))

            # Check that the original vector is approximately retrieved
            if fit_intercept:
                original_coeffs = np.hstack((weight0, intercept))
            else:
                original_coeffs = weight0

            np.testing.assert_array_almost_equal(original_coeffs,
                                                 sdca.solution, decimal=1)

            # Ensure that we solve the same problem as other solvers
            svrg = SVRG(max_iter=100, verbose=False, tol=1e-14,
                        seed=Test.sto_seed)

            svrg.set_model(model).set_prox(ProxL2Sq(l_l2sq))
            svrg.solve(0.5 * np.ones(model.n_coeffs), step=1e-2)
            np.testing.assert_array_almost_equal(svrg.solution, sdca.solution,
                                                 decimal=4)
Example #10
    def _construct_solver_obj_with_class(step,
                                         max_iter,
                                         tol,
                                         print_every,
                                         record_every,
                                         verbose,
                                         seed,
                                         clazz=SVRG):
        """Allows creation of a solver by class type; removes values from the
        constructor parameter list that do not exist on the constructor of
        the class to be called
        """
        # the inspect call must be the first statement, so that the frame's
        # locals contain only the parameters above
        _, _, _, kvs = inspect.getargvalues(inspect.currentframe())
        constructor_map = kvs.copy()
        args = inspect.getfullargspec(clazz.__init__)[0]
        for k in kvs:
            if k not in args:
                del constructor_map[k]
        return clazz(**constructor_map)
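A hedged usage sketch of the helper above (these call sites are not in the original; they assume the helper is reachable as a plain function or staticmethod, and that tick's GD constructor, unlike SVRG's, takes no seed argument, so the inspect-based filter drops it):

from tick.solver import GD, SVRG

# Hypothetical calls: only the keywords accepted by the target class's
# __init__ are forwarded, so the same argument list works for both solvers.
svrg = _construct_solver_obj_with_class(
    step=1e-3, max_iter=100, tol=1e-10, print_every=10,
    record_every=1, verbose=False, seed=123, clazz=SVRG)
gd = _construct_solver_obj_with_class(
    step=1e-3, max_iter=100, tol=1e-10, print_every=10,
    record_every=1, verbose=False, seed=123, clazz=GD)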
Example #11
def run_solvers(model, l_l2sq):
    try:
        svrg_step = 1. / model.get_lip_max()
    except AttributeError:
        svrg_step = 1e-3
    try:
        gd_step = 1. / model.get_lip_best()
    except AttributeError:
        gd_step = 1e-1

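    # BFGS is solved to high precision first; its solution is then recorded as
    # the reference minimizer / minimum on every solver's history below.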
    bfgs = BFGS(verbose=False, tol=1e-13)
    bfgs.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    bfgs.solve()
    bfgs.history.set_minimizer(bfgs.solution)
    bfgs.history.set_minimum(bfgs.objective(bfgs.solution))
    bfgs.solve()

    svrg = SVRG(step=svrg_step, verbose=False, tol=1e-10, seed=seed)
    svrg.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    svrg.history.set_minimizer(bfgs.solution)
    svrg.history.set_minimum(bfgs.objective(bfgs.solution))
    svrg.solve()

    sdca = SDCA(l_l2sq, verbose=False, seed=seed, tol=1e-10)
    sdca.set_model(model).set_prox(ProxZero())
    sdca.history.set_minimizer(bfgs.solution)
    sdca.history.set_minimum(bfgs.objective(bfgs.solution))
    sdca.solve()

    gd = GD(verbose=False, tol=1e-10, step=gd_step, linesearch=False)
    gd.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    gd.history.set_minimizer(bfgs.solution)
    gd.history.set_minimum(bfgs.objective(bfgs.solution))
    gd.solve()

    agd = AGD(verbose=False, tol=1e-10, step=gd_step, linesearch=False)
    agd.set_model(model).set_prox(ProxL2Sq(l_l2sq))
    agd.history.set_minimizer(bfgs.solution)
    agd.history.set_minimum(bfgs.objective(bfgs.solution))
    agd.solve()

    return bfgs, svrg, sdca, gd, agd
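A minimal usage sketch for run_solvers (not in the original example; the imports assume tick's current module layout, and seed must exist at module level because run_solvers reads it as a global):

from tick.linear_model import ModelLogReg, SimuLogReg
from tick.simulation import weights_sparse_gauss
from tick.plot import plot_history

seed = 1398  # picked up as a global by run_solvers
n_samples, n_features = 2000, 30
weights0 = weights_sparse_gauss(n_features, nnz=10)
X, y = SimuLogReg(weights0, None, n_samples=n_samples, seed=seed,
                  verbose=False).simulate()
model = ModelLogReg(fit_intercept=False).fit(X, y)

bfgs, svrg, sdca, gd, agd = run_solvers(model, l_l2sq=1e-3)
plot_history([bfgs, svrg, sdca, gd, agd], log_scale=True, dist_min=True)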
Example #12
    def test_step_type_setting(self):
        """...Test that SVRG step_type parameter behaves correctly
        """
        svrg = SVRG()
        self.assertEqual(svrg.step_type, 'fixed')
        self.assertEqual(svrg._solver.get_step_type(), SVRG_StepType_Fixed)

        svrg = SVRG(step_type='bb')
        self.assertEqual(svrg.step_type, 'bb')
        self.assertEqual(svrg._solver.get_step_type(),
                         SVRG_StepType_BarzilaiBorwein)

        svrg.step_type = 'fixed'
        self.assertEqual(svrg.step_type, 'fixed')
        self.assertEqual(svrg._solver.get_step_type(), SVRG_StepType_Fixed)

        svrg.step_type = 'bb'
        self.assertEqual(svrg.step_type, 'bb')
        self.assertEqual(svrg._solver.get_step_type(),
                         SVRG_StepType_BarzilaiBorwein)
Example #13
simulator = SimuLogReg(weights, n_samples=n_samples, features=features,
                       verbose=False, intercept=intercept)
features, labels = simulator.simulate()

model = ModelLogReg(fit_intercept=True)
model.fit(features, labels)
prox = ProxElasticNet(penalty_strength, ratio=0.5, range=(0, n_features))
svrg_step = 1. / model.get_lip_max()

test_n_threads = [1, 2, 4]

svrg_list = []
svrg_labels = []

for n_threads in test_n_threads:
    svrg = SVRG(step=svrg_step, seed=seed, max_iter=30, verbose=False,
                n_threads=n_threads)
    svrg.set_model(model).set_prox(prox)
    svrg.solve()

    svrg_list += [svrg]
    if n_threads == 1:
        svrg_labels += ['SVRG']
    else:
        svrg_labels += ['ASVRG {}'.format(n_threads)]

plot_history(svrg_list, x="time", dist_min=True, log_scale=True,
             labels=svrg_labels, show=False)
plt.ylim([3e-3, 0.3])
plt.ylabel('log distance to optimal objective', fontsize=14)
plt.tight_layout()
plt.show()
Example #14
# NB: imports above the ProxElasticNet line are reconstructed, assuming
# tick's current module layout
import numpy as np

from tick.linear_model import ModelLogReg, SimuLogReg
from tick.simulation import weights_sparse_gauss
from tick.solver import GD, AGD, SGD, SVRG
from tick.prox import ProxElasticNet, ProxL1
from tick.plot import plot_history

n_samples, n_features = 5000, 50
weights0 = weights_sparse_gauss(n_features, nnz=10)
intercept0 = 0.2
X, y = SimuLogReg(weights=weights0,
                  intercept=intercept0,
                  n_samples=n_samples,
                  seed=123,
                  verbose=False).simulate()

model = ModelLogReg(fit_intercept=True).fit(X, y)
prox = ProxElasticNet(strength=1e-3, ratio=0.5, range=(0, n_features))

solver_params = {'max_iter': 100, 'tol': 0., 'verbose': False}
x0 = np.zeros(model.n_coeffs)

gd = GD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
gd.solve(x0, step=1 / model.get_lip_best())

agd = AGD(linesearch=False, **solver_params).set_model(model).set_prox(prox)
agd.solve(x0, step=1 / model.get_lip_best())

sgd = SGD(**solver_params).set_model(model).set_prox(prox)
sgd.solve(x0, step=500.)

svrg = SVRG(**solver_params).set_model(model).set_prox(prox)
svrg.solve(x0, step=1 / model.get_lip_max())

plot_history([gd, agd, sgd, svrg], log_scale=True, dist_min=True)
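As a small follow-up (not part of the original snippet), the final objective reached by each solver can also be checked numerically; objective and solution are the same solver attributes used in the other examples on this page:

for solver in [gd, agd, sgd, svrg]:
    print(type(solver).__name__, solver.objective(solver.solution))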
Example #15
    def create_solver():
        return SVRG(tol=1e-13,
                    step=0.1,
                    max_iter=1000,
                    seed=TestSolver.sto_seed,
                    verbose=False)
Example #16
intercept0 = 0.2
X, y = SimuLogReg(weights=weights0, intercept=intercept0,
                  n_samples=n_samples, seed=123, verbose=False).simulate()

model = ModelLogReg(fit_intercept=True).fit(X, y)
prox = ProxElasticNet(strength=1e-3, ratio=0.5, range=(0, n_features))
x0 = np.zeros(model.n_coeffs)

optimal_step = 1 / model.get_lip_max()
tested_steps = [optimal_step, 1e-2 * optimal_step, 10 * optimal_step]

solvers = []
solver_labels = []

for step in tested_steps:
    svrg = SVRG(max_iter=30, tol=1e-10, verbose=False)
    svrg.set_model(model).set_prox(prox)
    svrg.solve(step=step)

    svrg_bb = SVRG(max_iter=30, tol=1e-10, verbose=False, step_type='bb')
    svrg_bb.set_model(model).set_prox(prox)
    svrg_bb.solve(step=step)

    solvers += [svrg, svrg_bb]

    optimal_factor = step / optimal_step
    if optimal_factor != 1:
        solver_labels += ['SVRG {:.2g} * optimal step'.format(optimal_factor),
                          'SVRG BB {:.2g} * optimal step'.format(optimal_factor)]
    else:
        solver_labels += ['SVRG optimal step', 'SVRG BB optimal step']
Example #17
    def create_solver():
        return SVRG(max_iter=1,
                    verbose=False,
                    step=1e-5,
                    seed=TestSolver.sto_seed)
Example #18
    def __init__(self, **kwargs):
        TS.__init__(self, **kwargs)
        SOLVER.__init__(self, **kwargs)
        object.__setattr__(self, "_s_name", "svrg")
Example #19
    def test_variance_reduction_setting(self):
        """...Test that SVRG variance_reduction parameter behaves correctly
        """
        svrg = SVRG()

        coeffs0 = weights_sparse_gauss(20, nnz=5, dtype=self.dtype)
        interc0 = None

        X, y = SimuLogReg(coeffs0,
                          interc0,
                          n_samples=3000,
                          verbose=False,
                          seed=123,
                          dtype=self.dtype).simulate()

        model = ModelLogReg().fit(X, y)
        svrg.set_model(model)

        self.assertEqual(svrg.variance_reduction, 'last')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         SVRG_VarianceReductionMethod_Last)

        svrg = SVRG(variance_reduction='rand')
        svrg.set_model(model)
        self.assertEqual(svrg.variance_reduction, 'rand')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         SVRG_VarianceReductionMethod_Random)

        svrg.variance_reduction = 'avg'
        self.assertEqual(svrg.variance_reduction, 'avg')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         SVRG_VarianceReductionMethod_Average)

        svrg.variance_reduction = 'rand'
        self.assertEqual(svrg.variance_reduction, 'rand')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         SVRG_VarianceReductionMethod_Random)

        svrg.variance_reduction = 'last'
        self.assertEqual(svrg.variance_reduction, 'last')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         SVRG_VarianceReductionMethod_Last)

        msg = '^variance_reduction should be one of "avg, last, rand", ' \
              'got "stuff"$'
        with self.assertRaisesRegex(ValueError, msg):
            svrg = SVRG(variance_reduction='stuff')
            svrg.set_model(model)
        with self.assertRaisesRegex(ValueError, msg):
            svrg.variance_reduction = 'stuff'

        X, y = self.simu_linreg_data(dtype=self.dtype)
        model_dense, model_spars = self.get_dense_and_sparse_linreg_model(
            X, y, dtype=self.dtype)
        try:
            svrg.set_model(model_dense)
            svrg.variance_reduction = 'avg'
            svrg.variance_reduction = 'last'
            svrg.variance_reduction = 'rand'
            svrg.set_model(model_spars)
            svrg.variance_reduction = 'last'
            svrg.variance_reduction = 'rand'
        except Exception:
            self.fail('Setting variance_reduction in these cases should have '
                      'been ok')

        msg = "'avg' variance reduction cannot be used with sparse datasets"
        with catch_warnings(record=True) as w:
            simplefilter('always')
            svrg.set_model(model_spars)
            svrg.variance_reduction = 'avg'
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[0].category, UserWarning))
            self.assertEqual(str(w[0].message), msg)
Example #20
    def test_serializing_solvers(self):
        """...Test serialization of solvers
        """
        ratio = 0.5
        l_enet = 1e-2
        sd = ratio * l_enet

        solvers = [
            AdaGrad(step=1e-3, max_iter=100, verbose=False, tol=0),
            SGD(step=1e-3, max_iter=100, verbose=False, tol=0),
            SDCA(l_l2sq=sd, max_iter=100, verbose=False, tol=0),
            SAGA(step=1e-3, max_iter=100, verbose=False, tol=0),
            SVRG(step=1e-3, max_iter=100, verbose=False, tol=0)
        ]
        model_map = {
            ModelLinReg: SimuLinReg,
            ModelLogReg: SimuLogReg,
            ModelPoisReg: SimuPoisReg,
            ModelHinge: SimuLogReg,
            ModelQuadraticHinge: SimuLogReg,
            ModelSmoothedHinge: SimuLogReg,
            ModelAbsoluteRegression: SimuLinReg,
            ModelEpsilonInsensitive: SimuLinReg,
            ModelHuber: SimuLinReg,
            ModelLinRegWithIntercepts: SimuLinReg,
            ModelModifiedHuber: SimuLogReg
        }

        for solver in solvers:
            for mod in model_map:

                np.random.seed(12)
                n_samples, n_features = 100, 5
                w0 = np.random.randn(n_features)
                intercept0 = 50 * weights_sparse_gauss(n_weights=n_samples,
                                                       nnz=30)
                c0 = None
                X, y = SimuLinReg(w0,
                                  c0,
                                  n_samples=n_samples,
                                  verbose=False,
                                  seed=2038).simulate()

                if mod == ModelLinRegWithIntercepts:
                    y += intercept0

                model = mod(fit_intercept=False).fit(X, y)

                prox = ProxL1(2.)
                solver.set_model(model)
                solver.set_prox(prox)

                pickled = pickle.loads(pickle.dumps(solver))

                self.assertTrue(solver._solver.compare(pickled._solver))

                self.assertTrue(
                    solver.model._model.compare(pickled.model._model))

                self.assertTrue(solver.prox._prox.compare(pickled.prox._prox))

                if mod == ModelLinRegWithIntercepts:
                    test_vector = np.hstack((X[0], np.ones(n_samples)))
                    self.assertEqual(model.loss(test_vector),
                                     solver.model.loss(test_vector))
                else:
                    self.assertEqual(model.loss(X[0]), solver.model.loss(X[0]))
Example #21
    def test_variance_reduction_setting(self):
        """...Test that SVRG variance_reduction parameter behaves correctly
        """
        svrg = SVRG()
        self.assertEqual(svrg.variance_reduction, 'last')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         _SVRG.VarianceReductionMethod_Last)

        svrg = SVRG(variance_reduction='rand')
        self.assertEqual(svrg.variance_reduction, 'rand')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         _SVRG.VarianceReductionMethod_Random)

        svrg.variance_reduction = 'avg'
        self.assertEqual(svrg.variance_reduction, 'avg')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         _SVRG.VarianceReductionMethod_Average)

        svrg.variance_reduction = 'rand'
        self.assertEqual(svrg.variance_reduction, 'rand')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         _SVRG.VarianceReductionMethod_Random)

        svrg.variance_reduction = 'last'
        self.assertEqual(svrg.variance_reduction, 'last')
        self.assertEqual(svrg._solver.get_variance_reduction(),
                         _SVRG.VarianceReductionMethod_Last)

        msg = '^variance_reduction should be one of "avg, last, rand", ' \
              'got "stuff"$'
        with self.assertRaisesRegex(ValueError, msg):
            svrg = SVRG(variance_reduction='stuff')
        with self.assertRaisesRegex(ValueError, msg):
            svrg.variance_reduction = 'stuff'

        X, y = self.simu_linreg_data()
        model_dense, model_spars = self.get_dense_and_sparse_linreg_model(X, y)
        try:
            svrg.set_model(model_dense)
            svrg.variance_reduction = 'avg'
            svrg.variance_reduction = 'last'
            svrg.variance_reduction = 'rand'
            svrg.set_model(model_spars)
            svrg.variance_reduction = 'last'
            svrg.variance_reduction = 'rand'
        except Exception:
            self.fail('Setting variance_reduction in these cases should have '
                      'been ok')

        msg = "'avg' variance reduction cannot be used with sparse datasets"
        with catch_warnings(record=True) as w:
            simplefilter('always')
            svrg.set_model(model_spars)
            svrg.variance_reduction = 'avg'
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[0].category, UserWarning))
            self.assertEqual(str(w[0].message), msg)