def test_solver_comparison(self):
    """
    Test that all solvers return the same and correct solution.
    """
    # Convex functions.
    y = [1, 0, 0.1, 8, -6.5, 0.2, 0.004, 0.01]
    sol = [0.75, 0, 0, 7.75, -6.25, 0, 0, 0]
    w1, w2 = .8, .4
    f1 = functions.norm_l2(y=y, lambda_=w1 / 2.)  # Smooth.
    f2 = functions.norm_l1(lambda_=w2 / 2.)  # Non-smooth.

    # Solvers.
    L = w1  # Lipschitz continuous gradient.
    step = 1. / L
    lambda_ = 0.5
    params = {'step': step, 'lambda_': lambda_}
    slvs = []
    slvs.append(
        solvers.forward_backward(accel=acceleration.dummy(), step=step))
    slvs.append(solvers.douglas_rachford(**params))
    slvs.append(solvers.generalized_forward_backward(**params))

    # Compare solutions.
    params = {'rtol': 1e-14, 'verbosity': 'NONE', 'maxit': 1e4}
    niters = [2, 61, 26]
    for solver, niter in zip(slvs, niters):
        x0 = np.zeros(len(y))
        ret = solvers.solve([f1, f2], x0, solver, **params)
        nptest.assert_allclose(ret['sol'], sol)
        self.assertEqual(ret['niter'], niter)
        self.assertIs(ret['sol'], x0)  # The initial value was modified.
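
# A minimal sketch (not part of the test suite) showing where the expected
# solution above comes from: minimizing (w1/2)*||x - y||_2^2 + (w2/2)*||x||_1
# decouples per coordinate, and the minimizer is the soft-thresholding of y
# with threshold w2 / (2 * w1) = 0.25. The helper name is hypothetical.
def _expected_solution_sketch():
    import numpy as np
    y = np.array([1, 0, 0.1, 8, -6.5, 0.2, 0.004, 0.01])
    w1, w2 = .8, .4
    threshold = w2 / (2. * w1)  # 0.25 for the weights used in the test.
    # Soft-thresholding: shrink each entry towards zero, clipping at zero.
    sol = np.sign(y) * np.maximum(np.abs(y) - threshold, 0)
    np.testing.assert_allclose(sol, [0.75, 0, 0, 7.75, -6.25, 0, 0, 0])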
def test_acceleration_comparison(self):
    """
    Test that all acceleration schemes return the same and correct
    solution.
    """
    # Convex functions.
    y = [1, 0, 0.1, 8, -6.5, 0.2, 0.004, 0.01]
    sol = [0.75, 0, 0, 7.75, -6.25, 0, 0, 0]
    w1, w2 = .8, .4
    f1 = functions.norm_l2(y=y, lambda_=w1 / 2.)  # Smooth.
    f2 = functions.norm_l1(lambda_=w2 / 2.)  # Non-smooth.

    # Solvers: the same forward-backward solver with different
    # acceleration schemes.
    L = w1  # Lipschitz continuous gradient.
    step = 1. / L
    slvs = []
    slvs.append(
        solvers.forward_backward(accel=acceleration.dummy(), step=step))
    slvs.append(
        solvers.forward_backward(accel=acceleration.fista(), step=step))
    slvs.append(solvers.forward_backward(
        accel=acceleration.fista_backtracking(eta=.999), step=step))

    # Compare solutions.
    params = {'rtol': 1e-14, 'verbosity': 'NONE', 'maxit': 1e4}
    niters = [2, 2, 6]
    for solver, niter in zip(slvs, niters):
        x0 = np.zeros(len(y))
        ret = solvers.solve([f1, f2], x0, solver, **params)
        nptest.assert_allclose(ret['sol'], sol)
        self.assertEqual(ret['niter'], niter)
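
# A minimal sketch (not part of the test suite) of the FISTA momentum rule
# that acceleration.fista() implements: the point fed to the next
# forward-backward step is extrapolated with weight (t_k - 1) / t_{k+1},
# where t_{k+1} = (1 + sqrt(1 + 4 * t_k**2)) / 2. Names are hypothetical.
def _fista_momentum_sketch(x_new, x_old, t):
    import numpy as np
    t_next = (1. + np.sqrt(1. + 4. * t**2)) / 2.
    # Extrapolated point used as the next evaluation point.
    y_next = x_new + (t - 1.) / t_next * (x_new - x_old)
    return y_next, t_next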
def test_forward_backward(self):
    """
    Test forward-backward splitting algorithm without acceleration, and
    with L1-norm, L2-norm, and dummy functions.
    """
    y = [4., 5., 6., 7.]
    solver = solvers.forward_backward(accel=acceleration.dummy())
    param = {'solver': solver, 'rtol': 1e-6, 'verbosity': 'NONE'}

    # L2-norm prox and dummy gradient.
    f1 = functions.norm_l2(y=y)
    f2 = functions.dummy()
    ret = solvers.solve([f1, f2], np.zeros(len(y)), **param)
    nptest.assert_allclose(ret['sol'], y)
    self.assertEqual(ret['crit'], 'RTOL')
    self.assertEqual(ret['niter'], 35)

    # L1-norm prox and L2-norm gradient.
    f1 = functions.norm_l1(y=y, lambda_=1.0)
    f2 = functions.norm_l2(y=y, lambda_=0.8)
    ret = solvers.solve([f1, f2], np.zeros(len(y)), **param)
    nptest.assert_allclose(ret['sol'], y)
    self.assertEqual(ret['crit'], 'RTOL')
    self.assertEqual(ret['niter'], 4)

    # Sanity check: forward-backward accepts exactly two functions.
    f3 = functions.dummy()
    x0 = np.zeros((4,))
    self.assertRaises(ValueError, solver.pre, [f1, f2, f3], x0)
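
# A minimal sketch (not part of the test suite) of the unaccelerated
# forward-backward iteration the solver above runs: a gradient step on the
# smooth term followed by a proximal step on the non-smooth term,
# x <- prox_{step * f1}(x - step * grad_f2(x)). For the L1/L2 pair tested
# above the prox is soft-thresholding towards y. Names are hypothetical.
def _forward_backward_sketch(y, lambda_1=1.0, lambda_2=0.8,
                             step=1., niter=10):
    import numpy as np
    y = np.asarray(y, dtype=float)
    x = np.zeros_like(y)
    for _ in range(niter):
        # Forward (gradient) step on f2(x) = lambda_2 * ||x - y||_2^2.
        x = x - step * 2. * lambda_2 * (x - y)
        # Backward (proximal) step on f1(x) = lambda_1 * ||x - y||_1.
        x = y + np.sign(x - y) * np.maximum(
            np.abs(x - y) - step * lambda_1, 0)
    return x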
def __init__(self, step=1., accel=None):
    # The check matches the documented contract: negative steps are
    # rejected (the original message said "positive" while only negative
    # values were refused).
    if step < 0:
        raise ValueError('Step should be a non-negative number.')
    self.step = step
    self.accel = acceleration.dummy() if accel is None else accel
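
# A minimal usage sketch (not part of the original module) showing how this
# constructor is typically reached: build a solver with an explicit step and
# acceleration scheme, then pass it to solvers.solve. Assumes the
# `functions`, `solvers`, and `acceleration` modules used in the tests above.
def _solver_usage_sketch():
    import numpy as np
    y = [4., 5., 6., 7.]
    f1 = functions.norm_l1(y=y)
    f2 = functions.norm_l2(y=y)
    solver = solvers.forward_backward(step=0.5, accel=acceleration.fista())
    return solvers.solve([f1, f2], np.zeros(len(y)), solver, rtol=1e-6)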