import numpy as np
import numpy.testing as npt

from sigpy import app, linop, util


# Test methods excerpted from a unittest.TestCase subclass.
def test_precond_LinearLeastSquares(self):
    n = 5
    _A = np.eye(n) + 0.01 * util.randn([n, n])
    A = linop.MatMul([n, 1], _A)
    x = util.randn([n, 1])
    y = A(x)
    x_lstsq = np.linalg.lstsq(_A, y, rcond=-1)[0]

    # Jacobi-style preconditioner: reciprocal column energies of _A.
    p = 1 / (np.sum(abs(_A)**2, axis=0).reshape([n, 1]))
    P = linop.Multiply([n, 1], p)

    # The default solver should match the closed-form least-squares solution.
    x_rec = app.LinearLeastSquares(A, y, show_pbar=False).run()
    npt.assert_allclose(x_rec, x_lstsq, atol=1e-3)

    # Preconditioned gradient method: per-coordinate step size p, scaled by
    # the maximum eigenvalue of the preconditioned normal operator.
    alpha = p / app.MaxEig(P * A.H * A, show_pbar=False).run()
    x_rec = app.LinearLeastSquares(
        A, y, solver='GradientMethod', alpha=alpha,
        max_power_iter=100, max_iter=1000, show_pbar=False).run()
    npt.assert_allclose(x_rec, x_lstsq, atol=1e-3)

    # Primal-dual hybrid gradient with p as the primal step size tau.
    tau = p
    x_rec = app.LinearLeastSquares(
        A, y, solver='PrimalDualHybridGradient', max_iter=1000,
        tau=tau, show_pbar=False).run()
    npt.assert_allclose(x_rec, x_lstsq, atol=1e-3)
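# A minimal NumPy-only sketch (not part of the sigpy test suite) of the
# diagonal preconditioning idea the test above exercises: each coordinate
# gets step size 1 / sum_i |A[i, j]|**2, the reciprocal of its column
# energy. The function name and all variables below are illustrative.
def _precond_gradient_sketch():
    rng = np.random.default_rng(0)
    n = 5
    A = np.eye(n) + 0.01 * rng.standard_normal((n, n))
    y = A @ rng.standard_normal((n, 1))

    # Jacobi-style preconditioner: reciprocal column energies of A.
    p = 1 / np.sum(np.abs(A)**2, axis=0).reshape(n, 1)

    x = np.zeros((n, 1))
    for _ in range(1000):
        # Preconditioned gradient step on 0.5 * ||A x - y||^2.
        x = x - p * (A.conj().T @ (A @ x - y))

    assert np.allclose(x, np.linalg.lstsq(A, y, rcond=None)[0], atol=1e-3)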
def test_MaxEig(self):
    n = 5
    mat = util.randn([n, n])
    A = linop.MatMul([n, 1], mat)
    s = np.linalg.svd(mat, compute_uv=False)

    # The largest eigenvalue of A.H * A equals the squared largest
    # singular value of mat.
    npt.assert_allclose(
        app.MaxEig(A.H * A, max_iter=100).run(), s[0]**2, atol=1e-2)
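# A short plain-NumPy sketch of the power iteration that MaxEig performs
# (a simplified rendering, not sigpy's actual implementation): repeatedly
# apply A.H @ A and renormalize, so the vector norm converges to the largest
# eigenvalue of A.H @ A, i.e. the squared top singular value of A.
def _power_iteration_sketch():
    rng = np.random.default_rng(0)
    n = 5
    A = rng.standard_normal((n, n))
    AHA = A.conj().T @ A

    x = rng.standard_normal((n, 1))
    for _ in range(100):
        x = AHA @ x
        max_eig = np.linalg.norm(x)  # current eigenvalue estimate
        x = x / max_eig              # renormalize to avoid overflow

    assert np.isclose(max_eig, np.linalg.svd(A, compute_uv=False)[0]**2,
                      atol=1e-2)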