def test_FISTA_Denoising(self):
    print("FISTA Denoising Poisson Noise Tikhonov")
    # adapted from demo FISTA_Tikhonov_Poisson_Denoising.py in CIL-Demos repository
    # loader = TestData(data_dir=os.path.join(sys.prefix, 'share', 'ccpi'))
    loader = TestData()
    data = loader.load(TestData.SHAPES)
    ig = data.geometry
    ag = ig
    N = 300

    # Create Noisy data with Poisson noise
    scale = 5
    n1 = TestData.random_noise(data.as_array() / scale, mode='poisson', seed=10) * scale
    noisy_data = ImageData(n1)

    # Regularisation Parameter
    alpha = 10

    # Setup and run the FISTA algorithm
    operator = Gradient(ig)
    fid = KullbackLeibler(b=noisy_data)
    reg = FunctionOperatorComposition(alpha * L2NormSquared(), operator)

    x_init = ig.allocate()
    fista = FISTA(x_init=x_init, f=reg, g=fid)
    fista.max_iteration = 3000
    fista.update_objective_interval = 500
    fista.run(verbose=True)

    rmse = (fista.get_output() - data).norm() / data.as_array().size
    print("RMSE", rmse)
    self.assertLess(rmse, 4.2e-4)
def test_FISTA_Norm2Sq(self):
    print("Test FISTA Norm2Sq")
    ig = ImageGeometry(127, 139, 149)
    b = ig.allocate(ImageGeometry.RANDOM)  # fill with random numbers
    x_init = ig.allocate(ImageGeometry.RANDOM)
    identity = Identity(ig)

    #### it seems FISTA does not work with Norm2Sq
    norm2sq = LeastSquares(identity, b)
    # norm2sq.L = 2 * norm2sq.c * identity.norm()**2
    # norm2sq = FunctionOperatorComposition(L2NormSquared(b=b), identity)
    opt = {'tol': 1e-4, 'memopt': False}

    print("initial objective", norm2sq(x_init))
    alg = FISTA(x_init=x_init, f=norm2sq, g=ZeroFunction())
    alg.max_iteration = 2
    alg.run(20, verbose=True)
    self.assertNumpyArrayAlmostEqual(alg.x.as_array(), b.as_array())

    alg = FISTA(x_init=x_init, f=norm2sq, g=ZeroFunction(),
                max_iteration=2, update_objective_interval=3)
    self.assertTrue(alg.max_iteration == 2)
    self.assertTrue(alg.update_objective_interval == 3)
    alg.run(20, verbose=True)
    self.assertNumpyArrayAlmostEqual(alg.x.as_array(), b.as_array())
def test_FISTA(self):
    print("Test FISTA")
    ig = ImageGeometry(127, 139, 149)
    x_init = ImageData(geometry=ig)
    b = x_init.copy()
    # fill with random numbers
    b.fill(numpy.random.random(x_init.shape))
    x_init = ImageData(geometry=ig)
    x_init.fill(numpy.random.random(x_init.shape))

    identity = TomoIdentity(geometry=ig)

    norm2sq = Norm2sq(identity, b)
    opt = {'tol': 1e-4, 'memopt': False}
    alg = FISTA(x_init=x_init, f=norm2sq, g=None, opt=opt)
    alg.max_iteration = 2
    alg.run(20, verbose=True)
    self.assertNumpyArrayAlmostEqual(alg.x.as_array(), b.as_array())

    alg.run(20, verbose=True)
    self.assertNumpyArrayAlmostEqual(alg.x.as_array(), b.as_array())
# Using the test data b, different reconstruction methods can now be set up as
# demonstrated in the rest of this file. In general all methods need an initial
# guess and some algorithm options to be set:
x_init = ig.allocate(0.0)
opt = {'tol': 1e-4, 'iter': 200}

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5. Note it is least squares over all channels:
# f = Norm2Sq(Aop, b, c=0.5)
f = FunctionOperatorComposition(L2NormSquared(b=b), Aop)

# Run FISTA for least squares without regularization
FISTA_alg = FISTA()
FISTA_alg.set_up(x_init=x_init, f=f, g=ZeroFunction())
FISTA_alg.max_iteration = 2000
FISTA_alg.run(opt['iter'])
x_FISTA = FISTA_alg.get_output()

# Display reconstruction and criterion
ff0, axarrf0 = plt.subplots(1, numchannels)
for k in numpy.arange(3):
    axarrf0[k].imshow(x_FISTA.as_array()[k], vmin=0, vmax=2.5)
plt.show()

plt.figure()
plt.semilogy(FISTA_alg.objective)
plt.title('Criterion vs iterations, least squares')
plt.show()

# FISTA can also solve regularised forms by specifying a second function object
# such as 1-norm regularisation with choice of regularisation parameter lam.
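# The block below is a hedged sketch (not part of the original demo) of the
# regularised form mentioned in the comment above: the same least-squares
# fidelity is kept as the smooth term f, and 1-norm regularisation with an
# illustrative parameter lam is passed as the non-smooth term g. Aop, b,
# x_init and opt are assumed to be the objects already defined in this script.
lam = 0.1                                  # illustrative value, not from the demo
f_ls = FunctionOperatorComposition(L2NormSquared(b=b), Aop)
g_l1 = lam * L1Norm()                      # non-smooth term, handled via its proximal map

FISTA_reg = FISTA()
FISTA_reg.set_up(x_init=x_init, f=f_ls, g=g_l1)
FISTA_reg.max_iteration = 2000
FISTA_reg.run(opt['iter'])
x_FISTA_reg = FISTA_reg.get_output()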
x_init = A3D.volume_geometry.allocate()

# Allocate space for the channel-wise reconstruction
fista_sol_TV_channel_wise = A3D_chan.volume_geometry.allocate()

for i in range(ag.channels):
    # Set up the L2NormSquared fidelity term for each channel
    f = FunctionOperatorComposition(
        0.5 * L2NormSquared(b=data.subset(channel=i)), A3D)

    # Run FISTA
    fista = FISTA(x_init=x_init, f=f, g=g)
    fista.max_iteration = 100
    fista.update_objective_interval = 50
    fista.run(400, verbose=True, callback=show_data_3D)
    np.copyto(fista_sol_TV_channel_wise.array[i], fista.get_output().array)

#%% show reconstruction
show_4D_channel_slice(fista_sol_TV_channel_wise, 5,
                      'FISTA TV channel-wise reconstruction')
show_4D_channel_slice(fista_sol_TV_channel_wise, 10,
                      'FISTA TV channel-wise reconstruction')
show_4D_channel_slice(fista_sol_TV_channel_wise, 15,
                      'FISTA TV channel-wise reconstruction')

#%% Coupled total variation reconstruction in the 4D volume. There is no GPU
# implementation for this case, but we can use another algorithm called PDHG
# (primal-dual hybrid gradient).
# Set up operators: Projection and Gradient
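# Hedged sketch (an assumption, not the original script) of the coupled 4D TV
# problem announced in the comment above, solved with PDHG. A3D_chan and data
# are assumed from the surrounding script, MixedL21Norm is assumed to be
# available alongside the other optimisation functions used here, and alpha_TV
# is an illustrative regularisation weight.
op1 = Gradient(A3D_chan.volume_geometry)          # gradient over the 4D volume
op2 = A3D_chan                                    # multi-channel projection operator
operator = BlockOperator(op1, op2, shape=(2, 1))

alpha_TV = 0.05
f = BlockFunction(alpha_TV * MixedL21Norm(),      # coupled TV term
                  0.5 * L2NormSquared(b=data))    # data fidelity
g = ZeroFunction()

normK = operator.norm()
sigma = 1
tau = 1 / (sigma * normK ** 2)

pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma)
pdhg.max_iteration = 1000
pdhg.update_objective_interval = 100
pdhg.run(1000, verbose=True)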
# Regularisation Parameter
alpha = 5

###############################################################################
# Setup and run the FISTA algorithm
operator = Gradient(ig)
fidelity = L1Norm(b=noisy_data)
regulariser = FunctionOperatorComposition(alpha * L2NormSquared(), operator)

x_init = ig.allocate()
opt = {'memopt': True}

fista = FISTA(x_init=x_init, f=regulariser, g=fidelity, opt=opt)
fista.max_iteration = 2000
fista.update_objective_interval = 50
fista.run(2000, verbose=False)

###############################################################################
###############################################################################
# Setup and run the PDHG algorithm
op1 = Gradient(ig)
op2 = Identity(ig, ag)

operator = BlockOperator(op1, op2, shape=(2, 1))
f = BlockFunction(alpha * L2NormSquared(), fidelity)
g = ZeroFunction()

normK = operator.norm()
sigma = 1
tau = 1 / (sigma * normK**2)
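# Hedged completion (not in the original excerpt): with the block operator,
# BlockFunction and step sizes sigma/tau defined above, PDHG can be
# instantiated and run so that its result can be compared against the FISTA
# solution computed earlier in this script.
pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma)
pdhg.max_iteration = 2000
pdhg.update_objective_interval = 50
pdhg.run(2000, verbose=False)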
sin = Aop.direct(data)
eta = 0
noisy_data = AcquisitionData(sin.as_array() + np.random.normal(0, 1, ag.shape))
back_proj = Aop.adjoint(noisy_data)

# Define Least Squares
f = FunctionOperatorComposition(L2NormSquared(b=noisy_data), Aop)

# Allocate solution
x_init = ig.allocate()

# Run FISTA for least squares
fista = FISTA(x_init=x_init, f=f, g=ZeroFunction())
fista.max_iteration = 10
fista.update_objective_interval = 2
fista.run(100, verbose=True)

# Run FISTA for least squares with lower/upper bound
fista0 = FISTA(x_init=x_init, f=f, g=IndicatorBox(lower=0, upper=1))
fista0.max_iteration = 10
fista0.update_objective_interval = 2
fista0.run(100, verbose=True)

# Run FISTA for regularised least squares, with squared norm of Gradient
alpha = 20
Grad = Gradient(ig)
block_op = BlockOperator(Aop, alpha * Grad, shape=(2, 1))
block_data = BlockDataContainer(noisy_data, Grad.range_geometry().allocate())
f1 = FunctionOperatorComposition(L2NormSquared(b=block_data), block_op)
fista1 = FISTA(x_init=x_init, f=f1, g=IndicatorBox(lower=0, upper=1))
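# Sketch of an assumed continuation: run the regularised least-squares problem
# set up above, mirroring the unregularised and box-constrained runs.
fista1.max_iteration = 10
fista1.update_objective_interval = 2
fista1.run(100, verbose=True)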
    out *= 0.5

    # ADD the constraint here
    out.maximum(0, out=out)

fid.proximal = KL_Prox_PosCone

reg = FunctionOperatorComposition(alpha * L2NormSquared(), operator)

x_init = ig.allocate()
fista = FISTA(x_init=x_init, f=reg, g=fid)
fista.max_iteration = 3000
fista.update_objective_interval = 500
fista.run(3000, verbose=True)

# Show results
plt.figure(figsize=(15, 15))
plt.subplot(3, 1, 1)
plt.imshow(data.as_array())
plt.title('Ground Truth')
plt.colorbar()
plt.subplot(3, 1, 2)
plt.imshow(noisy_data.as_array())
plt.title('Noisy Data')
plt.colorbar()
plt.subplot(3, 1, 3)
plt.imshow(fista.get_output().as_array())
plt.title('Reconstruction')
plt.colorbar()
plt.show()

#%% Use FISTA algorithm to solve the same smoothed TV regularised denoising problem

# Manually set a guess of the Lipschitz parameter of the function, which FISTA
# uses for its step-size selection.
objective_function.L = 1000000

# Set up and run the FISTA algorithm
print("Running FISTA with smooth approximation of TV.\nThis will take some time .... ")
fi = FISTA(x_init, objective_function,
           max_iteration=10000, update_objective_interval=100)
fi.run(verbose=True)

## Show FISTA reconstruction results
plt.figure(figsize=(20, 5))
plt.subplot(1, 4, 1)
plt.imshow(data.as_array())
plt.title('Ground Truth')
plt.colorbar()
plt.subplot(1, 4, 2)
plt.imshow(noisy_data.as_array())
plt.title('Noisy Data')
plt.colorbar()
plt.subplot(1, 4, 3)
plt.imshow(fi.get_output().as_array())
plt.title('FISTA Reconstruction')
plt.colorbar()
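# Hedged note (not from the original script): FISTA takes gradient steps of
# size 1/L, which is why a Lipschitz guess is set above. If the smooth
# objective has the form f(K x) with K a linear operator and f having a
# gradient with Lipschitz constant Lf, a standard bound that avoids the
# hard-coded value is
#
#     L(f o K) <= Lf * ||K||**2
#
# e.g., assuming the composition uses a Gradient operator on geometry ig:
# objective_function.L = Lf * Gradient(ig).norm() ** 2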
def test_FISTA_cvx(self):
    if False:
        if not cvx_not_installable:
            try:
                # Problem data.
                m = 30
                n = 20
                np.random.seed(1)
                Amat = np.random.randn(m, n)
                A = LinearOperatorMatrix(Amat)
                bmat = np.random.randn(m)
                bmat.shape = (bmat.shape[0], 1)

                # A = Identity()
                # Change n to equal to m.
                # b = DataContainer(bmat)
                vg = VectorGeometry(m)
                b = vg.allocate('random')

                # Regularization parameter
                lam = 10
                opt = {'memopt': True}

                # Create object instances with the test data A and b.
                f = LeastSquares(A, b, c=0.5)
                g0 = ZeroFunction()

                # Initial guess
                # x_init = DataContainer(np.zeros((n, 1)))
                x_init = vg.allocate()
                f.gradient(x_init, out=x_init)

                # Run FISTA for least squares plus zero function.
                # x_fista0, it0, timing0, criter0 = FISTA(x_init, f, g0, opt=opt)
                fa = FISTA(x_init=x_init, f=f, g=g0)
                fa.max_iteration = 10
                fa.run(10)

                # Print solution and final objective/criterion value for comparison
                print("FISTA least squares plus zero function solution and objective value:")
                print(fa.get_output())
                print(fa.get_last_objective())

                # Compare to CVXPY
                # Construct the problem.
                x0 = Variable(n)
                objective0 = Minimize(0.5 * sum_squares(Amat * x0 - bmat.T[0]))
                prob0 = Problem(objective0)

                # The optimal objective is returned by prob.solve().
                result0 = prob0.solve(verbose=False, solver=SCS, eps=1e-9)

                # The optimal solution for x is stored in x.value and the optimal
                # objective value is in result as well as in objective.value.
                print("CVXPY least squares plus zero function solution and objective value:")
                print(x0.value)
                print(objective0.value)

                self.assertNumpyArrayAlmostEqual(
                    numpy.squeeze(fa.get_output().array), x0.value, 6)
            except SolverError as se:
                print(str(se))
                self.assertTrue(True)
        else:
            self.assertTrue(cvx_not_installable)
def stest_FISTA_Norm1_cvx(self):
    if not cvx_not_installable:
        try:
            opt = {'memopt': True}
            # Problem data.
            m = 30
            n = 20
            np.random.seed(1)
            Amat = np.random.randn(m, n)
            A = LinearOperatorMatrix(Amat)
            bmat = np.random.randn(m)
            # bmat.shape = (bmat.shape[0], 1)

            # A = Identity()
            # Change n to equal to m.
            vgb = VectorGeometry(m)
            vgx = VectorGeometry(n)
            b = vgb.allocate()
            b.fill(bmat)
            # b = DataContainer(bmat)

            # Regularization parameter
            lam = 10
            opt = {'memopt': True}

            # Create object instances with the test data A and b.
            f = LeastSquares(A, b, c=0.5)
            g0 = ZeroFunction()

            # Initial guess
            # x_init = DataContainer(np.zeros((n, 1)))
            x_init = vgx.allocate()

            # Create 1-norm object instance
            g1 = lam * L1Norm()
            g1(x_init)
            g1.prox(x_init, 0.02)

            # Combine with least squares and solve using generic FISTA implementation
            # x_fista1, it1, timing1, criter1 = FISTA(x_init, f, g1, opt=opt)
            fa = FISTA(x_init=x_init, f=f, g=g1)
            fa.max_iteration = 10
            fa.run(10)

            # Print for comparison
            print("FISTA least squares plus 1-norm solution and objective value:")
            print(fa.get_output())
            print(fa.get_last_objective())

            # Compare to CVXPY
            # Construct the problem. bmat is kept 1-D here (the reshape above is
            # commented out), so it is used directly in the objective.
            x1 = Variable(n)
            objective1 = Minimize(
                0.5 * sum_squares(Amat * x1 - bmat) + lam * norm(x1, 1))
            prob1 = Problem(objective1)

            # The optimal objective is returned by prob.solve().
            result1 = prob1.solve(verbose=False, solver=SCS, eps=1e-9)

            # The optimal solution for x is stored in x.value and the optimal
            # objective value is in result as well as in objective.value.
            print("CVXPY least squares plus 1-norm solution and objective value:")
            print(x1.value)
            print(objective1.value)

            self.assertNumpyArrayAlmostEqual(
                numpy.squeeze(fa.get_output().array), x1.value, 6)
        except SolverError as se:
            print(str(se))
            self.assertTrue(True)
    else:
        self.assertTrue(cvx_not_installable)
pdhg.update_objective_interval = 100
pdhg.run(1000, verbose=True)

#%%
###############################################################################
# Setup and run the FISTA algorithm
print("Running FISTA reconstruction")

fidelity = FunctionOperatorComposition(L2NormSquared(b=sinogram), Aop)
regularizer = ZeroFunction()

fista = FISTA()
fista.set_up(x_init=x_init, f=fidelity, g=regularizer)
fista.max_iteration = 500
fista.update_objective_interval = 100
fista.run(500, verbose=True)

#%% Show results
plt.figure(figsize=(10, 10))
plt.suptitle('Reconstructions ', fontsize=16)

plt.subplot(2, 2, 1)
plt.imshow(cgls.get_output().as_array())
plt.colorbar()
plt.title('CGLS reconstruction')

plt.subplot(2, 2, 2)
plt.imshow(fista.get_output().as_array())
plt.colorbar()
plt.title('FISTA reconstruction')