Пример #1
0
    def test_FISTA_cvx(self):
        """FISTA on least squares with a zero regulariser, cross-checked
        against a CVXPY reference solution.

        Skipped (asserts the skip flag) when CVXPY is not installed.
        Fixes over previous version: removed the unused local ``lam`` and the
        stale commented-out code; use the block's ``np`` alias consistently
        instead of mixing ``np`` and ``numpy``.
        """
        if not cvx_not_installable:
            # Problem data: random m x n system A x ~ b.
            m = 30
            n = 20
            np.random.seed(1)  # deterministic data for a reproducible assertion
            Amat = np.random.randn(m, n)
            A = LinearOperatorMatrix(Amat)
            bmat = np.random.randn(m)
            bmat.shape = (bmat.shape[0], 1)  # reshape to a column vector (m, 1)

            b = DataContainer(bmat)

            # memopt enables the memory-optimised code paths.
            opt = {'memopt': True}
            # Create object instances with the test data A and b.
            f = Norm2sq(A, b, c=0.5, memopt=True)
            g0 = ZeroFun()

            # Initial guess
            x_init = DataContainer(np.zeros((n, 1)))

            # Smoke-test the gradient evaluation (result intentionally discarded).
            f.grad(x_init)

            # Run FISTA for least squares plus zero function.
            x_fista0, it0, timing0, criter0 = FISTA(x_init, f, g0, opt=opt)

            # Print solution and final objective/criterion value for comparison
            print(
                "FISTA least squares plus zero function solution and objective value:"
            )
            print(x_fista0.array)
            print(criter0[-1])

            # Compare to CVXPY

            # Construct the problem.
            x0 = Variable(n)
            objective0 = Minimize(0.5 * sum_squares(Amat * x0 - bmat.T[0]))
            prob0 = Problem(objective0)

            # The optimal objective is returned by prob.solve().
            result0 = prob0.solve(verbose=False, solver=SCS, eps=1e-9)

            # The optimal solution for x is stored in x.value and optimal objective value
            # is in result as well as in objective.value
            print(
                "CVXPY least squares plus zero function solution and objective value:"
            )
            print(x0.value)
            print(objective0.value)
            # FISTA solution must agree with the CVXPY reference to 6 decimals.
            self.assertNumpyArrayAlmostEqual(np.squeeze(x_fista0.array),
                                             x0.value, 6)
        else:
            # Record the skip reason: CVXPY is not installed.
            self.assertTrue(cvx_not_installable)
# Wrap the right-hand side vector in the framework's DataContainer type.
b = DataContainer(bmat)

# Regularization parameter
lam = 10
# memopt enables the memory-optimised code paths in the functions/algorithm.
opt = {'memopt':True}
# Create object instances with the test data A and b.
f = Norm2sq(A,b,c=0.5, memopt=True)
g0 = ZeroFun()

# Initial guess
x_init = DataContainer(np.zeros((n,1)))

# Smoke-test the gradient evaluation at the initial point (result discarded).
f.grad(x_init)

# Run FISTA for least squares plus zero function.
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, g0 , opt=opt)

# Print solution and final objective/criterion value for comparison
print("FISTA least squares plus zero function solution and objective value:")
print(x_fista0.array)
print(criter0[-1])
if use_cvxpy:
    # Compare to CVXPY
    
    # Construct the problem.
    x0 = Variable(n)
    objective0 = Minimize(0.5*sum_squares(Amat*x0 - bmat.T[0]) )
    prob0 = Problem(objective0)
    
    # The optimal objective is returned by prob.solve().
Пример #3
0
plt.show()

# Plot the CGLS criterion history on a logarithmic scale.
plt.semilogy(criter_CGLS)
plt.title('CGLS criterion')
plt.show()

# CGLS solves the simple least-squares problem. The same problem can be solved
# by FISTA by setting up explicitly a least squares function object and using
# no regularisation:

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5:
f = Norm2sq(Aop, b, c=0.5)

# Run FISTA for least squares without regularization
# (None as the second function acts as a zero regulariser).
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, None, opt)

plt.imshow(x_fista0.array)
plt.title('FISTA Least squares')
plt.show()

plt.semilogy(criter0)
plt.title('FISTA Least squares criterion')
plt.show()

# FISTA can also solve regularised forms by specifying a second function object
# such as 1-norm regularisation with choice of regularisation parameter lam:

# Create 1-norm function object
lam = 0.1
g0 = Norm1(lam)
Пример #4
0
plt.colorbar()
plt.show()

# Plot the SIRT box(0,1)-constrained criterion history on a log scale.
plt.semilogy(criter_SIRT01)
plt.title('SIRT box(0,1) criterion')
plt.show()

# The indicator function can also be used with the FISTA algorithm to do
# least squares with nonnegativity constraint.

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5:
f = Norm2sq(Aop, b, c=0.5)

# Run FISTA for least squares without constraints
# (None in place of a second function means no regulariser/constraint).
x_fista, it, timing, criter = FISTA(x_init, f, None, opt)

plt.imshow(x_fista.array)
plt.title('FISTA Least squares')
plt.show()

plt.semilogy(criter)
plt.title('FISTA Least squares criterion')
plt.show()

# Run FISTA for least squares with nonnegativity constraint,
# enforced through the indicator function of the box [0, +inf).
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, IndicatorBox(lower=0), opt)

plt.imshow(x_fista0.array)
plt.title('FISTA Least squares nonneg')
plt.show()
Пример #5
0
    # Create least squares object instance with projector and data.
    print("Create least squares object instance with projector and data.")
    f = Norm2sq(Aop, DataContainer(sino_channel), c=0.5)

    print("Run FISTA-TV for least squares")
    # Regularisation strength for the FGP-TV prior.
    lamtv = 5
    opt = {'tol': 1e-4, 'iter': 200}
    # FGP-TV regulariser from the CCPi Regularisation Toolkit;
    # runs on the GPU (device='gpu').
    g_fgp = FGP_TV(lambdaReg=lamtv,
                   iterationsTV=50,
                   tolerance=1e-6,
                   methodTV=0,
                   nonnegativity=0,
                   printing=0,
                   device='gpu')

    x_fista_fgp, it1, timing1, criter_fgp = FISTA(x_init, f, g_fgp, opt)
    # Store this channel's reconstruction into the output stack
    # (i presumably indexes the current channel — defined outside this view).
    REC_chan[i, :, :] = x_fista_fgp.array
    """
    plt.figure()
    plt.subplot(121)
    plt.imshow(x_fista_fgp.array, vmin=0, vmax=0.05)
    plt.title('FISTA FGP TV')
    plt.subplot(122)
    plt.semilogy(criter_fgp)
    plt.show()
    """
    """
    print ("Run CGLS for least squares")
    opt = {'tol': 1e-4, 'iter': 20}
    x_init = ImageData(geometry=ig)
    x_CGLS, it_CGLS, timing_CGLS, criter_CGLS = CGLS(x_init, Aop, DataContainer(sino_channel), opt=opt)
Пример #6
0
plt.imshow(z1.subset(vertical=68).array)
plt.show()

# Set initial guess
print("Initial guess")
x_init = ImageData(geometry=ig)

# Create least squares object instance with projector and data.
print("Create least squares object instance with projector and data.")
f = Norm2sq(Cop, padded_data2, c=0.5)

# Run FISTA reconstruction for least squares without regularization
print("Run FISTA for least squares")
opt = {'tol': 1e-4, 'iter': 100}
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, None, opt=opt)

plt.imshow(x_fista0.subset(horizontal_x=80).array)
plt.title('FISTA LS')
plt.colorbar()
plt.show()

# Set up 1-norm function for FISTA least squares plus 1-norm regularisation
print("Run FISTA for least squares plus 1-norm regularisation")
lam = 0.1
g0 = Norm1(lam)

# Run FISTA for least squares plus 1-norm function.
x_fista1, it1, timing1, criter1 = FISTA(x_init, f, g0, opt=opt)

# NOTE(review): this displays x_fista0 (the unregularised solution) again,
# right after computing x_fista1 — presumably x_fista1 was intended; confirm.
plt.imshow(x_fista0.subset(horizontal_x=80).array)
Пример #7
0
    def test_FISTA_denoise_cvx(self):
        """Denoising with FISTA and FBPD (LS + 1-norm, then LS + TV),
        cross-checked against CVXPY reference solutions.

        Skipped (asserts the skip flag) when CVXPY is not installed.
        Fix over previous version: the banner printed before the TV
        comparison wrongly said "1-norm"; it now names the TV problem.
        """
        if not cvx_not_installable:
            opt = {'memopt': True}
            # Build a 2D piecewise-constant phantom on an N x N grid.
            N = 64
            ig = ImageGeometry(voxel_num_x=N, voxel_num_y=N)
            Phantom = ImageData(geometry=ig)

            x = Phantom.as_array()

            # Central square at 0.5 with a brighter vertical bar set to 1.
            x[int(round(N / 4)):int(round(3 * N / 4)),
              int(round(N / 4)):int(round(3 * N / 4))] = 0.5
            x[int(round(N / 8)):int(round(7 * N / 8)),
              int(round(3 * N / 8)):int(round(5 * N / 8))] = 1

            # Identity operator for denoising
            I = TomoIdentity(ig)

            # Data and add noise
            y = I.direct(Phantom)
            y.array = y.array + 0.1 * np.random.randn(N, N)

            # Data fidelity term
            f_denoise = Norm2sq(I, y, c=0.5, memopt=True)

            # 1-norm regulariser
            lam1_denoise = 1.0
            g1_denoise = Norm1(lam1_denoise)

            # Initial guess
            x_init_denoise = ImageData(np.zeros((N, N)))

            # Combine with least squares and solve using generic FISTA implementation
            x_fista1_denoise, it1_denoise, timing1_denoise, \
                criter1_denoise = \
                FISTA(x_init_denoise, f_denoise, g1_denoise, opt=opt)

            print(x_fista1_denoise)
            print(criter1_denoise[-1])

            # Now denoise LS + 1-norm with FBPD
            x_fbpd1_denoise, itfbpd1_denoise, timingfbpd1_denoise,\
                criterfbpd1_denoise = \
                FBPD(x_init_denoise, I, None, f_denoise, g1_denoise)
            print(x_fbpd1_denoise)
            print(criterfbpd1_denoise[-1])

            # Compare to CVXPY

            # Construct the problem.
            x1_denoise = Variable(N**2, 1)
            objective1_denoise = Minimize(
                0.5 * sum_squares(x1_denoise - y.array.flatten()) +
                lam1_denoise * norm(x1_denoise, 1))
            prob1_denoise = Problem(objective1_denoise)

            # The optimal objective is returned by prob.solve().
            result1_denoise = prob1_denoise.solve(verbose=False,
                                                  solver=SCS,
                                                  eps=1e-12)

            # The optimal solution for x is stored in x.value and optimal objective value
            # is in result as well as in objective.value
            print(
                "CVXPY least squares plus 1-norm solution and objective value:"
            )
            print(x1_denoise.value)
            print(objective1_denoise.value)
            # Both FISTA and FBPD must agree with the CVXPY reference.
            self.assertNumpyArrayAlmostEqual(x_fista1_denoise.array.flatten(),
                                             x1_denoise.value, 5)

            self.assertNumpyArrayAlmostEqual(x_fbpd1_denoise.array.flatten(),
                                             x1_denoise.value, 5)
            x1_cvx = x1_denoise.value
            x1_cvx.shape = (N, N)

            # Now TV with FBPD
            lam_tv = 0.1
            gtv = TV2D(lam_tv)
            # Smoke-test evaluating the TV functional (result discarded).
            gtv(gtv.op.direct(x_init_denoise))

            opt_tv = {'tol': 1e-4, 'iter': 10000}

            x_fbpdtv_denoise, itfbpdtv_denoise, timingfbpdtv_denoise,\
                criterfbpdtv_denoise = \
                FBPD(x_init_denoise, gtv.op, None, f_denoise, gtv, opt=opt_tv)
            print(x_fbpdtv_denoise)
            print(criterfbpdtv_denoise[-1])

            # Compare to CVXPY

            # Construct the problem.
            xtv_denoise = Variable((N, N))
            objectivetv_denoise = Minimize(0.5 *
                                           sum_squares(xtv_denoise - y.array) +
                                           lam_tv * tv(xtv_denoise))
            probtv_denoise = Problem(objectivetv_denoise)

            # The optimal objective is returned by prob.solve().
            resulttv_denoise = probtv_denoise.solve(verbose=False,
                                                    solver=SCS,
                                                    eps=1e-12)

            # The optimal solution for x is stored in x.value and optimal objective value
            # is in result as well as in objective.value
            # (Banner fixed: this is the TV problem, not the 1-norm one.)
            print(
                "CVXPY least squares plus TV solution and objective value:"
            )
            print(xtv_denoise.value)
            print(objectivetv_denoise.value)

            # TV solutions only match loosely, hence the 1-decimal tolerance.
            self.assertNumpyArrayAlmostEqual(x_fbpdtv_denoise.as_array(),
                                             xtv_denoise.value, 1)

        else:
            # Record the skip reason: CVXPY is not installed.
            self.assertTrue(cvx_not_installable)
Пример #8
0
    def test_FISTA_Norm1_cvx(self):
        """FISTA on least squares plus 1-norm regularisation, cross-checked
        against a CVXPY reference solution.

        Skipped (asserts the skip flag) when CVXPY is not installed.
        Fixes over previous version: removed a duplicate identical ``opt``
        assignment and an unused ``ZeroFun`` instance; use the block's
        ``np`` alias consistently instead of mixing ``np`` and ``numpy``.
        """
        if not cvx_not_installable:
            # Problem data: random m x n system A x ~ b.
            m = 30
            n = 20
            np.random.seed(1)  # deterministic data for a reproducible assertion
            Amat = np.random.randn(m, n)
            A = LinearOperatorMatrix(Amat)
            bmat = np.random.randn(m)
            bmat.shape = (bmat.shape[0], 1)  # reshape to a column vector (m, 1)

            b = DataContainer(bmat)

            # Regularization parameter
            lam = 10
            # memopt enables the memory-optimised code paths.
            opt = {'memopt': True}
            # Create object instances with the test data A and b.
            f = Norm2sq(A, b, c=0.5, memopt=True)

            # Initial guess
            x_init = DataContainer(np.zeros((n, 1)))

            # Create 1-norm object instance
            g1 = Norm1(lam)

            # Smoke-test the functional evaluation and the proximal operator
            # (results intentionally discarded).
            g1(x_init)
            g1.prox(x_init, 0.02)

            # Combine with least squares and solve using generic FISTA implementation
            x_fista1, it1, timing1, criter1 = FISTA(x_init, f, g1, opt=opt)

            # Print for comparison
            print(
                "FISTA least squares plus 1-norm solution and objective value:"
            )
            print(x_fista1.as_array().squeeze())
            print(criter1[-1])

            # Compare to CVXPY

            # Construct the problem.
            x1 = Variable(n)
            objective1 = Minimize(0.5 * sum_squares(Amat * x1 - bmat.T[0]) +
                                  lam * norm(x1, 1))
            prob1 = Problem(objective1)

            # The optimal objective is returned by prob.solve().
            result1 = prob1.solve(verbose=False, solver=SCS, eps=1e-9)

            # The optimal solution for x is stored in x.value and optimal objective value
            # is in result as well as in objective.value
            print(
                "CVXPY least squares plus 1-norm solution and objective value:"
            )
            print(x1.value)
            print(objective1.value)

            # FISTA solution must agree with the CVXPY reference to 6 decimals.
            self.assertNumpyArrayAlmostEqual(np.squeeze(x_fista1.array),
                                             x1.value, 6)
        else:
            # Record the skip reason: CVXPY is not installed.
            self.assertTrue(cvx_not_installable)
Пример #9
0
plt.title('FBPD TV')
plt.subplot(122)
plt.semilogy(criter_fbpdtv)
plt.show()

# Set up the ROF variant of TV from the CCPi Regularisation Toolkit and run
# TV-reconstruction using FISTA
g_rof = ROF_TV(lambdaReg = lamtv,
                 iterationsTV=50,
                 tolerance=1e-5,
                 time_marchstep=0.01,
                 device='cpu')

opt = {'tol': 1e-4, 'iter': 100}

x_fista_rof, it1, timing1, criter_rof = FISTA(x_init, f, g_rof,opt)

# Display the ROF-TV reconstruction alongside its criterion history.
plt.figure()
plt.subplot(121)
plt.imshow(x_fista_rof.array)
plt.title('FISTA ROF TV')
plt.subplot(122)
plt.semilogy(criter_rof)
plt.show()

# Repeat for FGP variant.
g_fgp = FGP_TV(lambdaReg = lamtv,
                 iterationsTV=50,
                 tolerance=1e-5,
                 methodTV=0,
                 nonnegativity=0,
# Wrap the right-hand side vector in the framework's DataContainer type.
b = DataContainer(bmat)

# Regularization parameter
lam = 10
# memopt enables the memory-optimised code paths in the functions/algorithm.
opt = {'memopt':True}
# Create object instances with the test data A and b.
f = Norm2sq(A,b,c=0.5, memopt=True)
g0 = ZeroFun()

# Initial guess
x_init = DataContainer(np.zeros((n,1)))

# Smoke-test the gradient evaluation at the initial point (result discarded).
f.grad(x_init)

# Run FISTA for least squares plus zero function.
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, g0 , opt=opt)

# Print solution and final objective/criterion value for comparison
print("FISTA least squares plus zero function solution and objective value:")
print(x_fista0.array)
print(criter0[-1])

# Solve the same least-squares objective with plain gradient descent;
# iterating the algorithm object advances it one step per iteration.
gd = GradientDescent(x_init=x_init, objective_function=f, rate=0.001)
gd.max_iteration = 5000

# Report the current loss every 100 iterations.
for i,el in enumerate(gd):
    if i%100 == 0:
        print ("\rIteration {} Loss: {}".format(gd.iteration, 
               gd.get_current_loss()))

Пример #11
0
plt.show()

# Plot the CGLS criterion history on a logarithmic scale.
plt.semilogy(criter_CGLS)
plt.title('CGLS criterion')
plt.show()

# CGLS solves the simple least-squares problem. The same problem can be solved
# by FISTA by setting up explicitly a least squares function object and using
# no regularisation:

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5:
f = Norm2sq(Cop, b, c=0.5)

# Run FISTA for least squares without regularization
# (None as the second function acts as a zero regulariser).
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, None, opt=opt)

plt.imshow(x_fista0.subset(vertical=0).array)
plt.title('FISTA Least squares')
plt.show()

plt.semilogy(criter0)
plt.title('FISTA Least squares criterion')
plt.show()

# FISTA can also solve regularised forms by specifying a second function object
# such as 1-norm regularisation with choice of regularisation parameter lam:

# Create 1-norm function object
lam = 0.1
g0 = Norm1(lam)
Пример #12
0
plt.show()

# Plot the CGLS criterion history on a logarithmic scale.
plt.semilogy(criter_CGLS)
plt.title('CGLS Criterion vs iterations')
plt.show()

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5. Note it is least squares over all channels.
f = Norm2sq(Aall, data2d, c=0.5)

# Options for FISTA algorithm.
opt = {'tol': 1e-4, 'iter': 100}

# Run FISTA for least squares without regularization and display one channel
# reconstruction as image.
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, None, opt)

plt.imshow(x_fista0.subset(channel=100).array)
plt.title('FISTA LS')
plt.show()

plt.semilogy(criter0)
plt.title('FISTA LS Criterion vs iterations')
plt.show()

# Set up 1-norm regularisation (over all channels), solve with FISTA, and
# display one channel of reconstruction.
lam = 0.1
g0 = Norm1(lam)

x_fista1, it1, timing1, criter1 = FISTA(x_init, f, g0, opt)
Пример #13
0
# Display the three channels of the test data z side by side.
for k in range(3):
    axarro[k].imshow(z.as_array()[k], vmin=0, vmax=3500)
plt.show()

# Using the test data b, different reconstruction methods can now be set up as
# demonstrated in the rest of this file. In general all methods need an initial
# guess and some algorithm options to be set:
x_init = ImageData(numpy.zeros(x.shape), geometry=ig)
opt = {'tol': 1e-4, 'iter': 200}

# Create least squares object instance with projector, test data and a constant
# coefficient of 0.5. Note it is least squares over all channels:
f = Norm2sq(Aop, b, c=0.5)

# Run FISTA for least squares without regularization
x_fista0, it0, timing0, criter0 = FISTA(x_init, f, None, opt)

# Display reconstruction and criteration
ff0, axarrf0 = plt.subplots(1, numchannels)
for k in numpy.arange(3):
    axarrf0[k].imshow(x_fista0.as_array()[k], vmin=0, vmax=2.5)
plt.show()

plt.semilogy(criter0)
plt.title('Criterion vs iterations, least squares')
plt.show()

# FISTA can also solve regularised forms by specifying a second function object
# such as 1-norm regularisation with choice of regularisation parameter lam.
# Again the regulariser is over all channels:
lam = 10