Example #1
    def test_SPDHG_vs_PDHG_explicit(self):
        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))

        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 180)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')
        # Select device
        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data. Poisson noise is applied (noise = noises[1])
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        if noise == 'poisson':
            scale = 5
            noisy_data = scale * applynoise.poisson(sin / scale, seed=10)
            # np.random.seed(10)
            # scale = 5
            # eta = 0
            # noisy_data = AcquisitionData(np.random.poisson( scale * (eta + sin.as_array()))/scale, ag)
        elif noise == 'gaussian':
            noisy_data = applynoise.gaussian(sin, var=0.1, seed=10)
            # np.random.seed(10)
            # n1 = np.random.normal(0, 0.1, size = ag.shape)
            # noisy_data = AcquisitionData(n1 + sin.as_array(), ag)

        else:
            raise ValueError('Unsupported Noise ', noise)

        #%% 'explicit' SPDHG, scalar step-sizes
        subsets = 10
        size_of_subsets = int(len(angles) / subsets)
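        # 180 angles split into 10 contiguous subsets of 18 angles each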
        # create Gradient operator
        op1 = GradientOperator(ig)
        # split the angles into uniform, sequential subsets
        list_angles = [
            angles[i:i + size_of_subsets]
            for i in range(0, len(angles), size_of_subsets)
        ]
        # create an acquisition geometry for each subset of angles
        list_geoms = [
            AcquisitionGeometry('parallel',
                                '2D',
                                list_angles[i],
                                detectors,
                                pixel_size_h=0.1,
                                angle_unit='radian')
            for i in range(len(list_angles))
        ]
        # create one projector per angle subset, plus the gradient operator
        A = BlockOperator(*[
            AstraProjectorSimple(ig, list_geoms[i], dev)
            for i in range(subsets)
        ] + [op1])
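        # A stacks the 10 per-subset projectors plus the gradient operator (11 blocks)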
        ## number of subsets
        #(sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
        #
        ## acquisition data
        AD_list = []
        # pair each angle subset with its own acquisition geometry
        for sub_num, i in enumerate(range(0, len(angles), size_of_subsets)):
            arr = noisy_data.as_array()[i:i + size_of_subsets, :]
            AD_list.append(
                AcquisitionData(arr, geometry=list_geoms[sub_num]))

        g = BlockDataContainer(*AD_list)
        alpha = 0.5
        ## block function
        F = BlockFunction(
            *[KullbackLeibler(b=g[i]) for i in range(subsets)],
            alpha * MixedL21Norm())
        G = IndicatorBox(lower=0)

        prob = [1 / (2 * subsets)] * (len(A) - 1) + [1 / 2]
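        # each data-fidelity block is sampled with probability 1/(2*subsets) and the
        # gradient block with probability 1/2, so the data terms and the regulariser
        # are visited equally often overall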
        spdhg = SPDHG(f=F,
                      g=G,
                      operator=A,
                      max_iteration=1000,
                      update_objective_interval=200,
                      prob=prob)
        spdhg.run(1000, verbose=0)

        #%% 'explicit' PDHG, scalar step-sizes
        op1 = GradientOperator(ig)
        op2 = Aop
        # Create BlockOperator
        operator = BlockOperator(op1, op2, shape=(2, 1))
        f2 = KullbackLeibler(b=noisy_data)
        g = IndicatorBox(lower=0)
        normK = operator.norm()
        sigma = 1 / normK
        tau = 1 / normK
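        # sigma = tau = 1/||K|| satisfies the PDHG step-size condition sigma*tau*||K||^2 <= 1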

        f1 = alpha * MixedL21Norm()
        f = BlockFunction(f1, f2)
        # Setup and run the PDHG algorithm
        pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma)
        pdhg.max_iteration = 1000
        pdhg.update_objective_interval = 200
        pdhg.run(1000, verbose=0)

        #%% show diff between PDHG and SPDHG
        # plt.imshow(spdhg.get_output().as_array() -pdhg.get_output().as_array())
        # plt.colorbar()
        # plt.show()

        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(spdhg.get_output(),
                  pdhg.get_output()), mse(spdhg.get_output(),
                                          pdhg.get_output()),
              psnr(spdhg.get_output(), pdhg.get_output()))
        if debug_print:
            print("Quality measures", qm)
        np.testing.assert_almost_equal(mae(spdhg.get_output(),
                                           pdhg.get_output()),
                                       0.00150,
                                       decimal=3)
        np.testing.assert_almost_equal(mse(spdhg.get_output(),
                                           pdhg.get_output()),
                                       1.68590e-05,
                                       decimal=3)
Example #2
    def test_SPDHG_vs_PDHG_implicit(self):

        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))

        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 90)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')
        # Select device
        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data. Poisson noise is applied (noise = noises[1])
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        noisy_data = ag.allocate()
        if noise == 'poisson':
            np.random.seed(10)
            scale = 20
            eta = 0
            noisy_data.fill(
                np.random.poisson(scale * (eta + sin.as_array())) / scale)
        elif noise == 'gaussian':
            np.random.seed(10)
            n1 = np.random.normal(0, 0.1, size=ag.shape)
            noisy_data.fill(n1 + sin.as_array())

        else:
            raise ValueError('Unsupported Noise ', noise)

        # Forward operator (the full projector; no blocks here)
        operator = Aop
        f = KullbackLeibler(b=noisy_data)
        alpha = 0.005
        g = alpha * TotalVariation(50, 1e-4, lower=0)
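        # implicit setting: the TV prox has no closed form and is evaluated by an
        # inner solver (here 50 inner iterations, tolerance 1e-4); lower=0 adds a
        # non-negativity constraint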
        normK = operator.norm()

        #% 'implicit' PDHG, preconditioned step-sizes
        tau_tmp = 1.
        sigma_tmp = 1.
        tau = sigma_tmp / operator.adjoint(
            tau_tmp * operator.range_geometry().allocate(1.))
        sigma = tau_tmp / operator.direct(
            sigma_tmp * operator.domain_geometry().allocate(1.))
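        # diagonal preconditioning (Pock & Chambolle): because the projector has
        # non-negative entries, its row/column sums are obtained by applying the
        # operator to images of ones, giving element-wise tau = 1 / (K^T 1) and
        # sigma = 1 / (K 1)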
        # initial = operator.domain_geometry().allocate()

        # Setup and run the PDHG algorithm
        pdhg = PDHG(f=f,
                    g=g,
                    operator=operator,
                    tau=tau,
                    sigma=sigma,
                    max_iteration=1000,
                    update_objective_interval=500)
        pdhg.run(verbose=0)

        subsets = 10
        size_of_subsets = int(len(angles) / subsets)
        # split the angles into uniform, sequential subsets
        list_angles = [
            angles[i:i + size_of_subsets]
            for i in range(0, len(angles), size_of_subsets)
        ]
        # create an acquisition geometry for each subset of angles
        list_geoms = [
            AcquisitionGeometry('parallel',
                                '2D',
                                list_angles[i],
                                detectors,
                                pixel_size_h=0.1,
                                angle_unit='radian')
            for i in range(len(list_angles))
        ]
        # create one projector per angle subset
        A = BlockOperator(*[
            AstraProjectorSimple(ig, list_geoms[i], dev)
            for i in range(subsets)
        ])
        ## number of subsets
        #(sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
        #
        ## acquisition data
        AD_list = []
        # pair each angle subset with its own acquisition geometry
        for sub_num, i in enumerate(range(0, len(angles), size_of_subsets)):
            arr = noisy_data.as_array()[i:i + size_of_subsets, :]
            AD_list.append(
                AcquisitionData(arr, geometry=list_geoms[sub_num]))

        g = BlockDataContainer(*AD_list)

        ## block function
        F = BlockFunction(*[KullbackLeibler(b=g[i]) for i in range(subsets)])
        G = alpha * TotalVariation(50, 1e-4, lower=0)

        prob = [1 / len(A)] * len(A)
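        # uniform sampling: each of the subset operators is drawn with probability 1/len(A)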
        spdhg = SPDHG(f=F,
                      g=G,
                      operator=A,
                      max_iteration=1000,
                      update_objective_interval=200,
                      prob=prob)
        spdhg.run(1000, verbose=0)
        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(spdhg.get_output(),
                  pdhg.get_output()), mse(spdhg.get_output(),
                                          pdhg.get_output()),
              psnr(spdhg.get_output(), pdhg.get_output()))
        if debug_print:
            print("Quality measures", qm)

        np.testing.assert_almost_equal(mae(spdhg.get_output(),
                                           pdhg.get_output()),
                                       0.000335,
                                       decimal=3)
        np.testing.assert_almost_equal(mse(spdhg.get_output(),
                                           pdhg.get_output()),
                                       5.51141e-06,
                                       decimal=3)
Example #3
    def test_PDHG_Denoising(self):
        print("PDHG Denoising with 3 noises")
        # adapted from demo PDHG_TV_Color_Denoising.py in CIL-Demos repository

        data = dataexample.PEPPERS.get(size=(256, 256))
        ig = data.geometry
        ag = ig

        which_noise = 0
        # Create noisy data.
        noises = ['gaussian', 'poisson', 's&p']
        dnoise = noises[which_noise]

        def setup(data, dnoise):
            if dnoise == 's&p':
                n1 = applynoise.saltnpepper(data,
                                            salt_vs_pepper=0.9,
                                            amount=0.2,
                                            seed=10)
            elif dnoise == 'poisson':
                scale = 5
                n1 = applynoise.poisson(data.as_array() / scale,
                                        seed=10) * scale
            elif dnoise == 'gaussian':
                n1 = applynoise.gaussian(data.as_array(), seed=10)
            else:
                raise ValueError('Unsupported Noise ', dnoise)
            noisy_data = ig.allocate()
            noisy_data.fill(n1)

            # Regularisation Parameter depending on the noise distribution
            if dnoise == 's&p':
                alpha = 0.8
            elif dnoise == 'poisson':
                alpha = 1
            elif dnoise == 'gaussian':
                alpha = .3

            # fidelity
            if dnoise == 's&p':
                g = L1Norm(b=noisy_data)
            elif dnoise == 'poisson':
                g = KullbackLeibler(b=noisy_data)
            elif dnoise == 'gaussian':
                g = 0.5 * L2NormSquared(b=noisy_data)
            return noisy_data, alpha, g

        noisy_data, alpha, g = setup(data, dnoise)
        operator = GradientOperator(
            ig, correlation=GradientOperator.CORRELATION_SPACE)
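        # correlation=SPACE: finite differences over the spatial dimensions only,
        # so the colour channels of PEPPERS are not differenced across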

        f1 = alpha * MixedL21Norm()

        # Compute operator Norm
        normK = operator.norm()

        # Primal & dual stepsizes
        sigma = 1
        tau = 1 / (sigma * normK**2)

        # Setup and run the PDHG algorithm
        pdhg1 = PDHG(f=f1, g=g, operator=operator, tau=tau, sigma=sigma)
        pdhg1.max_iteration = 2000
        pdhg1.update_objective_interval = 200
        pdhg1.run(1000, verbose=0)

        rmse = (pdhg1.get_output() - data).norm() / data.as_array().size
        if debug_print:
            print("RMSE", rmse)
        self.assertLess(rmse, 2e-4)

        which_noise = 1
        noise = noises[which_noise]
        noisy_data, alpha, g = setup(data, noise)
        operator = GradientOperator(
            ig, correlation=GradientOperator.CORRELATION_SPACE)

        f1 = alpha * MixedL21Norm()

        # Compute operator Norm
        normK = operator.norm()

        # Primal & dual stepsizes
        sigma = 1
        tau = 1 / (sigma * normK**2)

        # Setup and run the PDHG algorithm
        pdhg1 = PDHG(f=f1,
                     g=g,
                     operator=operator,
                     tau=tau,
                     sigma=sigma,
                     max_iteration=2000,
                     update_objective_interval=200)

        pdhg1.run(1000, verbose=0)

        rmse = (pdhg1.get_output() - data).norm() / data.as_array().size
        if debug_print:
            print("RMSE", rmse)
        self.assertLess(rmse, 2e-4)

        which_noise = 2
        noise = noises[which_noise]
        noisy_data, alpha, g = setup(data, noise)
        operator = GradientOperator(
            ig, correlation=GradientOperator.CORRELATION_SPACE)

        f1 = alpha * MixedL21Norm()

        # Compute operator Norm
        normK = operator.norm()

        # Primal & dual stepsizes
        sigma = 1
        tau = 1 / (sigma * normK**2)

        # Setup and run the PDHG algorithm
        pdhg1 = PDHG(f=f1, g=g, operator=operator, tau=tau, sigma=sigma)
        pdhg1.max_iteration = 2000
        pdhg1.update_objective_interval = 200
        pdhg1.run(1000, verbose=0)

        rmse = (pdhg1.get_output() - data).norm() / data.as_array().size
        if debug_print:
            print("RMSE", rmse)
        self.assertLess(rmse, 2e-4)
Example #4
    normK = operator.norm()
    
    # Primal & dual stepsizes
    sigma = 1
    tau = 1/(sigma*normK**2)
    
    # Setup and run the PDHG algorithm
    pdhg = PDHG(f=f,g=g,operator=operator, tau=tau, sigma=sigma)
    pdhg.max_iteration = 10000
    pdhg.update_objective_interval = 1
    pdhg.run(200,very_verbose=True)
    
    # Show results
    plt.figure(figsize=(20,5))
    plt.subplot(1,3,1)
    plt.imshow(data_gray.as_array(),vmin=0.0,vmax=1.0)
    plt.title('Ground Truth')
    plt.gray()
    plt.colorbar()
    plt.subplot(1,3,2)
    plt.imshow(blurredimage.as_array(),vmin=0.0,vmax=1.0)
    plt.title('Blurred and Noisy Data')
    plt.gray()
    plt.colorbar()
    plt.subplot(1,3,3)
    plt.imshow(pdhg.get_output().as_array(),vmin=0.0,vmax=1.0)
    plt.title('TV Reconstruction')
    plt.gray()
    plt.colorbar()
    plt.show()
Example #5
    pdhg.max_iteration = 2000
    pdhg.update_objective_interval = 100
    pdhg.run(2000)

    # Show results
    plt.figure(figsize=(20, 5))
    plt.subplot(1, 4, 1)
    plt.imshow(data.as_array())
    plt.title('Ground Truth')
    plt.colorbar()
    plt.subplot(1, 4, 2)
    plt.imshow(noisy_data.as_array())
    plt.title('Noisy Data')
    plt.colorbar()
    plt.subplot(1, 4, 3)
    plt.imshow(pdhg.get_output().as_array())
    plt.title('TV Reconstruction')
    plt.colorbar()
    plt.subplot(1, 4, 4)
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             data.as_array()[int(ig.shape[0] / 2), :],
             label='GTruth')
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             noisy_data.as_array()[int(ig.shape[0] / 2), :],
             label='Noisy and masked')
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             pdhg.get_output().as_array()[int(ig.shape[0] / 2), :],
             label='TV reconstruction')
    plt.legend()
    plt.title('Middle Line Profiles')
    plt.show()
Example #6
    sigma = 1
    tau = 1/(sigma*normK**2)
    
    # Setup and run the PDHG algorithm
    pdhg = PDHG(f=f,g=g,operator=operator, tau=tau, sigma=sigma)
    pdhg.max_iteration = 2000
    pdhg.update_objective_interval = 100
    pdhg.run(2000)
    
    # Show results
    plt.figure(figsize=(20,5))
    plt.subplot(1,4,1)
    plt.imshow(data.as_array())
    plt.title('Ground Truth')
    plt.colorbar()
    plt.subplot(1,4,2)
    plt.imshow(noisy_data.as_array())
    plt.title('Noisy Data')
    plt.colorbar()
    plt.subplot(1,4,3)
    plt.imshow(pdhg.get_output().as_array())
    plt.title('TV Reconstruction')
    plt.colorbar()
    plt.subplot(1,4,4)
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             data.as_array()[int(ig.shape[0]/2), :], label='GTruth')
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             noisy_data.as_array()[int(ig.shape[0]/2), :],
             label='Noisy and masked')
    plt.plot(np.linspace(0, ig.shape[1], ig.shape[1]),
             pdhg.get_output().as_array()[int(ig.shape[0]/2), :],
             label='TV reconstruction')
    plt.legend()
    plt.title('Middle Line Profiles')
    plt.show()