Example No. 1
    def process(self, out=None):

        IM = self.get_input()

        pad = False
        if len(IM.shape) == 2:
            #for 2D cases
            pad = True
            data_temp = numpy.expand_dims(IM.as_array(), axis=0)
        else:
            data_temp = IM.as_array()

        sinogram_id, arr_out = astra.create_sino3d_gpu(data_temp,
                                                       self.proj_geom,
                                                       self.vol_geom)

        astra.data3d.delete(sinogram_id)

        if pad is True:
            arr_out = numpy.squeeze(arr_out, axis=0)

        if out is None:
            out = AcquisitionData(arr_out,
                                  deep_copy=False,
                                  geometry=self.sinogram_geometry.copy(),
                                  suppress_warning=True)
            return out
        else:
            out.fill(arr_out)
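
A minimal usage sketch for a processor like the one above, assuming `fp` is a configured instance and `image` is an ImageData matching its volume geometry (both hypothetical names); `set_input` and `get_output` are the standard CIL processor entry points:

fp.set_input(image)
sinogram = fp.get_output()   # runs process() and returns the AcquisitionData
fp.process(out=sinogram)     # or fill an existing, correctly shaped container in place
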
Example No. 2
    def process(self, out=None):

        IM = self.get_input()
        sinogram_id, arr_out = astra.create_sino(IM.as_array(), self.proj_id)
        astra.data2d.delete(sinogram_id)

        if out is None:
            out = AcquisitionData(arr_out,
                                  deep_copy=False,
                                  geometry=self.sinogram_geometry.copy(),
                                  suppress_warning=True)
            return out
        else:
            out.fill(arr_out)
Example No. 3
    def direct(self, x, out=None):

        if self.tigre_geom.is2D:
            data_temp = np.expand_dims(x.as_array(),axis=0)
            arr_out = Ax.Ax(data_temp, self.tigre_geom, self.tigre_angles, projection_type=self.method['direct'])
            arr_out = np.squeeze(arr_out, axis=1)
        else:
            arr_out = Ax.Ax(x.as_array(), self.tigre_geom, self.tigre_angles, projection_type=self.method['direct'])

        if out is None:
            out = AcquisitionData(arr_out, deep_copy=False, geometry=self._range_geometry.copy(), suppress_warning=True)
            return out
        else:
            out.fill(arr_out)
Example No. 4
    def read(self):

        if self._geometry is None:
            self.get_geometry()

        with h5py.File(self.file_name, 'r') as dfile:

            ds_data = dfile['entry1/tomo_entry/data/data']
            data = np.array(ds_data, dtype=np.float32)

            # handle old files?
            if self.is_old_file_version():
                if isinstance(self._geometry, AcquisitionGeometry):
                    return AcquisitionData(data,
                                           True,
                                           geometry=self._geometry,
                                           suppress_warning=True)
                elif isinstance(self._geometry, ImageGeometry):
                    return ImageData(data,
                                     True,
                                     geometry=self._geometry,
                                     suppress_warning=True)
                else:
                    raise TypeError("Unsupported geometry. Expected ImageGeometry or AcquisitionGeometry, got {}"\
                        .format(type(self._geometry)))

            output = self._geometry.allocate(None)
            output.fill(data)
            return output
Example No. 5
    def _return_appropriate_data(self, data, geometry):
        if isinstance(geometry, ImageGeometry):
            return ImageData(data, deep=True, geometry=geometry.copy(), suppress_warning=True)
        elif isinstance(geometry, AcquisitionGeometry):
            return AcquisitionData(data, deep=True, geometry=geometry.copy(), suppress_warning=True)
        else:
            raise TypeError("Unsupported Geometry type. Expected ImageGeometry or AcquisitionGeometry, got {}"\
                .format(type(geometry)))
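
A minimal sketch of calling this helper, assuming `reader` is an instance of the enclosing reader class and `ig` is an ImageGeometry (hypothetical names):

import numpy as np

arr = np.zeros(ig.shape, dtype=np.float32)
image = reader._return_appropriate_data(arr, ig)   # wraps a deep copy of arr in an ImageData
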
Example No. 6
    def read(self):
        '''
        Reads projections and returns an AcquisitionData container
        '''
        # the import will raise an ImportError if dxchange is not installed
        import dxchange
        # Load projections and most metadata
        data, metadata = dxchange.read_txrm(self.txrm_file)
        number_of_images = data.shape[0]
        
        # Read source to center and detector to center distances
        with olefile.OleFileIO(self.txrm_file) as ole:
            StoRADistance = dxchange.reader._read_ole_arr(ole, \
                    'ImageInfo/StoRADistance', "<{0}f".format(number_of_images))
            DtoRADistance = dxchange.reader._read_ole_arr(ole, \
                    'ImageInfo/DtoRADistance', "<{0}f".format(number_of_images))
            
        dist_source_center   = np.abs( StoRADistance[0] )
        dist_center_detector = np.abs( DtoRADistance[0] )
        
        # normalise data by flatfield
        data = data / metadata['reference']
        
        # circularly shift data by rounded x and y shifts
        for k in range(number_of_images):
            data[k,:,:] = np.roll(data[k,:,:], \
                (int(metadata['x-shifts'][k]),int(metadata['y-shifts'][k])), \
                axis=(1,0))
        
        # Pixel size loaded in metadata is really the voxel size in um.
        # We can compute the effective detector pixel size as the geometric
        # magnification times the voxel size.
        d_pixel_size = ((dist_source_center+dist_center_detector)/dist_source_center)*metadata['pixel_size']
        
        # convert angles to the requested unit; Zeiss stores them in radians
        if self.angle_unit == AcquisitionGeometry.DEGREE:
            angles = np.degrees(metadata['thetas'])
        else:
            angles = np.asarray(metadata['thetas'])

        self._ag = AcquisitionGeometry.create_Cone3D(
            [0,-dist_source_center, 0] , [ 0, dist_center_detector, 0] \
            ) \
                .set_panel([metadata['image_width'], metadata['image_height']],\
                    pixel_size=[d_pixel_size/1000,d_pixel_size/1000])\
                .set_angles(angles, angle_unit=self.angle_unit)
        self._ag.dimension_labels =  ['angle', 'vertical', 'horizontal']
                

        acq_data = AcquisitionData(array=data, deep_copy=False, geometry=self._ag.copy(),\
            suppress_warning=True)
        self._metadata = metadata
        return acq_data
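
For illustration, the effective detector pixel size above is the geometric magnification times the voxel size; with made-up distances of 60 (source to centre) and 240 (centre to detector) and a 5 um voxel size:

dist_source_center, dist_center_detector = 60.0, 240.0                              # made-up values, same units
magnification = (dist_source_center + dist_center_detector) / dist_source_center    # 5.0
d_pixel_size = magnification * 5.0                                                  # 25 um, i.e. 0.025 mm after /1000
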
Example No. 7
    def process(self, out=None):

        IM = self.get_input()

        pad = False
        if len(IM.shape) == 2:
            #for 2D cases
            pad = True
            data_temp = np.expand_dims(IM.as_array(), axis=0)
        else:
            data_temp = IM.as_array()

        if out is None:

            sinogram_id, arr_out = astra.create_sino3d_gpu(
                data_temp, self.proj_geom, self.vol_geom)
        else:
            if pad:
                arr_out = np.expand_dims(out.as_array(), axis=0)
            else:
                arr_out = out.as_array()

            sinogram_id = astra.data3d.link('-sino', self.proj_geom, arr_out)
            self.create_backprojection3d_gpu(data_temp, self.proj_geom,
                                             self.vol_geom, False, sinogram_id)

        #clear the memory on GPU
        astra.data3d.delete(sinogram_id)

        if pad is True:
            arr_out = np.squeeze(arr_out, axis=0)

        if out is None:
            out = AcquisitionData(arr_out,
                                  deep_copy=False,
                                  geometry=self.sinogram_geometry.copy(),
                                  suppress_warning=True)
            return out
        else:
            out.fill(arr_out)
Example No. 8
    def direct(self, x, out=None):

        data = x.as_array()

        if self.tigre_geom.is2D:
            data_temp = np.expand_dims(data, axis=0)
            arr_out = self.__call_Ax(data_temp)
            arr_out = np.squeeze(arr_out, axis=1)
        else:
            arr_out = self.__call_Ax(data)

        #if single-angle projection, remove the angle dimension for CIL
        if arr_out.shape[0] == 1:
            arr_out = np.squeeze(arr_out, axis=0)

        if out is None:
            out = AcquisitionData(arr_out,
                                  deep_copy=False,
                                  geometry=self._range_geometry.copy(),
                                  suppress_warning=True)
            return out
        else:
            out.fill(arr_out)
Example No. 9
    def test_AcquisitionGeometry_allocate(self):
        ageometry = AcquisitionGeometry(dimension=2,
                                        angles=numpy.linspace(0, 180, num=10),
                                        geom_type='parallel',
                                        pixel_num_v=3,
                                        pixel_num_h=5,
                                        channels=2)
        sino = ageometry.allocate(0)
        shape = sino.shape
        print("shape", shape)
        self.assertAlmostEqual(0., sino.as_array()[0][0][0][0])
        self.assertAlmostEqual(
            0.,
            sino.as_array()[shape[0] - 1][shape[1] - 1][shape[2] -
                                                        1][shape[3] - 1])

        sino = ageometry.allocate(1)
        self.assertEqual(1, sino.as_array()[0][0][0][0])
        self.assertEqual(
            1,
            sino.as_array()[shape[0] - 1][shape[1] - 1][shape[2] -
                                                        1][shape[3] - 1])
        print(sino.dimension_labels, sino.shape, ageometry)

        default_order = ['channel', 'angle', 'vertical', 'horizontal']
        self.assertEqual(default_order[0], sino.dimension_labels[0])
        self.assertEqual(default_order[1], sino.dimension_labels[1])
        self.assertEqual(default_order[2], sino.dimension_labels[2])
        self.assertEqual(default_order[3], sino.dimension_labels[3])
        order = ['vertical', 'horizontal', 'channel', 'angle']
        ageometry.set_labels(order)
        sino = ageometry.allocate(0)
        print(sino.dimension_labels, sino.shape, ageometry)
        self.assertEqual(order[0], sino.dimension_labels[0])
        self.assertEqual(order[1], sino.dimension_labels[1])
        self.assertEqual(order[2], sino.dimension_labels[2])
        self.assertEqual(order[3], sino.dimension_labels[3])

        with self.assertRaises(ValueError):
            AcquisitionData(numpy.random.randint(10, size=(2, 3)),
                            geometry=ageometry)
Example No. 10
    def test_SPDHG_vs_PDHG_implicit(self):

        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))

        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 90)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')
        # Select device
        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data by applying the selected noise model
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        noisy_data = ag.allocate()
        if noise == 'poisson':
            np.random.seed(10)
            scale = 20
            eta = 0
            noisy_data.fill(
                np.random.poisson(scale * (eta + sin.as_array())) / scale)
        elif noise == 'gaussian':
            np.random.seed(10)
            n1 = np.random.normal(0, 0.1, size=ag.shape)
            noisy_data.fill(n1 + sin.as_array())

        else:
            raise ValueError('Unsupported Noise ', noise)

        # Create BlockOperator
        operator = Aop
        f = KullbackLeibler(b=noisy_data)
        alpha = 0.005
        g = alpha * TotalVariation(50, 1e-4, lower=0)
        normK = operator.norm()

        #%% 'implicit' PDHG, preconditioned step-sizes
        tau_tmp = 1.
        sigma_tmp = 1.
        tau = sigma_tmp / operator.adjoint(
            tau_tmp * operator.range_geometry().allocate(1.))
        sigma = tau_tmp / operator.direct(
            sigma_tmp * operator.domain_geometry().allocate(1.))
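        # the two lines above implement Chambolle-Pock style diagonal preconditioning:
        # element-wise tau = 1 / (A^T 1) and sigma = 1 / (A 1), with tau_tmp = sigma_tmp = 1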
        #    initial = operator.domain_geometry().allocate()

        #    # Setup and run the PDHG algorithm
        pdhg = PDHG(f=f,
                    g=g,
                    operator=operator,
                    tau=tau,
                    sigma=sigma,
                    max_iteration=1000,
                    update_objective_interval=500)
        pdhg.run(verbose=0)

        subsets = 10
        size_of_subsets = int(len(angles) / subsets)
        # split the angles into uniform, sequential subsets
        list_angles = [
            angles[i:i + size_of_subsets]
            for i in range(0, len(angles), size_of_subsets)
        ]
        # create acquisition geometries, one per interval of split angles
        list_geoms = [
            AcquisitionGeometry('parallel',
                                '2D',
                                list_angles[i],
                                detectors,
                                pixel_size_h=0.1,
                                angle_unit='radian')
            for i in range(len(list_angles))
        ]
        # create as many projection operators as there are subsets
        A = BlockOperator(*[
            AstraProjectorSimple(ig, list_geoms[i], dev)
            for i in range(subsets)
        ])
        ## number of subsets
        #(sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
        #
        ## acquisition data: one block of angles per subset
        AD_list = []
        for sub_num in range(subsets):
            i = sub_num * size_of_subsets
            arr = noisy_data.as_array()[i:i + size_of_subsets, :]
            AD_list.append(
                AcquisitionData(arr, geometry=list_geoms[sub_num]))

        g = BlockDataContainer(*AD_list)

        ## block function
        F = BlockFunction(*[KullbackLeibler(b=g[i]) for i in range(subsets)])
        G = alpha * TotalVariation(50, 1e-4, lower=0)

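        # uniform sampling probability across the subset operators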
        prob = [1 / len(A)] * len(A)
        spdhg = SPDHG(f=F,
                      g=G,
                      operator=A,
                      max_iteration=1000,
                      update_objective_interval=200,
                      prob=prob)
        spdhg.run(1000, verbose=0)
        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(spdhg.get_output(),
                  pdhg.get_output()), mse(spdhg.get_output(),
                                          pdhg.get_output()),
              psnr(spdhg.get_output(), pdhg.get_output()))
        if debug_print:
            print("Quality measures", qm)

        np.testing.assert_almost_equal(mae(spdhg.get_output(),
                                           pdhg.get_output()),
                                       0.000335,
                                       decimal=3)
        np.testing.assert_almost_equal(mse(spdhg.get_output(),
                                           pdhg.get_output()),
                                       5.51141e-06,
                                       decimal=3)
Example No. 11
    def test_PDHG_vs_PDHG_explicit_axpby(self):
        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))
        if debug_print:
            print("test_PDHG_vs_PDHG_explicit_axpby here")
        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 180)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')

        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data by applying the selected noise model
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        if noise == 'poisson':
            np.random.seed(10)
            scale = 5
            eta = 0
            noisy_data = AcquisitionData(
                np.random.poisson(scale * (eta + sin.as_array())) / scale,
                geometry=ag)
        elif noise == 'gaussian':
            np.random.seed(10)
            n1 = np.random.normal(0, 0.1, size=ag.shape)
            noisy_data = AcquisitionData(n1 + sin.as_array(), geometry=ag)

        else:
            raise ValueError('Unsupported Noise ', noise)

        alpha = 0.5
        op1 = GradientOperator(ig)
        op2 = Aop
        # Create BlockOperator
        operator = BlockOperator(op1, op2, shape=(2, 1))
        f2 = KullbackLeibler(b=noisy_data)
        g = IndicatorBox(lower=0)
        normK = operator.norm()
        sigma = 1. / normK
        tau = 1. / normK

        f1 = alpha * MixedL21Norm()
        f = BlockFunction(f1, f2)
        # Setup and run the PDHG algorithm

        algos = []
        algos.append(
            PDHG(f=f,
                 g=g,
                 operator=operator,
                 tau=tau,
                 sigma=sigma,
                 max_iteration=1000,
                 update_objective_interval=200,
                 use_axpby=True))
        algos[0].run(1000, verbose=0)

        algos.append(
            PDHG(f=f,
                 g=g,
                 operator=operator,
                 tau=tau,
                 sigma=sigma,
                 max_iteration=1000,
                 update_objective_interval=200,
                 use_axpby=False))
        algos[1].run(1000, verbose=0)

        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(algos[0].get_output(), algos[1].get_output()),
              mse(algos[0].get_output(), algos[1].get_output()),
              psnr(algos[0].get_output(), algos[1].get_output()))
        if debug_print:
            print("Quality measures", qm)
        np.testing.assert_array_less(qm[0], 0.005)
        np.testing.assert_array_less(qm[1], 3e-05)
Example No. 12
num_pixels_x = data.shape[2]
num_pixels_y = data.shape[1]
pixel_size_xy = 0.254 * bins

ag = AcquisitionGeometry.create_Cone3D( source_position=[0.0,source_pos_y,0.0], \
                                        detector_position=[0.0,detector_pos_y,0.0],\
                                        rotation_axis_position=[object_offset_x,0,0],\
                                        rotation_axis_direction=[0,-np.sin(tilt), np.cos(tilt)] ) \
                        .set_angles(angles=angles_list, angle_unit='degree')\
                        .set_panel( num_pixels=[num_pixels_x, num_pixels_y], \
                                    pixel_size=pixel_size_xy,\
                                    origin='top-left')
print(ag)

#%% create AcquisitionData (data + geometry)
aq_data_raw = AcquisitionData(data, False, geometry=ag)

#%% convert to attenuation
aq_data = aq_data_raw.log()
aq_data *= -1

#%% view data
ag = aq_data.geometry
islicer(aq_data, direction='angle')

#%% Set up reconstruction volume
ig = ag.get_ImageGeometry()
ig.voxel_num_x = int(num_pixels_x - 200 / bins)
ig.voxel_num_y = int(num_pixels_x - 600 / bins)
ig.voxel_num_z = int(400 // bins)
print(ig)
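
A quick sanity check of the tilted rotation-axis direction used above, assuming `tilt` is in radians (the value below is made up): rotating the default axis [0, 0, 1] about x by `tilt` gives the unit vector passed to `rotation_axis_direction`.

import numpy as np

tilt = np.radians(2.0)                              # hypothetical tilt angle
axis = np.array([0, -np.sin(tilt), np.cos(tilt)])   # default [0, 0, 1] tilted about the x axis
assert np.isclose(np.linalg.norm(axis), 1.0)        # still a unit vector
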
Example No. 13
    def process(self, out=None):

        data = self.get_input()
        geometry = data.geometry

        angles_deg = geometry.config.angles.angle_data.copy()

        if geometry.config.angles.angle_unit == "radian":
            angles_deg *= 180 / np.pi

        #keep angles in range -180 to 180
        while angles_deg.min() < -180:
            angles_deg[angles_deg < -180] += 360

        while angles_deg.max() >= 180:
            angles_deg[angles_deg >= 180] -= 360

        target = angles_deg[self.projection_index] + 180

        if target < -180:
            target += 360
        elif target >= 180:
            target -= 360

        ind = np.abs(angles_deg - target).argmin()

        if abs(abs(angles_deg[ind] - angles_deg[0]) - 180) > self.ang_tol:
            raise ValueError(
                'Method requires projections at 180 degrees interval')

        #cross correlate single slice with the reversed 180deg projection
        data_slice = data.subset(vertical=self.slice_index)

        data1 = data_slice.subset(angle=0).as_array()
        data2 = np.flip(data_slice.subset(angle=ind).as_array())

        border = int(data1.size * 0.05)
        lag = np.correlate(data1[border:-border], data2[border:-border],
                           "full")

        ind = lag.argmax()

        #fit quadratic to 3 centre points
        a = (lag[ind + 1] + lag[ind - 1] - 2 * lag[ind]) * 0.5
        b = a + lag[ind] - lag[ind - 1]
        quad_max = -b / (2 * a) + ind

        shift = (quad_max - (lag.size - 1) / 2) / 2
        shift = np.floor(shift * 100 + 0.5) / 100

        new_geometry = data.geometry.copy()

        #set up new geometry
        new_geometry.config.system.rotation_axis.position[
            0] = shift * geometry.config.panel.pixel_size[0]

        print("Centre of rotation correction using cross-correlation")
        print("\tCalculated from slice: ", self.slice_index)
        print("\tApplied centre of rotation shift = ", shift,
              "pixels at the detector.")

        if out is None:
            return AcquisitionData(
                array=data,
                deep_copy=True,
                dimension_labels=new_geometry.dimension_labels,
                geometry=new_geometry,
                supress_warning=True)
        else:
            out.geometry = new_geometry
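
The sub-pixel peak position above comes from fitting a parabola through the three correlation samples around the maximum; a standalone sketch of that step (hypothetical helper name, assumes 0 < ind < len(lag) - 1):

def parabolic_peak(lag, ind):
    # vertex of the parabola through lag[ind - 1], lag[ind], lag[ind + 1]
    a = (lag[ind + 1] + lag[ind - 1] - 2 * lag[ind]) * 0.5
    b = a + lag[ind] - lag[ind - 1]
    return -b / (2 * a) + ind
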
Example No. 14
    def process(self, out=None):

        data_full = self.get_input()

        if data_full.geometry.dimension == '3D':
            data = data_full.get_slice(vertical=self.slice_index)
        else:
            data = data_full

        geometry = data.geometry

        angles_deg = geometry.config.angles.angle_data.copy()

        if geometry.config.angles.angle_unit == "radian":
            angles_deg *= 180 / np.pi

        #keep angles in range -180 to 180
        while angles_deg.min() <= -180:
            angles_deg[angles_deg <= -180] += 360

        while angles_deg.max() > 180:
            angles_deg[angles_deg > 180] -= 360

        target = angles_deg[self.projection_index] + 180

        if target <= -180:
            target += 360
        elif target > 180:
            target -= 360

        ind = np.abs(angles_deg - target).argmin()

        ang_diff = abs(angles_deg[ind] - angles_deg[0])
        if abs(ang_diff - 180) > self.ang_tol:
            raise ValueError(
                'Method requires projections at 180 +/- {0} degrees interval, got {1}.\nPick a different initial projection or increase the angular tolerance `ang_tol`.'
                .format(self.ang_tol, ang_diff))

        #cross correlate single slice with the reversed 180deg projection
        data1 = data.subset(angle=0).as_array()
        data2 = np.flip(data.subset(angle=ind).as_array())

        border = int(data1.size * 0.05)
        lag = np.correlate(data1[border:-border], data2[border:-border],
                           "full")

        ind = lag.argmax()

        #fit quadratic to 3 centre points
        a = (lag[ind + 1] + lag[ind - 1] - 2 * lag[ind]) * 0.5
        b = a + lag[ind] - lag[ind - 1]
        quad_max = -b / (2 * a) + ind

        shift = (quad_max - (lag.size - 1) / 2) / 2
        shift = np.floor(shift * 100 + 0.5) / 100

        new_geometry = data_full.geometry.copy()

        #set up new geometry
        new_geometry.config.system.rotation_axis.position[
            0] = shift * geometry.config.panel.pixel_size[0]

        logger.info(
            "Centre of rotation correction found using cross-correlation")
        logger.info("Calculated from slice: %s", str(self.slice_index))
        logger.info("Centre of rotation shift = %f pixels", shift)
        logger.info("Centre of rotation shift = %f units at the object",
                    shift * geometry.config.panel.pixel_size[0])
        logger.info("Return new dataset with centred geometry")

        if out is None:
            return AcquisitionData(array=data_full,
                                   deep_copy=True,
                                   geometry=new_geometry,
                                   suppress_warning=True)
        else:
            out.geometry = new_geometry
Example No. 15
    def test_SPDHG_vs_PDHG_explicit(self):
        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))

        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 180)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')
        # Select device
        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data by applying the selected noise model
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        if noise == 'poisson':
            scale = 5
            noisy_data = scale * applynoise.poisson(sin / scale, seed=10)
            # np.random.seed(10)
            # scale = 5
            # eta = 0
            # noisy_data = AcquisitionData(np.random.poisson( scale * (eta + sin.as_array()))/scale, ag)
        elif noise == 'gaussian':
            noisy_data = applynoise.gaussian(sin, var=0.1, seed=10)
            # np.random.seed(10)
            # n1 = np.random.normal(0, 0.1, size = ag.shape)
            # noisy_data = AcquisitionData(n1 + sin.as_array(), ag)

        else:
            raise ValueError('Unsupported Noise ', noise)

        #%% 'explicit' SPDHG, scalar step-sizes
        subsets = 10
        size_of_subsets = int(len(angles) / subsets)
        # create Gradient operator
        op1 = GradientOperator(ig)
        # split the angles into uniform, sequential subsets
        list_angles = [
            angles[i:i + size_of_subsets]
            for i in range(0, len(angles), size_of_subsets)
        ]
        # create acquisition geometries, one per interval of split angles
        list_geoms = [
            AcquisitionGeometry('parallel',
                                '2D',
                                list_angles[i],
                                detectors,
                                pixel_size_h=0.1,
                                angle_unit='radian')
            for i in range(len(list_angles))
        ]
        # create as many projection operators as there are subsets
        A = BlockOperator(*[
            AstraProjectorSimple(ig, list_geoms[i], dev)
            for i in range(subsets)
        ] + [op1])
        ## number of subsets
        #(sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
        #
        ## acquisition data: one block of angles per subset
        AD_list = []
        for sub_num in range(subsets):
            i = sub_num * size_of_subsets
            arr = noisy_data.as_array()[i:i + size_of_subsets, :]
            AD_list.append(
                AcquisitionData(arr, geometry=list_geoms[sub_num]))

        g = BlockDataContainer(*AD_list)
        alpha = 0.5
        ## block function
        F = BlockFunction(*[
            *[KullbackLeibler(b=g[i])
              for i in range(subsets)] + [alpha * MixedL21Norm()]
        ])
        G = IndicatorBox(lower=0)

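        # sampling probabilities: the gradient block takes 1/2, the projection
        # subsets share the remaining 1/2 evenly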
        prob = [1 / (2 * subsets)] * (len(A) - 1) + [1 / 2]
        spdhg = SPDHG(f=F,
                      g=G,
                      operator=A,
                      max_iteration=1000,
                      update_objective_interval=200,
                      prob=prob)
        spdhg.run(1000, verbose=0)

        #%% 'explicit' PDHG, scalar step-sizes
        op1 = GradientOperator(ig)
        op2 = Aop
        # Create BlockOperator
        operator = BlockOperator(op1, op2, shape=(2, 1))
        f2 = KullbackLeibler(b=noisy_data)
        g = IndicatorBox(lower=0)
        normK = operator.norm()
        sigma = 1 / normK
        tau = 1 / normK

        f1 = alpha * MixedL21Norm()
        f = BlockFunction(f1, f2)
        # Setup and run the PDHG algorithm
        pdhg = PDHG(f=f, g=g, operator=operator, tau=tau, sigma=sigma)
        pdhg.max_iteration = 1000
        pdhg.update_objective_interval = 200
        pdhg.run(1000, verbose=0)

        #%% show diff between PDHG and SPDHG
        # plt.imshow(spdhg.get_output().as_array() -pdhg.get_output().as_array())
        # plt.colorbar()
        # plt.show()

        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(spdhg.get_output(),
                  pdhg.get_output()), mse(spdhg.get_output(),
                                          pdhg.get_output()),
              psnr(spdhg.get_output(), pdhg.get_output()))
        if debug_print:
            print("Quality measures", qm)
        np.testing.assert_almost_equal(mae(spdhg.get_output(),
                                           pdhg.get_output()),
                                       0.00150,
                                       decimal=3)
        np.testing.assert_almost_equal(mse(spdhg.get_output(),
                                           pdhg.get_output()),
                                       1.68590e-05,
                                       decimal=3)
Example No. 16
    def test_SPDHG_vs_SPDHG_explicit_axpby(self):
        data = dataexample.SIMPLE_PHANTOM_2D.get(size=(128, 128))
        if debug_print:
            print("test_SPDHG_vs_SPDHG_explicit_axpby here")
        ig = data.geometry
        ig.voxel_size_x = 0.1
        ig.voxel_size_y = 0.1

        detectors = ig.shape[0]
        angles = np.linspace(0, np.pi, 180)
        ag = AcquisitionGeometry('parallel',
                                 '2D',
                                 angles,
                                 detectors,
                                 pixel_size_h=0.1,
                                 angle_unit='radian')
        # Select device
        # device = input('Available device: GPU==1 / CPU==0 ')
        # if device=='1':
        #     dev = 'gpu'
        # else:
        #     dev = 'cpu'
        dev = 'cpu'

        Aop = AstraProjectorSimple(ig, ag, dev)

        sin = Aop.direct(data)
        # Create noisy data by applying the selected noise model
        noises = ['gaussian', 'poisson']
        noise = noises[1]
        if noise == 'poisson':
            np.random.seed(10)
            scale = 5
            eta = 0
            noisy_data = AcquisitionData(
                np.random.poisson(scale * (eta + sin.as_array())) / scale,
                geometry=ag)
        elif noise == 'gaussian':
            np.random.seed(10)
            n1 = np.random.normal(0, 0.1, size=ag.shape)
            noisy_data = AcquisitionData(n1 + sin.as_array(), geometry=ag)

        else:
            raise ValueError('Unsupported Noise ', noise)

        #%% 'explicit' SPDHG, scalar step-sizes
        subsets = 10
        size_of_subsets = int(len(angles) / subsets)
        # create GradientOperator operator
        op1 = GradientOperator(ig)
        # split the angles into uniform, sequential subsets
        list_angles = [
            angles[i:i + size_of_subsets]
            for i in range(0, len(angles), size_of_subsets)
        ]
        # create acquisition geometries, one per interval of split angles
        list_geoms = [
            AcquisitionGeometry('parallel',
                                '2D',
                                list_angles[i],
                                detectors,
                                pixel_size_h=0.1,
                                angle_unit='radian')
            for i in range(len(list_angles))
        ]
        # create as many projection operators as there are subsets
        A = BlockOperator(*[
            AstraProjectorSimple(ig, list_geoms[i], dev)
            for i in range(subsets)
        ] + [op1])
        ## number of subsets
        #(sub2ind, ind2sub) = divide_1Darray_equally(range(len(A)), subsets)
        #
        ## acquisition data: one block of angles per subset
        AD_list = []
        for sub_num in range(subsets):
            i = sub_num * size_of_subsets
            arr = noisy_data.as_array()[i:i + size_of_subsets, :]
            AD_list.append(
                AcquisitionData(arr, geometry=list_geoms[sub_num]))

        g = BlockDataContainer(*AD_list)

        alpha = 0.5
        ## block function
        F = BlockFunction(*[
            *[KullbackLeibler(b=g[i])
              for i in range(subsets)] + [alpha * MixedL21Norm()]
        ])
        G = IndicatorBox(lower=0)

        prob = [1 / (2 * subsets)] * (len(A) - 1) + [1 / 2]
        algos = []
        algos.append(
            SPDHG(f=F,
                  g=G,
                  operator=A,
                  max_iteration=1000,
                  update_objective_interval=200,
                  prob=prob.copy(),
                  use_axpby=True))
        algos[0].run(1000, verbose=0)

        algos.append(
            SPDHG(f=F,
                  g=G,
                  operator=A,
                  max_iteration=1000,
                  update_objective_interval=200,
                  prob=prob.copy(),
                  use_axpby=False))
        algos[1].run(1000, verbose=0)

        # np.testing.assert_array_almost_equal(algos[0].get_output().as_array(), algos[1].get_output().as_array())
        from cil.utilities.quality_measures import mae, mse, psnr
        qm = (mae(algos[0].get_output(), algos[1].get_output()),
              mse(algos[0].get_output(), algos[1].get_output()),
              psnr(algos[0].get_output(), algos[1].get_output()))
        if debug_print:
            print("Quality measures", qm)
        assert qm[0] < 0.005
        assert qm[1] < 3.e-05
Example No. 17
    def process(self, out=None):

        #get slice
        data_full = self.get_input()

        if data_full.geometry.dimension == '3D':
            data = data_full.get_slice(vertical=self.slice_index)
        else:
            data = data_full

        data.geometry.config.system.align_reference_frame('cil')
        width = data.geometry.config.panel.num_pixels[0]

        #initial grid search
        if self.search_range is None:
            self.search_range = width // 4

        if self.initial_binning is None:
            self.initial_binning = min(int(np.ceil(width / 128)), 16)

        logger.debug("Initial search:")
        logger.debug("search range is %d", self.search_range)
        logger.debug("initial binning is %d", self.initial_binning)

        #filter full projections
        data_filtered = data.copy()
        data_filtered.fill(
            scipy.ndimage.sobel(data.as_array(),
                                axis=1,
                                mode='reflect',
                                cval=0.0))

        if self.initial_binning > 1:

            #gaussian filter data
            data_temp = data_filtered.copy()
            data_temp.fill(
                scipy.ndimage.gaussian_filter(data_filtered.as_array(),
                                              [0, self.initial_binning // 2]))

            #bin data whilst preserving centres
            num_pix_new = int(np.ceil(width / self.initial_binning))

            new_half_panel = (num_pix_new - 1) / 2
            half_panel = (width - 1) / 2

            sampling_points = np.mgrid[-self.initial_binning *
                                       new_half_panel:self.initial_binning *
                                       new_half_panel + 1:self.initial_binning]
            initial_coordinates = np.mgrid[-half_panel:half_panel + 1:1]

            new_geom = data.geometry.copy()
            new_geom.config.panel.num_pixels[0] = num_pix_new
            new_geom.config.panel.pixel_size[0] *= self.initial_binning
            data_binned = new_geom.allocate()

            for i in range(data.shape[0]):
                data_binned.fill(np.interp(sampling_points, initial_coordinates,
                                           data.array[i, :]),
                                 angle=i)

            #filter
            data_binned_filtered = data_binned.copy()
            data_binned_filtered.fill(
                scipy.ndimage.sobel(data_binned.as_array(),
                                    axis=1,
                                    mode='reflect',
                                    cval=0.0))
            data_processed = data_binned_filtered
        else:
            data_processed = data_filtered

        ig = data_processed.geometry.get_ImageGeometry()

        #binned grid search
        vox_rad = np.ceil(self.search_range / self.initial_binning)
        steps = int(4 * vox_rad + 1)
        offsets = np.linspace(-vox_rad, vox_rad, steps) * ig.voxel_size_x
        obj_vals = []

        for offset in offsets:
            obj_vals.append(self.calculate(data_processed, ig, offset))

        if logger.isEnabledFor(logging.DEBUG):
            self.plot(offsets, obj_vals,
                      ig.voxel_size_x / self.initial_binning)

        ind = np.argmin(obj_vals)
        if ind == 0 or ind == len(obj_vals) - 1:
            raise ValueError(
                "Unable to minimise function within set search_range")
        else:
            centre = self.get_min(offsets, obj_vals, ind)

        if self.initial_binning > 8:
            #binned search continued
            logger.debug("binned search starting at %f", centre)
            a = centre - ig.voxel_size_x * 2
            b = centre + ig.voxel_size_x * 2
            centre = self.gss(data_processed, ig, (a, b),
                              self.tolerance * ig.voxel_size_x,
                              self.initial_binning)

        #fine search
        logger.debug("fine search starting at %f", centre)
        data_processed = data_filtered
        ig = data_processed.geometry.get_ImageGeometry()
        a = centre - ig.voxel_size_x * 2
        b = centre + ig.voxel_size_x * 2
        centre = self.gss(data_processed, ig, (a, b),
                          self.tolerance * ig.voxel_size_x, 1)

        new_geometry = data_full.geometry.copy()
        new_geometry.config.system.rotation_axis.position[0] = centre

        logger.info(
            "Centre of rotation correction found using image_sharpness")
        logger.info("Calculated from slice: %s", str(self.slice_index))
        logger.info("Centre of rotation shift = %f pixels",
                    centre / ig.voxel_size_x)
        logger.info("Centre of rotation shift = %f units at the object",
                    centre)
        logger.info("Return new dataset with centred geometry")

        if out is None:
            return AcquisitionData(array=data_full,
                                   deep_copy=True,
                                   geometry=new_geometry,
                                   suppress_warning=True)
        else:
            out.geometry = new_geometry
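
Both the binned and fine searches above delegate to `self.gss` with a bracket of two voxels either side of the current estimate; a generic golden-section minimiser in that spirit is sketched below (a standalone illustration, not the class's own implementation; `f` is assumed unimodal on the bracket):

import numpy as np

def golden_section_minimise(f, bracket, tol):
    # shrink the bracket by the golden ratio until it is narrower than tol
    invphi = (np.sqrt(5) - 1) / 2
    a, b = bracket
    c = b - (b - a) * invphi
    d = a + (b - a) * invphi
    while abs(b - a) > tol:
        if f(c) < f(d):
            b = d
        else:
            a = c
        c = b - (b - a) * invphi
        d = a + (b - a) * invphi
    return (a + b) / 2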