Example #1
    def filter_func(data: Images,
                    size=None,
                    mode=None,
                    order=None,
                    cores=None,
                    chunksize=None,
                    progress=None):
        """
        :param data: Input data as a 3D numpy.ndarray
        :param size: Size of the kernel
        :param mode: The mode with which to handle the edges.
                     One of [reflect, constant, nearest, mirror, wrap].
        :param order: The order of the filter along each axis is given as a
                      sequence of integers, or as a single number.
                      An order of 0 corresponds to convolution with a Gaussian
                      kernel.
                      An order of 1, 2, or 3 corresponds to convolution
                      with the first, second or third derivatives of a Gaussian.
                      Higher order derivatives are not implemented
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(data)

        if size and size > 1:
            _execute(data.data, size, mode, order, cores, chunksize, progress)
        h.check_data_stack(data)
        return data
Example #2
    def filter_func(data: Images,
                    size=None,
                    mode="reflect",
                    cores=None,
                    chunksize=None,
                    progress=None,
                    force_cpu=True):
        """
        :param data: Input data as a 3D numpy.ndarray
        :param size: Size of the kernel
        :param mode: The mode with which to handle the edges.
                     One of [reflect, constant, nearest, mirror, wrap].
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: Returns the processed data

        """
        h.check_data_stack(data)

        if size and size > 1:
            if not force_cpu:
                data = _execute_gpu(data.data, size, mode, progress)
            else:
                _execute(data.data, size, mode, cores, chunksize, progress)

        h.check_data_stack(data)
        return data
Example #3
    def filter_func(images: Images, air_region: SensibleROI = None, cores=None, chunksize=None, progress=None):
        """
        Normalise by beam intensity.

        This does NOT do any checks if the Air Region is out of bounds!
        If the Air Region is out of bounds, the crop will fail at runtime.
        If the Air Region is in bounds, but has overlapping coordinates
        the crop will give back a 0 shape for the coordinates that were wrong.

        :param images: Sample data which is to be processed. Expected in radiograms

        :param air_region: The order is - Left Top Right Bottom. The air region
                           from which sums will be calculated and all images will
                           be normalised.

        :param cores: The number of cores that will be used to process the data.

        :param chunksize: The number of chunks that each worker will receive.
        :param progress: Reference to a progress bar object

        :returns: Filtered data (stack of images)
        """
        h.check_data_stack(images)

        # just get data reference
        if air_region:
            progress = Progress.ensure_instance(progress, task_name='ROI Normalisation')
            _execute(images.data, air_region, cores, chunksize, progress)
        h.check_data_stack(images)
        return images
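The `_execute` helper that performs the actual air-region normalisation is not shown in this example. Below is a minimal NumPy sketch of one common approach, scaling each projection by the stack-average air level; the helper name and the exact scaling rule are assumptions, only the Left Top Right Bottom ROI order comes from the docstring above.

import numpy as np

def roi_normalise_sketch(data: np.ndarray, air_region) -> np.ndarray:
    # air_region order is Left, Top, Right, Bottom, as documented above
    left, top, right, bottom = air_region
    # mean counts inside the air region of every projection
    air_means = data[:, top:bottom, left:right].mean(axis=(1, 2))
    # scale each projection so its air region matches the stack-average air level
    scale = air_means.mean() / air_means
    return data * scale[:, None, None]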
Example #4
    def filter_func(images: Images, value: Union[int, float] = 0, unit="micron", progress=None) -> Images:
        if unit == "micron":
            value *= 1e-4

        h.check_data_stack(images)
        if value != 0:
            images.data /= value
        return images
Example #5
    def filter_func(data: Images,
                    flat_before: Images = None,
                    flat_after: Images = None,
                    dark_before: Images = None,
                    dark_after: Images = None,
                    selected_flat_fielding: str = None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """Do background correction with flat and dark images.

        :param data: Sample data which is to be processed. Expected in radiograms
        :param flat_before: Flat (open beam) image to use in normalization, for before the sample is imaged
        :param flat_after: Flat (open beam) image to use in normalization, for after the sample is imaged
        :param dark_before: Dark image to use in normalization, for before the sample is imaged
        :param dark_after: Dark image to use in normalization, for after the sample is imaged
        :param selected_flat_fielding: Select which of the flat fielding methods to use, just Before stacks, just After
                                       stacks or combined.
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.
        :return: Filtered data (stack of images)
        """
        h.check_data_stack(data)

        if selected_flat_fielding is not None:
            if selected_flat_fielding == "Both, concatenated" and flat_after is not None and flat_before is not None \
                    and dark_after is not None and dark_before is not None:
                flat_avg = (flat_before.data.mean(axis=0) + flat_after.data.mean(axis=0)) / 2.0
                dark_avg = (dark_before.data.mean(axis=0) + dark_after.data.mean(axis=0)) / 2.0
            elif selected_flat_fielding == "Only Before" and flat_before is not None and dark_before is not None:
                flat_avg = flat_before.data.mean(axis=0)
                dark_avg = dark_before.data.mean(axis=0)
            elif selected_flat_fielding == "Only After" and flat_after is not None and dark_after is not None:
                flat_avg = flat_after.data.mean(axis=0)
                dark_avg = dark_after.data.mean(axis=0)
            else:
                flat_avg = None
                dark_avg = None

            if flat_avg is not None and dark_avg is not None:
                if 2 != flat_avg.ndim or 2 != dark_avg.ndim:
                    raise ValueError(
                        f"Incorrect shape of the flat image ({flat_avg.shape}) or dark image ({dark_avg.shape}) \
                        which should match the shape of the sample images ({data.data.shape})")

                if not data.data.shape[1:] == flat_avg.shape == dark_avg.shape:
                    raise ValueError(f"Not all images are the expected shape: {data.data.shape[1:]}, instead "
                                     f"flat had shape: {flat_avg.shape}, and dark had shape: {dark_avg.shape}")

                progress = Progress.ensure_instance(progress,
                                                    num_steps=data.data.shape[0],
                                                    task_name='Background Correction')
                _execute(data.data, flat_avg, dark_avg, cores, chunksize, progress)

        h.check_data_stack(data)
        return data
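`_execute` itself is not shown here; the averaging of the before/after stacks suggests the conventional flat/dark correction `(sample - dark) / (flat - dark)`. The sketch below assumes that formula rather than reproducing the project's implementation.

import numpy as np

def background_correction_sketch(sample: np.ndarray,
                                 flat_avg: np.ndarray,
                                 dark_avg: np.ndarray) -> np.ndarray:
    # conventional open-beam correction: (sample - dark) / (flat - dark)
    norm = flat_avg - dark_avg
    # guard against division by zero where flat and dark coincide
    norm = np.where(norm == 0, np.finfo(np.float64).eps, norm)
    return (sample - dark_avg[None, :, :]) / norm[None, :, :]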
Example #6
    def filter_func(images: Images,
                    rebin_param=0.5,
                    mode=None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """
        :param images: Sample data which is to be processed. Expects radiograms
        :param rebin_param: int, float or tuple
                            int - Percentage of current size.
                            float - Fraction of current size.
                            tuple - Size of the output image (x, y).
        :param mode: Interpolation to use for re-sizing
                     ('nearest', 'lanczos', 'bilinear', 'bicubic' or 'cubic').
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(images)

        if isinstance(rebin_param, tuple):
            param_valid = rebin_param[0] > 0 and rebin_param[1] > 0
        else:
            param_valid = rebin_param > 0

        if param_valid:
            sample = images.data
            sample_name: Optional[str]
            if images.memory_filename is not None:
                sample_name = images.memory_filename
                images.free_memory(delete_filename=False)
            else:
                # this case is true when the filter preview is being calculated
                sample_name = None
                # force single core execution as it's faster for a single image
                cores = 1
            empty_resized_data = _create_reshaped_array(
                sample.shape, sample.dtype, rebin_param, sample_name)

            f = ptsm.create_partial(skimage.transform.resize,
                                    ptsm.return_to_second_but_dont_use_it,
                                    mode=mode,
                                    output_shape=empty_resized_data.shape[1:])
            ptsm.execute(sample,
                         empty_resized_data,
                         f,
                         cores,
                         chunksize,
                         progress=progress,
                         msg="Applying Rebin")
            images.data = empty_resized_data

        return images
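`_create_reshaped_array` is not shown, so the output-shape calculation below is an assumption based on the docstring (a scalar is treated as a fraction of the current size, a tuple as an explicit (x, y) size). The sketch resizes a single slice with `skimage.transform.resize`, which is the function the filter wraps.

import numpy as np
import skimage.transform

def rebin_single_slice_sketch(img: np.ndarray, rebin_param, mode="reflect") -> np.ndarray:
    if isinstance(rebin_param, tuple):
        output_shape = rebin_param  # explicit (x, y) output size
    else:
        # scalar treated as a fraction of the current size (assumption)
        output_shape = (int(img.shape[0] * rebin_param),
                        int(img.shape[1] * rebin_param))
    return skimage.transform.resize(img, output_shape, mode=mode)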
Example #7
    def filter_func(images: Images,
                    run_ring_removal=False,
                    center_x=None,
                    center_y=None,
                    thresh=300.0,
                    thresh_max=300.0,
                    thresh_min=-100.0,
                    theta_min=30,
                    rwidth=30,
                    cores=None,
                    chunksize=None,
                    progress=None):
        """
        Removal of ring artifacts in reconstructed volume.

        :param images: Sample data which is to be processed. Expected in radiograms
        :param run_ring_removal: Uses Wavelet-Fourier based ring removal
        :param center_x: (float, optional) abscissa location of center of rotation
        :param center_y: (float, optional) ordinate location of center of rotation
        :param thresh: (float, optional)
                       maximum value of an offset due to a ring artifact
        :param thresh_max: (float, optional)
                       max value for portion of image to filter
        :param thresh_min: (float, optional)
                       min value for portion of image to filter
        :param theta_min: (int, optional)
                          minimum angle in degrees to be considered ring artifact
        :param rwidth: (int, optional)
                       Maximum width of the rings to be filtered in pixels
        :returns: Filtered data
        """
        progress = Progress.ensure_instance(progress, task_name='Ring Removal')

        tp = safe_import('tomopy.misc.corr')

        if run_ring_removal:
            h.check_data_stack(images)

            with progress:
                progress.update(msg="Ring Removal")
                sample = images.data
                tp.remove_ring(sample,
                               center_x=center_x,
                               center_y=center_y,
                               thresh=thresh,
                               thresh_max=thresh_max,
                               thresh_min=thresh_min,
                               theta_min=theta_min,
                               rwidth=rwidth,
                               ncore=cores,
                               nchunk=chunksize,
                               out=sample)

        return images
Example #8
    def filter_func(images: Images,
                    value: Union[int, float] = 0,
                    unit="micron",
                    progress=None) -> Images:
        h.check_data_stack(images)
        if not value:
            raise ValueError('value parameter must not equal 0 or None')

        if unit == "micron":
            value *= 1e-4

        images.data /= value
        return images
Example #9
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float],
                                                       SensibleROI]] = None,
                    progress=None) -> Images:
        """Execute the Crop Coordinates by Region of Interest filter. This does
        NOT do any checks if the Region of interest is out of bounds!

        If the region of interest is out of bounds, the crop will **FAIL** at
        runtime.

        If the region of interest is in bounds, but has overlapping coordinates
        the crop will give back a 0 shape for the coordinates that were wrong.

        :param images: Input data as a 3D numpy.ndarray

        :param region_of_interest: Crop original images using these coordinates.
                                   The selection is a rectangle and expected order
                                   is - Left Top Right Bottom.

        :return: The processed 3D numpy.ndarray
        """

        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)

        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        sample = images.data
        shape = (sample.shape[0], region_of_interest.height,
                 region_of_interest.width)
        if any((s < 0 for s in shape)):
            raise ValueError(
                "It seems the Region of Interest is outside of the current image dimensions.\n"
                "This can happen on the image preview right after a previous Crop Coordinates."
            )

        # allocate output first BEFORE freeing the original data,
        # otherwise it's possible to free and then fail allocation for output
        # at which point you're left with no data
        output = pu.allocate_output(images, shape)
        images.data = execute_single(sample,
                                     region_of_interest,
                                     progress,
                                     out=output)

        return images
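`execute_single` is not shown; with the Left Top Right Bottom order described above, the crop itself reduces to a NumPy slice. The helper below is a hypothetical sketch of that operation.

import numpy as np

def crop_sketch(sample: np.ndarray, left: int, top: int, right: int, bottom: int) -> np.ndarray:
    # crop every image in the stack to the Left Top Right Bottom rectangle
    return sample[:, top:bottom, left:right]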
Example #10
    def filter_func(images: Images,
                    rebin_param=0.5,
                    mode=None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """
        :param images: Sample data which is to be processed. Expects radiograms
        :param rebin_param: int, float or tuple
                            int - Percentage of current size.
                            float - Fraction of current size.
                            tuple - Size of the output image (x, y).
        :param mode: Interpolation to use for re-sizing
                     ('nearest', 'lanczos', 'bilinear', 'bicubic' or 'cubic').
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(images)

        if isinstance(rebin_param, tuple):
            param_valid = rebin_param[0] > 0 and rebin_param[1] > 0
        else:
            param_valid = rebin_param > 0

        if param_valid:
            sample = images.data
            sample_name: Optional[str]
            # allocate output first BEFORE freeing the original data,
            # otherwise it's possible to free and then fail allocation for output
            # at which point you're left with no data
            empty_resized_data = _create_reshaped_array(images, rebin_param)

            f = ptsm.create_partial(skimage.transform.resize,
                                    ptsm.return_to_second_but_dont_use_it,
                                    mode=mode,
                                    output_shape=empty_resized_data.shape[1:])
            ptsm.execute(sample,
                         empty_resized_data,
                         f,
                         cores,
                         chunksize,
                         progress=progress,
                         msg="Applying Rebin")
            images.data = empty_resized_data

        return images
Example #11
    def filter_func(data: Images, angle=None, dark=None, cores=None, chunksize=None, progress=None):
        """
        Rotates images by an arbitrary degree.

        :param data: stack of sample images
        :param angle: The rotation to be performed, in degrees
        :param cores: cores for parallel execution
        :param chunksize: chunk for each worker

        :return: The rotated images
        """
        h.check_data_stack(data)

        if angle:
            _execute(data.data, angle, cores, chunksize, progress)

        return data
Example #12
    def filter_func(data: Images, angle=None, dark=None, cores=None, chunksize=None, progress=None):
        """
        Rotates images by an arbitrary degree.

        :param data: stack of sample images
        :param angle: The rotation to be performed, in degrees
        :param cores: cores for parallel execution
        :param chunksize: chunk for each worker

        :return: The rotated images
        """
        h.check_data_stack(data)

        if angle is None:
            raise ValueError('Value must be provided for angle parameter')

        # No need to run the filter for an angle of 0 as it won't have any effect
        if angle != 0:
            _execute(data.data, angle, cores, chunksize, progress)

        return data
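`_execute` is not shown in either rotation example. A stack rotation could be done with `scipy.ndimage.rotate`; the sketch below is an assumption, not the project's implementation (`reshape=False` keeps the original image size).

import numpy as np
from scipy import ndimage

def rotate_stack_sketch(data: np.ndarray, angle: float) -> np.ndarray:
    # rotate every projection in the (row, column) plane by the same angle,
    # keeping the original image shape
    return ndimage.rotate(data, angle, axes=(1, 2), reshape=False)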
Example #13
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float], SensibleROI]] = None,
                    progress=None) -> Images:
        """
        Execute the Crop Coordinates by Region of Interest filter.
        This does NOT do any checks if the Region of interest is out of bounds!

        If the region of interest is out of bounds, the crop will **FAIL** at
        runtime.

        If the region of interest is in bounds, but has overlapping coordinates
        the crop will give back a 0 shape for the coordinates that were wrong.

        :param images: Input data as a 3D numpy.ndarray

        :param region_of_interest: Crop original images using these coordinates.
                                   The selection is a rectangle and expected order
                                   is - Left Top Right Bottom.

        :return: The processed 3D numpy.ndarray
        """

        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)

        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        sample = images.data
        shape = (sample.shape[0], region_of_interest.height, region_of_interest.width)
        sample_name = images.memory_filename
        if sample_name is not None:
            images.free_memory(delete_filename=False)
        output = pu.create_array(shape, sample.dtype, sample_name)
        images.data = execute_single(sample, region_of_interest, progress, out=output)

        return images
Example #14
    def filter_func(data: Images,
                    flat: Images = None,
                    dark: Images = None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """
        Do background correction with flat and dark images.

        :param data: Sample data which is to be processed. Expected in radiograms
        :param flat: Flat (open beam) image to use in normalization
        :param dark: Dark image to use in normalization
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.
        :return: Filtered data (stack of images)
        """
        h.check_data_stack(data)

        if flat is not None and dark is not None:
            flat_avg = flat.data.mean(axis=0)
            dark_avg = dark.data.mean(axis=0)
            if 2 != flat_avg.ndim or 2 != dark_avg.ndim:
                raise ValueError(
                    f"Incorrect shape of the flat image ({flat_avg.shape}) or dark image ({dark_avg.shape}) \
                    which should match the shape of the sample images ({data.data.shape})")

            if not data.data.shape[1:] == flat_avg.shape == dark_avg.shape:
                raise ValueError(f"Not all images are the expected shape: {data.data.shape[1:]}, instead "
                                 f"flat had shape: {flat_avg.shape}, and dark had shape: {dark_avg.shape}")

            progress = Progress.ensure_instance(progress,
                                                num_steps=data.data.shape[0],
                                                task_name='Background Correction')
            _execute(data.data, flat_avg, dark_avg, cores, chunksize, progress)

        h.check_data_stack(data)
        return data
Example #15
    def filter_func(images: Images,
                    rebin_param=0.5,
                    mode=None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """
        :param images: Sample data which is to be processed. Expects radiograms
        :param rebin_param: int, float or tuple
                            int - Percentage of current size.
                            float - Fraction of current size.
                            tuple - Size of the output image (x, y).
        :param mode: Interpolation to use for re-sizing
                     ('nearest', 'lanczos', 'bilinear', 'bicubic' or 'cubic').
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(images)

        if isinstance(rebin_param, tuple):
            param_valid = rebin_param[0] > 0 and rebin_param[1] > 0
        else:
            param_valid = rebin_param > 0

        if param_valid:
            sample = images.data
            empty_resized_data = _create_reshaped_array(images, rebin_param)

            f = ps.create_partial(skimage.transform.resize,
                                  ps.return_to_second_at_i,
                                  mode=mode,
                                  output_shape=empty_resized_data.shape[1:])
            ps.shared_list = [sample, empty_resized_data]
            ps.execute(f, sample.shape[0], cores, "Applying Rebin", progress)
            images.data = empty_resized_data

        return images
Example #16
    def filter_func(data: Images,
                    size=None,
                    mode="reflect",
                    cores=None,
                    chunksize=None,
                    progress=None,
                    force_cpu=True):
        """
        :param data: Input data as an Images object.
        :param size: Size of the kernel
        :param mode: The mode with which to handle the edges.
                     One of [reflect, constant, nearest, mirror, wrap].
                     Modes are described in the `SciPy documentation
                     <https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.median_filter.html>`_.
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.
        :param progress: The object for displaying the progress.
        :param force_cpu: Whether or not to use the CPU.

        :return: Returns the processed data

        """
        h.check_data_stack(data)

        if not size or not size > 1:
            raise ValueError(
                f'Size parameter must be greater than 1, but value provided was {size}'
            )

        if not force_cpu:
            data = _execute_gpu(data.data, size, mode, progress)
        else:
            _execute(data.data, size, mode, cores, chunksize, progress)

        h.check_data_stack(data)
        return data
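The docstring links to `scipy.ndimage.median_filter`, so the CPU path presumably applies it slice by slice. A minimal sketch under that assumption:

import numpy as np
import scipy.ndimage

def median_filter_sketch(data: np.ndarray, size: int, mode: str = "reflect") -> np.ndarray:
    # apply the 2D median filter to each image in the stack
    return np.stack([scipy.ndimage.median_filter(img, size=size, mode=mode)
                     for img in data])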
Example #17
    def filter_func(data: Images,
                    size=None,
                    mode=None,
                    order=None,
                    cores=None,
                    chunksize=None,
                    progress=None):
        """
        :param data: Input data as a 3D numpy.ndarray
        :param size: Size of the kernel
        :param mode: The mode with which to handle the edges.
                     One of [reflect, constant, nearest, mirror, wrap].
                     Modes are described in the `SciPy documentation
                     <https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.gaussian_filter.html>`_.
        :param order: The order of the filter along each axis is given as a
                      sequence of integers, or as a single number.
                      An order of 0 corresponds to convolution with a Gaussian
                      kernel.
                      An order of 1, 2, or 3 corresponds to convolution
                      with the first, second or third derivatives of a Gaussian.
                      Higher order derivatives are not implemented
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(data)

        if not size or not size > 1:
            raise ValueError(
                f'Size parameter must be greater than 1, but value provided was {size}'
            )

        _execute(data.data, size, mode, order, cores, progress)
        h.check_data_stack(data)
        return data
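The docstring links to `scipy.ndimage.gaussian_filter`. How `size` maps onto the SciPy `sigma` parameter is not shown in the source, so the per-slice sketch below simply passes it through as `sigma`; treat that mapping as an assumption.

import numpy as np
import scipy.ndimage

def gaussian_filter_sketch(data: np.ndarray, size, mode="reflect", order=0) -> np.ndarray:
    # `size` is used directly as sigma here; the real mapping is not shown above
    return np.stack([scipy.ndimage.gaussian_filter(img, sigma=size, mode=mode, order=order)
                     for img in data])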
Example #18
    def run_reconstruct(self,
                        sample,
                        config,
                        proj_angles=None,
                        progress=None,
                        **kwargs):
        """
        Run a reconstruction with TomoPy, using the CPU algorithms they
        provide.

        Information for each reconstruction method is available at
            http://tomopy.readthedocs.io/en/latest/api/tomopy.recon.algorithm.html

        :param sample: The sample image data as a 3D numpy.ndarray

        :param config: A ReconstructionConfig with all the necessary parameters
                       to run a reconstruction. The Centers of Rotation
                       must be interpolated independently!

        :param proj_angles: The projection angle for each slice.
                            If not provided equidistant angles will be
                            generated

        :param kwargs: Any keyword arguments will be forwarded to the TomoPy
                       reconstruction function

        :return: The reconstructed volume
        """
        progress = Progress.ensure_instance(progress, task_name='TomoPy')
        log = getLogger(__name__)

        h.check_data_stack(sample)

        if proj_angles is None:
            num_radiograms = sample.shape[1]
            proj_angles = projection_angles.generate(config.func.max_angle,
                                                     num_radiograms).value

        alg = config.func.algorithm
        num_iter = config.func.num_iter
        cores = config.func.cores
        cors = config.func.cors

        assert len(cors) == sample.shape[0],\
            "The provided number of CORs does not match the slice number! \
            A Center of rotation must be provided for each slice. Usually \
            that is done via core.utility.cor_interpolate"

        iterative_algorithm = alg not in ['gridrec', 'fbp']

        with progress:
            if iterative_algorithm:  # run the iterative algorithms
                progress.update(msg="Iterative method with TomoPy")
                log.info("Avg Center of Rotation: {0}, Algorithm: {1}, number "
                         "of iterations: {2}...".format(
                             np.mean(cors), alg, num_iter))

                kwargs = dict(kwargs, num_iter=num_iter)
            else:  # run the non-iterative algorithms
                progress.update(msg="Non-iterative method with TomoPy")
                log.info("Mean COR: {0}, Number of CORs provided {1}, "
                         "Algorithm: {2}...".format(np.mean(cors), len(cors),
                                                    alg))

            # TODO need to expose the operations to CLI
            # filter_name='parzen',
            # filter_par=[5.],
            recon = self._tomopy.recon(tomo=sample,
                                       theta=proj_angles,
                                       center=cors,
                                       ncore=cores,
                                       algorithm=alg,
                                       sinogram_order=True,
                                       **kwargs)

        log.info("Reconstructed 3D volume. Shape: {0}, and pixel data type: "
                 "{1}.".format(recon.shape, recon.dtype))

        return recon
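The docstring states that equidistant projection angles are generated when none are provided, but `projection_angles.generate` is not shown. A minimal sketch of such a generator, assuming the angles are returned in radians (which is what TomoPy's `recon` expects) and include the endpoint:

import numpy as np

def generate_projection_angles_sketch(max_angle_deg: float, num_radiograms: int) -> np.ndarray:
    # equidistant angles from 0 to max_angle_deg, converted to radians for tomopy.recon
    return np.deg2rad(np.linspace(0.0, max_angle_deg, num_radiograms))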
Example #19
    def filter_func(images: Images,
                    center_mode="image center",
                    center_x=None,
                    center_y=None,
                    thresh=300.0,
                    thresh_max=300.0,
                    thresh_min=-100.0,
                    theta_min=30,
                    rwidth=30,
                    cores=None,
                    chunksize=None,
                    progress=None):
        """
        Removal of ring artifacts in reconstructed volume. Uses Wavelet-Fourier based ring removal

        :param images: Sample data which is to be processed. Expected in radiograms
        :param center_mode: Whether to use the center of the image or a user-defined value
        :param center_x: (float, optional) abscissa location of center of rotation
        :param center_y: (float, optional) ordinate location of center of rotation
        :param thresh: (float, optional)
                       maximum value of an offset due to a ring artifact
        :param thresh_max: (float, optional)
                       max value for portion of image to filter
        :param thresh_min: (float, optional)
                       min value for portion of image to filter
        :param theta_min: (int, optional)
                          minimum angle in degrees to be considered ring artifact
        :param rwidth: (int, optional)
                       Maximum width of the rings to be filtered in pixels
        :returns: Filtered data
        """
        progress = Progress.ensure_instance(progress, task_name='Ring Removal')

        tp = safe_import('tomopy.misc.corr')

        if center_mode != "manual":
            center_x = center_y = None

        h.check_data_stack(images)

        # COMPAT tomopy <= 1.10.1
        # tomopy 1.10.1 and older will crash with "large" values of theta
        # Catch these here for now
        # https://github.com/tomopy/tomopy/issues/551
        if center_mode == "manual":
            min_dist_to_edge = min([
                center_x, center_y, images.width - center_x,
                images.height - center_y
            ])
        else:
            min_dist_to_edge = min([images.width / 2, images.height / 2])

        if theta_min >= 180 or theta_min > min_dist_to_edge:
            raise ValueError(
                "Theta should be in the range [0 - 180) and no larger than the "
                "minimum distance from the COR to the edge.")
        # end COMPAT

        with progress:
            progress.update(msg="Ring Removal")
            sample = images.data
            tp.remove_ring(sample,
                           center_x=center_x,
                           center_y=center_y,
                           thresh=thresh,
                           thresh_max=thresh_max,
                           thresh_min=thresh_min,
                           theta_min=theta_min,
                           rwidth=rwidth,
                           ncore=cores,
                           nchunk=chunksize,
                           out=sample)

        return images
Example #20
    def filter_func(images: Images,
                    region_of_interest: SensibleROI = None,
                    normalisation_mode: str = modes()[0],
                    flat_field: Optional[Images] = None,
                    cores=None,
                    chunksize=None,
                    progress=None):
        """Normalise by beam intensity.

        This does NOT do any checks if the Air Region is out of bounds!
        If the Air Region is out of bounds, the crop will fail at runtime.
        If the Air Region is in bounds, but has overlapping coordinates
        the crop will give back a 0 shape for the coordinates that were wrong.

        :param images: Sample data which is to be processed. Expected in radiograms

        :param region_of_interest: The order is - Left Top Right Bottom. The air
        region for which grey values are summed up and used for normalisation/scaling.

        :param normalisation_mode: Controls what the ROI counts are normalised to.
            'Preserve Max' : Normalisation is scaled such that the maximum pixel value of the stack is equal before
                             and after the operation.
            'Stack Average' : The mean value of the air region across all projections is preserved.
            'Flat Field' : The mean value of the air regions in the projections is made equal to the mean value of the
                           air region in the flat field image.

        :param flat_field: Flat field to use if 'Flat Field' mode is enabled.

        :param cores: The number of cores that will be used to process the data.

        :param chunksize: The number of chunks that each worker will receive.
        :param progress: Reference to a progress bar object

        :returns: Filtered data (stack of images)
        """
        if normalisation_mode not in modes():
            raise ValueError(
                f"Unknown normalisation_mode: {normalisation_mode}, should be one of {modes()}"
            )

        if normalisation_mode == "Flat Field" and flat_field is None:
            raise ValueError(
                'flat_field must be provided if using normalisation_mode of "Flat Field"'
            )

        h.check_data_stack(images)

        if not region_of_interest:
            raise ValueError('region_of_interest must be provided')

        if flat_field is not None:
            flat_field_data = flat_field.data
        else:
            flat_field_data = None

        # just get data reference
        progress = Progress.ensure_instance(progress,
                                            task_name='ROI Normalisation')
        _execute(images.data, region_of_interest, normalisation_mode,
                 flat_field_data, cores, chunksize, progress)
        h.check_data_stack(images)
        return images
Example #21
def fool_my_own_sanity_check(data):
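    # Accept both 3D stacks and single 2D images: if the 3D check fails,
    # fall back to checking the data as a 2D image.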
    try:
        h.check_data_stack(data)
    except ValueError:
        h.check_data_stack(data, expected_dims=2)