Example #1
    def filter_func(images: Images,
                    diff=None,
                    radius=_default_radius,
                    mode=_default_mode,
                    cores=None,
                    progress: Progress = None):
        """
        :param images: Input data
        :param diff: Pixel value difference above which to crop bright pixels
        :param radius: Size of the median filter to apply
        :param mode: Whether to remove bright or dark outliers
                    One of [OUTLIERS_BRIGHT, OUTLIERS_DARK]
        :param cores: The number of cores that will be used to process the data.

        :return: The processed 3D numpy.ndarray
        """
        if diff and radius and diff > 0 and radius > 0:
            func = ps.create_partial(OutliersFilter._execute,
                                     ps.return_to_self,
                                     diff=diff,
                                     radius=radius,
                                     mode=mode)
            ps.shared_list = [images.data]
            ps.execute(
                func,
                images.num_projections,
                progress=progress,
                msg=f"Outliers with threshold {diff} and kernel {radius}")
        return images
Example #2
    def filter_func(images: Images,
                    diff=None,
                    radius=_default_radius,
                    mode=_default_mode,
                    cores=None,
                    progress: Progress = None):
        """
        :param images: Input data
        :param diff: Pixel value difference above which to crop bright pixels
        :param radius: Size of the median filter to apply
        :param mode: Whether to remove bright or dark outliers
                    One of [OUTLIERS_BRIGHT, OUTLIERS_DARK]
        :param cores: The number of cores that will be used to process the data.

        :return: The processed 3D numpy.ndarray
        """
        if not diff or not diff > 0:
            raise ValueError(f'diff parameter must be greater than 0. Value provided was {diff}')

        if not radius or not radius > 0:
            raise ValueError(f'radius parameter must be greater than 0. Value provided was {radius}')

        func = ps.create_partial(OutliersFilter._execute, ps.return_to_self, diff=diff, radius=radius, mode=mode)
        ps.shared_list = [images.data]
        ps.execute(func,
                   images.num_projections,
                   progress=progress,
                   msg=f"Outliers with threshold {diff} and kernel {radius}")
        return images
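A hedged usage sketch for the stricter variant above. `OutliersFilter`, `Images` and the `OUTLIERS_BRIGHT` constant all appear in these examples, but the `Images` constructor signature is an assumption:

    import numpy as np

    # hypothetical 10-projection stack; Images(...) construction is assumed
    stack = Images(np.random.rand(10, 128, 128).astype(np.float32))
    # crop bright outliers that differ from the median by more than 0.1
    filtered = OutliersFilter.filter_func(stack, diff=0.1, radius=3, mode=OUTLIERS_BRIGHT)

Note the design difference between the two variants: the first silently returns the images unchanged when `diff` or `radius` is missing or non-positive, while this one raises a ValueError.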
Example #3
def _execute(data: np.ndarray, angle: float, cores: int, chunksize: int, progress: Progress):
    progress = Progress.ensure_instance(progress, task_name='Rotate Stack')

    with progress:
        f = ps.create_partial(_rotate_image_inplace, ps.inplace1, angle=angle)
        ps.shared_list = [data]
        ps.execute(f, data.shape[0], progress, msg=f"Rotating by {angle} degrees", cores=cores)

    return data
Example #4
    def filter_func(images: Images,
                    snr=3,
                    size=61,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(remove_dead_stripe,
                              ps.return_to_self,
                              snr=snr,
                              size=size,
                              residual=False)
        ps.shared_list = [images.data]
        ps.execute(f, images.data.shape[0], progress, cores=cores)
        return images
Example #5
def _calculate_correlation_error(images, shared_search_range, min_correlation_error, progress):
    # if the projections are passed in the partial they are copied to every process on every iteration
    # this makes the multiprocessing significantly slower
    # so they are copied into a shared array to avoid that copying
    with pu.temp_shared_array((2, images.height, images.width)) as shared_projections:
        shared_projections[0][:] = images.projection(0)
        shared_projections[1][:] = np.fliplr(images.proj180deg.data[0])

        do_search_partial = ps.create_partial(do_calculate_correlation_err, ps.inplace3, image_width=images.width)

        ps.shared_list = [min_correlation_error, shared_search_range, shared_projections]
        ps.execute(do_search_partial,
                   num_operations=min_correlation_error.shape[0],
                   progress=progress,
                   msg="Finding correlation on row")
Example #6
    def filter_func(images: Images,
                    snr=3,
                    size=61,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(
            remove_unresponsive_and_fluctuating_stripe,
            ps.return_to_self,
            snr=snr,
            size=size,
        )
        ps.shared_list = [images.data]
        ps.execute(f, images.num_projections, progress, cores=cores)
        return images
Example #7
    def filter_func(images: Images,
                    order=1,
                    sigma=3,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(remove_stripe_based_fitting,
                              ps.return_to_self,
                              order=order,
                              sigma=sigma,
                              sort=True)

        ps.shared_list = [images.data]
        ps.execute(f, images.data.shape[0], progress, cores=cores)
        return images
Example #8
    def filter_func(images,
                    snr=3,
                    la_size=61,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(
            remove_large_stripe,
            ps.return_to_self,
            snr=snr,
            size=la_size,
        )
        ps.shared_list = [images.data]
        ps.execute(f, images.num_projections, progress, cores=cores)
        return images
Example #9
    def filter_func(images: Images,
                    order=1,
                    sigmax=3,
                    sigmay=3,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(remove_stripe_based_sorting_fitting,
                              ps.return_to_self,
                              order=order,
                              sigmax=sigmax,
                              sigmay=sigmay)

        ps.shared_list = [images.data]
        ps.execute(f, images.num_projections, progress, cores=cores)
        return images
Example #10
def _execute(data: np.ndarray,
             flat=None,
             dark=None,
             cores=None,
             chunksize=None,
             progress=None):
    """A benchmark justifying the current implementation, performed on
    500x2048x2048 images.

    #1 Separate runs
    Subtract (sequential with np.subtract(data, dark, out=data)) - 13s
    Divide (par) - 1.15s

    #2 Separate parallel runs
    Subtract (par) - 5.5s
    Divide (par) - 1.15s

    #3 Added subtract into _divide so that it is:
                np.true_divide(
                    np.subtract(data, dark, out=data), norm_divide, out=data)
    Subtract then divide (par) - 55s
    """
    with progress:
        progress.update(msg="Applying background correction")

        norm_divide = pu.create_array((data.shape[1], data.shape[2]),
                                      data.dtype)

        # subtract dark from flat and copy into shared array with [:]
        norm_divide[:] = np.subtract(flat, dark)

        # prevent divide-by-zero issues, and negative pixels make no sense
        norm_divide[norm_divide == 0] = MINIMUM_PIXEL_VALUE

        # subtract the dark from all images
        do_subtract = ps.create_partial(_subtract,
                                        fwd_function=ps.inplace_second_2d)
        ps.shared_list = [data, dark]
        ps.execute(do_subtract, data.shape[0], progress, cores=cores)

        # divide the data by (flat - dark)
        do_divide = ps.create_partial(_divide,
                                      fwd_function=ps.inplace_second_2d)
        ps.shared_list = [data, norm_divide]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

    return data
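Given the benchmark in the docstring, the subtraction and division stay as two separate parallel passes. For context, a guess at the shape of the per-slice helpers being dispatched; the real `_subtract` and `_divide` are not shown here, so these bodies and parameter names are assumptions extrapolated from the docstring's fused snippet:

    import numpy as np

    def _subtract(data, second_2d=None):
        # assumed body: in-place subtraction of the dark frame from one slice
        np.subtract(data, second_2d, out=data)

    def _divide(data, second_2d=None):
        # assumed body: in-place division of one slice by (flat - dark)
        np.true_divide(data, second_2d, out=data)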
Example #11
    def filter_func(images: Images,
                    snr=3,
                    la_size=61,
                    sm_size=21,
                    dim=1,
                    cores=None,
                    chunksize=None,
                    progress=None):
        f = ps.create_partial(remove_all_stripe,
                              ps.return_to_self,
                              snr=snr,
                              la_size=la_size,
                              sm_size=sm_size,
                              dim=dim)
        ps.shared_list = [images.data]
        ps.execute(f, images.data.shape[0], progress, cores=cores)
        return images
Example #12
def _execute(data, size, edgemode, cores=None, chunksize=None, progress=None):
    log = getLogger(__name__)
    progress = Progress.ensure_instance(progress, task_name='NaN Removal')

    # create the partial function to forward the parameters
    f = ps.create_partial(_nan_to_median,
                          ps.return_to_self,
                          size=size,
                          edgemode=edgemode)

    with progress:
        log.info(
            "PARALLEL NaN Removal filter, with pixel data type: {0}".format(
                data.dtype))

        ps.shared_list = [data]
        ps.execute(f, data.shape[0], progress, msg="NaN Removal", cores=cores)

    return data
Example #13
def _execute(data: np.ndarray, size, mode, order, cores=None, progress=None):
    log = getLogger(__name__)
    progress = Progress.ensure_instance(progress, task_name='Gaussian filter')

    f = ps.create_partial(scipy_ndimage.gaussian_filter,
                          ps.return_to_self,
                          sigma=size,
                          mode=mode,
                          order=order)

    log.info("Starting PARALLEL gaussian filter, with pixel data type: {0}, "
             "filter size/width: {1}.".format(data.dtype, size))

    progress.update()
    ps.shared_list = [data]
    ps.execute(f, data.shape[0], progress, msg="Gaussian filter", cores=cores)

    progress.mark_complete()
    log.info("Finished  gaussian filter, with pixel data type: {0}, "
             "filter size/width: {1}.".format(data.dtype, size))
Example #14
    def filter_func(images: Images,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        if images.num_projections == 1:
            # we can't really compute the preview as the image stack copy
            # passed in doesn't have the logfile in it
            raise RuntimeError("No logfile available for this stack.")

        counts = images.counts()

        if counts is None:
            raise RuntimeError("No loaded log values for this stack.")

        counts_val = counts.value / counts.value[0]
        do_division = ps.create_partial(_divide_by_counts,
                                        fwd_function=ps.inplace2)
        ps.shared_list = [images.data, counts_val]
        ps.execute(do_division, images.num_projections, progress, cores=cores)
        return images
Example #15
    def filter_func(images: Images,
                    rebin_param=0.5,
                    mode=None,
                    cores=None,
                    chunksize=None,
                    progress=None) -> Images:
        """
        :param images: Sample data which is to be processed. Expects radiograms
        :param rebin_param: int, float or tuple
                            int - Percentage of current size.
                            float - Fraction of current size.
                            tuple - Size of the output image (x, y).
        :param mode: Interpolation to use for re-sizing
                     ('nearest', 'lanczos', 'bilinear', 'bicubic' or 'cubic').
        :param cores: The number of cores that will be used to process the data.
        :param chunksize: The number of chunks that each worker will receive.

        :return: The processed 3D numpy.ndarray
        """
        h.check_data_stack(images)

        if isinstance(rebin_param, tuple):
            param_valid = rebin_param[0] > 0 and rebin_param[1] > 0
        else:
            param_valid = rebin_param > 0

        if param_valid:
            sample = images.data
            empty_resized_data = _create_reshaped_array(images, rebin_param)

            f = ps.create_partial(skimage.transform.resize,
                                  ps.return_to_second_at_i,
                                  mode=mode,
                                  output_shape=empty_resized_data.shape[1:])
            ps.shared_list = [sample, empty_resized_data]
            ps.execute(f, sample.shape[0], cores, "Applying Rebin", progress)
            images.data = empty_resized_data

        return images
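Per the docstring, `rebin_param` is dispatched on its type. A hedged usage sketch; the enclosing class is not shown above, so `filter_func` is called unqualified and `stack` stands for a loaded `Images` instance, with `mode` left at its default:

    half = filter_func(stack, rebin_param=0.5)          # float: fraction of current size
    fixed = filter_func(stack, rebin_param=(256, 256))  # tuple: explicit (x, y) output size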
Example #16
def _execute(data, size, mode, cores=None, chunksize=None, progress=None):
    log = getLogger(__name__)
    progress = Progress.ensure_instance(progress, task_name='Median filter')

    # create the partial function to forward the parameters
    f = ps.create_partial(_median_filter,
                          ps.return_to_self,
                          size=size,
                          mode=mode)

    with progress:
        log.info("PARALLEL median filter, with pixel data type: {0}, filter "
                 "size/width: {1}.".format(data.dtype, size))

        ps.shared_list = [data]
        ps.execute(f,
                   data.shape[0],
                   progress,
                   msg="Median filter",
                   cores=cores)

    return data
Example #17
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             cores=None,
             chunksize=None,
             progress=None):
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        air_sums = pu.create_array((img_num, ), data.dtype)

        do_calculate_air_sums = ps.create_partial(_calc_sum,
                                                  ps.return_to_second_at_i,
                                                  air_left=air_region.left,
                                                  air_top=air_region.top,
                                                  air_right=air_region.right,
                                                  air_bottom=air_region.bottom)

        ps.shared_list = [data, air_sums]
        ps.execute(do_calculate_air_sums, data.shape[0], progress, cores=cores)

        do_divide = ps.create_partial(_divide_by_air_sum,
                                      fwd_function=ps.inplace2)
        ps.shared_list = [data, air_sums]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        avg = np.average(air_sums)
        max_avg = np.max(air_sums) / avg
        min_avg = np.min(air_sums) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")
Example #18
    def filter_func(images: Images,
                    sigma=3,
                    size=21,
                    window_dim=1,
                    filtering_dim=1,
                    cores=None,
                    chunksize=None,
                    progress=None):
        if filtering_dim == 1:
            f = ps.create_partial(remove_stripe_based_filtering_sorting,
                                  ps.return_to_self,
                                  sigma=sigma,
                                  size=size,
                                  dim=window_dim)
        else:
            f = ps.create_partial(remove_stripe_based_2d_filtering_sorting,
                                  ps.return_to_self,
                                  sigma=sigma,
                                  size=size,
                                  dim=window_dim)
        ps.shared_list = [images.data]
        ps.execute(f, images.num_projections, progress, cores=cores)
        return images
Example #19
def _execute(data: np.ndarray, div_val: float, mult_val: float, add_val: float, sub_val: float, cores: Optional[int],
             progress):
    do_arithmetic = ps.create_partial(_arithmetic_func, fwd_function=ps.arithmetic)
    ps.shared_list = [data, div_val, mult_val, add_val, sub_val]
    ps.execute(do_arithmetic, data.shape[0], progress, cores=cores)
Example #20
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             normalisation_mode: str,
             flat_field: Optional[np.ndarray],
             cores=None,
             chunksize=None,
             progress=None):
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        air_means = pu.create_array((img_num, ), data.dtype)

        do_calculate_air_means = ps.create_partial(
            _calc_mean,
            ps.return_to_second_at_i,
            air_left=air_region.left,
            air_top=air_region.top,
            air_right=air_region.right,
            air_bottom=air_region.bottom)

        ps.shared_list = [data, air_means]
        ps.execute(do_calculate_air_means,
                   data.shape[0],
                   progress,
                   cores=cores)

        if normalisation_mode == 'Preserve Max':
            air_maxs = pu.create_array((img_num, ), data.dtype)
            do_calculate_air_max = ps.create_partial(_calc_max,
                                                     ps.return_to_second_at_i)

            ps.shared_list = [data, air_maxs]
            ps.execute(do_calculate_air_max,
                       data.shape[0],
                       progress,
                       cores=cores)

            if np.isnan(air_maxs).any():
                raise ValueError("Image contains invalid (NaN) pixels")

            # calculate the before and after maximum
            init_max = air_maxs.max()
            post_max = (air_maxs / air_means).max()
            air_means *= post_max / init_max

        elif normalisation_mode == 'Stack Average':
            air_means /= air_means.mean()

        elif normalisation_mode == 'Flat Field' and flat_field is not None:
            flat_mean = pu.create_array((flat_field.shape[0], ),
                                        flat_field.dtype)
            ps.shared_list = [flat_field, flat_mean]
            ps.execute(do_calculate_air_means,
                       flat_field.shape[0],
                       progress,
                       cores=cores)
            air_means /= flat_mean.mean()

        if np.isnan(air_means).any():
            raise ValueError("Air region contains invalid (NaN) pixels")

        do_divide = ps.create_partial(_divide_by_air, fwd_function=ps.inplace2)
        ps.shared_list = [data, air_means]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        avg = np.average(air_means)
        max_avg = np.max(air_means) / avg
        min_avg = np.min(air_means) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")