Example #1
0
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float],
                                                       SensibleROI]] = None,
                    progress=None) -> Images:
        """Crop the image stack to a Region of Interest.

        No bounds checking is performed: an out-of-bounds ROI will **FAIL**
        at runtime, and an in-bounds ROI with overlapping coordinates
        produces a 0-sized dimension for the coordinates that were wrong.

        :param images: Input data as a 3D numpy.ndarray
        :param region_of_interest: Crop original images using these
                                   coordinates. The selection is a rectangle
                                   and expected order is - Left Top Right
                                   Bottom.
        :return: The processed 3D numpy.ndarray
        """
        # Normalise the ROI argument into a SensibleROI instance.
        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)
        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        data = images.data
        # Same number of images, cropped to the ROI's height/width.
        crop_shape = (data.shape[0], region_of_interest.height,
                      region_of_interest.width)
        if min(crop_shape) < 0:
            raise ValueError(
                "It seems the Region of Interest is outside of the current image dimensions.\n"
                "This can happen on the image preview right after a previous Crop Coordinates."
            )

        # Allocate the output before releasing the original data; the other
        # way around could free the input and then fail the allocation,
        # leaving no data at all.
        out_array = pu.allocate_output(images, crop_shape)
        images.data = execute_single(data, region_of_interest, progress, out=out_array)

        return images
    def do_execute(self, images: Images):
        """Run ROI normalisation and check that the first image was modified."""
        before = np.copy(images.data[0])
        roi = SensibleROI.from_list([3, 3, 4, 4])

        normalised = RoiNormalisationFilter.filter_func(images, roi)

        th.assert_not_equals(normalised.data[0], before)
Example #3
0
def _execute(data, air_region: SensibleROI, cores=None, chunksize=None, progress=None):
    """Normalise every image in the stack by its air-region intensity sum.

    A per-image sum over ``air_region`` is computed in parallel, then each
    image is divided by its own sum in place (see ``_calc_sum`` /
    ``_divide_by_air_sum``).

    :param data: 3D stack of images; image index is axis 0
    :param air_region: ROI used as the normalisation ("air") region
    :param cores: number of cores for the parallel execution
    :param chunksize: chunk size for the parallel execution
    :param progress: Progress instance, used as a context manager for reporting
    """
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        # Accept a plain [left, top, right, bottom] list as well.
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        # Temporary shared array so the parallel workers can write results back.
        with pu.temp_shared_array((img_num, 1, 1), data.dtype) as air_sums:
            # turn into a 1D array, from the 3D that is returned
            air_sums = air_sums.reshape(img_num)

            # Per-image air sum; results are gathered into the second shared
            # array (air_sums), presumably via ptsm.return_to_second — TODO confirm.
            calc_sums_partial = ptsm.create_partial(_calc_sum,
                                                    fwd_function=ptsm.return_to_second,
                                                    air_left=air_region.left,
                                                    air_top=air_region.top,
                                                    air_right=air_region.right,
                                                    air_bottom=air_region.bottom)

            data, air_sums = ptsm.execute(data, air_sums, calc_sums_partial, cores, chunksize, progress=progress)

            # Divide each image by its air sum, in place (ptsm.inplace).
            air_sums_partial = ptsm.create_partial(_divide_by_air_sum, fwd_function=ptsm.inplace)

            data, air_sums = ptsm.execute(data, air_sums, air_sums_partial, cores, chunksize, progress=progress)

            # Log the spread of the air sums as a sanity check.
            avg = np.average(air_sums)
            max_avg = np.max(air_sums) / avg
            min_avg = np.min(air_sums) / avg

            log.info(f"Normalization by air region. " f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")
Example #4
0
    def test_memory_change_acceptable(self):
        """
        Expected behaviour for the filter is to be done in place
        without using more memory.

        In reality the memory is increased by about 40MB (4 April 2017),
        but this could change in the future.

        The reason why a 10% window is given on the expected size is
        to account for any library imports that may happen.

        This will still capture if the data is doubled, which is the main goal.
        """
        images = th.generate_images()
        roi = SensibleROI.from_list([1, 1, 5, 5])

        memory_before = get_memory_usage_linux(mb=True)[0]
        result = CropCoordinatesFilter.filter_func(images, roi)
        memory_after = get_memory_usage_linux(mb=True)[0]

        # Allow at most a 10% increase over the pre-filter usage.
        self.assertLess(memory_after, memory_before * 1.1)

        npt.assert_equal(result.data.shape, (10, 4, 4))
 def execute_wrapper(roi_field: QLineEdit) -> partial:
     """Build a partial of CropCoordinatesFilter.filter_func from the GUI ROI field.

     :param roi_field: line edit holding the ROI text, e.g. "[1, 1, 5, 5]"
     :return: partial with region_of_interest bound to the parsed ROI
     :raises ValueError: if the field text cannot be parsed into integers
     """
     try:
         # Accept the ROI with or without surrounding brackets.
         roi = SensibleROI.from_list([
             int(number)
             for number in roi_field.text().strip("[").strip("]").split(",")
         ])
     except Exception as e:
         # Chain the original parse error so the root cause stays visible.
         raise ValueError(f"The provided ROI string is invalid! Error: {e}") from e
     # Construct the partial outside the try so only parse failures are
     # reported as invalid-ROI errors.
     return partial(CropCoordinatesFilter.filter_func,
                    region_of_interest=roi)
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float], SensibleROI]] = None,
                    progress=None) -> Images:
        """
        Execute the Crop Coordinates by Region of Interest filter.
        This does NOT do any checks if the Region of interest is out of bounds!

        If the region of interest is out of bounds, the crop will **FAIL** at
        runtime.

        If the region of interest is in bounds, but has overlapping coordinates
        the crop gives back a 0 shape for the coordinates that were wrong.

        :param images: Input data as a 3D numpy.ndarray

        :param region_of_interest: Crop original images using these coordinates.
                                   The selection is a rectangle and expected order
                                   is - Left Top Right Bottom.

        :return: The processed 3D numpy.ndarray
        """

        # Normalise the ROI argument: default to a 50x50 ROI at the origin,
        # and convert a plain list into a SensibleROI.
        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)

        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        sample = images.data
        # Output shape: same number of images, cropped to the ROI size.
        shape = (sample.shape[0], region_of_interest.height, region_of_interest.width)
        sample_name = images.memory_filename
        # NOTE(review): the backing memory is freed (keeping the filename)
        # BEFORE the output is allocated — presumably so pu.create_array can
        # reuse the same shared-memory file, while `sample` still references
        # the data. TODO confirm this is safe if the allocation fails.
        if sample_name is not None:
            images.free_memory(delete_filename=False)
        output = pu.create_array(shape, sample.dtype, sample_name)
        images.data = execute_single(sample, region_of_interest, progress, out=output)

        return images
    def test_executed_only_volume(self):
        """The filter executes with a valid ROI and no flat or dark images."""
        roi = SensibleROI.from_list([1, 1, 5, 5])
        images = th.generate_images()
        # Keep a reference so the original data isn't freed inside the filter.
        original_data = images.data

        result = CropCoordinatesFilter.filter_func(images, roi)

        npt.assert_equal(result.data.shape, (10, 4, 4))
        # the output must differ from the input
        th.assert_not_equals(result.data, original_data)
    def execute_wrapper(roi_field, norm_mode, flat_field):
        """Build a partial of RoiNormalisationFilter.filter_func from the GUI widgets."""
        try:
            # Accept the ROI with or without surrounding brackets.
            numbers = roi_field.text().strip("[").strip("]").split(",")
            roi = SensibleROI.from_list([int(n) for n in numbers])
        except Exception as e:
            raise ValueError(f"The provided ROI string is invalid! Error: {e}")

        mode = norm_mode.currentText()
        flat_images = BaseFilter.get_images_from_stack(flat_field,
                                                       "flat field")
        return partial(RoiNormalisationFilter.filter_func,
                       region_of_interest=roi,
                       normalisation_mode=mode,
                       flat_field=flat_images)
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             cores=None,
             chunksize=None,
             progress=None):
    """Normalise every image in the stack by its air-region intensity sum.

    A per-image sum over ``air_region`` is computed in parallel, then each
    image is divided by its own sum in place (see ``_calc_sum`` /
    ``_divide_by_air_sum``).

    :param data: 3D stack of images; image index is axis 0
    :param air_region: ROI used as the normalisation ("air") region
    :param cores: number of cores for the parallel execution
    :param chunksize: chunk size (unused here — TODO confirm ps.execute ignores it)
    :param progress: Progress instance, used as a context manager for reporting
    """
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        # Accept a plain [left, top, right, bottom] list as well.
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        air_sums = pu.create_array((img_num, ), data.dtype)

        # Per-image air sum; results are gathered into air_sums, presumably
        # via ps.return_to_second_at_i (second shared array, index i).
        do_calculate_air_sums = ps.create_partial(_calc_sum,
                                                  ps.return_to_second_at_i,
                                                  air_left=air_region.left,
                                                  air_top=air_region.top,
                                                  air_right=air_region.right,
                                                  air_bottom=air_region.bottom)

        # ps communicates inputs/outputs through the module-level shared_list;
        # the assignment order here is load-bearing.
        ps.shared_list = [data, air_sums]
        ps.execute(do_calculate_air_sums, data.shape[0], progress, cores=cores)

        # Divide each image by its air sum, in place (ps.inplace2).
        do_divide = ps.create_partial(_divide_by_air_sum,
                                      fwd_function=ps.inplace2)
        ps.shared_list = [data, air_sums]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        # Log the spread of the air sums as a sanity check.
        avg = np.average(air_sums)
        max_avg = np.max(air_sums) / avg
        min_avg = np.min(air_sums) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             normalisation_mode: str,
             flat_field: Optional[np.ndarray],
             cores=None,
             chunksize=None,
             progress=None):
    """Normalise every image in the stack by its air-region mean intensity.

    A per-image mean over ``air_region`` is computed in parallel; the means
    are then rescaled according to ``normalisation_mode`` before each image
    is divided by its (rescaled) mean in place.

    :param data: 3D stack of images; image index is axis 0
    :param air_region: ROI used as the normalisation ("air") region
    :param normalisation_mode: one of 'Preserve Max', 'Stack Average' or
                               'Flat Field' (any other value leaves the raw
                               per-image means unscaled)
    :param flat_field: flat-field stack, only used in 'Flat Field' mode
    :param cores: number of cores for the parallel execution
    :param chunksize: chunk size (unused here — TODO confirm ps.execute ignores it)
    :param progress: Progress instance, used as a context manager for reporting
    :raises ValueError: if the air region or the images contain NaN pixels
    """
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        # Accept a plain [left, top, right, bottom] list as well.
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        air_means = pu.create_array((img_num, ), data.dtype)

        # Per-image air mean; results are gathered into air_means, presumably
        # via ps.return_to_second_at_i (second shared array, index i).
        do_calculate_air_means = ps.create_partial(
            _calc_mean,
            ps.return_to_second_at_i,
            air_left=air_region.left,
            air_top=air_region.top,
            air_right=air_region.right,
            air_bottom=air_region.bottom)

        # ps communicates inputs/outputs through the module-level shared_list;
        # the assignment order here is load-bearing.
        ps.shared_list = [data, air_means]
        ps.execute(do_calculate_air_means,
                   data.shape[0],
                   progress,
                   cores=cores)

        if normalisation_mode == 'Preserve Max':
            # Per-image maxima, computed over the whole image (no ROI kwargs).
            air_maxs = pu.create_array((img_num, ), data.dtype)
            do_calculate_air_max = ps.create_partial(_calc_max,
                                                     ps.return_to_second_at_i)

            ps.shared_list = [data, air_maxs]
            ps.execute(do_calculate_air_max,
                       data.shape[0],
                       progress,
                       cores=cores)

            if np.isnan(air_maxs).any():
                raise ValueError("Image contains invalid (NaN) pixels")

            # calculate the before and after maximum
            # Rescale the divisors so the stack's maximum value is preserved
            # after the division — presumably the intent of this mode; the
            # ratio maths follows from max(data/mean) = max(maxs/means).
            init_max = air_maxs.max()
            post_max = (air_maxs / air_means).max()
            air_means *= post_max / init_max

        elif normalisation_mode == 'Stack Average':
            # Scale divisors relative to the stack-wide average air mean.
            air_means /= air_means.mean()

        elif normalisation_mode == 'Flat Field' and flat_field is not None:
            # Reuse the air-mean partial (same ROI) on the flat-field stack,
            # then scale the divisors by the flat field's mean air intensity.
            flat_mean = pu.create_array((flat_field.shape[0], ),
                                        flat_field.dtype)
            ps.shared_list = [flat_field, flat_mean]
            ps.execute(do_calculate_air_means,
                       flat_field.shape[0],
                       progress,
                       cores=cores)
            air_means /= flat_mean.mean()

        if np.isnan(air_means).any():
            raise ValueError("Air region contains invalid (NaN) pixels")

        # Divide each image by its (rescaled) air mean, in place (ps.inplace2).
        do_divide = ps.create_partial(_divide_by_air, fwd_function=ps.inplace2)
        ps.shared_list = [data, air_means]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        # Log the spread of the air means as a sanity check.
        avg = np.average(air_means)
        max_avg = np.max(air_means) / avg
        min_avg = np.min(air_means) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")