Example #1
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float],
                                                       SensibleROI]] = None,
                    progress=None) -> Images:
        """Execute the Crop Coordinates by Region of Interest filter. This does
        NOT do any checks if the Region of interest is out of bounds!

        If the region of interest is out of bounds, the crop will **FAIL** at
        runtime.

        If the region of interest is in bounds but has overlapping coordinates,
        the crop will give back a dimension of size 0 for the coordinates that
        were wrong.

        :param images: Input data as a 3D numpy.ndarray

        :param region_of_interest: Crop the original images using these coordinates.
                                   The selection is a rectangle and the expected
                                   order is Left, Top, Right, Bottom.

        :return: The processed 3D numpy.ndarray
        """

        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)

        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        sample = images.data
        shape = (sample.shape[0], region_of_interest.height,
                 region_of_interest.width)
        if any((s < 0 for s in shape)):
            raise ValueError(
                "It seems the Region of Interest is outside of the current image dimensions.\n"
                "This can happen on the image preview right after a previous Crop Coordinates."
            )

        # allocate output first BEFORE freeing the original data,
        # otherwise it's possible to free and then fail allocation for output
        # at which point you're left with no data
        output = pu.allocate_output(images, shape)
        images.data = execute_single(sample,
                                     region_of_interest,
                                     progress,
                                     out=output)

        return images
Example #2
    def do_execute(self, images: Images):
        original = np.copy(images.data[0])

        air = SensibleROI.from_list([3, 3, 4, 4])
        result = RoiNormalisationFilter.filter_func(images, air)

        th.assert_not_equals(result.data[0], original)
Example #3
    def test_roi_changed_callback(self):
        self.roi_callback_was_called = False
        self.view.roi_updated.connect(self._roi_updated_callback)

        self.view.roi_changed_callback(SensibleROI(1, 2, 3, 4))

        self.assertTrue(self.roi_callback_was_called)
Example #4
def _execute(data, air_region: SensibleROI, cores=None, chunksize=None, progress=None):
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        with pu.temp_shared_array((img_num, 1, 1), data.dtype) as air_sums:
            # turn into a 1D array, from the 3D that is returned
            air_sums = air_sums.reshape(img_num)

            calc_sums_partial = ptsm.create_partial(_calc_sum,
                                                    fwd_function=ptsm.return_to_second,
                                                    air_left=air_region.left,
                                                    air_top=air_region.top,
                                                    air_right=air_region.right,
                                                    air_bottom=air_region.bottom)

            data, air_sums = ptsm.execute(data, air_sums, calc_sums_partial, cores, chunksize, progress=progress)

            air_sums_partial = ptsm.create_partial(_divide_by_air_sum, fwd_function=ptsm.inplace)

            data, air_sums = ptsm.execute(data, air_sums, air_sums_partial, cores, chunksize, progress=progress)

            avg = np.average(air_sums)
            max_avg = np.max(air_sums) / avg
            min_avg = np.min(air_sums) / avg

            log.info(f"Normalization by air region. " f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")
Example #5
 def dupe_stack_roi(self):
     with operation_in_progress(
             "Copying data, this may take a while",
             "The data is being copied, this may take a while.", self.view):
         new_images = self.images.copy_roi(
             SensibleROI.from_points(*self.view.image_view.get_roi()))
         self.view.parent_create_stack(new_images, self.view.name)
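As a side note on the position/size pair coming from get_roi(): below is a minimal standalone sketch of how such a pair maps to Left, Top, Right, Bottom coordinates, mirroring the arithmetic used in the ROI-average example further down. SensibleROI.from_points is assumed to perform an equivalent conversion; the Point type and helper name here are hypothetical stand-ins.

from typing import NamedTuple, Tuple

class Point(NamedTuple):
    # simplified stand-in for the view's position/size point type
    x: int
    y: int

def roi_from_points(position: Point, size: Point) -> Tuple[int, int, int, int]:
    # position is the top-left corner, size is the extent of the selection;
    # the result is in the (left, top, right, bottom) order used by SensibleROI
    return position.x, position.y, position.x + size.x, position.y + size.y

assert roi_from_points(Point(1, 2), Point(4, 3)) == (1, 2, 5, 5)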
Example #6
    def test_memory_change_acceptable(self):
        """
        The expected behaviour for the filter is to operate in place
        without using more memory.

        In reality the memory is increased by about 40MB (4 April 2017),
        but this could change in the future.

        The reason why a 10% window is given on the expected size is
        to account for any library imports that may happen.

        This will still capture if the data is doubled, which is the main goal.
        """
        images = th.generate_images()
        roi = SensibleROI.from_list([1, 1, 5, 5])

        cached_memory = get_memory_usage_linux(mb=True)[0]

        result = CropCoordinatesFilter.filter_func(images, roi)

        self.assertLess(
            get_memory_usage_linux(mb=True)[0], cached_memory * 1.1)

        expected_shape = (10, 4, 4)

        npt.assert_equal(result.data.shape, expected_shape)
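For reference, here is a numpy-only sketch of what the crop in these examples amounts to, assuming the Left, Top, Right, Bottom order documented in Example #1; it reproduces the (10, 4, 4) expected shape from the test above without the mantidimaging helpers.

import numpy as np

# hypothetical stand-in for a stack of 10 images of 8x8 pixels
stack = np.arange(10 * 8 * 8, dtype=np.float32).reshape(10, 8, 8)

# ROI in the documented order: Left, Top, Right, Bottom
left, top, right, bottom = 1, 1, 5, 5

# the crop keeps every image (axis 0) and slices rows with top:bottom
# and columns with left:right, giving a (depth, height, width) result
cropped = stack[:, top:bottom, left:right]
assert cropped.shape == (10, bottom - top, right - left)  # (10, 4, 4)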
Example #7
 def execute_wrapper(roi_field: QLineEdit) -> partial:
     try:
         roi = SensibleROI.from_list([
             int(number)
             for number in roi_field.text().strip("[").strip("]").split(",")
         ])
         return partial(CropCoordinatesFilter.filter_func,
                        region_of_interest=roi)
     except Exception as e:
         raise ValueError(f"The provided ROI string is invalid! Error: {e}")
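Below is a standalone sketch of the ROI text parsing above, assuming the user enters the ROI in the same bracketed, comma-separated Left, Top, Right, Bottom form; the helper name is hypothetical.

from typing import List

def parse_roi_text(text: str) -> List[int]:
    # mirror the parsing above: drop the surrounding brackets,
    # split on commas and convert each coordinate to an int
    return [int(number) for number in text.strip("[").strip("]").split(",")]

assert parse_roi_text("[1, 1, 5, 5]") == [1, 1, 5, 5]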
Example #8
    def filter_func(images: Images,
                    region_of_interest: Optional[Union[List[int], List[float], SensibleROI]] = None,
                    progress=None) -> Images:
        """
        Execute the Crop Coordinates by Region of Interest filter.
        This does NOT check whether the Region of Interest is out of bounds!

        If the region of interest is out of bounds, the crop will **FAIL** at
        runtime.

        If the region of interest is in bounds but has overlapping coordinates,
        the crop will give back a dimension of size 0 for the coordinates that
        were wrong.

        :param images: Input data as a 3D numpy.ndarray

        :param region_of_interest: Crop the original images using these coordinates.
                                   The selection is a rectangle and the expected
                                   order is Left, Top, Right, Bottom.

        :return: The processed 3D numpy.ndarray
        """

        if region_of_interest is None:
            region_of_interest = SensibleROI.from_list([0, 0, 50, 50])
        if isinstance(region_of_interest, list):
            region_of_interest = SensibleROI.from_list(region_of_interest)

        assert isinstance(region_of_interest, SensibleROI)

        h.check_data_stack(images)

        sample = images.data
        shape = (sample.shape[0], region_of_interest.height, region_of_interest.width)
        sample_name = images.memory_filename
        if sample_name is not None:
            images.free_memory(delete_filename=False)
        output = pu.create_array(shape, sample.dtype, sample_name)
        images.data = execute_single(sample, region_of_interest, progress, out=output)

        return images
Example #9
    def test_executed_only_volume(self):
        # Check that the filter is executed when:
        #   - valid Region of Interest is provided
        #   - no flat or dark images are provided

        roi = SensibleROI.from_list([1, 1, 5, 5])
        images = th.generate_images()
        # store a reference here so it doesn't get freed inside the filter execute
        sample = images.data
        result = CropCoordinatesFilter.filter_func(images, roi)
        expected_shape = (10, 4, 4)

        npt.assert_equal(result.data.shape, expected_shape)
        # check that the data has been modified
        th.assert_not_equals(result.data, sample)
Example #10
    def execute_wrapper(roi_field, norm_mode, flat_field):
        try:
            roi = SensibleROI.from_list([
                int(number)
                for number in roi_field.text().strip("[").strip("]").split(",")
            ])
        except Exception as e:
            raise ValueError(f"The provided ROI string is invalid! Error: {e}")

        mode = norm_mode.currentText()
        flat_images = BaseFilter.get_images_from_stack(flat_field,
                                                       "flat field")
        return partial(RoiNormalisationFilter.filter_func,
                       region_of_interest=roi,
                       normalisation_mode=mode,
                       flat_field=flat_images)
Example #11
 def _update_roi_region_avg(self) -> Optional[SensibleROI]:
     if self.image.ndim != 3:
         return None
     roi_pos, roi_size = self.get_roi()
     # image indices are in order [Z, Y, X]
     left, right = roi_pos.x, roi_pos.x + roi_size.x
     top, bottom = roi_pos.y, roi_pos.y + roi_size.y
     data = self.image[:, top:bottom, left:right]
     if data is not None:
         while data.ndim > 1:
             data = data.mean(axis=1)
         if len(self.roiCurves) == 0:
             self.roiCurves.append(self.ui.roiPlot.plot())
         self.roiCurves[0].setData(y=data, x=self.tVals)
     self.roiString = f"({left}, {top}, {right}, {bottom}) | " \
                      f"region avg={data[int(self.timeLine.value())].mean():.6f}"
     return SensibleROI(left, top, right, bottom)
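A quick numpy check of the reduction loop above, under the assumption that the stack is a plain 3D array: repeatedly averaging over axis 1 until one dimension remains gives the same per-slice result as averaging over both in-plane axes at once.

import numpy as np

stack = np.random.rand(10, 8, 8)
roi = stack[:, 1:5, 1:5]  # [:, top:bottom, left:right] as above

reduced = roi
while reduced.ndim > 1:
    reduced = reduced.mean(axis=1)

np.testing.assert_allclose(reduced, roi.mean(axis=(1, 2)))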
Example #12
    def test_copy_roi(self):
        images = generate_images()
        images.record_operation("Test", "Display", 123)
        self.assertFalse(images.is_sinograms)
        cropped_copy = images.copy_roi(SensibleROI(0, 0, 5, 5))

        self.assertEqual(cropped_copy, images.data[:, 0:5, 0:5])

        self.assertEqual(len(cropped_copy.metadata[const.OPERATION_HISTORY]), 2)
        self.assertEqual(cropped_copy.metadata[const.OPERATION_HISTORY][-1][const.OPERATION_DISPLAY_NAME],
                         CropCoordinatesFilter.filter_name)

        # remove the extra crop operation
        cropped_copy.metadata[const.OPERATION_HISTORY].pop(-1)
        # the two metadata dicts should now be equal again
        self.assertEqual(images.metadata, cropped_copy.metadata)
        self.assertNotEqual(images, cropped_copy)
Example #13
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             cores=None,
             chunksize=None,
             progress=None):
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise same number of air sums
        img_num = data.shape[0]
        air_sums = pu.create_array((img_num, ), data.dtype)

        do_calculate_air_sums = ps.create_partial(_calc_sum,
                                                  ps.return_to_second_at_i,
                                                  air_left=air_region.left,
                                                  air_top=air_region.top,
                                                  air_right=air_region.right,
                                                  air_bottom=air_region.bottom)

        ps.shared_list = [data, air_sums]
        ps.execute(do_calculate_air_sums, data.shape[0], progress, cores=cores)

        do_divide = ps.create_partial(_divide_by_air_sum,
                                      fwd_function=ps.inplace2)
        ps.shared_list = [data, air_sums]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        avg = np.average(air_sums)
        max_avg = np.max(air_sums) / avg
        min_avg = np.min(air_sums) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")
Example #14
 def current_roi(self) -> SensibleROI:
     return SensibleROI.from_points(*self.image_view.get_roi())
Example #15
def _execute(data: np.ndarray,
             air_region: SensibleROI,
             normalisation_mode: str,
             flat_field: Optional[np.ndarray],
             cores=None,
             chunksize=None,
             progress=None):
    log = getLogger(__name__)

    with progress:
        progress.update(msg="Normalization by air region")
        if isinstance(air_region, list):
            air_region = SensibleROI.from_list(air_region)

        # initialise one air mean per image
        img_num = data.shape[0]
        air_means = pu.create_array((img_num, ), data.dtype)

        do_calculate_air_means = ps.create_partial(
            _calc_mean,
            ps.return_to_second_at_i,
            air_left=air_region.left,
            air_top=air_region.top,
            air_right=air_region.right,
            air_bottom=air_region.bottom)

        ps.shared_list = [data, air_means]
        ps.execute(do_calculate_air_means,
                   data.shape[0],
                   progress,
                   cores=cores)

        if normalisation_mode == 'Preserve Max':
            air_maxs = pu.create_array((img_num, ), data.dtype)
            do_calculate_air_max = ps.create_partial(_calc_max,
                                                     ps.return_to_second_at_i)

            ps.shared_list = [data, air_maxs]
            ps.execute(do_calculate_air_max,
                       data.shape[0],
                       progress,
                       cores=cores)

            if np.isnan(air_maxs).any():
                raise ValueError("Image contains invalid (NaN) pixels")

            # calculate the before and after maximum
            init_max = air_maxs.max()
            post_max = (air_maxs / air_means).max()
            air_means *= post_max / init_max

        elif normalisation_mode == 'Stack Average':
            air_means /= air_means.mean()

        elif normalisation_mode == 'Flat Field' and flat_field is not None:
            flat_mean = pu.create_array((flat_field.shape[0], ),
                                        flat_field.dtype)
            ps.shared_list = [flat_field, flat_mean]
            ps.execute(do_calculate_air_means,
                       flat_field.shape[0],
                       progress,
                       cores=cores)
            air_means /= flat_mean.mean()

        if np.isnan(air_means).any():
            raise ValueError("Air region contains invalid (NaN) pixels")

        do_divide = ps.create_partial(_divide_by_air, fwd_function=ps.inplace2)
        ps.shared_list = [data, air_means]
        ps.execute(do_divide, data.shape[0], progress, cores=cores)

        avg = np.average(air_means)
        max_avg = np.max(air_means) / avg
        min_avg = np.min(air_means) / avg

        log.info(
            f"Normalization by air region. "
            f"Average: {avg}, max ratio: {max_avg}, min ratio: {min_avg}.")