def test_executed_2d(self):
    """Filtering along dimension 2 must modify the image data."""
    images = th.generate_images()
    unfiltered = images.copy()
    result = RemoveStripeFilteringFilter.filter_func(images, filtering_dim=2)
    # the filtered data must differ from the untouched copy
    th.assert_not_equals(result.data, unfiltered.data)
def test_fail_with_normal_array_return_to_second(self):
    """
    This test does not use shared arrays and will not change the data.
    This behaviour is intended and is verified by the final assertion:
    the second returned array must NOT contain the computed sum.
    """
    # create data as normal nd array
    img = th.gen_img_numpy_rand()
    img2nd = th.gen_img_numpy_rand()

    # get the expected as usual
    expected = img + img2nd

    # make sure it hasnt changed the original array
    assert expected[0, 0, 0] != img[0, 0, 0]
    assert expected[1, 0, 0] != img[1, 0, 0]
    assert expected[0, 4, 0] != img[0, 4, 0]
    assert expected[6, 0, 1] != img[6, 0, 1]

    # create partial
    f = ptsm.create_partial(return_from_func, fwd_function=ptsm.return_to_second, add_arg=5)

    # execute parallel
    res1, res2 = ptsm.execute(img, img2nd, f)

    # compare results: inputs come back unchanged, and the second
    # result never picked up the expected (shared-memory) sum
    npt.assert_equal(res1, img)
    npt.assert_equal(res2, img2nd)
    th.assert_not_equals(res2, expected)
def test_executed(self):
    """Applying the filter must change the image data."""
    images = th.generate_images()
    before = images.copy()
    result = RemoveAllStripesFilter.filter_func(images)
    # filtered output must differ from the pre-filter copy
    th.assert_not_equals(result.data, before.data)
def do_execute(self, images: Images):
    """Run the median filter and verify the data was modified."""
    kernel_size = 3
    border_mode = 'reflect'
    # snapshot of the first slice before filtering
    first_slice_before = np.copy(images.data[0])
    result = MedianFilter.filter_func(images, kernel_size, border_mode)
    th.assert_not_equals(result.data, first_slice_before)
def do_execute(self, images: Images):
    """ROI-normalise with a SensibleROI and verify the first slice changed."""
    slice_before = np.copy(images.data[0])
    air_region = SensibleROI.from_list([3, 3, 4, 4])
    result = RoiNormalisationFilter.filter_func(images, air_region)
    th.assert_not_equals(result.data[0], slice_before)
def test_fail_with_normal_array_fwd_func_second_2d(self):
    """Plain (non-shared) arrays must be left untouched by parallel execution."""
    # a first dimension of 11 forces the parallel code path
    first = th.gen_img_numpy_rand((11, 10, 10))
    first_backup = np.copy(first)
    second = th.gen_img_numpy_rand((11, 10, 10))
    second_backup = np.copy(second)
    second = second[0]

    # the result the in-place add WOULD produce on shared arrays
    expected = first + second

    # sanity: the sum differs from the input at sampled points
    assert expected[0, 0, 0] != first[0, 0, 0]
    assert expected[1, 0, 0] != first[1, 0, 0]
    assert expected[0, 4, 0] != first[0, 4, 0]
    assert expected[6, 0, 1] != first[6, 0, 1]

    partial_func = ptsm.create_partial(add_inplace, fwd_function=ptsm.inplace_second_2d, add_arg=5)
    ptsm.execute(first, second, partial_func)

    # neither input may have picked up the expected result,
    # and both must still equal their pre-execution backups
    th.assert_not_equals(first, expected)
    th.assert_not_equals(second, expected)
    npt.assert_equal(first, first_backup)
    npt.assert_equal(second, second_backup[0])
def test_fail_with_normal_array_return_to_first(self):
    """Non-shared arrays are not mutated; the first result must not hold the sum."""
    # plain numpy arrays, not shared memory
    first = th.gen_img_numpy_rand()
    second = th.gen_img_numpy_rand()

    expected = first + second

    # sanity: the sum differs from the input at sampled points
    assert expected[0, 0, 0] != first[0, 0, 0]
    assert expected[1, 0, 0] != first[1, 0, 0]
    assert expected[0, 4, 0] != first[0, 4, 0]
    assert expected[6, 0, 1] != first[6, 0, 1]

    partial_func = ptsm.create_partial(return_from_func, fwd_function=ptsm.return_to_first, add_arg=5)
    res1, res2 = ptsm.execute(first, second, partial_func)

    # inputs come back unchanged and the first result is not the sum
    npt.assert_equal(res1, first)
    npt.assert_equal(res2, second)
    th.assert_not_equals(res1, expected)
def test_divide(self):
    """Dividing by a non-zero value must change the data."""
    images = th.generate_images()
    data_before = np.copy(images.data)
    result = self.do_divide(images, 0.005)
    th.assert_not_equals(result.data, data_before)
def test_find_shift():
    """_find_shift must update the shift output array in place."""
    images = generate_images((10, 10, 10))
    candidate_range = get_search_range(images.width)
    correlation_errors = np.random.rand(len(candidate_range), images.height)
    shift = np.zeros((images.height, ))
    _find_shift(images, candidate_range, correlation_errors, shift)
    # an all-zero vector would mean no shift was written
    assert_not_equals(shift, np.zeros((images.height, )))
def test_executed_sinogram(self):
    """The filter must also modify data flagged as sinograms."""
    images = th.generate_images(shape=(1, 10, 20))
    images._is_sinograms = True
    before = images.copy()
    result = RemoveAllStripesFilter.filter_func(images)
    th.assert_not_equals(result.data, before.data)
def do_execute(self):
    """ROI-normalise with a plain list ROI and verify the first slice changed."""
    images = th.generate_images()
    slice_before = np.copy(images.data[0])
    air_region = [3, 3, 4, 4]
    result = RoiNormalisationFilter.filter_func(images, air_region)
    th.assert_not_equals(result.data[0], slice_before)
def do_stripe_removal(self, wf=None, ti=None, sf=None):
    """Run StripeRemovalFilter with the given options and check the data changed.

    Fix: the post-condition assertion was duplicated verbatim; the
    redundant second call has been removed.
    """
    images = th.generate_images()
    control = images.copy()
    result = StripeRemovalFilter.filter_func(images, wf, ti, sf)
    th.assert_not_equals(result.data, control.data)
def test_executed(self):
    """RemoveDeadStripesFilter with a small kernel must change the data."""
    images = th.generate_images()
    before = images.copy()
    # size=3 guarantees a change: the default kernel is bigger than the
    # small test images and would otherwise leave the data untouched
    result = RemoveDeadStripesFilter.filter_func(images, size=3)
    th.assert_not_equals(result.data, before.data)
def test_executed_no_helper_gpu(self):
    """The non-CPU path (force_cpu=False) must modify the data."""
    images = th.generate_images()
    kernel_size = 3
    border_mode = 'reflect'
    first_slice_before = np.copy(images.data[0])
    result = MedianFilter.filter_func(images, kernel_size, border_mode, force_cpu=False)
    th.assert_not_equals(result.data, first_slice_before)
def test_proj180deg(self):
    """proj180deg falls back to the middle projection until one is loaded.

    Fix: the result of ``np.array_equal`` was previously discarded, so the
    equality check never actually ran; it is now asserted.
    """
    images = generate_images((10, 100, 350))
    # no explicit 180 deg projection has been set yet
    self.assertIsNone(images._proj180deg)
    expected_projection = images.projection(images.num_projections // 2)
    # np.array_equal returns a bool — assert on it so the check is enforced
    self.assertTrue(np.array_equal(images.proj180deg.data, expected_projection))
    # simulate a pre-loaded 180 deg projection
    images._proj180deg = generate_images((1, 100, 350))
    assert_not_equals(images.proj180deg.data, expected_projection)
def test_roi_normalisation_performs_rescale(self):
    """Normalisation changes the data but preserves the overall maximum."""
    images = th.generate_images()
    max_before = images.data.max()
    slice_before = np.copy(images.data[0])
    air_region = [3, 3, 4, 4]
    result = RoiNormalisationFilter.filter_func(images, air_region)
    th.assert_not_equals(result.data[0], slice_before)
    # the rescale step should bring the data back to the original maximum
    self.assertAlmostEqual(result.data.max(), max_before, places=6)
def test_executed(self):
    """The outliers filter must modify the data."""
    images = th.generate_images()
    diff_radius = 8
    diff_threshold = 0.1
    data_before = np.copy(images.data)
    result = OutliersFilter.filter_func(images, diff_threshold, diff_radius, cores=1)
    th.assert_not_equals(result.data, data_before)
def test_not_executed(self):
    # NOTE(review): the name says "not executed", yet the assertion below
    # (assert_not_equals) requires the data to HAVE changed. Confirm the
    # intent — with size/mode of None this likely should assert equality
    # with the original data, or expect filter_func to raise. TODO confirm.
    images = th.generate_images()
    size = None
    mode = None
    # snapshot of the first slice before running the filter
    original = np.copy(images.data[0])
    result = MedianFilter.filter_func(images, size, mode)
    th.assert_not_equals(result.data, original)
def test_execute():
    """Monitor normalisation must query the log counts and change the data."""
    images = generate_images()
    images._log_file = mock.Mock()
    images._log_file.counts = mock.Mock(
        return_value=Counts(np.sin(np.linspace(0, 1, images.num_projections))))
    before = images.copy()

    MonitorNormalisation.filter_func(images)

    # the counts must have been read exactly once
    images._log_file.counts.assert_called_once()
    assert_not_equals(before.data, images.data)
def test_executed_parallel(self):
    """The Gaussian filter must modify the data when run in parallel."""
    images = th.generate_images()
    kernel_size = 3
    border_mode = 'reflect'
    gaussian_order = 1
    first_slice_before = np.copy(images.data[0])
    result = GaussianFilter.filter_func(images, kernel_size, border_mode, gaussian_order)
    th.assert_not_equals(result.data, first_slice_before)
def test_executed_no_helper_seq(self):
    """The median filter must modify the data when run sequentially.

    Fix: multiprocessing is now restored in a ``finally`` block so a
    failure inside ``filter_func`` cannot leave the test helpers stuck
    in sequential mode for subsequent tests.
    """
    images = th.generate_images()
    size = 3
    mode = 'reflect'
    original = np.copy(images.data[0])
    th.switch_mp_off()
    try:
        result = MedianFilter.filter_func(images, size, mode)
    finally:
        # restore parallel mode even if filter_func raises
        th.switch_mp_on()
    th.assert_not_equals(result.data, original)
def test_executed_only_volume(self):
    """Cropping runs when given a valid ROI and no flat or dark images."""
    roi = SensibleROI.from_list([1, 1, 5, 5])
    images = th.generate_images()
    # keep a reference so the array isn't freed inside the filter execute
    sample = images.data
    result = CropCoordinatesFilter.filter_func(images, roi)
    # a 4x4 ROI across all 10 slices
    npt.assert_equal(result.data.shape, (10, 4, 4))
    # the cropped output must differ from the original data
    th.assert_not_equals(result.data, sample)
def test_roi_normalisation_stack_average(self):
    """Stack-average mode equalises the air region across the stack."""
    air = [3, 3, 6, 8]
    images = th.generate_images([10, 20, 30], seed=2021)
    # perturb two slices so normalisation has something to correct
    images.data[2] *= 2
    images.data[3] *= 0.5
    air_before = np.copy(images.data[:, air[1]:air[3], air[0]:air[2]])
    slice_before = np.copy(images.data[0])

    result = RoiNormalisationFilter.filter_func(images, air, "Stack Average")

    air_after = np.copy(result.data[:, air[1]:air[3], air[0]:air[2]])
    th.assert_not_equals(result.data[0], slice_before)
    # the overall air mean is preserved ...
    self.assertAlmostEqual(air_before.mean(), air_after.mean(), places=6)
    # ... and individual slices are brought to the same air level
    self.assertAlmostEqual(air_after[0].mean(), air_after[1].mean(), places=6)
def test_fail_with_normal_array_fwd_func(self):
    """A plain numpy array must come through psm.execute unmodified."""
    # plain (non-shared) array; a first dimension of 11 triggers parallel
    img = th.gen_img_numpy_rand((11, 10, 10))
    backup = np.copy(img)
    add_arg = 5
    expected = img + add_arg

    # sanity: the expected result differs at sampled points
    assert expected[0, 0, 0] != img[0, 0, 0]
    assert expected[1, 0, 0] != img[1, 0, 0]
    assert expected[0, 4, 0] != img[0, 4, 0]
    assert expected[6, 0, 1] != img[6, 0, 1]

    partial_func = psm.create_partial(return_from_func, fwd_func=psm.return_fwd_func, add_arg=add_arg)
    res = psm.execute(img, partial_func)

    # the result never picked up the sum, and the input is untouched
    th.assert_not_equals(res, expected)
    npt.assert_equal(img, backup)
def test_roi_normalisation_to_flat(self):
    """Flat-field mode normalises the air region to the flat images' level."""
    air = [3, 3, 6, 8]
    images = th.generate_images([10, 20, 30], seed=2021)
    flat_field = th.generate_images([2, 20, 30], seed=2021)
    # darken every other slice so there is something to normalise
    images.data[::2] *= 0.5
    flat_air = np.copy(flat_field.data[:, air[1]:air[3], air[0]:air[2]])
    slice_before = np.copy(images.data[0])

    result = RoiNormalisationFilter.filter_func(images, air, "Flat Field", flat_field)

    air_after = np.copy(result.data[:, air[1]:air[3], air[0]:air[2]])
    th.assert_not_equals(result.data[0], slice_before)
    # the air level matches the flat field ...
    self.assertAlmostEqual(flat_air.mean(), air_after.mean(), places=6)
    # ... and is consistent across slices
    self.assertAlmostEqual(air_after[0].mean(), air_after[1].mean(), places=6)
def test_memory_change_acceptable(self):
    """
    Expected behaviour for the filter is to be done in place without
    using more memory. In reality the memory is increased by about 40MB
    (4 April 2017), but this could change in the future.

    A 10% window on the expected size accounts for any library imports
    that may happen during execution; it will still catch the data
    being doubled, which is the main goal of this check.
    """
    images = th.generate_images()
    memory_before = get_memory_usage_linux(kb=True)[0]
    data_before = np.copy(images.data)

    result = MinusLogFilter.filter_func(images, minus_log=True)

    self.assertLess(get_memory_usage_linux(kb=True)[0], memory_before * 1.1)
    th.assert_not_equals(result.data, data_before)