def test_metadata_round_trip(self):
    """Metadata set before saving must survive a save/load round trip."""
    # Build a dummy image stack with a known fill value
    data = th.gen_img_shared_array_with_val(42.)
    images = Images(data)
    images.metadata['message'] = 'hello, world!'

    # Write the stack out, then read it straight back in
    saver.save(images, self.output_directory)
    dataset = loader.load(self.output_directory)
    reloaded = dataset.sample

    # The metadata must come back unchanged
    self.assertEqual(reloaded.metadata, images.metadata)

    # Release the sample plus any auxiliary stacks the loader populated
    reloaded.free_memory()
    for stack in (dataset.dark_before, dataset.dark_after,
                  dataset.flat_before, dataset.flat_after):
        if stack:
            stack.free_memory()
def do_preproc(self, img_format, loader_indices=None, expected_len=None,
               saver_indices=None, data_as_stack=False):
    """Save generated images, assert the expected files exist on disk,
    reload them and check the loaded data matches what was saved.

    :param img_format: image format used for both saving and loading,
                       e.g. 'tiff'
    :param loader_indices: optional (start, end) slice passed to the loader
    :param expected_len: expected number of loaded images when
                         loader_indices is given
    :param saver_indices: optional (start, end) slice passed to the saver;
                          only affects the enumeration of the data
    :param data_as_stack: whether the data is written as a single stack file
    """
    expected_images = th.generate_images()

    # saver indices only affect the enumeration of the data
    if saver_indices:
        # crop the original images so the comparison below matches
        # exactly what was saved out
        expected_images.data = expected_images.data[
            saver_indices[0]:saver_indices[1]]

    saver.save(expected_images,
               self.output_directory,
               out_format=img_format,
               indices=saver_indices)

    self.assert_files_exist(
        os.path.join(self.output_directory, saver.DEFAULT_NAME_PREFIX),
        img_format, data_as_stack, expected_images.data.shape[0],
        saver_indices)

    # this does not load any flats or darks as they were not saved out
    dataset = loader.load(self.output_directory,
                          in_format=img_format,
                          indices=loader_indices)
    loaded_images = dataset.sample

    if loader_indices:
        assert len(loaded_images.data) == expected_len, \
            "The length of the loaded data does not " \
            "match the expected length! Expected: {0}, " \
            "Got {1}".format(expected_len, len(loaded_images.data))
        # crop the reference data to the slice the loader was asked for
        expected_images.data = expected_images.data[
            loader_indices[0]:loader_indices[1]]

    npt.assert_equal(loaded_images.data, expected_images.data)

    # release the sample plus any auxiliary stacks the loader populated
    loaded_images.free_memory()
    for stack in (dataset.dark_before, dataset.dark_after,
                  dataset.flat_before, dataset.flat_after):
        if stack:
            stack.free_memory()
def test_metadata_round_trip(self):
    """Saving and re-loading a stack must preserve its metadata."""
    # Build a dummy image stack from random data and tag it
    random_data = th.gen_img_numpy_rand()
    images = Images(random_data)
    images.metadata['message'] = 'hello, world!'

    # Round-trip the stack through the saver and loader
    saver.save(images, self.output_directory)
    reloaded = loader.load(self.output_directory).sample

    # The metadata must come back unchanged
    self.assertEqual(reloaded.metadata, images.metadata)
def do_saving(self, stack_uuid, output_dir, name_prefix, image_format,
              overwrite, progress):
    """Save the images of the stack identified by stack_uuid and record
    the filenames produced by the saver on the stack's presenter.

    Returns True to signal the operation completed.
    """
    presenter = self.get_stack_visualiser(stack_uuid).presenter
    saved_files = saver.save(presenter.images,
                             output_dir=output_dir,
                             name_prefix=name_prefix,
                             overwrite_all=overwrite,
                             out_format=image_format,
                             progress=progress)
    presenter.images.filenames = saved_files
    return True
def do_images_saving(self, images_id, output_dir, name_prefix, image_format,
                     overwrite, pixel_depth, progress):
    """Look up the images by their id and save them, recording the
    filenames produced by the saver back onto the images object.

    Returns True to signal the operation completed.
    """
    images = self.get_images_by_uuid(images_id)
    if images is None:
        # NOTE(review): the name suggests this raises, so execution should
        # not continue past this point — confirm against its definition
        self.raise_error_when_images_not_found(images_id)
    saved_files = saver.save(images,
                             output_dir=output_dir,
                             name_prefix=name_prefix,
                             overwrite_all=overwrite,
                             out_format=image_format,
                             pixel_depth=pixel_depth,
                             progress=progress)
    images.filenames = saved_files
    return True
def test_load_sample_flat_and_dark(self, img_format='tiff',
                                   loader_indices=None, expected_len=None,
                                   saver_indices=None):
    """Round-trip a sample plus flat/dark stacks through the saver and
    loader and check every stack comes back equal to what was saved."""
    sample = th.generate_images()
    flat_before = th.generate_images()
    dark_before = th.generate_images()
    flat_after = th.generate_images()
    dark_after = th.generate_images()

    # this only affects enumeration
    saver._indices = saver_indices

    # saver indices only affect the enumeration of the data
    if saver_indices:
        # crop the original images to make sure the test is checking the
        # indices that were actually saved out
        sample.data = sample.data[saver_indices[0]:saver_indices[1]]

    # save the sample into the top-level output directory
    saver.save(sample, self.output_directory, out_format=img_format)

    # save each auxiliary stack into its own sub-directory
    aux_stacks = (("flat_before", flat_before),
                  ("flat_after", flat_after),
                  ("dark_before", dark_before),
                  ("dark_after", dark_after))
    aux_dirs = {}
    for label, stack in aux_stacks:
        directory = os.path.join(self.output_directory, "imgIOTest_" + label)
        saver.save(stack, directory, out_format=img_format)
        aux_dirs[label] = directory

    data_as_stack = False
    self.assert_files_exist(
        os.path.join(self.output_directory, saver.DEFAULT_NAME_PREFIX),
        img_format, data_as_stack, sample.data.shape[0])

    # check each auxiliary stack was written out, and remember the full
    # file prefix (dir + default name prefix) for the loader paths below
    aux_prefixes = {}
    for label, stack in aux_stacks:
        prefix = os.path.join(aux_dirs[label], saver.DEFAULT_NAME_PREFIX)
        self.assert_files_exist(prefix, img_format, data_as_stack,
                                stack.data.shape[0])
        aux_prefixes[label] = prefix

    # first saved file of each stack: <prefix>_000....<format>
    zeros = ''.zfill(saver.DEFAULT_ZFILL_LENGTH)
    aux_paths = {label: f"{prefix}_{zeros}.{img_format}"
                 for label, prefix in aux_prefixes.items()}

    dataset = loader.load(self.output_directory,
                          input_path_flat_before=aux_paths["flat_before"],
                          input_path_flat_after=aux_paths["flat_after"],
                          input_path_dark_before=aux_paths["dark_before"],
                          input_path_dark_after=aux_paths["dark_after"],
                          in_format=img_format,
                          indices=loader_indices)
    loaded_images = dataset.sample

    if loader_indices:
        assert len(loaded_images.data) == expected_len, \
            "The length of the loaded data doesn't " \
            "match the expected length: {0}, " \
            "Got: {1}".format(expected_len, len(loaded_images.data))
        # crop the original images to make sure the comparison is correct
        sample.data = sample.data[loader_indices[0]:loader_indices[1]]

    npt.assert_equal(loaded_images.data, sample.data)

    # we only check the first image because they will be
    # averaged out when loaded! The initial images are only 3s
    npt.assert_equal(dataset.flat_before.data, flat_before.data)
    npt.assert_equal(dataset.dark_before.data, dark_before.data)
    npt.assert_equal(dataset.flat_after.data, flat_after.data)
    npt.assert_equal(dataset.dark_after.data, dark_after.data)

    # release the sample plus every auxiliary stack that was loaded
    loaded_images.free_memory()
    for stack in (dataset.dark_before, dataset.flat_before,
                  dataset.dark_after, dataset.flat_after):
        if stack:
            stack.free_memory()