def store_and_upload_example(dataset_example: DatasetExample,
                             segmentation_config: SegmentationModelBase) -> None:
    """
    Writes an example network input/output triple to Nifti files: the input image,
    the merged multi-label ground-truth map, and the predicted segmentation.

    :param dataset_example: The dataset example, with image, label and prediction, that should be written.
    :param segmentation_config: configuration information to be used for normalization and example_images_folder
    """
    out_folder = segmentation_config.example_images_folder
    os.makedirs(out_folder, exist_ok=True)

    def _output_path(suffix: str) -> str:
        # File name pattern: p<patient>_e_<epoch>_<suffix>.nii.gz
        name = f"p{dataset_example.patient_id}_e_{dataset_example.epoch}_{suffix}.nii.gz"
        return os.path.join(out_folder, name)

    io_util.store_image_as_short_nifti(image=dataset_example.image,
                                       header=dataset_example.header,
                                       file_name=_output_path(suffix="image"),
                                       args=segmentation_config)
    # Collapse the per-class binary masks into one multi-label map before saving.
    merged_labels = image_util.merge_masks(dataset_example.labels)
    io_util.store_as_ubyte_nifti(image=merged_labels,
                                 header=dataset_example.header,
                                 file_name=_output_path(suffix="label"))
    io_util.store_as_ubyte_nifti(image=dataset_example.prediction,
                                 header=dataset_example.header,
                                 file_name=_output_path(suffix="prediction"))
def test_store_image_as_short_nifti(test_output_dirs: TestOutputDirectories,
                                    norm_method: PhotometricNormalizationMethod,
                                    image_range: Any,
                                    window_level: Any) -> None:
    """
    Round-trip check for store_image_as_short_nifti: a random image is written
    to disk and the file contents are compared against the expected short-typed
    values for the given normalization method.
    """
    if window_level:
        window, level = window_level
    else:
        window, level = 400, 0
    args = SegmentationModelBase(norm_method=norm_method,
                                 window=window,
                                 level=level,
                                 should_validate=False)
    image = np.random.random_sample((1, 2, 3))
    image_shape = image.shape
    # Map the [0, 1) sample into the model's output range and truncate to short,
    # so the values are representable in the saved voxel type.
    scaled = LinearTransform.transform(data=image,
                                       input_range=(0, 1),
                                       output_range=args.output_range)
    image = scaled.astype(np.short)  # type: ignore
    header = ImageHeader(origin=(1, 1, 1),
                         direction=(1, 0, 0, 0, 1, 0, 0, 0, 1),
                         spacing=(1, 1, 1))
    nifti_name = test_output_dirs.create_file_or_folder_path(default_image_name)
    io_util.store_image_as_short_nifti(image, header, nifti_name, args)
    if norm_method == PhotometricNormalizationMethod.CtWindow:
        # CtWindow maps back into the HU range defined by (level, window).
        output_range = get_range_for_window_level(args.level, args.window)
        image = LinearTransform.transform(data=image,
                                          input_range=args.output_range,
                                          output_range=output_range)
        image = image.astype(np.short)
    else:
        image = image * 1000
    assert_nifti_content(nifti_name, image_shape, header, list(np.unique(image)), np.short)
def test_scale_and_unscale_image(test_output_dirs: TestOutputDirectories) -> None:
    """
    Test if an image in the CT value range can be recovered when we save dataset examples
    (undoing the effects of CT Windowing)
    """
    image_size = (5, 5, 5)
    spacing = (1, 2, 3)
    header = ImageHeader(origin=(0, 1, 0),
                         direction=(-1, 0, 0, 0, -1, 0, 0, 0, -1),
                         spacing=spacing)
    np.random.seed(0)
    # Random image values with mean -100, std 100. This will cover a range
    # from -400 to +200 HU
    image = np.random.normal(-100, 100, size=image_size)
    window = 200
    level = -100
    # Lower and upper bounds of the interval of raw CT values that will be retained.
    lower = level - window / 2
    upper = level + window / 2
    # Clamp everything outside the (window, level) interval: those values cannot be
    # recovered after saving and re-loading, so the expected image is the clipped one.
    expected = np.clip(image.copy(), lower, upper)
    # The image will be saved with voxel type short
    expected = expected.astype(int)
    # Apply window and level, mapping to the usual CNN input value range
    cnn_input_range = (-1, +1)
    image_windowed = LinearTransform.transform(data=image,
                                               input_range=(lower, upper),
                                               output_range=cnn_input_range)
    args = SegmentationModelBase(norm_method=PhotometricNormalizationMethod.CtWindow,
                                 output_range=cnn_input_range,
                                 window=window,
                                 level=level,
                                 should_validate=False)
    file_name = test_output_dirs.create_file_or_folder_path("scale_and_unscale_image.nii.gz")
    io_util.store_image_as_short_nifti(image_windowed, header, file_name, args)
    image_from_disk = io_util.load_nifti_image(file_name)
    # noinspection PyTypeChecker
    assert_nifti_content(file_name, image_size, header, np.unique(expected).tolist(), np.short)
    assert np.array_equal(image_from_disk.image, expected)
def store_and_upload_example(dataset_example: DatasetExample,
                             args: Optional[SegmentationModelBase],
                             images_folder: Optional[Path] = None) -> None:
    """
    Stores an example input and output of the network to Nifti files.

    :param dataset_example: The dataset example, with image, label and prediction, that should be written.
    :param args: configuration information to be used for normalization. If not None,
        args.example_images_folder overrides images_folder as the output directory.
        TODO: This should not be optional; clarify why it overrides images_folder.
    :param images_folder: The folder to which the result Nifti files should be written. If args is not None,
        the args.example_images_folder is used instead.
    """
    folder = Path("") if images_folder is None else images_folder
    if args is not None:
        folder = args.example_images_folder
    # BUGFIX: the original compared a Path against "" (always unequal, since
    # Path("") != ""), so with no folder given it called os.mkdir(Path("")) and
    # raised FileNotFoundError. It also used os.mkdir, which cannot create
    # nested directories. Use makedirs(exist_ok=True), matching the sibling
    # store_and_upload_example variant, and only when a folder was actually given.
    if str(folder):
        os.makedirs(folder, exist_ok=True)

    def create_file_name(suffix: str) -> str:
        # File name pattern: p<patient>_e_<epoch>_<suffix>.nii.gz
        fn = "p" + str(dataset_example.patient_id) + "_e_" + str(
            dataset_example.epoch) + "_" + suffix + ".nii.gz"
        fn = os.path.join(folder, fn)
        return fn

    io_util.store_image_as_short_nifti(image=dataset_example.image,
                                       header=dataset_example.header,
                                       file_name=create_file_name(suffix="image"),
                                       args=args)
    # merge multiple binary masks (one per class) into a single multi-label map image
    labels = image_util.merge_masks(dataset_example.labels)
    io_util.store_as_ubyte_nifti(image=labels,
                                 header=dataset_example.header,
                                 file_name=create_file_name(suffix="label"))
    io_util.store_as_ubyte_nifti(image=dataset_example.prediction,
                                 header=dataset_example.header,
                                 file_name=create_file_name(suffix="prediction"))