def load_data(prediction_filenames, ground_truth_filenames, uncertainty_filenames, target_shape=(512, 512, 260)):
    """Load prediction, ground-truth and uncertainty volumes and resample them to a common shape.

    Args:
        prediction_filenames: NIfTI filenames of the predictions.
        ground_truth_filenames: NIfTI filenames of the ground truths (parallel list).
        uncertainty_filenames: NIfTI filenames of the uncertainty maps (parallel list).
        target_shape: Shape every volume is interpolated to. Default (512, 512, 260)
            preserves the previously hard-coded value.

    Returns:
        Tuple (predictions, ground_truths, uncertainties) as stacked numpy arrays.
    """
    print("Loading data...")
    predictions, ground_truths, uncertainties = [], [], []
    for pred_name, gt_name, unc_name in tqdm(
            list(zip(prediction_filenames, ground_truth_filenames, uncertainty_filenames))):
        # float16 keeps the memory footprint of many full-size volumes manageable
        prediction = utils.load_nifty(pred_name)[0].astype(np.float16)
        ground_truth = utils.load_nifty(gt_name)[0].astype(np.float16)
        uncertainty = utils.load_nifty(unc_name)[0].astype(np.float16)
        # uncertainty maps may contain NaNs -> replace them with 0 before interpolation
        uncertainty = np.nan_to_num(uncertainty)
        # mask=True for label volumes, mask=False for the continuous uncertainty map
        # (presumably selects nearest-neighbour vs. linear interpolation — confirm in utils)
        prediction = utils.interpolate(prediction, target_shape, mask=True)
        ground_truth = utils.interpolate(ground_truth, target_shape, mask=True)
        uncertainty = utils.interpolate(uncertainty, target_shape, mask=False)
        predictions.append(prediction)
        ground_truths.append(ground_truth)
        uncertainties.append(uncertainty)
    print("Finished loading data")
    return np.asarray(predictions), np.asarray(ground_truths), np.asarray(uncertainties)
def recommend_slices_single_case(i, prediction_filenames, uncertainty_filenames, gt_filenames, save_path, find_best_slices_func, num_slices, slice_gap, default_size):
    """Recommend annotation slices for case *i* and save the slice-filtered ground truth.

    Args:
        i: Index into the three parallel filename lists.
        find_best_slices_func: Strategy returning per-axis slice indices from
            (prediction, uncertainty, num_slices, adapted_slice_gap).

    Returns:
        Tuple (recommended_slices, gt_slices): total number of recommended
        slices over all three axes, and the slice count computed from the
        ground truth.
    """
    uncertainty, affine, spacing, header = utils.load_nifty(uncertainty_filenames[i])
    prediction = utils.load_nifty(prediction_filenames[i])[0]
    gt = utils.load_nifty(gt_filenames[i])[0]
    adapted_slice_gap = adapt_slice_gap(uncertainty, slice_gap, default_size)
    # axis 0 = sagittal, axis 1 = coronal, axis 2 = axial
    sagittal_indices, coronal_indices, axial_indices = find_best_slices_func(
        prediction, uncertainty, num_slices, adapted_slice_gap)
    recommended_slices = len(sagittal_indices) + len(coronal_indices) + len(axial_indices)
    gt_slices = comp_gt_slices(gt)
    # keep the ground truth only on the recommended slices
    filtered_mask = filter_mask(gt, sagittal_indices, coronal_indices, axial_indices)
    out_name = save_path + os.path.basename(uncertainty_filenames[i])[:-7] + "_0001.nii.gz"
    utils.save_nifty(out_name, filtered_mask, affine, spacing, header, is_mask=True)
    return recommended_slices, gt_slices
def add_to_images_or_masks(image_path, guiding_mask_path, save_path, is_mask=False):
    """Stack each image with its guiding mask as a trailing channel and save the result.

    The two directories are expected to contain parallel file lists; the output
    keeps the image's basename, affine, spacing and header.
    """
    image_filenames = utils.load_filenames(image_path)
    guiding_mask_filenames = utils.load_filenames(guiding_mask_path)
    for image_filename, guiding_mask_filename in tqdm(list(zip(image_filenames, guiding_mask_filenames))):
        image, affine, spacing, header = utils.load_nifty(image_filename)
        guiding_mask = utils.load_nifty(guiding_mask_filename)[0]
        combined = np.stack([image, guiding_mask], axis=-1)
        utils.save_nifty(save_path + os.path.basename(image_filename), combined, affine, spacing, header, is_mask=is_mask)
def round_mask(filename, save_path):
    """Round a mask to integer labels (uint8) and save it under save_path.

    NOTE(review): a single-argument round_mask is defined later in this file
    and shadows this one at import time. This variant also saves with
    header=None (the other passes the loaded header) — confirm both are
    intended before unifying.
    """
    mask, affine, spacing, header = utils.load_nifty(filename)
    rounded = np.rint(mask).astype(np.uint8)
    utils.save_nifty(save_path + os.path.basename(filename), rounded, affine, spacing, None, is_mask=True)
def evaluate_label(prediction_filenames, ground_truth_filenames, uncertainty_filenames, label):
    """Score one label over all cases at each threshold and average across cases.

    Returns:
        Dict with keys "label", "thresholds" and "threshold_scores"
        (the per-threshold scores averaged over all cases).
    """
    thresholds = [0.5]  # [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
    per_case_scores = []
    for pred_name, gt_name, unc_name in tqdm(
            list(zip(prediction_filenames, ground_truth_filenames, uncertainty_filenames))):
        # float16 keeps the memory footprint of the loaded volumes small
        prediction = utils.load_nifty(pred_name)[0].astype(np.float16)
        ground_truth = utils.load_nifty(gt_name)[0].astype(np.float16)
        uncertainty = utils.load_nifty(unc_name)[0].astype(np.float16)
        per_case_scores.append(
            evaluate_case(prediction, ground_truth, uncertainty, thresholds, label))
    mean_scores = np.mean(np.asarray(per_case_scores), axis=0)
    return {
        "label": label,
        "thresholds": thresholds,
        "threshold_scores": mean_scores
    }
def remove_label(load_path, save_path, labels_to_remove):
    """Zero out the given labels in every mask under load_path and save to save_path.

    Args:
        load_path: Directory of input masks.
        save_path: Directory the cleaned masks are written to (same basenames).
        labels_to_remove: Iterable of label values to set to background (0).
    """
    load_path = utils.fix_path(load_path)
    save_path = utils.fix_path(save_path)
    for filename in tqdm(utils.load_filenames(load_path)):
        mask, affine, spacing, header = utils.load_nifty(filename)
        for label in labels_to_remove:
            mask[mask == label] = 0
        cleaned = np.rint(mask).astype(int)
        utils.save_nifty(save_path + os.path.basename(filename), cleaned, affine, spacing, header)
def select_rois(img_dir, uncertainty_mask_dir, save_dir, window_size_percentage=0.02, window_per_border=3, max_rois=5, min_z_distance_percentage=0.1, max_iou=0.1):
    """Select up to max_rois regions of interest per image from its uncertainty mask.

    For every image/uncertainty pair, slides windows of several shapes over the
    reoriented uncertainty mask, ranks the candidate ROIs, filters them by
    count, z-distance and overlap, then extracts and saves them.

    Args:
        img_dir: Directory of input images.
        uncertainty_mask_dir: Directory of matching uncertainty masks (parallel list).
        save_dir: Output directory; one subdirectory per case is created by save_rois.
        window_size_percentage: Window size relative to the mean mask size.
        window_per_border: Number of window shapes generated per border.
        max_rois: Maximum number of ROIs kept per image.
        min_z_distance_percentage: Minimum z-separation between kept ROIs,
            relative to the mask's z-extent (presumably — confirm in filter_rois).
        max_iou: Maximum allowed overlap (IoU) between kept ROIs.
    """
    imgs_filenames = utils.load_filenames(img_dir)
    uncertainty_masks_filenames = utils.load_filenames(uncertainty_mask_dir)
    # Load and normalize every uncertainty mask up front
    uncertainty_masks = [utils.load_nifty(uncertainty_mask_filename)[0] for uncertainty_mask_filename in uncertainty_masks_filenames]
    uncertainty_masks = [utils.normalize(uncertainty_mask) for uncertainty_mask in uncertainty_masks]
    # Window shapes are derived from the mean mask size so ROI size scales with the dataset
    uncertainty_masks_size_mean = comp_uncertainty_masks_mean(uncertainty_masks)
    window_shapes = comp_window_shapes(uncertainty_masks_size_mean, window_size_percentage, window_per_border)
    for i in tqdm(range(len(imgs_filenames))):
        img, affine, spacing, header = utils.load_nifty(imgs_filenames[i])
        # 4D input: drop the trailing modality axis
        if len(img.shape) == 4:  # TODO: Remove modality in the case of prostate dataset, remove in final version
            img = img[..., 0]
        img_reoriented = utils.reorient(img, affine)  # TODO: reorient is hardcoded
        uncertainty_mask_reoriented = utils.reorient(uncertainty_masks[i], affine)
        rois = []  # Each entry is [roi_sum, x, y, z, width, length]
        # Collect candidate ROIs for every window shape
        for window_shape in tqdm(window_shapes):
            window_shape_rois = comp_rois_single_window_shape(uncertainty_mask_reoriented, window_shape)
            rois.extend(window_shape_rois)
        rois = np.asarray(rois)
        # Keep at most max_rois candidates, enforcing z-distance and overlap constraints
        rois = filter_rois(rois, max_rois, uncertainty_mask_reoriented.shape, min_z_distance_percentage, max_iou)
        rois = extract_rois(img_reoriented, uncertainty_mask_reoriented, rois)
        # [:-7] strips the ".nii.gz" suffix from the basename
        save_rois(save_dir, os.path.basename(uncertainty_masks_filenames[i][:-7]) + "/", rois, img, uncertainty_masks[i], affine, spacing, header)
def comp_guiding_mask(load_path, save_path, slice_gap, default_size, slice_depth=3):
    """Compute and save a guiding mask of selected slices for every mask under load_path.

    The slice gap is adapted to each mask's size before the slice mask is computed.
    """
    for filename in tqdm(utils.load_filenames(load_path)):
        mask, affine, spacing, header = utils.load_nifty(filename)
        gap = adapt_slice_gap(mask, slice_gap, default_size)
        guiding_mask = comp_slices_mask(mask, gap, slice_depth=slice_depth)
        utils.save_nifty(save_path + os.path.basename(filename), guiding_mask, affine, spacing, header, is_mask=True)
def merge_labels(load_mask, save_mask, load_label_table):
    """Merge the fine-grained labels from the label table into three classes.

    GGO labels become 1, consolidation labels become 2, PE labels become 3.
    """
    mask, affine, spacing, header = utils.load_nifty(load_mask)
    mask = mask.astype(int)
    ggo, cons, pe = get_labels(load_label_table)
    # Negate every source label first so the target values (1, 2, 3) cannot
    # collide with source labels that have not been remapped yet.
    all_labels = np.concatenate((ggo, cons, pe), axis=0)
    for label in tqdm(all_labels, disable=True):
        mask[mask == label] = -label
    for new_value, group in ((1, ggo), (2, cons), (3, pe)):
        for label in tqdm(group, disable=True):
            mask[mask == -label] = new_value
    mask = np.rint(mask).astype(int)
    utils.save_nifty(save_mask, mask, affine, spacing, header)
def comp_uncertainties(load_dir, save_dir, uncertainty_estimator, type="part"):
    """Aggregate the per-part predictions of every case/label into one uncertainty map.

    Expects files named "<case:04d>_<label>_<type>_<part>.nii.gz" in load_dir
    and writes "<case:04d>_<label>.nii.gz" to save_dir.

    Args:
        uncertainty_estimator: Callable mapping the stacked predictions
            (parts stacked along axis 0) to an uncertainty volume.
        type: Middle token of the input filenames (default "part").
    """
    load_dir = utils.fix_path(load_dir)
    save_dir = utils.fix_path(save_dir)
    filenames = utils.load_filenames(load_dir)
    cases, nr_labels, nr_parts = group_data(filenames)
    print("nr_cases: ", len(cases))
    print("nr_labels: ", nr_labels)
    print("nr_parts: ", nr_parts)
    for case in tqdm(cases):
        case_id = str(case).zfill(4)
        for label in range(nr_labels + 1):
            predictions = []
            for part in range(nr_parts + 1):
                part_name = load_dir + case_id + "_" + str(label) + "_" + type + "_" + str(part) + ".nii.gz"
                prediction, affine, spacing, header = utils.load_nifty(part_name)
                # float16 keeps the stacked prediction tensor small
                predictions.append(prediction.astype(np.float16))
            uncertainty = uncertainty_estimator(np.stack(predictions))
            # geometry (affine/spacing/header) is taken from the last loaded part
            utils.save_nifty(save_dir + case_id + "_" + str(label) + ".nii.gz", uncertainty, affine, spacing, header)
def tmp2(filename):
    """Debug helper: print a single voxel value at 1-based coordinates (46, 155, 116)."""
    mask = utils.load_nifty(filename)[0]
    # indices are 0-based equivalents of the 1-based coordinates above
    print(mask[45][154][115])
def round_mask(filename):
    """Round a mask to integer labels and overwrite the file in place.

    NOTE(review): this redefines round_mask with a different signature than
    the two-argument variant earlier in the file, shadowing it at import
    time — confirm the duplication is intended.
    """
    mask, affine, spacing, header = utils.load_nifty(filename)
    rounded = np.rint(mask).astype(int)
    utils.save_nifty(filename, rounded, affine, spacing, header)