def main():
    """Score SCT-generated GM segmentations against ground-truth rater masks.

    For each of the four rater masks and each scan, computes the per-scan
    average Dice score and Hausdorff distance (averaged over slices with a
    positive Dice score), pickles the nested score tree to the output
    folder, and prints overall averages.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-gt", "--gt_folder",
        help="Input folder with all scans and ground truth masks")
    parser.add_argument(
        "-sct", "--sct_folder",
        help="Input folder with all SCT generated masks")
    parser.add_argument(
        "-o", "--output",
        help="Output folder for the SCT GM mask scores")
    args = parser.parse_args()

    file_tree = dm.load_files(args.gt_folder, args.sct_folder)
    score_tree = {'maskr1': {}, 'maskr2': {}, 'maskr3': {}, 'maskr4': {}}

    ds = []  # per-scan average dice scores, across all masks/sites
    hs = []  # per-scan average hausdorff distances
    start_time = time.time()
    # Dict insertion order is stable (py3.7+), so enumerate replaces the
    # original masks[i] indexing.
    for i, mask_name in enumerate(score_tree):
        print("Getting scores for " + mask_name)
        for site in file_tree.keys():
            print("Getting scores for " + site + " images")
            score_tree[mask_name][site] = {}
            sc_nbr = len(file_tree[site].keys())
            k = 1
            for sc in file_tree[site].keys():
                score_tree[mask_name][site][sc] = {}
                print("Scoring segmentation of scan " + str(k) +
                      " out of " + str(sc_nbr))
                dice_scores = gma.dice_score(file_tree[site][sc], i)
                # Only slices with a positive dice score count toward the
                # average; the same count normalizes the hausdorff average.
                n = sum(x > 0 for x in dice_scores)
                haus_scores = gma.hausdorff_score(file_tree[site][sc], i)
                if n == 0:
                    # Previously this divided by zero; record 0.0 instead so
                    # the whole run does not crash on one empty scan.
                    print("Warning: no slice with a positive dice score for "
                          + sc + ", recording 0.0")
                    avg_dice_score = 0.0
                    avg_haus_score = 0.0
                else:
                    avg_dice_score = sum(dice_scores) / n
                    avg_haus_score = sum(haus_scores) / n
                score_tree[mask_name][site][sc]['dice_score'] = avg_dice_score
                score_tree[mask_name][site][sc]['haus_score'] = avg_haus_score
                ds.append(avg_dice_score)
                hs.append(avg_haus_score)
                k += 1
    dur = time.time() - start_time

    out_path = args.output + "sct_seg_scores.pkl"
    # `with` guarantees the handle is closed even if pickling fails.
    with open(out_path, "wb") as output_file:
        pickle.dump(score_tree, output_file)
    print("All done !")
    print("Total duration : " + str(int(dur // 60)) + "min and " +
          str(int(dur % 60)) + "s")
    if ds:  # guard against an empty file tree
        print("Average dice score : " + str(sum(ds) / len(ds)))
        print("Average hausdorff distance : " + str(sum(hs) / len(hs)))
    print("Saved scores at : " + out_path)
def plot_sklt_sct_masks_ref(scan, avg_mask, mask=0, slice=0):
    """Plot the skeletonized SCT segmentation (red) over the scan, cropped
    to the reference template's size, with the template overlaid in green;
    then print the skeleton Hausdorff distance plus density/texture stats.

    Args:
        scan: project scan dict; reads 'sct_seg', 'image', 'name', 'levels'.
        avg_mask: 2-D reference skeleton template; the crop is sized to it.
        mask: rater index, only used in the plot title (0-based).
        slice: axial slice index to display. NOTE: shadows the builtin, but
            the name is kept for backward compatibility with keyword callers.
    """
    if not scan['sct_seg']:
        print("No SCT segmentation found !")
        return
    img_path = scan['image']
    data = nib.load(img_path)
    img_slice = data.get_fdata()[:, :, slice]
    print(img_slice.max(), img_slice.min())
    img_slice /= img_slice.max()  # normalize to [0, 1] for RGB display
    img = np.asarray(np.dstack((img_slice, img_slice, img_slice)))

    mask_path = scan['sct_seg']
    data = nib.load(mask_path)
    sct_slice = data.get_fdata()[:, :, slice]
    sklt_sct_slice = skeletonize(sct_slice)
    # Paint the skeleton pixels in red on top of the grayscale image.
    sct_img = np.where(
        np.stack((sklt_sct_slice,) * 3, axis=-1) == (1., 1., 1.),
        np.stack((sklt_sct_slice,
                  np.zeros(sklt_sct_slice.shape),
                  np.zeros(sklt_sct_slice.shape)), axis=-1),
        img)

    # Crop around the skeleton's bounding box, padded so the crop has
    # exactly the template's shape.
    ids_masks = np.nonzero(sklt_sct_slice)
    x_min = min(ids_masks[0])
    x_max = max(ids_masks[0])
    y_min = min(ids_masks[1])
    y_max = max(ids_masks[1])
    xpad = avg_mask.shape[0] - (x_max - x_min)
    ypad = avg_mask.shape[1] - (y_max - y_min)
    xmin_pad = xpad // 2
    xmax_pad = xpad - xmin_pad
    ymin_pad = ypad // 2
    ymax_pad = ypad - ymin_pad
    sct_img = sct_img[x_min - xmin_pad:x_max + xmax_pad,
                      y_min - ymin_pad:y_max + ymax_pad]

    # Overlay the reference template in green on the cropped view.
    gt_img = np.where(
        np.stack((avg_mask,) * 3, axis=-1) == (1., 1., 1.),
        np.stack((np.zeros(avg_mask.shape), avg_mask,
                  np.zeros(avg_mask.shape)), axis=-1),
        sct_img)
    plt.imshow(gt_img)
    plt.title('Masques expert ' + str(mask + 1) +
              ' (vert) et SCT (rouge) skelétisés ' + scan['name'] +
              ' level ' + scan['levels'][slice], fontsize=20)
    plt.show()
    print('Distance de Hausdorff skeletisée : ',
          gma.general_hausdorff_distance(
              sklt_sct_slice[x_min - xmin_pad:x_max + xmax_pad,
                             y_min - ymin_pad:y_max + ymax_pad],
              avg_mask))
    ds = gma.get_sct_gm_densities(scan)
    print('Densité :', np.mean(ds[slice]))
    # Fixed typo in the label: was 'Texute :'.
    print('Texture :', np.std(ds[slice]))
def main():
    """Build average GM-mask skeleton templates, per site plus a global one.

    Every usable cropped GM mask slice is OR-ed into its site's accumulator;
    each site accumulator is then merged into the global 'average'
    accumulator, all five are skeletonized, and the result is pickled.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-gt", "--gt_folder",
        help="Input folder with all scans and ground truth masks")
    parser.add_argument(
        "-o", "--output",
        help="Output folder for the SCT GM mask scores")
    args = parser.parse_args()

    sct_tree = dm.load_files(args.gt_folder)
    CROP_SIZE = (50, 50)
    avg_masks = {
        'site1': np.zeros(CROP_SIZE),
        'site2': np.zeros(CROP_SIZE),
        'site3': np.zeros(CROP_SIZE),
        'site4': np.zeros(CROP_SIZE),
        'average': np.zeros(CROP_SIZE),
    }
    start_time = time.time()
    for site in sct_tree.keys():
        print("Getting skeleton for " + site + " images")
        sc_nbr = len(sct_tree[site].keys())
        k = 1
        for sc in sct_tree[site].keys():
            print("Getting skeletons of scan " + str(k) +
                  " out of " + str(sc_nbr))
            for sl in range(sct_tree[site][sc]['slice_nbr']):
                cropped_mask = gma.get_cropped_gm_mask(
                    sct_tree[site][sc], sl, 2, CROP_SIZE)
                # Assumes get_cropped_gm_mask returns a short sentinel when
                # the slice has no usable mask — TODO confirm in gma.
                if len(cropped_mask) > 1:
                    # OR the slice mask into the site accumulator.
                    avg_masks[site] = np.where(cropped_mask == 1., 1,
                                               avg_masks[site])
            k += 1
        # Merge this site's accumulated mask into the global template,
        # then skeletonize the site template.
        avg_masks['average'] = np.where(avg_masks[site] == 1., 1,
                                        avg_masks['average'])
        avg_masks[site] = skeletonize(avg_masks[site])
    avg_masks['average'] = skeletonize(avg_masks['average'])
    dur = time.time() - start_time

    out_path = args.output + "average_masks.pkl"
    # `with` guarantees the handle is closed even if pickling fails.
    with open(out_path, "wb") as output_file:
        pickle.dump(avg_masks, output_file)
    print("All done !")
    print("Total duration : " + str(int(dur // 60)) + "min and " +
          str(int(dur % 60)) + "s")
    # Fixed typo: was "Saved tempaltes at : ".
    print("Saved templates at : " + out_path)
def plot_sct_masks_overlap(scan, mask=0, slice=0):
    """Display one axial slice with the expert mask painted green and the
    SCT segmentation painted red over the grayscale image, then print the
    Hausdorff distance and the Dice score for that slice.

    `slice` keeps its original (builtin-shadowing) name so existing keyword
    callers keep working.
    """
    if not scan['sct_seg']:
        print("No SCT segmentation found !")
        return

    # Grayscale slice normalized to [0, 1], replicated into an RGB stack.
    base_slice = nib.load(scan['image']).get_fdata()[:, :, slice]
    base_slice /= base_slice.max()
    overlay = np.asarray(np.dstack((base_slice, base_slice, base_slice)))

    # Expert (ground-truth) mask, binarized, painted in green.
    gt_slice = nib.load(scan['masks'][mask]).get_fdata()[:, :, slice]
    gt_slice = np.where(gt_slice == 1., 1, 0)
    zeros_gt = np.zeros(gt_slice.shape)
    overlay = np.where(np.stack((gt_slice,) * 3, axis=-1) == (1., 1., 1.),
                       np.stack((zeros_gt, gt_slice, zeros_gt), axis=-1),
                       overlay)

    # SCT segmentation painted in red, on top of everything else.
    sct_slice = nib.load(scan['sct_seg']).get_fdata()[:, :, slice]
    zeros_sct = np.zeros(sct_slice.shape)
    overlay = np.where(np.stack((sct_slice,) * 3, axis=-1) == (1., 1., 1.),
                       np.stack((sct_slice, zeros_sct, zeros_sct), axis=-1),
                       overlay)

    # Crop to the SCT mask's bounding box plus a fixed margin.
    rows, cols = np.nonzero(sct_slice)
    pad = 20
    overlay = overlay[min(rows) - pad:max(rows) + pad,
                      min(cols) - pad:max(cols) + pad]

    plt.imshow(overlay)
    plt.title('Masques expert ' + str(mask + 1) + ' (vert) et SCT (rouge) ',
              fontsize=20)
    plt.show()
    print(scan['name'] + ' level ' + scan['levels'][slice])
    print('Distance de Hausdorff skeletisée : ',
          gma.general_hausdorff_distance(gt_slice, sct_slice))
    print('Score Dice : ', gma.dice_score(scan, mask)[slice])
def main():
    """Extract GM features (density, texture, shape) and per-site stats.

    For every usable slice, records SITE/SCAN/SLICE/LEVEL plus the mean GM
    density (DENSITY), its standard deviation (TEXTURE), and the Hausdorff
    distance of the cropped SCT skeleton to the site template (SHAPE).
    Saves the feature DataFrame and the per-site mean/std stats as pickles.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-gt", "--gt_folder",
        help="Input folder with all scans and ground truth masks")
    parser.add_argument(
        "-sct", "--sct_folder",
        help="Input folder with all SCT generated masks")
    # NOTE: the flag name carries a historical typo ("tempaltes"); kept so
    # existing command lines keep working.
    parser.add_argument(
        "-mt", "--mask_tempaltes", help="Pickle file with template file")
    parser.add_argument(
        "-o", "--output",
        help="Output folder for the SCT GM mask scores")
    args = parser.parse_args()

    train_file_tree = dm.load_files(args.gt_folder, args.sct_folder)
    with open(args.mask_tempaltes, "rb") as avg_mask_file:
        avg_masks = pkl.load(avg_mask_file)
    # Templates are 2-D, so take both dimensions. The previous `.shape[:1]`
    # produced a 1-tuple, silently dropping the second crop dimension.
    crop_size = avg_masks['site1'].shape[:2]

    donnees = {
        'SITE': [],
        'SCAN': [],
        'SLICE': [],
        'LEVEL': [],
        'DENSITY': [],
        'TEXTURE': [],
        'SHAPE': [],
    }
    stats = {
        'site1': {},
        'site2': {},
        'site3': {},
        'site4': {},
    }
    start_time = time.time()
    for site in train_file_tree.keys():
        dens = []  # per-site density samples, for the site stats
        text = []  # per-site texture samples
        shap = []  # per-site shape samples
        for sc in train_file_tree[site].keys():
            slice_densities = gma.get_sct_gm_densities(
                train_file_tree[site][sc])
            for sl in range(train_file_tree[site][sc]['slice_nbr']):
                sct_gm_skeleton = gma.get_cropped_sct_gm_skeleton(
                    train_file_tree[site][sc], sl, crop_size)
                # Assumes a short sentinel is returned for unusable slices —
                # TODO confirm in gma.
                if len(sct_gm_skeleton) > 1:
                    donnees['SITE'].append(int(site[-1]))
                    donnees['SCAN'].append(int(sc[-2:]))
                    donnees['SLICE'].append(sl)
                    try:
                        donnees['LEVEL'].append(
                            int(train_file_tree[site][sc]['levels'][sl][0]))
                    # Narrowed from a bare `except:` (which also swallowed
                    # KeyboardInterrupt/SystemExit): missing or non-numeric
                    # level labels fall back to the 0 sentinel.
                    except (ValueError, TypeError, IndexError, KeyError):
                        donnees['LEVEL'].append(0)
                    donnees['DENSITY'].append(np.mean(slice_densities[sl]))
                    dens.append(donnees['DENSITY'][-1])
                    donnees['TEXTURE'].append(np.std(slice_densities[sl]))
                    text.append(donnees['TEXTURE'][-1])
                    donnees['SHAPE'].append(
                        gma.general_hausdorff_distance(sct_gm_skeleton,
                                                       avg_masks[site]))
                    shap.append(donnees['SHAPE'][-1])
        stats[site] = {
            'density': {'mean': np.mean(dens), 'std': np.std(dens)},
            'texture': {'mean': np.mean(text), 'std': np.std(text)},
            'shape': {'mean': np.mean(shap), 'std': np.std(shap)},
        }

    df = pd.DataFrame(donnees, columns=list(donnees.keys()))
    df.to_pickle(args.output + "GM_features.pkl")
    dur = time.time() - start_time
    # `with` guarantees the handle is closed even if pickling fails.
    with open(args.output + "GM_feature_stats.pkl", "wb") as output_file:
        pkl.dump(stats, output_file)
    print("All done !")
    print("Total duration : " + str(int(dur // 60)) + "min and " +
          str(int(dur % 60)) + "s")
    print("Saved feature data at : " + args.output + "GM_features.pkl")
    print("Saved feature stats at : " + args.output + "GM_feature_stats.pkl")