def save_batch(x, y):
    """Dump one training batch to disk as NIfTI volumes.

    x: image tensor, presumably (batch, channels, D, H, W) — e.g. 16,3,32,32,32 per the original note
    y: label tensor, presumably (batch, 1, D, H, W) — TODO confirm shapes against the caller
    """
    images = x.detach().cpu().numpy()
    labels = y.detach().cpu().numpy()
    for b, volume in enumerate(images):
        # One label volume per batch element (channel 0 of the label tensor).
        save_image(labels[b, 0, :, :, :], "label_batch_" + str(b) + ".nii.gz")
        # One image volume per input channel ("sequence") of this batch element.
        for s in range(volume.shape[0]):
            save_image(volume[s, :, :, :],
                       "img_batch_" + str(b) + "_seq_" + str(s) + ".nii.gz")
0, num_tp): # for each timepoint starting at the second one curr_img = all_images[i_mod][i_tp] curr_wm_mask = all_labels[0][i_tp] f = lambda x: cv2.compareHist( np.histogram(curr_ref[curr_ref_mask > 0].ravel(), 256, [0, 1], density=True)[0].astype(np.float32), np.histogram((x[0] * curr_img[curr_wm_mask > 0]).ravel(), 256, [0, 1], density=True)[0].astype(np.float32), 1) # Optimize Chi-Square metric xopt = fmin(func=f, x0=[1]) curr_img_new = np.clip(xopt[0] * curr_img, 0, 1) save_image( curr_img_new, jp( path_write, pat, modalities[i_mod] + "_norm_" + str(i_tp + 1).zfill(2) + ".nii.gz")) all_images_aligned[i_mod][i_tp] = curr_img_new histograms_aligned[i_mod][i_tp] = np.histogram( curr_img_new[curr_img_new > 0].ravel(), 256, [0, 1], density=True) # generate histograms after alignment histograms_wm_aligned = [[ np.histogram(img[lbl > 0].ravel(), 256, [0, 1], density=True) for img, lbl in zip(all_images_aligned[i], all_labels[0]) ] for i in range(len(all_images_aligned))] #histograms_gm_aligned = [[np.histogram(img[lbl>0].ravel(), 256, [0,1], density=True) for img, lbl in zip(all_images_aligned[i], all_labels[1])] for i in range(len(all_images_aligned))] #histograms_mask_aligned = [[np.histogram(img[lbl>0].ravel(), 256, [0,1], density=True) for img, lbl in zip(all_images_aligned[i], all_labels[2])] for i in range(len(all_images_aligned))]
nib.load(jp(path_base, pat, tp, 'flair.nii.gz')).get_fdata()) t2 = normalize_data( nib.load(jp(path_base, pat, tp, 't2.nii.gz')).get_fdata()) pd = normalize_data( nib.load(jp(path_base, pat, tp, 'pd.nii.gz')).get_fdata()) t1 = normalize_data( nib.load(jp(path_base, pat, tp, 'mprage.nii.gz')).get_fdata()) t1_inv = normalize_data(np.max(t1) - t1) t2_flair = normalize_data(flair * t2) pd_flair = normalize_data(flair * pd) t1_inv_flair = normalize_data(flair * t1_inv) what = normalize_data(flair * (t2 + pd + t1_inv)) save_image(t2_flair, jp(path_base, pat, tp, "t2_times_flair.nii.gz"), orientation="RAI") save_image(pd_flair, jp(path_base, pat, tp, "pd_times_flair.nii.gz"), orientation="RAI") save_image(t1_inv_flair, jp(path_base, pat, tp, "t1_inv_times_flair.nii.gz"), orientation="RAI") save_image(what, jp(path_base, pat, tp, "sum_times_flair.nii.gz"), orientation="RAI") else: for pat in patients: print("Patient ", pat) flair_files = list_files_with_name_containing(jp(path_base, pat), "flair", "nii.gz")
# ensemble segmentations of all 5 folds print("Ensembling models of all selected folds...") all_segmentations = np.stack(all_folds) # size (5, 61, 181,217, 181) the_sum = np.sum(all_segmentations, axis = 0) # size should be (61, 181,217, 181) results = the_sum >= np.ceil(list(selection.values()).count(True)/2) # boolean with size (61, 181,217, 181) # save images results = results.astype(np.uint8) for i_case in range(results.shape[0]): if post_processing: #Remove very small lesions (3 voxels) labels_out = cc3d.connected_components(results[i_case,:,:,:]) for i_cc in np.unique(labels_out): if len(labels_out[labels_out == i_cc]) <= min_area: results[i_case,:,:,:][labels_out == i_cc] = 0 save_image(results[i_case,:,:,:], jp(path_results, experiment_name_folder,"test"+all_indexes[i_case][0] + "_" + all_indexes[i_case][1] + "_qwertz.nii")) # Save dictionary that identifies which folds were considered selection["experiment"] = experiment_name selection["Postprocessing"] = post_processing selection["Min-area"] = min_area create_log(jp(path_results, experiment_name_folder), selection) z = 0 elif experiment_type == 'l': path_exp = jp(path_experiments_l, experiment_name) folds = list_folders(path_exp) #Read log file to get parameters
'flair.nii.gz')).get_fdata()) ref_mprage = normalize_data( nib.load(jp(path_data, pat, timepoints[0], 'mprage.nii.gz')).get_fdata()) ref_pd = normalize_data( nib.load(jp(path_data, pat, timepoints[0], 'pd.nii.gz')).get_fdata()) ref_t2 = normalize_data( nib.load(jp(path_data, pat, timepoints[0], 't2.nii.gz')).get_fdata()) brain_mask_ref = nib.load( jp(path_data, pat, timepoints[0], 'brain_mask.nii.gz')).get_fdata() #mask1 = nib.load(jp(path_data, pat, timepoints[0], 'mask1.nii.gz')).get_fdata() #mask2 = nib.load(jp(path_data, pat, timepoints[0], 'mask2.nii.gz')).get_fdata() #Save first timepoint without modifying it create_folder(jp(path_new_data, pat, timepoints[0])) save_image(ref_flair, jp(path_new_data, pat, timepoints[0], "flair.nii.gz")) save_image(ref_mprage, jp(path_new_data, pat, timepoints[0], "mprage.nii.gz")) save_image(ref_pd, jp(path_new_data, pat, timepoints[0], "pd.nii.gz")) save_image(ref_t2, jp(path_new_data, pat, timepoints[0], "t2.nii.gz")) save_image(brain_mask_ref, jp(path_new_data, pat, timepoints[0], "brain_mask.nii.gz")) #save_image(mask1, jp(path_new_data, pat, timepoints[0], "mask1.nii.gz")) #save_image(mask2, jp(path_new_data, pat, timepoints[0], "mask2.nii.gz")) for tp in timepoints[1:]: # from second timepoint print("Current tp: ", tp) target_flair = normalize_data( nib.load(jp(path_data, pat, tp, 'flair.nii.gz')).get_fdata()) target_mprage = normalize_data( nib.load(jp(path_data, pat, tp, 'mprage.nii.gz')).get_fdata())
nib.load( jp(path_base, exp, "fold" + str(f + 1).zfill(2), "results", str(f + 1).zfill(2), img)).get_fdata().astype(np.uint8)) data[exp].append(timepoints) tp_amounts.append(len(images)) for f in range(num_folds): #Create folders for results create_folder(jp(path_base, ensemble_name, "fold" + str(f + 1).zfill(2))) create_folder( jp(path_base, ensemble_name, "fold" + str(f + 1).zfill(2), "results")) create_folder( jp(path_base, ensemble_name, "fold" + str(f + 1).zfill(2), "results", str(f + 1).zfill(2))) # do ensembling for f in range(num_folds): # for each fold for tp in range(tp_amounts[f]): to_ensemble = [data[exp][f][tp] for exp in to_consider] # to ensemble to_ensemble = np.stack(to_ensemble, axis=0) sumi = np.sum(to_ensemble, axis=0) res = sumi >= np.ceil(len(to_consider) / 2) save_image( res.astype(np.uint8), jp( path_base, ensemble_name, "fold" + str(f + 1).zfill(2), "results", str(f + 1).zfill(2), str(f + 1).zfill(2) + "_" + str(tp + 1).zfill(2) + "_segm.nii.gz"))
# Majority-vote ensembling of per-experiment segmentation volumes.
# NOTE(review): relies on names defined outside this chunk (num_ens,
# path_ensembles, path_base, experiments_to_ensemble, jp, create_folder,
# list_folders, list_files_with_extension, save_image, create_log) — verify
# against the enclosing script.
name_folder_new_ensemble = "Ensemble_" + str(num_ens + 1).zfill(2)
create_folder(jp(path_ensembles, name_folder_new_ensemble))
# Check that listed experiments exist
all_experiments = list_folders(path_base)
all_experiments = [x for x in all_experiments if x in experiments_to_ensemble]
assert len(all_experiments) % 2 != 0  # number of experiments must be odd
all_images = []
for i_exp, exp in enumerate(all_experiments):
    print("Current experiment: ", exp, i_exp + 1, "/", len(all_experiments))
    # Load every .nii segmentation of this experiment as uint8.
    # NOTE(review): all_images_names is reused after the loop — this assumes
    # every experiment folder holds the same file names in the same order.
    all_images_names = list_files_with_extension(jp(path_base, exp), "nii")
    all_images_curr_exp = []
    for img in all_images_names:
        all_images_curr_exp.append(
            nib.load(jp(path_base, exp, img)).get_fdata().astype(np.uint8))
    all_images.append(np.stack(all_images_curr_exp))  # (61, volume size)
# Stack to (n_experiments, n_cases, ...) and take a per-voxel majority vote:
# a voxel is foreground when at least ceil(n/2) experiments marked it (strict
# majority, since n is asserted odd above).
all_images_np = np.stack(all_images)
the_sum = np.sum(all_images_np, axis=0)
voting = the_sum >= np.ceil(len(all_experiments) / 2)
# save images
for i_case in range(voting.shape[0]):
    save_image(
        voting[i_case, :, :, :].astype(np.uint8),
        jp(path_ensembles, name_folder_new_ensemble, all_images_names[i_case]))
# Record which experiments went into this ensemble next to its outputs.
create_log(jp(path_ensembles, name_folder_new_ensemble), {'experiments': all_experiments})