def create_one_3D_file():
    '''
    Create one big file which contains all 3D images (not slices).
    '''

    class HP:
        DATASET = "HCP"
        RESOLUTION = "1.25mm"
        FEATURES_FILENAME = "270g_125mm_peaks"
        LABELS_TYPE = np.int16
        DATASET_FOLDER = "HCP"

    data_all = []
    seg_all = []

    print("\n\nProcessing Data...")
    for s in get_all_subjects():
        print("processing data subject {}".format(s))
        data = nib.load(join(C.HOME, HP.DATASET_FOLDER, s, HP.FEATURES_FILENAME + ".nii.gz")).get_data()
        data = np.nan_to_num(data)
        data = DatasetUtils.scale_input_to_unet_shape(data, HP.DATASET, HP.RESOLUTION)
        data_all.append(np.array(data))
    np.save("data.npy", data_all)
    del data_all  # free memory

    print("\n\nProcessing Segs...")
    for s in get_all_subjects():
        print("processing seg subject {}".format(s))
        seg = ImgUtils.create_multilabel_mask(HP, s, labels_type=HP.LABELS_TYPE)
        if HP.RESOLUTION == "2.5mm":
            seg = ImgUtils.resize_first_three_dims(seg, order=0, zoom=0.5)
        seg = DatasetUtils.scale_input_to_unet_shape(seg, HP.DATASET, HP.RESOLUTION)
        seg_all.append(np.array(seg))
    seg_all = np.array(seg_all)  # convert the list to an ndarray so the dtype can be reported
    print("SEG TYPE: {}".format(seg_all.dtype))
    np.save("seg.npy", seg_all)

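# Loading sketch (illustrative only, not part of the original pipeline): how the
# files written by create_one_3D_file() could be read back. Because
# scale_input_to_unet_shape() brings every subject to the same shape, np.save()
# on the Python lists stores regular ndarrays with the subject index as the
# first axis. The function name and shape comments below are assumptions made
# for illustration.
def _demo_load_3D_files():
    data_all = np.load("data.npy")   # (n_subjects, x, y, z, n_channels)
    seg_all = np.load("seg.npy")     # (n_subjects, x, y, z, n_labels)
    print("data:", data_all.shape, data_all.dtype)
    print("seg:", seg_all.shape, seg_all.dtype)
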
def _create_prob_slices_file(HP, subjects, filename, bundle, shuffle=True):

    mask_dir = join(C.HOME, HP.DATASET_FOLDER)  # currently unused
    input_dir = HP.MULTI_PARENT_PATH

    combined_slices = []
    mask_slices = []

    for s in subjects:
        print("processing subject {}".format(s))

        probs_x = nib.load(join(input_dir, "UNet_x_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
        probs_y = nib.load(join(input_dir, "UNet_y_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
        probs_z = nib.load(join(input_dir, "UNet_z_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
        # probs_x = DatasetUtils.scale_input_to_unet_shape(probs_x, HP.DATASET, HP.RESOLUTION)
        # probs_y = DatasetUtils.scale_input_to_unet_shape(probs_y, HP.DATASET, HP.RESOLUTION)
        # probs_z = DatasetUtils.scale_input_to_unet_shape(probs_z, HP.DATASET, HP.RESOLUTION)

        combined = np.stack((probs_x, probs_y, probs_z), axis=4)  # (73, 87, 73, 18, 3)
        # One dimension too many for the UNet -> merge the last two dimensions
        combined = np.reshape(combined, (combined.shape[0], combined.shape[1], combined.shape[2],
                                         combined.shape[3] * combined.shape[4]))  # (73, 87, 73, 3*18)
        # print("combined shape after", combined.shape)

        mask_data = ImgUtils.create_multilabel_mask(HP, s, labels_type=HP.LABELS_TYPE)
        if HP.DATASET == "HCP_2mm":
            # use "HCP" because the mask needs downscaling
            mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, "HCP", HP.RESOLUTION)
        elif HP.DATASET == "HCP_2.5mm":
            # use "HCP" because the mask needs downscaling
            mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, "HCP", HP.RESOLUTION)
        else:
            # mask has the same resolution as the probmaps -> we can use the same resizing
            mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, HP.DATASET, HP.RESOLUTION)

        # Save as image
        img = nib.Nifti1Image(combined, ImgUtils.get_dwi_affine(HP.DATASET, HP.RESOLUTION))
        nib.save(img, join(HP.EXP_PATH, "combined", s + "_combinded_probmap.nii.gz"))

        combined = DatasetUtils.scale_input_to_unet_shape(combined, HP.DATASET, HP.RESOLUTION)
        assert combined.shape[2] == mask_data.shape[2]

        # Save as slices
        for z in range(combined.shape[2]):
            combined_slices.append(combined[:, :, z, :])
            mask_slices.append(mask_data[:, :, z, :])

    if shuffle:
        combined_slices, mask_slices = sk_shuffle(combined_slices, mask_slices, random_state=9)

    if HP.TRAIN:
        np.save(filename + "_data.npy", combined_slices)
        np.save(filename + "_seg.npy", mask_slices)

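# Usage sketch (illustrative only): _create_prob_slices_file() expects an HP
# config object providing the attributes read above. The class, paths, subject
# ID and bundle name below are placeholders, not project defaults, and assume
# the per-orientation probmaps already exist under HP.MULTI_PARENT_PATH.
def _demo_create_prob_slices_file():
    class DemoHP:
        DATASET = "HCP"
        DATASET_FOLDER = "HCP"
        RESOLUTION = "1.25mm"
        LABELS_TYPE = np.int16
        MULTI_PARENT_PATH = "/tmp/multi_experiments"
        CV_FOLD = 0
        EXP_PATH = "/tmp/experiment"
        TRAIN = True

    # Writes train_data.npy / train_seg.npy (only when TRAIN is True) plus one
    # combined probmap NIfTI per subject into EXP_PATH/combined.
    _create_prob_slices_file(DemoHP, subjects=["100307"], filename="train",
                             bundle="CST_right")
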
def _create_slices_file(HP, subjects, filename, slice, shuffle=True):
    data_dir = join(C.HOME, HP.DATASET_FOLDER)

    dwi_slices = []
    mask_slices = []

    print("\n\nProcessing Data...")
    for s in subjects:
        print("processing dwi subject {}".format(s))

        dwi = nib.load(join(data_dir, s, HP.FEATURES_FILENAME + ".nii.gz"))
        dwi_data = dwi.get_data()
        dwi_data = np.nan_to_num(dwi_data)
        dwi_data = DatasetUtils.scale_input_to_unet_shape(dwi_data, HP.DATASET, HP.RESOLUTION)

        # Use slices from all three directions in one dataset
        # (the slice argument is therefore currently ignored)
        for z in range(dwi_data.shape[0]):
            dwi_slices.append(dwi_data[z, :, :, :])
        for z in range(dwi_data.shape[1]):
            dwi_slices.append(dwi_data[:, z, :, :])
        for z in range(dwi_data.shape[2]):
            dwi_slices.append(dwi_data[:, :, z, :])

    dwi_slices = np.array(dwi_slices)
    random_idxs = None
    if shuffle:
        # Draw a permutation (not a sample with replacement) so every slice is kept exactly once
        random_idxs = np.random.permutation(len(dwi_slices))
        dwi_slices = dwi_slices[random_idxs]

    np.save(filename + "_data.npy", dwi_slices)
    del dwi_slices  # free memory

    print("\n\nProcessing Segs...")
    for s in subjects:
        print("processing seg subject {}".format(s))

        mask_data = ImgUtils.create_multilabel_mask(HP, s, labels_type=HP.LABELS_TYPE)
        if HP.RESOLUTION == "2.5mm":
            mask_data = ImgUtils.resize_first_three_dims(mask_data, order=0, zoom=0.5)
        mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, HP.DATASET, HP.RESOLUTION)

        # Use slices from all three directions in one dataset
        # (iterate over mask_data's own shape, not the dwi data left over from the previous loop)
        for z in range(mask_data.shape[0]):
            mask_slices.append(mask_data[z, :, :, :])
        for z in range(mask_data.shape[1]):
            mask_slices.append(mask_data[:, z, :, :])
        for z in range(mask_data.shape[2]):
            mask_slices.append(mask_data[:, :, z, :])

    mask_slices = np.array(mask_slices)
    print("SEG TYPE: {}".format(mask_slices.dtype))
    if shuffle:
        # Reuse the same permutation so data and seg slices stay aligned
        mask_slices = mask_slices[random_idxs]
    np.save(filename + "_seg.npy", mask_slices)

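# Usage sketch (illustrative only): _create_slices_file() needs an HP config
# with the attributes read above. The class, values and subject ID below are
# placeholders, not project defaults.
def _demo_create_slices_file():
    class DemoHP:
        DATASET = "HCP"
        DATASET_FOLDER = "HCP"
        RESOLUTION = "1.25mm"
        FEATURES_FILENAME = "270g_125mm_peaks"
        LABELS_TYPE = np.int16

    # Writes train_data.npy and train_seg.npy; the slice argument is currently
    # ignored because slices from all three axes are collected.
    _create_slices_file(DemoHP, subjects=["100307"], filename="train", slice="x")
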