def test_train_histogram(self):
    """Train histogram landmarks twice: with a masking function and with a mask file."""
    image_paths = [subject['image']['path'] for subject in self.dataset]
    # Landmarks trained with a masking callable.
    HistogramStandardization.train(
        image_paths,
        masking_function=HistogramStandardization.mean,
        output_path=self.dir / 'landmarks.txt',
    )
    # Landmarks trained with a mask image stored on disk.
    HistogramStandardization.train(
        image_paths,
        mask_path=self.dataset[0]['label']['path'],
        output_path=self.dir / 'landmarks.npy',
    )
def create_normalization_file(use_controls, use_nofcd, mods):
    """Create per-modality ``t1_landmarks_<j>.npy`` files using torchio.

    For each modality index ``j`` in ``1..mods``, trains histogram
    standardization landmarks from the FCD brains, optionally extended
    with the no-FCD brains and/or the control brains, and caches the
    result under ``./data/``. Existing landmark files are kept as-is.

    Args:
        use_controls: if truthy, include control brains in training.
        use_nofcd: if truthy, include no-FCD brains in training.
        mods: number of modalities (files are suffixed with the index).
    """
    for j in range(1, mods + 1):
        fcd_paths = sorted(glob.glob(FCD_FOLDER + f'fcd_*.{j}.nii.gz'))
        nofcd_paths = sorted(glob.glob(FCD_FOLDER + f'nofcd_*.{j}.nii.gz'))
        control_paths = sorted(
            glob.glob(CONTROL_FOLDER + f'control_*.{j}.nii.gz'))

        # Copy so that extending mri_paths does not mutate fcd_paths.
        mri_paths = list(fcd_paths)
        if use_nofcd:
            mri_paths += nofcd_paths
        if use_controls:
            mri_paths += control_paths

        t1_landmarks_path = Path(f'./data/t1_landmarks_{j}.npy')
        # Landmarks are cached: skip modalities that were already trained.
        if t1_landmarks_path.is_file():
            continue

        # The `continue` above guarantees the file does not exist here, so
        # always train (the old `path if is_file() else train` conditional
        # was dead code and would have saved a Path object, not landmarks).
        t1_landmarks = HistogramStandardization.train(mri_paths)
        np.save(t1_landmarks_path, t1_landmarks, allow_pickle=True)
def create_normalization_file(use_controls, use_nofcd):
    """Create ``./data/t1_landmarks.npy`` using torchio for brain normalization.

    Trains histogram standardization landmarks from the FCD brains,
    optionally extended with the no-FCD brains and/or the control brains,
    and saves them, overwriting any stale landmark file.

    Args:
        use_controls: if truthy, include control brains in training.
        use_nofcd: if truthy, include no-FCD brains in training.
    """
    FCD_FOLDER = './data/fcd_brains/'
    fcd_files = sorted(os.listdir(FCD_FOLDER))
    fcd_paths = [FCD_FOLDER + name for name in fcd_files if 'nofcd' not in name]
    nofcd_paths = [FCD_FOLDER + name for name in fcd_files if 'nofcd' in name]

    CONTROL_FOLDER = './data/control_brains/'
    control_paths = [
        CONTROL_FOLDER + name for name in sorted(os.listdir(CONTROL_FOLDER))
    ]

    # Copy so that extending mri_paths does not mutate fcd_paths.
    mri_paths = list(fcd_paths)
    if use_nofcd:
        # BUG FIX: the original added fcd_paths a second time here instead
        # of the no-FCD paths, so use_nofcd duplicated the FCD brains.
        mri_paths += nofcd_paths
    if use_controls:
        mri_paths += control_paths

    t1_landmarks_path = Path('./data/t1_landmarks.npy')
    # Remove any stale landmark file so landmarks are always retrained.
    if t1_landmarks_path.is_file():
        os.remove(t1_landmarks_path)

    # After the removal above the file can never exist, so the old
    # `path if is_file() else train` conditional was dead code: train directly.
    t1_landmarks = HistogramStandardization.train(mri_paths)
    np.save(t1_landmarks_path, t1_landmarks, allow_pickle=True)
def get_hist_landmarks(img_path, lndmrk_path, names):
    """Train histogram-standardization landmarks for the named subjects.

    Builds one T1w image path per subject name under ``img_path`` and
    trains landmarks masked by ``cut_img_mask``, writing them to
    ``lndmrk_path``.

    Returns:
        The trained landmarks.
    """
    image_paths = [
        os.path.join(img_path, str(name), 'T1w',
                     'T1w_acpc_dc_restore_brain.nii.gz')
        for name in names
    ]
    return HistogramStandardization.train(
        image_paths,
        masking_function=cut_img_mask,
        output_path=lndmrk_path,
    )
def test_train_histogram(self):
    """Exercise the three supported masking modes of landmark training."""
    image_paths = [subject.image.path for subject in self.dataset]
    label_paths = [subject.label.path for subject in self.dataset]

    # Mode 1: mask computed by a callable.
    HistogramStandardization.train(
        image_paths,
        masking_function=HistogramStandardization.mean,
        output_path=self.dir / 'landmarks.txt',
    )
    # Mode 2: a single mask file shared by all images.
    HistogramStandardization.train(
        image_paths,
        mask_path=label_paths[0],
        output_path=self.dir / 'landmarks.npy',
    )
    # Mode 3: one mask file per image.
    HistogramStandardization.train(
        image_paths,
        mask_path=label_paths,
        output_path=self.dir / 'landmarks_masks.npy',
    )
def test_bad_paths_lengths(self):
    """A mismatched number of images and masks must raise ValueError."""
    image_paths = [1, 2]
    mask_paths = [1, 2, 3]  # one more mask than images
    with self.assertRaises(ValueError):
        HistogramStandardization.train(image_paths, mask_path=mask_paths)