def get_resected_structures(self, sample, resection_mask):
    from pathlib import Path
    from tempfile import NamedTemporaryFile
    from utils import AffineMatrix, sglob
    from episurg.parcellation import GIFParcellation

    # Locate the preoperative T1 image in MNI space, the matching affine
    # transform and the GIF parcellation using the shared filename stem
    mni_path = Path(sample[IMAGE]['path'])
    mni_dir = mni_path.parent
    dataset_dir = mni_dir.parent
    parcellation_dir = dataset_dir / 'parcellation'
    stem = mni_path.name.split('_t1_pre')[0]
    transform_path = sglob(mni_dir, f'{stem}*.txt')[0]
    parcellation_path = sglob(parcellation_dir, f'{stem}*.nii.gz')[0]

    # Resample the parcellation into the space of the resection mask,
    # using nearest-neighbor interpolation to preserve label values
    transform = AffineMatrix(transform_path).get_itk_transform()
    parcellation = sitk.ReadImage(str(parcellation_path))
    resampled = sitk.Resample(
        parcellation,
        resection_mask,
        transform,
        sitk.sitkNearestNeighbor,
    )

    # Write both images to temporary files so GIFParcellation can read them
    with NamedTemporaryFile(suffix='.nii') as p:
        with NamedTemporaryFile(suffix='.nii') as m:
            parcellation_path = p.name
            mask_path = m.name
            sitk.WriteImage(resampled, parcellation_path)
            sitk.WriteImage(resection_mask, mask_path)
            parcellation = GIFParcellation(parcellation_path)
            resected_structures = parcellation.get_resected_ratios(mask_path)
    return resected_structures
def cache(
        dataset,
        resection_params,
        augment=True,
        caches_dir='/tmp/val_set_cache',
        num_workers=12,
        ):
    caches_dir = Path(caches_dir)
    wm_lesion_p = resection_params['wm_lesion_p']
    clot_p = resection_params['clot_p']
    shape = resection_params['shape']
    texture = resection_params['texture']
    augment_string = '' if augment else '_no_augmentation'
    dir_name = f'wm_{wm_lesion_p}_clot_{clot_p}_{shape}_{texture}{augment_string}'
    cache_dir = caches_dir / dir_name
    image_dir = cache_dir / 'image'
    label_dir = cache_dir / 'label'
    if not cache_dir.is_dir():
        print('Caching validation set')
        image_dir.mkdir(parents=True)
        label_dir.mkdir(parents=True)
        loader = torch.utils.data.DataLoader(
            dataset,
            num_workers=num_workers,
            collate_fn=lambda x: x[0],
        )
        for subject in tqdm(loader):
            image_path = image_dir / subject.image.path.name
            # The label has no path of its own because it was created,
            # not loaded, so reuse the image file name
            label_path = label_dir / subject.image.path.name
            subject.image.save(image_path)
            subject.label.save(label_path)
    subjects = []
    for im_path, label_path in zip(sglob(image_dir), sglob(label_dir)):
        subject = tio.Subject(
            image=tio.ScalarImage(im_path),
            label=tio.LabelMap(label_path),
        )
        subjects.append(subject)
    return tio.SubjectsDataset(subjects)
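# Hedged usage sketch (not part of the original code): how `cache` might be
# invoked for a validation set. The parameter values and the `val_dataset`
# argument are illustrative assumptions; only the dict keys match those read
# by `cache` above.
def example_cache_call(val_dataset):
    resection_params = {
        'wm_lesion_p': 0.5,  # assumed probability of simulating a WM lesion
        'clot_p': 0.5,       # assumed probability of simulating a clot
        'shape': 'noisy',    # assumed shape identifier
        'texture': 'csf',    # assumed texture identifier
    }
    # Returns a tio.SubjectsDataset backed by the images cached under
    # /tmp/val_set_cache/wm_0.5_clot_0.5_noisy_csf
    return cache(val_dataset, resection_params, augment=True)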
def get_real_resection_subjects(dataset_dir):
    dataset_dir = Path(dataset_dir)
    image_dir = dataset_dir / 'image'
    label_dir = dataset_dir / 'label'
    image_paths = sglob(image_dir)
    label_paths = sglob(label_dir)
    assert len(image_paths) == len(label_paths)
    subjects = []
    for image_path, label_path in zip(image_paths, label_paths):
        subject = tio.Subject(
            image=tio.ScalarImage(image_path),
            label=tio.LabelMap(label_path),
        )
        subjects.append(subject)
    return subjects
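# Hedged usage sketch (illustrative only): wrapping the subjects returned by
# `get_real_resection_subjects` in a tio.SubjectsDataset. The directory path
# and the choice of transform are assumptions, not values from the original.
def example_real_resection_dataset(dataset_dir='/data/real_resections'):
    subjects = get_real_resection_subjects(dataset_dir)
    transform = tio.ToCanonical()  # any torchio transform could be used here
    return tio.SubjectsDataset(subjects, transform=transform)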
def get_paths(folder):
    import utils
    folder = Path(folder)
    if folder.is_file():
        fps = [folder]
    elif folder.is_dir():
        fps = utils.sglob(folder)
    else:
        # Avoid returning an unbound variable when the path does not exist
        raise FileNotFoundError(f'Path not found: {folder}')
    return fps
def get_paths(path):
    import utils
    path = Path(path)
    if path.is_file():
        fps = [path]
    elif path.is_dir():
        fps = utils.sglob(path, '*.nii.gz')
    else:
        # Avoid returning an unbound variable when the path does not exist
        raise FileNotFoundError(f'Path not found: {path}')
    return fps
def get_subjects_list_from_dir(dataset_dir):
    dataset_dir = Path(dataset_dir)
    mni_dir = dataset_dir / 'mni'
    resection_dir = dataset_dir / 'resection'
    noise_paths = sglob(resection_dir, '*noise*')
    subjects_list = []
    for noise_path in noise_paths:
        stem = noise_path.stem.split('_noise')[0]
        image_path = mni_dir / f'{stem}_on_mni.nii.gz'
        gml_path = resection_dir / f'{stem}_gray_matter_left_seg.nii.gz'
        gmr_path = resection_dir / f'{stem}_gray_matter_right_seg.nii.gz'
        rl_path = resection_dir / f'{stem}_resectable_left_seg.nii.gz'
        rr_path = resection_dir / f'{stem}_resectable_right_seg.nii.gz'
        subject = tio.Subject(
            image=tio.ScalarImage(image_path),
            resection_noise=tio.ScalarImage(noise_path),
            resection_gray_matter_left=tio.LabelMap(gml_path),
            resection_gray_matter_right=tio.LabelMap(gmr_path),
            resection_resectable_left=tio.LabelMap(rl_path),
            resection_resectable_right=tio.LabelMap(rr_path),
        )
        subjects_list.append(subject)
    return subjects_list
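# Hedged usage sketch (illustrative only): building a torchio dataset from the
# subjects list. The directory path is an assumption; the keys accessed on the
# subject match those defined in `get_subjects_list_from_dir` above.
def example_resection_dataset(dataset_dir='/data/resection_dataset'):
    subjects_list = get_subjects_list_from_dir(dataset_dir)
    dataset = tio.SubjectsDataset(subjects_list)
    first = dataset[0]  # indexing returns a tio.Subject with its images
    print(first.image.shape, first.resection_noise.shape)
    return dataset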