def __init__(
    self,
    args,
    mode,
    dataset_path="./datasets",
    crop_dim=(32, 32, 32),
    split_id=1,
    samples=1000,
    load=False,
):
    """Build the iSeg-2019 dataset for one cross-validation fold.

    The held-out subject is selected by matching ``-<fold_id>-`` in the
    file names; ``args.fold_id`` drives the split (``split_id`` is unused).

    :param args: namespace providing fold_id, threshold, normalization, augmentation
    :param mode: 'train', 'val' or 'test'
    :param dataset_path: root dataset folder
    :param crop_dim: sub-volume (slice, width, height) tuple
    :param split_id: unused; fold selection is driven by args.fold_id
    :param samples: number of sub-volumes that you want to create
    :param load: when True, reuse a previously generated sub-volume list
    """
    fold_id = int(args.fold_id)
    print(f"using fold_id {fold_id}")

    self.mode = mode
    self.root = str(dataset_path)
    self.training_path = self.root + "/iseg_2019/iSeg-2019-Training/"
    self.testing_path = self.root + "/iseg_2019/iSeg-2019-Validation/"
    self.CLASSES = 4
    self.full_vol_dim = (144, 192, 256)  # slice, width, height
    self.crop_size = crop_dim
    self.threshold = args.threshold
    self.normalization = args.normalization
    self.augmentation = args.augmentation
    self.list = []
    self.samples = int(samples)
    self.full_volume = None
    self.save_name = (
        self.root
        + "/iseg_2019/iseg2019-list-"
        + mode
        + "-samples-"
        + str(samples)
        + ".txt"
    )

    if self.augmentation:
        self.transform = augment3D.RandomChoice(
            transforms=[
                augment3D.GaussianNoise(mean=0, std=0.01),
                augment3D.RandomFlip(),
                augment3D.ElasticTransform(),
            ],
            p=0.5,
        )

    if load:
        # Reuse the pre-generated sub-volume list; only the affine is re-read.
        self.list = utils.load_list(self.save_name)
        list_IDsT1 = sorted(glob.glob(os.path.join(self.training_path, "*T1.img")))
        self.affine = img_loader.load_affine_matrix(list_IDsT1[0])
        return

    subvol = (
        "_vol_" + str(crop_dim[0]) + "x" + str(crop_dim[1]) + "x" + str(crop_dim[2])
    )
    self.sub_vol_path = self.root + "/iseg_2019/generated/" + mode + subvol + "/"
    utils.make_dirs(self.sub_vol_path)

    list_IDsT1 = sorted(glob.glob(os.path.join(self.training_path, "*T1.img")))
    list_IDsT2 = sorted(glob.glob(os.path.join(self.training_path, "*T2.img")))
    labels = sorted(glob.glob(os.path.join(self.training_path, "*label.img")))
    print(self.training_path)
    self.affine = img_loader.load_affine_matrix(list_IDsT1[0])

    if self.mode == "train":
        # Train on every subject NOT belonging to the current fold.
        list_IDsT1 = [x for x in list_IDsT1 if f"-{fold_id}-" not in x]
        list_IDsT2 = [x for x in list_IDsT2 if f"-{fold_id}-" not in x]
        labels = [x for x in labels if f"-{fold_id}-" not in x]
        assert len(labels) == len(list_IDsT1)
        assert len(labels) == len(list_IDsT2)
        # presumably the training set holds 10 subjects total — TODO confirm
        assert len(labels) == 9
        self.list = create_sub_volumes(
            list_IDsT1,
            list_IDsT2,
            labels,
            dataset_name="iseg2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )
    elif self.mode == "val":
        # Validate on the single held-out subject of this fold.
        list_IDsT1 = [x for x in list_IDsT1 if f"-{fold_id}-" in x]
        list_IDsT2 = [x for x in list_IDsT2 if f"-{fold_id}-" in x]
        labels = [x for x in labels if f"-{fold_id}-" in x]
        assert len(labels) == len(list_IDsT1)
        assert len(labels) == len(list_IDsT2)
        assert len(labels) == 1
        self.list = create_sub_volumes(
            list_IDsT1,
            list_IDsT2,
            labels,
            dataset_name="iseg2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )
        self.full_volume = get_viz_set(
            list_IDsT1, list_IDsT2, labels, dataset_name="iseg2019"
        )
    elif self.mode == "test":
        # Inference on the held-out subject: tile it with non-overlapping crops.
        list_IDsT1 = [x for x in list_IDsT1 if f"-{fold_id}-" in x]
        list_IDsT2 = [x for x in list_IDsT2 if f"-{fold_id}-" in x]
        labels = [x for x in labels if f"-{fold_id}-" in x]
        assert len(labels) == len(list_IDsT1)
        assert len(labels) == len(list_IDsT2)
        assert len(labels) == 1
        self.list = create_non_overlapping_sub_volumes(
            list_IDsT1,
            list_IDsT2,
            labels,
            dataset_name="iseg2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )
        self.full_volume = get_viz_set(
            list_IDsT1, list_IDsT2, labels, dataset_name="iseg2019"
        )

    utils.save_list(self.save_name, self.list)
def __init__(
    self,
    args,
    mode,
    dataset_path="./datasets",
    classes=5,
    crop_dim=(200, 200, 150),
    split_idx=260,
    samples=10,
    load=False,
):
    """Build the BraTS-2019 dataset with an 80/20 train/val split per grade.

    HGG and LGG subjects are shuffled (fixed seed) and split independently
    so both grades are represented in train and val.

    :param args: namespace providing threshold, normalization, augmentation
    :param mode: 'train', 'val' or 'test'
    :param dataset_path: root dataset folder
    :param classes: number of segmentation classes
    :param crop_dim: sub-volume (slice, width, height) tuple
    :param split_idx: unused; the split is a fixed 80/20 per tumour grade
    :param samples: number of sub-volumes that you want to create
    :param load: when True, reuse a previously generated sub-volume list
    """
    self.mode = mode
    self.root = str(dataset_path)
    self.training_path = self.root + "/brats2019/MICCAI_BraTS_2019_Data_Training/"
    self.testing_path = self.root + "/brats2019/MICCAI_BraTS_2019_Data_Validation/"
    self.full_vol_dim = (240, 240, 155)  # slice, width, height
    self.crop_size = crop_dim
    self.threshold = args.threshold
    self.normalization = args.normalization
    self.augmentation = args.augmentation
    self.list = []
    self.samples = samples
    self.full_volume = None
    self.classes = classes
    if self.augmentation:
        self.transform = augment3D.RandomChoice(
            transforms=[
                augment3D.GaussianNoise(mean=0, std=0.01),
                augment3D.RandomFlip(),
                augment3D.ElasticTransform(),
            ],
            p=0.5,
        )
    self.save_name = os.path.join(
        self.root, "brats2019", f"brats2019-list-{mode}-samples-{samples}.txt"
    )

    if load:
        # Reuse the pre-generated sub-volume list; only the affine is re-read.
        self.list = utils.load_list(self.save_name)
        list_IDsT1 = sorted(
            glob.glob(os.path.join(self.training_path, "*GG/*/*t1.nii.gz"))
        )
        self.affine = img_loader.load_affine_matrix(list_IDsT1[0])
        return

    subvol = (
        "_vol_" + str(crop_dim[0]) + "x" + str(crop_dim[1]) + "x" + str(crop_dim[2])
    )
    self.sub_vol_path = (
        self.root
        + "/brats2019/MICCAI_BraTS_2019_Data_Training/generated/"
        + mode
        + subvol
        + "/"
    )
    utils.make_dirs(self.sub_vol_path)

    # Gather HGG and LGG modalities separately so each grade is split on its own.
    HGG_IDsT1 = sorted(
        glob.glob(os.path.join(self.training_path, "HGG/*/*t1.nii.gz"))
    )
    HGG_IDsT1ce = sorted(
        glob.glob(os.path.join(self.training_path, "HGG/*/*t1ce.nii.gz"))
    )
    HGG_IDsT2 = sorted(
        glob.glob(os.path.join(self.training_path, "HGG/*/*t2.nii.gz"))
    )
    HGG_IDsFlair = sorted(
        glob.glob(os.path.join(self.training_path, "HGG/*/*_flair.nii.gz"))
    )
    HGG_labels = sorted(
        glob.glob(os.path.join(self.training_path, "HGG/*/*_seg.nii.gz"))
    )
    LGG_IDsT1 = sorted(
        glob.glob(os.path.join(self.training_path, "LGG/*/*t1.nii.gz"))
    )
    LGG_IDsT1ce = sorted(
        glob.glob(os.path.join(self.training_path, "LGG/*/*t1ce.nii.gz"))
    )
    LGG_IDsT2 = sorted(
        glob.glob(os.path.join(self.training_path, "LGG/*/*t2.nii.gz"))
    )
    LGG_IDsFlair = sorted(
        glob.glob(os.path.join(self.training_path, "LGG/*/*_flair.nii.gz"))
    )
    LGG_labels = sorted(
        glob.glob(os.path.join(self.training_path, "LGG/*/*_seg.nii.gz"))
    )

    # Shuffle all modality lists in lockstep (fixed seed keeps splits reproducible).
    (
        HGG_IDsT1,
        HGG_IDsT1ce,
        HGG_IDsT2,
        HGG_IDsFlair,
        HGG_labels,
    ) = utils.shuffle_lists(
        HGG_IDsT1, HGG_IDsT1ce, HGG_IDsT2, HGG_IDsFlair, HGG_labels, seed=17
    )
    (
        LGG_IDsT1,
        LGG_IDsT1ce,
        LGG_IDsT2,
        LGG_IDsFlair,
        LGG_labels,
    ) = utils.shuffle_lists(
        LGG_IDsT1, LGG_IDsT1ce, LGG_IDsT2, LGG_IDsFlair, LGG_labels, seed=17
    )

    self.affine = img_loader.load_affine_matrix((HGG_IDsT1 + LGG_IDsT1)[0])

    hgg_len = len(HGG_IDsT1)
    lgg_len = len(LGG_IDsT1)
    print("Brats2019, Training HGG:", hgg_len)
    print("Brats2019, Training LGG:", lgg_len)
    print("Brats2019, Training total:", hgg_len + lgg_len)
    hgg_split = int(hgg_len * 0.8)
    lgg_split = int(lgg_len * 0.8)

    if self.mode == "train":
        # BUG FIX: LGG lists were sliced with hgg_split instead of lgg_split,
        # so the per-grade 80/20 split was wrong whenever the grade counts differ.
        list_IDsT1 = HGG_IDsT1[:hgg_split] + LGG_IDsT1[:lgg_split]
        list_IDsT1ce = HGG_IDsT1ce[:hgg_split] + LGG_IDsT1ce[:lgg_split]
        list_IDsT2 = HGG_IDsT2[:hgg_split] + LGG_IDsT2[:lgg_split]
        list_IDsFlair = HGG_IDsFlair[:hgg_split] + LGG_IDsFlair[:lgg_split]
        labels = HGG_labels[:hgg_split] + LGG_labels[:lgg_split]
        self.list = create_sub_volumes(
            list_IDsT1,
            list_IDsT1ce,
            list_IDsT2,
            list_IDsFlair,
            labels,
            dataset_name="brats2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )
    elif self.mode == "val":
        # BUG FIX (as above): use lgg_split for the LGG tail.
        list_IDsT1 = HGG_IDsT1[hgg_split:] + LGG_IDsT1[lgg_split:]
        list_IDsT1ce = HGG_IDsT1ce[hgg_split:] + LGG_IDsT1ce[lgg_split:]
        list_IDsT2 = HGG_IDsT2[hgg_split:] + LGG_IDsT2[lgg_split:]
        list_IDsFlair = HGG_IDsFlair[hgg_split:] + LGG_IDsFlair[lgg_split:]
        labels = HGG_labels[hgg_split:] + LGG_labels[lgg_split:]
        self.list = create_sub_volumes(
            list_IDsT1,
            list_IDsT1ce,
            list_IDsT2,
            list_IDsFlair,
            labels,
            dataset_name="brats2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )
    elif self.mode == "test":
        # Test tiles the val subjects with non-overlapping crops.
        # BUG FIX (as above): use lgg_split for the LGG tail.
        list_IDsT1 = HGG_IDsT1[hgg_split:] + LGG_IDsT1[lgg_split:]
        list_IDsT1ce = HGG_IDsT1ce[hgg_split:] + LGG_IDsT1ce[lgg_split:]
        list_IDsT2 = HGG_IDsT2[hgg_split:] + LGG_IDsT2[lgg_split:]
        list_IDsFlair = HGG_IDsFlair[hgg_split:] + LGG_IDsFlair[lgg_split:]
        labels = HGG_labels[hgg_split:] + LGG_labels[lgg_split:]
        self.list = create_non_overlapping_sub_volumes(
            list_IDsT1,
            list_IDsT1ce,
            list_IDsT2,
            list_IDsFlair,
            labels,
            dataset_name="brats2019",
            mode=mode,
            samples=samples,
            full_vol_dim=self.full_vol_dim,
            crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path,
            th_percent=self.threshold,
        )

    utils.save_list(self.save_name, self.list)
def __init__(self, args, mode, dataset_path='./datasets', classes=5,
             crop_dim=(32, 32, 32), split_idx=10, samples=10, load=False):
    """Build the BraTS-2018 dataset split at a fixed subject index.

    :param mode: 'train','val','test'
    :param dataset_path: root dataset folder
    :param crop_dim: subvolume tuple
    :param split_idx: index separating train subjects from val subjects
    :param samples: number of sub-volumes that you want to create
    :param load: when True, reuse a previously generated sub-volume list
    """
    self.mode = mode
    self.root = str(dataset_path)
    self.training_path = self.root + '/MICCAI_BraTS_2018_Data_Training/'
    # NOTE(review): testing_path is the root plus a single space — looks like a
    # placeholder/broken path; test-mode globs on it will match nothing. Confirm.
    self.testing_path = self.root + ' '
    self.CLASSES = 4
    self.full_vol_dim = (240, 240, 155)  # slice, width, height
    self.crop_size = crop_dim
    self.threshold = args.threshold
    self.normalization = args.normalization
    self.augmentation = args.augmentation
    self.list = []
    self.samples = samples
    self.full_volume = None
    self.classes = classes
    self.save_name = (f'{self.root}/MICCAI_BraTS_2018_Data_Training/'
                      f'brats2018-list-{mode}-samples-{samples}.txt')

    if self.augmentation:
        self.transform = augment3D.RandomChoice(
            transforms=[augment3D.GaussianNoise(mean=0, std=0.01),
                        augment3D.RandomFlip(),
                        augment3D.ElasticTransform()],
            p=0.5)

    if load:
        # Previously generated data: restore the saved list, re-read the affine.
        t1_paths = sorted(
            glob.glob(os.path.join(self.training_path, '*GG/*/*t1.nii.gz')))
        self.affine = img_loader.load_affine_matrix(t1_paths[0])
        self.list = utils.load_list(self.save_name)
        return

    dims_tag = '_vol_{}x{}x{}'.format(*crop_dim)
    self.sub_vol_path = (self.root + '/MICCAI_BraTS_2018_Data_Training/generated/'
                         + mode + dims_tag + '/')
    utils.make_dirs(self.sub_vol_path)

    # Collect every modality (both HGG and LGG via the *GG wildcard).
    t1_paths = sorted(
        glob.glob(os.path.join(self.training_path, '*GG/*/*t1.nii.gz')))
    t1ce_paths = sorted(
        glob.glob(os.path.join(self.training_path, '*GG/*/*t1ce.nii.gz')))
    t2_paths = sorted(
        glob.glob(os.path.join(self.training_path, '*GG/*/*t2.nii.gz')))
    flair_paths = sorted(
        glob.glob(os.path.join(self.training_path, '*GG/*/*_flair.nii.gz')))
    seg_paths = sorted(
        glob.glob(os.path.join(self.training_path, '*GG/*/*_seg.nii.gz')))
    self.affine = img_loader.load_affine_matrix(t1_paths[0])

    if self.mode == 'train':
        # Train on the first split_idx subjects.
        self.list = create_sub_volumes(
            t1_paths[:split_idx], t1ce_paths[:split_idx], t2_paths[:split_idx],
            flair_paths[:split_idx], seg_paths[:split_idx],
            dataset_name="brats2018", mode=mode, samples=samples,
            full_vol_dim=self.full_vol_dim, crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path, normalization=self.normalization,
            th_percent=self.threshold)
    elif self.mode == 'val':
        # Validate on the remaining subjects.
        self.list = create_sub_volumes(
            t1_paths[split_idx:], t1ce_paths[split_idx:], t2_paths[split_idx:],
            flair_paths[split_idx:], seg_paths[split_idx:],
            dataset_name="brats2018", mode=mode, samples=samples,
            full_vol_dim=self.full_vol_dim, crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path, normalization=self.normalization,
            th_percent=self.threshold)
    elif self.mode == 'test':
        # Unlabeled test data: only path lists are stored on the instance.
        self.list_IDsT1 = sorted(
            glob.glob(os.path.join(self.testing_path, '*GG/*/*t1.nii.gz')))
        self.list_IDsT1ce = sorted(
            glob.glob(os.path.join(self.testing_path, '*GG/*/*t1ce.nii.gz')))
        self.list_IDsT2 = sorted(
            glob.glob(os.path.join(self.testing_path, '*GG/*/*t2.nii.gz')))
        self.list_IDsFlair = sorted(
            glob.glob(os.path.join(self.testing_path, '*GG/*/*_flair.nii.gz')))
        self.labels = None

    utils.save_list(self.save_name, self.list)
def __init__(self, args, mode, dataset_path='./datasets', crop_dim=(32, 32, 32),
             split_id=1, samples=1000, load=False):
    """Build the iSeg-2017 dataset split at a fixed subject index.

    :param args: namespace providing threshold, normalization, augmentation
    :param mode: 'train', 'val', 'test' or 'viz'
    :param dataset_path: root dataset folder
    :param crop_dim: subvolume tuple
    :param split_id: index separating train subjects from val/viz subjects
    :param samples: number of sub-volumes that you want to create
    :param load: ignored — see HACK below
    """
    # HACK: list loading is force-disabled, so sub-volumes are always
    # regenerated regardless of the caller's `load` argument.
    # TODO(review): confirm whether honouring `load` is safe to restore.
    load = False
    self.mode = mode
    self.root = str(dataset_path)
    self.training_path = self.root + '/iseg_2017/iSeg-2017-Training/'
    self.testing_path = self.root + '/iseg_2017/iSeg-2017-Testing/'
    self.CLASSES = 4
    self.full_vol_dim = (144, 192, 256)  # slice, width, height
    self.threshold = args.threshold
    self.normalization = args.normalization
    self.augmentation = args.augmentation
    self.crop_size = crop_dim
    self.list = []
    self.samples = samples
    self.full_volume = None
    self.save_name = self.root + '/iseg2017-list-' + mode + '-samples-' + str(
        samples) + '.txt'

    if self.augmentation:
        self.transform = augment3D.RandomChoice(transforms=[
            augment3D.GaussianNoise(mean=0, std=0.01),
            augment3D.RandomFlip(),
            augment3D.ElasticTransform()
        ], p=0.5)

    if load:
        # Reuse the pre-generated sub-volume list; only the affine is re-read.
        # (Currently unreachable because of the HACK above.)
        self.list = utils.load_list(self.save_name)
        list_IDsT1 = sorted(
            glob.glob(os.path.join(self.training_path, '*T1.img')))
        self.affine = img_loader.load_affine_matrix(list_IDsT1[0])
        return

    subvol = '_vol_' + str(crop_dim[0]) + 'x' + str(
        crop_dim[1]) + 'x' + str(crop_dim[2])
    self.sub_vol_path = self.root + '/iseg_2017/generated/' + mode + subvol + '/'
    utils.make_dirs(self.sub_vol_path)

    list_IDsT1 = sorted(
        glob.glob(os.path.join(self.training_path, '*T1.img')))
    list_IDsT2 = sorted(
        glob.glob(os.path.join(self.training_path, '*T2.img')))
    labels = sorted(
        glob.glob(os.path.join(self.training_path, '*label.img')))
    self.affine = img_loader.load_affine_matrix(list_IDsT1[0])

    if self.mode == 'train':
        # First split_id subjects are the training set.
        list_IDsT1 = list_IDsT1[:split_id]
        list_IDsT2 = list_IDsT2[:split_id]
        labels = labels[:split_id]
        self.list = create_sub_volumes(
            list_IDsT1, list_IDsT2, labels,
            dataset_name="iseg2017", mode=mode, samples=samples,
            full_vol_dim=self.full_vol_dim, crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path, th_percent=self.threshold,
            normalization=args.normalization)
    elif self.mode == 'val':
        utils.make_dirs(self.sub_vol_path)
        # Remaining subjects are the validation set.
        list_IDsT1 = list_IDsT1[split_id:]
        list_IDsT2 = list_IDsT2[split_id:]
        labels = labels[split_id:]
        self.list = create_sub_volumes(
            list_IDsT1, list_IDsT2, labels,
            dataset_name="iseg2017", mode=mode, samples=samples,
            full_vol_dim=self.full_vol_dim, crop_size=self.crop_size,
            sub_vol_path=self.sub_vol_path, th_percent=self.threshold,
            normalization=args.normalization)
        self.full_volume = get_viz_set(list_IDsT1, list_IDsT2, labels,
                                       dataset_name="iseg2017")
    elif self.mode == 'test':
        # Unlabeled test data: only path lists are stored on the instance.
        self.list_IDsT1 = sorted(
            glob.glob(os.path.join(self.testing_path, '*T1.img')))
        self.list_IDsT2 = sorted(
            glob.glob(os.path.join(self.testing_path, '*T2.img')))
        self.labels = None
    elif self.mode == 'viz':
        # BUG FIX: T2 was sliced with [:split_id:] (the training subjects)
        # while T1 and labels used [split_id:] (the held-out subjects), so the
        # modalities were misaligned. All three now match the 'val' branch.
        list_IDsT1 = list_IDsT1[split_id:]
        list_IDsT2 = list_IDsT2[split_id:]
        labels = labels[split_id:]
        self.full_volume = get_viz_set(list_IDsT1, list_IDsT2, labels,
                                       dataset_name="iseg2017")
        self.list = []

    utils.save_list(self.save_name, self.list)