def load_3d(self):
        """
        dataset_mode: HGG/LGG/ALL
        return list(dict[patient_id][modality] = filename.nii.gz)
        """
        print("Data Folder: ", self.basedir)

        # single CT modality; the multi-modal MR list from the BraTS version is kept as a comment
        #modalities = ['flair', 't1ce', 't1.', 't2']
        modalities = ['ct']

        if self.mode == 'test':
            imgs = [self.basedir]
            patient_ids = ['test' for x in imgs]
        else:
            imgs = glob.glob(self.basedir + "/*")
            patient_ids = [x.split("/")[-1] for x in imgs]

        ret = []
        print("Preprocessing Data ...")
        for idx, file_name in tqdm(enumerate(imgs), total=len(imgs)):
            data = {}
            data['image_data'] = {}
            data['file_name'] = file_name
            data['id'] = patient_ids[idx]
            # read modality (a single CT volume per patient in this variant)
            mod = glob.glob(file_name + "/*.nii*")
            #assert len(mod) >= 4, '{}'.format(file_name)  # 4mod +1gt
            for m in mod:
                data['image_data']['ct'] = m

            # ground-truth masks, if any, live under <basedir>/mask_nii/<patient_id>*.nii*
            gt_matches = glob.glob(
                os.path.join(self.basedir, "mask_nii",
                             file_name.split('/')[-1] + "*.nii*"))
            if gt_matches:
                data['gt'] = gt_matches[0]
                data['preprocessed'] = crop_brain_region(
                    data['image_data'], data['gt'])
                del data['image_data']
                del data['gt']
            else:
                data['preprocessed'] = crop_brain_region(data['image_data'],
                                                         None,
                                                         with_gt=False)
                del data['image_data']

            ret.append(data)
        return ret
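For reference, this is roughly the shape of one record produced by the CT variant above; the paths below are made-up placeholders, and 'preprocessed' holds whatever crop_brain_region returns:

# Illustration only -- paths are hypothetical examples, not from the dataset.
record = {
    'file_name': '/data/ct_scans/patient_001',
    'id': 'patient_001',
    # before preprocessing:
    'image_data': {'ct': '/data/ct_scans/patient_001/patient_001_ct.nii.gz'},
}
# After crop_brain_region runs, 'image_data' (and 'gt', when a mask is found)
# are deleted and replaced by a single 'preprocessed' entry.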
Example #2
    def load_3d(self):
        """
        dataset_mode: HGG/LGG/ALL
        return list(dict[patient_id][modality] = filename.nii.gz)
        """
        print("Data Folder: ", self.basedir)

        modalities = ['flair', 't1ce', 't1.', 't2']

        if 'training' in self.basedir:
            img_HGG = glob.glob(self.basedir + "/GBM/*")
            img_LGG = glob.glob(self.basedir + "/LGG/*")
            imgs = img_HGG + img_LGG
        else:
            imgs = glob.glob(self.basedir + "/*")
        imgs = [x for x in imgs if 'survival_evaluation.csv' not in x]

        patient_ids = [x.split("/")[-1] for x in imgs]
        ret = []
        print("Preprocessing Data ...")
        for idx, file_name in tqdm(enumerate(imgs), total=len(imgs)):
            data = {}
            data['image_data'] = {}
            data['file_name'] = file_name
            data['id'] = patient_ids[idx]
            # read modality
            mod = glob.glob(file_name + "/*.nii*")
            assert len(mod) >= 4, '{}'.format(file_name)  # 4mod +1gt
            for m in mod:
                if 'seg' in m:
                    data['gt'] = m
                else:
                    _m = m.split("/")[-1].split(".")[0].split("_")[-1]
                    data['image_data'][_m] = m

            if 'gt' in data:
                # preprocess up front only when caching is enabled and this is not the training set
                if not config.NO_CACHE and 'training' not in self.basedir:
                    data['preprocessed'] = crop_brain_region(
                        data['image_data'], data['gt'])
                    del data['image_data']
                    del data['gt']
            else:
                data['preprocessed'] = crop_brain_region(data['image_data'],
                                                         None,
                                                         with_gt=False)
                del data['image_data']

            ret.append(data)
        return ret
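The modality key for image_data is derived purely from the file name; a small runnable illustration of that parsing expression, using a made-up BraTS-style path:

# Illustration of the key-extraction expression used in the loop above.
# The path is a made-up example following the usual BraTS naming scheme.
m = "training/HGG/BraTS20_Training_001/BraTS20_Training_001_flair.nii.gz"
_m = m.split("/")[-1].split(".")[0].split("_")[-1]
print(_m)  # -> 'flair'; a file whose name contains 'seg' is routed to data['gt'] instead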
Example #3
    def load_5fold(self):
        with open(config.CROSS_VALIDATION_PATH, 'rb') as f:
            data = pickle.load(f)
        imgs = data["fold{}".format(config.FOLD)][self.mode]
        patient_ids = [x.split("/")[-1] for x in imgs]
        ret = []
        for idx, file_name in enumerate(imgs):
            data = {}
            data['image_data'] = {}
            data['file_name'] = file_name
            data['id'] = patient_ids[idx]
            mod = glob.glob(file_name + "/*.nii*")
            assert len(mod) >= 4
            for m in mod:
                if 'seg' in m:
                    data['gt'] = m
                else:
                    _m = m.split("/")[-1].split(".")[0].split("_")[-1]
                    data['image_data'][_m] = m
            if 'gt' in data:
                data['preprocessed'] = crop_brain_region(
                    data['image_data'], data['gt'])
                del data['image_data']
                del data['gt']
            ret.append(data)
        return ret
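load_5fold expects a pickle at config.CROSS_VALIDATION_PATH that maps "fold<k>" to per-split lists of patient directories, indexed by self.mode. A minimal sketch of how such a file could be generated; the split names ('training'/'val'), the input path and the output file name are assumptions, not taken from the source:

import glob
import pickle
import random

# Sketch only: writes folds["fold<k>"]["training"/"val"] = list of patient dirs,
# the layout load_5fold reads. Paths and split names are assumptions.
patients = sorted(glob.glob("/path/to/BRATS/training/HGG/*"))
random.seed(0)
random.shuffle(patients)

folds = {}
for k in range(5):
    val = patients[k::5]
    folds["fold{}".format(k)] = {
        "training": [p for p in patients if p not in val],
        "val": val,
    }

with open("5fold.pkl", "wb") as f:  # point config.CROSS_VALIDATION_PATH here
    pickle.dump(folds, f)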
    def load_5fold(self):
        # Debugging variant: the pickle-based fold selection is commented out
        # and two patient directories are hard-coded instead.
        # with open(config.CROSS_VALIDATION_PATH, 'rb') as f:
        #     data = pickle.load(f)
        # imgs = data["fold{}".format(config.FOLD)][self.mode]
        # patient_ids = [x.split("/")[-1] for x in imgs]
        ret = []
        patient_ids = [257, 258]
        imgs = ['/home/guan/Documents/codes/3DUnet-Tensorflow-Brats18-master/data/dataset/BRATS2018/val/HGG/BraTS20_Training_257',
                '/home/guan/Documents/codes/3DUnet-Tensorflow-Brats18-master/data/dataset/BRATS2018/val/HGG/BraTS20_Training_258']
        for idx, file_name in enumerate(imgs):
            data = {}
            data['image_data'] = {}
            data['file_name'] = file_name
            data['id'] = patient_ids[idx]
            # read modality
            mod = glob.glob(file_name + "/*.nii*")
            assert len(mod) >= 4  # 4mod +1gt
            for m in mod:
                if 'seg' in m:
                    data['gt'] = m
                else:
                    _m = m.split("/")[-1].split(".")[0].split("_")[-1]
                    data['image_data'][_m] = m
            if 'gt' in data:
                data['preprocessed'] = crop_brain_region(
                    data['image_data'], data['gt'])
                del data['image_data']
                del data['gt']
            ret.append(data)
        return ret