Code example #1
0
    def load_dataset(self):
        """Build the training data pipeline and hold out a validation fold.

        Reads the patient id list, deterministically splits it into
        train/validation folds, sizes the loader to the largest patient
        volume seen in the training fold, and wraps the loader in a
        multi-threaded augmenter.

        Side effects: sets ``self.train_data_loader``,
        ``self.num_batches_per_epoch`` and ``self.val``.
        """
        all_ids = get_file_list(self.args.data_path, self.args.train_ids_path)

        # Deterministic 5-fold split (fold 0) so every run sees the same
        # train/validation partition.
        train, val = get_split_deterministic(all_ids,
                                             fold=0,
                                             num_splits=5,
                                             random_state=12345)

        # Pad batches up to the largest spatial shape in the training set,
        # but never below the configured patch size.
        spatial_shapes = [brats_dataloader.load_patient(case)[0].shape[1:]
                          for case in train]
        padded_shape = list(
            np.max((np.max(spatial_shapes, 0), self.args.patch_size), 0))

        base_loader = brats_dataloader(train,
                                       self.args.batch_size,
                                       padded_shape,
                                       self.args.num_threads,
                                       return_incomplete=True,
                                       infinite=False)

        augmenter = MultiThreadedAugmenter(
            base_loader,
            get_train_transform(self.args.patch_size),
            num_processes=self.args.num_threads,
            num_cached_per_queue=3,
            seeds=None,
            pin_memory=False)

        # One epoch = one pass over the training ids, partial batch included.
        self.num_batches_per_epoch = int(
            math.ceil(len(train) / self.args.batch_size))
        self.train_data_loader = augmenter
        self.val = val
Code example #2
0
    # Gaussian blur augmentation: applied to 15% of samples; within an
    # affected sample each channel is blurred independently with
    # probability 0.5, each with its own sigma drawn from [0.5, 1.5].
    tr_transforms.append(
        GaussianBlurTransform(blur_sigma=(0.5, 1.5),
                              different_sigma_per_channel=True,
                              p_per_channel=0.5,
                              p_per_sample=0.15))

    # Compose all transforms into a single callable pipeline.
    tr_transforms = Compose(tr_transforms)
    return tr_transforms


if __name__ == "__main__":
    # Discover the preprocessed BraTS cases on disk.
    patients = get_list_of_patients(brats_preprocessed_folder)

    # Deterministic 5-fold split: num_splits=5 holds out 1/5th as validation.
    train, val = get_split_deterministic(patients,
                                         fold=0,
                                         num_splits=5,
                                         random_state=12345)

    patch_size = (128, 128, 128)
    batch_size = 2

    # I recommend you don't use 'iteration over all training data' as epoch because in patch based training this is
    # really not super well defined. If you leave all arguments as default then each batch will contain randomly
    # selected patients. Since we don't care about epochs here we can set num_threads_in_multithreaded to anything.
    dataloader = BraTS2017DataLoader3D(train, batch_size, patch_size, 1)

    batch = next(dataloader)
    try:
        from batchviewer import view_batch
        # batch viewer can show up to 4d tensors. We can show only one sample, but that should be sufficient here
        view_batch(batch['data'][0], batch['seg'][0])
        # NOTE(review): excerpt is truncated here — the matching except clause
        # (presumably handling a missing batchviewer install) is not shown.
Code example #3
0
#         'patch_size': [24, 128, 128],
#         'pretrained': True
#     }
# }

# Gather preprocessed training cases for the requested BraTS year.
patients = get_list_of_patients(
    'brats_data_preprocessed/Brats{}TrainingData'.format(
        str(args.brats_train_year)))
batch_size = args.batch_size
patch_size = [args.patch_depth, args.patch_width, args.patch_height]
# Input channel identifiers (presumably MRI modalities) — TODO confirm they
# match the channel order produced by preprocessing.
in_channels = ['t1c', 't2', 'flair']

#%%
# num_splits=5 means 1/5th is validation data!
patients_train, patients_val = get_split_deterministic(patients,
                                                       fold=0,
                                                       num_splits=5,
                                                       random_state=args.seed)

# When validation is disabled, train on the full patient list instead of the
# 4/5 training fold (patients_val is left untouched).
if not args.use_validation:
    patients_train = patients

#%%
# The BraTS "ValidationData" release serves as the test set here.
patients_test = get_list_of_patients(
    'brats_data_preprocessed/Brats{}ValidationData'.format(
        str(args.brats_test_year)))
target_patients = patients_test

#%%
train_dl = BRATSDataLoader(patients_train,
                           batch_size=batch_size,
                           patch_size=patch_size,
                           # NOTE(review): excerpt is truncated mid-call —
                           # the remaining arguments are not shown.
Code example #4
0
    output_preprocessed_v0 = join(preprocessing_output_dir, task_name_variant0)
    maybe_mkdir_p(output_preprocessed_v0)

    # Patient ids are the MR sub-directory names (join=False keeps bare names).
    patients = subdirs(join(root, "MR"), join=False)
    task_name_variant1 = "Task037_CHAOS_Task_3_5_Variant1"
    task_name_variant2 = "Task038_CHAOS_Task_3_5_Variant2"

    output_preprocessed_v1 = join(preprocessing_output_dir, task_name_variant1)
    maybe_mkdir_p(output_preprocessed_v1)

    output_preprocessed_v2 = join(preprocessing_output_dir, task_name_variant2)
    maybe_mkdir_p(output_preprocessed_v2)

    # Variant 0: each patient contributes four entries, one per prefix
    # (T2_, T1_in_, T1_out_, CT_). Splitting is done at the patient level
    # first, so all images of one patient land in the same fold.
    splits = []
    for fold in range(5):
        tr, val = get_split_deterministic(patients, fold, 5, 12345)
        train = ["T2_" + i
                 for i in tr] + ["T1_in_" + i
                                 for i in tr] + ["T1_out_" + i for i in tr
                                                 ] + ["CT_" + i for i in tr]
        validation = ["T2_" + i for i in val] + ["T1_in_" + i for i in val] + [
            "T1_out_" + i for i in val
        ] + ["CT_" + i for i in val]
        splits.append({'train': train, 'val': validation})
    save_pickle(splits, join(output_preprocessed_v0, "splits_final.pkl"))

    # Second variant: only "T2_"- and "T1_"-prefixed entries per patient,
    # using the same deterministic patient-level folds.
    splits = []
    for fold in range(5):
        tr, val = get_split_deterministic(patients, fold, 5, 12345)
        train = ["T2_" + i for i in tr] + ["T1_" + i for i in tr]
        validation = ["T2_" + i for i in val] + ["T1_" + i for i in val]
        # NOTE(review): excerpt ends here — the append/save for this variant
        # (and presumably a variant-2 block) is not shown.