Example #1
def load_dataset(self, task=None):
    # Load the preprocessed data for the requested task; fall back to the
    # first configured task when none is given.
    if task is None:
        self.dataset = load_dataset(
            self.folder_with_preprocessed_data[self.tasks[0]])
    else:
        self.dataset = load_dataset(
            self.folder_with_preprocessed_data[task])
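
This method comes from a trainer that handles several tasks and therefore keeps one preprocessed-data folder per task. The sketch below adds a minimal surrounding class so the snippet can be read in context; the class name, constructor and folder layout are assumptions made for illustration, while the attribute names (self.tasks, self.folder_with_preprocessed_data) and the module-level load_dataset are taken from the example.

import os

from nnunet.training.dataloading.dataset_loading import load_dataset


class MultiTaskTrainer:  # hypothetical class name, for illustration only
    def __init__(self, tasks, preprocessed_root):
        self.tasks = tasks  # e.g. ["Task002_Heart", "Task004_Hippocampus"]
        # assumed layout: one preprocessed-data folder per task
        self.folder_with_preprocessed_data = {
            t: os.path.join(preprocessed_root, t) for t in tasks}
        self.dataset = None

With such a class, load_dataset() falls back to the first task in self.tasks, while load_dataset("Task004_Hippocampus") loads a specific task.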
Example #2
def load_dataset(self):
    # Single-task variant: one preprocessed-data folder for the whole trainer.
    self.dataset = load_dataset(self.folder_with_preprocessed_data)
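
In both variants the actual loading is done by the module-level load_dataset from nnunet.training.dataloading.dataset_loading (the same function imported in Example #3 below), which scans a folder of preprocessed cases and returns a dictionary keyed by case identifier. The loop below is a usage sketch: the folder path is a placeholder, and the per-case keys ('data_file', 'properties_file') follow nnU-Net's usual preprocessed layout but should be treated as assumptions for your version.

from nnunet.training.dataloading.dataset_loading import load_dataset

dataset = load_dataset("/path/to/preprocessed/Task002_Heart")  # placeholder path
for case_id, case in dataset.items():
    # each entry typically references the preprocessed .npz file and its
    # pickled properties; adjust the keys to your nnU-Net version
    print(case_id, case.get("data_file"), case.get("properties_file"))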
Example #3
        # Trailing arguments of the validation-side augmenter: use half of the
        # training worker threads (at least one), the configured cache size per
        # thread, dedicated validation seeds, and optional pinned memory.
        max(params.get('num_threads') // 2, 1),
        params.get("num_cached_per_thread"),
        seeds=seeds_val,
        pin_memory=pin_memory)
    return batchgenerator_train, batchgenerator_val


if __name__ == "__main__":
    # get_patch_size, default_3D_augmentation_params and get_default_augmentation
    # are expected to be defined in the enclosing module (see the fragment above).
    from nnunet.training.dataloading.dataset_loading import DataLoader3D, load_dataset
    from nnunet.paths import preprocessing_output_dir
    import numpy as np  # used below for the patch-size arrays
    import os
    import pickle

    t = "Task002_Heart"
    p = os.path.join(preprocessing_output_dir, t)
    dataset = load_dataset(p, 0)
    with open(os.path.join(p, "plans.pkl"), 'rb') as f:
        plans = pickle.load(f)

    basic_patch_size = get_patch_size(
        np.array(plans['stage_properties'][0].patch_size),
        default_3D_augmentation_params['rotation_x'],
        default_3D_augmentation_params['rotation_y'],
        default_3D_augmentation_params['rotation_z'],
        default_3D_augmentation_params['scale_range'])

    dl = DataLoader3D(
        dataset, basic_patch_size,
        np.array(plans['stage_properties'][0].patch_size).astype(int), 1)
    tr, val = get_default_augmentation(
        dl, dl,