Example 1
0
class NumpyDataSet(object):
    """
    Iterable dataset of numpy-array batches.

    Wraps a ``NumpyDataLoader`` in a ``MultiThreadedDataLoader`` so that
    batch loading and transformation run in background worker processes.
    Iterating the object yields augmented batches; ``len()`` delegates to
    the underlying loader.

    NOTE(review): the semantics of ``NumpyDataLoader``, ``get_transforms``
    and ``MultiThreadedDataLoader`` are assumed from this call site only —
    they are defined elsewhere in the project.
    """
    def __init__(self, base_dir, mode="train", batch_size=16, num_batches=10000000, seed=None, num_processes=8, num_cached_per_queue=8 * 4, target_size=128,
                 file_pattern='*.npy', label_slice=1, input_slice=(0,), do_reshuffle=True, keys=None):

        loader = NumpyDataLoader(base_dir=base_dir, mode=mode, batch_size=batch_size, num_batches=num_batches, seed=seed, file_pattern=file_pattern,
                                 input_slice=input_slice, label_slice=label_slice, keys=keys)

        # Keep the knobs needed later by __iter__/__len__.
        self.batch_size = batch_size
        self.do_reshuffle = do_reshuffle
        self.number_of_slices = 1
        self.data_loader = loader

        # Mode-dependent transform pipeline (train vs. eval augmentation).
        self.transforms = get_transforms(mode=mode, target_size=target_size)
        self.augmenter = MultiThreadedDataLoader(loader, self.transforms, num_processes=num_processes,
                                                 num_cached_per_queue=num_cached_per_queue, seeds=seed,
                                                 shuffle=do_reshuffle)
        # Start the worker processes immediately.
        self.augmenter.restart()

    def __len__(self):
        # Length is whatever the underlying loader reports.
        return len(self.data_loader)

    def __iter__(self):
        # Optionally reshuffle the sample order, then hand back a
        # renewed augmenter as the actual iterator.
        if self.do_reshuffle:
            self.data_loader.reshuffle()
        self.augmenter.renew()
        return self.augmenter

    def __next__(self):
        # Delegate element production to the augmenter.
        return next(self.augmenter)
Example 2
0
    def __init__(self,
                 base_dir,
                 mode="train",
                 batch_size=16,
                 num_batches=10000000,
                 seed=None,
                 num_processes=8,
                 num_cached_per_queue=8 * 4,
                 target_size=128,
                 file_pattern='*.npy',
                 label_slice=1,
                 input_slice=(0, ),
                 do_reshuffle=True,
                 keys=None):
        """Build the underlying loader and a multi-process augmenter.

        NOTE(review): ``NumpyDataLoader``, ``get_transforms`` and
        ``MultiThreadedDataLoader`` are project-level helpers; their
        behavior is assumed from this call site only.
        """
        loader = NumpyDataLoader(base_dir=base_dir,
                                 mode=mode,
                                 batch_size=batch_size,
                                 num_batches=num_batches,
                                 seed=seed,
                                 file_pattern=file_pattern,
                                 input_slice=input_slice,
                                 label_slice=label_slice,
                                 keys=keys)

        self.data_loader = loader
        self.batch_size = batch_size
        self.do_reshuffle = do_reshuffle
        self.number_of_slices = 1

        # Transform pipeline depends on mode (train vs. eval) and size.
        self.transforms = get_transforms(mode=mode, target_size=target_size)
        self.augmenter = MultiThreadedDataLoader(
            loader,
            self.transforms,
            num_processes=num_processes,
            num_cached_per_queue=num_cached_per_queue,
            seeds=seed,
            shuffle=do_reshuffle)
        # Start the worker processes right away.
        self.augmenter.restart()