Example #1
    def val_dataloader(self):
        if self.data_config.dataset_name == "normal_dataset":
            transform = self._3d_augmenation("valid")
        else:
            transform = None
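        # Under DDP, shard the validation set across ranks; shuffle=False
        # keeps the evaluation order deterministic on every process.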
        if self.use_ddp:
            sampler = torch.utils.data.distributed.DistributedSampler(
                self.valid_dataset, shuffle=False)
            valid_loader = DataLoader(
                self.valid_dataset,
                batch_size=self.test_config.batch_size,
                num_workers=self.cpu_count,
                pin_memory=True,
                sampler=sampler,
                gpu_transforms=transform,
            )
        else:
            sampler = torch.utils.data.sampler.SequentialSampler(
                self.valid_dataset)
            valid_loader = DataLoader(
                self.valid_dataset,
                batch_size=self.test_config.batch_size,
                pin_memory=True,
                sampler=sampler,
                num_workers=self.cpu_count,
                gpu_transforms=transform,
            )

        return valid_loader
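The `DataLoader` in these snippets is not `torch.utils.data.DataLoader`: the `gpu_transforms` keyword points to rising's drop-in loader (`rising.loading.DataLoader`), which additionally accepts `sample_transforms` and `batch_transforms`. A minimal sketch of the imports the examples assume (the `self.*` attributes are project-specific and not reconstructed here; `rtr` and `Compose` appear from Example #3 on):

    import torch
    from rising.loading import DataLoader   # accepts gpu_transforms etc.
    import rising.transforms as rtr
    from rising.transforms import Compose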
Example #2
    def train_dataloader(self):
        # GPU-side augmentation is only defined for the "normal_dataset" variant.
        if self.data_config.dataset_name == "normal_dataset":
            transform = self._3d_augmenation("train")
        else:
            transform = None
        if self.use_ddp:
            # Shard the training set across ranks, reshuffling every epoch.
            sampler = torch.utils.data.distributed.DistributedSampler(
                self.train_dataset, shuffle=True)
            train_loader = DataLoader(
                self.train_dataset,
                batch_size=self.train_config.batch_size,
                num_workers=self.cpu_count,
                pin_memory=True,
                sampler=sampler,
                drop_last=True,  # keep per-rank batch counts equal
                gpu_transforms=transform,
            )
        else:
            sampler = torch.utils.data.sampler.RandomSampler(
                self.train_dataset)
            train_loader = DataLoader(
                self.train_dataset,
                batch_size=self.train_config.batch_size,
                pin_memory=True,
                sampler=sampler,
                drop_last=True,
                num_workers=self.cpu_count,
                gpu_transforms=transform,
            )
        return train_loader
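The `_3d_augmenation` helper (spelling as in the source) is defined elsewhere in the class and not shown. A hypothetical sketch of such a stage-dependent factory, using the rising imports sketched after Example #1; the concrete transforms are illustrative assumptions, not the original implementation:

    def _3d_augmenation(self, stage):
        # Hypothetical: spatial augmentation during training only.
        keys = ('data', 'label')
        if stage == "train":
            return Compose([
                rtr.Rot90(dims=(0, 1, 2), keys=keys),
                rtr.Mirror(dims=(0, 1, 2), keys=keys),
            ])
        # "valid" / "test": the loaders above accept gpu_transforms=None.
        return None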
Example #3
    def train_dataloader(self):
        transforms_augment_cpu = []
        transforms_augment = []

        #transforms_augment_cpu.append(rtr.intensity.RandomAddValue(UniformParameter(-0.2, 0.2)))
        #cpu_transforms = Compose(transforms_augment_cpu)

        keys = ('data', 'label')
        # transforms_augment.append(rtr.GaussianNoise(0., 0.05))
        transforms_augment.append(rtr.Rot90(dims=(0, 1, 2), keys=keys))
        transforms_augment.append(rtr.Mirror(dims=(0, 1, 2), keys=keys))
        #transforms_augment.append(ElasticDeformer3d(32, 4, keys=keys,
        #    interp_mode={ 'data': 'linear', 'label': 'nearest' }))
        #transforms_augment.append(rtr.BaseAffine(
        #    scale=UniformParameter(0.95, 1.05),
        #    rotation=UniformParameter(-45, 45), degree=True,
        #    translation=UniformParameter(-0.05, 0.05),
        #    keys=('data', 'label'),
        #    interpolation_mode='nearest'))
        gpu_transforms = Compose(transforms_augment)
        return DataLoader(self.train_dataset,
                          batch_size=self.hparams.batch_size,
                          num_workers=self.hparams.num_loader_workers,
                          shuffle=True,
                          #batch_transforms=cpu_transforms,
                          gpu_transforms=gpu_transforms,
                          pin_memory=True)
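A note on the split rising makes here: `batch_transforms` are applied to the collated batch on the CPU (inside the loader workers), while `gpu_transforms` run after the batch has been moved to the device, so the Rot90/Mirror work above never blocks the dataloading workers. A sketch of re-enabling the commented-out CPU-side pipeline, assuming `UniformParameter` comes from `rising.random` as the commented line suggests:

    from rising.random import UniformParameter
    cpu_transforms = Compose([
        rtr.intensity.RandomAddValue(UniformParameter(-0.2, 0.2)),
    ])
    # then: DataLoader(..., batch_transforms=cpu_transforms, gpu_transforms=...)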
Example #4
    def test_progressive_resize_integration(self):
        sizes = [1, 3, 6]
        scheduler = SizeStepScheduler([1, 2], sizes)  # grow at steps 1 and 2
        trafo = ProgressiveResize(scheduler)

        dset = [self.batch_dict] * 10
        loader = DataLoader(dset, num_workers=4, batch_transforms=trafo)

        data_shape = [tuple(i["data"].shape) for i in loader]

        self.assertIn((1, 1, 1, 1, 1), data_shape)
        # self.assertIn((1, 1, 3, 3, 3), data_shape)
        self.assertIn((1, 1, 6, 6, 6), data_shape)
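The middle assertion is likely commented out because of the worker setup: with `num_workers=4`, each worker process holds its own copy of the transform and therefore its own step counter, so which batch sees the intermediate size is not deterministic; only the initial and final sizes are safe to assert. A self-contained version of the same check, with an assumed `batch_dict` shape and a single worker so all three sizes appear:

    import torch
    from rising.loading import DataLoader
    from rising.transforms import ProgressiveResize, SizeStepScheduler

    batch_dict = {"data": torch.rand(1, 16, 16, 16)}       # (C, D, H, W) -- assumed
    trafo = ProgressiveResize(SizeStepScheduler([1, 2], [1, 3, 6]))
    loader = DataLoader([batch_dict] * 10, num_workers=0,  # 0: deterministic stepping
                        batch_transforms=trafo)
    shapes = [tuple(b["data"].shape) for b in loader]
    assert (1, 1, 1, 1, 1) in shapes
    assert (1, 1, 3, 3, 3) in shapes   # holds here, unlike with 4 workers
    assert (1, 1, 6, 6, 6) in shapes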
Example #5
    def val_dataloader(self):
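        # Note: Rot90 applies a randomly sampled 90-degree rotation per call,
        # so this validation loader performs stochastic test-time augmentation.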
        gpu_transforms = []
        keys = ('data', 'label')
        gpu_transforms.append(rtr.Rot90(dims=(0, 1, 2), keys=keys))
        gpu_transforms.append(rtr.Mirror(dims=(0, 1, 2), keys=keys))
        gpu_transforms = Compose(gpu_transforms)

        return DataLoader(self.val_dataset,
                          batch_size=2 * self.hparams.batch_size,
                          num_workers=self.hparams.num_loader_workers,
                          shuffle=False,
                          gpu_transforms=gpu_transforms,
                          pin_memory=True)
Example #6
    def train_dataloader(self):
        keys = ('data', 'label')
        transforms_augment = []
        transforms_augment.append(rtr.Rot90(dims=(0, 1, 2), keys=keys))
        transforms_augment.append(rtr.Mirror(dims=(0, 1, 2), keys=keys))
        #transforms_augment.append(ElasticDeformer3d(32, 4, keys=keys,
        #    interp_mode={ 'data': 'linear', 'label': 'nearest' }))
        gpu_transforms = Compose(transforms_augment)
        return DataLoader(self.train_dataset,
                          batch_size=self.hparams.batch_size,
                          num_workers=self.hparams.num_loader_workers,
                          shuffle=True,
                          gpu_transforms=gpu_transforms,
                          pin_memory=True)
Example #7
    def test_dataloader(self):
        if self.data_config.dataset_name == "normal_dataset":
            transform = self._3d_augmenation("test")
        else:
            transform = None
        # Unlike train/val, no DistributedSampler here: every process
        # iterates the full test set in order.
        sampler = torch.utils.data.sampler.SequentialSampler(self.test_dataset)
        test_loader = DataLoader(
            self.test_dataset,
            batch_size=self.test_config.batch_size,
            pin_memory=True,
            sampler=sampler,
            num_workers=self.cpu_count,
            gpu_transforms=transform,
        )
        return test_loader
Example #8
    def val_dataloader(self):
        gpu_transforms = []
        gpu_transforms.append(rtr.Rot90(dims=(0, 1, 2), keys=('data', 'label')))
        gpu_transforms.append(rtr.Mirror(dims=(0, 1, 2), keys=('data', 'label')))
        gpu_transforms = Compose(gpu_transforms)

        # batch_transforms = []
        # batch_transforms.append(BatchRandomCrop(self.hparams.crop_size, bs=1, dist=0, keys=('data', 'label')))
        # batch_transforms = Compose(batch_transforms)

        return DataLoader(self.val_dataset,
                          batch_size=2 * self.hparams.batch_size,
                          num_workers=self.hparams.num_loader_workers,
                          shuffle=False,
                          # batch_transforms=batch_transforms,
                          gpu_transforms=gpu_transforms,
                          pin_memory=True)
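`BatchRandomCrop` in the commented-out block does not ship with rising and is presumably project-local. If a batched random crop is wanted, rising's built-in crop transform covers similar ground; a hypothetical replacement, assuming `RandomCrop` is exported from `rising.transforms`:

    from rising.transforms import RandomCrop
    batch_transforms = Compose([
        RandomCrop(self.hparams.crop_size, keys=('data', 'label')),
    ])

The `2 * self.hparams.batch_size` is a common validation trick: with no gradients to store, evaluation can afford roughly double the training batch size.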