Example no. 1
    def loadData(self):
        """Build the train/test datasets and their DataLoaders.

        Both splits use only the centre camera image; training adds a
        small translation jitter and horizontal flips, while the test
        pipeline applies no augmentation.  Loaders are stored on
        ``self.trainloader`` / ``self.testloader``.
        """
        # Training pipeline: light geometric augmentation, then
        # tensor conversion and per-channel normalisation.
        train_pipeline = transforms.Compose([
            utils.RandomCoose(['center']),
            utils.Preprocess(self.input_shape),
            utils.RandomTranslate(10, 10),
            utils.RandomHorizontalFlip(),
            utils.ToTensor(),
            utils.Normalize([0.1, 0.4, 0.4], [0.9, 0.6, 0.5]),
        ])
        trainset = SimulationDataset("train", transforms=train_pipeline)
        self.trainloader = torch.utils.data.DataLoader(
            trainset,
            shuffle=True,
            batch_size=self.cfg.batch_size,
            num_workers=0,
            pin_memory=True)

        # Test pipeline: deterministic — no augmentation, same
        # normalisation statistics as training.
        test_pipeline = transforms.Compose([
            utils.RandomCoose(['center']),
            utils.Preprocess(self.input_shape),
            utils.ToTensor(),
            utils.Normalize([0.1, 0.4, 0.4], [0.9, 0.6, 0.5]),
        ])
        testset = SimulationDataset("test", transforms=test_pipeline)
        self.testloader = torch.utils.data.DataLoader(
            testset,
            batch_size=self.cfg.batch_size,
            shuffle=False,
            num_workers=0,
            pin_memory=True)
Example no. 2
    def loadData(self):
        """Build the train/test datasets and their DataLoaders.

        The training pipeline draws from all three camera views and
        applies geometric and colour augmentation; samples are drawn with
        a weighted random sampler (weights from ``utils.get_weights``),
        presumably to balance rare steering angles — confirm against
        ``get_weights``.  The test pipeline applies no augmentation.
        Loaders are stored on ``self.trainloader`` / ``self.testloader``.
        """
        trainset = SimulationDataset(
            "train",
            transforms=transforms.Compose([
                utils.RandomCoose(['centre', 'left', 'right']),
                utils.Preprocess(self.input_shape),
                utils.RandomTranslate(100, 10),
                utils.RandomBrightness(),
                utils.RandomContrast(),
                utils.RandomHue(),
                utils.RandomHorizontalFlip(),
                utils.ToTensor(),
                utils.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
            ]))

        # Per-sample weights driving the weighted random sampler.
        weights = utils.get_weights(trainset)
        sampler = torch.utils.data.sampler.WeightedRandomSampler(
            weights, len(weights), replacement=True)

        # BUG FIX: the sampler was constructed but never passed to the
        # DataLoader, which also had no shuffle=True — so the weights
        # were dead code and training data arrived in a fixed order every
        # epoch.  Pass the sampler so weighted sampling takes effect.
        # (DataLoader forbids combining `sampler` with `shuffle=True`.)
        self.trainloader = torch.utils.data.DataLoader(
            trainset,
            batch_size=self.cfg.batch_size,
            sampler=sampler,
            num_workers=4)

        testset = SimulationDataset("test",
                                    transforms=transforms.Compose([
                                        utils.RandomCoose(['center']),
                                        utils.Preprocess(self.input_shape),
                                        utils.ToTensor(),
                                        utils.Normalize([0.485, 0.456, 0.406],
                                                        [0.229, 0.224, 0.225])
                                    ]))

        self.testloader = torch.utils.data.DataLoader(
            testset,
            batch_size=self.cfg.batch_size,
            shuffle=False,
            num_workers=4)
        # plt.imshow(F.to_pil_image(sample['image']))
        # plt.title(str(sample['target']))
        # plt.show()

        return sample['image'], sample['target']

    def __len__(self):
        """Return the dataset size: one sample per stored image path."""
        path_count = len(self.image_paths)
        return path_count


if __name__ == '__main__':

    # Smoke-test the dataset: build it with a minimal pipeline, print its
    # size and the first image's tensor shape, then dump per-channel
    # statistics of every image.
    input_shape = (utils.IMAGE_HEIGHT, utils.IMAGE_WIDTH)
    dataset = SimulationDataset("train",
                                transforms=transforms.Compose([
                                    utils.RandomCoose(['center']),
                                    utils.Preprocess(input_shape),
                                    utils.RandomHorizontalFlip(),
                                    utils.ToTensor(),
                                    utils.Normalize([0.485, 0.456, 0.406],
                                                    [0.229, 0.224, 0.225])
                                ]))
    # Idiomatic len()/indexing instead of calling dunders directly.
    print(len(dataset))
    print(dataset[0][0].size())

    # BUG FIX: the original indexed dataset[i][c] for c in 0..2, but
    # __getitem__ returns the 2-tuple (image, target), so c == 2 raised
    # IndexError.  The intent is per-channel mean/std of the image
    # tensor, so index the image's channel dimension instead (assumes a
    # channels-first CHW tensor — confirm against utils.ToTensor).
    for c in range(3):
        for i in range(len(dataset)):
            image = dataset[i][0]
            print(image[c].mean())
            print(image[c].std())
    # print(dataset[0])