Example #1
    def __init__(self,
                 path,
                 mode='train',
                 image_size=224,
                 resize_short_size=256):
        super(ImageNetDataset, self).__init__(path)
        self.mode = mode

        self.samples = []
        list_file = "train_list.txt" if self.mode == "train" else "val_list.txt"
        with open(os.path.join(path, list_file), 'r') as f:
            for line in f:
                _image, _label = line.strip().split(" ")
                self.samples.append((_image, int(_label)))
        normalize = transforms.Normalize(mean=[123.675, 116.28, 103.53],
                                         std=[58.395, 57.120, 57.375])
        if self.mode == 'train':
            self.transform = transforms.Compose([
                transforms.RandomResizedCrop(image_size),
                transforms.RandomHorizontalFlip(),
                transforms.Transpose(), normalize
            ])
        else:
            self.transform = transforms.Compose([
                transforms.Resize(resize_short_size),
                transforms.CenterCrop(image_size),
                transforms.Transpose(), normalize
            ])
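The constructor above only prepares self.samples and self.transform; the original snippet does not show how items are actually read. A minimal, hypothetical __len__/__getitem__ sketch (assuming `import cv2` and that the paths stored in the list file are readable as-is) could look like this:

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        # Hypothetical loader: not part of the original example. Assumes the
        # paths in the list file resolve from the current working directory.
        image_path, label = self.samples[idx]
        img = cv2.imread(image_path)                # HWC, BGR, uint8
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # match the RGB mean/std above
        return self.transform(img), label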
Example #2
    def __init__(self,
                 data_dir,
                 mode='train',
                 image_size=224,
                 resize_short_size=256):
        super(ImageNetDataset, self).__init__()
        train_file_list = os.path.join(data_dir, 'train_list.txt')
        val_file_list = os.path.join(data_dir, 'val_list.txt')
        test_file_list = os.path.join(data_dir, 'test_list.txt')
        self.data_dir = data_dir
        self.mode = mode

        normalize = transforms.Normalize(
            mean=[123.675, 116.28, 103.53], std=[58.395, 57.120, 57.375])
        if self.mode == 'train':
            self.transform = transforms.Compose([
                transforms.RandomResizedCrop(image_size),
                transforms.RandomHorizontalFlip(), transforms.Transpose(),
                normalize
            ])
        else:
            self.transform = transforms.Compose([
                transforms.Resize(resize_short_size),
                transforms.CenterCrop(image_size), transforms.Transpose(),
                normalize
            ])

        if mode == 'train':
            with open(train_file_list) as flist:
                full_lines = [line.strip() for line in flist]
                np.random.shuffle(full_lines)
                if os.getenv('PADDLE_TRAINING_ROLE'):
                    # distributed mode if the env var `PADDLE_TRAINING_ROLE` exists
                    trainer_id = int(os.getenv("PADDLE_TRAINER_ID", "0"))
                    trainer_count = int(os.getenv("PADDLE_TRAINERS_NUM", "1"))
                    per_node_lines = len(full_lines) // trainer_count
                    lines = full_lines[trainer_id * per_node_lines:(
                        trainer_id + 1) * per_node_lines]
                    print(
                        "read images from %d, length: %d, lines length: %d, total: %d"
                        % (trainer_id * per_node_lines, per_node_lines,
                           len(lines), len(full_lines)))
                else:
                    lines = full_lines
            self.data = [line.split() for line in lines]
        else:
            with open(val_file_list) as flist:
                lines = [line.strip() for line in flist]
                self.data = [line.split() for line in lines]
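The sharding branch above is driven entirely by environment variables that Paddle's distributed launcher normally exports. Setting them by hand, purely to illustrate which variables the branch reads (the dataset path below is a placeholder):

import os

# Normally exported by the Paddle distributed launcher; set here only to
# show which variables the sharding branch consumes.
os.environ['PADDLE_TRAINING_ROLE'] = 'TRAINER'
os.environ['PADDLE_TRAINER_ID'] = '0'    # rank of this trainer
os.environ['PADDLE_TRAINERS_NUM'] = '2'  # total trainers; each reads 1/2 of the list

dataset = ImageNetDataset('/path/to/ILSVRC2012', mode='train')  # placeholder path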
Example #3
    def test_transpose(self):
        trans = transforms.Compose([transforms.Transpose()])
        self.do_transform(trans)

        fake_img = self.create_image((50, 100, 3))
        converted_img = trans(fake_img)

        np.testing.assert_equal(self.get_shape(converted_img), (3, 50, 100))
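The test above pins down the core behaviour of Transpose: an HWC input becomes CHW. The same conversion, shown as a self-contained sketch on a NumPy array:

import numpy as np
from paddle.vision import transforms

img_hwc = np.zeros((50, 100, 3), dtype=np.uint8)  # height, width, channels
img_chw = transforms.Transpose()(img_hwc)         # default order is (2, 0, 1)
print(img_chw.shape)                              # (3, 50, 100)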
Example #4
    def __init__(self,
                 path,
                 mode='train',
                 image_size=224,
                 resize_short_size=256):
        super(ImageNetDataset, self).__init__(path)
        self.mode = mode

        normalize = transforms.Normalize(mean=[123.675, 116.28, 103.53],
                                         std=[58.395, 57.120, 57.375])
        if self.mode == 'train':
            self.transform = transforms.Compose([
                transforms.RandomResizedCrop(image_size),
                transforms.RandomHorizontalFlip(),
                transforms.Transpose(), normalize
            ])
        else:
            self.transform = transforms.Compose([
                transforms.Resize(resize_short_size),
                transforms.CenterCrop(image_size),
                transforms.Transpose(), normalize
            ])
Example #5
    def test_trans_all(self):
        normalize = transforms.Normalize(
            mean=[123.675, 116.28, 103.53],
            std=[58.395, 57.120, 57.375])
        trans = transforms.Compose([
            transforms.RandomResizedCrop(224),
            transforms.ColorJitter(
                brightness=0.4, contrast=0.4, saturation=0.4, hue=0.4),
            transforms.RandomHorizontalFlip(),
            transforms.Transpose(),
            normalize,
        ])

        self.do_transform(trans)
Example #6
def load_image(image_path, max_size=400, shape=None):
    image = Image.open(image_path).convert('RGB')

    # When no explicit shape is given, cap the longer image side at max_size.
    size = shape if shape is not None else min(max(image.size), max_size)

    transform = transforms.Compose([
        transforms.Resize(size),
        transforms.Transpose(),
        transforms.Normalize([123.675, 116.28, 103.53],
                             [58.395, 57.120, 57.375])
    ])
    image = transform(image)[np.newaxis, :3, :, :]
    image = paddle.to_tensor(image)
    return image
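A possible call site for load_image (the file name is a placeholder); the returned value is a batched, normalized CHW tensor ready to feed a Paddle model:

content = load_image('content.jpg', max_size=400)  # placeholder image path
print(content.shape)                               # [1, 3, H, W]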
Example #7
    def __init__(self, data_dir, image_size=224, resize_short_size=256):
        super(ImageNetValDataset, self).__init__()
        val_file_list = os.path.join(data_dir, 'val_list.txt')
        test_file_list = os.path.join(data_dir, 'test_list.txt')
        self.data_dir = data_dir

        normalize = transforms.Normalize(mean=[123.675, 116.28, 103.53],
                                         std=[58.395, 57.120, 57.375])
        self.transform = transforms.Compose([
            transforms.Resize(resize_short_size),
            transforms.CenterCrop(image_size),
            transforms.Transpose(), normalize
        ])

        with open(val_file_list) as flist:
            lines = [line.strip() for line in flist]
            self.data = [line.split() for line in lines]
Example #8
    def test_normalize(self):
        normalize = transforms.Normalize(mean=0.5, std=0.5)
        trans = transforms.Compose([transforms.Transpose(), normalize])
        self.do_transform(trans)
Example #9
    def do_transform(self, trans):
        # Prepend ToTensor and append a CHW -> HWC transpose so the composed
        # pipeline can be exercised end to end over a DatasetFolder.
        trans.transforms.insert(0, transforms.ToTensor(data_format='CHW'))
        trans.transforms.append(transforms.Transpose(order=(1, 2, 0)))
        dataset_folder = DatasetFolder(self.data_dir, transform=trans)
        for _ in dataset_folder:
            pass