Ejemplo n.º 1
0
    def test_sr_folder_gt_dataset(self):
        """SRFolderGTDataset should behave identically for Path and str
        ``gt_folder`` arguments."""
        pipeline = [
            dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
            dict(type='ImageToTensor', keys=['gt'])
        ]
        expected_keys = ['gt_path', 'gt']
        gt_root = self.data_prefix / 'gt'
        tmpl = '{}_x4'

        # Exercise the dataset once with a Path argument and once with str.
        for folder_arg in (gt_root, str(gt_root)):
            dataset = SRFolderGTDataset(
                gt_folder=folder_arg,
                pipeline=pipeline,
                scale=4,
                filename_tmpl=tmpl)
            assert dataset.data_infos == [
                dict(gt_path=str(gt_root / 'baboon.png'))
            ]
            sample = dataset[0]
            assert len(dataset) == 1
            assert assert_dict_has_keys(sample, expected_keys)
Ejemplo n.º 2
0
    def test_base_vfi_dataset(self):
        """BaseVFIDataset stores its folder/ann_file, serves injected
        annotations via ``__getitem__``, and aggregates evaluation results."""
        dataset = BaseVFIDataset(self.pipeline, self.folder, self.ann_file)
        # NOTE: a redundant explicit ``dataset.__init__(...)`` call was
        # removed here; the constructor above already initialized the object.
        dataset.load_annotations()
        assert dataset.folder == self.folder
        assert dataset.ann_file == self.ann_file

        # Inject a single annotation entry so indexing has data to serve.
        dataset.data_infos = [
            dict(inputs_path=[
                'tests/data/vimeo90k/00001/0266/im1.png',
                'tests/data/vimeo90k/00001/0266/im3.png'
            ],
                 target_path='tests/data/vimeo90k/00001/0266/im2.png',
                 key='00001/0266')
        ]
        data = dataset[0]
        # Bug fix: the helper's boolean result was previously discarded, so
        # these key checks never actually failed.
        assert assert_dict_has_keys(data, ['folder', 'ann_file'])
        results = [dict(eval_result=dict(psnr=1.1, ssim=0.3))]
        eval_result = dataset.evaluate(results)
        assert assert_dict_has_keys(eval_result, ['psnr', 'ssim'])

        # A bare dict (not a list) is rejected with TypeError.
        with pytest.raises(TypeError):
            dataset.evaluate(results[0])
        # A result list longer than the dataset is rejected.
        with pytest.raises(AssertionError):
            dataset.evaluate(results + results)
Ejemplo n.º 3
0
    def test_sr_landmark_dataset(self):
        """SRFacialLandmarkDataset should accept both Path and str inputs."""
        pipeline = [
            dict(type='LoadImageFromFile',
                 io_backend='disk',
                 key='gt',
                 flag='color',
                 channel_order='rgb',
                 backend='cv2')
        ]
        expected_keys = ['gt_path', 'bbox', 'shape', 'landmark']
        face_root = self.data_prefix / 'face'
        annotation = self.data_prefix / 'facemark_ann.npy'

        # Run once with Path inputs and once with their str equivalents.
        for folder_arg, ann_arg in ((face_root, annotation),
                                    (str(face_root), str(annotation))):
            dataset = SRFacialLandmarkDataset(gt_folder=folder_arg,
                                              ann_file=ann_arg,
                                              pipeline=pipeline,
                                              scale=4)
            assert len(dataset.data_infos) == 1
            sample = dataset[0]
            assert len(dataset) == 1
            assert assert_dict_has_keys(sample, expected_keys)
Ejemplo n.º 4
0
    def test_vfi_vimeo90k_dataset(self):
        """VFIVimeo90KDataset built from a config exposes the expected
        annotation keys in each data-info entry."""
        dataset_cfg = dict(type='VFIVimeo90KDataset',
                           folder=self.folder,
                           ann_file=self.ann_file,
                           pipeline=self.pipeline)
        dataset = build_dataset(dataset_cfg)
        first_info = dataset.data_infos[0]
        # Bug fix: assert the helper's boolean result; previously it was
        # silently discarded, so the key check never actually ran.
        assert assert_dict_has_keys(first_info,
                                    ['inputs_path', 'target_path', 'key'])
Ejemplo n.º 5
0
    def test_sr_annotation_dataset(self):
        """SRAnnotationDataset should accept both Path and str inputs."""
        ann_path = self.data_prefix / 'train.txt'
        pipeline = [
            dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
            dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
            dict(type='PairedRandomCrop', gt_patch_size=128),
            dict(type='ImageToTensor', keys=['lq', 'gt'])
        ]
        expected_keys = [
            'lq_path', 'gt_path', 'scale', 'lq', 'lq_ori_shape', 'gt',
            'gt_ori_shape'
        ]
        lq_root = self.data_prefix / 'lq'
        gt_root = self.data_prefix / 'gt'

        # First pass leaves Path objects untouched; second converts to str.
        for convert in (lambda p: p, str):
            dataset = SRAnnotationDataset(
                lq_folder=convert(lq_root),
                gt_folder=convert(gt_root),
                ann_file=convert(ann_path),
                pipeline=pipeline,
                scale=4,
                filename_tmpl='{}_x4')
            assert dataset.data_infos == [
                dict(
                    lq_path=str(lq_root / 'baboon_x4.png'),
                    gt_path=str(gt_root / 'baboon.png'))
            ]
            sample = dataset[0]
            assert len(dataset) == 1
            assert assert_dict_has_keys(sample, expected_keys)
Ejemplo n.º 6
0
    def test_sr_lmdb_dataset(self):
        """SRLmdbDataset reads ``.lmdb`` folders given as Path or str and
        rejects folder paths without the ``.lmdb`` suffix."""
        lq_lmdb_folder = self.data_prefix / 'lq.lmdb'
        sr_pipeline = [
            dict(
                type='LoadImageFromFile',
                io_backend='lmdb',
                key='lq',
                db_path=lq_lmdb_folder),
            dict(
                type='LoadImageFromFile',
                io_backend='lmdb',
                key='gt',
                db_path=lq_lmdb_folder),
            dict(type='ImageToTensor', keys=['lq', 'gt'])
        ]
        target_keys = [
            'lq_path', 'gt_path', 'scale', 'lq', 'lq_ori_shape', 'gt',
            'gt_ori_shape'
        ]

        # input path is Path object
        sr_lmdb_dataset = SRLmdbDataset(
            lq_folder=lq_lmdb_folder,
            gt_folder=lq_lmdb_folder,  # fake gt_folder
            pipeline=sr_pipeline,
            scale=1)
        data_infos = sr_lmdb_dataset.data_infos
        assert data_infos == [dict(lq_path='baboon', gt_path='baboon')]
        result = sr_lmdb_dataset[0]
        assert len(sr_lmdb_dataset) == 1
        assert assert_dict_has_keys(result, target_keys)
        # input path is str
        sr_lmdb_dataset = SRLmdbDataset(
            lq_folder=str(lq_lmdb_folder),
            # Bug fix: this branch tests str inputs, but gt_folder was
            # previously passed as a bare Path object.
            gt_folder=str(lq_lmdb_folder),  # fake gt_folder
            pipeline=sr_pipeline,
            scale=1)
        data_infos = sr_lmdb_dataset.data_infos
        assert data_infos == [dict(lq_path='baboon', gt_path='baboon')]
        result = sr_lmdb_dataset[0]
        assert len(sr_lmdb_dataset) == 1
        assert assert_dict_has_keys(result, target_keys)

        # Non-.lmdb folders must be rejected for either argument, whether
        # passed as a Path or a str.
        bad_folder_cases = [
            dict(lq_folder=self.data_prefix, gt_folder=lq_lmdb_folder),
            dict(lq_folder=str(self.data_prefix), gt_folder=lq_lmdb_folder),
            dict(lq_folder=lq_lmdb_folder, gt_folder=self.data_prefix),
            dict(lq_folder=lq_lmdb_folder, gt_folder=str(self.data_prefix)),
        ]
        for folders in bad_folder_cases:
            with pytest.raises(ValueError):
                SRLmdbDataset(pipeline=sr_pipeline, scale=1, **folders)
Ejemplo n.º 7
0
    def test_sr_folder_ref_dataset(self):
        """SRFolderRefDataset pairs lq/gt/ref folders and validates that the
        folder contents are consistent."""
        pipeline = [
            dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
            dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
            dict(type='LoadImageFromFile', io_backend='disk', key='ref'),
            dict(type='PairedRandomCrop', gt_patch_size=128),
            dict(type='ImageToTensor', keys=['lq', 'gt', 'ref'])
        ]
        expected_keys = [
            'lq_path', 'gt_path', 'ref_path', 'scale', 'lq', 'gt', 'ref'
        ]
        lq_root = self.data_prefix / 'lq'
        gt_root = self.data_prefix / 'gt'
        ref_root = self.data_prefix / 'gt'
        tmpl = '{}_x4'

        # Exercise construction from Path objects and from plain strings.
        for lq_arg, gt_arg in ((lq_root, gt_root),
                               (str(lq_root), str(gt_root))):
            dataset = SRFolderRefDataset(
                lq_folder=lq_arg,
                gt_folder=gt_arg,
                ref_folder=str(ref_root),
                pipeline=pipeline,
                scale=4,
                filename_tmpl_lq=tmpl)
            assert dataset.data_infos == [
                dict(
                    lq_path=str(lq_root / 'baboon_x4.png'),
                    gt_path=str(gt_root / 'baboon.png'),
                    ref_path=str(ref_root / 'baboon.png'))
            ]
            sample = dataset[0]
            assert len(dataset) == 1
            assert assert_dict_has_keys(sample, expected_keys)

        # Mismatched or missing lq/gt folders must trigger the dataset's
        # internal consistency assertions.
        bad_pairs = [
            (str(lq_root), str(self.data_prefix / 'image')),  # fake gt_folder
            (str(self.data_prefix / 'image'), str(gt_root)),  # fake lq_folder
            (str(lq_root), str(self.data_prefix / 'bg')),  # fake gt_folder
            (str(self.data_prefix / 'bg'), str(gt_root)),  # fake lq_folder
            (None, None),
        ]
        for bad_lq, bad_gt in bad_pairs:
            with pytest.raises(AssertionError):
                SRFolderRefDataset(
                    lq_folder=bad_lq,
                    gt_folder=bad_gt,
                    ref_folder=str(ref_root),
                    pipeline=pipeline,
                    scale=4,
                    filename_tmpl_lq=tmpl)
Ejemplo n.º 8
0
    def test_generation_paired_dataset(self):
        """GenerationPairedDataset: Path/str dataroots and both test modes."""
        img_norm_cfg = dict(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
        pipeline = [
            dict(type='LoadPairedImageFromFile',
                 io_backend='disk',
                 key='pair',
                 flag='color'),
            dict(type='Resize',
                 keys=['img_a', 'img_b'],
                 scale=(286, 286),
                 interpolation='bicubic'),
            dict(type='FixedCrop',
                 keys=['img_a', 'img_b'],
                 crop_size=(256, 256)),
            dict(type='Flip', keys=['img_a', 'img_b'], direction='horizontal'),
            dict(type='RescaleToZeroOne', keys=['img_a', 'img_b']),
            dict(type='Normalize',
                 keys=['img_a', 'img_b'],
                 to_rgb=True,
                 **img_norm_cfg),
            dict(type='ImageToTensor', keys=['img_a', 'img_b']),
            dict(type='Collect',
                 keys=['img_a', 'img_b'],
                 meta_keys=['img_a_path', 'img_b_path'])
        ]
        expected_keys = ['img_a', 'img_b', 'meta']
        expected_meta_keys = ['img_a_path', 'img_b_path']
        pair_root = self.data_prefix / 'paired'

        def check_sample(sample, pair_path):
            # Both halves of a pair come from the same file, so the two
            # meta paths are identical.
            assert assert_dict_has_keys(sample, expected_keys)
            assert assert_dict_has_keys(sample['meta'].data,
                                        expected_meta_keys)
            assert sample['meta'].data['img_a_path'] == pair_path
            assert sample['meta'].data['img_b_path'] == pair_path

        # test_mode=True lists only the single 'test' image; the dataroot
        # may be a Path or a str.
        for root_arg in (pair_root, str(pair_root)):
            dataset = GenerationPairedDataset(
                dataroot=root_arg, pipeline=pipeline, test_mode=True)
            assert dataset.data_infos == [
                dict(pair_path=str(pair_root / 'test' / '3.jpg'))
            ]
            sample = dataset[0]
            assert len(dataset) == 1
            check_sample(sample, str(pair_root / 'test' / '3.jpg'))

        # test_mode=False lists both 'train' images.
        dataset = GenerationPairedDataset(
            dataroot=str(pair_root), pipeline=pipeline, test_mode=False)
        assert dataset.data_infos == [
            dict(pair_path=str(pair_root / 'train' / '1.jpg')),
            dict(pair_path=str(pair_root / 'train' / '2.jpg'))
        ]
        assert len(dataset) == 2
        for idx, name in enumerate(('1.jpg', '2.jpg')):
            check_sample(dataset[idx], str(pair_root / 'train' / name))