Example 1 (score: 0)
    def test_mxnet(self):
        """Smoke-test the mxnet dataloader over an ImagenetRaw dataset,
        once without an image list and once with 'val/val.txt'; the first
        batch must contain 24x24 RGB images."""
        import mxnet as mx

        for image_list in (None, 'val/val.txt'):
            cfg = {
                'dataset': {
                    "ImagenetRaw": {
                        'data_path': 'val',
                        'image_list': image_list,
                    }
                },
                'transform': {'Resize': {'size': 24}},
                'filter': None,
            }
            loader = create_dataloader('mxnet', cfg)
            for batch in loader:
                self.assertEqual(batch[0][0].shape, (24, 24, 3))
                break
Example 2 (score: 0)
    def test_onnx(self):
        """Smoke-test the onnxrt_integerops dataloader over ImagenetRaw,
        with and without an image-list file; the first batch must contain
        24x24 RGB images."""
        for image_list in (None, 'val/val_map.txt'):
            cfg = {
                'dataset': {
                    "ImagenetRaw": {
                        'data_path': 'val',
                        'image_list': image_list,
                    }
                },
                'transform': {'Resize': {'size': 24}},
                'filter': None,
            }
            loader = create_dataloader('onnxrt_integerops', cfg)
            for batch in loader:
                self.assertEqual(batch[0][0].shape, (24, 24, 3))
                break
Example 3 (score: 0)
    def test_onnx(self):
        """onnxrt_integerops dataloader over an ImageFolder rooted at
        './val': first batch must hold 24x24 RGB images after Resize."""
        cfg = {
            'dataset': {"ImageFolder": {'root': './val'}},
            'transform': {'Resize': {'size': 24}},
            'filter': None,
        }
        loader = create_dataloader('onnxrt_integerops', cfg)

        for batch in loader:
            self.assertEqual(batch[0][0].shape, (24, 24, 3))
            break
Example 4 (score: 0)
    def test_tensorflow(self):
        """tensorflow dataloader over an ImageFolder rooted at './val'
        with RandomResizedCrop: first batch must hold 24x24 RGB images."""
        cfg = {
            'dataset': {"ImageFolder": {'root': './val'}},
            'transform': {'RandomResizedCrop': {'size': 24}},
            'filter': None,
        }
        loader = create_dataloader('tensorflow', cfg)

        for batch in loader:
            self.assertEqual(batch[0][0].shape, (24, 24, 3))
            break
Example 5 (score: 0)
    def test_pytorch(self):
        """pytorch dataloader over an ImageFolder: Resize then ToTensor
        yields channels-first (3, 24, 24) tensors in the first batch."""
        transforms = {
            'Resize': {'size': 24},
            'ToTensor': {},  # order matters: resize first, then tensorize
        }
        cfg = {
            'dataset': {"ImageFolder": {'root': './val'}},
            'transform': transforms,
            'filter': None,
        }
        loader = create_dataloader('pytorch', cfg)

        for batch in loader:
            self.assertEqual(batch[0][0].shape, (3, 24, 24))
            break
Example 6 (score: 0)
    def testLabelBalanceCOCORecord(self):
        """Exercise the LabelBalanceCOCORecord filter on a synthetic COCO
        TFRecord.

        Writes a random 100x100 RGB jpeg, packs it into two tf.train.Examples
        (one with a single bounding box, one with two), then checks that with
        size=2 only the two-bbox record survives — through three equivalent
        APIs: the dataset class directly, create_dataset, and
        create_dataloader.  Temp files are removed at the end.
        """
        from PIL import Image
        tf.compat.v1.disable_eager_execution()

        # Create a throwaway jpeg to embed in the TFRecord examples.
        random_array = np.random.random_sample([100, 100, 3]) * 255
        random_array = random_array.astype(np.uint8)
        im = Image.fromarray(random_array)
        im.save('test.jpeg')

        # Read the encoded bytes back; use a context manager so the file
        # handle is closed deterministically (the original leaked it).
        with tf.compat.v1.gfile.FastGFile('test.jpeg', 'rb') as f:
            image = f.read()
        source_id = '000000397133.jpg'.encode('utf-8')
        label = 'person'.encode('utf-8')
        # Example with a single bounding box: dropped by the size=2 filter.
        example1 = tf.train.Example(features=tf.train.Features(
            feature={
                'image/encoded':
                tf.train.Feature(bytes_list=tf.train.BytesList(value=[image])),
                'image/object/class/text':
                tf.train.Feature(bytes_list=tf.train.BytesList(value=[label])),
                'image/source_id':
                tf.train.Feature(bytes_list=tf.train.BytesList(
                    value=[source_id])),
                'image/object/bbox/xmin':
                tf.train.Feature(float_list=tf.train.FloatList(value=[10])),
                'image/object/bbox/ymin':
                tf.train.Feature(float_list=tf.train.FloatList(value=[10])),
                'image/object/bbox/xmax':
                tf.train.Feature(float_list=tf.train.FloatList(value=[100])),
                'image/object/bbox/ymax':
                tf.train.Feature(float_list=tf.train.FloatList(value=[100])),
            }))
        # Example with two bounding boxes: kept by the size=2 filter.
        example2 = tf.train.Example(features=tf.train.Features(
            feature={
                'image/encoded':
                tf.train.Feature(bytes_list=tf.train.BytesList(value=[image])),
                'image/object/class/text':
                tf.train.Feature(bytes_list=tf.train.BytesList(value=[label])),
                'image/source_id':
                tf.train.Feature(bytes_list=tf.train.BytesList(
                    value=[source_id])),
                'image/object/bbox/xmin':
                tf.train.Feature(float_list=tf.train.FloatList(
                    value=[10, 20])),
                'image/object/bbox/ymin':
                tf.train.Feature(float_list=tf.train.FloatList(
                    value=[10, 20])),
                'image/object/bbox/xmax':
                tf.train.Feature(float_list=tf.train.FloatList(
                    value=[100, 200])),
                'image/object/bbox/ymax':
                tf.train.Feature(float_list=tf.train.FloatList(
                    value=[100, 200])),
            }))
        with tf.io.TFRecordWriter('test.record') as writer:
            writer.write(example1.SerializeToString())
            writer.write(example2.SerializeToString())

        # API 1: dataset class + filter instance.
        preprocesses = TRANSFORMS('tensorflow', 'preprocess')
        preprocess = get_preprocess(preprocesses, {'ParseDecodeCoco': {}})
        filters = FILTERS('tensorflow')
        # Renamed from `filter` so the builtin is not shadowed.
        balance_filter = filters['LabelBalanceCOCORecord'](2)
        datasets = DATASETS('tensorflow')
        dataset = datasets['COCORecord']('test.record', \
            transform=preprocess, filter=balance_filter)
        dataloader = DATALOADERS['tensorflow'](dataset=dataset, batch_size=1)
        for (inputs, labels) in dataloader:
            self.assertEqual(inputs.shape, (1, 100, 100, 3))
            self.assertEqual(labels[0].shape, (1, 2, 4))

        # API 2: create_dataset with a LabelBalance filter config.
        dataset2 = create_dataset('tensorflow',
                                  {'COCORecord': {
                                      'root': 'test.record'
                                  }}, {'ParseDecodeCoco': {}},
                                  {'LabelBalance': {
                                      'size': 2
                                  }})
        dataloader2 = DATALOADERS['tensorflow'](dataset=dataset2, batch_size=1)
        for (inputs, labels) in dataloader2:
            self.assertEqual(inputs.shape, (1, 100, 100, 3))
            self.assertEqual(labels[0].shape, (1, 2, 4))

        # API 3: one-shot create_dataloader config.
        dataloader3 = create_dataloader('tensorflow', {'batch_size':1, 'dataset':{'COCORecord':{'root':'test.record'}},\
                 'filter':{'LabelBalance':{'size':2}}, 'transform':{'ParseDecodeCoco':{}}})
        for (inputs, labels) in dataloader3:
            self.assertEqual(inputs.shape, (1, 100, 100, 3))
            self.assertEqual(labels[0].shape, (1, 2, 4))
        os.remove('test.record')
        os.remove('test.jpeg')
Example 7 (score: 0)
    def test_onnx(self):
        """onnxrt ImagenetRaw dataloaders: resize paths, the legacy
        DataLoader API, a map file with one missing entry, a map file whose
        only entry is missing (ValueError), and an unreadable image
        (ValueError)."""
        def raw_cfg(image_list, resize=True):
            # Fresh config dict per call so loaders never share state.
            transform = {'Resize': {'size': 24}} if resize else None
            return {
                'dataset': {
                    "ImagenetRaw": {
                        'data_path': 'val',
                        'image_list': image_list,
                    }
                },
                'transform': transform,
                'filter': None,
            }

        for image_list in (None, 'val/val.txt'):
            loader = create_dataloader('onnxrt_integerops',
                                       raw_cfg(image_list))
            for batch in loader:
                self.assertEqual(batch[0][0].shape, (24, 24, 3))
                break

        # test old api
        eval_dataset = create_dataset('onnxrt_integerops',
                                      {'Imagenet': {
                                          'root': './'
                                      }}, None, None)
        loader = DataLoader('onnxrt_integerops',
                            dataset=eval_dataset,
                            batch_size=1)
        for batch in loader:
            self.assertEqual(batch[0][0].shape, (100, 100, 3))
            break

        # Map file listing one existing and one missing image: only the
        # existing one is kept.
        with open('val/fake_map.txt', 'w') as f:
            f.write('test.jpg   0 \n')
            f.write('test2.jpg   1')
        dataset = create_dataset(
            'onnxrt_integerops',
            {"ImagenetRaw": {
                'data_path': 'val',
                'image_list': 'val/fake_map.txt'
            }},
            None, None)
        self.assertEqual(len(dataset), 1)

        # Map file whose only entry does not exist: dataloader creation
        # must fail.
        with open('val/fake_map.txt', 'w') as f:
            f.write('test2.jpg   1')
        self.assertRaises(ValueError, create_dataloader, 'onnxrt_integerops',
                          raw_cfg('val/fake_map.txt', resize=False))

        # Missing entries are tolerated as long as at least one image loads.
        with open('val/not_found_map.txt', 'w') as f:
            f.write('test.jpg   0\n')
            f.write('not_found.jpg   1')
        loader = create_dataloader('onnxrt_integerops',
                                   raw_cfg('val/not_found_map.txt'))
        for batch in loader:
            self.assertEqual(batch[0][0].shape, (24, 24, 3))
            break

        # Unreadable/blank image file: dataloader creation must fail.
        with open('val/blank.txt', 'w') as f:
            f.write('blank.jpg   0')
        self.assertRaises(ValueError, create_dataloader, 'onnxrt_qlinearops',
                          raw_cfg('val/blank.txt', resize=False))
Example 8 (score: 0)
    def test_onnx_dataset(self):
        """onnxrt_qlinearops dataloaders over torchvision-style datasets:
        absent local data raises RuntimeError; downloaded MNIST and
        FashionMNIST iterate at batch size 2 with (24, 24, 1) images."""
        def make_cfg(name, root, download, batch_size=None):
            # Build the config in the same key order as a literal would.
            cfg = {}
            if batch_size is not None:
                cfg['batch_size'] = batch_size
            cfg['dataset'] = {
                name: {'root': root, 'train': False, 'download': download}
            }
            cfg['transform'] = {'Resize': {'size': 24}}
            cfg['filter'] = None
            return cfg

        # Datasets that are not on disk and not downloaded must fail.
        for name in ("CIFAR10", "CIFAR100"):
            self.assertRaises(RuntimeError, create_dataloader,
                              'onnxrt_qlinearops',
                              make_cfg(name, './', False, batch_size=2))
        self.assertRaises(RuntimeError, create_dataloader, 'onnxrt_qlinearops',
                          make_cfg("MNIST", './test', False))

        # Downloaded datasets iterate normally.
        for name in ("MNIST", "FashionMNIST"):
            loader = create_dataloader('onnxrt_qlinearops',
                                       make_cfg(name, './', True,
                                                batch_size=2))

            for batch in loader:
                self.assertEqual(len(batch[0]), 2)
                self.assertEqual(batch[0][0].shape, (24, 24, 1))
                break
Example 9 (score: 0)
    def test_onnx_dataset(self):
        """onnxrt_qlinearops dataloaders over torchvision-style datasets:
        absent local data raises RuntimeError; downloaded MNIST and
        FashionMNIST iterate at batch size 2 with (24, 24, 1) images; a
        shuffle request is ignored with a warning."""
        def make_cfg(name, root, download, batch_size=None):
            # Build the config in the same key order as a literal would.
            cfg = {}
            if batch_size is not None:
                cfg['batch_size'] = batch_size
            cfg['dataset'] = {
                name: {'root': root, 'train': False, 'download': download}
            }
            cfg['transform'] = {'Resize': {'size': 24}}
            cfg['filter'] = None
            return cfg

        # Datasets that are not on disk and not downloaded must fail.
        for name in ("CIFAR10", "CIFAR100"):
            self.assertRaises(RuntimeError, create_dataloader,
                              'onnxrt_qlinearops',
                              make_cfg(name, './', False, batch_size=2))
        self.assertRaises(RuntimeError, create_dataloader, 'onnxrt_qlinearops',
                          make_cfg("MNIST", './test', False))

        # Downloaded datasets iterate normally.
        for name in ("MNIST", "FashionMNIST"):
            loader = create_dataloader('onnxrt_qlinearops',
                                       make_cfg(name, './', True,
                                                batch_size=2))

            for batch in loader:
                self.assertEqual(len(batch[0]), 2)
                self.assertEqual(batch[0][0].shape, (24, 24, 1))
                break

        # Shuffle is unsupported for ONNXRT: expect a warning, not an error.
        shuffle_cfg = {
            'batch_size': 2,
            'shuffle': True,
            'dataset': {
                "FashionMNIST": {
                    'root': './',
                    'train': False,
                    'download': True,
                }
            },
            'transform': {'Resize': {'size': 24}},
            'filter': None,
        }
        with self.assertLogs() as cm:
            dataloader = create_dataloader('onnxrt_qlinearops',
                                           shuffle_cfg)
        self.assertEqual(cm.output, ['WARNING:root:Shuffle is not supported yet' \
                         ' in ONNXRTDataLoader, ignoring shuffle keyword.'])
Example 10 (score: 0)
    def test_pytorch_dataset(self):
        """pytorch dataloaders over torchvision datasets: absent local data
        raises RuntimeError; downloaded MNIST and FashionMNIST iterate at
        batch size 2 with (24, 24) images; shuffle=True installs a
        RandomSampler."""
        def make_cfg(name, root, download, batch_size=None, shuffle=False):
            # Build the config in the same key order as a literal would.
            cfg = {}
            if batch_size is not None:
                cfg['batch_size'] = batch_size
            if shuffle:
                cfg['shuffle'] = True
            cfg['dataset'] = {
                name: {'root': root, 'train': False, 'download': download}
            }
            cfg['transform'] = {'Resize': {'size': 24}}
            cfg['filter'] = None
            return cfg

        # Datasets that are not on disk and not downloaded must fail.
        for name in ("CIFAR10", "CIFAR100"):
            self.assertRaises(RuntimeError, create_dataloader, 'pytorch',
                              make_cfg(name, './', False, batch_size=2))
        self.assertRaises(RuntimeError, create_dataloader, 'pytorch',
                          make_cfg("MNIST", './test', False))

        # Downloaded datasets iterate normally.
        for name in ("MNIST", "FashionMNIST"):
            loader = create_dataloader('pytorch',
                                       make_cfg(name, './', True,
                                                batch_size=2))
            for batch in loader:
                self.assertEqual(len(batch[0]), 2)
                self.assertEqual(batch[0][0].shape, (24, 24))
                break

        # shuffle=True is honored for pytorch via a RandomSampler.
        loader = create_dataloader('pytorch',
                                   make_cfg("FashionMNIST", './', True,
                                            batch_size=2, shuffle=True))
        self.assertEqual(loader.dataloader.sampler.__class__.__name__,
                         'RandomSampler')
        for batch in loader:
            self.assertEqual(len(batch[0]), 2)
            self.assertEqual(batch[0][0].shape, (24, 24))
            break