Example no. 1
    def test_basic(self):
        """ test basic apis 'next/size/drained'
        """
        roi_source = build_source(self.config)
        for i, sample in enumerate(roi_source):
            self.assertTrue('image' in sample)
            self.assertGreater(len(sample['image']), 0)
        self.assertTrue(roi_source.drained())
        self.assertEqual(i + 1, roi_source.size())

    def test_batch(self):
        """ test batched dataset
        """
        batchsize = 2
        mapper = tf.build_mapper(self.ops)
        ds = build_source(self.sc_config)
        mapped_ds = tf.map(ds, mapper)
        batched_ds = tf.batch(mapped_ds, batchsize, True)
        for sample in batched_ds:
            out = sample
        self.assertEqual(len(out), batchsize)
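For reference, test_batch only checks the length of the last batch it receives; the contract it relies on is that consecutive mapped samples are grouped into lists of length batchsize. Below is a minimal, self-contained sketch of that behaviour, assuming the third argument of tf.batch asks to drop the last incomplete batch (illustrative only, not PaddleDetection's actual implementation):

def toy_batch(samples, batchsize, drop_last=True):
    """group consecutive samples into lists of length `batchsize`"""
    batches, buf = [], []
    for s in samples:
        buf.append(s)
        if len(buf) == batchsize:
            batches.append(buf)
            buf = []
    if buf and not drop_last:
        batches.append(buf)  # keep the trailing partial batch
    return batches

# 5 samples, batchsize 2, drop_last=True -> two full batches, the 5th sample is dropped
assert toy_batch(list(range(5)), 2) == [[0, 1], [2, 3]]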
Example no. 3
    def test_reset(self):
        """ test functions 'reset/epoch_id'
        """
        roi_source = build_source(self.config)

        self.assertTrue(roi_source.next() is not None)
        self.assertEqual(roi_source.epoch_id(), 0)

        roi_source.reset()

        self.assertEqual(roi_source.epoch_id(), 1)
        self.assertTrue(roi_source.next() is not None)

    def test_map(self):
        """ test transformer.map
        """
        mapper = tf.build_mapper(self.ops)
        ds = build_source(self.sc_config)
        mapped_ds = tf.map(ds, mapper)
        ct = 0
        for sample in mapped_ds:
            self.assertTrue(type(sample[0]) is np.ndarray)
            ct += 1

        self.assertEqual(ct, mapped_ds.size())

    def test_parallel_map(self):
        """ test transformer.map with concurrent workers
        """
        mapper = tf.build_mapper(self.ops)
        ds = build_source(self.sc_config)
        worker_conf = {'WORKER_NUM': 2, 'use_process': True}
        mapped_ds = tf.map(ds, mapper, worker_conf)

        ct = 0
        for sample in mapped_ds:
            self.assertTrue(type(sample[0]) is np.ndarray)
            ct += 1

        self.assertTrue(mapped_ds.drained())
        self.assertEqual(ct, mapped_ds.size())
        mapped_ds.reset()

        ct = 0
        for sample in mapped_ds:
            self.assertTrue(type(sample[0]) is np.ndarray)
            ct += 1

        self.assertEqual(ct, mapped_ds.size())
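The three tests above exercise one small source protocol: iteration yielding samples, plus size(), drained(), reset() and epoch_id(). The following toy stand-in is a hypothetical illustration of those semantics, not what build_source actually returns:

class ToySource(object):
    """hypothetical in-memory source mimicking the tested interface"""

    def __init__(self, samples):
        self._samples = samples
        self._pos = 0
        self._epoch = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._pos >= len(self._samples):
            raise StopIteration
        sample = self._samples[self._pos]
        self._pos += 1
        return sample

    next = __next__  # so source.next() works like roi_source.next() above

    def size(self):
        return len(self._samples)

    def drained(self):
        # True once every sample of the current pass has been consumed
        return self._pos >= len(self._samples)

    def reset(self):
        # rewind and start a new epoch
        self._pos = 0
        self._epoch += 1

    def epoch_id(self):
        return self._epoch

If roi_source were ToySource([{'image': b'x'}] * 3), the assertions in test_basic and test_reset would all pass.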
Example no. 6
    def _my_data_reader():
        # defined inside a test method, so `self` is taken from the
        # enclosing scope
        mydata = build_source(self.rcnn_conf['DATA']['TRAIN'])
        for i, sample in enumerate(mydata):
            yield sample
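_my_data_reader is a nested generator, so `self` is captured from the enclosing test method. A hedged sketch of how such a generator might be drained for a quick smoke test (illustrative only; in PaddleDetection such readers are typically handed on to a Paddle data feeder or loader rather than iterated by hand):

    # continuing inside the same test method, after _my_data_reader is defined
    for i, sample in enumerate(_my_data_reader()):
        # each sample is expected to be a dict carrying an 'image' field,
        # as test_basic above asserts for a similar source
        self.assertTrue('image' in sample)
        if i >= 2:
            break  # a few samples are enough for a smoke test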