# Example 1
    def test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimension(
            self):
        """Batches [counter, 4] tensors and verifies dequeue unpads each one.

        Each enqueued example is a boxes tensor of shape [counter, 4] whose
        first dimension grows with the counter variable, so the batcher must
        pad for batching and the queue must unpad on dequeue.
        """
        with self.test_session() as sess:
            batch_size = 3
            num_batches = 2
            # Counter yields 2..7 (num_batches * batch_size examples), then
            # count_up_to raises OutOfRangeError at the limit of 8.
            examples = tf.Variable(tf.constant(2, dtype=tf.int32))
            counter = examples.count_up_to(num_batches * batch_size + 2)
            # boxes: [0,1,2,3] tiled `counter` times -> shape [counter, 4].
            boxes = tf.tile(tf.reshape(tf.range(4), [1, 4]),
                            tf.stack([counter, tf.constant(1)]))
            batch_queue = batcher.BatchQueue(tensor_dict={'boxes': boxes},
                                             batch_size=batch_size,
                                             batch_queue_capacity=100,
                                             num_batch_queue_threads=1,
                                             prefetch_queue_capacity=100)
            batch = batch_queue.dequeue()

            # Static shape check: variable first dim, fixed second dim of 4.
            for tensor_dict in batch:
                for tensor in tensor_dict.values():
                    self.assertAllEqual([None, 4],
                                        tensor.get_shape().as_list())

            # tf.initialize_all_variables() is deprecated; this is its
            # documented drop-in replacement.
            tf.global_variables_initializer().run()
            with slim.queues.QueueRunners(sess):
                i = 2
                for _ in range(num_batches):
                    batch_np = sess.run(batch)
                    for tensor_dict in batch_np:
                        for tensor in tensor_dict.values():
                            # i tracks the counter value of each example.
                            self.assertAllEqual(tensor,
                                                np.tile(np.arange(4), (i, 1)))
                            i += 1
                # All examples consumed: the next dequeue must fail.
                with self.assertRaises(tf.errors.OutOfRangeError):
                    sess.run(batch)
# Example 2
    def test_batch_and_unpad_2d_tensors_of_same_size_in_all_dimensions(self):
        """Batches fixed-shape [4, 3] tensors and verifies dequeued values.

        All examples share the same static shape, so no padding is needed;
        the dequeued tensors must exactly equal the enqueued ones.
        """
        with self.test_session() as sess:
            batch_size = 3
            num_batches = 2
            # Counter yields 1..6 (num_batches * batch_size examples), then
            # count_up_to raises OutOfRangeError at the limit of 7.
            examples = tf.Variable(tf.constant(1, dtype=tf.int32))
            counter = examples.count_up_to(num_batches * batch_size + 1)
            # image: the fixed 4x3 grid [1..12] scaled by the counter value.
            image = tf.reshape(tf.range(1, 13), [4, 3]) * counter
            batch_queue = batcher.BatchQueue(tensor_dict={'image': image},
                                             batch_size=batch_size,
                                             batch_queue_capacity=100,
                                             num_batch_queue_threads=1,
                                             prefetch_queue_capacity=100)
            batch = batch_queue.dequeue()

            # Static shape is fully defined since every example is [4, 3].
            for tensor_dict in batch:
                for tensor in tensor_dict.values():
                    self.assertAllEqual([4, 3], tensor.get_shape().as_list())

            # tf.initialize_all_variables() is deprecated; this is its
            # documented drop-in replacement.
            tf.global_variables_initializer().run()
            with slim.queues.QueueRunners(sess):
                i = 1
                for _ in range(num_batches):
                    batch_np = sess.run(batch)
                    for tensor_dict in batch_np:
                        for tensor in tensor_dict.values():
                            # i tracks the counter value of each example.
                            self.assertAllEqual(
                                tensor,
                                np.arange(1, 13).reshape((4, 3)) * i)
                            i += 1
                # All examples consumed: the next dequeue must fail.
                with self.assertRaises(tf.errors.OutOfRangeError):
                    sess.run(batch)
# Example 3
    def test_batcher_when_batch_size_is_one(self):
        """Batches size-1 batches of [counter, counter] tensors.

        Both dimensions vary per example, so the static batch shape is fully
        unknown ([None, None]) and each dequeued tensor must be unpadded back
        to its original square shape.
        """
        with self.test_session() as sess:
            batch_size = 1
            num_batches = 2
            # Counter yields 2..3 (num_batches * batch_size examples), then
            # count_up_to raises OutOfRangeError at the limit of 4.
            examples = tf.Variable(tf.constant(2, dtype=tf.int32))
            counter = examples.count_up_to(num_batches * batch_size + 2)
            # image: values [0 .. counter^2) arranged as a counter x counter
            # square, so both dimensions depend on the counter.
            image = tf.reshape(tf.range(counter * counter),
                               tf.stack([counter, counter]))
            batch_queue = batcher.BatchQueue(tensor_dict={'image': image},
                                             batch_size=batch_size,
                                             batch_queue_capacity=100,
                                             num_batch_queue_threads=1,
                                             prefetch_queue_capacity=100)
            batch = batch_queue.dequeue()

            # Static shape check: both dimensions are unknown.
            for tensor_dict in batch:
                for tensor in tensor_dict.values():
                    self.assertAllEqual([None, None],
                                        tensor.get_shape().as_list())

            # tf.initialize_all_variables() is deprecated; this is its
            # documented drop-in replacement.
            tf.global_variables_initializer().run()
            with slim.queues.QueueRunners(sess):
                i = 2
                for _ in range(num_batches):
                    batch_np = sess.run(batch)
                    for tensor_dict in batch_np:
                        for tensor in tensor_dict.values():
                            # i tracks the counter value of each example.
                            self.assertAllEqual(
                                tensor,
                                np.arange(i * i).reshape((i, i)))
                            i += 1
                # All examples consumed: the next dequeue must fail.
                with self.assertRaises(tf.errors.OutOfRangeError):
                    sess.run(batch)
# Example 4
def create_input_queue(batch_size_per_clone, create_tensor_dict_fn,
                       batch_queue_capacity, num_batch_queue_threads,
                       prefetch_queue_capacity, data_augmentation_options):
    """Sets up reader, prefetcher and returns input queue.

  Args:
    batch_size_per_clone: batch size to use per clone.
    create_tensor_dict_fn: function to create tensor dictionary.
    batch_queue_capacity: maximum number of elements to store within a queue.
    num_batch_queue_threads: number of threads to use for batching.
    prefetch_queue_capacity: maximum capacity of the queue used to prefetch
                             assembled batches.
    data_augmentation_options: a list of tuples, where each tuple contains a
      data augmentation function and a dictionary containing arguments and their
      values (see preprocessor.py).

  Returns:
    input queue: a batcher.BatchQueue object holding enqueued tensor_dicts
      (which hold images, boxes and targets).  To get a batch of tensor_dicts,
      call input_queue.Dequeue().
  """
    tensor_dict = create_tensor_dict_fn()

    # Give the image a leading batch dimension and convert it to float32
    # before any augmentation runs.
    image_key = fields.InputDataFields.image
    tensor_dict[image_key] = tf.to_float(
        tf.expand_dims(tensor_dict[image_key], 0))

    if data_augmentation_options:
        # Only forward the optional groundtruth fields that are actually
        # present in the tensor dictionary.
        func_arg_map = preprocessor.get_default_func_arg_map(
            include_multiclass_scores=(
                fields.InputDataFields.multiclass_scores in tensor_dict),
            include_instance_masks=(
                fields.InputDataFields.groundtruth_instance_masks
                in tensor_dict),
            include_keypoints=(
                fields.InputDataFields.groundtruth_keypoints in tensor_dict))
        tensor_dict = preprocessor.preprocess(
            tensor_dict,
            data_augmentation_options,
            func_arg_map=func_arg_map)

    return batcher.BatchQueue(
        tensor_dict,
        batch_size=batch_size_per_clone,
        batch_queue_capacity=batch_queue_capacity,
        num_batch_queue_threads=num_batch_queue_threads,
        prefetch_queue_capacity=prefetch_queue_capacity)