Example #1
    def enqueue_many(self, vals, name=None):
        """Enqueues zero or elements to this queue.

    This operation slices each component tensor along the 0th dimension to
    make multiple queue elements. All of the tensors in `vals` must have the
    same size in the 0th dimension.

    If the queue is full when this operation executes, it will block
    until all of the elements have been enqueued.

    Args:
      vals: The tensor or tuple of tensors from which the queue elements
        are taken.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a batch of tuples of tensors to the queue.
    """
        if name is None:
            name = "%s_EnqueueMany" % self._name

        ret = gen_data_flow_ops._queue_enqueue_many(self._queue_ref,
                                                    vals,
                                                    name=name)

        # NOTE(mrry): Not using a shape function because we need access to
        # the `QueueBase` object.
        batch_dim = ret.inputs[1].get_shape()[0]
        for val, shape in zip(ret.inputs[1:], self._shapes):
            batch_dim.merge_with(val.get_shape()[0])
            val.get_shape()[1:].assert_is_compatible_with(shape)

        return ret
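
For readers unfamiliar with the op itself, here is a minimal usage sketch, assuming the TF 1.x graph/session API (tf.FIFOQueue, tf.Session); the queue and values are invented for illustration. It shows the slicing the docstring describes: a rank-1 tensor of size 3 becomes three scalar queue elements.

import tensorflow as tf

# Hypothetical queue for illustration: one scalar float32 component.
q = tf.FIFOQueue(capacity=10, dtypes=[tf.float32], shapes=[[]])

# enqueue_many slices the batch tensor along dimension 0,
# producing three scalar elements.
enqueue_op = q.enqueue_many(tf.constant([1.0, 2.0, 3.0]))
dequeue_op = q.dequeue()

with tf.Session() as sess:
    sess.run(enqueue_op)
    print(sess.run(dequeue_op))  # 1.0
    print(sess.run(dequeue_op))  # 2.0
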
Example #2
  def enqueue_many(self, vals, name=None):
    """Enqueues zero or elements to this queue.

    This operation slices each component tensor along the 0th dimension to
    make multiple queue elements. All of the tensors in `vals` must have the
    same size in the 0th dimension.

    If the queue is full when this operation executes, it will block
    until all of the elements have been enqueued.

    Args:
      vals: The tensor or tuple of tensors from which the queue elements
        are taken.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a batch of tuples of tensors to the queue.
    """
    if not isinstance(vals, (list, tuple)):
      vals = [vals]

    with ops.op_scope(vals, name, "%s_EnqueueMany" % self._name) as scope:
      vals = self._check_enqueue_dtypes(vals)

      # NOTE(mrry): Not using a shape function because we need access to
      # the `QueueBase` object.
      batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
      for val, shape in zip(vals, self._shapes):
        batch_dim = batch_dim.merge_with(
            val.get_shape().with_rank_at_least(1)[0])
        val.get_shape()[1:].assert_is_compatible_with(shape)

      return gen_data_flow_ops._queue_enqueue_many(
          self._queue_ref, vals, name=scope)
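
The shape bookkeeping in this variant can be exercised on its own. The following is a sketch assuming TF 1.x's TensorShape/Dimension API (where indexing a shape yields a Dimension object), with made-up shapes standing in for vals[0] and vals[1].

import tensorflow as tf

# Illustrative shapes: two components, each a batch of 4 elements of shape [3].
s0 = tf.TensorShape([4, 3])
s1 = tf.TensorShape([4, 3])

batch_dim = s0.with_rank_at_least(1)[0]                         # Dimension(4)
batch_dim = batch_dim.merge_with(s1.with_rank_at_least(1)[0])   # still 4; raises if the batch sizes differ
s1[1:].assert_is_compatible_with(tf.TensorShape([3]))           # per-element shape check
print(batch_dim)  # 4
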
Example #3
    def enqueue_many(self, vals, name=None):
        """Enqueues zero or more elements to this queue.

    This operation slices each component tensor along the 0th dimension to
    make multiple queue elements. All of the tensors in `vals` must have the
    same size in the 0th dimension.

    If the queue is full when this operation executes, it will block
    until all of the elements have been enqueued.

    Args:
      vals: The tensor or tuple of tensors from which the queue elements
        are taken.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a batch of tuples of tensors to the queue.
    """
        if name is None:
            name = "%s_EnqueueMany" % self._name

        ret = gen_data_flow_ops._queue_enqueue_many(self._queue_ref, vals, name=name)

        # NOTE(mrry): Not using a shape function because we need access to
        # the `QueueBase` object.
        batch_dim = ret.inputs[1].get_shape()[0]
        for val, shape in zip(ret.inputs[1:], self._shapes):
            batch_dim.merge_with(val.get_shape()[0])
            val.get_shape()[1:].assert_is_compatible_with(shape)

        return ret
Example #4
    def enqueue_many(self, vals, name=None):
        """Enqueues zero or more elements to this queue.

    This operation slices each component tensor along the 0th dimension to
    make multiple queue elements. All of the tensors in `vals` must have the
    same size in the 0th dimension.

    If the queue is full when this operation executes, it will block
    until all of the elements have been enqueued.

    At runtime, this operation may raise an error if the queue is
    [closed](#QueueBase.close) before or during its execution. If the
    queue is closed before this operation runs,
    `tf.errors.AbortedError` will be raised. If this operation is
    blocked, and either (i) the queue is closed by a close operation
    with `cancel_pending_enqueues=True`, or (ii) the session is
    [closed](../../api_docs/python/client.md#Session.close),
    `tf.errors.CancelledError` will be raised.

    Args:
      vals: A tensor, a list or tuple of tensors, or a dictionary
        from which the queue elements are taken.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a batch of tuples of tensors to the queue.
    """
        with ops.name_scope(name, "%s_EnqueueMany" % self._name,
                            self._scope_vals(vals)) as scope:
            vals = self._check_enqueue_dtypes(vals)

            # NOTE(mrry): Not using a shape function because we need access to
            # the `QueueBase` object.
            batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
            for val, shape in zip(vals, self._shapes):
                batch_dim = batch_dim.merge_with(
                    val.get_shape().with_rank_at_least(1)[0])
                val.get_shape()[1:].assert_is_compatible_with(shape)

            return gen_data_flow_ops._queue_enqueue_many(self._queue_ref,
                                                         vals,
                                                         name=scope)
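
The dictionary form mentioned under Args above pairs with a queue constructed with component names. A small sketch follows, assuming the TF 1.x graph/session API and a hypothetical two-component queue; the names "id" and "score" are invented for illustration.

import tensorflow as tf

# Hypothetical named-component queue for illustration.
q = tf.FIFOQueue(capacity=10,
                 dtypes=[tf.int32, tf.float32],
                 shapes=[[], []],
                 names=["id", "score"])

# Each dictionary value is sliced along dimension 0, yielding two elements.
enqueue_op = q.enqueue_many({"id": tf.constant([1, 2]),
                             "score": tf.constant([0.5, 0.9])})
dequeue_op = q.dequeue()  # returns a dict keyed by component name

with tf.Session() as sess:
    sess.run(enqueue_op)
    print(sess.run(dequeue_op))  # e.g. {'id': 1, 'score': 0.5}
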
Example #5
  def enqueue_many(self, vals, name=None):
    """Enqueues zero or more elements to this queue.

    This operation slices each component tensor along the 0th dimension to
    make multiple queue elements. All of the tensors in `vals` must have the
    same size in the 0th dimension.

    If the queue is full when this operation executes, it will block
    until all of the elements have been enqueued.

    At runtime, this operation may raise an error if the queue is
    [closed](#QueueBase.close) before or during its execution. If the
    queue is closed before this operation runs,
    `tf.errors.AbortedError` will be raised. If this operation is
    blocked, and either (i) the queue is closed by a close operation
    with `cancel_pending_enqueues=True`, or (ii) the session is
    [closed](../../api_docs/python/client.md#Session.close),
    `tf.errors.CancelledError` will be raised.

    Args:
      vals: A tensor, a list or tuple of tensors, or a dictionary
        from which the queue elements are taken.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a batch of tuples of tensors to the queue.
    """
    with ops.op_scope(self._scope_vals(vals), name,
                      "%s_EnqueueMany" % self._name) as scope:
      vals = self._check_enqueue_dtypes(vals)

      # NOTE(mrry): Not using a shape function because we need access to
      # the `QueueBase` object.
      batch_dim = vals[0].get_shape().with_rank_at_least(1)[0]
      for val, shape in zip(vals, self._shapes):
        batch_dim = batch_dim.merge_with(
            val.get_shape().with_rank_at_least(1)[0])
        val.get_shape()[1:].assert_is_compatible_with(shape)

      return gen_data_flow_ops._queue_enqueue_many(
          self._queue_ref, vals, name=scope)
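
The closed-queue behaviour described in the docstring can be observed directly; below is a sketch under the same TF 1.x graph/session assumption. Because the exact error class depends on how and when the queue is closed, the sketch catches both classes the docstring names.

import tensorflow as tf

q = tf.FIFOQueue(capacity=10, dtypes=[tf.int32], shapes=[[]])
enqueue_op = q.enqueue_many(tf.constant([1, 2, 3]))
close_op = q.close()

with tf.Session() as sess:
    sess.run(close_op)   # close the queue before the enqueue runs
    try:
        sess.run(enqueue_op)
    except (tf.errors.AbortedError, tf.errors.CancelledError) as e:
        # The docstring documents AbortedError for an already-closed queue and
        # CancelledError for a cancelled pending enqueue or a closed session.
        print("enqueue_many failed:", type(e).__name__)
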