def __init__(self, input_dataset, device, buffer_size):
  with ops.device("/device:CPU:0"):
    super(_PrefetchToDeviceEagerIterator, self).__init__(input_dataset)
    input_iterator_handle = core_gen_dataset_ops.iterator_to_string_handle(
        self._resource)

  self._device = device

  @function.Defun(dtypes.string)
  def _prefetch_fn(handle):
    """Prefetches one element from `input_iterator`."""
    remote_iterator = iterator_ops.Iterator.from_string_handle(
        handle, self.output_types, self.output_shapes, self.output_classes)
    ret = remote_iterator.get_next()
    return nest.flatten(sparse.serialize_sparse_tensors(ret))

  _prefetch_fn.add_to_graph(None)

  with ops.device(device):
    self._buffering_resource = function_buffering_resource(
        f=_prefetch_fn,
        output_types=self._flat_output_types,
        target_device=gen_dataset_ops.iterator_get_device(self._resource),
        string_arg=input_iterator_handle,
        buffer_size=buffer_size,
        shared_name=iterator_ops._generate_shared_name(
            "function_buffer_resource"))
def __init__(self, iterator_resource, initializer, output_types,
             output_shapes, output_classes):
  """Creates a new iterator from the given iterator resource.

  Note: Most users will not call this initializer directly, and will
  instead use `Dataset.make_initializable_iterator()` or
  `Dataset.make_one_shot_iterator()`.

  Args:
    iterator_resource: A `tf.resource` scalar `tf.Tensor` representing the
      iterator.
    initializer: A `tf.Operation` that should be run to initialize this
      iterator.
    output_types: A nested structure of `tf.DType` objects corresponding to
      each component of an element of this iterator.
    output_shapes: A nested structure of `tf.TensorShape` objects
      corresponding to each component of an element of this iterator.
    output_classes: A nested structure of Python `type` objects corresponding
      to each component of an element of this iterator.
  """
  self._iterator_resource = iterator_resource
  self._initializer = initializer
  self._output_classes = output_classes
  self._output_types = output_types
  self._output_shapes = output_shapes
  self._string_handle = gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource)
  self._get_next_call_count = 0
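# Usage sketch (illustrative, not part of the snippet above): as the docstring
# notes, most users obtain an Iterator via `Dataset.make_initializable_iterator()`
# rather than calling this constructor directly. A minimal TF 1.x graph-mode
# example, assuming a standard Session-based setup:
import tensorflow as tf

dataset = tf.data.Dataset.range(4)
iterator = dataset.make_initializable_iterator()
next_element = iterator.get_next()

with tf.Session() as sess:
  sess.run(iterator.initializer)  # runs the Iterator's `initializer` op
  print(sess.run(next_element))   # prints 0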
def __init__(self, input_dataset, device, buffer_size):
  with ops.device("/device:CPU:0"):
    super(_PrefetchToDeviceEagerIterator, self).__init__(input_dataset)
    input_iterator_handle = gen_dataset_ops.iterator_to_string_handle(
        self._resource)

  self._device = device

  @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.string)])
  def _prefetch_fn(handle):
    """Prefetches one element from `input_iterator`."""
    remote_iterator = iterator_ops.Iterator.from_string_handle(
        handle, self.output_types, self.output_shapes, self.output_classes)
    ret = remote_iterator.get_next()
    return nest.flatten(sparse.serialize_sparse_tensors(ret))

  self._prefetch_fn = _prefetch_fn._get_concrete_function_internal()  # pylint: disable=protected-access

  with ops.device(device):
    self._buffering_resource = function_buffering_resource(
        f=self._prefetch_fn,
        output_types=self._flat_output_types,
        target_device=ged_ops.experimental_iterator_get_device(
            self._resource),
        string_arg=input_iterator_handle,
        buffer_size=buffer_size,
        shared_name=iterator_ops._generate_shared_name(
            "function_buffer_resource"))
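# Illustrative sketch (an assumption, not taken from the snippet above): the
# internal `function.defun(input_signature=...)` plus
# `_get_concrete_function_internal()` pair corresponds to tracing a Python
# function into a ConcreteFunction. The public equivalent uses `tf.function`;
# the body below is a placeholder, not the real prefetch function:
import tensorflow as tf

@tf.function(input_signature=[tf.TensorSpec([], tf.string)])
def handle_fn(handle):
  # Placeholder body; the real `_prefetch_fn` rebuilds an iterator from the
  # string handle and returns its next element.
  return tf.strings.length(handle)

concrete_fn = handle_fn.get_concrete_function()  # traced once for the signature
print(concrete_fn.structured_input_signature)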
def __init__(self, iterator_resource, initializer, output_types,
             output_shapes, output_classes):
  """Creates a new iterator from the given iterator resource.

  Note: Most users will not call this initializer directly, and will
  instead use `Dataset.make_initializable_iterator()` or
  `Dataset.make_one_shot_iterator()`.

  Args:
    iterator_resource: A `tf.resource` scalar `tf.Tensor` representing the
      iterator.
    initializer: A `tf.Operation` that should be run to initialize this
      iterator.
    output_types: A nested structure of `tf.DType` objects corresponding to
      each component of an element of this iterator.
    output_shapes: A nested structure of `tf.TensorShape` objects
      corresponding to each component of an element of this iterator.
    output_classes: A nested structure of Python `type` objects corresponding
      to each component of an element of this iterator.
  """
  self._iterator_resource = iterator_resource
  self._initializer = initializer
  if (output_types is None or output_shapes is None
      or output_classes is None):
    raise ValueError("If `structure` is not specified, all of "
                     "`output_types`, `output_shapes`, and `output_classes`"
                     " must be specified.")
  self._structure = structure_lib.convert_legacy_structure(
      output_types, output_shapes, output_classes)
  self._string_handle = gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource)
  self._get_next_call_count = 0
  ops.add_to_collection(GLOBAL_ITERATORS, self._iterator_resource)
def __init__(self, dataset):
  """Creates a new iterator over the given dataset.

  For example:

  ```python
  dataset = tf.data.Dataset.range(4)
  for x in Iterator(dataset):
    print(x)
  ```

  Tensors produced will be placed on the device on which this iterator
  object was created.

  Args:
    dataset: A `tf.data.Dataset` object.

  Raises:
    TypeError: If `dataset` is an unsupported type.
    RuntimeError: When invoked without eager execution enabled.
  """
  if isinstance(dataset, prefetching_ops._PrefetchToDeviceDataset):  # pylint: disable=protected-access
    raise TypeError(
        "`tf.contrib.data.prefetch_to_device()` is not compatible with "
        "`tf.contrib.eager.Iterator`. Use `for ... in dataset:` to iterate "
        "over the dataset instead.")

  super(Iterator, self).__init__(dataset)
  if not context.context().device_spec.device_type:
    is_remote_device = False
  else:
    is_remote_device = context.context().device_spec.device_type != "CPU"
  self._buffer_resource_handle = None
  if is_remote_device:
    with ops.device("/device:CPU:0"):
      iter_string_handle = gen_dataset_ops.iterator_to_string_handle(
          self._resource)

      @function.Defun(dtypes.string)
      def remote_fn(h):
        remote_iterator = iterator_ops.Iterator.from_string_handle(
            h, self.output_types, self.output_shapes, self.output_classes)
        return remote_iterator.get_next()

      remote_fn.add_to_graph(None)
      target = constant_op.constant("/device:CPU:0")
    with ops.device(self._device):
      self._buffer_resource_handle = prefetching_ops.function_buffering_resource(  # pylint: disable=line-too-long
          string_arg=iter_string_handle,
          output_types=self._flat_output_types,
          f=remote_fn,
          target_device=target,
          buffer_size=10,
          container="",
          shared_name=_generate_shared_name(
              "contrib_eager_iterator_function_buffer_resource"))
      self._buffer_resource_deleter = resource_variable_ops.EagerResourceDeleter(  # pylint: disable=line-too-long
          handle=self._buffer_resource_handle,
          handle_device=self._device)
def __init__(self, iterator_resource, initializer, output_types,
             output_shapes, output_classes):
  self._iterator_resource = iterator_resource
  self._initializer = initializer
  self._output_classes = output_classes
  self._output_types = output_types
  self._output_shapes = output_shapes
  self._string_handle = gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource)
def string_handle(self, name=None):
  """Returns a string-valued `tf.Tensor` that represents this iterator.

  Args:
    name: (Optional.) A name for the created operation.

  Returns:
    A scalar `tf.Tensor` of type `tf.string`.
  """
  return gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource, name=name)
def _init_func():
  """Creates an iterator for the input dataset.

  Returns:
    A `string` tensor that encapsulates the iterator created.
  """
  ds_variant = gen_dataset_ops.unwrap_dataset_variant(wrap_ds_variant)
  resource = gen_dataset_ops.anonymous_iterator(
      **dataset_ops.flat_structure(self._input_dataset))
  with ops.control_dependencies(
      [gen_dataset_ops.make_iterator(ds_variant, resource)]):
    return gen_dataset_ops.iterator_to_string_handle(resource)
def _init_func():
  """Creates an iterator for the input dataset.

  Returns:
    A `string` tensor that encapsulates the iterator created.
  """
  # pylint: disable=protected-access
  ds_variant = self._input_dataset._as_variant_tensor()
  resource = gen_dataset_ops.anonymous_iterator(
      output_types=self._flat_output_types,
      output_shapes=self._flat_output_shapes)
  with ops.control_dependencies(
      [gen_dataset_ops.make_iterator(ds_variant, resource)]):
    return gen_dataset_ops.iterator_to_string_handle(resource)
def string_handle(self, name=None):
  """Returns a string-valued `tf.Tensor` that represents this iterator.

  Args:
    name: (Optional.) A name for the created operation.

  Returns:
    A scalar `tf.Tensor` of type `tf.string`.
  """
  if name is None:
    return self._string_handle
  else:
    return gen_dataset_ops.iterator_to_string_handle(
        self._iterator_resource, name=name)
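# Usage sketch (illustrative): `string_handle()` is primarily used with
# feedable iterators, where the handle of a concrete iterator is fed into a
# generic `Iterator.from_string_handle` placeholder. A minimal TF 1.x example,
# with the datasets below assumed only for illustration:
import tensorflow as tf

train_dataset = tf.data.Dataset.range(100)
val_dataset = tf.data.Dataset.range(10)

handle = tf.placeholder(tf.string, shape=[])
iterator = tf.data.Iterator.from_string_handle(
    handle, train_dataset.output_types, train_dataset.output_shapes)
next_element = iterator.get_next()

train_iterator = train_dataset.make_one_shot_iterator()
val_iterator = val_dataset.make_initializable_iterator()

with tf.Session() as sess:
  # Switch between pipelines by feeding different string handles.
  train_handle = sess.run(train_iterator.string_handle())
  print(sess.run(next_element, feed_dict={handle: train_handle}))  # prints 0

  sess.run(val_iterator.initializer)
  val_handle = sess.run(val_iterator.string_handle())
  print(sess.run(next_element, feed_dict={handle: val_handle}))    # prints 0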
def _init_func():
  """Creates an iterator for the input dataset.

  Returns:
    A `string` tensor that encapsulates the iterator created.
  """
  ds_variant = gen_dataset_ops.unwrap_dataset_variant(wrap_ds_variant)
  resource = gen_dataset_ops.anonymous_iterator(
      output_types=self._flat_output_types,
      output_shapes=self._flat_output_shapes)
  with ops.control_dependencies(
      [gen_dataset_ops.make_iterator(ds_variant, resource)]):
    return gen_dataset_ops.iterator_to_string_handle(resource)
def __init__(self, iterator_resource, initializer, output_types,
             output_shapes, output_classes):
  """Creates a new iterator from the given iterator resource.

  Note: Most users will not call this initializer directly, and will
  instead use `Dataset.make_initializable_iterator()` or
  `Dataset.make_one_shot_iterator()`.

  Args:
    iterator_resource: A `tf.resource` scalar `tf.Tensor` representing the
      iterator.
    initializer: A `tf.Operation` that should be run to initialize this
      iterator.
    output_types: A (nested) structure of `tf.DType` objects corresponding
      to each component of an element of this iterator.
    output_shapes: A (nested) structure of `tf.TensorShape` objects
      corresponding to each component of an element of this iterator.
    output_classes: A (nested) structure of Python `type` objects
      corresponding to each component of an element of this iterator.

  Raises:
    ValueError: If `output_types`, `output_shapes`, or `output_classes` is
      not specified.
  """
  self._iterator_resource = iterator_resource
  self._initializer = initializer
  if (output_types is None or output_shapes is None
      or output_classes is None):
    raise ValueError(
        "All of `output_types`, `output_shapes`, and `output_classes` "
        "must be specified to create an iterator. Got "
        f"`output_types` = {output_types!r}, "
        f"`output_shapes` = {output_shapes!r}, "
        f"`output_classes` = {output_classes!r}.")
  self._element_spec = structure.convert_legacy_structure(
      output_types, output_shapes, output_classes)
  self._flat_tensor_shapes = structure.get_flat_tensor_shapes(
      self._element_spec)
  self._flat_tensor_types = structure.get_flat_tensor_types(
      self._element_spec)
  self._string_handle = gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource)
  self._get_next_call_count = 0
  ops.add_to_collection(GLOBAL_ITERATORS, self._iterator_resource)
def __init__(self, iterator_resource, initializer, output_types,
             output_shapes, output_classes):
  """Creates a new iterator from the given iterator resource.

  Note: Most users will not call this initializer directly, and will
  instead use `Dataset.make_initializable_iterator()` or
  `Dataset.make_one_shot_iterator()`.

  Args:
    iterator_resource: A `tf.resource` scalar `tf.Tensor` representing the
      iterator.
    initializer: A `tf.Operation` that should be run to initialize this
      iterator.
    output_types: A nested structure of `tf.DType` objects corresponding to
      each component of an element of this iterator.
    output_shapes: A nested structure of `tf.TensorShape` objects
      corresponding to each component of an element of this iterator.
    output_classes: A nested structure of Python `type` objects corresponding
      to each component of an element of this iterator.
  """
  self._iterator_resource = iterator_resource
  self._initializer = initializer
  if (output_types is None or output_shapes is None
      or output_classes is None):
    raise ValueError(
        "If `structure` is not specified, all of "
        "`output_types`, `output_shapes`, and `output_classes`"
        " must be specified.")
  # pylint: disable=protected-access
  self._structure = structure_lib.Structure._from_legacy_structure(
      output_types, output_shapes, output_classes)
  # pylint: enable=protected-access
  self._string_handle = gen_dataset_ops.iterator_to_string_handle(
      self._iterator_resource)
  self._get_next_call_count = 0
  ops.add_to_collection(GLOBAL_ITERATORS, self._iterator_resource)
def __init__(self, dataset):
  """Creates a new iterator over the given dataset.

  For example:

  ```python
  dataset = tf.data.Dataset.range(4)
  for x in Iterator(dataset):
    print(x)
  ```

  Tensors produced will be placed on the device on which this iterator
  object was created.

  Args:
    dataset: A `tf.data.Dataset` object.

  Raises:
    RuntimeError: When invoked without eager execution enabled.
  """
  if not context.in_eager_mode():
    raise RuntimeError(
        "{} objects can only be used when eager execution is enabled, use "
        "tf.data.Dataset.make_iterator or "
        "tf.data.Dataset.make_one_shot_iterator for graph construction"
        .format(type(self)))
  with ops.device("/device:CPU:0"):
    ds_variant = dataset._as_variant_tensor()  # pylint: disable=protected-access
    self._output_types = dataset.output_types
    self._output_shapes = dataset.output_shapes
    self._flat_output_types = nest.flatten(dataset.output_types)
    self._flat_output_shapes = nest.flatten(dataset.output_shapes)
    self._resource = gen_dataset_ops.iterator(
        container="",
        shared_name=_generate_shared_name("eager_iterator"),
        output_types=self._flat_output_types,
        output_shapes=self._flat_output_shapes)
    gen_dataset_ops.make_iterator(ds_variant, self._resource)
    # Delete the resource when this object is deleted
    self._resource_deleter = resource_variable_ops.EagerResourceDeleter(
        handle=self._resource, handle_device="/device:CPU:0")
  self._device = context.context().device_name
  self._buffer_resource_handle = None
  if not context.context().device_spec.device_type:
    is_remote_device = False
  else:
    is_remote_device = context.context().device_spec.device_type != "CPU"
  if is_remote_device:
    with ops.device("/device:CPU:0"):
      iter_string_handle = gen_dataset_ops.iterator_to_string_handle(
          self._resource)

      @function.Defun(dtypes.string)
      def remote_fn(h):
        remote_iterator = iterator_ops.Iterator.from_string_handle(
            h, self._output_types, self._output_shapes)
        return remote_iterator.get_next()

      remote_fn.add_to_graph(None)
      target = constant_op.constant("/device:CPU:0")
    with ops.device(self._device):
      self._buffer_resource_handle = prefetching_ops.function_buffering_resource(
          string_arg=iter_string_handle,
          f=remote_fn,
          target_device=target,
          buffer_size=10,
          thread_pool_size=1,
          container="",
          shared_name=_generate_shared_name("function_buffer_resource"))
      self._buffer_resource_deleter = resource_variable_ops.EagerResourceDeleter(
          handle=self._buffer_resource_handle,
          handle_device=self._device)