Example #1
0
        """Tests whether the given value is an ndarray (and not a TF tensor/var)."""
        # TODO(tomhennigan) Support __array_interface__ too.
        return hasattr(value, "__array__") and not (
            isinstance(value, ops.Tensor)
            or isinstance(value, resource_variable_ops.BaseResourceVariable)
            or hasattr(value, "_should_act_as_resource_variable")

            # For legacy reasons we do not automatically promote Numpy strings.
            or isinstance(value, np.str_)
            # NumPy dtypes have __array__ as unbound methods.
            or isinstance(value, type)
            # CompositeTensors should be flattened instead.
            or isinstance(value, composite_tensor.CompositeTensor))


# Register GenericType under the name "GenericType" in the pywrap type registry.
_pywrap_utils.RegisterType("GenericType", GenericType)


class OrderedCollectionType(trace.TraceType):
    """Represents an ordered collection of TraceType objects.

  Attributes:
    components: The sequence of TraceType objects that this class represents.
  """
    def __init__(self, *components: trace.TraceType):
        self.components = components

    def _has_same_structure(self, other):
        if not isinstance(other, type(self)):
            return False
Example #2
0
  `CompositeTensor`s; use `CompositeTensor._type_spec` instead.

  Args:
    type_object: A Python `type` object representing the type of values
      accepted by `converter_fn`.
    converter_fn: A function that takes one argument (an instance of the
      type represented by `type_object`) and returns a `TypeSpec`.
    allow_subclass: If true, then use `isinstance(value, type_object)` to
      check for matches.  If false, then use `type(value) is type_object`.
  """
  _, type_object = tf_decorator.unwrap(type_object)
  _TYPE_CONVERSION_FUNCTION_REGISTRY.append(
      (type_object, converter_fn, allow_subclass))


# Register TypeSpec under the name "TypeSpec" in the pywrap type registry.
_pywrap_utils.RegisterType("TypeSpec", TypeSpec)


# Forward and reverse lookup tables between TypeSpec subclasses and their
# registered names.
_TYPE_SPEC_TO_NAME = {}
_NAME_TO_TYPE_SPEC = {}


# Regular expression for valid TypeSpec names: two or more dot-separated
# identifier segments (e.g. "tf.SparseTensorSpec").
_REGISTERED_NAME_RE = re.compile(r"^(\w+\.)+\w+$")


# TODO(b/173744905) tf_export this as "tf.register_type_spec".  (And add a
# usage example to the docstring, once the API is public.)
#
# TODO(b/173744905) Update this decorator to apply to ExtensionType rather than
# TypeSpec (once we do refactoring to move to_components/from_components from
Example #3
0
                                      device=iterator_resource.device)
        ]
        super(_IteratorSaveable, self).__init__(iterator_resource, specs, name)

    def restore(self, restored_tensors, restored_shapes):
        """Rebuilds the iterator's state from `restored_tensors[0]`.

        The deserialize op is created colocated with the iterator resource.
        `restored_shapes` is accepted for interface compatibility but unused.
        """
        serialized_iterator = restored_tensors[0]
        with ops.colocate_with(self.op):
            return gen_dataset_ops.deserialize_iterator(
                self.op, serialized_iterator)


@deprecation.deprecated(
    None, "Use `tf.data.Iterator.get_next_as_optional()` instead.")
@tf_export("data.experimental.get_next_as_optional")
def get_next_as_optional(iterator):
    """Wraps the next element of `iterator` in a `tf.experimental.Optional`.

  The returned `tf.experimental.Optional` contains the next element when one
  is available, and holds no value once the iterator has reached the end of
  the sequence.

  Args:
    iterator: A `tf.data.Iterator`.

  Returns:
    A `tf.experimental.Optional` that either contains the iterator's next
    element or is empty if the sequence is exhausted.
  """
    return iterator.get_next_as_optional()


# Register OwnedIterator under the name "OwnedIterator" in the pywrap type
# registry.
_pywrap_utils.RegisterType("OwnedIterator", OwnedIterator)
Example #4
0
        if not isinstance(other, WeakrefType):
            return False

        if self._object() is None or other._object() is None:
            return False

        if self._object() is other._object():
            return True

        return self._object == other._object

    def __hash__(self):
        return self._object_hash


# Register both trace types with the pywrap type registry under their class
# names.
_pywrap_utils.RegisterType("GenericType", GenericType)
_pywrap_utils.RegisterType("WeakrefType", WeakrefType)


class OrderedCollectionType(trace.TraceType):
    """Represents an ordered collection of TraceType objects.

  Attributes:
    components: The sequence of TraceType objects that this class represents.
  """
    def __init__(self, *components: trace.TraceType):
        self.components = components

    def _has_same_structure(self, other):
        if not isinstance(other, type(self)):
            return False
Example #5
0
        s = "BoundedTensorSpec(shape={}, dtype={}, name={}, minimum={}, maximum={})"
        return s.format(self.shape, repr(self.dtype), repr(self.name),
                        repr(self.minimum), repr(self.maximum))

    def __eq__(self, other):
        """TensorSpec equality plus numerically-close minimum/maximum bounds."""
        if not super(BoundedTensorSpec, self).__eq__(other):
            return False
        return (np.allclose(self.minimum, other.minimum) and
                np.allclose(self.maximum, other.maximum))

    def __hash__(self):
        return hash((self._shape, self.dtype))

    def __reduce__(self):
        """Pickle protocol: reconstruct by calling the constructor with state."""
        constructor_args = (self._shape, self._dtype, self._minimum,
                            self._maximum, self._name)
        return BoundedTensorSpec, constructor_args

    def _serialize(self):
        return (self._shape, self._dtype, self._minimum, self._maximum,
                self._name)


# Register BoundedTensorSpec as serializable with trace_type, and expose
# TensorSpec to the pywrap type registry.
trace_type.register_serializable(BoundedTensorSpec)
_pywrap_utils.RegisterType("TensorSpec", TensorSpec)

# Note: we do not include Tensor names when constructing TypeSpecs.
type_spec.register_type_spec_from_value_converter(
    ops.Tensor, lambda tensor: TensorSpec(tensor.shape, tensor.dtype))

# NumPy ndarrays are likewise described by a TensorSpec built from their
# shape and dtype.
type_spec.register_type_spec_from_value_converter(
    np.ndarray, lambda array: TensorSpec(array.shape, array.dtype))
Example #6
0
        # must be the shape of a vector.
        if shape.ndims is not None and shape.ndims != 1:
            raise ValueError(
                f"Expected a shape with 1 dimension. Obtained: {shape} "
                f"which has {shape.ndims} dimensions.")
        rank = tensor_shape.dimension_value(shape[0])
        return SparseTensorSpec(tensor_shape.unknown_shape(rank), self.dtype)

    def consumers(self):
        """Returns this object's consumers, delegating to `_consumers`."""
        return self._consumers()


# Plain value type carrying the (indices, values, dense_shape) component
# triple.  It is exported under the TF1 name "SparseTensorValue" and
# registered with the pywrap type registry.
SparseTensorValue = collections.namedtuple(
    "SparseTensorValue", ["indices", "values", "dense_shape"])
tf_export(v1=["SparseTensorValue"])(SparseTensorValue)
_pywrap_utils.RegisterType("SparseTensorValue", SparseTensorValue)


@tf_export("SparseTensorSpec")
@type_spec.register("tf.SparseTensorSpec")
class SparseTensorSpec(type_spec.BatchableTypeSpec):
    """Type specification for a `tf.sparse.SparseTensor`."""

    __slots__ = ["_shape", "_dtype"]

    value_type = property(lambda self: SparseTensor)

    def __init__(self, shape=None, dtype=dtypes.float32):
        """Constructs a type specification for a `tf.sparse.SparseTensor`.

    Args:
Example #7
0
    def _convert_variables_to_tensors(self):
        """Converts ResourceVariable components to Tensors.

    Override this method to explicitly convert ResourceVariables embedded in the
    CompositeTensor to Tensors. By default, it returns the CompositeTensor
    unchanged.

    Returns:
      A CompositeTensor with all its ResourceVariable components converted to
      Tensors.
    """
        return self


# Register CompositeTensor under the name "CompositeTensor" in the pywrap
# type registry.
_pywrap_utils.RegisterType("CompositeTensor", CompositeTensor)


def replace_composites_with_components(structure):
    """Recursively replaces CompositeTensors with their components.

  Args:
    structure: A `nest`-compatible structure, possibly containing composite
      tensors.

  Returns:
    A copy of `structure`, where each composite tensor has been replaced by
    its components.  The result will contain no composite tensors.
    Note that `nest.flatten(replace_composites_with_components(structure))`
    returns the same value as `nest.flatten(structure)`.
  """