# tensors of the SparseTensor are ([None], [None, r], [r]). NOTE: The shape # invariant here is the shape of the SparseTensor.dense_shape property. It # must be the shape of a vector. if shape.ndims is not None and shape.ndims != 1: raise ValueError("Expected a shape with 1 dimension") rank = tensor_shape.dimension_value(shape[0]) return SparseTensorSpec(tensor_shape.unknown_shape(rank), self.dtype) def consumers(self): return self._consumers() SparseTensorValue = collections.namedtuple( "SparseTensorValue", ["indices", "values", "dense_shape"]) tf_export(v1=["SparseTensorValue"])(SparseTensorValue) _pywrap_utils.RegisterType("SparseTensorValue", SparseTensorValue) @tf_export("SparseTensorSpec") class SparseTensorSpec(type_spec.BatchableTypeSpec): """Type specification for a `tf.sparse.SparseTensor`.""" __slots__ = ["_shape", "_dtype"] value_type = property(lambda self: SparseTensor) def __init__(self, shape=None, dtype=dtypes.float32): """Constructs a type specification for a `tf.sparse.SparseTensor`. Args: shape: The dense shape of the `SparseTensor`, or `None` to allow any dense
Returns:
  A list of `Operation`s.

Raises:
  RuntimeError: If this method is called while executing eagerly.
"""
# Flatten this composite into its component tensors and gather the graph
# consumers of every component that is attached to a graph (components
# without a `graph` attribute, e.g. eager tensors, are skipped).
consumers = nest.flatten([
    component.consumers()
    for component in nest.flatten(self, expand_composites=True)
    if getattr(component, "graph", None) is not None
])
# De-duplicate: a single op may consume more than one component tensor.
return list(set(consumers))


_pywrap_utils.RegisterType("CompositeTensor", CompositeTensor)


def replace_composites_with_components(structure):
  """Recursively replaces CompositeTensors with their components.

  Args:
    structure: A `nest`-compatible structure, possibly containing composite
      tensors.

  Returns:
    A copy of `structure`, where each composite tensor has been replaced by
    its components.  The result will contain no composite tensors.  Note that
    `nest.flatten(replace_composites_with_components(structure))` returns the
    same value as `nest.flatten(structure)`.
  """
def __repr__(self):
  # Debug-friendly representation including the bounds, e.g.
  # BoundedTensorSpec(shape=..., dtype=..., name=..., minimum=..., maximum=...).
  s = "BoundedTensorSpec(shape={}, dtype={}, name={}, minimum={}, maximum={})"
  return s.format(self.shape, repr(self.dtype), repr(self.name),
                  repr(self.minimum), repr(self.maximum))

def __eq__(self, other):
  # Equal iff the base TensorSpec comparison passes AND the numeric bounds
  # match elementwise (np.allclose tolerates float round-off).
  # NOTE(review): assumes `other` exposes `minimum`/`maximum` whenever the
  # base comparison succeeds — confirm for non-bounded comparands.
  tensor_spec_eq = super(BoundedTensorSpec, self).__eq__(other)
  return (tensor_spec_eq and np.allclose(self.minimum, other.minimum) and
          np.allclose(self.maximum, other.maximum))

def __hash__(self):
  # Bounds are excluded from the hash (ndarrays are unhashable).  This is
  # consistent with __eq__: equal specs necessarily share shape and dtype,
  # so they hash identically.
  return hash((self._shape_tuple, self.dtype))

def __reduce__(self):
  # Pickle support: rebuild from the five constructor arguments.
  return BoundedTensorSpec, (self._shape, self._dtype, self._minimum,
                             self._maximum, self._name)

def _serialize(self):
  # TypeSpec serialization hook; mirrors __reduce__'s argument tuple.
  return (self._shape, self._dtype, self._minimum, self._maximum, self._name)


_pywrap_utils.RegisterType("TensorSpec", TensorSpec)


# Note: we do not include Tensor names when constructing TypeSpecs.
type_spec.register_type_spec_from_value_converter(
    ops.Tensor, lambda tensor: TensorSpec(tensor.shape, tensor.dtype))

type_spec.register_type_spec_from_value_converter(
    np.ndarray, lambda array: TensorSpec(array.shape, array.dtype))
return None


# Ordered registry of (type_object, converter_fn, allow_subclass) triples;
# later registrations take precedence over earlier ones (see below).
_TYPE_CONVERSION_FUNCTION_REGISTRY = []


def register_type_spec_from_value_converter(type_object, converter_fn,
                                            allow_subclass=False):
  """Registers a function for converting values with a given type to TypeSpecs.

  If multiple registered `type_object`s match a value, then the most recent
  registration takes precedence.  Custom converters should not be defined for
  `CompositeTensor`s; use `CompositeTensor._type_spec` instead.

  Args:
    type_object: A Python `type` object representing the type of values
      accepted by `converter_fn`.
    converter_fn: A function that takes one argument (an instance of the
      type represented by `type_object`) and returns a `TypeSpec`.
    allow_subclass: If true, then use `isinstance(value, type_object)` to
      check for matches. If false, then use `type(value) is type_object`.
  """
  # Unwrap TF decorators so registration keys on the underlying type,
  # not the decorator wrapper.
  _, type_object = tf_decorator.unwrap(type_object)
  _TYPE_CONVERSION_FUNCTION_REGISTRY.append(
      (type_object, converter_fn, allow_subclass))


_pywrap_utils.RegisterType("TypeSpec", TypeSpec)