def __init__(self,
             transform_fn,
             pretransformed_input,
             dtype=None,
             shape=NONE_SPECIFIED,
             name=None):
  """Creates the `DeferredTensor` object.

  Args:
    transform_fn: Python `callable` taking `pretransformed_input` and
      returning a `Tensor` (represented by this object).
    pretransformed_input: object with `shape`, `dtype` properties (typically a
      `tf.Variable`) passed into `transform_fn` when this object is acted upon
      in a `Tensor` context, e.g., `tf.convert_to_tensor`, `+`, `tf.math.exp`,
      etc.
    dtype: Equivalent to what would otherwise be
      `transform_fn(pretransformed_input).dtype`.
      Default value: `None` (i.e., `pretransformed_input.dtype`).
    shape: Equivalent to what would otherwise be
      `transform_fn(pretransformed_input).shape`.
      Default value: `'None'` (i.e., `pretransformed_input.shape`).
    name: Python `str` representing this object's `name`; used only in graph
      mode.
      Default value: `None` (i.e.,
      `transform_fn.__name__ + '_' + pretransformed_input.name`).

  Raises:
    TypeError: if `transform_fn` is not `callable`.
    TypeError: if `pretransformed_input` lacks `dtype` and/or `shape`
      properties (and `dtype` and/or `shape` arguments are unspecified).
  """
  if not callable(transform_fn):
    raise TypeError('Argument `transform_fn` must be a Python `callable`.')
  if ((dtype is None and not hasattr(pretransformed_input, 'dtype')) or
      (shape is None and not hasattr(pretransformed_input, 'shape'))):
    raise TypeError('Argument `pretransformed_input` must have `dtype` and '
                    '`shape` properties (unless `dtype`, `shape` arguments '
                    'are explicitly provided).')

  has_name = bool(name)
  if not has_name:
    name = '_'.join([
        transform_fn.__name__,
        getattr(pretransformed_input, 'name', '')])
    name = name_util.strip_invalid_chars(name)
    name = name_util.camel_to_lower_snake(name)
  name = name_util.get_name_scope_name(name)
  name = name_util.strip_invalid_chars(name)
  super(DeferredTensor, self).__init__(name=name)
  self._name = name

  self._transform_fn = transform_fn
  self._pretransformed_input = pretransformed_input
  self._dtype = dtype_util.base_dtype(dtype or pretransformed_input.dtype)
  self._shape = tf.TensorShape(
      pretransformed_input.shape if shape == 'None' else shape)

  # Secret handshake with tf.is_tensor to return True for DT.
  #
  # Works around an exception in LinearOperator (which in 2.0.0 checks only
  # `tf.is_tensor`, not also `linear_operator_util.is_ref`):
  #   ValueError: Graph parent item 0 is not a Tensor;
  #   <DeferredTensor: dtype=float32, shape=[2], fn=exp>.
  # TODO(b/140157055): Remove this shim after LinOp is patched in 2.0.
  self.is_tensor_like = True
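A minimal usage sketch for this (older, `transform_fn`-first) constructor, assuming a TF2 runtime and that `DeferredTensor` registers the tensor-conversion hooks implied by `is_tensor_like`; the variable name `log_scale` is illustrative only:

```python
import tensorflow as tf

# The transform is deferred: `log_scale` stays a trainable `tf.Variable`,
# and `tf.math.exp` is applied only when `scale` is used as a `Tensor`.
log_scale = tf.Variable(0., name='log_scale')
scale = DeferredTensor(tf.math.exp, log_scale)

tf.print(tf.convert_to_tensor(scale))  # ==> 1.0, i.e. exp(0.)
```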
def __init__(self, also_track=None, name=None):
  name = name_util.strip_invalid_chars(name or type(self).__name__)
  self._also_track = [] if also_track is None else [also_track]
  super(Layer, self).__init__(name=name)
  self._extra_loss = None
  self._extra_result = None
  self._trace = False
def __init__(self, name=None):
  """Creates the ExponentialFamily.

  Args:
    name: Python `str` used as TF namescope for ops created by member
      functions.
      Default value: `None` (i.e., the subclass name).
  """
  if not name:
    name = type(self).__name__
    name = name_util.camel_to_lower_snake(name)
  name = name_util.get_name_scope_name(name)
  name = name_util.strip_invalid_chars(name)
  super(ExponentialFamily, self).__init__(name=name)
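For illustration, a hypothetical subclass showing the name defaulting; assuming `camel_to_lower_snake` behaves as its name suggests, a class named `MySubclass` would get a scope like `'my_subclass'`:

```python
class MySubclass(ExponentialFamily):  # hypothetical, for illustration only
  pass

family = MySubclass()                    # namescope derived from the class name
explicit = MySubclass(name='my_family')  # explicit names skip the snake-casing
```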
def __init__(self,
             graph_parents=None,
             is_constant_jacobian=False,
             validate_args=False,
             dtype=None,
             forward_min_event_ndims=None,
             inverse_min_event_ndims=None,
             parameters=None,
             name=None):
  """Constructs Bijector.

  A `Bijector` transforms random variables into new random variables.

  Examples:

  ```python
  # Create the Y = g(X) = X transform.
  identity = Identity()

  # Create the Y = g(X) = exp(X) transform.
  exp = Exp()
  ```

  See `Bijector` subclass docstring for more details and specific examples.

  Args:
    graph_parents: Python list of graph prerequisites of this `Bijector`.
    is_constant_jacobian: Python `bool` indicating that the Jacobian matrix is
      not a function of the input.
    validate_args: Python `bool`, default `False`. Whether to validate input
      with asserts. If `validate_args` is `False`, and the inputs are invalid,
      correct behavior is not guaranteed.
    dtype: `tf.dtype` supported by this `Bijector`. `None` means dtype is not
      enforced.
    forward_min_event_ndims: Python `integer` indicating the minimum number of
      dimensions `forward` operates on.
    inverse_min_event_ndims: Python `integer` indicating the minimum number of
      dimensions `inverse` operates on. Will be set to
      `forward_min_event_ndims` by default, if no value is provided.
    parameters: Python `dict` of parameters used to instantiate this
      `Bijector`.
    name: The name to give Ops created by the initializer.

  Raises:
    ValueError: If neither `forward_min_event_ndims` nor
      `inverse_min_event_ndims` is specified, or if either of them is
      negative.
    ValueError: If a member of `graph_parents` is not a `Tensor`.
  """
  if not name:
    name = type(self).__name__
    name = name_util.camel_to_lower_snake(name)
  name = name_util.get_name_scope_name(name)
  name = name_util.strip_invalid_chars(name)
  super(Bijector, self).__init__(name=name)
  self._name = name
  self._parameters = self._no_dependency(parameters)

  self._graph_parents = self._no_dependency(graph_parents or [])

  self._is_constant_jacobian = is_constant_jacobian
  self._validate_args = validate_args
  self._dtype = dtype

  self._initial_parameter_control_dependencies = tuple(
      d for d in self._parameter_control_dependencies(is_init=True)
      if d is not None)
  if self._initial_parameter_control_dependencies:
    self._initial_parameter_control_dependencies = (
        tf.group(*self._initial_parameter_control_dependencies),)

  if forward_min_event_ndims is None and inverse_min_event_ndims is None:
    raise ValueError('Must specify at least one of `forward_min_event_ndims` '
                     'and `inverse_min_event_ndims`.')
  elif inverse_min_event_ndims is None:
    inverse_min_event_ndims = forward_min_event_ndims
  elif forward_min_event_ndims is None:
    forward_min_event_ndims = inverse_min_event_ndims

  if not isinstance(forward_min_event_ndims, int):
    raise TypeError('Expected forward_min_event_ndims to be of '
                    'type int, got {}'.format(
                        type(forward_min_event_ndims).__name__))

  if not isinstance(inverse_min_event_ndims, int):
    raise TypeError('Expected inverse_min_event_ndims to be of '
                    'type int, got {}'.format(
                        type(inverse_min_event_ndims).__name__))

  if forward_min_event_ndims < 0:
    raise ValueError('forward_min_event_ndims must be a non-negative '
                     'integer.')

  if inverse_min_event_ndims < 0:
    raise ValueError('inverse_min_event_ndims must be a non-negative '
                     'integer.')

  self._forward_min_event_ndims = forward_min_event_ndims
  self._inverse_min_event_ndims = inverse_min_event_ndims

  for i, t in enumerate(self._graph_parents):
    if t is None or not tf.is_tensor(t):
      raise ValueError('Graph parent item %d is not a Tensor; %s.' % (i, t))

  # Setup caching after everything else is done.
  self._cache = self._setup_cache()
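A minimal subclass sketch showing what this constructor expects from callers; `ExpLike` is illustrative (not the library's `Exp`), and `parameters = dict(locals())` follows the convention suggested by the `parameters` argument:

```python
import tensorflow as tf

class ExpLike(Bijector):
  """Illustrative Y = exp(X) bijector."""

  def __init__(self, validate_args=False, name='exp_like'):
    parameters = dict(locals())
    # exp acts elementwise on scalars, so zero event dims suffice;
    # `inverse_min_event_ndims` defaults to the same value.
    super(ExpLike, self).__init__(
        forward_min_event_ndims=0,
        validate_args=validate_args,
        parameters=parameters,
        name=name)

  def _forward(self, x):
    return tf.math.exp(x)

  def _inverse(self, y):
    return tf.math.log(y)
```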
def __init__(self, also_track=None, validate_args=False, name=None):
  name = name_util.strip_invalid_chars(name or type(self).__name__)
  self._also_track = [] if also_track is None else [also_track]
  super(Layer, self).__init__(name=name)
  self._trace = False
  self._validate_args = validate_args
def __init__(self, name=None):
  name = name_util.strip_invalid_chars(name or type(self).__name__)
  super(Layer, self).__init__(name=name)
  self._extra_loss = None
  self._extra_result = None
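The three `Layer.__init__` variants above differ only in which extra state they set up; all normalize `name` the same way. A sketch of the fallback, assuming `Layer` ultimately derives from `tf.Module` and that `strip_invalid_chars` removes characters not allowed in TF name scopes:

```python
class MyLayer(Layer):  # hypothetical subclass, for illustration only
  pass

layer = MyLayer()                # `name` falls back to the class name, 'MyLayer'
named = MyLayer(name='block_1')  # explicit names are sanitized, then used
```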
def __init__(self,
             pretransformed_input,
             transform_fn,
             dtype=None,
             shape=NONE_SPECIFIED,
             also_track=None,
             name=None):
  """Creates the `DeferredTensor` object.

  Args:
    pretransformed_input: object with `shape`, `dtype` properties (typically a
      `tf.Variable`) passed into `transform_fn` when this object is acted upon
      in a `Tensor` context, e.g., `tf.convert_to_tensor`, `+`, `tf.math.exp`,
      etc.
    transform_fn: Python `callable` or `tfp.bijectors.Bijector`-like instance.
      When `callable`, should take `pretransformed_input` and return a
      `Tensor` (represented by this object).
    dtype: Equivalent to what would otherwise be
      `transform_fn(pretransformed_input).dtype`.
      Default value: `None` (i.e.,
      `getattr(transform_fn, 'dtype', None) or pretransformed_input.dtype`).
    shape: Equivalent to what would otherwise be
      `transform_fn(pretransformed_input).shape`.
      Default value: `'None'` (i.e.,
      `getattr(transform_fn, 'forward_event_shape', lambda x: x)(
           pretransformed_input.shape)`).
    also_track: Optional instance or structure of instances of `tf.Variable`
      and/or `tf.Module`, containing any additional trainable variables that
      the `transform_fn` may access beyond the given `pretransformed_input`.
      This ensures that such variables will be correctly tracked in
      `self.trainable_variables`.
      Default value: `None`.
    name: Python `str` representing this object's `name`; used only in graph
      mode.
      Default value: `None` (i.e.,
      `(getattr(transform_fn, 'name', None) or
        transform_fn.__name__ + '_' + pretransformed_input.name)`).

  Raises:
    TypeError: if `transform_fn` is not `callable`.
    TypeError: if `pretransformed_input` lacks `dtype` and/or `shape`
      properties (and `dtype` and/or `shape` arguments are unspecified).
  """
  # Check callability first, before `transform_fn.__name__` is accessed below.
  if not callable(transform_fn):
    raise TypeError('Argument `transform_fn` must be `callable`.')

  pretransformed_input = tensor_util.convert_nonref_to_tensor(
      pretransformed_input, name='pretransformed_input')

  if dtype is None:
    dtype = (getattr(transform_fn, 'dtype', None) or
             dtype_util.base_dtype(pretransformed_input.dtype))
  try:
    dtype = None if dtype is None else tf.as_dtype(dtype)
  except TypeError:
    raise TypeError('Argument `dtype` must be convertible to a '
                    '`tf.dtypes.DType`; saw "{}" of type "{}".'.format(
                        repr(dtype), type(dtype)))

  if shape == NONE_SPECIFIED:
    shape = getattr(transform_fn, 'forward_event_shape', _identity)
    shape = shape(pretransformed_input.shape)
  try:
    shape = tf.TensorShape(shape)
  except TypeError:
    raise TypeError('Argument `shape` must be convertible to a '
                    '`tf.TensorShape`; saw "{}".'.format(shape))

  name = name or getattr(transform_fn, 'name', None)
  if not name:
    name = '_'.join([
        transform_fn.__name__,
        getattr(pretransformed_input, 'name', '')])
    name = name_util.strip_invalid_chars(name)
    name = name_util.camel_to_lower_snake(name)
  name = name_util.get_name_scope_name(name)
  name = name_util.strip_invalid_chars(name)

  if hasattr(transform_fn, 'forward'):
    fwd_name = '"{}"'.format(transform_fn.name)
  else:
    fwd_name = transform_fn.__name__

  super(DeferredTensor, self).__init__(name=name)
  self._pretransformed_input = pretransformed_input
  self._transform_fn = transform_fn
  self._dtype = dtype
  self._shape = shape
  self._also_track = also_track
  self._name = name
  self._fwd_name = fwd_name

  # Secret handshake with tf.is_tensor to return True for DT.
  #
  # Works around an exception in LinearOperator (which in 2.0.0 checks only
  # `tf.is_tensor`, not also `linear_operator_util.is_ref`):
  #   ValueError: Graph parent item 0 is not a Tensor;
  #   <DeferredTensor: dtype=float32, shape=[2], fn=exp>.
  # TODO(b/140157055): Remove this shim after LinOp is patched in 2.0.
  self.is_tensor_like = True
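A sketch of this newer, input-first calling convention, where a `Bijector`-like `transform_fn` supplies the `dtype`, `shape` (via `forward_event_shape`), and `name` defaults; assumes the public `tfp.util.DeferredTensor` export:

```python
import tensorflow as tf
import tensorflow_probability as tfp

tfb = tfp.bijectors

# Note the flipped argument order relative to the older constructor above:
# the pretransformed input comes first, then the transform.
log_scale = tf.Variable([0., 1.], name='log_scale')
scale = tfp.util.DeferredTensor(log_scale, tfb.Exp())

tf.print(tf.convert_to_tensor(scale))  # ==> [1. 2.7182817]
```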
def __init__(self,
             dtype,
             reparameterization_type,
             validate_args,
             allow_nan_stats,
             parameters=None,
             graph_parents=None,
             name=None):
  """Constructs the `Distribution`.

  **This is a private method for subclass use.**

  Args:
    dtype: The type of the event samples. `None` implies no type-enforcement.
    reparameterization_type: Instance of `ReparameterizationType`. If
      `tfd.FULLY_REPARAMETERIZED`, then samples from the distribution are
      fully reparameterized, and straight-through gradients are supported.
      If `tfd.NOT_REPARAMETERIZED`, then samples from the distribution are not
      fully reparameterized, and straight-through gradients are either
      partially unsupported or are not supported at all.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
    allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
      (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
      result is undefined. When `False`, an exception is raised if one or
      more of the statistic's batch members are undefined.
    parameters: Python `dict` of parameters used to instantiate this
      `Distribution`.
    graph_parents: Python `list` of graph prerequisites of this
      `Distribution`.
    name: Python `str` name prefixed to Ops created by this class.
      Default: subclass name.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  if not name:
    name = type(self).__name__
    name = name_util.camel_to_lower_snake(name)
  name = name_util.get_name_scope_name(name)
  name = name_util.strip_invalid_chars(name)
  super(Distribution, self).__init__(name=name)
  self._name = name

  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not tf.is_tensor(t):
      raise ValueError('Graph parent item %d is not a Tensor; %s.' % (i, t))
  self._dtype = dtype
  self._reparameterization_type = reparameterization_type
  self._allow_nan_stats = allow_nan_stats
  self._validate_args = validate_args
  self._parameters = self._no_dependency(parameters)
  self._parameters_sanitized = False
  self._graph_parents = graph_parents

  self._initial_parameter_control_dependencies = tuple(
      d for d in self._parameter_control_dependencies(is_init=True)
      if d is not None)
  if self._initial_parameter_control_dependencies:
    self._initial_parameter_control_dependencies = (
        tf.group(*self._initial_parameter_control_dependencies),)
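Since this initializer is documented as private for subclass use, here is a minimal, hypothetical subclass showing a typical `super().__init__` call; `tfd = tfp.distributions` is assumed for the reparameterization constant:

```python
import tensorflow as tf
import tensorflow_probability as tfp

tfd = tfp.distributions

class MyPointMass(Distribution):
  """Hypothetical subclass; shows only how to call the base initializer."""

  def __init__(self, loc, validate_args=False, allow_nan_stats=True,
               name='my_point_mass'):
    parameters = dict(locals())
    self._loc = tf.convert_to_tensor(loc, dtype_hint=tf.float32)
    super(MyPointMass, self).__init__(
        dtype=self._loc.dtype,
        reparameterization_type=tfd.FULLY_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)
```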