def testBijectiveAndFiniteAxis(self):
  permutation = np.int32([1, 0])
  x = np.random.randn(4, 2, 3)
  y = x[..., permutation, :]
  bijector = tfb.Permute(permutation=permutation, axis=-2, validate_args=True)
  bijector_test_util.assert_bijective_and_finite(
      bijector,
      x,
      y,
      eval_func=self.evaluate,
      event_ndims=2,
      rtol=1e-6,
      atol=0)
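
# A minimal standalone sketch (assuming eager TF2 and this module's imports,
# `np` and `tfb`) of the property exercised above: `Permute(p, axis=-2)`
# reorders the second-to-last dimension, matching NumPy fancy indexing on
# that axis. `_permute_axis_sketch` is a hypothetical helper, not part of
# the test suite.
def _permute_axis_sketch():
  permutation = np.int32([1, 0])
  x = np.random.randn(4, 2, 3)
  bijector = tfb.Permute(permutation=permutation, axis=-2)
  # Forward permutes rows within each trailing [2, 3] event block.
  np.testing.assert_allclose(bijector.forward(x), x[..., permutation, :])
  # Inverse applies the inverse permutation; a [1, 0] swap is its own
  # inverse, so the round trip recovers x.
  np.testing.assert_allclose(bijector.inverse(bijector.forward(x)), x)
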
def testPreservesShape(self):
  # TODO(b/131157549, b/131124359): Test should not be needed. Consider
  # deleting when underlying issue with constant eager tensors is fixed.
  permutation = [2, 1, 0]
  x = tf.keras.Input((3,), batch_size=None)
  bijector = tfb.Permute(permutation=permutation, axis=-1, validate_args=True)

  y = bijector.forward(x)
  self.assertAllEqual(y.shape.as_list(), [None, 3])

  inverse_y = bijector.inverse(x)
  self.assertAllEqual(inverse_y.shape.as_list(), [None, 3])
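
# Hedged companion sketch (assumes eager TF2; `_permute_concrete_shape_sketch`
# is a hypothetical name): with a concrete input the same bijector yields a
# fully known static shape, so the test above specifically guards the
# partially unknown (None) batch dimension of a Keras symbolic tensor.
def _permute_concrete_shape_sketch():
  bijector = tfb.Permute(permutation=[2, 1, 0], axis=-1, validate_args=True)
  x = tf.zeros([4, 3])
  # Both directions preserve the fully known shape [4, 3].
  assert bijector.forward(x).shape.as_list() == [4, 3]
  assert bijector.inverse(x).shape.as_list() == [4, 3]
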
def testRaisesOpError(self):
  with self.assertRaisesError("Permutation over `d` must contain"):
    # Hide the permutation's static value behind a placeholder so that
    # validation happens at run time rather than at construction.
    permutation = tf.compat.v1.placeholder_with_default([1, 2], shape=None)
    bijector = tfb.Permute(permutation=permutation, validate_args=True)
    self.evaluate(bijector.inverse([1.]))
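
# Hedged sketch of the rule behind that error message: a length-d
# `permutation` must be a rearrangement of {0, ..., d-1}. When the
# permutation is statically known, `Permute` can reject it at construction
# time, which is why the test above defers validation via a placeholder.
# `_permute_validation_sketch` is a hypothetical name.
def _permute_validation_sketch():
  tfb.Permute(permutation=[1, 0], validate_args=True)  # Valid: {1, 0}.
  try:
    # Invalid: [1, 2] omits 0, so it is not a permutation of range(2).
    tfb.Permute(permutation=[1, 2], validate_args=True)
  except ValueError:
    pass  # Expected for a statically known, invalid permutation.
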
def bijectors(draw, bijector_name=None, batch_shape=None, event_dim=None,
              enable_vars=False):
  """Strategy for drawing Bijectors.

  The emitted bijector may be a basic bijector or an `Invert` of a basic
  bijector, but not a compound like `Chain`.

  Args:
    draw: Hypothesis strategy sampler supplied by `@hps.composite`.
    bijector_name: Optional Python `str`. If given, the produced bijectors
      will all have this type. If omitted, Hypothesis chooses one from the
      whitelist `TF2_FRIENDLY_BIJECTORS`.
    batch_shape: An optional `TensorShape`. The batch shape of the resulting
      bijector. Hypothesis will pick one if omitted.
    event_dim: Optional Python int giving the size of each of the underlying
      distribution's parameters' event dimensions. This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test. If `False`, the returned parameters are
      all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`,
      `tfp.util.TransformedVariable`}.

  Returns:
    bijectors: A strategy for drawing bijectors with the specified
      `batch_shape` (or an arbitrary one if omitted).
  """
  if bijector_name is None:
    bijector_name = draw(hps.sampled_from(TF2_FRIENDLY_BIJECTORS))
  if batch_shape is None:
    batch_shape = draw(tfp_hps.shapes())
  if event_dim is None:
    event_dim = draw(hps.integers(min_value=2, max_value=6))

  if bijector_name == 'Invert':
    underlying_name = draw(
        hps.sampled_from(sorted(set(TF2_FRIENDLY_BIJECTORS) - {'Invert'})))
    underlying = draw(
        bijectors(bijector_name=underlying_name,
                  batch_shape=batch_shape,
                  event_dim=event_dim,
                  enable_vars=enable_vars))
    return tfb.Invert(underlying, validate_args=True)
  if bijector_name == 'TransformDiagonal':
    underlying_name = draw(
        hps.sampled_from(sorted(TRANSFORM_DIAGONAL_WHITELIST)))
    underlying = draw(
        bijectors(bijector_name=underlying_name,
                  batch_shape=(),
                  event_dim=event_dim,
                  enable_vars=enable_vars))
    return tfb.TransformDiagonal(underlying, validate_args=True)
  if bijector_name == 'Inline':
    if enable_vars:
      scale = tf.Variable(1., name='scale')
    else:
      scale = 2.
    b = tfb.AffineScalar(scale=scale)

    inline = tfb.Inline(
        forward_fn=b.forward,
        inverse_fn=b.inverse,
        forward_log_det_jacobian_fn=lambda x: b.forward_log_det_jacobian(  # pylint: disable=g-long-lambda
            x, event_ndims=b.forward_min_event_ndims),
        forward_min_event_ndims=b.forward_min_event_ndims,
        is_constant_jacobian=b.is_constant_jacobian,
    )
    inline.b = b
    return inline
  if bijector_name == 'DiscreteCosineTransform':
    dct_type = draw(hps.integers(min_value=2, max_value=3))
    return tfb.DiscreteCosineTransform(validate_args=True, dct_type=dct_type)
  if bijector_name == 'PowerTransform':
    power = draw(hps.floats(min_value=0., max_value=10.))
    return tfb.PowerTransform(validate_args=True, power=power)
  if bijector_name == 'Permute':
    event_ndims = draw(hps.integers(min_value=1, max_value=2))
    axis = draw(hps.integers(min_value=-event_ndims, max_value=-1))
    # This is a permutation of dimensions within an axis.
    # (Contrast with `Transpose` below.)
    permutation = draw(hps.permutations(np.arange(event_dim)))
    return tfb.Permute(permutation, axis=axis)
  if bijector_name == 'Reshape':
    event_shape_out = draw(tfp_hps.shapes(min_ndims=1))
    # TODO(b/142135119): Wanted to draw general input and output shapes like
    # the following, but Hypothesis complained about filtering out too many
    # things.
    # event_shape_in = draw(tfp_hps.shapes(min_ndims=1))
    # hp.assume(event_shape_out.num_elements() == event_shape_in.num_elements())
    event_shape_in = [event_shape_out.num_elements()]
    return tfb.Reshape(event_shape_out=event_shape_out,
                       event_shape_in=event_shape_in,
                       validate_args=True)
  if bijector_name == 'Transpose':
    event_ndims = draw(hps.integers(min_value=0, max_value=2))
    # This is a permutation of axes.
    # (Contrast with `Permute` above.)
    permutation = draw(hps.permutations(np.arange(event_ndims)))
    return tfb.Transpose(perm=permutation)

  bijector_params = draw(
      broadcasting_params(bijector_name, batch_shape, event_dim=event_dim,
                          enable_vars=enable_vars))
  ctor = getattr(tfb, bijector_name)
  return ctor(validate_args=True, **bijector_params)
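
# Hedged usage sketch: how a Hypothesis property test might consume the
# strategy above, assuming `bijectors` is wrapped with `@hps.composite` (as
# its `draw` argument suggests) and that `hypothesis` is imported as `hp`,
# per the `hp.assume` usage above. `_bijector_draw_sketch` is a hypothetical
# test name, not part of this module.
@hp.given(hps.data())
def _bijector_draw_sketch(data):
  bijector = data.draw(bijectors(enable_vars=False))
  # Every draw is a fully constructed bijector (possibly an `Invert`
  # wrapper) with a well-defined minimal event rank.
  assert bijector.forward_min_event_ndims >= 0
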