def test_against_numpy(self, data):
  dtype = data.draw(hps.sampled_from([np.float32, np.float64]))
  shp = (data.draw(hps.integers(5, 10)), data.draw(hps.integers(5, 10)))
  axis = data.draw(hps.integers(0, len(shp) - 1))
  y = data.draw(
      tfp_hps.constrained_tensors(tfp_hps.identity_fn, shp, dtype))
  x_dx = data.draw(hps.sampled_from(['x', 'dx', None]))
  if x_dx is None:
    x = None
    dx = None
  elif x_dx == 'dx':
    x = None
    dx = data.draw(hps.floats(0.1, 10))
  else:
    x = data.draw(
        tfp_hps.constrained_tensors(tfp_hps.identity_fn, shp, dtype))
    dx = None
  np_soln = np.trapz(
      self.evaluate(y),
      x=self.evaluate(x) if x is not None else None,  # cannot evaluate(None)
      dx=dx or 1.0,  # numpy default is 1.0
      axis=axis)
  tf_soln = tfp_math.trapz(y, x, dx, axis)
  self.assertAllClose(np_soln, tf_soln)
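
# The property above relies on `np.trapz`'s argument semantics: an explicit
# grid `x` takes precedence over `dx`, and `dx` defaults to 1.0. A minimal
# NumPy-only sketch of the three drawn cases (values are illustrative):
#
# import numpy as np
#
# y = np.arange(12, dtype=np.float64).reshape(3, 4)
# # Case `None`: omitting both is the same as dx=1.0.
# assert np.allclose(np.trapz(y, axis=0), np.trapz(y, dx=1.0, axis=0))
# # Case 'x': an explicit (here unevenly spaced, increasing) grid; this is
# # the behavior `tfp_math.trapz(y, x, axis=...)` is checked against.
# x = np.sort(np.random.rand(3, 4), axis=0)
# np.trapz(y, x=x, axis=0)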
def check_event_space_bijector_constrains(self, dist, data):
  event_space_bijector = dist.experimental_default_event_space_bijector()
  if event_space_bijector is None:
    return

  total_sample_shape = tensorshape_util.concatenate(
      # Draw a sample shape
      data.draw(tfp_hps.shapes()),
      # Draw a shape that broadcasts with `[batch_shape, inverse_event_shape]`
      # where `inverse_event_shape` is the event shape in the bijector's
      # domain. This is the shape of `y` in R**n, such that
      # x = event_space_bijector(y) has the event shape of the distribution.
      data.draw(
          tfp_hps.broadcasting_shapes(
              tensorshape_util.concatenate(
                  dist.batch_shape,
                  event_space_bijector.inverse_event_shape(dist.event_shape)),
              n=1))[0])
  y = data.draw(
      tfp_hps.constrained_tensors(
          tfp_hps.identity_fn, total_sample_shape.as_list()))
  with tfp_hps.no_tf_rank_errors():
    x = event_space_bijector(y)
  with tf.control_dependencies(dist._sample_control_dependencies(x)):
    self.evaluate(tf.identity(x))
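
# For intuition, a minimal sketch of what this check exercises, using a
# concrete distribution (the names below are public TFP API; the choice of
# Dirichlet is just an example):
#
# import tensorflow as tf
# import tensorflow_probability as tfp
#
# tfd = tfp.distributions
# dist = tfd.Dirichlet(concentration=[1., 2., 3.])
# bij = dist.experimental_default_event_space_bijector()
# # The bijector's domain is unconstrained R**n; for a 3-simplex, n = 2.
# y = tf.random.normal([5, 2])   # arbitrary unconstrained draws
# x = bij(y)                     # shape [5, 3], each row on the simplex
# # `x` should pass the distribution's own validity checks, which is what
# # the `_sample_control_dependencies` assertion above verifies.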
@hps.composite
def codomain_tensors(draw, bijector, shape=None):
  """Strategy for drawing Tensors in the codomain of a bijector.

  If the bijector's codomain is constrained, this proceeds by drawing an
  unconstrained Tensor and then transforming it to fit.  The constraints are
  declared in `bijectors.hypothesis_testlib.bijector_supports`.  The
  transformations are defined by `tfp_hps.constrainer`.

  Args:
    draw: Hypothesis strategy sampler supplied by `@hps.composite`.
    bijector: A `Bijector` in whose codomain the Tensors will be.
    shape: An optional `TensorShape`.  The shape of the resulting Tensors.
      Hypothesis will pick one if omitted.

  Returns:
    tensors: A strategy for drawing codomain Tensors for the desired bijector.
  """
  if is_invert(bijector):
    return draw(domain_tensors(bijector.bijector, shape))
  elif is_transform_diagonal(bijector):
    return draw(codomain_tensors(bijector.diag_bijector, shape))
  if shape is None:
    shape = draw(tfp_hps.shapes())
  bijector_name = type(bijector).__name__
  support = bhps.bijector_supports()[bijector_name].inverse
  if is_generalized_pareto(bijector):
    constraint_fn = bhps.generalized_pareto_constraint(
        bijector.loc, bijector.scale, bijector.concentration)
  elif isinstance(bijector, tfb.SoftClip):
    constraint_fn = bhps.softclip_constraint(bijector.low, bijector.high)
  else:
    constraint_fn = tfp_hps.constrainer(support)
  return draw(tfp_hps.constrained_tensors(constraint_fn, shape))
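
# A sketch of how such a strategy is typically consumed in a Hypothesis test
# (the test body and the choice of `tfb.Sigmoid` are illustrative):
#
# import hypothesis as hp
# from hypothesis import strategies as hps
#
# @hp.given(hps.data())
# def test_codomain_samples_are_invertible(self, data):
#   bijector = tfb.Sigmoid()
#   y = data.draw(codomain_tensors(bijector, shape=[3]))
#   # Sigmoid's codomain is (0, 1), so the drawn values satisfy the support
#   # from `bijector_supports()` and can be pulled back through `inverse`.
#   x = bijector.inverse(y)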
@hps.composite
def domain_tensors(draw, bijector, shape=None):
  """Strategy for drawing Tensors in the domain of a bijector.

  If the bijector's domain is constrained, this proceeds by drawing an
  unconstrained Tensor and then transforming it to fit.  The constraints are
  declared in `bijectors.hypothesis_testlib.bijector_supports`.  The
  transformations are defined by `tfp_hps.constrainer`.

  Args:
    draw: Hypothesis strategy sampler supplied by `@hps.composite`.
    bijector: A `Bijector` in whose domain the Tensors will be.
    shape: An optional `TensorShape`.  The shape of the resulting Tensors.
      Hypothesis will pick one if omitted.

  Returns:
    tensors: A strategy for drawing domain Tensors for the desired bijector.
  """
  if is_invert(bijector):
    return draw(codomain_tensors(bijector.bijector, shape))
  if shape is None:
    shape = draw(tfp_hps.shapes())
  bijector_name = type(bijector).__name__
  support = bijector_hps.bijector_supports()[bijector_name].forward
  if isinstance(bijector, tfb.PowerTransform):
    constraint_fn = bijector_hps.power_transform_constraint(bijector.power)
  else:
    constraint_fn = tfp_hps.constrainer(support)
  return draw(tfp_hps.constrained_tensors(constraint_fn, shape))
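
# Hypothetical sketch of the kind of constraint `power_transform_constraint`
# must produce (not TFP's actual implementation): `PowerTransform.forward`
# computes (1 + power * x) ** (1 / power), so its domain is x > -1 / power
# when power > 0, and all of R when power == 0 (the Exp case).
#
# import tensorflow as tf
#
# def _power_transform_constraint_sketch(power):
#   def constrain(x):
#     if power == 0:
#       return tf.identity(x)
#     # softplus(x) > 0, so the result stays strictly above -1 / power.
#     return tf.math.softplus(x) - 1. / power + 1e-6
#   return constrain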
@hps.composite
def codomain_tensors(draw, bijector, shape=None):
  """Strategy for drawing Tensors in the codomain of a bijector."""
  if is_invert(bijector):
    return draw(domain_tensors(bijector.bijector, shape))
  if shape is None:
    shape = draw(tfp_hps.batch_shapes())
  bijector_name = type(bijector).__name__
  support = bijector_hps.bijector_supports()[bijector_name].inverse
  constraint_fn = constrainer(support)
  return draw(tfp_hps.constrained_tensors(constraint_fn, shape))
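
# Hypothetical sketch of what a `constrainer` lookup does (the real mapping
# covers many more supports): it takes a support tag and returns a function
# that squashes unconstrained reals into that set.
#
# import tensorflow as tf
#
# _EXAMPLE_CONSTRAINERS = {
#     'SCALAR_UNCONSTRAINED': tf.identity,
#     'SCALAR_POSITIVE': tf.math.softplus,
#     'SCALAR_IN_0_1': tf.math.sigmoid,
# }
#
# def _constrainer_sketch(support):
#   return _EXAMPLE_CONSTRAINERS[support]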
def testDistribution(self, data):
  enable_vars = data.draw(hps.booleans())
  # TODO(b/146572907): Fix `enable_vars` for metadistributions.
  # Copy the list so that extending it below does not mutate the shared
  # module-level constant across Hypothesis examples.
  broken_dists = list(EVENT_SPACE_BIJECTOR_IS_BROKEN)
  if enable_vars:
    broken_dists.extend(dhps.INSTANTIABLE_META_DISTS)
  dist = data.draw(
      dhps.distributions(
          enable_vars=enable_vars,
          eligibility_filter=(lambda name: name not in broken_dists)))
  self.evaluate([var.initializer for var in dist.variables])
  self.check_bad_loc_scale(dist)
  event_space_bijector = dist._experimental_default_event_space_bijector()
  if event_space_bijector is None:
    return

  total_sample_shape = tensorshape_util.concatenate(
      # Draw a sample shape
      data.draw(tfp_hps.shapes()),
      # Draw a shape that broadcasts with `[batch_shape, inverse_event_shape]`
      # where `inverse_event_shape` is the event shape in the bijector's
      # domain. This is the shape of `y` in R**n, such that
      # x = event_space_bijector(y) has the event shape of the distribution.
      data.draw(
          tfp_hps.broadcasting_shapes(
              tensorshape_util.concatenate(
                  dist.batch_shape,
                  event_space_bijector.inverse_event_shape(dist.event_shape)),
              n=1))[0])
  y = data.draw(
      tfp_hps.constrained_tensors(
          tfp_hps.identity_fn, total_sample_shape.as_list()))
  x = event_space_bijector(y)
  with tf.control_dependencies(dist._sample_control_dependencies(x)):
    self.evaluate(tf.identity(x))
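
# How a property like the one above is typically driven: `data` is supplied
# by Hypothesis's `data()` strategy via `@given`. A minimal, illustrative
# wiring (class name and settings values are assumptions; TFP's real harness
# layers its own settings on top):
#
# import hypothesis as hp
# from hypothesis import strategies as hps
#
# class EventSpaceBijectorTest(test_util.TestCase):
#
#   @hp.settings(max_examples=10, deadline=None)
#   @hp.given(hps.data())
#   def testDistribution(self, data):
#     ...  # body as defined above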
def check_event_space_bijector_constrains(self, dist, data):
  event_space_bijector = dist.experimental_default_event_space_bijector()
  if event_space_bijector is None:
    return

  # Draw a sample shape
  sample_shape = data.draw(tfp_hps.shapes())
  inv_event_shape = event_space_bijector.inverse_event_shape(
      tensorshape_util.concatenate(dist.batch_shape, dist.event_shape))

  # Draw a shape that broadcasts with `[batch_shape, inverse_event_shape]`
  # where `inverse_event_shape` is the event shape in the bijector's
  # domain. This is the shape of `y` in R**n, such that
  # x = event_space_bijector(y) has the event shape of the distribution.
  # TODO(b/174778703): Actually draw broadcast compatible shapes.
  batch_inv_event_compat_shape = inv_event_shape
  # batch_inv_event_compat_shape = data.draw(
  #     tfp_hps.broadcast_compatible_shape(inv_event_shape))
  # batch_inv_event_compat_shape = tensorshape_util.concatenate(
  #     (1,) * (len(inv_event_shape) - len(batch_inv_event_compat_shape)),
  #     batch_inv_event_compat_shape)
  total_sample_shape = tensorshape_util.concatenate(
      sample_shape, batch_inv_event_compat_shape)
  # full_sample_batch_event_shape = tensorshape_util.concatenate(
  #     sample_shape, inv_event_shape)

  y = data.draw(
      tfp_hps.constrained_tensors(
          tfp_hps.identity_fn, total_sample_shape.as_list()))
  hp.note('Trying to constrain inputs {}'.format(y))
  with tfp_hps.no_tf_rank_errors():
    x = event_space_bijector(y)
  hp.note('Got constrained samples {}'.format(x))
  with tf.control_dependencies(dist._sample_control_dependencies(x)):
    self.evaluate(tensor_util.identity_as_tensor(x))
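
# The broadcast compatibility the TODO above asks for, demonstrated with
# NumPy (shapes are illustrative): each candidate below could stand in for
# `batch_inv_event_compat_shape` without changing the broadcast result.
#
# import numpy as np
#
# inv_event_shape = (4, 3)
# for candidate in [(4, 3), (1, 3), (3,), (1, 1)]:
#   # `broadcast_shapes` raises if incompatible; all of these pass.
#   assert np.broadcast_shapes(inv_event_shape, candidate) == (4, 3)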