def test_identity_as_tensor(self):
  """Checks identity_as_tensor returns a fresh, plain `tf.Tensor` per input."""
  examples = (
      tf.constant([4., 3.]),
      tf.Variable(0.),
      tfp.util.DeferredTensor(tf.Variable(1.), tf.math.exp),
      tfp.util.TransformedVariable(2., tfb.Scale(tf.Variable(4.))),
  )
  for original in examples:
    converted = tensor_util.identity_as_tensor(original)
    # The result must be a new object, not the input passed through.
    self.assertIsNot(original, converted)
    self.assertIsInstance(converted, tf.Tensor)
def _probs_parameter_no_checks(self, total_count=None):
  """Probability parameter, zeroed wherever `total_count == 0`.

  Args:
    total_count: Optional count tensor; defaults to `self.total_count`.

  Returns:
    Tensor of probabilities, with 0 substituted where the count is not
    positive.
  """
  if self._logits is not None:
    probs = tf.math.sigmoid(self._logits)
  else:
    probs = tensor_util.identity_as_tensor(self._probs)
  counts = self.total_count if total_count is None else total_count
  # Suppress potentially nasty probs like `nan` b/c they don't matter where
  # total_count == 0.
  return tf.where(counts > 0, probs, 0)
def check_event_space_bijector_constrains(self, dist, data):
  """Checks that the default event-space bijector maps unconstrained draws
  into the distribution's support.

  Draws an unconstrained `y` (via hypothesis `data`), pushes it through
  `dist.experimental_default_event_space_bijector()`, and evaluates the
  result under the distribution's own sample validity assertions.

  Args:
    dist: A distribution instance exposing
      `experimental_default_event_space_bijector()`.
    data: A hypothesis `data` strategy object used to draw shapes/tensors.
  """
  event_space_bijector = dist.experimental_default_event_space_bijector()
  # Some distributions have no default event-space bijector; nothing to check.
  if event_space_bijector is None:
    return

  # Draw a sample shape
  sample_shape = data.draw(tfp_hps.shapes())
  inv_event_shape = event_space_bijector.inverse_event_shape(
      tensorshape_util.concatenate(dist.batch_shape, dist.event_shape))

  # Draw a shape that broadcasts with `[batch_shape, inverse_event_shape]`
  # where `inverse_event_shape` is the event shape in the bijector's
  # domain. This is the shape of `y` in R**n, such that
  # x = event_space_bijector(y) has the event shape of the distribution.

  # TODO(b/174778703): Actually draw broadcast compatible shapes.
  batch_inv_event_compat_shape = inv_event_shape
  # batch_inv_event_compat_shape = data.draw(
  #     tfp_hps.broadcast_compatible_shape(inv_event_shape))
  # batch_inv_event_compat_shape = tensorshape_util.concatenate(
  #     (1,) * (len(inv_event_shape) - len(batch_inv_event_compat_shape)),
  #     batch_inv_event_compat_shape)
  total_sample_shape = tensorshape_util.concatenate(
      sample_shape, batch_inv_event_compat_shape)
  # full_sample_batch_event_shape = tensorshape_util.concatenate(
  #     sample_shape, inv_event_shape)

  # Draw an unconstrained tensor of the full shape; identity_fn means no
  # additional constraint is imposed on the raw draw.
  y = data.draw(
      tfp_hps.constrained_tensors(
          tfp_hps.identity_fn, total_sample_shape.as_list()))
  hp.note('Trying to constrain inputs {}'.format(y))
  with tfp_hps.no_tf_rank_errors():
    x = event_space_bijector(y)
  hp.note('Got constrained samples {}'.format(x))
  # Evaluating under the distribution's own sample-validity assertions is
  # the actual check: it raises if `x` lies outside the support.
  with tf.control_dependencies(dist._sample_control_dependencies(x)):
    self.evaluate(tensor_util.identity_as_tensor(x))
def _probs_parameter_no_checks(self):
  """Returns probs from whichever of `probs`/`logits` was provided."""
  logits = self._logits
  if logits is not None:
    return tf.math.sigmoid(logits)
  return tensor_util.identity_as_tensor(self._probs)
def _logits_parameter_no_checks(self):
  """Returns logits from whichever of `probs`/`logits` was provided."""
  if self._logits is not None:
    return tensor_util.identity_as_tensor(self._logits)
  # logit(p) = log(p) - log(1 - p); log1p keeps precision for small p.
  p = tf.convert_to_tensor(self._probs)
  return tf.math.log(p) - tf.math.log1p(-p)
def _probs_parameter_no_checks(self, name=None):
  """Probs computed from non-`None` input arg (`probs` or `logits`)."""
  logits = self._logits
  if logits is not None:
    return tf.math.sigmoid(logits)
  return tensor_util.identity_as_tensor(self._probs)
def _logits_parameter_no_checks(self, name=None):
  """Logits computed from non-`None` input arg (`probs` or `logits`)."""
  if self._logits is not None:
    return tensor_util.identity_as_tensor(self._logits)
  # logit(p) = log(p) - log(1 - p); log1p keeps precision for small p.
  p = tf.convert_to_tensor(self._probs)
  return tf.math.log(p) - tf.math.log1p(-p)
def _log_rate2_parameter_no_checks(self):
  """Returns log-rate2 from whichever of `rate2`/`log_rate2` was provided."""
  log_rate2 = self._log_rate2
  if log_rate2 is not None:
    return tensor_util.identity_as_tensor(log_rate2)
  return tf.math.log(self._rate2)
def _rate1_parameter_no_checks(self):
  """Returns rate1 from whichever of `rate1`/`log_rate1` was provided."""
  rate1 = self._rate1
  if rate1 is not None:
    return tensor_util.identity_as_tensor(rate1)
  return tf.exp(self._log_rate1)
def _probs_parameter_no_checks(self):
  """Returns probs from whichever of `probs`/`probits` was provided."""
  probits = self._probits
  if probits is not None:
    # Standard-normal CDF maps probits to probabilities.
    return special_math.ndtr(probits)
  return tensor_util.identity_as_tensor(self._probs)
def _probits_parameter_no_checks(self):
  """Returns probits from whichever of `probs`/`probits` was provided."""
  if self._probits is not None:
    return tensor_util.identity_as_tensor(self._probits)
  # Inverse standard-normal CDF maps probabilities to probits.
  return tf.math.ndtri(tf.convert_to_tensor(self._probs))
def _cdf(self, x):
  """Step-function CDF: 1 where `x >= loc - slack(loc)`, else 0."""
  loc = tensor_util.identity_as_tensor(self.loc)
  # The slack widens the acceptance threshold slightly below `loc`
  # (presumably a numerical-tolerance term — see `_slack`).
  threshold = loc - self._slack(loc)
  return tf.cast(x >= threshold, dtype=self.dtype)