Example 1
 def testScalarCongruency(self):
     # Invert(Exp()) acts like a Log bijector; assert_scalar_congruency
     # numerically checks that its forward, inverse, and log-det-Jacobian
     # are mutually consistent over the interval [1e-3, 1.5].
     with self.test_session():
         bijector = bijectors.Invert(bijectors.Exp())
         assert_scalar_congruency(bijector,
                                  lower_x=1e-3,
                                  upper_x=1.5,
                                  rtol=0.05)
Example 2
 def testBijector(self):
     # For each base bijector fwd, Invert(fwd) should swap forward/inverse and
     # the corresponding log-det-Jacobians, and prefix the name with "invert_".
     with self.test_session():
         for fwd in [
                 bijectors.Identity(),
                 bijectors.Exp(event_ndims=1),
                 bijectors.Affine(shift=[0., 1.],
                                  scale_diag=[2., 3.],
                                  event_ndims=1),
                 bijectors.Softplus(event_ndims=1),
                 bijectors.SoftmaxCentered(event_ndims=1),
                 bijectors.SigmoidCentered(),
         ]:
             rev = bijectors.Invert(fwd)
             self.assertEqual("_".join(["invert", fwd.name]), rev.name)
             x = [[[1., 2.], [2., 3.]]]
             self.assertAllClose(
                 fwd.inverse(x).eval(),
                 rev.forward(x).eval())
             self.assertAllClose(
                 fwd.forward(x).eval(),
                 rev.inverse(x).eval())
             self.assertAllClose(
                 fwd.forward_log_det_jacobian(x).eval(),
                 rev.inverse_log_det_jacobian(x).eval())
             self.assertAllClose(
                 fwd.inverse_log_det_jacobian(x).eval(),
                 rev.forward_log_det_jacobian(x).eval())
Example 3
  def testBijector(self):
    x = np.float32(np.random.randn(3, 4, 4))

    y = x.copy()
    for i in range(x.shape[0]):
      np.fill_diagonal(y[i, :, :], np.exp(np.diag(x[i, :, :])))

    # TransformDiagonal applies `diag_bijector` (here Exp) to the diagonal of
    # each matrix, leaving the off-diagonal entries unchanged.
    exp = bijectors.Exp()
    b = bijectors.TransformDiagonal(diag_bijector=exp)

    y_ = self.evaluate(b.forward(x))
    self.assertAllClose(y, y_)

    x_ = self.evaluate(b.inverse(y))
    self.assertAllClose(x, x_)

    fldj = self.evaluate(b.forward_log_det_jacobian(x, event_ndims=2))
    ildj = self.evaluate(b.inverse_log_det_jacobian(y, event_ndims=2))
    self.assertAllEqual(
        fldj,
        self.evaluate(exp.forward_log_det_jacobian(
            np.array([np.diag(x_mat) for x_mat in x]),
            event_ndims=1)))
    self.assertAllEqual(
        ildj,
        self.evaluate(exp.inverse_log_det_jacobian(
            np.array([np.diag(y_mat) for y_mat in y]),
            event_ndims=1)))
Example 4
 def testDocstringExample(self):
   # Docstring example: transforming Gamma(1., 2.) by Invert(Exp()), i.e. by
   # log, gives an exp-gamma distribution whose single draw has scalar shape.
   with self.cached_session():
     exp_gamma_distribution = (
         transformed_distribution_lib.TransformedDistribution(
             distribution=gamma_lib.Gamma(concentration=1., rate=2.),
             bijector=bijectors.Invert(bijectors.Exp())))
     self.assertAllEqual(
         [], array_ops.shape(exp_gamma_distribution.sample()).eval())
Example 5
    def testComputesCorrectValues(self):
        # ScaleTriL fills the flat vector x into a lower-triangular matrix and
        # maps its diagonal through Exp followed by adding diag_shift, while
        # off-diagonal entries pass through unchanged.
        shift = 1.61803398875
        x = np.float32(np.array([-1, .5, 2]))
        y = np.float32(
            np.array([[np.exp(2) + shift, 0.], [.5, np.exp(-1) + shift]]))

        b = bijectors.ScaleTriL(diag_bijector=bijectors.Exp(),
                                diag_shift=shift)

        y_ = self.evaluate(b.forward(x))
        self.assertAllClose(y, y_, rtol=1e-4)

        x_ = self.evaluate(b.inverse(y))
        self.assertAllClose(x, x_, rtol=1e-4)
Example 6
  def __init__(
      self,
      temperature,
      logits=None,
      probs=None,
      dtype=None,
      validate_args=False,
      allow_nan_stats=True,
      name="RelaxedOneHotCategorical"):
    """Initialize RelaxedOneHotCategorical using class log-probabilities.

    Args:
      temperature: A 0-D `Tensor`, representing the temperature
        of a set of RelaxedOneHotCategorical distributions. The temperature
        should be positive.
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of RelaxedOneHotCategorical distributions. The first
        `N - 1` dimensions index into a batch of independent distributions and
        the last dimension represents a vector of logits for each class. Only
        one of `logits` or `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of RelaxedOneHotCategorical distributions. The first `N - 1`
        dimensions index into a batch of independent distributions and the last
        dimension represents a vector of probabilities for each class. Only one
        of `logits` or `probs` should be passed in.
      dtype: The type of the event samples (default: inferred from
        logits/probs).
      validate_args: Unused in this distribution.
      allow_nan_stats: Python `bool`, default `True`. If `False`, raise an
        exception if a statistic (e.g. mean/mode/etc...) is undefined for any
        batch member. If `True`, batch members with valid parameters leading to
        undefined statistics will return NaN for this statistic.
      name: A name for this distribution (optional).
    """
    dist = ExpRelaxedOneHotCategorical(temperature,
                                       logits=logits,
                                       probs=probs,
                                       dtype=dtype,
                                       validate_args=validate_args,
                                       allow_nan_stats=allow_nan_stats)
    super(RelaxedOneHotCategorical, self).__init__(dist,
                                                   bijectors.Exp(),
                                                   name=name)
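
For context, here is a minimal usage sketch of the distribution this constructor builds, assuming the TF 1.x `tf.contrib.distributions` API these excerpts come from; the temperature and logits values below are illustrative only:

import tensorflow as tf

tfd = tf.contrib.distributions

# One batch member over three classes; lower temperatures push samples
# toward the one-hot vertices of the probability simplex.
dist = tfd.RelaxedOneHotCategorical(temperature=0.5,
                                    logits=[-0.2, 0.0, 1.3])

with tf.Session() as sess:
  sample = sess.run(dist.sample())        # shape [3], entries sum to ~1
  log_prob = sess.run(dist.log_prob(sample))

As the constructor above shows, samples are produced in log-space by ExpRelaxedOneHotCategorical and then mapped onto the simplex through the Exp bijector.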