Example #1
  def testBetaModeInvalid(self):
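    # The Beta mode (a - 1) / (a + b - 2) is undefined unless a > 1 and
    # b > 1; with allow_nan_stats=False, mode() must raise for a violating
    # batch member instead of returning NaN. Here a[0] = 1.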
    a = np.array([1., 2, 3])
    b = np.array([2., 4, 1.2])
    dist = beta_lib.Beta(a, b, allow_nan_stats=False)
    with self.assertRaisesOpError("Condition x < y.*"):
      self.evaluate(dist.mode())

    a = np.array([2., 2, 3])
    b = np.array([1., 4, 1.2])
    dist = beta_lib.Beta(a, b, allow_nan_stats=False)
    with self.assertRaisesOpError("Condition x < y.*"):
      self.evaluate(dist.mode())
Example #2
 def testPdfAlphaStretchedInBroadcastWhenLowerRank(self):
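   # The rank-1 parameters broadcast against the rank-2 sample x, so every
   # row of x is scored under the same two distributions.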
   a = [1., 2]
   b = [1., 2]
   x = [[.5, .5], [.2, .8]]
   pdf = beta_lib.Beta(a, b).prob(x)
   self.assertAllClose([[1., 3. / 2], [1., 24. / 25]], self.evaluate(pdf))
   self.assertEqual((2, 2), pdf.get_shape())
Example #3
 def testBetaMode(self):
   a = np.array([1.1, 2, 3])
   b = np.array([2., 4, 1.2])
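   # For a, b > 1 the mode of Beta(a, b) is (a - 1) / (a + b - 2).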
   expected_mode = (a - 1) / (a + b - 2)
   dist = beta_lib.Beta(a, b)
   self.assertEqual(dist.mode().get_shape(), (3,))
   self.assertAllClose(expected_mode, self.evaluate(dist.mode()))
Example #4
 def testBetaProperty(self):
     a = [[1., 2, 3]]
     b = [[2., 4, 3]]
     with self.test_session():
         dist = beta_lib.Beta(a, b)
         self.assertEqual([1, 3], dist.concentration0.get_shape())
         self.assertAllClose(b, dist.concentration0.eval())
Example #5
  def testBetaSampleMultipleTimes(self):
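    # Identical graph-level and op-level seeds must yield identical sample
    # streams from two independently constructed distributions.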
    a_val = 1.
    b_val = 2.
    n_val = 100

    random_seed.set_random_seed(654321)
    beta1 = beta_lib.Beta(
        concentration1=a_val, concentration0=b_val, name="beta1")
    samples1 = self.evaluate(beta1.sample(n_val, seed=123456))

    random_seed.set_random_seed(654321)
    beta2 = beta_lib.Beta(
        concentration1=a_val, concentration0=b_val, name="beta2")
    samples2 = self.evaluate(beta2.sample(n_val, seed=123456))

    self.assertAllClose(samples1, samples2)
Example #6
 def testBetaSample(self):
     with self.test_session():
         a = 1.
         b = 2.
         beta = beta_lib.Beta(a, b)
         n = constant_op.constant(100000)
         samples = beta.sample(n)
         sample_values = samples.eval()
         self.assertEqual(sample_values.shape, (100000, ))
         self.assertFalse(np.any(sample_values < 0.0))
         if not stats:
             return
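         # One-sample Kolmogorov-Smirnov test: the KS statistic against the
         # analytic Beta(1, 2) CDF should be small for 1e5 samples.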
         self.assertLess(
             stats.kstest(
                 # Beta is a univariate distribution.
                 sample_values,
                 stats.beta(a=1., b=2.).cdf)[0],
             0.01)
         # The standard error of the sample mean is 1 / (sqrt(18 * n))
         self.assertAllClose(sample_values.mean(axis=0),
                             stats.beta.mean(a, b),
                             atol=1e-2)
         self.assertAllClose(np.cov(sample_values, rowvar=0),
                             stats.beta.var(a, b),
                             atol=1e-1)
Example #7
 def testPdfXStretchedInBroadcastWhenLowerRank(self):
   a = [[1., 2], [2., 3]]
   b = [[1., 2], [2., 3]]
   x = [.5, .5]
   pdf = beta_lib.Beta(a, b).prob(x)
   self.assertAllClose([[1., 3. / 2], [3. / 2, 15. / 8]], self.evaluate(pdf))
   self.assertEqual((2, 2), pdf.get_shape())
Example #8
 def testPdfTwoBatchesNontrivialX(self):
   a = [1., 2]
   b = [1., 2]
   x = [.3, .7]
   dist = beta_lib.Beta(a, b)
   pdf = dist.prob(x)
   self.assertAllClose([1, 63. / 50], self.evaluate(pdf))
   self.assertEqual((2,), pdf.get_shape())
Example #9
 def testPdfTwoBatches(self):
   a = [1., 2]
   b = [1., 2]
   x = [.5, .5]
   dist = beta_lib.Beta(a, b)
   pdf = dist.prob(x)
   self.assertAllClose([1., 3. / 2], self.evaluate(pdf))
   self.assertEqual((2,), pdf.get_shape())
Example #10
 def testComplexShapesBroadcast(self):
   a = np.random.rand(3, 2, 2)
   b = np.random.rand(2, 2)
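   # b (shape [2, 2]) broadcasts against a (shape [3, 2, 2]); the event
   # shape is scalar and the batch shape is the broadcast shape [3, 2, 2].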
   dist = beta_lib.Beta(a, b)
   self.assertAllEqual([], self.evaluate(dist.event_shape_tensor()))
   self.assertAllEqual([3, 2, 2], self.evaluate(dist.batch_shape_tensor()))
   self.assertEqual(tensor_shape.TensorShape([]), dist.event_shape)
   self.assertEqual(tensor_shape.TensorShape([3, 2, 2]), dist.batch_shape)
Example #11
 def testPdfXStretchedInBroadcastWhenSameRank(self):
     with self.test_session():
         a = [[1., 2], [2., 3]]
         b = [[1., 2], [2., 3]]
         x = [[.5, .5]]
         pdf = beta_lib.Beta(a, b).prob(x)
         self.assertAllClose([[1., 3. / 2], [3. / 2, 15. / 8]], pdf.eval())
         self.assertEqual((2, 2), pdf.get_shape())
Example #12
 def testSimpleShapes(self):
   a = np.random.rand(3)
   b = np.random.rand(3)
   dist = beta_lib.Beta(a, b)
   self.assertAllEqual([], self.evaluate(dist.event_shape_tensor()))
   self.assertAllEqual([3], self.evaluate(dist.batch_shape_tensor()))
   self.assertEqual(tensor_shape.TensorShape([]), dist.event_shape)
   self.assertEqual(tensor_shape.TensorShape([3]), dist.batch_shape)
Example #13
 def testPdfAlphaStretchedInBroadcastWhenSameRank(self):
   a = [[1., 2]]
   b = [[1., 2]]
   x = [[.5, .5], [.3, .7]]
   dist = beta_lib.Beta(a, b)
   pdf = dist.prob(x)
   self.assertAllClose([[1., 3. / 2], [1., 63. / 50]], self.evaluate(pdf))
   self.assertEqual((2, 2), pdf.get_shape())
Example #14
 def testBetaEntropy(self):
   a = [1., 2, 3]
   b = [2., 4, 1.2]
   dist = beta_lib.Beta(a, b)
   self.assertEqual(dist.entropy().get_shape(), (3,))
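   # stats is SciPy's stats module when it could be imported; without SciPy
   # only the shape check above runs.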
   if not stats:
     return
   expected_entropy = stats.beta.entropy(a, b)
   self.assertAllClose(expected_entropy, self.evaluate(dist.entropy()))
Example #15
 def testBetaVariance(self):
   a = [1., 2, 3]
   b = [2., 4, 1.2]
   dist = beta_lib.Beta(a, b)
   self.assertEqual(dist.variance().get_shape(), (3,))
   if not stats:
     return
   expected_variance = stats.beta.var(a, b)
   self.assertAllClose(expected_variance, self.evaluate(dist.variance()))
Example #16
  def testBetaModeEnableAllowNanStats(self):
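    # With allow_nan_stats=True, batch members whose mode is undefined
    # (a <= 1 or b <= 1) yield NaN rather than raising.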
    a = np.array([1., 2, 3])
    b = np.array([2., 4, 1.2])
    dist = beta_lib.Beta(a, b, allow_nan_stats=True)

    expected_mode = (a - 1) / (a + b - 2)
    expected_mode[0] = np.nan
    self.assertEqual((3,), dist.mode().get_shape())
    self.assertAllClose(expected_mode, self.evaluate(dist.mode()))

    a = np.array([2., 2, 3])
    b = np.array([1., 4, 1.2])
    dist = beta_lib.Beta(a, b, allow_nan_stats=True)

    expected_mode = (a - 1) / (a + b - 2)
    expected_mode[0] = np.nan
    self.assertEqual((3,), dist.mode().get_shape())
    self.assertAllClose(expected_mode, self.evaluate(dist.mode()))
Example #17
 def testBetaMean(self):
   a = [1., 2, 3]
   b = [2., 4, 1.2]
   dist = beta_lib.Beta(a, b)
   self.assertEqual(dist.mean().get_shape(), (3,))
   if not stats:
     return
   expected_mean = stats.beta.mean(a, b)
   self.assertAllClose(expected_mean, self.evaluate(dist.mean()))
Example #18
 def testPdfUniformZeroBatch(self):
   # Beta(1., 1.) is the uniform distribution on (0, 1), so the pdf is 1
   # everywhere on the support.
   a = 1.
   b = 1.
   x = np.array([.1, .2, .3, .5, .8], dtype=np.float32)
   dist = beta_lib.Beta(a, b)
   pdf = dist.prob(x)
   self.assertAllClose([1.] * 5, self.evaluate(pdf))
   self.assertEqual((5,), pdf.get_shape())
Example #19
 def testComplexShapes(self):
     with self.test_session():
         a = np.random.rand(3, 2, 2)
         b = np.random.rand(3, 2, 2)
         dist = beta_lib.Beta(a, b)
         self.assertAllEqual([], dist.event_shape_tensor().eval())
         self.assertAllEqual([3, 2, 2], dist.batch_shape_tensor().eval())
         self.assertEqual(tensor_shape.TensorShape([]), dist.event_shape)
         self.assertEqual(tensor_shape.TensorShape([3, 2, 2]),
                          dist.batch_shape)
Example #20
 def testBetaFullyReparameterized(self):
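   # Beta sampling is reparameterized, so gradients of the samples with
   # respect to both concentration parameters should exist (be non-None).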
   a = constant_op.constant(1.0)
   b = constant_op.constant(2.0)
   with backprop.GradientTape() as tape:
     tape.watch(a)
     tape.watch(b)
     beta = beta_lib.Beta(a, b)
     samples = beta.sample(100)
   grad_a, grad_b = tape.gradient(samples, [a, b])
   self.assertIsNotNone(grad_a)
   self.assertIsNotNone(grad_b)
Example #21
 def testBetaLogCdf(self):
   shape = (30, 40, 50)
   for dt in (np.float32, np.float64):
     a = 10. * np.random.random(shape).astype(dt)
     b = 10. * np.random.random(shape).astype(dt)
     x = np.random.random(shape).astype(dt)
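      # Recover the CDF from log_cdf and compare it against scipy.stats.beta
      # below; x is drawn from [0, 1), inside the distribution's support.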
     actual = self.evaluate(math_ops.exp(beta_lib.Beta(a, b).log_cdf(x)))
      self.assertAllEqual(np.ones(shape, dtype=np.bool_), 0. <= x)
      self.assertAllEqual(np.ones(shape, dtype=np.bool_), 1. >= x)
     if not stats:
       return
     self.assertAllClose(stats.beta.cdf(x, a, b), actual, rtol=1e-4, atol=0)
Example #22
    def testBetaBetaKL(self):
        with self.test_session() as sess:
            for shape in [(10, ), (4, 5)]:
                a1 = 6.0 * np.random.random(size=shape) + 1e-4
                b1 = 6.0 * np.random.random(size=shape) + 1e-4
                a2 = 6.0 * np.random.random(size=shape) + 1e-4
                b2 = 6.0 * np.random.random(size=shape) + 1e-4
                # Take inverse softplus of values to test BetaWithSoftplusConcentration
                a1_sp = np.log(np.exp(a1) - 1.0)
                b1_sp = np.log(np.exp(b1) - 1.0)
                a2_sp = np.log(np.exp(a2) - 1.0)
                b2_sp = np.log(np.exp(b2) - 1.0)

                d1 = beta_lib.Beta(concentration1=a1, concentration0=b1)
                d2 = beta_lib.Beta(concentration1=a2, concentration0=b2)
                d1_sp = beta_lib.BetaWithSoftplusConcentration(
                    concentration1=a1_sp, concentration0=b1_sp)
                d2_sp = beta_lib.BetaWithSoftplusConcentration(
                    concentration1=a2_sp, concentration0=b2_sp)

                if not special:
                    return
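                # Closed form for KL(Beta(a1, b1) || Beta(a2, b2)):
                #   ln B(a2, b2) - ln B(a1, b1) + (a1 - a2) psi(a1)
                #     + (b1 - b2) psi(b1) + (a2 - a1 + b2 - b1) psi(a1 + b1),
                # where B is the beta function and psi the digamma function.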
                kl_expected = (special.betaln(a2, b2) -
                               special.betaln(a1, b1) +
                               (a1 - a2) * special.digamma(a1) +
                               (b1 - b2) * special.digamma(b1) +
                               (a2 - a1 + b2 - b1) * special.digamma(a1 + b1))

                for dist1 in [d1, d1_sp]:
                    for dist2 in [d2, d2_sp]:
                        kl = kullback_leibler.kl_divergence(dist1, dist2)
                        kl_val = sess.run(kl)
                        self.assertEqual(kl.get_shape(), shape)
                        self.assertAllClose(kl_val, kl_expected)

                # Make sure KL(d1||d1) is 0
                kl_same = sess.run(kullback_leibler.kl_divergence(d1, d1))
                self.assertAllClose(kl_same, np.zeros_like(kl_expected))
Example #23
 def testPdfXProper(self):
     a = [[1., 2, 3]]
     b = [[2., 4, 3]]
     with self.test_session():
         dist = beta_lib.Beta(a, b, validate_args=True)
         dist.prob([.1, .3, .6]).eval()
         dist.prob([.2, .3, .5]).eval()
         # Either condition can trigger.
         with self.assertRaisesOpError("sample must be positive"):
             dist.prob([-1., 0.1, 0.5]).eval()
         with self.assertRaisesOpError("sample must be positive"):
             dist.prob([0., 0.1, 0.5]).eval()
         with self.assertRaisesOpError("sample must be no larger than `1`"):
             dist.prob([.1, .2, 1.2]).eval()
Example #24
 def testBetaSampleMultidimensional(self):
     a = np.random.rand(3, 2, 2).astype(np.float32)
     b = np.random.rand(3, 2, 2).astype(np.float32)
     beta = beta_lib.Beta(a, b)
     n = constant_op.constant(100000)
     samples = beta.sample(n)
     sample_values = self.evaluate(samples)
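      # Sample shape is [n] + batch_shape + event_shape = [100000, 3, 2, 2].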
     self.assertEqual(sample_values.shape, (100000, 3, 2, 2))
     self.assertFalse(np.any(sample_values < 0.0))
     if not stats:
         return
     self.assertAllClose(sample_values[:, 1, :].mean(axis=0),
                         stats.beta.mean(a, b)[1, :],
                         atol=1e-1)
Example #25
 def testPdfXProper(self):
   a = [[1., 2, 3]]
   b = [[2., 4, 3]]
   dist = beta_lib.Beta(a, b, validate_args=True)
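   # validate_args=True enforces the support: samples must lie in the open
   # interval (0, 1).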
   self.evaluate(dist.prob([.1, .3, .6]))
   self.evaluate(dist.prob([.2, .3, .5]))
   # Either condition can trigger.
   with self.assertRaisesOpError("sample must be positive"):
     self.evaluate(dist.prob([-1., 0.1, 0.5]))
   with self.assertRaisesOpError("sample must be positive"):
     self.evaluate(dist.prob([0., 0.1, 0.5]))
   with self.assertRaisesOpError("sample must be less than `1`"):
     self.evaluate(dist.prob([.1, .2, 1.2]))
   with self.assertRaisesOpError("sample must be less than `1`"):
     self.evaluate(dist.prob([.1, .2, 1.0]))
Example #26
 def testBetaCdf(self):
     with self.test_session():
         shape = (30, 40, 50)
         for dt in (np.float32, np.float64):
             a = 10. * np.random.random(shape).astype(dt)
             b = 10. * np.random.random(shape).astype(dt)
             x = np.random.random(shape).astype(dt)
             actual = beta_lib.Beta(a, b).cdf(x).eval()
             self.assertAllEqual(np.ones(shape, dtype=np.bool_), 0. <= x)
             self.assertAllEqual(np.ones(shape, dtype=np.bool_), 1. >= x)
             if not stats:
                 return
             self.assertAllClose(stats.beta.cdf(x, a, b),
                                 actual,
                                 rtol=1e-4,
                                 atol=0)
Example #27
 def testLogPdfOnBoundaryIsFiniteWhenAlphaIsOne(self):
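      # Beta(1., b) has density b * (1 - x)**(b - 1), which is finite
      # (equal to b) at x = 0.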
     b = [[0.01, 0.1, 1., 2], [5., 10., 2., 3]]
     pdf = self.evaluate(beta_lib.Beta(1., b).prob(0.))
      self.assertAllEqual(np.ones_like(pdf, dtype=np.bool_), np.isfinite(pdf))
Example #28
 def testBetaProperty(self):
   a = [[1., 2, 3]]
   b = [[2., 4, 3]]
   dist = beta_lib.Beta(a, b)
   self.assertEqual([1, 3], dist.concentration0.get_shape())
   self.assertAllClose(b, self.evaluate(dist.concentration0))