  def test_high_entropy_bounds(self):
    # For high entropy distributions, the training bound should be very tight,
    # and the overhead of range coding manageable.
    noisy = uniform_noise.NoisyNormal(loc=0., scale=100.)
    em = ContinuousBatchedEntropyModel(noisy, 1, compression=True)
    x = noisy.base.sample([10000])
    bits_eval = em.bits(x, training=False)
    bits_training = em.bits(x, training=True)
    bits_compressed = 8 * len(em.compress(x).numpy())
    self.assertAllClose(bits_training, bits_eval, atol=0, rtol=5e-5)
    self.assertAllClose(bits_compressed, bits_eval, atol=0, rtol=5e-3)
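  # Hedged sketch, not part of the original suite: "high entropy" above means
  # the prior is wide relative to the unit quantization bin, so the eval rate
  # per sample should be close to the differential entropy of N(0, scale) in
  # bits, log2(scale * sqrt(2 * pi * e)). Assumes the same `em.bits` helper
  # used above; `math` is imported locally to keep the sketch self-contained.
  def test_rate_approximates_differential_entropy(self):
    import math
    scale = 100.
    noisy = uniform_noise.NoisyNormal(loc=0., scale=scale)
    em = ContinuousBatchedEntropyModel(noisy, 1, compression=True)
    x = noisy.base.sample([10000])
    bits_eval = em.bits(x, training=False)
    # Gaussian differential entropy in bits; with unit-width bins, this
    # closely approximates the entropy of the quantized variable.
    expected = math.log2(scale * math.sqrt(2 * math.pi * math.e))
    self.assertAllClose(bits_eval / 10000., expected, rtol=1e-2)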
 def test_information_bounds(self):
   # `bits(training=True)` should be greater than `bits(training=False)`
   # because it is defined as an upper bound (albeit for infinite data). The
   # actual length of the bit string should always be greater than
   # `bits(training=False)` because range coding is only asymptotically
   # optimal, and because it operates on quantized probabilities.
   for scale in 2 ** tf.linspace(-2., 7., 10):
     noisy = uniform_noise.NoisyNormal(loc=0., scale=scale)
     em = ContinuousBatchedEntropyModel(noisy, 1, compression=True)
     x = noisy.base.sample([10000])
     bits_eval = em.bits(x, training=False)
     bits_training = em.bits(x, training=True)
     bits_compressed = 8 * len(em.compress(x).numpy())
     self.assertGreater(bits_training, .9975 * bits_eval)
     self.assertGreater(bits_compressed, bits_eval)
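  # Hedged sketch, not part of the original suite: the upper bound referred to
  # above is tight only when the prior is wide relative to the unit
  # quantization bin. The relative gap between the training and eval rates is
  # substantial for a narrow prior and negligible for a wide one. Assumes the
  # same `em.bits` helper as the tests above.
  def test_training_bound_gap_depends_on_scale(self):
    def relative_gap(scale):
      noisy = uniform_noise.NoisyNormal(loc=0., scale=scale)
      em = ContinuousBatchedEntropyModel(noisy, 1, compression=True)
      x = noisy.base.sample([10000])
      bits_eval = em.bits(x, training=False)
      bits_training = em.bits(x, training=True)
      return float((bits_training - bits_eval) / bits_eval)
    # Narrow prior: the bound is loose (relative gap well above 10% here).
    self.assertGreater(relative_gap(.25), .1)
    # Wide prior: the bound is essentially tight (compare the first test).
    self.assertLess(relative_gap(100.), .01)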