Code example #1
 def test_soft_round_values_and_gradients_are_finite(self, alpha):
   x = tf.linspace(0., 1., 11)  # covers exact integers and half-integers
   with tf.GradientTape() as tape:
     tape.watch(x)
     y = soft_round_ops.soft_round(x, alpha=alpha)
   dy = tape.gradient(y, x)
   self.assertAllEqual(tf.math.is_finite(y), tf.ones(x.shape, dtype=bool))
   self.assertAllEqual(tf.math.is_finite(dy), tf.ones(x.shape, dtype=bool))
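
For reference, the soft rounding these tests exercise follows the formulation in Agustsson & Theis (2020), "Universally Quantized Neural Compression". The sketch below is an illustrative reimplementation under that assumption, not the library code; soft_round_ops.soft_round additionally guards against numerical edge cases (e.g. alpha near zero) that this sketch ignores.

import tensorflow as tf

def soft_round_sketch(x, alpha):
  # Illustrative soft rounding: m is the midpoint of the unit interval
  # containing x, r the offset from that midpoint. The tanh ratio maps each
  # unit interval onto itself, so integers map to themselves for any alpha > 0.
  m = tf.math.floor(x) + 0.5
  r = x - m
  z = 2.0 * tf.math.tanh(alpha / 2.0)
  return m + tf.math.tanh(alpha * r) / z

As alpha grows, the tanh saturates and the output approaches tf.round(x); as alpha shrinks toward zero it approaches the identity, which is what the large-alpha and small-alpha tests further below check.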
Code example #2
class AdaptersTest(tf.test.TestCase, parameterized.TestCase):
    @parameterized.named_parameters(
        ("softround_deepfactorized",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         deep_factorized.DeepFactorized, 0.0),
        ("softround_logistic",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10.3, scale=1.5),
         lambda: soft_round_ops.soft_round(0.3, alpha=5.0)),
        ("softround_normal",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=4.0),
         lambda: tfp.distributions.Normal(loc=10.4, scale=1.5),
         lambda: soft_round_ops.soft_round(0.4, alpha=4.0)),
        ("noisysoftround_deepfactorized",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         deep_factorized.DeepFactorized, 0.0),
        ("noisysoftround_logistic",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10, scale=1.5), 0.0),
        ("noisysoftround_normal",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Normal(loc=10, scale=1.5), 0.0),
        ("round_deepfactorized", round_adapters.RoundAdapter,
         lambda: deep_factorized.DeepFactorized(init_scale=1.0), 0.0),
        ("round_logistic", round_adapters.RoundAdapter,
         lambda: tfp.distributions.Logistic(loc=1.5, scale=1.5), 0.0),
        ("round_normal", round_adapters.RoundAdapter,
         lambda: tfp.distributions.Normal(loc=1.5, scale=1.5), 0.0),
        ("noisyround_deepfactorized", round_adapters.NoisyRoundAdapter,
         lambda: deep_factorized.DeepFactorized(init_scale=1.0), 0.0),
        ("noisyround_logistic", round_adapters.NoisyRoundAdapter,
         lambda: tfp.distributions.Logistic(loc=1.5, scale=1.5), 0.0),
        ("noisyround_normal", round_adapters.NoisyRoundAdapter,
         lambda: tfp.distributions.Normal(loc=1.5, scale=1.5), 0.0),
    )
    def test_tails_and_offset(self, adapter, distribution, expected_offset):
        dist = adapter(distribution())
        lower_tail = dist._lower_tail(2**-8)
        try:
            left_mass = dist.cdf(lower_tail)
        except NotImplementedError:
            # We use the base distribution as a proxy for the tail mass.
            left_mass = dist.base.cdf(lower_tail)
        self.assertLessEqual(left_mass, 2**-8)

        upper_tail = dist._upper_tail(2**-8)
        try:
            right_mass = dist.survival_function(upper_tail)
        except NotImplementedError:
            # We use the base distribution as a proxy for the tail mass.
            right_mass = dist.base.survival_function(upper_tail)
        self.assertLessEqual(right_mass, 2**-8)

        self.assertGreater(upper_tail, lower_tail)
        offset = dist._quantization_offset()
        if not isinstance(expected_offset, float):
            # We cannot run tf inside the parameterized test declaration, hence
            # non-float values are wrapped in a lambda.
            expected_offset = expected_offset()
        self.assertAllClose(offset, expected_offset)

    @parameterized.named_parameters(
        ("softround_logistic",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10, scale=1.5)),
        ("softround_normal",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Normal(loc=10, scale=1.5)),
    )
    def test_mode_and_quantile(self, adapter, distribution):
        dist = adapter(distribution())
        mode = dist.mode()
        left_mass = dist.cdf(mode)
        self.assertAllClose(left_mass, 0.5)
        quantile_75p = dist.quantile(0.75)
        left_mass = dist.cdf(quantile_75p)
        self.assertAllClose(left_mass, 0.75)

    def test_lacking_mode_and_quantile(self):
        dist = round_adapters.RoundAdapter(
            tfp.distributions.Logistic(loc=1.5, scale=1.5))
        with self.assertRaises(NotImplementedError):
            dist.mode()
        with self.assertRaises(NotImplementedError):
            dist.quantile(0.75)

    def test_lacking_tails_and_offset(self):
        class NonInvertibleAdapter(round_adapters.MonotonicAdapter):

            invertible = False

            def transform(self, x):
                return tf.math.ceil(x)

            def inverse_transform(self, y):
                return tf.math.floor(y)

        dist = NonInvertibleAdapter(
            tfp.distributions.Normal(loc=1.5, scale=1.5))
        with self.assertRaises(NotImplementedError):
            dist._lower_tail(0.01)
        with self.assertRaises(NotImplementedError):
            dist._upper_tail(0.01)
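
An illustrative note on why the tail computations above need a monotonically increasing, invertible transform (the helper below is hypothetical, not part of the library): for Y = t(X) with t strictly increasing and invertible, P(Y <= y) = P(X <= t^{-1}(y)), so tail masses of the adapted distribution can be pulled back through inverse_transform. The NonInvertibleAdapter case shows that when no true inverse exists, the tail computations raise NotImplementedError instead.

import tensorflow as tf
import tensorflow_probability as tfp

def left_tail_mass(base, inverse_transform, y):
  # P(t(X) <= y) = P(X <= t^{-1}(y)) for a strictly increasing transform t.
  return base.cdf(inverse_transform(y))

# Example with t(x) = exp(x), whose inverse is log:
base = tfp.distributions.Normal(loc=1.5, scale=1.5)
mass = left_tail_mass(base, tf.math.log, 2.0)  # equals base.cdf(log(2.0))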
Code example #3
File: round_adapters.py  Project: wps1215/compression
 def transform(self, x):
     return soft_round_ops.soft_round(x, self._alpha)
Code example #4
 def test_soft_inverse_is_actual_inverse(self):
   x = tf.constant([-1.25, -0.75, 0.75, 1.25], dtype=tf.float32)
   y = soft_round_ops.soft_round(x, alpha=2.0)
   x2 = soft_round_ops.soft_round_inverse(y, alpha=2.0)
   self.assertAllClose(x, x2)
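
The round trip above works because soft rounding is strictly increasing within each unit interval and therefore invertible there. Assuming the same formulation as the sketch after code example #1, a closed-form inverse looks like the following; soft_round_ops.soft_round_inverse is the maintained implementation and adds numerical safeguards this sketch omits.

import tensorflow as tf

def soft_round_inverse_sketch(y, alpha):
  # Invert y = m + tanh(alpha * r) / (2 * tanh(alpha / 2)) within a unit
  # interval: recover the offset r with atanh, then add back the midpoint m.
  m = tf.math.floor(y) + 0.5
  r = tf.math.atanh((y - m) * 2.0 * tf.math.tanh(alpha / 2.0)) / alpha
  return m + r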
Code example #5
 def test_soft_round_large_alpha_is_round(self):
   # We don't care what happens exactly near half-integer values:
   for offset in range(-5, 5):
     x = tf.linspace(offset - 0.499, offset + 0.499, 100)
     y = soft_round_ops.soft_round(x, alpha=2000.0)
     self.assertAllClose(tf.round(x), y, atol=0.02)
Code example #6
 def test_soft_round_small_alpha_is_identity(self):
   x = tf.linspace(-2., 2., 50)
   y = soft_round_ops.soft_round(x, alpha=1e-13)
   self.assertAllClose(x, y)
Code example #7
 def test_soft_round_layer_soft_rounds(self):
     alpha = 5.0
     layer = soft_round.SoftRound(alpha=alpha)
     x = tf.linspace(-5.0, 5.0, num=50)
     y = layer(x)
     self.assertAllClose(y, soft_round_ops.soft_round(x, alpha=alpha))