def __init__(self,
             alpha=5.0,
             name="NoisySoftRoundedDeepFactorized",
             **kwargs):
    """Initializer.

    Args:
      alpha: Float. Sharpness parameter forwarded to the soft-round adapter.
      name: String. Name of this distribution.
      **kwargs: Keyword arguments forwarded to the `DeepFactorized` base.
    """
    base = deep_factorized.DeepFactorized(**kwargs)
    super().__init__(base=base, alpha=alpha, name=name)
# Example 2
 def test_variables_receive_gradients(self):
     """Every trainable variable should receive a non-None gradient."""
     model = deep_factorized.DeepFactorized()
     samples = tf.random.normal([20])
     with tf.GradientTape() as tape:
         loss = -tf.reduce_mean(model.log_prob(samples))
     gradients = tape.gradient(loss, model.trainable_variables)
     self.assertLen(gradients, 8)
     self.assertNotIn(None, gradients)
# Example 3
 def test_logistic_is_special_case_log_cdf(self):
     """With no hidden units, log_cdf should match a logistic distribution."""
     dist = deep_factorized.DeepFactorized(num_filters=(), init_scale=1)
     reference = tfp.distributions.Logistic(
         loc=-dist._biases[0][0, 0], scale=1.)
     grid = tf.linspace(-5000., 5000., 1000)
     self.assertAllClose(dist.log_cdf(grid), reference.log_cdf(grid))
# Example 4
 def test_logistic_is_special_case(self):
     """No hidden units: density is a logistic convolved with U(-.5, .5)."""
     dist = deep_factorized.DeepFactorized(num_filters=(), init_scale=1)
     reference = tfp.distributions.Logistic(
         loc=-dist._biases[0][0, 0], scale=1.)
     grid = tf.linspace(-5., 5., 20)
     expected = reference.cdf(grid + .5) - reference.cdf(grid - .5)
     self.assertAllClose(dist.prob(grid), expected)
 def test_logistic_is_special_case(self, method):
     """With no hidden units, `method` should match a pure logistic."""
     dist = deep_factorized.DeepFactorized(num_filters=(), init_scale=1)
     reference = tfp.distributions.Logistic(
         loc=-dist._biases[0][0, 0], scale=1.)
     grid = tf.linspace(-5., 5., 20)
     self.assertAllClose(
         getattr(dist, method)(grid), getattr(reference, method)(grid))
# Example 6
 def test_stats_throw_error(self):
     """Unsupported statistics must raise NotImplementedError."""
     dist = deep_factorized.DeepFactorized()
     # Zero-argument statistics.
     for stat in (dist.mode, dist.mean):
         with self.assertRaises(NotImplementedError):
             stat()
     # Statistics taking an argument.
     with self.assertRaises(NotImplementedError):
         dist.quantile(.5)
     with self.assertRaises(NotImplementedError):
         dist.survival_function(.5)
     with self.assertRaises(NotImplementedError):
         dist.sample()
# Example 7
 def test_can_instantiate_batched(self):
     """A batched instance should expose the expected default attributes."""
     dist = deep_factorized.DeepFactorized(batch_shape=(4, 3))
     self.assertEqual(dist.batch_shape, (4, 3))
     self.assertEqual(dist.event_shape, ())
     self.assertEqual(dist.num_filters, (3, 3))
     self.assertEqual(dist.init_scale, 10)
# Example 8
 def test_quantization_offset_is_zero(self):
     """DeepFactorized should report a zero quantization offset."""
     dist = deep_factorized.DeepFactorized()
     self.assertEqual(helpers.quantization_offset(dist), 0)
# Example 9
 def test_uniform_is_special_case(self):
     """As init_scale goes to zero, density approaches a unit-width uniform."""
     dist = deep_factorized.DeepFactorized(init_scale=1e-3)
     grid = tf.linspace(-1., 1., 10)
     self.assertAllClose(dist.prob(grid), [0, 0, 0, 1, 1, 1, 1, 0, 0, 0])
# Example 10
class AdaptersTest(tf.test.TestCase, parameterized.TestCase):
    """Tests for (soft-)round adapter distributions.

    Covers tail estimates, quantization offsets, mode/quantile support, and
    the error behavior of non-invertible adapters.
    """

    @parameterized.named_parameters(
        ("softround_deepfactorized",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         deep_factorized.DeepFactorized, 0.0),
        ("softround_logistic",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10.3, scale=1.5),
         lambda: soft_round_ops.soft_round(0.3, alpha=5.0)),
        ("softround_normal",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=4.0),
         lambda: tfp.distributions.Normal(loc=10.4, scale=1.5),
         lambda: soft_round_ops.soft_round(0.4, alpha=4.0)),
        ("noisysoftround_deepfactorized",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         deep_factorized.DeepFactorized, 0.0),
        ("noisysoftround_logistic",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10, scale=1.5), 0.0),
        ("noisysoftround_normal",
         lambda d: round_adapters.NoisySoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Normal(loc=10, scale=1.5), 0.0),
        ("round_deepfactorized", round_adapters.RoundAdapter,
         lambda: deep_factorized.DeepFactorized(init_scale=1.0), 0.0),
        ("round_logistic", round_adapters.RoundAdapter,
         lambda: tfp.distributions.Logistic(loc=1.5, scale=1.5), 0.0),
        ("round_normal", round_adapters.RoundAdapter,
         lambda: tfp.distributions.Normal(loc=1.5, scale=1.5), 0.0),
        ("noisyround_deepfactorized", round_adapters.NoisyRoundAdapter,
         lambda: deep_factorized.DeepFactorized(init_scale=1.0), 0.0),
        ("noisyround_logistic", round_adapters.NoisyRoundAdapter,
         lambda: tfp.distributions.Logistic(loc=1.5, scale=1.5), 0.0),
        ("noisyround_normal", round_adapters.NoisyRoundAdapter,
         lambda: tfp.distributions.Normal(loc=1.5, scale=1.5), 0.0),
    )
    def test_tails_and_offset(self, adapter, distribution, expected_offset):
        """Tails must bound at most 2**-8 of mass; offset must match."""
        dist = adapter(distribution())
        lower_tail = dist._lower_tail(2**-8)
        try:
            left_mass = dist.cdf(lower_tail)
        except NotImplementedError:
            # We use base distribution as a proxy for the tail mass.
            left_mass = dist.base.cdf(lower_tail)
        self.assertLessEqual(left_mass, 2**-8)

        upper_tail = dist._upper_tail(2**-8)
        try:
            right_mass = dist.survival_function(upper_tail)
        except NotImplementedError:
            # We use base distribution as a proxy for the tail mass.
            right_mass = dist.base.survival_function(upper_tail)
        self.assertLessEqual(right_mass, 2**-8)

        self.assertGreater(upper_tail, lower_tail)
        offset = dist._quantization_offset()
        if not isinstance(expected_offset, float):
            # We cannot run tf inside the parameterized test declaration, hence
            # non-float values are wrapped in a lambda.
            expected_offset = expected_offset()
        self.assertAllClose(offset, expected_offset)

    @parameterized.named_parameters(
        ("softround_logistic",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Logistic(loc=10, scale=1.5)),
        ("softround_normal",
         lambda d: round_adapters.SoftRoundAdapter(d, alpha=5.0),
         lambda: tfp.distributions.Normal(loc=10, scale=1.5)),
    )
    def test_mode_and_quantile(self, adapter, distribution):
        """Mode and quantiles must be consistent with the CDF."""
        dist = adapter(distribution())
        mode = dist.mode()
        left_mass = dist.cdf(mode)
        self.assertAllClose(left_mass, 0.5)
        quantile_75p = dist.quantile(0.75)
        left_mass = dist.cdf(quantile_75p)
        self.assertAllClose(left_mass, 0.75)

    def test_lacking_mode_and_quantile(self):
        """Round adapters do not implement mode/quantile and must raise."""
        dist = round_adapters.RoundAdapter(
            tfp.distributions.Logistic(loc=1.5, scale=1.5))
        with self.assertRaises(NotImplementedError):
            dist.mode()
        with self.assertRaises(NotImplementedError):
            dist.quantile(0.75)

    def test_lacking_tails_and_offset(self):
        """Without an inverse transform, tail helpers must raise."""
        # Fixed misspelled local class name "Adapater" -> "Adapter"; the name
        # is internal to this test, so no callers are affected.
        class NonInvertibleAdapter(round_adapters.MonotonicAdapter):

            invertible = False

            def transform(self, x):
                return tf.ceil(x)

            def inverse_transform(self, y):
                return tf.floor(y)

        dist = NonInvertibleAdapter(
            tfp.distributions.Normal(loc=1.5, scale=1.5))
        with self.assertRaises(NotImplementedError):
            dist._lower_tail(0.01)
        with self.assertRaises(NotImplementedError):
            dist._upper_tail(0.01)
 def test_broadcasts_correctly(self, method):
     """`method` should broadcast its input against the batch shape."""
     dist = deep_factorized.DeepFactorized(batch_shape=(2, 3))
     grid = tf.reshape(tf.linspace(-5., 5., 20), (4, 5, 1, 1))
     result = getattr(dist, method)(grid)
     self.assertEqual(result.shape, (4, 5, 2, 3))
# Example 12
 def test_deep_factorized_tails_are_in_order(self):
     """The upper tail must lie strictly above the lower tail."""
     dist = deep_factorized.DeepFactorized(batch_shape=[10])
     tail_gap = (helpers.upper_tail(dist, 2**-8) -
                 helpers.lower_tail(dist, 2**-8))
     self.assertAllGreater(tail_gap, 0)
# Example 13
 def __init__(self, name="NoisyRoundedDeepFactorized", **kwargs):
     """Initializer.

     Args:
       name: String. Name of this distribution.
       **kwargs: Keyword arguments forwarded to the `DeepFactorized` prior.
     """
     super().__init__(
         base=deep_factorized.DeepFactorized(**kwargs), name=name)