Example #1
0
    def testDenseDVIMoments(self):
        """Checks DenseDVI's analytic moments against Monte Carlo estimates."""
        tf.set_random_seed(377269)
        batch_size = 3
        num_features = 5
        units = 128
        num_samples = 50000
        inputs = tf.to_float(np.random.rand(batch_size, num_features))
        layer = bayes.DenseDVI(units, activation=tf.nn.relu)

        # Analytic moments from deterministic variational inference.
        dvi_outputs = layer(inputs)
        analytic_mean = dvi_outputs.distribution.mean()
        analytic_cov = dvi_outputs.distribution.covariance()

        # Empirical moments from Monte Carlo samples of the kernel posterior.
        sampled_kernels = layer.kernel.distribution.sample(num_samples)
        sampled_outputs = layer.activation(
            tf.einsum("bd,sdu->sbu", inputs, sampled_kernels) +
            tf.reshape(layer.bias, [1, 1, units]))
        empirical_mean = tf.reduce_mean(sampled_outputs, axis=0)
        # Move the sample axis last so matmul contracts over it per batch item.
        deviations = tf.transpose(sampled_outputs - empirical_mean, [1, 2, 0])
        empirical_cov = tf.matmul(
            deviations, deviations, transpose_b=True) / float(num_samples)

        self.evaluate(tf.global_variables_initializer())
        a_mean, a_cov, e_mean, e_cov = self.evaluate(
            [analytic_mean, analytic_cov, empirical_mean, empirical_cov])
        # Heuristic: at most 5% of entries may differ by more than 5e-3.
        mean_mismatch_frac = np.mean(np.abs(a_mean - e_mean) > 5e-3)
        self.assertLessEqual(mean_mismatch_frac, 0.05)
        cov_mismatch_frac = np.mean(np.abs(a_cov - e_cov) > 5e-3)
        self.assertLessEqual(cov_mismatch_frac, 0.05)
Example #2
0
 def testDenseDVIIsDeterministic(self):
     """Tests that DenseDVI network has a deterministic loss function."""
     features = tf.to_float(np.random.rand(3, 2))
     labels = tf.to_float(np.random.rand(3, 1))
     network = tf.keras.Sequential([
         bayes.DenseDVI(5, activation=tf.nn.relu),
         bayes.DenseDVI(1, activation=None),
     ])
     predictions = network(features, training=True)
     # Negative log-likelihood plus the layers' KL regularizers (the ELBO loss).
     negative_log_likelihood = -tf.reduce_sum(
         predictions.distribution.log_prob(labels))
     kl_penalty = sum(network.losses)
     total_loss = negative_log_likelihood + kl_penalty
     self.evaluate(tf.global_variables_initializer())
     first_value = self.evaluate(total_loss)
     second_value = self.evaluate(total_loss)
     # Two evaluations of the same graph must agree exactly — no sampling noise.
     self.assertEqual(first_value, second_value)