Example #1
import probflow as pf
import probflow.utils.ops as ops  # assumed import path for probflow's ops module
import tensorflow_probability as tfp


def test_kl_divergence():
    """Tests kl_divergence"""

    # Divergence between a distribution and itself should be 0
    dist = tfp.distributions.Normal(0, 1)
    assert ops.kl_divergence(dist, dist).numpy() == 0.0

    # Divergence between two different distributions should be >0
    d1 = tfp.distributions.Normal(0, 1)
    d2 = tfp.distributions.Normal(1, 1)
    assert ops.kl_divergence(d1, d2).numpy() > 0.0

    # Divergence should grow as the distributions become more dissimilar
    d1 = tfp.distributions.Normal(0, 1)
    d2 = tfp.distributions.Normal(1, 1)
    d3 = tfp.distributions.Normal(2, 1)
    assert ops.kl_divergence(d1, d2).numpy() < ops.kl_divergence(d1, d3).numpy()

    # Should auto-convert probflow distributions
    dist = pf.Normal(0, 1)
    assert ops.kl_divergence(dist, dist).numpy() == 0.0
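For reference, the KL divergence between two unit-variance Gaussians has a closed form, KL(N(μ1, 1) ‖ N(μ2, 1)) = (μ1 − μ2)² / 2, so the ordering asserted above holds exactly: 0.5 for d1 vs d2 and 2.0 for d1 vs d3.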
Example #2
# Mixture-of-Gaussians observation model
def __call__(self):
    return pf.Mixture(pf.Normal(self.m(), self.s()), probs=self.w())
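This __call__ presumably sits inside a probflow Model used for density estimation. A minimal sketch of the surrounding class, assuming m, s, and w are parameters for the component means, scales, and mixture weights (class name, constructor, and parameter types are assumptions, only the names come from the snippet):

import probflow as pf

class GaussianMixture(pf.Model):
    # Hypothetical reconstruction: mixture-of-Gaussians density estimator
    def __init__(self, k=3):
        self.m = pf.Parameter([k], name="means")           # component means
        self.s = pf.ScaleParameter([k], name="scales")     # component std devs
        self.w = pf.DirichletParameter(k, name="weights")  # mixing probabilities

    def __call__(self):
        return pf.Mixture(pf.Normal(self.m(), self.s()), probs=self.w())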
Example #3
# Normal observation model with a learned mean and scale
def __call__(self):
    return pf.Normal(self.mu(), self.sig())
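This version fits a single Normal distribution to the data. A sketch of the enclosing class, assuming mu and sig are scalar parameters (again, only the names come from the snippet):

import probflow as pf

class NormalModel(pf.Model):
    # Hypothetical reconstruction: learns the mean and std of 1-D data
    def __init__(self):
        self.mu = pf.Parameter(name="mu")         # mean
        self.sig = pf.ScaleParameter(name="sig")  # std dev (kept positive)

    def __call__(self):
        return pf.Normal(self.mu(), self.sig())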
Example #4
# Regression with a neural-network mean and a learned noise scale
def __call__(self, x):
    return pf.Normal(self.net(x), self.s())
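Here the Normal's mean comes from a neural network while the noise scale s is learned separately. A sketch assuming net is a probflow DenseNetwork (the class name and layer sizes below are placeholders):

import probflow as pf

class DenseRegression(pf.ContinuousModel):
    # Hypothetical reconstruction: net predicts the mean, s is a global noise scale
    def __init__(self, dims):
        self.net = pf.DenseNetwork(dims)
        self.s = pf.ScaleParameter(name="noise")

    def __call__(self, x):
        return pf.Normal(self.net(x), self.s())

model = DenseRegression([5, 32, 1])  # 5 features -> 32 hidden units -> 1 output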
Example #5
# Scalar linear regression observation model
def __call__(self, x):
    return pf.Normal(x * self.weight() + self.bias(), self.std())
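The same observation model as a complete scalar linear regression, assuming weight, bias, and std are all scalar parameters (a sketch, not the original class):

import probflow as pf

class SimpleLinearRegression(pf.ContinuousModel):
    # Hypothetical reconstruction: y ~ Normal(weight*x + bias, std)
    def __init__(self):
        self.weight = pf.Parameter(name="weight")
        self.bias = pf.Parameter(name="bias")
        self.std = pf.ScaleParameter(name="std")

    def __call__(self, x):
        return pf.Normal(x * self.weight() + self.bias(), self.std())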
Example #6

# Multivariate linear regression observation model
def __call__(self, x):
    return pf.Normal(x @ self.w() + self.b(), self.s())
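And the multivariate counterpart: x @ self.w() is a matrix product, so w needs shape [d, 1] for d input features. A sketch under that assumption (class name and parameter shapes are assumptions):

import probflow as pf

class LinearRegression(pf.ContinuousModel):
    # Hypothetical reconstruction: y ~ Normal(x @ w + b, s)
    def __init__(self, d):
        self.w = pf.Parameter([d, 1], name="w")       # one weight per feature
        self.b = pf.Parameter([1, 1], name="b")       # intercept
        self.s = pf.ScaleParameter([1, 1], name="s")  # observation noise

    def __call__(self, x):
        return pf.Normal(x @ self.w() + self.b(), self.s())

model = LinearRegression(d=7)
model.fit(x, y)  # x: [N, 7] features, y: [N, 1] targets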