def test_kl3(self):
    # When q assigns zero probability to an outcome that p gives positive
    # probability (index 1: p=0.20, q=0), D_KL(p || q) diverges to +inf.
    p = np.array([0.25, 0.20, 0, 0.55])
    q = np.array([0.20, 0, 0.25, 0.55])
    result = entropy.kullback_leiber_divergence(p, q)
    self.assertAlmostEqual(result, float('inf'))
def test_kl2(self):
    # Both distributions end with a 0/0 pair; that term contributes nothing
    # to D_KL by convention, so the expected value sums only the first
    # len - 1 terms (matching what the implementation should produce).
    p = np.array([0.04, 0.16] * 5 + [0])
    q = np.array([0.02, 0.18] * 5 + [0])
    expected = sum(
        p_i * math.log(p_i / q_i, 2)
        for p_i, q_i in zip(p[:-1], q[:-1])
    )
    self.assertAlmostEqual(entropy.kullback_leiber_divergence(p, q), expected)