Example #1
import torch
from torch.distributions import kl_divergence

from pyro.distributions import Normal
from tests.common import assert_equal  # test helper from the Pyro repo


def test_kl_divergence():
    # Complementary boolean masks that partition the 2x2 batch of sites.
    mask = torch.tensor([[0, 1], [1, 1]]).bool()
    p = Normal(torch.randn(2, 2), torch.randn(2, 2).exp())
    q = Normal(torch.randn(2, 2), torch.randn(2, 2).exp())
    # KL over the full event should equal the sum of the KLs
    # computed over the two complementary masked parts.
    expected = kl_divergence(p.to_event(2), q.to_event(2))
    actual = (kl_divergence(p.mask(mask).to_event(2),
                            q.mask(mask).to_event(2)) +
              kl_divergence(p.mask(~mask).to_event(2),
                            q.mask(~mask).to_event(2)))
    assert_equal(actual, expected)
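The identity tested above holds because masking zeroes out the log-density contribution at masked-out positions, so two complementary masks partition the total KL. A minimal sketch of that zeroing effect, assuming Pyro is installed (the tensor values here are illustrative, not from the original test):

import torch
from pyro.distributions import Normal

mask = torch.tensor([[0, 1], [1, 1]]).bool()
d = Normal(torch.zeros(2, 2), torch.ones(2, 2)).mask(mask)
x = torch.randn(2, 2)
# The log-prob at the masked-out site [0, 0] is exactly 0.0;
# the remaining entries match the unmasked Normal log-probs.
print(d.log_prob(x))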
Example #2
import torch

from pyro.distributions import Normal
from pyro.distributions.util import broadcast_shape


# In Pyro's test suite, event_shape, dist_shape, and mask_shape are
# supplied by pytest parametrization over many shape combinations.
def test_broadcast(event_shape, dist_shape, mask_shape):
    # Random boolean mask with the requested shape.
    mask = torch.empty(torch.Size(mask_shape)).bernoulli_(0.5).bool()
    base_dist = Normal(torch.zeros(dist_shape + event_shape), 1.)
    base_dist = base_dist.to_event(len(event_shape))
    assert base_dist.batch_shape == dist_shape
    assert base_dist.event_shape == event_shape

    # Masking broadcasts the mask's shape against the batch shape;
    # the event shape is left untouched.
    d = base_dist.mask(mask)
    d_shape = broadcast_shape(mask.shape, base_dist.batch_shape)
    assert d.batch_shape == d_shape
    assert d.event_shape == event_shape
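For a concrete run of the check above, one hypothetical shape combination (the actual test supplies many such combinations via parametrization) might look like this:

# Hypothetical shapes, chosen only for illustration: the mask's trailing
# dimension broadcasts against the base distribution's batch shape.
test_broadcast(event_shape=torch.Size([3]),
               dist_shape=torch.Size([2, 1]),
               mask_shape=torch.Size([2, 4]))
# Inside the test: d.batch_shape == (2, 4) and d.event_shape == (3,).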