def __init__(self, concentration, rate=1., validate_args=None):
    """Construct an InverseGamma as a transformed Gamma distribution.

    The distribution is built as ``PowerTransform(-1.0)`` applied to a
    ``Gamma(concentration, rate)`` base distribution.

    :param concentration: shape parameter of the distribution.
    :param rate: kept with the Pyro/TensorFlow naming, but it acts as the
        *scale* parameter of the InverseGamma in the literature
        (see https://en.wikipedia.org/wiki/Inverse-gamma_distribution).
    :param validate_args: whether to validate distribution parameters.
    """
    self.concentration = concentration
    self.rate = rate
    gamma_base = Gamma(concentration, rate)
    super(InverseGamma, self).__init__(
        gamma_base, PowerTransform(-1.0), validate_args=validate_args)
# Ejemplo n.º 2  (NOTE(review): scrape-site example separator, not part of the source)
# 0
                return transform.inv(matrix)

            expected = onp.linalg.slogdet(jax.jacobian(vec_transform)(x))[1]
            inv_expected = onp.linalg.slogdet(jax.jacobian(inv_vec_transform)(y_tril))[1]
        else:
            expected = np.log(np.abs(grad(transform)(x)))
            inv_expected = np.log(np.abs(grad(transform.inv)(y)))

        assert_allclose(actual, expected, atol=1e-6, rtol=1e-6)
        assert_allclose(actual, -inv_expected, atol=1e-6, rtol=1e-6)


# NB: skip transforms which are tested in `test_biject_to`
@pytest.mark.parametrize('transform, event_shape', [
    (PermuteTransform(np.array([3, 0, 4, 1, 2])), (5,)),
    (PowerTransform(2.), ()),
    (MultivariateAffineTransform(np.array([1., 2.]), np.array([[0.6, 0.], [1.5, 0.4]])), (2,))
])
@pytest.mark.parametrize('batch_shape', [(), (1,), (3,), (6,), (3, 1), (1, 3), (5, 3)])
def test_bijective_transforms(transform, event_shape, batch_shape):
    """Check that each transform maps into its codomain and is invertible.

    For every (transform, batch_shape) combination: sample an unconstrained
    value, push it into the transform's domain, apply the transform, and
    verify (a) the output lies in the codomain and (b) the inverse recovers
    the input.
    """
    shape = batch_shape + event_shape
    rng_key = random.PRNGKey(0)
    # Map standard-normal noise into the transform's domain so `x` is a
    # valid input for `transform`.
    x = biject_to(transform.domain)(random.normal(rng_key, shape))
    y = transform(x)

    # test codomain
    # `transform.codomain(y)` is expected to return a per-batch membership
    # indicator; every entry should be 1 (i.e. y is inside the codomain).
    assert_array_equal(transform.codomain(y), np.ones(batch_shape))

    # test inv
    # Round-trip: inv(transform(x)) should recover x up to float tolerance.
    z = transform.inv(y)
    assert_allclose(x, z, atol=1e-6, rtol=1e-6)