Example #1
def _get_posterior(self):
    if self.latent_dim == 1:
        raise ValueError('latent dim = 1. Consider using AutoDiagonalNormal instead')
    flows = []
    for i in range(self.num_flows):
        if i > 0:
            # reverse the coordinate order between successive flows
            flows.append(PermuteTransform(jnp.arange(self.latent_dim)[::-1]))
        # use gated residual connections on all but the last flow
        residual = "gated" if i < (self.num_flows - 1) else None
        arn = BlockNeuralAutoregressiveNN(self.latent_dim, self._hidden_factors, residual)
        # register the network's parameters with numpyro under a unique name
        arnn = numpyro.module('{}_arn__{}'.format(self.prefix, i), arn, (self.latent_dim,))
        flows.append(BlockNeuralAutoregressiveTransform(arnn))
    return dist.TransformedDistribution(self.get_base_dist(), flows)
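
This is the flow construction used by NumPyro's AutoBNAFNormal autoguide. A minimal usage sketch, assuming a recent NumPyro with SVI.run; the toy model and all numbers are made up for illustration:

import jax.numpy as jnp
from jax import random
import numpyro
import numpyro.distributions as dist
from numpyro.infer import SVI, Trace_ELBO
from numpyro.infer.autoguide import AutoBNAFNormal

def toy_model(data):
    # two latent sites, so latent_dim > 1 and the ValueError above is avoided
    loc = numpyro.sample('loc', dist.Normal(0., 1.))
    scale = numpyro.sample('scale', dist.LogNormal(0., 1.))
    numpyro.sample('obs', dist.Normal(loc, scale), obs=data)

guide = AutoBNAFNormal(toy_model, num_flows=1, hidden_factors=[8, 8])
svi = SVI(toy_model, guide, numpyro.optim.Adam(1e-3), Trace_ELBO())
svi_result = svi.run(random.PRNGKey(0), 1000, jnp.array([0.3, -0.1, 0.8]))
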
Example #2
def _get_transform(self):
    if self.latent_size == 1:
        raise ValueError('latent dim = 1. Consider using AutoDiagonalNormal instead')
    # default to two hidden layers as wide as the latent space
    hidden_dims = [self.latent_size, self.latent_size] if self._hidden_dims is None else self._hidden_dims
    flows = []
    for i in range(self.num_flows):
        if i > 0:
            # reverse the coordinate order between successive flows
            flows.append(PermuteTransform(np.arange(self.latent_size)[::-1]))
        arn = AutoregressiveNN(self.latent_size, hidden_dims,
                               permutation=np.arange(self.latent_size),
                               skip_connections=self._skip_connections,
                               nonlinearity=self._nonlinearity)
        arnn = numpyro.module('{}_arn__{}'.format(self.prefix, i), arn, (self.latent_size,))
        flows.append(InverseAutoregressiveTransform(arnn))
    return ComposeTransform(flows)
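
Unlike Example #1, this variant returns the bare ComposeTransform rather than a finished posterior; the caller wraps it into a distribution itself. A minimal sketch of that wrapping, with an ExpTransform standing in for the flow so it runs standalone (latent_size and the base distribution are illustrative):

import jax.numpy as jnp
from jax import random
import numpyro.distributions as dist
from numpyro.distributions.transforms import ExpTransform

latent_size = 4  # illustrative
base = dist.Normal(jnp.zeros(latent_size), 1.).to_event(1)
# in the guide, the ComposeTransform built above would take ExpTransform's place
posterior = dist.TransformedDistribution(base, ExpTransform())
samples = posterior.sample(random.PRNGKey(0), (10,))  # shape (10, latent_size)
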
Example #3
def _get_posterior(self):
    if self.latent_dim == 1:
        raise ValueError('latent dim = 1. Consider using AutoDiagonalNormal instead')
    # default to two hidden layers as wide as the latent space
    hidden_dims = [self.latent_dim, self.latent_dim] if self._hidden_dims is None else self._hidden_dims
    flows = []
    for i in range(self.num_flows):
        if i > 0:
            # reverse the coordinate order between successive flows
            flows.append(PermuteTransform(jnp.arange(self.latent_dim)[::-1]))
        arn = AutoregressiveNN(self.latent_dim, hidden_dims,
                               permutation=jnp.arange(self.latent_dim),
                               skip_connections=self._skip_connections,
                               nonlinearity=self._nonlinearity)
        arnn = numpyro.module('{}_arn__{}'.format(self.prefix, i), arn, (self.latent_dim,))
        flows.append(InverseAutoregressiveTransform(arnn))
    return dist.TransformedDistribution(self.get_base_dist(), flows)
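
Examples #2 and #3 are the same IAF construction from different NumPyro versions (np vs. jnp, and a bare transform vs. a TransformedDistribution); both come from the AutoIAFNormal autoguide. A minimal configuration sketch, reusing toy_model from the sketch after Example #1:

from numpyro.infer.autoguide import AutoIAFNormal

guide = AutoIAFNormal(toy_model, num_flows=3, hidden_dims=[16, 16],
                      skip_connections=True)
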
Example #4
def _get_transform(self):
    if self.latent_size == 1:
        raise ValueError(
            'latent dim = 1. Consider using AutoDiagonalNormal instead')
    flows = []
    for i in range(self.num_flows):
        if i > 0:
            # reverse the coordinate order between successive flows
            flows.append(
                PermuteTransform(np.arange(self.latent_size)[::-1]))
        # use gated residual connections on all but the last flow
        residual = "gated" if i < (self.num_flows - 1) else None
        arn = BlockNeuralAutoregressiveNN(self.latent_size,
                                          self._hidden_factors, residual)
        arnn = numpyro.module('{}_arn__{}'.format(self.prefix, i), arn,
                              (self.latent_size,))
        flows.append(BlockNeuralAutoregressiveTransform(arnn))
    return ComposeTransform(flows)
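
All four examples insert a PermuteTransform between flows. A small standalone check of what it does: given a reversed index array it flips the coordinate order, so each successive flow conditions on the variables in the opposite order.

import jax.numpy as jnp
from numpyro.distributions.transforms import PermuteTransform

perm = PermuteTransform(jnp.arange(3)[::-1])
print(perm(jnp.array([1., 2., 3.])))      # [3. 2. 1.]
print(perm.inv(jnp.array([3., 2., 1.])))  # [1. 2. 3.] -- the transform is bijective
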
The remaining code is an excerpt from NumPyro's transform test suite, picking up mid-function inside a log-det-Jacobian check:

                    # fill the upper triangle from the lower one (symmetrize)
                    matrix = matrix + np.swapaxes(matrix, -2, -1) - np.diag(np.diag(matrix))
                return transform.inv(matrix)

            expected = onp.linalg.slogdet(jax.jacobian(vec_transform)(x))[1]
            inv_expected = onp.linalg.slogdet(jax.jacobian(inv_vec_transform)(y_tril))[1]
        else:
            expected = np.log(np.abs(grad(transform)(x)))
            inv_expected = np.log(np.abs(grad(transform.inv)(y)))

        assert_allclose(actual, expected, atol=1e-6, rtol=1e-6)
        assert_allclose(actual, -inv_expected, atol=1e-6, rtol=1e-6)
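
The pair of asserts encodes the inverse function theorem: for a bijection f, log|det J_f(x)| = -log|det J_{f^-1}(f(x))|, so the forward and inverse log-det-Jacobians must cancel. A one-dimensional sanity sketch of the same identity (standalone, not part of the test file):

import jax
import jax.numpy as jnp

f = jnp.exp                                       # a simple bijection
x0 = 0.7
fwd = jnp.log(jnp.abs(jax.grad(f)(x0)))           # log|f'(x0)| = x0
inv = jnp.log(jnp.abs(jax.grad(jnp.log)(f(x0))))  # log|(f^-1)'(f(x0))| = -x0
assert jnp.allclose(fwd, -inv)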


# NB: skip transforms which are tested in `test_biject_to`
@pytest.mark.parametrize('transform, event_shape', [
    (PermuteTransform(np.array([3, 0, 4, 1, 2])), (5,)),
    (PowerTransform(2.), ()),
    (MultivariateAffineTransform(np.array([1., 2.]), np.array([[0.6, 0.], [1.5, 0.4]])), (2,))
])
@pytest.mark.parametrize('batch_shape', [(), (1,), (3,), (6,), (3, 1), (1, 3), (5, 3)])
def test_bijective_transforms(transform, event_shape, batch_shape):
    shape = batch_shape + event_shape
    rng_key = random.PRNGKey(0)
    x = biject_to(transform.domain)(random.normal(rng_key, shape))
    y = transform(x)

    # test codomain
    assert_array_equal(transform.codomain(y), np.ones(batch_shape))

    # test inv
    z = transform.inv(y)