Example #1
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return sum_rightmost(
         np.broadcast_to(np.log(np.abs(self.scale)), np.shape(x)),
         self.event_dim)
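Example #1 broadcasts log|scale| to the shape of x and sums over the event dimensions, which is the log abs det Jacobian of the affine map y = loc + scale * x. A minimal usage sketch, assuming NumPyro's AffineTransform (the class this method appears to belong to):

import jax.numpy as jnp
from numpyro.distributions.transforms import AffineTransform

t = AffineTransform(loc=0.0, scale=2.0)
x = jnp.ones((3,))
y = t(x)
# log|2| broadcast to the shape of x -> three values of ~0.693
print(t.log_abs_det_jacobian(x, y))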
Example #2
 def __init__(self, logits=None, validate_args=None):
     self.logits = logits
     super(BernoulliLogits,
           self).__init__(batch_shape=jnp.shape(self.logits),
                          validate_args=validate_args)
Example #3
 def support(self):
     return constraints.integer_interval(0, jnp.shape(self.logits)[-1] - 1)
Example #4
 def shape_i(x):
     # NB: `i` is a free variable captured from the enclosing scope
     # (e.g. a factory function that fixes which axis to read).
     return jnp.shape(x)[i]
Example #5
def _predictive(
        rng_key,
        model,
        posterior_samples,
        batch_shape,
        return_sites=None,
        infer_discrete=False,
        parallel=True,
        model_args=(),
        model_kwargs={},
):
    masked_model = numpyro.handlers.mask(model, mask=False)
    if infer_discrete:
        # inspect the model to get some structure
        rng_key, subkey = random.split(rng_key)
        batch_ndim = len(batch_shape)
        prototype_sample = tree_map(
            lambda x: jnp.reshape(x, (-1, ) + jnp.shape(x)[batch_ndim:])[0],
            posterior_samples,
        )
        prototype_trace = trace(
            seed(substitute(masked_model, prototype_sample),
                 subkey)).get_trace(*model_args, **model_kwargs)
        first_available_dim = -_guess_max_plate_nesting(prototype_trace) - 1

    def single_prediction(val):
        rng_key, samples = val
        if infer_discrete:
            from numpyro.contrib.funsor import config_enumerate
            from numpyro.contrib.funsor.discrete import _sample_posterior

            model_trace = prototype_trace
            temperature = 1
            pred_samples = _sample_posterior(
                config_enumerate(condition(model, samples)),
                first_available_dim,
                temperature,
                rng_key,
                *model_args,
                **model_kwargs,
            )
        else:
            model_trace = trace(
                seed(substitute(masked_model, samples),
                     rng_key)).get_trace(*model_args, **model_kwargs)
            pred_samples = {
                name: site["value"]
                for name, site in model_trace.items()
            }

        if return_sites is not None:
            if return_sites == "":
                sites = {
                    k
                    for k, site in model_trace.items()
                    if site["type"] != "plate"
                }
            else:
                sites = return_sites
        else:
            sites = {
                k
                for k, site in model_trace.items()
                if (site["type"] == "sample" and k not in samples) or (
                    site["type"] == "deterministic")
            }
        return {
            name: value
            for name, value in pred_samples.items() if name in sites
        }

    num_samples = int(np.prod(batch_shape))
    if num_samples > 1:
        rng_key = random.split(rng_key, num_samples)
    rng_key = rng_key.reshape((*batch_shape, 2))
    chunk_size = num_samples if parallel else 1
    return soft_vmap(single_prediction, (rng_key, posterior_samples),
                     len(batch_shape), chunk_size)
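This private helper appears to be the machinery behind NumPyro's public Predictive utility. A minimal usage sketch of the public API (the model and shapes are illustrative):

import jax.numpy as jnp
from jax import random
import numpyro
import numpyro.distributions as dist
from numpyro.infer import Predictive

def model(x=None):
    mu = numpyro.sample("mu", dist.Normal(0.0, 1.0))
    return numpyro.sample("obs", dist.Normal(mu, 1.0), obs=x)

# prior predictive: draw 100 samples for every sample site
predictive = Predictive(model, num_samples=100)
samples = predictive(random.PRNGKey(0))
print({k: jnp.shape(v) for k, v in samples.items()})  # {'mu': (100,), 'obs': (100,)}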
Example #6
File: poisson.py Project: lmmx/mcx
 def __init__(self, lmbda):
     self.event_shape = ()
     self.batch_shape = broadcast_batch_shape(np.shape(lmbda))
     self.lmbda = lmbda
Example #7
File: stan.py Project: rlouf/mcx
 def init(chain_state: HMCState) -> MassMatrixAdaptationState:
     """Initialize the mass matrix adaptation algorithm."""
     n_dims = jnp.shape(chain_state.position)[-1]
     mm_state = mm_init(n_dims)
     return mm_state
Example #8
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return jnp.broadcast_to(jnp.log(jnp.abs(self.scale)), jnp.shape(x))
Example #9
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     # Ref: http://web.mit.edu/18.325/www/handouts/handout2.pdf page 13
     n = jnp.shape(x)[-1]
     order = -jnp.arange(n, 0, -1)
     return -n * jnp.log(2) + jnp.sum(order * jnp.log(jnp.diagonal(y, axis1=-2, axis2=-1)), axis=-1)
Example #10
def shape(x):
    size = numpy.shape(x)
    if len(size) == 0:
        return (1, )
    else:
        return size
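This wrapper differs from numpy.shape only for scalars: a 0-d input yields (1,) instead of the empty tuple. A quick sketch of the behaviour, using the shape wrapper defined above:

import numpy
print(numpy.shape(3.0))            # () -- plain numpy returns an empty tuple for a scalar
print(shape(3.0))                  # (1,) -- the wrapper never returns an empty shape
print(shape(numpy.zeros((2, 3))))  # (2, 3) -- arrays pass through unchanged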
Example #11
 def __init__(self, p):
     self.event_shape = ()
     self.batch_shape = broadcast_batch_shape(np.shape(p))
     self.p = p * 1.0  # will fail if p is int
Example #12
        x = nn.sigmoid(z)
        x = jnp.reshape(x, (x.shape[0], ) + self.input_shape)
        return x


# `ae` is a detached module, which has no variables.
ae = AutoEncoder(encoder_widths=(32, 32, 32),
                 decoder_widths=(32, 32, 32),
                 input_shape=(28, 28, 1))

# `ae.init` materializes the variables for `ae` by running an example
# input through the model, creating the submodules defined lazily in setup().
params = ae.init({'params': random.PRNGKey(42)}, jnp.ones((1, 28, 28, 1)))

# Now you can use `ae` as a normal object, calling any methods defined on AutoEncoder
print("reconstruct", jnp.shape(ae.apply(params, jnp.ones((1, 28, 28, 1)))))
print("encoder",
      jnp.shape(ae.apply(params, jnp.ones((1, 28, 28, 1)), method=ae.encode)))

# `ae.variables` is a frozen dict that looks like
# {'params': {"decoder": {"Dense_0": {"bias": ..., "kernel": ...}, ...}}
print("var shapes", jax.tree_map(jnp.shape, params))

# TODO(avital, levskaya): resurrect this example once interactive api is restored.

# You can access submodules defined in setup(), they are just references on
# the autoencoder instance
# encoder = ae.encoder
# print("encoder var shapes", jax.tree_map(jnp.shape, encoder.variables))

# # You can also access submodules that were defined in-line.
Example #13
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return np.full(np.shape(x)[:-1], 0.)
Example #14
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return np.full(
         np.shape(x) if self.event_dim == 0 else np.shape(x)[:-1], 0.)
Example #15
 def __init__(self, log_factor, validate_args=None):
     batch_shape = jnp.shape(log_factor)
     event_shape = (0,)  # This satisfies .size == 0.
     self.log_factor = log_factor
     super(Unit, self).__init__(batch_shape, event_shape, validate_args=validate_args)
Example #16
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     # NB: see derivation in LKJCholesky implementation
     n = jnp.shape(x)[-1]
     order = -jnp.arange(n - 1, -1, -1)
     return jnp.sum(order * jnp.log(jnp.diagonal(y, axis1=-2, axis2=-1)), axis=-1)
Example #17
 def log_prob(self, value):
     shape = lax.broadcast_shapes(self.batch_shape, jnp.shape(value)[:-1])
     return jnp.broadcast_to(self.log_factor, shape)
Example #18
 def _inverse(self, y):
     y = y - self.loc
     original_shape = jnp.shape(y)
     yt = jnp.reshape(y, (-1, original_shape[-1])).T
     xt = solve_triangular(self.scale_tril, yt, lower=True)
     return jnp.reshape(xt.T, original_shape)
Example #19
 def __init__(self, p):
     self.event_shape = ()
     self.batch_shape = jnp.shape(p)
     self.p = p * 1.0  # will fail if p is int
Example #20
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return jnp.broadcast_to(jnp.log(jnp.diagonal(self.scale_tril, axis1=-2, axis2=-1)).sum(-1),
                             jnp.shape(x)[:-1])
Example #21
 def shape(x):
     return jnp.shape(x)
Example #22
 def log_abs_det_jacobian(self, x, y, intermediates=None):
     return jnp.zeros(jnp.shape(x)[:-1])
Example #23
    def body_fn(state):
        i, key, _, _ = state
        key, subkey = random.split(key)

        if radius is None or prototype_params is None:
            # XXX: we don't want to apply enum to draw latent samples
            model_ = model
            if enum:
                from numpyro.contrib.funsor import enum as enum_handler

                if isinstance(model, substitute) and isinstance(
                        model.fn, enum_handler):
                    model_ = substitute(model.fn.fn, data=model.data)
                elif isinstance(model, enum_handler):
                    model_ = model.fn

            # Wrap model in a `substitute` handler to initialize from `init_loc_fn`.
            seeded_model = substitute(seed(model_, subkey),
                                      substitute_fn=init_strategy)
            model_trace = trace(seeded_model).get_trace(
                *model_args, **model_kwargs)
            constrained_values, inv_transforms = {}, {}
            for k, v in model_trace.items():
                if (v["type"] == "sample" and not v["is_observed"]
                        and not v["fn"].support.is_discrete):
                    constrained_values[k] = v["value"]
                    with helpful_support_errors(v):
                        inv_transforms[k] = biject_to(v["fn"].support)
            params = transform_fn(
                inv_transforms,
                {k: v
                 for k, v in constrained_values.items()},
                invert=True,
            )
        else:  # this branch doesn't require tracing the model
            params = {}
            for k, v in prototype_params.items():
                if k in init_values:
                    params[k] = init_values[k]
                else:
                    params[k] = random.uniform(subkey,
                                               jnp.shape(v),
                                               minval=-radius,
                                               maxval=radius)
                    key, subkey = random.split(key)

        potential_fn = partial(potential_energy,
                               model,
                               model_args,
                               model_kwargs,
                               enum=enum)
        if validate_grad:
            if forward_mode_differentiation:
                pe = potential_fn(params)
                z_grad = jacfwd(potential_fn)(params)
            else:
                pe, z_grad = value_and_grad(potential_fn)(params)
            z_grad_flat = ravel_pytree(z_grad)[0]
            is_valid = jnp.isfinite(pe) & jnp.all(jnp.isfinite(z_grad_flat))
        else:
            pe = potential_fn(params)
            is_valid = jnp.isfinite(pe)
            z_grad = None

        return i + 1, key, (params, pe, z_grad), is_valid
Example #24
 def log_prob(self, value):
     shape = lax.broadcast_shapes(self.batch_shape,
                                  jnp.shape(value)[:max(jnp.ndim(value) - self.event_dim, 0)])
     log_prob = self.base_dist.log_prob(value)
     return jnp.broadcast_to(log_prob, shape)
Example #25
def discrete_gibbs_fn(model,
                      model_args=(),
                      model_kwargs={},
                      *,
                      random_walk=False,
                      modified=False):
    """
    [EXPERIMENTAL INTERFACE]

    Returns a gibbs_fn to be used in :class:`HMCGibbs`, which works for discrete latent sites
    with enumerate support. The site update order is randomly permuted at each step.

    Note that discrete latent sites that are not specified in the constructor of
    :class:`HMCGibbs` will be marginalized out by default (if they have enumerate support).

    :param callable model: a callable with NumPyro primitives. This should be the same model
        as the one used in the `inner_kernel` of :class:`HMCGibbs`.
    :param tuple model_args: Arguments provided to the model.
    :param dict model_kwargs: Keyword arguments provided to the model.
    :param bool random_walk: If False, Gibbs sampling will be used to draw a sample from the
        conditional `p(gibbs_site | remaining sites)`. Otherwise, a sample will be drawn uniformly
        from the domain of `gibbs_site`.
    :param bool modified: whether to use a modified proposal, as suggested in reference [1], which
        always proposes a new state for the current Gibbs site.
        The modified scheme appears in the literature under the name "modified Gibbs sampler" or
        "Metropolised Gibbs sampler".
    :return: a callable `gibbs_fn` to be used in :class:`HMCGibbs`

    **References:**

    1. *Peskun's theorem and a modified discrete-state Gibbs sampler*,
       Liu, J. S. (1996)

    **Example**

    .. doctest::

        >>> from jax import random
        >>> import jax.numpy as jnp
        >>> import numpyro
        >>> import numpyro.distributions as dist
        >>> from numpyro.infer import MCMC, NUTS, HMCGibbs, discrete_gibbs_fn
        ...
        >>> def model(probs, locs):
        ...     c = numpyro.sample("c", dist.Categorical(probs))
        ...     numpyro.sample("x", dist.Normal(locs[c], 0.5))
        ...
        >>> probs = jnp.array([0.15, 0.3, 0.3, 0.25])
        >>> locs = jnp.array([-2, 0, 2, 4])
        >>> gibbs_fn = discrete_gibbs_fn(model, (probs, locs))
        >>> kernel = HMCGibbs(NUTS(model), gibbs_fn, gibbs_sites=["c"])
        >>> mcmc = MCMC(kernel, 1000, 100000, progress_bar=False)
        >>> mcmc.run(random.PRNGKey(0), probs, locs)
        >>> mcmc.print_summary()  # doctest: +SKIP

    """
    # NB: all of the information such as `model`, `model_args`, `model_kwargs`
    # can be accessed from HMCGibbs.sample but we require them here to
    # simplify the api of `gibbs_fn`
    prototype_trace = trace(seed(model, rng_seed=0)).get_trace(
        *model_args, **model_kwargs)
    support_sizes = {
        name: jnp.broadcast_to(site["fn"].enumerate_support(False).shape[0],
                               jnp.shape(site["value"]))
        for name, site in prototype_trace.items() if site["type"] == "sample"
        and site["fn"].has_enumerate_support and not site["is_observed"]
    }
    max_plate_nesting = _guess_max_plate_nesting(prototype_trace)
    if random_walk:
        if modified:
            proposal_fn = partial(_discrete_modified_rw_proposal, stay_prob=0.)
        else:
            proposal_fn = _discrete_rw_proposal
    else:
        if modified:
            proposal_fn = partial(_discrete_modified_gibbs_proposal,
                                  stay_prob=0.)
        else:
            proposal_fn = _discrete_gibbs_proposal

    def gibbs_fn(rng_key, gibbs_sites, hmc_sites):
        # convert to unconstrained values
        z_hmc = {
            k: biject_to(prototype_trace[k]["fn"].support).inv(v)
            for k, v in hmc_sites.items()
            if k in prototype_trace and prototype_trace[k]["type"] == "sample"
        }
        use_enum = len(set(support_sizes) - set(gibbs_sites)) > 0
        wrapped_model = _wrap_model(model)
        if use_enum:
            from numpyro.contrib.funsor import config_enumerate, enum

            wrapped_model = enum(config_enumerate(wrapped_model),
                                 -max_plate_nesting - 1)

        def potential_fn(z_discrete):
            model_kwargs_ = model_kwargs.copy()
            model_kwargs_["_gibbs_sites"] = z_discrete
            return potential_energy(wrapped_model,
                                    model_args,
                                    model_kwargs_,
                                    z_hmc,
                                    enum=use_enum)

        # get support_sizes of gibbs_sites
        support_sizes_flat, _ = ravel_pytree(
            {k: support_sizes[k]
             for k in gibbs_sites})
        num_discretes = support_sizes_flat.shape[0]

        rng_key, rng_permute = random.split(rng_key)
        # use the dedicated subkey for the permutation so the key is not reused below
        idxs = random.permutation(rng_permute, jnp.arange(num_discretes))

        def body_fn(i, val):
            idx = idxs[i]
            support_size = support_sizes_flat[idx]
            rng_key, z, pe = val
            rng_key, z_new, pe_new, log_accept_ratio = proposal_fn(
                rng_key,
                z,
                pe,
                potential_fn=potential_fn,
                idx=idx,
                support_size=support_size)
            rng_key, rng_accept = random.split(rng_key)
            # u ~ Uniform(0, 1), u < accept_ratio => -log(u) > -log_accept_ratio
            # and -log(u) ~ exponential(1)
            z, pe = cond(
                random.exponential(rng_accept) > -log_accept_ratio,
                (z_new, pe_new), identity, (z, pe), identity)
            return rng_key, z, pe

        init_val = (rng_key, gibbs_sites, potential_fn(gibbs_sites))
        _, gibbs_sites, _ = fori_loop(0, num_discretes, body_fn, init_val)
        return gibbs_sites

    return gibbs_fn
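The modified=True proposal described in the docstring above (the Metropolised Gibbs sampler of Liu, 1996) always proposes a state different from the current one. A standalone NumPy sketch of that proposal for a single discrete site with known full-conditional probabilities (an illustration only, not NumPyro's internal _discrete_modified_gibbs_proposal):

import numpy as np

def metropolised_gibbs_step(rng, current, cond_probs):
    # Full conditional p(k | rest) over the K states of one discrete site.
    p = np.asarray(cond_probs, dtype=float)
    # Unlike plain Gibbs, exclude the current state: q(j | i) = p_j / (1 - p_i).
    q = p.copy()
    q[current] = 0.0
    q /= q.sum()
    proposal = rng.choice(len(p), p=q)
    # Metropolis-Hastings correction keeps p(k | rest) invariant:
    # accept with probability min(1, (1 - p_i) / (1 - p_j)).
    accept_prob = min(1.0, (1.0 - p[current]) / (1.0 - p[proposal]))
    return proposal if rng.uniform() < accept_prob else current

# one update for a 4-state site, mirroring the `probs` used in the doctest above
rng = np.random.default_rng(0)
new_state = metropolised_gibbs_step(rng, current=0, cond_probs=[0.15, 0.3, 0.3, 0.25])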
Example #26
 def log_prob(self, value):
     batch_shape = jnp.shape(value)[:jnp.ndim(value) - len(self.event_shape)]
     batch_shape = lax.broadcast_shapes(batch_shape, self.batch_shape)
     return jnp.zeros(batch_shape)
Example #27
 def __init__(self, logits, total_count=1, validate_args=None):
     self.logits, self.total_count = promote_shapes(logits, total_count)
     batch_shape = lax.broadcast_shapes(jnp.shape(logits),
                                        jnp.shape(total_count))
     super(BinomialLogits, self).__init__(batch_shape=batch_shape,
                                          validate_args=validate_args)
Example #28
 def _validate_sample(self, value):
     mask = super(ImproperUniform, self)._validate_sample(value)
     batch_dim = jnp.ndim(value) - len(self.event_shape)
     if batch_dim < jnp.ndim(mask):
         mask = jnp.all(jnp.reshape(mask, jnp.shape(mask)[:batch_dim] + (-1,)), -1)
     return mask
Example #29
 def __init__(self, rate, validate_args=None):
     self.rate = rate
     super(Poisson, self).__init__(jnp.shape(rate),
                                   validate_args=validate_args)
Example #30
 def apply_fun(params, inputs, **kwargs):
     del kwargs
     pe = params
     symbol_size = np.shape(inputs)[1]
     return inputs + pe[:, :symbol_size]
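Example #30 adds a precomputed positional-encoding table to the inputs, trimming the table to the current sequence length (axis 1). A small shape sketch with hypothetical dimensions:

import numpy as np

pe = np.zeros((1, 512, 64))                   # (1, max_len, d_model) positional table
inputs = np.zeros((8, 100, 64))               # (batch, seq_len, d_model)
out = inputs + pe[:, :np.shape(inputs)[1]]    # slice to seq_len, broadcast over batch
print(out.shape)                              # (8, 100, 64)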