Example #1
def main() -> None:

    x = _load_dataset()

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_infer, rng_key_posterior = random.split(
        rng_key, 4)

    # Prior prediction
    predictive = infer.Predictive(model, num_samples=10)
    prior_samples = predictive(rng_key_prior, None, *x.shape, future_steps=20)

    # Inference
    kernel = infer.NUTS(model)
    mcmc = infer.MCMC(kernel, num_warmup=100, num_samples=100)
    mcmc.run(rng_key_infer, x)
    posterior_samples = mcmc.get_samples()

    # Posterior prediction
    predictive = infer.Predictive(model, posterior_samples=posterior_samples)
    posterior_predictive = predictive(rng_key_posterior,
                                      None,
                                      *x.shape,
                                      future_steps=20)

    _save_results(x, prior_samples, posterior_samples, posterior_predictive,
                  len(x))
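The `model`, `_load_dataset`, and `_save_results` helpers are not shown. A minimal sketch of a model whose signature is compatible with the calls above — the site names, priors, and AR(1) structure are assumptions, not the original code:

import jax.numpy as jnp
import numpyro
import numpyro.distributions as dist


def model(x=None, seq_len=0, batch=1, x_dim=1, future_steps=0):
    # Recover the dimensions from the observed series when it is given,
    # so that `mcmc.run(rng_key_infer, x)` works without extra arguments.
    if x is not None:
        seq_len, batch, x_dim = x.shape

    coef = numpyro.sample("coef", dist.Normal(0.0, 1.0))
    sigma = numpyro.sample("sigma", dist.HalfNormal(1.0))

    level = jnp.zeros((batch, x_dim))
    for t in range(seq_len + future_steps):
        # Observed inside the training window, forecast beyond it.
        obs = x[t] if x is not None and t < seq_len else None
        level = numpyro.sample(f"x_{t}", dist.Normal(coef * level, sigma), obs=obs)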
Example #2
def main() -> None:

    x = jnp.concatenate([
        np.random.randn(10),
        np.random.randn(10) + 2,
        np.random.randn(10) - 1,
        np.random.randn(10) + 1,
    ])
    x = x[:, None, None]

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_infer, rng_key_posterior = random.split(
        rng_key, 4)

    # Prior prediction
    predictive = infer.Predictive(model, num_samples=10)
    prior_samples = predictive(rng_key_prior,
                               future_steps=20,
                               batch=10,
                               x_dim=1)

    # Inference
    kernel = infer.NUTS(model)
    mcmc = infer.MCMC(kernel, num_warmup=100, num_samples=100)
    mcmc.run(rng_key_infer, x)
    posterior_samples = mcmc.get_samples()

    # Posterior prediction
    predictive = infer.Predictive(model, posterior_samples=posterior_samples)
    posterior_predictive = predictive(rng_key_posterior, None, *x.shape, 10)

    _save_results(x, prior_samples, posterior_samples, posterior_predictive)
Example #3
def main() -> None:

    # Data
    x, t = _load_data()
    num_train = int(len(x) * 0.8)
    x_train = x[:num_train]
    t_train = t[:num_train]

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_infer, rng_key_posterior = random.split(
        rng_key, 4)

    # Prior prediction
    predictive = infer.Predictive(model, num_samples=10)
    prior_samples = predictive(rng_key_prior, t)

    # Inference
    kernel = infer.NUTS(model)
    mcmc = infer.MCMC(kernel, num_warmup=100, num_samples=100)
    mcmc.run(rng_key_infer, t_train, x_train)
    posterior_samples = mcmc.get_samples()

    # Posterior prediction
    predictive = infer.Predictive(
        model,
        posterior_samples=posterior_samples,
        return_sites=["x", "s0", "z", "trend", "weight"],
    )
    posterior_predictive = predictive(rng_key_posterior, t)

    _save_results(x, prior_samples, posterior_samples, posterior_predictive,
                  num_train)
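`return_sites` restricts which sample sites Predictive returns; the result is a dict of arrays keyed by site name, with the number of posterior draws as the leading axis. A quick way to inspect what came back (not part of the original example):

for name, value in posterior_predictive.items():
    print(name, value.shape)  # (num_samples, ...) for each requested site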
Example #4
def main() -> None:

    # Data
    x, betas, covariates = _load_data()
    num_train = int(len(x) * 0.8)
    x_train = x[:num_train]
    c_train = covariates[:num_train]

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_infer, rng_key_posterior = random.split(
        rng_key, 4)

    # Prior prediction
    predictive = infer.Predictive(model, num_samples=10)
    prior_samples = predictive(rng_key_prior, c_train)

    # Inference
    kernel = infer.NUTS(model)
    mcmc = infer.MCMC(kernel, num_warmup=100, num_samples=100)
    mcmc.run(rng_key_infer, c_train, x_train)
    posterior_samples = mcmc.get_samples()

    # Posterior prediction
    posterior_given = posterior_samples.copy()
    posterior_given.pop("weight")
    predictive = infer.Predictive(model,
                                  posterior_samples=posterior_given,
                                  return_sites=["x", "weight"])
    posterior_predictive = predictive(rng_key_posterior, covariates)

    _save_results(x, betas, prior_samples, posterior_samples,
                  posterior_predictive, num_train)
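Dropping "weight" from the posterior samples makes Predictive re-draw that site instead of reusing the stored draws. This is needed when the site's shape depends on the data length: inference used `c_train`, but prediction runs over the full `covariates`. A hypothetical model with that property (the real model is not shown; names and priors here are assumptions):

import numpyro
import numpyro.distributions as dist


def model(covariates, x=None):
    seq_len, num_covs = covariates.shape
    sigma = numpyro.sample("sigma", dist.HalfNormal(1.0))
    # One weight vector per time step, so its shape follows len(covariates);
    # posterior draws made on c_train cannot be reused for the full horizon.
    with numpyro.plate("time", seq_len):
        weight = numpyro.sample(
            "weight", dist.Normal(0.0, 1.0).expand([num_covs]).to_event(1))
    mean = (covariates * weight).sum(-1)
    numpyro.sample("x", dist.Normal(mean, sigma), obs=x)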
Example #5
def main() -> None:

    _, y, x_missing = _load_dataset()
    train_len = int(len(y) * 0.8)
    x_train = x_missing[:train_len]
    y_train = y[:train_len]

    num_chains = 1
    numpyro.set_platform("cpu")
    numpyro.set_host_device_count(num_chains)

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_posterior, rng_key_prior = random.split(rng_key, 3)

    predictive = infer.Predictive(bayesian_regression, num_samples=500)
    prior = predictive(rng_key_prior, x_train)

    kernel = infer.NUTS(bayesian_regression)
    mcmc = infer.MCMC(kernel,
                      num_warmup=1000,
                      num_samples=1000,
                      num_chains=num_chains)
    mcmc.run(rng_key, x_train, y_train)
    posterior_samples = mcmc.get_samples()

    predictive = infer.Predictive(bayesian_regression,
                                  posterior_samples=posterior_samples)
    posterior_predictive = predictive(rng_key_posterior, x_missing)

    _save_results(y, mcmc, prior, posterior_samples, posterior_predictive)
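`bayesian_regression` and `_load_dataset` are not shown. A minimal sketch of a regression model that fits these calls — priors and site names are assumptions:

import numpyro
import numpyro.distributions as dist


def bayesian_regression(x, y=None):
    num_features = x.shape[-1]
    # Linear coefficients and observation noise.
    theta = numpyro.sample(
        "theta", dist.Normal(0.0, 1.0).expand([num_features]).to_event(1))
    sigma = numpyro.sample("sigma", dist.HalfNormal(1.0))
    numpyro.sample("y", dist.Normal(x @ theta, sigma), obs=y)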
Example #6
    def sample_posterior_predictive(
        self,
        df: pd.DataFrame,
        hdpi: bool = False,
        hdpi_interval: float = 0.9,
        rng_key: np.ndarray = None,
    ) -> typing.Union[pd.Series, pd.DataFrame]:
        """Obtain samples from the posterior predictive.
        
        Parameters
        ----------
        df : pd.DataFrame
            Source dataframe.
        hdpi : bool
            Whether to include the lower/upper bounds of the highest posterior
            density interval. Returns a dataframe if True, a series if False.
            Default False.
        hdpi_interval : float
            Width of the highest posterior density interval. Default 0.9.
        rng_key : np.ndarray, optional
            Two-element PRNG key. A new key is split off internally if not
            provided.

        Returns
        -------
        pd.Series or pd.DataFrame
            Forecasts. A series named after the dv when hdpi is False, otherwise
            a dataframe that also contains the HDPI bounds.

        """
        # get rng key
        rng_key_ = (self.split_rand_key()
                    if rng_key is None else rng_key.astype("uint32"))

        # check for nulls
        null_cols = columns_with_null_data(self.transform(df))
        if null_cols:
            raise exceptions.NullDataFound(*null_cols)

        # sample the posterior predictive and keep the dependent variable
        predictions = infer.Predictive(self.model,
                                       self.samples_flat)(rng_key_,
                                                          df=df)[self.dv]

        if not hdpi:
            return pd.Series(predictions.mean(axis=0),
                             index=df.index,
                             name=self.dv)

        hdpi = diagnostics.hpdi(predictions, hdpi_interval)

        return pd.DataFrame(
            {
                self.dv: predictions.mean(axis=0),
                "hdpi_lower": hdpi[0, :],
                "hdpi_upper": hdpi[1, :],
            },
            index=df.index,
        )
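Hypothetical usage, assuming `m` is a fitted instance of the class this method belongs to and `new_df` is a dataframe with the same columns used for training:

# Point forecast: a Series named after the dependent variable.
point_forecast = m.sample_posterior_predictive(new_df)

# Forecast with an 80% highest posterior density interval: a DataFrame with
# columns [m.dv, "hdpi_lower", "hdpi_upper"], indexed like new_df.
interval_forecast = m.sample_posterior_predictive(new_df, hdpi=True, hdpi_interval=0.8)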
Example #7
def main(args: argparse.Namespace) -> None:

    _, y, x_missing = _load_dataset()
    batch, x_dim = x_missing.shape
    train_len = int(len(y) * 0.8)
    x_train = x_missing[:train_len]
    y_train = y[:train_len]

    numpyro.set_platform("cpu")
    numpyro.set_host_device_count(args.num_chains)
    rng_key = random.PRNGKey(1)
    rng_key, rng_key_prior, rng_key_posterior, rng_key_pca_pred = random.split(
        rng_key, 4)

    predictive = infer.Predictive(pca_regression, num_samples=500)
    prior = predictive(rng_key_prior, batch=batch, x_dim=x_dim)

    kernel = infer.NUTS(pca_regression)
    mcmc = infer.MCMC(
        kernel,
        num_warmup=args.num_warmup,
        num_samples=args.num_samples,
        num_chains=args.num_chains,
    )
    mcmc.run(rng_key_posterior, x_train, y_train)
    posterior_samples = mcmc.get_samples()

    posterior_without_z = posterior_samples.copy()
    posterior_without_z.pop("z")
    predictive = infer.Predictive(pca_regression,
                                  posterior_samples=posterior_without_z)
    posterior_predictive = predictive(rng_key_pca_pred,
                                      batch=batch,
                                      x_dim=x_dim)

    _save_results(
        y,
        mcmc,
        prior,
        posterior_samples,
        posterior_predictive,
        var_names=["phi", "eta", "theta", "sigma"],
    )
Example #8
def main() -> None:

    x = _load_data()

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_infer, rng_key_posterior = random.split(rng_key, 4)

    # Prior prediction
    predictive = infer.Predictive(model, num_samples=10)
    prior_predictive = predictive(rng_key_prior, None, *x.shape, future_steps=10)

    # Inference
    adam = optim.Adam(0.0001)
    svi = infer.SVI(model, guide, adam, infer.Trace_ELBO())
    svi_result = svi.run(rng_key_infer, 50000, x)

    # Posterior prediction
    predictive = infer.Predictive(model, params=svi_result.params, num_samples=10)
    posterior_predictive = predictive(rng_key_posterior, None, *x.shape, future_steps=10)

    _save_results(x, svi_result.params, prior_predictive, posterior_predictive, svi_result)
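Passing only `params` substitutes the learned `numpyro.param` values into `model`; latent sample sites are still drawn from their priors. If the approximate posterior over the latents lives in `guide` (the usual SVI setup), the posterior predictive would typically also pass the guide; a hedged variant of the call above:

predictive = infer.Predictive(model, guide=guide, params=svi_result.params,
                              num_samples=10)
posterior_predictive = predictive(rng_key_posterior, None, *x.shape, future_steps=10)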
Example #9
def main(args: argparse.Namespace) -> None:

    model = model_dict[args.model]

    _, fetch = load_dataset(JSB_CHORALES, split="train", shuffle=False)
    lengths, sequences = fetch()

    # Remove data dimensions that are never used, to reduce computation time
    present_notes = (sequences == 1).sum(0).sum(0) > 0
    sequences = sequences[..., present_notes]
    batch, seq_len, data_dim = sequences.shape

    rng_key = random.PRNGKey(0)
    rng_key, rng_key_prior, rng_key_pred = random.split(rng_key, 3)

    predictive = infer.Predictive(model, num_samples=10)
    prior_samples = predictive(rng_key_prior,
                               batch=batch,
                               seq_len=seq_len,
                               data_dim=data_dim,
                               future_steps=20)

    kernel = infer.NUTS(model)
    mcmc = infer.MCMC(kernel,
                      num_warmup=args.num_warmup,
                      num_samples=args.num_samples,
                      num_chains=args.num_chains)
    mcmc.run(rng_key, sequences, lengths)
    posterior_samples = mcmc.get_samples()

    predictive = infer.Predictive(model, posterior_samples)
    predictive_samples = predictive(rng_key_pred,
                                    sequences,
                                    lengths,
                                    future_steps=10)

    path = pathlib.Path("./data/hmm_enum")
    path.mkdir(parents=True, exist_ok=True)

    jnp.savez(path / "prior_samples.npz", **prior_samples)
    jnp.savez(path / "posterior_samples.npz", **posterior_samples)
    jnp.savez(path / "predictive_samples.npz", **predictive_samples)
Example #10
def main() -> None:

    rng_key = random.PRNGKey(0)

    samples = run_inference(model, rng_key)
    reparam_samples = run_inference(reparam_model, rng_key)

    predictive = infer.Predictive(reparam_model,
                                  reparam_samples,
                                  return_sites=["x", "y"])
    reparam_samples = predictive(random.PRNGKey(1))

    _plot_results(samples, reparam_samples)
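`model`, `reparam_model`, `run_inference`, and `_plot_results` are not shown. A hypothetical pair consistent with the return sites ["x", "y"], using Neal's funnel and a non-centered reparameterization — an assumption about what the original models look like:

import jax.numpy as jnp
import numpyro
import numpyro.distributions as dist
from numpyro.handlers import reparam
from numpyro.infer.reparam import LocScaleReparam


def model():
    y = numpyro.sample("y", dist.Normal(0.0, 3.0))
    numpyro.sample("x", dist.Normal(0.0, jnp.exp(y / 2.0)))


# Fully non-centered parameterization of the "x" site.
reparam_model = reparam(model, config={"x": LocScaleReparam(centered=0)})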
Example #11
def predict(
    model: Callable,
    sigma: np.ndarray,
    rng_key: np.ndarray,
    *,
    posterior_samples: Optional[Dict[str, jnp.ndarray]] = None,
    num_samples: Optional[int] = None,
) -> Dict[str, jnp.ndarray]:

    predictive = infer.Predictive(model,
                                  posterior_samples=posterior_samples,
                                  num_samples=num_samples)

    return predictive(rng_key, sigma)
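Hypothetical usage of `predict`, assuming a `model`, an observed `sigma` array, and an already-run `mcmc` object exist in the calling scope; exactly one of the two keyword arguments is normally given:

# Prior predictive: draw fresh samples from the model's priors.
prior_pred = predict(model, sigma, random.PRNGKey(0), num_samples=100)

# Posterior predictive: condition on draws obtained from MCMC.
posterior_pred = predict(
    model, sigma, random.PRNGKey(1), posterior_samples=mcmc.get_samples())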