Example #1
0
 def get_inference_data(self, data):
     """Wrap the fitted Pyro object carried by *data* as ArviZ InferenceData.

     Note: assumes ``data.obj`` holds a fitted Pyro MCMC result accepted by
     ``from_pyro`` — confirm against the caller.
     """
     fitted = data.obj
     return from_pyro(posterior=fitted)
Example #2
0
        tavg_norm_noauto_3d, tavg_raw_all_3d, tavg_raw_noauto_3d
    ] = pickle.load(f)

# Configure the model object `tm` (defined/loaded earlier in the file).
tm.mtype = 'group'
tm.target = 'self'  # 'self','targ','avg'
tm.dtype = 'norm'  # 'norm','raw'
tm.auto = 'all'  # 'noauto','all'
tm.stickbreak = False  # stick-breaking construction disabled
tm.optim = pyro.optim.Adam({'lr': 0.0005, 'betas': [0.8, 0.99]})
tm.elbo = TraceEnum_ELBO(max_plate_nesting=1)

# Number of mixture components.
tm.K = 3

pyro.clear_param_store()  # discard parameters from any previous run
pyro.set_rng_seed(99)  # fixed seed for reproducibility

# #declare dataset to be modeled
# dtname = 't{}_{}_{}_3d'.format(target, dtype, auto)
# print("running MCMC with: {}".format(dtname))
# data = globals()[dtname]

nuts_kernel = NUTS(tm.model)

# Dataset name hard-codes the target/dtype/auto choices set above
# ('self', 'norm', 'all'); the commented block shows the dynamic alternative.
mcmc = MCMC(nuts_kernel, num_samples=5000, warmup_steps=1000)
mcmc.run(tself_norm_all_3d)

posterior_samples = mcmc.get_samples()

abc = az.from_pyro(mcmc, log_likelihood=True)
# NOTE(review): az.waic is normally applied to the InferenceData object
# itself (e.g. az.waic(abc)), not to a single posterior variable — confirm
# that passing abc.posterior.weights is intentional.
az.stats.waic(abc.posterior.weights)
Example #3
0

def sample_prior_pred(model, numofsamples, y_obs):
    """Draw *numofsamples* prior-predictive samples from *model* and
    return a summary table for the "y_obs" site.

    Note: the *y_obs* argument is not used here — presumably kept for
    signature symmetry with sample_posterior_pred; verify against callers.
    """
    predictive = Predictive(model, {}, num_samples=numofsamples)
    return get_summary_table(predictive, sites=["y_obs"])


np.random.seed(0)
# Observed data: 20 Bernoulli(0.7) draws.
Y = torch.Tensor(stats.bernoulli(0.7).rvs(20))

nuts_kernel = NUTS(partial_pooled)
mcmc = MCMC(nuts_kernel, 1000, num_chains=1)
mcmc.run(Y)
trace = mcmc.get_samples()
# NOTE(review): az.from_pyro normally expects the fitted MCMC object (as in
# the example above), not the raw samples dict — confirm `trace` is intended.
idata = az.from_pyro(trace)
observedY = partial_pooled(Y)
pred_dist = (sample_prior_pred(partial_pooled, 1000, observedY),
             sample_posterior_pred(partial_pooled, idata, observedY))

fig, ax = plt.subplots()
# BUG FIX: original read `pred_dists[0]`, an undefined name (NameError at
# runtime); the tuple built above is bound as `pred_dist`.
az.plot_dist(pred_dist[0].sum(1),
             hist_kwargs={
                 "color": "0.5",
                 "bins": range(0, 22)
             })
ax.set_title("Prior predictive distribution", fontweight='bold')
ax.set_xlim(-1, 21)
ax.set_ylim(0, 0.15)
ax.set_xlabel("number of success")