# THE MODEL.
# Bayesian model comparison via a discrete model-index variable: index 0
# selects a logistic transform of a Normal prior for theta, index 1 an
# exponential transform of a Gamma prior.
# NOTE(review): `pm.exp`/`pm.switch`/`pm.eq` and `observed=y` (reusing the
# data variable's name) follow early PyMC3 example style -- confirm against
# the installed PyMC version.
with pm.Model() as model:
    # Hyperprior on model index:
    model_index = pm.DiscreteUniform('model_index', lower=0, upper=1)
    # Prior
    nu = pm.Normal('nu', mu=0, tau=0.1)  # it is possible to use tau or sd
    eta = pm.Gamma('eta', .1, .1)
    theta0 = 1 / (1 + pm.exp(-nu))  # theta from model index 0
    theta1 = pm.exp(-eta)           # theta from model index 1
    theta = pm.switch(pm.eq(model_index, 0), theta0, theta1)
    # Likelihood
    y = pm.Bernoulli('y', p=theta, observed=y)
    # Sampling
    start = pm.find_MAP()
    # Metropolis for the continuous parameters, categorical Gibbs-style step
    # for the discrete model index.
    step1 = pm.Metropolis(model.vars[1:])
    step2 = pm.ElemwiseCategoricalStep(var=model_index, values=[0, 1])
    trace = pm.sample(10000, [step1, step2], start=start, progressbar=False)

# EXAMINE THE RESULTS.
burnin = 1000  # samples dropped from the start of the chain
thin = 5       # keep every 5th sample

## Print summary for each trace
#pm.summary(trace[burnin::thin])
#pm.summary(trace)

## Check for mixing and autocorrelation
#pm.autocorrplot(trace[burnin::thin], vars =[nu, eta])
#pm.autocorrplot(trace, vars =[nu, eta])

## Plot KDE and sampled values for each parameter.
# Three-component Gaussian mixture: cluster means, a symmetry-breaking
# ordering potential, shared measurement noise, and a latent cluster label
# per observation.
# NOTE(review): the statements before `with model:` appear to continue an
# enclosing `with pm.Model() as model:` block whose header is outside this
# chunk -- they must execute inside that model context.

# cluster centers
means = pm.Normal('means', mu=[0, 0, 0], sd=15, shape=k)

# break symmetry: add a -inf log-potential for label orderings we disallow
order_means_potential = pm.Potential(
    'order_means_potential',
    tt.switch(np.dot(means[1] - means[0],
                     np.transpose(means[1] - means[0])) < 0, -np.inf, 0)
    + tt.switch(means[2] - means[1] < 0, -np.inf, 0))

# measurement error
sd = pm.Uniform('sd', lower=0, upper=20)

# latent cluster of each observation
category = pm.Categorical('category', p=p, shape=ndata)

# likelihood for each observed value
points = pm.Normal('obs', mu=means[category], sd=sd, observed=data)

with model:
    # Metropolis for the continuous parameters, categorical step for the
    # discrete per-point cluster labels.
    step1 = pm.Metropolis(vars=[p, sd, means])
    step2 = pm.ElemwiseCategoricalStep(vars=[category], values=[0, 1, 2])
    tr = pm.sample(10000, step=[step1, step2])

pm.plots.traceplot(tr, ['p', 'sd', 'means'])
# plt.hist(data)
plt.show()
# Hierarchical Beta-Binomial model comparison: per-subject accuracy theta is
# drawn from one of two Beta priors, selected by a discrete model index.
# NOTE(review): this chunk continues an enclosing model block -- `mu`,
# `kappa1`, `a_Beta0`, `b_Beta0`, `model_index`, `cond_of_subj`, `n_subj`,
# `n_trl_of_subj` and `n_corr_of_subj` are defined outside this view.
a_Beta1 = mu[cond_of_subj] * kappa1[cond_of_subj]
b_Beta1 = (1 - mu[cond_of_subj]) * kappa1[cond_of_subj]

# Prior on theta
theta0 = pm.Beta('theta0', a_Beta0, b_Beta0, shape=n_subj)
theta1 = pm.Beta('theta1', a_Beta1, b_Beta1, shape=n_subj)
# if model_index == 0 then sample from theta1 else sample from theta0
theta = pm.switch(pm.eq(model_index, 0), theta1, theta0)

# Likelihood:
y = pm.Binomial('y', p=theta, n=n_trl_of_subj, observed=n_corr_of_subj)

# Sampling
start = pm.find_MAP()
# One Metropolis sampler per free variable (skipping the first, the discrete
# model index, which gets its own categorical step).
steps = [pm.Metropolis([i]) for i in model.unobserved_RVs[1:]]
steps.append(pm.ElemwiseCategoricalStep(var=model_index, values=[0, 1]))
trace = pm.sample(50000, steps, start=start, progressbar=False)

# EXAMINE THE RESULTS.
burnin = 1000  # samples dropped from the start of the chain
thin = 1       # no thinning

# Posterior probability of each model index from the chain of sampled indices.
model_idx_sample = trace['model_index'][burnin::thin]
pM1 = sum(model_idx_sample == 1) / len(model_idx_sample)
pM2 = 1 - pM1

plt.figure(figsize=(15, 15))
plt.subplot2grid((5, 4), (0, 0), colspan=4)
plt.plot(model_idx_sample,
         label='p(M1|D) = %.3f ; p(M2|D) = %.3f' % (pM1, pM2))
plt.xlabel('Step in Markov Chain')
# PyMC port of the BUGS model below: infer a rate theta and a discrete
# count n from m binomial observations k.
# for (i in 1:m){
#     k[i] ~ dbin(theta,n)
# }
# # Priors on Rate Theta and Number n
# theta ~ dbeta(1,1)
# n ~ dcat(p[])
# for (i in 1:nmax){
#     p[i] <- 1/nmax
# }
# }
with model:
    # Priors on rate theta and number n
    theta = pm.Beta('theta', alpha=1, beta=1)
    p = pm.constant(np.ones(nmax) / nmax)  # uniform prior over 1..nmax
    n = pm.Categorical('n', p=p, shape=1)  # FIXME: How to use this properly?

    # Observed Returns
    # NOTE(review): the likelihood appears commented out in the original --
    # confirm whether it should be active.
    # k = pm.Binomial('k', p=theta, n=n, observed=k, shape=m)

    # instantiate samplers
    values_np = np.ones(nmax) / nmax  # FIXME: How to use this properly?
    step1 = pm.Metropolis([theta])
    step2 = pm.ElemwiseCategoricalStep(var=n, values=values_np)
    stepFunc = [step1, step2]

    # draw posterior samples (in 4 parallel running chains), TODO: very slow!?
    Nsample = 100
    Nchains = 4
    traces = pm.sample(Nsample, step=stepFunc, njobs=Nchains)

plotVars = ['theta', 'n']
axs = pm.traceplot(traces, vars=plotVars, combined=False)
axs[0][0].set_xlim([0, 1])  # manually set x-limits for comparisons