def pymc3_dist(self, name, hypers):
    p = self.p
    if len(hypers) == 1:
        hyper_dist = hypers[0][0]
        hyper_name = hypers[0][1]
        p = hyper_dist.pymc3_dist(hyper_name, [])
    if self.num_elements == -1:
        return pm.Geometric(name, p=p)
    else:
        return pm.Geometric(name, p=p, shape=self.num_elements)
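# Hedged usage sketch (not from the original source): minimal stand-ins for the class
# that owns pymc3_dist and for a hyperprior node, to show both code paths. The
# (hyper_dist, hyper_name) tuple layout and the wrapper classes are assumptions,
# and the method above is called here as a plain function for illustration.
import pymc3 as pm

class UniformHyper:
    """Assumed hyperprior node exposing the same pymc3_dist interface."""
    def pymc3_dist(self, name, hypers):
        return pm.Uniform(name, lower=0.0, upper=1.0)

class GeometricNode:
    """Assumed container for the attributes the method reads."""
    def __init__(self, p=0.5, num_elements=-1):
        self.p = p
        self.num_elements = num_elements

with pm.Model():
    # Fixed success probability: falls through to pm.Geometric(name, p=self.p).
    x_fixed = pymc3_dist(GeometricNode(p=0.3), "x_fixed", [])
    # One hyperprior: p is replaced by a Uniform(0, 1) random variable.
    x_hyper = pymc3_dist(GeometricNode(), "x_hyper", [(UniformHyper(), "p")])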
def _sample_pymc3(cls, dist, size, seed):
    """Sample from PyMC3."""
    import pymc3
    pymc3_rv_map = {
        'GeometricDistribution': lambda dist: pymc3.Geometric('X', p=float(dist.p)),
        'PoissonDistribution': lambda dist: pymc3.Poisson('X', mu=float(dist.lamda)),
        'NegativeBinomialDistribution': lambda dist: pymc3.NegativeBinomial(
            'X', mu=float((dist.p * dist.r) / (1 - dist.p)), alpha=float(dist.r))
    }
    dist_list = pymc3_rv_map.keys()
    if dist.__class__.__name__ not in dist_list:
        return None
    with pymc3.Model():
        pymc3_rv_map[dist.__class__.__name__](dist)
        return pymc3.sample(size, chains=1, progressbar=False,
                            random_seed=seed)[:]['X']
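# Hedged usage sketch (not from the original source): a stand-in object carrying the two
# things _sample_pymc3 reads, the class name and the parameter p. In SymPy proper this
# would be the distribution object behind a sympy.stats Geometric random symbol.
class GeometricDistribution:
    p = 0.4

draws = _sample_pymc3(None, GeometricDistribution(), size=500, seed=7)
print(draws.mean())  # roughly 1 / 0.4 = 2.5 for prior draws from Geometric(0.4)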
""") plt.figure(dpi=100) ##### COMPUTATION ##### # DECLARING THE "TRUE" PARAMETERS UNDERLYING THE SAMPLE p_real = 0.3 # DRAW A SAMPLE OF N=1000 np.random.seed(42) sample = geom.rvs(p=p_real, size=100) ##### SIMULATION ##### # MODEL BUILDING with pm.Model() as model: p = pm.Uniform("p") geometric = pm.Geometric("geometric", p=p, observed=sample) # MODEL RUN with model: step = pm.Metropolis() trace = pm.sample(100000, step=step) burned_trace = trace[50000:] # P - 95% CONF INTERVAL ps = burned_trace["p"] ps_est_95 = ps.mean() - 2*ps.std(), ps.mean() + 2*ps.std() print("95% of sampled ps are between {:0.3f} and {:0.3f}".format(*ps_est_95)) ##### PLOTTING ##### # SAMPLE DISTRIBUTION cnt = Counter(sample)
def distributed_stmt(store, stmt):
    var = stmt.children[0].value
    if len(stmt.children) == 2:
        shape = ()
        dist_stmt = stmt.children[1]
    else:
        shape = parse_shape(store, stmt.children[1])
        dist_stmt = stmt.children[2]
    dist = dist_stmt.children[0].value
    args = [process_numexpr(store, arg) for arg in dist_stmt.children[1:]]
    check_arity(dist, len(args))
    data = store.lookup_data(var)
    with store.model:
        # Discrete
        if dist == 'Bern':
            store.add_rv(
                var, pm.Bernoulli(var, p=args[0], observed=data, shape=shape))
        elif dist == 'Unif':
            store.add_rv(
                var, pm.Uniform(var, lower=args[0], upper=args[1],
                                observed=data, shape=shape))
        elif dist == 'Beta':
            store.add_rv(
                var, pm.Beta(var, alpha=args[0], beta=args[1],
                             observed=data, shape=shape))
        elif dist == 'Pois':
            store.add_rv(
                var, pm.Poisson(var, mu=args[0], observed=data, shape=shape))
        elif dist == 'DUnif':
            store.add_rv(
                var, pm.DiscreteUniform(var, lower=args[0], upper=args[1],
                                        observed=data, shape=shape))
        elif dist == 'Binom':
            store.add_rv(
                var, pm.Binomial(var, n=args[0], p=args[1],
                                 observed=data, shape=shape))
        elif dist == 'Geometric':
            store.add_rv(
                var, pm.Geometric(var, p=args[0], observed=data, shape=shape))
        # Continuous
        elif dist == 'N':
            store.add_rv(
                var, pm.Normal(var, mu=args[0], sigma=args[1],
                               observed=data, shape=shape))
        elif dist == 'Gamma':
            store.add_rv(
                var, pm.Gamma(var, alpha=args[0], beta=args[1],
                              observed=data, shape=shape))
        elif dist == 'Exp':
            store.add_rv(
                var, pm.Exponential(var, lam=args[0], testval=0,
                                    observed=data, shape=shape))
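# Hedged refactoring sketch, not code from the original parser: the if/elif chain in
# distributed_stmt can also be expressed as a lookup table from the grammar's
# distribution names to (PyMC3 constructor, parameter names). The testval=0 special
# case for 'Exp' is dropped here, and var/args/data/shape are assumed to be prepared
# exactly as above; build_rv must be called inside the model context.
import pymc3 as pm

DIST_TABLE = {
    'Bern':      (pm.Bernoulli,       ('p',)),
    'Unif':      (pm.Uniform,         ('lower', 'upper')),
    'Beta':      (pm.Beta,            ('alpha', 'beta')),
    'Pois':      (pm.Poisson,         ('mu',)),
    'DUnif':     (pm.DiscreteUniform, ('lower', 'upper')),
    'Binom':     (pm.Binomial,        ('n', 'p')),
    'Geometric': (pm.Geometric,       ('p',)),
    'N':         (pm.Normal,          ('mu', 'sigma')),
    'Gamma':     (pm.Gamma,           ('alpha', 'beta')),
    'Exp':       (pm.Exponential,     ('lam',)),
}

def build_rv(var, dist, args, data, shape):
    """Instantiate the named distribution with positional args mapped to keywords."""
    ctor, params = DIST_TABLE[dist]
    return ctor(var, observed=data, shape=shape, **dict(zip(params, args)))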
with pm.Model() as m_pois:
    a = pm.Normal("a", 0, 100, shape=2)
    lam = pm.math.exp(a)
    admit = pm.Poisson("admit", lam[0], observed=d_ad.admit)
    rej = pm.Poisson("rej", lam[1], observed=d_ad.reject)
    trace_pois = pm.sample(1000, tune=1000)

# %%
m_binom = pm.summary(trace_binom).round(2)
logistic(m_binom["mean"])

# %%
m_pois = pm.summary(trace_pois).round(2)
m_pois["mean"][0]
np.exp(m_pois["mean"][0]) / (np.exp(m_pois["mean"][0]) + np.exp(m_pois["mean"][1]))

# %%
N = 100
x = np.random.rand(N)
y = np.random.geometric(logistic(-1 + 2 * x), size=N)

with pm.Model() as m_10_18:
    a = pm.Normal("a", 0, 10)
    b = pm.Normal("b", 0, 1)
    p = pm.math.invlogit(a + b * x)
    obs = pm.Geometric("y", p=p, observed=y)
    trace_10_18 = pm.sample(1000, tune=1000)

az.summary(trace_10_18, credible_interval=0.89, round_to=2)
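# Hedged follow-up, not part of the original notebook: y was simulated with
# logit(p) = -1 + 2 * x, so the posterior means of a and b should land near -1 and 2.
post = az.summary(trace_10_18, credible_interval=0.89, round_to=2)
print(post.loc[["a", "b"], "mean"])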