def model(X, Y, D_H):
    """Bayesian neural net with two hidden layers of width D_H.

    Unit-normal priors on all weight matrices, a Gamma prior on the
    observation precision, and a Normal likelihood on the targets Y.
    """
    D_X, D_Y = X.shape[1], 1

    # First layer: D_X -> D_H, followed by the nonlinearity.
    w1 = sample("w1", dist.Normal(np.zeros((D_X, D_H)), np.ones((D_X, D_H))))
    hidden1 = nonlin(np.matmul(X, w1))  # N x D_H

    # Second layer: D_H -> D_H.
    w2 = sample("w2", dist.Normal(np.zeros((D_H, D_H)), np.ones((D_H, D_H))))
    hidden2 = nonlin(np.matmul(hidden1, w2))  # N x D_H

    # Output layer: D_H -> D_Y, no nonlinearity on the network output.
    w3 = sample("w3", dist.Normal(np.zeros((D_H, D_Y)), np.ones((D_H, D_Y))))
    network_out = np.matmul(hidden2, w3)  # N x D_Y

    # Prior on the observation precision; convert to a standard deviation.
    prec_obs = sample("prec_obs", dist.Gamma(3.0, 1.0))
    sigma_obs = 1.0 / np.sqrt(prec_obs)

    # Condition on the observed targets.
    sample("Y", dist.Normal(network_out, sigma_obs), obs=Y)
def dual_moon_model():
    """Dual-moon target: uniform proposal on [-4, 4]^2 scored by its potential.

    The observed Delta site injects -potential as a log-density factor.
    """
    x = sample('x', dist.Uniform(-4 * np.ones(2), 4 * np.ones(2)))
    potential = dual_moon_pe(x)
    sample('log_density', dist.Delta(log_density=-potential), obs=0.)
def model(data, labels):
    """Bayesian logistic regression with standard-normal coefficient priors."""
    num_features = data.shape[1]
    coefs = sample('coefs', dist.Normal(np.zeros(num_features), np.ones(num_features)))
    # Linear predictor; Bernoulli consumes it directly as logits.
    logits = np.dot(data, coefs)
    return sample('obs', dist.Bernoulli(logits=logits), obs=labels)
def model(data):
    """Categorical likelihood with a symmetric Dirichlet(1, 1, 1) prior."""
    concentration = np.array([1.0, 1.0, 1.0])
    p_latent = sample('p_latent', dist.Dirichlet(concentration))
    # Observe the categorical outcomes under the latent class probabilities.
    sample('obs', dist.Categorical(p_latent), obs=data)
    return p_latent
def model(data):
    """Model whose param constraint depends on an upstream sample site."""
    alpha = sample('alpha', dist.Uniform(0, 1))
    # The constraint interval's upper bound is the sampled alpha.
    loc = param('loc', 0., constraint=constraints.interval(0., alpha))
    sample('obs', dist.Normal(loc, 0.1), obs=data)
def model(data):
    """Two independent coins: Beta(1, 1) priors with Bernoulli observations."""
    success_prob = sample('beta', dist.Beta(np.ones(2), np.ones(2)))
    sample('obs', dist.Bernoulli(success_prob), obs=data)
def actual_model(data):
    """Hierarchical uniform: loc ~ Uniform(0, alpha) with alpha ~ Uniform(0, 1)."""
    alpha = sample('alpha', dist.Uniform(0, 1))
    loc = sample('loc', dist.Uniform(0, alpha))
    sample('obs', dist.Normal(loc, 0.1), obs=data)
def guide():
    """Variational guide: Beta family with positive learnable concentrations."""
    alpha_q = param("alpha_q", 1.0, constraint=constraints.positive)
    beta_q = param("beta_q", 1.0, constraint=constraints.positive)
    sample("beta", dist.Beta(alpha_q, beta_q))
def reparam_model(dim=10):
    """Non-centered funnel: x expressed via an affine transform of a unit normal."""
    y = sample('y', dist.Normal(0, 3))
    base = dist.Normal(np.zeros(dim - 1), 1)
    # Scaling the base by exp(y / 2) reproduces the centered funnel geometry.
    sample('x', dist.TransformedDistribution(base, AffineTransform(0, np.exp(y / 2))))
def model(dim=10):
    """Centered funnel: the scale of x depends directly on the sampled y."""
    y = sample('y', dist.Normal(0, 3))
    sample('x', dist.Normal(np.zeros(dim - 1), np.exp(y / 2)))
def model(data):
    """Normal model whose likelihood contribution is up-weighted by a factor of 10."""
    latent = sample('x', dist.Normal(0, 1))
    # The scale handler multiplies the log-probability of sites inside it.
    with scale(10):
        sample('obs', dist.Normal(latent, 1), obs=data)
def model(data):
    """MAP-style model: learnable mean and positive std, Normal likelihood."""
    mean = param('mean', 0.)
    std = param('std', 1., constraint=constraints.positive)
    return sample('obs', dist.Normal(mean, std), obs=data)
def model(data):
    """Beta-Bernoulli coin model with a flat Beta(1, 1) prior."""
    success_prob = sample('beta', dist.Beta(1., 1.))
    sample('obs', dist.Bernoulli(success_prob), obs=data)
def model():
    """Draw two latent sites from module-level priors (defined elsewhere)."""
    sample('x', x_prior)
    sample('y', y_prior)
def model(returns):
    """Stochastic volatility: Gaussian random-walk log-volatility, Student-t returns."""
    step_size = sample('sigma', dist.Exponential(50.))
    # One volatility state per observed return.
    num_steps = np.shape(returns)[0]
    s = sample('s', dist.GaussianRandomWalk(scale=step_size, num_steps=num_steps))
    nu = sample('nu', dist.Exponential(.1))
    return sample('r', dist.StudentT(df=nu, loc=0., scale=np.exp(-2 * s)),
                  obs=returns)
def model(data):
    """Flat-prior Beta-Bernoulli model over the observed coin flips."""
    head_prob = sample("beta", dist.Beta(1., 1.))
    sample("obs", dist.Bernoulli(head_prob), obs=data)
def model(data):
    """Two Bernoulli rates with weakly-informative Beta(1.1, 1.1) priors."""
    alpha = np.array([1.1, 1.1])
    beta = np.array([1.1, 1.1])
    p_latent = sample('p_latent', dist.Beta(alpha, beta))
    sample('obs', dist.Bernoulli(p_latent), obs=data)
    return p_latent
def model(data):
    """Normal likelihood around a constrained learnable location."""
    # NB: the model-side constraint on this param plays no effect.
    loc = param('loc', 0., constraint=constraints.interval(0, 0.5))
    sample('obs', dist.Normal(loc, 0.1), obs=data)
def model(data):
    """Conjugate Normal-Normal model: standard-normal prior on the mean."""
    loc = sample("loc", dist.Normal(0., 1.))
    sample("obs", dist.Normal(loc, 1.), obs=data)
def expected_model(data):
    """Reparameterized form: loc = u * alpha with u, alpha ~ Uniform(0, 1)."""
    alpha = sample('alpha', dist.Uniform(0, 1))
    # Scaling a Uniform(0, 1) draw by alpha is equivalent to Uniform(0, alpha).
    loc = sample('loc', dist.Uniform(0, 1)) * alpha
    sample('obs', dist.Normal(loc, 0.1), obs=data)
def guide():
    """Mean-field Normal guide for site 'loc'; scale learned in log space."""
    q_loc = param("guide_loc", 0.)
    # Exponentiating the raw parameter keeps the scale strictly positive.
    q_scale = np.exp(param("guide_scale_log", 0.))
    sample("loc", dist.Normal(q_loc, q_scale))
def model(data, labels):
    """Bayesian logistic regression with standard-normal coefficient priors.

    Fix: `dim` was a free variable resolved from module globals; derive it
    from `data.shape[1]` instead, matching the sibling logistic-regression
    model in this file and making the function self-contained. Behavior is
    unchanged for (N, dim)-shaped inputs.
    """
    dim = data.shape[1]
    coefs = sample('coefs', dist.Normal(np.zeros(dim), np.ones(dim)))
    # Row-wise dot product of data with the coefficient vector.
    logits = np.sum(coefs * data, axis=-1)
    return sample('obs', dist.Bernoulli(logits=logits), obs=labels)
def model(labels):
    """Logistic regression over module-level `data` and `dim` globals.

    Fix: normalized the distribution API to match every other model in this
    file — `dist.Normal` (class) instead of `dist.norm`, and
    `dist.Bernoulli(logits=...)` instead of the scipy-style
    `dist.bernoulli(logits, is_logits=True)`.
    """
    # NOTE(review): `dim` and `data` are resolved from module globals here;
    # confirm they are defined at call time.
    coefs = sample('coefs', dist.Normal(np.zeros(dim), np.ones(dim)))
    logits = np.sum(coefs * data, axis=-1)
    return sample('obs', dist.Bernoulli(logits=logits), obs=labels)