import edward as ed
import tensorflow as tf

from edward.models import Beta

model_code = """
  data {
    int<lower=0> N;
    int<lower=0,upper=1> x[N];
  }
  parameters {
    real<lower=0,upper=1> p;
  }
  model {
    p ~ beta(1.0, 1.0);
    for (n in 1:N)
      x[n] ~ bernoulli(p);
  }
"""
ed.set_seed(42)
data = {'N': 10, 'x': [0, 1, 0, 0, 0, 0, 0, 0, 0, 1]}

model = ed.StanModel(model_code=model_code)

qp_a = tf.nn.softplus(tf.Variable(tf.random_normal([])))
qp_b = tf.nn.softplus(tf.Variable(tf.random_normal([])))
qp = Beta(a=qp_a, b=qp_b)

inference = ed.KLqp({'p': qp}, data, model)
inference.run(n_iter=500)
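
After inference.run finishes, the fitted variational parameters can be read back from Edward's session and compared with the exact answer. A minimal sketch, assuming the default session Edward creates during inference: with 2 heads in 10 flips and a Beta(1, 1) prior, the exact posterior is Beta(1 + 2, 1 + 8) = Beta(3, 9).

# Sketch: read back the fitted Beta parameters and compare them with the
# exact conjugate posterior Beta(3, 9).
sess = ed.get_session()
a_hat, b_hat = sess.run([qp_a, qp_b])
print(a_hat, b_hat)  # should be roughly 3.0 and 9.0 after convergence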
Example #2
    """
    def __init__(self):
        self.num_vars = 1

    def log_prob(self, xs, zs):
        log_prior = beta.logpdf(zs, a=1.0, b=1.0)
        log_lik = tf.pack(
            [tf.reduce_sum(bernoulli.logpmf(xs, z)) for z in tf.unpack(zs)])
        return log_lik + log_prior

    def sample_likelihood(self, zs, size):
        """x | z ~ p(x | z)"""
        out = np.zeros((zs.shape[0], size))
        for s in range(zs.shape[0]):
            out[s, :] = bernoulli.rvs(zs[s, :], size=size)

        return out


ed.set_seed(42)
model = BetaBernoulli()
variational = Variational()
variational.add(Beta(model.num_vars))
data = ed.Data(tf.constant((0, 1, 0, 0, 0, 0, 0, 0, 0, 1), dtype=tf.float32))

inference = ed.MFVI(model, variational, data)
sess = inference.run(n_iter=200)

T = lambda y, z=None: tf.reduce_mean(y)
print(ed.ppc(model, variational, data, T, sess=sess))
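
For reference, the observed value of the test statistic T is just the sample mean of the data, so the replicated statistics returned by ed.ppc should scatter around it when the model fits. A standalone check of the observed value (plain NumPy, no Edward needed):

import numpy as np

x_obs = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1])
print(x_obs.mean())  # observed statistic T(x) = 0.2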
Example #3
from __future__ import print_function

import edward as ed
import numpy as np
import six
import tensorflow as tf

from edward.models import Bernoulli, Beta

ed.set_seed(42)

# DATA
x_data = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1])

# MODEL
pi = Beta(a=1.0, b=1.0)
x = Bernoulli(p=pi, sample_shape=10)

# COMPLETE CONDITIONAL
pi_cond = ed.complete_conditional(pi)

sess = ed.get_session()
tf.global_variables_initializer().run()

print('p(pi | x) type:', pi_cond.parameters['name'])
param_vals = sess.run(
    {
        key: val
        for key, val in six.iteritems(pi_cond.parameters)
        if isinstance(val, tf.Tensor)
    }, {x: x_data})
print('parameter vals:', param_vals)
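
Because pi_cond is itself an Edward random variable, it can also be evaluated directly to draw posterior samples. A minimal sketch, assuming Edward's usual tensor conversion (running a RandomVariable returns a sample of its value); with 2 successes in 10 trials and a Beta(1, 1) prior, the conditional is Beta(3, 9), so draws should concentrate around 0.25.

# Each run draws one sample of pi from p(pi | x).
print(sess.run(pi_cond, {x: x_data}))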
Example #4
"""A simple coin-flipping example.

Probability model
    Prior: Beta
    Likelihood: Bernoulli
Variational model
    Likelihood: Mean-field Beta
"""
import edward as ed
import tensorflow as tf

from edward.models import Variational, Beta
from edward.stats import bernoulli, beta


class BetaBernoulli:
    """
    p(x, z) = Bernoulli(x | z) * Beta(z | 1, 1)
    """
    def log_prob(self, xs, zs):
        log_prior = beta.logpdf(zs, a=1.0, b=1.0)
        log_lik = tf.pack(
            [tf.reduce_sum(bernoulli.logpmf(xs, z)) for z in tf.unpack(zs)])
        return log_lik + log_prior


ed.set_seed(42)
model = BetaBernoulli()
variational = Variational()
variational.add(Beta())
data = ed.Data(tf.constant((0, 1, 0, 0, 0, 0, 0, 0, 0, 1), dtype=tf.float32))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
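
The model wrapper's log_prob contract is that xs holds the observed data and zs a batch of latent samples, returning one log joint per sample. The same computation can be sanity-checked for a single z with SciPy (a sketch, independent of Edward's versioned APIs):

import numpy as np
from scipy.stats import bernoulli as sp_bernoulli, beta as sp_beta

x = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1])
z = 0.3
# log p(x, z) = log Beta(z | 1, 1) + sum_n log Bernoulli(x_n | z)
print(sp_beta.logpdf(z, a=1.0, b=1.0) + sp_bernoulli.logpmf(x, z).sum())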
Example #5
import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Bernoulli, Beta, Dirichlet, Multinomial, PointMass

# ---------------- DATA: simulate a stochastic block model ----------------
# The snippet's setup was truncated; the constants below are assumed
# placeholders, not the original values.
N = 50          # number of nodes (assumed)
K = 3           # number of clusters (assumed)
sparsity = 1.0  # scales all connection probabilities (assumed)
phi = np.random.uniform(size=(K, K))  # cluster-to-cluster link probs (assumed)
phi = (phi + phi.T) / 2               # keep them symmetric
clusters = np.random.randint(K, size=N)

graph = np.zeros((N, N), dtype=np.int32)
for i in range(N):
    for j in range(i + 1, N):
        cluster_i, cluster_j = clusters[i], clusters[j]
        conn = np.random.binomial(n=1, p=phi[cluster_i][cluster_j] * sparsity)

        # symmetric connections
        graph[i][j] = conn
        graph[j][i] = conn

X_data = graph
Z_true = clusters
membership_act = [list(clusters).count(x) / N for x in range(K)]
#-------------------END-------------------



# MODEL
gamma = Dirichlet(concentration=tf.ones([K]))
Pi = Beta(concentration0=tf.ones([K, K]), concentration1=tf.ones([K, K]))
Z = Multinomial(total_count=1.0, probs=gamma, sample_shape=N)
X = Bernoulli(probs=tf.matmul(Z, tf.matmul(Pi, tf.transpose(Z))))

# INFERENCE (EM algorithm)
qgamma = PointMass(params=tf.nn.softmax(tf.Variable(tf.random_normal([K]))))
qPi = PointMass(params=tf.nn.sigmoid(tf.Variable(tf.random_normal([K, K]))))
qZ = PointMass(params=tf.nn.softmax(tf.Variable(tf.random_normal([N, K]))))

inference = ed.MAP({gamma: qgamma, Pi: qPi, Z: qZ}, data={X: X_data})

n_iter = 250
inference.initialize(n_iter=n_iter)

tf.global_variables_initializer().run()
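
The snippet stops right after initialization, so no optimization has run yet. A minimal training loop in Edward's usual initialize/update/finalize style, assuming the inference object defined above:

for _ in range(inference.n_iter):
    info_dict = inference.update()
    inference.print_progress(info_dict)

inference.finalize()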