Example #1
# NOTE: the excerpt is truncated here; the loc parameter of qmu below is an
# assumption, only the scale term appears in the original.
qmu = Normal(loc=tf.Variable(tf.random_normal([K, D])),
             scale=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))))
qsigma = InverseGamma(
    concentration=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))),
    rate=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))))
qc = Categorical(logits=tf.Variable(tf.zeros([N, K])))

inference = ed.KLqp(
    latent_vars={pi: qpi, mu: qmu, sigma: qsigma, c: qc},
    data={x: x_data})

inference.initialize(n_iter=10000, n_samples=200)

sess = ed.get_session()
tf.global_variables_initializer().run()

for _ in range(inference.n_iter):
    info_dict = inference.update()
    inference.print_progress(info_dict)
    t = info_dict['t']
    if t == 1 or t % inference.n_print == 0:
        qpi_mean, qmu_mean, qsigma_mean = \
            sess.run([qpi.mean(), qmu.mean(), qsigma.mean()])
        print('\nInferred membership probabilities: {}'.format(qpi_mean))
        print('Inferred cluster means: {}'.format(qmu_mean))
        print('Inferred sigmas: {}'.format(qsigma_mean))
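
The excerpt above begins mid-script: the model variables pi, mu, sigma, c and x, the variational factor qpi, the constants N, K and D, and the observed array x_data are all defined earlier and are not shown. A minimal sketch of that missing preamble, assuming a K-component, D-dimensional Gaussian mixture with stand-in synthetic data (the real data and dimensions are not part of the excerpt), could look like this:

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Categorical, Dirichlet, InverseGamma, Normal

N = 500   # number of data points (stand-in value)
K = 2     # number of mixture components (stand-in value)
D = 2     # dimensionality of each data point (stand-in value)
x_data = np.random.randn(N, D).astype(np.float32)  # stand-in synthetic data

# Mixture weights, per-component means and variances, cluster assignments.
pi = Dirichlet(tf.ones(K))
mu = Normal(loc=tf.zeros([K, D]), scale=tf.ones([K, D]))
sigma = InverseGamma(concentration=tf.ones([K, D]), rate=tf.ones([K, D]))
c = Categorical(probs=tf.ones([N, 1]) * pi)
x = Normal(loc=tf.gather(mu, c), scale=tf.sqrt(tf.gather(sigma, c)))

# Variational factor for the mixture weights.
qpi = Dirichlet(tf.nn.softplus(tf.Variable(tf.random_normal([K]))))

With those names in place, the variational factors qmu, qsigma and qc from the excerpt line up shape-for-shape with mu, sigma and c.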
Example #2
import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import InverseGamma, Normal

N = 1000

# Data generation (known mean)
mu = 7.0
sigma = 0.55
xn_data = np.random.normal(mu, sigma, N)
print('sigma={}'.format(sigma))

# Prior definition
alpha = tf.Variable(0.9, dtype=tf.float32, trainable=False)
beta = tf.Variable(0.5, dtype=tf.float32, trainable=False)

# Posterior inference
# Probabilistic model
ig = InverseGamma(alpha, beta)
xn = Normal(mu, tf.ones([N]) * tf.sqrt(ig))

# Variational model
qig = InverseGamma(tf.nn.softplus(tf.Variable(tf.random_normal([]))),
                   tf.nn.softplus(tf.Variable(tf.random_normal([]))))

# Inference
inference = ed.KLqp({ig: qig}, data={xn: xn_data})
inference.run(n_iter=2000, n_samples=150)

sess = ed.get_session()
print('Inferred sigma={}'.format(sess.run(tf.sqrt(qig.mean()))))
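
Because the mean is known, the inverse-gamma prior on the variance is conjugate here: the exact posterior is InverseGamma(alpha + N/2, beta + 0.5 * sum((x_i - mu)^2)), so its mean gives a closed-form value to compare against the KLqp estimate. A short check, reusing the names from the script above:

# Closed-form conjugate posterior over the variance (known mean).
alpha_post = 0.9 + N / 2.0                        # same alpha as the prior above
beta_post = 0.5 + 0.5 * np.sum((xn_data - mu) ** 2)  # same beta as the prior above
post_var_mean = beta_post / (alpha_post - 1.0)    # mean of an inverse-gamma
print('Analytic posterior sigma={}'.format(np.sqrt(post_var_mean)))

Both the variational and the analytic estimate should land close to the true sigma of 0.55.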