"""Dirichlet-categorical model inferred with variational inference (ed.KLqp)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Categorical, Dirichlet

tf.flags.DEFINE_integer("N", 1000, "Number of data points.")
tf.flags.DEFINE_integer("K", 4, "Number of components.")

FLAGS = tf.flags.FLAGS


def main(_):
  # DATA
  pi_true = np.random.dirichlet(np.array([20.0, 30.0, 10.0, 10.0]))
  z_data = np.array(
      [np.random.choice(FLAGS.K, 1, p=pi_true)[0] for n in range(FLAGS.N)])
  print("pi: {}".format(pi_true))

  # MODEL
  pi = Dirichlet(tf.ones(FLAGS.K))
  z = Categorical(probs=pi, sample_shape=FLAGS.N)

  # INFERENCE
  qpi = Dirichlet(tf.nn.softplus(
      tf.get_variable("qpi/concentration", [FLAGS.K])))

  inference = ed.KLqp({pi: qpi}, data={z: z_data})
  inference.run(n_iter=1500, n_samples=30)

  sess = ed.get_session()
  print("Inferred pi: {}".format(sess.run(qpi.mean())))


if __name__ == "__main__":
  tf.app.run()
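Because the Dirichlet prior is conjugate to the categorical likelihood, this model's exact posterior is available in closed form: Dirichlet(prior concentration + per-category counts). That makes the KLqp result easy to sanity-check. The sketch below is not part of the Edward example; it assumes the Dirichlet(1, 1, 1, 1) prior, K = 4, and the z_data array from the script above, and the helper name analytic_posterior_mean is hypothetical.

# Hypothetical sanity check: the exact posterior mean under a
# Dirichlet(1, ..., 1) prior is the normalized vector of
# (prior concentration + category counts).
import numpy as np


def analytic_posterior_mean(z_data, K=4, prior_concentration=1.0):
  counts = np.bincount(z_data, minlength=K).astype(np.float64)
  posterior_concentration = prior_concentration + counts
  return posterior_concentration / posterior_concentration.sum()

# Compare against the "Inferred pi" line printed by the script:
# print("Analytic posterior mean: {}".format(analytic_posterior_mean(z_data)))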
# Standalone version of the same model written against older Edward releases,
# in which Dirichlet took an `alpha` argument and Categorical a `p` argument,
# and N, K were module-level constants rather than flags.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Categorical, Dirichlet

N = 1000
K = 4

# DATA
pi_true = np.random.dirichlet(np.array([20.0, 30.0, 10.0, 10.0]))
z_data = np.array([np.random.choice(K, 1, p=pi_true)[0] for n in range(N)])
print('pi={}'.format(pi_true))

# MODEL
pi = Dirichlet(alpha=tf.ones(4))
z = Categorical(p=tf.ones([N, 1]) * pi)

# INFERENCE
qpi = Dirichlet(alpha=tf.nn.softplus(tf.Variable(tf.random_normal([K]))))

inference = ed.KLqp({pi: qpi}, data={z: z_data})
inference.run(n_iter=1500, n_samples=30)

sess = ed.get_session()
print('Inferred pi={}'.format(sess.run(qpi.mean())))
# Fragment from a K-component mixture-of-Gaussians example: variational factors
# followed by a manual KLqp training loop. The model variables (pi, mu, sigma,
# c, x), the data x_data, the factor qpi, and the constants N, K, D are defined
# earlier in the full script. qmu's loc argument is assumed to mirror its scale
# argument (a [K, D] variable), matching the pattern of the other factors.
qmu = Normal(
    loc=tf.Variable(tf.random_normal([K, D])),
    scale=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))))
qsigma = InverseGamma(
    concentration=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))),
    rate=tf.nn.softplus(tf.Variable(tf.random_normal([K, D]))))
qc = Categorical(logits=tf.Variable(tf.zeros([N, K])))

inference = ed.KLqp(
    latent_vars={pi: qpi, mu: qmu, sigma: qsigma, c: qc},
    data={x: x_data})
inference.initialize(n_iter=10000, n_samples=200)

sess = ed.get_session()
tf.global_variables_initializer().run()

for _ in range(inference.n_iter):
  info_dict = inference.update()
  inference.print_progress(info_dict)

  t = info_dict['t']
  if t == 1 or t % inference.n_print == 0:
    qpi_mean, qmu_mean, qsigma_mean = sess.run(
        [qpi.mean(), qmu.mean(), qsigma.mean()])
    print('\nInferred membership probabilities: {}'.format(qpi_mean))
    print('Inferred cluster means: {}'.format(qmu_mean))
    print('Inferred sigmas: {}'.format(qsigma_mean))
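After the training loop finishes, one often also wants a hard cluster assignment for each data point. A minimal sketch under the same assumptions as the fragment above (qc, sess, K, and the script's NumPy/TensorFlow imports in scope); this is not part of the original example.

# Hypothetical follow-up: soft responsibilities are the softmax of qc's
# variational logits; hard assignments take the argmax over the K clusters.
responsibilities = sess.run(tf.nn.softmax(qc.logits))  # shape [N, K]
assignments = responsibilities.argmax(axis=1)
print('Cluster sizes: {}'.format(np.bincount(assignments, minlength=K)))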