# Train the model with RMSProp, reporting a running lower bound on the
# average per-image marginal likelihood after each epoch.
rmsprop = tf.train.RMSPropOptimizer(0.01, epsilon=1.0)
inference.initialize(optimizer=rmsprop)

init = tf.global_variables_initializer()
init.run()

n_epoch = 100
n_iter_per_epoch = 1000
for epoch in range(n_epoch):
    total_loss = 0.0

    widgets = ["epoch #%d|" % epoch, Percentage(), Bar(), ETA()]
    pbar = ProgressBar(n_iter_per_epoch, widgets=widgets)
    pbar.start()
    for t in range(n_iter_per_epoch):
        pbar.update(t)
        # One stochastic update per minibatch of M training images.
        x_train, _ = mnist.train.next_batch(M)
        info_dict = inference.update(feed_dict={x_ph: x_train})
        total_loss += info_dict['loss']

    # Print a lower bound to the average marginal likelihood for an image:
    # mean loss per iteration, then per image in the minibatch.
    print("log p(x) >= {:0.3f}".format(total_loss / n_iter_per_epoch / M))

# Prior predictive check: draw images from the model and save them to disk.
imgs = sess.run(x.value())
for m in range(M):
    imsave("img/%d.png" % m, imgs[m].reshape(28, 28))
# Alternate E-step and M-step updates for T total iterations, reporting a
# lower bound on the average per-image marginal likelihood each epoch.
init = tf.global_variables_initializer()
init.run()

n_iter_per_epoch = 100
n_epoch = T // n_iter_per_epoch
for epoch in range(n_epoch):
    running_loss = 0.0

    widgets = ["epoch #%d|" % epoch, Percentage(), Bar(), ETA()]
    pbar = ProgressBar(n_iter_per_epoch, widgets=widgets)
    pbar.start()
    for t in range(n_iter_per_epoch):
        pbar.update(t)
        # E-step then M-step; only the M-step reports a loss.
        info_dict_e = inference_e.update()
        info_dict_m = inference_m.update()
        running_loss += info_dict_m['loss']

    # Acceptance rate of the (sampling-based) E-step from its last update.
    print("Acceptance Rate:")
    print(info_dict_e['accept_rate'])

    # Print a lower bound to the average marginal likelihood for an
    # image: mean loss per iteration, then per data point.
    running_loss = running_loss / n_iter_per_epoch / N
    print("log p(x) >= {:0.3f}".format(running_loss))

# Prior predictive check: draw images from the model and save them to disk.
imgs = x.value().eval()
for m in range(N):
    imsave(os.path.join(IMG_DIR, '%d.png') % m, imgs[m].reshape(28, 28))
# Alternate E-step and M-step updates for T total iterations, reporting a
# lower bound on the average per-image marginal likelihood each epoch.
#
# Fix: tf.initialize_all_variables() is deprecated (and removed in later
# TF releases); use tf.global_variables_initializer(), matching the
# sibling script in this file.
init = tf.global_variables_initializer()
init.run()

n_iter_per_epoch = 100
n_epoch = T // n_iter_per_epoch
for epoch in range(n_epoch):
    avg_loss = 0.0

    widgets = ["epoch #%d|" % epoch, Percentage(), Bar(), ETA()]
    pbar = ProgressBar(n_iter_per_epoch, widgets=widgets)
    pbar.start()
    for t in range(n_iter_per_epoch):
        pbar.update(t)
        # E-step then M-step; only the M-step reports a loss.
        info_dict_e = inference_e.update()
        info_dict_m = inference_m.update()
        avg_loss += info_dict_m['loss']

    # Acceptance rate of the (sampling-based) E-step from its last update.
    print("Acceptance Rate:")
    print(info_dict_e['accept_rate'])

    # Print a lower bound to the average marginal likelihood for an
    # image.
    avg_loss = avg_loss / n_iter_per_epoch
    avg_loss = avg_loss / N
    print("log p(x) >= {:0.3f}".format(avg_loss))

# Prior predictive check: draw images from the model and save them to disk.
imgs = x.value().eval()
for m in range(N):
    imsave(os.path.join(IMG_DIR, '%d.png') % m, imgs[m].reshape(28, 28))
from __future__ import print_function, division, absolute_import

from data import get_value
import tensorflow as tf
import edward as ed
from edward.models import Beta, Bernoulli

# Beta prior on the success probability of a 12-dimensional Bernoulli.
# (NOTE(review): the original comment said "100-dimensional", but the code
# builds tf.ones(12) — a 12-dimensional vector.)
theta = Beta(a=1.0, b=1.0)
x = Bernoulli(p=tf.ones(12) * theta)

# ====== sampling from each marginal variables
theta_sample = theta.sample()
x_sample = x.sample()
print("Marginal theta samples:", get_value(theta_sample))
print("Marginal X samples:", get_value(x_sample))

# ====== sampling from the joint distribution
samples = get_value([x.value(), theta.value()])
print("From joint distribution:")
print("- X:", samples[0])
print("- theta:", samples[1])
# Train the model with RMSProp, reporting a running lower bound on the
# average per-image marginal likelihood after each epoch.
optimizer = tf.train.RMSPropOptimizer(0.01, epsilon=1.0)
inference.initialize(optimizer=optimizer)

# Fix: tf.initialize_all_variables() is deprecated (and removed in later
# TF releases); use tf.global_variables_initializer(), matching the
# sibling script in this file.
init = tf.global_variables_initializer()
init.run()

n_epoch = 100
n_iter_per_epoch = 1000
for epoch in range(n_epoch):
    avg_loss = 0.0

    widgets = ["epoch #%d|" % epoch, Percentage(), Bar(), ETA()]
    pbar = ProgressBar(n_iter_per_epoch, widgets=widgets)
    pbar.start()
    for t in range(n_iter_per_epoch):
        pbar.update(t)
        # One stochastic update per minibatch of M training images.
        x_train, _ = mnist.train.next_batch(M)
        info_dict = inference.update(feed_dict={x_ph: x_train})
        avg_loss += info_dict['loss']

    # Print a lower bound to the average marginal likelihood for an
    # image.
    avg_loss = avg_loss / n_iter_per_epoch
    avg_loss = avg_loss / M
    print("log p(x) >= {:0.3f}".format(avg_loss))

# Prior predictive check: draw images from the model and save them to disk.
imgs = sess.run(x.value())
for m in range(M):
    imsave("img/%d.png" % m, imgs[m].reshape(28, 28))