import bayes_lib as bl
import autograd
import autograd.numpy as agnp
import autograd.scipy as agsp
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data

#x_true = agnp.random.normal(3, 1, size = (100, 10))

# Load a batch of 100 flattened 28x28 MNIST digits.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
train_images = mnist.train.next_batch(100)[0]

with bl.Model() as m:
    # Placeholder for a batch of 100 flattened images.
    X = bl.Placeholder('X', dimensions=agnp.array([100, 784]))

    # Encoder compresses the 784 pixels down to a 20-dimensional code.
    encoder = bl.ml.neural_network.DenseNeuralNetwork(
        'Encoder', X, layer_dims=[784, 50, 20],
        nonlinearity=bl.math.utils.sigmoid)

    # Decoder maps the code back to pixel space; the final sigmoid keeps
    # outputs in (0, 1) so they can parameterise Bernoulli pixels.
    decoder = bl.ml.neural_network.DenseNeuralNetwork(
        'Decoder', encoder, layer_dims=[20, 50, 784],
        nonlinearity=bl.math.utils.sigmoid,
        last_layer_nonlinearity=bl.math.utils.sigmoid)

    # Bernoulli likelihood on the reconstructed pixels, observed as X itself.
    y = bl.rvs.Bernoulli('obs', decoder, observed=X)

    # Load previously fitted parameters and compute reconstructions.
    fit_params = agnp.loadtxt("pos.txt", delimiter=',')
    m.set_param(fit_params)
    out = m.evaluate(decoder, feed_dict={X: train_images})

    for i in range(out.shape[1]):
        # Loop body is missing from the source; see the hedged sketch below.
        pass
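# --- Hedged sketch (not part of the original script) ---------------------
# The reconstruction loop above is truncated, so this is one plausible way to
# finish it: show a few digits next to their decoder reconstructions. It
# assumes standard 28x28 MNIST images and uses only matplotlib.
n_show = 5
fig, axes = plt.subplots(2, n_show, figsize=(2 * n_show, 4))
for i in range(n_show):
    axes[0, i].imshow(train_images[i].reshape(28, 28), cmap='gray')
    axes[0, i].set_title('original')
    axes[0, i].axis('off')
    axes[1, i].imshow(out[i].reshape(28, 28), cmap='gray')
    axes[1, i].set_title('reconstruction')
    axes[1, i].axis('off')
plt.show()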
# This fragment starts mid-function in the source; the imports and the
# function header below (the name `log_density` is a placeholder) are
# inferred from the code that follows.
import bayes_lib as bl
import tensorflow as tf
import autograd.numpy as np
from autograd.scipy.stats import multivariate_normal


def log_density(X, theta1, theta2, theta3, theta4, theta5):
    # theta3**2 and theta4**2 act as standard deviations and tanh(theta5) as
    # the correlation, so the covariance diagonal holds the squared standard
    # deviations, matching the bl.Model parameterisation below.
    s1 = theta3**2
    s2 = theta4**2
    rho = np.tanh(theta5)
    return np.sum(
        multivariate_normal.logpdf(
            X, np.array([theta1, theta2]),
            np.array([[s1**2, rho * s1 * s2],
                      [rho * s1 * s2, s2**2]])))


# True parameters and a synthetic data set drawn at those values.
# `sample` is not defined in this fragment; a hedged sketch is given below.
tp = np.array([0.7, -2.9, -1, -0.9, 0.6, 4])
x = sample(0.7, -2.9, -1, -0.9, 0.6, 4)

with bl.Model() as model_post:
    X = tf.placeholder(tf.float32, shape=[None, 2])

    # Independent uniform priors on the five unconstrained parameters.
    theta1 = bl.rvs.Uniform(-3, 3)
    theta2 = bl.rvs.Uniform(-3, 3)
    theta3 = bl.rvs.Uniform(-3, 3)
    theta4 = bl.rvs.Uniform(-3, 3)
    theta5 = bl.rvs.Uniform(-3, 3)

    # Bivariate normal likelihood with correlation tanh(theta5) and
    # standard deviations theta3**2 and theta4**2.
    rho = tf.tanh(theta5)
    corr = rho * (theta3**2) * (theta4**2)
    mu = tf.stack([theta1, theta2])
    cov = [[theta3**4, corr], [corr, theta4**4]]
    y = bl.rvs.Multivariate_Normal(mu, cov, observed=X)
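# --- Hedged sketch (not part of the original script) ---------------------
# `sample` is called above but never defined in this fragment. A plausible
# definition, assuming the last argument is the number of draws and the same
# parameterisation as `log_density`; it would need to sit above the
# `x = sample(...)` call to actually run.
def sample(theta1, theta2, theta3, theta4, theta5, n):
    s1 = theta3**2
    s2 = theta4**2
    rho = np.tanh(theta5)
    cov = np.array([[s1**2, rho * s1 * s2],
                    [rho * s1 * s2, s2**2]])
    return np.random.multivariate_normal(np.array([theta1, theta2]), cov, size=n)

# The posterior over the thetas could then be explored with the adaptive
# Metropolis-Hastings sampler used in the slice-sampler example:
# inf_alg = bl.inference.samplers.A_MVNMetropolisHastings_Adapt()
# chain = inf_alg.run(model_post, feed_dict={X: x}, n_iters=10000)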
import bayes_lib as bl
import tensorflow as tf
import seaborn as sns
import pandas as pd
import autograd.numpy as np
import autograd.scipy as agsp
import matplotlib.pyplot as plt

with bl.Model() as model:
    # A single unobserved Normal(0, 2) random variable.
    x = bl.rvs.Normal(0., 2., name='b')

# Sample the density with a univariate slice sampler, starting from a random
# initial position, and histogram the transformed chain.
inf_alg = bl.inference.samplers.MultivariateUniSliceSampler()
chain, tchain = inf_alg.run(model, np.array(np.random.normal(0, 1)).reshape(1,))
plt.hist(tchain)
plt.show()

"""
Alternative inference routines, kept for reference (they assume a model with
placeholders X and y, data X_data and y_data, and an iteration callback
iterfunc):

inf_alg = bl.inference.variational.ReparameterizedVariationalInference(model, init=np.array([0, 0, 0, 0]))
res, v_dist = inf_alg.run(feed_dict={X: X_data, y: y_data}, iter_func=iterfunc)
samples = v_dist.sample(res.position, 1000)
sns.pairplot(pd.DataFrame(samples))
plt.show()

inf_alg = bl.inference.samplers.A_MVNMetropolisHastings_Adapt()
chain = inf_alg.run(model, feed_dict={X: X_data, y: y_data}, n_iters=10000)
sns.pairplot(pd.DataFrame(chain[5000:, :]))
plt.show()
plt.plot(chain[5000:, 0])
"""
import bayes_lib as bl
import autograd.numpy as agnp
import autograd.scipy as agsp
import autograd.scipy.stats


# Simulate n parameter/data pairs: theta ~ Uniform(0, 5)^2 and
# y | theta ~ N(theta, I), recording the exact log-density of every draw.
def gen_data(n):
    logpdfs = []
    #theta = agnp.random.multivariate_normal(agnp.array([3,3]), agnp.eye(2), size = n)
    theta = agnp.random.uniform(0, 5, size=(n, 2))
    data = []
    for i in range(n):
        y = agnp.random.multivariate_normal(theta[i, :], agnp.eye(2))
        logpdfs.append(
            agsp.stats.multivariate_normal.logpdf(y, theta[i, :], agnp.eye(2)))
        data.append(y)
    return logpdfs, theta, agnp.array(data)


#data = agnp.hstack([agnp.random.normal(3, 2, size = (1000,1)), agnp.random.normal(10, .1, size = (1000,1)), agnp.random.normal(-5, 3, size = (1000,1))])

with bl.Model() as m_surrogate_density:
    #X = bl.Placeholder('X', dimensions = agnp.array([1000,3]))
    X = bl.Placeholder('X', dimensions=agnp.array([100, 2]))
    theta = bl.Placeholder('theta', dimensions=agnp.array([100, 2]))

    # Conditional MADE with 2 input and 2 conditioning dimensions and hidden
    # layers of 20 and 21 units, modelling the surrogate density p(X | theta).
    made = bl.ml.made.ConditionalGaussianMADE(
        'made', 2, 2, theta, [20, 21], X,
        nonlinearity=bl.math.utils.relu)

# Random starting point for the surrogate's parameters.
init = agnp.random.normal(-3, 3, size=m_surrogate_density.n_params)


def iter_func(t, p, o):
    print("Objective value: %f" % o)
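# --- Hedged sketch (not part of the original script) ---------------------
# The fragment ends before the surrogate is actually trained. Generating the
# simulated (theta, X) pairs and wiring them into a feed_dict is the natural
# next step; the fitting call itself is only sketched (commented out), reusing
# the (init, feed_dict, iter_func) interface shown in the variational example
# elsewhere in this repository, since the exact routine is not in this fragment.
_, thetas, data = gen_data(100)
feed = {X: data, theta: thetas}

# inf_alg = bl.inference.variational.ReparameterizedVariationalInference(
#     m_surrogate_density, init=init)
# res, v_dist = inf_alg.run(feed_dict=feed, iter_func=iter_func)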