# Example 1: variational inference (KLqp) for a multivariate normal with Edward
D = 3  # dimension of the latent variable

# True covariance of the generating model.
# NOTE(review): the original matrix was not symmetric. tf.cholesky only
# reads the lower-triangular part of its input, so mirroring the lower
# triangle into the upper triangle fixes the inconsistent data without
# changing what the program computes.
cov = [[1.36, 0.80, 0.57],
       [0.80, 1.19, 0.73],
       [0.57, 0.73, 1.06]]

# Model: z ~ MVN(loc=ones(D), covariance=cov), parameterized through the
# Cholesky factor of cov.
z = MultivariateNormalTriL(loc=tf.ones(D),
                           scale_tril=tf.cholesky(cov))

# Inference: Gaussian variational approximation with trainable location
# and scale. softplus keeps the scale entries positive; MultivariateNormalTriL
# only uses the lower-triangular part of scale_tril.
qz = MultivariateNormalTriL(
    loc=tf.Variable(tf.zeros(D)),
    scale_tril=tf.nn.softplus(tf.Variable(tf.zeros((D, D)))))
inference = ed.KLqp({z: qz})

# Alternative: HMC with an empirical approximation of T samples.
# qz = Empirical(tf.Variable(tf.random_normal([T, D])))
# inference = ed.HMC({z: qz})

inference.run()

# Criticism: inspect the fitted posterior and draw samples from it.
sess = ed.get_session()
mean, stddev = sess.run([qz.mean(), qz.stddev()])
print("Inferred posterior mean: ", mean)
# BUG FIX: original message read "Inferred post erior stddev".
print("Inferred posterior stddev: ", stddev)
a = sess.run(qz.sample(5000))

# Plot the 5000 posterior samples in 3-D.
fig = plt.figure()
ax = Axes3D(fig)
ax.plot(a[:, 0], a[:, 1], a[:, 2], "o")
plt.show()
# Example 2: Normal-Wishart model — KLqp inference for mean and precision
# Priors for the Normal-Wishart model.
# NOTE(review): v_prior, W_prior, N, D, xn_data, N_ITERS and N_SAMPLES are
# defined elsewhere in the file — confirm before running this chunk alone.
m_prior = tf.constant(np.array([0.5, 0.5]), dtype=tf.float64)
k_prior = tf.constant(0.6, dtype=tf.float64)

# Probabilistic model: Wishart prior on the precision-like matrix `sigma`,
# conditionally Gaussian mean `mu`, and N i.i.d. Gaussian observations `xn`
# (tile replicates mu and sigma across the N data points).
sigma = WishartCholesky(df=v_prior, scale=W_prior)
mu = MultivariateNormalTriL(m_prior, k_prior * sigma)
xn = MultivariateNormalTriL(tf.reshape(tf.tile(mu, [N]), [N, D]),
                            tf.reshape(tf.tile(sigma, [N, 1]), [N, 2, 2]))

# Variational model (duplicate header comment removed).
qmu = MultivariateNormalTriL(
    tf.Variable(tf.random_normal([D], dtype=tf.float64)),
    tf.nn.softplus(tf.Variable(tf.random_normal([D, D], dtype=tf.float64))))
L = tf.Variable(tf.random_normal([D, D], dtype=tf.float64))
qsigma = WishartCholesky(
    # softplus(...) + D + 1 keeps the degrees of freedom above D - 1,
    # which the Wishart distribution requires.
    tf.nn.softplus(
        tf.Variable(tf.random_normal([], dtype=tf.float64)) + D + 1),
    LinearOperatorTriL(L).to_dense())

# Inference: fit both variational factors to the observed data.
inference = ed.KLqp({mu: qmu, sigma: qsigma}, data={xn: xn_data})
inference.run(n_iter=N_ITERS, n_samples=N_SAMPLES)

sess = ed.get_session()

print('Inferred mu: {}'.format(sess.run(qmu.mean())))
print('Inferred precision: {}'.format(sess.run(qsigma.mean())))
# BUG FIX: the covariance is the *matrix* inverse of the precision matrix;
# `1 / M` takes the elementwise reciprocal, which is not the inverse.
print('Inferred sigma: {}'.format(sess.run(tf.matrix_inverse(qsigma.mean()))))