Example #1
0
    def test_multivariate_normal_full(self):
        """Run Laplace inference with full-covariance multivariate normal
        approximating families over the weights and bias, then delegate
        the numeric checks to ``self._test``."""
        with self.test_session() as sess:
            N, D, w_true, X_train, y_train, X, w, b, y = self._setup()

            # INFERENCE. Start both covariance factors at the identity so
            # we can verify that an approximately zero determinant is
            # learned during optimization.
            qw = MultivariateNormalFull(
                mu=tf.Variable(tf.random_normal([D])),
                sigma=tf.Variable(tf.diag(tf.ones(D))))
            qb = MultivariateNormalFull(
                mu=tf.Variable(tf.random_normal([1])),
                sigma=tf.Variable(tf.diag(tf.ones(1))))

            latent_vars = {w: qw, b: qb}
            observations = {X: X_train, y: y_train}
            inference = ed.Laplace(latent_vars, data=observations)
            inference.run(n_iter=100)

            self._test(sess, qw, qb, w_true)
Example #2
0
Langevin dynamics.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import tensorflow as tf

from edward.models import Empirical, MultivariateNormalFull

ed.set_seed(42)

# MODEL: a bivariate normal with unit marginal variances and 0.8 correlation.
true_cov = tf.constant([[1.0, 0.8], [0.8, 1.0]])
z = MultivariateNormalFull(mu=tf.ones(2), sigma=true_cov)

# INFERENCE: approximate the posterior with 2000 samples drawn by
# stochastic-gradient Langevin dynamics.
qz = Empirical(params=tf.Variable(tf.random_normal([2000, 2])))

inference = ed.SGLD({z: qz})
inference.run(step_size=5.0)

# CRITICISM: report the empirical moments of the collected samples.
sess = ed.get_session()
post_mean, post_std = sess.run([qz.mean(), qz.std()])
print("Inferred posterior mean:")
print(post_mean)
print("Inferred posterior std:")
print(post_std)
Example #3
0
                mat[i] += [multivariate_rbf(xi, xj)]

        mat[i] = tf.pack(mat[i])

    return tf.pack(mat)


ed.set_seed(42)

# DATA: a random subset of the crabs data set; column 0 holds the label.
df = np.loadtxt('data/crabs_train.txt', dtype='float32', delimiter=',')
df[df[:, 0] == -1, 0] = 0  # relabel -1 as 0 for the Bernoulli likelihood
N = 25  # number of data points
D = df.shape[1] - 1  # number of features
subset = np.random.choice(df.shape[0], N, replace=False)
X_train = df[subset, 1:]
y_train = df[subset, 0]

# MODEL: Gaussian-process classification. The latent function values f
# get an RBF-kernel prior; labels are Bernoulli in the logits of f.
X = ed.placeholder(tf.float32, [N, D])
f = MultivariateNormalFull(mu=tf.zeros(N), sigma=kernel(X))
y = Bernoulli(logits=f)

# INFERENCE: mean-field normal approximation over the latent function.
qf = Normal(mu=tf.Variable(tf.random_normal([N])),
            sigma=tf.nn.softplus(tf.Variable(tf.random_normal([N]))))

data = {X: X_train, y: y_train}
inference = ed.KLqp({f: qf}, data)
inference.run(n_iter=500)
Example #4
0
def _test(mu, sigma, n):
  """Check that sampling ``n`` draws from MultivariateNormalFull(mu, sigma)
  yields a tensor whose dimensions are the sample shape ``n`` prepended to
  the dimensions of ``mu``."""
  dist = MultivariateNormalFull(mu=mu, sigma=sigma)
  sample_shape = get_dims(dist.sample(n))
  expected_shape = n + get_dims(mu)
  assert sample_shape == expected_shape