Example 1
  # Requires numpy as np, tensorflow as tf, edward as ed,
  # scipy.optimize.minimize, and Normal/PointMass from edward.models.
  # AutoRegressive is not a stock edward.models distribution; it is
  # assumed to be a custom random variable defined in the test module.
  def test_ar_mle(self):
    # set up test data: an AR(1) process observed with Gaussian noise
    T = 100
    z_true = np.zeros(T)
    r = 0.95
    sig = 0.01
    eta = 0.01
    for t in range(1, T):
      z_true[t] = r * z_true[t - 1] + sig * np.random.randn()

    x_data = (z_true + eta * np.random.randn(T)).astype(np.float32)

    # use scipy to maximize the joint log-density over z directly;
    # ed.MAP below optimizes the same objective
    def cost(z):
      # twice the negative log joint density, up to an additive constant
      initial = z[0]**2 / sig**2
      ar = np.sum((z[1:] - r * z[:-1])**2) / sig**2
      data = np.sum((x_data - z)**2) / eta**2
      return initial + ar + data

    mle = minimize(cost, np.zeros(T)).x

    with self.test_session() as sess:
      z = AutoRegressive(T, r, sig)
      x = Normal(loc=z, scale=eta)

      qz = PointMass(params=tf.Variable(tf.zeros(T)))
      inference = ed.MAP({z: qz}, data={x: x_data})
      inference.run(n_iter=500)

      self.assertAllClose(qz.eval(), mle, rtol=1e-3, atol=1e-3)
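
For reference, the cost function above is (up to an additive constant) twice the negative log joint density of the AR(1) state-space model, which is the objective both scipy's minimize and ed.MAP optimize over z:

\[
-2 \log p(z, x) = \frac{z_0^2}{\sigma^2}
  + \sum_{t=1}^{T-1} \frac{(z_t - r\, z_{t-1})^2}{\sigma^2}
  + \sum_{t=0}^{T-1} \frac{(x_t - z_t)^2}{\eta^2} + \text{const},
\]

with r, sigma (sig), and eta matching the constants in the test setup.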
Example 2
def mmsb(N, K, data):
    """MAP (EM-style) inference for a mixed membership stochastic blockmodel."""
    # sparsity
    rho = 0.3
    # MODEL
    # shared probability vector over the K blocks (drawn once, used by every node)
    gamma = Dirichlet(concentration=tf.ones([K]))
    # block connectivity
    Pi = Beta(concentration0=tf.ones([K, K]), concentration1=tf.ones([K, K]))
    # one-hot block assignment for each of the N nodes
    Z = Multinomial(total_count=1.0, probs=gamma, sample_shape=N)
    # adjacency: p(X[i, j] = 1) = (1 - rho) * Z[i]' Pi Z[j]; with one-hot Z
    # this picks out the Pi entry for the two nodes' blocks
    X = Bernoulli(probs=(1 - rho) *
                  tf.matmul(Z, tf.matmul(Pi, tf.transpose(Z))))

    # INFERENCE (EM-style: ed.MAP fits PointMass point estimates for all latents)
    qgamma = PointMass(
        params=tf.nn.softmax(tf.Variable(tf.random_normal([K]))))
    qPi = PointMass(
        params=tf.nn.sigmoid(tf.Variable(tf.random_normal([K, K]))))
    qZ = PointMass(params=tf.nn.softmax(tf.Variable(tf.random_normal([N, K]))))

    #qgamma = Normal(loc=tf.get_variable("qgamma/loc", [K]),
    #                scale=tf.nn.softplus(
    #                        tf.get_variable("qgamma/scale", [K])))
    #qPi = Normal(loc=tf.get_variable("qPi/loc", [K, K]),
    #                scale=tf.nn.softplus(
    #                        tf.get_variable("qPi/scale", [K, K])))
    #qZ = Normal(loc=tf.get_variable("qZ/loc", [N, K]),
    #                scale=tf.nn.softplus(
    #                        tf.get_variable("qZ/scale", [N, K])))

    #inference = ed.KLqp({gamma: qgamma, Pi: qPi, Z: qZ}, data={X: data})
    inference = ed.MAP({gamma: qgamma, Pi: qPi, Z: qZ}, data={X: data})

    #inference.run()
    n_iter = 6000
    inference.initialize(optimizer=tf.train.AdamOptimizer(learning_rate=0.01),
                         n_iter=n_iter)

    tf.global_variables_initializer().run()

    for _ in range(inference.n_iter):
        info_dict = inference.update()
        inference.print_progress(info_dict)

    inference.finalize()
    print('qgamma after: ', qgamma.mean().eval())
    return qZ.mean().eval(), qPi.eval()
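
A minimal driver sketch for mmsb() under the imports the function assumes (edward as ed, tensorflow as tf, numpy as np, and Dirichlet, Beta, Multinomial, Bernoulli, PointMass from edward.models); the two-community toy graph is invented for illustration:

import edward as ed
import numpy as np

ed.get_session()  # default session for the .run()/.eval() calls inside mmsb()

N, K = 50, 2
# planted two-block graph: dense within communities, sparse across
blocks = np.repeat([0, 1], N // 2)
link_probs = np.where(blocks[:, None] == blocks[None, :], 0.7, 0.05)
X_data = (np.random.rand(N, N) < link_probs).astype(np.int32)

Z_est, Pi_est = mmsb(N, K, X_data)
print('estimated block connectivity:\n', Pi_est)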
Example 3
#!/usr/bin/env python
"""A simple coin flipping example. Inspired by Stan's toy example.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Bernoulli, Beta, PointMass

ed.set_seed(42)

# DATA
x_data = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1])

# MODEL
p = Beta(1.0, 1.0)
x = Bernoulli(probs=p, sample_shape=10)

# INFERENCE
qp_params = tf.sigmoid(tf.Variable(tf.random_normal([])))
qp = PointMass(params=qp_params)

inference = ed.MAP({p: qp}, data={x: x_data})
inference.run(n_iter=50)

print("Fitted probability: {}".format(qp.eval()))