Example #1
  def test_normalnormal_run(self):
    with self.test_session() as sess:
      x_data = np.array([0.0] * 50, dtype=np.float32)

      # Model: mu ~ Normal(0, 1); x_n | mu ~ Normal(mu, 1) over 50 data points.
      mu = Normal(mu=0.0, sigma=1.0)
      x = Normal(mu=tf.ones(50) * mu, sigma=1.0)

      # Variational family: q(mu) = Normal(qmu_mu, qmu_sigma), with the scale kept positive via softplus.
      qmu_mu = tf.Variable(tf.random_normal([]))
      qmu_sigma = tf.nn.softplus(tf.Variable(tf.random_normal([])))
      qmu = Normal(mu=qmu_mu, sigma=qmu_sigma)

      # analytic solution: N(mu=0.0, sigma=\sqrt{1/51}=0.140)
      inference = ed.KLpq({mu: qmu}, data={x: x_data})
      inference.run(n_samples=25, n_iter=100)

      self.assertAllClose(qmu.mean().eval(), 0.0, rtol=1e-1, atol=1e-1)
      self.assertAllClose(qmu.std().eval(), np.sqrt(1.0 / 51),
                          rtol=1e-1, atol=1e-1)
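
The test above runs the fit inside a `tf.test.TestCase` session. As a point of reference, here is a minimal standalone sketch of the same Normal-Normal KLpq fit, assuming Edward 1.x (the older `mu`/`sigma` keyword API) on TensorFlow 1.x; outside a test it uses `ed.get_session()` instead of `self.test_session()`:

import edward as ed
import numpy as np
import tensorflow as tf
from edward.models import Normal

# 50 observations, all at 0.0.
x_data = np.zeros(50, dtype=np.float32)

# Model: mu ~ Normal(0, 1); x_n | mu ~ Normal(mu, 1).
mu = Normal(mu=0.0, sigma=1.0)
x = Normal(mu=tf.ones(50) * mu, sigma=1.0)

# Variational family: q(mu) = Normal(qmu_mu, qmu_sigma).
qmu_mu = tf.Variable(tf.random_normal([]))
qmu_sigma = tf.nn.softplus(tf.Variable(tf.random_normal([])))
qmu = Normal(mu=qmu_mu, sigma=qmu_sigma)

inference = ed.KLpq({mu: qmu}, data={x: x_data})
inference.run(n_samples=25, n_iter=100)

# Conjugate posterior has precision 1 + 50 = 51,
# i.e. N(mu=0.0, sigma=sqrt(1/51) ~= 0.140).
sess = ed.get_session()
print(sess.run([qmu.mean(), qmu.std()]))

The next snippet is a separate training loop that alternates discriminator and generator updates through `inference.update(variables=...)`.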
sess = ed.get_session()
tf.global_variables_initializer().run()

i = 0
for _ in range(inference.n_iter):
    X_batch, y_batch, i = next_batch(M, i)
    # Take several discriminator updates per generator update.
    for _ in range(5):
        info_dict_d = inference.update(variables="Disc",
                                       feed_dict={
                                           X: X_batch,
                                           y_ph: y_batch
                                       })

    info_dict = inference.update(variables="Gen",
                                 feed_dict={
                                     X: X_batch,
                                     y_ph: y_batch
                                 })
    # Carry the latest discriminator loss into the progress report.
    info_dict['loss_d'] = info_dict_d['loss_d']
    # Count each set of 6 updates (5 discriminator + 1 generator) as one iteration.
    info_dict['t'] = info_dict['t'] // 6

    t = info_dict['t']
    inference.print_progress(info_dict)
    if t == 1 or t % inference.n_print == 0:
        # Check inferred posterior parameters.
        mean, std = sess.run([qw.mean(), qw.std()])
        print("\nInferred mean & std:")
        print(mean)
        print(std)
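
The loop above depends on objects defined earlier in the script: `inference` (a GAN-style inference object such as `ed.GANInference`, whose `update()` accepts `variables="Disc"` or `variables="Gen"` to train one set of parameters at a time), the placeholders `X` and `y_ph`, the approximating variable `qw`, the minibatch size `M`, and a `next_batch` helper. A possible sketch of that helper, under the assumption that the training set is held in in-memory NumPy arrays and that batches wrap around at the end of an epoch (the array names here are hypothetical):

import numpy as np

# Hypothetical training arrays; in the original script these come from the data-loading step.
X_train = np.random.randn(500, 10).astype(np.float32)
y_train = np.random.randn(500).astype(np.float32)


def next_batch(M, i):
    """Return a minibatch of size M starting at cursor i, plus the updated cursor."""
    if i + M > X_train.shape[0]:
        i = 0  # wrap around at the start of a new epoch
    return X_train[i:i + M], y_train[i:i + M], i + M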