Example #1
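These excerpts omit their surrounding imports; a minimal set they appear to rely on is sketched below. The anyrl.spaces import path for NaturalSoftmax and CategoricalSoftmax is an assumption here, not something the excerpts themselves confirm.

import random

import numpy as np
import tensorflow as tf

# Assumed import path for the distribution classes under test.
from anyrl.spaces import CategoricalSoftmax, NaturalSoftmax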
def test_natural_gradient(self):
    """
    Test random natural gradient cases.
    """
    with tf.Graph().as_default():
        with tf.Session() as sess:
            for size in range(3, 9):
                dist = NaturalSoftmax(size, epsilon=0)
                softmax = CategoricalSoftmax(size)
                param_row = tf.constant(np.random.normal(size=(size,)),
                                        dtype=tf.float64)
                params = tf.stack([param_row])
                one_hot = np.zeros((1, size))
                one_hot[0, 1] = 1
                samples = tf.constant(one_hot, dtype=tf.float64)
                # The Fisher matrix is the Hessian of the KL divergence
                # evaluated at equal old/new parameters.
                kl_div = softmax.kl_divergence(tf.stop_gradient(params),
                                               params)
                hessian = sess.run(tf.hessians(kl_div, param_row)[0])
                gradient = sess.run(
                    tf.gradients(softmax.log_prob(params, samples),
                                 params)[0][0])
                # Expected natural gradient: ordinary gradient times the
                # pseudo-inverse of the Fisher matrix.
                expected = np.matmul(np.array([gradient]),
                                     np.linalg.pinv(hessian))[0]
                actual = sess.run(
                    tf.gradients(dist.log_prob(params, samples),
                                 params)[0][0])
                self.assertTrue(np.allclose(actual, expected))
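For reference, the identity this test checks: the natural gradient is the ordinary log-probability gradient preconditioned by the pseudo-inverse of the Fisher matrix, and for a categorical softmax the Fisher matrix equals the Hessian of the KL divergence at equal parameters, diag(p) - p p^T. A minimal standalone NumPy sketch of that relationship (not part of the test suite; the names are made up for illustration):

import numpy as np

def softmax(logits):
    exps = np.exp(logits - logits.max())
    return exps / exps.sum()

logits = np.random.normal(size=5)
probs = softmax(logits)
one_hot = np.eye(5)[1]                              # sample class 1, as in the test above

fisher = np.diag(probs) - np.outer(probs, probs)    # Fisher matrix = Hessian of the KL divergence
grad = one_hot - probs                              # gradient of the log probability w.r.t. the logits
natural_grad = np.linalg.pinv(fisher) @ grad        # what NaturalSoftmax is expected to back-propagate

The pseudo-inverse is needed because the softmax parameterization is shift-invariant, so the Fisher matrix is singular.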
Example #2
def test_nat_softmax_log_prob():
    """
    Test log probabilities of NaturalSoftmax.
    """
    with tf.Graph().as_default():
        with tf.Session() as sess:
            dist = NaturalSoftmax(7)
            params = tf.constant(np.random.normal(size=(15, 7)), dtype=tf.float64)
            sampled = tf.one_hot([random.randrange(7) for _ in range(15)], 7,
                                 dtype=tf.float64)
            actual = sess.run(dist.log_prob(params, sampled))
            expected = sess.run(CategoricalSoftmax(7).log_prob(params, sampled))
            assert np.allclose(actual, expected)
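The log probabilities match because a natural-gradient distribution only needs to change the backward pass; its forward values can stay those of the ordinary softmax. One standard TensorFlow trick for that kind of gradient replacement is sketched below as an assumption; it is not necessarily how NaturalSoftmax is actually implemented:

import tensorflow as tf

def value_with_replaced_gradient(forward, surrogate):
    # Evaluates to `forward`, but gradients flow only through `surrogate`,
    # because the stop_gradient term contributes nothing to the backward pass.
    return surrogate + tf.stop_gradient(forward - surrogate)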
Example #3
def test_nat_softmax_determinism():
    """
    Make sure that the gradient of NaturalSoftmax doesn't
    change from run to run.
    """
    with tf.Graph().as_default():
        with tf.Session() as sess:
            dist = NaturalSoftmax(7)
            params = tf.constant(np.random.normal(size=(15, 7)), dtype=tf.float64)
            sampled = tf.one_hot([random.randrange(7) for _ in range(15)], 7,
                                 dtype=tf.float64)
            batched_grad = tf.gradients(dist.log_prob(params, sampled), params)[0]
            first = sess.run(batched_grad)
            for _ in range(10):
                assert np.allclose(first, sess.run(batched_grad))
Example #4
def test_nat_softmax_batched():
    """
    Test that batched gradients from NaturalSoftmax
    give the same results as single gradients.
    """
    with tf.Graph().as_default():
        with tf.Session() as sess:
            dist = NaturalSoftmax(7)
            params = tf.constant(np.random.normal(size=(15, 7)), dtype=tf.float64)
            sampled = tf.one_hot([random.randrange(7) for _ in range(15)], 7,
                                 dtype=tf.float64)
            batched_grad = tf.gradients(dist.log_prob(params, sampled), params)[0]
            single_grads = []
            for i in range(15):
                sub_params = params[i:i+1]
                prob = dist.log_prob(sub_params, sampled[i:i+1])
                single_grads.append(tf.gradients(prob, sub_params)[0])
            single_grad = tf.concat(single_grads, axis=0)
            batched, single = sess.run((batched_grad, single_grad))
            assert batched.shape == single.shape
            assert np.allclose(batched, single)
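What this guards against: the Fisher preconditioning has to be applied independently to each batch row, so a batched implementation that mixed rows would diverge from the concatenation of single-row gradients. A hedged NumPy sketch of the per-row computation (the helper name is made up):

import numpy as np

def batched_natural_grads(probs, one_hots):
    # probs, one_hots: arrays of shape (batch_size, num_classes)
    out = np.empty_like(probs)
    for i, (p, y) in enumerate(zip(probs, one_hots)):
        fisher = np.diag(p) - np.outer(p, p)        # per-row Fisher matrix
        out[i] = np.linalg.pinv(fisher) @ (y - p)   # per-row natural gradient
    return out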