def testShapeGetters(self):
   with self.test_session():
     x = tensor_shape.TensorShape([4])
     y = tensor_shape.TensorShape([5])
     bijector = SoftmaxCentered(validate_args=True)
     self.assertAllEqual(y, bijector.forward_event_shape(x))
     self.assertAllEqual(y.as_list(),
                         bijector.forward_event_shape_tensor(
                             x.as_list()).eval())
     self.assertAllEqual(x, bijector.inverse_event_shape(y))
     self.assertAllEqual(x.as_list(),
                         bijector.inverse_event_shape_tensor(
                             y.as_list()).eval())
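Note on the shapes above: SoftmaxCentered embeds a length-k unconstrained vector into the simplex with k + 1 coordinates by appending a fixed zero logit and taking a softmax, which is why forward_event_shape maps [4] to [5]. A minimal NumPy sketch of that construction (the helper name here is ours, not part of the library):

import numpy as np

def softmax_centered_forward(x):
  # Stand-in for the forward map: append a zero logit, then softmax,
  # so a length-k vector lands on the k-simplex (k + 1 coordinates).
  z = np.append(x, 0.)
  e = np.exp(z - z.max())  # numerically stabilized softmax
  return e / e.sum()

x = np.random.randn(4)
y = softmax_centered_forward(x)
print(y.shape, y.sum())  # (5,) 1.0 -- i.e. [4] -> [5]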
Example #2
 def _compute_quantiles():
   """Helper to build quantiles."""
   # Omit {0, 1} since they might lead to Inf/NaN.
   zero = array_ops.zeros([], dtype=dist.dtype)
   edges = math_ops.linspace(zero, 1., quadrature_size + 3)[1:-1]
    # Expand edges so it broadcasts across batch dims.
   edges = array_ops.reshape(edges, shape=array_ops.concat([
       [-1], array_ops.ones([batch_ndims], dtype=dtypes.int32)], axis=0))
   quantiles = dist.quantile(edges)
   quantiles = SoftmaxCentered().forward(quantiles)
   # Cyclically permute left by one.
   perm = array_ops.concat([
       math_ops.range(1, 1 + batch_ndims), [0]], axis=0)
   quantiles = array_ops.transpose(quantiles, perm)
   quantiles.set_shape(_get_final_shape(quadrature_size + 1))
   return quantiles
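A note on the edge construction in _compute_quantiles: the endpoints of the linspace are sliced off so dist.quantile is never evaluated at exactly 0 or 1 (which could be +/- infinity), and the reshape puts the grid on a leading axis of size quadrature_size + 1 that broadcasts across the batch dimensions. A small NumPy sketch of just that bookkeeping, with made-up values for quadrature_size and batch_ndims:

import numpy as np

quadrature_size = 4
batch_ndims = 2

# linspace includes 0 and 1; [1:-1] keeps the quadrature_size + 1
# interior probabilities strictly inside (0, 1).
edges = np.linspace(0., 1., quadrature_size + 3)[1:-1]
print(edges)  # 5 values: 1/6, 2/6, 3/6, 4/6, 5/6

# Reshape to [-1, 1, ..., 1] so the grid broadcasts against batch dims.
edges = edges.reshape([-1] + [1] * batch_ndims)
print(edges.shape)  # (5, 1, 1)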
Example #4
 def testShapeGetters(self):
     with self.cached_session():
         chain = Chain([
             SoftmaxCentered(validate_args=True),
             SoftmaxCentered(validate_args=True),
         ])
         x = tensor_shape.TensorShape([1])
         y = tensor_shape.TensorShape([2 + 1])
         self.assertAllEqual(y, chain.forward_event_shape(x))
         self.assertAllEqual(
             y.as_list(),
             chain.forward_event_shape_tensor(x.as_list()).eval())
         self.assertAllEqual(x, chain.inverse_event_shape(y))
         self.assertAllEqual(
             x.as_list(),
             chain.inverse_event_shape_tensor(y.as_list()).eval())
Example #5
 def testShapeGetters(self):
   with self.test_session():
     for x, y, b in ((tensor_shape.TensorShape([]),
                      tensor_shape.TensorShape([2]),
                      SoftmaxCentered(
                          event_ndims=0, validate_args=True)),
                     (tensor_shape.TensorShape([4]),
                      tensor_shape.TensorShape([5]),
                      SoftmaxCentered(
                          event_ndims=1, validate_args=True))):
       self.assertAllEqual(y, b.forward_event_shape(x))
       self.assertAllEqual(y.as_list(),
                           b.forward_event_shape_tensor(x.as_list()).eval())
       self.assertAllEqual(x, b.inverse_event_shape(y))
       self.assertAllEqual(x.as_list(),
                           b.inverse_event_shape_tensor(y.as_list()).eval())
Example #6
 def testShapeGetters(self):
     with self.test_session():
         bijector = Chain([
             SoftmaxCentered(event_ndims=1, validate_args=True),
             SoftmaxCentered(event_ndims=0, validate_args=True)
         ])
         x = tensor_shape.TensorShape([])
         y = tensor_shape.TensorShape([2 + 1])
         self.assertAllEqual(y, bijector.forward_event_shape(x))
         self.assertAllEqual(
             y.as_list(),
             bijector.forward_event_shape_tensor(x.as_list()).eval())
         self.assertAllEqual(x, bijector.inverse_event_shape(y))
         self.assertAllEqual(
             x.as_list(),
             bijector.inverse_event_shape_tensor(y.as_list()).eval())
Example #7
 def testBijectorUnknownShape(self):
     with self.test_session():
         softmax = SoftmaxCentered()
         self.assertEqual("softmax_centered", softmax.name)
         x = array_ops.placeholder(shape=[2, None], dtype=dtypes.float32)
         real_x = np.log([[2., 3, 4], [4., 8, 12]])
         y = array_ops.placeholder(shape=[2, None], dtype=dtypes.float32)
         real_y = [[0.2, 0.3, 0.4, 0.1], [0.16, 0.32, 0.48, 0.04]]
         self.assertAllClose(real_y,
                             softmax.forward(x).eval(feed_dict={x: real_x}))
         self.assertAllClose(real_x,
                             softmax.inverse(y).eval(feed_dict={y: real_y}))
         self.assertAllClose(-np.sum(np.log(real_y), axis=1),
                             softmax.inverse_log_det_jacobian(
                                 y,
                                 event_ndims=1).eval(feed_dict={y: real_y}),
                             atol=0.,
                             rtol=1e-7)
         self.assertAllClose(-softmax.inverse_log_det_jacobian(
             y, event_ndims=1).eval(feed_dict={y: real_y}),
                             softmax.forward_log_det_jacobian(
                                 x,
                                 event_ndims=1).eval(feed_dict={x: real_x}),
                             atol=0.,
                             rtol=1e-7)
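The real_x / real_y pairs in this test can be re-derived by hand: append a zero logit to each row of real_x and normalize the exponentials to get real_y; the inverse is a log-ratio against the last coordinate; and the expected inverse log-det-Jacobian is simply -sum(log y). A NumPy sketch of that arithmetic (pure NumPy, not a call into the library):

import numpy as np

real_x = np.log([[2., 3, 4], [4., 8, 12]])

# Forward: zero-pad each row, then normalize the exponentials.
logits = np.concatenate([real_x, np.zeros((2, 1))], axis=-1)
y = np.exp(logits) / np.exp(logits).sum(axis=-1, keepdims=True)
print(np.round(y, 2))  # [[0.2 0.3 0.4 0.1], [0.16 0.32 0.48 0.04]]

# Inverse: log-ratio against the last (pivot) coordinate recovers real_x.
print(np.allclose(np.log(y[:, :-1]) - np.log(y[:, -1:]), real_x))  # True

# The reference value the test uses for the inverse log-det-Jacobian.
print(-np.log(y).sum(axis=-1))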
Example #8
 def testBijectiveAndFinite(self):
     with self.test_session():
         softmax = SoftmaxCentered(event_ndims=1)
         x = np.linspace(-50, 50, num=10).reshape(5, 2).astype(np.float32)
         # Make y values on the simplex with a wide range.
         y_0 = np.ones(5).astype(np.float32)
         y_1 = (1e-5 * rng.rand(5)).astype(np.float32)
         y_2 = (1e1 * rng.rand(5)).astype(np.float32)
         y = np.array([y_0, y_1, y_2])
         y /= y.sum(axis=0)
         y = y.T  # y.shape = [5, 3]
         assert_bijective_and_finite(softmax, x, y)
 def testBijectorUnknownShape(self):
   with self.test_session():
     softmax = SoftmaxCentered()
     self.assertEqual("softmax_centered", softmax.name)
     x = array_ops.placeholder(shape=[2, None], dtype=dtypes.float32)
     real_x = np.log([[2., 3, 4], [4., 8, 12]])
     y = array_ops.placeholder(shape=[2, None], dtype=dtypes.float32)
     real_y = [[0.2, 0.3, 0.4, 0.1], [0.16, 0.32, 0.48, 0.04]]
     self.assertAllClose(real_y, softmax.forward(x).eval(
         feed_dict={x: real_x}))
     self.assertAllClose(real_x, softmax.inverse(y).eval(
         feed_dict={y: real_y}))
     self.assertAllClose(
         -np.sum(np.log(real_y), axis=1),
         softmax.inverse_log_det_jacobian(y).eval(
             feed_dict={y: real_y}),
         atol=0.,
         rtol=1e-7)
     self.assertAllClose(
         -softmax.inverse_log_det_jacobian(y).eval(
             feed_dict={y: real_y}),
         softmax.forward_log_det_jacobian(x).eval(
             feed_dict={x: real_x}),
         atol=0.,
         rtol=1e-7)
Example #10
 def testBijectorVector(self):
     with self.test_session():
         softmax = SoftmaxCentered(event_ndims=1)
         self.assertEqual("softmax_centered", softmax.name)
         x = np.log([[2., 3, 4], [4., 8, 12]])
         y = [[0.2, 0.3, 0.4, 0.1], [0.16, 0.32, 0.48, 0.04]]
         self.assertAllClose(y, softmax.forward(x).eval())
         self.assertAllClose(x, softmax.inverse(y).eval())
         self.assertAllClose(-np.sum(np.log(y), axis=1),
                             softmax.inverse_log_det_jacobian(y).eval(),
                             atol=0.,
                             rtol=1e-7)
         self.assertAllClose(-softmax.inverse_log_det_jacobian(y).eval(),
                             softmax.forward_log_det_jacobian(x).eval(),
                             atol=0.,
                             rtol=1e-7)
Example #11
 def testShapeGetters(self):
     with self.test_session():
         x = tensor_shape.TensorShape([4])
         y = tensor_shape.TensorShape([5])
         bijector = SoftmaxCentered(validate_args=True)
         self.assertAllEqual(y, bijector.forward_event_shape(x))
         self.assertAllEqual(
             y.as_list(),
             bijector.forward_event_shape_tensor(x.as_list()).eval())
         self.assertAllEqual(x, bijector.inverse_event_shape(y))
         self.assertAllEqual(
             x.as_list(),
             bijector.inverse_event_shape_tensor(y.as_list()).eval())
Example #12
 def testBijectorScalar(self):
     with self.test_session():
         softmax = SoftmaxCentered()  # scalar by default
         self.assertEqual("softmax_centered", softmax.name)
         x = np.log([[2., 3, 4], [4., 8, 12]])
         y = [[[2. / 3, 1. / 3], [3. / 4, 1. / 4], [4. / 5, 1. / 5]],
              [[4. / 5, 1. / 5], [8. / 9, 1. / 9], [12. / 13, 1. / 13]]]
         self.assertAllClose(y, softmax.forward(x).eval())
         self.assertAllClose(x, softmax.inverse(y).eval())
         self.assertAllClose(-np.sum(np.log(y), axis=2),
                             softmax.inverse_log_det_jacobian(y).eval(),
                             atol=0.,
                             rtol=1e-7)
         self.assertAllClose(-softmax.inverse_log_det_jacobian(y).eval(),
                             softmax.forward_log_det_jacobian(x).eval(),
                             atol=0.,
                             rtol=1e-7)
 def testBijectorVector(self):
   with self.test_session():
     softmax = SoftmaxCentered(event_ndims=1)
     self.assertEqual("softmax_centered", softmax.name)
     x = np.log([[2., 3, 4], [4., 8, 12]])
     y = [[0.2, 0.3, 0.4, 0.1], [0.16, 0.32, 0.48, 0.04]]
     self.assertAllClose(y, softmax.forward(x).eval())
     self.assertAllClose(x, softmax.inverse(y).eval())
     self.assertAllClose(
         -np.sum(np.log(y), axis=1),
         softmax.inverse_log_det_jacobian(y).eval(),
         atol=0.,
         rtol=1e-7)
     self.assertAllClose(
         -softmax.inverse_log_det_jacobian(y).eval(),
         softmax.forward_log_det_jacobian(x).eval(),
         atol=0.,
         rtol=1e-7)
 def testBijectorScalar(self):
   with self.test_session():
     softmax = SoftmaxCentered()  # scalar by default
     self.assertEqual("softmax_centered", softmax.name)
     x = np.log([[2., 3, 4],
                 [4., 8, 12]])
     y = [[[2. / 3, 1. / 3],
           [3. / 4, 1. / 4],
           [4. / 5, 1. / 5]],
          [[4. / 5, 1. / 5],
           [8. / 9, 1. / 9],
           [12. / 13, 1. / 13]]]
     self.assertAllClose(y, softmax.forward(x).eval())
     self.assertAllClose(x, softmax.inverse(y).eval())
     self.assertAllClose(
         -np.sum(np.log(y), axis=2),
         softmax.inverse_log_det_jacobian(y).eval(),
         atol=0.,
         rtol=1e-7)
     self.assertAllClose(
         -softmax.inverse_log_det_jacobian(y).eval(),
         softmax.forward_log_det_jacobian(x).eval(),
         atol=0.,
         rtol=1e-7)
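In the scalar (event_ndims=0) case exercised by testBijectorScalar, each entry x maps to the pair [exp(x), 1] / (exp(x) + 1), i.e. [sigmoid(x), 1 - sigmoid(x)]. A quick NumPy check of the first expected entry above (not the library call):

import numpy as np

x = np.log(2.)
y = np.array([np.exp(x), 1.]) / (np.exp(x) + 1.)
print(y)  # [0.6667 0.3333], matching the [2/3, 1/3] entry in the test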