def testJacobianNumerically(self):
        """Checks compute_jacobian against central finite differences.

        Builds a small MLP, evaluates the analytic Jacobian once, then
        verifies 10 randomly chosen entries against the central-difference
        estimate (f(x+eps) - f(x-eps)) / (2*eps).
        """
        x = tf.random_normal([_BATCH_SIZE, 2])
        h1 = tf.contrib.layers.fully_connected(x, 20)
        h2 = tf.contrib.layers.fully_connected(h1, 20)
        f = tf.contrib.layers.fully_connected(h2, 10)
        j_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)

        # Finite-difference step size; loop-invariant, so hoisted out of the
        # per-element loop below.
        epsilon = 1e-4

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            # Freeze one sample of the random input so the Jacobian and every
            # finite-difference evaluation below see the same x.
            x_np = sess.run(x)
            jacobian = sess.run(j_tensor, feed_dict={x: x_np})

            # Test 10 random elements.
            for _ in range(10):
                # Pick a random element of jacobian to test.
                batch_idx = np.random.randint(_BATCH_SIZE)
                x_idx = np.random.randint(2)
                f_idx = np.random.randint(10)

                # Central finite differences: perturb the chosen input
                # coordinate up and down by epsilon.
                x_plus = x_np.copy()
                x_plus[batch_idx, x_idx] += epsilon
                f_plus = sess.run(f, feed_dict={x: x_plus})[batch_idx, f_idx]

                x_minus = x_np.copy()
                x_minus[batch_idx, x_idx] -= epsilon
                f_minus = sess.run(f, feed_dict={x: x_minus})[batch_idx, f_idx]

                self.assertAllClose(jacobian[batch_idx, f_idx, x_idx],
                                    (f_plus - f_minus) / (2. * epsilon),
                                    rtol=1e-3,
                                    atol=1e-3)
    def testJacobianSimpleCase(self):
        x = tf.random_normal([_BATCH_SIZE, 2])
        W = tf.constant([[2., -1.], [1.5, 1.]])  # pylint: disable=invalid-name
        f = tf.matmul(x, W)
        j_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)
        with tf.Session() as sess:
            jacobian = sess.run(j_tensor)

        # Transpose of W in 'expected' is expected because in vector notation
        # f = W^T * x.
        expected = tf.tile([[[2, 1.5], [-1, 1]]], [_BATCH_SIZE, 1, 1])
        self.assertAllClose(jacobian, expected)
    def testJacobianAgainstSlowVersion(self):
        x = tf.random_normal([_BATCH_SIZE, 2])
        h1 = tf.contrib.layers.fully_connected(x, 20)
        h2 = tf.contrib.layers.fully_connected(h1, 20)
        f = tf.contrib.layers.fully_connected(h2, 10)

        j_slow_tensor = slow_jacobian(xs=x, fx=f)
        j_fast_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            j_fast, j_slow = sess.run([j_fast_tensor, j_slow_tensor])
        self.assertAllClose(j_fast, j_slow)
# Example no. 4
def ComputeGeneratorConditionNumber(sess, gan):
    """Computes the generator's log condition number per minibatch sample.

    Evaluates the Jacobian of the (flattened) generator output with respect
    to the latent input z in the given session, then post-processes the
    result into a condition number.

    Args:
      sess: tf.Session object.
      gan: AbstractGAN object, that is already present in the current
        tf.Graph.

    Returns:
      A list of length gan.batch_size. Each element is the condition number
      computed at a single z sample within a minibatch.
    """
    output_shape = gan.fake_images.get_shape().as_list()
    # Flatten each generated image to a vector so the per-sample Jacobian
    # has shape [output_dim, z_dim].
    flat_output = tf.reshape(gan.fake_images,
                             [gan.batch_size, np.prod(output_shape[1:])])
    jacobian_op = conditioning_lib.compute_jacobian(xs=gan.z, fx=flat_output)
    z_sample = gan.z_generator(gan.batch_size, gan.z_dim)
    jacobian_np = sess.run(jacobian_op, feed_dict={gan.z: z_sample})
    analysis = conditioning_lib.analyze_jacobian(jacobian_np)
    return analysis["metric_tensor"]["log_condition_number"]