Example #1
# From forwardprop_test.py. This is a method of a test.TestCase subclass;
# the module imports it relies on (internal TF 1.x-era APIs, assumed from
# the same version of the codebase):
#   from tensorflow.python import pywrap_tensorflow
#   from tensorflow.python.eager import forwardprop
#   from tensorflow.python.framework import constant_op
def testForwardGradientFunctionUsedByAccumulatorForOps(self):
  previous_fn = forwardprop._forward_gradient
  try:
    with forwardprop.ForwardGradientAccumulator() as acc:
      x = constant_op.constant(1.)
      acc.watch(x, 2.)  # Watch x with tangent 2.
      y = x + x  # Recorded before the override: JVP = 2 * d(x + x)/dx = 4.
      # Swap in a stub forward-gradient function. JVPs are computed op by
      # op as ops execute, so only ops recorded after this point use it.
      pywrap_tensorflow.TFE_Py_RegisterForwardGradientFunction(
          lambda *args, **kwargs: [constant_op.constant(-15.)])
      z = x + x  # Recorded after the override: JVP comes from the stub.
    self.assertAllClose(4., acc.jvp(y))
    self.assertAllClose(-15., acc.jvp(z))
  finally:
    # Restore the real forward-gradient function for subsequent tests.
    pywrap_tensorflow.TFE_Py_RegisterForwardGradientFunction(previous_fn)
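For context, the stub registered above replaces `forwardprop._forward_gradient`, whose tail is shown in Example #2 below. Judging from that function's definition, the registered callable receives the executed op's name, attributes, inputs, outputs, and input tangents, and must return one tangent per op output; the test's stub ignores all of these. A sketch of that contract (the signature here is an assumption taken from `_forward_gradient`, not a documented API):

```
def stub_forward_gradient(op_name, attr_tuple, inputs, outputs, tangents):
  # One tangent per op output; x + x has a single output, so the
  # accumulator records -15. as z's JVP.
  return [constant_op.constant(-15.)]
```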
Example #2
      # Differentiating the backward function's outputs with respect to the
      # auxiliary "aids" transposes the VJP, yielding the forward-mode JVP.
      transpose_tape.watch(forwardprop_aids)
      grads = backfunc_tape.gradient(
          trainable_outputs,
          trainable_inputs,
          forwardprop_aids,
          unconnected_gradients=UnconnectedGradients.ZERO)
  nontrivial_output_tangents = transpose_tape.gradient(
      grads, forwardprop_aids, output_gradients=nontrivial_tangents)
  # Scatter tangents back to their original output positions; outputs
  # disconnected from the watched inputs keep a tangent of None.
  output_tangents = [None] * len(outputs)
  for index, tangent in zip(nontrivial_output_indices,
                            nontrivial_output_tangents):
    output_tangents[index] = tangent
  return output_tangents


pywrap_tensorflow.TFE_Py_RegisterForwardGradientFunction(_forward_gradient)
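The tail of `_forward_gradient` above implements forward mode by transposing reverse mode: a VJP computes J^T·u, and because the VJP is linear in its cotangent u, differentiating it with respect to u and contracting with a tangent v produces J·v, the JVP. A minimal self-contained sketch of the same double-backprop trick using the public tf.GradientTape API (the function name and structure are illustrative, not TensorFlow's own):

```
import tensorflow as tf

def jvp_via_double_backprop(f, x, tangent):
  with tf.GradientTape() as transpose_tape:
    with tf.GradientTape() as backfunc_tape:
      backfunc_tape.watch(x)
      y = f(x)
    # An auxiliary cotangent (the "forwardprop aid"); its value is
    # irrelevant because the VJP below is linear in it.
    aid = tf.ones_like(y)
    transpose_tape.watch(aid)
    # Reverse mode: grad = J(x)^T . aid, recorded on transpose_tape.
    grad = backfunc_tape.gradient(y, x, output_gradients=aid)
  # Differentiate grad w.r.t. aid, contracted with the tangent:
  # d(sum(grad * tangent))/d(aid) = J(x) . tangent, i.e. the JVP.
  return transpose_tape.gradient(grad, aid, output_gradients=tangent)

# Example: f = sin, so the JVP at x with tangent t is cos(x) * t.
print(jvp_via_double_backprop(tf.sin, tf.constant(2.0), tf.constant(3.0)))
```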


class ForwardGradientAccumulator(object):
    """Computes Jacobian-vector products using forward-mode autodiff.

  Example:

  ```
  with ForwardGradientAccumulator() as acc:
    x = tf.constant([[2.0, 3.0], [1.0, 4.0]])
    acc.watch(x, tf.constant([[5., 6.], [7., 8.]]))
    y = tf.reduce_sum(tf.sin(x) * tf.tan(x), axis=1)
  jvp = acc.jvp(y)
  ```
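As a sanity check on the docstring example: y_i = sum_j sin(x_ij)·tan(x_ij), so forward mode gives jvp_i = sum_j (cos(x_ij)·tan(x_ij) + sin(x_ij)/cos(x_ij)^2)·t_ij for a tangent t. A sketch verifying this with tf.autodiff.ForwardAccumulator, the public API this class later became (assuming a recent TF 2.x):

```
import tensorflow as tf

x = tf.constant([[2.0, 3.0], [1.0, 4.0]])
t = tf.constant([[5., 6.], [7., 8.]])

with tf.autodiff.ForwardAccumulator(primals=x, tangents=t) as acc:
  y = tf.reduce_sum(tf.sin(x) * tf.tan(x), axis=1)
jvp = acc.jvp(y)

# Analytic check: d/dx [sin(x) * tan(x)] = cos(x)*tan(x) + sin(x)/cos(x)**2.
expected = tf.reduce_sum(
    (tf.cos(x) * tf.tan(x) + tf.sin(x) / tf.cos(x) ** 2) * t, axis=1)
tf.debugging.assert_near(jvp, expected)
```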