# Imports required by the helpers below. The module path for
# `tfp_custom_gradient` follows TensorFlow Probability's internal layout.
import tensorflow.compat.v2 as tf

from tensorflow_probability.python.internal import custom_gradient as tfp_custom_gradient


def _outer_squared_difference(x, y):
    """Convenience function analogous to tf.squared_difference."""
    z = x - y
    return z[..., tf.newaxis, :] * z[..., tf.newaxis]
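

# A minimal usage sketch (an illustrative addition, not part of the original
# module): for inputs whose rightmost dimension is `n`,
# `_outer_squared_difference` broadcasts the elementwise difference `z = x - y`
# against itself to form an outer product of shape `[..., n, n]`. The concrete
# values below are assumptions chosen for the example.
def _example_outer_squared_difference():
    x = tf.constant([1., 2., 4.])
    y = tf.constant([1., 1., 1.])
    out = _outer_squared_difference(x, y)  # z = [0., 1., 3.]; out has shape [3, 3].
    # out[i, j] == z[i] * z[j]; the diagonal recovers the squared differences.
    return out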


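# `_prevent_2nd_derivative` registers custom derivatives in both directions:
# `vjp_fwd` runs the forward pass (an identity with no auxiliary state),
# `vjp_bwd` routes the incoming cotangent through `prevent_gradient` so that
# differentiating the first derivative fails, and `jvp_fn` guards both the
# primal and tangent outputs of the forward-mode pass the same way.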
@tfp_custom_gradient.custom_gradient(
    vjp_fwd=lambda x: (tf.identity(x), ()),
    vjp_bwd=lambda _, dx: tfp_custom_gradient.prevent_gradient(  # pylint: disable=g-long-lambda
        dx,
        message='Second derivative is not implemented.'),
    jvp_fn=lambda primals, tangents: (  # pylint: disable=g-long-lambda
        tfp_custom_gradient.prevent_gradient(
            primals[0], message='Second derivative is not implemented.'),
        tfp_custom_gradient.prevent_gradient(
            tangents[0], message='Second derivative is not implemented.')))
def _prevent_2nd_derivative(x):
    """Disables computation of the second derivatives for a tensor.

    NB: you need to apply a non-identity function to the output tensor for the
    exception to be raised.

    Arguments:
      x: A tensor.

    Returns:
      A tensor with the same contents as `x`. Its first derivative passes
      through unchanged; taking its second derivative raises an error.
    """
    return tf.identity(x)
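

# A minimal usage sketch (an illustrative addition, not part of the original
# module): the first derivative flows through `_prevent_2nd_derivative`
# untouched, while requesting the second derivative hits `prevent_gradient`
# and raises with 'Second derivative is not implemented.'. Per the NB above,
# `tf.square` supplies the non-identity function needed for the error to fire.
def _example_prevent_2nd_derivative():
    x = tf.constant(3.)
    with tf.GradientTape() as outer_tape:
        outer_tape.watch(x)
        with tf.GradientTape() as inner_tape:
            inner_tape.watch(x)
            y = tf.square(_prevent_2nd_derivative(x))
        dy_dx = inner_tape.gradient(y, x)  # First derivative: 2 * x == 6.
    # Differentiating the first derivative again raises the error above.
    return outer_tape.gradient(dy_dx, x)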