  def testGraphModeEagerGradError(self):
    with context.graph_mode():

      def f():
        x = variable_scope.get_variable(
            'v', initializer=constant_op.constant(1.0))
        return x * constant_op.constant(2.0)

      with self.assertRaisesRegexp(ValueError,
                                   'No trainable variables were accessed'):
        backprop.implicit_val_and_grad(f)()
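    # implicit_val_and_grad discovers which trainable variables to
    # differentiate by watching variable reads on the eager tape while the
    # wrapped function runs; under context.graph_mode() that read is not
    # recorded, so the ValueError asserted above is the expected outcome.
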
  def testCustomGradient(self):

    @custom_gradient.custom_gradient
    def my_mul(x, y):
      result = x*y

      def grad(dr):
        return [dr*y, dr*x]
      return result, grad
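    # Note: the decorated my_mul returns a (result, grad_fn) pair; the
    # custom_gradient decorator uses grad_fn to map the upstream gradient dr
    # to one gradient per input of my_mul, here [d/dx, d/dy] = [dr*y, dr*x].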

    lr = 0.25
    x = resource_variable_ops.ResourceVariable(2., name='x')

    def loss(x):
      return my_mul(2., x.read_value())

    loss_grads_fn = backprop.implicit_val_and_grad(loss)

    losses = []
    for _ in range(5):
      loss, grads_and_vars = loss_grads_fn(x)
      losses.append(loss.numpy())
      for (grad, var) in grads_and_vars:
        var.assign_sub(lr*grad)
    self.assertAllEqual(losses, [4.0, 3., 2., 1., 0.])
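
    # Worked arithmetic behind the expected losses: the loss reads 2 * x for
    # the variable x, and my_mul's grad_fn yields d(loss)/dx = dr * 2 = 2, so
    # each step applies x <- x - lr * 2 = x - 0.5. Reading the loss before
    # each update gives 2*2.0, 2*1.5, 2*1.0, 2*0.5, 2*0.0 = [4., 3., 2., 1., 0.].
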
  def testDifferentiatingFunctionThatReturnsNone(self):

    def fn(x, y):
      result = x*y  # pylint: disable=unused-variable

    x = constant_op.constant(1)
    y = constant_op.constant(2)

    loss_grads_fn = backprop.implicit_val_and_grad(fn)
    with self.assertRaisesRegexp(
        ValueError, 'Cannot differentiate a function that returns None; '
        'did you forget to return a value from fn?'):
      loss_grads_fn(x, y)

    val_and_grads_fn = backprop.val_and_grad_function(fn)
    with self.assertRaisesRegexp(
        ValueError, 'Cannot differentiate a function that returns None; '
        'did you forget to return a value from fn?'):
      val_and_grads_fn(x, y)
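
    # A minimal sketch (hypothetical, not part of this test) of the fix the
    # error message suggests: once fn returns its result, both wrappers can
    # differentiate it as usual.
    #
    #   def fn_fixed(x, y):
    #     return x * y
    #
    #   x = constant_op.constant(1.)
    #   y = constant_op.constant(2.)
    #   value, grads = backprop.val_and_grad_function(fn_fixed)(x, y)
    #   # value == 2.0, grads == [d/dx, d/dy] == [2.0, 1.0]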