Code example #1
  def testGraphModeEagerGradError(self):
    with context.graph_mode():
      def f():
        x = variable_scope.get_variable(
            'v', initializer=constant_op.constant(1.0))
        return x * constant_op.constant(2.0)

      with self.assertRaisesRegexp(ValueError,
                                   'No trainable variables were accessed'):
        backprop.implicit_val_and_grad(f)()
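
The test above exercises the internal backprop.implicit_val_and_grad helper in graph mode. For contrast, here is a minimal sketch of what "no trainable variables were accessed" means in eager mode, written against the public tf.GradientTape API rather than the internal module (the public-API translation is an assumption, not part of the test): a function that reads only constants leaves the tape with nothing to differentiate against.

import tensorflow as tf

def f():
  # Only constants are read here; no trainable variable is accessed.
  return tf.constant(1.0) * tf.constant(2.0)

with tf.GradientTape() as tape:
  value = f()

# Nothing was watched, so there is nothing to take a gradient with respect to.
print(tape.watched_variables())  # ()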
Code example #2
File: backprop_test.py  Project: Wajih-O/tensorflow
  def testCustomGradient(self):

    @custom_gradient.custom_gradient
    def my_mul(x, y):
      result = x*y

      def grad(dr):
        return [dr*y, dr*x]
      return result, grad

    lr = 0.25
    x = resource_variable_ops.ResourceVariable(2., name='x')

    def loss(x):
      return my_mul(2., x.read_value())

    loss_grads_fn = backprop.implicit_val_and_grad(loss)

    losses = []
    for _ in range(5):
      loss, grads_and_vars = loss_grads_fn(x)
      losses.append(loss.numpy())
      for (grad, var) in grads_and_vars:
        var.assign_sub(lr*grad)
    self.assertAllEqual(losses, [4.0, 3., 2., 1., 0.])
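
The test above drives gradient descent through the internal backprop.implicit_val_and_grad API. A rough equivalent written against the public tf.custom_gradient / tf.GradientTape API is sketched below (the public-API translation is an assumption; the expected loss values come from the test's assertion). The variable is read with read_value() before being passed to my_mul, mirroring the test, so that tf.custom_gradient sees plain tensors as its inputs.

import tensorflow as tf

@tf.custom_gradient
def my_mul(x, y):
  result = x * y

  def grad(dr):
    # Gradients with respect to the inputs x and y, in order.
    return dr * y, dr * x

  return result, grad

lr = 0.25
x = tf.Variable(2.0, name='x')

losses = []
for _ in range(5):
  with tf.GradientTape() as tape:
    # Reading the variable inside the tape records the read, so the
    # gradient of the loss flows back to x.
    loss = my_mul(2.0, x.read_value())
  (dx,) = tape.gradient(loss, [x])
  losses.append(float(loss))
  x.assign_sub(lr * dx)

print(losses)  # expected: [4.0, 3.0, 2.0, 1.0, 0.0]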
Code example #3
File: backprop_test.py  Project: Wajih-O/tensorflow
  def testDifferentiatingFunctionThatReturnsNone(self):

    def fn(x, y):
      result = x*y  # pylint: disable=unused-variable

    x = constant_op.constant(1)
    y = constant_op.constant(2)

    loss_grads_fn = backprop.implicit_val_and_grad(fn)
    with self.assertRaisesRegexp(
        ValueError, 'Cannot differentiate a function that returns None; '
        'did you forget to return a value from fn?'):
      loss_grads_fn(x, y)

    val_and_grads_fn = backprop.val_and_grad_function(fn)
    with self.assertRaisesRegexp(
        ValueError, 'Cannot differentiate a function that returns None; '
        'did you forget to return a value from fn?'):
      val_and_grads_fn(x, y)
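
Both entry points refuse to differentiate a function whose body never returns anything. A minimal sketch of the repaired function, again using the public tf.GradientTape API (and float constants, since integer tensors are not differentiable), might look like this:

import tensorflow as tf

def fn(x, y):
  # Returning the product gives backprop something to differentiate.
  return x * y

x = tf.constant(1.0)
y = tf.constant(2.0)

with tf.GradientTape() as tape:
  # Constants are not watched automatically, so ask the tape to track them.
  tape.watch([x, y])
  loss = fn(x, y)

dx, dy = tape.gradient(loss, [x, y])
print(float(loss), float(dx), float(dy))  # 2.0 2.0 1.0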