Code example #1
0
File: backprop.py  Project: zobiaakram/tensorflow
 def grad_fn(*args):
     """Run the wrapped function under a fresh tape and differentiate it.

     Returns the output of ``f(*args)`` together with a list pairing each
     computed gradient with the variable it was computed for.
     """
     tape.push_new_tape()
     result = f(*args)
     watched = tape.top_tape_watched_variables()
     # NOTE(review): the tape pushed above is never visibly popped in this
     # block; presumably imperative_grad consumes it -- confirm against the
     # tape module before relying on stack balance.
     gradients = imperative_grad(result, [v.handle for v in watched])
     return result, list(zip(gradients, watched))
Code example #2
0
File: backprop.py  Project: Crazyonxh/tensorflow
 def grad_fn(*args):
   """Evaluate ``f`` on a new gradient tape and return value plus grads.

   Returns a pair: the result of ``f(*args)``, and ``(gradient, variable)``
   tuples for every variable watched while ``f`` executed.
   """
   tape.push_new_tape()
   out = f(*args)
   tracked_vars = tape.top_tape_watched_variables()
   handles = [var.handle for var in tracked_vars]
   # NOTE(review): no explicit tape.pop_tape() here -- presumably
   # imperative_grad pops the tape internally; verify.
   grads = imperative_grad(out, handles)
   return out, list(zip(grads, tracked_vars))
Code example #3
0
File: backprop.py  Project: yzh2436/tensorflow
    def grad_fn(*args):
        """Trace ``f`` on a fresh tape and compute its gradients.

        Returns the output of ``f(*args)`` and a list of
        ``(gradient, variable)`` pairs for the watched variables.

        Raises:
            ValueError: if no trainable variables were watched while
                ``f`` executed.
        """
        tape.push_new_tape()
        output = f(*args)
        watched = tape.top_tape_watched_variables()
        handles = [var.handle for var in watched]
        if not handles:
            raise ValueError("no trainable variables were accessed while the "
                             "function was being computed.")
        # pop_tape() hands the recorded tape to the gradient machinery.
        gradients = imperative_grad.imperative_grad(
            _default_vspace, tape.pop_tape(), nest.flatten(output), handles)
        return output, list(zip(gradients, watched))
Code example #4
0
File: backprop.py  Project: DjangoPeng/tensorflow
  def grad_fn(*args):
    """Differentiate the wrapped function w.r.t. its watched variables.

    Runs ``f`` under a newly pushed tape, then pops that tape and feeds
    it to the imperative gradient machinery.

    Raises:
      ValueError: when the traced execution touched no trainable variables.
    """
    tape.push_new_tape()
    result = f(*args)
    tape_vars = tape.top_tape_watched_variables()
    var_handles = [v.handle for v in tape_vars]
    if not var_handles:
      raise ValueError("no trainable variables were accessed while the "
                       "function was being computed.")
    grads = imperative_grad.imperative_grad(_default_vspace, tape.pop_tape(),
                                            nest.flatten(result), var_handles)
    return result, list(zip(grads, tape_vars))
Code example #5
0
    def grad_fn(*args):
        """Evaluate ``f`` on its own tape and return (result, gradients).

        The tape is always popped -- even when ``f`` raises -- so the
        global tape stack stays balanced.

        Raises:
            ValueError: if ``f`` returns ``None``, or if no trainable
                variables were watched while ``f`` executed.
        """
        tape.push_new_tape()
        try:
            result = f(*args)
            if result is None:
                raise ValueError(
                    "Cannot differentiate a function that returns None; "
                    "did you forget to return a value from {}?".format(
                        f.__name__))
            watched = tape.top_tape_watched_variables()
        finally:
            this_tape = tape.pop_tape()
        handles = [var.handle for var in watched]

        if not handles:
            raise ValueError("No trainable variables were accessed while the "
                             "function was being computed.")
        grads = imperative_grad.imperative_grad(
            _default_vspace, this_tape, nest.flatten(result), handles)
        return result, list(zip(grads, watched))