Example #1
def __del__(self):
    if not self._requires_grad or self._static_shape:
        if self._own_storage and self._dg_tensor:
            # Always reuse the leaf variables or
            # tensors that do not require grad
            # PyGC will detect them automatically
            TPool.put(self.name)
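
The destructor above hands the tensor's name back to a pool (TPool) so the underlying storage can be reused rather than reallocated. TPool itself is not part of the snippet; the following is a minimal, hypothetical sketch of the name-pool pattern the code assumes:

# Hypothetical stand-in for the framework's TPool/APool (not shown above);
# it only illustrates the recycle-by-name pattern that __del__ relies on.
from collections import deque

class _NamePool(object):
    """Hands out string handles and recycles the ones released in __del__."""

    def __init__(self, prefix):
        self._prefix = prefix
        self._free = deque()
        self._counter = 0

    def get(self):
        # Prefer a recycled name; mint a fresh one only when the pool is empty.
        if self._free:
            return self._free.popleft()
        self._counter += 1
        return '{}:{}'.format(self._prefix, self._counter)

    def put(self, name):
        # Called from __del__ above to return the name for later reuse.
        self._free.append(name)

TPool = _NamePool('tensor')  # illustrative only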
Example #2
def backward(self, gradient=None):
    if not self._requires_grad:
        raise RuntimeError('This variable does not require gradients.'
                           '\nCannot run backward from this variable.')

    # 1. Expressions -> Forward-Ops
    # We should sort these operators into topological order before using them
    all_expressions = sorted(self._expr._ops.items(), key=lambda d: d[0])
    forward_ops = [v for k, v in all_expressions]

    # 2. Forward-Ops + Targets + InputGrads + IgnoredGrads -> Backward-Ops
    targets = [self.name]
    input_grads = []
    ignored_grads = list(self._ignored_grads) if self._ignored_grads else []
    if gradient is not None:
        if not isinstance(gradient, Tensor):
            raise TypeError('gradients can be either Tensors, Variables or None,'
                            ' but got {}'.format(type(gradient)))
        tensor_utils.FromPyArray(gradient.cpu().numpy(), self.name + '_grad')
        input_grads.append(self.name + '_grad')

    # 3. Run the gradient flow
    ws.RunGradientFlow(forward_ops, targets, input_grads, ignored_grads)

    # 4. Release resources
    # We should release both the anchors and tensors
    for forward_op in forward_ops:
        APool.put(forward_op.name)
        for output in forward_op.output:
            if output not in forward_op.input:
                TPool.put(output)
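
This backward() follows the familiar autograd calling convention: an optional gradient tensor seeds the backward pass. A minimal usage sketch of that convention, shown with standard PyTorch as a stand-in for the framework's PyTorch-compatible front end:

# Usage sketch of the calling convention backward() above reproduces,
# written against standard PyTorch for clarity (the snippet mimics this API).
import torch

x = torch.ones(2, 3, requires_grad=True)
y = (x * 2).sum()

# Scalar output: the gradient argument may be omitted.
y.backward()
print(x.grad)  # every element equals 2

# Non-scalar output: an explicit gradient tensor is supplied,
# mirroring the isinstance check at the top of the method above.
z = x * 3
z.backward(gradient=torch.ones_like(z))
print(x.grad)  # gradients accumulate: every element now equals 5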