Example #1
def backward(self, gradient=None):
    if not self._requires_grad:
        raise RuntimeError('This variable does not require grads.'
                           '\nCannot backward from this variable.')

    # 1. Expressions -> Forward-Ops
    # We must sort these operators into topological order before running them
    all_expressions = sorted(self._expr._ops.items(), key=lambda d: d[0])
    forward_ops = [v for k, v in all_expressions]

    # 2. Forward-Ops + Targets + InputGrads + IgnoredGrads -> Backward-Ops
    targets = [self.name]
    input_grads = []
    ignored_grads = list(self._ignored_grads) if self._ignored_grads else []
    if gradient is not None:
        if not isinstance(gradient, Tensor):
            raise TypeError('gradients can be either Tensors, Variables or None,'
                            ' but got {}'.format(type(gradient)))
        # Register the seed gradient under the name '<name>_grad'
        tensor_utils.FromPyArray(gradient.cpu().numpy(), self.name + '_grad')
        input_grads.append(self.name + '_grad')

    # 3. Dispatch the gradient flow
    ws.RunGradientFlow(forward_ops, targets, input_grads, ignored_grads)

    # 4. Release resources
    # We should release both the op anchors and the intermediate tensors
    for forward_op in forward_ops:
        APool.put(forward_op.name)
        for output in forward_op.output:
            # Skip in-place outputs, whose storage is shared with an input
            if output not in forward_op.input:
                TPool.put(output)
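
The topology recovery in step 1 relies only on the monotonically increasing ids under which expressions were recorded: sorting the items by key restores execution order. A minimal sketch of the idea (the dict and op names below are hypothetical; the real values are operator defs, not strings):

recorded_ops = {2: 'Add', 0: 'MatMul', 1: 'Relu'}  # hypothetical op records
forward_ops = [op for _, op in sorted(recorded_ops.items())]
print(forward_ops)  # ['MatMul', 'Relu', 'Add']
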
Example #2
def __del__(self):
    if not self._requires_grad or self._static_shape:
        if self._own_storage and self._dg_tensor:
            # Always recycle the leaf variables or
            # tensors that do not require grad;
            # Python's GC will detect them automatically
            TPool.put(self.name)
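
The ``TPool.put`` above is one half of a name-recycling pool: a dying leaf hands its name, and thus its underlying storage, back for later ``get`` calls to reuse. A minimal stand-in sketch (``NamePool`` is invented here for illustration; Dragon's real TensorPool is more involved):

from collections import defaultdict, deque

class NamePool(object):
    """Recycle tensor names per scope so their storage can be reused."""
    def __init__(self):
        self._free = defaultdict(deque)
        self._count = defaultdict(int)

    def get(self, scope='detach'):
        # Prefer a recycled name; mint a fresh one otherwise
        if self._free[scope]:
            return self._free[scope].popleft()
        self._count[scope] += 1
        return '{}:{}'.format(scope, self._count[scope])

    def put(self, name):
        self._free[name.split(':')[0]].append(name)

pool = NamePool()
a = pool.get('leaf')     # 'leaf:1'
pool.put(a)              # e.g. triggered from __del__
print(pool.get('leaf'))  # 'leaf:1' again, not 'leaf:2'
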
Example #3
def _init_from_numpy(self, array):
    self._static_shape = Size(array.shape)
    # We use the ``numpy`` scope instead of ``leaf``,
    # as it is costly to switch memory between ``copy`` and ``zero-copy``
    self._dg_tensor = tensor_utils.FromPyArray(
        array, name=TPool.get('numpy'))
    self._ignored_grads = (
        {self.name + '_grad'} if not self._requires_grad else None)
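
The ``copy`` versus ``zero-copy`` remark above is the usual aliasing distinction. A plain-NumPy illustration, independent of Dragon, of why the two regimes should not share names:

import numpy as np

src = np.zeros(4, dtype='float32')
view = src.reshape(2, 2)   # zero-copy: aliases src's buffer
dup = src.copy()           # copy: owns a fresh buffer
src[0] = 1.0
print(view[0, 0], dup[0])  # 1.0 0.0 -- the view sees the write, the copy does not
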
Example #4
def _init_from_shape(self, shape):
    if isinstance(shape, int):
        shape = [shape]
    self._static_shape = Size(shape)
    self._dg_tensor = tensor_utils.FromShape(
        shape,
        self._dtype,
        ctx=CTX_TO_DEVICE_OPTION[tuple(self._ctx)],
        name=TPool.get('leaf'))
    self._ignored_grads = (
        {self.name + '_grad'} if not self._requires_grad else None)
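
Both initializers end with the same idiom: when a tensor does not require grad, the name of its would-be gradient is blacklisted so the gradient flow can skip producing it. The pattern extracted as a tiny helper (hypothetical, for illustration only):

def ignored_grads(name, requires_grad):
    # Blacklist '<name>_grad' when no gradient is wanted
    return None if requires_grad else {name + '_grad'}

print(ignored_grads('leaf:1', False))  # {'leaf:1_grad'}
print(ignored_grads('leaf:2', True))   # None
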
Example #5
File: module.py  Project: k9sret/Dragon
    def register_buffers(self, n_buffers):
        """Request n_buffers buffers from the TensorPool.

        Buffers will be released after the backward pass.

        Parameters
        ----------
        n_buffers : int
            The number of buffers.

        Returns
        -------
        list of str
            The names of the requested buffers.

        """
        return [TPool.get() for _ in range(n_buffers)]
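
A runnable sketch of the contract, with ``_StubPool`` standing in for the real TensorPool: each call hands out fresh or recycled buffer names, and names released after the backward pass become available again.

class _StubPool(object):
    """A simplified stand-in for TPool, for illustration only."""
    def __init__(self):
        self._free, self._count = [], 0

    def get(self):
        if self._free:
            return self._free.pop()
        self._count += 1
        return 'buffer:{}'.format(self._count)

    def put(self, name):
        self._free.append(name)

TPool = _StubPool()

def register_buffers(n_buffers):
    return [TPool.get() for _ in range(n_buffers)]

names = register_buffers(2)
print(names)                 # ['buffer:1', 'buffer:2']
for n in names:              # released after the backward pass
    TPool.put(n)
print(register_buffers(2))   # recycled: ['buffer:2', 'buffer:1']
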