Example #1
0
 def _init_from_numpy(self, array):
     """Initialize the tensor by wrapping an existing numpy array.

     Parameters
     ----------
     array : numpy.ndarray
         The array providing the data and the static shape.

     """
     self._static_shape = Size(array.shape)
     # Pool the name under the ``numpy`` scope rather than ``leaf``:
     # switching memory between ``copy`` and ``zero-copy`` is costly.
     self._dg_tensor = tensor_utils.FromPyArray(
         array, name=TPool.get('numpy'))
     # Tensors that do not require grad mark their grad slot as ignored.
     if self._requires_grad:
         self._ignored_grads = None
     else:
         self._ignored_grads = {self.name + '_grad'}
Example #2
0
 def _init_from_shape(self, shape):
     """Initialize the tensor from a shape, allocating pooled storage.

     Parameters
     ----------
     shape : int or sequence of int
         The shape; a bare int is treated as a 1-D shape ``[shape]``.

     """
     # Normalize a scalar shape into a one-element list.
     if isinstance(shape, int):
         shape = [shape]
     self._static_shape = Size(shape)
     device_option = CTX_TO_DEVICE_OPTION[tuple(self._ctx)]
     self._dg_tensor = tensor_utils.FromShape(
         shape,
         self._dtype,
         ctx=device_option,
         name=TPool.get('leaf'))
     # Tensors that do not require grad mark their grad slot as ignored.
     if self._requires_grad:
         self._ignored_grads = None
     else:
         self._ignored_grads = {self.name + '_grad'}
Example #3
0
    def register_buffers(self, n_buffers):
        """Apply for n buffers from TensorPool.

        Buffers will be released after backward pass.

        Parameters
        ----------
        n_buffers : int
            The number of buffers.

        Returns
        -------
        list
            The buffers obtained from the pool (presumably pooled
            tensor names — confirm against ``TPool.get``).

        """
        # ``_`` instead of ``i``: the loop variable is intentionally unused.
        return [TPool.get() for _ in range(n_buffers)]