Exemplo n.º 1
0
 def resize_as(self, tensor):
     """Return ``self`` resized to the shape of *tensor* (non-inplace, deprecated).

     Dispatches to ``__torch_function__`` overrides first; otherwise warns and
     delegates to the legacy autograd ``Resize`` function.
     """
     if has_torch_function_variadic(self, tensor):
         return handle_torch_function(
             Tensor.resize_as, (self, tensor), self, tensor
         )
     warnings.warn("non-inplace resize_as is deprecated")
     # Deferred import: keep torch.autograd._functions off the import path
     # until this deprecated code path is actually hit.
     from torch.autograd._functions import Resize
     target_shape = tensor.size()
     return Resize.apply(self, target_shape)
Exemplo n.º 2
0
 def __ipow__(self, other):  # type: ignore[misc]
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__ipow__, (self, other), self,
                                      other)
     return NotImplemented
Exemplo n.º 3
0
 def __rdiv__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rdiv__, (self, other), self,
                                      other)
     return self.reciprocal() * other
Exemplo n.º 4
0
 def __rsub__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rsub__, (self, other), self,
                                      other)
     return _C._VariableFunctions.rsub(self, other)
Exemplo n.º 5
0
 def __rmatmul__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rmatmul__, (self, other),
                                      self, other)
     return torch.matmul(other, self)
Exemplo n.º 6
0
 def __rmod__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rmod__, (self, other), self,
                                      other)
     return torch.remainder(other, self)