Example #1
0
 def resize_as(self, tensor):
     """Deprecated out-of-place resize: return ``self`` reshaped to ``tensor``'s size.

     Defers to ``__torch_function__`` overrides first; otherwise warns and
     routes through the legacy autograd ``Resize`` function.
     """
     if has_torch_function_variadic(self, tensor):
         return handle_torch_function(
             Tensor.resize_as, (self, tensor), self, tensor
         )
     warnings.warn("non-inplace resize_as is deprecated")
     # Imported lazily to avoid a module-load cycle with torch.autograd.
     from torch.autograd._functions import Resize
     target_size = tensor.size()
     return Resize.apply(self, target_size)
Example #2
0
 def __ipow__(self, other):  # type: ignore[misc]
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__ipow__, (self, other), self,
                                      other)
     return NotImplemented
Example #3
0
 def __rdiv__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rdiv__, (self, other), self,
                                      other)
     return self.reciprocal() * other
Example #4
0
 def __rsub__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rsub__, (self, other), self,
                                      other)
     return _C._VariableFunctions.rsub(self, other)
Example #5
0
 def __rmatmul__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rmatmul__, (self, other),
                                      self, other)
     return torch.matmul(other, self)
Example #6
0
 def __rmod__(self, other):
     if has_torch_function_variadic(self, other):
         return handle_torch_function(Tensor.__rmod__, (self, other), self,
                                      other)
     return torch.remainder(other, self)