def resize_as(self, tensor):
    """Deprecated out-of-place variant of ``resize_as_``.

    Returns ``self`` resized to ``tensor.size()`` through the autograd
    ``Resize`` function rather than mutating ``self`` in place.
    """
    # Give __torch_function__ overrides (tensor subclasses / tensor-likes)
    # first shot at handling the call.
    if has_torch_function_variadic(self, tensor):
        return handle_torch_function(Tensor.resize_as, (self, tensor), self, tensor)
    warnings.warn("non-inplace resize_as is deprecated")
    # Imported lazily so the module does not depend on autograd internals
    # at import time.
    from torch.autograd._functions import Resize

    return Resize.apply(self, tensor.size())
def __ipow__(self, other):  # type: ignore[misc]
    """In-place power, i.e. ``self **= other``.

    Returns ``NotImplemented`` so the interpreter falls back to the
    out-of-place ``__pow__`` path and rebinds the name to its result.
    """
    overridable = has_torch_function_variadic(self, other)
    if overridable:
        return handle_torch_function(Tensor.__ipow__, (self, other), self, other)
    return NotImplemented
def __rdiv__(self, other):
    """Reflected division: computes ``other / self``."""
    overridable = has_torch_function_variadic(self, other)
    if overridable:
        return handle_torch_function(Tensor.__rdiv__, (self, other), self, other)
    # other / self expressed as (1 / self) * other.
    return self.reciprocal() * other
def __rsub__(self, other):
    """Reflected subtraction: computes ``other - self``."""
    overridable = has_torch_function_variadic(self, other)
    if overridable:
        return handle_torch_function(Tensor.__rsub__, (self, other), self, other)
    # Delegate to the native rsub kernel.
    return _C._VariableFunctions.rsub(self, other)
def __rmatmul__(self, other):
    """Reflected matrix multiply: computes ``other @ self``."""
    overridable = has_torch_function_variadic(self, other)
    if overridable:
        return handle_torch_function(Tensor.__rmatmul__, (self, other), self, other)
    # Note the swapped operand order: self is the right-hand operand.
    return torch.matmul(other, self)
def __rmod__(self, other):
    """Reflected modulo: computes ``other % self``."""
    overridable = has_torch_function_variadic(self, other)
    if overridable:
        return handle_torch_function(Tensor.__rmod__, (self, other), self, other)
    # Note the swapped operand order: self is the divisor.
    return torch.remainder(other, self)