from typing import Optional

import torch


def copy_torch(x: torch.Tensor, requires_grad: Optional[bool] = None, device=None) -> torch.Tensor:
    """Return a copy of ``x``, optionally moved to ``device`` and detached from the autograd graph."""
    grad = requires_grad if requires_grad is not None else Backend.requires_grad()
    device = torch.device(device) if device is not None else Backend.get_device()
    new_tensor = x.clone()
    # Move the copy only when it is not already on the target device.
    if device is not None and new_tensor.device != device:
        new_tensor = new_tensor.to(device)
    # Detach when gradients are explicitly disabled.
    if grad is not None and not grad:
        new_tensor = new_tensor.detach()
    return new_tensor
def to_torch(x, requires_grad: Optional[bool] = None, device: Optional[str] = None, copy: bool = False):
    """Convert ``x`` to a ``torch.Tensor``, resolving unset options from the active ``Backend``."""
    use_grad = requires_grad if requires_grad is not None else Backend.requires_grad()
    device = device if device is not None else Backend.get_device()
    if isinstance(x, torch.Tensor):
        # Existing tensors are either cloned or adjusted in place.
        if copy:
            return copy_torch(x, device=device, requires_grad=use_grad)
        return _assign_device_and_grad(x, device=device, requires_grad=use_grad)
    # Non-tensor inputs (lists, NumPy arrays, scalars) are converted from scratch.
    return new_torch_tensor(x, device=device, copy=copy)
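
# A minimal usage sketch, assuming the surrounding module provides the
# Backend, _assign_device_and_grad, and new_torch_tensor referenced above;
# the tensor names here are illustrative only.
if __name__ == "__main__":
    t = to_torch([1.0, 2.0, 3.0])                    # non-tensor input: built via new_torch_tensor
    c = to_torch(t, copy=True, requires_grad=False)  # tensor input with copy: cloned and detached
    assert c is not t and not c.requires_grad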