def resize_as(self, tensor):
    """Deprecated out-of-place counterpart of ``Tensor.resize_as_``.

    Dispatches through ``__torch_function__`` when either argument's type
    overrides it; otherwise warns and returns ``self`` viewed with
    ``tensor``'s shape via the autograd ``Resize`` function.
    """
    if has_torch_function_variadic(self, tensor):
        return handle_torch_function(Tensor.resize_as, (self, tensor), self, tensor)
    warnings.warn("non-inplace resize_as is deprecated")
    from torch.autograd._functions import Resize

    target_shape = tensor.size()
    return Resize.apply(self, target_shape)
def epoch_eval(self, eval_loader) -> Tuple[float, float]:
    """Run one evaluation epoch and return ``(mean_loss, mean_iou)``.

    Args:
        eval_loader: iterable of ``(inputs, targets)`` batches; ``inputs``
            are image tensors and ``targets`` per-pixel class-index maps
            (shapes assumed from the permute/reshape below — confirm with
            the dataset).

    Returns:
        Tuple of the loss and the mean IoU, each averaged over the number
        of batches in ``eval_loader``.
    """
    import torch  # local import: top-of-file imports are outside this block

    self._model.eval()
    epoch_loss: float = 0.
    iou_list: List[float] = []
    # Evaluation needs no gradients: no_grad skips autograd bookkeeping,
    # which the original loop paid for on every batch.
    with torch.no_grad():
        for inputs, targets in eval_loader:
            inputs = inputs.to(self.device)
            targets = targets.to(self.device)
            outputs = self._model(inputs)
            # (B, C, H, W) -> (B, H, W, C) -> (B*H*W, C): one row of class
            # scores per pixel. reshape() replaces the deprecated private
            # autograd Resize function and copes with the non-contiguous
            # result of permute.
            outputs = outputs.permute(0, 2, 3, 1).reshape(-1, self.num_classes)
            targets = targets.reshape(-1)
            m_iou = mean_intersection_over_union(y_true=targets, y_pred=outputs, num_classes=self.num_classes)
            iou_list.append(m_iou)
            loss = self.criterion(outputs, targets)
            epoch_loss += loss.item()
    mean_loss: float = epoch_loss / len(eval_loader)
    mean_iou: float = sum(iou_list) / len(eval_loader)
    return mean_loss, mean_iou
def resize_as(self, tensor):
    """Deprecated out-of-place counterpart of ``Tensor.resize_as_``.

    Dispatches through ``__torch_function__`` when any argument's type
    overrides it; otherwise warns and returns ``self`` viewed with
    ``tensor``'s shape.
    """
    relevant_args = (self, tensor)
    from torch.overrides import has_torch_function, handle_torch_function
    # Fix: the original guard `type(self) is not Tensor and type(tensor)
    # is not Tensor and has_torch_function(...)` skipped dispatch when
    # only ONE argument was a Tensor subclass. `has_torch_function`
    # already returns True iff any argument overrides __torch_function__,
    # so it is the correct condition on its own (matching the later
    # `has_torch_function_variadic` form of this function).
    if has_torch_function(relevant_args):
        return handle_torch_function(Tensor.resize_as, relevant_args, self, tensor)
    warnings.warn("non-inplace resize_as is deprecated")
    from torch.autograd._functions import Resize
    return Resize.apply(self, tensor.size())
def resize_as(self, tensor):
    """Return ``self`` viewed with the shape of ``tensor``.

    Deprecated: prefer the in-place ``resize_as_``.
    """
    warnings.warn("non-inplace resize_as is deprecated")
    from torch.autograd._functions import Resize

    target_shape = tensor.size()
    return Resize.apply(self, target_shape)
def resize(self, *sizes):
    """Return ``self`` viewed with shape ``sizes``.

    Deprecated: prefer the in-place ``resize_``.
    """
    warnings.warn("non-inplace resize is deprecated")
    from torch.autograd._functions import Resize

    requested = sizes
    return Resize.apply(self, requested)
def resize(self, *sizes):
    """Deprecated out-of-place counterpart of ``Tensor.resize_``.

    Dispatches through ``__torch_function__`` when ``self``'s type
    overrides it; otherwise warns and returns ``self`` viewed with
    shape ``sizes`` via the autograd ``Resize`` function.
    """
    if has_torch_function_unary(self):
        return handle_torch_function(Tensor.resize, (self,), self, *sizes)

    warnings.warn("non-inplace resize is deprecated")
    from torch.autograd._functions import Resize

    requested = sizes
    return Resize.apply(self, requested)