def forward_batch(
    self,
    dataset_batch: DatasetBatch,
    model: NNModel,
    device: torch.device,
    postprocessors: List[PredictPostProcessingIF],
) -> InferenceResultBatch:
    """Run one inference pass over `dataset_batch` on the given device.

    Moves both the model and the batch onto `device`, then delegates the
    forward pass and post-processing to the inference component.

    :param dataset_batch: batch to run inference on (moved to `device` in place)
    :param model: model to evaluate; moved to `device` before prediction
    :param device: target device for model and batch
    :param postprocessors: post-processing steps applied to the raw predictions
    :return: the predictions produced by the inference component
    """
    model = model.to(device)
    dataset_batch.to_device(device)
    return self.inference_component.predict(model, dataset_batch, postprocessors)
def predict_dataset_batch(self, batch: DatasetBatch, no_grad: bool = True) -> InferenceResultBatch:
    """Run the model on `batch` and return post-processed predictions on CPU.

    :param batch: batch to run inference on; moved to `self._device` in place
    :param no_grad: if True (default), disable gradient tracking for the forward pass
    :return: post-processed `InferenceResultBatch` with all tensors moved to CPU
    """
    batch.to_device(self._device)
    # torch.set_grad_enabled doubles as a context manager, which removes the
    # duplicated forward call the previous if/else structure required.
    with torch.set_grad_enabled(not no_grad):
        forward_result = self.model.forward(batch.samples)
    # deepcopy so the result batch does not alias the input batch's target/tag tensors
    result_batch = InferenceResultBatch(
        targets=deepcopy(batch.targets),
        tags=deepcopy(batch.tags),
        predictions=forward_result,
    )
    result_batch = PredictPostprocessingComponent.post_process(
        result_batch, post_processors=self.post_processors
    )
    result_batch.to_cpu()
    return result_batch
def __call__(self, batch: List[torch.Tensor]) -> DatasetBatch:
    """Collate a list of (sample, target) pairs into a `DatasetBatch`.

    :param batch: list of tuples of structure (sample_tensor, target)
    :return: `DatasetBatch` with samples stacked and reshaped to NCHW
    """
    # NOTE(review): the single-channel 28x28 reshape hard-codes MNIST-sized
    # inputs — confirm this collator is only used with that dataset.
    inputs = torch.stack([item[0].to(self.device) for item in batch])
    inputs = inputs.reshape(-1, 1, 28, 28)
    # `inputs` already lives on self.device, so the targets follow it there
    targets_tensor = torch.tensor([item[1] for item in batch]).to(inputs.device)
    target_partitions = {self.target_publication_key: targets_tensor}
    return DatasetBatch(targets=target_partitions, tags=None, samples=inputs)
def test_detach(self, dataset_batch: DatasetBatch):
    # Smoke test: detaching a batch from the autograd graph must not raise.
    dataset_batch.detach()
def test_get_device(self, dataset_batch: DatasetBatch):
    """Moving a batch to the CPU must be reflected by get_device()."""
    cpu = torch.device("cpu")
    dataset_batch.to_device(cpu)
    assert dataset_batch.get_device() == cpu
def test_to_cpu(self, dataset_batch: DatasetBatch):
    # Smoke test: moving a batch to CPU must not raise.
    dataset_batch.to_cpu()
def test_to_device(self, dataset_batch: DatasetBatch):
    # Smoke test: moving a batch to an explicit device must not raise.
    dataset_batch.to_device(torch.device("cpu"))
def dataset_batch(self) -> DatasetBatch:
    """Fixture: small int32 batch whose samples, targets and tags share values."""
    values = torch.IntTensor([0, 0, 0, 1, 1, 1])
    # clone() so the three partitions do not share storage
    return DatasetBatch(
        targets={TestDatasetBatch.target_key: values.clone()},
        samples=values.clone(),
        tags=values.clone(),
    )
def train_batch(self, batch: DatasetBatch, model: NNModel, optimizer: OptimizerAdapter, device: torch.device):
    """Perform one optimization step on a single batch.

    :param batch: training batch; moved to `device` in place
    :param model: model whose gradients are zeroed, computed, and applied
    :param optimizer: optimizer stepped after backpropagation
    :param device: device the batch is moved to before the loss computation
    """
    batch.to_device(device)
    model.zero_grad()
    batch_loss = self.calc_loss(model, batch)
    # sum() reduces a possibly per-sample loss tensor to a scalar for backward()
    batch_loss.sum().backward()
    optimizer.step()