Exemplo n.º 1
0
 def forward_batch(
         self, dataset_batch: DatasetBatch, model: NNModel,
         device: torch.device, postprocessors: List[PredictPostProcessingIF]
 ) -> InferenceResultBatch:
     """Run one inference pass over a batch on the given device.

     Moves both the model and the batch onto *device*, then delegates
     prediction (including post-processing) to the inference component.
     """
     # Model and data must live on the same device before predicting.
     model = model.to(device)
     dataset_batch.to_device(device)
     return self.inference_component.predict(model, dataset_batch, postprocessors)
Exemplo n.º 2
0
    def predict_dataset_batch(self,
                              batch: DatasetBatch,
                              no_grad: bool = True) -> InferenceResultBatch:
        """Run the model forward on one batch and return post-processed results.

        :param batch: batch to predict; moved to ``self._device`` in place.
        :param no_grad: when True (default), run the forward pass without
            building the autograd graph.
        :return: an ``InferenceResultBatch`` on CPU, with targets/tags
            deep-copied from the input batch and predictions post-processed
            by ``self.post_processors``.
        """
        batch.to_device(self._device)
        # set_grad_enabled as a context manager replaces the duplicated
        # no_grad/grad branches with a single forward call.
        with torch.set_grad_enabled(not no_grad):
            # NOTE(review): calls .forward() directly, which bypasses any
            # registered module hooks — presumably intentional; confirm.
            forward_result = self.model.forward(batch.samples)

        result_batch = InferenceResultBatch(targets=deepcopy(batch.targets),
                                            tags=deepcopy(batch.tags),
                                            predictions=forward_result)
        result_batch = PredictPostprocessingComponent.post_process(
            result_batch, post_processors=self.post_processors)
        # Results are always returned on CPU regardless of self._device.
        result_batch.to_cpu()
        return result_batch
Exemplo n.º 3
0
 def __call__(self, batch: List[torch.Tensor]):
     """Collate a list of (sample, target) pairs into a DatasetBatch.

     :param batch: list of tuples of structure (sequence, target); each
         sample must be reshapeable to a 1x28x28 image (MNIST-style —
         TODO confirm against the dataset actually used).
     :return: DatasetBatch with stacked NCHW samples and a single
         target partition keyed by ``self.target_publication_key``.
     """
     inputs = [item[0].to(self.device) for item in batch]
     inputs = torch.stack(inputs)
     # Reshape into an NCHW image batch (hard-coded single-channel 28x28).
     inputs = inputs.reshape(-1, 1, 28, 28)
     # Targets go to the same device the samples were just moved to.
     targets_tensor = torch.tensor([item[1]
                                    for item in batch]).to(self.device)
     target_partitions = {self.target_publication_key: targets_tensor}
     return DatasetBatch(targets=target_partitions,
                         tags=None,
                         samples=inputs)
Exemplo n.º 4
0
 def test_detach(self, dataset_batch: DatasetBatch):
     """Smoke test: detaching a batch from the autograd graph must not raise."""
     dataset_batch.detach()
Exemplo n.º 5
0
 def test_get_device(self, dataset_batch: DatasetBatch):
     """Moving a batch to a device must be reflected by get_device()."""
     cpu_device = torch.device("cpu")
     dataset_batch.to_device(cpu_device)
     assert dataset_batch.get_device() == cpu_device
Exemplo n.º 6
0
 def test_to_cpu(self, dataset_batch: DatasetBatch):
     """Smoke test: moving a batch to CPU must not raise."""
     dataset_batch.to_cpu()
Exemplo n.º 7
0
 def test_to_device(self, dataset_batch: DatasetBatch):
     """Smoke test: moving a batch to an explicit device must not raise."""
     target_device = torch.device("cpu")
     dataset_batch.to_device(target_device)
Exemplo n.º 8
0
 def dataset_batch(self) -> DatasetBatch:
     """Build a small fixture batch.

     Targets, samples and tags share the same values but are independent
     tensors (clones), so in-place mutation of one does not alias the others.
     """
     # torch.tensor with an explicit dtype replaces the legacy IntTensor
     # constructor (same int32 result).
     tensor = torch.tensor([0, 0, 0, 1, 1, 1], dtype=torch.int32)
     return DatasetBatch(
         targets={TestDatasetBatch.target_key: tensor.clone()},
         samples=tensor.clone(),
         tags=tensor.clone())
Exemplo n.º 9
0
 def train_batch(self, batch: DatasetBatch, model: NNModel, optimizer: OptimizerAdapter, device: torch.device):
     """Run a single optimization step on one batch.

     Clears stale gradients, computes the loss on *device*,
     backpropagates, and applies the optimizer update.
     """
     batch.to_device(device)
     model.zero_grad()
     # calc_loss may return a per-sample loss tensor; reduce with sum()
     # before calling backward.
     training_loss = self.calc_loss(model, batch)
     training_loss.sum().backward()
     optimizer.step()