def on_evaluation_batch_start(self, batch, dataloader_idx, num_dataloaders):
    model = self.trainer.lightning_module
    # set dataloader_idx only if multiple ones
    model._current_dataloader_idx = dataloader_idx if num_dataloaders > 1 else None
    # track batch_size
    self.cached_results._batch_size = Result.extract_batch_size(batch)
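For context, the `_current_dataloader_idx` set here is later used to disambiguate metrics logged from different evaluation dataloaders. The helper below is a hypothetical sketch of that idea (the `_suffix_metrics` name and the exact suffix format are assumptions, not the library's code):

# Minimal sketch (assumed helper, not library code): when more than one
# evaluation dataloader is active, logged metric names get a per-dataloader
# suffix so results from different dataloaders do not collide.
def _suffix_metrics(metrics: dict, dataloader_idx):
    if dataloader_idx is None:
        # single dataloader: names are left untouched
        return metrics
    return {f"{name}/dataloader_idx_{dataloader_idx}": value
            for name, value in metrics.items()}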
Example #2
def on_evaluation_batch_start(self, testing, batch, dataloader_idx, num_dataloaders):
    # Todo: required argument `testing` is not used
    model = self.trainer.lightning_module
    # set dataloader_idx only if multiple ones
    model._current_dataloader_idx = dataloader_idx if num_dataloaders > 1 else None
    # track batch_size
    self.cached_results._batch_size = Result.extract_batch_size(batch)
Example #3
def on_evaluation_batch_start(self, testing, batch, dataloader_idx, num_dataloaders):
    model = self.trainer.get_model()
    # set dataloader_idx only if multiple ones
    model._current_dataloader_idx = dataloader_idx if num_dataloaders > 1 else None

    # track batch_size
    self.cached_results._batch_size = Result.extract_batch_size(batch)
Example #4
def on_train_split_start(self, split_idx: int, opt_idx: int, split_batch) -> None:
    self.cached_results._split_idx = split_idx
    self.cached_results._opt_idx = opt_idx
    self.cached_results._batch_size = Result.extract_batch_size(split_batch)
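All of these hooks cache a batch size via `Result.extract_batch_size` so that logged metrics can later be aggregated with the correct per-batch weights. The function below is a simplified, hypothetical stand-in (the name and fallback behaviour are assumptions, not the library's implementation) illustrating how a batch size can be inferred from a nested batch:

import torch

def extract_batch_size_sketch(batch) -> int:
    # Simplified stand-in for Result.extract_batch_size (assumed behaviour):
    # use the first dimension of the first tensor found in the batch,
    # recursing into the first element of dicts/lists/tuples; fall back to 1.
    if isinstance(batch, torch.Tensor):
        return batch.size(0)
    if isinstance(batch, dict):
        for value in batch.values():
            return extract_batch_size_sketch(value)
    if isinstance(batch, (list, tuple)):
        for value in batch:
            return extract_batch_size_sketch(value)
    return 1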