    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Performs evaluation on one single dataloader."""
        void(*args, **kwargs)

        dataloader_idx = self.current_dataloader_idx
        dataloader = self.current_dataloader
        assert self._data_fetcher is not None
        self._data_fetcher.setup(
            dataloader,
            batch_to_device=partial(self.trainer._call_strategy_hook,
                                    "batch_to_device",
                                    dataloader_idx=dataloader_idx),
        )
        dl_max_batches = self._max_batches[dataloader_idx]

        kwargs = OrderedDict()
        if self.num_dataloaders > 1:
            kwargs["dataloader_idx"] = dataloader_idx
        dl_outputs = self.epoch_loop.run(self._data_fetcher, dl_max_batches,
                                         kwargs)

        # store batch level output per dataloader
        self._outputs.append(dl_outputs)

        if not self.trainer.sanity_checking:
            # indicate the loop has run
            self._has_run = True
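
# A minimal, self-contained sketch of the ``partial`` pattern used above: keyword
# arguments such as ``dataloader_idx`` are pre-bound so the data fetcher can call a
# single-argument transfer function. ``move_to_device`` is a hypothetical stand-in
# for the strategy's "batch_to_device" hook, not a Lightning API.
from functools import partial

def move_to_device(batch, device="cpu", dataloader_idx=0):
    print(f"moving batch {batch!r} to {device} (dataloader_idx={dataloader_idx})")
    return batch

batch_to_device = partial(move_to_device, device="cpu", dataloader_idx=1)
batch_to_device([1, 2, 3])  # only the batch itself is left to supply
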
Example n. 2
    def on_run_start(self, *args: Any, **kwargs: Any) -> None:
        """Runs the ``on_evaluation_model_eval``, ``on_evaluation_start`` and ``on_evaluation_epoch_start`` hooks."""
        void(*args, **kwargs)
        # hook
        self.on_evaluation_model_eval()
        self.trainer.lightning_module.zero_grad()
        self.on_evaluation_start()
        self.on_evaluation_epoch_start()
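
# The hook order above mirrors the usual PyTorch evaluation setup: put the module
# into eval mode and clear stale gradients before running validation batches.
# A hedged, framework-free sketch of that sequence in plain PyTorch:
import torch
from torch import nn

model = nn.Linear(4, 2)

model.eval()           # disables dropout / uses batch-norm running stats
model.zero_grad()      # drop any gradients left over from the last training step
with torch.no_grad():  # no autograd bookkeeping during evaluation
    out = model(torch.randn(8, 4))
print(out.shape)       # torch.Size([8, 2])
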
Example n. 3
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Predicts one entire dataloader."""
        void(*args, **kwargs)
        dataloader = self.trainer.accelerator.process_dataloader(self.current_dataloader)
        dataloader_iter = enumerate(dataloader)
        dl_max_batches = self.max_batches[self.current_dataloader_idx]

        dl_predictions, dl_batch_indices = self.epoch_loop.run(
            dataloader_iter, self.current_dataloader_idx, dl_max_batches, self.num_dataloaders, self.return_predictions
        )
        self.predictions.append(dl_predictions)
        self.epoch_batch_indices.append(dl_batch_indices)
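
# What ``enumerate(dataloader)`` hands to the epoch loop: (batch_idx, batch) pairs,
# consumed until ``dl_max_batches`` is reached. Illustrative only; the batch cap and
# the "prediction" step below are applied by hand and are not Lightning code.
import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.arange(10).float().unsqueeze(1))
dataloader = DataLoader(dataset, batch_size=4)
dataloader_iter = enumerate(dataloader)

dl_max_batches = 2
predictions = []
for batch_idx, (batch,) in dataloader_iter:
    if batch_idx >= dl_max_batches:
        break
    predictions.append(batch * 2)  # stand-in for a predict step
print(len(predictions))  # 2
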
    def on_run_start(self, *args: Any, **kwargs: Any) -> None:
        """Runs the ``_on_evaluation_model_eval``, ``_on_evaluation_start`` and ``_on_evaluation_epoch_start``
        hooks."""
        void(*args, **kwargs)

        data_fetcher_cls = _select_data_fetcher_type(self.trainer)
        self._data_fetcher = data_fetcher_cls(prefetch_batches=self.prefetch_batches)

        # hook
        self._on_evaluation_model_eval()
        self.trainer.lightning_module.zero_grad()
        self._on_evaluation_start()
        self._on_evaluation_epoch_start()
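
# ``prefetch_batches`` in the fetcher above refers, roughly, to pulling one or more
# batches ahead of the consumer (useful e.g. to detect the final batch early or to
# overlap host-to-device transfer with compute). A toy, single-threaded illustration,
# not Lightning's actual data fetcher:
from collections import deque

class ToyPrefetcher:
    """Wraps any iterable and keeps ``prefetch`` items buffered ahead."""

    def __init__(self, iterable, prefetch=1):
        self.iterable = iterable
        self.prefetch = prefetch

    def __iter__(self):
        buffer = deque()
        for item in self.iterable:
            buffer.append(item)
            if len(buffer) > self.prefetch:
                yield buffer.popleft()
        while buffer:  # drain the tail once the source is exhausted
            yield buffer.popleft()

print(list(ToyPrefetcher(range(5), prefetch=2)))  # [0, 1, 2, 3, 4]
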
Example n. 5
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Predicts one entire dataloader."""
        void(*args, **kwargs)
        dataloader = self.current_dataloader
        if dataloader is not None:
            _set_sampler_epoch(
                dataloader,
                self.trainer.fit_loop.epoch_progress.current.processed)
        dataloader = self.trainer.strategy.process_dataloader(dataloader)
        dataloader_iter = enumerate(dataloader)
        dl_max_batches = self.max_batches[self.current_dataloader_idx]

        dl_predictions, dl_batch_indices = self.epoch_loop.run(
            dataloader_iter, self.current_dataloader_idx, dl_max_batches,
            self.num_dataloaders)
        self.predictions.append(dl_predictions)
        self.epoch_batch_indices.append(dl_batch_indices)
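
# ``_set_sampler_epoch`` above forwards the current epoch to the dataloader's
# sampler when it supports ``set_epoch``. A hedged re-creation of that pattern;
# the helper name and exact checks are assumed, not copied from Lightning:
def set_sampler_epoch_if_possible(dataloader, epoch: int) -> None:
    sampler = getattr(dataloader, "sampler", None)
    set_epoch = getattr(sampler, "set_epoch", None)
    if callable(set_epoch):
        set_epoch(epoch)  # e.g. DistributedSampler reshuffles per epoch
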
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Predicts one entire dataloader."""
        void(*args, **kwargs)
        dataloader = self.current_dataloader
        if (dataloader is not None and getattr(dataloader, "sampler", None)
                and callable(getattr(dataloader.sampler, "set_epoch", None))):
            # set seed for distributed sampler (enables shuffling for each epoch)
            dataloader.sampler.set_epoch(
                self.trainer.fit_loop.epoch_progress.current.processed)
        dataloader = self.trainer.strategy.process_dataloader(dataloader)
        dataloader_iter = enumerate(dataloader)
        dl_max_batches = self.max_batches[self.current_dataloader_idx]

        dl_predictions, dl_batch_indices = self.epoch_loop.run(
            dataloader_iter, self.current_dataloader_idx, dl_max_batches,
            self.num_dataloaders, self.return_predictions)
        self.predictions.append(dl_predictions)
        self.epoch_batch_indices.append(dl_batch_indices)
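
# Why the ``set_epoch`` call matters: ``torch.utils.data.DistributedSampler`` seeds
# its shuffle with the epoch number, so without it every epoch replays the same
# order. Standalone usage (a single replica shown here so no process group is needed):
import torch
from torch.utils.data import DataLoader, TensorDataset
from torch.utils.data.distributed import DistributedSampler

dataset = TensorDataset(torch.arange(8))
sampler = DistributedSampler(dataset, num_replicas=1, rank=0, shuffle=True)
dataloader = DataLoader(dataset, sampler=sampler, batch_size=4)

for epoch in range(2):
    sampler.set_epoch(epoch)  # different shuffle each epoch
    for (batch,) in dataloader:
        print(epoch, batch.tolist())
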
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Performs evaluation on one single dataloader"""
        void(*args, **kwargs)
        dataloader = self.trainer.accelerator.process_dataloader(
            self.current_dataloader)
        dataloader_iter = enumerate(dataloader)
        dl_max_batches = self._max_batches[self.current_dataloader_idx]

        dl_outputs = self.evaluation_loop.run(
            dataloader_iter,
            self.current_dataloader_idx,
            dl_max_batches,
            self.num_dataloaders,
        )

        # store batch level output per dataloader
        if self.should_track_batch_outputs_for_epoch_end:
            self.outputs.append(dl_outputs)
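
# The surrounding loops keep one entry per dataloader: ``_max_batches[idx]`` caps how
# far each dataloader runs, and outputs are appended per dataloader. A stripped-down
# illustration of that bookkeeping, with toy names throughout:
dataloaders = [range(10), range(3)]   # two toy "dataloaders"
max_batches = [4, float("inf")]       # per-dataloader batch caps

outputs = []                          # batch-level outputs, one list per dataloader
for dataloader_idx, dataloader in enumerate(dataloaders):
    dl_max_batches = max_batches[dataloader_idx]
    dl_outputs = []
    for batch_idx, batch in enumerate(dataloader):
        if batch_idx >= dl_max_batches:
            break
        dl_outputs.append(batch * 2)  # stand-in for an evaluation step
    outputs.append(dl_outputs)

print([len(o) for o in outputs])  # [4, 3]
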
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Performs evaluation on one single dataloader"""
        void(*args, **kwargs)

        dataloader_idx: int = self.current_dataloader_idx
        dataloader = self.trainer.accelerator.process_dataloader(self.current_dataloader)
        dataloader = self.trainer.data_connector.get_profiled_dataloader(dataloader, dataloader_idx=dataloader_idx)

        dl_max_batches = self._max_batches[dataloader_idx]

        dl_outputs = self.epoch_loop.run(dataloader, dataloader_idx, dl_max_batches, self.num_dataloaders)

        # store batch level output per dataloader
        if self.should_track_batch_outputs_for_epoch_end:
            self.outputs.append(dl_outputs)

        if not self.trainer.sanity_checking:
            # indicate the loop has run
            self._has_run = True
Example n. 9
    def advance(self, *args: Any, **kwargs: Any) -> None:
        """Performs evaluation on one single dataloader."""
        void(*args, **kwargs)

        dataloader_idx = self.current_dataloader_idx
        dataloader = self.trainer.strategy.process_dataloader(self.current_dataloader)
        self.data_fetcher = dataloader = self.trainer._data_connector.get_profiled_dataloader(
            dataloader, dataloader_idx=dataloader_idx
        )
        dl_max_batches = self._max_batches[dataloader_idx]

        dl_outputs = self.epoch_loop.run(
            dataloader, dataloader_idx if self.num_dataloaders > 1 else None, dl_max_batches
        )

        # store batch level output per dataloader
        self._outputs.append(dl_outputs)

        if not self.trainer.sanity_checking:
            # indicate the loop has run
            self._has_run = True
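
# The ``dataloader_idx if self.num_dataloaders > 1 else None`` convention, like the
# OrderedDict kwargs in the first example, boils down to forwarding an index to the
# step hooks only when more than one dataloader is present. A hedged sketch with
# hypothetical helper names, not Lightning's actual call path:
from collections import OrderedDict

def validation_step(batch, batch_idx, dataloader_idx=None):
    # toy step; a real LightningModule hook would compute metrics here
    return {"batch_idx": batch_idx, "dataloader_idx": dataloader_idx}

def run_step(batch, batch_idx, dataloader_idx, num_dataloaders):
    kwargs = OrderedDict(batch=batch, batch_idx=batch_idx)
    if num_dataloaders > 1:
        kwargs["dataloader_idx"] = dataloader_idx
    return validation_step(**kwargs)

print(run_step("b", 0, dataloader_idx=0, num_dataloaders=1))  # index stays None
print(run_step("b", 0, dataloader_idx=1, num_dataloaders=2))  # index is forwarded
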