Example #1
    def on_stage_start(self, runner: "IRunner") -> None:
        """Event handler."""
        self.model = get_attr(runner, key="model", inner_key=self.model_key)
        self.optimizer = get_attr(runner,
                                  key="optimizer",
                                  inner_key=self.optimizer_key)
        assert self.model is not None
        assert self.optimizer is not None
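This handler (like the ones below) uses `get_attr` to pull a component off the runner, optionally indexing into it when the attribute is a dict of named objects (for example, several optimizers keyed by name). A minimal sketch of that lookup, assuming the usual Catalyst semantics where `inner_key=None` returns the attribute itself:

def get_attr(obj, key, inner_key=None):
    # Illustration only -- a rough stand-in for catalyst.utils.get_attr.
    attr = getattr(obj, key, None)
    if attr is None or inner_key is None:
        return attr
    return attr[inner_key]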
Example #2
    def on_stage_start(self, runner: "IRunner") -> None:
        """Stage start hook.

        Args:
            runner: current runner
        """
        self.scheduler = get_attr(runner,
                                  key="scheduler",
                                  inner_key=self.scheduler_key)
        assert self.scheduler is not None

        if isinstance(self.scheduler,
                      torch.optim.lr_scheduler.ReduceLROnPlateau):
            assert self.loader_key is not None and self.metric_key is not None, (
                "For the `ReduceLROnPlateau` scheduler, `SchedulerCallback` "
                "requires both `loader_key` and `metric_key` to be specified")

        if self.mode is None:
            if isinstance(self.scheduler, BatchScheduler):
                self.mode = "batch"
            else:
                self.mode = "epoch"

        if isinstance(self.scheduler,
                      OneCycleLRWithWarmup) and self.mode == "batch":
            self.scheduler.reset()
        assert self.mode is not None
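The assert above means that a `ReduceLROnPlateau` scheduler must know which loader and which metric to monitor before it can step. A hypothetical wiring (the argument names mirror the attributes referenced in the snippet and are not verified against a specific Catalyst release; the import path for `SchedulerCallback` depends on the Catalyst version):

import torch

model = torch.nn.Linear(8, 1)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode="min")

# loader_key / metric_key tell the callback which validation metric to feed
# into scheduler.step(metric); without them the assert above fires.
scheduler_callback = SchedulerCallback(loader_key="valid", metric_key="loss")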
Example #3
    def on_experiment_start(self, runner: "IRunner") -> None:
        """Event handler."""
        self.optimizer = optimizer = get_attr(
            runner, key="optimizer", inner_key=self.optimizer_key
        )
        self.init_lr = optimizer.param_groups[0]["lr"]
Example #4
    def on_stage_start(self, runner: "IRunner") -> None:
        """Stage start hook.

        Args:
            runner: current runner
        """
        self.reduced_metric = self.reduced_metric or runner.main_metric

        scheduler = get_attr(
            runner, key="scheduler", inner_key=self.scheduler_key
        )
        assert scheduler is not None
        self._scheduler = scheduler

        if self.mode is None:
            if isinstance(scheduler, BatchScheduler):
                self.mode = "batch"
            else:
                self.mode = "epoch"

        if (
            isinstance(scheduler, OneCycleLRWithWarmup)
            and self.mode == "batch"
        ):
            scheduler.reset()
        assert self.mode is not None
Example #5
    def on_stage_start(self, runner: "IRunner"):
        """Checks that the current stage has correct criterion.

        Args:
            runner: current runner
        """
        self.criterion = get_attr(runner, key="criterion", inner_key=self.criterion_key)
        assert self.criterion is not None
Example #6
    def on_stage_start(self, runner: "IRunner") -> None:
        """Stage start hook.

        Args:
            runner: current runner
        """
        self.optimizer = optimizer = get_attr(runner,
                                              key="optimizer",
                                              inner_key=self.optimizer_key)
        self.init_lr = optimizer.param_groups[0]["lr"]
Example #7
    def on_stage_start(self, runner: "IRunner") -> None:
        """Stage start hook.

        Args:
            runner: current runner
        """
        optimizer = get_attr(runner,
                             key="optimizer",
                             inner_key=self.optimizer_key)
        assert optimizer is not None
        self._optimizer = optimizer
        self.init_lr = optimizer.defaults["lr"]
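Examples #6 and #7 read the initial learning rate differently: `param_groups[0]["lr"]` is the current value for the first parameter group, which a scheduler may already have changed, while `optimizer.defaults["lr"]` is the value passed to the optimizer constructor. A quick illustration of the distinction:

import torch

params = [torch.nn.Parameter(torch.zeros(1))]
optimizer = torch.optim.SGD(params, lr=0.1)
optimizer.param_groups[0]["lr"] = 0.01      # e.g. after a scheduler step

print(optimizer.defaults["lr"])             # 0.1  -- constructor default
print(optimizer.param_groups[0]["lr"])      # 0.01 -- current per-group value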
Example #8
    def on_stage_start(self, runner: "IRunner") -> None:
        """Checks that the current stage has correct optimizer.

        Args:
            runner(IRunner): current runner
        """
        from torch.cuda.amp import GradScaler

        self._optimizer = get_attr(runner,
                                   key="optimizer",
                                   inner_key=self.optimizer_key)
        self.scaler = GradScaler()
        assert self._optimizer is not None
Example #9
    def on_stage_start(self, runner: "IRunner") -> None:
        """Checks that the current stage has correct optimizer.

        Args:
            runner(IRunner): current runner
        """
        self._optimizer = get_attr(runner,
                                   key="optimizer",
                                   inner_key=self.optimizer_key)
        # device based optimization step
        if runner.device.type == "xla":
            self._optimizer_step_fn = self._optimizer_step_tpu
        else:
            self._optimizer_step_fn = self._optimizer_step

        assert self._optimizer is not None
Example #10
    def on_stage_start(self, runner: "IRunner") -> None:
        """Resolve amp/apex settings, prepare optimizer and scaler

        Args:
            runner(IRunner): current runner
        """
        if self.use_amp is None:
            if runner.experiment is not None:
                self.use_amp = runner.experiment.distributed_params.get(
                    "amp", False)
            else:
                self.use_amp = False

        if self.use_apex is None:
            if runner.experiment is not None:
                self.use_apex = runner.experiment.distributed_params.get(
                    "apex", False)
            else:
                self.use_apex = False

        self._optimizer = get_attr(runner,
                                   key="optimizer",
                                   inner_key=self.optimizer_key)

        # device based optimization step
        if runner.device.type == "xla":
            self._optimizer_step_fn = self._optimizer_step_tpu
        elif self.use_amp:
            self._optimizer_step_fn = self._optimizer_step_amp
        else:
            self._optimizer_step_fn = self._optimizer_step

        if hasattr(self._optimizer, "_amp_stash") and not self.use_apex:
            warnings.warn(
                "`_amp_stash` is found in `self._optimizer`:, "
                "but `use_apex` is False",
                stacklevel=2,
            )

        assert self._optimizer is not None

        if self.use_amp:
            from torch.cuda.amp import GradScaler

            self.scaler = GradScaler()
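The `_optimizer_step_amp` branch itself is not shown here, but with a `GradScaler` the standard torch.cuda.amp step looks roughly like this (a generic sketch of the PyTorch pattern, not the callback's actual method; the function and argument names are made up for illustration):

from torch.cuda.amp import GradScaler, autocast

scaler = GradScaler()

def amp_step(model, optimizer, criterion, features, targets):
    # Generic torch.cuda.amp pattern, not Catalyst's _optimizer_step_amp.
    optimizer.zero_grad()
    with autocast():
        loss = criterion(model(features), targets)
    scaler.scale(loss).backward()   # backward on the scaled loss
    scaler.step(optimizer)          # unscales gradients, then optimizer.step()
    scaler.update()                 # adjust the scale factor for the next step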
Example #11
    def on_experiment_start(self, runner: "IRunner") -> None:
        """Event handler."""
        self.optimizer = get_attr(runner, key="optimizer", inner_key=self.optimizer_key)
        assert self.optimizer is not None
Example #12
    def on_experiment_start(self, runner: "IRunner"):
        """Event handler."""
        self.criterion = get_attr(runner,
                                  key="criterion",
                                  inner_key=self.criterion_key)
        assert self.criterion is not None
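The `inner_key` arguments throughout these examples matter when the runner holds several named objects instead of a single one. A hypothetical setup (the runner stand-in and key names are purely illustrative) where a criterion key selects one criterion out of a dict, using the `get_attr` sketch from Example #1 or the real catalyst.utils helper:

from types import SimpleNamespace

import torch.nn as nn

# Stand-in for a runner that exposes a dict of named criteria.
runner = SimpleNamespace(criterion={"bce": nn.BCEWithLogitsLoss(),
                                    "l1": nn.L1Loss()})

bce_loss = get_attr(runner, key="criterion", inner_key="bce")  # one criterion
criteria = get_attr(runner, key="criterion", inner_key=None)   # the whole dict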