Code Example #1
import pytest
from torch import optim
from pytorch_lightning.core.optimizer import _configure_schedulers_automatic_opt
from pytorch_lightning.demos.boring_classes import BoringModel  # Lightning's dummy model; import path may differ in older versions


def test_onecyclelr_with_epoch_interval_warns():
    """Test warning when a OneCycleLR is used and interval is epoch."""
    model = BoringModel()
    optimizer = optim.Adam(model.parameters())
    # No explicit "interval" key: the default "epoch" interval triggers the warning for OneCycleLR.
    lr_scheduler = {"scheduler": optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.01, total_steps=3)}
    with pytest.warns(RuntimeWarning, match="Are you sure you didn't mean 'interval': 'step'?"):
        _configure_schedulers_automatic_opt([lr_scheduler], None)
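
The warning checked by this test exists because OneCycleLR is designed to be stepped once per batch, while Lightning steps schedulers once per epoch by default. Below is a minimal sketch (the module and hyperparameters are illustrative, not taken from the test suite) of how a LightningModule can return the scheduler with "interval": "step" so the warning is not raised:

import torch
from torch import optim
import pytorch_lightning as pl


class LitModel(pl.LightningModule):
    """Toy module showing OneCycleLR configured to step per optimizer step."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)

    def configure_optimizers(self):
        optimizer = optim.Adam(self.parameters(), lr=0.01)
        scheduler = optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.01, total_steps=100)
        return {
            "optimizer": optimizer,
            # "interval": "step" tells Lightning to call scheduler.step() after every optimizer step.
            "lr_scheduler": {"scheduler": scheduler, "interval": "step"},
        }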
Code Example #2
File: base_lightning.py  Project: J-shang/nni
    def configure_optimizers(self):
        """
        Combine architecture optimizers and user's model optimizers.
        You can overwrite :meth:`configure_architecture_optimizers` if architecture optimizers are needed in your NAS algorithm.

        For now :attr:`model` is tested against evaluators in :mod:`nni.retiarii.evaluator.pytorch.lightning`
        and it only returns 1 optimizer.
        But for extendibility, codes for other return value types are also implemented.
        """
        # pylint: disable=assignment-from-none
        arc_optimizers = self.configure_architecture_optimizers()
        if arc_optimizers is None:
            return self.model.configure_optimizers()

        if isinstance(arc_optimizers, optim.Optimizer):
            arc_optimizers = [arc_optimizers]
        self.arc_optim_count = len(arc_optimizers)

        # FIXME: this part uses non-official lightning API.
        # The returned ``frequency`` and ``monitor`` values are ignored because lightning requires
        # ``len(optimizers) == len(frequency)``, and the gradient backward pass is handled manually.
        # For the data structures of the variables below, see the PyTorch Lightning docs for ``configure_optimizers``.
        try:
            # PyTorch Lightning >= 1.6
            from pytorch_lightning.core.optimizer import (  # pylint: disable=import-error
                _configure_optimizers,  # type: ignore
                _configure_schedulers_automatic_opt,  # type: ignore
                _configure_schedulers_manual_opt  # type: ignore
            )
            w_optimizers, lr_schedulers, self.frequencies, monitor = \
                _configure_optimizers(self.model.configure_optimizers())  # type: ignore
            lr_schedulers = (_configure_schedulers_automatic_opt(
                lr_schedulers, monitor) if self.automatic_optimization else
                             _configure_schedulers_manual_opt(lr_schedulers))
        except ImportError:
            # PyTorch Lightning <= 1.5
            w_optimizers, lr_schedulers, self.frequencies, monitor = \
                self.trainer._configure_optimizers(self.model.configure_optimizers())  # type: ignore
            lr_schedulers = self.trainer._configure_schedulers(
                lr_schedulers, monitor,
                not self.automatic_optimization)  # type: ignore

        if any(sch["scheduler"].optimizer not in w_optimizers
               for sch in lr_schedulers):  # type: ignore
            raise Exception(
                "Some schedulers are attached with an optimizer that wasn't returned from `configure_optimizers`."
            )

        # variables used to handle optimizer frequency
        self.cur_optimizer_step = 0
        self.cur_optimizer_index = 0

        return arc_optimizers + w_optimizers, lr_schedulers
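
The docstring above directs NAS algorithms to override configure_architecture_optimizers. Below is a minimal sketch of such an override, assuming the base class in this file is nni's BaseOneShotLightningModule and that architecture parameters can be picked out by name; the subclass name, parameter naming convention, and hyperparameters are hypothetical:

from torch import optim


class MyDartsModule(BaseOneShotLightningModule):  # hypothetical subclass of the base class above
    def configure_architecture_optimizers(self):
        # Collect the architecture parameters (assumed to carry "alpha" in their names)
        # and return a single optimizer; returning a list of optimizers is also supported.
        arc_params = [p for name, p in self.named_parameters() if "alpha" in name]
        return optim.Adam(arc_params, lr=3e-4, betas=(0.5, 0.999))

The configure_optimizers method shown above then prepends this architecture optimizer to the optimizers returned by the wrapped model.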