Example #1
    def __init__(self, scheduler: torch.optim.lr_scheduler._LRScheduler, step_mode: StepMode):
        """Wrapper for a PyTorch LRScheduler.

        Usage of this wrapper is required to properly schedule the optimizer's learning rate.

        This wrapper fulfills two main functions:
            1. Save and restore the learning rate when a trial is paused, preempted, etc.
            2. Step the learning rate scheduler at the configured frequency
               (e.g., every batch or every epoch).

        Args:
            scheduler (:py:class:`torch.optim.lr_scheduler._LRScheduler`):
                Learning rate scheduler to be used by Determined.
            step_mode (:py:class:`det.pytorch.LRSchedulerStepMode`):
                The strategy Determined will use to call (or not call) scheduler.step().

                1. ``STEP_EVERY_EPOCH``: Determined will call scheduler.step() after
                   every training epoch. No arguments will be passed to step().

                2. ``STEP_EVERY_BATCH``: Determined will call scheduler.step() after every
                   training batch. No arguments will be passed to step().

                3. ``MANUAL_STEP``: Determined will not call scheduler.step() at all.
                   It is up to the user to decide when to call scheduler.step(),
                   and whether to pass any arguments.
        """

        check.check_not_none(scheduler)
        check.check_isinstance(step_mode, LRScheduler.StepMode)

        self.scheduler = scheduler
        self.step_mode = step_mode
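
Below is a minimal usage sketch for the wrapper above. It assumes the class is exposed as determined.pytorch.LRScheduler with the nested StepMode enum referenced in the constructor; the model, optimizer, and scheduler choices are purely illustrative.

import torch
from determined import pytorch

model = torch.nn.Linear(8, 2)  # illustrative model
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# A standard PyTorch scheduler that decays the learning rate each epoch.
step_lr = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.1)

# Wrap it so Determined calls step_lr.step() once per training epoch.
# (Assumes the wrapper and its StepMode enum live under determined.pytorch.)
wrapped = pytorch.LRScheduler(
    step_lr, step_mode=pytorch.LRScheduler.StepMode.STEP_EVERY_EPOCH
)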
Example #2
    def __init__(
        self,
        scheduler: torch.optim.lr_scheduler._LRScheduler,
        step_mode: StepMode,
    ):
        """LRScheduler constructor

        Args:
            scheduler (:py:class:`torch.optim.lr_scheduler._LRScheduler`):
                Learning rate scheduler to be used by Determined.
            step_mode (:py:class:`det.pytorch.LRSchedulerStepMode`):
                The strategy Determined will use to call (or not call) scheduler.step().

                1. ``STEP_EVERY_EPOCH``: Determined will call scheduler.step() after
                   every training epoch. No arguments will be passed to step().

                2. ``STEP_EVERY_BATCH``: Determined will call scheduler.step() after every
                   training batch. No arguments will be passed to step().

                3. ``MANUAL_STEP``: Determined will not call scheduler.step() at all.
                   It is up to the user to decide when to call scheduler.step(),
                   and whether to pass any arguments.
        """
        check.check_not_none(scheduler)
        check.check_isinstance(step_mode, LRScheduler.StepMode)

        self._scheduler = scheduler
        self._step_mode = step_mode
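
The MANUAL_STEP mode is easiest to see inside a trial. The sketch below assumes the usual determined.pytorch.PyTorchTrial context APIs (wrap_model, wrap_optimizer, wrap_lr_scheduler, backward, step_optimizer); the WarmupTrial class, its data-loader stubs, and the 100-batch cadence are hypothetical.

import torch
from determined import pytorch


class WarmupTrial(pytorch.PyTorchTrial):  # hypothetical trial
    def __init__(self, context: pytorch.PyTorchTrialContext) -> None:
        self.context = context
        self.model = context.wrap_model(torch.nn.Linear(8, 2))
        self.optimizer = context.wrap_optimizer(
            torch.optim.SGD(self.model.parameters(), lr=0.1)
        )
        # MANUAL_STEP: Determined never calls step(); the trial decides when.
        self.scheduler = context.wrap_lr_scheduler(
            torch.optim.lr_scheduler.LambdaLR(self.optimizer, lambda epoch: 0.95 ** epoch),
            step_mode=pytorch.LRScheduler.StepMode.MANUAL_STEP,
        )

    def train_batch(self, batch, epoch_idx, batch_idx):
        data, labels = batch
        loss = torch.nn.functional.cross_entropy(self.model(data), labels)
        self.context.backward(loss)
        self.context.step_optimizer(self.optimizer)
        if batch_idx > 0 and batch_idx % 100 == 0:
            self.scheduler.step()  # manual cadence: every 100 batches
        return {"loss": loss}

    def evaluate_batch(self, batch):
        data, labels = batch
        loss = torch.nn.functional.cross_entropy(self.model(data), labels)
        return {"validation_loss": loss}

    # Data loaders elided for brevity; a real trial must implement them.
    def build_training_data_loader(self):
        ...

    def build_validation_data_loader(self):
        ...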
Example #3
def create_trial_instance(trial_def: Type[det.Trial]) -> None:
    # Build the trial from a minimal config, using a throwaway checkpoint directory.
    with tempfile.TemporaryDirectory() as td:
        trial_instance = experimental.create_trial_instance(
            trial_def=trial_def,
            config={"hyperparameters": {"global_batch_size": det.Constant(16)}},
            checkpoint_dir=td,
        )
    check.check_isinstance(trial_instance, det.Trial)
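
Finally, a hedged sketch of how the helper above might be called from a test, reusing the hypothetical WarmupTrial class from the previous sketch; the pytest-style test name is an assumption, not part of the original.

# Hypothetical pytest-style check: the trial class can be built locally,
# outside of a managed Determined experiment.
def test_warmup_trial_instantiates() -> None:
    create_trial_instance(WarmupTrial)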