Example #1
 def _auto_step_lr_scheduler_per_batch(self, batch_idx: int, lr_scheduler: LRScheduler) -> None:
     """
     Automatically step an LR scheduler. This should be called once per batch.
     """
     if lr_scheduler._step_mode == LRScheduler.StepMode.STEP_EVERY_BATCH:
         lr_scheduler.step()
     elif lr_scheduler._step_mode == LRScheduler.StepMode.STEP_EVERY_EPOCH:
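         # Step once per epoch. With gradient aggregation the epoch boundary
         # may fall between optimizer steps, so also accept a batch index
         # within one aggregation window past the boundary.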
         mod = (batch_idx + 1) % len(self.training_loader)
         if mod == 0 or mod < self.hvd_config.aggregation_frequency:
             lr_scheduler.step()
Example #2
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     scheduler = get_linear_schedule_with_warmup(
         optimizer,
         num_warmup_steps=self.context.get_hparam("num_warmup_steps"),
         num_training_steps=self.context.get_hparam("num_training_steps"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_BATCH)
Example #3
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     scheduler = torch.optim.lr_scheduler.StepLR(
         optimizer,
         self.context.get_hparam("reduce_every"),
         gamma=self.context.get_hparam("lr_gamma"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_EPOCH)
Example #4
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     self.Lr2 = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 100)
     self.Lr1 = torch.optim.lr_scheduler.StepLR(optimizer, 1)
     
     self.combined_lrs = MultiLRScheduler(self.Lr1, self.Lr2, optimizer)
     # Because we are calling .step() ourselves in our MultiLRScheduler, we
     # need to set the StepMode to MANUAL_STEP.
     return LRScheduler(self.combined_lrs, step_mode=LRScheduler.StepMode.MANUAL_STEP)
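Since MANUAL_STEP means Determined never calls .step() for us, the combined scheduler has to be advanced from the trial's own training code. A minimal sketch of what that could look like, assuming the hypothetical MultiLRScheduler above exposes a .step() method and that compute_loss and self.optimizer exist on the trial (neither is part of the original example):
 def train_batch(self, batch, epoch_idx: int, batch_idx: int):
     # Hypothetical loss computation; compute_loss and self.optimizer are
     # assumptions, not part of the original example.
     loss = self.compute_loss(batch)
     self.context.backward(loss)
     self.context.step_optimizer(self.optimizer)
     # With StepMode.MANUAL_STEP, Determined does not step the scheduler,
     # so advance the combined schedulers here ourselves.
     self.combined_lrs.step()
     return {"loss": loss}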
Example #5
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     """
     Required method for using a learning rate scheduler.
     Returns: a Determined LRScheduler object.
     """
     self.myLR = MyLR(optimizer, self.hparams)
     step_mode = LRScheduler.StepMode.MANUAL_STEP
     self._optimizer = self.context.get_optimizer()
     return LRScheduler(self.myLR, step_mode=step_mode)
Example #6
    def _auto_step_lr_scheduler_per_batch(
            self, batch_idx: int, lr_scheduler: pytorch.LRScheduler) -> None:
        """
        Automatically step an LR scheduler. This should be called once per batch.
        """

        # Never step lr when we do not step optimizer.
        if not self.context._should_communicate_and_update():
            return

        if lr_scheduler._step_mode == pytorch.LRScheduler.StepMode.STEP_EVERY_BATCH:
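            # Catch up on every batch in the just-completed aggregation
            # window, stepping once for each index that satisfies the
            # configured frequency.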
            start_idx = batch_idx - self.context._aggregation_frequency + 1
            for i in range(start_idx, batch_idx + 1):
                if (i + 1) % lr_scheduler._frequency == 0:
                    lr_scheduler.step()
        elif lr_scheduler._step_mode == pytorch.LRScheduler.StepMode.STEP_EVERY_OPTIMIZER_STEP:
            if (batch_idx + 1) % lr_scheduler._frequency == 0:
                lr_scheduler.step()
        elif lr_scheduler._step_mode == pytorch.LRScheduler.StepMode.STEP_EVERY_EPOCH:
            # We will step if the next optimizer step will land in the next epoch.
            epoch_idx = self.get_epoch_idx(batch_idx)
            next_steppable_batch = batch_idx + self.context._aggregation_frequency
            next_batch_epoch_idx = self.get_epoch_idx(next_steppable_batch)
            for e in range(epoch_idx, next_batch_epoch_idx):
                if (e + 1) % lr_scheduler._frequency == 0:
                    lr_scheduler.step()
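The branches above also consult a per-scheduler `_frequency`, which suggests the wrapper can be told to step only every N-th batch, optimizer step, or epoch. A hedged sketch, assuming this version of determined.pytorch.LRScheduler accepts a frequency keyword argument:
 from determined.pytorch import LRScheduler
 import torch

 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)
     return LRScheduler(
         scheduler,
         step_mode=LRScheduler.StepMode.STEP_EVERY_OPTIMIZER_STEP,
         # frequency is an assumed keyword matching the _frequency attribute
         # read above: step only on every second optimizer step.
         frequency=2,
     )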
Example #7
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     """
     Required method for using a learning rate scheduler.
     Returns: a Determined LRScheduler object.
     Determined handles the learning rate scheduler update based on the Determined
     LRScheduler parameters. If step_every_batch or step_every_epoch is True, Determined
     calls .step(); if both are False, the user is in charge of calling .step().
     """
     scheduler = get_linear_schedule_with_warmup(
         optimizer,
         num_warmup_steps=self.context.get_hparam("num_warmup_steps"),
         num_training_steps=self.context.get_hparam("num_training_steps"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_BATCH)
Example #8
    def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
        """
        Required method for using a learning rate scheduler.
        Returns: a Determined LRScheduler object.
        Determined handles the learning rate scheduler update based on the Determined
        LRScheduler parameters. If step_every_batch or step_every_epoch is True, Determined
        calls .step(); if both are False, the user is in charge of calling .step().
        """
        self.myLR = MyLR(optimizer, self.context.get_hparams())
        step_mode = LRScheduler.StepMode.MANUAL_STEP
        if self.context.get_hparam("step_every_batch"):
            step_mode = LRScheduler.StepMode.STEP_EVERY_BATCH
        elif self.context.get_hparam("step_every_epoch"):
            step_mode = LRScheduler.StepMode.STEP_EVERY_EPOCH

        return LRScheduler(self.myLR, step_mode=step_mode)
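For reference, a sketch of hyperparameters that Example #8 would read via self.context.get_hparam; the two flag names come from the code above, the values are only illustrative:
 # Illustrative values only. Exactly one flag set to True lets Determined call
 # .step() automatically; with both False the trial falls back to MANUAL_STEP
 # and must call self.myLR.step() itself.
 hparams = {
     "step_every_batch": True,
     "step_every_epoch": False,
 }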
Example #9
 def create_lr_scheduler(self, optimizer):
     lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                    step_size=3,
                                                    gamma=0.1)
     return LRScheduler(lr_scheduler,
                        step_mode=LRScheduler.StepMode.STEP_EVERY_EPOCH)
Example #10
 def create_lr_scheduler(self, optimizer):
     self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
         optimizer, self.context.get_hparam("cosine_annealing_epochs")
     )
     step_mode = LRScheduler.StepMode.MANUAL_STEP
     return LRScheduler(self.scheduler, step_mode=step_mode)
Example #11
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     step_mode = LRScheduler.StepMode.STEP_EVERY_BATCH
     scheduler = build_lr_scheduler(self.cfg, optimizer)
     return LRScheduler(scheduler, step_mode=step_mode)
Example #12
 def create_lr_scheduler(self, optimizer):
     self.scheduler = ModifyableLRSchedule(optimizer)
     return LRScheduler(self.scheduler,
                        step_mode=LRScheduler.StepMode.MANUAL_STEP)
Example #13
 def create_lr_scheduler(self, optimizer):
     self.scheduler = ModifyableLRSchedule(optimizer)
     return LRScheduler(self.scheduler,
                        step_mode=LRScheduler.StepMode.STEP_EVERY_BATCH)
Example #14
 def create_lr_scheduler(self, optimizer):
     self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
         optimizer, self.context.get_hparam("train_epochs"))
     step_mode = LRScheduler.StepMode.STEP_EVERY_EPOCH
     return LRScheduler(self.scheduler, step_mode=step_mode)