Example #1
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     scheduler = get_linear_schedule_with_warmup(
         optimizer,
         num_warmup_steps=self.context.get_hparam("num_warmup_steps"),
         num_training_steps=self.context.get_hparam("num_training_steps"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_BATCH)
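These methods come from Determined AI PyTorchTrial subclasses, so each snippet assumes the Determined PyTorch API is already imported; this first example also uses Hugging Face's warmup helper. A minimal sketch of the imports and enclosing class the snippets rely on (the class and __init__ shown here are assumptions, not part of the original code):

import torch
from determined.pytorch import LRScheduler, PyTorchTrial, PyTorchTrialContext
from transformers import get_linear_schedule_with_warmup  # used by Examples #1 and #5

class MyTrial(PyTorchTrial):
    # Hypothetical enclosing trial class; the examples in this listing are methods like this one.
    def __init__(self, context: PyTorchTrialContext) -> None:
        self.context = context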
Example #2
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     scheduler = torch.optim.lr_scheduler.StepLR(
         optimizer,
         self.context.get_hparam("reduce_every"),
         gamma=self.context.get_hparam("lr_gamma"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_EPOCH)
Example #3
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     self.Lr2 = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 100)
     self.Lr1 = torch.optim.lr_scheduler.StepLR(optimizer, 1)
     
     self.combined_lrs = MultiLRScheduler(self.Lr1, self.Lr2, optimizer)
     # Because we are calling .step() ourselves in our MultiLRScheduler we need to 
     # set the StepMode to MANUAL_STEP.
     return LRScheduler(self.combined_lrs, step_mode=LRScheduler.StepMode.MANUAL_STEP)
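MultiLRScheduler is not part of PyTorch or Determined; it is a user-defined wrapper whose .step() the trial calls itself, which is why this example uses MANUAL_STEP. A plausible minimal sketch of such a wrapper, matching only the constructor call above (everything else is an assumption):

class MultiLRScheduler:
    # Hypothetical wrapper: steps several schedulers together on one optimizer.
    def __init__(self, *schedulers_and_optimizer):
        *self.schedulers, self.optimizer = schedulers_and_optimizer

    def step(self):
        # Advance every wrapped scheduler by one step.
        for scheduler in self.schedulers:
            scheduler.step()

    def get_last_lr(self):
        # Report the learning rates currently set on the optimizer.
        return [group["lr"] for group in self.optimizer.param_groups]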
Example #4
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     """
     Required Method to use a learning rate scheduler
     Returns: Determined scheduler object
     """
     self.myLR = MyLR(optimizer, self.hparams)
     step_mode = LRScheduler.StepMode.MANUAL_STEP
     self._optimizer = self.context.get_optimizer()
     return LRScheduler(self.myLR, step_mode=step_mode)
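MyLR (used here and in Example #6) is likewise user-defined and not shown. One common shape for such a class is a subclass of torch.optim.lr_scheduler._LRScheduler whose behaviour is driven by the trial's hyperparameters; the sketch below is a hypothetical illustration, not the original implementation:

import torch

class MyLR(torch.optim.lr_scheduler._LRScheduler):
    def __init__(self, optimizer, hparams, last_epoch=-1):
        # Hypothetical: multiplicative decay read from the hyperparameters.
        self.decay = hparams.get("lr_decay", 0.95)
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # _LRScheduler tracks self.last_epoch; scale each base LR accordingly.
        return [base_lr * (self.decay ** self.last_epoch) for base_lr in self.base_lrs]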
Example #5
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     """
     Required Method to use a learning rate scheduler
     Returns: Determined scheduler object
     Determined will handle the learning rate scheduler update based on the Determined
     LRScheduler parameters. If step_every_batch or step_every_epoch is True, Determined will
     handle the .step(). If both are false, the user will be in charge of calling .step().
     """
     scheduler = get_linear_schedule_with_warmup(
         optimizer,
         num_warmup_steps=self.context.get_hparam("num_warmup_steps"),
         num_training_steps=self.context.get_hparam("num_training_steps"),
     )
     return LRScheduler(scheduler, LRScheduler.StepMode.STEP_EVERY_BATCH)
Example #6
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     """
     Required Method to use a learning rate scheduler
     Returns: Determined scheduler object
     Determined will handle the learning rate scheduler update based on the Determined
     LRScheduler parameters. If step_every_batch or step_every_epoch is True, Determined will
     handle the .step(). If both are false, the user will be in charge of calling .step().
     """
     self.myLR = MyLR(optimizer, self.context.get_hparams())
     step_mode = LRScheduler.StepMode.MANUAL_STEP
     if self.context.get_hparam("step_every_batch"):
         step_mode = LRScheduler.StepMode.STEP_EVERY_BATCH
     elif self.context.get_hparam("step_every_epoch"):
         step_mode = LRScheduler.StepMode.STEP_EVERY_EPOCH

     return LRScheduler(self.myLR, step_mode=step_mode)
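Because the step mode is chosen from hyperparameters, the same trial code can step per batch, per epoch, or manually without changes. A sketch of what context.get_hparams() might return for this example (keys taken from the code above, values purely illustrative):

hparams = {
    "step_every_batch": False,  # True -> StepMode.STEP_EVERY_BATCH
    "step_every_epoch": True,   # checked only if step_every_batch is False
    # plus whatever MyLR itself reads, e.g. "lr_decay": 0.95 (hypothetical)
}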
Example #7
 def create_lr_scheduler(self, optimizer):
     lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                    step_size=3,
                                                    gamma=0.1)
     return LRScheduler(lr_scheduler,
                        step_mode=LRScheduler.StepMode.STEP_EVERY_EPOCH)
Example #8
 def create_lr_scheduler(self, optimizer):
     self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
         optimizer, self.context.get_hparam("cosine_annealing_epochs")
     )
     step_mode = LRScheduler.StepMode.MANUAL_STEP
     return LRScheduler(self.scheduler, step_mode=step_mode)
Example #9
 def create_lr_scheduler(self, optimizer: torch.optim.Optimizer):
     step_mode = LRScheduler.StepMode.STEP_EVERY_BATCH
     scheduler = build_lr_scheduler(self.cfg, optimizer)
     return LRScheduler(scheduler, step_mode=step_mode)
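build_lr_scheduler here takes a config object and an optimizer, which matches the helper of the same name in detectron2's solver module; treating that as its source is an assumption, since the import is not shown in the snippet:

from detectron2.solver import build_lr_scheduler  # assumed origin of the helper

# self.cfg would then be a detectron2 CfgNode whose SOLVER section
# (e.g. SOLVER.LR_SCHEDULER_NAME, SOLVER.STEPS, SOLVER.WARMUP_ITERS)
# defines a per-iteration schedule, hence STEP_EVERY_BATCH above.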
Example #10
 def create_lr_scheduler(self, optimizer):
     self.scheduler = ModifyableLRSchedule(optimizer)
     return LRScheduler(self.scheduler,
                        step_mode=LRScheduler.StepMode.MANUAL_STEP)
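With StepMode.MANUAL_STEP, Determined never calls .step() on the scheduler, so the trial has to advance it itself, typically from train_batch. A hedged sketch of what that could look like (the loss helper and the optimizer attribute are assumptions; only self.scheduler comes from the snippet above):

 def train_batch(self, batch, epoch_idx: int, batch_idx: int):
     loss = self.compute_loss(batch)              # hypothetical helper
     self.context.backward(loss)
     self.context.step_optimizer(self.optimizer)  # assumes the optimizer was saved in __init__
     self.scheduler.step()                        # manual step: Determined will not do this
     return {"loss": loss}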
Example #11
 def create_lr_scheduler(self, optimizer):
     self.scheduler = ModifyableLRSchedule(optimizer)
     return LRScheduler(self.scheduler,
                        step_mode=LRScheduler.StepMode.STEP_EVERY_BATCH)
Example #12
 def create_lr_scheduler(self, optimizer):
     self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
         optimizer, self.context.get_hparam("train_epochs"))
     step_mode = LRScheduler.StepMode.STEP_EVERY_EPOCH
     return LRScheduler(self.scheduler, step_mode=step_mode)