def update_lr(self, new_lr):
    """
    Update the optimizer's learning rate.

    :param new_lr: new learning rate to apply
    :return: None
    """
    torch_utils.change_lr(self.optimizer, new_lr)
Example #2
def update_lr(self, new_lr):
    torch_utils.change_lr(self.optimizer, new_lr)
Example #3
def update_lr(self, new_lr):
    """
    Update learning rate of the optimizer
    :param new_lr: new learning rate
    """
    torch_utils.change_lr(self.optimizer, new_lr)
Example #4
def update_lr(self, new_lr):  # the learning rate is changed here
    torch_utils.change_lr(self.optimizer, new_lr)
Example #5
def update_lr(self, new_lr):
    # assumes `from torch_utils import change_lr` at module level
    change_lr(self.optimizer, new_lr)
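All of these examples delegate to torch_utils.change_lr. Its source is not shown here, but assuming it follows the standard PyTorch pattern of overwriting lr on each of the optimizer's parameter groups, a minimal sketch of such a helper (an assumption, not the actual torch_utils implementation) could look like this:

import torch

def change_lr(optimizer, new_lr):
    # Minimal sketch (assumed behavior): set new_lr on every parameter group
    # of a PyTorch optimizer, which is how learning rates are usually updated.
    for param_group in optimizer.param_groups:
        param_group['lr'] = new_lr

# Usage: drop an SGD optimizer's learning rate from 1e-2 to 1e-4.
model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
change_lr(optimizer, 1e-4)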