Example #1
0
 def update_lr(self, new_lr):
     """
     Set the optimizer's learning rate to *new_lr*.

     :param new_lr: the learning rate to apply to ``self.optimizer``
     :return: None
     """
     # Delegate the actual param-group mutation to the shared helper.
     optimizer = self.optimizer
     torch_utils.change_lr(optimizer, new_lr)
Example #2
0
File: trainer.py  Project: sahaana/DualRE
 def update_lr(self, new_lr):
     """Apply *new_lr* as the learning rate of ``self.optimizer``."""
     opt = self.optimizer
     # torch_utils.change_lr performs the update on the optimizer in place.
     torch_utils.change_lr(opt, new_lr)
Example #3
0
 def update_lr(self, new_lr):
     """
     Replace the optimizer's current learning rate with *new_lr*.

     :param new_lr: new learning rate value for ``self.optimizer``
     :return: None
     """
     # All param-group bookkeeping lives in the torch_utils helper.
     target = self.optimizer
     torch_utils.change_lr(target, new_lr)
Example #4
0
File: trainer.py  Project: li-study/BiGI
 def update_lr(self, new_lr):  # NOTE(review): original marker said "here should change" — intent unclear, confirm with author
     """Set the learning rate of ``self.optimizer`` to *new_lr* via the shared helper."""
     torch_utils.change_lr(self.optimizer, new_lr)
Example #5
0
 def update_lr(self, new_lr):
     """Update ``self.optimizer`` to use the learning rate *new_lr*."""
     # change_lr is imported at module level (not visible in this chunk);
     # it mutates the optimizer in place.
     current_optimizer = self.optimizer
     change_lr(current_optimizer, new_lr)