def get_lr_scheduler(optimizer, mode='max', factor=0.5, patience=10, threshold=1e-4, threshold_mode='rel'):
    """Build a ``ReduceLROnPlateau`` scheduler with an overridable reduction hook.

    Args:
        optimizer: torch optimizer whose learning rate is scheduled.
        mode: 'max' (default) or 'min' — direction in which the monitored
            metric counts as improving.
        factor: multiplicative factor applied to the LR on plateau.
        patience: number of non-improving steps tolerated before reducing.
        threshold: minimum change that counts as an improvement.
        threshold_mode: 'rel' or 'abs' interpretation of ``threshold``.

    Returns:
        A ``torch.optim.lr_scheduler.ReduceLROnPlateau`` whose private
        ``_reduce_lr`` is re-bound to the local ``reduce_lr`` wrapper.

    NOTE(review): this file defines ``get_lr_scheduler`` more than once; the
    last definition wins at import time — confirm which variant callers expect.
    """
    def reduce_lr(self, epoch):
        # Delegate to the stock implementation. This wrapper is a hook point:
        # sibling variants of this factory add logging here.
        ReduceLROnPlateau._reduce_lr(self, epoch)

    # Pass arguments by keyword. The previous positional form relied on the
    # old torch signature where ``verbose`` was the fifth parameter; newer
    # torch releases dropped it, which would silently misbind ``threshold``.
    # ``verbose=False`` was the default anyway, so omitting it is identical.
    lr_scheduler = ReduceLROnPlateau(
        optimizer,
        mode=mode,
        factor=factor,
        patience=patience,
        threshold=threshold,
        threshold_mode=threshold_mode,
    )
    # Monkey-patch the private hook so the scheduler calls our wrapper.
    lr_scheduler._reduce_lr = partial(reduce_lr, lr_scheduler)
    return lr_scheduler
def get_lr_scheduler(logger, optimizer, mode='max', factor=0.5, patience=10, threshold=1e-4, threshold_mode='rel'):
    """Build a ``ReduceLROnPlateau`` scheduler that logs each LR reduction.

    Args:
        logger: logger used to report every LR reduction at INFO level.
        optimizer: torch optimizer whose learning rate is scheduled.
        mode: 'max' (default) or 'min' — direction in which the monitored
            metric counts as improving.
        factor: multiplicative factor applied to the LR on plateau.
        patience: number of non-improving steps tolerated before reducing.
        threshold: minimum change that counts as an improvement.
        threshold_mode: 'rel' or 'abs' interpretation of ``threshold``.

    Returns:
        A ``torch.optim.lr_scheduler.ReduceLROnPlateau`` whose private
        ``_reduce_lr`` is re-bound to the logging wrapper below.

    NOTE(review): this file defines ``get_lr_scheduler`` more than once; the
    last definition wins at import time — confirm which variant callers expect.
    """
    def reduce_lr(self, epoch):
        # Perform the stock reduction first, then report that it happened.
        ReduceLROnPlateau._reduce_lr(self, epoch)
        logger.info(f"learning rate is reduced by factor {factor}!")

    # Pass arguments by keyword. The previous positional form relied on the
    # old torch signature where ``verbose`` was the fifth parameter; newer
    # torch releases dropped it, which would silently misbind ``threshold``.
    # ``verbose=False`` was the default anyway, so omitting it is identical.
    lr_scheduler = ReduceLROnPlateau(
        optimizer,
        mode=mode,
        factor=factor,
        patience=patience,
        threshold=threshold,
        threshold_mode=threshold_mode,
    )
    # Monkey-patch the private hook so the scheduler calls our wrapper.
    lr_scheduler._reduce_lr = partial(reduce_lr, lr_scheduler)
    return lr_scheduler
def get_lr_scheduler(logger, optimizer):
    """Create a fixed-configuration ``ReduceLROnPlateau`` scheduler.

    The scheduler watches a metric in 'max' mode and multiplies the learning
    rate by 0.2 after 10 non-improving steps (relative threshold 0.005);
    every reduction is reported through *logger* at INFO level.

    Args:
        logger: logger that receives a message on each LR reduction.
        optimizer: torch optimizer whose learning rate is scheduled.

    Returns:
        The configured ``ReduceLROnPlateau`` instance with its private
        ``_reduce_lr`` hook routed through a logging wrapper.
    """
    def reduce_lr(self, epoch):
        # Run the stock reduction first, then report it.
        ReduceLROnPlateau._reduce_lr(self, epoch)
        logger.info("learning rate is reduced by factor 0.2!")

    scheduler = ReduceLROnPlateau(
        optimizer,
        mode="max",
        factor=0.2,
        patience=10,
        threshold=0.005,
        threshold_mode="rel",
    )
    # Re-bind the scheduler's private reduction hook to the logging wrapper.
    scheduler._reduce_lr = partial(reduce_lr, scheduler)
    return scheduler
def reduce_lr(self, epoch):
    """Invoke the unbound base ``ReduceLROnPlateau._reduce_lr`` on *self*.

    Pure delegation: intended to be bound onto a scheduler instance (e.g. via
    ``functools.partial``) as a replacement for its private reduction hook.
    """
    base_reduce = ReduceLROnPlateau._reduce_lr
    base_reduce(self, epoch)
def reduce_lr(self, epoch):
    """Apply the stock ``ReduceLROnPlateau`` reduction, then log it.

    NOTE(review): ``logger`` and ``factor`` are free names resolved at call
    time — neither is defined in this fragment; confirm they exist in the
    scope this function is actually used from.
    """
    ReduceLROnPlateau._reduce_lr(self, epoch)
    message = f"learning rate is reduced by factor {factor}!"
    logger.info(message)