Example #1
def adjust_learning_rate(optimizer, epoch, step_in_epoch, total_steps_in_epoch):
    # `args` (parsed command-line flags) and `ramps` (LR schedule helpers)
    # are module-level globals in this codebase.
    # Fold the step into a fractional epoch so the LR updates every step.
    epoch = epoch + step_in_epoch / total_steps_in_epoch

    # LR warm-up to handle large minibatch sizes from https://arxiv.org/abs/1706.02677
    lr = ramps.linear_rampup(epoch, args.lr_rampup) * (args.lr - args.initial_lr) + args.initial_lr

    # Cosine LR rampdown from https://arxiv.org/abs/1608.03983 (but one cycle only)
    if args.lr_rampdown_epochs:
        assert args.lr_rampdown_epochs >= args.epochs
        lr *= ramps.cosine_rampdown(epoch, args.lr_rampdown_epochs)

    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
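
All three examples depend on a ramps module that is not shown. Below is a minimal sketch of what its two helpers conventionally look like (a linear warm-up factor clipped to [0, 1] and a half-cosine decay factor, per the papers cited in the comments); the module actually used may differ in details.

import numpy as np

def linear_rampup(current, rampup_length):
    # Ramp a multiplier linearly from 0 to 1 over `rampup_length` epochs.
    if rampup_length == 0:
        return 1.0
    return float(np.clip(current / rampup_length, 0.0, 1.0))

def cosine_rampdown(current, rampdown_length):
    # Half-cycle cosine decay of a multiplier from 1 to 0 over
    # `rampdown_length` epochs (https://arxiv.org/abs/1608.03983).
    assert 0 <= current <= rampdown_length
    return float(0.5 * (np.cos(np.pi * current / rampdown_length) + 1))

With these definitions, the learning rate warms up from args.initial_lr to args.lr over the first args.lr_rampup epochs, then decays along a single cosine arc that reaches zero at epoch args.lr_rampdown_epochs.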
Example #2
def adjust_learning_rate(optimizer, epoch, step_in_epoch, total_steps_in_epoch,
                         args):

    lr = args.lr
    epoch = epoch + step_in_epoch / total_steps_in_epoch

    # LR warm-up to handle large minibatch sizes from https://arxiv.org/abs/1706.02677
    lr = ramps.linear_rampup(
        epoch, args.lr_rampup) * (lr - args.initial_lr) + args.initial_lr

    # Cosine LR rampdown from https://arxiv.org/abs/1608.03983 (but one cycle only)
    if args.lr_rampdown_epochs:
        assert args.lr_rampdown_epochs >= args.epochs
        lr *= ramps.cosine_rampdown(epoch, args.lr_rampdown_epochs)

    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
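
For context, here is one way this variant could be driven from a training loop. Everything here is illustrative: the argparse.Namespace values are hypothetical, and the toy model and data exist only to make the sketch self-contained and runnable (together with a ramps module such as the sketch above).

import argparse
import torch
import torch.nn.functional as F

# Hypothetical hyperparameters, for illustration only.
args = argparse.Namespace(lr=0.1, initial_lr=0.0, lr_rampup=5,
                          epochs=30, lr_rampdown_epochs=35)

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=args.lr)
data = [(torch.randn(4, 10), torch.randint(0, 2, (4,))) for _ in range(8)]

for epoch in range(args.epochs):
    for step, (x, y) in enumerate(data):
        # Update the LR before each optimizer step.
        adjust_learning_rate(optimizer, epoch, step, len(data), args)
        loss = F.cross_entropy(model(x), y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()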
Example #3
def adjust_learning_rate(optimizer, epoch, step_in_epoch, total_steps_in_epoch):
    # Fold the step into a fractional epoch so the LR updates every step.
    epoch = epoch + step_in_epoch / total_steps_in_epoch

    # LR warm-up to handle large minibatch sizes from https://arxiv.org/abs/1706.02677
    lr = ramps.linear_rampup(epoch, args.lr_rampup) * (args.lr - args.initial_lr) + args.initial_lr

    if args.lr_rampdown_epochs:
        assert args.lr_rampdown_epochs >= args.epochs
        if epoch < args.epochs:
            # Cosine LR rampdown from https://arxiv.org/abs/1608.03983
            lr *= ramps.cosine_rampdown(epoch, args.lr_rampdown_epochs)
        elif args.constant_lr:
            # After the main phase, hold the LR at the value the cosine
            # curve had at epoch args.constant_lr_epoch.
            lr *= ramps.cosine_rampdown(args.constant_lr_epoch, args.lr_rampdown_epochs)
        else:
            # Otherwise restart the tail of the cosine curve every
            # args.cycle_interval epochs (cosine annealing with warm restarts).
            lr_rampdown_epochs = (args.lr_rampdown_epochs
                                  if args.cycle_rampdown_epochs == 0
                                  else args.cycle_rampdown_epochs)
            cycle_epoch = (lr_rampdown_epochs
                           - (args.lr_rampdown_epochs - args.epochs)
                           - args.cycle_interval
                           + (epoch - args.epochs) % args.cycle_interval)
            lr *= ramps.cosine_rampdown(cycle_epoch, lr_rampdown_epochs)

    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
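
To see what the extra branches in this variant do, the snippet below traces the resulting LR at a few fractional epochs using the ramp sketches from above. The settings are hypothetical; cycle_rampdown_epochs == 0 means the cycle reuses lr_rampdown_epochs as its cosine length. During the main phase the LR follows one cosine arc; afterwards the tail of the arc restarts every cycle_interval epochs, unless constant_lr freezes it instead.

import argparse

# Hypothetical settings, for illustration only.
args = argparse.Namespace(lr=0.1, initial_lr=0.0, lr_rampup=5,
                          epochs=180, lr_rampdown_epochs=210,
                          constant_lr=False, constant_lr_epoch=0,
                          cycle_rampdown_epochs=0, cycle_interval=30)

for epoch in [0.0, 2.5, 90.0, 180.0, 195.0, 210.0]:
    lr = linear_rampup(epoch, args.lr_rampup) * (args.lr - args.initial_lr) + args.initial_lr
    if epoch < args.epochs:
        lr *= cosine_rampdown(epoch, args.lr_rampdown_epochs)          # main arc
    else:
        cycle_epoch = (args.lr_rampdown_epochs
                       - (args.lr_rampdown_epochs - args.epochs)
                       - args.cycle_interval
                       + (epoch - args.epochs) % args.cycle_interval)  # restarting tail
        lr *= cosine_rampdown(cycle_epoch, args.lr_rampdown_epochs)
    print(f"epoch {epoch:6.1f} -> lr {lr:.5f}")

At epoch 210.0 the printed LR jumps back above its value at epoch 195.0, showing the warm restart of the cosine tail.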