# Assumes the Keras Horovod binding; get_optimizer_by_name is a repo-local
# helper that maps a name like "adam" to a Keras optimizer instance.
import horovod.tensorflow.keras as hvd

def adapt_optimizer(opt):
    if isinstance(opt, str):
        opt = get_optimizer_by_name(opt)
    # Scale the learning rate by the worker count, the usual rule for
    # synchronous data-parallel training; older Keras optimizers store
    # the rate under 'lr' rather than 'learning_rate'.
    opt_config = opt.get_config()
    try:
        opt_config['learning_rate'] *= hvd.size()
    except KeyError:
        opt_config['lr'] *= hvd.size()
    # Rebuild with the scaled rate and wrap for allreduce gradient averaging.
    return hvd.DistributedOptimizer(opt.from_config(opt_config))
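A minimal usage sketch for the adapter above, assuming Horovod's Keras binding and a script launched with horovodrun; the toy model and data are placeholders, not part of the original:

import numpy as np
import tensorflow as tf
import horovod.tensorflow.keras as hvd

hvd.init()

# Purely illustrative data and model.
x_train = np.random.rand(256, 4).astype('float32')
y_train = np.random.rand(256, 1).astype('float32')
model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])

# adapt_optimizer scales Adam's learning rate by hvd.size() and wraps it
# in hvd.DistributedOptimizer so gradients are averaged across workers.
model.compile(optimizer=adapt_optimizer(tf.keras.optimizers.Adam(0.001)),
              loss='mse')

# Broadcast rank 0's initial weights so every worker starts identically.
model.fit(x_train, y_train, batch_size=32, epochs=1,
          callbacks=[hvd.callbacks.BroadcastGlobalVariablesCallback(0)])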
Example #2
def hvd_adapt_optimizer(opt):
    # Same pattern as above: resolve a name to an optimizer, scale the
    # learning rate by hvd.size(), and wrap in hvd.DistributedOptimizer.
    if isinstance(opt, str):
        opt = get_optimizer_by_name(opt)
    opt_config = opt.get_config()
    try:
        opt_config["learning_rate"] *= hvd.size()
    except KeyError:
        opt_config["lr"] *= hvd.size()
    return hvd.DistributedOptimizer(opt.from_config(opt_config))