def build_optim(model, checkpoint):
    # `opt` is assumed to be a module-level argparse namespace and `table`
    # the project package that provides the Optim wrapper.
    if opt.train_from:
        print('Loading optimizer from checkpoint.')
        optim = checkpoint['optim']
        # Round-trip the inner torch optimizer's state_dict on the
        # checkpointed object to restore its state after unpickling.
        optim.optimizer.load_state_dict(
            checkpoint['optim'].optimizer.state_dict())
    else:
        # Build a fresh Optim from the relevant members of `opt`.
        optim = table.Optim(
            opt.optim, opt.learning_rate, opt.alpha, opt.max_grad_norm,
            lr_decay=opt.learning_rate_decay,
            start_decay_at=opt.start_decay_at,
            opt=opt
        )

    optim.set_parameters(model.parameters())

    return optim
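
# Usage sketch (an assumption, not from the original source): resume from
# a checkpoint saved by the training loop. `opt.train_from` is assumed to
# hold the checkpoint path; map_location keeps tensors on CPU at load time.
import torch

checkpoint = None
if opt.train_from:
    checkpoint = torch.load(opt.train_from,
                            map_location=lambda storage, loc: storage)
optim = build_optim(model, checkpoint)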

# Example 2: the same builder using an argparse-style `args` namespace
# and a logger.

def build_optimizer(model, checkpoint=None):
    # `args` is assumed to be a module-level argparse namespace and
    # `logger` a module-level logging.Logger.
    if args.train_from:
        # Resuming training requires a checkpoint to restore from.
        assert checkpoint is not None

        logger.info(' * loading optimizer from checkpoint')
        optim = checkpoint['optim']
        # Same state_dict round-trip as in build_optim above.
        optim.optimizer.load_state_dict(
            checkpoint['optim'].optimizer.state_dict())

    else:
        optim = table.Optim(
            method=args.optim,
            lr=args.learning_rate,
            alpha=args.alpha,
            max_grad_norm=args.max_grad_norm,
            lr_decay=args.learning_rate_decay,
            start_decay_at=args.start_decay_at,
            opt=args
        )

    optim.set_parameters(model.parameters())

    return optim
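
# Minimal sketch (an assumption, not the project's actual table.Optim) of
# the wrapper interface both builders rely on: the constructor stores
# hyperparameters, set_parameters() builds the underlying torch optimizer
# over the trainable parameters, and .optimizer exposes it for the
# state_dict round-trips above.
import torch.optim


class Optim(object):
    def __init__(self, method, lr, alpha, max_grad_norm,
                 lr_decay=1.0, start_decay_at=None, opt=None):
        self.method = method
        self.lr = lr
        self.alpha = alpha              # smoothing constant, e.g. for RMSprop
        self.max_grad_norm = max_grad_norm
        self.lr_decay = lr_decay
        self.start_decay_at = start_decay_at

    def set_parameters(self, params):
        # Keep only trainable parameters and build the inner optimizer.
        self.params = [p for p in params if p.requires_grad]
        if self.method == 'sgd':
            self.optimizer = torch.optim.SGD(self.params, lr=self.lr)
        elif self.method == 'adam':
            self.optimizer = torch.optim.Adam(self.params, lr=self.lr)
        elif self.method == 'rmsprop':
            self.optimizer = torch.optim.RMSprop(
                self.params, lr=self.lr, alpha=self.alpha)
        else:
            raise ValueError('unknown optimization method: ' + self.method)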