Example #1
def load_test_model(model_path, opt, dummy_opt):
    """Load a trained checkpoint and rebuild the model for evaluation.

    Options missing from the saved checkpoint are backfilled from dummy_opt
    so that checkpoints produced by older training runs still load. Relies on
    project helpers (read_pickle, make_model_mappings, make_base_model,
    use_gpu) being imported at module level.
    """
    # map_location keeps all tensors on CPU, so a GPU-trained checkpoint
    # can be loaded on a machine without a GPU.
    checkpoint = torch.load(model_path,
                            map_location=lambda storage, loc: storage)

    model_opt = checkpoint['opt']
    for arg in dummy_opt:
        if arg not in model_opt:
            model_opt.__dict__[arg] = dummy_opt[arg]
    for attribute in ["share_embeddings", "stateful"]:
        if not hasattr(model_opt, attribute):
            model_opt.__dict__[attribute] = False

    # TODO: fix this
    if model_opt.stateful and not opt.sample:
        raise ValueError(
            'Beam search generator does not work with stateful models yet')

    mappings = read_pickle('{}/vocab.pkl'.format(model_opt.mappings))

    # mappings = read_pickle('{0}/{1}/vocab.pkl'.format(model_opt.mappings, model_opt.model))
    mappings = make_model_mappings(model_opt.model, mappings)

    model = make_base_model(model_opt, mappings, use_gpu(opt), checkpoint)
    model.eval()
    model.generator.eval()
    return mappings, model, model_opt
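
For context, a minimal calling sketch for the function above. The checkpoint path, the gpuid/sample fields on opt, and the dummy_opt defaults are illustrative assumptions only; real scripts build opt with argparse, and use_gpu() decides the device from it.

from argparse import Namespace

# Hypothetical options; actual scripts construct these with argparse.
opt = Namespace(gpuid=[], sample=True)
dummy_opt = {'share_embeddings': False, 'stateful': False}

# 'checkpoints/model_best.pt' is a placeholder path.
mappings, model, model_opt = load_test_model(
    'checkpoints/model_best.pt', opt, dummy_opt)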
Example #2
def load_test_model(model_path, opt, dummy_opt):
    """Simpler variant: load the checkpoint, backfill missing options from
    dummy_opt, then rebuild the vocab mappings and model for evaluation."""
    checkpoint = torch.load(model_path,
                            map_location=lambda storage, loc: storage)

    model_opt = checkpoint['opt']
    for arg in dummy_opt:
        if arg not in model_opt:
            model_opt.__dict__[arg] = dummy_opt[arg]

    mappings = read_pickle('{}/vocab.pkl'.format(model_opt.mappings))
    mappings = make_model_mappings(model_opt.model, mappings)

    model = make_base_model(model_opt, mappings, use_gpu(opt), checkpoint)
    model.eval()
    model.generator.eval()
    return mappings, model, model_opt
Example #3
def load_test_model(model_path, opt, dummy_opt):
    """Variant that also returns a critic network and can build an
    untrained model directly from opt when no checkpoint path is given."""
    if model_path is not None:
        print('Load model from {}.'.format(model_path))
        checkpoint = torch.load(model_path,
                                map_location=lambda storage, loc: storage)

        model_opt = checkpoint['opt']
        for arg in dummy_opt:
            if arg not in model_opt:
                model_opt.__dict__[arg] = dummy_opt[arg]
    else:
        print('Build model from scratch.')
        checkpoint = None
        model_opt = opt

    mappings = read_pickle('{}/vocab.pkl'.format(model_opt.mappings))

    # mappings = read_pickle('{0}/{1}/vocab.pkl'.format(model_opt.mappings, model_opt.model))
    mappings = make_model_mappings(model_opt.model, mappings)

    model, critic = make_base_model(model_opt, mappings, use_gpu(opt), checkpoint)
    model.eval()
    critic.eval()
    return mappings, model, model_opt, critic
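
A usage note for this variant, inferred only from the branches above: passing model_path=None skips checkpoint loading entirely, so opt itself must carry the full set of model options, including opt.mappings and opt.model.

# Builds an untrained model and critic from opt when no checkpoint is given;
# opt must provide all model options here (mappings, model, ...).
mappings, model, model_opt, critic = load_test_model(None, opt, dummy_opt)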
Example #4
def get_vocabs(vocab_path, vocab_type):
    """Load one vocabulary from a pickled mappings dict, keyed as
    '<vocab_type>_vocab', and report its size."""
    mappings = read_pickle(vocab_path)
    mapping_key = "{}_vocab".format(vocab_type)
    vocab = mappings[mapping_key]
    print('{0} vocab size: {1}'.format(vocab_type, len(vocab)))
    return vocab
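
Finally, a short sketch of calling get_vocabs. The pickle path and the 'utterance'/'kb' type names are placeholders; the pickled dict must actually contain the corresponding '<type>_vocab' keys for these calls to work.

# Placeholder path and vocab types.
utterance_vocab = get_vocabs('mappings/vocab.pkl', 'utterance')
kb_vocab = get_vocabs('mappings/vocab.pkl', 'kb')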