Example #1
                               beam=beam,
                               wt_ranker=wt_ranker)
            # Each returned entry is (final score, generator probability,
            # ranker score, hypothesis text).
            for final, prob_gen, score_ranker, hyp in ret:
                print('%.3f gen %.3f ranker %.3f\t%s' %
                      (final, prob_gen, score_ranker, hyp))


if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--path_generator', '-pg', type=str)
    parser.add_argument('--path_ranker', '-pr', type=str)
    parser.add_argument('--cpu', action='store_true')
    parser.add_argument('--topk', type=int, default=3)
    parser.add_argument('--beam', type=int, default=3)
    parser.add_argument('--wt_ranker', type=float, default=1.)
    parser.add_argument('--topp', type=float, default=0.8)
    args = parser.parse_args()

    # Use the CPU if requested; otherwise use the GPU when one is available.
    cuda = False if args.cpu else torch.cuda.is_available()
    generator = GPT2Generator(args.path_generator, cuda)
    if args.path_ranker is None:
        # No ranker checkpoint given: run the generator alone in interactive mode.
        generator.play(topk=args.topk, beam=args.beam, topp=args.topp)
    else:
        # Otherwise load the ranker and combine it with the generator,
        # weighting its scores by --wt_ranker.
        from score import get_model
        ranker = get_model(args.path_ranker, cuda)
        integrated = Integrated(generator, ranker)
        integrated.play(topk=args.topk,
                        beam=args.beam,
                        topp=args.topp,
                        wt_ranker=args.wt_ranker)
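
The loop at the top of this example prints a final score alongside the generator probability (prob_gen) and the ranker score (score_ranker); the actual blending happens inside the call that produces ret, which is not shown here. A minimal sketch of one plausible combination, a weighted average controlled by wt_ranker (the helper name blend_scores and the exact formula are assumptions, not taken from this code):

def blend_scores(prob_gen, score_ranker, wt_ranker=1.0):
    # Hypothetical blending: weighted average of the generator's hypothesis
    # probability and the ranker's score. The combination actually used by
    # the omitted call may differ.
    return (1.0 - wt_ranker) * prob_gen + wt_ranker * score_ranker

# With wt_ranker=1.0 (the argparse default above) the ranker score dominates:
# blend_scores(0.4, 0.9, wt_ranker=1.0) -> 0.9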
Example #2
import torch
from score import get_model

# Use the GPU when available.
cuda = torch.cuda.is_available()
# Load the scoring model from the local checkpoint.
model = get_model('restore/updown.pth', cuda)


def rank(context, response):
    # Put the model in evaluation mode before scoring.
    model.eval()
    # predict() returns an array of scores; return the single-pair result as a string.
    score = model.predict(context, response, max_cxt_turn=None)
    return str(score[0])


if __name__ == "__main__":
    # Minimal smoke test with an illustrative context/response pair.
    print(rank('Can we meet tomorrow?', 'Sure, what time works for you?'))
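
Because rank scores a single context/response pair, choosing among several candidate replies only requires scoring each one and keeping the maximum. A small sketch (the candidate strings are illustrative, and rank returns the score as a string, so it is cast back to float for comparison):

def best_response(context, candidates):
    # Score every candidate against the same context and keep the highest.
    scored = [(float(rank(context, c)), c) for c in candidates]
    return max(scored)

# Illustrative usage:
# best_response('I lost my keys again.',
#               ['That is unfortunate.', 'Have you checked your coat pockets?'])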