Example #1
    parser.add_argument("--config",  # option name inferred from the help text below
                        default=CONFIG_DEFAULT,
                        help="Configuration file for the bot, default=" +
                        CONFIG_DEFAULT)
    parser.add_argument("-m", "--model", required=True, help="Model to load")
    parser.add_argument("--sample",
                        default=False,
                        action='store_true',
                        help="Enable sampling mode")
    prog_args = parser.parse_args()

    conf = configparser.ConfigParser()
    if not conf.read(os.path.expanduser(prog_args.config)):
        log.error("Configuration file %s not found", prog_args.config)
        sys.exit()

    emb_dict = data.load_emb_dict(os.path.dirname(prog_args.model))
    log.info("Loaded embedded dict with %d entries", len(emb_dict))
    rev_emb_dict = {idx: word for word, idx in emb_dict.items()}
    end_token = emb_dict[data.END_TOKEN]

    net = model.PhraseModel(emb_size=model.EMBEDDING_DIM,
                            dict_size=len(emb_dict),
                            hid_size=model.HIDDEN_STATE_SIZE)
    net.load_state_dict(torch.load(prog_args.model))

    def bot_func(bot, update, args):
        text = " ".join(args)
        words = utils.tokenize(text)
        seq_1 = data.encode_words(words, emb_dict)
        input_seq = model.pack_input(seq_1, net.emb)
        enc = net.encode(input_seq)
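        # The excerpt cuts off here. A minimal sketch of how the handler plausibly
        # continues, assuming PhraseModel exposes decode_chain_argmax() /
        # decode_chain_sampling() and that data.MAX_TOKENS bounds the reply length;
        # bot.send_message() is the python-telegram-bot call used to answer the chat.
        if prog_args.sample:
            _, out_tokens = net.decode_chain_sampling(
                enc, input_seq.data[0:1], seq_len=data.MAX_TOKENS,
                stop_at_token=end_token)
        else:
            _, out_tokens = net.decode_chain_argmax(
                enc, input_seq.data[0:1], seq_len=data.MAX_TOKENS,
                stop_at_token=end_token)
        if out_tokens and out_tokens[-1] == end_token:
            out_tokens = out_tokens[:-1]
        reply = " ".join(rev_emb_dict[idx] for idx in out_tokens)
        bot.send_message(chat_id=update.message.chat_id, text=reply)
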
def process_string(s, emb_dict, rev_emb_dict, net, use_sampling=False):
    words = utils.tokenize(s)
    out_words = words_to_words(words, emb_dict, rev_emb_dict, net, use_sampling=use_sampling)
    print(" ".join(out_words))


if __name__ == "__main__":
    logging.basicConfig(format="%(asctime)-15s %(levelname)s %(message)s", level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--model", required=True, help="Model name to load")
    parser.add_argument("-s", "--string", help="String to process, otherwise will loop")
    parser.add_argument("--sample", default=False, action="store_true", help="Enable sampling generation instead of argmax")
    parser.add_argument("--self", type=int, default=1, help="Enable self-loop mode with given amount of phrases.")
    args = parser.parse_args()

    emb_dict = data.load_emb_dict(os.path.dirname(args.model))
    net = model.PhraseModel(emb_size=model.EMBEDDING_DIM, dict_size=len(emb_dict), hid_size=model.HIDDEN_STATE_SIZE)
    net.load_state_dict(torch.load(args.model))

    rev_emb_dict = {idx: word for word, idx in emb_dict.items()}

    while True:
        if args.string:
            input_string = args.string
        else:
            input_string = input(">>> ")
        if not input_string:
            break

        words = utils.tokenize(input_string)
        for _ in range(args.self):
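            # The listing ends here. A plausible completion, using the
            # words_to_words() sketch above: each pass feeds the previous reply
            # back into the model, so --self N chains N phrases.
            words = words_to_words(words, emb_dict, rev_emb_dict, net,
                                   use_sampling=args.sample)
            print(" ".join(words))

        # When a fixed --string was given, process it once and stop instead of
        # looping forever.
        if args.string:
            break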