Esempio n. 1
0
    def __init__(self, host='0.0.0.0', port=8000, server='paste'):
        """Store server settings and capture the TF 1.x graph/session.

        Args:
            host: interface to bind the HTTP server to.
            port: TCP port to listen on.
            server: bottle server backend name (e.g. 'paste').
        """
        # BUG FIX: the original body used `tf` and `K` without importing
        # them, raising NameError at construction time.  Imports are
        # deferred to __init__ (matching the companion example) so the
        # module can be imported without TensorFlow installed.
        import tensorflow as tf
        from bert4keras.backend import K
        import bottle

        self.host = host
        self.port = port
        self.server = server
        # Handles to the default TF graph and Keras session, presumably
        # so request handlers can re-enter them later — TODO confirm
        # against the serving code (not visible in this chunk).
        self.graph = tf.get_default_graph()
        self.sess = K.get_session()
        self.set_session = K.set_session
        self.bottle = bottle
Esempio n. 2
0
    def __init__(self, host='0.0.0.0', port=8000, server='paste'):
        """Record HTTP server settings and TF 1.x session/graph handles.

        Args:
            host: interface to bind the HTTP server to.
            port: TCP port to listen on.
            server: bottle server backend name (e.g. 'paste').
        """
        # Deferred imports: the module stays importable even when
        # TensorFlow / bert4keras / bottle are absent.
        import tensorflow as tf
        from bert4keras.backend import K
        import bottle

        # HTTP server configuration.
        self.host, self.port, self.server = host, port, server

        # Default graph and Keras session captured at construction time
        # (TF 1.x API), plus the session setter for later use.
        self.graph = tf.get_default_graph()
        self.sess = K.get_session()
        self.set_session = K.set_session

        # Keep a reference to the bottle module itself.
        self.bottle = bottle
Esempio n. 3
0
# Load CDial-GPT with bert4keras

import numpy as np
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer
from bert4keras.snippets import AutoRegressiveDecoder
from bert4keras.snippets import uniout
import tensorflow as tf
from bert4keras.backend import K
from flask import Flask, request, render_template, send_file

app = Flask(__name__)

# TF 1.x idiom: capture the default graph and Keras session at import
# time — presumably so Flask request handlers can re-enter them later;
# verify against the handler code (not visible in this chunk).
graph = tf.get_default_graph()
sess = K.get_session()
set_session = K.set_session

# Paths to the pretrained CDial-GPT (large) checkpoint files.
# NOTE(review): Windows-style backslash paths — adjust on other platforms.
config_path = r'GPT_large-tf\gpt_config.json'
checkpoint_path = r'GPT_large-tf\gpt_model.ckpt'
dict_path = r'GPT_large-tf\vocab.txt'

tokenizer = Tokenizer(dict_path, do_lower_case=True)  # build the tokenizer
# Token ids used to tag dialogue turns by speaker.
speakers = [
    tokenizer.token_to_id('[speaker1]'),
    tokenizer.token_to_id('[speaker2]')
]

model = build_transformer_model(config_path=config_path,
                                checkpoint_path=checkpoint_path,
                                model='GPT_OpenAI')  # build the model and load weights