Example 1
0
    def function(self, text):
        """Handle a request: extract TIME entities from *text* via TF-Serving.

        Strips spaces, tokenizes character-by-character, numerizes with the
        saved processor, posts to the local TF-Serving endpoint, and returns
        the TIME entities decoded from the predictions.  On any failure the
        formatted traceback is written to the response instead.
        """
        try:
            text = text.replace(' ', '')
            tokens = list(text)  # character-level tokens

            # Pre-process the tokens into the model's numeric input format.
            # NOTE(review): loading the processor on every request is wasteful;
            # consider caching it at handler/module level.
            processor = utils.load_processor(
                model_path='saved_model/time_entity/1')
            tensor = processor.process_x_dataset([tokens])

            # BERT embedding needs token ids plus an all-zero segment tensor.
            tensor = [{
                "Input-Token:0": i.tolist(),
                "Input-Segment:0": np.zeros(i.shape).tolist()
            } for i in tensor]

            # Call the TF-Serving REST prediction endpoint.
            r = requests.post(
                "http://localhost:8501/v1/models/time_entity:predict",
                json={"instances": tensor})
            # Surface HTTP errors explicitly instead of a confusing KeyError
            # on 'predictions' below.
            r.raise_for_status()
            preds = r.json()['predictions']

            # Convert argmax indices back to label strings.
            labels = processor.reverse_numerize_label_sequences(
                np.array(preds).argmax(-1))

            entities = get_predict('TIME', text, labels[0])

            return entities

        except Exception:
            # NOTE(review): this leaks the server traceback to the client —
            # fine for debugging, but should be replaced with logging + a
            # generic error message before production use.
            self.write(traceback.format_exc().replace('\n', '<br>'))
def predict():
    """Smoke-test the TIME-entity TF-Serving endpoint on a fixed sentence.

    Tokenizes a hard-coded Chinese sentence, sends it through the saved
    processor and the local TF-Serving endpoint, prints the extracted TIME
    entities for each predicted label sequence, and prints the wall-clock
    time taken.
    """
    t1 = time.time()

    text = '据《新闻联播》报道,9月9日至11日,中央纪委书记赵乐际到河北调研。'
    x = list(text)  # character-level tokens

    # Pre-process the tokens into the model's numeric input format.
    processor = utils.load_processor(model_path='saved_model/time_entity/1')
    tensor = processor.process_x_dataset([x])

    # BERT embedding needs token ids plus an all-zero segment tensor.
    tensor = [{
        "Input-Token:0": i.tolist(),
        "Input-Segment:0": np.zeros(i.shape).tolist()
    } for i in tensor]

    # Call the TF-Serving REST prediction endpoint.
    r = requests.post("http://localhost:8501/v1/models/time_entity:predict",
                      json={"instances": tensor})
    # Surface HTTP errors explicitly instead of a confusing KeyError below.
    r.raise_for_status()
    preds = r.json()['predictions']

    # Convert argmax indices back to label strings.
    labels = processor.reverse_numerize_label_sequences(
        np.array(preds).argmax(-1))

    for label in labels:
        entities = get_predict('TIME', text, label)
        print(entities)

    t2 = time.time()
    print('cost time: %ss.' % str(round(t2 - t1, 4)))
Example 3
0
    def test_load(self):
        """Round-trip a trained model through save/``load_processor`` and
        verify the restored processor matches the model's embedded one."""
        save_dir = os.path.join(tempfile.gettempdir(), str(time.time()))

        trained = BiGRU_Model()
        trained.fit(class_train_x, class_train_y, epochs=1)
        trained.save(save_dir)

        restored = utils.load_processor(save_dir)
        embedded = trained.embedding.processor

        # Vocabulary, label map, and concrete class must survive the round trip.
        assert restored.token2idx == embedded.token2idx
        assert restored.label2idx == embedded.label2idx

        assert restored.__class__ == embedded.__class__

        # Both sides must numerize the same sample identically.
        sample = class_train_x[:10]
        numerized_restored = restored.process_x_dataset(sample)
        numerized_embedded = trained.embedding.process_x_dataset(sample)
        assert np.array_equal(numerized_restored, numerized_embedded)
Example 4
0
def get_ai_reply(sentence,
                 model_path='../data/chat_model_serving/1',
                 url="http://106.54.166.111:8501/v1/models/gru_bert:predict",
                 answer='../data/answer.csv'):
    """Classify *sentence* with the served chat model and look up its reply.

    Args:
        sentence: input text, tokenized character by character.
        model_path: directory of the saved kashgari processor.
        url: TF-Serving REST prediction endpoint.
        answer: answer table — either a pandas DataFrame (indexed by integer
            label id, with an ``'answer'`` column) or the path of a CSV file
            to load it from.

    Returns:
        Tuple ``(label, probability, reply)`` for the top-scoring class.
    """
    # Bug fix: the original passed the CSV *path string* straight to
    # ``answer.loc[...]`` below, so the default value could never work.
    # Load the table on demand when a path is given; DataFrame callers
    # are unaffected.
    if isinstance(answer, str):
        import pandas as pd
        answer = pd.read_csv(answer)

    tokens = list(sentence)
    processor = utils.load_processor(model_path=model_path)
    tensor = processor.process_x_dataset([tokens])

    # BERT embedding needs token ids plus an all-zero segment tensor.
    tensor = [{
        "Input-Token:0": i.tolist(),
        "Input-Segment:0": np.zeros(i.shape).tolist()
    } for i in tensor]

    req = requests.post(url, json={"instances": tensor})
    # Surface HTTP errors explicitly instead of a confusing KeyError below.
    req.raise_for_status()
    preds = req.json()['predictions']

    # Convert argmax indices back to label strings.
    labels = processor.reverse_numerize_label_sequences(
        np.array(preds).argmax(-1))

    reply = answer.loc[int(labels[0]), 'answer']
    prob = np.array(preds).max()
    return labels[0], prob, reply
Example 5
0
# Train the model.  NOTE(review): the original Chinese comment "先只训练一轮"
# means "train only one round for now", which disagrees with epochs=10 below
# — confirm which was intended.
model.fit(train_x, train_y, valid_x, valid_y, batch_size=32, epochs=10)

model.evaluate(test_x, test_y, batch_size=512)

# Persist the trained model, then export a versioned TF-Serving SavedModel.
model.save('bert_model')

import kashgari
kashgari.utils.convert_to_saved_model(model, 'tf_bert_model', version=1)

test = pd.read_csv("test_bert_remove.csv")
print(test.head())

import numpy as np
# Tokenize every comment with the BERT tokenizer; the commented-out line
# below is the raw-string alternative, kept for reference.
predict_list = list(test['comment'].apply(lambda x: tokenizer.tokenize(x)))
#predict_list = list(test['comment'])
np_pre = np.asarray(predict_list)
print(np_pre[1])

from kashgari import utils
# Reload the exported processor and numerize the tokenized comments.
processor = utils.load_processor(model_path='tf_bert_model/1')
tensor = processor.process_x_dataset(np_pre)
# tensor = [{
#    "Input-Token:0": i.tolist(),
#    "Input-Segment:0": np.zeros(i.shape).tolist()
# } for i in tensor]

print(tensor[1])

# NOTE(review): prediction here uses the in-memory model, not the exported
# SavedModel — the commented-out dict block above would only be needed when
# posting to TF-Serving instead.
result = model.predict(tensor)
print(f"{result}")