# Example 1
def client_clone(id, idx):
    """Attach a clone client to an existing server identity and print results.

    The clone connects with the identity *id* of another client and fetches
    whatever that identity receives; *idx* only labels the console output.
    """
    clone = bert_client(port=int(sys.argv[1]),
                        port_out=int(sys.argv[2]),
                        identity=id)
    for arr in clone.fetch():
        print('clone-client-%d: received %d x %d' %
              (idx, arr.shape[0], arr.shape[1]))
 def run(self):
     """Benchmark ``bc.encode`` on ``self.batch``.

     Encodes ``self.batch`` ``self.num_repeat`` times against a freshly
     opened client and stores the mean wall-clock seconds per call in
     ``self.avg_time``, discarding the first two runs so cold-start /
     warm-up cost does not skew the average.

     Raises:
         ImportError: if the bert-serving client package is not installed.
     """
     try:
         from model_serving.client import bert_client
     except ImportError as err:
         # Chain the cause so the original import failure stays visible.
         raise ImportError(
             'BertClient module is not available, it is required for benchmarking. '
             'Please use "pip install -U bert-serving-client" to install it.'
         ) from err
     with bert_client(port=self.port,
                      port_out=self.port_out,
                      show_server_config=True,
                      check_version=False,
                      check_length=False) as bc:
         time_all = []
         for _ in range(self.num_repeat):
             start_t = time.perf_counter()
             bc.encode(self.batch)
             time_all.append(time.perf_counter() - start_t)
         # Drop the first two measurements: they are typically slow due
         # to cold-start / warm-up effects.
         self.avg_time = mean(time_all[2:])
# Example 3
# Using BertClient in multicast mode: clone clients share one identity

import sys
import threading

from model_serving.client import bert_client


def client_clone(id, idx):
    """Clone client: reuse the server identity *id* and print what arrives."""
    receiver = bert_client(port=int(sys.argv[1]),
                           port_out=int(sys.argv[2]),
                           identity=id)
    for mat in receiver.fetch():
        rows, cols = mat.shape[0], mat.shape[1]
        print('clone-client-%d: received %d x %d' % (idx, rows, cols))


if __name__ == '__main__':
    bc = bert_client(port=int(sys.argv[1]), port_out=int(sys.argv[2]))

    # Start two cloned clients sharing the same identity as bc.
    for clone_idx in range(2):
        threading.Thread(target=client_clone,
                         args=(bc.identity, clone_idx)).start()

    # Non-empty lines of the README serve as the demo corpus.
    with open('README.md') as fp:
        data = [line for line in fp if line.strip()]

    for _ in range(3):
        encoded = bc.encode(data)
        print('bc received %d x %d' % (encoded.shape[0], encoded.shape[1]))
# Example 4

# read and write TFRecord

import os

import GPUtil
import tensorflow as tf
from model_serving.client import bert_client

# Pin this process to the first GPU that GPUtil reports as available.
os.environ['CUDA_VISIBLE_DEVICES'] = str(GPUtil.getFirstAvailable()[0])
tf.logging.set_verbosity(tf.logging.INFO)

# Encode every non-empty README line; labels are placeholders (all zero).
with open('README.md') as fp:
    data = [v for v in fp if v.strip()]
    bc = bert_client()
    list_vec = bc.encode(data)
    list_label = [0 for _ in data]  # a dummy list of all-zero labels

# write tfrecords

with tf.python_io.TFRecordWriter('tmp.tfrecord') as writer:

    def create_float_feature(values):
        """Wrap *values* in a tf.train.Feature holding a FloatList.

        Coerce to list first, mirroring create_int_feature, so generator
        inputs are handled the same way by both helpers.
        """
        return tf.train.Feature(
            float_list=tf.train.FloatList(value=list(values)))

    def create_int_feature(values):
        """Wrap *values* in a tf.train.Feature holding an Int64List."""
        int64_values = list(values)
        return tf.train.Feature(
            int64_list=tf.train.Int64List(value=int64_values))

    for (vec, label) in zip(list_vec, list_label):
# Example 5 (snippet begins mid-way through its argument list)
    '-2',
    '-gpu_memory_fraction',
    '0.2',
    '-device',
    '3',
]
args = get_args_parser().parse_args(common)

for depth in range(1, 13):
    # Restart the server pooling from layer -depth, encode the subset,
    # and collect one vector batch per layer.
    args.pooling_layer = [-depth]
    server = BertServer(args)
    server.start()
    print('wait until server is ready...')
    time.sleep(20)
    print('encoding...')
    bc = bert_client(port=port, port_out=port_out, show_server_config=True)
    subset_vec_all_layers.append(bc.encode(subset_text))
    bc.close()
    server.close()
    print('done at layer -%d' % depth)

# Save BERT vectors and labels.
stacked_subset_vec_all_layers = np.stack(subset_vec_all_layers)
np.save('example7_5k_2', stacked_subset_vec_all_layers)
np_subset_label = np.array(subset_label)
np.save('example7_5k_2_subset_label', np_subset_label)

# Load BERT vectors and labels.
# NOTE(review): the filenames loaded here ('example7_5k_mxnet*') differ
# from the ones saved above ('example7_5k_2*') — confirm this is intended.
subset_vec_all_layers = np.load('example7_5k_mxnet.npy')
np_subset_label = np.load('example7_5k_mxnet_subset_label.npy')
subset_label = np_subset_label.tolist()
# Example 6
# NOTE: first install bert-as-service via
#   $ pip install bert-serving-server
#   $ pip install bert-serving-client

# Using BertClient in a synchronous way

import sys
import time

from model_serving.client import bert_client

if __name__ == '__main__':
    bc = bert_client(port=int(sys.argv[1]),
                     port_out=int(sys.argv[2]),
                     show_server_config=True)
    # Benchmark corpus: at most the first 512 non-empty README lines,
    # plus the total whitespace-token count across them.
    with open('README.md') as fp:
        data = [v for v in fp if v.strip()][:512]
        num_tokens = sum(
            len([vv for vv in v.split() if vv.strip()]) for v in data)

    # NOTE(review): bool('0') is True — any non-empty third CLI argument
    # enables show_tokens, not just truthy-looking ones.
    show_tokens = len(sys.argv) > 3 and bool(sys.argv[3])
    bc.encode(data)  # warm-up GPU
    # Double the batch size each round and time one encode call per size.
    for j in range(10):
        tmp = data * (2**j)
        c_num_tokens = num_tokens * (2**j)
        start_t = time.time()
        bc.encode(tmp, show_tokens=show_tokens)
        time_t = time.time() - start_t
# Example 7
import numpy as np
from model_serving.client import bert_client
from termcolor import colored

# Markdown prefix that marks a question line in the README.
prefix_q = '##### **Q:** '
# Number of most-similar questions to display per query.
topk = 5

questions = []
with open('README.md') as fp:
    for line in fp:
        if line.strip() and line.startswith(prefix_q):
            questions.append(line.replace(prefix_q, '').strip())
print('%d questions loaded, avg. len of %d' %
      (len(questions), np.mean([len(d.split()) for d in questions])))

with bert_client(port=4000, port_out=4001) as bc:
    doc_vecs = bc.encode(questions)

    # Interactive loop: embed each query and rank the stored questions by
    # similarity; runs until interrupted.
    while True:
        query = input(colored('your question: ', 'green'))
        query_vec = bc.encode([query])[0]
        # Dot product divided by the document norms only (the query norm
        # is constant within one query, so the ranking matches cosine
        # similarity even though the scores are not true cosines).
        score = np.sum(query_vec * doc_vecs, axis=1) / np.linalg.norm(doc_vecs,
                                                                      axis=1)
        topk_idx = np.argsort(score)[::-1][:topk]
        print('top %d questions similar to "%s"' %
              (topk, colored(query, 'green')))
        for idx in topk_idx:
            print('> %s\t%s' % (colored('%.1f' % score[idx], 'cyan'),
                                colored(questions[idx], 'yellow')))