Example #1
def run_benchmark(args):
    import time
    from copy import deepcopy
    from statistics import mean  # a plain list-mean is all that is needed below
    from bert_serving.server import BertServer

    # load vocabulary
    with open(args.client_vocab_file, encoding='utf8') as fp:
        vocab = list(set(vv for v in fp for vv in v.strip().split()))
    print('vocabulary size: %d' % len(vocab))

    # select those non-empty test cases
    all_exp_names = [
        k.replace('test_', '') for k, v in vars(args).items()
        if k.startswith('test_') and v
    ]

    for exp_name in all_exp_names:
        # set common args
        cargs = deepcopy(args)
        exp_vars = vars(args)['test_%s' % exp_name]
        avg_speed = []

        for cvar in exp_vars:
            # override exp args
            setattr(cargs, exp_name, cvar)
            server = BertServer(cargs)
            server.start()
            # sleep until the server is ready
            time.sleep(cargs.wait_till_ready)

            # spawn the benchmark clients; BenchmarkClient is a Thread helper
            # defined elsewhere in this script that times BertClient.encode()
            all_clients = [
                BenchmarkClient(cargs, vocab) for _ in range(cargs.num_client)
            ]
            for bc in all_clients:
                bc.start()

            clients_speed = []
            for bc in all_clients:
                bc.join()
                clients_speed.append(cargs.client_batch_size / bc.avg_time)
            server.close()

            max_speed = int(max(clients_speed))
            min_speed = int(min(clients_speed))
            cavg_speed = int(mean(clients_speed))

            print('avg speed: %d\tmax speed: %d\tmin speed: %d' %
                  (cavg_speed, max_speed, min_speed),
                  flush=True)

            avg_speed.append(cavg_speed)

        with open(
                'benchmark-%d%s.result' %
            (args.num_worker, '-fp16' if args.fp16 else ''), 'a') as fw:
            print('\n|`%s`\t|samples/s|\n|---|---|' % exp_name, file=fw)
            for cvar, cavg_speed in zip(exp_vars, avg_speed):
                print('|%s\t|%d|' % (cvar, cavg_speed), file=fw)
            # for additional plotting
            print('\n%s = %s\n%s = %s' %
                  (exp_name, exp_vars, 'speed', avg_speed),
                  file=fw)
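
Both run_benchmark variants on this page rely on a BenchmarkClient helper that is defined elsewhere in the same benchmark script and is not shown here. Below is a minimal sketch of what such a helper could look like, assuming it is a thread that times repeated BertClient.encode() calls on random batches drawn from the vocabulary; the attribute names num_repeat, client_batch_size, max_seq_len, port and port_out are assumptions, not the original implementation.

import random
import threading
import time

from bert_serving.client import BertClient


class BenchmarkClient(threading.Thread):
    """Hypothetical sketch: stress the server and expose avg_time per encode()."""

    def __init__(self, args, vocab):
        super().__init__()
        # one batch of random "sentences" built from the benchmark vocabulary
        self.batch = [' '.join(random.choices(vocab, k=args.max_seq_len))
                      for _ in range(args.client_batch_size)]
        self.num_repeat = args.num_repeat
        self.port = args.port
        self.port_out = args.port_out
        self.avg_time = 0.0

    def run(self):
        bc = BertClient(port=self.port, port_out=self.port_out,
                        show_server_config=False)
        start = time.perf_counter()
        for _ in range(self.num_repeat):
            bc.encode(self.batch)
        # seconds per encode() call; run_benchmark() turns this into samples/s
        self.avg_time = (time.perf_counter() - start) / self.num_repeat
        bc.close()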
Example #2
def run_benchmark(args):
    import time
    from copy import deepcopy
    from statistics import mean  # a plain list-mean is all that is needed below
    from bert_serving.server import BertServer

    # load vocabulary
    with open(args.client_vocab_file, encoding='utf8') as fp:
        vocab = list(set(vv for v in fp for vv in v.strip().split()))
    print('vocabulary size: %d' % len(vocab))

    all_exp_names = [
        k.replace('test_', '') for k in vars(args).keys()
        if k.startswith('test_')
    ]
    fp = open(
        'benchmark-%d%s.result' %
        (args.num_worker, '-fp16' if args.fp16 else ''), 'w')
    for exp_name in all_exp_names:
        # set common args
        cargs = deepcopy(args)
        exp_vars = vars(args)['test_%s' % exp_name]
        avg_speed = []
        fp.write('\n%s\tsamples/s\n' % exp_name)
        for cvar in exp_vars:
            # override exp args
            setattr(cargs, exp_name, cvar)
            server = BertServer(cargs)
            server.start()
            # sleep until the server is ready
            time.sleep(cargs.wait_till_ready)

            # spawn the benchmark clients; BenchmarkClient is a Thread helper
            # defined elsewhere in this script that times BertClient.encode()
            all_clients = [
                BenchmarkClient(cargs, vocab) for _ in range(cargs.num_client)
            ]
            for bc in all_clients:
                bc.start()

            clients_speed = []
            for bc in all_clients:
                bc.join()
                clients_speed.append(cargs.client_batch_size / bc.avg_time)
            server.close()

            max_speed = int(max(clients_speed))
            min_speed = int(min(clients_speed))
            cavg_speed = int(mean(clients_speed))

            print('avg speed: %d\tmax speed: %d\tmin speed: %d' %
                  (cavg_speed, max_speed, min_speed),
                  flush=True)
            fp.write('%s\t%d\n' % (cvar, cavg_speed))
            fp.flush()
            avg_speed.append(cavg_speed)

        # for plotting
        fp.write('%s\n%s\n' % (exp_vars, avg_speed))
        fp.flush()
    fp.close()
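
The sweep in either run_benchmark variant is driven entirely by the test_* attributes of args: each such attribute names the server/client option to vary and holds the list of values to try. A hypothetical sketch of such a namespace (values are illustrative only; a real run also needs the usual BertServer options such as model_dir, port and port_out):

from types import SimpleNamespace

args = SimpleNamespace(
    num_worker=2,
    fp16=False,
    num_client=4,
    client_batch_size=2048,
    wait_till_ready=30,
    client_vocab_file='vocab.txt',              # any whitespace-separated text file
    test_client_batch_size=[256, 1024, 4096],   # sweeps args.client_batch_size
    test_max_seq_len=[32, 64, 128],             # sweeps args.max_seq_len
)
# run_benchmark(args)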
Example #3
def save_emb():

    common = [
        '-model_dir',
        '/home/ydu/BERT/uncased_L-12_H-768_A-12/',
        '-num_worker',
        '2',
        '-port',
        '5555',
        '-port_out',
        '5556',
        '-max_seq_len',
        '128',
        '-max_batch_size',
        '256',
        # '-tuned_model_dir', '/home/ydu/BERT/bert_mgpu/pretrain_output/10k-32b-all4data/',
        # '-ckpt_name', 'model.ckpt-2500',
    ]

    args = get_args_parser().parse_args(common)

    # folder = ['books', 'dvd', 'electronics', 'kitchen']
    data_path = '/home/ydu/BERT/DATA/'
    data_folder = ['metacritic', 'imdb', 'amazon', 'reddit']

    # model_path = 'home/ydu/BERT/bert_mgpu/results/'
    # model_folder = 'amazon-balanced/'
    # model_type = 'bert-tune'
    data = {}

    # setattr(args, 'tuned_model_dir', '/home/ydu/BERT/bert_mgpu/pretrain_output/reddit-pretrain')
    # setattr(args, 'ckpt_name', 'model.ckpt-2500')
    setattr(args, 'tuned_model_dir',
            '/home/ydu/BERT/bert_mgpu/pretrain_output/10k-32b-all4data')
    setattr(args, 'ckpt_name', 'model.ckpt-2500')

    for d in data_folder:
        fn = data_path + d + '/all.tsv'
        print("===========", fn, "================")
        text = read_tsv(fn)
        server = BertServer(args)
        server.start()
        print('wait until server is ready...')
        time.sleep(20)
        print('encoding...')
        bc = BertClient()
        data[d] = bc.encode(text)
        bc.close()
        server.close()

    pickle_name = data_path + 'EMB/allpre_emb.pickle'
    with open(pickle_name, 'wb') as handle:
        pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)

    return pickle_name
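
A small follow-up sketch for the example above: reload the pickle written by save_emb() and inspect each corpus' embedding matrix. With the default BERT-base checkpoint and REDUCE_MEAN pooling, BertClient.encode() returns one 768-dimensional vector per input line.

import pickle

import numpy as np

with open('/home/ydu/BERT/DATA/EMB/allpre_emb.pickle', 'rb') as handle:
    data = pickle.load(handle)

for name, emb in data.items():
    print(name, np.asarray(emb).shape)   # e.g. (num_lines_in_all.tsv, 768)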
Example #4
def extract_topics_all(issues_path, model_dir, topic_file, n_topics):
    """Extract topics for all issues with top n_topics topics"""
    topic_all = []
    text_all, divide_list = combine_issues(issues_path)
    topics = tp.get_topic_list(topic_file)
    topic_embedding = tp.get_topic_embedding(topics, port=3500, port_out=3501, model_path=model_dir)
    #topic_embedding = np.load('../output/topic_embedding.npy')
    print('topic embedding shape = ', topic_embedding.shape)
    stop_words = tp.expand_stopwords()
    print(len(stop_words))
    text_flat_tokenized, text_article_tokenized = tp.bert_tokens(text_all)
    tfidf_biglist = tp.tfidf_vec(text_flat_tokenized, stop_words)
    port_in = 6550
    port_out = 6551
    tmp_dir = './output/tmp'
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)
    # export the tmp dir so the server's ZeroMQ sockets are created there;
    # a bare local assignment would have no effect
    os.environ['ZEROMQ_SOCK_TMP_DIR'] = tmp_dir
    common = [
        '-model_dir', model_dir,
        '-num_worker', '2',
        '-port', str(port_in),
        '-port_out', str(port_out),
        '-max_seq_len', '20',
        '-max_batch_size', '256',
        '-pooling_strategy', 'NONE',
        '-pooling_layer', '-2',
        '-graph_tmp_dir', tmp_dir,
        '-cpu',
        '-show_tokens_to_client',
    ]
    args = get_args_parser().parse_args(common)
    server = BertServer(args)
    server.start()
    print('wait until server is ready...')
    time.sleep(20)
    print('encoding...')        
    for issue_num in range(len(text_all)):  
        #issue_num = 0
        divide_list_each = divide_list[issue_num]
        text_one_issue = text_all[issue_num]
        vec = tp.get_word_embedding_server_on(text_one_issue, port=port_in, port_out=port_out)
        topics_issue, sort_topic_sim = tp.get_topics_one_issue(vec,topic_embedding,topics, divide_list_each, 
                                               tfidf_biglist, issue_num, n_topics)
        topic_all.append(topics_issue)       
    server.close()
    topic_folder = './output/topic'
    if not os.path.isdir(topic_folder):
        os.makedirs(topic_folder)
    with open(topic_folder + '/topic.pkl', 'wb') as f:
        pickle.dump(topic_all, f)
    return topic_all
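
A hypothetical call to extract_topics_all(); every path below is a placeholder, and the tp helper module, combine_issues() and the other imports used by the original script are assumed to be available.

topic_all = extract_topics_all(
    issues_path='./data/issues/',                    # read by combine_issues()
    model_dir='/path/to/uncased_L-12_H-768_A-12/',   # plain BERT checkpoint directory
    topic_file='./data/topics.txt',                  # list of candidate topics
    n_topics=5,                                      # keep the 5 best-matching topics per issue
)
# the same result is also written to ./output/topic/topic.pkl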
Example #5
class CaissBertServer:
    def __init__(self, model_path):
        args = get_args_parser().parse_args([
            '-num_worker', '4', '-model_dir', model_path, '-port', '5555',
            '-port_out', '5556', '-max_seq_len', 'NONE', '-mask_cls_sep',
            '-cpu'
        ])
        # For detailed documentation, see: https://github.com/hanxiao/bert-as-service
        self._server = BertServer(args)

    def start(self):
        self._server.start()

    def close(self):
        self._server.close()
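
A minimal usage sketch for the class above, assuming a local BERT checkpoint directory; the client talks to the ports 5555/5556 hard-coded in CaissBertServer.

import time

from bert_serving.client import BertClient

server = CaissBertServer('/path/to/chinese_L-12_H-768_A-12')
server.start()
time.sleep(20)      # give the workers time to load the graph, as in the examples above

bc = BertClient(port=5555, port_out=5556)
vec = bc.encode(['hello world', 'bert-as-service usage sketch'])
print(vec.shape)    # (2, 768) for a BERT-base model

bc.close()
server.close()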
Example #6
class BertEncoderServer(BaseTextEncoder):
    store_args_kwargs = True
    is_trained = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        bert_args = ['-%s' % v for v in args]
        for k, v in kwargs.items():
            bert_args.append('-%s' % k)
            bert_args.append(str(v))
        self._bert_args = bert_args

    def post_init(self):
        from bert_serving.server import BertServer
        from bert_serving.server import get_args_parser
        self.bert_server = BertServer(get_args_parser().parse_args(self._bert_args))
        self.bert_server.start()
        self.bert_server.is_ready.wait()

    def close(self):
        self.bert_server.close()
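
A hypothetical instantiation of the encoder above: positional arguments become bare flags and keyword arguments become flag/value pairs before being handed to get_args_parser(); the checkpoint path and parameter values are placeholders.

encoder = BertEncoderServer(
    'cpu', 'mask_cls_sep',                           # -> ['-cpu', '-mask_cls_sep']
    model_dir='/path/to/uncased_L-12_H-768_A-12',
    num_worker=2,
    port=5555,
    port_out=5556,
    max_seq_len=64,
)
encoder.post_init()   # starts the embedded BertServer and waits until it is ready
# ... encode via bert_serving.client.BertClient on ports 5555/5556 ...
encoder.close()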
Example #7
for k, v in common.items():
    setattr(args, k, v)

for pool_layer in range(1, 13):
    setattr(args, 'pooling_layer', [-pool_layer])
    server = BertServer(args)
    server.start()
    print('wait until server is ready...')
    time.sleep(15)
    print('encoding...')
    bc = BertClient(port=common['port'],
                    port_out=common['port_out'],
                    show_server_config=True)
    subset_vec_all_layers.append(bc.encode(subset_text))
    bc.close()
    server.close()
    print('done at layer -%d' % pool_layer)
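
# Each entry of subset_vec_all_layers is a [num_samples, 768] matrix (one row
# per sentence in subset_text, assuming the default REDUCE_MEAN pooling).
# Before plotting with vis() below, each matrix still has to be reduced to two
# dimensions; a common choice (assumed here, not shown in the original snippet)
# is PCA from scikit-learn:
from sklearn.decomposition import PCA

subset_vec_2d = [PCA(n_components=2).fit_transform(v)
                 for v in subset_vec_all_layers]
# vis(subset_vec_2d, vis_alg='PCA')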


def vis(embed, vis_alg='PCA', pool_alg='REDUCE_MEAN'):
    plt.close()
    fig = plt.figure()
    plt.rcParams['figure.figsize'] = [21, 7]
    for idx, ebd in enumerate(embed):
        ax = plt.subplot(2, 6, idx + 1)
        vis_x = ebd[:, 0]
        vis_y = ebd[:, 1]
        plt.scatter(vis_x,
                    vis_y,
                    c=subset_label,
                    cmap=ListedColormap(["blue", "green", "yellow", "red"]),