with tf.Session() as sess:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((socket.gethostname(), 6666))
        s.listen(1)
        print("server ready")
        while True:
            c, _ = s.accept()
            sock_func(c, sess, module)
            # t = threading.Thread(target=sock_func, args=[c, sess, module])
            # t.start()


def sock_func(sock, sess, module):
    """Serve NER predictions over one connected client socket.

    Reads UTF-8 text requests in a loop, runs ``module.predict`` on each
    request and replies with the result as UTF-8 encoded JSON.  Returns
    when the peer closes the connection; the socket is always closed on
    exit (previously it leaked unless an exception fired, and a closed
    peer caused an infinite busy loop on empty reads).

    Args:
        sock: connected client socket.
        sess: TensorFlow session, passed through to ``module.predict``.
        module: object exposing ``predict(text, sess)``.
    """
    try:
        while True:
            data = sock.recv(4096).decode("utf-8")
            if not data:
                # recv() returns b"" when the peer closed -> stop serving
                break
            print(data, len(data))
            result = json.dumps(module.predict(data, sess),
                                ensure_ascii=False).encode("utf-8")
            # sendall guarantees the whole payload is written (send may be partial)
            sock.sendall(result)
    except Exception as exc:
        # best-effort server loop: report, drop this client, keep process alive
        print("EXCEPTION", exc)
    finally:
        sock.close()


if __name__ == '__main__':
    # Hard-coded runtime configuration; no CLI parsing is performed here,
    # argparse.Namespace is used purely as an attribute container.
    run_args = argparse.Namespace(
        save_path="saves/wikipedia_150/",
        load_from_file=True,
    )
    # Build the model and serve it over the socket loop.
    open_socket(NER.NER(run_args))
# Example #2
                     embeddings_file_path=None,
                     stacked_embeddings=stacked_embeddings)

# Architecture hyper-parameters handed to the NER constructor.
model_params = dict(
    filter_width=3,
    embeddings_dropout=True,
    n_filters=[256],
    dense_dropout=True,
    token_embeddings_dim=300,
    char_embeddings_dim=50,
    cell_type='lstm',
    use_batch_norm=True,
    concat_embeddings=True,
    use_crf=True,
    use_char_embeddins=True,  # (sic) key spelling is what the NER API expects
    net_type='rnn',
    use_capitalization=False,
)

net = NER.NER(corp, stacked_embeddings, **model_params)

# Training schedule passed to net.fit().
learning_params = dict(
    dropout_rate=0.5,
    epochs=200,
    learning_rate=0.001,  # 0.0003
    batch_size=20,
    learning_rate_decay=0.94,
)

results = net.fit(**learning_params)