예제 #1
0
def test_mpc():
    """End-to-end test of the MPC NN protocol on localhost.

    Starts five peers (main client, triplets provider, two data clients and
    a label client) on ports 19001-19005, drives one training session, then
    stops the triplets provider so the training loop exits on its own.
    """
    print("\n======== Test mpc NN protocol ============\n")
    # Shared address book: peer id -> "host:port".
    ip_dict = {
        0: "127.0.0.1:19001",
        1: "127.0.0.1:19002",
        2: "127.0.0.1:19003",
        3: "127.0.0.1:19004",
        4: "127.0.0.1:19005"
    }
    channel0 = Peer(0, "[::]:19001", 10, ip_dict, 3, logger=Logger(prefix="Channel0:"))
    channel1 = Peer(1, "[::]:19002", 10, ip_dict, 3, logger=Logger(prefix="Channel1:", level=1))
    channel2 = Peer(2, "[::]:19003", 10, ip_dict, 3, logger=Logger(prefix="Channel2:"))
    channel3 = Peer(3, "[::]:19004", 10, ip_dict, 3, logger=Logger(prefix="Channel3:"))
    channel4 = Peer(4, "[::]:19005", 10, ip_dict, 3, logger=Logger(prefix="Channel4:"))
    # Roles: 0 = main/computation client, 1 = triplets provider,
    # 2/3 = data clients (10 and 20 random features), 4 = label client.
    main_client = MainTFClient(channel0, [2, 3], 4, logger=Logger(prefix="Main client:"))
    triplets_provider = TripletsProvider(channel1, logger=Logger(prefix="Triplet provider:"))
    data_client0 = DataClient(channel2, RandomDataLoader(10), server_id=0, triplets_id=1, other_data_clients=[3],
                              logger=Logger(prefix="Data client 0:"))
    data_client1 = DataClient(channel3, RandomDataLoader(20), server_id=0, triplets_id=1, other_data_clients=[2],
                              logger=Logger(prefix="Data client 1:"))
    label_client = LabelClient(channel4, RandomDataLoader(1), server_id=0, logger=Logger(prefix="Lable client:"))
    triplets_provider.start_listening()
    data_client0_th = threading.Thread(target=data_client0.start_train)
    data_client1_th = threading.Thread(target=data_client1.start_train)
    label_client_th = threading.Thread(target=label_client.start_train)
    main_client_send_config_th = threading.Thread(
        target=main_client.send_config_message,
        args=({
                  "client_dims": {2: 10, 3: 20},
                  "out_dim": 10,
                  "batch_size": 10,
                  "learning_rate": 0.01
              },)
    )

    data_client0_th.start()
    data_client1_th.start()
    label_client_th.start()
    main_client.build_default_network(10, 1)
    # Give the worker threads time to come up before sending the config.
    time.sleep(15)
    main_client_send_config_th.start()

    main_client_send_config_th.join()
    # NOTE(review): start_listening() was already called above; this second
    # call is presumably a no-op or restart — confirm it is idempotent.
    triplets_provider.start_listening()
    time.sleep(0.5)
    print("====== Configuration message sent =========")
    main_client_start_th = threading.Thread(target=main_client.start_train)
    main_client_start_th.start()
    print("====== Stop the triplet provider, the training should be auto exited =========")
    # Let training run for a while, then cut off triplets so it self-terminates.
    time.sleep(20)
    triplets_provider.stop_listening()
    main_client_start_th.join()
    data_client0_th.join()
    data_client1_th.join()
    label_client_th.join()

    print("====== MPC NN Test finished =============")
예제 #2
0
파일: Channel.py 프로젝트: CindyLYP/Sim_MPC
 def __init__(self, self_id: int, n_clients: int, logger: Logger = None):
     """
     :param self_id: Identifier of this client on the channel
     :param n_clients: Number of clients that will join this channel
     :param logger: Logger to use; a default Logger is created when omitted
     """
     self.client_id = self_id
     self.n_clients = n_clients
     self.logger = logger if logger is not None else Logger()
예제 #3
0
 def __init__(self, channel: BaseChannel, logger: Logger = None):
     """
     :param channel: Channel for communication; this client's id is taken
         from ``channel.client_id``
     :param logger: Logger to use; a default Logger is created when omitted
     """
     self.client_id = channel.client_id
     self.channel = channel
     if not logger:
         logger = Logger()
     self.logger = logger
예제 #4
0
    def __init__(self, task_name, client_id: int, client_port: int,
                 ip_dict: dict, client_config: dict):
        """Build the task's client handle and attach the query service.

        Creates the per-task working directory path, a file logger, the
        communication channel and the MPC client. Failures are recorded in
        ``self.status`` (``TaskStatus.Error``) instead of raising.

        :param task_name: task name; combined with client_id to form the task directory
        :param client_id: id of this client within the task
        :param client_port: local port the channel listens on
        :param ip_dict: peer id -> address map (string keys converted to int)
        :param client_config: client arguments; must contain an "mpc_paras" entry
        """
        self.status = None
        self.task_path = Config.TaskRootPath + task_name + "-%d" % client_id + "/"

        self.arg_dict = dict()

        # The log file handle is owned by the Logger for the task's lifetime.
        self.arg_dict["logger"] = self.logger = Logger(
            open(self.task_path + "log.txt", "w"))
        self.arg_dict["task_path"] = self.task_path
        try:
            ip_dict = str_dict_to_int(ip_dict)
            self.arg_dict["channel"] = self.channel = \
                Channel(client_id, '127.0.0.1:%d' % client_port, 3, ip_dict, 120, self.logger)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
            self.logger.logE("Failed to create channel. Abort.")
            self.status = TaskStatus.Error
            return

        try:
            self.arg_dict["mpc_paras"] = MPCClientParas(
                **client_config["mpc_paras"])
            del client_config["mpc_paras"]
            self.arg_dict.update(client_config)
        except Exception:
            self.logger.logE("Set mpc_paras parameter failed. Abort.")
            self.status = TaskStatus.Error
            return

        try:
            self.client_handle = build_client(self.arg_dict)
        except Exception:
            self.logger.logE("Failed to build client. Abort.")
            self.status = TaskStatus.Error
            return

        grpc_servicer = add_query_service_to_computation_grpc_server(self)
        if grpc_servicer is not None:
            # Typos fixed in the log messages below ("stask" -> "task",
            # "compuation" -> "computation").
            self.logger.log(
                "Attach query service to computation grpc server. Available to query task status"
            )
        else:
            self.logger.logW(
                "Cannot attach query service since computation server is not grpc server."
            )
        self.grpc_servicer = grpc_servicer
        if self.grpc_servicer is not None:
            self.grpc_servicer.add_query_dict(self.client_handle.calls)
            self.grpc_servicer.add_query("status", lambda: self.status.name)

        self.status = TaskStatus.Created
예제 #5
0
def test_align_data():
    """Run the dataset-alignment protocol across four local peers."""
    print("\n======================== Test aligning a Dataset ========================\n")
    ip_dict = {
        0: "127.0.0.1:19001",
        1: "127.0.0.1:19002",
        2: "127.0.0.1:19003",
        3: "127.0.0.1:19004"
    }
    # One Peer per participant, all sharing the same address book.
    channels = [
        Peer(peer_id, "[::]:%d" % port, 10, ip_dict, 3,
             logger=Logger(prefix="Channel%d:" % peer_id))
        for peer_id, port in enumerate((19001, 19002, 19003, 19004))
    ]
    # Peer 0 coordinates the alignment; peers 1-3 hold the datasets.
    align_client = AlignClient(channels[0], [1, 2, 3], logger=Logger(prefix="align client:"))
    keys = ['d1d1d1d1d1d1d1d1', '2f342dfwvge412qw', 'data_client3d9d9']
    preprocess_clients = []
    for idx in (1, 2, 3):
        others = [i for i in (1, 2, 3) if i != idx]
        preprocess_clients.append(
            PreprocessClient(channels[idx], filepath='TestDataset/data',
                             filename='d%d.csv' % idx, prim_key=0,
                             iv=iv, key=keys[idx - 1], align_id=0,
                             other_data_clients=others,
                             logger=Logger(prefix="Data client %d:" % idx)))

    # Data clients first, coordinator last — same order as each would start
    # individually.
    workers = [threading.Thread(target=client.start_align)
               for client in preprocess_clients + [align_client]]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    print("==========================  finished ==========================")
예제 #6
0
import Task.Monitor as monitor

# Flask application serving the client-side task API.
client_server = Flask(__name__)


class ClientConfig:
    """Static configuration for the client HTTP server."""
    # Directory where the server's log files are written.
    server_log_path = "Test/Log/"
    # Log verbosity (0 = most verbose). Usage not visible in this chunk.
    log_level = 0


def resp_msg(status="ok", msg=None):
    """Build the JSON-serializable response body used by the HTTP endpoints."""
    return dict(status=status, msg=msg)


# Module-level logger: appends to a per-process file named after the Flask
# app object's id(); level=0 logs everything.
logger = Logger(open(
    ClientConfig.server_log_path +
    "/client_server-%d_log.txt" % id(client_server), "a+"),
                level=0)


@client_server.route("/createTask", methods=["POST"])
def create_task():
    post_data = request.get_data(as_text=True)
    logger.log("Received createTask request with data:\n" + post_data)
    try:
        post_json = json.loads(post_data)
    except Exception as e:
        err = "Error while parsing task parameters. Error:\n" + str(e)
        logger.logE(err)
        return resp_msg("err", err)
    try:
        TM.create_task_pyscript(**post_json)
예제 #7
0
def test_credit_data_2pc():
    """End-to-end test of the SecureXGBoost protocol on the credit-default CSV.

    Rows 0-399 train, rows 400-499 evaluate. Feature columns are split
    between two feature clients (0-29 and 30-71); column 72 is the label.
    The triplet provider is commented out in this variant.
    """
    # Disable GPU since server do not have any computation other than sigmoid

    print("\n======== Test mpc SecureXGBoost protocol with Credit Default TestDataset ============\n")
    # Shared address book: peer id -> "host:port".
    ip_dict = {
        0: "127.0.0.1:19001",
        1: "127.0.0.1:19002",
        2: "127.0.0.1:19003",
        3: "127.0.0.1:19004",
        4: "127.0.0.1:19005"
    }
    channel0 = Peer(0, "[::]:19001", 10, ip_dict, 13, logger=Logger(prefix="Channel0:"))
    channel1 = Peer(1, "[::]:19002", 10, ip_dict, 13, logger=Logger(prefix="Channel1:", level=1))
    channel2 = Peer(2, "[::]:19003", 10, ip_dict, 13, logger=Logger(prefix="Channel2:"))
    channel3 = Peer(3, "[::]:19004", 10, ip_dict, 13, logger=Logger(prefix="Channel3:"))
    channel4 = Peer(4, "[::]:19005", 10, ip_dict, 13, logger=Logger(prefix="Channel4:"))

    # NOTE(review): argument order assumed to be (feature_client_ids,
    # label_client_id, main_client_id, triplet_producer_id) — confirm.
    mpc_paras = MPCClientParas([2, 3], 4, 0, 1)
    # Boosting hyper-parameters; batch sizes of None presumably mean
    # "use the whole dataset" — confirm against MainClient.
    config = {
        "learning_rate": 0.1,
        "sync_info": {
            "seed": 8964
        },
        "max_iteration": 5,
        "max_depth": 2,
        "reg_lambda" : 1,
        "gamma" : 0.,
        "col_sample_ratio" : 0.8,
        "row_sample_ratio" : 1.,
        "batch_size" : None,
        "test_batch_size" : None
    }
    main_client = MainClient(channel0, Logger(prefix="Main client:"), mpc_paras,
                             AUC_KS, config)
    # triplets_provider = TripletProducer(channel1, Logger(prefix="Triplet provider:"), mpc_paras, [2, 3])
    data_client0 = FeatureClient(channel2, Logger(prefix="Data client 0:"), mpc_paras,
                                 CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400)), list(range(30))),
                                 CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400, 500)), list(range(30))))
    data_client1 = FeatureClient(channel3, Logger(prefix="Data client 1:"), mpc_paras,
                                 CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400)), list(range(30, 72))),
                                 CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400, 500)), list(range(30, 72))))

    label_client = LabelClient(channel4, Logger(prefix="Lable client:"), mpc_paras,
                               CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400)), list(range(72, 73))),
                               CSVDataLoader("Test/TestDataset/Data/credit_default.csv", list(range(400, 500)),
                                             list(range(72, 73))), AUC_KS, "")
    main_client_start_th = threading.Thread(
        target=main_client.start_train,
    )
    data_client0_th = threading.Thread(target=data_client0.start_train)
    data_client1_th = threading.Thread(target=data_client1.start_train)
    label_client_th = threading.Thread(target=label_client.start_train)
    # triplets_provider_th = threading.Thread(target=triplets_provider.start_listening)
    # triplets_provider_th.start()
    data_client0_th.start()
    data_client1_th.start()
    label_client_th.start()
    # Let the data/label clients come up before the main client starts.
    time.sleep(1)
    main_client_start_th.start()
    print("====== Stop the triplet provider, the training should be auto exited =========")
    main_client_start_th.join()
    data_client0_th.join()
    data_client1_th.join()
    label_client_th.join()
    print("====== MPC SharedNN Test finished =============")
예제 #8
0
from Client.MPCClient import MPCClientParas, ClientMode
from Client.SharedNN.DataProviders import LabelClient
from Utils.Log import Logger
from Client.Data.DataLoader import CSVDataLoader

# Address book for all five participants of the SharedNN protocol.
ip_dict = {
    0: "127.0.0.1:19001",
    1: "127.0.0.1:19002",
    2: "127.0.0.1:19003",
    3: "127.0.0.1:19004",
    4: "127.0.0.1:19005"
}

# NOTE(review): argument order assumed to be (feature_client_ids,
# label_client_id, main_client_id, triplet_producer_id) — confirm.
mpc_paras = MPCClientParas([2, 3], 4, 0, 1)

# NOTE(review): Peer, MSELoss and AUC_KS are used below but not imported in
# this file — it will raise NameError unless they are injected elsewhere.
channel = Peer(4,
               "[::]:19005",
               3,
               ip_dict,
               13,
               logger=Logger(prefix="Channel4:"))

# Label provider (peer 4): trains on rows 0-39999 and evaluates on rows
# 40000-49999, using column 72 of the credit-default CSV as the label.
label_client = LabelClient(
    channel, Logger(prefix="Lable client:"), mpc_paras, ClientMode.Train,
    CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                  list(range(40000)), list(range(72, 73))),
    CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                  list(range(40000, 50000)), list(range(72, 73))), MSELoss(),
    AUC_KS)

label_client.start_train()
예제 #9
0
from Communication.RPCComm import Peer
from Client.MPCClient import MPCClientParas, ClientMode
from Client.SharedNN.DataProviders import FeatureClient
from Utils.Log import Logger
from Client.Data.DataLoader import CSVDataLoader

# Address book for all five participants of the SharedNN protocol.
ip_dict = {
    0: "127.0.0.1:19001",
    1: "127.0.0.1:19002",
    2: "127.0.0.1:19003",
    3: "127.0.0.1:19004",
    4: "127.0.0.1:19005"
}
# NOTE(review): argument order assumed to be (feature_client_ids,
# label_client_id, main_client_id, triplet_producer_id) — confirm.
mpc_paras = MPCClientParas([2, 3], 4, 0, 1)
# This process is peer 3, one of the two feature providers.
channel = Peer(3,
               "[::]:19004",
               3,
               ip_dict,
               13,
               logger=Logger(prefix="Channel3:"))
# Trains on rows 0-39999, evaluates on rows 40000-49999; this client holds
# feature columns 30-71 of the credit-default CSV.
feature_client2 = FeatureClient(
    channel, Logger(prefix="Data client 1:"), mpc_paras, ClientMode.Train,
    CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                  list(range(40000)), list(range(30, 72))),
    CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                  list(range(40000, 50000)), list(range(30, 72))))
feature_client2.start_train()
예제 #10
0
# -*-coding:utf-8-*-
import os, re, json, sys
# import keras
# keras.backend.clear_session()
from tensorflow import keras
import tensorflow as tf
import jieba
from flask import request, jsonify, render_template, Response, Flask
from tensorflow.python.keras.preprocessing.sequence import pad_sequences
import numpy as np
from tensorflow.python.keras.models import load_model
import pickle
from config.config import TCconfig
from Utils.Log import Logger
logger = Logger().logger

# Flask app serving the text-classification model; all heavy assets below
# are loaded once at import time.
app = Flask(__name__)
app.config['DEBUG'] = False
logger.info('start flask ...')

# NOTE(review): the unpickled object is bound to `cn_model`, but the file
# name suggests a word->index dict — confirm its actual type before use.
with open(os.path.join(os.getcwd(), 'Pickle/words_and_index_dict.pickle'),
          'rb') as fr:
    cn_model = pickle.load(fr)
logger.info('load wiki ...')

# Trained news-classifier checkpoint (Keras HDF5).
model_path = os.path.join(os.getcwd(),
                          'Checkpoints/news_classifier_checkpoint.h5')
model = load_model(model_path)
logger.info('load trained model ...')
logger.info('init graph ...')
예제 #11
0
def test_credit_data_2pc():
    """End-to-end test of the SharedNN protocol on the credit-default CSV.

    Rows 0-39999 train, rows 40000-49999 evaluate. Feature columns are
    split between two feature clients (0-29 and 30-71); column 72 is the
    label. A triplet producer serves the two feature clients. The label
    client's per-test metrics are dumped to mpc_record.csv at the end.
    """
    # Disable GPU since server do not have any computation other than sigmoid

    print(
        "\n======== Test mpc SharedNN protocol with Credit Default TestDataset ============\n"
    )
    # Shared address book: peer id -> "host:port".
    ip_dict = {
        0: "127.0.0.1:19001",
        1: "127.0.0.1:19002",
        2: "127.0.0.1:19003",
        3: "127.0.0.1:19004",
        4: "127.0.0.1:19005"
    }
    channel0 = Peer(0,
                    "[::]:19001",
                    10,
                    ip_dict,
                    13,
                    logger=Logger(prefix="Channel0:"))
    channel1 = Peer(1,
                    "[::]:19002",
                    10,
                    ip_dict,
                    13,
                    logger=Logger(prefix="Channel1:", level=1))
    channel2 = Peer(2,
                    "[::]:19003",
                    10,
                    ip_dict,
                    13,
                    logger=Logger(prefix="Channel2:"))
    channel3 = Peer(3,
                    "[::]:19004",
                    10,
                    ip_dict,
                    13,
                    logger=Logger(prefix="Channel3:"))
    channel4 = Peer(4,
                    "[::]:19005",
                    10,
                    ip_dict,
                    13,
                    logger=Logger(prefix="Channel4:"))
    # NOTE(review): argument order assumed to be (feature_client_ids,
    # label_client_id, main_client_id, triplet_producer_id) — confirm.
    mpc_paras = MPCClientParas([2, 3], 4, 0, 1)
    # Network: 64 inputs -> hidden [1] -> 1 output; evaluates every 11
    # batches on a 10000-row test batch, for at most 33 iterations.
    main_client = MainClient(channel0,
                             Logger(prefix="Main client:"),
                             mpc_paras,
                             in_dim=64,
                             out_dim=1,
                             layers=[1],
                             batch_size=64,
                             test_batch_size=10000,
                             test_per_batches=11,
                             learning_rate=0.1,
                             max_iter=33)
    triplets_provider = TripletProducer(channel1,
                                        Logger(prefix="Triplet provider:"),
                                        mpc_paras, [2, 3])
    data_client0 = FeatureClient(
        channel2, Logger(prefix="Data client 0:"), mpc_paras,
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000)), list(range(30))),
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000, 50000)), list(range(30))))
    data_client1 = FeatureClient(
        channel3, Logger(prefix="Data client 1:"), mpc_paras,
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000)), list(range(30, 72))),
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000, 50000)), list(range(30, 72))))

    label_client = LabelClient(
        channel4, Logger(prefix="Lable client:"), mpc_paras,
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000)), list(range(72, 73))),
        CSVDataLoader("Test/TestDataset/Data/credit_default.csv",
                      list(range(40000, 50000)), list(range(72, 73))),
        MSELoss(), AUC_KS, "")
    main_client_start_th = threading.Thread(target=main_client.start_train, )
    data_client0_th = threading.Thread(target=data_client0.start_train)
    data_client1_th = threading.Thread(target=data_client1.start_train)
    label_client_th = threading.Thread(target=label_client.start_train)
    triplets_provider_th = threading.Thread(
        target=triplets_provider.start_listening)
    # Triplet provider first, then data/label clients, then the main client.
    triplets_provider_th.start()
    data_client0_th.start()
    data_client1_th.start()
    label_client_th.start()
    time.sleep(1)
    main_client_start_th.start()
    print(
        "====== Stop the triplet provider, the training should be auto exited ========="
    )
    main_client_start_th.join()
    data_client0_th.join()
    data_client1_th.join()
    label_client_th.join()
    print("====== MPC SharedNN Test finished =============")
    # Persist the label client's evaluation history for offline inspection.
    np.savetxt("mpc_record.csv",
               np.array(label_client.test_record),
               delimiter=",")
    triplets_provider.stop_listening()
예제 #12
0
def test_2pc_mnist():
    """End-to-end test of the MPC NN protocol on an MNIST CSV dump.

    Rows 0-49999 train, 50000-54999 evaluate. Pixel columns are split
    between two data clients (0-299 and 300-783); columns 784-793 hold the
    one-hot labels.
    """
    print("\n======== Test mpc NN protocol with MNIST Dataset ============\n")
    # Shared address book: peer id -> "host:port".
    ip_dict = {
        0: "127.0.0.1:19001",
        1: "127.0.0.1:19002",
        2: "127.0.0.1:19003",
        3: "127.0.0.1:19004",
        4: "127.0.0.1:19005"
    }
    channel0 = Peer(0, "[::]:19001", 10, ip_dict, 3, logger=Logger(prefix="Channel0:"))
    channel1 = Peer(1, "[::]:19002", 10, ip_dict, 3, logger=Logger(prefix="Channel1:", level=1))
    channel2 = Peer(2, "[::]:19003", 10, ip_dict, 3, logger=Logger(prefix="Channel2:"))
    channel3 = Peer(3, "[::]:19004", 10, ip_dict, 3, logger=Logger(prefix="Channel3:"))
    channel4 = Peer(4, "[::]:19005", 10, ip_dict, 3, logger=Logger(prefix="Channel4:"))
    # Roles: 0 = main client, 1 = triplets provider, 2/3 = data clients,
    # 4 = label client.
    main_client = MainTFClient(channel0, [2, 3], 4, logger=Logger(prefix="Main client:"))
    triplets_provider = TripletsProvider(channel1, logger=Logger(prefix="Triplet provider:"))
    data_client0 = DataClient(channel2,
                              CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000)), list(range(300))),
                              CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000, 55000)), list(range(300))),
                              server_id=0, triplets_id=1, other_data_clients=[3],
                              logger=Logger(prefix="Data client 0:"))
    data_client1 = DataClient(channel3,
                              CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000)), list(range(300, 784))),
                              CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000, 55000)),
                                            list(range(300, 784))),
                              server_id=0, triplets_id=1, other_data_clients=[2],
                              logger=Logger(prefix="Data client 1:"))
    label_client = LabelClient(channel4,
                               CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000)), list(range(784, 794))),
                               CSVDataLoader("Test/TestDataset/mnist.csv", list(range(50000, 55000)),
                                             list(range(784, 794))),
                               server_id=0, logger=Logger(prefix="Lable client:"))
    triplets_provider.start_listening()
    data_client0_th = threading.Thread(target=data_client0.start_train)
    data_client1_th = threading.Thread(target=data_client1.start_train)
    label_client_th = threading.Thread(target=label_client.start_train)
    config = {
        "client_dims": {2: 300, 3: 484},
        "out_dim": 150,
        "batch_size": 32,
        "test_per_batch": 100,
        "test_batch_size": 1000,
        "learning_rate": 0.01,
        "sync_info": {
            "seed": 8964
        }
    }
    main_client.set_config_message(config)
    main_client_send_config_th = threading.Thread(
        target=main_client.send_config_message,
        args=(config,)
    )

    data_client0_th.start()
    data_client1_th.start()
    label_client_th.start()
    main_client.build_default_network(150, 10)
    # Give the worker threads time to come up before sending the config.
    time.sleep(15)
    main_client_send_config_th.start()

    main_client_send_config_th.join()
    # NOTE(review): start_listening() was already called above; this second
    # call is presumably a no-op or restart — confirm it is idempotent.
    triplets_provider.start_listening()
    time.sleep(0.5)
    print("====== Configuration message sent =========")
    main_client_start_th = threading.Thread(target=main_client.start_train)
    main_client_start_th.start()
    print("====== Stop the triplet provider, the training should be auto exited =========")
    # Let training run, then cut off triplets so the clients self-terminate.
    time.sleep(200)
    triplets_provider.stop_listening()
    main_client_start_th.join()
    data_client0_th.join()
    data_client1_th.join()
    label_client_th.join()

    print("====== MPC NN Test finished =============")
예제 #13
0
import threading
from flask import Flask, request
import requests
import json
import os
from Communication.protobuf.message_pb2 import TaskQuery
from Utils.Log import Logger
from Task.TaskQuery import TaskQueryClient
from Server.HttpServer.ServerConfig import ServerLogPath, ServerTaskRoot, ClientProtocol
from Server.HttpServer.TaskParaGenerator import generate_task_paras, generate_dataset_json
from Server.HttpServer.BroadcastRequests import broadcast_request

# Flask application for the coordinating HTTP server.
main_server = Flask(__name__)

# Append-mode log file named after this Flask app instance; level=0 logs everything.
logger = Logger(open(
    ServerLogPath + "main_server-%d_log.txt" % id(main_server), "a"),
                level=0)


def resp_msg(status="ok", msg=None):
    """Wrap *status* and *msg* into the standard JSON response dict."""
    body = {"status": status}
    body["msg"] = msg
    return body


@main_server.route("/helloWorld")
def hello_world():
    """Liveness-check endpoint; returns a constant greeting."""
    return "Hello, world"


@main_server.route("/createTask", methods=["POST"])
def create_task():
    try:
import time
import numpy as np
from Client.MPCClient import MPCClientParas
from Client.Common.SecureMultiplicationClient import SecureMultiplicationClient
from Client.MPCProviders.TripletProducer import TripletProducer
from Communication.RPCComm import Peer
from Utils.Log import Logger


# Smoke test for triplet-assisted secure matrix multiplication:
# peers 0 and 1 hold the two factor matrices, peer 2 produces triplets.
print("==========Test Secure Multiplication Client==============")
ip_dict = {
    0: '127.0.0.1:8900',
    1: '127.0.0.1:8901',
    2: '127.0.0.1:8902'
}
channel0 = Peer(0, '0.0.0.0:8900', 3, ip_dict, logger=Logger(prefix="0:"))
channel1 = Peer(1, '0.0.0.0:8901', 3, ip_dict, logger=Logger(prefix="1:"))
channel2 = Peer(2, '0.0.0.0:8902', 3, ip_dict, logger=Logger(prefix="triplet:"))
mul_client_0 = SecureMultiplicationClient(channel0, logger=Logger(prefix="0:"))
mul_client_1 = SecureMultiplicationClient(channel1, logger=Logger(prefix="1:"))
# Triplet producer serves clients [0, 1]; unused role ids are -1.
triplet_client = TripletProducer(channel2, Logger(prefix="triplet:"), MPCClientParas([0, 1], -1, -1, 2), [0, 1])

# NOTE(review): `threading` is used here but not imported in the lines shown
# — confirm it is imported elsewhere in this file.
threading.Thread(target=triplet_client.start_listening).start()
# Allow the listener to come up before the multiplication starts.
time.sleep(3)
mat0 = np.random.uniform(0, 1, [5, 10])
mat1 = np.random.uniform(0, 1, [10, 20])
# Client 0 holds the (5, 10) factor, client 1 the (10, 20) factor; each is
# told the other's shape and the triplet producer's id (2).
mul_0_th = threading.Thread(target=mul_client_0.multiply_AB_with, args=(1, 2, (10, 20), mat0))
mul_1_th = threading.Thread(target=mul_client_1.multiply_BA_with, args=(0, 2, (5, 10), mat1))
mul_0_th.start()
mul_1_th.start()
mul_0_th.join()
# NOTE(review): mul_1_th is never joined here — the chunk may be truncated.
예제 #15
0
import jieba
import os, sys
envipath = os.path.dirname(os.getcwd())
sys.path.append(envipath)
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dense, LSTM, Embedding, Bidirectional, Dropout
from tensorflow.python.keras.preprocessing.sequence import pad_sequences
from tensorflow.python.keras.optimizers import Adam
from tensorflow.python.keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard, ReduceLROnPlateau
from tensorflow.python.keras.utils import to_categorical
import pickle
from config.config import TCconfig
import pandas as pd
from Utils.LoadData import load_data
from Utils.Log import Logger
logger = Logger().logger


class BiLstmTextClassify(object):
    def __init__(self):
        """Resolve data/model paths and trigger the initial data load."""
        # Raw training data and pretrained embedding pickle, relative to the
        # environment root resolved at module import.
        self.origh_data_path = os.path.join(envipath, 'Data/TRAINs.csv')
        self.wiki_pickle_path = os.path.join(envipath,
                                             'wiki_matrix/cn_model.pickle')
        self.maxlenth = TCconfig.maxlenth
        load_data()
        # NOTE(review): this "start" message is logged *after* load_data()
        # has already returned — consider logging before the call.
        logger.info('start load data')

    def generate_classes(self, labels):
        """
        # 动态生成模型的文本类型
예제 #16
0
"""
predict text which label is 
"""
import re
import os, sys
import jieba
#from gensim.models import KeyedVectors
from tensorflow.python.keras.preprocessing.sequence import pad_sequences
import numpy as np
#from keras.models import model_from_json
import os, sys
from config.config import TCconfig
from tensorflow.python.keras.models import load_model
import pickle
from Utils.Log import Logger
logger = Logger().logger
logger.info('start to predict')

# Text to classify: argv[1] when supplied, otherwise a default sample.
try:
    text = sys.argv[1]
except IndexError:
    # Narrowed from a bare `except:` — only a missing argv[1] triggers the fallback.
    text = '文思海辉被中国电子收购'


def load_wiki():
    """
    @ load wiki matrix
    :return:
    """
    wiki_pickle_path = os.path.join(os.getcwd(),
                                    'Pickle/words_and_index_dict.pickle')
예제 #17
0
from Client.SharedNN.ComputationProviders import MainClient
from Utils.Log import Logger

# Address book for all five participants of the SharedNN protocol.
ip_dict = {
    0: "127.0.0.1:19001",
    1: "127.0.0.1:19002",
    2: "127.0.0.1:19003",
    3: "127.0.0.1:19004",
    4: "127.0.0.1:19005"
}
# NOTE(review): Peer, MPCClientParas and ClientMode are used below but not
# imported in this file — it will raise NameError unless injected elsewhere.
channel = Peer(0,
               "[::]:19001",
               3,
               ip_dict,
               13,
               logger=Logger(prefix="Channel0:"))

# NOTE(review): argument order assumed to be (feature_client_ids,
# label_client_id, main_client_id, triplet_producer_id) — confirm.
mpc_paras = MPCClientParas([2, 3], 4, 0, 1)
# Main (computation) client in training mode: 32 inputs -> hidden [1] ->
# 1 output; batch 64, test batch 10000, test_per_batches=101, max 1000 iters.
main_client = MainClient(channel,
                         Logger(prefix="Main client:"),
                         mpc_paras,
                         ClientMode.Train,
                         in_dim=32,
                         out_dim=1,
                         layers=[1],
                         batch_size=64,
                         test_batch_size=10000,
                         test_per_batches=101,
                         learning_rate=0.1,
                         max_iter=1000)
예제 #18
0
from Communication.RPCComm import Peer
from Client.MPCClient import MPCClientParas, ClientMode
from Client.SharedNN.DataProviders import FeatureClient, LabelClient
from Client.MPCProviders.TripletProducer import TripletProducer
from Client.SharedNN.ComputationProviders import MainClient
from Utils.Log import Logger
from Client.Data.DataLoader import CSVDataLoader

# Address book for all five participants of the SharedNN protocol.
ip_dict = {
    0: "127.0.0.1:19001",
    1: "127.0.0.1:19002",
    2: "127.0.0.1:19003",
    3: "127.0.0.1:19004",
    4: "127.0.0.1:19005"
}

# This process is peer 1: the triplet producer.
channel = Peer(1,
               "[::]:19002",
               3,
               ip_dict,
               13,
               logger=Logger(prefix="Channel1:", level=1))

# NOTE(review): argument order assumed to be (feature_client_ids,
# label_client_id, main_client_id, triplet_producer_id) — confirm.
mpc_paras = MPCClientParas([2, 3], 4, 0, 1)

# Serves multiplication triplets to feature clients 2 and 3; blocks
# listening until stopped.
triplets_provider = TripletProducer(channel,
                                    Logger(prefix="Triplet provider:"),
                                    mpc_paras, [2, 3])

triplets_provider.start_listening()
예제 #19
0
    def u_data(self,
               code='test2010',
               tablename='ait_training',
               status='COMPLETE'):
        """Set STATUS_ of the row whose CODE_ equals *code* in *tablename*.

        :param code: CODE_ value identifying the row to update
        :param tablename: table to update; interpolated into the SQL text
            (identifiers cannot be bound parameters), so it must come from
            trusted code only
        :param status: new STATUS_ value
        :raises pymysql.err.OperationalError: if the connection cannot be opened
        """
        this_is_log = Logger(code).logger
        try:
            connection = pymysql.connect(**self.config)
        except pymysql.err.OperationalError as e:
            this_is_log.error('pymysql.err.OperationalError:%s' % str(e))
            raise e
        except Exception as e:
            this_is_log.error(e)
            raise e
        # Values are bound via %s placeholders instead of being formatted
        # into the SQL text (the original was open to SQL injection).
        query_sql = " update {tablename} set STATUS_=%s where CODE_ = %s".format(
            tablename=tablename)
        print('执行mysql语句:', query_sql)
        try:
            with connection.cursor() as cursor:
                try:
                    connection.ping(reconnect=True)
                    cursor.execute(query_sql, (status, code))
                    print('执行mysql执行语句')
                    connection.commit()
                    print('更新mysql状态成功')
                    this_is_log.info('更新mysql状态成功')
                except Exception as e:
                    # Roll back on any statement failure. The original's
                    # retry counter never looped (count stayed at 1), so the
                    # dead retry branch was removed; it also closed the
                    # connection twice (else-branch and finally).
                    this_is_log.error(e)
                    connection.rollback()
                    print('更新mysql状态失败')
                    this_is_log.info('更新mysql状态失败')
        finally:
            # Close exactly once, here.
            connection.close()