Example #1
import numpy as np
import tensorflow as tf
import latticex.rosetta as rtt


def test(task_id):
    rtt.py_protocol_handler.set_loglevel(0)
    np.set_printoptions(suppress=True)

    rtt.activate("SecureNN", task_id=task_id)
    print('begin get io wrapper', task_id)
    node_id = rtt.get_current_node_id(task_id=task_id)
    print('end get io wrapper', task_id)
    dg = tf.Graph()
    with dg.as_default():
        # Get private inputs: w from party 0, x from party 1, y from party 2
        w = tf.Variable(rtt.private_input(0, [[1, 2], [2, 3]],
                                          task_id=task_id))
        x = tf.Variable(rtt.private_input(1, [[1, 2], [2, 3]],
                                          task_id=task_id))
        y = tf.Variable(rtt.private_input(2, [[1, 2], [2, 3]],
                                          task_id=task_id))

        # Define matmul operation
        res = tf.matmul(tf.matmul(w, x), y)
        init = tf.global_variables_initializer()
        config = tf.ConfigProto(inter_op_parallelism_threads=16,
                                intra_op_parallelism_threads=16)

        with tf.Session(task_id=task_id, config=config) as sess:
            sess.run(init)
            # Run the secure matmul; the result stays secret-shared
            # unless it is explicitly revealed (e.g. with rtt.SecureReveal)
            sess.run(res)

        print(rtt.get_perf_stats(pretty=True, task_id=task_id))
        rtt.deactivate(task_id=task_id)
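
The function above is parameterized by task_id, so several protocol tasks can run side by side. A minimal driver for it might look like the following (hypothetical usage, not part of the original example; the task names are made up):

# Hypothetical driver: run two independent Rosetta tasks, one thread per task_id.
import threading

if __name__ == '__main__':
    threads = [threading.Thread(target=test, args=(tid,))
               for tid in ('task-0', 'task-1')]
    for t in threads:
        t.start()
    for t in threads:
        t.join()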
Example #2

import os
import numpy as np
import tensorflow as tf
import latticex.rosetta as rtt

np.set_printoptions(suppress=True)

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

np.random.seed(0)

EPOCHES = 10
BATCH_SIZE = 16
learning_rate = 0.0002

rtt.activate("Helix")
rtt.set_saver_model(['p0', 'p1', 'p2', 'p9'])
#rtt.set_saver_model([])
#rtt.set_saver_model(['p0', 'p1', 'p2'])
print('saver model:', rtt.get_saver_model())
node_id = rtt.get_current_node_id()

# real data
# ######################################## difference from tensorflow
file_x = '../dsets/' + node_id + "/reg_train_x.csv"
file_y = '../dsets/' + node_id + "/reg_train_y.csv"
real_X, real_Y = rtt.PrivateDataset(data_owner=(0, 'p9'),
                                    label_owner=1).load_data(file_x,
                                                             file_y,
                                                             header=None)
# ######################################## difference from tensorflow
DIM_NUM = real_X.shape[1]

X = tf.placeholder(tf.float64, [None, DIM_NUM])
Y = tf.placeholder(tf.float64, [None, 1])
print(X)
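
This excerpt stops right after defining the placeholders. Example #5 below shows how the same pattern is normally continued; a minimal continuation along those lines (a sketch, not part of this excerpt) would be:

# Sketch of a typical continuation (mirrors Example #5): a linear model
# built from plain TensorFlow ops on the secret-shared data.
W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64))
b = tf.Variable(tf.zeros([1], dtype=tf.float64))
pred_Y = tf.matmul(X, W) + b
loss = tf.reduce_mean(tf.square(Y - pred_Y))
train = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)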
Example #3
#!/usr/bin/env python3

# Import rosetta package
import latticex.rosetta as rtt
import tensorflow as tf

# You can activate a backend protocol, here use Helix
rtt.set_backend_loglevel(0)
rtt.activate("Helix")
#rtt.py_protocol_handler.set_loglevel(0)
print('node party', rtt.get_current_node_id(),
      rtt.node_id_to_party_id(rtt.get_current_node_id()))
print('party node', rtt.get_current_party_id(),
      rtt.party_id_to_node_id(rtt.get_current_party_id()))
print('data nodes', rtt.get_data_node_ids())
print('computation nodes', rtt.get_computation_node_ids())
print('result nodes', rtt.get_result_node_ids())
nodes = rtt.get_connected_node_ids()
node = rtt.get_current_node_id()
msgid = 'test'
for n in nodes:
    if n != node:
        rtt.send_msg(n, msgid, node + " to " + n)
for n in nodes:
    if n != node:
        # 2 * len(node) + 4 is the length of the message sent above
        # ("<sender> to <receiver>"), assuming all node ids share one length
        msg = rtt.recv_msg(n, msgid, 2 * len(node) + 4)
        print('get msg from ', n, " msg:", msg)

# Get private inputs: w from party 0, x from party 1
w = tf.Variable(rtt.private_input(0, [[1, 2], [2, 3]]))
x = tf.Variable(rtt.private_input(1, [[1, 2], [2, 3]]))
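
The example is cut off after the two private inputs. A typical way to finish it (a sketch that reuses the reveal and teardown pattern from the other examples, not code from this excerpt) is:

# Sketch of a typical continuation: multiply the two private matrices,
# reveal the product for testing, then release the protocol.
res = tf.matmul(w, x)
reveal_res = rtt.SecureReveal(res)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print('revealed result:', sess.run(reveal_res))

rtt.deactivate()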
Example #4

import os
import numpy as np
import tensorflow as tf
import latticex.rosetta as rtt
from util import read_dataset

np.set_printoptions(suppress=True)

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

np.random.seed(0)

EPOCHES = 10
BATCH_SIZE = 16
learning_rate = 0.0002

task_id = 'task-id'
rtt.activate("Helix", task_id = task_id)
node_id = rtt.get_current_node_id(task_id = task_id)

# real data
# ######################################## difference from tensorflow
file_x = '../dsets/' + node_id + "/cls_test_x.csv"
file_y = '../dsets/' + node_id + "/cls_test_y.csv"
real_X, real_Y = rtt.PrivateDataset(
    data_owner=(0, 'p9'), label_owner='p9',
    task_id=task_id).load_data(file_x, file_y, header=None)
# ######################################## difference from tensorflow
DIM_NUM = real_X.shape[1]

X = tf.placeholder(tf.float64, [None, DIM_NUM])
Y = tf.placeholder(tf.float64, [None, 1])
print(X)
print(Y)
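
Like the earlier excerpts, this one stops after the placeholders. Because the protocol was activated under an explicit task_id, the session and the teardown are also task-scoped; the closing steps would look roughly like this (a sketch mirroring Examples #1 and #5, not part of this excerpt):

# Sketch of the task-scoped wrap-up (mirrors Examples #1 and #5).
with tf.Session(task_id=task_id) as sess:
    sess.run(tf.global_variables_initializer())
    # ... build and run the model here ...

print(rtt.get_perf_stats(pretty=True, task_id=task_id))
rtt.deactivate(task_id=task_id)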
Example #5
import os
import math
import numpy as np
import tensorflow as tf
import latticex.rosetta as rtt


def test(task_id):
    rtt.py_protocol_handler.set_loglevel(0)
    np.set_printoptions(suppress=True)

    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

    np.random.seed(0)

    EPOCHES = 10
    BATCH_SIZE = 16
    learning_rate = 0.0002

    rtt.activate("SecureNN", task_id=task_id)
    node_id = rtt.get_current_node_id(task_id=task_id)
    dg = tf.Graph()
    with dg.as_default():

        # real data
        # ######################################## difference from tensorflow
        file_x = '../dsets/' + node_id + "/reg_train_x.csv"
        file_y = '../dsets/' + node_id + "/reg_train_y.csv"
        real_X, real_Y = rtt.PrivateDataset(data_owner=(0, 1),
                                            label_owner=1,
                                            task_id=task_id).load_data(
                                                file_x, file_y, header=None)
        # ######################################## difference from tensorflow
        DIM_NUM = real_X.shape[1]

        X = tf.placeholder(tf.float64, [None, DIM_NUM])
        Y = tf.placeholder(tf.float64, [None, 1])
        print(X)
        print(Y)

        # initialize W & b
        W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64))
        b = tf.Variable(tf.zeros([1], dtype=tf.float64))
        print(W)
        print(b)

        # predict
        pred_Y = tf.matmul(X, W) + b
        print(pred_Y)

        # loss
        loss = tf.square(Y - pred_Y)
        loss = tf.reduce_mean(loss)
        print(loss)

        # optimizer
        train = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)
        print(train)

        init = tf.global_variables_initializer()
        print(init)

        # ########### for test, reveal
        reveal_W = rtt.SecureReveal(W)
        reveal_b = rtt.SecureReveal(b)
        reveal_Y = rtt.SecureReveal(pred_Y)
        # ########### for test, reveal

        config = tf.ConfigProto(inter_op_parallelism_threads=16,
                                intra_op_parallelism_threads=16)
        with tf.Session(task_id=task_id, config=config) as sess:
            sess.run(init)
            #rW, rb = sess.run([reveal_W, reveal_b])
            #print("init weight:{} \nbias:{}".format(rW, rb))

            # train
            BATCHES = math.ceil(len(real_X) / BATCH_SIZE)
            for e in range(EPOCHES):
                for i in range(BATCHES):
                    bX = real_X[(i * BATCH_SIZE):(i + 1) * BATCH_SIZE]
                    bY = real_Y[(i * BATCH_SIZE):(i + 1) * BATCH_SIZE]
                    print('*' * 80, task_id)
                    sess.run(train, feed_dict={X: bX, Y: bY})
                    print('#' * 80, task_id)

                    j = e * BATCHES + i
                    if j % 50 == 0 or (j == EPOCHES * BATCHES - 1
                                       and j % 50 != 0):
                        pass
                        #rW, rb = sess.run([reveal_W, reveal_b])
                        #print("I,E,B:{:0>4d},{:0>4d},{:0>4d} weight:{} \nbias:{}".format(
                        #    j, e, i, rW, rb))

            # predict
            #Y_pred = sess.run(reveal_Y, feed_dict={X: real_X, Y: real_Y})
            #print("Y_pred:", Y_pred)

        print(rtt.get_perf_stats(pretty=True, task_id=task_id))
        rtt.deactivate(task_id=task_id)