def TEST0(data_owner, label_owner):
    """Probe PrivateDataset in SampleAligned mode for the given owners.

    Runs load_X, load_y and load_data each on a fresh dataset object;
    every failure is caught and printed rather than raised, so this is
    safe to call with owner combinations that are expected to error.
    """
    header = "All (maybe) exception case - SampleAligned - [data_owner={}, label_owner={}]"
    print(header.format(data_owner, label_owner))

    # Features only.
    ds = rtt.PrivateDataset(data_owner=data_owner, label_owner=label_owner)
    try:
        loaded = ds.load_X(get_X0(data_owner))
        print("load_X res.shape:", loaded.shape)
        print("load_X res:", loaded)
    except Exception as err:
        print(err)

    # Labels only (fresh dataset object, mirroring the load_X case).
    ds = rtt.PrivateDataset(data_owner=data_owner, label_owner=label_owner)
    try:
        loaded = ds.load_y(get_Y0(label_owner))
        print("load_y res.shape:", loaded.shape)
        print("load_y res:", loaded)
    except Exception as err:
        print(err)

    # Features and labels together.
    ds = rtt.PrivateDataset(data_owner=data_owner, label_owner=label_owner)
    try:
        x_src = get_X0(data_owner)
        y_src = get_Y0(label_owner)
        x_mat, y_mat = ds.load_data(x_src, y_src)
        print("load_data resX.shape:", x_mat.shape)
        print("load_data resX:", x_mat)
        print("load_data resy.shape:", y_mat.shape)
        print("load_data resy:", y_mat)
    except Exception as err:
        print(err)
def TEST1(data_owner):
    """Probe PrivateDataset in FeatureAligned mode for the given data owner.

    A single dataset object is reused for load_X, load_y and load_data;
    failures are caught and printed rather than raised.
    """
    print("All normal case - FeatureAligned - [data_owner={}]".format(
        data_owner))
    # note, when dataset_type is tt.DatasetType.FeatureAligned, ignore label_owner
    ds = rtt.PrivateDataset(data_owner=data_owner,
                            dataset_type=rtt.DatasetType.FeatureAligned)

    # Features only.
    try:
        loaded = ds.load_X(get_X1(data_owner))
        print("load_X res.shape:", loaded.shape)
        print("load_X res:", loaded)
    except Exception as err:
        print(err)

    # Labels only.
    try:
        loaded = ds.load_y(get_Y1(data_owner))
        print("load_y res.shape:", loaded.shape)
        print("load_y res:", loaded)
    except Exception as err:
        print(err)

    # Features and labels together.
    try:
        x_src = get_X1(data_owner)
        y_src = get_Y1(data_owner)
        x_mat, y_mat = ds.load_data(x_src, y_src)
        print("load_data resX.shape:", x_mat.shape)
        print("load_data resX:", x_mat)
        print("load_data resy.shape:", y_mat.shape)
        print("load_data resy:", y_mat)
    except Exception as err:
        print(err)
# --- Secure linear-regression setup over the SecureNN protocol ---
np.random.seed(0)  # deterministic across runs (and, presumably, across parties)

# Training hyper-parameters.
EPOCHES = 10
BATCH_SIZE = 16
learning_rate = 0.0002

rtt.activate("SecureNN")
# Only party P0 saves the model in plaintext; saver itself disabled.
rtt.set_saver_model(False, plain_model=['P0'])
mpc_player_id = rtt.py_protocol_handler.get_party_id()

# real data
# ######################################## difference from tensorflow
# Each party reads its own private CSV split; load_data secret-shares them.
file_x = '../dsets/P' + str(mpc_player_id) + "/reg_train_x.csv"
file_y = '../dsets/P' + str(mpc_player_id) + "/reg_train_y.csv"
real_X, real_Y = rtt.PrivateDataset(
    data_owner=(0, 1),  # features owned by parties 0 and 1
    label_owner=1).load_data(file_x, file_y, header=None)
# ######################################## difference from tensorflow
DIM_NUM = real_X.shape[1]  # feature dimension after the private join

X = tf.placeholder(tf.float64, [None, DIM_NUM])
Y = tf.placeholder(tf.float64, [None, 1])
print(X)
print(Y)

# initialize W & b
W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64))
b = tf.Variable(tf.zeros([1], dtype=tf.float64))
print(W)
print(b)
# --- Secure logistic-regression setup over the Helix protocol,
# --- feature-aligned (vertically partitioned) data ---
BATCH_SIZE = 16
learning_rate = 0.0002

rtt.activate("Helix")
node_id = rtt.get_current_node_id()
rtt.py_protocol_handler.set_loglevel(0)  # verbose backend logging

# real data
# ######################################## difference from tensorflow
file_x = '../dsets/' + node_id + "/cls_train_x_feature.csv"
file_y = '../dsets/' + node_id + "/cls_train_y_feature.csv"
print('file_x', file_x)
print('file_y', file_y)
# FeatureAligned: each listed owner contributes feature columns
# (no separate label_owner is needed in this mode).
real_X, real_Y = rtt.PrivateDataset(
    data_owner=(0, 2, 'p9'),
    dataset_type=rtt.DatasetType.FeatureAligned).load_data(file_x, file_y,
                                                           header=None)
# ######################################## difference from tensorflow
DIM_NUM = real_X.shape[1]  # total feature dimension after alignment
print(real_X)
print(real_X.shape)

X = tf.placeholder(tf.float64, [None, DIM_NUM])
Y = tf.placeholder(tf.float64, [None, 1])
print(X)
print(Y)

# initialize W & b
W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64))
b = tf.Variable(tf.zeros([1], dtype=tf.float64))
# --- Secure model setup over Helix, using an explicit task_id so several
# --- protocol instances can coexist in one process ---
np.random.seed(0)  # deterministic across runs

# Training hyper-parameters.
EPOCHES = 10
BATCH_SIZE = 16
learning_rate = 0.0002

task_id = 'task-id'
rtt.activate("Helix", task_id=task_id)
node_id = rtt.get_current_node_id(task_id=task_id)

# real data
# ######################################## difference from tensorflow
file_x = '../dsets/' + node_id + "/cls_test_x.csv"
file_y = '../dsets/' + node_id + "/cls_test_y.csv"
# Features owned by party 0 and node 'p9'; 'p9' also owns the labels.
real_X, real_Y = rtt.PrivateDataset(
    data_owner=(0, 'p9'), label_owner='p9',
    task_id=task_id).load_data(file_x, file_y, header=None)
# ######################################## difference from tensorflow
DIM_NUM = real_X.shape[1]  # feature dimension after the private join

X = tf.placeholder(tf.float64, [None, DIM_NUM])
Y = tf.placeholder(tf.float64, [None, 1])
print(X)
print(Y)

# initialize W & b
W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64), name='w')
b = tf.Variable(tf.zeros([1], dtype=tf.float64), name='b')
print(W)
print(b)

# predict
def test(task_id):
    """Train a secure linear-regression model end to end under SecureNN.

    Activates the protocol for `task_id`, loads each party's private CSV
    split, builds the TF graph (prediction, MSE loss, SGD), runs the
    training loop, prints perf stats and deactivates the protocol.
    """
    rtt.py_protocol_handler.set_loglevel(0)
    np.set_printoptions(suppress=True)
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # silence TF C++ logging
    np.random.seed(0)

    # Training hyper-parameters.
    EPOCHES = 10
    BATCH_SIZE = 16
    learning_rate = 0.0002

    rtt.activate("SecureNN", task_id=task_id)
    node_id = rtt.get_current_node_id(task_id=task_id)

    # Per-task graph so concurrent task_ids do not share TF state.
    dg = tf.Graph()
    with dg.as_default():
        # real data
        # ######################################## difference from tensorflow
        file_x = '../dsets/' + node_id + "/reg_train_x.csv"
        file_y = '../dsets/' + node_id + "/reg_train_y.csv"
        real_X, real_Y = rtt.PrivateDataset(data_owner=(0, 1), label_owner=1,
                                            task_id=task_id).load_data(
            file_x, file_y, header=None)
        # ######################################## difference from tensorflow
        DIM_NUM = real_X.shape[1]  # feature dimension after the private join

        X = tf.placeholder(tf.float64, [None, DIM_NUM])
        Y = tf.placeholder(tf.float64, [None, 1])
        print(X)
        print(Y)

        # initialize W & b
        W = tf.Variable(tf.zeros([DIM_NUM, 1], dtype=tf.float64))
        b = tf.Variable(tf.zeros([1], dtype=tf.float64))
        print(W)
        print(b)

        # predict
        pred_Y = tf.matmul(X, W) + b
        print(pred_Y)

        # loss (mean squared error)
        loss = tf.square(Y - pred_Y)
        loss = tf.reduce_mean(loss)
        print(loss)

        # optimizer
        train = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)
        print(train)

        init = tf.global_variables_initializer()
        print(init)

        # ########### for test, reveal
        # Reveal ops decrypt the secret-shared values for inspection only.
        reveal_W = rtt.SecureReveal(W)
        reveal_b = rtt.SecureReveal(b)
        reveal_Y = rtt.SecureReveal(pred_Y)
        # ########### for test, reveal

        config = tf.ConfigProto(inter_op_parallelism_threads=16,
                                intra_op_parallelism_threads=16)
        with tf.Session(task_id=task_id, config=config) as sess:
            sess.run(init)
            #rW, rb = sess.run([reveal_W, reveal_b])
            #print("init weight:{} \nbias:{}".format(rW, rb))

            # train
            BATCHES = math.ceil(len(real_X) / BATCH_SIZE)
            for e in range(EPOCHES):
                for i in range(BATCHES):
                    # Mini-batch slice; the last batch may be short.
                    bX = real_X[(i * BATCH_SIZE):(i + 1) * BATCH_SIZE]
                    bY = real_Y[(i * BATCH_SIZE):(i + 1) * BATCH_SIZE]
                    print('*' * 80, task_id)
                    sess.run(train, feed_dict={X: bX, Y: bY})
                    print('#' * 80, task_id)
                    # Global step index; reveal hook fires every 50 steps
                    # and at the final step (currently commented out).
                    j = e * BATCHES + i
                    if j % 50 == 0 or (j == EPOCHES * BATCHES - 1
                                       and j % 50 != 0):
                        pass
                        #rW, rb = sess.run([reveal_W, reveal_b])
                        #print("I,E,B:{:0>4d},{:0>4d},{:0>4d} weight:{} \nbias:{}".format(
                        #    j, e, i, rW, rb))

            # predict
            #Y_pred = sess.run(reveal_Y, feed_dict={X: real_X, Y: real_Y})
            #print("Y_pred:", Y_pred)

            print(rtt.get_perf_stats(pretty=True, task_id=task_id))
            rtt.deactivate(task_id=task_id)
# --- Iris classification setup over SecureNN: load private train/test splits ---
# Training hyper-parameters.
EPOCHES = 1
BATCH_SIZE = 32
learning_rate = 0.003

rtt.activate("SecureNN")
mpc_player_id = rtt.py_protocol_handler.get_party_id()

# real data
# ######################################## difference from tensorflow
# Dataset files live under the tutorials directory, one folder per party.
tutorials_path = os.path.abspath(os.path.join(os.getcwd(), "../tutorials/"))
training_features_file_path = tutorials_path + "/dsets/P" + str(
    mpc_player_id) + "/iris_training_features.csv"
training_label_file_path = tutorials_path + "/dsets/P" + str(
    mpc_player_id) + "/iris_training_label.csv"
training_features, training_label = rtt.PrivateDataset(
    data_owner=(0, 1),  # features owned by parties 0 and 1
    label_owner=1).load_data(training_features_file_path,
                             training_label_file_path,
                             header=None)
# The mean of label is 1, update label with (label-1) for mean is 0.
# However the open-source RTT now cannot support tf.ones_like() function,
# P0 and P1 must finish this process before multi-party training.
# Namely data-loading process get the label with mean 0.
# training_label = training_label - tf.ones_like(training_label)

# Held-out test split, loaded the same way as the training data.
test_features_file_path = tutorials_path + "/dsets/P" + str(
    mpc_player_id) + "/iris_test_features.csv"
test_label_file_path = tutorials_path + "/dsets/P" + str(
    mpc_player_id) + "/iris_test_label.csv"
test_features, test_label = rtt.PrivateDataset(data_owner=(0, 1),
                                               label_owner=1).load_data(
    test_features_file_path, test_label_file_path, header=None)
#!/usr/bin/env python3
"""Smoke-test rtt.PrivateDataset.load_data with two data-owner layouts,
then print protocol performance statistics."""
import latticex.rosetta as rtt
import tensorflow as tf
import sys
import numpy as np

np.set_printoptions(suppress=True)

# MPC protocol selection. The original code assigned "Helix" and then
# immediately overwrote it with "SecureNN"; the dead assignment is removed.
# protocol = "Helix"
protocol = "SecureNN"
rtt.activate(protocol)
rtt.backend_log_to_stdout(True)
partyid = rtt.get_party_id()

# Layout 1: features owned by parties 0 and 1; party 0 owns the labels.
dset = rtt.PrivateDataset(data_owner=(0, 1), label_owner=0)
resX, resy = dset.load_data("./testdataset/p0_attr_plain.csv",
                            "./testdataset/p0_label_plain.csv")

# Layout 2: features owned by parties 0 and 2; party 0 owns the labels.
dset = rtt.PrivateDataset(data_owner=(0, 2), label_owner=0)
resX, resy = dset.load_data("./testdataset/p1_attr_plain.csv",
                            "./testdataset/p0_label_plain.csv")

print(rtt.get_perf_stats(True))
rtt.deactivate()