def test_instantiate_tfe_layer():
    """Check that a TFE layer built from a tf.keras Dense layer produces
    the same output as the original layer on a constant input.

    A 4x5 all-ones batch is pushed through a Dense(5) layer with a fixed
    random kernel, first with plain tf.keras and then through the wrapper
    returned by ``_instantiate_tfe_layer``; the revealed encrypted result
    must match the plaintext result within rtol=0.001.
    """
    from syft.frameworks.keras.model.sequential import _instantiate_tfe_layer

    # Hook keras for its side effects; the returned hook object is not needed.
    sy.KerasHook(tf.keras)

    # Deterministic input and weights so both runs compute the same values.
    input_shape = [4, 5]
    input_data = np.ones(input_shape)
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    d_tf = tf.keras.layers.Dense(
        5, kernel_initializer=initializer, batch_input_shape=input_shape, use_bias=True
    )

    # Reference run with plain tf.keras.
    with tf.Session() as sess:
        x = tf.Variable(input_data, dtype=tf.float32)
        y = d_tf(x)
        sess.run(tf.global_variables_initializer())
        expected = sess.run(y)

    stored_keras_weights = {d_tf.name: d_tf.get_weights()}

    # Same computation through the TFE layer in a fresh graph.
    with tf.Graph().as_default():
        p_x = tfe.define_private_variable(input_data)
        d_tfe = _instantiate_tfe_layer(d_tf, stored_keras_weights)
        out = d_tfe(p_x)
        with tfe.Session() as sess:
            sess.run(tf.global_variables_initializer())
            actual = sess.run(out.reveal())

    np.testing.assert_allclose(actual, expected, rtol=0.001)
def test_share():  # pragma: no cover
    """Smoke-test sharing, serving, and shutting down a one-layer model
    across three auto-managed TFE workers on localhost.

    No prediction is checked here; the test only exercises the
    share/serve/shutdown lifecycle.
    """
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    # Hook keras for its side effects; the returned hook object is not needed.
    sy.KerasHook(tf.keras)

    # Fixed-kernel Dense(5) layer on a 4x5 input batch.
    input_shape = [4, 5]
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    model = Sequential()
    model.add(
        Dense(5, kernel_initializer=initializer, batch_input_shape=input_shape, use_bias=True))

    # auto_managed=True lets syft start/stop the worker processes itself.
    AUTO = True
    alice = sy.TFEWorker(host="localhost:4000", auto_managed=AUTO)
    bob = sy.TFEWorker(host="localhost:4001", auto_managed=AUTO)
    carol = sy.TFEWorker(host="localhost:4002", auto_managed=AUTO)

    model.share(alice, bob, carol)
    model.serve(num_requests=0)  # num_requests=0: serve loop returns immediately
    model.shutdown_workers()
def test_share():
    """Smoke-test sharing, serving, and shutting down a one-layer model
    across three in-process TFE workers (``host=None``).

    No prediction is checked here; the test only exercises the
    share/serve/shutdown lifecycle.
    """
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    # Hook keras for its side effects; the returned hook object is not needed.
    sy.KerasHook(tf.keras)

    # Fixed-kernel Dense(5) layer on a 4x5 input batch.
    input_shape = [4, 5]
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    model = Sequential()
    model.add(
        Dense(5, kernel_initializer=initializer, batch_input_shape=input_shape, use_bias=True))

    # host=None: workers run locally rather than over the network.
    alice = sy.TFEWorker(host=None)
    bob = sy.TFEWorker(host=None)
    carol = sy.TFEWorker(host=None)

    model.share(alice, bob, carol)
    model.serve(num_requests=0)  # num_requests=0: serve loop returns immediately
    model.shutdown_workers()
def test_share():  # pragma: no cover
    """End-to-end check of encrypted serving.

    The same constant batch is run through a one-layer model twice: once
    with plain tf.keras, and once as an encrypted query against the model
    secret-shared over a three-party TFE cluster. Both outputs must agree
    within an absolute tolerance of 0.01.
    """
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    sy.KerasHook(tf.keras)

    # Fixed batch and deterministic kernel so both runs see identical data.
    batch_shape = (4, 5)
    weights = np.random.normal(size=(5, 5))
    const_init = tf.keras.initializers.Constant(weights)
    batch = np.ones(batch_shape).astype(np.float32)

    model = Sequential()
    model.add(
        Dense(5, kernel_initializer=const_init, batch_input_shape=batch_shape, use_bias=True))
    out_shape = model.output_shape

    # Plaintext reference prediction via tf.keras.
    plain_result = model.predict(batch)

    # One querying client plus a three-party computation cluster.
    client = sy.TFEWorker(host=None)
    parties = [sy.TFEWorker(host=None) for _ in range(3)]
    cluster = sy.TFECluster(*parties)
    cluster.start()

    # Secret-share the model across the cluster workers.
    model.share(cluster)

    # Issue one asynchronous encrypted query against the shared model.
    with model._tfe_graph.as_default():
        client.connect_to_model(batch_shape, out_shape, cluster, sess=model._tfe_session)
        client.query_model_async(batch)

    model.serve(num_requests=1)
    private_result = client.query_model_join().astype(np.float32)

    # The encrypted path must reproduce the plaintext result.
    np.testing.assert_allclose(private_result, plain_result, atol=0.01)

    model.stop()
    cluster.stop()
def test_share():
    """Run a constant batch through a one-layer model with plain tf.keras
    and again as an encrypted query against the model shared over a
    three-party TFE cluster, then assert both outputs match within 0.01.
    """
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    # Hook keras for its side effects; the returned hook object is not needed.
    sy.KerasHook(tf.keras)

    # Deterministic input and weights so both runs compute the same values.
    input_shape = (4, 5)
    kernel = np.random.normal(size=(5, 5))
    initializer = tf.keras.initializers.Constant(kernel)
    dummy_input = np.ones(input_shape).astype(np.float32)

    model = Sequential()
    model.add(
        Dense(5, kernel_initializer=initializer, batch_input_shape=input_shape, use_bias=True))
    output_shape = model.output_shape

    # Plaintext reference prediction.
    result_public = model.predict(dummy_input)

    # One querying client plus a three-party computation cluster.
    client = sy.TFEWorker(host=None)
    alice = sy.TFEWorker(host=None)
    bob = sy.TFEWorker(host=None)
    carol = sy.TFEWorker(host=None)
    cluster = sy.TFECluster(alice, bob, carol)
    cluster.start()

    # Secret-share the model across the cluster workers.
    model.share(cluster)

    # Issue one asynchronous encrypted query against the shared model.
    with model._tfe_graph.as_default():
        client.connect_to_model(input_shape, output_shape, cluster, sess=model._tfe_session)
        client.query_model_async(dummy_input)
    model.serve(num_requests=1)
    result_private = client.query_model_join().astype(np.float32)

    np.testing.assert_allclose(result_private, result_public, atol=0.01)

    model.stop()
    cluster.stop()
def test_instantiate_tfe_layer():  # pragma: no cover
    """Feed a constant 4x5 batch through a single Dense layer twice —
    once with plain tf.keras and once through the encrypted layer built
    by ``_instantiate_tfe_layer`` — and require matching outputs.
    """
    from syft.frameworks.keras.model.sequential import _instantiate_tfe_layer

    sy.KerasHook(tf.keras)

    # Deterministic data: all-ones input and a fixed random kernel.
    shape_in = [4, 5]
    ones_batch = np.ones(shape_in)
    fixed_kernel = np.random.normal(size=[5, 5])
    const_init = tf.keras.initializers.Constant(fixed_kernel)

    # Dense layer: 4x5 input batch, 5 output units, 5x5 kernel, with bias.
    dense_plain = tf.keras.layers.Dense(
        5, kernel_initializer=const_init, batch_input_shape=shape_in, use_bias=True)

    # Reference output from the plain tf.keras layer.
    with tf.Session() as sess:
        var_in = tf.Variable(ones_batch, dtype=tf.float32)
        plain_out = dense_plain(var_in)
        sess.run(tf.global_variables_initializer())
        expected = sess.run(plain_out)

    weight_store = {dense_plain.name: dense_plain.get_weights()}

    # Encrypted output from the equivalent TFE layer in a fresh graph.
    with tf.Graph().as_default():
        secret_in = tfe.define_private_variable(ones_batch)
        dense_tfe = _instantiate_tfe_layer(dense_plain, weight_store)
        secret_out = dense_tfe(secret_in)
        with tfe.Session() as sess:
            sess.run(tf.global_variables_initializer())
            actual = sess.run(secret_out.reveal())

    # Raise if the results differ beyond the given tolerances.
    np.testing.assert_allclose(actual, expected, rtol=0.005, atol=0.001)
model.add(MaxPooling2D((2, 2)))

# Two stacked 128-filter conv layers followed by pooling.
for _ in range(2):
    model.add(
        Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model.add(MaxPooling2D((2, 2)))

# Classifier head; the final layer emits raw logits.
model.add(Flatten())
model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))
model.add(Dense(num_classes, name='logit'))

# The model is assumed already trained; restore its weights from the
# saved .h5 checkpoint.
pre_trained_weights = 'cifar10.h5'
model.load_weights(pre_trained_weights)

import syft as sy

hook = sy.KerasHook(tf.keras)

# Spin up three auto-managed TFE workers on consecutive local ports.
AUTO = True
worker_1 = sy.TFEWorker(host='localhost:5000', auto_managed=AUTO)
worker_2 = sy.TFEWorker(host='localhost:5001', auto_managed=AUTO)
worker_3 = sy.TFEWorker(host='localhost:5002', auto_managed=AUTO)

# Secret-share the model across the workers and serve at most five
# prediction requests.
model.share(worker_1, worker_2, worker_3)
model.serve(num_requests=5)