Code Example #1
File: test_sequential.py Project: racinger/PySyft
def test_share():  # pragma: no cover

    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    # hook tf.keras so that Keras models gain syft's share()/serve() methods
    hook = sy.KerasHook(tf.keras)

    input_shape = [4, 5]
    input_data = np.ones(input_shape)
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    model = Sequential()

    model.add(
        Dense(5,
              kernel_initializer=initializer,
              batch_input_shape=input_shape,
              use_bias=True))

    # start three TFE workers; auto_managed=True lets syft launch and manage them locally
    AUTO = True
    alice = sy.TFEWorker(host="localhost:4000", auto_managed=AUTO)
    bob = sy.TFEWorker(host="localhost:4001", auto_managed=AUTO)
    carol = sy.TFEWorker(host="localhost:4002", auto_managed=AUTO)

    # secret-share the model among the three workers
    model.share(alice, bob, carol)

    # start serving (no requests are expected in this test)
    model.serve(num_requests=0)

    model.shutdown_workers()
Code Example #2
def test_share():

    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    hook = sy.KerasHook(tf.keras)

    input_shape = [4, 5]
    input_data = np.ones(input_shape)
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    model = Sequential()

    model.add(
        Dense(5,
              kernel_initializer=initializer,
              batch_input_shape=input_shape,
              use_bias=True))

    alice = sy.TFEWorker(host=None)
    bob = sy.TFEWorker(host=None)
    carol = sy.TFEWorker(host=None)

    model.share(alice, bob, carol)

    model.serve(num_requests=0)

    model.shutdown_workers()
Code Example #3
File: test_sequential.py Project: zyedmaheen/PySyft
def test_share():  # pragma: no cover
    """tests tfe federated learning functionality by running a constant input on same model
    using tf.keras
    then tfe on remote workers and comparing the outputs of both cases
    """
    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    sy.KerasHook(tf.keras)

    # creates input and weights constant tensors
    input_shape = (4, 5)
    kernel = np.random.normal(size=(5, 5))
    initializer = tf.keras.initializers.Constant(kernel)
    dummy_input = np.ones(input_shape).astype(np.float32)

    model = Sequential()

    model.add(
        Dense(5,
              kernel_initializer=initializer,
              batch_input_shape=input_shape,
              use_bias=True))
    output_shape = model.output_shape
    result_public = model.predict(
        dummy_input)  # runs constant input on the model using tf.keras

    # creates a cluster of TFE workers (remote machines)
    client = sy.TFEWorker(host=None)
    alice = sy.TFEWorker(host=None)
    bob = sy.TFEWorker(host=None)
    carol = sy.TFEWorker(host=None)
    cluster = sy.TFECluster(alice, bob, carol)

    cluster.start()

    model.share(cluster)  # sends the model to the workers

    # runs the same input on the same model on the remote workers and gets back the output
    with model._tfe_graph.as_default():
        client.connect_to_model(input_shape,
                                output_shape,
                                cluster,
                                sess=model._tfe_session)

    client.query_model_async(dummy_input)

    model.serve(num_requests=1)

    result_private = client.query_model_join().astype(np.float32)
    # compares the results and raises an error if they differ by more than 0.01
    np.testing.assert_allclose(result_private, result_public, atol=0.01)

    model.stop()

    cluster.stop()
Code Example #4
def test_share():

    from tensorflow.keras import Sequential
    from tensorflow.keras.layers import Dense

    hook = sy.KerasHook(tf.keras)

    input_shape = (4, 5)
    kernel = np.random.normal(size=(5, 5))
    initializer = tf.keras.initializers.Constant(kernel)
    dummy_input = np.ones(input_shape).astype(np.float32)

    model = Sequential()

    model.add(
        Dense(5,
              kernel_initializer=initializer,
              batch_input_shape=input_shape,
              use_bias=True))
    output_shape = model.output_shape
    result_public = model.predict(dummy_input)

    client = sy.TFEWorker(host=None)
    alice = sy.TFEWorker(host=None)
    bob = sy.TFEWorker(host=None)
    carol = sy.TFEWorker(host=None)
    cluster = sy.TFECluster(alice, bob, carol)

    cluster.start()

    model.share(cluster)

    with model._tfe_graph.as_default():
        client.connect_to_model(input_shape,
                                output_shape,
                                cluster,
                                sess=model._tfe_session)

    client.query_model_async(dummy_input)

    model.serve(num_requests=1)

    result_private = client.query_model_join().astype(np.float32)
    np.testing.assert_allclose(result_private, result_public, atol=0.01)

    model.stop()

    cluster.stop()
Code Example #5
File: Client.py Project: tallalj/UdacityOpenSource
# import libraries
import numpy as np
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from keras.preprocessing.image import load_img, img_to_array
import syft as sy
# create a client worker and the three TFE workers that host the shared model
client = sy.TFEWorker()
worker_1 = sy.TFEWorker(host='localhost:5000')
worker_2 = sy.TFEWorker(host='localhost:5001')
worker_3 = sy.TFEWorker(host='localhost:5002')
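# the shapes below are illustrative assumptions: they must match the CIFAR-10 model
# served in server.py, i.e. one 32x32 RGB image in and one score per class out
input_shape = (1, 32, 32, 3)
output_shape = (1, 10)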
# connect to the secure (secret-shared) model hosted by the three workers
client.connect_to_model(input_shape, output_shape, worker_1, worker_2,
                        worker_3)


# prepare the image for prediction
def predict(filename):
    img = load_img(filename, target_size=(32, 32))
    img = img_to_array(img)
    img = img.reshape(1, 32, 32, 3)
    img = img.astype('float32')
    img = img / 255.0
    return img


filenames = ['horse.jpg', 'bird.jpg', 'car.jpg']
actual_labels = [7, 2, 1]
# Query the model for obtaining private predictions
for i, filename in enumerate(filenames):
    img = predict(filename)
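    # sketch: query the secret-shared model with the async/join calls used in
    # Code Example #3 and compare the prediction with the known label
    client.query_model_async(img)
    prediction = client.query_model_join()
    print(filename, 'predicted:', np.argmax(prediction), 'actual:', actual_labels[i])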
Code Example #6
File: server.py Project: tallalj/UdacityOpenSource
model.add(MaxPooling2D((2, 2)))
model.add(
    Conv2D(128, (3, 3),
           activation='relu',
           kernel_initializer='he_uniform',
           padding='same'))
model.add(
    Conv2D(128, (3, 3),
           activation='relu',
           kernel_initializer='he_uniform',
           padding='same'))
model.add(MaxPooling2D((2, 2)))
model.add(Flatten())
model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))
model.add(Dense(num_classes, name='logit'))
# We assume the model has already been trained and the weights are saved in a .h5 file
# load the pretrained weights
pre_trained_weights = 'cifar10.h5'
model.load_weights(pre_trained_weights)

import syft as sy
hook = sy.KerasHook(tf.keras)

AUTO = True
worker_1 = sy.TFEWorker(host='localhost:5000', auto_managed=AUTO)
worker_2 = sy.TFEWorker(host='localhost:5001', auto_managed=AUTO)
worker_3 = sy.TFEWorker(host='localhost:5002', auto_managed=AUTO)

model.share(worker_1, worker_2, worker_3)
model.serve(num_requests=5)  # limit the number of requests to 5
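# cleanup sketch: once serving has finished, shut down the auto-managed workers
# with the same call used in Code Example #1
model.shutdown_workers()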
Code Example #7
    print('Data Loading... Finished.')
    print('Data Splitting...')

    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=test_rate, random_state=args.seed)

    # print(x_train.shape, x_test.shape)
    # print(y_train.shape, y_test.shape)
    print('Data Splitting... finished')


    # Encrypted computation in PySyft
    hook = sy.KerasHook(tf.keras)

    AUTO = True

    alice = sy.TFEWorker(host='localhost:4000', auto_managed=AUTO)
    bob = sy.TFEWorker(host='localhost:4001', auto_managed=AUTO)
    crypto_provider = sy.TFEWorker(host='localhost:4002', auto_managed=AUTO)



    # cluster = sy.TFECluster(alice, bob, crypto_provider)
    # cluster.start()

    model = create_model(args.model, x_train.shape, 'mse')

    history = model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1)
    score = model.evaluate(x_train, y_train, verbose=0)
    print('Train loss:', score[0])
    print('Train accuracy:', score[1])
    print("%s: %.2f%%" % (model.metrics_names[1], score[1] * 100))