Example #1
def test_bit_extract():
    tf.reset_default_graph()

    prot = ABY3()
    tfe.set_protocol(prot)

    x = tfe.define_private_variable(np.array([[1, -2, 3], [-4, -5, 6]]),
                                    share_type=ARITHMETIC)
    y = tfe.define_private_variable(np.array([[1, -2, 3], [-4, -5, 6]]),
                                    share_type=ARITHMETIC,
                                    apply_scaling=False)

    # Bit 63 is the sign bit. Since x is scaled, be more careful when extracting other bits.
    z = tfe.bit_extract(x, 63)
    w = tfe.bit_extract(y, 1)  # y is not scaled
    s = tfe.msb(x)  # Sign bit

    with tfe.Session() as sess:
        # initialize variables
        sess.run(tfe.global_variables_initializer())
        # reveal result
        result = sess.run(z.reveal())
        close(result.astype(int), np.array([[0, 1, 0], [1, 1, 0]]))
        result = sess.run(w.reveal())
        close(result.astype(int), np.array([[0, 1, 1], [0, 1, 1]]))
        result = sess.run(s.reveal())
        close(result.astype(int), np.array([[0, 1, 0], [1, 1, 0]]))
        print("test_bit_extract succeeds")
Example #2
    def test_not_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(
            tf.constant([[1, 2, 3], [4, 5, 6]]), share_type=BOOLEAN, apply_scaling=False
        )
        y = tfe.define_private_variable(
            tf.constant([[1, 0, 0], [0, 1, 0]]),
            apply_scaling=False,
            share_type=BOOLEAN,
            factory=prot.bool_factory,
        )
        z1 = ~x
        z2 = ~y

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[-2, -3, -4], [-5, -6, -7]]), rtol=0.0, atol=0.01
            )

            result = sess.run(z2.reveal())
            np.testing.assert_allclose(
                result, np.array([[0, 1, 1], [1, 0, 1]]), rtol=0.0, atol=0.01
            )
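
The expected outputs above follow from ordinary two's-complement identities: for a full-width value, ~v == -v - 1, and for a single-bit value (the bool_factory case), ~b == 1 - b. A plaintext NumPy check, separate from the test itself:

import numpy as np

x = np.array([[1, 2, 3], [4, 5, 6]])
print(np.invert(x))   # [[-2 -3 -4] [-5 -6 -7]], since ~v == -v - 1

y = np.array([[1, 0, 0], [0, 1, 0]])
print(1 - y)          # [[0 1 1] [1 0 1]], i.e. bitwise NOT on 1-bit values
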
Example #3
    def test_binary_crossentropy_from_logits(self):

        y_true_np = np.array([1, 1, 0, 0]).astype(float)
        y_pred_np = np.array([0.9, 0.1, 0.9, 0.1]).astype(float)

        y_true = tfe.define_private_variable(y_true_np)
        y_pred = tfe.define_private_variable(y_pred_np)

        loss = tfe.keras.losses.BinaryCrossentropy(from_logits=True)
        out = loss(y_true, y_pred)
        der_for_y_pred = loss.grad(y_true, y_pred)

        with tfe.Session() as sess:
            sess.run(tf.global_variables_initializer())
            actual = sess.run(out.reveal())
            actual_der = sess.run(der_for_y_pred.reveal())

        tf.reset_default_graph()
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            y_true = tf.convert_to_tensor(y_true_np)
            y_pred = tf.convert_to_tensor(y_pred_np)
            loss = tf.keras.losses.BinaryCrossentropy(from_logits=True)
            out = loss(y_true, y_pred)
            der_for_y_pred = tf.sigmoid(y_pred) - y_true

            expected = sess.run(out)
            expected_der = sess.run(der_for_y_pred)

        np.testing.assert_allclose(actual, expected, rtol=1e-1, atol=1e-1)
        np.testing.assert_allclose(actual_der,
                                   expected_der,
                                   rtol=1e-1,
                                   atol=1e-1)
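
For intuition, the quantities being compared can be written out in plain NumPy. The sketch below uses the standard numerically stable form of binary cross-entropy from logits and the gradient sigmoid(z) - y_true that the test itself computes; it is a plaintext reference, not tf-encrypted's internal implementation.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def bce_from_logits(y_true, logits):
    # Stable form of -mean(y*log(sigmoid(z)) + (1-y)*log(1-sigmoid(z)))
    return np.mean(np.maximum(logits, 0) - logits * y_true
                   + np.log1p(np.exp(-np.abs(logits))))

y_true = np.array([1, 1, 0, 0], dtype=float)
logits = np.array([0.9, 0.1, 0.9, 0.1], dtype=float)

print(bce_from_logits(y_true, logits))  # loss value compared against tf.keras above
print(sigmoid(logits) - y_true)         # gradient w.r.t. the logits, as in the test
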
Example #4
    def test_concat(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x1 = tfe.define_private_variable(tf.constant([[1, 2], [4, 5]]))
        x2 = tfe.define_private_variable(tf.constant([[3], [6]]))
        y1 = tfe.define_constant(np.array([[1, 2, 3]]))
        y2 = tfe.define_constant(np.array([[4, 5, 6]]))

        z1 = tfe.concat([x1, x2], axis=1)
        z2 = tfe.concat([y1, y2], axis=0)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[1, 2, 3], [4, 5, 6]]), rtol=0.0, atol=0.01
            )

            result = sess.run(z2)
            np.testing.assert_allclose(
                result, np.array([[1, 2, 3], [4, 5, 6]]), rtol=0.0, atol=0.01
            )
Example #5
    def test_mean_squared_error(self):
        y_true_np = np.array([1, 2, 3, 4]).astype(float)
        y_pred_np = np.array([0.9, 2.1, 3.2, 4.1]).astype(float)

        y_true = tfe.define_private_variable(y_true_np)
        y_pred = tfe.define_private_variable(y_pred_np)

        loss = tfe.keras.losses.MeanSquaredError()
        out = loss(y_true, y_pred)

        with tfe.Session() as sess:
            sess.run(tf.global_variables_initializer())
            actual = sess.run(out.reveal())

        tf.reset_default_graph()
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            y_true = tf.convert_to_tensor(y_true_np)
            y_pred = tf.convert_to_tensor(y_pred_np)

            loss = tf.keras.losses.MeanSquaredError()
            out = loss(y_true, y_pred)
            expected = sess.run(out)

        np.testing.assert_allclose(actual, expected, rtol=1e-1, atol=1e-1)
Example #6
def test_rshift_private():
    tf.reset_default_graph()

    prot = ABY3()
    tfe.set_protocol(prot)

    x = tfe.define_private_variable(tf.constant([[1, 2, 3], [4, 5, 6]]),
                                    share_type=BOOLEAN)
    y = tfe.define_private_variable(tf.constant([[-1, -2, -3], [-4, 5, 6]]),
                                    share_type=BOOLEAN,
                                    apply_scaling=False)

    z = x >> 1
    w = y >> 1
    s = y.logical_rshift(1)

    with tfe.Session() as sess:
        # initialize variables
        sess.run(tfe.global_variables_initializer())
        # reveal result
        result = sess.run(z.reveal())
        # NOTE: x is scaled and treated as a fixed-point number
        close(result, np.array([[0.5, 1, 1.5], [2, 2.5, 3]]))
        result = sess.run(w.reveal())
        close(result, np.array([[-1, -1, -2], [-2, 2, 3]]))
        result = sess.run(s.reveal())
        close(
            result,
            np.array([[(-1 & ((1 << prot.nbits) - 1)) >> 1,
                       (-2 & ((1 << prot.nbits) - 1)) >> 1,
                       (-3 & ((1 << prot.nbits) - 1)) >> 1],
                      [(-4 & ((1 << prot.nbits) - 1)) >> 1, 2, 3]]))
        print("test_rshift_private succeeds")
Example #7
    def test_3d_matmul_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        # 3-D matrix mult
        x = tfe.define_private_variable(
            tf.constant(np.arange(1, 13), shape=[2, 2, 3]))
        y = tfe.define_private_variable(
            tf.constant(np.arange(13, 25), shape=[2, 3, 2]))

        z = tfe.matmul(x, y)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z.reveal())
            np.testing.assert_allclose(
                result,
                np.array([[[94, 100], [229, 244]], [[508, 532], [697, 730]]]),
                rtol=0.0,
                atol=0.01,
            )
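
The expected tensor is simply the batched plaintext product, which NumPy reproduces directly:

import numpy as np

x = np.arange(1, 13).reshape(2, 2, 3)
y = np.arange(13, 25).reshape(2, 3, 2)
print(np.matmul(x, y))
# [[[ 94 100]
#   [229 244]]
#  [[508 532]
#   [697 730]]]
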
Example #8
    def test_boolean_sharing(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(
            tf.constant([[1, 2, 3], [4, 5, 6]]), share_type=BOOLEAN
        )
        y = tfe.define_private_variable(
            tf.constant([[7, 8, 9], [10, 11, 12]]), share_type=BOOLEAN
        )

        z1 = tfe.B_xor(x, y)

        z2 = tfe.B_and(x, y)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[6, 10, 10], [14, 14, 10]]), rtol=0.0, atol=0.01
            )

            result = sess.run(z2.reveal())
            np.testing.assert_allclose(
                result, np.array([[1, 0, 1], [0, 1, 4]]), rtol=0.0, atol=0.01
            )
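
On boolean shares, B_xor and B_and reveal to the plaintext bitwise XOR and AND of the inputs, which a plain NumPy check confirms:

import numpy as np

x = np.array([[1, 2, 3], [4, 5, 6]])
y = np.array([[7, 8, 9], [10, 11, 12]])
print(np.bitwise_xor(x, y))  # [[ 6 10 10] [14 14 10]]
print(np.bitwise_and(x, y))  # [[ 1  0  1] [ 0  1  4]]
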
Example #9
 def __init__(self, num_features):
     self.w = tfe.define_private_variable(
         tf.random_uniform([num_features, 1], -0.01, 0.01)
     )
     self.w_masked = tfe.mask(self.w)
     self.b = tfe.define_private_variable(tf.zeros([1]))
     self.b_masked = tfe.mask(self.b)
Example #10
    def test_ppa_private_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(
            tf.constant([[1, 2, 3], [4, 5, 6]]), share_type=BOOLEAN
        )
        y = tfe.define_private_variable(
            tf.constant([[7, 8, 9], [10, 11, 12]]), share_type=BOOLEAN
        )

        # Parallel prefix adder. It is simply an adder for boolean sharing.
        z1 = tfe.B_ppa(x, y, topology="sklansky")
        z2 = tfe.B_ppa(x, y, topology="kogge_stone")

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[8, 10, 12], [14, 16, 18]]), rtol=0.0, atol=0.01
            )

            result = sess.run(z2.reveal())
            np.testing.assert_allclose(
                result, np.array([[8, 10, 12], [14, 16, 18]]), rtol=0.0, atol=0.01
            )
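
Both topologies compute ordinary integer addition; they differ only in how the carry prefix is combined. Below is a minimal plaintext sketch of a Kogge-Stone prefix adder on Python integers, for intuition only and not tf-encrypted's share-level implementation:

def kogge_stone_add(x, y, nbits=64):
    mask = (1 << nbits) - 1
    g = x & y                  # generate bits
    p = x ^ y                  # propagate bits
    k = 1
    while k < nbits:           # log2(nbits) prefix-combination rounds
        g = (g | (p & (g << k))) & mask
        p = (p & (p << k)) & mask
        k <<= 1
    # carry into bit i is the prefix generate of bits [0, i-1]
    return ((x ^ y) ^ (g << 1)) & mask

print(kogge_stone_add(1, 7))   # 8
print(kogge_stone_add(6, 12))  # 18
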
Example #11
    def test_mul_AB_private_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(
            np.array([[1, 2, 3], [4, 5, 6]]), share_type=ARITHMETIC,
        )
        y = tfe.define_private_variable(
            tf.constant([[1, 0, 0], [0, 1, 0]]),
            apply_scaling=False,
            share_type=BOOLEAN,
            factory=prot.bool_factory,
        )

        z = tfe.mul_AB(x, y)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z.reveal())
            np.testing.assert_allclose(
                result, np.array([[1, 0, 0], [0, 5, 0]]), rtol=0.0, atol=0.01
            )
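
mul_AB multiplies an arithmetic-shared tensor by a boolean-shared bit tensor, so revealing the result gives the plaintext values selected by the 0/1 mask:

import numpy as np

x = np.array([[1, 2, 3], [4, 5, 6]])
b = np.array([[1, 0, 0], [0, 1, 0]])
print(x * b)  # [[1 0 0] [0 5 0]]
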
Example #12
  def initialize(
      self,
      initial_weights: InitialTensor = None,
      initial_bias: InitialTensor = None
  ) -> None:
    if initial_weights is None:
      initial_size = (self.in_features, self.out_features)
      initial_weights = np.random.normal(scale=0.1, size=initial_size)
    if initial_bias is not None:
      self.bias = tfe.define_private_variable(initial_bias)

    self.weights = tfe.define_private_variable(initial_weights)

    if self.transpose_weight:
      self.weights = self.weights.transpose()
Example #13
def test_simple_lr_model():
    tf.reset_default_graph()

    import time
    start = time.time()
    prot = ABY3()
    tfe.set_protocol(prot)

    # define inputs
    x_raw = tf.random.uniform(minval=-0.5,
                              maxval=0.5,
                              shape=[99, 10],
                              seed=1000)
    x = tfe.define_private_variable(x_raw, name="x")
    y_raw = tf.cast(tf.reduce_mean(x_raw, axis=1, keepdims=True) > 0,
                    dtype=tf.float32)
    y = tfe.define_private_variable(y_raw, name="y")
    w = tfe.define_private_variable(tf.random_uniform([10, 1],
                                                      -0.01,
                                                      0.01,
                                                      seed=100),
                                    name="w")
    b = tfe.define_private_variable(tf.zeros([1]), name="b")
    learning_rate = 0.01

    with tf.name_scope("forward"):
        out = tfe.matmul(x, w) + b
        y_hat = tfe.sigmoid(out)

    with tf.name_scope("loss-grad"):
        dy = y_hat - y
    batch_size = x.shape.as_list()[0]
    with tf.name_scope("backward"):
        dw = tfe.matmul(tfe.transpose(x), dy) / batch_size
        db = tfe.reduce_sum(dy, axis=0) / batch_size
        upd1 = dw * learning_rate
        upd2 = db * learning_rate
        assign_ops = [tfe.assign(w, w - upd1), tfe.assign(b, b - upd2)]

    with tfe.Session() as sess:
        # initialize variables
        sess.run(tfe.global_variables_initializer())
        for i in range(1):
            sess.run(assign_ops)

        print(sess.run(w.reveal()))
    end = time.time()
    print("Elapsed time: {} seconds".format(end - start))
Example #14
def test_polynomial_piecewise():
    tf.reset_default_graph()

    import time
    start = time.time()
    prot = ABY3()
    tfe.set_protocol(prot)

    x = tfe.define_private_variable(
        tf.constant([[-1, -0.5, -0.25], [0, 0.25, 2]]))

    # This is the approximation of the sigmoid function by using a piecewise function:
    # f(x) = (0 if x<-0.5), (x+0.5 if -0.5<=x<0.5), (1 if x>=0.5)
    z1 = tfe.polynomial_piecewise(
        x,
        (-0.5, 0.5),
        ((0,), (0.5, 1), (1,)),  # use tuples because lists are not hashable for the memoize cache key
    )
    # Or, simply use the pre-defined sigmoid API which includes a different approximation
    z2 = tfe.sigmoid(x)

    with tfe.Session() as sess:
        # initialize variables
        sess.run(tfe.global_variables_initializer())
        # reveal result
        result = sess.run(z1.reveal())
        close(result, np.array([[0, 0, 0.25], [0.5, 0.75, 1]]))
        result = sess.run(z2.reveal())
        close(result, np.array([[0.33, 0.415, 0.4575], [0.5, 0.5425, 0.84]]))
        print("test_polynomial_piecewise succeeds")

    end = time.time()
    print("Elapsed time: {} seconds".format(end - start))
Example #15
    def test_neg(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        # define inputs
        x = tfe.define_private_variable(np.array([[0.6, -0.7], [-0.8, 0.9]]))
        y = tfe.define_constant(np.array([[0.6, -0.7], [-0.8, 0.9]]))

        # define computation
        z1 = -x
        z2 = -y

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[-0.6, 0.7], [0.8, -0.9]]),
                                       rtol=0.0,
                                       atol=0.01)
            result = sess.run(z2)
            np.testing.assert_allclose(result,
                                       np.array([[-0.6, 0.7], [0.8, -0.9]]),
                                       rtol=0.0,
                                       atol=0.01)
            print("test_neg succeeds")
Example #16
    def test_reduce_sum(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(tf.constant([[1, 2, 3], [4, 5, 6]]))
        y = tfe.define_constant(np.array([[1, 2, 3], [4, 5, 6]]))

        z1 = x.reduce_sum(axis=1, keepdims=True)
        z2 = tfe.reduce_sum(y, axis=0, keepdims=False)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[6], [15]]),
                                       rtol=0.0,
                                       atol=0.01)

            result = sess.run(z2)
            np.testing.assert_allclose(result,
                                       np.array([5, 7, 9]),
                                       rtol=0.0,
                                       atol=0.01)
Example #17
    def test_pow_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(tf.constant([[1, 2, 3], [4, 5, 6]]))

        y = x**2
        z = x**3

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(y.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[1, 4, 9], [16, 25, 36]]),
                                       rtol=0.0,
                                       atol=0.01)

            result = sess.run(z.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[1, 8, 27], [64, 125, 216]]),
                                       rtol=0.0,
                                       atol=0.01)
Example #18
    def test_mul_private_public(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        # define inputs
        x = tfe.define_private_variable(tf.ones(shape=(2, 2)) * 2)
        y = tfe.define_constant(np.array([[0.6, 0.7], [0.8, 0.9]]))
        w = tfe.define_constant(np.array([[2, 2], [2, 2]]))

        # define computation
        z1 = y * x  # mul_public_private
        z2 = z1 * w  # mul_private_public

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z2.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[2.4, 2.8], [3.2, 3.6]]),
                                       rtol=0.0,
                                       atol=0.01)
            print("test_mul_private_public succeeds")
Example #19
def layer_test(layer_cls,
               kwargs=None,
               batch_input_shape=None,
               input_data=None):
    """Test routine for a layer with a single input and single output.
  Arguments:
    layer_cls: Layer class object.
    kwargs: Optional dictionary of keyword arguments for instantiating the
      layer.
    batch_input_shape: Input shape tuple, including the batch dimension.
    input_data: Numpy array of input data.
  Returns:
    The output data (Numpy array) returned by the layer, for additional
    checks to be done by the calling code.
  Raises:
    ValueError: if both `input_data` and `batch_input_shape` are None.
  """
    input_shape, input_data = _sanitize_testing_args(batch_input_shape,
                                                     input_data)

    # instantiation
    kwargs = kwargs or {}

    with tfe.protocol.SecureNN():
        layer = layer_cls(batch_input_shape=input_shape, **kwargs)
        model = Sequential()
        model.add(layer)

        x = tfe.define_private_variable(input_data)
        model(x)
Example #20
    def test_add_private_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        def provide_input():
            # normal TensorFlow operations can be run locally
            # as part of defining a private input, in this
            # case on the machine of the input provider
            return tf.ones(shape=(2, 2)) * 1.3

        # define inputs
        x = tfe.define_private_variable(tf.ones(shape=(2, 2)))
        y = tfe.define_private_input('input-provider', provide_input)

        # define computation
        z = x + y

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z.reveal())
            # Should be [[2.3, 2.3], [2.3, 2.3]]
            np.testing.assert_allclose(result,
                                       np.array([[2.3, 2.3], [2.3, 2.3]]),
                                       rtol=0.0,
                                       atol=0.01)
            print("test_add_private_private succeeds")
Example #21
    def test_sub_private_public(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        # define inputs
        x = tfe.define_private_variable(tf.ones(shape=(2, 2)))
        y = tfe.define_constant(np.array([[0.6, 0.7], [0.8, 0.9]]))

        # define computation
        z1 = x - y
        z2 = y - x

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[0.4, 0.3], [0.2, 0.1]]),
                                       rtol=0.0,
                                       atol=0.01)
            result = sess.run(z2.reveal())
            np.testing.assert_allclose(result,
                                       np.array([[-0.4, -0.3], [-0.2, -0.1]]),
                                       rtol=0.0,
                                       atol=0.01)
            print("test_sub_private_public succeeds")
Example #22
    def test_weights_as_private_var(self):
        input_shape = (1, 3)
        input_data = np.random.normal(size=input_shape)
        expected, k_weights, k_config = _model_predict_keras(
            input_data, input_shape)

        with tfe.protocol.SecureNN():
            x = tfe.define_private_input(
                "inputter", lambda: tf.convert_to_tensor(input_data))

            tfe_model = tfe.keras.models.model_from_config(k_config)
            weights_private_var = [
                tfe.define_private_variable(w) for w in k_weights
            ]

            with tfe.Session() as sess:
                for w in weights_private_var:
                    sess.run(w.initializer)

                tfe_model.set_weights(weights_private_var, sess)
                y = tfe_model(x)

                actual = sess.run(y.reveal())

                np.testing.assert_allclose(actual,
                                           expected,
                                           rtol=1e-2,
                                           atol=1e-3)
Example #23
def test_instantiate_tfe_layer():

    from syft.frameworks.keras.model.sequential import _instantiate_tfe_layer

    hook = sy.KerasHook(tf.keras)

    input_shape = [4, 5]
    input_data = np.ones(input_shape)
    kernel = np.random.normal(size=[5, 5])
    initializer = tf.keras.initializers.Constant(kernel)

    d_tf = tf.keras.layers.Dense(
        5, kernel_initializer=initializer, batch_input_shape=input_shape, use_bias=True
    )

    with tf.Session() as sess:
        x = tf.Variable(input_data, dtype=tf.float32)
        y = d_tf(x)
        sess.run(tf.global_variables_initializer())
        expected = sess.run(y)

    stored_keras_weights = {d_tf.name: d_tf.get_weights()}

    with tf.Graph().as_default():
        p_x = tfe.define_private_variable(input_data)
        d_tfe = _instantiate_tfe_layer(d_tf, stored_keras_weights)

        out = d_tfe(p_x)

        with tfe.Session() as sess:
            sess.run(tf.global_variables_initializer())

            actual = sess.run(out.reveal())

    np.testing.assert_allclose(actual, expected, rtol=0.001)
Example #24
    def test_mul_trunc2_private_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        def provide_input():
            # normal TensorFlow operations can be run locally
            # as part of defining a private input, in this
            # case on the machine of the input provider
            return tf.ones(shape=(2, 2)) * 1.3

        # define inputs
        x = tfe.define_private_variable(tf.ones(shape=(2, 2)) * 2)
        y = tfe.define_private_input("input-provider", provide_input)

        # define computation
        z = tfe.mul_trunc2(x, y)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z.reveal(), tag="mul_trunc2")
            np.testing.assert_allclose(
                result, np.array([[2.6, 2.6], [2.6, 2.6]]), rtol=0.0, atol=0.01
            )
Example #25
def test_matmul_public_private():
    tf.reset_default_graph()

    prot = ABY3()
    tfe.set_protocol(prot)

    def provide_input():
        # normal TensorFlow operations can be run locally
        # as part of defining a private input, in this
        # case on the machine of the input provider
        return tf.constant(np.array([[1.1, 1.2], [1.3, 1.4], [1.5, 1.6]]))

    # define inputs
    x = tfe.define_private_variable(tf.ones(shape=(2, 2)))
    y = tfe.define_public_input('input-provider', provide_input)
    v = tfe.define_constant(np.ones((2, 2)))

    # define computation
    w = y.matmul(x)  # matmul_public_private
    z = w.matmul(v)  # matmul_private_public

    with tfe.Session() as sess:
        # initialize variables
        sess.run(tfe.global_variables_initializer())
        # reveal result
        result = sess.run(w.reveal())
        close(result, np.array([[2.3, 2.3], [2.7, 2.7], [3.1, 3.1]]))
        result = sess.run(z.reveal())
        close(result, np.array([[4.6, 4.6], [5.4, 5.4], [6.2, 6.2]]))
        print("test_matmul_public_private succeeds")
Example #26
    def test_transpose(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(tf.constant([[1, 2, 3], [4, 5, 6]]))
        y = tfe.define_constant(np.array([[1, 2, 3], [4, 5, 6]]))

        z1 = x.transpose()
        z2 = tfe.transpose(y)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[1, 4], [2, 5], [3, 6]]), rtol=0.0, atol=0.01
            )

            result = sess.run(z2)
            np.testing.assert_allclose(
                result, np.array([[1, 4], [2, 5], [3, 6]]), rtol=0.0, atol=0.01
            )
Example #27
    def test_polynomial_piecewise(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(tf.constant([[-1, -0.5, -0.25], [0, 0.25, 2]]))

        # This is the approximation of the sigmoid function by using a piecewise function:
        # f(x) = (0 if x<-0.5), (x+0.5 if -0.5<=x<0.5), (1 if x>=0.5)
        z1 = tfe.polynomial_piecewise(
            x,
            (-0.5, 0.5),
            ((0,), (0.5, 1), (1,)),  # use tuple because list is not hashable
        )
        # Or, simply use the pre-defined sigmoid API which includes a different approximation
        z2 = tfe.sigmoid(x)

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(z1.reveal())
            np.testing.assert_allclose(
                result, np.array([[0, 0, 0.25], [0.5, 0.75, 1]]), rtol=0.0, atol=0.01
            )
            result = sess.run(z2.reveal())
            np.testing.assert_allclose(
                result,
                np.array([[0.33, 0.415, 0.4575], [0.5, 0.5425, 0.84]]),
                rtol=0.0,
                atol=0.01,
            )
Example #28
    def test_polynomial_private(self):
        tf.reset_default_graph()

        prot = ABY3()
        tfe.set_protocol(prot)

        x = tfe.define_private_variable(tf.constant([[1, 2, 3], [4, 5, 6]]))

        # Friendly version
        y = 1 + 1.2 * x + 3 * (x ** 2) + 0.5 * (x ** 3)
        # More optimized version: No truncation for multiplying integer coefficients (e.g., '3' in this example)
        z = tfe.polynomial(x, [1, 1.2, 3, 0.5])

        with tfe.Session() as sess:
            # initialize variables
            sess.run(tfe.global_variables_initializer())
            # reveal result
            result = sess.run(y.reveal())
            np.testing.assert_allclose(
                result,
                np.array([[5.7, 19.4, 45.1], [85.8, 144.5, 224.2]]),
                rtol=0.0,
                atol=0.01,
            )

            result = sess.run(z.reveal())
            np.testing.assert_allclose(
                result,
                np.array([[5.7, 19.4, 45.1], [85.8, 144.5, 224.2]]),
                rtol=0.0,
                atol=0.01,
            )
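
Both forms evaluate the same cubic 1 + 1.2x + 3x^2 + 0.5x^3; the expected plaintext values can be checked with np.polyval (highest-degree coefficient first):

import numpy as np

x = np.array([[1, 2, 3], [4, 5, 6]], dtype=float)
print(np.polyval([0.5, 3, 1.2, 1], x))
# [[  5.7  19.4  45.1]
#  [ 85.8 144.5 224.2]]
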
Example #29
  def add_weight(self, variable, make_private=True):
    if make_private:
      variable = tfe.define_private_variable(variable)
      self.weights.append(variable)
    else:
      variable = tfe.define_public_variable(variable)
      self.weights.append(variable)

    return variable
Example #30
    def test_two_layers(self):
        shape = (1, 3)
        input_data = np.random.normal(size=shape)
        with tfe.protocol.SecureNN():
            model = Sequential()
            model.add(Dense(2, input_shape=shape))
            model.add(Dense(3))

            x = tfe.define_private_variable(input_data)
            model(x)