Example #1
0
def test_relu_deriv(workers):
    """relu_deriv on a shared tensor marks non-negative entries with 1.

    Runs the same check for both supported share dtypes ("long" and "int").
    """
    alice = workers["alice"]
    bob = workers["bob"]
    james = workers["james"]

    expected = torch.tensor([1, 1, 0])
    for dtype in ("long", "int"):
        shared = (
            torch.tensor([10, 0, -3])
            .share(alice, bob, crypto_provider=james, dtype=dtype)
            .child
        )
        result = relu_deriv(shared)
        assert (result.get() == expected).all()
Example #2
0
def test_relu_deriv(workers):
    """securenn.relu_deriv returns 0 for negatives and 1 otherwise."""
    alice, bob, james = workers["alice"], workers["bob"], workers["james"]

    shared = (
        torch.tensor([-10, 0, 10])
        .share(alice, bob, crypto_provider=james, dtype="long")
        .child
    )
    expected = torch.tensor([0, 1, 1])
    assert (securenn.relu_deriv(shared).get() == expected).all()
Example #3
0
 def positive(self):
     """Return an element-wise indicator for ``self >= 0``.

     Delegates to ``securenn.relu_deriv``, which (per the tests elsewhere
     in this file) yields 1 where the entry is non-negative — zero counts
     as positive here — and 0 where it is negative.
     """
     # self >= 0
     return securenn.relu_deriv(self)
Example #4
0
def test_relu_deriv(workers):
    """relu_deriv yields 1 for non-negative entries and 0 for negatives."""
    alice = workers["alice"]
    bob = workers["bob"]
    james = workers["james"]

    shared = th.tensor([10, 0, -3]).share(alice, bob, crypto_provider=james).child
    deriv = relu_deriv(shared)

    assert (deriv.get() == th.tensor([1, 1, 0])).all()
Example #5
0
#     torch.LongTensor([13, 3567, 2 ** 60])
#     .share(alice, bob, crypto_provider=james, field=L)
#     .child
# )
#
# res = share_convert(x_bit_sh)
# assert res.field == L - 1
# assert (res.get() % L == torch.LongTensor([13, 3567, 2 ** 60])).all()
# Micro-benchmark: compare the wall-clock cost of a secret-shared
# multiplication against relu_deriv on a large all-zero tensor.
# NOTE(review): alice, bob, james, L, TicToc, relu_deriv, np and torch are
# presumably defined/imported earlier in this file — confirm before running.
array_size = 100000

# All-zero input, so the product below is all zeros.
np_array = np.zeros(array_size)

# Separate timers so the two operations are reported independently.
t_relu = TicToc()
t_mult = TicToc()

# Secret-share two copies of the zero tensor between alice and bob, with
# james acting as the crypto provider, over the field L.
x_sh = torch.tensor(np_array).share(alice, bob, crypto_provider=james,
                                    field=L).child
y_sh = torch.tensor(np_array).share(alice, bob, crypto_provider=james,
                                    field=L).child

# Time the shared element-wise multiplication.
t_mult.tic()
z_sh = x_sh * y_sh
t_mult.toc()

print('z : {}'.format(z_sh.get()))
# Time the relu derivative (sign-test) protocol on the shared tensor.
t_relu.tic()
r = relu_deriv(x_sh)
t_relu.toc()
print('r : {}'.format(r.get()))

# assert (r.get() == torch.tensor([1, 0, 0])).all()