def test_digitise():
    """Splitting an integer tensor into per-digit tensors via FalconHelper.

    NOTE(review): asserts suggest the default order is most-significant digit
    first and that passing True reverses it — confirm against
    FalconHelper.digitise.
    """
    value = torch.tensor([12345])
    msb_first = FalconHelper.digitise(value)
    lsb_first = FalconHelper.digitise(value, True)
    # Strict type check kept deliberately (list, not a subclass).
    assert type(msb_first) is list
    assert (msb_first[0] == torch.tensor(1)).all()
    assert (lsb_first[0] == torch.tensor(5)).all()
def test_private_xor(workers):
    """XOR of two bits secret-shared over the binary field (field=2)."""
    bob, alice, james = workers["bob"], workers["alice"], workers["james"]
    zero = torch.tensor([0]).share(bob, alice, james, protocol="falcon", field=2)
    one = torch.tensor([1]).share(bob, alice, james, protocol="falcon", field=2)
    # Full truth table, in the same order the original asserts covered it.
    cases = (
        (zero, one, 1),
        (zero, zero, 0),
        (one, one, 0),
        (one, zero, 1),
    )
    for lhs, rhs, expected in cases:
        assert (FalconHelper.xor(lhs, rhs).reconstruct() == torch.tensor(expected)).all()
def test_determine_sign(workers):
    """determine_sign keeps values for beta == 0 and negates them for beta == 1."""
    parties = [workers["bob"], workers["alice"], workers["james"]]
    values = torch.tensor([-4, 5, 6])
    shared = values.share(*parties, protocol="falcon")
    beta_zero = torch.tensor(0).share(*parties, protocol="falcon", field=2)
    beta_one = torch.tensor(1).share(*parties, protocol="falcon", field=2)
    unchanged = FalconHelper.determine_sign(shared, beta_zero).reconstruct()
    negated = FalconHelper.determine_sign(shared, beta_one).reconstruct()
    assert (unchanged == torch.tensor([-4, 5, 6])).all()
    assert (negated == (-1 * torch.tensor([-4, 5, 6]))).all()
def test_select_share(workers):
    """select_shares picks x when the selector bit is 0 and y when it is 1.

    NOTE(review): this calls FalconHelper.select_shares (plural) while a
    sibling test calls select_share (singular) — confirm which API name is
    current.
    """
    parties = [workers["bob"], workers["alice"], workers["james"]]
    x = torch.tensor([0, 1, 2]).share(*parties, protocol="falcon")
    y = torch.tensor([-3, 0, 1]).share(*parties, protocol="falcon")
    bit_zero = torch.tensor(0).share(*parties, protocol="falcon", field=2)
    bit_one = torch.tensor(1).share(*parties, protocol="falcon", field=2)
    picked_x = FalconHelper.select_shares(bit_zero, x, y).reconstruct()
    picked_y = FalconHelper.select_shares(bit_one, x, y).reconstruct()
    assert (picked_x == torch.tensor([0, 1, 2])).all()
    assert (picked_y == torch.tensor([-3, 0, 1])).all()
def test_determine_sign(beta, workers):
    """determine_sign multiplies the shared values by (-1)**beta.

    NOTE(review): `beta` looks like a pytest parameter (0 or 1); the
    parametrize decorator is not visible in this chunk — confirm it exists at
    the call site. Also note this shares beta over the tensor's full ring,
    unlike the sibling test that uses field=2.
    """
    parties = [workers["bob"], workers["alice"], workers["james"]]
    plain = torch.tensor([-4, 5, 6])
    shared = plain.share(*parties, protocol="falcon")
    ring_size = shared.ring_size
    expected = (-1) ** beta * plain
    # A shared tensor of all-ones (beta truthy) or all-zeros (beta falsy).
    bit_factory = torch.ones if beta else torch.zeros
    beta_shared = bit_factory(size=shared.shape, dtype=torch.long).share(
        *parties, protocol="falcon", field=ring_size
    )
    result = FalconHelper.determine_sign(shared, beta_shared).reconstruct()
    assert (expected == result).all()
def conv2d(filters: syft.ReplicatedSharingTensor, image, padding=0):
    """2D convolution of a shared filter bank over an image batch via im2col.

    NOTE(review): output size is computed from heights only, so this assumes
    square images and square filters, and an implicit stride of 1 — confirm
    against FalconHelper.unfold. Width/batch/channel components of the shapes
    are unpacked but unused.

    :param filters: shared weights of shape (channels_out, in_channels, kh, kw)
    :param image: input of shape (batch, channels, width, height)
    :param padding: symmetric zero-padding applied by the unfold step
    :return: convolution result of shape (batch, channels_out, out, out)
    """
    _, _, _, image_height = image.shape
    channels_out, _, _, filter_height = filters.shape
    # im2col: each receptive field becomes a column, turning conv into matmul.
    columns = FalconHelper.unfold(image, filter_height, padding)
    flat_filters = filters.view(channels_out, -1)
    product = flat_filters @ columns
    out_size = (image_height - filter_height + 2 * padding) + 1
    return product.view(-1, channels_out, out_size, out_size)
def test_select_share(bit_select, workers):
    """select_share returns x for a 0 selector bit and y for a 1 bit.

    NOTE(review): `bit_select` looks like a pytest parameter (0 or 1); the
    parametrize decorator is not visible in this chunk — confirm.
    """
    parties = [workers["bob"], workers["alice"], workers["james"]]
    x = torch.tensor([0, 1, 2])
    y = torch.tensor([-3, 0, 1])
    x_shared = x.share(*parties, protocol="falcon")
    y_shared = y.share(*parties, protocol="falcon")
    selector = torch.tensor(bit_select).share(*parties, protocol="falcon", field=2)
    result = FalconHelper.select_share(selector, x_shared, y_shared).reconstruct()
    expected = y if bit_select else x
    assert (result == expected).all()
def test_private_xor(x_val, y_val, x_xor_y, workers):
    """XOR of two secret-shared bit tensors matches the expected plaintext.

    NOTE(review): `x_val`, `y_val`, `x_xor_y` look like pytest parameters
    (tensors forming an XOR truth table); the parametrize decorator is not
    visible in this chunk — confirm.
    """
    bob, alice, james = workers["bob"], workers["alice"], workers["james"]
    lhs = x_val.share(bob, alice, james, protocol="falcon", field=2)
    rhs = y_val.share(bob, alice, james, protocol="falcon", field=2)
    assert (FalconHelper.xor(lhs, rhs).reconstruct() == x_xor_y).all()