def test_grad_div_backward(get_clients) -> None:
    """GradDiv.backward gradients match torch autograd for elementwise division.

    Builds reference gradients with plain torch autograd, then runs the MPC
    GradDiv backward pass with an upstream gradient of ones and compares the
    reconstructed results.

    Fix: the upstream gradient handed to ``z.backward`` must be a
    floating-point tensor — passing an integer (Long) tensor makes torch
    autograd raise ``RuntimeError: Found dtype Long but expected Float``.
    """
    parties = get_clients(2)

    session = Session(parties=parties)
    session.autograd_active = True
    SessionManager.setup_mpc(session)

    x_secret = torch.tensor([1.0, 2.1, 3.0, -4.13], requires_grad=True)
    x = MPCTensor(secret=x_secret, session=session, requires_grad=True)

    y_secret = torch.tensor([-2.0, 3.0, 4.39, 5.0], requires_grad=True)
    y = MPCTensor(secret=y_secret, session=session, requires_grad=True)

    # Reference gradients via torch autograd; the upstream gradient is a
    # float ones tensor matching z's dtype (autograd rejects integer grads).
    z = x_secret / y_secret
    z.backward(torch.ones_like(z))

    # Same upstream gradient of ones, secret-shared for the MPC backward.
    grad = torch.ones(4)
    grad_mpc = MPCTensor(secret=grad, session=session, requires_grad=True)

    ctx = {"x": x, "y": y, "result": x / y}

    grad_x, grad_y = GradDiv.backward(ctx, grad_mpc)

    expected_grad_x = x_secret.grad
    expected_grad_y = y_secret.grad

    res_x = grad_x.reconstruct()
    res_y = grad_y.reconstruct()

    # Fixed-point MPC arithmetic loses precision, hence the loose tolerance.
    assert np.allclose(res_x, expected_grad_x, rtol=1e-2)
    assert np.allclose(res_y, expected_grad_y, rtol=1e-2)
# Example #2
def test_backward_without_requires_grad(get_clients):
    """Backward on a result built from non-tracked tensors leaves every grad unset."""
    parties = get_clients(4)
    session = Session(parties=parties)
    session.autograd_active = True
    SessionManager.setup_mpc(session)

    secret_a = torch.tensor([[0.125, -1.25], [-4.25, 4], [-3, 3]])
    secret_b = torch.tensor([[4.5, -2.5], [5, 2.25], [-3, 3]])
    # Neither MPC tensor asks for gradient tracking.
    mpc_a = MPCTensor(secret=secret_a, session=session)
    mpc_b = MPCTensor(secret=secret_b, session=session)

    diff = mpc_a - mpc_b
    total = diff.sum()
    total.backward()

    # With no requires_grad anywhere, backward must be a no-op on grads.
    assert not diff.requires_grad
    assert diff.grad is None
    assert mpc_a.grad is None
    assert mpc_b.grad is None
# Example #3
def test_backward(get_clients):
    """Gradients from an MPC multiply-then-sum match plain torch autograd."""
    parties = get_clients(4)
    session = Session(parties=parties)
    session.autograd_active = True
    SessionManager.setup_mpc(session)

    secret_a = torch.tensor([[0.125, -1.25], [-4.25, 4], [-3, 3]],
                            requires_grad=True)
    secret_b = torch.tensor([[4.5, -2.5], [5, 2.25], [-3, 3]],
                            requires_grad=True)
    mpc_a = MPCTensor(secret=secret_a, session=session, requires_grad=True)
    mpc_b = MPCTensor(secret=secret_b, session=session, requires_grad=True)

    # Run the same computation twice: once under MPC, once in the clear.
    prod_mpc = mpc_a * mpc_b
    prod_torch = secret_a * secret_b
    sum_mpc = prod_mpc.sum()
    sum_torch = torch.sum(prod_torch)
    sum_mpc.backward()
    sum_torch.backward()

    # Fixed-point encoding introduces small errors, hence the tolerance.
    assert np.allclose(mpc_a.grad.get(), secret_a.grad, rtol=1e-3)
    assert np.allclose(mpc_b.grad.get(), secret_b.grad, rtol=1e-3)
# Example #4
def test_backward_with_one_requires_grad(get_clients):
    """Only the gradient-tracked operand receives a grad after backward."""
    parties = get_clients(4)
    session = Session(parties=parties)
    session.autograd_active = True
    SessionManager.setup_mpc(session)

    secret_a = torch.tensor([[0.125, -1.25], [-4.25, 4], [-3, 3]],
                            requires_grad=True)
    # Second operand deliberately untracked on both the torch and MPC sides.
    secret_b = torch.tensor([[4.5, -2.5], [5, 2.25], [-3, 3]])
    mpc_a = MPCTensor(secret=secret_a, session=session, requires_grad=True)
    mpc_b = MPCTensor(secret=secret_b, session=session)

    diff_mpc = mpc_a - mpc_b
    diff_torch = secret_a - secret_b
    sum_mpc = diff_mpc.sum()
    sum_torch = torch.sum(diff_torch)
    sum_mpc.backward()
    sum_torch.backward()

    # TODO: add assert for res_mpc.grad and res.grad
    assert diff_mpc.requires_grad
    assert np.allclose(mpc_a.grad.get(), secret_a.grad, rtol=1e-3)
    assert mpc_b.grad is None
def test_forward(get_clients) -> None:
    """Backward through a shared linear model yields input grads close to plaintext torch."""
    torch_model = LinearSyNet(torch)

    parties = get_clients(2)

    session = Session(parties=parties)
    session.autograd_active = True
    SessionManager.setup_mpc(session)
    shared_model = torch_model.share(session=session)

    x_secret = torch.tensor([[0.125, -1.25, -4.25], [-3, 3, 8], [-3, 3, 8]],
                            requires_grad=True)
    x_mpc = MPCTensor(secret=x_secret, session=session, requires_grad=True)

    # Forward pass in the clear and under MPC.
    plain_out = torch_model(x_secret)
    shared_out = shared_model(x_mpc)

    plain_sum = torch.sum(plain_out)
    shared_sum = shared_out.sum()

    plain_sum.backward()
    shared_sum.backward()

    # Loose tolerance: MPC fixed-point math is approximate.
    assert np.allclose(x_mpc.grad.get(), x_secret.grad, rtol=1e-2)