Example #1
def test_fallback_warning():
    class MiniModel(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(10, 1)

        def forward(self, x):
            return self.lin(x)

    class BigDS(Dataset):
        def __getitem__(self, idx):
            return torch.rand(1)

        def __len__(self):
            return 50_000

    dl = UniformDataLoader(BigDS(), batch_size=200)
    with LogCapture() as l:
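        # constructing with fallback_to_rdp=True should log a CRITICAL message mentioning RDP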
        watchdog = PrivacyWatchdog(
            dl,
            report_every_n_steps=1,
            target_delta=1e-5,
            target_epsilon=1.0,
            abort=False,
            fallback_to_rdp=True,
        )
        assert "CRITICAL" and "RDP" in str(l)
Example #2
def test_wrapper_returns_epsilon():
    class MiniModel(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(10, 1)

        def forward(self, x):
            return self.lin(x)

    class BigDS(Dataset):
        def __getitem__(self, idx):
            return torch.rand(1)

        def __len__(self):
            return 50_000

    dl = UniformDataLoader(BigDS(), batch_size=200)
    watchdog = PrivacyWatchdog(dl,
                               report_every_n_steps=1,
                               target_delta=1e-5,
                               target_epsilon=1.0,
                               abort=False)

    data = torch.randn(2, 1, 10)
    wrapped = PrivacyWrapper(MiniModel(), 2, 1.0, 1.0, watchdog=watchdog)
    epsila = []  # this one's for you @a1302z
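    # five DP-SGD steps: clip, noise, prepare the next batch, and record epsilon each time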
    for _ in range(5):
        output = wrapped(data)
        loss = output.mean()
        loss.backward()
        wrapped.clip_and_accumulate()
        wrapped.noise_gradient()
        wrapped.prepare_next_batch()
        epsila.append(wrapped.current_epsilon)
    assert len(epsila) == 5 and None not in epsila
Example #3
def test_orphan_watchdog():
    """Watchdog not attached"""
    dl = UniformDataLoader(udl, batch_size=200)
    watchdog = PrivacyWatchdog(
        dl,
        report_every_n_steps=1,
        target_delta=1e-5,
        target_epsilon=1.0,
    )
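    # no PrivacyWrapper was ever attached, so informing the watchdog must fail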
    with pytest.raises(RuntimeError):
        watchdog.inform(1)
Example #4
def test_abort_training():
    class BigDS(Dataset):
        def __getitem__(self, idx):
            return torch.rand(1)

        def __len__(self):
            return 50_000

    dl = UniformDataLoader(BigDS(), batch_size=200)
    watchdog = PrivacyWatchdog(dl,
                               report_every_n_steps=1,
                               target_delta=1e-5,
                               target_epsilon=1.0,
                               abort=True)

    class FakeWrapper:
        noise_multiplier = 1.0

    watchdog.wrapper = FakeWrapper
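    # at step 50_000 the epsilon budget is exhausted; abort=True turns this into an exception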
    with pytest.raises(PrivacyBudgetExhausted):
        watchdog.inform(50000)
Example #5
def test_log_exhausted():
    class BigDS(Dataset):
        def __getitem__(self, idx):
            return torch.rand(1)

        def __len__(self):
            return 50_000

    dl = UniformDataLoader(BigDS(), batch_size=200)
    watchdog = PrivacyWatchdog(dl,
                               report_every_n_steps=1,
                               target_delta=1e-5,
                               target_epsilon=1.0,
                               abort=False)

    class FakeWrapper:
        noise_multiplier = 1.0

    watchdog.wrapper = FakeWrapper
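    # budget exhausted as in test_abort_training, but abort=False only logs a warning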
    with LogCapture() as l:
        watchdog.inform(50000)
        assert "WARNING" and "exhausted" in str(l)
Example #6
def test_overfitting():
    class Model(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.lin = torch.nn.Linear(1, 1)

        def forward(self, x):
            return self.lin(x)

    class DS(torch.utils.data.Dataset):
        def __init__(self):
            self.features = torch.linspace(0, 1, 1000).requires_grad_(True)
            self.labels = torch.linspace(0, 1, 1000).requires_grad_(True)

        def __getitem__(self, idx):
            return (self.features[idx], self.labels[idx])

        def __len__(self):
            return len(self.features)

    dl = UniformDataLoader(DS(), batch_size=2)

    model = PrivacyWrapper(Model(), 2, 1.0, 1.0)

    optimizer = torch.optim.Adam(model.wrapped_model.parameters(), lr=1e-2)
    losses = []
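    # deepee step order: backward, clip_and_accumulate, noise_gradient, optimizer.step, prepare_next_batch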
    for feature, label in dl:
        output = model(feature[..., None])
        loss = ((output - label[..., None])**2).mean()
        loss.backward()
        model.clip_and_accumulate()
        model.noise_gradient()
        optimizer.step()
        model.prepare_next_batch()
        losses.append(loss.item())

    assert min(losses) < 0.01
Example #7
def test_inform():
    """Test reporting of epsilon"""
    class BigDS(Dataset):
        def __getitem__(self, idx):
            return torch.rand(1)

        def __len__(self):
            return 50_000

    dl = UniformDataLoader(BigDS(), batch_size=200)
    watchdog = PrivacyWatchdog(
        dl,
        report_every_n_steps=1,
        target_delta=1e-5,
        target_epsilon=1.0,
    )

    class FakeWrapper:
        noise_multiplier = 1.0

    watchdog.wrapper = FakeWrapper
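    # report_every_n_steps=1, so even the first inform() call logs the privacy spent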
    with LogCapture() as l:
        watchdog.inform(1)
        assert "Privacy spent at 1 steps" in str(l)
Example #8
from deepee import UniformDataLoader
from torch.utils.data import Dataset
import torch


class SimpleDataset(Dataset):
    def __init__(self):
        super().__init__()
        self.data = torch.arange(1, 100, 1, dtype=torch.int)

    def __getitem__(self, idx: int) -> torch.Tensor:
        return self.data[idx]

    def __len__(self) -> int:
        return len(self.data)


ds = SimpleDataset()

dl = UniformDataLoader(ds, 50)


def test_dataloader():
    for item in dl:
        assert (
            len(set(item.tolist())) == 50  # compare python ints: raw 0-dim tensors hash by identity
        )  # always returns the correct batch size and never the same item twice
Example #9
from torch.utils.data import DataLoader, Dataset
import torch
import pytest
from testfixtures import LogCapture

from deepee import PrivacyWatchdog, UniformDataLoader, UniformWORSubsampler


class DS(Dataset):
    def __getitem__(self, idx):
        return torch.rand(1)

    def __len__(self):
        return 5


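# module-level fixtures: a plain DataLoader and two uniform without-replacement
# loaders (UniformDataLoader, and a DataLoader driven by UniformWORSubsampler)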
dl = DataLoader(DS())
udl = UniformDataLoader(DS(), 1)
bsdl = DataLoader(DS(), batch_sampler=UniformWORSubsampler(DS(), 5))


def test_uniform_dl():
    with LogCapture() as l:
        watchdog = PrivacyWatchdog(udl, target_delta=1e-5, target_epsilon=1.0)
        watchdog2 = PrivacyWatchdog(bsdl,
                                    target_delta=1e-5,
                                    target_epsilon=1.0)
        watchdog3 = PrivacyWatchdog(dl, target_delta=1e-5, target_epsilon=1.0)
        assert "CRITICAL" and "replacement" in str(l)


def test_epsilon_delta_positive():
    with pytest.raises(ValueError):
        # the watchdog must reject a non-positive privacy budget
        PrivacyWatchdog(udl, target_delta=1e-5, target_epsilon=-1.0)