Example #1
0
            features_trainloader[str(l + 1)].append(norm_feat)
        labels.append(target.item())
    for l in range(4):
        features_trainloader[str(l + 1)] = torch.tensor(
            features_trainloader[str(l + 1)])

    pickle.dump(features_trainloader,
                open('features/features_trainloader_tensor_' + net + '.pickle',
                     'wb'),
                protocol=4)
    pickle.dump(labels, open('features/labels_' + net + '.pickle', 'wb'))


if extract:
    # Build a one-sample, unshuffled loader so extracted features keep the
    # dataset's original ordering (labels are collected in the same pass).
    if net == 'mnist':
        libary_loader = get_mnist_train_loader(batch_size=1, shuffle=False)
    elif net == 'fashion_mnist':
        libary_loader = get_fashion_mnist_train_loader(batch_size=1,
                                                       shuffle=False)
    elif net == 'svhn':
        libary_loader = get_svhn_train_loader(batch_size=1, shuffle=False)
    else:
        # Fail fast with a clear message: previously an unknown `net` left
        # `libary_loader` unbound and the call below raised a NameError.
        raise ValueError('unknown net: ' + repr(net))
    extract_libary(model1_, libary_loader, net)

# Load the cached per-layer feature tensors and their labels from disk.
_features_path = 'features/features_trainloader_tensor_' + net + '.pickle'
with open(_features_path, 'rb') as handle:
    features_trainloader = pickle.load(handle)

_labels_path = 'features/labels_' + net + '.pickle'
with open(_labels_path, 'rb') as handle:
    labels = pickle.load(handle)

for l in range(4):
    features_trainloader[str(l + 1)] = features_trainloader[str(l +
Example #2
0
        np.save('train_meta/w_dist_' + str(epoch) + '.npy',
                self.losses['w_dist'])


if __name__ == '__main__':
    LAMBDA = 10
    ITERS = 200
    parser = argparse.ArgumentParser(description='Train MNIST')
    parser.add_argument('--seed', default=0, type=int)
    parser.add_argument('--train_batch_size', default=100, type=int)
    parser.add_argument('--test_batch_size', default=1000, type=int)
    parser.add_argument('--log_interval', default=10, type=int)
    args = parser.parse_args()
    model_filename = "mnist_wass_net.pt"
    torch.manual_seed(args.seed)
    train_loader = get_mnist_train_loader(batch_size=args.train_batch_size,
                                          shuffle=True)
    test_loader = get_mnist_test_loader(batch_size=args.test_batch_size,
                                        shuffle=False)

    filename = "mnist_lenet5_clntrained.pt"
    model = LeNet5()
    model.load_state_dict(
        torch.load(os.path.join(TRAINED_MODEL_PATH, filename),
                   map_location='cpu'))
    model.to(device)
    model.eval()
    adversary = L1BasicIterativeAttack(
        model,
        loss_fn=nn.CrossEntropyLoss(reduction="sum"),
        eps=0.2,
        nb_iter=10,
Example #3
0
from advertorch.test_utils import LeNet5
from advertorch_examples.utils import get_mnist_train_loader
from advertorch_examples.utils import get_mnist_test_loader

import constopt
from constopt.adversary import Adversary
from constopt.optim import PGD, PGDMadry, FrankWolfe, MomentumFrankWolfe

# Setup
# Fix the global RNG seed so weight init and shuffling are reproducible.
torch.manual_seed(0)

# Run on GPU when one is available, otherwise fall back to CPU.
use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")

# Data Loaders
# NOTE(review): the test loader is shuffled here (shuffle=True) — unusual for
# evaluation; confirm this is intentional.
train_loader = get_mnist_train_loader(batch_size=128, shuffle=True)
test_loader = get_mnist_test_loader(batch_size=512, shuffle=True)

# Model setup
model = LeNet5()
model.to(device)
criterion = nn.CrossEntropyLoss()

# Outer optimization parameters (training the model weights)
nb_epochs = 20
optimizer = torch.optim.SGD(model.parameters(), lr=.1, momentum=.9)

# Inner optimization parameters (crafting the adversarial perturbation)
eps = 0.3  # perturbation budget, passed as `alpha` below
# presumably an L-infinity ball of radius eps — confirm against constopt docs
constraint = constopt.constraints.make_LpBall(alpha=eps, p=np.inf)
inner_iter = 40  # NOTE(review): likely the attack's iteration count; consumer is below this chunk