Beispiel #1
0
}
# Attack configuration: perturbation budget EPS, PGD step size LR.
# Defaults come from DEFAULT_EPS when --eps / --pgd_lr are not given.
EPS = DEFAULT_EPS[args.mode] if args.eps is None else args.eps
LR = args.pgd_lr if args.pgd_lr is not None else 2 * EPS / NUM_STEPS

# Standard ImageNet channel statistics for input normalization.
NORMALIZER = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                  std=[0.229, 0.224, 0.225])

SAVE_ITERS = args.save_iters

# Dispatch table: threat-model name -> PGD attack implementation.
ATTACKS = dict(l2=pgd_l2, linf=pgd_linf)
attack = ATTACKS[MODE]


# Loss and optimizer setup.
# NOTE(review): loss_fn, param_set, opt and scheduler are all re-assigned by
# the near-identical block further down this file; this copy appears to be
# superseded dead code — confirm before relying on it.
loss_fn = ch.nn.CrossEntropyLoss()
param_set = net.parameters()

if args.opt == "sgd":
    # Step-decay schedule: LR x0.1 at epochs 50, 100, 150, 500.
    opt = optim.SGD(param_set, lr=args.sgd_lr, momentum=0.9, weight_decay=2e-4)
    scheduler = optim.lr_scheduler.MultiStepLR(opt, milestones=[50,100,150,500], gamma=0.1)
elif args.opt == "adam":
    # NOTE(review): args.sgd_lr doubles as the Adam learning rate despite its
    # name. The single milestone at num_epochs+1 presumably never fires, so
    # the Adam LR stays constant — confirm intended.
    opt = optim.Adam(param_set, lr=args.sgd_lr)
    scheduler = optim.lr_scheduler.MultiStepLR(opt, milestones=[args.num_epochs+1], gamma=0.1)   # lr for mnist is 1e-4, 1e-5
elif args.opt == 'yf':
    opt = YFOptimizer(param_set, lr=args.sgd_lr, clip_thresh=None, adapt_clip=False)
    # NOTE(review): no scheduler is created on this branch, so a later
    # scheduler.step() would raise NameError (or reuse a stale scheduler) —
    # confirm the training loop guards for the 'yf' case.
    #scheduler = optim.lr_scheduler.MultiStepLR(opt, milestones=[args.num_epochs+1], gamma=0.1)


def se(x1, x2, reduction='mean'):
    """Per-sample squared L2 error between two batches, reduced over the batch.

    Each input is flattened to (batch, -1); the squared Euclidean norm of the
    per-sample difference is computed, then reduced.

    Args:
        x1, x2: tensors with identical shapes and a leading batch dimension.
        reduction: 'sum' to sum the per-sample errors over the batch,
            anything else (default 'mean') to average them.

    Returns:
        A scalar tensor with the reduced squared error.
    """
    # Squared L2 norm per sample (norm over all non-batch dims, then squared).
    y = ch.norm((x1-x2).view(x1.shape[0],-1),dim=-1,p=2)**2
    # The original file was truncated here with an empty 'if' suite; the
    # sum/mean reduction below restores the conventional contract.
    if reduction=='sum':
        return y.sum()
    return y.mean()
def encode(x, bypass=False, no_decode=False, only_decode=False):
    """Route a batch through the autoencoder pipeline.

    only_decode: feed x straight into the decoder and return its output.
    bypass: return x untouched.
    Otherwise x is per-sample normalized (unless args.no_norm), then either
    returned as-is (args.use_orig) or passed through the autoencoder.
    """
    if only_decode:
        return ae.decode(x)
    if bypass:
        return x
    if not args.no_norm:
        # Normalize each sample individually, then re-batch.
        x = ch.stack([NORMALIZER(sample) for sample in x])
    return x if args.use_orig else ae(x, no_decode=no_decode)


# Cross-entropy training loss.
loss_fn = ch.nn.CrossEntropyLoss()

# The classifier is always trained; optionally add autoencoder parameters
# (the whole autoencoder, or just its decoder variables).
if args.trainable_encode:
    extra_params = ae.parameters()
elif args.trainable_decode:
    extra_params = ae.decode_vars
else:
    extra_params = None

if extra_params is None:
    param_set = net.parameters()
else:
    param_set = [{'params': net.parameters()}, {'params': extra_params}]

# Optimizer / LR-schedule selection (mirrors the earlier block in this file,
# but with the encode/decode-aware param_set).
if args.opt == "sgd":
    opt = optim.SGD(param_set, lr=args.sgd_lr, momentum=0.9, weight_decay=2e-4)
    # Step-decay schedule: LR x0.1 at epochs 50, 100, 150, 500.
    scheduler = optim.lr_scheduler.MultiStepLR(opt,
                                               milestones=[50, 100, 150, 500],
                                               gamma=0.1)
elif args.opt == "adam":
    # NOTE(review): args.sgd_lr is reused as the Adam learning rate. The lone
    # milestone at num_epochs+1 presumably never fires, keeping the LR
    # constant — confirm intended.
    opt = optim.Adam(param_set, lr=args.sgd_lr)
    scheduler = optim.lr_scheduler.MultiStepLR(
        opt, milestones=[args.num_epochs + 1], gamma=0.1)
elif args.opt == 'yf':