Example #1
    args.device = torch.device("cuda:" + str(args.gpu) if torch.cuda.is_available() else "cpu")
    train_loader, test_loader, train_eval_loader = get_cifar10_loaders(data_aug=True, batch_size=args.tbsize)

    model = cifar_model(args.model, layers=args.block, norm_type=args.norm, init_option=args.init)
    logger.info(model)
    # Optionally resume from a saved checkpoint
    if args.load != "none":
        model.load_state_dict(torch.load(os.path.join(args.load, "model_final.pt"), map_location=args.device)["state_dict"])
    model.to(args.device)

    loader = {"train_loader": train_loader, "train_eval_loader": train_eval_loader, "test_loader": test_loader}
    if args.opt == "sgd":
        optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, weight_decay=args.decay, momentum=0.9, nesterov=args.nesterov)
        # The learning-rate schedule depends on the model, the adversarial-training setting,
        # the base learning rate, and the epoch budget
        if args.adv == "none":
            if args.model == "ssp2" or args.model == "ssp3":
                scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[70, 120, 160], gamma=0.1)
            else:
                scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[60, 100, 140], gamma=0.1)
                if args.epochs <= 100:
                    scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30, 60, 90], gamma=0.1)
        elif args.lr < 0.1:
            scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[120, 160, 180], gamma=0.1)
        else:
            scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[80, 140, 180], gamma=0.1)
    elif args.opt == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=0.0001, betas=(0., 0.9))
        scheduler = None

    adv_train = args.adv if args.adv != "none" else None
    model = trainer(model, logger, loader, args, "cifar10", optimizer, scheduler, adv_train=adv_train)
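
The snippet above reads many fields from an argparse-style args namespace that is defined elsewhere in the script. Below is a minimal sketch of a parser that would supply the attributes the code accesses (gpu, tbsize, model, block, norm, init, load, opt, lr, decay, nesterov, adv, epochs); the flag types and defaults here are assumptions for illustration, not the script's actual definitions.

# Hypothetical argparse setup inferred from the attributes used above.
# Types and defaults are assumptions; only the attribute names come from the snippet.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--gpu", type=int, default=0)
parser.add_argument("--tbsize", type=int, default=128)        # training batch size
parser.add_argument("--model", type=str, default="ssp2")
parser.add_argument("--block", type=int, nargs="+", default=[2, 2, 2, 2])
parser.add_argument("--norm", type=str, default="batch")
parser.add_argument("--init", type=str, default="default")
parser.add_argument("--load", type=str, default="none")       # checkpoint directory, "none" trains from scratch
parser.add_argument("--opt", type=str, default="sgd", choices=["sgd", "adam"])
parser.add_argument("--lr", type=float, default=0.1)
parser.add_argument("--decay", type=float, default=5e-4)
parser.add_argument("--nesterov", action="store_true")
parser.add_argument("--adv", type=str, default="none")        # adversarial-training mode, "none" disables it
parser.add_argument("--epochs", type=int, default=200)
args = parser.parse_args()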
Example #2
    model.to(args.device)

    train_loader, test_loader, train_eval_loader = get_mnist_loaders()
    loader = {
        "train_loader": train_loader,
        "train_eval_loader": train_eval_loader,
        "test_loader": test_loader
    }
    if args.opt == "sgd":
        optimizer = torch.optim.SGD(model.parameters(), lr=args.lr)
        scheduler = torch.optim.lr_scheduler.MultiStepLR(
            optimizer, milestones=[30, 60, 90], gamma=0.1)
    elif args.opt == "adam":
        optimizer = torch.optim.Adam(model.parameters(),
                                     lr=args.lr,
                                     betas=(0., 0.9))
        scheduler = None
    elif args.opt == "rms":
        optimizer = torch.optim.RMSprop(model.parameters(), lr=1e-3)
        scheduler = None

    adv_train = args.adv if args.adv != "none" else None
    model = trainer(model,
                    logger,
                    loader,
                    args,
                    "mnist",
                    optimizer,
                    scheduler,
                    adv_train=adv_train)
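
Both examples pair SGD with torch.optim.lr_scheduler.MultiStepLR, which multiplies the learning rate by gamma every time a milestone epoch is reached. The self-contained sketch below illustrates that decay behavior; the toy parameter and the milestones are chosen only for demonstration and do not come from the examples above.

# Toy illustration of the MultiStepLR decay used in both examples:
# the learning rate is scaled by gamma at each milestone epoch.
import torch

params = [torch.nn.Parameter(torch.zeros(1))]   # stand-in for model.parameters()
optimizer = torch.optim.SGD(params, lr=0.1)
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30, 60, 90], gamma=0.1)

for epoch in range(100):
    # ... one training epoch would run here ...
    optimizer.step()        # placeholder optimizer update
    scheduler.step()        # lr drops to 0.01 at epoch 30, 0.001 at 60, 1e-4 at 90

print(optimizer.param_groups[0]["lr"])           # 1e-4 once all milestones have passed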