# --- Fingerprint training chunk (flat script; relies on names defined earlier
# in this file: fp_target, fp_target_pkl, fp_dx, util, args, optim, fp_train,
# Fingerprints, train_loader, test_loader, valid_dataset). Original source is a
# single collapsed line; indentation below is reconstructed — confirm nesting
# against upstream. ---

# Persist the fingerprint target outputs, then convert them for torch use
# (util.np2var presumably moves them to GPU when args.cuda — TODO confirm).
pickle.dump(fp_target, fp_target_pkl)
fp_target_pkl.close()
fp_target = util.np2var(fp_target, args.cuda)
fp = Fingerprints()
fp.dxs = fp_dx
fp.dys = fp_target

from model import CW2_Net as Net
#from res_model import ResNet as Net
from models import *
print("Train using model", Net)
model = Net()
if args.cuda:
    model.cuda()
optimizer = optim.SGD(model.parameters(), lr=args.lr, weight_decay=1e-6,
                      momentum=args.momentum)
print("Args:", args)

val_losses=[]
for epoch in range(1, args.epochs + 1):
    # Record a baseline validation loss before any training (first epoch only);
    # test_length caps evaluation at 10% of the validation set.
    if(epoch==1):
        test_loss = fp_train.test(epoch, args, model, test_loader, fp.dxs,
                                  fp.dys, test_length=0.1*len(valid_dataset))
    fp_train.train(epoch, args, model, optimizer, train_loader, fp.dxs, fp.dys)
    test_loss = fp_train.test(epoch, args, model, test_loader, fp.dxs, fp.dys,
                              test_length=0.1*len(valid_dataset))
    val_losses.append(test_loss)
    # NOTE(review): loss_flag's nesting is ambiguous in the collapsed original;
    # placed inside the loop here, presumably feeding an early-stopping /
    # checkpointing check that follows this chunk — verify against upstream.
    loss_flag = 1
# NOTE(review): this chunk is a duplicated paste of the preceding training
# chunk and is cut off mid-call to fp_train.test(epoch, args, ... — it is not
# valid Python as it stands. Looks like a copy/paste or extraction artifact;
# confirm against upstream and remove or complete it. Left byte-identical.
pickle.dump(fp_target, fp_target_pkl) fp_target_pkl.close() fp_target = util.np2var(fp_target, args.cuda) fp = Fingerprints() fp.dxs = fp_dx fp.dys = fp_target from model import CW2_Net as Net #from res_model import ResNet as Net from models import * print("Train using model", Net) model = Net() if args.cuda: model.cuda() optimizer = optim.SGD(model.parameters(), lr=args.lr, weight_decay=1e-6, momentum=args.momentum) print("Args:", args) val_losses = [] for epoch in range(1, args.epochs + 1): if (epoch == 1): test_loss = fp_train.test(epoch, args,
# --- Evaluation setup chunk (flat script; relies on names defined elsewhere in
# this file: list_advs, args, kwargs, custom_datasets, Fingerprints, torch,
# pickle, os). Builds one DataLoader per adversarial-example dump, loads the
# trained model checkpoint, and loads the fixed fingerprint directions. ---

dataset = 'cifar'
list_adv_loader = []
for advs in list_advs:
    attack_file = os.path.join(args.adv_ex_dir, 'Adv_%s_%s.p' % (dataset, advs))
    # FGSM attack is already shifted/normalized
    adv_loader = torch.utils.data.DataLoader(
        custom_datasets.Adv(filename=attack_file, transp=True),
        batch_size=args.batch_size, shuffle=False, **kwargs)
    list_adv_loader.append(adv_loader)

from model import CW2_Net as Net
print("Eval using model", Net)
model = Net()
print("Loading ckpt", args.ckpt)
model.load_state_dict(torch.load(args.ckpt))
# eval() puts the module in inference mode; the original's extra
# model.train(False) was redundant (train(False) and eval() are equivalent).
model.eval()
if args.cuda:
    model.cuda()
print("Args:", args)

# Load the fixed fingerprint perturbations (dx) and target outputs (dy) with
# context managers so the pickle file handles are closed promptly (the original
# leaked both handles via pickle.load(open(...))).
with open(os.path.join(args.fingerprint_dir, "fp_inputs_dx.pkl"), "rb") as fpf:
    fixed_dxs = pickle.load(fpf)
with open(os.path.join(args.fingerprint_dir, "fp_outputs.pkl"), "rb") as fpf:
    fixed_dys = pickle.load(fpf)
fp = Fingerprints()
fp.dxs = fixed_dxs
# --- SPSA attack-crafting chunk (flat script; relies on out-of-view names:
# K (keras backend), sess, get_data, dataset, args, Net, Model, transfer,
# craft_one_type). Original is a single collapsed line and is CUT OFF mid-call
# to craft_one_type(...) at the end — indentation reconstructed; tokens left
# byte-identical. ---

K.set_session(sess)
K.set_image_data_format('channels_first')
# Sample random test data
_, _, X_test, Y_test = get_data(dataset)
num_samples = np.shape(X_test)[0]
num_rand_samples = 1328
random_samples = np.random.randint(0, num_samples, num_rand_samples)
new_X_test = X_test[random_samples, :, :, :]
new_Y_test = Y_test[random_samples, :]
# NOTE(review): pickle.dump requires a binary-mode handle; mode 'w' raises
# TypeError on Python 3 — almost certainly should be 'wb'. Left unchanged here
# pending confirmation; also prefer a with-block so the handle is closed on
# error.
f = open(os.path.join(args.log_dir, 'Random_Test_%s_.p' % (dataset)), 'w')
pickle.dump({"adv_input": new_X_test, "adv_labels": new_Y_test}, f)
f.close()
if (args.attack == 'spsa' or args.attack == 'all'):
    pytorch_network = Net()
    # NOTE(review): `args_ckpt` (underscore) differs from `args.ckpt` used in
    # the eval chunk above — verify this variable exists and is not a typo.
    pytorch_network.load_state_dict(torch.load(args_ckpt))
    pytorch_network.eval()
    # Wrap the pytorch net, then copy its weights into the keras twin so the
    # keras graph (used by the TF attack) matches the torch model.
    model = Model(torch_model=pytorch_network)
    keras_network = model.model
    transfer.pytorch_to_keras(pytorch_network, model.model)
    pytorch_network.eval()
    model = model.model
    model_logits = model
    batch_size = 16
    # NOTE(review): call below is truncated in the source at this point —
    # remaining arguments are outside the visible chunk.
    craft_one_type(sess, model, new_X_test, new_Y_test, dataset, 'spsa',