# Pick evaluation points that every model in model_dict handles correctly,
# then craft FGSM adversarial examples and evaluate transfer from the source
# model (`model`, wrapped as `wrap`) to `model_target`. Finally set up a
# diverse-input Momentum Iterative attack on the same wrapped model.
Xdata_dict[1] = X_test
indices = get_indices(model_dict, Xdata_dict, y_test, len(model_dict))
# FIX: cap the sample size — np.random.choice(..., replace=False) raises
# ValueError when fewer than 1000 indices survive the filtering above.
indices = np.random.choice(indices, min(1000, len(indices)), replace=False)

# --- FGSM ---
print("FGSM")
fgsm_params = {'eps': 0.03, 'clip_min': 0., 'clip_max': 1.}
fgsm_attack = FastGradientMethod(wrap, sess=sess)
# Adversarial images are generated in batches of 200 to bound memory;
# assumes 32x32x3 inputs (CIFAR-like) — TODO confirm against X_test.
X_adv = np.zeros((len(indices), 32, 32, 3))
for i in range(0, len(indices), 200):
    X_adv[i:(i + 200)] = fgsm_attack.generate_np(
        X_test[indices[i:(i + 200)]], **fgsm_params)
print("metrics")
print(metrics(model, X_adv, X_test, y_test, indices))          # white-box (source)
print(metrics(model_target, X_adv, X_test, y_test, indices))   # transfer (target)

# --- MIM with input diversity ---
print("MIM-DIVERSE")
mim_params = {
    'eps': 0.03,
    'eps_iter': 0.01,
    'nb_iter': 40,
    'ord': np.inf,
    'clip_min': 0.,
    'clip_max': 1.,
    'prob': 0.8,  # probability of applying the input-diversity transform
}
mim_attack = MomentumIterativeMethod_Diverse(wrap, sess=sess)
# Force Keras into inference mode before handing the model to CleverHans,
# then run FGSM on a pre-selected index set and report source/target metrics.
backend._LEARNING_PHASE = tf.constant(0)
backend.set_learning_phase(0)
wrap = KerasModelWrapper(model)

##### FGSM
print("FGSM")
fgsm_params = {'eps': 0.03, 'clip_min': 0., 'clip_max': 1.}
fgsm = FastGradientMethod(wrap, sess=sess)
X_adv = np.zeros((len(indices), 32, 32, 3))
# Craft adversarial examples in batches of `batch_attack` to bound memory.
for i in range(0, len(indices), batch_attack):
    batch_idx = indices[i:(i + batch_attack)]
    X_adv[i:i + batch_attack] = fgsm.generate_np(X_test[batch_idx],
                                                 **fgsm_params)
print("results on source model: ")
results = metrics(model, X_adv, X_test, y_test, indices)
print(results)
print("results on target model: ")
results = metrics(model_target, X_adv, X_test, y_test, indices)
print(results)

##### BIM
print("BIM")
# Deterministic iterative FGSM: no random start, many small steps
# (step size eps/100 over 300 iterations, clipped to the eps-ball).
bim_params = {
    'eps': 0.03,
    'nb_iter': 300,
    'eps_iter': 0.03 / 100,
    'ord': np.inf,
    'clip_min': 0.,
    'clip_max': 1.,
    'rand_init': False,
}
#################################### #FGSM print("\n\n") print("FGSM") fgsm_params = {'eps': float(sys.argv[1]), 'clip_min': 0., 'clip_max': 1.} fgsm_source = FastGradientMethod(wrap_source, sess=sess) X_adv_source = np.zeros((len(indices_test), 32, 32, 3)) for i in np.arange(0, len(indices_test), 500): X_adv_source[i:(i + 500)] = fgsm_source.generate_np( X_test[indices_test[i:(i + 500)]], **fgsm_params) print("metrics source model") print(metrics(model_source, X_adv_source, X_test, pred_source, indices_test)) print("metrics base model") print(metrics(model, X_adv_source, X_test, pred_base, indices_test)) pred_source_adv = np.argmax(model_source.predict(X_adv_source), axis=1) pred_adv_basefromsource = np.argmax(model.predict(X_adv_source), axis=1) agree_func(indices_test, pred_adv_basefromsource, pred_source_adv, pred_base, pred_source) print(" ") #################################### #MIM print("\n\n") print("MIM") mim_params = {
# Restrict evaluation to test points the model already classifies correctly,
# then attack them with FGSM (plus a Gaussian-noise re-check) and set up PGD.
pred_clean = np.argmax(model.predict(X_test), axis=1)
well_pred = np.arange(0, len(X_test))[pred_clean == y_test]
indices = np.random.choice(well_pred, 1000, replace=False)

#############################
# FGSM
fgsm_params = {
    'eps': 0.03,
}
# NOTE(review): unlike the PGD setup below, no clip_min/clip_max is given, so
# adversarial pixels may leave the valid input range — confirm this is intended.
fgsm_attack = FastGradientMethod(wrap, sess=sess)
X_adv = np.zeros((len(indices), 32, 32, 3))
# Batches of 200 keep the per-call memory footprint bounded.
for i in range(0, len(indices), 200):
    batch_idx = indices[i:(i + 200)]
    X_adv[i:(i + 200)] = fgsm_attack.generate_np(X_test[batch_idx],
                                                 **fgsm_params)
print(metrics(model, X_adv, X_test, y_test, indices))
# Re-evaluate after stacking Gaussian noise (sigma=0.05) on the adversarial
# images, to see whether the perturbation survives random corruption.
X_adv_noise = X_adv + np.random.normal(0, 0.05, size=X_adv.shape)
print(metrics(model, X_adv_noise, X_test, y_test, indices))

#############################
# PGD
# Randomly-initialized iterative attack inside the L-inf eps-ball.
pgd_params = {
    'eps': 0.03,
    'eps_iter': 0.01,
    'nb_iter': 100,
    'ord': np.inf,
    'rand_init': True,
}
pgd_attack = ProjectedGradientDescent(wrap, sess=sess)
X_adv = np.zeros((len(indices), 32, 32, 3))