def main(argv=None):
    """Entry point for the C&W (Carlini-Wagner) attack driver.

    Loads the dataset named by ``FLAGS.datasets``, selects a slice of
    samples, and runs :func:`cw` on them, printing the success count and
    the total number of attacked samples.

    Fix: an unrecognized dataset name previously fell through every branch
    and crashed with an opaque ``NameError`` on ``sample``; it now raises
    an explicit ``ValueError``.  Dead commented-out ``imsave`` calls removed.
    """
    datasets = FLAGS.datasets
    start = FLAGS.start
    end = FLAGS.end
    if 'mnist' == datasets:
        train_start = 0
        train_end = 60000
        test_start = 0
        test_end = 10000
        # Get MNIST test data
        X_train, Y_train, X_test, Y_test = data_mnist(train_start=train_start,
                                                      train_end=train_end,
                                                      test_start=test_start,
                                                      test_end=test_end)
        sample = X_test[start:end]
    elif 'cifar10' == datasets:
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 50000
        test_start = 0
        test_end = 10000
        # Get CIFAR10 test data
        X_train, Y_train, X_test, Y_test = data_cifar10(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
        # NOTE(review): unlike the other branches this attacks TRAINING
        # images (X_train), ignoring start/end — confirm this is intended.
        sample = X_train[0:10000]
    elif 'svhn' == datasets:
        # choose the method of preprocess image
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 73257
        test_start = 0
        test_end = 26032
        # Get SVHN test data
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
        sample = X_test[0:100]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    else:
        # Fail fast instead of hitting a NameError on `sample` below.
        raise ValueError('Unsupported dataset: %r' % (datasets,))
    store_path = 'test'
    suc, total = cw(datasets=datasets, sample=sample, model_name=FLAGS.model,
                    target=FLAGS.target, store_path=store_path, ini_con=0.1,
                    start=start, end=end)
    print(suc)
    print(total)
def main(argv=None):
    """Entry point for the FGSM attack driver.

    Loads the dataset selected by ``FLAGS.datasets``, picks a sample slice
    (dumping it to ``FLAGS.sample`` for cifar10/svhn), then launches
    :func:`fgsm` with the step size and epoch taken from FLAGS.
    """
    datasets = FLAGS.datasets
    if datasets == 'mnist':
        # Full MNIST split; attack the first 1000 test images.
        X_train, Y_train, X_test, Y_test = data_mnist(
            train_start=0, train_end=60000, test_start=0, test_end=10000)
        sample = X_test[0:1000]
    elif datasets == 'cifar10':
        # This CIFAR10 loader variant also returns file-name lists.
        X_train, Y_train, fn_train, X_test, Y_test, fn_test = data_cifar10(
            train_start=0, train_end=50000, test_start=0, test_end=10000,
            preprocess=preprocess_image_1)
        sample = X_test[198:199]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    elif datasets == 'svhn':
        # Full SVHN split; single probe image at index 198.
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=0, train_end=73257, test_start=0, test_end=26032,
            preprocess=preprocess_image_1)
        sample = X_test[198:199]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    store_path = '../datasets/experiment/mnist/fgsm/test'
    fgsm(datasets=datasets, sample=sample, model_name=FLAGS.model_name,
         store_path=store_path, step_size=FLAGS.step_size, epoch=FLAGS.epoch)
def main(argv=None):
    """Entry point for the BIM (Basic Iterative Method) attack driver.

    Loads the dataset named by ``FLAGS.datasets``, takes a small slice of
    test images, and runs :func:`bim` on it.

    Fix: an unrecognized dataset name previously fell through every branch
    and crashed with an opaque ``NameError`` on ``sample``; it now raises
    an explicit ``ValueError``.  Dead commented-out code removed.
    """
    datasets = FLAGS.datasets
    if 'mnist' == datasets:
        train_start = 0
        train_end = 60000
        test_start = 0
        test_end = 10000
        # Get MNIST test data
        X_train, Y_train, X_test, Y_test = data_mnist(train_start=train_start,
                                                      train_end=train_end,
                                                      test_start=test_start,
                                                      test_end=test_end)
        sample = X_test[0:10]
    elif 'cifar10' == datasets:
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 50000
        test_start = 0
        test_end = 10000
        # Get CIFAR10 test data
        X_train, Y_train, X_test, Y_test = data_cifar10(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
        sample = X_test[0:2]
    elif 'svhn' == datasets:
        # choose the method of preprocess image
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 73257
        test_start = 0
        test_end = 26032
        # Get SVHN test data
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
        sample = X_test[198:199]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    else:
        # Fail fast instead of hitting a NameError on `sample` below.
        raise ValueError('Unsupported dataset: %r' % (datasets,))
    store_path = 'test0.03'
    # NOTE(review): step_size is passed as the STRING '0.03', matching the
    # store_path suffix — presumably bim() parses it; confirm before
    # changing to a float.
    bim(datasets=datasets, sample=sample, model_name=FLAGS.model,
        store_path=store_path, step_size='0.03')
def main(argv=None):
    """Entry point for the JSMA attack driver.

    Dumps one probe image from the configured dataset to ``FLAGS.sample``,
    then runs :func:`jsma` driven entirely by command-line FLAGS (the
    loaded arrays themselves are not passed on — jsma re-reads the saved
    sample from ``FLAGS.sample``).
    """
    datasets = FLAGS.datasets
    if datasets == 'mnist':
        # Full MNIST split; probe image is the first test sample.
        X_train, Y_train, X_test, Y_test = data_mnist(
            train_start=0, train_end=60000, test_start=0, test_end=10000)
        sample = X_test[0:1]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    elif datasets == 'cifar10':
        # Full CIFAR10 split; probe image at test index 198.
        X_train, Y_train, X_test, Y_test = data_cifar10(
            train_start=0, train_end=50000, test_start=0, test_end=10000,
            preprocess=preprocess_image_1)
        sample = X_test[198:199]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    elif datasets == 'svhn':
        # Full SVHN split; probe image at test index 198.
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=0, train_end=73257, test_start=0, test_end=26032,
            preprocess=preprocess_image_1)
        sample = X_test[198:199]
        imsave(FLAGS.sample, deprocess_image_1(sample))
    jsma(datasets=FLAGS.datasets, sample_path=FLAGS.sample,
         model_name=FLAGS.model, target=FLAGS.target,
         store_path=FLAGS.store_path)
def get_data(datasets):
    """Load the full train/test split for a named dataset.

    Parameters
    ----------
    datasets : str
        One of ``'mnist'``, ``'cifar10'`` or ``'svhn'``.

    Returns
    -------
    tuple
        ``(X_train, Y_train, X_test, Y_test)`` as produced by the
        corresponding loader.

    Raises
    ------
    ValueError
        For any other dataset name.  Previously an unknown name fell
        through every branch and the ``return`` raised an opaque
        ``NameError`` on the unbound locals.
    """
    if 'mnist' == datasets:
        # Full MNIST split: 60k train / 10k test.
        X_train, Y_train, X_test, Y_test = data_mnist(
            train_start=0, train_end=60000, test_start=0, test_end=10000)
    elif 'cifar10' == datasets:
        # Full CIFAR10 split: 50k train / 10k test.
        X_train, Y_train, X_test, Y_test = data_cifar10(
            train_start=0, train_end=50000, test_start=0, test_end=10000,
            preprocess=preprocess_image_1)
    elif 'svhn' == datasets:
        # Full SVHN split: 73257 train / 26032 test.
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=0, train_end=73257, test_start=0, test_end=26032,
            preprocess=preprocess_image_1)
    else:
        raise ValueError('Unsupported dataset: %r' % (datasets,))
    return X_train, Y_train, X_test, Y_test
def mutation_tutorial(datasets, attack, sample_path, store_path, model_path, level=1, test_num=100, mutation_number=1000, mutated=False):
    """Run a mutation test over adversarial and original samples.

    Loads the named dataset, restores the model at ``model_path + datasets``,
    applies ``mutation_number`` random mutations at strength ``level`` to
    ``test_num`` adversarial images found under
    ``sample_path + attack + '/' + datasets``, then repeats the procedure
    on correctly-classified original test images, writing
    ``adv_result.csv``, ``ori_result.csv`` and ``result.csv`` under
    ``store_path + attack + '/' + datasets + '/' + level``.
    """
    if 'mnist' == datasets:
        train_start = 0
        train_end = 60000
        test_start = 0
        test_end = 10000
        # Get MNIST test data
        X_train, Y_train, X_test, Y_test = data_mnist(train_start=train_start,
                                                      train_end=train_end,
                                                      test_start=test_start,
                                                      test_end=test_end)
    elif 'cifar10' == datasets:
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 50000
        test_start = 0
        test_end = 10000
        # Get CIFAR10 test data (this loader also returns file-name lists)
        X_train, Y_train, fn_train, X_test, Y_test, fn_test = data_cifar10(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
    elif 'svhn' == datasets:
        # choose the method of preprocess image
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 73257
        test_start = 0
        test_end = 26032
        # Get SVHN test data
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
    # Restore the trained model; feed_dict carries e.g. dropout placeholders.
    sess, preds, x, y, model, feed_dict = model_load(datasets, model_path + datasets)
    # Generate random mutation matrix for mutations
    store_path = store_path + attack + '/' + datasets + '/' + str(level)
    if not os.path.exists(store_path):
        os.makedirs(store_path)
    result = ''
    sample_path = sample_path + attack + '/' + datasets
    # Load previously stored adversarial images plus their labels.
    [image_list, image_files, real_labels, predicted_labels] = utils.get_data_mutation_test(sample_path)
    # Sub-sample test_num adversarial images without replacement.
    index = np.random.choice(len(image_files), test_num, replace=False)
    image_list = np.asarray(image_list)[index]
    image_files = np.asarray(image_files)[index].tolist()
    predicted_labels = np.asarray(predicted_labels)[index].tolist()
    seed_number = len(image_list)
    if datasets == 'mnist':
        img_rows = 28
        img_cols = 28
        mutation_test = MutationTest(img_rows, img_cols, seed_number, mutation_number, level)
        mutation_test.mutation_generate(mutated, store_path, utils.generate_value_1)
    elif datasets == 'cifar10' or datasets == 'svhn':
        img_rows = 32
        img_cols = 32
        mutation_test = MutationTest(img_rows, img_cols, seed_number, mutation_number, level)
        # 3-channel value generator for the 32x32 color datasets.
        mutation_test.mutation_generate(mutated, store_path, utils.generate_value_3)
    # Mutation test over the adversarial images.
    store_string, result = mutation_test.mutation_test_adv(
        preprocess_image_1, result, image_list, predicted_labels, sess, x,
        preds, image_files, feed_dict)
    with open(store_path + "/adv_result.csv", "w") as f:
        f.write(store_string)
    path = store_path + '/ori_jsma'
    # NOTE(review): reconstructed structure — the sampling/saving of
    # original images appears to run only when the cache directory is
    # missing; ori_x.npy/ori_y.npy are reloaded unconditionally below.
    if not os.path.exists(path):
        os.makedirs(path)
        preds_test = np.asarray([])
        # Predict the 10000 test images in 40 batches of 250.
        for i in range(40):
            preds_test = np.concatenate(
                (preds_test, model_argmax(sess, x, preds, X_test[i * 250:(i + 1) * 250], feed=feed_dict)))
        # Keep only correctly-classified test images, then sample test_num.
        inds_correct = np.asarray(np.where(preds_test == Y_test.argmax(axis=1))[0])
        inds_correct = inds_correct[np.random.choice(len(inds_correct), test_num, replace=False)]
        image_list = X_test[inds_correct]
        real_labels = Y_test[inds_correct].argmax(axis=1)
        np.save(path + '/ori_x.npy', np.asarray(image_list))
        np.save(path + '/ori_y.npy', np.asarray(real_labels))
    image_list = np.load(path + '/ori_x.npy')
    real_labels = np.load(path + '/ori_y.npy')
    # Mutation test over the (cached) original images.
    store_string, result = mutation_test.mutation_test_ori(
        result, image_list, sess, x, preds, feed_dict)
    with open(store_path + "/ori_result.csv", "w") as f:
        f.write(store_string)
    with open(store_path + "/result.csv", "w") as f:
        f.write(result)
    # Close TF session
    sess.close()
    print('Finish.')
def batch_attack(datasets, attack, model_path, store_path, nb_classes):
    """Attack every prepared sample image of a dataset with one attack.

    Loads the dataset, ensures ``sample_path`` is populated with the PNGs of
    test images that the model classifies correctly (filtering out images
    whose prediction changes after the save/load round-trip), then runs the
    selected attack — blackbox/fgsm directly, or jsma/cw once per non-true
    target class — on each stored sample.
    """
    if 'mnist' == datasets:
        train_start = 0
        train_end = 60000
        test_start = 0
        test_end = 10000
        # Get MNIST test data
        X_train, Y_train, X_test, Y_test = data_mnist(train_start=train_start,
                                                      train_end=train_end,
                                                      test_start=test_start,
                                                      test_end=test_end)
    elif 'cifar10' == datasets:
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 50000
        test_start = 0
        test_end = 10000
        # Get CIFAR10 test data (this loader also returns file-name lists)
        X_train, Y_train, fn_train, X_test, Y_test, fn_test = data_cifar10(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
    elif 'svhn' == datasets:
        # choose the method of preprocess image
        preprocess_image = preprocess_image_1
        train_start = 0
        train_end = 73257
        test_start = 0
        test_end = 26032
        # Get SVHN test data
        X_train, Y_train, X_test, Y_test = data_svhn(
            train_start=train_start, train_end=train_end,
            test_start=test_start, test_end=test_end,
            preprocess=preprocess_image)
    store_path = store_path + attack + '/' + datasets
    sample_path = '../datasets/integration/batch_attack/' + datasets + '/'
    sess, preds, x, y, model, feed_dict = model_load(datasets, model_path)
    # Populate the sample directory only when it is empty.
    if os.listdir(sample_path) == []:
        for i in range(len(X_test)):
            sample = X_test[i:i + 1]
            path = sample_path + str(i) + '.png'
            imsave(path, deprocess_image_1(sample))
            # Re-read the saved PNG and re-predict: drop images whose label
            # changes through the save/load/preprocess round-trip.
            current_img = ndimage.imread(path)
            img = np.expand_dims(
                preprocess_image_1(current_img.astype('float64')), 0)
            p = model_argmax(sess, x, preds, img, feed=feed_dict)
            if p != Y_test[i].argmax(axis=0):
                os.remove(path)
        # for i in range(len(X_test)):
        #     sample = X_test[i:i+1]
        #     if model_argmax(sess, x, preds, sample, feed=feed_dict) == Y_test[i].argmax(axis=0):
        #         path = sample_path + str(i) + '.png'
        #         imsave(path, deprocess_image_1(sample))
    # Session is closed here; each attack call below builds its own graph.
    sess.close()
    samples = os.listdir(sample_path)
    for sample in samples:
        # Reset the default graph before each attack to avoid name clashes.
        tf.reset_default_graph()
        if 'blackbox' == attack:
            blackbox(datasets=datasets, sample_path=sample_path + sample,
                     model_path=model_path, store_path=store_path,
                     nb_classes=nb_classes)
        elif 'fgsm' == attack:
            fgsm(datasets=datasets, sample_path=sample_path + sample,
                 model_path=model_path, store_path=store_path,
                 nb_classes=nb_classes)
        else:
            # Targeted attacks: recover the test index from the file name
            # ("<i>.png") and try every class other than the true one.
            i = int(sample.split('.')[-2])
            for j in range(nb_classes):
                tf.reset_default_graph()
                if Y_test[i][j] == 0:
                    if 'jsma' == attack:
                        jsma(datasets=datasets, sample_path=sample_path + sample,
                             target=j, model_path=model_path,
                             store_path=store_path, nb_classes=nb_classes)
                    if 'cw' == attack:
                        cw(datasets=datasets, sample_path=sample_path + sample,
                           target=j, model_path=model_path,
                           store_path=store_path, nb_classes=nb_classes)