def main():
    """Load the SVHN test split and run black-box attacks on two discriminators."""
    args = parser.parse_args()
    printer = pprint.PrettyPrinter()
    printer.pprint(vars(args))

    # SVHN test set: 26032 examples shipped as a MATLAB .mat file.
    svhn_test = sio.loadmat('./SVHN/test_32x32.mat')
    test_labels = svhn_test['y']
    # SVHN encodes the digit '0' as class 10; remap it back to 0.
    test_labels[test_labels == 10] = 0
    test_images = svhn_test['X'].astype(np.float32) / 256.0
    # Move the example axis first: (H, W, C, N) -> (N, H, W, C).
    test_images = np.transpose(test_images, (3, 0, 1, 2))

    config = vars(args)
    discriminator1 = models.get_discriminator(
        args.d_architecture1,
        scope='discriminator1',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)
    discriminator2 = models.get_discriminator(
        args.d_architecture2,
        scope='discriminator2',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)

    black_box_attacks(discriminator1, discriminator2, args.model_file1,
                      args.model_file2, test_images, test_labels, config)
def main():
    """Run black-box attacks on two discriminators over shuffled MNIST test batches."""
    args = parser.parse_args()
    printer = pprint.PrettyPrinter()
    printer.pprint(vars(args))

    # MNIST test data (10000 examples); see
    # https://gist.github.com/noahfl/0b244346d4ad2501718bbb226be16b1e
    mnist = input_data.read_data_sets('MNIST_data', one_hot=False)
    test_image, test_label = mnist.test.images, mnist.test.labels
    # Flat 784-vectors -> NHWC image tensors.
    test_image = tf.reshape(test_image, [10000, 28, 28, 1])
    test_image_batch = tf.train.shuffle_batch(
        [test_image, test_label],
        batch_size=args.batch_size,
        enqueue_many=True,
        num_threads=16,
        capacity=10000 + 3 * args.batch_size,
        min_after_dequeue=10000)

    config = vars(args)
    discriminator1 = models.get_discriminator(
        args.d_architecture1,
        scope='discriminator1',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)
    discriminator2 = models.get_discriminator(
        args.d_architecture2,
        scope='discriminator2',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)

    black_box_attacks(discriminator1, discriminator2, args.model_file1,
                      args.model_file2, test_image_batch, config)
def main():
    """Run black-box attacks on two discriminators over shuffled CIFAR-10 test batches."""
    args = parser.parse_args()
    printer = pprint.PrettyPrinter()
    printer.pprint(vars(args))

    # Queue up the CIFAR-10 test split listed in splits/<dataset>/test.lst.
    test_filename_queue = get_filename_queue(
        split_file=os.path.join(args.data_dir, 'splits', args.dataset, 'test.lst'),
        data_dir=os.path.join(args.data_dir, args.dataset))
    test_image, test_label = get_input_cifar10(test_filename_queue)
    test_image_batch = tf.train.shuffle_batch(
        [test_image, test_label],
        batch_size=args.batch_size,
        num_threads=16,
        capacity=10000 + 3 * args.batch_size,
        min_after_dequeue=10000)

    config = vars(args)
    discriminator1 = models.get_discriminator(
        args.d_architecture1,
        scope='discriminator1',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)
    discriminator2 = models.get_discriminator(
        args.d_architecture2,
        scope='discriminator2',
        output_size=args.output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)

    black_box_attacks(discriminator1, discriminator2, args.model_file1,
                      args.model_file2, test_image_batch, config)
def main():
    """Build CIFAR-10 train/test batch queues and generate adversarial examples.

    Loads the split named by ``args.split`` for training batches and the
    ``test`` split for evaluation batches, constructs a discriminator, and
    hands everything to ``gen_adv_examples``.
    """
    args = parser.parse_args()
    pp = pprint.PrettyPrinter()
    pp.pprint(vars(args))

    # Training-split queue (splits/<dataset>/<split>.lst).
    filename_queue = get_filename_queue(
        split_file=os.path.join(args.data_dir, 'splits', args.dataset, args.split + '.lst'),
        data_dir=os.path.join(args.data_dir, args.dataset))
    # Test-split queue (splits/<dataset>/test.lst).
    test_filename_queue = get_filename_queue(
        split_file=os.path.join(args.data_dir, 'splits', args.dataset, 'test.lst'),
        data_dir=os.path.join(args.data_dir, args.dataset))

    image, label = get_input_cifar10(filename_queue)
    # CIFAR-10 images are 32x32; the channel count passed to the model below
    # comes from args.c_dim.
    # NOTE(review): the original also assigned a local c_dim = 3 that was
    # never used — args.c_dim is what reaches get_discriminator. Removed the
    # dead local; confirm args.c_dim defaults to 3 for CIFAR-10.
    output_size = 32
    test_image, test_label = get_input_cifar10(test_filename_queue)

    image_batch = create_batch(
        [image, label],
        batch_size=args.batch_size,
        num_preprocess_threads=16,
        min_queue_examples=10000)
    test_image_batch = tf.train.shuffle_batch(
        [test_image, test_label],
        batch_size=args.batch_size,
        num_threads=16,
        capacity=10000 + 3 * args.batch_size,
        min_after_dequeue=10000)

    config = vars(args)
    discriminator = models.get_discriminator(
        args.d_architecture,
        scope='discriminator',
        output_size=output_size,
        c_dim=args.c_dim,
        f_dim=args.df_dim,
        is_training=True)

    gen_adv_examples(discriminator, args.model_file, image_batch,
                     test_image_batch, config)
def main():
    """Train a GAN on SVHN train + a random subset of the extra set.

    Loads the SVHN train and extra .mat files, samples 80000 extra examples,
    concatenates them with the train split, loads the test split, and calls
    ``train`` with a generator/discriminator pair.
    """
    args = parser.parse_args()
    pp = pprint.PrettyPrinter()
    pp.pprint(vars(args))

    # SVHN train split.
    mat_content = sio.loadmat('./SVHN/train_32x32.mat')
    label_npy = mat_content['y']
    # SVHN encodes the digit '0' as class 10; remap it to 0.
    label_npy[label_npy == 10] = 0
    image_npy = mat_content['X'].astype(np.float32) / 256.0

    # SVHN extra split; sample a random subset to keep training tractable.
    extra_content = sio.loadmat('./SVHN/extra_32x32.mat')
    extra_label_npy = extra_content['y']
    extra_label_npy[extra_label_npy == 10] = 0
    extra_image_npy = extra_content['X'].astype(np.float32) / 256.0
    # Derive the pool size from the data instead of hard-coding 531131, so a
    # truncated or updated extra set still works. X is (H, W, C, N).
    num_extra_available = extra_image_npy.shape[3]
    # NOTE(review): np.random.choice samples WITH replacement by default —
    # confirm duplicates in the 80000-subset are intended.
    idx = np.random.choice(num_extra_available, 80000)
    extra_label_npy = extra_label_npy[idx]
    extra_image_npy = extra_image_npy[:, :, :, idx]

    # Concatenate along the example axis, then move it first: -> (N, H, W, C).
    image_combined = np.concatenate((image_npy, extra_image_npy), axis=3)
    image_combined = np.transpose(image_combined, (3, 0, 1, 2))
    label_combined = np.concatenate((label_npy, extra_label_npy), axis=0)

    # SVHN test split: 26032 examples.
    mat_content = sio.loadmat('./SVHN/test_32x32.mat')
    test_label_npy = mat_content['y']
    test_label_npy[test_label_npy == 10] = 0
    test_image_npy = mat_content['X'].astype(np.float32) / 256.0
    test_image_npy = np.transpose(test_image_npy, (3, 0, 1, 2))

    config = vars(args)
    generator = models.get_generator(
        args.g_architecture, output_size=64, c_dim=args.c_dim, f_dim=args.gf_dim)
    discriminator = models.get_discriminator(
        args.d_architecture, output_size=64, c_dim=args.c_dim, f_dim=args.df_dim)

    train(generator, discriminator, config, image_combined, label_combined,
          test_image_npy, test_label_npy)
def main():
    """Train a GAN on shuffled MNIST batches, evaluating on the test split."""
    args = parser.parse_args()
    printer = pprint.PrettyPrinter()
    printer.pprint(vars(args))

    # MNIST train (55000) and test (10000) examples; see
    # https://gist.github.com/noahfl/0b244346d4ad2501718bbb226be16b1e
    mnist = input_data.read_data_sets('MNIST_data', one_hot=False)

    image, label = mnist.train.images, mnist.train.labels
    # Flat 784-vectors -> NHWC image tensors.
    image = tf.reshape(image, [55000, 28, 28, 1])
    image_batch = tf.train.shuffle_batch(
        [image, label],
        batch_size=args.batch_size,
        enqueue_many=True,
        num_threads=16,
        capacity=10000 + 3 * args.batch_size,
        min_after_dequeue=10000)

    test_image, test_label = mnist.test.images, mnist.test.labels
    test_image = tf.reshape(test_image, [10000, 28, 28, 1])
    test_image_batch = tf.train.shuffle_batch(
        [test_image, test_label],
        batch_size=args.batch_size,
        enqueue_many=True,
        num_threads=16,
        capacity=10000 + 3 * args.batch_size,
        min_after_dequeue=10000)

    config = vars(args)
    generator = models.get_generator(
        args.g_architecture, output_size=64, c_dim=args.c_dim, f_dim=args.gf_dim)
    discriminator = models.get_discriminator(
        args.d_architecture, output_size=64, c_dim=args.c_dim, f_dim=args.df_dim)

    train(generator, discriminator, image_batch, test_image_batch, config)
def main():
    """Train a discriminator on pre-generated EAT example/label arrays.

    Loads train and test .npz archives whose paths come from the
    ``eat_train_data`` / ``eat_test_data`` arguments, each containing
    ``examples`` and ``labels`` arrays, then calls ``train``.
    """
    args = parser.parse_args()
    pp = pprint.PrettyPrinter()
    pp.pprint(vars(args))
    config = vars(args)

    # EAT training data.
    # Fix: np.int was deprecated in NumPy 1.20 and removed in 1.24; it was an
    # alias for the builtin int, so astype(int) is the exact replacement.
    mat_content = np.load(config['eat_train_data'])
    label_npy = mat_content['labels'].astype(int)
    image_npy = mat_content['examples'].astype(np.float32)

    # EAT test data.
    mat_content = np.load(config['eat_test_data'])
    test_label_npy = mat_content['labels'].astype(int)
    test_image_npy = mat_content['examples'].astype(np.float32)

    discriminator = models.get_discriminator(
        args.d_architecture, output_size=32, c_dim=args.c_dim, f_dim=args.df_dim)

    train(discriminator, config, image_npy, label_npy,
          test_image_npy, test_label_npy)