def calc_cosine_sim(task, pgen_type, args, gt_dy, n_epoch=10):
    B = gt_dy.shape[0]
    gt_dy = torch.FloatTensor(gt_dy)
    p_gen = attack_setting.load_pgen(task=task, pgen_type=pgen_type, args=args)
    input_size = attack_setting.pgen_input_size(task=task,
                                                pgen_type=pgen_type,
                                                args=args)
    latent_data = torch.randn((B, *input_size))
    if args.use_gpu:
        latent_data = latent_data.cuda()
        gt_dy = gt_dy.cuda()
    # Put the tensor on CUDA before setting requires_grad to True;
    # otherwise Adam raises "ValueError: can't optimize a non-leaf Tensor".
    latent_data.requires_grad = True

    cos_criterion = torch.nn.CosineSimilarity()
    optimizer = torch.optim.Adam([latent_data], lr=1e-3)
    old_loss = -999
    smallest_loss = 999
    with tqdm(range(n_epoch)) as pbar:
        for i in pbar:
            optimizer.zero_grad()
            projected_data = p_gen.project(latent_data)
            proj_loss = torch.mean(
                -cos_criterion(projected_data.view(B, -1), gt_dy.view(B, -1)))
            proj_loss.backward()
            old_loss = proj_loss.detach().cpu().numpy()
            if old_loss < smallest_loss:
                smallest_loss = old_loss
            optimizer.step()
            pbar.set_description("Epoch %d, cosine similarity %f" %
                                 (i, -old_loss))
    return -smallest_loss
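
# Hypothetical usage sketch (illustration only, not part of the original
# script): check how well the chosen projective generator can approximate a
# batch of ground-truth gradients. The pgen name below is taken from the
# commented-out choices elsewhere in this repo; `gt_dy` is assumed to be a
# numpy array of gradients with shape (B, C, H, W) obtained separately.
def _example_calc_cosine_sim(args, gt_dy):
    best_cos = calc_cosine_sim(task='cifar10',
                               pgen_type='PCA9408basis',
                               args=args,
                               gt_dy=gt_dy,
                               n_epoch=100)
    print('Best achievable cosine similarity: %f' % best_cos)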
#rvs = p_gen.generate_ps(src_image, 10, level=3)
#t2 = time.time()
#print (rvs.shape)
#grad_gt = fmodel.gradient_one(src_image, label=src_label)
#print (grad_gt.shape)
#print (p_gen.calc_rho(grad_gt, src_image))
#t3 = time.time()
#print (t2-t1,t3-t2)
#assert 0

#for PGEN in ['PCA9408basis', 'DCT9408', 'naive', 'resize9408']:
#for PGEN in ['DCT9408', 'naive', 'resize9408']:
for PGEN in ['naive']:
    p_gen = load_pgen(TASK, PGEN, args)
    if TASK == 'cifar':
        if PGEN == 'naive':
            ITER = 150
            maxN = 30
            initN = 30
        elif PGEN.startswith('DCT') or PGEN.startswith('resize'):
            ITER = 150
            maxN = 30
            initN = 30
        elif PGEN.startswith('PCA'):
            ITER = 150
            maxN = 30
            initN = 30
        else:
            raise NotImplementedError()
def calc_internal_dim(task, pgen_type, args):
    B = 5000
    BATCH_SIZE = 32
    p_gen = attack_setting.load_pgen(task=task, pgen_type=pgen_type, args=args)
    input_size = attack_setting.pgen_input_size(task=task,
                                                pgen_type=pgen_type,
                                                args=args)
    latent_data = torch.randn((B, *input_size))
    if p_gen is not None:
        if args.use_gpu:
            latent_data = latent_data.cuda()
        # projected_data = p_gen.project(latent_data)
        # Project the latent samples batch by batch to limit memory usage.
        projected_np = None
        for _i in range(int(B / BATCH_SIZE) + 1):
            _data = latent_data[_i * BATCH_SIZE:(_i + 1) * BATCH_SIZE]
            _B = _data.shape[0]
            if _B < 1:
                break
            _projected = p_gen.project(_data)
            _np = _projected.detach().cpu().numpy().reshape(_B, -1)
            if projected_np is None:
                projected_np = _np
            else:
                projected_np = np.concatenate((projected_np, _np), axis=0)
    else:
        projected_np = latent_data.numpy().reshape(B, -1)

    model_file_name = model_settings.get_model_file_name(TASK=task, args=args)
    if args.do_svd:
        print('Doing svd...')
        u, s, v = np.linalg.svd(projected_np, full_matrices=False)
        np.save(
            'BAPP_result/%s_%s_internal_dim_u.npy' %
            (model_file_name, pgen_type), u)
        np.save(
            'BAPP_result/%s_%s_internal_dim_s.npy' %
            (model_file_name, pgen_type), s)
        np.save(
            'BAPP_result/%s_%s_internal_dim_v.npy' %
            (model_file_name, pgen_type), v)
    else:
        u = np.load('BAPP_result/%s_%s_internal_dim_u.npy' %
                    (model_file_name, pgen_type))
        s = np.load('BAPP_result/%s_%s_internal_dim_s.npy' %
                    (model_file_name, pgen_type))
        v = np.load('BAPP_result/%s_%s_internal_dim_v.npy' %
                    (model_file_name, pgen_type))
    projected_np = u.dot(np.diag(s)).dot(v)

    cos_sims = []
    s_keep = np.zeros(s.shape)
    # inter_gap is assumed to be a module-level stride controlling how often
    # the truncated reconstruction is evaluated.
    with tqdm(range(s.shape[0])) as pbar:
        for i in pbar:
            s_keep[i] = s[i]
            if i % inter_gap == 0:
                # Reconstruct the data keeping only the leading singular values
                # and compare it against the full-rank reconstruction.
                truncated = u.dot(np.diag(s_keep)).dot(v)
                cos_sim = np.mean(
                    utils.calc_cos_sim(x1=projected_np, x2=truncated, dim=1))
                cos_sims.append(cos_sim)
                pbar.set_description('Keep dim %d, cosine similarity %f' %
                                     (i, cos_sim))
                np.save(
                    'BAPP_result/%s_%s_internal_dim.npy' %
                    (model_file_name, pgen_type), cos_sims)
                if cos_sim > 0.9999:
                    break
    print("%s, %s, Cosine similarity for internal dim done" %
          (model_file_name, pgen_type))
    print(cos_sims)
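
# Hypothetical usage sketch (illustration only, not part of the original
# script): estimate the intrinsic dimensionality of each generator's output
# subspace in one sweep. The generator names are taken from the commented-out
# lists elsewhere in this repo; `args.do_svd` is assumed to be set so the SVD
# is computed rather than loaded from a previous run.
def _example_calc_internal_dim(args):
    for pgen_type in ['naive', 'resize9408', 'DCT9408', 'PCA9408basis']:
        calc_internal_dim(task='cifar10', pgen_type=pgen_type, args=args)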
def calc_exp_cos(task, pgen_type, args):
    p_gen = attack_setting.load_pgen(task=task, pgen_type=pgen_type, args=args)
    input_size = attack_setting.pgen_input_size(task=task,
                                                pgen_type=pgen_type,
                                                args=args)
    REF = 'res18'
    if task == 'mnist':
        img_size = args.mnist_img_size
    elif task == 'cifar10':
        img_size = args.cifar10_img_size
    else:
        img_size = 224

    if args.mounted:
        data_dir = 'ANONYMIZED_DIRECTORY/%s_%s' % (task, REF)
        if task == 'cifar10' and args.cifar10_img_size != 32:
            img_size = args.cifar10_img_size
            data_dir = 'ANONYMIZED_DIRECTORY/%s_%d_%s' % (task, img_size, REF)
        if task == 'mnist' and args.mnist_img_size != 28:
            img_size = args.mnist_img_size
            data_dir = 'ANONYMIZED_DIRECTORY/%s_%d_%s' % (task, img_size, REF)
    else:
        data_dir = 'ANONYMIZED_DIRECTORY/%s_%s' % (task, REF)
        if task == 'cifar10' and args.cifar10_img_size != 32:
            img_size = args.cifar10_img_size
            data_dir = 'ANONYMIZED_DIRECTORY/%s_%d_%s' % (task, img_size, REF)
        if task == 'mnist' and args.mnist_img_size != 28:
            img_size = args.mnist_img_size
            data_dir = 'ANONYMIZED_DIRECTORY/%s_%d_%s' % (task, img_size, REF)
    data_path = data_dir + '/test_batch_%d.npy'

    cos_sims = []
    if task == 'celeba':
        N_test = 624
        if REF == 'rnd':
            N_test = 625
    elif task == 'imagenet':
        N_test = 625
        if REF == 'rnd':
            N_test = 625
    elif task == 'celebaid':
        N_test = 3
        if REF == 'rnd':
            N_test = 5 * 3
    elif task == 'cifar10':
        N_test = 313
        if REF == 'rnd':
            N_test = 5 * 313
    elif task == 'mnist':
        N_test = 313
        if REF == 'rnd':
            N_test = 5 * 313

    with tqdm(range(min(10000, N_test))) as pbar:
        for idx in pbar:
            X = np.load(data_path % (idx))
            B = X.shape[0]
            latent_data = torch.randn((B, *input_size))
            if p_gen is not None:
                if args.use_gpu:
                    latent_data = latent_data.cuda()
                projected_data = p_gen.project(latent_data)
                projected_np = projected_data.detach().cpu().numpy().reshape(
                    B, -1)
            else:
                projected_np = latent_data.numpy().reshape(B, -1)
            # if np.sum((X ** 2).sum(1)) < 1e-8 or np.sum((projected_np ** 2).sum(1)) < 1e-8:
            #     continue
            cos_sim = np.mean(
                np.abs(calc_cos_sim(x1=X, x2=projected_np, dim=1)))
            cos_sims.append(cos_sim)
            pbar.set_description('Data # %d, cosine similarity %f' %
                                 (idx, cos_sim))
    print("%s, %s, Expected cos similarity" % (task, pgen_type))
    print(cos_sims)
    np.save('BAPP_result/%s_%d_%s_exp_cos.npy' % (task, img_size, pgen_type),
            cos_sims)
    return np.mean(cos_sims)
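
# Hypothetical usage sketch (illustration only, not part of the original
# script): score several generators by the expected cosine similarity between
# their random projected perturbations and the pre-computed batches loaded by
# calc_exp_cos (assumed here to be reference-model gradients). A higher score
# suggests the generator's subspace is better aligned with the true gradients.
def _example_calc_exp_cos(args):
    scores = {}
    for pgen_type in ['naive', 'resize9408', 'DCT9408', 'PCA9408basis']:
        scores[pgen_type] = calc_exp_cos(task='cifar10',
                                         pgen_type=pgen_type,
                                         args=args)
    print(scores)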
                     simi_threshold=args.threshold)
src_image = load_img(src_img_path)
tgt_image = load_img(tgt_img_path)
src_label = np.argmax(fmodel.forward_one(src_image))
tgt_label = np.argmax(fmodel.forward_one(tgt_image))
if src_label == tgt_label:
    print("src tgt same prediction", src_id, tgt_id)
    continue
print("======== # %d, src id %d, tgt id %d, src label %d, tgt label %d ========"
      % (i, src_id, tgt_id, src_label, tgt_label))

mask = None
p_gen = attack_setting.load_pgen(task=TASK, pgen_type=args.pgen, args=args)
attack = foolbox.attacks.BAPP_custom(fmodel,
                                     criterion=TargetClass(src_label))
adv = attack(tgt_image,
             tgt_label,
             starting_point=src_image,
             iterations=20,
             batch_size=9999,
             stepsize_search='geometric_progression',
             verbose=True,
             unpack=False,
             max_num_evals=100,
             initial_num_evals=100,
             internal_dtype=np.float32,