def get_reconstructions(self, pc_input, flags):
    """Reconstruct the given point clouds with a pretrained AtlasNet model.

    Builds an AtlasNet trainer in test mode on the custom input data and runs
    a single gradient-free test epoch.

    Args:
        pc_input: batch of input point clouds; only shape[0] (batch size) and
            dtype are read here — per-cloud layout is handled by the trainer.
        flags: command-line flags forwarded to the AtlasNet argument parser.

    Returns:
        numpy array of shape (len(pc_input), 2500, 3) with the reconstructed
        point clouds (2500 output points per cloud, per the buffer allocated
        below).
    """
    options = argument_parser.parser_transfer(flags)
    options.mode = 'test'
    options.custom_data = True

    # Pre-allocated output buffer; filled in by the trainer's test epoch.
    reconstructions = np.zeros([pc_input.shape[0], 2500, 3], dtype=pc_input.dtype)

    torch.cuda.set_device(options.multi_gpu[0])
    my_utils.plant_seeds(random_seed=options.random_seed)

    # Imported lazily so the heavy AtlasNet training stack is only pulled in
    # when reconstructions are actually requested.
    import transfer.atlasnet.training.trainer as trainer_module
    atlasnet_trainer = trainer_module.Trainer(options)
    atlasnet_trainer.build_dataset(test_pc=pc_input, shuffle_test=False)
    atlasnet_trainer.build_network()
    atlasnet_trainer.build_optimizer()
    atlasnet_trainer.build_losses()
    atlasnet_trainer.start_train_time = time.time()

    # Inference only — no gradients needed.
    with torch.no_grad():
        reconstructions = atlasnet_trainer.test_epoch(pc_recon=reconstructions)

    # Debug-only visualization; flip to True to eyeball input vs. reconstruction.
    debug_plot = False
    if debug_plot:
        plot_3d_point_cloud(pc_input[0])
        plot_3d_point_cloud(reconstructions[0])

    return reconstructions
# Load point clouds object_class = conf.object_class class_names = conf.class_names pc_data, slice_idx, pc_label = load_dataset(class_names, flags.set_type, top_in_dir) point_clouds = pc_data.point_clouds.copy() # Sort point cloud axes if conf.sort_axes: point_clouds_axes_sorted = sort_axes(point_clouds) point_clouds = point_clouds_axes_sorted show = False if show: plot_3d_point_cloud(point_clouds[0]) plot_3d_point_cloud(point_clouds_axes_sorted[0]) # Build AE Model reset_tf_graph() ae = PointNetAutoEncoder(conf.experiment_name, conf) # Reload a saved model ae.restore_model(train_dir, epoch=restore_epoch, verbose=True) # Create evaluation dir eval_dir = create_dir(osp.join(train_dir, flags.output_folder_name)) # Save point clouds data pc_classes_np = np.array(class_names) file_name = '_'.join(['pc_classes'] + object_class) + '.npy'
# NOTE(review): this chunk starts mid-statement — "int(best_t_idx_flatten)]"
# closes an indexing expression whose opening lies before the visible chunk.
int(best_t_idx_flatten)]

# Fixed camera angles for all three subplots; show=False so figures are only
# saved to disk, not displayed.
azim = -40
elev = 20
show = False

save_dir_pc_plots = create_dir(osp.join(save_dir, 'pc_plots'))
save_path = osp.join(
    save_dir_pc_plots, 'adv_%s_%d_target_%s_%d_inputs.png' %
    (pc_class_name, j, t_c_name, best_t_idx))

# Side-by-side plot: source input | adversarial input | target input.
fig = plt.figure(figsize=(15, 5))
ax = fig.add_subplot(131, projection='3d')
plot_3d_point_cloud(source_pc, azim=azim, elev=elev, show=show, axis=ax)
ax = fig.add_subplot(132, projection='3d')
plot_3d_point_cloud(adv_pc_input, azim=azim, elev=elev, show=show, axis=ax)
ax = fig.add_subplot(133, projection='3d')
plot_3d_point_cloud(target_pc, azim=azim, elev=elev, show=show, axis=ax)
plt.savefig(save_path)
# NOTE(review): this chunk starts mid-statement — ").max()" closes an
# expression begun before the visible chunk (a max absolute difference of
# normalized reconstruction errors, judging by the assert below).
adversarial_target_nre).max()

# Sanity check: with identical AE folders the transferred and adversarial
# target normalized recon errors must match up to float precision.
assert diff_target_nre_max < 1e-04, \
    'when transfer_ae_folder and ae_folder are the same, the ae target normalized recon error should also be the same! (up to precision errors)'

# Stack the four per-example metric arrays along a new trailing axis.
transfer_metrics = np.concatenate([
    np.expand_dims(m, axis=-1) for m in [
        transferred_target_recon_error, transferred_target_nre,
        adversarial_target_recon_error, adversarial_target_nre
    ]
], axis=-1)

# Debug-only visualization; set show = True to inspect one example.
show = False
if show:
    j, k = 0, 0
    plot_3d_point_cloud(source_pc[k], title='source pc')
    plot_3d_point_cloud(target_pc[k], title='target pc')
    plot_3d_point_cloud(adversarial_pc_input[j, k], title='adversarial pc input')
    plot_3d_point_cloud(adversarial_pc_recon[j, k], title='adversarial pc recon')
    plot_3d_point_cloud(transferred_pc_recon[j, k], title='transferred pc recon')

# save results
# NOTE(review): nesting of both saves under the folder-inequality check is
# reconstructed from the mangled layout — confirm against the original file.
if flags.transfer_ae_folder != flags.ae_folder:
    np.save(osp.join(save_dir, 'transferred_pc_recon'), transferred_pc_recon)
    np.save(osp.join(save_dir, 'transfer_metrics'), transfer_metrics)

duration = time.time() - start_time
# Load point clouds pc_data_train, _, _ = load_dataset(class_names, 'train_set', top_in_dir) pc_data_val, _, _ = load_dataset(class_names, 'val_set', top_in_dir) # Sort point cloud axes if flags.sort_axes: point_clouds_train_axes_sorted = sort_axes(pc_data_train.point_clouds) pc_data_train.point_clouds = point_clouds_train_axes_sorted point_clouds_val_axes_sorted = sort_axes(pc_data_val.point_clouds) pc_data_val.point_clouds = point_clouds_val_axes_sorted show = False if show: plot_3d_point_cloud(pc_data_train.point_clouds[0]) plot_3d_point_cloud(point_clouds_train_axes_sorted[0]) plot_3d_point_cloud(pc_data_val.point_clouds[0]) plot_3d_point_cloud(point_clouds_val_axes_sorted[0]) if len(class_names) > 1: pc_data_train.shuffle_data(seed=55) pc_data_val.shuffle_data(seed=55) # Build AE Model reset_tf_graph() ae = PointNetAutoEncoder(conf.experiment_name, conf) # Train the AE (save output to train_stats.txt) buf_size = 1 # Make 'training_stats' file to flush each output line regarding training. fout = open(osp.join(conf.train_dir, 'train_stats.txt'), 'a', buf_size)
# define basic parameters project_dir = osp.dirname(osp.dirname(osp.abspath(__file__))) data_path = create_dir(osp.join(project_dir, flags.ae_folder, 'eval')) files = [ f for f in os.listdir(data_path) if osp.isfile(osp.join(data_path, f)) ] # load data point_clouds, latent_vectors, pc_classes, slice_idx = \ load_data(data_path, files, ['point_clouds_test_set', 'latent_vectors_test_set', 'pc_classes', 'slice_idx_test_set']) show = False if show: n = 0 plot_3d_point_cloud(point_clouds[n]) slice_idx_file_name = [f for f in files if 'slice_idx_test_set' in f][0] file_name_parts = slice_idx_file_name.split('_') # constants num_classes = len(pc_classes) range_num_classes = range(num_classes) # reproducibility seed = 55 def get_rand_idx(): # loop over categories sel_idx = -1 * np.ones([num_classes, flags.num_instance_per_class],
# NOTE(review): this chunk starts mid-expression — these names are the trailing
# elements of a metric list folded (via np.expand_dims) into an np.concatenate
# call begun before the visible chunk.
        defended_source_recon_error, defended_source_nre,
        adversarial_source_recon_error, adversarial_source_nre
    ]
], axis=-1)

# data above max number of outliers can be discarded — trim the padded outlier
# arrays down to the largest outlier count actually observed.
outlier_num_max = adversarial_outlier_num.max()
adversarial_outlier_points = adversarial_outlier_points[:, :, :outlier_num_max, :]
adversarial_outlier_idx = adversarial_outlier_idx[:, :, :outlier_num_max]

# Debug-only visualization; set show = True to inspect one example.
show = False
if show:
    j, k = 0, 0
    plot_3d_point_cloud(adversarial_pc_input[j, k])
    plot_3d_point_cloud(adversarial_pc_recon[j, k])
    plot_3d_point_cloud(defended_pc_input[j, k])
    plot_3d_point_cloud(defended_pc_recon[j, k])

# save results
np.save(osp.join(save_dir, 'adversarial_critical_points'), adversarial_outlier_points)
np.save(osp.join(save_dir, 'adversarial_critical_idx'), adversarial_outlier_idx)
np.save(osp.join(save_dir, 'adversarial_critical_num'), adversarial_outlier_num)
np.save(osp.join(save_dir, 'defended_pc_input'), defended_pc_input)
np.save(osp.join(save_dir, 'defended_pc_recon'), defended_pc_recon)
np.save(osp.join(save_dir, 'defense_metrics'), defense_metrics)
# Per-point colors for the adversarial input: blue everywhere, red on the
# critical (outlier) points identified by the defense.
adv_pc_input_point_color = np.array(['b'] * num_points)
adv_pc_input_point_color[adv_critical_idx[:adv_critical_num]] = 'r'

# Defended input drops the last adv_critical_num (removed) points.
def_pc_input = defended_pc_input[best_dist_weight_idx,
                                 int(best_t_idx_flatten), :-adv_critical_num]
def_pc_recon = defended_pc_recon[best_dist_weight_idx,
                                 int(best_t_idx_flatten)]

# Fixed camera angles; show=False so figures are only saved, not displayed.
azim = -40
elev = 20
show = False

save_dir_pc_plots = create_dir(osp.join(load_dir_defense, 'analysis_results',
                                        'pc_plots'))
save_path = osp.join(save_dir_pc_plots,
                     'def_%s_%d_target_%s_%d_inputs.png' %
                     (pc_class_name, j, t_c_name, best_t_idx))

# Inputs figure: source | adversarial (critical points in red) | defended.
fig = plt.figure(figsize=(15, 5))
ax = fig.add_subplot(131, projection='3d')
plot_3d_point_cloud(source_pc, azim=azim, elev=elev, show=show, axis=ax)
ax = fig.add_subplot(132, projection='3d')
plot_3d_point_cloud(adv_pc_input, azim=azim, elev=elev, show=show, axis=ax,
                    c=adv_pc_input_point_color)
ax = fig.add_subplot(133, projection='3d')
plot_3d_point_cloud(def_pc_input, azim=azim, elev=elev, show=show, axis=ax)
plt.savefig(save_path)
plt.close()

save_path = osp.join(save_dir_pc_plots,
                     'def_%s_%d_target_%s_%d_recons.png' %
                     (pc_class_name, j, t_c_name, best_t_idx))

# Reconstructions figure: source recon | adversarial recon | defended recon.
fig = plt.figure(figsize=(15, 5))
ax = fig.add_subplot(131, projection='3d')
plot_3d_point_cloud(source_pc_recon, azim=azim, elev=elev, show=show, axis=ax)
ax = fig.add_subplot(132, projection='3d')
plot_3d_point_cloud(adv_pc_recon, azim=azim, elev=elev, show=show, axis=ax)
ax = fig.add_subplot(133, projection='3d')
plot_3d_point_cloud(def_pc_recon, azim=azim, elev=elev, show=show, axis=ax)
# prepare data for attack source_pc, _ = prepare_data_for_attack(pc_classes, [pc_class_name], classes_for_target, point_clouds, slice_idx, attack_pc_idx, conf.num_pc_for_target, nn_idx, correct_pred) # load data load_dir = osp.join(output_path, pc_class_name) # adversarial metrics: loss_adv, loss_dist, source_chamfer_dist, target_nre, target_recon_error adversarial_metrics = np.load(osp.join(load_dir, 'adversarial_metrics.npy')) adversarial_pc_input = np.load(osp.join(load_dir, 'adversarial_pc_input.npy')) source_chamfer_dist = adversarial_metrics[:, :, 2] show = False if show: plot_idx = 0 plot_3d_point_cloud(source_pc[plot_idx]) plot_3d_point_cloud(adversarial_pc_input[-1, plot_idx]) num_dist_weight, num_examples_curr, _, _ = adversarial_pc_input.shape adversarial_pc_input_dists = -1 * np.ones(adversarial_pc_input.shape[:3], dtype=np.float32) for j in range(num_dist_weight): for k in range(0, num_examples_curr, chamfer_batch_size): adv_pc_batch = adversarial_pc_input[j, k:k + chamfer_batch_size] inp_pc_batch = source_pc[k:k + chamfer_batch_size] feed_dict = {adv_pc_pl: adv_pc_batch, inp_pc_pl: inp_pc_batch} dists_first_to_second_batch, dist_batch = sess.run([dists_first_to_second, chamfer_dist], feed_dict=feed_dict) # sanity check if flags.do_sanity_checks:
# NOTE(review): this chunk starts mid-statement — the closing paren below ends
# an assignment (presumably of top_in_dir) begun before the visible chunk.
)  # Top-dir of where point-clouds are stored.

# Load the test split for a single ShapeNet class ('chair' via its synset id).
class_name = 'chair'
syn_id = snc_category_to_synth_id()[class_name]
class_dir = osp.join(top_in_dir, syn_id)
_, _, pc_data_test = load_and_split_all_point_clouds_under_folder(
    class_dir, n_threads=8, file_ending='.ply', verbose=True)

################
# euler2mat_np #
################
# Rotate four test clouds by -pi/2 about the z axis with the numpy variant and
# plot each input next to its rotated version.
pc = pc_data_test.point_clouds[0:4]
rot = np.array([0, 0, -0.5 * np.pi])
pc_rot = euler2mat_np(pc, rot)
plot_3d_point_cloud(pc[0], title='input 0')
plot_3d_point_cloud(pc_rot[0], title='input 0 rot 90 deg z')
plot_3d_point_cloud(pc[1], title='input 1')
plot_3d_point_cloud(pc_rot[1], title='input 1 rot 90 deg z')
plot_3d_point_cloud(pc[2], title='input 2')
plot_3d_point_cloud(pc_rot[2], title='input 2 rot 90 deg z')
plot_3d_point_cloud(pc[3], title='input 3')
plot_3d_point_cloud(pc_rot[3], title='input 3 rot 90 deg z')

################
# euler2mat_tf #
################
# Same cloud replicated 4 times, one per-row rotation each (TF variant).
pc = np.tile(pc_data_test.point_clouds[0:1], [4, 1, 1])
# NOTE(review): chunk ends mid-statement — the rotation array is cut off here.
rot = np.array(
    [
        [-0.5 * np.pi, 0, 0],  # 90 deg x