def load(model_path, z_rotate, num_points, point_dimension=3):
    """Restore a trained PointNet autoencoder from a checkpoint path.

    Args:
        model_path: Path to a checkpoint file. Its basename must contain the
            epoch after a dash (e.g. '.../train_<experiment>/models.ckpt-500'),
            and its parent directory must be named 'train_<experiment_name>'.
        z_rotate: Whether random z-rotation was used during training. Accepts
            a real bool or the string 'True' (e.g. when passed from argv).
        num_points: Number of points per model the AE was trained on.
        point_dimension: Dimensionality of each point (default 3).

    Returns:
        (ae, conf): the restored PointNetAutoEncoder and its Conf object.
    """
    model_dir = osp.dirname(model_path)
    # Checkpoint basename encodes the epoch after the first dash.
    model_epoch = int(osp.basename(model_path).split('-')[1])
    # Training directory is named 'train_<experiment_name>'.
    experiment_name = osp.basename(osp.dirname(model_path)).split('train_')[1]

    bneck_size = 128     # Bottleneck-AE size (must match training).
    ae_loss = 'chamfer'  # Loss that was optimized: 'emd' or 'chamfer'.

    train_dir = create_dir(osp.join(top_out_dir, experiment_name))
    train_params = default_train_params()
    encoder, decoder, enc_args, dec_args = mlp_architecture_ala_iclr_18(
        num_points, bneck_size, point_dimension=point_dimension)

    conf = Conf(n_input=[num_points, point_dimension],
                loss=ae_loss,
                training_epochs=train_params['training_epochs'],
                batch_size=train_params['batch_size'],
                denoising=train_params['denoising'],
                learning_rate=train_params['learning_rate'],
                loss_display_step=train_params['loss_display_step'],
                saver_step=train_params['saver_step'],
                # Accept either a real bool or the string 'True'; the original
                # 'z_rotate == "True"' mapped a boolean True to False.
                z_rotate=(z_rotate is True or z_rotate == 'True'),
                train_dir=train_dir,
                encoder=encoder,
                decoder=decoder,
                encoder_args=enc_args,
                decoder_args=dec_args,
                experiment_name=experiment_name,
                allow_gpu_growth=True)

    reset_tf_graph()
    ae = PointNetAutoEncoder(conf.experiment_name, conf)
    ae.restore_model(model_dir, model_epoch)
    return ae, conf
# --- Directory layout -----------------------------------------------------
top_out_dir = '../data/'  # Neural-Net check-points etc. are saved here.
top_in_dir = '../data/shape_net_core_uniform_samples_2048/'  # Root of the stored point-clouds.

# --- Experiment / autoencoder configuration -------------------------------
experiment_name = 'test'
n_pc_points = 2048   # Points per model.
bneck_size = 128     # Bottleneck-AE size.
ae_loss = 'chamfer'  # Loss to optimize: 'emd' or 'chamfer'.

# Interactive alternative:
# class_name = raw_input('Give me the class name (e.g. "chair"): ').lower()
class_name = 'chair'

# --- Load the point-clouds for the chosen class ---------------------------
# In[5]:
syn_id = snc_category_to_synth_id()[class_name]
class_dir = osp.join(top_in_dir, syn_id)
all_pc_data = load_all_point_clouds_under_folder(
    class_dir, n_threads=8, file_ending='.ply', verbose=True)

# Default training parameters (partial list; print the configuration object
# for the full set):
#   'batch_size': 50
#   'denoising': False     (by default the AE is not denoising)
#   'learning_rate': 0.0005
#   'z_rotate': False      (randomly rotate models of each batch)