def patch_prediction(self, patch_point):
    """Upsample a single patch and return the result in original world coordinates.

    The patch is normalized to a unit sphere before being fed to the network,
    and the prediction is mapped back using the recorded centroid and scale.
    """
    # Canonicalize the patch: center at origin, scale to unit radius.
    patch_point, centroid, furthest_distance = pc_util.normalize_point_cloud(patch_point)
    # The graph expects a batch dimension of 1.
    batched = np.expand_dims(patch_point, axis=0)
    pred = self.sess.run([self.pred_pc], feed_dict={self.inputs: batched})
    # Undo the normalization, then drop the leading axis added by the list wrapper.
    denormalized = centroid + pred * furthest_distance
    return np.squeeze(denormalized, axis=0)
def test(self):
    """Upsample every point cloud matched by opts.test_data and save .xyz outputs.

    Builds the generator inference graph, restores weights from a checkpoint,
    then for each input cloud: normalizes, (optionally) jitters, predicts
    per-patch via self.pc_prediction, denormalizes, downsamples to the target
    count with farthest-point sampling, and writes the result to opts.out_folder.
    """
    self.inputs = tf.placeholder(tf.float32, shape=[1, self.opts.patch_num_point, 3])
    is_training = tf.placeholder_with_default(False, shape=[], name='is_training')
    Gen = Generator(self.opts, is_training, name='generator')
    self.pred_pc = Gen(self.inputs)
    # Each generator pass upsamples by a fixed factor (presumably x4 — the
    # 1/4 exponent suggests so; TODO confirm); chain extra passes to reach up_ratio.
    for i in range(round(math.pow(self.opts.up_ratio, 1 / 4)) - 1):
        self.pred_pc = Gen(self.pred_pc)

    saver = tf.train.Saver()
    print("****** phrase test ******")
    ##restore_epoch, checkpoint_path = model_utils.pre_load_checkpoint(self.opts.log_dir)
    ##to use pretrained model comment the line above
    # NOTE(review): checkpoint path is hard-coded to a local machine; restore
    # the commented pre_load_checkpoint call above to use opts.log_dir instead.
    checkpoint_path = "/home/alitokur/Softwares/PU-GAN/model/model-100"
    print(checkpoint_path)
    saver.restore(self.sess, checkpoint_path)

    samples = glob(self.opts.test_data)
    # Infer the input point count from the first sample; all samples are
    # presumably the same size — TODO confirm against the data pipeline.
    point = pc_util.load(samples[0])
    self.opts.num_point = point.shape[0]
    out_point_num = int(self.opts.num_point * self.opts.up_ratio)

    for point_path in samples:
        logging.info(point_path)
        start = time()
        pc = pc_util.load(point_path)[:, :3]
        pc, centroid, furthest_distance = pc_util.normalize_point_cloud(pc)
        if self.opts.jitter:
            pc = pc_util.jitter_perturbation_point_cloud(
                pc[np.newaxis, ...],
                sigma=self.opts.jitter_sigma,
                clip=self.opts.jitter_max)
            pc = pc[0, ...]
        input_list, pred_list = self.pc_prediction(pc)
        end = time()
        print("total time: ", end - start)

        # Merge the per-patch predictions and map back to world coordinates.
        pred_pc = np.concatenate(pred_list, axis=0)
        pred_pc = (pred_pc * furthest_distance) + centroid
        pred_pc = np.reshape(pred_pc, [-1, 3])
        # Portable basename instead of splitting on '/' (fails on Windows paths).
        path = os.path.join(self.opts.out_folder,
                            os.path.basename(point_path)[:-4] + '.ply')
        # BUG FIX: .eval() without a session raises unless a default session is
        # registered; pass self.sess explicitly, matching the sibling test()
        # implementation that uses .eval(session=self.sess).
        idx = farthest_point_sample(
            out_point_num, pred_pc[np.newaxis, ...]).eval(session=self.sess)[0]
        pred_pc = pred_pc[idx, 0:3]
        np.savetxt(path[:-4] + '.xyz', pred_pc, fmt='%.6f')
def test(self):
    """Upsample every point cloud matched by opts.test_data by final_ratio and save .xyz.

    Builds the test model (step_ratio=4 per stage), restores the latest
    checkpoint from opts.log_dir, then for each input cloud: normalizes,
    predicts per-patch via self.pc_prediction, denormalizes, downsamples to
    the target count with farthest-point sampling, and writes
    '<name>_X<final_ratio>.xyz' to opts.out_folder.
    """
    self.opts.batch_size = 1
    final_ratio = self.opts.final_ratio
    # The network upsamples x4 per stage; the model builder chains stages
    # internally to reach final_ratio.
    step_ratio = 4
    self.opts.up_ratio = step_ratio
    self.build_model_test(final_ratio=self.opts.final_ratio, step_ratio=step_ratio)

    saver = tf.train.Saver()
    restore_epoch, checkpoint_path = model_utils.pre_load_checkpoint(
        self.opts.log_dir)
    print(checkpoint_path)
    # BUG FIX: original called self.saver.restore(...), which raises
    # AttributeError unless some other code set self.saver; use the Saver
    # created two lines above.
    saver.restore(self.sess, checkpoint_path)
    #self.restore_model(self.opts.log_dir, epoch=self.opts.restore_epoch, verbose=True)

    samples = glob(self.opts.test_data)
    # Infer the input point count from the first sample; all samples are
    # presumably the same size — TODO confirm against the data pipeline.
    point = pc_util.load(samples[0])
    self.opts.num_point = point.shape[0]
    out_point_num = int(self.opts.num_point * final_ratio)

    for point_path in samples:
        logging.info(point_path)
        start = time()
        pc = pc_util.load(point_path)[:, :3]
        pc, centroid, furthest_distance = pc_util.normalize_point_cloud(pc)
        input_list, pred_list, coarse_list = self.pc_prediction(pc)
        end = time()
        print("total time: ", end - start)

        # Merge per-patch predictions and map back to world coordinates.
        pred_pc = np.concatenate(pred_list, axis=0)
        pred_pc = (pred_pc * furthest_distance) + centroid
        pred_pc = np.reshape(pred_pc, [-1, 3])
        # Downsample the merged (over-dense) cloud to exactly out_point_num.
        idx = farthest_point_sample(
            out_point_num, pred_pc[np.newaxis, ...]).eval(session=self.sess)[0]
        pred_pc = pred_pc[idx, 0:3]
        # path = os.path.join(self.opts.out_folder, point_path.split('/')[-1][:-4] + '.ply')
        # np.savetxt(path[:-4] + '.xyz',pred_pc,fmt='%.6f')
        in_folder = os.path.dirname(self.opts.test_data)
        # Portable basename instead of splitting on '/' (fails on Windows paths).
        path = os.path.join(
            self.opts.out_folder,
            os.path.basename(point_path)[:-4] + '_X%d.xyz' % final_ratio)
        np.savetxt(path, pred_pc, fmt='%.6f')