Example #1
    def test(self):

        self.inputs = tf.placeholder(tf.float32,
                                     shape=[1, self.opts.patch_num_point, 3])
        is_training = tf.placeholder_with_default(False,
                                                  shape=[],
                                                  name='is_training')
        Gen = Generator(self.opts, is_training, name='generator')
        self.pred_pc = Gen(self.inputs)
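        # Each Generator pass upsamples by a fixed 4x, so it is re-applied
        # until up_ratio is reached; round(up_ratio ** 0.25) - 1 extra passes
        # matches log4(up_ratio) - 1 for up_ratio in {4, 16, 64, 256}.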
        for i in range(round(math.pow(self.opts.up_ratio, 1 / 4)) - 1):
            self.pred_pc = Gen(self.pred_pc)

        saver = tf.train.Saver()
        print("****** phrase test ******")

        # To resume from the latest checkpoint in log_dir instead of the
        # pretrained model below, uncomment the next line:
        # restore_epoch, checkpoint_path = model_utils.pre_load_checkpoint(self.opts.log_dir)
        checkpoint_path = "/home/alitokur/Softwares/PU-GAN/model/model-100"
        print(checkpoint_path)
        saver.restore(self.sess, checkpoint_path)

        samples = glob(self.opts.test_data)
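        # Infer the per-cloud point count from the first sample; each output
        # cloud will contain num_point * up_ratio points.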
        point = pc_util.load(samples[0])
        self.opts.num_point = point.shape[0]
        out_point_num = int(self.opts.num_point * self.opts.up_ratio)

        for point_path in samples:
            logging.info(point_path)
            start = time()
            pc = pc_util.load(point_path)[:, :3]
            pc, centroid, furthest_distance = pc_util.normalize_point_cloud(pc)
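            # Normalization maps the cloud into the unit sphere; centroid and
            # furthest_distance are kept to undo it after prediction.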

            if self.opts.jitter:
                pc = pc_util.jitter_perturbation_point_cloud(
                    pc[np.newaxis, ...],
                    sigma=self.opts.jitter_sigma,
                    clip=self.opts.jitter_max)
                pc = pc[0, ...]

            input_list, pred_list = self.pc_prediction(pc)
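            # input_list / pred_list hold the per-patch inputs and their
            # upsampled predictions.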

            end = time()
            print("total time: ", end - start)
            pred_pc = np.concatenate(pred_list, axis=0)
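            # Undo the earlier unit-sphere normalization.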
            pred_pc = (pred_pc * furthest_distance) + centroid

            pred_pc = np.reshape(pred_pc, [-1, 3])
            path = os.path.join(self.opts.out_folder,
                                point_path.split('/')[-1][:-4] + '.xyz')
            # Trim the over-complete prediction down to exactly out_point_num
            # points with farthest point sampling; eval() fails without a
            # default session, so pass self.sess explicitly.
            idx = farthest_point_sample(
                out_point_num,
                pred_pc[np.newaxis, ...]).eval(session=self.sess)[0]
            pred_pc = pred_pc[idx, 0:3]
            np.savetxt(path, pred_pc, fmt='%.6f')
Example #2
    def test(self):
        self.opts.batch_size = 1
        final_ratio = self.opts.final_ratio
        step_ratio = 4
        self.opts.up_ratio = step_ratio
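        # Upsampling proceeds in cascaded 4x (step_ratio) stages until
        # final_ratio is reached (inferred from the option names).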
        self.build_model_test(final_ratio=self.opts.final_ratio,
                              step_ratio=step_ratio)

        saver = tf.train.Saver()
        restore_epoch, checkpoint_path = model_utils.pre_load_checkpoint(
            self.opts.log_dir)
        print(checkpoint_path)
        saver.restore(self.sess, checkpoint_path)
        #self.restore_model(self.opts.log_dir, epoch=self.opts.restore_epoch, verbose=True)

        samples = glob(self.opts.test_data)
        point = pc_util.load(samples[0])
        self.opts.num_point = point.shape[0]
        out_point_num = int(self.opts.num_point * final_ratio)

        for point_path in samples:
            logging.info(point_path)
            start = time()
            pc = pc_util.load(point_path)[:, :3]
            pc, centroid, furthest_distance = pc_util.normalize_point_cloud(pc)

            input_list, pred_list, coarse_list = self.pc_prediction(pc)

            end = time()
            print("total time: ", end - start)
            pred_pc = np.concatenate(pred_list, axis=0)
            pred_pc = (pred_pc * furthest_distance) + centroid

            pred_pc = np.reshape(pred_pc, [-1, 3])
            idx = farthest_point_sample(
                out_point_num, pred_pc[np.newaxis,
                                       ...]).eval(session=self.sess)[0]
            pred_pc = pred_pc[idx, 0:3]
            path = os.path.join(
                self.opts.out_folder,
                point_path.split('/')[-1][:-4] + '_X%d.xyz' % final_ratio)
            np.savetxt(path, pred_pc, fmt='%.6f')
Example #3
def analyze_uniform(idx_file, radius_file, map_points_file):
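    # Measures how uniformly the points cover the surface: idx_file holds
    # "density:indices" lines for each sampled disk, radius_file the disk
    # radii, and map_points_file the points (coordinates start at column 4).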
    start_time = time()
    points = load(map_points_file)[:, 4:]
    radius = np.loadtxt(radius_file)
    print('radius:', radius)
    with open(idx_file) as f:
        lines = f.readlines()

    sample_number = 1000
    rad_number = radius.shape[0]

    uniform_measure = np.zeros([rad_number, 1])

    densities = np.zeros([rad_number, sample_number])

    # Expected point count inside a disk covering each area percentage.
    expect_number = percentages * points.shape[0]
    expect_number = np.reshape(expect_number, [rad_number, 1])

    for j in range(rad_number):
        uniform_dis = []

        for i in range(sample_number):

            density, idx = lines[i * rad_number + j].split(':')
            densities[j, i] = int(density)
            # Chi-square-style deviation of the observed point count from the
            # expected count at this radius.
            coverage = np.square(densities[j, i] -
                                 expect_number[j]) / expect_number[j]

            num_points = re.findall(r"(\d+)", idx)

            idx = list(map(int, num_points))
            if len(idx) < 5:
                continue

            idx = np.array(idx).astype(np.int32)
            map_point = points[idx]

            # k=2 because the nearest neighbor of a point within its own set
            # is the point itself.
            shortest_dis = cal_nearest_distance(map_point, map_point, 2)
            disk_area = math.pi * (radius[j]**2) / map_point.shape[0]
            # Expected nearest-neighbor distance under ideal hexagonal packing:
            # each point occupies a hexagon of area (sqrt(3)/2) * d^2, so
            # d = sqrt(2 * disk_area / sqrt(3)), with sqrt(3) ~= 1.732.
            expect_d = math.sqrt(2 * disk_area / 1.732)

            dis = np.square(shortest_dis - expect_d) / expect_d
            dis_mean = np.mean(dis)
            uniform_dis.append(coverage * dis_mean)

        uniform_dis = np.array(uniform_dis).astype(np.float32)
        uniform_measure[j, 0] = np.mean(uniform_dis)

    print('time cost for uniform:', time() - start_time)
    return uniform_measure
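
# A minimal usage sketch (file names are assumptions; the idx file must
# contain the "density:indices" lines parsed above):
# uniform = analyze_uniform('idx.txt', 'radius.txt', 'map_points.xyz')
# print(uniform)  # one uniformity score per radius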
Example #4
parser = argparse.ArgumentParser()
parser.add_argument("--pred", type=str, required=True, help=".xyz")
parser.add_argument("--gt", type=str, required=True, help=".xyz")
# --name is referenced below but was missing from the original parser; a
# default is assumed here.
parser.add_argument("--name", type=str, default="", help="run name")
FLAGS = parser.parse_args()
PRED_DIR = os.path.abspath(FLAGS.pred)
GT_DIR = os.path.abspath(FLAGS.gt)
print(PRED_DIR)
NAME = FLAGS.name

print(GT_DIR)
gt_paths = glob(os.path.join(GT_DIR, '*.xyz'))

gt_names = [os.path.basename(p)[:-4] for p in gt_paths]
print(len(gt_paths))

gt = load(gt_paths[0])[:, :3]
pred_placeholder = tf.placeholder(tf.float32, [1, gt.shape[0], 3])
gt_placeholder = tf.placeholder(tf.float32, [1, gt.shape[0], 3])
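# Normalize both clouds to the unit sphere so the Chamfer distance is
# scale-invariant.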
pred_tensor, centroid, furthest_distance = normalize_point_cloud(
    pred_placeholder)
gt_tensor, centroid, furthest_distance = normalize_point_cloud(gt_placeholder)

cd_forward, _, cd_backward, _ = tf_nndistance.nn_distance(
    pred_tensor, gt_tensor)
cd_forward = cd_forward[0, :]
cd_backward = cd_backward[0, :]
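# cd_forward: squared distance from each predicted point to its nearest GT
# point; cd_backward: the reverse. Their means form the two Chamfer terms.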

# Surface-area fractions used by analyze_uniform to compute the expected
# point count per disk.
percentages = np.array([0.008, 0.012])


def cal_nearest_distance(queries, pc, k=2):
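    # (The excerpt ends here; a minimal completion assuming sklearn's
    # NearestNeighbors, not necessarily the original implementation.
    # Requires: from sklearn.neighbors import NearestNeighbors)
    knn_search = NearestNeighbors(n_neighbors=k, algorithm='auto').fit(pc)
    dis, _ = knn_search.kneighbors(queries, return_distance=True)
    # With queries == pc, column 0 is the zero self-distance, so column 1 is
    # each point's distance to its nearest distinct neighbor.
    return dis[:, 1]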