Example #1
def eval_real_epoch(sess, ops, test_writer):
    """Run the trained model on real-scan .xyz files and save the predicted rotations."""
    PC_PATH = os.path.join(POINTCLOUD_DIR,
                           'Dataset/Data/partialface/real_scan/')
    INPUT_FOLDER = os.path.join(PC_PATH, 'sampled')
    OUTPUT_FOLDER = os.path.join(PC_PATH, 'pred')

    from glob import glob
    is_training = False

    samples = glob(INPUT_FOLDER + "/*.xyz")
    samples.sort()
    total_num = len(samples)
    for i in range(total_num):
        filename = os.path.splitext(os.path.basename(samples[i]))[0]
        print(filename)
        pointclouds_pl = np.loadtxt(samples[i])
        pointclouds_pl = np.expand_dims(pointclouds_pl, axis=0)

        feed_dict = {
            ops['pointclouds_pl']: pointclouds_pl,
            ops['is_training_pl']: is_training,
        }
        # loss_val, pred_angle = sess.run([ops['loss'], ops['pred_angle']], feed_dict=feed_dict)
        pred_angle = sess.run([ops['pred_angle']], feed_dict=feed_dict)
        pred_angle = np.squeeze(pred_angle)
        print(pred_angle.shape)

        print(pred_angle)
        transform_xyz = Tools3D.quaternion_To_rotation_matrix(
            np.squeeze(pred_angle))
        transform_xyz = np.array(transform_xyz)
        print(transform_xyz)
        np.savetxt(os.path.join(INPUT_FOLDER, filename + '.txt'),
                   np.expand_dims(pred_angle, axis=0),
                   fmt='%0.6f')

        # pointcloud_gt_s is assumed to be a module-level ground-truth template cloud.
        point_cloud_transformed = np.matmul(pointcloud_gt_s, transform_xyz)

        # _point_cloud_transformed = sess.run(point_cloud_transformed, feed_dict=feed_dict)
        img_filename = '%d_coarse.png' % i
        img_filename = os.path.join(OUTPUT_FOLDER, img_filename)

        point_input = np.squeeze(pointclouds_pl)
        points_gt = np.squeeze(pointcloud_gt_s)
        points_rotated = np.squeeze(point_cloud_transformed)

        print(points_gt.shape, points_rotated.shape,
              point_cloud_transformed.shape)

        info = 'Nothing'

        pc_util.point_cloud_three_points(point_input, point_cloud_transformed,
                                         point_cloud_transformed, img_filename,
                                         info)
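
The Tools3D.quaternion_To_rotation_matrix helper is not shown in these examples. Below is a minimal NumPy sketch of the standard conversion, assuming the predicted quaternion is ordered (w, x, y, z) and normalizing it first; both are assumptions, and the actual Tools3D implementation may differ.

import numpy as np

def quaternion_to_rotation_matrix(q):
    """Hypothetical stand-in for Tools3D: quaternion (w, x, y, z) -> 3x3 rotation matrix."""
    # Normalize to guard against a non-unit network output.
    w, x, y, z = np.asarray(q, dtype=np.float64) / np.linalg.norm(q)
    return np.array([
        [1 - 2 * (y * y + z * z), 2 * (x * y - w * z),     2 * (x * z + w * y)],
        [2 * (x * y + w * z),     1 - 2 * (x * x + z * z), 2 * (y * z - w * x)],
        [2 * (x * z - w * y),     2 * (y * z + w * x),     1 - 2 * (x * x + y * y)],
    ])

Because the example multiplies points on the left of the matrix (np.matmul(pointcloud_gt_s, transform_xyz)), the matrix acts on row vectors; transpose it if you treat points as column vectors.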
Example #2
def eval_one_epoch(sess, ops, test_writer):
    """Evaluate one epoch of TEST_DATASET and write per-sample visualizations to DUMP_DIR."""
    is_training = False

    # Make sure batch data is of same size
    cur_batch_data = np.zeros(
        (BATCH_SIZE, NUM_POINT, TEST_DATASET.num_channel()))
    cur_batch_angle = np.zeros((BATCH_SIZE, 4))
    cur_batch_label = np.zeros((BATCH_SIZE))

    loss_sum = 0
    batch_idx = 0

    while TEST_DATASET.has_next_batch():
        batch_data, batch_angle, batch_data_label = TEST_DATASET.next_batch(
            augment=True, sess=sess)
        bsize = batch_data.shape[0]
        print('Batch: %03d, batch size: %d' % (batch_idx, bsize))
        # For the last batch in the epoch, entries [bsize:] keep data from the previous batch.
        cur_batch_label[0:bsize] = batch_data_label
        cur_batch_data[0:bsize, ...] = batch_data
        cur_batch_angle[0:bsize, ...] = batch_angle[:, :4]
        # pointcloud_gt_val and pointcloud_gt_big_val are assumed to be module-level ground-truth templates.
        feed_dict = {
            ops['pointclouds_pl']: cur_batch_data,
            ops['pointclouds_gt']: pointcloud_gt_val,
            ops['pointclouds_gt_big']: pointcloud_gt_big_val,
            ops['pointclouds_angle']: cur_batch_angle,
            ops['is_training_pl']: is_training,
        }
        # loss_val, pred_angle = sess.run([ops['loss'], ops['pred_angle']], feed_dict=feed_dict)
        summary, step, loss_val, pred_angle, cd_dists, knn_dists = sess.run(
            [
                ops['merged'], ops['step'], ops['loss'], ops['pred_angle'],
                ops['cd_dists'], ops['knn_dists']
            ],
            feed_dict=feed_dict)

        test_writer.add_summary(summary, step)

        loss_sum += loss_val
        batch_idx += 1

        transform_xyz_input = Tools3D.batch_quaternion2mat(cur_batch_angle)

        transform_xyz = Tools3D.batch_quaternion2mat(pred_angle)
        point_cloud_transformed = np.matmul(pointcloud_gt_val, transform_xyz)
        point_cloud_gt_transformed = np.matmul(pointcloud_gt_val,
                                               transform_xyz_input)

        # _point_cloud_transformed = sess.run(point_cloud_transformed, feed_dict=feed_dict)

        for i in range(bsize):
            index = cur_batch_label[i]
            img_filename = os.path.join(DUMP_DIR, '%d.png' % index)

            points_gt = np.squeeze(point_cloud_gt_transformed[i, :, :])
            points_rotated = np.squeeze(cur_batch_data[i, :, :])
            points_align = np.squeeze(point_cloud_transformed[i, :, :])
            # print(points_rotated.shape)
            # print(point_cloud_transformed.shape)
            # print(points_align.shape)
            info_input = pc_util.log_visu_vec('Input Data %d' % (index),
                                              cur_batch_angle[i, :])
            pre_angle = pc_util.log_visu_vec('Pred Data', pred_angle[i, :])
            matrix_input = pc_util.log_visu_matrix(
                'Input Matrix', np.squeeze(transform_xyz_input[i, :, :]))
            matrix_pred = pc_util.log_visu_matrix(
                'Pred Matrix', np.squeeze((transform_xyz[i, :, :])))
            # print(point_cloud_transformed[i,:,:].shape)
            cd_loss = cd_dists[i]
            knn_loss = knn_dists[i]
            matloss = np.sum(
                np.square(transform_xyz_input[i, :, :] -
                          transform_xyz[i, :, :])) / 2
            vecloss = np.sum(
                np.square(pred_angle[i, :] - cur_batch_angle[i, :])) / 2

            loss_cd = pc_util.log_visu_loss('CD  Loss', cd_loss)
            loss_knn = pc_util.log_visu_loss('KNN  Loss', knn_loss)
            loss_mat = pc_util.log_visu_loss('MAT Loss', matloss)
            vec_mat = pc_util.log_visu_loss('VEC Loss', vecloss)

            info = (info_input + pre_angle + matrix_input + matrix_pred +
                    vec_mat + loss_mat + loss_cd + loss_knn)

            pc_util.point_cloud_three_points(points_rotated, points_gt,
                                             points_align, img_filename, info)

            # scipy.misc.imsave(img_filename, output_img)

    log_string('eval mean loss: %f' % (loss_sum / float(batch_idx)))
    TEST_DATASET.reset()
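
Tools3D.batch_quaternion2mat is presumably the batched counterpart of the conversion sketched after Example #1. A vectorized NumPy sketch under the same (w, x, y, z) ordering assumption:

import numpy as np

def batch_quaternion2mat(q):
    """Hypothetical batched version: (N, 4) quaternions -> (N, 3, 3) rotation matrices."""
    q = np.asarray(q, dtype=np.float64)
    q = q / np.linalg.norm(q, axis=1, keepdims=True)  # normalize each quaternion
    w, x, y, z = q[:, 0], q[:, 1], q[:, 2], q[:, 3]
    R = np.empty((q.shape[0], 3, 3))
    R[:, 0, 0] = 1 - 2 * (y * y + z * z)
    R[:, 0, 1] = 2 * (x * y - w * z)
    R[:, 0, 2] = 2 * (x * z + w * y)
    R[:, 1, 0] = 2 * (x * y + w * z)
    R[:, 1, 1] = 1 - 2 * (x * x + z * z)
    R[:, 1, 2] = 2 * (y * z - w * x)
    R[:, 2, 0] = 2 * (x * z - w * y)
    R[:, 2, 1] = 2 * (y * z + w * x)
    R[:, 2, 2] = 1 - 2 * (x * x + y * y)
    return R

np.matmul then broadcasts the ground-truth cloud against the (N, 3, 3) stack, applying each rotation batch-wise as in the point_cloud_transformed lines above.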
Example #3
    def eval_one_frame(self,
                       points,
                       isSampled=False,
                       isNormalized=False,
                       isSaving=False):
        """Predict the alignment quaternion for a single scanned frame; optionally save the scan and a rendering."""
        is_training = False

        PC_PATH = os.path.join(
            POINTCLOUD_DIR,
            'Dataset/%s/partialface/real_scan/' % (self.model_name))
        OUTPUT_FOLDER = os.path.join(PC_PATH, 'pred')
        #PointCloudOperator.get_pairwise_distance(points)
        if isSampled:
            # Currently applies the identity; applying self.predict_trans.T is disabled.
            point_temp = np.dot(points.copy(), np.eye(3))
            point_temp_max = np.max(point_temp, axis=0)
            point_temp_min = np.min(point_temp, axis=0)

            #print('point_temp_max:',point_temp_max)
            _clip_ = np.where((point_temp[:, 1] < point_temp_max[1] * 0.9))
            points = points[_clip_]

        #  interv = int(points.shape[0]/1024)
        #  print(points.shape[0],interv)
        #  idx = np.arange(points.shape[0])
        #
        #  idx_n = [i for i in idx if i % interv == 0]
        #
        # idx_n = np.arange(points.shape[0])
        # np.random.shuffle(idx_n)
        #
        # if points.shape[0]<POINT_NUM:
        #     offset = POINT_NUM - points.shape[0]
        #     idx_n = np.concatenate([np.arange(points.shape[0]), np.random.randint(0, points.shape[0], size=offset)], axis=0)
        #     np.random.shuffle(idx_n)
        #
        #     #idx_n = np.random.randint(0,points.shape[0],size=POINT_NUM)
        #
        #
        # idx_n = idx_n[:POINT_NUM]
        #
        # np.random.shuffle(idx_n)
        #points = points[idx_n,...]

        if isNormalized:
            centroid = np.mean(points, axis=0, keepdims=True)
            print('------------centroid:', centroid)
            furthest_distance = np.amax(np.sqrt(
                np.sum((points - centroid)**2, axis=-1)),
                                        keepdims=True)
            points = (points - centroid) / furthest_distance
            distance = np.sqrt(np.sum(points**2, axis=-1))
            med_distance = np.median(distance)
            max_distance = np.max(distance)
            scale = max_distance / med_distance * 0.8
            print('med_distance:', med_distance, '--max_distance:',
                  max_distance)
            _clip_ = np.where(distance < scale * med_distance)
            points = points[_clip_]

            centroid = np.mean(points, axis=0, keepdims=True)
            print('------------centroid:', centroid)
            furthest_distance = np.amax(np.sqrt(
                np.sum((points - centroid)**2, axis=-1)),
                                        keepdims=True) * 0.89
            points = (points - centroid) / furthest_distance

        pointclouds_pl = np.expand_dims(points, axis=0)
        # pointclouds_pl[:,:,1:3] =  -pointclouds_pl[:,:,1:3]
        #pointclouds_pl[:,:,2] =  -pointclouds_pl[:,:,2]

        pointcloud_gt_big = np.expand_dims(self.pointcloud_gt_big, axis=0)
        pointcloud_gt_small = np.expand_dims(self.pointcloud_gt_small, axis=0)

        feed_dict = {
            self.ops['pointclouds_pl']: pointclouds_pl,
            self.ops['pointclouds_pl_big']: pointcloud_gt_big,
            self.ops['pointclouds_gt_small']: pointcloud_gt_small,
            self.ops['is_training_pl']: is_training,
        }
        # loss_val, pred_angle = sess.run([ops['loss'], ops['pred_angle']], feed_dict=feed_dict)
        pred_angle = self.sess.run([self.ops['pred_angle']],
                                   feed_dict=feed_dict)
        pred_angle = np.squeeze(pred_angle)
        pred_angle = pred_angle[:4]
        print(pred_angle)
        transform_xyz = Tools3D.quaternion_To_rotation_matrix(pred_angle)
        transform_xyz = np.array(transform_xyz)

        if isSaving and (time() - self.startime > 2.5):
            self.startime = time()
            point_cloud_transformed = np.matmul(self.pointcloud_gt,
                                                transform_xyz)

            point_input = np.squeeze(pointclouds_pl)
            points_gt = np.squeeze(self.pointcloud_gt)
            points_aligned = np.squeeze(point_cloud_transformed)

            info = 'Nothing'
            filename = datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
            pc_filename = os.path.join(OUTPUT_FOLDER, '%s.xyz' % (filename))
            np.savetxt(pc_filename, points, fmt='%0.6f')

            img_filename = os.path.join(OUTPUT_FOLDER, '%s.png' % (filename))
            #img_filename = os.path.join(OUTPUT_FOLDER, '1.png')
            pc_util.point_cloud_three_points(point_input, points_aligned,
                                             points_aligned, img_filename,
                                             info)
        # if self.predict_trans is not None:
        #     transform_xyz = 0.2*transform_xyz + 0.8*self.predict_trans
        self.predict_trans = transform_xyz
        return transform_xyz, points
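
The isNormalized branch in Example #3 centers the scan, clips radial outliers against a scaled median radius, then re-centers and rescales toward the unit sphere. The same steps as a standalone helper (the 0.8 and 0.89 factors are carried over from the example and should be treated as tunable):

import numpy as np

def normalize_scan(points, clip_ratio=0.8, shrink=0.89):
    """Center, outlier-clip, and unit-sphere-normalize an (N, 3) scan."""
    centroid = np.mean(points, axis=0, keepdims=True)
    furthest = np.max(np.linalg.norm(points - centroid, axis=-1))
    points = (points - centroid) / furthest

    # Drop points whose radius exceeds a scaled multiple of the median radius.
    distance = np.linalg.norm(points, axis=-1)
    scale = np.max(distance) / np.median(distance) * clip_ratio
    points = points[distance < scale * np.median(distance)]

    # Re-center and rescale the clipped cloud.
    centroid = np.mean(points, axis=0, keepdims=True)
    furthest = np.max(np.linalg.norm(points - centroid, axis=-1)) * shrink
    return (points - centroid) / furthest

Calling a helper like this before np.expand_dims(points, axis=0) would reproduce the normalization step inside eval_one_frame.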