Example #1
    def report_chamfer_presampled(self):
        # Ids of the examples in the held-out evaluation split.
        evaluation_ids = self.data_splitter.get_data(
            tf.estimator.ModeKeys.PREDICT)

        point_cloud_dataset = get_point_clouds(self.preprocessed_data_path,
                                               self.cat_id,
                                               self.n_ffd_resamples)
        point_cloud_dataset = point_cloud_dataset.subset(evaluation_ids)

        mesh_dataset = MeshReader(self.preprocessed_data_path).get_dataset(
            self.cat_id)
        mesh_dataset = mesh_dataset.subset(evaluation_ids)
        mesh_dataset.open()

        deformed_predictions = []
        ground_truth_point_cloud = []
        mesh_ground_truth = []

        # FFD decomposition (deformation matrix b and control points p) for
        # every template mesh in this category.
        ffd_dataset = get_template_ffd(self.preprocessed_data_path,
                                       self.cat_id,
                                       edge_length_threshold=None)

        template_ids, bs, ps = zip(*self.get_ffd_data(ffd_dataset))

        with tf.Graph().as_default():
            dataset = get_dataset(self.preprocessed_data_path,
                                  self.cat_id,
                                  self.view_angles,
                                  self.cloud_resamples,
                                  evaluation_ids,
                                  False,
                                  False,
                                  batch_size=len(evaluation_ids))
            features, targets = dataset.make_one_shot_iterator().get_next()
            predictions = self.build_estimator(
                features, targets, tf.estimator.ModeKeys.PREDICT).predictions
            saver = tf.train.Saver()
            with tf.train.MonitoredSession() as sess:
                saver.restore(sess, tf.train.latest_checkpoint(self.model_dir))
                data = sess.run(predictions)
                point_cloud_dataset.open()
                for evaluation_id, prediction_tensor in zip(
                        evaluation_ids, nested_generator(data)):
                    dp = prediction_tensor['deformed_points']
                    probs = prediction_tensor['probs']
                    # Pick the most probable template and deform it by adding
                    # the predicted offsets to its FFD control points.
                    i = np.argmax(probs)
                    predicted_vertices = np.matmul(bs[i], ps[i] + dp[i])
                    deformed_predictions.append(
                        sample_points(predicted_vertices,
                                      self.n_ffd_resamples))
                    ground_truth_point_cloud.append(
                        point_cloud_dataset[evaluation_id])
                    mesh_ground_truth.append(mesh_dataset[evaluation_id])
            chamfer_list, unnorm_chamfer = get_normalized_chamfer(
                mesh_ground_truth, ground_truth_point_cloud,
                deformed_predictions, self.n_ffd_resamples)
            print("Normalized Chamfer distance on the test set: " +
                  str(np.mean(chamfer_list)))
            print("Unnormalized Chamfer distance on the test set: " +
                  str(np.mean(unnorm_chamfer)))
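For reference, a minimal sketch of the symmetric Chamfer distance between two point clouds (one common convention; get_normalized_chamfer above is assumed to add a normalization by object scale on top of this):

import numpy as np

def chamfer_distance(a, b):
    # a: (n, 3) and b: (m, 3) arrays of 3D points.
    d2 = np.sum((a[:, None, :] - b[None, :, :]) ** 2, axis=-1)  # (n, m) squared distances
    # Mean nearest-neighbour squared distance, taken in both directions.
    return d2.min(axis=1).mean() + d2.min(axis=0).mean()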
Example #2
    def _mesh_transformation(self, edge_length_threshold=0.02):
        # Build a closure that maps network outputs (template probabilities
        # and control-point offsets) to a deformed template mesh.
        ffd_dataset = get_template_ffd(self.preprocessed_data_path,
                                       self.cat_id, edge_length_threshold)

        template_ids, bs, ps = zip(*self.get_ffd_data(ffd_dataset))
        mesh_dataset = get_thresholded_template_mesh(
            self.preprocessed_data_path, self.cat_id, edge_length_threshold)
        with mesh_dataset:
            all_faces = []
            all_vertices = []
            for k in template_ids:
                sg = mesh_dataset[k]
                all_faces.append(np.array(sg['faces']))
                all_vertices.append(np.array(sg['vertices']))

        def transform_predictions(probs, dp):
            i = np.argmax(probs)
            vertices = np.matmul(bs[i], ps[i] + dp[i])
            faces = all_faces[i]
            original_vertices = all_vertices[i]
            return dict(vertices=vertices,
                        faces=faces,
                        original_vertices=original_vertices,
                        attrs=dict(template_id=template_ids[i]))

        return transform_predictions
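A hypothetical helper showing how the returned closure might be used; the names predict_mesh, model and prediction are assumptions, with prediction taken to be one element of nested_generator(data) as produced in Example #1:

def predict_mesh(model, prediction):
    # model: an instance of the class the methods above belong to.
    transform = model._mesh_transformation(edge_length_threshold=0.02)
    # Returns a dict with 'vertices', 'faces', 'original_vertices' and 'attrs'
    # for the most probable template, deformed by the predicted offsets.
    return transform(prediction['probs'], prediction['deformed_points'])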
Example #3
    def get_ffd_data(self, ffd_dataset=None):
        if ffd_dataset is None:
            n_ffd_points = self.n_ffd_samples
            ffd_dataset = get_template_ffd(self.preprocessed_data_path,
                                           self.cat_id,
                                           n_samples=n_ffd_points)
        with ffd_dataset:
            return tuple(self._get_ffd_data(ffd_dataset))
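The private helper _get_ffd_data is not shown in these examples; a sketch of what it presumably yields, inferred from the zip(*self.get_ffd_data(...)) unpacking in Examples #1 and #2 and the 'b'/'p' keys used in Example #4 (assuming the dataset iterates over its template ids and numpy is imported as np):

    def _get_ffd_data(self, ffd_dataset):
        # Yield (template_id, deformation_matrix, control_points) triples so
        # callers can unpack them with zip(*...).
        for template_id in ffd_dataset:
            group = ffd_dataset[template_id]
            yield template_id, np.array(group['b']), np.array(group['p'])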
Example #4
from helper.shapenet.shapenetMapper import desc_to_id
from deformations.FFD import get_template_ffd
from deformations.meshDeformation import get_thresholded_template_mesh
from mayavi import mlab
import numpy as np
from graphicUtils.visualizer.mayaviVisualizer import visualize_mesh, visualize_point_cloud



# FFD decomposition (deformation matrix b and control points p) for the
# templates of the "pistol" category, sampled at 16384 points.
ds = get_template_ffd("/media/saurabh/e56e40fb-030d-4f7f-9e63-42ed5f7f6c711/preprocessing_new", desc_to_id("pistol"),
                      edge_length_threshold=None, n_samples=16384)

key = "1f646ff59cabdddcd810dcd63f342aca"
with ds:
    b = np.array(ds[key]['b'])
    p = np.array(ds[key]['p'])

mesh_dataset = get_thresholded_template_mesh(
    "/media/saurabh/e56e40fb-030d-4f7f-9e63-42ed5f7f6c711/preprocessing_new",
    desc_to_id("pistol"), None)

with mesh_dataset:
    f = np.array(mesh_dataset[key]['faces'])
    v_original = np.array(mesh_dataset[key]['vertices'])

# print(b)
# visualize_mesh(v_original, f)
# mlab.show()

visualize_mesh(np.matmul(b, p), f)
mlab.show()
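Deforming the template only requires offsetting the control points before the same matrix product, exactly as in Examples #1 and #2; a short sketch continuing the script above (the offsets here are made up for illustration):

dp = np.zeros_like(p)       # per-control-point offsets; all zeros leaves the mesh unchanged
dp[0] += 0.1                # nudge one control point to see the effect
visualize_mesh(np.matmul(b, p + dp), f)
mlab.show()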
Example #5
from deformations.FFD import get_template_ffd

# Build the template FFD dataset for category "03948459" with 16384 sampled
# points per template.
get_template_ffd("/media/saurabh/e56e40fb-030d-4f7f-9e63" +
                 "-42ed5f7f6c71/preprocessing_new/",
                 "03948459",
                 n_samples=16384)
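The same call written with the category-name helper instead of the raw synset id, assuming desc_to_id("pistol") resolves to "03948459" (the pistol category used in Example #4):

from helper.shapenet.shapenetMapper import desc_to_id

get_template_ffd("/media/saurabh/e56e40fb-030d-4f7f-9e63" +
                 "-42ed5f7f6c71/preprocessing_new/",
                 desc_to_id("pistol"),
                 n_samples=16384)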