Example #1
    def eval_mesh(self, mesh, pointcloud_tgt, normals_tgt, points_iou,
                  occ_tgt):
        ''' Evaluates a mesh.

        Args:
            mesh (trimesh): mesh which should be evaluated
            pointcloud_tgt (numpy array): target point cloud
            normals_tgt (numpy array): target normals
            points_iou (numpy array): points for IoU evaluation
            occ_tgt (numpy array): ground-truth occupancy values at the IoU points
        '''
        if len(mesh.vertices) != 0 and len(mesh.faces) != 0:
            if True:  # crop away walls and floor; the plain-sampling branch below is currently disabled
                pointcloud, idx = mesh.sample(2 * self.n_points,
                                              return_index=True)
                eps = 0.007
                x_max, x_min = (pointcloud_tgt[:, 0].max(),
                                pointcloud_tgt[:, 0].min())
                y_max, y_min = (pointcloud_tgt[:, 1].max(),
                                pointcloud_tgt[:, 1].min())
                z_max, z_min = (pointcloud_tgt[:, 2].max(),
                                pointcloud_tgt[:, 2].min())

                # add small offsets
                x_max, x_min = x_max + eps, x_min - eps
                y_max, y_min = y_max + eps, y_min - eps
                z_max, z_min = z_max + eps, z_min - eps

                mask_x = (pointcloud[:, 0] >= x_min) & (pointcloud[:, 0] <= x_max)
                mask_y = (pointcloud[:, 1] >= y_min)  # floor only: no cut above
                mask_z = (pointcloud[:, 2] >= z_min) & (pointcloud[:, 2] <= z_max)

                mask = mask_x & mask_y & mask_z
                pointcloud_new = pointcloud[mask]
                # Subsample
                idx_new = np.random.randint(pointcloud_new.shape[0],
                                            size=self.n_points)
                pointcloud = pointcloud_new[idx_new]
                idx = idx[mask][idx_new]
            else:
                pointcloud, idx = mesh.sample(self.n_points, return_index=True)

            pointcloud = pointcloud.astype(np.float32)
            normals = mesh.face_normals[idx]
        else:
            pointcloud = np.empty((0, 3))
            normals = np.empty((0, 3))

        out_dict = self.eval_pointcloud(pointcloud, pointcloud_tgt, normals,
                                        normals_tgt)

        if len(mesh.vertices) != 0 and len(mesh.faces) != 0:
            occ = check_mesh_contains(mesh, points_iou)
            out_dict['iou'] = compute_iou(occ, occ_tgt)
        else:
            out_dict['iou'] = 0.

        return out_dict
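
The helpers compute_iou and check_mesh_contains are imported from elsewhere in the evaluation code and are not part of this snippet. Judging only from how compute_iou is called above (two binary occupancy arrays in, a per-example IoU out), a minimal sketch of such a function could look like the following; the actual implementation may differ:

import numpy as np


def compute_iou(occ1, occ2):
    ''' Intersection over union of two (optionally batched) occupancy arrays. '''
    occ1 = np.asarray(occ1)
    occ2 = np.asarray(occ2)

    # Flatten everything except an optional leading batch dimension.
    if occ1.ndim >= 2:
        occ1 = occ1.reshape(occ1.shape[0], -1)
    if occ2.ndim >= 2:
        occ2 = occ2.reshape(occ2.shape[0], -1)

    # Binarize (the callers above already threshold, this just keeps it safe).
    occ1 = occ1 >= 0.5
    occ2 = occ2 >= 0.5

    area_union = (occ1 | occ2).astype(np.float32).sum(axis=-1)
    area_intersect = (occ1 & occ2).astype(np.float32).sum(axis=-1)
    # Guard against an empty union so the result stays finite.
    return area_intersect / np.maximum(area_union, 1.0)
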
    def eval_step(self, data):
        ''' Performs an evaluation step.

        Args:
            data (dict): data dictionary
        '''
        self.model.eval()

        device = self.device
        threshold = self.threshold
        eval_dict = {}

        points = data.get('points').to(device)
        occ = data.get('points.occ').to(device)

        inputs = data.get('inputs', torch.empty(points.size(0), 0)).to(device)
        voxels_occ = data.get('voxels')

        points_iou = data.get('points_iou').to(device)
        occ_iou = data.get('points_iou.occ').to(device)

        batch_size = points.size(0)

        kwargs = {}

        # add pre-computed index
        inputs = add_key(inputs,
                         data.get('inputs.ind'),
                         'points',
                         'index',
                         device=device)
        # add pre-computed normalized coordinates
        points = add_key(points,
                         data.get('points.normalized'),
                         'p',
                         'p_n',
                         device=device)
        points_iou = add_key(points_iou,
                             data.get('points_iou.normalized'),
                             'p',
                             'p_n',
                             device=device)

        # Compute iou
        with torch.no_grad():
            p_out = self.model(points_iou,
                               inputs,
                               sample=self.eval_sample,
                               **kwargs)

        occ_iou_np = (occ_iou >= 0.5).cpu().numpy()
        occ_iou_hat_np = (p_out.probs >= threshold).cpu().numpy()

        iou = compute_iou(occ_iou_np, occ_iou_hat_np).mean()
        eval_dict['iou'] = iou

        # Estimate voxel iou
        if voxels_occ is not None:
            voxels_occ = voxels_occ.to(device)
            points_voxels = make_3d_grid((-0.5 + 1 / 64, ) * 3,
                                         (0.5 - 1 / 64, ) * 3,
                                         voxels_occ.shape[1:])
            points_voxels = points_voxels.expand(batch_size,
                                                 *points_voxels.size())
            points_voxels = points_voxels.to(device)
            with torch.no_grad():
                p_out = self.model(points_voxels,
                                   inputs,
                                   sample=self.eval_sample,
                                   **kwargs)

            voxels_occ_np = (voxels_occ >= 0.5).cpu().numpy()
            occ_hat_np = (p_out.probs >= threshold).cpu().numpy()
            iou_voxels = compute_iou(voxels_occ_np, occ_hat_np).mean()

            eval_dict['iou_voxels'] = iou_voxels

        return eval_dict
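
add_key, used above to attach the pre-computed index and normalized-coordinate data to the inputs and query points, is also defined outside this snippet. The calls suggest it bundles the base tensor and the extra data into a dict under the given key names, and is a no-op when no pre-computed data is present. A rough sketch under that assumption:

def add_key(base, new, base_name, new_name, device=None):
    ''' Bundles `base` with pre-computed data `new` into a dict.

    Returns `base` unchanged when `new` is None, so downstream code can
    accept either a plain tensor or a dict of tensors.
    '''
    if new is None:
        return base
    if device is not None:
        if isinstance(new, dict):
            new = {k: v.to(device) for k, v in new.items()}
        else:
            new = new.to(device)
    return {base_name: base, new_name: new}

Wrapping the extra data in a dict this way lets the model consume pre-computed grid indices or normalized coordinates without changing its call signature.
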
    def eval_step(self, data):
        ''' Performs an evaluation step.

        Args:
            data (dict): data dictionary
        '''
        self.model.eval()

        device = self.device
        threshold = self.threshold
        eval_dict = {}

        # Compute elbo
        points = data.get('points').to(device)
        occ = data.get('points.occ').to(device)

        inputs = data.get('inputs', torch.empty(points.size(0), 0)).to(device)
        voxels_occ = data.get('voxels')

        points_iou = data.get('points_iou').to(device)
        occ_iou = data.get('points_iou.occ').to(device)

        kwargs = {}

        semantic_map = data.get('semantic_map', None)
        with torch.no_grad():
            elbo, rec_error, kl = self.model.compute_elbo(
                points, occ, inputs, semantic_map, **kwargs)

        eval_dict['loss'] = -elbo.mean().item()
        eval_dict['rec_error'] = rec_error.mean().item()
        eval_dict['kl'] = kl.mean().item()

        # Compute iou
        batch_size = points.size(0)

        with torch.no_grad():
            p_out = self.model(points_iou,
                               inputs,
                               sample=self.eval_sample,
                               semantic_map=semantic_map,
                               **kwargs)

        occ_iou_np = (occ_iou >= 0.5).cpu().numpy()
        occ_iou_hat_np = (p_out.probs >= threshold).cpu().numpy()
        iou = compute_iou(occ_iou_np, occ_iou_hat_np).mean()
        eval_dict['iou'] = iou

        # Estimate voxel iou
        if voxels_occ is not None:
            voxels_occ = voxels_occ.to(device)
            points_voxels = make_3d_grid((-0.5 + 1 / 64, ) * 3,
                                         (0.5 - 1 / 64, ) * 3,
                                         voxels_occ.shape[1:])
            points_voxels = points_voxels.expand(batch_size,
                                                 *points_voxels.size())
            points_voxels = points_voxels.to(device)
            with torch.no_grad():
                p_out = self.model(points_voxels,
                                   inputs,
                                   sample=self.eval_sample,
                                   **kwargs)

            voxels_occ_np = (voxels_occ >= 0.5).cpu().numpy()
            occ_hat_np = (p_out.probs >= threshold).cpu().numpy()
            iou_voxels = compute_iou(voxels_occ_np, occ_hat_np).mean()

            eval_dict['iou_voxels'] = iou_voxels

        return eval_dict
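
make_3d_grid, which produces the query points for the voxel IoU in both eval_step variants, is likewise not shown here. Assuming it returns a (shape[0]*shape[1]*shape[2], 3) tensor of points evenly spaced between the given bounds (with a 32³ occupancy grid, the 1/64 offsets above would place the points at the voxel centres of the unit cube), a sketch could be:

import torch


def make_3d_grid(bb_min, bb_max, shape):
    ''' Returns a (shape[0]*shape[1]*shape[2], 3) tensor of grid coordinates
    spanning [bb_min, bb_max] with `shape` points per axis. '''
    size = shape[0] * shape[1] * shape[2]

    pxs = torch.linspace(bb_min[0], bb_max[0], shape[0])
    pys = torch.linspace(bb_min[1], bb_max[1], shape[1])
    pzs = torch.linspace(bb_min[2], bb_max[2], shape[2])

    # Broadcast each axis over the other two, then flatten to a point list.
    pxs = pxs.view(-1, 1, 1).expand(*shape).contiguous().view(size)
    pys = pys.view(1, -1, 1).expand(*shape).contiguous().view(size)
    pzs = pzs.view(1, 1, -1).expand(*shape).contiguous().view(size)

    return torch.stack([pxs, pys, pzs], dim=1)
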