Example #1
    def normalize_to_box_(self):
        """
        Center and scale the point clouds to a unit cube.
        Returns:
            normalizing_trans (Transform3D): Transform3D used to normalize the point clouds
        """
        # (B,3,2)
        boxMinMax = self.get_bounding_boxes()
        boxCenter = boxMinMax.sum(dim=-1) / 2
        # (B,)
        boxRange, _ = (boxMinMax[:, :, 1] - boxMinMax[:, :, 0]).max(dim=-1)
        # guard against degenerate (zero-extent) clouds; torch.where keeps the
        # check element-wise so it also works for batches with more than one cloud
        boxRange = torch.where(boxRange == 0, torch.ones_like(boxRange), boxRange)

        # center and scale the point clouds, likely faster than calling obj2world_trans directly?
        pointOffsets = torch.repeat_interleave(-boxCenter,
                                               self.num_points_per_cloud(),
                                               dim=0)
        self.offset_(pointOffsets)
        self.scale_(1 / boxRange)

        # update obj2world_trans
        normalizing_trans = Translate(-boxCenter).compose(Scale(
            1 / boxRange)).to(device=self.device)
        self.obj2world_trans = normalizing_trans.inverse().compose(
            self.obj2world_trans)
        return normalizing_trans
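A minimal usage sketch for the method above. The listing does not show the containing class, so the class name and import path below (PointClouds3D from DSS.core.cloud) are assumptions based on the cloud.py / yifita/DSS attribution later on this page; substitute whatever point-cloud container actually defines normalize_to_box_:

import torch
from DSS.core.cloud import PointClouds3D  # hypothetical path, not confirmed by the listing

# an arbitrary, off-center cloud with a single batch element
pcl = PointClouds3D(points=torch.rand(1, 1024, 3) * 10.0 + 5.0)

normalizing_trans = pcl.normalize_to_box_()   # normalizes in place, returns the transform
normalized = pcl.points_packed()
# after normalization the longest bounding-box side has length 1 and the box is centered at the origin
print(normalized.min(dim=0).values, normalized.max(dim=0).values)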
Example #2
    def normalize_to_sphere_(self):
        """
        Center and scale the point clouds to a unit sphere
        Returns: normalizing_trans (Transform3D)
        """
        # (B,3,2)
        boxMinMax = self.get_bounding_boxes()
        boxCenter = boxMinMax.sum(dim=-1) / 2
        # (B,)
        boxRange, _ = (boxMinMax[:, :, 1] - boxMinMax[:, :, 0]).max(dim=-1)
        # guard against degenerate (zero-extent) clouds; torch.where keeps the
        # check element-wise so it also works for batches with more than one cloud
        boxRange = torch.where(boxRange == 0, torch.ones_like(boxRange), boxRange)

        # center and scale the point clouds, likely faster than calling obj2world_trans directly?
        pointOffsets = torch.repeat_interleave(-boxCenter,
                                               self.num_points_per_cloud(),
                                               dim=0)
        self.offset_(pointOffsets)
        # (P)
        norms = torch.norm(self.points_packed(), dim=-1)
        # List[(Pi)]
        norms = torch.split(norms, self.num_points_per_cloud())
        # (N)
        scale = torch.stack([x.max() for x in norms], dim=0)
        self.scale_(1 / eps_denom(scale))
        normalizing_trans = Translate(-boxCenter).compose(
            Scale(1 / eps_denom(scale))).to(device=self.device)
        self.obj2world_trans = normalizing_trans.inverse().compose(
            self.obj2world_trans)
        return normalizing_trans
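The same normalization can be checked stand-alone with plain PyTorch3D transforms, which also illustrates the Translate/Scale composition used above (eps_denom appears to be a project helper that keeps the denominator away from zero; it is not needed here because the scale is strictly positive for this data):

import torch
from pytorch3d.transforms import Translate, Scale

pts = torch.randn(1, 500, 3) * 3.0 + 2.0                 # (B=1, P, 3)
box_min = pts.min(dim=1).values                          # (1, 3)
box_max = pts.max(dim=1).values                          # (1, 3)
center = (box_min + box_max) / 2                         # bounding-box center, as above
scale = (pts - center[:, None]).norm(dim=-1).max(dim=1).values  # max distance from the center

# translate to the origin first, then scale: t1.compose(t2) applies t1 before t2
trans = Translate(-center).compose(Scale(1 / scale))
normalized = trans.transform_points(pts)
print(normalized.norm(dim=-1).max())                     # ~1.0, i.e. inside the unit sphere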
Example #3
File: cloud.py  Project: yifita/DSS
    def normalize_to_sphere_(self):
        """
        Center and scale the point clouds to a unit sphere
        Returns: normalizing_trans (Transform3D)
        """
        # packed offset
        center = torch.stack([x.mean(dim=0) for x in self.points_list()],
                             dim=0)
        center_packed = torch.repeat_interleave(-center,
                                                self.num_points_per_cloud(),
                                                dim=0)
        self.offset_(center_packed)
        # (P)
        norms = torch.norm(self.points_packed(), dim=-1)
        # List[(Pi)]
        norms = torch.split(norms, self.num_points_per_cloud())
        # (N)
        scale = torch.stack([x.max() for x in norms], dim=0)
        self.scale_(1 / eps_denom(scale))
        normalizing_trans = Translate(-center).compose(
            Scale(1 / eps_denom(scale))).to(device=self.device)
        self.obj2world_trans = normalizing_trans.inverse().compose(
            self.obj2world_trans)
        return normalizing_trans
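This variant differs from Example #2 only in the choice of center: the per-cloud mean of the points instead of the bounding-box center. A small stand-alone sketch of that difference, using plain torch and no project helpers:

import torch

# a skewed cloud: most points near the origin, a few far away
pts = torch.cat([torch.randn(900, 3), torch.randn(100, 3) + 8.0])

box_center = (pts.min(dim=0).values + pts.max(dim=0).values) / 2  # used in Example #2
mean_center = pts.mean(dim=0)                                      # used in Example #3

# with a skewed distribution the two centers differ noticeably,
# so the resulting unit spheres end up centered at different points
print(box_center, mean_center)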
Example #4
def marching_cubes_naive(
    volume_data_batch: torch.Tensor,
    isolevel: Optional[float] = None,
    spacing: int = 1,
    return_local_coords: bool = True,
) -> Tuple[List[torch.Tensor], List[torch.Tensor]]:
    """
    Runs the classic marching cubes algorithm, iterating over
    the coordinates of the volume_data and using a given isolevel
    for determining intersected edges of cubes of size `spacing`.
    Returns vertices and faces of the obtained mesh.
    This operation is non-differentiable.

    This is a naive implementation, and is not optimized for efficiency.

    Args:
        volume_data_batch: a Tensor of size (N, D, H, W) corresponding to
            a batch of 3D scalar fields
        isolevel: the isosurface value to use as the threshold to determine
            whether points are within a volume. If None, then the average of the
            maximum and minimum value of the scalar field will be used.
        spacing: an integer specifying the cube size to use
        return_local_coords: bool. If True the output vertices will be in local coordinates in
            the range [-1, 1] x [-1, 1] x [-1, 1]. If False they will be in the range
            [0, W-1] x [0, H-1] x [0, D-1]
    Returns:
        verts: [(V_0, 3), (V_1, 3), ...] List of N FloatTensors of vertices.
        faces: [(F_0, 3), (F_1, 3), ...] List of N LongTensors of faces.
    """
    volume_data_batch = volume_data_batch.detach().cpu()
    batched_verts, batched_faces = [], []
    D, H, W = volume_data_batch.shape[1:]
    # pyre-ignore [16]
    volume_size_xyz = volume_data_batch.new_tensor([W, H, D])[None]

    if return_local_coords:
        # Convert from local coordinates in the range [-1, 1] to
        # world coordinates in the range [0, D-1], [0, H-1], [0, W-1]
        local_to_world_transform = Translate(
            x=+1.0, y=+1.0, z=+1.0, device=volume_data_batch.device).scale(
                (volume_size_xyz - 1) * spacing * 0.5)
        # Perform the inverse to go from world to local
        world_to_local_transform = local_to_world_transform.inverse()

    for i in range(len(volume_data_batch)):
        volume_data = volume_data_batch[i]
        curr_isolevel = (((volume_data.max() + volume_data.min()) /
                          2).item() if isolevel is None else isolevel)
        edge_vertices_to_index = {}
        vertex_coords_to_index = {}
        verts, faces = [], []
        # Use length - spacing for the bounds since we are using
        # cubes of size spacing, with the lowest x,y,z values
        # (bottom front left)
        for x in range(0, W - spacing, spacing):
            for y in range(0, H - spacing, spacing):
                for z in range(0, D - spacing, spacing):
                    cube = Cube((x, y, z), spacing)
                    new_verts, new_faces = polygonise(
                        cube,
                        curr_isolevel,
                        volume_data,
                        edge_vertices_to_index,
                        vertex_coords_to_index,
                    )
                    verts.extend(new_verts)
                    faces.extend(new_faces)
        if len(faces) > 0 and len(verts) > 0:
            verts = torch.tensor(verts, dtype=torch.float32)
            # Convert vertices from world to local coords
            if return_local_coords:
                verts = world_to_local_transform.transform_points(verts[None,
                                                                        ...])
                verts = verts.squeeze()
            batched_verts.append(verts)
            batched_faces.append(torch.tensor(faces, dtype=torch.int64))
    return batched_verts, batched_faces
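A hedged usage sketch for the function above. The import path reflects where marching_cubes_naive lives in PyTorch3D (pytorch3d.ops.marching_cubes); if your installed version exposes it elsewhere, adjust the import:

import torch
from pytorch3d.ops.marching_cubes import marching_cubes_naive  # path assumed, check your version

# build a (1, D, H, W) scalar field: distance from the grid center,
# so the isolevel=0.5 surface is approximately a sphere of radius 0.5
coords = torch.linspace(-1.0, 1.0, 32)
xs = coords.view(1, 1, -1)   # varies along W
ys = coords.view(1, -1, 1)   # varies along H
zs = coords.view(-1, 1, 1)   # varies along D
volume = torch.sqrt(xs ** 2 + ys ** 2 + zs ** 2)[None]  # (1, 32, 32, 32)

verts, faces = marching_cubes_naive(volume, isolevel=0.5, return_local_coords=True)
print(verts[0].shape, faces[0].shape)  # one (V, 3) / (F, 3) pair per batch element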