Example #1
    def test_network(self):
        dense_tensor = torch.rand(3, 4, 11, 11, 11, 11)  # BxCxD1xD2x....xDN
        dense_tensor.requires_grad = True

        # Since the shape is fixed, cache the coordinates for faster inference
        coordinates = dense_coordinates(dense_tensor.shape)

        network = nn.Sequential(
            # Layers applied directly on a regular (dense) PyTorch tensor
            nn.ReLU(),
            # Convert the dense tensor to a sparse tensor, reusing the cached coordinates
            MinkowskiToSparseTensor(remove_zeros=False,
                                    coordinates=coordinates),
            MinkowskiConvolution(4, 5, stride=2, kernel_size=3, dimension=4),
            MinkowskiBatchNorm(5),
            MinkowskiReLU(),
            MinkowskiConvolutionTranspose(5,
                                          6,
                                          stride=2,
                                          kernel_size=3,
                                          dimension=4),
            MinkowskiToDenseTensor(
                dense_tensor.shape),  # tensor stride must be back to 1 to restore the original dense shape
        )

        for i in range(5):
            print(f"Iteration: {i}")
            output = network(dense_tensor)
            output.sum().backward()

        assert dense_tensor.grad is not None
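
The snippets in this section are methods lifted from a larger unittest class, so their imports are omitted. The sketch below lists the imports they appear to rely on; it is an assumption based on the MinkowskiEngine top-level API, and the exact location of dense_coordinates and batched_coordinates can vary between versions, so verify against your installation (the load_file helper used in the later examples is sketched after Example #5).

# Minimal import sketch (assumed, not copied from the original test file)
import torch
import torch.nn as nn

from MinkowskiEngine import (
    MinkowskiBatchNorm,
    MinkowskiConvolution,
    MinkowskiConvolutionTranspose,
    MinkowskiLinear,
    MinkowskiReLU,
    MinkowskiToDenseTensor,
    MinkowskiToSparseTensor,
    TensorField,
    dense_coordinates,  # may live under MinkowskiEngine.utils in some versions
)
from MinkowskiEngine.utils import batched_coordinates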
Example #2
    def slice(self):
        coords, colors, pcd = load_file("1.ply")
        voxel_size = 0.02
        colors = torch.from_numpy(colors).float()
        bcoords = batched_coordinates([coords / voxel_size],
                                      dtype=torch.float32)
        tfield = TensorField(colors, bcoords)

        network = nn.Sequential(
            # Point-wise MLP applied to every point in the TensorField
            MinkowskiLinear(3, 16),
            MinkowskiBatchNorm(16),
            MinkowskiReLU(),
            MinkowskiLinear(16, 32),
            MinkowskiBatchNorm(32),
            MinkowskiReLU(),
            # Voxelize the field into a sparse tensor before convolving
            MinkowskiToSparseTensor(),
            MinkowskiConvolution(32, 64, kernel_size=3, stride=2, dimension=3),
            MinkowskiConvolutionTranspose(64,
                                          32,
                                          kernel_size=3,
                                          stride=2,
                                          dimension=3),
        )

        otensor = network(tfield)
        # slice maps the sparse output features back onto the original field points
        ofield = otensor.slice(tfield)
        self.assertEqual(len(tfield), len(ofield))
        self.assertEqual(ofield.F.size(1), otensor.F.size(1))
        # cat_slice additionally concatenates the original field features to the sliced ones
        ofield = otensor.cat_slice(tfield)
        self.assertEqual(len(tfield), len(ofield))
        self.assertEqual(ofield.F.size(1),
                         (otensor.F.size(1) + tfield.F.size(1)))
Example #3
    def stride_slice(self):
        coords, colors, pcd = load_file("1.ply")
        voxel_size = 0.02
        colors = torch.from_numpy(colors).float()
        bcoords = batched_coordinates([coords / voxel_size],
                                      dtype=torch.float32)
        tfield = TensorField(colors, bcoords)

        network = nn.Sequential(
            MinkowskiToSparseTensor(),
            MinkowskiConvolution(3, 8, kernel_size=3, stride=4, dimension=3),
            MinkowskiReLU(),
            MinkowskiConvolution(8, 16, kernel_size=3, stride=4, dimension=3),
        )

        otensor = network(tfield)
        # Even at tensor stride 16, slice maps the features back to every original point
        ofield = otensor.slice(tfield)
Example #4
    def test_network_device(self):
        coords, colors, pcd = load_file("1.ply")
        voxel_size = 0.02
        colors = torch.from_numpy(colors)
        bcoords = batched_coordinates([coords / voxel_size])
        # Place the field on GPU 0; the network below is moved to the same device
        tfield = TensorField(colors, bcoords, device=0).float()

        network = nn.Sequential(
            MinkowskiLinear(3, 16),
            MinkowskiBatchNorm(16),
            MinkowskiReLU(),
            MinkowskiLinear(16, 32),
            MinkowskiBatchNorm(32),
            MinkowskiReLU(),
            MinkowskiToSparseTensor(),
            MinkowskiConvolution(32, 64, kernel_size=3, stride=2, dimension=3),
        ).to(0)

        print(network(tfield))
Example #5
    def field_to_sparse(self):
        coords, colors, pcd = load_file("1.ply")
        voxel_size = 0.02
        colors = torch.from_numpy(colors).float()
        bcoords = batched_coordinates([coords / voxel_size],
                                      dtype=torch.float32)
        tfield = TensorField(colors, bcoords)

        network = nn.Sequential(
            MinkowskiToSparseTensor(),
            MinkowskiConvolution(3, 8, kernel_size=3, stride=4, dimension=3),
            MinkowskiReLU(),
            MinkowskiConvolution(8, 16, kernel_size=3, stride=4, dimension=3),
        )

        otensor = network(tfield)
        # Convert the field directly onto the coordinate map produced by the network
        field_to_sparse = tfield.sparse(
            coordinate_map_key=otensor.coordinate_map_key)
        self.assertTrue(len(field_to_sparse.F) == len(otensor))
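
The load_file helper used in Examples #2 through #5 is not part of MinkowskiEngine itself; it is a small I/O utility from the accompanying test/example code. A minimal sketch of an equivalent helper, assuming Open3D is installed and that "1.ply" stores a colored point cloud, could look like this:

import numpy as np
import open3d as o3d

def load_file(file_name):
    # Read a colored point cloud and return raw coordinates, colors, and the Open3D object
    pcd = o3d.io.read_point_cloud(file_name)
    coords = np.asarray(pcd.points)  # (N, 3) point coordinates
    colors = np.asarray(pcd.colors)  # (N, 3) RGB values in [0, 1]
    return coords, colors, pcd

The coordinates are then divided by voxel_size and passed through batched_coordinates, which prepends a batch index to each point, producing the batched coordinate format that TensorField expects.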