Example #1
def test_points():
    """PyntCloud.points.

    - Points must be a pandas DataFrame
    - DataFrame must have at least "x", "y" and "z" named columns
    - When PyntCloud.points is re-assigned all structures must be removed

    """
    points = np.random.rand(10, 3)

    # not dataframe
    with pytest.raises(TypeError):
        PyntCloud(points)

    points = pd.DataFrame(points)

    # not x, y, z
    with pytest.raises(ValueError):
        PyntCloud(points)

    points = pd.DataFrame(points.values, columns=["x", "y", "z"])

    assert PyntCloud(points)

    cloud = PyntCloud(points)

    cloud.add_structure("voxelgrid")

    assert len(cloud.structures) == 1

    # dummy filter
    x_above_05 = cloud.points["x"] > 0.5
    cloud.points = cloud.points[x_above_05]

    assert len(cloud.structures) == 0
Example #2
def test_voxelgrid_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("voxel_x")

    vg_id = cloud.add_structure("voxelgrid", x_y_z=[2, 2, 2])

    with pytest.raises(KeyError):
        # wrong id
        cloud.add_scalar_field("voxel_x", voxelgrid="V([1,1,1],True)")

    for sf in {"voxel_x", "voxel_y", "voxel_z"}:
        cloud.add_scalar_field(sf, voxelgrid=vg_id)
        sf_id = "{}({})".format(sf, vg_id)
        assert min(cloud.points[sf_id]) >= 0
        assert max(cloud.points[sf_id]) <= 1
        cloud.points.drop(sf_id, axis=1, inplace=True)

    cloud.add_scalar_field("voxel_n", voxelgrid=vg_id)
    sf_id = "voxel_n({})".format(vg_id)
    assert min(cloud.points[sf_id]) >= 0
    assert max(cloud.points[sf_id]) <= 7
    cloud.points.drop(sf_id, axis=1, inplace=True)

    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")

    voxelgrid = cloud.add_structure("voxelgrid", sizes=[0.3] * 3)
    clusters = cloud.add_scalar_field(
        "euclidean_clusters", voxelgrid=voxelgrid)
    counts = sorted(cloud.points[clusters].value_counts().values)
    assert len(counts) == 2
    assert counts == [2, 4]
Example #3
def test_write_obj():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed.obj', also_save=["mesh"])

    writed_obj = PyntCloud.from_file(data_path + 'writed.obj')

    assert all(data.points[["x", "y", "z"]] == writed_obj.points)

    os.remove(data_path + 'writed.obj')
Example #4
def test_write_npz():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed_npz.npz', also_save=["mesh"])

    writed_npz = PyntCloud.from_file(data_path + 'writed_npz.npz')

    assert all(data.points == writed_npz.points)
    assert all(data.mesh == writed_npz.mesh)

    os.remove(data_path + 'writed_npz.npz')
Example #5
def pointcloud2ply(vertices, normals, out_file=None):
    """Converts the file to PLY format"""
    from pathlib import Path
    import pandas as pd
    from pyntcloud import PyntCloud
    df = pd.DataFrame(np.hstack((vertices, normals)))
    df.columns = ['x', 'y', 'z', 'nx', 'ny', 'nz']
    cloud = PyntCloud(df)

    if out_file is None:
        out_file = Path('pointcloud.ply').resolve()

    cloud.to_file(str(out_file))
    return out_file
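
A minimal usage sketch for the converter above (the array contents and the output filename are illustrative):

import numpy as np

# 100 random points with constant unit normals; both arrays must be (N, 3) so np.hstack yields six columns
vertices = np.random.rand(100, 3)
normals = np.tile([0.0, 0.0, 1.0], (100, 1))
out = pointcloud2ply(vertices, normals, out_file="example_pointcloud.ply")
print(out)  # path of the written PLY file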
Example #6
def test_from_file(data_path, extension, color, mesh):
    cloud = PyntCloud.from_file(str(data_path / "diamond{}".format(extension)))
    assert_points_xyz(cloud)
    if color:
        assert_points_color(cloud)
    if mesh:
        assert_mesh(cloud)
Example #7
def test_split_on():
    """PyntCloud.split_on.

    - Raise KeyError on invalid scalar field
    - Raise ValueError on invalid save_format
    - and_return should return list of PyntClouds
    - Implicitly check save_path is working

    """
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    vg_id = cloud.add_structure("voxelgrid", x_y_z=[2, 2, 2])

    voxel_n = cloud.add_scalar_field("voxel_n", voxelgrid=vg_id)

    with pytest.raises(KeyError):
        cloud.split_on("bad_sf")

    with pytest.raises(ValueError):
        cloud.split_on(voxel_n, save_format="bad_format")

    output = cloud.split_on(voxel_n, save_path="tmp_out")

    assert output is None

    output = cloud.split_on(voxel_n, and_return=True, save_path="tmp_out")

    assert len(output) == 8

    rmtree("tmp_out")
Example #8
def test_read_ascii():
    data = PyntCloud.from_file(data_path + '.xyz', sep=" ", header=None,
                               index_col=False,
                               names=["x", "y", "z", "nx", "ny", "nz"],
                               dtype="f")

    assert_points_xyz(data)
Example #9
def test_eigenvalues():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    k_neighbors = cloud.get_neighbors(k=5)
    ev = cloud.add_scalar_field("eigen_values", k_neighbors=k_neighbors)

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("sphericity")

    cloud.add_scalar_field("sphericity", ev=ev)
    cloud.points.drop("sphericity(5)", 1, inplace=True)
    cloud.add_scalar_field("anisotropy", ev=ev)
    cloud.points.drop("anisotropy(5)", 1, inplace=True)
    cloud.add_scalar_field("linearity", ev=ev)
    cloud.points.drop("linearity(5)", 1, inplace=True)
    cloud.add_scalar_field("omnivariance", ev=ev)
    cloud.points.drop("omnivariance(5)", 1, inplace=True)
    cloud.add_scalar_field("eigenentropy", ev=ev)
    cloud.points.drop("eigenentropy(5)", 1, inplace=True)
    cloud.add_scalar_field("planarity", ev=ev)
    cloud.points.drop("planarity(5)", 1, inplace=True)
    cloud.add_scalar_field("eigen_sum", ev=ev)
    cloud.points.drop("eigen_sum(5)", 1, inplace=True)
    cloud.add_scalar_field("curvature", ev=ev)
    cloud.points.drop("curvature(5)", 1, inplace=True)
Example #10
def test_write_ascii():
    data = PyntCloud.from_file(data_path + '.xyz', sep=" ", header=None,
                               index_col=False,
                               names=["x", "y", "z", "nx", "ny", "nz"],
                               dtype="f")

    data.to_file(data_path + 'writed.txt', sep=" ", header=None)

    writed_data = PyntCloud.from_file(data_path + 'writed.txt', sep=" ", header=None,
                                      index_col=False,
                                      names=["x", "y", "z", "nx", "ny", "nz"],
                                      dtype="f")

    assert all(data.points == writed_data.points)

    os.remove(data_path + 'writed.txt')
Example #11
def test_obj_issue_221(data_path):
    """ Regression test https://github.com/daavoo/pyntcloud/issues/221
    """
    cloud = PyntCloud.from_file(str(data_path / "obj_issue_221.obj"))

    assert (len(cloud.xyz)) == 42
    assert (len(cloud.mesh)) == 88
Example #12
def test_rgb_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    cloud.add_scalar_field('rgb_intensity')
    assert min(cloud.points["Ri"]) >= 0
    assert min(cloud.points["Gi"]) >= 0
    assert min(cloud.points["Bi"]) >= 0
    assert max(cloud.points["Ri"]) <= 1
    assert max(cloud.points["Gi"]) <= 1
    assert max(cloud.points["Bi"]) <= 1
    cloud.points.drop(["Ri", "Gi", "Bi"], 1, inplace=True)

    cloud.add_scalar_field('relative_luminance')
    assert min(cloud.points["relative_luminance"]) >= 0
    assert max(cloud.points["relative_luminance"]) < 255.01
    cloud.points.drop("relative_luminance", 1, inplace=True)

    cloud.add_scalar_field('hsv')
    assert min(cloud.points["H"]) >= 0
    assert max(cloud.points["H"]) <= 360
    assert min(cloud.points["S"]) >= 0
    assert max(cloud.points["S"]) <= 1
    assert min(cloud.points["V"]) >= 0
    assert max(cloud.points["V"]) <= 100
    cloud.points.drop(["H", "S", "V"], 1, inplace=True)
Example #13
class CorrespondingLidarPointCloud():
    def __init__(self, pcl_path, pointsensor):
        self._points = np.fromfile(str(pcl_path), dtype=np.float32).reshape((-1, 5))[:, :3].T  # store as [3, N]
        self._pointsensor = pointsensor
        # PyntCloud expects an (N, 3) pandas DataFrame with "x", "y", "z" columns (pandas assumed imported as pd)
        self._pc = PyntCloud(pd.DataFrame(self._points.T, columns=["x", "y", "z"]))

    def getPointCloud(self):
        return self._points

    def getPointsensor(self):
        return self._pointsensor

    def getOccupancyMatrix(self, as_tensor=True):
        assert getattr(self, "_occupancy", None) is not None, "Call voxelize() first"

        if as_tensor:
            return torch.tensor(self._occupancy)

        return self._occupancy

    def getNeighbors(self):
        assert getattr(self, "_neighbors", None) is not None, "Call generateKNN() first"
        return self._neighbors

    def getKNN(self, k):
        return self._pc.get_neighbors(k=k)

    def nbr_points(self) -> int:
        return self._points.shape[1]

    def voxelize(self, size_x, size_y, size_z):
        voxelgrid_id = self._pc.add_structure("voxelgrid", size_x=size_x, size_y=size_y, size_z=size_z, regular_bounding_box=False)
        voxelgrid = self._pc.structures[voxelgrid_id]
        self._occupancy = voxelgrid.get_feature_vector(mode='binary')

    def generateKNN(self):
        self._neighbors = self._pc.get_neighbors() #(N, k)

    def translate(self, x):
        for i in range(3):
            self._points[i, :] = self._points[i, :] + x[i]

    def rotate(self, rot_matrix):
        self._points = np.dot(rot_matrix, self._points)

    def transform(self, transf_matrix):
        self._points = transf_matrix.dot(np.vstack((self._points, np.ones(self.nbr_points()))))
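
A self-contained usage sketch for the class above; it first writes a tiny synthetic sweep so nothing external is needed (numpy, pandas and torch are assumed to be imported at module level, as in the original source):

import numpy as np

# a fake sweep with 5 floats per point (x, y, z, intensity, ring), matching what __init__ expects
dummy = np.random.rand(100, 5).astype(np.float32)
dummy.tofile("dummy_sweep.bin")

pc = CorrespondingLidarPointCloud("dummy_sweep.bin", pointsensor={"token": "dummy"})
pc.voxelize(size_x=0.1, size_y=0.1, size_z=0.1)
occupancy = pc.getOccupancyMatrix(as_tensor=False)  # binary occupancy grid from the voxelgrid
print(occupancy.shape, pc.nbr_points())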
Example #14
 def _make(self, filename):
     cloud = PyntCloud.from_file("{}/{}.ply".format(CLOUD_SAVE, filename))
     cloud = cloud.get_sample('mesh_random',
                              n=self.points,
                              rgb=False,
                              normals=True,
                              as_PyntCloud=True)
     cloud.to_file("{}/{}.ply".format(CLOUD_SAVE, filename))
 def __getitem__(self, index):
     points = PyntCloud.from_file(self.list_files[index])
     points = np.array(points.points)
     points_normalized = (points - (-0.5)) / (0.5 - (-0.5))
     points = points_normalized.astype(float)  # np.float alias was removed from NumPy
     points = torch.from_numpy(points)
     
     return points
Example #16
def test_points_sampling():
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("random_points")

    sample = cloud.get_sample("random_points", n=1)

    assert point_in_array_2D(sample, cloud.xyz)
Example #17
def load_pc(path):
    try:
        pc = PyntCloud.from_file(path)
        points = pc.points
        ret = df_to_pc(points)
        return ret
    except Exception:
        return None
Example #18
def run(input_files, output_file):
    for f in input_files:
        assert os.path.exists(f), f'{f} not found'

    logger.info(input_files)
    logger.info(output_file)

    frames = []
    for f in tqdm(input_files):
        pc = PyntCloud.from_file(f)
        frames.append(pc.points)
    final_df = pd.concat(frames)

    output_folder, _ = os.path.split(output_file)
    os.makedirs(output_folder, exist_ok=True)
    PyntCloud(final_df).to_file(output_file)
    logger.info(f'{output_file} written.')
Example #19
 def write_landmarks_as_ply_external_for_recognition(file_name):
     cloud = PyntCloud.from_file(file_name,
                                 sep=" ",
                                 header=0,
                                 names=["x", "y", "z"])
     name_lm_ply = os.path.splitext(
         file_name)[0] + '.ply'  # this is filename.ply
     cloud.to_file(name_lm_ply)  # save landmarks point as ply file
def get_point_cloud(point_cloud_file):
    # get data of point cloud
    _, suffix = os.path.splitext(point_cloud_file)
    if suffix == '.ply':
        point_cloud = PyntCloud.from_file(point_cloud_file)
        point_cloud = np.array(point_cloud.points)

    return point_cloud
Example #21
    def voxelization_raw_data(self, human_body):
        dataset = pd.DataFrame({
            'x': human_body[:, 0],
            'y': human_body[:, 1],
            'z': human_body[:, 2]
        })
        cloud = PyntCloud(dataset)
        voxelgrid_id = cloud.add_structure("voxelgrid", n_x=32, n_y=32, n_z=32)
        voxelgrid = cloud.structures[voxelgrid_id]
        x_cords = voxelgrid.voxel_x
        y_cords = voxelgrid.voxel_y
        z_cords = voxelgrid.voxel_z
        voxel = np.zeros((32, 32, 32), dtype=bool)

        for x, y, z in zip(x_cords, y_cords, z_cords):
            voxel[x][y][z] = True
        return voxel
Example #22
 def __getitem__(self, index):
     path, label = self.path_label_pairs[index]
     label = torch.LongTensor([label])
     obj = PyntCloud.from_file(path)
     points = torch.FloatTensor(obj.xyz)
     if self.transform:
         points = self.transform(points)
     return points, label
Example #23
def test_pyvista_rgb_is_handled():
    """ Serves as regression test for old `in` behaviour that could cause a subtle bug
    if poin_arrays contain a field with `name in "RGB"`
    """
    poly = pv.Sphere()
    poly.point_data["RG"] = np.zeros_like(poly.points)[:, :2]
    pc = PyntCloud.from_instance("pyvista", poly)
    assert all(x in pc.points.columns for x in ["RG_0", "RG_1"])
    def get_datum(self, idx):
        taxonomy_name = self.file_list[idx]['taxonomy_name']
        sample_name = self.file_list[idx]['sample_name']

        rendering_image_paths = self.file_list[idx]['rendering_images']

        rec_radian_azi = self.file_list[idx]['rec_radian_azi']
        rec_radian_ele = self.file_list[idx]['rec_radian_ele']

        ground_truth_point_cloud_path = self.file_list[idx]['point_cloud']

        rec_id = 0
        # get data of rendering images (sample 1 image from paths)
        if self.dataset_type == DatasetType.TRAIN:
            rand_id = random.randint(0, len(rendering_image_paths) - 1)
            selected_rendering_image_path = rendering_image_paths[rand_id]
            # update_id is equal to image_id in single-view model
            rec_id = rand_id
        else:
            # test/valid: always use the same rendering image (index 1)
            selected_rendering_image_path = rendering_image_paths[1]
            rec_id = 1

        # read the test, train image
        rendering_images = []
        rendering_image = cv2.imread(selected_rendering_image_path,
                                     cv2.IMREAD_UNCHANGED).astype(
                                         np.float32) / 255.
        rendering_image = cv2.cvtColor(rendering_image, cv2.COLOR_GRAY2RGB)

        if len(rendering_image.shape) < 3:
            print(
                '[FATAL] %s It seems that there is something wrong with the rendering image file %s'
                % (dt.now(), selected_rendering_image_path))
            sys.exit(2)
        rendering_images.append(rendering_image)

        # get model_azi, model_ele
        model_azi = rec_radian_azi[rec_id]
        model_ele = rec_radian_ele[rec_id]

        # get data of point cloud
        _, suffix = os.path.splitext(ground_truth_point_cloud_path)

        if suffix == '.ply':
            ground_truth_point_cloud = PyntCloud.from_file(
                ground_truth_point_cloud_path)
            ground_truth_point_cloud = np.array(
                ground_truth_point_cloud.points).astype(np.float32)

        # convert to np array
        rendering_images = np.array(rendering_images).astype(np.float32)
        model_azi = np.array(model_azi).astype(np.float32)
        model_ele = np.array(model_ele).astype(np.float32)

        return (taxonomy_name, sample_name, rendering_images, model_azi,
                model_ele, init_pointcloud_loader(self.init_num_points),
                ground_truth_point_cloud)
Example #25
def points_save_as_ply(points, rgb, savefilepath):
    """

    :param points:
    :param rgb:
    :param savefilepath:
    :return:
    """
    rows, cols, _ = points.shape
    data_list = []
    for i in range(rows):
        for j in range(cols):
            if np.isnan(points[i, j][2]):
                continue
            # make sure the DataFrame has the colours as uint8
            data = [
                points[i, j][0], points[i, j][1], points[i, j][2],
                rgb[i, j][0], rgb[i, j][1], rgb[i, j][2]
            ]
            # print(data)
            data_list.append(data)

    data_list = np.array(data_list)
    cloud = PyntCloud(
        pd.DataFrame(
            # same arguments that you are passing to visualize_pcl
            data=data_list,
            columns=["x", "y", "z", "red", "green", "blue"]))

    cloud.points["red"] = cloud.points["red"].astype(np.uint8)
    cloud.points["green"] = cloud.points["green"].astype(np.uint8)
    cloud.points["blue"] = cloud.points["blue"].astype(np.uint8)

    cloud.to_file(savefilepath)
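
A small usage sketch with synthetic data (the 4x4 organized point map and the output name are made up for illustration):

import numpy as np

rows, cols = 4, 4
points = np.random.rand(rows, cols, 3)            # organized XYZ map
points[0, 0, 2] = np.nan                          # entries with NaN depth are skipped
rgb = np.random.randint(0, 255, (rows, cols, 3))  # matching colours
points_save_as_ply(points, rgb, "organized_cloud.ply")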
Example #26
def sphere_pyntcloud():
    return PyntCloud(pd.DataFrame(
        data=np.array([
            [-1., 0., 0.],
            [0., 0., 1.],
            [1., 0., 0.],
            [0., 1., 0.],
            [0., 0., 1.2]], dtype=np.float32),
        columns=["x", "y", "z"]))
Example #27
def test_from_file(data_path, extension, color, mesh, comments):
    cloud = PyntCloud.from_file(str(data_path / "diamond{}".format(extension)))
    assert_points_xyz(cloud)
    if color:
        assert_points_color(cloud)
    if mesh:
        assert_mesh(cloud)
    if comments:
        assert cloud.comments == ["PyntCloud is cool"]
Example #28
def test_sf_xyz():
    cloud = PyntCloud.from_file(path + "/data/plane.npz")

    # fit with default values (max_dist=1e-4)
    is_plane = cloud.add_scalar_field("plane_fit")
    assert sorted(cloud.points[is_plane].value_counts()) == [1, 4]

    # fit with higher tolerance -> include outlier
    is_plane = cloud.add_scalar_field("plane_fit", max_dist=0.4)
    assert sorted(cloud.points[is_plane].value_counts()) == [5]

    cloud = PyntCloud.from_file(path + "/data/sphere.ply")

    is_sphere = cloud.add_scalar_field("sphere_fit")
    assert sorted(cloud.points[is_sphere].value_counts()) == [1, 2928]

    is_sphere = cloud.add_scalar_field("sphere_fit", max_dist=26)
    assert sorted(cloud.points[is_sphere].value_counts()) == [2929]
Example #29
def get_depth_image_from_point_cloud(calibration_file, pcd_file, output_file):
    if not os.path.exists(pcd_file):  # check all files exist
        logging.error('Point cloud does not exist')
        return

    # if not os.path.exists(calibration_file):                # check if the califile exists
    #     logging.error ('Calibration does not exist')
    #     return

    try:
        cloud = PyntCloud.from_file(pcd_file)  # load the data from the files
    except ValueError:
        logging.error("Error reading point cloud")
        raise

    # points       = cloud.points.values[:, :3]
    z = cloud.points.values[:, 3]

    print(cloud.points.values.shape)

    height = 172  # todo: get this from calibration file
    width = 224

    z = (z - min(z)) / (max(z) - min(z))  # normalize the data to 0 to 1

    # print (z)

    # print (z.size)

    # iterate over the points and calculate the x, y coordinates in the image
    # get the data for calibration
    # im_coords = apply_projection(points)

    # manipulate the pixels color value depending on the z coordinate
    # TODO make this a function
    # for i, t in enumerate(im_coords):
    #     x, y = t.squeeze()
    #     x = int(np.round(x))
    #     y = int(np.round(y))
    #     if x >= 0 and x < width and y >= 0 and y < height:
    #         viz_image[x,y] = 255*z[i]

    # # resize and return the image after processing
    # imgScale  = 0.25
    # newX,newY = viz_image.shape[1]*imgScale, viz_image.shape[0]*imgScale
    # cv2.imwrite('/tmp/depth_visualization.png', viz_image)

    #111 utilit get vizc_channel

    depth_img = np.resize(z * 255, [224, 172, 3])

    depth_img_resize = cv2.resize(
        z * 255, (180, 180))  # todo: make width and height variable

    cv2.imwrite("/tmp/depth_224x172.png", depth_img)
    cv2.imwrite("/tmp/depth_240x180.png", depth_img_resize)
Example #30
def test_sf_xyz():
    cloud = PyntCloud.from_file(path + "/data/plane.npz")

    # fit with default values (max_dist=1e-4)
    is_plane = cloud.add_scalar_field("plane_fit")
    assert sorted(cloud.points[is_plane].value_counts()) == [1, 4]

    # fit with higher tolerance -> include outlier
    is_plane = cloud.add_scalar_field("plane_fit", max_dist=0.4)
    assert sorted(cloud.points[is_plane].value_counts()) == [5]

    cloud = PyntCloud.from_file(path + "/data/sphere.ply")

    is_sphere = cloud.add_scalar_field("sphere_fit")
    assert sorted(cloud.points[is_sphere].value_counts()) == [1, 2928]

    is_sphere = cloud.add_scalar_field("sphere_fit", max_dist=26)
    assert sorted(cloud.points[is_sphere].value_counts()) == [2929]
Example #31
def test_write_ply():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed_ascii.ply', also_save=["mesh"],
                 as_text=True)
    data.to_file(data_path + 'writed_bin.ply', also_save=["mesh"],
                 as_text=False)

    writed_ply_ascii = PyntCloud.from_file(data_path + 'writed_ascii.ply')
    writed_ply_bin = PyntCloud.from_file(data_path + 'writed_bin.ply')

    assert all(data.points == writed_ply_ascii.points)
    assert all(data.points == writed_ply_bin.points)
    assert all(data.mesh == writed_ply_ascii.mesh)
    assert all(data.mesh == writed_ply_bin.mesh)

    os.remove(data_path + 'writed_ascii.ply')
    os.remove(data_path + 'writed_bin.ply')
Example #32
def dump_point_cloud(
    points: np.ndarray, timestamp: int, log_id: str, parent_path: str
) -> None:
    """Saves point cloud as .ply file extracted from Waymo's range images

    Args:
        points: A (N,3) numpy array representing the point cloud created from lidar readings
        timestamp: Timestamp in nanoseconds when the lidar reading occurred
        log_id: Log ID that the reading belongs to
        parent_path: The directory that the converted data is written to
    """
    # Point cloud needs to be of type float
    points = points.astype(float)
    data = {"x": points[:, 0], "y": points[:, 1], "z": points[:, 2]}
    cloud = PyntCloud(pd.DataFrame(data))
    cloud_fpath = f"{parent_path}/{log_id}/lidar/PC_{timestamp}.ply"
    check_mkdir(str(Path(cloud_fpath).parent))
    cloud.to_file(cloud_fpath)
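
A usage sketch (assuming check_mkdir from the surrounding module is available; the log id, timestamp and parent path are placeholders):

import numpy as np

points = np.random.rand(1000, 3).astype(np.float32)
dump_point_cloud(points, timestamp=1557855105212345, log_id="example_log", parent_path="/tmp/waymo_export")
# writes /tmp/waymo_export/example_log/lidar/PC_1557855105212345.ply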
Example #33
def plane_pyntcloud():
    return PyntCloud(pd.DataFrame(
        data=np.array([
            [0., 0., 0.],
            [1., 1., 0.],
            [2., 2., 0.],
            [1., 2., 0.],
            [0.1, 0.2, 0.3]], dtype=np.float32),
        columns=["x", "y", "z"]))
Example #34
def test_read_ascii():
    ply_ascii = PyntCloud.from_file(data_path + '.xyz',
                                    sep=" ",
                                    header=None,
                                    index_col=False,
                                    names=["x", "y", "z", "nx", "ny", "nz"],
                                    dtype="f")

    assert_points_xyz(ply_ascii)
Example #35
def cal_planes(input_file_path="./points/", output_file_path="./planes/"):
    """
    Using Ransac in PyntCloud to find the groud plane.
    Groud plane parameters (A, B, C, D) for Ax+By+Cz+D=0.
    Note the lidar points have transformed to the camera coordinate.

    Parameters:
    input_file_path: the path of the points files, the shape of the points should be Nx3
    output_file_path: the path to save the plane files
    """
    print ("----------   Calculating the planes   ----------")
    f_error = open("error.log", "w")
    error_cnt = 0
    error_flag = False
    for file_name in tqdm(os.listdir(input_file_path)):
        #print ("Processing: ", file_name)
        cloud = PyntCloud.from_file(input_file_path + file_name)
        cloud.points = cloud.points[cloud.points["y"] > 1]

        is_floor = cloud.add_scalar_field("plane_fit", n_inliers_to_stop=len(cloud.points) // 20, max_dist=0.001, max_iterations=500)

        cloud.points = cloud.points[cloud.points[is_floor] > 0]
        data = np.array(cloud.points)

        # best-fit linear plane : Z = C[0] * X + C[1] * Y + C[2]
        A = np.c_[data[:, 0], data[:, 1], np.ones(data.shape[0])]
        C, _, _, _ = scipy.linalg.lstsq(A, data[:, 2])

        normal = np.array([C[0], C[1], 1, C[2]])
        normal = - normal / normal[1]
        #print(normal)

        # Check whether the result is approximately the ground plane.
        # If the result is right, parameter B should be nearly 1 when D is the height of the camera.
        # If the result is wrong, write the default values for KITTI.
        if (normal[3] > 2.0 or normal[3] < 1.3) :
            #print("error_result")
            error_flag = True
            error_cnt += 1
            f_error.write(file_name[:-4] + ".txt    " + str(normal[0]) + " " + str(normal[1]) + " " + str(normal[2]) + " " + str(normal[3]) + "\n")
            
            str_normal = "0.0" + " " + "-1.0" + " " + "0.0" + " " + "1.65"
        else:
            str_normal = str(normal[0]) + " " + str(normal[1]) + " " + str(normal[2]) + " " + str(normal[3])

        plane_file_name = output_file_path + file_name[:-4] + ".txt"
        f = open(plane_file_name, "w")

        f.write("# Plane\n")
        f.write("Width 4\n")
        f.write("Height 1\n")
        f.write(str_normal)

        f.close()
    f_error.close()
    if error_flag:
        print ("\n There are ", error_cnt, " planes results may not be right! \n The files' name is saved in error.log")
Example #36
def arr_to_pc(arr, cols, types):
    d = {}
    for i in range(arr.shape[1]):
        col = cols[i]
        dtype = types[i]
        d[col] = arr[:, i].astype(dtype)
    df = pd.DataFrame(data=d)
    pc = PyntCloud(df)
    return pc
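
A quick usage sketch (array contents, column names and dtypes are illustrative):

import numpy as np

arr = np.random.rand(100, 6)
arr[:, 3:] *= 255                                   # pretend the last three columns are colours
cols = ["x", "y", "z", "red", "green", "blue"]
types = [np.float32, np.float32, np.float32, np.uint8, np.uint8, np.uint8]
pc = arr_to_pc(arr, cols, types)
print(pc.points.dtypes)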
Example #37
def test_points_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("points_random_sampling")

    sample = cloud.get_sample("points_random_sampling", n=1)

    assert point_in_array_2D(sample, cloud.xyz)
Example #38
def process(source):
    pc_mesh = PyntCloud.from_file(source)
    pcQueue_512 = []
    pcQueue_1024 = []
    pcQueue_2048 = []
    traverse_recurse(pc_mesh, pcQueue_512, pcQueue_1024, pcQueue_2048)
    pc = pcQueue_512 + pcQueue_1024 + pcQueue_2048
    pc = np.vstack(pc[:])
    return pcQueue_512, pcQueue_1024, pcQueue_2048
Example #39
def test_repr():
    """PyntCloud.__repr__.

    - When custom attributes are added, __repr__ must show their names and types

    """
    points = np.random.rand(10, 3)
    points = pd.DataFrame(points, columns=["x", "y", "z"])
    cloud = PyntCloud(points)

    # some dummy attribute
    important_dict = {"black": "Carl", "white": "Lenny"}
    cloud.important_information = important_dict

    reprstring = cloud.__repr__()
    reprstring = reprstring.split("\n")

    assert reprstring[-2].strip() == "important_information: <class 'dict'>"
Example #40
def computeNormals(model):
    cloudable = pd.DataFrame(data=np.transpose(model), columns=['x', 'y', 'z'])
    # calculates a pointcloud of the input model using a pandas DataFrame
    cloud = PyntCloud(cloudable)
    # use neighbors to get normals from pointcloud
    neighbors = cloud.get_neighbors(k=10)
    cloud.add_scalar_field('normals', k_neighbors=neighbors)
    # extract normals from the altered DataFrame
    normals = np.transpose(np.asarray(cloudable.loc[:, 'nx(10)':'nz(10)']))
    master = np.repeat(masterOrig, len(normals[0]), axis=1)
    # taking the dot product column-wise; the 'ij,ij->' notation is saying:
    # take dot product of ith row, jth column of normals and ith row, jth column of master
    # then, create boolean mask array for normal comparison
    I = np.einsum('ij,ij->j', normals, master) < 0
    # flip all values in column if I is true at that column (dot prod < 0)
    normals[:, I] = -normals[:, I]

    return (normals)
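
The sign flip at the end of computeNormals depends on masterOrig, which is defined elsewhere in the original module; a self-contained sketch of just that orientation step, using a stand-in reference direction, could look like this:

import numpy as np

# normals as a (3, N) array, as in computeNormals after the transpose
normals = np.random.randn(3, 100)
normals /= np.linalg.norm(normals, axis=0)

# stand-in for masterOrig: one reference direction repeated for every point
master_dir = np.array([[0.0], [0.0], [1.0]])
master = np.repeat(master_dir, normals.shape[1], axis=1)

# column-wise dot products; flip any normal pointing away from the reference direction
I = np.einsum('ij,ij->j', normals, master) < 0
normals[:, I] = -normals[:, I]

# every normal now has a non-negative dot product with the reference direction
assert np.all(np.einsum('ij,ij->j', normals, master) >= 0)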
Example #41
def process2(path, args):
    ori_path = join(args.source, path)
    target_path, _ = splitext(join(args.dest, path))
    target_folder, _ = split(target_path)
    makedirs(target_folder, exist_ok=True)

    pc = PyntCloud.from_file(ori_path)
    coords = ['x', 'y', 'z']
    points = pc.points[coords]
    length = int(args.vg_size / 64)
    cnt = 0
    for d in range(length):
        for h in range(length):
            for w in range(length):
                center = points[
                    (points['x'] >= np.max([0, d * 64]))
                    & (points['x'] < np.min([args.vg_size, d * 64 + 64])) &
                    (points['y'] >= np.max([0, h * 64])) &
                    (points['y'] < np.min([args.vg_size, h * 64 + 64])) &
                    (points['z'] >= np.max([0, w * 64])) &
                    (points['z'] < np.min([args.vg_size, w * 64 + 64]))]
                if (center.shape[0] > 0):
                    cnt += 1
                    sample = points[
                        (points['x'] >= np.max([0, d * 64 - 8]))
                        & (points['x'] < np.min([args.vg_size, d * 64 + 72])) &
                        (points['y'] >= np.max([0, h * 64 - 8])) &
                        (points['y'] < np.min([args.vg_size, h * 64 + 72])) &
                        (points['z'] >= np.max([0, w * 64 - 8])) &
                        (points['z'] < np.min([args.vg_size, w * 64 + 72]))]

                    sample = sample - np.min(sample, axis=0)
                    sample = (sample - 0.01) / 2
                    sample = np.round(sample)
                    sample = np.abs(sample)
                    sample = sample.drop_duplicates()
                    sample = sample.dropna()

                    center = center - np.min(center, axis=0)

                    #print(np.max(center, axis=0), np.max(sample, axis=0))

                    center_target_path = target_path + '_bl64' + f'_{cnt:04d}{args.target_extension}'
                    bbox_target_path = target_path + '_bl40' + f'_{cnt:04d}{args.target_extension}'
                    pc1 = PyntCloud(sample)
                    pc2 = PyntCloud(center)
                    pc1.to_file(bbox_target_path)
                    pc2.to_file(center_target_path)
    print(cnt, ' blocks are written')
Example #42
def test_write_bin():

    data = PyntCloud.from_file(data_path + '.bin')

    data.to_file(data_path + 'written.bin')

    # write_bin only accepts kwargs: "sep" and "format" for numpy.ndarray.tofile()
    with pytest.raises(ValueError):
        data.to_file(data_path + '_fail.bin', also_save=['mesh'])
    with pytest.raises(ValueError):
        data.to_file(data_path + '_fail.bin', some_other_kwarg='some_value')

    written_data = PyntCloud.from_file(data_path + 'written.bin')

    assert_points_xyz(written_data)
    assert np.array_equal(data.points, written_data.points)
    assert np.array_equal(data.xyz, written_data.xyz)

    os.remove(data_path + 'written.bin')
Example #43
def load_ply_data(filename):
    '''
    Load data from a ply file.
    '''
    cloud = PyntCloud.from_file(filename)
    coords = ['x', 'y', 'z']
    points = np.array(cloud.points[coords])
    points = points.astype(np.int32)  #np.uint8

    return points
Example #44
def voxelization(tensor, vox_size = 32):
    '''
    input: (# points, 3) array of xyz coordinates
    output: ch * gridsize * gridsize * gridsize
    ch is the occupancy channel: 1 = occupied, 0 = empty
    '''
    bunny = pd.DataFrame({'x': tensor[:,0],
                   'y': tensor[:,1],
                   'z': tensor[:,2]})
    cloud = PyntCloud(bunny)
    voxelgrid_id = cloud.add_structure("voxelgrid", n_x=vox_size, n_y=vox_size, n_z=vox_size)
    voxelgrid = cloud.structures[voxelgrid_id]
    x_cords = voxelgrid.voxel_x
    y_cords = voxelgrid.voxel_y
    z_cords = voxelgrid.voxel_z
    voxel = np.zeros((1, vox_size, vox_size, vox_size), dtype=float)
    for x, y, z in zip(x_cords, y_cords, z_cords):
        voxel[0][x][y][z] = 1.
    return voxel
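
A brief usage sketch (the random point cloud is illustrative):

import numpy as np

points = np.random.rand(2048, 3)           # N points with xyz in [0, 1)
voxel = voxelization(points, vox_size=32)
print(voxel.shape)                          # (1, 32, 32, 32)
print(int(voxel.sum()))                     # number of occupied voxels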
Example #45
def test_regular_bounding_box_changes_the_shape_of_the_bounding_box(x, y, z):

    cloud = PyntCloud(pd.DataFrame(
        data={
            "x": np.array(x, dtype=np.float32),
            "y": np.array(y, dtype=np.float32),
            "z": np.array(z, dtype=np.float32)
        }))

    voxelgrid_id = cloud.add_structure("voxelgrid", n_x=2, n_y=2, n_z=2, regular_bounding_box=False)
    voxelgrid = cloud.structures[voxelgrid_id]

    irregular_last_centroid = voxelgrid.voxel_centers[-1]

    voxelgrid_id = cloud.add_structure("voxelgrid", n_x=2, n_y=2, n_z=2)
    voxelgrid = cloud.structures[voxelgrid_id]

    regular_last_centroid = voxelgrid.voxel_centers[-1]

    assert np.all(irregular_last_centroid <= regular_last_centroid)
Example #46
def test_mesh_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/diamond.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("mesh_random_sampling")

    sample = cloud.get_sample("mesh_random_sampling", n=100)

    assert len(sample) == 100
    assert all(sample.max(0) <= cloud.xyz.max(0))
    assert all(sample.min(0) >= cloud.xyz.min(0))
Example #47
def test_mesh_sampling():

    for ext in {"ply", "obj"}:
        cloud = PyntCloud.from_file(path + "/data/diamond.{}".format(ext))
        with pytest.raises(TypeError):
            sample = cloud.get_sample("mesh_random_sampling")

        sample = cloud.get_sample("mesh_random_sampling", n=100)

        assert len(sample) == 100
        assert all(sample.max(0) <= cloud.xyz.max(0))
        assert all(sample.min(0) >= cloud.xyz.min(0))
Example #48
def test_points_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("points_random_sampling")

    sample = cloud.get_sample("points_random_sampling", n=1)

    assert point_in_array_2D(sample, cloud.xyz)

    sample = cloud.get_sample("points_random_sampling", n=1, as_PyntCloud=True)

    assert isinstance(sample, PyntCloud)
Example #49
def test_to_file(tmpdir, diamond, extension, color, mesh):
    extra_write_args = {}
    if mesh:
        extra_write_args["also_save"] = ["mesh"]
    if extension == ".ply":
        extra_write_args["as_text"] = False
    if extension == "_ascii.ply":
        extra_write_args["as_text"] = True

    diamond.to_file(str(tmpdir.join("written{}".format(extension))), **extra_write_args)

    written_file = PyntCloud.from_file(str(tmpdir.join("written{}".format(extension))))

    assert_points_xyz(written_file)
    if color:
        assert_points_color(written_file)
    if mesh:
        assert_mesh(written_file)
Example #50
def test_k_neighbors():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    k_neighbors = cloud.get_neighbors(k=5)

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("eigen_values")

    ev = cloud.add_scalar_field("eigen_values", k_neighbors=k_neighbors)
    assert ev[0] == "e1(5)"

    ev = cloud.add_scalar_field(
        "eigen_decomposition", k_neighbors=k_neighbors)
    assert ev[3] == "ev1(5)"
    idx = np.random.randint(0, 100)
    for i in [3, 4, 5]:
        assert np.linalg.norm(cloud.points[ev[i]][idx]) > 0.99
        assert np.linalg.norm(cloud.points[ev[i]][idx]) < 1.01
Example #51
def test_xyz_filters():
    """filters.f_xyz.

    - Manually check known result.

    """
    cloud = PyntCloud.from_file(path + "/data/filters.ply")

    bbox = {
        "min_x": 0.4,
        "max_x": 0.6,
        "min_y": 0.4,
        "max_y": 0.6
    }

    f = cloud.get_filter("BBOX", and_apply=True, **bbox)

    assert f.argmax() == 3
    assert len(cloud.points) == 1
Example #52
def test_normals_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    cloud.add_scalar_field('inclination_deg')
    assert min(cloud.points["inclination_deg"]) >= 0
    assert max(cloud.points["inclination_deg"]) <= 180
    cloud.points.drop("inclination_deg", 1, inplace=True)

    cloud.add_scalar_field('inclination_rad')
    assert min(cloud.points["inclination_rad"]) >= 0
    assert max(cloud.points["inclination_rad"]) <= PI
    cloud.points.drop("inclination_rad", 1, inplace=True)

    cloud.add_scalar_field('orientation_deg')
    assert min(cloud.points["orientation_deg"]) >= 0
    assert max(cloud.points["orientation_deg"]) <= 360
    cloud.points.drop("orientation_deg", 1, inplace=True)

    cloud.add_scalar_field('orientation_rad')
    assert min(cloud.points["orientation_rad"]) >= 0
    assert max(cloud.points["orientation_rad"]) <= 2 * PI
    cloud.points.drop("orientation_rad", 1, inplace=True)
Example #53
def ply2gii(in_file, metadata, out_file=None):
    """Convert from ply to GIfTI"""
    from pathlib import Path
    from numpy import eye
    from nibabel.gifti import (
        GiftiMetaData, GiftiCoordSystem, GiftiImage, GiftiDataArray,
    )
    from nipype.utils.filemanip import fname_presuffix
    from pyntcloud import PyntCloud

    in_file = Path(in_file)
    surf = PyntCloud.from_file(str(in_file))

    # Update centroid metadata
    metadata.update(
        zip(('SurfaceCenterX', 'SurfaceCenterY', 'SurfaceCenterZ'),
            ['%.4f' % c for c in surf.centroid])
    )

    # Prepare data arrays
    da = (
        GiftiDataArray(
            data=surf.xyz.astype('float32'),
            datatype='NIFTI_TYPE_FLOAT32',
            intent='NIFTI_INTENT_POINTSET',
            meta=GiftiMetaData.from_dict(metadata),
            coordsys=GiftiCoordSystem(xform=eye(4), xformspace=3)),
        GiftiDataArray(
            data=surf.mesh.values,
            datatype='NIFTI_TYPE_INT32',
            intent='NIFTI_INTENT_TRIANGLE',
            coordsys=None))
    surfgii = GiftiImage(darrays=da)

    if out_file is None:
        out_file = fname_presuffix(
            in_file.name, suffix='.gii', use_ext=False, newpath=str(Path.cwd()))

    surfgii.to_filename(str(out_file))
    return out_file
Example #54
def test_voxelgrid_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")

    with pytest.raises(TypeError):
        cloud.get_sample("voxelgrid_centers")

    vg_id = cloud.add_structure("voxelgrid")

    with pytest.raises(KeyError):
        cloud.get_sample("voxelgrid_centers", voxelgrid=vg_id[:-2])

    sample = cloud.get_sample("voxelgrid_centers", voxelgrid=vg_id)

    assert point_in_array_2D([0.25, 0.25, 0.25], sample.values)

    sample = cloud.get_sample("voxelgrid_centroids", voxelgrid=vg_id)

    assert point_in_array_2D([0.2, 0.2, 0.2], sample.values)

    sample = cloud.get_sample("voxelgrid_nearest", voxelgrid=vg_id)

    assert point_in_array_2D([0.9, 0.9, 0.9], sample.values)
Example #55
def test_read_obj():
    obj = PyntCloud.from_file(data_path + '.obj')

    assert_points_xyz(obj)
Example #56
def test_read_ply_bin():
    ply_bin = PyntCloud.from_file(data_path + '.ply')

    assert_points_xyz(ply_bin)
    assert_points_color(ply_bin)
    assert_mesh(ply_bin)
Example #57
def test_read_ply_ascii():
    ply_ascii = PyntCloud.from_file(data_path + '_ascii.ply')

    assert_points_xyz(ply_ascii)
    assert_points_color(ply_ascii)
    assert_mesh(ply_ascii)
Example #58
def test_read_color_off():
    color_off = PyntCloud.from_file(data_path + '_color.off')

    assert_points_xyz(color_off)
    assert_points_color(color_off)
Example #59
def test_read_npz():
    npz = PyntCloud.from_file(data_path + '.npz')

    assert_points_xyz(npz)
    assert_points_color(npz)
    assert_mesh(npz)