Example no. 1
def test_voxelgrid_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("voxel_x")

    vg_id = cloud.add_structure("voxelgrid", x_y_z=[2, 2, 2])

    with pytest.raises(KeyError):
        # wrong id
        cloud.add_scalar_field("voxel_x", voxelgrid="V([1,1,1],True)")

    for sf in {"voxel_x", "voxel_y", "voxel_z"}:
        cloud.add_scalar_field(sf, voxelgrid=vg_id)
        sf_id = "{}({})".format(sf, vg_id)
        assert min(cloud.points[sf_id]) >= 0
        assert max(cloud.points[sf_id]) <= 1
        cloud.points.drop(sf_id, 1, inplace=True)

    cloud.add_scalar_field("voxel_n", voxelgrid=vg_id)
    sf_id = "voxel_n({})".format(vg_id)
    assert min(cloud.points[sf_id]) >= 0
    assert max(cloud.points[sf_id]) <= 7
    cloud.points.drop(sf_id, 1, inplace=True)

    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")

    voxelgrid = cloud.add_structure("voxelgrid", sizes=[0.3] * 3)
    clusters = cloud.add_scalar_field("euclidean_clusters",
                                      voxelgrid=voxelgrid)
    counts = sorted(cloud.points[clusters].value_counts().values)
    assert len(counts) == 2
    assert counts == [2, 4]
Example no. 3
def run(file1, file2, point_size=1):
    assert os.path.exists(file1), f'{file1} not found'
    assert os.path.exists(file2), f'{file2} not found'

    file2_folder, _ = os.path.split(file2)
    file2_report = os.path.join(file2_folder, 'report.json')
    assert os.path.exists(file2_report)

    logging.info(f'Updating {file2_report}.')
    with open(file2_report, 'r') as f:
        data = json.load(f)

    pc1 = PyntCloud.from_file(file1)
    pc2 = PyntCloud.from_file(file2)

    cols = ['x', 'y', 'z', 'red', 'green', 'blue']
    final_metrics, fwd_metrics, bwd_metrics = quality_eval.color_with_geo(pc1.points[cols].values, pc2.points[cols].values)

    n_points = len(pc1.points)
    size_in_bytes = data['color_bitstream_size_in_bytes']
    size_in_bits = size_in_bytes * 8
    bpp = size_in_bits / n_points
    data = {**data,
            **metrics_to_dict(final_metrics, ''),
            **metrics_to_dict(fwd_metrics, 'AB_'),
            **metrics_to_dict(bwd_metrics, 'BA_'),
            'color_bits_per_input_point': bpp,
            'input_point_count': n_points}
    with open(file2_report, 'w') as f:
        json.dump(data, f, sort_keys=True, indent=4)
    logging.info(f'{file2_report} written.')
Example no. 4
def compare(a, b):
    m = pc.from_file(a).points
    n = pc.from_file(b).points
    m = [tuple(m.x), tuple(m.y), tuple(m.z)]
    m = m[0]
    n = [tuple(n.x), tuple(n.y), tuple(n.z)]
    n = n[0]
    v1, v2 = verify_rmse(m, n), rmse(m, n)
    print(v1, v2)
Example no. 5
def test_ply_with_bool(data_path):
    """Expectation: a PLY file that contains bool types can be read into a PyntCloud object."""
    TEST_PLY = str(data_path / "diamond_with_bool.ply")

    with pytest.raises(KeyError, match="bool"):
        cloud = PyntCloud.from_file(TEST_PLY)

    cloud = PyntCloud.from_file(filename=TEST_PLY, allow_bool=True)
    assert "is_green" in cloud.points.columns, "Failed to find expected Boolean column: 'is_green'"
    assert cloud.points.is_green.dtype == bool, "Boolean column not loaded as bool dtype"
Example no. 6
def test_write_obj():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed.obj', also_save=["mesh"])

    writed_obj = PyntCloud.from_file(data_path + 'writed.obj')

    assert all(data.points[["x", "y", "z"]] == writed_obj.points)

    os.remove(data_path + 'writed.obj')
Example no. 7
def test_write_obj():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed.obj', internal=["points", "mesh"])

    writed_obj = PyntCloud.from_file(data_path + 'writed.obj')

    assert all(data.points[["x", "y", "z"]] == writed_obj.points)

    os.remove(data_path + 'writed.obj')
Example no. 8
def test_write_npz():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed_npz.npz', also_save=["mesh"])

    writed_npz = PyntCloud.from_file(data_path + 'writed_npz.npz')

    assert all(data.points == writed_npz.points)
    assert all(data.mesh == writed_npz.mesh)

    os.remove(data_path + 'writed_npz.npz')
Example no. 9
def test_write_obj():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'written.obj', also_save=["mesh"])

    written_obj = PyntCloud.from_file(data_path + 'written.obj')

    assert all(data.points[["x", "y", "z"]] == written_obj.points)
    assert all(data.mesh[["v1", "v2", "v3"]] == written_obj.mesh)

    os.remove(data_path + 'written.obj')
Example no. 10
def test_write_npz():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'written_npz.npz', also_save=["mesh"])

    written_npz = PyntCloud.from_file(data_path + 'written_npz.npz')

    assert all(data.points == written_npz.points)
    assert all(data.mesh == written_npz.mesh)

    os.remove(data_path + 'written_npz.npz')
Example no. 11
def test_write_npz():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed_npz.npz', internal=["points", "mesh"])

    writed_npz = PyntCloud.from_file(data_path + 'writed_npz.npz')

    assert all(data.points == writed_npz.points)
    assert all(data.mesh == writed_npz.mesh)

    os.remove(data_path + 'writed_npz.npz')
Example no. 12
def _label_transfer(incld, labelcloud, field='training'):
    """ get labels from one cloud and add to another if you
    forget to close down cgal correctly or something
    
    """

    pcd = PyntCloud.from_file(incld)

    pcd2 = PyntCloud.from_file(labelcloud)

    pcd.points[field] = pcd2.points[field]

    pcd.to_file(incld)
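
For reference, a minimal self-contained sketch of the same label-transfer idea, assuming two hypothetical files ("scan.ply" missing the labels, "labels.ply" carrying a 'training' column) and that both clouds share the same point order:

from pyntcloud import PyntCloud

# Hypothetical input files; both clouds are assumed to have the same point order.
target = PyntCloud.from_file("scan.ply")    # cloud missing the labels
source = PyntCloud.from_file("labels.ply")  # cloud that carries the 'training' column

# Copy the per-point labels across and save the result.
target.points["training"] = source.points["training"].values
target.to_file("scan_labelled.ply")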
Example no. 13
def test_from_file(data_path, extension, color, mesh):
    cloud = PyntCloud.from_file(str(data_path / "diamond{}".format(extension)))
    assert_points_xyz(cloud)
    if color:
        assert_points_color(cloud)
    if mesh:
        assert_mesh(cloud)
Example no. 14
def test_rgb_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    cloud.add_scalar_field('rgb_intensity')
    assert min(cloud.points["Ri"]) >= 0
    assert min(cloud.points["Gi"]) >= 0
    assert min(cloud.points["Bi"]) >= 0
    assert max(cloud.points["Ri"]) <= 1
    assert max(cloud.points["Gi"]) <= 1
    assert max(cloud.points["Bi"]) <= 1
    cloud.points.drop(["Ri", "Gi", "Bi"], 1, inplace=True)

    cloud.add_scalar_field('relative_luminance')
    assert min(cloud.points["relative_luminance"]) >= 0
    assert max(cloud.points["relative_luminance"]) < 255.01
    cloud.points.drop("relative_luminance", 1, inplace=True)

    cloud.add_scalar_field('hsv')
    assert min(cloud.points["H"]) >= 0
    assert max(cloud.points["H"]) <= 360
    assert min(cloud.points["S"]) >= 0
    assert max(cloud.points["S"]) <= 1
    assert min(cloud.points["V"]) >= 0
    assert max(cloud.points["V"]) <= 100
    cloud.points.drop(["H", "S", "V"], 1, inplace=True)
Example no. 15
def process(path, args):
    ori_path = join(args.source, path)
    target_path, _ = splitext(join(args.dest, path))
    target_path += '.ply'
    target_folder, _ = split(target_path)
    makedirs(target_folder, exist_ok=True)

    logger.debug(f"Writing PC {ori_path} to {target_path}")

    with open(ori_path, 'rb') as f:
        pc = PyntCloud.from_file(ori_path)
        coords = ['x', 'y', 'z']
        points = pc.points.values

        if(args.mode==0):
            points = points * (args.vg_size - 1)
            points = np.round(points)
        else:
            points = points / (args.vg_size - 1)

        pc.points[coords] = points

        if(args.mode==0):
            if len(set(pc.points.columns) - set(coords)) > 0:
                pc.points = pc.points.groupby(by=coords, sort=False).mean()
            else:
                pc.points = pc.points.drop_duplicates()

        pc.to_file(target_path)
Example no. 16
def test_obj_issue_221(data_path):
    """ Regression test https://github.com/daavoo/pyntcloud/issues/221
    """
    cloud = PyntCloud.from_file(str(data_path / "obj_issue_221.obj"))

    assert (len(cloud.xyz)) == 42
    assert (len(cloud.mesh)) == 88
Example no. 17
def prepare_content_or_style(path, downsample_points=None):
    if path.endswith("ply"):
        content = PyntCloud.from_file(path).points.values
        if downsample_points:
            mask = np.random.choice(content.shape[0], downsample_points)
            content = content[mask]
        content_ndata = content[:, :3]
        content_ncolor = (content[:, 3:6] - 127.5) / 127.5
        return content_ndata, content_ncolor
    elif path.endswith("npy"):
        content = np.load(path)
        if downsample_points:
            mask = np.random.choice(content.shape[0], downsample_points)
            content = content[mask]
        content_ndata = content[:, :3]
        content_ncolor = (content[:, 3:6] - 127.5) / 127.5
        return content_ndata, content_ncolor
    else:
        img = Image.open(path).convert("RGB")
        style_color = np.reshape(np.array(img), [-1, 3])
        style_color = (style_color - 127.5) / 127.5
        if downsample_points:
            mask = np.random.choice(style_color.shape[0], downsample_points)
            style_color = style_color[mask]
        return style_color
Example no. 18
def load_pc(path):
    logger.debug(f"Loading PC {path}")
    pc = PyntCloud.from_file(path)
    ret = df_to_pc(pc.points)
    logger.debug(f"Loaded PC {path}")

    return ret
Example no. 19
def test_eigenvalues():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    k_neighbors = cloud.get_neighbors(k=5)
    ev = cloud.add_scalar_field("eigen_values", k_neighbors=k_neighbors)

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("sphericity")

    cloud.add_scalar_field("sphericity", ev=ev)
    cloud.points.drop("sphericity(5)", 1, inplace=True)
    cloud.add_scalar_field("anisotropy", ev=ev)
    cloud.points.drop("anisotropy(5)", 1, inplace=True)
    cloud.add_scalar_field("linearity", ev=ev)
    cloud.points.drop("linearity(5)", 1, inplace=True)
    cloud.add_scalar_field("omnivariance", ev=ev)
    cloud.points.drop("omnivariance(5)", 1, inplace=True)
    cloud.add_scalar_field("eigenentropy", ev=ev)
    cloud.points.drop("eigenentropy(5)", 1, inplace=True)
    cloud.add_scalar_field("planarity", ev=ev)
    cloud.points.drop("planarity(5)", 1, inplace=True)
    cloud.add_scalar_field("eigen_sum", ev=ev)
    cloud.points.drop("eigen_sum(5)", 1, inplace=True)
    cloud.add_scalar_field("curvature", ev=ev)
    cloud.points.drop("curvature(5)", 1, inplace=True)
Example no. 22
def load_pcd_as_ndarray(pcd_path):
    """
    Loads a PCD file and returns its points as a numpy array.
    """
    pointcloud = PyntCloud.from_file(pcd_path)
    values = pointcloud.points.values
    return values
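
A minimal standalone version of the helper above, assuming a placeholder file "scan.pcd":

from pyntcloud import PyntCloud

cloud = PyntCloud.from_file("scan.pcd")  # placeholder path
values = cloud.points.values             # one row per point: x, y, z plus any extra fields
print(values.shape)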
Example no. 23
def test_write_ascii():
    data = PyntCloud.from_file(data_path + '.xyz', sep=" ", header=None,
                               index_col=False,
                               names=["x", "y", "z", "nx", "ny", "nz"],
                               dtype="f")

    data.to_file(data_path + 'writed.txt', sep=" ", header=None)

    writed_data = PyntCloud.from_file(data_path + 'writed.txt', sep=" ", header=None,
                                      index_col=False,
                                      names=["x", "y", "z", "nx", "ny", "nz"],
                                      dtype="f")

    assert all(data.points == writed_data.points)

    os.remove(data_path + 'writed.txt')
Example no. 24
def test_write_ply():
    data = PyntCloud.from_file(data_path + '_bin.ply')    
    
    data.to_file(data_path + 'writed_ascii.ply', internal=["points", "mesh"], as_text=True)  
    data.to_file(data_path + 'writed_bin.ply', internal=["points", "mesh"], as_text=False) 
              
    writed_ply_ascii = PyntCloud.from_file(data_path + 'writed_ascii.ply')
    writed_ply_bin = PyntCloud.from_file(data_path + 'writed_bin.ply')
    
    assert all(data.points == writed_ply_ascii.points)
    assert all(data.points == writed_ply_bin.points)
    assert all(data.mesh == writed_ply_ascii.mesh)
    assert all(data.mesh == writed_ply_bin.mesh)

    os.remove(data_path + 'writed_ascii.ply')
    os.remove(data_path + 'writed_bin.ply')
Example no. 25
    def _load_voxelgrid(self, pcd_path, preprocess=True, augmentation=True):
        voxelgrid = self.voxelgrid_cache.get(pcd_path, [])
        if voxelgrid == []:

            # Load the pointcloud.
            point_cloud = PyntCloud.from_file(pcd_path)
            if self.voxelgrid_random_rotation == True and augmentation == True:
                points = point_cloud.points
                numpy_points = points.values[:,0:3]
                numpy_points = self._rotate_point_cloud(numpy_points)
                points.iloc[:,0:3] = numpy_points
                point_cloud.points = points

            # Create voxelgrid from pointcloud.
            voxelgrid_id = point_cloud.add_structure("voxelgrid", size_x=self.voxel_size_meters, size_y=self.voxel_size_meters, size_z=self.voxel_size_meters)
            voxelgrid = point_cloud.structures[voxelgrid_id].get_feature_vector(mode="density")

            # Do the preprocessing.
            if preprocess == True:
                voxelgrid = utils.ensure_voxelgrid_shape(voxelgrid, self.voxelgrid_target_shape)
                assert voxelgrid.shape == self.voxelgrid_target_shape

            #self.voxelgrid_cache[pcd_path] = voxelgrid # TODO cache is turned off because of you know why...

        return voxelgrid
Example no. 26
def test_split_on():
    """PyntCloud.split_on.

    - Raise KeyError on invalid scalar field
    - Raise ValueError on invalid save_format
    - and_return should return a list of PyntClouds
    - Implicitly check that save_path is working

    """
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    vg_id = cloud.add_structure("voxelgrid", x_y_z=[2, 2, 2])

    voxel_n = cloud.add_scalar_field("voxel_n", voxelgrid=vg_id)

    with pytest.raises(KeyError):
        cloud.split_on("bad_sf")

    with pytest.raises(ValueError):
        cloud.split_on(voxel_n, save_format="bad_format")

    output = cloud.split_on(voxel_n, save_path="tmp_out")

    assert output is None

    output = cloud.split_on(voxel_n, and_return=True, save_path="tmp_out")

    assert len(output) == 8

    rmtree("tmp_out")
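
A minimal sketch of split_on outside the test, assuming a placeholder file "scan.ply": every distinct value of the chosen scalar field becomes its own output cloud.

from pyntcloud import PyntCloud

cloud = PyntCloud.from_file("scan.ply")  # placeholder path
vg_id = cloud.add_structure("voxelgrid", x_y_z=[2, 2, 2])
voxel_n = cloud.add_scalar_field("voxel_n", voxelgrid=vg_id)

# One PyntCloud per distinct voxel_n value; the clouds are also written under save_path.
clouds = cloud.split_on(voxel_n, and_return=True, save_path="split_out")
print(len(clouds))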
Example no. 27
def getPointCloudFromMesh(filename, flag):
    cloud = PyntCloud.from_file(filename + '.ply')
    #cloud.plot()
    if flag == 'hand':
        voxelgrid_id = cloud.add_structure("voxelgrid", n_x=32, n_y=32, n_z=32)
        points = cloud.get_sample("mesh_random", n=3000, normals=True)

    else:
        voxelgrid_id = cloud.add_structure("voxelgrid")
        points = cloud.get_sample("mesh_random", n=3000, normals=False)

    points_array = numpy.array(points.to_records())
    point_list = []
    kd_tree_list = []
    for i in range(0, len(points_array)):
        #point = [points_array[i][1], points_array[i][2], points_array[i][3]]
        if flag == 'hand':
            point_list.append(points_array[i][1])
            point_list.append(points_array[i][2])
            point_list.append(points_array[i][3])
            point_list.append(points_array[i][4])
            point_list.append(points_array[i][5])
            point_list.append(points_array[i][6])
        else:
            kd_tree_list.append(points_array[i][1])
            kd_tree_list.append(points_array[i][2])
            kd_tree_list.append(points_array[i][3])

    if flag == 'hand':
        numpy.savetxt(filename + '.out', point_list, delimiter=',')
        new_cloud = PyntCloud(points)
        new_cloud.to_file(filename + '.npz')
        new_cloud.to_file(filename + '.ply')
    else:
        numpy.savetxt(filename + '.out', kd_tree_list, delimiter=',')
Example no. 28
def sample_pt_cld(scale, grasp_rescale, abs_model_path):
    m = PyntCloud.from_file(abs_model_path)
    pt_cld = m.get_sample("mesh_random", n=10000, rgb=False,
                          normals=False).values
    pt_cld *= grasp_rescale
    pt_cld *= scale
    return pt_cld
Example no. 29
    def _load_voxelgrid(self, pcd_path, preprocess=True, augmentation=True):
        point_cloud = PyntCloud.from_file(pcd_path)

        if self.voxelgrid_random_rotation is True and augmentation is True:
            points = point_cloud.points
            numpy_points = points.values[:, 0:3]
            numpy_points = self._rotate_point_cloud(numpy_points)
            points.iloc[:, 0:3] = numpy_points
            point_cloud.points = points

        # Create voxelgrid from pointcloud.
        voxelgrid_id = point_cloud.add_structure("voxelgrid",
                                                 size_x=self.voxel_size_meters,
                                                 size_y=self.voxel_size_meters,
                                                 size_z=self.voxel_size_meters)

        voxelgrid = point_cloud.structures[voxelgrid_id].get_feature_vector(
            mode="density")

        # Do the preprocessing.
        if preprocess:
            voxelgrid = etl_utils.ensure_voxelgrid_shape(
                voxelgrid, self.voxelgrid_target_shape)
            assert voxelgrid.shape == self.voxelgrid_target_shape

        return voxelgrid
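
Outside the class, the voxelgrid part of the method above reduces to the following sketch, assuming a placeholder file "scan.ply" and a 0.1-unit voxel size:

from pyntcloud import PyntCloud

cloud = PyntCloud.from_file("scan.ply")  # placeholder path

# Build a voxelgrid structure with a fixed voxel size along each axis.
vg_id = cloud.add_structure("voxelgrid", size_x=0.1, size_y=0.1, size_z=0.1)
voxelgrid = cloud.structures[vg_id]

# Turn the voxelgrid into a dense 3D array of per-voxel point densities.
density = voxelgrid.get_feature_vector(mode="density")
print(density.shape)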
Example no. 30
def sample_points(root_mesh_dir, root_points_dir, n_points, resolution):
  #1. read folders
  filedirs = read_file(root_mesh_dir, postfix='.obj')[:200]
  print('shapenet:', len(filedirs))

  for index, filedir in enumerate(filedirs):
    # 1. transform format to .ply
    os.system('pcl_obj2ply -format 1 ' + filedir + ' ' + './tp.ply')

    # 2. sample points.
    pc_mesh = PyntCloud.from_file('tp.ply')
    pc = pc_mesh.get_sample("mesh_random", n=n_points, as_PyntCloud=True)

    # 3. random rotate.
    points = pc.points.values
    points = np.dot(points, get_rotate_matrix())

    # 4. voxelization.
    points = points - np.min(points)
    points = points / np.max(points)
    points = points * (resolution)
    points = np.round(points).astype('float32')
    coords = ['x', 'y', 'z']
    pc.points[coords] = points

    if len(set(pc.points.columns) - set(coords)) > 0:
      pc.points = pc.points.groupby(by=coords, sort=False).mean()
    else:
      pc.points = pc.points.drop_duplicates()
    
    # 5. write points.
    pcdir = os.path.join(root_points_dir, str(index)+'.ply')   
    os.system('rm ' + pcdir)
    write_ply_data(pcdir, pc.points.values)
Example no. 31
def test_read_ascii():
    data = PyntCloud.from_file(data_path + '.xyz', sep=" ", header=None,
                               index_col=False,
                               names=["x", "y", "z", "nx", "ny", "nz"],
                               dtype="f")

    assert_points_xyz(data)
Example no. 32
def test_open3d_triangle_mesh_conversion(data_path):
    cloud = PyntCloud.from_file(str(data_path.joinpath("diamond.ply")))
    # mesh=True by default
    triangle_mesh = cloud.to_instance("open3d")
    assert isinstance(triangle_mesh, o3d.geometry.TriangleMesh)
    assert np.allclose(cloud.xyz, triangle_mesh.vertices)
    assert np.allclose(cloud.mesh.values, triangle_mesh.triangles)
Example no. 33
def read_fused(path_to_fused_ply, path_to_fused_ply_vis):
    """
    see: src/mvs/meshing.cc
        void ReadDenseReconstruction(const std::string& path
    """
    assert os.path.isfile(path_to_fused_ply)
    assert os.path.isfile(path_to_fused_ply_vis)

    point_cloud = PyntCloud.from_file(path_to_fused_ply)
    xyz_arr = point_cloud.points.loc[:, ["x", "y", "z"]].to_numpy()
    normal_arr = point_cloud.points.loc[:, ["nx", "ny", "nz"]].to_numpy()
    color_arr = point_cloud.points.loc[:, ["red", "green", "blue"]].to_numpy()

    with open(path_to_fused_ply_vis, "rb") as fid:
        num_points = read_next_bytes(fid, 8, "Q")[0]
        mesh_points = [0] * num_points
        for i in range(num_points):
            num_visible_images = read_next_bytes(fid, 4, "I")[0]
            visible_image_idxs = read_next_bytes(
                fid,
                num_bytes=4 * num_visible_images,
                format_char_sequence="I" * num_visible_images)
            visible_image_idxs = np.array(tuple(map(int, visible_image_idxs)))
            mesh_point = MeshPoint(position=xyz_arr[i],
                                   color=color_arr[i],
                                   normal=normal_arr[i],
                                   num_visible_images=num_visible_images,
                                   visible_image_idxs=visible_image_idxs)
            mesh_points[i] = mesh_point
        return mesh_points
Example no. 34
def pytncloud_plyfile_test(ifp):
    cloud = PyntCloud.from_file(ifp)
    points_pyntcloud = cloud.points.loc[:, ["x", "y", "z"]].to_numpy()
    colors_pyntcloud = cloud.points.loc[:, ["red", "green", "blue"]].to_numpy()
    points_plyfile, colors_plyfile, comments = ply2np(ifp)
    assert np.array_equal(points_pyntcloud, points_plyfile)
    assert np.array_equal(colors_pyntcloud, colors_plyfile)
Example no. 35
def main():
    # Load a point cloud by automatically building its path from the ModelNet dataset folder
    cat_index = 0 # object category index, range 0-39, i.e. one of the 40 categories in the dataset
    root_dir = '/home/ljn/SLAM/dateset/modelnet40_normal_resampled'  # dataset path
    cat = os.listdir(root_dir)
    #['cone', 'bench', 'vase', 'toilet', 'bottle', 'sofa',
    # 'stairs', 'flower_pot', 'bathtub', 'piano', 'airplane',
    # 'bed', 'curtain', 'chair', 'mantel', 'keyboard', 'night_stand',
    # 'wardrobe', 'desk', 'table', 'door', 'dresser', 'car', 'laptop',
    # 'guitar', 'cup', 'glass_box', 'xbox', 'monitor', 'plant', 'tent',
    # 'bowl', 'radio', 'stool', 'person', 'tv_stand', 'range_hood', 'sink',
    # 'bookshelf', 'lamp']
    filename = os.path.join(root_dir, cat[cat_index], cat[cat_index] + '_0001.txt')  # use the first point cloud by default
    # load the raw point cloud
    point_cloud_pynt = PyntCloud.from_file(filename, sep=",", header=None, names=["x", "y", "z"], usecols=[0, 1, 2])

    # convert to a format that open3d can handle
    point_cloud_o3d = point_cloud_pynt.to_instance("open3d", mesh=False)
    # o3d.visualization.draw_geometries([point_cloud_o3d]) # show the raw point cloud

    # apply the voxel filter function to downsample the cloud
    filtered_cloud = voxel_filter(point_cloud_pynt.points, 1000)  # roughly 1000 points after sampling
    point_cloud_o3d  = o3d.geometry.PointCloud()
    point_cloud_o3d.points = o3d.utility.Vector3dVector(filtered_cloud)
    # show the filtered point cloud (modified)
    #o3d.visualization.draw_geometries([point_cloud_o3d])

    vis = o3d.visualization.Visualizer()
    vis.create_window(window_name='Open3D_vexel_filter', width=860, height=540, left=50, top=50, visible=True)
    vis.add_geometry(point_cloud_o3d)
    filename = os.path.join('../TestData/image/', cat[cat_index] + '_vexel_filter'+approximate+'_0001.png')
    vis.run()
    vis.capture_screen_image(filename, do_render=False)
    vis.destroy_window()
Example no. 36
def main(point_cloud_filename):
    # load point cloud:
    point_cloud_pynt = PyntCloud.from_file(point_cloud_filename)
    point_cloud_o3d = point_cloud_pynt.to_instance("open3d", mesh=False)

    # get the points from the cloud; only the points are processed
    points = point_cloud_pynt.points
    print('[PCA Normal]: Total number of points:', points.shape[0])

    # use PCA to analyse the principal directions of the point cloud
    w, v = PCA(points)
    point_cloud_vector = v[:, 2]  # vector corresponding to the main direction of the point cloud
    print('the main orientation of this pointcloud is: ', point_cloud_vector)
    # get PCA geometry:
    pca_o3d = get_pca_o3d(w, v, points)

    # TODO 02:
    # compute the normal vector of every point in a loop
    normals = get_surface_normals(point_cloud_o3d, points)
    # the normal vectors are stored in normals
    point_cloud_o3d.normals = o3d.utility.Vector3dVector(normals)
    # get surface normals geometry:
    surface_normals_o3d = get_surface_normals_o3d(normals, points)

    # visualize point clouds with PCA and surface normals:
    o3d.visualization.draw_geometries(
        [point_cloud_o3d, pca_o3d, surface_normals_o3d])
Example no. 37
def main():
    # # Load a point cloud by automatically building its path from the ModelNet dataset folder
    # cat_index = 10 # object category index, range 0-39, i.e. one of the 40 categories in the dataset
    # root_dir = '/Users/renqian/cloud_lesson/ModelNet40/ply_data_points' # dataset path
    # cat = os.listdir(root_dir)
    # filename = os.path.join(root_dir, cat[cat_index],'train', cat[cat_index]+'_0001.ply') # use the first point cloud by default
    # point_cloud_pynt = PyntCloud.from_file(file_name)

    # load your own point cloud file
    file_name = "/home/xcy/myWork/pointCloudProcessing/ModelNet40/ply_data_points/airplane/train/airplane_0001.ply"

    point_cloud_pynt = PyntCloud.from_file(file_name)

    print(point_cloud_pynt.points.shape)

    # convert to a format that open3d can handle
    point_cloud_o3d = point_cloud_pynt.to_instance("open3d", mesh=False)

    #    o3d.visualization.draw_geometries([point_cloud_o3d]) # show the raw point cloud

    # apply the voxel filter function to downsample the cloud
    filtered_cloud = voxel_filter(point_cloud_pynt.points, 100)

    print(filtered_cloud.shape)

    point_cloud_o3d.points = o3d.utility.Vector3dVector(filtered_cloud)
    # show the filtered point cloud
    o3d.visualization.draw_geometries([point_cloud_o3d])
Example no. 38
 def setReferenceModel(self, reference_model_path):
     pc = PyntCloud.from_file(reference_model_path)
     pc_points = np.array(pc.points).astype(np.float32)
     GM.base_point_cloud = PointCloud(pc_points)
     GM.base_point_cloud.set_color_according_camera_pos(
         camera_pos=[1.5, 1.5, 0.0])
     self.update()
Example no. 39
def process(path, args):
    ori_path = join(args.source, path)
    target_path, _ = splitext(join(args.dest, path))
    target_path += args.target_extension
    target_folder, _ = split(target_path)
    makedirs(target_folder, exist_ok=True)

    logger.debug(f"Writing PC {ori_path} to {target_path}")
    pc_mesh = PyntCloud.from_file(ori_path)
    mesh = pc_mesh.mesh
    pc_mesh.points = pc_mesh.points.astype('float64', copy=False)
    pc_mesh.mesh = mesh

    pc = pc_mesh.get_sample("mesh_random", n=args.n_samples, as_PyntCloud=True)
    coords = ['x', 'y', 'z']
    points = pc.points.values
    points = points - np.min(points)
    points = points / np.max(points)
    points = points * (args.vg_size - 1)
    points = np.round(points)
    pc.points[coords] = points
    if len(set(pc.points.columns) - set(coords)) > 0:
        pc.points = pc.points.groupby(by=coords, sort=False).mean()
    else:
        pc.points = pc.points.drop_duplicates()

    pc.to_file(target_path)
Example no. 40
def test_write_ply():
    data = PyntCloud.from_file(data_path + '.ply')

    data.to_file(data_path + 'writed_ascii.ply', also_save=["mesh"],
                 as_text=True)
    data.to_file(data_path + 'writed_bin.ply', also_save=["mesh"],
                 as_text=False)

    writed_ply_ascii = PyntCloud.from_file(data_path + 'writed_ascii.ply')
    writed_ply_bin = PyntCloud.from_file(data_path + 'writed_bin.ply')

    assert all(data.points == writed_ply_ascii.points)
    assert all(data.points == writed_ply_bin.points)
    assert all(data.mesh == writed_ply_ascii.mesh)
    assert all(data.mesh == writed_ply_bin.mesh)

    os.remove(data_path + 'writed_ascii.ply')
    os.remove(data_path + 'writed_bin.ply')
Example no. 41
def test_sf_xyz():
    cloud = PyntCloud.from_file(path + "/data/plane.npz")

    # fit with default values (max_dist=1e-4)
    is_plane = cloud.add_scalar_field("plane_fit")
    assert sorted(cloud.points[is_plane].value_counts()) == [1, 4]

    # fit with higher tolerance -> include outlier
    is_plane = cloud.add_scalar_field("plane_fit", max_dist=0.4)
    assert sorted(cloud.points[is_plane].value_counts()) == [5]

    cloud = PyntCloud.from_file(path + "/data/sphere.ply")

    is_sphere = cloud.add_scalar_field("sphere_fit")
    assert sorted(cloud.points[is_sphere].value_counts()) == [1, 2928]

    is_sphere = cloud.add_scalar_field("sphere_fit", max_dist=26)
    assert sorted(cloud.points[is_sphere].value_counts()) == [2929]
Example no. 42
def test_mesh_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/diamond.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("mesh_random_sampling")

    sample = cloud.get_sample("mesh_random_sampling", n=100)

    assert len(sample) == 100
    assert all(sample.max(0) <= cloud.xyz.max(0))
    assert all(sample.min(0) >= cloud.xyz.min(0))
Example no. 43
def test_mesh_sampling():

    for ext in {"ply", "obj"}:
        cloud = PyntCloud.from_file(path + "/data/diamond.{}".format(ext))
        with pytest.raises(TypeError):
            sample = cloud.get_sample("mesh_random_sampling")

        sample = cloud.get_sample("mesh_random_sampling", n=100)

        assert len(sample) == 100
        assert all(sample.max(0) <= cloud.xyz.max(0))
        assert all(sample.min(0) >= cloud.xyz.min(0))
Example no. 44
def test_points_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")
    with pytest.raises(TypeError):
        sample = cloud.get_sample("points_random_sampling")

    sample = cloud.get_sample("points_random_sampling", n=1)

    assert point_in_array_2D(sample, cloud.xyz)

    sample = cloud.get_sample("points_random_sampling", n=1, as_PyntCloud=True)

    assert isinstance(sample, PyntCloud)
Example no. 45
def test_to_file(tmpdir, diamond, extension, color, mesh):
    extra_write_args = {}
    if mesh:
        extra_write_args["also_save"] = ["mesh"]
    if extension == ".ply":
        extra_write_args["as_text"] = False
    if extension == "_ascii.ply":
        extra_write_args["as_text"] = True

    diamond.to_file(str(tmpdir.join("written{}".format(extension))), **extra_write_args)

    written_file = PyntCloud.from_file(str(tmpdir.join("written{}".format(extension))))

    assert_points_xyz(written_file)
    if color:
        assert_points_color(written_file)
    if mesh:
        assert_mesh(written_file)
Example no. 46
def test_k_neighbors():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")
    k_neighbors = cloud.get_neighbors(k=5)

    with pytest.raises(TypeError):
        # missing arg
        cloud.add_scalar_field("eigen_values")

    ev = cloud.add_scalar_field("eigen_values", k_neighbors=k_neighbors)
    assert ev[0] == "e1(5)"

    ev = cloud.add_scalar_field(
        "eigen_decomposition", k_neighbors=k_neighbors)
    assert ev[3] == "ev1(5)"
    idx = np.random.randint(0, 100)
    for i in [3, 4, 5]:
        assert np.linalg.norm(cloud.points[ev[i]][idx]) > 0.99
        assert np.linalg.norm(cloud.points[ev[i]][idx]) < 1.01
Example no. 47
def test_xyz_filters():
    """filters.f_xyz.

    - Manually check known result.

    """
    cloud = PyntCloud.from_file(path + "/data/filters.ply")

    bbox = {
        "min_x": 0.4,
        "max_x": 0.6,
        "min_y": 0.4,
        "max_y": 0.6
    }

    f = cloud.get_filter("BBOX", and_apply=True, **bbox)

    assert f.argmax() == 3
    assert len(cloud.points) == 1
Example no. 48
def ply2gii(in_file, metadata, out_file=None):
    """Convert from ply to GIfTI"""
    from pathlib import Path
    from numpy import eye
    from nibabel.gifti import (
        GiftiMetaData, GiftiCoordSystem, GiftiImage, GiftiDataArray,
    )
    from pyntcloud import PyntCloud

    in_file = Path(in_file)
    surf = PyntCloud.from_file(str(in_file))

    # Update centroid metadata
    metadata.update(
        zip(('SurfaceCenterX', 'SurfaceCenterY', 'SurfaceCenterZ'),
            ['%.4f' % c for c in surf.centroid])
    )

    # Prepare data arrays
    da = (
        GiftiDataArray(
            data=surf.xyz.astype('float32'),
            datatype='NIFTI_TYPE_FLOAT32',
            intent='NIFTI_INTENT_POINTSET',
            meta=GiftiMetaData.from_dict(metadata),
            coordsys=GiftiCoordSystem(xform=eye(4), xformspace=3)),
        GiftiDataArray(
            data=surf.mesh.values,
            datatype='NIFTI_TYPE_INT32',
            intent='NIFTI_INTENT_TRIANGLE',
            coordsys=None))
    surfgii = GiftiImage(darrays=da)

    if out_file is None:
        out_file = fname_presuffix(
            in_file.name, suffix='.gii', use_ext=False, newpath=str(Path.cwd()))

    surfgii.to_filename(str(out_file))
    return out_file
Example no. 49
def test_normals_sf():
    cloud = PyntCloud.from_file(path + "/data/mnist.npz")

    cloud.add_scalar_field('inclination_deg')
    assert min(cloud.points["inclination_deg"]) >= 0
    assert max(cloud.points["inclination_deg"]) <= 180
    cloud.points.drop("inclination_deg", 1, inplace=True)

    cloud.add_scalar_field('inclination_rad')
    assert min(cloud.points["inclination_rad"]) >= 0
    assert max(cloud.points["inclination_rad"]) <= PI
    cloud.points.drop("inclination_rad", 1, inplace=True)

    cloud.add_scalar_field('orientation_deg')
    assert min(cloud.points["orientation_deg"]) >= 0
    assert max(cloud.points["orientation_deg"]) <= 360
    cloud.points.drop("orientation_deg", 1, inplace=True)

    cloud.add_scalar_field('orientation_rad')
    assert min(cloud.points["orientation_rad"]) >= 0
    assert max(cloud.points["orientation_rad"]) <= 2 * PI
    cloud.points.drop("orientation_rad", 1, inplace=True)
Example no. 50
def test_voxelgrid_sampling():
    path = os.path.abspath(os.path.dirname(__file__))
    cloud = PyntCloud.from_file(path + "/data/voxelgrid.ply")

    with pytest.raises(TypeError):
        cloud.get_sample("voxelgrid_centers")

    vg_id = cloud.add_structure("voxelgrid")

    with pytest.raises(KeyError):
        cloud.get_sample("voxelgrid_centers", voxelgrid=vg_id[:-2])

    sample = cloud.get_sample("voxelgrid_centers", voxelgrid=vg_id)

    assert point_in_array_2D([0.25, 0.25, 0.25], sample.values)

    sample = cloud.get_sample("voxelgrid_centroids", voxelgrid=vg_id)

    assert point_in_array_2D([0.2, 0.2, 0.2], sample.values)

    sample = cloud.get_sample("voxelgrid_nearest", voxelgrid=vg_id)

    assert point_in_array_2D([0.9, 0.9, 0.9], sample.values)
Example no. 51
def test_read_obj():
    obj = PyntCloud.from_file(data_path + '.obj')

    assert_points_xyz(obj)
Example no. 52
def test_read_off():
    off = PyntCloud.from_file(data_path + '.off')

    assert_points_xyz(off)
Example no. 53
def test_read_color_off():
    color_off = PyntCloud.from_file(data_path + '_color.off')

    assert_points_xyz(color_off)
    assert_points_color(color_off)
Example no. 54
def test_read_ply_bin():
    ply_bin = PyntCloud.from_file(data_path + '.ply')

    assert_points_xyz(ply_bin)
    assert_points_color(ply_bin)
    assert_mesh(ply_bin)
Example no. 55
def test_read_ply_ascii():
    ply_ascii = PyntCloud.from_file(data_path + '_ascii.ply')

    assert_points_xyz(ply_ascii)
    assert_points_color(ply_ascii)
    assert_mesh(ply_ascii)
Example no. 56
def test_read_npz():
    npz = PyntCloud.from_file(data_path + '.npz')

    assert_points_xyz(npz)
    assert_points_color(npz)
    assert_mesh(npz)
Example no. 57
import os
import pytest
from pyntcloud import PyntCloud


path = os.path.abspath(os.path.dirname(__file__))
cloud = PyntCloud.from_file(path + "/data/filters.ply")


def test_kdtree_filters():
    """filters.f_kdtree.

    - Raise TypeError when missing required arguments
    - Raise KeyError when structure.id is invalid
    - Raise TypeError when wrong argument is given (k instead of r)
    - Manually check known result.

    """
    with pytest.raises(TypeError):
        cloud.get_filter("ROR")

    kdtree = cloud.add_structure("kdtree")

    with pytest.raises(KeyError):
        cloud.get_filter("ROR", kdtree="K(12)", k=2, r=0.2)

    f = cloud.get_filter("ROR", kdtree=kdtree, k=2, r=0.2)

    assert f.argmin() == 3

    with pytest.raises(TypeError):
Example no. 58
def test_obj_issue_226(data_path):
    """ Regression test https://github.com/daavoo/pyntcloud/issues/226
    """
    cloud = PyntCloud.from_file(str(data_path / "obj_issue_226.obj"))

    assert "w" in cloud.points.columns