Example #1
# -*- coding: utf-8 -*-
from __future__ import print_function

# Based on this code:
# http://ros-robot.blogspot.jp/2011/08/pclapi-point-cloud-library-pcl-pcl-api.html

import numpy as np
import pcl
import random

import pcl.pcl_visualization

# pcl::PointCloud<pcl::PointXYZRGB> cloud;
cloud = pcl.PointCloud_PointXYZRGB()

# Fill in the cloud data
# cloud.width  = 15;
# cloud.height = 10;
# cloud.points.resize (cloud.width * cloud.height)
# cloud.resize (np.array([15, 10], dtype=np.float))
# points = np.zeros((10, 15, 4), dtype=np.float32)
points = np.zeros((150, 4), dtype=np.float32)
RAND_MAX = 1.0
# Generate the data
for i in range(0, 75):
    # set Point Plane
    points[i][0] = 1024 * random.random() / (RAND_MAX + 1.0)
    points[i][1] = 1024 * random.random() / (RAND_MAX + 1.0)
    points[i][2] = 0.1 * random.random() / (RAND_MAX + 1.0)
    points[i][3] = 255 << 16 | 255 << 8 | 255
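
The snippet above only fills the first 75 points (the planar part). A minimal sketch of how the example might continue, assuming the remaining points are random and the array is then loaded into the cloud for viewing with python-pcl's CloudViewing:

# Remaining points: random values away from the plane (an assumption about how
# the original example continues; only the first half of the loop is shown above).
for i in range(75, 150):
    points[i][0] = 1024 * random.random() / (RAND_MAX + 1.0)
    points[i][1] = 1024 * random.random() / (RAND_MAX + 1.0)
    points[i][2] = 1024 * random.random() / (RAND_MAX + 1.0)
    points[i][3] = 255 << 16 | 255 << 8 | 255

# Load the Nx4 float32 array (x, y, z, packed rgb) into the cloud and view it.
cloud.from_array(points)

visual = pcl.pcl_visualization.CloudViewing()
visual.ShowColorCloud(cloud, b'cloud')
while not visual.WasStopped():
    pass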
Example #2
    seg.set_MaxIterations(10000)
    seg.set_optimize_coefficients(True)
    seg.set_method_type(6)  # 6 == SAC_PROSAC in PCL's SAC method enum
    inliers, coefficients = seg.segment()
    clc = tool0.extract(inliers, negative=False)
    outliers = tool0.extract(inliers, negative=True)
    points_list = []
    for data in clc:
        points_list.append(
            [data[0], data[1], data[2],
             rgb_to_float([0, 255, 0])])
    for data in outliers:
        points_list.append(
            [data[0], data[1], data[2],
             rgb_to_float([255, 0, 0])])
    tool0_c = pcl.PointCloud_PointXYZRGB()
    tool0_c.from_list(points_list)
    pcl.save(tool0_c, "tool0_c%s.pcd" % (i + 1))
    p_camera.append(coefficients[:3])
    # coefficients[3] is the model's remaining parameter (unused here)

p_camera_ = np.matrix(
    np.concatenate((np.array(p_camera).transpose() / 1000, np.ones([1, 4])),
                   axis=0))
p_camera_reverse = p_camera_.getI()

H = np.matmul(p_robot, p_camera_reverse)

R = H[:3, :3]
t = H[:3, 3]
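
With the hand-eye transform recovered above, a point measured in the camera frame can be mapped into the robot frame. A minimal usage sketch; the sample coordinates are made up for illustration:

# Map a camera-frame point (in metres) into the robot frame using the recovered
# rotation R and translation t. The sample coordinates are illustrative only.
p_cam = np.matrix([[0.10], [0.02], [0.45]])   # 3x1 column vector, camera frame
p_rob = R * p_cam + t                         # 3x1 column vector, robot frame
print(p_rob)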
Example #3
def pcl_callback(pcl_msg):
    """Callback function for your Point Cloud Subscriber
    :param pcl_msg: ROS point cloud message
    """

    # Convert ROS msg to PCL data (XYZRGB)
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    '''
    # Much like the previous filters, we start by creating a filter object: 
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point whose mean distance exceeds (global mean distance + x * std_dev)
    # is treated as an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Call the filter function to apply the outlier removal
    cloud_filtered = outlier_filter.filter()
    '''
    # PassThrough Filter to isolate the table and objects
    passthrough = cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name('z')
    axis_min = 0.76
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # RANSAC Plane Segmentation to identify the table from the objects
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers (table surface) and outliers (objects)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Convert the xyzrgb cloud to only xyz since k-d tree only needs xyz
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    # A k-d tree reduces the cost of searching for neighboring points;
    #   PCL's Euclidean clustering algorithm only supports k-d trees
    tree = white_cloud.make_kdtree()

    # Euclidean clustering used to build individual point clouds for each
    #   object. The Cluster-Mask point cloud allows each cluster to be
    #   visualized separately.
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(3000)
    ec.set_SearchMethod(tree)       # Search the k-d tree for clusters
    # Extract indices for each found cluster. This is a list of indices for
    #   each cluster (list of lists)
    cluster_indices = ec.Extract()

    # Assign a random color to each isolated object in the scene
    cluster_color = get_color_list(len(cluster_indices))
        
    # Store the detected objects and labels in these lists
    detected_objects_labels = []
    detected_objects = []
    color_cluster_point_list = []

    # Iterate through each detected object cluster for object recognition
    for index, pts_list in enumerate(cluster_indices):
        
        object_cluster = []

        # Create an individual cluster just for the object being processed
        for i, pts in enumerate(pts_list):
            # Retrieve cloud values for the x, y, z, rgb object
            object_cluster.append([cloud_objects[pts][0],
                                   cloud_objects[pts][1],
                                   cloud_objects[pts][2],
                                   cloud_objects[pts][3]])
            
            # Retrieve the x, y, z values, assigning a pre-identified
            #   color to every point in this cluster
            color_cluster_point_list.append([white_cloud[pts][0],
                                             white_cloud[pts][1],
                                             white_cloud[pts][2],
                                             rgb_to_float(cluster_color[index])])


        # Convert list of point cloud features (x,y,z,rgb) into a point cloud
        pcl_cluster = pcl.PointCloud_PointXYZRGB()
        pcl_cluster.from_list(object_cluster)

        # Convert the cluster from pcl to ROS using helper function
        ros_cloud = pcl_to_ros(pcl_cluster)
        #pcl_objects_pub.publish(ros_cloud)

        # Extract histogram features (similar to capture_features.py)
        chists = compute_color_histograms(ros_cloud,
                                          nbins=64,
                                          using_hsv=True)
        normals = get_normals(ros_cloud)
        nhists = compute_normal_histograms(normals, nbins=64)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result and add it
        # to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    
    # Create new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # Publish ROS messages of the point clouds and detected objects
    pcl_objects_cloud_pub.publish(ros_cloud_cluster) 
    pcl_objects_pub.publish(ros_cloud_objects)      
    pcl_table_pub.publish(ros_cloud_table)          
    detected_objects_pub.publish(detected_objects)  # detected object labels
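
The callback above relies on module-level publishers and a trained classifier (clf, scaler, encoder) that are not shown. A minimal node-setup sketch using the same publisher names; the topic names and the sensor_stick import path for the detected-object messages are assumptions, and loading the trained classifier is omitted:

import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import Marker
# Assumed import path for the custom messages used by the callback
from sensor_stick.msg import DetectedObject, DetectedObjectsArray

if __name__ == '__main__':
    rospy.init_node('object_recognition', anonymous=True)

    # Subscribe to the RGB-D camera cloud (the topic name is an assumption)
    pcl_sub = rospy.Subscriber('/pr2/world/points', PointCloud2,
                               pcl_callback, queue_size=1)

    # Publishers referenced inside pcl_callback()
    pcl_objects_pub = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)
    pcl_objects_cloud_pub = rospy.Publisher('/pcl_cluster', PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher('/pcl_table', PointCloud2, queue_size=1)
    object_markers_pub = rospy.Publisher('/object_markers', Marker, queue_size=1)
    detected_objects_pub = rospy.Publisher('/detected_objects',
                                           DetectedObjectsArray, queue_size=1)

    rospy.spin()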
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(20)

    # Set threshold scale factor
    x = 0.1

    # Any point whose mean distance exceeds (global mean distance + x * std_dev)
    # is treated as an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Call the filter function to apply the outlier removal
    cloud_outlier_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud_outlier_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    vox_filtered = vox.filter()

    # TODO: PassThrough Filtering
    # along the z-axis
    passthrough = vox_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.61
    axis_max = 0.95
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    ptz_filtered = passthrough.filter()

    # Along the y-axis
    passthrough = ptz_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.4
    axis_max = 0.4
    passthrough.set_filter_limits(axis_min, axis_max)
    pt_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = pt_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # Extract outliers (what we're interested in)
    cloud_objects = pt_filtered.extract(inliers, negative=True)

    # Euclidean Clustering
    # Remove color data and build a k-d tree from the cloud
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(1000)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Assign a color corresponding to each segmented object in the scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create a new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_of = pcl_to_ros(cloud_outlier_filtered)
    ros_vox = pcl_to_ros(vox_filtered)
    ros_pt = pcl_to_ros(pt_filtered)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_objects)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    pcl_of_pub.publish(ros_of)
    pcl_vox_pub.publish(ros_vox)
    pcl_pt_pub.publish(ros_pt)

    # Exercise-3 TODOs:
    detected_objects_labels = []
    detected_objects = []
    labeled_centroids = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        pcl_cluster_ros = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        color_bins = 32
        normals_bins = 2
        feature = extract_cloud_features(pcl_cluster_ros, color_bins,
                                         normals_bins)

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = pcl_cluster_ros
        detected_objects.append(do)

        #rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
        detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    ####################################
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
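
extract_cloud_features() is called above but not defined in this snippet. A minimal sketch of what it might look like, assuming it simply composes the same helpers used in the first callback (compute_color_histograms, get_normals, compute_normal_histograms); the real helper may differ:

def extract_cloud_features(ros_cloud, color_bins, normals_bins):
    """Sketch: concatenate HSV color histograms and surface-normal
    histograms into a single feature vector (signature assumed)."""
    chists = compute_color_histograms(ros_cloud, nbins=color_bins, using_hsv=True)
    normals = get_normals(ros_cloud)
    nhists = compute_normal_histograms(normals, nbins=normals_bins)
    return np.concatenate((chists, nhists))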
def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()

    ec.set_ClusterTolerance(0.3)  #0.14, 0.08
    ec.set_MinClusterSize(1)
    ec.set_MaxClusterSize(3500)

    ec.set_SearchMethod(tree)

    cluster_indices = ec.Extract()

    cluster_color = pcl_helper.random_color_gen()
    #cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    #print("No. of cones visible at this moment ", len(cluster_indices))

    #print(cluster_indices)

    cluster_coords = Track()
    #cone = TrackCone()
    #cluster_coords.cones = []
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        x, y, z = 0, 0, 0
        cone_clusters = []
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                pcl_helper.rgb_to_float(cluster_color)
            ])
            x = x + white_cloud[indice][0]
            y = y + white_cloud[indice][1]
            z = z + white_cloud[indice][2]

        cone_clusters.append(x / len(indices))
        cone_clusters.append(y / len(indices))
        #print(cone_clusters)
        #print(color_cluster_point_list)
        #cone.x = x/len(indices)
        #cone.y = y/len(indices)
        #cone.type = f'cone{j+1}'

        #cluster_coords.cones = cone_clusters
        #cluster_coords.data.append(TrackCone(data = cone_clusters))
        cluster_coords.data.append(
            TrackCone(x=x / len(indices), y=y / len(indices)))
        #cluster_coords.cones[j].x = x/len(indices)
        #cluster_coords.cones[j].y = y/len(indices)
        #cluster_coords.cones[j].type = 'cone{k}'

    #print(len(cluster_coords.data))
    #print(len(cluster_coords.cones[0]))

    #print("No. of cones visible at this moment ", len(color_cluster_point_list))
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

    return cluster_cloud, cluster_coords
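
A minimal usage sketch for do_euclidian_clustering() inside a ROS subscriber callback. The topic name, the two publishers, and the pcl_helper.ros_to_pcl conversion are assumptions:

import rospy
from sensor_msgs.msg import PointCloud2

def lidar_callback(ros_msg):
    # Convert the incoming ROS cloud, cluster it, and republish the results.
    cloud = pcl_helper.ros_to_pcl(ros_msg)   # assumed helper, mirrors pcl_to_ros above
    cluster_cloud, cluster_coords = do_euclidian_clustering(cloud)
    cluster_pub.publish(pcl_helper.pcl_to_ros(cluster_cloud))  # colored cluster cloud
    track_pub.publish(cluster_coords)                          # Track message of cone centers

# Hypothetical wiring:
# cluster_pub = rospy.Publisher('/cone_clusters', PointCloud2, queue_size=1)
# track_pub = rospy.Publisher('/track', Track, queue_size=1)
# rospy.Subscriber('/velodyne_points', PointCloud2, lidar_callback, queue_size=1)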
Example #6
# Here we stitch together point clouds captured from the laser scanner to build a full point cloud.
# We load two point clouds and stitch them together.


import pcl
from pcl import pcl_visualization
import numpy as np
from pykdtree.kdtree import KDTree
import matplotlib.pyplot as plt

cloud5 = pcl.PointCloud_PointXYZRGB()

# Load the first point cloud
cloud1 = pcl.load_XYZRGB('/media/ribin/DATA/addverb/binpicking/pcd files/object_pcd/soap_santoor/2.pcd')
print(cloud1)
# Load the second point cloud
cloud2 = pcl.load_XYZRGB('/media/ribin/DATA/addverb/binpicking/pcd files/object_pcd/colgate/2.pcd')


array_size1 = cloud1.size
array_size2 = cloud2.size
array_size = array_size1 + array_size2 + 10
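
The snippet stops after computing the combined size. A minimal sketch of one way the stitching could continue: concatenate the two Nx4 XYZRGB arrays into the cloud5 declared above (a plain concatenation, not a registration step) and view the result:

# Concatenate the two XYZRGB arrays (x, y, z, packed rgb) into one cloud.
array1 = cloud1.to_array()
array2 = cloud2.to_array()
stitched = np.concatenate((array1, array2), axis=0).astype(np.float32)
cloud5.from_array(stitched)
print(cloud5)

# View the combined cloud.
visual = pcl_visualization.CloudViewing()
visual.ShowColorCloud(cloud5, b'stitched')
while not visual.WasStopped():
    pass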
Example #7
            np_pc = np.array(list_pc, dtype=np.float32)
            print(np_pc.shape)

            # test

            # save numpy array to bin or pcd
            filename = str(lidar_t)
            print(filename)

            np_pc_bin = copy.deepcopy(np_pc)
            bin_path_ = save_path + '/bin'
            Path(bin_path_).mkdir(parents=True, exist_ok=True)
            bin_path = bin_path_ + "/" + filename + ".bin"
            np_pc_bin.astype(np.float32).tofile(bin_path)

            np_pc_pcd = copy.deepcopy(np_pc)
            pcd_path_ = save_path + '/pcd'
            Path(pcd_path_).mkdir(parents=True, exist_ok=True)
            pcd_path = pcd_path_ + "/" + filename + ".pcd"

            if args.is_ml:
                pc_rgb = pcl.PointCloud_PointXYZRGB()
                pc_rgb.from_list(test_list_pc)
                pcl.save(pc_rgb, pcd_path)
            else:
                pc_intensity = pcl.PointCloud_PointXYZI()
                pc_intensity.from_array(np_pc_pcd)
                pcl.save(pc_intensity, pcd_path)

    print('DONE')
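
A minimal sketch of reading one of the saved .bin files back into numpy; each point was written as four consecutive float32 values (x, y, z plus intensity or packed rgb), so the flat buffer reshapes to Nx4. Here bin_path refers to one of the paths written above:

import numpy as np

# Load the flat float32 buffer and restore the Nx4 point layout.
pts = np.fromfile(bin_path, dtype=np.float32).reshape(-1, 4)
print(pts.shape)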
Example #8
def pcl_callback(pcl_msg):

    # TODO: Convert ROS msg to PCL data
    pcl_msg_new = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = pcl_msg_new.make_voxel_grid_filter()
    
    # Leaf size
    LEAF_SIZE = 0.01

    # set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()
    filename = 'voxel_downsampled.pcd'
    pcl.save(cloud_filtered, filename)


    # TODO: PassThrough Filter
    # Create a PassThrough filter object
    passthrough = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.757
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()
    filename = 'pass_through_filtered.pcd'
    pcl.save(cloud_filtered, filename)


    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit 
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance 
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()


    # TODO: Extract table and objects
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    filename = 'cloud_table.pcd'
    pcl.save(cloud_table, filename)


    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    filename = 'cloud_objects.pcd'
    pcl.save(cloud_objects, filename)


    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()

    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(175)
    ec.set_MaxClusterSize(1200)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)

    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in the scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                            white_cloud[indice][1],
                                            white_cloud[indice][2],
                                            rgb_to_float(cluster_color[j])])

    # Create a new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)


    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)


    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
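
Because each stage above is written to disk, the pipeline can also be inspected offline. A minimal sketch that reloads one of the saved files and displays it with python-pcl's CloudViewing viewer:

import pcl
import pcl.pcl_visualization

# Reload an intermediate result written by the callback and display it.
saved = pcl.load_XYZRGB('cloud_objects.pcd')
viewer = pcl.pcl_visualization.CloudViewing()
viewer.ShowColorCloud(saved, b'cloud_objects')
while not viewer.WasStopped():
    pass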
Example #9
def pcl_callback(pcl_msg):

    # TODO: Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = pcl_data.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Remove noise, in particular the table edge, from the objects cloud
    noise_filter = cloud_objects.make_statistical_outlier_filter()
    noise_filter.set_mean_k(50)
    x = 1.0
    noise_filter.set_std_dev_mul_thresh(x)
    cloud_objects = noise_filter.filter()

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.04)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(5000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, index in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[index][0], white_cloud[index][1],
                white_cloud[index][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    def circle_fitting(self,
                       point_cloud_path,
                       index=999,
                       need_judge=True,
                       save_interm=False):
        # Load point cloud from given path
        # FIXME: For debug
        # print(point_cloud_path)
        # cloud = pcl.load("/home/bionicdl-Mega/repos/01.ply")
        cloud = pcl.load(point_cloud_path)

        # Passthrough Filter
        passthrough = cloud.make_passthrough_filter()
        filter_axis = 'z'
        passthrough.set_filter_field_name(filter_axis)
        axis_min = 200
        axis_max = 1000
        passthrough.set_filter_limits(axis_min, axis_max)
        cloud_filtered = passthrough.filter()
        print("Path Through for %s" % str(index))
        # pcl.save(cloud_filtered, "%s.pcd"%(cloud_path+"pass"+str(index)))

        # Statistical Outlier Filter
        print("Outlier Filter :%s" % str(index))
        outlier_filter = cloud_filtered.make_statistical_outlier_filter()
        outlier_filter.set_mean_k(50)
        outlier_filter.set_std_dev_mul_thresh(1.0)
        cloud_filtered = outlier_filter.filter()
        # pcl.save(cloud_filtered, "%s.pcd"%(cloud_path+"ol"+str(index)))

        # Euclidean Clustering
        print("Euclidean Clustering:%s" % str(index))
        white_cloud = cloud_filtered
        tree = white_cloud.make_kdtree()
        ec = white_cloud.make_EuclideanClusterExtraction()
        ec.set_ClusterTolerance(0.5)   # distance tolerance for clustering
        ec.set_MinClusterSize(2000)    # minimum cluster size (in points)
        ec.set_MaxClusterSize(100000)  # maximum cluster size (in points)
        ec.set_SearchMethod(tree)
        cluster_indices = ec.Extract()
        tool_index = []

        min_height = 10010
        current_length = 0

        # Pick out the topmost cluster (the one with the smallest z value)
        for cluster in cluster_indices:
            cloud = white_cloud.extract(cluster)
            pcl.save(cloud, "/home/bionicdl-Mega/repos/test.pcd")
            cloud_array = np.array(cloud)
            length = cloud_array.shape[0]
            height = cloud_array[:, 2].min()
            if height < min_height:
                min_height = height
                tool_index = cluster

        tool0 = white_cloud.extract(tool_index)
        # pcl.save(tool0, "%s.pcd"%(cloud_path+"clustering"+str(index)))

        # RANSAC circle segmentation
        print("RANSAC :%s" % str(index))
        seg = tool0.make_segmenter()
        seg.set_model_type(pcl.SACMODEL_CIRCLE3D)
        max_distance = 0.0002
        seg.set_distance_threshold(max_distance)
        seg.set_MaxIterations(50000)
        seg.set_optimize_coefficients(True)
        seg.set_method_type(6)  # 6 == SAC_PROSAC in PCL's SAC method enum
        inliers, coefficients = seg.segment()
        clc = tool0.extract(inliers, negative=False)
        outliers = tool0.extract(inliers, negative=True)
        points_list = []
        for data in clc:
            points_list.append([data[0], data[1], data[2], 0.5])

        index_data = 0
        for data in outliers:
            points_list.append([data[0], data[1], data[2], 255])
        center = coefficients[:3]
        points_list.append([center[0], center[1], center[2], 100])

        if need_judge:
            point_cloud_valid = self._feasibility_test(len(tool_index),
                                                       len(inliers))
            if not point_cloud_valid:
                return False

        tool0_c = pcl.PointCloud_PointXYZRGB()
        tool0_c.from_list(points_list)
        # pcl.save(tool0_c, "%s.pcd"%(cloud_path+"tool_c"))

        # TODO: remove temporary point cloud files and rename
        return coefficients[:3]
    def ransac_circle_fitting(self, point_cloud_path):
        """
        Apply RANSAC circle fitting to find the flange circle
        :param point_cloud_path: path to the point cloud of the flange
        :return: coordinates of the flange center in the camera frame
        """
        p_robot_used = []
        maxD = 0.3
        R_FLANGE = 31.0
        detR = 1
        p_camera = []
        # for i in range(len(index_list)):
        # index = index_list[i]
        try:
            # tool0 = pcl.load(
            # "/home/bionicdl/photoneo_data/calibration_images/data_ransac10000_valid/tool0_%s.pcd" % (
            # index[:-1]))
            tool0 = pcl.load(point_cloud_path)
        except Exception:
            print("PointCloud with path %s not found" % point_cloud_path)
            raise

        # p_robot_used.append(p_robot_list[i])
        points = tool0.to_array()
        max_num_inliers = 0
        # FIXME: For testing
        for k in range(10000):
            idx = np.random.randint(points.shape[0], size=3)
            A, B, C = points[idx, :]
            a = np.linalg.norm(C - B)
            b = np.linalg.norm(C - A)
            c = np.linalg.norm(B - A)
            s = (a + b + c) / 2
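            # Circumradius of triangle ABC: R = a*b*c / (4 * area), with the
            # area obtained via Heron's formula from the semi-perimeter s.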
            R = a * b * c / 4 / np.sqrt(s * (s - a) * (s - b) * (s - c))
            if (R_FLANGE - R) > detR or R_FLANGE - R < 0:
                continue
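            # Circumcenter of ABC in barycentric form:
            #   P = (b1*A + b2*B + b3*C) / (b1 + b2 + b3),
            # with b1 = a^2 * (b^2 + c^2 - a^2) and b2, b3 by cyclic permutation.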
            b1 = a * a * (b * b + c * c - a * a)
            b2 = b * b * (a * a + c * c - b * b)
            b3 = c * c * (a * a + b * b - c * c)
            P = np.column_stack((A, B, C)).dot(np.hstack((b1, b2, b3)))
            P /= b1 + b2 + b3
            num_inliers = 0
            inliers = []
            outliers = []
            for point in points:
                d = np.abs(np.linalg.norm(point - P) - R)
                if d < maxD:
                    num_inliers += 1
                    inliers.append(point)
                else:
                    outliers.append(point)
            if num_inliers > max_num_inliers:
                max_num_inliers = num_inliers
                max_A = A
                max_B = B
                max_C = C
                max_R = R
                max_P = P
                max_inliers = np.array(inliers)
                max_outliers = np.array(outliers)
        points_list = []
        for data in max_inliers:
            points_list.append(
                [data[0], data[1], data[2],
                 rgb_to_float([0, 255, 0])])
        for data in max_outliers:
            points_list.append(
                [data[0], data[1], data[2],
                 rgb_to_float([255, 0, 0])])
        tool0_c = pcl.PointCloud_PointXYZRGB()
        tool0_c.from_list(points_list)

        output_path = point_cloud_path
        if "pcd" in output_path:
            output_path = output_path.replace(".pcd", "_valid.pcd")
        elif "ply" in output_path:
            output_path = output_path.replace(".ply", "_valid.ply")

        output_path = output_path.replace(".ply", ".pcd")
        pcl.save(tool0_c, output_path)

        output_path = output_path.replace(".pcd", ".ply")
        pcl.save(tool0_c, output_path)

        return np.array(max_P)