Example #1
def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud) # <type 'pcl._pcl.PointCloud'>
    tree = white_cloud.make_kdtree() # <type 'pcl._pcl.KdTree'>
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02) # for hammer
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(250)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract() # indices for each cluster (a list of lists)
    # Colorize the clustered points (a single random color here;
    # use get_color_list to assign a distinct color per cluster)
    cluster_color = pcl_helper.random_color_gen()
    #cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                pcl_helper.rgb_to_float(cluster_color)
            ])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    # Convert to a ROS message for publishing
    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)
    # Save the clustered cloud locally as a .pcd file
    pcl.save(cluster_cloud, 'cluster.pcd')
    return cluster_cloud
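A minimal usage sketch for Example #1 (the node name and topic names are assumptions, not part of the original snippet, and a pcl_helper module like the one imported above is presumed available), showing how the function could be wired into a ROS subscriber/publisher pair:

import rospy
import pcl
import pcl_helper
from sensor_msgs.msg import PointCloud2

def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)        # PointCloud2 -> PCL XYZRGB
    cluster_cloud = do_euclidian_clustering(cloud)      # colorize and save the clusters
    pub.publish(pcl_helper.pcl_to_ros(cluster_cloud))   # PCL XYZRGB -> PointCloud2

rospy.init_node('euclidean_clustering', anonymous=True)
pub = rospy.Publisher('/clusters', PointCloud2, queue_size=1)   # hypothetical topic
rospy.Subscriber('/camera/points', PointCloud2, callback)       # hypothetical topic
rospy.spin()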
Example #2
def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()

    ec.set_ClusterTolerance(5)
    ec.set_MinClusterSize(1)
    ec.set_MaxClusterSize(3500)

    ec.set_SearchMethod(tree)

    cluster_indices = ec.Extract()

    cluster_color = pcl_helper.random_color_gen()
    #cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                pcl_helper.rgb_to_float(cluster_color)
            ])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

    return cluster_cloud
Example #3
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    print("INPUT: ", cloud, type(cloud))
    cloud = pcl_helper.XYZRGB_to_XYZ(cloud)
    mean_k = 10
    thresh = 0.001
    cloud = do_statistical_outlier_filtering(cloud, mean_k, thresh)
    color = pcl_helper.random_color_gen()
    cloud = pcl_helper.XYZ_to_XYZRGB(cloud, color)
    cloud_new = pcl_helper.pcl_to_ros(cloud)
    print("OUTPUT: ", cloud, type(cloud))
    pub.publish(cloud_new)
def Process_rawPCL(pclpcRawIn):

    DebugDumpMsgPCL(pclpcRawIn)

    pclRecs = []  # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName)

    pclRecs.append((pclpcRawIn, "pclpcRawIn"))

    pclRecsDownSampled = pclproc.PCLProc_DownSampleVoxels(pclpcRawIn)
    pclRecs += pclRecsDownSampled
    pclpcDownSampled, pclpcDownSampledName = pclRecsDownSampled[0]

    # PassThrough filter + RANSAC plane segmentation (combined in this helper)
    pclRecsRansac = pclproc.PCLProc_Ransac(pclpcDownSampled)
    pclRecs += pclRecsRansac

    # Extract inliers and outliers
    pclpcPassZ, pclpcPassZIn, pclpcPassZOut = pclRecsRansac[0][0], pclRecsRansac[1][0], pclRecsRansac[2][0]
    pclpcTable, pclpcObjects = pclpcPassZIn, pclpcPassZOut # Rename for clarity

    # Euclidean Clustering
    pclpObjectsNoColor = pcl_helper.XYZRGB_to_XYZ(pclpcObjects)
    clusterIndices, pclpcClusters = pclproc.PCLProc_ExtractClusters(pclpObjectsNoColor)

    labelRecs = []

    for index, pts_list in enumerate(clusterIndices):
        # Get points for a single object in the overall cluster
        pcl_cluster = pclpcObjects.extract(pts_list)
        msgPCL_cluster = pcl_helper.pcl_to_ros(pcl_cluster) # Needed for histograms... would refactor

        # Extract histogram features
        chists = pclproc.compute_color_histograms(msgPCL_cluster, doConvertToHSV=True)
        normals = get_normals(msgPCL_cluster)
        nhists = pclproc.compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # CLASSIFY, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = g_clf.predict(g_scaler.transform(feature.reshape(1, -1)))
        label = g_encoder.inverse_transform(prediction)[0]

        # Accumulate label records for publishing (and labeling detected objects)
        label_pos = list(pclpcObjects[pts_list[0]])
        label_pos[2] += 0.3
        labelRecs.append((label, label_pos, index))

    return labelRecs, pclpcObjects, pclpcTable, pclpcClusters
def detect_objects(cloud_objects, cluster_indices):

    # TODO: Classify the clusters!
    detected_objects_labels = []
    detected_objects = []
    labeled_features = []

    for index, pts_list in enumerate(cluster_indices):
        # TODO: Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: Convert the cluster from pcl to ROS using helper function
        ros_cluster = pclh.pcl_to_ros(pcl_cluster)

        # TODO: Extract histogram features
        hists_color = ft.compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        hists_normals = ft.compute_normal_histograms(normals)
        feature = np.concatenate((hists_color, hists_normals))
        #labeled_features.append([feature, model_name])

        # TODO: Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        # TODO: Retrieve the label for the result
        label = encoder.inverse_transform(prediction)[0]
        # TODO: Add it to detected_objects_labels list
        detected_objects_labels.append(label)

        # TODO: Publish a label into RViz
        white_cloud = pclh.XYZRGB_to_XYZ(cloud_objects)
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .2
        object_markers_pub.publish(make_label(label, label_pos, index))

        # TODO: Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    return detected_objects, detected_objects_labels
Example #6
def euclidean_clustering(cloud, ClusterTolerance=0.01, MinClusterSize=10, MaxClusterSize=10000):
    # Convert XYZRGB point cloud to XYZ since EC uses spatial info only
    cloud_xyz = pclh.XYZRGB_to_XYZ(cloud)
    # Use k-d tree to decrease the computational burden of searching for neighboring points
    tree = cloud_xyz.make_kdtree()
    # Create a cluster extraction object
    ec = cloud_xyz.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(ClusterTolerance)
    ec.set_MinClusterSize(MinClusterSize)
    ec.set_MaxClusterSize(MaxClusterSize)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    return cluster_indices
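A short usage sketch for Example #6 (cloud_objects is a hypothetical XYZRGB cloud of segmented objects, and the parameter values are illustrative rather than tuned), showing that each returned entry is a list of point indices into the input cloud:

cluster_indices = euclidean_clustering(cloud_objects,
                                       ClusterTolerance=0.02,  # 2 cm neighbor distance
                                       MinClusterSize=25,
                                       MaxClusterSize=5000)
for j, indices in enumerate(cluster_indices):
    single_object = cloud_objects.extract(indices)  # pull one cluster out of the cloud
    print('cluster %d: %d points' % (j, single_object.size))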
Example #7
def PCLProc_Noise(pclpIn):
    pclRecs = []  # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName)

    print("pcl.__file__", pcl.__file__)
    print("type pcl", type(pclpIn))

    if not isinstance(pclpIn, pcl.PointCloud):
        pclpIn = pcl_helper.XYZRGB_to_XYZ(pclpIn)

    fil = pclpIn.make_statistical_outlier_filter()
    numNeighborsToCheck = 20
    threshScaleFactor = 1
    fil.set_mean_k(numNeighborsToCheck)
    fil.set_std_dev_mul_thresh(threshScaleFactor)

    pclpNoiseInliers = fil.filter()
    fil.set_negative(True)
    pclpNoiseOutliers = fil.filter()

    pclRecs.append((pclpNoiseInliers, "pclpNoiseInliers"))
    pclRecs.append((pclpNoiseOutliers, "pclpNoiseOutliers"))
    return pclRecs
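For reference, a sketch of consuming the two records returned above (pclpRaw stands in for any input cloud; the names are placeholders):

recs = PCLProc_Noise(pclpRaw)
(pclpInliers, _), (pclpOutliers, _) = recs
print('kept %d points, removed %d noise points' % (pclpInliers.size, pclpOutliers.size))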
Example #8
def ProcessPCL(pclpcRawIn):
    pclRecs = []  # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName)

    pclRecs.append((pclpcRawIn, "pclpcRawIn"))

    pclRecsDownSampled = pclproc.PCLProc_DownSampleVoxels(pclpcRawIn)
    pclRecs += pclRecsDownSampled
    pclpcDownSampled, pclpcDownSampledName = pclRecsDownSampled[0]

    # PassThrough filter + RANSAC plane segmentation (combined in this helper)
    pclRecsRansac = pclproc.PCLProc_Ransac(pclpcDownSampled)
    pclRecs += pclRecsRansac

    # Extract inliers and outliers
    pclpcPassZ, pclpcPassZIn, pclpcPassZOut = pclRecsRansac[0][
        0], pclRecsRansac[1][0], pclRecsRansac[2][0]
    pclpcTable, pclpcObjects = pclpcPassZIn, pclpcPassZOut

    # Euclidean Clustering
    pclpObjectsNoColor = pcl_helper.XYZRGB_to_XYZ(pclpcObjects)
    clusterIndices, pclpcClusters = pclproc.PCLProc_ExtractClusters(pclpObjectsNoColor)

    return pclpcObjects, pclpcTable, pclpcClusters
def pcl_callback(ros_pcl_msg):
# Exercise-1 TODOs:
    # TODO: Load point cloud data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Save the inliers in .pcd file
    # TODO: Run the inliers.pcd file on terminal
# Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Euclidean Clustering
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # TODO: Convert PCL data to ROS messages
    # TODO: Publish ROS messages
# Exercise-3 TODOs: 
    # TODO: Classify the clusters! (loop through each detected cluster one at a time)
        # TODO: Grab the points for the cluster
        # TODO: Compute the associated feature vector
        # TODO: Make the prediction
        # TODO: Publish a label into RViz
        # TODO: Add the detected object to the list of detected objects.
    # TODO: Publish the list of detected objects


    # TODO: Convert ROS msg of type PointCloud2 to PCL data PointXYZRGB format
    pcl_data = pclh.ros_to_pcl(ros_pcl_msg)

    # TODO: Statistical filter
    stat_inlier, stat_outlier = flt.statistical_outlier_filter(pcl_data)
    pcl_data = stat_inlier
    
    # TODO: Voxel Grid Downsampling
    vox_filt = flt.voxel_downsampling(pcl_data)

    # TODO: PassThrough Filter
    pass_filt = flt.pass_through_filter(vox_filt)
    
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers (objects) and outliers (table)
    cloud_table, cloud_objects = flt.ransac(pass_filt)
    
    # TODO: Euclidean Clustering
    cluster_indices = flt.euclidean_clustering(cloud_objects)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_cloud = flt.visualize_clusters(cloud_objects, cluster_indices)

    # TODO: Convert PCL data to ROS messages
    ros_table_cloud = pclh.pcl_to_ros(cloud_table)
    ros_objects_cloud = pclh.pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pclh.pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_table_pub.publish(ros_table_cloud)
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)


    # TODO: Classify the clusters!
    detected_objects_labels = []
    detected_objects = []
    labeled_features = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pclh.pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        hists_color = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        hists_normals = compute_normal_histograms(normals)
        feature = np.concatenate((hists_color, hists_normals))
        #labeled_features.append([feature, model_name])


        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        white_cloud = pclh.XYZRGB_to_XYZ(cloud_objects)
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
Example #10
    def pc2CB(self, msg):
        if self.init:
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(
                cloud, 'x', self.x_min, self.x_max)
            filtered_cloud = filtering_helper.do_passthrough_filter(
                fil_cloud, 'y', self.y_min, self.y_max)
            # Turn cloud into colorless cloud
            colorless_cloud = pcl_helper.XYZRGB_to_XYZ(filtered_cloud)

            # Get groups of cluster of points
            #            clusters = self.getClusters(colorless_cloud, 0.05, 20, 300)   # Table on Top AGV - 60~300
            clusters = self.getClusters(colorless_cloud, 0.05, 20, 350)
            print "len(clusters)", len(clusters)
            for i in range(0, len(clusters)):
                print "len(clusters[", i, "]", len(clusters[i])

            # Get mean points from clusters
            mean_pts = []
            for cluster in clusters:
                mean_pts.append(self.getMeanPoint(cluster, filtered_cloud))
            print "len(mean_pts)", len(mean_pts)
            for i in range(0, len(mean_pts)):
                print "mean_pts[", i, "]", mean_pts[i]

            # Remove other points, leave the table points only
            table_pts = []
            table_pts = self.getTablePoints(mean_pts)
            print "len(table_pts)", len(table_pts)
            print table_pts

            for i in range(len(table_pts)):
                self.vizPoint(i, table_pts[i], ColorRGBA(1, 1, 1, 1),
                              "base_link")

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Finding 2 middle points from 4 table legs and use them as path_pts
            path_pts = []
            if (len(table_pts) == 4):
                for p1 in table_pts:
                    for p2 in table_pts:
                        if (p1 == p2):
                            break
                        # Register 2 middle_pts of 4 table legs
                        if (0.73 < self.getDistance(p1, p2) < 0.83):
                            path_pts.append(self.getMiddlePoint(p1, p2))
                            break

            print "len(path_pts)", len(path_pts)
            print path_pts

            # Turn path_pts into map frame
            for i in range(len(path_pts)):
                path_pts[i] = tf2_geometry_msgs.do_transform_pose(
                    self.toPoseStamped(path_pts[i], Quaternion(0, 0, 0, 1)),
                    trans).pose.position
                path_pts[i] = Point(path_pts[i].x, path_pts[i].y, 0)

            # Record the starting point and heading once
            if self.record_once:
                self.start_pt = trans.transform.translation
                self.record_once = False

            # Create a list with the distance from start_pt to each of the path_pts
            distance_list = [
                self.getDistance(p, self.start_pt) for p in path_pts
            ]

            # Rearrange the path_pts in ascending order of distance
            path_pts = self.getAscend(path_pts, distance_list)

            # Duplicate the path_pts to prevent from being edited
            table_pathpts = path_pts[:]
            print "table_pathpts: ", len(table_pathpts)

            if (len(table_pathpts) == 2):
                # Get gradient of the Line of Best Fit
                m1 = self.getGradientLineOfBestFit(table_pathpts)

                # Insert another point on the line with d[m] away from the 2nd table_pathpts
                path_pts.append(
                    self.getPointOnLine(m1, 1.0, table_pathpts[1],
                                        self.start_pt))

                # Visualize the path_pts
                self.vizPoint(0, path_pts[0], ColorRGBA(1, 0, 0, 1),
                              "map")  # Red
                self.vizPoint(1, path_pts[1], ColorRGBA(1, 1, 0, 1),
                              "map")  # Yellow
                self.vizPoint(2, path_pts[2], ColorRGBA(1, 0, 1, 1),
                              "map")  # Magenta
                print len(path_pts)

                # Find the heading of the table (last 2 path_pts)
                heading_q = self.getOrientation(table_pathpts[0],
                                                table_pathpts[1])

                # Publish the path
                self.route_pub.publish(self.getPath2Table(path_pts, heading_q))
            print "-----------------"
Example #11
    def do_euclidian_clustering(self, cloud):
        # Euclidean Clustering
        white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud) 
        tree = white_cloud.make_kdtree() 
        ec = white_cloud.make_EuclideanClusterExtraction()

        ec.set_ClusterTolerance(0.3) #0.14, 0.08
        ec.set_MinClusterSize(1)
        ec.set_MaxClusterSize(3500)

        ec.set_SearchMethod(tree)

        cluster_indices = ec.Extract() 

        cluster_color = pcl_helper.random_color_gen()
        #cluster_color = pcl_helper.get_color_list(len(cluster_indices))
        
        #print("No. of cones visible at this moment ", len(cluster_indices))
        
        #print(cluster_indices)
        
        cluster_coords = Track()
        #cone = TrackCone()
        #cluster_coords.cones = []
        color_cluster_point_list = []
        for j, indices in enumerate(cluster_indices):
            x, y, z = 0, 0, 0
            cone_clusters = []
            for i, indice in enumerate(indices):
                color_cluster_point_list.append([
                    white_cloud[indice][0],
                    white_cloud[indice][1],
                    white_cloud[indice][2],
                    pcl_helper.rgb_to_float(cluster_color)
                ])
                x = x + white_cloud[indice][0]
                y = y + white_cloud[indice][1]
                z = z + white_cloud[indice][2]

            cone_clusters.append(x/len(indices) + self.carPosX)
            cone_clusters.append(y/len(indices) + self.carPosY)
            print(cone_clusters)
            #print(color_cluster_point_list) 
            #cone.x = x/len(indices)     
            #cone.y = y/len(indices)
            #cone.type = f'cone{j+1}'
            

            #cluster_coords.cones = cone_clusters
            #cluster_coords.data.append(TrackCone(data = cone_clusters))
            cluster_coords.data.append(TrackCone(x = x/len(indices) + self.carPosX,     
                                                y = y/len(indices) + self.carPosY))
            #cluster_coords.cones[j].x = x/len(indices)
            #cluster_coords.cones[j].y = y/len(indices)
            #cluster_coords.cones[j].type = 'cone{k}' 

        #print(len(cluster_coords.cones))
        #print(len(cluster_coords.cones[0])) 
        
        #print("No. of cones visible at this moment ", len(color_cluster_point_list))
        cluster_cloud = pcl.PointCloud_PointXYZRGB()
        cluster_cloud.from_list(color_cluster_point_list)

        ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

        return cluster_cloud, cluster_coords
    def pc2CB(self, msg):
        if self.init:
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(
                cloud, 'x', -3.0, 3.0)
            fil_cloud = filtering_helper.do_passthrough_filter(
                fil_cloud, 'y', -3.0, 3.0)

            # Turn cloud into colorless cloud
            colorless_cloud = pcl_helper.XYZRGB_to_XYZ(fil_cloud)

            #            source_cloud = colorless_cloud

            # Get groups of cluster of points
            clusters = self.getClusters(colorless_cloud, 0.05, 100, 450)
            #            print "len(clusters)", len(clusters)

            source_cloud = self.getCloudFromCluster(clusters[0],
                                                    colorless_cloud)
            middle_pt = self.getMeanPoint(clusters[0], colorless_cloud)
            self.vizPoint(0, middle_pt, ColorRGBA(1, 1, 1, 1), "base_link")

            icp = source_cloud.make_IterativeClosestPoint()
            converged, transform, estimate, fitness = icp.icp(
                source_cloud, self.target_cloud, 100)
            #            gicp = colorless_cloud.make_GeneralizedIterativeClosestPoint()
            #            converged, transform, estimate, fitness = gicp.gicp(colorless_cloud, self.target_cloud, 100)
            #            icp_nl = colorless_cloud.make_IterativeClosestPointNonLinear()
            #            converged, transform, estimate, fitness = icp_nl.icp_nl(self.target_cloud, colorless_cloud, 100)

            #            if not converged:
            print "converged", converged
            print "transform", transform
            print "estimate", estimate
            print "fitness", fitness

            #            new_transform = transform.inverse()
            #            print new_transform
            translation = Vector3(transform[0, 3], transform[1, 3],
                                  transform[2, 3])
            q = tf.transformations.quaternion_from_matrix(transform)
            rotation = Quaternion(q[0], q[1], q[2], q[3])
            roll, pitch, yaw = tf.transformations.euler_from_matrix(transform)
            print np.rad2deg(yaw)

            #            print "translation\n", translation
            #            print "rotation\n", rotation

            #            transformation = self.toTransformStamped(translation, rotation, "map", "base_link")

            print "------------------------------------------------"

            # Visualize the cloud
            outmsg = pcl_helper.pcl_to_ros(estimate, "map")
            outmsg.header = msg.header
            self.pc2_pub.publish(outmsg)
def Process_rawPCL(pclpcRawIn):
    DebugDumpMsgPCL(pclpcRawIn)
    pclRecs = []  # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName)
    pclRecs.append((pclpcRawIn, "pclpcRawIn"))

    pclRecsDownSampled = pclproc.PCLProc_DownSampleVoxels(pclpcRawIn)
    pclRecs += pclRecsDownSampled
    pclpcDownSampled, pclpcDownSampledName = pclRecsDownSampled[0]

    # Region of Interest: PassThrough Filter
    recsPassThruZ = pclproc.PCLProc_PassThrough(pclpcDownSampled, 'z', 0.6,
                                                1.1)
    pclRecs += recsPassThruZ
    pclpcPassZ = recsPassThruZ[0][0]

    recsPassThruY = pclproc.PCLProc_PassThrough(pclpcPassZ, 'y', -0.5, 0.5)
    pclRecs += recsPassThruY
    pclpcPassY = recsPassThruY[0][0]

    # RANSAC Table/Object separation
    pclRecsRansac = pclproc.PCLProc_Ransac(pclpcPassY)
    pclRecs += pclRecsRansac
    pclpcPassZIn, pclpcPassZOut = pclRecsRansac[0][0], pclRecsRansac[1][0]
    pclpcTable, pclpcObjects = pclpcPassZIn, pclpcPassZOut  # Rename for clarity

    # Noise reduction
    pclpRecsNoNoise = pclproc.PCLProc_Noise(pclpcObjects)
    pclRecs += pclpRecsNoNoise
    pclpNoColorNoNoise = pclpRecsNoNoise[0][0]
    Debug_Publish(pclpNoColorNoNoise)

    # Euclidean Clustering
    pclpObjectsNoColor = pcl_helper.XYZRGB_to_XYZ(pclpcObjects)
    clusterIndices, pclpClusters = pclproc.PCLProc_ExtractClusters(
        pclpObjectsNoColor)
    #clusterIndices, pclpClusters = pclproc.PCLProc_ExtractClusters(pclpNoColorNoNoise)

    labelRecs = []

    for index, pts_list in enumerate(clusterIndices):
        # Get points for a single object in the overall cluster
        pcl_cluster = pclpcObjects.extract(pts_list)
        msgPCL_cluster = pcl_helper.pcl_to_ros(
            pcl_cluster)  # Needed for histograms... would refactor

        # Extract histogram features
        chists = pclproc.compute_color_histograms(msgPCL_cluster,
                                                  g_numHistBinsHSV,
                                                  doConvertToHSV=True)
        normals = get_normals(msgPCL_cluster)
        nhists = pclproc.compute_normal_histograms(normals,
                                                   g_numHistBinsNormals)
        feature = np.concatenate((chists, nhists))

        # CLASSIFY, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = g_clf.predict(g_scaler.transform(feature.reshape(1, -1)))
        label = g_encoder.inverse_transform(prediction)[0]

        # Accumulate label records for publishing (and labeling detected objects)
        label_pos = list(pclpcObjects[pts_list[0]])
        label_pos[0] -= 0.1
        label_pos[2] += 0.2
        labelRecs.append(
            (label, label_pos, index,
             pcl_cluster))  # Don't like passing pcl_cluster in this Records...

    return labelRecs, pclpcObjects, pclpcTable, pclpClusters
    def pc2CB(self, msg):
        if self.init:
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(cloud, 'x', -2.5, 2.5)
            filtered_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'y', -2.5, 2.5)
            # Turn cloud into colorless cloud
            colorless_cloud = pcl_helper.XYZRGB_to_XYZ(filtered_cloud)

            # Get groups of cluster of points
            clusters = self.getClusters(colorless_cloud, 0.05, 10, 1000)
            print "len(clusters)", len(clusters)

            # Extract charger wall cluster - use for compute Line of Best Fit
            if(len(clusters) != 0):
                size_clusters = []
                for cluster in clusters:
                    size_clusters.append(len(cluster))
                # Find the biggest cluster
                max_id = size_clusters.index(max(size_clusters))
                wall_cluster = clusters.pop(max_id)
                print "len(wall_cluster)", len(wall_cluster)
                feature_clusters = clusters
                print "len(feature_clusters)", len(feature_clusters)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Record the starting point and heading once
            if self.record_once:
                self.start_pt = trans.transform.translation
                self.record_once = False

            # Get Points of wall cluster
            wall_pts = self.getPointsFromCluster(wall_cluster, filtered_cloud)

            # Turn wall_pts into map frame
            for i in range(len(wall_cluster)):
                wall_pts[i] = tf2_geometry_msgs.do_transform_pose(self.toPoseStamped(wall_pts[i], Quaternion(0,0,0,1)), trans).pose.position

            # Get gradient of the Line of Best Fit
            m1 = self.getGradientLineOfBestFit(wall_pts)
            print m1

            # Get mean points from feature clusters
            mean_pts = []
            for cluster in feature_clusters:
                mean_pts.append(self.getMeanPointFromCluster(cluster, filtered_cloud))
            print "len(mean_pts)", len(mean_pts)
            for i in range(len(mean_pts)):
                self.vizPoint(i, mean_pts[i], ColorRGBA(1,1,1,1), "base_link")

            # Finding 1 middle point from 2 charger feature columns and use it as path_pts
            if(len(mean_pts)>=2):
                for p1 in mean_pts:
                    for p2 in mean_pts:
                        if(p1==p2):
                            break
                        # Only register middle_pt of 2 charger feature points
                        if(0.5 < self.getDistance(p1, p2) < 0.65):
                            middle_pt = self.getMiddlePoint(p1, p2)

                # Turn middle_pt into map frame
                middle_pt = tf2_geometry_msgs.do_transform_pose(self.toPoseStamped(middle_pt, Quaternion(0,0,0,1)), trans).pose.position

                # Register the middle_pt as one of the path_pts
                path_pts = []
                path_pts.append(middle_pt)

                # Insert another point on the normal line with d[m] away from middle_pt
                path_pts.insert(0, self.getPointOnNormalLine(m1, 1.0, middle_pt, self.start_pt))

                # Duplicate the path points to prevent from being edited
                feature_pathpts = path_pts[:]

                # Insert another point on the normal line with d[m] away from middle_pt
                path_pts.insert(1, self.getPointOnNormalLine(m1, 0.3, middle_pt, self.start_pt))

                # Create a list with distance information between init_pt and path_pts
                distance_list = [self.getDistance(p, self.start_pt) for p in path_pts]

                # Rearrange the path_pts to be in ascending order
                path_pts = self.getAscend(path_pts, distance_list)

                print "len(path_pts)", len(path_pts)
                # Add the AGV start_pt to the beginning of path_pts
#                    path_pts.insert(0, self.start_pt)

                # Remove last point of path_pts (middle pt of 2 charger feature pts)
                path_pts = path_pts[:len(path_pts)-1]

                # Visualize the path_pts
                self.vizPoint(0, path_pts[0], ColorRGBA(1,0,0,1), "map") # Red
                self.vizPoint(1, path_pts[1], ColorRGBA(1,1,0,1), "map") # Yellow
#                    self.vizPoint(2, path_pts[2], ColorRGBA(0,0,1,1), "map") # Blue

                # Find the heading of the charger (last 2 path_pts)
                heading_q = self.getOrientation(feature_pathpts[-2], feature_pathpts[-1])

                # Publish the path
                self.route_pub.publish(self.getPath2Charger(path_pts, heading_q))
            print "-----------------"