Example #1
def pcl_callback(ros_point_cloud_msg):

    # TODO: Convert ROS msg of type PointCloud2 to PCL data PointXYZRGB format
    pcl_data = pclh.ros_to_pcl(ros_point_cloud_msg)

    # TODO: Statistical filter
    stat_inlier, stat_outlier = flt.statistical_outlier_filter(pcl_data)

    # TODO: Voxel Grid Downsampling
    vox_filt = flt.voxel_downsampling(stat_inlier)

    # TODO: PassThrough Filter
    pass_filt = flt.pass_through_filter(vox_filt)

    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    cloud_table, cloud_objects = flt.ransac(pass_filt)

    # TODO: Euclidean Clustering
    cluster_indices = flt.euclidean_clustering(cloud_objects)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_cloud = flt.visualize_clusters(cloud_objects, cluster_indices)

    # TODO: Convert PCL data to ROS messages
    ros_table_cloud = pclh.pcl_to_ros(cloud_table)
    ros_objects_cloud = pclh.pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pclh.pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_table_pub.publish(ros_table_cloud)
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)
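
The publishers and helper modules used in this callback are not part of the snippet. Below is a minimal sketch of the node wiring it assumes; the topic names and the pclh/flt helper modules are assumptions, not taken from the source.

import rospy
from sensor_msgs.msg import PointCloud2
import pcl_helper as pclh      # course helper module (assumed import style)
import filtering as flt        # hypothetical module providing the flt.* helpers used above

if __name__ == '__main__':
    # Initialize the ROS node
    rospy.init_node('clustering', anonymous=True)

    # Publishers referenced inside pcl_callback (topic names are assumptions)
    pcl_table_pub = rospy.Publisher('/pcl_table', PointCloud2, queue_size=1)
    pcl_objects_pub = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)
    pcl_cluster_pub = rospy.Publisher('/pcl_cluster', PointCloud2, queue_size=1)

    # Subscribe to the raw cloud topic and hand each message to pcl_callback
    pcl_sub = rospy.Subscriber('/sensor_stick/point_cloud', PointCloud2, pcl_callback, queue_size=1)

    # Spin until shutdown
    rospy.spin()
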
Example #2
    def pointcloud_cb(self, ros_cloud):
        ransac_start = time.clock()
        filtered_cloud = ground_filter(ros_cloud)
        ransac_end = time.clock()

        pc_msg = pcl_to_ros(filtered_cloud, ros_cloud.header)
        self.objects_pub.publish(pc_msg)

        ece_start = time.clock()
        cluster_indices = get_clusters(filtered_cloud)
        ece_end = time.clock()

        tracking_start = time.clock()
        objects_loc = []
        objects = []
        #for all j clusters
        print("# clusters ", len(cluster_indices))
        for j, indices in enumerate(cluster_indices):
            print("Cluster", j, ": # points ", len(indices))
            sum_x = 0
            sum_y = 0
            sum_z = 0
            #for all points in the cluster j
            for i, indice in enumerate(indices):
                sum_x = sum_x + filtered_cloud[indice][0]
                sum_y = sum_y + filtered_cloud[indice][1]
                sum_z = sum_z + filtered_cloud[indice][2]

            x = sum_x / len(indices)
            y = sum_y / len(indices)
            z = sum_z / len(indices)

            objects_loc.append([x, y, z])

            obj = filtered_cloud.extract(indices, negative=False)
            objects.append(obj)

            pc_msg = pcl_to_ros(obj, ros_cloud.header)
            self.object_pub.publish(pc_msg)

        marker_array = MarkerArray()
        for i, obj in enumerate(objects_loc):
            marker_array.markers.append(
                get_marker(i, ros_cloud.header.frame_id, obj))

        self.track(marker_array)
        tracking_end = time.clock()

        print("RANSAC: ", ransac_end - ransac_start)
        print("ECE: ", ece_end - ece_start)
        print("Tracking: ", tracking_end - tracking_start)
        print("Total: ", tracking_end - ransac_start)
Example #3
def CB_msgPCL(msgPCL):
    """
    ROS "/sensor_stick/point_cloud" subscription Callback handler
    Handle the PointCloud ROS msg received by the "/sensor_stick/point_cloud"
    This function is almost entirely unpacking/packing ROS messages and publishing.
    The the unpacked input pcl is processed by Process_rawPCL(pclpcRawIn)
    which returns the values that need to be packed and published
    :param msgPCL:
    :return:
    """
    global g_callBackCount
    g_callBackCount += 1

    if g_callBackCount % g_callBackSkip != 0:
        return

    print "\rCBCount= {:05d}".format(g_callBackCount),
    sys.stdout.flush()

    DebugDumpMsgPCL(msgPCL)

    # Extract pcl Raw from Ros msgPCL
    pclpcRawIn = pcl_helper.ros_to_pcl(msgPCL)

    #------- PROCESS RAW PCL-------------------------
    labelRecs, pclpcObjects, pclpcTable, pclpcClusters = Process_rawPCL(pclpcRawIn)

    detected_objects_labels = [] # For ros loginfo only
    detected_objects = [] # For publish - for PROJ3!

    for (labelText, labelPos, labelIndex) in labelRecs:
        detected_objects_labels.append(labelText)
        g_object_markers_pub.publish(marker_tools.make_label(labelText, labelPos, labelIndex ))
        # Add  detected object to the list of detected objects.
        do = DetectedObject()
        do.label = labelText
        do.cloud = pclpcClusters
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Package Processed pcls into Ros msgPCL
    msgPCLObjects = pcl_helper.pcl_to_ros(pclpcObjects)
    msgPCLTable = pcl_helper.pcl_to_ros(pclpcTable)
    msgPCLClusters = pcl_helper.pcl_to_ros(pclpcClusters)

    # Publish everything
    # This is the output you'll need to complete the upcoming project!
    g_detected_objects_pub.publish(detected_objects) # THIS IS THE CRUCIAL STEP FOR PROJ3
    g_pcl_objects_pub.publish(msgPCLObjects)
    g_pcl_table_pub.publish(msgPCLTable)
    g_pcl_cluster_pub.publish(msgPCLClusters)
Example #4
def Process_msgPCL(msgPCL):
    # Extract pcl Raw from Ros msgPCL
    pclpcRawIn = pcl_helper.ros_to_pcl(msgPCL)

    #------- PROCESS RAW PC-------------------------
    pclpcObjects, pclpcTable, pclpcClusters = ProcessPCL(pclpcRawIn)

    # Package Processed pcls into Ros msgPCL
    msgPCLObjects = pcl_helper.pcl_to_ros(pclpcObjects)
    msgPCLTable = pcl_helper.pcl_to_ros(pclpcTable)
    msgPCLClusters = pcl_helper.pcl_to_ros(pclpcClusters)

    return msgPCLObjects, msgPCLTable, msgPCLClusters
Example #5
def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud) # <type 'pcl._pcl.PointCloud'>
    tree = white_cloud.make_kdtree() # <type 'pcl._pcl.KdTree'>
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02) # for hammer
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(250)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract() # indices for each cluster (a list of lists)
    # Assign a color to each cluster

    cluster_color = pcl_helper.random_color_gen()
    #cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0], white_cloud[indice][1], white_cloud[indice][2], pcl_helper.rgb_to_float(cluster_color)])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    # convert to a ROS message (ready for publishing)
    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)
    # save to local
    pcl.save(cluster_cloud, 'cluster.pcd')
    return cluster_cloud
Example #6
def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()

    ec.set_ClusterTolerance(5)
    ec.set_MinClusterSize(1)
    ec.set_MaxClusterSize(3500)

    ec.set_SearchMethod(tree)

    cluster_indices = ec.Extract()

    cluster_color = pcl_helper.random_color_gen()
    #cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                pcl_helper.rgb_to_float(cluster_color)
            ])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

    return cluster_cloud
Example #7
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    #print("INPUT: ", cloud)
    cloud = do_passthrough(cloud, "x", 0.0, 20.0)
    cloud = do_passthrough(cloud, "y", -5.0, 5.0)
    _, _, cloud = do_ransac_plane_normal_segmentation(cloud, 0.14)
    #cloud = do_euclidian_clustering(cloud)
    cloud_new = pcl_helper.pcl_to_ros(cloud)
    pub.publish(cloud_new)
Example #8
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    # print("INPUT: ", cloud)

    LEAF_SIZE = 0.1
    cloud = do_voxel_grid_downsampling(cloud, LEAF_SIZE)
    #  print("OUTPUT: ", cloud)
    # print("")

    cloud_new = pcl_helper.pcl_to_ros(cloud)
    pub.publish(cloud_new)
Example #9
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    print("INPUT: ", cloud, type(cloud))
    cloud = pcl_helper.XYZRGB_to_XYZ(cloud)
    mean_k = 10
    thresh = 0.001
    cloud = do_statistical_outlier_filtering(cloud, mean_k, thresh)
    color = pcl_helper.random_color_gen()
    cloud = pcl_helper.XYZ_to_XYZRGB(cloud, color)
    cloud_new = pcl_helper.pcl_to_ros(cloud)
    print("OUTPUT: ", cloud, type(cloud))
    pub.publish(cloud_new)
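
The do_statistical_outlier_filtering helper called above is not shown in the listing. A plausible sketch, based on the make_statistical_outlier_filter API used in the later examples (the function name and argument order are inferred from the call above):

def do_statistical_outlier_filtering(pcl_data, mean_k, thresh):
    # Create the statistical outlier filter for the given PCL cloud
    outlier_filter = pcl_data.make_statistical_outlier_filter()
    # Number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(mean_k)
    # Points with a mean distance larger than (global mean distance + thresh * std_dev)
    # are treated as outliers and removed
    outlier_filter.set_std_dev_mul_thresh(thresh)
    return outlier_filter.filter()
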
Example #10
	def get_clusters(self, cloud):
		#print("point cloud size = ",len(cloud.data), " bytes")

		marker_array = MarkerArray()

		pcl_cloud = ros_to_pcl(cloud)
		#convert pointcloud_xyzrgb to pointcloud_xyz 
		# since pointcloud_xyzrgb does not support make_EuclideanClusterExtraction
		xyz_pc = pcl.PointCloud()
		xyz_pc.from_list( [ x[:3] for x in pcl_cloud.to_list() ] )
		tree = xyz_pc.make_kdtree()
	
		t1 =  time.clock()
		ec = xyz_pc.make_EuclideanClusterExtraction()
		ec.set_ClusterTolerance (0.005)
		ec.set_MinClusterSize (200)
		ec.set_MaxClusterSize (5000)
		ec.set_SearchMethod (tree)
		cluster_indices = ec.Extract()
		t2 =  time.clock()

		#print("ECE took ", t2-t1)

		#for all j clusters
		print("# clusters ", len(cluster_indices))
		for j, indices in enumerate(cluster_indices):
			print("Cluster", j ,": # points ", len(indices))

			sum_x = 0
			sum_y = 0
			sum_z = 0
			#for all points in the cluster j
			for i, indice in enumerate(indices):
				sum_x = sum_x + xyz_pc[indice][0]
				sum_y = sum_y + xyz_pc[indice][1]
				sum_z = sum_z + xyz_pc[indice][2]

			x = sum_x/len(indices)
			y = sum_y/len(indices)
			z = sum_z/len(indices)

			label = "obj_" + str(j)
			marker_array.markers.append(get_marker(j, label, cloud.header.frame_id, x, y, z))

			obj = pcl_cloud.extract(indices, negative=False)

			pc_msg = pcl_to_ros(obj, cloud.header)
			self.pub.publish(pc_msg)

		self.track(marker_array)
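
The get_marker helper used above (and in Example #26) is not included in the listing. A hedged sketch of one possible implementation using visualization_msgs; the marker shape, scale, and color are assumptions:

from visualization_msgs.msg import Marker

def get_marker(marker_id, label, frame_id, x, y, z):
    # Sphere marker placed at the cluster centroid; the label is stored in the namespace
    marker = Marker()
    marker.header.frame_id = frame_id
    marker.ns = label
    marker.id = marker_id
    marker.type = Marker.SPHERE
    marker.action = Marker.ADD
    marker.pose.position.x = x
    marker.pose.position.y = y
    marker.pose.position.z = z
    marker.pose.orientation.w = 1.0
    marker.scale.x = marker.scale.y = marker.scale.z = 0.05
    marker.color.g = 1.0
    marker.color.a = 1.0
    return marker
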
Example #11
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)

    filter_axis = 'x'
    axis_min = 0.0
    axis_max = 20.0
    cloud = do_passthrough(cloud, filter_axis, axis_min, axis_max)

    filter_axis = 'y'
    axis_min = -5.0
    axis_max = 5.0
    cloud = do_passthrough(cloud, filter_axis, axis_min, axis_max)

    cloud_new = pcl_helper.pcl_to_ros(cloud)
    pub.publish(cloud_new)
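
The do_passthrough helper used in Examples #7, #8, #11 and #14 is not listed either. A sketch consistent with the pass-through API shown in Example #27 (the helper's signature is inferred from the calls above):

def do_passthrough(pcl_data, filter_axis, axis_min, axis_max):
    # Create a pass-through filter object for the given cloud
    passthrough = pcl_data.make_passthrough_filter()
    # Keep only points whose coordinate along filter_axis lies within [axis_min, axis_max]
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(axis_min, axis_max)
    return passthrough.filter()
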
Example #12
def Process_rawPCL(pclpcRawIn):

    DebugDumpMsgPCL(pclpcRawIn)

    pclRecs = [] # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName# )

    pclRecs.append((pclpcRawIn, "pclpcRawIn"))

    pclRecsDownSampled = pclproc.PCLProc_DownSampleVoxels(pclpcRawIn)
    pclRecs += pclRecsDownSampled
    pclpcDownSampled, pclpcDownSampledName = pclRecsDownSampled[0]

    # PassThrough (z) + RANSAC plane segmentation
    pclRecsRansac = pclproc.PCLProc_Ransac(pclpcDownSampled)
    pclRecs += pclRecsRansac

    # Extract inliers and outliers
    pclpcPassZ, pclpcPassZIn, pclpcPassZOut = pclRecsRansac[0][0], pclRecsRansac[1][0], pclRecsRansac[2][0]
    pclpcTable, pclpcObjects = pclpcPassZIn, pclpcPassZOut # Rename for clarity

    # Euclidean Clustering
    pclpObjectsNoColor = pcl_helper.XYZRGB_to_XYZ(pclpcObjects)
    clusterIndices, pclpcClusters = pclproc.PCLProc_ExtractClusters(pclpObjectsNoColor)

    labelRecs = []

    for index, pts_list in enumerate(clusterIndices):
        # Get points for a single object in the overall cluster
        pcl_cluster = pclpcObjects.extract(pts_list)
        msgPCL_cluster = pcl_helper.pcl_to_ros(pcl_cluster) # Needed for histograms... would refactor

        # Extract histogram features
        chists = pclproc.compute_color_histograms(msgPCL_cluster, doConvertToHSV=True)
        normals = get_normals(msgPCL_cluster)
        nhists = pclproc.compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # CLASSIFY, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = g_clf.predict(g_scaler.transform(feature.reshape(1, -1)))
        label = g_encoder.inverse_transform(prediction)[0]

        # Accumulate label records for publishing (and labeling detected objects)
        label_pos = list(pclpcObjects[pts_list[0]])
        label_pos[2] += 0.3
        labelRecs.append((label, label_pos, index))

    return labelRecs, pclpcObjects, pclpcTable, pclpcClusters
Example #13
def detect_objects(cloud_objects, cluster_indices):

    # TODO: Classify the clusters!
    detected_objects_labels = []
    detected_objects = []
    labeled_features = []

    for index, pts_list in enumerate(cluster_indices):
        # TODO: Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: Convert the cluster from pcl to ROS using helper function
        ros_cluster = pclh.pcl_to_ros(pcl_cluster)

        # TODO: Extract histogram features
        hists_color = ft.compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        hists_normals = ft.compute_normal_histograms(normals)
        feature = np.concatenate((hists_color, hists_normals))
        #labeled_features.append([feature, model_name])

        # TODO: Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        # TODO: Retrieve the label for the result
        label = encoder.inverse_transform(prediction)[0]
        # TODO: Add it to detected_objects_labels list
        detected_objects_labels.append(label)

        # TODO: Publish a label into RViz
        white_cloud = pclh.XYZRGB_to_XYZ(cloud_objects)
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .2
        object_markers_pub.publish(make_label(label, label_pos, index))

        # TODO: Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    return detected_objects, detected_objects_labels
Example #14
def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    #cloud = pcl_helper.XYZRGB_to_XYZ(cloud)

    #cloud_denoised = do_statistical_outlier_filtering(cloud)

    #cloud_downsampled = do_voxel_grid_downsampling(cloud)

    cloud_roi_x = do_passthrough(cloud, 'x', 0.0, 20.0)

    cloud_roi_y = do_passthrough(cloud_roi_x, 'y', -5.0, 5.0)

    _, _, cloud_ransaced = do_ransac_plane_normal_segmentation(
        cloud_roi_y, 0.007)

    cloud_clustered = do_euclidian_clustering(cloud_ransaced)

    cloud_new = pcl_helper.pcl_to_ros(cloud_clustered)
    pub.publish(cloud_new)
Example #15
    def pc2CB(self, msg):
        # Convert ROS PointCloud2 msg to PCL data
        cloud = pcl_helper.ros_to_pcl(msg)

        # Filter the Cloud according limitation on x-axis and y-axis
        fil_cloud = filtering_helper.do_passthrough_filter(
            cloud, 'x', -0.5, 0.0)
        fil_cloud = filtering_helper.do_passthrough_filter(
            fil_cloud, 'y', -0.5, 0.5)
        print fil_cloud.size

        # Visualize the filtered cloud
        outmsg = pcl_helper.pcl_to_ros(fil_cloud, "base_link")
        outmsg.header = msg.header
        self.pc2_pub.publish(outmsg)

        # Save the cloud
        if self.init:
            pcl.save(fil_cloud, "/home/samuel/charger_unit.pcd", None, False)
            self.init = False
            print "record finish"
Example #16
def parsePoint(data, tlvLength):    # Type : x06
    point_data = np.zeros([tlvLength//struct.calcsize('4f'),3],dtype=np.float32)
    for i in range(tlvLength//struct.calcsize('4f')):
        if(struct.calcsize('4f') == len(data[16*i:16*i+16])):
            parse_range, parse_angle, parse_doppler, parse_snr = struct.unpack('4f', data[16*i:16*i+16])  #struct.unpack('3H3h', data[4+12*i:4+12*i+12]) # header 4, payload 12
            #print("Point : {}, {}".format(parse_range, parse_angle))
            x,y = rect(parse_range, parse_angle)
            datas = np.array([x,y,0])
            point_data[i,]=datas
            
            #np.append(point_data, datas)
            #point_data = np.vstack((point_data,datas))
            #plt.scatter(x, y)            
            #plt.show()
            #plt.pause(0.0001) #Note this correction

        else:
            #print("** parsePoint Size [{}]**".format(len(data[16*i:16*i+16])))
            pass
    
    pc = pcl.PointCloud(point_data)
    pcl_xyzrgb = pcl_helper.XYZ_to_XYZRGB(pc, [255,255,255])  
    out_ros_msg = pcl_helper.pcl_to_ros(pcl_xyzrgb)
    pub_point.publish(out_ros_msg)
Example #17
def parseTargetList(data, tlvLength):    # Type : x07
    target_data = np.zeros([tlvLength//struct.calcsize('I16f'),3],dtype=np.float32)  # 'I16f' for little endian , '>I16f' for big endian
    for i in range(tlvLength//struct.calcsize('I16f')):
        if(struct.calcsize('I16f') == len(data[68*i:68*i+68])):
            tid, posX, posY, velX, velY, accX, accY, EC1, EC2,EC3,EC4,EC5,EC6,EC7,EC8,EC9, g  = struct.unpack('I16f', data[68*i:68*i+68])  #76, 144, 212, 280
            #print("Detect[{}] : {}, {}".format(tid, posX, posY))
            print("Detect[{}] : {}, {}".format(tid, abs((int(posX*100))), abs(int(posY*100))))
            datas = np.array([posX,posY,0])
            target_data[i,]=datas
            #np.append(taget_data, datas)
            #point_data = np.vstack((taget_data,datas))
            
            #plt.scatter(posX, posY)            
            #plt.show()
            #plt.pause(0.0001) #Note this correction

        else:
            #print("** parseTargetList Size[{}]**".format(len(data[68*i:68*i+68])))
            pass
    
    pc = pcl.PointCloud(target_data)
    pcl_xyzrgb = pcl_helper.XYZ_to_XYZRGB(pc, [255,255,255])  
    out_ros_msg = pcl_helper.pcl_to_ros(pcl_xyzrgb)
    pub_track.publish(out_ros_msg)
Example #18
    def pc2CB(self, msg):
        if self.init:
            self.start_time = rospy.Time.now().to_sec()
            print "-----------------"
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(
                cloud, 'x', self.x_min, self.x_max)
            fil_cloud = filtering_helper.do_passthrough_filter(
                fil_cloud, 'y', self.y_min, self.y_max)
            fil_cloud = filtering_helper.do_passthrough_filter(
                fil_cloud, 'intensity', 450, 4096)

            # Statistical outlier filter
            fil = pcl_helper.XYZI_to_XYZ(
                fil_cloud).make_statistical_outlier_filter()
            fil.set_mean_k(10)
            fil.set_std_dev_mul_thresh(0.05)  #1.0 / 0.1
            filtered_cloud = fil.filter()

            pc2_msg = pcl_helper.pcl_to_ros(filtered_cloud, "base_link")
            self.pc2_pub.publish(pc2_msg)

            # Get groups of cluster of points
            clusters = self.getClusters(filtered_cloud, 0.05, 10, 800)
            print "len(clusters)", len(clusters)
            for i in range(len(clusters)):
                print "clusters[" + str(i) + "]", len(clusters[i])

            # Not enough clusters to be identified as charger unit
            if (len(clusters) < 3):
                return


#            self.middle_time_1 = rospy.Time.now().to_sec()

            # Extract charger_wall & charger_pillar clusters
            # - used to compute the gradient of the Line of Best Fit
            # - used to compute middle_pt
            wall_cluster, pillar_clusters = self.getChargerClusters(
                clusters, filtered_cloud)

            #            self.middle_time_2 = rospy.Time.now().to_sec()

            # Cannot find the charger_wall and charger_pillars
            if (wall_cluster == None or pillar_clusters == []):
                print "wall cluster or pillar clusters not found"
                return

            print "wall_cluster", len(wall_cluster)
            self.vizPoint(
                0, self.getMeanPointFromCluster(wall_cluster, filtered_cloud),
                ColorRGBA(0, 1, 0, 1), "base_link")
            for i in range(len(pillar_clusters)):
                print "pillar_clusters[" + str(i) + "]", len(
                    pillar_clusters[i])
                self.vizPoint(
                    i + 1,
                    self.getMeanPointFromCluster(pillar_clusters[i],
                                                 filtered_cloud),
                    ColorRGBA(0, 1, 0, 1), "base_link")

            # Get Points of wall_cluster
            wall_pts = self.getPointsFromCluster(wall_cluster, filtered_cloud)

            # Turn wall_pts into map frame
            for i in range(len(wall_cluster)):
                wall_pts[i] = tf2_geometry_msgs.do_transform_pose(
                    self.toPoseStamped(wall_pts[i], Quaternion(0, 0, 0, 1)),
                    trans).pose.position

            # Get gradient of the Line of Best Fit
            m1 = self.getGradientLineOfBestFit(wall_pts)
            print "gradient", m1

            pillar_pts = []
            # Get mean points from pillar_clusters
            for cluster in pillar_clusters:
                pillar_pts.append(
                    self.getMeanPointFromCluster(cluster, filtered_cloud))

            # Finding 1 middle_pt from 2 charger_pillars and use it as path_pts
            print pillar_pts
            if (len(pillar_pts) == 2):
                middle_pt = None
                for p1 in pillar_pts:
                    for p2 in pillar_pts:
                        if (p1 == p2):
                            break
                        # Only register middle_pt of 2 charger_pillars points
                        if (0.5 < self.getDistance(p1, p2) < 0.65):
                            middle_pt = self.getMiddlePoint(p1, p2)
                            break

                if (middle_pt == None):
                    print "middle_pt not found"
                    return

                # Turn middle_pt into map frame
                middle_pt = tf2_geometry_msgs.do_transform_pose(
                    self.toPoseStamped(middle_pt, Quaternion(0, 0, 0, 1)),
                    trans).pose.position

                # Register the middle_pt as one of the path_pts
                path_pts = []
                path_pts.append(middle_pt)

                # Record the starting point and heading once
                if self.record_once:
                    self.start_pt = trans.transform.translation
                    self.record_once = False

                # Insert another point on the normal line with d[m] away from middle_pt
                path_pts.insert(
                    0,
                    self.getPointOnNormalLine(m1, 1.5, middle_pt,
                                              self.start_pt))

                # Duplicate the path points to prevent from being edited
                charger_pathpts = path_pts[:]

                # Insert another point on the normal line with d[m] away from middle_pt
                path_pts.insert(0,
                                self.getPointOnNormalLine(
                                    m1, 0.29, middle_pt, self.start_pt))  #0.27

                # Create a list with distance information between init_pt and path_pts
                distance_list = [
                    self.getDistance(p, self.start_pt) for p in path_pts
                ]

                # Rearrange the path_pts to be in ascending order
                path_pts = self.getAscend(path_pts, distance_list)

                # Add the AGV start_pt to the begin of path_pts
                #                    path_pts.insert(0, self.start_pt)

                # Remove last point of path_pts (middle pt of 2 charger feature pts)
                path_pts = path_pts[:len(path_pts) - 1]
                print "len(path_pts)", len(path_pts)

                #                # Visualize the path_pts
                self.vizPoint(0, path_pts[0], ColorRGBA(1, 0, 0, 1),
                              "map")  # Red
                self.vizPoint(1, path_pts[1], ColorRGBA(1, 1, 0, 1),
                              "map")  # Yellow
                #                self.vizPoint(2, path_pts[2], ColorRGBA(0,0,1,1), "map") # Blue

                # Find the heading of the charger (last 2 path_pts)
                heading_q = self.getOrientation(charger_pathpts[0],
                                                charger_pathpts[1])

                # Publish the path
                self.route_pub.publish(
                    self.getPath2Charger(path_pts, heading_q))

            print "start_time", self.start_time
            #            print "middle_time_1", self.middle_time_1
            #            print "middle_time_2", self.middle_time_2
            print "end_time", rospy.Time.now().to_sec()
Example #19
    def pc2CB(self, msg):
        if self.init:
            self.start_time = rospy.Time.now().to_sec()
            print "---------------------"
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(cloud, 'x', self.x_min, self.x_max)
            fil_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'y', self.y_min, self.y_max)
            fil_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'intensity', 330, 4096)

            # Turn XYZI cloud into XYZ cloud
            XYZ_cloud =  pcl_helper.XYZI_to_XYZ(fil_cloud)

            # Statistical outlier filter
            fil = XYZ_cloud.make_statistical_outlier_filter()
            fil.set_mean_k(5)
            fil.set_std_dev_mul_thresh(0.2)
            filtered_cloud = fil.filter()

            pc2_msg = pcl_helper.pcl_to_ros(filtered_cloud, "base_link")
            self.pc2_pub.publish(pc2_msg)

            # Get groups of cluster of points
#            clusters = self.getClusters(colorless_cloud, 0.05, 20, 250)   # Table on Top AGV - 60~230
            clusters = self.getClusters(filtered_cloud, 0.05, 5, 350)
            print "len(clusters)", len(clusters)
            for i in range(0,len(clusters)):
                print "len(clusters[", i, "]", len(clusters[i])

            # Not enough clusters to be identified as table unit
            if(len(clusters)<4):
                return

            # Get mean points from clusters
            mean_pts = []
            for cluster in clusters:
                mean_pts.append(self.getMeanPoint(cluster, filtered_cloud))
            print "len(mean_pts)", len(mean_pts)
            for i in range(0,len(mean_pts)):
                print "mean_pts[", i, "]", mean_pts[i]

            # Remove other points, leave the table points only
            table_pts = []
            table_pts = self.getTablePoints(mean_pts)
#            print "len(table_pts)", len(table_pts)
#            print table_pts

            for i in range(len(table_pts)):
                self.vizPoint(i, table_pts[i], ColorRGBA(1,1,1,1), "base_link")

            # Finding 2 middle points from 4 table legs and use them as path_pts
            path_pts = []
            if(len(table_pts)==4):
                for p1 in table_pts:
                    for p2 in table_pts:
                        if(p1==p2):
                            break
                        # Register 2 middle_pts of 4 table legs
                        if(0.73 < self.getDistance(p1, p2) < 0.83):
                            path_pts.append(self.getMiddlePoint(p1, p2))
                            break

#            print "len(path_pts)", len(path_pts)
#            print path_pts

            # path_pts generated from between 2 legs of the table must fulfill the requirement below, else return
            if(len(path_pts)==2):
                if not (0.33 < self.getDistance(path_pts[0], path_pts[1]) < 0.48):
                    return

            # Turn path_pts into map frame
            for i in range(len(path_pts)):
                path_pts[i] = tf2_geometry_msgs.do_transform_pose(self.toPoseStamped(path_pts[i], Quaternion(0,0,0,1)), trans).pose.position

            # Record the starting point once
            if self.record_once:
                self.start_pt = trans.transform.translation
                self.record_once = False

            # Create a list with distance information between initial_pt and path_pts
            distance_list = [self.getDistance(p, self.start_pt) for p in path_pts]

            # Rearrange the path_pts to be in ascending order
            path_pts = self.getAscend(path_pts, distance_list)

            # Duplicate the path_pts to prevent from being edited
            table_pathpts = path_pts[:]

            if(len(table_pathpts)==2):
                # Get gradient of the Line of Best Fit
                m1 = self.getGradientLineOfBestFit(table_pathpts)

                # Insert another point on the line with d[m] away from the 1st table_pathpts
                path_pts.insert(0, self.getPointOnLine(m1, 1.5, table_pathpts[0], self.start_pt))

                # Insert a point ahead d[m] of table to the begin of path_pts
#                ahead_pt = self.getPointAheadTable(table_pathpts, 1.0)
#                path_pts.insert(0, ahead_pt)

                # Insert a point ahead d[m] of table to the middle of table_pathpts
#                midway_pt = self.getPointAheadTable(table_pathpts, -0.4)
#                path_pts.insert(2, midway_pt)
                # Insert another point on the line with d[m] away from the 1st table_pathpts
#                path_pts.insert(2, self.getPointOnLine(m1, 0.05, table_pathpts[1], self.start_pt))

                # Remove the last point of path_pts
#                path_pts = path_pts[:len(path_pts)-1]

                # Add the AGV start_pt to the begin of path_pts
#                path_pts.insert(0, self.start_pt)

                # Visualize the path_pts
                self.vizPoint(10, path_pts[0], ColorRGBA(1,0,0,1), "map") # Red
                self.vizPoint(11, path_pts[1], ColorRGBA(1,1,0,1), "map") # Yellow
                self.vizPoint(12, path_pts[2], ColorRGBA(1,0,1,1), "map") # Magenta

                # Find the heading of the table (last 2 path_pts)
                heading_q = [self.getOrientation(path_pts[i], path_pts[i+1]) for i in range(len(path_pts)-1)]
                heading_q.append(self.getOrientation(table_pathpts[0], table_pathpts[1]))
#                print heading_q

                # Publish the path
                self.route_pub.publish(self.getPath2Table(path_pts, heading_q))
            print "start_time", self.start_time
            print "end_time", rospy.Time.now().to_sec()
Example #20
def Process_rawPCL(pclpcRawIn):
    DebugDumpMsgPCL(pclpcRawIn)
    pclRecs = []  # For dev/debug display. Container for point cloud records: tuple (pclObj, pclName)
    pclRecs.append((pclpcRawIn, "pclpcRawIn"))

    pclRecsDownSampled = pclproc.PCLProc_DownSampleVoxels(pclpcRawIn)
    pclRecs += pclRecsDownSampled
    pclpcDownSampled, pclpcDownSampledName = pclRecsDownSampled[0]

    # Region of Interest: PassThrough Filter
    recsPassThruZ = pclproc.PCLProc_PassThrough(pclpcDownSampled, 'z', 0.6,
                                                1.1)
    pclRecs += recsPassThruZ
    pclpcPassZ = recsPassThruZ[0][0]

    recsPassThruY = pclproc.PCLProc_PassThrough(pclpcPassZ, 'y', -0.5, 0.5)
    pclRecs += recsPassThruY
    pclpcPassY = recsPassThruY[0][0]

    # RANSAC Table/Object separation
    pclRecsRansac = pclproc.PCLProc_Ransac(pclpcPassY)
    pclRecs += pclRecsRansac
    pclpcPassZIn, pclpcPassZOut = pclRecsRansac[0][0], pclRecsRansac[1][0]
    pclpcTable, pclpcObjects = pclpcPassZIn, pclpcPassZOut  # Rename for clarity

    # Noise reduction
    pclpRecsNoNoise = pclproc.PCLProc_Noise(pclpcObjects)
    pclRecs += pclpRecsNoNoise
    pclpNoColorNoNoise = pclpRecsNoNoise[0][0]
    Debug_Publish(pclpNoColorNoNoise)

    # Euclidean Clustering
    pclpObjectsNoColor = pcl_helper.XYZRGB_to_XYZ(pclpcObjects)
    clusterIndices, pclpClusters = pclproc.PCLProc_ExtractClusters(
        pclpObjectsNoColor)
    #clusterIndices, pclpClusters = pclproc.PCLProc_ExtractClusters(pclpNoColorNoNoise)

    labelRecs = []

    for index, pts_list in enumerate(clusterIndices):
        # Get points for a single object in the overall cluster
        pcl_cluster = pclpcObjects.extract(pts_list)
        msgPCL_cluster = pcl_helper.pcl_to_ros(
            pcl_cluster)  # Needed for histograms... would refactor

        # Extract histogram features
        chists = pclproc.compute_color_histograms(msgPCL_cluster,
                                                  g_numHistBinsHSV,
                                                  doConvertToHSV=True)
        normals = get_normals(msgPCL_cluster)
        nhists = pclproc.compute_normal_histograms(normals,
                                                   g_numHistBinsNormals)
        feature = np.concatenate((chists, nhists))

        # CLASSIFY, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = g_clf.predict(g_scaler.transform(feature.reshape(1, -1)))
        label = g_encoder.inverse_transform(prediction)[0]

        # Accumulate label records for publishing (and labeling detected objects)
        label_pos = list(pclpcObjects[pts_list[0]])
        label_pos[0] -= 0.1
        label_pos[2] += 0.2
        labelRecs.append(
            (label, label_pos, index,
             pcl_cluster))  # Don't like passing pcl_cluster in this Records...

    return labelRecs, pclpcObjects, pclpcTable, pclpClusters
Example #21
def pcl_callback(ros_pcl_msg):
# Exercise-1 TODOs:
    # TODO: Load point cloud data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Save the inliers in .pcd file
    # TODO: Run the inliers.pcd file on terminal
# Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Euclidean Clustering
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # TODO: Convert PCL data to ROS messages
    # TODO: Publish ROS messages
# Exercise-3 TODOs: 
    # TODO: Classify the clusters! (loop through each detected cluster one at a time)
        # TODO: Grab the points for the cluster
        # TODO: Compute the associated feature vector
        # TODO: Make the prediction
        # TODO: Publish a label into RViz
        # TODO: Add the detected object to the list of detected objects.
    # TODO: Publish the list of detected objects


    # TODO: Convert ROS msg of type PointCloud2 to PCL data PointXYZRGB format
    pcl_data = pclh.ros_to_pcl(ros_pcl_msg)

    # TODO: Statistical filter
    stat_inlier, stat_outlier = flt.statistical_outlier_filter(pcl_data)
    pcl_data = stat_inlier
    
    # TODO: Voxel Grid Downsampling
    vox_filt = flt.voxel_downsampling(pcl_data)

    # TODO: PassThrough Filter
    pass_filt = flt.pass_through_filter(vox_filt)
    
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers (objects) and outliers (table)
    cloud_table, cloud_objects = flt.ransac(pass_filt)
    
    # TODO: Euclidean Clustering
    cluster_indices = flt.euclidean_clustering(cloud_objects)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_cloud = flt.visualize_clusters(cloud_objects, cluster_indices)

    # TODO: Convert PCL data to ROS messages
    ros_table_cloud = pclh.pcl_to_ros(cloud_table)
    ros_objects_cloud = pclh.pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pclh.pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_table_pub.publish(ros_table_cloud)
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)


    # TODO: Classify the clusters!
    detected_objects_labels = []
    detected_objects = []
    labeled_features = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pclh.pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        hists_color = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        hists_normals = compute_normal_histograms(normals)
        feature = np.concatenate((hists_color, hists_normals))
        #labeled_features.append([feature, model_name])


        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        white_cloud = pclh.XYZRGB_to_XYZ(cloud_objects)
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
Example #22
    def do_euclidian_clustering(self, cloud):
        # Euclidean Clustering
        white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud) 
        tree = white_cloud.make_kdtree() 
        ec = white_cloud.make_EuclideanClusterExtraction()

        ec.set_ClusterTolerance(0.3) #0.14, 0.08
        ec.set_MinClusterSize(1)
        ec.set_MaxClusterSize(3500)

        ec.set_SearchMethod(tree)

        cluster_indices = ec.Extract() 

        cluster_color = pcl_helper.random_color_gen()
        #cluster_color = pcl_helper.get_color_list(len(cluster_indices))
        
        #print("No. of cones visible at this moment ", len(cluster_indices))
        
        #print(cluster_indices)
        
        cluster_coords = Track()
        #cone = TrackCone()
        #cluster_coords.cones = []
        color_cluster_point_list = []
        for j, indices in enumerate(cluster_indices):
            x, y, z = 0, 0, 0
            cone_clusters = []
            for i, indice in enumerate(indices):
                color_cluster_point_list.append([white_cloud[indice][0], white_cloud[indice][1], white_cloud[indice][2], pcl_helper.rgb_to_float(cluster_color)])
                x = x + white_cloud[indice][0]
                y = y + white_cloud[indice][1]
                z = z + white_cloud[indice][2]

            cone_clusters.append(x/len(indices) + self.carPosX)
            cone_clusters.append(y/len(indices) + self.carPosY)
            print(cone_clusters)
            #print(color_cluster_point_list) 
            #cone.x = x/len(indices)     
            #cone.y = y/len(indices)
            #cone.type = f'cone{j+1}'
            

            #cluster_coords.cones = cone_clusters
            #cluster_coords.data.append(TrackCone(data = cone_clusters))
            cluster_coords.data.append(TrackCone(x = x/len(indices) + self.carPosX,     
                                                y = y/len(indices) + self.carPosY))
            #cluster_coords.cones[j].x = x/len(indices)
            #cluster_coords.cones[j].y = y/len(indices)
            #cluster_coords.cones[j].type = 'cone{k}' 

        #print(len(cluster_coords.cones))
        #print(len(cluster_coords.cones[0])) 
        
        #print("No. of cones visible at this moment ", len(color_cluster_point_list))
        cluster_cloud = pcl.PointCloud_PointXYZRGB()
        cluster_cloud.from_list(color_cluster_point_list)

        ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

        return cluster_cloud, cluster_coords
Example #23
def Debug_Publish(pclIn):
    pclpcIn = pcl_helper.XYZ_to_XYZRGB(pclIn, (0, 255, 0))
    msgPCL = pcl_helper.pcl_to_ros(pclpcIn)
    g_pcl_debug_pub.publish(msgPCL)
Example #24
    def pc2CB(self, msg):
        if self.init:
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(
                cloud, 'x', -3.0, 3.0)
            fil_cloud = filtering_helper.do_passthrough_filter(
                fil_cloud, 'y', -3.0, 3.0)

            # Turn cloud into colorless cloud
            colorless_cloud = pcl_helper.XYZRGB_to_XYZ(fil_cloud)

            #            source_cloud = colorless_cloud

            # Get groups of cluster of points
            clusters = self.getClusters(colorless_cloud, 0.05, 100, 450)
            #            print "len(clusters)", len(clusters)

            source_cloud = self.getCloudFromCluster(clusters[0],
                                                    colorless_cloud)
            middle_pt = self.getMeanPoint(clusters[0], colorless_cloud)
            self.vizPoint(0, middle_pt, ColorRGBA(1, 1, 1, 1), "base_link")

            icp = source_cloud.make_IterativeClosestPoint()
            converged, transform, estimate, fitness = icp.icp(
                source_cloud, self.target_cloud, 100)
            #            gicp = colorless_cloud.make_GeneralizedIterativeClosestPoint()
            #            converged, transform, estimate, fitness = gicp.gicp(colorless_cloud, self.target_cloud, 100)
            #            icp_nl = colorless_cloud.make_IterativeClosestPointNonLinear()
            #            converged, transform, estimate, fitness = icp_nl.icp_nl(self.target_cloud, colorless_cloud, 100)

            #            if not converged:
            print "converged", converged
            print "transform", transform
            print "estimate", estimate
            print "fitness", fitness

            #            new_transform = transform.inverse()
            #            print new_transform
            translation = Vector3(transform[0, 3], transform[1, 3],
                                  transform[2, 3])
            q = tf.transformations.quaternion_from_matrix(transform)
            rotation = Quaternion(q[0], q[1], q[2], q[3])
            roll, pitch, yaw = tf.transformations.euler_from_matrix(transform)
            print np.rad2deg(yaw)

            #            print "translation\n", translation
            #            print "rotation\n", rotation

            #            transformation = self.toTransformStamped(translation, rotation, "map", "base_link")

            print "------------------------------------------------"

            # Visualize the cloud
            outmsg = pcl_helper.pcl_to_ros(estimate, "map")
            outmsg.header = msg.header
            self.pc2_pub.publish(outmsg)
Example #25
def pcl_callback(ros_pcl_msg):
# Exercise-1 TODOs:
    # TODO: Load point cloud data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Save the inliers in .pcd file
    # TODO: Run the inliers.pcd file on terminal
# Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    # TODO: Voxel Grid Downsampling
    # TODO: PassThrough Filter
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers and outliers
    # TODO: Euclidean Clustering
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # TODO: Convert PCL data to ROS messages
    # TODO: Publish ROS messages
# Exercise-3 TODOs: 
    # TODO: Classify the clusters! (loop through each detected cluster one at a time)
        # TODO: Grab the points for the cluster
        # TODO: Compute the associated feature vector
        # TODO: Make the prediction
        # TODO: Publish a label into RViz
        # TODO: Add the detected object to the list of detected objects.
    # TODO: Publish the list of detected objects


    # TODO: Convert ROS msg of type PointCloud2 to PCL data PointXYZRGB format
    pcl_data = pclh.ros_to_pcl(ros_pcl_msg)

    # TODO: Statistical filter
    stat_inlier, stat_outlier = flt.statistical_outlier_filter(pcl_data)
    
    # TODO: Voxel Grid Downsampling
    vox_filt = flt.voxel_downsampling(stat_inlier)

    # TODO: PassThrough Filter
    pass_filt = flt.pass_through_filter(vox_filt)
    
    # TODO: RANSAC Plane Segmentation
    # TODO: Extract inliers (objects) and outliers (table)
    cloud_table, cloud_objects = flt.ransac(pass_filt)
    
    # TODO: Euclidean Clustering
    cluster_indices = flt.euclidean_clustering(cloud_objects)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_cloud = flt.visualize_clusters(cloud_objects, cluster_indices)

    # TODO: Classify the clusters and create list of detected objects    
    detected_objects, detected_objects_labels = detect_objects(cloud_objects, cluster_indices)   

    # TODO: Convert PCL data to ROS messages
    ros_table_cloud = pclh.pcl_to_ros(cloud_table)
    ros_objects_cloud = pclh.pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pclh.pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_table_pub.publish(ros_table_cloud)
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    detected_objects_pub.publish(detected_objects)
Example #26
	def get_clusters(self, cloud):

		print("point cloud size = ",len(cloud.data), " bytes")

		marker_array = MarkerArray()

		pcl_cloud = ros_to_pcl(cloud)
		#convert pointcloud_xyzrgb to pointcloud_xyz 
		# since pointcloud_xyzrgb does not support make_EuclideanClusterExtraction
		xyz_pc = pcl.PointCloud()
		xyz_pc.from_list( [ x[:3] for x in pcl_cloud.to_list() ] )

		tree = xyz_pc.make_kdtree()

		ec = xyz_pc.make_EuclideanClusterExtraction()
		ec.set_ClusterTolerance (0.01)
		ec.set_MinClusterSize (100)
		ec.set_MaxClusterSize (25000)
		ec.set_SearchMethod (tree)
		cluster_indices = ec.Extract()

		indx = []
		#for all j clusters
		print("# clusters ", len(cluster_indices))
		for j, indices in enumerate(cluster_indices):
			tmp_indx = []
			print("# points ", len(indices))
			min_x = 999.9
			max_x = -999.9
			min_y = 999.9
			max_y = -999.9		
			min_z = 999.9
			max_z = -999.9
			#for all points in the cluster j
			for i, indice in enumerate(indices):
				tmp_indx.append(indice)
				if xyz_pc[indice][0] < min_x:
					min_x = xyz_pc[indice][0]
				elif xyz_pc[indice][0] > max_x:
					max_x = xyz_pc[indice][0]

				if xyz_pc[indice][1] < min_y:
					min_y = xyz_pc[indice][1]
				elif xyz_pc[indice][1] > max_y:
					max_y = xyz_pc[indice][1]

				if xyz_pc[indice][2] < min_z:
					min_z = xyz_pc[indice][2]
				elif xyz_pc[indice][2] > max_z:
					max_z = xyz_pc[indice][2]

			x = (min_x + max_x)/2
			y = (min_y + max_y)/2
			z = (min_z + max_z)/2

			p = get_stamped_point(x, y, z, cloud.header.frame_id)

			label = "obj_" + str(j)
			marker_array.markers.append(get_marker(j, label, cloud.header.frame_id, x, y, z))

			obj = pcl_cloud.extract(indices, negative=False)

			pc_msg = pcl_to_ros(obj, stamp=cloud.header.stamp, frame_id=cloud.header.frame_id, seq=cloud.header.seq)

			converter = PointCloudToImages('/home/phiggin1/deep_rgbd_v01.2/data/')
			converter.convert(obj)
			fuse_color_depth()
			features = get_features()

			self.pub.publish(pc_msg)

			print("object at ", p, "\nwith features: ",features)

		self.obj_markers_pub.publish(marker_array)
Example #27
def pcl_callback(pcl_msg):

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # create a voxelgrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.01

    # set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()
    #filename = 'voxel_downsampled.pcd'
    #pcl.save(cloud_filtered, filename)

    # PassThrough filter
    # create pass through filter object
    passthrough = cloud_filtered.make_passthrough_filter()

    # assign axis and range to the passthrough filter object
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # generate the resultant point cloud
    cloud_filtered = passthrough.filter()

    # RANSAC plane segmentation
    # create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # set the model to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # Extract table
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    #filename = 'cloud_table.pcd'
    #pcl.save(cloud_table, filename)

    # Extract objects
    #cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Extract outliers
    # create the filter object
    objects = cloud_filtered.extract(inliers, negative=True)

    outlier_filter = objects.make_statistical_outlier_filter()

    # set the number of points to analyze for any given point
    outlier_filter.set_mean_k(50)
    # set threshold scale factor
    x = 1.0
    # any point with a mean distance larger than the global (mean distance + x * std_dev)
    # will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # call the filter function
    cloud_objects = outlier_filter.filter()

    filename = 'cloud_objects.pcd'
    pcl.save(cloud_objects, filename)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # create cluster extraction
    ec = white_cloud.make_EuclideanClusterExtraction()

    # set the tolerances for
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(2000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately

    # assign colors to the individual point cloud indices for visualization
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    # create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
Example #28
    def pc2CB(self, msg):
        if self.init:
            print "---------------------"
            # Convert ROS PointCloud2 msg to PCL data
            cloud = pcl_helper.ros_to_pcl(msg)

            # Get transform of "base_link" relative to "map"
            trans = self.getTransform("map", "base_link")
            os1_trans = self.getTransform("map", "os1_sensor")

            # Filter the Cloud
            fil_cloud = filtering_helper.do_passthrough_filter(cloud, 'x', self.x_min, self.x_max)
            fil_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'y', self.y_min, self.y_max)
            fil_cloud = filtering_helper.do_passthrough_filter(fil_cloud, 'intensity', 330, 4096)

            # Turn XYZI cloud into XYZ cloud
            XYZ_cloud =  pcl_helper.XYZI_to_XYZ(fil_cloud)

            # Statistical outlier filter
            fil = XYZ_cloud.make_statistical_outlier_filter()
            fil.set_mean_k(10)
            fil.set_std_dev_mul_thresh(0.1)
            filtered_cloud = fil.filter()

            pc2_msg = pcl_helper.pcl_to_ros(filtered_cloud, "base_link")
            self.pc2_pub.publish(pc2_msg)

            # Get groups of cluster of points
            clusters = self.getClusters(filtered_cloud, 0.05, 5, 350)
            print "len(clusters)", len(clusters)
            for i in range(0,len(clusters)):
                print "len(clusters[", i, "]", len(clusters[i])

            # Get mean points from clusters
            mean_pts = []
            for cluster in clusters:
                mean_pts.append(self.getMeanPoint(cluster, filtered_cloud))
            print "len(mean_pts)", len(mean_pts)
#            for i in range(0,len(mean_pts)):
#                print "mean_pts[", i, "]", mean_pts[i]

            # Remove other points, leave the table points only
            table_pts = []
            table_pts = self.getTablePoints(mean_pts)
            print "len(table_pts)", len(table_pts)
#            print table_pts

            for i in range(len(table_pts)):
                self.vizPoint(i, table_pts[i], ColorRGBA(1,1,1,1), "base_link")

            # Finding 2 middle points from 4 table legs and use them as path_pts
            path_pts = []
            if(len(table_pts)==4):
                for p1 in table_pts:
                    for p2 in table_pts:
                        if(p1==p2):
                            break
                        # Register 2 middle_pts of 4 table legs
                        if(0.73 < self.getDistance(p1, p2) < 0.83):
                            path_pts.append(self.getMiddlePoint(p1, p2))
                            break

#            print "len(path_pts)", len(path_pts)
#            print path_pts

            # Turn path_pts into map frame
            for i in range(len(path_pts)):
                path_pts[i] = tf2_geometry_msgs.do_transform_pose(self.toPoseStamped(path_pts[i], Quaternion(0,0,0,1)), trans).pose.position
                path_pts[i] = Point(path_pts[i].x, path_pts[i].y, 0)

            # Record the starting point and heading once
            if self.record_once:
#                self.start_pt = trans.transform.translation
                self.start_pt = os1_trans.transform.translation
                self.record_once = False

            # Create a list with distance information between initial_pt and path_pts
            distance_list = [self.getDistance(p, self.start_pt) for p in path_pts]

            # Rearrange the path_pts to be in descending order
            path_pts = self.getDescend(path_pts, distance_list)

            # Duplicate the path_pts to prevent from being edited
            table_pathpts = path_pts[:]
#            print "table_pathpts: ", len(table_pathpts)

            if(len(table_pathpts)==2):
                # Get gradient of the Line of Best Fit
                m1 = self.getGradientLineOfBestFit(table_pathpts)

                # Insert another point on the line with d[m] away from the 2nd table_pathpts
                path_pts.append(self.getPointOnLine(m1, 1.0, table_pathpts[1], self.start_pt))

                # Visualize the path_pts
                self.vizPoint(10, path_pts[0], ColorRGBA(1,0,0,1), "map") # Red
                self.vizPoint(11, path_pts[1], ColorRGBA(1,1,0,1), "map") # Yellow
                self.vizPoint(12, path_pts[2], ColorRGBA(1,0,1,1), "map") # Magenta
#                print len(path_pts)

                # Find the heading of the table (last 2 path_pts)
                heading_q = self.getOrientation(table_pathpts[0], table_pathpts[1])

                # Publish the path
                self.route_pub.publish(self.getPath2Table(path_pts, heading_q))
Example #29
            pc_xyzrgb_list = []
            for line in lines:
                pc_xyzrgb = line.strip().split(',')
                
                pc_xyz = [float(pc_xyzrgb[0]), float(pc_xyzrgb[1]), float(pc_xyzrgb[2])]
                color = [int(pc_xyzrgb[3]), int(pc_xyzrgb[4]), int(pc_xyzrgb[5])]

                #pc_list.append(pc_xyz)
                #color_list.append(color)

                float_color = pcl_helper.rgb_to_float(color)
                pc_xyzrgb_list.append([pc_xyz[0], pc_xyz[1], pc_xyz[2], float_color])

            #np_pc_list = np.asarray(pc_list)
            #np_color = np.asarray(color_list)
            
            #msg_pc = pcl_helper.xyzrgb_array_to_pointcloud2(points=np_pc_list, colors=np_color, stamp=time, frame_id='map')

            XYZRGB_cloud = pcl.PointCloud_PointXYZRGB()
            XYZRGB_cloud.from_list(pc_xyzrgb_list)
            msg_pc = pcl_helper.pcl_to_ros(XYZRGB_cloud)

            time = rospy.Time.now()
            write_rosbag.write(topic='/soslab_sl/pointcloud', msg=msg_pc, t=time)

        r.sleep()
    
    write_rosbag.close()


Example #30
    def pointcloud_cb(self, cloud):
        filtered_cloud = self.ground_filter(cloud)
        pc_msg = pcl_to_ros(filtered_cloud, cloud.header)

        self.pub.publish(pc_msg)
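
The ground_filter used in Examples #2 and #30 is not included in the listing. One plausible implementation, following the RANSAC plane segmentation pattern from Example #27; the distance threshold and the import style are assumptions:

import pcl
from pcl_helper import ros_to_pcl

def ground_filter(ros_cloud):
    # Convert the ROS PointCloud2 msg to PCL data
    cloud = ros_to_pcl(ros_cloud)

    # Fit a plane with RANSAC and treat its inliers as the ground
    seg = cloud.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.02)   # assumed threshold in meters
    inliers, coefficients = seg.segment()

    # Keep everything that is NOT part of the ground plane
    return cloud.extract(inliers, negative=True)
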