def pcl_callback(pcl_msg):
    """Perception pipeline callback: denoise, downsample, crop, remove the
    table plane, cluster the remaining points, classify each cluster, and
    hand the detections to pr2_mover()."""
    # Exercise-2 TODOs:
    cloud = ros_to_pcl(pcl_msg)

    # Statistical outlier filter to deal with NOISE in data.
    # (renamed from 'filter' to avoid shadowing the builtin)
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(15)       # neighbor points to analyze per point
    x = 0.0002                          # threshold scale factor
    # Outlier points will have a dist > (mean dist + x * stddev).
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = outlier_filter.filter()

    # Voxel grid downsampling.
    # BUG FIX: downsample the denoised cloud, not the raw input — the original
    # called cloud.make_voxel_grid_filter(), silently discarding the result of
    # the statistical outlier filter above.
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # PassThrough on z keeps the table-top height band.
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(0.6, 1.1)
    cloud_filtered = passthrough.filter()

    # PassThrough on x trims points in front of / behind the table.
    passthrough_x = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthrough_x.set_filter_field_name(filter_axis)
    passthrough_x.set_filter_limits(0.32, 1.05)
    cloud_filtered = passthrough_x.filter()

    # RANSAC plane segmentation: separate the table plane from the objects.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01     # max point-to-plane distance for inliers
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()
    extracted_inl = cloud_filtered.extract(inliers, negative=False)   # table
    extracted_outl = cloud_filtered.extract(inliers, negative=True)   # objects

    # Euclidean clustering on an XYZ-only copy (k-d tree ignores color).
    white_cloud = XYZRGB_to_XYZ(extracted_outl)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    # NOTE: experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(45)
    ec.set_MaxClusterSize(12000)
    # Search the k-d tree for clusters.
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters.
    cluster_indices = ec.Extract()

    # Cluster-mask cloud: one distinct color per detected cluster.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # New pcl instance with COLOR!
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    # Populate color pcl with x,y,z,rgb list.
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data back to ROS messages and publish.
    ros_cloud_objects = pcl_to_ros(extracted_outl)
    ros_cloud_table = pcl_to_ros(extracted_inl)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    #pcl_table_pub.publish(ros_cloud_table)
    #pcl_cluster_pub.publish(ros_cluster_cloud)
    #pcl_sub.publish(pcl_sub)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (objects).
        pcl_cluster = extracted_outl.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features (HSV color + surface normals).
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, floating above the cluster's first point.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    #detected_objects_pub.publish(detected_objects)

    # Could add some logic to determine whether or not your object detections
    # are robust before calling pr2_mover().
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Handle an incoming point cloud: downsample, crop to the table region,
    segment out the table plane, cluster the object points, and classify
    every cluster with the trained SVM."""
    # Exercise-2 TODOs:

    # Convert the ROS message into a PCL cloud.
    cloud_filtered = ros_to_pcl(pcl_msg)

    # --- Voxel grid downsampling ---
    voxel_filter = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01  # edge length of each voxel (leaf), in meters
    voxel_filter.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = voxel_filter.filter()

    # --- PassThrough crop along z, keeping only the table-top band ---
    pass_z = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    pass_z.set_filter_field_name(filter_axis)
    axis_min = 0.757
    axis_max = 1.1
    pass_z.set_filter_limits(axis_min, axis_max)
    cloud_filtered = pass_z.filter()

    # --- RANSAC plane segmentation to locate the table surface ---
    segmenter = cloud_filtered.make_segmenter()
    segmenter.set_model_type(pcl.SACMODEL_PLANE)
    segmenter.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance to count as an inlier
    segmenter.set_distance_threshold(max_distance)
    inliers, coefficients = segmenter.segment()

    # Split the cloud: plane inliers are the table, everything else is objects.
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # --- Euclidean clustering on spatial coordinates only ---
    white_cloud = XYZRGB_to_XYZ(cloud_objects)  # drop RGB for the k-d tree
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(25000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # --- Cluster-mask cloud: paint each cluster with its own color ---
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = [
        [white_cloud[pt][0],
         white_cloud[pt][1],
         white_cloud[pt][2],
         rgb_to_float(cluster_color[cluster_id])]
        for cluster_id, idx_list in enumerate(cluster_indices)
        for pt in idx_list
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # --- Back to ROS messages, then publish ---
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters, one detected cluster at a time.
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Pull this cluster's points out of the object cloud.
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Histogram features: HSV color plus surface normals.
        chists = compute_color_histograms(ros_cluster, using_hsv=True, nbins=44)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals, nbins=20)
        feature = np.concatenate((chists, nhists))
        # Predict, then decode the numeric class back into its label.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Float a text label above the cluster in RViz.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects —
    # this is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    """Perception callback: downsample, crop (z then x), denoise, remove the
    table plane, cluster and classify the objects, then invoke pr2_mover()."""
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter (z — table height band)
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.3
    axis_max = .8
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # Second PassThrough (x — depth band in front of the robot)
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.3
    axis_max = .7
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # Statistical outlier filter to remove sparse sensor noise.
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point.
    outlier_filter.set_mean_k(50)
    # Any point with a mean distance larger than (mean distance + x*std_dev)
    # will be considered an outlier.
    x = .04
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = outlier_filter.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.02)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    # FIX: negative=True removes the plane inliers, i.e. it yields the OBJECT
    # points; the original misleadingly bound that result to a variable named
    # 'extracted_inliers'. Renamed for clarity, and the table cloud (which the
    # original re-extracted inline at publish time) is hoisted here.
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    cloud_table = cloud_filtered.extract(inliers, negative=False)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(5000)
    # Search the k-d tree for clusters.
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters.
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # Create new cloud containing all clusters, each with unique color.
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster.
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Compute the associated feature vector.
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        print(label)   # version-portable form of the original 'print label'
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects.
    detected_objects_pub.publish(detected_objects)

    # Could add some logic to determine whether or not the detections are
    # robust before calling pr2_mover().
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException as e:
        print(e)
        pass
def pcl_callback(pcl_msg):
    # Callback for each incoming cloud: downsample, denoise, crop (z and y),
    # remove the table plane via RANSAC, cluster the objects, and classify
    # each cluster with the trained model.
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.005  # 5 mm (original comment said "5cm", wrong for 0.005 m)
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # Statistical outlier removal: drop sparse noise points.
    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)
    # Set threshold scale factor
    x = 1.1
    # Any point with a mean distance larger than global (mean distance+x*std_dev)
    # will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    # TODO: PassThrough Filter
    # Crop along z to keep the table-top height band.
    passthrough_z = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    passthrough_z.set_filter_limits(0.6, 0.9)
    cloud_filtered = passthrough_z.filter()

    # Crop along y to keep the region centered in front of the robot.
    passthrough_y = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough_y.set_filter_field_name(filter_axis)
    passthrough_y.set_filter_limits(-0.5, 0.5)
    cloud_filtered = passthrough_y.filter()

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model.
    # Experiment with different values for max_distance for segmenting the table.
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    cloud_table = cloud_filtered.extract(inliers, negative=False)    # table plane
    cloud_objects = cloud_filtered.extract(inliers, negative=True)   # objects

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)  # convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold as well as minimum and maximum
    # cluster size (in points).
    # NOTE: These are poor choices of clustering parameters —
    # your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(7000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters!
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)  # cloud_out.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # TODO: complete this step just as you did before in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)
    # NOTE(review): unlike the other pipeline versions, this one never publishes
    # detected_objects or logs the label list after the loop — the detections
    # are only visible as RViz markers. Confirm this is intended.
def pcl_callback(pcl_msg):
    """Perception callback: isolate the table-top region, split table from
    objects with RANSAC, cluster the object points, classify each cluster,
    and publish the resulting clouds, markers and detections."""
    # Exercise-2 TODOs:

    # Convert the ROS message to a PCL cloud.
    cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling.
    voxel = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    voxel.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = voxel.filter()

    # PassThrough filter on z to isolate the table and the objects on it.
    passthrough = cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name('z')
    axis_min = 0.76
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # RANSAC plane segmentation to tell the table apart from the objects.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Inliers form the table surface; outliers are the objects.
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # The k-d tree works on spatial coordinates only, so strip the color.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    # A k-d tree makes neighbor searches cheap; PCL's Euclidean clustering
    # implementation only supports k-d trees.
    tree = white_cloud.make_kdtree()

    # Euclidean clustering builds an individual point cloud per object; the
    # cluster-mask cloud lets each cluster be visualized separately.
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Distance tolerance plus min/max cluster size (in points) — values found
    # by experimentation.
    ec.set_ClusterTolerance(0.02)   # tried 0.02, 0.05
    ec.set_MinClusterSize(50)      # tried 50, 100
    ec.set_MaxClusterSize(15000)   # tried 15000, 3000
    ec.set_SearchMethod(tree)
    # One list of point indices per discovered cluster (list of lists).
    cluster_indices = ec.Extract()

    # A random color for every isolated object in the scene.
    cluster_color = get_color_list(len(cluster_indices))

    detected_objects_labels = []
    detected_objects = []
    color_cluster_point_list = []

    # Run recognition on every detected cluster.
    for index, pts_list in enumerate(cluster_indices):
        object_points = []
        for i, pts in enumerate(pts_list):
            # Full x, y, z, rgb values for this object's own cloud.
            object_points.append([
                cloud_objects[pts][0],
                cloud_objects[pts][1],
                cloud_objects[pts][2],
                cloud_objects[pts][3]
            ])
            # Same x, y, z but painted with this cluster's assigned color.
            color_cluster_point_list.append([
                white_cloud[pts][0],
                white_cloud[pts][1],
                white_cloud[pts][2],
                rgb_to_float(cluster_color[index])
            ])

        # Turn the (x,y,z,rgb) list into an actual point cloud.
        pcl_cluster = pcl.PointCloud_PointXYZRGB()
        pcl_cluster.from_list(object_points)
        # Convert the cluster from pcl to ROS using the helper function.
        ros_cloud = pcl_to_ros(pcl_cluster)

        # Histogram features, mirroring capture_features.py.
        histogram_bins = 128
        chists = compute_color_histograms(ros_cloud,
                                          nbins=histogram_bins,
                                          using_hsv=True)
        normals = get_normals(ros_cloud)
        nhists = compute_normal_histograms(normals, nbins=histogram_bins)
        feature = np.concatenate((chists, nhists))

        # Predict and decode the class label.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float a label above the cluster in RViz.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # One combined cloud holding every cluster, each uniquely colored.
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages.
    ros_cloud_object_cluster = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # Publish the point clouds and the detected objects.
    pcl_objects_cloud_pub.publish(ros_cloud_object_cluster)  # solid color objects
    pcl_objects_pub.publish(ros_cloud_objects)               # original color objects
    pcl_table_pub.publish(ros_cloud_table)                   # table cloud
    detected_objects_pub.publish(detected_objects)           # detected object labels
def pcl_callback(pcl_msg):
    """Process one incoming cloud: denoise, downsample, crop (z and y),
    remove the table plane, cluster the objects, classify each cluster,
    and finally call pr2_mover() with the detections."""
    # Exercise-2 TODOs:

    # Convert the ROS message into a PCL cloud.
    cloud = ros_to_pcl(pcl_msg)

    # Statistical outlier filtering to strip sparse sensor noise.
    stat_filter = cloud.make_statistical_outlier_filter()
    stat_filter.set_mean_k(10)
    stat_filter.set_std_dev_mul_thresh(0.1)
    cloud = stat_filter.filter()

    # Voxel grid downsampling.
    voxel = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    voxel.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud = voxel.filter()

    # PassThrough crop along z (table height band).
    pass_z = cloud.make_passthrough_filter()
    pass_z.set_filter_field_name('z')
    pass_z.set_filter_limits(0.6, 1.5)
    cloud = pass_z.filter()

    # PassThrough crop along y (centered in front of the robot).
    pass_y = cloud.make_passthrough_filter()
    pass_y.set_filter_field_name('y')
    pass_y.set_filter_limits(-0.5, 0.5)
    cloud = pass_y.filter()

    # RANSAC plane segmentation to find the table surface.
    seg = cloud.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.01)  # max point-to-plane distance for inliers
    inliers, coefficients = seg.segment()

    # Inliers = table, outliers = objects.
    cloud_table = cloud.extract(inliers, negative=False)
    cloud_objects = cloud.extract(inliers, negative=True)

    # Euclidean clustering on an XYZ-only copy of the object cloud.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(30)
    ec.set_MaxClusterSize(10000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Cluster-mask cloud: every cluster gets its own color.
    cluster_color = get_color_list(len(cluster_indices))
    mask_points = []
    for cluster_id, idx_list in enumerate(cluster_indices):
        for pt in idx_list:
            mask_points.append([
                white_cloud[pt][0],
                white_cloud[pt][1],
                white_cloud[pt][2],
                rgb_to_float(cluster_color[cluster_id])
            ])
    cloud_cluster = pcl.PointCloud_PointXYZRGB()
    cloud_cluster.from_list(mask_points)

    # Convert PCL clouds back to ROS messages.
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cloud_cluster)

    # Publish the three clouds.
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

    # Exercise-3 TODOs:
    # Classify the clusters, one at a time.
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # This cluster's points, pulled from the object cloud.
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Feature vector: HSV color histograms + normal histograms.
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Predict and decode the label.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float a label above the cluster in RViz.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects.
    detected_objects_pub.publish(detected_objects)

    # Could add some logic to check that the detections are robust
    # before calling pr2_mover().
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Pipeline callback built on project helpers (voxel_filter,
    passthrough_filter, RANSCAN_filter, Euclidean_Clustering,
    Cluster_Mask_Point): filter the cloud, split table/objects,
    cluster and classify."""
    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    # Create a Voxel filter object for our input point cloud.
    cloud_filtered_v = voxel_filter(cloud)

    # TODO: PassThrough Filter
    # Create PassThrough filter objects.
    #cloud_filtered_pass = passthrough_filter(cloud_filtered_v, 'x', 0.6, 1.1)
    cloud_filtered_pass = passthrough_filter(cloud_filtered_v, 'y', -0.4, 0.47)
    # BUG FIX: the z-filter must run on the y-filtered cloud. The original
    # passed cloud_filtered_v here, silently discarding the y passthrough.
    cloud_filtered_pass = passthrough_filter(cloud_filtered_pass, 'z', 0.6, 0.91)

    # TODO: RANSAC Plane Segmentation
    # Inliers = table plane, outliers = objects on the table.
    extracted_inliers, extracted_outliers = RANSCAN_filter(cloud_filtered_pass)

    # TODO: Convert PCL data to ROS msg
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)

    # TODO: Euclidean Clustering
    cluster_indices, white_cloud = Euclidean_Clustering(extracted_inliers,
                                                        extracted_outliers)
    # cluster_indices now contains a list of indices for each cluster (a list
    # of lists). Next, build a new point cloud to visualize the clusters by
    # assigning a color to each of them.

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene.
    cluster_cloud = Cluster_Mask_Point(cluster_indices, white_cloud)

    # TODO: Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS msg
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (objects).
        pcl_cluster = extracted_outliers.extract(pts_list)
        # Convert the cluster from pcl to ROS using the helper function.
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features, just as in capture_features.py.
        chists = compute_color_histograms(ros_cluster, using_hsv=True)  # False
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        #labeled_features.append([feature, model_name])

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list.
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
def pcl_callback(pcl_msg):
    """Perception pipeline callback (Exercise-2/3 variant).

    Downsamples, crops, and plane-segments the camera cloud, clusters the
    remaining object points, classifies each cluster, and publishes the
    table, objects, cluster-mask, and detected-object messages.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling.
    # Leaf size is in meters, so 0.01 gives 1 cm voxels.
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01  # experiment with this
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # Pass-through filter along z to keep only the table-top band
    passthrough_z = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    axis_min = 0.735  # experiment with this
    axis_max = 1.1    # experiment with this
    passthrough_z.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_z.filter()

    # Pass-through filter along y to remove the front table edge
    passthrough_y = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough_y.set_filter_field_name(filter_axis)
    axis_min = -2.26
    axis_max = -1.35
    passthrough_y.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_y.filter()

    # RANSAC plane segmentation to find the table surface
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.001  # experiment with this
    seg.set_distance_threshold(max_distance)

    # Extract inliers (table) and outliers (objects)
    inliers, coefficients = seg.segment()
    pcl_cloud_objects = cloud_filtered.extract(inliers, negative=True)
    pcl_cloud_table = cloud_filtered.extract(inliers, negative=False)

    # Euclidean clustering on the XYZ-only cloud
    # (clustering cannot use color information)
    white_cloud = XYZRGB_to_XYZ(pcl_cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)  # experiment
    ec.set_MinClusterSize(200)     # experiment
    ec.set_MaxClusterSize(2000)    # experiment
    ec.set_SearchMethod(tree)
    # cluster_indices is a list of index lists, one per cluster
    cluster_indices = ec.Extract()

    # Cluster-mask cloud: assign each segmented object a unique color
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(pcl_cloud_objects)
    ros_cloud_table = pcl_to_ros(pcl_cloud_table)
    ros_cloud_clusters = pcl_to_ros(cluster_cloud)

    # Publish ROS messages.
    # BUGFIX: the cluster cloud was published via the undefined name
    # `pcl_cluster_cloud`; every other pipeline in this file uses the
    # `pcl_cluster_pub` publisher.
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_clusters)

    # Classify the clusters (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for this cluster and convert to a ROS message
        sample_cloud = pcl_cloud_objects.extract(pts_list)
        sample_cloud = pcl_to_ros(sample_cloud)

        # Feature vector: HSV color histograms + surface-normal histograms
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Predict and decode the object label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label marker into RViz, floated above the cluster
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detected object
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels),
                                                   detected_objects_labels))
    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    """Project perception callback with collision-map construction.

    Runs the noise/downsample/crop pipeline, then drives a small state
    machine (globals ``task_state``, ``collision_base``) that rotates the
    PR2's world joint to scan the left and right side tables into the
    collision map before doing plane segmentation, clustering, and
    classification on the center view. Finally invokes ``pr2_mover``.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    # Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    # NOTE(review): a negative std-dev multiplier (-0.6) is an aggressive
    # setting — points beyond (mean - 0.6*stddev) are treated as outliers.
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(7)
    outlier_filter.set_std_dev_mul_thresh(-0.6)
    cloud_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling (leaf size in meters -> 1 cm voxels)
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter — keep the table-top band along z
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.605
    axis_max = 1.0
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: Rotate PR2 in place to capture side tables for the collision map.
    # State machine over the global task_state:
    #   'start' -> turn to +pi/2, capture left sector -> 'left_sector'
    #   'left_sector' -> turn to -pi/2, capture right sector -> 'right_sector'
    #   'right_sector' -> return to 0 -> 'finish'
    # Until 'finish' is reached, each callback returns early after
    # (re)publishing the accumulated collision_base cloud.
    joint_angle = Float64()
    global task_state
    global collision_base
    collision_map_pub.publish(pcl_to_ros(collision_base))
    if task_state == 'start':
        # Command the world joint to face the left side table
        joint_angle.data = np.pi / 2
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle - np.pi / 2) < 0.0001:
            # Arrived: crop the left sector (y > 0.43) into the collision map
            task_state = 'left_sector'
            passthrough = cloud_filtered.make_passthrough_filter()
            filter_axis = 'y'
            passthrough.set_filter_field_name(filter_axis)
            axis_min = 0.43
            axis_max = 1.0
            passthrough.set_filter_limits(axis_min, axis_max)
            cloud_filtered = passthrough.filter()
            collision_base = cloud_filtered
            collision_map_pub.publish(pcl_to_ros(collision_base))
            return
        else:
            # Still rotating — wait for the next callback
            return
    elif task_state == 'left_sector':
        # Command the world joint to face the right side table
        joint_angle.data = -np.pi / 2
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle - -np.pi / 2) < 0.0001:
            # Arrived: append the right sector (y < -0.43) to the collision map
            task_state = 'right_sector'
            passthrough = cloud_filtered.make_passthrough_filter()
            filter_axis = 'y'
            passthrough.set_filter_field_name(filter_axis)
            axis_min = -1.0
            axis_max = -0.43
            passthrough.set_filter_limits(axis_min, axis_max)
            cloud_filtered = passthrough.filter()
            collision_base.from_list(collision_base.to_list() + cloud_filtered.to_list())
            collision_map_pub.publish(pcl_to_ros(collision_base))
            return
        else:
            # Still rotating — wait for the next callback
            return
    elif task_state == 'right_sector':
        # Command the world joint back to center
        joint_angle.data = 0
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle) < 0.0001:
            task_state = 'finish'
            return
        else:
            return
    else:
        # task_state == 'finish': scan complete, fall through to the
        # normal perception pipeline below.
        pass

    # Crop to the center sector (objects on the front table only)
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.45
    axis_max = 0.45
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation — fit the table plane
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.006
    seg.set_distance_threshold(max_distance)
    plane_inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers.
    # The table itself is appended to the collision map so motion
    # planning avoids it.
    cloud_table = cloud_filtered.extract(plane_inliers, negative=False)
    collision_base.from_list(collision_base.to_list() + cloud_table.to_list())
    collision_map_pub.publish(pcl_to_ros(collision_base))
    cloud_objects = cloud_filtered.extract(plane_inliers, negative=True)

    # TODO: Euclidean Clustering on the XYZ-only object cloud
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(1500)
    ec.set_SearchMethod(tree)
    # List of index lists, one per discovered cluster
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Feature vector: HSV color histograms + surface-normal histograms
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Predict and decode the object label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label marker into RViz, floated above the cluster
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Record the detected object
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    # try:
    #     pr2_mover(detected_objects)
    # except rospy.ROSInterruptException:
    #     pass
    if detected_objects:
        pr2_mover(detected_objects)
def pcl_callback(pcl_msg):
    """Segment the table scene and classify each object cluster.

    Downsamples and crops the incoming cloud, separates the table plane
    from the objects with RANSAC, clusters the objects, classifies every
    cluster with the trained SVM, and publishes clouds, RViz labels, and
    the detected-object list.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling (5 mm leaves)
    voxel = cloud.make_voxel_grid_filter()
    leaf = 0.005
    voxel.set_leaf_size(leaf, leaf, leaf)
    cropped = voxel.filter()

    # Pass-through crop along z (table-top band), then along y
    crop_z = cropped.make_passthrough_filter()
    crop_z.set_filter_field_name('z')
    crop_z.set_filter_limits(0.75, 1.1)
    cropped = crop_z.filter()

    crop_y = cropped.make_passthrough_filter()
    crop_y.set_filter_field_name('y')
    crop_y.set_filter_limits(-3, -1.35)
    cropped = crop_y.filter()

    # RANSAC plane fit: inliers form the table, outliers are the objects
    plane_seg = cropped.make_segmenter()
    plane_seg.set_model_type(pcl.SACMODEL_PLANE)
    plane_seg.set_method_type(pcl.SAC_RANSAC)
    plane_seg.set_distance_threshold(0.01)
    plane_idx, _coefficients = plane_seg.segment()
    cloud_table = cropped.extract(plane_idx, negative=False)
    cloud_objects = cropped.extract(plane_idx, negative=True)

    # Euclidean clustering works on spatial data only, so strip the color
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    kdtree = white_cloud.make_kdtree()
    clusterer = white_cloud.make_EuclideanClusterExtraction()
    clusterer.set_ClusterTolerance(0.01)
    clusterer.set_MinClusterSize(600)
    clusterer.set_MaxClusterSize(3000)
    clusterer.set_SearchMethod(kdtree)
    cluster_indices = clusterer.Extract()

    # Cluster-mask cloud: tint each cluster with its own color
    palette = get_color_list(len(cluster_indices))
    tinted_points = [
        [white_cloud[pt][0], white_cloud[pt][1], white_cloud[pt][2],
         rgb_to_float(palette[c])]
        for c, members in enumerate(cluster_indices)
        for pt in members
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(tinted_points)

    # PCL -> ROS conversions for publishing
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Classify every cluster
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        cluster_pcl = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(cluster_pcl)

        # Feature vector: HSV color histograms + surface-normal histograms
        colour_hists = compute_color_histograms(ros_cluster, using_hsv=True)
        normal_hists = compute_normal_histograms(get_normals(ros_cluster))
        feature = np.concatenate((colour_hists, normal_hists))

        # SVM prediction, decoded back to a string label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float the RViz label a little above the cluster's first point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels),
                                                   detected_objects_labels))

    # Publish segmented clouds and the detected-object list
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    """Perception callback: segment the scene and classify object clusters.

    Pipeline: voxel downsample -> z pass-through -> RANSAC plane fit
    (table vs. objects) -> Euclidean clustering -> per-cluster SVM
    classification with RViz labels and a detected-objects message.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    # Exercise-2 TODOs:
    # Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling (leaf size in meters -> 1 cm voxels)
    LEAF_SIZE = 0.01
    vox = pcl_data.make_voxel_grid_filter()
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # PassThrough Filter: keep the table-top band along z
    filter_axis = 'z'
    axis_min = 0.759
    axis_max = 1.1
    passthrough = cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # RANSAC Plane Segmentation: fit the table plane
    max_distance = 0.01
    ransac = cloud_filtered.make_segmenter()
    ransac.set_model_type(pcl.SACMODEL_PLANE)
    ransac.set_method_type(pcl.SAC_RANSAC)
    ransac.set_distance_threshold(max_distance)

    # Extract inliers (table) and outliers (objects)
    inliers, coefficients = ransac.segment()
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering on the XYZ-only (color-stripped) cloud
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    kd_tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(200)
    ec.set_MaxClusterSize(2000)
    ec.set_SearchMethod(kd_tree)
    # List of index lists, one per discovered cluster
    cluster_indices = ec.Extract()

    # Cluster-mask cloud: one unique color per segmented object
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, index in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[index][0], white_cloud[index][1],
                white_cloud[index][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages and publish
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        sample_cloud = ros_cluster
        sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()

        # Skip empty clusters.
        # BUGFIX: the original incremented `try_count`, a name that was
        # never initialized in this function, so an empty cluster would
        # raise NameError; skipping with `continue` is the intended effect.
        if sample_cloud_arr.shape[0] == 0:
            print('Invalid cloud detected')
            continue

        # Feature vector: HSV color histograms + surface-normal histograms
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Predict and decode the object label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label marker into RViz, floated above the cluster
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += 0.4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detected object
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Do some logging
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels),
                                                   detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    """Exercise-2/3 callback: segment the scene, then classify each cluster.

    Converts the camera cloud to PCL, downsamples, crops along z, splits
    table from objects with RANSAC, clusters the objects, classifies each
    cluster, and publishes detected objects plus the three segmented clouds.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    raw_cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling (1 cm leaves)
    grid = raw_cloud.make_voxel_grid_filter()
    leaf = 0.01
    grid.set_leaf_size(leaf, leaf, leaf)
    working_cloud = grid.filter()

    # Pass-through crop: keep only the table-top band along z
    band = working_cloud.make_passthrough_filter()
    band.set_filter_field_name('z')
    band.set_filter_limits(0.6, 1.1)
    working_cloud = band.filter()

    # RANSAC plane segmentation; max_distance controls how far a point may
    # lie from the plane and still count as part of the table
    plane_fit = working_cloud.make_segmenter()
    plane_fit.set_model_type(pcl.SACMODEL_PLANE)
    plane_fit.set_method_type(pcl.SAC_RANSAC)
    plane_fit.set_distance_threshold(0.01)
    plane_idx, _coefficients = plane_fit.segment()

    # Inliers = table surface, outliers = the objects sitting on it.
    # (These hold PCL clouds here; they are converted to ROS messages
    # just before the final publishes.)
    table_cloud = working_cloud.extract(plane_idx, negative=False)
    objects_cloud = working_cloud.extract(plane_idx, negative=True)

    # Euclidean clustering needs spatial data only, so drop the color
    white_cloud = XYZRGB_to_XYZ(objects_cloud)
    kdtree = white_cloud.make_kdtree()
    clusterer = white_cloud.make_EuclideanClusterExtraction()
    clusterer.set_ClusterTolerance(0.05)
    clusterer.set_MinClusterSize(10)
    clusterer.set_MaxClusterSize(2000)
    clusterer.set_SearchMethod(kdtree)
    cluster_indices = clusterer.Extract()

    # Cluster-mask cloud: give every segmented object its own color
    palette = get_color_list(len(cluster_indices))
    tinted_points = [
        [white_cloud[pt][0], white_cloud[pt][1], white_cloud[pt][2],
         rgb_to_float(palette[c])]
        for c, members in enumerate(cluster_indices)
        for pt in members
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(tinted_points)

    # Classify each cluster with the trained SVM
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        member_cloud = objects_cloud.extract(pts_list)
        ros_cluster = pcl_to_ros(member_cloud)

        # Feature vector: RGB color histograms + surface-normal histograms
        # (using_hsv=False matches how this variant's model was trained)
        colour_hists = compute_color_histograms(ros_cluster, using_hsv=False)
        normal_hists = compute_normal_histograms(get_normals(ros_cluster))
        feature = np.concatenate((colour_hists, normal_hists))

        # Predict and decode the label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # RViz label marker, floated above the cluster's first point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels),
                                                   detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    # Convert PCL data to ROS messages, then publish the segmented clouds
    ros_cloud_table = pcl_to_ros(table_cloud)
    ros_cloud_objects = pcl_to_ros(objects_cloud)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
def pcl_callback(pcl_msg):
    """Exercise-2/3 callback (Python 2 dialect — note the print statement).

    Downsamples and crops the camera cloud, separates table from objects
    with RANSAC, clusters the objects, classifies each cluster, and
    publishes the cluster cloud, table cloud, and detected-object list.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera topic.
    """
    # Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling (leaf size in meters -> 1 cm voxels)
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter — keep only the table-top band along z
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation — fit the table plane
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers (table) and outliers (objects)
    inliers, coefficients = seg.segment()
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering on the XYZ-only object cloud
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)  # <type 'pcl._pcl.PointCloud'>
    tree = white_cloud.make_kdtree()  # <type 'pcl._pcl.KdTree'>
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(1000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()  # list of index lists, one per cluster
    # Debug output: number of clusters found (Python 2 print statement)
    print len(cluster_indices)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_table = pcl_to_ros(extracted_inliers)

    # TODO: Publish ROS messages.
    # NOTE(review): the objects topic carries the colored cluster-mask
    # cloud here (not the raw object cloud) — confirm this is intended.
    pcl_objects_pub.publish(ros_cluster_cloud)
    pcl_table_pub.publish(ros_cloud_table)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (objects)
        pcl_cluster = extracted_outliers.extract(pts_list)  # <type 'pcl._pcl.PointCloud_PointXYZRGB'>
        # Convert the cluster from PCL to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Feature vector: RGB color histograms + surface-normal histograms,
        # matching how the classifier was trained in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=False)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        #labeled_features.append([feature, model_name])
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label marker into RViz, floated above the cluster
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg): # TODO: Convert ROS msg to PCL data pcl_cloud = ros_to_pcl(pcl_msg) fil = pcl_cloud.make_statistical_outlier_filter() fil.set_mean_k(50) fil.set_std_dev_mul_thresh(1.0) cloud_filtered = fil.filter() # TODO: Voxel Grid Downsampling vox = cloud_filtered.make_voxel_grid_filter() LEAF_SIZE = 0.001 vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE) #cloud_filtered = vox.filter() # TODO: PassThrough Filter passthrough = cloud_filtered.make_passthrough_filter() passthrough.set_filter_field_name('z') z_min = 0.63 z_max = 1.1 passthrough.set_filter_limits(z_min, z_max) cloud_filtered = passthrough.filter() #To isolate edges of boxes passthrough1 = cloud_filtered.make_passthrough_filter() passthrough1.set_filter_field_name('y') y_min = -0.4 y_max = 0.4 passthrough1.set_filter_limits(y_min, y_max) cloud_filtered = passthrough1.filter() #pcl.save(cloud_filtered, 'test.pcd') # TODO: RANSAC Plane Segmentation seg = cloud_filtered.make_segmenter() seg.set_model_type(pcl.SACMODEL_PLANE) seg.set_method_type(pcl.SAC_RANSAC) max_distance = 0.01 seg.set_distance_threshold(max_distance) # TODO: Extract inliers and outliers inliers, coefficients = seg.segment() cloud_table = cloud_filtered.extract(inliers, negative=False) cloud_objects = cloud_filtered.extract(inliers, negative=True) # TODO: Euclidean Clustering white_cloud = XYZRGB_to_XYZ(cloud_objects) tree = white_cloud.make_kdtree() # Create a cluster extraction object ec = white_cloud.make_EuclideanClusterExtraction() # Set tolerances for distance threshold # as well as minimum and maximum cluster size (in points) ec.set_ClusterTolerance(0.045) ec.set_MinClusterSize(200) #ec.set_MaxClusterSize(1700) # Search the k-d tree for clusters ec.set_SearchMethod(tree) # Extract indices for each of the discovered clusters cluster_indices = ec.Extract() # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately #Assign a color corresponding to each segmented object in scene cluster_color = 
get_color_list(len(cluster_indices)) color_cluster_point_list = [] for j, indices in enumerate(cluster_indices): for i, indice in enumerate(indices): color_cluster_point_list.append([white_cloud[indice][0], white_cloud[indice][1], white_cloud[indice][2], rgb_to_float(cluster_color[j])]) #Create new cloud containing all clusters, each with unique color cluster_cloud = pcl.PointCloud_PointXYZRGB() cluster_cloud.from_list(color_cluster_point_list) # TODO: Convert PCL data to ROS messages ros_cloud_table = pcl_to_ros(cloud_table) ros_cloud_objects = pcl_to_ros(cloud_objects) ros_cluster_cloud = pcl_to_ros(cluster_cloud) # TODO: Publish ROS messages pcl_objects_pub.publish(ros_cloud_objects) pcl_table_pub.publish(ros_cloud_table) pcl_cluster_pub.publish(ros_cluster_cloud) # Exercise-3 TODOs: # Classify the clusters! (loop through each detected cluster one at a time) detected_objects_labels = [] detected_objects = [] for index, pts_list in enumerate(cluster_indices): # Grab the points for the cluster pcl_cluster = cloud_objects.extract(pts_list) cluster_ros = pcl_to_ros(pcl_cluster) # Compute the associated feature vector colour_hists = compute_color_histograms(cluster_ros, using_hsv=True) cluster_norms = get_normals(cluster_ros) normal_hists = compute_normal_histograms(cluster_norms) feature = np.concatenate((colour_hists, normal_hists)) # Make the prediction prediction = clf.predict(scaler.transform(feature.reshape(1,-1))) label = encoder.inverse_transform(prediction)[0] detected_objects_labels.append(label) # Publish a label into RViz label_pos = list(white_cloud[pts_list[0]]) label_pos[2] += .4 object_markers_pub.publish(make_label(label,label_pos, index)) # Add the detected object to the list of detected objects. 
do = DetectedObject() do.label = label do.cloud = cluster_ros detected_objects.append(do) rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels)) # Publish the list of detected objects detected_objects_pub.publish(detected_objects) # Suggested location for where to invoke your pr2_mover() function within pcl_callback() # Could add some logic to determine whether or not your object detections are robust # before calling pr2_mover() try: pr2_mover(detected_objects) except rospy.ROSInterruptException: pass
def pcl_callback(pcl_msg):
    """Point Cloud Subscriber callback.

    Exercise-2: filter the raw RGB-D cloud, remove the table plane via
    RANSAC, and cluster the remaining points into individual objects.
    Exercise-3: classify each cluster with the trained model, publish
    labeled detections, and hand them to pr2_mover().

    :param pcl_msg: sensor_msgs/PointCloud2 message from the camera.
    """
    # Exercise-2 TODOs:
    # Convert ROS msg to PCL data
    point_cloud = ros_to_pcl(pcl_msg)

    # Statistical Outlier Filtering: drop sensor noise whose mean neighbor
    # distance exceeds the global mean by more than x standard deviations.
    outlier_filter = point_cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(50)  # num of neighboring points to analyze
    x = .05  # threshold scale factor
    outlier_filter.set_std_dev_mul_thresh(x)  # outlier > global_mean_distance + x*global_std_dev
    point_cloud_filtered = outlier_filter.filter()

    # Voxel Grid Downsampling: one representative point per voxel.
    vox = point_cloud_filtered.make_voxel_grid_filter()
    leaf_size = .005  # voxel (leaf) size
    vox.set_leaf_size(leaf_size, leaf_size, leaf_size)
    point_cloud_filtered = vox.filter()

    # PassThrough Filter: crop to the region of interest, first along y,
    # then along z on the already-cropped cloud.
    passthrough = point_cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name('y')
    passthrough.set_filter_limits(-0.42, 0.42)  # prevent bin recognition
    point_cloud_filtered = passthrough.filter()
    passthrough = point_cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name('z')
    passthrough.set_filter_limits(0.6, 1.8)  # only table + objects
    point_cloud_filtered = passthrough.filter()

    # Moving Least Squares Smoothing (improved normal estimation)
    # NOTE(review): the commented arguments look swapped —
    # set_polynomial_fit usually takes a bool and set_polynomial_order an
    # int; confirm against the python-pcl API before re-enabling.
    # mls = point_cloud_filtered.make_moving_least_squares()
    # mls.set_polynomial_fit(3)
    # mls.set_polynomial_order(True)
    # mls.set_search_radius(.01)
    # point_cloud_filtered = pcl.MovingLeastSquares_PointXYZRGB.process(mls)

    # RANSAC Plane Segmentation: fit the dominant plane (the table).
    seg = point_cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(.01)  # max point-to-plane distance for an inlier
    inliers, coefficients = seg.segment()

    # Extract inliers (table plane) and outliers (the objects on it).
    cloud_table = point_cloud_filtered.extract(inliers, negative=False)
    cloud_objects = point_cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering — operates on spatial (XYZ-only) data.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.025)  # distance tolerance
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(5500)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()  # extract indices for each of the discovered clusters

    # Create Cluster-Mask Point Cloud to visualize each cluster separately,
    # giving every cluster its own color.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # Publish ROS messages
    pcl_cluster_pub.publish(ros_cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)

    # Rotate PR2 in place to capture side tables for the collision map
    # if not HAS_RETURNED_CENTER:
    #     rotate_pr2_world_joint()
    #     return

    # Exercise-3 TODOs:
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # (same steps as is covered in capture_features.py)
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, slightly above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # move detected objects (if objects detected)
    if detected_objects:
        try:
            pr2_mover(detected_objects, ros_cloud_table)
        except rospy.ROSInterruptException:
            pass
spawn_model(model_name) for i in range(50): print('%s %i' % (model_name, i)) # make five attempts to get a valid a point cloud then give up sample_was_good = False try_count = 0 while not sample_was_good and try_count < 5: sample_cloud = capture_sample() # pickle.dump(sample_cloud, open('%s_%i.pickle'%(model_name, i), 'wb')) sample_cloud_arr = ros_to_pcl(sample_cloud).to_array() # Check for invalid clouds. if sample_cloud_arr.shape[0] == 0: print('Invalid cloud detected') try_count += 1 else: sample_was_good = True # sample_cloud = pcl_to_ros(pipeline.voxel(ros_to_pcl(sample_cloud))) # Extract histogram features chists = compute_color_histograms(sample_cloud, using_hsv=True) normals = get_normals(sample_cloud) nhists = compute_normal_histograms(normals) feature = np.concatenate((chists, nhists)) labeled_features.append([feature, model_name]) delete_model() pickle.dump(labeled_features, open('training_set.sav', 'wb'))
def callback(pcl_msg):
    """Callback function for your Point Cloud Subscriber.

    Downsamples and crops the incoming cloud, separates the table plane
    from the objects with RANSAC, clusters the objects, classifies each
    cluster, and publishes clouds, markers, and the detected-object list.
    """
    # Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    vox = pcl_data.make_voxel_grid_filter()
    leaf_size = 0.005  # voxel edge length in meters
    vox.set_leaf_size(leaf_size, leaf_size, leaf_size)
    pcl_data_filtered = vox.filter()

    # PassThrough Filter — keep only the table-top band along z
    passthrough = pcl_data_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    pcl_data_filtered = passthrough.filter()

    # Second PassThrough — crop sideways along y to the table width
    passthrough = pcl_data_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.55
    axis_max = 0.55
    passthrough.set_filter_limits(axis_min, axis_max)
    pcl_data_filtered = passthrough.filter()

    # RANSAC Plane Segmentation — fit the table plane
    seg = pcl_data_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance for an inlier
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers (table) and outliers (objects)
    cloud_table = pcl_data_filtered.extract(inliers, negative=False)
    cloud_objects = pcl_data_filtered.extract(inliers, negative=True)

    # Euclidean Clustering — spatial (XYZ) data only
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(25000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    # The table cloud is also published as collision-map input
    collision_pub.publish(ros_cloud_table)

    # classification
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        # convert the cluster from pcl to ROS using helper function
        # NOTE(review): this rebinding shadows the full cluster-mask message
        # published above; harmless as used here, but a distinct name would
        # be clearer.
        ros_cluster_cloud = pcl_to_ros(pcl_cluster)
        # Extract histogram features (HSV color + surface normals)
        color_hists = compute_color_histograms(ros_cluster_cloud, using_hsv=True)
        normal_hists = compute_normal_histograms(
            get_normals(ros_cluster_cloud))
        feature = np.concatenate((color_hists, normal_hists))
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, slightly above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster_cloud
        detected_objects.append(do)

    detected_objects_pub.publish(detected_objects)
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
def pcl_callback(pcl_msg):
    """ROS callback function for the Point Cloud Subscriber.

    Takes a point cloud and performs object recognition on it.
    The pipeline is documented in the writeup. Publishes point clouds
    containing recognized objects and clustered objects. Uses machine
    learning model to classify objects in the cluster. Publishes the
    labeled object list to ROS. Calls the pr2_mover routine with the
    resulting list of detected objects.
    """
    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(8)
    # Any point with a mean distance larger than global
    # (mean_distance + 0.3 * std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(0.3)
    cloud = outlier_filter.filter()

    # Voxel Grid filter
    vox = cloud.make_voxel_grid_filter()
    # Define leaf size
    LEAF_SIZE = 0.005
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # PassThrough filter 0.6 < z < 1.1
    pz = cloud_filtered.make_passthrough_filter()
    pz.set_filter_field_name('z')
    pz.set_filter_limits(0.6, 1.1)
    cloud_filtered = pz.filter()

    # PassThrough filter 0.34 < x < 1.0
    px = cloud_filtered.make_passthrough_filter()
    px.set_filter_field_name('x')
    px.set_filter_limits(0.34, 1.0)
    cloud_filtered = px.filter()

    # RANSAC plane segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # Extract inliers (the table plane)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    # Extract outliers (the objects resting on the table)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering — needs spatial-only (XYZ) data
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(40)
    ec.set_MaxClusterSize(4000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, idx in enumerate(indices):
            x = white_cloud[idx][0]
            y = white_cloud[idx][1]
            z = white_cloud[idx][2]
            c = rgb_to_float(cluster_color[j])
            color_cluster_point_list.append([x, y, z, c])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        # convert the cluster from pcl to ROS
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features (HSV color + surface normals)
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        # Publish a label into RViz, slightly above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .2
        object_markers_pub.publish(make_label(label,label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Handle the resulting list of objects
    if detected_objects:
        # Publish the list of detected objects
        detected_objects_pub.publish(detected_objects)
        # Call the pr2_mover routine
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
    else:
        rospy.loginfo("No objects detected.")
def pcl_callback(pcl_msg):
    """Point Cloud Subscriber callback: perception + object recognition.

    Pipeline: voxel-grid downsample -> statistical outlier removal ->
    passthrough crops (y, then z) -> RANSAC plane segmentation (table vs.
    objects) -> Euclidean clustering -> per-cluster SVM classification.
    Publishes the segmented clouds, per-object RViz labels, and the
    detected-object list, then invokes pr2_mover().

    :param pcl_msg: sensor_msgs/PointCloud2 message from the camera.
    """
    # Convert ROS msg to PCL data
    pcl_msg_new = ros_to_pcl(pcl_msg)

    # --- Voxel Grid Downsampling ---
    # Divide the cloud into a regular 3D grid and statistically combine the
    # points inside each voxel into one output point, cutting computation.
    vox = pcl_msg_new.make_voxel_grid_filter()
    LEAF_SIZE = 0.01  # voxel edge length in meters
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # --- Statistical Outlier Removal ---
    # Points whose mean neighbor distance exceeds (global mean + x*std_dev)
    # are treated as sensor noise (dust, lighting) and dropped.
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    # NOTE(review): mean_k=1 with x=0.0001 is an extremely aggressive
    # setting — confirm it was tuned deliberately for this scene.
    outlier_filter.set_mean_k(1)  # neighbors analyzed per point
    x = 0.0001  # threshold scale factor
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = outlier_filter.filter()

    # NOTE: saving PCDs on every callback is handy for debugging but slow.
    pcl.save(cloud_filtered, 'voxel_downsampled.pcd')

    # --- PassThrough Filters (region of interest) ---
    # Fix: previously both passthrough objects were created from the SAME
    # input cloud before either ran, so the final assignment from the z
    # filter silently discarded the y-crop. The z filter is now built from
    # the y-filtered cloud so both crops take effect.
    passthrough_y = cloud_filtered.make_passthrough_filter()
    passthrough_y.set_filter_field_name('y')
    passthrough_y.set_filter_limits(-0.2, 0.2)
    cloud_filtered = passthrough_y.filter()
    pcl.save(cloud_filtered, 'pass_through_y_filtered.pcd')

    passthrough_z = cloud_filtered.make_passthrough_filter()
    passthrough_z.set_filter_field_name('z')
    passthrough_z.set_filter_limits(0.61, 1.2)  # keep table top + objects
    cloud_filtered = passthrough_z.filter()
    pcl.save(cloud_filtered, 'pass_through_z_filtered.pcd')

    # --- RANSAC Plane Segmentation ---
    # Model the table as a plane so it can be separated from the objects.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance for an inlier
    seg.set_distance_threshold(max_distance)
    # inliers refers to the table
    inliers, coefficients = seg.segment()

    # Extract inliers (table) and outliers (objects)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    pcl.save(cloud_table, 'cloud_table.pcd')
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    pcl.save(cloud_objects, 'cloud_objects.pcd')

    # --- Euclidean Clustering ---
    # The clustering algorithm needs spatial-only data, so strip color.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    # A k-d tree reduces the cost of neighbor searches.
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)  # distance tolerance between points
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(1500)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()  # list of per-cluster index lists

    # Color each cluster uniquely for the visualization cloud.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for indice in indices:
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # --- Convert PCL data to ROS messages and publish ---
    # Fix: each cloud is now published exactly once (the original issued
    # every publish call twice back-to-back).
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # --- Exercise-3: classify the clusters, one at a time ---
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster and convert to a ROS message
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_pcl_cluster = pcl_to_ros(pcl_cluster)
        # Feature vector: HSV color histograms + surface-normal histograms
        chists = compute_color_histograms(ros_pcl_cluster, using_hsv=True)
        normals = get_normals(ros_pcl_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Scale the feature vector and predict with the trained classifier
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, slightly above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_pcl_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Hand the detections to the pick-and-place routine.
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Point Cloud Subscriber callback: segmentation + object recognition.

    Exercise-2: filter, downsample, crop, and RANSAC-segment the cloud.
    Exercise-3: cluster the objects, classify each cluster, publish the
    labeled detections, and call pr2_mover().
    """
    # Exercise-2 TODOs:
    # TODO: Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    # From Lesson 3-16 Outlier Removal Filter
    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = pcl_data.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(20)
    # Set threshold scale factor
    x = 0.3
    # Any point with a mean distance larger than global
    # (mean distance + x*std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Finally call the filter function for magic
    pcl_data = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for input point cloud
    vox = pcl_data.make_voxel_grid_filter()
    # Choose a voxel (leaf) size
    # LEAF_SIZE = 1
    LEAF_SIZE = 0.01  # this gives the best results
    # Set the voxel (leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    pcl_data_filtered = vox.filter()
    # filename = 'voxel_downsampled.pcd'
    # pcl.save(pcl_data_filtered, filename)

    # TODO: PassThrough Filter
    # Create a PassThrough filter object.
    passthrough = pcl_data_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    # Finally use the filter function to obtain the resultant point cloud.
    pcl_data_filtered = passthrough.filter()
    # filename = 'pass_through_filtered.pcd'
    # pcl.save(pcl_data_filtered, filename)

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = pcl_data_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # ****This is so-called Extracting Indices****
    # TODO: Extract inliers and outliers
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()
    # Extract inliers (the table plane)
    cloud_table = pcl_data_filtered.extract(inliers, negative=False)
    # filename = 'cloud_table.pcd'
    # pcl.save(cloud_table, filename)
    # Extract outliers (the objects on the table)
    cloud_objects = pcl_data_filtered.extract(inliers, negative=True)
    # filename = 'cloud_objects.pcd'
    # pcl.save(cloud_objects, filename)

    # TODO: Euclidean Clustering ****DBSCAN Algorithm****
    # Clustering needs spatial-only data, so strip the color channel.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.015)
    ec.set_MinClusterSize(20)
    ec.set_MaxClusterSize(3000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        # print ("index: {}; pts_list: {}".format(index, pts_list))
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)  # GC: Use HSV
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # labeled_features.append([feature, model_name])
        # detected_objects_labels.append([feature, index])  # GC do we need this line?
        # Compute the associated feature vector
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, slightly above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects  # GC: TODO
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Full perception pipeline callback.

    Filters the incoming camera cloud, segments the table plane, clusters the
    remaining points into objects, classifies each cluster with the trained
    model, publishes the results, accumulates the table points into the
    collision map, and finally hands the detections to pr2_mover().

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera
    """
    # Table points are accumulated across callbacks for the collision map
    global table_cluster

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)
    save_pcd(cloud, 'original.pcd')

    # Statistical Outlier Filtering: drop points whose mean distance to their
    # 25 nearest neighbors exceeds (mean + 1 * stddev)
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(25)
    outlier_filter.set_std_dev_mul_thresh(1)
    cloud_no_outliers = outlier_filter.filter()
    rospy.loginfo("Completed outlier filtering. Points in cloud: {}".format(
        cloud_no_outliers.size))
    save_pcd(cloud_no_outliers, 'no_outlier.pcd')

    # Voxel Grid Downsampling
    vox = cloud_no_outliers.make_voxel_grid_filter()
    # voxel size (also called leafs)
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_downsampled = vox.filter()
    # FIX: log the point count (.size) instead of the cloud object's repr,
    # consistent with the outlier-filter log line above
    rospy.loginfo("Completed downsampling. Points in cloud: {}".format(
        cloud_downsampled.size))
    save_pcd(cloud_downsampled, 'downsampled.pcd')

    # PassThrough Filter: keep only the vertical band containing the table top
    passthrough_z = cloud_downsampled.make_passthrough_filter()
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    axis_min = 0.60
    axis_max = 4
    passthrough_z.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_z.filter()
    # FIX: log the point count (.size) instead of the cloud object's repr
    rospy.loginfo(
        "Completed passthrough. Points in cloud: {}".format(cloud_filtered.size))
    save_pcd(cloud_filtered, 'passthrough.pcd')

    # RANSAC Plane Segmentation: the dominant plane is the table
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # max point-to-plane distance to count as a plane inlier
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers (table) and outliers (objects)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects_pre = cloud_filtered.extract(inliers, negative=True)

    # Trim the object cloud laterally along y
    passthrough_y = cloud_objects_pre.make_passthrough_filter()
    filter_axis = 'y'
    passthrough_y.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passthrough_y.set_filter_limits(axis_min, axis_max)
    cloud_objects = passthrough_y.filter()
    rospy.loginfo("Completed ransac")
    save_pcd(cloud_table, 'table.pcd')
    save_pcd(cloud_objects_pre, 'objects_pre.pcd')
    save_pcd(cloud_objects, 'objects.pcd')

    # Euclidean Clustering; the k-d tree operates on XYZ-only points
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    kd_tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.03)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(2200)
    ec.set_SearchMethod(kd_tree)
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2], rgb_to_float(cluster_color[j])
            ])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    rospy.loginfo("Publishing results")
    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    rospy.loginfo("Completed euclidean clustering")
    save_pcd(cluster_cloud, 'cluster_cloud.pcd')

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector: HSV color histograms
        # concatenated with surface-normal histograms
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction and decode the label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz, floated above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # setup table cloud:
    # we keep appending newly discovered table points to the table points we
    # have already found, so the collision map covers the whole table over time
    table_cluster = table_cluster + cloud_table.to_list()
    pcl_table_cluster = pcl.PointCloud_PointXYZRGB()
    pcl_table_cluster.from_list(table_cluster)
    clear_collision_map()
    collision_map_pub.publish(pcl_to_ros(pcl_table_cluster))

    # Hand the detections (and accumulated table) to the mover; a ROS
    # shutdown during the service calls is expected and benign
    try:
        pr2_mover(detected_objects, table_cluster)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Perception pipeline callback.

    Filters the incoming cloud, segments away the table plane, clusters the
    remaining points into objects, classifies each cluster with the trained
    model, and publishes clouds, RViz markers, and detected objects.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera
    """
    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Statistical Outlier Filtering to remove sensor noise
    outlier_filter = cloud.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(20)
    # Any point with a mean neighbor distance larger than
    # (global mean + 0.1 * stddev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(0.1)
    cloud_filtered = outlier_filter.filter()

    # Voxel Grid Downsampling to reduce the point count
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01  # voxel (leaf) edge length in meters
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # PassThrough Filter to remove the areas on the side of the table
    passthrough_y = cloud_filtered.make_passthrough_filter()
    passthrough_y.set_filter_field_name('y')
    y_axis_min = -0.45
    y_axis_max = 0.45
    passthrough_y.set_filter_limits(y_axis_min, y_axis_max)
    cloud_filtered = passthrough_y.filter()

    # PassThrough Filter to isolate only the objects on the table surface
    passthrough_z = cloud_filtered.make_passthrough_filter()
    passthrough_z.set_filter_field_name('z')
    z_axis_min = 0.6
    z_axis_max = 1.0
    passthrough_z.set_filter_limits(z_axis_min, z_axis_max)
    cloud_filtered = passthrough_z.filter()

    # RANSAC Plane Segmentation: the dominant plane is the table
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance to count as an inlier
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers (table) and outliers (objects)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering; the k-d tree operates on XYZ-only points
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(3000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Assign a random color per detected cluster
    cluster_color = get_color_list(len(cluster_indices))

    # Store the detected objects and labels in these lists
    detected_objects_labels = []
    detected_objects = []
    color_cluster_point_list = []

    # Iterate through each detected object cluster for object recognition
    for index, pts_list in enumerate(cluster_indices):
        # Store the object's cloud in this list
        object_cluster = []

        # Create an individual cluster just for the object being processed
        for i, pts in enumerate(pts_list):
            # Retrieve cloud values for the x, y, z, rgb object
            object_cluster.append([
                cloud_objects[pts][0], cloud_objects[pts][1],
                cloud_objects[pts][2], cloud_objects[pts][3]
            ])
            # Retrieve cloud values for the x, y, z object, assigning a
            # preidentified color to all cloud values (cluster-mask cloud)
            color_cluster_point_list.append([
                white_cloud[pts][0], white_cloud[pts][1], white_cloud[pts][2],
                rgb_to_float(cluster_color[index])
            ])

        # Convert list of point cloud features (x,y,z,rgb) into a point cloud
        pcl_cluster = pcl.PointCloud_PointXYZRGB()
        pcl_cluster.from_list(object_cluster)

        # Convert the cluster to a ROS message for the feature extractors
        ros_cloud = pcl_to_ros(pcl_cluster)
        #pcl_objects_pub.publish(ros_cloud)

        # Exercise-3 TODOs:
        # Compute the associated feature vector:
        # histogram features extracted the same way as in capture_features.py
        chists = compute_color_histograms(ros_cloud, nbins=128, using_hsv=True)
        normals = get_normals(ros_cloud)
        nhists = compute_normal_histograms(normals, nbins=128)
        feature = np.concatenate((chists, nhists))

        # Make the prediction and decode the label
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz, floated above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Create new cloud containing all clusters
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_object_cluster = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # Publish ROS messages of the point clouds and detected objects
    pcl_objects_cloud_pub.publish(ros_cloud_object_cluster)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    detected_objects_pub.publish(detected_objects)  # detected object labels

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Perception callback: denoise, downsample, crop, segment the table,
    cluster the objects, classify each cluster, and trigger pick-and-place.

    :param pcl_msg: sensor_msgs/PointCloud2 from the RGB-D camera
    """
    # ROS message -> PCL cloud
    raw_cloud = ros_to_pcl(pcl_msg)

    # Statistical outlier removal: examine 20 neighbors per point and drop
    # points farther than (mean + 0.5 * stddev) from their neighborhood
    denoise = raw_cloud.make_statistical_outlier_filter()
    denoise.set_mean_k(20)
    denoise.set_std_dev_mul_thresh(0.5)
    working_cloud = denoise.filter()

    # Voxel-grid downsampling with a 5 mm leaf
    voxel_grid = working_cloud.make_voxel_grid_filter()
    leaf = .005
    voxel_grid.set_leaf_size(leaf, leaf, leaf)
    working_cloud = voxel_grid.filter()

    # Crop to the region of interest around the table top, one axis at a time
    working_cloud = passthrough_filter(working_cloud, 'z', .6, 1.3)
    working_cloud = passthrough_filter(working_cloud, 'x', .3, 1.0)
    working_cloud = passthrough_filter(working_cloud, 'y', -.5, .5)

    # RANSAC plane fit: the dominant plane is the table
    plane_fit = working_cloud.make_segmenter()
    plane_fit.set_model_type(pcl.SACMODEL_PLANE)
    plane_fit.set_method_type(pcl.SAC_RANSAC)
    plane_fit.set_distance_threshold(.01)
    inliers, coefficients = plane_fit.segment()

    # Plane inliers are the table; everything else is the objects
    cloud_table = working_cloud.extract(inliers, negative=False)
    cloud_objects = working_cloud.extract(inliers, negative=True)

    # Euclidean clustering runs on an XYZ-only copy of the object cloud
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    cluster_indices = do_euclidean_clustering(white_cloud, .01, 20, 15000)

    # Build one visualization cloud where every cluster gets its own color
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = [
        [white_cloud[point_id][0],
         white_cloud[point_id][1],
         white_cloud[point_id][2],
         rgb_to_float(cluster_color[cluster_id])]
        for cluster_id, point_ids in enumerate(cluster_indices)
        for point_id in point_ids
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # PCL -> ROS conversion and publication of the three clouds
    pcl_objects_cloud_pub.publish(pcl_to_ros(cluster_cloud))
    pcl_objects_pub.publish(pcl_to_ros(cloud_objects))
    pcl_table_pub.publish(pcl_to_ros(cloud_table))

    # Classify every cluster, one at a time
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Isolate this cluster's points and convert them to a ROS cloud
        object_cloud = pcl_to_ros(cloud_objects.extract(pts_list))

        # Feature vector: HSV color histograms + surface-normal histograms
        hist_color = compute_color_histograms(object_cloud, using_hsv=True)
        hist_normal = compute_normal_histograms(get_normals(object_cloud))
        feature = np.concatenate((hist_color, hist_normal))

        # Predict the object label with the trained classifier
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float a text marker above the cluster's first point in RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection
        detection = DetectedObject()
        detection.label = label
        detection.cloud = object_cloud
        detected_objects.append(detection)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Hand off to the mover; a ROS shutdown mid-call is expected and benign
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Callback function for the Point Cloud Subscriber.

    Runs the full perception pipeline (denoise, downsample, crop, plane
    segmentation, clustering, classification) and publishes clouds, RViz
    markers, and the detected-objects list before invoking pr2_mover().

    :param pcl_msg: ROS point cloud message
    """
    # Convert ROS msg to PCL data (XYZRGB)
    cloud = ros_to_pcl(pcl_msg)
    #pcl.save(cloud, './misc/pipeline_0_raw_cloud.pcd')

    # Statistical outlier filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(20)
    # Any point with a mean distance larger than global will be considered out
    outlier_filter.set_std_dev_mul_thresh(0.1)
    cloud_filtered = outlier_filter.filter()
    #pcl.save(cloud_filtered, './misc/pipeline_1_outlier_removal_filter.pcd')

    # Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01  # voxel (leaf) edge length in meters
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    #pcl.save(cloud_filtered, './misc/pipeline_2_voxel_grid_filter.pcd')

    # PassThrough Filter to remove the areas on the side of the table
    passthrough_y = cloud_filtered.make_passthrough_filter()
    passthrough_y.set_filter_field_name('y')
    y_axis_min = -0.4
    y_axis_max = 0.4
    passthrough_y.set_filter_limits(y_axis_min, y_axis_max)
    cloud_filtered = passthrough_y.filter()

    # PassThrough Filter to isolate only the objects on the table surface
    passthrough_z = cloud_filtered.make_passthrough_filter()
    passthrough_z.set_filter_field_name('z')
    z_axis_min = 0.61
    z_axis_max = 0.9
    passthrough_z.set_filter_limits(z_axis_min, z_axis_max)
    cloud_filtered = passthrough_z.filter()
    #pcl.save(cloud_filtered, './misc/pipeline_3_passthrough_filter.pcd')

    # RANSAC Plane Segmentation to identify the table from the objects
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance to count as an inlier
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers (table surface) and outliers (objects)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    #pcl.save(cloud_table, './misc/pipeline_4_extracted_inliers.pcd')
    #pcl.save(cloud_objects, './misc/pipeline_5_extracted_outliers.pcd')

    # Convert the xyzrgb cloud to only xyz since k-d tree only needs xyz
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    # k-d tree decreases the computation of searching for neighboring points.
    # PCL's euclidian clustering algo only supports k-d trees
    tree = white_cloud.make_kdtree()

    # Euclidean clustering used to build individual point clouds for each
    # object. The Cluster-Mask point cloud allows each cluster to be
    # visualized separately.
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(3000)
    ec.set_SearchMethod(tree)  # Search the k-d tree for clusters
    # Extract indices for each found cluster. This is a list of indices for
    # each cluster (list of lists)
    cluster_indices = ec.Extract()

    # Assign a random color to each isolated object in the scene
    cluster_color = get_color_list(len(cluster_indices))

    # Store the detected objects and labels in these lists
    detected_objects_labels = []
    detected_objects = []
    color_cluster_point_list = []

    # Iterate through each detected object cluster for object recognition
    for index, pts_list in enumerate(cluster_indices):
        # Store the object's cloud in this list
        object_cluster = []

        # Create an individual cluster just for the object being processed
        for i, pts in enumerate(pts_list):
            # Retrieve cloud values for the x, y, z, rgb object
            object_cluster.append([
                cloud_objects[pts][0], cloud_objects[pts][1],
                cloud_objects[pts][2], cloud_objects[pts][3]
            ])
            # Retrieve cloud values for the x, y, z object, assigning a
            # preidentified color to all cloud values
            color_cluster_point_list.append([
                white_cloud[pts][0], white_cloud[pts][1], white_cloud[pts][2],
                rgb_to_float(cluster_color[index])
            ])

        # Convert list of point cloud features (x,y,z,rgb) into a point cloud
        pcl_cluster = pcl.PointCloud_PointXYZRGB()
        pcl_cluster.from_list(object_cluster)

        # Convert the cluster from pcl to ROS using helper function
        ros_cloud = pcl_to_ros(pcl_cluster)
        #pcl_objects_pub.publish(ros_cloud)

        # Extract histogram features (similar to capture_features.py)
        histogram_bins = 128
        chists = compute_color_histograms(ros_cloud,
                                          nbins=histogram_bins,
                                          using_hsv=True)
        normals = get_normals(ros_cloud)
        nhists = compute_normal_histograms(normals, nbins=histogram_bins)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result and add it
        # to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz, floated above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Create new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_object_cluster = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # Publish ROS messages of the point clouds and detected objects
    pcl_objects_cloud_pub.publish(
        ros_cloud_object_cluster)  # solid color objects
    pcl_objects_pub.publish(ros_cloud_objects)  # original color objects
    pcl_table_pub.publish(ros_cloud_table)  # table cloud
    detected_objects_pub.publish(detected_objects)  # detected object labels

    try:
        # Add some logic to determine whether or not your object detections
        # are robust enough before calling pr2_mover()
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Exercise perception callback.

    Downsamples and crops the incoming cloud, segments away the table plane,
    clusters the remaining points into objects, classifies each cluster, and
    publishes clouds, RViz markers, and the detected-objects list. Note this
    variant does not apply outlier filtering and does not call pr2_mover().

    :param pcl_msg: ROS point cloud message
    """
    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    # Note: this (1) is a poor choice of leaf size
    # Experiment and find the appropriate size!
    LEAF_SIZE = 0.01
    # set the voxel (or leaf size)
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # PassThrough Filter
    # Create a PassThrough filter object
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.7
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    # Finally use the filter function to obtain the resultant point cloud
    cloud_filtered = passthrough.filter()

    # RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    # set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # Extract inliers (the table plane)
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    # Extract outliers (the objects on the table)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering; the k-d tree operates on XYZ-only points
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    # create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects
    ec.set_ClusterTolerance(0.015)
    ec.set_MinClusterSize(20)
    ec.set_MaxClusterSize(1500)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2], rgb_to_float(cluster_color[j])
            ])

    # convert list of point cloud features (x,y,z, rgb) to a point cloud
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # convert the clouds from pcl to ROS
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # classify the clusters (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # grab the points for this cluster and convert them to a ROS cloud
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # extract histogram features: HSV color + surface-normal histograms
        colorhists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        normhists = compute_normal_histograms(normals)
        features = np.concatenate((colorhists, normhists))

        # predict the object label with the trained classifier
        prediction = clf.predict(scaler.transform(features.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # publish a label into RViz, floated above the first cluster point
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    """Perception callback with intermediate debug publishers.

    Downsamples, denoises and crops the incoming cloud, segments away the
    table plane, clusters the objects, classifies each cluster, and publishes
    all intermediate clouds plus RViz markers and the detected-objects list
    before invoking pr2_mover().

    :param pcl_msg: ROS point cloud message
    """
    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    # choose a voxel (also known as a leaf) size
    # changed from 0.01 to 0.005 for a denser result
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # call the filter function to obtain the resultant downsampled point cloud
    cloud_downsample = vox.filter()

    # Outlier Removal Filter
    # Creating a filter object:
    outlier_filter = cloud_downsample.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    # (previously 5, now 8)
    outlier_filter.set_mean_k(8)
    # Set threshold scale factor (changed from 0.1 to 0.3)
    x = 0.3
    # Any point with a mean distance larger than global (mean distance+x*std_dev)
    # will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Call the filter function to remove the outliers
    outliers_removed = outlier_filter.filter()

    # PassThrough Filter along z to keep only the table-top band
    passthrough = outliers_removed.make_passthrough_filter()
    # assign axis and range to the passthrough filter
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    passthrough_z = passthrough.filter()

    # Limiting on the X axis too to avoid having the bins recognized as snacks
    passthrough = passthrough_z.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object
    # (axis changed from y to x; limits changed from [-0.45, 0.45] to [0.34, 1.0])
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.34
    axis_max = 1.0
    passthrough.set_filter_limits(axis_min, axis_max)
    # use the filter function to obtain the resultant point cloud
    cloud_passthrough = passthrough.filter()

    # RANSAC Plane Segmentation: the dominant plane is the table
    seg = cloud_passthrough.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Set max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers - tabletop
    cloud_table = cloud_passthrough.extract(inliers, negative=False)
    # Extract outliers - objects
    cloud_objects = cloud_passthrough.extract(inliers, negative=True)

    # Euclidean Clustering; the k-d tree operates on XYZ-only points
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerance for extraction
    ec.set_ClusterTolerance(0.02)
    # minimum cluster size (changed from 10 to 40)
    ec.set_MinClusterSize(40)
    # maximum cluster size (changed from 2500 to 4000)
    ec.set_MaxClusterSize(4000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2], rgb_to_float(cluster_color[j])
            ])

    # accept new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages (including intermediate debug clouds)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_passthrough = pcl_to_ros(cloud_passthrough)
    ros_outliners_removed = pcl_to_ros(outliers_removed)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_cloud_pub.publish(ros_cluster_cloud)
    pcl_cloud_passthrough_pub.publish(ros_cloud_passthrough)
    pcl_outliners_removed_pub.publish(ros_outliners_removed)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        # convert the cluster from pcl to ROS
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector:
        # extract the histogram features (HSV color + surface normals)
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        # get the label
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # position the RViz label above the first point of the cluster
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        # Publish a label into RViz
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Perception callback built from the filter_* helper functions.

    Denoises, downsamples and crops the incoming cloud, splits it into table
    and objects with a RANSAC plane fit, clusters and classifies the objects,
    publishes all result clouds plus RViz markers, and invokes pr2_mover().

    :param pcl_msg: ROS point cloud message
    """
    # FIX: use the function form of print (valid on both Python 2 and 3)
    # instead of the Python-2-only print statement
    print('Callback')

    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud_filtered = ros_to_pcl(pcl_msg)

    # The pipeline is implemented with helper functions to keep this
    # callback readable; each step rebinds cloud_filtered.

    # Remove noise: it is akin to white noise, so it can be effectively
    # removed by means of statistical analysis
    cloud_filtered = filter_outliers(cloud_filtered, 10, 0.1)

    # Keep a snapshot of the denoised cloud for the debug publisher
    tracer_cloud = cloud_filtered

    # Voxel Grid Downsampling
    cloud_filtered = filter_voxel(cloud_filtered, 0.005)

    # PassThrough Filtering: one crop along each axis
    cloud_filtered = filter_passthrough(cloud_filtered, 'z', 0.6, 1.1)
    cloud_filtered = filter_passthrough(cloud_filtered, 'y', -0.45, 0.45)
    cloud_filtered = filter_passthrough(cloud_filtered, 'x', 0.35, 0.85)

    # RANSAC plane fit: inliers = table plane, outliers = the objects
    extracted_inliers, extracted_outliers = filter_ransac_plane(cloud_filtered,
                                                                0.01)

    # Euclidean Clustering (DBSCAN-style) on an XYZ-only copy of the objects
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    # distance tolerance plus min/max cluster size (in points)
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(2500)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately:
    # assign a color corresponding to each segmented object in the scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    # cloud after the pipeline without RANSAC (debug)
    ros_tracer_cloud = pcl_to_ros(tracer_cloud)
    # objects
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    # table
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    # cluster mask: one big cloud, just recolored per cluster
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # publish the clouds
    my_cloud_pub.publish(ros_tracer_cloud)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3:
    # Walk through the clusters, compute the histograms we trained on,
    # and feed them into the classifier.
    detected_objects_labels = []
    detected_objects = []

    # Classify the clusters! (loop through each detected cluster one at a time)
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the one object from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)
        # convert the cluster from pcl to ROS using helper function
        sample_cloud = pcl_to_ros(pcl_cluster)

        # Extract histogram features: HSV color + surface-normal histograms
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz: the marker is placed at the cluster's
        # first point, shifted up along z
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels),
                                                   detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Perception pipeline callback: filter, segment, cluster and classify.

    Receives a ROS PointCloud2 message, cleans it up (statistical outlier
    removal, voxel downsampling, pass-through cropping), separates the table
    plane from the tabletop objects with RANSAC, groups the objects with
    Euclidean clustering, classifies each cluster with the pre-trained SVM,
    and finally hands the detections to pr2_mover().
    """
    # --- Exercise-2: build the filtered cloud -----------------------------

    # ROS message -> PCL cloud.
    cloud = ros_to_pcl(pcl_msg)

    # Statistical outlier removal: drop points whose mean distance to their
    # 20 nearest neighbours exceeds (global mean + 0.3 * stddev).
    noise_filter = cloud.make_statistical_outlier_filter()
    noise_filter.set_mean_k(20)
    noise_filter.set_std_dev_mul_thresh(0.3)
    cloud = noise_filter.filter()

    # Voxel-grid downsampling with a 5 mm leaf.
    LEAF_SIZE = 0.005
    voxel_filter = cloud.make_voxel_grid_filter()
    voxel_filter.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud = voxel_filter.filter()

    # Pass-through crop: keep only the table-top region (z band, y width).
    for axis, lo, hi in (('z', 0.6, 1.4), ('y', -0.5, 0.5)):
        crop = cloud.make_passthrough_filter()
        crop.set_filter_field_name(axis)
        crop.set_filter_limits(lo, hi)
        cloud = crop.filter()

    # RANSAC plane fit: the table is the dominant plane in the scene.
    seg = cloud.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.01)
    inliers, coefficients = seg.segment()

    # Inliers are the table surface, outliers are the objects on it.
    cloud_table = cloud.extract(inliers, negative=False)
    cloud_objects = cloud.extract(inliers, negative=True)

    # Euclidean clustering on a spatial-only (XYZ) copy of the objects.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.013)
    ec.set_MinClusterSize(20)
    ec.set_MaxClusterSize(3000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Colorize each cluster so they can be told apart in RViz.
    cluster_color = get_color_list(len(cluster_indices))
    colored_points = [
        [white_cloud[idx][0],
         white_cloud[idx][1],
         white_cloud[idx][2],
         rgb_to_float(cluster_color[j])]
        for j, indices in enumerate(cluster_indices)
        for idx in indices
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(colored_points)

    # Publish the intermediate clouds.
    pcl_objects_pub.publish(pcl_to_ros(cloud_objects))
    pcl_table_pub.publish(pcl_to_ros(cloud_table))
    pcl_cluster_pub.publish(pcl_to_ros(cluster_cloud))

    # --- Exercise-3: classify every cluster -------------------------------
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Cut this cluster out of the full-color object cloud.
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Feature vector = HSV color histograms + surface-normal histograms.
        c_hists = compute_color_histograms(ros_cluster, using_hsv=True)
        n_hists = compute_normal_histograms(get_normals(ros_cluster))
        feature = np.concatenate((c_hists, n_hists))

        # SVM prediction -> human-readable label.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float a label marker 0.4 m above the cluster's first point.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection for downstream consumers.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects.
    detected_objects_pub.publish(detected_objects)

    # Only attempt the pick-and-place routine when something was detected.
    if len(detected_objects) > 0:
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
# Feature-capture loop: for each model, spawn it in the simulator, grab a
# number of point-cloud samples, and store (feature, label) pairs for
# training.  Relies on capture_sample()/get_normals() provided by the
# sensor_stick training environment.
for model_name in models:
    spawn_model(model_name)

    for i in range(5):
        # Make up to five attempts to get a valid point cloud, then give up.
        sample_was_good = False
        try_count = 0
        while not sample_was_good and try_count < 5:
            sample_cloud = capture_sample()
            sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()

            # Check for invalid (empty) clouds.
            if sample_cloud_arr.shape[0] == 0:
                print('Invalid cloud detected')
                try_count += 1
            else:
                sample_was_good = True

        # BUG FIX: only extract features from a cloud that passed validation.
        # Previously features were computed even after all five attempts
        # failed, feeding the last invalid (empty) cloud into the histogram
        # functions and polluting the training set.
        if sample_was_good:
            # Extract histogram features (RGB color + surface normals).
            chists = compute_color_histograms(sample_cloud, using_hsv=False)
            normals = get_normals(sample_cloud)
            nhists = compute_normal_histograms(normals)
            feature = np.concatenate((chists, nhists))
            labeled_features.append([feature, model_name])

    delete_model()

# Persist the training set; 'with' guarantees the file handle is closed
# (the original pickle.dump(..., open(...)) leaked the handle).
with open('training_set.sav', 'wb') as f:
    pickle.dump(labeled_features, f)
def pcl_callback(pcl_msg):
    """Phase-aware perception callback.

    Phases 0 and 2 drive the robot (move_robot, presumably a mapping sweep);
    phases 1 and 3 run the full perception pipeline on the incoming cloud and
    hand the resulting detections to pr2_mover().
    """
    # NOTE(review): which_phase is assumed to be a module-level global
    # updated elsewhere in the node -- confirm against the rest of the file.
    if which_phase in (0, 2):
        move_robot()

    if which_phase in (1, 3):
        # Convert ROS msg to PCL data.  FIX: bind to a new name instead of
        # shadowing the pcl_msg parameter as the original did.
        cloud = ros_to_pcl(pcl_msg)

        # Voxel grid downsampling (5 mm leaf).
        vox = cloud.make_voxel_grid_filter()
        LEAF_SIZE = .005
        vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
        cloud_filtered = vox.filter()

        # Two pass-through filters: one over Z, one over X.
        passthrough = cloud_filtered.make_passthrough_filter()
        passthrough.set_filter_field_name('z')
        passthrough.set_filter_limits(0.3, 5.0)
        cloud_filtered = passthrough.filter()

        passthrough = cloud_filtered.make_passthrough_filter()
        passthrough.set_filter_field_name('x')
        passthrough.set_filter_limits(0.34, 1.0)
        cloud_filtered = passthrough.filter()

        # Statistical outlier filter (k=50 neighbours, 0.5 sigma threshold).
        outlier_filter = cloud_filtered.make_statistical_outlier_filter()
        outlier_filter.set_mean_k(50)
        outlier_filter.set_std_dev_mul_thresh(0.5)
        cloud_filtered = outlier_filter.filter()

        # RANSAC plane segmentation to find the table.
        seg = cloud_filtered.make_segmenter()
        seg.set_model_type(pcl.SACMODEL_PLANE)
        seg.set_method_type(pcl.SAC_RANSAC)
        seg.set_distance_threshold(0.015)
        inliers, coefficients = seg.segment()

        # Extract inliers (table) and outliers (objects).
        extracted_inliers = cloud_filtered.extract(inliers, negative=False)
        extracted_outliers = cloud_filtered.extract(inliers, negative=True)

        # Euclidean clustering on the XYZ-only copy of the object cloud.
        white_cloud = XYZRGB_to_XYZ(extracted_outliers)
        tree = white_cloud.make_kdtree()
        ec = white_cloud.make_EuclideanClusterExtraction()
        ec.set_ClusterTolerance(0.01)
        ec.set_MinClusterSize(50)
        ec.set_MaxClusterSize(15000)
        ec.set_SearchMethod(tree)
        cluster_indices = ec.Extract()

        # Cluster-mask cloud: a unique color per cluster for visualization.
        cluster_color = get_color_list(len(cluster_indices))
        color_cluster_point_list = []
        for j, indices in enumerate(cluster_indices):
            for indice in indices:
                color_cluster_point_list.append([white_cloud[indice][0],
                                                 white_cloud[indice][1],
                                                 white_cloud[indice][2],
                                                 rgb_to_float(cluster_color[j])])
        cluster_cloud = pcl.PointCloud_PointXYZRGB()
        cluster_cloud.from_list(color_cluster_point_list)

        # Convert PCL data to ROS messages and publish.
        table_pcl_msg = pcl_to_ros(extracted_inliers)
        objects_pcl_msg = pcl_to_ros(extracted_outliers)
        ros_cluster_cloud = pcl_to_ros(cluster_cloud)
        pcl_objects_pub.publish(objects_pcl_msg)
        pcl_table_pub.publish(table_pcl_msg)
        pcl_clusters_pub.publish(ros_cluster_cloud)

        # Classify the clusters (removed the unused 'lastone' local).
        detected_objects_labels = []
        detected_objects = []
        for index, pts_list in enumerate(cluster_indices):
            # Grab the points for the cluster and build its feature vector.
            pcl_cluster = extracted_outliers.extract(pts_list)
            ros_cluster = pcl_to_ros(pcl_cluster)
            chists = compute_color_histograms(ros_cluster, using_hsv=True)
            nhists = compute_normal_histograms(get_normals(ros_cluster))
            feature = np.concatenate((chists, nhists))

            # SVM prediction -> label string.
            prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
            label = encoder.inverse_transform(prediction)[0]
            detected_objects_labels.append(label)

            # Publish a label marker into RViz just above the cluster.
            label_pos = list(white_cloud[pts_list[0]])
            label_pos[2] += .25
            object_markers_pub.publish(make_label(label, label_pos, index))

            # Record the detection.
            do = DetectedObject()
            do.label = label
            do.cloud = ros_cluster
            detected_objects.append(do)

        # Publish the list of detected objects -- this is the output needed
        # by the upcoming project stage.
        detected_objects_pub.publish(detected_objects)

        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
def pcl_callback(pcl_msg):
    """Perception callback: segment the scene, classify clusters, emit YAML.

    Pipeline: voxel downsample -> pass-through crop (z, x) -> statistical
    outlier removal -> RANSAC table/object split -> Euclidean clustering ->
    SVM classification.  Finally the centroids of every detected object that
    appears in the pick list are written to 'output_none.yaml'.
    """
    # Convert ROS msg to PCL data.
    cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling with a 5 mm leaf.
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # Pass-through filter along z (keep the table-top band).
    passthrough_z = cloud_filtered.make_passthrough_filter()
    passthrough_z.set_filter_field_name('z')
    passthrough_z.set_filter_limits(0.6, 1.1)
    cloud_filtered = passthrough_z.filter()

    # Pass-through filter along x.
    passthrough_x = cloud_filtered.make_passthrough_filter()
    passthrough_x.set_filter_field_name('x')
    passthrough_x.set_filter_limits(0.35, 2.0)
    cloud_filtered = passthrough_x.filter()

    # Statistical outlier removal: reject points whose mean distance to
    # their 20 nearest neighbours exceeds (mean + 0.25 * stddev).
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(20)
    outlier_filter.set_std_dev_mul_thresh(0.25)
    cloud_filtered = outlier_filter.filter()

    # RANSAC plane segmentation: the table is the dominant plane.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.01)
    inliers, coefficients = seg.segment()

    # Inliers = table surface, outliers = objects sitting on it.
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean clustering on a spatial-only (XYZ) copy of the objects.
    location_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = location_cloud.make_kdtree()
    euclid_cluster = location_cloud.make_EuclideanClusterExtraction()
    euclid_cluster.set_ClusterTolerance(0.03)
    euclid_cluster.set_MinClusterSize(10)
    euclid_cluster.set_MaxClusterSize(2000)
    euclid_cluster.set_SearchMethod(tree)
    cluster_indices = euclid_cluster.Extract()

    # Cluster-mask cloud: one distinct color per detected cluster.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for index in indices:
            color_cluster_point_list.append([
                location_cloud[index][0],
                location_cloud[index][1],
                location_cloud[index][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages and publish.
    pcl_objects_pub.publish(pcl_to_ros(cloud_objects))
    pcl_table_pub.publish(pcl_to_ros(cloud_table))
    pcl_cluster_pub.publish(pcl_to_ros(cluster_cloud))

    # Classify the clusters, one at a time.
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster and build the feature vector.
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        color_hist = compute_color_histograms(ros_cluster, using_hsv=True)
        normal_hist = compute_normal_histograms(get_normals(ros_cluster))
        features = np.concatenate((color_hist, normal_hist))

        # Predict and record the label.
        prediction = clf.predict(scaler.transform(features.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label marker slightly above the cluster.
        label_pos = list(location_cloud[pts_list[0]])
        label_pos[2] += 0.2
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects.
    detected_objects_pub.publish(detected_objects)

    # Generate YAML output: labels and centroids (typo fix: was 'centriods')
    # for everything in pick_list.
    labels = []
    centroids = []
    for item in pick_list:
        for obj in detected_objects:
            if obj.label == item['name']:
                print("Found", item['name'])
                labels.append(item['name'])
                points_array = ros_to_pcl(obj.cloud).to_array()
                cent = np.mean(points_array, axis=0)
                # FIX: np.asscalar() was removed in NumPy 1.23; float() is
                # the documented replacement and yields the same Python scalar.
                centroids.append((float(cent[0]), float(cent[1]), float(cent[2])))

    yaml_list = []
    scene_num = Int32()
    object_name = String()
    arm = String()
    pick_pose = Pose()
    place_pose = Pose()  # NOTE(review): place_pose is never populated here
    scene_num.data = 2
    arm.data = 'none'
    for i in range(len(labels)):
        object_name.data = labels[i]
        pick_pose.position.x = centroids[i][0]
        pick_pose.position.y = centroids[i][1]
        pick_pose.position.z = centroids[i][2]
        # Assumes make_yaml_dict copies the .data values immediately;
        # otherwise the reused message objects would alias -- TODO confirm.
        yaml_dict = make_yaml_dict(scene_num, arm, object_name, pick_pose, place_pose)
        yaml_list.append(yaml_dict)
    send_to_yaml('output_none.yaml', yaml_list)
def pcl_callback(pcl_msg):
    """Filter, segment and classify the incoming RGB-D cloud.

    Voxel downsample, denoise, crop to the table region, split table vs.
    objects with RANSAC, cluster the objects, classify each cluster with the
    pre-trained SVM, and forward the detections to pr2_mover().
    """
    # ROS message -> PCL cloud.
    raw_cloud = ros_to_pcl(pcl_msg)

    # Voxel grid downsampling (1 cm leaf).
    leaf = 0.01
    grid = raw_cloud.make_voxel_grid_filter()
    grid.set_leaf_size(leaf, leaf, leaf)
    cloud_filtered = grid.filter()

    # Statistical outlier removal: 20 neighbours, 0.1 sigma threshold --
    # anything farther than (mean distance + 0.1 * stddev) is discarded.
    denoise = cloud_filtered.make_statistical_outlier_filter()
    denoise.set_mean_k(20)
    denoise.set_std_dev_mul_thresh(0.1)
    cloud_filtered = denoise.filter()

    # Debug publisher showing the denoised cloud.
    test.publish(pcl_to_ros(cloud_filtered))

    # Crop to the usable region: z for table height, y for table width.
    for field, low, high in (('z', 0.6, 1.1), ('y', -0.4, 0.4)):
        crop = cloud_filtered.make_passthrough_filter()
        crop.set_filter_field_name(field)
        crop.set_filter_limits(low, high)
        cloud_filtered = crop.filter()

    # RANSAC plane segmentation (table = dominant plane).
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.01)
    inliers, coefficients = seg.segment()

    # Table points vs. object points.
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # Euclidean clustering on the XYZ-only cloud.
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    kd_tree = white_cloud.make_kdtree()
    extractor = white_cloud.make_EuclideanClusterExtraction()
    extractor.set_ClusterTolerance(0.03)
    extractor.set_MinClusterSize(100)
    extractor.set_MaxClusterSize(3000)
    extractor.set_SearchMethod(kd_tree)
    cluster_indices = extractor.Extract()

    # Build a colorized cluster-mask cloud (one color per cluster).
    # NOTE(review): this cloud is constructed but never published in this
    # version of the callback -- kept for behavioral parity.
    palette = get_color_list(len(cluster_indices))
    mask_points = [
        [white_cloud[pt][0], white_cloud[pt][1], white_cloud[pt][2],
         rgb_to_float(palette[j])]
        for j, indices in enumerate(cluster_indices)
        for pt in indices
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(mask_points)

    # Classify the clusters one at a time.
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Pull this cluster out of the colored object cloud.
        ros_cluster = pcl_to_ros(extracted_outliers.extract(pts_list))

        # Feature vector: HSV color histograms + surface-normal histograms.
        feature = np.concatenate((
            compute_color_histograms(ros_cluster, using_hsv=True),
            compute_normal_histograms(get_normals(ros_cluster)),
        ))

        # SVM prediction -> label string.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Float a label marker 0.4 m above the cluster's first point.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects.
    detected_objects_pub.publish(detected_objects)

    # Hand the detections to the pick-and-place routine.
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
spawn_model(model_name) for i in range(capture_attempts): # make five attempts to get a valid a point cloud then give up sample_was_good = False try_count = 0 while not sample_was_good and try_count < 5: # Capture the point cloud using the sensor stock RGBD camera sample_cloud = capture_sample() # Convert the ros cloud to a pcl cloud sample_cloud_arr = ros_to_pcl(sample_cloud).to_array() # Check for invalid clouds. if sample_cloud_arr.shape[0] == 0: print('Invalid cloud detected') try_count += 1 else: sample_was_good = True # Extract histogram features chists = compute_color_histograms(sample_cloud, nbins=histogram_bins, using_hsv=True) normals = get_normals(sample_cloud) nhists = compute_normal_histograms(normals, nbins=histogram_bins) feature = np.concatenate((chists, nhists)) labeled_features.append([feature, model_name]) delete_model() pickle.dump(labeled_features, open('training_set.sav', 'wb'))
def pcl_callback(PointCloud2_msg):
    """Exercise 2/3 pipeline with step-by-step PCD snapshots.

    Every intermediate stage is saved to disk as a .pcd file so the effect
    of each filter can be inspected offline; the final clusters are
    classified and the detections handed to pr2_mover().
    """
    # --- Exercise-2 -------------------------------------------------------

    # ROS message -> PCL cloud; snapshot the raw input.
    cloud = ros_to_pcl(PointCloud2_msg)
    pcl.save(cloud, 'initial_point_cloud.pcd')

    # Statistical outlier filtering: 50 neighbours, 1.0 sigma threshold.
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(50)
    outlier_filter.set_std_dev_mul_thresh(1.0)
    cloud = outlier_filter.filter()
    pcl.save(cloud, 'statistical_filter_inliers.pcd')

    # Flip the filter to capture the rejected points as well.
    outlier_filter.set_negative(True)
    pcl.save(outlier_filter.filter(), 'statistical_filter_outliers.pcd')

    # Voxel grid downsampling (1 cm leaf).
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    pcl.save(cloud_filtered, 'voxel_downsampled.pcd')

    # Pass-through filter along z to isolate the table-top band.
    passthrough = cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name('z')
    passthrough.set_filter_limits(0.6, 1.0)
    cloud_filtered = passthrough.filter()
    pcl.save(cloud_filtered, 'pass_through_filtered.pcd')

    # RANSAC plane segmentation; 5 mm tolerance for the table plane.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(0.005)
    inliers, coefficients = seg.segment()

    # Inliers -> table, outliers -> tabletop objects (both snapshotted).
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    pcl.save(cloud_table, 'extracted_inliers.pcd')
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    pcl.save(cloud_objects, 'extracted_outliers.pcd')

    # Euclidean clustering on an XYZ-only copy of the object cloud.
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    kd_tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(20000)
    ec.set_SearchMethod(kd_tree)
    cluster_indices = ec.Extract()

    # Cluster-mask cloud: one distinct color per segmented object.
    palette = get_color_list(len(cluster_indices))
    mask_points = [
        [white_cloud[pt][0], white_cloud[pt][1], white_cloud[pt][2],
         rgb_to_float(palette[j])]
        for j, indices in enumerate(cluster_indices)
        for pt in indices
    ]
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(mask_points)

    # Publish the object, table and cluster-mask clouds.
    pcl_objects_pub.publish(pcl_to_ros(cloud_objects))
    pcl_table_pub.publish(pcl_to_ros(cloud_table))
    pcl_cluster_pub.publish(pcl_to_ros(cluster_cloud))

    # --- Exercise-3: classify each cluster --------------------------------
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Cut the cluster out and convert it back to a ROS cloud.
        ros_cluster = pcl_to_ros(cloud_objects.extract(pts_list))

        # HSV color histograms + surface-normal histograms.
        feature = np.concatenate((
            compute_color_histograms(ros_cluster, using_hsv=True),
            compute_normal_histograms(get_normals(ros_cluster)),
        ))

        # Predict and record the label.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Label marker 0.4 m above the first point of the cluster.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects -- the project's main output.
    detected_objects_pub.publish(detected_objects)

    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    """Perception pipeline callback for the RGB-D camera point cloud.

    Filters and segments the incoming cloud (Exercise-2), classifies each
    detected cluster with the trained SVM (Exercise-3), publishes the
    intermediate clouds plus RViz labels, and — when the set of detected
    labels exactly matches the pick list — hands the detections to
    ``pr2_mover``.

    :param pcl_msg: sensor_msgs/PointCloud2 message from the camera topic.
    :returns: None (results are published on module-level publishers).
    """
    # --- Exercise-2: filtering, segmentation, clustering -----------------

    # Convert the ROS message into a python-pcl cloud we can filter.
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling: thin the cloud to one point per voxel.
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.003  # voxel edge length in metres (tuned empirically)
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # Statistical Outlier Removal: drop points whose mean distance to their
    # 50 nearest neighbours exceeds (global mean + x * std_dev).
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(50)
    x = 1.0  # std-dev multiplier threshold
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = outlier_filter.filter()

    # PassThrough filter along Z: keep only the table-top height band.
    passThroughZ = cloud_filtered.make_passthrough_filter()
    passThroughZ.set_filter_field_name('z')
    axis_min = 0.6
    axis_max = 1.1
    passThroughZ.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passThroughZ.filter()

    # PassThrough filter along Y: crop the dropboxes at the table's sides.
    passThroughY = cloud_filtered.make_passthrough_filter()
    passThroughY.set_filter_field_name('y')
    axis_min = -0.5
    axis_max = 0.5
    passThroughY.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passThroughY.filter()

    # RANSAC plane segmentation: fit the table surface.
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01  # max point-to-plane distance to count as table
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Inliers are the table plane; outliers are the objects on top of it.
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering: the k-d tree only needs spatial (XYZ) data.
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Tolerance / size bounds tuned so each object becomes one cluster.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(50000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Build a cluster-mask cloud: every cluster gets its own colour so the
    # segmentation can be inspected in RViz.
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for indice in indices:  # (fixed: index from enumerate was unused)
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data back to ROS messages and publish.
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # --- Exercise-3: classify the clusters -------------------------------
    # Loop through each detected cluster one at a time.
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the cluster's points from the object cloud.
        pcl_cluster = extracted_outliers.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Feature vector: HSV colour histograms + surface-normal histograms,
        # matching how the classifier was trained in capture_features.py.
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Predict and decode the label for this cluster.
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label marker into RViz, floated above the cluster.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Record the detection.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects (input to the pick-place stage).
    detected_objects_pub.publish(detected_objects)

    # Read the pick list from the parameter server and compare it against
    # what was detected.
    object_list_param = rospy.get_param('/object_list')
    pick_list_objects = [item['name'] for item in object_list_param]
    rospy.loginfo('Pick list includes: {}'.format(pick_list_objects))

    # NOTE(review): this requires an EXACT set match — one missed or extra
    # detection and pr2_mover is never called. Confirm that is intended.
    if set(detected_objects_labels) == set(pick_list_objects):
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass