def pcl_callback(pcl_msg):

# Exercise-2 TODOs:
    # Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # Steps:
    # 1. Downsample your point cloud by applying the Voxel Grid Filter
    # 2. Apply a Pass Through Filter to isolate the table and objects
    # 3. Perform RANSAC plane fitting to identify the table
    # 4. Use the ExtractIndices Filter to create new point clouds containing
    #    the table and objects separately (called cloud_table and cloud_objects)

    # Voxel Grid Downsampling
    LEAF_SIZE = 0.01
    vox = pcl_data.make_voxel_grid_filter()
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # PassThrough Filter
    filter_axis = 'z'
    axis_min = 0.759
    axis_max = 1.1
    passthrough = cloud_filtered.make_passthrough_filter()
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # RANSAC Plane Segmentation
    max_distance = 0.01
    ransac = cloud_filtered.make_segmenter()
    ransac.set_model_type(pcl.SACMODEL_PLANE)
    ransac.set_method_type(pcl.SAC_RANSAC)
    ransac.set_distance_threshold(max_distance)

    # Extract inliers and outliers
    inliers, coefficients = ransac.segment()
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering
    # white_cloud keeps only the spatial (XYZ) data; color is not needed for clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    kd_tree = white_cloud.make_kdtree()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Cluster extraction
    ec = white_cloud.make_EuclideanClusterExtraction()

    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(200)
    ec.set_MaxClusterSize(2000)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(kd_tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    # print cluster_indices

    # Visualization of Euclidean Segmentation step
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, index in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[index][0],
                white_cloud[index][1],
                white_cloud[index][2],
                rgb_to_float(cluster_color[j])
            ])
    
    # new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

# Exercise-3 TODOs: 

    # Containers for the point cloud data and the labels
    detected_objects_labels = []
    detected_objects = []

    # Classify the clusters! (loop through each detected cluster one at a time)
    # cycle through each segmented clusters
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        sample_cloud = ros_cluster
        sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()

        # Compute the associated feature vector
        # Extract histogram features
        # complete this step just as is covered in capture_features.py
        # Check for invalid (empty) clouds and skip them
        if sample_cloud_arr.shape[0] == 0:
            print('Invalid cloud detected')
            continue

        # Using hsv instead of RGB
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # labeled_features.append([feature, model_name])    # Not needed, we're not saving the labels as training or model data

        # Make the prediction
        # retrieve the label for the result and add
        # it to the detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into Rviz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += 0.4  # offset the label above the object so it is readable in RViz
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Do some logging
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
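
All of these callbacks rely on module-level objects that the snippets never show: the point-cloud subscriber, the publishers (pcl_objects_pub, pcl_table_pub, pcl_cluster_pub, object_markers_pub, detected_objects_pub) and the trained classifier with its scaler and label encoder (clf, scaler, encoder). The following is a minimal sketch of that start-up boilerplate; the node name, topic names, message package, the model.sav filename and the layout of the pickled model dictionary are assumptions, not taken from the examples.

# Minimal node start-up sketch (names marked below are assumptions)
import pickle
import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import Marker
from sklearn.preprocessing import LabelEncoder
from sensor_stick.msg import DetectedObjectsArray   # assumed project message package

if __name__ == '__main__':
    rospy.init_node('object_recognition', anonymous=True)       # assumed node name

    # Subscriber feeding pcl_callback (topic name is an assumption)
    pcl_sub = rospy.Subscriber('/sensor_stick/point_cloud', PointCloud2,
                               pcl_callback, queue_size=1)

    # Publishers referenced inside pcl_callback
    pcl_objects_pub = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher('/pcl_table', PointCloud2, queue_size=1)
    pcl_cluster_pub = rospy.Publisher('/pcl_cluster', PointCloud2, queue_size=1)
    object_markers_pub = rospy.Publisher('/object_markers', Marker, queue_size=1)
    detected_objects_pub = rospy.Publisher('/detected_objects', DetectedObjectsArray,
                                           queue_size=1)

    # Trained SVM plus the label encoder and feature scaler it was trained with
    # ('model.sav' and the dictionary keys are assumptions)
    model = pickle.load(open('model.sav', 'rb'))
    clf = model['classifier']
    encoder = LabelEncoder()
    encoder.classes_ = model['classes']
    scaler = model['scaler']

    # get_color_list caches its colors on a function attribute in the helper code
    get_color_list.color_list = []

    rospy.spin()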
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    cloud = statistical_outlier_filtering(cloud, 10, 0.001)

    # TODO: Voxel Grid Downsampling
    LEAF_SIZE = 0.01
    cloud = voxel_grid_downssampling(cloud, LEAF_SIZE)

    # TODO: PassThrough Filter
    filter_axis = 'z'
    axis_min = 0.50
    axis_max = 0.90
    cloud = passthrough(cloud, filter_axis, axis_min, axis_max)

    filter_axis = 'x'
    axis_min = 0.30
    axis_max = 1.0
    cloud = passthrough(cloud, filter_axis, axis_min, axis_max)

    # TODO: RANSAC Plane Segmentation
    ransac_segmentation = ransac_plane_segmentation(cloud, pcl.SACMODEL_PLANE,
                                                    pcl.SAC_RANSAC, 0.01)

    # TODO: Extract inliers and outliers
    cloud_table, cloud_objects = extract_cloud_objects_and_cloud_table(
        cloud, ransac_segmentation)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    cluster_cloud, cluster_indices = euclidean_clustering(white_cloud)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately

    # TODO: Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    # write a for loop to cycle through each of the segmented clusters
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        # Extract histogram features
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
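
The example above pushes each stage into small wrapper functions (statistical_outlier_filtering, voxel_grid_downssampling, passthrough, ransac_plane_segmentation, extract_cloud_objects_and_cloud_table, euclidean_clustering) whose definitions are not shown. A plausible sketch of most of them follows, built from the same python-pcl calls the other examples use inline; the bodies are assumptions, and euclidean_clustering would follow the same pattern as the inline clustering code elsewhere in these examples.

# Hypothetical wrapper sketches matching the helper names used above
def statistical_outlier_filtering(cloud, mean_k, std_dev_mul_thresh):
    # Drop points whose mean neighbour distance exceeds mean + k * std_dev
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(mean_k)
    outlier_filter.set_std_dev_mul_thresh(std_dev_mul_thresh)
    return outlier_filter.filter()

def voxel_grid_downssampling(cloud, leaf_size):
    # Voxel-grid downsampling (name kept exactly as the example spells it)
    vox = cloud.make_voxel_grid_filter()
    vox.set_leaf_size(leaf_size, leaf_size, leaf_size)
    return vox.filter()

def passthrough(cloud, filter_axis, axis_min, axis_max):
    # Keep only points whose filter_axis coordinate lies in [axis_min, axis_max]
    pt = cloud.make_passthrough_filter()
    pt.set_filter_field_name(filter_axis)
    pt.set_filter_limits(axis_min, axis_max)
    return pt.filter()

def ransac_plane_segmentation(cloud, model_type, method_type, max_distance):
    # Fit a plane with RANSAC and return the inlier indices
    seg = cloud.make_segmenter()
    seg.set_model_type(model_type)
    seg.set_method_type(method_type)
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()
    return inliers

def extract_cloud_objects_and_cloud_table(cloud, inliers):
    # Plane inliers form the table; everything else is the objects
    cloud_table = cloud.extract(inliers, negative=False)
    cloud_objects = cloud.extract(inliers, negative=True)
    return cloud_table, cloud_objects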
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit 
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance 
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()
    ros_cloud_table = cloud_filtered.extract(inliers, negative=False)
    ros_cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(ros_cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(2000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = ros_cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py

        chists = compute_color_histograms(ros_cluster, using_hsv=False)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_table = pcl_to_ros(ros_cloud_table)
    ros_cloud_objects = pcl_to_ros(ros_cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
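
Every example builds its feature vector from get_normals(), compute_color_histograms() and compute_normal_histograms(), which live in the training-side feature code and are never shown here. The sketch below captures their usual shape; the bin count, value ranges, field names and the normals service name are assumptions, and they must match whatever the classifier was trained with, which is also why using_hsv should be the same at training and prediction time.

# Rough feature-helper sketches (bins, ranges and service name are assumptions)
import colorsys
import struct
import numpy as np
import rospy
import sensor_msgs.point_cloud2 as pc2

def get_normals(cloud):
    # Ask a normal-estimation service for the surface normals of the ROS cloud
    # (service name and srv type are assumptions based on how the examples use it)
    from sensor_stick.srv import GetNormals
    get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
    return get_normals_prox(cloud).cluster

def compute_color_histograms(cloud, using_hsv=False):
    # Histogram the per-point colors (optionally RGB -> HSV), concatenate, normalize
    colors = []
    for point in pc2.read_points(cloud, skip_nans=True):
        # Unpack the packed-float RGB field into three 0-255 channels
        packed = struct.unpack('I', struct.pack('f', point[3]))[0]
        r, g, b = (packed >> 16) & 255, (packed >> 8) & 255, packed & 255
        if using_hsv:
            h, s, v = colorsys.rgb_to_hsv(r / 255.0, g / 255.0, b / 255.0)
            colors.append([h * 255.0, s * 255.0, v * 255.0])
        else:
            colors.append([r, g, b])
    colors = np.array(colors)
    hists = [np.histogram(colors[:, i], bins=32, range=(0, 256))[0] for i in range(3)]
    features = np.concatenate(hists).astype(np.float64)
    return features / np.sum(features)

def compute_normal_histograms(normal_cloud):
    # Histogram the x/y/z components of the surface normals
    norms = np.array(list(pc2.read_points(
        normal_cloud, field_names=('normal_x', 'normal_y', 'normal_z'),
        skip_nans=True)))
    hists = [np.histogram(norms[:, i], bins=32, range=(-1.0, 1.0))[0] for i in range(3)]
    features = np.concatenate(hists).astype(np.float64)
    return features / np.sum(features)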
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    #display raw input
    pcl_raw.publish(pcl_msg)

    # TODO: Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    outlier_filter = pcl_data.make_statistical_outlier_filter()

    #trial and error values
    mean_k = 25
    thresh_scale = 0.001

    outlier_filter.set_mean_k(mean_k)
    outlier_filter.set_std_dev_mul_thresh(thresh_scale)

    pcl_o_f = outlier_filter.filter()
    # TODO: Voxel Grid Downsampling

    vox = pcl_o_f.make_voxel_grid_filter()
    LEAF_SIZE = 0.005  #value from testing in class lab
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    pcl_vox = vox.filter()

    # TODO: PassThrough Filter
    #X axis passthrough
    passthrough_x = pcl_vox.make_passthrough_filter()

    #all min/max values determined by trial and error; the measure tool in RViz/Gazebo would have been faster
    x_min = 0.33
    x_max = 0.88

    passthrough_x.set_filter_field_name('x')
    passthrough_x.set_filter_limits(x_min, x_max)

    passthrough = passthrough_x.filter()

    #Y axis passthrough
    passthrough_y = passthrough.make_passthrough_filter()

    y_min = -0.46
    y_max = 0.46

    passthrough_y.set_filter_field_name('y')
    passthrough_y.set_filter_limits(y_min, y_max)
    passthrough = passthrough_y.filter()

    #Z axis passthrough
    passthrough_z = passthrough.make_passthrough_filter()

    z_min = 0.605
    z_max = 0.9

    passthrough_z.set_filter_field_name('z')
    passthrough_z.set_filter_limits(z_min, z_max)
    passthrough = passthrough_z.filter()

    # TODO: RANSAC Plane Segmentation
    seg = passthrough.make_segmenter()

    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    #value determined through trial and error
    max_distance_table = 0.008
    seg.set_distance_threshold(max_distance_table)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers

    cloud_objects = passthrough.extract(inliers, negative=True)
    cloud_table = passthrough.extract(inliers, negative=False)

    # TODO: Euclidean Clustering
    #color information is not required
    euclidean_objects = XYZRGB_to_XYZ(cloud_objects)

    #initialize KDtree
    tree = euclidean_objects.make_kdtree()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = euclidean_objects.make_EuclideanClusterExtraction()
    #clustering values determined through trial and error
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(25)
    ec.set_MaxClusterSize(5000)

    #set euclidean cluster search method
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    #variable for coloring the clusters
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    #assign color to each cluster
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                euclidean_objects[indice][0], euclidean_objects[indice][1],
                euclidean_objects[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Convert cluster list back to a colored PCL cloud
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    #use pcl_to_ros() function

    ros_filtered = pcl_to_ros(pcl_o_f)
    ros_vox = pcl_to_ros(pcl_vox)
    ros_passthrough = pcl_to_ros(passthrough)
    ros_objects = pcl_to_ros(cloud_objects)
    ros_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages

    stat_filter.publish(ros_filtered)
    pub_vox.publish(ros_vox)
    pub_pass.publish(ros_passthrough)
    pub_objects.publish(ros_objects)
    pub_table.publish(ros_table)
    pub_cloud.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    #initialize detected objects variables
    detected_objects_labels = []
    detected_objects = []

    #Looping through the cluster list:
    for index, pts_list in enumerate(cluster_indices):
        pcl_cluster = cloud_objects.extract(pts_list)
        pcl_to_ros_cluster = pcl_to_ros(pcl_cluster)

        #Construct the color histogram using HSV instead of RGB
        chists = compute_color_histograms(pcl_to_ros_cluster, using_hsv=True)
        #collect the surface normals
        normals = get_normals(pcl_to_ros_cluster)
        nhists = compute_normal_histograms(normals)
        #concatenate the HSV and surface normals
        feature = np.concatenate((chists, nhists))

        #make the prediction based on the combined HSV and surface normal information
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        #match the label
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        label_pos = list(euclidean_objects[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        #fill detected objects and label variables
        det_ob = DetectedObject()
        det_ob.label = label
        det_ob.cloud = pcl_to_ros_cluster
        detected_objects.append(det_ob)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    #publish detected objects
    detected_objects_pub.publish(detected_objects)

    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):

# Exercise-2:
    # Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    vox = pcl_data.make_voxel_grid_filter()

    LEAF_SIZE = 0.005

    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    cloud_filtered = vox.filter()

    # PassThrough Filter in z axis
    passthrough = cloud_filtered.make_passthrough_filter()

    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.61
    axis_max = 0.9
    passthrough.set_filter_limits(axis_min, axis_max)

    cloud_filtered = passthrough.filter()

    # PassThrough Filter in y axis
    passthrough_2 = cloud_filtered.make_passthrough_filter()

    filter_axis_2 = 'y'
    passthrough_2.set_filter_field_name(filter_axis_2)
    axis_min_2 = -0.425
    axis_max_2 = 0.425
    passthrough_2.set_filter_limits(axis_min_2, axis_max_2)

    cloud_filtered = passthrough_2.filter()


    # RANSAC Plane Segmentation
    max_distance = 0.005

    seg = cloud_filtered.make_segmenter()

    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    seg.set_distance_threshold(max_distance)

    inliers, coefficients = seg.segment()


    # Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)


    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(extracted_outliers) # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.010)
    ec.set_MinClusterSize(250)
    ec.set_MaxClusterSize(3550)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                            white_cloud[indice][1],
                                            white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)


# Exercise-3: 

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        # pcl_cluster = cloud_out.extract(pts_list)
        pcl_cluster = extracted_outliers.extract(pts_list)
        
        # convert the cluster from pcl to ROS using helper function
        sample_cloud = pcl_to_ros(pcl_cluster)
        
        # Extract histogram features
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
     
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    # Invoking the pr2_mover() 
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):
    

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    pcl_unfiltered = ros_to_pcl(pcl_msg)
    
    # TODO: Voxel Grid Downsampling
    vox = pcl_unfiltered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # Remove sparse noise from the extracted object cloud
    outlier_filter = extracted_outliers.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(50)
    x = 1.0
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_objects = outlier_filter.filter()

    # TODO: Euclidean Clustering

    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    
    # Set tolerances for distance threshold 
    ec.set_ClusterTolerance(0.001)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(250)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()


    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                            white_cloud[indice][1],
                                            white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # TODO: Convert PCL data to ROS messages
    ros_cloud_object = pcl_to_ros(cloud_objects)
    ros_table_object = pcl_to_ros(extracted_inliers)

    # Build the colored cluster cloud, then convert it to pcl2 for ROS
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_object)
    pcl_table_pub.publish(ros_table_object)
    ros_cluster_cloud_pub.publish(ros_cluster_cloud)


# Exercise-3 TODOs: 

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    object_list_param = rospy.get_param('/object_list')

    # Cycle through each segmented cluster
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted objects cloud
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract Histogram Features
        chists = compute_color_histograms(ros_cluster, using_hsv=False)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Compute the associated feature vector

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.

        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)


    labels = []
    centroids = []

    for obj in detected_objects:
        labels.append(obj.label)
        points_arr = ros_to_pcl(obj.cloud).to_array()
        centroids.append(np.mean(points_arr, axis=0)[:3])

    dict_list = []
    for i in range(0, len(object_list_param)):
        # NOTE: test_scene_num, arm_name, object_name, pick_pose and place_pose
        # still need to be built from object_list_param[i] and the centroids above
        yaml_dict = make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose)
        dict_list.append(yaml_dict)

    yaml_filename = "output_yaml.txt"
    send_to_yaml(yaml_filename, dict_list)



    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)
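
The example above calls make_yaml_dict() and send_to_yaml() without defining them, and leaves the pick/place variables as a TODO. A minimal sketch of what those two helpers could look like follows; the dictionary keys and the use of rospy_message_converter are assumptions.

# Hypothetical YAML output helpers (keys and conversion choice are assumptions)
import yaml
from rospy_message_converter import message_converter

def make_yaml_dict(test_scene_num, arm_name, object_name, pick_pose, place_pose):
    # Flatten the ROS messages (Int32, String, Pose) into one plain dictionary
    # so each pick-and-place request can be serialised to YAML
    return {
        "test_scene_num": test_scene_num.data,
        "arm_name": arm_name.data,
        "object_name": object_name.data,
        "pick_pose": message_converter.convert_ros_message_to_dictionary(pick_pose),
        "place_pose": message_converter.convert_ros_message_to_dictionary(place_pose),
    }

def send_to_yaml(yaml_filename, dict_list):
    # Write all requests for the scene into a single YAML file
    with open(yaml_filename, 'w') as outfile:
        yaml.dump({"object_list": dict_list}, outfile, default_flow_style=False)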
def pcl_callback(pcl_msg):

    cloud = ros_to_pcl(pcl_msg)

    vox = cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.003

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    ### Statistical Outlier Removal Filter
    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point with a mean distance larger than global (mean distance+x*std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud_filtered = outlier_filter.filter()

    ### PassThrough Filter
    # Create a PassThrough filter object first across the Z axis
    passThroughZ = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passThroughZ.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passThroughZ.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passThroughZ.filter()

    ## Now, Create a PassThrough filter object across the Y axis
    passThroughY = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passThroughY.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passThroughY.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passThroughY.filter()

    ### RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    ### Extract inliers and outliers
    pcl_table_cloud = cloud_filtered.extract(inliers, negative=False)
    pcl_object_cloud = cloud_filtered.extract(inliers, negative=True)

    ### Euclidean Clustering
    # Go from XYZRGB to XYZ since the k-d tree only needs spatial data
    white_cloud = XYZRGB_to_XYZ(pcl_object_cloud)
    # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()

    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.005)
    ec.set_MinClusterSize(150)
    ec.set_MaxClusterSize(50000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    #Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    #Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_object_cloud = pcl_to_ros(pcl_object_cloud)
    ros_table_cloud = pcl_to_ros(pcl_table_cloud)

    #Publish ROS messages
    pcl_objects_pub.publish(ros_object_cloud)
    pcl_table_pub.publish(ros_table_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    det_obj_label = []
    det_obj = []
    # Grab the points for the cluster
    for index, pts_list in enumerate(cluster_indices):
        pcl_cluster = pcl_object_cloud.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute associated feature vector
        color_hist = compute_color_histograms(ros_cluster, using_hsv=True)

        norms = get_normals(ros_cluster)
        norm_hist = compute_normal_histograms(norms)

        # Make prediction
        features = np.concatenate((color_hist, norm_hist))
        predict = clf.predict(scaler.transform(features.reshape(1, -1)))
        label = encoder.inverse_transform(predict)[0]
        det_obj_label.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .2
        object_markers_pub.publish(make_label(label, label_pos, index))

        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        det_obj.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(det_obj_label),
                                                   det_obj_label))

    # Publish the list of detected objects
    detected_objects_pub.publish(det_obj)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    true_object_list = rospy.get_param('/object_list')
    dropbox_param = rospy.get_param('/dropbox')
    true_list = []

    for i in range(len(true_object_list)):
        true_list.append(true_object_list[i]['name'])

    print("\n")
    print("Need to find: ")
    print(true_list)
    print("\n")
    trueset = set(true_list)
    detset = set(det_obj_label)

    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    if detset == trueset:
        try:
            pr2_mover(det_obj)
        except rospy.ROSInterruptException:
            pass
    else:
        rospy.loginfo("Wrong object(s) detected")
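
Several of these examples hand their detections to pr2_mover(), which none of them define. The sketch below outlines one plausible shape for it, reusing the /object_list and /dropbox parameters already read above and the YAML helpers sketched earlier; the parameter layout, the scene number, the output filename and the field choices are assumptions, and the actual pick_place_routine service call is left as a comment.

# Hypothetical pr2_mover() skeleton (parameter layout and scene number are assumptions)
import numpy as np
import rospy
from std_msgs.msg import Int32, String
from geometry_msgs.msg import Pose

def pr2_mover(object_list):
    object_list_param = rospy.get_param('/object_list')
    dropbox_param = rospy.get_param('/dropbox')

    # Map dropbox group -> (arm name, drop position); assumes each dropbox entry
    # carries 'group', 'name' and 'position' keys
    dropbox = {box['group']: (box['name'], box['position']) for box in dropbox_param}

    dict_list = []
    for item in object_list_param:
        # Find the detected object matching this pick-list entry, if any
        match = next((obj for obj in object_list if obj.label == item['name']), None)
        if match is None:
            continue

        # Pick pose: centroid of the detected cluster
        points_arr = ros_to_pcl(match.cloud).to_array()
        centroid = np.mean(points_arr, axis=0)[:3]

        test_scene_num = Int32(data=1)                 # assumption: test scene 1
        object_name = String(data=item['name'])
        arm, drop_pos = dropbox[item['group']]
        arm_name = String(data=arm)

        pick_pose = Pose()
        pick_pose.position.x = float(centroid[0])
        pick_pose.position.y = float(centroid[1])
        pick_pose.position.z = float(centroid[2])

        place_pose = Pose()
        place_pose.position.x, place_pose.position.y, place_pose.position.z = drop_pos

        dict_list.append(make_yaml_dict(test_scene_num, arm_name, object_name,
                                        pick_pose, place_pose))
        # The pick_place_routine service call would go here

    send_to_yaml('output.yaml', dict_list)             # assumed output filename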
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:
##############################################################################
    # TODO: Convert ROS msg to PCL data
##############################################################################
    cloud = ros_to_pcl(pcl_msg)

##############################################################################
    # TODO: Statistical Outlier Filtering
##############################################################################
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(2)
    outlier_filter.set_std_dev_mul_thresh(0.5)
    cloud_filtered = outlier_filter.filter()

##############################################################################
    # TODO: Voxel Grid Downsampling
##############################################################################
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud_filtered.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.01

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()


##############################################################################
    # TODO: PassThrough Filter
##############################################################################
    # Create a PassThrough filter object.
    passthrough_z = cloud_filtered.make_passthrough_filter()

    # ZAXIS Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    axis_min = 0.60
    axis_max = 1.8
    passthrough_z.set_filter_limits(axis_min, axis_max)

    # Use the filter function to obtain the filtered z-axis.
    cloud_filtered = passthrough_z.filter()

    # Create PassThrough filter object
    passthrough_y = cloud_filtered.make_passthrough_filter()

    # YAXIS Assign axis and range to the passthrough filter object
    filter_axis = 'y'
    passthrough_y.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passthrough_y.set_filter_limits(axis_min, axis_max)

    # Use the filter function to obtain the filtered y-axis
    cloud_filtered = passthrough_y.filter()


##############################################################################
    # TODO: RANSAC Plane Segmentation
##############################################################################
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()


##############################################################################
    # TODO: Extract inliers and outliers
##############################################################################
    # Extract inliers
    cloud_table = cloud_filtered.extract(inliers, negative=False)

    # Extract outliers
    cloud_objects = cloud_filtered.extract(inliers, negative=True)


##############################################################################
    # TODO: Euclidean Clustering
##############################################################################
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()

    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(3000)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)

    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()


##############################################################################
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
##############################################################################
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                            white_cloud[indice][1],
                                            white_cloud[indice][2],
                                            rgb_to_float(cluster_color[j])])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)


##############################################################################
    # TODO: Convert PCL data to ROS messages
##############################################################################
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)


##############################################################################
    # TODO: Publish ROS messages
##############################################################################
    pcl_table_pub.publish(ros_cloud_table)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_cluster_pub.publish(ros_cluster_cloud)

# Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    # Grab the points for the cluster
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)

        # Convert the cluster from pcl to ROS using helper functions
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)

        # Compute the associated feature vector
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()

    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
def pcl_callback(pcl_msg):

    ## Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    ## Voxel Grid Downsampling
    voxel_grid_filter = pcl_data.make_voxel_grid_filter()
    voxel_grid_filter.set_leaf_size(0.01, 0.01, 0.01)
    pcl_data = voxel_grid_filter.filter()

    # PassThrough Filter Z
    passthrough_filter = pcl_data.make_passthrough_filter()
    filter_axis = 'z'
    passthrough_filter.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough_filter.set_filter_limits(axis_min, axis_max)
    pcl_data = passthrough_filter.filter()

    # PassThrough Filter Y
    passthrough_filter = pcl_data.make_passthrough_filter()
    filter_axis = 'y'
    passthrough_filter.set_filter_field_name(filter_axis)
    axis_min = -5.0
    axis_max = -1.4
    passthrough_filter.set_filter_limits(axis_min, axis_max)
    pcl_data = passthrough_filter.filter()

    # RANSAC Plane Segmentation
    segmenter = pcl_data.make_segmenter()
    segmenter.set_model_type(pcl.SACMODEL_PLANE)
    segmenter.set_method_type(pcl.SAC_RANSAC)
    segmenter.set_distance_threshold(0.01)
    inliers, coefficients = segmenter.segment()
    # Extract inliers and outliers
    pcl_data_objects = pcl_data.extract(inliers, negative=True)
    pcl_data_table = pcl_data.extract(inliers, negative=False)

    # Euclidean Clustering
    pcl_data_objects_xyz = XYZRGB_to_XYZ(pcl_data_objects)
    tree = pcl_data_objects_xyz.make_kdtree()
    EuclideanClusterExtraction = pcl_data_objects_xyz.make_EuclideanClusterExtraction(
    )
    EuclideanClusterExtraction.set_ClusterTolerance(0.05)
    EuclideanClusterExtraction.set_MinClusterSize(100)
    EuclideanClusterExtraction.set_MaxClusterSize(2000)
    # Search the k-d tree for clusters
    EuclideanClusterExtraction.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = EuclideanClusterExtraction.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                pcl_data_objects_xyz[indice][0],
                pcl_data_objects_xyz[indice][1],
                pcl_data_objects_xyz[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    pcl_data_objects_clustered = pcl.PointCloud_PointXYZRGB()
    pcl_data_objects_clustered.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_objects_msg = pcl_to_ros(pcl_data_objects_clustered)
    ros_table_msg = pcl_to_ros(pcl_data_table)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_objects_msg)
    pcl_table_pub.publish(ros_table_msg)

    detected_objects = []
    detected_objects_labels = []
    # Classify the clusters! (loop through each detected cluster one at a time)
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_data_single_object_clustered = pcl_data_objects.extract(pts_list)
        ros_data_single_object_clustered = pcl_to_ros(
            pcl_data_single_object_clustered)
        # Compute the associated feature vector
        chists = compute_color_histograms(ros_data_single_object_clustered,
                                          using_hsv=True)
        normals = get_normals(ros_data_single_object_clustered)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz
        label_pos = list(pcl_data_objects_xyz[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_data_single_object_clustered
        detected_objects.append(do)

    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):

    # Convert ROS msg to PCL data

    PCL_data = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling filter
    # Create a VoxelGrid filter object for our input point cloud
    vox = PCL_data.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    # Note: a leaf size of 1 would mean 1 m x 1 m x 1 m voxels, a poor choice
    # Experiment and find the appropriate size!
    LEAF_SIZE = 0.01

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # PassThrough filter
    ################################
    # Z axis
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.77
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    # RANSAC plane segmentation
    ################################
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # Extract inliers
    # pcd for table

    cloud_table = cloud_filtered.extract(inliers, negative=False)

    # Extract outliers
    # pcd for tabletop objects

    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create a cluster extraction object
    ################################
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(30000)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)

    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    ################################
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create new cloud containing all clusters, each with unique color
    ################################
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ################################
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    ################################
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3:
    # Classify the clusters!
    ################################
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):

        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)

        ros_cluster = pcl_to_ros(pcl_cluster)
        # Compute the associated feature vector

        # Extract histogram features
        # Generate Color hist
        c_hists = compute_color_histograms(ros_cluster, using_hsv=True)

        # Generate normals and normal histograms

        normals = get_normals(ros_cluster)
        n_hists = compute_normal_histograms(normals)

        # Generate feature by concatenate of color and normals.

        feature = np.concatenate((c_hists, n_hists))

        # Make the prediction

        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz

        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.

        det_obj = DetectedObject()
        det_obj.label = label
        det_obj.cloud = ros_cluster
        detected_objects.append(det_obj)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    ################################
    detected_objects_pub.publish(detected_objects)
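
make_label() is used in every example to place a text marker above each recognised object, but its definition is never shown. A minimal sketch of such a helper follows; the fixed frame id and the marker styling are assumptions.

# Hypothetical make_label() sketch (frame id and styling are assumptions)
import rospy
from visualization_msgs.msg import Marker

def make_label(text, position, idx, color=(1.0, 1.0, 1.0)):
    marker = Marker()
    marker.header.frame_id = 'world'          # assumed fixed frame
    marker.header.stamp = rospy.Time.now()
    marker.id = idx
    marker.type = Marker.TEXT_VIEW_FACING
    marker.action = Marker.ADD
    marker.text = text
    marker.pose.position.x, marker.pose.position.y, marker.pose.position.z = position
    marker.pose.orientation.w = 1.0
    marker.scale.z = 0.1                      # text height in metres
    marker.color.r, marker.color.g, marker.color.b = color
    marker.color.a = 1.0
    marker.lifetime = rospy.Duration()
    return marker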
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling

    vox = cloud.make_voxel_grid_filter()

    LEAF_SIZE = 0.01

    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    cloud_filtered = vox.filter()

    #filename = 'vox_downsampled.pcd'
    #pcl.save(cloud_filtered, filename)

    # PassThrough filter

    passthrough = cloud_filtered.make_passthrough_filter()

    filter_axis = 'z'

    axis_min = 0.6
    axis_max = 1.1

    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(axis_min, axis_max)

    cloud_filtered = passthrough.filter()

    # RANSAC plane segmentation

    seg = cloud_filtered.make_segmenter()

    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    pcl_data_table = cloud_filtered.extract(inliers, negative=False)
    pcl_data_objects = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(pcl_data_objects)
    tree = white_cloud.make_kdtree()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.015)
    ec.set_MinClusterSize(120)
    ec.set_MaxClusterSize(2000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)
    ros_cloud_objects = pcl_to_ros(pcl_data_objects)
    ros_cloud_table = pcl_to_ros(pcl_data_table)

    # TODO: Publish ROS messages
    pcl_cluster_pub.publish(ros_cloud_cluster)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)

    # Exercise-3 TODOs:

    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):

        # Grab the points for the cluster
        pcl_cluster = pcl_data_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=False)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]

        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
def pcl_callback(pcl_msg):
    global map_ready
    global mapping_stage
    global start_time
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)
    # TODO: Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    # NOTE: set_mean_k() is not called here, so the filter keeps the library's default
    # neighbourhood size; only the threshold scale factor is tightened.
    x = 0.005
    outlier_filter.set_std_dev_mul_thresh(x)
    cloud = outlier_filter.filter()
    # TODO: Voxel Grid Downsampling
    LEAF_SIZE = 0.01
    vox = cloud.make_voxel_grid_filter()
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    obs_cloud = cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    # passthrough filter of obs

    passthrough = obs_cloud.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 0.62
    passthrough.set_filter_limits(axis_min, axis_max)

    obs_cloud = passthrough.filter()
    obvoidence_pub.publish(pcl_to_ros(obs_cloud))
    if map_ready:
        passthrough = cloud_filtered.make_passthrough_filter()
        filter_axis = 'z'
        passthrough.set_filter_field_name(filter_axis)
        axis_min = 0.62
        axis_max = 1.1
        passthrough.set_filter_limits(axis_min, axis_max)

        cloud_filtered = passthrough.filter()

        passthrough = cloud_filtered.make_passthrough_filter()
        filter_axis = 'y'
        passthrough.set_filter_field_name(filter_axis)
        axis_min = -0.5
        axis_max = 0.5
        passthrough.set_filter_limits(axis_min, axis_max)
        cloud_filtered = passthrough.filter()
        # TODO: RANSAC Plane Segmentation
        seg = cloud_filtered.make_segmenter()
        seg.set_model_type(pcl.SACMODEL_PLANE)
        seg.set_method_type(pcl.SAC_RANSAC)

        max_distance = 0.01
        seg.set_distance_threshold(max_distance)
        # TODO: Extract inliers and outliers
        inliers, coefficients = seg.segment()
        extracted_inliers = cloud_filtered.extract(inliers, negative=False)
        extracted_outliers = cloud_filtered.extract(inliers, negative=True)
        # TODO: Euclidean Clustering
        white_cloud = XYZRGB_to_XYZ(extracted_outliers)
        tree = white_cloud.make_kdtree()
        # Create a cluster extraction object
        ec = white_cloud.make_EuclideanClusterExtraction()
        # Set tolerances for distance threshold
        # as well as minimum and maximum cluster size (in points)
        # NOTE: These are poor choices of clustering parameters
        # Your task is to experiment and find values that work for segmenting objects.
        ec.set_ClusterTolerance(0.05)  # a larger tolerance merges more points into one cluster
        ec.set_MinClusterSize(0)
        ec.set_MaxClusterSize(1000)
        # Search the k-d tree for clusters
        ec.set_SearchMethod(tree)
        # Extract indices for each of the discovered clusters
        cluster_indices = ec.Extract()

        miss_list = []
        for i in range(len(cluster_indices)):
            if len(cluster_indices[i]) < 30:
                miss_list.append(i)

        # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
        # Assign a color corresponding to each segmented object in scene
        cluster_color = get_color_list(len(cluster_indices))
        color_cluster_point_list = []
        for j, indices in enumerate(cluster_indices):
            for i, indice in enumerate(indices):
                color_cluster_point_list.append([
                    white_cloud[indice][0], white_cloud[indice][1],
                    white_cloud[indice][2],
                    rgb_to_float(cluster_color[j])
                ])

        # Create new cloud containing all clusters, each with unique color
        cluster_cloud = pcl.PointCloud_PointXYZRGB()
        cluster_cloud.from_list(color_cluster_point_list)
        # print(cluster_indices)
        # TODO: Convert PCL data to ROS messages
        ros_cluster_cloud = pcl_to_ros(cluster_cloud)
        # TODO: Publish ROS messages

        # Exercise-3 TODOs:

        # Classify the clusters! (loop through each detected cluster one at a time)
        detected_objects_labels = []
        detected_objects = []

        # these values may need to be adjusted when switching to a different test world
        for index, pts_list in enumerate(cluster_indices):
            if index in miss_list:
                continue
            # Grab the points for the cluster from the extracted outliers (cloud_objects)
            pcl_cluster = extracted_outliers.extract(pts_list)
            # TODO: convert the cluster from pcl to ROS using helper function
            ros_cluster = pcl_to_ros(pcl_cluster)
            # Extract histogram features
            # TODO: complete this step just as is covered in capture_features.py

            chists = compute_color_histograms(ros_cluster, using_hsv=True)
            normals = get_normals(ros_cluster)
            nhists = compute_normal_histograms(normals)
            feature = np.concatenate((chists, nhists))

            # Make the prediction, retrieve the label for the result
            # and add it to detected_objects_labels list
            prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
            label = encoder.inverse_transform(prediction)[0]
            detected_objects_labels.append(label)

            # Publish a label into RViz
            label_pos = list(white_cloud[pts_list[0]])
            label_pos[2] += .4
            object_markers_pub.publish(make_label(label, label_pos, index))

            # Add the detected object to the list of detected objects.
            do = DetectedObject()
            do.label = label
            do.cloud = ros_cluster
            detected_objects.append(do)

        rospy.loginfo('Detected {} objects: {}'.format(
            len(detected_objects_labels), detected_objects_labels))

        # Publish the list of detected objects
        # This is the output you'll need to complete the upcoming project!
        detected_objects_pub.publish(detected_objects)
        cloud_puber.publish(pcl_to_ros(extracted_outliers))
        # cloud_puber.publish(ros_cluster_cloud)
        print('point cloud published')
        detected_objects_list = detected_objects
        try:
            pr2_mover(detected_objects_list)
        except rospy.ROSInterruptException:
            pass

    # rotate PR2 to capture the collision map
    if not map_ready and mapping_stage == 0:
        start_time = rospy.get_time()
        pr2_world_joint_pub.publish(angle1)
        mapping_stage += 1

    if not map_ready and mapping_stage == 1:
        if rospy.get_time() - start_time >= 15:
            mapping_stage += 1
            pr2_world_joint_pub.publish(angle2)

    if not map_ready and mapping_stage == 2:
        if rospy.get_time() - start_time >= 45:
            mapping_stage += 1
            pr2_world_joint_pub.publish(0)

    if not map_ready and mapping_stage == 3:
        if rospy.get_time() - start_time >= 65:
            mapping_stage += 1
            map_ready = True
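# ---------------------------------------------------------------------------
# Hedged sketch (assumption): the staged-rotation version above depends on
# module-level state and extra publishers that are not shown. Something along
# these lines would be needed; the joint angles and topic names are guesses
# based on how the variables are used, not taken from the original code.
# ---------------------------------------------------------------------------
import rospy
from std_msgs.msg import Float64
from sensor_msgs.msg import PointCloud2

# Module-level state driving the staged sweep in the callback above
map_ready = False        # set True once the collision map has been captured
mapping_stage = 0        # 0: start sweep, 1: first side, 2: second side, 3: recentred
start_time = 0.0         # ROS time at which the sweep started
angle1, angle2 = -1.57, 1.57   # world-joint targets for the two sides (signs are a guess)

if __name__ == '__main__':
    rospy.init_node('pr2_mapping_perception', anonymous=True)
    # Joint command for rotating the robot base, plus the two extra cloud publishers
    pr2_world_joint_pub = rospy.Publisher('/pr2/world_joint_controller/command',
                                          Float64, queue_size=1)
    obvoidence_pub = rospy.Publisher('/pr2/3d_map/points', PointCloud2, queue_size=1)
    cloud_puber = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)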
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

# Convert ROS msg to PCL data
    msg_to_pcl = ros_to_pcl(pcl_msg)
# Statistical Outlier Filtering
    # We start by creating a filter object:
    outlier_filter = msg_to_pcl.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(5)
    # Set threshold scale factor
    x = 0.00000001
    # Any point with a mean distance larger than global (mean distance+x*std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Finally call the filter function for magic
    msg_to_pcl = outlier_filter.filter()


# Voxel Grid Downsampling
    vox = msg_to_pcl.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()


# PassThrough Filter Z
    passthroughZ = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthroughZ.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    # axis_min = 0.1
    # axis_max = 0.7
    passthroughZ.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthroughZ.filter()
    # Passthrough filter X to remove appearance of bin edges
    passthroughX = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthroughX.set_filter_field_name(filter_axis)
    axis_min = 0.4
    axis_max = 3.5
    #axis_min = 0.3
    #axis_max = 3.6
    passthroughX.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthroughX.filter()

# RANSAC Plane Segmentation
# Max distance for a point to be considered fitting the model
# Experiment with different values for max_distance
# for segmenting the table
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.008
    seg.set_distance_threshold(max_distance)

# Extract inliers and outliers
    inliers, coefficients = seg.segment()
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)
    cloud_table = extracted_inliers
    cloud_objects = extracted_outliers

# Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

# Create Cluster-Mask Point Cloud to visualize each cluster separately
# Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()

# Set tolerances for distance threshold
# as well as minimum and maximum cluster size (in points)

# Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.0500)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(5000)

    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)

    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create a Cluster Visualization using PointCloud_PointXYZRGB
    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                        white_cloud[indice][1],
                                        white_cloud[indice][2],
                                        rgb_to_float(cluster_color[j])])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    #Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

# Publish ROS messages

    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)


# Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects. Uses DetectedObject.msg,
        # a two-line file that defines the msg fields
        do = DetectedObject()
        # Populate the msg fields
        do.label = label
        # Populate with the cluster cloud
        do.cloud = ros_cluster
        # this is the detected objects list that we later compare the pick list against;
        # it holds labels such as ['biscuits', 'soap', 'soap2']
        detected_objects.append(do)
    # print('test = ', do.label)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
    #Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    #Could add some logic to determine whether or not your object detections are robust
    #before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
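# ---------------------------------------------------------------------------
# Hedged sketch: get_normals(), used by every example to build the normal
# histograms, is usually a thin wrapper around a ROS service provided by the
# feature_extractor node. The service name and srv type below follow the
# sensor_stick exercise layout and are assumptions.
# ---------------------------------------------------------------------------
import rospy
from sensor_stick.srv import GetNormals

def get_normals(cloud):
    # Hand the cluster (a PointCloud2) to the service and return its normal cloud
    get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
    return get_normals_prox(cloud).cluster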
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:    
    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01   
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(extracted_outliers) # <type 'pcl._pcl.PointCloud'>
    tree = white_cloud.make_kdtree() # <type 'pcl._pcl.KdTree'>
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(1000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract() # list of list
    print len(cluster_indices) # how many clusters
       
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):

        for i, indice in enumerate(indices):    
            color_cluster_point_list.append([white_cloud[indice][0], white_cloud[indice][1],             
                                             white_cloud[indice][2], rgb_to_float(cluster_color[j])])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    # TODO: Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cluster_cloud)
    pcl_table_pub.publish(ros_cloud_table)

# Exercise-3 TODOs: 
    # Classify the clusters!
    detected_objects_labels = []
    detected_objects = []

    # Classify the clusters! (loop through each detected cluster one at a time)
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list) # <type 'pcl._pcl.PointCloud_PointXYZRGB'>
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=False)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        #labeled_features.append([feature, model_name])
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
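# ---------------------------------------------------------------------------
# Hedged sketch: make_label() publishes the floating text labels seen in RViz.
# This is a minimal assumed implementation; the frame id, text height and
# colour are guesses rather than values taken from the original helper.
# ---------------------------------------------------------------------------
import rospy
from visualization_msgs.msg import Marker

def make_label(text, position, idx, color=(1.0, 1.0, 1.0)):
    marker = Marker()
    marker.header.frame_id = 'world'
    marker.header.stamp = rospy.Time.now()
    marker.id = idx
    marker.type = Marker.TEXT_VIEW_FACING
    marker.action = Marker.ADD
    # Place the text at the requested position (the callers pass a point 0.4 m above the cluster)
    marker.pose.position.x, marker.pose.position.y, marker.pose.position.z = position
    marker.scale.z = 0.1
    marker.color.r, marker.color.g, marker.color.b = color
    marker.color.a = 1.0
    marker.text = text
    return marker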
def pcl_callback(pcl_msg):

    pcl_msg = ros_to_pcl(pcl_msg)
    
    # Voxel grid downsampling
    vox = pcl_msg.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.75
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -3
    axis_max = -1.35
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Extract inliers and outliers
    inliers, coefficients = seg.segment()
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(600)
    ec.set_MaxClusterSize(3000)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                            white_cloud[indice][1],
                                            white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)


    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):

        # Grab the points for the cluster
        cloud_objects_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(cloud_objects_cluster)
        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        
        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))


    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)
    detected_objects_pub.publish(detected_objects)
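# ---------------------------------------------------------------------------
# Hedged sketch: compute_color_histograms() builds the colour half of the
# feature vector that clf.predict() consumes above. The bin count and value
# range are assumptions; whatever features.py was used to train the model must
# use the same values here.
# ---------------------------------------------------------------------------
import numpy as np
import matplotlib.colors
import sensor_msgs.point_cloud2 as pc2
from sensor_stick.pcl_helper import float_to_rgb  # assumed helper location

def rgb_to_hsv(rgb_list):
    # Scale 8-bit RGB to [0, 1] and convert a single triple to HSV
    rgb_normalized = [1.0 * rgb_list[0] / 255, 1.0 * rgb_list[1] / 255, 1.0 * rgb_list[2] / 255]
    return matplotlib.colors.rgb_to_hsv([[rgb_normalized]])[0][0]

def compute_color_histograms(cloud, using_hsv=False, nbins=32):
    # Unpack the packed-float RGB of every point in the cluster
    point_colors_list = []
    for point in pc2.read_points(cloud, skip_nans=True):
        rgb_list = float_to_rgb(point[3])
        if using_hsv:
            point_colors_list.append(rgb_to_hsv(rgb_list) * 255)
        else:
            point_colors_list.append(rgb_list)

    channel_1_vals = [color[0] for color in point_colors_list]
    channel_2_vals = [color[1] for color in point_colors_list]
    channel_3_vals = [color[2] for color in point_colors_list]

    # Histogram each channel, concatenate and normalise to sum to one
    hist_1 = np.histogram(channel_1_vals, bins=nbins, range=(0, 256))
    hist_2 = np.histogram(channel_2_vals, bins=nbins, range=(0, 256))
    hist_3 = np.histogram(channel_3_vals, bins=nbins, range=(0, 256))
    hist_features = np.concatenate((hist_1[0], hist_2[0], hist_3[0])).astype(np.float64)
    return hist_features / np.sum(hist_features)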
def pcl_callback(pcl_msg):

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Since the raw point cloud contains a lot of unnecessary data that makes publishing expensive, it's a good idea to trim it first.
    # PassThrough Filters
    passthrough = cloud.make_passthrough_filter()
    passthrough.set_filter_field_name('y')
    passthrough.set_filter_limits(-0.5, 0.5)
    cloud_passthrough = passthrough.filter()

    passthrough = cloud_passthrough.make_passthrough_filter()
    passthrough.set_filter_field_name('z')
    passthrough.set_filter_limits(0.6, 1.2)
    cloud_passthrough = passthrough.filter()

    # Statistical Outlier Filtering
    stats = cloud_passthrough.make_statistical_outlier_filter()
    stats.set_mean_k(1)
    stats.set_std_dev_mul_thresh(1.0)
    cloud_stats = stats.filter()

    # Voxel Grid Downsampling
    vox = cloud_stats.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_vox = vox.filter()

    # RANSAC Plane Segmentation
    seg = cloud_vox.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # Extract inliers and outliers
    cloud_table = cloud_vox.extract(inliers, negative=False)
    cloud_objects = cloud_vox.extract(inliers, negative=True)

    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(50)
    ec.set_MaxClusterSize(2500)
    ec.set_SearchMethod(tree)

    # cluster_indices contains a list of indices for each cluster (a list of list)
    cluster_indices = ec.Extract()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    cloud_cluster = pcl.PointCloud_PointXYZRGB()
    cloud_cluster.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_passthrough = pcl_to_ros(cloud_passthrough)
    ros_cloud_stats = pcl_to_ros(cloud_stats)
    ros_cloud_vox = pcl_to_ros(cloud_vox)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_cluster = pcl_to_ros(cloud_cluster)

    # Publish ROS messages
    pcl_pub_passthrough.publish(ros_cloud_passthrough)
    pcl_pub_stats.publish(ros_cloud_stats)
    pcl_pub_vox.publish(ros_cloud_vox)
    pcl_pub_table.publish(ros_cloud_table)
    pcl_pub_objects.publish(ros_cloud_objects)
    pcl_pub_cluster.publish(ros_cloud_cluster)

    detected_objects_labels = []
    detected_objects = []

    # Classify the clusters! (loop through each detected cluster one at a time)
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()

    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
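# ---------------------------------------------------------------------------
# Hedged sketch: compute_normal_histograms() builds the shape half of the
# feature vector from the normals returned by get_normals(). The bin count and
# the (-1, 1) range are assumptions and must match whatever was used when the
# classifier was trained.
# ---------------------------------------------------------------------------
import numpy as np
import sensor_msgs.point_cloud2 as pc2

def compute_normal_histograms(normal_cloud, nbins=32):
    norm_x_vals, norm_y_vals, norm_z_vals = [], [], []
    # The normal cloud is a PointCloud2 carrying one unit normal per point
    for norm_component in pc2.read_points(normal_cloud,
                                          field_names=('normal_x', 'normal_y', 'normal_z'),
                                          skip_nans=True):
        norm_x_vals.append(norm_component[0])
        norm_y_vals.append(norm_component[1])
        norm_z_vals.append(norm_component[2])

    # Histogram each component of the unit normals, then normalise the feature
    hist_x = np.histogram(norm_x_vals, bins=nbins, range=(-1.0, 1.0))
    hist_y = np.histogram(norm_y_vals, bins=nbins, range=(-1.0, 1.0))
    hist_z = np.histogram(norm_z_vals, bins=nbins, range=(-1.0, 1.0))
    hist_features = np.concatenate((hist_x[0], hist_y[0], hist_z[0])).astype(np.float64)
    return hist_features / np.sum(hist_features)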
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)
    
    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    # Select a leaf/voxel size - note that 1.0 is very large.
    # The leaf size is measured in meters. Therefore a size of 1 is a cubic meter.
    LEAF_SIZE = 0.01 # experiment with this
    # Set the voxel/leaf size. 
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    # Create a passthrough filter object for the z-axis
    passthrough_z = cloud_filtered.make_passthrough_filter()
    # Assign axis and range of passthrough filter object.
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    axis_min = 0.735 # experiment with this
    axis_max = 1.1 # experiment with this
    passthrough_z.set_filter_limits(axis_min, axis_max)
    # Filter the downsampled point cloud with the passthrough object to get resultant cloud.
    cloud_filtered = passthrough_z.filter()

    # Create a passthrough filter for the y-axis to remove the front table edge
    passthrough_y = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough_y.set_filter_field_name(filter_axis)
    axis_min = -2.26
    axis_max = -1.35
    passthrough_y.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_y.filter()

    # TODO: RANSAC Plane Segmentation
    # Create segmentation model
    seg = cloud_filtered.make_segmenter()
    # Set the model to fit the objects to
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Set the max distance for a point to be considered to be fitting the model
    max_distance = 0.001 # experiment with this
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    # Call the segment function to obtain the set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()
    pcl_cloud_objects = cloud_filtered.extract(inliers, negative=True)
    pcl_cloud_table = cloud_filtered.extract(inliers, negative=False)

    # TODO: Euclidean Clustering
    # Convert XYZRGB point cloud to XYZ as Euclidean Clustering cannot use colour information
    white_cloud = XYZRGB_to_XYZ(pcl_cloud_objects)
    # Construct the k-d tree
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as a minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.05) #experiment
    ec.set_MinClusterSize(200) #experiment
    ec.set_MaxClusterSize(2000) #experiment
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    # cluster_indices now contains a list of indices for each cluster (a list of lists)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # This cloud will contain points for each of the segmented objects, with each set of points having a unique colour.
    # Assign a colour corresponding to each segmented object in the camera view
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # Create new cloud object containing all clusters, each with a unique colour
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(pcl_cloud_objects)
    ros_cloud_table = pcl_to_ros(pcl_cloud_table)
    ros_cloud_clusters = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_cloud.publish(ros_cloud_clusters)

# Exercise-3 TODOs: 

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        sample_cloud = pcl_cloud_objects.extract(pts_list)
        sample_cloud = pcl_to_ros(sample_cloud)

        # Compute the associated feature vector
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)        

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
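# ---------------------------------------------------------------------------
# Hedged sketch: XYZRGB_to_XYZ() and get_color_list() come from the course
# pcl_helper module. Minimal assumed versions are shown here so the clustering
# and visualization steps above are self-explanatory.
# ---------------------------------------------------------------------------
import random
import pcl

def XYZRGB_to_XYZ(XYZRGB_cloud):
    # Drop the colour channel; Euclidean clustering only needs the geometry
    XYZ_cloud = pcl.PointCloud()
    points_list = [[data[0], data[1], data[2]] for data in XYZRGB_cloud]
    XYZ_cloud.from_list(points_list)
    return XYZ_cloud

def random_color_gen():
    # One random 8-bit RGB triple
    return [random.randint(0, 255) for _ in range(3)]

def get_color_list(cluster_count):
    # Grow a persistent colour list so each cluster keeps its colour between frames
    if len(get_color_list.color_list) < cluster_count:
        for _ in range(cluster_count - len(get_color_list.color_list)):
            get_color_list.color_list.append(random_color_gen())
    return get_color_list.color_list

get_color_list.color_list = []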
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)
    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.75
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    table_top = cloud_filtered.extract(inliers, negative=False)
    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    objects_tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(190)
    ec.set_MaxClusterSize(2000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(objects_tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    pcl_msg_objects = pcl_to_ros(cloud_objects)
    pcl_msg_table = pcl_to_ros(table_top)
    pcl_msg_cluster = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(pcl_msg_objects)
    pcl_table_pub.publish(pcl_msg_table)
    pcl_cluster.publish(pcl_msg_cluster)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_clus = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_clus)
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
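# ---------------------------------------------------------------------------
# Hedged sketch: rgb_to_float() packs an 8-bit RGB triple into the single
# packed-float colour field that PointCloud_PointXYZRGB expects. This mirrors
# the usual pcl_helper implementation but is written from memory.
# ---------------------------------------------------------------------------
import struct

def rgb_to_float(color):
    # Shift the three channels into one 24-bit integer ...
    hex_rgb = ((0xff & color[0]) << 16) | ((0xff & color[1]) << 8) | (0xff & color[2])
    # ... and reinterpret its bytes as a 32-bit float, which is how PCL stores RGB
    return struct.unpack('f', struct.pack('i', hex_rgb))[0]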
def pcl_callback(pcl_msg):

    PCL_Counter = 0
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = 0.005
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # NOTE: PCL_Counter is reset to 0 at the top of every callback and never incremented,
    # so this snapshot is rewritten on each message rather than saved only three times.
    if PCL_Counter < 3:
        filename = 'voxel_downsampled' + str(PCL_Counter) + '.pcd'
        pcl.save(cloud_filtered, filename)

    # TODO: PassThrough Filter
    # Assign axis and range to the passthrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 0.85
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # Y direction
    passthrough_2 = cloud_filtered.make_passthrough_filter()
    filter_axis_2 = 'y'
    passthrough_2.set_filter_field_name(filter_axis_2)
    axis_2_min = -0.35
    axis_2_max = 0.30
    passthrough_2.set_filter_limits(axis_2_min, axis_2_max)
    cloud_filtered = passthrough_2.filter()

    # X direction
    passthrough_3 = cloud_filtered.make_passthrough_filter()
    filter_axis_3 = 'x'
    passthrough_3.set_filter_field_name(filter_axis_3)
    axis_3_min = 0.4
    axis_3_max = 0.7
    passthrough_3.set_filter_limits(axis_3_min, axis_3_max)
    cloud_filtered = passthrough_3.filter()

    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point with a mean distance larger than global (mean distance+x*std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.02  #0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(
        cloud_objects)  # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.05)  #0.05
    ec.set_MinClusterSize(50)  #10
    ec.set_MaxClusterSize(3000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
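# ---------------------------------------------------------------------------
# Hedged sketch: the statistical outlier filter above is easiest to tune
# offline on one of the .pcd snapshots this example saves. The file names and
# parameter values here are assumptions for illustration only.
# ---------------------------------------------------------------------------
import pcl

cloud = pcl.load_XYZRGB('voxel_downsampled0.pcd')
fil = cloud.make_statistical_outlier_filter()
fil.set_mean_k(50)               # neighbours examined per point
fil.set_std_dev_mul_thresh(1.0)  # keep points within mean + 1.0 * std_dev
pcl.save(fil.filter(), 'voxel_downsampled0_denoised.pcd')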
        normals = get_normals(ros_cloud)
        nhists  = compute_normal_histograms(normals, nbins=histogram_bins)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        pcl_object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do       = DetectedObject()
        do.label = label
        do.cloud = ros_cloud
        detected_objects.append(do)

    #---------------------------------------
    # Publish the list of detected objects
    #---------------------------------------
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    pcl_detected_objects_pub.publish(detected_objects)

if __name__ == '__main__':
    #---------------------------------------
    # ROS node initialization
    #---------------------------------------
    rospy.init_node('recognition')
 def detect_objects(self, pcl_msg):
     """
     This method takes in the scene in front of the robot, carries out
     object detection, and generates the front part of the collision map
     """
     # Convert ROS msg to PCL data
     cloud = ros_to_pcl(pcl_msg)
     # Statistical Outlier Filtering
     outlier_filter = cloud.make_statistical_outlier_filter()
     outlier_filter.set_mean_k(20)
     x = 0.05
     outlier_filter.set_std_dev_mul_thresh(x)
     cloud = outlier_filter.filter()
     # Voxel Grid Downsampling
     vox = cloud.make_voxel_grid_filter()
     LEAF_SIZE = 0.007
     vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
     cloud = vox.filter()
     # 1st PassThrough filter in z-axis
     passthrough = cloud.make_passthrough_filter()
     filter_axis = 'z'
     passthrough.set_filter_field_name(filter_axis)
     axis_min = 0.59
     axis_max = 5.0
     passthrough.set_filter_limits(axis_min, axis_max)
     cloud = passthrough.filter()
     # 2nd PassThrough filter in y-axis
     passthrough2 = cloud.make_passthrough_filter()
     filter_axis = 'y'
     passthrough2.set_filter_field_name(filter_axis)
     axis_min = -0.55
     axis_max = 0.55
     passthrough2.set_filter_limits(axis_min, axis_max)
     cloud = passthrough2.filter()
     # RANSAC Plane Segmentation
     seg = cloud.make_segmenter()
     seg.set_model_type(pcl.SACMODEL_PLANE)
     seg.set_method_type(pcl.SAC_RANSAC)
     max_distance = 0.01
     seg.set_distance_threshold(max_distance)
     inliers, coefficients = seg.segment()
     # Extract inliers and outliers as subset point clouds
     self.collision_map = cloud.extract(inliers, negative=False)
     extracted_outliers = cloud.extract(inliers, negative=True)
     outlier_filter = extracted_outliers.make_statistical_outlier_filter()
     outlier_filter.set_mean_k(20)
     x = 0.05
     outlier_filter.set_std_dev_mul_thresh(x)
     extracted_outliers = outlier_filter.filter()
     # Euclidean Clustering
     white_cloud = XYZRGB_to_XYZ(extracted_outliers)
     tree = white_cloud.make_kdtree()
     ec = white_cloud.make_EuclideanClusterExtraction()
     ec.set_ClusterTolerance(0.02)
     ec.set_MinClusterSize(30)
     ec.set_MaxClusterSize(25000)
     ec.set_SearchMethod(tree)
     cluster_indices = ec.Extract()
     # Classify the clusters! (loop through each detected cluster one at a time)
     detected_objects_labels = []
     # Grab the indices of extracted_outliers/white_cloud for each cluster
     for index, pts_list in enumerate(cluster_indices):
         # Extract the points for the current cluster using indices (pts_list)
         pcl_cluster = extracted_outliers.extract(pts_list)
         # Compute the associated feature vector
         ros_cluster = pcl_to_ros(pcl_cluster)
         chists = compute_color_histograms(ros_cluster, using_hsv=True)
         normals = get_normals(ros_cluster)
         nhists = compute_normal_histograms(normals)
         feature = np.concatenate((chists, nhists))
         # Make the prediction
         prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
         label = encoder.inverse_transform(prediction)[0]
         detected_objects_labels.append(label)
         # Publish a label into RViz, place above 1st cluster point
         label_pos = list(white_cloud[pts_list[0]])
         label_pos[2] += 0.4
         self.object_markers_pub.publish(make_label(label, label_pos, index))
         # Add the detected object to the list of detected objects.
         do = DetectedObject()
         do.label = label
         do.cloud = ros_cluster
         self.detected_objects.append(do)
     print 'Detected {} objects: {}'.format(len(detected_objects_labels),
                                            detected_objects_labels)
     # Publish the list of detected objects
     self.detected_objects_pub.publish(self.detected_objects)
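# ---------------------------------------------------------------------------
# Hedged sketch: detect_objects() above is written as a method. A minimal class
# skeleton it could hang off looks roughly like this (the method body would be
# indented into the class); the class name, topic names and message package are
# assumptions.
# ---------------------------------------------------------------------------
import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import Marker
from sensor_stick.msg import DetectedObjectsArray  # assumed package path

class Pr2Perception(object):
    def __init__(self):
        self.detected_objects = []     # filled by detect_objects()
        self.collision_map = None      # table inliers kept for the collision map
        self.object_markers_pub = rospy.Publisher('/object_markers', Marker, queue_size=1)
        self.detected_objects_pub = rospy.Publisher('/detected_objects',
                                                    DetectedObjectsArray, queue_size=1)
        rospy.Subscriber('/pr2/world/points', PointCloud2, self.detect_objects, queue_size=1)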
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    vox = cloud.make_voxel_grid_filter()
    LEAF_SIZE = .01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    filename = 'voxel_downsampled.pcd'
    pcl.save(cloud_filtered, filename)

    # Statistical Outlier Removal Filter
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Points with mean distance larger than global (mean distance+x*std_dev) will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Call the filter function
    cloud_filtered = outlier_filter.filter()

    # PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    # (filter along x first, then apply a second passthrough along z; re-using the
    # same filter object for both axes would only apply the last settings made)
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.4
    axis_max = 3.
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()
    filename = 'pass_through_filtered.pcd'
    pcl.save(cloud_filtered, filename)

    # RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Segment Function to obtain set of inlier indices/model coeffs
    inliers, coefficients = seg.segment()

    # Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    filename = 'extracted_inliers.pcd'
    pcl.save(extracted_inliers, filename)

    # Extract outliers (everything that is not part of the table plane)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)
    filename = 'extracted_outliers.pcd'
    pcl.save(extracted_outliers, filename)

    cloud_table = extracted_inliers
    cloud_objects = extracted_outliers

    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()

    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.03)
    ec.set_MinClusterSize(30)
    ec.set_MaxClusterSize(1200)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Create a Cluster Visualization using PointCloud_PointXYZRGB
    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
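# ---------------------------------------------------------------------------
# Hedged sketch: several examples above hand their detections to pr2_mover(),
# which is not shown. A heavily condensed outline is given here; the parameter
# names ('/object_list', '/dropbox'), the 'pick_place_routine' service and the
# red-left / green-right arm convention follow the usual pick-and-place project
# layout and are assumptions, as is the hard-coded scene number.
# ---------------------------------------------------------------------------
import numpy as np
import rospy
from std_msgs.msg import Int32, String
from geometry_msgs.msg import Pose
from pr2_robot.srv import PickPlace  # assumed srv package

def pr2_mover(object_list):
    object_list_param = rospy.get_param('/object_list')
    dropbox_param = rospy.get_param('/dropbox')
    dropbox_pos = {box['group']: box['position'] for box in dropbox_param}

    test_scene_num = Int32(data=1)  # assumption: world 1

    for entry in object_list_param:
        # Match this pick-list entry against the detections from pcl_callback()
        match = next((obj for obj in object_list if obj.label == entry['name']), None)
        if match is None:
            continue

        object_name = String(data=entry['name'])
        arm_name = String(data='left' if entry['group'] == 'red' else 'right')

        # Pick pose: centroid of the detected cluster
        points = ros_to_pcl(match.cloud).to_array()
        centroid = np.mean(points, axis=0)[:3]
        pick_pose = Pose()
        pick_pose.position.x = float(centroid[0])
        pick_pose.position.y = float(centroid[1])
        pick_pose.position.z = float(centroid[2])

        # Place pose: centre of the matching dropbox
        place_pose = Pose()
        place_pose.position.x, place_pose.position.y, place_pose.position.z = \
            dropbox_pos[entry['group']]

        rospy.wait_for_service('pick_place_routine')
        try:
            pick_place_routine = rospy.ServiceProxy('pick_place_routine', PickPlace)
            resp = pick_place_routine(test_scene_num, object_name, arm_name,
                                      pick_pose, place_pose)
            rospy.loginfo('pick_place_routine response: {}'.format(resp.success))
        except rospy.ServiceException as e:
            rospy.logerr('Service call failed: {}'.format(e))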
def pcl_callback(ros_pcl_msg):

    #----------------------------------------------------------------------------------
    # Convert a ROS PointCloud2 message to a pcl PointXYZRGB
    #----------------------------------------------------------------------------------
    cloud_filtered = ros_to_pcl(ros_pcl_msg)

    #----------------------------------------------------------------------------------
    # Statistical Outlier Filtering
    #----------------------------------------------------------------------------------
    # Create a statistical filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(3)
    # Set threshold scale factor
    x = 0.00001
    # Any point with a mean distance larger than global (mean distance+x*std_dev)
    # will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Call the filter function
    cloud_filtered = outlier_filter.filter()

    #----------------------------------------------------------------------------------
    # Voxel Grid Downsampling filter
    #----------------------------------------------------------------------------------
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud_filtered.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    # 1 means 1mx1mx1m leaf size
    # Experiment and find the appropriate size!
    LEAF_SIZE = 0.005
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    #----------------------------------------------------------------------------------
    # PassThrough filter (Z Axis)
    #----------------------------------------------------------------------------------
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6095
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    # Use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    #----------------------------------------------------------------------------------
    # PassThrough filter (Y Axis)
    #----------------------------------------------------------------------------------
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.456
    axis_max = 0.456
    passthrough.set_filter_limits(axis_min, axis_max)
    # Use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    #----------------------------------------------------------------------------------
    # RANSAC plane segmentation
    #----------------------------------------------------------------------------------
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.006
    seg.set_distance_threshold(max_distance)
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()
    # Extract inliers (Table)
    extracted_table = cloud_filtered.extract(inliers, negative=False)
    # Extract outliers (Tabletop Objects)
    extracted_objects = cloud_filtered.extract(inliers, negative=True)

    #----------------------------------------------------------------------------------
    # Euclidean Clustering
    #----------------------------------------------------------------------------------
    white_cloud = XYZRGB_to_XYZ(extracted_objects)
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.03)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(9000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    #----------------------------------------------------------------------------------
    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    #----------------------------------------------------------------------------------
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    #----------------------------------------------------------------------------------
    # Converts a pcl PointXYZRGB to a ROS PointCloud2 message
    #----------------------------------------------------------------------------------
    ros_cloud_objects = pcl_to_ros(extracted_objects)
    ros_cloud_table = pcl_to_ros(extracted_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    #----------------------------------------------------------------------------------
    # Publish ROS messages
    #----------------------------------------------------------------------------------
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    #----------------------------------------------------------------------------------
    # Classify the clusters!
    #----------------------------------------------------------------------------------
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):

        #----------------------------------------------------------------------------------
        # Grab the points for the cluster from the extracted_objects
        #----------------------------------------------------------------------------------
        pcl_cluster = extracted_objects.extract(pts_list)
        # Convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        #----------------------------------------------------------------------------------
        # Generate Histograms
        #----------------------------------------------------------------------------------
        # Color Histogram
        c_hists = compute_color_histograms(ros_cluster, using_hsv=True)
        # Normals Histogram
        normals = get_normals(ros_cluster)
        n_hists = compute_normal_histograms(normals)

        #----------------------------------------------------------------------------------
        # Generate the feature vector by concatenating the color and normal histograms
        #----------------------------------------------------------------------------------
        feature = np.concatenate((c_hists, n_hists))

        #----------------------------------------------------------------------------------
        # Make the prediction
        #----------------------------------------------------------------------------------
        # Retrieve the label for the result and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        #----------------------------------------------------------------------------------
        # Publish a label into RViz
        #----------------------------------------------------------------------------------
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .2
        object_markers_pub.publish(make_label(label, label_pos, index))

        #----------------------------------------------------------------------------------
        # Add the detected object to the list of detected objects
        #----------------------------------------------------------------------------------
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    #----------------------------------------------------------------------------------
    # Publish the list of detected objects
    #----------------------------------------------------------------------------------
    detected_objects_pub.publish(detected_objects)

    #----------------------------------------------------------------------------------
    # Invoke pr2_mover() function
    #----------------------------------------------------------------------------------
    if len(detected_objects) > 0:
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
    else:
        rospy.logwarn('No objects detected!')

    return
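
# A small, self-contained sketch (not part of any node above) illustrating what the
# statistical outlier filter's two parameters do: every point whose mean distance to its
# k nearest neighbours exceeds (global mean distance + x * std_dev) is discarded.
import numpy as np
import pcl

def outlier_filter_demo():
    cloud = pcl.PointCloud()
    dense = np.random.uniform(0.0, 0.1, (1000, 3)).astype(np.float32)  # a tight blob
    noise = np.random.uniform(1.0, 2.0, (20, 3)).astype(np.float32)    # sparse stray points
    cloud.from_array(np.vstack((dense, noise)))

    fil = cloud.make_statistical_outlier_filter()
    fil.set_mean_k(10)               # k: neighbours analysed per point
    fil.set_std_dev_mul_thresh(1.0)  # x: threshold scale factor
    print('before: %d points, after: %d points' % (cloud.size, fil.filter().size))
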
# Example 24

def pcl_callback(pcl_msg):

    cloud = ros_to_pcl(pcl_msg)  #Convert ROS msg to PCL data

    # Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    neighboring_pts_to_analyze = 8
    threshold_scale_factor = 0.5
    outlier_filter.set_mean_k(neighboring_pts_to_analyze)
    outlier_filter.set_std_dev_mul_thresh(threshold_scale_factor)

    cloud_filtered = outlier_filter.filter()  # resulting filtered point cloud

    # Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()

    LEAF_SIZE = 0.01  # voxel size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    cloud_filtered = vox.filter()  #resulting downsampled point cloud

    # PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()

    # z-axis passthrough filter for table elimination
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    cloud_filtered = passthrough.filter()  # resulting filtered point cloud

    # x-axis passthrough filter for dropbox elimination
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.4
    axis_max = 1.0
    passthrough.set_filter_limits(axis_min, axis_max)

    cloud_filtered = passthrough.filter()  # resulting filtered point cloud

    # RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    inliers, coefficients = seg.segment()  # set of inlier indices and model coefficients

    cloud_table = cloud_filtered.extract(inliers, negative=False)  # Extract inliers (table)
    ros_cloud_table = pcl_to_ros(cloud_table)  # convert PCL data to ROS messages

    cloud_objects = cloud_filtered.extract(inliers, negative=True)  # Extract outliers (objects)
    ros_cloud_objects = pcl_to_ros(cloud_objects)  # convert PCL data to ROS messages

    # Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()

    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(800)
    ec.set_SearchMethod(tree)

    cluster_indices = ec.Extract()  # extract indices for each discovered cluster

    # Visualize Clusters
    cluster_color = get_color_list(len(cluster_indices))  # assign a color for each segmented object

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()  # create new cloud object
    cluster_cloud.from_list(color_cluster_point_list)  # one cloud containing all clusters, each with a unique color
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)  # convert PCL data to ROS messages

    # Classify Clusters
    detected_objects_labels = []
    detected_objects_list = []

    for index, pts_list in enumerate(cluster_indices):
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)  # convert PCL data to ROS messages

        # Extract histogram features
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction using the support vector machine model
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]  # get the label from the prediction
        detected_objects_labels.append(label)  # append to the labels list

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects_list.append(do)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish ROS messages
    detected_objects_pub.publish(detected_objects_list)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

    try:
        pr2_mover(detected_objects_list)
    except rospy.ROSInterruptException:
        pass
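
# None of the snippets above show the node setup that provides the publishers, the camera
# subscription, or the clf/scaler/encoder globals they rely on. A minimal sketch of such a
# setup; the topic names and the layout of the pickled model file are assumptions, not taken
# from the original code.
import pickle
import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import Marker
from sklearn.preprocessing import LabelEncoder

if __name__ == '__main__':
    rospy.init_node('object_recognition', anonymous=True)

    # Camera cloud in, pcl_callback() out
    pcl_sub = rospy.Subscriber('/pr2/world/points', PointCloud2, pcl_callback, queue_size=1)

    # Debug/visualization publishers used inside pcl_callback()
    pcl_objects_pub = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher('/pcl_table', PointCloud2, queue_size=1)
    pcl_cluster_pub = rospy.Publisher('/pcl_cluster', PointCloud2, queue_size=1)
    object_markers_pub = rospy.Publisher('/object_markers', Marker, queue_size=1)
    # detected_objects_pub is omitted here: its DetectedObjectsArray message type comes from
    # the project package (sensor_stick or pr2_robot, depending on the exercise).

    # Trained SVM; assumed dict layout {'classifier', 'classes', 'scaler'} as produced by a
    # typical scikit-learn training script
    model = pickle.load(open('model.sav', 'rb'))
    clf = model['classifier']
    encoder = LabelEncoder()
    encoder.classes_ = model['classes']
    scaler = model['scaler']

    # Assumed from the course helper: get_color_list() caches its colors on this attribute
    get_color_list.color_list = []

    rospy.spin()
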
# Example 25

def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    pcl_datacloud=ros_to_pcl(pcl_msg)
    
    # TODO: Statistical Outlier Filtering
    outlier_filter = pcl_datacloud.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0
 
    # Any point with a mean distance larger than (global mean distance + x*std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.003
    # Set the voxel (or leaf) size  
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()
    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    passthrough = cloud_filtered.make_passthrough_filter()  # second passthrough filter to remove the drop boxes
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.3
    axis_max = 1.0
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

   

    
    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit 
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()

    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(6000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(extracted_outliers) 
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

# Exercise-3 TODOs:
    detected_objects_labels = []
    detected_objects = []
    # Classify the clusters! (loop through each detected cluster one at a time)
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        rospcl_cluster=pcl_to_ros(pcl_cluster) 
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(rospcl_cluster, using_hsv=True)
        normals = get_normals(rospcl_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = rospcl_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
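
# get_normals() is called by every example above but never defined. In the course exercises
# it is a thin wrapper around a normal-estimation service; a sketch under that assumption
# (the package, service name and response field here are assumptions, not taken from the source).
import rospy
from sensor_stick.srv import GetNormals  # assumed service definition

def get_normals(cloud):
    # Ask the feature_extractor node for surface normals of the given PointCloud2
    get_normals_prox = rospy.ServiceProxy('/feature_extractor/get_normals', GetNormals)
    return get_normals_prox(cloud).cluster  # assumed response field
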
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    ### Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    ### Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    # Note: this (1) is a poor choice of leaf size
    # Experiment and find the appropriate size!
    LEAF_SIZE = 0.003

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    ### Statistical Outlier Removal Filter
    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point with a mean distance larger than (global mean distance + x*std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    ### PassThrough Filter
    # Create a PassThrough filter object first across the Z axis
    passThroughZ = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passThroughZ.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passThroughZ.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passThroughZ.filter()

    ## Now, Create a PassThrough filter object across the Y axis
    passThroughY = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'y'
    passThroughY.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passThroughY.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passThroughY.filter()

    ### RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    ### Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    ### Euclidean Clustering
    # Go from XYZRGB to XYZ, since building the k-d tree only needs spatial data
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()

    ### Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(100)
    # Referring to the minimum and maximum number of points that make up an object's cluster
    ec.set_MaxClusterSize(50000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    ### Create Cluster-Mask Point Cloud to visualize each cluster separately

    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    ### Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    ### Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # Complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # labeled_features.append([feature, model_name])

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    print "Trying to match the pick list with the objects detected..."
    print "\n"
    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    object_list_param = rospy.get_param('/object_list')
    pick_list_objects = []
    for i in range(len(object_list_param)):
        pick_list_objects.append(object_list_param[i]['name'])

    print "\n"
    print "Pick List includes: "
    print pick_list_objects
    print "\n"
    pick_set_objects = set(pick_list_objects)
    detected_set_objects = set(detected_objects_labels)

    if detected_set_objects == pick_set_objects:
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
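
# compute_color_histograms() / compute_normal_histograms() come from the training pipeline
# (capture_features.py / features.py) and are not shown in these snippets. A rough sketch of
# what such histogram features can look like; bin counts, value ranges and the exact course
# implementation are assumptions.
import struct
import colorsys
import numpy as np
import sensor_msgs.point_cloud2 as pc2

def compute_color_histograms(cloud, using_hsv=True, nbins=32):
    channels = ([], [], [])
    for point in pc2.read_points(cloud, skip_nans=True):
        # The packed RGB float sits in the 4th field of each XYZRGB point
        packed = struct.unpack('I', struct.pack('f', point[3]))[0]
        r, g, b = (packed >> 16) & 255, (packed >> 8) & 255, packed & 255
        if using_hsv:
            r, g, b = [255 * c for c in colorsys.rgb_to_hsv(r / 255., g / 255., b / 255.)]
        for chan, val in zip(channels, (r, g, b)):
            chan.append(val)
    hists = [np.histogram(c, bins=nbins, range=(0, 256))[0] for c in channels]
    features = np.concatenate(hists).astype(np.float64)
    return features / max(np.sum(features), 1)  # normalize so the bins sum to 1

def compute_normal_histograms(normal_cloud, nbins=32):
    fields = ('normal_x', 'normal_y', 'normal_z')
    vals = zip(*pc2.read_points(normal_cloud, field_names=fields, skip_nans=True))
    hists = [np.histogram(v, bins=nbins, range=(-1, 1))[0] for v in vals]
    features = np.concatenate(hists).astype(np.float64)
    return features / max(np.sum(features), 1)
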
def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)
    # Set threshold scale factor
    x = 0.001
    # Any point with a mean distance larger than (global mean distance + x*std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()
    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0
    axis_max = 2
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # filter_axis = 'x'
    # axis_min = 0.33
    # axis_max = 1.0
    # cloud = do_passthrough(cloud, filter_axis, axis_min, axis_max)


    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    table_cloud = cloud_filtered.extract(inliers, negative=False)
    objects_cloud = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(objects_cloud)
    tree = white_cloud.make_kdtree()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.020)
    ec.set_MinClusterSize(20)
    ec.set_MaxClusterSize(2000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    #Assign a color corresponding to each segmented object in scene

    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                        white_cloud[indice][1],
                                        white_cloud[indice][2],
                                         rgb_to_float(cluster_color[j])])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_objects_cloud = pcl_to_ros(objects_cloud)
    ros_table_cloud = pcl_to_ros(table_cloud)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_table_pub.publish(ros_table_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)

# Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = objects_cloud.extract(pts_list)

        # convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        hist_features = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        norm_hist_features = compute_normal_histograms(normals)
        feature = np.concatenate((hist_features, norm_hist_features))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)


    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
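
# The commented-out lines in the example above reference a do_passthrough() helper that is
# never shown. A minimal sketch of such a wrapper, assuming it simply bundles the python-pcl
# passthrough calls used throughout these examples:
def do_passthrough(cloud, filter_axis, axis_min, axis_max):
    # Keep only the points whose `filter_axis` coordinate lies within [axis_min, axis_max]
    passthrough = cloud.make_passthrough_filter()
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(axis_min, axis_max)
    return passthrough.filter()
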
# Example 28

def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)
    print(cloud)

    # TODO: Statistical Outlier Filtering
    # We start by creating a filter object:
    outlier_filter = cloud.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(3)
    # Any point with a mean distance larger than (global mean distance + x*std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(0.)
    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()
    pub_noise.publish(pcl_to_ros(cloud_filtered))

    # TODO: Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.61
    axis_max = 0.9
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    pub_passthrough.publish(pcl_to_ros(cloud_filtered))

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.005
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    pub_table.publish(pcl_to_ros(cloud_table))
    pub_table_outlier.publish(pcl_to_ros(cloud_objects))

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)  # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(0.05)
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(10000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages and publish
    ros_cloud = pcl_to_ros(cluster_cloud)
    points_pub.publish(ros_cloud)

    # Exercise-3 TODOs:
    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)

        # convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
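
# make_label(), used above to push text markers into RViz, is another project helper these
# snippets do not include. A sketch of a compatible implementation; the fixed frame name and
# marker scale are assumptions.
import rospy
from visualization_msgs.msg import Marker

def make_label(text, position, marker_id, color=(1.0, 1.0, 1.0)):
    marker = Marker()
    marker.header.frame_id = 'world'          # assumed fixed frame
    marker.header.stamp = rospy.Time.now()
    marker.id = marker_id
    marker.type = Marker.TEXT_VIEW_FACING
    marker.action = Marker.ADD
    marker.pose.position.x, marker.pose.position.y, marker.pose.position.z = position
    marker.scale.z = 0.05                     # text height in meters
    marker.color.r, marker.color.g, marker.color.b = color
    marker.color.a = 1.0
    marker.text = text
    return marker
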
def pcl_callback(pcl_msg):
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    # Bring the camera image (ROS) into PCL to apply CV methods
    pcl_data = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    vox = pcl_data.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # Statistical filter
    stat_filter = cloud_filtered.make_statistical_outlier_filter()
    stat_filter.set_mean_k(5)
    x = 0.1
    stat_filter.set_std_dev_mul_thresh(x)
    cloud_stat_filtered = stat_filter.filter()

    # TODO: PassThrough Filter

    passthrough = cloud_stat_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.4
    axis_max = 0.4
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # Apply a passthrough along z as well, since the robot otherwise mistakes its own arms for an object (e.g. the book)

    passthrough2 = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough2.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 0.8
    passthrough2.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough2.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # TODO: Extract inliers and outliers
    inliers, coefficients = seg.segment()
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()
    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(5000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)
    cloud_objects = extracted_outliers
    cloud_table = extracted_inliers
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)

    # TODO: Publish ROS messages

    pcl_points_pub.publish(pcl_msg)
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cloud_cluster)

    # Exercise-3 TODOs:
    EX3 = 1
    if (EX3 == 1):
        # Classify the clusters! (loop through each detected cluster one at a time)
        detected_objects_labels = []
        detected_objects = []

        for index, pts_list in enumerate(cluster_indices):
            # Grab the points for the cluster from the extracted outliers (cloud_objects)
            pcl_cluster = cloud_objects.extract(pts_list)
            # TODO: convert the cluster from pcl to ROS using helper function
            ros_cluster = pcl_to_ros(pcl_cluster)
            # Extract histogram features
            # TODO: complete this step just as is covered in capture_features.py

            sample_cloud = ros_cluster

            chists = compute_color_histograms(sample_cloud, using_hsv=True)
            normals = get_normals(sample_cloud)
            nhists = compute_normal_histograms(normals)
            feature = np.concatenate((chists, nhists))
            #labeled_features.append([feature, model_name])

            # Make the prediction, retrieve the label for the result
            # and add it to detected_objects_labels list
            prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
            label = encoder.inverse_transform(prediction)[0]
            detected_objects_labels.append(label)

            # Publish a label into RViz
            label_pos = list(white_cloud[pts_list[0]])
            label_pos[2] += 0
            object_markers_pub.publish(make_label(label, label_pos, index))

            # Add the detected object to the list of detected objects.
            do = DetectedObject()
            do.label = label
            do.cloud = ros_cluster
            detected_objects.append(do)

        rospy.loginfo('Detected {} objects: {}'.format(
            len(detected_objects_labels), detected_objects_labels))

        # Publish the list of detected objects
        # This is the output you'll need to complete the upcoming project!
        detected_objects_pub.publish(detected_objects)

        # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
        # Could add some logic to determine whether or not your object detections are robust
        # before calling pr2_mover()
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
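
# Every example hands its detections to pr2_mover(), which is not part of these snippets.
# A minimal skeleton of what such a function typically starts with: read the pick list from
# the parameter server ('/object_list' appears in one of the examples above) and compute a
# centroid per matching detection. The actual pick-and-place service request is left out.
import numpy as np
import rospy

def pr2_mover(object_list):
    pick_list = rospy.get_param('/object_list')
    for item in pick_list:
        name = item['name']
        for obj in object_list:
            if obj.label != name:
                continue
            # Centroid of the detected cluster (cast to plain floats for ROS messages)
            points = ros_to_pcl(obj.cloud).to_array()
            centroid = [float(c) for c in np.mean(points, axis=0)[:3]]
            rospy.loginfo('Would pick "%s" at centroid %s' % (name, centroid))
            # ... build and send the pick-and-place request / yaml output here ...
            break
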
def pcl_callback(pcl_msg):


    #### Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    ##############                     ###############
    ##############  FILTERING & RANSAC ###############
    ##############                     ###############

    #################################################################
    ############### Statistical Outlier Filtering ###################
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(1) # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_std_dev_mul_thresh(0.3) # Any point with a mean distance larger than (global mean distance + x*std_dev) will be considered an outlier
    cloud_filtered = outlier_filter.filter()

    #################################################################
    ############### Voxel Grid Downsampling #########################
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01                                  #voxel size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE) #set voxel (leaf) size.
    cloud_filtered = vox.filter()

    #################################################################
    ############## PassThrough Filter ###############################
    passthrough_z = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough_z.set_filter_field_name(filter_axis)
    axis_min = 0.32
    axis_max = 0.86
    passthrough_z.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_z.filter()

    passthrough_x = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthrough_x.set_filter_field_name(filter_axis)
    axis_min = 0.35
    axis_max = 0.79
    passthrough_x.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough_x.filter()

    ################################################################
    ############# RANSAC Plane Segmentation ########################
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.015
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()

    ################################################################
    ############# Extract inliers and outliers #####################
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    #################                    ###################
    ################# Euclidean Clustering ###################
    #################                    ###################

    white_cloud = XYZRGB_to_XYZ(cloud_objects) # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()

    
    ec = white_cloud.make_EuclideanClusterExtraction() # Create a cluster extraction object

    ec.set_ClusterTolerance(0.026) # Set tolerances for distance threshold 
    ec.set_MinClusterSize(10) # Minimum cluster size (in points)
    ec.set_MaxClusterSize(1000) # Maximum cluster size (in points)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    ####################################################################################
    ############ Cluster-Mask Point Cloud to visualize each cluster separately #########
    
    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                    rgb_to_float(cluster_color[j])])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    ###########################################################
    ############ Convert PCL data to ROS messages #############
    ros_cloud_objects =  pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    ###########################################################
    ############ Publish ROS messages #########################
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    #################   Object Recognition  ###################
    #################          &            ###################
    #################       Labeling        ###################

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        ####### Grab the points for the cluster from the extracted outliers
        pcl_cluster = cloud_objects.extract(pts_list)

        ####### Convert the cluster from pcl to ROS
        ros_cluster = pcl_to_ros(pcl_cluster)

        ###############################################################
        ############### Extract histogram features #####################
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        ####### Prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        ####### Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        ###### Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
    
    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    try:
        pr2_mover(detected_objects)
    except rospy.ROSInterruptException:
        pass
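
# The histogram-extraction + SVM-prediction block repeats verbatim in every example above.
# A small refactoring sketch (not from the original) that wraps it, reusing only the
# functions and globals (clf, scaler, encoder, get_normals, compute_*_histograms) the
# examples already rely on:
import numpy as np

def classify_cluster(ros_cluster):
    # Return the predicted label string for one ROS PointCloud2 cluster
    chists = compute_color_histograms(ros_cluster, using_hsv=True)
    nhists = compute_normal_histograms(get_normals(ros_cluster))
    feature = np.concatenate((chists, nhists))
    prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
    return encoder.inverse_transform(prediction)[0]
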
# Example 31

def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(7)
    outlier_filter.set_std_dev_mul_thresh(-0.6)
    cloud_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    LEAF_SIZE = 0.01
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.605
    axis_max = 1.0
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: Rotate PR2 in place to capture side tables for the collision map
    joint_angle = Float64()
    global task_state
    global collision_base
    collision_map_pub.publish(pcl_to_ros(collision_base))

    if task_state == 'start':
        joint_angle.data = np.pi / 2
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle - np.pi / 2) < 0.0001:
            task_state = 'left_sector'
            passthrough = cloud_filtered.make_passthrough_filter()
            filter_axis = 'y'
            passthrough.set_filter_field_name(filter_axis)
            axis_min = 0.43
            axis_max = 1.0
            passthrough.set_filter_limits(axis_min, axis_max)
            cloud_filtered = passthrough.filter()
            collision_base = cloud_filtered
            collision_map_pub.publish(pcl_to_ros(collision_base))
            return
        else:
            return
    elif task_state == 'left_sector':
        joint_angle.data = -np.pi / 2
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle - -np.pi / 2) < 0.0001:
            task_state = 'right_sector'
            passthrough = cloud_filtered.make_passthrough_filter()
            filter_axis = 'y'
            passthrough.set_filter_field_name(filter_axis)
            axis_min = -1.0
            axis_max = -0.43
            passthrough.set_filter_limits(axis_min, axis_max)
            cloud_filtered = passthrough.filter()
            collision_base.from_list(collision_base.to_list() +
                                     cloud_filtered.to_list())
            collision_map_pub.publish(pcl_to_ros(collision_base))
            return
        else:
            return
    elif task_state == 'right_sector':
        joint_angle.data = 0
        joint_command_pub.publish(joint_angle)
        current_joint_angle = get_joint_properties('world_joint').position[0]
        if abs(current_joint_angle) < 0.0001:
            task_state = 'finish'
            return
        else:
            return
    else:
        pass

    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'y'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = -0.45
    axis_max = 0.45
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    max_distance = 0.006
    seg.set_distance_threshold(max_distance)
    plane_inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    cloud_table = cloud_filtered.extract(plane_inliers, negative=False)
    collision_base.from_list(collision_base.to_list() + cloud_table.to_list())
    collision_map_pub.publish(pcl_to_ros(collision_base))
    cloud_objects = cloud_filtered.extract(plane_inliers, negative=True)

    # TODO: Euclidean Clustering
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(1500)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])

    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):

        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Compute the associated feature vector
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    # try:
    #     pr2_mover(detected_objects)
    # except rospy.ROSInterruptException:
    #     pass
    if detected_objects:
        pr2_mover(detected_objects)
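
# The example above relies on globals (task_state, collision_base) and on joint_command_pub,
# collision_map_pub and get_joint_properties(), none of which appear in the snippet. A sketch
# of how they could be wired up; the command and collision-map topic names are assumptions,
# only the Gazebo joint-properties service is a standard interface.
import pcl
import rospy
from std_msgs.msg import Float64
from sensor_msgs.msg import PointCloud2
from gazebo_msgs.srv import GetJointProperties

# State-machine bookkeeping used by pcl_callback()
task_state = 'start'
collision_base = pcl.PointCloud_PointXYZRGB()

# Rotate the robot in place by commanding its world joint (assumed controller topic)
joint_command_pub = rospy.Publisher('/pr2/world_joint_controller/command', Float64, queue_size=1)
# Accumulated collision cloud consumed by the motion planner (assumed topic)
collision_map_pub = rospy.Publisher('/pr2/3d_map/points', PointCloud2, queue_size=1)
# Query Gazebo for the current world-joint angle, e.g. get_joint_properties('world_joint').position[0]
get_joint_properties = rospy.ServiceProxy('/gazebo/get_joint_properties', GetJointProperties)
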
# Example 32

def pcl_callback(pcl_msg):

# Exercise-2 TODOs:

    # Convert ROS msg to PCL data
    pcl_data = ros_to_pcl(pcl_msg)

    # Voxel Grid Downsampling
    pcl_downsampled = voxel_downsampling(pcl_data)

    # PassThrough Filter
    pcl_passed = passthrough_filtering(pcl_downsampled)

    # RANSAC Plane Segmentation
    inliers = plane_fitting(pcl_passed)

    # Extract inliers and outliers
    # inliers: table
    # outliers: objects
    cloud_table = extract_inliers(inliers, pcl_passed)
    cloud_objects = extract_outliers(inliers, pcl_passed)

    # Euclidean Clustering
    white_cloud, tree, cluster_indices = clustering(cloud_objects)

    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    cluster_cloud_colored = color_clusters(white_cloud, cluster_indices)

    # Convert PCL data to ROS messages
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cluster_cloud_colored = pcl_to_ros(cluster_cloud_colored)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud_colored)

# Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster
        pcl_cluster = cloud_objects.extract(pts_list)

        # convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)

        # Compute the associated feature vector
        feature = np.concatenate((chists, nhists))

        # Make the prediction
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)
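
# This variant wraps each Exercise-2 stage in a helper (voxel_downsampling,
# passthrough_filtering, plane_fitting, extract_inliers, extract_outliers,
# clustering, color_clusters) whose definitions live elsewhere in the source.
# A minimal sketch of the filtering/segmentation helpers, using the same
# python-pcl calls as the inline examples, is given below; the default leaf
# size, axis limits and distance threshold are illustrative assumptions.
def voxel_downsampling(cloud, leaf_size=0.01):
    # Downsample the cloud with a VoxelGrid filter
    vox = cloud.make_voxel_grid_filter()
    vox.set_leaf_size(leaf_size, leaf_size, leaf_size)
    return vox.filter()

def passthrough_filtering(cloud, axis='z', axis_min=0.6, axis_max=1.1):
    # Keep only points whose coordinate along `axis` lies in [axis_min, axis_max]
    passthrough = cloud.make_passthrough_filter()
    passthrough.set_filter_field_name(axis)
    passthrough.set_filter_limits(axis_min, axis_max)
    return passthrough.filter()

def plane_fitting(cloud, max_distance=0.01):
    # RANSAC plane fit; returns the indices of the inlier (table) points
    seg = cloud.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(max_distance)
    inliers, coefficients = seg.segment()
    return inliers

def extract_inliers(inliers, cloud):
    # Inliers of the plane model: the table surface
    return cloud.extract(inliers, negative=False)

def extract_outliers(inliers, cloud):
    # Everything that is not the plane: the objects on the table
    return cloud.extract(inliers, negative=True)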
Example 33
def pcl_callback(pcl_msg):

# TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

# TODO: Voxel Grid Downsampling
    # Create a voxel filter object for our input point cloud
    cloud_filtered_v = voxel_filter(cloud)

# TODO: PassThrough Filter
    # Create a PassThrough filter object
    #cloud_filtered_pass = passthrough_filter(cloud_filtered_v, 'x', 0.6, 1.1)
    cloud_filtered_pass = passthrough_filter(cloud_filtered_v, 'y', -0.4, 0.47)
    cloud_filtered_pass = passthrough_filter(cloud_filtered_pass, 'z', 0.6, 0.91)

# TODO: RANSAC Plane Segmentation
    extracted_inliers, extracted_outliers = RANSCAN_filter(cloud_filtered_pass)

# TODO: Convert PCL data to ROS msg
    ros_cloud_objects = pcl_to_ros(extracted_outliers)
    ros_cloud_table = pcl_to_ros(extracted_inliers)

# TODO: Euclidean Clustering
    cluster_indices, white_cloud = Euclidean_Clustering(extracted_inliers, extracted_outliers)
    # cluster_indices now contains a list of indices for each cluster (a list of lists).
    # In the next step, you'll create a new point cloud to visualize the clusters
    # by assigning a color to each of them.

# TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_cloud = Cluster_Mask_Point(cluster_indices, white_cloud)

# TODO: Convert PCL data to ROS messages
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

# TODO: Publish ROS msg
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

# Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)

        # Convert the cluster from pcl to ROS using helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features, just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # labeled_features.append([feature, model_name])

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))
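
# The clustering helpers used by the two variants above (clustering /
# Euclidean_Clustering and color_clusters / Cluster_Mask_Point) are likewise
# defined elsewhere in the source. A minimal sketch using the same python-pcl
# API follows; the tolerance and cluster-size bounds are illustrative
# assumptions, and the return order mirrors the clustering() call above.
def clustering(cloud_objects, tolerance=0.05, min_size=100, max_size=3000):
    # Euclidean clustering on a spatial-only (XYZ) copy of the object cloud
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    tree = white_cloud.make_kdtree()
    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(tolerance)
    ec.set_MinClusterSize(min_size)
    ec.set_MaxClusterSize(max_size)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()
    return white_cloud, tree, cluster_indices

def color_clusters(white_cloud, cluster_indices):
    # Build a single XYZRGB cloud in which every cluster gets its own color
    cluster_color = get_color_list(len(cluster_indices))
    points = []
    for j, indices in enumerate(cluster_indices):
        for i in indices:
            points.append([white_cloud[i][0],
                           white_cloud[i][1],
                           white_cloud[i][2],
                           rgb_to_float(cluster_color[j])])
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(points)
    return cluster_cloud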
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    ### Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)


    ### Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    # Note: the template default of 1 is a poor choice of leaf size;
    # experiment and find an appropriate size (0.003 is used here)
    LEAF_SIZE = 0.003


    # Set the voxel (or leaf) size  
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    ### Statistical Outlier Removal Filter
    # Much like the previous filters, we start by creating a filter object: 
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point with a mean distance larger than the global (mean distance + x * std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()


    ### PassThrough Filter
    # Create a PassThrough filter object first across the Z axis
    passThroughZ = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passThroughZ.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passThroughZ.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud. 
    cloud_filtered = passThroughZ.filter()

    ## Now, Create a PassThrough filter object across the Y axis
    passThroughY = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'y'
    passThroughY.set_filter_field_name(filter_axis)
    axis_min = -0.5
    axis_max = 0.5
    passThroughY.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud. 
    cloud_filtered = passThroughY.filter()


    ### RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit 
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance 
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()


    ### Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    ### Euclidean Clustering
    # Go from XYZRGB to XYZ, since only spatial data is needed to build the k-d tree
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    # Build the k-d tree used by the Euclidean clustering search
    tree = white_cloud.make_kdtree()

    ### Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold 
    # as well as minimum and maximum cluster size (in points)
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.01)
    # Minimum and maximum number of points that make up an object's cluster
    ec.set_MinClusterSize(100)
    ec.set_MaxClusterSize(50000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()


    ### Create Cluster-Mask Point Cloud to visualize each cluster separately

    #Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    #Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)


    ### Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(extracted_outliers) 
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)


    ### Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)


    # Exercise-3 TODOs: 

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []

    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        # Complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(ros_cluster, using_hsv=True)
        normals = get_normals(ros_cluster)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # labeled_features.append([feature, model_name])

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1,-1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label,label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    print "Trying to match the pick list with the objects detected..."
    print "\n"
    # Publish the list of detected objects
    rospy.loginfo('Detected {} objects: {}'.format(len(detected_objects_labels), detected_objects_labels))

    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    object_list_param = rospy.get_param('/object_list')
    pick_list_objects = []
    for i in range(len(object_list_param)):
        pick_list_objects.append(object_list_param[i]['name'])

    print "\n"  
    print "Pick List includes: "
    print pick_list_objects
    print "\n"
    pick_set_objects = set(pick_list_objects)
    detected_set_objects = set(detected_objects_labels)


    if detected_set_objects == pick_set_objects:
        try:
            pr2_mover(detected_objects)
        except rospy.ROSInterruptException:
            pass
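
# pr2_mover() itself is defined elsewhere in the project. A minimal sketch of
# the pick-list matching and centroid computation it typically starts with is
# given below; it only relies on the '/object_list' parameter format already
# read in the callback above (a list of dicts with a 'name' key). The 'group'
# key and everything else here are illustrative assumptions, not the author's
# implementation.
def pr2_mover(object_list):
    object_list_param = rospy.get_param('/object_list')
    for item in object_list_param:
        name = item['name']
        group = item['group']

        # Find the detected object whose label matches this pick-list entry
        match = next((obj for obj in object_list if obj.label == name), None)
        if match is None:
            rospy.logwarn('Pick-list object {} was not detected'.format(name))
            continue

        # Centroid of the matched cluster, usable as the pick pose position
        points = ros_to_pcl(match.cloud).to_array()
        centroid = np.mean(points, axis=0)[:3]
        rospy.loginfo('{} ({} group) centroid: {}'.format(name, group, centroid))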