Example #1

A feature-capture loop that records labeled histogram features for each model, followed by a perception callback that filters, segments, clusters, and classifies the scene.
    # Disable gravity and delete the ground plane
    initial_setup()
    labeled_features = []

    for model_name in models:
        spawn_model(model_name)
        print(model_name)
        for i in range(nb_point):
            # make five attempts to get a valid point cloud, then give up
            sample_was_good = False
            try_count = 0
            while not sample_was_good and try_count < 5:
                sample_cloud = capture_sample()
                sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()

                # Check for invalid clouds.
                if sample_cloud_arr.shape[0] == 0:
                    print('Invalid cloud detected')
                    try_count += 1
                else:
                    sample_was_good = True
            # Give up on this sample if no valid cloud was ever captured
            if not sample_was_good:
                continue

            # Extract histogram features and store them with the model label
            feature = extract_features(sample_cloud, get_normals)
            labeled_features.append([feature, model_name])

        delete_model()

    pickle.dump(labeled_features, open('training_set.sav', 'wb'))
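
Both the capture loop above and the callbacks below lean on an extract_features helper that is not shown. Here is a minimal sketch, assuming the compute_color_histograms and compute_normal_histograms helpers from the course's sensor_stick package; note that the snippets call extract_features with slightly different signatures (some pass get_normals positionally, some pass using_hsv), so treat this as illustrative rather than definitive:

import numpy as np
from sensor_stick.features import compute_color_histograms, compute_normal_histograms


def extract_features(sample_cloud, get_normals, using_hsv=True):
    # Color histograms; HSV is usually more robust to lighting than RGB
    chists = compute_color_histograms(sample_cloud, using_hsv=using_hsv)
    # Histograms of surface normals, fetched via the get_normals service proxy
    normals = get_normals(sample_cloud)
    nhists = compute_normal_histograms(normals)
    # Concatenate color and normal histograms into one feature vector
    return np.concatenate((chists, nhists))
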
def pcl_callback(pcl_msg):

    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data

    pcl_cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling

    # Create a VoxelGrid filter object for our input point cloud
    vox = pcl_cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size
    # A leaf of 0.01 m gives a 1 cm cube per voxel; experiment to find
    # the size that best balances detail against processing time
    LEAF_SIZE = 0.01

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter

    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.77
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()

    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)

    # Set threshold scale factor
    x = 1.0

    # Any point with a mean distance larger than the global
    # (mean distance + x * std_dev) will be considered an outlier
    outlier_filter.set_std_dev_mul_thresh(x)

    # Call the filter function to apply the outlier removal
    cloud_filtered = outlier_filter.filter()

    # TODO: RANSAC Plane Segmentation

    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.001
    seg.set_distance_threshold(max_distance)

    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers

    # Extract inliers
    cloud_table = cloud_filtered.extract(inliers, negative=False)

    # Extract outliers
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering

    # Strip color information so clustering operates on XYZ geometry alone
    white_cloud = XYZRGB_to_XYZ(cloud_objects)
    # Build a k-d tree for efficient neighbor searches during clustering
    tree = white_cloud.make_kdtree()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately

    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # These values were found by experiment; tune them for your own scene
    ec.set_ClusterTolerance(0.04)
    ec.set_MinClusterSize(60)
    ec.set_MaxClusterSize(1500)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # Assign a color corresponding to each segmented object in the scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for idx in indices:
            color_cluster_point_list.append([
                white_cloud[idx][0], white_cloud[idx][1],
                white_cloud[idx][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create a new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages

    table_msg = pcl_to_ros(cloud_table)
    objects_msg = pcl_to_ros(cloud_objects)
    cluster_msg = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages

    pcl_table_pub.publish(table_msg)
    pcl_objects_pub.publish(objects_msg)
    pcl_cluster_pub.publish(cluster_msg)

    # Exercise-3 TODOs:

    # Classify the clusters! (loop through each detected cluster one at a time)

    detected_objects_labels = []
    detected_objects = []

    for j, indices in enumerate(cluster_indices):

        # Grab the points for the cluster
        cluster = cloud_objects.extract(indices, negative=False)
        cluster_msg = pcl_to_ros(cluster)

        # Compute the associated feature vector
        features = extract_features(cluster_msg, using_hsv=True)
        features = X_scaler.transform([features])

        # Make the prediction
        prediction = clf.predict(features)

        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[indices[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, j))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = cluster_msg
        detected_objects.append(do)

    # Publish the list of detected objects

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    detected_objects_pub.publish(detected_objects)
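
The callbacks reference publishers (pcl_objects_pub, pcl_table_pub, pcl_cluster_pub, object_markers_pub, detected_objects_pub) and a trained classifier (clf, encoder, X_scaler; Example #2 below calls the scaler simply scaler) that must be created at node startup. A minimal sketch of that scaffolding, assuming the sensor_stick message types from the course exercises; the topic names and the model.sav layout are assumptions, not confirmed by the source:

import pickle

import rospy
from sensor_msgs.msg import PointCloud2
from visualization_msgs.msg import Marker
from sensor_stick.msg import DetectedObjectsArray
from sklearn.preprocessing import LabelEncoder

if __name__ == '__main__':
    # Initialize the ROS node
    rospy.init_node('object_recognition', anonymous=True)

    # Subscribe to the camera's point cloud (topic name is an assumption)
    pcl_sub = rospy.Subscriber('/sensor_stick/point_cloud', PointCloud2,
                               pcl_callback, queue_size=1)

    # Publishers used inside pcl_callback
    pcl_objects_pub = rospy.Publisher('/pcl_objects', PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher('/pcl_table', PointCloud2, queue_size=1)
    pcl_cluster_pub = rospy.Publisher('/pcl_cluster', PointCloud2, queue_size=1)
    object_markers_pub = rospy.Publisher('/object_markers', Marker, queue_size=1)
    detected_objects_pub = rospy.Publisher('/detected_objects',
                                           DetectedObjectsArray, queue_size=1)

    # Load the trained model; the dictionary keys are assumed to match
    # whatever the training step saved (see the training sketch at the end)
    model = pickle.load(open('model.sav', 'rb'))
    clf = model['classifier']
    encoder = LabelEncoder()
    encoder.classes_ = model['classes']
    X_scaler = model['scaler']

    # Spin until shutdown
    rospy.spin()
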
Example #2

This variant of the callback reads its tuning parameters (Z_AXIS_MIN, Z_AXIS_MAX, Y_AXIS_MIN, Y_AXIS_MAX, MEAN_K, STD_DEV, LEAF_SIZE, MAX_DISTANCE, TOLERANCE, MIN_SIZE, MAX_SIZE) from module-level constants, and either hands the detected clusters to a PR2 mover for pick and place or writes them out as YAML.

def pcl_callback(pcl_msg):
    # PART 1 : FILTERING AND RANSAC

    # Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    print("PassThrough Filter")
    # PassThrough Filter
    passthrough = cloud.make_passthrough_filter()
    filter_axis = "z"
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(Z_AXIS_MIN, Z_AXIS_MAX)
    cloud_filtered = passthrough.filter()

    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = "y"
    passthrough.set_filter_field_name(filter_axis)
    passthrough.set_filter_limits(Y_AXIS_MIN, Y_AXIS_MAX)
    cloud_filtered = passthrough.filter()

    print("Outlier removal")
    # Outlier removal
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(MEAN_K)
    outlier_filter.set_std_dev_mul_thresh(STD_DEV)
    cloud_filtered = outlier_filter.filter()

    print("Voxel Grid Downsampling")
    # Voxel Grid Downsampling
    vox = cloud_filtered.make_voxel_grid_filter()
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    cloud_filtered = vox.filter()

    print("RANSAC Plane Segmentation")
    # RANSAC Plane Segmentation
    seg = cloud_filtered.make_segmenter()
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    seg.set_distance_threshold(MAX_DISTANCE)
    inliers, coefficient = seg.segment()

    print("Extract inliers and outliers")
    # Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)

    # PART 2 : CLUSTERING FOR SEGMENTATION

    print("Euclidean Clustering")
    # Euclidean Clustering
    # Strip color information so clustering operates on XYZ geometry alone
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    # Build a k-d tree for efficient neighbor searches during clustering
    tree = white_cloud.make_kdtree()

    print(
        "Create Cluster-Mask Point Cloud to visualize each cluster separately")
    # Create Cluster-Mask Point Cloud to visualize each cluster separately
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    ec.set_ClusterTolerance(TOLERANCE)
    ec.set_MinClusterSize(MIN_SIZE)
    ec.set_MaxClusterSize(MAX_SIZE)

    print("Search the k-d tree for clusters")
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()
    # Assign a color corresponding to each segmented object in the scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for idx in indices:
            color_cluster_point_list.append([
                white_cloud[idx][0], white_cloud[idx][1],
                white_cloud[idx][2],
                rgb_to_float(cluster_color[j])
            ])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # Convert PCL data to ROS messages
    ros_cloud_table = pcl_to_ros(extracted_inliers)
    ros_cloud_objects = pcl_to_ros(extracted_outliers)

    # Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_cluster_pub.publish(ros_cluster_cloud)

    # PART 3 CLASSIFICATION

    print("Classify the clusters!")
    # Classify the clusters!
    # Store each object's point cloud, keyed by label, before handing
    # them to the PR2 mover
    separate_objects_cloud = {"table": extracted_inliers}
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = extracted_outliers.extract(pts_list)
        # Convert the cluster from PCL to a ROS message using the helper function
        ros_cluster = pcl_to_ros(pcl_cluster)

        # Extract histogram features
        feature = extract_features(ros_cluster, get_normals)

        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        X = feature.reshape(1, -1)
        if scaler:
            X = scaler.transform(X)
        prediction = clf.predict(X)
        label = encoder.inverse_transform(prediction)[0]
        separate_objects_cloud[label] = pcl_cluster
        detected_objects_labels.append(label)

        # Publish a label into RViz
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))

        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = ros_cluster
        detected_objects.append(do)

    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))

    # Publish the list of detected objects
    detected_objects_pub.publish(detected_objects)

    if len(separate_objects_cloud) > 1:
        if pickup:  # perform pick and place.
            try:
                pr2_mover.move_next(separate_objects_cloud)
            except rospy.ROSInterruptException:
                pass
        else:  # just write the results out to the YAML file.
            send_all_to_yaml(separate_objects_cloud)
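
Finally, the clf, encoder, and scaler consumed at prediction time have to come from somewhere. Here is a sketch of a training step over the training_set.sav written in Example #1, using scikit-learn; the SVM kernel choice and the model.sav dictionary layout are assumptions matching the loading sketch above:

import pickle

import numpy as np
from sklearn import svm
from sklearn.preprocessing import LabelEncoder, StandardScaler

# Load the labeled features captured earlier
training_set = pickle.load(open('training_set.sav', 'rb'))
X = np.array([item[0] for item in training_set])
labels = [item[1] for item in training_set]

# Fit a scaler on the training features; the same scaler must be
# applied to features at prediction time
scaler = StandardScaler().fit(X)
X_train = scaler.transform(X)

# Encode the string labels as integers
encoder = LabelEncoder()
y_train = encoder.fit_transform(labels)

# Train a support vector classifier (kernel choice is an assumption)
clf = svm.SVC(kernel='linear')
clf.fit(X_train, y_train)

# Save everything the perception node needs at prediction time
pickle.dump({'classifier': clf, 'classes': encoder.classes_, 'scaler': scaler},
            open('model.sav', 'wb'))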