def CaptureFeaturesOfModel(modelName, numCapturesReq, numHistBinsHSV, numHistBinsNormals):
    """Spawn a model in the simulator and capture labeled feature vectors.

    Args:
        modelName: Gazebo model name; also used as the label for each feature.
        numCapturesReq: number of point-cloud captures to attempt.
        numHistBinsHSV: histogram bin count for the HSV color features.
        numHistBinsNormals: histogram bin count for the surface-normal features.

    Returns:
        List of ``[feature, modelName]`` pairs, one per successful capture.
        May be shorter than ``numCapturesReq`` when a capture fails or the
        histogram extraction raises.
    """
    labeledFeaturesModel = []
    spawn_model(modelName)
    numRetriesPerCapture = g_numRetriesPerCapture
    for capIndex in range(int(numCapturesReq)):
        try_count, sample_was_good, sample_cloud = CaptureCloud(numRetriesPerCapture)
        # '\r' + trailing comma: overwrite the same console line (Python 2 style).
        print('\rCapturePCLFeatures({} {}/{})({} tries)'.format(
            modelName, capIndex + 1, numCapturesReq, try_count + 1)),
        sys.stdout.flush()
        if (sample_was_good):
            # Extract histogram features; a nominally-good capture can still
            # make the histogram code raise, so treat that as a skipped sample.
            try:
                chists = pclproc.compute_color_histograms(sample_cloud,
                                                          numHistBinsHSV,
                                                          doConvertToHSV=True)
                normals = get_normals(sample_cloud)
                nhists = pclproc.compute_normal_histograms(normals, numHistBinsNormals)
                feature = np.concatenate((chists, nhists))
                labeledFeaturesModel.append([feature, modelName])
            except Exception as exc:
                # BUGFIX: the caught exception used to be discarded entirely;
                # include it so bad captures can be diagnosed, while keeping
                # the deliberate best-effort skip behavior.
                print("Ignoring bad histogram... ({})".format(exc))
    delete_model()
    return labeledFeaturesModel
def CaptureFeaturesxxx():
    """Capture color+normal histogram features for a fixed model list.

    Spawns each model, captures ``numCapturesPerModel`` point clouds
    (retrying empty captures), extracts HSV color and surface-normal
    histograms, and pickles the labeled features to ``training_set.sav``.
    """
    rospy.init_node('capture_node')
    models = [
        'beer', 'bowl', 'create', 'disk_part', 'hammer', 'plastic_cup',
        'soda_can']
    # Disable gravity and delete the ground plane
    initial_setup()
    labeled_features = []
    numCapturesPerModel = 4
    numRetriesPerCapture = 3
    for model_name in models:
        print("Feature capture on model '{}' ({}) target numAttempts".format(
            model_name, numCapturesPerModel))
        spawn_model(model_name)
        for i in range(numCapturesPerModel):
            # Retry up to numRetriesPerCapture times to get a non-empty
            # point cloud, then give up on this capture.
            # (Comment fixed: previously claimed "five attempts"; the
            # actual bound is numRetriesPerCapture == 3.)
            sample_was_good = False
            try_count = 0
            while not sample_was_good and try_count < numRetriesPerCapture:
                sample_cloud = capture_sample()
                sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()
                # Check for invalid (empty) clouds.
                if sample_cloud_arr.shape[0] == 0:
                    print('Invalid cloud detected')
                    try_count += 1
                else:
                    sample_was_good = True
            if (sample_was_good):
                # Extract histogram features
                chists = pclproc.compute_color_histograms(sample_cloud,
                                                          doConvertToHSV=True)
                normals = get_normals(sample_cloud)
                nhists = pclproc.compute_normal_histograms(normals)
                feature = np.concatenate((chists, nhists))
                labeled_features.append([feature, model_name])
        delete_model()
    # BUGFIX: the file handle from open(...) was never closed; use a context
    # manager so the pickle is flushed and the descriptor released.
    with open('training_set.sav', 'wb') as f:
        pickle.dump(labeled_features, f)
def run(self):
    """
    Produces training_set.sav
    Formatted as [param, data], where:
        - param : {'hsv':bool, 'bin':int}
            hsv : hsv colorspace flag (otherwise rgb)
            bin : number of histogram bins for feature extraction
        - data : [{name : [features]} for name in models]
    """
    initial_setup()
    # save collection parameters
    param = {'hsv': self._as_hsv, 'bin': self._nbins}
    data = {}
    m_n = len(self._models)
    for m_i, model_name in enumerate(self._models):
        # BUGFIX: progress was logged 0-based ("[0/N]"); show 1-based index.
        rospy.loginfo('[{}/{}] Processing Model Name : {}'.format(
            m_i + 1, m_n, model_name))
        model_data = []
        spawn_model(model_name)
        for i in range(self._steps):
            # get_cloud() : retry up to self._max_try times for a
            # non-empty capture.
            sample_cloud = None
            for j in range(self._max_try):
                sample_cloud = capture_sample()
                sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()
                if sample_cloud_arr.shape[0] == 0:
                    rospy.loginfo('')
                    print('Invalid cloud detected')
                    # BUGFIX: discard the empty cloud; previously, if every
                    # attempt failed, sample_cloud stayed bound to the last
                    # (empty) capture and the validity check below accepted it.
                    sample_cloud = None
                else:
                    break
            # save_data()
            if sample_cloud is not None:
                if self._as_feature:
                    # Extract histogram features
                    normals = get_normals(sample_cloud)
                    feature = cloud2feature(sample_cloud, normals,
                                            hsv=self._as_hsv,
                                            bins=self._nbins)
                    model_data.append(feature)
                else:
                    # Raw-cloud mode: store the capture itself.
                    model_data.append(sample_cloud)
        data[model_name] = model_data
        delete_model()
    # save data with pickle
    with open(self._path, 'wb') as f:
        pickle.dump([param, data], f)
def CaptureFeaturesOfModel(modelName, numCapturesReq, numRetriesPerCapture):
    """Capture feature vectors for one spawned model.

    Spawns ``modelName``, requests ``numCapturesReq`` point clouds (each
    capture internally retried up to ``numRetriesPerCapture`` times), and
    returns a list of ``[feature, modelName]`` pairs built from HSV color
    and surface-normal histograms.
    """
    features = []
    spawn_model(modelName)
    # Trailing comma keeps the cursor on this console line (Python 2 style).
    print('Capclouds({}):'.format(numCapturesReq)),
    for _ in range(numCapturesReq):
        cloudOk, cloud = CaptureCloud(numRetriesPerCapture)
        if not cloudOk:
            continue
        # Histogram features: HSV color + surface normals, concatenated.
        colorHists = pclproc.compute_color_histograms(cloud, doConvertToHSV=True)
        normalHists = pclproc.compute_normal_histograms(get_normals(cloud))
        features.append([np.concatenate((colorHists, normalHists)), modelName])
    delete_model()
    return features
else: nb_point = 5 rospy.init_node('capture_node') models = [ "biscuits", "soap", "soap2", "book", "glue", "sticky_notes", "snacks", "eraser" ] # Disable gravity and delete the ground plane initial_setup() labeled_features = [] for model_name in models: spawn_model(model_name) print(model_name) for i in range(nb_point): # make five attempts to get a valid a point cloud then give up sample_was_good = False try_count = 0 while not sample_was_good and try_count < 5: sample_cloud = capture_sample() sample_cloud_arr = ros_to_pcl(sample_cloud).to_array() # Check for invalid clouds. if sample_cloud_arr.shape[0] == 0: print('Invalid cloud detected') try_count += 1 else: sample_was_good = True
def pcl_callback(pcl_msg):
    """ROS subscriber callback: filter, segment, cluster and classify a cloud.

    Pipeline: voxel downsample -> passthrough crop -> RANSAC plane split
    (table vs. objects) -> Euclidean clustering -> per-cluster histogram
    features -> SVM prediction -> publish labeled DetectedObject list.
    Intermediate clouds are also saved to .pcd files for offline inspection.
    """
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.01
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()
    filename = 'voxel_downsampled.pcd'
    pcl.save(cloud_filtered, filename)

    # TODO: PassThrough Filter
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    # Keeps only points with 0.6 <= z <= 1.1 (table-top region).
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()
    filename = 'pass_through_filtered.pcd'
    pcl.save(cloud_filtered, filename)

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Call the segment funcion to obtain set of inlier indices
    # and model coefficients
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    # Extract inliers (points on the fitted plane == the table).
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    filename = 'cloud_table.pcd'
    pcl.save(cloud_table, filename)
    # Extract outliers (everything off the plane == the objects).
    cloud_objects = cloud_filtered.extract(inliers, negative=True)
    filename = 'cloud_objects.pcd'
    pcl.save(cloud_objects, filename)

    # TODO: Euclidean Clustering
    # Clustering works on geometry only, so drop color first.
    white_cloud = XYZRGB_to_XYZ(
        cloud_objects)  # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.01)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(10000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract(
    )  # cluster_indices now holds one list of point indices per object

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_mask_pub.publish(ros_cloud_cluster)

    # Exercise-3 TODOs:
    #rospy.init_node('capture_node')
    models = [\
        'beer', 'bowl', 'create', 'disk_part', 'hammer', 'plastic_cup',
        'soda_can']
    # Disable gravity and delete the ground plane
    # initial_setup()
    labeled_features = []
    # NOTE(review): spawning every model on each callback invocation looks
    # like leftover scaffolding copied from capture_features.py — verify it
    # is intentional inside a perception callback.
    for model_name in models:
        spawn_model(model_name)

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        sample_cloud = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # NOTE(review): model_name is the value left over from the spawn loop
        # above, so every cluster is recorded under the same label — confirm.
        labeled_features.append([feature, model_name])
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, floated 0.4 m above the cluster's first point.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)
# Object models to sample point clouds for.
models = [
    'beer', 'bowl', 'create', 'disk_part', 'hammer', 'plastic_cup',
    'soda_can']

# Disable gravity and delete the ground plane
initial_setup()
labeled_features = []

for model_name in models:
    spawn_model(model_name)
    # Five captures per model; each capture retries up to five times to
    # obtain a non-empty point cloud before giving up.
    for capture_idx in range(5):
        sample_was_good = False
        for attempt in range(5):
            sample_cloud = capture_sample()
            sample_cloud_arr = ros_to_pcl(sample_cloud).to_array()
            if sample_cloud_arr.shape[0] == 0:
                # Empty capture — try again.
                print('Invalid cloud detected')
            else:
                sample_was_good = True
                break
def pcl_callback(pcl_msg):
    """ROS subscriber callback for the PR2 pick-and-place perception pipeline.

    Pipeline: statistical outlier removal -> voxel downsample -> passthrough
    crop on z then x -> RANSAC plane split (table vs. objects) -> Euclidean
    clustering -> per-cluster histogram features -> SVM prediction ->
    publish labeled DetectedObject list -> hand off to pr2_mover().
    """
    # Exercise-2 TODOs:

    # TODO: Convert ROS msg to PCL data
    cloud_filtered = ros_to_pcl(pcl_msg)

    # TODO: Statistical Outlier Filtering
    # Much like the previous filters, we start by creating a filter object:
    outlier_filter = cloud_filtered.make_statistical_outlier_filter()
    # Set the number of neighboring points to analyze for any given point
    outlier_filter.set_mean_k(50)
    # Set threshold scale factor
    x = 0.01
    # Any point with a mean distance larger than global (mean distance+x*std_dev)
    # will be considered outlier
    outlier_filter.set_std_dev_mul_thresh(x)
    # Finally call the filter function for magic
    cloud_filtered = outlier_filter.filter()

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud_filtered.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    LEAF_SIZE = 0.01
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    # First crop: keep 0.6 <= z <= 1.1 (table-top height band).
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)
    cloud_filtered = passthrough.filter()
    # Second crop: keep 0.4 <= x <= 3.0 (drop points behind/too close to robot).
    passthrough = cloud_filtered.make_passthrough_filter()
    filter_axis = 'x'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.4
    axis_max = 3.0
    passthrough.set_filter_limits(axis_min, axis_max)
    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    max_distance = 0.05
    seg.set_distance_threshold(max_distance)
    # Call the segment funcion to obtain set of inlier indices
    # and model coefficients
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    # Extract inliers (points on the fitted plane == the table).
    cloud_table = cloud_filtered.extract(inliers, negative=False)
    # Extract outliers (everything off the plane == the objects).
    cloud_objects = cloud_filtered.extract(inliers, negative=True)

    # TODO: Euclidean Clustering
    # Clustering works on geometry only, so drop color first.
    white_cloud = XYZRGB_to_XYZ(
        cloud_objects)  # Apply function to convert XYZRGB to XYZ
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points)
    # NOTE: These are poor choices of clustering parameters
    # Your task is to experiment and find values that work for segmenting objects.
    ec.set_ClusterTolerance(0.03)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(50000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract(
    )  # cluster_indices now holds one list of point indices per object
    print("Numero de cluster #", len(cluster_indices))
    # print("aqui esta el cluster #",cluster_indices)

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))
    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0], white_cloud[indice][1],
                white_cloud[indice][2],
                rgb_to_float(cluster_color[j])
            ])
    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_cloud_objects = pcl_to_ros(cloud_objects)
    ros_cloud_table = pcl_to_ros(cloud_table)
    ros_cloud_cluster = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_cloud_objects)
    pcl_table_pub.publish(ros_cloud_table)
    pcl_mask_pub.publish(ros_cloud_cluster)

    # Exercise-3 TODOs:
    # Model lists below match the pick lists of the test worlds; only the
    # test3 list is active (the others are kept in the disabled string).
    # <arg name="world_name" value="$(find pr2_robot)/worlds/test1.world"/>
    '''
    models = [\
    'biscuits', 'soap', 'soap2']
    # <arg name="world_name" value="$(find pr2_robot)/worlds/test2.world"/>
    models = [\
    'biscuits', 'soap', 'book', 'soap2', 'glue',]
    '''
    # <arg name="world_name" value="$(find pr2_robot)/worlds/test3.world"/>
    models = [\
        'sticky_notes', 'book', 'snacks', 'biscuits', 'eraser', 'soap2',
        'soap', 'glue']
    # Disable gravity and delete the ground plane
    # initial_setup()
    labeled_features = []
    # NOTE(review): spawning every model on each callback invocation looks
    # like leftover scaffolding copied from capture_features.py — verify it
    # is intentional inside a perception callback.
    for model_name in models:
        spawn_model(model_name)

    # Classify the clusters! (loop through each detected cluster one at a time)
    detected_objects_labels = []
    detected_objects = []
    for index, pts_list in enumerate(cluster_indices):
        # Grab the points for the cluster from the extracted outliers (cloud_objects)
        pcl_cluster = cloud_objects.extract(pts_list)
        # TODO: convert the cluster from pcl to ROS using helper function
        sample_cloud = pcl_to_ros(pcl_cluster)
        # Extract histogram features
        # TODO: complete this step just as is covered in capture_features.py
        chists = compute_color_histograms(sample_cloud, using_hsv=True)
        normals = get_normals(sample_cloud)
        nhists = compute_normal_histograms(normals)
        feature = np.concatenate((chists, nhists))
        # NOTE(review): model_name is the value left over from the spawn loop
        # above, so every cluster is recorded under the same label — confirm.
        labeled_features.append([feature, model_name])
        # Make the prediction, retrieve the label for the result
        # and add it to detected_objects_labels list
        prediction = clf.predict(scaler.transform(feature.reshape(1, -1)))
        label = encoder.inverse_transform(prediction)[0]
        detected_objects_labels.append(label)
        # Publish a label into RViz, floated 0.4 m above the cluster's first point.
        label_pos = list(white_cloud[pts_list[0]])
        label_pos[2] += .4
        object_markers_pub.publish(make_label(label, label_pos, index))
        # Add the detected object to the list of detected objects.
        do = DetectedObject()
        do.label = label
        do.cloud = sample_cloud
        detected_objects.append(do)
    #print(detected_objects)
    rospy.loginfo('Detected {} objects: {}'.format(
        len(detected_objects_labels), detected_objects_labels))
    # Publish the list of detected objects
    # This is the output you'll need to complete the upcoming project!
    detected_objects_pub.publish(detected_objects)

    # Suggested location for where to invoke your pr2_mover() function within pcl_callback()
    # Could add some logic to determine whether or not your object detections are robust
    # before calling pr2_mover()
    detected_objects_list = detected_objects
    try:
        pr2_mover(detected_objects_list)
    except rospy.ROSInterruptException:
        # Node shutdown mid-motion: exit the callback quietly.
        pass