def call_back(self, msg):
    """Object-list callback: maintain the landmark map and broadcast the
    estimated /velodyne -> /odom transform.

    The first message is adopted directly as the initial map; afterwards
    each frame is associated against the map and used to estimate the
    lidar motion (self.lidar_pose).
    """
    #rospy.loginfo("Process Object List")
    self.obj_list = ObjectPoseList()
    self.obj_list = msg  # keep a reference to the incoming list (ctor above is discarded)
    self.map_confidence = ObjectPoseList()
    self.map_confidence.header.frame_id = self.frame_id
    self.map.header.frame_id = self.frame_id
    self.matching = []     # reset per-frame association result
    self.remove_list = []  # reset per-frame removal candidates
    self.obj_list.header.frame_id = self.frame_id
    # First obj_list received: bootstrap the map from the detections.
    if self.first:
        self.first = False
        self.map = self.obj_list
        self.old_pos = [0, 0, 0]
        self.new_pos = [0, 0, 0]
        for i in range(self.map.size):
            self.map.list[i].occupy = False
            # seed every landmark with the configured initial variance
            self.map.list[i].varianceX = self.init_varX
            self.map.list[i].varianceY = self.init_varY
    else:
        self.old_pos = self.new_pos
        for i in range(self.map.size):
            self.map.list[i].occupy = False  # clear association flags
        self.data_associate()       # match current detections to map entries
        self.estimate_transform()   # update self.lidar_pose from the matches
        self.map = self.obj_list
    # Broadcast the current lidar pose as the /velodyne frame in /odom.
    # NOTE(review): on the very first frame self.lidar_pose is still the
    # default-constructed PoseStamped -- confirm that is intended.
    self.br.sendTransform((self.lidar_pose.pose.position.x,
                           self.lidar_pose.pose.position.y,
                           self.lidar_pose.pose.position.z),
                          (self.lidar_pose.pose.orientation.x,
                           self.lidar_pose.pose.orientation.y,
                           self.lidar_pose.pose.orientation.z,
                           self.lidar_pose.pose.orientation.w),
                          self.obj_list.header.stamp, "/velodyne", "/odom")
    #self.update_map()
    # (a commented-out block -- "for i in range(self.map.size): ..." -- was
    # truncated here in the original source)
def call_back(self, msg):
    """Cluster-list callback: render each cluster to an image, classify it,
    and publish an ObjectPoseList holding one pose (the cluster centroid)
    and one type label per cluster.

    Fix: empty clusters are now skipped -- the original divided by
    ``pcl_size`` and raised ZeroDivisionError when a cluster contained no
    points.  The published ``size`` now counts the objects actually
    appended instead of the raw cluster count.
    """
    obj_list = ObjectPoseList()
    obj_list.header.frame_id = msg.header.frame_id
    obj_list.size = 0
    cluster_num = len(msg.list)
    #pcl_size = len(msg.poses)
    for i in range(cluster_num):
        pcl_size = len(msg.list[i].poses)
        if pcl_size == 0:
            continue  # guard: nothing to project or average
        # fresh canvas for this cluster's projection
        self.image = np.zeros((int(self.height), int(self.width), 3), np.uint8)
        img = []
        avg_x = avg_y = avg_z = 0
        for j in range(pcl_size):
            p = msg.list[i].poses[j].position
            avg_x += p.x
            avg_y += p.y
            avg_z += p.z
            img.append([p.x, p.y, p.z])
        self.toIMG(pcl_size, img, 'img')
        model_type = self.classify()
        # ***************************************************************
        # Add to object list: centroid position + predicted class
        # ***************************************************************
        obj = ObjectPose()
        obj.position.x = float(avg_x) / pcl_size
        obj.position.y = float(avg_y) / pcl_size
        obj.position.z = float(avg_z) / pcl_size
        obj.type = model_type
        obj_list.list.append(obj)
        #cv2.imwrite( "Image.jpg", self.image)
        #self.index = self.index + 1
        #print "Save image"
    obj_list.size = len(obj_list.list)
    self.pub_obj.publish(obj_list)
    self.drawRviz(obj_list)
def call_back(self, msg):
    """Classify each incoming point-cloud cluster and republish the results.

    Every cluster is rotated about z so its centroid lies on the +x axis
    (for a more consistent projection), projected onto the XY/YZ/XZ planes,
    rendered, and classified.  The cluster centroids become the published
    object positions.  Note: the incoming message's poses are mutated
    in place by the rotation.
    """
    clusters = msg
    result = ObjectPoseList()
    result.header.frame_id = clusters.header.frame_id
    result.size = 0
    n_clusters = len(clusters.list)
    for idx in range(n_clusters):
        # fresh canvas for this cluster's projections
        self.image = np.zeros((int(self.height), int(self.width), 3), np.uint8)
        n_points = len(clusters.list[idx].poses)
        # ======= Coordinate transform for better projection performance ======
        yaw = math.atan2(clusters.centroids[idx].y, clusters.centroids[idx].x)
        quat = tf.transformations.quaternion_from_euler(0., 0., -yaw)
        helper = tf.TransformerROS()
        rot_mat = helper.fromTranslationRotation([0, 0, 0], quat)
        for pose in clusters.list[idx].poses:
            rotated = np.dot(rot_mat, np.array(
                [pose.position.x, pose.position.y, pose.position.z, 1]))
            pose.position.x = rotated[0]
            pose.position.y = rotated[1]
            pose.position.z = rotated[2]
        # ======= project to XY, YZ, XZ plane =======
        plane_xy = [[p.position.x, p.position.y] for p in clusters.list[idx].poses]
        plane_yz = [[p.position.y, p.position.z] for p in clusters.list[idx].poses]
        plane_xz = [[p.position.x, p.position.z] for p in clusters.list[idx].poses]
        self.toIMG(n_points, plane_xy, 'xy')
        self.toIMG(n_points, plane_yz, 'yz')
        self.toIMG(n_points, plane_xz, 'xz')
        model_type = self.classify()
        # ***************************************************************
        # Add to object list
        # ***************************************************************
        obj = ObjectPose()
        obj.position.x = msg.centroids[idx].x
        obj.position.y = msg.centroids[idx].y
        obj.position.z = msg.centroids[idx].z
        obj.varianceX = msg.varianceX
        obj.varianceY = msg.varianceY
        obj.type = model_type
        result.list.append(obj)
        #cv2.imwrite( "Image" + str(self.index) + ".jpg", self.image)
        #self.index = self.index + 1
        #print "Save image"
    result.size = n_clusters
    self.pub_obj.publish(result)
    self.drawRviz(result)
def call_back(self, msg):
    """Object-list callback: fuse the incoming detections into the
    persistent map, collect confident landmarks into ``map_confidence``,
    delete landmarks whose probability has decayed, and republish the map."""
    #rospy.loginfo("Process Object List")
    self.obj_list = ObjectPoseList()
    self.obj_list = msg  # reference the incoming message (ctor above is discarded)
    self.map_confidence = ObjectPoseList()
    self.map_confidence.header.frame_id = self.frame_id
    self.map.header.frame_id = self.frame_id
    self.map.robot = msg.robot  # carry the robot pose along with the map
    self.matching = []
    self.remove_list = []
    robot_x = self.obj_list.robot.position.x
    robot_y = self.obj_list.robot.position.y
    robot_z = self.obj_list.robot.position.z
    self.robot_pose = [robot_x, robot_y, robot_z]
    self.obj_list.header.frame_id = self.frame_id
    if self.first:
        # First frame: adopt the detections directly as the initial map.
        self.map = self.obj_list
        self.first = False
        for i in range(self.map.size):
            self.map.list[i].occupy = False
            #self.map.list[i].varianceX = self.init_varX
            #self.map.list[i].varianceY = self.init_varY
    else:
        for i in range(self.map.size):
            self.map.list[i].occupy = False  # clear association flags
        self.data_associate()
        self.update_map()
    # Confidence filtering: keep high-probability landmarks, queue
    # low-probability ones for removal.
    # NOTE(review): scipy.stats.norm takes a *standard deviation* as its
    # second argument, but a variance is passed here -- confirm intended.
    for i in range(self.map.size):
        mean_x, mean_y = self.map.list[i].position.x, self.map.list[
            i].position.y
        var_x, var_y = self.map.list[i].varianceX, self.map.list[
            i].varianceY
        # pdf evaluated at the mean == peak height; shrinks as spread grows
        prob_x = scipy.stats.norm(mean_x, var_x).pdf(mean_x)
        prob_y = scipy.stats.norm(mean_y, var_y).pdf(mean_y)
        #print prob_x, prob_y
        if prob_x > self.prob_threshold and prob_y > self.prob_threshold:
            self.map_confidence.list.append(self.map.list[i])
        elif prob_x < self.remove_threshold and prob_y < self.remove_threshold:
            self.remove_list.append(i)
    remove_num = 0  # offset keeps the stored indices correct while removing
    for i in self.remove_list:
        del self.map.list[i - remove_num]
        remove_num = remove_num + 1
    self.map.size = len(self.map.list)
    self.map.header.stamp = rospy.Time.now()
    self.map_confidence.size = len(self.map_confidence.list)
    self.map.header.stamp = rospy.Time.now()
    self.map_confidence.header.stamp = rospy.Time.now()
    self.pub_obj.publish(self.map)
    self.drawRviz(self.map)
def call_back(msg):
    """Transform every obstacle in the incoming list from the velodyne
    frame into /odom and republish on ``pub_obs``.

    In MOOS mode the positions are additionally expressed relative to the
    waypoint frame /wp_0 (its translation is subtracted).  TF lookup
    failures are swallowed deliberately (best-effort: the frame is dropped).
    Relies on module-level globals: tf_, transformer, is_moos, pub_obs.
    """
    try:
        obj_list = ObjectPoseList()
        obj_list = msg  # reference the incoming message (ctor above is discarded)
        position, quaternion = tf_.lookupTransform(
            "/odom", "/velodyne", rospy.Time(0))
        transpose_matrix = transformer.fromTranslationRotation(position, quaternion)
        if is_moos:
            # wait up to 2 s for the waypoint frame before looking it up
            dur = rospy.Duration(2)
            tf_.waitForTransform("/odom", "/wp_0", rospy.Time(), dur)
            (trans, rot) = tf_.lookupTransform("/odom", "/wp_0", rospy.Time(0))
            for obs_index in range(obj_list.size):
                origin_p = np.array([obj_list.list[obs_index].position.x,
                                     obj_list.list[obs_index].position.y,
                                     obj_list.list[obs_index].position.z, 1])
                new_p = np.dot(transpose_matrix, origin_p)
                # offset by the waypoint origin (z left unchanged)
                obj_list.list[obs_index].position.x = new_p[0] - trans[0]
                obj_list.list[obs_index].position.y = new_p[1] - trans[1]
            obj_list.header.frame_id = "odom"
        else:
            for obs_index in range(obj_list.size):
                origin_p = np.array([obj_list.list[obs_index].position.x,
                                     obj_list.list[obs_index].position.y,
                                     obj_list.list[obs_index].position.z, 1])
                new_p = np.dot(transpose_matrix, origin_p)
                obj_list.list[obs_index].position.x = new_p[0]
                obj_list.list[obs_index].position.y = new_p[1]
            obj_list.header.frame_id = "odom"
        pub_obs.publish(obj_list)
    except (LookupException, ConnectivityException, ExtrapolationException):
        print "Nothing Happen"  # best-effort: drop this frame on TF errors
def call_back(msg): try: #print ("Process Object List") obj_list = ObjectPoseList() obj_list = msg position, quaternion = tf_.lookupTransform( "/odom", "/velodyne",rospy.Time(0)) transpose_matrix = transformer.fromTranslationRotation(position, quaternion) robot_pose = np.dot(transpose_matrix, [0, 0, 0, 1]) obj_list.robot.position.x = robot_pose[0] obj_list.robot.position.y = robot_pose[1] obj_list.robot.position.z = robot_pose[2] obj_list.robot.orientation.x = quaternion[0] obj_list.robot.orientation.y = quaternion[1] obj_list.robot.orientation.z = quaternion[2] obj_list.robot.orientation.w = quaternion[3] for obj_index in range(obj_list.size): center_x = obj_list.list[obj_index].position.x center_y = obj_list.list[obj_index].position.y center_z = obj_list.list[obj_index].position.z center = np.array([center_x, center_y, center_z, 1]) new_center = np.dot(transpose_matrix, center) obj_list.list[obj_index].position.x = new_center[0] obj_list.list[obj_index].position.y = new_center[1] obj_list.list[obj_index].position.z = new_center[2] obj_list.header.frame_id = "/odom" pub_obj.publish(obj_list) except (LookupException, ConnectivityException, ExtrapolationException): print "Nothing Happen"
def __init__(self): self.br = tf.TransformBroadcaster() # ======== Subscriber ======== rospy.Subscriber("/obj_list", ObjectPoseList, self.call_back, queue_size=1) #rospy.Subscriber("/waypointList", WaypointList, call_back, queue_size=10) # ======== Publisher ======== self.pub_obj = rospy.Publisher("/obj_list/map", ObjectPoseList, queue_size=1) self.pub_marker = rospy.Publisher("/obj_marker/map", MarkerArray, queue_size=1) self.pub_path = rospy.Publisher("/slam/path", Path, queue_size=1) self.pub_pose = rospy.Publisher("/slam/pose", PoseStamped, queue_size=1) #pub_rviz = rospy.Publisher("/wp_path", Marker, queue_size = 1) # ======== Get ROS parameter ======== self.visual = rospy.get_param('~visual', False) # ======== Declare Variable ======== self.radius = 0 self.lidar_pose = PoseStamped() self.cosine = None self.sine = None self.path = Path() self.old_obj_list = None self.first = True self.old_pos = [0, 0, 0] self.new_pos = [0, 0, 0] self.map = ObjectPoseList() self.obj_list = None self.frame_id = "odom" self.matching = [] self.remove_list = [] self.remove_threshold = 0.05 self.r_threshold = 10 self.prob_threshold = 0.3 self.update_range = 40. self.punish_range = 20. self.classify_range = 10. self.prior_mean = None self.prior_cov = None self.punish_no_detect = 2 self.punish_unclassify = 1.5 self.measurement_var = 1. self.init_varX = 2. self.init_varY = 2. self.kernel = scipy.stats.norm(loc=0, scale=0.5) # ======== Get from odometry ======= #self.pos_covariance = np.diag([3., 3.]) self.sensor_error = 1.
def __init__(self):
    """Set up the classification-map node: synchronize filtered odometry
    with the classified object list and publish the fused map + markers."""
    # ======== Subscriber ========
    odom_sub = Subscriber("/odometry/filtered", Odometry)
    obj_sub = Subscriber("/obj_list/classify", ObjectPoseList)
    # Pair odometry and object-list messages arriving within 0.1 s.
    ats = ApproximateTimeSynchronizer((odom_sub, obj_sub), queue_size=1, slop=0.1)
    ats.registerCallback(self.call_back)
    #rospy.Subscriber("/object_list", ObjectPoseList, self.call_back, queue_size=10)
    #rospy.Subscriber("/waypointList", WaypointList, call_back, queue_size=10)
    # ======== Publisher ========
    self.pub_obj = rospy.Publisher("/obj_list/classify/map", ObjectPoseList, queue_size=1)
    self.pub_marker = rospy.Publisher("/obj_classify/map", MarkerArray, queue_size=1)
    #pub_rviz = rospy.Publisher("/wp_path", Marker, queue_size = 1)
    # ======== Declare Variable ========
    self.first = True              # first-message flag (map bootstrap)
    self.robot_pose = None
    self.map = ObjectPoseList()    # persistent landmark map
    self.obj_list = None
    self.frame_id = "odom"
    self.matching = []             # per-frame data-association result
    self.remove_list = []          # landmark indices queued for deletion
    self.remove_threshold = 0.05   # below this prob -> remove landmark
    self.r_threshold = 5           # association distance gate
    self.prob_threshold = 0.3      # above this prob -> confident landmark
    self.velodyne_range = 30.      # sensor range limit
    self.prior_mean = None
    self.prior_cov = None
    self.measurement_var = 1.
    self.init_varX = 1.            # initial landmark variance (x)
    self.init_varY = 1.            # initial landmark variance (y)
    self.kernel = scipy.stats.norm(loc=0, scale=0.5)
    # ======== Get from odometry =======
    self.sensor_error = 1.
def __init__(self):
    """Set up the object-map node: subscribe to the classified object list
    and publish the fused map + markers."""
    # ======== Subscriber ========
    rospy.Subscriber("/obj_list/classify", ObjectPoseList, self.call_back, queue_size=10)
    #rospy.Subscriber("/waypointList", WaypointList, call_back, queue_size=10)
    # ======== Publisher ========
    self.pub_obj = rospy.Publisher("/object_list/map", ObjectPoseList, queue_size=1)
    self.pub_marker = rospy.Publisher("/obj_classify", MarkerArray, queue_size=1)
    #pub_rviz = rospy.Publisher("/wp_path", Marker, queue_size = 1)
    # ======== Declare Variable ========
    self.first = True              # first-message flag (map bootstrap)
    self.robot_pose = None
    self.map = ObjectPoseList()    # persistent landmark map
    self.obj_list = None
    self.matching = []             # per-frame data-association result
    self.remove_list = []          # landmark indices queued for deletion
    self.remove_threshold = 0.05   # below this prob -> remove landmark
    self.r_threshold = 5           # association distance gate
    self.prob_threshold = 0.3      # above this prob -> confident landmark
    self.velodyne_range = 30.      # sensor range limit
    self.prior_mean = None
    self.prior_cov = None
    self.measurement_var = 1.
    self.init_varX = 1.            # initial landmark variance (x)
    self.init_varY = 1.            # initial landmark variance (y)
    self.kernel = scipy.stats.norm(loc=0, scale=0.5)
    # ======== Get from odometry =======
    #self.pos_covariance = np.diag([3., 3.])
    self.sensor_error = 1.
def call_back(self, msg):
    """Dataset-collection callback: for each detected object, rotate its
    point cloud so the centroid lies on the +x axis, project it onto the
    XY/YZ/XZ planes, render into ``self.image``, and save the image to
    disk (skipped when no camera ROI was available)."""
    print "recieve msg"
    obj_list = ObjectPoseList()
    obj_list = msg  # reference the incoming message (ctor above is discarded)
    cluster_num = obj_list.size
    #cluster_num = len(tf_points.list)
    #pcl_size = len(msg.poses)
    for i in range(cluster_num):
        self.no_camera_img = False
        # crop the camera ROI for this object (may set self.no_camera_img)
        self.get_roi_image(obj_list.list[i].img)
        tf_points = PoseArray()
        tf_points = obj_list.list[i].pcl_points
        centroids = Point()
        centroids = obj_list.list[i].position
        self.image = np.zeros((int(self.height), int(self.width), 3), np.uint8)
        plane_xy = []
        plane_yz = []
        plane_xz = []
        pcl_size = len(tf_points.poses)
        # ======= Coordinate transform for better project performance ======
        # Rotate about z so the cluster centroid lies on the +x axis.
        position = [0, 0, 0]
        rad = math.atan2(centroids.y, centroids.x)
        quaternion = tf.transformations.quaternion_from_euler(0., 0., -rad)
        transformer = tf.TransformerROS()
        transpose_matrix = transformer.fromTranslationRotation(
            position, quaternion)
        for m in range(pcl_size):
            new_x = tf_points.poses[m].position.x
            new_y = tf_points.poses[m].position.y
            new_z = tf_points.poses[m].position.z
            orig_point = np.array([new_x, new_y, new_z, 1])
            new_center = np.dot(transpose_matrix, orig_point)
            # in-place update: mutates the poses inside the incoming message
            tf_points.poses[m].position.x = new_center[0]
            tf_points.poses[m].position.y = new_center[1]
            tf_points.poses[m].position.z = new_center[2]
        # ======= project to XY, YZ, XZ plane =======
        for j in range(pcl_size):
            plane_xy.append([
                tf_points.poses[j].position.x, tf_points.poses[j].position.y
            ])
            plane_yz.append([
                tf_points.poses[j].position.y, tf_points.poses[j].position.z
            ])
            plane_xz.append([
                tf_points.poses[j].position.x, tf_points.poses[j].position.z
            ])
        self.toIMG(pcl_size, plane_xy, 'xy')
        self.toIMG(pcl_size, plane_yz, 'yz')
        self.toIMG(pcl_size, plane_xz, 'xz')
        #cv2.imwrite( "Image.jpg", self.image)
        if not self.no_camera_img:
            cv2.imwrite("pcl/Image" + str(self.index) + ".jpg", self.image)
            self.index = self.index + 1
            print "Save image"
            rospy.sleep(0.3)  # throttle disk writes
def call_back(self, odom_msg, obj_msg): rospy.loginfo("Process Object List") self.obj_list = ObjectPoseList() self.obj_list = obj_msg self.map_confidence = ObjectPoseList() self.map_confidence.header.frame_id = self.frame_id self.map.header.frame_id = self.frame_id self.matching = [] self.remove_list = [] vlp2robot = transformations.quaternion_from_euler(0, 0, np.pi) vlp2robot_matrix = transformer.fromTranslationRotation([0, 0, 0], vlp2robot) position = [ odom_msg.pose.pose.position.x, odom_msg.pose.pose.position.y, odom_msg.pose.pose.position.z ] quaternion = [ odom_msg.pose.pose.orientation.x, odom_msg.pose.pose.orientation.y, odom_msg.pose.pose.orientation.z, odom_msg.pose.pose.orientation.w ] transpose_matrix = transformer.fromTranslationRotation( position, quaternion) self.robot_pose = np.dot(transpose_matrix, [0, 0, 0, 1]) for obj_index in range(self.obj_list.size): center_x = self.obj_list.list[obj_index].position.x center_y = self.obj_list.list[obj_index].position.y center_z = self.obj_list.list[obj_index].position.z center = np.array([center_x, center_y, center_z, 1]) robot_scene = np.dot(vlp2robot_matrix, center) new_center = np.dot(transpose_matrix, robot_scene) self.obj_list.list[obj_index].position.x = new_center[0] self.obj_list.list[obj_index].position.y = new_center[1] self.obj_list.list[obj_index].position.z = new_center[2] self.obj_list.list[obj_index].varianceX = odom_msg.pose.covariance[ 0] self.obj_list.list[obj_index].varianceY = odom_msg.pose.covariance[ 7] self.obj_list.header.frame_id = self.frame_id if self.first: self.map = self.obj_list self.first = False for i in range(self.map.size): self.map.list[i].occupy = False #self.map.list[i].varianceX = self.init_varX #self.map.list[i].varianceY = self.init_varY else: for i in range(self.map.size): self.map.list[i].occupy = False self.data_associate() self.update_map() for i in range(self.map.size): mean_x, mean_y = self.map.list[i].position.x, self.map.list[ i].position.y var_x, var_y = 
self.map.list[i].varianceX, self.map.list[ i].varianceY prob_x = scipy.stats.norm(mean_x, var_x).pdf(mean_x) prob_y = scipy.stats.norm(mean_y, var_y).pdf(mean_y) print prob_x, prob_y if prob_x > self.prob_threshold and prob_y > self.prob_threshold: self.map_confidence.list.append(self.map.list[i]) elif prob_x < self.remove_threshold and prob_y < self.remove_threshold: self.remove_list.append(i) for i in self.remove_list: del self.map.list[i] self.map.size = len(self.map.list) self.map.header.stamp = rospy.Time.now() print self.map.size self.map_confidence.size = len(self.map_confidence.list) self.map.header.stamp = rospy.Time.now() self.map_confidence.header.stamp = rospy.Time.now() self.pub_obj.publish(self.map) self.drawRviz(self.map_confidence)
def call_back(self, msg): try: rospy.loginfo("Process Object List") self.obj_list = ObjectPoseList() self.obj_list = msg self.map_confidence = ObjectPoseList() self.map_confidence.header.frame_id = "map" self.map.header.frame_id = "map" self.matching = [] self.remove_list = [] position, quaternion = tf_.lookupTransform("/map", "/velodyne", rospy.Time(0)) transpose_matrix = transformer.fromTranslationRotation( position, quaternion) self.robot_pose = np.dot(transpose_matrix, [0, 0, 0, 1]) for obj_index in range(self.obj_list.size): center_x = self.obj_list.list[obj_index].position.x center_y = self.obj_list.list[obj_index].position.y center_z = self.obj_list.list[obj_index].position.z center = np.array([center_x, center_y, center_z, 1]) new_center = np.dot(transpose_matrix, center) self.obj_list.list[obj_index].position.x = new_center[0] self.obj_list.list[obj_index].position.y = new_center[1] self.obj_list.list[obj_index].position.z = new_center[2] self.obj_list.header.frame_id = "map" if self.first: self.map = self.obj_list self.first = False for i in range(self.map.size): self.map.list[i].occupy = False self.map.list[i].varianceX = self.init_varX self.map.list[i].varianceY = self.init_varY else: for i in range(self.map.size): self.map.list[i].occupy = False self.data_associate() self.update_map() for i in range(self.map.size): mean_x, mean_y = self.map.list[ i].position.x, self.map.list[i].position.y var_x, var_y = self.map.list[i].varianceX, self.map.list[ i].varianceY prob_x = scipy.stats.norm(mean_x, var_x).pdf(mean_x) prob_y = scipy.stats.norm(mean_y, var_y).pdf(mean_y) print prob_x, prob_y if prob_x > self.prob_threshold and prob_y > self.prob_threshold: self.map_confidence.list.append(self.map.list[i]) elif prob_x < self.remove_threshold and prob_y < self.remove_threshold: self.remove_list.append(i) for i in self.remove_list: del self.map.list[i] self.map.size = len(self.map.list) self.map.header.stamp = rospy.Time.now() print self.map.size 
self.map_confidence.size = len(self.map_confidence.list) self.map.header.stamp = rospy.Time.now() self.map_confidence.header.stamp = rospy.Time.now() self.pub_obj.publish(self.map) self.drawRviz(self.map_confidence) except (LookupException, ConnectivityException, ExtrapolationException): print "TF recieve error"
def call_back(self, msg):
    """Classification callback: for each object, rotate its point cloud so
    the centroid lies on the +x axis, project it onto the XY/YZ/XZ planes,
    render the projections into ``self.image``, classify with the 3- or
    6-channel model, fill in the object's ``type``, and republish."""
    obj_list = ObjectPoseList()
    obj_list = msg  # reference the incoming message (ctor above is discarded)
    cluster_num = obj_list.size
    #tf_points = PCL_points()
    #tf_points = msg
    #cluster_num = len(tf_points.list)
    #pcl_size = len(msg.poses)
    for i in range(cluster_num):
        self.no_camera_img = False
        # crop the camera ROI for this object (may set self.no_camera_img)
        self.get_roi_image(obj_list.list[i].img)
        self.image = np.zeros((int(self.height), int(self.width), 3), np.uint8)
        tf_points = PoseArray()
        tf_points = obj_list.list[i].pcl_points
        centroids = Point()
        centroids = obj_list.list[i].position
        plane_xy = []
        plane_yz = []
        plane_xz = []
        pcl_size = len(tf_points.poses)
        # ======= Coordinate transform for better project performance ======
        # Rotate about z so the cluster centroid lies on the +x axis.
        position = [0, 0, 0]
        rad = math.atan2(centroids.y, centroids.x)
        quaternion = tf.transformations.quaternion_from_euler(0., 0., -rad)
        transformer = tf.TransformerROS()
        transpose_matrix = transformer.fromTranslationRotation(
            position, quaternion)
        for m in range(pcl_size):
            new_x = tf_points.poses[m].position.x
            new_y = tf_points.poses[m].position.y
            new_z = tf_points.poses[m].position.z
            orig_point = np.array([new_x, new_y, new_z, 1])
            new_center = np.dot(transpose_matrix, orig_point)
            # in-place update: mutates the poses inside the incoming message
            tf_points.poses[m].position.x = new_center[0]
            tf_points.poses[m].position.y = new_center[1]
            tf_points.poses[m].position.z = new_center[2]
        # ======= project to XY, YZ, XZ plane =======
        for j in range(pcl_size):
            plane_xy.append([
                tf_points.poses[j].position.x, tf_points.poses[j].position.y
            ])
            plane_yz.append([
                tf_points.poses[j].position.y, tf_points.poses[j].position.z
            ])
            plane_xz.append([
                tf_points.poses[j].position.x, tf_points.poses[j].position.z
            ])
        self.toIMG(pcl_size, plane_xy, 'xy')
        self.toIMG(pcl_size, plane_yz, 'yz')
        self.toIMG(pcl_size, plane_xz, 'xz')
        model_type = None
        # choose the network matching the configured input depth
        if self.use_3_channels:
            model_type = self.classify_3()
        else:
            model_type = self.classify_6()
        # ***************************************************************
        # Add type to object list
        # ***************************************************************
        obj_list.list[i].type = model_type
    #cv2.imwrite( "Image" + str(self.index) + ".jpg", self.image)
    #self.index = self.index + 1
    #print "Save image"
    self.pub_obj.publish(obj_list)
    self.drawRviz(obj_list)