Example #1
    def on_people(self, ppl_msg):
        """
        ppl_msg (jsk_recognition_msgs/PeoplePoseArray)

        refer: https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/doc/output.md
        """
        frame_id = remove_slash(ppl_msg.header.frame_id)
        in_objects = self.objects
        objects = []
        for bbox in in_objects:
            if remove_slash(bbox.header.frame_id) != frame_id:
                ps = PoseStamped(header=bbox.header, pose=bbox.pose)
                try:
                    ps = self.tfl.transform(ps, frame_id)
                    bbox.header, bbox.pose = ps.header, ps.pose
                    objects.append(bbox)
                except tf2_ros.TransformException:
                    continue

        out_bboxes = []
        out_markers = []
        for i, pose in enumerate(ppl_msg.poses):
            # for each person
            if "rarm" in self.use_arm:
                rwrist = find_pose("RWrist", pose)
                rfinger = find_pose(["RHand5", "RHand6", "RHand7", "RHand8"],
                                    pose)
                if rwrist is not None and rfinger is not None:
                    rclosest, rdist = self.get_closest_bbox((rwrist, rfinger),
                                                            objects)
                    if rdist is not None and\
                       self.min_dist_threshold <= rdist <= self.max_dist_threshold:
                        out_bboxes.append(rclosest)
                    rmarker = self.get_marker((rwrist, rfinger), frame_id)
                    rmarker.id = 2 * i
                    out_markers.append(rmarker)

            if "larm" in self.use_arm:
                lwrist = find_pose("LWrist", pose)
                lfinger = find_pose(["LHand5", "LHand6", "LHand7", "LHand8"],
                                    pose)
                if lwrist is not None and lfinger is not None:
                    lclosest, ldist = self.get_closest_bbox((lwrist, lfinger),
                                                            objects)
                    if ldist is not None and\
                       self.min_dist_threshold <= ldist <= self.max_dist_threshold:
                        out_bboxes.append(lclosest)
                    lmarker = self.get_marker((lwrist, lfinger), frame_id)
                    lmarker.id = 2 * i + 1
                    out_markers.append(lmarker)

        # publish
        if out_bboxes:
            msg = BoundingBoxArray()
            msg.header.stamp = rospy.Time.now()
            msg.header.frame_id = frame_id
            msg.boxes = out_bboxes
            self.pub_bbox.publish(msg)
        if out_markers:
            self.pub_marker.publish(MarkerArray(markers=out_markers))
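The on_people callback above assumes a surrounding node that caches the candidate boxes, owns a tf2 buffer (self.tfl), and creates the two publishers. A minimal wiring sketch is given below; the class name, topic names, and parameter names are assumptions for illustration, not the original node.

import rospy
import tf2_ros
import tf2_geometry_msgs  # registers geometry_msgs types with tf2, needed by Buffer.transform()
from jsk_recognition_msgs.msg import BoundingBoxArray, PeoplePoseArray
from visualization_msgs.msg import MarkerArray


class PointedObjectNode(object):
    """Hypothetical wiring for an on_people-style callback (sketch only)."""

    def __init__(self):
        self.use_arm = rospy.get_param("~use_arm", ["rarm", "larm"])
        self.min_dist_threshold = rospy.get_param("~min_dist_threshold", 0.0)
        self.max_dist_threshold = rospy.get_param("~max_dist_threshold", 1.0)
        self.objects = []

        self.tfl = tf2_ros.Buffer()
        self._tf_listener = tf2_ros.TransformListener(self.tfl)

        self.pub_bbox = rospy.Publisher("~output/boxes", BoundingBoxArray, queue_size=1)
        self.pub_marker = rospy.Publisher("~output/markers", MarkerArray, queue_size=1)

        rospy.Subscriber("~input/boxes", BoundingBoxArray, self.on_objects, queue_size=1)
        rospy.Subscriber("~input/people", PeoplePoseArray, self.on_people, queue_size=1)

    def on_objects(self, msg):
        # cache the latest candidate boxes used by on_people
        self.objects = msg.boxes

    def on_people(self, ppl_msg):
        pass  # body as in Example #1


if __name__ == "__main__":
    rospy.init_node("pointed_object_node")
    PointedObjectNode()
    rospy.spin()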
Example #2
def callback(data):
    header = data.header
    frame = header.seq

    boxes = BoundingBoxArray()  #3D Boxes with JSK
    boxes.header = header

    rects = image_obj()  #Rects Autoware
    rects.header = header
    rects.type = "car"

    texts = PictogramArray()  #Labels with JSK
    texts.header = header

    if frame in kitti_data:
        for b in kitti_data[frame]:
            b.header = header
            boxes.boxes.append(b)

    if frame in auto_boxes:
        for rect in auto_boxes[frame]:
            rects.obj.append(rect)

    if frame in pictogram_texts:
        for txt in pictogram_texts[frame]:
            txt.header = header
            texts.pictograms.append(txt)

    pub.publish(boxes)
    pub_boxes.publish(rects)
    pub_pictograms.publish(texts)
Example #3
def callback(msg):
    global pub, frame_id, min_z, max_z, tf_listener
    boxes = msg.boxes
    # latest
    (pos, rot) = tf_listener.lookupTransform(msg.header.frame_id, frame_id, rospy.Time(0))
    origin_pose = concatenate_matrices(
        translation_matrix(pos), quaternion_matrix(rot))
    result = BoundingBoxArray()
    result.header = msg.header
    min_z_pos = 1000000.0
    min_box = None
    for box in msg.boxes:
        box_pos = [box.pose.position.x, box.pose.position.y, box.pose.position.z]
        # [x, y, z, w]
        box_rot = [box.pose.orientation.x, box.pose.orientation.y, box.pose.orientation.z,
                   box.pose.orientation.w]
        box_pose = concatenate_matrices(
            translation_matrix(box_pos), quaternion_matrix(box_rot))
        box_global_pose = concatenate_matrices(origin_pose, box_pose)
        box_global_pos = translation_from_matrix(box_global_pose)
        if box_global_pos[2] > min_z and box_global_pos[2] < max_z:
            result.boxes.append(box)
            if box_global_pos[2] < min_z_pos:
                min_z_pos = box_global_pos[2]
                min_box = box
    if len(result.boxes) > 1:
        result.boxes = [min_box]
    pub.publish(result)
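This callback relies on module-level globals (pub, frame_id, min_z, max_z, tf_listener) plus the tf.transformations helpers, none of which are shown. A minimal sketch of that setup, appended below the callback definition, follows; the node name, topic names, and default parameter values are assumptions for illustration.

import rospy
import tf
from jsk_recognition_msgs.msg import BoundingBoxArray
from tf.transformations import (concatenate_matrices, quaternion_matrix,
                                translation_from_matrix, translation_matrix)

if __name__ == "__main__":
    rospy.init_node("lowest_box_filter")
    # frame whose z axis is used for the [min_z, max_z] test
    frame_id = rospy.get_param("~frame_id", "odom")
    min_z = rospy.get_param("~min_z", 0.0)
    max_z = rospy.get_param("~max_z", 2.0)
    tf_listener = tf.TransformListener()
    pub = rospy.Publisher("~output", BoundingBoxArray, queue_size=1)
    rospy.Subscriber("~input", BoundingBoxArray, callback, queue_size=1)
    rospy.spin()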
Example #4
def callback(box_array, coef_array):
    # We assume that the size of box_array and polygon_array are 1
    if len(box_array.boxes) > 0 and len(coef_array.coefficients) > 0:
        if (box_array.boxes[0].header.frame_id ==
                coef_array.coefficients[0].header.frame_id):
            box = box_array.boxes[0]
            box_pos = numpy.array([
                box.pose.position.x, box.pose.position.y, box.pose.position.z
            ])
            coef = coef_array.coefficients[0].values
            n = numpy.array([coef[0], coef[1], coef[2]])
            d = (coef[0] * box_pos[0] + coef[1] * box_pos[1] +
                 coef[2] * box_pos[2] +
                 coef[3] / math.sqrt(coef[0] * coef[0] + coef[1] * coef[1] +
                                     coef[2] * coef[2]))
            required_distance = distance - d
            rospy.loginfo("required_distance: %f" % (required_distance))
            new_box_pos = required_distance * n + box_pos
            box.pose.position.x = new_box_pos[0]
            box.pose.position.y = new_box_pos[1]
            box.pose.position.z = new_box_pos[2]
            result_box_array = BoundingBoxArray()
            result_box_array.header = box_array.header
            result_box_array.boxes = [box]
            pub_box_array.publish(result_box_array)
        else:
            rospy.logwarn("frame_id of box array and coef array are not same")
    else:
        rospy.logwarn("Size of box array and coef array are not enough")
Example #5
    def to_msg(self, boxes, class_ids, header):
        box_arr = BoundingBoxArray()
        box_arr.header = header

        for i in range(len(boxes)):
            if str(self.classes[class_ids[i]]) == "person":
                x, y, w, h = boxes[i]
                box = BoundingBox()
                box.label = i
                box.value = 0

                box.pose.position.x = x
                box.pose.position.y = y
                box.pose.position.z = 0

                # identity orientation
                box.pose.orientation.x = 0
                box.pose.orientation.y = 0
                box.pose.orientation.z = 0
                box.pose.orientation.w = 1

                box.dimensions.x = w
                box.dimensions.y = h
                box.dimensions.z = 0

                box_arr.boxes.append(box)

        return box_arr
Example #6
def callback(msg):
    new_msg = BoundingBoxArray()
    new_msg.header = msg.header
    trans = concatenate_matrices(translation_matrix(transform[0:3]),
                                 euler_matrix(*transform[3:6]))
    for box in msg.boxes:
        old_pose = concatenate_matrices(translation_matrix([
            box.pose.position.x,
            box.pose.position.y,
            box.pose.position.z]),
                                        quaternion_matrix([
                                            box.pose.orientation.x,
                                            box.pose.orientation.y,
                                            box.pose.orientation.z,
                                            box.pose.orientation.w]))
        new_pose = concatenate_matrices(old_pose, trans)
        translation = translation_from_matrix(new_pose)
        rotation = quaternion_from_matrix(new_pose)
        box.pose.position.x = translation[0]
        box.pose.position.y = translation[1]
        box.pose.position.z = translation[2]
        box.pose.orientation.x = rotation[0]
        box.pose.orientation.y = rotation[1]
        box.pose.orientation.z = rotation[2]
        box.pose.orientation.w = rotation[3]
        # box.dimensions.z, box.dimensions.y = box.dimensions.y, box.dimensions.z
        new_msg.boxes.append(box)
    p.publish(new_msg)
Example #7
def callback(data):
    header = data.header
    frame = header.seq

    boxes = BoundingBoxArray()
    boxes.header = header

    texts = PictogramArray()
    texts.header = header

    if frame in kitti_data:
        for b in kitti_data[frame]:
            b.header = header
            boxes.boxes.append(b)
            rospy.loginfo(b)

    if frame in pictogram_texts:
        for txt in pictogram_texts[frame]:
            txt.header = header
            texts.pictograms.append(txt)
            rospy.loginfo(txt)

    pub_boxes.publish(boxes)
    pub_pictograms.publish(texts)
Example #8
    def msg_builder(self, header):
        msg = BoundingBoxArray()
        msg.header = header

        for b in self.bboxes:
            msg.boxes.append(self.bboxes[b].to_msg())
        return msg
Example #9
def callback(msg):
    if not candidate_pose:
        return
    target_array = BoundingBoxArray()
    target_array.header.stamp = msg.header.stamp
    target_array.header.frame_id = "world"
    target = BoundingBox()
    target.header.stamp = msg.header.stamp
    target.header.frame_id = "world"
    target.pose = candidate_pose
    target.dimensions.x = 0.2
    target.dimensions.y = 0.2
    target.dimensions.z = 0.2
    target_array.boxes = [target]
    pub_target.publish(target_array)
    candidate_array = BoundingBoxArray()
    candidate_array.header.stamp = msg.header.stamp
    candidate_array.header.frame_id = "world"
    for x in [-0.2, -0.1, 0.0, 0.1, 0.2]:
        for y in [-0.2, -0.1, 0.0, 0.1, 0.2]:
            for z in [-0.2, -0.1, 0.0, 0.1, 0.2]:
                candidate = BoundingBox()
                candidate.header.stamp = msg.header.stamp
                candidate.header.frame_id = "world"
                candidate.pose.position.z = 2 + z
                candidate.pose.position.x = x
                candidate.pose.position.y = y
                candidate.pose.orientation.w = 1.0
                candidate.dimensions.x = 0.1
                candidate.dimensions.y = 0.1
                candidate.dimensions.z = 0.1
                candidate_array.boxes.append(candidate)
    pub_candidate.publish(candidate_array)
Example #10
    def __init__(self):
        mask_rcnn_label_lst = rospy.get_param('~fg_class_names')
        qatm_label_lst = rospy.get_param('~qatm_class_names')
        color_label_lst = ['red']
        self.label_lst = mask_rcnn_label_lst + qatm_label_lst + color_label_lst

        self.boxes = BoundingBoxArray()

        self.header = None
        self.red_boxes = BoundingBoxArray()
        self.labeled_boxes = BoundingBoxArray()
        self.mask_rcnn_boxes = BoundingBoxArray()
        self.qatm_boxes = BoundingBoxArray()

        self.listener = tf.TransformListener()
        self.broadcaster = tf.TransformBroadcaster()
        self.shelf_flont_angle = 181.28271996356708

        rospy.Subscriber(
            "~input_instance_boxes", BoundingBoxArray, self.instance_box_callback)
        rospy.Subscriber(
            "~input_cluster_boxes", BoundingBoxArray, self.cluster_box_callback)
        rospy.Subscriber(
            "~input_qatm_pos", LabeledPoseArray, self.labeled_pose_callback)
        rospy.Subscriber(
            "~input_red_boxes", BoundingBoxArray, self.red_box_callback)

        rospy.Service(
            '/display_task_vision_server', VisionServer, self.vision_server)
Example #11
def callback(msg):
    if not candidate_pose:
        return
    target_array = BoundingBoxArray()
    target_array.header.stamp = msg.header.stamp
    target_array.header.frame_id = "world"
    target = BoundingBox()
    target.header.stamp = msg.header.stamp
    target.header.frame_id = "world"
    target.pose = candidate_pose
    target.dimensions.x = 0.2
    target.dimensions.y = 0.2
    target.dimensions.z = 0.2
    target_array.boxes = [target]
    pub_target.publish(target_array)
    candidate_array = BoundingBoxArray()
    candidate_array.header.stamp = msg.header.stamp
    candidate_array.header.frame_id = "world"
    for x in [-0.2, -0.1, 0.0, 0.1, 0.2]:
        for y in [-0.2, -0.1, 0.0, 0.1, 0.2]:
            for z in [-0.2, -0.1, 0.0, 0.1, 0.2]:
                candidate = BoundingBox()
                candidate.header.stamp = msg.header.stamp
                candidate.header.frame_id = "world"
                candidate.pose.position.z = 2 + z
                candidate.pose.position.x = x
                candidate.pose.position.y = y
                candidate.pose.orientation.w = 1.0
                candidate.dimensions.x = 0.1
                candidate.dimensions.y = 0.1
                candidate.dimensions.z = 0.1
                candidate_array.boxes.append(candidate)
    pub_candidate.publish(candidate_array)
Example #12
def callback(msg):
    global pub, frame_id, min_z, max_z, tf_listener
    boxes = msg.boxes
    # latest
    (pos, rot) = tf_listener.lookupTransform(msg.header.frame_id, frame_id,
                                             rospy.Time(0))
    origin_pose = concatenate_matrices(translation_matrix(pos),
                                       quaternion_matrix(rot))
    result = BoundingBoxArray()
    result.header = msg.header
    min_z_pos = 1000000.0
    min_box = None
    for box in msg.boxes:
        box_pos = [
            box.pose.position.x, box.pose.position.y, box.pose.position.z
        ]
        # [x, y, z, w]
        box_rot = [
            box.pose.orientation.x, box.pose.orientation.y,
            box.pose.orientation.z, box.pose.orientation.w
        ]
        box_pose = concatenate_matrices(translation_matrix(box_pos),
                                        quaternion_matrix(box_rot))
        box_global_pose = concatenate_matrices(origin_pose, box_pose)
        box_global_pos = translation_from_matrix(box_global_pose)
        if box_global_pos[2] > min_z and box_global_pos[2] < max_z:
            result.boxes.append(box)
            if box_global_pos[2] < min_z_pos:
                min_z_pos = box_global_pos[2]
                min_box = box
    if len(result.boxes) > 1:
        result.boxes = [min_box]
    pub.publish(result)
Example #13
    def __init__(self):
        self.lock = Lock()
        self.save_log = False

        self.label_lst = rospy.get_param('~fg_class_names')
        self.neatness_pub = rospy.Publisher('~neatness_mean',
                                            Neatness,
                                            queue_size=1)

        self.has_requested_boxes = False
        self.instance_msg = BoundingBoxArray()
        self.cluster_msg = BoundingBoxArray()

        self.output_dir_name = os.path.join(
            rospkg.RosPack().get_path('neatness_estimator'), 'output')
        self.output_dir = self.output_dir_name
        self.neatness_log = 'neatness_output.csv'
        self.items_group_neatness = 'items_group_neatness.csv'
        self.items_filling_neatness = 'items_filling_neatness.csv'
        self.items_pulling_neatness = 'items_pulling_neatness.csv'

        self.thresh = rospy.get_param('~thresh', 0.8)
        self.boxes = []
        self.bridge = CvBridge()

        rospy.Service('~get_display_feature', GetDisplayFeature,
                      self.service_callback)
        self.subscribe()
Example #14
def callback(data):
	header = data.header
	frame = header.seq

	boxes = BoundingBoxArray() #3D Boxes with JSK
	boxes.header = header
	
	rects = ImageObj() #Rects Autoware
	rects.header = header
	rects.type = "car"
	
	texts = PictogramArray() #Labels with JSK
	texts.header = header

	if frame in kitti_data:
		for b in kitti_data[frame]:
			b.header = header
			boxes.boxes.append(b)

	if frame in auto_boxes:
		for rect in auto_boxes[frame]:
			rects.obj.append(rect)
	
	if frame in pictogram_texts:
		for txt in pictogram_texts[frame]:
			txt.header = header
			texts.pictograms.append(txt)

	pub.publish(boxes)
	pub_boxes.publish(rects)
	pub_pictograms.publish(texts)
Example #15
def rslidar_callback(msg):
    t_t = time.time()

    #calib = getCalibfromFile(calib_file)
    #calib = getCalibfromROS(calibmsg)

    frame = msg.header.seq

    arr_bbox = BoundingBoxArray()

    msg_cloud = ros_numpy.point_cloud2.pointcloud2_to_array(msg)
    np_p = get_xyz_points(msg_cloud, True)
    print("  ")
    #scores, dt_box_lidar, types = proc_1.run(np_p)
    scores, dt_box_lidar, types, pred_dict = proc_1.run(np_p, calib, frame)

    annos_sorted = sortbydistance(dt_box_lidar, scores, types)
    #pp_AB3DMOT_list  = anno_to_AB3DMOT(pred_dict, msg)
    pp_AB3DMOT_list = anno_to_AB3DMOT(dt_box_lidar, scores, types, msg)
    pp_list = anno_to_sort(dt_box_lidar, scores, types)
    pp_3D_list = anno_to_3Dsort(annos_sorted, types)
    MarkerArray_list = anno_to_rviz(dt_box_lidar, scores, types, msg)

    if scores.size != 0:
        for i in range(scores.size):
            if scores[i] > threshold:
                bbox = BoundingBox()
                bbox.header.frame_id = msg.header.frame_id
                bbox.header.stamp = rospy.Time.now()
                q = yaw2quaternion(float(dt_box_lidar[i][6]))
                bbox.pose.orientation.x = q[1]
                bbox.pose.orientation.y = q[2]
                bbox.pose.orientation.z = q[3]
                bbox.pose.orientation.w = q[0]
                bbox.pose.position.x = float(
                    dt_box_lidar[i][0]) - movelidarcenter
                bbox.pose.position.y = float(dt_box_lidar[i][1])
                bbox.pose.position.z = float(dt_box_lidar[i][2])
                bbox.dimensions.x = float(dt_box_lidar[i][3])
                bbox.dimensions.y = float(dt_box_lidar[i][4])
                bbox.dimensions.z = float(dt_box_lidar[i][5])
                bbox.value = scores[i]
                bbox.label = int(types[i])
                arr_bbox.boxes.append(bbox)

    print("total callback time: ", time.time() - t_t)
    arr_bbox.header.frame_id = msg.header.frame_id
    arr_bbox.header.stamp = msg.header.stamp
    if len(arr_bbox.boxes) != 0:
        pub_arr_bbox.publish(arr_bbox)
        arr_bbox.boxes = []
    else:
        arr_bbox.boxes = []
        pub_arr_bbox.publish(arr_bbox)

    pubRviz.publish(MarkerArray_list)
    pubSort.publish(pp_list)
    pub3DSort.publish(pp_3D_list)
    pubAB3DMOT.publish(pp_AB3DMOT_list)
Example #16
def rslidar_callback(msg):
    t_t = time.time()

    arr_bbox = BoundingBoxArray()

    #t = time.time()
    msg_cloud = ros_numpy.point_cloud2.pointcloud2_to_array(msg)
    np_p = get_xyz_points(msg_cloud, True)
    print("  ")
    #print("prepare cloud time: ", time.time() - t)

    #t = time.time()
    scores, dt_box_lidar, types = proc_1.run(np_p)
    #print("network forward time: ", time.time() - t)
    # filter_points_sum = []
    #t = time.time()
    if scores.size != 0:
        for i in range(scores.size):
            bbox = BoundingBox()
            bbox.header.frame_id = msg.header.frame_id
            bbox.header.stamp = rospy.Time.now()

            q = yaw2quaternion(float(dt_box_lidar[i][6]))
            bbox.pose.orientation.x = q[0]
            bbox.pose.orientation.y = q[1]
            bbox.pose.orientation.z = q[2]
            bbox.pose.orientation.w = q[3]

            bbox.pose.position.x = float(dt_box_lidar[i][0])
            bbox.pose.position.y = float(dt_box_lidar[i][1])
            bbox.pose.position.z = float(dt_box_lidar[i][2])
            bbox.dimensions.x = float(dt_box_lidar[i][3])
            bbox.dimensions.y = float(dt_box_lidar[i][4])
            bbox.dimensions.z = float(dt_box_lidar[i][5])
            bbox.value = scores[i]
            bbox.label = types[i]
            arr_bbox.boxes.append(bbox)

            # filter_points = np_p[point_indices[:,i]]
            # filter_points_sum.append(filter_points)
    #print("publish time cost: ", time.time() - t)
    # filter_points_sum = np.concatenate(filter_points_sum, axis=0)
    # filter_points_sum = filter_points_sum[:, :3]

    # print("output of concatenate:", filter_points_sum)
    # filter_points_sum = np.arange(24).reshape(8,3)
    # cluster_cloud = xyz_array_to_pointcloud2(filter_points_sum, stamp=rospy.Time.now(), frame_id=msg.header.frame_id)
    # pub_segments.publish(cluster_cloud)

    print("total callback time: ", time.time() - t_t)
    arr_bbox.header.frame_id = msg.header.frame_id
    arr_bbox.header.stamp = rospy.Time.now()
    if len(arr_bbox.boxes) != 0:
        pub_arr_bbox.publish(arr_bbox)
        arr_bbox.boxes = []
    else:
        arr_bbox.boxes = []
        pub_arr_bbox.publish(arr_bbox)
Example #17
    def msg_builder(self, header):
        msg = BoundingBoxArray()
        msg.header = header

        for t in self.trace:
            if t.active:
                msg.boxes.append(t.to_msg())

        return msg
Example #18
    def callback(self, box_msg):
        labeled_boxes = {}
        label_buf = []
        orientation = Pose().orientation

        for box in box_msg.boxes:
            if box.label in label_buf:
                labeled_boxes[box.label] += [self.get_points(box)]
                orientation = box.pose.orientation
            else:
                labeled_boxes[box.label] = [self.get_points(box)]
                label_buf.append(box.label)

        bounding_box_msg = BoundingBoxArray()

        for label, boxes in labeled_boxes.items():
            thresh = self.thresh
            if self.label_lst[label] == 'shelf_flont':
                thresh = 2.0

            clustering = Clustering()
            boxes = np.array(boxes)
            result = clustering.clustering_wrapper(boxes, thresh)

            for cluster in result:
                max_candidates = [
                    boxes[i][0] + (boxes[i][1] * 0.5) for i in cluster.indices
                ]
                min_candidates = [
                    boxes[i][0] - (boxes[i][1] * 0.5) for i in cluster.indices
                ]
                candidates = np.array(max_candidates + min_candidates)

                dimension = candidates.max(axis=0) - candidates.min(axis=0)
                center = candidates.min(axis=0) + (dimension * 0.5)

                distances = self.get_distances(
                    np.array([boxes[i][0] for i in cluster.indices]), [
                        np.linalg.norm(boxes[i][1]) * 0.5
                        for i in cluster.indices
                    ])
                tmp_box = BoundingBox()
                tmp_box.header = box_msg.header
                tmp_box.dimensions.x = dimension[0]
                tmp_box.dimensions.y = dimension[1]
                tmp_box.dimensions.z = dimension[2]
                tmp_box.pose.position.x = center[0]
                tmp_box.pose.position.y = center[1]
                tmp_box.pose.position.z = center[2]
                tmp_box.pose.orientation = orientation
                tmp_box.label = label
                tmp_box.value = distances.mean()
                bounding_box_msg.boxes.append(tmp_box)

            bounding_box_msg.header = box_msg.header
        self.box_pub.publish(bounding_box_msg)
Example #19
def callback(msg):
    box_array = BoundingBoxArray()
    box_array.header = msg.header
    for footstep in msg.footsteps:
        box = BoundingBox()
        box.header = msg.header
        box.pose = footstep.pose
        box.dimensions = footstep.dimensions
        box_array.boxes.append(box)
    pub.publish(box_array)
Example #20
def callback(msg):
    box_array = BoundingBoxArray()
    box_array.header = msg.header
    for footstep in msg.footsteps:
        box = BoundingBox()
        box.header = msg.header
        box.pose = footstep.pose
        box.dimensions = footstep.dimensions
        box_array.boxes.append(box)
    pub.publish(box_array)
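Examples #19 and #20 simply repackage a footstep plan as a BoundingBoxArray so it can be shown in rviz. A wiring sketch for the bottom of such a script is below, assuming jsk_footstep_msgs/FootstepArray as the input type; the node and topic names are illustrative only.

import rospy
from jsk_footstep_msgs.msg import FootstepArray
from jsk_recognition_msgs.msg import BoundingBox, BoundingBoxArray

if __name__ == "__main__":
    rospy.init_node("footstep_to_bounding_box")
    pub = rospy.Publisher("~output", BoundingBoxArray, queue_size=1)
    rospy.Subscriber("~input", FootstepArray, callback, queue_size=1)
    rospy.spin()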
Example #21
def convert(header, xx_cube):
    boundingboxarray_msg = BoundingBoxArray()
    boundingboxarray_msg.header = header

    num_xx = int(xx_cube.shape[0] / 8)
    for i in range(num_xx):
        cube_raw = xx_cube[range(8 * i, 8 * i + 8), :]
        bb_in_camera = np.column_stack((cube_raw, np.ones((8, 1))))
        bb_in_lidar = np.linalg.inv(calib.lidar_to_cam).dot(bb_in_camera.T).T
        boundingbox_msg = BoundingBox()
        boundingbox_msg.header = boundingboxarray_msg.header

        # bounding box center position
        boundingbox_msg.pose.position.x = bb_in_lidar[:, 0].mean()
        boundingbox_msg.pose.position.y = bb_in_lidar[:, 1].mean()
        boundingbox_msg.pose.position.z = bb_in_lidar[:, 2].mean()

        # find the bottom vertex with the smallest y, then compute the rotation angle and edge lengths from its two adjacent vertices
        bb_bottom = bb_in_lidar[:4]
        min_idx = np.where(bb_bottom[:, 1] == bb_bottom[:, 1].min())[0][0]
        theta = math.atan2(
            bb_bottom[(min_idx + 1) % 4, 1] - bb_bottom[min_idx, 1],
            bb_bottom[(min_idx + 1) % 4, 0] - bb_bottom[min_idx, 0])
        b_1 = (
            (bb_bottom[(min_idx + 1) % 4, 1] - bb_bottom[min_idx, 1])**2 +
            (bb_bottom[(min_idx + 1) % 4, 0] - bb_bottom[min_idx, 0])**2)**0.5
        b_2 = (
            (bb_bottom[(min_idx + 3) % 4, 1] - bb_bottom[min_idx, 1])**2 +
            (bb_bottom[(min_idx + 3) % 4, 0] - bb_bottom[min_idx, 0])**2)**0.5
        if theta < 90 * math.pi / 180:
            rotation_angle = theta
            dimension_x = b_1
            dimension_y = b_2
        else:
            rotation_angle = theta - 90 * math.pi / 180
            dimension_x = b_2
            dimension_y = b_1

        # quaternion for the bounding box rotation angle
        boundingbox_msg.pose.orientation.x = 0
        boundingbox_msg.pose.orientation.y = 0
        boundingbox_msg.pose.orientation.z = math.sin(0.5 * rotation_angle)
        boundingbox_msg.pose.orientation.w = math.cos(0.5 * rotation_angle)

        # bounding box dimensions
        boundingbox_msg.dimensions.x = dimension_x
        boundingbox_msg.dimensions.y = dimension_y
        boundingbox_msg.dimensions.z = (bb_in_lidar[:, 2].max() -
                                        bb_in_lidar[:, 2].min())

        boundingbox_msg.value = 0
        boundingbox_msg.label = 0
        boundingboxarray_msg.boxes.append(boundingbox_msg)

    return boundingboxarray_msg
Example #22
def callback(msg):
    box_array = BoundingBoxArray()
    box_array.header = msg.header
    for footstep in msg.footsteps:
        box = BoundingBox()
        box.header = msg.header
        box.pose = footstep.pose
        box.dimensions = footstep.dimensions
        box.pose.position.z += (z_max + z_min) / 2.0
        box.dimensions.z = z_max - z_min
        box_array.boxes.append(box)
    pub.publish(box_array)
Example #23
    def detector_callback(self, pcl_msg):
        start = time.time()
        # rospy.loginfo('Processing Pointcloud with PointRCNN')
        arr_bbox = BoundingBoxArray()
        seq = pcl_msg.header.seq
        stamp = pcl_msg.header.stamp
        # in message pointcloud has x pointing forward, y pointing to the left and z pointing upward
        pts_lidar = np.array(
            [[p[0], p[1], p[2], p[3]]
             for p in pc2.read_points(pcl_msg, skip_nans=True,
                                      field_names=("x", "y", "z", "intensity"))],
            dtype=np.float32)
        scores, dt_box_lidar, types = self.run_model(pts_lidar)

        # TODO: question convert into torch tensors? torch.from_numpy(pts_lidar)
        # move onto gpu if available

        # TODO: check if needs translation/rotation to compensate for tilt etc.

        if scores.size != 0:
            for i in range(scores.size):
                bbox = BoundingBox()
                bbox.header.frame_id = pcl_msg.header.frame_id
                bbox.header.stamp = rospy.Time.now()
                # bbox.header.seq = pcl_msg.header.seq
                q = yaw2quaternion(float(dt_box_lidar[i][6]))
                bbox.pose.orientation.x = q[1]
                bbox.pose.orientation.y = q[2]
                bbox.pose.orientation.z = q[3]
                bbox.pose.orientation.w = q[0]
                bbox.pose.position.x = float(dt_box_lidar[i][0])
                bbox.pose.position.y = float(dt_box_lidar[i][1])
                bbox.pose.position.z = float(dt_box_lidar[i][2])
                bbox.dimensions.x = float(dt_box_lidar[i][3])
                bbox.dimensions.y = float(dt_box_lidar[i][4])
                bbox.dimensions.z = float(dt_box_lidar[i][5])
                bbox.value = scores[i]
                bbox.label = int(types[i])
                arr_bbox.boxes.append(bbox)

        # rospy.loginfo("3D detector time: {}".format(time.time() - start))

        arr_bbox.header.frame_id = pcl_msg.header.frame_id
        arr_bbox.header.stamp = pcl_msg.header.stamp
        arr_bbox.header.seq = pcl_msg.header.seq

        if len(arr_bbox.boxes) != 0:
            self.pub_arr_bbox.publish(arr_bbox)
            arr_bbox.boxes = []
        else:
            arr_bbox.boxes = []
            self.pub_arr_bbox.publish(arr_bbox)
Example #24
    def __init__(self):
        self.boxes = BoundingBoxArray()
        self.sorted_boxes = BoundingBoxArray()
        self.border_indexes = None

        self.change = rospy.get_param('change', 7)
        self.insert = rospy.get_param('insert', 14)
        self.delete = rospy.get_param('delete', 14)

        rospy.Subscriber("~input_boxes", BoundingBoxArray, self.callback)
        rospy.Service('/display_planner_server', DisplayState,
                      self.server_callback)
Example #25
def callback(msg):
    box_array = BoundingBoxArray()
    box_array.header = msg.header
    for footstep in msg.footsteps:
        box = BoundingBox()
        box.header = msg.header
        box.pose = footstep.pose
        box.dimensions = footstep.dimensions
        box.pose.position.z += (z_max + z_min) / 2.0
        box.dimensions.z = z_max - z_min
        box_array.boxes.append(box)
    pub.publish(box_array)
Example #26
    def on_people(self, ppl_msg):
        """
        ppl_msg (jsk_recognition_msgs/PeoplePoseArray)

        refer: https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/doc/output.md
        """
        frame_id = remove_slash(ppl_msg.header.frame_id)
        in_objects = self.objects
        objects = []
        for bbox in in_objects:
            if remove_slash(bbox.header.frame_id) != frame_id:
                ps = PoseStamped(header=bbox.header, pose=bbox.pose)
                try:
                    ps = self.tfl.transform(ps, frame_id)
                    bbox.header, bbox.pose = ps.header, ps.pose
                    objects.append(bbox)
                except tf2_ros.TransformException:
                    continue

        out_bboxes = []
        out_markers = []
        for i, pose in enumerate(ppl_msg.poses):
            # for each person
            rwrist = find_pose("RWrist", pose)
            rfinger = find_pose(["RHand5", "RHand6", "RHand7", "RHand8"], pose)
            if rwrist is not None and rfinger is not None:
                rclosest, rdist = self.get_closest_bbox((rwrist, rfinger), objects)
                if rdist is not None and rdist < self.dist_threshold:
                    out_bboxes.append(rclosest)
                rmarker = self.get_marker((rwrist, rfinger), frame_id)
                rmarker.id = 2 * i
                out_markers.append(rmarker)

            lwrist = find_pose("LWrist", pose)
            lfinger = find_pose(["LHand5", "LHand6", "LHand7", "LHand8"], pose)
            if lwrist is not None and lfinger is not None:
                lclosest, ldist = self.get_closest_bbox((lwrist, lfinger), objects)
                if ldist is not None and ldist < self.dist_threshold:
                    out_bboxes.append(lclosest)
                lmarker = self.get_marker((lwrist, lfinger), frame_id)
                lmarker.id = 2 * i + 1
                out_markers.append(lmarker)

        # publish
        if out_bboxes:
            msg = BoundingBoxArray()
            msg.header.stamp = rospy.Time.now()
            msg.header.frame_id = frame_id
            msg.boxes = out_bboxes
            self.pub_bbox.publish(msg)
        if out_markers:
            self.pub_marker.publish(MarkerArray(markers=out_markers))
Example #27
    def callback(self, instance_boxes_msg, instance_label_msg):
        labeled_instance_boxes = BoundingBoxArray()
        labeled_instance_boxes.header = instance_boxes_msg.header

        for box, label in zip(instance_boxes_msg.boxes, instance_label_msg.labels):
            tmp_box = BoundingBox()
            tmp_box.header = box.header
            tmp_box.pose = box.pose
            tmp_box.dimensions = box.dimensions
            tmp_box.label = label.id
            labeled_instance_boxes.boxes.append(tmp_box)

        self.labeled_instance_boxes_pub.publish(labeled_instance_boxes)
Example #28
    def create_bb_array_msg(self, bb_msg_list):
        """Create a BoundingBoxArray msg from a bounding box list dict (output from
        self.create_bb_msg_list).

        Args:
            bounding_box_list: A list of bounding box dicts as defined in read_bounding_boxes().

        Returns:
            oh_bb: An instance of the BoundingBoxArray msg.
        """
        oh_bb = BoundingBoxArray()
        oh_bb.header.frame_id = 'map'
        oh_bb.header.stamp = rospy.Time.now()
        oh_bb.boxes = bb_msg_list
        return oh_bb
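Since create_bb_array_msg only packs an existing list of BoundingBox messages under a map-frame header, a self-contained usage sketch is easy to write. Everything below (node name, topic, latched publisher, the sample box) is illustrative; only the packing logic mirrors the method above, rewritten as a free function.

#!/usr/bin/env python
import rospy
from jsk_recognition_msgs.msg import BoundingBox, BoundingBoxArray


def create_bb_array_msg(bb_msg_list):
    # same packing as the method above, written as a free function
    oh_bb = BoundingBoxArray()
    oh_bb.header.frame_id = 'map'
    oh_bb.header.stamp = rospy.Time.now()
    oh_bb.boxes = bb_msg_list
    return oh_bb


if __name__ == "__main__":
    rospy.init_node("bb_array_demo")
    pub = rospy.Publisher("~boxes", BoundingBoxArray, queue_size=1, latch=True)

    box = BoundingBox()
    box.header.frame_id = 'map'
    box.pose.orientation.w = 1.0
    box.pose.position.x = 1.0
    box.dimensions.x = box.dimensions.y = box.dimensions.z = 0.5

    pub.publish(create_bb_array_msg([box]))
    rospy.spin()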
Example #29
def callback(data):
    header = data.header
    frame = header.seq

    boxes = BoundingBoxArray()
    boxes.header = header

    texts = PictogramArray()
    texts.header = header

    arrows = MarkerArray()

    paths = MarkerArray()

    if frame in bounding_boxes:
        for bbox in bounding_boxes[frame]:
            bbox.header = header
            boxes.boxes.append(bbox)

        maxbox = len(boxes.boxes)
        # print("maxbox: %s"%maxbox)

    if frame in pictogram_texts:
        for txt in pictogram_texts[frame]:
            txt.header = header
            texts.pictograms.append(txt)

            # str = txt
            # rospy.loginfo(str)

    if frame in marker_arrows:
        for arrow in marker_arrows[frame]:
            arrow.header = header
            arrows.markers.append(arrow)

        maxarrow = len(arrows.markers)

        # print("maxarrow: %s"%(maxarrow))
    if frame in marker_paths:
        for predict in marker_paths[frame]:
            predict.header = header
            paths.markers.append(predict)
            # print(predict)

    pub_boxes.publish(boxes)
    pub_pictograms.publish(texts)
    pub_arrows.publish(arrows)
    pub_paths.publish(paths)
Example #30
    def init_boundingboxarray(self, num_boxes=30):
        self.boundingBoxArray_object = BoundingBoxArray()

        h = std_msgs.msg.Header()
        # Note: rospy.init_node() must be called before this will work.
        h.stamp = rospy.Time.now()
        h.frame_id = "world"

        self.boundingBoxArray_object.header = h

        self.minimum_dimension = 0.2
        self.init_x_position = 1.0

        for i in range(num_boxes):
            new_box = BoundingBox()
            new_box.header = h

            new_box.pose = Pose()
            new_box.pose.position.x = self.init_x_position + i * self.minimum_dimension

            new_box.dimensions = Vector3()
            new_box.dimensions.x = self.minimum_dimension
            new_box.dimensions.y = self.minimum_dimension
            new_box.dimensions.z = self.minimum_dimension

            new_box.label = i
            new_box.value = i * self.minimum_dimension

            self.boundingBoxArray_object.boxes.append(new_box)

        self.publish_once(self.boundingBoxArray_object)
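init_boundingboxarray ends by calling self.publish_once, which is not part of the snippet. One common shape for such a helper, waiting for a subscriber and then publishing a single message, is sketched below as a standalone function; the function and parameter names are assumptions, not the original implementation.

import rospy


def publish_once(pub, msg, rate_hz=10):
    """Hypothetical helper: wait until the topic has a subscriber, then publish once."""
    rate = rospy.Rate(rate_hz)
    while not rospy.is_shutdown():
        if pub.get_num_connections() > 0:
            pub.publish(msg)
            return
        rate.sleep()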
Example #31
    def setUp(self):
        self.start_time = time.time()
        self.received_cluster_cloud = False
        self.received_normals = False
        self.received_objects = False
        self.received_synced_topics = False

        self.sub_cloud = rospy.Subscriber('/merged_cluster_cloud', PointCloud2,
                                          self.recv_cluster_cloud)
        self.sub_normals = rospy.Subscriber('/normals', PoseArray,
                                            self.recv_normals)
        self.sub_objects = rospy.Subscriber('/objects', BoundingBoxArray,
                                            self.recv_objects)
        self.sub_clock = rospy.Subscriber('/clock', Clock, self.recv_clock)
        self.using_sim_time = rospy.get_param('/use_sim_time', default=False)

        self.sync_sub_cloud = message_filters.Subscriber(
            '/merged_cluster_cloud', PointCloud2)
        self.sync_sub_normals = message_filters.Subscriber(
            '/normals', PoseArray)
        self.sync_sub_objects = message_filters.Subscriber(
            '/objects', BoundingBoxArray)
        self.time_syncer = message_filters.TimeSynchronizer([
            self.sync_sub_cloud, self.sync_sub_normals, self.sync_sub_objects
        ], 10)
        self.time_syncer.registerCallback(self.recv_synced_topics)
        self.cluster_cloud = PointCloud2()
        self.normals = PoseArray()
        self.objects = BoundingBoxArray()
Example #32
    def get_multi_obj_pos(self, req):
        print('get_multi_obj_pos')

        rospy.loginfo(req.task)
        res = VisionServerResponse()
        res.status = False
        has_items = False

        try:
            multi_boxes, has_items = self.get_multi_boxes(req)
            rospy.loginfo('has_items = %s' % has_items)

            if has_items:
                if req.parent_frame == '':
                    rospy.loginfo('req.parent_frame is: empty')
                else:
                    rospy.loginfo('req.parent_frame is: %s' % req.parent_frame)

                # failed lookup transform
                if multi_boxes != BoundingBoxArray():
                    multi_boxes.header = self.boxes.header
                    multi_boxes.header.frame_id = req.parent_frame
                    res.multi_boxes = multi_boxes.boxes
                    res.status = True
                else:
                    res.status = False
                print(res.multi_boxes)

        except Exception:
            res.status = False
            import traceback
            traceback.print_exc()

        return res
Example #33
    def pose_msg_callback(self, pose, classes):
        bboxes = BoundingBoxArray(header=pose.header)
        for p in pose.poses:
            b = BoundingBox(header=pose.header)
            b.pose = p
            bboxes.boxes.append(b)
        self.box_msg_callback(bboxes, classes)
Example #34
def parseBoxes(data):
	global lidar_array
	global filter_box
	filter_box = BoundingBoxArray()
	data_ped = BoundingBoxArray()
	data_car = BoundingBoxArray()
	data_follow = BoundingBoxArray()
	counterp = 0
	counterc = 0
	data_ped.header = data.header
	data_car.header = data.header
	data_follow.header = data.header
	lidar_array = Multi_targets()

	for i in range(len(data.boxes)):
		lidar_target = target()
		x = data.boxes[i].pose.position.x
		y = data.boxes[i].pose.position.y
		height = data.boxes[i].dimensions.z
		length = data.boxes[i].dimensions.x
		width = data.boxes[i].dimensions.y
		lidar_target.pos_x = x
		lidar_target.pos_y = y

		if (y<50 and y>0) and (x<2.5 and x>-2.5):
			data_follow.boxes.append(data.boxes[i])
		if (y<20 and y>-10) and (x<8 and x>-8):
			height = data.boxes[i].dimensions.z
			length = data.boxes[i].dimensions.x
			width = data.boxes[i].dimensions.y
			# Pedestrian
			if (height < 1.8 and height > 0.5) and (length < 1.5 and length > 0) and (width < 1.5 and width > 0):
				data_ped.boxes.append(data.boxes[i])
				# lidar_target.category = 0
				# lidar_target.counter = .1
			# Vehicle
			elif (height < 5 and height > 0.5) and (length < 10 and length > 0) and (width < 10 and width > 0):
				data_car.boxes.append(data.boxes[i])
				# lidar_target.category = 1
				# lidar_target.counter = .1
			lidar_array.data.append(lidar_target)
			filter_box.boxes.append(data.boxes[i])

    # pub_ly_fuse.publish(ly_array)
	pub_car.publish(data_car)
	pub_follow.publish(data_follow)
Example #35
def rslidar_callback(msg):
    # t_t = time.time()
    arr_bbox = BoundingBoxArray()
    msg_cloud = ros_numpy.point_cloud2.pointcloud2_to_array(msg)
    np_p = get_xyz_points(msg_cloud, True)
 
    print("  ")
    seq = msg.header.seq
    stamp = msg.header.stamp
    input_points = {
        'stamp': stamp,
        'seq': seq,
        'points': np_p
    }
    if proc_1.get_lidar_data(input_points):
        scores, dt_box_lidar, types = proc_1.run()

        if scores.size != 0:
            for i in range(scores.size):
                bbox = BoundingBox()
                bbox.header.frame_id = msg.header.frame_id
                bbox.header.stamp = rospy.Time.now()
                q = yaw2quaternion(float(dt_box_lidar[i][8]))
                bbox.pose.orientation.x = q[1]
                bbox.pose.orientation.y = q[2]
                bbox.pose.orientation.z = q[3]
                bbox.pose.orientation.w = q[0]
                bbox.pose.position.x = float(dt_box_lidar[i][0])
                bbox.pose.position.y = float(dt_box_lidar[i][1])
                bbox.pose.position.z = float(dt_box_lidar[i][2])
                bbox.dimensions.x = float(dt_box_lidar[i][4])
                bbox.dimensions.y = float(dt_box_lidar[i][3])
                bbox.dimensions.z = float(dt_box_lidar[i][5])
                bbox.value = scores[i]
                bbox.label = int(types[i])
                arr_bbox.boxes.append(bbox)
        # print("total callback time: ", time.time() - t_t)
        arr_bbox.header.frame_id = msg.header.frame_id
        arr_bbox.header.stamp = msg.header.stamp
        if len(arr_bbox.boxes) != 0:
            pub_arr_bbox.publish(arr_bbox)
            arr_bbox.boxes = []
        else:
            arr_bbox.boxes = []
            pub_arr_bbox.publish(arr_bbox)
Example #36
    def lidar_callback(self, msg):
        if self.model.inference_ctx is None or self.model.inference_ctx.anchor_cache is None:
            return

        # look for an intensity field; keep None if the cloud has none
        intensity_fname = None
        intensity_dtype = None
        for field in msg.fields:
            if field.name == "i" or field.name == "intensity":
                intensity_fname = field.name
                intensity_dtype = field.datatype

        dtype_list = self._fields_to_dtype(msg.fields, msg.point_step)
        pc_arr = np.frombuffer(msg.data, dtype_list)

        if intensity_fname:
            pc_arr = structured_to_unstructured(
                pc_arr[["x", "y", "z", intensity_fname]]).copy()
            if intensity_dtype == 2:
                pc_arr[:, 3] = pc_arr[:, 3] / 255
        else:
            pc_arr = structured_to_unstructured(pc_arr[["x", "y", "z"]]).copy()
            pc_arr = np.hstack((pc_arr, np.zeros((pc_arr.shape[0], 1))))

        lidar_boxes = self.model.predcit(pc_arr)

        num_detects = len(lidar_boxes)
        arr_bbox = BoundingBoxArray()
        for i in range(num_detects):
            bbox = BoundingBox()

            bbox.header.frame_id = msg.header.frame_id
            bbox.header.stamp = rospy.Time.now()

            bbox.pose.position.x = float(lidar_boxes[i][0])
            bbox.pose.position.y = float(lidar_boxes[i][1])
            bbox.pose.position.z = float(
                lidar_boxes[i][2]) + float(lidar_boxes[i][5]) / 2
            bbox.dimensions.x = float(lidar_boxes[i][3])  # width
            bbox.dimensions.y = float(lidar_boxes[i][4])  # length
            bbox.dimensions.z = float(lidar_boxes[i][5])  # height

            q = Quaternion(axis=(0, 0, 1), radians=float(lidar_boxes[i][6]))
            bbox.pose.orientation.x = q.x
            bbox.pose.orientation.y = q.y
            bbox.pose.orientation.z = q.z
            bbox.pose.orientation.w = q.w

            arr_bbox.boxes.append(bbox)

        arr_bbox.header.frame_id = msg.header.frame_id
        arr_bbox.header.stamp = rospy.Time.now()
        print("Number of detections: {}".format(num_detects))

        self.pub_bbox.publish(arr_bbox)
Example #37
def bounding_box_array_callback(msg):
    rospy.loginfo("CHECKING FOR VALIDITY OF BOXES")
    valid_boxes = BoundingBoxArray()
    for m in msg.boxes:
        box_dim = Vector3()
        box_dim = m.dimensions
        if (box_dim.x > min_threshold_x_) and \
           (box_dim.y > min_threshold_y_) and \
           (box_dim.z > min_threshold_z_):
            valid_boxes.boxes.append(m)
    print "LENGHT: ", len(valid_boxes.boxes)
            
    if (len(valid_boxes.boxes) == 0):
        int_stamp = Int32Stamped()
        int_stamp.header = msg.header
        int_stamp.data = -1
        pub_signal_.publish(int_stamp)
    else:
        valid_boxes.header = msg.header
        pub_boxes_.publish(valid_boxes)
Example #38
def callback(box_array, cluster_indices):
    if len(box_array.boxes) > 0:
        min_index = 0
        min_distance = 100000
        for i, box in enumerate(box_array.boxes):
            distance = math.sqrt(box.pose.position.x * box.pose.position.x +
                                 box.pose.position.y * box.pose.position.y +
                                 box.pose.position.z * box.pose.position.z)
            if min_distance > distance:
                min_distance = distance
                min_index = i
        result_box_array = BoundingBoxArray()
        result_box_array.header = box_array.header
        result_box_array.boxes.append(box_array.boxes[min_index])
        pub_box_array.publish(result_box_array)
        pub_indices.publish(cluster_indices.cluster_indices[min_index])
        result_cluster_indices = ClusterPointIndices()
        result_cluster_indices.header = cluster_indices.header
        result_cluster_indices.cluster_indices = [cluster_indices.cluster_indices[min_index]]
        pub_cluster_indices.publish(result_cluster_indices)
    else:
        rospy.logwarn("No bounding box input")
Example #39
def callback(box_array, coef_array):
    # We assume that the size of box_array and polygon_array are 1
    if len(box_array.boxes) > 0 and len(coef_array.coefficients) > 0:
        if (box_array.boxes[0].header.frame_id == coef_array.coefficients[0].header.frame_id):
            box = box_array.boxes[0]
            box_pos = numpy.array([box.pose.position.x, box.pose.position.y, box.pose.position.z])
            coef = coef_array.coefficients[0].values
            n = numpy.array([coef[0], coef[1], coef[2]])
            d = (coef[0] * box_pos[0] + coef[1] * box_pos[1] + coef[2] * box_pos[2] + coef[3] /
                 math.sqrt(coef[0] * coef[0] + coef[1] * coef[1] + coef[2] * coef[2]))
            required_distance = distance - d
            rospy.loginfo("required_distance: %f" % (required_distance))
            new_box_pos = required_distance * n + box_pos
            box.pose.position.x = new_box_pos[0]
            box.pose.position.y = new_box_pos[1]
            box.pose.position.z = new_box_pos[2]
            result_box_array = BoundingBoxArray()
            result_box_array.header = box_array.header
            result_box_array.boxes = [box]
            pub_box_array.publish(result_box_array)
        else:
            rospy.logwarn("frame_id of box array and coef array are not same")
    else:
        rospy.logwarn("Size of box array and coef array are not enough")
Example #40
def callback(boxes_msg):
    # we expect odom
    (trans, rot) = tf_listener.lookupTransform(frame_id, boxes_msg.header.frame_id, rospy.Time(0))
    origin_matrix = concatenate_matrices(translation_matrix(trans),
                                         quaternion_matrix(rot))
    min_y_pose = None
    min_y_dist = 100.0
    min_y_box = None
    for box in boxes_msg.boxes:
        pose = box.pose
        # pose to matrix
        pose_matrix = concatenate_matrices(
            translation_matrix((pose.position.x, pose.position.y, pose.position.z)),
            quaternion_matrix((pose.orientation.x, pose.orientation.y,
                               pose.orientation.z, pose.orientation.w)))
        box_pose = concatenate_matrices(origin_matrix, pose_matrix)
        box_pos = translation_from_matrix(box_pose)
        if (box.dimensions.x < min_x or box.dimensions.x > max_x
            or box.dimensions.y < min_y or box.dimensions.y > max_y
            or box.dimensions.z < min_z or box.dimensions.z > max_z):
            continue
        if 0.8 < box_pos[2] and box_pos[2] < 1.0:
            if min_y_dist > abs(box_pos[1]):
                min_y_dist = abs(box_pos[1])
                min_y_pose = pose
                min_y_box = box
    if min_y_pose:
        pose_msg = PoseStamped()
        pose_msg.header = boxes_msg.header
        pose_msg.pose = min_y_pose
        pub.publish(pose_msg)
        box_arr = BoundingBoxArray()
        box_arr.header = boxes_msg.header
        box_arr.boxes = [min_y_box]
        pub_box.publish(box_arr)
        rospy.loginfo("Success to detect handle bbx")
        return
    rospy.logerr("Failed to detect handle bbox")
Example #41
    def _decompose(self, *bboxes_msgs):
        out_msg = BoundingBoxArray()
        out_msg.header = bboxes_msgs[0].header
        for bboxes_msg in bboxes_msgs:
            out_msg.boxes.extend(bboxes_msg.boxes)
        self._pub.publish(out_msg)
Example #42
#!/usr/bin/env python

import rospy
import math
from geometry_msgs.msg import PoseStamped
from jsk_recognition_msgs.msg import BoundingBox, BoundingBoxArray
rospy.init_node("attention_pose_set")
pub = rospy.Publisher("/attention_clipper/input/box_array", BoundingBoxArray)
r = rospy.Rate(10)
theta = 0
while not rospy.is_shutdown():
    boxes = BoundingBoxArray()
    theta = math.fmod(theta + 0.1, math.pi * 2)
    box = BoundingBox()
    box.header.stamp = rospy.Time.now()
    box.header.frame_id = "camera_link"
    box.pose.orientation.w = 1
    box.pose.position.x = 0.8 * math.cos(theta)
    box.pose.position.y = 0.8 * math.sin(theta)
    box.pose.position.z = 0.1
    box.dimensions.x = 0.1
    box.dimensions.y = 0.1
    box.dimensions.z = 0.1
    box2 = BoundingBox()
    box2.header.stamp = rospy.Time.now()
    box2.header.frame_id = "camera_link"
    box2.pose.orientation.w = 1
    box2.pose.position.x = 0.8 * math.cos(theta)
    box2.pose.position.y = 0.8 * math.sin(theta)
    box2.pose.position.z = -0.1
    box2.dimensions.x = 0.1