def main():
    req = FindScenesRequest()
    objects = []

    obj = AsrObject()
    obj.type = 'CupPdV'
    obj.poseEstimation.pose.position.x = -0.895672837120925
    obj.poseEstimation.pose.position.y = 0.876364548041306
    obj.poseEstimation.pose.position.z = 0.675630282005366
    obj.poseEstimation.pose.orientation.w = 0.0438159930101303
    obj.poseEstimation.pose.orientation.x = -0.00274503713451766
    obj.poseEstimation.pose.orientation.y = -0.673982743489664
    obj.poseEstimation.pose.orientation.z = 0.737441445137044

    obj1 = AsrObject()
    obj1.type = 'PlateDeep'
    obj1.poseEstimation.pose.position.x = -0.884891287861248
    obj1.poseEstimation.pose.position.y = 0.639571172744163
    obj1.poseEstimation.pose.position.z = 0.660505174542679
    obj1.poseEstimation.pose.orientation.w = 0.650782548685679
    obj1.poseEstimation.pose.orientation.x = -0.64554621902581
    obj1.poseEstimation.pose.orientation.y = -0.27131992194386
    obj1.poseEstimation.pose.orientation.z = 0.293492169203933

    objects.append(obj)
    objects.append(obj1)

    try:
        rospy.wait_for_service('/rp_ism_node/find_scenes', timeout=5)
    except rospy.exceptions.ROSException as e:
        rospy.loginfo('Could not reach service: %s' % e)
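# Hypothetical completion (not part of the original script): main() above only
# waits for the service. Sending the request would presumably follow the same
# call pattern as test_world_model_and_recognizer_prediction below; the helper
# name call_find_scenes is an assumption made for this sketch.
def call_find_scenes(req, objects):
    find_scenes = rospy.ServiceProxy('/rp_ism_node/find_scenes', FindScenes)
    req.objects.extend(objects)
    resp = find_scenes(req)
    rospy.loginfo('find_scenes returned %d results' % resp.result_size)
    return resp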
def test_world_model_and_recognizer_prediction(self):
    # Reset world model and scene recognition before the test.
    world_model_reset = rospy.ServiceProxy(
        '/env/asr_world_model/empty_found_object_list', Empty)
    rp_ism_node_reset = rospy.ServiceProxy('/rp_ism_node/reset', Empty)
    world_model_reset()
    rp_ism_node_reset()

    req = PushFoundObjectRequest()
    detected_pbd_object = AsrObject()
    detected_pbd_object.type = 'CoffeeBox'
    detected_pbd_object.header.frame_id = '/map'
    detected_pbd_object.identifier = ''
    detected_pbd_object.providedBy = 'textured'
    detected_pbd_object.colorName = 'textured'
    detected_pbd_object.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/CoffeeBox/CoffeeBox.dae'

    detected_pose = Pose()
    detected_pose.position.x = 1
    detected_pose.position.y = 2
    detected_pose.position.z = 3
    detected_pose.orientation.w = 0.695641823146944
    detected_pose.orientation.x = -0.702895791692416
    detected_pose.orientation.y = -0.102411625776331
    detected_pose.orientation.z = 0.107386306462868
    detected_pose_with_covariance = PoseWithCovariance()
    detected_pose_with_covariance.pose = detected_pose
    detected_pbd_object.sampledPoses.append(detected_pose_with_covariance)

    # Push the object and check that the world model reports exactly one object.
    push_found_object = rospy.ServiceProxy(
        '/env/asr_world_model/push_found_object', PushFoundObject)
    push_found_object(detected_pbd_object)
    get_found_objects = rospy.ServiceProxy(
        '/env/asr_world_model/get_found_object_list', GetFoundObjectList)
    resp = get_found_objects()
    self.assertEqual(len(resp.object_list), 1)
    for found_object in resp.object_list:
        self.assertEqual(len(found_object.sampledPoses), 1)

    # Run scene recognition on the found objects and ask for a pose prediction.
    find_scenes = rospy.ServiceProxy('/rp_ism_node/find_scenes', FindScenes)
    find_scenes_req = FindScenesRequest()
    for pbd_object in resp.object_list:
        find_scenes_req.objects.append(pbd_object)
    find_scenes_resp = find_scenes(find_scenes_req)
    self.assertGreater(find_scenes_resp.result_size, 0)

    rp_ism_node_predict = rospy.ServiceProxy(
        '/rp_ism_node/get_point_cloud', GetPointCloud)
    pose_prediction_resp = rp_ism_node_predict()
    self.assertGreater(len(pose_prediction_resp.point_cloud.elements), 500)
def detection_callback(data):
    global detected_objects
    # found object for the first time
    rospy.loginfo("Found " + data.type)
    if len(data.sampledPoses) == 0:
        rospy.logerr("Got an AsrObject without poses.")
        sys.exit(1)
    # Store the found object so it can be pushed to the world model later;
    # objects are transformed to the map frame inside the world model.
    detected_objects.append(data)
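# Hedged usage sketch (assumption, not shown in the original file): the callback
# above is meant to be registered on an AsrObject topic. The '/stereo/objects'
# topic name is borrowed from the publisher/rosbag examples further down and
# the node name is hypothetical.
detected_objects = []

def listen_for_detections():
    rospy.init_node('object_detection_listener')
    rospy.Subscriber('/stereo/objects', AsrObject, detection_callback)
    rospy.spin()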
def get_smacks():
    obj = AsrObject()
    obj.type = 'Smacks'
    obj.header.frame_id = '/camera_left_frame'
    obj.identifier = '0'
    obj.providedBy = 'textured'
    obj.colorName = 'textured'
    obj.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/Smacks/Smacks.dae'

    # Sample a pose around the origin; note that quaternion_from_euler
    # expects its angles in radians.
    pose_cov = PoseWithCovariance()
    pose_cov.pose.position.x = random.gauss(0, 0.5)
    pose_cov.pose.position.y = random.gauss(0, 0.5)
    pose_cov.pose.position.z = 1.12
    q = tf.transformations.quaternion_from_euler(90, 0, 0, 'rzyx')
    pose_cov.pose.orientation = Quaternion(*q)
    obj.sampledPoses.append(pose_cov)
    return obj
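# Hedged usage sketch (assumption, not part of the original file): the object
# returned by get_smacks() can be pushed into the world model via the same
# service that the world model tests in this listing use.
def push_smacks_into_world_model():
    rospy.wait_for_service('/env/asr_world_model/push_found_object', timeout=5)
    push_found_object = rospy.ServiceProxy(
        '/env/asr_world_model/push_found_object', PushFoundObject)
    push_found_object(get_smacks())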
def rosbagPdbObjectMessage(bag, posX, posY, posZ, oriW, oriX, oriY, oriZ, objType):
    # build object ...
    msg = AsrObject()
    msg.header.stamp = rospy.Time.now()
    msg.header.frame_id = '/PTU'
    msg.providedBy = 'textured'
    msg.poseEstimation.pose.position.x = posX
    msg.poseEstimation.pose.position.y = posY
    msg.poseEstimation.pose.position.z = posZ
    msg.poseEstimation.pose.orientation.w = oriW
    msg.poseEstimation.pose.orientation.x = oriX
    msg.poseEstimation.pose.orientation.y = oriY
    msg.poseEstimation.pose.orientation.z = oriZ
    msg.type = objType
    # ... and write it into bag file
    bag.write('/stereo/objects', msg)
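# Hedged usage sketch (bag file name and pose values are assumptions): open a
# bag in write mode, record one object message and close the bag again. A node
# must be initialized so that rospy.Time.now() inside the helper works.
import rosbag

def record_one_object():
    rospy.init_node('object_bag_writer')
    bag = rosbag.Bag('recorded_objects.bag', 'w')
    try:
        rosbagPdbObjectMessage(bag, 0.1, 0.2, 0.9, 1.0, 0.0, 0.0, 0.0, 'Smacks')
    finally:
        bag.close()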
def publishPdbObjectMessage(pub, posX, posY, posZ, oriW, oriX, oriY, oriZ, objType):
    # build object ...
    msg = AsrObject()
    msg.header.stamp = rospy.Time.now()
    msg.header.frame_id = '/PTU'
    msg.providedBy = 'textured'
    msg.poseEstimation.pose.position.x = posX
    msg.poseEstimation.pose.position.y = posY
    msg.poseEstimation.pose.position.z = posZ
    msg.poseEstimation.pose.orientation.w = oriW
    msg.poseEstimation.pose.orientation.x = oriX
    msg.poseEstimation.pose.orientation.y = oriY
    msg.poseEstimation.pose.orientation.z = oriZ
    msg.type = objType
    # ... and publish it
    pub.publish(msg)
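# Hedged usage sketch (topic and node names as well as the pose values are
# assumptions): create a publisher for the helper above and publish a single
# object message.
def publish_one_object():
    rospy.init_node('object_publisher')
    pub = rospy.Publisher('/stereo/objects', AsrObject, queue_size=10)
    rospy.sleep(1.0)  # give subscribers time to connect before publishing
    publishPdbObjectMessage(pub, 0.1, 0.2, 0.9, 1.0, 0.0, 0.0, 0.0, 'Smacks')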
def test_world_model_with_1_object(self):
    world_model_reset = rospy.ServiceProxy(
        '/env/asr_world_model/empty_found_object_list', Empty)
    world_model_reset()

    req = PushFoundObjectRequest()
    detected_pbd_object = AsrObject()
    detected_pbd_object.type = 'CoffeeBox'
    detected_pbd_object.header.frame_id = 'camera_left_frame'
    detected_pbd_object.identifier = ''
    detected_pbd_object.providedBy = 'textured'
    detected_pbd_object.colorName = 'textured'
    detected_pbd_object.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/CoffeeBox/CoffeeBox.dae'

    detected_pose = Pose()
    detected_pose.position.x = 1
    detected_pose.position.y = 2
    detected_pose.position.z = 3
    detected_pose.orientation.w = 0.695641823146944
    detected_pose.orientation.x = -0.702895791692416
    detected_pose.orientation.y = -0.102411625776331
    detected_pose.orientation.z = 0.107386306462868
    detected_pose_with_c = PoseWithCovariance()
    detected_pose_with_c.pose = detected_pose
    detected_pbd_object.sampledPoses.append(detected_pose_with_c)

    push_found_object = rospy.ServiceProxy(
        '/env/asr_world_model/push_found_object', PushFoundObject)
    push_found_object(detected_pbd_object)

    get_found_objects = rospy.ServiceProxy(
        '/env/asr_world_model/get_found_object_list', GetFoundObjectList)
    resp = get_found_objects()
    self.assertEqual(len(resp.object_list), 1)
    for found_object in resp.object_list:
        self.assertEqual(len(found_object.sampledPoses), 1)
def main():
    req = PushFoundObjectRequest()
    obj = AsrObject()
    obj.header.frame_id = '/camera_left_frame'
    obj.type = 'Smacks'
    obj.providedBy = 'textured'
    obj.colorName = 'textured'
    obj.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/Smacks/Smacks.dae'

    pose = PoseWithCovariance()
    pose.pose.position.x = -1.20467103562224
    pose.pose.position.y = 0.589709102059754
    pose.pose.position.z = 1.02933642432456
    pose.pose.orientation.w = 0.573287415807964
    pose.pose.orientation.x = -0.58793291283527
    pose.pose.orientation.y = -0.394117786539212
    pose.pose.orientation.z = 0.412731873272096
    obj.sampledPoses.append(pose)

    try:
        rospy.wait_for_service('/env/asr_world_model/push_found_object', timeout=5)
    except rospy.exceptions.ROSException as e:
        rospy.loginfo('Could not reach service: %s' % e)
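# Hypothetical completion (assumption, not in the original snippet): once the
# service is reachable, main() would typically push the constructed object the
# same way the world model tests in this listing do.
def push_object(found_object):
    push_found_object = rospy.ServiceProxy(
        '/env/asr_world_model/push_found_object', PushFoundObject)
    push_found_object(found_object)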
def main():
    req = FindScenesRequest()
    objects = []

    obj = AsrObject()
    obj.type = 'Knaeckebrot'
    obj.header.frame_id = '/camera_left_frame'
    obj.identifier = ''
    obj.providedBy = 'textured'
    obj.colorName = 'textured'
    obj.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/Knaeckebrot/Knaeckebrot.dae'
    obj.poseEstimation.pose.position.x = 0.15
    obj.poseEstimation.pose.position.y = 0.15
    obj.poseEstimation.pose.position.z = 0
    obj.poseEstimation.pose.orientation.w = 1
    obj.poseEstimation.pose.orientation.x = 0
    obj.poseEstimation.pose.orientation.y = 0
    obj.poseEstimation.pose.orientation.z = 0
    objects.append(obj)

    try:
        rospy.wait_for_service('/rp_ism_node/find_scenes', timeout=5)
    except rospy.exceptions.ROSException as e:
        rospy.loginfo('Could not reach service: %s' % e)
def main():
    req = FindScenesRequest()
    objects = []

    obj = AsrObject()
    obj.type = 'CoffeeBox'
    obj.header.frame_id = '/camera_left_frame'
    obj.identifier = ''
    obj.providedBy = 'textured'
    obj.colorName = 'textured'
    obj.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/CoffeeBox/CoffeeBox.dae'
    obj.poseEstimation.pose.position.x = -0.23448988704059
    obj.poseEstimation.pose.position.y = 0.502252516129886
    obj.poseEstimation.pose.position.z = 0.684928012927488
    obj.poseEstimation.pose.orientation.w = 0.695641823146944
    obj.poseEstimation.pose.orientation.x = -0.702895791692416
    obj.poseEstimation.pose.orientation.y = -0.102411625776331
    obj.poseEstimation.pose.orientation.z = 0.107386306462868
    objects.append(obj)

    try:
        rospy.wait_for_service('/rp_ism_node/find_scenes', timeout=5)
    except rospy.exceptions.ROSException as e:
        rospy.loginfo('Could not reach service: %s' % e)
def main():
    objects = []

    obj = AsrObject()
    obj.type = 'VitalisSchoko'
    obj.identifier = '/map'
    obj.header.frame_id = '/map'
    poseWithCovariance = PoseWithCovariance()
    poseWithCovariance.pose.position.x = -0.3
    poseWithCovariance.pose.position.y = -1
    poseWithCovariance.pose.position.z = -0.6
    poseWithCovariance.pose.orientation.w = 0.5
    poseWithCovariance.pose.orientation.x = 0
    poseWithCovariance.pose.orientation.y = 0
    poseWithCovariance.pose.orientation.z = -0.5
    obj.sampledPoses.append(poseWithCovariance)
    objects.append(obj)

    # Use a fresh PoseWithCovariance per object; mutating the instance that was
    # already appended above would also change the pose stored in that object.
    obj2 = AsrObject()
    obj2.type = 'PlateDeep'
    obj2.identifier = '/map'
    obj2.header.frame_id = '/map'
    poseWithCovariance = PoseWithCovariance()
    poseWithCovariance.pose.position.x = -0.51
    poseWithCovariance.pose.position.y = -1.2
    poseWithCovariance.pose.position.z = -0.65
    poseWithCovariance.pose.orientation.w = 0.5
    poseWithCovariance.pose.orientation.x = 0.0
    poseWithCovariance.pose.orientation.y = 0.0
    poseWithCovariance.pose.orientation.z = -0.5
    obj2.sampledPoses.append(poseWithCovariance)
    #objects.append(obj2)

    obj2 = AsrObject()
    obj2.type = 'Cup'
    obj2.identifier = '/map'
    obj2.header.frame_id = '/map'
    poseWithCovariance = PoseWithCovariance()
    poseWithCovariance.pose.position.x = 0
    poseWithCovariance.pose.position.y = -1.13
    poseWithCovariance.pose.position.z = -0.65
    poseWithCovariance.pose.orientation.w = 0.5
    poseWithCovariance.pose.orientation.x = 0.0
    poseWithCovariance.pose.orientation.y = 0.0
    poseWithCovariance.pose.orientation.z = -0.5
    obj2.sampledPoses.append(poseWithCovariance)
    #objects.append(obj2)

    path = rospkg.RosPack().get_path(
        'asr_recognizer_prediction_psm') + '/models/dome_scene1.xml'

    scenes = ['background', 'marker_scene1', 'marker_scene2']
    scene_probabilities = [0.25, 0.5, 0.25]
    num_overall_hypothesis = 10
    base_frame = '/map'

    try:
        rospy.wait_for_service('/recognizer_prediction_psm', timeout=5)
    except rospy.exceptions.ROSException as e:
        rospy.loginfo('Could not reach service: %s' % e)
    obj.meshResourcePath = 'package://asr_object_database/rsc/databases/textured_objects/Smacks/Smacks.dae'

    pose_cov = PoseWithCovariance()
    pose_cov.pose.position.x = random.gauss(0, 0.5)
    pose_cov.pose.position.y = random.gauss(0, 0.5)
    pose_cov.pose.position.z = 1.12
    q = tf.transformations.quaternion_from_euler(90, 0, 0, 'rzyx')
    pose_cov.pose.orientation = Quaternion(*q)
    obj.sampledPoses.append(pose_cov)
    return obj


if __name__ == '__main__':
    # Initialize the node and name it.
    rospy.init_node('test_world_model')

    # Push 500 objects, then 500 more, then clear the world model again,
    # querying the found object list after each step.
    object_list = []
    for i in range(500):
        object_list.append(get_coffee_box())
    push_object_in_wm(object_list)
    found_object_list()

    for i in range(500):
        object_list.append(get_coffee_box())
    push_object_in_wm(object_list)
    found_object_list()

    clear_world_model()
    found_object_list()
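# The helpers get_coffee_box, push_object_in_wm, found_object_list and
# clear_world_model are not shown in this excerpt. A minimal sketch of what
# push_object_in_wm could look like, assuming it pushes every object through the
# push_found_object service used in the tests above (the real helper may use a
# different service or push the whole list at once):
def push_object_in_wm(object_list):
    push_found_object = rospy.ServiceProxy(
        '/env/asr_world_model/push_found_object', PushFoundObject)
    for found_object in object_list:
        push_found_object(found_object)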