Esempio n. 1
1
    def calculate_mk2_ik(self):
        """Drive the manipulator through two Cartesian waypoints via MoveIt.

        Sends two sequential pose goals (identity orientation) to the
        ``manipulator`` group's action client and waits up to 10 s for each.
        """
        # home position reference: Point(0.298, -0.249, -0.890)

        def _go_to(point):
            # Build one pose goal, send it, and block (<=10 s) for the result.
            goal_pose = Pose()
            goal_pose.position = point
            quat = quaternion_from_euler(0.0, 0.0, 0.0)  # roll, pitch, yaw
            goal_pose.orientation = Quaternion(*quat.tolist())
            moveit_goal = self.create_move_group_pose_goal(
                goal_pose, group="manipulator", end_link_name="link_7",
                plan_only=False)
            rospy.loginfo("Sending goal...")
            self.moveit_ac.send_goal(moveit_goal)
            rospy.loginfo("Waiting for result...")
            self.moveit_ac.wait_for_result(rospy.Duration(10.0))
            return self.moveit_ac.get_result()

        # Goto position 1
        moveit_result = _go_to(Point(0.298, -0.249, -0.890))
        # Goto position 2
        moveit_result = _go_to(Point(0.305, -0.249, -1.05))
	def update(self):
		# protected region updateCode on begin #
		"""Transform camera detections from pixel coordinates into the
		robot base_link frame and store them in out_CameraDetections.
		"""
		detections = copy.deepcopy(self.in_CameraDetections)

		# nothing to do without at least one detection
		if not detections.poses:
			return

		# pixel coordinates -> metric coordinates in the camera_base_link frame
		camera_frame = PoseArray()
		camera_frame.header = detections.header
		for src in detections.poses:
			p = Pose()
			p.position.x = (src.position.x - self.config_ResolutionX/2.0) * self.config_MeterPerPixel
			p.position.y = (src.position.y - self.config_ResolutionY/2.0) * self.config_MeterPerPixel
			p.position.z = 0.0
			p.orientation = src.orientation
			camera_frame.poses.append(p)

		# camera_base_link frame -> robot base_link frame
		base_frame = PoseArray()
		base_frame.header = camera_frame.header
		for src in camera_frame.poses:
			p = Pose()
			p.position.x = self.camera_base_link_offset_X + src.position.x
			p.position.y = self.camera_base_link_offset_Y - src.position.y
			p.position.z = 0.0
			p.orientation = src.orientation # TODO: rotate 180deg around x-axis
			base_frame.poses.append(p)

		self.out_CameraDetections = base_frame
		# protected region updateCode end #
		pass
Esempio n. 3
0
    def execute(self, userdata):
        """Build a move-base goal placing the robot in front of the door.

        Closed door (handle on the left/right): stop short of the handle.
        Open door: aim one metre past the door to cross it.
        """
        rospy.loginfo("Creating goal to put robot in front of handle")
        pose_handle = Pose()
        door_data = userdata.door_detection_data_in_base_link
        if door_data.handle_side in ("left", "right"):  # closed door
            ref = door_data.door_handle.pose
            (r, p, theta) = euler_from_quaternion((ref.orientation.x,
                                                   ref.orientation.y,
                                                   ref.orientation.z,
                                                   ref.orientation.w))
            # the door faces the robot; flip yaw to look parallel to the door
            theta += 3.1416
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))
            pose_handle.position.x = ref.position.x - 0.4  # align the shoulder with the handle
            pose_handle.position.y = ref.position.y + 0.2
            pose_handle.position.z = 0.0  # Z is irrelevant for base motion
        else:  # open door: just cross it
            ref = door_data.door_position.pose
            (r, p, theta) = euler_from_quaternion((ref.orientation.x,
                                                   ref.orientation.y,
                                                   ref.orientation.z,
                                                   ref.orientation.w))
            # the door faces the robot; flip yaw to look parallel to the door
            theta += 3.1416
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))
            pose_handle.position.x = ref.position.x + 1.0  # enter the door
            pose_handle.position.y = ref.position.y
        userdata.door_handle_pose_goal = pose_handle

        rospy.loginfo("Move base goal: \n" + str(pose_handle))

        return succeeded
def move_to_box(objcolorl):
    """Move the left arm above the sorting box for the detected colour and
    show a success image on the robot's head display.

    Args:
        objcolorl (int): 0 selects the green box; any other value selects
            the blue box.
    """
    # Both branches previously duplicated the pose/display/publish code;
    # only the drop-off point and the image differ.
    if objcolorl == 0:
        # green box
        box_point = Point(0.570, -0.176, 0.283)
        image_path = '/home/ynazon1/Pictures/green_success.png'
    else:
        # blue box
        box_point = Point(0.708, -0.153, 0.258)
        image_path = '/home/ynazon1/Pictures/red_success.png'

    pose = Pose()
    pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
    pose.position = box_point

    img = cv2.imread(image_path)
    msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding="bgr8")
    pubpic = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
    pubpic.publish(msg)
    # Sleep to allow for image to be published.
    rospy.sleep(1)

    request_pose(pose, "left", left_group)
Esempio n. 5
0
    def update_particle_cloud(self, scan):
        """Resample the particle cloud from a new laser scan.

        Weights each particle with the sensor model, tracks slow/fast decayed
        weight averages to estimate localisation confidence, and rebuilds the
        cloud: a confidence-dependent fraction of particles is re-initialised
        uniformly over the map, the rest are drawn by roulette-wheel
        resampling with Gaussian noise.

        NOTE(review): the original file mixed tabs and spaces so badly that
        it could not parse; this version normalises the indentation without
        changing the statement sequence.
        """
        # remove NaN and inf values
        scan.ranges = scan_filter(scan.ranges, self.sensor_model.scan_range_max)

        if len(scan.ranges) > 0:
            weights = [self.sensor_model.get_weight(scan, pose)
                       for pose in self.particlecloud.poses]
        else:
            print("Error: Scan ranges null")
            return

        # decayed weight averages -> confidence in the current weights
        w_avg = mean(weights)
        self.slow_decay += 0.001 * (w_avg - self.slow_decay)
        self.fast_decay += 0.1 * (w_avg - self.fast_decay)

        # cumulative weights form the roulette wheel
        breakpoints = cumsum(weights)
        maximum = max(breakpoints)

        # probability that the weights are NOT okay -> how many particles
        # to re-initialise randomly
        prob = max(0.0, 1.0 - (self.fast_decay / self.slow_decay))
        if not prob == 0:
            loops = int(len(self.particlecloud.poses) * prob)
        else:
            loops = 0

        # Update particlecloud, given map and laser scan
        pose_arr = PoseArray()
        for i in range(0, len(self.particlecloud.poses)):
            new_pose = Pose()
            if i < loops:
                # add completely random noise to re-localise;
                # 33.1 and 31.95 being the x and y sizes of the map respectively
                new_pose.position.x = uniform(0, 33.1)
                new_pose.position.y = uniform(0, 31.95)
                new_pose.orientation = rotateQuaternion(Quaternion(w=1),
                                                        uniform(0, math.pi * 2))
            else:
                # roulette wheel resampling: O(log n) pick via bisect over
                # the sorted cumulative weights
                pick = uniform(0, maximum)
                # renamed from `i` — the original shadowed the loop index
                idx = bisect.bisect(breakpoints, pick)
                choice = self.particlecloud.poses[idx]
                position = choice.position
                orientation = choice.orientation

                # add resampling noise to cope with changes in odometry
                new_pose.position.x = gauss(position.x, 0.05)
                new_pose.position.y = gauss(position.y, 0.05)
                rotation = gauss(0, 0.125)
                new_pose.orientation = rotateQuaternion(orientation, rotation)
                # NOTE(review): this next line discards the rotation noise
                # just applied; preserved as-is to keep behaviour identical —
                # confirm whether it is a leftover.
                new_pose.orientation = orientation

            pose_arr.poses.append(new_pose)

        self.particlecloud = pose_arr
def free_spots_on_table(table, obj, orientation, blocking_objs, res=0.1):
    """Find collision-free poses for placing `obj` on `table` at the given
    orientation, avoiding `blocking_objs`, on a grid of resolution `res`.

    Returns the candidate poses with the object resting on the table surface.
    """
    object_pose = Pose()
    object_pose.orientation = orientation

    # bounding-box corners in the object's own frame
    corners_obj = gt.bounding_box_corners(obj.shapes[0])
    # the same corners after applying the requested orientation
    corners_hdr = [gt.transform_list(c, object_pose) for c in corners_obj]
    # and relative to the table's lower-left corner
    corners_tab = [gt.inverse_transform_list(c, table.poses[0]) for c in corners_hdr]

    # footprint of the rotated object above the table
    xs = [c[0] for c in corners_tab]
    ys = [c[1] for c in corners_tab]
    object_dims = (max(xs) - min(xs), max(ys) - min(ys))
    rospy.loginfo('Object dimensions are '+str(object_dims))

    obj_poses = free_spots_from_dimensions(table, object_dims, blocking_objs, res=res)

    # height of the object in the world in its place orientation
    obj_height = -1.0 * min([c[2] for c in corners_hdr])
    above_table = gt.transform_list([0, 0, obj_height], table.poses[0])
    place_height = above_table[2]
    # return proper object poses with the object sitting on the table
    for candidate in obj_poses:
        candidate.pose.position.z = place_height
        candidate.pose.orientation = copy.copy(orientation)
    return obj_poses
    def __init__(self, from_frame, to_frame, position, orientation):
        """Set up an interactive-marker server that broadcasts a transform
        and a PoseStamped between ``from_frame`` and ``to_frame``.

        :param from_frame: parent frame of the published transform.
        :param to_frame: child frame of the published transform.
        :param position: initial position (object with .x/.y/.z).
        :param orientation: initial orientation quaternion (.x/.y/.z/.w).
        """
        self.server = InteractiveMarkerServer("posestamped_im")
        o = orientation
        # RPY is computed only so the log line below can show it.
        r, p, y = euler_from_quaternion([o.x, o.y, o.z, o.w])
        rospy.loginfo("Publishing transform and PoseStamped from: " +
                      from_frame + " to " + to_frame +
                      " at " + str(position.x) + " " + str(position.y) + " " + str(position.z) +
                      " and orientation " + str(o.x) + " " + str(o.y) +
                      " " + str(o.z) + " " + str(o.w) + " or rpy " +
                      str(r) + " " + str(p) + " " + str(y))
        self.menu_handler = MenuHandler()
        self.from_frame = from_frame
        self.to_frame = to_frame
        # Transform broadcaster
        self.tf_br = tf.TransformBroadcaster()

        self.pub = rospy.Publisher('/posestamped', PoseStamped, queue_size=1)
        rospy.loginfo("Publishing posestampeds at topic: " + str(self.pub.name))
        # Remember the initial pose so the marker can be reset to it later.
        pose = Pose()
        pose.position = position
        pose.orientation = orientation
        self.last_pose = pose
        self.print_commands(pose)
        self.makeGraspIM(pose)

        self.server.applyChanges()
def processFeedback(feedback):
    """Handle InteractiveMarker feedback.

    Republishes the marker pose on every event, and reacts to button
    clicks, menu selections (reset pose / reset orientation / publish
    event) and mouse-up events.
    """
    # every feedback event carries the current marker pose; republish it
    pub_pose.publish(feedback.pose)

    if feedback.event_type == InteractiveMarkerFeedback.BUTTON_CLICK:
        pub_event.publish(event_on_click)
        rospy.loginfo(": button click.")
    elif feedback.event_type == InteractiveMarkerFeedback.MENU_SELECT:
        rospy.loginfo(": menu item " + str(feedback.menu_entry_id) + " clicked")
        print(feedback.menu_entry_id)
        if feedback.menu_entry_id == 1:
            # keep the current position, restore the starting orientation
            # (removed a dead `p = Pose()` that was immediately overwritten)
            rospy.loginfo(": reset pose")
            p = feedback.pose
            p.orientation = starting_pose.orientation
            server.setPose(feedback.marker_name, p)
            server.applyChanges()
        elif feedback.menu_entry_id == 2:
            rospy.loginfo(": reset orientation")
            server.setPose(feedback.marker_name, starting_pose)
            server.applyChanges()
        elif feedback.menu_entry_id == 3:
            # menu ids are mutually exclusive, so this is now an elif
            print(feedback.menu_entry_id)
            pub_event.publish(event_on_click)
    elif feedback.event_type == InteractiveMarkerFeedback.MOUSE_UP:
        rospy.loginfo(": new Pose is  " + str(feedback.pose))
def get_grasp_pose(g_decision_making_state, object_grasp_pose, object_grasp_pose_set):
	"""Populate ``object_grasp_pose_set`` with candidate grasp poses.

	Loads pre-computed grasps for the current object from
	``../grasp_data/<object_name>.npz`` and, for each stored grasp, combines
	its orientation with the orientation of ``object_grasp_pose``.  Also
	attempts to move the end effector for every candidate (side effect).

	Returns the (mutated) ``object_grasp_pose_set``.
	"""
	# load pre-computed grasps
	# loaded_file = numpy.load('../grasp_data/' + g_decision_making_state.object_name + '.npz' )
	
	# loaded_grasp = loaded_file['grasp'][0]
	# print len(loaded_grasp[g_decision_making_state.object_name])

	loaded_file = numpy.load('../grasp_data/' + g_decision_making_state.object_name + '.npz' )
	loaded_grasp = loaded_file['grasp'][0]
	num_of_grasp = len(loaded_grasp[g_decision_making_state.object_name])
	# NOTE(review): `temp` (and `object_grasp_pose_temp` below) are aliases
	# of `object_grasp_pose`, not copies — every generated pose shares the
	# same `position` object.  Confirm whether a deepcopy was intended.
	temp = object_grasp_pose
	for i in range(0,num_of_grasp):
		print i
		print_angles(loaded_grasp[g_decision_making_state.object_name][i])
		print_angles(temp)
		object_grasp_pose_temp = temp
		print_angles(temp)
		object_grasp_pose_set[i] = Pose()
		object_grasp_pose_set[i].position = temp.position
		object_grasp_pose_set[i].orientation = rotate_orientation(object_grasp_pose_temp.orientation, loaded_grasp[g_decision_making_state.object_name][i].orientation)

		# attempt the motion for this grasp candidate (result unused)
		move_ok = plan_and_move_end_effector_to(object_grasp_pose, 0.3, 
                                            g_bin_positions[g_decision_making_state.bin_id]['map_robot_position_2_ik_seed'])
		print_angles(object_grasp_pose_set[i])
		# raw_input("press Enter to continue...")

	return object_grasp_pose_set
Esempio n. 10
0
 def execute(self, userdata):
     """Load the demo drink orders and the guests' location into userdata."""
     # two pre-scripted orders for the demo run
     userdata.orders = [DrinkOrder('david', 'coke'), DrinkOrder('michael', 'milk')]
     guests_pose = Pose()
     guests_pose.position = Point(0.059, 6.26, 0.0)
     guests_pose.orientation = Quaternion(0, 0, -0.59, 0.8)
     userdata.guests_location = guests_pose
     return succeeded
Esempio n. 11
0
	def project(self, p, radius, width, height) :
		"""Back-project an image detection to a 3D pose in the camera frame.

		p: (u, v) pixel coordinates of the detected ball centre.
		radius: apparent ball radius in pixels.
		width, height: image dimensions in pixels.
		Returns a Pose on the camera ray at the estimated distance,
		with identity orientation.
		"""
		# focal lengths derived from the camera's fields of view
		xFOV = 63.38
		yFOV = 48.25
		cx = width /2
		cy = height /2
		fx = cx / np.tan((xFOV/2) * np.pi / 180)
		fy = cy / np.tan((yFOV/2) * np.pi / 180)

		# unit ray through the pixel (y flipped: image rows grow downwards)
		ray = np.zeros(3)
		ray[0] = (p[0] - cx) / fx
		ray[1] = -(p[1] - cy) / fy
		ray[2] = 1
		ray = ray / np.linalg.norm(ray)
		# apparent radius shrinks inversely with distance
		distance = self.pixel_radius / radius
		ray = ray * distance

		ball_pose = Pose()
		ball_pose.position = Point(ray[0], ray[1], ray[2])
		ball_pose.orientation = Quaternion(0,0,0,1)
		return ball_pose
Esempio n. 12
0
    def __init__(self):
        """State machine: announce the plan, move to the hanger room and
        approach the hanger (retrying MOVE_TO_HANGER on abort).
        """
        smach.StateMachine.__init__(self, [succeeded, preempted, aborted])
        
        with self:
            intro_text = "Good!, I will take it to the hanger."
            smach.StateMachine.add('INTRO',
                                   SpeakActionState(intro_text),
                                   transitions={succeeded: 'MOVE_TO_HANGER'})
            
            # move to hanger
            self.userdata.hanger = HANGER_NAME
            smach.StateMachine.add('MOVE_TO_HANGER',
                                  MoveToRoomStateMachine(announcement=None),
                                  transitions={succeeded: 'APPROACH_TO_HANGER',
                                               preempted: preempted,
                                               aborted: 'MOVE_TO_HANGER'},
                                  remapping={'room_name': 'hanger'})

            # Rotate 180 degrees.  NOTE(review): 3.6 rad is ~206 deg, not
            # 180, and the state that would use pose_turn is commented out
            # below, so pose_turn is currently unused — confirm intent.
            pose_turn = Pose()
            pose_turn.orientation = Quaternion(*quaternion_from_euler(0,0,3.6))
            #smach.StateMachine.add('MOVE_TO_HANGER',
                                   #MoveActionState(move_base_action_name='/move_by/move_base',pose=pose_turn),
                                   #transitions={succeeded: 'APPROACH_TO_HANGER',
                                                #preempted: preempted,
                                                #aborted: 'MOVE_TO_HANGER'},
                                   #remapping={'room_name': 'hanger'})
                                   
            smach.StateMachine.add('APPROACH_TO_HANGER',
                                   SMClothHangingApproachHangerPartStateMachine(),
                                   transitions={succeeded: succeeded,
                                                preempted: preempted,
                                                aborted: aborted})
    def add_collision_cluster(self, cluster, object_id):
        '''
        Adds a point cloud to the collision map as a single collision object composed of many small boxes.

        **Args:**

            **cluster (sensor_msg.msg.PointCloud):** The point cloud to add to the map
            
            **object_id (string):** The name the point cloud should have in the map
        '''
        many_boxes = CollisionObject()
        many_boxes.operation.operation = many_boxes.operation.ADD
        many_boxes.header = cluster.header
        many_boxes.header.stamp = rospy.Time.now()
        # subsample ~1% of the points to keep the collision map tractable
        num_to_use = int(len(cluster.points)/100.0)
        # list() is required: Python 3 range objects cannot be shuffled in place
        random_indices = list(range(len(cluster.points)))
        random.shuffle(random_indices)
        random_indices = random_indices[0:num_to_use]
        for i in range(num_to_use):
            # one 5 mm cube per sampled point
            shape = Shape()
            shape.type = Shape.BOX
            shape.dimensions = [.005]*3
            pose = Pose()
            pose.position.x = cluster.points[random_indices[i]].x
            pose.position.y = cluster.points[random_indices[i]].y
            pose.position.z = cluster.points[random_indices[i]].z
            pose.orientation = Quaternion(*[0,0,0,1])
            many_boxes.shapes.append(shape)
            many_boxes.poses.append(pose)
        
        many_boxes.id = object_id
        self._publish(many_boxes, self._collision_object_pub)
Esempio n. 14
0
 def poseFromTransform(cls, t):
     """Convert a geometry_msgs Transform into an equivalent Pose."""
     result = Pose()
     # the translation maps directly onto the position
     result.position.x = t.translation.x
     result.position.y = t.translation.y
     result.position.z = t.translation.z
     # copy the quaternion so the pose does not alias the transform's rotation
     result.orientation = deepcopy(t.rotation)
     return result
Esempio n. 15
0
 def find_joint_pose(self, pose, targ_x=0.0, targ_y=0.0, targ_z=0.0,
                     targ_ox=0.0, targ_oy=0.0, targ_oz=0.0):
     '''
     Finds the joint position of the arm given some pose and the
     offsets from it (to avoid opening the structure all the time
     outside of the function).
     '''
     ik_service_name = "ExternalTools/right/PositionKinematicsNode/IKService"
     ik_client = rospy.ServiceProxy(ik_service_name, SolvePositionIK)
     request = SolvePositionIKRequest()

     # target pose = requested offsets plus the calibration offsets
     target = deepcopy(pose)
     target['position'] = Point(x=targ_x + self.baxter_off.linear.x,
                                y=targ_y + self.baxter_off.linear.y,
                                z=targ_z + self.baxter_off.linear.z)
     quat = tf.transformations.quaternion_from_euler(
         targ_ox + self.baxter_off.angular.x,
         targ_oy + self.baxter_off.angular.y,
         targ_oz + self.baxter_off.angular.z)
     target['orientation'] = Quaternion(x=quat[0],
                                        y=quat[1],
                                        z=quat[2],
                                        w=quat[3])

     # wrap the target into a stamped pose for the IK request
     goal = Pose()
     goal.position = target['position']
     goal.orientation = target['orientation']
     header = Header(stamp=rospy.Time.now(), frame_id='base')
     request.pose_stamp.append(PoseStamped(header=header, pose=goal))

     response = ik_client(request)
     # map joint names to the solved joint positions
     return dict(zip(response.joints[0].name, response.joints[0].position))
    def follow_pose_with_admitance_by_ft(self):
        """One admittance-control step.

        Reads the force/torque sensor, maps forces to translational offsets
        and torques to roll/pitch/yaw offsets (per-axis deadbands and
        scalings), adds them to the last followed pose, clamps the result to
        the workspace limits and publishes it.
        """
        fx, fy, fz = self.get_force_movement()
        rospy.loginfo(
            "fx, fy, fz: " + str((round(fx, 3), round(fy, 3), round(fz, 3))))
        ps = Pose()
        # forces below the deadband are ignored; otherwise scale them into
        # positional offsets (frame_fixer holds per-axis sign/frame factors)
        if abs(fx) > self.fx_deadband:
            ps.position.x = (fx * self.fx_scaling) * self.frame_fixer.fx
        if abs(fy) > self.fy_deadband:
            ps.position.y = (fy * self.fy_scaling) * self.frame_fixer.fy
        if abs(fz) > self.fz_deadband:
            ps.position.z = (fz * self.fz_scaling) * self.frame_fixer.fz

        tx, ty, tz = self.get_torque_movement()
        rospy.loginfo(
            "tx, ty, tz: " + str((round(tx, 3), round(ty, 3), round(tz, 3))))

        # torques above their deadbands become orientation offsets
        roll = pitch = yaw = 0.0
        if abs(tx) > self.tx_deadband:
            roll += (tx * self.tx_scaling) * self.frame_fixer.tx
        if abs(ty) > self.ty_deadband:
            pitch += (ty * self.ty_scaling) * self.frame_fixer.ty
        if abs(tz) > self.tz_deadband:
            yaw += (tz * self.tz_scaling) * self.frame_fixer.tz

        # NOTE(review): ps.orientation is set here but never read again —
        # the final orientation is rebuilt below from the last followed pose.
        q = quaternion_from_euler(roll, pitch, yaw)
        ps.orientation = Quaternion(*q)

        o = self.last_pose_to_follow.pose.orientation
        r_lastp, p_lastp, y_lastp = euler_from_quaternion([o.x, o.y, o.z, o.w])

        # new orientation = last orientation + torque-induced offsets
        final_roll = r_lastp + roll
        final_pitch = p_lastp + pitch
        final_yaw = y_lastp + yaw
        self.current_pose.pose.orientation = Quaternion(*quaternion_from_euler(final_roll,
                                                                               final_pitch,
                                                                               final_yaw))

        # new position = last position + force-induced offsets
        self.current_pose.pose.position.x = self.last_pose_to_follow.pose.position.x + \
            ps.position.x
        self.current_pose.pose.position.y = self.last_pose_to_follow.pose.position.y + \
            ps.position.y
        self.current_pose.pose.position.z = self.last_pose_to_follow.pose.position.z + \
            ps.position.z

        # clamp the commanded position to the configured workspace box
        self.current_pose.pose.position.x = self.sanitize(self.current_pose.pose.position.x,
                                                          self.min_x,
                                                          self.max_x)
        self.current_pose.pose.position.y = self.sanitize(self.current_pose.pose.position.y,
                                                          self.min_y,
                                                          self.max_y)
        self.current_pose.pose.position.z = self.sanitize(self.current_pose.pose.position.z,
                                                          self.min_z,
                                                          self.max_z)
        self.current_pose.header.stamp = rospy.Time.now()
        if self.send_goals:  # send MODIFIED GOALS
            self.pose_pub.publish(self.current_pose)
        else:
            # not commanding: republish the unmodified last pose instead
            self.last_pose_to_follow.header.stamp = rospy.Time.now()
            self.pose_pub.publish(self.last_pose_to_follow)
        self.debug_pose_pub.publish(self.current_pose)
Esempio n. 17
0
def particle_to_pose(particle):
    ''' Converts a particle in the form [x, y, theta] into a Pose object '''
    x, y, theta = particle[0], particle[1], particle[2]
    result = Pose()
    result.position.x = x
    result.position.y = y
    result.orientation = angle_to_quaternion(theta)
    return result
Esempio n. 18
0
            def obj_response_cb(userdata, response):
                """Service response callback.

                When the object exists, stores a navigation pose (built from
                base_coordinates, optionally kept at a stand-off distance)
                and an arm release pose in userdata and returns `succeeded`;
                otherwise clears the location and returns `aborted`.
                """
                if response.exists:
                    pose = Pose()
                    #Wont there be problems mixing float32 with 64? TODO
                    pose.position = Point(response.base_coordinates.x, response.base_coordinates.y, 0)
                    # base_coordinates.z is used as the yaw angle, not height
                    #TODO, this may give problems
                    pose.orientation = Quaternion(*quaternion_from_euler(0, 0, response.base_coordinates.z))
                    if MANTAIN_DISTANCE_FROM_OBJECT:
                        # stop short of the object instead of on top of it
                        pose = pose_at_distance(pose, DISTANCE_FROM_OBJECT)
                    userdata.object_location = pose

                    release_pose = ObjectPose()
                    release_pose.pose = response.arm_coordinates
                    userdata.object_release_location = release_pose

                    print "\n\n printing pose for move to object"
                    print pose
                    print "\n\n printing pose for releasing the object"
                    print release_pose
                    print "\n that was the pose of the object\n\n"
                    print userdata.object_name
                    print "is the object name"
                    return succeeded
                else:
                    userdata.object_location = None
                    return aborted
Esempio n. 19
0
def transform_pose(pose_in, transform):
    '''
    Transforms a pose.

    Takes a pose defined in the frame defined by transform (i.e. the frame
    in which transform is the origin) and returns it in the frame in which
    transform is defined.  Calling this with the origin pose returns
    transform itself.  Equivalent to:

        pose.position = transform_point(pose_in.position, transform)

        pose.orientation = transform_quaternion(pose_in.orientation, transform)

    **Args:**

        **pose_in (geometry_msgs.msg.Pose):** Pose to transform

        **transform (geometry_msgs.msg.Pose):** The transform

    **Returns:**
        A geometry_msgs.msg.Pose
    '''
    result = Pose()
    result.position = transform_point(pose_in.position, transform)
    result.orientation = transform_quaternion(pose_in.orientation, transform)
    return result
    def process_collision_geometry_for_cluster(self, cluster):
        """Add a point-cloud cluster to the collision map as one collision
        object made of many small boxes, and return the generated name.

        Args:
            cluster: sensor_msgs PointCloud whose points are subsampled
                (~1%) into 5 mm collision boxes.
        """
        rospy.loginfo("adding cluster with %d points to collision map"%len(cluster.points))

        many_boxes = CollisionObject()
        
        many_boxes.operation.operation = CollisionObjectOperation.ADD
        many_boxes.header = cluster.header
        many_boxes.header.stamp = rospy.Time.now()
        num_to_use = int(len(cluster.points)/100.0)
        # list() is required: Python 3 range objects cannot be shuffled in place
        random_indices = list(range(len(cluster.points)))
        scipy.random.shuffle(random_indices)
        random_indices = random_indices[0:num_to_use]
        for i in range(num_to_use):
            shape = Shape()
            shape.type = Shape.BOX
            shape.dimensions = [.005]*3
            pose = Pose()
            pose.position.x = cluster.points[random_indices[i]].x
            pose.position.y = cluster.points[random_indices[i]].y
            pose.position.z = cluster.points[random_indices[i]].z
            pose.orientation = Quaternion(*[0,0,0,1])
            many_boxes.shapes.append(shape)
            many_boxes.poses.append(pose)
        
        collision_name = self.get_next_object_name()
        many_boxes.id = collision_name
        self.object_in_map_pub.publish(many_boxes)
        return collision_name
def eef_pose_pub():
  """Continuously publish the right end-effector pose looked up from TF.

  Reads the EEF link name and publish rate from the parameter server and
  republishes the /base_link -> eef_link transform as a Pose message until
  the node is shut down.
  """
  rospy.init_node('eef_publisher')
  rospy.loginfo("Publishing right EEF gripper location")
  tf_listener = tf.TransformListener()
  pose_pub = rospy.Publisher('eef_pose', Pose, queue_size=10)

  DEFAULT_LINK = '/right_ee_link'
  DEFAULT_RATE = 100

  # Pull from param server the hz and EEF link
  eef_link = rospy.get_param("~eef_link", DEFAULT_LINK)
  publish_rate = rospy.get_param("~eef_rate", DEFAULT_RATE)

  rate = rospy.Rate(publish_rate)
  while not rospy.is_shutdown():
    try:
      trans, rot = tf_listener.lookupTransform('/base_link', eef_link, rospy.Time(0))
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
      # transform not available yet; try again on the next cycle
      continue
    msg = Pose()
    msg.position = Point()
    msg.position.x, msg.position.y, msg.position.z = trans[0], trans[1], trans[2]
    msg.orientation = Quaternion()
    msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = rot[0], rot[1], rot[2], rot[3]
    pose_pub.publish(msg)
    rate.sleep()
Esempio n. 22
0
def get_pose_msg_from_2d(x, y, yaw):
    """Build a Pose at (x, y, 0) with the given yaw (radians)."""
    msg = Pose()
    msg.position.x, msg.position.y, msg.position.z = x, y, 0
    msg.orientation = Quaternion(*quaternion_from_euler(0, 0, yaw))
    return msg
Esempio n. 23
0
def homogeneous_product_pose_transform(pose, transform):
    """
    Compose `pose` (a Pose msg) with `transform` (a Transform msg) via
    4x4 homogeneous matrices and return the resulting Pose.
    """
    # homogeneous matrix of the transform: rotation block plus translation
    t_mat = tf.transformations.quaternion_matrix([transform.rotation.x, transform.rotation.y,
                                                  transform.rotation.z, transform.rotation.w])
    t_mat[:3,3] = [transform.translation.x, transform.translation.y, transform.translation.z]

    # homogeneous matrix of the original pose
    p_mat = tf.transformations.quaternion_matrix([pose.orientation.x, pose.orientation.y,
                                                  pose.orientation.z, pose.orientation.w])
    p_mat[:3,3] = [pose.position.x, pose.position.y, pose.position.z]

    # apply the transform in the pose's local frame
    combined = numpy.dot(p_mat, t_mat)

    # extract translation and rotation back into message types
    xyz = combined[:3,3].T
    quat = tf.transformations.quaternion_from_matrix(combined)
    result = Pose()
    result.position = geometry_msgs.msg.Vector3(xyz[0], xyz[1], xyz[2])
    result.orientation = geometry_msgs.msg.Quaternion(quat[0], quat[1], quat[2], quat[3])
    return result
Esempio n. 24
0
    def __init__(self):
        """State machine: move to a fixed pose, run a MOPED object search
        (enable -> read detections -> disable) and output the detections
        as 'object_data'.
        """
        smach.StateMachine.__init__(self, [succeeded, preempted, aborted],
        output_keys=['object_data'])

        with self:

            # fixed goal relative to base_link, yawed by 45
            # (NOTE(review): quaternion_from_euler expects radians; 45 here
            # is treated as radians — confirm whether degrees were intended)
            pose = Pose()
            pose.position = Point(0, 0, 0)
            pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 45))
            
            smach.StateMachine.add(
                'N2_go_to_dep_policy',
                MoveActionState("/base_link", pose=pose),
                transitions = {succeeded: 'look_for_objects'})

            def moped_enable_cb(userdata, response):
                # succeed when the service reports a usable response
                if response.correct != None:
                    rospy.loginfo("ENABLE_CLOSE_OBJECT_SEARCH response: " + \
                                  str(response.correct))
                    return succeeded
                else:
                    return aborted
                      
            smach.StateMachine.add(
            'ENABLE_CLOSE_OBJECT_SEARCH',
            ServiceState('/iri_moped_handler/enable', enable,
            response_cb = moped_enable_cb,
            #request_key = 'object_name',
            request = True),
            transitions = {succeeded:'look_for_objects'})

            # FIXME: why is there a smaller ctimeout & retry on aborted?
            # If something goes wrong it'd rather have it fail than having
            # it keep hanging on forever... --Siegfried
            smach.StateMachine.add(
                'look_for_objects',
                TopicReaderState(
                    topic_name='/iri_moped_handler/outputOPL',
                    msg_type=ObjectPoseList,
                    timeout=10),
                remapping= {'message' : 'object_data'},
                transitions = {succeeded: 'DISABLE_CLOSE_OBJECT_SEARCH', aborted: 'look_for_objects'})

            def moped_disable_cb(userdata, response):
                # mirror of moped_enable_cb for the disable call
                if response.correct != None:
                    rospy.loginfo("DISABLE_CLOSE_OBJECT_SEARCH response: " + \
                                  str(response.correct))
                    return succeeded
                else:
                    return aborted

            smach.StateMachine.add(
                'DISABLE_CLOSE_OBJECT_SEARCH',
                ServiceState('/iri_moped_handler/enable', enable,
                    response_cb = moped_disable_cb,
                    #request_key = 'object_name',
                    request = False),
                transitions = {succeeded: succeeded})
Esempio n. 25
0
 def parse_pose(pose):
     """Convert [x, y, z, roll, pitch, yaw] into a Pose message."""
     rospy.loginfo('parse_pose')
     msg = Pose()
     msg.position.x, msg.position.y, msg.position.z = pose[0], pose[1], pose[2]
     msg.orientation = Quaternion(*quaternion_from_euler(pose[3], pose[4], pose[5]))
     return msg
Esempio n. 26
0
def move_to_object(xposl, yposl, zposl, objcolorl):
    """Pick an object near (xposl, yposl) with Baxter's left arm and sort it by colour.

    Approaches from above, estimates the grasp depth with the left-hand range
    sensor, grasps, and (if the grip holds) delivers the object to a colour bin.

    :param xposl: target x in the robot frame
    :param yposl: target y in the robot frame
    :param zposl: unused; the grasp height is measured by the range sensor instead
    :param objcolorl: colour label forwarded to move_to_box()
    """
    global grip_force
    pose = Pose()
    # Approach pose slightly offset in x, at z=0, before ranging the object.
    pose.position = Point(xposl-0.01, yposl, 0.00)
    pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)


    request_pose(pose, "left", left_group)
    rospy.sleep(0.5)

    # Get left hand range state
    dist = baxter_interface.analog_io.AnalogIO('left_hand_range').state()
    rospy.sleep(1)
    if dist > 65000:
        # Reading saturated (nothing in range): fall back to a fixed grasp depth.
        print "Out of Range"
        truez = -0.13
    else:
        print "DISTANCE %f" % dist
        # dist/1000 suggests the sensor reports millimetres -- TODO confirm.
        # Back off 6 cm from the measured distance, then negate (target below z=0).
        truez = dist/1000
        truez = truez - 0.06
        truez = - (truez)
        print truez


    poset = Pose()
    poset.position = Point(xposl, yposl, truez)
    poset.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
    request_pose(poset, "left", left_group)

    left_gripper.close()
    rospy.sleep(0.5)
    if grip_force == 0:
        # Nothing gripped: release and return to the vision pose.
        left_gripper.open()
        move_to_vision()
    else:
        # Lift the object before transport.
        pose = Pose()
        pose.position = Point(xposl-0.01, yposl+0.01, 0.150)
        pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
        request_pose(pose, "left", left_group)
        if grip_force == 0:
            # Object slipped during the lift: abort delivery.
            left_gripper.open()
            move_to_vision()
            return
        move_to_box(objcolorl)
        left_gripper.open()
        move_to_vision()
def callback(data):
	"""Copy the incoming stamped pose into the global pose array and publish it."""
	new_pose = Pose()
	new_pose.position = data.pose.position
	new_pose.orientation = data.pose.orientation
	posArr.poses.append(new_pose)
	pub.publish(posArr)
Esempio n. 28
0
 def calculate_pose_transform(self, pose, global_twist, dt):
     """Integrate a global-frame twist over dt to produce the next pose."""
     result = Pose()
     lin = global_twist.linear
     # Position: simple Euler integration of the linear velocity.
     result.position.x = pose.position.x + lin.x * dt
     result.position.y = pose.position.y + lin.y * dt
     result.position.z = pose.position.z + lin.z * dt
     # Orientation update is delegated to the quaternion integrator.
     result.orientation = self.calculate_quaternion_transform(pose.orientation, global_twist.angular, dt)
     return result
Esempio n. 29
0
def move_to_vision():
    """Send the left arm to the fixed overhead vision pose."""
    vision_pose = Pose()
    vision_pose.position = Point(0.712, 0.316, 0.250)
    vision_pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)

    request_pose(vision_pose, "left", left_group)
 def calcule_rotation(userdata, goal_cb):
     """Build a MoveBaseGoal rotating in place by `degrees_to_rotate` (module global)."""
     goal = MoveBaseGoal()
     goal.target_pose.header.stamp = rospy.Time.now()
     goal.target_pose.header.frame_id = '/base_link'
     rotation = Pose()
     rotation.orientation = Quaternion(*quaternion_from_euler(0, 0, degrees_to_rotate))
     goal.target_pose.pose = rotation
     return goal
Esempio n. 31
0
    def _plan_route(self, path):
        """Attach waypoint poses along `path` (a list of areas with waypoints).

        Walks every waypoint's topology node. For each node after the first,
        the heading from the previous node to the current one is written into
        the *previous* waypoint (which may live in the previous area). The
        final waypoint reuses the last computed heading.

        :param path: sequence of areas, each exposing a `waypoints` list
        :return: OSMTrajectoryPlannerResult with `areas` set to the annotated path
        """
        is_first_pt = True

        last_pt = [0, 0]
        last_orientation = 0

        res = OSMTrajectoryPlannerResult()

        for i, area in enumerate(path):
            for j, pt in enumerate(area.waypoints):
                temp = None
                if pt.type == 'local_area':
                    temp = self.osm_bridge.get_local_area(pt.id)
                elif pt.type == 'door':
                    # NOTE(review): doors are fetched via get_local_area as
                    # well -- confirm this is intended and not a copy-paste
                    # of the branch above.
                    temp = self.osm_bridge.get_local_area(pt.id)

                if temp is not None:
                    topology_node = temp.topology
                    if is_first_pt:
                        is_first_pt = False
                    else:
                        # Heading from the previous node to the current node.
                        last_orientation = math.atan2(
                            topology_node.y - last_pt[1],
                            topology_node.x - last_pt[0])
                        p = Pose()
                        p.position = Point(x=last_pt[0], y=last_pt[1], z=0)
                        p.orientation = Quaternion(
                            *quaternion_from_euler(0, 0, last_orientation))
                        if j > 0:
                            area.waypoints[j - 1].waypoint_pose = p
                        else:
                            # First waypoint of this area: the previous node
                            # is the last waypoint of the previous area.
                            path[i - 1].waypoints[len(path[i - 1].waypoints) -
                                                  1].waypoint_pose = p
                        # print(last_pt[0], last_pt[1], last_orientation*180/3.1457)
                    last_pt = [topology_node.x, topology_node.y]
        # Final waypoint: reuse the last heading at the last visited node.
        p = Pose()
        p.position = Point(x=last_pt[0], y=last_pt[1], z=0)
        p.orientation = Quaternion(
            *quaternion_from_euler(0, 0, last_orientation))
        path[len(path) - 1].waypoints[len(path[len(path) - 1].waypoints) -
                                      1].waypoint_pose = p

        res.areas = path
        return res
Esempio n. 32
0
    def publish_model_states(self):
        """Publish a ModelStates message with each robot's pose and twist,
        each robot's head pose, and the ball pose (when a ball exists).

        Skips all work when nobody is subscribed to the publisher.
        """
        if self.model_state_publisher.get_num_connections() != 0:
            msg = ModelStates()
            for robot_name, robot_node in self.robot_nodes.items():
                position, orientation = self.get_robot_pose_quat(
                    name=robot_name)
                robot_pose = Pose()
                robot_pose.position = Point(*position)
                robot_pose.orientation = Quaternion(*orientation)
                msg.name.append(robot_name)
                msg.pose.append(robot_pose)
                lin_vel, ang_vel = self.get_robot_velocity(robot_name)
                twist = Twist()
                twist.linear.x = lin_vel[0]
                twist.linear.y = lin_vel[1]
                twist.linear.z = lin_vel[2]
                twist.angular.x = ang_vel[0]
                twist.angular.y = ang_vel[1]
                twist.angular.z = ang_vel[2]
                msg.twist.append(twist)

                # Head pose is read straight from the simulator node.
                head_node = robot_node.getFromProtoDef("head")
                head_position = head_node.getPosition()
                head_orientation = head_node.getOrientation()
                head_orientation_quat = transforms3d.quaternions.mat2quat(
                    np.reshape(head_orientation, (3, 3)))
                head_pose = Pose()
                head_pose.position = Point(*head_position)
                # transforms3d returns quaternions as (w, x, y, z); reorder
                # into the ROS (x, y, z, w) convention here.
                head_pose.orientation = Quaternion(head_orientation_quat[1],
                                                   head_orientation_quat[2],
                                                   head_orientation_quat[3],
                                                   head_orientation_quat[0])
                msg.name.append(robot_name + "_head")
                msg.pose.append(head_pose)

            if self.ball is not None:
                ball_position = self.ball.getField("translation").getSFVec3f()
                ball_pose = Pose()
                ball_pose.position = Point(*ball_position)
                # NOTE(review): a default-constructed Quaternion is all zeros
                # (not a valid unit quaternion) -- confirm consumers ignore
                # the ball orientation.
                ball_pose.orientation = Quaternion()
                msg.name.append("ball")
                msg.pose.append(ball_pose)

            self.model_state_publisher.publish(msg)
Esempio n. 33
0
def move_to_object(xposl, yposl, zposl, objcolorl):
    """Pick an object near (xposl, yposl) with the left arm and sort it by
    colour (near-duplicate of the other move_to_object variant in this file).

    :param zposl: unused; the grasp height comes from the hand range sensor
    :param objcolorl: colour label forwarded to move_to_box()
    """
    global grip_force
    pose = Pose()
    # Approach pose slightly offset in x, at z=0, before ranging the object.
    pose.position = Point(xposl - 0.01, yposl, 0.00)
    pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)

    request_pose(pose, "left", left_group)
    rospy.sleep(0.5)

    # Get left hand range state
    dist = baxter_interface.analog_io.AnalogIO('left_hand_range').state()
    rospy.sleep(1)
    if dist > 65000:
        # Reading saturated (nothing in range): use a fixed grasp depth.
        print "Out of Range"
        truez = -0.13
    else:
        print "DISTANCE %f" % dist
        # dist/1000 suggests the sensor reports millimetres -- TODO confirm.
        truez = dist / 1000
        truez = truez - 0.06
        truez = -(truez)
        print truez

    poset = Pose()
    poset.position = Point(xposl, yposl, truez)
    poset.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
    request_pose(poset, "left", left_group)

    left_gripper.close()
    rospy.sleep(0.5)
    if grip_force == 0:
        # Nothing gripped: release and return to the vision pose.
        left_gripper.open()
        move_to_vision()
    else:
        # Lift the object before transport.
        pose = Pose()
        pose.position = Point(xposl - 0.01, yposl + 0.01, 0.150)
        pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
        request_pose(pose, "left", left_group)
        if grip_force == 0:
            # Object slipped during the lift: abort delivery.
            left_gripper.open()
            move_to_vision()
            return
        move_to_box(objcolorl)
        left_gripper.open()
        move_to_vision()
    def follow_pose_with_admitance_by_ft(self):
        """One admittance-control step: map deadbanded and scaled force/torque
        readings to a pose offset from the last followed pose, clamp the
        result to the workspace limits, and publish it.
        """
        fx, fy, fz = self.get_force_movement()
        rospy.loginfo("fx, fy, fz: " +
                      str((round(fx, 3), round(fy, 3), round(fz, 3))))
        # ps accumulates the translational offset; components below the
        # deadband stay at the Pose() default of 0.0.
        ps = Pose()
        if abs(fx) > self.fx_deadband:
            ps.position.x = (fx * self.fx_scaling) * self.frame_fixer.fx
        if abs(fy) > self.fy_deadband:
            ps.position.y = (fy * self.fy_scaling) * self.frame_fixer.fy
        if abs(fz) > self.fz_deadband:
            ps.position.z = (fz * self.fz_scaling) * self.frame_fixer.fz

        tx, ty, tz = self.get_torque_movement()
        rospy.loginfo("tx, ty, tz: " +
                      str((round(tx, 3), round(ty, 3), round(tz, 3))))

        # RPY offsets from torque readings, same deadband/scaling scheme.
        roll = pitch = yaw = 0.0
        if abs(tx) > self.tx_deadband:
            roll += (tx * self.tx_scaling) * self.frame_fixer.tx
        if abs(ty) > self.ty_deadband:
            pitch += (ty * self.ty_scaling) * self.frame_fixer.ty
        if abs(tz) > self.tz_deadband:
            yaw += (tz * self.tz_scaling) * self.frame_fixer.tz

        # NOTE: ps.orientation is set but not used below; the published
        # orientation is recomputed from the last followed pose + offsets.
        q = quaternion_from_euler(roll, pitch, yaw)
        ps.orientation = Quaternion(*q)

        o = self.last_pose_to_follow.pose.orientation
        r_lastp, p_lastp, y_lastp = euler_from_quaternion([o.x, o.y, o.z, o.w])

        final_roll = r_lastp + roll
        final_pitch = p_lastp + pitch
        final_yaw = y_lastp + yaw
        self.current_pose.pose.orientation = Quaternion(
            *quaternion_from_euler(final_roll, final_pitch, final_yaw))

        self.current_pose.pose.position.x = self.last_pose_to_follow.pose.position.x + \
            ps.position.x
        self.current_pose.pose.position.y = self.last_pose_to_follow.pose.position.y + \
            ps.position.y
        self.current_pose.pose.position.z = self.last_pose_to_follow.pose.position.z + \
            ps.position.z

        # Clamp the goal into the allowed workspace box.
        self.current_pose.pose.position.x = self.sanitize(
            self.current_pose.pose.position.x, self.min_x, self.max_x)
        self.current_pose.pose.position.y = self.sanitize(
            self.current_pose.pose.position.y, self.min_y, self.max_y)
        self.current_pose.pose.position.z = self.sanitize(
            self.current_pose.pose.position.z, self.min_z, self.max_z)
        self.current_pose.header.stamp = rospy.Time.now()
        if self.send_goals:  # send MODIFIED GOALS
            self.pose_pub.publish(self.current_pose)
        else:
            # Modified goals disabled: re-publish the unmodified last pose.
            self.last_pose_to_follow.header.stamp = rospy.Time.now()
            self.pose_pub.publish(self.last_pose_to_follow)
        self.debug_pose_pub.publish(self.current_pose)
Esempio n. 35
0
    def _get_marker_template(
            self,
            marker_type=Marker.CUBE,  # noqa: E501, C901
            **kwargs):
        """Assemble a visualization_msgs Marker from self.defaults overlaid
        with **kwargs.

        Convenience handling: `size` expands to a uniform `scale`,
        `orientation` and `alpha` patch the pose/color, and `update_id`
        re-uses an existing marker id (action = MODIFY) instead of
        allocating a new unique one.
        """
        settings = dict(self.defaults, **kwargs)
        settings["type"] = marker_type

        # some options are treated differently or can be conveniently overridden
        if "pose" not in settings:
            settings["pose"] = Pose(Point(), Quaternion(w=1))

        # if size is in kwargs and scale is not present use size
        if "size" in kwargs and "scale" not in kwargs:
            # convert size to scale if present
            settings["scale"] = Vector3(settings["size"], settings["size"],
                                        settings["size"])

        if "orientation" in settings:
            settings["pose"].orientation = settings["orientation"]
            del settings["orientation"]

        if "color" in settings and not isinstance(settings["color"],
                                                  ColorRGBA):
            settings["color"] = convert_color(settings["color"])

        if "alpha" in settings:
            # override alpha for the global color option
            color = settings["color"]
            settings["color"] = ColorRGBA(color.r, color.g, color.b,
                                          settings["alpha"])
            del settings["alpha"]

        # if an id is given modify marker, otherwise use a new unique id
        if "update_id" in settings:
            if settings["update_id"] is not None:
                settings["id"] = settings["update_id"]
                settings["action"] = Marker.MODIFY
        else:
            settings["id"] = self._current_marker_id
            self._current_marker_id += 1

        # to avoid RViz warnings
        if marker_type == Marker.LINE_STRIP:
            settings["scale"] = Vector3(settings["scale"].x, 0, 0)
        if marker_type == Marker.TEXT_VIEW_FACING:
            settings["scale"] = Vector3(0, 0, settings["scale"].z)

        # drop convenience-only keys that Marker() would reject
        if "size" in settings:
            del settings["size"]

        if "update_id" in settings:
            del settings["update_id"]

        marker = Marker(**settings)
        marker.header.stamp = rospy.Time.now()
        return marker
Esempio n. 36
0
    def update_particle_cloud(self, scan):
        """
        This should use the supplied laser scan to update the current
        particle cloud. i.e. self.particlecloud should be updated.

        Scheme: each current particle spawns 4 jittered copies weighted by
        the squared sensor-model likelihood; an augmented-MCL-style
        w_slow/w_fast pair controls injection of random map poses when
        recent observations are poor.

        :Args:
            | scan (sensor_msgs.msg.LaserScan): laser scan to use for update

         """
        self.scan = scan
        movement_particles = []

        # dtime is computed but not used below.
        dtime = rospy.Time.now().to_sec() - self.last_time.to_sec()
        self.last_time = rospy.Time.now()

        for i in self.particlecloud.poses:
            x = i.position.x
            y = i.position.y
            theta = util.getHeading(i.orientation)
            for j in range(4):
                p = Pose()

                # Jitter position by +/-0.2 m and heading by +/-0.4 rad.
                p.position.x = x + random.uniform(-0.2, 0.2)
                p.position.y = y + random.uniform(-0.2, 0.2)
                p.position.z = 0

                thetad = theta + random.uniform(-0.4, 0.4)
                p.orientation = util.rotateQuaternion(Quaternion(w=1.0),
                                                      thetad)

                # Squaring sharpens the sensor-model weighting.
                probn = self.sensor_model.get_weight(scan, p)**2
                movement_particles.append((probn, p))
        w_avg = 0.0
        for x in movement_particles:
            w_avg += x[0] / len(movement_particles)

        # Long-term (slow) and short-term (fast) weight averages.
        self.w_slow += self.A_SLOW * (w_avg - self.w_slow)
        self.w_fast += self.A_FAST * (w_avg - self.w_fast)

        particlecloudnew = PoseArray()
        weights = np.asarray([i[0] for i in movement_particles]).cumsum()

        new_weights = []
        for i in range(self.NUMBER_OF_PARTICLES):
            # Draw from the cumulative weight distribution.
            j = random.random() * w_avg * len(movement_particles)
            index = np.searchsorted(weights, j)
            pose = movement_particles[index][1]
            weight = movement_particles[index][0]
            # NOTE(review): raises ZeroDivisionError when self.w_slow is 0 --
            # confirm w_slow is initialised to a non-zero value elsewhere.
            if random.random() > (self.w_fast / self.w_slow):
                pose = self.generate_random_map_pose()
                weight = 0.0

            particlecloudnew.poses.append(pose)
            new_weights.append(weight)
        self.particlecloud.poses = particlecloudnew.poses
        self.weights = new_weights
Esempio n. 37
0
def publish_locations(centroids, labels, radius):
    """Build cluster-center messages for a set of 2D centroids.

    :param centroids: sequence of (x, y) cluster centers
    :param labels: per-sample cluster labels used to count people per cluster
    :param radius: unused here; kept for interface compatibility
    :return: (posearr, posearr2) -- ClusterCenter messages and bare Poses
    """
    # FIXME: this busy-waits forever when fewer than two centroids are
    # supplied, since `centroids` is never updated inside the loop.
    while (len(centroids) <= 1):
        a = 1

    # Init some variables
    people_count = [0] * len(centroids)
    posearr = []
    posearr2 = []
    command2 = PoseArray()
    command2.header.stamp = rospy.get_rostime()
    command2.header.frame_id = 'world'
    command = ClusterCenters()

    if len(centroids) > 1:
        centroids_temp = centroids

        # for all the cluster centers
        for i in range(len(centroids_temp)):
            try:
                # Setup Cluster Msg
                # (BUGFIX: this line was space-indented inside an otherwise
                # tab-indented block, a TabError under Python 3; indentation
                # is now consistent.)
                cluster_center = ClusterCenter()

                # Store Cluster Label into Msg
                cluster_center.cluster_label = str(i)

                # Count number of people in Cluster
                for j in range(len(labels)):
                    if (labels[j] == i):
                        cluster_center.number_people = cluster_center.number_people + 1

                # Setup Pose Msg
                pose = Pose()
                quat = Quaternion()
                point = Point()
                point.x = centroids_temp[i][0]
                point.y = centroids_temp[i][1]
                point.z = 0
                quat.x = 0
                quat.y = 0
                quat.z = 0
                # NOTE(review): w=0 is not a valid unit quaternion (identity
                # would be w=1); kept as-is to preserve published values --
                # confirm intent.
                quat.w = 0
                pose.position = point
                pose.orientation = quat
                cluster_center.pose = pose

                # Append Clusters into both publishers
                posearr.append(cluster_center)
                posearr2.append(pose)

            except IndexError:
                print ("error", len(centroids))

        return posearr, posearr2
Esempio n. 38
0
 def getShootGoalPose(self):
     """Pose 0.5 m behind the ball on the ball-goal line, oriented toward the ball."""
     ball = self.lastBallPose.position
     goal = self.setup.zielTor
     # Interpolation factor corresponding to 0.5 m along the goal-ball line.
     t = 0.5 / self.distance(goal, ball)
     pose = Pose()
     pose.position.x = (1 + t) * ball.x - t * goal.x
     pose.position.y = (1 + t) * ball.y - t * goal.y
     pose.orientation = self.getOrientAsQuaternion(pose.position, ball)
     return pose
Esempio n. 39
0
    def getPoseMsg(self, pose):
        """Convert an (x, y, yaw) triple into a geometry_msgs Pose."""
        msg = Pose()
        msg.position.x, msg.position.y = pose[0], pose[1]
        msg.orientation = Quaternion(
            *tf.transformations.quaternion_from_euler(0, 0, pose[2]))
        return msg
Esempio n. 40
0
    def getGatePose(self, left_buoy, right_buoy):
        """Midpoint pose between two buoys, heading perpendicular to the buoy line."""
        left = left_buoy.pose.position
        right = right_buoy.pose.position

        target = Pose()
        target.position.x = (left.x + right.x) / 2.0
        target.position.y = (left.y + right.y) / 2.0
        target.position.z = 0.0

        # if they are the same type, the orientation is the same as the curret position orientation
        if (left_buoy.best_guess == right_buoy.best_guess):
            target.orientation = self.current_pose.orientation
        else:
            # Heading along the buoy line rotated by ~90 degrees (1.507 rad).
            yaw = math.atan2(left.y - right.y, left.x - right.x) - 1.507
            rospy.loginfo("Got gate location x: %f, y: %f, yaw: %f", target.position.x, target.position.y, yaw)
            target.orientation = eulerToQuat([0, 0, yaw])
        return target
Esempio n. 41
0
    def transform_to_pose(self, trans):
        """
        Strip the header from a TransformStamped, returning a bare Pose.

        NOTE(review): the translation (a Vector3) is assigned directly to
        Pose.position (a Point); the message layout matches, but strict
        type checks downstream may object -- confirm.

        :param trans: TransformStamped
        :return: Pose
        """
        pose = Pose()
        pose.position = trans.transform.translation
        pose.orientation = trans.transform.rotation
        return pose
Esempio n. 42
0
 def move_to_pickup_positionL(self, point):
     """Move the left limb to `point` with the overhead orientation via Baxter IK."""
     target = Pose(position=point)
     target.orientation = self._overhead_orientation
     header = Header(stamp=rospy.Time.now(), frame_id='base')
     request = BaxterIKRequest()
     request.pose_stamp.append(PoseStamped(header=header, pose=target))
     response = self._ikL(request)
     # Pair up joint names with the solved angles for the limb interface.
     joints = dict(zip(response.joint_angles[0].name, response.joint_angles[0].position))
     self._limbL.move_to_joint_positions(joints)
Esempio n. 43
0
def convert_pose(pose):
    """
    convert pose between left and right-hand coordinate system
    :param pose: pose to be converted
    :return: converted pose
    """
    converted = Pose()
    converted.position = convert_vector3(pose.position)
    converted.orientation = convert_quaternion(pose.orientation)
    return converted
Esempio n. 44
0
 def getKickBallPose(self):
     """Pose 0.5 m short of the ball on the ball-goal line, oriented toward the goal."""
     ball = self.lastBallPose.position
     goal = self.setup.zielTor
     # Interpolation factor corresponding to 0.5 m along the ball-goal line.
     t = 0.5 / self.distance(goal, ball)
     pose = Pose()
     pose.position.x = (1 - t) * ball.x + t * goal.x
     pose.position.y = (1 - t) * ball.y + t * goal.y
     pose.orientation = self.getOrientAsQuaternion(pose.position, goal)
     return pose
Esempio n. 45
0
        def pointToWaypoint(point, startOrientation):
            """Wrap an (x, y, z) triple into a Pose, keeping the given orientation."""
            wp = Pose()
            wp.position.x, wp.position.y, wp.position.z = point[0], point[1], point[2]
            wp.orientation = startOrientation
            return wp
Esempio n. 46
0
    def retract(self):
        """Servo 10 cm back in x and y from the current pose, keeping z and orientation."""
        current = self.get_current_pose()

        target = Pose()
        target.position.x = current['position'][0] - .1
        target.position.y = current['position'][1] - .1
        target.position.z = current['position'][2]
        target.orientation = current['orientation']

        self._servo_to_pose(target)
Esempio n. 47
0
    def test_reset_robot_pose(self):
        """reset_robot_pose() must push the stored initial pose back to Gazebo."""
        self.__cle.gazebo_helper.set_model_pose = Mock()

        initial_pose = Pose(position=Point(0, 0, 0),
                            orientation=Quaternion(0, 0, 0, 1))

        self.__cle.initial_robot_poses = {'robot': initial_pose}
        self.__cle.reset_robot_pose()
        self.__cle.gazebo_helper.set_model_pose.assert_called_with(
            'robot', initial_pose)
Esempio n. 48
0
def main():
    """Spawn a drone plus eight randomly placed boxes, then stream box poses."""
    rospy.init_node("simulator")
    gz_handler = GazeboHandler()

    rospack = rospkg.RosPack()
    pkg_path = rospack.get_path("ros_workshop")
    boxurdf = pkg_path + "/src/simulator/box.urdf"
    dronesdf = pkg_path + "/src/simulator/models/drone/drone.sdf"

    drone_start_pos = random_position(0, 0, 0.2, 0.3)

    # Box positions must keep a horizontal clearance of 4 m from the drone.
    points = []
    while len(points) < 8:
        candidate = random_position(-15, 15, 4, 8)
        dx = candidate.x - drone_start_pos.x
        dy = candidate.y - drone_start_pos.y
        if math.sqrt(dx**2 + dy**2) > 4:
            points.append(candidate)

    pose = Pose()
    pose.position = drone_start_pos
    pose.orientation = Quaternion(0, 0, 0, 1)
    gz_handler.spawn_model("drone", pose, dronesdf)

    for i, point in enumerate(points):
        pose.position = point
        pose.orientation = random_quaternion()
        model_name = "box_" + str(i)
        gz_handler.spawn_model(model_name, pose, boxurdf)
        gz_handler.track_object(model_name)

    boxpub = rospy.Publisher("simulator/boxes", PoseArray, queue_size=1)
    boxes = PoseArray()

    rate = rospy.Rate(30)
    while not rospy.is_shutdown():
        boxes.header.stamp = rospy.Time.now()
        boxes.header.frame_id = "map"
        boxes.poses = gz_handler.get_tracked()
        boxpub.publish(boxes)
        rate.sleep()
Esempio n. 49
0
 def _transform_to_pose(self, matrix):
     """Convert a 4x4 homogeneous transform matrix into a Pose message."""
     pose = Pose()
     translation = tft.translation_from_matrix(matrix)
     rotation = tft.quaternion_from_matrix(matrix)
     pose.position = Point(translation[0], translation[1], translation[2])
     pose.orientation = Quaternion(rotation[0], rotation[1], rotation[2],
                                   rotation[3])
     return pose
def map_pose(values):
    """
    Map the values generated with the hypothesis-ros pose strategy to a rospy Pose type.
    """
    if not isinstance(values, _Pose):
        raise TypeError('Wrong type. Use appropriate hypothesis-ros type.')
    return Pose(position=map_point(values.position),
                orientation=map_quaternion(values.orientation))
Esempio n. 51
0
def transform_to_pose(matrix):
    """Convert a 4x4 homogeneous matrix into a Pose (position = transformed origin)."""
    pose = Pose()
    quat = tft.quaternion_from_matrix(matrix)
    pose.orientation = Quaternion(quat[0], quat[1], quat[2], quat[3])
    # Transform the origin: picks out the matrix's translation column.
    origin = np.dot(matrix, np.array([0, 0, 0, 1]))
    pose.position = Point(origin[0], origin[1], origin[2])
    return pose
Esempio n. 52
0
    def publishRos(self, invRvec, invTvec):
        """Publish the inverse marker transform (rotation vector + translation) as a PoseStamped."""
        pose = Pose()
        pose.position = Point(invTvec[0], invTvec[1], invTvec[2])
        q = quaternion_from_euler(invRvec[0], invRvec[1], invRvec[2])
        pose.orientation = Quaternion(q[0], q[1], q[2], q[3])
        self.pub.publish(PoseStamped(self.hdr, pose))
Esempio n. 53
0
    def __init__(self,
                 swarm,
                 mesh_path,
                 mesh_rgba,
                 mesh_scale,
                 namespace='swarm_item',
                 last_used_id=0):
        """
        Similar to marker array view, but this assumes that the given array of stuff
        are all identical.

        swarm needs to have get_positions() and get_orientation_quats() functions.

        if more than 1 swarm view is created, take care of last_used_id!
        """
        # NOTE(review): the docstring above says get_positions(), but the
        # loop below calls swarm.get_position() -- confirm which is correct.

        self.swarm = swarm
        self.last_used_id = last_used_id
        self.pub = rospy.Publisher('/rviz_swarm', MarkerArray, queue_size=1)

        # create these ahead of time, just need to update before publishing
        self.marker_array = MarkerArray()

        # NOTE(review): self.poses is created but never appended to here --
        # presumably filled/updated elsewhere before publishing.
        self.poses = []
        for i in range(len(swarm.get_position())):
            # Each marker gets its own (initially zeroed) pose objects.
            point = Point()
            quat = Quaternion()
            pose = Pose()
            pose.position = point
            pose.orientation = quat

            marker = Marker()
            marker.ns = namespace
            # Allocate a fresh id past the last used one.
            marker.id = self.last_used_id+1
            self.last_used_id += 1
            marker.action = 0
            # 10 for mesh
            marker.type = 10

            marker.pose = pose
            x,y,z = mesh_scale
            marker.scale.x = x
            marker.scale.y = y
            marker.scale.z = z

            r,g,b,a = mesh_rgba
            marker.color.a = a
            marker.color.r = r
            marker.color.g = g
            marker.color.b = b

            marker.mesh_resource = 'file://'+mesh_path
            marker.header.frame_id = '/world'

            self.marker_array.markers.append(marker)
	def show_target(self, target, listener, use_laser=True):
		"""Transform a detected target into the odom frame and record it.

		Builds a PoseStamped at target.coordinates (orientation copied from
		the robot's last pose), transforms it to /odom when running on the
		robot, then stores it in self.encountered_positions keyed by label:
		objects are deduplicated by distance, other labels keep a single
		updated entry.
		"""
		if cfg.robot_stream:
			robot_last_pose = self.pepper_localization.get_pose()
		else:
			robot_last_pose = PoseStamped()
		if not robot_last_pose:
			return

		loc = Pose()
		loc.position.x = target.coordinates[0]
		loc.position.y = target.coordinates[1]
		loc.position.z = target.coordinates[2]
		loc.orientation = robot_last_pose.pose.orientation
		pose = PoseStamped()

		if use_laser:
			pose.header.frame_id = 'laser'
		else:
			pose.header.frame_id = 'map'
		pose.pose = loc

		# Only transform when a live robot stream provides TF data.
		if cfg.robot_stream:
			pose_in_map = listener.transformPose('/odom', pose)
		else:
			pose_in_map = pose
		pose_in_map.header.frame_id = 'odom'

		start_time = time.time()

		if target.class_type == ClassType.OBJECT:
			# Objects: append only when no stored instance of this label is
			# within the configured distance threshold (deduplication).
			matched = False
			if target.label in self.encountered_positions:
				for existing_position in self.encountered_positions[target.label][1:]:
					if self.compute_euclidian_distance(existing_position.pose.position,
							pose_in_map.pose.position) < navig_cfg.objects_distance_threshold:
						matched = True
				if not matched:
					self.encountered_positions[target.label].append(pose_in_map)
			else:
				# First sighting: list head holds the display colour key.
				self.encountered_positions[target.label] = ['object_color', pose_in_map]
				self.add_new_possible_goal(target.label)

		else:
			if not target.label in self.encountered_positions:
				self.encountered_positions[target.label] = ['qrcode_color', pose_in_map]
				if target.class_type == ClassType.PERSON:
					self.encountered_positions[target.label][0] = 'person_color'
				elif target.class_type == ClassType.PERSON_DEFAULT:
					self.encountered_positions[target.label][0] = 'person_default_color'
				self.add_new_possible_goal(target.label)
			else:
				# Known non-object label: just refresh its stored pose.
				self.encountered_positions[target.label][1] = pose_in_map

		if cfg.debug_mode:
			print("\ttransform %s seconds" % (time.time() - start_time))
    def _generate_samples(self,
                          input_pose,
                          roll_min=0.0,
                          roll_max=0.0,
                          roll_samples=1,
                          roll_offset=0.0,
                          pitch_min=0.0,
                          pitch_max=0.0,
                          pitch_samples=1,
                          pitch_offset=0.0,
                          yaw_min=0.0,
                          yaw_max=0.0,
                          yaw_samples=1,
                          yaw_offset=0.0):
        """
        Generate samples around input pose within given limits. The amount of samples
        generated can be calculated by (`roll_samples` * `pitch_samples * `yaw_samples`)

        Note: All min, max and offset values are in degrees

        :input_pose: geometry_msgs/PoseStamped
        :roll_min: float
        :roll_max: float
        :roll_samples: int
        :roll_offset: float
        :pitch_min: float
        :pitch_max: float
        :pitch_samples: int
        :pitch_offset: float
        :yaw_min: float
        :yaw_max: float
        :yaw_samples: int
        :yaw_offset: float
        :returns: geometry_msgs/PoseArray

        """
        import copy  # local stdlib import; avoids touching module-level imports

        # Use distinct names for the sampled values so the *_samples counts
        # are not shadowed (previously the int params were reused as lists).
        roll_values = list(np.linspace(roll_min, roll_max, roll_samples))
        pitch_values = list(np.linspace(pitch_min, pitch_max, pitch_samples))
        yaw_values = list(np.linspace(yaw_min, yaw_max, yaw_samples))

        pose_samples = PoseArray()
        pose_samples.header = input_pose.header
        for roll in roll_values:
            for pitch in pitch_values:
                for yaw in yaw_values:
                    pose = Pose()
                    # BUGFIX: copy the position instead of aliasing the same
                    # Point object into every sampled pose; previously a later
                    # mutation of any sample's position would have changed all
                    # samples (and the input pose) at once.
                    pose.position = copy.deepcopy(input_pose.pose.position)
                    roll_rad = np.radians(roll + roll_offset)
                    pitch_rad = np.radians(pitch + pitch_offset)
                    yaw_rad = np.radians(yaw + yaw_offset)
                    pose.orientation = Quaternion(
                        *tf.transformations.quaternion_from_euler(
                            roll_rad, pitch_rad, yaw_rad))
                    pose_samples.poses.append(pose)
        return pose_samples
Esempio n. 56
0
    def test_7_move_lin_ptp_armangle_config(self):
        """Drive move_ptp_armangle / move_lin_armangle through a series of
        goals in elbow configuration 2, asserting each service call succeeds
        and the commanded arm angle is actually reached."""
        self.client.move_joints(0, pi / 100, 0, -pi / 2, 0, pi / 2, 0)
        _, arm_angle, config = self.client.get_current_pose(detailed=True)
        self.assertEqual(config, 2, "wrong config determined")

        # PTP motion with an explicit arm angle of 0.
        goal_pose = Pose()
        goal_pose.position = Point(.6, -0.17, .7215)
        goal_pose.orientation = Quaternion(.0, .7071068, .0, .7071068)
        goal_arm_angle = 0.0
        resp = self.client.move_ptp_armangle(goal_pose, goal_arm_angle)
        self.assert_last_srv_call_success(resp)
        _, arm_angle, config = self.client.get_current_pose(detailed=True)
        self.assertEqual(config, 2, "wrong config for test case")
        self.assert_joint_values_close(arm_angle, goal_arm_angle)

        # Linear motions sweeping the arm angle back and forth at a fixed pose.
        goal_pose.position = Point(.3, .0, .3)
        goal_pose.orientation = orientation_from_rpy(0, pi, 0)
        self.assert_move_ptp_success(goal_pose)
        goal_arm_angle = pi / 2
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, True)
        self.assert_last_srv_call_success(resp)
        _, arm_angle, config = self.client.get_current_pose(detailed=True)
        self.assert_joint_values_close(arm_angle, goal_arm_angle)
        goal_arm_angle = -pi / 2
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, False)
        self.assert_last_srv_call_success(resp)
        goal_arm_angle = pi / 2
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, True)
        self.assert_last_srv_call_success(resp)

        # Combined pose + arm-angle changes.
        goal_arm_angle = 0.0
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, False)
        self.assert_last_srv_call_success(resp)
        goal_pose.position.y = .4
        goal_arm_angle = pi / 3
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, True)
        self.assert_last_srv_call_success(resp)
        goal_pose.position.x = .0
        goal_pose.orientation = orientation_from_rpy(0, pi, pi / 2)
        goal_arm_angle = -pi / 2
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, False)
        self.assert_last_srv_call_success(resp)
Esempio n. 57
0
    def _transport(self, block=None, position=None, orientation=None):
        """Move the gripper to a hover pose above either a described block or
        an explicit position.

        Exactly one of `block` / `position` must be given; otherwise the call
        is logged and ignored. `orientation` is accepted but not used here.
        """
        overhead_pose = Pose()

        if (block is not None and position is None):
            rospy.loginfo(
                "Finding the correct block to transport to above. Looking for a %dx%d %s block",
                block.width, block.length, block.color)
            # TODO: We shouldn't have to go through the list of blocks everytime, store in a dictionary in future
            # Find the location of the block to move towards

            # Keep re-detecting until a matching block shows up in inventory
            # (overhead_pose stays equal to a default Pose until one is found).
            while(overhead_pose == Pose()):
                for block_loc in self.inv_state:
                    rospy.loginfo("Checking block: %dx%d %s", block_loc.width,
                                  block_loc.length, block_loc.color)
                    # Requested block should have same color, width and length
                    if (block_loc.color == block.color
                            and block_loc.width == block.width
                            and block_loc.length == block.length):
                        print("Location to go to is %f %f", block_loc.pose.x,
                              block_loc.pose.y)
                        overhead_pose.position = Point(
                            x=block_loc.pose.x,
                            y=block_loc.pose.y,
                            z=self._hover_distance)
                        overhead_pose.orientation = self._overhead_orientation

                self._detect()

        elif (position is not None and block is None):

            overhead_pose.position = position
            overhead_pose.orientation = self._overhead_orientation

        else:
            rospy.loginfo("One of block or position should be None")
            return

        approach = copy.deepcopy(overhead_pose)
        # approach with a pose the hover-distance above the requested pose
        approach.position.z = self._hover_distance
        joint_angles = self.ik_request(approach)
        self._guarded_move_to_joint_position(joint_angles)
Esempio n. 58
0
def execute(client):
    # type: (RLLDefaultMoveClient) -> bool
    """Demonstrate the motion services offered by the move client.

    Exercised in order:
      1. moveJoints(a1, a2, ..., a7) vs move_joints(joint_values)
      2. move_ptp(pose)
      3. move_lin(pose)
      4. generated_pose = move_random()
      5. pose = get_current_pose()
      6. joint_values = get_current_joint_values()
    """
    # move_joints accepts the seven angles directly; it returns True/False
    # to indicate success and throws on critical failure.
    ok = client.move_joints(0.0, 0.0, 0.0, -pi / 2, 0.0, 0.0, 0.0)
    if not ok:
        rospy.logerr("move_joints service call failed")

    # ... or a single list of joint values.
    client.move_joints([pi / 2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])

    # Point-to-point motion to an explicit pose.
    target = Pose()
    target.position = Point(.4, .4, .5)
    target.orientation = orientation_from_rpy(pi / 2, -pi / 4, pi)
    client.move_ptp(target)

    # Shift the target in x/y and reach it with a linear motion.
    target.position.x = 0.2
    target.position.y = .5
    client.move_lin(target)

    # move_random returns the generated Pose on success, None otherwise.
    random_pose = client.move_random()
    if random_pose:
        rospy.loginfo("move_random moved to: %s", random_pose)

    # The current pose should match the random pose just reached.
    current = client.get_current_pose()
    rospy.loginfo("current end effector pose: %s", current)

    # Command a known joint configuration, then read it back and compare.
    commanded = [pi / 2, 0.2, 0, 0, -pi / 4, .24, 0]
    client.move_joints(commanded)
    reported = client.get_current_joint_values()

    agreed = compare_joint_values(reported, commanded)
    rospy.loginfo("Set and queried joint values match: %s",
                  "yes" if agreed else "no")

    return True
Esempio n. 59
0
def kdl_to_pose(frame):
    """
    Convert a PyKDL frame into a geometry_msgs Pose.

    :type frame: PyKDL.Frame
    :rtype: Pose
    """
    pose = Pose()
    # Translation vector -> position fields.
    pose.position.x, pose.position.y, pose.position.z = (
        frame.p[0], frame.p[1], frame.p[2])
    # Rotation matrix -> normalized quaternion.
    pose.orientation = normalize(Quaternion(*frame.M.GetQuaternion()))
    return pose
    def get_cartesian_plan(self, trans, z_offset, start_state, grasp):
        """Plan a Cartesian path to the transform target, offset in z.

        The plan starts from ``start_state`` rather than the live robot
        state.  On success the trajectory is published as a ``HandringPlan``
        (carrying the ``grasp`` flag) and the trajectory's final joint state
        is returned so it can seed the next planning call.

        Args:
            trans: transform whose translation/rotation define the target.
            z_offset: additional z displacement applied to the waypoint.
            start_state: JointState to plan from.
            grasp: flag copied into the published HandringPlan message.

        Returns:
            (True, goal_state) on success,
            (False, start_state) when planning failed on both attempts.
        """
        # set argument start state
        moveit_start_state = RobotState()
        moveit_start_state.joint_state = start_state
        self.arm.set_start_state(moveit_start_state)

        # Build the target pose from the transform, pitching the tool by
        # +90 degrees.  NOTE(review): the pitch offset presumably aligns the
        # end effector with the object — confirm the tool-frame convention.
        self.target_pose.position.x = trans.transform.translation.x
        self.target_pose.position.y = trans.transform.translation.y
        self.target_pose.position.z = trans.transform.translation.z
        q = (trans.transform.rotation.x, trans.transform.rotation.y,
             trans.transform.rotation.z, trans.transform.rotation.w)
        (roll, pitch, yaw) = tf.transformations.euler_from_quaternion(q)
        pitch += pi / 2.0
        tar_q = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
        self.target_pose.orientation.x = tar_q[0]
        self.target_pose.orientation.y = tar_q[1]
        self.target_pose.orientation.z = tar_q[2]
        self.target_pose.orientation.w = tar_q[3]

        # Single waypoint: the target pose shifted by z_offset.
        wpose = Pose()
        wpose.position = copy.deepcopy(self.target_pose.position)
        wpose.orientation = copy.deepcopy(self.target_pose.orientation)
        # FIX: deepcopy of a float expression is a no-op; assign directly.
        wpose.position.z = self.target_pose.position.z + z_offset
        waypoints = [wpose]

        # Plan, retrying once with an extended planning time.
        # BUG FIX: the original computed a second plan but returned failure
        # before ever checking it, discarding a successful retry; it also
        # left the planning time elevated on the failure path.
        plan = RobotTrajectory()
        max_attempts = 2
        for attempt in range(1, max_attempts + 1):
            (plan, fraction) = self.arm.compute_cartesian_path(
                waypoints,  # waypoints to follow
                0.01,  # eef_step
                0.0)  # jump_threshold
            if len(plan.joint_trajectory.points) > 0:
                break
            # Give the next attempt more planning time.
            self.arm.set_planning_time(self.planning_limitation_time +
                                       attempt * 5.0)
        # Always restore the default planning time.
        self.arm.set_planning_time(self.planning_limitation_time)
        if len(plan.joint_trajectory.points) == 0:
            return (False, start_state)

        rospy.loginfo("!! Got a cartesian plan !!")
        # publish the plan
        pub_msg = HandringPlan()
        pub_msg.grasp = grasp
        pub_msg.trajectory = plan
        self.hp_pub.publish(pub_msg)
        self.arm.clear_pose_targets()
        # return goal state from generated trajectory
        goal_state = JointState()
        goal_state.header = Header()
        goal_state.header.stamp = rospy.Time.now()
        goal_state.name = plan.joint_trajectory.joint_names[:]
        goal_state.position = plan.joint_trajectory.points[-1].positions[:]
        return (True, goal_state)