def matrix2pose(matrix, frame_id=None, time=None):
    msg = PoseStampedMsg()
    T = tf_conversions.fromMatrix(matrix)
    msg.pose = tf_conversions.toMsg(T)
    msg.header.frame_id = frame_id
    msg.header.stamp = time
    return msg.pose if time is None else msg
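A minimal usage sketch for matrix2pose (not from the original source), assuming PoseStampedMsg is geometry_msgs.msg.PoseStamped and that tf.transformations is available to build the input matrix; the frame name is a placeholder.

# Hedged usage sketch: a pure-translation 4x4 matrix converted to a Pose or PoseStamped.
import rospy
import tf

T = tf.transformations.translation_matrix([0.5, 0.0, 0.2])   # identity rotation, translation only

pose_only = matrix2pose(T)                                    # geometry_msgs/Pose
stamped = matrix2pose(T, frame_id='base_link', time=rospy.Time(0))  # PoseStamped; rospy.Time.now() would need an initialized node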
Example #2
def graspit_grasp_pose_to_moveit_grasp_pose(move_group_commander, listener, graspit_grasp_msg,
                                            grasp_frame='/approach_tran'):
    """
    :param move_group_commander: A move_group command from which to get the end effector link.
    :type move_group_commander: moveit_commander.MoveGroupCommander
    :param listener: A transformer for looking up the transformation
    :type listener: tf.TransformListener
    :param graspit_grasp_msg: A graspit grasp message
    :type graspit_grasp_msg: graspit_msgs.msg.Grasp

    """

    try:
        listener.waitForTransform(grasp_frame, move_group_commander.get_end_effector_link(),
                                     rospy.Time(0), timeout=rospy.Duration(1))
        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(grasp_frame, move_group_commander.get_end_effector_link(), rospy.Time())
    except Exception:
        rospy.logerr("graspit_grasp_pose_to_moveit_grasp_pose::\n " +
                     "Failed to find transform from %s to %s" % (grasp_frame, move_group_commander.get_end_effector_link()))
        ipdb.set_trace()



    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(tf_conversions.fromMsg(graspit_grasp_msg.final_grasp_pose))
    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS().fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix, approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose = tf_conversions.toMsg(tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    return actual_ee_pose
Example #3
def graspit_grasp_pose_to_moveit_grasp_pose(move_group_commander, listener, graspit_grasp_msg,
                                            grasp_frame='/approach_tran'):
    """
    :param move_group_commander: A move_group command from which to get the end effector link.
    :type move_group_commander: moveit_commander.MoveGroupCommander
    :param listener: A transformer for looking up the transformation
    :type listener: tf.TransformListener
    :param graspit_grasp_msg: A graspit grasp message
    :type graspit_grasp_msg: graspit_msgs.msg.Grasp

    """

    try:
        listener.waitForTransform(grasp_frame, move_group_commander.get_end_effector_link(),
                                     rospy.Time(0), timeout=rospy.Duration(1))
        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(grasp_frame, move_group_commander.get_end_effector_link(), rospy.Time())
    except Exception:
        rospy.logerr("graspit_grasp_pose_to_moveit_grasp_pose::\n " +
                     "Failed to find transform from %s to %s" % (grasp_frame, move_group_commander.get_end_effector_link()))
        ipdb.set_trace()



    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(tf_conversions.fromMsg(graspit_grasp_msg.final_grasp_pose))
    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS().fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    if move_group_commander.get_end_effector_link() == 'l_wrist_roll_link':
        rospy.logerr('This is a PR2\'s left arm so we have to rotate things.')
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation([0,0,0], tf.transformations.quaternion_from_euler(0,math.pi/2,0))
    else:
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation([0, 0, 0], [0, 0, 0, 1])  # identity transform (unit quaternion)
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix, approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose_matrix = np.dot(actual_ee_pose_matrix, pr2_is_weird_mat)
    actual_ee_pose = tf_conversions.toMsg(tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    return actual_ee_pose
def axis2pose(rvec, tvec, frame_id=None, time=None):
    msg = PoseStampedMsg()
    T = axis2matrix(rvec, tvec)
    T = tf_conversions.fromMatrix(T)
    msg.pose = tf_conversions.toMsg(T)
    msg.header.frame_id = frame_id
    msg.header.stamp = time
    return msg.pose if time is None else msg
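axis2pose above relies on an axis2matrix helper that is not shown in this snippet. A plausible implementation, assuming rvec is an axis-angle (Rodrigues) rotation vector and tvec a translation, might look like the following sketch; it is not the original project's code.

# Hypothetical helper assumed by axis2pose: axis-angle vector + translation -> 4x4 homogeneous matrix.
import numpy as np
import tf


def axis2matrix(rvec, tvec):
    angle = np.linalg.norm(rvec)
    if angle < 1e-12:
        T = np.eye(4)                                               # no rotation
    else:
        T = tf.transformations.rotation_matrix(angle, np.asarray(rvec) / angle)
    T[0:3, 3] = tvec                                                # set the translation part
    return T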
def graspit_grasp_pose_to_moveit_grasp_pose(move_group_commander,
                                            listener,
                                            graspit_grasp_msg,
                                            grasp_frame='/approach_tran'):
    """
    :param move_group_commander: A move_group command from which to get the end effector link.
    :type move_group_commander: moveit_commander.MoveGroupCommander
    :param listener: A transformer for looking up the transformation
    :type listener: tf.TransformListener
    :param graspit_grasp_msg: A graspit grasp message
    :type graspit_grasp_msg: graspit_msgs.msg.Grasp

    """

    try:
        listener.waitForTransform(grasp_frame,
                                  move_group_commander.get_end_effector_link(),
                                  rospy.Time(0),
                                  timeout=rospy.Duration(1))
        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(
            grasp_frame, move_group_commander.get_end_effector_link(),
            rospy.Time())
    except Exception:
        rospy.logerr(
            "graspit_grasp_pose_to_moveit_grasp_pose::\n " +
            "Failed to find transform from %s to %s" %
            (grasp_frame, move_group_commander.get_end_effector_link()))
        ipdb.set_trace()

    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg.final_grasp_pose))
    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS(
    ).fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    if move_group_commander.get_end_effector_link() == 'l_wrist_roll_link':
        rospy.logerr('This is a PR2\'s left arm so we have to rotate things.')
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation(
            [0, 0, 0],
            tf.transformations.quaternion_from_euler(0, math.pi / 2, 0))
    else:
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation(
            [0, 0, 0], [0, 0, 0, 1])  # identity transform (unit quaternion)
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix,
                                   approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose_matrix = np.dot(actual_ee_pose_matrix, pr2_is_weird_mat)
    actual_ee_pose = tf_conversions.toMsg(
        tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    return actual_ee_pose
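A hedged call-site sketch for the function above; the node name, planning group, and the placeholder grasp message are assumptions and would need to match the actual robot setup.

# Illustrative call site (a sketch; names here are placeholders):
import sys
import rospy
import tf
import moveit_commander
import graspit_msgs.msg

moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('graspit_to_moveit_example')

group = moveit_commander.MoveGroupCommander('arm')          # assumed planning group name
listener = tf.TransformListener()
rospy.sleep(1.0)                                            # give the TF listener time to buffer

grasp = graspit_msgs.msg.Grasp()                            # stand-in for a grasp received elsewhere
grasp.final_grasp_pose.orientation.w = 1.0                  # identity orientation

ee_pose = graspit_grasp_pose_to_moveit_grasp_pose(group, listener, grasp,
                                                  grasp_frame='/approach_tran')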
Example #6
def get_graspit_pose_to_moveit_pose(moveit_grasp_msg, graspit_tran):

    listener = tf.listener.TransformListener()

    listener.waitForTransform("approach_tran", 'staubli_rx60l_link7', rospy.Time(), timeout=rospy.Duration(1.0))
    print '3'
    at_to_ee_tran, at_to_ee_rot = listener.lookupTransform("approach_tran", 'staubli_rx60l_link7',rospy.Time())
    print '4'

    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS().fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    actual_ee_pose_matrix = np.dot( graspit_tran, approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose = tf_conversions.toMsg(tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    moveit_grasp_msg_copy = deepcopy(moveit_grasp_msg)
    moveit_grasp_msg_copy.grasp_pose.pose = actual_ee_pose
    return moveit_grasp_msg_copy
Example #7
    def send_command(self):
        self.current_joint_positions = self.q
        msg = JointState()
        msg.header.stamp = rospy.get_rostime()
        msg.header.frame_id = "simuated_data"
        msg.name = self.JOINT_NAMES
        msg.position = self.current_joint_positions
        msg.velocity = [0]*6
        msg.effort = [0]*6
        self.joint_state_publisher.publish(msg)

        pose = self.kdl_kin.forward(self.q)
        F = tf_c.fromMatrix(pose)
        # F = self.F_command
        self.current_tcp_pose = tf_c.toMsg(F)
        self.current_tcp_frame = F
        self.broadcaster_.sendTransform(tuple(F.p), tuple(F.M.GetQuaternion()), rospy.Time.now(), '/endpoint', '/base_link')
Example #8
File: run.py  Project: CRLab/mico_skills
def transform_pose(grasp_pose_in_world, world_frame_to_object_frame_transform):

    # Convert ROSmsg to 4x4 numpy arrays
    grasp_pose_in_world_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(grasp_pose_in_world))
    object_pose_in_world_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(world_frame_to_object_frame_transform))

    # Transform the grasp pose into the object reference frame
    grasp_pose_in_object_tran_matrix = np.dot(
        np.linalg.inv(object_pose_in_world_tran_matrix),
        grasp_pose_in_world_tran_matrix)

    # Convert pose back into ROS message
    grasp_pose_in_object = tf_conversions.toMsg(
        tf_conversions.fromMatrix(grasp_pose_in_object_tran_matrix))

    return grasp_pose_in_object
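The matrix algebra in transform_pose is simply object_T_grasp = inv(world_T_object) * world_T_grasp. A small self-contained check of that identity, using only numpy and tf.transformations with made-up values:

# Sketch: verify the frame algebra used above with illustrative numbers.
import numpy as np
import tf.transformations as tfs

world_T_object = tfs.compose_matrix(translate=[1.0, 0.0, 0.5], angles=[0.0, 0.0, np.pi / 2])
object_T_grasp = tfs.compose_matrix(translate=[0.1, 0.0, 0.2])
world_T_grasp = np.dot(world_T_object, object_T_grasp)

recovered = np.dot(np.linalg.inv(world_T_object), world_T_grasp)
assert np.allclose(recovered, object_T_grasp)   # object_T_grasp = inv(world_T_object) * world_T_grasp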
def graspit_grasp_pose_to_moveit_grasp_pose(
        listener,                     # type: tf.TransformListener
        graspit_grasp_msg,            # type: graspit_msgs.msg.Grasp
        end_effector_link,            # type: str
        grasp_frame                   # type: str
):
    # type: (...) -> geometry_msgs.msg.Pose
    """
    :param listener: A transformer for looking up the transformation
    :param graspit_grasp_msg: A graspit grasp message
    """

    try:
        listener.waitForTransform(
            grasp_frame,
            end_effector_link,
            rospy.Time(0),
            timeout=rospy.Duration(1)
        )

        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(
            grasp_frame,
            end_effector_link,
            rospy.Time()
        )

    except Exception as e:
        rospy.logerr("graspit_grasp_pose_to_moveit_grasp_pose::\n "
                     "Failed to find transform from {} to {}"
                     .format(grasp_frame, end_effector_link)
                     )
        ipdb.set_trace()

        return geometry_msgs.msg.Pose()

    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg.final_grasp_pose)
    )

    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS().fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix, approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose = tf_conversions.toMsg(tf_conversions.fromMatrix(actual_ee_pose_matrix))

    return actual_ee_pose
Example #10
    def publish_pose(self):
        q = tf_conversions.transformations.quaternion_from_euler(0, 0, -self.pos.theta)
        q = tf_conversions.transformations.quaternion_matrix(q)
        translation = tf_conversions.transformations.translation_matrix([self.pos.x, self.pos.y, 0])
        transform = np.dot(translation, q)

        t = geometry_msgs.msg.TransformStamped()
        t.header.stamp = rospy.Time.now()
        t.header.frame_id = "world"
        t.child_frame_id = "bradbot"

        pose = tf_conversions.posemath.toMsg(tf_conversions.fromMatrix(transform))
        t.transform.translation.x = pose.position.x
        t.transform.translation.y = pose.position.y
        t.transform.translation.z = pose.position.z
        t.transform.rotation.w = pose.orientation.w
        t.transform.rotation.x = pose.orientation.x
        t.transform.rotation.y = pose.orientation.y
        t.transform.rotation.z = pose.orientation.z

        self.transform_broadcaster.sendTransform(t)
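For comparison, a hedged sketch of the same broadcast using the older tuple-based tf.TransformBroadcaster API (the style used in the send_command example earlier); the helper name and frame names are illustrative, and it assumes a classic tf broadcaster rather than a tf2 one.

# Sketch: send a 4x4 matrix as a transform via the tf1 tuple API, skipping the Pose round trip.
import rospy
import tf
import tf_conversions


def broadcast_matrix_tf1(broadcaster, transform, child_frame="bradbot", parent_frame="world"):
    f = tf_conversions.fromMatrix(transform)        # 4x4 matrix -> PyKDL frame
    broadcaster.sendTransform(tuple(f.p),           # (x, y, z)
                              f.M.GetQuaternion(),  # (qx, qy, qz, qw)
                              rospy.Time.now(),
                              child_frame,
                              parent_frame)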
def graspit_grasp_pose_to_moveit_grasp_pose(
        listener,  # type: tf.TransformListener
        graspit_grasp_msg,  # type: graspit_interface.msg.Grasp
        end_effector_link,  # type: str
        grasp_frame  # type: str
):
    # type: (...) -> geometry_msgs.msg.Pose
    """
    :param listener: A transformer for looking up the transformation
    :param graspit_grasp_msg: A graspit grasp message
    """

    try:
        listener.waitForTransform(grasp_frame,
                                  end_effector_link,
                                  rospy.Time(0),
                                  timeout=rospy.Duration(1))

        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(
            grasp_frame, end_effector_link, rospy.Time())

    except Exception as e:
        rospy.logerr("graspit_grasp_pose_to_moveit_grasp_pose::\n "
                     "Failed to find transform from {} to {}".format(
                         grasp_frame, end_effector_link))
        ipdb.set_trace()

        return geometry_msgs.msg.Pose()

    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg.pose))

    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS(
    ).fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix,
                                   approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose = tf_conversions.toMsg(
        tf_conversions.fromMatrix(actual_ee_pose_matrix))

    return actual_ee_pose
def get_graspit_pose_to_moveit_pose(moveit_grasp_msg, graspit_tran):

    listener = tf.listener.TransformListener()

    listener.waitForTransform("approach_tran",
                              'staubli_rx60l_link7',
                              rospy.Time(),
                              timeout=rospy.Duration(1.0))
    print '3'
    at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(
        "approach_tran", 'staubli_rx60l_link7', rospy.Time())
    print '4'

    approach_tran_to_end_effector_tran_matrix = tf.TransformerROS(
    ).fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    actual_ee_pose_matrix = np.dot(graspit_tran,
                                   approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose = tf_conversions.toMsg(
        tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    moveit_grasp_msg_copy = deepcopy(moveit_grasp_msg)
    moveit_grasp_msg_copy.grasp_pose.pose = actual_ee_pose
    return moveit_grasp_msg_copy
def get_graspit_grasp_pose_in_new_reference_frame(
        graspit_grasp_msg_pose,  # type: geometry_msgs.msg.Pose
        target_to_source_translation_rotation,  # type: tuple of tuples
):
    # type: (...) -> geometry_msgs.msg.Pose
    """
    :param target_to_source_translation_rotation: result of listener.lookupTransform(target_frame, grasp_frame, rospy.Time(0))
    :param graspit_grasp_msg_pose: The pose of a graspit grasp message i.e. g.pose
    t_T_G = t_T_s * s_T_G
    """

    graspit_grasp_pose_in_source_frame_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg_pose)
    )

    source_in_target_frame_tran_matrix = tf.TransformerROS().fromTranslationRotation(
        target_to_source_translation_rotation[0],
        target_to_source_translation_rotation[1])
    graspit_grasp_pose_in_target_frame_matrix = np.dot(source_in_target_frame_tran_matrix,
                                                       graspit_grasp_pose_in_source_frame_matrix)  # t_T_G = t_T_s * s_T_G
    graspit_grasp_pose_in_target_frame = tf_conversions.toMsg(
        tf_conversions.fromMatrix(graspit_grasp_pose_in_target_frame_matrix))

    return graspit_grasp_pose_in_target_frame
def change_end_effector_link(
        graspit_grasp_msg_pose,  # type: geometry_msgs.msg.Pose
        old_link_to_new_link_translation_rotation,  # type: tuple of tuples
):
    # type: (...) -> geometry_msgs.msg.Pose
    """
    :param old_link_to_new_link_translation_rotation: result of listener.lookupTransform(old_link, new_link, rospy.Time(0))
    :param graspit_grasp_msg_pose: The pose of a graspit grasp message i.e. g.pose
    ref_T_nl = ref_T_ol * ol_T_nl
    """

    graspit_grasp_pose_for_old_link_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg_pose)
    )

    old_link_to_new_link_transform_matrix = tf.TransformerROS().fromTranslationRotation(
        old_link_to_new_link_translation_rotation[0],
        old_link_to_new_link_translation_rotation[1])
    graspit_grasp_pose_for_new_link_matrix = np.dot(graspit_grasp_pose_for_old_link_matrix,
                                                    old_link_to_new_link_transform_matrix)  # ref_T_nl = ref_T_ol * ol_T_nl
    graspit_grasp_pose_for_new_link = tf_conversions.toMsg(
        tf_conversions.fromMatrix(graspit_grasp_pose_for_new_link_matrix))

    return graspit_grasp_pose_for_new_link
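A hedged end-to-end sketch that chains the two helpers above: first re-express a GraspIt! pose in a different reference frame, then account for a different end-effector link. The frame and link names are placeholders, and the sketch assumes a running TF tree that publishes them; the lookups follow the tf.TransformListener API used elsewhere in these examples.

# Sketch (frame/link names are assumptions):
import rospy
import tf
import geometry_msgs.msg

rospy.init_node('frame_change_example')
listener = tf.TransformListener()

listener.waitForTransform('object_frame', 'world', rospy.Time(0), timeout=rospy.Duration(1))
target_to_source = listener.lookupTransform('object_frame', 'world', rospy.Time(0))

listener.waitForTransform('approach_tran', 'ee_link', rospy.Time(0), timeout=rospy.Duration(1))
old_to_new_link = listener.lookupTransform('approach_tran', 'ee_link', rospy.Time(0))

graspit_pose = geometry_msgs.msg.Pose()      # stand-in for a grasp pose expressed in 'world'
graspit_pose.orientation.w = 1.0

pose_in_object = get_graspit_grasp_pose_in_new_reference_frame(graspit_pose, target_to_source)
pose_for_new_link = change_end_effector_link(pose_in_object, old_to_new_link)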
Example #15
		#camerafile.write('camera_position, rospy.Time.now(), q1w, q1x, q1y, q1z, q2w, q2x, q2y, q2z\n') 
		heuristicsfile = open(testpath,'w')
		heuristicsfile.write('PointArng, TriangleSize, Extension, Spread, Limit, PerpSym, ParallelSym, OrthoNorm, Volume, Hand_Qx, Hand_Qy, Hand_Qz,Hand_Qw,Hand_x,Hand_y,Hand_z, ObjectName, Object_Qx, Object_Qy, Object_Qz, Object_Qw, Object_x, Object_y, Object_z, Start_Position,' + str(rospy.Time.now()) + '\n')
		heuristicsfile.close()
	"""
	# Define the OpenRAVE variables necessary for controlling the robot. See OpenRAVE documentation
	# for more information
    
    
    
	# ****************** Repeat this loop while ROS is running. *************************

	while not rospy.is_shutdown():
		# Get hand position and robot joints to create new message
		MyTrans = MyRave.manip.GetTransform()   # Get the hand transform				
		f2 = tf_conversions.fromMatrix(MyTrans)   # Convert the transform to a ROS frame
		f2 = tf_conversions.toMsg(f2)    #  Convert the frame to a message format for publishing
		joints = MyRave.robot.GetDOFValues()

		
		#rospy.loginfo("Joints\n" + str(joints))
		#rospy.logdebug(MyTrans)

		# Use the values retrieved to create new message of current robot location
		f = RobotPose()	
		f.position = f2.position
		f.orientation = f2.orientation

		#f.j_position = [joints[0],joints[1],joints[2],joints[3]]  # For use when only Barrett hand is loaded
		#f.j_position = [joints[7],joints[8],joints[9],joints[10]] # Hand joints 1-3(7-9) and spread(10) 
Example #16
	def UpdateRobotJoints(self, msg):
		msg2 = None
		#rospy.loginfo('command received')

		MyTrans = self.manip.GetTransform()   # Get the hand transform
		#ftest = tf_conversions.fromMsg(MyTrans)
		#ftest = tf_conversions.toMatrix(ftest)	
		
		f1 = tf_conversions.fromMsg(msg)
		MyMatrix = tf_conversions.toMatrix(f1)


		# Create message to set robot commands
		move_msg = PoseCheck()
		# Copy Postition/Orientation from RobotPose msg to PoseCheck srv
		move_msg.position = msg.position
		move_msg.orientation = msg.orientation
		move_msg.spread = 0.0

		# Check if A button was pressed, if so initiate robot movement
		if(msg.j_position[0]):
			move_msg.move = True
		else:
			move_msg.move = False	
		# not currently used so set to false by default		
		move_msg.get_norm = False
		
		# PRE OSU_ROS_ADEPT CODE, NOT USED IN ADEPT/TRIGRIP PROGRAM

		# Function to run if using Interactive Markers
		if msg.j_position[3] != 0:
			msg2 = PoseCheck()
			mycog = self.object.GetTransform()
			#Add offset of object and subtract a small amount to fix minor Z offset error.
			MyMatrix[0:3,3] = MyMatrix[0:3,3] + mycog[0:3,3] - MyMatrix[0:3,2]*.0175
			f2 = tf_conversions.fromMatrix(MyMatrix)   # Convert the transform to a ROS frame
			msg2.pose = tf_conversions.toMsg(f2) 
			msg2.move = True
		#rospy.logdebug("before receiving error")
		if msg.j_position[1] == 0 and msg.j_position[2] == 1:
			grasp_msg=GraspObject()
			grasp_msg.grasp_direction="open"
			grasp_msg.spread=False
			grasp_msg.get_joint_vals=False		
			self.Grasp_Object(grasp_msg) # Open the hand
			#self.Grasp_Object(grasp_direction="open") # Open the hand
			
		# END NON OSU_ROS_ADEPT CODE

		if msg2 is not None:  # Move arm to new position
			self.handle_pose_check(msg2)
		else:
			# Sends osu_ros_adept message
			self.handle_pose_check(move_msg)
    
		#TODO: Fix this or remove it for the WAM operation.  Cannot use robot.SetTransform.
		# For Palm grasp option
		if msg.j_position[3] == 2:
			HandDirection = MyMatrix[0:3,2]
			for i in range(0,1000):  # loop until the object is touched or limit is reached
				MyMatrix[0:3,3] += .0001 * HandDirection
				with self.env:
					self.robot.SetTransform(MyMatrix)  # Move hand forward
				for link in self.robot.GetLinks():    # Check for collisions
					incollision=self.env.CheckCollision(link,report)
					if incollision:
						i = 1000
				if i == 1000:   # If hand is in collision, break from the loop
					break

		# Check to see if any of the manipulator buttons have been pressed.  Otherwise, move the hand.
		if msg.j_position[1] == 1 and msg.j_position[2] == 0:
			grasp_msg=GraspObject()
			grasp_msg.grasp_direction="close"
			grasp_msg.spread=False
			grasp_msg.get_joint_vals=False		
			self.Grasp_Object(grasp_msg) # Close the hand
			#self.Grasp_Object(grasp_direction="close") # Open the hand
		return
    def update_position(self, event):
        """
        Computes the pose of the vehicle and the ripper in the terrain map.
        """

        # Create a copy of the most recent stored twist data to perform calculations
        with self.lock:
            velocity_data = copy.deepcopy(self.twist)

        # Time elapsed since last update position call
        if hasattr(event, 'last_real') and event.last_real is not None:
            time = event.current_real - event.last_real
        else:
            time = rospy.Duration(0.05)

        time = time.to_sec()

        # Calculate angle turned in the given time using omega = theta/time
        angle = velocity_data.angular.z * time

        # Calculate distance travelled in the given time using linear velocity = arc distance/time
        distance = velocity_data.linear.x * time

        # Calculate yaw of the robot
        self.vehicle_yaw += angle

        # Calculate vehicle x, y, z position coordinates
        # TODO recalculate the position based on traveling in a circular arc.
        self.pose.position.x += (distance) * cos(self.vehicle_yaw)
        self.pose.position.y += (distance) * sin(self.vehicle_yaw)

        # Calculate z position using linear interpolation and create cloud array

        # 1. Create ranges to be used in interpolation function
        terrain_points_x = np.arange(
            0, self.gaussian_array.shape[1] * self.resolution, self.resolution)
        terrain_points_y = np.arange(
            0, self.gaussian_array.shape[0] * self.resolution, self.resolution)

        # 2. Create array of points to be converted to point cloud for vizualization
        terrain_mesh_x, terrain_mesh_y = np.meshgrid(terrain_points_x,
                                                     terrain_points_y)
        terrain_x = terrain_mesh_x.ravel()
        terrain_y = terrain_mesh_y.ravel()
        terrain_z = self.gaussian_array.ravel()
        terrain_grid_points = np.stack((terrain_x, terrain_y, terrain_z),
                                       axis=1)

        # 3. Create interpolation function based on the ranges and gaussian data
        interp_func = RectBivariateSpline(terrain_points_y, terrain_points_x,
                                          self.gaussian_array)

        # 4. Find z value for x and y coordinate of vehicle using interpolation function
        # TODO compute z height based on footprint
        self.pose.position.z = interp_func(self.pose.position.y,
                                           self.pose.position.x)

        # Convert Euler Angles to Quaternion
        V_rotation = tf.transformations.quaternion_from_euler(
            0.0, 0.0, self.vehicle_yaw)

        # Broadcast vehicle frame which is a child of the world frame
        br = tf.TransformBroadcaster()
        br.sendTransform(
            (self.pose.position.x, self.pose.position.y, self.pose.position.z),
            V_rotation, rospy.Time.now(), "vehicle_frame", "map")

        # Construct the homogenous transformation matrix for map to vehicle frame
        V_translation = [
            self.pose.position.x, self.pose.position.y, self.pose.position.z
        ]
        map_T_V = tf.transformations.quaternion_matrix(V_rotation)
        map_T_V[:3, 3] = np.array(V_translation)

        # Create footprint of vehicle
        V_footprint_range_x = np.linspace((-self.vehicle_length / 2),
                                          (self.vehicle_length / 2), 30)
        V_footprint_range_y = np.linspace((-self.vehicle_width / 2),
                                          (self.vehicle_width / 2), 15)
        V_footprint_mesh_x, V_footprint_mesh_y = np.meshgrid(
            V_footprint_range_x, V_footprint_range_y)
        V_footprint_x = V_footprint_mesh_x.ravel()
        V_footprint_y = V_footprint_mesh_y.ravel()

        # For every point in the vehicle footprint, calculate the position wrt to the vehicle's frame
        # and its interpolated z value. Add this point to a list of points for visualization.
        # TODO Flatten into a single matrix multiply to remove for loop
        V_viz_points = []
        for i in range(V_footprint_x.shape[0]):
            p = Point()
            V_footprint_point = np.array([[V_footprint_x[i]],
                                          [V_footprint_y[i]], [0.0], [1.0]])
            V_footprint_point = np.matmul(map_T_V, V_footprint_point)
            V_footprint_point[2, 0] = interp_func(V_footprint_point[1, 0],
                                                  V_footprint_point[0, 0])
            p.x = V_footprint_point[0, 0]
            p.y = V_footprint_point[1, 0]
            p.z = V_footprint_point[2, 0]
            V_viz_points.append(p)

        #####################################################################################
        # Create a copy of the most recent stored JointState data to perform calculations
        with self.joint_lock:
            joint_data = copy.deepcopy(self.joint)

        # If the data is empty on first run, fill with 0.0
        if not joint_data.velocity:
            joint_data.velocity = [0.0, 0.0]

        # Calculate angle based on velocity data and time
        angle = joint_data.velocity[0] * time
        angle2 = joint_data.velocity[1] * time

        self.joint1_pitch += angle
        self.joint2_pitch += angle2

        # Transformations from vehicle frame to Joint1 and Joint2

        # Static rotation about z-axis
        static_rot = tf.transformations.quaternion_from_euler(
            0.0, 0.0, 3.14159)
        translation = [0.0, 0.0, 0.0]
        V_T_SRz = tf.transformations.quaternion_matrix(static_rot)
        V_T_SRz[:3, 3] = np.array(translation)

        # Dynamic rotation about the y-axis of Joint 1
        rot_SRz_T_J1 = [[cos(self.joint1_pitch), 0.0,
                         sin(self.joint1_pitch)], [0.0, 1.0, 0.0],
                        [-sin(self.joint1_pitch), 0.0,
                         cos(self.joint1_pitch)]]

        trans_SRz_T_J1 = [0.0, 0.0, 0.0, 1.0]

        SRz_T_J1 = np.zeros((4, 4))
        SRz_T_J1[:3, :3] = rot_SRz_T_J1
        SRz_T_J1[:4, 3] = trans_SRz_T_J1

        # Translation based on length of Joint 1 arm
        no_rot = tf.transformations.quaternion_from_euler(0.0, 0.0, 0.0)
        translation = [self.joint1_length, 0.0, 0.0]
        J1_T_STx = tf.transformations.quaternion_matrix(no_rot)
        J1_T_STx[:3, 3] = np.array(translation)

        # Dynamic rotation about y-axis of Joint 2
        dynamic_rot2 = tf.transformations.quaternion_from_euler(
            0.0, self.joint2_pitch, 0.0)
        translation = [0.0, 0.0, 0.0]
        STx_T_J2 = tf.transformations.quaternion_matrix(dynamic_rot2)
        STx_T_J2[:3, 3] = np.array(translation)

        # matrix multiplication to form the homogenous matrices
        V_T_J1 = np.matmul(V_T_SRz, SRz_T_J1)
        V_T_STx = np.matmul(V_T_J1, J1_T_STx)
        V_T_J2 = np.matmul(V_T_STx, STx_T_J2)

        frame_J1 = tf_conversions.fromMatrix(V_T_J1)
        frame_J2 = tf_conversions.fromMatrix(V_T_J2)

        # The ripper tip is a point in the J2's frame, this is based on the length of the ripper
        ripper_tip_point_J2 = [self.ripper_length, 0.0, 0.0, 1.0]
        map_T_J2 = np.matmul(map_T_V, V_T_J2)
        ripper_tip_pt_map = np.matmul(map_T_J2, ripper_tip_point_J2)
        ripper_tip_point_viz = Point()
        ripper_tip_point_viz.x = ripper_tip_pt_map[0]
        ripper_tip_point_viz.y = ripper_tip_pt_map[1]
        ripper_tip_point_viz.z = ripper_tip_pt_map[2]
        V_viz_points.append(ripper_tip_point_viz)

        # use the ripper's position as an index value to access the gaussian array
        ripper_tip_cell_index_x = int(ripper_tip_pt_map[1] / self.resolution)
        ripper_tip_cell_index_y = int(ripper_tip_pt_map[0] / self.resolution)

        # Create a range of index values surrounding index_x and y
        nearby_index_cells_range_x = np.arange(
            (ripper_tip_cell_index_x - 1), (ripper_tip_cell_index_x + 2), 1)
        nearby_index_cells_range_y = np.arange(
            (ripper_tip_cell_index_y - 1), (ripper_tip_cell_index_y + 2), 1)
        nearby_index_cells_mesh_x, nearby_index_cells_mesh_y = np.meshgrid(
            nearby_index_cells_range_x, nearby_index_cells_range_y)
        nearby_index_cells_x = nearby_index_cells_mesh_x.ravel()
        nearby_index_cells_y = nearby_index_cells_mesh_y.ravel()

        # First check if the index is within the gaussian array, if it is, then check if the tip of
        # the ripper is beneath the soil, if it is, then remove the soil above the tip and disperse
        # it to the surrounding cells, provided those cells are also within the gaussian array
        # TODO Remove use of for loops and excess if statements

        if (0 <= ripper_tip_cell_index_x <=
            (self.gaussian_array.shape[0] - 1)) and (
                0 <= ripper_tip_cell_index_y <=
                (self.gaussian_array.shape[1] - 1)):
            if (self.gaussian_array[ripper_tip_cell_index_x]
                [ripper_tip_cell_index_y] > ripper_tip_pt_map[2]):
                diff = self.gaussian_array[ripper_tip_cell_index_x][
                    ripper_tip_cell_index_y] - ripper_tip_pt_map[2]
                for i in range(nearby_index_cells_x.shape[0]):
                    if (0 <= nearby_index_cells_x[i] <=
                        (self.gaussian_array.shape[0] - 1)) and (
                            0 <= nearby_index_cells_y[i] <=
                            (self.gaussian_array.shape[1] - 1)):
                        self.gaussian_array[nearby_index_cells_x[i]][
                            nearby_index_cells_y[i]] += diff / 8
                self.gaussian_array[ripper_tip_cell_index_x][
                    ripper_tip_cell_index_y] = ripper_tip_pt_map[2]

        # Publish all messages
        self.publish_messages(V_translation, V_rotation, terrain_grid_points,
                              V_viz_points, frame_J1, frame_J2)
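The z-height lookup in update_position boils down to fitting a RectBivariateSpline over the height grid and evaluating it at (y, x). A tiny standalone sketch of that step, with made-up grid values:

# Sketch: interpolate a height from a regular grid, mirroring the update_position logic.
import numpy as np
from scipy.interpolate import RectBivariateSpline

resolution = 0.5
heights = np.random.rand(20, 30)                  # rows ~ y, cols ~ x (made-up terrain data)
ys = np.arange(0, heights.shape[0] * resolution, resolution)
xs = np.arange(0, heights.shape[1] * resolution, resolution)

interp = RectBivariateSpline(ys, xs, heights)
z = interp(3.2, 7.5)                              # note the (y, x) argument order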
def set_tran_as_pose(moveit_grasp_msg, tran):
    pose_msg = tf_conversions.toMsg(tf_conversions.fromMatrix(tran))
    moveit_grasp_msg_copy = deepcopy(moveit_grasp_msg)
    moveit_grasp_msg_copy.grasp_pose.pose = pose_msg
    return moveit_grasp_msg_copy
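A minimal usage sketch for set_tran_as_pose, assuming moveit_grasp_msg is a moveit_msgs/Grasp (which carries a grasp_pose field) and using an identity matrix as the transform:

# Sketch: stamp an identity transform into a copy of a MoveIt! grasp message.
import numpy as np
import moveit_msgs.msg

grasp = moveit_msgs.msg.Grasp()
updated_grasp = set_tran_as_pose(grasp, np.eye(4))
# updated_grasp.grasp_pose.pose now holds the pose converted from the 4x4 matrix;
# the original 'grasp' message is left untouched thanks to deepcopy.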
Example #20
    def __init__(self):
        rospy.init_node('ur_simulation',anonymous=True)
        rospy.logwarn('SIMPLE_UR SIMULATION DRIVER LOADING')
        # TF
        self.broadcaster_ = tf.TransformBroadcaster()
        self.listener_ = tf.TransformListener()
        # SERVICES
        self.servo_to_pose_service = rospy.Service('simple_ur_msgs/ServoToPose', ServoToPose, self.servo_to_pose_call)
        self.set_stop_service = rospy.Service('simple_ur_msgs/SetStop', SetStop, self.set_stop_call)
        self.set_teach_mode_service = rospy.Service('simple_ur_msgs/SetTeachMode', SetTeachMode, self.set_teach_mode_call)
        self.set_servo_mode_service = rospy.Service('simple_ur_msgs/SetServoMode', SetServoMode, self.set_servo_mode_call)
        # PUBLISHERS AND SUBSCRIBERS
        self.driver_status_publisher = rospy.Publisher('/ur_robot/driver_status',String)
        self.robot_state_publisher = rospy.Publisher('/ur_robot/robot_state',String)
        self.joint_state_publisher = rospy.Publisher('joint_states',JointState)
        # self.follow_pose_subscriber = rospy.Subscriber('/ur_robot/follow_goal',PoseStamped,self.follow_goal_cb)
        # Predicator
        self.pub_list = rospy.Publisher('/predicator/input', PredicateList)
        self.pub_valid = rospy.Publisher('/predicator/valid_input', ValidPredicates)
        rospy.sleep(1)
        pval = ValidPredicates()
        pval.pheader.source = rospy.get_name()
        pval.predicates = ['moving', 'stopped', 'running']
        pval.assignments = ['robot']
        self.pub_valid.publish(pval)
        # Rate
        self.run_rate = rospy.Rate(100)
        self.run_rate.sleep()
        ### Set Up Simulated Robot ###
        self.driver_status = 'IDLE'
        self.robot_state = 'POWER OFF'
        robot = URDF.from_parameter_server()
        self.kdl_kin = KDLKinematics(robot, 'base_link', 'ee_link')
        # self.q = self.kdl_kin.random_joint_angles()
        self.q = [-1.5707,-.785,-3.1415+.785,-1.5707-.785,-1.5707,0] # Start Pose?
        self.start_pose = self.kdl_kin.forward(self.q)
        self.F_start = tf_c.fromMatrix(self.start_pose)
        # rospy.logwarn(self.start_pose)
        # rospy.logwarn(type(self.start_pose))
        # pose = self.kdl_kin.forward(q)
        # joint_positions = self.kdl_kin.inverse(pose, q+0.3) # inverse kinematics
        # if joint_positions is not None:
        #     pose_sol = self.kdl_kin.forward(joint_positions) # should equal pose
        # J = self.kdl_kin.jacobian(q)
        # rospy.logwarn('q:'+str(q))
        # rospy.logwarn('joint_positions:'+str(joint_positions))
        # rospy.logwarn('pose:'+str(pose))
        # if joint_positions is not None:
        #     rospy.logwarn('pose_sol:'+str(pose_sol))
        # rospy.logwarn('J:'+str(J))

        ### START LOOP ###
        while not rospy.is_shutdown():
            if self.driver_status == 'TEACH':
                self.update_from_marker()
            
            # if self.driver_status == 'SERVO':
            #     self.update_follow()

            # Publish and Sleep
            self.publish_status()
            self.send_command()
            self.run_rate.sleep()

        # Finish
        rospy.logwarn('SIMPLE UR - Simulation Finished')