def callback(m):
    # One-shot pose callback: re-expresses the YAML footstep plan in the frame
    # implied by the robot's current pose (anchored at the mid-COP frame) and
    # publishes it exactly once; `ready`/`stop` globals gate execution.
    global pub
    global stop
    global data
    global tfListener
    global numberOfFootstepsInList
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Wait for transform
            tfListener.waitForTransform("/pelvis", "/leftCOP_Frame",
                                        rospy.Time(), rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightCOP_Frame",
                                        rospy.Time(), rospy.Duration(4.0))
            # Get current robot pose
            pos1, rot1 = tfListener.lookupTransform("/pelvis",
                                                    "/leftCOP_Frame",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis",
                                                    "/rightCOP_Frame",
                                                    rospy.Time())
            # Mid-feet frame: average the two positions, slerp the rotations.
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            pose = pm.fromMsg(m.pose.pose) * midFeet
            # Get footstep trajectory from YAML
            message = message_converter.convert_dictionary_to_ros_message(
                'controller_msgs/FootstepDataListMessage', data)
            # Update the footstep frames
            numberOfFootstepsInList = len(message.footstep_data_list)
            for step in message.footstep_data_list:
                pstep = pose * pm.Frame(
                    pm.Rotation.Quaternion(
                        step.orientation.x, step.orientation.y,
                        step.orientation.z, step.orientation.w),
                    pm.Vector(step.location.x, step.location.y,
                              step.location.z))
                msg = pm.toMsg(pstep)
                step.location = msg.position
                # Normalize orientation (guards against drift from the frame product)
                quat_original = np.array([
                    msg.orientation.x, msg.orientation.y, msg.orientation.z,
                    msg.orientation.w
                ])
                quat_normalized = quat_original / np.linalg.norm(quat_original)
                msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = quat_normalized[
                    0], quat_normalized[1], quat_normalized[
                        2], quat_normalized[3]
                # Update step orientation
                step.orientation = msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def update_position(self):
    """Refresh the cached end-effector pose and the goal/motion status flags.

    Requires two consecutive joint states (`q0`, `old_q0`); otherwise does
    nothing. Sets `at_goal`, possibly latches `near_goal`, and updates
    `moving` from joint-space change between samples.
    """
    if self.q0 is None or self.old_q0 is None:
        return

    self.ee_pose = pm.fromMatrix(self.kdl_kin.forward(self.q0))

    if self.goal is not None:
        translation_err = (self.ee_pose.p - self.goal.p).Norm()
        rpy_now = pm.Vector(*self.ee_pose.M.GetRPY())
        rpy_goal = pm.Vector(*self.goal.M.GetRPY())
        rotation_err = self.goal_rotation_weight * (rpy_now - rpy_goal).Norm()
        total_err = translation_err + rotation_err
        self.at_goal = total_err < self.max_goal_diff
        if total_err < 10 * self.max_goal_diff:
            # Latched: this flag is only ever set here, never cleared.
            self.near_goal = True

    # The robot counts as moving when the joint vector changed appreciably.
    self.moving = not (np.abs(self.old_q0 - self.q0).sum() < self.max_q_diff)
def robot_pose_callback(self, msg):
    # Pose callback: caches the robot time on every message and, once while in
    # STATE_UPDATE_ROBOT_POSE, computes the world -> sole-adjusted mid-feet
    # transform into self.robot_pose.
    # Update robot time
    self.robot_time = msg.header.stamp.to_sec()
    # Gets the SE3 transform from world to the midfeet frame.
    if self.state == STATE_UPDATE_ROBOT_POSE and not(self.updated_robot_pose):
        print (' Robot Pose Callback')
        # NOTE(review): `tfListener` is read from module scope here, while other
        # methods in this file use `self.tfListener` — confirm a module-level
        # listener is initialized before this runs.
        # Wait for transform
        tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(), rospy.Duration(4.0))
        tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(), rospy.Duration(4.0))
        # Get current robot pose
        pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",rospy.Time())
        pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",rospy.Time())
        # Mid-feet frame: average the positions, slerp the rotations.
        pos = (np.array(pos1)+np.array(pos2))*0.5
        rot = pm.transformations.quaternion_slerp(rot1,rot2,0.5)
        midFeet = pm.Frame(pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]), pm.Vector(pos[0], pos[1], pos[2]))
        # From IHMC's ValkyriePhysicalProperties.java:
        # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
        footsizeReduction = 0.01
        footLength = 0.25 - footsizeReduction
        ankleHeight = 0.09
        footBack = 0.073 - footsizeReduction/2.0
        # Offset from the ankle frame down/forward to the sole frame.
        ankleToSoleOffset = pm.Vector(footLength/2.0 - footBack, 0.0, -ankleHeight)
        self.robot_pose = pm.fromMsg(msg.pose.pose)*midFeet*pm.Frame(pm.Rotation(), ankleToSoleOffset)
        self.updated_robot_pose = True
    return
def transformSE3(msg):
    """Re-express an SE(3) state message (pose + twist) in the global `pose`
    frame, mutating `msg` in place and renormalizing the orientation."""
    global pose
    frame_in = pm.Frame(
        pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y,
                               msg.orientation.z, msg.orientation.w),
        pm.Vector(msg.position.x, msg.position.y, msg.position.z))
    twist_in = pm.Twist(
        pm.Vector(msg.linear_velocity.x, msg.linear_velocity.y,
                  msg.linear_velocity.z),
        pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y,
                  msg.angular_velocity.z))

    frame_out = pose * frame_in
    twist_out = pose * twist_in

    transformed = pm.toMsg(frame_out)
    msg.position = transformed.position
    msg.orientation = transformed.orientation

    # Renormalize the quaternion to guard against accumulated numeric drift.
    quat = np.array([msg.orientation.x, msg.orientation.y,
                     msg.orientation.z, msg.orientation.w])
    quat = quat / np.linalg.norm(quat)
    msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = \
        quat[0], quat[1], quat[2], quat[3]

    msg.linear_velocity = Vector3(twist_out.vel.x(), twist_out.vel.y(),
                                  twist_out.vel.z())
    msg.angular_velocity = Vector3(twist_out.rot.x(), twist_out.rot.y(),
                                   twist_out.rot.z())
def transformSO3(msg):
    """Re-express an SO(3) state message (orientation + angular velocity) in
    the global `pose` frame, mutating `msg` in place."""
    global pose
    # Orientation only: position is pinned to the origin.
    rotation_frame = pm.Frame(
        pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y,
                               msg.orientation.z, msg.orientation.w),
        pm.Vector(0, 0, 0))
    # Angular velocity only: linear part is zero.
    angular_twist = pm.Twist(
        pm.Vector(0, 0, 0),
        pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y,
                  msg.angular_velocity.z))

    transformed = pm.toMsg(pose * rotation_frame)
    rotated = pose * angular_twist

    msg.orientation = transformed.orientation
    msg.angular_velocity = Vector3(rotated.rot.x(), rotated.rot.y(),
                                   rotated.rot.z())
def callback(m):
    """One-shot pose callback: re-expresses the YAML footstep plan relative to
    the robot's current mid-feet sole frame and publishes it exactly once.

    Fix: the multi-target quaternion assignment was split across two physical
    lines without a continuation, which is a SyntaxError; the statement is
    rejoined here (and the throwaway loop message renamed so it no longer
    shadows the conventional `msg` name used elsewhere).
    """
    global pub
    global stop
    global data
    global tfListener
    global numberOfFootstepsInList
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Wait for transform
            tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            # Get current robot pose
            pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",
                                                    rospy.Time())
            # Mid-feet frame: average the positions, slerp the rotations.
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # From IHMC's ValkyriePhysicalProperties.java:
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
            footsizeReduction = 0.01
            footLength = 0.25 - footsizeReduction
            ankleHeight = 0.09
            footBack = 0.073 - footsizeReduction / 2.0
            ankleToSoleOffset = pm.Vector(footLength / 2.0 - footBack, 0.0,
                                          -ankleHeight)
            pose = pm.fromMsg(m.pose.pose) * midFeet * pm.Frame(
                pm.Rotation(), ankleToSoleOffset)
            # Get footstep trajectory from YAML
            message = message_converter.convert_dictionary_to_ros_message(
                'controller_msgs/FootstepDataListMessage', data)
            # Update the footstep frames
            numberOfFootstepsInList = len(message.footstep_data_list)
            for step in message.footstep_data_list:
                pstep = pose * pm.Frame(
                    pm.Rotation.Quaternion(
                        step.orientation.x, step.orientation.y,
                        step.orientation.z, step.orientation.w),
                    pm.Vector(step.location.x, step.location.y,
                              step.location.z))
                step_msg = pm.toMsg(pstep)
                step.location = step_msg.position
                # Normalize orientation (guards against numeric drift).
                quat = np.array([step_msg.orientation.x,
                                 step_msg.orientation.y,
                                 step_msg.orientation.z,
                                 step_msg.orientation.w])
                quat = quat / np.linalg.norm(quat)
                (step_msg.orientation.x, step_msg.orientation.y,
                 step_msg.orientation.z, step_msg.orientation.w) = \
                    quat[0], quat[1], quat[2], quat[3]
                # Update step orientation
                step.orientation = step_msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def robot_pose_callback(self, msg):
    # Pose callback: caches robot time on every message and, once while in
    # STATE_UPDATE_ROBOT_POSE, computes the world -> mid-COP (mid-feet)
    # transform into self.robot_pose.
    # Update robot time
    self.robot_time = msg.header.stamp.to_sec()
    # Gets the SE3 transform from world to the midfeet frame.
    if self.state == STATE_UPDATE_ROBOT_POSE and not (
            self.updated_robot_pose):
        print(' Robot Pose Callback')
        # NOTE(review): `tfListener` is read from module scope, not `self` —
        # confirm a global listener is initialized before this runs.
        # Wait for transform
        tfListener.waitForTransform("/pelvis", "/leftCOP_Frame", rospy.Time(),
                                    rospy.Duration(4.0))
        tfListener.waitForTransform("/pelvis", "/rightCOP_Frame", rospy.Time(),
                                    rospy.Duration(4.0))
        # Get current robot pose
        pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftCOP_Frame",
                                                rospy.Time())
        pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightCOP_Frame",
                                                rospy.Time())
        # Mid-feet frame: average the positions, slerp the rotations.
        pos = (np.array(pos1) + np.array(pos2)) * 0.5
        rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
        midFeet = pm.Frame(
            pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
            pm.Vector(pos[0], pos[1], pos[2]))
        self.robot_pose = pm.fromMsg(msg.pose.pose) * midFeet
        self.updated_robot_pose = True
    return
def callback(m):
    # One-shot pose callback: records the robot time, then stores the robot's
    # current pose anchored at the mid-COP frame into the global `pose` and
    # latches `stop` so this runs only once.
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    if ready:
        if not stop:
            # Wait for transform
            tfListener.waitForTransform("/pelvis", "/leftCOP_Frame",
                                        rospy.Time(), rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightCOP_Frame",
                                        rospy.Time(), rospy.Duration(4.0))
            # Get current robot pose
            pos1, rot1 = tfListener.lookupTransform("/pelvis",
                                                    "/leftCOP_Frame",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis",
                                                    "/rightCOP_Frame",
                                                    rospy.Time())
            # Mid-feet frame: average the positions, slerp the rotations.
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            pose = pm.fromMsg(m.pose.pose) * midFeet
            stop = True
def __init__(self):
    """Initialize cached poses, IHMC hand-frame offsets, outgoing messages,
    and the bookkeeping flags used by the pose/footstep callbacks."""
    def identity_frame():
        # Fresh identity frame per attribute so none of them alias.
        return pm.Frame(pm.Rotation.Quaternion(0, 0, 0, 1),
                        pm.Vector(0.0, 0.0, 0.0))

    # Cached poses, all starting at the identity.
    self.robot_pose = identity_frame()
    self.mid_feet_pose = identity_frame()
    self.right_hand_pose = identity_frame()
    self.left_hand_pose = identity_frame()

    # Fixed offsets between the palm frames and IHMC's hand control frames.
    quarter_turn = np.pi / 2.0
    self.right_hand_ihmc_offset = pm.Frame(
        pm.Rotation.RPY(0.0, 0.0, -quarter_turn),
        pm.Vector(0.025, -0.07, 0.0))
    self.left_hand_ihmc_offset = pm.Frame(
        pm.Rotation.RPY(0.0, 0.0, quarter_turn),
        pm.Vector(0.025, 0.07, 0.0))

    # Outgoing messages.
    self.footsteps_msg = FootstepDataListMessage()
    self.hand_msg = HandTrajectoryMessage()

    # State flags.
    self.robot_pose_ready = False
    self.update_robot_pose_once = False
    self.messages_sent = False
    self.completed_footsteps = False
def callback(m):
    # One-shot pose callback (older ihmc_msgs / release-0.10 variant):
    # re-expresses the YAML footstep plan relative to the robot's current
    # pose and publishes it exactly once.
    global pub
    global stop
    global data
    global tfListener
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Get current robot pose
            # NOTE(review): lookups here are foot -> pelvis and the mid-feet
            # frame is inverted below — the opposite direction from the other
            # callbacks in this file; confirm this is intentional.
            pos1, rot1 = tfListener.lookupTransform("/leftFoot", "/pelvis",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/rightFoot", "/pelvis",
                                                    rospy.Time())
            # Mid-feet frame: average the positions, slerp the rotations.
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # release/0.10 changed from ankle to sole frames for specifying footsteps. This is a transform extracted by Doug
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight))
            # with: footLength = 0.24, footBack = 0.068, ankleHeight = 0.09
            ankleToSoleOffset = pm.Vector(0.24 / 2.0 - 0.068, 0.0, -0.09)
            pose = pm.fromMsg(m.pose.pose) * midFeet.Inverse() * pm.Frame(
                pm.Rotation(), ankleToSoleOffset)
            # Get footstep trajectory from YAML
            message = message_converter.convert_dictionary_to_ros_message(
                'ihmc_msgs/FootstepDataListRosMessage', data)
            # Update the footstep frames
            for step in message.footstep_data_list:
                pstep = pose * pm.Frame(
                    pm.Rotation.Quaternion(
                        step.orientation.x, step.orientation.y,
                        step.orientation.z, step.orientation.w),
                    pm.Vector(step.location.x, step.location.y,
                              step.location.z))
                msg = pm.toMsg(pstep)
                step.location = msg.position
                # NOTE: no quaternion normalization here, unlike the
                # controller_msgs variants elsewhere in this file.
                step.orientation = msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def callback(m):
    # One-shot pose callback: records the robot time, then stores the robot's
    # pose shifted to the mid-feet *sole* frame into the global `pose` and
    # latches `stop` so this runs only once.
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    if ready:
        if not stop:
            # Wait for transform
            tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            # Get current robot pose
            pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",
                                                    rospy.Time())
            # Mid-feet frame: average the positions, slerp the rotations.
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # From IHMC's ValkyriePhysicalProperties.java:
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
            footsizeReduction = 0.01
            footLength = 0.25 - footsizeReduction
            ankleHeight = 0.09
            footBack = 0.073 - footsizeReduction / 2.0
            # Offset from the ankle frame down/forward to the sole frame.
            ankleToSoleOffset = pm.Vector(footLength / 2.0 - footBack, 0.0,
                                          -ankleHeight)
            pose = pm.fromMsg(m.pose.pose) * midFeet * pm.Frame(
                pm.Rotation(), ankleToSoleOffset)
            stop = True
def callback(m):
    """One-shot pose callback: record robot time, then compute the global
    `pose` from the inverted mid-feet frame plus the ankle-to-sole offset,
    latching `stop` so the work happens only once."""
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    if not ready or stop:
        return
    # Get current robot pose (note: foot -> pelvis direction).
    left_pos, left_rot = tfListener.lookupTransform("/leftFoot", "/pelvis",
                                                    rospy.Time())
    right_pos, right_rot = tfListener.lookupTransform("/rightFoot", "/pelvis",
                                                      rospy.Time())
    mid_pos = (np.array(left_pos) + np.array(right_pos)) * 0.5
    mid_rot = pm.transformations.quaternion_slerp(left_rot, right_rot, 0.5)
    midFeet = pm.Frame(
        pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2], mid_rot[3]),
        pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
    # release/0.10 changed from ankle to sole frames for specifying footsteps.
    # This is a transform extracted by Doug:
    # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight))
    # with: footLength = 0.24, footBack = 0.068, ankleHeight = 0.09
    ankleToSoleOffset = pm.Vector(0.24 / 2.0 - 0.068, 0.0, -0.09)
    pose = pm.fromMsg(m.pose.pose) * midFeet.Inverse() * pm.Frame(
        pm.Rotation(), ankleToSoleOffset)
    stop = True
def __init__(self):
    """Initialize robot bookkeeping, the state machine, and test tracking.

    Fix: `self.robot_status` was assigned twice with conflicting strings
    ("Unknown Status", then "unspecified status"); only the value that
    previously won is kept, so observable state is unchanged.
    """
    # Robot related Variables
    self.robot_status = "unspecified status"
    self.robot_pose = pm.Frame(pm.Rotation.Quaternion(0, 0, 0, 1),
                               pm.Vector(0.0, 0.0, 0.0))
    self.robot_time = None
    self.updated_robot_pose = False
    self.robot_stopped_moving = False
    self.footstep_pause_interrupt = False

    self.state = STATE_LOAD_MESSAGE  # Initialize state to load message

    # Test bookkeeping.
    self.list_of_tests = []
    self.current_test = None
    self.test_index = -1  # Starts at invalid index
    self.package_path = ""
def transformFootsteps(self, msg):
    """Transform every footstep in `msg` into the robot frame, in place.

    Each step's pose is pre-multiplied by `self.robot_pose` and its
    orientation quaternion is renormalized before being written back.

    Fixes: the loop previously rebound the `msg` parameter (shadowing the
    footstep-list message being iterated) — a dedicated local name is used
    instead; the dead `numberOfFootstepsInList` local is removed.
    """
    for step in msg.footstep_data_list:
        pstep = self.robot_pose * pm.Frame(
            pm.Rotation.Quaternion(step.orientation.x, step.orientation.y,
                                   step.orientation.z, step.orientation.w),
            pm.Vector(step.location.x, step.location.y, step.location.z))
        step_msg = pm.toMsg(pstep)
        step.location = step_msg.position
        # Normalize orientation (guards against drift from the frame product).
        quat = np.array([step_msg.orientation.x, step_msg.orientation.y,
                         step_msg.orientation.z, step_msg.orientation.w])
        quat = quat / np.linalg.norm(quat)
        (step_msg.orientation.x, step_msg.orientation.y,
         step_msg.orientation.z, step_msg.orientation.w) = \
            quat[0], quat[1], quat[2], quat[3]
        # Update step orientation
        step.orientation = step_msg.orientation
def transformSE3_hand(self, msg, side):
    """Re-express a hand SE(3) state message in the matching hand frame
    (left or right, per `side`), mutating `msg` in place."""
    frame_in = pm.Frame(
        pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y,
                               msg.orientation.z, msg.orientation.w),
        pm.Vector(msg.position.x, msg.position.y, msg.position.z))
    twist_in = pm.Twist(
        pm.Vector(msg.linear_velocity.x, msg.linear_velocity.y,
                  msg.linear_velocity.z),
        pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y,
                  msg.angular_velocity.z))

    # Select the transform for the requested side (anything other than
    # ROBOT_SIDE_LEFT uses the right hand, as before).
    if side == HandTrajectoryMessage().ROBOT_SIDE_LEFT:
        hand_frame = self.left_hand_pose
    else:
        hand_frame = self.right_hand_pose
    frame_out = hand_frame * frame_in
    twist_out = hand_frame * twist_in

    transformed = pm.toMsg(frame_out)
    msg.position = transformed.position
    msg.orientation = transformed.orientation

    # Renormalize the quaternion to guard against numeric drift.
    quat = np.array([msg.orientation.x, msg.orientation.y,
                     msg.orientation.z, msg.orientation.w])
    quat = quat / np.linalg.norm(quat)
    msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = \
        quat[0], quat[1], quat[2], quat[3]

    msg.linear_velocity = Vector3(twist_out.vel.x(), twist_out.vel.y(),
                                  twist_out.vel.z())
    msg.angular_velocity = Vector3(twist_out.rot.x(), twist_out.rot.y(),
                                   twist_out.rot.z())
def query(self, req, disable_target_object_collision=False):
    """Find the best object poses to manipulate, without moving.

    Like smart move, but planning only: checks robot collision and
    reachability over every grasp pose allowed by the object's symmetry, and
    returns candidates sorted best (lowest cost) first.

    Returns a list of (distance, T, object, name) tuples, [] when nothing
    matches, or a FAILURE string when no robot state has been received.

    Fixes vs. previous version: the post-loop sanity checks compared list
    lengths with `is not` (identity, only correct by CPython small-int
    caching); the final sort compared whole tuples, which raises TypeError
    when two distances tie and the PyKDL Frames get compared; the dead
    `joint = JointState()` local is removed.
    """
    # Find possible poses
    res = self.get_waypoints_srv.get_waypoints(
        req.obj_class,   # object class to move to
        req.predicates,  # predicates to match
        [req.pose],      # offset/transform from each member of the class
        [req.name]       # placeholder name
    )
    if res is None:
        # no poses found that meet the query!
        return []
    (poses, names, objects) = res
    if poses is None:
        return []

    selected_objs, selected_names = [], []
    dists = []
    Ts = []
    if self.q0 is None:
        rospy.logerr("Robot state has not yet been received!")
        return "FAILURE -- robot state not yet received!"
    T_fwd = pm.fromMatrix(self.kdl_kin.forward(self.q0))
    number_of_valid_query_poses, number_of_invalid_query_poses = 0, 0
    for (pose, name, obj) in zip(poses, names, objects):
        # figure out which tf frame we care about: first non-empty path part
        tf_frame = ''
        for part in name.split('/'):
            if len(part) > 0:
                tf_frame = part
                break
        if len(tf_frame) == 0:
            continue
        # Express the candidate pose in the robot base frame.
        T_base_world = pm.fromTf(self.listener.lookupTransform(
            self.world, self.base_link, rospy.Time(0)))
        T = T_base_world.Inverse() * pm.fromMsg(pose)
        # Ignore anything below the table: we don't need to even bother
        # checking that position.
        if self.table_pose is not None:
            if T.p[2] < self.table_pose[0][2]:
                rospy.logwarn(
                    "[QUERY] Ignoring due to relative z: %f < %f x=%f y=%f %s" % (
                        T.p[2], self.table_pose[0][2], T.p[0], T.p[1], obj))
                continue
            dist_from_table = (T.p - pm.Vector(*self.table_pose[0])).Norm()
            if dist_from_table > self.max_dist_from_table:
                rospy.logwarn("[QUERY] Ignoring due to table distance: %f > %f" % (
                    dist_from_table, self.max_dist_from_table))
                continue
        valid_pose, best_dist, best_invalid, message_print, \
            message_print_invalid, best_q = self.get_best_distance(
                T, T_fwd, self.q0, check_closest_only=False, obj_name=obj)
        if best_q is None or len(best_q) == 0:
            rospy.logwarn("[QUERY] DID NOT ADD:" + message_print)
            continue
        elif valid_pose:
            rospy.loginfo('[QUERY] %s' % message_print)
            Ts.append(T)
            selected_objs.append(obj)
            selected_names.append(name)
            dists.append(best_dist)
            number_of_valid_query_poses += 1
        else:
            # Keep reachable-but-invalid poses with a penalty so they sort
            # after every valid pose instead of being dropped outright.
            rospy.loginfo('[QUERY] %s' % message_print_invalid)
            Ts.append(T)
            selected_objs.append(obj)
            selected_names.append(name)
            dists.append(best_invalid + self.state_validity_penalty)
            number_of_invalid_query_poses += 1

    # Sanity checks (use value equality, not identity).
    if len(Ts) != len(dists):
        raise RuntimeError('You miscounted the number of transforms somehow.')
    if len(Ts) != len(selected_objs):
        raise RuntimeError('You miscounted the number of transforms vs. objects somehow.')
    if len(Ts) != len(selected_names):
        raise RuntimeError('You miscounted the number of transforms vs. names somehow.')

    if len(Ts) == 0:
        possible_goals = []
    else:
        possible_goals = [(d, T, o, n) for (d, T, o, n)
                          in zip(dists, Ts, selected_objs, selected_names)
                          if T is not None]
        # Sort on distance only; comparing whole tuples would fall through to
        # the (unorderable) Frame objects whenever two distances tie.
        possible_goals.sort(key=lambda goal: goal[0])

    rospy.loginfo("[QUERY] There are %i of valid poses and %i of invalid poses" % (
        number_of_valid_query_poses, number_of_invalid_query_poses))
    return possible_goals
def robot_pose_callback(self, msg):
    """Cache the robot, mid-feet and hand poses from TF, exactly once.

    Looks up the COP (feet) and palm frames relative to the pelvis, combines
    them with the incoming world pose, and latches the result so later
    callbacks become no-ops. On TF failure, retries on the next callback.

    Fixes: the bare `except:` also swallowed KeyboardInterrupt/SystemExit —
    narrowed to `Exception` and the cause is now logged; unused
    leftFoot/rightFoot frame locals removed.
    """
    if self.update_robot_pose_once:
        return
    try:
        # Get current robot pose
        now = rospy.Time()
        self.tfListener.waitForTransform("/pelvis", "/leftCOP_Frame", now,
                                         rospy.Duration(4.0))
        self.tfListener.waitForTransform("/pelvis", "/rightCOP_Frame", now,
                                         rospy.Duration(4.0))
        self.tfListener.waitForTransform("/pelvis", "/rightPalm", now,
                                         rospy.Duration(4.0))
        self.tfListener.waitForTransform("/pelvis", "/leftPalm", now,
                                         rospy.Duration(4.0))
        pos1, rot1 = self.tfListener.lookupTransform("/pelvis",
                                                     "/leftCOP_Frame", now)
        pos2, rot2 = self.tfListener.lookupTransform("/pelvis",
                                                     "/rightCOP_Frame", now)
        pos_rp, rot_rp = self.tfListener.lookupTransform("/pelvis",
                                                         "/rightPalm", now)
        pos_lp, rot_lp = self.tfListener.lookupTransform("/pelvis",
                                                         "/leftPalm", now)
        rightPalm = pm.Frame(
            pm.Rotation.Quaternion(rot_rp[0], rot_rp[1], rot_rp[2], rot_rp[3]),
            pm.Vector(pos_rp[0], pos_rp[1], pos_rp[2]))
        leftPalm = pm.Frame(
            pm.Rotation.Quaternion(rot_lp[0], rot_lp[1], rot_lp[2], rot_lp[3]),
            pm.Vector(pos_lp[0], pos_lp[1], pos_lp[2]))
        # Mid Feet Pose: average the foot positions, slerp the rotations.
        mid_pos = (np.array(pos1) + np.array(pos2)) * 0.5
        mid_rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
        midFeet = pm.Frame(
            pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2],
                                   mid_rot[3]),
            pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
        self.robot_pose = pm.fromMsg(msg.pose.pose)
        self.mid_feet_pose = pm.fromMsg(msg.pose.pose) * midFeet
        self.right_hand_pose = (pm.fromMsg(msg.pose.pose) * rightPalm *
                                self.right_hand_ihmc_offset)
        self.left_hand_pose = (pm.fromMsg(msg.pose.pose) * leftPalm *
                               self.left_hand_ihmc_offset)
        self.robot_pose_ready = True
        self.update_robot_pose_once = True
        rospy.loginfo("Robot Pose is Ready!")
    except Exception as e:
        rospy.logerr("Could not get the Robot pose. "
                     "Will try again at the next callback. (%s)" % e)
def send_footsteps(self):
    """Relocate the loaded footstep plan into the mid-feet frame, then
    publish it in override mode."""
    for step in self.footsteps_msg.footstep_data_list:
        step_frame = pm.Frame(
            pm.Rotation.Quaternion(step.orientation.x, step.orientation.y,
                                   step.orientation.z, step.orientation.w),
            pm.Vector(step.location.x, step.location.y, step.location.z))
        relocated = pm.toMsg(self.mid_feet_pose * step_frame)
        step.location = relocated.position
        # Renormalize the orientation quaternion before writing it back.
        quat = np.array([relocated.orientation.x, relocated.orientation.y,
                         relocated.orientation.z, relocated.orientation.w])
        quat = quat / np.linalg.norm(quat)
        (relocated.orientation.x, relocated.orientation.y,
         relocated.orientation.z, relocated.orientation.w) = \
            quat[0], quat[1], quat[2], quat[3]
        step.orientation = relocated.orientation
    rospy.loginfo("Relocated footsteps to a local frame")

    # Send as a fresh, overriding plan.
    self.footsteps_msg.queueing_properties.message_id = 1
    self.footsteps_msg.queueing_properties.execution_mode = \
        QueueableMessage().EXECUTION_MODE_OVERRIDE
    rospy.loginfo("Publishing Footstep Data List...")
    self.pubFootsteps.publish(self.footsteps_msg)