def robot_pose_callback(self, msg):
    """Cache the robot's world pose as a world->mid-feet-sole SE(3) frame.

    Runs the TF queries only once per STATE_UPDATE_ROBOT_POSE episode
    (guarded by self.updated_robot_pose); every call still refreshes
    self.robot_time from the message header.
    """
    # Update robot time
    self.robot_time = msg.header.stamp.to_sec()
    # Gets the SE3 transform from world to the midfeet frame.
    if self.state == STATE_UPDATE_ROBOT_POSE and not(self.updated_robot_pose):
        print (' Robot Pose Callback')
        # Wait for transform (up to 4 s for each foot frame to appear in TF)
        tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(), rospy.Duration(4.0))
        tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(), rospy.Duration(4.0))
        # Get current robot pose: pelvis->foot for each ankle frame
        pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",rospy.Time())
        pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",rospy.Time())
        # Mid-feet frame: average the translations, slerp halfway between the
        # two foot orientations.
        pos = (np.array(pos1)+np.array(pos2))*0.5
        rot = pm.transformations.quaternion_slerp(rot1,rot2,0.5)
        midFeet = pm.Frame(pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]), pm.Vector(pos[0], pos[1], pos[2]))
        # From IHMC's ValkyriePhysicalProperties.java:
        # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
        footsizeReduction = 0.01
        footLength = 0.25 - footsizeReduction
        ankleHeight = 0.09
        footBack = 0.073 - footsizeReduction/2.0
        # Offset that moves the ankle frame down to the sole contact frame.
        ankleToSoleOffset = pm.Vector(footLength/2.0 - footBack, 0.0, -ankleHeight)
        # world->pelvis (odometry) * pelvis->midfeet * ankle->sole shift
        self.robot_pose = pm.fromMsg(msg.pose.pose)*midFeet*pm.Frame(pm.Rotation(), ankleToSoleOffset)
        self.updated_robot_pose = True
    return
def callback(m):
    """Relocate a YAML footstep plan into the robot's current frame and publish it once.

    Uses the COP (center-of-pressure / sole) frames, so no extra
    ankle-to-sole offset is applied. Publishes only on the first call
    after `ready` is set; `stop` latches afterwards.
    """
    global pub
    global stop
    global data
    global tfListener
    global numberOfFootstepsInList
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Wait for transform (up to 4 s for each sole frame)
            tfListener.waitForTransform("/pelvis", "/leftCOP_Frame", rospy.Time(), rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightCOP_Frame", rospy.Time(), rospy.Duration(4.0))
            # Get current robot pose (pelvis->each sole frame)
            pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftCOP_Frame", rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightCOP_Frame", rospy.Time())
            # Mid-feet frame: mean translation, halfway slerp of orientations
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # world->pelvis (from odometry msg) * pelvis->midfeet
            pose = pm.fromMsg(m.pose.pose) * midFeet
            # Get footstep trajectory from YAML (loaded earlier into `data`)
            message = message_converter.convert_dictionary_to_ros_message(
                'controller_msgs/FootstepDataListMessage', data)
            # Update the footstep frames: express every step in the world frame
            numberOfFootstepsInList = len(message.footstep_data_list)
            for step in message.footstep_data_list:
                pstep = pose * pm.Frame(
                    pm.Rotation.Quaternion(
                        step.orientation.x, step.orientation.y,
                        step.orientation.z, step.orientation.w),
                    pm.Vector(step.location.x, step.location.y,
                              step.location.z))
                msg = pm.toMsg(pstep)
                step.location = msg.position
                # Normalize orientation (protect against numerical drift)
                quat_original = np.array([
                    msg.orientation.x, msg.orientation.y, msg.orientation.z,
                    msg.orientation.w
                ])
                quat_normalized = quat_original / np.linalg.norm(quat_original)
                msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = quat_normalized[
                    0], quat_normalized[1], quat_normalized[
                        2], quat_normalized[3]
                # Update step orientation
                step.orientation = msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def callback(m):
    """Relocate a YAML footstep plan into the robot's current frame and publish it once.

    Ankle-frame variant: uses /leftFoot and /rightFoot (ankle frames) and
    shifts the result down to the sole contact frame with the Valkyrie
    ankle-to-sole offset. Publishes only once; `stop` latches afterwards.
    """
    global pub
    global stop
    global data
    global tfListener
    global numberOfFootstepsInList
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Wait for transform (up to 4 s for each ankle frame)
            tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(), rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(), rospy.Duration(4.0))
            # Get current robot pose (pelvis->each ankle frame)
            pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",rospy.Time())
            # Mid-feet frame: mean translation, halfway slerp of orientations
            pos = (np.array(pos1)+np.array(pos2))*0.5
            rot = pm.transformations.quaternion_slerp(rot1,rot2,0.5)
            midFeet = pm.Frame(pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]), pm.Vector(pos[0], pos[1], pos[2]))
            # From IHMC's ValkyriePhysicalProperties.java:
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
            footsizeReduction = 0.01
            footLength = 0.25 - footsizeReduction
            ankleHeight = 0.09
            footBack = 0.073 - footsizeReduction/2.0
            # Offset that moves the ankle frame down to the sole contact frame.
            ankleToSoleOffset = pm.Vector(footLength/2.0 - footBack, 0.0, -ankleHeight)
            # world->pelvis (odometry) * pelvis->midfeet * ankle->sole shift
            pose = pm.fromMsg(m.pose.pose)*midFeet*pm.Frame(pm.Rotation(), ankleToSoleOffset)
            # Get footstep trajectory from YAML (loaded earlier into `data`)
            message = message_converter.convert_dictionary_to_ros_message('controller_msgs/FootstepDataListMessage', data)
            # Update the footstep frames: express every step in the world frame
            numberOfFootstepsInList = len(message.footstep_data_list)
            for step in message.footstep_data_list:
                pstep = pose*pm.Frame(pm.Rotation.Quaternion(step.orientation.x, step.orientation.y, step.orientation.z, step.orientation.w), pm.Vector(step.location.x, step.location.y, step.location.z))
                msg = pm.toMsg(pstep)
                step.location = msg.position
                # Normalize orientation (protect against numerical drift)
                quat_original = np.array([msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w])
                quat_normalized = quat_original/np.linalg.norm(quat_original)
                msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = quat_normalized[0], quat_normalized[1], quat_normalized[2], quat_normalized[3]
                # Update step orientation
                step.orientation = msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def callback(m):
    """Latch the robot's world->mid-feet pose from odometry, once.

    Always refreshes the global robotTime; the TF work runs a single
    time (after `ready` is set) and then `stop` latches to suppress
    further updates. Uses the COP (sole) frames.
    """
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    # Guard clauses: only do the TF work once, and only after startup.
    if not ready:
        return
    if stop:
        return
    # Make sure both sole frames are published before querying them.
    tfListener.waitForTransform("/pelvis", "/leftCOP_Frame", rospy.Time(),
                                rospy.Duration(4.0))
    tfListener.waitForTransform("/pelvis", "/rightCOP_Frame", rospy.Time(),
                                rospy.Duration(4.0))
    left_pos, left_rot = tfListener.lookupTransform("/pelvis",
                                                    "/leftCOP_Frame",
                                                    rospy.Time())
    right_pos, right_rot = tfListener.lookupTransform("/pelvis",
                                                      "/rightCOP_Frame",
                                                      rospy.Time())
    # Mid-feet frame: mean of the two translations, halfway slerp of the
    # two orientations.
    mid_pos = 0.5 * (np.array(left_pos) + np.array(right_pos))
    mid_rot = pm.transformations.quaternion_slerp(left_rot, right_rot, 0.5)
    mid_feet_frame = pm.Frame(
        pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2],
                               mid_rot[3]),
        pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
    # world->pelvis (odometry) composed with pelvis->midfeet.
    pose = pm.fromMsg(m.pose.pose) * mid_feet_frame
    stop = True
def robot_pose_callback(self, msg):
    """Cache the robot's world->mid-feet (COP) pose, once per update episode.

    Every call refreshes self.robot_time; the TF queries run only while
    the state machine is in STATE_UPDATE_ROBOT_POSE and the pose has not
    been captured yet.
    """
    # Update robot time
    self.robot_time = msg.header.stamp.to_sec()
    # Guard clause: skip unless an update was requested and not yet done.
    if self.state != STATE_UPDATE_ROBOT_POSE or self.updated_robot_pose:
        return
    print(' Robot Pose Callback')
    # Block (up to 4 s each) until both sole frames exist in TF.
    tfListener.waitForTransform("/pelvis", "/leftCOP_Frame", rospy.Time(),
                                rospy.Duration(4.0))
    tfListener.waitForTransform("/pelvis", "/rightCOP_Frame", rospy.Time(),
                                rospy.Duration(4.0))
    left_pos, left_rot = tfListener.lookupTransform("/pelvis",
                                                    "/leftCOP_Frame",
                                                    rospy.Time())
    right_pos, right_rot = tfListener.lookupTransform("/pelvis",
                                                      "/rightCOP_Frame",
                                                      rospy.Time())
    # Mid-feet frame: mean translation, halfway slerp of the orientations.
    mid_pos = 0.5 * (np.array(left_pos) + np.array(right_pos))
    mid_rot = pm.transformations.quaternion_slerp(left_rot, right_rot, 0.5)
    mid_feet = pm.Frame(
        pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2],
                               mid_rot[3]),
        pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
    # world->pelvis (odometry) composed with pelvis->midfeet.
    self.robot_pose = pm.fromMsg(msg.pose.pose) * mid_feet
    self.updated_robot_pose = True
    return
def transformSE3(msg):
    """Re-express an SE(3) waypoint (pose + twist) via the global `pose`, in place.

    Overwrites msg.position / msg.orientation / msg.linear_velocity /
    msg.angular_velocity with their transformed values; the orientation
    quaternion is re-normalized before being written back.
    """
    global pose
    # Current waypoint as a KDL frame and twist.
    local_frame = pm.Frame(
        pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y,
                               msg.orientation.z, msg.orientation.w),
        pm.Vector(msg.position.x, msg.position.y, msg.position.z))
    local_twist = pm.Twist(
        pm.Vector(msg.linear_velocity.x, msg.linear_velocity.y,
                  msg.linear_velocity.z),
        pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y,
                  msg.angular_velocity.z))
    # Apply the reference transform to both the frame and the twist.
    world_frame = pose * local_frame
    world_twist = pose * local_twist
    transformed = pm.toMsg(world_frame)
    msg.position = transformed.position
    msg.orientation = transformed.orientation
    # Re-normalize the quaternion to guard against numerical drift.
    q = np.array([
        msg.orientation.x, msg.orientation.y, msg.orientation.z,
        msg.orientation.w
    ])
    q = q / np.linalg.norm(q)
    msg.orientation.x, msg.orientation.y, msg.orientation.z, \
        msg.orientation.w = q[0], q[1], q[2], q[3]
    msg.linear_velocity = Vector3(world_twist.vel.x(), world_twist.vel.y(),
                                  world_twist.vel.z())
    msg.angular_velocity = Vector3(world_twist.rot.x(), world_twist.rot.y(),
                                   world_twist.rot.z())
def transformSO3(msg):
    """Re-express an SO(3) waypoint (orientation + angular velocity) via the global `pose`, in place.

    Translation and linear velocity are deliberately zeroed out so only
    the rotational part of the transform takes effect.
    """
    global pose
    zero = pm.Vector(0, 0, 0)
    # Orientation-only frame and angular-only twist for this waypoint.
    local_frame = pm.Frame(
        pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y,
                               msg.orientation.z, msg.orientation.w),
        zero)
    local_twist = pm.Twist(
        zero,
        pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y,
                  msg.angular_velocity.z))
    world_frame = pose * local_frame
    world_twist = pose * local_twist
    transformed = pm.toMsg(world_frame)
    msg.orientation = transformed.orientation
    msg.angular_velocity = Vector3(world_twist.rot.x(), world_twist.rot.y(),
                                   world_twist.rot.z())
def callback(m):
    """Relocate a YAML footstep plan into the robot's current frame and publish it once.

    Older ihmc_msgs variant: looks up foot->pelvis (note the inverted
    direction, hence midFeet.Inverse() below) and applies the release/0.10
    ankle-to-sole offset. Publishes once; `stop` latches afterwards.
    """
    global pub
    global stop
    global data
    global tfListener
    if ready:
        if not stop:
            print('Preparing the footstep message')
            # Get current robot pose (foot->pelvis, i.e. the inverse direction)
            pos1, rot1 = tfListener.lookupTransform("/leftFoot", "/pelvis",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/rightFoot", "/pelvis",
                                                    rospy.Time())
            # Mid-feet frame: mean translation, halfway slerp of orientations
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # release/0.10 changed from ankle to sole frames for specifying footsteps. This is a transform extracted by Doug
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight))
            # with: footLength = 0.24, footBack = 0.068, ankleHeight = 0.09
            ankleToSoleOffset = pm.Vector(0.24 / 2.0 - 0.068, 0.0, -0.09)
            # world->pelvis * (foot->pelvis midpoint)^-1 * ankle->sole shift
            pose = pm.fromMsg(m.pose.pose) * midFeet.Inverse() * pm.Frame(
                pm.Rotation(), ankleToSoleOffset)
            # Get footstep trajectory from YAML (loaded earlier into `data`)
            message = message_converter.convert_dictionary_to_ros_message(
                'ihmc_msgs/FootstepDataListRosMessage', data)
            # Update the footstep frames: express every step in the world frame
            for step in message.footstep_data_list:
                pstep = pose * pm.Frame(
                    pm.Rotation.Quaternion(
                        step.orientation.x, step.orientation.y,
                        step.orientation.z, step.orientation.w),
                    pm.Vector(step.location.x, step.location.y,
                              step.location.z))
                msg = pm.toMsg(pstep)
                step.location = msg.position
                step.orientation = msg.orientation
            print('Relocated footsteps to a local frame')
            print('Publishing...')
            pub.publish(message)
            stop = True
            print('Waiting for execution...')
def callback(m):
    """Latch the world->mid-feet-sole pose from odometry, once.

    Ankle-frame variant: uses /leftFoot and /rightFoot and applies the
    Valkyrie ankle-to-sole offset. Always refreshes robotTime; the TF
    work runs a single time and `stop` latches afterwards.
    """
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    if ready:
        if not stop:
            # Wait for transform (up to 4 s for each ankle frame)
            tfListener.waitForTransform("/pelvis", "/leftFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            tfListener.waitForTransform("/pelvis", "/rightFoot", rospy.Time(),
                                        rospy.Duration(4.0))
            # Get current robot pose (pelvis->each ankle frame)
            pos1, rot1 = tfListener.lookupTransform("/pelvis", "/leftFoot",
                                                    rospy.Time())
            pos2, rot2 = tfListener.lookupTransform("/pelvis", "/rightFoot",
                                                    rospy.Time())
            # Mid-feet frame: mean translation, halfway slerp of orientations
            pos = (np.array(pos1) + np.array(pos2)) * 0.5
            rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(rot[0], rot[1], rot[2], rot[3]),
                pm.Vector(pos[0], pos[1], pos[2]))
            # From IHMC's ValkyriePhysicalProperties.java:
            # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight));
            footsizeReduction = 0.01
            footLength = 0.25 - footsizeReduction
            ankleHeight = 0.09
            footBack = 0.073 - footsizeReduction / 2.0
            # Offset that moves the ankle frame down to the sole contact frame.
            ankleToSoleOffset = pm.Vector(footLength / 2.0 - footBack, 0.0,
                                          -ankleHeight)
            # world->pelvis (odometry) * pelvis->midfeet * ankle->sole shift
            pose = pm.fromMsg(m.pose.pose) * midFeet * pm.Frame(
                pm.Rotation(), ankleToSoleOffset)
            stop = True
def transformFootsteps(self, msg):
    """Transform every footstep in *msg* by self.robot_pose, in place.

    Each step's location/orientation (expressed relative to the robot)
    is composed with the cached world->robot pose so the steps end up in
    world coordinates; orientations are re-normalized before being
    written back.

    Fixes: the transformed pose used to be stored in a local named `msg`,
    shadowing the footstep-list parameter after the first iteration; it
    is now `step_msg`. The dead local `numberOfFootstepsInList` (assigned
    but never read, and not declared global) was removed.
    """
    for step in msg.footstep_data_list:
        # Compose the cached robot pose with this step's relative pose.
        pstep = self.robot_pose * pm.Frame(
            pm.Rotation.Quaternion(step.orientation.x, step.orientation.y,
                                   step.orientation.z, step.orientation.w),
            pm.Vector(step.location.x, step.location.y, step.location.z))
        step_msg = pm.toMsg(pstep)
        step.location = step_msg.position
        # Normalize orientation (protect against numerical drift)
        quat_original = np.array([
            step_msg.orientation.x, step_msg.orientation.y,
            step_msg.orientation.z, step_msg.orientation.w
        ])
        quat_normalized = quat_original / np.linalg.norm(quat_original)
        step_msg.orientation.x, step_msg.orientation.y, \
            step_msg.orientation.z, step_msg.orientation.w = \
            quat_normalized[0], quat_normalized[1], \
            quat_normalized[2], quat_normalized[3]
        # Update step orientation
        step.orientation = step_msg.orientation
def callback(m):
    """Latch the world->mid-feet-sole pose from odometry, once.

    Older variant: TF lookups go foot->pelvis (inverted direction, hence
    the Inverse() below) and the release/0.10 ankle-to-sole offset is
    applied. Always refreshes robotTime; `stop` latches after the first
    successful computation.
    """
    global pubWhole
    global stop
    global data
    global robotTime
    global pose
    global tfListener
    robotTime = m.header.stamp.to_sec()
    # Guard clauses: only compute the pose once, and only after startup.
    if not ready:
        return
    if stop:
        return
    # Foot->pelvis transforms (note the inverted direction).
    left_pos, left_rot = tfListener.lookupTransform("/leftFoot", "/pelvis",
                                                    rospy.Time())
    right_pos, right_rot = tfListener.lookupTransform("/rightFoot", "/pelvis",
                                                      rospy.Time())
    # Mid-feet frame: mean translation, halfway slerp of orientations.
    mid_pos = 0.5 * (np.array(left_pos) + np.array(right_pos))
    mid_rot = pm.transformations.quaternion_slerp(left_rot, right_rot, 0.5)
    mid_feet = pm.Frame(
        pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2],
                               mid_rot[3]),
        pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
    # release/0.10 changed from ankle to sole frames for specifying footsteps.
    # This is a transform extracted by Doug:
    # soleToAnkleFrame.setTranslation(new Vector3D(footLength / 2.0 - footBack, 0.0, -ValkyriePhysicalProperties.ankleHeight))
    # with: footLength = 0.24, footBack = 0.068, ankleHeight = 0.09
    ankle_to_sole = pm.Vector(0.24 / 2.0 - 0.068, 0.0, -0.09)
    # world->pelvis * (foot->pelvis midpoint)^-1 * ankle->sole shift.
    pose = pm.fromMsg(m.pose.pose) * mid_feet.Inverse() \
        * pm.Frame(pm.Rotation(), ankle_to_sole)
    stop = True
def robot_pose_callback(self, msg):
    """Cache the robot, mid-feet, and hand poses in world coordinates, once.

    On the first successful callback this stores:
      - self.robot_pose:      world->pelvis (straight from odometry)
      - self.mid_feet_pose:   world->mid-feet (COP midpoint)
      - self.right_hand_pose / self.left_hand_pose: world->palm composed
        with the IHMC hand control-frame offsets
    and sets robot_pose_ready / update_robot_pose_once so it never runs
    again. TF failures are logged and retried on the next callback.

    Fixes: the bare `except:` (which swallowed every exception, including
    KeyboardInterrupt and programming errors such as typos) is narrowed
    to tf.Exception, the base class of all tf errors — consistent with
    the (tf.LookupException, ...) handlers used elsewhere in this file.
    Dead locals leftFoot/rightFoot (built but never used) were removed.
    """
    if not self.update_robot_pose_once:
        try:
            # Get current robot pose; wait up to 4 s for each frame.
            now = rospy.Time()
            self.tfListener.waitForTransform("/pelvis", "/leftCOP_Frame",
                                             now, rospy.Duration(4.0))
            self.tfListener.waitForTransform("/pelvis", "/rightCOP_Frame",
                                             now, rospy.Duration(4.0))
            self.tfListener.waitForTransform("/pelvis", "/rightPalm", now,
                                             rospy.Duration(4.0))
            self.tfListener.waitForTransform("/pelvis", "/leftPalm", now,
                                             rospy.Duration(4.0))
            pos1, rot1 = self.tfListener.lookupTransform(
                "/pelvis", "/leftCOP_Frame", now)
            pos2, rot2 = self.tfListener.lookupTransform(
                "/pelvis", "/rightCOP_Frame", now)
            pos_rp, rot_rp = self.tfListener.lookupTransform(
                "/pelvis", "/rightPalm", now)
            pos_lp, rot_lp = self.tfListener.lookupTransform(
                "/pelvis", "/leftPalm", now)
            rightPalm = pm.Frame(
                pm.Rotation.Quaternion(rot_rp[0], rot_rp[1], rot_rp[2],
                                       rot_rp[3]),
                pm.Vector(pos_rp[0], pos_rp[1], pos_rp[2]))
            leftPalm = pm.Frame(
                pm.Rotation.Quaternion(rot_lp[0], rot_lp[1], rot_lp[2],
                                       rot_lp[3]),
                pm.Vector(pos_lp[0], pos_lp[1], pos_lp[2]))
            # Mid Feet Pose: mean translation, halfway slerp of orientations
            mid_pos = (np.array(pos1) + np.array(pos2)) * 0.5
            mid_rot = pm.transformations.quaternion_slerp(rot1, rot2, 0.5)
            midFeet = pm.Frame(
                pm.Rotation.Quaternion(mid_rot[0], mid_rot[1], mid_rot[2],
                                       mid_rot[3]),
                pm.Vector(mid_pos[0], mid_pos[1], mid_pos[2]))
            self.robot_pose = pm.fromMsg(msg.pose.pose)
            self.mid_feet_pose = pm.fromMsg(msg.pose.pose) * midFeet
            self.right_hand_pose = pm.fromMsg(msg.pose.pose) * rightPalm \
                * self.right_hand_ihmc_offset
            self.left_hand_pose = pm.fromMsg(msg.pose.pose) * leftPalm \
                * self.left_hand_ihmc_offset
            self.robot_pose_ready = True
            self.update_robot_pose_once = True
            rospy.loginfo("Robot Pose is Ready!")
        except tf.Exception:
            rospy.logerr("Could not get the Robot pose. "
                         "Will try again at the next callback.")
def __init__(self):
    """Initialize the test runner's robot-state and test-list attributes.

    Fixes: self.robot_status was assigned twice ("Unknown Status", then
    "unspecified status"); only the later value survived, so the dead
    first assignment was removed.
    """
    # Robot related Variables
    self.robot_status = "unspecified status"
    # Identity pose until the first odometry callback fills it in.
    self.robot_pose = pm.Frame(pm.Rotation.Quaternion(0, 0, 0, 1),
                               pm.Vector(0.0, 0.0, 0.0))
    self.robot_time = None
    self.updated_robot_pose = False
    self.robot_stopped_moving = False
    self.footstep_pause_interrupt = False
    self.state = STATE_LOAD_MESSAGE  # Initialize state to load message
    # Test bookkeeping.
    self.list_of_tests = []
    self.current_test = None
    self.test_index = -1  # Starts at invalid index
    self.package_path = ""
def send_footsteps(self):
    """Transform the loaded footstep list into world coordinates and publish it.

    Each step (expressed relative to the mid-feet frame) is composed
    with self.mid_feet_pose, its quaternion is re-normalized, and the
    whole list is published in EXECUTION_MODE_OVERRIDE.
    """
    for step in self.footsteps_msg.footstep_data_list:
        # Compose the cached mid-feet pose with this step's relative pose.
        pstep = self.mid_feet_pose*pm.Frame(pm.Rotation.Quaternion(step.orientation.x, step.orientation.y, step.orientation.z, step.orientation.w), pm.Vector(step.location.x, step.location.y, step.location.z))
        msg = pm.toMsg(pstep)
        step.location = msg.position
        # Normalize orientation (protect against numerical drift)
        quat_original = np.array([msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w])
        quat_normalized = quat_original/np.linalg.norm(quat_original)
        msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = quat_normalized[0], quat_normalized[1], quat_normalized[2], quat_normalized[3]
        # Update step orientation
        step.orientation = msg.orientation
    rospy.loginfo("Relocated footsteps to a local frame")
    # Override any queued footsteps with this list.
    self.footsteps_msg.queueing_properties.message_id = 1
    self.footsteps_msg.queueing_properties.execution_mode = QueueableMessage().EXECUTION_MODE_OVERRIDE
    rospy.loginfo("Publishing Footstep Data List...")
    self.pubFootsteps.publish(self.footsteps_msg)
def transformSE3_hand(self, msg, side):
    """Re-express a hand SE(3) waypoint in world coordinates, in place.

    Picks self.left_hand_pose or self.right_hand_pose depending on
    *side* (a HandTrajectoryMessage ROBOT_SIDE_* constant) and applies
    it to the waypoint's pose and twist; the quaternion is re-normalized
    before being written back.
    """
    # Current waypoint as a KDL frame and twist.
    curPos = pm.Frame(pm.Rotation.Quaternion(msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w), pm.Vector(msg.position.x, msg.position.y, msg.position.z))
    curVel = pm.Twist(pm.Vector(msg.linear_velocity.x, msg.linear_velocity.y, msg.linear_velocity.z), pm.Vector(msg.angular_velocity.x, msg.angular_velocity.y, msg.angular_velocity.z))
    pos = curPos
    vel = curVel
    # Choose the cached hand pose for the requested side.
    if side == HandTrajectoryMessage().ROBOT_SIDE_LEFT:
        pos = self.left_hand_pose*curPos
        vel = self.left_hand_pose*curVel
    else:
        pos = self.right_hand_pose*curPos
        vel = self.right_hand_pose*curVel
    tmp = pm.toMsg(pos)
    msg.position = tmp.position
    msg.orientation = tmp.orientation
    # Normalize orientation (protect against numerical drift)
    quat_original = np.array([msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w])
    quat_normalized = quat_original/np.linalg.norm(quat_original)
    # Update orientation
    msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = quat_normalized[0], quat_normalized[1], quat_normalized[2], quat_normalized[3]
    msg.linear_velocity = Vector3(vel.vel.x(), vel.vel.y(), vel.vel.z())
    msg.angular_velocity = Vector3(vel.rot.x(), vel.rot.y(), vel.rot.z())
def __init__(self):
    """Initialize cached poses, IHMC hand offsets, message buffers, and flags."""

    # Each cached pose starts as its own identity frame (separate objects,
    # exactly as the original four literal constructions produced).
    def _identity_frame():
        return pm.Frame(pm.Rotation.Quaternion(0, 0, 0, 1),
                        pm.Vector(0.0, 0.0, 0.0))

    self.robot_pose = _identity_frame()
    self.mid_feet_pose = _identity_frame()
    self.right_hand_pose = _identity_frame()
    self.left_hand_pose = _identity_frame()
    # Fixed palm->control-frame offsets used by the IHMC controller
    # (mirror-image yaw rotations and y translations per side).
    self.right_hand_ihmc_offset = pm.Frame(
        pm.Rotation.RPY(0.0, 0.0, -np.pi / 2.0),
        pm.Vector(0.025, -0.07, 0.0))
    self.left_hand_ihmc_offset = pm.Frame(
        pm.Rotation.RPY(0.0, 0.0, np.pi / 2.0),
        pm.Vector(0.025, 0.07, 0.0))
    # Outgoing message buffers.
    self.footsteps_msg = FootstepDataListMessage()
    self.hand_msg = HandTrajectoryMessage()
    # Status flags.
    self.robot_pose_ready = False
    self.update_robot_pose_once = False
    self.messages_sent = False
    self.completed_footsteps = False
def get_waypoints(self, frame_type, predicates, transforms, names, constraints):
    '''
    Generate candidate waypoint poses for every matching object, one per
    unique symmetry rotation, pruned by the given pose constraints.

    Parameters:
    -----------
    frame_type: object class to match when generating new frames
    predicates: other list of predicates to add
    transforms: used to generate the actual poses
    names: should be same size as transforms above: prunes list of transforms based on criteria, transforms should be above their parent frame in the world coordinates
    constraints: pose constraints (pose_variable index, greater flag,
                 threshold) checked between each match frame and its
                 candidate waypoint
    '''
    self.and_srv.wait_for_service()
    if not len(names) == len(transforms):
        raise RuntimeError(
            'Names and transforms provided to landmark get_waypoints must be the same length'
        )
    if not len(names) == 1:
        raise NotImplementedError(
            'Passing a list of multiple transforms to get_waypoints is not yet supported.'
        )
    # Build a wildcard predicate matching any object of this frame type.
    type_predicate = PredicateStatement()
    type_predicate.predicate = frame_type
    type_predicate.params = ['*', '', '']
    print "----"
    print "predicate(s) to match:"
    print predicates
    print "----"
    print "type predicate to match:"
    print type_predicate
    res = self.and_srv([type_predicate] + predicates)
    if len(res.matching) == 0:
        rospy.logerr("Failed to find any matching objects!")
        return None
    else:
        rospy.loginfo("Predicator found matches: " + str(res.matching))
    #print res
    if (not res.found) or len(res.matching) < 1:
        return None
    poses = []
    for tform in transforms:
        poses.append(pm.fromMsg(tform))
        break  # we only do one
    # Default to no symmetry (a single orientation) for unknown types.
    if frame_type not in self.obj_symmetries.keys():
        self.obj_symmetries[frame_type] = ObjectSymmetry()
        self.obj_symmetries[frame_type].z_symmetries = 1
        self.obj_symmetries[frame_type].y_symmetries = 1
        self.obj_symmetries[frame_type].x_symmetries = 1
    # Get only unique rotation matrices from the list of rotation matrices generated from RPY
    quaternion_list = list()
    for i in xrange(0, self.obj_symmetries[frame_type].z_symmetries):
        for j in xrange(0, self.obj_symmetries[frame_type].y_symmetries):
            for k in xrange(0, self.obj_symmetries[frame_type].x_symmetries):
                theta_z = i * self.obj_symmetries[frame_type].z_rotation
                theta_y = j * self.obj_symmetries[frame_type].y_rotation
                theta_x = k * self.obj_symmetries[frame_type].x_rotation
                rot_matrix = pm.Rotation.RPY(theta_x, theta_y, theta_z)
                quaternion_list.append(rot_matrix.GetQuaternion())
    quaternion_list = np.array(quaternion_list)
    # Brute-force dedup: |q1.q2| near 1 means the rotations coincide
    # (a quaternion and its negation represent the same rotation).
    unique_brute_force = list()
    for i in xrange(len(quaternion_list)):
        unique = True
        for j in xrange(i, len(quaternion_list)):
            if i == j:
                continue
            else:
                if abs(quaternion_list[i].dot(quaternion_list[j])) > 0.99:
                    unique = False
                    break
        if unique:
            unique_brute_force.append(i)
    unique_rot_matrix = list()
    for index in unique_brute_force:
        unique_rot_matrix.append(
            pm.Rotation.Quaternion(*quaternion_list[index].tolist()))
    new_poses = []
    new_names = []
    objects = []
    for match in res.matching:
        try:
            (trans, rot) = self.listener.lookupTransform(self.world, match,
                                                         rospy.Time(0))
            match_tform = pm.fromTf((trans, rot))
            rospy.loginfo("Checking " + str(match) + " from " +
                          str(self.world))
            if frame_type in self.obj_symmetries:
                for rot_matrix in unique_rot_matrix:
                    tform = pm.Frame(rot_matrix)
                    # Candidate waypoint: match frame * symmetry * template.
                    world_tform = match_tform * tform * poses[0]
                    # Reject candidates violating any pose constraint.
                    violated = False
                    for constraint in constraints:
                        v1 = match_tform.p[constraint.pose_variable]
                        v2 = world_tform.p[constraint.pose_variable]
                        if constraint.greater and not (
                                v2 >= v1 + constraint.threshold):
                            violated = True
                            #print "VIOLATED", constraint
                            rospy.logwarn("constraint violation: " + str(v2) +
                                          "<" + str(v1))
                            break
                        elif not constraint.greater and not (
                                v2 <= v1 - constraint.threshold):
                            violated = True
                            rospy.logwarn("constraint violation: " + str(v2) +
                                          ">" + str(v1))
                            #print "VIOLATED", constraint
                            break
                    if violated:
                        continue
                    #else:
                    #    print match_tform.p, world_tform.p
                    new_poses.append(pm.toMsg(world_tform))
                    new_names.append(match + "/" + names[0] +
                                     "/x%fy%fz%f" % (rot_matrix.GetRPY()))
                    objects.append(match)
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logwarn('Could not find transform from %s to %s!' %
                          (self.world, match))
    return (new_poses, new_names, objects)
def evaluate(self, world, state, actor=None):
    '''
    Evaluate a DMP policy by moving along a trajectory to the next
    feasible point.

    Replans the full Cartesian DMP trajectory whenever the skill changes
    (or no trajectory exists), then returns the joint-space action that
    moves toward the point at state.seq; past the trajectory end it
    returns the world's zero action with finish_sequence set.
    '''
    # =========================================================================
    # Generate the trajectory based on the current state
    reset_seq = state.reference is not self.skill or state.traj is None
    g = []
    if reset_seq:
        q = state.q
        # End-effector pose via forward kinematics.
        T = pm.fromMatrix(self.kinematics.forward(q))
        self.activate(self.dmp.dmp_list)
        goal = world.observation[self.goal]
        if goal is None:
            return None
        ee_rpy = T.M.GetRPY()
        relative_goal = goal * self.dmp.goal_pose
        rpy = relative_goal.M.GetRPY()
        adj_rpy = [0, 0, 0]
        # Unwrap each goal angle to within pi of the current end-effector
        # angle so the DMP does not take the long way around.
        for j, (lvar, var) in enumerate(zip(ee_rpy, rpy)):
            if lvar < 0 and var > lvar + np.pi:
                adj_rpy[j] = var - 2 * np.pi
            elif lvar > 0 and var < lvar - np.pi:
                adj_rpy[j] = var + 2 * np.pi
            else:
                adj_rpy[j] = var
        # 6D start/goal: xyz position + rpy orientation.
        x = [T.p[0], T.p[1], T.p[2], ee_rpy[0], ee_rpy[1], ee_rpy[2]]
        g = [relative_goal.p[0], relative_goal.p[1], relative_goal.p[2],
             adj_rpy[0], adj_rpy[1], adj_rpy[2]]
        x0 = [0.] * 6
        g_threshold = [1e-1] * 6
        integrate_iter = 10
        res = self.plan(x, x0, 0., g, g_threshold, 2 * self.dmp.tau, 1.0,
                        world.dt, integrate_iter)
        traj = res.plan
        # Walk the plan once through IK (warm-starts from state.q each
        # time; presumably a feasibility sweep — the q values are not
        # kept per point).
        for pt in traj.points:
            T = pm.Frame(
                pm.Rotation.RPY(pt.positions[3], pt.positions[4],
                                pt.positions[5]))
            T.p[0] = pt.positions[0]
            T.p[1] = pt.positions[1]
            T.p[2] = pt.positions[2]
            q = self.kinematics.inverse(pm.toMatrix(T), state.q)
    else:
        traj = state.traj
    print state.seq, "/", len(traj.points)
    # =========================================================================
    # Compute the joint velocity to take us to the next position
    if state.seq < len(traj.points):
        pt = traj.points[state.seq]
        T = pm.Frame(
            pm.Rotation.RPY(pt.positions[3], pt.positions[4],
                            pt.positions[5]))
        T.p[0] = pt.positions[0]
        T.p[1] = pt.positions[1]
        T.p[2] = pt.positions[2]
        q = self.kinematics.inverse(pm.toMatrix(T), state.q)
        if q is not None:
            # Finite-difference joint velocity toward the next waypoint.
            dq = (q - state.q) / world.dt
            return CostarAction(q=q, dq=dq, reset_seq=reset_seq,
                                reference=self.skill, traj=traj)
        else:
            # IK failed: hold the current configuration.
            rospy.logwarn("could not get inverse kinematics for position")
            print pt.positions
            print state.q
            return CostarAction(
                q=state.q, dq=np.zeros(state.q.shape), reset_seq=reset_seq,
                reference=self.skill, traj=traj)
    else:
        '''
        Compute a zero action from the current world state. This involves
        looking up actor information from the current world.
        '''
        action = world.zeroAction(state.actor_id)
        action.reference = self.skill
        action.finish_sequence = True
        if state.seq > len(traj.points):
            raise RuntimeError('Went past end of trajectory.')
        return action
def get_waypoints(self, frame_type, predicates, transforms, names):
    """Generate candidate waypoint poses for every matching object, one
    per unique symmetry rotation of the (single) template transform.

    Returns (new_poses, new_names, objects) or None when the predicate
    service finds no matches.
    """
    self.and_srv.wait_for_service()
    if not len(names) == len(transforms):
        raise RuntimeError(
            'Names and transforms provided to landmark get_waypoints must be the same length'
        )
    if not len(names) == 1:
        raise NotImplementedError(
            'Passing a list of multiple transforms to get_waypoints is not yet supported.'
        )
    # Build a wildcard predicate matching any object of this frame type.
    type_predicate = PredicateStatement()
    type_predicate.predicate = frame_type
    type_predicate.params = ['*', '', '']
    print "----"
    print "predicate(s) to match:"
    print predicates
    print "----"
    print "type predicate to match:"
    print type_predicate
    res = self.and_srv([type_predicate] + predicates)
    print "Found matches: " + str(res.matching)
    #print res
    if (not res.found) or len(res.matching) < 1:
        return None
    poses = []
    for tform in transforms:
        poses.append(pm.fromMsg(tform))
    # Default to no symmetry (a single orientation) for unknown types.
    if frame_type not in self.obj_symmetries.keys():
        self.obj_symmetries[frame_type] = ObjectSymmetry()
        self.obj_symmetries[frame_type].z_symmetries = 1
        self.obj_symmetries[frame_type].y_symmetries = 1
        self.obj_symmetries[frame_type].x_symmetries = 1
    # Get only unique rotation matrices from the list of rotation matrices generated from RPY
    quaternion_list = list()
    for i in xrange(0, self.obj_symmetries[frame_type].z_symmetries):
        for j in xrange(0, self.obj_symmetries[frame_type].y_symmetries):
            for k in xrange(0, self.obj_symmetries[frame_type].x_symmetries):
                theta_z = i * self.obj_symmetries[frame_type].z_rotation
                theta_y = j * self.obj_symmetries[frame_type].y_rotation
                theta_x = k * self.obj_symmetries[frame_type].x_rotation
                rot_matrix = pm.Rotation.RPY(theta_x, theta_y, theta_z)
                quaternion_list.append(rot_matrix.GetQuaternion())
    quaternion_list = np.array(quaternion_list)
    unique_rot_matrix = list()
    # quaternion_list = np.around(quaternion_list,decimals=5)
    # b = np.ascontiguousarray(quaternion_list).view(np.dtype((np.void, quaternion_list.dtype.itemsize * quaternion_list.shape[1])))
    # _, unique_indices = np.unique(b, return_index=True)
    # for index in unique_indices:
    #     unique_rot_matrix.append( pm.Rotation.Quaternion( *quaternion_list[index].tolist() ) )
    # Brute-force dedup: |q1.q2| near 1 means the rotations coincide
    # (a quaternion and its negation represent the same rotation).
    unique_brute_force = list()
    for i in xrange(len(quaternion_list)):
        unique = True
        for j in xrange(i, len(quaternion_list)):
            if i == j:
                continue
            else:
                if abs(quaternion_list[i].dot(quaternion_list[j])) > 0.99:
                    unique = False
                    # print i, j, ' is not unique', quaternion_list[i], quaternion_list[j]
                    break
        if unique:
            unique_brute_force.append(i)
    # print 'unique brute force: ', len(unique_brute_force)
    # print unique_brute_force
    for index in unique_brute_force:
        unique_rot_matrix.append(
            pm.Rotation.Quaternion(*quaternion_list[index].tolist()))
    new_poses = []
    new_names = []
    objects = []
    for match in res.matching:
        try:
            (trans, rot) = self.listener.lookupTransform(self.world, match,
                                                         rospy.Time(0))
            # for (pose, name) in zip(poses,names):
            if frame_type in self.obj_symmetries:
                for rot_matrix in unique_rot_matrix:
                    tform = pm.Frame(rot_matrix)
                    # Candidate waypoint: match frame * symmetry * template.
                    new_poses.append(
                        pm.toMsg(
                            pm.fromTf((trans, rot)) * tform * poses[0]))
                    new_names.append(match + "/" + names[0] +
                                     "/x%fy%fz%f" % (rot_matrix.GetRPY()))
                    #print match, str(match + "/" + names[0] + "/x%fy%fz%f"%(rot_matrix.GetRPY())),
                    # pm.toTf(pm.fromTf((trans,rot)) * tform * poses[0])[0]
                    objects.append(match)
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logwarn('Could not find transform from %s to %s!' %
                          (self.world, match))
    return (new_poses, new_names, objects)
def get_waypoints(self, frame_type, predicates, transforms, names):
    """Generate waypoint poses for every matching object and template
    transform, adding extra candidates for each discrete Z/Y/X symmetry.

    Returns (new_poses, new_names, objects) or None when the predicate
    service finds no matches.
    """
    self.and_srv.wait_for_service()
    # Build a wildcard predicate matching any object of this frame type.
    type_predicate = PredicateStatement()
    type_predicate.predicate = frame_type
    type_predicate.params = ['*', '', '']
    print "----"
    print "predicate(s) to match:"
    print predicates
    print "----"
    print "type predicate to match:"
    print type_predicate
    res = self.and_srv([type_predicate] + predicates)
    print "Found matches: " + str(res.matching)
    #print res
    if (not res.found) or len(res.matching) < 1:
        return None
    poses = []
    for tform in transforms:
        poses.append(pm.fromMsg(tform))
    #print poses
    new_poses = []
    new_names = []
    objects = []
    for match in res.matching:
        try:
            (trans, rot) = self.listener.lookupTransform(self.world, match,
                                                         rospy.Time(0))
            for (pose, name) in zip(poses, names):
                #resp.waypoints.poses.append(pm.toMsg(pose * pm.fromTf((trans,rot))))
                # Base candidate: match frame * template transform.
                new_poses.append(pm.toMsg(pm.fromTf((trans, rot)) * pose))
                new_names.append(match + "/" + name)
                objects.append(match)
                # Create extra poses for symmetries around the Z axis
                if frame_type in self.obj_symmetries:
                    if self.obj_symmetries[frame_type].z_symmetries > 1:
                        for i in xrange(
                                1, self.obj_symmetries[frame_type].
                                z_symmetries):
                            theta = i * self.obj_symmetries[
                                frame_type].z_rotation
                            #print "... " + str(theta)
                            tform = pm.Frame(pm.Rotation.RotZ(theta))
                            #print tform
                            new_poses.append(
                                pm.toMsg(
                                    pm.fromTf((trans, rot)) * tform * pose))
                            new_names.append(match + "/" + name + "/z" +
                                             str(i))
                            objects.append(match)
                    # ...and around the Y axis
                    if self.obj_symmetries[frame_type].y_symmetries > 1:
                        for i in xrange(
                                1, self.obj_symmetries[frame_type].
                                y_symmetries):
                            theta = i * self.obj_symmetries[
                                frame_type].y_rotation
                            tform = pm.Frame(pm.Rotation.RotY(theta))
                            new_poses.append(
                                pm.toMsg(
                                    pm.fromTf((trans, rot)) * tform * pose))
                            new_names.append(match + "/" + name + "/y" +
                                             str(i))
                            objects.append(match)
                    # ...and around the X axis
                    if self.obj_symmetries[frame_type].x_symmetries > 1:
                        for i in xrange(
                                1, self.obj_symmetries[frame_type].
                                x_symmetries):
                            theta = i * self.obj_symmetries[
                                frame_type].x_rotation
                            tform = pm.Frame(pm.Rotation.RotX(theta))
                            new_poses.append(
                                pm.toMsg(
                                    pm.fromTf((trans, rot)) * tform * pose))
                            new_names.append(match + "/" + name + "/x" +
                                             str(i))
                            objects.append(match)
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logwarn('Could not find transform from %s to %s!' %
                          (self.world, match))
    return (new_poses, new_names, objects)
def debug(self, world, verbose=False):
    '''
    Publish a bunch of ROS messages showing trajectories. This is a helpful
    tool for debugging problems with training data, DMP learning, and DMP
    segmentation.

    Parameters:
    -----------
    world: world model providing objects, poses, actors, and dt
    verbose: when True, print the chosen goal for each skill
    '''
    for name, instances in self.skill_instances.items():
        # Pick the first object of the skill's goal type as the DMP goal.
        goal_type = instances[0].objs[-1]
        goals = world.getObjects(goal_type)
        if goals is None:
            continue
        else:
            goal = goals[0]
        goal_pose = world.getPose(goal)
        if verbose:
            print(name,"goal is",goal_type,"and chose",goal)
        if goal_pose is None:
            continue
        # Use the parent skill's model/params when this skill has one.
        if name in self.parent_skills:
            parent_name = self.parent_skills[name]
        else:
            parent_name = name
        model = self.skill_models[parent_name]
        option = DmpOption(
            policy_type=CartesianDmpPolicy,
            config=self.config,
            kinematics=self.kdl_kin,
            goal_object=goal,
            skill_name=name,
            feature_model=model,
            traj_dist=self.getParamDistribution(parent_name))
        policy, condition = option.makePolicy(world)
        dynamics = SimulatedDynamics()
        state = world.actors[0].state
        RequestActiveDMP(instances[0].dmp_list)
        q = state.q
        if q is None:
            continue
        # Current end-effector pose via forward kinematics.
        T = pm.fromMatrix(self.kdl_kin.forward(q))
        ee_rpy = T.M.GetRPY()
        relative_goal = goal_pose * instances[0].goal_pose
        rpy = relative_goal.M.GetRPY()
        adj_rpy = [0, 0, 0]
        # Fix rotations: unwrap each goal angle to within pi of the
        # current end-effector angle.
        for j, (lvar, var) in enumerate(zip(ee_rpy, rpy)):
            if lvar < 0 and var > lvar + np.pi:
                adj_rpy[j] = var - 2 * np.pi
            elif lvar > 0 and var < lvar - np.pi:
                adj_rpy[j] = var + 2 * np.pi
            else:
                adj_rpy[j] = var
        # Create start and goal poses (6D: xyz + rpy)
        x = [T.p[0], T.p[1], T.p[2], ee_rpy[0], ee_rpy[1], ee_rpy[2]]
        g = [relative_goal.p[0], relative_goal.p[1], relative_goal.p[2],
             adj_rpy[0], adj_rpy[1], adj_rpy[2]]
        x0 = [0.] * 6
        g_threshold = [1e-1] * 6
        integrate_iter = 10
        # Get DMP result
        res = PlanDMP(x, x0, 0., g, g_threshold, 2 * instances[0].tau, 1.0,
                      world.dt, integrate_iter)
        # Convert to poses
        poses = []
        for i, pt in enumerate(res.plan.points):
            T = pm.Frame(
                pm.Rotation.RPY(pt.positions[3], pt.positions[4],
                                pt.positions[5]))
            T.p[0] = pt.positions[0]
            T.p[1] = pt.positions[1]
            T.p[2] = pt.positions[2]
            poses.append(pm.toMsg(T))
            # Chain IK along the plan (each solve warm-starts from the
            # previous solution).
            new_q = self.kdl_kin.inverse(pm.toMatrix(T), q)
            q = new_q
        msg = PoseArray(poses=poses)
        msg.header.frame_id = self.base_link
        self.pubs[name].publish(msg)