def publish_cartesian_markers(arm, time_stamp, cep, rot, c1, c2, marker_id):
    marker = Marker()
    marker.header.frame_id = ar.link_tf_name(arm, 0)
    marker.header.stamp = time_stamp
    marker.ns = arm
    marker.type = Marker.ARROW
    marker.action = Marker.ADD
    marker.pose.position.x = cep[0, 0]
    marker.pose.position.y = cep[1, 0]
    marker.pose.position.z = cep[2, 0]
    marker.scale.x = 0.1
    marker.scale.y = 0.2
    marker.scale.z = 0.2
    marker.lifetime = rospy.Duration()

    marker.id = marker_id * 100 + 0
    #rot1 = tr.Ry(math.radians(90.)) * rot.T
    rot1 = rot * tr.rotY(math.pi / 2)
    quat = tr.matrix_to_quaternion(rot1)
    marker.pose.orientation.x = quat[0]
    marker.pose.orientation.y = quat[1]
    marker.pose.orientation.z = quat[2]
    marker.pose.orientation.w = quat[3]
    marker.color.r = c1[0]
    marker.color.g = c1[1]
    marker.color.b = c1[2]
    marker.color.a = 1.
    marker_pub.publish(marker)

    marker.id = marker_id * 100 + 1
    if arm == 'left_arm':
        #rot2 = tr.Rz(math.radians(90.)) * rot.T
        rot2 = rot * tr.rotZ(-math.pi / 2)
    else:
        #rot2 = tr.Rz(math.radians(-90.)) * rot.T
        rot2 = rot * tr.rotZ(math.pi / 2)
    quat = tr.matrix_to_quaternion(rot2)
    marker.pose.orientation.x = quat[0]
    marker.pose.orientation.y = quat[1]
    marker.pose.orientation.z = quat[2]
    marker.pose.orientation.w = quat[3]
    marker.color.r = c2[0]
    marker.color.g = c2[1]
    marker.color.b = c2[2]
    marker.color.a = 1.
    marker_pub.publish(marker)
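Most of the snippets in this section share one idiom: build a rotation matrix, convert it with tr.matrix_to_quaternion, and copy the four components into a ROS message field by field. A minimal sketch of that idiom, assuming hrl_lib.transforms is importable as tr; the helper name quat_to_msg is illustrative and not part of the original code.

import math
import hrl_lib.transforms as tr
from geometry_msgs.msg import Quaternion

def quat_to_msg(quat):
    # quat is an [x, y, z, w] sequence, as returned by tr.matrix_to_quaternion
    return Quaternion(quat[0], quat[1], quat[2], quat[3])

# 90 degree rotation about Z, converted to a quaternion message
rot = tr.Rz(math.pi / 2)
q_msg = quat_to_msg(tr.matrix_to_quaternion(rot))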
def set_cep_jtt(self, arm, p, rot=None):
    if arm != 1:
        arm = 0
    ps = PoseStamped()
    ps.header.stamp = rospy.rostime.get_rostime()
    ps.header.frame_id = 'torso_lift_link'

    ps.pose.position.x = p[0, 0]
    ps.pose.position.y = p[1, 0]
    ps.pose.position.z = p[2, 0]

    # use an identity check: comparing a numpy matrix to None with == is unreliable
    if rot is None:
        if arm == 0:
            rot = self.r_cep_rot
        else:
            rot = self.l_cep_rot

    quat = tr.matrix_to_quaternion(rot)
    ps.pose.orientation.x = quat[0]
    ps.pose.orientation.y = quat[1]
    ps.pose.orientation.z = quat[2]
    ps.pose.orientation.w = quat[3]

    if arm == 0:
        self.r_arm_cart_pub.publish(ps)
    else:
        self.l_arm_cart_pub.publish(ps)
def setup_gripper_right_link_taxels_transforms(self):
    self.link_name_gripper_right_link = '/%s_gripper_r_finger_link' % (self.arm)
    n_taxels = 4

    self.tar_gripper_right_link.data = [None for i in range(n_taxels)]
    idx_list = [0, 1, 2, 3]
    x_disp = [.03, .04, .03, 0.1]
    y_disp = [-0.02, -0.04, -0.02, -0.02]
    z_disp = [0.03, 0., -0.03, 0.]
    x_ang = [0, -math.pi/2, math.pi, -math.pi/2]
    y_ang = [math.radians(-5), 0, math.radians(5), 0]
    z_ang = [0, math.radians(-10), 0, -math.pi/4]

    for i in range(n_taxels):
        t = Transform()
        t.translation.x = x_disp[i]
        t.translation.y = y_disp[i]
        t.translation.z = z_disp[i]

        rot_mat = tr.Rz(z_ang[i]) * tr.Ry(y_ang[i]) * tr.Rx(x_ang[i])
        quat = tr.matrix_to_quaternion(rot_mat)

        t.rotation.x = quat[0]
        t.rotation.y = quat[1]
        t.rotation.z = quat[2]
        t.rotation.w = quat[3]
        self.tar_gripper_right_link.data[idx_list[i]] = t
def setup_gripper_palm_taxels_transforms(self):
    self.link_name_gripper_palm = '/%s_gripper_palm_link' % (self.arm)
    n_taxels = 2

    self.tar_gripper_palm.data = [None for i in range(n_taxels)]
    idx_list = [0, 1]
    x_disp = [0.06, 0.06]
    y_disp = [-0.02, 0.02]
    z_disp = [0., 0.]
    x_ang = [-math.pi/2, math.pi/2]
    y_ang = [0, 0]
    z_ang = [math.pi/4, -math.pi/4]

    for i in range(n_taxels):
        t = Transform()
        t.translation.x = x_disp[i]
        t.translation.y = y_disp[i]
        t.translation.z = z_disp[i]

        rot_mat = tr.Rz(z_ang[i]) * tr.Ry(y_ang[i]) * tr.Rx(x_ang[i])
        quat = tr.matrix_to_quaternion(rot_mat)

        t.rotation.x = quat[0]
        t.rotation.y = quat[1]
        t.rotation.z = quat[2]
        t.rotation.w = quat[3]
        self.tar_gripper_palm.data[idx_list[i]] = t
def setup_pps_left_taxels_transforms(self):
    self.link_name_left_pps = '/%s_gripper_l_finger_tip_link' % (self.arm)
    n_taxels = 3

    self.tar_left_pps.data = [None for i in range(n_taxels)]
    idx_list = [0, 1, 2]
    x_disp = [0.03, 0.012, 0.012]
    y_disp = [-0.01, -0.01, -0.01]
    z_disp = [0.0, -0.011, 0.011]
    x_ang = [0., math.pi, 0.]
    y_ang = [-math.pi/2., 0., 0.]
    z_ang = [0., 0., 0.]

    for i in range(n_taxels):
        t = Transform()
        t.translation.x = x_disp[i]
        t.translation.y = y_disp[i]
        t.translation.z = z_disp[i]

        rot_mat = tr.Rz(z_ang[i]) * tr.Ry(y_ang[i]) * tr.Rx(x_ang[i])
        quat = tr.matrix_to_quaternion(rot_mat)

        t.rotation.x = quat[0]
        t.rotation.y = quat[1]
        t.rotation.z = quat[2]
        t.rotation.w = quat[3]
        self.tar_left_pps.data[idx_list[i]] = t
def setup_forearm_twenty_two_taxels_transforms(self):
    self.link_name_forearm = '/%s_forearm_link' % (self.arm)
    n_taxels = 22

    self.tar_forearm.data = [Transform()] * n_taxels  # [Transform() for i in range(n_taxels)] #[None for i in range(n_taxels)]
    idx_list = [14, 10, 16, 8, 9, 12, 0, 1, 4, 19, 21, 15, 7, 13, 2, 3, 6, 5, 20, 17, 11, 18]
    x_disp = [.16, .23, .3, .16, .23, .3, .16, .23, .3, .16, .23, .3,
              .17, .28, .17, .28, .17, .28, .17, .28, .34, .34]
    y_disp = [0., 0., 0., -0.06, -0.06, -0.06, 0., 0., 0., 0.06, 0.06, 0.06,
              -0.05, -0.05, -0.05, -0.05, 0.05, 0.05, 0.05, 0.05, -0.05, 0.05]
    z_disp = [0.04, 0.02, 0.03, 0., 0., 0., -0.05, -0.05, -0.05, 0., 0., 0.,
              0.04, 0.02, -0.04, -0.04, -0.04, -0.04, 0.04, 0.02, 0., 0.]
    x_ang = [0, 0, 0, -math.pi/2, -math.pi/2, -math.pi/2, math.pi, math.pi, math.pi,
             math.pi/2, math.pi/2, math.pi/2, -math.pi/4, -math.pi/4, -3*math.pi/4,
             -3*math.pi/4, 3*math.pi/4, 3*math.pi/4, math.pi/4, math.pi/4,
             math.radians(-30), math.radians(30)]
    y_ang = [-math.pi/4, math.radians(-15), math.radians(20), 0, 0, 0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, math.radians(-60), math.radians(-60)]
    z_ang = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

    for i in range(n_taxels):
        t = Transform()
        t.translation.x = x_disp[i]
        t.translation.y = y_disp[i]
        t.translation.z = z_disp[i]

        rot_mat = tr.Rz(z_ang[i]) * tr.Ry(y_ang[i]) * tr.Rx(x_ang[i])
        quat = tr.matrix_to_quaternion(rot_mat)

        t.rotation.x = quat[0]
        t.rotation.y = quat[1]
        t.rotation.z = quat[2]
        t.rotation.w = quat[3]
        self.tar_forearm.data[idx_list[i]] = t
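The taxel-setup methods above and below repeat the same few lines per taxel: compose Rz(z)*Ry(y)*Rx(x), convert the result to a quaternion, and copy it into a geometry_msgs/Transform. A factored-out sketch of that step, assuming the same tr module; the helper name make_taxel_transform is not part of the original code.

import math
import hrl_lib.transforms as tr
from geometry_msgs.msg import Transform

def make_taxel_transform(xyz, xyz_ang):
    # xyz: (x, y, z) displacement in meters.
    # xyz_ang: (x_ang, y_ang, z_ang) in radians, applied as Rz * Ry * Rx
    # to match the snippets in this section.
    t = Transform()
    t.translation.x, t.translation.y, t.translation.z = xyz
    rot_mat = tr.Rz(xyz_ang[2]) * tr.Ry(xyz_ang[1]) * tr.Rx(xyz_ang[0])
    quat = tr.matrix_to_quaternion(rot_mat)
    t.rotation.x = quat[0]
    t.rotation.y = quat[1]
    t.rotation.z = quat[2]
    t.rotation.w = quat[3]
    return t

# e.g. the first palm taxel from setup_gripper_palm_taxels_transforms:
t0 = make_taxel_transform((0.06, -0.02, 0.), (-math.pi/2, 0., math.pi/4))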
def IK(self, arm, p, rot, q_guess):
    rospy.logwarn('Currently ignoring the arm parameter.')
    ik_req = GetPositionIKRequest()
    ik_req.timeout = rospy.Duration(5.)
    ik_req.ik_request.ik_link_name = 'r_wrist_roll_link'
    ik_req.ik_request.pose_stamped.header.frame_id = 'torso_lift_link'
    ik_req.ik_request.pose_stamped.pose.position.x = p[0, 0]
    ik_req.ik_request.pose_stamped.pose.position.y = p[1, 0]
    ik_req.ik_request.pose_stamped.pose.position.z = p[2, 0]

    quat = tr.matrix_to_quaternion(rot)
    ik_req.ik_request.pose_stamped.pose.orientation.x = quat[0]
    ik_req.ik_request.pose_stamped.pose.orientation.y = quat[1]
    ik_req.ik_request.pose_stamped.pose.orientation.z = quat[2]
    ik_req.ik_request.pose_stamped.pose.orientation.w = quat[3]

    ik_req.ik_request.ik_seed_state.joint_state.position = q_guess
    ik_req.ik_request.ik_seed_state.joint_state.name = self.joint_names_list

    ik_resp = self.ik_srv.call(ik_req)
    if ik_resp.error_code.val == ik_resp.error_code.SUCCESS:
        ret = ik_resp.solution.joint_state.position
    else:
        rospy.logerr('Inverse kinematics failed')
        ret = None
    return ret
def setup_forearm_taxels_transforms(self):
    n_circum = 4
    n_axis = 3
    self.link_name_forearm = '/wrist_LEFT'

    rad = 0.04
    dist_along_axis = 0.065
    angle_along_circum = 2 * math.pi / n_circum

    offset_along_axis = 0.02
    offset_along_circum = math.radians(-45)

    n_taxels = n_circum * n_axis

    self.tar_forearm.data = [None for i in range(n_taxels)]
    # mapping the taxels to the raw ADC list.
    idx_list = [6, 9, 0, 3, 7, 10, 1, 4, 8, 11, 2, 5]
    for i in range(n_axis):
        for j in range(n_circum):
            t = Transform()
            ang = j * angle_along_circum + offset_along_circum
            t.translation.x = rad * math.cos(ang)
            t.translation.y = rad * math.sin(ang)
            t.translation.z = offset_along_axis + i * dist_along_axis

            rot_mat = tr.Rz(-ang) * tr.Ry(math.radians(-90))
            quat = tr.matrix_to_quaternion(rot_mat)

            t.rotation.x = quat[0]
            t.rotation.y = quat[1]
            t.rotation.z = quat[2]
            t.rotation.w = quat[3]
            self.tar_forearm.data[idx_list[i * n_circum + j]] = t
def arrow_direction_to_quat(direc):
    direc = direc / np.linalg.norm(direc)
    t = np.matrix([1., 0., 0.]).T
    if abs((t.T * direc)[0, 0]) > 0.866:
        t = np.matrix([0., 1., 0.]).T

    v1 = direc
    v2 = t - (t.T * v1)[0, 0] * v1
    v2 = v2 / np.linalg.norm(v2)
    v3 = np.matrix(np.cross(v1.A1, v2.A1)).T
    rot_mat = np.column_stack([v1, v2, v3])
    q = np.matrix(tr.matrix_to_quaternion(rot_mat)).T
    return q
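arrow_direction_to_quat builds an orthonormal frame whose x-axis is the requested arrow direction (the 0.866 threshold is roughly cos 30 degrees, used only to pick a helper vector that is not nearly parallel to it) and converts that frame to a quaternion. A hedged usage sketch; the norm check is illustrative only.

import numpy as np

direc = np.matrix([1., 1., 1.]).T      # arrow along the world diagonal
q = arrow_direction_to_quat(direc)     # 4x1 matrix, ordered [x, y, z, w]
print np.linalg.norm(q)                # ~1.0 for a proper rotation matrix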
def IK(self, arm, pos, rot, q_guess=[0]*7):
    ik_req = GetPositionIKRequest()
    ik_req.timeout = rospy.Duration(5.)
    if arm == 0:
        ik_req.ik_request.ik_link_name = 'r_wrist_roll_link'
    else:
        ik_req.ik_request.ik_link_name = 'l_wrist_roll_link'
    ik_req.ik_request.pose_stamped.header.frame_id = 'torso_lift_link'

    quat = tr.matrix_to_quaternion(rot)
    ik_req.ik_request.pose_stamped.pose = Pose(Point(*pos), Quaternion(*quat))

    ik_req.ik_request.ik_seed_state.joint_state.position = q_guess
    ik_req.ik_request.ik_seed_state.joint_state.name = self.joint_names_list[arm]

    ik_resp = self.ik_srv[arm].call(ik_req)
    return ik_resp.solution.joint_state.position
def make_darci_ee_marker(ps, color, orientation=None,
                         marker_array=None, control=None,
                         mesh_id_start=0, ns="darci_ee", offset=-0.14):
    mesh_id = mesh_id_start
    # this is the palm piece
    mesh = Marker()
    mesh.ns = ns
    #mesh.mesh_use_embedded_materials = True
    mesh.scale = Vector3(1., 1., 1.)
    mesh.color = ColorRGBA(*color)

    # stub_end_effector.dae
    # stub_end_effector_mini45.dae
    # tube_with_ati_collisions.dae
    # wedge_blender.dae
    # mesh.mesh_resource = "package://hrl_dynamic_mpc/meshes/tube_with_ati_collisions.dae"
    mesh.mesh_resource = "package://hrl_dynamic_mpc/meshes/Darci_Flipper.stl"
    mesh.type = Marker.MESH_RESOURCE

    #rot_default = tr.Ry(np.radians(-90))*tr.Rz(np.radians(90))
    rot_default = tr.Ry(np.radians(0)) * tr.Rz(np.radians(90))

    if orientation is None:
        orientation = [0, 0, 0, 1]
    rot_buf = tr.quaternion_to_matrix(orientation)
    orientation = tr.matrix_to_quaternion(rot_buf * rot_default)

    mesh.pose.orientation = Quaternion(*orientation)
    mesh.pose.position.x = offset

    if control is not None:
        control.markers.append(mesh)
    elif marker_array is not None:
        mesh.pose.position = ps.pose.position
        mesh.header.frame_id = ps.header.frame_id
        mesh.id = mesh_id
        marker_array.markers.append(mesh)

    if control is not None:
        control.interaction_mode = InteractiveMarkerControl.BUTTON
        return control
    elif marker_array is not None:
        return marker_array
def set_cartesian(self, arm, p, rot):
    rospy.logwarn('Currently ignoring the arm parameter.')
    ps = PoseStamped()
    ps.header.stamp = rospy.rostime.get_rostime()
    ps.header.frame_id = 'torso_lift_link'

    ps.pose.position.x = p[0, 0]
    ps.pose.position.y = p[1, 0]
    ps.pose.position.z = p[2, 0]

    quat = tr.matrix_to_quaternion(rot)
    ps.pose.orientation.x = quat[0]
    ps.pose.orientation.y = quat[1]
    ps.pose.orientation.z = quat[2]
    ps.pose.orientation.w = quat[3]
    self.r_arm_cart_pub.publish(ps)
def IK(self, arm, p, rot, q_guess):
    if arm != 1:
        arm = 0
    p = make_column(p)
    if rot.shape == (3, 3):
        quat = np.matrix(tr.matrix_to_quaternion(rot)).T
    elif rot.shape == (4, 1):
        quat = make_column(rot)
    else:
        rospy.logerr('Inverse kinematics failed (bad rotation)')
        return None

    # Transform point back to wrist roll link
    neg_off = [-self.off_point[0], -self.off_point[1], -self.off_point[2]]
    transpos = self.transform_in_frame(p, quat, neg_off)

    ik_req = GetPositionIKRequest()
    ik_req.timeout = rospy.Duration(5.)
    if arm == 0:
        ik_req.ik_request.ik_link_name = 'r_wrist_roll_link'
    else:
        ik_req.ik_request.ik_link_name = 'l_wrist_roll_link'
    ik_req.ik_request.pose_stamped.header.frame_id = 'torso_lift_link'
    ik_req.ik_request.pose_stamped.pose.position.x = transpos[0]
    ik_req.ik_request.pose_stamped.pose.position.y = transpos[1]
    ik_req.ik_request.pose_stamped.pose.position.z = transpos[2]

    ik_req.ik_request.pose_stamped.pose.orientation.x = quat[0]
    ik_req.ik_request.pose_stamped.pose.orientation.y = quat[1]
    ik_req.ik_request.pose_stamped.pose.orientation.z = quat[2]
    ik_req.ik_request.pose_stamped.pose.orientation.w = quat[3]

    ik_req.ik_request.ik_seed_state.joint_state.position = q_guess
    ik_req.ik_request.ik_seed_state.joint_state.name = self.joint_names_list[arm]

    ik_resp = self.ik_srv[arm].call(ik_req)
    if ik_resp.error_code.val == ik_resp.error_code.SUCCESS:
        ret = list(ik_resp.solution.joint_state.position)
    else:
        rospy.logerr('Inverse kinematics failed')
        ret = None
    return ret
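The IK variants above accept either a 3x3 rotation matrix or a ready-made quaternion, and seed the solver with a joint-angle guess. A hedged call sketch, assuming an object pr2_arms exposing the IK method above and the same tr/np conventions; the names here are illustrative, not from the original code.

import math
import numpy as np
import hrl_lib.transforms as tr

# desired tool position (meters, torso_lift_link frame) and orientation
p = np.matrix([0.6, -0.2, 0.0]).T
rot = tr.Ry(math.radians(90.))   # e.g. tool axis pointing down
q_guess = [0.] * 7

q_soln = pr2_arms.IK(0, p, rot, q_guess)   # 0 = right arm in these snippets
if q_soln is None:
    print 'IK failed for the requested pose'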
def setup_upperarm_taxels_transforms(self):
    n_circum = 4
    n_axis = 1
    self.link_name_upperarm = '/%s_upper_arm_link' % (self.arm)

    angle_along_circum = 2 * math.pi / n_circum
    offset_along_circum = math.radians(0)

    n_taxels = n_circum * n_axis

    self.tar_upperarm.data = [None for i in range(n_taxels)]
    # mapping the taxels to the raw ADC list.
    # 0 - top; 1 - left; 2 - bottom; 3 - right
    idx_list = [3, 1, 2, 0]
    x_disp = [.3, .3, .3, 0]
    y_disp = [0.06, 0, -0.06, 0]
    z_disp = [-0.03, -0.09, -0.03, 0]
    x_ang = [0, 0, 3*math.pi/2, 0]
    y_ang = [math.pi/2, math.pi, 0, 0]
    z_ang = [math.pi/2, 0, 0, 0]

    for i in range(n_axis):
        for j in range(n_circum):
            t = Transform()
            t.translation.x = x_disp[j]
            t.translation.y = y_disp[j]
            t.translation.z = z_disp[j]

            rot_mat = tr.Rz(z_ang[j]) * tr.Ry(y_ang[j]) * tr.Rx(x_ang[j])
            quat = tr.matrix_to_quaternion(rot_mat)

            t.rotation.x = quat[0]
            t.rotation.y = quat[1]
            t.rotation.z = quat[2]
            t.rotation.w = quat[3]
            self.tar_upperarm.data[idx_list[i * n_circum + j]] = t
def update_wc(self, msg):
    v = self.robot.GetActiveDOFValues()
    for name in self.joint_names:
        v[self.robot.GetJoint(name).GetDOFIndex()] = self.joint_angles[self.joint_names.index(name)]
    self.robot.SetActiveDOFValues(v)

    rospy.loginfo("I have got a wc location!")
    pos_temp = [msg.pose.position.x, msg.pose.position.y, msg.pose.position.z]
    ori_temp = [msg.pose.orientation.x, msg.pose.orientation.y,
                msg.pose.orientation.z, msg.pose.orientation.w]
    self.pr2_B_wc = createBMatrix(pos_temp, ori_temp)

    psm = PoseStamped()
    psm.header.stamp = rospy.Time.now()
    #self.goal_pose_pub.publish(psm)
    self.pub_feedback("Reaching toward goal location")

    wheelchair_location = self.pr2_B_wc * self.corner_B_head.I
    self.wheelchair.SetTransform(np.array(wheelchair_location))

    pos_goal = wheelchair_location[:3, 3]
    ori_goal = tr.matrix_to_quaternion(wheelchair_location[0:3, 0:3])

    marker = Marker()
    marker.header.frame_id = "base_link"
    marker.header.stamp = rospy.Time()
    marker.ns = "arm_reacher_wc_model"
    marker.id = 0
    marker.type = Marker.MESH_RESOURCE
    marker.action = Marker.ADD
    marker.pose.position.x = pos_goal[0]
    marker.pose.position.y = pos_goal[1]
    marker.pose.position.z = pos_goal[2]
    marker.pose.orientation.x = ori_goal[0]
    marker.pose.orientation.y = ori_goal[1]
    marker.pose.orientation.z = ori_goal[2]
    marker.pose.orientation.w = ori_goal[3]
    marker.scale.x = 0.0254
    marker.scale.y = 0.0254
    marker.scale.z = 0.0254
    marker.color.a = 1.0
    marker.color.r = 1.0
    marker.color.g = 0.0
    marker.color.b = 0.0
    # only if using a MESH_RESOURCE marker type:
    marker.mesh_resource = "package://hrl_base_selection/models/ADA_Wheelchair.dae"
    self.vis_pub.publish(marker)

    v = self.robot.GetActiveDOFValues()
    for name in self.joint_names:
        v[self.robot.GetJoint(name).GetDOFIndex()] = self.joint_angles[self.joint_names.index(name)]
    self.robot.SetActiveDOFValues(v)

    goal_angle = 0  # m.pi/2
    self.goal = np.matrix([[m.cos(goal_angle), -m.sin(goal_angle), 0., .5],
                           [m.sin(goal_angle),  m.cos(goal_angle), 0., 0.],
                           [0., 0., 1., 1.2],
                           [0., 0., 0., 1.]])
    #self.goal = copy.copy(self.pr2_B_wc)
    #self.goal[0,3] = self.goal[0,3] - .2
    #self.goal[1,3] = self.goal[1,3] - .3
    #self.goal[2,3] = 1.3
    print 'goal is: \n', self.goal

    self.goal_B_gripper = np.matrix([[0., 0., 1., 0.1],
                                     [0., 1., 0., 0.],
                                     [-1., 0., 0., 0.],
                                     [0., 0., 0., 1.]])
    self.pr2_B_goal = self.goal * self.goal_B_gripper

    self.sol = self.manip.FindIKSolution(np.array(self.pr2_B_goal),
                                         op.IkFilterOptions.CheckEnvCollisions)
    if self.sol is None:
        self.pub_feedback("Failed to find a good arm configuration for reaching.")
        return None
    rospy.loginfo("[%s] Got an IK solution: %s" % (rospy.get_name(), self.sol))
    self.pub_feedback("Found a good arm configuration for reaching.")

    self.pub_feedback("Looking for path for arm to goal.")
    traj = None
    try:
        #self.res = self.manipprob.MoveToHandPosition(matrices=[np.array(self.pr2_B_goal)], seedik=10)
        self.traj = self.manipprob.MoveManipulator(goal=self.sol, outputtrajobj=True)
        self.pub_feedback("Found a path to reach to the goal.")
    except:
        self.traj = None
        self.pub_feedback("Could not find a path to reach to the goal")
        return None

    tmp_traj = tmp_vel = tmp_acc = []  # unused scratch lists
    trajectory = JointTrajectory()
    for i in xrange(self.traj.GetNumWaypoints()):
        point = JointTrajectoryPoint()
        temp = []
        for j in xrange(7):
            temp.append(float(self.traj.GetWaypoint(i)[j]))
        point.positions = temp
        trajectory.points.append(point)
    trajectory.joint_names = ['l_upper_arm_roll_joint', 'l_shoulder_pan_joint',
                              'l_shoulder_lift_joint', 'l_forearm_roll_joint',
                              'l_elbow_flex_joint', 'l_wrist_flex_joint',
                              'l_wrist_roll_joint']
    #self.mpc_weights_pub.publish(self.weights)
    self.moving = True
    self.goal_traj_pub.publish(trajectory)
    self.pub_feedback("Reaching to location")
def select_base_client():
    angle = -m.pi / 2
    pr2_B_head1 = np.matrix([[m.cos(angle), -m.sin(angle), 0., 0.],
                             [m.sin(angle),  m.cos(angle), 0., 2.5],
                             [0., 0., 1., 1.1546],
                             [0., 0., 0., 1.]])
    an = -m.pi / 2
    pr2_B_head2 = np.matrix([[m.cos(an), 0., m.sin(an), 0.],
                             [0., 1., 0., 0.],
                             [-m.sin(an), 0., m.cos(an), 0.],
                             [0., 0., 0., 1.]])
    pr2_B_head = pr2_B_head1 * pr2_B_head2

    pos_goal = [pr2_B_head[0, 3], pr2_B_head[1, 3], pr2_B_head[2, 3]]
    ori_goal = tr.matrix_to_quaternion(pr2_B_head[0:3, 0:3])

    psm_head = PoseStamped()
    psm_head.header.frame_id = '/base_link'
    psm_head.pose.position.x = pos_goal[0]
    psm_head.pose.position.y = pos_goal[1]
    psm_head.pose.position.z = pos_goal[2]
    psm_head.pose.orientation.x = ori_goal[0]
    psm_head.pose.orientation.y = ori_goal[1]
    psm_head.pose.orientation.z = ori_goal[2]
    psm_head.pose.orientation.w = ori_goal[3]

    head_pub = rospy.Publisher("/haptic_mpc/head_pose", PoseStamped, latch=True)
    head_pub.publish(psm_head)

    rospack = rospkg.RosPack()
    pkg_path = rospack.get_path('hrl_base_selection')

    marker = Marker()
    marker.header.frame_id = "/base_link"
    marker.header.stamp = rospy.Time()
    marker.ns = "base_service_wc_model"
    marker.id = 0
    #marker.type = Marker.SPHERE
    marker.type = Marker.MESH_RESOURCE
    marker.action = Marker.ADD
    marker.pose.position.x = pos_goal[0]
    marker.pose.position.y = pos_goal[1]
    marker.pose.position.z = 0
    marker.pose.orientation.x = ori_goal[0]
    marker.pose.orientation.y = ori_goal[1]
    marker.pose.orientation.z = ori_goal[2]
    marker.pose.orientation.w = ori_goal[3]
    marker.scale.x = .025
    marker.scale.y = .025
    marker.scale.z = .025
    marker.color.a = 1.
    marker.color.r = 0.0
    marker.color.g = 1.0
    marker.color.b = 0.0
    # only if using a MESH_RESOURCE marker type:
    marker.mesh_resource = "package://hrl_base_selection/models/ADA_Wheelchair.dae"
    vis_pub.publish(marker)
    print 'I think I just published the wc model \n'

    goal_angle = 0  # m.pi/2
    pr2_B_goal = np.matrix([[m.cos(goal_angle), -m.sin(goal_angle), 0., 2.6],
                            [m.sin(goal_angle),  m.cos(goal_angle), 0., .3],
                            [0., 0., 1., 1.2],
                            [0., 0., 0., 1.]])
    pos_goal = [pr2_B_goal[0, 3], pr2_B_goal[1, 3], pr2_B_goal[2, 3]]
    ori_goal = tr.matrix_to_quaternion(pr2_B_goal[0:3, 0:3])

    psm_goal = PoseStamped()
    psm_goal.header.frame_id = '/base_link'
    psm_goal.pose.position.x = pos_goal[0]
    psm_goal.pose.position.y = pos_goal[1]
    psm_goal.pose.position.z = pos_goal[2]
    psm_goal.pose.orientation.x = ori_goal[0]
    psm_goal.pose.orientation.y = ori_goal[1]
    psm_goal.pose.orientation.z = ori_goal[2]
    psm_goal.pose.orientation.w = ori_goal[3]
    goal_pub = rospy.Publisher("/arm_reacher/goal_pose", PoseStamped, latch=True)
    goal_pub.publish(psm_goal)

    # task = 'shaving'
    task = 'yogurt'
    model = 'chair'

    start_time = time.time()
    rospy.wait_for_service('select_base_position')
    try:
        #select_base_position = rospy.ServiceProxy('select_base_position', BaseMove)
        #response = select_base_position(psm_goal, psm_head)
        select_base_position = rospy.ServiceProxy('select_base_position', BaseMove_multi)
        response = select_base_position(task, model)
        print 'response is: \n', response
        print 'Total time for service to return the response was: %fs' % (time.time() - start_time)
        return response.base_goal, response.configuration_goal  #, response.ik_solution
    except rospy.ServiceException, e:
        print "Service call failed: %s" % e
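select_base_client and the base-selection code below convert a 4x4 homogeneous transform into a PoseStamped by splitting it into a translation and a tr.matrix_to_quaternion call on the rotation block. A small hedged helper capturing that conversion; matrix_to_pose_stamped is illustrative and not part of the original code.

import rospy
import hrl_lib.transforms as tr
from geometry_msgs.msg import PoseStamped

def matrix_to_pose_stamped(b_mat, frame_id='/base_link'):
    # b_mat: 4x4 numpy matrix, rotation in the upper-left 3x3, translation in the last column
    ps = PoseStamped()
    ps.header.frame_id = frame_id
    ps.header.stamp = rospy.Time.now()
    ps.pose.position.x = b_mat[0, 3]
    ps.pose.position.y = b_mat[1, 3]
    ps.pose.position.z = b_mat[2, 3]
    quat = tr.matrix_to_quaternion(b_mat[0:3, 0:3])
    ps.pose.orientation.x = quat[0]
    ps.pose.orientation.y = quat[1]
    ps.pose.orientation.z = quat[2]
    ps.pose.orientation.w = quat[3]
    return ps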
def updateEndEffectorPose(self):
    pos, rot = self.robot.kinematics.FK(self.joint_angles)
    self.end_effector_position = pos
    self.end_effector_orient_cart = rot
    self.end_effector_orient_quat = tr.matrix_to_quaternion(rot)
rdc = spc.RawDataClient('/fabric_skin/taxels/raw_data')
bias_mn, bias_std = compute_bias(rdc, 20)

for i in range(n_axis):
    for j in range(n_circum):
        raw_input('Press on taxel and hit ENTER')
        dat = rdc.get_raw_data(True)
        min_idx = np.argmin(dat - bias_mn)

        ang = j * angle_along_circum + offset_along_circum
        x = rad * math.cos(ang)
        y = rad * math.sin(ang)
        z = offset_along_axis + i * dist_along_axis
        rot_mat = tr.Rz(-ang) * tr.Ry(math.radians(-90))
        quat = tr.matrix_to_quaternion(rot_mat)

        tar.data[min_idx].translation.x = x
        tar.data[min_idx].translation.y = y
        tar.data[min_idx].translation.z = z
        tar.data[min_idx].rotation.x = quat[0]
        tar.data[min_idx].rotation.y = quat[1]
        tar.data[min_idx].rotation.z = quat[2]
        tar.data[min_idx].rotation.w = quat[3]

d = {}
d['tf_name'] = tf_link_name
d['transform_array_response'] = tar
ut.save_pickle(d, 'taxel_registration_dict.pkl')
def handle_select_base(self, req):  #, task): #choose_task(req.task)
    print 'I have received inputs!'
    #print 'My given inputs were: \n'
    #print 'goal is: \n', req.goal
    #print 'head is: \n', req.head

    # The head location is received as a PoseStamped message and is converted and used as the head location.
    pos_temp = [req.head.pose.position.x, req.head.pose.position.y, req.head.pose.position.z]
    ori_temp = [req.head.pose.orientation.x, req.head.pose.orientation.y,
                req.head.pose.orientation.z, req.head.pose.orientation.w]
    head = createBMatrix(pos_temp, ori_temp)
    #print 'head from input: \n', head

    # This lets the service use TF to get the head location instead of requiring it as an input.
    #(trans, rot) = self.listener.lookupTransform('/base_link', '/head_frame', rospy.Time(0))
    #pos_temp = trans
    #ori_temp = rot
    #head = createBMatrix(pos_temp, ori_temp)
    #print 'head from tf: \n', head

    # The goal location is received as a PoseStamped message and is converted and used as the goal location.
    pos_temp = [req.goal.pose.position.x, req.goal.pose.position.y, req.goal.pose.position.z]
    ori_temp = [req.goal.pose.orientation.x, req.goal.pose.orientation.y,
                req.goal.pose.orientation.z, req.goal.pose.orientation.w]
    goal = createBMatrix(pos_temp, ori_temp)
    #print 'goal: \n', goal
    print 'I will move to be able to reach the goal.'

    # Sets the wheelchair location based on the location of the head using a few homogeneous transforms.
    pr2_B_wc = np.matrix([[head[0, 0], head[0, 1], 0., head[0, 3]],
                          [head[1, 0], head[1, 1], 0., head[1, 3]],
                          [0., 0., 1., 0.],
                          [0., 0., 0., 1.]])

    # Transform from the coordinate frame of the wc model in the back right bottom corner, to the head location
    corner_B_head = np.matrix([[m.cos(0.), -m.sin(0.), 0., .442603],
                               [m.sin(0.),  m.cos(0.), 0., .384275],  # 0.34
                               [0., 0., 1., 0.],
                               [0., 0., 0., 1.]])
    wheelchair_location = pr2_B_wc * corner_B_head.I
    self.wheelchair.SetTransform(np.array(wheelchair_location))

    # Publishes the wheelchair location to create a marker in rviz for visualization, to compare where the
    # service believes the wheelchair is to where the person is (seen via kinect).
    pos_goal = wheelchair_location[0:3, 3]
    ori_goal = tr.matrix_to_quaternion(wheelchair_location[0:3, 0:3])
    self.publish_wc_marker(pos_goal, ori_goal)

    # Setup for inside loop
    angle = m.pi

    # Transforms from end of wrist of PR2 to the end of its gripper.
    # Openrave has a weird coordinate system for the gripper so I use a transform to correct.
    goal_B_gripper = np.matrix([[0., 0., 1., .1],
                                [0., 1., 0., 0.],
                                [-1., 0., 0., 0.],
                                [0., 0., 0., 1.]])
    #pr2_B_goal = head * head_B_goal
    pr2_B_goal = goal * goal_B_gripper
    angle_base = m.pi / 2
    #print 'The goal gripper pose is: \n', np.array(pr2_B_goal)

    # Find a base location by testing various base locations online for an IK solution
    for i in [0., .05, .1, .15, .2, .25, .3, .35, .4, .45, .5, -.05, -.1, -.15, -.2, -.25, -.3]:
        for j in [0., .03, .05, .08, -.03, -.05, -.08, -.1, -.12, -.2, -.3]:
            for k in [0., m.pi/4, m.pi/2, -m.pi/4, -m.pi/2, m.pi, 3*m.pi/2]:
                # transform from head frame in wheelchair to desired base goal
                wc_B_goalpr2 = np.matrix([[m.cos(angle_base + k), -m.sin(angle_base + k), 0., .4 + i],
                                          [m.sin(angle_base + k),  m.cos(angle_base + k), 0., -.9 + j],
                                          [0., 0., 1., 0.],
                                          [0., 0., 0., 1.]])
                base_position = pr2_B_wc * wc_B_goalpr2
                #print 'base position: \n', base_position
                self.robot.SetTransform(np.array(base_position))
                #res = self.manipprob.MoveToHandPosition(matrices=[np.array(pr2_B_goal)], seedik=10)
                #self.robot.WaitForController(0)  # wait
                #print 'res: \n', res

                v = self.robot.GetActiveDOFValues()
                for name in self.joint_names:
                    v[self.robot.GetJoint(name).GetDOFIndex()] = self.joint_angles[self.joint_names.index(name)]
                self.robot.SetActiveDOFValues(v)

                with self.env:
                    #print 'checking goal base location: \n', np.array(base_position)
                    if not self.manip.CheckIndependentCollision(op.CollisionReport()):
                        sol = self.manip.FindIKSolution(np.array(pr2_B_goal),
                                                        op.IkFilterOptions.CheckEnvCollisions)
                        if sol is not None:
                            now = rospy.Time.now() + rospy.Duration(1.0)
                            self.listener.waitForTransform('/odom_combined', '/base_link', now, rospy.Duration(10))
                            (trans, rot) = self.listener.lookupTransform('/odom_combined', '/base_link', now)
                            odom_goal = createBMatrix(trans, rot) * base_position
                            pos_goal = odom_goal[:3, 3]
                            ori_goal = tr.matrix_to_quaternion(odom_goal[0:3, 0:3])
                            #print 'Got an iksolution! \n', sol
                            psm = PoseStamped()
                            psm.header.frame_id = '/odom_combined'
                            psm.pose.position.x = pos_goal[0]
                            psm.pose.position.y = pos_goal[1]
                            psm.pose.position.z = pos_goal[2]
                            psm.pose.orientation.x = ori_goal[0]
                            psm.pose.orientation.y = ori_goal[1]
                            psm.pose.orientation.z = ori_goal[2]
                            psm.pose.orientation.w = ori_goal[3]
                            print 'I found a goal location! It is at B transform: \n', base_position
                            print 'The quaternion to the goal location is: \n', psm
                            return psm
                    else:
                        print 'I found a bad goal location. Trying again!'
                #rospy.sleep(.1)

    print 'I found nothing! My given inputs were: \n', req.goal, req.head
    # Note: if the loops finish without a solution, base_location and psm were never assigned,
    # so the statements below will raise a NameError (kept as in the original source).
    print 'I found a goal location! It is at B transform: \n', base_location
    print 'The quaternion to the goal location is: \n', psm
    return psm
def setup_wrist_taxels_transforms(self):
    self.link_name_wrist = '/handmount_LEFT'
    n_circum = 4
    dist_along_axis = 0.065
    angle_along_circum = 2 * math.pi / n_circum

    offset_along_circum = math.radians(-45)

    self.tar_wrist.data = [None for i in range(13)]

    # mapping the taxels to the raw ADC list.
    idx_list = [6, 9, 2, 5]
    n_axis = 1
    rad = 0.03
    offset_along_axis = -0.04
    for i in range(n_axis):
        for j in range(n_circum):
            t = Transform()
            ang = j * angle_along_circum + offset_along_circum
            t.translation.x = rad * math.cos(ang)
            t.translation.y = rad * math.sin(ang)
            t.translation.z = offset_along_axis + i * dist_along_axis

            rot_mat = tr.Rz(-ang) * tr.Ry(math.radians(-90))
            quat = tr.matrix_to_quaternion(rot_mat)

            t.rotation.x = quat[0]
            t.rotation.y = quat[1]
            t.rotation.z = quat[2]
            t.rotation.w = quat[3]
            self.tar_wrist.data[idx_list[i * n_circum + j]] = t

    # mapping the taxels to the raw ADC list.
    idx_list = [8, 11, 0, 3, 7, 10, 1, 4]
    n_axis = 2
    rad = 0.02
    offset_along_axis = -0.17
    for i in range(n_axis):
        for j in range(n_circum):
            t = Transform()
            ang = j * angle_along_circum + offset_along_circum
            t.translation.x = rad * math.cos(ang)
            t.translation.y = rad * math.sin(ang)
            t.translation.z = offset_along_axis + i * dist_along_axis

            rot_mat = tr.Rz(-ang) * tr.Ry(math.radians(-90))
            quat = tr.matrix_to_quaternion(rot_mat)

            t.rotation.x = quat[0]
            t.rotation.y = quat[1]
            t.rotation.z = quat[2]
            t.rotation.w = quat[3]
            self.tar_wrist.data[idx_list[i * n_circum + j]] = t

    t = Transform()
    t.translation.x = 0.
    t.translation.y = 0.
    t.translation.z = -0.2
    rot_mat = tr.Rx(math.radians(180))
    quat = tr.matrix_to_quaternion(rot_mat)
    t.rotation.x = quat[0]
    t.rotation.y = quat[1]
    t.rotation.z = quat[2]
    t.rotation.w = quat[3]
    self.tar_wrist.data[12] = t
# f_r is the right-arm wrist force, computed the same way earlier in the original script.
f_l = arm_client.get_wrist_force(l_arm, base_frame=True)
time_stamp = rospy.Time.now()

h = Header()
h.stamp = time_stamp
force_r_pub.publish(FloatArray(h, f_r))
force_l_pub.publish(FloatArray(h, f_l))

publish_sphere_marker((0.5, 0.5, 0.5), 0.08, torso_link_name,
                      time_stamp, 'both_arms', 0)

for arm in [r_arm, l_arm]:
    q = arm_client.get_joint_angles(arm)
    links = [2, 3, 7]
    for i in links:
        p, rot = arms.FK_all(arm, q, i)
        quat = tr.matrix_to_quaternion(rot)
        frameid = ar.link_tf_name(arm, i)
        transform_bcast.sendTransform(p.A1.tolist(), quat, time_stamp,
                                      frameid, torso_link_name)
        publish_sphere_marker((0.5, 0.1, 0.5), 0.05, frameid,
                              time_stamp, arm, i)

    c1 = (0.5, 0.1, 0.5)
    c2 = (0.5, 0.5, 0.1)
    p, rot = arms.FK_all(arm, q)
    publish_cartesian_markers(arm, time_stamp, p, rot, c1, c2,
                              marker_id=76)

    c1 = (0.2, 0.2, 0.2)
    c2 = (0.6, 0.6, 0.6)
    jep = arm_client.get_jep(arm)
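The loop above broadcasts each link's forward-kinematics pose over TF; sendTransform takes a translation list and an [x, y, z, w] quaternion, which is what tr.matrix_to_quaternion returns. A minimal hedged sketch of that call in isolation; the frame names here are placeholders.

import rospy
import tf
import hrl_lib.transforms as tr

br = tf.TransformBroadcaster()
pos = [0.4, -0.1, 0.0]                       # meters, in the parent frame
quat = tr.matrix_to_quaternion(tr.Rz(0.3))   # [x, y, z, w]
br.sendTransform(pos, quat, rospy.Time.now(),
                 'r_arm_link3', '/torso_lift_link')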
def new_goal(self, psm):
    rospy.loginfo("[%s] I just got a goal location. I will now start reaching!" % rospy.get_name())
    psm.header.stamp = rospy.Time.now()
    #self.goal_pose_pub.publish(psm)
    self.pub_feedback("Reaching toward goal location")
    #return

    # This is to use tf to get head location.
    # Otherwise, there is a subscriber to get a head location.
    # Comment out if there is no tf to use.
    #now = rospy.Time.now() + rospy.Duration(0.5)
    #self.listener.waitForTransform('/base_link', '/head_frame', now, rospy.Duration(10))
    #pos_temp, ori_temp = self.listener.lookupTransform('/base_link', '/head_frame', now)
    #self.head = createBMatrix(pos_temp, ori_temp)
    #self.pr2_B_wc = np.matrix([[self.head[0,0], self.head[0,1], 0., self.head[0,3]],
    #                           [self.head[1,0], self.head[1,1], 0., self.head[1,3]],
    #                           [0.,             0.,             1., 0.],
    #                           [0.,             0.,             0., 1.]])

    wheelchair_location = self.pr2_B_wc * self.corner_B_head.I
    self.wheelchair.SetTransform(np.array(wheelchair_location))

    pos_goal = wheelchair_location[:3, 3]
    ori_goal = tr.matrix_to_quaternion(wheelchair_location[0:3, 0:3])

    marker = Marker()
    marker.header.frame_id = "base_link"
    marker.header.stamp = rospy.Time()
    marker.ns = "arm_reacher_wc_model"
    marker.id = 0
    marker.type = Marker.MESH_RESOURCE
    marker.action = Marker.ADD
    marker.pose.position.x = pos_goal[0]
    marker.pose.position.y = pos_goal[1]
    marker.pose.position.z = pos_goal[2]
    marker.pose.orientation.x = ori_goal[0]
    marker.pose.orientation.y = ori_goal[1]
    marker.pose.orientation.z = ori_goal[2]
    marker.pose.orientation.w = ori_goal[3]
    marker.scale.x = 0.0254
    marker.scale.y = 0.0254
    marker.scale.z = 0.0254
    marker.color.a = 1.0
    marker.color.r = 1.0
    marker.color.g = 0.0
    marker.color.b = 0.0
    # only if using a MESH_RESOURCE marker type:
    marker.mesh_resource = "package://hrl_base_selection/models/ADA_Wheelchair.dae"
    self.vis_pub.publish(marker)

    v = self.robot.GetActiveDOFValues()
    for name in self.joint_names:
        v[self.robot.GetJoint(name).GetDOFIndex()] = self.joint_angles[self.joint_names.index(name)]
    self.robot.SetActiveDOFValues(v)

    pos_temp = [psm.pose.position.x, psm.pose.position.y, psm.pose.position.z]
    ori_temp = [psm.pose.orientation.x, psm.pose.orientation.y,
                psm.pose.orientation.z, psm.pose.orientation.w]
    self.goal = createBMatrix(pos_temp, ori_temp)

    self.goal_B_gripper = np.matrix([[0., 0., 1., 0.1],
                                     [0., 1., 0., 0.],
                                     [-1., 0., 0., 0.],
                                     [0., 0., 0., 1.]])
    self.pr2_B_goal = self.goal * self.goal_B_gripper

    self.sol = self.manip.FindIKSolution(np.array(self.pr2_B_goal),
                                         op.IkFilterOptions.CheckEnvCollisions)
    if self.sol is None:
        self.pub_feedback("Failed to find a good arm configuration for reaching.")
        return None
    rospy.loginfo("[%s] Got an IK solution: %s" % (rospy.get_name(), self.sol))
    self.pub_feedback("Found a good arm configuration for reaching.")

    self.pub_feedback("Looking for path for arm to goal.")
    traj = None
    try:
        #self.res = self.manipprob.MoveToHandPosition(matrices=[np.array(self.pr2_B_goal)], seedik=10)
        self.traj = self.manipprob.MoveManipulator(goal=self.sol, outputtrajobj=True)
        self.pub_feedback("Found a path to reach to the goal.")
    except:
        self.traj = None
        self.pub_feedback("Could not find a path to reach to the goal")
        return None

    tmp_traj = tmp_vel = tmp_acc = []  # unused scratch lists
    trajectory = JointTrajectory()
    for i in xrange(self.traj.GetNumWaypoints()):
        point = JointTrajectoryPoint()
        temp = []
        for j in xrange(7):
            temp.append(float(self.traj.GetWaypoint(i)[j]))
        point.positions = temp
        trajectory.points.append(point)
    trajectory.joint_names = ['l_upper_arm_roll_joint', 'l_shoulder_pan_joint',
                              'l_shoulder_lift_joint', 'l_forearm_roll_joint',
                              'l_elbow_flex_joint', 'l_wrist_flex_joint',
                              'l_wrist_roll_joint']
    #self.mpc_weights_pub.publish(self.weights)
    self.goal_traj_pub.publish(trajectory)
    self.pub_feedback("Reaching to location")
def updateHapticState(self):
    pos, rot = self.kinematics.FK(self.joint_angles)
    self.end_effector_position = pos
    self.end_effector_orient_quat = tr.matrix_to_quaternion(rot)
    self.J_h = self.kinematics.Jacobian(self.joint_angles)