def stare_chain_thread(self, index):
    index = int(index)
    rate = rospy.Rate(10)
    while self.stare_landmark_init:
        # Compute transformation from world to the offset goal
        angle = euler_from_quaternion([
            self.map.landmark[index].pose.pose.orientation.x,
            self.map.landmark[index].pose.pose.orientation.y,
            self.map.landmark[index].pose.pose.orientation.z,
            self.map.landmark[index].pose.pose.orientation.w
        ])
        O = PyKDL.Rotation.RPY(angle[0], angle[1], angle[2])
        t = PyKDL.Vector(self.map.landmark[index].pose.pose.position.x,
                         self.map.landmark[index].pose.pose.position.y,
                         self.map.landmark[index].pose.pose.position.z)
        wTl = PyKDL.Frame(O, t)
        wTo = wTl * self.offset_transformation
        orientation = wTo.M.GetRPY()
        position = wTo.p

        wwr = WorldWaypointReq()
        wwr.header.stamp = rospy.Time().now()
        wwr.goal = GoalDescriptor(self.name, 1, GoalDescriptor.PRIORITY_NORMAL)
        wwr.altitude_mode = False
        wwr.position.north = position[0]
        wwr.position.east = position[1]
        wwr.position.depth = position[2]
        wwr.altitude = 5.0
        wwr.orientation.roll = orientation[0]
        wwr.orientation.pitch = orientation[1]
        wwr.orientation.yaw = orientation[2]
        wwr.disable_axis.x = False
        wwr.disable_axis.y = False
        wwr.disable_axis.z = True
        wwr.disable_axis.roll = True
        wwr.disable_axis.pitch = True
        wwr.disable_axis.yaw = False
        self.pub_world_waypoint_req.publish(wwr)
        print 'publish:\n', wwr
        if not self.keep_pose:
            self.check_tolerance(wwr)
        rate.sleep()

def test_transform(self):
    b = tf2_ros.Buffer()
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.rotation.x = 1
    t.header.stamp = rospy.Time(2.0)
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b', rospy.Time(2.0), rospy.Duration(2.0))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    v = PyKDL.Vector(1, 2, 3)
    out = b.transform(tf2_ros.Stamped(v, rospy.Time(2), 'a'), 'b')
    self.assertEqual(out.x(), 0)
    self.assertEqual(out.y(), -2)
    self.assertEqual(out.z(), -3)

    f = PyKDL.Frame(PyKDL.Rotation.RPY(1, 2, 3), PyKDL.Vector(1, 2, 3))
    out = b.transform(tf2_ros.Stamped(f, rospy.Time(2), 'a'), 'b')
    print out
    self.assertEqual(out.p.x(), 0)
    self.assertEqual(out.p.y(), -2)
    self.assertEqual(out.p.z(), -3)
    # TODO(tfoote) check values of rotation

    t = PyKDL.Twist(PyKDL.Vector(1, 2, 3), PyKDL.Vector(4, 5, 6))
    out = b.transform(tf2_ros.Stamped(t, rospy.Time(2), 'a'), 'b')
    self.assertEqual(out.vel.x(), 1)
    self.assertEqual(out.vel.y(), -8)
    self.assertEqual(out.vel.z(), 2)
    self.assertEqual(out.rot.x(), 4)
    self.assertEqual(out.rot.y(), -5)
    self.assertEqual(out.rot.z(), -6)

    w = PyKDL.Wrench(PyKDL.Vector(1, 2, 3), PyKDL.Vector(4, 5, 6))
    out = b.transform(tf2_ros.Stamped(w, rospy.Time(2), 'a'), 'b')
    self.assertEqual(out.force.x(), 1)
    self.assertEqual(out.force.y(), -2)
    self.assertEqual(out.force.z(), -3)
    self.assertEqual(out.torque.x(), 4)
    self.assertEqual(out.torque.y(), -8)
    self.assertEqual(out.torque.z(), -4)

def _set_waypoint(self, intersection_point, person_position):
    """
    Puts the goal as a waypoint in ED

    :param intersection_point: geometry_msgs PointStamped of the last raytrace intersection (in map)
    :param person_position: geometry_msgs PointStamped of the last measured person position (in map)
    """
    yaw = math.atan2(person_position.point.y - intersection_point.point.y,
                     person_position.point.x - intersection_point.point.x)
    position = kdl.Vector(intersection_point.point.x, intersection_point.point.y, 0.0)
    orientation = kdl.Rotation.RPY(0.0, 0.0, yaw)
    waypoint = FrameStamped(frame=kdl.Frame(orientation, position), frame_id="/map")
    self.robot.ed.update_entity(id=self.waypoint_id, type="waypoint",
                                frame_stamped=waypoint)

def resolve(self):
    return Entity(
        identifier="foo",
        object_type=None,
        frame_id="/map",
        pose=kdl.Frame(),
        shape=None,
        volumes={
            "on_top_of": BoxVolume(
                kdl.Vector(-1.0, -1.0, -1.0),
                kdl.Vector(1.0, 1.0, 1.0),
            )
        },
        super_types=[],
        last_update_time=None,
    )

def move_psm3_to(self, new_position):
    """New position vector is given with respect to world coordinates."""
    if self.tf_world_psm3b is None:
        print("no transformation to move the robot")
        exit(0)
    # NOTE: no offset is currently applied; the position is used as given.
    # Transform from world into the PSM3 base frame
    new_position = self.tf_world_psm3b.Inverse() * new_position
    # print("moving to new location:")
    # self.print_vect(new_position)
    self.api_psm3_arm.move(PyKDL.Frame(self.fix_orientation, new_position))

def test_matrix(arm, speed=0.05, frame=PyKDL.Frame()):
    r = 0.05  # amplitude
    # repeat/tile so that theta and phi have equal length (a 3 x 3 grid of angles)
    theta = np.repeat(np.linspace(0, np.pi / 2, 3), 3)
    phi = np.tile(np.linspace(0, np.pi / 2, 3), 3)
    # spherical-to-cartesian conversion for the motion targets
    x = r * np.sin(theta) * np.cos(phi)
    y = r * np.sin(theta) * np.sin(phi)
    z = r * np.cos(theta)
    z_home = -0.1135
    for a, b, c in zip(x[2:], y[2:], z[2:]):
        print(a, b, c)
        vmove(arm, PyKDL.Vector(a, b, c + z_home), speed, frame)
        rospy.sleep(1)
        # return to the home position between points
        vmove(arm, PyKDL.Vector(0, 0, z_home), speed, frame)
        rospy.sleep(1)

def transform(frontier_tf):
    global current_submap_poses
    while len(current_submap_poses) == 0:
        rospy.sleep(0.05)  # yield instead of busy-waiting for the first submap poses
    for frontier in frontier_tf:
        # each frontier is a FrontierTF with submapIndex, trajectory_id and transform
        if (frontier.trajectory_id, frontier.submapIndex) in current_submap_poses:
            pomocni = posemath.toMsg(
                current_submap_poses[frontier.trajectory_id, frontier.submapIndex] *
                PyKDL.Frame(PyKDL.Rotation.Identity(),
                            PyKDL.Vector(frontier.transform.x, frontier.transform.y, 0.0)))
            frontier.projected = pomocni.position

def inverseKinematics(robotChain, pos, rot, qGuess=None, minJoints=None, maxJoints=None):
    """
    Perform inverse kinematics

    Args:
        robotChain: robot's chain object
        pos: 1 x 3 or 3 x 1 array of the end effector position.
        rot: 3 x 3 array of the end effector rotation
        qGuess: guess values for the joint positions
        minJoints: minimum value of the position for each joint
        maxJoints: maximum value of the position for each joint

    Returns:
        list of joint positions or None (no solution)
    """
    posKdl = kdl.Vector(pos[0], pos[1], pos[2])
    rotKdl = kdl.Rotation(rot[0, 0], rot[0, 1], rot[0, 2],
                          rot[1, 0], rot[1, 1], rot[1, 2],
                          rot[2, 0], rot[2, 1], rot[2, 2])
    frameKdl = kdl.Frame(rotKdl, posKdl)
    numJoints = robotChain.getNrOfJoints()
    minJoints = -np.pi * np.ones(numJoints) if minJoints is None else minJoints
    maxJoints = np.pi * np.ones(numJoints) if maxJoints is None else maxJoints
    minsKdl = jointListToKdl(minJoints)
    maxsKdl = jointListToKdl(maxJoints)

    fkKdl = kdl.ChainFkSolverPos_recursive(robotChain)
    ikVKdl = kdl.ChainIkSolverVel_pinv(robotChain)
    ikPKdl = kdl.ChainIkSolverPos_NR_JL(robotChain, minsKdl, maxsKdl, fkKdl, ikVKdl)

    if qGuess is None:
        # use the midpoint of the joint limits as the guess
        lowerLim = np.where(np.isfinite(minJoints), minJoints, 0.)
        upperLim = np.where(np.isfinite(maxJoints), maxJoints, 0.)
        qGuess = (lowerLim + upperLim) / 2.0
        qGuess = np.where(np.isnan(qGuess), [0.] * len(qGuess), qGuess)

    qKdl = kdl.JntArray(numJoints)
    qGuessKdl = jointListToKdl(qGuess)
    if ikPKdl.CartToJnt(qGuessKdl, frameKdl, qKdl) >= 0:
        return jointKdlToList(qKdl)
    else:
        return None

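# Note: inverseKinematics above relies on two conversion helpers,
# jointListToKdl and jointKdlToList, that are not shown in this snippet.
# A minimal sketch of what they are assumed to do:
import PyKDL as kdl


def jointListToKdl(q):
    """Copy a Python/NumPy sequence of joint values into a kdl.JntArray."""
    if q is None:
        return None
    q_kdl = kdl.JntArray(len(q))
    for i, q_i in enumerate(q):
        q_kdl[i] = q_i
    return q_kdl


def jointKdlToList(q):
    """Copy a kdl.JntArray back into a plain Python list."""
    if q is None:
        return None
    return [q[i] for i in range(q.rows())]
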
def callback(msg):
    if msg.num_people_detected == 1:  # only publish when exactly one person is detected
        print("Exactly one person detected")
        global q_solved

        ############
        # Solve IK #
        ############
        # seed the solver with the previous solution
        q_init = kdl.JntArray(chain_RHand.getNrOfJoints())
        for i in range(q_init.rows()):
            q_init[i] = q_solved[i]
        q_solved = kdl.JntArray(chain_RHand.getNrOfJoints())
        if math.isnan(msg.right_wrist.x):  # NaN marks a missing detection
            print("No right wrist detected")
        else:
            print(msg.person_ID)
            # assign x, y and z with the coordinates from right_wrist
            x, y, z = msg.right_wrist.x, msg.right_wrist.y, msg.right_wrist.z
            # desired final cartesian position
            posVec = kdl.Vector(x, y, z)
            F_des = kdl.Frame(posVec)
            IK_RHand.CartToJnt(q_init, F_des, q_solved)
            # convert to np (for purposes of printing)
            q_solved = np.array([q_solved[i] for i in range(q_solved.rows())])
            # publish and print desired final joint positions
            publisher.send({jnt: q_solved[i] for i, jnt in enumerate(Rarm_joints)})
            print(q_solved)
    elif msg.num_people_detected == 0:
        print("No person detected")
    else:
        print("Too many people detected!", msg.num_people_detected)

def move_end_effector_to_cartesian_position_and_angle_and_wrench(
        which_hand, angleX, angleY, angleZ, x, y, z, imp_list, imp_times,
        max_wrench, path_tool_tolerance, time):
    global velma
    T_hand = PyKDL.Frame(PyKDL.Rotation.RPY(angleX, angleY, angleZ),
                         PyKDL.Vector(x, y, z))
    if which_hand == RIGHT_HAND:
        if not velma.moveCartImpRight([T_hand], time, None, None, imp_list,
                                      imp_times, max_wrench, start_time=0.5,
                                      path_tol=path_tool_tolerance):
            exitError(13)
        error = velma.waitForEffectorRight()
        if error != 0:
            if error == PATH_TOLERANCE_VIOLATED_ERROR:
                return PATH_TOLERANCE_VIOLATED_ERROR
    elif which_hand == LEFT_HAND:
        if not velma.moveCartImpLeft([T_hand], [3.0], None, None, imp_list,
                                     imp_times, max_wrench, start_time=0.5,
                                     path_tol=path_tool_tolerance):
            exitError(13)
        error = velma.waitForEffectorLeft()
        if error != 0:
            if error == PATH_TOLERANCE_VIOLATED_ERROR:
                return PATH_TOLERANCE_VIOLATED_ERROR
    else:
        print "Error - no hand has been specified!"
    rospy.sleep(0.2)

    # Check precision
    if which_hand == RIGHT_HAND:
        T_hand_diff = PyKDL.diff(T_hand, velma.getTf("B", "Tr"), 1.0)
        if T_hand_diff.vel.Norm() > 0.05 or T_hand_diff.rot.Norm() > 0.05:
            print "Precision for right hand is not perfect!"
    elif which_hand == LEFT_HAND:
        print "d 1.13 - check precision for the left hand - later"

def cartesian_goal(self, goal):
    rospy.loginfo(rospy.get_caller_id() + '-> starting cartesian goal')
    self.prepare_cartesian()
    cart_goal = PyKDL.Frame()
    cart_goal.p = self.arm.get_desired_position().p
    cart_goal.M = self.arm.get_desired_position().M

    # Translation motion
    cart_goal.p[0] = goal.pose.position.x
    cart_goal.p[1] = goal.pose.position.y
    if goal.pose.position.z > -0.120:
        cart_goal.p[2] = -0.121
        rospy.loginfo("Defaulting to -0.121 z value")
    else:
        cart_goal.p[2] = goal.pose.position.z

    # # Rotation motion
    # cart_goal.M = PyKDL.Rotation.Quaternion(
    #     goal.pose.orientation.x,
    #     goal.pose.orientation.y,
    #     goal.pose.orientation.z, goal.pose.orientation.w)
    # r, p, yaw = cart_goal.M.GetRPY()
    # x, y, z, w = cart_goal.M.GetQuaternion()
    # rospy.loginfo(x)
    # rospy.loginfo("roll angle: " + str(r * 180 / 3.1415926))
    # rospy.loginfo("pitch: " + str(p * 180 / 3.1415926))
    # rospy.loginfo("Yaw angle: " + str(yaw * 180 / 3.1415926))
    # feature_points_right = rospy.wait_for_message(
    #     "/featurization/right/feature_points", FeaturePoints, timeout=None)

    if self.check_cartesian_error(cart_goal) < self.position_threshold:
        rospy.loginfo('Position is within position threshold, no goal will be sent.')
    else:
        # Send goal
        self.arm.move(cart_goal.p)
        error = self.check_cartesian_error(cart_goal)
        rospy.loginfo('Inverse kinematic error in position: ' + str(error))
    rospy.loginfo('Cartesian goal complete')

def main(screen):
    screen.nodelay(1)
    # this sets the increment for our displacement
    delta = 0.001
    total_translation = 0.0
    translation = PyKDL.Vector(0.0, 0.0, delta)
    # define the waypoint positions for the PSMs for this load test
    cart1 = PyKDL.Vector(0.116, -0.090, -0.083)
    rot1 = PyKDL.Rotation()
    rot1 = rot1.Quaternion(0.694, 0.0113, 0.719, -0.005)
    pos1 = PyKDL.Frame(rot1, cart1)
    p2 = dvrk.psm('PSM2')
    c = dvrk.console()
    # set our rate to 30hz
    rate = rospy.Rate(30)
    print 'homing to position...'
    c.teleop_stop()
    # p2.open_jaw()
    # p2.move(pos1)
    p2.close_jaw()
    while True:
        if screen.getch() == ord('w'):
            p2.dmove(translation)
            total_translation = total_translation + delta
        if screen.getch() == ord('d'):
            print 'displacement is: '
            print total_translation
        if screen.getch() == ord('q'):
            print 'quitting'
            break
    c.teleop_start()

def test_transform(self):
    b = tf2_ros.Buffer()
    t = TransformStamped()
    t.transform.translation.x = 1.0
    t.transform.rotation = Quaternion(w=0, x=1, y=0, z=0)
    t.header.stamp = builtin_interfaces.msg.Time(sec=2)
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b', rclpy.time.Time(seconds=2),
                             rclpy.time.Duration(seconds=2))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    v = PyKDL.Vector(1, 2, 3)
    out = b.transform(tf2_ros.Stamped(v, builtin_interfaces.msg.Time(sec=2), 'a'), 'b')
    self.assertEqual(out.x(), 0)
    self.assertEqual(out.y(), -2)
    self.assertEqual(out.z(), -3)

    f = PyKDL.Frame(PyKDL.Rotation.RPY(1, 2, 3), PyKDL.Vector(1, 2, 3))
    out = b.transform(tf2_ros.Stamped(f, builtin_interfaces.msg.Time(sec=2), 'a'), 'b')
    self.assertEqual(out.p.x(), 0)
    self.assertEqual(out.p.y(), -2)
    self.assertEqual(out.p.z(), -3)
    # TODO(tfoote) check values of rotation

    t = PyKDL.Twist(PyKDL.Vector(1, 2, 3), PyKDL.Vector(4, 5, 6))
    out = b.transform(tf2_ros.Stamped(t, builtin_interfaces.msg.Time(sec=2), 'a'), 'b')
    self.assertEqual(out.vel.x(), 1)
    self.assertEqual(out.vel.y(), -8)
    self.assertEqual(out.vel.z(), 2)
    self.assertEqual(out.rot.x(), 4)
    self.assertEqual(out.rot.y(), -5)
    self.assertEqual(out.rot.z(), -6)

    w = PyKDL.Wrench(PyKDL.Vector(1, 2, 3), PyKDL.Vector(4, 5, 6))
    out = b.transform(tf2_ros.Stamped(w, builtin_interfaces.msg.Time(sec=2), 'a'), 'b')
    self.assertEqual(out.force.x(), 1)
    self.assertEqual(out.force.y(), -2)
    self.assertEqual(out.force.z(), -3)
    self.assertEqual(out.torque.x(), 4)
    self.assertEqual(out.torque.y(), -8)
    self.assertEqual(out.torque.z(), -4)

def move_tip(self, x=0., y=0., z=0., roll=0., pitch=0., yaw=0.):
    """
    Moves the tooltip in the world frame by the given x,y,z / roll,pitch,yaw.

    @return True on success
    """
    transform = PyKDL.Frame(PyKDL.Rotation.RPY(pitch, roll, yaw),
                            PyKDL.Vector(-x, -y, -z))
    tip_pose = self.get_tip_pose()
    tip_pose_kdl = posemath.fromMsg(tip_pose)
    final_pose = toMsg(tip_pose_kdl * transform)

    self.arm_commander.set_start_state_to_current_state()
    self.arm_commander.set_pose_targets([final_pose])
    plan = self.arm_commander.plan()
    if not self.arm_commander.execute(plan):
        return False
    return True

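# A minimal usage sketch for move_tip, assuming a commander instance
# (hypothetically named `commander`) that exposes the method above:
# nudge the tooltip 1 cm up in z and rotate it 0.1 rad about yaw.
if not commander.move_tip(z=0.01, yaw=0.1):
    rospy.logwarn('move_tip failed to plan or execute')
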
def calcNextSquarePoint():
    """
    Determines the next target point of the square path based on the
    current position cuRoPo.

    Returns
    -------
    Position()
    """
    vecRo = PyKDL.Vector(cuRoPo.x, cuRoPo.y, 0)
    twiRo = PyKDL.Rotation.RPY(0, 0, cuRoPo.theta)
    mxJednoRo = PyKDL.Frame(twiRo, vecRo)
    vecDestRel = PyKDL.Vector(0, (lefty * 2 - 1) * setRoPo.s, 0)  # square turns left when lefty == True
    vecDest = mxJednoRo * vecDestRel
    nextSqPnt = Position(vecDest[0], vecDest[1], 0)
    print "nextSqPnt.x:", nextSqPnt.x, " nextSqPnt.y:", nextSqPnt.y
    return nextSqPnt

def query_plan(self):
    # to disable the interpolator:
    # return self.dst_pose
    for time, pose in self.pose_vector:
        if time > rospy.Time.now():
            # found the current setpoint
            self.last_dst_pose = kdl.Frame(pose)  # save as last pose
            return pose
    # no unreached setpoint found; None marks the end of the interpolation
    return None

def Pos(self, t):
    if self.loop:
        # ping-pong interpolation: fold t/duration back into [0, 1]
        _lambda = t / self.duration
        while _lambda > 2.0:
            _lambda /= 2.0
        if _lambda > 1.0:
            _lambda = 2.0 - _lambda
    else:
        _lambda = np.min((t / self.duration, 1.0))
    d = kdl.Frame()
    # linear interpolation of the position
    d.p = (1 - _lambda) * self.initial_pose.p + _lambda * self.desired_pose.p
    # interpolate the rotation along the relative equivalent axis
    R = self.initial_pose.M.Inverse() * self.desired_pose.M
    (alpha, v) = R.GetRotAngle()
    dR = kdl.Rotation.Rot(v, _lambda * alpha)
    d.M = self.initial_pose.M * dR
    return d

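# A minimal usage sketch of the interpolator above, assuming a hypothetical
# holder object carrying the attributes Pos() reads (initial_pose,
# desired_pose, duration, loop):
import numpy as np
import PyKDL as kdl


class _Holder(object):
    pass


h = _Holder()
h.initial_pose = kdl.Frame(kdl.Rotation.Identity(), kdl.Vector(0, 0, 0))
h.desired_pose = kdl.Frame(kdl.Rotation.RPY(0, 0, np.pi / 2), kdl.Vector(1, 0, 0))
h.duration = 2.0
h.loop = False

# Pos can be called as a plain function with the holder standing in for `self`
print(Pos(h, 0.0).p)  # [0, 0, 0]   start pose
print(Pos(h, 1.0).p)  # [0.5, 0, 0] halfway point, yaw is pi/4 here
print(Pos(h, 5.0).p)  # [1, 0, 0]   clamped at the goal since loop is False
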
def test_inverse_frame(self, x, y, z, q):
    f = quaternion_matrix(q)
    f[0, 3] = x
    f[1, 3] = y
    f[2, 3] = z

    r2 = PyKDL.Frame()
    r2.M = PyKDL.Rotation.Quaternion(q[0], q[1], q[2], q[3])
    r2.p[0] = x
    r2.p[1] = y
    r2.p[2] = z
    r2 = r2.Inverse()
    r2 = pykdl_frame_to_numpy(r2)
    self.assertTrue(
        np.isclose(w.compile_and_execute(w.inverse_frame, [f]),
                   r2, atol=1.e-4, rtol=1.e-4).all())

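# The test above relies on a pykdl_frame_to_numpy helper that is not shown.
# A minimal sketch of what it is assumed to do:
import numpy as np
import PyKDL


def pykdl_frame_to_numpy(frame):
    """Convert a PyKDL.Frame into a 4x4 homogeneous transform matrix."""
    m = frame.M
    p = frame.p
    return np.array([[m[0, 0], m[0, 1], m[0, 2], p[0]],
                     [m[1, 0], m[1, 1], m[1, 2], p[1]],
                     [m[2, 0], m[2, 1], m[2, 2], p[2]],
                     [0.0,     0.0,     0.0,     1.0]])
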
def main():
    # use the URDF given on the command line, otherwise fall back to the default
    if len(sys.argv) > 1:
        filename = sys.argv[1]
    else:
        filename = 'uthai_description/urdf/uthai.urdf'
    (ok, tree) = kdl_parser_py.urdf.treeFromFile(filename)
    chain = tree.getChain("base_footprint", "r_foot_ft_link")
    fksolver = kdl.ChainFkSolverPos_recursive(chain)
    ikvelsolver = kdl.ChainIkSolverVel_pinv(chain)
    iksolver = kdl.ChainIkSolverPos_NR(chain, fksolver, ikvelsolver)
    q_init = kdl.JntArray(6)
    q = kdl.JntArray(6)
    F_dest = kdl.Frame(kdl.Rotation.RPY(0, 0, 0), kdl.Vector(0.0, 0, 0))
    status = iksolver.CartToJnt(q_init, F_dest, q)
    print("Status : ", status)
    print(q)

def create_frame_from_points(p0, p1):
    # type: (kdl.Vector, kdl.Vector) -> kdl.Frame
    """
    Creates a frame from two points. The origin of the frame is the first point.
    The x-direction points into the direction of the next point

    :param p0: Origin of the frame
    :type p0: kdl.Vector
    :param p1: x-direction of the frame will point to this vector
    :type p1: kdl.Vector
    :return: Created frame
    :rtype: kdl.Frame
    """
    unit_x = p1 - p0  # difference
    unit_x.Normalize()  # normalize it so that Norm is 1.0
    unit_y = kdl.Rotation.RPY(0.0, 0.0, 0.5 * math.pi) * unit_x
    unit_z = unit_x * unit_y  # cross-product
    rotation = kdl.Rotation(unit_x, unit_y, unit_z)
    return kdl.Frame(rotation, p0)

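# A minimal usage sketch: build a frame whose x-axis points from the origin
# toward a point one meter ahead and one meter to the left (values chosen
# purely for illustration):
import math
import PyKDL as kdl

p0 = kdl.Vector(0.0, 0.0, 0.0)
p1 = kdl.Vector(1.0, 1.0, 0.0)
frame = create_frame_from_points(p0, p1)

# the x-axis points along the diagonal, so the yaw should be 45 degrees
roll, pitch, yaw = frame.M.GetRPY()
print(math.degrees(yaw))  # ~45.0

# Note: because unit_y is obtained by yaw-rotating unit_x, this construction
# only yields an orthonormal basis when p0 and p1 lie at the same height.
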
def test_initialize(self):
    # initialize is run in setUp, but robot.base.get_location has been replaced,
    # so running it again
    self.robot.base.get_location = lambda: FrameStamped(
        kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(3.5, 3.5, 0)), "/map")
    self.tour_guide.initialize()

    self.assertListEqual(self.tour_guide._passed_room_ids, [self._kitchen.id])
    self.assertListEqual(self.tour_guide._passed_furniture_ids, [self._cabinet.id])
    self.assertListEqual(sorted(self.tour_guide._furniture_entities),
                         sorted([self._cabinet, self._bookcase]))
    self.assertListEqual(sorted(self.tour_guide._room_entities),
                         sorted([self._kitchen, self._bedroom]))
    self.assertDictEqual(self.tour_guide._furniture_entities_room,
                         {self._kitchen: [self._cabinet],
                          self._bedroom: [self._bookcase]})

def getImagePoints(self, options=VirtualObjectOptions(),
                   camera_frame=PyKDL.Frame(), camera=None):
    if camera is None:
        return []
    obj_points = self.getObjectPoints(camera_frame=camera_frame, options=options)
    object_to_camera = camera_frame.Inverse() * self
    cRvec, cTvec = transformations.KDLToCv(object_to_camera)
    img_points, _ = cv2.projectPoints(
        obj_points, cRvec, cTvec,
        np.array(camera.camera_matrix),
        np.array(camera.distortion_coefficients))
    points = []
    for i in range(0, len(img_points)):
        points.append((int(img_points.item(i, 0, 0)),
                       int(img_points.item(i, 0, 1))))
    return points

def createReleaseMarker(self):
    marker = InteractiveMarker()
    marker.header.frame_id = self._frameID
    marker.name = self._releaseMarkerName
    # marker.scale = self._torusMeshScalar
    marker.scale = 0.9
    wTc = getFrameFromPose(self._LargeWheelMarker.pose)
    # take the wheel marker's orientation and rotate it about z by the turn angle
    cV1 = kdl.Vector(0.0, 0.0, 0.0)
    cR1 = kdl.Rotation.RPY(0, 0, self._turnRotation)
    frame = wTc * kdl.Frame(cR1, cV1)
    marker.pose = getPoseFromFrame(frame)
    marker.controls.append(CreateTransRotControl("TranslateX"))
    self.addInteractiveMarker(marker, self.releaseCallback)
    self.server.applyChanges()
    return marker

def calc_R(xa):
    ret = []
    t = PyKDL.Frame(PyKDL.Rotation.Rot(axis, xa))
    for i in range(0, len(l2)):
        pos2rot_list[i] = t * pos2_list[i]
        n2rot_list[i] = t * n2_list[i]
    for i in range(0, len(l1)):
        score = getScore(pos1_list[i], pos2rot_list[i], n1_list[i], n2rot_list[i])
        ret.append(score)
    # for i in range(0, len(l2)):
    #     score = getMinScore(pos2rot_list[i], pos1_list, n2rot_list[i], n1_list)
    #     ret.append(score)
    return np.array(ret)

def pose_msg_to_kdl_frame(pose):
    """
    Convert a geometry_msgs.msg.Pose message to a PyKDL.Frame object

    :param pose: Pose to be converted
    :type pose: geometry_msgs.msg.Pose
    :rtype: PyKDL.Frame

    >>> pose = gm.Pose(gm.Point(1, 2, 3), gm.Quaternion(1, 0, 0, 0))
    >>> frame = pose_msg_to_kdl_frame(pose)
    >>> frame.p
    [ 1, 2, 3]
    >>> frame.M.GetQuaternion()
    (1.0, 0.0, 0.0, 0.0)
    """
    rot = kdl.Rotation.Quaternion(pose.orientation.x, pose.orientation.y,
                                  pose.orientation.z, pose.orientation.w)
    trans = kdl.Vector(pose.position.x, pose.position.y, pose.position.z)
    return kdl.Frame(rot, trans)

def get_flange_pose(self, q):
    """Returns the flange pose for a given joint configuration.

    Arguments:
    q -- joint configuration
    """
    joint_arr = kdl.JntArray(self._num_of_joints)
    for joint_num in range(self._num_of_joints):
        joint_arr[joint_num] = q[joint_num]
    frame = kdl.Frame()
    if self._tool_segment is None:
        self._fk_solver_pos.JntToCart(
            joint_arr, frame, self._complete_chain.getNrOfSegments())
    else:
        # skip the tool segment so the pose refers to the flange, not the TCP
        self._fk_solver_pos.JntToCart(
            joint_arr, frame, self._complete_chain.getNrOfSegments() - 1)
    return kdl_frame_to_m3d_transform(frame)

def kdl_inverse_kinematics(self, pos, quat, seed=None):
    pos = PyKDL.Vector(pos[0], pos[1], pos[2])
    rot = PyKDL.Rotation.Quaternion(quat[0], quat[1], quat[2], quat[3])
    goal_pose = PyKDL.Frame(rot, pos)

    seed_array = joint_list_to_kdl(self._default_seed)
    if seed is not None:
        seed_array = joint_list_to_kdl(seed)

    result_angles = PyKDL.JntArray(self._num_jnts)
    if self._ik_p_kdl.CartToJnt(seed_array, goal_pose, result_angles) >= 0:
        result = np.array(list(result_angles))
        # return result
        return True
    else:
        # return None
        return False

def get_obstacle_pose(cap_front, cap_rear, obs_front, obs_rear,
                      obs_gps_to_centroid, velodyne_to_front,
                      cap_yaw_error_rad=0, cap_pitch_error_rad=0):
    # calculate capture yaw in ENU frame and set up the correction rotation
    cap_front_v = dict_to_vect(cap_front)
    cap_rear_v = dict_to_vect(cap_rear)
    cap_yaw = get_yaw(cap_front_v, cap_rear_v)
    cap_yaw += cap_yaw_error_rad
    rot_cap = kd.Rotation.EulerZYX(-cap_yaw, -cap_pitch_error_rad, 0)

    obs_rear_v = dict_to_vect(obs_rear)
    if obs_front:
        obs_front_v = dict_to_vect(obs_front)
        obs_yaw = get_yaw(obs_front_v, obs_rear_v)
        # use the front gps as the obstacle reference point if it exists, as it's
        # closer to the centroid and its mounting metadata seems more reliable
        cap_to_obs = obs_front_v - cap_front_v
    else:
        cap_to_obs = obs_rear_v - cap_front_v

    # transform the capture-car-to-obstacle vector into the capture car's velodyne lidar frame
    res = rot_cap * cap_to_obs
    res += list_to_vect(velodyne_to_front)

    # obs_gps_to_centroid is the offset for the front gps if it exists, otherwise the rear
    obs_gps_to_centroid_v = list_to_vect(obs_gps_to_centroid)
    if obs_front:
        # with both front + rear RTK, calculate an obstacle yaw and use it for the centroid offset
        obs_rot_z = kd.Rotation.RotZ(obs_yaw - cap_yaw)
        centroid_offset = obs_rot_z * obs_gps_to_centroid_v
    else:
        # if no obstacle yaw calculation is possible, treat the rear RTK as the
        # centroid and offset in Z only
        obs_rot_z = kd.Rotation()
        centroid_offset = kd.Vector(0, 0, obs_gps_to_centroid_v[2])
    res += centroid_offset

    return frame_to_dict(kd.Frame(obs_rot_z, res))

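# get_obstacle_pose leans on several small conversion helpers (dict_to_vect,
# list_to_vect, get_yaw, frame_to_dict) that are not shown here. A minimal
# sketch of what they are assumed to look like; the dict keys and the output
# schema are assumptions, not confirmed by the source:
import math
import PyKDL as kd


def dict_to_vect(d):
    # assumed RTK fix layout with ENU translation keys
    return kd.Vector(d['tx'], d['ty'], d['tz'])


def list_to_vect(l):
    return kd.Vector(l[0], l[1], l[2])


def get_yaw(front_v, rear_v):
    # heading of the rear->front baseline in the ENU plane
    diff = front_v - rear_v
    return math.atan2(diff[1], diff[0])


def frame_to_dict(frame):
    # flatten a frame into translation + RPY, matching the assumed output schema
    r, p, y = frame.M.GetRPY()
    return dict(tx=frame.p[0], ty=frame.p[1], tz=frame.p[2],
                rx=r, ry=p, rz=y)
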
def read_data(bag, arm):
    """
    Reads the force/torque topic for the given arm from a bag file.
    Returns the tuple (time, force_x, force_y).
    """
    global alpha
    time = np.array([])
    frame = PyKDL.Frame(PyKDL.Rotation.RPY(0.0, 0.0, 1.22),
                        PyKDL.Vector(0.0, 0.0, 0.0))
    force_x = np.array([])
    force_x_t = np.array([])
    force_y = np.array([])
    force_y_t = np.array([])
    force_z = np.array([])
    torque_x = np.array([])
    torque_x_t = np.array([])
    torque_y = np.array([])
    torque_y_t = np.array([])
    torque_z = np.array([])
    for topic, msg, t in bag.read_messages(topics=['/ft/' + arm + '_gripper_motor']):
        time = np.append(time, msg.header.stamp.to_sec())
        force_x = np.append(force_x, msg.wrench.force.x)
        force_y = np.append(force_y, msg.wrench.force.y)
        # aligned_Fx = msg.wrench.force.x * math.cos(alpha) - msg.wrench.force.y * math.sin(alpha)
        # aligned_Fy = msg.wrench.force.x * math.sin(alpha) + msg.wrench.force.y * math.cos(alpha)
        # force_x_t = np.append(force_x_t, aligned_Fx)
        # force_y_t = np.append(force_y_t, aligned_Fy)
        # force_z = np.append(force_z, msg.wrench.force.z)
        # torque_x = np.append(torque_x, msg.wrench.torque.x)
        # torque_y = np.append(torque_y, msg.wrench.torque.y)
        # aligned_Tx = msg.wrench.torque.y * math.sin(alpha) + msg.wrench.torque.x * math.cos(alpha)
        # aligned_Ty = msg.wrench.torque.y * math.cos(alpha) - msg.wrench.torque.x * math.sin(alpha)
        # torque_x_t = np.append(torque_x_t, aligned_Tx)
        # torque_y_t = np.append(torque_y_t, aligned_Ty)
        # torque_z = np.append(torque_z, msg.wrench.torque.z)
    # return time, force_x, force_y, force_z, torque_x, torque_y, torque_z
    return time, force_x, force_y

def get_completion(partial_np_pc, transform):
    goal = graspit_shape_completion.msg.CompleteMeshGoal()
    assert partial_np_pc.shape[0] > 3
    assert partial_np_pc.shape[1] == 3
    for i in range(partial_np_pc.shape[0]):
        point = partial_np_pc[i, :]
        goal.partial_mesh.vertices.append(geometry_msgs.msg.Point(*point))

    client = actionlib.SimpleActionClient('complete_mesh',
                                          graspit_shape_completion.msg.CompleteMeshAction)
    client.wait_for_server()
    client.send_goal(goal)
    client.wait_for_result()
    result = client.get_result()

    # gather the completed vertices as homogeneous coordinates
    data = np.ones((len(result.completed_mesh.vertices), 4))
    for i, point in enumerate(result.completed_mesh.vertices):
        data[i] = np.array((point.x, point.y, point.z, 1))

    data_of = data.copy()
    trans_frame = PyKDL.Frame(PyKDL.Rotation.RPY(0, 0, 0), PyKDL.Vector(0, 0, -1))
    trans_matrix = tf_conversions.posemath.toMatrix(trans_frame)
    data = np.dot(trans_matrix, data.T).T
    data_of = np.dot(transform, data_of.T).T

    data = data[:, 0:3]
    data_of = data_of[:, 0:3]
    completed_pcd_cf = pcl.PointCloud(np.array(data, np.float32))
    completed_pcd_of = pcl.PointCloud(np.array(data_of, np.float32))

    vg = np.array(result.voxel_grid).reshape((PATCH_SIZE, PATCH_SIZE, PATCH_SIZE))
    vox = binvox_rw.Voxels(vg,
                           (PATCH_SIZE, PATCH_SIZE, PATCH_SIZE),
                           (result.voxel_offset_x,
                            result.voxel_offset_y,
                            result.voxel_offset_z - 1),
                           result.voxel_resolution * PATCH_SIZE,
                           "xyz")
    return completed_pcd_cf, completed_pcd_of, vox
