def to_pose2d(pos):
    # pos = N?x3x4 transformation matrix
    # 2d rotation requires (-y) component
    # 2d translation requires (z, -x) component
    # == test ==

    # -- opt 1 : einsum --
    T_pre = tx.euler_matrix(-np.pi / 2, 0, -np.pi / 2)  # 4x4
    T_0i = tx.inverse_matrix(np.concatenate((pos[0], [[0, 0, 0, 1]]), axis=0))  # 4x4
    T_cam = np.stack([np.concatenate((p, [[0, 0, 0, 1]]), axis=0) for p in pos], axis=0)
    pos_n = np.einsum('ij,jk,nkl,lm->nim', T_pre, T_0i, T_cam, T_pre.T)  # normalized position

    res = []
    for p in pos_n:
        t = tx.translation_from_matrix(p)
        q = tx.euler_from_matrix(p)
        res.append([t[0], t[1], q[-1]])
    res = np.stack(res, axis=0)
    return res
    # --------------------

    # -- opt 2 : no einsum --
    # T_pre = tx.euler_matrix(-np.pi/2, 0, -np.pi/2)  # 4x4
    # T_0i = tx.inverse_matrix(np.concatenate((pos[0], [[0,0,0,1]]), axis=0))  # 4x4
    # T_cam = np.stack([np.concatenate((p, [[0,0,0,1]]), axis=0) for p in pos], axis=0)
    # res = []
    # for p in pos:
    #     T_cam = np.concatenate((p, [[0,0,0,1]]), axis=0)
    #     T_base = T_pre.dot(T_0i).dot(T_cam).dot(T_pre.T)
    #     t = tx.translation_from_matrix(T_base)
    #     q = tx.euler_from_matrix(T_base)
    #     res.append([t[0], t[1], q[-1]])
    # res = np.stack(res, axis=0)
    return res
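# A minimal sanity check for to_pose2d, assuming `np` is numpy, `tx` is
# tf.transformations (or the standalone transformations module), and the
# function above is importable. The input poses are made up for illustration:
# because every pose is re-expressed relative to pos[0], the first output
# pose should always be the origin.
import numpy as np

pose0 = np.eye(4)[:3, :]                    # identity 3x4 pose
pose1 = np.eye(4)[:3, :].copy()
pose1[2, 3] = 1.0                           # one metre along the camera z axis

res = to_pose2d(np.stack([pose0, pose1]))   # -> array of [x, y, yaw] rows
assert res.shape == (2, 3)
assert np.allclose(res[0], [0.0, 0.0, 0.0])  # first pose is the reference frame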
def get_observation_COL(self, robot_state, obstacle_position):
    '''
    Given coordinates in continuous space (state), return the state observation.
    In: robot_state, ModelState(); obstacle_position, tuple(3)
    Returns: state_observation, int
    '''
    # in the world frame
    x_relative = obstacle_position[0] - robot_state.pose.position.x
    y_relative = obstacle_position[1] - robot_state.pose.position.y

    # coordinate transform:
    # coordinates in the base frame = rotation of the world frame in the base frame
    # (i.e. the inverse of the base-in-world rotation matrix) x coordinates in the world frame
    # frame transformations must be done with extreme care
    q = robot_state.pose.orientation
    r_matrix = trans.quaternion_matrix([q.x, q.y, q.z, q.w])[:3, :3]
    r_matrix_inverse = trans.inverse_matrix(r_matrix)
    relative_coor_base = np.dot(r_matrix_inverse, [x_relative, y_relative, 0])
    # print 'relative_coor_base =', relative_coor_base

    # get RL state
    state = self.get_polar_coor_COL(relative_coor_base[0], relative_coor_base[1])
    return state[0] * self.maze_dim_COL['num_fan'] + state[1]
def get_odom_in_world_frame():
    lock.acquire()
    global wTo, has_detected, cb_client
    cb = cb_client(corners_x, corners_y, spacing_x, spacing_y)
    if cb.board_pose.header.frame_id == '':
        lock.release()
        return (False, transform_matrix((0, 0, 0), (0, 0, 0, 1)))
    pos = cb.board_pose.pose.position
    p = (pos.x, pos.y, pos.z)
    rot = cb.board_pose.pose.orientation
    r = (rot.x, rot.y, rot.z, rot.w)
    cTo = transform_matrix(p, r)  # (c)amera (o)bject transform
    (p, r) = call_tf('odom_combined', cb.board_pose.header.frame_id)
    bTc = transform_matrix(p, r)  # (b)ase (c)amera transform (base is actually odom)
    bTo = numpy.dot(bTc, cTo)  # (b)ase (o)bject transform
    if not has_detected:
        wTo = bTo
        has_detected = True
    oTb = transformations.inverse_matrix(bTo)
    wTb = numpy.dot(wTo, oTb)
    lock.release()
    return (True, wTb)
def get_matching_ifco_wall(ifco_in_base_transform, ec_frame):
    # transforms points in base frame to ifco frame
    base_in_ifco_transform = tra.inverse_matrix(ifco_in_base_transform)

    # ec x and z axes in ifco frame
    ec_x_axis = base_in_ifco_transform.dot(ec_frame)[0:3, 0]
    ec_z_axis = base_in_ifco_transform.dot(ec_frame)[0:3, 2]

    # we can't check for zero because of small errors in the frame (due to vision or numerical uncertainty)
    space_thresh = 0.1

    # one could also check for dot-product = 0 instead of using the x-axis, but this is prone to the same errors
    if ec_z_axis.dot(np.array([1, 0, 0])) > space_thresh and ec_x_axis.dot(np.array([0, 1, 0])) > space_thresh:
        # print("GET MATCHING=SOUTH", tf_dbg_call_to_string(ec_frame, frame_name='ifco_south'))
        return 'south'
    elif ec_z_axis.dot(np.array([1, 0, 0])) < -space_thresh and ec_x_axis.dot(np.array([0, 1, 0])) < -space_thresh:
        # print("GET MATCHING=NORTH", tf_dbg_call_to_string(ec_frame, frame_name='ifco_north'))
        return 'north'
    elif ec_z_axis.dot(np.array([0, 1, 0])) < -space_thresh and ec_x_axis.dot(np.array([1, 0, 0])) > space_thresh:
        # print("GET MATCHING=WEST", tf_dbg_call_to_string(ec_frame, frame_name='ifco_west'))
        return 'west'
    elif ec_z_axis.dot(np.array([0, 1, 0])) > space_thresh and ec_x_axis.dot(np.array([1, 0, 0])) < -space_thresh:
        # print("GET MATCHING=EAST", tf_dbg_call_to_string(ec_frame, frame_name='ifco_east'))
        return 'east'
    else:
        # This should never be reached. Just here to prevent bugs
        raise ValueError("ERROR: Could not identify matching ifco wall. Check frames!")
def forward(distance, speed):
    twist = Twist()
    cmd_vel = rospy.Publisher("cmd_vel_mux/input/teleop", Twist, queue_size=10)
    listener = TransformListener()
    listener.waitForTransform("/base_link", "/odom", rospy.Time(0), rospy.Duration(1))
    (start_t, start_r) = listener.lookupTransform("/base_link", "/odom", rospy.Time())
    start_transform = t.concatenate_matrices(t.translation_matrix(start_t),
                                             t.quaternion_matrix(start_r))

    twist.linear.x = abs(speed)
    rate = rospy.Rate(10)
    done = False
    # for i in range(int((10*distance)/speed)):
    #     cmd_vel.publish(twist)
    #     rate.sleep()
    while not done:
        cmd_vel.publish(twist)
        rate.sleep()
        (curr_t, curr_r) = listener.lookupTransform("/base_link", "/odom", rospy.Time(0))
        current_transform = t.concatenate_matrices(t.translation_matrix(curr_t),
                                                   t.quaternion_matrix(curr_r))
        relative = numpy.dot(t.inverse_matrix(start_transform), current_transform)
        (x, y, z) = t.translation_from_matrix(relative)
        print("distance=%s, moved=%s,stop=%s" %
              (str(distance), str(x), str(abs(x) > abs(distance))))
        if abs(x) > abs(distance):
            done = True
            break
    return done, "Made it"
def inv_kin(self, q_current, T_desired):
    T_current, T_joint = self.fwd_kin(q_current)
    num_joints = len(self.a) - 1
    # transformation of desired frame with respect to current frame
    c_T_d = tf_tran.concatenate_matrices(tf_tran.inverse_matrix(T_current), T_desired)
    c_t_d = c_T_d[0:3, 3]  # extracting the translation part
    # c_R_d = c_T_d[0:3, 0:3]  # extracting the rotation part
    ROT = np.array(tf_tran.euler_from_matrix(c_T_d))
    delta_x = c_t_d
    P = 1.0
    dx = P * delta_x  # (velocity change wrt ee_current frame)
    v_ee = np.zeros([6, 1])
    v_ee[0:3] = dx.reshape([3, 1])
    v_ee[3:6] = ROT.reshape([3, 1])
    # Jac = self.jacobian(T_joint, T_current)
    Jac = self.geometric_jacobian(T_joint, T_current)
    qq = np.ones([num_joints, 1]) * 1
    J_pinv = np.linalg.pinv(Jac)
    qn_dot = np.dot((np.identity(num_joints) - np.dot(J_pinv, Jac)), qq)  # null space jacobian
    final_theta = np.dot(J_pinv, v_ee)  # + qn_dot  # final_theta are the joint velocities
    final_theta = np.insert(final_theta, num_joints, 0)  # Adding 0 at the end for the fixed joint
    return final_theta
def zero_start(poses):
    if len(poses) == 0:
        return poses
    ref = pose_to_numpy(poses[0])
    zero_poses = [
        numpy_to_pose(numpy.dot(transformations.inverse_matrix(ref), pose_to_numpy(p)))
        for p in poses
    ]
    return zero_poses
def pose_callback(self, msg):
    print "pose cb"
    trans = [msg.pose.position.x, msg.pose.position.y, msg.pose.position.z]
    rot = [
        msg.pose.orientation.x, msg.pose.orientation.y,
        msg.pose.orientation.z, msg.pose.orientation.w
    ]
    pose_matrix = tr.compose_matrix(angles=tr.euler_from_quaternion(rot), translate=trans)
    # Transfer to the frame of the robot
    # pose_matrix = np.dot(self.config, pose_matrix)

    if self.previous_tf is None:  # if first callback
        odo_trans = trans
        odo_rot = rot
        odo = tr.compose_matrix(angles=tr.euler_from_quaternion(odo_rot), translate=odo_trans)
        self.origin = trans
    else:
        # calculate limit matrix
        if enable_filter:
            limit_dist = limit_speed * (msg.header.stamp.to_nsec() - self.previous_time.to_nsec())
            print(msg.header.stamp.to_nsec() - self.previous_time.to_nsec())
            scale, shear, angles, prev_trans, persp = tr.decompose_matrix(self.previous_tf)
            moved_vec = [
                msg.pose.position.x - prev_trans[0],
                msg.pose.position.y - prev_trans[1],
                msg.pose.position.z - prev_trans[2]
            ]
            moved_dist = np.linalg.norm(moved_vec)
            if moved_dist > limit_dist:  # discard this pose
                print "move too much"
                return
        odo = np.dot(tr.inverse_matrix(self.previous_tf), pose_matrix)
        odo_trans = tf.transformations.translation_from_matrix(odo)
        odo_rot = tf.transformations.quaternion_from_matrix(odo)

    self.previous_time = msg.header.stamp
    self.previous_tf = pose_matrix
    print "x: ", trans[0] - self.origin[0], "y: ", trans[1] - self.origin[1], "z: ", trans[2] - self.origin[2]

    robot_odo = PoseStamped()
    robot_odo.header.stamp = msg.header.stamp
    robot_odo.pose.position.x = odo_trans[0]
    robot_odo.pose.position.y = odo_trans[1]
    robot_odo.pose.position.z = odo_trans[2]
    robot_odo.pose.orientation.x = odo_rot[0]
    robot_odo.pose.orientation.y = odo_rot[1]
    robot_odo.pose.orientation.z = odo_rot[2]
    robot_odo.pose.orientation.w = odo_rot[3]
    self.pub_odom.publish(robot_odo)
def get_Tmatrix(self, disableinvert=False):
    Tmatrix = dot(tft.translation_matrix([self.x_offset, self.y_offset, self.z_offset]),
                  array(tb_to_tf(self.yaw, self.pitch, self.roll, rad=True)))
    if not disableinvert and self.invert:
        Tmatrix = tft.inverse_matrix(Tmatrix)
    return Tmatrix
def laplace_cost_and_grad_pose(self, theta, mu_theta, inv_sigma_theta, mu_x,
                               inv_sigma_x, mu_ang_euler_des, inv_sig_euler):
    f_th, T_joint = self.fwd_kin(theta)
    jac_th = self.geometric_jacobian(T_joint, f_th)
    euler_angles = tf_tran.euler_from_matrix(f_th, 'szyz')
    s_phi, s_nu, c_phi, c_nu = (np.sin(euler_angles[0]), np.sin(euler_angles[1]),
                                np.cos(euler_angles[0]), np.cos(euler_angles[1]))
    T = np.array([[0, -s_phi, c_phi * s_nu],
                  [0, c_phi, s_phi * s_nu],
                  [1, 0, c_nu]])
    jac_analytic = np.dot(tf_tran.inverse_matrix(T), jac_th[3:6, :])
    pos_th = f_th[0:3, 3]
    jac_pos_th = jac_th[0:3, :]

    diff1 = theta - mu_theta
    tmp1 = np.dot(inv_sigma_theta, diff1)
    diff2 = pos_th - mu_x
    tmp2 = np.dot(inv_sigma_x, diff2)
    ori_th = tf_tran.euler_from_matrix(f_th, 'szyz')
    jac_ori_th = jac_analytic  # [3:6, :]
    diff3 = np.array(ori_th) - np.array(mu_ang_euler_des)
    tmp3 = np.dot(inv_sig_euler, diff3)

    nll = 0.5 * (np.dot(diff1, tmp1) + np.dot(diff2, tmp2)) + 0.5 * np.dot(diff3, tmp3)
    # print 'nll', nll
    grad_nll = tmp1 + np.dot(jac_pos_th.T, tmp2) + np.dot(jac_ori_th.T, tmp3)
    return nll, grad_nll
def extrapolate_transform(a, b, alpha=1.5):
    T_a = transform_from_vector(a)
    T_b = transform_from_vector(b)
    T_delta = np.dot(tra.inverse_matrix(T_a), T_b)
    T_delta[:3, 3] = T_delta[:3, 3] * alpha
    return T_delta
def detectObjects(self, req):
    rospy.loginfo("Got object detection request.")

    if req.bin_index >= 0 and req.bin_index < 12:
        bin_dimensions = self.bin_dimensions[req.bin_index]
    elif req.bin_index == -1:
        if 'tote_dimensions' in dir(self):
            bin_dimensions = self.tote_dimensions
            rospy.loginfo("Using tote dimensions.")
        else:
            rospy.logerr("Tote dimensions required, but not loaded.")
            return None
    else:
        rospy.logerr("Invalid bin index given: {}.".format(req.bin_index))
        return None

    # convert bin_pose to numpy transformation matrix
    rotation = transformations.quaternion_matrix([
        req.bin_pose.orientation.x, req.bin_pose.orientation.y,
        req.bin_pose.orientation.z, req.bin_pose.orientation.w
    ])
    translation = transformations.translation_matrix([
        req.bin_pose.position.x, req.bin_pose.position.y, req.bin_pose.position.z
    ])
    camera_to_bin = transformations.inverse_matrix(translation.dot(rotation))

    # detect and filter objects based on bin contents and bin pose
    image = self.bridge.imgmsg_to_cv2(req.color)
    scores, boxes = self.classify(image)
    objects, pruned_objects = self.filterDetections(list(req.objects), scores, boxes,
                                                    req.point_cloud, camera_to_bin,
                                                    bin_dimensions)

    # publish markers for debugging
    self.publishMarkers(objects, pruned_objects, req.bin_pose, camera_to_bin,
                        bin_dimensions, req.point_cloud.header)

    # visualize detections if necessary
    if (self.visualize):
        for object in objects:
            box = object.box
            class_name = apc16delft_msgs.objects.objectTypeToString[object.object.type]
            cv2.rectangle(image, (box.x1, box.y1), (box.x2, box.y2), (0, 255, 0), 4)
            cv2.putText(image, class_name + " " + str(object.score),
                        (int(box.x1 + 10), int(box.y1 + 30)),
                        cv2.FONT_HERSHEY_PLAIN, 2.0, (0, 0, 255), 2)
        self.detection_result_publisher.publish(
            self.bridge.cv2_to_imgmsg(image, encoding="bgr8"))

    # set output for service call
    response = DetectObjectsResponse()
    response.objects = objects
    return response
def clickedCB(data):
    global askedMarkers
    global listener
    global worldTransform
    global measuredMarkers
    global clickNum
    print 'clicked'
    try:
        (tipTrans, tipRot) = listener.lookupTransform('/mocha_world', '/tip', rospy.Time(0))
        measuredMarkers[clickNum, :] = tipTrans
        clickNum = clickNum + 1
        if clickNum == 8:
            clickNum = 0
            print 'finished clicking'
            print 'AskedMarkers:'
            print askedMarkers
            print 'measured markers:'
            print measuredMarkers
            (worldRotupdate, worldTransUpdate) = rigid_transform_3D(measuredMarkers, askedMarkers)
            worldRot4 = np.identity(4)
            for i in range(3):
                worldRot4[(0, 1, 2), i] = worldRotupdate[:, i]
            worldTransformUpdate = TR.compose_matrix(translate=worldTransUpdate,
                                                     angles=TR.euler_from_matrix(worldRot4))
            worldTransformationUpdateInv = TR.inverse_matrix(worldTransformUpdate)
            worldTransform = np.dot(worldTransformationUpdateInv, worldTransform)
    except Exception:  # originally `except ():`, which catches nothing
        pass
def __init__(self):
    self.rate = rospy.Rate(120)  # 120 Hz
    self.tf_listener = tf.TransformListener()
    self.initTransform = self.getBaseTransform()
    self.initTransformInverse = transf.inverse_matrix(self.initTransform)
    self.basePub = rospy.Publisher('/base_controller/command', Twist)
    rospy.sleep(1)
def inverse_transform(transform):
    """
    Get the inverse of a tf transform.

    Get frame 2 -> frame 1 from frame 1 -> frame 2.

    :param transform: Transform from frame 1 -> frame 2
    :type transform: geometry_msgs/Transform
    :return: Transform from frame 2 -> frame 1
    :rtype: geometry_msgs/Transform
    """
    tmat = translation_matrix((transform.translation.x, transform.translation.y,
                               transform.translation.z))
    qmat = quaternion_matrix((transform.rotation.x, transform.rotation.y,
                              transform.rotation.z, transform.rotation.w))
    tf_mat = np.dot(tmat, qmat)
    inv_tf_mat = inverse_matrix(tf_mat)

    inv_transform = Transform()
    inv_transform.translation.x = translation_from_matrix(inv_tf_mat)[0]
    inv_transform.translation.y = translation_from_matrix(inv_tf_mat)[1]
    inv_transform.translation.z = translation_from_matrix(inv_tf_mat)[2]
    inv_transform.rotation = Quaternion(*quaternion_from_matrix(inv_tf_mat))

    return inv_transform
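# A quick way to sanity-check an inverse like this is to compose the original
# and inverted transforms and confirm the product is (numerically) the
# identity. This is only a sketch: it assumes geometry_msgs and
# tf.transformations are available and that inverse_transform (above) is
# importable; the numbers are made up for illustration.
import numpy as np
from geometry_msgs.msg import Transform
from tf.transformations import translation_matrix, quaternion_matrix

fwd = Transform()
fwd.translation.x, fwd.translation.y, fwd.translation.z = 1.0, 2.0, 0.5
# 90-degree yaw as a quaternion (x, y, z, w)
fwd.rotation.x, fwd.rotation.y, fwd.rotation.z, fwd.rotation.w = 0.0, 0.0, 0.7071068, 0.7071068

inv = inverse_transform(fwd)

def to_matrix(t):
    # homogeneous matrix from a geometry_msgs/Transform
    return np.dot(
        translation_matrix((t.translation.x, t.translation.y, t.translation.z)),
        quaternion_matrix((t.rotation.x, t.rotation.y, t.rotation.z, t.rotation.w)))

# forward followed by inverse should be (close to) the identity
assert np.allclose(np.dot(to_matrix(fwd), to_matrix(inv)), np.eye(4), atol=1e-6)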
def run(self):
    res_1 = least_squares(self.obj_func, self.x0)

    print '--original--'
    trans = self.x0[0:3]
    rod = self.x0[3:6]
    q = rod_to_quad(rod)
    print 'pose', list(trans) + list(q)

    print '\n--optimized--'
    trans = res_1.x[0:3]
    rod = res_1.x[3:6]
    q = rod_to_quad(rod)
    print 'pose', list(trans) + list(q)

    transform = tfm.concatenate_matrices(tfm.translation_matrix(trans),
                                         tfm.quaternion_matrix(q))
    inversed_transform = tfm.inverse_matrix(transform)
    translation = tfm.translation_from_matrix(inversed_transform)
    quaternion = tfm.quaternion_from_matrix(inversed_transform)
    pose = translation.tolist() + quaternion.tolist()
    # print 'webcam_T_robot:', " ".join('%.8e' % x for x in pose), '\n'
    print 'matrix:\n', np.linalg.inv(ik.helper.matrix_from_xyzquat(pose))

    # print 'K: ', [res_1.x[0], 0.0, res_1.x[2], 0.0, res_1.x[1], res_1.x[3], 0.0, 0.0, 1.0]
    # print 'P: ', [res_1.x[0], 0.0, res_1.x[2], 0.0, 0.0, res_1.x[1], res_1.x[3], 0.0, 0.0, 0.0, 1.0, 0.0]
    return res_1.x
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-g', '--gt-path', help='path to ground-truth files',
                        type=str, default='data/')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-r', '--remove-repeated-pose', help='ignore repeated poses',
                       dest='remove_repeat', action='store_true')
    group.add_argument('-k', '--keep-repeated-pose', help='keep repeated poses',
                       dest='remove_repeat', action='store_false')
    parser.add_argument('-t', '--ate-threshold', type=float,
                        help='ATE threshold of correctness (meter)', default=float('inf'))
    parser.add_argument('-o', '--aoe-threshold', type=float,
                        help='AOE threshold of correctness (degree)', default=float('inf'))
    parser.add_argument('-m', '--max-pose-interval',
                        help='consider lost after no pose for such time (sec)',
                        type=float, default=1.)
    parser.add_argument('-f', '--reloc-score-factor',
                        help='a factor to score time for re-localization (sec)',
                        type=float, default=60.)
    parser.add_argument('-s', '--scale', help='find optimal scale',
                        dest='scale', action='store_true')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-p', '--plot', help='plot trajectories',
                       dest='plot', action='store_true')
    group.add_argument('-np', '--no-plot', help='not plot trajectories',
                       dest='plot', action='store_false')
    parser.set_defaults(remove_repeat=True)
    parser.set_defaults(scale=False)
    parser.set_defaults(plot=True)
    args, left = parser.parse_known_args()

    if len(left) < 1:
        parser.print_help(sys.stderr)

    for filename in left:
        info, sequences = parse_input(filename, args.remove_repeat)
        if 'scene' not in info:
            exit('Please add "scene: xxxxx" into the file')
        print('Got %d trajectories of %s:' % (len(sequences), info['scene']))
        gt_file = args.gt_path + '/gt_%s_1.txt' % info['scene']
        gt_info, gts = parse_input(gt_file)
        print('"%s": {\\' % info['scene'])
        for seq in range(1, max(gts.keys()) + 1):
            traj = gts[seq]['traj']
            p0 = tf_values_to_matrix(traj[min(traj.keys())])
            if seq == 1:
                invp1 = transformations.inverse_matrix(p0)
            rpose = tf_matrix_to_values(np.dot(invp1, p0))
            print(' %d: (%s),' % (seq, ','.join(['%f' % v for v in rpose])))
def invertTF(self, trans, rot):
    transform = t.concatenate_matrices(t.translation_matrix(trans),
                                       t.quaternion_matrix(rot))
    inversed_transform = t.inverse_matrix(transform)
    translation = t.translation_from_matrix(inversed_transform)
    quaternion = t.quaternion_from_matrix(inversed_transform)
    return (translation, quaternion)
def publish_state(self, t):
    state_msg = TransformStamped()
    t_ned_imu = tft.translation_matrix(self.model.get_position())
    R_ned_imu = tft.quaternion_matrix(self.model.get_orientation())
    T_ned_imu = tft.concatenate_matrices(t_ned_imu, R_ned_imu)
    # rospy.loginfo("T_ned_imu = \n" + str(T_ned_imu))

    if self.T_imu_vicon is None:
        # grab the static transform from imu to vicon frame from param server:
        frames = rospy.get_param("frames", None)
        ypr = frames['vicon_to_imu']['rotation']
        q_vicon_imu = tft.quaternion_from_euler(*[radians(x) for x in ypr], axes='rzyx')  # xyzw
        R_vicon_imu = tft.quaternion_matrix(q_vicon_imu)
        t_vicon_imu = tft.translation_matrix(frames['vicon_to_imu']['translation'])
        # rospy.loginfo(str(R_vicon_imu))
        # rospy.loginfo(str(t_vicon_imu))
        self.T_vicon_imu = tft.concatenate_matrices(t_vicon_imu, R_vicon_imu)
        self.T_imu_vicon = tft.inverse_matrix(self.T_vicon_imu)
        self.T_enu_ned = tft.euler_matrix(radians(90), 0, radians(180), 'rzyx')
        rospy.loginfo(str(self.T_enu_ned))
        rospy.loginfo(str(self.T_imu_vicon))
        # rospy.loginfo(str(T_vicon_imu))

    # we have /ned -> /imu, need to output /enu -> /vicon:
    T_enu_vicon = tft.concatenate_matrices(self.T_enu_ned, T_ned_imu, self.T_imu_vicon)
    state_msg.header.stamp = t
    state_msg.header.frame_id = "/enu"
    state_msg.child_frame_id = "/simflyer1/flyer_vicon"
    stt = state_msg.transform.translation
    stt.x, stt.y, stt.z = T_enu_vicon[:3, 3]
    stro = state_msg.transform.rotation
    stro.x, stro.y, stro.z, stro.w = tft.quaternion_from_matrix(T_enu_vicon)

    self.pub_state.publish(state_msg)
def invert_transform(msg):
    """
    The purpose of this is to invert a TransformStamped message.

    :type msg: TransformStamped
    :param msg:
    :return: TransformStamped that has been inverted
    """
    trans_mat = translation_matrix([
        msg.transform.translation.x, msg.transform.translation.y,
        msg.transform.translation.z
    ])
    quat_mat = quaternion_matrix([
        msg.transform.rotation.x, msg.transform.rotation.y,
        msg.transform.rotation.z, msg.transform.rotation.w
    ])
    homogenous_transformation = concatenate_matrices(trans_mat, quat_mat)
    inverse_transformation = inverse_matrix(homogenous_transformation)
    translation = translation_from_matrix(inverse_transformation)
    quaternion = quaternion_from_matrix(inverse_transformation)

    t = TransformStamped()
    t.header = msg.header
    t.child_frame_id = msg.child_frame_id
    t.transform.translation.x = translation[0]
    t.transform.translation.y = translation[1]
    t.transform.translation.z = translation[2]
    t.transform.rotation.x = quaternion[0]
    t.transform.rotation.y = quaternion[1]
    t.transform.rotation.z = quaternion[2]
    t.transform.rotation.w = quaternion[3]
    return t
def get_matching_ifco_corner(ifco_in_base_transform, ec_frame):
    # transforms points in base frame to ifco frame
    base_in_ifco_transform = tra.inverse_matrix(ifco_in_base_transform)

    # ec (corner) z-axis in ifco frame
    ec_z_axis = base_in_ifco_transform.dot(ec_frame)[0:3, 2]

    # we can't check for zero because of small errors in the frame (due to vision or numerical uncertainty)
    space_thresh = 0.0  # 0.1

    if ec_z_axis.dot(np.array([1, 0, 0])) > space_thresh and ec_z_axis.dot(np.array([0, 1, 0])) > space_thresh:
        print("GET MATCHING=SOUTH_EAST", pu.tf_dbg_call_to_string(ec_frame, frame_name='ifco_southeast'))
        return 'south', 'east'
    elif ec_z_axis.dot(np.array([1, 0, 0])) > space_thresh and ec_z_axis.dot(np.array([0, 1, 0])) < -space_thresh:
        print("GET MATCHING=SOUTH_WEST", pu.tf_dbg_call_to_string(ec_frame, frame_name='ifco_southwest'))
        return 'south', 'west'
    elif ec_z_axis.dot(np.array([1, 0, 0])) < -space_thresh and ec_z_axis.dot(np.array([0, 1, 0])) < -space_thresh:
        print("GET MATCHING=NORTH_WEST", pu.tf_dbg_call_to_string(ec_frame, frame_name='ifco_northwest'))
        return 'north', 'west'
    elif ec_z_axis.dot(np.array([1, 0, 0])) < -space_thresh and ec_z_axis.dot(np.array([0, 1, 0])) > space_thresh:
        print("GET MATCHING=NORTH_EAST", pu.tf_dbg_call_to_string(ec_frame, frame_name='ifco_northeast'))
        return 'north', 'east'
    else:
        # This should never be reached. Just here to prevent bugs
        raise ValueError("ERROR: Could not identify matching ifco corner. Check frames!")
def turnRadians(angle_radians, angular_vel, clockwise):
    twist = Twist()
    cmd_vel = rospy.Publisher("cmd_vel_mux/input/teleop", Twist, queue_size=10)
    listener = TransformListener()
    listener.waitForTransform("/base_link", "/odom", rospy.Time(0), rospy.Duration(1))
    (start_t, start_r) = listener.lookupTransform("/base_link", "/odom", rospy.Time(0))
    start_transform = t.concatenate_matrices(t.translation_matrix(start_t),
                                             t.quaternion_matrix(start_r))

    if clockwise:
        twist.angular.z = -abs(angular_vel)
    else:
        twist.angular.z = abs(angular_vel)

    rate = rospy.Rate(10)
    done = False
    while not done:
        cmd_vel.publish(twist)
        rate.sleep()
        (curr_t, curr_r) = listener.lookupTransform("/base_link", "/odom", rospy.Time(0))
        current_transform = t.concatenate_matrices(t.translation_matrix(curr_t),
                                                   t.quaternion_matrix(curr_r))
        relative = numpy.dot(t.inverse_matrix(start_transform), current_transform)
        rot_moved, dire, point = t.rotation_from_matrix(relative)
        print("angle=%s, moved=%s,stop=%s" %
              (str(angle_radians), str(rot_moved), str(rot_moved / 2 > angle_radians)))
        if abs(rot_moved) > abs(angle_radians):
            done = True
            break
    return done, "Done!"
def tf_from_pose(self, pose_msg):
    # Update message timestamp
    self.tf_msg.header.stamp = rospy.Time.now()

    # Update translation
    self.tf_msg.transform.translation.x = pose_msg.position.x
    self.tf_msg.transform.translation.y = pose_msg.position.y
    self.tf_msg.transform.translation.z = pose_msg.position.z

    # Update rotation
    self.tf_msg.transform.rotation.x = pose_msg.orientation.x
    self.tf_msg.transform.rotation.y = pose_msg.orientation.y
    self.tf_msg.transform.rotation.z = pose_msg.orientation.z
    self.tf_msg.transform.rotation.w = pose_msg.orientation.w

    if sum([self.tf_msg.transform.rotation.x, self.tf_msg.transform.rotation.y,
            self.tf_msg.transform.rotation.z, self.tf_msg.transform.rotation.w]) != 0.0:
        if self.invert == "True":
            # Create transformerROS and add our transform
            transformer = TransformerROS()
            transformer.setTransform(self.tf_msg)

            # Lookup the transform
            (trans, rot) = transformer.lookupTransform(self.tf_msg.header.frame_id,
                                                       self.tf_msg.child_frame_id,
                                                       rospy.Time(0))

            # Create transform object
            transform = tft.concatenate_matrices(tft.translation_matrix(trans),
                                                 tft.quaternion_matrix(rot))

            # Invert
            inverse_tf = tft.inverse_matrix(transform)

            # Get translation, rotation vectors back out
            translation = tft.translation_from_matrix(inverse_tf)
            quaternion = tft.quaternion_from_matrix(inverse_tf)

            # Get RPY
            rpy = tft.euler_from_quaternion(quaternion)
            rpy_new = [rpy[0], rpy[1], -rpy[2]]

            # Back to quaternion
            quaternion = tft.quaternion_from_euler(rpy_new[0], rpy_new[1], rpy_new[2])

            # Update translation
            self.tf_msg.transform.translation.x = translation[0]
            self.tf_msg.transform.translation.y = -translation[1]
            self.tf_msg.transform.translation.z = translation[2]

            # Update rotation
            self.tf_msg.transform.rotation.x = quaternion[0]
            self.tf_msg.transform.rotation.y = quaternion[1]
            self.tf_msg.transform.rotation.z = quaternion[2]
            self.tf_msg.transform.rotation.w = quaternion[3]

        # Publish transform
        self.broadcaster.sendTransform(self.tf_msg)
def get_Tpose(self):
    if self.options.listen:
        if not self.input_pose:
            return None, None, None, None  # match the four-value return below
        stamp = self.input_pose.header.stamp
        if not self.options.frame:
            frame_id = self.input_pose.header.frame_id
        else:
            frame_id = self.options.frame
        pose = self.input_pose.pose
        p = tft.translation_matrix([pose.position.x, pose.position.y, pose.position.z])
        rot = tft.quaternion_matrix([pose.orientation.x, pose.orientation.y,
                                     pose.orientation.z, pose.orientation.w])
        Tinput_pose = dot(p, rot)
        if self.options.invert_input_pose:
            Tinput_pose = tft.inverse_matrix(Tinput_pose)
            frame_id = self.options.frame
        Tpose = dot(Tinput_pose, self.get_Tmatrix())
    else:
        frame_id = self.options.frame
        stamp = rospy.Time.now()
        Tpose = self.get_Tmatrix()

    if self.options.invert_output:
        Tpose = tft.inverse_matrix(Tpose)

    if self.options.tf:
        if self.options.invert_tf:
            from_frame = self.options.tf
            to_frame = frame_id
        else:
            from_frame = frame_id
            to_frame = self.options.tf
    else:
        from_frame = None
        to_frame = None
    frames = (frame_id, from_frame, to_frame)

    if self.options.invert_tf:
        Ttf = tft.inverse_matrix(Tpose)
    else:
        Ttf = Tpose

    return Tpose, Ttf, frames, stamp
def transformBack(tf_xyzquat, pose):
    T_mat = tfm.concatenate_matrices(tfm.translation_matrix(tf_xyzquat[0:3]),
                                     tfm.quaternion_matrix(tf_xyzquat[3:7]))
    pose_mat = tfm.concatenate_matrices(tfm.translation_matrix(pose[0:3]),
                                        tfm.quaternion_matrix(pose[3:7]))
    new_pose_mat = np.dot(pose_mat, tfm.inverse_matrix(T_mat))
    return tfm.translation_from_matrix(new_pose_mat).tolist() + \
        tfm.quaternion_from_matrix(new_pose_mat).tolist()
def odometry_callback(self, msg):
    world_T_t265 = None
    odom_T_t265 = None

    # self.listener.waitForTransform(world_frame, t265_world_frame, rospy.Time(0), rospy.Duration(1))
    # self.listener.waitForTransform(odom_frame, t265_odom_frame, rospy.Time(0), rospy.Duration(1))

    # See the tf.transformations library documentation:
    # http://docs.ros.org/en/jade/api/tf/html/python/transformations.html
    try:
        # (trans, rot) = self.listener.lookupTransform(self.world_frame, self.t265_world_frame, rospy.Time(0))
        T = self.tfBuffer.lookup_transform(self.world_frame, self.t265_world_frame, rospy.Time())
        trans = (T.transform.translation.x, T.transform.translation.y, T.transform.translation.z)
        rot = (T.transform.rotation.x, T.transform.rotation.y,
               T.transform.rotation.z, T.transform.rotation.w)
        world_T_t265 = tf.concatenate_matrices(tf.translation_matrix(trans),
                                               tf.quaternion_matrix(rot))
        # t265_T_world = tf.inverse_matrix(world_T_t265)
    except (LookupException, ConnectivityException, ExtrapolationException):
        rospy.logwarn("OdomGlue: Could not lookup world to T265 transform")
        return

    try:
        # (trans, rot) = self.listener.lookupTransform(self.odom_frame, self.t265_odom_frame, rospy.Time(0))
        T = self.tfBuffer.lookup_transform(self.odom_frame, self.t265_odom_frame, rospy.Time())
        trans = (T.transform.translation.x, T.transform.translation.y, T.transform.translation.z)
        rot = (T.transform.rotation.x, T.transform.rotation.y,
               T.transform.rotation.z, T.transform.rotation.w)
        odom_T_t265 = tf.concatenate_matrices(tf.translation_matrix(trans),
                                              tf.quaternion_matrix(rot))
        t265_T_odom = tf.inverse_matrix(odom_T_t265)
    except (LookupException, ConnectivityException, ExtrapolationException):
        rospy.logwarn("OdomGlue: Could not lookup odom to T265 transform")
        return

    world_T_odom = tf.concatenate_matrices(world_T_t265, t265_T_odom)

    translation = (0.0, 0.0, 0.0)
    rotation = (0.0, 0.0, 0.0, 1.0)
    try:
        self.broadcaster.sendTransform(tf.translation_from_matrix(world_T_odom),
                                       tf.quaternion_from_matrix(world_T_odom),
                                       rospy.Time.now(),
                                       self.odom_frame,
                                       self.world_frame)
    except:
        pass
def cb_initial(self, msg):
    trans = [msg.pose.pose.position.x, msg.pose.pose.position.y, msg.pose.pose.position.z]
    rot = [msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
           msg.pose.pose.orientation.z, msg.pose.pose.orientation.w]
    print("Re-Initializing pose to: ", trans, rot)

    # Calculate BASE_LINK to MAP transform
    transform = t.concatenate_matrices(t.translation_matrix(trans), t.quaternion_matrix(rot))
    inversed_transform = t.inverse_matrix(transform)
    inv_trans = t.translation_from_matrix(inversed_transform)
    inv_rot = t.quaternion_from_matrix(inversed_transform)

    base_to_map_pose = PoseStamped()
    base_to_map_pose.header.stamp = msg.header.stamp
    base_to_map_pose.header.frame_id = "base_link"
    base_to_map_pose.pose.position.x, base_to_map_pose.pose.position.y, base_to_map_pose.pose.position.z = inv_trans
    base_to_map_pose.pose.orientation.x, base_to_map_pose.pose.orientation.y, base_to_map_pose.pose.orientation.z, base_to_map_pose.pose.orientation.w = inv_rot

    self.listener.waitForTransform("odom", "base_link", msg.header.stamp, rospy.Duration(0.5))
    map_odom = self.listener.transformPose('odom', base_to_map_pose)

    # Calculate MAP to ODOM transform
    trans_odom = [map_odom.pose.position.x, map_odom.pose.position.y, map_odom.pose.position.z]
    rot_odom = [map_odom.pose.orientation.x, map_odom.pose.orientation.y,
                map_odom.pose.orientation.z, map_odom.pose.orientation.w]
    transform_odom = t.concatenate_matrices(t.translation_matrix(trans_odom),
                                            t.quaternion_matrix(rot_odom))
    inversed_transform_odom = t.inverse_matrix(transform_odom)
    inv_trans_odom = t.translation_from_matrix(inversed_transform_odom)
    inv_rot_odom = t.quaternion_from_matrix(inversed_transform_odom)

    map_to_odom = TransformStamped()
    map_to_odom.header.stamp = rospy.Time.now()
    map_to_odom.header.frame_id = 'map'
    map_to_odom.child_frame_id = 'odom'
    map_to_odom.transform.translation.x, map_to_odom.transform.translation.y, map_to_odom.transform.translation.z = inv_trans_odom
    map_to_odom.transform.rotation.x, map_to_odom.transform.rotation.y, map_to_odom.transform.rotation.z, map_to_odom.transform.rotation.w = inv_rot_odom

    br = tf2_ros.StaticTransformBroadcaster()
    br.sendTransform(map_to_odom)
def _inverse_tuples(t):
    trans, rot = t
    euler = tr.euler_from_quaternion(rot)
    m = tr.compose_matrix(translate=trans, angles=euler)
    m_inv = tr.inverse_matrix(m)
    trans_inv = tr.translation_from_matrix(m_inv)
    rot_inv = tr.rotation_matrix(*tr.rotation_from_matrix(m_inv))
    quat_inv = tr.quaternion_from_matrix(rot_inv)
    return (trans_inv, quat_inv)
def pose_callback(self, msg):
    # print 'got msg'
    frame_id = msg.header
    pose = msg.pose
    p = tft.translation_matrix([pose.position.x, pose.position.y, pose.position.z])
    rot = tft.quaternion_matrix([pose.orientation.x, pose.orientation.y,
                                 pose.orientation.z, pose.orientation.w])
    Tcam_to_cb = dot(p, rot)
    # print 'Tcam_to_cb', Tcam_to_cb

    # Tworld_to_cb = dot(tft.translation_matrix([self.x_offset, self.y_offset, self.z_offset]),
    #                    tft.euler_matrix(0, 0, pi/2))
    Tworld_to_cb = dot(tft.translation_matrix([self.x_offset, self.y_offset, self.z_offset]),
                       array(tb_to_mat(self.yaw, self.pitch, self.roll)))
    # print 'Tworld_to_cb', Tworld_to_cb
    if self.invert_tf:
        Tworld_to_cb = tft.inverse_matrix(Tworld_to_cb)

    Tworld_to_cam = dot(Tworld_to_cb, tft.inverse_matrix(Tcam_to_cb))
    # print 'Tworld_to_cam', Tworld_to_cam
    Tworld_to_cam_p = Tworld_to_cam[0:3, 3]
    Tworld_to_cam_q = tft.quaternion_from_matrix(Tworld_to_cam)

    pub_msg = PoseStamped()
    pub_msg.header.stamp = msg.header.stamp
    pub_msg.header.frame_id = '/world'
    pub_msg.pose.position = Point(*(Tworld_to_cam_p.tolist()))
    pub_msg.pose.orientation = Quaternion(*(Tworld_to_cam_q.tolist()))
    # print pub_msg
    self.pub.publish(pub_msg)

    pub_cb_msg = PoseStamped()
    pub_cb_msg.header.stamp = msg.header.stamp
    pub_cb_msg.header.frame_id = '/world'
    pub_cb_msg.pose.position = Point(*(Tworld_to_cb[0:3, 3].tolist()))
    pub_cb_msg.pose.orientation = Quaternion(*(tft.quaternion_from_matrix(Tworld_to_cb).tolist()))
    self.pub_cb.publish(pub_cb_msg)

    self.transform = Tworld_to_cam
    self.transform_frame = msg.header.frame_id
    self.br.sendTransform(Tworld_to_cam_p, Tworld_to_cam_q, msg.header.stamp,
                          self.transform_frame, '/world')
def update_angles(self, icr_y, icr_p, icr_r):
    y = icr_y * self.angle_increment
    p = icr_p * self.angle_increment
    r = icr_r * self.angle_increment
    if not self.invert_icrement:
        self.yaw += y
        self.pitch += p
        self.roll += r
    else:
        T = tft.inverse_matrix(array(tb_to_tf(y, p, r)))
        self.set_from_matrix(dot(T, self.get_Tmatrix()))
def update_pos(self, icr_x, icr_y, icr_z):
    x = icr_x * self.pos_increment
    y = icr_y * self.pos_increment
    z = icr_z * self.pos_increment
    if not self.invert_icrement:
        self.x_offset += x
        self.y_offset += y
        self.z_offset += z
    else:
        p = tft.inverse_matrix(tft.translation_matrix([x, y, z]))
        self.set_from_matrix(dot(self.get_Tmatrix(), p))
def cog2baselinkCallback(self, msg):
    t = msg.transform
    t_np = ros_numpy.numpify(t)
    t_np = tft.inverse_matrix(t_np)

    ts = TransformStamped()
    ts.header.stamp = msg.header.stamp
    ts.header.frame_id = 'fc'
    ts.child_frame_id = 'cog'
    ts.transform = ros_numpy.msgify(Transform, t_np)
    self.br.sendTransform(ts)
def readMap_cb(data):
    objectmap = data
    map_w = data.info.width
    map_h = data.info.height
    map_scale = data.info.resolution
    map_origin_x = data.info.origin.position.x
    map_origin_y = data.info.origin.position.y

    bag = rosbag.Bag(sys.argv[1])
    bag2 = rosbag.Bag(sys.argv[2])
    tf_static = bag.read_messages(topics=['/tf_static', 'tf2_msgs/TFMessage'])
    vrpn_robot = bag2.read_messages(topics=['/vrpn_client_node/' + robot + '/pose',
                                            'geometry_msgs/PoseStamped'])
    moveb_result = bag.read_messages(topics=['/move_base/result',
                                             'move_base_msgs/MoveBaseActionResult'])

    prev_tt = rospy.Time(0)  # previous time stamp
    thr_dt = rospy.Duration(10)  # threshold between each move base result timestamp in seconds
    tt_vector = np.array([])

    for topic, msg, t in tf_static:
        # we invert the matrix to get world to map
        rot_w2m = msg.transforms[0].transform.rotation
        tras_w2m = msg.transforms[0].transform.translation
        m2w = tff.concatenate_matrices(
            tff.translation_matrix([tras_w2m.x, tras_w2m.y, tras_w2m.z]),
            tff.quaternion_matrix([rot_w2m.x, rot_w2m.y, rot_w2m.z, rot_w2m.w]))
        w2m = tff.inverse_matrix(m2w)
        world2map = TransformStamped()
        world2map.transform.rotation = rot_w2m
        world2map.transform.rotation.w = -1 * rot_w2m.w  # inverse rotation matrix just using quaternions
        world2map.transform.translation.x = w2m[0][3]
        world2map.transform.translation.y = w2m[1][3]
        world2map.transform.translation.z = w2m[2][3]

    ii = 1  # index for saved time stamp into the vector tt_vector
    thr_dt = rospy.Duration(0.001)  # threshold between time stamps saved from move_base and poses

    for topic, msg, t in vrpn_robot:
        robot_w = msg
        # transform robot pose from world ref to map ref
        robot_m = tf2_geometry_msgs.do_transform_pose(robot_w, world2map)
        robot_px_m = robot_m.pose.position.x / map_scale  # robot pose in map image coordinates
        robot_py_m = robot_m.pose.position.y / map_scale
        robot_px_mc = robot_px_m + dx / 2 - cx  # robot pose in map cropped image coordinates
        robot_py_mc = robot_py_m + dy / 2 - cy
        # print robot_px_mc, robot_py_mc, msg.header.stamp  # all timestamps
        f = open(wp_dump_file + str(ii - 1) + str(ii) + robot + '.txt', 'a+')
        f.write(str(str(robot_px_mc) + "," + str(robot_py_mc) + "," + str(msg.header.stamp) + "\n"))
        f.close()

    bag.close()
    sub_once.unregister()
    rospy.signal_shutdown("reason")
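# Why flipping only the sign of w works in the callback above: for a unit
# quaternion q = (x, y, z, w) the inverse is (-x, -y, -z, w), and (x, y, z, -w)
# is just its negation -- and q and -q encode the same rotation. The
# translation still has to be recomputed from the inverted matrix (as the code
# does with w2m). A minimal check, assuming tf.transformations is available:
import numpy as np
from tf.transformations import quaternion_from_euler, quaternion_inverse, quaternion_matrix

q = quaternion_from_euler(0.3, -0.2, 1.1)          # some unit quaternion (x, y, z, w)
q_flipped_w = np.array([q[0], q[1], q[2], -q[3]])  # the trick used above

assert np.allclose(quaternion_matrix(q_flipped_w),
                   quaternion_matrix(quaternion_inverse(q)))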
def publish_relative_frames2(self, data):
    tmp_dict = {}
    for segment in data.segments:
        if segment.is_quaternion:
            tmp_dict[segment.id] = (segment.position, segment.quat,
                                    self.tr.fromTranslationRotation(segment.position, segment.quat))
        else:
            tmp_dict[segment.id] = (segment.position, segment.euler)

    for idx in tmp_dict.keys():
        p_idx = xsens_client.get_segment_parent_id(idx)
        child_data = tmp_dict[idx]
        if p_idx == -1:
            continue
        elif p_idx == 0:
            helper = tft.quaternion_about_axis(1, (-1, 0, 0))
            new_quat = tft.quaternion_multiply(tft.quaternion_inverse(helper), child_data[1])  # if(segment.is_quaternion): TODO Handle Euler
            # self.sendTransform(child_data[0],
            self.sendTransform(child_data[0],
                               child_data[1],  # (1.0, 0, 0, 0),  # FIXME
                               rospy.Time.now(),
                               xsens_client.get_segment_name(idx),
                               self.ref_frame)
        elif p_idx > 0:
            parent_data = tmp_dict[p_idx]
            new_quat = tft.quaternion_multiply(tft.quaternion_inverse(parent_data[1]), child_data[1])
            tmp_trans = (parent_data[0][0] - child_data[0][0],
                         parent_data[0][1] - child_data[0][1],
                         parent_data[0][2] - child_data[0][2])
            new_trans = qv_mult(parent_data[1], tmp_trans)
            self.sendTransform(new_trans,
                               new_quat,
                               rospy.Time.now(),
                               xsens_client.get_segment_name(idx),
                               xsens_client.get_segment_name(p_idx))
        else:
            parent_data = tmp_dict[p_idx]
            this_data = np.multiply(tft.inverse_matrix(child_data[2]), parent_data[2])
            self.sendTransform(tft.translation_from_matrix(this_data),
                               tft.quaternion_from_matrix(this_data),
                               rospy.Time.now(),
                               xsens_client.get_segment_name(idx),
                               xsens_client.get_segment_name(p_idx))
def calculate_bridge_and_base_pose(self, table_state, phi):
    # Calculate bridge pose from cue position and phi
    phi_radians = math.pi * phi / 180.0
    cue_x, cue_y = self.get_cue_ball_pos(table_state)
    bridge_x, bridge_y = self.get_bridge_pos(cue_x, cue_y, phi)
    (qx, qy, qz, qw) = transformations.quaternion_about_axis(phi_radians, (0, 0, 1))
    bridge_pos = transformations.translation_matrix([bridge_x, bridge_y, 0.0])
    bridge_orient = transformations.quaternion_matrix([qx, qy, qz, qw])
    bridge_pose = transformations.concatenate_matrices(bridge_pos, bridge_orient)
    bridge_pose_pos = transformations.translation_from_matrix(bridge_pose)
    bridge_pose_orient = transformations.quaternion_from_matrix(bridge_pose)

    # Produce base pose via fixed transform from bridge pose
    bridge_to_base_trans = transformations.translation_matrix(
        [Constants.BRIDGE_IN_BASE_X, Constants.BRIDGE_IN_BASE_Y, Constants.BRIDGE_IN_BASE_Z])
    bridge_to_base_rot = transformations.quaternion_matrix(
        [Constants.BRIDGE_IN_BASE_QX, Constants.BRIDGE_IN_BASE_QY,
         Constants.BRIDGE_IN_BASE_QZ, Constants.BRIDGE_IN_BASE_QW])
    bridge_to_base = transformations.concatenate_matrices(bridge_to_base_trans, bridge_to_base_rot)
    base_to_bridge = transformations.inverse_matrix(bridge_to_base)
    base_pose = transformations.concatenate_matrices(bridge_pose, base_to_bridge)  # correct order?
    base_pose_pos = transformations.translation_from_matrix(base_pose)
    base_pose_orient = transformations.quaternion_from_matrix(base_pose)

    print "BASE_POSE", base_pose, "BRIDGE_POSE", bridge_pose
    return ((bridge_pose_pos, bridge_pose_orient), (base_pose_pos, base_pose_orient))
def processAprilTags(self, msg):
    # Only process if the wheels_cmd hasn't changed since last time and the last time < cur time
    if self.wheelsCmdAlmostEqual(self.wheels_cmd, self.wheels_cmd_prev) and msg.header.stamp > self.timestamp_prev:
        deltas = []
        for tag in msg.detections:
            if tag.id in self.april_tags_prev:
                Ta_r1 = self.geometryTransformToMatrix(self.april_tags_prev[tag.id].transform)
                Ta_r2 = self.geometryTransformToMatrix(tag.transform)
                Tr2_r1 = np.dot(Ta_r1, tr.inverse_matrix(Ta_r2))
                # Get the angle, dx, and dy
                theta = tr.euler_from_matrix(Tr2_r1)[2]
                dx, dy = Tr2_r1[0:2, 3]
                deltas.append([theta, dx, dy])
        if deltas:
            # We found some matches, average them, and print
            deltas = np.mean(np.array(deltas), 0).tolist()
            cmd = [self.wheels_cmd_prev.vel_left, self.wheels_cmd_prev.vel_right,
                   (msg.header.stamp - self.timestamp_prev).to_sec()]
            sample = cmd + deltas
            rospy.loginfo(sample)
            f = open(self.filename, 'a+')
            np.savetxt(f, sample, fmt='%-7.8f', newline=" ")
            f.write('\n')
            f.close()
    else:
        err = "backwards time." if msg.header.stamp < self.timestamp_prev else "cmd changed"
        rospy.logwarn("Invalid interval %s", err)

    # Save the tags and wheels_cmd for next time
    for tag in msg.detections:
        self.april_tags_prev[tag.id] = tag
    self.timestamp_prev = msg.header.stamp
    self.wheels_cmd_prev = self.wheels_cmd
sorted_markers = sorted(self.markers_infos.items(),
                        key=lambda x: x[1].update_freq(),
                        reverse=True)
print [(k, v.get_freq()) for (k, v) in sorted_markers]
chosen_marker = None
for marker in sorted_markers:
    next_footprint_tf = marker[1].drift_cor_data[0]
    previous_footprint_tf = self.current_map_to_footprint_tf
    # first marker caught ever
    if previous_footprint_tf is None:
        previous_footprint_tf = next_footprint_tf
    change_tf = np.dot(next_footprint_tf, tfmath.inverse_matrix(previous_footprint_tf))
    change_tf_tran, change_tf_rot = util.tran_and_euler_from_matrix(change_tf)
    time_diff = (rospy.Time.now() - self.last_update_timestamp).to_sec()
    distance = util.length(change_tf_tran)
    angle_diff = change_tf_rot[2]
    rospy.loginfo("Looking at marker %s", marker[0])
    rospy.loginfo("\t{0:.3f} m, {1:.3f} radians".format(distance, angle_diff))
    '''
    lin_velocity = distance / time_diff
    ang_velocity = angle_diff / time_diff
    rospy.loginfo("\t{0:.3f} > {1:.3f}, diff: {2:.3f}".format(lin_velocity, self.max_lin_vel, lin_velocity - self.max_lin_vel))
    rospy.loginfo("\t{0:.3f} > {1:.3f}, diff: {2:.3f}".format(ang_velocity, self.max_ang_vel, ang_velocity - self.max_ang_vel))
def getInverseMatrix(self):
    return inverse_matrix(self._matrix)
def setInverseMatrix(self, matrix):
    self._matrix = inverse_matrix(matrix)
    return self
def stat_poses(csv_file):
    print('%s %s %s' % ('>' * 20, csv_file, '<' * 20))

    df = pandas.DataFrame.from_csv(csv_file, index_col=None)

    # Compute average of transformation matrix
    # ========================================
    n_fused = 0
    matrix_fused = None  # average matrix from base -> checkerboard pose
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        if n_fused == 0:
            matrix_fused = matrix_pose
            n_fused += 1
            continue

        # Fuse transformation matrix
        # --------------------------
        # base -> pointFused (fused calibboard pose)
        # matrix_fused

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))

        # weight of the pointIn is 1 / (n_fused + 1)
        weight_of_new_point = 1. / (n_fused + 1)

        # midpoint of rotation
        angle, direction, point = tff.rotation_from_matrix(matrix_rel)
        matrix_rel_rot_mid = tff.rotation_matrix(angle * weight_of_new_point, direction, point)

        # midpoint of translation
        trans_rel = tff.translation_from_matrix(matrix_rel)
        matrix_rel_pos_mid = tff.translation_matrix([x * weight_of_new_point for x in trans_rel])

        # relative transformation for midpoint from pointFused
        matrix_rel_mid = matrix_rel_pos_mid.dot(matrix_rel_rot_mid)

        # base -> pointFused_new
        matrix_fused = matrix_rel_mid.dot(matrix_fused)
        n_fused += 1
    # -------------------------------------------------------------------------
    assert n_fused == len(df)

    print('%s Average %s' % ('-' * 30, '-' * 30))
    print('N fused: %d' % n_fused)
    print('Matrix: \n%s' % matrix_fused)
    trans = tff.translation_from_matrix(matrix_fused)
    print('Position: {}'.format(trans))
    pos_keys = ['position_%s' % x for x in 'xyz']
    print('Position (simple): {}'.format(df.mean()[pos_keys].values))
    euler = tff.euler_from_matrix(matrix_fused)
    print('Orientation: {}'.format(euler))

    # Compute variance of transformation matrix
    # =========================================
    N = len(df)
    keys = ['x', 'y', 'z', 'angle']
    variance = {k: 0 for k in keys}
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))

        # compute distance for translation/rotation
        delta_x, delta_y, delta_z = tff.translation_from_matrix(matrix_rel)
        delta_angle, _, _ = tff.rotation_from_matrix(matrix_rel)

        variance['x'] += delta_x ** 2
        variance['y'] += delta_y ** 2
        variance['z'] += delta_z ** 2
        variance['angle'] += delta_angle ** 2
    for k in keys:
        variance[k] /= (N - 1)

    print('%s Std Deviation (Variance) %s' % ('-' * 22, '-' * 22))
    for k in keys:
        print('%s: %f (%f)' % (k, variance[k], math.sqrt(variance[k])))
        if k != 'angle':
            print('{} (simple): {}'.format(k, df.std()['position_%s' % k]))
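# The fusion step in stat_poses is an incremental average: each new pose pulls
# the running estimate toward itself by a fraction 1/(n_fused + 1), applied
# separately to the rotation (via angle-axis) and the translation of the
# relative transform. A minimal sketch of one such step, assuming
# tf.transformations is available; the running estimate here is the identity
# and the new pose is a 90-degree yaw, so the fused rotation should come out
# as 45 degrees.
import numpy as np
from tf import transformations as tff

matrix_fused = np.identity(4)                    # current estimate (1 pose fused)
matrix_pose = tff.concatenate_matrices(          # new measurement
    tff.translation_matrix([1.0, 0.0, 0.0]),
    tff.rotation_matrix(np.pi / 2, [0, 0, 1]))

matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))
weight = 1.0 / 2.0                               # 1 / (n_fused + 1)

angle, direction, point = tff.rotation_from_matrix(matrix_rel)
rot_mid = tff.rotation_matrix(angle * weight, direction, point)
pos_mid = tff.translation_matrix(weight * tff.translation_from_matrix(matrix_rel))
matrix_fused = pos_mid.dot(rot_mid).dot(matrix_fused)

# the rotation was averaged to half of 90 degrees
assert np.isclose(tff.euler_from_matrix(matrix_fused)[2], np.pi / 4)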