def _ray2worldPhysicsPos(self, R_pose, neck_angle, ray_combo):
    """Convert the start and end points of the rays whose last cell is occupied
    to world x, y coordinates, based on the neck's angle and the particle's
    position and orientation.

    :input R_pose: (3L,) array representing the pose of a particle (x, y, theta)
           neck_angle: neck (yaw) angle of the head where the lidar is mounted
           ray_combo: (4, N) array of rows [dmin, dmax, last_occu, ray_angle]
    :output (4, M) array [sX, sY, eX, eY]: world x, y positions of the starting
            and ending points of the M rays whose last cell is occupied
    """
    # Rotation matrix that transforms the body's frame to the head's frame (where the lidar is).
    # Only the neck's angle needs to be taken into account here, as the head's (tilt) angle has
    # already been considered when removing the ground from every ray.
    body_to_head_rot = tf.twoDTransformation(0, 0, neck_angle)
    world_to_part_rot = tf.twoDTransformation(R_pose[0], R_pose[1], R_pose[2])

    # Keep only the rays whose last cell is occupied
    [dmin, dmax, last_occu, ray_angle] = ray_combo
    [dmin, dmax, last_occu, ray_angle] = ray_combo[:, last_occu == 1]

    # Physical positions of the end and start points of each ray w.r.t. the head of the robot
    ex_h = dmax * cos(ray_angle)
    ey_h = dmax * sin(ray_angle)
    sx_h = dmin * cos(ray_angle)
    sy_h = dmin * sin(ray_angle)
    e_h = np.vstack((ex_h, ey_h, np.ones_like(ex_h)))
    s_h = np.vstack((sx_h, sy_h, np.ones_like(ex_h)))

    # Transform into the body's frame, then into the world (MAP) frame
    exy1_r = np.dot(body_to_head_rot, e_h)
    sxy1_r = np.dot(body_to_head_rot, s_h)
    [eX, eY, _] = np.dot(world_to_part_rot, exy1_r)
    [sX, sY, _] = np.dot(world_to_part_rot, sxy1_r)

    return np.array([sX, sY, eX, eY])

def _ray2worldPhysicPos(self, R_pose, neck_angle, ray_combo):
    """Convert the ending point of a ray to world x, y coordinates, based on
    the neck's angle and the particle's position and orientation.

    :input R_pose: (3L,) array representing the pose of a particle (x, y, theta)
           neck_angle: neck (yaw) angle of the head where the lidar is mounted
           ray_combo: (4L,) array of the form [dmin, dmax, last_occu, ray_angle]
    :output [eX, eY]: world x, y position of the ending point of the ray, or
            None if the last cell of the ray is not occupied
    """
    # Rotation matrix that transforms the body's frame to the head's frame (where the lidar is).
    # Only the neck's angle needs to be taken into account here, as the head's (tilt) angle has
    # already been considered when removing the ground from every ray.
    body_to_head_rot = tf.twoDTransformation(0, 0, neck_angle)
    world_to_part_rot = tf.twoDTransformation(R_pose[0], R_pose[1], R_pose[2])

    [dmin, dmax, last_occu, ray_angle] = ray_combo
    if last_occu == 0:
        # there is no occupied cell along this ray
        return None

    # Physical position of the ending point of the ray w.r.t. the head of the robot
    ex_h = dmax * cos(ray_angle)
    ey_h = dmax * sin(ray_angle)

    # Transform this point into the body's frame, then rotate it into the MAP (world) frame
    exy1_r = np.dot(body_to_head_rot, np.array([ex_h, ey_h, 1]))
    [eX, eY, _] = np.dot(world_to_part_rot, exy1_r)

    return [eX, eY]

def _ray2world(self, R_pose, ray_combo, unit=1):
    """Convert a ray to world x, y coordinates based on the particle's position
    and orientation.

    :input R_pose: (3L,) array representing the pose of a particle (x, y, theta)
           ray_combo: (4L,) array of the form [dmin, dmax, last_occu, ray_angle]
           unit: how many meters per grid side
    :output [sX, sY, eX, eY]: world x, y positions of the starting and ending
            points of the ray
    """
    world_to_part_rot = tf.twoDTransformation(R_pose[0], R_pose[1], R_pose[2])
    [dmin, dmax, last_occu, ray_angle] = ray_combo

    # Start and end points of the ray in the particle's frame, in grid units
    sx = dmin * cos(ray_angle) / unit
    sy = dmin * sin(ray_angle) / unit
    ex = dmax * cos(ray_angle) / unit
    ey = dmax * sin(ray_angle) / unit

    # Transform both points into the world frame
    [sX, sY, _] = np.dot(world_to_part_rot, np.array([sx, sy, 1]))
    [eX, eY, _] = np.dot(world_to_part_rot, np.array([ex, ey, 1]))

    return [sX, sY, eX, eY]

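# All of the helpers in this file assume that tf.twoDTransformation(x, y, theta)
# returns a 3x3 homogeneous 2D transform. The tf module itself is not shown
# here; the sketch below only illustrates that assumption (the function name is
# hypothetical) and is not the actual implementation.
def _twoDTransformation_sketch(x, y, theta):
    """Hypothetical stand-in for tf.twoDTransformation: rotate a point by theta,
    then translate it by (x, y), in homogeneous coordinates."""
    c, s = np.cos(theta), np.sin(theta)
    return np.array([[c, -s, x],
                     [s,  c, y],
                     [0., 0., 1.]])

# Illustrative check: a ray endpoint 2 m straight ahead of a particle at (1, 0)
# facing +90 degrees should land near (1, 2) in the world frame:
#   T = _twoDTransformation_sketch(1.0, 0.0, np.pi / 2)
#   eX, eY, _ = T @ np.array([2.0, 0.0, 1.0])   # eX ~= 1.0, eY ~= 2.0
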
def _polar_to_cartesian(self, scan, pose=None):
    '''Convert a polar lidar scan to Cartesian x, y coordinates, optionally
    transformed by a pose (x, y, theta).'''
    # Clamp ranges to the lidar's maximum range
    scan[scan > self.lidar_max_] = self.lidar_max_
    lidar_ptx = scan * np.cos(self.lidar_angles_)
    lidar_pty = scan * np.sin(self.lidar_angles_)

    if pose is not None:
        # Move the points from the lidar's frame into the frame given by the pose
        T = tf.twoDTransformation(pose[0], pose[1], pose[2])
        pts = np.vstack((lidar_ptx, lidar_pty, np.ones(lidar_ptx.shape)))
        trans_pts = T @ pts
        lidar_ptx, lidar_pty, _ = trans_pts

    return lidar_ptx, lidar_pty

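# A standalone, hypothetical version of the conversion above, for testing the
# math without the surrounding class; the parameter names (scan, angles,
# max_range, pose) are illustrative, and in the real method the angles and
# maximum range come from self.lidar_angles_ and self.lidar_max_.
def _polar_to_cartesian_sketch(scan, angles, max_range, pose=None):
    scan = np.minimum(scan, max_range)   # clamp ranges to the maximum range
    ptx = scan * np.cos(angles)
    pty = scan * np.sin(angles)
    if pose is not None:                 # optionally move into the pose's frame
        T = _twoDTransformation_sketch(pose[0], pose[1], pose[2])
        ptx, pty, _ = T @ np.vstack((ptx, pty, np.ones_like(ptx)))
    return ptx, pty
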
def clean_odom(odom):
    """Clean raw odometry: build millisecond timestamps, extract yaw from the
    quaternion, shift the trajectory so it starts at the origin with zero
    heading, and downsample."""
    # Timestamps in milliseconds, relative to the first sample
    sec = np.array(odom['sec'])
    nsec = np.array(odom['nsec'])
    sec = sec - sec[0]
    msec = nsec / 1000000
    tot_msec = sec * 1000 + msec
    tot_msec = tot_msec.astype(int)

    x = np.array(odom['posx'])
    y = np.array(odom['posy'])

    # Yaw (theta) from the planar quaternion (only the z and w components are non-zero)
    quatz = np.array(odom['quatz'])
    quatw = np.array(odom['quatw'])
    quatx = np.zeros(quatz.shape)
    quaty = np.zeros(quatz.shape)
    quat = np.vstack((quatx, quaty, quatz, quatw)).transpose()
    r = R.from_quat(quat)
    euler = r.as_euler('zyx', degrees=False)
    theta = euler[:, 0]

    # Shift the trajectory so the first pose sits at the origin with zero heading
    pts = np.vstack((x, y, np.ones(x.shape)))
    trans_pts = tf.twoDTransformation(-x[0], -y[0], -theta[0]) @ pts
    x, y = trans_pts[0], trans_pts[1]
    theta = theta - theta[0]

    # Wrap theta back into [-pi, pi]
    angle_range = np.pi
    theta[theta > angle_range] = theta[theta > angle_range] - angle_range * 2
    theta[theta < -angle_range] = theta[theta < -angle_range] + angle_range * 2

    # Downsample by a factor of 50
    tot_msec, x, y, theta = tot_msec[::50], x[::50], y[::50], theta[::50]

    return tot_msec, x, y, theta

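# The yaw extraction and angle wrapping used by clean_odom, pulled out as a
# small self-contained sketch (the function name and signature are hypothetical,
# not part of the original code). It assumes numpy is imported as np and
# scipy.spatial.transform.Rotation is imported as R, as used in clean_odom above.
def _yaw_from_quat_sketch(quatz, quatw):
    """Yaw angles (rad) from planar quaternions (x = y = 0), expressed relative
    to the first sample and wrapped back into [-pi, pi]."""
    quatz = np.asarray(quatz, dtype=float)
    quatw = np.asarray(quatw, dtype=float)
    # Scalar-last (x, y, z, w) quaternions, as expected by Rotation.from_quat
    quat = np.stack([np.zeros_like(quatz), np.zeros_like(quatz), quatz, quatw], axis=1)
    yaw = R.from_quat(quat).as_euler('zyx', degrees=False)[:, 0]
    yaw = yaw - yaw[0]
    yaw[yaw > np.pi] -= 2 * np.pi
    yaw[yaw < -np.pi] += 2 * np.pi
    return yaw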