def _motion_regression_6d(self, pnts, qt, t):
    """
    Compute translational and rotational velocities and accelerations in
    the inertial frame at the target time t.

    [1] Sittel, Florian, Joerg Mueller, and Wolfram Burgard. Computing
        velocities and accelerations from a pose time sequence in
        three-dimensional space. Technical Report 272, University of
        Freiburg, Department of Computer Science, 2013.
    """
    lin_vel = np.zeros(3)
    lin_acc = np.zeros(3)
    q_d = np.zeros(4)
    q_dd = np.zeros(4)

    for i in range(3):
        v, a = self._motion_regression_1d(
            [(pnt['pos'][i], pnt['t']) for pnt in pnts], t)
        lin_vel[i] = v
        lin_acc[i] = a

    for i in range(4):
        v, a = self._motion_regression_1d(
            [(pnt['rot'][i], pnt['t']) for pnt in pnts], t)
        q_d[i] = v
        q_dd[i] = a

    # Keeping all velocities and accelerations in the inertial frame
    ang_vel = 2 * quaternion_multiply(q_d, quaternion_conjugate(qt))
    ang_acc = 2 * quaternion_multiply(q_dd, quaternion_conjugate(qt))

    return np.hstack((lin_vel, ang_vel[0:3])), np.hstack((lin_acc, ang_acc[0:3]))
def generate_sigma_points(self, mean, covariance):
    sigmas = []
    sigmas.append(mean)
    if not check_symmetry(covariance):
        pdb.set_trace()
    temp = pos_sem_def_sqrt(covariance)
    if any(isnan(temp)):
        rospy.logerr("Sqrt matrix contained a NaN. Matrix was %s", covariance)
    temp = temp * sqrt(self.n + self.kf_lambda)
    for i in range(0, self.n):
        # Must use columns in order to get the right thing out of the
        # Cholesky decomposition
        # (http://en.wikipedia.org/wiki/Cholesky_decomposition#Kalman_filters)
        column = temp[:, i].reshape((self.n, 1))
        # Build the noise quaternion
        noise_vec = column[0:3, 0]
        noise_quat = SF9DOF_UKF.build_noise_quat(noise_vec)
        original_quat = SF9DOF_UKF.recover_quat(mean)
        # Do the additive sample
        perturbed_quat = tf_math.quaternion_multiply(noise_quat, original_quat)
        #perturbed_quat = tf_math.unit_vector(perturbed_quat)
        new_mean = mean + column
        new_mean[0:3, 0] = perturbed_quat[0:3, 0]
        sigmas.append(new_mean)
        # Do the subtractive sample
        conj_noise_quat = tf_math.quaternion_conjugate(noise_quat)
        perturbed_quat = tf_math.quaternion_multiply(conj_noise_quat, original_quat)
        #perturbed_quat = tf_math.unit_vector(perturbed_quat)
        new_mean = mean - column
        new_mean[0:3, 0] = perturbed_quat[0:3, 0]
        sigmas.append(new_mean)
    return sigmas
def generate_sigma_points(self, mean, covariance):
    sigmas = []
    sigmas.append(mean)
    temp = numpy.linalg.cholesky(covariance)
    temp = temp * sqrt(self.n + self.kf_lambda)
    for i in range(0, self.n):
        # Must use columns in order to get the right thing out of the
        # Cholesky decomposition
        # (http://en.wikipedia.org/wiki/Cholesky_decomposition#Kalman_filters)
        column = temp[:, i].reshape((self.n, 1))
        # Build the noise quaternion
        noise_vec = column[0:3, 0]
        noise_quat = SF9DOF_UKF.build_noise_quat(noise_vec)
        original_quat = SF9DOF_UKF.recover_quat(mean)
        # Do the additive sample
        perturbed_quat = tf_math.quaternion_multiply(noise_quat, original_quat)
        #perturbed_quat = tf_math.unit_vector(perturbed_quat)
        new_mean = mean + column
        new_mean[0:3, 0] = perturbed_quat[0:3, 0]
        sigmas.append(new_mean)
        # Do the subtractive sample
        conj_noise_quat = tf_math.quaternion_conjugate(noise_quat)
        perturbed_quat = tf_math.quaternion_multiply(conj_noise_quat, original_quat)
        #perturbed_quat = tf_math.unit_vector(perturbed_quat)
        new_mean = mean - column
        new_mean[0:3, 0] = perturbed_quat[0:3, 0]
        sigmas.append(new_mean)
    return sigmas
def qv_mult(self):
    # rotate vector v1 by quaternion q1
    q1 = self.orientation_imu
    q2 = list(
        [self.gps_robot[0], self.gps_robot[1], self.gps_robot[2], 0.0])
    return quaternion_multiply(quaternion_multiply(q1, q2),
                               quaternion_conjugate(q1))
def rotate(v, q):
    """
    Rotate vector v according to quaternion q
    """
    q2 = np.r_[v[0:3], 0.0]
    return transformations.quaternion_multiply(
        transformations.quaternion_multiply(q, q2),
        transformations.quaternion_conjugate(q))[0:3]
def quat_vec_mult(q1, v1):
    """Rotate vector v1 by quaternion q1, and return the resulting vector.

    From https://answers.ros.org/question/196149/how-to-rotate-vector-by-quaternion-in-python/
    """
    q2 = list(v1)
    q2.append(0.0)
    return quaternion_multiply(quaternion_multiply(q1, q2),
                               quaternion_conjugate(q1))[:3]
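# Hedged usage sketch (not from the original source): rotating the x axis by a
# 90-degree yaw with quat_vec_mult() above. Assumes tf.transformations-style
# (x, y, z, w) quaternions and that quaternion_from_euler is available.
import math
from tf.transformations import quaternion_from_euler

q_yaw90 = quaternion_from_euler(0.0, 0.0, math.pi / 2.0)   # 90 deg about z
print(quat_vec_mult(q_yaw90, (1.0, 0.0, 0.0)))             # approximately (0, 1, 0)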
def rotate(v, q, inverse=False):
    """rotate vector v from world to body frame (with quaternion q)"""
    cq = quaternion_conjugate(q)
    if inverse:
        z = quaternion_multiply(q, v)
        return quaternion_multiply(z, cq)
    else:
        z = quaternion_multiply(v, q)
        return quaternion_multiply(cq, z)
def odometry_callback(self, msg):
    """Handle updated measured velocity callback."""
    if not bool(self.config):
        return

    p = msg.pose.pose.position
    q = msg.pose.pose.orientation
    p = numpy.array([p.x, p.y, p.z])
    q = numpy.array([q.x, q.y, q.z, q.w])

    if not self.initialized:
        # If this is the first callback: Store and hold latest pose.
        self.pos_des = p
        self.quat_des = q
        self.initialized = True

    # Compute control output:
    t = msg.header.stamp.to_sec()

    # Position error
    e_pos_world = self.pos_des - p
    e_pos_body = trans.quaternion_matrix(q).transpose()[0:3, 0:3].dot(e_pos_world)

    # Error quaternion wrt body frame
    e_rot_quat = trans.quaternion_multiply(trans.quaternion_conjugate(q),
                                           self.quat_des)

    if numpy.linalg.norm(e_pos_world[0:2]) > 5.0:
        # special case if we are far away from goal:
        # ignore desired heading, look towards goal position
        heading = math.atan2(e_pos_world[1], e_pos_world[0])
        quat_des = numpy.array([0, 0, math.sin(0.5 * heading),
                                math.cos(0.5 * heading)])
        e_rot_quat = trans.quaternion_multiply(trans.quaternion_conjugate(q),
                                               quat_des)

    # Error angles
    e_rot = numpy.array(trans.euler_from_quaternion(e_rot_quat))

    v_linear = self.pid_pos.regulate(e_pos_body, t)
    v_angular = self.pid_rot.regulate(e_rot, t)

    # Convert and publish vel. command:
    cmd_vel = geometry_msgs.Twist()
    cmd_vel.linear = geometry_msgs.Vector3(*v_linear)
    cmd_vel.angular = geometry_msgs.Vector3(*v_angular)
    self.pub_cmd_vel.publish(cmd_vel)
def _yaw_angle_between(qa, qb):
    yaw = transformations.euler_from_quaternion(
        transformations.quaternion_multiply(
            qa, transformations.quaternion_conjugate(qb)))[2]
    if yaw > math.pi / 2:
        yaw -= math.pi
    elif yaw < -math.pi / 2:
        yaw += math.pi
    return yaw
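# Hedged usage sketch (illustrative only): yaw difference between two headings
# using _yaw_angle_between() above, assuming (x, y, z, w) quaternions built with
# tf.transformations.
from tf.transformations import quaternion_from_euler

qa = quaternion_from_euler(0.0, 0.0, 0.3)
qb = quaternion_from_euler(0.0, 0.0, 0.1)
print(_yaw_angle_between(qa, qb))   # approximately 0.2 rad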
def qv_mult(q1, v1):
    '''
    Rotates a vector by a unit quaternion

    @param q1 - the quaternion to rotate the vector by. Tuple (x, y, z, w)
    @param v1 - the vector to be rotated. Tuple (x, y, z)
    @return the rotated vector
    '''
    q2 = v1 + (0.0, )
    return quaternion_multiply(quaternion_multiply(q1, q2),
                               quaternion_conjugate(q1))[:3]
def trans_quat_to_transform((trans, quat)):
    if trans is None or quat is None:
        return None
    # I think Jesse is confused with what is actually stored in the file
    # because his quaternion_to_dcm seems to give the conjugate of the right quaternion
    m = transformations.quaternion_matrix(transformations.quaternion_conjugate(quat))
    m[0, 3] = -trans[0]
    m[1, 3] = -trans[1]
    m[2, 3] = -trans[2]
    return m
def qv_mult(quaternion, vector):
    """
    Transforms a vector by a quaternion

    :param quaternion: Quaternion
    :type quaternion: list
    :param vector: vector
    :type vector: list
    :return: transformed vector
    :type: list
    """
    q = quaternion
    v = [vector[0], vector[1], vector[2], 0]
    return quaternion_multiply(quaternion_multiply(q, v),
                               quaternion_conjugate(q))[:-1]
def _motion_regression_6d(self, pnts, qt, t):
    """
    Compute translational and rotational velocities and accelerations in
    the inertial frame at the target time t.

    [1] Sittel, Florian, Joerg Mueller, and Wolfram Burgard. Computing
        velocities and accelerations from a pose time sequence in
        three-dimensional space. Technical Report 272, University of
        Freiburg, Department of Computer Science, 2013.
    """
    lin_vel = np.zeros(3)
    ang_vel = np.zeros(4)
    lin_acc = np.zeros(3)
    ang_acc = np.zeros(4)

    q_d = np.zeros(4)
    q_dd = np.zeros(4)

    for i in range(3):
        v, a = self._motion_regression_1d(
            [(pnt['pos'][i], pnt['t']) for pnt in pnts], t)
        lin_vel[i] = v
        lin_acc[i] = a

    for i in range(4):
        v, a = self._motion_regression_1d(
            [(pnt['rot'][i], pnt['t']) for pnt in pnts], t)
        q_d[i] = v
        q_dd[i] = a

    # Keeping all velocities and accelerations in the inertial frame
    ang_vel = 2 * quaternion_multiply(q_d, quaternion_conjugate(qt))
    ang_acc = 2 * quaternion_multiply(q_dd, quaternion_conjugate(qt))

    return np.hstack((lin_vel, ang_vel[0:3])), np.hstack(
        (lin_acc, ang_acc[0:3]))
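# Hedged numerical check (not part of the original module): for a pure yaw
# rotation at constant rate w_z, the identity used above, omega = 2 * dq/dt * conj(q),
# should recover (0, 0, w_z). Assumes tf.transformations-style (x, y, z, w) quaternions.
import numpy as np
from tf.transformations import (quaternion_from_euler, quaternion_multiply,
                                quaternion_conjugate)

w_z = 0.3                                    # rad/s about the z axis
dt = 1e-4
q0 = quaternion_from_euler(0.0, 0.0, 0.0)
q1 = quaternion_from_euler(0.0, 0.0, w_z * dt)
q_dot = (q1 - q0) / dt                       # finite-difference quaternion derivative
omega = 2.0 * quaternion_multiply(q_dot, quaternion_conjugate(q0))
print(omega[0:3])                            # approximately [0, 0, 0.3]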
def listen_imu_quaternion(self, imu):
    """Doesn't work b/c desired yaw is too far from possible yaw"""
    current_orientation = np.array([imu.orientation.x,
                                    imu.orientation.y,
                                    imu.orientation.z,
                                    imu.orientation.w])
    # current_angular_speed = np.array([imu.angular_velocity.x,
    #                                   imu.angular_velocity.y,
    #                                   imu.angular_velocity.z])
    diff_orientation = transformations.quaternion_multiply(
        self.desired_orientation,
        # Unit quaternion => inverse = conjugate / norm = conjugate
        transformations.quaternion_conjugate(current_orientation))
    assert np.allclose(
        transformations.quaternion_multiply(diff_orientation,
                                            current_orientation),
        self.desired_orientation)
    # diff_r, diff_p, diff_y = transformations.euler_from_quaternion(
    #     diff_orientation)
    # rospy.loginfo("Orientation error (quaternion): {}".format(diff_orientation))
    # rospy.loginfo(
    #     "Orientation error (deg): {}".format(
    #         np.rad2deg([diff_r, diff_p, diff_y])))
    # out = self.pitch_controller(diff_p)
    corrected_orientation = transformations.quaternion_multiply(
        quaternion_power(diff_orientation, 1.5), self.desired_orientation)
    rospy.loginfo(
        "Desired orientation (deg): {}".format(
            np.rad2deg(transformations.euler_from_quaternion(
                self.desired_orientation))))
    rospy.loginfo(
        "Corrected orientation (deg): {}".format(
            np.rad2deg(transformations.euler_from_quaternion(
                corrected_orientation))))
    rospy.loginfo("Desired position: {}".format(self.desired_position))
    setpoint_pose = Pose(self.desired_position, corrected_orientation)
    servo_angles = self.platform.ik(setpoint_pose)
    rospy.logdebug(
        "Servo angles (deg): {}".format(np.rad2deg(servo_angles)))
    self.publish_servo_angles(servo_angles)
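# quaternion_power() is called above but not defined in this section. A minimal
# sketch of one possible implementation (an assumption, not the original code),
# for a unit quaternion in (x, y, z, w) order: scale the rotation angle by the
# exponent, i.e. q**t = exp(t * log(q)).
import numpy as np

def quaternion_power(q, t):
    xyz = np.asarray(q[:3], dtype=float)
    w = float(q[3])
    s = np.linalg.norm(xyz)
    if s < 1e-12:
        return np.array([0.0, 0.0, 0.0, 1.0])   # identity raised to any power
    axis = xyz / s
    angle = 2.0 * np.arctan2(s, w)               # total rotation angle
    half = 0.5 * t * angle
    return np.concatenate((axis * np.sin(half), [np.cos(half)]))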
def measurement_update(current_state, dt, measurement):
    predicted_measurement = zeros(measurement.shape)
    orientation = SF9DOF_UKF.recover_quat(current_state)

    # Calculate predicted magnetometer readings
    h_vec = zeros((4, 1))
    h_vec[0:3, 0] = current_state[12:15, 0]
    h_vec[3, 0] = 0.
    temp = tf_math.quaternion_multiply(orientation, h_vec)
    result = tf_math.quaternion_multiply(temp,
                                         tf_math.quaternion_conjugate(orientation))
    predicted_measurement[0:3, 0] = result[0:3, 0]

    # Calculate the predicted gyro readings
    temp_gyro = current_state[3:6, 0] + current_state[9:12, 0]
    predicted_measurement[3:6, 0] = temp_gyro

    # Calculate the predicted accelerometer readings
    g_vec = zeros((4, 1))
    g_vec[0:3, 0] = current_state[15:18, 0]
    g_vec[3, 0] = 0.
    temp = tf_math.quaternion_multiply(orientation, g_vec)
    result = tf_math.quaternion_multiply(temp,
                                         tf_math.quaternion_conjugate(orientation))
    temp_accel = current_state[6:9, 0] + result[0:3, 0]
    predicted_measurement[6:9, 0] = temp_accel

    return predicted_measurement
def rightMultiplyPose(pose0, pose1):
    """
    Multiply pose0 with pose1 assuming pose0 is 7xn matrix and pose1 is 7 vector.

    Returns:
        [7xn] pose matrix by multiplying pose0 x pose1
    """
    p0 = pose0[:3, :]
    q0 = pose0[3:, :]
    q0_c = tf.quaternion_conjugate(q0)
    p1 = pose1[:3]
    q1 = pose1[3:]
    p1_exp = np.hstack((p1, 0))
    p1_rot = tf.quaternion_multiply(q0, tf.quaternion_multiply(p1_exp, q0_c))
    q_out = tf.quaternion_multiply(q0, q1)
    p_out = p0 + p1_rot[:3, :]
    return np.vstack((p_out, q_out))
def moveToMarker(self):
    try:
        now = rospy.Time.now()
        self.listener.waitForTransform('/map', self.marker, now, rospy.Duration(10))
        trans, rot = self.listener.lookupTransform('/map', self.marker, now)
        print "Have a TF frame"
        self.move_base_client.wait_for_server()
        g = MoveBaseGoal()
        g.target_pose.header.frame_id = '/map'
        g.target_pose.header.stamp = rospy.Time.now()
        # Add offset so that the PR2 sits right in front of the tag instead of
        # running it over or getting the navigation stack stuck
        x = trans[0] - 0.50
        y = trans[1]
        z = trans[2]
        # Switch rotation around to be in the opposite direction
        q1 = np.array([rot[0], rot[1], rot[2], rot[3]])
        q1_con = tft.quaternion_conjugate(q1)  # get the conjugate
        rot_y_axis = np.array([0.0 * sin(pi / 4), 1.0 * sin(pi / 4),
                               0.0 * sin(pi / 4), cos(pi / 4)])
        q3 = tft.quaternion_multiply(q1_con, q1)
        q4 = tft.quaternion_multiply(rot_y_axis, q3)
        q_fin = tft.quaternion_multiply(q1, q4)
        g.target_pose.pose.position.x = x
        g.target_pose.pose.position.y = y
        g.target_pose.pose.position.z = z
        g.target_pose.pose.orientation.x = q_fin[0]
        g.target_pose.pose.orientation.y = q_fin[1]
        g.target_pose.pose.orientation.z = q_fin[2]
        g.target_pose.pose.orientation.w = q_fin[3]
        self.move_base_client.send_goal(g)
        print "Made it this far, sent goal"
        success_to_box = self.move_base_client.wait_for_result()
        if success_to_box:
            print "Movement to box successful"
            self.at_box = True
        else:
            print "Movement to box failed"
            self.at_box = False
    except (tf.LookupException, tf.ConnectivityException,
            tf.ExtrapolationException, tf.Exception):
        print "TF Exception, try again"
def main():
    parser = argparse.ArgumentParser(description='Calibrate intrinsic parameters.')
    parser.add_argument('-d', '--dock_cam', dest='dock_cam', action='store_true',
                        help='Calibrate dock camera.')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Verbose mode.')
    parser.add_argument('robot',
                        help='The name of the robot to configure (i.e., put p4d to edit p4d.config).')
    parser.add_argument('intrinsics_bag',
                        help='The bag file with intrinsics calibration data.')
    parser.add_argument('extrinsics_bag',
                        help='The bag file with extrinsics calibration data.')
    args = parser.parse_args()

    SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
    target_yaml = SCRIPT_DIR + '/data/granite_april_tag.yaml'
    imu_yaml = SCRIPT_DIR + '/data/epson_g362p_imu.yaml'
    config_file = SCRIPT_DIR + '/../../astrobee/config/robots/' + args.robot + '.config'

    print 'Calibrating camera intrinsics...'
    intrinsics_yaml = intrinsics_calibrate.calibrate_camera(args.intrinsics_bag,
                                                            target_yaml,
                                                            dock_cam=args.dock_cam,
                                                            verbose=args.verbose,
                                                            model='pinhole-equi')
    if intrinsics_yaml == None:
        print >> sys.stderr, 'Failed to calibrate intrinsics.'
        return

    print 'Calibrating camera extrinsics...'
    extrinsics_yaml = calibrate_extrinsics(args.extrinsics_bag, target_yaml,
                                           intrinsics_yaml, imu_yaml,
                                           verbose=args.verbose)
    if extrinsics_yaml == None:
        print >> sys.stderr, 'Failed to calibrate extrinsics.'
        return

    imu_to_camera = read_yaml_extrinsics(extrinsics_yaml)
    if imu_to_camera is None:
        print >> sys.stderr, 'Failed to read extrinsics from yaml file.'
        return

    body_to_imu = trans_quat_to_transform(lua_read_transform(config_file, 'imu_transform'))
    if body_to_imu is None:
        print >> sys.stderr, 'Failed to read imu transform.'
        return

    imu_to_body = np.linalg.inv(body_to_imu)
    body_to_camera = np.linalg.inv(imu_to_body.dot(np.linalg.inv(imu_to_camera)))
    q = transformations.quaternion_conjugate(
        transformations.quaternion_from_matrix(body_to_camera))
    t = imu_to_body.dot(np.linalg.inv(imu_to_camera)[0:4, 3])
    print 'Translation: ', t
    print 'Rotation quaternion: ', q

    if lua_replace_transform(config_file,
                             'dock_cam_transform' if args.dock_cam else 'nav_cam_transform',
                             (t, q)):
        print >> sys.stderr, 'Failed to update config file.'
        return
def leftMultiplyPose(pose0, pose1):
    """
    Multiply pose0 with pose1 assuming pose1 is 7xn matrix. pose0 is 7 vector.

    Returns:
        [7xn] pose matrix by multiplying pose0 on the left
    """
    p0 = pose0[:3]
    q0 = pose0[3:]
    q0_c = tf.quaternion_conjugate(q0)
    p1 = pose1[:3, :]
    q1 = pose1[3:, :]
    N = p1.shape[1]
    #print pose1.shape
    p1_exp = np.vstack((p1, np.zeros(N)))
    p1_rot = tf.quaternion_multiply(q0, tf.quaternion_multiply(p1_exp, q0_c))
    q_out = tf.quaternion_multiply(q0, q1)
    p_out = np.expand_dims(p0, axis=1) + p1_rot[:3, :]
    return np.vstack((p_out, q_out))
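# Hedged usage sketch (illustrative, not from the original file): composing one
# pose with a column-wise 7xN batch ([x, y, z, qx, qy, qz, qw] per column) using
# leftMultiplyPose() above. It relies on the same column-wise broadcasting of
# tf.quaternion_multiply that the batched code above already assumes.
import numpy as np

pose_a = np.array([1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0])    # unit x translation, identity rotation
poses_b = np.tile(np.array([[0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0]]).T, (1, 4))  # 7x4 batch
composed = leftMultiplyPose(pose_a, poses_b)
print(composed.shape)   # (7, 4); each column is pose_a composed with the matching column of poses_b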
def qv_mult(q1, v1): """ Transforms a vector by a quaternion :param q1: Quaternion :type: Quaternion :param v1: vector :type: Point :return: transformed vector :type: Point """ q = q1 v = v1 if type(q1) is Quaternion: q = (q1.x, q1.y, q1.z, q1.w) if type(v1) is Point: v = (v1.x, v1.y, v1.z, 0) r = quaternion_multiply(quaternion_multiply(q, v), quaternion_conjugate(q)) return Point(r[0], r[1], r[2])
def get_data(self, t):
    try:
        sys.stdout = open(os.devnull, 'w')  # suppress stdout
        (trans, rot) = self.t.lookupTransform(self.root_frame, self.measured_frame,
                                              rospy.Time(0))
    except tf.LookupException as e:
        sys.stdout = sys.__stdout__  # restore stdout
        #print "Exception in metric '%s' %s %s"%(self.name, type(e), e)
        return None
    except tf.ExtrapolationException as e:
        sys.stdout = sys.__stdout__  # restore stdout
        #print "Exception in metric '%s' %s %s"%(self.name, type(e), e)
        return None
    except tf.ConnectivityException as e:
        sys.stdout = sys.__stdout__  # restore stdout
        #print "Exception in metric '%s' %s %s"%(self.name, type(e), e)
        return None
    except Exception as e:
        sys.stdout = sys.__stdout__  # restore stdout
        print("general exception in metric '%s':" % self.name, type(e), e)
        return None
    sys.stdout = sys.__stdout__  # restore stdout

    if self.time_old == None:
        self.trans_old = trans
        self.rot_old = rot
        self.time_old = t
        return None

    diff_q = transformations.quaternion_multiply(
        rot, transformations.quaternion_conjugate(self.rot_old))
    diff_e = transformations.euler_from_quaternion(diff_q)
    path_increment = sum([axis**2 for axis in diff_e])**0.5

    self.trans_old = trans
    self.rot_old = rot
    self.time_old = t
    return path_increment
def odometry_callback(self, msg):
    """Handle updated measured velocity callback."""
    if not bool(self.config):
        return

    p = msg.pose.pose.position
    q = msg.pose.pose.orientation
    p = numpy.array([p.x, p.y, p.z])
    q = numpy.array([q.x, q.y, q.z, q.w])

    if not self.initialized:
        # If this is the first callback: Store and hold latest pose.
        self.pos_des = p
        self.quat_des = q
        self.initialized = True

    # Compute control output:
    t = msg.header.stamp.to_sec()

    # Position error wrt. body frame
    e_pos = trans.quaternion_matrix(q).transpose()[0:3, 0:3].dot(self.pos_des - p)

    # Error quaternion wrt body frame
    e_rot_quat = trans.quaternion_multiply(trans.quaternion_conjugate(q),
                                           self.quat_des)

    # Error angles
    e_rot = numpy.array(trans.euler_from_quaternion(e_rot_quat))

    v_linear = self.pid_pos.regulate(e_pos, t)
    v_angular = self.pid_rot.regulate(e_rot, t)

    # Convert and publish vel. command:
    cmd_vel = geometry_msgs.Twist()
    cmd_vel.linear = geometry_msgs.Vector3(*v_linear)
    cmd_vel.angular = geometry_msgs.Vector3(*v_angular)
    self.pub_cmd_vel.publish(cmd_vel)
def calcRelativeCoordinate(pose, point):
    # (geometry_msgs::Point point_msg, geometry_msgs::Pose current_pose)
    worldCarR_q = np.array([
        pose.orientation.x, pose.orientation.y, pose.orientation.z,
        pose.orientation.w
    ])
    worldCarT_v = np.array(
        [pose.position.x, pose.position.y, pose.position.z, 1.0])
    carWorldR_m = ttf.quaternion_matrix(ttf.quaternion_conjugate(worldCarR_q))
    carWorldT_v = -np.dot(carWorldR_m, worldCarT_v)
    carWorld_m = carWorldR_m
    carWorld_m[:3, 3] = carWorldT_v[:3]
    point_world = np.array([point.x, point.y, point.z, 1.0])
    point_car = np.dot(carWorld_m, point_world)
    p = geometry_msgs.msg.Point()
    p.x, p.y, p.z, _ = point_car
    # print(t)
    # print(p)
    # print(tf2_geometry_msgs.do_transform_point(p, t))
    # assert False, " hh"
    return p
def test_quaternion_diff(self, q1, q2):
    q3 = quaternion_multiply(quaternion_conjugate(q1), q2)
    q4 = w.compile_and_execute(w.quaternion_diff, [q1, q2])
    self.assertTrue(np.isclose(q3, q4).all() or np.isclose(q3, -q4).all(),
                    msg='{} != {}'.format(q1, q4))
def vectorRotateQuaternion(self, q, v):
    '''return qvq^-1. q(x, y, z, w)
    '''
    #t = tf.transformations
    qua_mul = t.quaternion_multiply
    q_c = t.quaternion_conjugate(q)
    return qua_mul(qua_mul(q, v), q_c)
def test_quaternion_conjugate(self, q):
    r1 = w.compile_and_execute(w.quaternion_conjugate, [q])
    r2 = quaternion_conjugate(q)
    self.assertTrue(np.isclose(r1, r2).all() or np.isclose(r1, -r2).all(),
                    msg='{} != {}'.format(r1, r2))
def quatRotatePoint(q, p, o=Vec(0, 0, 0)):
    """returns point p rotated around quaternion q with the origin o
    (default (0, 0, 0))"""
    return quatMult(quatMult(q, numpy.append(p - o, (0, ))),
                    quaternion_conjugate(q))[:3] + o
def main(): SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) default_calibration_target = SCRIPT_DIR + "/config/granite_april_tag.yaml" parser = argparse.ArgumentParser( description="Calibrate extrinsics parameters.") parser.add_argument( "robot", help= "The name of the robot to configure (i.e., put p4d to edit p4d.config).", ) parser.add_argument("intrinsics_yaml", help="The yaml file with intrinsics calibration data.") parser.add_argument("extrinsics_bag", help="The bag file with extrinsics calibration data.") parser.add_argument( "-d", "--dock_cam", dest="dock_cam", action="store_true", help="Calibrate dock camera.", ) parser.add_argument( "-f", "--from", dest="from_time", help="Use the bag data starting at this time, in seconds.", ) parser.add_argument( "-t", "--to", dest="to_time", help="Use the bag data until this time, in seconds.", ) parser.add_argument( "--timeoffset-padding", dest="padding", help= "Maximum range in which the timeoffset may change during estimation, in seconds. See readme.md for more info. (default: 0.01)", ) parser.add_argument( "--calibration_target", dest="calibration_target", help= "Use this yaml file to desribe the calibration target, overriding the default: " + default_calibration_target, ) parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", help="Verbose mode.") args = parser.parse_args() if args.calibration_target is None: args.calibration_target = default_calibration_target imu_yaml = SCRIPT_DIR + "/config/imu.yaml" config_file = SCRIPT_DIR + "/../../astrobee/config/robots/" + args.robot + ".config" # Sanity check has_dock = has_dock_topic(args.intrinsics_yaml) if has_dock and (not args.dock_cam): print( "File " + args.intrinsics_yaml + " + has a dock topic, " + " yet the --dock-cam flag was not used.", file=sys.stderr, ) return print("Calibrating camera extrinsics...") extrinsics_yaml = calibrate_extrinsics( args.extrinsics_bag, args.calibration_target, args.intrinsics_yaml, imu_yaml, args.from_time, args.to_time, args.padding, verbose=args.verbose, ) if extrinsics_yaml == None: print("Failed to calibrate extrinsics.", file=sys.stderr) return two_cams = has_two_cameras(extrinsics_yaml) if two_cams: imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ["cam0", "cam1"]) else: imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ["cam0"]) if not imu_to_camera: print("Failed to read extrinsics from yaml file.", file=sys.stderr) return body_to_imu = trans_quat_to_transform( lua_read_transform(config_file, "imu_transform")) if body_to_imu is None: print("Failed to read imu transform.", file=sys.stderr) return imu_to_body = np.linalg.inv(body_to_imu) body_to_camera = np.linalg.inv( imu_to_body.dot(np.linalg.inv(imu_to_camera[0]))) q = transformations.quaternion_conjugate( transformations.quaternion_from_matrix(body_to_camera)) t = imu_to_body.dot(np.linalg.inv(imu_to_camera[0])[0:4, 3]) print("Cam0:") print("Translation: ", t) print("Rotation quaternion: ", q) if two_cams: # Modify the transform for the second camera, that is the depth one body_to_depth_camera = np.linalg.inv( imu_to_body.dot(np.linalg.inv(imu_to_camera[1]))) q_depth = transformations.quaternion_conjugate( transformations.quaternion_from_matrix(body_to_depth_camera)) t_depth = imu_to_body.dot(np.linalg.inv(imu_to_camera[1])[0:4, 3]) print("Cam1:") print("Translation: ", t_depth) print("Rotation quaternion: ", q_depth) if lua_replace_transform( config_file, "perch_cam_transform" if args.dock_cam else "haz_cam_transform", (t_depth, q_depth), ): print( "Failed to 
update config file with depth camera parameters.", file=sys.stderr, ) return has_depth = has_depth_topic(args.intrinsics_yaml) if has_depth and (not two_cams): # Only the depth camera is present, so the first transform corresponds to the depth camera if lua_replace_transform( config_file, "perch_cam_transform" if args.dock_cam else "haz_cam_transform", (t, q), ): print( "Failed to update config file with depth camera parameters", file=sys.stderr, ) return else: # Either both cameras are present, or only the HD cam, so the first transform # corresponds to the HD camera if lua_replace_transform( config_file, "dock_cam_transform" if args.dock_cam else "nav_cam_transform", (t, q), ): print( "Failed to update config file with HD camera parameters", file=sys.stderr, ) return
def quaternion_rotate(quaternion, vector):
    # type: (np.ndarray, np.ndarray) -> np.ndarray
    q = quaternion
    v = np.concatenate((vector, [0]))
    qc = transformations.quaternion_conjugate(quaternion)
    return transformations.quaternion_multiply(
        transformations.quaternion_multiply(q, v), qc)
def qv_mult(q1, v1):
    q2 = np.append(v1, 0.0)
    return tfs.quaternion_multiply(tfs.quaternion_multiply(q1, q2),
                                   tfs.quaternion_conjugate(q1))[:3]
def inverse(self):
    q_conjugate = quaternion_conjugate(self.q)
    return CompactTransform(-rotationMatrix(q_conjugate) * self.p, q_conjugate)
import numpy as np
import tf.transformations as tft  # assumed alias; the original snippet uses tft without an import

vel = np.genfromtxt(
    '/home/harish/RRC/ICRA_2019/baseline_2/aaaaa/New_method/x_y_z_translations/baseline_2_velocities_single_1.txt',
    delimiter=' ')
pose = np.genfromtxt(
    '/home/harish/RRC/ICRA_2019/baseline_2/aaaaa/New_method/x_y_z_translations/baseline_2_exp_indi.txt',
    delimiter=' ')

pose1 = np.zeros_like(pose)
pose1[0, :] = pose[0, :]

for i in range(pose.shape[0] - 1):
    ti = pose1[i, :3]
    qi = pose1[i, 3:]
    #vt = np.array([vel[i,0]*(-0.1), vel[i,1]*(-0.0988), vel[i,2]*(0.01)])
    #wt = np.array([vel[i,3]*1, vel[i,4]*(-1), vel[i,5]*2])
    vt = np.array([vel[i, 0], vel[i, 1], vel[i, 2]])
    wt = np.array([vel[i, 3], vel[i, 4], vel[i, 5]])
    td = vt
    qd = tft.quaternion_from_euler(wt[0], wt[1], wt[2], 'sxyz')
    qi1 = tft.quaternion_multiply(qd, qi)  # quaternion_multiply(q_rot, q_orig)
    # rotating vector around quat, equivalent of ri*Td(1:3,4)+ti
    tdh = np.hstack([td, 0])
    ti1 = tft.quaternion_multiply(tft.quaternion_multiply(qi, tdh),
                                  tft.quaternion_conjugate(qi))[:3] + ti
    pose1[i + 1, :] = np.hstack([ti1, qi1])

np.savetxt('/home/yvsharish/working/aaaaa_2/baseline_2_after_change.txt',
           pose1, delimiter=' ')
def main():
    SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
    default_calibration_target = SCRIPT_DIR + '/config/granite_april_tag.yaml'

    parser = argparse.ArgumentParser(description='Calibrate extrinsics parameters.')
    parser.add_argument(
        'robot',
        help='The name of the robot to configure (i.e., put p4d to edit p4d.config).')
    parser.add_argument('intrinsics_yaml',
                        help='The yaml file with intrinsics calibration data.')
    parser.add_argument('extrinsics_bag',
                        help='The bag file with extrinsics calibration data.')
    parser.add_argument('-d', '--dock_cam', dest='dock_cam', action='store_true',
                        help='Calibrate dock camera.')
    parser.add_argument('-f', '--from', dest='from_time',
                        help='Use the bag data starting at this time, in seconds.')
    parser.add_argument('-t', '--to', dest='to_time',
                        help='Use the bag data until this time, in seconds.')
    parser.add_argument(
        '--timeoffset-padding', dest='padding',
        help='Maximum range in which the timeoffset may change during estimation, in seconds. See readme.md for more info. (default: 0.01)')
    parser.add_argument(
        '--calibration_target', dest='calibration_target',
        help='Use this yaml file to describe the calibration target, overriding the default: '
        + default_calibration_target)
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Verbose mode.')
    args = parser.parse_args()
    if args.calibration_target is None:
        args.calibration_target = default_calibration_target

    imu_yaml = SCRIPT_DIR + '/config/imu.yaml'
    config_file = SCRIPT_DIR + '/../../astrobee/config/robots/' + args.robot + '.config'

    # Sanity check
    has_dock = has_dock_topic(args.intrinsics_yaml)
    if has_dock and (not args.dock_cam):
        print >> sys.stderr, 'File ' + args.intrinsics_yaml + ' + has a dock topic, ' + \
            ' yet the --dock-cam flag was not used.'
        return

    print 'Calibrating camera extrinsics...'
    extrinsics_yaml = calibrate_extrinsics(args.extrinsics_bag,
                                           args.calibration_target,
                                           args.intrinsics_yaml, imu_yaml,
                                           args.from_time, args.to_time,
                                           args.padding,
                                           verbose=args.verbose)
    if extrinsics_yaml == None:
        print >> sys.stderr, 'Failed to calibrate extrinsics.'
        return

    two_cams = has_two_cameras(extrinsics_yaml)
    if two_cams:
        imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ['cam0', 'cam1'])
    else:
        imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ['cam0'])
    if not imu_to_camera:
        print >> sys.stderr, 'Failed to read extrinsics from yaml file.'
        return

    body_to_imu = trans_quat_to_transform(
        lua_read_transform(config_file, 'imu_transform'))
    if body_to_imu is None:
        print >> sys.stderr, 'Failed to read imu transform.'
        return

    imu_to_body = np.linalg.inv(body_to_imu)
    body_to_camera = np.linalg.inv(
        imu_to_body.dot(np.linalg.inv(imu_to_camera[0])))
    q = transformations.quaternion_conjugate(
        transformations.quaternion_from_matrix(body_to_camera))
    t = imu_to_body.dot(np.linalg.inv(imu_to_camera[0])[0:4, 3])
    print 'Cam0:'
    print 'Translation: ', t
    print 'Rotation quaternion: ', q

    if two_cams:
        # Modify the transform for the second camera, that is the depth one
        body_to_depth_camera = np.linalg.inv(
            imu_to_body.dot(np.linalg.inv(imu_to_camera[1])))
        q_depth = transformations.quaternion_conjugate(
            transformations.quaternion_from_matrix(body_to_depth_camera))
        t_depth = imu_to_body.dot(np.linalg.inv(imu_to_camera[1])[0:4, 3])
        print 'Cam1:'
        print 'Translation: ', t_depth
        print 'Rotation quaternion: ', q_depth
        if lua_replace_transform(
                config_file,
                'perch_cam_transform' if args.dock_cam else 'haz_cam_transform',
                (t_depth, q_depth)):
            print >> sys.stderr, 'Failed to update config file with depth camera parameters.'
            return

    has_depth = has_depth_topic(args.intrinsics_yaml)
    if has_depth and (not two_cams):
        # Only the depth camera is present, so the first transform corresponds to the depth camera
        if lua_replace_transform(
                config_file,
                'perch_cam_transform' if args.dock_cam else 'haz_cam_transform',
                (t, q)):
            print >> sys.stderr, 'Failed to update config file with depth camera parameters'
            return
    else:
        # Either both cameras are present, or only the HD cam, so the first transform
        # corresponds to the HD camera
        if lua_replace_transform(
                config_file,
                'dock_cam_transform' if args.dock_cam else 'nav_cam_transform',
                (t, q)):
            print >> sys.stderr, 'Failed to update config file with HD camera parameters'
            return
def qv_mult(q1, v1):
    # https://answers.ros.org/question/196149/how-to-rotate-vector-by-quaternion-in-python/
    v1 = unit_vector(v1)
    q2 = list(v1) + [0.0]
    return quaternion_multiply(quaternion_multiply(q1, q2),
                               quaternion_conjugate(q1))[:3]
def main():
    parser = argparse.ArgumentParser(description='Calibrate extrinsics parameters.')
    parser.add_argument('-d', '--dock_cam', dest='dock_cam', action='store_true',
                        help='Calibrate dock camera.')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Verbose mode.')
    parser.add_argument(
        'robot',
        help='The name of the robot to configure (i.e., put p4d to edit p4d.config).')
    parser.add_argument('intrinsics_yaml',
                        help='The yaml file with intrinsics calibration data.')
    parser.add_argument('extrinsics_bag',
                        help='The bag file with extrinsics calibration data.')
    args = parser.parse_args()

    SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
    target_yaml = SCRIPT_DIR + '/data/granite_april_tag.yaml'
    imu_yaml = SCRIPT_DIR + '/data/imu.yaml'
    config_file = SCRIPT_DIR + '/../../astrobee/config/robots/' + args.robot + '.config'

    print 'Calibrating camera extrinsics...'
    extrinsics_yaml = calibrate_extrinsics(args.extrinsics_bag, target_yaml,
                                           args.intrinsics_yaml, imu_yaml,
                                           verbose=args.verbose)
    if extrinsics_yaml == None:
        print >> sys.stderr, 'Failed to calibrate extrinsics.'
        return

    depth = has_depth_cam(extrinsics_yaml)
    if depth:
        imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ['cam0', 'cam1'])
    else:
        imu_to_camera = read_yaml_extrinsics(extrinsics_yaml, ['cam0'])
    if not imu_to_camera:
        print >> sys.stderr, 'Failed to read extrinsics from yaml file.'
        return

    body_to_imu = trans_quat_to_transform(
        lua_read_transform(config_file, 'imu_transform'))
    if body_to_imu is None:
        print >> sys.stderr, 'Failed to read imu transform.'
        return

    imu_to_body = np.linalg.inv(body_to_imu)
    body_to_camera = np.linalg.inv(
        imu_to_body.dot(np.linalg.inv(imu_to_camera[0])))
    q = transformations.quaternion_conjugate(
        transformations.quaternion_from_matrix(body_to_camera))
    t = imu_to_body.dot(np.linalg.inv(imu_to_camera[0])[0:4, 3])
    print 'Cam0:'
    print 'Translation: ', t
    print 'Rotation quaternion: ', q

    #if args.depth_cam:
    if depth:
        body_to_depth_camera = np.linalg.inv(
            imu_to_body.dot(np.linalg.inv(imu_to_camera[1])))
        q_depth = transformations.quaternion_conjugate(
            transformations.quaternion_from_matrix(body_to_depth_camera))
        t_depth = imu_to_body.dot(np.linalg.inv(imu_to_camera[1])[0:4, 3])
        print 'Cam1:'
        print 'Translation: ', t_depth
        print 'Rotation quaternion: ', q_depth
        if lua_replace_transform(
                config_file,
                'perch_cam_transform' if args.dock_cam else 'haz_cam_transform',
                (t_depth, q_depth)):
            print >> sys.stderr, 'Failed to update config file with depth camera parameters.'
            return

    if lua_replace_transform(
            config_file,
            'dock_cam_transform' if args.dock_cam else 'nav_cam_transform',
            (t, q)):
        print >> sys.stderr, 'Failed to update config file with cam0 parameters'
        return
def __sub__(self, other):
    return CompactTransform(
        self.p - other.p,
        quaternion_multiply(self.q, quaternion_conjugate(other.q)))
def qv_mult(q, v):
    v = v + (0.0,)
    q_conj = transformations.quaternion_conjugate(q)
    return transformations.quaternion_multiply(
        transformations.quaternion_multiply(q, v), q_conj)[:-1]
def qinv(q):
    """ Quaternion Inverse (Conjugate, Unit) """
    # no normalization, assume unit
    #va, ra = q[:3], q[-1]
    #return np.concatenate([va, [-ra]], axis=-1)
    return tx.quaternion_conjugate(q)
def qv_mult(q1, v1):
    # Note: unlike the (x, y, z, w) helpers above, this variant prepends the
    # scalar part and drops the first element of the result, i.e. it appears to
    # assume a (w, x, y, z) quaternion layout.
    q2 = (0.0,) + v1
    q3 = tft.quaternion_multiply(q1, q2)
    return tft.quaternion_multiply(q3, tft.quaternion_conjugate(q1))[1:]