def plan_grasps(self, x=0, y=0, z=0.080):
    position = geometry_msgs.msg.Point(x, y, z)
    normal_orientation = pyquaternion.Quaternion(x=0, y=0, z=0, w=1)
    rotated_orientation = pyquaternion.Quaternion(axis=[1, 0, 0], angle=np.pi / 2)
    rotated_orientation = rotated_orientation * normal_orientation
    # pyquaternion indexes elements as (w, x, y, z); use the named accessors
    # so each component lands in the right geometry_msgs field.
    geom_orient = geometry_msgs.msg.Quaternion(x=normal_orientation.x,
                                               y=normal_orientation.y,
                                               z=normal_orientation.z,
                                               w=normal_orientation.w)
    geom_orient_rot = geometry_msgs.msg.Quaternion(x=rotated_orientation.x,
                                                   y=rotated_orientation.y,
                                                   z=rotated_orientation.z,
                                                   w=rotated_orientation.w)
    grasps = list()
    grasps.append(self.construct_graspit_grasp(position, geom_orient))
    grasps.append(self.construct_graspit_grasp(position, geom_orient_rot))
    return grasps
def global_nusc_box_to_cam(info, boxes, classes, eval_configs,
                           eval_version='detection_cvpr_2019'):
    """Convert boxes from the global to the camera coordinate frame.

    Args:
        info (dict): Info for a specific sample data, including the
            calibration information.
        boxes (list[:obj:`NuScenesBox`]): List of predicted NuScenesBoxes.
        classes (list[str]): Mapped classes in the evaluation.
        eval_configs (object): Evaluation configuration object.
        eval_version (str): Evaluation version.
            Default: 'detection_cvpr_2019'

    Returns:
        list: List of standard NuScenesBoxes in the camera coordinate frame.
    """
    box_list = []
    for box in boxes:
        # Move box to ego vehicle coord system
        box.translate(-np.array(info['ego2global_translation']))
        box.rotate(
            pyquaternion.Quaternion(info['ego2global_rotation']).inverse)
        # Filter out detections beyond the per-class evaluation range.
        cls_range_map = eval_configs.class_range
        radius = np.linalg.norm(box.center[:2], 2)
        det_range = cls_range_map[classes[box.label]]
        if radius > det_range:
            continue
        # Move box to camera coord system
        box.translate(-np.array(info['cam2ego_translation']))
        box.rotate(pyquaternion.Quaternion(info['cam2ego_rotation']).inverse)
        box_list.append(box)
    return box_list
def get_robot_ee_position(self, msg):
    # Pose as [w, x, y, z, tx, ty, tz] for openravepy.
    pose = [
        msg.pose.orientation.w, msg.pose.orientation.x,
        msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.position.x,
        msg.pose.position.y, msg.pose.position.z
    ]
    pose = openravepy.matrixFromPose(pose)
    # Apply the gripper-to-end-effector transform.
    ee_pose = numpy.dot(pose, self.T_G2EE)
    ee_pose = openravepy.poseFromMatrix(ee_pose)
    self.ee_orientation = pyquaternion.Quaternion(ee_pose[:4])
    self.ee_position = numpy.array(ee_pose[4:])
def _lidar_nusc_box_to_global(info, boxes, classes,
                              eval_version="detection_cvpr_2019"):
    import pyquaternion
    from nuscenes.eval.detection.config import config_factory

    # Build the evaluation config once, outside the loop.
    cfg = config_factory(eval_version)
    cls_range_map = cfg.class_range
    box_list = []
    for box in boxes:
        # Move box to ego vehicle coord system
        box.rotate(pyquaternion.Quaternion(info['lidar2ego_rotation']))
        box.translate(np.array(info['lidar2ego_translation']))
        # Filter out detections beyond the per-class evaluation range.
        radius = np.linalg.norm(box.center[:2], 2)
        det_range = cls_range_map[classes[box.label]]
        if radius > det_range:
            continue
        # Move box to global coord system
        box.rotate(pyquaternion.Quaternion(info['ego2global_rotation']))
        box.translate(np.array(info['ego2global_translation']))
        box_list.append(box)
    return box_list
def plotAxes(ax, orig, rot, l=1., c=['b', 'r', 'g'], otherOpts={}):
    import pyquaternion as pyq
    # Accept either a quaternion (pyquaternion object or length-4 iterable)
    # or a 3x3 rotation matrix.
    if isinstance(rot, pyq.Quaternion) or (len(list(rot)) == 4):
        rot = pyq.Quaternion(list(rot)).rotation_matrix
    rot = rot * l  # scale, without mutating a caller-supplied matrix in place
    # Rotation matrix -> columns are the new base vectors.
    x = np.hstack((orig, orig + rot[:, [0]]))
    y = np.hstack((orig, orig + rot[:, [1]]))
    z = np.hstack((orig, orig + rot[:, [2]]))
    ax.plot(*x, color=c[0], **otherOpts)
    ax.plot(*y, color=c[1], **otherOpts)
    ax.plot(*z, color=c[2], **otherOpts)
    return 0
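# Usage sketch for plotAxes (an illustration, not from the original source):
# draws the identity frame and a frame rotated 90 degrees about Z on the same
# matplotlib 3-D axes. Assumes matplotlib is available; the origin must be a
# (3, 1) column vector so the hstack inside plotAxes works.
import numpy as np
import pyquaternion
import matplotlib.pyplot as plt

fig = plt.figure()
ax = fig.add_subplot(projection='3d')
origin = np.zeros((3, 1))
plotAxes(ax, origin, np.eye(3))  # world frame at the origin
plotAxes(ax, origin + 2.0,
         pyquaternion.Quaternion(axis=[0, 0, 1], angle=np.pi / 2))
plt.show()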
def get_rpy_from_quat(x, y, z, w):
    quat = pyquaternion.Quaternion(x=x, y=y, z=z, w=w)
    R = quat.rotation_matrix
    n = R[:, 0]
    o = R[:, 1]
    a = R[:, 2]
    # ZYX (yaw-pitch-roll) extraction from the rotation matrix columns.
    yaw = math.atan2(n[1], n[0])
    pitch = math.atan2(-1 * n[2],
                       n[0] * math.cos(yaw) + n[1] * math.sin(yaw))
    roll = math.atan2(a[0] * math.sin(yaw) - a[1] * math.cos(yaw),
                      -1 * o[0] * math.sin(yaw) + o[1] * math.cos(yaw))
    return roll, pitch, yaw
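# Minimal sanity check (a sketch, not from the original source): pyquaternion's
# yaw_pitch_roll property uses the same intrinsic z-y'-x'' convention, so for a
# generic rotation it should agree with get_rpy_from_quat.
import numpy as np
import pyquaternion

q = pyquaternion.Quaternion(axis=[1, 2, 3], degrees=50)
r, p, y = get_rpy_from_quat(q.x, q.y, q.z, q.w)
yaw, pitch, roll = q.yaw_pitch_roll
np.testing.assert_allclose([r, p, y], [roll, pitch, yaw], atol=1e-9)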
def parse_cheetah_state(event):
    state_estimate = state_estimate_t.state_estimate_t.decode(event.data)
    q = pyquaternion.Quaternion(np.array(state_estimate.quat))
    gyro = state_estimate.gyro
    accelerometer = state_estimate.accelerometer
    p = np.array(state_estimate.p0)
    # Assemble a 3x4 [R | t] pose matrix.
    pose = np.empty((3, 4))
    pose[:, :-1] = q.rotation_matrix
    pose[:, -1] = p
    return {
        "cheetahTimestamps": event.timestamp,
        "cheetahPoses": pose,
        "cheetahGyros": gyro,
        "cheetahAccelerometers": accelerometer
    }
def my_bislerp(Qa, Qb, t):
    qa = pyq.Quaternion(matrix=Qa)
    qb = pyq.Quaternion(matrix=Qb)
    val = np.zeros(8)
    quat_i = pyq.Quaternion(0, 1, 0, 0)
    quat_j = pyq.Quaternion(0, 0, 1, 0)
    quat_k = pyq.Quaternion(0, 0, 0, 1)
    # Candidate representatives of qa (sign flips and axis products); pick
    # the one closest to qb so the slerp takes the short path.
    quat_array = [
        qa, -qa, qa * quat_i, -qa * quat_i, qa * quat_j, -qa * quat_j,
        qa * quat_k, -qa * quat_k
    ]
    qb_arr = np.array([qb[0], qb[1], qb[2], qb[3]])
    for cnt, qt in enumerate(quat_array):
        qt_arr = np.array([qt[0], qt[1], qt[2], qt[3]])
        val[cnt] = np.dot(qt_arr, qb_arr)
    qt = quat_array[val.argmax(axis=0)]
    if t < 0:
        t = 0.0
    qm = pyq.Quaternion.slerp(qt, qb, t)
    qm = qm.normalised
    return qm.rotation_matrix
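# Usage sketch (an illustration, not from the original source): interpolate
# halfway between the identity and a 90-degree rotation about Z. The candidate
# selection above makes the result independent of which quaternion cover
# (q or -q) represents either input matrix.
import numpy as np
import pyquaternion as pyq

Qa = np.eye(3)
Qb = pyq.Quaternion(axis=[0, 0, 1], degrees=90).rotation_matrix
Qm = my_bislerp(Qa, Qb, 0.5)
print(pyq.Quaternion(matrix=Qm).degrees)  # ~45.0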
def to_lifting_view(pose_3d):
    assert pose_3d.ndim == 2
    pose_3d = pose_3d.transpose()
    assert pose_3d.shape[0] == 3
    # scale
    pose_3d = pose_3d * 1000
    # rotate -120 degrees about the x axis
    qua = pq.Quaternion(axis=[1, 0, 0], degrees=-120)
    pose_3d = qua.rotation_matrix @ pose_3d
    # shift: center x/y, put the lowest z at the origin
    shift = np.array(
        [-np.mean(pose_3d[0]), -np.mean(pose_3d[1]),
         -np.min(pose_3d[2])]).reshape((-1, 1))
    pose_3d = pose_3d + shift
    return pose_3d.transpose()
def angle_to_quaternion(self, angle):
    """
    Translate an angle in degrees about the Z axis to a quaternion.

    :param angle: (int) degrees of rotation about the Z axis
    :return: geometry_msgs/Quaternion
    """
    pyq = pyquaternion.Quaternion(axis=[0.0, 0.0, 1.0],
                                  degrees=angle)  # rotation in degrees about Z
    q = Quaternion()
    q.x = pyq.x
    q.y = pyq.y
    q.z = pyq.z
    q.w = pyq.w
    return q
def msg(self, msgs):
    for m in msgs:
        self.accel_x.append(m.accelData[0])
        self.accel_y.append(m.accelData[1])
        self.accel_z.append(m.accelData[2])
        self.gyro_x.append(m.gyroData[0])
        self.gyro_y.append(m.gyroData[1])
        self.gyro_z.append(m.gyroData[2])
        self.magn_x.append(m.magnData[0])
        self.magn_y.append(m.magnData[1])
        self.magn_z.append(m.magnData[2])
        self.time.append(m.time)
        quat = pyquaternion.Quaternion(m.quaternion)
        self.plane_widget._update_rotation(quat)
        # Trim the history buffers once they exceed the window length.
        if len(self.time) > self.length:
            self.accel_x = self.accel_x[self.cut:(self.length - 1)]
            self.accel_y = self.accel_y[self.cut:(self.length - 1)]
            self.accel_z = self.accel_z[self.cut:(self.length - 1)]
            self.gyro_x = self.gyro_x[self.cut:(self.length - 1)]
            self.gyro_y = self.gyro_y[self.cut:(self.length - 1)]
            self.gyro_z = self.gyro_z[self.cut:(self.length - 1)]
            self.magn_x = self.magn_x[self.cut:(self.length - 1)]
            self.magn_y = self.magn_y[self.cut:(self.length - 1)]
            self.magn_z = self.magn_z[self.cut:(self.length - 1)]
            self.time = self.time[self.cut:(self.length - 1)]
        # Refresh the plots.
        self.accel_x_plot.setData(x=self.time, y=self.accel_x, pen='r', width=0.5)
        self.accel_y_plot.setData(x=self.time, y=self.accel_y, pen='g', width=0.5)
        self.accel_z_plot.setData(x=self.time, y=self.accel_z, pen='b', width=0.5)
        self.gyro_x_plot.setData(x=self.time, y=self.gyro_x, pen='r', width=0.5)
        self.gyro_y_plot.setData(x=self.time, y=self.gyro_y, pen='g', width=0.5)
        self.gyro_z_plot.setData(x=self.time, y=self.gyro_z, pen='b', width=0.5)
        self.magn_x_plot.setData(x=self.time, y=self.magn_x, pen='r', width=0.5)
        self.magn_y_plot.setData(x=self.time, y=self.magn_y, pen='g', width=0.5)
        self.magn_z_plot.setData(x=self.time, y=self.magn_z, pen='b', width=0.5)
def apply(self, data, args=None):
    modified = []
    for d in data:
        # Scale the raw quaternion components, then normalize to a unit
        # quaternion.
        new_quat = pq.Quaternion(d.quat_w * self.__quaternion_multiplier,
                                 d.quat_x * self.__quaternion_multiplier,
                                 d.quat_y * self.__quaternion_multiplier,
                                 d.quat_z * self.__quaternion_multiplier).unit
        modified.append(
            IMUData(d.acc_x * self.__acceleration_multiplier,
                    d.acc_y * self.__acceleration_multiplier,
                    d.acc_z * self.__acceleration_multiplier, new_quat.w,
                    new_quat.x, new_quat.y, new_quat.z, d.time1, d.time2))
    return PipelineReturnValue(0, modified)
def _second_det_to_nusc_box(detection):
    from nuscenes.utils.data_classes import Box
    import pyquaternion

    box3d = detection["box3d_lidar"].detach().cpu().numpy()
    scores = detection["scores"].detach().cpu().numpy()
    labels = detection["label_preds"].detach().cpu().numpy()
    # Convert the yaw convention to the nuScenes one.
    box3d[:, 6] = -box3d[:, 6] - np.pi / 2
    box_list = []
    for i in range(box3d.shape[0]):
        quat = pyquaternion.Quaternion(axis=[0, 0, 1], radians=box3d[i, 6])
        box = Box(box3d[i, :3],
                  box3d[i, 3:6],
                  quat,
                  label=labels[i],
                  score=scores[i])
        box_list.append(box)
    return box_list
def project_to_camera(nusc, forecast_frame_points, scene_token, cam_str,
                      wmax=1600, hmax=900):
    """Project forecast points into a camera image.

    :param nusc: NuScenes API instance.
    :param forecast_frame_points: (T, d) points in the ego frame.
    :param scene_token: sample token used to look up the camera data.
    :param cam_str: camera channel name, e.g. 'CAM_FRONT'.
    :param wmax: image width in pixels.
    :param hmax: image height in pixels.
    :returns: (3, N) normalized image points in front of the camera.
    """
    sample = nusc.get('sample', scene_token)
    sample_data = nusc.get('sample_data', sample['data'][cam_str])
    cal = nusc.get('calibrated_sensor', sample_data['calibrated_sensor_token'])
    K = np.asarray(cal['camera_intrinsic'])
    if forecast_frame_points.shape[1] == 3:
        future_fore_C_fore_R = forecast_frame_points.T
    else:
        # Fill in z coordinates.
        future_fore_C_fore_R = fill_z(forecast_frame_points, mx=0.)
    # Homog transform from ego to camera.
    cam_from_ego = gu.transform_matrix(cal['translation'],
                                       pyquaternion.Quaternion(cal['rotation']),
                                       inverse=True)
    # Transform points from ego-frame to camera frame.
    future_cam_C_cam_R = ((cam_from_ego[:3, :3] @ future_fore_C_fore_R).T +
                          cam_from_ego[:3, 3]).T
    # Project to the camera.
    points = gu.view_points(future_cam_C_cam_R, view=K, normalize=False)
    nbr_points = points.shape[1]
    z = points[2]
    # Normalize the points by depth.
    points = points / points[2:3, :].repeat(3, 0).reshape(3, nbr_points)
    # Keep only points in front of the camera (filtering against the image
    # bounds wmax/hmax is intentionally disabled here).
    zib = 0 <= z
    pib = points[:, zib]
    return pib
def getMat(self, stamp):
    msg = self.get(stamp)
    quaternion = pyquaternion.Quaternion(msg.transform.rotation.w,
                                         msg.transform.rotation.x,
                                         msg.transform.rotation.y,
                                         msg.transform.rotation.z)
    R = np.eye(4)
    R[0:3, 0:3] = quaternion.rotation_matrix
    T = np.eye(4)
    T[0:3, 3] = np.array([
        msg.transform.translation.x, msg.transform.translation.y,
        msg.transform.translation.z
    ])
    return T.dot(R)  # first rotate, then translate
def apply(self, data, args=None):
    new_data = []
    for d in data:
        quat = pq.Quaternion(d.quat_w, d.quat_x, d.quat_y, d.quat_z)
        # Rotate the acceleration into the fixed frame.
        new_vec = quat.rotate([d.acc_x, d.acc_y, d.acc_z])
        new_data.append(
            IMUData(
                new_vec[0],
                new_vec[1],
                new_vec[2],
                # The quaternion is now the identity: the data already sits
                # in the non-changing frame of reference.
                1, 0, 0, 0,
                d.time1,
                d.time2))
    return PipelineReturnValue(0, new_data)
def plus(self, delta: np.ndarray):
    """Add a local-dimension delta to the global-dimension state of this vertex.

    Translation is added directly; rotation is applied by right
    multiplication on SO(3).

    :param delta: 6x1 local increment, [0, 3) translation, [3, 6) rotation
    """
    self.state[0:3] += delta[0:3]
    stateRotation: np.ndarray = vec4_to_quat(self.state[3:7]).rotation_matrix
    # Map the rotation increment to SO(3) via the matrix exponential and
    # apply it on the right.
    deltaRotation = scp.expm(numeric.hat(delta[3:6]))
    spd = stateRotation.dot(deltaRotation)
    spdQ = pyquaternion.Quaternion(matrix=spd)
    self.state[3:7] = quat_to_vec4(spdQ)
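# Self-contained sketch of the same right-multiplication update (the helper
# below is illustrative; it stands in for the module's own numeric.hat):
# perturb a rotation matrix by exp(hat(delta)) on the right, then convert
# back to a quaternion.
import numpy as np
import scipy.linalg
import pyquaternion

def hat_demo(v):
    # Skew-symmetric matrix such that hat_demo(v) @ u == np.cross(v, u).
    return np.array([[0., -v[2], v[1]],
                     [v[2], 0., -v[0]],
                     [-v[1], v[0], 0.]])

R = pyquaternion.Quaternion(axis=[0, 0, 1], degrees=10).rotation_matrix
delta_rot = np.array([0.01, -0.02, 0.03])
R_new = R @ scipy.linalg.expm(hat_demo(delta_rot))
q_new = pyquaternion.Quaternion(matrix=R_new)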
def num_of_engines(self, value):
    if self._num_of_engines == value:
        return
    self._num_of_engines = value
    # Reset all per-engine lists to match the new engine count.
    self.engines = list()
    self._distance_fus_engine_mc = list()
    self._height_fus_engine_mc = list()
    self.fus_engine_q = list()
    self._angle_fus_engine = list()
    self._vector_fus_engine_mc = list()
    for i in range(self._num_of_engines):
        self.engines.append(engine.Engine())
        self._distance_fus_engine_mc.append(0.)
        self._height_fus_engine_mc.append(0.)
        self.fus_engine_q.append(quat.Quaternion())
        # Append to the private list, consistent with __init__.
        self._angle_fus_engine.append(0.)
        self._vector_fus_engine_mc.append(py.array([0., 0., 0.]))
def actuate(self):
    if self.mode == 'end_effector_space':
        target_position = self.action[0:3]
        target_orientation = self.action[3:7]
        # Normalize the commanded orientation. Note: accurateIK below is
        # called with the raw target_orientation, not the normalized one.
        quat_orientation = pyq.Quaternion(target_orientation).normalised
        target_gripper = self.action[7]
        jointPoses = accurateIK(self.robot_id,
                                self.end_effector_id,
                                target_position,
                                target_orientation,
                                self.lowerLimits,
                                self.upperLimits,
                                self.jointRanges,
                                self.restPoses,
                                useNullSpace=True)
        setMotors(self.robot_id, jointPoses)
        # Explicitly control the gripper.
        target_gripper_pos = float(self.interp_grip(target_gripper))
        p.setJointMotorControl2(bodyIndex=self.robot_id,
                                jointIndex=49,
                                controlMode=p.POSITION_CONTROL,
                                targetPosition=target_gripper_pos,
                                force=MAX_FORCE)
        p.setJointMotorControl2(bodyIndex=self.robot_id,
                                jointIndex=51,
                                controlMode=p.POSITION_CONTROL,
                                targetPosition=-target_gripper_pos,
                                force=MAX_FORCE)
    if self.mode == 'joints_space':
        # One action per joint (the gripper has 2 joints but counts as one).
        assert len(self.action) == self.n_joints - 1
        # Duplicate the command for the second gripper joint.
        self.action = np.append(self.action, self.action[-1])
        # Map each action from [-1, 1] onto the joint's limit range.
        commands = []
        for i, joint_command in enumerate(self.action):
            percentage_command = (joint_command + 1) / 2  # between 0 and 1
            if i == 8:
                percentage_command = 1 - percentage_command
            low = self.lowerLimits[self.ids_in_ranges[i]]
            high = self.upperLimits[self.ids_in_ranges[i]]
            commands.append(low + percentage_command * (high - low))
        # Apply the commands.
        setMotorsIds(self.robot_id, self.joints_id, commands)
def output_to_nusc_box(detection):
    """Convert the output to the box class in the nuScenes.

    Args:
        detection (dict): Detection results.

            - boxes_3d (:obj:`BaseInstance3DBoxes`): Detection bbox.
            - scores_3d (torch.Tensor): Detection scores.
            - labels_3d (torch.Tensor): Predicted box labels.

    Returns:
        list[:obj:`NuScenesBox`]: List of standard NuScenesBoxes.
    """
    box3d = detection['boxes_3d']
    scores = detection['scores_3d'].numpy()
    labels = detection['labels_3d'].numpy()

    box_gravity_center = box3d.gravity_center.numpy()
    box_dims = box3d.dims.numpy()
    box_yaw = box3d.yaw.numpy()
    # Convert the yaw convention (with dir_offset & dir_limit in the head).
    box_yaw = -box_yaw - np.pi / 2

    box_list = []
    for i in range(len(box3d)):
        quat = pyquaternion.Quaternion(axis=[0, 0, 1], radians=box_yaw[i])
        velocity = (*box3d.tensor[i, 7:9], 0.0)
        box = NuScenesBox(
            box_gravity_center[i],
            box_dims[i],
            quat,
            label=labels[i],
            score=scores[i],
            velocity=velocity)
        box_list.append(box)
    return box_list
def test_calc_q_dot():
    # Test against the formula from the notes: q_dot = 0.5 * L(q) @ [0; w].
    q = quat(np.random.rand(4))
    w = np.random.rand(3)
    test1 = .5 * L(q) @ np.append(0, w)
    test2 = calc_q_dot(q, w)
    np.testing.assert_allclose(test1, test2, atol=tol)

    # Trivial case: zero angular velocity gives zero derivative.
    q = quat(np.random.rand(4))
    w = np.zeros(3)
    np.testing.assert_allclose(calc_q_dot(q, w), np.zeros(4), atol=tol)

    # Against pyquaternion.
    for i in range(100):
        q = quat(np.random.rand(4))
        w = np.random.rand(3)
        qd = pyquaternion.Quaternion(q).derivative(w).elements
        np.testing.assert_allclose(qd, calc_q_dot(q, w), atol=tol)
def read_camera_poses(filename):
    with open(filename) as file:
        poses = []
        for line in file:
            elems = line.rstrip().split(' ')
            mat = [float(p) for p in elems if p != '']
            # Each line: leading index, position (x, y, z),
            # quaternion (qx, qy, qz, qw).
            position = np.asarray(mat[1:4])
            rotation = np.asarray(mat[4:])
            qw = rotation[3]
            qx = rotation[0]
            qy = rotation[1]
            qz = rotation[2]
            quaternion = pyquaternion.Quaternion(qw, qx, qy, qz)
            extrinsics = np.eye(4)
            extrinsics[:3, :3] = quaternion.rotation_matrix
            extrinsics[:3, 3] = position
            poses.append(extrinsics)
            # Alternative convention (disabled): mirror x and invert the pose.
            # M = np.diag([-1., 1., 1., 1.])
            # extrinsics = np.dot(M, extrinsics)
            # extrinsics[:3, 2] *= -1.
            # extrinsics = np.linalg.inv(extrinsics)
    return poses
def get_quaternion(lst1, lst2, matchlist=None):
    """Find the quaternion between two coordinate systems.

    Taken (and cleaned up a bit) from
    https://stackoverflow.com/a/23760608/8774464, a Python implementation
    of an algorithm in:

    Paul J. Besl and Neil D. McKay, "Method for registration of 3-D shapes",
    Sensor Fusion IV: Control Paradigms and Data Structures, 586
    (April 30, 1992); http://dx.doi.org/10.1117/12.57955
    """
    if not matchlist:
        matchlist = range(len(lst1))
    # Cross-covariance matrix of the matched point pairs.
    m = np.matrix([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
    for i, coord1 in enumerate(lst1):
        x = np.matrix(np.outer(coord1, lst2[matchlist[i]]))
        m = m + x
    # Build the symmetric 4x4 matrix whose largest eigenvector is the
    # optimal quaternion.
    n11 = float(m[0][:, 0] + m[1][:, 1] + m[2][:, 2])
    n22 = float(m[0][:, 0] - m[1][:, 1] - m[2][:, 2])
    n33 = float(-m[0][:, 0] + m[1][:, 1] - m[2][:, 2])
    n44 = float(-m[0][:, 0] - m[1][:, 1] + m[2][:, 2])
    n12 = float(m[1][:, 2] - m[2][:, 1])
    n13 = float(m[2][:, 0] - m[0][:, 2])
    n14 = float(m[0][:, 1] - m[1][:, 0])
    n23 = float(m[0][:, 1] + m[1][:, 0])
    n24 = float(m[2][:, 0] + m[0][:, 2])
    n34 = float(m[1][:, 2] + m[2][:, 1])
    n = np.matrix([[n11, n12, n13, n14], [n12, n22, n23, n24],
                   [n13, n23, n33, n34], [n14, n24, n34, n44]])
    values, vectors = np.linalg.eig(n)
    w = list(values)
    mw = max(w)
    quat = vectors[:, w.index(mw)]
    quat = np.array(quat).reshape(-1, )
    return pq.Quaternion(quat)
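# Usage sketch (illustrative, not from the original source): recover a known
# rotation from matched point lists. Up to sign, the recovered quaternion
# should rotate lst1 onto lst2.
import numpy as np
import pyquaternion as pq

q_true = pq.Quaternion(axis=[0, 1, 0], degrees=40)
pts = [np.array(v, dtype=float)
       for v in [(1, 0, 0), (0, 1, 0), (0, 0, 1), (1, 1, 1)]]
rotated = [q_true.rotate(p) for p in pts]
q_est = get_quaternion(pts, rotated)
np.testing.assert_allclose(q_est.rotate(pts[3]), rotated[3], atol=1e-9)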
def imu_isc_msg(self, msgs):
    for m in msgs:
        self.accel_isc_x.append(m.accel[0])
        self.accel_isc_y.append(m.accel[1])
        self.accel_isc_z.append(m.accel[2])
        self.magn_x.append(m.magn[0])
        self.magn_y.append(m.magn[1])
        self.magn_z.append(m.magn[2])
        self.time_isc.append(m.time)
        if magn_calibration:
            # Log raw magnetometer samples for calibration.
            self.magn_file = open(MAGN_PATH, 'a')
            self.magn_file.write("%f\t%f\t%f\n" %
                                 (m.magn[0], m.magn[1], m.magn[2]))
            self.magn_file.close()
        # Trim the history buffers once they exceed the window length.
        if len(self.time_isc) > self.length:
            self.time_isc = self.time_isc[self.cut:(self.length - 1)]
            self.accel_isc_x = self.accel_isc_x[self.cut:(self.length - 1)]
            self.accel_isc_y = self.accel_isc_y[self.cut:(self.length - 1)]
            self.accel_isc_z = self.accel_isc_z[self.cut:(self.length - 1)]
            self.magn_x = self.magn_x[self.cut:(self.length - 1)]
            self.magn_y = self.magn_y[self.cut:(self.length - 1)]
            self.magn_z = self.magn_z[self.cut:(self.length - 1)]
            self.quaternion = self.quaternion[self.cut:(self.length - 1)]
        # Refresh the plots.
        self.accel_isc_x_graph.setData(x=self.time_isc, y=self.accel_isc_x,
                                       pen='r', width=0.5)
        self.accel_isc_y_graph.setData(x=self.time_isc, y=self.accel_isc_y,
                                       pen='g', width=0.5)
        self.accel_isc_z_graph.setData(x=self.time_isc, y=self.accel_isc_z,
                                       pen='b', width=0.5)
        self.magn_x_graph.setData(x=self.time_isc, y=self.magn_x, pen='r',
                                  width=0.5)
        self.magn_y_graph.setData(x=self.time_isc, y=self.magn_y, pen='g',
                                  width=0.5)
        self.magn_z_graph.setData(x=self.time_isc, y=self.magn_z, pen='b',
                                  width=0.5)
        if not accel_calibration and not magn_calibration:
            quat = pyquaternion.Quaternion(m.quaternion)
            self.plane_widget.update_rotation(quat)
def _second_det_to_nusc_box(detection):
    from lyft_dataset_sdk.utils.data_classes import Box
    import pyquaternion

    box3d = detection["box3d_lidar"].detach().cpu().numpy()
    scores = detection["scores"].detach().cpu().numpy()
    labels = detection["label_preds"].detach().cpu().numpy()
    # Convert the yaw convention to the Lyft/nuScenes one.
    box3d[:, 6] = -box3d[:, 6] - np.pi / 2
    box_list = []
    for i in range(box3d.shape[0]):
        quat = pyquaternion.Quaternion(axis=[0, 0, 1], radians=box3d[i, 6])
        # Use the predicted velocity when the boxes carry one.
        velocity = (np.nan, np.nan, np.nan)
        if box3d.shape[1] == 9:
            velocity = (*box3d[i, 7:9], 0.0)
        box = Box(box3d[i, :3],
                  box3d[i, 3:6],
                  quat,
                  label=labels[i],
                  score=scores[i],
                  velocity=velocity)
        box_list.append(box)
    return box_list
def computeResidual(self):
    vertices: [Graph.Vertex] = self.getVertices()
    camVertex: BACameraVertex = vertices[0]
    pointVertex: BAPointVertex = vertices[1]
    assert isinstance(camVertex, BACameraVertex)
    assert isinstance(pointVertex, BAPointVertex)
    # Camera rotation is stored as an angle-axis vector.
    angleAxis = camVertex.state[0:3]
    angle = np.linalg.norm(angleAxis)
    axis = angleAxis / angle
    self.rotation = pyquaternion.Quaternion(axis=axis, angle=angle)
    translation = camVertex.state[3:6]
    Pw = pointVertex.state
    # World point -> camera frame.
    self.Pc = self.rotation.rotate(Pw) + translation
    # Perspective division (negative-z convention).
    self.pc = np.array([-self.Pc[0] / self.Pc[2], -self.Pc[1] / self.Pc[2]],
                       dtype=float)
    self.focal = float(camVertex.state[6])
    self.l1 = float(camVertex.state[7])
    self.l2 = float(camVertex.state[8])
    # Radial distortion: 1 + l1*r^2 + l2*r^4.
    self.r2 = self.pc.dot(self.pc)
    self.distortion = 1.0 + self.r2 * (self.l1 + self.l2 * self.r2)
    self.pfinal = self.focal * self.distortion * self.pc
    self.residual = self.pfinal - self.observation
def update_keyframe(self, cloud):
    # Align the new cloud to the last keyframe with point-to-plane ICP.
    criteria = open3d.registration.ICPConvergenceCriteria(max_iteration=100)
    reg = open3d.registration.registration_icp(
        cloud, self.keyframes[-1].cloud, 1.0, self.last_frame_transformation,
        open3d.registration.TransformationEstimationPointToPlane(),
        criteria=criteria)

    # Only spawn a new keyframe if the motion is large enough.
    angle = pyquaternion.Quaternion(matrix=reg.transformation[:3, :3]).degrees
    trans = numpy.linalg.norm(reg.transformation[:3, 3])
    if abs(angle) < self.keyframe_angle_thresh_deg and abs(
            trans) < self.keyframe_trans_thresh_m:
        self.last_frame_transformation = reg.transformation
        return False

    odom = numpy.dot(self.keyframes[-1].odom, reg.transformation)
    self.keyframes.append(Keyframe(len(self.keyframes), cloud, odom))
    self.graph.nodes.append(self.keyframes[-1].node)
    self.vis.add_geometry(self.keyframes[-1].transformed)
    self.last_frame_transformation = numpy.identity(4)

    # Add an odometry edge between the two most recent keyframes.
    information = open3d.registration.get_information_matrix_from_point_clouds(
        self.keyframes[-1].cloud, self.keyframes[-2].cloud, 1.0,
        reg.transformation)
    edge = open3d.registration.PoseGraphEdge(self.keyframes[-1].id,
                                             self.keyframes[-2].id,
                                             reg.transformation,
                                             information,
                                             uncertain=False)
    self.graph.edges.append(edge)
    return True
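# Minimal sketch (illustrative, not from the original source) of the keyframe
# test above in isolation: the rotation magnitude of a 4x4 rigid transform is
# read off via pyquaternion's .degrees, and the translation magnitude via the
# norm of the last column.
import numpy as np
import pyquaternion

T = np.eye(4)
T[:3, :3] = pyquaternion.Quaternion(axis=[0, 1, 0], degrees=3.0).rotation_matrix
T[:3, 3] = [0.05, 0.0, 0.0]
angle_deg = pyquaternion.Quaternion(matrix=T[:3, :3]).degrees  # ~3.0
trans_m = np.linalg.norm(T[:3, 3])                             # 0.05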
def find_up_face_idx(self, debug=False):
    normals = self.die.face_normals
    up_vector = np.asarray(normals[0])
    up_vector_length = np.linalg.norm(up_vector)
    # self.end_rot is stored as (x, y, z, w); pyquaternion expects (w, x, y, z).
    end_rotation = quat.Quaternion(self.end_rot[3], self.end_rot[0],
                                   self.end_rot[1], self.end_rot[2])
    dots = []
    for i, normal in enumerate(normals):
        # Rotate the face normal into the end pose.
        end_normal = end_rotation.rotate(np.asarray(normal))
        end_normal_length = np.linalg.norm(end_normal)
        # Cosine similarity between the rotated normal and the up vector.
        dot = np.dot(up_vector, end_normal) / (up_vector_length *
                                               end_normal_length)
        dots.append(dot)
    if debug:
        print(dots)
    # The face whose rotated normal best aligns with "up" is on top.
    return np.argmax(np.asarray(dots))
def build_model(path_to_model, pose_vector):
    """Get the model parameters required to spawn it.

    Should be used before spawn_model.

    :param path_to_model: absolute path as a string
    :param pose_vector: pose as a vector [x, y, z, a1, a2, a3]
    :return: model_xml: xml read as a string,
             pose: pose as a geometry_msgs::Pose(Point, Quaternion)
    """
    # Get model and read as string.
    with open(path_to_model, 'r') as f:
        model_xml = f.read()
    # Get pose from input vector.
    angles = pose_vector[-3:]
    # Get quaternion from the yaw angle: pyquaternion order is [w, x, y, z].
    quat = pyquaternion.Quaternion(axis=[0, 0, 1], angle=angles[-1])
    # Build ROS quaternion: order is [x, y, z, w].
    orientation = Quaternion(quat[1], quat[2], quat[3], quat[0])
    pose = Pose(Point(x=pose_vector[0], y=pose_vector[1], z=pose_vector[2]),
                orientation)
    return model_xml, pose
def __init__(self):
    self.name = 'Unnamed'
    self.fuselage = p_obj.PhysicalObject()
    self.aero_square = 0.
    self.drag_coef = py.array([0., 0., 0.])
    self.moment_coef = py.array([0., 0., 0.])
    self._num_of_engines = 4
    self.engines = list()
    self._distance_fus_engine_mc = list()
    self._height_fus_engine_mc = list()
    self.fus_engine_q = list()
    self._angle_fus_engine = list()
    self.symmetry = False
    self._equal_engines = False
    self._vector_fus_engine_mc = list()
    # Initialize the per-engine lists with defaults.
    for i in range(self._num_of_engines):
        self.engines.append(engine.Engine())
        self._distance_fus_engine_mc.append(0.)
        self._height_fus_engine_mc.append(0.)
        self.fus_engine_q.append(quat.Quaternion())
        self._angle_fus_engine.append(0.)
        self._vector_fus_engine_mc.append(py.array([0., 0., 0.]))