Example #1
    def init_reg_cb(self, req):
        # TODO REMOVE THIS FACE SIDE MESS
        self.publish_feedback("Performing Head Registration. Please Wait.")
        self.face_side = rospy.get_param("/face_side", 'r')
        bag_str = self.reg_dir + '/' + '_'.join(
            [self.subject, self.face_side, "head_transform"]) + ".bag"
        rospy.loginfo("[%s] Loading %s" % (rospy.get_name(), bag_str))
        try:
            bag = rosbag.Bag(bag_str, 'r')
            head_tf = None
            for topic, msg, ts in bag.read_messages():
                head_tf = msg
            assert (head_tf
                    is not None), "Error reading head transform bagfile"
            bag.close()
        except Exception as e:
            self.publish_feedback(
                "Registration failed: Error loading saved registration.")
            rospy.logerr(
                "[%s] Cannot load registration parameters from %s:\r\n%s" %
                (rospy.get_name(), bag_str, e))
            return (False, PoseStamped())

        if self.face_side == 'r':
            head_registration = self.head_registration_r
        else:
            head_registration = self.head_registration_l
        try:
            rospy.loginfo(
                "[%s] Requesting head registration for %s at pixel (%d, %d)." %
                (rospy.get_name(), self.subject, req.u, req.v))
            self.head_pc_reg = head_registration(req.u, req.v).reg_pose
            if ((self.head_pc_reg.pose.position == Point(0.0, 0.0, 0.0))
                    and (self.head_pc_reg.pose.orientation == Quaternion(
                        0.0, 0.0, 0.0, 1.0))):
                self.head_pc_reg = None
                raise rospy.ServiceException("Unable to find a good match.")
        except rospy.ServiceException as se:
            self.publish_feedback("Registration failed: %s" % se)
            return (False, PoseStamped())

        pc_reg_mat = PoseConv.to_homo_mat(self.head_pc_reg)
        head_tf_mat = PoseConv.to_homo_mat(
            Transform(head_tf.transform.translation,
                      head_tf.transform.rotation))
        self.head_pose = PoseConv.to_pose_stamped_msg(pc_reg_mat**-1 *
                                                      head_tf_mat)
        self.head_pose.header.frame_id = self.head_pc_reg.header.frame_id
        self.head_pose.header.stamp = self.head_pc_reg.header.stamp

        side = "right" if (self.face_side == 'r') else "left"
        self.publish_feedback(
            "Registered head using %s cheek model, please check and confirm." %
            side)
        #        rospy.loginfo('[%s] Head frame registered at:\r\n %s' %(rospy.get_name(), self.head_pose))
        self.test_pose.publish(self.head_pose)
        return (True, self.head_pose)
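
The core of this callback is plain homogeneous-matrix algebra: the registration result and the saved head transform are both converted to 4x4 numpy matrices, inverted and composed, and then converted back into a pose message. A minimal, self-contained sketch of that pattern (illustrative poses only, assuming hrl_geom is importable; the real callback wraps the result in a PoseStamped via to_pose_stamped_msg):

import numpy as np
from hrl_geom.pose_converter import PoseConv

# Two illustrative poses as (position, roll/pitch/yaw) pairs.
reg_pose = ([0.1, 0.0, 0.5], [0.0, 0.0, np.pi / 4])
saved_tf = ([0.0, 0.2, 0.0], [0.0, 0.0, 0.0])

# PoseConv.to_homo_mat returns a 4x4 numpy matrix, so **-1 inverts it
# and * composes transforms, as in init_reg_cb above.
reg_mat = PoseConv.to_homo_mat(*reg_pose)
tf_mat = PoseConv.to_homo_mat(*saved_tf)
head_mat = reg_mat**-1 * tf_mat

# Back to a plain Pose message (no header needed outside a ROS node).
print(PoseConv.to_pose_msg(head_mat))
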
Example #2
    def register_ellipse(self, mode, side):
        reg_e_params = EllipsoidParams()
        try:
            now = rospy.Time.now()
            self.tfl.waitForTransform("/base_link", "/head_frame", now,
                                      rospy.Duration(10.))
            pos, quat = self.tfl.lookupTransform("/head_frame", "/base_frame",
                                                 now)
            self.head_pose = PoseStamped()
            self.head_pose.header.stamp = now
            self.head_pose.header.frame_id = "/base_link"
            self.head_pose.pose.position = Point(*pos)
            self.head_pose.pose.orientation = Quaternion(*quat)
        except:
            rospy.logwarn("[%s] Head registration not loaded yet." %
                          rospy.get_name())
            return (False, reg_e_params)
        reg_prefix = rospy.get_param("~registration_prefix", "")
        registration_files = rospy.get_param("~registration_files", None)
        if mode not in registration_files:
            rospy.logerr("[%s] Mode not in registration_files parameters" %
                         (rospy.get_name()))
            return (False, reg_e_params)
        try:
            bag_str = reg_prefix + registration_files[mode][side]
            rospy.loginfo("[%s] Loading %s" % (rospy.get_name(), bag_str))
            bag = rosbag.Bag(bag_str, 'r')
            e_params = None
            for topic, msg, ts in bag.read_messages():
                e_params = msg
            assert e_params is not None
            bag.close()
        except:
            rospy.logerr("[%s] Cannot load registration parameters from %s" %
                         (rospy.get_name(), bag_str))
            return (False, reg_e_params)

        head_reg_mat = PoseConv.to_homo_mat(self.head_pose)
        ell_reg = PoseConv.to_homo_mat(
            Transform(e_params.e_frame.transform.translation,
                      e_params.e_frame.transform.rotation))
        reg_e_params.e_frame = PoseConv.to_tf_stamped_msg(head_reg_mat**-1 *
                                                          ell_reg)
        reg_e_params.e_frame.header.frame_id = self.head_reg_tf.header.frame_id
        reg_e_params.e_frame.child_frame_id = '/ellipse_frame'
        reg_e_params.height = e_params.height
        reg_e_params.E = e_params.E
        self.ell_params_pub.publish(reg_e_params)
        self.ell_frame = reg_e_params.e_frame
        return (True, reg_e_params)
Example #3
 def robot_ellipsoidal_pose(self, lat, lon, height, orient_quat, kinect_frame_mat=None):
     if kinect_frame_mat is None:
         kinect_frame_mat = self.get_ell_frame()
     pos, quat = self.ell_space.ellipsoidal_to_pose(lat, lon, height)
     quat_rotated = trans.quaternion_multiply(quat, orient_quat)
     ell_pose_mat = PoseConv.to_homo_mat(pos, quat_rotated)
     return PoseConv.to_pos_rot(kinect_frame_mat * ell_pose_mat)
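
robot_ellipsoidal_pose applies a fixed orientation offset by quaternion multiplication before composing the pose with the Kinect frame. A standalone sketch of just that step, using tf.transformations directly with illustrative values (the EllipsoidSpace conversion itself is not reproduced here):

import numpy as np
import tf.transformations as trans
from hrl_geom.pose_converter import PoseConv

# Illustrative pose (as would come from ellipsoidal_to_pose) and a fixed offset.
pos = [0.3, 0.1, 0.2]
quat = trans.quaternion_from_euler(0.0, 0.0, np.pi / 2)
orient_quat = trans.quaternion_from_euler(0.0, np.pi, 0.0)

# Rotate the pose orientation by the offset, then build the 4x4 matrix.
quat_rotated = trans.quaternion_multiply(quat, orient_quat)
ell_pose_mat = PoseConv.to_homo_mat(pos, quat_rotated)

# Stand-in for get_ell_frame(): identity here, purely for illustration.
kinect_frame_mat = np.mat(np.eye(4))
print(PoseConv.to_pos_rot(kinect_frame_mat * ell_pose_mat))
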
Example #4
    def command_absolute_cb(self, msg):
        if self.arm is None:
            rospy.logwarn(
                "[cartesian_manager] Cartesian controller not enabled.")
            return
        self.cart_ctrl.stop_moving(wait=True)
        if msg.header.frame_id == "":
            msg.header.frame_id = "torso_lift_link"
        if self.kin is None or msg.header.frame_id not in self.kin.get_link_names(
        ):
            self.kin = create_joint_kin("torso_lift_link", msg.header.frame_id)
        torso_pos_ep, torso_rot_ep = self.arm.get_ep()
        torso_B_ref = self.kin.forward(base_link="torso_lift_link",
                                       end_link=msg.header.frame_id)
        ref_B_goal = PoseConv.to_homo_mat(msg)
        torso_B_goal = torso_B_ref * ref_B_goal

        change_pos, change_rot = PoseConv.to_pos_rot(torso_B_goal)
        change_pos_xyz = change_pos.T.A[0]
        rospy.loginfo("[cartesian_manager] Command absolute movement." +
                      str((change_pos_xyz, change_rot)))
        self.cart_ctrl.execute_cart_move((change_pos_xyz, change_rot),
                                         ((1, 1, 1), 1),
                                         velocity=self.velocity,
                                         blocking=True)
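
The .T.A[0] idiom above flattens the 3x1 position column returned by PoseConv.to_pos_rot into a plain 1-D array before it is handed to the Cartesian controller. A small sketch of just that extraction, with an illustrative goal pose:

import numpy as np
from hrl_geom.pose_converter import PoseConv

# Illustrative goal: 10 cm forward, rotated 90 degrees about z.
torso_B_goal = PoseConv.to_homo_mat([0.1, 0.0, 0.0], [0.0, 0.0, np.pi / 2])

change_pos, change_rot = PoseConv.to_pos_rot(torso_B_goal)
# change_pos is a 3x1 numpy matrix; .T.A[0] yields a flat [x, y, z] array.
change_pos_xyz = change_pos.T.A[0]
print(change_pos_xyz)
print(change_rot)
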
Example #5
    def init_reg_cb(self, req):
        # TODO REMOVE THIS FACE SIDE MESS
        self.publish_feedback("Performing Head Registration. Please Wait.")
        self.face_side = rospy.get_param("~face_side1", 'r')
        bag_str = self.reg_dir + '/' + '_'.join([self.subject, self.face_side, "head_transform"]) + ".bag"
        rospy.loginfo("[%s] Loading %s" %(rospy.get_name(), bag_str))
        try:
            bag = rosbag.Bag(bag_str, 'r')
            head_tf = None
            for topic, msg, ts in bag.read_messages():
                head_tf = msg
            assert (head_tf is not None), "Error reading head transform bagfile"
            bag.close()
        except Exception as e:
            self.publish_feedback("Registration failed: Error loading saved registration.")
            rospy.logerr("[%s] Cannot load registration parameters from %s:\r\n%s" %
                         (rospy.get_name(), bag_str, e))
            return (False, PoseStamped())

        if self.face_side == 'r':
            head_registration = self.head_registration_r
        else:
            head_registration = self.head_registration_l
        try:
            rospy.loginfo("[%s] Requesting head registration for %s at pixel (%d, %d)." %(rospy.get_name(),
                                                                                          self.subject,
                                                                                          req.u, req.v))
            self.head_pc_reg = head_registration(req.u, req.v).reg_pose
            if ((self.head_pc_reg.pose.position == Point(0.0, 0.0, 0.0)) and
                (self.head_pc_reg.pose.orientation == Quaternion(0.0, 0.0, 0.0, 1.0))):
               self.head_pc_reg = None
               raise rospy.ServiceException("Unable to find a good match.")
        except rospy.ServiceException as se:
            self.publish_feedback("Registration failed: %s" %se)
            return (False, PoseStamped())

        pc_reg_mat = PoseConv.to_homo_mat(self.head_pc_reg)
        head_tf_mat = PoseConv.to_homo_mat(Transform(head_tf.transform.translation,
                                                     head_tf.transform.rotation))
        self.head_pose = PoseConv.to_pose_stamped_msg(pc_reg_mat**-1 * head_tf_mat)
        self.head_pose.header.frame_id = self.head_pc_reg.header.frame_id
        self.head_pose.header.stamp = self.head_pc_reg.header.stamp

        side = "right" if (self.face_side == 'r') else "left"
        self.publish_feedback("Registered head using %s cheek model, please check and confirm." %side)
#        rospy.loginfo('[%s] Head frame registered at:\r\n %s' %(rospy.get_name(), self.head_pose))
        self.test_pose.publish(self.head_pose)
        return (True, self.head_pose)
Example #6
 def cal_joint_angle_kdl(self, x_w, y_w, z_w):
     pos_euler = [[x_w, y_w, z_w], [0.0, 0.0, 0.0]]
     twist_msg = PoseConv.to_twist_msg(pos_euler)
     homo_mat = PoseConv.to_homo_mat(twist_msg)
     print(homo_mat)
     q = self.kdl_kin.random_joint_angles()
     q_r = self.kdl_kin.inverse(homo_mat, q)
     return q_r
Example #7
 def load_params(self, params):
     kinect_B_head = PoseConv.to_homo_mat(params.e_frame)
     base_B_kinect = self.kin_head.forward(base_link="base_link",
                                           end_link=self.kinect_frame)
     base_B_head = base_B_kinect * kinect_B_head
     self.head_center = PoseConv.to_pose_stamped_msg("/base_link",base_B_head)
     self.ell_space = EllipsoidSpace()
     self.ell_space.load_ell_params(params.E, params.is_oblate, params.height)
     rospy.loginfo("Loaded ellispoidal parameters.")
Example #8
 def point_towards_midpoint(self, clean_joints, psm1_pos, psm2_pos, key_hole,ecm_pose):
     mid_point = (psm1_pos + psm2_pos)/2
 #     mid_point = ecm_pose[0:3,3] - numpy.array([0,0,.01]).reshape(3,1)
     self.add_marker(PoseConv.to_homo_mat([mid_point, [0,0,0]]), '/marker_subscriber',color=[1,0,0], scale=[0.047/5,0.047/5,0.047/5])
     self.add_marker(PoseConv.to_homo_mat([key_hole,[0,0,0]]), '/keyhole_subscriber',[0,0,1])
     self.add_marker(ecm_pose, '/current_ecm_pose', [1,0,0], Marker.ARROW, scale=[.1,.005,.005])
     temp = clean_joints['ecm'].position
     b,_ = self.ecm_kin.FK([temp[0],temp[1],.14,temp[3]])
     # find the equation of the line that goes through the key_hole and the 
     # mid_point
     ab_vector = (mid_point-key_hole)
     ecm_current_direction = b-key_hole 
     self.add_marker(ecm_pose, '/midpoint_to_keyhole', [0,1,1], type=Marker.LINE_LIST, scale = [0.005, 0.005, 0.005], points=[b, key_hole])
     
     self.add_marker(PoseConv.to_homo_mat([ab_vector,[0,0,0]]), '/ab_vector',[0,1,0], type=Marker.ARROW)
     r = self.find_rotation_matrix_between_two_vectors(ecm_current_direction, ab_vector)
     m = math.sqrt(ab_vector[0]**2 + ab_vector[1]**2 + ab_vector[2]**2) # ab_vector's length
     
     # insertion joint length
     l = math.sqrt( (ecm_pose[0,3]-key_hole[0])**2 + (ecm_pose[1,3]-key_hole[1])**2 + (ecm_pose[2,3]-key_hole[2])**2)
     
     # Equation of the line that passes through the midpoint of the tools and the key hole
     x = lambda t: key_hole[0] + ab_vector[0] * t
     y = lambda t: key_hole[1] + ab_vector[1] * t
     z = lambda t: key_hole[2] + ab_vector[2] * t
     
     t = l/m
     
     new_ecm_position = numpy.array([x(t), y(t), z(t)]).reshape(3,1)
     
     ecm_pose[0:3,0:3] =  r* ecm_pose[0:3,0:3]  
     ecm_pose[0:3,3] = new_ecm_position
     self.add_marker(ecm_pose, '/target_ecm_pose', [0,0,1], Marker.ARROW, scale=[.1,.005,.005])
     output_msg = clean_joints['ecm']
     
     
     p = None
     try:
         p = self.ecm_kin.inverse(ecm_pose)
     except Exception as e:
         rospy.logerr('ECM IK failed: %s' % e)
     if p is not None:
         p[3] = 0
         output_msg.position = p
     return output_msg
Example #9
    def register_ellipse(self, mode, side):
        reg_e_params = EllipsoidParams()
        try:
            now = rospy.Time.now()
            self.tfl.waitForTransform("/base_link", "/head_frame", now, rospy.Duration(10.))
            pos, quat = self.tfl.lookupTransform("/head_frame", "/base_frame", now)
            self.head_pose = PoseStamped()
            self.head_pose.header.stamp = now
            self.head_pose.header.frame_id = "/base_link"
            self.head_pose.pose.position = Point(*pos)
            self.head_pose.pose.orientation = Quaternion(*quat)
        except:
            rospy.logwarn("[%s] Head registration not loaded yet." %rospy.get_name())
            return (False, reg_e_params)
        reg_prefix = rospy.get_param("~registration_prefix", "")
        registration_files = rospy.get_param("~registration_files", None)
        if mode not in registration_files:
            rospy.logerr("[%s] Mode not in registration_files parameters" % (rospy.get_name()))
            return (False, reg_e_params)
        try:
            bag_str = reg_prefix + registration_files[mode][side]
            rospy.loginfo("[%s] Loading %s" %(rospy.get_name(), bag_str))
            bag = rosbag.Bag(bag_str, 'r')
            e_params = None
            for topic, msg, ts in bag.read_messages():
                e_params = msg
            assert e_params is not None
            bag.close()
        except:
            rospy.logerr("[%s] Cannot load registration parameters from %s" %(rospy.get_name(), bag_str))
            return (False, reg_e_params)

        head_reg_mat = PoseConv.to_homo_mat(self.head_pose)
        ell_reg = PoseConv.to_homo_mat(Transform(e_params.e_frame.transform.translation,
                                                      e_params.e_frame.transform.rotation))
        reg_e_params.e_frame = PoseConv.to_tf_stamped_msg(head_reg_mat**-1 * ell_reg)
        reg_e_params.e_frame.header.frame_id = self.head_reg_tf.header.frame_id
        reg_e_params.e_frame.child_frame_id = '/ellipse_frame'
        reg_e_params.height = e_params.height
        reg_e_params.E = e_params.E
        self.ell_params_pub.publish(reg_e_params)
        self.ell_frame = reg_e_params.e_frame
        return (True, reg_e_params)
Example #10
 def load_params(self, params):
     kinect_B_head = PoseConv.to_homo_mat(params.e_frame)
     base_B_kinect = self.kin_head.forward(base_link="base_link",
                                           end_link=self.kinect_frame)
     base_B_head = base_B_kinect * kinect_B_head
     self.head_center = PoseConv.to_pose_stamped_msg(
         "/base_link", base_B_head)
     self.ell_space = EllipsoidSpace()
     self.ell_space.load_ell_params(params.E, params.is_oblate,
                                    params.height)
     rospy.loginfo("Loaded ellispoidal parameters.")
Example #11
 def robot_ellipsoidal_pose(self,
                            lat,
                            lon,
                            height,
                            orient_quat,
                            kinect_frame_mat=None):
     if kinect_frame_mat is None:
         kinect_frame_mat = self.get_ell_frame()
     pos, quat = self.ell_space.ellipsoidal_to_pose(lat, lon, height)
     quat_rotated = trans.quaternion_multiply(quat, orient_quat)
     ell_pose_mat = PoseConv.to_homo_mat(pos, quat_rotated)
     return PoseConv.to_pos_rot(kinect_frame_mat * ell_pose_mat)
Example #12
 def create_tool_arrow():
     arrows = MarkerArray()
     color = ColorRGBA(0., 0., 1., 1.)
     for i, param in enumerate(params):
         ell_pos, ell_rot = params[param]
         _, ell_rot_mat = PoseConv.to_pos_rot([0]*3, ell_rot)
         cart_pose = PoseConv.to_homo_mat(ell_space.ellipsoidal_to_pose(*ell_pos))
         cart_pose[:3,:3] = cart_pose[:3,:3] * ell_rot_mat
         arrow = create_arrow_marker(cart_pose, i, color)
         arrow.header.stamp = rospy.Time.now()
         arrows.markers.append(arrow)
     return arrows
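
create_tool_arrow applies a per-marker orientation by converting the stored rotation to a 3x3 matrix (PoseConv.to_pos_rot with a zero translation) and right-multiplying the rotation block of the Cartesian pose. An illustrative sketch of that step, with made-up values:

import numpy as np
from hrl_geom.pose_converter import PoseConv

# Illustrative Cartesian pose and a local orientation offset (Euler angles).
cart_pose = PoseConv.to_homo_mat([0.2, 0.0, 0.1], [0.0, 0.0, 0.0])
ell_rot = [0.0, np.pi / 2, 0.0]

# to_pos_rot with a zero translation is used only to obtain the 3x3 rotation.
_, ell_rot_mat = PoseConv.to_pos_rot([0.0] * 3, ell_rot)
cart_pose[:3, :3] = cart_pose[:3, :3] * ell_rot_mat
print(cart_pose)
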
Example #13
 def ar_sub(self, msg):
     if self.kin_arm is None:
         self.kin_arm = create_joint_kin(base_link="base_link", 
                                         end_link=msg.header.frame_id)
     base_B_camera = self.kin_arm.forward()
     camera_B_tag = PoseConv.to_homo_mat(msg.markers[0].pose.pose) #changed to use Alvar Markers
          
     cur_ar_pose = base_B_camera * camera_B_tag
     # check to see if the tag is in front of the robot
     if cur_ar_pose[0,3] < 0.:
         rospy.logwarn("Tag behind robot: Strange AR toolkit bug!")
         return
     self.cur_ar_pose = cur_ar_pose
     self.ar_pose_updated = True
Example #14
def main():
    rospy.init_node("joint_kinematics")
    import sys

    def usage():
        print("Tests for kdl_parser:\n")
        print("kdl_parser <urdf file>")
        print("\tLoad the URDF from file.")
        print("kdl_parser")
        print("\tLoad the URDF from the parameter server.")
        sys.exit(1)

    if len(sys.argv) > 2:
        usage()
    if len(sys.argv) == 2 and (sys.argv[1] == "-h" or sys.argv[1] == "--help"):
        usage()
    if (len(sys.argv) == 1):
        robot = URDF.from_parameter_server()
    else:
        f = file(sys.argv[1], 'r')
        robot = Robot.from_xml_string(f.read())
        f.close()

    if True:
        import random
        base_link = robot.get_root()
        end_link = robot.link_map.keys()[random.randint(
            0,
            len(robot.link_map) - 1)]
        print("Root link: %s; Random end link: %s" % (base_link, end_link))
        js_kin = JointKinematics(robot, base_link, end_link)
        print("Joint angles:", js_kin.get_joint_angles())
        print("Joint angles (wrapped):", js_kin.get_joint_angles(True))
        print("Joint velocities:", js_kin.get_joint_velocities())
        print("Joint efforts:", js_kin.get_joint_efforts())
        print("Jacobian:", js_kin.jacobian())
        kdl_pose = js_kin.forward()
        print("FK:", kdl_pose)
        print("End effector force:", js_kin.end_effector_force())

        if True:
            import tf
            from hrl_geom.pose_converter import PoseConv
            tf_list = tf.TransformListener()
            rospy.sleep(1)
            t = tf_list.getLatestCommonTime(base_link, end_link)
            tf_pose = PoseConv.to_homo_mat(
                tf_list.lookupTransform(base_link, end_link, t))
            print("Error with TF:", np.linalg.norm(kdl_pose - tf_pose))
Example #15
def main():
    rospy.init_node("joint_kinematics")
    import sys
    def usage():
        print("Tests for kdl_parser:\n")
        print("kdl_parser <urdf file>")
        print("\tLoad the URDF from file.")
        print("kdl_parser")
        print("\tLoad the URDF from the parameter server.")
        sys.exit(1)

    if len(sys.argv) > 2:
        usage()
    if len(sys.argv) == 2 and (sys.argv[1] == "-h" or sys.argv[1] == "--help"):
        usage()
    if (len(sys.argv) == 1):
        robot = URDF.load_from_parameter_server(verbose=False)
    else:
        robot = URDF.load_xml_file(sys.argv[1], verbose=False)

    if True:
        import random
        base_link = robot.get_root()
        end_link = robot.links.keys()[random.randint(0, len(robot.links)-1)]
        print "Root link: %s; Random end link: %s" % (base_link, end_link)
        js_kin = JointKinematics(robot, base_link, end_link)
        print "Joint angles:", js_kin.get_joint_angles()
        print "Joint angles (wrapped):", js_kin.get_joint_angles(True)
        print "Joint velocities:", js_kin.get_joint_velocities()
        print "Joint efforts:", js_kin.get_joint_efforts()
        print "Jacobian:", js_kin.jacobian()
        kdl_pose = js_kin.forward()
        print "FK:", kdl_pose
        print "End effector force:", js_kin.end_effector_force()

        if True:
            import tf 
            from hrl_geom.pose_converter import PoseConv
            tf_list = tf.TransformListener()
            rospy.sleep(1)
            t = tf_list.getLatestCommonTime(base_link, end_link)
            tf_pose = PoseConv.to_homo_mat(tf_list.lookupTransform(base_link, end_link, t))
            print "Error with TF:", np.linalg.norm(kdl_pose - tf_pose)
Example #16
 def _ctrl_state_cb(self, ctrl_state):
     self._save_ep(PoseConv.to_homo_mat(ctrl_state.x_desi_filtered))
     with self.ctrl_state_lock:
         self.ctrl_state_dict["frame"] = ctrl_state.header.frame_id
         self.ctrl_state_dict["x_desi"] = PoseConv.to_pos_rot(ctrl_state.x_desi)
         self.ctrl_state_dict["xd_desi"] = extract_twist(ctrl_state.xd_desi)
         self.ctrl_state_dict["x_act"] = PoseConv.to_pos_rot(ctrl_state.x)
         self.ctrl_state_dict["xd_act"] = extract_twist(ctrl_state.xd)
         self.ctrl_state_dict["x_desi_filt"] = PoseConv.to_pos_rot(
                                                             ctrl_state.x_desi_filtered)
         self.ctrl_state_dict["x_err"] = extract_twist(ctrl_state.x_err)
         self.ctrl_state_dict["tau_pose"] = np.array(ctrl_state.tau_pose)
         self.ctrl_state_dict["tau_posture"] = np.array(ctrl_state.tau_posture)
         self.ctrl_state_dict["tau"] = np.array(ctrl_state.tau)
         self.ctrl_state_dict["F"] = np.array([ctrl_state.F.force.x, 
                                               ctrl_state.F.force.y,
                                               ctrl_state.F.force.z,
                                               ctrl_state.F.torque.x,
                                               ctrl_state.F.torque.y,
                                               ctrl_state.F.torque.z])
Example #17
    def command_absolute_cb(self, msg):
        if self.arm is None:
            rospy.logwarn("[cartesian_manager] Cartesian controller not enabled.")
            return
        self.cart_ctrl.stop_moving(wait=True)
        if msg.header.frame_id == "":
            msg.header.frame_id = "torso_lift_link"
        if self.kin is None or msg.header.frame_id not in self.kin.get_link_names():
            self.kin = create_joint_kin("torso_lift_link", msg.header.frame_id)
        torso_pos_ep, torso_rot_ep = self.arm.get_ep()
        torso_B_ref = self.kin.forward(base_link="torso_lift_link", 
                                       end_link=msg.header.frame_id)
        ref_B_goal = PoseConv.to_homo_mat(msg)
        torso_B_goal = torso_B_ref * ref_B_goal

        change_pos, change_rot = PoseConv.to_pos_rot(torso_B_goal)
        change_pos_xyz = change_pos.T.A[0]
        rospy.loginfo("[cartesian_manager] Command absolute movement." + 
                      str((change_pos_xyz, change_rot)))
        self.cart_ctrl.execute_cart_move((change_pos_xyz, change_rot), ((1, 1, 1), 1), 
                                         velocity=self.velocity, blocking=True)
Example #18
 def get_ell_pose(self, pose):
     torso_B_kinect = self.kin_head.forward(base_link="/torso_lift_link")
     torso_B_ee = PoseConv.to_homo_mat(pose)
     kinect_B_ee = torso_B_kinect**-1 * torso_B_ee
     ell_B_pose = self.get_ell_frame(self.kinect_frame)**-1 * kinect_B_ee
     return self.ell_space.pose_to_ellipsoidal(ell_B_pose)
Example #19
 def __init__(self):
     # Read parameters to configure the node
     tf_publish_rate = read_parameter('~tf_publish_rate', 50.)
     tf_period = 1. / tf_publish_rate if tf_publish_rate > 0 else float(
         'inf')
     parent_frame = read_parameter('~parent_frame', 'world')
     optitrack_frame = read_parameter('~optitrack_frame', 'optitrack')
     rigid_bodies = read_parameter('~rigid_bodies', dict())
     names = []
     ids = []
     for name, value in rigid_bodies.items():
         names.append(name)
         ids.append(value)
     # Setup Publishers
     pose_pub = rospy.Publisher('/optitrack/rigid_bodies',
                                RigidBodyArray,
                                queue_size=3)
     # Setup TF listener and broadcaster
     tf_listener = tf.TransformListener()
     tf_broadcaster = tf.TransformBroadcaster()
     # Connect to the optitrack system
     iface = read_parameter('~iface', 'eth1')
     version = (2, 9, 0, 0)  # the latest SDK version
     optitrack = rx.mkdatasock(ip_address=get_ip_address(iface))
     rospy.loginfo('Successfully connected to optitrack')
     # Get transformation from the world to optitrack frame
     (parent, child) = (parent_frame, optitrack_frame)
     try:
         now = rospy.Time.now() + rospy.Duration(1.0)
         tf_listener.waitForTransform(parent, child, now,
                                      rospy.Duration(5.0))
         (pos, rot) = tf_listener.lookupTransform(parent, child, now)
         wTo = PoseConv.to_homo_mat(pos, rot)
     except (tf.Exception, tf.LookupException, tf.ConnectivityException):
         rospy.logwarn('Failed to get transformation from %r to %r frame' %
                       (parent, child))
         parent_frame = optitrack_frame
         wTo = np.eye(4)
     # Track up to 24 rigid bodies
     prevtime = np.ones(24) * rospy.get_time()
     while not rospy.is_shutdown():
         try:
             data = optitrack.recv(rx.MAX_PACKETSIZE)
         except socket.error:
             if rospy.is_shutdown():  # exit gracefully
                 return
             else:
                 rospy.logwarn('Failed to receive packet from optitrack')
         packet = rx.unpack(data, version=version)
         if type(packet) is rx.SenderData:
             version = packet.natnet_version
             rospy.loginfo('NatNet version received: ' + str(version))
         if type(packet) in [rx.SenderData, rx.ModelDefs, rx.FrameOfData]:
             # Optitrack gives the position of the centroid.
             array_msg = RigidBodyArray()
             for i, rigid_body in enumerate(packet.rigid_bodies):
                 body_id = rigid_body.id
                 pos_opt = np.array(rigid_body.position)
                 rot_opt = np.array(rigid_body.orientation)
                 oTr = PoseConv.to_homo_mat(pos_opt, rot_opt)
                 # Transformation between world frame and the rigid body
                 wTr = np.dot(wTo, oTr)
                 array_msg.header.stamp = rospy.Time.now()
                 array_msg.header.frame_id = parent_frame
                 body_msg = RigidBody()
                 pose = Pose()
                 pose.position = Point(*wTr[:3, 3])
                 pose.orientation = Quaternion(
                     *tr.quaternion_from_matrix(wTr))
                 body_msg.id = body_id
                 body_msg.tracking_valid = rigid_body.tracking_valid
                 body_msg.mrk_mean_error = rigid_body.mrk_mean_error
                 body_msg.pose = pose
                 for marker in rigid_body.markers:
                     # TODO: Should transform the markers as well
                     body_msg.markers.append(Point(*marker))
                 array_msg.bodies.append(body_msg)
                 # Control the publish rate for the TF broadcaster
                 if rigid_body.tracking_valid and (
                         rospy.get_time() - prevtime[body_id] >= tf_period):
                     body_name = 'rigid_body_%d' % (body_id)
                     if body_id in ids:
                         idx = ids.index(body_id)
                         body_name = names[idx]
                     tf_broadcaster.sendTransform(pos_opt, rot_opt,
                                                  rospy.Time.now(),
                                                  body_name,
                                                  optitrack_frame)
                     prevtime[body_id] = rospy.get_time()
             pose_pub.publish(array_msg)
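
Inside the loop, each rigid body's 4x4 matrix is turned back into a geometry_msgs Pose by taking the translation column and tf.transformations.quaternion_from_matrix for the rotation. A minimal standalone version of that conversion (plain numpy array, illustrative transform):

import numpy as np
import tf.transformations as tr
from geometry_msgs.msg import Pose, Point, Quaternion

# Illustrative world_T_rigid_body transform as a 4x4 array.
wTr = tr.euler_matrix(0.0, 0.0, np.pi / 2)
wTr[:3, 3] = [1.0, 2.0, 0.5]

pose = Pose()
pose.position = Point(*wTr[:3, 3])  # translation column
pose.orientation = Quaternion(*tr.quaternion_from_matrix(wTr))  # [x, y, z, w]
print(pose)
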
Example #20
def main():
    if len(sys.argv) < 3:
        print 'grab_cbs_auto cb_config.yaml output_bag.bag'
        return
    rospy.init_node("grab_cbs")

    f = file(sys.argv[1], 'r')
    cb_config = yaml.safe_load(f.read())
    print cb_config
    f.close()
    is_kinect = rospy.get_param("/grab_cbs/is_kinect", True)

    # load cb stuff
    chessboard = ChessboardInfo()
    chessboard.n_cols = cb_config['cols'] # 6
    chessboard.n_rows = cb_config['rows'] # 7
    chessboard.dim = cb_config['dim'] # 0.0258 
    calib = Calibrator([chessboard])
    bridge = CvBridge()

    l = DataListener(is_kinect, bridge, calib)
    tf_list = tf.TransformListener()

    cb_knowns = []
    for j in range(chessboard.n_cols):
        for i in range(chessboard.n_rows):
            cb_knowns.append((chessboard.dim*i, chessboard.dim*j, 0.0))
        
    bag = rosbag.Bag(sys.argv[2], 'w')
    i = 0
    while not rospy.is_shutdown():
        if raw_input("Press enter to take CB, type 'q' to quit: ") == "q":
            break

        tries = 0
        while not rospy.is_shutdown() and tries < 3:
            corners = l.wait_for_new(5.)
            if corners is None:
                print "No corners detected"
                tries += 1
                continue
            corners_2d = np.uint32(np.round(corners)).tolist()
            corners_3d = []
            if is_kinect:
                for x,y,z in read_points(l.cur_pc, field_names=['x', 'y', 'z'], uvs=corners_2d):
                    corners_3d.append((x,y,z))
                frame_id = l.cur_pc.header
            else:
                obj_pts = cv.fromarray(np.array(cb_knowns))
                img_pts = cv.fromarray(np.array(corners))
                K = cv.fromarray(np.reshape(l.cam_info.K,(3,3)))
                D = cv.fromarray(np.array([l.cam_info.D]))
                R_vec = cv.fromarray(np.zeros((3,1)))
                t = cv.fromarray(np.zeros((3,1)))
                cv.FindExtrinsicCameraParams2(obj_pts, img_pts, K, D, R_vec, t)
                R_mat = cv.fromarray(np.zeros((3,3)))
                cv.Rodrigues2(R_vec, R_mat)
                T = PoseConv.to_homo_mat(np.mat(np.asarray(t)).T.A.tolist(), 
                                         np.mat(np.asarray(R_mat)).A.tolist())
                cb_knowns_mat = np.vstack((np.mat(cb_knowns).T, np.mat(np.ones((1, len(cb_knowns))))))
                corners_3d = np.array((T * cb_knowns_mat)[:3,:].T)
                frame_id = l.cur_img.header
            print corners_3d
            if np.any(np.isnan(corners_3d)):
                print "Pointcloud malformed"
                tries += 1
                continue
            now = rospy.Time.now()
            corners_pc = create_cloud_xyz32(frame_id, corners_3d)
            try:
                pose = tf_list.lookupTransform('/base_link', '/ee_link', rospy.Time(0))
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                print "TF screwed up..."
                continue
            bag.write("/pose", PoseConv.to_pose_stamped_msg('/base_link', pose), now)
            bag.write("/pc", corners_pc, now)
            print "Wrote pose/CB to bag file"
            break
        i += 1
    bag.close()
Example #21
    def find_zoom_level(self, msg, cam_info, clean_joints):
        if cam_info is not None:
            T1W = self.psm1_kin.forward(clean_joints['psm1'].position)
            T2W = self.psm2_kin.forward(clean_joints['psm2'].position)
            TEW = self.ecm_kin.forward(clean_joints['ecm'].position)
            TEW_inv = numpy.linalg.inv(TEW)
            T1W_inv = numpy.linalg.inv(T1W)
            T2W_inv = numpy.linalg.inv(T2W)
            
            mid_point = (T1W[0:4,3] + T2W[0:4,3])/2
            p1 = T1W[0:4,3]
            p2 = T2W[0:4,3]
            
            T2E = TEW_inv * T2W
    
    #         ig = image_geometry.PinholeCameraModel()
            ig = image_geometry.StereoCameraModel()
            
            ig.fromCameraInfo(cam_info['right'], cam_info['left'])
            
            # Format in fakecam.launch:  x y z  yaw pitch roll [fixed-axis rotations: x(roll),y(pitch),z(yaw)]
            # Format for PoseConv.to_homo_mat:  (x,y,z)  (roll, pitch, yaw) [fixed-axis rotations: x(roll),y(pitch),z(yaw)]
            r = PoseConv.to_homo_mat( [ (0.0, 0.0, 0.0), (0.0, 0.0, 1.57079632679) ])
            r_inv = numpy.linalg.inv(r);
            
#             r = numpy.linalg.inv(r)
            self.logerror( r.__str__())
            
#             rotate_vector = lambda x: (r * numpy.array([ [x[0]], [x[1]], [x[2]], [1] ]) )[0:3,3]
             
            self.add_marker(T2W, '/left_arm', scale=[.002,.002,.002])
            
            
            l1, r1 = ig.project3dToPixel( ( r_inv * TEW_inv * T1W )[0:3,3]) # tool1 left and right pixel positions
            l2, r2 = ig.project3dToPixel( ( r_inv * TEW_inv * T2W )[0:3,3]) # tool2 left and right pixel positions
            lm, rm = ig.project3dToPixel( ( r_inv * TEW_inv * mid_point)[0:3,0]) # midpoint left and right pixel positions
    #         add_100 = lambda x : (x[0] *.5 + cam_info.width/2, x[1])
    #         l1 = add_100(l1)
    #         l2 = add_100(l2)
    #         lm = add_100(lm)
    
            
            self.zoom_level_positions = {'l1':l1, 'r1':r1, 'l2':l2, 'r2':r2, 'lm':lm, 'rm':rm}    
            
            test1_l, test1_r = ig.project3dToPixel( [1,0,0])
            test2_l, test2_r = ig.project3dToPixel( [0,0,1])
            self.logerror('\ntest1_l = ' + test1_l.__str__() + '\ntest2_l = ' + test2_l.__str__() )
    #         logerror('xm=%f,'%lm[0] +  'ym=%f'%lm[1])
            
            msg.position[3] = 0
    #         zoom_percentage = zoom_fitness(cam_info=cam_info, mid_point=[xm, ym], inner_margin=.20,
    #                                         deadzone_margin= .70, tool_point= [x1,y1])
            
            zoom_percentage = self.zoom_fitness2(cam_info['left'], mid_point=lm, tool_point=l1, 
                                            tool_point2=l2, radius=.1, deadzone_radius=.01)
            msg.position[2] =  msg.position[2] + zoom_percentage 
            if msg.position[2] < 0 : # minimum 0
                msg.position[2] = 0.00
            elif msg.position[2] > .21: # maximum .23
                msg.position[2] = .21
        return msg   
Example #22
    def __init__(self):
        # Read parameters to configure the node
        tf_publish_rate = read_parameter('~tf_publish_rate', 50.)
        tf_period = 1. / tf_publish_rate if tf_publish_rate > 0 else float(
            'inf')
        parent_frame = read_parameter('~parent_frame', 'world')
        optitrack_frame = read_parameter('~optitrack_frame', 'optitrack')
        rigid_bodies = read_parameter('~rigid_bodies', dict())
        names = []
        ids = []
        for name, value in rigid_bodies.items():
            names.append(name)
            ids.append(value)
        # Setup Publishers
        pose_pub = rospy.Publisher('/optitrack/rigid_bodies',
                                   RigidBodyArray,
                                   queue_size=3)
        # Setup TF listener and broadcaster
        tf_listener = tf.TransformListener()
        tf_broadcaster = tf.TransformBroadcaster()
        # Connect to the optitrack system
        iface = read_parameter('~iface', 'eth0')  #('~iface', 'eth1')
        version = (2, 7, 0, 0)  # the latest SDK version
        #IPython.embed()
        #optitrack = rx.mkdatasock(ip_address=get_ip_address(iface))
        #Modified by Nikhil
        # optitrack = rx.mkdatasock(ip_address=iface)
        optitrack = rx.mkcmdsock(ip_address=iface)
        msg = struct.pack("I", rx.NAT_PING)
        server_address = '10.0.0.1'  #'192.168.1.205'
        result = optitrack.sendto(msg, (server_address, rx.PORT_COMMAND))

        rospy.loginfo('Successfully connected to optitrack')
        # Get transformation from the world to optitrack frame
        (parent, child) = (parent_frame, optitrack_frame)
        try:
            now = rospy.Time.now() + rospy.Duration(1.0)
            tf_listener.waitForTransform(parent, child, now,
                                         rospy.Duration(5.0))
            (pos, rot) = tf_listener.lookupTransform(parent, child, now)
            wTo = PoseConv.to_homo_mat(pos, rot)
        except (tf.Exception, tf.LookupException, tf.ConnectivityException):
            rospy.logwarn('Failed to get transformation from %r to %r frame' %
                          (parent, child))
            parent_frame = optitrack_frame
            wTo = np.eye(4)
        # Track up to 24 rigid bodies
        prevtime = np.ones(24) * rospy.get_time()
        # IPython.embed()
        rospy.loginfo('Successfully got transformation')

        while not rospy.is_shutdown():
            try:
                data = optitrack.recv(rx.MAX_PACKETSIZE)
                # data, address = optitrack.recvfrom(rx.MAX_PACKETSIZE + 4)
            except socket.error:
                if rospy.is_shutdown():  # exit gracefully
                    return
                else:
                    rospy.logwarn('Failed to receive packet from optitrack')
            msgtype, packet = rx.unpack(data, version=version)

            if type(packet) is rx.SenderData:
                version = packet.natnet_version
                rospy.loginfo('NatNet version received: ' + str(version))
            if type(packet) in [rx.SenderData, rx.ModelDefs, rx.FrameOfData]:
                # Optitrack gives the position of the centroid.
                array_msg = RigidBodyArray()
                # IPython.embed()
                if msgtype == rx.NAT_FRAMEOFDATA:
                    # IPython.embed()
                    for i, rigid_body in enumerate(packet.rigid_bodies):
                        body_id = rigid_body.id
                        pos_opt = np.array(rigid_body.position)
                        rot_opt = np.array(rigid_body.orientation)
                        oTr = PoseConv.to_homo_mat(pos_opt, rot_opt)
                        # Transformation between world frame and the rigid body
                        wTr = np.dot(wTo, oTr)
                        array_msg.header.stamp = rospy.Time.now()
                        array_msg.header.frame_id = parent_frame
                        body_msg = RigidBody()
                        pose = Pose()
                        pose.position = Point(*wTr[:3, 3])
                        pose.orientation = Quaternion(
                            *tr.quaternion_from_matrix(wTr))
                        body_msg.id = body_id
                        body_msg.tracking_valid = rigid_body.tracking_valid
                        body_msg.mrk_mean_error = rigid_body.mrk_mean_error
                        body_msg.pose = pose
                        for marker in rigid_body.markers:
                            # TODO: Should transform the markers as well
                            body_msg.markers.append(Point(*marker))
                        array_msg.bodies.append(body_msg)
                        # Control the publish rate for the TF broadcaster
                        if rigid_body.tracking_valid and (
                                rospy.get_time() - prevtime[body_id] >=
                                tf_period):
                            body_name = 'rigid_body_%d' % (body_id)
                            if body_id in ids:
                                idx = ids.index(body_id)
                                body_name = names[idx]
                            tf_broadcaster.sendTransform(
                                pos_opt, rot_opt, rospy.Time.now(), body_name,
                                optitrack_frame)
                            prevtime[body_id] = rospy.get_time()

                logFile = open("optitrack_position", "a")
                strX = str(array_msg.bodies[0].pose.position.x).decode('utf-8')
                strX = strX[2:-2]
                strY = str(array_msg.bodies[0].pose.position.y).decode('utf-8')
                strY = strY[2:-2]
                strZ = str(array_msg.bodies[0].pose.position.z).decode('utf-8')
                strZ = strZ[2:-2]
                logFile.write(strX)
                logFile.write(", ")
                logFile.write(strY)
                logFile.write(", ")
                logFile.write(strZ)
                logFile.write("\n")
                logFile.close()
                pose_pub.publish(array_msg)

                try:
                    fltX = float(strX)
                    fltY = float(strY)
                    fltZ = float(strZ)
                    rospy.loginfo("x: %.4lf | y: %.4lf | z: %.4lf" %
                                  (fltX, fltY, fltZ))
                except:
                    rospy.logwarn('didn\'t read correctly')

                logFile = open("optitrack_orientation", "a")
                q1 = str(
                    array_msg.bodies[0].pose.orientation.x).decode('utf-8')
                q1 = q1[2:-2]
                q2 = str(
                    array_msg.bodies[0].pose.orientation.y).decode('utf-8')
                q2 = q2[2:-2]
                q3 = str(
                    array_msg.bodies[0].pose.orientation.z).decode('utf-8')
                q3 = q3[2:-2]
                q4 = str(
                    array_msg.bodies[0].pose.orientation.w).decode('utf-8')
                q4 = q4[2:-2]
                logFile.write(q1)
                logFile.write(", ")
                logFile.write(q2)
                logFile.write(", ")
                logFile.write(q3)
                logFile.write(", ")
                logFile.write(q4)
                logFile.write("\n")
                logFile.close()

                pose_pub.publish(array_msg)
Example #23
 def get_ell_pose(self, pose):
     torso_B_kinect = self.kin_head.forward(base_link="/torso_lift_link")
     torso_B_ee = PoseConv.to_homo_mat(pose)
     kinect_B_ee = torso_B_kinect**-1 * torso_B_ee
     ell_B_pose = self.get_ell_frame(self.kinect_frame)**-1 * kinect_B_ee
     return self.ell_space.pose_to_ellipsoidal(ell_B_pose)
Example #24
    def __init__(self):
        # Read parameters to configure the node
        tf_publish_rate = read_parameter('~tf_publish_rate', 50.)
        tf_period = 1. / tf_publish_rate if tf_publish_rate > 0 else float(
            'inf')
        parent_frame = read_parameter('~parent_frame', 'world')
        optitrack_frame = read_parameter('~optitrack_frame', 'optitrack')
        rigid_bodies = read_parameter('~rigid_bodies', dict())
        names = []
        ids = []
        for name, value in rigid_bodies.items():
            names.append(name)
            ids.append(value)
        # Setup Publishers
        pose_pub = rospy.Publisher('/optitrack/rigid_bodies',
                                   RigidBodyArray,
                                   queue_size=3)
        # Setup TF listener and broadcaster
        tf_listener = tf.TransformListener()
        tf_broadcaster = tf.TransformBroadcaster()
        # Connect to the optitrack system
        iface = read_parameter('~iface', 'eth1')
        version = (2, 7, 0, 0)  # the latest SDK version
        #IPython.embed()
        #optitrack = rx.mkdatasock(ip_address=get_ip_address(iface))
        #Modified by Nikhil
        # optitrack = rx.mkdatasock(ip_address=iface)
        optitrack = rx.mkcmdsock(ip_address=iface)
        msg = struct.pack("I", rx.NAT_PING)
        server_address = '192.168.1.205'
        result = optitrack.sendto(msg, (server_address, rx.PORT_COMMAND))

        rospy.loginfo('Successfully connected to optitrack')
        # Get transformation from the world to optitrack frame
        (parent, child) = (parent_frame, optitrack_frame)
        try:
            now = rospy.Time.now() + rospy.Duration(1.0)
            tf_listener.waitForTransform(parent, child, now,
                                         rospy.Duration(5.0))
            (pos, rot) = tf_listener.lookupTransform(parent, child, now)
            wTo = PoseConv.to_homo_mat(pos, rot)  # return 4x4 numpy mat
        except (tf.Exception, tf.LookupException, tf.ConnectivityException):
            rospy.logwarn('Failed to get transformation from %r to %r frame' %
                          (parent, child))
            parent_frame = optitrack_frame
            wTo = np.eye(4)
        # Track up to 24 rigid bodies
        prevtime = np.ones(24) * rospy.get_time()
        # IPython.embed()
        while not rospy.is_shutdown():
            try:
                # data = optitrack.recv(rx.MAX_PACKETSIZE)
                data, address = optitrack.recvfrom(rx.MAX_PACKETSIZE + 4)
            except socket.error:
                if rospy.is_shutdown():  # exit gracefully
                    return
                else:
                    rospy.logwarn('Failed to receive packet from optitrack')
            msgtype, packet = rx.unpack(data, version=version)
            if type(packet) is rx.SenderData:
                version = packet.natnet_version
                rospy.loginfo('NatNet version received: ' + str(version))
            if type(packet) in [rx.SenderData, rx.ModelDefs, rx.FrameOfData]:
                # Optitrack gives the position of the centroid.
                array_msg = RigidBodyArray()
                # IPython.embed()
                if msgtype == rx.NAT_FRAMEOFDATA:
                    # IPython.embed()
                    for i, rigid_body in enumerate(packet.rigid_bodies):
                        body_id = rigid_body.id
                        pos_opt = np.array(rigid_body.position)
                        rot_opt = np.array(rigid_body.orientation)
                        # IPython.embed()
                        oTr = PoseConv.to_homo_mat(pos_opt, rot_opt)
                        # Transformation between world frame and the rigid body
                        WTr = np.dot(wTo, oTr)
                        wTW = np.array([[0, -1, 0, 0], [1, 0, 0, 0],
                                        [0, 0, 1, 0], [0, 0, 0, 1]])
                        wTr = np.dot(wTW, WTr)

                        ## New Change ##
                        # Toffset = np.array( [[0, 1, 0, 0],[0, 0, 1, 0],[1, 0, 0, 0],[0, 0, 0, 1]] )
                        # tf_wTr = np.dot(oTr,Toffset)
                        # tf_pose = Pose()
                        # tf_pose.position = Point(*tf_wTr[:3,3])
                        # tf_pose.orientation = Quaternion(*tr.quaternion_from_matrix(tf_wTr))

                        # IPython.embed()

                        array_msg.header.stamp = rospy.Time.now()
                        array_msg.header.frame_id = parent_frame
                        body_msg = RigidBody()
                        pose = Pose()
                        pose.position = Point(*wTr[:3, 3])
                        # OTr = np.dot(oTr,Toffset)
                        # pose.orientation = Quaternion(*tr.quaternion_from_matrix(oTr))

                        # change 26 Feb. 2017
                        # get the euler angle we want then compute the new quaternion
                        euler = tr.euler_from_quaternion(rot_opt)
                        quaternion = tr.quaternion_from_euler(
                            euler[2], euler[0], euler[1])
                        pose.orientation.x = quaternion[0]
                        pose.orientation.y = quaternion[1]
                        pose.orientation.z = quaternion[2]
                        pose.orientation.w = quaternion[3]

                        body_msg.id = body_id
                        body_msg.tracking_valid = rigid_body.tracking_valid
                        body_msg.mrk_mean_error = rigid_body.mrk_mean_error
                        body_msg.pose = pose
                        for marker in rigid_body.markers:
                            # TODO: Should transform the markers as well
                            body_msg.markers.append(Point(*marker))
                        array_msg.bodies.append(body_msg)
                        # Control the publish rate for the TF broadcaster
                        if rigid_body.tracking_valid and (
                                rospy.get_time() - prevtime[body_id] >=
                                tf_period):
                            body_name = 'rigid_body_%d' % (body_id)
                            if body_id in ids:
                                idx = ids.index(body_id)
                                body_name = names[idx]
                            ## no change ##
                            # tf_broadcaster.sendTransform(pos_opt, rot_opt, rospy.Time.now(), body_name, optitrack_frame)
                            # change 1 ##
                            # pos2 = np.array([tf_pose.position.x, tf_pose.position.y, tf_pose.position.z])
                            # rot2 = np.array([tf_pose.orientation.x, tf_pose.orientation.y, tf_pose.orientation.z, tf_pose.orientation.w])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 2 ## <fail>
                            # pos2 = np.array([-pose.position.y, pose.position.x, pose.position.z])
                            # rot2 = np.array([-pose.orientation.y, pose.orientation.x, pose.orientation.z, pose.orientation.w])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 3 ## <fail>
                            # pos2 = np.array([-pos_opt[1],pos_opt[0],pos_opt[2]])
                            # rot2 = np.array([-rot_opt[1],rot_opt[0],rot_opt[2],rot_opt[3]])
                            # tf_broadcaster.sendTransform(pos2, rot2, rospy.Time.now(), body_name, optitrack_frame)
                            ## change 4 ## <>
                            pos2 = np.array([
                                pose.position.x, pose.position.y,
                                pose.position.z
                            ])
                            rot2 = np.array([
                                pose.orientation.x, pose.orientation.y,
                                pose.orientation.z, pose.orientation.w
                            ])
                            tf_broadcaster.sendTransform(
                                pos2, rot2, rospy.Time.now(), body_name,
                                parent_frame)

                            prevtime[body_id] = rospy.get_time()

                pose_pub.publish(array_msg)
Example #25
 def get_ell_frame(self, frame="/torso_lift_link"):
     # find the current ellipsoid frame location in this frame
     base_B_head = PoseConv.to_homo_mat(self.head_center)
     target_B_base = self.kin_head.forward(end_link=frame)
     return target_B_base**-1 * base_B_head
Example #26
 def get_ell_frame(self, frame="/torso_lift_link"):
     # find the current ellipsoid frame location in this frame
     base_B_head = PoseConv.to_homo_mat(self.head_center)
     target_B_base = self.kin_head.forward(end_link=frame)
     return target_B_base**-1 * base_B_head
Example #27
def main():
    if True:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
        d1, a2, a3, d4, d5, d6 = [
            0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922
        ]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi / 2, 0, 0, pi / 2, -pi / 2, 0]
        kin = RAVEKinematics()
        rospy.init_node("test_ur_ik")
        start_time = rospy.get_time()
        n = 0
        while not rospy.is_shutdown():
            q = (np.random.rand(6) - .5) * 4 * pi
            x1 = kin.forward(q)
            pos, euler = PoseConv.to_pos_euler(x1)
            m = np.random.randint(-4, 5)
            euler = [euler[0], m * np.pi / 2 + 0., euler[2]]
            #euler = [euler[0], 0.*np.pi/2 + m*np.pi, euler[2]]
            T = PoseConv.to_homo_mat(pos, euler)
            #q[4] = 0.
            T = kin.forward(q)
            sols = inverse_kin(T, a, d, l)
            print m, len(sols)
            if False and len(sols) == 0:
                print 'wuh', T
                sols = inverse_kin(T, a, d, l, True)

            if True:
                for qsol in sols:
                    #Tsol, _ = forward_kin(qsol, a, d, l)
                    Tsol = kin.forward(qsol)
                    #print qsol
                    #print q
                    #print Tsol
                    #print T
                    diff = Tsol**-1 * T
                    ang, _, _ = mat_to_ang_axis_point(diff)
                    dist = np.linalg.norm(diff[:3, 3])
                    #print ang, dist
                    if abs(dist) > 1e-5 or abs(ang) > 1e-5:
                        print 'BAD'
                    else:
                        pass
                        #print 'GOOD'
            n += 1
        time_diff = rospy.get_time() - start_time
        print time_diff, n, n / time_diff

    if False:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
        d1, a2, a3, d4, d5, d6 = [
            0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922
        ]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi / 2, 0, 0, pi / 2, -pi / 2, 0]
        kin = RAVEKinematics()
        rospy.init_node("test_ur_ik")
        start_time = rospy.get_time()
        n = 0
        while not rospy.is_shutdown():
            q = (np.random.rand(6) - .5) * 4 * pi
            T = kin.forward(q)
            sols = inverse_kin(T, a, d, l)
            #print len(sols)
            if False:
                print len(sols)
                for qsol in sols:
                    #Tsol, _ = forward_kin(qsol, a, d, l)
                    Tsol = kin.forward(qsol)
                    diff = Tsol**-1 * T
                    ang, _, _ = mat_to_ang_axis_point(diff)
                    dist = np.linalg.norm(diff[:3, 3])
                    #print ang, dist
                    if abs(dist) > 1e-8 or abs(ang) > 1e-8:
                        print 'BAD'
            n += 1
        time_diff = rospy.get_time() - start_time
        print time_diff, n, n / time_diff

    if False:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
        q = (np.random.rand(6) - .5) * 4 * pi
        d1, a2, a3, d4, d5, d6 = [
            0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922
        ]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi / 2, 0, 0, pi / 2, -pi / 2, 0]
        kin = RAVEKinematics()
        T = kin.forward(q)
        print T
        print forward_kin(q, a, d, l)
        print q
        sols = inverse_kin(T, a, d, l)
        for qsol in sols:
            Tsol, _ = forward_kin(qsol, a, d, l)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3, 3])
            if False:
                if abs(dist) > 1e-6:
                    print '-' * 80
                else:
                    print '+' * 80
                print 'T', T
                print 'qsol', qsol
                print 'q234', np.sum(qsol[1:4])
                print 'q5', qsol[4]
                print '-sin(q2 + q3 + q4)*sin(q5)', -sin(qsol[1] + qsol[2] +
                                                         qsol[3]) * sin(
                                                             qsol[4])
                print 'z3', T[2, 0]
                if abs(dist) > 1e-6:
                    print '-' * 80
                else:
                    print '+' * 80
            print ang, dist
        print np.sort(sols, 0)
        print len(sols)
        #unique_sols = np.array(sols)[np.where(np.hstack(([True], np.sum(np.diff(np.sort(sols,0), 1, 0),1) > 0.000)))[0]]
        #print unique_sols
        #print len(unique_sols)
        #print len(np.hstack(([True], np.sum(np.diff(np.sort(sols,0), 1, 0),1) > 0.000)))
        for qsol in sols:
            #Tsol, _ = forward_kin(qsol, a, d, l)
            Tsol = kin.forward(qsol)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3, 3])
            print ang, dist
            if abs(dist) > 1e-8:
                print 'BAD'

        kin.robot.SetDOFValues(np.array([0.] * 6))
        rave_sols = kin.manip.FindIKSolutions(T.A, kin.ik_options)
        rave_list = []
        for qsol in rave_sols:
            rave_list.append(np.array(qsol))
            #Tsol, _ = forward_kin(qsol, a, d, l)
            Tsol = kin.forward(qsol)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3, 3])
            print ang, dist
            if abs(dist) > 1e-8:
                print 'BAD'
        print np.sort(rave_list, 0)
        #print diff
        #print q
        #print qsol
        #print '-'*80

    if False:
        q = (np.random.rand(3) - 0.5) * 4. * np.pi
        T04 = forward_rrr(q)
        print T04
        qs = inverse_rrr(T04)
        print qs
        print T04**-1 * forward_rrr(qs[0])
        print T04**-1 * forward_rrr(qs[1])
Exemplo n.º 28
#!/usr/bin/python
import numpy as np
import roslib
roslib.load_manifest("hrl_geom")
import rospy
from hrl_geom.pose_converter import PoseConv

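# Round-trip a single pose through several PoseConv representations
# (homogeneous matrix -> Pose msg -> pos/quat -> PoseStamped ->
# TransformStamped -> pos/rotation-matrix) and print each intermediate form.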
if __name__ == "__main__":
    rospy.init_node("test_poseconv")
    homo_mat = PoseConv.to_homo_mat([0., 1., 2.], [0., 0., np.pi/2])
    pose_msg = PoseConv.to_pose_msg(homo_mat)
    pos, quat = PoseConv.to_pos_quat(pose_msg)
    pose_stamped_msg = PoseConv.to_pose_stamped_msg("/base_link", pos, quat)
    pose_stamped_msg2 = PoseConv.to_pose_stamped_msg("/base_link", [pos, quat])
    tf_stamped = PoseConv.to_tf_stamped_msg("/new_link", pose_stamped_msg)
    p, R = PoseConv.to_pos_rot("/new_link", tf_stamped)
    for name in ["homo_mat", "pose_msg", "pos", "quat", "pose_stamped_msg", 
                 "pose_stamped_msg2", "tf_stamped", "p", "R"]:
        print "%s: \n" % name, locals()[name]
Exemplo n.º 29
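# NOTE: this excerpt assumes numpy (np), rospy, pi, sin, PoseConv, plus the
# RAVEKinematics, forward_kin, inverse_kin, mat_to_ang_axis_point, forward_rrr
# and inverse_rrr helpers defined elsewhere in the same module; the
# corresponding imports are not part of this snippet.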
def main():
    if True:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
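        # DH constants for the arm under test; these values appear to match the
        # standard UR10 parameters (assumption -- not stated in this snippet).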
        d1, a2, a3, d4, d5, d6 = [0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi/2, 0, 0, pi/2, -pi/2, 0]
        kin = RAVEKinematics()
        rospy.init_node("test_ur_ik")
        start_time = rospy.get_time()
        n = 0
        while not rospy.is_shutdown():
            q = (np.random.rand(6)-.5)*4*pi
            x1 = kin.forward(q)
            pos, euler = PoseConv.to_pos_euler(x1)
            m = np.random.randint(-4,5)
            euler = [euler[0], m*np.pi/2 + 0., euler[2]]
            #euler = [euler[0], 0.*np.pi/2 + m*np.pi, euler[2]]
            T = PoseConv.to_homo_mat(pos, euler)
            #q[4] = 0.
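            # NOTE: this overwrites the snapped-orientation pose T built above,
            # so the IK check below actually runs on the exact FK pose of q.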
            T = kin.forward(q)
            sols = inverse_kin(T,a,d,l)
            print m, len(sols)
            if False and len(sols) == 0:
                print 'wuh', T
                sols = inverse_kin(T,a,d,l,True)
                
            if True:
                for qsol in sols:
                    #Tsol, _ = forward_kin(qsol, a, d, l)
                    Tsol = kin.forward(qsol)
                    #print qsol
                    #print q
                    #print Tsol
                    #print T
                    diff = Tsol**-1 * T
                    ang, _, _ = mat_to_ang_axis_point(diff)
                    dist = np.linalg.norm(diff[:3,3])
                    #print ang, dist
                    if abs(dist) > 1e-5 or abs(ang) > 1e-5:
                        print 'BAD'
                    else:
                        pass
                        #print 'GOOD'
            n += 1
        time_diff = rospy.get_time() - start_time
        print time_diff, n, n/time_diff

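    # Disabled variants of the test above: an un-snapped timing loop, a
    # single-pose solution dump checked against OpenRAVE's FindIKSolutions,
    # and an RRR-chain forward/inverse sanity check.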
    if False:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
        d1, a2, a3, d4, d5, d6 = [0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi/2, 0, 0, pi/2, -pi/2, 0]
        kin = RAVEKinematics()
        rospy.init_node("test_ur_ik")
        start_time = rospy.get_time()
        n = 0
        while not rospy.is_shutdown():
            q = (np.random.rand(6)-.5)*4*pi
            T = kin.forward(q)
            sols = inverse_kin(T,a,d,l)
            #print len(sols)
            if False:
                print len(sols)
                for qsol in sols:
                    #Tsol, _ = forward_kin(qsol, a, d, l)
                    Tsol = kin.forward(qsol)
                    diff = Tsol**-1 * T
                    ang, _, _ = mat_to_ang_axis_point(diff)
                    dist = np.linalg.norm(diff[:3,3])
                    #print ang, dist
                    if abs(dist) > 1e-8 or abs(ang) > 1e-8:
                        print 'BAD'
            n += 1
        time_diff = rospy.get_time() - start_time
        print time_diff, n, n/time_diff
        
    if False:
        #q = [ 4.07545758,  5.71643082, -4.57552159, -2.79061482, -3.17069678, 1.42865389]
        q = (np.random.rand(6)-.5)*4*pi
        d1, a2, a3, d4, d5, d6 = [0.1273, -0.612, -0.5723, 0.163941, 0.1157, 0.0922]
        a = [0, a2, a3, 0, 0, 0]
        d = [d1, 0, 0, d4, d5, d6]
        l = [pi/2, 0, 0, pi/2, -pi/2, 0]
        kin = RAVEKinematics()
        T = kin.forward(q)
        print T
        print forward_kin(q,a,d,l)
        print q
        sols = inverse_kin(T,a,d,l)
        for qsol in sols:
            Tsol, _ = forward_kin(qsol, a, d, l)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3,3])
            if False:
                if abs(dist) > 1e-6:
                    print '-'*80
                else:
                    print '+'*80
                print 'T', T
                print 'qsol', qsol
                print 'q234', np.sum(qsol[1:4])
                print 'q5', qsol[4]
                print '-sin(q2 + q3 + q4)*sin(q5)', -sin(qsol[1] + qsol[2] + qsol[3])*sin(qsol[4])
                print 'z3', T[2,0]
                if abs(dist) > 1e-6:
                    print '-'*80
                else:
                    print '+'*80
            print ang, dist
        print np.sort(sols,0)
        print len(sols)
        #unique_sols = np.array(sols)[np.where(np.hstack(([True], np.sum(np.diff(np.sort(sols,0), 1, 0),1) > 0.000)))[0]]
        #print unique_sols
        #print len(unique_sols)
        #print len(np.hstack(([True], np.sum(np.diff(np.sort(sols,0), 1, 0),1) > 0.000)))
        for qsol in sols:
            #Tsol, _ = forward_kin(qsol, a, d, l)
            Tsol = kin.forward(qsol)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3,3])
            print ang, dist
            if abs(dist) > 1e-8:
                print 'BAD'
        
        kin.robot.SetDOFValues(np.array([0.]*6))
        rave_sols = kin.manip.FindIKSolutions(T.A, kin.ik_options)
        rave_list = []
        for qsol in rave_sols:
            rave_list.append(np.array(qsol))
            #Tsol, _ = forward_kin(qsol, a, d, l)
            Tsol = kin.forward(qsol)
            diff = Tsol**-1 * T
            ang, _, _ = mat_to_ang_axis_point(diff)
            dist = np.linalg.norm(diff[:3,3])
            print ang, dist
            if abs(dist) > 1e-8:
                print 'BAD'
        print np.sort(rave_list,0)
        #print diff
        #print q
        #print qsol
        #print '-'*80

    if False:
        q = (np.random.rand(3)-0.5)*4.*np.pi
        T04 = forward_rrr(q)
        print T04
        qs = inverse_rrr(T04)
        print qs
        print T04**-1 * forward_rrr(qs[0])
        print T04**-1 * forward_rrr(qs[1])
Exemplo n.º 30
#import roslib; roslib.load_manifest("hrl_geom")
from hrl_geom.pose_converter import PoseConv

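# Convert a [position, euler] pair to a Twist msg, then to a homogeneous
# matrix, and finally to a TransformStamped; the expected output of each
# step is shown in the comments following each print.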
pos_euler = [[0.1, 0.2, 0.3], [0.3, 0., 0.]]
twist_msg = PoseConv.to_twist_msg(pos_euler)
print twist_msg
#linear:
#  x: 0.1
#  y: 0.2
#  z: 0.3
#angular:
#  x: 0.3
#  y: 0.0
#  z: 0.0
homo_mat = PoseConv.to_homo_mat(twist_msg)
print homo_mat
#[[ 1.          0.          0.          0.1       ]
# [ 0.          0.95533649 -0.29552021  0.2       ]
# [-0.          0.29552021  0.95533649  0.3       ]
# [ 0.          0.          0.          1.        ]]
[pos, rot] = homo_mat[:3, 3], homo_mat[:3, :3]
tf_stamped_msg = PoseConv.to_tf_stamped_msg("base_link", pos, rot)
print tf_stamped_msg
#header:
#  seq: 0
#  stamp:
#    secs: 1341611677
#    nsecs: 579762935
#  frame_id: base_link
#child_frame_id: ''
#transform: