Esempio n. 1
0
    def laplace_cost_and_grad_pose(self, theta, mu_theta, inv_sigma_theta,
                                   mu_x, inv_sigma_x, mu_ang_euler_des,
                                   inv_sig_euler):
        """Negative log-likelihood of a pose goal and its gradient w.r.t. theta.

        Sums three Gaussian penalties: deviation from a joint-space prior,
        from a Cartesian position target, and from an 'szyz' Euler-angle
        orientation target.

        Args:
            theta: joint configuration vector.
            mu_theta, inv_sigma_theta: joint-space prior mean / precision.
            mu_x, inv_sigma_x: Cartesian position mean / precision.
            mu_ang_euler_des, inv_sig_euler: Euler ('szyz') mean / precision.

        Returns:
            (nll, grad_nll): scalar cost and its gradient vector.
        """
        f_th, T_joint = self.fwd_kin(theta)
        jac_th = self.geometric_jacobian(T_joint, f_th)

        # Map the geometric (angular-velocity) Jacobian to the analytic
        # Euler-rate Jacobian: omega = T(phi, nu) * d(euler)/dt.
        euler_angles = tf_tran.euler_from_matrix(f_th, 'szyz')
        s_phi, s_nu = np.sin(euler_angles[0]), np.sin(euler_angles[1])
        c_phi, c_nu = np.cos(euler_angles[0]), np.cos(euler_angles[1])
        T = np.array([[0, -s_phi, c_phi * s_nu], [0, c_phi, s_phi * s_nu],
                      [1, 0, c_nu]])
        jac_analytic = np.dot(tf_tran.inverse_matrix(T), jac_th[3:6, :])

        pos_th = f_th[0:3, 3]
        jac_pos_th = jac_th[0:3, :]
        diff1 = theta - mu_theta
        tmp1 = np.dot(inv_sigma_theta, diff1)
        diff2 = pos_th - mu_x
        tmp2 = np.dot(inv_sigma_x, diff2)

        # Fix: reuse euler_angles computed above instead of calling
        # euler_from_matrix a second time on the same matrix.
        diff3 = np.array(euler_angles) - np.array(mu_ang_euler_des)
        tmp3 = np.dot(inv_sig_euler, diff3)

        nll = 0.5 * (np.dot(diff1, tmp1) + np.dot(diff2, tmp2)) + 0.5 * np.dot(
            diff3, tmp3)
        grad_nll = tmp1 + np.dot(jac_pos_th.T, tmp2) + np.dot(
            jac_analytic.T, tmp3)
        return nll, grad_nll
Esempio n. 2
0
def main():
    # Extract the tool ('/adl2') and head ('/head') TF trajectories from a
    # recorded rosbag and write each to a text log:
    # "<timestamp> x y z roll pitch yaw" per line.
    rospack = rospkg.RosPack()
    pkg_path = rospack.get_path('hrl_base_selection')
    bag_path = '/home/ari/svn/robot1_data/2011_ijrr_phri/log_data/bag/'
    
    #bag_name = 'sub1_shaver'
    #bag_name = 'sub3_shaver'
    bag_name = 'sub6_shaver'
    

    # Static calibration transforms: translation [m], quaternion [x, y, z, w].
    adl2_B_toolframe = createBMatrix([0.234, 0.041, -0.015], [0, 0, 0, 1])
    toolframe_B_shaver = createBMatrix([0.070, 0., 0.], [0, 0, 0, 1])
    shaver_B_pr2goal = createBMatrix([-0.050, 0., 0.], [0, 0, 0, 1])
    adl2_B_pr2goal = adl2_B_toolframe*toolframe_B_shaver*shaver_B_pr2goal
    head_B_headc = createBMatrix([0.21, 0.02, -0.087], [0, 0, 0, 1])
    print 'Starting on the tool log file! \n'
    # First pass over the bag: shaver (tool) pose in the world frame.
    bag = rosbag.Bag(''.join([bag_path,bag_name,'.bag']), 'r')
    tool = open(''.join([pkg_path,'/data/',bag_name,'_tool.log']), 'w')
    for topic, tf_msg, t in bag.read_messages():
        if topic == "/tf":
            if len(tf_msg.transforms) > 0 and tf_msg.transforms[0].child_frame_id == "/adl2":
                tx = copy.copy(tf_msg.transforms[0].transform.translation.x)
                ty = copy.copy(tf_msg.transforms[0].transform.translation.y)
                tz = copy.copy(tf_msg.transforms[0].transform.translation.z)
                rx = copy.copy(tf_msg.transforms[0].transform.rotation.x)
                ry = copy.copy(tf_msg.transforms[0].transform.rotation.y)
                rz = copy.copy(tf_msg.transforms[0].transform.rotation.z)
                rw = copy.copy(tf_msg.transforms[0].transform.rotation.w)
                # Compose the recorded world->adl2 transform with the static
                # tool calibration chain.
                world_B_shaver = createBMatrix([tx,ty,tz],[rx,ry,rz,rw])*adl2_B_pr2goal
                eul = tft.euler_from_matrix(world_B_shaver,'rxyz')
                #print world_B_shaver,'\n'
                #print eul,'\n'
                #print 'time: \n',t
                tool.write(''.join([str(t),' %f %f %f %f %f %f \n' % (world_B_shaver[0,3],world_B_shaver[1,3],world_B_shaver[2,3],eul[0],eul[1],eul[2])]))
                #tool.write(''.join([world_B_shaver[0,3],' ',world_B_shaver[1,3],' ',world_B_shaver[2,3],' ',eul[0],' ',eul[1],' ',eul[2],'\n']))
    bag.close()
    tool.close()
    print 'Starting on the head log file! \n'
    # Second pass over the same bag: head pose in the world frame.
    bag = rosbag.Bag(''.join([bag_path,bag_name,'.bag']), 'r')
    head = open(''.join([pkg_path,'/data/',bag_name,'_head.log']), 'w')
    for topic, tf_msg, t in bag.read_messages():
        if topic == "/tf":
            if len(tf_msg.transforms) > 0 and tf_msg.transforms[0].child_frame_id == "/head":
                tx = copy.copy(tf_msg.transforms[0].transform.translation.x)
                ty = copy.copy(tf_msg.transforms[0].transform.translation.y)
                tz = copy.copy(tf_msg.transforms[0].transform.translation.z)
                rx = copy.copy(tf_msg.transforms[0].transform.rotation.x)
                ry = copy.copy(tf_msg.transforms[0].transform.rotation.y)
                rz = copy.copy(tf_msg.transforms[0].transform.rotation.z)
                rw = copy.copy(tf_msg.transforms[0].transform.rotation.w)
                world_B_headc = createBMatrix([tx,ty,tz],[rx,ry,rz,rw])*head_B_headc
                eul = copy.copy(tft.euler_from_matrix(world_B_headc,'rxyz'))
                head.write(''.join([str(t),' %f %f %f %f %f %f \n' % (world_B_headc[0,3],world_B_headc[1,3],world_B_headc[2,3],eul[0],eul[1],eul[2])]))
                #head.write(''.join([t,' ',world_B_head[0,3],' ',world_B_head[1,3],' ',world_B_head[2,3],' ',eul[0],' ',eul[1],' ',eul[2],' ',eul[3],'\n']))
    bag.close()
    head.close()

    print "Saved files!"
    def moveItTo2(self, destin):
        """Proportionally drive the robot toward `destin` (a transform with
        .translation and .rotation), queueing it instead when motion is
        disabled.

        NOTE(review): `theta1` and `theta2` are computed identically as
        `atan2(y, x)`; one of them may have been meant to use another axis —
        left unchanged to preserve behavior.
        """
        if self.shouldMove == False:
            self.destin_list2.insert(0, destin)
            return

        delay = rospy.Rate(1.0)

        # Service the oldest queued destination first, queueing the new one.
        if len(self.destin_list2) > 0:
            self.destin_list2.insert(0, destin)
            destin = self.destin_list2.pop()
        while not rospy.is_shutdown():
            try:
                (translation, orientation) = listener.lookupTransform(
                    "/odom", "/base_footprint", rospy.Time(0))
            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException) as e:
                print("EXCEPTION:", e)
                delay.sleep()
                # Bug fix: retry instead of falling through and reading
                # `translation`/`orientation` before assignment (NameError).
                continue
            print("Robot is believed to be at (x,y,z): (", translation[0], ",",
                  translation[1], ",", translation[2], ")")
            r_x, r_y, r_z = transformations.euler_from_matrix(
                transformations.quaternion_matrix(orientation))
            print("Robot's angle with the Z-axis is: ", r_z)
            print("Robot's angle with the Y-axis is: ", r_y)
            print("Robot's angle with the X-axis is: ", r_x)

            print("Destination is believed to be at (x,y,z): (",
                  destin.translation.x, ",", destin.translation.y, ",",
                  destin.translation.z, ")")
            destinrotq = [
                destin.rotation.x, destin.rotation.y, destin.rotation.z,
                destin.rotation.w
            ]
            d_x, d_y, d_z = transformations.euler_from_matrix(
                transformations.quaternion_matrix(destinrotq))
            print("Destination's angle with the Z-axis is: ", d_z)
            print("Destination's angle with the Y-axis is: ", d_y)
            print("Destination's angle with the X-axis is: ", d_x)

            motor_command = Twist()

            # Vector from the robot to the destination.
            x = destin.translation.x - translation[0]
            y = destin.translation.y - translation[1]
            z = destin.translation.z - translation[2]
            theta1 = M.atan2(y, x)
            theta2 = M.atan2(y, x)
            newSpeed = sqrt(x**2 + y**2 + z**2)

            # Drive forward only when roughly facing the destination.
            if abs(theta1) < 0.1 and abs(
                    theta2
            ) < 0.1:  #The robot is not facing the exact destination, but it's close enough. so let's move.
                motor_command.linear.x = newSpeed / 2
            motor_command.angular.z = theta1
            motor_command.angular.y = theta2

            motor_command_publisher.publish(motor_command)
Esempio n. 4
0
 def ep_error(self, ep_actual, ep_desired):
     """Return the 6x1 error between two (position, rotation) end-effector poses."""
     act_pos, act_rot = ep_actual
     des_pos, des_rot = ep_desired
     error = np.mat(np.zeros((6, 1)))
     error[:3, 0] = act_pos - des_pos
     # Orientation error: Euler angles of R_des^T * R_act.
     error[3:6, 0] = np.mat(tf_trans.euler_from_matrix(des_rot.T * act_rot)).T
     return error
def inverse_biased(kin, pose, q_init, q_bias, q_bias_weights, rot_weight=1.0, 
                   bias_vel=0.01, num_iter=100):
    """Damped-least-squares IK iteration biased toward `q_bias`.

    Runs a fixed number of iterations (no convergence test) and returns the
    final joint vector, clipped to safe limits each step.
    """
    # This code is potentially volatile
    q = q_init.copy()
    target_pos = pose[0:3, -1]
    target_rot = pose[0:3, 0:3]
    for _ in range(num_iter):
        fk = np.array(kin.forward(q))
        fk_pos = fk[0:3, -1]
        fk_rot = fk[0:3, 0:3]
        twist = np.array(np.zeros(6))
        twist[:3] = target_pos - fk_pos
        # Orientation error via the homogeneous rotation delta.
        # NOTE(review): '*' here is element-wise on ndarrays — confirm a
        # matrix product was not intended.
        delta_rot = np.eye(4)
        delta_rot[:3, :3] = target_rot * fk_rot.T
        twist[3:6] = np.array(trans.euler_from_matrix(delta_rot))
        J = np.array(kin.jacobian(q))
        # Weight the rotational rows of both sides identically.
        scale = np.sqrt(rot_weight)
        J[3:6, :] *= scale
        twist[3:6] *= scale
        # Damped pseudo-inverse: (J^T J + diag(w))^-1 J^T.
        J_tinv = matmult(np.linalg.inv(matmult(J.T, J) + np.diag(q_bias_weights)), J.T)
        bias_step = (q_bias - q) * bias_vel / np.linalg.norm(q_bias - q)
        dq = bias_step + matmult(J_tinv, (twist - matmult(J, bias_step)))
        q += dq
        q = np.array(kin.clip_joints_safe(q))
    return q
Esempio n. 6
0
    def callback(self, rgb_msg, depth_msg):
        """Handle one synchronized RGB/depth pair: look up the robot pose,
        detect objects in the RGB image, then estimate and save each
        object's coordinates from the depth image."""
        translation, orientation = self.listener.lookupTransform(
            "/odom", "/base_footprint", rospy.Time(0))
        robot_x = translation[0]
        robot_y = translation[1]

        # Only the yaw (z) component of the orientation is used.
        _, _, robot_theta = transformations.euler_from_matrix(
            transformations.quaternion_matrix(orientation))
        print("My position in start of callback is: ", robot_x, robot_y,
              robot_theta)

        # Convert the ROS images to OpenCV matrices.
        rawImage = self.bridge.imgmsg_to_cv2(rgb_msg, "bgr8")
        depthImage = self.bridge.imgmsg_to_cv2(depth_msg, "32FC1")

        labelArray, rectangleInfoArray, pointListArray = self.processRgb(
            rgb_msg, rawImage)
        for i, rect in enumerate(rectangleInfoArray):
            object_coords = self.processDepth(depth_msg, rect, robot_x,
                                              robot_y, robot_theta,
                                              pointListArray[i], depthImage)
            # A leading -1 signals a failed depth estimate; stop processing.
            if object_coords[0] == -1:
                return
            print("I detected a ", labelArray[i], " and its position is ",
                  object_coords, ".")
            self.saveCoordinates(labelArray[i], object_coords)

        return
Esempio n. 7
0
    def predict(self, Delta):
        """Kalman prediction: compose the increment `Delta` with the current
        planar pose and propagate the 3x3 pose covariance."""
        heading = self.X[2, 0]
        # Current pose as a 4x4 homogeneous transform.
        pose = np.mat([
            [cos(heading), -sin(heading), 0, self.X[0, 0]],
            [sin(heading), cos(heading), 0, self.X[1, 0]],
            [0, 0, 1, 0],
            [0, 0, 0, 1],
        ])
        pose = Delta * pose
        self.X[0:2, 0] = pose[0:2, 3:4]
        # Keep only the yaw component of the predicted orientation.
        self.X[2, 0] = euler_from_matrix(pose[0:3, 0:3], 'rxyz')[2]
        # Jacobian of the predicted pose w.r.t. (x, y, theta).
        Jx = np.mat(
            [[1, 0, -sin(heading) * Delta[0, 3] - cos(heading) * Delta[1, 3]],
             [0, 1, cos(heading) * Delta[0, 3] - sin(heading) * Delta[1, 3]],
             [0, 0, 1]])
        # NOTE(review): the angular term reuses position_uncertainty — confirm
        # an angular_uncertainty was not intended (cf. the sibling predict()).
        Qs = np.mat(np.diag([self.position_uncertainty**2] * 3))
        cov = self.P[0:3, 0:3]
        self.P[0:3, 0:3] = Jx * cov * Jx.T + Qs
        assert type(self.X) == np.matrixlib.defmatrix.matrix and type(
            self.P) == np.matrixlib.defmatrix.matrix
        # Force covariance entries non-negative.
        self.P = np.mat(np.abs(self.P))
        return self.X, self.P
Esempio n. 8
0
    def predict(self, Delta):
        """Kalman prediction: apply the odometry increment `Delta` to the
        planar pose state and propagate the covariance."""
        heading = self.X[2, 0]
        # Current pose as a 4x4 homogeneous transform.
        pose = mat([
            [cos(heading), -sin(heading), 0, self.X[0, 0]],
            [sin(heading), cos(heading), 0, self.X[1, 0]],
            [0, 0, 1, 0],
            [0, 0, 0, 1],
        ])
        pose = Delta * pose
        self.X[0:2, 0] = pose[0:2, 3:4]
        # Only the yaw angle is kept from the predicted orientation.
        self.X[2, 0] = euler_from_matrix(pose[0:3, 0:3], 'rxyz')[2]
        # Jacobian of the predicted pose w.r.t. (x, y, theta).
        Jx = mat(
            [[1, 0, -sin(heading) * Delta[0, 3] - cos(heading) * Delta[1, 3]],
             [0, 1, cos(heading) * Delta[0, 3] - sin(heading) * Delta[1, 3]],
             [0, 0, 1]])
        # Process noise: position and angular variances on the diagonal.
        Qs = mat(
            diag([
                self.position_uncertainty**2, self.position_uncertainty**2,
                self.angular_uncertainty**2
            ]))
        cov = self.P[0:3, 0:3]
        self.P[0:3, 0:3] = Jx * cov * Jx.T + Qs
        return (self.X, self.P)
Esempio n. 9
0
    def go_angle(self,
                 target_odom,
                 tolerance=math.radians(5.),
                 max_ang_vel=math.radians(20.),
                 func=None):
        """Rotate in place until the odometry yaw reaches `target_odom` (rad).

        Proportional control on the yaw error, capped at `max_ang_vel`.
        `func`, if given, is called with the current error every cycle.

        Returns:
            The final yaw error (radians).
        """
        self.tl.waitForTransform('base_footprint', 'odom_combined',
                                 rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)  # proportional gain

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('base_footprint', 'odom_combined',
                                       self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3],
                                                    'sxyz')[2]
            diff = ut.standard_rad(current_ang_odom - target_odom)
            p = k * diff
            if func is not None:  # fix: was `func != None`
                func(diff)
            if np.abs(diff) < tolerance:
                break

            tw = gm.Twist()
            # Use the proportional command unless it exceeds the cap.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            self.tw_pub.publish(tw)
            rate.sleep()
        # NOTE(review): `diff` is unbound if the loop body never runs
        # (immediate shutdown) — left unchanged to preserve behavior.
        rospy.loginfo('finished %.3f' % math.degrees(diff))
        return diff
Esempio n. 10
0
def scan2point(laser_msg):
    """Convert a LaserScan into a (2, N) array of Cartesian points.

    Beam ranges are cross-checked against the expected map range
    (judge_scan); beams outside [range_min, 7] m are dropped and only every
    `step`-th beam is kept.

    Raises:
        ValueError: if DEBUG_MODE is neither 'odom' nor 'map' (previously
            this fell through to an unbound-variable NameError).
    """
    global step
    num = len(laser_msg.ranges)
    # Beam angles, evenly spaced over the scan's angular extent.
    index = np.linspace(laser_msg.angle_min, laser_msg.angle_max, num).tolist()

    if DEBUG_MODE == 'odom':
        _R_map, _T_map = robot_loc.odom()
    elif DEBUG_MODE == 'map':
        _R_map, _T_map = robot_loc.map()
    else:
        raise ValueError("unsupported DEBUG_MODE: %r" % (DEBUG_MODE,))
    # Compose with the stored map->odom correction (identical in both
    # branches of the original).
    _R_map = np.dot(robot_loc.R_map_odom_ready, _R_map)
    _T_map += robot_loc.T_map_odom_ready

    # Extract the yaw of the 2x2 rotation via a padded homogeneous matrix.
    _R = np.eye(4)
    _R[:2, :2] = _R_map
    _, _, theta = euler_from_matrix(_R)
    map_range = [map_calc_range(static_map, _T_map[0, 0], _T_map[1, 0],
                                theta + omiga) for omiga in index]
    laser_fix_range = judge_scan(laser_msg.ranges, map_range)

    # Collect valid beams as columns of a (2, N) array (the original grew
    # an np.empty buffer with insert/delete; this is equivalent and clearer).
    cols = []
    for i in range(0, len(index), step):
        r = laser_fix_range[i]
        if laser_msg.range_min <= r <= 7:
            cols.append([r * math.cos(index[i]), r * math.sin(index[i])])
    if cols:
        return np.array(cols).T
    return np.empty((2, 0))
Esempio n. 11
0
 def __rodrigues2euler(self, rvec):
     """Convert an OpenCV Rodrigues rotation vector to 'sxyz' Euler angles.

     The yaw (z) angle is negated to match the caller's sign convention.

     Returns:
         np.ndarray of shape (3, 1).
     """
     R = cv2.Rodrigues(rvec)[0]
     # (Removed an unused 4x4 matrix `T` that was built but never read.)
     euler = np.array(euler_from_matrix(R, "sxyz"))
     euler[2] *= -1  # flip yaw sign
     return euler.reshape((3, 1))
Esempio n. 12
0
def space_from_joints(joint_angles):
    """Forward kinematics: joint angles -> [x, y, z, rz2] task-space vector."""
    frames = direct_kinematics(joint_angles)
    T = tfs.concatenate_matrices(*frames)
    # Only the final 'szyz' angle is kept.
    _, _, rz2 = tfs.euler_from_matrix(T, 'szyz')
    return np.append(tfs.translation_from_matrix(T), rz2)
Esempio n. 13
0
def space_from_joints_small(joint_angles):
    """Forward kinematics for the small arm: returns [x, y, z, rz, ry].

    Note the Euler angles are appended in (rz, ry) order.
    """
    T01, T12, T23, T34, T45, T5e, Tec, Tem = direct_kinematics(joint_angles)
    T = tfs.concatenate_matrices(T01, T12, T23, T34, T45, T5e, Tem)
    rx, ry, rz = tfs.euler_from_matrix(T, 'sxyz')
    trans = tfs.translation_from_matrix(T)
    # Bug fix: `axis=1` is invalid for 1-D inputs on modern NumPy (raises
    # AxisError); older NumPy silently ignored it. The default axis keeps the
    # intended 1-D concatenation.
    S = np.concatenate((trans, [rz, ry]))
    return S
Esempio n. 14
0
def JS_to_PrPlRrl(q, urdf_path=("/home/josh/catkin_ws/src/baxter_common/"
                                "baxter_description/urdf/baxter.urdf")):
    """Map a 14-DOF Baxter joint vector to a task-space parameterization.

    q[0:7] are the left-arm joints, q[7:14] the right-arm joints.

    Args:
        q: 14-element joint vector.
        urdf_path: path to the robot URDF. Previously hard-coded; the default
            preserves the original behavior.

    Returns:
        13x1 np.ndarray: left gripper xyz, right gripper xyz, right->left
        relative roll/pitch/yaw, and four selected rotation-matrix entries.
    """
    robot = URDF.from_xml_file(urdf_path)

    kdl_kin_left = KDLKinematics(robot, "base", "left_gripper")
    kdl_kin_right = KDLKinematics(robot, "base", "right_gripper")

    T_left = kdl_kin_left.forward(q[0:7])
    R_left = np.array(T_left[:3, :3])

    T_right = kdl_kin_right.forward(q[7:14])
    R_right = np.array(T_right[:3, :3])

    x_left = T_left[0, 3]
    y_left = T_left[1, 3]
    z_left = T_left[2, 3]

    x_right = T_right[0, 3]
    y_right = T_right[1, 3]
    z_right = T_right[2, 3]

    # Relative orientation of the left gripper in the right-gripper frame.
    R_rl = np.dot(np.transpose(R_right), R_left)

    roll, pitch, yaw = euler_from_matrix(R_rl, 'sxyz')

    P = np.array([[x_left], [y_left], [z_left], [x_right], [y_right],
                  [z_right], [roll], [pitch], [yaw], [R_right[0, 0]],
                  [R_left[0, 1]], [R_right[1, 0]], [R_left[1, 1]]])

    return P
Esempio n. 15
0
    def get_tf(self, pose_tag_to_camera_link, quat_tag_to_camera_link,
               april_tag_id):
        """Compute the (x, y, yaw) pose of the base link in the map frame.

        Args:
            pose_tag_to_camera_link: np(3,); translation from tag to camera link.
            quat_tag_to_camera_link: np(4,); quaternion from tag to camera link.
            april_tag_id: int; apriltag id (keys self.apriltag_transform_dict).

        Returns:
            np(3,): [x, y, yaw] of the base link in the map frame.
        """
        # Homogeneous tag -> camera_link transform = translation . rotation.
        T_translate = np.eye(4)
        T_translate[:3, 3] = pose_tag_to_camera_link
        T_rotate = tfm.quaternion_matrix(quat_tag_to_camera_link)
        T_tag_to_camera_link = np.dot(T_translate, T_rotate)
        T_camera_link_to_tag = np.linalg.inv(T_tag_to_camera_link)

        # Chain: base_link -> camera_link -> tag -> map.
        T_tag_to_map = self.apriltag_transform_dict[april_tag_id]
        T_base_link_to_map = np.dot(np.dot(T_tag_to_map, T_camera_link_to_tag),
                                    self.T_base_link_to_camera_link)

        yaw = tfm.euler_from_matrix(T_base_link_to_map, axes='sxyz')[2]
        return np.array(
            [T_base_link_to_map[0][3], T_base_link_to_map[1][3], yaw])
Esempio n. 16
0
    def test_euler_XYZ_from_rotation_matrix(self):
        """
        r in 'rxyz' is 'r'otating frames indicating rotations are applied consecutively with respect to current
        frames' axes, "relative-rotation". Order of rotations are first X, second Y and third Z. Rotation matrix
        composition rule for relative rotations: Rx * Ry * Rz.

        s in 'sxyz' is 's'tationary frames indicating rotations are applied with respect to the coordinate frame
        of the INITIAL frame, "fixed-axis rotation". Rotation matrix composition rule for fixed-axis rotations:
        Rz * Ry * Rx.
        """
        # Fix: the original comments claimed 90/45/30 degrees while the code
        # actually uses pi/4, pi/5, pi/6 (45, 36, 30 degrees); the misleading
        # *_90 names are renamed accordingly.
        Rx = tr.rotation_matrix(
            math.pi / 4, [1, 0, 0])  # first, 45 degrees around the x axis
        Ry = tr.rotation_matrix(
            math.pi / 5,
            [0, 1, 0])  # second, 36 degrees around the fixed y axis
        Rz = tr.rotation_matrix(
            math.pi / 6,
            [0, 0, 1])  # third, 30 degrees around the fixed z axis

        # Fixed-axis composition: Rz * Ry * Rx.
        R1 = np.matmul(Rz, Ry)
        R = np.matmul(R1, Rx)

        euler_angles = tr.euler_from_matrix(R, 'sxyz')
        euler_angles_expected = [math.pi / 4, math.pi / 5, math.pi / 6]

        np.testing.assert_almost_equal(euler_angles, euler_angles_expected)
Esempio n. 17
0
def clickedCB(data):
    global askedMarkers
    global listener
    global worldTransform
    global measuredMarkers
    global clickNum
    print 'clicked'
    try:
        (tipTrans, tipRot) = listener.lookupTransform('/mocha_world', '/tip',
                                                      rospy.Time(0))
        measuredMarkers[clickNum, :] = tipTrans

        clickNum = clickNum + 1
        if clickNum == 8:
            clickNum = 0
            print 'finished clicking'
            print 'AskedMarkers:'
            print askedMarkers
            print 'measured markers:'
            print measuredMarkers
            (worldRotupdate,
             worldTransUpdate) = rigid_transform_3D(measuredMarkers,
                                                    askedMarkers)
            worldRot4 = np.identity(4)
            for i in range(3):
                worldRot4[(0, 1, 2), i] = worldRotupdate[:, i]
            worldTransformUpdate = TR.compose_matrix(
                translate=worldTransUpdate,
                angles=TR.euler_from_matrix(worldRot4))
            worldTransformationUpdateInv = TR.inverse_matrix(
                worldTransformUpdate)
            worldTransform = np.dot(worldTransformationUpdateInv,
                                    worldTransform)
    except ():
        nothing = 0
Esempio n. 18
0
 def predict(self, Delta):
     """Kalman prediction: apply the incremental transform `Delta` to the
     planar pose state and propagate the covariance."""
     yaw = self.X[2, 0]
     # Current pose as a 4x4 homogeneous matrix.
     current = np.mat([
         [cos(yaw), -sin(yaw), 0, self.X[0, 0]],
         [sin(yaw), cos(yaw), 0, self.X[1, 0]],
         [0, 0, 1, 0],
         [0, 0, 0, 1],
     ])
     predicted = Delta * current
     self.X[0:2, 0] = predicted[0:2, 3:4]
     # Only the yaw component of the predicted orientation is kept.
     self.X[2, 0] = euler_from_matrix(predicted[0:3, 0:3], 'rxyz')[2]
     # Jacobian of the motion w.r.t. the state (x, y, theta).
     Jx = np.mat(
         [[1, 0, -sin(yaw) * Delta[0, 3] - cos(yaw) * Delta[1, 3]],
          [0, 1, cos(yaw) * Delta[0, 3] - sin(yaw) * Delta[1, 3]],
          [0, 0, 1]])
     # Isotropic process noise on x, y and theta.
     Qs = np.mat(np.diag([self.position_uncertainty**2] * 3))
     prior = self.P[0:3, 0:3]
     self.P[0:3, 0:3] = Jx * prior * Jx.T + Qs
     return self.X, self.P
Esempio n. 19
0
def rotationMatrixToEulerAngles(R):
    """Calculates euler angles given rotation matrix in 'xyz' sequence

    :param R: 3x3 rotation matrix
    :return: list of three euler angles equivalent to the given rotation matrix
    """
    rx, ry, rz = euler_from_matrix(R, axes="sxyz")
    return [rx, ry, rz]
    def execute(self, userdata):
        """Compute a goal pose standing DISTANCE away from the marker.

        Builds a frame whose z-axis is vertical and whose x-axis is the
        horizontal projection of the marker frame's z-axis, then offsets the
        marker position along that x-axis and faces the marker.
        """
        userdata.marker_pose.header.stamp = rospy.Time.now()
        pose = self.tf.transformPose('/base_link', userdata.marker_pose)
        p = [pose.pose.position.x, pose.pose.position.y, pose.pose.position.z]
        q = [pose.pose.orientation.x, pose.pose.orientation.y, pose.pose.orientation.z, pose.pose.orientation.w]
        rm = tfs.quaternion_matrix(q)
        # assemble a new coordinate frame that has z-axis aligned with
        # the vertical direction and x-axis facing the z-axis of the
        # original frame
        z = rm[:3, 2]
        z[2] = 0
        axis_x = tfs.unit_vector(z)
        axis_z = tfs.unit_vector([0, 0, 1])
        axis_y = np.cross(axis_x, axis_z)
        rm = np.vstack((axis_x, axis_y, axis_z)).transpose()
        # shift the pose along the x-axis
        # Bug fix: the original `p += np.dot(...)` EXTENDED the list `p` to
        # six elements (list.__iadd__ is extend), so the offset was silently
        # ignored; use element-wise vector addition instead.
        p = np.asarray(p) + np.dot(rm, [self.DISTANCE, 0, 0])[:3]
        userdata.goal_pose = pose
        userdata.goal_pose.pose.position.x = p[0]
        userdata.goal_pose.pose.position.y = p[1]
        userdata.goal_pose.pose.position.z = p[2]
        # Face back toward the marker (yaw rotated by pi).
        yaw = tfs.euler_from_matrix(rm)[2]
        q = tfs.quaternion_from_euler(0, 0, yaw - math.pi)
        userdata.goal_pose.pose.orientation.x = q[0]
        userdata.goal_pose.pose.orientation.y = q[1]
        userdata.goal_pose.pose.orientation.z = q[2]
        userdata.goal_pose.pose.orientation.w = q[3]
        return 'succeeded'
    def Aruco_marker_detector(self):
        """ Use the build in python library to detect the aruco marker and its coordinates """
        img = self.frame.copy() 
        
        # Detect the markers in the image
        markerCorners, markerIds, rejectedCandidates = cv.aruco.detectMarkers(img, self.dictionary, parameters=self.parameters)
        # Get the transformation matrix to the marker
        if markerCorners:

            # Marker side length and drawn-axis length (units as calibrated).
            markerSize = 2.0
            axisLength = 3.0

            rvecs, tvecs, _ = cv.aruco.estimatePoseSingleMarkers(markerCorners, markerSize, self.K, self.distCoeffs)
            
            # Draw the axis on the marker
            frame_out = cv.aruco.drawAxis( img, self.K, self.distCoeffs, rvecs, tvecs, axisLength)
            
            # Rotate the first marker's translation by 90 deg about x.
            rotMat = tr.euler_matrix(np.pi / 2.0, 0, 0)
            rotMat = rotMat[0:3, 0:3]
            tvecs = np.matmul(rotMat, tvecs[0][0].T)
            # Marker orientation as Euler angles via Rodrigues.
            rotMat, _ = cv.Rodrigues(rvecs)
            eul = tr.euler_from_matrix(rotMat)

            # NOTE(review): yaw_angle is computed but never used below.
            yaw_angle = self.pos[3] - eul[2] 

            # Publish the position of the marker (axes remapped: y <- -z, z <- y).
            marker_pos = PT()
            marker_pos.position.x = tvecs[0]
            marker_pos.position.y = - tvecs[2]
            marker_pos.position.z = tvecs[1]
            # NOTE(review): euler_from_matrix returns radians; multiplying by
            # pi/180 applies a deg->rad conversion to a value that is already
            # in radians — confirm the intended units.
            marker_pos.yaw = eul[2]  * np.pi / 180.0
            self.aruco_marker_pos_pub.publish(marker_pos)

            # Publish the image with the detected marker
            self.aruco_marker_img_pub.publish(self.bridge.cv2_to_imgmsg(frame_out, "bgr8"))  
Esempio n. 22
0
def getRotationMotion(R):
    """Return the rotation of matrix `R` as roll/pitch/yaw in degrees.

    Angles are extracted in 'szxy' order.

    Returns:
        dict with keys "Roll", "Pitch", "Yaw" (degrees).
    """
    # Fix: euler_from_matrix returns a fresh immutable tuple, so the original
    # copy.deepcopy was a no-op and has been dropped.
    angles = euler_from_matrix(R, 'szxy')
    # 57.2958 ~= 180/pi, kept verbatim to preserve the original outputs exactly.
    return {
        "Roll": 57.2958 * angles[0],
        "Pitch": 57.2958 * angles[1],
        "Yaw": 57.2958 * angles[2],
    }
Esempio n. 23
0
def generate_pose(T, camera_optical_frame):
    """Transform the pose matrix `T` from the camera optical frame into
    /base_link and broadcast it via TF.

    Args:
        T: 4x4 pose in the camera optical frame.
        camera_optical_frame: TF frame name of the camera.
    """
    print('publish TF for \n', T)
    rate = rospy.Rate(30)  # Hz

    listener = tf.TransformListener()
    try:
        t, r = listener.lookupTransform('/base_link', camera_optical_frame,
                                        rospy.Time(0))
    except (tf.LookupException, tf.ConnectivityException,
            tf.ExtrapolationException):
        # Bug fix: was `rospy.error(...)` (no such function -> AttributeError)
        # followed by `continue` outside any loop (SyntaxError). Log the
        # failure and bail out instead.
        rospy.logerr(
            "failed to listen transform from '/base_link' to '/camera_optical_frame'"
        )
        return
    # Assemble the camera->base homogeneous transform.
    cam2base = numpy.matrix(tft.quaternion_matrix(r))
    cam2base[0, 3] = t[0]
    cam2base[1, 3] = t[1]
    cam2base[2, 3] = t[2]

    # Express T in the base frame.
    T = numpy.array(numpy.dot(cam2base, T))

    p = Pose()
    p.position.x = T[0, 3]
    p.position.y = T[1, 3]
    p.position.z = T[2, 3]
    # euler_from_matrix accepts the full 4x4 matrix directly (the original
    # also built an unused 3x3 slice `R`, removed here).
    roll, pitch, yaw = tft.euler_from_matrix(T)
    q = tft.quaternion_from_euler(roll, pitch, yaw)
    p.orientation.x = q[0]
    p.orientation.y = q[1]
    p.orientation.z = q[2]
    p.orientation.w = q[3]
    #tf_brocast(p, camera_optical_frame)
    tf_brocast(p, "base_link")
Esempio n. 24
0
 def ep_error(self, ep_actual, ep_desired):
     """Compute the 6x1 pose error between two (position, rotation) pairs."""
     err = np.mat(np.zeros((6, 1)))
     err[:3, 0] = ep_actual[0] - ep_desired[0]
     # Relative rotation from desired to actual, as Euler angles.
     rel_rot = ep_desired[1].T * ep_actual[1]
     err[3:6, 0] = np.mat(tf_trans.euler_from_matrix(rel_rot)).T
     return err
Esempio n. 25
0
    def _construct_xyzrpy_from_matrix(self, matrix):
        """Flatten a 4x4 homogeneous matrix into [x, y, z, roll, pitch, yaw]."""
        xyz = transformations.translation_from_matrix(matrix)
        rpy = transformations.euler_from_matrix(matrix)
        return np.concatenate([xyz, rpy])
Esempio n. 26
0
 def inv_kin(
     self,
     q_current,
     T_desired,
 ):
     """One resolved-rate IK step toward `T_desired`.

     Computes the pose error of the desired frame relative to the current
     end-effector frame and maps it through the Jacobian pseudo-inverse to
     joint velocities.

     Returns:
         Joint-velocity vector with a trailing 0 appended for the fixed joint.
     """
     T_current, T_joint = self.fwd_kin(q_current)
     num_joints = len(self.a) - 1
     c_T_d = tf_tran.concatenate_matrices(
         tf_tran.inverse_matrix(T_current), T_desired
     )  # transformation of desired frame with respect to current frame
     c_t_d = c_T_d[0:3, 3]  # extracting the translation part
     # c_R_d = c_T_d[0:3, 0:3]  # extracting the rotation part
     ROT = np.array(tf_tran.euler_from_matrix(c_T_d))
     delta_x = c_t_d
     P = 1.0  # proportional gain on the translational error
     dx = P * delta_x  # ( velocity change wrt ee_current frame)
     # Stack translational and rotational errors into a 6x1 twist.
     v_ee = np.zeros([6, 1])
     v_ee[0:3] = dx.reshape([3, 1])
     v_ee[3:6] = ROT.reshape([3, 1])
     # Jac = self.jacobian(T_joint, T_current)
     Jac = self.geometric_jacobian(T_joint, T_current)
     qq = np.ones([num_joints, 1]) * 1
     J_pinv = np.linalg.pinv(Jac)
     # NOTE(review): qn_dot (null-space term) is computed but never added —
     # the "+ qn_dot" below is deliberately commented out.
     qn_dot = np.dot((np.identity(num_joints) - np.dot(J_pinv, Jac)),
                     qq)  # null space jacobian
     final_theta = np.dot(
         J_pinv, v_ee)  # + qn_dot  # final_theta are the joint velocities
     final_theta = np.insert(final_theta, num_joints,
                             0)  # Adding 0 at the end for the fixed joint
     return final_theta
Esempio n. 27
0
	def __get_cartesian_from_matrix(self, h_matrix):
		"""Return [roll, pitch, yaw, X, Y, Z] extracted from a homogeneous matrix."""
		rpy = TF.euler_from_matrix(h_matrix)
		xyz = TF.translation_from_matrix(h_matrix)
		return list(rpy) + list(xyz)
def inverse_biased(kin, pose, q_init, q_bias, q_bias_weights, rot_weight=1.0, 
                   bias_vel=0.01, num_iter=100):
    """Iterative damped-least-squares IK biased toward a preferred
    configuration `q_bias`.

    Args:
        kin: kinematics object providing forward(), jacobian() and
            clip_joints_safe().
        pose: 4x4 target pose (position in the last column, rotation in the
            upper-left 3x3 block).
        q_init: starting joint vector (copied, not mutated).
        q_bias: preferred joint configuration the solution is pulled toward.
        q_bias_weights: per-joint damping weights (diagonal regularizer).
        rot_weight: relative weight of orientation vs. position error.
        bias_vel: per-iteration step magnitude toward q_bias.
        num_iter: fixed number of iterations (no convergence test).

    Returns:
        Joint vector after num_iter iterations, clipped to safe limits.
    """
    # This code is potentially volatile
    q_out = q_init.copy()
    pos = pose[0:3,-1]
    rot = pose[0:3,0:3]
    for i in range(num_iter):
        # pos_fk, rot_fk = PoseConv.to_pos_rot(self.forward(q_out))
        g = np.array(kin.forward(q_out))
        pos_fk = g[0:3,-1]
        rot_fk = g[0:3,0:3]
        delta_twist = np.array(np.zeros(6))
        # Positional error.
        pos_delta = pos - pos_fk
        delta_twist[:3] = pos_delta
        # Orientation error as Euler angles of the rotation delta.
        # NOTE(review): `rot * rot_fk.T` is element-wise when both operands
        # are ndarrays — confirm a matrix product was not intended.
        rot_delta = np.eye(4)
        rot_delta[:3,:3] = rot * rot_fk.T
        rot_delta_angles = np.array(trans.euler_from_matrix(rot_delta))
        delta_twist[3:6] = rot_delta_angles
        J = np.array(kin.jacobian(q_out))
        # Scale the rotational rows of both sides by sqrt(rot_weight).
        J[3:6,:] *= np.sqrt(rot_weight)
        delta_twist[3:6] *= np.sqrt(rot_weight)
        # Damped pseudo-inverse: (J^T J + diag(w))^-1 J^T.
        J_tinv = matmult(np.linalg.inv(matmult(J.T,J) + np.diag(q_bias_weights)), J.T)
        q_bias_diff = q_bias - q_out
        q_bias_diff_normed = q_bias_diff * bias_vel / np.linalg.norm(q_bias_diff)
        delta_q = q_bias_diff_normed + matmult(J_tinv, (delta_twist - matmult(J, q_bias_diff_normed)))
        q_out += delta_q 
        q_out = np.array(kin.clip_joints_safe(q_out))
    return q_out
Esempio n. 29
0
def space_from_joints_small(joint_angles):
    """Reduced task-space vector from joint angles.

    Runs the direct kinematics chain (using the Tem end transform), and
    returns S = [x, y, z, rz, ry]: the end-effector translation plus the
    z and y static euler angles ('sxyz').
    """
    T01, T12, T23, T34, T45, T5e, Tec, Tem = direct_kinematics(joint_angles)
    T = tfs.concatenate_matrices(T01, T12, T23, T34, T45, T5e, Tem)
    rx, ry, rz = tfs.euler_from_matrix(T, 'sxyz')
    trans = tfs.translation_from_matrix(T)
    # Both pieces are 1-D, so they must be joined along axis 0; the previous
    # axis=1 raises AxisError on 1-D inputs in modern NumPy.
    S = np.concatenate((trans, [rz, ry]), axis=0)
    return S
Esempio n. 30
0
    def place(self, x, y, z, alpha, beta, gamma, approach):
        """Place the held object at the given pose (angles in degrees).

        Moves to a pre-place pose backed off by `approach` along the tool
        z-axis, descends to the place pose, opens the gripper, and retreats
        to the pre-place pose again. Returns True on success, False (after
        clearing the error state) otherwise.
        """
        # Target (place) transform from RPY in degrees.
        T_place = euler_matrix(radians(alpha),
                               radians(beta),
                               radians(gamma),
                               axes="sxyz")
        T_place[:3, 3] = np.array([x, y, z])

        # Back off along the tool z-axis for the pre-place pose.
        T_offset = np.identity(4)
        T_offset[2, 3] = -approach
        T_pre = np.dot(T_place, T_offset)

        rpy_pre = euler_from_matrix(T_pre)
        alpha_pre, beta_pre, gamma_pre = [degrees(a) for a in rpy_pre]
        x_pre, y_pre, z_pre = T_pre[:3, 3]

        # pre-place -> place -> release -> retreat to pre-place
        succeeded = (
            self.move(x_pre, y_pre, z_pre, alpha_pre, beta_pre, gamma_pre)
            and self.move(x, y, z, alpha, beta, gamma)
            and self.open_gripper()
            and self.move(x_pre, y_pre, z_pre, alpha_pre, beta_pre, gamma_pre))
        if succeeded:
            rospy.loginfo("Place finished")
            return True
        rospy.logerr("Place failed")
        self.clear_error()
        return False
Esempio n. 31
0
    def motion_model(self, f0, f1, stamp, use_kalman=False):
        """Predict the pose at `stamp` from the two most recent frames.

        f0, f1     : frame dicts with 'pose' (state vector), 'cov', 'stamp'
        use_kalman : if True, run the internal Kalman filter instead of the
                     constant-velocity repetition model.
        Returns (x, P): predicted state vector and covariance.
        """
        if not use_kalman:
            # simple `repetition` model: assume the relative transform that
            # carried f0 to f1 repeats over the next interval.
            txn0, rxn0 = f0['pose'][L_POS], f0['pose'][A_POS]
            txn1, rxn1 = f1['pose'][L_POS], f1['pose'][A_POS]
            # (previously also built euler matrices R0/R1 here -- they were
            # never used, so that dead work has been removed)
            T0 = tx.compose_matrix(angles=rxn0, translate=txn0)
            T1 = tx.compose_matrix(angles=rxn1, translate=txn1)

            Tv = np.dot(T1, vm.inv(T0))  # Tv * T0 = T1
            T2 = np.dot(Tv, T1)

            txn = tx.translation_from_matrix(T2)
            rxn = tx.euler_from_matrix(T2)

            x = f1['pose'].copy()
            P = f1['cov'].copy()  # covariance carried over unchanged
            x[0:3] = txn
            x[9:12] = rxn
            return x, P
        else:
            # Kalman: predict to f1, update with f1's pose, predict to stamp.
            # dt MUST NOT BE None
            self.kf_.x = f0['pose']
            self.kf_.P = f0['cov']
            dt = (f1['stamp'] - f0['stamp'])
            self.kf_.predict(dt)

            txn, rxn = f1['pose'][L_POS], f1['pose'][A_POS]
            z = np.concatenate([txn, rxn])
            self.kf_.update(z)
            dt = (stamp - f1['stamp'])
            self.kf_.predict(dt)
            return self.kf_.x.copy(), self.kf_.P.copy()
Esempio n. 32
0
    def go_angle(self, target_odom, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        """Rotate the base in place to the absolute odometry heading target_odom.

        target_odom : desired yaw in the odom_combined frame (radians)
        tolerance   : stop once |heading error| falls below this (radians)
        max_ang_vel : cap on the commanded angular speed (radians/s)
        func        : optional callback invoked every cycle with the error

        Returns the final heading error (radians).
        """
        self.tl.waitForTransform('base_footprint', 'odom_combined', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)  # proportional gain

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('base_footprint', 'odom_combined', self.tl)
            # Current yaw from the rotation part of the base pose.
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            diff = ut.standard_rad(current_ang_odom - target_odom)
            p = k * diff  # proportional command
            #print diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            tw = gm.Twist()
            # Clamp the commanded velocity to max_ang_vel, keeping p's sign.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('finished %.3f' % math.degrees(diff))
        return diff
def JS_to_PrPlRrl(q):
    """Map a 14-dof joint vector (left arm q[0:7], right arm q[7:14]) to a
    12x1 task-space vector:

        [left xyz, right xyz, relative rpy, relative xyz]

    where "relative" is the left hand expressed in the right-hand frame.
    """
    T_left = kdl_kin_left.forward(q[0:7])
    T_right = kdl_kin_right.forward(q[7:14])
    R_left = np.array(T_left[:3, :3])
    R_right = np.array(T_right[:3, :3])

    # Absolute end-effector positions.
    x_left, y_left, z_left = T_left[0, 3], T_left[1, 3], T_left[2, 3]
    x_right, y_right, z_right = T_right[0, 3], T_right[1, 3], T_right[2, 3]

    # Left hand pose expressed in the right-hand frame.
    T_rl = np.dot(InverseTransformationMatrix(T_right), T_left)
    sep_dist_x, sep_dist_y, sep_dist_z = T_rl[0, 3], T_rl[1, 3], T_rl[2, 3]

    R_rl = np.dot(np.transpose(R_right), R_left)
    roll, pitch, yaw = euler_from_matrix(R_rl, 'sxyz')

    components = (x_left, y_left, z_left,
                  x_right, y_right, z_right,
                  roll, pitch, yaw,
                  sep_dist_x, sep_dist_y, sep_dist_z)
    P = np.array([[c] for c in components])

    return P
Esempio n. 34
0
def to_pose2d(pos):
    """Convert a sequence of 3x4 camera-frame pose matrices to 2D poses.

    pos : iterable of N 3x4 transformation matrices; the first pose is
          taken as the origin of the normalized trajectory.
    Returns an Nx3 array of [x, y, heading].

    In the camera convention, the 2d rotation requires the (-y) component
    and the 2d translation the (z, -x) components; T_pre permutes axes
    accordingly on both sides of each pose.
    """
    T_pre = tx.euler_matrix(-np.pi/2, 0, -np.pi/2)  # 4x4 axis permutation
    # Normalize against the first pose so the trajectory starts at identity.
    T_0i = tx.inverse_matrix(np.concatenate((pos[0], [[0, 0, 0, 1]]), axis=0))
    # Homogenize all poses and apply T_pre * T_0i * T_cam * T_pre^T at once.
    T_cam = np.stack([np.concatenate((p, [[0, 0, 0, 1]]), axis=0) for p in pos], axis=0)
    pos_n = np.einsum('ij,jk,nkl,lm->nim', T_pre, T_0i, T_cam, T_pre.T)
    # (an equivalent per-pose loop without einsum used to live here as
    # unreachable dead code after the return; it has been removed)
    res = []
    for p in pos_n:
        t = tx.translation_from_matrix(p)
        q = tx.euler_from_matrix(p)
        res.append([t[0], t[1], q[-1]])
    res = np.stack(res, axis=0)
    return res
Esempio n. 35
0
def computeTransformation(world, slam, camera):
    """Fuse the SLAM-measured and control-estimated camera poses.

    Looks up the camera pose w.r.t. both the `world` and `slam` frames at
    the most recent common time, inverts both camera->slam transforms, and
    blends selected translation components / euler angles of the measured
    and estimated versions into one corrected transform.

    Returns (translation, quaternion, stamp) of the corrected transform.

    NOTE(review): relies on module-level `listener` and `transformer`.
    """
    # Here we do not care about the slamMworld transformation
    # timing as it is constant.
    tWorld = listener.getLatestCommonTime(world, camera)
    tMap = listener.getLatestCommonTime(slam, camera)
    t = min(tWorld, tMap)

    # Current pose given by the SLAM.
    (slamMcam_T, slamMcam_Q) = listener.lookupTransform(camera, slam, t)
    slamMcam = np.matrix(transformer.fromTranslationRotation(slamMcam_T,
                                                             slamMcam_Q))

    # Estimation of the camera position given by control.
    #FIXME: order is weird but it works.
    (worldMcam_T, worldMcam_Q) = listener.lookupTransform(world, camera, t)
    worldMcam = np.matrix(transformer.fromTranslationRotation(worldMcam_T,
                                                              worldMcam_Q))

    (slamMworld_T, slamMworld_Q) = listener.lookupTransform(slam, world, t)
    slamMworld = np.matrix(transformer.fromTranslationRotation(slamMworld_T,
                                                               slamMworld_Q))
    slamMcamEstimated = slamMworld * worldMcam

    # Inverse frames.
    camMslam = np.linalg.inv(slamMcam)
    camMslamEstimated = np.linalg.inv(slamMcamEstimated)

    # Split each transform into translation / quaternion / euler angles.
    camMslam_T = translation_from_matrix(camMslam)
    camMslam_Q = quaternion_from_matrix(camMslam)
    camMslam_E = euler_from_matrix(camMslam)

    camMslamEstimated_T = translation_from_matrix(camMslamEstimated)
    camMslamEstimated_Q = quaternion_from_matrix(camMslamEstimated)
    camMslamEstimated_E = euler_from_matrix(camMslamEstimated)

    # Compute correction: keep the measured x/z translation and middle euler
    # angle, take the estimated y translation and outer euler angles.
    camMslamCorrected_T = [camMslam_T[0],
                           camMslamEstimated_T[1],
                           camMslam_T[2]]
    camMslamCorrected_E = [camMslamEstimated_E[0],
                           camMslam_E[1],
                           camMslamEstimated_E[2]]

    camMslamCorrected_Q = quaternion_from_euler(*camMslamCorrected_E)

    return (camMslamCorrected_T, camMslamCorrected_Q, t)
Esempio n. 36
0
def draw_map(ax, frame, db, cfg):
    """Draw the landmark map and camera trajectory into axis `ax`:
    a 3D scatter in the lower half and a rotated top-down 2D view above.

    frame : current frame record with a 'pose' vector
    db    : database exposing `keyframe`, `landmark` ('pos', 'col'), `frame`
    cfg   : configuration object (unused in this function)
    """
    # global - top-down view

    # extract data
    map_frame = db.keyframe[0]
    xyz = db.landmark['pos']
    col = db.landmark['col']

    #idx = np.random.choice(len(xyz), size=2048, replace=False)
    #xyz = xyz[idx]
    #col = col[idx]

    # draw (3d)
    ax3 = sub_axis(ax, [0.0, 0.0, 1.0, 0.5], projection='3d')
    ax3.scatter(xyz[:,0], xyz[:,1], xyz[:,2],
            s = 0.1,
            # reverse channel order (presumably BGR->RGB) and scale to [0,1]
            c = (col[...,::-1] / 255.0),
            )
    # Faint pose markers for every stored frame, solid for the current one.
    for fr in db.frame:
        xfm_pose = pose_to_xfm(fr['pose'])
        txn = tx.translation_from_matrix(xfm_pose)
        rxn = tx.euler_from_matrix(xfm_pose)
        draw_pose(ax3, txn, rxn, alpha=0.02)
    draw_pose(ax3, frame['pose'][0:3], frame['pose'][9:12])
    set_axes_equal(ax3)

    # draw (2d): rotate camera-convention axes into a top-down x/y view
    T_R = vm.tx.euler_matrix(-np.pi/2, 0, -np.pi/2)
    ax2 = sub_axis(ax, [0.0, 0.5, 1.0, 0.5])
    xyz = vm.tx3(T_R, xyz)
    ax2.scatter(xyz[:,0], xyz[:,1],
            s = 0.1,
            c = (col[...,::-1] / 255.0)
            )
    for fr in db.frame:
        xfm_pose = pose_to_xfm(fr['pose'])
        r_xfm_pose = T_R.dot(xfm_pose)
        txn = tx.translation_from_matrix(r_xfm_pose)
        rxn = tx.euler_from_matrix(r_xfm_pose)
        draw_pose(ax2, txn, rxn, alpha=0.02)
    # Current frame, drawn opaque.
    fr = frame
    xfm_pose = pose_to_xfm(fr['pose'])
    r_xfm_pose = T_R.dot(xfm_pose)
    txn = tx.translation_from_matrix(r_xfm_pose)
    rxn = tx.euler_from_matrix(r_xfm_pose)
    draw_pose(ax2, txn, rxn, alpha=1.0)
    ax2.set_aspect('equal')
Esempio n. 37
0
 def toROSPose2DModel(self):
   """Convert this model's pose into a planar ROS Pose2DModel message."""
   msg = Pose2DModel()
   msg.id = self.id
   msg.type = str(self.type)
   m = toMatrix(self.pose.pose)
   # Planar pose: x/y translation plus yaw about z.
   msg.pose.x = m[0][3]
   msg.pose.y = m[1][3]
   msg.pose.theta = euler_from_matrix(m)[2]
   return msg
Esempio n. 38
0
    def go_angle(self, target_base, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        """Rotate the base to heading target_base (radians), expressed in the
        current base_footprint frame. The goal is converted once into an
        absolute odometry heading, then servoed to with a P controller.
        Supports preemption through self.go_ang_server.

        Returns the final heading error (radians).
        """
        self.tl.waitForTransform('odom_combined', 'base_footprint', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)  # proportional gain

        # Convert the base-relative target heading into the odom frame.
        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom_mat = od_T_bf * tfu.tf_as_matrix( ([0, 0, 0.], tr.quaternion_from_euler(0, 0, target_base)) )
        target_odom = tr.euler_from_quaternion(tfu.matrix_as_tf(target_odom_mat)[1])[2]
        #target_odom = (od_T_bf * np.row_stack([target_base, np.matrix([0,1.]).T]))[0:2,0]

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
        current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
        diff = ut.standard_rad(current_ang_odom - target_odom)
        rospy.loginfo('go_angle: target %.2f' %  np.degrees(target_odom))
        rospy.loginfo('go_angle: current %.2f' %  np.degrees(current_ang_odom))
        rospy.loginfo('go_angle: diff %.2f' %  np.degrees(diff))

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            diff = ut.standard_rad(target_odom - current_ang_odom)
            rospy.loginfo('go_angle: current %.2f diff %.2f' %  (np.degrees(current_ang_odom), np.degrees(diff)))
            p = k * diff  # proportional command
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            if self.go_ang_server.is_preempt_requested():
                self.go_ang_server.set_preempted()
                break

            tw = gm.Twist()
            # Clamp the commanded velocity to max_ang_vel, keeping p's sign.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('go_ang: finished %.3f' % math.degrees(diff))
        return diff
    def set_target_pose_relative(self, name, delta_xyz, delta_rpy, tm):
        """Offset the current end-effector pose of arm `name` ('rarm' or
        'larm') by delta_xyz and delta_rpy, to be reached in time tm.

        Raises rospy.ServiceException for an unknown arm name.
        Returns the result of set_target_pose on the offset pose.
        """
        if name.lower() == 'rarm':
            joint = 'RARM_JOINT5'
        elif name.lower() == 'larm':
            joint = 'LARM_JOINT5'
        else:
            raise rospy.ServiceException()

        # pose.data is a row-major flattened transform with 4 columns:
        # translation in elements 3, 7, 11; rotation rows in [0:3], [4:7],
        # [8:11].
        matrix = self.get_current_pose(joint).pose.data
        xyz = numpy.array([matrix[3], matrix[7], matrix[11]])
        rpy = numpy.array(euler_from_matrix([matrix[0:3], matrix[4:7], matrix[8:11]], 'sxyz'))
        xyz += [delta_xyz[0], delta_xyz[1], delta_xyz[2]]
        rpy += [delta_rpy[0], delta_rpy[1], delta_rpy[2]]
        return self.set_target_pose(name, list(xyz), list(rpy), tm)
Esempio n. 40
0
    def go_ang(self, ang, speed):
        """Rotate the base open-loop by `ang` degrees at `speed` deg/s,
        integrating odometry yaw until the commanded angle is covered.

        NOTE(review): the commanded sign (negative angular.z for positive
        `ang`) looks opposite to the accumulation test below -- presumably
        this platform's angular.z convention is inverted relative to the
        odometry yaw; confirm on hardware.
        """
        dt = math.radians(ang)

        # Direction of the commanded twist.
        if dt > 0:
            sign = -1
        elif dt < 0:
            sign = 1
        else:
            sign = 0

        self.tl.waitForTransform("base_footprint", "odom_combined", rospy.Time(), rospy.Duration(10))
        p0_base = tfu.transform("base_footprint", "odom_combined", self.tl)  # \
        start_ang = tr.euler_from_matrix(p0_base[0:3, 0:3], "sxyz")[2]
        r = rospy.Rate(100)
        dist_so_far = 0.0

        last_ang = start_ang
        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform("base_footprint", "odom_combined", self.tl)  # \
            current_ang = tr.euler_from_matrix(pcurrent_base[0:3, 0:3], "sxyz")[2]
            # Accumulate wrapped yaw increments to measure total rotation.
            dist_so_far = dist_so_far + (ut.standard_rad(current_ang - last_ang))
            if dt > 0 and dist_so_far > dt:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break
            elif dt < 0 and dist_so_far < dt:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break
            elif dt == 0:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break

            tw = gm.Twist()
            tw.angular.z = math.radians(speed * sign)

            self.tw_pub.publish(tw)
            r.sleep()
            last_ang = current_ang
 def tf_cb(self, msg):
     """TF callback: track self.target and derive a filtered, offset frame.

     For each incoming transform whose child is self.target, build a frame
     whose z-axis is world-vertical and whose x-axis points along the
     horizontal projection of the target frame's z-axis, offset its origin
     by self.d in that frame, and record the yaw in a ring buffer before
     republishing the filtered transform.
     """
     for t in msg.transforms:
         if t.child_frame_id == self.target:
             time = self.tf.getLatestCommonTime(self.source, self.target)
             p, q = self.tf.lookupTransform(self.source, self.target, time)
             rm = tfs.quaternion_matrix(q)
             # assemble a new coordinate frame that has z-axis aligned with
             # the vertical direction and x-axis facing the z-axis of the
             # original frame
             z = rm[:3, 2]
             z[2] = 0  # project onto the horizontal plane
             axis_x = tfs.unit_vector(z)
             axis_z = tfs.unit_vector([0, 0, 1])
             # NOTE(review): cross(x, z) = -y for a right-handed frame, so
             # this basis looks left-handed unless intentional -- confirm.
             axis_y = np.cross(axis_x, axis_z)
             rm = np.vstack((axis_x, axis_y, axis_z)).transpose()
             # shift the pose along the x-axis
             self.position = p + np.dot(rm, self.d)[:3]
             # Store the new frame's yaw for filtering (ring buffer).
             self.buffer[self.buffer_ptr] = tfs.euler_from_matrix(rm)[2]
             self.buffer_ptr = (self.buffer_ptr + 1) % self.buffer_size
             self.publish_filtered_tf(t.header)
Esempio n. 42
0
 def predict(self, Delta):
     """Kalman prediction step for a planar pose state X = [x, y, theta].

     Delta : 4x4 homogeneous incremental transform pre-multiplied onto the
             current pose.
     Updates self.X and the pose block of self.P in place and returns
     (self.X, self.P).
     """
     # Implement Kalman prediction here
     theta = self.X[2,0]
     # Current pose as a homogeneous matrix (rotation about z + translation).
     pose_mat = mat([[cos(theta), -sin(theta), 0, self.X[0,0]], 
                   [sin(theta),  cos(theta), 0, self.X[1,0]],
                   [         0,           0, 1, 0],
                   [         0,           0, 0, 1],
                   ]);
     pose_mat = Delta * pose_mat;
     self.X[0:2,0] = pose_mat[0:2,3:4]
     euler = euler_from_matrix(pose_mat[0:3,0:3], 'rxyz')
     self.X[2,0] = euler[2]; # Ignore the others
     # Jacobian of the motion w.r.t. the state, for covariance propagation.
     Jx = mat([[1, 0, -sin(theta)*Delta[0,3]-cos(theta)*Delta[1,3]],
               [0, 1,  cos(theta)*Delta[0,3]-sin(theta)*Delta[1,3]],
               [0, 0,                       1                       ]])
     # Process noise built from the configured uncertainties.
     Qs = mat(diag([self.position_uncertainty**2,self.position_uncertainty**2,self.angular_uncertainty**2]))
     P = self.P[0:3,0:3]
     self.P[0:3,0:3] = Jx * P * Jx.T + Qs 
     return (self.X,self.P)
def dls_ik(kin, pose, q0, lam=0.25, num=100, tol=1e-6):
    """Damped least-squares inverse kinematics.

    kin  : kinematics object providing jacobian(q) and forward(q)
    pose : 4x4 target homogeneous transform
    q0   : initial joint configuration (copied, not modified)
    lam  : damping factor for the regularized pseudo-inverse
    num  : maximum number of iterations
    tol  : early-exit threshold on the 6-D pose error norm (new, optional;
           tol=0 reproduces the previous fixed-iteration behavior)

    Returns the joint configuration after convergence or `num` iterations.
    """
    # get position and orientation:
    Rd = pose[0:3,0:3]
    Pd = pose[0:3,-1]
    # setup iterations:
    q = q0.copy()
    # run loop trying to get to target:
    for i in range(num):
        J = kin.jacobian(q)
        g = np.array(kin.forward(q))
        R = g[0:3,0:3]
        P = g[0:3,-1]
        # Orientation error as euler angles of the relative rotation Rd*R^T.
        Rdelt = matmult(Rd, R.T)
        rdelt_angles = np.array(trans.euler_from_matrix(Rdelt))
        e = np.hstack((Pd-P, rdelt_angles))
        # Break condition the original TODO asked for: stop once the pose
        # error is within tolerance.
        if np.linalg.norm(e) < tol:
            break
        # Damped pseudo-inverse step: J^T (J J^T + lam I)^-1 e.
        dq = np.array(matmult(J.T,np.linalg.inv(matmult(J,J.T) + lam*np.eye(J.shape[0]))))
        q += matmult(dq,e)
    return q
 def IK_biased(self, pos, rot, q_init, q_bias, q_bias_weights, rot_weight=1., 
               bias_vel=0.01, num_iter=100):
     """Inverse kinematics biased toward a preferred posture q_bias.

     Seeds the solution from IK_search, then runs num_iter damped
     least-squares refinement steps that trade task-space error (position
     plus rot_weight-scaled orientation) against a bias_vel-sized pull
     toward q_bias, clamping to safe joint limits every step.

     Returns the joint solution as an np.mat column vector.
     """
     q_out = np.mat(self.IK_search(pos, rot)).T
     for i in range(num_iter):
         pos_fk, rot_fk = self.FK_vanilla(q_out)
         delta_twist = np.mat(np.zeros((6, 1)))
         # Position error.
         pos_delta = pos - pos_fk
         delta_twist[:3,0] = pos_delta
         # Orientation error via euler angles of rot * rot_fk^T.
         rot_delta = np.mat(np.eye(4))
         rot_delta[:3,:3] = rot * rot_fk.T
         rot_delta_angles = np.mat(tf_trans.euler_from_matrix(rot_delta)).T
         delta_twist[3:6,0] = rot_delta_angles
         J = self.jacobian_vanilla(q_out)
         # Weight the rotational components of the task error.
         J[3:6,:] *= np.sqrt(rot_weight)
         delta_twist[3:6,0] *= np.sqrt(rot_weight)
         # Damped pseudo-inverse with per-joint bias weights on the diagonal.
         J_tinv = np.linalg.inv(J.T * J + np.diag(q_bias_weights) * np.eye(len(q_init))) * J.T
         # Step toward q_bias, then correct the task-space error it induces.
         q_bias_diff = q_bias - q_out
         q_bias_diff_normed = q_bias_diff * bias_vel / np.linalg.norm(q_bias_diff)
         delta_q = q_bias_diff_normed + J_tinv * (delta_twist - J * q_bias_diff_normed)
         q_out += delta_q 
         # Clamp to safe joint limits each iteration.
         q_out = np.mat(np.clip(q_out.T.A[0], self.joint_info["safe_mins"], 
                                              self.joint_info["safe_maxs"])).T
     return q_out
    def processAprilTags(self, msg):
        """Log odometry deltas estimated from consecutive AprilTag detections.

        When the wheel command has stayed constant over a valid (forward)
        time interval, compute the planar motion (theta, dx, dy) implied by
        every tag seen in both the previous and current detections, average
        the estimates, and append [vel_left, vel_right, dt, theta, dx, dy]
        to the calibration log file. Always caches the latest detections and
        wheel command for the next callback.
        """
        # Only process if the wheels_cmd hasn't changed since last time and the last time < cur time
        if self.wheelsCmdAlmostEqual(self.wheels_cmd, self.wheels_cmd_prev) and msg.header.stamp > self.timestamp_prev:
            deltas = []
            for tag in msg.detections:
                if tag.id in self.april_tags_prev:
                    # Robot poses at t1 (previous) and t2 (current) relative
                    # to the same, static tag frame.
                    Ta_r1 = self.geometryTransformToMatrix(self.april_tags_prev[tag.id].transform)
                    Ta_r2 = self.geometryTransformToMatrix(tag.transform)
                    # Relative robot motion inferred through the tag.
                    Tr2_r1 = np.dot(Ta_r1, tr.inverse_matrix(Ta_r2))

                    # Get the angle, dx, and dy
                    theta = tr.euler_from_matrix(Tr2_r1)[2]
                    dx,dy = Tr2_r1[0:2,3]
                    deltas.append([theta, dx, dy])

            if deltas:
                # We found some matches, average them, and print
                deltas = np.mean(np.array(deltas),0).tolist()
                cmd = [self.wheels_cmd_prev.vel_left, self.wheels_cmd_prev.vel_right, (msg.header.stamp - self.timestamp_prev).to_sec()]
                sample = cmd+deltas
                rospy.loginfo(sample)

                # Append one space-separated sample row to the log file.
                f = open(self.filename, 'a+')
                np.savetxt(f, sample, fmt='%-7.8f', newline=" ")
                f.write('\n')
                f.close()
        else:
            err = "backwards time." if msg.header.stamp < self.timestamp_prev else "cmd changed"
            rospy.logwarn("Invalid interval %s", err)

        # Save the tags and wheels_cmd for next time
        for tag in msg.detections:
            self.april_tags_prev[tag.id] = tag
        self.timestamp_prev = msg.header.stamp
        self.wheels_cmd_prev = self.wheels_cmd
Esempio n. 46
0
def main():
    """Exercise the PR2 arm interfaces in one of several test modes.

    Usage: pr2_arm_test <arm: r|l> <mode>, where mode selects a joint-space
    ('joint1'/'joint2') or Cartesian ('cart1'..'cart4') test. Python 2 code
    (print statements).
    """
    rospy.init_node("pr2_arm_test")
    arm = sys.argv[1]
    mode = sys.argv[2]
    assert arm in ['r', 'l']
    assert mode in ['joint1', 'joint2', 'cart1', 'cart2', 'cart3', 'cart4']

    if mode == 'joint1':
        # Simple joint-space motion to a fixed configuration.
        pr2_joint_arm = create_pr2_arm(arm, arm_type=PR2ArmJointTrajectory)
        pr2_joint_arm.set_ep([0.1]*7, 15)

    if mode == 'joint2':
        # IK search for a Cartesian goal, executed in joint space.
        jnt_arm = create_pr2_arm(arm, arm_type=PR2ArmJointTrajectory)
        kin = jnt_arm.kinematics

        pos = np.mat([[0.6, 0.0, 0.0]]).T
        rot = np.mat(np.eye(3))
        q_ik = kin.IK_search(pos, rot)

        if q_ik is not None:
            jnt_arm.set_ep(q_ik, 15)

    if mode == 'cart1':
        # Jacobian-transpose Cartesian controller moving to a fixed pose.
        pr2_r_kinematics = PR2ArmKinematics(arm)
        pr2_jtrans_arm = PR2ArmJTranspose(arm, pr2_r_kinematics)
        pos = np.mat([[0.6, 0.0, 0.0]]).T
        rot = np.mat(np.eye(3))
        pr2_jtrans_arm.set_ep((pos, rot), 5.)

    if mode == 'cart2':
        # Only set task-frame gains on the JT-task controller.
        pr2_r_kinematics = PR2ArmKinematics(arm)
        pr2_jttask_arm = PR2ArmJTransposeTask(arm, pr2_r_kinematics)
        pr2_jttask_arm.set_gains([0., 60., 60., 50., 50., 50.],
                                 [3., 0.1, 0.1, 3., 3., 3.], 'r_gripper_tool_frame')

    if mode == 'cart3':
        # Interpolate between two Cartesian end points and print the steps.
        pr2_r_kinematics = PR2ArmKinematics(arm)
        pr2_jttask_arm = PR2ArmJTransposeTask(arm, pr2_r_kinematics)
        pos_a = np.mat([[0.0, 0.1, 0.0]]).T
        rot_a = np.mat(np.eye(3))
        pos_b = np.mat([[0.6, 0.0, 0.0]]).T
        rot_b = np.mat([[1.0, 0.0, 0.0],
                        [0.0, 0.0, -1.0],
                        [0.0, 1.0, 0.0]])
        print zip(*pr2_jttask_arm.interpolate_ep((pos_a, rot_a), (pos_b, rot_b), 10))

    if mode == 'cart4':
        # Compare ep_error's rotation component against a direct euler
        # decomposition of the relative rotation.
        pr2_r_kinematics = PR2ArmKinematics(arm)
        pr2_jttask_arm = PR2ArmJTransposeTask(arm, pr2_r_kinematics)
        pos_a = np.mat([[0.0, 0.1, 0.0]]).T
        rot_a = np.mat(tf_trans.random_rotation_matrix())[:3,:3]
        pos_b = np.mat([[0.6, 0.0, 0.0]]).T
        rot_b = np.mat(tf_trans.euler_matrix(0.5, 0.8, 0.5))[:3,:3]
        ep_err = pr2_jttask_arm.ep_error((pos_a, rot_a), (pos_b, rot_b))
        err_mat = np.mat(tf_trans.euler_matrix(ep_err[3,0], ep_err[4,0], ep_err[5,0]))[:3, :3]
        print "err_mat", err_mat
        diff_mat = rot_b.T * rot_a
        print "diff_mat", diff_mat
        rx, ry, rz = tf_trans.euler_from_matrix(diff_mat)
        print ep_err[3:6,0].T
        print rx, ry, rz
        print diff_mat - err_mat
Esempio n. 47
0
def homo_mat_to_2d(mat):
    """Project a homogeneous transform onto the plane as (x, y, yaw)."""
    yaw = tf_trans.euler_from_matrix(mat)[2]
    return mat[0,3], mat[1,3], yaw
Esempio n. 48
0
def main(): 
    """Generate proximity-sensor definitions for a UR10 arm.

    Builds an lxml tree of simulated ultrasound sensors arranged as a ring
    around the forearm link, then splices the generated joint/link and
    collision-exclusion strings into copies of the bare URDF xacro and SRDF
    files used by Gazebo and MoveIt. Python 2 code with hard-coded paths.
    """
    # Header of the sensor file
    NSMAP = {"xacro" : "http://www.ros.org/wiki/xacro"}                      
    ROOT = ltr.Element("robot",nsmap=NSMAP) 
    XACRO= ltr.SubElement(ROOT,"xacroCOLONmacro",name="ur10_arm_gazebo",params="prefix")
    ROOT.insert(0,ltr.Comment("Proximity sensors"))
    string_joint_link=""
    string_collisions=""
    modify=True #if false the original files are used by moveit if true the file with sensor is used
    
    SensorCount=0    
    # Sensor beam geometry: width, usable range (m), update rate.
    width=0.10; min_range=0.06;    max_range=0.8;       Rate=10; theta_incline=0.05           
    s=Sensor_attributes(width,min_range,max_range,Rate)
    
    
#     
    
    
    
    # Link 2  
    frame_name="forearm_link" 
      
#    Note 1 In the absence of an array pick max position     
#    Note 2 R is defined by relative rotations
    # Ultrasound ring 
    # 0  
    theta_incline=15.* np.pi/180.
    # Base orientation of the ring, then an incline about the local z-axis.
    R=np.dot(np.dot(transformations.rotation_matrix(np.pi/2,np.array([1,0,0])),transformations.rotation_matrix(np.pi/2,np.array([0,0,1]))),transformations.rotation_matrix(-np.pi,np.array([0,1,0])))
    Rincline=np.dot(R,transformations.rotation_matrix(-theta_incline,np.array([0,0,1]))) 
    Rx,Ry,Rz=transformations.euler_from_matrix(Rincline,'sxyz')
    Position_min=np.array([-0.06,-0.06,0.53,Rx,Ry,Rz]);    Position_max=np.array([0.078,0.00,0.48,Rx,Ry,Rz])   
    face_array=sensor_array(frame_name,Position_min,Position_max,[s])     
    string_joint_link,string_collisions,XACRO,SensorCount=create_sensors_for_face(XACRO,SensorCount,face_array,string_joint_link,string_collisions)
    print "Generated Sensor ",SensorCount 
    
#    # 1 to mod
    R=np.dot(np.dot(transformations.rotation_matrix(np.pi/2,np.array([1,0,0])),transformations.rotation_matrix(np.pi/2,np.array([0,0,1]))),transformations.rotation_matrix(-np.pi,np.array([0,1,0])))
    R=np.dot(R,transformations.rotation_matrix(-np.pi/2,np.array([1,0,0]))) # rotating for the ring
    Rincline=np.dot(R,transformations.rotation_matrix(-theta_incline,np.array([0,0,1]))) 
    Rx,Ry,Rz=transformations.euler_from_matrix(Rincline,'sxyz')
    
    Position_min=np.array([-0.06,-0.06,0.53,Rx,Ry,Rz]);    Position_max=np.array([0.0,0.075,0.48,Rx,Ry,Rz])   

    face_array=sensor_array(frame_name,Position_min,Position_max,[s])     
    string_joint_link,string_collisions,XACRO,SensorCount=create_sensors_for_face(XACRO,SensorCount,face_array,string_joint_link,string_collisions)
    print "Generated Sensor ",SensorCount 
#    
#    #2
#    
    R=np.dot(np.dot(transformations.rotation_matrix(np.pi/2,np.array([1,0,0])),transformations.rotation_matrix(np.pi/2,np.array([0,0,1]))),transformations.rotation_matrix(-np.pi,np.array([0,1,0])))
    R=np.dot(R,transformations.rotation_matrix(-np.pi,np.array([1,0,0]))) # rotating for the ring
    Rincline=np.dot(R,transformations.rotation_matrix(-theta_incline,np.array([0,0,1]))) 
    Rx,Ry,Rz=transformations.euler_from_matrix(Rincline,'sxyz')
    
    Position_min=np.array([-0.06,-0.06,0.53,Rx,Ry,Rz]);    Position_max=np.array([-0.075,-0.0,0.48,Rx,Ry,Rz])   

    face_array=sensor_array(frame_name,Position_min,Position_max,[s])     
    string_joint_link,string_collisions,XACRO,SensorCount=create_sensors_for_face(XACRO,SensorCount,face_array,string_joint_link,string_collisions)
    print "Generated Sensor ",SensorCount 
#    
#    #3
    R=np.dot(np.dot(transformations.rotation_matrix(np.pi/2,np.array([1,0,0])),transformations.rotation_matrix(np.pi/2,np.array([0,0,1]))),transformations.rotation_matrix(-np.pi,np.array([0,1,0])))
    R=np.dot(R,transformations.rotation_matrix(np.pi/2,np.array([1,0,0]))) # rotating for the ring
    Rincline=np.dot(R,transformations.rotation_matrix(-theta_incline,np.array([0,0,1]))) 
    Rx,Ry,Rz=transformations.euler_from_matrix(Rincline,'sxyz')
    
    Position_min=np.array([-0.06,-0.06,0.53,Rx,Ry,Rz]);    Position_max=np.array([0.0,-0.075,0.48,Rx,Ry,Rz])   

    face_array=sensor_array(frame_name,Position_min,Position_max,[s])     
    string_joint_link,string_collisions,XACRO,SensorCount=create_sensors_for_face(XACRO,SensorCount,face_array,string_joint_link,string_collisions)
    print "Generated Sensor ",SensorCount 
#    
    # -------------------------------------------------------------------------
    #
    #       Convert to text, Replace Colons and write result to .xacro file 
    #
    # -------------------------------------------------------------------------

    Original_xacro_file = open("/home/philip/catkin_ws_v5/src/ur_package/config/SensorGeneration/ur10.urdf.xacro", "r") # Bare version of .xacro
    Original_srdf_file = open("/home/philip/catkin_ws_v5/src/ur_package/config/SensorGeneration/ur10.srdf", "r") # Bare version of .sdf

    Gazebo_sensor_file = open("/opt/ros/indigo/share/ur_description/urdf/ur10.gazebo.xacro", 'w')    
    UR10_xacro_file = open("/opt/ros/indigo/share/ur_description/urdf/ur10.urdf.xacro", "w") # replace used version with sensor version
    UR10_srdf_file = open("/opt/ros/indigo/share/ur10_moveit_config/config/ur10.srdf", "w")
    
    
    contents = Original_srdf_file.readlines() # read bare version 
    if(modify):
        contents.insert(-1,string_collisions) # insert the supplmentary joint information at end of file
    contents = "".join(contents) # joint contents to create buffer
    UR10_srdf_file.write(contents) # write buffer to used version 
    
    
    contents = Original_xacro_file.readlines() # read bare version 
    if(modify):
        contents.insert(-6,string_joint_link) # insert the supplmentary joint information at end of file
    contents = "".join(contents) # joint contents to create buffer
    UR10_xacro_file.write(contents) # write buffer to used version 
    
    
    string=ltr.tostring(ROOT, pretty_print=True,xml_declaration=True)
    Gazebo_sensor_file.write(string.replace("COLON",":")) # A terrible hack I should be ashamed of required cause I couldn't be bothered learning more about namespaces
    
    
    
    Gazebo_sensor_file.close()
    Original_srdf_file.close()
    Original_xacro_file.close()
    UR10_xacro_file.close()
    UR10_srdf_file.close()
Esempio n. 49
0
	def getRotationAroundZ(self):
		"""Return the first angle ('szyx' decomposition) of this transform,
		i.e. the rotation applied about the z-axis."""
		angles = euler_from_matrix(self._matrix, 'szyx')
		return angles[0]
Esempio n. 50
0
def stat_poses(csv_file):
    """Print the incrementally fused (averaged) pose and its spread.

    csv_file: CSV with columns position_{x,y,z} and quaternion_{x,y,z,w},
    one sensed calibration-board pose per row. First averages all rows into
    a single base->board transform by incremental midpoint fusion, then
    reports the variance/std-dev of the translation components and rotation
    angle of each sample relative to the fused pose.
    """
    print('%s %s %s' % ('>' * 20, csv_file, '<' * 20))
    # pandas.DataFrame.from_csv was deprecated and removed in pandas 1.0;
    # read_csv is the documented replacement.
    df = pandas.read_csv(csv_file, index_col=None)

    # Compute average of transformation matrix
    # ========================================
    n_fused = 0
    matrix_fused = None  # average matrix from base -> checkerboard pose
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        if n_fused == 0:
            matrix_fused = matrix_pose
            n_fused += 1
            continue

        # Fuse transformation matrix
        # --------------------------
        # base -> pointFused (fused calibboard pose)
        # matrix_fused

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))

        # weight of the pointIn is 1 / (n_fused + 1)
        weight_of_new_point = 1. / (n_fused + 1)

        # midpoint of rotation
        angle, direction, point = tff.rotation_from_matrix(matrix_rel)
        matrix_rel_rot_mid = tff.rotation_matrix(
            angle * weight_of_new_point, direction, point)
        # midpoint of translation
        trans_rel = tff.translation_from_matrix(matrix_rel)
        matrix_rel_pos_mid = tff.translation_matrix(
            [x * weight_of_new_point for x in trans_rel])
        # relative transformation for midpoint from pointFused
        matrix_rel_mid = matrix_rel_pos_mid.dot(matrix_rel_rot_mid)

        # base -> pointFused_new
        matrix_fused = matrix_rel_mid.dot(matrix_fused)
        n_fused += 1
        # -------------------------------------------------------------------------
    assert n_fused == len(df)

    print('%s Average %s' % ('-' * 30, '-' * 30))
    print('N fused: %d' % n_fused)
    print('Matrix: \n%s' % matrix_fused)
    trans = tff.translation_from_matrix(matrix_fused)
    print('Position: {}'.format(trans))
    pos_keys = ['position_%s' % x for x in 'xyz']
    print('Position (simple): {}'.format(df.mean()[pos_keys].values))
    euler = tff.euler_from_matrix(matrix_fused)
    print('Orientation: {}'.format(euler))

    # Compute variance of transformation matrix
    # =========================================

    N = len(df)
    keys = ['x', 'y', 'z', 'angle']
    variance = {k: 0 for k in keys}
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))
        # compute distance for translation/rotation
        delta_x, delta_y, delta_z = tff.translation_from_matrix(matrix_rel)
        delta_angle, _, _ = tff.rotation_from_matrix(matrix_rel)

        variance['x'] += delta_x ** 2
        variance['y'] += delta_y ** 2
        variance['z'] += delta_z ** 2
        variance['angle'] += delta_angle ** 2
    for k in keys:
        variance[k] /= (N - 1)  # sample (Bessel-corrected) variance

    print('%s Std Deviation (Variance) %s' % ('-' * 22, '-' * 22))
    for k in keys:
        print('%s: %f (%f)' % (k, variance[k], math.sqrt(variance[k])))
        if k != 'angle':
            print('{} (simple): {}'.format(k, df.std()['position_%s' % k]))
def main():
    """Convert recorded yogurt-feeding rosbag files into plain-text pose logs.

    For every bag name in ``bag_name_list`` this opens
    ``<bag_path>/<name>.bag`` (twice, one pass per topic) and writes two log
    files under ``<pkg_path>/data/yogurt/logs/``:

    * ``<name>_tool.log`` -- one line per ``/spoon/pose`` message
    * ``<name>_head.log`` -- one line per ``/head/pose`` message

    Each line holds the message timestamp followed by the x/y/z translation
    and the 'rxyz' Euler angles of the pose.
    """
    rospack = rospkg.RosPack()
    pkg_path = rospack.get_path('hrl_base_selection')
    #bag_path = '/home/ari/svn/robot1_data/2011_ijrr_phri/log_data/bag/'
    bag_path = '/home/ari/git/gt-ros-pkg.hrl-assistive/hrl_base_selection/data/yogurt/bags/'

    # Time-stamped recordings to convert, one entry per trial.
    bag_name_list = ['2014-06-17-16-13-46',
                     #'2014-06-17-16-17-20', This is a bad bag
                     '2014-06-17-16-19-02',
                     '2014-06-17-16-21-57',
                     '2014-06-17-16-24-03',
                     '2014-06-17-16-26-12',
                     '2014-06-17-16-27-46',
                     '2014-06-17-16-29-18',
                     '2014-06-17-16-31-24',
                     '2014-06-17-16-33-13',
                     '2014-06-18-12-37-23',
                     '2014-06-18-12-39-43',
                     '2014-06-18-12-41-42',
                     '2014-06-18-12-43-45',
                     '2014-06-18-12-45-26',
                     '2014-06-18-12-47-22',
                     '2014-06-18-12-49-04',
                     '2014-06-18-12-50-52',
                     '2014-06-18-12-52-39',
                     '2014-06-18-12-54-26']

#    bag_name = '2014-06-17-16-17-20'
#    bag_name = '2014-06-17-16-19-02'
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
#    bag_name = 
    for num,bag_name in enumerate(bag_name_list):
        print 'Going to open bag: ',bag_name,'.bag'
        print 'Starting on a tool log file! \n'
        # First pass over the bag: extract the spoon (tool) pose stream.
        bag = rosbag.Bag(''.join([bag_path,bag_name,'.bag']), 'r')
        tool = open(''.join([pkg_path,'/data/yogurt/logs/',bag_name,'_tool.log']), 'w')
        for topic, msg, t in bag.read_messages():
            if topic == "/spoon/pose":
                # Copy translation/rotation out of the TransformStamped-style message.
                tx = copy.copy(msg.transform.translation.x)
                ty = copy.copy(msg.transform.translation.y)
                tz = copy.copy(msg.transform.translation.z)
                rx = copy.copy(msg.transform.rotation.x)
                ry = copy.copy(msg.transform.rotation.y)
                rz = copy.copy(msg.transform.rotation.z)
                rw = copy.copy(msg.transform.rotation.w)
                # Homogeneous world->spoon transform built from pos + quaternion.
                world_B_spoon = createBMatrix([tx,ty,tz],[rx,ry,rz,rw])#*adl2_B_pr2goal
                eul = tft.euler_from_matrix(world_B_spoon,'rxyz')
                #print world_B_shaver,'\n'
                #print eul,'\n'
                #print 'time: \n',t
                # Log line: "<stamp> x y z roll pitch yaw".
                tool.write(''.join([str(t),' %f %f %f %f %f %f \n' % (world_B_spoon[0,3],world_B_spoon[1,3],world_B_spoon[2,3],eul[0],eul[1],eul[2])]))
                #tool.write(''.join([world_B_shaver[0,3],' ',world_B_shaver[1,3],' ',world_B_shaver[2,3],' ',eul[0],' ',eul[1],' ',eul[2],'\n']))
        bag.close()
        tool.close()
        print 'Starting on a head log file! \n'
        # Second pass over the same bag: extract the head pose stream.
        bag = rosbag.Bag(''.join([bag_path,bag_name,'.bag']), 'r')
        head = open(''.join([pkg_path,'/data/yogurt/logs/',bag_name,'_head.log']), 'w')
        for topic, msg, t in bag.read_messages():
            if topic == "/head/pose":
                tx = copy.copy(msg.transform.translation.x)
                ty = copy.copy(msg.transform.translation.y)
                tz = copy.copy(msg.transform.translation.z)
                rx = copy.copy(msg.transform.rotation.x)
                ry = copy.copy(msg.transform.rotation.y)
                rz = copy.copy(msg.transform.rotation.z)
                rw = copy.copy(msg.transform.rotation.w)
                world_B_head = createBMatrix([tx,ty,tz],[rx,ry,rz,rw])#*adl2_B_pr2goal
                eul = copy.copy(tft.euler_from_matrix(world_B_head,'rxyz'))
                head.write(''.join([str(t),' %f %f %f %f %f %f \n' % (world_B_head[0,3],world_B_head[1,3],world_B_head[2,3],eul[0],eul[1],eul[2])]))
                #head.write(''.join([t,' ',world_B_head[0,3],' ',world_B_head[1,3],' ',world_B_head[2,3],' ',eul[0],' ',eul[1],' ',eul[2],' ',eul[3],'\n']))
        bag.close()
        head.close()

        print "Saved file! Finished bag #",num+1
    print 'Done with all bag files!'
Esempio n. 52
0
    def start_recording(self):
        """Main recording loop: periodically save stereo images and, when
        enabled, gripper kinematics, until recording appears to have stopped.

        Blocks until both camera images have arrived, then each iteration
        writes the left/right images into the ``*_capture1``/``*_capture2``
        folders and (if ``self.record_kinematics``) appends one feature row
        to ``self.data``.  Exits via ``self.save_and_quit()`` once the
        ``self.r_l``/``self.r_r`` counters stop changing for >5 periods.
        """
        print "Recorder Loop"
        # Busy-wait until both camera callbacks have delivered a frame.
        while self.left_image is None or self.right_image is None:
            pass

        if self.record_kinematics:
            # Wait until TF has a usable gripper<->base transform; early
            # lookups commonly raise ExtrapolationException while buffers fill.
            while 1:
                try:
                    (trans, rot) = self.listener.lookupTransform("/r_gripper_tool_frame", "/base_link", rospy.Time(0))
                    break
                except (tf.ExtrapolationException):
                    print "ExtrapolationException"
                    rospy.sleep(0.1)
                    continue

        frm = 0
        wait_thresh = 0
        prev_r_l = self.r_l
        prev_r_r = self.r_r

        # Finite-difference velocity estimates; zero until the second frame.
        trans_vel = np.array([0.0, 0.0, 0.0])
        rot_vel = np.array([0.0, 0.0, 0.0])

        prev_trans = None
        prev_rot = None

        for i in range(9999999):
            print frm
            rospy.sleep(self.period)

            start = time.time()

            # Save the current stereo pair, one file per camera per frame.
            cv2.imwrite(
                self.video_folder
                + self.task_name
                + "_"
                + self.trial_name
                + "_capture1/"
                + str(get_frame_fig_name(frm)),
                self.left_image,
            )
            cv2.imwrite(
                self.video_folder
                + self.task_name
                + "_"
                + self.trial_name
                + "_capture2/"
                + str(get_frame_fig_name(frm)),
                self.right_image,
            )

            if self.record_kinematics:

                (trans, quaternion) = self.listener.lookupTransform(
                    "/r_gripper_tool_frame", "/base_link", rospy.Time(0)
                )
                r_matrix = utils.quaternion2rotation(quaternion)
                rot = transformations.euler_from_matrix(r_matrix)
                # NOTE(review): position[-17] is assumed to index the
                # r_gripper joint -- confirm against the joint_state layout.
                r_gripper_angle = self.joint_state.position[-17]

                # Backward-difference velocities (skip on the first frame,
                # where there is no previous sample).
                if frm != 0:
                    trans_vel = (trans - prev_trans) / self.period
                    rot_vel = (rot - prev_rot) / self.period

                prev_trans = np.array(trans)
                prev_rot = np.array(rot)

                # NOTE(review): slice [16:-12] is assumed to select the arm
                # joints of interest -- verify against the joint list.
                js_pos = self.joint_state.position[16:-12]
                js_vel = self.joint_state.velocity[16:-12]

                # Feature row: position, flattened rotation matrix, velocities.
                W = list(trans) + list(r_matrix.flatten()) + list(trans_vel) + list(rot_vel)

                # Gripper angle is r_gripper_joint
                W.append(r_gripper_angle)

                W = W + list(js_pos) + list(js_vel)

                self.data = utils.safe_concatenate(self.data, utils.reshape(np.array(W)))

            frm += 1

            # If the external counters stopped advancing, recording has
            # presumably ended; quit after 5 consecutive idle periods.
            if (self.r_l == prev_r_l) and (self.r_r == prev_r_r):
                print "Not recording anymore?"
                wait_thresh += 1
                if wait_thresh > 5:
                    self.save_and_quit()

            prev_r_l = self.r_l
            prev_r_r = self.r_r

            end = time.time()

            print end - start
				plane_resp.pose.orientation.w]))

#plane_pose = numpy.eye(4)
# Translation of the detected plane; the rotation block of plane_pose was
# populated from the detection quaternion above (outside this excerpt).
plane_pose[0,3] = plane_resp.pose.position.x
plane_pose[1,3] = plane_resp.pose.position.y
plane_pose[2,3] = plane_resp.pose.position.z

# Look up the transform from the Kinect optical frame into the map frame.
timeout = 10
destination_frame = '/map'
detection_frame = '/head/kinect2_rgb_optical_frame'
listener = tf.TransformListener()

listener.waitForTransform(detection_frame,destination_frame,rospy.Time(0),rospy.Duration(timeout))
frame_trans,frame_rot = listener.lookupTransform(destination_frame,detection_frame,rospy.Time(0))
# Homogeneous destination->detection matrix: rotation from the quaternion,
# translation filled into the last column.
frame_offset = numpy.matrix(quaternion_matrix(frame_rot))
frame_offset[0,3] = frame_trans[0]
frame_offset[1,3] = frame_trans[1]
frame_offset[2,3] = frame_trans[2]

# Express the detected plane pose in the world (map) frame.
plane_in_world = numpy.array(numpy.dot(frame_offset,plane_pose))

#Add pi/2 to az
# Rotate the plane a quarter turn about its Euler z axis; euler_matrix
# returns a 4x4, so only the 3x3 rotation block is copied back.
curr_rot = plane_in_world[0:3,0:3]
ax,ay,az = euler_from_matrix(curr_rot)
curr_rot = euler_matrix(ax,ay,az+numpy.pi/2)
plane_in_world[0:3,0:3] = curr_rot[0:3,0:3]


# Insert the plane kinbody into the OpenRAVE environment at the final pose.
env.AddKinBody(plane)
plane.SetTransform(plane_in_world)
Esempio n. 54
0
    def move_velocity(self, velocity=0.015, is_translation=True, velocity_rot=None):
        """Drive the end effector at a constant commanded velocity using a PI
        velocity controller, until ``self._is_moving`` is cleared or ROS
        shuts down.

        :param velocity: desired speed; linear (along the velocity frame's x
            axis) when ``is_translation`` else rotational (about roll).
        :param is_translation: True to control translation, False rotation.
        :param velocity_rot: optional rotation matrix updating the velocity
            frame via ``update_velocity_rot`` before moving.
        """
        if velocity_rot is not None:
            self.update_velocity_rot(velocity_rot)
        pos_i_des, rot_i_des = self.arm.get_ep()
        pos_i_act, rot_i_act = self.arm.get_end_effector_pose()

        # this is the current residiual error in the controller at the start
        pos_err = pos_i_act - pos_i_des 
        rot_err = rot_i_act.T * rot_i_des

        # Build the PI controller from the translation (kp_*) or rotation
        # (kr_*) gain set; the integrator is pre-loaded and a feed-forward
        # term added, both signed to match the commanded direction.
        # NOTE(review): rate and the kp_*/kr_* gains are module-level
        # constants defined outside this excerpt.
        if is_translation:
            pos_vel_des = velocity
            pid_ctrl = PIDController(rate=rate, k_p=kp_proportional, k_i=kp_integral, 
                                     i_max=None, init_integ=np.sign(pos_vel_des) * kp_init_integral, 
                                     saturation=kp_max_ctrl, 
                                     feed_forward=np.sign(pos_vel_des) * kp_constant,
                                     name="arm_vel")
        else:
            rot_vel_des = velocity
            pid_ctrl = PIDController(rate=rate, k_p=kr_proportional, k_i=kr_integral, 
                                     i_max=None, init_integ=np.sign(rot_vel_des) * kr_init_integral, 
                                     saturation=kr_max_ctrl, 
                                     feed_forward=np.sign(rot_vel_des) * kr_constant,
                                     name="arm_vel")
        # 40-sample moving-average window used as a crude velocity filter.
        vels = deque([np.array([0]*6)]*40)
        r = rospy.Rate(rate)
        self._is_moving = True
        while not rospy.is_shutdown() and self._is_moving:
            with self.lock:
                vel_rot = self.velocity_rot.copy()
            cur_pos, cur_rot = self.arm.get_end_effector_pose()

            # hacky velocity filter
            xd_act = self.arm.get_controller_state()['xd_act']
            vels.append(xd_act)
            vels.popleft()
            vel_filt = np.mat(np.mean(vels, 0)).T
            # Filtered linear / roll velocity expressed in the velocity frame.
            x_vel_filt = (vel_rot.T * vel_filt[:3,0])[0,0]
            roll_vel_filt = (vel_rot.T * vel_filt[3:,0])[0,0]

            if is_translation:
                # PI velocity controller for position
                # Command motion only along the velocity frame's x axis; the
                # diag masks keep the other two axes at the initial setpoint.
                pos_ctrl = pid_ctrl.update_state(pos_vel_des - x_vel_filt)
                pos_des = vel_rot * (np.mat([pos_ctrl, 0, 0]).T + 
                                     np.mat(np.diag([1, 0, 0])) * vel_rot.T * (cur_pos - pos_err) +
                                     np.mat(np.diag([0, 1, 1])) * vel_rot.T * pos_i_des)

                rot_des = rot_i_des # don't change rotation

            if not is_translation:
                # Current orientation expressed in the velocity frame, so the
                # controlled roll angle can be isolated via Euler extraction.
                rot_des_vel_frame = np.mat(np.eye(4))
                rot_des_vel_frame[:3,:3] = cur_rot * rot_err * vel_rot
                roll_des_vel_frame, r2, r3 = tf_trans.euler_from_matrix(rot_des_vel_frame)

                # PI velocity controller for rotation
                rot_ctrl = pid_ctrl.update_state(rot_vel_des + roll_vel_filt)
                print roll_vel_filt, rot_vel_des, rot_vel_des - roll_vel_filt

                # Desired roll = measured roll + controller output; pitch/yaw
                # are held at their initial values in the velocity frame.
                roll_ctrl_des = roll_des_vel_frame + rot_ctrl
                r1, pitch_i_des, yaw_i_des = tf_trans.euler_from_matrix(rot_i_des * vel_rot)
                rot_des = np.mat(tf_trans.euler_matrix(roll_ctrl_des, pitch_i_des, yaw_i_des)[:3,:3]) * vel_rot.T

                pos_des = pos_i_des # don't change translation

            self.arm.set_ep((pos_des, rot_des), 1)
            r.sleep()
        # Freeze the arm at its current setpoint once motion stops.
        self.arm.set_ep(self.arm.get_ep(), 1)
Esempio n. 55
0
def quaternion_to_rpy(q):
    """Convert a Quaternion message into a roll/pitch/yaw tuple (sxyz)."""
    rot_mat = transformations.quaternion_matrix(np.array([q.x, q.y, q.z, q.w]))
    return transformations.euler_from_matrix(rot_mat, axes='sxyz')
Esempio n. 56
0
    def _make_generic(args):
        """Normalize any accepted pose representation into a common tuple.

        Recognized inputs (possibly prefixed by a frame-id string, handled
        recursively): Pose, PoseStamped, Transform, TransformStamped, Twist,
        TwistStamped, a 4x4 homogeneous matrix, or a (pos, rot) pair -- given
        as one 2-tuple or as two positional arguments -- where rot may be a
        3x3 rotation matrix, 3 Euler angles, or a 4-element quaternion.

        :returns: (header, homo_mat, rot_quat, rot_euler) where header is
            ``[seq, stamp, frame_id]`` or None; all four are None when the
            input is not recognized.
        """
        # A leading string is a frame id: convert the rest, then stamp it.
        if type(args[0]) == str:
            frame_id = args[0]
            header, homo_mat, rot_quat, rot_euler = PoseConverter._make_generic(args[1:])
            if header is None:
                header = [0, rospy.Time.now(), '']
            header[2] = frame_id
            return header, homo_mat, rot_quat, rot_euler

        if len(args) == 1:
            # Plain ROS messages carry no header; the *Stamped variants
            # contribute [seq, stamp, frame_id].
            if type(args[0]) is Pose:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_pose_msg(args[0])
                return None, homo_mat, rot_quat, rot_euler

            elif type(args[0]) is PoseStamped:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_pose_msg(args[0].pose)
                seq = args[0].header.seq
                stamp = args[0].header.stamp
                frame_id = args[0].header.frame_id
                return [seq, stamp, frame_id], homo_mat, rot_quat, rot_euler

            elif type(args[0]) is Transform:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_tf_msg(args[0])
                return None, homo_mat, rot_quat, rot_euler

            elif type(args[0]) is TransformStamped:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_tf_msg(args[0].transform)
                seq = args[0].header.seq
                stamp = args[0].header.stamp
                frame_id = args[0].header.frame_id
                return [seq, stamp, frame_id], homo_mat, rot_quat, rot_euler
                
            elif type(args[0]) is Twist:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_twist_msg(args[0])
                return None, homo_mat, rot_quat, rot_euler

            elif type(args[0]) is TwistStamped:
                homo_mat, rot_quat, rot_euler = PoseConverter._extract_twist_msg(args[0].twist)
                seq = args[0].header.seq
                stamp = args[0].header.stamp
                frame_id = args[0].header.frame_id
                return [seq, stamp, frame_id], homo_mat, rot_quat, rot_euler

            # A 4x4 array/matrix is already a homogeneous transform.
            elif isinstance(args[0], (np.matrix, np.ndarray)) and np.shape(args[0]) == (4, 4):
                return (None, np.mat(args[0]), tf_trans.quaternion_from_matrix(args[0]).tolist(),
                        tf_trans.euler_from_matrix(args[0]))

            # (pos, rot) pair: normalize pos to a 3x1 column first.
            elif isinstance(args[0], (tuple, list)) and len(args[0]) == 2:
                pos_arg = np.mat(args[0][0])
                rot_arg = np.mat(args[0][1])
                if pos_arg.shape == (1, 3):
                    # matrix is row, convert to column
                    pos = pos_arg.T
                elif pos_arg.shape == (3, 1):
                    pos = pos_arg
                else:
                    return None, None, None, None

                if rot_arg.shape == (3, 3):
                    # rotation matrix
                    homo_mat = np.mat(np.eye(4))
                    homo_mat[:3,:3] = rot_arg
                    quat = tf_trans.quaternion_from_matrix(homo_mat)
                    rot_euler = tf_trans.euler_from_matrix(homo_mat)
                else:
                    # Flat rotation: length 3 -> Euler angles, 4 -> quaternion.
                    if rot_arg.shape[1] == 1:
                        # make into row matrix
                        rot_arg = rot_arg.T
                    else:
                        rot_arg = rot_arg.tolist()[0]
                        if len(rot_arg) == 3:
                            # Euler angles rotation
                            homo_mat = np.mat(tf_trans.euler_matrix(*rot_arg))
                            quat = tf_trans.quaternion_from_euler(*rot_arg)
                            rot_euler = rot_arg
                        elif len(rot_arg) == 4:
                            # quaternion rotation
                            homo_mat = np.mat(tf_trans.quaternion_matrix(rot_arg))
                            quat = rot_arg
                            rot_euler = tf_trans.euler_from_quaternion(quat)
                        else:
                            return None, None, None, None

                homo_mat[:3, 3] = pos
                return None, homo_mat, np.array(quat), rot_euler

        # Two positional arguments are treated as a (pos, rot) pair.
        elif len(args) == 2:
            header, homo_mat, rot_quat, rot_euler = PoseConverter._make_generic(
                                                                  ((args[0], args[1]),))
            if homo_mat is not None:
                return header, homo_mat, rot_quat, rot_euler

        # Unrecognized input.
        return None, None, None, None
Esempio n. 57
0
 def _turn_by(self, delta_ang, block=True):
     """Rotate the base by delta_ang radians relative to its current
     odometry heading, delegating the motion to ``turn_to``."""
     rot = tfu.transform('base_footprint',
                         'odom_combined', self.tflistener)[0:3, 0:3]
     heading = tr.euler_from_matrix(rot, 'sxyz')[2]
     self.turn_to(heading + delta_ang, block)
Esempio n. 58
0
    def DetectBlocks(self, robot, table, blocks=None,timeout=10, **kw_args):
        """
        Calls detector for blocks and places them on the table

        @param robot: The robot instance using the detector
        @param table: The kinbody for the table on which the blocks are placed
        @param blocks: List of blocks currently in the environment; if
                       non-empty, redetection is not done
        @param timeout: Seconds to wait for the detection->destination TF

        @return The list of blocks detected
        """
        # BUG FIX: the original guard was ``blocks is not None and
        # len(blocks) == 0``, so the default call (blocks=None) skipped
        # detection entirely and returned None instead of the documented
        # list.  Treat None as "no blocks known yet".
        if blocks is None:
            blocks = []

        if len(blocks) == 0:
            # Add all blocks
            env = robot.GetEnv()

            # Get the pr-ordata package path
            import prpy.util
            block_path = os.path.join('objects', 'block.kinbody.xml')

            detected_blocks = self.find_blocks(cloud_topic=self.point_cloud_topic)

            # Height of the table surface; detected blocks are snapped to it.
            from prpy.util import ComputeEnabledAABB
            with prpy.rave.Disabled(table, padding_only=True):
                table_aabb = ComputeEnabledAABB(table)
                z = table_aabb.pos()[2] + table_aabb.extents()[2] + table_z_offset #OFFSET SET AT TOP

            for b in detected_blocks:

                block = env.ReadKinBodyXMLFile(block_path)

                # Tint the kinbody with the block's average detected color.
                for link in block.GetLinks():
                    for geometry in link.GetGeometries():
                        geometry.SetDiffuseColor(numpy.array([b.avg_color.r,b.avg_color.g,b.avg_color.b,b.avg_color.a]))

                # detection_frame -> block pose as a homogeneous matrix.
                block_pose = numpy.array(quaternion_matrix([
                        b.pose.orientation.x,
                        b.pose.orientation.y,
                        b.pose.orientation.z,
                        b.pose.orientation.w]))
                block_pose[0,3] = b.pose.position.x
                block_pose[1,3] = b.pose.position.y
                block_pose[2,3] = b.pose.position.z

                # destination_frame -> detection_frame transform from TF.
                self.listener.waitForTransform(
                        self.detection_frame,
                        self.destination_frame,
                        rospy.Time(0),
                        rospy.Duration(timeout))

                frame_trans, frame_rot = self.listener.lookupTransform(
                        self.destination_frame,
                        self.detection_frame,
                        rospy.Time(0))

                frame_offset = numpy.matrix(quaternion_matrix(frame_rot))
                frame_offset[0,3] = frame_trans[0]
                frame_offset[1,3] = frame_trans[1]
                frame_offset[2,3] = frame_trans[2]

                block_in_world = numpy.array(numpy.dot(frame_offset,block_pose))

                # Snap block to table
                block_in_world[2,3] = z

                # Snap block upright on the table: zero roll/pitch, keep yaw.
                block_matrix = block_in_world[0:3,0:3]
                ax, ay, az = euler_from_matrix(block_matrix)
                block_in_world_corrected = euler_matrix(0, 0, az)
                block_in_world[0:3,0:3] = block_in_world_corrected[0:3,0:3]
                block.SetTransform(block_in_world)

                # Set block name; anonymous=True lets OpenRAVE uniquify it.
                block.SetName('block')
                env.Add(block,anonymous=True)
                blocks.append(block)

        return blocks