Example #1
    def __init__(self):
        rospy.init_node(NODE_NAME)
        sys.path.append(
            '~/Documents/RobotSemantics/semseg_ws/src/image_segnet/object_class_pcl/src'
        )
        for name, default in PARAM_LIST:
            setattr(self, name, rospy.get_param(PARAM_PREFIX + name, default))

        rospy.Subscriber(self.in_pcl, PointCloud2, self.pcl_cb)
        rospy.Subscriber(self.in_viz + '/' + self.camera_used + '/camera_info',
                         CameraInfo, self.ci_cb)
        rospy.Subscriber(
            self.in_viz + '/' + self.camera_used + '/image/compressed',
            CompressedImage, self.image_cb)
        rospy.Subscriber(self.in_semseg + '/class_matrix', numpy_msg(Floats),
                         self.clmat_cb)
        self.pcl_pub = rospy.Publisher(self.out_pcl,
                                       PointCloud2,
                                       queue_size=20)
        self.clmats = {}
        self.cis = {}
        self.imgsC = {}
        self.timestamps = []
        self.counterTS = 0
        self.counter = 0
        # TODO: should be made adaptive; currently set manually
        self.camera0PM = [
            638.81494, 0, 625.98561, 0, 0, 585.79797, 748.57858, 0, 0, 0, 1, 0
        ]
        self.tf_listener = tf.TransformListener()
        self.adjustment = np.dot(
            tf_help.rotation_matrix(math.pi / 2, (0, 1, 0)),
            tf_help.rotation_matrix(math.pi, (0, 0, 1)))
Example #2
    def compute_contact_coordinates(self, disk_center_position):

        rot_psi = tfms.rotation_matrix(np.radians(self._ginsberg_euler.x),
                                       [0, 0, 1])
        init_rot = tfms.rotation_matrix(math.pi / 2, [0, 0, 1])
        rot_theta = tfms.rotation_matrix(np.radians(self._ginsberg_euler.y),
                                         [0, 1, 0])
        rot_phi = tfms.rotation_matrix(np.radians(self._ginsberg_euler.z),
                                       [0, 0, 1])

        rot_StoQ = np.matmul(np.matmul(rot_psi, init_rot), rot_theta)
        rot_StoB = np.matmul(rot_StoQ, rot_phi)

        ground_contact_vector = np.matmul(
            rot_StoQ, np.array([[self._object_radius], [0], [0], [0]]))

        contact_position_x = disk_center_position.x + ground_contact_vector[0]
        contact_position_y = disk_center_position.y + ground_contact_vector[1]

        contact_position = Vector3()
        contact_position.x = contact_position_x
        contact_position.y = contact_position_y
        contact_position.z = 0

        self._ginsberg_contact_position_pub.publish(contact_position)
Example #3
    def test_euler_XYZ_from_rotation_matrix(self):
        """
        r in 'rxyz' means 'r'otating frames: rotations are applied consecutively with respect to the current
        frame's axes ("relative rotation"). The order of rotations is first X, second Y, third Z. The rotation
        matrix composition rule for relative rotations is Rx * Ry * Rz.

        s in 'sxyz' means 's'tationary frames: rotations are applied with respect to the coordinate frame of
        the INITIAL frame ("fixed-axis rotation"). The rotation matrix composition rule for fixed-axis
        rotations is Rz * Ry * Rx.
        """

        Rx_90 = tr.rotation_matrix(
            math.pi / 4, [1, 0, 0])  # first, rotate 45 degrees around the x axis
        Ry_90 = tr.rotation_matrix(
            math.pi / 5,
            [0, 1, 0])  # second, 36 degrees around the y axis of the current frame
        Rz_90 = tr.rotation_matrix(
            math.pi / 6,
            [0, 0, 1])  # third, 30 degrees around the z axis of the current frame

        R1 = np.matmul(Rz_90, Ry_90)
        R = np.matmul(R1, Rx_90)

        euler_angles = tr.euler_from_matrix(R, 'sxyz')
        euler_angles_expected = [math.pi / 4, math.pi / 5, math.pi / 6]

        np.testing.assert_almost_equal(euler_angles, euler_angles_expected)
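The duality described in the docstring can be checked directly: reading the same matrix with the reversed axis string in a rotating frame yields the same angles in reverse order. A minimal sketch, assuming tf.transformations is imported as tr:

import math
import numpy as np
import tf.transformations as tr

R = np.matmul(np.matmul(tr.rotation_matrix(math.pi / 6, [0, 0, 1]),
                        tr.rotation_matrix(math.pi / 5, [0, 1, 0])),
              tr.rotation_matrix(math.pi / 4, [1, 0, 0]))
# 'sxyz' (fixed axes) and 'rzyx' (rotating axes) describe the same matrix,
# with the angle order reversed.
np.testing.assert_almost_equal(tr.euler_from_matrix(R, 'sxyz'),
                               tr.euler_from_matrix(R, 'rzyx')[::-1])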
Example #4
    def gp_sampling(self, p, R):
        h = 100.  # Depth of the field of view
        b = h / np.cos(self.mbes_angle / 2.)
        n = 80  # Number of sampling points

        # Triangle vertices
        Rxmin = rotation_matrix(-self.mbes_angle / 2., (1, 0, 0))[0:3, 0:3]
        Rxmax = rotation_matrix(self.mbes_angle / 2., (1, 0, 0))[0:3, 0:3]
        Rxmin = np.matmul(R, Rxmin)
        Rxmax = np.matmul(R, Rxmax)

        p2 = p + Rxmax[:, 2] / np.linalg.norm(Rxmax[:, 2]) * b
        p3 = p + Rxmin[:, 2] / np.linalg.norm(Rxmin[:, 2]) * b

        # Sampling step
        i_range = np.linalg.norm(p3 - p2) / n

        # Across ping direction
        direc = ((p3 - p2) / np.linalg.norm(p3 - p2))

        #  start = time.time()
        p2s = np.full((n, len(p2)), p2)
        direcs = np.full((n, len(direc)), direc)
        i_ranges = np.full((1, n), i_range) * (np.asarray(range(0, n)))
        sampling_points = p2s + i_ranges.reshape(n, 1) * direcs

        # Sample GP here
        mu, sigma = self.gp.sample(np.asarray(sampling_points)[:, 0:2])
        mu_array = np.array([mu])

        # Concatenate sampling points x,y with sampled z
        mbes_gp = np.concatenate(
            (np.asarray(sampling_points)[:, 0:2], mu_array.T), axis=1)
        #  print(mbes_gp)
        return mbes_gp
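As an aside, the manual point construction above (base point plus index-scaled direction) is equivalent to np.linspace with array endpoints. A sketch, assuming p2, p3 and n as in the example and NumPy >= 1.16:

# n points from p2 toward p3, excluding p3 itself.
sampling_points = np.linspace(p2, p3, n, endpoint=False)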
Example #5
    def compute_contact_coordinates(self):
        """Trace of ground contact as the object rolls without slipping"""

        rot_psi = tfms.rotation_matrix(self._body_euler.x, [0,0,1])
        init_rot = tfms.rotation_matrix(math.pi/2, [0,0,1])
        rot_theta = tfms.rotation_matrix(self._body_euler.y, [0,1,0])
        rot_phi = tfms.rotation_matrix(self._body_euler.z, [0,0,1])


        # This frame does not roll with the object, so it is suitable for
        # determining the ground contact point.
        rot_StoQ = np.matmul(np.matmul(rot_psi, init_rot), rot_theta)

        rot_StoB = np.matmul(rot_StoQ, rot_phi)  # world-to-body rotation; unused here

        ground_contact_vector = np.matmul(rot_StoQ,np.array([[OBJECT_RADIUS],[0],[0],[0]]))

        contact_position_x = self._disk_center_position.x + ground_contact_vector[0]
        contact_position_y = self._disk_center_position.y + ground_contact_vector[1]

        self._ground_contact = Vector3()
        self._ground_contact.x = contact_position_x
        self._ground_contact.y = contact_position_y
        self._ground_contact.z = 0

        #publish
        self._pub_kinematics._ground_contact_publisher.publish(self._ground_contact)
Example #6
def flipOrbToNDT(orbPose):
    qOrb = [orbPose.qx, orbPose.qy, orbPose.qz, orbPose.qw]
    orbFlip = trafo.concatenate_matrices(
        trafo.quaternion_matrix(qOrb),
        trafo.rotation_matrix(np.pi / 2, (1, 0, 0)),
        trafo.rotation_matrix(np.pi / 2, (0, 0, 1)))
    return trafo.quaternion_from_matrix(orbFlip)
Example #7
def flipOrbToNDT (orbPose):
    qOrb = [orbPose.qx, orbPose.qy, orbPose.qz, orbPose.qw]
    orbFlip = trafo.concatenate_matrices(
        trafo.quaternion_matrix(qOrb),
        trafo.rotation_matrix(np.pi/2, (1,0,0)),
        trafo.rotation_matrix(np.pi/2, (0,0,1))
    )
    return trafo.quaternion_from_matrix(orbFlip)
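The two fixed rotations can also be folded into one precomputed quaternion, avoiding the matrix round-trip. A sketch under the same trafo/np imports (flipOrbToNDT_q is a hypothetical name):

# Constant reorientation Rx(pi/2) * Rz(pi/2), expressed as a quaternion.
Q_FLIP = trafo.quaternion_multiply(
    trafo.quaternion_about_axis(np.pi / 2, (1, 0, 0)),
    trafo.quaternion_about_axis(np.pi / 2, (0, 0, 1)))

def flipOrbToNDT_q(orbPose):
    qOrb = [orbPose.qx, orbPose.qy, orbPose.qz, orbPose.qw]
    # Same result as flipOrbToNDT, up to quaternion sign.
    return trafo.quaternion_multiply(qOrb, Q_FLIP)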
Example #8
    def compute_Q_frame(self):
        # Q frame only tilts and yaws, but does not spin. Useful to determine ground contact
        rot_psi = tfms.rotation_matrix(self._body_euler.x, [0, 0, 1])
        init_rot = tfms.rotation_matrix(math.pi / 2, [0, 0, 1])
        rot_theta = tfms.rotation_matrix(self._body_euler.y, [0, 1, 0])
        rot_phi = tfms.rotation_matrix(self._body_euler.z, [0, 0, 1])

        self._world_Q_rot = np.matmul(np.matmul(rot_psi, init_rot), rot_theta)
Example #9
def dynamics(s):
    global param, inp, wind, forces, moments
    m= param['m']
    Jx = param['Jx']
    Jy = param['Jy']
    Jz = param['Jz']
    Jxz = param['Jxz']
    # Jxz = 0.0
    p_n = s[0][0]
    p_e = s[1][0]
    p_d = s[2][0]
    u = s[3][0]
    v = s[4][0]
    w = s[5][0]
    phi = s[6][0]
    theta = s[7][0]
    psi = s[8][0]
    p = s[9][0]
    q = s[10][0]
    r = s[11][0]

    state_dot = np.zeros((12, 1))
    origin, xaxis, yaxis, zaxis = (0., 0., 0.), (1., 0., 0.), (0., 1., 0.), (0., 0., 1.)
    Rx = rotation_matrix(phi,xaxis)
    Ry = rotation_matrix(theta,yaxis)
    Rz = rotation_matrix(psi,zaxis)
    R_V_B = concatenate_matrices(Rz,Ry,Rx)
    R_V_B = R_V_B[0:3,0:3]

    sphi   = np.sin(phi)
    cphi   = np.cos(phi)
    stheta = np.sin(theta)
    ctheta = np.cos(theta)
    spsi   = np.sin(psi)
    cpsi   = np.cos(psi)
    ttheta = np.tan(theta)
    secth = 1.0/ctheta
    # calpha = np.cos(alpha)
    # salpha = np.sin(alpha)

    Mat2 = np.matrix([[1.0,sphi*ttheta,cphi*ttheta],[0.,cphi,-sphi],[0.,sphi*secth,cphi*secth]])
    J = np.matrix([[Jx, 0., -Jxz],[0., Jy, 0.],[-Jxz, 0., Jz]])

    temp1 = R_V_B*np.matrix(s[3:6])
    state_dot[0:3] = temp1

    temp2 = Mat2*np.matrix(s[9:12])
    state_dot[6:9] = temp2

    temp3 = np.transpose(-np.cross(np.transpose(np.matrix(s[9:12])),np.transpose(np.matrix(s[3:6])))) + (1.0/m)*forces
    state_dot[3:6] = temp3

    temp4 = np.linalg.pinv(J)*(np.transpose(-np.cross(np.transpose(np.matrix(s[9:12])),np.transpose(J*np.matrix(s[9:12])))) + moments)
    state_dot[9:12] = temp4
    s2 = np.copy(state_dot)

    return s2
Example #10
    def __init__(self, **kwargs):
        super(RBOHand2Kuka, self).__init__()

        #old parameters below - must be updated to new convention!
        self['surface_grasp']['initial_goal'] = np.array([
            -0.05864322834179703, 0.4118988657714642, -0.05864200146127985,
            -1.6887810963180838, -0.11728653060066829, -0.8237944986945402, 0
        ])
        self['surface_grasp']['pregrasp_pose'] = tra.translation_matrix(
            [0, 0, -0.2])
        self['surface_grasp']['hand_pose'] = tra.concatenate_matrices(
            tra.translation_matrix([0, 0, 0]),
            tra.rotation_matrix(math.radians(0.), [0, 0, 1]))
        self['surface_grasp']['downward_force'] = 7.
        self['surface_grasp']['valve_pattern'] = (np.array([[0.,
                                                             4.1], [0., 0.1],
                                                            [0., 5.], [0., 5.],
                                                            [0., 2.],
                                                            [0., 3.5]]),
                                                  np.array([[1, 0]] * 6))

        self['wall_grasp']['pregrasp_pose'] = tra.translation_matrix(
            [0.05, 0, -0.2])
        self['wall_grasp']['table_force'] = 7.0
        self['wall_grasp']['sliding_speed'] = 0.1
        self['wall_grasp']['up_speed'] = 0.1
        self['wall_grasp']['down_speed'] = 0.1
        self['wall_grasp']['wall_force'] = 10.0
        self['wall_grasp']['angle_of_attack'] = 1.0  #radians
        self['wall_grasp']['object_lift_time'] = 4.5

        self['edge_grasp']['initial_goal'] = np.array([
            -0.05864322834179703, 0.4118988657714642, -0.05864200146127985,
            -1.6887810963180838, -0.11728653060066829, -0.8237944986945402, 0
        ])
        self['edge_grasp']['pregrasp_pose'] = tra.translation_matrix(
            [0.2, 0, 0.4])
        self['edge_grasp']['hand_object_pose'] = tra.concatenate_matrices(
            tra.translation_matrix([0, 0, 0.05]),
            tra.rotation_matrix(math.radians(10.), [1, 0, 0]),
            tra.euler_matrix(0, 0, -math.pi / 2.))
        self['edge_grasp']['grasp_pose'] = tra.concatenate_matrices(
            tra.translation_matrix([0, -0.05, 0]),
            tra.rotation_matrix(math.radians(10.), [1, 0, 0]),
            tra.euler_matrix(0, 0, -math.pi / 2.))
        self['edge_grasp']['postgrasp_pose'] = tra.translation_matrix(
            [0, 0, -0.1])
        self['edge_grasp']['downward_force'] = 4.0
        self['edge_grasp']['sliding_speed'] = 0.04
        self['edge_grasp']['valve_pattern'] = (np.array([[0, 0], [0,
                                                                  0], [1, 0],
                                                         [1, 0], [1, 0],
                                                         [1, 0]]),
                                               np.array([[0, 3.0]] * 6))
Example #11
    def set_cam_object_T(self, object_pose):
        Tr = tr.translation_matrix(
            [object_pose[0], object_pose[1], object_pose[2]])

        xaxis, yaxis, zaxis = [1, 0, 0], [0, 1, 0], [0, 0, 1]
        Rx = tr.rotation_matrix(object_pose[3], xaxis)
        Ry = tr.rotation_matrix(object_pose[4], yaxis)
        Rz = tr.rotation_matrix(object_pose[5], zaxis)
        R = tr.concatenate_matrices(Rx, Ry, Rz)

        T = tr.concatenate_matrices(Tr, R)
        return T
Example #12
    def set_cam_object_T(self, ball_pose):
        Tr = tr.translation_matrix([ball_pose[0], ball_pose[1], ball_pose[2]])

        # the rotation angles are zero because the detected ball does not have any orientation
        xaxis, yaxis, zaxis = [1, 0, 0], [0, 1, 0], [0, 0, 1]
        Rx = tr.rotation_matrix(0, xaxis)
        Ry = tr.rotation_matrix(0, yaxis)
        Rz = tr.rotation_matrix(0, zaxis)
        R = tr.concatenate_matrices(Rx, Ry, Rz)

        T = tr.concatenate_matrices(Tr, R)
        return T
Example #13
def tagTworld():
    """ Returns homogeneous transformation that expresses world_p in tag frame.

    Returns:
        tag_T_world (numpy.array): homogeneous transformation that expresses world_p in the tag coordinate frame

    """
    T_x = tr.rotation_matrix(-math.pi / 2, [1, 0, 0])
    T_z = tr.rotation_matrix(math.pi / 2, [0, 0, 1])

    tag_T_world = np.matmul(T_x, T_z)

    return tag_T_world
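Usage sketch (illustrative values): applying the transform to a homogeneous world point expresses it in the tag frame.

world_p = np.array([1.0, 2.0, 3.0, 1.0])  # homogeneous world point
tag_p = np.matmul(tagTworld(), world_p)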
Example #14
 def get_camera_transformation(self, offsets=None, current=False):
     # R fixes the axes correspondence to the gripper
     Rx = tft.rotation_matrix(pi / 2, (1, 0, 0))
     Rz = tft.rotation_matrix(pi / 2, (0, 0, 1))
     R = Rz.dot(Rx)
     if offsets is None:
         D_e = tft.translation_matrix((0.094, -0.031, 0.0235))  # m
     else:
         D_e = tft.translation_matrix(offsets)  # m
     if current:  # use current robot position to determine camera T
         R_e = self.get_self_pose()  # use orientation of EE
     else:
         R_e = tft.rotation_matrix(-pi / 4, (1, 0, 0))  # rotation around x
     return R_e.dot(D_e).dot(R)
Example #15
def bebop2track_transform_odom(bebop_odom):

    global track_last_known_pos
    global bebop_last_known_pos
    global bebop_last_known_hdg
    global track_last_known_hdg

    pose = bebop_odom.pose.pose
    twist = bebop_odom.twist.twist.linear

    bebop_d_pos = np.array([pose.position.x, pose.position.y, pose.position.z
                            ]) - bebop_last_known_pos
    bebop_q = np.array([
        pose.orientation.x, pose.orientation.y, pose.orientation.z,
        pose.orientation.w
    ])

    hdg_bebop = tfs.euler_from_quaternion(bebop_q)[2]
    track_bebop_dhdg = track_last_known_hdg - bebop_last_known_hdg

    R_hdg = tfs.rotation_matrix(track_bebop_dhdg, (0, 0, 1))[0:3, 0:3]

    track_pos = np.matmul(R_hdg, bebop_d_pos) + track_last_known_pos

    bebop_vel_body = np.array([twist.x, twist.y, twist.z])

    R_body2track = tfs.rotation_matrix(hdg_bebop + track_bebop_dhdg,
                                       (0, 0, 1))[0:3, 0:3]

    track_vel_body = np.matmul(R_body2track, bebop_vel_body)

    track_odom = bebop_odom

    track_odom.twist.twist.linear.x = track_vel_body[0]
    track_odom.twist.twist.linear.y = track_vel_body[1]
    track_odom.twist.twist.linear.z = track_vel_body[2]

    track_odom.pose.pose.position.x = track_pos[0]
    track_odom.pose.pose.position.y = track_pos[1]
    track_odom.pose.pose.position.z = track_pos[2]

    track_quat = tfs.quaternion_multiply(
        tfs.quaternion_from_euler(0, 0, track_bebop_dhdg), bebop_q)

    track_odom.pose.pose.orientation.x = track_quat[0]
    track_odom.pose.pose.orientation.y = track_quat[1]
    track_odom.pose.pose.orientation.z = track_quat[2]
    track_odom.pose.pose.orientation.w = track_quat[3]

    return track_odom
Example #16
    def __init__(self):
        rospy.init_node(NODE_NAME)
        sys.path.append(
            '~/Documents/RobotSemantics/semseg_ws/src/image_segnet/object_class_pcl/src'
        )

        #import parameters to self
        for name, default in PARAM_LIST:
            setattr(self, name, rospy.get_param(PARAM_PREFIX + name, default))
        self.clmats = {}
        self.cis = {}
        self.imgsC = {}
        self.timestamps = []
        self.counterTS = 0
        self.counter = 0
        self.clmat_counter = 0
        self.pcls = {}

        # subscribed Topics
        #rospy.Subscriber(self.in_pcl, PointCloud2, self.pcl_cb)
        rospy.Subscriber(self.in_viz + '/' + self.camera_used + '/camera_info',
                         CameraInfo, self.ci_cb)
        rospy.Subscriber(
            self.in_viz + '/' + self.camera_used + '/' + self.image_spec,
            CompressedImage, self.image_cb)
        #rospy.Subscriber(self.in_semseg + '/class_matrix', clmat, self.clmat_cb)
        # approximate time synchronization of subscribed topics
        mode_sub = message_filters.Subscriber(self.in_pcl, PointCloud2)
        penalty_sub = message_filters.Subscriber(
            self.in_semseg + '/class_matrix', clmat)
        ts = message_filters.ApproximateTimeSynchronizer(
            [mode_sub, penalty_sub], 200, 0.5)
        ts.registerCallback(self.callback)

        # topic where we publish
        self.pcl_pub = rospy.Publisher(self.out_pcl,
                                       PointCloud2,
                                       queue_size=20)
        self.pcl_pub2 = rospy.Publisher('dyn_pcl_clmat_IG',
                                        PointCloud2,
                                        queue_size=20)

        #setting buffer for transformation
        tfBuffer = tf2_ros.Buffer(rospy.Duration(10))  #tf buffer length
        self.tf_listener = tf.TransformListener(tfBuffer)
        self.adjustment = np.dot(
            tf_help.rotation_matrix(math.pi / 2, (0, 1, 0)),
            tf_help.rotation_matrix(math.pi, (0, 0, 1)))
        print "hi"
Example #17
    def mbes_as_cb(self, goal):

        # Unpack goal
        p_auv = [
            goal.mbes_pose.transform.translation.x,
            goal.mbes_pose.transform.translation.y,
            goal.mbes_pose.transform.translation.z
        ]
        r_mbes = quaternion_matrix([
            goal.mbes_pose.transform.rotation.x,
            goal.mbes_pose.transform.rotation.y,
            goal.mbes_pose.transform.rotation.z,
            goal.mbes_pose.transform.rotation.w
        ])[0:3, 0:3]

        # IGL sim ping
        # The sensor frame on IGL needs to have the z axis pointing opposite from the actual sensor direction
        R_flip = rotation_matrix(np.pi, (1, 0, 0))[0:3, 0:3]
        mbes = self.draper.project_mbes(np.asarray(p_auv), r_mbes.dot(R_flip),
                                        goal.beams_num.data, self.mbes_angle)

        mbes = mbes[::-1]  # Reverse beams for same order as real pings

        # Pack result
        mbes_cloud = pack_cloud(self.map_frame, mbes)
        result = MbesSimResult()
        result.sim_mbes = mbes_cloud
        self.as_ping.set_succeeded(result)

        self.latest_mbes = result.sim_mbes
Example #18
 def transformation_matrix(rot_angle, rot_dir, trans):
     # print "rot_angle: ", rot_angle
     # print "rot_dir: ", rot_dir
     # print "trans: ", trans
     return tfs.concatenate_matrices(
         tfs.rotation_matrix(rot_angle, rot_dir),
         tfs.translation_matrix(trans))
Example #19
def cv2ros(rvecs, tvecs):
    """Transform pose from openCV solvePNP output format to ROS TransformRTStampedWithHeader format.

    Args:
        rvecs: in format [rx, ry, rz] units : radians
        tvecs: in format [tx, ty, tz] units : meters
        (type: list)

    Returns:
        An HTM (type: TransformRTStampedWithHeader)
    """

    # ROS pose
    ros_pose = TransformRTStampedWithHeader()

    # ROS position
    ros_pose.transform.translation.x = tvecs[0]
    ros_pose.transform.translation.y = tvecs[1]
    ros_pose.transform.translation.z = tvecs[2]

    # Ros orientation
    angle = sqrt(rvecs[0] ** 2 + rvecs[1] ** 2 + rvecs[2] ** 2)
    direction = [i / angle for i in rvecs[0:3]]
    np_T = tf.rotation_matrix(angle, direction)
    np_q = tf.quaternion_from_matrix(np_T)
    ros_pose.transform.rotation.x = np_q[0]
    ros_pose.transform.rotation.y = np_q[1]
    ros_pose.transform.rotation.z = np_q[2]
    ros_pose.transform.rotation.w = np_q[3]

    return ros_pose
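One caveat: the axis-angle conversion above divides by angle, so a zero rotation vector raises ZeroDivisionError (the same applies to the ur2ros variants below). A guarded sketch; rvec_to_quaternion is a hypothetical helper, not part of the original API:

def rvec_to_quaternion(rvecs):
    """Axis-angle [rx, ry, rz] to quaternion (x, y, z, w), guarding angle == 0."""
    angle = sqrt(rvecs[0] ** 2 + rvecs[1] ** 2 + rvecs[2] ** 2)
    if angle < 1e-12:
        return [0.0, 0.0, 0.0, 1.0]  # identity quaternion
    direction = [i / angle for i in rvecs[0:3]]
    return tf.quaternion_from_matrix(tf.rotation_matrix(angle, direction))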
Example #20
def perpendicular_vector(v):
  """ Finds an arbitrary perpendicular vector to *v*. The idea is to find a perpendicular vector, then rotate it arbitrarily around v.
  @type v np.array([x,y,z])
  """
  # for two vectors (x, y, z) and (a, b, c) to be perpendicular,
  # the following equation has to be fulfilled
  #  0 = ax + by + cz
  # x = y = z = 0 is not an acceptable solution
  if v[0] == v[1] == v[2] == 0:
    raise ValueError('zero-vector')

  # If one dimension is zero, this can be solved by setting that to
  # non-zero and the others to zero. Example: (4, 2, 0) lies in the
  # x-y-Plane, so (0, 0, 1) is orthogonal to the plane.
  random_rotation = tr.rotation_matrix(np.random.uniform(0, np.pi),v)[:3,:3]
  if v[0] == 0:
    return np.dot(random_rotation,np.array([1, 0, 0]))
  if v[1] == 0:
    return np.dot(random_rotation,np.array([0, 1, 0]))
  if v[2] == 0:
    return np.dot(random_rotation,np.array([0, 0, 1]))

  # set a = b = 1
  # then the equation simplifies to
  #  c = -(x + y)/z
  return np.dot(random_rotation,np.array([1, 1, -1.0 * (v[0] + v[1]) / v[2]]))
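A quick property check, as a sketch: whichever branch is taken, the result should be orthogonal to v.

v = np.array([1.0, 2.0, 3.0])
w = perpendicular_vector(v)
assert abs(np.dot(v, w)) < 1e-9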
Example #21
 def get_gripper_transformation(self, yaw):
     suction_cup = 0.01  # meters, used to offset suction press
     gripper_offsets = np.array([0.0, -0.1397 + suction_cup, -0.0889, 1])
     R = tft.rotation_matrix(pi / 2 + yaw, (0, 0, 1))  # rotation around z
     p = R.dot(gripper_offsets)
     D_e = tft.translation_matrix(p[:3])  # m
     return D_e
Example #22
    def test_transformations(self):
        from math import pi
        alpha, beta, gamma = 0, 0, -pi/2.
        origin, xaxis, yaxis, zaxis = (0, 0, 0), (1, 0, 0), (0, 1, 0), (0, 0, 1)
#         Rx = tf.rotation_matrix(alpha, xaxis)
#         Ry = tf.rotation_matrix(beta, yaxis)
        R = Rz = tf.rotation_matrix(gamma, zaxis)
        #R = tf.concatenate_matrices(Rx, Ry, Rz)
        
        point0 = np.array([0, 1, 0, 0])
        print point0
        
        #point1 = R * point0
        #point1 = R * point0.T
        #point1 = np.multiply(R, point0)
        #point1 = np.multiply(R, point0.T)
        actual_point1 = R.dot(point0)
        print 'actual_point1:', actual_point1
        
        expected_point1 = np.array([1, 0, 0, 0])
        print 'expected_point1:', expected_point1
        
        #np.alltrue(point1 == )
        #self.assertEqual(point1.tolist(), expected_point1.tolist())
        self.assertTrue(np.allclose(actual_point1, expected_point1))
Example #23
def ur2ros(ur_pose):
    """Transform pose from UR format to ROS Pose format"""

    # ROS pose
    ros_pose = Pose()

    # Position
    ros_pose.position.x = ur_pose[0]
    ros_pose.position.y = ur_pose[1]
    ros_pose.position.z = ur_pose[2]

    # angle normalized
    angle = sqrt(ur_pose[3]**2 + ur_pose[4]**2 + ur_pose[5]**2)

    orient = [i / angle for i in ur_pose[3:6]]
    #rx ry rz to rotation matrix
    np_T = tf.rotation_matrix(angle, orient)
    #rotation matrix to quaternion
    np_q = tf.quaternion_from_matrix(np_T)
    ros_pose.orientation.x = np_q[0]
    ros_pose.orientation.y = np_q[1]
    ros_pose.orientation.z = np_q[2]
    ros_pose.orientation.w = np_q[3]

    return ros_pose
Example #24
def coordinate_translator(world_coords, odom_coords, focal_point,
                          principal_point):
    """ Takes in the coordinates of the point in the world and translates it to a 
      point on the image using odometry and lens data """
    w_z, w_y, w_x = world_coords
    w_y *= -1
    o_z, o_y, o_theta = odom_coords
    o_x = 0
    o_y *= -1
    fx, fy = focal_point
    cx, cy = principal_point

    t = (w_x - o_x, w_y - o_y, w_z - o_z)

    r = rotation_matrix(o_theta, (1, 0, 0))

    rx = r[0][:3]
    ry = r[1][:3]
    rz = r[2][:3]

    x = t[0] + (w_x * rx[0]) + (w_y * rx[1]) + (w_z * rx[2])
    y = t[1] + (w_x * ry[0]) + (w_y * ry[1]) + (w_z * ry[2])
    z = t[2] + (w_x * rz[0]) + (w_y * rz[1]) + (w_z * rz[2])

    u = (fx * (x / z)) + cx
    v = (fy * (y / z)) + cy

    return [480 - v, 640 - u]
Example #25
 def test_frame3_axis_angle(self, x, y, z, axis, angle):
     r1 = np.array(spw.frame_axis_angle(x, y, z, axis, angle)).astype(float)
     r2 = rotation_matrix(angle, np.array(axis))
     r2[0, 3] = x
     r2[1, 3] = y
     r2[2, 3] = z
     self.assertTrue(np.isclose(r1, r2).all(), msg='\n{} != \n{}'.format(r1, r2))
Example #26
 def transformation_matrix(rot_angle, rot_dir, trans):
     # print "rot_angle: ", rot_angle
     # print "rot_dir: ", rot_dir
     # print "trans: ", trans
     return tfs.concatenate_matrices(
         tfs.rotation_matrix(rot_angle, rot_dir),
         tfs.translation_matrix(trans))
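Note the composition order: concatenate_matrices multiplies left to right, so the translation here is applied along the already-rotated axes. Usage sketch:

# Rotate pi/2 about z, then translate one unit along the rotated x axis;
# the resulting origin offset is (0, 1, 0).
T = transformation_matrix(np.pi / 2, (0, 0, 1), (1.0, 0.0, 0.0))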
Example #27
def GetPlaceOnTSRList( T_target, T_ee, Bw_T, Bw_R, avalues = [0] ):
    T0_w = T_target 
    T0_p = TSRUtil.E2P( T_ee )
    Tw_p = linalg.inv( T0_w ) * T0_p 
    Tw_p[0,3] = 0
    Tw_p[1,3] = 0

    # backoff distance from center of target 
    backoff = -0.03

    # determine if EE is perpendicular or parallel to target
    [x_t, y_t, z_t, t_t] = MatrixToAxes( T_target )
    [x_e, y_e, z_e, t_e] = MatrixToAxes( T_ee )
    t_backoff = array( [0, 0, backoff * ( 1 - fabs( dot( z_t, z_e ) ) )] )
    G_backoff = mat( Rt2G( eye( 3 ), t_backoff) )

    tsrList = []
    for i in range( len( avalues ) ):
        th = avalues[i] * pi / 180
        Rz = mat( tr.rotation_matrix( th, array( [0, 0, 1.] ) ) )
        Tw_p_i = Rz * Tw_p * G_backoff
        T0_p_i = T0_w * Tw_p_i
        tsr_i = TSRUtil.PalmToTSR( T0_p_i, T0_w, Bw_T, Bw_R )

        tsrList.append( copy.deepcopy( tsr_i ) )

    return tsrList
Example #28
def GetPlaceOnTSRList(T_target, T_ee, Bw_T, Bw_R, avalues=[0]):
    T0_w = T_target
    T0_p = TSRUtil.E2P(T_ee)
    Tw_p = linalg.inv(T0_w) * T0_p
    Tw_p[0, 3] = 0
    Tw_p[1, 3] = 0

    # backoff distance from center of target
    backoff = -0.03

    # determine if EE is perpendicular or parallel to target
    [x_t, y_t, z_t, t_t] = MatrixToAxes(T_target)
    [x_e, y_e, z_e, t_e] = MatrixToAxes(T_ee)
    t_backoff = array([0, 0, backoff * (1 - fabs(dot(z_t, z_e)))])
    G_backoff = mat(Rt2G(eye(3), t_backoff))

    tsrList = []
    for i in range(len(avalues)):
        th = avalues[i] * pi / 180
        Rz = mat(tr.rotation_matrix(th, array([0, 0, 1.])))
        Tw_p_i = Rz * Tw_p * G_backoff
        T0_p_i = T0_w * Tw_p_i
        tsr_i = TSRUtil.PalmToTSR(T0_p_i, T0_w, Bw_T, Bw_R)

        tsrList.append(copy.deepcopy(tsr_i))

    return tsrList
Example #29
    def get_start_guess(self):
        if 1:
            rod = matrix2rodrigues(self.base_cam.get_rotation())
            t = self.base_cam.get_translation()
            t.shape= (3,)
            rod.shape=(3,)
            return np.array(list(t)+list(rod),dtype=np.float)

        R = np.eye(4)
        R[:3,:3] = self.base_cam.get_rotation()

        angle, direction, point = rotation_from_matrix(R)
        q = quaternion_about_axis(angle,direction)
        #q = matrix2quaternion(R)
        if 1:
            R2 = rotation_matrix(angle, direction, point)
            #R2 = quaternion2matrix( q )
            try:
                assert np.allclose(R, R2)
            except:
                print
                print 'R'
                print R
                print 'R2'
                print R2
                raise

        C = self.base_cam.get_camcenter()
        result = list(C) + list(q)
        return result
Example #30
def fourdof_to_matrix(translation, yaw):
    """
    Convert from a Cartesian translation and yaw to a homogeneous transform.
    """
    T        = transformations.rotation_matrix(yaw, [0,0,1])
    T[0:3,3] = translation
    return T
Example #31
def ur2ros(ur_pose):
    """Transform pose from UR format to ROS Pose format.

    Args:
        ur_pose: A pose in UR format [px, py, pz, rx, ry, rz] 
        (type: list)

    Returns:
        An HTM (type: Pose).
    """

    # ROS pose
    ros_pose = Pose()

    # ROS position
    ros_pose.position.x = ur_pose[0]
    ros_pose.position.y = ur_pose[1]
    ros_pose.position.z = ur_pose[2]

    # Ros orientation
    angle = sqrt(ur_pose[3]**2 + ur_pose[4]**2 + ur_pose[5]**2)
    direction = [i / angle for i in ur_pose[3:6]]
    np_T = tf.rotation_matrix(angle, direction)
    np_q = tf.quaternion_from_matrix(np_T)
    ros_pose.orientation.x = np_q[0]
    ros_pose.orientation.y = np_q[1]
    ros_pose.orientation.z = np_q[2]
    ros_pose.orientation.w = np_q[3]

    return ros_pose
Example #32
def fourdof_to_matrix(translation, yaw):
    """
    Convert from a Cartesian translation and yaw to a homogeneous transform.
    """
    T = transformations.rotation_matrix(yaw, [0, 0, 1])
    T[0:3, 3] = translation
    return T
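Usage sketch (assuming numpy is imported as np): a planar (x, y, yaw) pose embedded in SE(3).

T = fourdof_to_matrix([1.0, 2.0, 0.0], np.pi / 2)
# T[0:3, 0:3] is a pure yaw rotation and T[0:3, 3] equals [1.0, 2.0, 0.0].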
Example #33
    def get_start_guess(self):
        if 1:
            rod = matrix2rodrigues(self.base_cam.get_rotation())
            t = self.base_cam.get_translation()
            t.shape= (3,)
            rod.shape=(3,)
            return np.array(list(t)+list(rod),dtype=np.float)

        R = np.eye(4)
        R[:3,:3] = self.base_cam.get_rotation()

        angle, direction, point = rotation_from_matrix(R)
        q = quaternion_about_axis(angle,direction)
        #q = matrix2quaternion(R)
        if 1:
            R2 = rotation_matrix(angle, direction, point)
            #R2 = quaternion2matrix( q )
            try:
                assert np.allclose(R, R2)
            except:
                print
                print 'R'
                print R
                print 'R2'
                print R2
                raise

        C = self.base_cam.get_camcenter()
        result = list(C) + list(q)
        return result
Example #34
    def align_with(self, target):
        """
        Aligns the normal of this triangle to target
        
        @param target - 3 dim np.array
        @return (rotated triangle, rotation matrix)
        """
        target = np.array(target)
        source = np.cross(self.b - self.a, self.c - self.a)
        source /= np.linalg.norm(source)
    
        rotation = np.eye(3)
        
        dot = np.dot(source, target)
        if not np.isnan(dot):
            angle = np.arccos(dot)
            if not np.isnan(angle):
                cross = np.cross(source, target)
                cross_norm = np.linalg.norm(cross)
                if not np.isnan(cross_norm) and not cross_norm < epsilon:
                    cross = cross / cross_norm
                    rotation = tft.rotation_matrix(angle, cross)[:3,:3]

        return (Triangle(np.dot(rotation, self.a),
                        np.dot(rotation, self.b),
                        np.dot(rotation, self.c)),
                rotation)
Example #35
    def getWristPose(joint_angle_list, robot, joint_names, intermediate_pose=False):
        '''
        Get the wrist pose for given joint angle configuration.

        joint_angle_list: List of joint angles to get final wrist pose for
        kwargs: Other keyword arguments use as required.

        Return: List of 16 values which represent the joint wrist pose 
        obtained from the End-Effector Transformation matrix using column-major
        ordering.
        '''

        pose = np.eye(4)
        intermediate_poses = []

        for joint, angle in zip(joint_names, joint_angle_list):
            R, T, axis = robot.get_joint_pose(joint)
            A = rotation_matrix(angle, axis)
            pose = np.matmul(np.matmul(pose, np.matmul(R, T)), A)
            intermediate_poses.append((pose, axis))
        
        if intermediate_pose:
            return intermediate_poses
        else:
            return pose
Example #36
 def test_frame3_axis_angle(self, x, y, z, axis, angle):
     r2 = rotation_matrix(angle, np.array(axis))
     r2[0, 3] = x
     r2[1, 3] = y
     r2[2, 3] = z
     np.testing.assert_array_almost_equal(w.compile_and_execute(w.frame_axis_angle, [x, y, z, axis, angle]),
                                          r2)
Example #37
    def align_with(self, target):
        """
        Aligns the normal of this triangle to target
        
        @param target - 3 dim np.array
        @return (rotated triangle, rotation matrix)
        """
        target = np.array(target)
        source = np.cross(self.b - self.a, self.c - self.a)
        source /= np.linalg.norm(source)

        rotation = np.eye(3)

        dot = np.dot(source, target)
        if not np.isnan(dot):
            angle = np.arccos(dot)
            if not np.isnan(angle):
                cross = np.cross(source, target)
                cross_norm = np.linalg.norm(cross)
                if not np.isnan(cross_norm) and not cross_norm < epsilon:
                    cross = cross / cross_norm
                    rotation = tft.rotation_matrix(angle, cross)[:3, :3]

        return (Triangle(np.dot(rotation, self.a), np.dot(rotation, self.b),
                         np.dot(rotation, self.c)), rotation)
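Numerical caveat, as a sketch (assuming source and target are unit vectors): floating-point drift can push the dot product slightly outside [-1, 1] for nearly parallel normals, so clipping before arccos keeps the angle well defined.

angle = np.arccos(np.clip(np.dot(source, target), -1.0, 1.0))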
Example #38
def GetManipGraspList( objectName, objectPose, tablePose, actionName ):
    loginfo( "Generating %s grasp for %s" %( actionName, objectName ) )
    graspList = []

    grasp = graspit_msgs.msg.Grasp()
    T0_o = PoseToTransform( objectPose )
    To_g_list = []
    if "darpadrill" in objectName:
        # Rx1 has 2 fingers along +ve y-axis
        # Rx2 has 2 fingers along -ve y-axis
        Rx1 = tr.rotation_matrix( -pi/2, array( [0, 1., 0] ) )
        Rx2 = tr.rotation_matrix( pi/2, array( [0, 1., 0] ) )
        for th in arange( 15, 75, 15) * pi/180:
            Rz1 = tr.rotation_matrix( th, array( [0, 0, 1.] ) )
            To_g = mat( Rz1 ) * mat( Rx1 )
            To_g_list.append( copy.deepcopy( To_g ) )
            Rz2 = tr.rotation_matrix( pi - th, array( [0, 0, 1.] ) )
            To_g = mat( Rz2 ) * mat( Rx2 )
            To_g_list.append( copy.deepcopy( To_g ) )
        
        G_fin = mat( Rt2G( eye( 3 ), array( [0.,0., -0.07] ) ) )
        G_pre = mat( Rt2G( eye( 3 ), array( [0.,0., -0.14] ) ) )
        G_tra = mat( Rt2G( eye( 3 ), array( [0.,0.,  0.00] ) ) )
        for To_g in To_g_list:
            grasp.pre_grasp_pose = TransformToPose( G_tra *T0_o * To_g * G_pre )
            grasp.final_grasp_pose = TransformToPose( T0_o * To_g * G_fin )
            grasp.epsilon_quality = 0.1
            grasp.volume_quality = 0.1
            graspList.append( copy.deepcopy( grasp ) )
    else:
        success = 0
        reason = "No trigger grasps for object %s" % objectName
        return success, graspList, reason

    loginfo( "Trigger grasps: %d grasps" %( len( graspList ) ) )

    success = 1
    reason = "Done"
    return success, graspList, reason
Example #39
 def get_camera_position(self, marker):
     """ Outputs the position of the camera in the global coordinates """
     euler_angles = euler_from_quaternion((marker.pose.pose.orientation.x,
                                           marker.pose.pose.orientation.y,
                                           marker.pose.pose.orientation.z,
                                           marker.pose.pose.orientation.w))
     translation = np.array([marker.pose.pose.position.y,
                             -marker.pose.pose.position.x,
                             0,
                             1.0])
     translation_rotated = rotation_matrix(self.yaw-euler_angles[2], [0,0,1]).dot(translation)
     xy_yaw = (translation_rotated[0]+self.position[0],translation_rotated[1]+self.position[1],self.yaw-euler_angles[2])
     return xy_yaw
Example #40
def fk(robot, lr, joint_values):
    pose_mat = robot.GetLink(start_link).GetTransform()
    

    R = np.eye(4)
    for i, j in enumerate(joint_values):
        rot = tft.rotation_matrix(j, arm_joint_axes[i])
        trans = arm_link_trans[i]
        R[:3,:3] = rot[:3,:3]
        R[:3,3] = trans
        pose_mat = np.dot(pose_mat, R)
        
    return pose_mat
Example #41
def rotation_matrix_from_axes(newaxis, oldaxis=Z_AXIS):
  """
  Return the rotation matrix that aligns two vectors.
  """
  oldaxis = tr.unit_vector(oldaxis)
  newaxis = tr.unit_vector(newaxis)
  c = np.dot(oldaxis, newaxis)
  angle = np.arccos(c)
  if np.isclose(c, -1.0) or np.allclose(newaxis, oldaxis):
    v = perpendicular_vector(newaxis)
  else:
    v = np.cross(oldaxis, newaxis)
  return tr.rotation_matrix(angle, v)
Example #42
	def calENU(self):
		if(self.body.isNone() or self.headingAngle is None):
			return False
		else:
			br = self.body.getRotationAroundZ()
			
			q = rotation_matrix(br+self.headingAngle, (0,0,1))
			
			t = translation_from_matrix(self.body.getMatrix())
			t = translation_matrix(t)
			
			self.enu.setMatrix(t).transformByMatrix(q)
			return True
#eoc
Example #43
    def callback(self, data):
        # rospy.loginfo('callback called')
        
        # Set parameters
        # TODO: get the parameters from the gps transform!!!
        xoff = -0.45
        yoff = 0.0
        std = .03
        rot = math.pi/2

        # Rotate the base pose
        origin, zaxis = (0, 0, 0), (0, 0, 1) 
        Rz = rotation_matrix(rot, zaxis, origin)
        pt = numpy.matrix((data.pose.pose.position.x, data.pose.pose.position.y, data.pose.pose.position.z, 0),dtype=numpy.float64).T
        xy = Rz*pt
        
        # Add the angle rotation
        oldAng = data.pose.pose.orientation
        ang = euler_from_quaternion([ oldAng.x, oldAng.y, oldAng.z, oldAng.w ])
        ang = ang[0], ang[1], ang[2] + rot
        
        # Offset the GPS reading by the lever arm offset
        x = xy[0] + xoff*math.cos(ang[2]) - yoff*math.sin(ang[2])
        y = xy[1] + xoff*math.sin(ang[2]) + yoff*math.cos(ang[2])
        #x = xy[0] + xoff*math.cos(ang[2])
        #y = xy[1] + yoff*math.sin(ang[2])
        gps_x = x + random.gauss(0,std)
        gps_y = y + random.gauss(0,std)
        print "testing"
        print xy
        print x,y
        print gps_x, gps_y    

        # Populate the angle
        # Note: GPS doesn't really return a heading, so we can't use it to localize.
        # gps_msg.pose.pose.orientation = Quaternion(*quaternion_from_euler(*ang))
        self.gps_msg.pose.orientation = Quaternion(*quaternion_from_euler(*ang))
        
        # Populate x,y
        self.gps_msg.header.stamp = data.header.stamp
        self.gps_msg.header.frame_id = "map_gps"
        #self.gps_msg.pose.pose.position.x = gps_x
        #self.gps_msg.pose.pose.position.y = gps_y
        self.gps_msg.pose.position.x = gps_x
        self.gps_msg.pose.position.y = gps_y

        # Publish
        self.pub.publish(self.gps_msg)
        self.pubStat.publish(self.gps_stat)
Example #44
	def convert_pose_inverse_transform(pose):
		translation = np.zeros((4,1))
		translation[0] = -pose.position.x
		translation[1] = -pose.position.y
		translation[2] = -pose.position.z
		translation[3] = 1.0

		rotation = (pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w)
		euler_angle = euler_from_quaternion(rotation)
		rotation = np.transpose(rotation_matrix(euler_angle[2], [0,0,1]))		# the angle is a yaw
		transformed_translation = rotation.dot(translation)

		translation = (transformed_translation[0], transformed_translation[1], transformed_translation[2])
		rotation = quaternion_from_matrix(rotation)
		return (translation, rotation)
Example #45
    def get_camera_position(self, marker):
        """ Outputs the position of the camera in the global coordinates """
        translation, rotation = TransformHelpers.convert_pose_inverse_transform(marker.pose.pose)
        euler_angles = euler_from_quaternion(rotation)
        # correct for coordinate system convention changes between camera and world
        # Also, the Neato can't fly!
        translation = np.array([-translation[1], translation[0], 0, 1])
        # correct for the possibility that the marker is rotated relative to the world coordinate frame
        rot_mat = rotation_matrix(self.yaw, [0,0,1])
        translation_rotated = rot_mat.dot(translation)

        xy_yaw = (translation_rotated[0]+self.position[0],
                  translation_rotated[1]+self.position[1],
                  self.yaw+euler_angles[2])
        return xy_yaw
Example #46
    def convert_pose_inverse_transform(pose):
        """ Helper method to invert a transform (this is built into the tf C++ classes, but omitted from Python) """
        translation = np.zeros((4,1))
        translation[0] = -pose.position.x
        translation[1] = -pose.position.y
        translation[2] = -pose.position.z
        translation[3] = 1.0

        rotation = (pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w)
        euler_angle = euler_from_quaternion(rotation)
        rotation = np.transpose(rotation_matrix(euler_angle[2], [0,0,1]))       # the angle is a yaw
        transformed_translation = rotation.dot(translation)

        translation = (transformed_translation[0], transformed_translation[1], transformed_translation[2])
        rotation = quaternion_from_matrix(rotation)
        return (translation, rotation)
Example #47
def addGeometryToEnvironment(endpoint1, endpoint2):
    #TODO: How do we add in the doodler?
    radius = 0.0005
    with env:
        prism = orpy.RaveCreateKinBody(env, '')
        prism.SetName(str(np.random.rand())) #give it a random name - we'll never reference it
        points_diff = (endpoint2 - endpoint1)[:-1, 3] #difference vector
        offset = endpoint1[:-1, 3]
        norm = np.linalg.norm(points_diff, 2)
        #prism.InitFromBoxes(np.array([[0., 0., 0., radius*2., radius*2., norm]]), True)
        prism.InitFromBoxes(np.array([[-radius, -radius, 0., radius, radius, norm/2.]]), True)
        z = np.array([0., 0., norm])
        cross = np.cross(z, points_diff)
        angle = np.arccos(np.dot(z, points_diff) / (norm * norm) )
        tr_matrix = tr.rotation_matrix(angle, cross)
        tr_matrix[:-1, 3] = (endpoint1[:-1, 3] + endpoint2[:-1, 3])/2.
        env.Add(prism, True)
        prism.SetTransform(tr_matrix)
Example #48
    def transformWorldToPixels(self, aSpriteCordinates, cameraLocation, cameraAngle):
        #initialize the array which has elements of coins
        #initialize the camera coordinate points for each coin image
        aSpritePixels = []

        for aCoord in aSpriteCordinates:
            #rotational matrix, rotate by self.theta, along the z-axis
            rotat = rotation_matrix(cameraAngle,[0,0,1])
            #translation matrix which maps the location of a robot(in world) to camera coordinate origin
            trans = np.matrix([[1,0,0,-(cameraLocation[0])],[0,1,0,-(cameraLocation[1])],[0,0,1,-(cameraLocation[2])],[0,0,0,1]])
            #this calculates the points of the aCoin in camera coordinate system
            new_coord = np.dot(rotat, np.dot(trans, aCoord))
            #calls the pixelate function which maps the camera coordinate points into 2D screen
            aSpritePixels.append(self.pixelate(new_coord[:3]))
        #append to the array which takes an array for each coin
        return aSpritePixels
Example #49
def rotation_matrix_from_axes(newaxis, oldaxis=Z_AXIS):
  """
  Returns the rotation matrix that aligns two vectors.
  @type  newaxis: np.array
  @param newaxis: The goal axis
  @type  oldaxis: np.array
  @param oldaxis: The initial axis
  @rtype: array, shape (3,3)
  @return: The resulting rotation matrix that aligns the old to the new axis.
  """
  oldaxis = tr.unit_vector(oldaxis)
  newaxis = tr.unit_vector(newaxis)
  c = np.dot(oldaxis, newaxis)
  angle = np.arccos(c)
  if np.isclose(c, -1.0) or np.allclose(newaxis, oldaxis):
    v = perpendicular_vector(newaxis)
  else:
    v = np.cross(oldaxis, newaxis)
  return tr.rotation_matrix(angle, v)
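Usage sketch, assuming Z_AXIS = (0, 0, 1): the returned homogeneous matrix sends the old axis onto the new one.

newaxis = tr.unit_vector(np.array([1.0, 1.0, 0.0]))
R = rotation_matrix_from_axes(newaxis)
np.testing.assert_allclose(R[:3, :3].dot([0.0, 0.0, 1.0]), newaxis, atol=1e-9)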
Example #50
 def fk(self, joint_values):
     self.handles = list()
     pose_mat = self.origin
     
     R = np.eye(4)
     for i, j in enumerate(joint_values[:]):
         rot = tft.rotation_matrix(j, self.arm_joint_axes[i])
         trans = self.arm_link_trans[i]
         R[:3,:3] = rot[:3,:3]
         R[:3,3] = trans
         print('{0}: {1}\n'.format(i,R))
         pose_mat = np.dot(pose_mat, R)
         
         self.handles += utils.plot_transform(self.env, pose_mat, .3)
       
     pose_mat = np.dot(pose_mat, self.gripper_tool_to_sensor)
     self.handles += utils.plot_transform(self.env, pose_mat, .3)
         
     return tfx.pose(pose_mat)
Example #51
def get_rotation_matrix(angle, axis):
  """
  Returns a rotation matrix whose new frame has its x axis aligned with the GIVEN axis.
  @type angle  : float
  @param angle : radians
  @type axis   : array([x,y,z])
  @param axis  : new x-axis (normalized)
  @rtype       : 3x3 matrix
  @return      : rotation matrix
  """
  old_x = np.array([1,0,0])
  v = np.cross(old_x, axis)
  I = np.eye(3)
  K = skew(v)
  c = np.dot(old_x, axis)
  s = np.linalg.norm(v)
  R = I + s*K + (1-c)*np.linalg.matrix_power(K,2)
  R_angle =  tr.rotation_matrix(angle, old_x)
  rot_matrix = np.dot(R, R_angle[:3,:3])
  return rot_matrix
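The snippet relies on a skew helper that is not shown; a minimal version (assumed, not from the original source):

def skew(v):
  # Cross-product matrix: skew(v).dot(u) == np.cross(v, u).
  return np.array([[0.0, -v[2], v[1]],
                   [v[2], 0.0, -v[0]],
                   [-v[1], v[0], 0.0]])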
Example #52
    def beaconCallback(self, data):
        # rospy.loginfo('callback called')
        beacon_msg = Beacon();
        
        # Rotation value
        rot = math.pi/2

        # Rotate the base pose
        origin, zaxis = (0, 0, 0), (0, 0, 1) 
        Rz = rotation_matrix(rot, zaxis, origin)
        pt = numpy.matrix((data.pose.pose.position.x, data.pose.pose.position.y, data.pose.pose.position.z, 0),dtype=numpy.float64).T
        xy = Rz*pt

        # Add the angle rotation
        oldAng = data.pose.pose.orientation
        ang = euler_from_quaternion([ oldAng.x, oldAng.y, oldAng.z, oldAng.w ])
        ang = ang[0], ang[1], ang[2] + rot

        # Offset the base_pose_ground_truth by the specified xoff, yoff
        sensor_x = xy[0] + self.xoff*math.cos(ang[2]) - self.yoff*math.sin(ang[2])
        sensor_y = xy[1] + self.xoff*math.sin(ang[2]) + self.yoff*math.cos(ang[2])
        dist_x = sensor_x - self.beacon_x;
        dist_y = sensor_y - self.beacon_y;
        dist_true = math.sqrt(dist_x**2 + dist_y**2)
        dist = dist_true + random.gauss(0,self.std)
        print "testing"
        print xy
        print sensor_x, sensor_y
        print self.beacon_x, self.beacon_y
        print dist_x, dist_y
        print dist_true
        print dist

        # Populate the message
        beacon_msg.header.stamp = data.header.stamp
        beacon_msg.header.frame_id = "base_ranger_1"
        beacon_msg.beacon_frame = "beacon_1"
        beacon_msg.range = dist

        # Publish
        self.pub.publish(beacon_msg)
Example #53
 def rotate(x, angle):
     return transformations.rotation_matrix(angle, axis_world)[:3, :3].dot(x)
Example #54
    def _pose_sending_func(self):
        """
        Continually sends the desired pose to the controller.
        """
        r = rospy.Rate(100)
        while not rospy.is_shutdown():
            try:
                r.sleep()
            except rospy.ROSInterruptException:
                break

            # get the desired pose as a transform matrix
            with self._state_lock:
                ps_des = copy.deepcopy(self._desired_pose_stamped)
                delta_dist = self._delta_dist

            if ps_des is None:
                # no desired pose to publish
                continue

            # convert desired pose to transform matrix from goal frame to CONTROLLER_FRAME
            self._transform_listener.waitForTransform(
                CONTROLLER_FRAME, ps_des.header.frame_id, rospy.Time(0), rospy.Duration(4.0)
            )
            # trans, rot = self._transform_listener.lookupTransform(CONTROLLER_FRAME, ps_des.header.frame_id, rospy.Time(0))
            # torso_H_wrist_des = geom.trans_rot_to_hmat(trans, rot)

            ps_des.header.stamp = rospy.Time(0)
            ps_des = self._transform_listener.transformPose(CONTROLLER_FRAME, ps_des)
            torso_H_wrist_des = conversions.pose_to_mat(ps_des.pose)

            if self.br is not None:
                p = ps_des.pose.position
                q = ps_des.pose.orientation
                self.br.sendTransform(
                    (p.x, p.y, p.z), (q.x, q.y, q.z, q.w), rospy.Time.now(), "/cci_goal", CONTROLLER_FRAME
                )

            # get the current pose as a transform matrix
            goal_link = self._hand_description.hand_frame

            # really shouldn't need to do waitForTransform here, since rospy.Time(0) means "use the most
            # recent available transform". but without this, tf sometimes raises an ExtrapolationException. -jdb
            self._transform_listener.waitForTransform(CONTROLLER_FRAME, goal_link, rospy.Time(0), rospy.Duration(4.0))
            trans, rot = self._transform_listener.lookupTransform(CONTROLLER_FRAME, goal_link, rospy.Time(0))
            torso_H_wrist = geom.trans_rot_to_hmat(trans, rot)

            # compute difference between actual and desired wrist pose
            wrist_H_wrist_des = np.dot(linalg.inv(torso_H_wrist), torso_H_wrist_des)
            trans_err, rot_err = geom.hmat_to_trans_rot(wrist_H_wrist_des)

            # scale the relative transform such that the translation is equal
            # to delta_dist. we're assuming that the scaled angular error will
            # be reasonable
            scale_factor = delta_dist / linalg.norm(trans_err)
            if scale_factor > 1.0:
                scale_factor = 1.0  # don't overshoot
            scaled_trans_err = scale_factor * trans_err
            angle_err, direc, point = transformations.rotation_from_matrix(wrist_H_wrist_des)
            scaled_angle_err = scale_factor * angle_err
            wrist_H_wrist_control = np.dot(
                transformations.translation_matrix(scaled_trans_err),
                transformations.rotation_matrix(scaled_angle_err, direc, point),
            )

            # find incrementally moved pose to send to the controller
            torso_H_wrist_control = np.dot(torso_H_wrist, wrist_H_wrist_control)
            desired_pose = conversions.mat_to_pose(torso_H_wrist_control)
            desired_ps = PoseStamped()
            desired_ps.header.stamp = rospy.Time(0)
            desired_ps.header.frame_id = CONTROLLER_FRAME
            desired_ps.pose = desired_pose

            if self.br is not None:
                p = desired_ps.pose.position
                q = desired_ps.pose.orientation
                self.br.sendTransform(
                    (p.x, p.y, p.z), (q.x, q.y, q.z, q.w), rospy.Time.now(), "/cci_imm_goal", CONTROLLER_FRAME
                )

            # send incremental pose to controller
            self._desired_pose_pub.publish(desired_ps)
Example #55
def rotz(q): return numpy.matrix(tfs.rotation_matrix(q, [0, 0, 1]))
def trans(xyz): return numpy.matrix(tfs.translation_matrix(xyz))
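Because these return numpy matrices, * is a true matrix product, so short pose chains read naturally. Usage sketch:

# Yaw by pi/2, then offset along the rotated x axis.
T = rotz(numpy.pi / 2) * trans([1.0, 0.0, 0.0])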
Example #56
def roty(q): return numpy.matrix(tfs.rotation_matrix(q, [0, 1, 0]))
def rotz(q): return numpy.matrix(tfs.rotation_matrix(q, [0, 0, 1]))
Example #57
 def viconPoseCallback(self, msg_vicon):
     if self.adjVicon:
         self.Tv_w = np.dot(self.poseToTransform(msg_vicon.pose), tr.rotation_matrix(-np.pi/2,(0,0,1)))
         #print "Found vicon initial frame: {vicon}".format(vicon=self.Tv_w)
         self.adjVicon = False
Example #58
def rot_angle_direction(angle, direction):
    direction = direction/np.linalg.norm(direction)
    r = tft.rotation_matrix(angle, direction.A1)
    return np.matrix(r[0:3,0:3])
Example #59
    def run_controller(self):
        if self.X is None:
            return
        self.t = (rospy.Time.now() - self.tbase).to_sec()
        if self.running_flag and not rospy.get_param('return_to_base'):
            try:
                Xref = self.ref_func(self.t)
            except ValueError:
                # let's assume this implies t is out of range:
                if self.t < self.ref_time[0] or self.t > self.ref_time[-1]:
                    rospy.loginfo("Trajectory Complete!")
                    self.running_flag = False
                    
                    self.Einx = 0
                    self.Einy = 0
                    
                    if self.first_time: 
                        self.first_time = False
                self.cmd_pub.publish(Twist())
                return
            # if we get here, we can run the controller:
            cmd = Twist()
            uff = self.estimate_derivative(self.t)
            # uff is in the spatial frame... transform to body frame:
            Rbo = tr.rotation_matrix(self.ref_func(self.t)[2], [0,0,1])[0:2,0:2].T
            Vb = np.dot(Rbo, uff[0:2])
            Vfeedback_p = np.array([
                50*Kx*(Xref[0] - self.X[0]),
                50*Ky*(Xref[1] - self.X[1])
            ])
            
            Vfb_k = np.dot(Rbo, Vfeedback_p)
            
            if self.get_intergral_time== 0: 
                self.time_gap = rospy.Time.now().to_sec() 
                self.get_intergral_time = 1
            elif self.get_intergral_time == 1:
                self.time_gap = rospy.Time.now().to_sec() - self.time_gap 
                self.Einx = self.Einx + (Xref[0] - self.X[0])*self.time_gap            
                self.Einy = self.Einy + (Xref[1] - self.X[1])*self.time_gap
                self.get_intergral_time = 2
            else: 
                self.Einx = self.Einx + (Xref[0] - self.X[0])*self.time_gap            
                self.Einy = self.Einy + (Xref[1] - self.X[1])*self.time_gap
                
            Vfeedback_i = np.array([
                self.Einx*Kix,
                self.Einy*Kiy
            ])
            
            Vfb_i = np.dot(Rbo, Vfeedback_i)
            
            cmd.linear.x = Vb[0] + Vfb_k[0]+Vfb_i[0]
            cmd.linear.y = Vb[1] + Vfb_k[1]+Vfb_i[1]
            cmd.angular.z = uff[2] + Kth*angle_utils.shortest_angular_distance(self.X[2],Xref[2])
            cmd = self.clamp_controls(cmd)
            rospy.logdebug("t = %f, xd = %f, yd = %f, thd = %f", self.t, cmd.linear.x, cmd.linear.y, cmd.angular.z)
            self.send_transform(Xref)
            self.cmd_pub.publish(cmd)
        else:
            try:
                if not rospy.get_param('return_to_base'):
                    Xref = np.array([self.ref_state[-1][0],self.ref_state[-1][1],self.ref_state[-1][2]]) 
                else:
                    self.running_flag = False
                    self.ref_state[-1][0] = 0 
                    self.ref_state[-1][1] = 0 
                    self.ref_state[-1][2] = 0
                    Xref = np.array([0,0,0]) 
            except IndexError:
                return
            except TypeError:
                return
            cmd = Twist()
            Vfeedback = np.array([
                3*Kx*(Xref[0] - self.X[0]),
                3*Ky*(Xref[1] - self.X[1])
            ])
            Rbo = tr.rotation_matrix(Xref[2], [0,0,1])[0:2,0:2].T
            Vfb = np.dot(Rbo, Vfeedback)
            cmd.linear.x = Vfb[0]
            cmd.linear.y = Vfb[1]

            cmd.angular.z = angle_utils.shortest_angular_distance(self.X[2],Xref[2])
            cmd = self.clamp_controls(cmd)
            cmd = self.low_limit(cmd);
            rospy.logdebug("t = %f, xd = %f, yd = %f, thd = %f", self.t, cmd.linear.x, cmd.linear.y, cmd.angular.z)
            if not self.first_time:
                self.cmd_pub.publish(cmd)
            if rospy.get_param('return_to_base'):
                self.cmd_pub.publish(cmd) 
        return
Example #60
def frame_change(pos,last_pos,angle,last_angle,delta):
    inv_R = np.linalg.inv(rotation_matrix(last_angle,(0,0,1)))
    new_pos = np.divide(np.dot(inv_R,np.transpose(np.subtract(pos, last_pos))),delta)
    new_ori = math.asin(np.dot(inv_R,np.array([math.cos(angle), math.sin(angle), 0,0]))[1])/delta
    return [new_pos,new_ori]
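Usage sketch (illustrative numbers): positions are homogeneous 4-vectors with w = 0, so only the rotation part of the inverted transform acts on the position difference.

pos = np.array([1.00, 0.10, 0.0, 0.0])
last_pos = np.array([1.00, 0.00, 0.0, 0.0])
vel_body, yaw_rate = frame_change(pos, last_pos, 0.11, 0.10, delta=0.01)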