Example No. 1
 def getEnv(self, key, iter=0, first=False):
     
     val = self.base.getModel(key)
     
     print key, val
     
     obj = None
     obj_list = []
     parent_tf = tf.identity_matrix()
     
     if self.base.typeOf(key) == self.base.ROBOTS.ident:
         obj = self._getRobot(key)
         obj_list.append(obj)
         parent_tf = obj.GetTransform()
     
     elif self.base.typeOf(key) == self.base.OBJECTS.ident:
         obj = self._getObject(key)
         obj_list.append(obj)
         parent_tf = obj.GetTransform()
             
     elif self.base.typeOf(key) == self.base.LOCATIONS.ident:
         obj = self._getLocation(key)
         obj_list.append(obj)
         parent_tf = obj.GetTransform()
         
     elif self.base.typeOf(key) == self.base.SENSORS.ident:
         obj = self._getSensor(key)
         obj_list.append(obj)
         parent_tf = obj.GetTransform()
         
     else:
         if val.has_key("translation"):
             translation = map(lambda x: float(x), val["translation"].split(" "))
             parent_tf = tf.concatenate_matrices(parent_tf, tf.compose_matrix(translate = translation))
         if val.has_key("quat"):
             quat = map(lambda x: float(x), val["quat"].split(" "))
             rot = rave.axisAngleFromQuat(quat)
             m2 = tf.compose_matrix(angles = rot)
             parent_tf = tf.concatenate_matrices(parent_tf, m2)     
     
     if first:
         parent_tf = tf.identity_matrix()
         if obj is not None:
             obj.SetTransform(parent_tf)
     
     if iter==0:
         return obj_list            
     
     # search for children (entries whose 'base' column refers to this key)
     child_expr   = pycassa.create_index_expression('base', key)
     child_clause = pycassa.create_index_clause([child_expr])
     
     for child_key, _ in self.base.col.get_indexed_slices(child_clause):
         child_obj = self.getEnv(child_key, iter-1)
         for obj in child_obj:
             if obj is not None:
                 obj.SetTransform(tf.concatenate_matrices(parent_tf, obj.GetTransform()))
                 obj_list.append(obj)
         
     return obj_list
Example No. 2
 def get_transform_to_world(self):
     mat_w_2_mocap = transformations.identity_matrix()
     exist_mat_w_2_mocap = False
     mat_mocap_2_link = transformations.identity_matrix()
     exists_mat_mocap_2_link = False
     mat_link_2_rgb = transformations.identity_matrix()
     exists_mat_link_2_rgb = False
     mat_optical = transformations.identity_matrix()
     exists_mat_optical = False
     with open(self.filename) as csv_file:
         csv_reader = csv.reader(csv_file, delimiter=',')
         for row in csv_reader:
             if (not exist_mat_w_2_mocap) and (row[3] == "world" and row[4] == "kinect2_mocap"):
                 exist_mat_w_2_mocap = True
                 mat_w_2_mocap = matrix_from_row(row)
             if (not exists_mat_mocap_2_link) and (row[3] == "/kinect2_mocap" and row[4] == "/kinect2_link"):
                 exists_mat_mocap_2_link = True
                 mat_mocap_2_link = matrix_from_row(row)
             if (not exists_mat_link_2_rgb) and (row[3] == "kinect2_link" and row[4] == "kinect2_rgb_optical_frame"):
                 exists_mat_link_2_rgb = True
                 mat_link_2_rgb = matrix_from_row(row)
             if (not exists_mat_optical) and (row[3] == "kinect2_rgb_optical_frame" and row[4] == "kinect2_ir_optical_frame"):
                 exists_mat_optical  = True
                 mat_optical = matrix_from_row(row)
             if exist_mat_w_2_mocap and exists_mat_mocap_2_link and exists_mat_link_2_rgb and exists_mat_optical:
                 break
         else:
             print ("Warning! No transformation in world found")
     # print mat_w_2_mocap, mat_mocap_2_link, mat_link_2_rgb
     # return mat_mocap_2_link.dot(transformations.inverse_matrix(mat_link_2_rgb.dot(mat_w_2_mocap)))
     return mat_w_2_mocap.dot(mat_mocap_2_link.dot(mat_link_2_rgb.dot(mat_optical)))
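The helper matrix_from_row is not shown above. A minimal sketch of one possible implementation, assuming (hypothetically) that each CSV row stores a translation in columns 5-7 and a quaternion (x, y, z, w) in columns 8-11; the real column layout depends on how the TF dump was exported:

from tf import transformations

def matrix_from_row(row):
    # Hypothetical layout: row[5:8] = translation, row[8:12] = quaternion (x, y, z, w).
    translation = [float(v) for v in row[5:8]]
    quaternion = [float(v) for v in row[8:12]]
    matrix = transformations.quaternion_matrix(quaternion)  # 4x4 rotation, zero translation
    matrix[0:3, 3] = translation                            # fill in the translation column
    return matrix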
Example No. 3
    def test_inverse_homogenous_transform_non_zero_translation(self):
        """
        tests whether inverse_homogeneous_transform performs the inversion correctly for T with non-zero translation vector.
        """
        T = tr.identity_matrix()
        T[0:3, 3] = np.array([1, 1, 1])

        T_inv_expected = tr.identity_matrix()
        T_inv_expected[0:3, 3] = np.array([-1, -1, -1])

        T_inv = inverse_homogeneous_transform(T)
        np.testing.assert_almost_equal(T_inv, T_inv_expected)
Example No. 4
        def fill_from_Orient(orient_data):
            '''Fill messages with information from 'orientation' MTData block.
            '''
            self.pub_imu = True
            if 'quaternion' in orient_data:
                w, x, y, z = orient_data['quaternion']
            elif 'roll' in orient_data:
                x, y, z, w = quaternion_from_euler(
                    radians(orient_data['roll']),
                    radians(orient_data['pitch']), radians(orient_data['yaw']))
            elif 'matrix' in orient_data:
                m = identity_matrix()
                m[:3, :3] = orient_data['matrix']
                x, y, z, w = quaternion_from_matrix(m)
            w, x, y, z = convert_quat((w, x, y, z), o['frame'])
            self.imu_msg.orientation.x = x
            self.imu_msg.orientation.y = y
            self.imu_msg.orientation.z = z
            self.imu_msg.orientation.w = w
            self.imu_msg.orientation_covariance = self.orientation_covariance

            self.pub_euler = True
            if 'roll' in orient_data:
                self.euler_msg.yaw = orient_data['yaw']
                self.euler_msg.pitch = orient_data['pitch']
                self.euler_msg.roll = orient_data['roll']
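The 'matrix' branch embeds a 3x3 rotation matrix into a 4x4 identity so that quaternion_from_matrix can be applied; a standalone sketch of that pattern using a 90-degree rotation about z (tf.transformations returns quaternions in x, y, z, w order):

import numpy as np
from tf.transformations import identity_matrix, quaternion_from_matrix

dcm = np.array([[0.0, -1.0, 0.0],
                [1.0,  0.0, 0.0],
                [0.0,  0.0, 1.0]])   # 90-degree rotation about z

m = identity_matrix()
m[:3, :3] = dcm
x, y, z, w = quaternion_from_matrix(m)
print(x, y, z, w)                    # approximately (0, 0, 0.7071, 0.7071)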
Example No. 5
def rvec_and_tvec_to_matrix(rvec, tvec):
    """Rodrigues rotation and translation vector to 4x4 matrix"""
    t_matrix = tft.translation_matrix(tvec)
    R, _ = cv2.Rodrigues(rvec)
    r_matrix = tft.identity_matrix()
    r_matrix[:3, :3] = R
    return np.dot(t_matrix, r_matrix)
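A short usage sketch for the helper above; the rotation vector and translation are made-up values, and cv2 plus tf.transformations (tft) are assumed to be importable as in the example:

import numpy as np
import cv2
import tf.transformations as tft

rvec = np.array([0.0, 0.0, np.pi / 2])   # 90 degrees about z (illustrative)
tvec = np.array([0.1, 0.0, 0.5])         # illustrative translation

T = rvec_and_tvec_to_matrix(rvec, tvec)
print(T[:3, 3])                          # [0.1, 0.0, 0.5]
print(tft.euler_from_matrix(T))          # approximately (0, 0, pi/2)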
Example No. 6
    def __init__(self, program_file=PROGRAM_FILE):
        # TODO: Either implement behavior that fixes programs when markers change
        # or only let this callback run once
        self._markers_sub = rospy.Subscriber(SUB_NAME,
                                             Marker,
                                             callback=self._markers_callback)
        self._curr_markers = None
        self._tf_listener = tf.TransformListener()
        self._arm = fetch_api.Arm()
        self._gripper = fetch_api.Gripper()
        self._torso = fetch_api.Torso()
        self._controller_client = actionlib.SimpleActionClient(
            '/query_controller_states', QueryControllerStatesAction)
        self._program_file = program_file
        self._programs = self._read_in_programs()

        self._joint_reader = JointStateReader()

        mat = tft.identity_matrix()
        mat[:, 0] = np.array([0, 0, -1, 0])
        mat[:, 2] = np.array([1, 0, 0, 0])
        o = tft.quaternion_from_matrix(mat)
        self._constraint_pose = Pose(orientation=Quaternion(*o))

        oc = OrientationConstraint()
        oc.header.frame_id = 'base_link'
        oc.link_name = 'gripper_link'
        oc.orientation = self._constraint_pose.orientation
        oc.weight = 1.0
        oc.absolute_z_axis_tolerance = 1.0
        oc.absolute_x_axis_tolerance = 1.0
        oc.absolute_y_axis_tolerance = 1.0
        self._constraint = None
Example No. 7
		def fill_from_Orientation_Data(o):
			'''Fill messages with information from 'Orientation Data' MTData2 block.'''
			self.pub_imu = True
			try:
				x, y, z, w = o['Q1'], o['Q2'], o['Q3'], o['Q0']
			except KeyError:
				pass
			try: 
				# FIXME check that Euler angles are in radians
				x, y, z, w = quaternion_from_euler(radians(o['Roll']),
						radians(o['Pitch']), radians(o['Yaw']))
			except KeyError:
				pass
			try:
				a, b, c, d, e, f, g, h, i = o['a'], o['b'], o['c'], o['d'],\
						o['e'], o['f'], o['g'], o['h'], o['i']
				m = identity_matrix()
				m[:3,:3] = ((a, b, c), (d, e, f), (g, h, i))
				x, y, z, w = quaternion_from_matrix(m)
			except KeyError:
				pass
			self.imu_msg.orientation.x = x
			self.imu_msg.orientation.y = y
			self.imu_msg.orientation.z = z
			self.imu_msg.orientation.w = w
			self.imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
					radians(1.), 0., 0., 0., radians(9.))
Example No. 8
 def coord_transform(self, v__fin, fin, fout):
     ''' transform vector v which is represented in frame fin into its representation in frame fout
     Args:
     - v__fin: 3D vector represented in fin coordinates
     - fin: string describing input coordinate frame (bd, bu, fc, dc, lned, lenu)
     - fout: string describing output coordinate frame (bd, bu, fc, dc, lned, lenu)
     Returns:
     - v__fout: vector v represented in fout coordinates
     '''
     
     # trivial transform, checking input shape
     if fin==fout:
         v4__fin = list(v__fin)+[0.0]
         R = tft.identity_matrix()
         v4__fout = np.dot(R, v4__fin)
         v__fout = np.array(v4__fout[0:3])
         return v__fout
     
     # check for existence of rotation matrix
     R_str = 'R_{}2{}'.format(fin, fout)
     try:
         R_i2o = getattr(self, R_str)
     except AttributeError:
         err = 'No static transform exists from {} to {}.'.format(fin, fout)
         err += ' Are you sure these frames are not moving relative to each other?'
         raise AttributeError(err)
     
     # perform transform
     v4__fin = list(v__fin) + [0.0]
     v4__fout = np.dot(R_i2o, v4__fin)
     v__fout = np.array(v4__fout[0:3])
     return v__fout
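Padding the vector with 0.0 (rather than 1.0) treats v as a direction, so any translation in a full homogeneous transform is ignored; a brief standalone illustration with tf.transformations:

import numpy as np
import tf.transformations as tft

T = tft.euler_matrix(0.0, 0.0, np.pi / 2)   # 90 degrees about z
T[0:3, 3] = [1.0, 2.0, 3.0]                 # plus a translation

v = [1.0, 0.0, 0.0]
as_direction = np.dot(T, v + [0.0])[0:3]    # rotation only: approximately [0, 1, 0]
as_point = np.dot(T, v + [1.0])[0:3]        # rotation + translation: approximately [1, 3, 3]
print(as_direction, as_point)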
Example No. 9
 def fill_from_Orientation_Data(o):
     '''Fill messages with information from 'Orientation Data' MTData2 block.'''
     global pub_imu, imu_msg
     pub_imu = True
     try:
         x, y, z, w = o['Q1'], o['Q2'], o['Q3'], o['Q0']
     except KeyError:
         pass
     try:
         # FIXME check that Euler angles are in radians
         x, y, z, w = quaternion_from_euler(radians(o['Roll']),
                                            radians(o['Pitch']),
                                            radians(o['Yaw']))
     except KeyError:
         pass
     try:
         a, b, c, d, e, f, g, h, i = o['a'], o['b'], o['c'], o['d'],\
           o['e'], o['f'], o['g'], o['h'], o['i']
         m = identity_matrix()
         m[:3, :3] = ((a, b, c), (d, e, f), (g, h, i))
         x, y, z, w = quaternion_from_matrix(m)
     except KeyError:
         pass
     imu_msg.orientation.x = x
     imu_msg.orientation.y = y
     imu_msg.orientation.z = z
     imu_msg.orientation.w = w
     imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
                                       radians(1.), 0., 0., 0.,
                                       radians(9.))
Example No. 10
def getSkeletonTransformation(user_id, tf, tf_name, world_name, last_updated):
    """
    Return the rototranslation matrix between tf_name and world_name 
    and the time stamp
    
    @param user_id id of the human
    @param tf the tf TransformListener used for the lookup
    @param tf_name name of the element in the tf tree
    @param world_name name of the world in the tf tree
    @param last_updated time stamp of the last time the tf tree was updated
    """
    
    tf_name_full = getFullTfName(user_id, tf_name)
    if tf.frameExists(tf_name_full) and tf.frameExists(world_name):   
        try:                
            time = tf.getLatestCommonTime(tf_name_full, world_name)
            pos, quat = tf.lookupTransform(world_name, tf_name_full, time)
        except:
            return last_updated, None
        last_updated = time.secs if last_updated <= time.secs else last_updated
        r = transformations.quaternion_matrix(quat)
        skel_trans = transformations.identity_matrix()
        skel_trans[0:4,0:4] = r
        skel_trans[0:3, 3] = pos    
        return last_updated, skel_trans
    else:
        return last_updated, None
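Since quaternion_matrix already returns a full 4x4 homogeneous matrix, the copy over an identity matrix above is not strictly needed; an equivalent, more direct construction (sketch with illustrative pos/quat values):

from tf import transformations

pos = [0.5, 0.0, 1.2]          # illustrative translation
quat = [0.0, 0.0, 0.0, 1.0]    # identity rotation, (x, y, z, w)

skel_trans = transformations.quaternion_matrix(quat)   # 4x4 rotation, zero translation
skel_trans[0:3, 3] = pos                                # fill in the translation column
print(skel_trans)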
Example No. 11
 def fill_from_Orientation_Data(o):
     '''Fill messages with information from 'Orientation Data' MTData2
     block.'''
     self.pub_imu = True
     try:
         x, y, z, w = o['Q1'], o['Q2'], o['Q3'], o['Q0']
     except KeyError:
         pass
     try:
         x, y, z, w = quaternion_from_euler(radians(o['Roll']),
                                            radians(o['Pitch']),
                                            radians(o['Yaw']))
     except KeyError:
         pass
     try:
         a, b, c, d, e, f, g, h, i = o['a'], o['b'], o['c'], o['d'],\
             o['e'], o['f'], o['g'], o['h'], o['i']
         m = identity_matrix()
         m[:3, :3] = ((a, b, c), (d, e, f), (g, h, i))
         x, y, z, w = quaternion_from_matrix(m)
     except KeyError:
         pass
     w, x, y, z = convert_quat((w, x, y, z), o['frame'])
     self.imu_msg.orientation.x = x
     self.imu_msg.orientation.y = y
     self.imu_msg.orientation.z = z
     self.imu_msg.orientation.w = w
     self.imu_msg.orientation_covariance = self.orientation_covariance
Example No. 12
 def test_inverse_homogenous_transform_trivial(self):
     """
     tests whether inverse_homogeneous_transform yields  T = inv(T) when T is the identity matrix.
     """
     T = tr.identity_matrix()
     T_inv = inverse_homogeneous_transform(T)
     np.testing.assert_almost_equal(T, T_inv)
Example No. 13
 def root_transform(self):
     multi_matrix = identity_matrix()
     for node in self.path_to_root:
         matrix = fromStringToMatrix(node.transform)
         multi_matrix = dot(matrix, multi_matrix)
     root_transform = fromMatrixToTransform(multi_matrix)
     return root_transform
Example No. 14
 def __init__(self):
     self.subscriber = rospy.Subscriber("input_odom",
                                        Odometry,
                                        self.callback,
                                        queue_size=1)
     self.publisher = rospy.Publisher("output_odom", Odometry, queue_size=1)
     self.T_NED_V = TR.euler_matrix(pi, 0, 0, 'rxyz')
     self.T_XAV_UAV = TR.euler_matrix(pi, 0, 0, 'rxyz')
     self.T_XAV_V = TR.identity_matrix()  # the output transform
Example No. 15
    def __init__(self):
        self.node_name = 'pose2d_to_path_node'

        # Store the frames used to link the robot and vicon paths
        self.Tv_w = tr.identity_matrix()
        self.Tw_r1 = tr.identity_matrix()
        self.adjVicon = True
        self.adjRobot = True

        # setup publishers and subscribers
        self.sub_vicon = rospy.Subscriber("~vicon_pose", PoseStamped, self.viconPoseCallback)
        self.sub_pose = rospy.Subscriber("~pose", Pose2DStamped, self.poseCallback)
        self.sub_weights = rospy.Subscriber("~weights", KinematicsWeights, self.weightsCallback)
        self.pub_path = rospy.Publisher("~path", Path, queue_size=1)

        # Setup the path message
        self.msg_path = Path()

        rospy.loginfo("[%s] has started!", self.node_name)
Example No. 16
        def fill_from_Orient(orient_data):
            '''Fill messages with information from 'orientation' MTData block.
            '''
            self.pub_imu = True
            if 'quaternion' in orient_data:
                w, x, y, z = orient_data['quaternion']
            elif 'roll' in orient_data:
                x, y, z, w = quaternion_from_euler(
                    radians(orient_data['roll']),
                    radians(orient_data['pitch']), radians(orient_data['yaw']))
            elif 'matrix' in orient_data:
                m = identity_matrix()
                m[:3, :3] = orient_data['matrix']
                x, y, z, w = quaternion_from_matrix(m)
            self.imu_msg.orientation.x = x
            self.imu_msg.orientation.y = y
            self.imu_msg.orientation.z = z
            self.imu_msg.orientation.w = w
            self.imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
                                                   radians(1.), 0., 0., 0.,
                                                   radians(9.))
            self.view_msg.pose.position.x = 0
            self.view_msg.pose.position.y = 0
            self.view_msg.pose.position.z = 0
            self.view_msg.pose.orientation.x = x
            self.view_msg.pose.orientation.y = y
            self.view_msg.pose.orientation.z = z
            self.view_msg.pose.orientation.w = w

            y2 = y**2
            t0 = -2 * (y2 + z**2) + 1
            t1 = 2 * (x * y - w * z)
            t2 = -2 * (x * z + w * y)
            t3 = 2 * (y * z - w * x)
            t4 = -2 * (x**2 + y2) + 1

            if t2 > 1:
                t2 = 1
            if t2 < -1:
                t2 = -1

            pitch = math.asin(t2)
            roll = math.atan2(t3, t4)
            yaw = 180 * (math.atan2(t1, t0) - math.pi / 2) / math.pi + 90
            '''0.02495820 Sevilla'''
            '''-0.0083775 Toulouse'''
            #s = 'Roll: ' + str(roll) + ' , Pitch: ' + str(pitch) + ' , Yaw: ' + str(yaw)
            #print s

            ang = math.acos(math.fabs(math.cos(pitch) * math.cos(roll)))
            #s = 'ANG = ' + str(ang)
            #print s
            self.ang_msg.data = ang
            self.ang_msg2.data = pitch
            self.ang_msg3.data = roll
Example No. 17
	def cartesian_to_ik(self, cartesian, single_sol=False):
		# Cartesian coordinates of [roll,pitch,yaw,X,Y,Z] to ik solutions
		# Returns all possible IK solutions within joint limits if single is False
		# Returns best IK solution within joint limit if single is True
		self.single_sol						= single_sol
		roll,pitch,yaw,X,Y,Z 				= cartesian

		matrix_from_euler					= TF.euler_matrix(roll,pitch,yaw)
		matrix_from_translation				= TF.translation_matrix([X,Y,Z]) - TF.identity_matrix()
		matrix_from_cartesian 				= matrix_from_euler + matrix_from_translation 

		return self.__get_ik_from_matrix(matrix_from_cartesian)
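Subtracting the identity matrix leaves only the translation column of translation_matrix, so adding the result to the rotation matrix produces a full homogeneous transform; a sketch showing that this equals the more conventional construction (illustrative values):

import numpy as np
import tf.transformations as TF

roll, pitch, yaw = 0.1, 0.2, 0.3     # illustrative angles in radians
X, Y, Z = 0.4, 0.5, 0.6              # illustrative translation

via_sum = TF.euler_matrix(roll, pitch, yaw) + (TF.translation_matrix([X, Y, Z]) - TF.identity_matrix())

direct = TF.euler_matrix(roll, pitch, yaw)
direct[:3, 3] = [X, Y, Z]            # set the translation column directly

np.testing.assert_almost_equal(via_sum, direct)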
Example No. 18
    def __init__(self):
        self.node_name = 'pose2d_to_path_node'

        # Store the frames used to link the robot and vicon paths
        self.Tv_w = tr.identity_matrix()
        self.Tw_r1 = tr.identity_matrix()
        self.adjVicon = True
        self.adjRobot = True

        # setup publishers and subscribers
        self.sub_vicon = rospy.Subscriber("~vicon_pose", PoseStamped,
                                          self.viconPoseCallback)
        self.sub_pose = rospy.Subscriber("~pose", Pose2DStamped,
                                         self.poseCallback)
        self.sub_weights = rospy.Subscriber("~weights", KinematicsWeights,
                                            self.weightsCallback)
        self.pub_path = rospy.Publisher("~path", Path, queue_size=1)

        # Setup the path message
        self.msg_path = Path()

        rospy.loginfo("[%s] has started!", self.node_name)
Example No. 19
    def test_inverse_homogenous_transform_translation_rotation(self):
        """
        tests whether inverse_homogeneous_transform performs the inversion correctly for
        T with non-zero translation vector and a non-identity rotation matrix.
        """
        # Specifically tests the matrix inversion used in the function.
        a_T_b = tr.identity_matrix()
        a_T_b[0:3, 0:3] = np.array(
            [[0, -1, 0], [1, 0, 0], [0, 0, 1]]
        )  # rotation around the z axis by 90 degrees: frame B is rotated with respect to frame A about its z axis.
        a_T_b[0:3, 3] = np.array(
            [1, 1, 0]
        )  # translation by 1 unit in positive x: translation from frame A to frame B expressed in frame A.

        b_T_a_exp = tr.identity_matrix()
        b_T_a_exp[0:3,
                  3] = np.matmul(np.array([[0, 1, 0], [-1, 0, 0], [0, 0, 1]]),
                                 -np.array([1, 1, 0]))
        b_T_a_exp[0:3, 0:3] = np.array([[0, 1, 0], [-1, 0, 0],
                                        [0, 0, 1]])  # R_inv * -t

        T_inv = inverse_homogeneous_transform(a_T_b)
        np.testing.assert_almost_equal(T_inv, b_T_a_exp)
Example No. 20
def inverse_homogeneous_transform(T):
    """ Inverts a homogeneous transformation matrix using the formula 2.92 of [1]. """
    t = T[0:3, 3]
    R = T[0:3, 0:3]

    T_inv = tr.identity_matrix()
    R_inv = np.transpose(R)

    t_inv = np.matmul(R_inv, -t)

    T_inv[0:3, 0:3] = R_inv
    T_inv[0:3, 3] = t_inv

    return T_inv
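A quick sanity check for the inversion above, as a sketch (assuming tf.transformations is imported as tr, matching the surrounding tests):

import numpy as np
import tf.transformations as tr

T = tr.rotation_matrix(0.7, (1.0, 2.0, 3.0))   # arbitrary rotation axis and angle
T[0:3, 3] = [0.5, -0.2, 1.0]                   # arbitrary translation

T_inv = inverse_homogeneous_transform(T)
np.testing.assert_almost_equal(np.matmul(T, T_inv), tr.identity_matrix())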
Example No. 21
    def _new_frame_callback(self, message):
        if self._frame_num % self._skip_frames == 0:
            data = point_cloud_to_array(message)[self._base_markers,:,0]            

            #Remove markers which are not visible
            visible = np.where(np.logical_not(np.isnan(data[:,0])))[0]
            data = data[visible, :]

            #Compute the centroid of the observed points
            centroid = np.mean(data, axis=0)

            #Compute the translation and rotation components
            translation = centroid.squeeze()
            rotation = convert.quaternion_from_matrix(convert.identity_matrix())

            #Publish the transform
            self.publish_transform(translation, rotation)

            #Compute the transformation matrix
            homog = convert.identity_matrix()
            homog[0:3,3] = translation

            #Transform points into rigid body frame
            homog_data = np.vstack((data.T, np.ones((1, data.shape[0]))))
            desired = la.inv(homog).dot(homog_data)
            print(desired)

            #Save the marker arrangement if all markers are visible
            if desired.shape[1] == len(self._base_markers):
                try:
                    save_rigid_body_config(args.data_file, 'test_body', self._base_markers, desired[0:3,:])
                except IOError:
                    print('Error: Unable to save rigid body config')
                rospy.signal_shutdown('Captured all markers in a single frame') 

        self._frame_num += 1
Example No. 22
 def __init__(self):
     self.subscriber = rospy.Subscriber("vicon",
                                        PoseStamped,
                                        self.callback,
                                        queue_size=1)
     self.reference_pub = rospy.Publisher("reference",
                                          Odometry,
                                          queue_size=1)
     self.ext_att_pub = rospy.Publisher("external_attitude",
                                        Quaternion,
                                        queue_size=1)
     self.T_V_NED = TR.euler_matrix(pi, 0, 0, 'rxyz')
     # self.T_XAV_V = TR.identity_matrix() # from subscriber callback
     self.T_UAV_XAV = TR.euler_matrix(pi, 0, 0, 'rxyz')
     self.T_UAV_NED = TR.identity_matrix()  # the output transform
Example No. 23
 def forward(self, qs, start=0, end=5):
     """ FK problem """
     h = tt.identity_matrix()
     for i in range(start, end):
         rz = tt.rotation_matrix(self.theta[i] - qs[i], (0, 0, 1))
         tz = tt.translation_matrix((0, 0, self.d[i]))
         tx = tt.translation_matrix((self.a[i], 0, 0))
         rx = tt.rotation_matrix(self.alpha[i], (1, 0, 0))
         a = tt.concatenate_matrices(rz, tz, tx, rx)
         h = tt.concatenate_matrices(h, a)
         out = "%f\t%f\t%f\t%f + %f" % (self.a[i], self.alpha[i],
                                        self.d[i], qs[i], self.theta[i])
         rospy.logdebug(out)
     xyz = h[:3, 3]
     qtn = tt.quaternion_from_matrix(h)
     rpy = tt.euler_from_matrix(h[:3, :3], axes='sxyz')
     return xyz, qtn, rpy, h
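The same Rz-Tz-Tx-Rx chaining can be exercised on its own; a sketch for a toy two-link planar arm with made-up DH parameters (link lengths 0.5 and 0.3, all twists and offsets zero):

import numpy as np
import tf.transformations as tt

a, alpha, d = [0.5, 0.3], [0.0, 0.0], [0.0, 0.0]   # toy DH parameters
qs = [np.pi / 2, 0.0]                              # joint angles (illustrative)

h = tt.identity_matrix()
for i in range(2):
    rz = tt.rotation_matrix(qs[i], (0, 0, 1))
    tz = tt.translation_matrix((0, 0, d[i]))
    tx = tt.translation_matrix((a[i], 0, 0))
    rx = tt.rotation_matrix(alpha[i], (1, 0, 0))
    h = tt.concatenate_matrices(h, rz, tz, tx, rx)

print(h[:3, 3])   # end-effector position, approximately [0, 0.8, 0]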
Example No. 24
		def quat_from_orient(orient):
			'''Build a quaternion from orientation data.'''
			try:
				w, x, y, z = orient['quaternion']
				return (x, y, z, w)
			except KeyError:
				pass
			try:
				return quaternion_from_euler(pi*orient['roll']/180.,
						pi*orient['pitch']/180, pi*orient['yaw']/180.)
			except KeyError:
				pass
			try:
				m = identity_matrix()
				m[:3,:3] = orient['matrix']
				return quaternion_from_matrix(m)
			except KeyError:
				pass
Example No. 25
		def quat_from_orient(orient):
			'''Build a quaternion from orientation data.'''
			try:
				w, x, y, z = orient['quaternion']
				return (x, y, z, w)
			except KeyError:
				pass
			try:
				return quaternion_from_euler(pi*orient['roll']/180.,
						pi*orient['pitch']/180, pi*orient['yaw']/180.)
			except KeyError:
				pass
			try:
				m = identity_matrix()
				m[:3,:3] = orient['matrix']
				return quaternion_from_matrix(m)
			except KeyError:
				pass
Example No. 26
		def fill_from_Orient(orient_data):
			'''Fill messages with information from 'orientation' MTData block.'''
			self.pub_imu = True
			if orient_data.has_key('quaternion'):
				w, x, y, z = orient_data['quaternion']
			elif orient_data.has_key('roll'):
				# FIXME check that Euler angles are in radians
				x, y, z, w = quaternion_from_euler(radians(orient_data['roll']),
						radians(orient_data['pitch']), radians(orient_data['yaw']))
			elif orient_data.has_key('matrix'):
				m = identity_matrix()
				m[:3,:3] = orient_data['matrix']
				x, y, z, w = quaternion_from_matrix(m)
			self.imu_msg.orientation.x = x
			self.imu_msg.orientation.y = y
			self.imu_msg.orientation.z = z
			self.imu_msg.orientation.w = w
			self.imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
					radians(1.), 0., 0., 0., radians(9.))
Example No. 27
		def fill_from_Orient(orient_data):
			'''Fill messages with information from 'orientation' MTData block.'''
			self.pub_imu = True
			if orient_data.has_key('quaternion'):
				w, x, y, z = orient_data['quaternion']
			elif orient_data.has_key('roll'):
				# FIXME check that Euler angles are in radians
				x, y, z, w = quaternion_from_euler(radians(orient_data['roll']),
						radians(orient_data['pitch']), radians(orient_data['yaw']))
			elif orient_data.has_key('matrix'):
				m = identity_matrix()
				m[:3,:3] = orient_data['matrix']
				x, y, z, w = quaternion_from_matrix(m)
			self.imu_msg.orientation.x = x
			self.imu_msg.orientation.y = y
			self.imu_msg.orientation.z = z
			self.imu_msg.orientation.w = w
			self.imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
					radians(1.), 0., 0., 0., radians(9.))
Example No. 28
 def fill_from_Orient(orient_data):
     '''Fill messages with information from 'orientation' MTData block.
     '''
     self.pub_imu = True
     if 'quaternion' in orient_data:
         w, x, y, z = orient_data['quaternion']
     elif 'roll' in orient_data:
         x, y, z, w = quaternion_from_euler(
             radians(orient_data['roll']), radians(orient_data['pitch']),
             radians(orient_data['yaw']))
     elif 'matrix' in orient_data:
         m = identity_matrix()
         m[:3, :3] = orient_data['matrix']
         x, y, z, w = quaternion_from_matrix(m)
     w, x, y, z = convert_quat((w, x, y, z), o['frame'])
     self.imu_msg.orientation.x = x
     self.imu_msg.orientation.y = y
     self.imu_msg.orientation.z = z
     self.imu_msg.orientation.w = w
     self.imu_msg.orientation_covariance = self.orientation_covariance
Example No. 29
 def fill_from_Orient(orient_data):
     '''Fill messages with information from 'orientation' MTData block.
     '''
     self.pub_imu = True
     if 'quaternion' in orient_data:
         w, x, y, z = orient_data['quaternion']
     elif 'roll' in orient_data:
         x, y, z, w = quaternion_from_euler(
             radians(orient_data['roll']),
             radians(orient_data['pitch']), radians(orient_data['yaw']))
     elif 'matrix' in orient_data:
         m = identity_matrix()
         m[:3, :3] = orient_data['matrix']
         x, y, z, w = quaternion_from_matrix(m)
     self.imu_msg.orientation.x = x
     self.imu_msg.orientation.y = y
     self.imu_msg.orientation.z = z
     self.imu_msg.orientation.w = w
     self.imu_msg.orientation_covariance = (radians(1.), 0., 0., 0.,
                                            radians(1.), 0., 0., 0.,
                                            radians(9.))
Example No. 30
    def forward_DH(self, q=None, start=1, end=None):
        if q is None:
            q = self.q
        if end is None:
            end = len(self.q)

        q[4] = 0.4

        H = transforms.identity_matrix()
        for i in range(start, end):
            Rz = transforms.rotation_matrix(self.DH[i][3] + q[i], (0, 0, 1))
            Tz = transforms.translation_matrix((0, 0, self.DH[i][2]))
            Tx = transforms.translation_matrix((self.DH[i][1], 0, 0))
            Rx = transforms.rotation_matrix(self.DH[i][0], (1, 0, 0))
            A = transforms.concatenate_matrices(Rz, Tz, Tx, Rx)
            print(A)
            H = transforms.concatenate_matrices(H, A)
            #out = "%f\t%f\t%f\t%f + %f" % (self.DH[i][1], self.DH[i][0],
            #self.DH[i][2], self.q[i], self.DH[i][3])
        #rospy.logdebug(out)
        xyz = H[:3, 3]
        qtn = transforms.quaternion_from_matrix(H)
        rpy = transforms.euler_from_matrix(H[:3, :3], axes='sxyz')
        print(H)
        # check #################
        br = TransformBroadcaster()
        ls = tf.TransformListener()
        try:
            ls.waitForTransform("tool0", "iiwa_base_link", rospy.Time(0),
                                rospy.Duration(4.0))
            (pos, rot) = ls.lookupTransform('iiwa_base_link', 'tool0',
                                            rospy.Time())
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            print("can't get iiwa_base_link <- tool0 transform")
            return -1
        print(pos)
        ##########################

        return xyz, qtn, rpy, H
Example No. 31
    def start(self):
        mat = tft.identity_matrix()
        mat[:, 0] = np.array([0, 0, -1, 0])
        mat[:, 2] = np.array([1, 0, 0, 0])
        o = tft.quaternion_from_matrix(mat)
        self._constraint_pose = Pose(orientation=Quaternion(*o))
        gripper_im = create_gripper_interactive_marker(self._constraint_pose,
                                                       pregrasp=False,
                                                       rotation_enabled=False)

        self._im_server.insert(gripper_im, feedback_cb=self.handle_feedback)
        self._im_server.applyChanges()

        oc = OrientationConstraint()
        oc.header.frame_id = 'base_link'
        oc.link_name = 'gripper_link'
        oc.orientation = self._constraint_pose.orientation
        oc.weight = 1.0
        oc.absolute_z_axis_tolerance = 0.1
        oc.absolute_x_axis_tolerance = 0.1
        oc.absolute_y_axis_tolerance = 0.1

        self.constraint = oc
Example No. 32
    def marker_inf_cb(self, data):
        print "in marker callback"
        q = Quaternion()
        q = data.pose.orientation

        marker_orientation = np.array([q.x, q.y, q.z, q.w])
        euler_angles = tr.euler_from_quaternion(marker_orientation, 'rxyz')

        vec3 = Vector3()
        vec3.x = data.pose.position.x
        vec3.y = data.pose.position.y
        vec3.z = data.pose.position.z

        marker_pose = np.array([
            vec3.x, vec3.y, vec3.z, euler_angles[0], euler_angles[1],
            euler_angles[2]
        ])

        for i in range(6):
            if np.isnan(marker_pose[i]):
                return

        cam_object_T = self.set_cam_object_T(marker_pose)

        cam_object_R = tr.identity_matrix()
        cam_object_R[0:3, 0:3] = cam_object_T[0:3, 0:3]
        cam_object_q = tr.quaternion_from_matrix(cam_object_R)
        self.br.sendTransform(
            (cam_object_T[0, 3], cam_object_T[1, 3], cam_object_T[2, 3]),
            cam_object_q, rospy.Time.now(), "object_in_cam_frame",
            "camera_rgb_optical_frame")

        # publishing the object marker to rviz for object_in_cam
        det_marker = Marker()
        det_marker.header.frame_id = "camera_rgb_optical_frame"
        det_marker.header.stamp = rospy.Time.now()
        det_marker.type = Marker.SPHERE
        det_marker.pose.position.x = cam_object_T[0, 3]
        det_marker.pose.position.y = cam_object_T[1, 3]
        det_marker.pose.position.z = cam_object_T[2, 3]
        det_marker.pose.orientation.x = 0
        det_marker.pose.orientation.y = 0
        det_marker.pose.orientation.z = 0
        det_marker.pose.orientation.w = 1
        det_marker.scale.x = 0.05
        det_marker.scale.y = 0.05
        det_marker.scale.z = 0.05
        det_marker.color.a = 1.0
        det_marker.color.r = 1.0
        det_marker.color.g = 0.0
        det_marker.color.b = 0.0
        self.marker_object_in_cam_pub.publish(det_marker)

        base_object_T = np.dot(np.dot(self.base_end_T, self.end_cam_T),
                               cam_object_T)
        # print base_object_T

        final_err = self.error_computation(base_object_T, self.base_target_T)

        # target thresholds:
        trans_th = [0.05, 0.05, 0.05]
        rot_th = [15 * np.pi / 180, 15 * np.pi / 180, 15 * np.pi / 180]

        print "timer: ", self.timer

        if self.timer > 5:
            print "goal reached ............................"
            self.gripper_final_action()

        if np.abs(final_err[0]) < trans_th[0] and np.abs(
                final_err[1]) < trans_th[1] and np.abs(
                    final_err[2]) < trans_th[2]:
            if np.abs(final_err[3]) < rot_th[0] and np.abs(
                    final_err[4]) < rot_th[1] and np.abs(
                        final_err[5]) < rot_th[2]:
                self.timer += 1
                return
            else:
                self.timer = 0
                print "error :", final_err
        else:
            self.timer = 0
            print "error :", final_err

        # use of error_gains:
        error_gains = np.reshape(np.array([1, 1, 1, 1, 1, 1]), (6, 1))
        final_err2 = np.multiply(final_err, error_gains)

        # error value type 2
        end_object_T = np.dot(self.end_cam_T, cam_object_T)
        end_target_T = np.dot(self.end_grip_T, self.grip_target_T)
        object_target_T = np.dot(end_object_T, np.linalg.inv(end_target_T))
        # print "error style 2: ", object_target_T

        # publishing the object marker to rviz
        det_marker = Marker()
        det_marker.header.frame_id = "iiwa_base_link"
        det_marker.header.stamp = rospy.Time.now()
        det_marker.type = Marker.SPHERE
        det_marker.pose.position.x = base_object_T[0, 3]
        det_marker.pose.position.y = base_object_T[1, 3]
        det_marker.pose.position.z = base_object_T[2, 3]
        det_marker.pose.orientation.x = 0
        det_marker.pose.orientation.y = 0
        det_marker.pose.orientation.z = 0
        det_marker.pose.orientation.w = 1
        det_marker.scale.x = 0.05
        det_marker.scale.y = 0.05
        det_marker.scale.z = 0.05
        det_marker.color.a = 1.0
        det_marker.color.r = 0.0
        det_marker.color.g = 0.0
        det_marker.color.b = 1.0
        self.marker_object_in_base_pub.publish(det_marker)

        # print "final error is : ", final_err
        # print "base_target_T: ", base_object_T

        # computation of jacobian of the manipulator
        J = self.kdl_kin.jacobian(self.end_eff_current_pose)

        if self.used_method == "DLSM":
            # use of damped_least_squares as matrix inverse
            J_inverse = np.dot(
                J.T,
                np.linalg.inv(
                    np.dot(J, J.T) + ((self.landa**2) * np.identity(6))))

        elif self.used_method == "JPM":
            # use of matrix pseudo_inverse as matrix inverse
            J_inverse = np.linalg.pinv(J)

        elif self.used_method == "JTM":
            # use of matrix transpose as matrix inverse
            J_inverse = np.transpose(J)

        final_vel = np.dot(J_inverse, final_err2)

        # the dereived final speed command for the manipulator
        speed_cmd = list(np.array(final_vel).reshape(-1, ))

        # avoid huge joint velocities:
        joint_vel_th = [0.7, 0.7, 0.7, 0.7, 0.7, 0.7, 0.7]
        if speed_cmd[0] > joint_vel_th[0]:
            print "huge velocity for the first joint!"
            return
        elif speed_cmd[1] > joint_vel_th[1]:
            print "huge velocity for the second joint!"
            return
        elif speed_cmd[2] > joint_vel_th[2]:
            print "huge velocity for the third joint!"
            return
        elif speed_cmd[3] > joint_vel_th[3]:
            print "huge velocity for the fourth joint!"
            return
        elif speed_cmd[4] > joint_vel_th[4]:
            print "huge velocity for the fifth joint!"
            return
        elif speed_cmd[5] > joint_vel_th[5]:
            print "huge velocity for the sixth joint!"
            return
        elif speed_cmd[6] > joint_vel_th[6]:
            print "huge velocity for the seventh joint!"
            return

        # block to handle speed publisher using pose publisher
        intergration_time_step = 0.5
        pose_change = [speed * intergration_time_step for speed in speed_cmd]
        final_pose = [
            a + b for a, b in zip(self.end_eff_current_pose, pose_change)
        ]

        # actionlib commands
        self.trajectory.points[0].positions = final_pose
        self.trajectory.points[0].velocities = [0.0] * len(self.joint_names)
        self.trajectory.points[0].accelerations = [0.0] * len(self.joint_names)
        self.trajectory.points[0].time_from_start = rospy.Duration(1.0)

        # print "pose_change", pose_change
        # print "The manipulator is moving..."

        goal = FollowJointTrajectoryGoal()
        goal.trajectory = self.trajectory
        goal.goal_time_tolerance = rospy.Duration(0.0)

        self.manipulator_client.send_goal(goal)
        action_result = self.manipulator_client.wait_for_result()

        if action_result is True:
            # publishing the path of the object in "red" color
            final_object_position = base_object_T[0:3, 3]
            final_object_R = tr.identity_matrix()
            final_object_R[0:3, 0:3] = base_object_T[0:3, 0:3]
            final_object_quaternion = tr.quaternion_from_matrix(final_object_R)
            self.object_path.markers.append(Marker())
            self.object_path.markers[-1].header.frame_id = "iiwa_base_link"
            self.object_path.markers[-1].header.stamp = rospy.Time.now()
            self.object_path.markers[-1].id = len(self.object_path.markers)
            self.object_path.markers[-1].type = Marker.SPHERE
            self.object_path.markers[
                -1].pose.position.x = final_object_position[0]
            self.object_path.markers[
                -1].pose.position.y = final_object_position[1]
            self.object_path.markers[
                -1].pose.position.z = final_object_position[2]
            self.object_path.markers[
                -1].pose.orientation.x = final_object_quaternion[0]
            self.object_path.markers[
                -1].pose.orientation.y = final_object_quaternion[1]
            self.object_path.markers[
                -1].pose.orientation.z = final_object_quaternion[2]
            self.object_path.markers[
                -1].pose.orientation.w = final_object_quaternion[3]
            self.object_path.markers[-1].scale.x = 0.01
            self.object_path.markers[-1].scale.y = 0.01
            self.object_path.markers[-1].scale.z = 0.01
            self.object_path.markers[-1].color.a = 1.0
            self.object_path.markers[-1].color.r = 1.0
            self.object_path.markers[-1].color.g = 0.0
            self.object_path.markers[-1].color.b = 0.0
            self.marker_obj_path_pub.publish(self.object_path)

            # publishing path of the end effector in "green" color
            final_ee_position = self.base_end_T[0:3, 3]
            final_ee_R = tr.identity_matrix()
            final_ee_R[0:3, 0:3] = self.base_end_T[0:3, 0:3]
            final_ee_quaternion = tr.quaternion_from_matrix(final_ee_R)
            self.ee_path.markers.append(Marker())
            self.ee_path.markers[-1].header.frame_id = "iiwa_base_link"
            self.ee_path.markers[-1].header.stamp = rospy.Time.now()
            self.ee_path.markers[-1].id = len(self.ee_path.markers)
            self.ee_path.markers[-1].type = Marker.SPHERE
            self.ee_path.markers[-1].pose.position.x = final_ee_position[0]
            self.ee_path.markers[-1].pose.position.y = final_ee_position[1]
            self.ee_path.markers[-1].pose.position.z = final_ee_position[2]
            self.ee_path.markers[-1].pose.orientation.x = final_ee_quaternion[
                0]
            self.ee_path.markers[-1].pose.orientation.y = final_ee_quaternion[
                1]
            self.ee_path.markers[-1].pose.orientation.z = final_ee_quaternion[
                2]
            self.ee_path.markers[-1].pose.orientation.w = final_ee_quaternion[
                3]
            self.ee_path.markers[-1].scale.x = 0.01
            self.ee_path.markers[-1].scale.y = 0.01
            self.ee_path.markers[-1].scale.z = 0.01
            self.ee_path.markers[-1].color.a = 1.0
            self.ee_path.markers[-1].color.r = 0.0
            self.ee_path.markers[-1].color.g = 1.0
            self.ee_path.markers[-1].color.b = 0.0
            self.marker_eff_path_pub.publish(self.ee_path)
Example No. 33
    def get_current_pose(self):
        frame_described_in = str(self.frameline.text())
        left = ('Left' == str(self.arm_box.currentText()))
        right = False
        if not left:
            right = True
            arm_tip_frame = LinearMoveTool.RIGHT_TIP
        else:
            arm_tip_frame = LinearMoveTool.LEFT_TIP
        
        self.tf_listener.waitForTransform(frame_described_in, arm_tip_frame, rospy.Time(), rospy.Duration(2.))
        p_arm = tfu.tf_as_matrix(self.tf_listener.lookupTransform(frame_described_in, arm_tip_frame, rospy.Time(0))) * tr.identity_matrix()
        trans, rotation = tr.translation_from_matrix(p_arm), tr.quaternion_from_matrix(p_arm)

        for value, vr in zip(trans, [self.xline, self.yline, self.zline]):
            vr.setText(str(value))
        for value, vr in zip(tr.euler_from_quaternion(rotation), [self.phi_line, self.theta_line, self.psi_line]):
            vr.setText(str(np.degrees(value)))
Example No. 34
                # test if the marker is tracked or not, skip
                print 'vicon pos %.4f %.4f %.4f' % tuple(vmpos), 'robot pos %.4f %.4f %.4f' % tuple(vmpos_robot)
                if norm(vmpos) < 1e-9:
                    print 'vicon pos is bad, not using this data'
                    continue
                xs = xs + [vmpos[0]]
                ys = ys + [vmpos[1]]
                zs = zs + [vmpos[2]]
                xt = xt + [vmpos_robot[0]]
                yt = yt + [vmpos_robot[1]]
                zt = zt + [vmpos_robot[2]]
            
print 'data length', len(xs)
plt.scatter(xs, ys, c='r', marker='o')
plt.scatter(xt, yt, c='b', marker='o')

plt.show()

viconpts = np.vstack([xs, ys, zs]).T
robotpts = np.vstack([xt, yt, zt]).T

(R,t,rmse) = rigid_transform_3D(viconpts, robotpts)  # then you'll get vicon frame wrt robot frame

Rh = tfm.identity_matrix()
Rh[np.ix_([0,1,2],[0,1,2])] = R
quat = tfm.quaternion_from_matrix(Rh)

print 'vicon_T_robot:', " ".join('%.8e' % x for x in (t.tolist() + quat.tolist()))
print 'rmse:', rmse

Example No. 35
 def space_coordinates(self, joints):
     T = tfs.identity_matrix()
     for idx in range(0, self.dof):
         T = tfs.concatenate_matrices(T, self.transformations[idx](joints[idx]))
     return self.space_from_matrix(T)
Example No. 36
    def error_computation(self, base_object, base_target):
        position_err = base_object[0:3, 3] - base_target[0:3, 3]

        # computation of orientation_err using the skew symmetric matrix
        # the order for the result quaternion from tr package is x, y, z, w
        # no need to change the order in goal_q and ee_q
        cos = np.cos
        sin = np.sin
        base_object_R = tr.identity_matrix()
        base_object_R[0:3, 0:3] = base_object[0:3, 0:3]
        modification_matrix1 = np.array(
            [[cos(4 * np.pi / 6), 0,
              sin(4 * np.pi / 6), 0], [0, 1, 0, 0],
             [-sin(4 * np.pi / 6), 0,
              cos(4 * np.pi / 6), 0], [0, 0, 0, 1]])
        modification_matrix2 = np.array([[1, 0, 0, 0],
                                         [0, cos(np.pi), -sin(np.pi), 0],
                                         [0, sin(np.pi),
                                          cos(np.pi), 0], [0, 0, 0, 1]])

        modified_base_object_R = np.dot(
            np.dot(base_object_R, modification_matrix1), modification_matrix2)
        base_object_quaternion = tr.quaternion_from_matrix(
            modified_base_object_R)
        goal_q = [
            base_object_quaternion[0], base_object_quaternion[1],
            base_object_quaternion[2], base_object_quaternion[3]
        ]

        ### object tf wrt base before modification in orientation
        real_base_object_quaternion = tr.quaternion_from_matrix(base_object_R)
        object_real_q = [
            real_base_object_quaternion[0], real_base_object_quaternion[1],
            real_base_object_quaternion[2], real_base_object_quaternion[3]
        ]
        self.br.sendTransform(
            (base_object[0, 3], base_object[1, 3], base_object[2, 3]),
            object_real_q, rospy.Time.now(), "actual_object_tf",
            "iiwa_base_link")

        ### object tf wrt base after modification in orientation
        self.br.sendTransform(
            (base_object[0, 3], base_object[1, 3], base_object[2, 3]), goal_q,
            rospy.Time.now(), "modified_object_tf", "iiwa_base_link")

        # computation of orientation error using the skew-symmetric matrix
        base_target_R = tr.identity_matrix()
        base_target_R[0:3, 0:3] = base_target[0:3, 0:3]
        base_target_quaternion = tr.quaternion_from_matrix(base_target_R)
        ee_q = [
            base_target_quaternion[0], base_target_quaternion[1],
            base_target_quaternion[2], base_target_quaternion[3]
        ]

        skew_mat = np.array([[0.0, -goal_q[2], goal_q[1]],
                             [goal_q[2], 0.0, -goal_q[0]],
                             [-goal_q[1], goal_q[0], 0.0]])

        orientation_err = np.empty((3, 1))
        for i in range(3):
            orientation_err[i] = (ee_q[3] * goal_q[i]) - (
                goal_q[3] * ee_q[i]) - (skew_mat[i, 0] * ee_q[0] +
                                        skew_mat[i, 1] * ee_q[1] +
                                        skew_mat[i, 2] * ee_q[2])

        # print "orientation error: ", orientation_err

        final_err = np.empty((6, 1))
        final_err[0] = position_err[0]
        final_err[1] = position_err[1]
        final_err[2] = position_err[2]
        # final_err[0] = 0
        # final_err[1] = 0
        # final_err[2] = 0
        final_err[3] = orientation_err[0]
        final_err[4] = orientation_err[1]
        final_err[5] = orientation_err[2]
        # final_err[3] = 0
        # final_err[4] = 0
        # final_err[5] = 0
        return final_err
Example No. 37
import roslib
roslib.load_manifest('ptp_arm_action')
import rospy
import actionlib
import ptp_arm_action.msg as ptp
import numpy as np
from object_manipulator.convert_functions import *
import tf.transformations as tr

rospy.init_node('ptp_client')
client = actionlib.SimpleActionClient('right_ptp', ptp.LinearMovementAction)
client.wait_for_server()

#Construct goal
goal = ptp.LinearMovementGoal()
goal.relative = True
goal.trans_vel = 0.02
goal.rot_vel = 0.02
motion = tr.identity_matrix()
motion[0, 3] = .1
print motion
goal.goal = stamp_pose(mat_to_pose(motion), 'base_link')
print 'goal is\n', goal
client.send_goal(goal)

client.wait_for_result()
r = client.get_result()
print r
#print r.__class__, r
print 'done!'
#rospy.spin()
Example No. 38
 def __init__(self):
     self.br = tf2_ros.TransformBroadcaster()
     self.mat44_map_odom = tft.identity_matrix()
     t_ = threading.Thread(target=self.tf_publish)
     t_.start()
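The tf_publish method run by the thread is not part of this snippet. A hypothetical sketch of what it might do, broadcasting mat44_map_odom as a map -> odom transform (frame names, rate, and message layout are assumptions):

import rospy
import tf.transformations as tft
from geometry_msgs.msg import TransformStamped

def tf_publish(self):
    # Hypothetical publisher loop: broadcast the stored 4x4 matrix at 50 Hz.
    rate = rospy.Rate(50)
    while not rospy.is_shutdown():
        msg = TransformStamped()
        msg.header.stamp = rospy.Time.now()
        msg.header.frame_id = 'map'
        msg.child_frame_id = 'odom'
        t = tft.translation_from_matrix(self.mat44_map_odom)
        q = tft.quaternion_from_matrix(self.mat44_map_odom)
        msg.transform.translation.x, msg.transform.translation.y, msg.transform.translation.z = t
        msg.transform.rotation.x, msg.transform.rotation.y, msg.transform.rotation.z, msg.transform.rotation.w = q
        self.br.sendTransform(msg)
        rate.sleep()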