Code example #1
File: ros_utils.py  Project: benkehoe/python
def make_kin_tree_from_joint(ps,jointname, ns='default_ns',valuedict=None):
    rospy.logdebug("joint: %s"%jointname)
    U = get_pr2_urdf()
        
    joint = U.joints[jointname]
    joint.origin = joint.origin or urdf.Pose()
    if not joint.origin.position: joint.origin.position = [0,0,0]
    if not joint.origin.rotation: joint.origin.rotation = [0,0,0]
   
    parentFromChild = conversions.trans_rot_to_hmat(joint.origin.position, transformations.quaternion_from_euler(*joint.origin.rotation))

    childFromRotated = np.eye(4)
    if valuedict and jointname in valuedict:        
        
        if joint.joint_type == 'revolute' or joint.joint_type == 'continuous':
            childFromRotated = transformations.rotation_matrix(valuedict[jointname], joint.axis)
        elif joint.joint_type == 'prismatic':
            childFromRotated = transformations.translation_matrix(np.array(joint.axis)* valuedict[jointname]) 
            
    parentFromRotated = np.dot(parentFromChild, childFromRotated)            
    originFromParent = conversions.pose_to_hmat(ps.pose)
    originFromRotated = np.dot(originFromParent, parentFromRotated)
    
    newps = gm.PoseStamped()
    newps.header = ps.header
    newps.pose = conversions.hmat_to_pose(originFromRotated)
    
    return make_kin_tree_from_link(newps,joint.child,ns=ns,valuedict=valuedict)
Code example #2
File: ros_utils.py  Project: benkehoe/python
def make_kin_tree_from_link(ps,linkname, ns='default_ns',valuedict=None):
    markers = []
    U = get_pr2_urdf()
    link = U.links[linkname]
    
    if link.visual and link.visual.geometry and isinstance(link.visual.geometry,urdf.Mesh):
        rospy.logdebug("%s is a mesh. drawing it.", linkname)
        marker = Marker(type = Marker.MESH_RESOURCE, action = Marker.ADD)
        marker.ns = ns
        marker.header = ps.header        
        
        linkFromGeom = conversions.trans_rot_to_hmat(link.visual.origin.position, transformations.quaternion_from_euler(*link.visual.origin.rotation))
        origFromLink = conversions.pose_to_hmat(ps.pose)
        origFromGeom = np.dot(origFromLink, linkFromGeom)
        
        marker.pose = conversions.hmat_to_pose(origFromGeom)           
        marker.scale = gm.Vector3(1,1,1)
        marker.color = stdm.ColorRGBA(1,1,0,.5)
        marker.id = 0
        marker.lifetime = rospy.Duration()
        marker.mesh_resource = str(link.visual.geometry.filename)
        marker.mesh_use_embedded_materials = False
        markers.append(marker)
    else:
        rospy.logdebug("%s is not a mesh", linkname)
        
    if linkname in U.child_map:
        for (cjoint,clink) in U.child_map[linkname]:
            markers.extend(make_kin_tree_from_joint(ps,cjoint,ns=ns,valuedict=valuedict))
            
    return markers    
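A minimal usage sketch, not taken from the original project: it shows one way the Marker list returned by make_kin_tree_from_link could be published as a MarkerArray. The node name, topic, frame_id, root link, and joint value below are all assumptions.

import rospy
import geometry_msgs.msg as gm
from visualization_msgs.msg import MarkerArray

rospy.init_node('kin_tree_viz')                                       # assumed node name
pub = rospy.Publisher('kin_tree_markers', MarkerArray, queue_size=1)  # assumed topic

# Pose of the robot root link in some fixed frame; the frame_id is an assumption.
ps = gm.PoseStamped()
ps.header.frame_id = 'base_footprint'
ps.pose.orientation.w = 1.0

# 'base_link' and 'torso_lift_joint' are assumed PR2 URDF names.
markers = make_kin_tree_from_link(ps, 'base_link', valuedict={'torso_lift_joint': 0.1})
pub.publish(MarkerArray(markers=markers))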
Code example #3
File: conversions.py  Project: benkehoe/python
def hmat_to_trans_rot(hmat): 
    '''
    Converts a 4x4 homogeneous rigid transformation matrix to a translation and a
    quaternion rotation.
    '''
    scale, shear, angles, trans, persp = transformations.decompose_matrix(hmat) 
    rot = transformations.quaternion_from_euler(*angles) 
    return trans, rot 
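A minimal round-trip check, not from the original file, assuming the same transformations module used throughout these examples (importing it via tf.transformations is an assumption):

import numpy as np
from tf import transformations

# Build a homogeneous matrix from known Euler angles and a translation.
hmat = transformations.euler_matrix(0.1, 0.2, 0.3)
hmat[:3, 3] = [1.0, 2.0, 3.0]

trans, rot = hmat_to_trans_rot(hmat)
print(trans)  # approximately [1. 2. 3.]
print(rot)    # unit quaternion for the same rotation (component ordering depends on the library version)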
Code example #4
def on_sensor_data(data):
    #print('Sensor frame received')
    rgb_image = Image.open(BytesIO(base64.b64decode(data['rgb_image'])))
    rgb_image = np.asarray(rgb_image)
    
    if rgb_image.shape != (256,256,3):
        print('image shape not 256, 256, 3')
        return None
    
    # to save the received rgb images
    # np.save(str(np.random.randint(9999)), rgb_image)
    
    # only draw boxes every few seconds
    if np.random.randint(45) > 41:
       
        # Cast a ray directly at the center of the image.
        # Once this works we will need to vary this pixel so that the centroid is not always at the image center.
        # Some errors are suppressed by the symmetry of this pixel choice, which is what I want at the moment.
        ray = ray_casting.cast_ray(data, [128, 128])
        
        # gimbal pose and quad pose: I am pretty sure they are in the world reference frame
        # TODO remove this when sign flip on y is fixed in unity sim
        gimbal_pose = tmpfix_position(list(map(float, data['gimbal_pose'].split(','))))

        # TODO remove this when sign flip on y is fixed in unity sim
        quad_pose = tmpfix_position(list(map(float, data['pose'].split(','))))



        # The sim world has a static reference frame; the angles below give the rotation from the world frame to the sensor frame.
        rot = transformations.euler_matrix(to_radians(gimbal_pose[3]),
                                           to_radians(gimbal_pose[4]),
                                           -to_radians(gimbal_pose[5]))[:3,:3]

        # This is the identity rotation. The quaternion convention in transformations is (w, x, y, z); Unity uses (x, y, z, w).
        gimbal_cam_rot = transformations.quaternion_matrix((1,0,0,0))[:3,:3]  
        
        # Rotate the ray so it is expressed in the world frame. I think the order may matter here.
        print('ray unrotated', ray)
        ray = np.dot(gimbal_cam_rot, ray)        
        ray = np.dot(rot, np.array(ray))
        print('ray rotated', ray)

        # print some more debug data
         
        euler = gimbal_pose[3:]
        quaternion = transformations.quaternion_from_euler(euler[0], euler[1], euler[2]) 
        print('gimbal_rotation_quat', quaternion)
        print('gimbal_pose_received', data['gimbal_pose'])
        print('gimbal_pose_fixed', gimbal_pose)

        euler = quad_pose[3:]
        quaternion = transformations.quaternion_from_euler(euler[0], euler[1], euler[2]) 
        print('quad_rotation_quat', quaternion)
        print('quad_position_received', data['pose'])
        print('quad_pose_fixed', quad_pose)
        
        # Flip the sign again so that the pose set in the sim is correct.
        # TODO remove the call to tmpfix when sign flip on y is fixed in unity sim
        marker_pos = tmpfix_position((gimbal_pose[:3] + ray).tolist() + [0,0,0]).tolist()
        
        marker_rot = [0,0,0]
        quaternion = transformations.quaternion_from_euler(*marker_rot) 
        print('marker_rotation_quat', quaternion, '\n\n')

        # prepare the marker message and send 
        marker_msg = sio_msgs.create_box_marker_msg(np.random.randint(99999), marker_pos)
        socketIO.emit('create_box_marker', marker_msg)
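The quaternion-ordering comment above is easy to get wrong in practice, so here is a small standalone sketch of the reordering between the (w, x, y, z) convention and Unity's (x, y, z, w). The helper names are hypothetical and not part of the original script.

def wxyz_to_xyzw(q):
    # Reorder a (w, x, y, z) quaternion to (x, y, z, w).
    w, x, y, z = q
    return (x, y, z, w)

def xyzw_to_wxyz(q):
    # Reorder an (x, y, z, w) quaternion to (w, x, y, z).
    x, y, z, w = q
    return (w, x, y, z)

print(wxyz_to_xyzw((1, 0, 0, 0)))  # identity rotation in (x, y, z, w) order: (0, 0, 0, 1)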
Code example #5
File: conversions.py  Project: benkehoe/python
def yaw_to_quat(yaw):
    return transformations.quaternion_from_euler(0, 0, yaw)
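A quick usage sketch, not from the original file:

import math

q = yaw_to_quat(math.pi / 2)  # pure 90-degree rotation about the z axis
print(q)                      # unit quaternion with no roll or pitch component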
Code example #6
File: util.py  Project: robustrobotics/honda_cmm
def quaternion_from_euler(roll, pitch, yaw):
    trans_q = trans.quaternion_from_euler(roll, pitch, yaw, 'rxyz')
    return to_pyquat(trans_q)
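Here the explicit 'rxyz' axes argument selects rotating-frame XYZ Euler angles rather than the library default ('sxyz', static frame), and the two can yield different quaternions for the same three angles. A small sketch of that difference, assuming trans is the same transformations module imported in the original util.py:

q_rotating = trans.quaternion_from_euler(0.1, 0.2, 0.3, 'rxyz')
q_default = trans.quaternion_from_euler(0.1, 0.2, 0.3)
print(q_rotating)
print(q_default)  # generally differs from q_rotating unless the angles are degenerate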