Example #1
import maya.api.OpenMaya as om  # assumed aliases, matching the calls below
import maya.cmds as mc


def sang_poly(chue_nod_poly,
              lis_xyz,
              lis_index_chut,
              lis_n_chut_nai_na,
              lis_u,
              lis_v,
              lis_norm=None):
    # Pack the vertex-data lists into Maya array objects
    arr_xyz = om.MFloatPointArray(lis_xyz)  # vertex positions
    arr_index_chut = om.MIntArray(lis_index_chut)  # vertex indices
    arr_n_chut_nai_na = om.MIntArray(lis_n_chut_nai_na)  # number of vertices in each face
    arr_u = om.MFloatArray(lis_u)  # vertex UVs
    arr_v = om.MFloatArray(lis_v)

    trans_fn = om.MFnTransform()
    trans_obj = trans_fn.create()
    trans_fn.setName(chue_nod_poly)
    chue_nod_poly = trans_fn.name()
    # Create the whole polygon mesh from the prepared data
    fn_mesh = om.MFnMesh()
    fn_mesh.create(arr_xyz, arr_n_chut_nai_na, arr_index_chut, arr_u, arr_v,
                   trans_obj)
    fn_mesh.setName(chue_nod_poly + 'Shape')
    fn_mesh.assignUVs(arr_n_chut_nai_na, arr_index_chut)
    if (lis_norm):
        fn_mesh.setVertexNormals(lis_norm, om.MIntArray(range(len(lis_xyz))))

    # Tag the polygon node with the fact that this model was imported from MMD
    mc.addAttr(chue_nod_poly,
               longName='MMD_model',
               niceName='MMDからのモデル',
               attributeType='bool')
    mc.setAttr(chue_nod_poly + '.MMD_model', True)
    mc.setAttr(chue_nod_poly + '.aiOpaque', 0)  # let transparency take effect when rendering with Arnold
    return chue_nod_poly
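
A minimal usage sketch (hypothetical data, not from the original source; it also assumes the mtoa plug-in is loaded, since the function sets aiOpaque): one quad with per-vertex UVs.

lis_xyz = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0)]  # vertex positions
lis_index_chut = [0, 1, 2, 3]  # face-vertex indices
lis_n_chut_nai_na = [4]        # one face using four vertices
lis_u = [0.0, 1.0, 1.0, 0.0]   # one UV per vertex
lis_v = [0.0, 0.0, 1.0, 1.0]
sang_poly('mmd_model', lis_xyz, lis_index_chut, lis_n_chut_nai_na, lis_u, lis_v)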
Example #2
    def __init__(self):

        self.uvSetName = 'map1'

        self.uArray = openMaya.MFloatArray()
        self.vArray = openMaya.MFloatArray()
        self.uvCounts = openMaya.MIntArray()
        self.uvIds = openMaya.MIntArray()
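
These pre-allocated arrays fit the 1.0-style workflow, where MFnMesh fills arrays passed in by reference. A minimal sketch of how they would typically be populated, assuming a hypothetical meshFn (an MFnMesh attached to the shape elsewhere in the class):

        # hypothetical: meshFn is an MFnMesh built elsewhere in the class
        meshFn.getUVs(self.uArray, self.vArray, self.uvSetName)           # per-UV u and v values
        meshFn.getAssignedUVs(self.uvCounts, self.uvIds, self.uvSetName)  # per-face counts and UV ids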
Example #3
    def reader(self, fileObject, optionString, accessMode):
        drc = Draco()
        mesh = drc.decode(fileObject.fullName())

        # vertices, normals, uvs
        vertices = []
        normals = []
        us = OpenMaya.MFloatArray()
        vs = OpenMaya.MFloatArray()
        poly_count = [3] * mesh.faces_num
        for n in range(mesh.vertices_num):
            i = 3 * n
            vertices.append(
                OpenMaya.MPoint(mesh.vertices[i], mesh.vertices[i + 1],
                                mesh.vertices[i + 2]))
            if mesh.normals:
                normals.append(
                    OpenMaya.MFloatVector(mesh.normals[i], mesh.normals[i + 1],
                                          mesh.normals[i + 2]))
            if mesh.uvs:
                i = 2 * n
                us.append(mesh.uvs[i])
                vs.append(mesh.uvs[i + 1])

        #create mesh
        fnMesh = OpenMaya.MFnMesh()
        newMesh = fnMesh.create(vertices, poly_count, mesh.faces)

        if mesh.normals:
            fnMesh.setVertexNormals(normals, range(len(vertices)))
        if mesh.uvs:
            uvSetsNames = fnMesh.getUVSetNames()
            fnMesh.setUVs(us, vs, uvSetsNames[0])
            fnMesh.assignUVs(poly_count, mesh.faces)

        fnMesh.updateSurface()

        slist = OpenMaya.MGlobal.getSelectionListByName("initialShadingGroup")
        initialSG = slist.getDependNode(0)

        fnSG = OpenMaya.MFnSet(initialSG)
        if fnSG.restriction() == OpenMaya.MFnSet.kRenderableOnly:
            fnSG.addMember(newMesh)
Example #4
    def __init__(self, file_gfx):
        self.bone_ids = 0
        self.weights = 0
        self.VA = 0
        self.NA = 0
        self.uArray0 = om2.MFloatArray()
        self.vArray0 = om2.MFloatArray()
        self.uArray1 = om2.MFloatArray()
        self.vArray1 = om2.MFloatArray()
        self.uArray2 = om2.MFloatArray()
        self.vArray2 = om2.MFloatArray()
        self.uArray3 = om2.MFloatArray()
        self.vArray3 = om2.MFloatArray()

        self.name = rd_meshBegin(file_gfx)
        self.mesh_header = header(file_gfx)
Example #5
    def _importCtm(self, fileName, importOptions):
        verbose = importOptions.get('verbose', False)
        context = openctm.ctmNewContext(openctm.CTM_IMPORT)

        # Extract file
        openctm.ctmLoad(context, fileName)
        e = openctm.ctmGetError(context)
        if e != 0:
            s = openctm.ctmErrorString(e)
            print(s)
            openctm.ctmFreeContext(context)
            raise Exception(s)

        # Extract indices
        triCount = openctm.ctmGetInteger(context, openctm.CTM_TRIANGLE_COUNT)
        ctmIndices = openctm.ctmGetIntegerArray(context, openctm.CTM_INDICES)
        polyCount = [3] * triCount

        # Extract vertices
        vertCount = openctm.ctmGetInteger(context, openctm.CTM_VERTEX_COUNT)
        ctmVertices = openctm.ctmGetFloatArray(context, openctm.CTM_VERTICES)
        vertices = OpenMaya.MFloatPointArray()
        vertices.setLength(vertCount)

        # Extract Normals
        ctmVertNormals = None
        vertNormals = OpenMaya.MFloatPointArray()
        hasNormals = openctm.ctmGetInteger(
            context, openctm.CTM_HAS_NORMALS) == openctm.CTM_TRUE
        if hasNormals:
            ctmVertNormals = openctm.ctmGetFloatArray(context,
                                                      openctm.CTM_NORMALS)
            vertNormals.setLength(vertCount)

        # Extract UVs
        hasUVs = openctm.ctmGetInteger(context, openctm.CTM_UV_MAP_COUNT) > 0
        ctmTexCoords = None
        uCoords = OpenMaya.MFloatArray()
        vCoords = OpenMaya.MFloatArray()

        if hasUVs:
            ctmTexCoords = openctm.ctmGetFloatArray(context,
                                                    openctm.CTM_UV_MAP_1)
            uCoords.setLength(vertCount)
            vCoords.setLength(vertCount)
            textureFilename = openctm.ctmGetUVMapString(
                context, openctm.CTM_UV_MAP_1, openctm.CTM_FILE_NAME)
            # TODO: Load texture file
            if textureFilename:
                pass

        # Extract colors
        colorAttrib = openctm.ctmGetNamedAttribMap(context, "Color")
        hasVertexColors = colorAttrib != openctm.CTM_NONE
        ctmColors = None
        vertexColors = OpenMaya.MColorArray()
        if hasVertexColors:
            ctmColors = openctm.ctmGetFloatArray(context, colorAttrib)
            vertexColors.setLength(vertCount)

        pointToIndex = {}
        ctmVertIndexToUniqueIndex = {}
        nrSkippedVertices = 0
        for i in range(vertCount):
            ctmVertIndex = i * 3
            p = (float(ctmVertices[ctmVertIndex]),
                 float(ctmVertices[ctmVertIndex + 1]),
                 float(ctmVertices[ctmVertIndex + 2]))
            if p not in pointToIndex:
                index = i - nrSkippedVertices
                pointToIndex[p] = index
                ctmVertIndexToUniqueIndex[i] = index
                vertices[index].x = p[0]
                vertices[index].y = p[1]
                vertices[index].z = p[2]

                if hasNormals:
                    vertNormals[index].x = float(ctmVertNormals[ctmVertIndex])
                    vertNormals[index].y = float(ctmVertNormals[ctmVertIndex +
                                                                1])
                    vertNormals[index].z = float(ctmVertNormals[ctmVertIndex +
                                                                2])

                if hasUVs:
                    ctmUVIndex = i * 2
                    uCoords[index] = float(ctmTexCoords[ctmUVIndex])
                    vCoords[index] = float(ctmTexCoords[ctmUVIndex + 1])

                if hasVertexColors:
                    ctmColIndex = i * 4
                    vertexColors[index].r = float(ctmColors[ctmColIndex])
                    vertexColors[index].g = float(ctmColors[ctmColIndex + 1])
                    vertexColors[index].b = float(ctmColors[ctmColIndex + 2])
                    vertexColors[index].a = float(ctmColors[ctmColIndex + 3])
            else:
                ctmVertIndexToUniqueIndex[i] = pointToIndex[p]
                nrSkippedVertices += 1

        uniqVertCount = len(pointToIndex)
        vertices.setLength(uniqVertCount)
        vertNormals.setLength(uniqVertCount)

        indices = [
            ctmVertIndexToUniqueIndex[ctmIndices[i]]
            for i in range(3 * triCount)
        ]

        if hasUVs:
            uCoords.setLength(uniqVertCount)
            vCoords.setLength(uniqVertCount)

        if hasVertexColors:
            vertexColors.setLength(uniqVertCount)

        if verbose:
            method = openctm.ctmGetInteger(context,
                                           openctm.CTM_COMPRESSION_METHOD)
            if method == openctm.CTM_METHOD_RAW:
                methodStr = "RAW"
            elif method == openctm.CTM_METHOD_MG1:
                methodStr = "MG1"
            elif method == openctm.CTM_METHOD_MG2:
                methodStr = "MG2"
            else:
                methodStr = "Unknown"
            print("File: %s" % fileName)
            print("Comment: %s" %
                  str(openctm.ctmGetString(context, openctm.CTM_FILE_COMMENT)))
            print("Compression Method: %s" % methodStr)
            print("Vertices Count : %d" % vertCount)
            print("Unique Vertices Count : %d" % uniqVertCount)
            print("Triangles Count: %d" % triCount)
            print("Has normals: %r" % hasNormals)
            print("Has UVs: %r" % hasUVs)
            print("Has Vertex Colors: %r" % hasVertexColors)

        fnMesh = OpenMaya.MFnMesh()
        newMesh = fnMesh.create(vertices, polyCount, indices, uCoords, vCoords)

        if hasNormals:
            fnMesh.setVertexNormals(vertNormals, range(len(vertices)))

        if hasVertexColors:
            fnMesh.setVertexColors(vertexColors, range(len(vertices)))

        fnMesh.updateSurface()

        # Assign initial shading group
        slist = OpenMaya.MGlobal.getSelectionListByName("initialShadingGroup")
        initialSG = slist.getDependNode(0)

        fnSG = OpenMaya.MFnSet(initialSG)
        if fnSG.restriction() == OpenMaya.MFnSet.kRenderableOnly:
            fnSG.addMember(newMesh)
Example #6
def read_gmc_data(scale_rate, gmc_path, mesh_name):
    # first read, get object section
    file = open(gmc_path, "r")

    line_num = -1
    line_nums = []

    line = file.readline()
    line_num += 1

    while True:
        line = file.readline()
        line_num += 1

        if not line:
            break

        line = strip_space_line(line)
        if line.find("Object") == 0:
            line_nums.append(line_num)

    print line_nums

    file.close()

    # second read,read section 1
    vertexArray = OpenMaya.MFloatPointArray()
    uArray = OpenMaya.MFloatArray()
    vArray = OpenMaya.MFloatArray()

    polygonCounts = OpenMaya.MIntArray()
    polygonConnects = OpenMaya.MIntArray()

    material_num = 0
    material_sets = []

    file = open(gmc_path, "r")
    line_num = -1

    line = file.readline()
    line_num += 1

    while True:
        line = file.readline()
        line_num += 1

        if not line:
            break

        line = strip_space_line(line)
        if line_num >= line_nums[0] and line_num < line_nums[1]:

            if line.find("Vertices") == 0:
                print line
            elif line.find("v") == 0:
                vertex_line = re.split("v|n|c|t", line)
                vertex_data = []
                vertex_data.append(parse_mesh_element(vertex_line[1]))
                vertex_data.append(parse_mesh_element(vertex_line[4]))

                pos = vertex_data[0]
                pos[0] *= scale_rate
                pos[1] *= scale_rate
                pos[2] *= scale_rate

                v = OpenMaya.MFloatPoint(pos[0], pos[1], pos[2])
                vertexArray.append(v)

                uv = vertex_data[1]
                uArray.append(uv[0])
                vArray.append(uv[1])

            elif line.find("f") == 0:
                face_line = line[1:]
                face_data = []
                face_data = parse_mesh_element(face_line)

                print face_data

                polygonCounts.append(3)

                polygonConnects.append(int(face_data[1]))
                polygonConnects.append(int(face_data[2]))
                polygonConnects.append(int(face_data[3]))

    mFn_Mesh = OpenMaya.MFnMesh()
    m_DagMod = OpenMaya.MDagModifier()
    new_object = m_DagMod.createNode('transform')

    mFn_Mesh.create(vertexArray, polygonCounts, polygonConnects, uArray,
                    vArray, new_object)
    mFn_Mesh.setName(mesh_name)
    m_DagMod.doIt()

    new_mesh = pmc.PyNode(mesh_name)
    new_transform = pmc.listRelatives(new_mesh, type='transform',
                                      parent=True)[0]

    mFn_Mesh.assignUVs(polygonCounts, polygonConnects, 'map1')

    node_name = mesh_name + "_mesh"

    pmc.select(new_transform)
    pmc.rename(new_transform, node_name)

    file.close()
Example #7
def read_skc_file(scale_rate, skc_path, mesh_name):
    file = open(skc_path, "r")

    line = file.readline()

    vertexArray = OpenMaya.MFloatPointArray()
    uArray = OpenMaya.MFloatArray()
    vArray = OpenMaya.MFloatArray()

    polygonCounts = OpenMaya.MIntArray()
    polygonConnects = OpenMaya.MIntArray()

    vertexWeights = []

    material_num = 0
    material_sets = []

    while True:
        line = file.readline()
        if not line:
            break

        line = strip_space_line(line)

        if line.find("Materials") != -1:
            line = strip_space_line(line)
            material_num = int(line.split(":")[1])
            for i in range(0, int(material_num)):
                material_sets.append(0)
        elif line.find("Vertices") != -1:
            print line
        elif line[0] == "v":
            vertex_data = parse_pos_uv_weight(line)
            pos = vertex_data[0]
            pos[0] *= scale_rate
            pos[1] *= scale_rate
            pos[2] *= scale_rate
            v = OpenMaya.MFloatPoint(pos[0], pos[1], pos[2])
            vertexArray.append(v)

            uv = vertex_data[1]
            uArray.append(uv[0])
            vArray.append(uv[1])

            # bone weights
            skin_data = vertex_data[2]
            weight_num = skin_data[0]
            weights = []
            for bi in range(0, int(weight_num)):
                tmp_bone_idx = skin_data[int(1 + 2 * bi)]
                tmp_bone_name = bone_name_list[int(tmp_bone_idx)]
                tmp_bone_weight = skin_data[int(2 + 2 * bi)]
                key_value = (tmp_bone_name, tmp_bone_weight)
                weights.append(key_value)
            vertexWeights.append(weights)

        elif line.find("Triangles") != -1:
            print line
        elif line[0] == "f":
            face_data = parse_face(line)
            polygonCounts.append(3)

            polygonConnects.append(int(face_data[2]))
            polygonConnects.append(int(face_data[3]))
            polygonConnects.append(int(face_data[4]))

            # assign material
            material_sets[int(face_data[1])] += 1

    mFn_Mesh = OpenMaya.MFnMesh()
    m_DagMod = OpenMaya.MDagModifier()
    new_object = m_DagMod.createNode('transform')

    mFn_Mesh.create(vertexArray, polygonCounts, polygonConnects, uArray,
                    vArray, new_object)
    mFn_Mesh.setName(mesh_name)
    m_DagMod.doIt()

    new_mesh = pmc.PyNode(mesh_name)
    new_transform = pmc.listRelatives(new_mesh, type='transform',
                                      parent=True)[0]

    mFn_Mesh.assignUVs(polygonCounts, polygonConnects, 'map1')

    node_name = mesh_name + "_mesh"

    pmc.select(new_transform)
    pmc.rename(new_transform, node_name)

    # skin cluster
    pmc.select(bone_name_list[0], add=True)
    skin_cluster = pmc.skinCluster(bindMethod=0,
                                   skinMethod=1,
                                   normalizeWeights=0,
                                   maximumInfluences=4,
                                   obeyMaxInfluences=True)
    pmc.select(node_name, r=True)
    pmc.skinPercent(skin_cluster, node_name, normalize=False, pruneWeights=100)

    for v in range(0, len(vertexWeights)):
        pmc.skinPercent(skin_cluster,
                        "{0}.vtx[{1}]".format(node_name, v),
                        transformValue=vertexWeights[v],
                        normalize=True)

    #create material
    #pCylinder1.f[14:17] _mesh.f[{0}:{1}].format()

    material_starts = []
    material_ends = []
    mesh_selcte_sets = []

    material_sets = filter(lambda x: x != 0, material_sets)
    print material_sets

    material_starts.append(0)
    material_ends.append(material_sets[0] - 1)
    mesh_selcte_sets.append(
        node_name +
        ".f[{0}:{1}]".format(int(material_starts[0]), int(material_ends[0])))
    for i in range(1, len(material_sets)):
        material_starts.append(material_ends[int(i - 1)] + 1)
        material_ends.append(material_ends[int(i - 1)] + material_sets[i])
        mesh_selcte_sets.append(node_name + ".f[{0}:{1}]".format(
            int(material_starts[i]), int(material_ends[i])))

    print mesh_selcte_sets

    # Without clearing the previous selection here, the following steps will error
    pmc.select(clear=True)

    for i in range(0, len(mesh_selcte_sets)):
        shader_name = "p_shader{0}".format(int(i))
        new_shader = pmc.shadingNode("lambert",
                                     asShader=True,
                                     name=shader_name)
        new_shadinggroup = pmc.sets(renderable=True,
                                    noSurfaceShader=True,
                                    empty=True,
                                    name='{}_SG'.format(shader_name))
        pmc.connectAttr(new_shader.outColor, new_shadinggroup.surfaceShader)

        pmc.select(mesh_selcte_sets[i])
        pmc.hyperShade(assign=new_shadinggroup)
        pmc.select(clear=True)
Example #8
polygonConnects = OpenMaya.MIntArray()
polygonConnects.append(0)
polygonConnects.append(2)
polygonConnects.append(1)
polygonConnects.append(0)
polygonConnects.append(3)
polygonConnects.append(2)
polygonConnects.append(3)
polygonConnects.append(4)
polygonConnects.append(2)
polygonConnects.append(3)
polygonConnects.append(5)
polygonConnects.append(4)

uArray = OpenMaya.MFloatArray()
vArray = OpenMaya.MFloatArray()

uArray.append(0.0)
vArray.append(1.0)

uArray.append(0.0)
vArray.append(0.0)

uArray.append(1.0)
vArray.append(0.0)

uArray.append(1.0)
vArray.append(1.0)

uArray.append(2.0)
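
The listing above stops partway through the UV arrays; once they and a vertex array are complete, the usual next step is to hand everything to MFnMesh. A minimal sketch of that step, assuming the API 2.0 module (maya.api.OpenMaya as OpenMaya) and a finished MFloatPointArray named vertexArray that is not shown above:

polygonCounts = OpenMaya.MIntArray([3, 3, 3, 3])  # four triangles, matching the twelve ids above

fnMesh = OpenMaya.MFnMesh()
newMesh = fnMesh.create(vertexArray, polygonCounts, polygonConnects, uArray, vArray)
fnMesh.assignUVs(polygonCounts, polygonConnects)  # UV ids follow the vertex ids here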
Example #9
import maya.api.OpenMaya as om  # assumed alias; the 1.0 module's MFloatArray behaves the same here


def makeMFloatArray(xmldata):
    myFloatArray = om.MFloatArray()
    for each in xmldata.attrib['value'].split(' '):
        myFloatArray.append(float(each))
    return myFloatArray
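
Hypothetical usage, assuming the element comes from xml.etree.ElementTree and carries a space-separated value attribute:

import xml.etree.ElementTree as ET

elem = ET.fromstring('<weights value="0.0 0.5 1.0"/>')
arr = makeMFloatArray(elem)  # MFloatArray holding 0.0, 0.5, 1.0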
Example #10
def readFile(filepath):
    files = path_wrangler(filepath)
    files.get_files()

    md = open(files.data['uexp'], 'rb')
    ua = open(files.data['uasset'], 'rb')

    meshName = files.data['meshName']
    submesh_name = files.data['submesh_name']

    arm = False
    weightData = {}
    Weight_array = []
    vertexArray = []
    NA = []
    normal_array = []

    faces = []

    U0 = om2.MFloatArray()
    V0 = om2.MFloatArray()
    U1 = om2.MFloatArray()
    V1 = om2.MFloatArray()
    U2 = om2.MFloatArray()
    V2 = om2.MFloatArray()
    U3 = om2.MFloatArray()
    V3 = om2.MFloatArray()
    U4 = om2.MFloatArray()
    V4 = om2.MFloatArray()
    U5 = om2.MFloatArray()
    V5 = om2.MFloatArray()

    t1 = time.time()
    names = readUasset(ua)

    pattern0 = re.compile(b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00........')
    for x in xrange(20000):
        s1 = struct.unpack("18s", md.read(18))[0]
        if pattern0.match(s1):
            c0 = struct.unpack("<L", md.read(4))[0]
            c1 = struct.unpack("<L", md.read(4))[0]
            c2 = struct.unpack("<L", md.read(4))[0]
            c3 = struct.unpack("<L", md.read(4))[0]
            c4 = struct.unpack("<L", md.read(4))[0]
            if (c0 and c1 and c2 and c3 and c4 > 1000000000):
                break
            else:
                md.seek(-20, 1)
        md.seek(-17, 1)

    materialCount = struct.unpack("<L", md.read(4))[0]
    materials = {}
    for m in xrange(materialCount):
        materials[m] = {}
        materials[m]['val0'] = struct.unpack("<l", md.read(4))[0]
        stringIndex = struct.unpack("<L", md.read(4))[0]
        unk0 = struct.unpack("<L", md.read(4))[0]
        unk1 = struct.unpack("<L", md.read(4))[0]
        unk2 = struct.unpack("<L", md.read(4))[0]
        unk3 = struct.unpack("<f", md.read(4))[0]
        unk4 = struct.unpack("<f", md.read(4))[0]
        unk5 = struct.unpack("<L", md.read(4))[0]
        unk6 = struct.unpack("<L", md.read(4))[0]

        materials[m]['name'] = names[stringIndex]

    boneCount = struct.unpack("<L", md.read(4))[0]
    joint_data = {}
    bt = np.empty([1, boneCount], dtype='U32')
    for i in xrange(boneCount):
        string_index = struct.unpack("<L", md.read(4))[0]
        jName = names[string_index]
        unk = struct.unpack("<L", md.read(4))[0]
        parent = struct.unpack("<l", md.read(4))[0]

        joint_data[i] = {"name": jName, "parent": parent}
        bt[0, i] = jName

    boneCount2 = struct.unpack("<L", md.read(4))[0]
    bone_list = []
    boneArray = []
    for k in xrange(boneCount2):
        m1 = struct.unpack("<10f", md.read(40))
        boneName = joint_data[k]["name"]
        BNparent = joint_data[k]["parent"]
        boneArray.append(boneName)

        BNps = om.MVector(-m1[5], -m1[6], m1[4])
        BNrt = om.MQuaternion(m1[1], m1[2], -m1[0], -m1[3])
        BNsc = om.MVector(-m1[8], -m1[9], -m1[7])

        if BNparent == -1:
            cmds.select(clear=True)
        else:
            pm.select(bone_list[BNparent])
        newBone = pm.joint(p=(0, 0, 0), name=boneName, radius=0.1)
        newBone.setTranslation(BNps)
        newBone.setOrientation(BNrt)
        newBone.setScale(BNsc)

        bone_list.append(newBone)
    arm = True

    boneCount3 = struct.unpack("<L", md.read(4))[0]
    md.seek(boneCount3 * 12, 1)

    vertexGroups = {}
    unk0 = struct.unpack("<L", md.read(4))[0]
    unk1 = struct.unpack("B", md.read(1))[0]
    unk2 = struct.unpack("B", md.read(1))[0]
    groupCount = struct.unpack("<L", md.read(4))[0]
    for m in xrange(groupCount):
        z1 = struct.unpack("<H", md.read(2))[0]
        ID = struct.unpack("<H", md.read(2))[0]

        md.seek(24, 1)
        vertexGroups[ID] = {'range': 0, 'bones': []}

        # pragma region bone palette
        start = struct.unpack("<L", md.read(4))[0]
        count = struct.unpack("<L", md.read(4))[0]
        bt = np.empty([1, count], dtype='U32')
        for bn in xrange(count):
            bid = struct.unpack("<H", md.read(2))[0]
            vertexGroups[ID]['bones'].append(bid)
            bt[0, bn] = joint_data[bid]["name"]
        # pragma endregion bone palette

        size = struct.unpack("<L", md.read(4))[0]
        stop = start + size
        vertexGroups[ID]['range'] = range(start, stop)
        vertexGroups[ID]["start"] = start
        vertexGroups[ID]["stop"] = stop
        vertexGroups[ID]["size"] = size
        vertexGroups[ID]["names"] = bt

        md.seek(34, 1)
        FFx4 = readHexString(md, 4)
        flag = struct.unpack("<L", md.read(4))[0]
        if flag:  # extra data for this group
            count = struct.unpack("<L", md.read(4))[0]
            md.seek(count * 16, 1)
        else:
            null = struct.unpack("<L", md.read(4))[0]

    unk = struct.unpack("B", md.read(1))[0]
    checkHere = md.tell()
    stride = struct.unpack("<L", md.read(4))[0]
    fCount = struct.unpack("<L", md.read(4))[0]

    faceByteCount = fCount * stride
    fi = np.fromfile(md, dtype='B', count=faceByteCount)
    if stride == 4:
        fi_0 = fi.view(dtype='<L').reshape(fCount // 3, 3)
    elif stride == 2:
        fi_0 = fi.view(dtype='<H').reshape(fCount // 3, 3)
    fi_1 = fi_0.ravel()
    faces = tuple(fi_1)

    pCounts = [3] * (len(faces) // 3)

    gh = pCounts[0]

    unkCount = struct.unpack("<L", md.read(4))[0]
    md.seek(unkCount * 2, 1)

    unk = struct.unpack("<L", md.read(4))[0]
    vertexCount = struct.unpack("<L", md.read(4))[0]
    boneCount = struct.unpack("<L", md.read(4))[0]
    md.seek(boneCount * 2, 1)

    null0 = struct.unpack("<L", md.read(4))[0]
    null1 = struct.unpack("<L", md.read(4))[0]

    uv_count = struct.unpack("<L", md.read(4))[0]
    unk0 = struct.unpack("<H", md.read(2))[0]
    uv_count2 = struct.unpack("<L", md.read(4))[0]

    null2 = struct.unpack("<L", md.read(4))[0]

    unk1 = struct.unpack("<f", md.read(4))[0]
    unk2 = struct.unpack("<f", md.read(4))[0]
    unk3 = struct.unpack("<f", md.read(4))[0]

    null3 = struct.unpack("<L", md.read(4))[0]
    null4 = struct.unpack("<L", md.read(4))[0]
    null5 = struct.unpack("<L", md.read(4))[0]

    vStride = struct.unpack("<L", md.read(4))[0]
    vCount = struct.unpack("<L", md.read(4))[0]

    byteCount = vCount * vStride
    vi = np.fromfile(md, dtype='B', count=byteCount).reshape((vCount, vStride))
    pos = vi[:, 8:20].ravel().view(dtype='<f').reshape((vCount, 3))
    pos[:, [0, 2]] = pos[:, [2, 0]]
    pos[:, [0, 1]] = pos[:, [1, 0]]
    pos[:, [2]] *= -1
    positions = pos.tolist()
    VA = om2.MFloatPointArray(positions)

    if uv_count > 0:
        uvData_ = vi[:, 20:24].ravel().view(dtype='<f2').reshape((vCount, 2))
        uvData_[:, 1:2] *= -1
        uvData_[:, 1:2] += 1
        uvData = tuple(map(tuple, uvData_))
        u = zip(*uvData)[0]
        v = zip(*uvData)[1]
        U0.copy(u)
        V0.copy(v)
    if uv_count > 1:
        uvData_ = vi[:, 24:28].ravel().view(dtype='<f2').reshape((vCount, 2))
        uvData_[:, 1:2] *= -1
        uvData_[:, 1:2] += 1
        uvData = tuple(map(tuple, uvData_))
        u = zip(*uvData)[0]
        v = zip(*uvData)[1]
        U1.copy(u)
        V1.copy(v)
    if uv_count > 2:
        uvData_ = vi[:, 28:32].ravel().view(dtype='<f2').reshape((vCount, 2))
        uvData_[:, 1:2] *= -1
        uvData_[:, 1:2] += 1
        uvData = tuple(map(tuple, uvData_))
        u = zip(*uvData)[0]
        v = zip(*uvData)[1]
        U2.copy(u)
        V2.copy(v)
    if uv_count > 3:
        uvData_ = vi[:, 32:36].ravel().view(dtype='<f2').reshape((vCount, 2))
        uvData_[:, 1:2] *= -1
        uvData_[:, 1:2] += 1
        uvData = tuple(map(tuple, uvData_))
        u = zip(*uvData)[0]
        v = zip(*uvData)[1]
        U3.copy(u)
        V3.copy(v)
    if uv_count > 4:
        uvData_ = vi[:, 36:40].ravel().view(dtype='<f2').reshape((vCount, 2))
        uvData_[:, 1:2] *= -1
        uvData_[:, 1:2] += 1
        uvData = tuple(map(tuple, uvData_))
        u = zip(*uvData)[0]
        v = zip(*uvData)[1]
        U4.copy(u)
        V4.copy(v)

    mesh = om2.MFnMesh()
    ShapeMesh = cmds.group(em=True)
    parentOwner = get_mobject(ShapeMesh)
    meshMObj = mesh.create(VA,
                           pCounts,
                           faces,
                           uValues=U0,
                           vValues=V0,
                           parent=parentOwner)

    cmds.sets(ShapeMesh, e=True, forceElement='initialShadingGroup')

    cmds.polyUVSet(rename=True,
                   newUVSet='map_0',
                   uvSet=mesh.currentUVSetName(-1))
    mesh.setUVs(U0, V0, 'map_0')
    mesh.assignUVs(pCounts, faces, 'map_0')

    cmds.rename(meshName)

    s1 = cmds.ls(sl=1)
    s2 = s1[0]
    shapeName = s2.encode('ascii', 'ignore')

    if uv_count > 1:
        mesh.createUVSet('map_1')
        mesh.setUVs(U1, V1, 'map_1')
        mesh.assignUVs(pCounts, faces, 'map_1')
    if uv_count > 2:
        mesh.createUVSet('map_2')
        mesh.setUVs(U2, V2, 'map_2')
        mesh.assignUVs(pCounts, faces, 'map_2')
    if uv_count > 3:
        mesh.createUVSet('map_3')
        mesh.setUVs(U3, V3, 'map_3')
        mesh.assignUVs(pCounts, faces, 'map_3')
    if uv_count > 4:
        mesh.createUVSet('map_4')
        mesh.setUVs(U4, V4, 'map_4')
        mesh.assignUVs(pCounts, faces, 'map_4')

    unkS = struct.unpack("<H", md.read(2))[0]
    extraBoneWeights = struct.unpack("<L", md.read(4))[0]
    wCount = struct.unpack("<L", md.read(4))[0]
    stride = struct.unpack("<L", md.read(4))[0]
    wCount2 = struct.unpack("<L", md.read(4))[0]

    subStride = int(stride / 2)

    clusterName = shapeName + '_' + 'skinCluster'
    pm.skinCluster(boneArray[:], shapeName, sm=1, mi=8, omi=1, n=clusterName)

    skin = mm.eval('findRelatedSkinCluster "' + s2 + '"')

    sel = om.MSelectionList()
    sel.add(shapeName)
    meshMObject = om.MObject()
    sel.getDependNode(0, meshMObject)

    sel2 = om.MSelectionList()
    sel2.add(skin)
    skinMObject = om.MObject()
    sel2.getDependNode(0, skinMObject)

    FnSkin = oma.MFnSkinCluster(skinMObject)
    dag_path, skinMObject = get_skin_dag_path_and_mobject(FnSkin)
    weights = om.MDoubleArray()
    influence_paths = om.MDagPathArray()
    influence_count = FnSkin.influenceObjects(influence_paths)
    components_per_influence = weights.length() / influence_count

    # influences
    unused_influences = list()
    influences = [
        influence_paths[inf_count].partialPathName()
        for inf_count in xrange(influence_paths.length())
    ]

    wSize = vCount * influence_count
    weights = om.MDoubleArray(wSize, 0.0)

    w_byteCount = wCount * stride
    wi = np.fromfile(md, dtype='B', count=w_byteCount).reshape(
        (wCount, stride))
    wi_b = wi[:, :subStride].ravel().view().reshape((wCount, subStride))
    wi_w = wi[:, subStride:stride].ravel().view().reshape(
        (wCount, subStride)).astype(np.float64)
    wi_w /= 255.0

    def do_stuff(n):
        fg = influences.index(L[n])
        idx = fg + (j * influence_count)
        weights[idx] = W[n]

    for h in xrange(len(vertexGroups)):
        crnt_grp = vertexGroups[h]
        g_names = crnt_grp["names"]
        g_range = crnt_grp["range"]
        for j in g_range:
            Wt = np.trim_zeros(wi_w[j], 'b')
            W = tuple(Wt)
            a = Wt.shape[0]
            ids = wi_b[j, :a]
            L = tuple(g_names[0, ids])

            map(do_stuff, range(len(L)))
    influence_array = om.MIntArray(influence_count)
    m_util.createIntArrayFromList(range(influence_count), influence_array)
    FnSkin.setWeights(dag_path, skinMObject, influence_array, weights, False)

    createMaterials(files, vertexGroups, materials, shapeName)

    pm.select(shapeName)
    cmds.viewFit()

    md.close()
    ua.close()

    elapsed = time.time() - t1
    return elapsed
Example #11
import maya.api.OpenMaya as mayaApi

selList = mayaApi.MSelectionList()

selList.add( 'pSphereShape1' )
fnMesh = mayaApi.MFnMesh( selList.getDagPath( 0 ) )

fnMesh.getUVSetNames()

uArray = mayaApi.MFloatArray()
vArray = mayaApi.MFloatArray()
uvCounts = mayaApi.MIntArray()
uvIds    = mayaApi.MIntArray()
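
The arrays above are left empty; with the API 2.0 module imported here, MFnMesh returns the UV data directly instead of filling passed-in arrays. A minimal sketch of fetching the sphere's default UV set:

uArray, vArray = fnMesh.getUVs('map1')           # per-UV u and v values
uvCounts, uvIds = fnMesh.getAssignedUVs('map1')  # per-face UV counts and UV ids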
Example #12
    def compute(self, pPlug, pData):

        #only compute if output is in out array
        if (pPlug.parent() == generalIk.aOutRot):
            # descend into coordinated cycles
            # inputs
            solverDH = pData.inputValue(generalIk.aSolver)
            solver = solverDH.asInt()

            iterationsDH = pData.inputValue(generalIk.aMaxIter)
            maxIter = iterationsDH.asInt()

            toleranceDH = pData.inputValue(generalIk.aTolerance)
            tolerance = toleranceDH.asFloat()

            rootMat = om.MMatrix()
            rootMatDH = pData.inputValue(generalIk.aRootMat)
            rootMat = rootMatDH.asMatrix()

            # target
            targetMat = om.MMatrix()
            targetMatDH = pData.inputValue(generalIk.aTargetMat)
            targetMat = targetMatDH.asMatrix()

            endMat = om.MMatrix()
            endMatDH = pData.inputValue(generalIk.aEndMat)
            endMat = endMatDH.asMatrix()
            print "endMat at start is {}".format(endMat)

            # get everything in root space
            targetRSmat = targetMat.__mul__(rootMat.inverse())

            # get reference matrix array from tip to root
            # this lets us virtually rebuild the parent-child hierarchy
            inJntArrayDH = pData.inputArrayValue(generalIk.aJnts)
            inLength = inJntArrayDH.__len__()
            # DON'T FORGET our useful arrays are one entry shorter than we have
            chainArray = om.MMatrixArray()
            jntArray = om.MMatrixArray()

            jntWeights = om.MFloatArray()

            for i in range(inLength):
                inJntArrayDH.jumpToPhysicalElement(i)
                childCompDH = inJntArrayDH.inputValue()

                childMat = om.MMatrix()
                childMatDH = om.MDataHandle(
                    childCompDH.child(generalIk.aJntMat))
                #childUpMatDH = om.MDataHandle(childCompDH.child(generalIk.aJntUpMat))
                # not needed atm
                childMat = childMatDH.asMatrix()
                print "joint{}mat is {}".format(i, childMat)

                parentMat = om.MMatrix()
                if i != 0:
                    inJntArrayDH.jumpToPhysicalElement(i - 1)
                    parentCompDH = inJntArrayDH.inputValue()
                    parentMatDH = om.MDataHandle(
                        parentCompDH.child(generalIk.aJntMat))
                    parentMat = parentMatDH.asMatrix()
                else:
                    # only applies to last in loop (so first in joints)
                    parentMat = rootMat

                chainMat = childMat.__mul__(parentMat.inverse())
                chainArray.append(chainMat)
                #chainArray is every joint in the space of its parent
                #chainArray[i] = jntMat[i] x jntMat[i-1].inverse
                jntArray.append(childMat)
                #childArray is just the joint matrices

                childWeightDH = om.MDataHandle(
                    childCompDH.child(generalIk.aJntWeight))
                childWeight = childWeightDH.asFloat()
                print "childWeight is {}".format(childWeight)

                jntWeights.append(childWeight)

            print ""
            #from here applies only to ccd algorithm

            gap = 1.0
            iter = 0
            while (iter < maxIter) and (gap > tolerance):
                print "ITERATION {}".format(iter)
                print ""

                for i in range(inLength):

                    print "computing joint {} of {}".format(i, inLength)

                    # welcome to the bone zone

                    # reconstruct hierarchy with matrices from previous iteration
                    # currently target is known in rootspace, and end isn't known at all
                    # backwards to get target and forwards to get end, both in active joint space
                    #
                    #                  +(target)
                    #               .
                    #             .
                    #           O
                    #         /   \       X(end)
                    #       /       \   /
                    #     /           O
                    #   /
                    #(root)
                    # this works by getting vector from active joint to end, from active joint to target,
                    # then aligning one to the other. joints are assumed to have direct parent-child
                    # hierarchy, so all rotations are inherited rigidly

                    # ccd operates from tip to root - see reference for actual algorithm
                    jntMat = om.MMatrix()
                    jntMat = jntArray.__getitem__(inLength - (i + 1))
                    print "jntMat is {}".format(jntMat)
                    chainMult = om.MMatrix()

                    #backwards
                    for b in range(inLength - i):
                        #multiply targetRootSpaceMat by inverse chainMats,
                        #starting from ROOT
                        print "chainArray[b] is {}".format(chainArray[b])

                        if b == 0:
                            # at first joint, only equals root matrix
                            chainMult = chainArray[b].inverse()

                        else:
                            chainMult.__imul__(chainArray[b].inverse())

                        print "chainMult at {} is {}".format(b, chainMult)
                        print "endMat is {}".format(endMat)
                        print "endMat x chainMult is {}".format(
                            endMat.__mul__(chainMult))

                    #forwards
                    # only need end in joint space
                    endJSmat = endMat.__mul__(jntMat.inverse())
                    targetJSmat = targetRSmat * chainMult

                    # check gap is still important
                    gapVec = om.MVector(targetJSmat[12] - endJSmat[12],
                                        targetJSmat[13] - endJSmat[13],
                                        targetJSmat[14] - endJSmat[14])
                    gap = gapVec.length()
                    if gap > tolerance:

                        print "gap is {}".format(gap)
                        print "end JS is {}".format(endJSmat)
                        print "endMat v2 at {} is {}".format(i, endMat)
                        print "target JS is {}".format(targetJSmat)
                        targetJStrans = om.MTransformationMatrix(
                            targetJSmat).translation(1)
                        print "targetJStrans at {} is {}".format(
                            i, targetJStrans)

                        # we don't just want to aim each joint, we want to
                        # aim the end, by rotating each joint in turn

                        # first get the aim matrix from joint to end
                        endAimMat = om.MMatrix()
                        endAimMat = lookAt(jntMat, endJSmat)
                        print "endAimMat is {}".format(endAimMat)
                        #jntMat.__imul__(endAimMat)

                        # then from that, aim from end to target
                        targetAimMat = om.MMatrix()
                        targetAimMat = lookAt(jntMat, targetJSmat)

                        # is weighting this simple?
                        #print "jntWeight is {}".format(jntWeights.__getitem__(i))
                        targetAimMat.__imul__(jntWeights.__getitem__(i))
                        print "targetAimMat is {}".format(targetAimMat)

                        #constraints are going to be fun, but for now
                        jntMat.__imul__(targetAimMat)
                        endMat.__imul__(targetAimMat)

                        print "finalEndMat is {}".format(endMat)

                        #end of if block and aim procedure
                        print ""
                    else:
                        print ""
                        print "gap is within tolerance, ending"
                        break

                    jntArray.__setitem__(i, jntMat)
                    #end of single iteration along joint chain
                print ""

                iter = iter + 1

                #end of all iterations, computation has completed

            #convert jntArray of matrices to useful rotation values
            outArrayDH = pData.outputArrayValue(generalIk.aOutArray)

            targetRSTransA = om.MTransformationMatrix(targetRSmat).translation(
                4)
            print "target in RS world is {}".format(targetRSTransA)

            for i in range(0, inLength):
                outArrayDH.jumpToPhysicalElement(i)
                outCompDH = outArrayDH.outputValue()

                outRotDH = outCompDH.child(generalIk.aOutRot)
                outRxDH = outRotDH.child(generalIk.aOutRx)
                outRyDH = outRotDH.child(generalIk.aOutRy)
                outRzDH = outRotDH.child(generalIk.aOutRz)

                outRotVals = om.MTransformationMatrix(jntArray[i]).rotation()
                # unitConversions bring SHAME on family
                xAngle = om.MAngle(outRotVals[0])
                yAngle = om.MAngle(outRotVals[1])
                zAngle = om.MAngle(outRotVals[2])
                outRxDH.setMAngle(xAngle)
                outRyDH.setMAngle(yAngle)
                outRzDH.setMAngle(zAngle)

            outArrayDH.setAllClean()

            pData.setClean(pPlug)