	def __init__( self ):
		BaseMelWindow.__init__( self )
		ApplyToRigForm( self )

		cmd.setParent( self.getMenu( 'File' ), menu=True )
		cmd.menuItem( l="Open Offset Editor", c=lambda *a: CreateRigOffsetEditor() )

		cmd.setParent( self.getMenu( 'Help' ), menu=True )
		cmd.menuItem( l="Help...", c=lambda *a: webbrowser.open( 'https://intranet.valvesoftware.com/wiki/index.php/Apply_to_Rig' ) )
		cmd.menuItem( d=True )
		bugReporterUI.addBugReporterMenuItems( TOOL_NAME )

		filesystem.reportUsageToAuthor()
		self.show()
def applyWeights( dmeMesh, weightData, usePosition=True, tolerance=TOL, axisMult=None, swapParity=True, averageVerts=True, meshNameRemapDict=None, jointNameRemapDict=None ):
	'''
	loads weights back on to a model given a file.  NOTE: the tolerance is an axis tolerance,
	NOT a distance tolerance - ie each axis must fall within the value of the given vector to
	be considered a match.  this makes matching a heap faster because vectors can be culled
	from a sorted list (see the illustrative sketch after this function).  possibly implementing
	some sort of oct-tree class might speed up the matching more, but... the majority of weight
	loading time at this stage is spent by maya actually applying skin weights, not the
	positional searching
	'''
	assert isinstance( dmeMesh, vs.movieobjects.CDmeMesh )

	reportUsageToAuthor()
	start = time.clock()
	unfoundVerts = []

	miscData, joints, jointHierarchies, weightData = weightData

	if miscData[ filesystem.kEXPORT_DICT_TOOL_VER ] != TOOL_VERSION:
		print "WARNING: the file being loaded was stored from an older version (%d) of the tool - please re-generate the file.  Current version is %d." % (miscData[ filesystem.kEXPORT_DICT_TOOL_VER ], TOOL_VERSION)

	#setup the mappings
	VertSkinWeight.MESH_NAME_REMAP_DICT = meshNameRemapDict
	VertSkinWeight.JOINT_NAME_REMAP_DICT = jointNameRemapDict

	#axisMult can be used to alter the positions of verts saved in the weightData array - this is mainly useful for applying
	#weights to a mirrored version of a mesh - so weights can be stored on meshA, meshA duplicated to meshB, and then the
	#saved weights can be applied to meshB by specifying an axisMult=(-1,1,1) OR axisMult=(-1,) - see the usage note after this function
	if axisMult is not None:
		for data in weightData:
			for n, mult in enumerate( axisMult ):
				data[ n ] *= mult

		#we need to re-sort the weightData as the multiplication could have potentially reversed things...  i could probably
		#be a bit smarter about when to re-order, but its not a huge hit...  so, meh
		weightData = sortByIdx( weightData )

		#using axisMult for mirroring also often means you want to swap parity tokens on joint names - if so, do that now.
		#parity needs to be swapped in both joints and jointHierarchies
		if swapParity:
			for joint, target in joints.iteritems():
				joints[ joint ] = str( names.Name( target ).swap_parity() )
			for joint, parents in jointHierarchies.iteritems():
				jointHierarchies[ joint ] = [ str( names.Name( p ).swap_parity() ) for p in parents ]

	num = dmeMesh.numVerts()
	verts = xrange( num )

	findMethod = findBestVector
	if averageVerts:
		findMethod = getDistanceWeightedVector

	#if we're using position, the restore weights path is quite different
	setVertWeight = dmeMesh.setVertWeight
	if usePosition:
		vertPositions = dmeMesh.currentState.positions

		print "starting first iteration with", len( weightData ), "verts"

		iterationCount = 1
		while True:
			unfoundVerts = []
			unfoundVertsAppend = unfoundVerts.append
			foundVerts = []
			foundVertsAppend = foundVerts.append

			for vert in verts:
				pos = vertPositions[ vert ]
				pos = Vector( (pos.x, pos.y, pos.z) )
				vertData = findMethod( pos, weightData, tolerance )

				try:
					#unpack data to locals
					try:
						jointList, weightList = vertData.joints, vertData.weights
					except AttributeError:
						raise NoVertFound

					try:
						#re-map joints to their actual values
						actualJointNames = [ joints[ j ] for j in jointList ]

						#check sizes - if joints have been remapped, there may be two entries for a joint
						#in the re-mapped jointList - in this case, we need to re-gather weights
						actualJointsAsSet = set( actualJointNames )
						if len( actualJointsAsSet ) != len( actualJointNames ):
							#so if the set sizes are different, then at least one of the joints is listed twice,
							#so we need to gather up its weights into a single value
							new = {}
							[ new.setdefault( j, 0 ) for j in actualJointNames ]  #init the dict with 0 values
							for j, w in zip( actualJointNames, weightList ):
								new[ j ] += w

							#if the weightList is empty after re-gathering, nothing to do - keep loopin
							actualJointNames, weightList = new.keys(), new.values()
							if not weightList:
								raise NoVertFound
					except KeyError:
						#if there was a key error, then one of the joints was removed from the joints dict
						#as it wasn't found in the scene - so get the missing joints (missingJoints is
						#expected to have been built earlier from the joints not found in the scene),
						#remove them from the list and renormalize the remaining weights
						jointListSet = set( jointList )
						diff = missingJoints.difference( jointListSet )
						weightList = renormalizeWeights( jointList, weightList, diff )
						actualJointNames = [ joints[ j ] for j in jointList ]

						#if the weightList is empty after renormalizing, nothing to do - keep loopin
						if not weightList:
							raise NoVertFound

					setVertWeight( vert, actualJointNames, weightList )

					foundVertData = VertSkinWeight( pos )
					foundVertData.populate( vertData.mesh, vertData.idx, actualJointNames, weightList )
					foundVertsAppend( foundVertData )
				except NoVertFound:
					unfoundVertsAppend( vert )

			#so with the unfound verts - sort them, call them "verts" and iterate over them with the newly grown weight data.
			#the idea here is that when a vert is found its added to the weight data (in memory not on disk).  by performing
			#another iteration for the previously un-found verts, we should be able to get a better approximation
			verts = unfoundVerts
			if unfoundVerts:
				if foundVerts:
					weightData = sortByIdx( foundVerts )
				else:
					print "### still unfound verts, but no new matches were made in previous iteration - giving up.  %d iterations performed" % iterationCount
					break
			else:
				print "### all verts matched!  %d iterations performed" % iterationCount
				break

			iterationCount += 1
			print "iteration %d - using %d verts (increasing the search radius will reduce iterations)" % (iterationCount, len( weightData ))

	#otherwise simply restore by id
	else:
		#rearrange the weightData structure so its ordered by id
		meshName = dmeMesh.name
		weightDataById = {}
		[ weightDataById.setdefault( i.getVertName(), (i.joints, i.weights) ) for i in weightData ]

		for vert in verts:
			try:
				jointList, weightList = weightDataById[ '%s.%d' % (meshName, vert) ]
				setVertWeight( vert, jointList, weightList )
			except KeyError:
				#in this case, the vert doesn't exist in the file...
				print '### no point found for %s' % vert
				continue

	end = time.clock()
	print 'time for weight load %.02f secs' % (end - start)
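
#---------------------------------------------------------------------------------------------
#illustrative sketch only - not used by the tool.  the applyWeights docstring describes the
#tolerance as a per-axis tolerance rather than a distance tolerance, which is what lets
#candidate verts be culled from a list sorted on one axis.  the helper below shows that idea
#on plain (x, y, z) tuples; the real findBestVector/getDistanceWeightedVector are assumed to
#work along these lines but aren't reproduced here
#---------------------------------------------------------------------------------------------
def _exampleAxisToleranceMatch( pos, sortedPositions, tolerance ):
	'''
	sortedPositions is a list of (x, y, z) tuples sorted on x.  returns the index of the closest
	position whose x, y AND z each fall within tolerance of pos, or None if nothing matches
	'''
	import bisect

	x, y, z = pos

	#cull on the sorted axis first - only positions with x inside [x-tolerance, x+tolerance] can possibly
	#match.  a real implementation would pre-compute the sorted x values once, not per query
	xs = [ p[0] for p in sortedPositions ]
	lo = bisect.bisect_left( xs, x - tolerance )
	hi = bisect.bisect_right( xs, x + tolerance )

	best = None
	bestDist = None
	for idx in xrange( lo, hi ):
		px, py, pz = sortedPositions[ idx ]

		#each remaining axis must independently fall within the tolerance
		if abs( py - y ) > tolerance or abs( pz - z ) > tolerance:
			continue

		dist = (px - x) ** 2 + (py - y) ** 2 + (pz - z) ** 2
		if best is None or dist < bestDist:
			best, bestDist = idx, dist

	return best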
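
#usage note (hypothetical example, not taken from the tool's code): transferring saved weights on to
#a mirrored duplicate of a mesh, per the axisMult comment in applyWeights.  weightData is the
#(miscData, joints, jointHierarchies, weightData) tuple unpacked by applyWeights; how it gets loaded
#from disk is outside this excerpt, and mirroredDmeMesh is just a placeholder name
#
#	applyWeights( mirroredDmeMesh, weightData,
#	              usePosition=True,   #match verts by position - axisMult only alters the stored positions, so it only matters on this path
#	              axisMult=(-1,),     #negate X on the saved positions so they overlay the mirrored mesh
#	              swapParity=True )   #also swap L/R parity tokens on the saved joint names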