Example #1
    def apply(self, mapping, **kwargs):
        '''
        construct a mel string to pass to eval - so it can be contained in a single undo...
        '''
        cmdQueue = api.CmdQueue()

        #gather options...
        additive = kwargs.get(self.kOPT_ADDITIVE,
                              self.kOPT_DEFAULTS[self.kOPT_ADDITIVE])

        for clipObj, tgtObj in mapping.iteritems():
            try:
                attrDict = self[clipObj]
            except KeyError:
                continue

            for attr, value in attrDict.iteritems():
                attrpath = '%s.%s' % (tgtObj, attr)
                try:
                    if not cmd.getAttr(attrpath, settable=True): continue
                except TypeError:
                    continue

                if additive: value += cmd.getAttr(attrpath)
                cmdQueue.append('setAttr -clamp %s %s;' % (attrpath, value))

        cmdQueue()
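
api.CmdQueue itself is not shown in these examples. A minimal sketch of the idea the docstring describes - batching MEL commands and evaluating them in one go so the whole batch lands in a single undoable chunk - might look like the following; the class name, the list subclassing and the maya.mel.eval call are assumptions, not the actual api implementation.

import maya.mel  #assumes this runs inside a Maya session

class SimpleCmdQueue(list):
    '''
    hypothetical stand-in for api.CmdQueue: collect MEL command strings, then
    evaluate them all with a single mel.eval call
    '''
    def __call__(self):
        if self:
            #join the queued commands into one MEL string and fire it off
            maya.mel.eval(';'.join(c.rstrip(';') for c in self) + ';')

Usage then mirrors the method above: append 'setAttr ...' strings as the mapping is walked and call the queue once at the end.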
Example #2
    def __call__(self, pct, mapping=None):
        BaseBlender.__call__(self, pct, mapping)
        cmdQueue = api.CmdQueue()

        if pct == 0:
            self.clipA.apply(self.getMapping())
        elif pct == 1:
            self.clipB.apply(self.getMapping())
        else:
            for attrPath, curves in self.curvePairs.iteritems():
                try:
                    curveA, curveB = curves
                except ValueError:
                    continue

                #because we know both curves have the same timings (ie if curveA has a key at time x, curveB is guaranteed to also have a key
                #at time x) then we just need to iterate over the keys of one curve, and blend them with the values of the other
                for time, keyA in curveA.m_keys.iteritems():
                    keyB = curveB.m_keys[time]
                    blendedValue = (keyA.m_flValue *
                                    (1 - pct)) + (keyB.m_flValue * pct)
                    blendedIX = (keyA.m_flInTanX *
                                 (1 - pct)) + (keyB.m_flInTanX * pct)
                    blendedIY = (keyA.m_flInTanY *
                                 (1 - pct)) + (keyB.m_flInTanY * pct)
                    blendedOX = (keyA.m_flOutTanX *
                                 (1 - pct)) + (keyB.m_flOutTanX * pct)
                    blendedOY = (keyA.m_flOutTanY *
                                 (1 - pct)) + (keyB.m_flOutTanY * pct)
                    cmdQueue.append('setKeyframe -t %s -v %s %s' %
                                    (time, blendedValue, attrPath))
                    cmdQueue.append(
                        'keyTangent -e -t %s -ix %s -iy %s -ox %s -oy %s %s' %
                        (time, blendedIX, blendedIY, blendedOX, blendedOY,
                         attrPath))

        cmdQueue()
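
The per-key math above is plain linear interpolation between the two clips' key values and tangents. A tiny stand-alone helper makes the formula explicit; lerp is a hypothetical name, not part of the original code:

def lerp(a, b, pct):
    '''returns a when pct is 0, b when pct is 1, and the weighted mix in between'''
    return a * (1.0 - pct) + b * pct

#eg the loop above computes the same thing as:
#blendedValue = lerp(keyA.m_flValue, keyB.m_flValue, pct)
#blendedIX = lerp(keyA.m_flInTanX, keyB.m_flInTanX, pct)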
Example #3
    def __call__(self, pct, mapping=None):
        BaseBlender.__call__(self, pct, mapping)
        cmdQueue = api.CmdQueue()

        if mapping is None:
            mapping = self.getMapping()

        for clipAObj, attrDictA in self.clipA.iteritems():
            clipBObjs = mapping[clipAObj]
            for a, valueA in attrDictA.iteritems():
                attrpath = '%s.%s' % (clipAObj, a)
                if not cmd.getAttr(attrpath, settable=True):
                    continue

                for clipBObj in clipBObjs:
                    try:
                        attrDictB = self.clipB[clipBObj]
                    except KeyError:
                        continue

                    try:
                        valueB = attrDictB[a]
                        blendedValue = (valueA * (1 - pct)) + (valueB * pct)
                        cmdQueue.append('setAttr %s %s' %
                                        (attrpath, blendedValue))
                    except KeyError:
                        cmdQueue.append('setAttr %s %s' % (attrpath, valueA))
                    except:
                        pass

        cmdQueue()
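
The inner try/except above boils down to "blend toward clipB where it has a matching attribute, otherwise keep clipA's value". A hedged sketch of that rule as a stand-alone helper (blendAttrValue is an assumed name):

def blendAttrValue(valueA, attrDictB, attr, pct):
    '''
    hypothetical helper mirroring the loop above: blend toward clipB's value
    when the attribute exists in attrDictB, otherwise fall back to clipA's value
    '''
    try:
        valueB = attrDictB[attr]
    except KeyError:
        return valueA

    return valueA * (1.0 - pct) + valueB * pct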
Example #4
    def __call__(self, pct, mapping=None, attributes=None):
        BaseBlender.__call__(self, pct, mapping)
        cmdQueue = api.CmdQueue()

        if mapping is None:
            mapping = self.getMapping()

        if attributes is None:
            attributes = self.attributes

        mappingDict = mapping.asDict()
        for clipAObj, attrDictA in self.clipA.iteritems():

            #if the object isn't in the mapping dict, skip it
            if clipAObj not in mappingDict:
                continue

            clipBObjs = mapping[clipAObj]
            for a, valueA in attrDictA.iteritems():
                if attributes:
                    if a not in attributes:
                        continue

                if not clipAObj:
                    continue

                attrpath = '%s.%s' % (clipAObj, a)
                if not cmd.getAttr(attrpath, settable=True):
                    continue

                for clipBObj in clipBObjs:
                    try:
                        attrDictB = self.clipB[clipBObj]
                    except KeyError:
                        continue

                    try:
                        valueB = attrDictB[a]
                        blendedValue = (valueA * (1 - pct)) + (valueB * pct)
                        cmdQueue.append('setAttr -clamp %s %f' %
                                        (attrpath, blendedValue))
                    except KeyError:
                        cmdQueue.append('setAttr -clamp %s %f' %
                                        (attrpath, valueA))
                    except:
                        pass

        cmdQueue()
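
This version queues setAttr -clamp, which clamps the blended value into the attribute's legal min/max range instead of erroring when the interpolation drifts out of bounds. A rough Python-side illustration of the same idea using cmds.attributeQuery - purely a sketch, since the real work here is done by the -clamp flag in the queued MEL:

from maya import cmds  #assumes a Maya session

def clampToAttrRange(node, attr, value):
    '''rough illustration of what setAttr -clamp does: keep the value inside
    the attribute's min/max range, if it has one'''
    if cmds.attributeQuery(attr, node=node, minExists=True):
        value = max(value, cmds.attributeQuery(attr, node=node, minimum=True)[0])
    if cmds.attributeQuery(attr, node=node, maxExists=True):
        value = min(value, cmds.attributeQuery(attr, node=node, maximum=True)[0])
    return value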
Example #5
def loadWeights(objects,
                filepath=None,
                usePosition=True,
                tolerance=TOL,
                axisMult=None,
                swapParity=True,
                averageVerts=True,
                doPreview=False,
                meshNameRemapDict=None,
                jointNameRemapDict=None):
    '''
    loads weights back onto a model given a file.  NOTE: the tolerance is an axis tolerance
    NOT a distance tolerance.  ie each axis must fall within the given tolerance to be
    considered a match - this makes matching a heap faster because vectors can be culled from
    a sorted list.  possibly implementing some sort of oct-tree class might speed up the
    matching more, but...  the majority of weight loading time at this stage is spent by maya
    actually applying skin weights, not the positional searching
    '''
    reportUsageToAuthor()
    start = time.clock()

    #setup the mappings
    VertSkinWeight.MESH_NAME_REMAP_DICT = meshNameRemapDict
    VertSkinWeight.JOINT_NAME_REMAP_DICT = jointNameRemapDict

    #cache heavily accessed method objects as locals...
    skinPercent = cmd.skinPercent
    progressWindow = cmd.progressWindow
    xform = cmd.xform

    #now get a list of all weight files that are listed on the given objects - and
    #then load them one by one and apply them to the appropriate objects
    filesAndGeos = {}
    dirOfCurFile = Path(cmd.file(q=True, sn=True)).up()
    for obj in objects:
        items = []  #this holds the vert list passed in IF any
        if obj.find('.') != -1:
            items = [obj]
            obj = obj.split('.')[0]

        try:
            file = Path(dirOfCurFile +
                        cmd.getAttr('%s.weightSaveFile' %
                                    obj) if filepath is None else filepath)
            if file.exists and not file.isfile():
                raise TypeError()
        except TypeError:
            #in this case, no weightSave file existed on the object, so try using the default file if it exists
            file = getDefaultPath() if filepath is None else filepath
            if not file.exists:
                api.melError('cant find a weightSaveFile for %s - skipping' %
                             obj)
                continue

        filesAndGeos.setdefault(file, {})
        try:
            filesAndGeos[file][obj].extend(items)
        except KeyError:
            filesAndGeos[file][obj] = items

    print filesAndGeos

    unfoundVerts = []
    for filepath, objItemsDict in filesAndGeos.iteritems():
        numItems = len(objItemsDict)
        curItem = 1
        progressWindow(e=True,
                       title='loading weights from file %d items' % numItems)

        miscData, joints, jointHierarchies, weightData = Path(
            filepath).unpickle()
        if miscData[api.kEXPORT_DICT_TOOL_VER] != TOOL_VERSION:
            api.melWarning(
                "WARNING: the file being loaded was stored from an older version (%d) of the tool - please re-generate the file.  Current version is %d."
                % (miscData[api.kEXPORT_DICT_TOOL_VER], TOOL_VERSION))

        for geo, items in objItemsDict.iteritems():
            #the miscData contains a dictionary with a bunch of data stored from when the weights were saved - do some
            #sanity checking to make sure we're not loading weights from some completely different source
            curFile = cmd.file(q=True, sn=True)
            origFile = miscData['scene']
            if curFile != origFile:
                api.melWarning(
                    'these weights were saved in a different file from the current one: "%s"'
                    % origFile)
                #response = cmd.confirmDialog(t='files differ...', m='the file these weights were saved from was %s\nthis is different from your currently opened file.\n\nis that OK?' % origFile, b=('Proceed', 'Cancel'), db='Proceed')
                #if response == 'Cancel': return

            #axisMults can be used to alter the positions of verts saved in the weightData array - this is mainly useful for applying
            #weights to a mirrored version of a mesh - so weights can be stored on meshA, meshA duplicated to meshB, and then the
            #saved weights can be applied to meshB by specifying an axisMult=(-1,1,1) OR axisMult=(-1,)
            if axisMult is not None:
                for data in weightData:
                    for n, mult in enumerate(axisMult):
                        data[n] *= mult

                #we need to re-sort the weightData as the multiplication could have potentially reversed things...  I could probably
                #be a bit smarter about when to re-order, but it's not a huge hit...  so, meh
                weightData = sortByIdx(weightData)

                #using axisMult for mirroring also often means you want to swap parity tokens on joint names - if so, do that now.
                #parity needs to be swapped in both joints and jointHierarchies
                if swapParity:
                    for joint, target in joints.iteritems():
                        joints[joint] = str(names.Name(target).swap_parity())
                    for joint, parents in jointHierarchies.iteritems():
                        jointHierarchies[joint] = [
                            str(names.Name(p).swap_parity()) for p in parents
                        ]

            #if a specific list of verts was passed in, convert just those components to verts - otherwise the user
            #wants weights loaded on the whole piece of geo, so convert the entire mesh to verts
            skinCluster = ''
            verts = cmd.ls(cmd.polyListComponentConversion(
                items if items else geo, toVertex=True),
                           fl=True)

            #remap joint names in the saved file to joint names that are in the scene - they may be namespace differences...
            missingJoints = set()
            for j in joints.keys():
                if not cmd.objExists(j):
                    #see if the joint with the same leaf name exists in the scene
                    idxA = j.rfind(':')
                    idxB = j.rfind('|')
                    idx = max(idxA, idxB)
                    if idx != -1:
                        leafName = j[idx:]
                        search = cmd.ls('%s*' % leafName, r=True, type='joint')
                        if len(search):
                            joints[j] = search[0]

            #now that we've remapped joint names, we go through the joints again and remap missing joints to their nearest parent
            #joint in the scene - NOTE: this needs to be done after the name remap so that parent joint names have also been remapped
            for j, jRemap in joints.iteritems():
                if not cmd.objExists(jRemap):
                    dealtWith = False
                    for n, jp in enumerate(jointHierarchies[j]):
                        #if n > 2: break
                        remappedJp = jp
                        if jp in joints: remappedJp = joints[jp]
                        if cmd.objExists(remappedJp):
                            joints[j] = remappedJp
                            dealtWith = True
                            break

                    if dealtWith: continue
                    missingJoints.add(j)

            #now remove them from the list
            [joints.pop(j) for j in missingJoints]
            for key, value in joints.iteritems():
                if key != value:
                    print '%s remapped to %s' % (key, value)

            #do we have a skinCluster on the geo already?  if not, build one
            skinCluster = cmd.ls(cmd.listHistory(geo), type='skinCluster')
            if not skinCluster:
                skinCluster = cmd.skinCluster(geo, joints.values())[0]
                verts = cmd.ls(cmd.polyListComponentConversion(geo,
                                                               toVertex=True),
                               fl=True)
            else:
                skinCluster = skinCluster[0]

            num = len(verts)
            cur = 0.0
            inc = 100.0 / num

            findMethod = findBestVector
            if averageVerts:
                findMethod = getDistanceWeightedVector

            #if we're using position, the restore weights path is quite different
            if usePosition:
                progressWindow(edit=True,
                               status='searching by position: %s (%d/%d)' %
                               (geo, curItem, numItems))
                queue = api.CmdQueue()

                print "starting first iteration with", len(weightData), "verts"

                iterationCount = 1
                while True:
                    unfoundVerts = []
                    foundVerts = []
                    for vert in verts:
                        progressWindow(edit=True, progress=cur)
                        cur += inc

                        pos = Vector(xform(vert, q=True, ws=True, t=True))
                        vertData = findMethod(pos, weightData, tolerance,
                                              doPreview)

                        try:
                            #unpack data to locals
                            try:
                                jointList, weightList = vertData.joints, vertData.weights
                            except AttributeError:
                                raise NoVertFound

                            try:
                                #re-map joints to their actual values
                                actualJointNames = [
                                    joints[j] for j in jointList
                                ]

                                #check sizes - if joints have been remapped, there may be two entries for a joint
                                #in the re-mapped jointList - in this case, we need to re-gather weights
                                actualJointsAsSet = set(actualJointNames)
                                if len(actualJointsAsSet) != len(
                                        actualJointNames):
                                    #so if the set sizes are different, then at least one of the joints is listed twice,
                                    #so we need to gather up its weights into a single value
                                    new = {}
                                    [
                                        new.setdefault(j, 0)
                                        for j in actualJointNames
                                    ]  #init the dict with 0 values
                                    for j, w in zip(actualJointNames,
                                                    weightList):
                                        new[j] += w

                                    #if the weightList is empty after renormalizing, nothing to do - keep loopin
                                    actualJointNames, weightList = new.keys(
                                    ), new.values()
                                    if not weightList: raise NoVertFound
                            except KeyError:
                                #if there was a key error, then one of the joints was removed from the joints dict
                                #as it wasn't found in the scene - so get the missing joints, remove them from the
                                #list and renormalize the remaining weights
                                jointListSet = set(jointList)
                                diff = missingJoints.difference(jointListSet)
                                weightList = renormalizeWeights(
                                    jointList, weightList, diff)
                                actualJointNames = [
                                    joints[j] for j in jointList
                                ]

                                #if the weightList is empty after renormalizing, nothing to do - keep loopin
                                if not weightList: raise NoVertFound

                            #normalize the weightlist
                            weightList = normalizeWeightList(weightList)

                            #zip the joint names and their corresponding weight values together (as thats how maya
                            #accepts the data) and fire off the skinPercent cmd
                            jointsAndWeights = zip(actualJointNames,
                                                   weightList)

                            queue.append(
                                'skinPercent -tv %s %s %s' % (' -tv '.join([
                                    '%s %s' % t for t in jointsAndWeights
                                ]), skinCluster, vert))
                            foundVertData = VertSkinWeight(pos)
                            foundVertData.populate(vertData.mesh, vertData.idx,
                                                   actualJointNames,
                                                   weightList)
                            foundVerts.append(foundVertData)
                        except NoVertFound:
                            unfoundVerts.append(vert)
                            #print '### no point found for %s' % vert

                    #so with the unfound verts - sort them, call them "verts" and iterate over them with the newly grown weight data
                    #the idea here is that when a vert is found it's added to the weight data (in memory not on disk).  by performing
                    #another iteration for the previously un-found verts, we should be able to get a better approximation
                    verts = unfoundVerts
                    if unfoundVerts:
                        if foundVerts:
                            weightData = sortByIdx(foundVerts)
                        else:
                            print "### still unfound verts, but no new matches were made in previous iteration - giving up.  %d iterations performed" % iterationCount
                            break
                    else:
                        print "### all verts matched!  %d iterations performed" % iterationCount
                        break

                    iterationCount += 1
                    print "starting iteration %d - using" % iterationCount, len(
                        weightData), "verts"
                    #for www in weightData: print www

                #bail if we've been asked to cancel
                if progressWindow(q=True, isCancelled=True):
                    progressWindow(ep=True)
                    return

                progressWindow(e=True,
                               status='maya is setting skin weights...')
                queue()

            #otherwise simply restore by id
            else:
                progressWindow(edit=True,
                               status='searching by vert name: %s (%d/%d)' %
                               (geo, curItem, numItems))
                queue = api.CmdQueue()

                #rearrange the weightData structure so it's ordered by vertex name
                weightDataById = {}
                [
                    weightDataById.setdefault(i.getVertName(),
                                              (i.joints, i.weights))
                    for i in weightData
                ]

                for vert in verts:
                    progressWindow(edit=True, progress=cur / num * 100.0)
                    if progressWindow(q=True, isCancelled=True):
                        progressWindow(ep=True)
                        return

                    cur += 1
                    try:
                        jointList, weightList = weightDataById[vert]
                    except KeyError:
                        #in this case, the vert doesn't exist in the file...
                        print '### no point found for %s' % vert
                        continue
                    else:
                        jointsAndWeights = zip(jointList, weightList)
                        skinPercent(skinCluster, vert, tv=jointsAndWeights)

            #remove unused influences from the skin cluster
            cmd.skinCluster(skinCluster, edit=True, removeUnusedInfluence=True)
            curItem += 1

    if unfoundVerts: cmd.select(unfoundVerts)
    end = time.clock()
    api.melPrint('time for weight load %.02f secs' % (end - start))
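
The axis-tolerance matching the docstring describes (each axis compared independently, which lets candidates be culled from a list kept sorted on one axis) is done by findBestVector / getDistanceWeightedVector, which aren't shown here. A rough sketch of the culling idea, with assumed names and a single per-axis tolerance value:

from bisect import bisect_left, bisect_right

def findWithinAxisTolerance(pos, sortedByX, tolerance):
    '''
    hypothetical sketch only: sortedByX is assumed to be a list of 3-float
    positions kept sorted on the x component, so bisect can discard everything
    whose x is already out of range before the per-axis comparison is done
    '''
    xs = [p[0] for p in sortedByX]  #a real implementation would cache this
    lo = bisect_left(xs, pos[0] - tolerance)
    hi = bisect_right(xs, pos[0] + tolerance)
    for candidate in sortedByX[lo:hi]:
        if all(abs(candidate[n] - pos[n]) <= tolerance for n in range(3)):
            return candidate

    return None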