def __init__(self):
    """Ungroup every selected "Group__" transform, unbind its published
    attributes from the group container, and recursively clean up any
    parent groups left empty afterwards.

    Runs entirely for its side effects on the Maya scene; returns
    immediately when the selection contains no "Group__" transforms.
    """
    selectedObjects = pm.ls(selection = True, transforms = True)

    # Keep only transforms whose names start with the "Group__" prefix.
    filteredGroups = []
    for obj in selectedObjects:
        if obj.find("Group__") == 0:
            filteredGroups.append(obj)

    if len(filteredGroups) == 0:
        return

    # Recursively find and store grouped module namespaces in a list
    groupContainer = "Group_container"
    modules = []
    for group in filteredGroups:
        modules.extend(self.FindChildModules(group))

    # Store all the grouped container nodes in a list
    moduleContainers = [groupContainer]
    for module in modules:
        moduleContainer = "%s:module_container" %module
        moduleContainers.append(moduleContainer)

    # Unlock containers so their contents can be edited and deleted.
    for container in moduleContainers:
        pm.lockNode(container, lock = False, lockUnpublished = False)

    # Ungroup
    for group in filteredGroups:
        childCount = len(pm.listRelatives(group, children = True))
        # NOTE(review): a group with exactly one child is NOT ungrouped
        # before deletion below — presumably module groups always hold
        # more than one child; confirm the single-child case is intended.
        if childCount > 1:
            pm.ungroup(group, absolute = True)

        # Unbind the group's published transform attributes from the
        # top-level group container before the node is deleted.
        for attr in ["t", "r", "globalScale"]:
            pm.container(groupContainer, edit = True,
                         unbindAndUnpublish = "%s.%s" %(group, attr))

        parentGroup = pm.listRelatives(group, parent = True)
        pm.delete(group)

        # Recursively delete empty parent groups
        if len(parentGroup) != 0:
            parentGroup = parentGroup[0]
            children = pm.listRelatives(parentGroup, children = True)
            children = pm.ls(children, transforms = True)
            if len(children) == 0:
                # Parent is now empty: select it and recurse by
                # instantiating this class again on the new selection.
                pm.select(parentGroup, replace = True)
                UngroupSelected()

    # Lock module containers after ungrouping is finished
    for container in moduleContainers:
        if pm.objExists(container):
            pm.lockNode(container, lock = True, lockUnpublished = True)
def genereateAnim(self, reopen=True):
    """Bake the rig's animation onto a clean "*_bind" joint hierarchy and
    export it as an FBX file next to the scene (in an "FBXAnim" folder).

    Destructive: imports all references, builds bake joints, deletes
    constraints — so the scene is reopened afterwards by default.

    Args:
        reopen (bool): reopen the current scene file when done.

    NOTE(review): `Scene` (from self.preExport) appears to be a pymel
    Path-like object with dirname()/namebase — confirm. os.startfile is
    Windows-only.
    """
    # export_path,_ = self.getFilename()
    Scene = self.preExport()
    FBXAnim = os.path.join(Scene.dirname(), "FBXAnim")
    # Conditional expression used as a statement: mkdir only if missing.
    os.mkdir(FBXAnim) if not os.path.exists(FBXAnim) else None
    export_path = os.path.join(FBXAnim, "%s.FBX" % Scene.namebase)
    export_path = export_path.replace('\\', '/')

    # NOTE import all references into the scene
    [ref.importContents(True) for ref in pm.listReferences()]

    mesh_list = pm.ls("MODEL", ni=1, dag=1, type="mesh")
    # # NOTE delete non-deformer history
    # pm.bakePartialHistory( mesh_list,prePostDeformers=True )

    jnt_list = self.getJntList(mesh_list)
    pm.select(cl=1)
    root = pm.joint(n="root")
    jnt_parent = self.getRelParent(jnt_list, root)

    # Build one "*_bind" joint per source joint, constrained to follow it.
    anim_parent = {}
    for jnt in jnt_list:
        pm.select(cl=1)
        anim_jnt = pm.joint(n="%s_bind" % jnt)
        pm.parentConstraint(jnt, anim_jnt, mo=0)
        pm.scaleConstraint(jnt, anim_jnt, mo=0)
        parent = jnt_parent[jnt]
        # Map each bind joint to its intended parent (bind name or root).
        anim_parent[
            anim_jnt] = "%s_bind" % parent if parent != root else root

    jnt_transform = {}
    for anim_jnt, parent in anim_parent.items():
        anim_jnt.setParent(parent)
        # NOTE remove the scale-compensation group Maya may insert
        # between the joint and its requested parent during setParent.
        transform = anim_jnt.getParent()
        if transform != parent:
            pm.ungroup(transform)

    # NOTE bake keyframes over the playback range
    start_time = pm.playbackOptions(q=1, min=1)
    end_time = pm.playbackOptions(q=1, max=1)
    pm.bakeResults(anim_parent.keys(), simulation=1,
                   t=(start_time, end_time))

    # NOTE delete all constraints under the root joint
    pm.delete(pm.ls(root, dag=1, ni=1, type="constraint"))

    pm.select(root)
    # NOTE export the file
    mel.eval('FBXExport -f "' + export_path + '" -s')
    os.startfile(os.path.dirname(export_path))

    # NOTE reopen the current file to undo the destructive edits
    if reopen:
        pm.openFile(pm.sceneName(), f=1)
def bakePositions(self, *splines):
    """Collapse intermediate groups between each curve group and its mesh
    shapes, repeatedly ungrouping until every shape's transform sits
    directly under its curve group.

    Args:
        *splines: accepted for call compatibility; not used here.
    """
    for grp in self.curveGroups:
        for mesh_shape in pm.ls(grp, dag=True, type='mesh'):
            xform = mesh_shape.getTransform()
            # Keep ungrouping until the transform is a direct child of grp.
            while xform.getParent() != grp:
                pm.ungroup(xform.getParent())
            logger.debug('was baked: %s' % xform)
def boundingBoxObject(self, object):
    """Return half the X extent of the reference mesh's object-space
    bounding box, measured with *object* reset under a temporary group.

    Args:
        object (pm.nt.Transform): node to neutralize while measuring.
            (Parameter name shadows the builtin ``object``; kept for
            backward compatibility with existing callers.)

    Returns:
        float: ``abs(width / 2)`` along X of the object-space bounding
        box of ``self.transformMesh[0]``.
    """
    # Temporary group created under *object* so it inherits its transform.
    group = pm.group(empty=True, parent=object)
    try:
        # Move the group beside *object*, under the same parent...
        pm.parent(group, pm.listRelatives(object, parent=True))
    except Exception:
        # ...or to world when *object* has no parent (listRelatives
        # returns [] and pm.parent raises). Was a bare ``except:``,
        # which also swallowed SystemExit/KeyboardInterrupt; narrowed.
        pm.parent(group, world=True)
    pm.parent(object, group)
    # Reset the node to its rest position so the measurement is neutral.
    object.resetFromRestPosition()
    boundingBox = self.transformMesh[0].getBoundingBox(space='object')
    # Remove the temporary group; its children move back up a level.
    pm.ungroup(group)
    return abs(((boundingBox[1][0] - boundingBox[0][0]) / 2))
def ungroupParent(objType="joint"):
    """Ungroup the offset parent of every selected node of *objType*,
    then restore the original selection.

    Joints are assumed to sit directly under their offset group; any
    other node type is assumed to be one level deeper (e.g. a shape's
    transform under the group).

    Args:
        objType (str): Maya node type to filter the selection by.
    """
    original_selection = pm.ls(sl=1)
    for node in pm.ls(sl=1, dag=1, ni=1, type=objType):
        is_joint = type(node) == pm.nodetypes.Joint
        target = node.getParent() if is_joint else node.getParent().getParent()
        pm.ungroup(target)
    pm.select(original_selection)
def rig_getHairSystemNodes (curve):
    '''Takes a pm.nt.NurbsCurve and returns the connected hairSystem
    network nodes, ungrouping the follicle and output-curve groups as a
    side effect.

    Args:
        curve (pm.nt.NurbsCurve): the curve to be queried
    Returns:
        {pm.PyNode} - dict with "follicle", "outputCurve" and
        "hairSystem" nodes attached to the curve
    '''
    follicle = curve.listRelatives(p=True, type='transform')[0]
    follicle_shape = follicle.getShape()
    follicle_grp = follicle.listRelatives(p=True, type='transform')[0]

    out_curve = follicle_shape.listConnections(s=False, type='nurbsCurve')[0]
    out_curve_grp = out_curve.listRelatives(p=True, type='transform')[0]
    hair_system = follicle_shape.listConnections(d=False, type='hairSystem')[0]

    # Flatten the hair system's grouping around the nodes we return.
    pm.ungroup(out_curve_grp)
    pm.ungroup(follicle_grp)

    return {"follicle": follicle,
            "outputCurve": out_curve,
            "hairSystem": hair_system}
def ungroupAndDeleteExtraGroups(topGroup):
    '''Finds all non-joint transforms in the hierarchy and either
    ungroups them (if they have children) or deletes them (if empty).

    Args:
        topGroup (pm.nt.Transform): top node to search under
    Returns:
        (pm.nt.Transform): the same top node
    Usage:
        ungroupAndDeleteExtraGroups(pm.ls(sl=True)[0])
    '''
    for node in topGroup.listRelatives(ad=True, type='transform'):
        # Joints are part of the rig skeleton; leave them alone.
        if node.type() == 'joint':
            continue
        if node.getChildren():
            pm.ungroup(node)
        else:
            pm.delete(node)
    return topGroup
def fill(self, times ):
    """Fill the gaps between selected hair meshes with interpolated
    duplicates (UI callback; operates on the current selection).

    For each consecutive pair of selected hairs, ``times - 1``
    duplicates are created, blended between the pair's transforms and
    blendshapes; with a single hair selected, plain duplicates are made.

    Args:
        times (int): subdivision count; times - 1 new hairs per gap.

    Raises:
        Maya error (pm.error) when nothing is selected.
    """
    self.transform()
    selected = pm.ls(sl = 1)
    pm.select(cl = 1)
    if len(selected) >= 1:
        # Name of the scalp/cap mesh used for UV transfer (UI text field).
        haircap = self.capName.getText()
        if len(selected) > 1:
            allNewHairs = []
            # Work pairwise through the selection order.
            for n in range(len(selected)-1):
                hair1 = selected[n]
                hair2 = selected[n+1]
                # Temporary group placed halfway between the two hairs;
                # duplicates are interpolated in this group's space.
                grp = pm.group(empty = True, name = 'HairGrp')
                selfMatrix = selected[n].getMatrix()
                hair2Matrix = selected[n+1].getMatrix()
                grpMatrix = (selfMatrix + hair2Matrix)/2
                grpMatrix = grpMatrix.homogenize()
                grp.setMatrix(grpMatrix)
                pm.parent([hair1, hair2], grp)
                newHairs = []
                for x in range(times-1):
                    newHair = pm.duplicate(hair1)[0]
                    # Blend the duplicate's matrix between hair1 and hair2
                    # (both expressed relative to the midpoint group).
                    newHair.setMatrix((selfMatrix*grpMatrix.inverse()).blend((hair2Matrix*grpMatrix.inverse()), weight = (x+1)*(1.0/times)))
                    #set blendshapes connecting new hair with the original hair
                    pm.blendShape( hair1 ,newHair , w = ( 0 , 1-( (x+1)*(1.0/times) ) ) )
                    #if hairs are the same connect the last one as well
                    if pm.polyEvaluate(hair1, v=1) == pm.polyEvaluate(hair2, v=1):
                        pm.blendShape( hair2 ,newHair , w = ( 0 , (x+1)*(1.0/times) ) )
                    # Optionally project the cap's UVs onto the new hair.
                    if pm.objExists(haircap) and self.transferCheckBox.getValue() == 1 :
                        pm.transferAttributes(haircap, newHair, sampleSpace=0,transferUVs=1, transferColors=0, sourceUvSet = 'map1',targetUvSet = 'map1')
                    newHairs.append(newHair)
                pm.ungroup(grp)
                allNewHairs.append(newHairs)
            # NOTE(review): due to precedence this reads
            # randSliderT.getValue() or (randSliderR.getValue() > 0) —
            # confirm the "> 0" was meant for both sliders.
            if self.randSliderT.getValue() or self.randSliderR.getValue() > 0:
                self.randomize(allNewHairs, transRot = 2)
            pm.select(allNewHairs)
        else:
            # Single hair selected: just duplicate it times - 1 times.
            hair1 = selected[0]
            newHairs = []
            for x in range(times-1):
                newHair = pm.duplicate(hair1)[0]
                # NOTE(review): selfTrans/selfRot are computed but never
                # used — possibly leftover from an offset feature.
                selfTrans = newHair.getTranslation()
                selfRot = newHair.getRotation()
                newHairs.append(newHair)
            if self.randSliderT.getValue() or self.randSliderR.getValue() > 0:
                self.randomize(newHairs, transRot = 2)
            pm.select(newHairs)
    else:
        pm.error( "select something")
def alignCtrlToJnt():
    """Snap the selected controls onto the last-selected joint, then
    rotate their CVs 90 degrees about Z so the shapes face correctly.

    Selection contract: all controls first, target joint last
    (validated by checkSelection).
    """
    selection = pm.ls(sl = True, fl = True)
    checkSelection(selection)
    controls = selection[:-1]
    target = selection[-1]

    # Zero the controls against the target via a temporary group.
    temp_grp = pm.group(controls)
    pm.parent(temp_grp, target)
    temp_grp.rotate.set([0, 0, 0])
    temp_grp.translate.set([0, 0, 0])
    pm.parent(temp_grp, world = True)
    pm.ungroup(temp_grp)

    # Rotate the control shapes at CV level (PyNode + str -> name string).
    cv_names = [ctrl + '.cv[*]' for ctrl in controls]
    pm.select(cv_names)
    pm.rotate([0, 0, 90])

    pm.select(cl = True)
    pm.select(selection[:])
def alignCtrlToJnt():
    """Align the selected controls to the last-selected joint, then spin
    their CVs 90 degrees about Z.

    Selection contract: all controls first, target joint last
    (validated by checkSelection).

    NOTE(review): this definition duplicates an identical
    alignCtrlToJnt earlier in this file; Python keeps this later one.
    Consider removing one of the two.
    """
    selComp = pm.ls(sl=True, fl=True)
    checkSelection(selComp)
    # Temporary group captures the controls so they zero as one unit.
    tmpGroup = pm.group(selComp[:-1])
    pm.parent(tmpGroup, selComp[-1])
    tmpGroup.rotate.set([0, 0, 0])
    tmpGroup.translate.set([0, 0, 0])
    pm.parent(tmpGroup, world=True)
    pm.ungroup(tmpGroup)
    # Convert controls to control vertices
    selComp2 = selComp[:-1]
    for i in range(len(selComp2)):
        # PyNode += str yields the node name plus the component suffix.
        selComp2[i] += '.cv[*]'
    pm.select(selComp2)
    pm.rotate([0, 0, 90])
    pm.select(cl=True)
    pm.select(selComp[:])
def genereateRig(self, select=True):
    """Flatten the rig into a clean, unlocked "*_bind" skeleton under a
    single "root" joint and export skeleton + meshes as an FBX file
    next to the scene.

    Destructive: imports references, deletes blendshapes/constraints,
    rewires the joint hierarchy — the current scene is reopened at the
    end to discard these edits.

    Args:
        select (bool): accepted for call compatibility; not used here.

    NOTE(review): uses dict.iteritems(), so this is Python 2 only
    (pre-2022 Maya). os.startfile is Windows-only.
    """
    # export_path,_ = self.getFilename()
    # if not os.path.exists(export_path):
    #     return
    Scene = self.preExport()
    export_path = os.path.join(Scene.dirname(), "%s.FBX" % Scene.namebase)
    export_path = export_path.replace('\\', '/')
    mel.eval('FBXExportSkins -v true')

    # NOTE import all references into the scene
    [ref.importContents(True) for ref in pm.listReferences()]

    # NOTE collect all visible meshes in the scene
    mesh_list = pm.ls("MODEL", ni=1, dag=1, type="mesh")
    # # NOTE delete non-deformer history
    # pm.bakePartialHistory( mesh_list,prePostDeformers=True )

    jnt_list = self.getJntList(mesh_list)
    pm.select(cl=1)
    root = pm.joint(n="root")
    jnt_parent = self.getRelParent(jnt_list, root)
    mel.eval('moveJointsMode 1;')

    # # NOTE delete all Blendshapes
    # pm.delete(pm.ls(type="ikEffector"))
    pm.delete(pm.ls(type="blendShape"))

    jnt_transform = {}
    # Snapshot world positions first, then unlock/disconnect every TRS
    # channel so the joints can be freely reparented.
    for jnt, pos in {
            jnt: pm.xform(jnt, q=1, ws=1, t=1)
            for jnt in jnt_list
    }.iteritems():
        jnt.tx.setLocked(0)
        jnt.ty.setLocked(0)
        jnt.tz.setLocked(0)
        jnt.rx.setLocked(0)
        jnt.ry.setLocked(0)
        jnt.rz.setLocked(0)
        jnt.sx.setLocked(0)
        jnt.sy.setLocked(0)
        jnt.sz.setLocked(0)
        jnt.tx.showInChannelBox(1)
        jnt.ty.showInChannelBox(1)
        jnt.tz.showInChannelBox(1)
        jnt.rx.showInChannelBox(1)
        jnt.ry.showInChannelBox(1)
        jnt.rz.showInChannelBox(1)
        jnt.sx.showInChannelBox(1)
        jnt.sy.showInChannelBox(1)
        jnt.sz.showInChannelBox(1)
        # Break any incoming connections on every TRS channel.
        mel.eval('CBdeleteConnection %s' % jnt.tx)
        mel.eval('CBdeleteConnection %s' % jnt.ty)
        mel.eval('CBdeleteConnection %s' % jnt.tz)
        mel.eval('CBdeleteConnection %s' % jnt.rx)
        mel.eval('CBdeleteConnection %s' % jnt.ry)
        mel.eval('CBdeleteConnection %s' % jnt.rz)
        mel.eval('CBdeleteConnection %s' % jnt.sx)
        mel.eval('CBdeleteConnection %s' % jnt.sy)
        mel.eval('CBdeleteConnection %s' % jnt.sz)
        jnt.setParent(root)
        jnt.rename("%s_bind" % jnt)
        parent = jnt.getParent()
        # PyNode-vs-string compare: equal when names match.
        if parent.name() == root:
            jnt.t.set(pos)
        else:
            # setParent inserted a compensation transform; defer cleanup.
            jnt_transform[jnt] = parent

    # NOTE clear jnt transform node
    for jnt, parent in jnt_transform.items():
        pm.xform(parent, piv=pm.xform(jnt, q=1, ws=1, t=1), ws=1)
        #
        jnt.s.set(parent.s.get())
        # parent.s.set(1,1,1)
        pm.ungroup(parent)

    # NOTE delete unrelated node
    [pm.delete(node) for jnt in jnt_list for node in jnt.getChildren()]

    # NOTE reparent hierarchy
    jnt_transform = {}
    for jnt, parent in jnt_parent.items():
        jnt.setParent(parent)
        transform = jnt.getParent()
        if transform != parent:
            jnt_transform[jnt] = transform

    for jnt, parent in jnt_transform.items():
        pm.xform(parent, piv=pm.xform(jnt, q=1, ws=1, t=1), ws=1)
        # NOTE reset scales to avoid accidental twisting
        jnt.s.set(1, 1, 1)
        parent.s.set(1, 1, 1)
        pm.ungroup(parent)

    [mesh.getParent().setParent(w=1) for mesh in mesh_list]
    pm.select(root, mesh_list)
    pm.delete(pm.ls(type="dagPose"))
    pm.dagPose(bp=1, s=1)
    # mel.eval('moveJointsMode 0;')

    # NOTE export the file
    mel.eval('FBXExport -f "' + export_path + '" -s')
    os.startfile(os.path.dirname(export_path))

    # NOTE reopen the current file to discard the destructive edits
    pm.openFile(pm.sceneName(), f=1)
def addJoint(self,
             obj,
             name,
             newActiveJnt=None,
             UniScale=False,
             segComp=0,
             gearMulMatrix=True):
    """Add joint as child of the active joint or under driver object.

    Args:
        obj (dagNode): The input driver object for the joint.
        name (str): The joint name.
        newActiveJnt (bool or dagNode): If a joint is passed, this joint
            will be the active joint and parent of the newly created
            joint.
        UniScale (bool): Connects the joint scale with the Z axis for a
            uniform scaling; if set False will connect with each axis
            separated.
        segComp (bool): Set True or False the segment compensation in
            the joint.
        gearMulMatrix (bool): Use the custom gear_multiply matrix node;
            if False will use Maya's default mulMatrix node.

    Returns:
        dagNode: The newly created joint.
    """
    if self.options["joint_rig"]:
        if newActiveJnt:
            self.active_jnt = newActiveJnt
        jnt = primitive.addJoint(self.active_jnt,
                                 self.getName(str(name) + "_jnt"),
                                 transform.getTransform(obj))
        # TODO: Set the joint to have always positive scaling
        # jnt.scale.set([1, 1, 1])

        # Disconnect inverseScale for better performance
        if isinstance(self.active_jnt, pm.nodetypes.Joint):
            try:
                pm.disconnectAttr(self.active_jnt.scale, jnt.inverseScale)
            except RuntimeError:
                # This handles the situation where we have in-between
                # joint transformation due to a negative scaling
                pm.ungroup(jnt.getParent())
        # All new jnts are the active by default
        self.active_jnt = jnt

        # Drive the joint from obj's world matrix in the joint's
        # parent space, via decomposeMatrix.
        if gearMulMatrix:
            mulmat_node = applyop.gear_mulmatrix_op(
                obj + ".worldMatrix", jnt + ".parentInverseMatrix")
            dm_node = node.createDecomposeMatrixNode(mulmat_node + ".output")
            m = mulmat_node.attr('output').get()
        else:
            mulmat_node = node.createMultMatrixNode(
                obj + ".worldMatrix", jnt + ".parentInverseMatrix")
            dm_node = node.createDecomposeMatrixNode(
                mulmat_node + ".matrixSum")
            m = mulmat_node.attr('matrixSum').get()

        pm.connectAttr(dm_node + ".outputTranslate", jnt + ".t")
        pm.connectAttr(dm_node + ".outputRotate", jnt + ".r")
        # TODO: fix squash stretch solver to scale the joint uniform
        # the next line cheats the uniform scaling only for X or Y axis
        # oriented joints
        if UniScale:
            pm.connectAttr(dm_node + ".outputScaleZ", jnt + ".sx")
            pm.connectAttr(dm_node + ".outputScaleZ", jnt + ".sy")
            pm.connectAttr(dm_node + ".outputScaleZ", jnt + ".sz")
        else:
            pm.connectAttr(dm_node + ".outputScale", jnt + ".s")
            pm.connectAttr(dm_node + ".outputShear", jnt + ".shear")

        # Segment scale compensate Off to avoid issues with the global
        # scale
        jnt.setAttr("segmentScaleCompensate", segComp)
        jnt.setAttr("jointOrient", 0, 0, 0)

        # setting the joint orient compensation in order to have clean
        # rotation channels
        jnt.attr("jointOrientX").set(jnt.attr("rx").get())
        jnt.attr("jointOrientY").set(jnt.attr("ry").get())
        jnt.attr("jointOrientZ").set(jnt.attr("rz").get())

        # Compensate the rotation by multiplying with the inverse of
        # the captured matrix so channels read zero at rest.
        im = m.inverse()

        if gearMulMatrix:
            mul_nod = applyop.gear_mulmatrix_op(
                mulmat_node.attr('output'), im, jnt, 'r')
            dm_node2 = mul_nod.output.listConnections()[0]
        else:
            mul_nod = node.createMultMatrixNode(
                mulmat_node.attr('matrixSum'), im, jnt, 'r')
            dm_node2 = mul_nod.matrixSum.listConnections()[0]

        if jnt.attr("sz").get() < 0:
            # if negative scaling we have to negate some axis for rotation
            neg_rot_node = pm.createNode("multiplyDivide")
            pm.setAttr(neg_rot_node + ".operation", 1)
            pm.connectAttr(dm_node2.outputRotate,
                           neg_rot_node + ".input1",
                           f=True)
            for v, axis in zip([-1, -1, 1], "XYZ"):
                pm.setAttr(neg_rot_node + ".input2" + axis, v)
            pm.connectAttr(neg_rot_node + ".output", jnt + ".r", f=True)

        # set not keyable
        attribute.setNotKeyableAttributes(jnt)

    else:
        # Non joint_rig mode: parent directly under the driver object.
        jnt = primitive.addJoint(obj,
                                 self.getName(str(name) + "_jnt"),
                                 transform.getTransform(obj))
        pm.connectAttr(self.rig.jntVis_att, jnt.attr("visibility"))
        attribute.lockAttribute(jnt)

    self.addToGroup(jnt, "deformers")

    # This is a workaround due the evaluation problem with compound attr
    # TODO: This workaround, should be removed onces the evaluation issue
    # is fixed
    # github issue: Shifter: Joint connection: Maya evaluation Bug #210
    # Replace each compound t/r/s connection with per-axis connections.
    dm = jnt.r.listConnections(p=True, type="decomposeMatrix")
    if dm:
        at = dm[0]
        dm_node = at.node()
        pm.disconnectAttr(at, jnt.r)
        pm.connectAttr(dm_node.outputRotateX, jnt.rx)
        pm.connectAttr(dm_node.outputRotateY, jnt.ry)
        pm.connectAttr(dm_node.outputRotateZ, jnt.rz)

    dm = jnt.t.listConnections(p=True, type="decomposeMatrix")
    if dm:
        at = dm[0]
        dm_node = at.node()
        pm.disconnectAttr(at, jnt.t)
        pm.connectAttr(dm_node.outputTranslateX, jnt.tx)
        pm.connectAttr(dm_node.outputTranslateY, jnt.ty)
        pm.connectAttr(dm_node.outputTranslateZ, jnt.tz)

    dm = jnt.s.listConnections(p=True, type="decomposeMatrix")
    if dm:
        at = dm[0]
        dm_node = at.node()
        pm.disconnectAttr(at, jnt.s)
        pm.connectAttr(dm_node.outputScaleX, jnt.sx)
        pm.connectAttr(dm_node.outputScaleY, jnt.sy)
        pm.connectAttr(dm_node.outputScaleZ, jnt.sz)

    return jnt