def createBlossomShader(rgb_blossom):
    """
    It creates a shading network for the blossom material.

    rgb_blossom: RGB values (0-1) for the diffuse colour of the petals.

    On Exit: Creates a Lambert node connected to a Shading Group which will
    be applied to the petals. Furthermore, two non-customizable additional
    shaders will be created and applied to the stamen and pedicel of the
    blossom.
    """
    # NOTE(review): declared but never assigned in this function --
    # presumably written elsewhere in the module; kept as-is.
    global blossomMat, blossomSG
    import globalVar
    # Re-import to pick up the current plantNumber (Python 2 reload)
    reload(globalVar)
    # Petals: user-coloured lambert + shading group
    blossomPetalsMat = cmds.shadingNode( 'lambert', asShader=True, name='blossomPetalsMat'+str(globalVar.plantNumber) )
    # NOTE(review): setAttr on a colour compound without type='double3' --
    # confirm this succeeds in the target Maya version.
    cmds.setAttr( blossomPetalsMat + '.color', rgb_blossom[0], rgb_blossom[1], rgb_blossom[2] )
    blossomPetalsSG = cmds.sets( renderable=True, noSurfaceShader=True, empty=True, name='blossomPetalsSG'+str(globalVar.plantNumber) )
    cmds.connectAttr( blossomPetalsMat + '.outColor', blossomPetalsSG + '.surfaceShader', f=True )
    # Stamen: fixed yellow lambert + shading group
    blossomStamenMat = cmds.shadingNode( 'lambert', asShader=True, name='blossomStamenMat'+str(globalVar.plantNumber) )
    cmds.setAttr( blossomStamenMat + '.color', 0.848, 0.8484, 0.186 )
    blossomStamenSG = cmds.sets( renderable=True, noSurfaceShader=True, empty=True, name='blossomStamenSG'+str(globalVar.plantNumber) )
    cmds.connectAttr( blossomStamenMat + '.outColor', blossomStamenSG + '.surfaceShader', f=True )
    # Pedicel: fixed green lambert + shading group
    blossomPedicelMat = cmds.shadingNode( 'lambert', asShader=True, name='blossomPedicelMat'+str(globalVar.plantNumber) )
    cmds.setAttr( blossomPedicelMat + '.color', 0, 0.494, 0 )
    blossomPedicelSG = cmds.sets( renderable=True, noSurfaceShader=True, empty=True, name='blossomPedicelSG'+str(globalVar.plantNumber) )
    cmds.connectAttr( blossomPedicelMat + '.outColor', blossomPedicelSG + '.surfaceShader', f=True )
def rebuildMesh(self): ''' ''' # Start timer timer = mc.timerX() # Rebuild Mesh Data meshData = OpenMaya.MObject() meshUtil = OpenMaya.MScriptUtil() numVertices = len(self._data['vertexList'])/3 numPolygons = len(self._data['polyCounts']) polygonCounts = OpenMaya.MIntArray() polygonConnects = OpenMaya.MIntArray() meshUtil.createIntArrayFromList(self._data['polyCounts'],polygonCounts) meshUtil.createIntArrayFromList(self._data['polyConnects'],polygonConnects) # Rebuild UV Data uArray = OpenMaya.MFloatArray() vArray = OpenMaya.MFloatArray() meshUtil.createFloatArrayFromList(self._data['uArray'],uArray) meshUtil.createFloatArrayFromList(self._data['vArray'],vArray) uvCounts = OpenMaya.MIntArray() uvIds = OpenMaya.MIntArray() meshUtil.createIntArrayFromList(self._data['uvCounts'],uvCounts) meshUtil.createIntArrayFromList(self._data['uvIds'],uvIds) # Rebuild Vertex Array vertexArray = OpenMaya.MFloatPointArray(numVertices,OpenMaya.MFloatPoint.origin) vertexList = [vertexArray.set(i,self._data['vertexList'][i*3],self._data['vertexList'][i*3+1],self._data['vertexList'][i*3+2],1.0) for i in xrange(numVertices)] # Rebuild Mesh meshFn = OpenMaya.MFnMesh() meshObj = meshFn.create( numVertices, numPolygons, vertexArray, polygonCounts, polygonConnects, uArray, vArray, meshData ) # Assign UVs meshFn.assignUVs(uvCounts,uvIds) # Rename Mesh mesh = OpenMaya.MFnDependencyNode(meshObj).setName(self._data['name']) meshShape = mc.listRelatives(mesh,s=True,ni=True,pa=True)[0] # Assign Initial Shading Group mc.sets(meshShape,fe='initialShadingGroup') # Print timer result buildTime = mc.timerX(st=timer) print('MeshIntersectData: Geometry rebuild time for mesh "'+mesh+'": '+str(buildTime)) # ================= # - Return Result - # ================= return mesh
def fixReferenceShaders(refList=None, defaultShader='initialShadingGroup'):
    """
    Repair broken reference shader assignments.

    For each reference, disconnect shape 'instObjGroups' connections from
    the reference node's placeHolderList and reassign the affected shapes
    to a default shading group.

    @param refList: References to process; defaults to all references in
                    the scene.
    @param defaultShader: Shading group to assign the orphaned shapes to.
    """
    # Check Reference List
    if not refList:
        refList = glTools.utils.reference.listReferences()

    # Check Each Reference
    for ref in refList:

        # Initialize Reference Shape List
        shpList = []

        # Check Disconnected Shaders
        c = cmds.listConnections(ref + '.placeHolderList', s=True, d=False, p=True, c=True, sh=True)

        # BUGFIX: listConnections returns None when there are no
        # connections, which made the range() below raise a TypeError.
        if not c:
            continue

        # Check Each Reference Connection (pairs of dst, src plugs)
        for i in range(0, len(c), 2):

            # Define Connection Source and Destination
            dst = c[i]
            src = c[i + 1]

            # Check instObjGroups Connections
            if 'instObjGroups' in src:

                # Disconnect placeHolderList Connection
                cmds.disconnectAttr(src, dst)

                # Get Source Shape
                shp = cmds.ls(src, o=True)[0]
                if not shp in shpList:
                    shpList.append(shp)

        # Reconnect to Shader
        if shpList:
            cmds.sets(shpList, e=True, fe=defaultShader)
def importSculpt(path, sel=None):
    '''
    Import sculpt set membership from disk.

    path: Full path, e.g. home/user/exportfile.txt -- a 'sculpt_set'
          sub-directory is expected underneath it; .txt in the file names
          is critical.
    sel = <Transform> Provide the transform that the sculpts are connected
          to, recommended through script
    sel = <None> (Default), recommended through User Interface calls --
          the current selection is validated and used.
    '''
    if sel is None:
        sel = validateSelection()
    if sel is not None:
        # get a list of all the files
        path = os.path.join(path, 'sculpt_set')
        if os.path.exists(path):
            files = os.listdir(path)
            for f in files:
                # It's expected that only the sculpt files will be in here
                if os.path.splitext(f)[1] == '.txt':
                    # Read the exported deformer name + vertex list
                    # (use a context manager instead of the py2 file() builtin)
                    with open(os.path.join(path, f), 'r') as _file:
                        info = _file.readlines()
                    deformer = info[0].strip('\n')
                    # SECURITY NOTE: eval() on file contents -- only load
                    # sculpt files from trusted sources.
                    vertex = eval(info[1])
                    sculptset = cmds.listConnections(deformer + '.message', s=False, d=True)[0]
                    # clear the set
                    cmds.sets(cl=sculptset)
                    # add vertex to the set
                    cmds.sets(vertex, fe=sculptset)
def importPolyMesh(name, identifier, jobInfo, parentXform=None, isConstant=False, useDynTopo=False):
    """
    Import an Alembic polymesh as a Maya mesh driven by an
    ExocortexAlembicPolyMesh reader node.

    name:        name for the created mesh node.
    identifier:  Alembic object identifier inside the archive.
    jobInfo:     import job settings (file node, time control, UV/normal flags).
    parentXform: optional parent transform for the new shape.
    isConstant:  True when the mesh is not animated (no deformer attached).
    useDynTopo:  True to stream topology changes through the reader node.

    Returns the created shape name, or an error description string on failure.
    """
    cmds.ExocortexAlembic_profileBegin(f="Python.ExocortexAlembic._import.importPolyMesh")
    try:
        reader = ""
        shape = fnt.alembicCreateNode(name, "mesh", parentXform)
        # New meshes need a shading group to be renderable
        cmds.sets(shape, e=True, forceElement="initialShadingGroup")
        topoReader = cmds.createNode("ExocortexAlembicPolyMesh")
        cmds.connectAttr(topoReader+".outMesh", shape+".inMesh")
        cmds.connectAttr(jobInfo.filenode+".outFileName", topoReader+".fileName")
        cmds.setAttr(topoReader+".identifier", identifier, type="string")
        cmds.setAttr(topoReader+".normals", jobInfo.useNormals)
        cmds.setAttr(topoReader+".uvs", jobInfo.useUVs)
        if jobInfo.useFaceSets:
            cmds.ExocortexAlembic_createFaceSets(o=shape, f=jobInfo.filename, i=identifier)
        if useDynTopo:
            # Topology is animated: drive the reader directly with time
            cmds.connectAttr(jobInfo.timeCtrl+".outTime", topoReader+".inTime")
            reader = topoReader
        elif not isConstant:
            # Fixed topology but animated points: attach a deformer
            reader = cmds.deformer(shape, type="ExocortexAlembicPolyMeshDeform")[0]
        # NOTE(review): when isConstant and not useDynTopo, reader is still ""
        # here -- confirm setupReaderAttribute handles an empty reader.
        setupReaderAttribute(reader, identifier, isConstant, jobInfo)
        #if not useDynTopo:
        #    setupReaderAttribute(topoReader, identifier, isConstant, jobInfo)
    except Exception as ex:
        # Report the failure through the command result instead of raising
        shape = "?importPolyMesh --> exception: \"" + str(ex.args) + "\" of type " + str(type(ex));
        apix.MPxCommand.setResult(shape)
    cmds.ExocortexAlembic_profileEnd(f="Python.ExocortexAlembic._import.importPolyMesh")
    return shape
def addJoint(side='L'):
    """
    Add a palm bind joint with FK/IK blending for the given arm side.

    side: 'L' or 'R' -- used as the prefix for all scene node names.

    Relies on existing rig nodes (wrist FK joint, middle IK joint, FkIk
    control, 'bind_joints_set', 'jointLayer', arm bind group) already being
    in the scene. Does nothing if the palm joint already exists.
    """
    # Idempotency guard: bail out if the palm joint was already built
    if mc.objExists('%s_armPalm_bnd_0'%side):return
    # make joint and locators
    Joint = mc.createNode('joint', name='%s_armPalm_bnd_0'%side)
    JointGroup = mc.group(Joint, name='%s_armPalm_bndgrp_0'%side)
    FKloc = mc.spaceLocator(p=(0,0,0), name='%s_armPalmFK_loc_0'%side)[0]
    IKloc = mc.spaceLocator(p=(0,0,0), name='%s_armPalmIK_loc_0'%side)[0]
    # constraint: both locators drive the joint group (blended below)
    constraintNode = mc.parentConstraint(FKloc, IKloc, JointGroup)
    # match position: snap locators to the middle IK joint via throwaway constraints
    mc.delete(mc.parentConstraint('%s_armMiddleAIK_jnt_0'%side, FKloc))
    mc.delete(mc.parentConstraint('%s_armMiddleAIK_jnt_0'%side, IKloc))
    # parent locator under the corresponding FK/IK chains
    mc.parent(FKloc, '%s_armWristFk_jnt_0'%side)
    mc.parent(IKloc, '%s_armMiddleAIK_jnt_0'%side)
    # make ikfk switch: reuse the reverse node already wired to the FKIKBlend attr
    reverseNode = [x.split('.')[0] for x in mc.connectionInfo('%s_armFkIk_ctl_0.FKIKBlend'%side, dfs=True) if mc.nodeType(x.split('.')[0])=='reverse'][0]
    mc.connectAttr('%s.outputX'%reverseNode, '%s.%sW0'%(constraintNode[0], FKloc))
    mc.connectAttr('%s_armFkIk_ctl_0.FKIKBlend'%side, '%s.%sW1'%(constraintNode[0], IKloc))
    # add to bind set
    mc.sets(Joint, e=True, forceElement='bind_joints_set')
    # connect jointLayer so the layer controls the joint's draw override
    mc.connectAttr('jointLayer.drawInfo', '%s.drawOverride'%Joint)
    # parent joint
    mc.parent(JointGroup, '%s_armBind_org_0'%side)
def creatSphere(*args):
    """
    Build a particle-emitting sphere for the selected circle control.

    Creates a hidden polySphere constrained to the selected circle, adds a
    surface emitter and particle system, assigns a new blinn shader to the
    particles, and wires UI controls ('numText', 'myColorIndex',
    'lifeText') to the emitter rate, shader colour and particle lifespan.
    """
    # Use the first selected object (expected: a nurbs circle) as the basis
    circleSel = mc.ls(sl=1)[0]
    radiusCircle = mc.circle(circleSel, q=1, r=1)
    radiusSpere = radiusCircle*.75
    # NOTE(review): sx/sy (subdivision counts) are derived from the radius
    # value -- looks suspicious; confirm this is intentional.
    particleSphere = mc.polySphere(n='%s_Sphere'%circleSel, r=radiusSpere, sx=float(radiusSpere), sy=float(radiusSpere), ax=[0, 1, 0])[0]
    mc.parentConstraint(circleSel, particleSphere, mo=0, w=1)
    #mc.parent(particleSphere, circleSel)
    # Zero the sphere's local transform and hide it (only particles render)
    mc.setAttr('%s.tx'%particleSphere, 0)
    mc.setAttr('%s.ty'%particleSphere, 0)
    mc.setAttr('%s.tz'%particleSphere, 0)
    mc.setAttr('%s.rx'%particleSphere, 0)
    mc.setAttr('%s.ry'%particleSphere, 0)
    mc.setAttr('%s.rz'%particleSphere, 0)
    mc.setAttr('%s.v'%particleSphere, 0)
    # Emitter + particles driven from the sphere surface
    mc.select(particleSphere, r=1)
    mc.emitter(type='surface', r=4, dx=1, dy=0, dz=0, n='%s_emitter'%circleSel )
    mc.particle( n='%s_Particles'%circleSel )
    mc.connectDynamic( '%s_Particles'%circleSel, em='%s_emitter'%circleSel )
    particlesShape = mc.listRelatives('%s_Particles'%circleSel, s=1)[0]
    # lifespanMode 1 = constant lifespan
    mc.setAttr('%s.lifespanMode'%particlesShape, 1)
    mc.setAttr('%s.lifespan'%particlesShape, 0.4)
    mc.setAttr('%s.startFrame'%particlesShape, 1001)
    # Wire UI controls to the dynamic attributes
    mc.connectControl( 'numText', '%s.rate'%('%s_emitter'%circleSel) )
    # Per-control blinn shader + shading group for the particles
    mc.shadingNode('blinn', n='%s_blinn'%circleSel, asShader=1)
    mc.sets( n='%s_blinnSG'%circleSel, renderable=True, noSurfaceShader=True, empty=1)
    mc.connectAttr('%s.outColor'%('%s_blinn'%circleSel), '%s.surfaceShader'%('%s_blinnSG'%circleSel))
    mc.connectControl( 'myColorIndex', '%s.color'%('%s_blinn'%circleSel) )
    mc.connectControl( 'lifeText', '%s.lifespan'%particlesShape )
    mc.sets('%s_Particles'%circleSel, e=1, forceElement='%s'%('%s_blinnSG'%circleSel))
def ImportCustomTexture(name): os.system("cd ~/maya/2014-x64/scripts; python texture.py") file = '/usr/tmp/texture.jpg' #create a shader shader=cmds.shadingNode( "blinn", asShader=True ) #a file texture node shaderName = file_node=cmds.shadingNode( "file", asTexture=True ) print shaderName #attach file to node cmds.setAttr( '%s.fileTextureName' %file_node, file, type = "string") # a shading group shading_group= cmds.sets(renderable=True,noSurfaceShader=True,empty=True) #connect shader to sg surface shader cmds.connectAttr('%s.outColor' %shader ,'%s.surfaceShader' %shading_group) #connect file texture node to shader's color cmds.connectAttr('%s.outColor' %file_node, '%s.color' %shader) cmds.sets(name, edit=True, forceElement=shading_group)
def __setup_sculpt_shader(self):
    #--- this method setups the sculpt shader color
    #--- the sculpt shader name (its shading group gets a '3SG' suffix)
    shader_name = 'sculptShaderGreen'
    #--- check if shader exists, else create a new one
    #--- (the original looped over range(len([...])) of a one-element list;
    #--- a plain if/else is equivalent)
    if cmds.objExists(shader_name):
        #--- reuse the existing shader and its shading group
        self.shader = shader_name
        self.shader_set = shader_name + '3SG'
    else:
        #--- create the lambert shader
        self.shader = cmds.shadingNode('lambert', asShader = True, name = shader_name)
        self.shader_set = cmds.sets(self.shader, renderable = True, noSurfaceShader = True, empty = True, name = shader_name + '3SG')
        cmds.connectAttr(self.shader + '.outColor', self.shader_set + '.surfaceShader', force = True)
    #--- change the color
    cmds.setAttr(self.shader + '.color', 0, 1, 1)
    #--- assign the shader to the sculpt_mesh
    cmds.sets(self.sculpt_mesh, forceElement = self.shader_set)
def singlePoly(self, arg=None):
    """
    Check the selected polygon object for lamina faces, non-manifold
    edges/vertices and collect offending vertices into a 'PolyIssues' set.
    The set is deleted again when no problems are found.
    """
    selObj=self.selection_grab()
    if selObj:
        pass
    else:
        print "select a polygon object"
        return
    # A '.' in the selection means a component is selected, not an object
    if "." in selObj[0]:
        print "You need to select a polygon object to interogate.(check that you are not in component mode)"
        return
    else:
        pass
    cmds.select(cl=True)
    # Recreate the result set from scratch
    if cmds.objExists("PolyIssues")==True:
        cmds.delete("PolyIssues")
    cmds.sets(n="PolyIssues", co=5)
    cmds.select(selObj)
    # lamina faces / non-manifold edges / non-manifold vertices
    errorFound=cmds.polyInfo(selObj, lf=True, nme=True, nmv=True )
    cmds.select (errorFound)
    cmds.ConvertSelectionToVertices(errorFound)
    # NOTE(review): py2 cross-type comparison -- a list compares greater
    # than 0 even when empty, while None compares smaller; confirm polyInfo
    # never returns an empty list here.
    if errorFound>0:
        print "Polygon error found"
        # Add the converted vertex selection to the set
        cmds.sets( fe='PolyIssues')
        cmds.select('PolyIssues', r=True, ne=True)
        cmds.pickWalk(d='Up')
        errorFound=cmds.ls(sl=True)
        # Nothing actually ended up in the set: remove it again
        if (len(errorFound))==0:
            cmds.delete("PolyIssues")
def perform(**kwargs):
    """
    Fit an oriented bounding box (OBB) around the selected transforms and
    create a polyCube matching its position, orientation and dimensions.

    The original selection is restored afterwards.
    """
    sel = cmds.ls(sl=True)
    # Gather world-space positions of the selection as a flat [x,y,z,...] list
    sel3=[]
    for s in sel:
        sel3+=cmds.xform(s,q=True, ws=True, t=True)
    pointset=[]
    for i in xrange(len(sel3)/3):
        pointset+=[Vector(sel3[i*3],sel3[i*3+1],sel3[i*3+2])]
    # Compute the oriented bounding box of the point set
    bbox = BBox()
    bbox.obbFromPointSet(pointset)
    # Build a rotation from the OBB axes and extract Euler angles
    t = Transform(bbox.axis[0],bbox.axis[1],bbox.axis[2])
    t = t.transpose()
    z = t.getEuler()
    # Create the cube network and place it on the OBB
    cube = cmds.createNode("polyCube")
    cubeShape = cmds.createNode("mesh")
    cubeTrans = cmds.listRelatives(cubeShape,p=True)[0]
    cmds.connectAttr(cube+".output",cubeShape+".inMesh")
    cmds.setAttr(cubeTrans+".tx",bbox.center[0])
    cmds.setAttr(cubeTrans+".ty",bbox.center[1])
    cmds.setAttr(cubeTrans+".tz",bbox.center[2])
    cmds.setAttr(cubeTrans+".rz",degrees(z[2]))
    cmds.setAttr(cubeTrans+".ry",degrees(z[1]))
    cmds.setAttr(cubeTrans+".rx",degrees(z[0]))
    cmds.setAttr(cube+".width",bbox.max[0]-bbox.min[0])
    cmds.setAttr(cube+".height",bbox.max[1]-bbox.min[1])
    cmds.setAttr(cube+".depth",bbox.max[2]-bbox.min[2])
    # BUGFIX: assign the new shape explicitly instead of relying on the
    # implicit selection left behind by createNode.
    cmds.sets(cubeShape,e=True,forceElement="initialShadingGroup")
    cmds.select(sel)
def appointShader(self , name, side , modular , shaderName , obj):
    """
    Create a lambert shader plus shading group and assign it to obj.

    The shading group is named <name><side><modular>shader_help and is
    applied to obj's first shape node. Returns the new shader node.
    """
    # Shading group name is reused three times below; build it once
    sg_name = name + side + modular + 'shader_help'
    target_shape = mc.listRelatives(obj, s=True)[0]
    shader = mc.createNode('lambert', n=shaderName)
    mc.sets(shader, renderable=True, noSurfaceShader=True, empty=1, name=sg_name)
    mc.connectAttr(shader + '.outColor', sg_name + '.surfaceShader')
    mc.sets(target_shape, e=True, fe=sg_name)
    return shader
def importPolyMesh(name, identifier, jobInfo, parentXform=None, isConstant=False, useDynTopo=False): cmds.ExocortexAlembic_profileBegin(f="Python.ExocortexAlembic._import.importPolyMesh") # TODO: set isConstant properly elsewhere when there are no transforms but are # animated attributes isConstant = False try: reader = "" shape = fnt.alembicCreateNode(name, "mesh", parentXform) cmds.sets(shape, e=True, forceElement="initialShadingGroup") topoReader = cmds.createNode("ExocortexAlembicPolyMesh") cmds.connectAttr(topoReader+".outMesh", shape+".inMesh") cmds.connectAttr(jobInfo.filenode+".outFileName", topoReader+".fileName") cmds.setAttr(topoReader+".identifier", identifier, type="string") cmds.setAttr(topoReader+".normals", jobInfo.useNormals) cmds.setAttr(topoReader+".uvs", jobInfo.useUVs) if jobInfo.useFaceSets: cmds.ExocortexAlembic_createFaceSets(o=shape, f=jobInfo.filename, i=identifier) if useDynTopo: cmds.connectAttr(jobInfo.timeCtrl+".outTime", topoReader+".inTime") reader = topoReader elif not isConstant: reader = cmds.deformer(shape, type="ExocortexAlembicPolyMeshDeform")[0] setupReaderAttribute(reader, identifier, isConstant, jobInfo) #if not useDynTopo: # setupReaderAttribute(topoReader, identifier, isConstant, jobInfo) except: return [traceback.format_exc()] finally: cmds.ExocortexAlembic_profileEnd(f="Python.ExocortexAlembic._import.importPolyMesh") return [shape, reader]
def getObjMaterials():
    """
    Duplicate the shading networks of the selected objects and assign the
    duplicated shading engines back to those objects.

    Side effect: appends the duplicated remap/luminance nodes to the
    module-level remapAndLuminaceNodes list.

    Returns the unique duplicated surface materials.
    Raises RuntimeError when nothing is selected.
    """
    global remapAndLuminaceNodes
    index = 0
    shadingType = ["blinn", "phong", "RedshiftArchitectural", "aiStandard"]
    displacementShading = ["RedshiftDisplacement", "displacementShader"]
    duplicateMaterials = list()
    selObjsList = cmds.ls(sl=1, tr=1)
    if not selObjsList:
        # BUGFIX: `assert "No object is selected!"` asserted a non-empty
        # (always truthy) string, so it could never fail; raise instead.
        raise RuntimeError("No object is selected!")
    for obj in selObjsList:
        duplicateNodes = list()
        cmds.select(obj, r=1)
        # Select the materials assigned to obj, then duplicate the network
        cmds.hyperShade(smn=1)
        selObjsDuplicateMaterials = cmds.duplicate(un=1)
        for item in selObjsDuplicateMaterials:
            dupliMater = cmds.rename(item, "%s_duplicate" % (item))
            duplicateNodes.append(dupliMater)
        # NOTE(review): assumes the duplicated network always contains at
        # least 3 nodes with remap/luminance at indices 1 and 2 -- confirm.
        remapAndLuminaceNodes.append(duplicateNodes[1])
        remapAndLuminaceNodes.append(duplicateNodes[2])
        remapAndLuminaceNodes.extend(duplicateNodes)
        for item in duplicateNodes:
            if cmds.nodeType(item) == "shadingEngine":
                duplicateShading = item
            if cmds.nodeType(item) in shadingType:
                duplicateMaterials.append(item)
        # Assign the duplicated shading engine back to the source object
        cmds.sets(obj, e=1, fe=duplicateShading)
    cmds.select(selObjsList, r=1)
    return list(set(duplicateMaterials))
def setDeformerMembership(self,geoList=None):
    '''
    Restore deformer set membership for the given geometry from the data
    stored in self.deformerData.

    @param geoList: Geometry to update; defaults to all geometry recorded
                    in self.deformerData.
    @raise Exception: if self.deformerName is not a valid deformer.
    '''
    # Check geometry list
    # (default changed from a mutable [] default argument to None; both
    # are falsy, so callers see identical behaviour)
    if not geoList:
        geoList = self.deformerData.keys()

    # Check deformer
    deformer = self.deformerName
    if not glTools.utils.deformer.isDeformer(deformer):
        raise Exception('Deformer "'+deformer+'" does not exist!')

    # Get deformer set
    deformerSet = glTools.utils.deformer.getDeformerSet(deformer)

    for geo in geoList:

        # Get current and stored membership
        setMembers = self.deformerData[geo]['membership']
        currMembers = glTools.utils.deformer.getDeformerSetMemberIndices(deformer,geo)
        # Members currently in the set but not in the stored data
        removeMembers = list(set(currMembers)-set(setMembers))

        # Determine component type ('vtx' for meshes, 'cv' otherwise)
        pt = 'cv'
        if self.deformerData[geo]['geometryType'] == 'mesh':
            pt = 'vtx'

        # Remove unused
        if removeMembers:
            mc.sets([geo+'.'+pt+'['+str(i)+']' for i in removeMembers],rm=deformerSet)

        # Add remaining members
        mc.sets(self.getMemberList([geo]),fe=deformerSet)
def assignSurfaceShader(name="", values=(0,0,0)): selection = cmds.ls(sl = True) print name, selection, values[0], values[1], values[2] if selection: # if the shader already exists if name in cmds.ls("*", type = "surfaceShader") and name+"SG" in cmds.ls("*", type = "shadingEngine"): for node in selection: try: cmds.select(node, r = True) cmds.sets(node, e = True, forceElement = name+"SG") except: pass # otherwise create the shader else: shader = cmds.shadingNode('surfaceShader', asShader = True, name = name) shadingGroup = cmds.sets(shader, renderable = True, noSurfaceShader = True, empty = True, name = name+"SG") cmds.connectAttr(shader+".outColor", shadingGroup+".surfaceShader", force = True) cmds.setAttr(shader+".outColor", values[0], values[1], values[2], type = "double3") for node in selection: try: cmds.sets(node, e = True, forceElement = name+"SG") except: pass cmds.select(selection, r = True)
def copyShader_multi( inputFirst, inputSeconds ):
    """
    Copy the shader assignments of one object onto several others,
    including per-face (component) assignments.

    inputFirst:   source object whose shading engines are copied.
    inputSeconds: target objects receiving the same assignments.

    Returns a list of [target, shadingEngine] pairs, or None when the
    source does not exist or has no shading engines.
    """
    first = pymel.core.ls( inputFirst )[0]
    if not pymel.core.objExists( first ): return None
    # Work on the shape if there is one, otherwise the node itself
    try:firstShape = first.getShape()
    except:firstShape = first
    engines = firstShape.listConnections( type='shadingEngine' )
    if not engines: return None
    engines = list( set( engines ) )
    copyObjAndEngines = []
    seconds = [ pymel.core.ls( inputSecond )[0] for inputSecond in inputSeconds ]
    for engine in engines:
        # Find the shader feeding this engine (via message/outColor plugs)
        srcCons = filter( lambda x : x.longName() in ['message', 'outColor'], engine.listConnections( s=1, d=0, p=1 ) )
        if not srcCons: continue
        # hyperShade selects everything the shader is assigned to
        pymel.core.hyperShade( objects = srcCons[0].node() )
        selObjs = pymel.core.ls( sl=1 )
        targetObjs = []
        for selObj in selObjs:
            # Only consider assignments belonging to the source shape
            if selObj.node() != firstShape: continue
            if selObj.find( '.' ) != -1:
                # Component (per-face) assignment: mirror the component string
                for second in seconds:
                    targetObjs.append( second+'.'+ selObj.split( '.' )[-1] )
            else:
                # Whole-object assignment
                for second in seconds:
                    targetObjs.append( second )
        if not targetObjs: continue
        for targetObj in targetObjs:
            cmds.sets( targetObj, e=1, forceElement=engine.name() )
            copyObjAndEngines.append( [targetObj, engine.name()] )
    return copyObjAndEngines
def sprSortSetsMemeber(set, sortByType = True, defaultControlers = None):
    """
    Re-add the members of a controller object set in sorted order.

    set:         the object set to sort (NOTE: the parameter name shadows
                 the builtin 'set'; kept for backward compatibility).
    sortByType:  True  -> order by transform / curve / joint controllers,
                 False -> default controllers first, then the rest sorted
                          alphabetically.
    defaultControlers: controllers to pin to the front in the
                 sortByType=False path.
    """
    # BUGFIX: the default list used to be a mutable default argument that
    # was mutated below (.remove), so it shrank across calls; build a
    # fresh copy every call instead.
    if defaultControlers is None:
        defaultControlers = ["browController","eyeController","noseController","mouthController"]
    else:
        defaultControlers = list(defaultControlers)
    allControllers = cmds.sets(set,q=1)
    sortedControllerList = []
    if sortByType == True:
        sortedControllerList.extend(transformControllers(set,defaultControlers))
        sortedControllerList.extend(curveControllers(set))
        sortedControllerList.extend(jointControllers(set))
    else:
        # BUGFIX: iterate over a copy -- the original removed items from
        # the list while iterating it, silently skipping elements.
        for defaultControler in list(defaultControlers):
            if defaultControler in allControllers:
                allControllers.remove(defaultControler)
            else:
                defaultControlers.remove(defaultControler)
        allControllers.sort()
        sortedControllerList.extend(defaultControlers)
        sortedControllerList.extend(allControllers)
    # Remove everything, then re-add in the sorted order
    for item in sortedControllerList:
        cmds.sets(item, remove=set)
    for item in sortedControllerList:
        cmds.sets(item, addElement=set)
def badname(self, arg=None):
    """
    Collect scene nodes matching the module-level badNameList patterns
    into a 'badNames' set; report and clean up when none are found.
    """
    cmds.select(d=True)
    # Recreate the result set from scratch
    if cmds.objExists("badNames")==True:
        cmds.delete("badNames")
    cmds.sets(n="badNames", co=5)
    # badNameList is assumed to be a module-level list of name prefixes
    for each in badNameList:
        if cmds.objExists(each+'*'):
            cmds.select (each+'*', hierarchy=False, add=True)
            cmds.sets( fe="badNames")
#    if cmds.objExists('pSphere*'):
#        cmds.select ('pSphere*', hierarchy=False, add=True)
#        cmds.sets( fe="badNames")
#
#    if cmds.objExists('curve*'):
#        cmds.select ('curve*', hierarchy=False, add=True)
#        cmds.sets( fe="badNames")
#
#
#    if cmds.objExists('polySurface*'):
#        cmds.select ('polySurface*', hierarchy=False, add=True)
#        cmds.sets( fe="badNames")
    if cmds.objExists('badNames'):
        # Select the set node itself, then its members via pickWalk
        cmds.select('badNames', r=True, ne=True)
        shoo=cmds.ls(sl=True)
        cmds.pickWalk (d='up')
        pete=cmds.ls(sl=True)
        if (len(pete))>0:
            print("The selected object(s) don't have very descriptive names.")
        else:
            cmds.select(cl=True)
            print(" no bad names exists.")
            # No offenders: remove the empty set again
            cmds.delete(shoo)
def getObjMaterials():
    """
    Duplicate the materials of the selected objects, build a fresh shading
    group per duplicated surface shader, re-wire displacement shaders, and
    assign the new shading groups back to the objects.

    Returns the unique duplicated surface materials.
    Raises RuntimeError when nothing is selected.
    """
    global remapAndLuminaceNodes
    shadingType = ["blinn", "phong", "RedshiftArchitectural", "aiStandard"]
    displacementShading = ["RedshiftDisplacement", "displacementShader"]
    duplicateMaterials = list()
    selObjsList = cmds.ls(sl=1, tr=1)
    if not selObjsList:
        # BUGFIX: `assert "No object is selected!"` asserted a non-empty
        # (always truthy) string, so it could never fail; raise instead.
        raise RuntimeError("No object is selected!")
    for obj in selObjsList:
        duplicateNodes = list()
        cmds.select(obj, r=1)
        # Select the materials assigned to obj, then duplicate the network
        cmds.hyperShade(smn=1)
        selObjsDuplicateMaterials = cmds.duplicate(un=1)
        for item in selObjsDuplicateMaterials:
            dupliMater = cmds.rename(item, "%s_duplicate" % (item))
            duplicateNodes.append(dupliMater)
        for item in duplicateNodes:
            if cmds.nodeType(item) in shadingType:
                # New shading group for each duplicated surface shader
                duplicateShading = cmds.sets(r=1, nss=1, em=1, n="%s_SG" % item)
                cmds.connectAttr("%s.outColor" % item, "%s.surfaceShader" % duplicateShading, f=1)
                duplicateMaterials.append(item)
            elif cmds.nodeType(item) == "RedshiftDisplacement":
                # NOTE(review): relies on a surface shader appearing before
                # the displacement node in duplicateNodes, otherwise
                # duplicateShading is unbound here -- confirm the ordering.
                conInfo = cmds.connectionInfo("%s.displacementShader" % duplicateShading, sfd=1)
                if not conInfo:
                    cmds.connectAttr("%s.out" % item, "%s.displacementShader" % duplicateShading, f=1)
            elif cmds.nodeType(item) == "displacementShader":
                conInfo = cmds.connectionInfo("%s.displacementShader" % duplicateShading, sfd=1)
                if not conInfo:
                    cmds.connectAttr("%s.displacement" % item, "%s.displacementShader" % duplicateShading, f=1)
        # Assign the duplicated shading engine back to the source object
        cmds.sets(obj, e=1, fe=duplicateShading)
    cmds.select(selObjsList, r=1)
    return list(set(duplicateMaterials))
def meshToMaya(self,**kwargs):
    """
    Build a Maya mesh from this object's vertices/faces via OpenMaya.

    kwargs:
      parent: optional parent MObject; when supplied the mesh is created
              under it and updateSurface()'s result is returned.
      uvs:    truthy to also transfer self.uvs / self.face_uvs.
      name:   rename the new shape ('<name>Shape') and its transform.

    Returns (transform full path, shape full path) when no parent is given.
    """
    polygonCounts_final = OpenMaya.MIntArray()
    polygonConnects_final = OpenMaya.MIntArray()
    vertexArray_final = OpenMaya.MFloatPointArray()
    uArray = OpenMaya.MFloatArray()
    vArray = OpenMaya.MFloatArray()
    uvConnects = OpenMaya.MIntArray()
    for i in xrange(len(self.vertices)):
        vertexArray_final.append(self.vertices[i].x,self.vertices[i].y,self.vertices[i].z)
        #uArray.append(self.vertices[i].x)
        #vArray.append(self.vertices[i].z)
    for i in self.faces:
        # id counts the valid vertices of this face
        id = 0
        for j in i:
            if j>len(self.vertices)-1:
                # Face references a vertex that does not exist; skip it
                print("segmentation unknown vertex")
            else:
                polygonConnects_final.append(j)
                id = id+1
        polygonCounts_final.append(id)
    meshFS_n = OpenMaya.MFnMesh()
    if "parent" in kwargs:
        meshFS_n.create(len(self.vertices), len(self.faces), vertexArray_final, polygonCounts_final, polygonConnects_final, kwargs.get("parent"))
    else:
        meshFS_n.create(len(self.vertices), len(self.faces), vertexArray_final, polygonCounts_final, polygonConnects_final)
    uvs = kwargs.get("uvs",0)
    if uvs:
        for i in xrange(len(self.uvs)):
            uArray.append(self.uvs[i].x)
            vArray.append(self.uvs[i].y)
        for i in self.face_uvs:
            for j in i:
                if j>len(self.uvs)-1:
                    print("segmentation unknown uv")
                else:
                    uvConnects.append(j)
        meshFS_n.setUVs( uArray, vArray)
        meshFS_n.assignUVs(polygonCounts_final, uvConnects)
    """
    for i in xrange(len(self.faces)):
        for j in xrange(len(self.faces[i])):
            meshFS_n.assignUV(i,j,self.faces[i][j])
    """
    if "parent" in kwargs:
        return meshFS_n.updateSurface()
    # Make the new mesh renderable
    cmds.sets (meshFS_n.name(), e=True, fe='initialShadingGroup')
    if 'name' in kwargs:
        meshFS_n.setName(kwargs.get('name')+'Shape')
        transform = meshFS_n.parent(0)
        transform_dag = OpenMaya.MFnDagNode(transform)
        transform_dag.setName(kwargs.get('name'))
    # NOTE(review): transform_dag is only bound when 'name' was supplied --
    # confirm callers always pass name when no parent is given.
    return ( transform_dag.fullPathName(),meshFS_n.fullPathName() )
def makeSnowflakes(number,size,sizeVar,rgb1,rgb2,transparency,glow):
    '''
    Creates a number of snowflakes

    number : Number of particles to create
    size : Radius of the snowflakes
    sizeVar : Variation in the radius
    rgb1 : One end of the colour range in the form (r,g,b)
    rgb2 : The other end of the colour range in the form (r,g,b)
    transparency : Alpha value for the shader
    glow : Glow value for the shader

    The progress window is updated and the shading group list is created.
    Each snowflake is created, appended to the result list and assigned a
    random shader from the list. The list of objects is returned.
    '''
    cmds.progressWindow(e=1,progress=0,status='Making Snowflakes...')
    SGList = createColourList(rgb1,rgb2,transparency,glow,5)
    # Idiom fix: the original accumulated into a local named `list`
    # (shadowing the builtin) via slice assignment; use append on a
    # properly-named list and a for loop instead of a manual while counter.
    flakes = []
    for count in range(number):
        # Jitter the radius by +/- sizeVar percent of size
        radius = size+random.uniform(-(size*sizeVar*0.01),(size*sizeVar*0.01))
        flakes.append(makeFlake(random.randint(5,8),radius))
        # Assign one of the 5 shading groups at random
        cmds.sets(flakes[-1], e=1, forceElement=SGList[random.randint(0,4)])
        cmds.progressWindow(e=1,step=100/number)
    return flakes
def makeStreetTree(shaders):
    '''
    Creates a tree on a circular platform and with a circular fence around it.

    shaders: A list of shaders for the tree crowns.

    On exit: A tree has been created using makeTree(...), a circular platform
    has been created underneath it and a fence around it. Appropriate shaders
    have been assigned. Everything is united into one polygonal object and
    returned as a tuple with the object name and the node name.
    '''
    tree = makeTree(shaders)
    # Circular platform under the tree
    platform = cmds.polyCylinder(name = "platform",h = 0.1, r = 0.8)
    cmds.move(0.25, y = True)
    cmds.sets(platform[0], edit=True, forceElement="fountainMaterialGroup")
    # First fence pole, then 9 more rotated around the platform centre
    pole = cmds.polyCube(name = "pole", h = 0.6, w = 0.04, d = 0.04)
    cmds.xform(pole, t = (0.7,0.45,0))
    angle = 360/10.0
    for i in range(1,10):
        pole1 = cmds.polyCube(name = "pole", h = 0.6, w = 0.04, d = 0.04)
        cmds.rotate(angle * i, y = True)
        cmds.move(0.7,0.45,0, os = True)
        pole = cmds.polyUnite(pole, pole1)
    # Two horizontal rings connecting the poles
    bar = cmds.polyPipe(name = "bar", h = 0.1, r = 0.65, t = 0.04)
    cmds.move(0.65, y = True)
    bar1 = cmds.duplicate(bar[0])
    cmds.move(-0.2, y = True, r = True)
    fence = cmds.polyUnite(pole, bar, bar1)
    cmds.sets(fence[0], edit=True, forceElement="blackMetalGroup")
    # Combine everything and drop construction history
    streetTree = cmds.polyUnite(tree,platform, fence)
    cmds.delete(streetTree, ch = True)
    return streetTree
def makeFountain():
    '''
    Creates a fountain.

    On exit: A fountain shaped polygonal object has been created, assigned a
    shader and is returned as a tuple with the object name and node name. The
    procedure uses random numbers in order to create different looking
    fountains every time it is called.
    '''
    steps = random.randint(1,3)
    fountain = cmds.polyCylinder(name = "Fountain", h = 0.1)
    cmds.xform(fountain, translation = (0, 0.25, 0))
    # Extrude from the top cap faces of the default cylinder
    cmds.select(fountain[0] + ".f[40:59]")
    for i in range(steps):
        # Each tier: shrink, raise, lip inward, basin down, narrow, raise
        scale_ = random.uniform(0.6, 0.95)
        cmds.polyExtrudeFacet(scale = (scale_, scale_, scale_))
        translation = random.uniform(0.1, 0.6)
        cmds.polyExtrudeFacet(translate = (0, translation, 0))
        cmds.polyExtrudeFacet(scale = (0.9,0.9,0.9))
        cmds.polyExtrudeFacet(translate = (0, -0.3,0))
        scale_ = random.uniform(0.3,0.6)
        cmds.polyExtrudeFacet(scale = (scale_,scale_,scale_))
        translation = random.uniform(0.2,0.4)
        cmds.polyExtrudeFacet(translate = (0,translation,0))
    # Narrowing column rising from the last tier
    stepsUp = random.randint(1,3)
    for i in range(stepsUp):
        scale_ = random.uniform(0.4,0.9)
        cmds.polyExtrudeFacet(scale = (scale_,scale_,scale_))
        translation = random.uniform(0.05,1)
        cmds.polyExtrudeFacet(translate = (0,translation,0))
    top = fountainTop(fountain) # Create a top for the fountain.
    fountain = cmds.polyUnite(top[0],fountain)
    cmds.sets(fountain[0], edit=True, forceElement="fountainMaterialGroup")
    return fountain
def makeTree(shaders):
    '''
    Creates a tree.

    shaders: A list of shaders for the tree crown.

    On exit: A tree has been modeled, and is returned as a tuple containing
    the object name and the node name. Some of the variables are chosen
    randomly to create different results.
    '''
    height = random.uniform(0.3,1.5)
    trunk = cmds.polyCylinder(name = "trunk", h = height, r = 0.07)
    cmds.sets(trunk[0], edit=True, forceElement="trunkMaterialGroup")
    cmds.xform(trunk, translation = (0,height/2.0 + 0.2,0))
    crown = cmds.polySphere(name = "crown", r = 0.5)
    cmds.xform(crown, translation = (0,height + 0.6,0))
    # Soft-select a top vertex and pull it upward to shape the crown
    cmds.softSelect(sse = True, ssd = 0.86)
    cmds.select(crown[0] + ".vtx[381]")
    translation = random.uniform(0.3,1.5)
    cmds.move(translation, y = True, r = True)
    cmds.softSelect(sse = False)
    # Random crown shader and overall crown scale (pivot at trunk top)
    shader = random.choice(shaders)
    scale_ = random.uniform(0.7,1.8)
    cmds.select(crown)
    cmds.scale(scale_, scale_, scale_, pivot = (0,height,0))
    cmds.sets(crown[0], edit=True, forceElement= shader[1])
    # Combine trunk + crown and drop construction history
    tree = cmds.polyUnite(trunk[0],crown[0])
    cmds.delete(tree[0], ch = True)
    return tree
def combineShader( shaderList ):
    """
    Merge several shaders' assignments onto the first shading engine found
    and delete the now-redundant shaders/shading engines.

    shaderList: shaders whose assigned objects should all end up on one
                shading engine.

    Returns None when the shaders have no shading engine at all.
    The whole operation is wrapped in a single undo chunk.
    """
    cmds.undoInfo( ock=1 )
    # Collect everything each shader is currently assigned to
    targetObjs = []
    for shader in shaderList:
        cmds.hyperShade( objects = shader )
        targetObjs += cmds.ls( sl=1 )
    shadingEngines = cmds.listConnections( shaderList, s=0, d=1, type='shadingEngine' )
    if not shadingEngines:
        # BUGFIX: close the undo chunk opened above before the early
        # return -- the original left the chunk open here.
        cmds.undoInfo( cck=1 )
        return None
    shadingEngines = list( set( shadingEngines ) )
    # Keep the first engine, move all assignments onto it
    targetShadingEngine = shadingEngines[0]
    cmds.sets( targetObjs, e=1, forceElement = targetShadingEngine )
    cmds.delete( shadingEngines[1:] )
    # Delete shaders that no longer feed the surviving engine
    for shader in shaderList:
        shadingEngines = cmds.listConnections( shader, s=0, d=1, type='shadingEngine' )
        if not shadingEngines:
            cmds.delete( shader )
        elif not targetShadingEngine in shadingEngines:
            cmds.delete( shader, shadingEngines )
    Window_global.nodeInfomation = {}
    cmds.undoInfo( cck=1 )
def addLocators(factory=None, group="", locators=None):
    """
    Turn objects into Massive locators and add them to a massive group.

    factory:  MayaFactory instance; created on demand when not supplied.
    group:    the massive group set to add the locators to.
    locators: the objects to convert; when both group and locators are
              empty, they are taken from the current selection (last item
              is the group, the rest are the locators).

    Raises Errors.BadArgumentError on invalid selection/arguments.
    """
    if not factory:
        factory = MayaFactory.MayaFactory()
    if not group and not locators:
        selection = mc.ls(sl=True)
        if len(selection) < 2:
            raise Errors.BadArgumentError("Please select some objects to turn into Massive locators, and a massive group.")
        group = selection[-1]
        locators = selection[0:-1]
        # BUGFIX: the original recursed as addLocators(group, locators),
        # which bound the group to the `factory` parameter; pass all three
        # arguments in order.
        addLocators(factory, group, locators)
    else:
        if not group or not locators:
            raise Errors.BadArgumentError("Please select some objects to turn into Massive locators, and a massive group.")
        if not isGroup(group):
            raise Errors.BadArgumentError("%s is not a massive group." % group)
        if factory.isGroupsSet(group):
            raise Errors.BadArgumentError("%s can not have locators added to it." % group)
        for locator in locators:
            try:
                # Probe that the transform is accessible via xform
                mc.xform(locator, query=True, worldSpace=True, translation=True)
            except:
                # BUGFIX: the original applied % to the second string
                # literal only ("str1" + "str2" % locator), which raised a
                # TypeError while formatting; format the full message.
                raise Errors.BadArgumentError(
                    ("%s can not be turned into a Massive locator.\n"
                     "Please select items whose transformation is accessible through the 'xform' command.") % locator)
            # Tag each locator with a 'massive' attribute if missing
            if not mc.objExists("%s.massive" % locator):
                mc.addAttr(locator, longName="massive", attributeType="long")
        mc.sets(locators, add=group)
def set_isolate_set(selected): set_name = get_isolate_set_name() # Trying to hide visible children in hierarchy to get wanted isolate # behavior. for sel in selected: for child in sel.iterchildren(): if child in selected or not child.type == MFn.kTransform: continue # Only work on visible children if child.attr['visibility']: child.attr['visibility'] = False HIDDEN_CHILDREN.add(child) hilited = DagpathList( [dag for dag in mampy.daglist(hl=True) if dag not in selected] ) if hilited: cmds.hilite(hilited.cmdslist(), toggle=True) # In case the dag object was a child of unhilited object rehilite it. for dag in selected: cmds.hilite(str(dag)) if not set_name: for dag in selected: cmds.isolateSelect(get_active_panel(), addDagObject=str(dag)) return cmds.sets(clear=set_name) cmds.sets(selected.cmdslist(), include=set_name)
def load_shader(self, look_file, shader_file, ref=True):
    """
    Load a shader file (by reference or import) and assign its shading
    groups to this asset's geometry according to the look file.

    look_file:   JSON file with a 'shader_link' mapping of object names to
                 lists of shading engine names.
    shader_file: Maya binary file containing the shaders.
    ref:         True to reference the file, False to import it.

    Returns True on success, False when a required file is missing.
    """
    # BUGFIX: the original bailed out only when BOTH files were missing
    # ('and'); both files are required below, so fail when EITHER is missing.
    if not os.path.isfile(look_file) or not os.path.isfile(shader_file):
        return False
    asset_ref_name = self.asset_name()
    asset_ref_name += '_Shad'
    # Bring the shader file in by reference; skip when already loaded.
    if not mc.objExists(asset_ref_name + 'RN'):
        if ref:
            mc.file(shader_file, r=True, ignoreVersion=True, mergeNamespacesOnClash=False, gl=True,
                    namespace=asset_ref_name, options='v=0;p=17;f=0', typ='mayaBinary', pr=True)
        else:
            mc.file(shader_file, i=True, ignoreVersion=True, mergeNamespacesOnClash=False,
                    namespace=asset_ref_name, options='v=0;p=17;f=0', typ='mayaBinary', pr=True)
    else:
        print('Shader is loaded.')
    # Read the shader link info and assign the shaders to the model.
    with open(look_file) as f:
        data = json.loads(f.read())
    link_list = data['shader_link']
    for k in link_list.keys():
        for s in link_list[k]:
            sg_name = '%s:%s' % (asset_ref_name, s)
            if mc.objExists(k):
                mc.sets(k, e=1, forceElement=sg_name)
                print("Assign shader %s to %s." % (sg_name, k))
            else:
                # Remap the stored root node name onto this asset's name.
                asset_n = k.split('|')[0]
                this_obj = k.replace(asset_n, self.__name)
                if mc.objExists(this_obj):
                    # BUGFIX: assign to the remapped object (this_obj); the
                    # original passed k, which was just found not to exist.
                    mc.sets(this_obj, e=1, forceElement=sg_name)
                    print("Assign shader %s to %s." % (sg_name, this_obj))
    print('Load shader for %s successful.' % self.name())
    return True
def postProcessControl ( _control, _function, _controlled ):    # <<< string, string, list
    """Post-process a freshly built animation control.

    Locks/hides attributes, registers the control in the shared
    'anim_control_set' object set, renames it after its function (and the
    controlled object, if any), wraps it in a zeroed offset group and — when
    something is controlled — snaps it onto the first controlled object.

    NOTE(review): several cmds calls below (rename/group/parent/xform with
    no explicit target) operate on Maya's current selection, so statement
    order is load-bearing.
    """
    lockHideAttributes ( _control )
    # Create the shared control set on first use, then add this control.
    if (cmds.objExists('anim_control_set') == False):
        cmds.createNode('objectSet',name='anim_control_set',skipSelect=True)
    cmds.sets ( _control, addElement = 'anim_control_set' )
    # Show the local axis on the (currently selected) control.
    cmds.toggle ( localAxis = 1 )
    if len ( _controlled ) == 0:
        # Free-floating control: generic name, zeroed offset group at origin.
        _control = cmds.rename ( '_' + _function + '_control' )
        _control = cmds.group ( world = 1, name = '_' + _function + '_control_offSet' )
        cmds.move ( 0, 0, 0, _control + '.rotatePivot', _control + '.scalePivot' )
        lockHideAttributes ( _control )
    else:
        # Name the control after the leaf name of the first controlled node.
        _k = _controlled[0].rfind ( '|' )
        _control = _controlled[0][_k+1:]
        cmds.rename ( _control + '_' + _function + '_control' )
        _control = cmds.group ( world = 1, name = _control + '_' + _function + '_control_offSet' )
        cmds.move ( 0, 0, 0, _control + '.rotatePivot', _control + '.scalePivot' )
        lockHideAttributes ( _control )
        # Temporarily parent the offset group under the controlled object to
        # zero it in that space, then move it back to world and step the
        # selection down to the control itself.
        cmds.select ( _controlled[0], toggle = True )
        cmds.parent ()
        cmds.xform ( translation = ( 0, 0, 0 ), rotation = ( 0, 0, 0 ) )
        cmds.parent ( world = 1 )
        cmds.pickWalk ( direction = "down" )
def applyAssetLooks(fileName,type):
    """Swap an asset's look reference to the requested type and re-apply it.

    fileName -- path of the asset file (a /SETUP/ or /MODEL/ path).
    type     -- look prefix to apply (e.g. 'LUK' or 'DPL'); the opposite
                prefix is the one removed.

    Derives the look file path from the asset path, removes the other look
    type's reference if present, references this look, then re-applies the
    shader assignments ('.ptAssign') and attribute connections ('.ptConnect')
    stored on each shading engine of the look.
    """
    # The look type we should unload is the opposite of the one requested.
    removeItem = 'LUK'
    if type == 'LUK':
        removeItem = 'DPL'
    base = fileName.split('/')[-1].split('.')[0]
    host = '%s/WORK/'%fileName.split('/WORK/')[0]
    # Replace the asset prefix (e.g. STP) with the look type prefix.
    prefix = base.split('_')[0]
    b = base.replace(prefix,type)
    base = 'LOOKS/%s/%s.ma'%(b,b)
    references = cmds.file(q=True,r=True)
    namespaces = []
    lookNamespaces = []
    # Collect namespaces of all valid references (look refs noted separately;
    # /MEDIA/ refs and refs without a namespace are skipped).
    for each in references:
        if '/LOOKS/' in each:
            lookNamespaces.append(cmds.referenceQuery(each,ns=True)[1:])
        if '/MEDIA/' in each:
            continue
        try:
            cmds.referenceQuery(each,namespace=True)
        except:
            continue
        namespaces.append(cmds.referenceQuery(each,ns=True)[1:])
    # Build the look file path next to the asset's SETUP/MODEL folder.
    lookFileName = ''
    if '/SETUP/' in fileName:
        fparts = fileName.split('/SETUP/')
        lookFileName = '%s/%s'%(fparts[0],base)
    if '/MODEL/' in fileName:
        fparts = fileName.split('/MODEL/')
        lookFileName = '%s/%s'%(fparts[0],base)
    # NOTE(review): replace('/','/') is a no-op — dead statement kept as-is.
    lookFileName=lookFileName.replace('/','/')
    # Remap the host work path onto the L: drive.
    lookFileName = lookFileName.replace(host,'L:')
    lookFileName = lookFileName.replace('W:/', 'L:/')
    removeLookFileName = lookFileName.replace('%s_'%type,'%s_'%removeItem)
    references = cmds.file(q=True,r=True)
    lookBase = None
    # Drop the opposite look type's reference, then reference (or reuse)
    # the requested look.
    if removeLookFileName in references:
        cmds.file(removeLookFileName,rr=True)
    if lookFileName not in references:
        cmds.file(lookFileName, ns=b, r=True)
        lookBase = b
    else:
        lookBase = cmds.referenceQuery(lookFileName, ns=True)[1:]
        # edits = cmds.referenceQuery(lookFileName, es=True, scs=True)
    for eachShader in cmds.ls('%s:*' % lookBase, type='shadingEngine'):
        # '.ptAssign' holds a '#'-separated list of member objects for this SG.
        if cmds.objExists('%s.ptAssign' % eachShader):
            objects = cmds.getAttr('%s.ptAssign' % eachShader)
            objects = objects.split('#')
            for obj in objects:
                if cmds.objExists(obj):
                    cmds.select(obj)
                    cmds.sets(e=True, fe=eachShader)
                else:
                    # Object not found verbatim: try each scene namespace and
                    # the Deformed-shape name variants.
                    for r in namespaces:
                        if cmds.objExists('%s:%sShapeDeformed' % (r, obj)):
                            cmds.select('%s:%sShapeDeformed' % (r, obj))
                            cmds.sets(e=True, fe=eachShader)
                        elif cmds.objExists('%s:%sDeformed'%(r,obj)):
                            cmds.select('%s:%sDeformed' % (r, obj))
                            cmds.sets(e=True, fe=eachShader)
                        elif cmds.objExists('%s:%s'%(r,obj)):
                            cmds.select('%s:%s'%(r,obj))
                            cmds.sets(e=True, fe=eachShader)
        # '.ptConnect' holds '#'-separated 'src,dest' attribute pairs that
        # must be re-connected into the look namespace.
        if cmds.objExists('%s.ptConnect' % eachShader):
            conStr = cmds.getAttr('%s.ptConnect' % eachShader)
            conPairs = conStr.split('#')
            for pairStr in conPairs:
                src, des = pairStr.split(',')
                des = '%s:%s'%(lookBase, des)
                if cmds.objExists(des):
                    if cmds.objExists(src):
                        cmds.connectAttr(src, des, f=True)
                    else:
                        # Source not found verbatim: try each scene namespace.
                        for r in namespaces:
                            srcNs = '%s:%s'%(r, src)
                            if cmds.objExists(srcNs):
                                print 'Connected: %s to %s' %(srcNs, des)
                                cmds.connectAttr(srcNs, des, f=True)
def collect(self, instance):
    """Collect look data (set relationships, attribute changes, textures)
    for *instance* and store it on ``instance.data``.

    Side effects: fills instance.data['lookData'] and
    instance.data['resources'], and extends the instance with the shader
    sets so they take part in ID validation.
    """
    self.log.info("Looking for look associations "
                  "for %s" % instance.data['name'])

    # Discover related object sets
    self.log.info("Gathering sets..")
    sets = self.collect_sets(instance)

    # Lookup set (optimization): membership tests against long names.
    instance_lookup = set(cmds.ls(instance, long=True))

    self.log.info("Gathering set relations..")
    # Ensure iteration happen in a list so we can remove keys from the
    # dict within the loop
    for objset in list(sets):
        self.log.debug("From %s.." % objset)

        # Get all nodes of the current objectSet (shadingEngine)
        for member in cmds.ls(cmds.sets(objset, query=True), long=True):
            member_data = self.collect_member_data(member, instance_lookup)
            if not member_data:
                continue
            # Add information of the node to the members list
            sets[objset]["members"].append(member_data)

        # Remove sets that didn't have any members assigned in the end
        # Thus the data will be limited to only what we need.
        if not sets[objset]["members"]:
            self.log.info("Removing redundant set information: "
                          "%s" % objset)
            sets.pop(objset, None)

    self.log.info("Gathering attribute changes to instance members..")
    attributes = self.collect_attributes_changed(instance)

    # Store data on the instance
    instance.data["lookData"] = {
        "attributes": attributes,
        "relationships": sets
    }

    # Collect file nodes used by shading engines (if we have any)
    files = list()
    looksets = sets.keys()
    # Shader slots inspected on each shading engine (incl. Arnold slots).
    shaderAttrs = [
        "surfaceShader",
        "volumeShader",
        "displacementShader",
        "aiSurfaceShader",
        "aiVolumeShader"
    ]
    materials = list()
    if looksets:
        # Gather every material plugged into one of the shader slots.
        for look in looksets:
            for at in shaderAttrs:
                con = cmds.listConnections("{}.{}".format(look, at))
                if con:
                    materials.extend(con)
        self.log.info("Found materials:\n{}".format(materials))
        self.log.info("Found the following sets:\n{}".format(looksets))
        # Get the entire node chain of the look sets
        # history = cmds.listHistory(looksets)
        history = list()
        for material in materials:
            history.extend(cmds.listHistory(material))
        # Texture nodes found in the material history ('file' + Arnold
        # 'aiImage').
        files = cmds.ls(history, type="file", long=True)
        files.extend(cmds.ls(history, type="aiImage", long=True))

    self.log.info("Collected file nodes:\n{}".format(files))
    # Collect textures if any file nodes are found
    instance.data["resources"] = [self.collect_resource(n) for n in files]
    self.log.info("Collected resources:\n{}".format(
        instance.data["resources"]))

    # Log a warning when no relevant sets were retrieved for the look.
    if not instance.data["lookData"]["relationships"]:
        self.log.warning("No sets found for the nodes in the instance: "
                         "%s" % instance[:])

    # Ensure unique shader sets
    # Add shader sets to the instance for unify ID validation
    instance.extend(shader for shader in looksets
                    if shader not in instance_lookup)

    self.log.info("Collected look for %s" % instance)
def BG_ASM_SET():
    """Build scene-assembly representations for background assets.

    For every parentless transform that has a proxy ('V*') child: keep the
    'V3' children, combine/reduce the geometry, build a transparent bounding
    box carrying a gpuCache shape, export a mentalray .mi proxy, and create
    a sceneAssembly definition with Cache/Scene representations, finally
    exporting the assembly to an ASM_*.mb file next to the scene.

    NOTE(review): this body was reconstructed from collapsed single-line
    source; the tail (assembly creation/export) is kept inside the
    per-target loop — confirm against the original file's indentation.
    """
    # Find top-level transforms that own a proxy child (name starts with 'V').
    trs = pymel.core.ls(tr=1)
    topNodes = []
    for tr in trs:
        if tr.getParent():
            continue
        children = tr.listRelatives(c=1)
        proxyExists = False
        for child in children:
            if child[0] == 'V':
                proxyExists = True
                break
        if proxyExists:
            topNodes.append(tr)
    targets = []
    # Freeze transforms before processing.
    pymel.core.makeIdentity(topNodes, apply=1, t=1, r=1, s=1, n=0, pn=1)
    # Keep only the 'V3' children; delete the other proxy LODs.
    for topNode in topNodes:
        children = topNode.listRelatives(c=1)
        for child in children:
            if child.find('V3') != -1:
                targets.append(topNode)
                continue
            pymel.core.delete(child)
    for target in targets:
        # Unhide the whole hierarchy so combine sees every shape.
        children = target.listRelatives(c=1, ad=1)
        for child in children:
            pymel.core.showHidden(child, a=1)
        # Combined + reduced (90%) representation.
        reducedObj = sgCmds.combineMultiShapes(target)
        pymel.core.polyReduce(reducedObj, ver=1, trm=0, p=90, vct=0, tct=0,
                              shp=0, keepBorder=1, keepMapBorder=1,
                              keepColorBorder=1, keepFaceGroupBorder=1,
                              keepHardEdge=1, keepCreaseEdge=1,
                              keepBorderWeight=0.5, keepMapBorderWeight=0.5,
                              keepColorBorderWeight=0.5,
                              keepFaceGroupBorderWeight=0.5,
                              keepHardEdgeWeight=0.5,
                              keepCreaseEdgeWeight=0.5,
                              useVirtualSymmetry=0, symmetryTolerance=0.01,
                              sx=0, sy=1, sz=0, sw=0, preserveTopology=1,
                              keepQuadsWeight=1, vertexMapName="",
                              replaceOriginal=1, cachingReduce=1, ch=1)
        reducedObj.rename(target.shortName() + '_reduced')
        pymel.core.select(reducedObj)
        cmds.DeleteHistory()
        # Bounding-box cube matching the target's world bounds.
        bb = pymel.core.exactWorldBoundingBox(target)
        bbmin = bb[:3]
        bbmax = bb[-3:]
        # Corner positions matching polyCube's vertex ordering.
        points = [[] for i in range(8)]
        points[0] = [bbmin[0], bbmin[1], bbmax[2]]
        points[1] = [bbmax[0], bbmin[1], bbmax[2]]
        points[2] = [bbmin[0], bbmax[1], bbmax[2]]
        points[3] = [bbmax[0], bbmax[1], bbmax[2]]
        points[4] = [bbmin[0], bbmax[1], bbmin[2]]
        points[5] = [bbmax[0], bbmax[1], bbmin[2]]
        points[6] = [bbmin[0], bbmin[1], bbmin[2]]
        points[7] = [bbmax[0], bbmin[1], bbmin[2]]
        boundingBoxObj = pymel.core.polyCube(
            ch=1, o=1, cuv=4, n=target.shortName() + '_gpu')[0]
        boundingBoxObjShape = boundingBoxObj.getShape()
        # Display as reference (non-selectable) in the viewport.
        boundingBoxObjShape.overrideEnabled.set(1)
        boundingBoxObjShape.overrideDisplayType.set(2)
        # Fully transparent lambert so the box itself never renders visibly.
        newLambert = pymel.core.shadingNode('lambert', asShader=1)
        newLambertSG = pymel.core.sets(renderable=1, noSurfaceShader=1,
                                       empty=1, name=newLambert + 'SG')
        newLambert.outColor >> newLambertSG.surfaceShader
        newLambert.transparency.set(1, 1, 1)
        cmds.sets(boundingBoxObj.name(), e=1, forceElement=newLambertSG.name())
        # Snap the cube's vertices onto the bounding-box corners.
        for i in range(8):
            pymel.core.move(points[i][0], points[i][1], points[i][2],
                            boundingBoxObj + '.vtx[%d]' % i)
        sceneName = cmds.file(q=1, sceneName=1)
        gpuPath = os.path.dirname(sceneName)
        pymel.core.select(target)
        # Drop to lowest display smoothness before caching.
        mel.eval('displaySmoothness -divisionsU 0 -divisionsV 0 -pointsWire 4 -pointsShaded 1 -polygonObject 1;')
        targetParents = cmds.listRelatives(target.name(), p=1, f=1)
        targetPos = cmds.getAttr(target + '.m')
        # Zero the transform while exporting the gpu cache, restore after.
        cmds.xform(target.name(), os=1,
                   matrix=sgCmds.getListFromMatrix(OpenMaya.MMatrix()))
        abcPath = cmds.gpuCache(target.name(), startTime=1, endTime=1,
                                optimize=1, optimizationThreshold=1000,
                                writeMaterials=0, dataFormat='ogawa',
                                directory=gpuPath,
                                fileName=target.replace('|', '_'),
                                saveMultipleFiles=False)[0]
        cmds.xform(target.name(), os=1, matrix=targetPos)
        # Build a gpuCache node pointing at the exported cache and graft its
        # shape under the bounding-box transform.
        gpuObjName = target.split('|')[-1] + '_gpuOrig'
        gpuCacheNode = cmds.createNode('gpuCache')
        gpuCacheObj = cmds.listRelatives(gpuCacheNode, p=1, f=1)[0]
        gpuCacheObj = cmds.rename(gpuCacheObj, gpuObjName)
        gpuShapeName = cmds.listRelatives(gpuCacheObj, c=1, f=1)[0]
        cmds.setAttr(gpuShapeName + '.cacheFileName', abcPath, type='string')
        if targetParents:
            gpuCacheObj = cmds.parent(gpuCacheObj, targetParents[0])
        cmds.xform(gpuCacheObj, os=1, matrix=targetPos)
        gpuCacheObj = pymel.core.ls(gpuCacheObj)[0]
        src = target
        gpuShape = gpuCacheObj.getShape()
        pymel.core.parent(gpuShape, boundingBoxObj, shape=1, add=1)
        pymel.core.delete(gpuCacheObj)
        others = [reducedObj, boundingBoxObj]
        sceneName = cmds.file(q=1, sceneName=1)
        fileName = sceneName.split('/')[-1].split('.')[0]
        targetPath = '.'.join(sceneName.split('.')[:-1]) + '.mi'
        pymel.core.select(target)
        # Restore full display smoothness for the mentalray export.
        mel.eval('displaySmoothness -divisionsU 3 -divisionsV 3 -pointsWire 16 -pointsShaded 4 -polygonObject 3;')
        pymel.core.select(src)
        # Export the .mi proxy twice: via cmds.file and via Mayatomr.
        cmds.file(
            targetPath,
            options=
            'binary=1;compression=0;tabstop=8;perframe=0;padframe=0;perlayer=1;pathnames=3313323333;assembly=0;fragment=0;fragsurfmats=0;fragsurfmatsassign=0;fragincshdrs=0;fragchilddag=0;passcontrimaps=1;passusrdata=1;overrideAssemblyRootName=0;assemblyRootName=binary=1;compression=0;tabstop=8;perframe=0;padframe=0;perlayer=0;pathnames=3313333333;assembly=1;fragment=1;fragsurfmats=1;fragsurfmatsassign=1;fragincshdrs=1;fragchilddag=1;passcontrimaps=1;passusrdata=0;filter=00000011010000001101000;overrideAssemblyRootName=0;assemblyRootName=',
            typ='mentalRay', pr=1, es=1, force=1)
        mel.eval(
            'Mayatomr -mi -exportFilter 721600 -active -binary -fe -fem -fma -fis -fcd -pcm -as -asn "%s" -xp "3313333333" -file "%s"'
            % (fileName, targetPath))
        # Point the stand-in meshes at the exported proxy file.
        for other in others:
            otherShape = other.getShape()
            if otherShape.nodeType() == 'mesh':
                otherShape.miUpdateProxyBoundingBoxMode.set(3)
                otherShape.miProxyFile.set(targetPath)
        target.rename(target + '_orig')
        folderPath = os.path.dirname(cmds.file(q=1, sceneName=1))
        fileName = cmds.file(q=1, sceneName=1).split('/')[-1].split('.')[0]
        # Create the assembly definition (loading the plugin on demand).
        if not cmds.pluginInfo('sceneAssembly', q=1, l=1):
            cmds.loadPlugin('sceneAssembly')
        mel.eval('assemblyCreate assemblyDefinition')
        asmNode = pymel.core.ls(type='assemblyDefinition')[-1]
        asmNode.rename('ASM_' + fileName)
        # Start from a clean representation list.
        reps = pymel.core.assembly(asmNode, q=1, listRepresentations=1)
        if reps:
            for rep in reps:
                pymel.core.assembly(asmNode, e=1, deleteRepresentation=rep)
        index = 0
        repNames = []
        # One representation per stand-in: gpuCache shapes become 'Cache'
        # representations; everything else is exported to .mb and added as
        # a 'Scene' representation.
        for sel in [boundingBoxObj, reducedObj, target]:
            selShape = sel.getShape()
            repName = sel.split('_')[-1]
            repNames.append(repName)
            if selShape:
                if selShape.nodeType() == 'gpuCache':
                    pymel.core.assembly(asmNode, edit=True,
                                        createRepresentation='Cache',
                                        input=selShape.cacheFileName.get(),
                                        repName=repName)
                    asmNode.attr('representations')[index].repLabel.set(
                        repName)
                    index += 1
                    continue
            pymel.core.select(sel)
            filePath = folderPath + '/ASMOBJ_' + sel.shortName() + '.mb'
            cmds.file(filePath, force=1, options="v=0;", typ="mayaBinary",
                      pr=1, es=1)
            pymel.core.assembly(asmNode, edit=True,
                                createRepresentation='Scene',
                                input=filePath, repName=repName)
            asmNode.attr('representations')[index].repLabel.set(repName)
            index += 1
        # Export the assembly definition next to the scene.
        scenePath = cmds.file(q=1, sceneName=1)
        folderName = os.path.dirname(scenePath)
        fileName = scenePath.split('/')[-1].split('.')[0]
        exportPath = folderName + '/ASM_' + fileName + '.mb'
        pymel.core.select(asmNode)
        cmds.file(exportPath, force=1, options="v=0;", typ="mayaBinary",
                  pr=1, es=1)
def get_member(node):
    """Return the transform members of the object set *node* as a set."""
    members = cmds.sets(node, query=True)
    transforms = cmds.ls(members, type="transform")
    return set(transforms)
def _apply_crease_edges(look, relationship, nodes):
    """Re-create crease-edge sets for a look and register them in its container.

    look         -- container dict providing 'namespace' and 'objectName'.
    relationship -- crease relationship data handed to lib.apply_crease_edges.
    nodes        -- nodes the crease sets should be applied to.
    """
    # Strip the first character of the namespace (presumably a leading ':'
    # — TODO confirm against the container schema).
    ns = look["namespace"][1:]
    created_sets = lib.apply_crease_edges(relationship, ns, nodes=nodes)
    # Add the new crease sets to the look's container object set.
    cmds.sets(created_sets, forceElement=look["objectName"])
import maya.cmds as cmds
import pymel.core as pm
import os
import math
import random
import functools

# Module-level shading setup: a brown lambert for the tree trunk and a green
# lambert for the foliage, each wired into its own shading group at import
# time.
treeTrunkShader = cmds.shadingNode('lambert', asShader=True)
cmds.setAttr(treeTrunkShader + '.color', 0.4, 0.3, 0.3, type='double3')
treeTrunkShaderSG = cmds.sets(renderable=1, noSurfaceShader=1, empty=1, name='treeTrunkShaderSG');
cmds.connectAttr(treeTrunkShader + '.outColor', treeTrunkShaderSG + '.surfaceShader', f=1)

foliageShader = cmds.shadingNode('lambert', asShader=True)
cmds.setAttr(foliageShader + '.color', 0.30, 0.7, 0.40, type='double3')
foliageShaderSG = cmds.sets(renderable=True, noSurfaceShader=True, empty=True, name='foliageShaderSG');
cmds.connectAttr(foliageShader + '.outColor', foliageShaderSG + '.surfaceShader', force=True)


class Minitree():
    """UI front-end for the miniTree generator; builds its window on init."""

    def __init__(self):
        # Build the window immediately, wiring the callbacks defined on this
        # class (apply/save/load preset handlers).
        self.create_ui('miniTree', self.apply_call_back, self.save_preset, self.load_preset)

    def create_ui(self, pWindowTitle, pApplyCallBack, pSavePreset, pLoadPreset):
        """Create (or re-create) the single 'miniTree' window.

        NOTE(review): this method appears truncated at this chunk boundary —
        the layout is opened but no controls are added here.
        """
        windowID = 'miniTree'
        # Only one instance of the window may exist at a time.
        if cmds.window(windowID, exists=True):
            cmds.deleteUI(windowID)
        cmds.window(windowID, title=pWindowTitle, sizeable=False, resizeToFitChildren=True, w = 100)
        cmds.rowColumnLayout(numberOfColumns=1)
def loadLooks(type='DPL', refNode=None):
    """Load and apply look references for the scene (or one reference).

    type    -- look prefix to load ('DPL' or 'LUK').
    refNode -- optional reference node; when omitted every reference in the
               scene is processed.

    For each valid asset reference this finds the matching look file,
    references it if needed, assigns its shading engines via the
    'memeAssign'/'ptAssign' string attributes and re-creates rig->shader
    connections from 'memeConnect'/'ptConnect'.
    """
    # print 'Cleaning Looks'
    # --- scene cleanup: stray shared reference nodes and temp time range.
    sharedR = cmds.ls('sharedReferenceNode*')
    if cmds.objExists('setupTimeRange'):
        cmds.delete('setupTimeRange')
    # In lighting scenes, switch back to the default render layer first.
    if '/LIGHTING/' in cmds.file(q=True, sn=True):
        try:
            cmds.editRenderLayerGlobals(currentRenderLayer='defaultRenderLayer')
        except:
            print 'Failed updating shaders'
            return
    if sharedR:
        cmds.delete(sharedR)
    # --- if refNode is not passed, just take all references in the scene
    refs = []
    if not refNode:
        refs = cmds.file(q=True,r=True)
    else:
        refs = [cmds.referenceQuery(refNode, f=True)]
    # --- filter out invalid references...
    filteredRefs = []
    for each in refs:
        if '/LOOKS/' in each:
            # '{' marks a duplicated reference path; remove those.
            if '{' in each:
                try:
                    cmds.file(each,rr=True)
                except Exception as e:
                    print e
            '''
            else:
                #cmds.file(each,rr=True)
                try:
                    rfn = cmds.referenceQuery(each,rfn=True)
                    cmds.file(ur=rfn)
                    cmds.referenceEdit(rfn,r=True)
                    cmds.file(lr=rfn)
                except Exception as e:
                    print e
            '''
            continue
        # skip sound references
        if '.wav' in each:
            continue
        # skip references without use of namespace
        try:
            cmds.referenceQuery(each, namespace=True)
        except Exception as e:
            print each
            print e
            continue
        filteredRefs.append(each)
    # print 'Cleaned Looks'
    # --- looping each filter reference paths...
    for each in filteredRefs:
        shadingSets = []
        isCrowd = False
        nameSpace = cmds.referenceQuery(each,namespace=True)[1:]
        # --- reference looks, collect shading sets...
        # crowd case
        if '__CROWD.abc' in each:
            print 'Applying looks to crowd: %s' %each
            # Crowd look lives next to the alembic with a .ma extension.
            lookName = each.replace('.abc', '.ma')
            lookBase = lookName.split('/')[-1].split('.')[0]
            if lookName not in cmds.file(q=True,r=True):
                lookName = cmds.file(lookName, ns=lookBase, sharedReferenceFile=False, r=True)
            lookBase = cmds.referenceQuery(lookName, namespace=True)[1:]
            shadingSets = cmds.ls('%s:*' % lookBase, type='shadingEngine')
            isCrowd = True
        # normal case
        elif '/SETUP/STP_' in each or '/PASSIVE/PAS_' in each or '/GEO/GEO_' in each or '/RENDER/PAS_' in each:
            # if this is a passive asset, look for setup path in memeName attribute on Geo_Grp
            if '/PASSIVE/PAS_' in each or '/RENDER/PAS_' in each:
                if cmds.objExists('%s:Geo_Grp.memeName'%nameSpace):
                    each = cmds.getAttr('%s:Geo_Grp.memeName'%nameSpace)
                else:
                    continue
            # Derive the look file path from the SETUP/GEO path by swapping
            # folder and prefix.
            lookName = None
            lookBase = None
            if '/SETUP/STP_' in each:
                lookName = each.replace('.mb', '.ma')
                lookName = lookName.replace('/SETUP/', '/LOOKS/')
                lookBase = lookName.split('/')[-1].split('.')[0].replace('STP_', '%s_' %type)
                lookParts = lookName.split('/STP_')[0]
                lookName = '%s/%s/%s.ma' % (lookParts, lookBase, lookBase)
            elif '/GEO/GEO_' in each:
                lookName = each.replace('/GEO/', '/LOOKS/')
                lookBase = lookName.split('/')[-1].split('.')[0].replace('GEO_', '%s_' %type)
                lookParts = lookName.split('/GEO_')[0]
                lookName = '%s/%s/%s.ma' % (lookParts, lookBase, lookBase)
            if lookName:
                if not os.path.exists(lookName):
                    print 'Look path does not exists: %s' %lookName
                    continue
            # --- figure out if adding new reference is needed
            addNewRef = True
            # if already have the reference
            if lookName in cmds.file(q=True,r=True):
                lookBase = cmds.referenceQuery(lookName, ns=True)[1:]
                shadingSets = cmds.ls('%s:*' % lookBase, type='shadingEngine')
                sgWithMemeConnect = [s for s in shadingSets
                                     if cmds.objExists('%s.memeConnect' %s)
                                     or cmds.objExists('%s.ptConnect' %s)]
                # if none of the SGs has ptConnect/memeConnect, reuse the look
                if not sgWithMemeConnect:
                    addNewRef = False
            if not lookName:
                addNewRef = False
            # need to add new reference for the look
            if addNewRef:
                referedPath = cmds.file(lookName, ns=lookBase,sharedReferenceFile=False, r=True)
                lookBase = cmds.referenceQuery(referedPath, ns=True)[1:]
                # list all shading engines within the namespace
                shadingSets = cmds.ls('%s:*' % lookBase, type='shadingEngine')
            # also list RedshiftMeshParameters
            if type == 'LUK':
                shadingSets.extend(cmds.ls('%s:*'%lookBase, type='RedshiftMeshParameters'))
            # assign lambert1 to all of geoGrp first
            geoGrpChildren = cmds.listRelatives('%s:Geo_Grp' %nameSpace, ad=True, type='transform', f=True)
            if shadingSets and geoGrpChildren:
                geos = []
                for tr in geoGrpChildren:
                    shapes = cmds.listRelatives(tr, shapes=True, type='mesh', f=True, ni=True)
                    if not shapes:
                        continue
                    shp = shapes[0]
                    geos.append(shp)
                    # get rid of connections to reference node placeHolderList (failed connectAttr edits)
                    refConnections = cmds.listConnections(shp, d=True, s=False, p=True, c=True, type='reference')
                    if refConnections:
                        for s, d in zip(refConnections[0::2], refConnections[1::2]):
                            if '.placeHolderList' in d:
                                try:
                                    cmds.disconnectAttr(s, d)
                                except Exception, e:
                                    print e
                                    print 'Cannot disconnect %s from placeHolderList' %shp
                cmds.sets(geos, e=True, fe='initialShadingGroup', nw=True)
        # ---------------------------------------------------------------------------------
        # looping each shadingSets
        for eachShader in shadingSets:
            setType = cmds.nodeType(eachShader)
            # assign/add to sets: membership is stored as a '#'-separated
            # string on the memeAssign/ptAssign attribute.
            asignAttr = None
            if cmds.objExists('%s.memeAssign' % eachShader):
                asignAttr = '%s.memeAssign' % eachShader
            elif cmds.objExists('%s.ptAssign' % eachShader):
                asignAttr = '%s.ptAssign' % eachShader
            if asignAttr:
                objects = cmds.getAttr(asignAttr)
                for obj in objects.split('#'):
                    # Resolve the stored name against this asset's namespace,
                    # trying the Deformed/face-component variants.
                    # NOTE(review): chain order reconstructed from collapsed
                    # source — confirm the ShapeDeformed fallback position.
                    extingObj = None
                    if cmds.objExists('%s:%s' % (nameSpace, obj)):
                        extingObj = '%s:%s' % (nameSpace, obj)
                    elif cmds.objExists('%s:%sDeformed' % (nameSpace, obj)):
                        extingObj = '%s:%sDeformed' % (nameSpace, obj)
                    elif '.f[' in obj:
                        splits = obj.split('.')
                        objName = '%s:%sDeformed.%s' %(nameSpace, splits[0], splits[1])
                        if cmds.objExists(objName):
                            extingObj = objName
                    elif cmds.objExists('%s:%sShapeDeformed' % (nameSpace, obj)):
                        extingObj = '%s:%sShapeDeformed' % (nameSpace, obj)
                    objToAssign = None
                    fSplit = ''
                    if extingObj:
                        if not isCrowd:
                            # split .f
                            if '.f[' in extingObj:
                                objSplits = extingObj.split('.f[')
                                extingObj = objSplits[0]
                                fSplit = objSplits[1]
                            objTyp = cmds.nodeType(extingObj)
                            if objTyp == 'transform':
                                # get the shape
                                shapes = cmds.listRelatives(extingObj, s=True, ni=True, pa=True, type='mesh')
                                if shapes:
                                    objToAssign = shapes[0]
                                else:
                                    objToAssign = extingObj
                            elif objTyp == 'mesh':
                                # get the transform first
                                trs = cmds.listRelatives(extingObj, p=True)
                                if trs:
                                    # then get the most available shape
                                    shapes = cmds.listRelatives(trs[0], s=True, ni=True, pa=True, type='mesh')
                                    if shapes:
                                        objToAssign = shapes[0]
                        else:
                            # if it's a crowd asset just take the name from strings
                            objToAssign = extingObj
                    if objToAssign:
                        # Re-attach the face component if one was split off.
                        if fSplit:
                            objToAssign = '%s.f[%s' %(objToAssign, fSplit)
                        if setType == 'shadingEngine':
                            try:
                                # print 'assigning %s with %s' %(objToAssign, eachShader)
                                cmds.sets(objToAssign, e=True, fe=eachShader, nw=True)
                            except Exception as e:
                                pass
                                # print e
                        elif setType == 'RedshiftMeshParameters':
                            try:
                                # print 'RS %s with %s' %(objToAssign, eachShader)
                                cmds.sets(objToAssign, e=True, add=eachShader, nw=True)
                            except Exception as e:
                                pass
                                # print e
            # connect rig to shaders: 'src,dest' pairs separated by '#'.
            conAttr = None
            if cmds.objExists('%s.memeConnect' % eachShader):
                conAttr = '%s.memeConnect' % eachShader
            elif cmds.objExists('%s.ptConnect' % eachShader):
                conAttr = '%s.ptConnect' % eachShader
            if conAttr:
                conStr = cmds.getAttr(conAttr)
                conPairs = conStr.split('#')
                for pairStr in conPairs:
                    src, des = pairStr.split(',')
                    src = '%s:%s' % (nameSpace, src)
                    des = '%s:%s' % (lookBase, des)
                    if cmds.objExists(src) and cmds.objExists(des) and not cmds.isConnected(src, des):
                        #print 'Connected: %s to %s' % (src, des)
                        try:
                            cmds.connectAttr(src, des, f=True)
                        except Exception as e:
                            pass
xforms.append(cmds.listRelatives(mesh, p=1)[0]) # make a new renderLayer if it doesn't already exist. layers = cmds.ls(type='renderLayer') if layerName in layers: cmds.editRenderLayerGlobals(crl=layerName) if layerName != 'defaultRenderLayer': cmds.editRenderLayerMembers(layerName, xforms) modLayers.append(layerName) else: cmds.createRenderLayer(xforms, mc=1, name=layerName) cmds.setAttr(layerName + '.renderable', 0) importLayers.append(layerName) # assign material to objects if material isn't default. if shadingGroup.split(':')[-1] != 'initialShadingGroup': cmds.select(meshes, r=1) cmds.sets(e=1, forceElement=shadingGroup) if int(doOverrides) == 1: for mesh in meshes: if cmds.getAttr(sourceGeo + '.castsShadows') != cmds.getAttr( mesh + '.castsShadows'): cmds.setAttr(mesh + '.castsShadows', cmds.getAttr(sourceGeo + '.castsShadows')) if cmds.getAttr(sourceGeo + '.receiveShadows') != cmds.getAttr( mesh + '.receiveShadows'): cmds.setAttr(mesh + '.receiveShadows', cmds.getAttr(sourceGeo + '.receiveShadows')) if cmds.getAttr(sourceGeo + '.primaryVisibility' ) != cmds.getAttr(mesh + '.primaryVisibility'): cmds.setAttr( mesh + '.primaryVisibility', cmds.getAttr(sourceGeo + '.primaryVisibility'))
def refresh(self):
    """Rebuild the model from the host's loaded ModelLoader containers.

    For every model container: looks up its version and the latest
    available version/representation in the database, resolves the
    'modelProtected' id list, and adds one subset item with one child item
    per referenced transform that carries a mesh shape.
    """
    # Only containers loaded through ModelLoader are of interest here.
    model_containers = list()
    host = api.registered_host()
    for container in host.ls():
        if container["loader"] == "ModelLoader":
            model_containers.append(container)
    self.clear()
    self.beginResetModel()
    for container in model_containers:
        subset_id = io.ObjectId(container["subsetId"])
        version_id = io.ObjectId(container["versionId"])
        version = io.find_one({"_id": version_id})
        # Latest version of this subset (name sorted descending).
        latest = io.find_one({
            "type": "version",
            "parent": subset_id
        }, sort=[("name", -1)], projection={"name": True})
        latest_repr = io.find_one({
            "type": "representation",
            "parent": latest["_id"],
            "name": "mayaBinary"
        })
        # Is latest version loaded ?
        is_latest = latest["name"] == version["name"]
        # Walk versions newest-first until one carries 'modelProtected'.
        # NOTE(review): this rebinds `version` (shadowing the loaded one)
        # and leaves `protected` unbound if the cursor is empty — presumably
        # at least one version always exists; confirm.
        versions = io.find({
            "type": "version",
            "parent": subset_id
        }, sort=[("name", -1)])
        for version in versions:
            repr = io.find_one({
                "type": "representation",
                "parent": version["_id"],
                "name": "mayaBinary"
            })
            protected = repr["data"].get("modelProtected")
            if protected is not None:
                # Get protected list from previous version if not found
                break
        protected = protected or set()
        namespace = container["namespace"]
        subset_group = container["subsetGroup"]
        subset_item = models.Item()
        subset_item.update({
            "subsetId": subset_id,
            "representation": latest_repr,
            "namespace": namespace,
            "node": subset_group,
            # Leaf name with the namespace prefix stripped.
            "name": subset_group.rsplit("|", 1)[-1][len(namespace):],
            "isLatest": is_latest,
        })
        # One child item per referenced transform holding a mesh shape.
        members = cmds.sets(container["objectName"], query=True)
        for node in cmds.ls(members, type="transform", referencedNodes=True, long=True):
            meshes = cmds.listRelatives(node, shapes=True, noIntermediate=True, type="mesh")
            if not meshes:
                continue
            id = utils.get_id(node)
            is_locked = id in protected
            node_item = models.Item()
            node_item.update({
                "node": node,
                "name": node.rsplit("|", 1)[-1][len(namespace):],
                "avalonId": id,
                "isLocked": is_locked,
                "isLatest": is_latest,
                "setLocked": None,
            })
            subset_item.add_child(node_item)
        self.add_child(subset_item)
    self.endResetModel()
if cmds.objExists(src) and cmds.objExists(des) and not cmds.isConnected(src, des): #print 'Connected: %s to %s' % (src, des) try: cmds.connectAttr(src, des, f=True) except Exception as e: pass # print e # remove unused looks toRem = [] for ref in [r for r in cmds.file(q=True, r=True) if '/LOOKS/' in r]: ns = cmds.referenceQuery(ref, ns=True)[1:] sgs = cmds.ls('%s:*' %ns, type='shadingEngine') for sg in sgs: if cmds.sets(sg, q=True): break else: toRem.append(ref) for ref in toRem: try: cmds.file(ref, rr=True) except Exception as e: print e print "Shader update sucessful" def applyAssetLooks(fileName,type): removeItem = 'LUK'
def generateConnectedTestScene(self, shadingNodeAttributes, ignoreColorSpaceFileRules=False):
    """
    Generate test scene containing a UsdPreviewSurface with bindings to
    other shading nodes exports correctly.

    :type shadingNodeAttributes: List[Tuple[str, Any]]
    :type ignoreColorSpaceFileRules: bool

    Returns the path of the saved Maya ASCII scene. Also asserts (via the
    enclosing TestCase) that Maya's colour-space file rules set the
    expected colorSpace/gain/offset values on each created file node.
    """
    maya_file = os.path.join(self.temp_dir, "UsdExportConnectedUsdPreviewSurfaceTest.ma")
    cmds.file(force=True, new=True)

    # A sphere grouped twice: UsdPreviewSurfaceExportTest/Geom/<mesh>.
    mesh = "ConnectedMaterialSphere"
    cmds.polySphere(name=mesh, subdivisionsX=20, subdivisionsY=20, radius=1)
    cmds.group(mesh, name="Geom")
    cmds.group("Geom", name="UsdPreviewSurfaceExportTest")

    # The shader under test, with caller-provided attribute values
    # (Gf.Vec3f values are expanded to three floats).
    shading_node = "usdPreviewSurface_Connected"
    cmds.shadingNode("usdPreviewSurface", name=shading_node, asShader=True)
    for attr in shadingNodeAttributes:
        if isinstance(attr[1], Gf.Vec3f):
            cmds.setAttr(
                "%s.%s" % (shading_node, attr[0]),
                attr[1][0],
                attr[1][1],
                attr[1][2],
            )
        else:
            cmds.setAttr("%s.%s" % (shading_node, attr[0]), attr[1])

    texture_dir = os.path.join(self.input_dir, "UsdExportUsdPreviewSurfaceTest")

    # Colour texture -> diffuseColor.
    cmds.defaultNavigation(createNew=True, destination="%s.diffuseColor" % shading_node)
    file_node = cmds.shadingNode("file", asTexture=True, name="Brazilian_Rosewood_Texture")
    cmds.setAttr(file_node + ".ignoreColorSpaceFileRules", ignoreColorSpaceFileRules)
    cmds.setAttr(
        file_node + ".fileTextureName",
        os.path.join(texture_dir, "Brazilian_rosewood_pxr128.png"),
        type="string",
    )
    cmds.connectAttr("%s.outColor" % file_node, "%s.diffuseColor" % shading_node, force=True)

    # This file node should have stayed "sRGB":
    if not ignoreColorSpaceFileRules:
        self.assertEqual(cmds.getAttr(file_node + ".colorSpace"), "sRGB")

    # Bump texture (single channel) -> roughness.
    cmds.defaultNavigation(createNew=True, destination="%s.roughness" % shading_node)
    file_node = cmds.shadingNode("file", asTexture=True, name="Brazilian_Rosewood_Bump_Texture")
    cmds.setAttr(file_node + ".ignoreColorSpaceFileRules", ignoreColorSpaceFileRules)
    cmds.setAttr(
        file_node + ".fileTextureName",
        os.path.join(texture_dir, "Brazilian_rosewood_pxr128_bmp.png"),
        type="string",
    )
    cmds.connectAttr("%s.outColorR" % file_node, "%s.roughness" % shading_node, force=True)

    # The monochrome file node should have been set to "Raw" automatically:
    if not ignoreColorSpaceFileRules:
        self.assertEqual(cmds.getAttr(file_node + ".colorSpace"), "Raw")

    # Same bump texture also drives clearcoatRoughness.
    cmds.defaultNavigation(createNew=True, destination="%s.clearcoatRoughness" % shading_node)
    cmds.connectAttr(
        "%s.outColorR" % file_node,
        "%s.clearcoatRoughness" % shading_node,
        force=True,
    )

    # Normal map texture -> normal.
    cmds.defaultNavigation(createNew=True, destination="%s.normal" % shading_node)
    file_node = cmds.shadingNode("file", asTexture=True, name="Brazilian_Rosewood_Normal_Texture")
    cmds.setAttr(
        file_node + ".fileTextureName",
        os.path.join(texture_dir, "Brazilian_rosewood_pxr128_n.png"),
        type="string",
    )
    cmds.connectAttr("%s.outColor" % file_node, "%s.normal" % shading_node, force=True)

    # The file node should have been set to "NormalMap" automatically:
    # Raw colour space plus the [-1, 1] remap (gain 2, offset -1).
    self.assertEqual(cmds.getAttr(file_node + ".colorSpace"), "Raw")
    self.assertEqual(cmds.getAttr(file_node + ".colorGainR"), 2)
    self.assertEqual(cmds.getAttr(file_node + ".colorGainG"), 2)
    self.assertEqual(cmds.getAttr(file_node + ".colorGainB"), 2)
    self.assertEqual(cmds.getAttr(file_node + ".colorOffsetR"), -1)
    self.assertEqual(cmds.getAttr(file_node + ".colorOffsetG"), -1)
    self.assertEqual(cmds.getAttr(file_node + ".colorOffsetB"), -1)
    self.assertEqual(cmds.getAttr(file_node + ".alphaGain"), 1)
    self.assertEqual(cmds.getAttr(file_node + ".alphaOffset"), 0)

    # Wire the shader into a shading group and assign the mesh to it.
    shading_engine = "%sSG" % shading_node
    cmds.sets(renderable=True, noSurfaceShader=True, empty=True, name=shading_engine)
    cmds.connectAttr(
        "%s.outColor" % shading_node,
        "%s.surfaceShader" % shading_engine,
        force=True,
    )
    cmds.sets(mesh, edit=True, forceElement=shading_engine)

    # Save as Maya ASCII and hand the path back to the caller.
    cmds.file(rename=maya_file)
    cmds.file(save=True, type="mayaAscii")
    self.assertTrue(os.path.exists(maya_file))
    return maya_file
def testIsolateSelect(self):
    """Exercise Maya's isolate-select against USD prims in a proxy shape.

    Loads a 5-prim USD stage, then snapshots the viewport after each
    isolate-select operation (state on/off, load/add/remove selection,
    undo/redo, direct set edits, reparenting, and auto-load modes),
    comparing each snapshot against a baseline image.
    """
    cmds.file(force=True, new=True)
    mayaUtils.loadPlugin("mayaUsdPlugin")
    panel = mayaUtils.activeModelPanel()
    # Stage with Cube1, Cylinder1, Capsule1, Cone1, Xform1.
    usdaFile = testUtils.getTestScene("setsCmd", "5prims.usda")
    proxyDagPath, sphereStage = mayaUtils.createProxyFromFile(usdaFile)
    usdCube = proxyDagPath + ",/Cube1"
    usdCylinder = proxyDagPath + ",/Cylinder1"
    usdCapsule = proxyDagPath + ",/Capsule1"
    usdCone = proxyDagPath + ",/Cone1"
    usdXform = proxyDagPath + ",/Xform1"

    # Fixed camera for reproducible snapshots.
    cmds.move(-4, -24, 0, "persp")
    cmds.rotate(90, 0, 0, "persp")

    globalSelection = ufe.GlobalSelection.get()
    globalSelection.clear()
    self.assertSnapshotClose('unselected.png')

    # Turn on isolate select for cube
    cmds.select(usdCube)
    cmds.isolateSelect(panel, state=1)
    self.assertSnapshotClose('cube.png')

    # Replace isolate select cube with cylinder
    cmds.select(usdCylinder)
    cmds.isolateSelect(panel, loadSelected=True)
    self.assertSnapshotClose('cylinder.png')

    # Add capsule to isolate select
    cmds.select(usdCapsule)
    cmds.isolateSelect(panel, addSelected=True)
    self.assertSnapshotClose('cylinderAndCapsule.png')

    # Remove capsule from isolate select
    cmds.isolateSelect(panel, removeSelected=True)
    self.assertSnapshotClose('cylinderAfterCapsuleRemove.png')

    # Undo, Redo
    cmds.undo()  # Undo remove capsule from isolate select
    self.assertSnapshotClose('undoCapsuleRemove.png')
    cmds.redo()  # Redo remove capsule from isolate select
    self.assertSnapshotClose('redoCapsuleRemove.png')
    cmds.undo()  # Undo remove capsule from isolate select
    cmds.undo()  # Undo add capsule to isolate select
    self.assertSnapshotClose('undoCapsuleAdd.png')

    # Turn off isolate select
    cmds.isolateSelect(panel, state=0)
    self.assertSnapshotClose('isolateSelectOff.png')

    # Create an isolate select set, then add something directly to it
    cmds.isolateSelect(panel, state=1)
    isolateSelectSet = "modelPanel4ViewSelectedSet"
    cmds.sets(usdCube, add=isolateSelectSet)
    cmds.isolateSelect(panel, update=True)
    self.assertSnapshotClose('capsuleAndCube.png')

    # The flags addDagObject and removeDagObject don't
    # work with USD items.

    # Add the cone to the isolate select
    # different from addSelected because it filters out components
    cmds.select(usdCone)
    cmds.isolateSelect(panel, addSelectedObjects=True)
    self.assertSnapshotClose('capsuleAndCubeAndCone.png')

    # Translate Xform1 and reparent Cube1 under Xform1
    cmds.select(usdXform)
    cmds.move(0, 0, 1, relative=True)
    cmds.select(clear=True)
    cmds.parent(usdCube, usdXform, relative=True)
    cmds.isolateSelect(panel, update=True)
    usdCube = usdXform + "/Cube1"
    self.assertSnapshotClose('reparentedCube.png')

    # Reparent Cube1 back
    cmds.parent(usdCube, proxyDagPath, relative=True)
    cmds.isolateSelect(panel, update=True)
    usdCube = proxyDagPath + ",/Cube1"
    self.assertSnapshotClose('reparentedCubeBack.png')

    #reparent the proxy shape
    locatorShape = cmds.createNode("locator")
    locator = "|" + cmds.listRelatives(locatorShape, parent=True)[0]
    cmds.move(0, 0, 5, locator)
    cmds.parent("|stage", locator, relative=True)
    usdCube = locator + usdCube
    self.assertSnapshotClose('reparentedProxyShape.png')
    cmds.undo()  #undo reparent so that _createPrim works
    usdCube = proxyDagPath + ",/Cube1"

    #Auto load new objects
    usdXformItem = self._stringToUfeItem(usdXform)
    usdXformCone = self._createPrim(usdXformItem, 'Cone', '/Xform1/Cone1')
    cmds.select(usdXformCone)
    cmds.move(-8.725, 0, 2)
    self.assertSnapshotClose('autoLoadNewObjects.png')

    #Auto load selected objects
    cmds.editor(panel, edit=True, unlockMainConnection=True)
    self.assertSnapshotClose('autoLoadSelected_xformCone.png')
    cmds.select(usdCube)
    self.assertSnapshotClose('autoLoadSelected_cube.png')
    cmds.select("|stage")
    self.assertSnapshotClose('autoLoadSelected_stage.png')
    cmds.select(usdCone)
    cmds.select(usdCapsule, add=True)
    cmds.select(usdCylinder, add=True)
    self.assertSnapshotClose('autoLoadSelected_coneCapsuleCyliner.png')
    cmds.editor(panel, edit=True, unlockMainConnection=False)
def update(self, container, representation): import os from maya import cmds node = container["objectName"] path = api.get_representation_path(representation) # Get reference node from container members members = cmds.sets(node, query=True, nodesOnly=True) reference_node = self._get_reference_node(members) file_type = { "ma": "mayaAscii", "mb": "mayaBinary", "abc": "Alembic" }.get(representation["name"]) assert file_type, "Unsupported representation: %s" % representation assert os.path.exists(path), "%s does not exist." % path try: content = cmds.file(path, loadReference=reference_node, type=file_type, returnNewNodes=True) except RuntimeError as exc: # When changing a reference to a file that has load errors the # command will raise an error even if the file is still loaded # correctly (e.g. when raising errors on Arnold attributes) # When the file is loaded and has content, we consider it's fine. if not cmds.referenceQuery(reference_node, isLoaded=True): raise content = cmds.referenceQuery(reference_node, nodes=True, dagPath=True) if not content: raise self.log.warning("Ignoring file read error:\n%s", exc) # Fix PLN-40 for older containers created with Avalon that had the # `.verticesOnlySet` set to True. if cmds.getAttr("{}.verticesOnlySet".format(node)): self.log.info("Setting %s.verticesOnlySet to False", node) cmds.setAttr("{}.verticesOnlySet".format(node), False) # Add new nodes of the reference to the container cmds.sets(content, forceElement=node) # Remove any placeHolderList attribute entries from the set that # are remaining from nodes being removed from the referenced file. members = cmds.sets(node, query=True) invalid = [x for x in members if ".placeHolderList" in x] if invalid: cmds.sets(invalid, remove=node) # Update metadata cmds.setAttr("{}.representation".format(node), str(representation["_id"]), type="string")
def createMesh(self, meshPath):
    """Build a Maya mesh from a three.js-style JSON model file.

    Parses the bit-flagged face stream produced by readDatas() into
    polygon / UV / normal index arrays, creates the mesh, assigns the
    default shading group, then applies every UV set and the
    face-vertex normals.

    meshPath: model file path, passed straight through to readDatas().
    """
    self.readDatas(meshPath)
    polygon_counts = []      # vertices per face (always 3 here)
    polygon_connects = []    # flat vertex index list
    uv_counts = []
    uvs_ids = []             # one index list per UV set
    normal_connects = []     # flat normal index list
    if self.faces:
        offset = 0
        while offset < len(self.faces):
            # First value of each face record is a bit mask describing
            # which optional attributes follow (three.js face format).
            _type = self.faces[offset]
            isTriangle = not self.isBitSet(_type, 0)
            hasMaterial = self.isBitSet(_type, 1)
            hasFaceVertexUv = self.isBitSet(_type, 3)
            hasFaceNormal = self.isBitSet(_type, 4)
            hasFaceVertexNormal = self.isBitSet(_type, 5)
            hasFaceColor = self.isBitSet(_type, 6)
            hasFaceVertexColor = self.isBitSet(_type, 7)
            offset += 1
            if isTriangle:
                # Three vertex indices for the triangle.
                polygon_counts.append(3)
                polygon_connects.extend([
                    self.faces[offset],
                    self.faces[offset + 1],
                    self.faces[offset + 2]
                ])
                offset += 3
                if hasMaterial:
                    # Material index — not used, just skipped.
                    offset += 1
                if self.uvs:
                    if hasFaceVertexUv:
                        uv_counts.append(3)
                        for _uv in range(len(self.uvs)):
                            # Lazily grow one id list per UV set.
                            try:
                                uvs_ids[_uv]
                            except:
                                uvs_ids.append([])
                            for i in range(3):
                                uvs_ids[_uv].append(self.faces[offset])
                                offset += 1
                if hasFaceNormal:
                    # Per-face normal index — skipped.
                    offset += 1
                if hasFaceVertexNormal:
                    # Three per-vertex normal indices.
                    normal_connects.extend([
                        self.faces[offset],
                        self.faces[offset + 1],
                        self.faces[offset + 2]
                    ])
                    offset += 3
                if hasFaceColor:
                    # Face colour index — skipped.
                    offset += 1
                if hasFaceVertexColor:
                    # Per-vertex colour indices — skipped.
                    for i in range(3):
                        offset += 1
    if self.vertices and polygon_counts and polygon_connects:
        # new MFnMesh class
        m = nm.MFnMesh()
        # set some datas for MFnMesh
        m.create(self.vertices, polygon_counts, polygon_connects)
        # add material (default material)
        cmds.sets(m.name(), e=True, forceElement="initialShadingGroup")
        # set UV for Mesh
        if self.uvs:
            for e in range(len(self.uvs)):
                # create uvset names
                uv_name = "map%d" % (e + 1)
                if uv_name not in m.getUVSetNames():
                    m.createUVSet(uv_name)
                m.setUVs(self.uvs[e]['u'], self.uvs[e]['v'], uv_name)
                m.assignUVs(uv_counts, uvs_ids[e], uv_name)
        # set Normal for Mesh
        if self.normals:
            _faceIds = []   # polygon/face id list
            _normals = []   # polygon/face each vertex normal
            for faceId in range(m.numPolygons):
                for fvid in range(3):
                    _faceIds.append(faceId)
                    vid = faceId * 3 + fvid
                    norm = self.normals[normal_connects[vid]]  # get normal
                    _normals.append(norm)
            m.setFaceVertexNormals(_normals, _faceIds, polygon_connects)
def _createSets(self): """ Function used to put the alembic caches into the right sets for use witht ehlayout tool deved in house at LSky The lists were built from the base ENV scenes pre the cleanup. If new buildings are added to the sets they should be added to the lists below """ ## Now check for the sets ## Sets for MIDDLE HARBOUR if self.envPulldown.currentText() == 'ENV_MIDDLEHARBOUR_STATIC': animBuildList = ['BBB_CanoeBoatHouse_BLD', 'AI_Jetty_Dock_BLD_hrc', 'BBB_BowserBoatHouse_Dock_BLD_hrc','BBB_DockyardPier_Dock_BLD_hrc','BBB_Jetty_Dock_BLD_hrc','BBB_MainStorage_Dock_BLD_hrc','BBB_Office_Dock_BLD_hrc','BBB_TheMarina_Dock_BLD_hrc','BBB_DryDockInterior_BLD_hrc','BBB_Int_TerrysStorageshed_BLD_hrc','BBB_ZipBoatHouse_BLD_hrc', 'BBB_ZipBoathouseInterior_BLD_hrc','BBB_SydneyBoatHouse_BLD_hrc', 'BBB_SydneyBoathouseInterior_BLD_hrc'] setList = { "BBBEastPointLND" : ["BBB_Silo_BLD_hrc", "BBB_StorageShed02_BLD_hrc", "BBB_TerrysBoatHouse_BLD_hrc", "BBB_TerrysStorageShed_BLD_hrc", "BBB_DockyardPier_BLD_hrc", "BBB_EastPoint_LND_hrc"], "BBBMidPointLND" : ["BBB_Storage001_BLD_hrc", "BBB_Storage002_BLD_hrc", "BBB_StorageShed_BLD_hrc", "BBB_TheMarina_BLD_hrc", "BBB_Gen011_BLD_hrc", "BBB_Jetty_BLD_hrc", "BBB_MainStorage_BLD_hrc", "BBB_Office_BLD_hrc", "BBB_PirateShip_BLD_hrc", "BBB_MidPoint_LND_hrc", "BBB_DryDockMainBuilding_BLD_hrc", "BBB_DryDockInterior_BLD_hrc"], "BBBWestPointLND" : ["BBB_BowserBoatHouse_BLD_hrc", "BBB_Gen002_BLD_hrc", "BBB_Gen008_BLD_hrc", "BBB_Gen009_BLD_hrc", "BBB_Gen010_BLD_hrc", "BBB_Gen007_BLD_hrc", "BBB_Gen003_BLD_hrc", "BBB_Gen004_BLD_hrc", "BBB_Gen001_BLD_hrc", "BBB_Gen005_BLD_hrc", "BBB_Gen006_BLD_hrc", "BBB_WestPoint_LND_hrc", "BBB_ZipBoatHouse_BLD_hrc", "BBB_SydneyBoatHouse_BLD_hrc"], "TWRLND" : ["TWR_LND_hrc"], } ## Sets for MIDDLE HARBOUR EAST if self.envPulldown.currentText() == 'ENV_MIDDLEHARBOUR_EAST_STATIC': animBuildList = [] setList = { "AILND" : ["AI_LightHouse_BLD_hrc", "AI_LND_hrc", "AI_Jetty_BLD_hrc"], "FWBSandbarLND" : 
["FWB_Rock001_LND_hrc", "FWB_Rock002_LND_hrc", "FWB_Rock003_LND_hrc", "FWB_Rock004_LND_hrc", "FWB_Rock005_LND_hrc", "FWB_Rock006_LND_hrc", "FWB_Rock007_LND_hrc", "FWB_Rock008_LND_hrc", "FWB_BeachHouse_LND_hrc", "FWB_Sandbar_LND_hrc", "FWB_Fingers_LND_hrc", ], "HCEastLND" : ["HC_ExtraBlockingRock_LND_hrc", "HC_East_LND_hrc", "HC_Island010_E_LND_hrc", "HC_Island010_F_LND_hrc", "HC_Waterfall001_LND_hrc", "HC_Bridge001_A_LND_hrc", "HC_Bridge001_B_LND_hrc", "HC_Bridge001_C_LND_hrc", "HC_Cave001_LND_hrc", "HC_Island006_A_LND_hrc", "HC_Island006_B_LND_hrc", "HC_Island007_LND_hrc", "HC_Island010_A_LND_hrc", "HC_Island010_B_LND_hrc", "HC_Island010_C_LND_hrc", "HC_Island010_D_LND_hrc"], "HCNorthLND" : ["HC_North_LND_hrc", "HC_Entrance002_LND_hrc", "HC_Island001_A_LND_hrc", "HC_Island001_B_LND_hrc", "HC_Island001_C_LND_hrc", "HC_Island001_D_LND_hrc", "HC_Island001_E_LND_hrc", "HC_Island001_F_LND_hrc", "HC_Island001_G_LND_hrc", "HC_Island001_H_LND_hrc", "HC_Island001_I_LND_hrc", "HC_Island001_J_LND_hrc", "HC_Island002_A_LND_hrc", "HC_Island002_B_LND_hrc", "HC_Island003_A_LND_hrc", "HC_Island003_B_LND_hrc", "HC_Island004_LND_hrc"], "HCSouthLND" : ["HC_South_LND_hrc"], "HCWestLND" : ["HC_Entrance001_LND_hrc", "HC_West_LND_hrc", "HC_ShipWreck_BLD_hrc", "HC_Island008_LND_hrc", "HC_Island009_LND_hrc"], } ## Sets for WEST HARBOUR elif self.envPulldown.currentText() == 'ENV_WESTHARBOUR_STATIC': animBuildList = ['BB_PP_JettyDock_01_BLD', 'BB_PP_JettyDock_02_BLD', 'DingleIsland_JettyDock_BLD', 'LittleTown_Dock001_BLD', 'LittleTown_Dock002_BLD', 'MulliganTown_JettyDock_01_BLD', 'MulliganTown_JettyDock_02_BLD', 'BB_OF_Lease_BLD'] setList = { 'AdmiralBridgeLND' : ["AdmiralBridge_LND_hrc"], 'BBOysterFarmLND' : ["BB_OysterFarm_LND_hrc", "BB_OF_Hut005_BLD_hrc", "BB_OF_Lease_BLD_hrc", "BB_OF_Hut004_BLD_hrc", "BB_OF_Hut003_BLD_hrc", "BB_OF_Hut002_BLD_hrc", "BB_OF_Hut001_BLD_hrc"], 'BBPointPeriwinkleLND' : ["BB_PointPeriwinkle_LND_hrc", "BB_PP_Jetty_BLD_hrc", "BB_PP_Huts_BLD_hrc", 
"BB_PP_JettyDock_01_BLD_hrc", "BB_PP_JettyDock_02_BLD_hrc"], 'DingleIslandLND' : ["DingleIsland_JettyDock_BLD_hrc", "DingleIsland_LND_hrc"], 'LittleTownLND' : ["LittleTown_EastBuilding_BLD_hrc", "LittleTown_MidBuilding_BLD_hrc", "LittleTown_MidGenBuilding_BLD_hrc", "LittleTown_WestBuilding_BLD_hrc", "LittleTown_East_LND_hrc", "LittleTown_Mid_LND_hrc", "LittleTown_West_LND_hrc"], 'MuliganTownLND' : ["MulliganTown_JettyBuilding_BLD_hrc", "MulliganTown_EastBuilding_BLD_hrc", "MulliganTown_WestBuilding_BLD_hrc", "MulliganTown_EastGenBuilding_BLD_hrc", "MulliganTown_WestGenBuilding_BLD_hrc", "MulliganTown_SateliteHouse_BLD_hrc", "MulliganTown_JettyDock_01_BLD_hrc", "MulliganTown_JettyDock_02_BLD_hrc", "MulliganTown_East_LND_hrc", "MulliganTown_West_LND_hrc"] } ## Sets for BIG TOWN elif self.envPulldown.currentText() == 'ENV_BIGTOWN_STATIC': animBuildList = ['BigPort_Shipyard_Dock_BLD'] setList = { 'BigNorthPortLND' : ["BigNorthPort_LND_hrc", "BigNorthPort_Building_BLD_hrc"], 'BigPortLND' : ["BigPort_LND_hrc", "BigPort_Shipyard_BLD_hrc"], 'BigTown01LND' : ["BigTown_01_LND_hrc", "BigTown_01_Building_BLD_hrc"], 'BigTown02LND' : ["BigTown_02_LND_hrc", "BigTown_02_Building_BLD_hrc"], 'BigTownLND' : ["BigTown_LND_hrc", "BigTown_Building_BLD_hrc"], } ## Sets for THEHEADS elif self.envPulldown.currentText() == 'ENV_THEHEADS_STATIC': animBuildList = [] setList = { 'THIrisleLND' : ['TH_IrisIsle_LND_hrc'], 'THMangoShore01LND' : ["TH_MangoShore01_LND_hrc", "TH_MangoShore02_LND_hrc"], 'THRainbowShoreLND' : ["TH_RainbowCliffs_LND_hrc", "TH_RainbowShore01_LND_hrc", "TH_RainbowShore02_LND_hrc"] } cmds.select(clear = True) for setName, itemList in setList.items(): if not cmds.objExists(setName): cmds.sets(n = setName) for eachHRC in itemList: if eachHRC not in animBuildList: try: cmds.sets(eachHRC, e = True, forceElement = setName) print 'Successfully added %s to %s' % (eachHRC, setName) except ValueError: print 'Failed to add %s' % eachHRC
def transferMocap(scale=1):
    """Constrain a mocap skeleton onto an IK rig and bake the result.

    Usage: select the mocap source root first and the target rig second,
    then run. The mocap skeleton is grouped, zeroed at the Reference
    joint, uniformly scaled by `scale`, aligned to the rig's left ankle
    height, constrained joint-to-control, and finally baked onto the
    rig's ik_controls_set over the playback range. The temporary groups
    and constraints are deleted afterwards.

    scale: uniform scale applied to the mocap skeleton so its
    proportions match the rig (default 1).
    """
    src, tgt = mc.ls(sl=True)[:2]
    # Derive the namespace prefixes ("ns:") of source and target, if any.
    if ":" in src:
        ns_src = src.split(":")[0] + ":"
    else:
        ns_src = ""
    if ":" in tgt:
        ns_tgt = tgt.split(":")[0] + ":"
    else:
        ns_tgt = ""
    # Parent the mocap root under a scaling group.
    grp = mc.createNode("transform")
    src = mc.parent(src, grp)[0]
    mc.setAttr(grp + ".s", scale, scale, scale)
    # Zero all joint rotations and the Reference root's XZ translation.
    for n in mc.listRelatives(ns_src + "Reference", pa=True, ad=True):
        mc.setAttr(n + ".r", 0, 0, 0)
    mc.setAttr(ns_src + "Reference.tx", 0)
    mc.setAttr(ns_src + "Reference.tz", 0)
    # Second group snapped to the left foot, lifted so the mocap foot
    # matches the rig's left IK ankle height.
    grp2 = mc.createNode("transform")
    mc.delete(mc.pointConstraint(ns_src + "LeftFoot", grp2))
    mc.parent(grp, grp2)
    mc.setAttr(
        grp2 + ".ty",
        mc.xform(ns_tgt + "ankle_lf_ik_control", q=True, ws=True, rp=True)[1])
    # l collects every constraint so it can be baked over (and removed
    # with grp2) at the end.
    l = []
    # Torso / head.
    l.append(
        mc.parentConstraint(ns_src + "Hips", ns_tgt + "cog_control", mo=True)[0])
    l.append(
        mc.orientConstraint(ns_src + "Spine", ns_tgt + "waist_control", mo=False)[0])
    l.append(
        mc.orientConstraint(ns_src + "Spine1", ns_tgt + "chest_control", mo=False)[0])
    l.append(
        mc.orientConstraint(ns_src + "Neck", ns_tgt + "neck_control", mo=False)[0])
    l.append(
        mc.orientConstraint(ns_src + "Head", ns_tgt + "head_control", mo=False)[0])
    # Put both arm pole vectors in their default space before constraining.
    mc.setAttr(ns_tgt + "arm_rt_pv_control.space", 0)
    mc.setAttr(ns_tgt + "arm_lf_pv_control.space", 0)
    # Right side: leg pole vector, ankle, shoulder, elbow and wrist.
    l.append(
        mc.parentConstraint(ns_src + "RightLeg", ns_tgt + "leg_rt_pv_control_grp", mo=True)[0])
    l.append(
        mc.pointConstraint(ns_src + "RightFoot", ns_tgt + "ankle_rt_ik_control")[0])
    l.append(
        mc.orientConstraint(ns_src + "RightFoot", ns_tgt + "ankle_rt_ik_control", mo=True)[0])
    l.append(
        mc.orientConstraint(ns_src + "RightShoulder", ns_tgt + "shoulder_rt_ik_control", mo=True)[0])
    l.append(
        mc.pointConstraint(ns_src + "RightForeArm", ns_tgt + "arm_rt_pv_control")[0])
    # Snap the wrist control to the hand, keep only its X offset, then
    # constrain with offset maintained.
    mc.delete(
        mc.pointConstraint(ns_src + "RightHand", ns_tgt + "wrist_rt_ik_control"))
    mc.setAttr(ns_tgt + "wrist_rt_ik_control.ty", 0)
    mc.setAttr(ns_tgt + "wrist_rt_ik_control.tz", 0)
    l.append(
        mc.parentConstraint(ns_src + "RightHand", ns_tgt + "wrist_rt_ik_control", mo=True)[0])
    # Left side: mirror of the right-side setup above.
    l.append(
        mc.parentConstraint(ns_src + "LeftLeg", ns_tgt + "leg_lf_pv_control_grp", mo=True)[0])
    l.append(
        mc.pointConstraint(ns_src + "LeftFoot", ns_tgt + "ankle_lf_ik_control")[0])
    l.append(
        mc.orientConstraint(ns_src + "LeftFoot", ns_tgt + "ankle_lf_ik_control", mo=True)[0])
    l.append(
        mc.orientConstraint(ns_src + "LeftShoulder", ns_tgt + "shoulder_lf_ik_control", mo=True)[0])
    l.append(
        mc.pointConstraint(ns_src + "LeftForeArm", ns_tgt + "arm_lf_pv_control")[0])
    mc.delete(
        mc.pointConstraint(ns_src + "LeftHand", ns_tgt + "wrist_lf_ik_control"))
    mc.setAttr(ns_tgt + "wrist_lf_ik_control.ty", 0)
    mc.setAttr(ns_tgt + "wrist_lf_ik_control.tz", 0)
    l.append(
        mc.parentConstraint(ns_src + "LeftHand", ns_tgt + "wrist_lf_ik_control", mo=True)[0])
    # Bake the constrained IK controls over the playback range, then
    # remove the whole temporary setup (grp2 contains the mocap rig copy).
    sf = mc.playbackOptions(q=True, min=True)
    ef = mc.playbackOptions(q=True, max=True)
    mc.bakeResults(mc.sets(ns_tgt + "ik_controls_set", q=True), t=(sf, ef), sm=True)
    mc.delete(grp2)
import maya.cmds as cmds
def Colour_The_Balls(): # Create Shaders aColourList = [ [0 ,[ 0.39 , 0.86 , 1.0 ]], [1 ,[ 0.26 , 1.0 , 0.64 ]], [2 ,[ 1.0 , 0.69 , 0.69 ]], [3 ,[ 0.19 , 0.63 , 0.63 ]], [4 ,[ 0.89 , 0.67 , 0.47 ]], [5 ,[ 0.41 , 0.63 , 0.19 ]], [6 ,[ 0 , 0.6 , 0.33 ]], [7 ,[ 1.0 , 0 , 0 ]], [8 ,[ 0 , 1.0 , 0 ]], [9 ,[ 0 , 0 , 0 ]], ] for colour in aColourList: oMaterial = 'PivotColour_%s' % colour[0] oShader = oMaterial+'_SDR' if not cmds.objExists(oMaterial): cmds.shadingNode('lambert', n = oMaterial, asShader = 1, ) cmds.sets(oMaterial, renderable = True, noSurfaceShader = True, empty = True, name = oShader) cmds.connectAttr(oMaterial+'.outColor', oShader+'.surfaceShader', f = True) cmds.setAttr( "%s.color"%oMaterial, type = 'double3', *colour[1]) cmds.setAttr( "%s.incandescence"%oMaterial, type = 'double3', *colour[1]) cmds.setAttr( "%s.ambientColor"%oMaterial, type = 'double3', *colour[1]) # Change the color of the Spheres. for i in range(0,len(GetExistingPivots())): sBall = 'PivotSphere_%s_Pivot' % i # Object Name print sBall cmds.sets( sBall, fe = 'PivotColour_%s_SDR' % i, e = True) ### Copy files in Python from shutil import copyfile copyfile(src, dst) ### Bake Animation ### import maya.mel as mel def SpeedUpBake_1_Store(sName): # store a temporary panel configuration. layout = cmds.panelConfiguration(l=sName, sc=0) evalStr = 'updatePanelLayoutFromCurrent "'+name+'"' mel.eval(evalStr) # switch to fast "hidden" layout evalStr = 'setNamedPanelLayout "Single Perspective View"' mel.eval(evalStr) perspPane = cmds.getPanel(vis=1) cmds.scriptedPanel('graphEditor1',e=1,rp=perspPane[0]) return sName def SpeedUpBake_2_Restore(sName): # restore the layout returned from makeHiddenLayout. evalStr = 'setNamedPanelLayout "'+sName+'"' mel.eval(evalStr) # now delete the old layout. 
killMe = cmds.getPanel(cwl=sName) cmds.deleteUI(killMe,pc=1) SpeedUpBake_1_Store('tempLayout') try: print 'do something' cmds.bakeResults(aFirst, t = (aRange[0],aRange[1]), simulation = True ) finally: SpeedUpBake_2_Restore('tempLayout') #ScriptJob example (ScriptJob : script must fishish executing completely in order for maya to respond.) def CB(callback): trans = set(cmds.ls(sl = True, type = 'transform')) if trans: cb = cmds.channelBox('mainChannelBox', q = True, sma = True) or [] if cb: callback([a+'.' +b for a in trans for b in cb]) else: objs = set(cmds.ls(sl = True)) - trans if objs: cmds.select(list(objs)) def temp(): res = [a+'.'+b for a in objs for b in cmds.channelBox('mainChannelBox', q = True, sma = True)or[]] cmds.select(list(trans)) callback(res) cmds.scriptJob(e = ('idle', temp), ro = True) def main(): def p(val): print val CB(p) print 'test'
def process(self, containers): from maya import cmds from reveries.maya import lib, pipeline from avalon.tools import sceneinventory cached_document = dict() def get_document(id): if id in cached_document: doc = cached_document[id] else: doc = io.find_one({"_id": io.ObjectId(id)}) cached_document[id] = doc return doc for container in containers: namespace = container["namespace"] filter = {"id": "pyblish.avalon.container", "namespace": namespace} if len(lib.lsAttrs(filter)) == 1: # Namespace is unique continue # Create new namespace asset = get_document(container["assetId"]) asset_name = asset["name"] subset = get_document(container["subsetId"]) if subset["schema"] == "avalon-core:subset-3.0": family = subset["data"]["families"][0] else: version = get_document(container["versionId"]) family = version["data"]["families"][0] family_name = family.split(".")[-1] new_namespace = pipeline.unique_root_namespace( asset_name=asset_name, family_name=family_name, ) CON = container["objectName"] members = cmds.ls(cmds.sets(CON, query=True, nodesOnly=True), long=True) reference_node = lib.get_highest_reference_node(members) if reference_node: filename = cmds.referenceQuery(reference_node, filename=True) cmds.file(filename, edit=True, namespace=new_namespace) else: cmds.namespace(add=new_namespace) for node in members: if not cmds.objExists(node): continue if cmds.referenceQuery(node, isNodeReferenced=True): continue if cmds.lockNode(node, query=True)[0]: continue new = "|".join( p.replace(namespace[1:], new_namespace[1:], 1) for p in node.split("|")) if node != new: cmds.rename(node, new) cmds.setAttr(CON + ".namespace", new_namespace, type="string") container["namespace"] = new_namespace sceneinventory.app.window.refresh()
def get_scene_materials(): for shading_engine in cmds.ls(type='shadingEngine'): if cmds.sets(shading_engine, q=True): for material in cmds.ls(cmds.listConnections(shading_engine), materials=True): yield material, shading_engine
#scripts from dragon rig makin' at Buck #create quick select set for selected controls import maya.cmds as cmds sel = cmds.ls(sl=True) cmds.sets(text="gCharacterSet", name="headControls") for obj in sel: cmds.sets("headControls", e=True, add=obj) #apply a series of blend shapes to a series of objects import maya.cmds as cmds sel = cmds.ls(sl=True) for x in range(0, len(sel)): num = (x%10)+1 thisTarget = "lf_stripWaveDriver%02d_geo"%num BS = cmds.blendShape(thisTarget, sel[x], origin="local") cmds.blendShape(BS, e=True, w=[(0,1)]) #to create bend deformers on each of the driving cards and orient them correctly import maya.cmds as cmds
def rebuild(self): """ Rebuild the set from the stored setData. """ # ========== # - Checks - # ========== # Set Name if not self._data['name']: raise Exception('SetData has not been initialized!') # Member Items memberList = self._data['membership'] or [] for obj in memberList: if not cmds.objExists(obj): print('Set member item "' + obj + '" does not exist! Unable to add to set...') memberList.remove(obj) # Flatten Membership List memberList = cmds.ls(memberList, fl=True) or [] # Mode if not mode in self.mode: raise Exception('Invalid set membership mode "' + mode + '"! Use "add" or "replace"!') # =============== # - Rebuild Set - # =============== # Start timer timer = cmds.timerX() # Create Set setName = self._data['name'] # Delete Set (REPLACE only) if cmds.objExists(setName) and mode == 'replace': cmds.delete(setName) # Create Set if not cmds.objExists(setName): setName = cmds.sets(n=setName) # Add Members if memberList: if forceMembership: for obj in memberList: try: cmds.sets(obj, e=True, fe=setName) except Exception, e: print('Error adding item "' + obj + '" to set "' + setName + '"! Skipping') print(str(e)) else: for obj in memberList: try: cmds.sets(obj, e=True, add=setName) except Exception, e: print('Error adding item "' + obj + '" to set "' + setName + '"! Skipping') print(str(e))
def update_scene(set_container, containers, current_data, new_data, new_file):
    """Updates the hierarchy, assets and their matrix

    Updates the following within the scene:
        * Setdress hierarchy alembic
        * Matrix
        * Parenting
        * Representations

    It removes any assets which are not present in the new build data

    Args:
        set_container (dict): the setdress container of the scene
        containers (list): the list of containers under the setdress container
        current_data (dict): the current build data of the setdress
        new_data (dict): the new build data of the setdress
        new_file (str): path to the new build .json (the matching .abc
            must exist alongside it)

    Returns:
        processed_containers (list): all new and updated containers

    """
    from pype.hosts.maya.lib import DEFAULT_MATRIX, get_container_transforms

    set_namespace = set_container['namespace']

    # Update the setdress hierarchy alembic
    set_root = get_container_transforms(set_container, root=True)
    set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0]
    set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root,
                                                  referenceNode=True)
    new_alembic = new_file.replace(".json", ".abc")
    assert os.path.exists(new_alembic), "%s does not exist." % new_alembic
    # Temporarily unlock the whole hierarchy so the reference swap can edit it.
    with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)):
        cmds.file(new_alembic,
                  loadReference=set_hierarchy_reference,
                  type="Alembic")

    identity = DEFAULT_MATRIX[:]

    processed_namespaces = set()
    processed_containers = list()

    new_lookup = _instances_by_namespace(new_data)
    old_lookup = _instances_by_namespace(current_data)
    for container in containers:
        container_ns = container['namespace']

        # Consider it processed here, even if it fails we want to store that
        # the namespace was already available.
        processed_namespaces.add(container_ns)
        processed_containers.append(container['objectName'])

        if container_ns in new_lookup:
            root = get_container_transforms(container, root=True)
            if not root:
                log.error("Can't find root for %s", container['objectName'])
                continue

            old_instance = old_lookup.get(container_ns, {})
            new_instance = new_lookup[container_ns]

            # Update the matrix
            # check matrix against old_data matrix to find local overrides
            current_matrix = cmds.xform(root,
                                        query=True,
                                        matrix=True,
                                        objectSpace=True)

            original_matrix = old_instance.get("matrix", identity)
            has_matrix_override = not matrix_equals(current_matrix,
                                                   original_matrix)

            if has_matrix_override:
                # Artist moved it locally — keep their transform.
                log.warning("Matrix override preserved on %s", container_ns)
            else:
                new_matrix = new_instance.get("matrix", identity)
                cmds.xform(root, matrix=new_matrix, objectSpace=True)

            # Update the parenting
            if old_instance.get("parent", None) != new_instance["parent"]:
                parent = to_namespace(new_instance['parent'], set_namespace)
                if not cmds.objExists(parent):
                    log.error("Can't find parent %s", parent)
                    continue

                # Set the new parent
                cmds.lockNode(root, lock=False)
                root = cmds.parent(root, parent, relative=True)
                cmds.lockNode(root, lock=True)

            # Update the representation
            representation_current = container['representation']
            representation_old = old_instance['representation']
            representation_new = new_instance['representation']
            has_representation_override = (representation_current !=
                                           representation_old)

            if representation_new != representation_current:

                if has_representation_override:
                    log.warning("Your scene had local representation "
                                "overrides within the set. New "
                                "representations not loaded for %s.",
                                container_ns)
                    continue

                # We check it against the current 'loader' in the scene instead
                # of the original data of the package that was loaded because
                # an Artist might have made scene local overrides
                if new_instance['loader'] != container['loader']:
                    log.warning("Loader is switched - local edits will be "
                                "lost. Removing: %s",
                                container_ns)

                    # Remove this from the "has been processed" list so it's
                    # considered as new element and added afterwards.
                    processed_containers.pop()
                    processed_namespaces.remove(container_ns)
                    api.remove(container)
                    continue

                # Check whether the conversion can be done by the Loader.
                # They *must* use the same asset, subset and Loader for
                # `api.update` to make sense.
                old = io.find_one({
                    "_id": io.ObjectId(representation_current)
                })
                new = io.find_one({
                    "_id": io.ObjectId(representation_new)
                })
                is_valid = compare_representations(old=old, new=new)
                if not is_valid:
                    log.error("Skipping: %s. See log for details.",
                              container_ns)
                    continue

                new_version = new["context"]["version"]
                api.update(container, version=new_version)

        else:
            # Remove this container because it's not in the new data
            log.warning("Removing content: %s", container_ns)
            api.remove(container)

    # Add new assets
    all_loaders = api.discover(api.Loader)
    for representation_id, instances in new_data.items():

        # Find the compatible loaders
        loaders = api.loaders_from_representation(all_loaders,
                                                  representation_id)
        for instance in instances:

            # Already processed in update functionality
            if instance['namespace'] in processed_namespaces:
                continue

            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=set_container['namespace'],
                             root=set_root)

            # Add to the setdress container
            cmds.sets(container,
                      addElement=set_container['objectName'])

            processed_containers.append(container)

    return processed_containers
def update(self, container, representation):
    """Update loaded Yeti fur nodes to a new representation.

    Reconciles the scene's pgYetiMaya nodes against the representation's
    .fursettings metadata, matching by cbId: nodes missing from the new
    metadata are deleted, new entries get nodes created, and matching
    nodes are renamed and have their attributes (including the cache
    file path) updated. Finally the container's representation id is
    stored.
    """
    namespace = container["namespace"]
    container_node = container["objectName"]

    path = api.get_representation_path(representation)

    # Get all node data
    fname, ext = os.path.splitext(path)
    settings_fname = "{}.fursettings".format(fname)
    with open(settings_fname, "r") as fp:
        settings = json.load(fp)

    # Collect scene information of asset
    set_members = cmds.sets(container["objectName"], query=True)
    container_root = lib.get_container_transforms(container,
                                                  members=set_members,
                                                  root=True)
    scene_nodes = cmds.ls(set_members, type="pgYetiMaya", long=True)

    # Build lookup with cbId as keys
    scene_lookup = defaultdict(list)
    for node in scene_nodes:
        cb_id = lib.get_id(node)
        scene_lookup[cb_id].append(node)

    # Re-assemble metadata with cbId as keys
    meta_data_lookup = {n["cbId"]: n for n in settings["nodes"]}

    # Compare lookups and get the nodes which are not relevant any more
    to_delete_lookup = {cb_id for cb_id in scene_lookup.keys() if
                        cb_id not in meta_data_lookup}
    if to_delete_lookup:

        # Get nodes and remove entry from lookup
        to_remove = []
        for _id in to_delete_lookup:

            # Get all related nodes
            shapes = scene_lookup[_id]

            # Get the parents of all shapes under the ID
            transforms = cmds.listRelatives(shapes,
                                            parent=True,
                                            fullPath=True) or []
            to_remove.extend(shapes + transforms)

            # Remove id from lookup
            scene_lookup.pop(_id, None)

        cmds.delete(to_remove)

    for cb_id, data in meta_data_lookup.items():

        # Update cache file name (":" is illegal in file names).
        file_name = data["name"].replace(":", "_")
        cache_file_path = "{}.%04d.fur".format(file_name)
        data["attrs"]["cacheFileName"] = os.path.join(path, cache_file_path)

        if cb_id not in scene_lookup:
            # Entry is new: create nodes and parent them into the container.
            self.log.info("Creating new nodes ..")

            new_nodes = self.create_nodes(namespace, [data])
            cmds.sets(new_nodes, addElement=container_node)
            cmds.parent(new_nodes, container_root)

        else:
            # Update the matching nodes
            scene_nodes = scene_lookup[cb_id]
            lookup_result = meta_data_lookup[cb_id]["name"]

            # Remove namespace if any (e.g.: "character_01_:head_YNShape")
            node_name = lookup_result.rsplit(":", 1)[-1]

            for scene_node in scene_nodes:

                # Get transform node, this makes renaming easier
                transforms = cmds.listRelatives(scene_node,
                                                parent=True,
                                                fullPath=True) or []
                assert len(transforms) == 1, "This is a bug!"

                # Get scene node's namespace and rename the transform node
                lead = scene_node.rsplit(":", 1)[0]
                namespace = ":{}".format(lead.rsplit("|")[-1])

                new_shape_name = "{}:{}".format(namespace, node_name)
                new_trans_name = new_shape_name.rsplit("Shape", 1)[0]

                transform_node = transforms[0]
                cmds.rename(transform_node,
                            new_trans_name,
                            ignoreShape=False)

                # Get the newly named shape node
                yeti_nodes = cmds.listRelatives(new_trans_name,
                                                children=True)
                yeti_node = yeti_nodes[0]

                # Push all stored attribute values onto the node.
                for attr, value in data["attrs"].items():
                    lib.set_attribute(attr, value, yeti_node)

    cmds.setAttr("{}.representation".format(container_node),
                 str(representation["_id"]),
                 typ="string")
def skirt_connect(self): cmds.disconnectAttr('L_skirt_total_CTL.translateX', 'L_leg_CTL.translateX') cmds.disconnectAttr('L_skirt_total_CTL.translateZ', 'L_leg_CTL.translateZ') top_loc_list = cmds.listRelatives('top_loc_GRP', c=1) mid_loc_list = cmds.listRelatives('mid_loc_GRP', c=1) top_loc_y = cmds.xform('top_01_loc', q=1, rp=1, ws=1)[1] # top loc의 Y축값만 쿼리 top_loc_y = normalize_float(top_loc_y) # 소수점정리 mid_loc_y = cmds.xform('mid_01_loc', q=1, rp=1, ws=1)[1] # mid loc의 Y축값만 쿼리 mid_loc_y = normalize_float(mid_loc_y) low_loc_y = cmds.xform('low_01_loc', q=1, rp=1, ws=1)[1] # low loc의 Y축값만 쿼리 low_loc_y = normalize_float(low_loc_y) # use_num = change_number() #사용자 지정 컨트롤러 갯수 # num = (use_num-3) / 2 #허벅지 ~ 무릎, 무릎 ~ 발목 파트를 2개로 나눈다 (지정한 컨트롤러갯수 - 고정된컨트롤러(top,mid,low) / 2(윗다리,아랫다리) # top_con_num = (top_loc_y - mid_loc_y) / (num + 1)# 허벅지에서 무릎 # # top Y축과 mid Y축 사이 (허벅지~무릎)에 컨트롤러갯수(num)+1 을 해주어야 등분갯수가 나온다(컨트롤러를 일정한간격으로 배치하기위함) # low_con_num = (mid_loc_y - low_loc_y) / (num + 1)# 무릎에서 발목 up_use_num = change_number('up_segments') top_con_num = (top_loc_y - mid_loc_y) / (up_use_num + 1) # 허벅지에서 무릎 down_use_num = change_number('down_segments') low_con_num = (mid_loc_y - low_loc_y) / (down_use_num + 1) # 무릎에서 발목 top_leg_y_list = [] for count in range(up_use_num): con_count = count + 1 top_leg_y = con_count * top_con_num top_con_po = top_loc_y - top_leg_y top_leg_y_list.append(top_con_po) #print top_leg_y_list low_leg_y_list = [] for count in range(down_use_num + 1): # 무릎 ~ 발목 은 맨아래컨트롤러가 하나 더 생성되어야 하기때문에 +1 해준다 con_count = count + 1 low_leg_y = con_count * low_con_num low_con_po = mid_loc_y - low_leg_y low_leg_y_list.append(low_con_po) #print low_leg_y_list # ------------------------------- top_loc_GRP 에서 트랜스 X,Z를 쿼리 (허벅지 - 무릎) top_loc_x_list = [] top_loc_z_list = [] for top_loc_po in top_loc_list: po = cmds.xform(top_loc_po, q=1, rp=1, ws=1) top_loc_x_list.append(po[0]) #x축의 값 top_loc_z_list.append(po[2]) #z축의 값 top_loc_x_list = [normalize_float(num) for num in top_loc_x_list] 
#소수점 자리 정리 top_loc_z_list = [normalize_float(num) for num in top_loc_z_list] # ------------------------------- mid_loc_GRP 에서 트랜스 X,Z를 쿼리 (무릎 - 발목) mid_loc_x_list = [] mid_loc_z_list = [] for mid_loc_po in mid_loc_list: po = cmds.xform(mid_loc_po, q=1, rp=1, ws=1) mid_loc_x_list.append(po[0]) #x축의 값 mid_loc_z_list.append(po[2]) #z축의 값 mid_loc_x_list = [normalize_float(num) for num in mid_loc_x_list] #소수점 자리 정리 mid_loc_z_list = [normalize_float(num) for num in mid_loc_z_list] con_name_list = [ u'skirt_F_M', u'skirt_L_A', u'skirt_L_B', u'skirt_L_D', u'skirt_L_F', u'skirt_L_G', u'skirt_B_M', u'skirt_R_G', u'skirt_R_F', u'skirt_R_D', u'skirt_R_B', u'skirt_R_A' ] # 컨트롤러 프리픽스네임 리스트 top_FK_last_list = [] top_IK_last_JNT_list = [] # 허벅지 - 무릎 / FK컨트롤러 생성 for loc_x, loc_z, con_name in zip(top_loc_x_list, top_loc_z_list, con_name_list): # x축과 z축은 한줄씩 고정값임 for i, top_loc_y in enumerate(top_leg_y_list): # y축은 치마 길이에따라 변화됨 top_FK_con = cmds.duplicate('FK_con', n=con_name + '_FK_%02d_CTL' % (i + 2)) cmds.move(loc_x, top_loc_y, loc_z, top_FK_con) fix_rotate = rotate_xform( con_name + '_FK_01_CTL') # 만들어질 컨트롤러의 기준 로테이션은 FK_01 컨트롤러에서 추출 cmds.xform(top_FK_con, ws=1, ro=fix_rotate) offGRP_command_CTL(top_FK_con) cmds.parent(con_name + '_FK_%02d_CTL_offGRP' % (i + 2), con_name + '_FK_%02d_CTL' % (i + 1)) # FK컨트롤러 하이라키 정리 top_FK_last_list.append(top_FK_con[0]) # top fk컨트롤러의 마지막 컨트롤러를 리스트화시킨다(쿼리해서 point_loc가 그 밑에 하이라키로 들어갈수있게하기 위함) # 허벅지 - 무릎 / IK컨트롤러 생성 for loc_x, loc_z, con_name in zip(top_loc_x_list, top_loc_z_list, con_name_list): # x축과 z축은 한줄씩 고정값임 for i, top_loc_y in enumerate(top_leg_y_list): # y축은 치마 길이에따라 변화됨 top_IK_con = cmds.duplicate('IK_con', n=con_name + '_IK_%02d_CTL' % (i + 2)) cmds.move(loc_x, top_loc_y, loc_z, top_IK_con) fix_rotate = rotate_xform( con_name + '_FK_01_CTL') # 만들어질 컨트롤러의 기준 로테이션은 FK_01 컨트롤러에서 추출 cmds.xform(top_IK_con, ws=1, ro=fix_rotate) offGRP_command(top_IK_con) cmds.parent(con_name + '_IK_%02d_CTL_offGRP' % (i + 2), con_name + '_FK_%02d_CTL' % (i + 
2)) # IK컨트롤러 하이라키 정리 cmds.parent(con_name + '_IK_%02d_skinJNT' % (i + 2), con_name + '_IK_%02d_skinJNT' % (i + 1)) # IK스킨조인트 하이라키 정리 cmds.connectAttr( 'down_skirt_RIG_setup_CTL.ikVis', con_name + '_IK_%02d_CTL_offGRP' % (i + 2) + '.visibility') # IK_vis 연결 top_IK_last_JNT = con_name + '_IK_%02d_skinJNT' % ( i + 2) # 허벅지 - 무릎의 마지막조인트, down_IK조인트와 하이라키로 이어주기위해 쿼리 top_IK_last_JNT_list.append(top_IK_last_JNT) point_loc_list = [] for loc_x, loc_z, con_name in zip(mid_loc_x_list, mid_loc_z_list, con_name_list): # 무릎 - 발목 / FK컨트롤러 생성 for i, low_loc_y in enumerate(low_leg_y_list): low_FK_con = cmds.duplicate('FK_con', n='down_' + con_name + '_FK_%02d_CTL' % (i + 2)) cmds.move(loc_x, low_loc_y, loc_z, low_FK_con) fix_rotate = rotate_xform( 'down_' + con_name + '_FK_01_CTL') # 만들어질 컨트롤러의 기준 로테이션은 FK_01 컨트롤러에서 추출 cmds.xform(low_FK_con, ws=1, ro=fix_rotate) offGRP_command_CTL(low_FK_con) cmds.parent('down_' + con_name + '_FK_%02d_CTL_offGRP' % (i + 2), 'down_' + con_name + '_FK_%02d_CTL' % (i + 1)) # down FK컨트롤러 하이라키 정리 # 무릎 - 발목 / IK컨트롤러 생성 for i, low_loc_y in enumerate(low_leg_y_list): low_IK_con = cmds.duplicate('IK_con', n='down_' + con_name + '_IK_%02d_CTL' % (i + 2)) cmds.move(loc_x, low_loc_y, loc_z, low_IK_con) fix_rotate = rotate_xform( 'down_' + con_name + '_FK_01_CTL') # 만들어질 컨트롤러의 기준 로테이션은 FK_01 컨트롤러에서 추출 cmds.xform(low_IK_con, ws=1, ro=fix_rotate) offGRP_command(low_IK_con) cmds.parent('down_' + con_name + '_IK_%02d_CTL_offGRP' % (i + 2), 'down_' + con_name + '_FK_%02d_CTL' % (i + 2)) # down IK컨트롤러 하이라키 정리 cmds.parent('down_' + con_name + '_IK_%02d_skinJNT' % (i + 2), 'down_' + con_name + '_IK_%02d_skinJNT' % (i + 1)) # down IK스킨조인트 하이라키 정리 cmds.connectAttr( 'down_skirt_RIG_setup_CTL.ikVis', 'down_' + con_name + '_IK_%02d_CTL_offGRP' % (i + 2) + '.visibility') # IK_vis 연결 # 허벅지 - 무릎의 마지막 조인트와, 무릎 - 발목의 첫 조인트를 하이라키로 연결 for top_IK_last_JNT, con_name in zip(top_IK_last_JNT_list, con_name_list): cmds.parent('down_' + con_name + '_IK_01_skinJNT', top_IK_last_JNT) 
point_loc = cmds.spaceLocator(n=(con_name + '_point_loc')) cmds.setAttr(point_loc[0] + '.visibility', 0) # point_loc 하이드 # point로케이터를 생성(loc -> down fk의 첫번째 컨트롤러에 포인트 컨스트레인을 하기위함) down_FK_firt = ('down_' + con_name + '_FK_01_CTL_offGRP' ) # down FK 의 첫번째 컨트롤러 move(point_loc, down_FK_firt) # down FK 의 첫번째 컨트롤러와 위치를 똑같이 맞춰준다 point_loc_list.append( point_loc) # point_loc 리스트화(하이라키 구조로 전부 넣기 위함) down_FK_firt_list = [] for top_FK_last, point_loc, con_name in zip(top_FK_last_list, point_loc_list, con_name_list): cmds.parent( point_loc, top_FK_last) #point_loc는 up_FK컨트롤러의 하이라키 최하위에 있어야됨(FK마지막) down_FK_firt = ('down_' + con_name + '_FK_01_CTL_offGRP') down_FK_firt_sub = ('down_' + con_name + '_FK_01_CTL_key_GRP' ) # 무릎 FK1번 key그룹 cmds.pointConstraint(point_loc, down_FK_firt, mo=1, w=1) # point_loc가 무릎FK 1번을 point로 물고있는다 cmds.orientConstraint( point_loc, down_FK_firt_sub, mo=1, w=1) # point_loc가 무릎 FK 1번 key그룹에 오리엔트(knee_FK_rotate 스위치를 위함) cmds.connectAttr( 'down_skirt_RIG_setup_CTL.kneeFkRotate', down_FK_firt_sub + '_orientConstraint1.' + con_name + '_point_locW0') # knee_FK_rotate on/off 스위치 연결 down_FK_firt_list.append(down_FK_firt) lower_joint = cmds.listRelatives('skirt_IK_skinJNT_GRP', ad=1, pa=1, typ='joint') sel_lower_joint = cmds.select(lower_joint) cmds.sets(n='skirt_skinJNT_set') # skirt_skinJNT_set 생성 hip_JNT = cmds.textField('hip_tex_box', q=1, text=1) # root조인트 for con_name in con_name_list: cmds.parent(con_name + '_IK_01_skinJNT', hip_JNT) # root조인트 밑으로 치마조인트가 들어가게 하이라키 정리 cmds.delete('skirt_RIG_skinJNT_GRP') # 비어있는 스킨조인트 그룹은 삭제 cmds.delete('con_shape') # 컨트롤러 복사가 끝난뒤 con_shape 그룹은 삭제 # legFollow 옵션에 따라 point컨스트레인 on/off cmds.createNode('reverse', n='down_point_reverse') cmds.connectAttr('down_skirt_RIG_setup_CTL.legFollow', 'down_point_reverse.inputX') for down_FK_firt, con_name in zip(down_FK_firt_list, con_name_list): cmds.connectAttr( 'down_point_reverse.outputX', down_FK_firt + '_pointConstraint1.' 
+ con_name + '_point_locW0') # 빌드후 리깅 셋팅 관련 어트리뷰트는 하이드 cmds.setAttr("down_skirt_RIG_setup_CTL.skirtLength", lock=True, keyable=False, channelBox=False) # 빌드후 skirtLength lock cmds.setAttr("down_skirt_RIG_setup_CTL.____Rig____", lock=True, keyable=False, channelBox=False) cmds.setAttr("down_skirt_RIG_setup_CTL.legSkirtStrength", lock=True, keyable=False, channelBox=False) cmds.setAttr("down_skirt_RIG_setup_CTL.middleSkirtStrength", lock=True, keyable=False, channelBox=False) # cmds.setAttr('skirt_sub_RIG_FK_CTL_GRP.visibility',1) # cmds.setAttr('skirt_RIG_skinJNT_GRP.visibility',1) # cmds.setAttr('skirt_sub_total_CTL.visibility',1) # cmds.setAttr('skirt_RIG_setup_CTL_offGRP.visibility',1) total_ui = cmds.textField('total_tex_box', q=1, text=1) hip_ui = cmds.textField('hip_tex_box', q=1, text=1) spn_ui = cmds.textField('spn_01_tex_box', q=1, text=1) L_up_leg_ui = cmds.textField('L_leg_tex_box', q=1, text=1) L_low_leg_ui = cmds.textField('L_knee_tex_box', q=1, text=1) L_ankle_ui = cmds.textField('L_ankle_tex_box', q=1, text=1) R_up_leg_ui = cmds.textField('R_leg_tex_box', q=1, text=1) R_low_leg_ui = cmds.textField('R_knee_tex_box', q=1, text=1) R_ankle_ui = cmds.textField('R_ankle_tex_box', q=1, text=1) sels = cmds.sets('delete_set', int='delete_set') cmds.delete(sels) cmds.parentConstraint('skirt_sub_total_CTL', 'skirt_sub_front_M_FK_01_rot_pin', mo=1, w=1) cmds.parentConstraint('skirt_sub_total_CTL', 'skirt_sub_back_M_FK_01_rot_pin', mo=1, w=1) cmds.parentConstraint(total_ui, 'skirt_total_CTL_offGRP', mo=1, w=1) cmds.scaleConstraint(total_ui, 'skirt_total_CTL_offGRP', mo=1, w=1) cmds.pointConstraint(hip_ui, 'skirt_total_CTL_GRP', mo=1, w=1) cmds.parentConstraint(spn_ui, 'waist_total_CTL', mo=1, w=1) cmds.pointConstraint(L_up_leg_ui, 'L_skirt_total_CTL', mo=1, w=1) cmds.pointConstraint(R_up_leg_ui, 'R_skirt_total_CTL', mo=1, w=1) cmds.parentConstraint(L_low_leg_ui, 'L_leg_CTL', mo=1, w=1) # skirt_auto on/off 시 전환 cmds.parentConstraint('L_skirt_total_CTL', 
'L_leg_CTL', mo=1, w=1) # skirt_auto on/off 시 전환 cmds.parentConstraint(R_low_leg_ui, 'R_leg_CTL', mo=1, w=1) cmds.parentConstraint('R_skirt_total_CTL', 'R_leg_CTL', mo=1, w=1) cmds.parentConstraint(L_ankle_ui, 'down_L_leg_CTL', mo=1, w=1) # skirt_auto on/off 시 전환 cmds.parentConstraint('down_L_skirt_total_CTL', 'down_L_leg_CTL', mo=1, w=1) # skirt_auto on/off 시 전환 cmds.parentConstraint(R_ankle_ui, 'down_R_leg_CTL', mo=1, w=1) cmds.parentConstraint('down_R_skirt_total_CTL', 'down_R_leg_CTL', mo=1, w=1) move_point('L_ankle_loc', L_ankle_ui) move_point('R_ankle_loc', R_ankle_ui) cmds.parentConstraint(L_ankle_ui, 'L_ankle_loc', mo=1, w=1) cmds.parentConstraint(R_ankle_ui, 'R_ankle_loc', mo=1, w=1) # leg_follow cmds.parentConstraint('L_skirt_total_CTL', 'down_L_skirt_total_CTL_offGRP', mo=1, w=1) cmds.parentConstraint('R_skirt_total_CTL', 'down_R_skirt_total_CTL_offGRP', mo=1, w=1) cmds.connectAttr( 'down_skirt_RIG_setup_CTL.legFollow', 'down_L_skirt_total_CTL_offGRP_parentConstraint1.L_leg_CTLW0') cmds.connectAttr( 'down_skirt_RIG_setup_CTL.legFollow', 'down_R_skirt_total_CTL_offGRP_parentConstraint1.R_leg_CTLW0') cmds.connectAttr( 'leg_follow_reverse.outputX', 'down_L_skirt_total_CTL_offGRP_parentConstraint1.L_skirt_total_CTLW1' ) cmds.connectAttr( 'leg_follow_reverse.outputX', 'down_R_skirt_total_CTL_offGRP_parentConstraint1.R_skirt_total_CTLW1' ) cmds.pointConstraint('down_L_front_skirt_trans_sub_CTL', 'down_skirt_sub_side_L_01_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_L_side_L_01_skirt_trans_sub_CTL', 'down_skirt_sub_side_L_02_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_L_side_L_03_skirt_trans_sub_CTL', 'down_skirt_sub_side_L_04_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_L_side_L_05_skirt_trans_sub_CTL', 'down_skirt_sub_side_L_06_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_L_side_back_skirt_trans_sub_CTL', 'down_skirt_sub_side_L_07_FK_01_CTL_offGRP', mo=1, w=1) 
cmds.pointConstraint('down_M_front_skirt_trans_sub_CTL', 'down_skirt_sub_front_M_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_M_side_back_skirt_trans_sub_CTL', 'down_skirt_sub_back_M_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_R_front_skirt_trans_sub_CTL', 'down_skirt_sub_side_R_01_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_R_side_R_01_skirt_trans_sub_CTL', 'down_skirt_sub_side_R_02_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_R_side_R_03_skirt_trans_sub_CTL', 'down_skirt_sub_side_R_04_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_R_side_R_05_skirt_trans_sub_CTL', 'down_skirt_sub_side_R_06_FK_01_CTL_offGRP', mo=1, w=1) cmds.pointConstraint('down_R_side_back_skirt_trans_sub_CTL', 'down_skirt_sub_side_R_07_FK_01_CTL_offGRP', mo=1, w=1) i = 1 for b in range(7): cmds.parentConstraint('skirt_sub_total_CTL', 'skirt_sub_side_L_%02d_FK_01_rot_pin' % i, mo=1, w=1) cmds.parentConstraint('skirt_sub_total_CTL', 'skirt_sub_side_R_%02d_FK_01_rot_pin' % i, mo=1, w=1) i = i + 1 pin_GRP_list = cmds.listRelatives('skirt_sub_FK_01_rot_pin_GRP', children=1) pin_loc_list = cmds.listRelatives(pin_GRP_list, children=1) #for pin_loc in pin_loc_list: #cmds.connectAttr('skirt_RIG_setup_CTL.waist_pin', pin_loc + '_parentConstraint1.skirt_sub_total_CTLW0') cmds.setAttr("down_skirt_RIG_setup_CTL.legFollow", 0) cmds.setAttr("down_skirt_RIG_setup_CTL.legFollow", lock=True, keyable=False, channelBox=False) # 빌드후 leg follow lock #skirt_auto 스위치 cmds.createNode('reverse', n='skirt_auto_reverse') #skirt_auto on cmds.connectAttr('down_skirt_RIG_setup_CTL.skirtAuto', 'L_leg_CTL_parentConstraint1.knee_L_skinJNTW0') cmds.connectAttr('down_skirt_RIG_setup_CTL.skirtAuto', 'R_leg_CTL_parentConstraint1.knee_R_skinJNTW0') cmds.connectAttr('down_skirt_RIG_setup_CTL.skirtAuto', 'down_L_leg_CTL_parentConstraint1.ankle_L_skinJNTW0') cmds.connectAttr('down_skirt_RIG_setup_CTL.skirtAuto', 'down_R_leg_CTL_parentConstraint1.ankle_R_skinJNTW0') #skirt_auto 
off cmds.connectAttr('down_skirt_RIG_setup_CTL.skirtAuto', 'skirt_auto_reverse.inputX') cmds.connectAttr('skirt_auto_reverse.outputX', 'L_leg_CTL_parentConstraint1.L_skirt_total_CTLW1') cmds.connectAttr('skirt_auto_reverse.outputX', 'R_leg_CTL_parentConstraint1.R_skirt_total_CTLW1') cmds.connectAttr( 'skirt_auto_reverse.outputX', 'down_L_leg_CTL_parentConstraint1.down_L_skirt_total_CTLW1') cmds.connectAttr( 'skirt_auto_reverse.outputX', 'down_R_leg_CTL_parentConstraint1.down_R_skirt_total_CTLW1') # 빌드 후 옵션 cmds.disconnectAttr('L_leg_CTL_parentConstraint1.constraintRotateX', 'L_leg_CTL.rotateX') # 로테이트 브레이크커넥션 cmds.disconnectAttr('L_leg_CTL_parentConstraint1.constraintRotateY', 'L_leg_CTL.rotateY') cmds.disconnectAttr('L_leg_CTL_parentConstraint1.constraintRotateZ', 'L_leg_CTL.rotateZ') cmds.disconnectAttr('R_leg_CTL_parentConstraint1.constraintRotateX', 'R_leg_CTL.rotateX') cmds.disconnectAttr('R_leg_CTL_parentConstraint1.constraintRotateY', 'R_leg_CTL.rotateY') cmds.disconnectAttr('R_leg_CTL_parentConstraint1.constraintRotateZ', 'R_leg_CTL.rotateZ') cmds.disconnectAttr( 'down_L_leg_CTL_parentConstraint1.constraintRotateX', 'down_L_leg_CTL.rotateX') # 로테이트 브레이크커넥션 cmds.disconnectAttr( 'down_L_leg_CTL_parentConstraint1.constraintRotateY', 'down_L_leg_CTL.rotateY') cmds.disconnectAttr( 'down_L_leg_CTL_parentConstraint1.constraintRotateZ', 'down_L_leg_CTL.rotateZ') cmds.disconnectAttr( 'down_R_leg_CTL_parentConstraint1.constraintRotateX', 'down_R_leg_CTL.rotateX') cmds.disconnectAttr( 'down_R_leg_CTL_parentConstraint1.constraintRotateY', 'down_R_leg_CTL.rotateY') cmds.disconnectAttr( 'down_R_leg_CTL_parentConstraint1.constraintRotateZ', 'down_R_leg_CTL.rotateZ') cmds.connectAttr('root_M_skinJNT.rotateY', 'skirt_total_CTL_GRP.rotateY') cmds.createNode('plusMinusAverage', n='main_world_plus') cmds.createNode('multiplyDivide', n='main_world_plus_reverse') cmds.setAttr("main_world_plus_reverse.input2X", -1) cmds.connectAttr('main_M_CTL.rotateY', 
'main_world_plus.input1D[0]') cmds.connectAttr('world_M_CTL.rotateY', 'main_world_plus.input1D[1]') cmds.connectAttr('main_world_plus.output1D', 'main_world_plus_reverse.input1X') cmds.connectAttr('main_world_plus_reverse.outputX', 'skirt_total_CTL_GRP_reverse.rotateY') cmds.setAttr('down_skirt_RIG_setup_CTL.skirtAuto', 1) cmds.setAttr('skirt_total_CTLShape.visibility', 0) cmds.setAttr('waist_total_CTL.visibility', 0) cmds.setAttr('L_skirt_total_CTL.visibility', 0) cmds.setAttr('R_skirt_total_CTL.visibility', 0) cmds.setAttr('M_skirt_total_CTL.visibility', 0) cmds.setAttr('L_leg_CTL.visibility', 0) cmds.setAttr('R_leg_CTL.visibility', 0) cmds.setAttr('down_L_skirt_total_CTL.visibility', 0) cmds.setAttr('down_R_skirt_total_CTL.visibility', 0) cmds.setAttr('down_M_skirt_total_CTL.visibility', 0) cmds.setAttr('down_L_leg_CTL.visibility', 0) cmds.setAttr('down_R_leg_CTL.visibility', 0) cmds.setAttr('ankle_M_skirt_total_CTL_offGRP.visibility', 0)
# Scene-setup script: scan a folder for OBJ files, then build the "home"
# pyramid and corner food piles for the ant simulation.
pathOfFiles = "/Users/Emily/Desktop/"  # folder scanned for files — hard-coded user path
fileType = "obj"
files = cmds.getFileList(folder=pathOfFiles, filespec='*.%s' % fileType)
if len(files) == 0:
    cmds.warning("No files found")
else:
    # initializes global numPheromones counter
    # NOTE(review): `global` at module level is a no-op; only meaningful if
    # this script body is pasted inside a function — confirm intended scope.
    global totalNumPheromones
    totalNumPheromones = 0
    # "home" pyramid is the origin (0,0,0) (it glows gold)
    cmds.polyPyramid()
    cmds.move(0, 0.3, 0)
    cmds.sets(e=True, forceElement='blinn2SG')  # assumes 'blinn2SG' already exists in the scene
    # food piles at corners (each has 10 food)
    cmds.polyPyramid()
    cmds.scale(.50, .50, .50)
    cmds.move(9, .25, 9)
    cmds.sets(e=True, forceElement='blinn3SG')
    # relies on Maya's default naming: this is the 2nd pyramid created above
    food1 = Food("pPyramid2")
    cmds.polyPyramid()
    cmds.scale(.50, .50, .50)
    cmds.move(-9, .25, 9)
    cmds.sets(e=True, forceElement='blinn3SG')
    food2 = Food("pPyramid3")
    # NOTE(review): this last pyramid has no scale/move/Food wrapper —
    # looks truncated relative to the pattern above; verify against the
    # original script.
    cmds.polyPyramid()
def sort_set():
    """Create one Maya set per key of the sorted-model dict.

    Each set is named 'mao_faceNum_<key>' and contains the members that
    sort_model() grouped under that key.
    """
    grouped = sort_model()
    for face_key, members in grouped.items():
        cmds.sets(members, n='mao_faceNum_' + str(face_key))
def returnObjectSets():
    """
    Return a semi intelligent dictionary of sets in a maya scene file.

    Return dict keys:
    all(list) -- all sets found
    maya(list) -- maya made and controlled sets (tweakSet, etc)
    render(list) -- sets returned by mc.listSets(type=1)
    deformer(list) -- sets returned by mc.listSets(type=2)
    referenced(dict) -- ['From Scene'] are local sets, all other sets are
        indexed to their reference prefix
    qss(list) -- quick select sets
    cgmTypes(dict) -- sets indexed to their type as understood by cgm tools
        ('cgmType' tag); unmatched sets land under 'NONE'
    objectSetGroups(list) -- sets tagged as 'objectSetGroup'
    """
    returnSetsDict = {
        'maya': [],
        'qss': [],
        'referenced': {},
        'cgmTypes': {},
        'objectSetGroups': []
    }
    returnSetsDict['all'] = mc.ls(type='objectSet') or []
    returnSetsDict['render'] = mc.listSets(type=1) or []
    returnSetsDict['deformer'] = mc.listSets(type=2) or []

    refBuffer = {'From Scene': []}
    returnSetsDict['referenced'] = refBuffer
    typeBuffer = {'NONE': []}
    returnSetsDict['cgmTypes'] = typeBuffer

    # Substring patterns that mark a set as maya-made/controlled; a list
    # entry means ALL of its substrings must be present.
    mayaSetChecks = [
        'defaultCreaseDataSet', 'defaultObjectSet', 'defaultLightSet',
        'initialParticleSE', 'initialShadingGroup', 'Vray', 'SG',
        ['cluster', 'Set'], ['skinCluster', 'Set'], 'tweakSet'
    ]

    for s in returnSetsDict['all']:
        # Get our qss sets
        if mc.sets(s, q=True, text=True) == 'gCharacterSet':
            returnSetsDict['qss'].append(s)

        # Get our maya sets
        for check in mayaSetChecks:
            if type(check) is list:
                if all(c in s for c in check):
                    returnSetsDict['maya'].append(s)
                    break
            elif check in s:
                returnSetsDict['maya'].append(s)
                break

        # Get our reference prefixes and sets sorted out
        if mc.referenceQuery(s, isNodeReferenced=True):
            refPrefix = returnReferencePrefix(s)
            refBuffer.setdefault(refPrefix, []).append(s)
        else:
            refBuffer['From Scene'].append(s)

        # Type sort.
        # BUGFIX: the original attached the 'NONE' append to the inner `if`,
        # so every set was appended to 'NONE' once per NON-matching tag —
        # duplicating entries and misfiling typed sets. Now a set goes to
        # 'NONE' only when no tag matched at all.
        typeTag = returnTagInfo(s, 'cgmType')
        matched = False
        for tag in dictionary.setTypes.keys():
            if dictionary.setTypes[tag] == typeTag:
                typeBuffer.setdefault(tag, []).append(s)
                matched = True
        if not matched:
            typeBuffer['NONE'].append(s)

        # Set group check (same tag value, queried once above)
        if typeTag == 'objectSetGroup':
            returnSetsDict['objectSetGroups'].append(s)

    return returnSetsDict
def cleanShotScene(fType, BG=True, importRefs=True): """ does stuff to clean the current open scene. . . BG 1 = clean for background import(import the refs, BG 0 = don't do the import reffed stuff) """ #make sure to stick info about the relevant workshop into the file for the "promotePastVersion" function later (promote them both) # make saving a note mandatory, BUT make sure this DOESN"T require user input when in headless mode (maybe just an arg to pass) #bring up window #this is where we do the stuff in the mastering process depeneding on which fType we have # clean up the delete set if importRefs == True: if cmds.objExists("deleteSet"): delStuff = cmds.sets("deleteSet", q=True) cmds.delete(delStuff) try: cmds.delete("deleteSet") except: print "-Problem deleting the deleteSet" #---------------- shoudl this be an "if" thing? ie. if deleteImage: and if deleteDisplay. . . #delete image planes ip = cmds.ls(type="imagePlane") print "deleting image planes: {0}".format(ip) if ip: cmds.delete(ip) #get rid of display layers, render layers, anm layers dl = cmds.ls(type="displayLayer") if dl: dl.remove("defaultLayer") print "deleting display layers: {0}".format(dl) cmds.delete(dl) # lgt if fType == "lgt": # - set frame to 000 cmds.currentTime(0) # - do it in the bg ?? # - delete "delete set" # if we're importing the refs (in all BG masters or regular masters (not ref master process)) if importRefs: refs = cmds.file(q=True, r=True) # =========== DON"T REMOVE NAMESPACES from refs!!!! for ref in refs: refNode = cmds.referenceQuery(ref, rfn=True) cmds.file(rfn=refNode, importReference=True) # anm if fType == "anm": # - set frame to 000 cmds.currentTime(0) # - get all lights and delete lights = cFuncs.getLightList() print "deleting lights: {0}".format(lights) if lights: cmds.delete(lights) refs = cmds.file(q=True, r=True) # =========== DON"T REMOVE NAMESPACES from refs!!!! for ref in refs: refNode = cmds.referenceQuery(ref, rfn=True) cmds.file(rfn=refNode, importReference=True)
def surfaceShader_Occ(self):
    """
    Replace every material in self.allMaterial with an ambient-occlusion
    surfaceShader driven by mib_fg_occlusion.

    For materials with a connected transparency/opacity map, the map is
    disconnected and rewired through a mib_transparency node on the new
    shading group so the occlusion respects the cutout. Works by side
    effect on the open Maya scene; returns nothing.
    """
    def _baseOcclusionShader():
        # One surfaceShader + SG with mib_fg_occlusion feeding its color.
        # Extracted: this block was duplicated verbatim in both branches.
        surfaceNode = mc.shadingNode("surfaceShader", asShader=True)
        surfaceNodeSG = mc.sets(renderable=True,
                                noSurfaceShader=True,
                                empty=True,
                                name=(surfaceNode + "SG"))
        mc.connectAttr("%s.outColor" % surfaceNode,
                       "%s.surfaceShader" % surfaceNodeSG, f=True)
        mib_fg_occlNode = mc.shadingNode("mib_fg_occlusion", asShader=True)
        mc.connectAttr("%s.outValue" % mib_fg_occlNode,
                       "%s.outColor" % surfaceNode, f=True)
        return surfaceNode, surfaceNodeSG, mib_fg_occlNode

    for Mater in self.allMaterial:
        # Find an incoming transparency/opacity plug, if any.
        # Was: blind try/except around each lookup; now guard explicitly —
        # the attribute may not exist on every material type, and
        # listConnections returns None when nothing is connected.
        transp = ""
        for attrName in ("transparency", "opacity"):
            plug = "%s.%s" % (Mater, attrName)
            if mc.objExists(plug):
                conns = mc.listConnections(plug, s=True, d=False, plugs=True)
                if conns:
                    transp = conns[0]  # opacity wins when both are connected (matches original order)

        if transp:
            # Detach the map from the material before rerouting it.
            for attrName in ("transparency", "opacity"):
                plug = "%s.%s" % (Mater, attrName)
                if mc.objExists(plug) and mc.isConnected(transp, plug):
                    mc.disconnectAttr(transp, plug)

            surfaceNode, surfaceNodeSG, mib_fg_occlNode = _baseOcclusionShader()
            # Route the transparency map through mib_transparency on the SG.
            mib_transpNode = mc.shadingNode("mib_transparency", asShader=True)
            mc.connectAttr("%s.outValue" % mib_transpNode,
                           "%s.miMaterialShader" % surfaceNodeSG)
            mc.connectAttr("%s.outValueA" % mib_fg_occlNode,
                           "%s.inputA" % mib_transpNode)
            mc.connectAttr("%s.outValue" % mib_fg_occlNode,
                           "%s.input" % mib_transpNode)
            tran = transp.split(".")[0]  # node name of the map
            mc.connectAttr("%s.outColor" % tran, "%s.transp" % mib_transpNode)
            mc.connectAttr("%s.outAlpha" % tran, "%s.transpA" % mib_transpNode)
            mc.setAttr("%s.invert" % tran, 1)
            # Reassign the material's geometry to the new occlusion shader.
            mc.hyperShade(objects=Mater)
            mc.hyperShade(assign=surfaceNode)
        else:
            surfaceNode, surfaceNodeSG, mib_fg_occlNode = _baseOcclusionShader()
            try:
                mc.hyperShade(objects=Mater)
                mc.hyperShade(assign=surfaceNode)
            except Exception:
                # best-effort: material may have no assigned geometry
                pass