def getQuickSelSets():
    """Return quick-select (gCharacterSet) sets followed by the remaining
    user object sets.

    Deformer sets, shader sets and Maya's default/internal sets are
    excluded.

    Returns:
        list: quick-select set names first, then other user set names.
    """
    import maya.cmds as cmds

    # listSets can return None when nothing matches; normalize to lists
    allSets = cmds.listSets(allSets=1) or []
    deformSet = cmds.listSets(t=1) or []
    shaderSet = cmds.listSets(t=2) or []
    unusedSet = {
        "defaultCreaseDataSet",
        "initialTextureBakeSet",
        "initialVertexBakeSet",
        "tmpTextureBakeSet",
        "defaultObjectSet",
        "defaultLightSet",
    }
    excluded = set(deformSet) | set(shaderSet) | unusedSet
    allSets = [x for x in allSets if x not in excluded]

    selSets = []
    unnSets = []
    for xSet in allSets:
        # Maya's cmd "listSets -as" drops namespaces, so a listed name may
        # not resolve directly; fall back to a namespace wildcard lookup.
        # BUGFIX: original called pm.objExists but pymel was never imported.
        if cmds.objExists(xSet):
            if cmds.sets(xSet, q=1, t=1) == "gCharacterSet":
                selSets.append(xSet)
            else:
                unnSets.append(xSet)
        else:
            withNS = cmds.ls("*:" + str(xSet))
            if withNS:
                print(withNS)
                if cmds.sets(withNS[0], q=1, t=1) == "gCharacterSet":
                    print(withNS)
                    selSets.append(withNS[0])
                else:
                    # BUGFIX: original used an unreachable duplicate
                    # `elif len(withNS) > 0` test here
                    unnSets.append(withNS[0])
    return selSets + unnSets
def get_related_sets(node): """Return objectSets that are relationships for a look for `node`. Filters out based on: - id attribute is NOT `pyblish.avalon.container` - shapes and deformer shapes (alembic creates meshShapeDeformed) - set name ends with any from a predefined list - set in not in viewport set (isolate selected for example) Args: node (str): name of the current node to check Returns: list: The related sets """ # Ignore specific suffices ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"] # Default nodes to ignore defaults = {"defaultLightSet", "defaultObjectSet"} # Ids to ignore ignored = {"pyblish.avalon.instance", "pyblish.avalon.container"} view_sets = get_isolate_view_sets() sets = cmds.listSets(object=node, extendToShape=False) if not sets: return [] # Fix 'no object matches name' errors on nodes returned by listSets. # In rare cases it can happen that a node is added to an internal maya # set inaccessible by maya commands, for example check some nodes # returned by `cmds.listSets(allSets=True)` sets = cmds.ls(sets) # Ignore `avalon.container` sets = [ s for s in sets if not cmds.attributeQuery("id", node=s, exists=True) or not cmds.getAttr("%s.id" % s) in ignored ] # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) deformer_sets = cmds.listSets(object=node, extendToShape=False, type=2) or [] deformer_sets = set(deformer_sets) # optimize lookup sets = [s for s in sets if s not in deformer_sets] # Ignore when the set has a specific suffix sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)] # Ignore viewport filter view sets (from isolate select and # viewports) sets = [s for s in sets if s not in view_sets] sets = [s for s in sets if s not in defaults] return sets
def getCtrlSet(ctrlName):
    '''Given a ctrl, return the ctrl object set.

    This is the set under the limb set that contains the ctrlsFK and
    ctrlsIK sets.

    Args:
        ctrlName (str): name of a rig control.

    Returns:
        str or None: the ctrl set name, or None when none is found.
    '''
    # BUGFIX: listSets returns None when the node is in no set; guard so
    # the loop below does not raise TypeError.
    currentSets = cmds.listSets(object=ctrlName) or []
    for objSet in currentSets:
        # if the current set is ctrlsIK or ctrlsFK set, walk up one
        if objSet.endswith(name.CTRLSETFK) or objSet.endswith(name.CTRLSETIK):
            return cmds.listSets(object=objSet)[0]
        elif objSet.endswith(name.SEP + name.CTRLSET):
            return objSet
    return None
def is_compatible(container):
    """Return True when `container` was produced by a supported loader and
    is a root container (a direct member of the main AVALON container
    set)."""
    from maya import cmds
    from avalon.maya.pipeline import AVALON_CONTAINERS

    if not container:
        return False

    supported_loaders = {
        "CameraLoader",
        "LightSetLoader",
        "LookLoader",
        "MayaShareLoader",
        "ModelLoader",
        "PointCacheReferenceLoader",
        "RigLoader",
        "SetDressLoader",
    }
    if container["loader"] not in supported_loaders:
        return False

    main_container = AVALON_CONTAINERS[1:]  # Remove root namespace
    parent_sets = cmds.listSets(object=container["objectName"]) or []

    # Must be a root container
    return main_container in parent_sets
def updateData(self, *a, **kw):
    """ Updates the stored data

    Stores basic data (member list), qss state, parent sets, whether the
    set is a Maya default set, and the cgm set type tag.
    """
    self.setList = mc.sets(self.nameLong, q=True)
    if not self.setList:
        self.setList = []
    # A quick-select set reports the 'gCharacterSet' text tag
    if mc.sets(self.nameLong, q=True, text=True) == 'gCharacterSet':
        self.qssState = True
    else:
        self.qssState = False
    self.parents = mc.listSets(o=self.nameLong)

    #If it's a maya set
    # NOTE(review): this is a substring match — any set whose name merely
    # contains one of these tokens is flagged; confirm that is intended.
    self.mayaSetState = False
    for check in [
        'defaultCreaseDataSet',
        'defaultObjectSet',
        'defaultLightSet',
        'initialParticleSE',
        'initialShadingGroup',
        'tweakSet'
    ]:
        if check in self.nameLong and not self.qssState:
            self.mayaSetState = True

    # Resolve the cgmType tag back to a known set type key
    typeBuffer = search.returnTagInfo(self.nameLong, 'cgmType')
    if typeBuffer:
        for t in setTypes.keys():
            if setTypes.get(t) == typeBuffer:
                self.setType = t
        # Fall back to the raw tag when no known type matched.
        # NOTE(review): assumes self.setType was initialized elsewhere
        # (e.g. __init__); otherwise this read raises AttributeError.
        if not self.setType:
            self.setType = typeBuffer
def _lightLinkSets(self):
    """Rebuild light links from 'LightLink_*' object sets.

    First breaks every non-default connection on lightLinker1, then for
    each scene set named 'LightLink_<light>' re-links that light to the
    set's members.
    """
    debug(app=self, method='_lightLinkSets',
          message='Light Linking Sets...', verbose=False)

    # BUGFIX: listConnections/listSets return None when empty; guard so
    # the slicing/iteration below cannot raise TypeError.
    conn = cmds.listConnections('lightLinker1',
                                connections=True, plugs=True) or []
    # listConnections returns a flat alternating (dest, source) plug list
    for dstPlug, srcPlug in zip(conn[::2], conn[1::2]):
        # Keep Maya's default light-link wiring intact
        if ('initialParticleSE' not in srcPlug
                and 'defaultLightSet' not in srcPlug
                and 'initialShadingGroup' not in srcPlug):
            cmds.disconnectAttr(srcPlug, dstPlug)

    cmds.select(clear=True)
    for each in cmds.listSets(allSets=True) or []:
        if each[:10] == 'LightLink_':
            lightName = each[10:]  # set name encodes the light name
            lightSet = each
            cmds.select(lightName, lightSet, replace=True, noExpand=True)
            cmds.lightlink(make=True,
                           useActiveLights=True,
                           useActiveObjects=True)
            cmds.select(clear=True)
def listDispSets(self):
    """Return every object set carrying an 'aiDispHeight' attribute."""
    return [
        setNode for setNode in mc.listSets(allSets=True)
        if mc.objExists(setNode + ".aiDispHeight")
    ]
def listLgtGrpSets(self):
    """Return object sets that define 'mtoa_constant_lightGroup'."""
    found = []
    for node in mc.listSets(allSets=True):
        if mc.objExists("{0}.mtoa_constant_lightGroup".format(node)):
            found.append(node)
    return found
def getFaceShadingGroup(face):
    """Return the shading group assigned to the given face.

    Args:
        face (str): a mesh face component, e.g. "pCubeShape1.f[3]"
            (assumed a string; faster than a MeshFace object).

    Returns:
        str or None: the shadingEngine node, or None when no rendering
        set is assigned (the original raised TypeError/IndexError here).
    """
    sg = cmds.listSets(extendToShape=True, type=1, object=face)
    if not sg:
        # listSets returns None when nothing is assigned
        return None
    if len(sg) > 1:
        # More than one rendering set: narrow down to real shadingEngines
        sg = cmds.ls(sg, type='shadingEngine')
    return sg[0]
def is_compatible(container):
    """Action is visible only if the selected container requires this fix.

    A container qualifies when it is a root container (member of a main
    AVALON container set) and its namespace is loaded more than once.
    """
    from maya import cmds
    from avalon.maya.pipeline import AVALON_CONTAINERS
    from reveries.maya import lib

    if not container:
        return False

    if not ("subsetGroup" in container and container["subsetGroup"]):
        return False

    namespace = lib.get_ns(container["subsetGroup"])
    if container["namespace"] != namespace:
        # Use `UpdateNamespace` action to update namespace first.
        return

    # Populate the module-level caches lazily; they are shared across
    # invocations of this check.
    if cache["mainContainers"] is None:
        cache["mainContainers"] = cmds.ls(AVALON_CONTAINERS[1:] + "*",
                                          recursive=True)
    if cache["loadedNamespaces"] is None:
        cache["loadedNamespaces"] = [cmds.getAttr(con + ".namespace")
                                     for con in avalon.maya.pipeline._ls()]

    main_containers = cache["mainContainers"]
    namespaces = cache["loadedNamespaces"]

    parents = cmds.listSets(object=container["objectName"]) or []

    # Must be a root container
    if any(main in parents for main in main_containers):
        # Only offer the fix when the namespace is actually duplicated
        if namespaces.count(container["namespace"]) > 1:
            return True

    return False
def listIdSets(self):
    """Return object sets whose name contains the id-group prefix."""
    prefix = self.idGroupSetPrefix
    return [node for node in mc.listSets(allSets=True) if prefix in node]
def getShadingGroups(items=None):
    """Returns a list of shadingGroups associated with the given items."""
    from PMP.maya import getShapes
    items = ensureIterable_defaultToSelection(items, basestring)

    groups = set()
    shapes = []
    for item in items:
        if isAShadingGroup(item):
            groups.add(item)
        else:
            shapes.extend(getShapes(item))

    for shape in shapes:
        related = cmds.listSets(type=1, object=shape)
        if related is None:
            continue
        groups.update(s for s in related if isAShadingGroup(s))

    return list(groups)
def process(self, instance):
    """Collect file textures feeding the instance's shading engines and
    fail when any texture lives on a local drive (C: or D:).

    Raises:
        AssertionError: when textures on local drives are found.
    """
    shading_engines = []
    for obj in instance:
        shading_engines.extend(
            cmds.listSets(object=obj, extendToShape=True, type=1) or [])
    shading_engines = list(set(shading_engines))

    # Walk upstream through the shading networks, collecting file nodes.
    # BUGFIX: the original looped on `shading_engines` forever — the query
    # input never advanced, so `nodes` never became empty and the loop
    # never terminated; it also called cmds.ls(None) when nothing was
    # connected. Advance the frontier each iteration and track visited
    # nodes so graph cycles cannot re-trigger the loop.
    file_textures = []
    visited = set()
    frontier = shading_engines
    while frontier:
        nodes = cmds.listConnections(frontier,
                                     destination=False,
                                     source=True,
                                     skipConversionNodes=True,
                                     shapes=False)
        if not nodes:
            break
        nodes = [n for n in nodes if n not in visited]
        if not nodes:
            break
        visited.update(nodes)
        file_textures.extend(cmds.ls(nodes, type="file"))
        frontier = nodes
    file_textures = list(set(file_textures))

    for node in file_textures:
        img_path = cmds.getAttr(node + ".fileTextureName")
        if img_path[:2] in ["C:", "D:"]:
            self.invalid_files.append(node)

    assert not self.invalid_files, (
        "Texture files should not be in local drive (C:, D:).")
def get_invalid(cls, instance):
    """Return the instance's camera as invalid when it is neither
    containerized nor being published alongside this instance."""
    from maya import cmds
    from avalon.pipeline import AVALON_CONTAINER_ID
    from reveries.maya import lib

    invalid = list()
    camera = instance.data["camera"]

    # Is camera being containerized ?
    containers = lib.lsAttr("id", AVALON_CONTAINER_ID)
    transform = cmds.listRelatives(camera, parent=True, fullPath=True)[0]
    owning_sets = cmds.listSets(object=transform) or []
    containerized = any(s in containers for s in owning_sets)

    if not containerized:
        # Is camera being publish ?
        camera_ln = cmds.ls(camera, long=True)[0]
        camera_instances = [
            i for i in instance.context
            if (i.data["family"] == cls.camera_family
                and i.data.get("publish", True))
        ]
        if not any(camera_ln in inst for inst in camera_instances):
            invalid.append(camera)

    return invalid
def get_invalid_sets(shape):
    """Get sets that are considered related but do not contain the shape.

    In some scenarios Maya keeps connections to multiple shaders even if
    just a single one is assigned on the full object. These are related
    sets returned by `maya.cmds.listSets` that don't actually have the
    shape as member.
    """
    from maya import cmds

    related = cmds.listSets(object=shape,
                            type=1,  # all rendering sets
                            extendToShape=False) or []

    bad = []
    for related_set in related:
        content = cmds.sets(related_set, query=True, nodesOnly=True)
        if not content:
            # An empty set cannot contain the shape
            bad.append(related_set)
            continue
        long_names = set(cmds.ls(content, long=True))
        if shape not in long_names:
            bad.append(related_set)

    return bad
def get_invalid(cls, instance):
    """Return render cameras that are neither containerized (versioned)
    nor included in any published camera instance of this context."""
    from reveries.maya import lib
    from maya import cmds

    containers = lib.lsAttr("id", AVALON_CONTAINER_ID)

    cameras = set(instance.data["renderCam"])
    has_versioned = set()

    # Is camera being containerized ?
    for cam in cameras:
        transform = cmds.listRelatives(cam, parent=True, fullPath=True)[0]
        for set_ in cmds.listSets(object=transform) or []:
            if set_ in containers:
                has_versioned.add(cam)
                break

    # Is camera being publish ?
    not_containerized = cameras - has_versioned
    camera_instances = [
        i for i in instance.context
        if (i.data["family"] == cls.camera_family
            and i.data.get("publish", True))
    ]
    for cam in not_containerized:
        for inst in camera_instances:
            if cam in inst:
                has_versioned.add(cam)
                break

    # Whatever is left has no versioned source
    return list(cameras - has_versioned)
def update_hierarchy(containers):
    """Hierarchical container support

    This is the function to support Scene Inventory to draw hierarchical
    view for containers.

    We need both parent and children to visualize the graph.
    """
    known = set(_ls())  # lookup set of all container names

    for container in containers:
        obj_set = container["objectName"]

        # Find parent: first surrounding set that is itself a container
        for candidate in cmds.listSets(object=obj_set) or []:
            if candidate in known:
                container["parent"] = candidate
                break

        # List children: member objectSets that are containers too
        member_sets = cmds.ls(cmds.sets(obj_set, query=True),
                              type="objectSet")
        container["children"] = [s for s in member_sets if s in known]

        yield container
def determineShaderList(exportJob):
    """Collect the shadingEngine nodes assigned to the job's geometry.

    Returns:
        list or None: one dict per shading group with keys 'dagpath',
        'hash', 'name' and 'index'; None when only the default
        initialShadingGroup is in use.
    """
    sgObjectList = []
    sgNodeList = set()
    geomList = [obj['dagpath'] for obj in exportJob.geomObjectList]
    for geo in geomList:
        sgSetList = cmds.listSets(object=geo, t=1, ets=True)
        if sgSetList is None:  # was `== None`
            continue
        sgNodeList.update(sg for sg in sgSetList
                          if cmds.nodeType(sg) == 'shadingEngine')

    # Nothing but the default shading group: caller can skip shaders
    if len(sgNodeList) == 1 and 'initialShadingGroup' in sgNodeList:
        return None

    # Freeze the set into ONE list so every derived list shares the same
    # ordering (the original rebuilt `list(sgNodeList)` repeatedly).
    sgNodes = list(sgNodeList)

    # remove namespaces from the shader nodes to make a list of names
    sgNameList = sgNodes
    if exportJob.jobOptions['stripNamespaces']:
        sgNameList = [_remove_namespace_from_dag_path(shader)
                      for shader in sgNodes]

    hashList = [addHashToShader(sg) for sg in sgNodes]

    # unwind all the lists into a dict, then make a list of dict objects
    for i, sgNode in enumerate(sgNodes):  # enumerate replaces Py2 xrange
        sgObjectList.append({
            'dagpath': sgNode,
            'hash': hashList[i],
            'name': sgNameList[i],
            'index': i,
        })

    return sgObjectList
def listIdSets(self):
    """Return object sets that carry the id-group attribute."""
    matched = []
    for node in mc.listSets(allSets=True):
        if mc.objExists(node + '.' + self.idGroupAttr):
            matched.append(node)
    return matched
def process(self, containers):
    """Select the shading engines of every look container, then return
    the container objectSets that own the resulting selection."""
    engines = set()
    for container in containers:
        if container.get("loader") != "LookLoader":
            continue
        # Select assigned
        members = cmds.sets(container["objectName"], query=True)
        engines.update(cmds.ls(members, type="shadingEngine"))

    with maintained_selection():
        cmds.select(list(engines), replace=True)

        # Parse selected
        containers = avalon.maya.ls()
        container_names = {c["objectName"] for c in containers}

        selected_items = set()
        for node in cmds.ls(sl=True):
            for objset in cmds.listSets(object=node) or []:
                if objset in container_names:
                    selected_items.add(objset)
                    break

    return selected_items
def developShading():
    """Duplicate the materials (and displacements) assigned to the
    selected objects and reassign the duplicates to those objects.

    NOTE(review): `remapAndLuminaceNodes` is appended to but never
    defined in this function — presumably a module-level global; confirm,
    otherwise this raises NameError.
    """
    selObj = cmds.ls(sl = 1)
    objShadings = list()
    allObjShadings = list()
    oldMaterials = list()
    newMaterials = list()
    materialDisplacement = dict()
    materialObject = dict()
    objectsMaterial = dict()
    duplicateAllNodes = list()

    # Map each selected object to its assigned material (and vice versa)
    for item in selObj:
        objShape = cmds.listRelatives(item, s = 1, f = 1)
        objShapeLongName = cmds.ls(objShape[0], l = 1)
        objShading = cmds.listSets(o = objShapeLongName[0])
        objMaterial = cmds.listConnections('%s.surfaceShader'%objShading[0], d = 0, s = 1)
        oldMaterials.append(objMaterial[0])
        objectsMaterial[objMaterial[0]] = item
        materialObject[item] = objMaterial[0]
        allObjShadings.extend(objShading)

    objShading = list(set(allObjShadings))
    setOldMaterials = list(set(oldMaterials))

    # Pair each material with its displacement shader.
    # NOTE(review): listConnections returns None when no displacement is
    # connected, so objDisplacment[0] would raise here — confirm every
    # shading group is expected to have a displacement.
    for item in objShading:
        objMaterial = cmds.listConnections('%s.surfaceShader'%item, d = 0, s = 1)
        objDisplacment = cmds.listConnections('%s.displacementShader'%item, d = 0, s = 1)
        materialDisplacement[objMaterial[0]] = objDisplacment[0]

    # Duplicate each material network (upstream nodes included via un=1)
    for material, displacement in materialDisplacement.items():
        if displacement:
            duplicateAllNodes.extend(cmds.duplicate(material, un = 1))
            duplicateAllNodes.extend(cmds.duplicate(displacement, un = 1))
        else:
            duplicateAllNodes.extend(cmds.duplicate(material, un = 1))

    newMaterials = linkMaterialsToShading(duplicateAllNodes)

    # Wire each duplicated material's file node and record the helper
    # nodes expected to exist beside it (naming convention based)
    for item in newMaterials:
        fileNode = getShadingFileNode(item)
        linkFileToSpecular(fileNode[0], item)
        remapAndLuminaceNodes.append('%s_remapHsc'%item)
        remapAndLuminaceNodes.append('%s_solidFractal'%item)
        remapAndLuminaceNodes.append('%s_place3dTexture'%item)
        remapAndLuminaceNodes.append('%s_multiplyDivide'%item)
        remapAndLuminaceNodes.append('%s_ramp'%item)
        remapAndLuminaceNodes.append(item)

    print setOldMaterials
    # Reassign: for each object, find its old material's index and attach
    # the matching duplicated material's shading group.
    # NOTE(review): `objectsMaterial[material]` reuses the stale loop
    # variable from the duplication loop above (not `obj`) — this looks
    # like a bug; confirm intended behavior before relying on it.
    for obj in selObj:
        for index in range(len(setOldMaterials)):
            print index
            print materialObject[obj]
            if materialObject[obj] == setOldMaterials[index]:
                newMaterialShading = cmds.listConnections(newMaterials[index], d = 1, t = 'shadingEngine')
                cmds.sets(objectsMaterial[material], e = 1, fe = newMaterialShading[0])
    cmds.select(selObj, r = 1)
def addToSet(*args):
    """Add all selected nodes except the last to a set.

    When the last selected node is an objectSet, the other nodes are
    added to it; otherwise they are added to every set the last node
    belongs to.
    """
    sel = cm.ls(sl=True, flatten=True)
    if cm.objectType(sel[-1]) == "objectSet":
        cm.sets(sel[:-1], add=sel[-1])
    else:
        # BUGFIX: listSets returns None when the node is in no set;
        # guard so the loop does not raise TypeError.
        belongSets = cm.listSets(object=sel[-1]) or []
        for eachSet in belongSets:
            cm.sets(sel[:-1], add=eachSet)
def getAllCtrlSet(ctrlName):
    '''Given a ctrl return the set that contains all ctrls.

    This is a set under the master set with every rig ctrl.

    Args:
        ctrlName (str): name of a rig control.

    Returns:
        str or None: the all-ctrls set name, or None when not found.
    '''
    # BUGFIX: listSets returns None for nodes that are in no set; guard
    # so the loop does not raise TypeError.
    currentSets = cmds.listSets(object=ctrlName) or []
    for objSet in currentSets:
        if objSet.endswith(name.SEP + name.ALLCTRLSET):
            return objSet
    return None
def chkNameConflict(obj):
    """
    Check if object(s) with same name exist in scene.
    Checks for ICSets on those objects and renames them accordingly.
    """
    if mc.objExists(obj):
        verbose.nameConflict(obj)
        # NOTE(review): objSetLs and newObjName are computed but never
        # used afterwards — presumably the ICSet renaming described above
        # was meant to follow here or lives elsewhere; confirm before
        # relying on this function to actually rename the sets.
        objSetLs = mc.listSets(o=obj)
        newObjName = mayaOps.renameObj([obj], '%s_1' % obj, oldName=False)[0]
def drop_interface():
    """Remove deprecated interface nodes from scene

    Transfer data from interface node to container node and delete
    """
    PORTS = ":AVALON_PORTS"
    INTERFACE = "pyblish.avalon.interface"
    CONTAINERS = AVALON_CONTAINERS[1:]

    if not cmds.objExists(PORTS):
        return

    for interface in lib.lsAttr("id", INTERFACE):
        namespace = cmds.getAttr(interface + ".namespace")
        container = get_container_from_namespace(namespace)
        cmds.warning("Processing container: %s" % container)

        getter = (lambda a: cmds.getAttr(interface + "." + a))

        # Copy identity attributes from the interface onto the container,
        # creating the string attributes when missing
        for key, value in {
            "containerId": getter("containerId"),
            "assetId": getter("assetId"),
            "subsetId": getter("subsetId"),
            "versionId": getter("versionId"),
        }.items():
            if not cmds.objExists(container + "." + key):
                cmds.addAttr(container, longName=key, dataType="string")
            cmds.setAttr(container + "." + key, value, type="string")

        try:
            group = cmds.listConnections(interface + ".subsetGroup",
                                         source=True,
                                         destination=False)[0]
        except ValueError:
            # NOTE(review): listConnections returns None when nothing is
            # connected, so `[0]` raises TypeError — not ValueError — and
            # this handler likely never fires; confirm intended exception.
            pass
        else:
            # Connect subsetGroup
            grp_attr = container + "." + AVALON_GROUP_ATTR
            msg_attr = group + ".message"

            if not cmds.objExists(grp_attr):
                cmds.addAttr(container,
                             longName=AVALON_GROUP_ATTR,
                             attributeType="message")

            # Unlock, connect, re-lock
            if not cmds.isConnected(msg_attr, grp_attr):
                cmds.setAttr(grp_attr, lock=False)
                cmds.connectAttr(msg_attr, grp_attr, force=True)
                cmds.setAttr(grp_attr, lock=True)

        # Ensure container lives in main container
        if CONTAINERS not in cmds.listSets(o=container):
            cmds.sets(container, addElement=CONTAINERS)

        cmds.delete(interface)
def selectSetMembers(*args):
    """Replace the selection with the sets the selected nodes belong to
    (objectSets in the original selection are selected directly)."""
    sel = cm.ls(sl=True)
    cm.select(clear=True)
    for each in sel:
        if cm.objectType(each) == "objectSet":
            cm.select(each, add=True)
        else:
            # BUGFIX: listSets returns None for set-less nodes; guard.
            belongSets = cm.listSets(object=each) or []
            if belongSets:
                # BUGFIX: add to the selection instead of replacing it,
                # so earlier iterations (and the objectSet branch above,
                # which uses add=True) are not wiped out.
                cm.select(belongSets, add=True)
def process(self, context):
    """Collect look-dev shader associations into a 'lookdev' instance.

    Walks the meshes in the IDENTIFIER set, gathers every objectSet their
    shapes belong to, and records each set's members (uuid, components,
    user-defined attributes) as the instance "payload".
    """
    from maya import cmds

    self.log.info("Looking for shader associations..")
    if not cmds.objExists(self.IDENTIFIER):
        return self.log.info("Nothing found")

    self.log.info("Gathering object sets..")
    sets = dict()
    for mesh in cmds.sets(self.IDENTIFIER, query=True):
        for shape in cmds.listRelatives(mesh, shapes=True):
            shape = cmds.ls(shape, long=True, absoluteName=True)[0]

            # Discover related object sets
            for objset in cmds.listSets(object=shape):
                if objset not in sets:
                    sets[objset] = {
                        "uuid": cmds.getAttr(objset + ".uuid"),
                        "members": list()
                    }

    self.log.info("Gathering data..")
    for objset in sets:
        self.log.debug("From %s.." % objset)
        for member in cmds.ls(cmds.sets(objset, query=True),
                              long=True,
                              absoluteName=True):
            # Split "node.components" into (node, components);
            # components is None for whole-node members.
            node, components = (member.rsplit(".", 1) + [None])[:2]
            # Skip duplicates.
            # NOTE(review): compares the full member string against the
            # stored node names — a component member never matches;
            # confirm this dedupe behaves as intended.
            if member in [m["name"] for m in sets[objset]["members"]]:
                continue

            self.log.debug("Such as %s.." % member)
            sets[objset]["members"].append({
                "name": node,
                "uuid": cmds.getAttr(node + ".uuid"),
                "components": components,
                # All user-defined attributes except "uuid"
                "properties": dict(
                    (attr, cmds.getAttr(node + "." + attr))
                    for attr in (
                        cmds.listAttr(node, userDefined=True) or [])
                    if attr != "uuid")
            })

    payload = [dict(name=key, **value) for key, value in sets.items()]

    item = os.environ["ITEM"]
    instance = context.create_instance(item, family="lookdev")
    self.log.info("Storing data: %s" % payload)
    instance.set_data("payload", payload)

    self.log.info("Storing sets: %s" % sets.keys())
    instance[:] = cmds.ls(sets.keys(), absoluteName=True, long=True)
    self.log.info("Found %s" % instance)
def getSets(self):
    """Return the unique object sets any scene transform belongs to."""
    found = set()
    for transform in cmds.ls(transforms=True):
        found.update(cmds.listSets(object=transform) or [])
    return list(found)
def getShadingEngine(model):
    """
    Get shading engine from given mesh

    :param model: Transform name or mesh name
    :type model: str
    :return: Shading engine
    :rtype: str
    """
    if mc.objectType(model, isType='transform'):
        sets = mc.listSets(type=1, o=model, ets=True)
    elif mc.objectType(model, isType='mesh'):
        sets = mc.listSets(type=1, o=model, ets=False)
    else:
        # Neither transform nor mesh: scan the node history instead
        sets = pUtil.findTypeInHistory(model, 'shadingEngine',
                                       past=True, future=True)

    if not sets:
        # BUGFIX: was a Python-2 print statement; the function-call form
        # is valid on both Python 2 and 3. Returns None implicitly here.
        print("!!! Error: Shading engine not found.")
    else:
        return sets
def process(self, context):
    """Collect look-dev shader associations into a 'lookdev' instance.

    Near-duplicate of the other collector in this file: walks the meshes
    in the IDENTIFIER set, gathers every objectSet their shapes belong
    to, and records each set's members as the instance "payload".
    """
    from maya import cmds

    self.log.info("Looking for shader associations..")
    if not cmds.objExists(self.IDENTIFIER):
        return self.log.info("Nothing found")

    self.log.info("Gathering object sets..")
    sets = dict()
    for mesh in cmds.sets(self.IDENTIFIER, query=True):
        for shape in cmds.listRelatives(mesh, shapes=True):
            shape = cmds.ls(shape, long=True, absoluteName=True)[0]

            # Discover related object sets
            for objset in cmds.listSets(object=shape):
                if objset not in sets:
                    sets[objset] = {
                        "uuid": cmds.getAttr(objset + ".uuid"),
                        "members": list()
                    }

    self.log.info("Gathering data..")
    for objset in sets:
        self.log.debug("From %s.." % objset)
        for member in cmds.ls(cmds.sets(objset, query=True),
                              long=True,
                              absoluteName=True):
            # "node.components" → (node, components); components is None
            # for whole-node members
            node, components = (member.rsplit(".", 1) + [None])[:2]
            if member in [m["name"] for m in sets[objset]["members"]]:
                continue

            self.log.debug("Such as %s.." % member)
            sets[objset]["members"].append({
                "name": node,
                "uuid": cmds.getAttr(node + ".uuid"),
                "components": components,
                # All user-defined attributes except "uuid"
                "properties": dict(
                    (attr, cmds.getAttr(node + "." + attr))
                    for attr in (
                        cmds.listAttr(node, userDefined=True) or [])
                    if attr != "uuid")
            })

    payload = [dict(name=key, **value) for key, value in sets.items()]

    item = os.environ["ITEM"]
    instance = context.create_instance(item, family="lookdev")
    self.log.info("Storing data: %s" % payload)
    instance.set_data("payload", payload)

    self.log.info("Storing sets: %s" % sets.keys())
    instance[:] = cmds.ls(sets.keys(), absoluteName=True, long=True)
    self.log.info("Found %s" % instance)
def getSG(geo):
    """
    Get shading group assigned to specified geometry.
    @param geo: Geometry to get shading group from
    @type geo: str
    """
    # Rendering sets (type=1) assigned to the geometry's shape
    renderSets = cmds.listSets(extendToShape=True, type=1, object=geo)
    # Deduplicate and return
    return list(set(renderSets or []))
def removeFromAllSets(remSetList=None):
    '''
    Remove each object in the list from every set it belongs to.

    :param remSetList: list of node names (defaults to an empty list)
    :return: None
    '''
    # FIX: mutable default argument (was remSetList=[]); passing []
    # explicitly or nothing at all still behaves the same.
    if remSetList is None:
        remSetList = []
    for obj in remSetList:
        owningSets = mc.listSets(object=obj)
        if owningSets:
            # renamed loop var: the original shadowed the builtin `set`
            for owningSet in owningSets:
                mc.sets(obj, rm=owningSet)
def find(self):
    # Gather blendShapes reachable from the current base's deformer sets
    # (t=2), via their `usedBy` connections.
    setBlendShapes = []
    for d in iterable(mc.listSets(o=self.base[self.baseIndex], ets=True, t=2)):
        setBlendShapes.extend(
            iterable(mc.listConnections(d + '.usedBy', type='blendShape')))
    # blendShapes that appear in the base's construction history
    histBlendShapes = iterable(
        mc.ls(iterable(mc.listHistory(self.base)), type='blendShape'))
    # Keep only the first blendShape present in both views.
    # NOTE(review): `self.append` implies this class is list-like
    # (accumulates found nodes on itself) — confirm.
    for bs in setBlendShapes:
        if bs in histBlendShapes:
            self.append(bs)
            break
def remove_select():
    """Move the selected models out of their current set and into a new
    set named from the 'new_setName' text field.

    Models in exactly two sets (their own plus the viewport's
    isolate-select set) are removed from the non-viewport one; models in
    more sets only trigger a warning.
    """
    new_set_name = cmds.textField('new_setName', q=True, text=True)
    sel_model = cmds.ls(sl=True)
    for key in sel_model:
        owning_sets = cmds.listSets(object=key)
        if len(owning_sets) == 2:
            for name in owning_sets:
                # BUGFIX: original used `is not`, which compares object
                # identity, not string equality — use `!=` for strings.
                if name != "modelPanel4ViewSelectedSet":
                    set_name = name
            cmds.sets(key, rm=set_name)
        elif len(owning_sets) > 2:
            cmds.warning('Please check the set for the selected object!')
    cmds.sets(sel_model, n=new_set_name)
def deleteICDataSet(objLs):
    """Strip the icARefTag attribute from each object and delete the
    first ICSet node found on it."""
    for obj in objLs:
        tagAttr = '%s.icARefTag' % obj
        if mc.objExists(tagAttr):
            mc.setAttr(tagAttr, l=False)
            mc.deleteAttr(tagAttr)

        memberSets = mc.listSets(o=obj)
        if not memberSets:
            continue
        for memberSet in memberSets:
            if mc.nodeType(memberSet) == 'ICSet':
                dispAttr = '%s.overrideComponentDisplay' % memberSet
                mc.setAttr(dispAttr, l=False)
                mc.setAttr(dispAttr, 0)
                mc.delete(memberSet)
                break
def removeLocators(locators=None):
    """Remove massive-group membership and the 'massive' attribute from
    the given locators.

    :param locators: list of locator names; falls back to the current
        selection when empty/None.
    """
    # FIX: mutable default argument (was locators=[]); empty/None both
    # fall through to the selection, as before.
    if not locators:
        selection = mc.ls(sl=True)
        if not selection:
            return
        removeLocators(selection)
    else:
        for locator in locators:
            # BUGFIX: listSets returns None for set-less nodes; guard
            # so the loop does not raise TypeError.
            for owningSet in mc.listSets(object=locator) or []:
                if isGroup(owningSet):
                    mc.sets(locator, edit=True, rm=owningSet)
            mc.deleteAttr("%s.massive" % locator)
def find_stray_textures(instance):
    """Find file nodes that were not containerized"""
    containers = lib.lsAttr("id", AVALON_CONTAINER_ID)

    def _is_contained(node):
        # Contained == any of the node's sets is a known container
        owning = cmds.listSets(object=node) or []
        return any(owner in containers for owner in owning)

    return [node for node in cmds.ls(instance, type="file")
            if not _is_contained(node)]
def import_geo(self, spec):
    """Import OBJ geometry from spec['path'] into this node's transform.

    Helper sets created by the import are deleted, the single imported
    assembly is reparented under self.transform and its old transform
    removed.
    """
    old_objs = set(mc.ls(assemblies=True))
    old_sets = set(mc.listSets(allSets=True))

    # mo=0 signals to import into a single object.
    x = mc.file(spec['path'], i=True, type="OBJ", options='mo=0')

    new_objs = list(set(mc.ls(assemblies=True)).difference(old_objs))

    # Lots of extra sets get created that we don't want.
    new_sets = list(set(mc.listSets(allSets=True)).difference(old_sets))
    mc.delete(new_sets)

    if not new_objs:
        # BUGFIX: was a Python-2 `print a, b` statement; single-string
        # call prints the same text on both Python 2 and 3.
        print('No geometry in ' + str(spec['path']))
        return
    assert len(new_objs) == 1

    shape = mc.listRelatives(new_objs, fullPath=True, shapes=True)[0]
    shape = mc.parent(shape, self.transform, shape=True, relative=True)
    self.shape = mc.rename(self.shape, self.name + 'Shape')
    mc.delete(new_objs)
def maya_export_shader(obj_name, output_file):
    """Export the shader network assigned to `obj_name` (plus hook-up
    script nodes) to `output_file` as a Maya ASCII file.
    """
    shading_groups = set()
    shad_group_to_obj = {}
    if cmds.ls(obj_name, dag=True, type="mesh"):
        faces = cmds.polyListComponentConversion(obj_name, toFace=True)
        for shading_group in cmds.listSets(type=1, object=faces[0]):
            shading_groups.add(shading_group)
            shad_group_to_obj[shading_group] = obj_name

    shaders = set()
    script_nodes = []
    for shading_group in list(shading_groups):
        connections = cmds.listConnections(
            shading_group, source=True, destination=False)
        for shader in cmds.ls(connections, materials=True):
            shaders.add(shader)
            obj_name = shad_group_to_obj[shading_group]
            # BUGFIX: was a Python-2 print statement
            print(obj_name)

            # Instead of using a script node, it would be great to
            # this data in some other form. Metadata from red9 maybe?
            script_node = cmds.scriptNode(
                name="SHADER_HOOKUP_" + obj_name,
                scriptType=0,  # execute on demand.
                beforeScript=shader,
            )
            script_nodes.append(script_node)

    select_nodes = list(shaders)
    #select_nodes.extend(list(shading_groups))
    select_nodes.extend(script_nodes)
    cmds.select(select_nodes, replace=True)

    # write a .ma file to the publish path with the shader network
    # definitions
    cmds.file(
        output_file,
        type='mayaAscii',
        exportSelected=True,
        options="v=0",
        prompt=False,
        force=True
    )
def exportSculpt(path, sel=None):
    '''
    Export sculpt set memberships to text files under <path>/sculpt_set.

    Provide the full path: e.g. /home/user — a 'sculpt_set' sub-folder is
    created inside it when missing; one .txt file is written per set.

    sel = <Transform> Provide the transform that the sculpts are
        connected to, recommended through script.
    sel = <None> (Default), recommended through User Interface calls —
        falls back to the validated selection.
    '''
    # gui call, otherwise an object is expected
    if sel is None:  # was `== None`
        sel = validateSelection()
    if sel is None:
        return

    path = os.path.join(path, 'sculpt_set')
    # create the sculpt_set folder if one doesn't exist
    if not os.path.exists(path):
        os.mkdir(path)

    # get the shape
    shape = pm.ls(sel)[0].getShape().name()
    # get the deformer sets (t=2)
    sculptSets = cmds.listSets(type=2, object=shape)

    # iterate through each set, skipping tweak/skinCluster sets
    for _set in sculptSets:
        if _set.rfind('tweak') == -1 and _set.rfind('skinCluster') == -1:
            # find the deformer that the set is connected to
            deformer = cmds.listConnections(_set + '.usedBy',
                                            d=False, s=True)[0]
            # get the components in the set
            components = cmds.sets(_set, query=True)

            # Build the string to write to file, this is then later
            # eval'd — same '["a","b",...]' format as before, built with
            # join instead of a manual index loop
            exportStr = '[%s]' % ','.join(
                '"%s"' % comp for comp in components)

            # write out the file; `with` guarantees the handle is closed
            # (the original never called close on error paths)
            with open(os.path.join(path, _set + '.txt'), 'w') as _file:
                _file.write(deformer + '\n')
                _file.write(exportStr)
def _lightLinkSets(self):
    """Break non-default light-link connections on lightLinker1, then
    rebuild links from every 'LightLink_<light>' object set."""
    debug(app=self, method='_lightLinkSets',
          message='Light Linking Sets...', verbose=False)

    plugs = cmds.listConnections('lightLinker1',
                                 connections=True, plugs=True)
    keep = ('initialParticleSE', 'defaultLightSet', 'initialShadingGroup')
    # Alternating flat list → (destination, source) plug pairs
    for dstPlug, srcPlug in zip(plugs[::2], plugs[1::2]):
        # Leave Maya's default link wiring untouched
        if not any(token in srcPlug for token in keep):
            cmds.disconnectAttr(srcPlug, dstPlug)
    cmds.select(clear=True)

    for setName in cmds.listSets(allSets=True):
        if setName[:10] == 'LightLink_':
            # The set name encodes the light it links
            lightName = setName[10:]
            cmds.select(lightName, setName, replace=True, noExpand=True)
            cmds.lightlink(make=True,
                           useActiveLights=True,
                           useActiveObjects=True)
            cmds.select(clear=True)
def getShadingGroups(items=None):
    """Returns a list of shadingGroups associated with the given items."""
    from PMP.maya import getShapes
    items = ensureIterable_defaultToSelection(items, basestring)

    result = set()
    shapeNodes = []
    for item in items:
        if isAShadingGroup(item):
            result.add(item)
        else:
            shapeNodes.extend(getShapes(item))

    for shapeNode in shapeNodes:
        memberSets = cmds.listSets(type=1, object=shapeNode)
        if memberSets is not None:
            result.update(
                [candidate for candidate in memberSets
                 if isAShadingGroup(candidate)])

    return list(result)
def updateData(self, *a, **kw):
    """ Updates the stored data

    Stores the member list, quick-select (qss) state, parent sets,
    Maya-default-set state and the cgm set type.
    """
    self.setList = mc.sets(self.nameLong, q=True)
    if not self.setList:
        self.setList = []
    # 'gCharacterSet' text marks a quick-select set
    if mc.sets(self.nameLong, q=True, text=True) == 'gCharacterSet':
        self.qssState = True
    else:
        self.qssState = False
    self.parents = mc.listSets(o=self.nameLong)

    #If it's a maya set
    # NOTE(review): substring match on the set name; any name containing
    # one of these tokens is flagged — confirm intended.
    self.mayaSetState = False
    for check in ['defaultCreaseDataSet',
                  'defaultObjectSet',
                  'defaultLightSet',
                  'initialParticleSE',
                  'initialShadingGroup',
                  'tweakSet']:
        if check in self.nameLong and not self.qssState:
            self.mayaSetState = True

    # Map the cgmType tag back to a known set type key
    typeBuffer = search.returnTagInfo(self.nameLong, 'cgmType')
    if typeBuffer:
        for t in setTypes.keys():
            if setTypes.get(t) == typeBuffer:
                self.setType = t
        # Raw tag fallback.
        # NOTE(review): assumes self.setType exists already (set in
        # __init__ presumably); otherwise AttributeError — confirm.
        if not self.setType:
            self.setType = typeBuffer
def Cas_CRTS_getJointSet(jCluster):
    """Return the sets the given joint cluster belongs to."""
    return cmds.listSets(o=jCluster)
def getSGsFromShape(shape):
    """Return all the Shading Groups connected to the shape"""
    # type=1 → rendering sets. Faster than
    # cmds.listConnections(shape, destination=True, source=False,
    #                      plugs=False, type="shadingEngine")
    engines = cmds.listSets(object=shape, type=1, extendToShape=True)
    if not engines:
        return []
    return list(set(engines))
def __publish_maya_shader_network(
    self, item, output, work_template, primary_publish_path,
    sg_task, comment, thumbnail_path, progress_cb
):
    """
    Publish shader networks for the asset and register with Shotgun.

    :param item:                    The item to publish
    :param output:                  The output definition to publish with
    :param work_template:           The work template for the current scene
    :param primary_publish_path:    The path to the primary published file
    :param sg_task:                 The Shotgun task we are publishing for
    :param comment:                 The publish comment/description
    :param thumbnail_path:          The path to the publish thumbnail
    :param progress_cb:             A callback that can be used to report
                                    progress
    """
    # determine the publish info to use
    #
    progress_cb(10, "Determining publish details")

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]

    shader_name = item["name"]
    fields["obj_name"] = shader_name
    # strip non-alphanumeric characters for the template's name field
    fields["name"] = re.sub(r"[\W_]+", "", shader_name)

    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]
    publish_path = publish_template.apply_fields(fields)

    # ensure the publish folder exists:
    publish_folder = os.path.dirname(publish_path)
    self.parent.ensure_folder_exists(publish_folder)

    # determine the publish name:
    publish_name = fields.get("obj_name")
    if not publish_name:
        publish_name = os.path.basename(publish_path)

    # Find additional info from the scene:
    #
    progress_cb(10, "Analysing scene")

    # Collect the shading groups assigned to the object's faces.
    # (There is probably a better way to do this.)
    shading_groups = set()
    if cmds.ls(shader_name, dag=True, type="mesh"):
        faces = cmds.polyListComponentConversion(shader_name, toFace=True)
        for shading_group in cmds.listSets(type=1, object=faces[0]):
            shading_groups.add(shading_group)

    # Resolve the material nodes feeding those shading groups
    shaders = set()
    for shading_group in shading_groups:
        connections = cmds.listConnections(
            shading_group, source=True, destination=False)
        for shader in cmds.ls(connections, materials=True):
            shaders.add(shader)

    if not shaders:
        progress_cb(100, "No shader networks to export.")
        return

    cmds.select(list(shaders), replace=True)

    # write a .ma file to the publish path with the shader network
    # definitions
    progress_cb(25, "Exporting the shader network.")
    cmds.file(publish_path, type="mayaAscii", exportSelected=True,
              options="v=0", prompt=False, force=True)

    # register the publish:
    progress_cb(75, "Registering the publish")
    args = {
        "tk": self.parent.tank,
        "context": self.parent.context,
        "comment": comment,
        "path": publish_path,
        "name": publish_name,
        "version_number": publish_version,
        "thumbnail_path": thumbnail_path,
        "task": sg_task,
        "dependency_paths": [primary_publish_path],
        "published_file_type": tank_type,
    }
    tank.util.register_publish(**args)
def exportWeight(path="C:/afok_tools/Data/skin_weight_editor/test.xml",
                 shapeNode="pCubeShape1",
                 skinClusterNode="skinCluster1"):
    """
    Export per-vertex positions and skinCluster weights to an XML file.

    The previously hard-coded test values are now defaulted parameters,
    so existing no-argument calls behave exactly as before.

    :param path:            Output XML file path.
    :param shapeNode:       Mesh shape node whose vertices are exported.
    :param skinClusterNode: skinCluster deformer to read weights from.
    """
    vertexCount = cmds.polyEvaluate(shapeNode, v=True)

    # root_element
    root_element = xml.Element("exported_weight")

    # shape_element: world-space position of every vertex
    shape_element = xml.Element("shape")
    shape_element.attrib["name"] = shapeNode
    shape_element.attrib["size"] = str(vertexCount)
    root_element.append(shape_element)

    for v in range(vertexCount):
        pos = cmds.xform("%s.vtx[%d]" % (shapeNode, v), q=True, ws=True, t=True)
        point_element = xml.Element("point")
        point_element.attrib["index"] = str(v)
        point_element.attrib["value"] = " %.6f %.6f %.6f" % (pos[0], pos[1], pos[2])
        shape_element.append(point_element)

    # sets_members: vertex indices that belong to the deformer set
    sets_element = xml.Element("sets")
    sets = cmds.listSets(object=skinClusterNode, type=2)
    sets_members = cmds.ls(cmds.sets(sets, q=True), flatten=True)
    for vertex in sets_members:
        # vertex looks like "shape.vtx[12]" -- pull out the bare index
        index = vertex.split("[")[1].split("]")[0]
        point_element = xml.Element("point")
        point_element.attrib["index"] = str(index)
        sets_element.append(point_element)
    sets_element.attrib["size"] = str(len(sets_members))
    sets_element.attrib["deformer"] = skinClusterNode
    root_element.append(sets_element)

    # joint infos: map joint name -> matrix plug index on the skinCluster.
    # listConnections can return None; guard so an unbound cluster
    # simply exports no weights instead of raising TypeError.
    joint_connections = cmds.listConnections(
        skinClusterNode + ".matrix", type="joint", c=True) or []
    joint_infos = {}
    for i in range(0, len(joint_connections), 2):
        joint_index = int(joint_connections[i].split("[")[1].split("]")[0])
        joint_name = joint_connections[i + 1]
        joint_infos[joint_name] = joint_index

    # weights_element: one <weights> node per joint, only non-zero weights
    for joint_name, joint_index in joint_infos.items():
        weights_element = xml.Element("weights")
        counts = 0
        for vertex in range(vertexCount):
            value = cmds.getAttr(
                skinClusterNode + ".wl[%d].w[%d]" % (vertex, joint_index))
            if value:
                counts += 1
                point_element = xml.Element("point")
                point_element.attrib["index"] = "%d" % vertex
                point_element.attrib["value"] = "%.6f" % value
                weights_element.append(point_element)
        weights_element.attrib["shape"] = shapeNode
        weights_element.attrib["joint"] = str(joint_name)
        weights_element.attrib["size"] = str(counts)
        root_element.append(weights_element)

    # write xml
    with open(path, "w") as f:
        xml.ElementTree(root_element).write(f)
def export_materials(self, transform, awd_inst):
    """
    Export AWD materials/textures for the shading engines assigned to
    `transform` and attach them to `awd_inst`.

    Walks the history of each shadingEngine set on the transform:
    - 'lambert' nodes become (cached) AWDMaterial blocks,
    - the first 'file' node becomes the material's texture,
    - 'place2dTexture' nodes toggle the material's repeat flag.

    :param transform: Maya transform node to read shading sets from.
    :param awd_inst:  AWD instance whose `materials` list is appended to.
    """
    # NOTE: `set` renamed to `shading_set` -- the original shadowed the builtin.
    sets = mc.listSets(object=transform, t=1, ets=True)
    if sets is None:
        return
    for shading_set in sets:
        if mc.nodeType(shading_set) != 'shadingEngine':
            continue
        tex = None
        mat = None
        mat_his = mc.listHistory(shading_set)
        for state in mat_his:
            state_type = mc.nodeType(state)
            if state_type == 'lambert':
                # Reuse a cached material block if this shader was seen before.
                mat = self.block_cache.get(state)
                if mat is None:
                    mat = AWDMaterial(AWDMaterial.BITMAP, name=self.get_name(state))
                    self.awd.add_material(mat)
                    self.block_cache.add(state, mat)
                    print('created material')
                    if self.alpha_blending or self.alpha_threshold > 0.0:
                        # Check if transparency is an input (rather than scalars)
                        # in which case the material needs to be marked as transparent,
                        # to indicate that the texture's alpha channel should be used.
                        tr_input = mc.connectionInfo('%s.it' % state, isDestination=True)
                        if tr_input:
                            if self.alpha_threshold > 0.0:
                                mat.alpha_threshold = self.alpha_threshold
                            else:
                                mat.alpha_blending = True
                awd_inst.materials.append(mat)
                print('adding material ' + state)
            # Only check the first file, which will likely be the color input.
            # TODO: This needs to be solved in a prettier way for normal maps
            # and other inputs like that.
            elif state_type == 'file' and tex is None:
                tex = self.block_cache.get(state)
                if tex is None:
                    tex_abs_path = str(mc.getAttr(state + '.fileTextureName'))
                    if self.embed_textures:
                        tex = AWDBitmapTexture(AWDBitmapTexture.EMBED,
                                               name=self.get_name(state))
                        tex.embed_file(tex_abs_path)
                        print('embedding %s' % tex_abs_path)
                    else:
                        tex = AWDBitmapTexture(AWDBitmapTexture.EXTERNAL,
                                               name=self.get_name(state))
                        # store the path relative to the project workspace
                        tex.url = mc.workspace(pp=tex_abs_path)
                    self.awd.add_texture(tex)
                    self.block_cache.add(state, tex)
                    print('created texture')
                if mat is not None:
                    mat.texture = tex
            elif state_type == 'place2dTexture' and mat is not None:
                # Determine from place2dTexture node whether
                # this material should repeat/wrap
                rep_uv = mc.getAttr('%s.re' % state)[0]
                if rep_uv[0] != 1.0 or rep_uv[1] != 1.0:
                    mat.repeat = True
                elif mc.getAttr(state + '.wu') or mc.getAttr(state + '.wv'):
                    mat.repeat = True
def __publish_maya_shader_network(self, item, output, work_template,
                                  primary_publish_path, sg_task, comment,
                                  thumbnail_path, progress_cb):
    """
    Publish shader networks for the asset and register with Shotgun.

    This variant also creates SHADER_HOOKUP_* script nodes so the object
    to shader assignment can be reconstructed when the file is imported.

    :param item:                 The item to publish
    :param output:               The output definition to publish with
    :param work_template:        The work template for the current scene
    :param primary_publish_path: The path to the primary published file
    :param sg_task:              The Shotgun task we are publishing for
    :param comment:              The publish comment/description
    :param thumbnail_path:       The path to the publish thumbnail
    :param progress_cb:          A callback that can be used to report progress
    """
    # determine the publish info to use
    # progress_cb(10, "Determining publish details")

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]

    obj_name = item['name']
    fields['obj_name'] = obj_name
    # strip non-word characters so the name is template/filesystem safe
    fields['name'] = re.sub(r'[\W_]+', '', obj_name)

    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]
    publish_path = publish_template.apply_fields(fields)

    # ensure the publish folder exists:
    publish_folder = os.path.dirname(publish_path)
    self.parent.ensure_folder_exists(publish_folder)

    # determine the publish name:
    publish_name = fields.get("obj_name")
    if not publish_name:
        publish_name = os.path.basename(publish_path)

    # Find additional info from the scene:
    # progress_cb(10, "Analysing scene")

    # clean up any hookup nodes that existed before
    _clean_shader_hookup_script_nodes()

    # Collect the shading groups assigned to the object's faces.
    # there's probably a better way to do this. i am jon snow (i know
    # nothing)
    shading_groups = set()
    shad_group_to_obj = {}
    if cmds.ls(obj_name, dag=True, type="mesh"):
        faces = cmds.polyListComponentConversion(obj_name, toFace=True)
        # polyListComponentConversion may yield nothing and
        # cmds.listSets returns None when no sets match -- guard both
        # so we don't crash on an unassigned mesh.
        if faces:
            for shading_group in (cmds.listSets(type=1, object=faces[0]) or []):
                shading_groups.add(shading_group)
                shad_group_to_obj[shading_group] = obj_name

    shaders = set()
    script_nodes = []
    for shading_group in list(shading_groups):
        connections = cmds.listConnections(
            shading_group, source=True, destination=False)
        for shader in cmds.ls(connections, materials=True):
            shaders.add(shader)
            obj_name = shad_group_to_obj[shading_group]

            # can't seem to store arbitrary data in maya in any
            # reasonable way. would love to know a better way to
            # do this. for now, just create a script node that
            # we can easily find and deduce an object name and
            # shader name. Yes, this is hacky.
            script_node = cmds.scriptNode(
                name="SHADER_HOOKUP_" + obj_name,
                scriptType=0,  # execute on demand.
                beforeScript=shader,
            )
            script_nodes.append(script_node)

    if not shaders:
        progress_cb(100, "No shader networks to export.")
        return

    select_nodes = list(shaders)
    #select_nodes.extend(list(shading_groups))
    select_nodes.extend(script_nodes)
    cmds.select(select_nodes, replace=True)

    # write a .ma file to the publish path with the shader network definitions
    progress_cb(25, "Exporting the shader network.")
    cmds.file(
        publish_path,
        type='mayaAscii',
        exportSelected=True,
        options="v=0",
        prompt=False,
        force=True
    )

    # clean up shader hookup nodes. they should exist in the publish file
    # only.
    _clean_shader_hookup_script_nodes()

    # register the publish:
    progress_cb(75, "Registering the publish")
    args = {
        "tk": self.parent.tank,
        "context": self.parent.context,
        "comment": comment,
        "path": publish_path,
        "name": publish_name,
        "version_number": publish_version,
        "thumbnail_path": thumbnail_path,
        "task": sg_task,
        "dependency_paths": [primary_publish_path],
        "published_file_type": tank_type,
    }
    tank.util.register_publish(**args)
def _exportMesh(self, dagPath, component, meshName): mesh = MFnMesh(dagPath) options = self.options.copy() self.currentMeshName = meshName self._updateOffsets() # export vertex data if options['vertices']: try: iterVerts = MItMeshVertex(dagPath, component) while not iterVerts.isDone(): point = iterVerts.position(MSpace.kWorld) self.vertices += [_round8(point.x), _round8(point.y), _round8(point.z)] iterVerts.next() except: options['vertices'] = False print 'ERROR: Could not export Face Vertices!' # export uv data if options['uvs']: try: uvLayers = [] mesh.getUVSetNames(uvLayers) while len(uvLayers) > len(self.uvs): self.uvs.append([]) self.offsets['uvs'].append(0) for i, layer in enumerate(uvLayers): uList = MFloatArray() vList = MFloatArray() mesh.getUVs(uList, vList, layer) for j in xrange(uList.length()): self.uvs[i] += [_round8(uList[j]), _round8(vList[j])] except: options['uvs'] = False print 'ERROR: Could not export UVs!' # export normal data if options['normals']: try: normals = MFloatVectorArray() mesh.getNormals(normals, MSpace.kWorld) for i in xrange(normals.length()): point = normals[i] self.normals += [_round8(point.x), _round8(point.y), _round8(point.z)] except: options['normals'] = False print 'ERROR: Could not export Normals!' # export color data if options['colors']: try: colors = MColorArray() mesh.getColors(colors) for i in xrange(colors.length()): color = colors[i] # uncolored vertices are set to (-1, -1, -1). Clamps colors to (0, 0, 0). self.colors += [max(color.r, 0), max(color.g, 0), max(color.b, 0)] except: options['colors'] = False print 'ERROR: Could not export Normals!' 
# export face data if options['vertices']: #try: bitmask = self._getTypeBitmask(options) iterPolys = MItMeshPolygon(dagPath, component) currentPoly = 0 while not iterPolys.isDone(): # export face vertices verts = MIntArray() iterPolys.getVertices(verts) if verts.length() == 3: self.faces.append(bitmask) elif verts.length() == 4: self.faces.append(bitmask + 1) else: print 'ERROR: One or more of your faces have more than 4 sides! Please Triangulate your Mesh and try Again.' raise ThreeJsError(('ERROR: One or more of your faces have more than 4 sides!!' + meshName + '[' + str(currentPoly) + ']. Please Triangulate your Mesh and try Again: {0}').format(self.accessMode)) for i in xrange(verts.length()): self.faces.append(verts[i] + self.offsets['vertices']) # export face vertex materials if options['materials']: materialIndex = 0 meshTransform = mc.listRelatives(meshName, parent=True, fullPath=True) face = str(meshTransform[0]) + '.f['+str(iterPolys.index())+']' #print face sgs = mc.listSets(t=1, o=face) if sgs != None: material = mc.ls(mc.listConnections(sgs[0]),materials=1) if len(material): for i in xrange(len(self.materials)): if self.materials[i]['DbgName'] == material[0]: materialIndex = i #print face + ' has material ' + str(material[0]) + ' index: ' + str(i) self.faces.append(materialIndex) #self.faces.append(len(self.materials)) # export face vertex uvs if options['uvs']: util = MScriptUtil() uvPtr = util.asIntPtr() for i, layer in enumerate(uvLayers): for j in xrange(verts.length()): iterPolys.getUVIndex(j, uvPtr, layer) uvIndex = util.getInt(uvPtr) self.faces.append(uvIndex + self.offsets['uvs'][i]) # export face vertex normals if options['normals']: for i in xrange(verts.length()): normalIndex = iterPolys.normalIndex(i) self.faces.append(normalIndex + self.offsets['normals']) # export face vertex colors if options['colors']: colors = MIntArray() iterPolys.getColorIndices(colors) for i in xrange(colors.length()): self.faces.append(colors[i] + 
self.offsets['colors']) currentPoly += 1 iterPolys.next() '''
def returnObjectSets():
    """
    Return a semi intelligent dictionary of sets in a maya scene file.

    Return dict keys:
    all(list) -- all sets found
    maya(list) -- maya made and controlled sets (tweakSet, etc)
    render(list) -- sets returned by mc.listSets(type=1)
    deformer(list) -- sets returned by mc.listSets(type=2)
    referenced(dict) -- ['From Scene'] are local sets, all other sets are
                        indexed to their reference prefix
    qss(list) -- quick select sets
    cgmTypes(dict) -- sets indexed to their type as understood by cgm tools
                      ('cgmType' tag); unmatched sets land in 'NONE'
    objectSetGroups(list) -- sets tagged as 'objectSetGroup'
    """
    returnSetsDict = {'maya': [], 'qss': [], 'referenced': {},
                      'cgmTypes': {}, 'objectSetGroups': []}
    returnSetsDict['all'] = mc.ls(type='objectSet') or []
    returnSetsDict['render'] = mc.listSets(type=1) or []
    returnSetsDict['deformer'] = mc.listSets(type=2) or []
    refBuffer = {'From Scene': []}
    returnSetsDict['referenced'] = refBuffer
    typeBuffer = {'NONE': []}
    returnSetsDict['cgmTypes'] = typeBuffer

    # name fragments (or fragment combos) that mark maya-controlled sets
    mayaSetChecks = ['defaultCreaseDataSet', 'defaultObjectSet',
                     'defaultLightSet', 'initialParticleSE',
                     'initialShadingGroup', 'Vray', 'SG',
                     ['cluster', 'Set'], ['skinCluster', 'Set'], 'tweakSet']

    for s in returnSetsDict['all']:
        # Get our qss sets
        if mc.sets(s, q=True, text=True) == 'gCharacterSet':
            returnSetsDict['qss'].append(s)

        # Get our maya sets -- a list check requires ALL fragments present
        for check in mayaSetChecks:
            if type(check) is list:
                hits = [1 if c in s else 0 for c in check]
                if len(hits) == sum(hits):
                    returnSetsDict['maya'].append(s)
                    break
            elif check in s:
                returnSetsDict['maya'].append(s)
                break

        # Get our reference prefixes and sets sorted out
        if mc.referenceQuery(s, isNodeReferenced=True):
            refPrefix = returnReferencePrefix(s)
            if refPrefix in refBuffer.keys():
                refBuffer[refPrefix].append(s)
            else:
                refBuffer[refPrefix] = [s]
        else:
            refBuffer['From Scene'].append(s)

        # Type sort.
        # BUGFIX: the original appended s to 'NONE' once per NON-matching
        # tag (duplicating it len(setTypes)-1 times); now a set goes to
        # 'NONE' only when no tag matches at all. Also renamed the local
        # 'buffer' which shadowed the builtin.
        typeTag = returnTagInfo(s, 'cgmType')
        matched = False
        for tag in dictionary.setTypes.keys():
            if dictionary.setTypes[tag] == typeTag:
                if tag in typeBuffer.keys():
                    typeBuffer[tag].append(s)
                else:
                    typeBuffer[tag] = [s]
                matched = True
                break
        if not matched:
            typeBuffer['NONE'].append(s)

        # Set group check
        if returnTagInfo(s, 'cgmType') == 'objectSetGroup':
            returnSetsDict['objectSetGroups'].append(s)

    return returnSetsDict