def appendNclothCache(self, *args, **kwargs):
    self.getUersTimer()
    if "startTime" in kwargs:
        nowTime = kwargs['startTime']
    else:
        nowTime = cmds.currentTime(query=True)
    if self.SelctShapeList:
        display.displayViewPrint(u'<hl style="color:#FF4500"> Creating caches for the selection (%s)</hl>' % self.SelctShapeList)
        for s in self.SelctShapeList:
            if self.__getNodeStatus__(s):
                cmds.cacheFile(appendFrame=True, replaceCachedFrame=True,
                               simulationRate=1, sampleMultiplier=1,
                               noBackup=True, cnd=s)
                self.showDisplay(s)
    else:
        display.displayViewPrint(u'<hl style="color:#FF4500"> Creating caches for every nucleus-driven node in the scene</hl>')
        for key, value in self.ActiveCacheNodeDict.items():
            if self.__getNodeStatus__(key):
                cmds.cacheFile(appendFrame=True, replaceCachedFrame=True,
                               simulationRate=1, sampleMultiplier=1,
                               noBackup=True, cnd=key)
                self.showDisplay(key)
    if nowTime == self.EndTime:
        if len(args) > 0:
            fun = args[0]
            cmds.iconTextButton('threadPlay', e=True, l='Play',
                                image1="interactivePlayback.png")
            fun()
    else:
        nowTime += 1
        cmds.currentTime(nowTime)
def importAssetCache(self, cacheXmlLt, cacheErrorCheck=False):
    """
    cacheXmlLt = "R:/data/cache/sq001/sh001/light/char/ben00c_ben/ben00c_ben.xml"
    """
    if os.path.exists(cacheXmlLt):
        cacheChannels = mc.cacheFile(fileName=cacheXmlLt, q=1, channelName=1)
        cacheGeos = self.getCacheGeos()
        cacheGeoDict, cacheChannelsTmp = {}, []
        for chn in cacheChannels:
            for geo in cacheGeos:
                baseChn = utils.stripNames(utils.convertName(chn, "texture"))
                baseGeo = utils.stripNames(utils.stripNames(geo, ":"), "|")
                if baseChn in baseGeo:
                    cacheGeoDict[chn] = geo
                    cacheChannelsTmp.append(chn)
    else:
        utils.msgWin("Error", "File does not exist : %s" % cacheXmlLt, self.silent)
        return False
    if cacheErrorCheck:
        missedChannels = list(set(cacheChannels).difference(set(cacheGeoDict.keys())))
        if len(missedChannels) > 0:
            msg = "Cache geometry missing\n"
            msg += "\n".join(missedChannels)
            utils.msgWin("Error", msg, self.silent)
            return missedChannels
        else:
            return False
    for chNode in self.getCacheNodes():
        mc.delete(chNode)
    for chn in cacheGeoDict.keys():
        deformShp = cacheGeoDict[chn]
        try:
            shpSwitch = mc.deformer(deformShp, type="historySwitch")
        except:
            continue
        shpHist = mc.listHistory(deformShp, pdo=1)
        if shpHist:
            for hist in shpHist:
                if mc.nodeType(hist) == "tweak":
                    dblList = mc.listAttr("%s.plist" % hist, m=1)
                    fltList = mc.listAttr("%s.vlist" % hist, m=1)
                    dbCon, flCon = False, False
                    if dblList:
                        if len(dblList) > 1:
                            dbCon = True
                    if fltList:
                        if len(fltList) > 1:
                            flCon = True
                    if not (dbCon or flCon):
                        mc.delete(hist)
                    break
        conns = mc.listConnections("%s.ip[0].ig" % shpSwitch[0], p=1)
        mc.connectAttr(conns[0], "%s.ug[0]" % shpSwitch[0])
        mc.setAttr("%s.playFromCache" % shpSwitch[0], 1)
        mc.getAttr("%s.op[0]" % shpSwitch[0], sl=1)
        mc.setAttr("%s.playFromCache" % shpSwitch[0], 0)
        mc.disconnectAttr(conns[0], "%s.ug[0]" % shpSwitch[0])
        switch = mc.rename(shpSwitch[0], 'cacheSwitch#')
        mc.setAttr(switch + '.ihi', 0)
        cacheNode = mc.cacheFile(f=cacheXmlLt, attachFile=True,
                                 ia='%s.inp[0]' % switch, cnm=chn)
        mc.connectAttr(cacheNode + ".inRange", switch + '.playFromCache')
    utils.msgWin("Message", "Cache loaded successfully for %s" % self.namespace, self.silent)
    return True
def doOperation(self, operands):
    result = 'done'
    frameRange = operands['FrameRange']['range'].value
    meshPrimitives = operands["%sNodes" % self.prefix].value.split(',')
    tmpFileName = tempfile.mkstemp()[1]
    self.data['assetPath'] = "%sFrame%%d.mc" % tmpFileName
    self.data['multipleFiles'] = range(frameRange[0], frameRange[1] + 1)
    self.data['nodes'] = meshPrimitives
    if m:
        m.cycleCheck(e=0)
        m.cacheFile(
            dtf=1,
            format="OneFilePerFrame",
            fileName=os.path.basename(tmpFileName),
            directory=os.path.dirname(tmpFileName),
            cacheableNode=' '.join(meshPrimitives),
            st=frameRange[0],
            et=frameRange[1],
        )
        # check if the simulation finished successfully by looking for all the cached files!
        frames = self.data['multipleFiles']
        if len(filter(lambda x: os.path.exists(self.data['assetPath'] % x), frames)) != len(frames):
            raise Exception("Simulation was cancelled by user interaction or some other unknown issue. Asset not published!!")
    else:
        raise Exception("We can only publish nParticles from within maya!")
    return IECore.StringData(result)
def __publish_maya_geometry_cache(config):
    """Publish the asset as a Maya Geometry Cache based on the configuration."""
    if not os.path.exists(config.path):
        os.makedirs(config.path)
    selection = cmds.ls(sl=True)
    selected_shapes = cmds.listRelatives(selection, s=True)
    if not selected_shapes:
        cmds.confirmDialog(
            title="Cannot publish asset",
            message="The asset cannot be published. Please select the geometry you want to publish.",
            button=["Ok"],
            defaultButton="Ok")
        return
    start_frame = cmds.playbackOptions(q=True, ast=True)
    end_frame = cmds.playbackOptions(q=True, aet=True)
    cmds.cacheFile(f=config.file_name,
                   dir=config.path,
                   pts=selected_shapes,
                   st=start_frame,
                   et=end_frame,
                   r=True,
                   ws=True,
                   fm="OneFile",
                   sch=True)
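A minimal sketch of a configuration object that could drive the publisher above. `CacheConfig` and its field names are assumptions for illustration; the original tool defines its own config type.

# Hypothetical config for __publish_maya_geometry_cache; only `path` and
# `file_name` are read by the function above.
import collections

CacheConfig = collections.namedtuple('CacheConfig', ['path', 'file_name'])

config = CacheConfig(path='D:/publish/geoCache', file_name='hero_body')
# __publish_maya_geometry_cache(config)  # caches the selected shapes over the playback range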
def append_ncache(nodes=None, evaluate_every_frame=1.0, save_every_evaluation=1):
    nodes = nodes or cmds.ls(DYNAMIC_NODES)
    nodes = filter_invisible_nodes_for_manager(nodes)
    cmds.cacheFile(
        refresh=True,
        noBackup=True,
        simulationRate=evaluate_every_frame,
        sampleMultiplier=save_every_evaluation,
        cacheableNode=nodes,
        startTime=cmds.currentTime(query=True),
        endTime=cmds.playbackOptions(max=True, query=True))
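A hedged usage sketch for the function above: the node names are hypothetical, and `DYNAMIC_NODES` / `filter_invisible_nodes_for_manager` are assumed to come from the same module.

# Hypothetical usage: append onto the existing caches of two dynamic shapes,
# evaluating every frame and saving every evaluation.
append_ncache(nodes=['nClothShape1', 'hairSystemShape1'],
              evaluate_every_frame=1.0,
              save_every_evaluation=1)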
def doCreateCharacterGeoCache(self, value):
    geoCacheDirPath = self.geoCharacterCacheDir(value)
    ns = self.nameOfCharacterCache(value)
    self.shapes = self.deformCharacterShapeSel(value)
    mc.select(self.shapes, r=1)
    mc.cacheFile(staticCache=1,
                 dir=geoCacheDirPath,
                 f=ns,
                 format='OneFilePerFrame',  # or 'OneFile'
                 sch=1,
                 st=self.min,
                 et=self.max,
                 points=self.shapes)
def exportAssetCache(self, cacheXmlAn):
    """
    cacheXmlAn = "R:/data/cache/sq999/sh001"
    """
    self.animCachePath = self.getAstCacheFldr(cacheXmlAn, "anim")
    stF = mc.playbackOptions(q=1, ast=1)
    edF = mc.playbackOptions(q=1, aet=1)
    if os.path.exists(self.animCachePath):
        shutil.rmtree(self.animCachePath, ignore_errors=True)
    os.makedirs(self.animCachePath)
    pts = self.getCacheGeos()
    if pts:
        mc.cacheFile(pts=pts, f=self.astCacheName, directory=self.animCachePath,
                     cacheFormat="mcc", st=stF, et=edF, fm='OneFile', sch=1)
    utils.msgWin("Message", "Cache exported successfully for %s" % self.namespace, self.silent)
    return True
def doCreateCharacterGeoCache(self, value):
    geoCacheDirPath = self.geoCharacterCacheDir(value)
    ns = self.nameOfCharacterCache(value)
    self.shapes = self.deformCharacterShapeSel(value)
    mc.select(self.shapes, r=1)
    mc.cacheFile(staticCache=1,
                 dir=geoCacheDirPath,
                 f=ns,
                 format='OneFilePerFrame',  # or 'OneFile'
                 sch=1,
                 st=mc.textField(self.startText, q=1, text=1),
                 et=mc.textField(self.endText, q=1, text=1),
                 points=self.shapes,
                 dtf=True)
def geometryCacheReplace(self):
    object_listTemp = cmds.ls(sl=True, long=True)
    global object
    object = object_listTemp[0]
    dupObjectName = cmds.textField('Face_Control_Object_Name', q=True, tx=True)
    minTime = cmds.playbackOptions(q=True, min=True)
    maxTime = cmds.playbackOptions(q=True, max=True)
    attrs = cmds.listHistory(object)
    meshList = []
    for i in attrs:
        if cmds.objectType(i, isType='mesh'):
            meshList.append(i)
    cmds.cacheFile(fm='OneFile', cf='mcc', ws=True,
                   f=dupObjectName + 'GeometryCache',
                   refresh=0, st=minTime, et=maxTime,
                   dir='D:/Cache', pts=meshList[0])
    cmds.currentTime(minTime, edit=True)
    try:
        cmds.select(dupObjectName + '_Group')
        cmds.delete()
        cmds.select(object)
        cmds.duplicate(name=dupObjectName)
        cmds.group(dupObjectName, name=dupObjectName + '_Group', world=True)
    except:
        # no existing group to replace
        cmds.select(object)
        cmds.duplicate(name=dupObjectName)
        cmds.group(dupObjectName, name=dupObjectName + '_Group', world=True)
    cmds.select(dupObjectName)
    # remove stale trash cache files one at a time, so a missing file
    # does not make os.remove() raise
    for trash in ('D:/Cache/TrashCache.mc', 'D:/Cache/TrashCache.xml'):
        if os.path.exists(trash):
            os.remove(trash)
        else:
            print 'Go ahead'
    mel.eval('''doCreateGeometryCache 6 { "2", "1", "10", "OneFile", "1", "D:/Cache","0","TrashCache","0", "add", "0", "1", "1","0","0","mcc","1" } ;''')
    cmds.currentTime(minTime, edit=True)
    dupObjAttrs = cmds.listHistory(dupObjectName)
    cacheNameList = []
    for i in dupObjAttrs:
        if cmds.objectType(i, isType='cacheFile'):
            cacheNameList.append(i)
    cmds.setAttr("%s.cacheName" % cacheNameList[0],
                 dupObjectName + 'GeometryCache', type='string')
def export_geometry_cache(directory, file_name, start, end, points):
    if not os.path.isdir(directory):
        os.makedirs(directory)
    mc.cacheFile(refresh=True,
                 directory=directory,
                 singleCache=1,
                 format="OneFile",
                 smr=1,
                 spm=1,
                 cacheFormat='mcx',
                 fileName=file_name,
                 st=start,
                 et=end,
                 points=points,
                 worldSpace=1)
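A hedged usage sketch for the exporter above, caching the selected shapes over the playback range; the directory and file name are placeholders.

# Hypothetical usage: export the selected shapes as a single world-space .mcx cache.
import maya.cmds as mc

shapes = mc.ls(sl=True, dag=True, shapes=True, ni=True)
export_geometry_cache(directory='D:/cache/shot010',
                      file_name='charA_geo',
                      start=mc.playbackOptions(q=True, min=True),
                      end=mc.playbackOptions(q=True, max=True),
                      points=shapes)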
def create(start, end, dir, smr=1, attrcachefile="cachefile", per=False):
    sel = cmds.ls(sl=True, l=True)
    len_sel = float(len(sel))
    if len_sel == 0:
        cmds.warning("Please select objects!")
        return
    i = 0
    for shape in sel:
        cachefile = cmds.getAttr(shape + "." + attrcachefile)
        cmds.cacheFile(f=cachefile, staticCache=0, st=start, et=end,
                       points=shape, smr=smr, dir=dir, format='OneFile')
        if per:
            increase_per(per / len_sel)
        i = i + 1
def channels(self):
    cache_path = self.cachePath()
    if not cache_path:
        return []
    else:
        return cmds.cacheFile(q=True, fileName=cache_path, channelName=True) or []
def doCreateGeoCache(self, value):
    geoCacheDirPath = self.geoCacheDir(value)
    ns = self.nameOfCache(value)
    self.shapes = self.deformShapeSel(value)
    mc.select(self.shapes, r=1)
    mc.cacheFile(staticCache=1,
                 dir=geoCacheDirPath,
                 f=ns,
                 format="OneFilePerFrame",  # or "OneFile"
                 sch=1,
                 st=self.min,
                 et=self.max,
                 points=self.shapes,
                 dtf=True)
def doCreateGeoCache(self, value):
    try:
        geoCacheDirPath = self.geoCacheDir(value)
        ns = self.nameOfCache(value)
        self.shapes = self.deformShapeSel(value)
        mc.select(self.shapes, r=1)
        mc.cacheFile(staticCache=1,
                     dir=geoCacheDirPath,
                     f=ns,
                     format='OneFilePerFrame',  # or 'OneFile'
                     sch=1,
                     st=mc.textField(self.startText, q=1, text=1),
                     et=mc.textField(self.endText, q=1, text=1),
                     points=self.shapes,
                     dtf=True)
    except:
        return 'Failed export:'
def attach(dir, attrcachefile="cachefile"):
    sel = cmds.ls(sl=True, l=True)
    for shape in sel:
        cachefile = cmds.getAttr(shape + "." + attrcachefile)
        switch = mel.eval('createHistorySwitch("' + shape + '",false)')
        cacheNode = cmds.cacheFile(f=cachefile + ".xml", ia='%s.inp[0]' % switch,
                                   directory=dir, attachFile=True)
        cmds.setAttr('%s.playFromCache' % switch, 1)
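Both `create()` and `attach()` above assume each shape carries a string attribute naming its cache. A hedged sketch of setting up that convention, with a placeholder cache name:

# Sketch of the convention create()/attach() rely on: each shape holds a string
# attribute (default name "cachefile") containing its cache base name.
import maya.cmds as cmds

for shape in cmds.ls(sl=True, l=True):
    if not cmds.attributeQuery('cachefile', node=shape, exists=True):
        cmds.addAttr(shape, longName='cachefile', dataType='string')
    cmds.setAttr(shape + '.cachefile', 'myCache', type='string')  # placeholder name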
def attachHairCacheNode(eachDagObject, eachObjCacheDir, eachObjCacheFile):
    """Attach a hair cache."""
    cacheNode = cmds.cacheFile(attachFile=True,
                               fileName=eachObjCacheFile,
                               directory=eachObjCacheDir,
                               channelName=[eachDagObject + "_hairCounts",
                                            eachDagObject + "_vertexCounts",
                                            eachDagObject + "_positions"],
                               inAttr=[eachDagObject + ".hairCounts",
                                       eachDagObject + ".vertexCounts",
                                       eachDagObject + ".positions"])
    cmds.setAttr("%s.playFromCache" % eachDagObject, 1)
    cmds.connectAttr(cacheNode + ".inRange", eachDagObject + ".playFromCache")
    return eachDagObject + " Attach To : " + eachObjCacheDir + eachObjCacheFile + ".xml"
def get_cache_channels(cache_path):
    if cache_path is None:
        return []
    try:
        return mcc.get_channels(cache_path)
    except mcc.ParseError as e:
        cmds.warning('Could not parse MCC for channel data; %r' % e)
        channels = cmds.cacheFile(q=True, fileName=cache_path, channelName=True)
        return [(c, None) for c in channels]
def import_ncache(node, filename, behavior=0):
    """
    This function creates a cacheNode and connects it to the corresponding
    dynamic node. It respects the record_ncache behavior system.
    :node: a dynamic node as string ('hairSystem' or 'nCloth')
    :filename: path pointing to an mcc file
    :behavior: as int
        0: replace all old connected cachenodes and blendnodes
        1: replace all old connected cachenodes but add new cache in blendnodes
        2: blend all existing cachenodes with new cache
    """
    connected_cachenode = get_connected_cachenode([node])
    if behavior == 0:
        cmds.delete(connected_cachenode)
    if behavior == 1:
        if cmds.nodeType(connected_cachenode) == "cacheFile":
            cmds.delete(connected_cachenode)
    connections = disconnect_cachenodes(node)

    def convert_channelname_to_inattr(channelname):
        plug = "_".join(channelname.split("_")[:-1])
        attribute = channelname.split("_")[-1]
        return plug + "." + attribute

    if cmds.nodeType(node) == 'hairSystem':
        channels = cmds.cacheFile(fileName=filename, query=True, channelName=True)
        inattrs = [convert_channelname_to_inattr(channel) for channel in channels]
    else:
        # cloth nodes don't need the channel check
        inattrs = node + '.positions'
    cachefile = cmds.cacheFile(attachFile=True, fileName=filename, inAttr=inattrs)
    cmds.connectAttr(cachefile + '.inRange', node + '.playFromCache')
    if connections:
        reconnect_cachenodes(connections)
    return cachefile
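A hedged usage sketch for the importer above; the node name and cache path are placeholders, and `get_connected_cachenode` / `disconnect_cachenodes` / `reconnect_cachenodes` are assumed to come from the same module.

# Hypothetical usage: re-attach a recorded cache to a hair system, replacing
# any cache nodes already connected (behavior 0).
cache_node = import_ncache('hairSystemShape1',
                           'D:/cache/hairSystemShape1.mcc',
                           behavior=0)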
def __deleteCachedFrame__(self, name, endValue=None, startValue=None):
    cmds.cacheFile(refresh=True, deleteCachedFrame=True, cacheableNode=name,
                   startTime=startValue, endTime=endValue)
    self.removeShowDisplay(name, False)
    filePath = self.ActiveCacheNodeDict[name][1]
    if filePath is not None:
        for fileName in os.listdir(filePath):
            if fileName.startswith("backup_"):
                try:
                    os.remove(os.path.join(filePath, fileName))
                    OpenMaya.MGlobal.displayInfo("remove files (%s)" % (os.path.join(filePath, fileName)))
                except:
                    pass
                display.displayViewPrint(
                    u'<hl style="color:#FF4500"> %s cache from (%s) to (%s) deleted</hl>'
                    % (fileName, startValue, endValue))
def getActiveCacheFileNode(self):
    for key, value in self.ActiveCacheNodeDict.items():
        listFile = cmds.cacheFile(value, query=True, f=True)
        filePath = None
        if listFile:
            filePath = os.path.dirname(listFile[0])
        self.ActiveCacheNodeDict[key] = [value, filePath]
def attachClothCacheNode(eachDagObject, eachObjCacheDir, eachObjCacheFile):
    """Attach a geometry (cloth) cache."""
    objShapeName = cmds.listRelatives(eachDagObject, shapes=True)  # the object's shape nodes
    connectFlag = False
    for eachShapeNode in objShapeName:
        if eachShapeNode.find("Deformed") >= 0:
            switch = mel.eval('createHistorySwitch("%s",false)' % eachShapeNode)
            cacheNode = cmds.cacheFile(fileName=eachObjCacheFile,
                                       directory=eachObjCacheDir,
                                       ia="%s.inp[0]" % switch, attachFile=True)
            cmds.setAttr("%s.playFromCache" % switch, 1)
            cmds.connectAttr(cacheNode + ".inRange", switch + ".playFromCache")
            connectFlag = True
    if not connectFlag:
        switch = mel.eval('createHistorySwitch("%s",false)' % objShapeName[0])
        cacheNode = cmds.cacheFile(fileName=eachObjCacheFile,
                                   directory=eachObjCacheDir,
                                   ia="%s.inp[0]" % switch, attachFile=True)
        cmds.setAttr("%s.playFromCache" % switch, 1)
        cmds.connectAttr(cacheNode + ".inRange", switch + ".playFromCache")
    return eachDagObject + " Attach To : " + eachObjCacheDir + eachObjCacheFile + ".xml"
def doCreateCharacterGeoCache(self, value):
    try:
        geoCacheDirPath = self.geoCharacterCacheDir(value)
        ns = self.nameOfCharacterCache(value)
        self.shapes = self.deformCharacterShapeSel(value)
        mc.select(self.shapes, r=1)
        mc.cacheFile(staticCache=1,
                     dir=geoCacheDirPath,
                     f=ns,
                     format="OneFilePerFrame",  # or "OneFile"
                     sch=1,
                     st=self.min,
                     et=self.max,
                     points=self.shapes,
                     dtf=True)
    except:
        return "Failed export:"
def exportMcc(dataPath=None):
    # dataPath is the asset path obtained from cmds.file(q=True, r=True)
    if dataPath is None:
        raise registrarError, 'datapath not specified'
    # get asset namespace and path
    assetNameSpace = cmds.referenceQuery(dataPath, ns=True).replace(':', '')
    assetPath = dataPath[:dataPath.rfind('/')]
    assetPath = assetPath[:assetPath.rfind('/')]
    # get data from scene file
    sceneName = cmds.getAttr('sceneInfo.shotName', asString=True)
    episode = cmds.getAttr('sceneInfo.episodeName', asString=True)
    sequnceDataPath = genDataPath(pathType='sequence', sceneName=sceneName, episode=episode)
    frameCount = cmds.getAttr('sceneInfo.frameCount', asString=True)
    endTime = 100 + int(frameCount)
    # select objects based on conData.xml
    obj = []
    tree = ET.parse(assetPath + '/shader/conData.xml')
    root = tree.getroot()
    for chk in root:
        obj.append(assetNameSpace + ':' + chk.text)
    cmds.select(obj)
    # export sceneInfo data
    if os.path.isfile(sequnceDataPath + '/info.xml'):
        os.remove(sequnceDataPath + '/info.xml')
    root = ET.Element('root')
    for attr in cmds.listAttr('sceneInfo', ud=True):
        tagWrite = ET.SubElement(root, attr)
        tagWrite.text = str(cmds.getAttr('sceneInfo.' + str(attr), asString=True))
    tree = ET.ElementTree(root)
    tree.write(sequnceDataPath + '/info.xml')
    # export the mcc file to the server
    if not os.path.isdir(sequnceDataPath + '/render/' + assetNameSpace):
        os.makedirs(sequnceDataPath + '/render/' + assetNameSpace)
    cmds.cacheFile(f=assetNameSpace, dir=sequnceDataPath + '/render/' + assetNameSpace,
                   st=101, et=int(endTime), fm='OneFile', points=cmds.ls(sl=True), sch=True)
    openVar = open(sequnceDataPath + '/render/' + assetNameSpace + '/filePath.txt', 'w')
    openVar.write(assetPath)
    openVar.close()
    return
def getMayaCachePath(self, mayaCacheFileNode):
    mayaCacheInfo = {}
    mayaCaches = {}
    mayaNCache = cmds.cacheFile(mayaCacheFileNode, query=True, f=True)
    for cache in mayaNCache:
        if cache.endswith('mc'):
            if os.path.exists(cache):
                mayaCacheInfo['path'] = cache
                mayaCacheInfo['type'] = 'mc'
                mayaCaches['cacheFile'] = mayaCacheInfo
                return mayaCaches
    return None
def mergeFluidCaches(interactiveFoamXML='', interactiveWakeXML=''):
    """
    Creates blend caches for the fluid containers.
    """
    ###########################################
    ## IS THIS AN INTERACTIVE SETUP FX SCENE???
    ## IF SO BLEND THE CACHES
    ###########################################
    ## Now that we have cached the fluids we have to merge the cache files together.
    ## Animation publishes these caches with the SAME name as the base wake caches, so these can actually blend!
    debug(app=None, method='mergeFluidCaches', message='Creating cache blends now...', verbose=False)
    foamCache = cmds.cacheFile(createCacheNode=True, fileName=interactiveFoamXML.replace('\\', '/'))
    foamCache = cmds.rename(foamCache, '%s_cacheFile' % CONST.INTERACTIVE_FOAM_FLUID_SHAPENODE)
    debug(app=None, method='_publish_fx_caches_for_item', message='foamCache: \t\t%s' % foamCache, verbose=False)
    wakeCache = cmds.cacheFile(createCacheNode=True, fileName=interactiveWakeXML.replace('\\', '/'))
    wakeCache = cmds.rename(wakeCache, '%s_cacheFile' % CONST.INTERACTIVE_WAKE_FLUID_SHAPENODE)
    debug(app=None, method='_publish_fx_caches_for_item', message='wakeCache: %s' % wakeCache, verbose=False)
    cmds.select(CONST.FOAM_FLUID_SHAPENODE, r=True)
    foamBlend = cmds.cacheFileCombine()
    debug(app=None, method='mergeFluidCaches', message='foamBlend: %s' % foamBlend, verbose=False)
    cmds.cacheFileCombine(foamBlend[0], e=True, cc=foamCache)
    debug(app=None, method='mergeFluidCaches', message='CacheBlend for foam made successfully', verbose=False)
    cmds.select(CONST.WAKE_FLUID_SHAPENODE, r=True)
    wakeBlend = cmds.cacheFileCombine()
    debug(app=None, method='mergeFluidCaches', message='wakeBlend: %s' % wakeBlend, verbose=False)
    cmds.cacheFileCombine(wakeBlend[0], e=True, cc=wakeCache)
    debug(app=None, method='mergeFluidCaches', message='CacheBlend for wake made successfully', verbose=False)
    ## Now hard set all the inputs to a weight of 1
    for each in cmds.ls(type='cacheBlend'):
        cmds.setAttr("%s.cacheData[0].weight" % each, 1)
        cmds.setAttr("%s.cacheData[1].weight" % each, 1)
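The function above hard-sets both blend weights to 1. As a hedged aside, the same `cacheData[*].weight` attributes can also be keyed to cross-fade one cache into the other; the frame values below are placeholders, not part of the original publish step.

# Hypothetical sketch: key the cacheBlend weights to fade from the first
# input cache to the second over frames 1-24.
import maya.cmds as cmds

for blend in cmds.ls(type='cacheBlend') or []:
    cmds.setKeyframe(blend + '.cacheData[0].weight', t=1, v=1.0)
    cmds.setKeyframe(blend + '.cacheData[0].weight', t=24, v=0.0)
    cmds.setKeyframe(blend + '.cacheData[1].weight', t=1, v=0.0)
    cmds.setKeyframe(blend + '.cacheData[1].weight', t=24, v=1.0)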
def getGeoCacheFromObj(objName):
    """Get the list of caches connected to an object, by object name.

    Description: if one or more caches exist, return the list of cache nodes;
                 otherwise return an empty list
    Arguments: objName: the object name
    Returns: cacheNodes: the list of cache nodes
    """
    cacheNodesList = clothHairT.getCacheNodes()  # all cacheFile nodes in the scene
    cacheNodes = []
    if cacheNodesList:
        for eachCacheNode in cacheNodesList:
            shapeWithCache = cmds.cacheFile(eachCacheNode, query=True, geometry=True)
            if shapeWithCache:  # a geoCache, not a hairCache
                objWithCache = cmds.listRelatives(shapeWithCache[0], parent=True)  # the object this cache drives
                if objWithCache[0] == objName:
                    cacheNodes.append(eachCacheNode)
    return cacheNodes
def cache():
    """
    Cache the constrained duplicate geo
    """
    # GeoCache selected
    sel = cmds.ls(sl=True)
    grp = cmds.group(em=True, n='CachedGeoGrp')
    for mesh in sel:
        if cmds.objectType(mesh) == 'transform':
            try:
                shape = cmds.listRelatives(mesh, shapes=True)[0]
                if cmds.objectType(shape) == 'mesh':
                    if mel.eval('findRelatedSkinCluster( "%s");' % mesh):
                        cacheFiles = cmds.cacheFile(
                            st=290, et=344, points=shape,
                            dir='/Users/mauricioptkvp/Desktop/21_July_2010_scarecrow_rig - Copy/CleanedUp/geoCaches')
                        cmds.parent(mesh, grp)
                        print 'Cached: ', shape
            except Exception, e:
                print e
def isNOrGeoCacheNode(cacheNodeName):
    """Tell whether a cache node is an nCache or a geoCache.

    Description: decide if the given cache node is a geometry cache or an nCache
    Arguments: cacheNodeName: the cache node name
    Returns: "nCache" for an nCache, "geoCache" for a geoCache
    """
    cacheFileXML = cmds.cacheFile(cacheNodeName, query=True, f=True)[0]
    cacheSize = os.path.getsize(cacheFileXML)  # size of the XML description file
    if cacheSize > 5000:
        return "nCache"
    else:
        return "geoCache"
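The file-size threshold above is a heuristic. A hedged alternative, assuming the pipeline's hair caches carry the `*_hairCounts` channels that `attachHairCacheNode` attaches, is to inspect the channel names in the cache's XML instead:

# Hedged sketch: classify by channel names rather than XML size. The
# "_hairCounts" suffix is an assumption based on how the hair caches in this
# pipeline are written, not a general Maya rule.
import maya.cmds as cmds

def isNCacheByChannels(cacheNodeName):
    xml = cmds.cacheFile(cacheNodeName, query=True, f=True)[0]
    channels = cmds.cacheFile(fileName=xml, query=True, channelName=True) or []
    return any(ch.endswith('_hairCounts') for ch in channels)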
def start_trans():
    get_frame_start = mc.intField("the_ST_ABC", q=True, v=True)
    get_frame_end = mc.intField("the_ET_ABC", q=True, v=True)
    write_UV = ["", "-uvWrite"]
    # world_Space = ["", "-worldSpace"]
    ifUV = int(mc.checkBox("if_write_UV", q=True, v=True))
    # ifWP = 1
    daili = mc.optionMenu("the_dai", q=True, sl=True)
    get_cj_list = mc.textScrollList("the_cj_list", q=True, ai=True)
    if_comb = int(mc.checkBox("if_combine", q=True, v=True))
    get_final_out_path = mc.textField("thePath_to_out", q=True, tx=True)
    get_final_out_filename = mc.textField("the_out_file_name", q=True, tx=True)
    get_the_step = mc.floatField("the_EVA_every", q=True, v=True)
    dataType = int(mc.optionMenu("cacheData", q=True, sl=True))
    all_need_to_cache = mc.textScrollList("the_mod_list", q=True, ai=True)
    all_need_to_cache_string = ""
    all_need_to_cache_string_2 = ""

    # combine the meshes before caching
    if if_comb == 1 and all_need_to_cache:
        comb_names = []
        for oneGroup in all_need_to_cache:
            Groups = mc.listRelatives(oneGroup, allParents=True)
            group_M = mc.ls(oneGroup, dag=True, ni=True, shapes=True)
            if len(group_M) > 1:
                objComb = mc.polyUnite(oneGroup, ch=True, mergeUVSets=True,
                                       name=oneGroup + "_comb")
                if Groups:
                    mc.parent(objComb[0], Groups[0])
            if len(group_M) == 1:
                oneObj = mc.listRelatives(group_M[0], allParents=True)
                mc.rename(oneObj, oneGroup + "_comb")
            comb_names.append(oneGroup + "_comb")
        if comb_names:
            mc.textScrollList("the_mod_list", e=True, ra=True)
            mc.textScrollList("the_mod_list", e=True, append=comb_names)
    all_need_to_cache = mc.textScrollList("the_mod_list", q=True, ai=True)
    for one_cache in all_need_to_cache:
        all_need_to_cache_string += " -root " + one_cache
        all_need_to_cache_string_2 += one_cache + " "

    # export an Alembic (.abc) cache
    if dataType == 1:
        get_cache_path = mc.workspace(en="cache")
        get_cache_paths = r'%s/alembic/' % get_cache_path
        if not os.path.isdir(get_cache_paths):
            os.makedirs(get_cache_paths)
        mc.AbcExport(j="-frameRange " + str(get_frame_start) + " " + str(get_frame_end) +
                     " -step " + str(get_the_step) + " " + write_UV[ifUV] +
                     " -worldSpace" + all_need_to_cache_string +
                     " -file " + get_cache_paths + get_final_out_filename + ".abc")
        mc.delete(all_need_to_cache, ch=True)
        mm.eval("AbcImport -mode import -connect \"" + all_need_to_cache_string_2[0:-1] +
                "\" \"" + get_cache_paths + get_final_out_filename + ".abc" + "\"")

    # export a GPU cache
    if dataType == 2:
        get_cache_path = mc.workspace(en="cache")
        get_cache_paths = r'%s/alembic/' % get_cache_path
        if not os.path.isdir(get_cache_paths):
            os.makedirs(get_cache_paths)
        mc.select(all_need_to_cache, r=True)
        mc.gpuCache(all_need_to_cache, startTime=get_frame_start, endTime=get_frame_end,
                    saveMultipleFiles=False, optimize=False, writeMaterials=False,
                    dataFormat="ogawa", wuv=ifUV,
                    directory=get_cache_paths, fileName=get_final_out_filename)
        for one_gpu in all_need_to_cache:
            mc.polyTriangulate(one_gpu)
        mc.delete(all_need_to_cache, ch=True)
        mm.eval("AbcImport -mode import -connect \"" + all_need_to_cache_string_2[0:-1] +
                "\" -createIfNotFound " + " \"" + get_cache_paths + get_final_out_filename + ".abc" + "\"")

    # export a geometry cache
    if dataType == 3:
        all_need_to_cache_shape = mc.ls(all_need_to_cache, dagObjects=True, ni=True, shapes=True)
        cacheFiles = mc.cacheFile(r=True, sch=True, dtf=True, fm='OneFile', spm=1, smr=1,
                                  directory=get_final_out_path, fileName=get_final_out_filename,
                                  st=get_frame_start, et=get_frame_end,
                                  points=all_need_to_cache_shape)
        mc.delete(all_need_to_cache, ch=True)
        myswichList = []
        myswichNode = []
        myNewcacheObjects = []
        switchText = ''
        for each in all_need_to_cache_shape:
            switch = mm.eval('createHistorySwitch("%s",false)' % each)
            myNewcacheObjects.append(each)
            myswichNode.append(switch)
            switchText = '%s.inp[0]' % switch
            myswichList.append(switchText)
            mc.setAttr('%s.playFromCache' % switch, 1)
        mc.cacheFile(f=get_final_out_filename, directory=get_final_out_path,
                     cnm=myNewcacheObjects, ia=myswichList, attachFile=True)

    # export a baked-shape (blend) cache
    if dataType == 4:
        mc.select(all_need_to_cache, r=True)
        mm.eval('x_bakeShape(%s,%s,%s, "%s", 0, 0)'
                % (get_frame_start, get_frame_end, get_the_step, get_final_out_filename))
        mc.textScrollList("the_mod_list", e=True, ra=True)
        mc.textScrollList("the_mod_list", e=True, append=(get_final_out_filename + '_bakeshape_gp'))
        mc.delete(all_need_to_cache, ch=True)
        mc.select(hi=True)

    # save the cached objects out to a separate scene file
    all_need_to_cache = mc.textScrollList("the_mod_list", q=True, ai=True)
    all_need_others = mc.textScrollList("the_others_list", q=True, ai=True)
    if all_need_others:
        mc.select(all_need_others + all_need_to_cache, r=True)
    else:
        mc.select(all_need_to_cache, r=True)
    maormb = mc.optionMenu("the_ma_mb", q=True, sl=True)
    MA_MB = ["mayaBinary", "mayaAscii"]
    ma_mb = [".mb", ".ma"]
    mm.eval("file -force -options \"v=0;\" -typ \"" + MA_MB[maormb - 1] + "\" -pr -es \"" +
            get_final_out_path + get_final_out_filename + ma_mb[maormb - 1] + "\"")
def createGeoCache(*args):
    project = m.workspace(rd=True, q=True)
    dataDir = project + 'data/'
    index = project[:-1].rindex('/')
    shotDir = project[:index] + '/'
    index = shotDir[:-1].rindex('/')
    # parse the scene path to derive the scene name used for the folder
    s_string = m.file(sn=True, q=True)
    s_splitString = s_string.split('/')
    i_splitStringLength = len(s_splitString)
    s_filename = s_splitString[i_splitStringLength - 1]
    # parse the scene name to derive the folder name
    s_splitFolder = s_filename.split('.')
    i_splitStringLengthFolder = len(s_splitFolder)
    s_foldername = s_splitFolder[i_splitStringLengthFolder - 2]
    # specify the plate name here
    plate = shotDir[index + 1:-1] + '_plate_01'
    imageDir = shotDir + 'images/' + plate + '/'
    imageList = []
    # images = os.listdir(imageDir)
    # for i in images:
    #     if 'plate' in i:
    #         imageList.append(i)
    start = m.playbackOptions(ast=True, q=True)
    end = m.playbackOptions(aet=True, q=True)
    # set timeline to images
    m.playbackOptions(ast=start, aet=end, min=start, max=end)
    # make the geo cache directory
    geoCacheDir = dataDir + 'geoCache/'
    if not 'geoCache' in os.listdir(dataDir):
        os.mkdir(geoCacheDir)
    # make the cache version directory
    versions = os.listdir(geoCacheDir)
    if versions:
        nextVersion = s_foldername
        cacheVersionDir = geoCacheDir + s_foldername  # use the scene name as the folder name
        if not os.path.exists(cacheVersionDir):
            os.mkdir(cacheVersionDir)
    else:
        cacheVersionDir = geoCacheDir + s_foldername  # use the scene name as the folder name
        os.mkdir(cacheVersionDir)
    # cache selected objects
    list = m.ls(type='transform')
    for obj in list:
        if m.attributeQuery('cache', node=obj, exists=True):
            try:
                m.setAttr(obj + '.cache', False)
            except:
                if m.getAttr(obj + '.cache'):
                    print m.getAttr(obj + '.cache'), obj
                    shape = m.listRelatives(obj, s=True)[0]
                    if ':' in shape:
                        cacheName = shape[shape.rindex(':') + 1:]
                    else:
                        cacheName = shape
                    print 'caching shape:', shape, 'as:', cacheName
                    m.cacheFile(dir=cacheVersionDir, f=cacheName, points=shape,
                                st=start - 10, et=end + 10)
        else:
            print 'no attr'
def iter_existing_cache_connections():
    """Yield data about every existing cache connection in the scene.

    :returns: Iterator of ``(cacheFile, fileName, channel, transform, shape)``
        tuples for each cache connection.

    It is possible for ``transform`` or ``shape`` to be ``None`` when the
    connection cannot be fully resolved. In every case that the connection is
    not complete, ``shape`` will be ``None``.
    """
    cache_nodes = cmds.ls(type='cacheFile') or []
    for cache_node in cache_nodes:
        cache_path = cmds.cacheFile(cache_node, q=True, fileName=True)[0]

        ## Identify what it is connected to.
        channel = cmds.getAttr(cache_node + '.channel[0]')
        switch = cmds.listConnections(cache_node + '.outCacheData[0]')
        if not switch:
            cmds.warning('cacheFile %r is not connected' % cache_node)
            yield cache_node, cache_path, channel, None, None
            continue
        switch = switch[0]
        switch_type = cmds.nodeType(switch)

        # Pass through blends.
        if switch_type == 'cacheBlend':
            blend = switch
            switch = cmds.listConnections(blend + '.outCacheData[0]')
            if not switch:
                cmds.warning('cacheBlend %r is not connected' % blend)
                yield cache_node, cache_path, channel, None, None
                continue
            switch = switch[0]
            switch_type = cmds.nodeType(switch)

        if switch_type != 'historySwitch':
            cmds.warning('Unknown cache node layout; expected historySwitch, found %s %r' % (switch_type, switch))
            yield cache_node, cache_path, channel, None, None
            continue

        # The switch hooks onto a transform, but we want the shapes.
        transform = (cmds.listConnections(switch + '.outputGeometry[0]') or (None, ))[0]
        if transform is None:
            cmds.warning('Unknown cache node layout; nothing connected to %r' % switch)
            yield cache_node, cache_path, channel, None, None
            continue

        # Pass through groupParts. The control flow is a little wacky here, be
        # careful.
        while transform is not None:
            transform_type = cmds.nodeType(transform)
            if transform_type == 'groupParts':
                transform = (cmds.listConnections(transform + '.outputGeometry') or (None, ))[0]
                continue
            break
        if transform is None:
            transform_type = 'None'

        if transform_type != 'transform':
            cmds.warning('Unknown cache node layout; expected transform, found %s %r' % (transform_type, transform))
            yield cache_node, cache_path, channel, None, None
            continue

        shapes = cmds.listRelatives(transform, children=True, shapes=True) or []

        # Maya will often add a "Deformed" copy of a mesh. Sometimes there is
        # a "Orig". Sometimes there are both.
        if len(shapes) > 1:
            a = basename(shapes[0])
            for other in shapes[1:]:
                b = basename(other)
                if not (b[:len(a)] == a and b[len(a):] in ('Deformed', 'Orig')):
                    break
            else:
                shapes = [shapes[0]]

        if len(shapes) != 1:
            cmds.warning('Could not identify shape connected to %r; found %r' % (cache_node, shapes))
            yield cache_node, cache_path, channel, transform, None
            continue
        shape = shapes[0]

        yield cache_node, cache_path, channel, transform, shape
def IG_import(self, iList, cacheFolder):
    # self.importList
    self.IMchar = []
    # self.info_cacheList = []
    self.info_dict_ = {}
    self.sList = []
    for x in iList:
        self.IMchar.append(x + 'Shape.xml')
    for i in self.IMchar:
        sn = cmds.listRelatives(i.split('Shape.xml')[0], s=1)[0]
        if cmds.objExists(sn + '_cacheBlend'):
            self.ui.nCache_LED.setText('>>> Already exists cacheBlend <<<')
            self.ui.nCache_LED.setStyleSheet("color: rgb(255, 255, 0);")
            self.ui.loopRange_LED.clear()
            self.ui.confirm_LED.clear()
            self.info_dict_ = {}
            for mesh in self.selMesh:
                sn = cmds.listRelatives(mesh, s=True)[0]
                self.shape = sn
                self.sList.append(sn)
                self.info_dict_[self.shape] = [
                    self.shape + 'A_cache',
                    self.shape + 'B_cache',
                    self.shape + '_cacheBlend'
                ]
        else:
            if os.path.exists(cacheFolder + '/' + i):
                xmlInfo = self.xlmFileInfo(cacheFolder + '/' + i)
                # preShape = xmlInfo.split('_geoCacheShape')[0] + 'Shape'
                sn = cmds.listRelatives(i.split('Shape.xml')[0], s=True)
                if len(sn) > 1:
                    preShape = sn[1]
                else:
                    preShape = sn[0]
                self.shape = preShape
                self.sList.append(self.shape)
                mcxFile = i.replace('xml', 'mcx')
                switch = mel.eval('createHistorySwitch("%s", false)' % self.shape)
                CB = cmds.createNode('cacheBlend')
                CBnode = cmds.rename(CB, self.shape + '_cacheBlend')
                cacheNode1 = cmds.cacheFile(dir=cacheFolder, f='%s' % mcxFile,
                                            ia='%s.inCache[0].vectorArray[0]' % CBnode,
                                            attachFile=True)
                cacheNode2 = cmds.cacheFile(dir=cacheFolder, f='%s' % mcxFile,
                                            ia='%s.inCache[0].vectorArray[1]' % CBnode,
                                            attachFile=True)
                # rename
                cacheNode1 = cmds.rename(cacheNode1, self.shape + 'A_cache')
                cacheNode2 = cmds.rename(cacheNode2, self.shape + 'B_cache')
                info_cache = cacheNode1
                # self.info_cacheList.append(info_cache)
                self.info_dict_[self.shape] = [cacheNode1, cacheNode2, CBnode]
                # attribute connect
                cmds.connectAttr('%s.start' % cacheNode1, '%s.cacheData[0].start' % CBnode, f=1)
                cmds.connectAttr('%s.end' % cacheNode1, '%s.cacheData[0].end' % CBnode, f=1)
                cmds.connectAttr('%s.inRange' % cacheNode1, '%s.cacheData[0].range' % CBnode, f=1)
                cmds.connectAttr('%s.start' % cacheNode2, '%s.cacheData[1].start' % CBnode, f=1)
                cmds.connectAttr('%s.end' % cacheNode2, '%s.cacheData[1].end' % CBnode, f=1)
                cmds.connectAttr('%s.inRange' % cacheNode2, '%s.cacheData[1].range' % CBnode, f=1)
                cmds.connectAttr('%s.outCacheData[0]' % CBnode, '%s.inp[0]' % switch, f=1)
                cmds.setAttr('%s.playFromCache' % switch, 1)
                self.ui.nCache_LED.setText('>>> import success <<<')
                self.ui.nCache_LED.setStyleSheet("color: rgb(0, 255, 0);")
            else:
                self.ui.nCache_LED.setText('>>> Check path or file <<<')
                self.ui.nCache_LED.setStyleSheet("color: rgb(255, 0, 0);")
def build(self, sim, animType, frameStep, cacheGeometry, cacheDir, deleteSkeleton, agentOptions):
    if sim.scene.mas().terrainFile:
        self._factory.importObj(sim.scene.mas().terrainFile, "terrain")

    mayaAgents = []
    startFrame = -sys.maxint
    endFrame = -sys.maxint

    for agent in sim.agents():
        mayaAgent = MayaSimAgent.MayaSimAgent(agent, self._factory, sim)
        mayaAgent.build(agentOptions, animType, frameStep)

        # Presumably every agent will be simmed over the same frame
        # range - however since the frame ranges could conceivably
        # be different, find and store the earliest startFrame
        # and latest endFrame
        #
        if mayaAgent.simData():
            if -sys.maxint == startFrame or mayaAgent.simData().startFrame < startFrame:
                startFrame = mayaAgent.simData().startFrame
            if -sys.maxint == endFrame or mayaAgent.simData().endFrame > endFrame:
                endFrame = mayaAgent.simData().endFrame

        mayaAgents.append(mayaAgent)

    if cacheGeometry:
        # Create geometry caches for each agent.
        #
        meshes = []
        for mayaAgent in mayaAgents:
            meshes.extend([geometry.shapeName() for geometry in mayaAgent.geometryData])
        cacheFileName = "%s_%s" % (sim.scene.baseName(), sim.range)
        mc.cacheFile(directory=cacheDir, singleCache=True, doubleToFloat=True,
                     format="OneFilePerFrame", simulationRate=1, sampleMultiplier=1,
                     fileName=cacheFileName, startTime=startFrame, endTime=endFrame,
                     points=meshes)

        # There's a bug in Maya where cacheFile will sometimes write a
        # partial path into the cache instead of the full path. To make
        # sure the attachFile works, we have to query the actual channel
        # names.
        cacheFileFullName = "%s/%s.xml" % (cacheDir, cacheFileName)
        meshes = mc.cacheFile(query=True, fileName=cacheFileFullName, channelName=True)
        switches = [maya.mel.eval('createHistorySwitch( "%s", false )' % mesh) for mesh in meshes]
        switchAttrs = [("%s.inp[0]" % switch) for switch in switches]
        mc.cacheFile(attachFile=True, fileName=cacheFileName, directory=cacheDir,
                     channelName=meshes, inAttr=switchAttrs)
        for switch in switches:
            mc.setAttr("%s.playFromCache" % switch, True)

    if deleteSkeleton:
        # After creating a geometry cache the skeleton, anim curves, and
        # skin clusters are no longer needed to play back the sim. To save
        # memory the user can choose to delete them.
        #
        for mayaAgent in mayaAgents:
            mayaAgent.deleteSkeleton()

    self._factory.cleanup()

    # The layers are off by default to speed up load, turn them on now.
    #
    MayaAgent.showLayers()
mc.setAttr('nucleus1.startFrame', -20)
mc.setAttr('nucleus1.spaceScale', 0.45)
mc.setAttr('nucleus1.subSteps', 6)
mc.setAttr('nucleus1.maxCollisionIterations', 8)

# Set Up Pin Constraints
mc.select('ten_Robe_Sim.vtx[2310:2334]', 'ten_Robe_Sim.vtx[3088:3111]', replace=True)
mc.select('ten_Robe_Sim.vtx[699:701]', 'ten_Robe_Sim.vtx[1652]', 'ten_Robe_Sim.vtx[1691]',
          'ten_Robe_Sim.vtx[1694]', 'ten_Robe_Sim.vtx[1725]', add=True)
mc.select('ten_Collider', toggle=True)
mel.eval('createNConstraint pointToSurface 0;')

# Cache Out the Cloth Sim
filepath = '/users/animation/mitchbre/Documents/Cloth_Script_Files/Test_Cache'
shapeRelatives = mc.listRelatives('ten_Robe_Sim', shapes=True)
print shapeRelatives
mc.cacheFile(fileName='tenRobe_cache', format='OneFilePerFrame', startTime=-20,
             endTime=186, points=shapeRelatives[1], directory=filepath)

# Connect the Cloth Cache
mc.currentTime(-20)
pc.mel.doImportCacheFile(filepath + '/tenRobe_cache.xml', '', ['ten_Robe_Sim'], list())

# Group Colliders
mc.group(['ten_Collider', 'ten_ColliderBase', 'ten_Mittens', 'nRigid1', 'nRigid2'],
         name='colliders')
# Group Robe
mc.group(['ten_Robe_Sim', 'ten_Sash_Sim'], name='robe_Objects')  # Objects
mc.group(['nCloth1', 'dynamicConstraint1'], name='robe_Sim')  # Group Robe Sim
mc.group(['robe_Objects', 'robe_Sim'], name='robe')  # Group All Robe Groups

# Group Pants
def __publish_mikros_cache(self, task, work_template, primary_publish_path, sg_task,
                           comment, thumbnail_path, progress_cb):
    infoNodeLib = InfoNodeLib(self.parent.engine)
    progress_cb(10, "Determining publish details")
    item = task['item']
    output = task["output"]

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    wip_path = self._get_current_work_file_version(scene_path)
    fields = work_template.get_fields(wip_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]

    # determine the publish name:
    self.parent.log_debug("")
    self.parent.log_debug(" +---> Publishing Cache")
    self.parent.log_debug(" |")

    # Find additional info from the scene:
    cache = item["nodeName"]
    progress_cb(20, "Analysing scene")

    cacheType = 'geo'  ## By default, we suppose it's a geometric cache
    geomNode = cmds.cacheFile(cache, q=1, geometry=1)
    if cmds.nodeType(geomNode) == 'nParticle':
        cacheType = 'part'
    if cmds.nodeType(geomNode) in ['fluidShape', 'nCloth']:  # nFluid, nCloth
        cacheType = 'mc'
    self.parent.log_debug(" | cs_cache_type: %s" % cacheType)

    cacheDir = item["baseDir"]
    cacheName = item["baseName"]
    fields['cs_cache_type'] = cacheType
    fields['cs_cache_name'] = cacheName
    progress_cb(30, "Determining publish path")

    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]
    publish_path = publish_template.apply_fields(fields)
    publish_name = publish_path
    wantedPath = publish_path
    wantedDir = wantedPath
    wantedName = cacheName
    self.parent.log_debug(" | publish_path: %s" % publish_path)
    self.parent.log_debug(" | wantedDir: %s" % wantedDir)
    self.parent.log_debug(" | wantedName: %s" % wantedName)

    if cacheDir != wantedDir or cacheName != wantedName:
        self.parent.log_debug(" | Redirect %s cache directory to %s" % (cache, wantedDir))
        progress_cb(40, "Finding Files to Publish")
        fileList = os.listdir(cacheDir)
        if not fileList:
            self.parent.log_debug(" | Nothing to do ! No file found")
        else:
            fileList.sort()
            prct = 40
            prcPerFile = 40 / len(fileList)
            for file in fileList:
                matcher = re.compile('^%s(.+)$' % cacheName).search(file)
                if matcher:
                    source = os.path.join(cacheDir, file)
                    ender = matcher.group(1)
                    dest = '%s/%s%s' % (wantedDir, wantedName, ender)
                    if not os.path.isfile(dest):
                        self.parent.log_debug(" | => Link %s" % source)
                        self.parent.log_debug(" | => To %s" % dest)
                        infoNodeLib.do_move_and_sym_link(source, dest)
                        prct += prcPerFile
                        progress_cb(prct, "Linking %s" % (source))
                    else:
                        prct += prcPerFile
                        progress_cb(prct, "Exists Already %s" % (source))
        if cacheDir != wantedDir:
            self.parent.log_debug(" | Set %s cache directory to %s/" % (cache, wantedDir))
            cmds.setAttr('%s.cachePath' % cache, '%s/' % wantedDir, type='string')
        if cacheName != wantedName:
            self.parent.log_debug(" | Set %s cache name to %s" % (cache, wantedName))
            cmds.setAttr('%s.cacheName' % cache, wantedName, type='string')
        progress_cb(90, "Saving Scene with new cache")
        cmds.file(save=True, force=True)

    progress_cb(100, "Ok")
    self.parent.log_debug(" |")
    self.parent.log_debug(" |---------------------------")
len(rselShape)
m = 'poppymaster_DL1:SK:ghostPoppy_F_1 poppymaster_DL1:SK:ghostPoppy_F_2_1 poppymaster_DL1:SK:ghostPoppy_F_2_2_1 poppymaster_DL1:SK:ghostPoppy_F_2_2_2 poppymaster_DL1:SK:ghostPoppy_F_2_2_3 poppymaster_DL1:SK:ghostPoppy_F_2_2_4 poppymaster_DL1:SK:ghostPoppy_F_2_2_5 poppymaster_DL1:SK:ghostPoppy_F_2_2_6 poppymaster_DL1:SK:ghostPoppy_F_2_2_7 poppymaster_DL1:SK:ghostPoppy_F_2_2_8 poppymaster_DL1:SK:ghostPoppy_F_2_2_9 poppymaster_DL1:SK:ghostPoppy_F_2_2_10 poppymaster_DL1:SK:ghostPoppy_F_3 poppymaster_DL1:SK:ghostPoppy_F_4 poppymaster_DL1:SK:ghostPoppy_F_5'
len(m.split(' '))
for item in rselShape:
    if not item in allCharacterRightdeformShape:
        continue
    allCharacterRightdeformShape.remove(item)
len(allCharacterRightdeformShape)
allCharacterRightdeformShape.reverse()
allCharacterRightdeformShape.index('poppymaster_DL1:SK:ghostPoppy_E_5ShapeDeformed')
rselShape.index('poppymaster_DL1:SK:ghostPoppy_E_5ShapeDeformed')
mc.select(allCharacterRightdeformShape, r=1)
mc.cacheFile(staticCache=1, dir='E:/cache/sc057a/030', f='women',
             format='OneFilePerFrame',  # or 'OneFile'
             sch=1, st='1001', et='1037',
             points=allCharacterRightdeformShape, dtf=True)
mc.select('poppymaster_DL1:SK:ghostPoppy_C_1ShapeOrig', r=1)
mc.nodeType('poppymaster_DL1:SK:ghostPoppy_C_1ShapeOrig')
mc.getAttr('poppymaster_DL1:SK:ghostPoppy_C_1ShapeOrig.primaryVisibility')
io = shape + '.intermediateObject'
mc.getAttr('poppymaster_DL1:SK:ghostPoppy_C_1ShapeOrig.intermediateObject')
# mc.select(, r=1)  # incomplete call, left unfinished in the session
mc.objExists('poppymaster_DL1:SK:ghostPoppy_B_2_1ShapeOrig')
for item in allCharacterRightdeformShape:
    if item.endswith('Orig') == 1:
        allCharacterRightdeformShape.remove(item)
len(allCharacterRightdeformShape)
vb = 'poppymaster_DL1:SK:ghostPoppy_B_2_1ShapeOrig'
def __publish_geometry_cache(self, item, output, work_template, primary_publish_path,
                             sg_task, comment, thumbnail_path, progress_cb):
    """
    Publish a geometry cache for the scene and register it with Shotgun.

    :param item:                    The item to publish
    :param output:                  The output definition to publish with
    :param work_template:           The work template for the current scene
    :param primary_publish_path:    The path to the primary published file
    :param sg_task:                 The Shotgun task we are publishing for
    :param comment:                 The publish comment/description
    :param thumbnail_path:          The path to the publish thumbnail
    :param progress_cb:             A callback that can be used to report progress
    """
    # determine the publish info to use
    progress_cb(10, "Determining publish details")

    # Revilo - Add naming for use in alembic export
    # group_name = item["name"].strip("|")
    # meshes = item["meshes"]
    tank_type = output["tank_type"]
    publish_template = output["publish_template"]

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]

    # update fields with the group name:
    # fields["grp_name"] = group_name

    # create the publish path by applying the fields
    # with the publish template:
    publish_path = publish_template.apply_fields(fields)

    # ensure the publish folder exists:
    publish_folder = os.path.dirname(publish_path)
    self.parent.ensure_folder_exists(publish_folder)

    # determine the publish name:
    publish_name = fields['Shot'] + '_' + fields['Step'] + item['name']

    # Find additional info from the scene:
    progress_cb(10, "Analysing scene")

    # find the animated frame range to use:
    start_frame, end_frame = self._find_scene_animation_range()

    # ...and execute the geometry cache export:
    progress_cb(30, "Exporting Geometry cache")
    caches = item['caches']
    for geo, meshes in caches.iteritems():
        group_name = geo.strip("|")
        try:
            self.parent.log_debug("Executing command: cacheFile")
            cmds.cacheFile(dir=publish_path.replace("\\", "/"), f=group_name,
                           st=start_frame, et=end_frame, points=meshes,
                           cf='mcc', worldSpace=True, format='OneFile',
                           singleCache=True)
        except Exception, e:
            raise TankError("Failed to export Geometry Cache: %s" % e)
def CreateGeometryCache_zwz():
    myStartFrameV = mc.playbackOptions(q=True, min=True)
    myEndFrameV = mc.playbackOptions(q=True, max=True)
    num_myGeoObject_zwz = len(myGeoObject_zwz)
    # find the active model panel
    if num_myGeoObject_zwz:
        mm.eval('setNamedPanelLayout "Single Perspective View"; updateToolbox();')
        activePlane = ''
        i = 1
        while (i):
            try:
                tmp = mc.modelEditor('modelPanel%d' % i, q=True, av=True)
            except:
                pass
            else:
                if tmp:
                    activePlane = 'modelPanel%d' % i
                    break
            i += 1
        mc.modelEditor(activePlane, e=True, polymeshes=False, nurbsSurfaces=False)
        myfileName = mc.file(q=True, sn=True, shortName=True).split('.')[0]
        mydataName = mc.workspace(en='data/%s/' % myfileName)
        ErrorObjects = []
        amount = 0
        mc.progressWindow(title=u'Creating animation caches, %s in total' % num_myGeoObject_zwz,
                          progress=amount, status='chMyGeoCache',
                          min=0, max=num_myGeoObject_zwz, isInterruptable=True)
        allcacheNames = mc.textScrollList('selectObjects', q=True, ai=True)
        for i, each in enumerate(myGeoObject_zwz):
            if len(each):
                cacheName = allcacheNames[i]
                mc.progressWindow(e=True, status=u'Item %s: %s' % (i, cacheName), progress=i)
                mc.select(cl=True)
                mc.select(myGeoObject_zwz[i], r=True)
                mm.eval("SelectIsolate;")
                j = 1
                while (True):
                    if os.path.isdir(mydataName + cacheName):
                        if os.path.isdir(mydataName + cacheName + '_' + '%s' % j):
                            j = j + 1
                        else:
                            cacheName = cacheName + '_' + '%s' % j
                            break
                    else:
                        break
                cacheFiles = mc.cacheFile(r=True, sch=True, dtf=True, fm='OneFilePerFrame',
                                          spm=1, smr=1, directory=mydataName + cacheName,
                                          fileName=cacheName, st=myStartFrameV,
                                          et=myEndFrameV, points=each)
                if mc.progressWindow(q=True, isCancelled=True) or \
                        mc.progressWindow(q=True, progress=True) > num_myGeoObject_zwz:
                    mc.progressWindow(endProgress=True)
                    mc.confirmDialog(title=u'Notice',
                                     message=u'Stopped at item %s: %s' % (i, cacheName),
                                     button=['OK'], defaultButton='Yes', dismissString='No')
                    return
                mc.delete(each, ch=True)
                myswichNode = []
                myswichList = []
                myNewcacheObjects = []
                switchText = ''
                for peach in each:
                    try:
                        switch = mm.eval('createHistorySwitch("%s", false)' % peach)
                    except:
                        ErrorObjects.append(peach)
                        print peach
                    else:
                        myNewcacheObjects.append(peach)
                        myswichNode.append(switch)
                        switchText = '%s.inp[0]' % switch
                        myswichList.append(switchText)
                mc.cacheFile(f=cacheName, directory=mydataName + cacheName,
                             cnm=myNewcacheObjects, ia=myswichList, attachFile=True)
                for seach in myswichNode:
                    mc.setAttr('%s.playFromCache' % seach, 1)
                mc.select(cl=True)
        mc.progressWindow(endProgress=True)
        mc.isolateSelect(activePlane, state=False)
        mc.modelEditor(activePlane, e=True, polymeshes=True, nurbsSurfaces=True)
        if ErrorObjects:
            print(u'The following shapes could not be cached:\n %s' % ErrorObjects)
        print u'All caches created'
def checkStaticCache(self, file):
    result = mc.cacheFile(file, q=1, sc=1)
    return result
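A hedged usage sketch for the query above; `exporter` and the node name are hypothetical, and the assumption is that the `sc` (staticCache) query reports whether the cacheFile node was written as a static cache.

# Hypothetical usage, assuming `exporter` is an instance of the class above:
if exporter.checkStaticCache('charA_cacheFile'):
    print 'charA_cacheFile is a static cache'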
mc.setAttr('nucleus1.spaceScale', 0.45)
mc.setAttr('nucleus1.subSteps', 6)
mc.setAttr('nucleus1.maxCollisionIterations', 8)

# Set Up Pin Constraints
mc.select('ten_sim_robe.vtx[2310:2334]', 'ten_sim_robe.vtx[3088:3111]', replace=True)
mc.select('ten_sim_robe.vtx[699:701]', 'ten_sim_robe.vtx[1652]', 'ten_sim_robe.vtx[1691]',
          'ten_sim_robe.vtx[1694]', 'ten_sim_robe.vtx[1725]', add=True)
mc.select('ten_collide_body', toggle=True)
mel.eval('createNConstraint pointToSurface 0;')

# Cache Out the Cloth Sim
# filepath = '/users/animation/mitchbre/Documents/Cloth_Script_Files/Test_Cache'
filepath = '/users/ugrad/e/ecmraven/workspace/cache'
shapeRelatives = mc.listRelatives('ten_sim_robe', shapes=True)
print shapeRelatives
mc.cacheFile(fileName='tenRobe_cache', format='OneFilePerFrame', startTime=-20,
             endTime=120, points=shapeRelatives[1],
             directory='/users/ugrad/e/ecmraven/workspace/cache')

# Connect the Cloth Cache
mc.currentTime(-20)
pc.mel.doImportCacheFile(filepath + '/tenRobe_cache.xml', '', ['ten_Robe_Sim'], list())

# Group Colliders
mc.group(['ten_Collider', 'ten_ColliderBase', 'ten_Mittens', 'nRigid1', 'nRigid2'],
         name='colliders')
# Group Robe
mc.group(['ten_Robe_Sim', 'ten_Sash_Sim'], name='robe_Objects')  # Objects
mc.group(['nCloth1', 'dynamicConstraint1'], name='robe_Sim')  # Group Robe Sim
mc.group(['robe_Objects', 'robe_Sim'], name='robe')  # Group All Robe Groups

# Group Pants
def createGeoCache(*args):
    geo = ['body1', 'roots1', 'vines_01', 'vines_02', 'headVines', 'twigs1', 'twigs2']
    shot = ['roots1', 'body', 'twigs1_lower', 'twigs1_upper', 'twigs2_upper',
            'twigs2_lower', 'cutvines_01', 'cutvines_02_lower', 'cutvines_02_upper',
            'cutheadVines_lower', 'cutheadVines_upper', 'headShot_B_head',
            'headShot_A_pieces', 'headShot_B_pieces']
    shotOld = ['roots1', 'body', 'twigs1', 'twigs2', 'cutvines_01', 'cutvines_02_lower',
               'cutvines_02_upper', 'cutheadVines_lower', 'cutheadVines_upper',
               'headshot_head', 'headshot_bit1', 'headshot_bit2']
    chop = ['body1', 'roots1', 'vines_01', 'vines_02', 'headVines', 'body', 'twigs1',
            'twigs2', 'head', 'head_chop_geo', 'twigs1_lower', 'twigs1_upper',
            'twigs2_upper', 'twigs2_lower', 'cutvines_01', 'cutvines_02_lower',
            'cutvines_02_upper', 'cutheadVines_lower', 'cutheadVines_upper']
    root = ['root_geo']
    if os.name == 'posix':
        project = m.workspace(rd=True, q=True).split('scenes/')[0]
    else:
        project = m.workspace(rd=True, q=True)
    dataDir = project + 'data/'
    index = project[:-1].rindex('/')
    shotDir = project[:index] + '/'
    index = shotDir[:-1].rindex('/')
    # parse the scene path to derive the scene name used for the folder
    s_string = m.file(sn=True, q=True)
    s_splitString = s_string.split('/')
    i_splitStringLength = len(s_splitString)
    s_filename = s_splitString[i_splitStringLength - 1]
    # parse the scene name to derive the folder name
    s_splitFolder = s_filename.split('.')
    i_splitStringLengthFolder = len(s_splitFolder)
    s_foldername = s_splitFolder[i_splitStringLengthFolder - 2]
    # specify the plate name here
    plate = shotDir[index + 1:-1] + '_plate_01'
    imageDir = shotDir + 'images/' + plate + '/'
    imageList = []
    # images = os.listdir(imageDir)
    # for i in images:
    #     if 'plate' in i:
    #         imageList.append(i)
    start = m.playbackOptions(ast=True, q=True)
    end = m.playbackOptions(aet=True, q=True)
    # set timeline to images
    m.playbackOptions(ast=start, aet=end, min=start, max=end)
    # make the geo cache directory
    geoCacheDir = dataDir + 'geoCache/'
    print geoCacheDir
    if not os.path.exists(os.path.join(dataDir, 'geoCache')):
        os.mkdir(geoCacheDir)
    # make the cache version directory
    versions = os.listdir(geoCacheDir)
    if versions:
        nextVersion = s_foldername
        cacheVersionDir = geoCacheDir + s_foldername  # use the scene name as the folder name
        if not os.path.exists(cacheVersionDir):
            os.mkdir(cacheVersionDir)
    else:
        cacheVersionDir = geoCacheDir + s_foldername  # use the scene name as the folder name
        os.mkdir(cacheVersionDir)
    # cache selected objects
    i = 0
    list = m.ls(type='transform')
    for obj in list:
        if m.attributeQuery('cache', node=obj, exists=True):
            try:
                m.setAttr(obj + '.cache', False)
            except:
                shape = m.listRelatives(obj, s=True)[0]
                if ':' in shape:
                    cacheName = shape[shape.rindex(':') + 1:]
                else:
                    cacheName = shape
                    print 'bunk'
                # shot specific caches
                cacheObjects = []
                sceneName = m.file(q=True, sn=True)
                # head shot geo
                if '323_SC' in sceneName or '340_SC' in sceneName:
                    cacheObjects = shot
                # head chop geo
                elif '708_SC' in sceneName or '709_SC' in sceneName or '715_SC' in sceneName:
                    cacheObjects = chop
                # normal geo
                else:
                    cacheObjects = geo
                for g in cacheObjects:
                    if g + 'Shape' == cacheName:
                        print 'caching shape:', shape, 'as:', cacheName
                        m.cacheFile(dir=cacheVersionDir, f=cacheName, points=shape,
                                    st=start - 10, et=end + 10)
                    else:
                        print cacheName, ' ignored. Not default high res geo. ', g + 'Shape'
                for r in root:
                    if r + 'Shape' == cacheName:
                        print 'caching shape:', shape, 'as:', cacheName
                        m.cacheFile(dir=cacheVersionDir, f=cacheName + '_' + str(i),
                                    points=shape, st=start - 10, et=end + 10)
                        m.select(obj)
                        # print cacheVersionDir
                        # buildStr = "AbcExport -j " + "\"" + "-frameRange " + str(start-10) + " " + str(end+10) + " -step 0.5 -uvWrite -file " + "\'" + cacheVersionDir + "/" + obj.replace(':', '__') + ".abc\'" + "\"" + ";"
                        # print buildStr
                        # mel.eval(buildStr)
                    else:
                        print cacheName, ' ignored. Not default high res geo. ', r + 'Shape'
                i = i + 1
        else:
            # print 'no attr'
            pass
def iter_existing_cache_connections():
    """Yield data about every existing cache connection in the scene.

    :returns: Iterator of ``(cacheFile, fileName, channel, transform, shape)``
        tuples for each cache connection.

    It is possible for ``transform`` or ``shape`` to be ``None`` when the
    connection cannot be fully resolved. In every case that the connection is
    not complete, ``shape`` will be ``None``.
    """
    cache_nodes = cmds.ls(type='cacheFile') or []
    for cache_node in cache_nodes:
        cache_paths = cmds.cacheFile(cache_node, q=True, fileName=True)
        if not cache_paths:
            dir_ = cmds.getAttr('%s.cachePath' % cache_node)
            name = cmds.getAttr('%s.cacheName' % cache_node)
            cmds.warning(('cacheNode %s does not exist: %s/%s' % (cache_node, dir_, name)).replace('//', '/'))
            continue
        cache_path = cache_paths[0]

        ## Identify what it is connected to.
        channel = cmds.getAttr(cache_node + '.channel[0]')
        switch = cmds.listConnections(cache_node + '.outCacheData[0]')
        if not switch:
            cmds.warning('cacheFile %r is not connected' % cache_node)
            yield cache_node, cache_path, channel, None, None
            continue
        switch = switch[0]
        switch_type = cmds.nodeType(switch)

        # Pass through blends.
        if switch_type == 'cacheBlend':
            blend = switch
            switch = cmds.listConnections(blend + '.outCacheData[0]')
            if not switch:
                cmds.warning('cacheBlend %r is not connected' % blend)
                yield cache_node, cache_path, channel, None, None
                continue
            switch = switch[0]
            switch_type = cmds.nodeType(switch)

        if switch_type != 'historySwitch':
            cmds.warning('Unknown cache node layout; expected historySwitch, found %s %r' % (switch_type, switch))
            yield cache_node, cache_path, channel, None, None
            continue

        # The switch hooks onto a transform, but we want the shapes.
        transform = (cmds.listConnections(switch + '.outputGeometry[0]') or (None, ))[0]
        if transform is None:
            cmds.warning('Unknown cache node layout; nothing connected to %r' % switch)
            yield cache_node, cache_path, channel, None, None
            continue

        # Pass through groupParts. The control flow is a little wacky here, be
        # careful.
        while transform is not None:
            transform_type = cmds.nodeType(transform)
            if transform_type == 'groupParts':
                transform = (cmds.listConnections(transform + '.outputGeometry') or (None, ))[0]
                continue
            break
        if transform is None:
            transform_type = 'None'

        if transform_type != 'transform':
            cmds.warning('Unknown cache node layout; expected transform, found %s %r' % (transform_type, transform))
            yield cache_node, cache_path, channel, None, None
            continue

        shapes = cmds.listRelatives(transform, children=True, shapes=True) or []
        shapes = isolate_deformed_shape(shapes)
        if len(shapes) != 1:
            cmds.warning('Could not identify shape connected to %r; found %r' % (cache_node, shapes))
            yield cache_node, cache_path, channel, transform, None
            continue
        shape = shapes[0]

        yield cache_node, cache_path, channel, transform, shape
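A short usage sketch for the iterator above, reporting each fully resolved connection; the skip-on-`None` handling follows the contract stated in the docstring.

# Hypothetical usage: list every fully resolved cache connection.
for cache_node, path, channel, transform, shape in iter_existing_cache_connections():
    if shape is None:
        continue  # connection could not be fully resolved; a warning was already issued
    print cache_node, '->', shape, '(%s)' % channel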
def prepareCacheGrade(self):
    """Organize cache files.

    Description: copy the cache files connected to every cache node in the
        scene to a designated directory.
    Arguments: none
    Returns: none
    """
    # Resolve the object connected to each cache node.
    cacheNodesList = clothHairT.getCacheNodes()
    result = True
    for eachCacheNode in cacheNodesList:
        if isNOrGeoCacheNode(eachCacheNode) == "geoCache":
            cacheFlag = 1
            isMultCache = cmds.listConnections(eachCacheNode, type="cacheBlend")
            if isMultCache:
                # The object has more than one cache: select it and warn the user.
                shapeWithCache = cmds.cacheFile(eachCacheNode, query=True, geometry=True)
                objName = cmds.listRelatives(shapeWithCache[0], parent=True)  # transform that owns this cache
                cmds.select(objName[0], replace=True)
                cmds.confirmDialog(title="Warning", message="Object %s Has More Than One Cache" % objName[0], button="OK", defaultButton="OK")
                cmds.warning("Please Delete Other Cache Nodes And CacheBlend Node!")
                result = False
                break
            else:
                # Exactly one cache.
                shapeWithCache = cmds.cacheFile(eachCacheNode, query=True, geometry=True)
                objName = cmds.listRelatives(shapeWithCache[0], parent=True)  # transform that owns this cache
                print objName
                cacheFinalDir = getFinalCacheDir(objName[0], cacheFlag)  # staging path derived from the object name
                ObjNameNew = getObjNameNew(objName[0])
                eachCacheFileList = cmds.cacheFile(eachCacheNode, query=True, f=True)  # the cache node's files: .mc and .xml
                # Store the cache files in that directory (create it if it
                # does not exist yet) and rename them.
                if not os.path.exists(cacheFinalDir):
                    os.makedirs(cacheFinalDir)
                newCacheFileDirMC = cacheFinalDir + ObjNameNew + ".mc"
                newCacheFileDirXML = cacheFinalDir + ObjNameNew + ".xml"
                for eacheachCacheFile in eachCacheFileList:
                    if eacheachCacheFile.find(".mc") >= 0:
                        shutil.copyfile(eacheachCacheFile, newCacheFileDirMC)  # copy the .mc cache file
                    elif eacheachCacheFile.find(".xml") >= 0:
                        shutil.copyfile(eacheachCacheFile, newCacheFileDirXML)  # copy the .xml cache file
        elif isNOrGeoCacheNode(eachCacheNode) == "nCache":
            cacheFlag = 2
            isMultCache = cmds.listConnections(eachCacheNode, type="cacheBlend")
            if isMultCache:
                # The hair system has more than one cache: select it and warn the user.
                hairSystemName = cT.delDupFromList(cmds.listConnections(eachCacheNode, type="hairSystem"))
                hairSystemShapeName = cmds.listRelatives(hairSystemName[0], shapes=True)  # hairSystemShape for this cache
                cmds.select(hairSystemShapeName[0], replace=True)
                cmds.confirmDialog(title="Warning", message="HairSystem %s Has More Than One Cache" % hairSystemShapeName[0], button="OK", defaultButton="OK")
                cmds.warning("Please Delete Other Cache Nodes And CacheBlend Node!")
                result = False
                break
            else:
                # Exactly one cache.
                hairSystemName = cT.delDupFromList(cmds.listConnections(eachCacheNode, type="hairSystem"))
                hairSystemShapeName = cmds.listRelatives(hairSystemName[0], shapes=True)  # hairSystemShape for this cache
                cacheFinalDir = getFinalCacheDir(hairSystemShapeName[0], cacheFlag)  # staging path derived from the shape name
                ObjNameNew = getObjNameNew(hairSystemShapeName[0])
                eachCacheFileList = cmds.cacheFile(eachCacheNode, query=True, f=True)  # the cache node's files: .mc and .xml
                # Store the cache files in that directory (create it if it
                # does not exist yet) and rename them.
                if not os.path.exists(cacheFinalDir):
                    os.makedirs(cacheFinalDir)
                newCacheFileDirMC = cacheFinalDir + ObjNameNew + ".mc"
                newCacheFileDirXML = cacheFinalDir + ObjNameNew + ".xml"
                for eacheachCacheFile in eachCacheFileList:
                    if eacheachCacheFile.find(".mc") >= 0:
                        shutil.copyfile(eacheachCacheFile, newCacheFileDirMC)  # copy the .mc cache file
                    elif eacheachCacheFile.find(".xml") >= 0:
                        shutil.copyfile(eacheachCacheFile, newCacheFileDirXML)  # copy the .xml cache file
    if result:
        cmds.confirmDialog(title="OK", message="Prepare Cache OK !", button="OK", defaultButton="OK")
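# Both branches above end with the same copy-and-rename tail; a hedged sketch
# of how that tail could be factored into a helper. The helper name is
# hypothetical; cacheFileList is what cmds.cacheFile(node, query=True, f=True)
# returns.
import os
import shutil

def _copyCachePair(cacheFileList, cacheFinalDir, objNameNew):
    # Create the staging directory if it does not exist yet.
    if not os.path.exists(cacheFinalDir):
        os.makedirs(cacheFinalDir)
    # Copy the .mc/.xml pair under the new object name.
    for cacheFile in cacheFileList:
        ext = os.path.splitext(cacheFile)[1]
        if ext in ('.mc', '.xml'):
            shutil.copyfile(cacheFile, os.path.join(cacheFinalDir, objNameNew + ext))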
def rebuild_cache_from_xml(xmlPath, fluidShape=''):
    """
    Based on the Autodesk cache xml file structure: get and set
    *.tag / *.attrib / *.text / *.tail

    NOTE: Interactive cache attachments; IF the interactive master boat is
    found in the scene, these caches need to be attached to the interactive
    wake and foam 3D fluid textures for the purposes of previewing the caches.
    Later on during publish we cache the base wake and foam, then attach these
    caches back to the ocean_dispShader, and then perform a merge caches and
    output that to the publish folder.
    """
    debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='Rebuilding Caches', verbose=False)
    debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='xmlPath: %s' % xmlPath, verbose=False)
    if os.path.exists(xmlPath):
        debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='GOOD! Path to xml exists: %s' % xmlPath, verbose=False)

        # Create a new cache node.
        cache = cmds.cacheFile(createCacheNode=True, fileName=xmlPath)
        debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='Cache node: %s' % cache, verbose=False)

        # Parse the xml file.
        xml = et.parse(xmlPath)
        root = xml.getroot()

        # By default, Autodesk's xml structure has all the attributes and
        # values set in the <extra> tag. Therefore, we have to go through all
        # of them and set them in Maya, because the cache only works properly
        # with all the same settings, or it'll show a warning stating some
        # values aren't the same.
        for tag in root.findall("extra"):
            try:
                attr, tag = tag.text.split("=")
                # debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='attr: %s' % attr, verbose=False)
                # debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='tag: %s' % tag, verbose=False)
            except:
                attr = None
            if attr is not None:
                if cmds.objExists(attr):
                    try:
                        cmds.setAttr(attr, eval(tag))
                    except:
                        debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='Failed to set %s' % attr, verbose=False)
                        mel.eval(r'warning "Failed to set \"%s\"...";' % attr)

        # Use exactly the same slot numbers for the cache and fluid connection
        # hook-ups, or the cache won't work properly.
        fluids = []
        for x in root.findall("Channels"):
            for y in x.getchildren():
                channelName = y.get("ChannelName")
                debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='channelName: %s' % channelName, verbose=False)
                debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='y.get("ChannelName").split("_"): %s' % y.get("ChannelName").split("_"), verbose=False)
                if 'interactive' in channelName:
                    # A plain split mangles the interactive_oceanFoam naming,
                    # so interactive channels are handled differently here.
                    fluid = '_'.join(channelName.split("_")[0:-1])
                    attr = channelName.split("_")[-1]
                else:
                    fluid, attr = channelName.split("_")
                if cmds.objExists(fluidShape):
                    fluid = fluidShape
                index = y.tag.strip("channel")
                src = "%s.outCacheData[%s]" % (cache, index)
                dst = "%s.in%s" % (fluid, attr.title())
                cmds.setAttr("%s.ch[%s]" % (cache, index), channelName, type="string")
                _connectAttr(src, dst, force=1)
                if fluid not in fluids:
                    fluids.append(fluid)

        # More connections to hook up...
        for fluid in fluids:
            src = "time1.outTime"
            dst = "%s.currentTime" % fluid
            _connectAttr(src, dst, force=1)
            src = "%s.inRange" % cache
            dst = "%s.playFromCache" % fluid
            _connectAttr(src, dst, force=1)
        debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='SUCCESS', verbose=False)
        return cache
    else:
        debug(app=None, method='fluidCaches.rebuild_cache_from_xml', message='FAILED TO FIND PATH TO CACHE', verbose=False)
        return None
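# A short usage sketch, assuming a published fluid cache xml on disk; the
# path and fluid shape name below are hypothetical.
xmlPath = '/jobs/show/publish/fluidCaches/oceanWakeFoam.xml'
cacheNode = rebuild_cache_from_xml(xmlPath, fluidShape='interactive_oceanWakeShape')
if cacheNode is None:
    cmds.warning('No cache rebuilt; check the xml path: %s' % xmlPath)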