def copyAlltoAsset(assetMaps): assetMaps = pym.Path(assetMaps) for fn in pym.ls(type="file"): ftn = pym.Path(fn.fileTextureName.get()) if pym.Path(ftn).parent != assetMaps: tgt = assetMaps / ftn.basename() if not tgt.exists(): print "copy:", ftn, "to:", assetMaps pym.Path.copy(ftn, assetMaps)
def setNewPath(filePath, localdir, addition):
    """Build a path under <localdir>/sourceimages/ that mirrors the
    sub-folder layout of *filePath*, inserting *addition* before the
    file extension.

    Creates the destination folder tree when missing and returns the new
    path as a string.
    """
    src = pm.Path(filePath)
    name = src.namebase
    base = src.splitext()[0]
    ext = src.splitext()[-1]
    # Keep only the sub-folders between ".../sourceimages/" and the file name.
    prefix = base.split("/sourceimages/")[-1].split(name)[0]
    dest_dir = pm.Path(localdir + 'sourceimages/' + prefix)
    if not dest_dir.exists():
        dest_dir.makedirs()
    return localdir + 'sourceimages/' + prefix + name + addition + ext
def startHoudiniObjectTools(geoFile="", ptc=False, density=0.1, ptcBBox=True):
    """Run the external HoudiniObjectTools.exe converter on *geoFile*.

    :param geoFile: .geo/.bgeo file to operate on; required.
    :param ptc: when True, convert to a point cloud (.hsffp) with point
        density *density*; otherwise only query the bounding box.
    :param density: point cloud density passed via -ptcd.
    :param ptcBBox: unused in this body -- NOTE(review): confirm whether
        callers rely on it.
    :return: the parsed bounding box list [xmin, ymin, zmin, xmax, ymax, zmax]
        as floats; False when no file was given; None when geoFile is missing.
    """
    #HoudiniObjectTools.exe C:\daten\3dprojects\mantra\mantra\mayaToMantra_fluid\geo
    if geoFile == "":
        log.error("startHoudiniObjectTools: no file to operate on")
        return False
    prepareEnv()
    converterPath = os.environ["MTM_HOME"] + "/bin/HoudiniObjectTools.exe -v 5 "
    cmd = ""
    if ptc:
        cmd = converterPath + " -ptc " + geoFile + " -ptcd " + str(density)
    else:
        cmd = converterPath + " -bbox " + geoFile
    # NOTE(review): this existence check returns None while the empty-name
    # check above returns False -- inconsistent sentinel values.
    if not os.path.exists(geoFile):
        log.error("geoFile " + geoFile + " does not exist.")
        return
    ptcFile = geoFile.replace(".geo", ".hsffp").replace(".bgeo", ".hsffp")
    doConvert = True
    if os.path.exists(ptcFile):
        # Skip conversion when the existing point cloud is newer than the geo.
        mtimeg = pm.Path(geoFile).mtime
        mtimep = pm.Path(ptcFile).mtime
        if mtimep > mtimeg:
            log.debug(
                "ptcFile is younger than geo file, no conversion necessary")
            doConvert = False
    # no conversion, only read bbox
    if not doConvert:
        cmd = converterPath + " -bbox " + geoFile
    log.info("Starting HoudiniObjectTools cmd: %s" % cmd)
    # Windows process-priority flag so the converter runs at idle priority.
    IDLE_PRIORITY_CLASS = 64
    process = subprocess.Popen(cmd,
                               bufsize=1,
                               shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               creationflags=IDLE_PRIORITY_CLASS)
    output = []
    bbox = []
    # Read converter output line by line until EOF, echoing each line to the
    # Maya script editor and grabbing the "BBox: ..." line when it appears.
    while 1:
        line = process.stdout.readline()
        #BBox: -0.110311 -1.26636 -1.17787 2.24247 2.5 1.17811
        if "BBox: " in line:
            bbox = line.strip().replace("BBox: ", "").split(" ")
            bbox = map(float, bbox)
        output.append(line)
        if not line:
            break
        pm.mel.trace(line.strip())
    #log.debug(line.strip())
    #print "Bounding box", bbox
    return bbox
def publish_ass(self):
    '''Export a reference ASS file (<asset>_<department>_REF.ass) for
    StandIn usage, backing up any previous file as a hidden timestamped
    .bak first. Returns True on completion.'''
    curr_path = pmc.sceneName()
    curr_name, curr_ext = curr_path.name.splitext()
    new_path = pmc.Path(curr_path.parent.parent + "/" + self.meta.asset +
                        "_" + self.meta.department + "_REF.ass")
    # Backup Procedure: keep the previous publish as a hidden .bak file.
    if os.path.exists(new_path):
        stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_path = new_path + stamp + ".bak"
        shutil.copyfile(new_path, backup_path)
        # 0x08000000 == CREATE_NO_WINDOW; attrib +H hides the backup.
        subprocess.check_call(["attrib", "+H", backup_path],
                              creationflags=0x08000000)
    pmc.select(allDagObjects=True)
    export_options = ("-shadowLinks 1;-mask 6399;-lightLinks 1;"
                      "-exportAllShadingGroups;-boundingBox")
    pmc.exportSelected(new_path,
                       force=True,
                       type="ASS Export",
                       options=export_options)
    pmc.select(clear=True)
    return True
def replaceL2H(fr, to):
    """Swap referenced low-res files for their high-res counterparts.

    Replaces the token *fr* (e.g. '_l_') with *to* (e.g. '_h_') in every
    reference whose file name contains it. References whose high-res file
    is missing on the server are collected and reported in a dialog;
    otherwise a completion dialog is shown.
    """
    missing = []
    for namespace, ref in pm.system.getReferences().items():
        low_path = ref.path
        low_name = low_path.name
        if fr not in low_name:
            continue
        high_name = low_name.replace(fr, to)
        high_path = low_path.replace(fr, to)
        folder = pm.Path(low_path).dirname()
        # Only swap when the high-res file actually exists next to the low one.
        if high_name in [f.name for f in folder.files()]:
            ref.replaceWith(high_path)
        else:
            missing.append(low_path)
    if missing:
        pm.confirmDialog(title=u'提示',
                         message='\n'.join(missing) + u'\n 没有"_h_"文件',
                         button=[u'确认'])
    else:
        pm.confirmDialog(title=u'提示', message=u'完成', button=[u'确认'])
def changeOutputDestination(self, dest=None):
    '''Update the output location used for Muster renders; without *dest*
    a directory picker seeded from the current output field is shown.'''
    #TODO: check access to proposed render location location
    if not dest:
        start_dir = self.widgets['outputDir'].getFileName()
        dest = pm.fileDialog2(fileMode=3,
                              dialogStyle=1,
                              startingDirectory=start_dir)
    if dest:
        self.widgets['outputDir'].setText(pm.Path(dest[0]))
def test_file_reference_creation(self):
    """Every reference must compare equal to a FileReference rebuilt from
    its refNode (PyNode or string), its copy-numbered path (Path or
    string), and its full namespace."""
    for ref in pm.listReferences(recursive=True):
        rebuilt = [
            pm.FileReference(pm.PyNode(ref.refNode)),
            pm.FileReference(str(ref.refNode)),
            pm.FileReference(pm.Path(ref.withCopyNumber())),
            pm.FileReference(str(ref.withCopyNumber())),
            pm.FileReference(namespace=ref.fullNamespace),
        ]
        for candidate in rebuilt:
            self.assertEqual(ref, candidate)
def syncTextures(error=False):
    """Sync the (stripped) texture folder of every file node, syncing each
    distinct folder only once."""
    sync = Sync()
    seen = set()
    for node in pym.ls(type="file"):
        if not pym.hasAttr(node, "fileTextureName"):
            continue
        texture = node.fileTextureName.get()
        folder = sync.strip(pym.Path(texture).parent)
        if folder in seen:
            continue
        seen.add(folder)
        syncPath(folder)
def save_version_up(self):
    '''Set the Maya project to the shot path and save the scene there
    under the versioned-up shot name as mayaAscii.

    :return: True on completion.
    '''
    pm.mel.setProject(pm.Path(self.shot_path))
    target = '{}/{}'.format(self.shot_path, self.new_shot_name)
    pm.system.saveAs(target, type='mayaAscii', force=1)
    return True
def load_latest_incr(self):
    '''Open the latest increment of the current asset; logs an error when
    the file cannot be opened.'''
    dev_folder = pmc.Path(self.meta.current_file).parent
    latest = get_latest_incr_path(asset_name=self.meta.asset,
                                  department=self.meta.department,
                                  folder=dev_folder)
    try:
        return self.load(latest)
    except RuntimeError:
        logger.error("Could not open file!")
def dump_data(json_data, fpath=None): if not fpath: ls_fpath = pm.fileDialog2(fm=0, ff="*.json", cap="Export Object Set") if not ls_fpath: return fpath = ls_fpath[0] print " Writing Object Sets data:\n > ", fpath with open(fpath, "w") as writefile: json.dump(json_data, writefile, indent=1) res = pm.Path(fpath).exists() if res: print " Object Sets >", fpath return fpath return
def multFNdiffCS():
    """Map each texture path to the file nodes using it, keeping only the
    textures whose nodes differ in color space (as decided by diffCS)."""
    by_texture = {}
    for node in pym.ls(type="file"):
        texture = pym.Path(node.fileTextureName.get())
        by_texture.setdefault(texture, []).append(node)
    return {tex: nodes
            for tex, nodes in by_texture.items()
            if diffCS(nodes)}
def check(self):
    """@brief Check if all the cache in the scene are in the Library.

    Walks every cache found in the scene and validates its path against
    the shotgun publish templates; paths matching neither template are
    collected and reported via self.addError / self.errorMessage.
    """
    badCachePath = list()
    badCacheNode = list()
    cacheIn = getCacheInfoFromMaya()
    cacheInScene = cacheIn.getCacheFromScene()
    # get the templates
    # NOTE(review): both branches fetch the exact same two templates --
    # the TYPE == 'MULTI' distinction currently has no effect; confirm
    # whether MULTI was meant to use different template names.
    if not TYPE == 'MULTI':
        cachePublishTemplate = self.parent.app.get_template_by_name(
            'fx_cacheseq_shot_publish')
        mayaCachePublishTemplate = self.parent.app.get_template_by_name(
            'maya_fx_cacheseq_shot_publish')
    else:
        cachePublishTemplate = self.parent.app.get_template_by_name(
            'fx_cacheseq_shot_publish')
        mayaCachePublishTemplate = self.parent.app.get_template_by_name(
            'maya_fx_cacheseq_shot_publish')
    for cacheFrom, cacheVal in cacheInScene.iteritems():
        # NOTE(review): fileNode is never used, and the inner loop rebinds
        # cacheVal, shadowing this outer value -- works because the outer
        # loop re-fetches, but fragile.
        fileNode = cacheVal
        for nodes, nodeVal in cacheVal.iteritems():
            for cacheNumber, cacheVal in nodeVal.iteritems():
                filePath = cacheVal['path']
                # A path is acceptable when it matches either publish
                # template (sequence key ignored).
                if cachePublishTemplate.validate(filePath,
                                                 skip_keys=["SEQ"]):
                    continue
                elif mayaCachePublishTemplate.validate(filePath,
                                                       skip_keys=["SEQ"]):
                    continue
                else:
                    badCachePath.append(pm.Path(filePath))
                    badCacheNode.append(nodes)
                    continue
    if not badCachePath:
        self.status = "OK"
    else:
        self.status = self.errorMode
        self.errorNodes = badCacheNode
        for node in badCachePath:
            self.addError("%s is not in the library" % node)
        self.errorMessage = "%s Cache not in library" % (len(badCachePath))
def main():
    """Open the scene file given on the command line, run the cleanup
    passes, save a new version and notify the file owner over XMPP.

    Aborts silently when the file is outdated, locked by someone, or no
    cleanup operation changed anything.
    """
    file_to_open = os.path.normpath(sys.argv[1])
    dirname = os.path.dirname(file_to_open)
    latest = utils.getLatestFile(dirname)
    # abort if file is no longer the latest version
    if not os.path.samefile(file_to_open, latest):
        return
    # abort if file is currently in use
    if fl.isLocked(file_to_open):
        return
    time_start = time.ctime()
    pm.system.openFile(file_to_open)
    # Each cleanup pass reports how many operations it performed.
    count = 0
    count += functions.removeUseless(animCurves=True,
                                     objectSets=True,
                                     referenceNodes=True)
    count += functions.fixNamespaces()
    count += functions.rebuildHierarchy()
    count += functions.renameAnimCurves()
    # abort if no operation has been done
    if count == 0:
        return
    fl.unlock(file_to_open)
    # abort if file is used by others
    if fl.isLocked(file_to_open):
        return
    time_end = time.ctime()
    functions.addFileInfo(file_to_open, time_start, time_end)
    new_file = functions.versionUp(file_to_open)
    # World-writable so other artists can pick the new version up
    # (Python 2 octal literal).
    new_file.chmod(0777)
    old_file = pm.Path(file_to_open)
    owner = old_file.get_owner()
    # Notify the original owner; message text is user-facing Chinese and
    # must stay as-is.
    sender = studio_xmpp.Sender()
    sender.send(
        owner, u'已经清理优化了你的工程文件,请使用新版!\n'
        u'源文件:%s\n'
        u'新版本:%s\n'
        u'(目前是测试阶段,打开新版本发现问题的话,请尽快通知TD。)' %
        (old_file.basename(), new_file.basename()))
def __init__(self, root_path, episode=None, shot=None):
    '''Initialise the shot wrapper and immediately resolve the next
    version number.

    :param root_path: base root path for episodes
    :param episode: episode name
    :param shot: shot name
    '''
    self.episode = episode
    self.shot = shot
    self.shot_path = pm.Path(root_path)
    self.shot_dir = self.shot_path.dirname()
    # Filled in later by get_next_version().
    self.shot_name = None
    self.new_shot_name = None
    self.cur_filename = pm.sceneName().namebase
    self.get_next_version()
def save_incr(self, comment=None):
    '''Save the scene as the next increment (asset_name_XX...), prompting
    before overwriting an already-existing increment file.

    :param comment: optional comment forwarded to save_as.
    :return: save_as result, or False when aborted.
    '''
    curr_path = pmc.sceneName()
    curr_name, curr_ext = curr_path.name.splitext()
    matches = re.findall(r"_\d+", curr_name)
    if not matches:
        logger.warning(
            "Please check filename format: 'your_asset_name_XX_optional_comment.ma'!"
        )
        return False
    incr_token = matches[-1]
    curr_asset = curr_name.split(incr_token)[0]
    next_incr = int(incr_token.replace("_", "")) + 1
    # "_{num:0{width}d}" creates the increment suffix with leading zeroes
    incr_suffix = "_{num:0{width}d}".format(num=next_incr,
                                            width=self.incr_padding)
    incr_file = pmc.Path(curr_path.parent + "/" + curr_asset + incr_suffix +
                         curr_ext)
    if not os.path.exists(incr_file):
        return self.save_as(incr_file, comment)
    logger.error("FILE ALREADY EXITS!")
    confirmation = pmc.confirmDialog(title='Confirm',
                                     message="Force Save?",
                                     button=['Yes', 'No'],
                                     defaultButton='Yes',
                                     cancelButton='No',
                                     dismissString='No')
    if confirmation == 'Yes':
        self.save_as(incr_file, comment)
    else:
        return False
def saveFile(describe):
    """Save the current scene under the project naming convention.

    :param describe: key into LAYER_FILENAME (e.g. 'CHRcolor'); its value
        is embedded in the saved file name.
    :return: the new file name (without extension) on success.
    :raises RuntimeError: when Maya fails to save the file.
    """
    pnm = ProjNameMatch()
    fileName = pm.Env().sceneName().namebase
    # Parse the current scene name; the original bound the result to a
    # variable it immediately overwrote, so only the side effect matters.
    pnm.setFileName(fileName)
    # Renamed from `dir` to avoid shadowing the builtin.
    scenes_dir = pm.Path(pnm.getProjDirectorys()[-1] +
                         pnm.getProjDirectorys()[0] + '/scenes')
    project_name = pnm.getResults('project_name')
    episode_number = pnm.getResults('episode_number')
    session_number = pnm.getResults('session_number')
    scene_number = pnm.getResults('scene_number')
    scene_describe = LAYER_FILENAME[describe]
    process_name = 'lr'
    version_number = 'c001'
    filePrefix = '_'.join([
        project_name, episode_number, session_number, scene_number,
        scene_describe, process_name
    ])
    # Collect existing version suffixes for this prefix to pick the next one.
    versions = []
    for f in scenes_dir.files():
        if filePrefix in f.name:
            subpnm = ProjNameMatch()
            subpnm.setFileName(f.name)
            versions.append(subpnm.getResults('version_number'))
    if versions:
        versions.sort()
        version_number = 'c' + str(int(versions[-1][1:]) + 1).zfill(3)
    newFileName = '_'.join([
        project_name, episode_number, session_number, scene_number,
        scene_describe, process_name, version_number
    ])
    try:
        pm.saveAs("{0}/{1}.mb".format(scenes_dir, newFileName))
        return newFileName
    except Exception:
        # BUG FIX: the original executed `raise "save this file failure!!!"`;
        # string exceptions are illegal (it raises TypeError itself), so
        # raise a real exception instead. Bare `except:` narrowed too.
        raise RuntimeError("save this file failure!!!")
def allTexturesToRelative():
    """Re-point every file-node texture outside the project to a
    project-relative path.

    Prefers an existing local copy, then copies from the remote location,
    then falls back to the latest known version.

    :return: list of (file_node, path) pairs that could not be resolved.
    """
    oSync = Sync()
    prjpath = pym.workspace.path
    notfound = []
    for filenode in pym.ls(type="file"):
        if not filenode.hasAttr("fileTextureName"):
            continue
        path = pym.Path(filenode.fileTextureName.get())
        if path.startswith(prjpath):
            continue  # already inside the project
        if oSync.local_exists(path):
            setTexturePathWithSameCS(filenode, oSync.strip(path))
        elif oSync.remote_exists(path):
            path = oSync.copyRef(path)
            setTexturePathWithSameCS(filenode, oSync.strip(path))
        else:
            newpath = oSync.getLatest(path)
            if newpath:
                # NOTE(review): this strips the *old* path even though a
                # newer one was just resolved -- looks like it should be
                # oSync.strip(newpath); confirm before changing.
                setTexturePathWithSameCS(filenode, oSync.strip(path))
            else:
                # BUG FIX: list.append takes one argument; the original
                # `notfound.append(filenode, path)` raised TypeError.
                notfound.append((filenode, path))
    return notfound
def startup_deferred():
    """Deferred Backspace pipeline startup.

    Sources the drag'n'drop MEL script, sets the Maya project, opens the
    Python command port and installs the hotkey set. Module-level flags
    (project_setup / port_setup / hotkeys_setup) guard each step so it
    only runs once.
    """
    logger.info("Backspace Deferred Startup Procedure")
    import pymel.core as pmc
    pipe_path = pmc.Path(__file__).parent.parent
    maya_project_path = "//am-ca-fs02/cg2/04_workflow"
    #maya_project_path = pmc.Path(__file__).splitdrive()[0] / "/04_workflow"

    # DragnDrop Feature
    dragndrop_script_path = (pipe_path / "backspace_pipe" / "mel" /
                             "performFileDropAction.mel")
    pmc.mel.evalDeferred('source "{}"'.format(
        dragndrop_script_path.replace("\\", "/")))

    # Set Project
    global project_setup
    if not project_setup:
        try:
            pmc.mel.eval('setProject "{}"'.format(maya_project_path))
            # BUG FIX: the flag was never set, so (unlike the other steps)
            # the project setup re-ran on every deferred startup; mark it
            # done on success, leaving failures free to retry.
            project_setup = True
        except RuntimeError as e:
            logger.warning(
                "Could not set project at {}".format(maya_project_path))

    # Port Setup
    global port_setup
    if not port_setup:
        try:
            pmc.commandPort(name=":7002", sourceType="python")
        except RuntimeError as e:
            pmc.warning(e)
        port_setup = True

    # Hotkey Setup
    global hotkeys_setup
    if not hotkeys_setup:
        if pmc.hotkeySet("BackspaceHotkeys", query=True, exists=True):
            pmc.hotkeySet("BackspaceHotkeys", edit=True, current=True)
        else:
            pmc.hotkeySet(edit=True, ip=pipe_path + "/BackspaceHotkeys.mhk")
        hotkeys_setup = True
def publish(self, comment=None):
    '''Publish the current scene as <asset>_<department>_REF, prompting
    for a comment when none is given.

    :return: True on publish, False when the comment prompt is cancelled.
    '''
    if not comment:
        result = pmc.promptDialog(title="Comment",
                                  message="Enter Comment:",
                                  button=["OK", "Cancel"],
                                  defaultButton="OK",
                                  cancelButton="Cancel",
                                  dismissString="Cancel")
        if result != "OK":
            return False
        comment = pmc.promptDialog(query=True, text=True)
    curr_path = pmc.sceneName()
    curr_name, curr_ext = curr_path.name.splitext()
    ref_path = pmc.Path(curr_path.parent.parent + "/" + self.meta.asset +
                        "_" + self.meta.department + "_REF" + curr_ext)
    self.save_as(ref_path, comment)
    return True
def __init__(self, src=None):
    """Wrap the source root and current project path.

    :param src: source root path; defaults to facts.facts["src"].
        BUG FIX: the original used facts.facts["src"] directly as the
        default, which is evaluated once at import time and freezes any
        later change to the facts dict; resolve it at call time instead.
    """
    if src is None:
        src = facts.facts["src"]
    self.__src__ = pym.Path(src)
    self.__prj__ = pym.workspace.path
    # Names of the immediate sub-folders of the source root.
    self.__baseFolders__ = [
        str(path.name) for path in self.__src__.listdir() if path.isdir()
    ]
def convertPymelEnums(docLocation=None):
    """Scan every pymel api cache file, report enum inconsistencies, and
    (after interactive confirmation when problems exist) rewrite the
    caches with pymel enums regenerated from the api enums.

    Keys whose index is None are purged; enums left empty by the purge are
    deleted from both 'enums' and 'pymelEnums'. All caches are modified in
    memory first and only written back once every cache processed cleanly.

    :param docLocation: forwarded to parsers.ApiDocParser.
    """
    # Compatibility for pre-2012 caches... see note after ApiEnum def in
    # apicache
    import pymel.api
    pymel.api.Enum = apicache.ApiEnum
    apicache.Enum = apicache.ApiEnum
    import pymel.internal.parsers as parsers
    import maya.OpenMaya as om
    parser = parsers.ApiDocParser(om, docLocation=docLocation)
    # A dummy cache with a regex version gives us the on-disk file pattern
    # matching every cached Maya version.
    dummyCache = apicache.ApiCache()
    dummyCache.version = '[0-9.]+'
    cachePattern = pm.Path(dummyCache.path())
    caches = sorted(cachePattern.parent.files(re.compile(cachePattern.name)))
    rawCaches = {}
    badByCache = {}
    enumsByCache = {}
    # Pass 1: load every cache and collect its api/pymel enums plus any
    # inconsistencies found between them.
    for cachePath in caches:
        print "checking enum data for: %s" % cachePath
        raw = pm.util.picklezip.load(unicode(cachePath))
        rawCaches[cachePath] = raw
        classEnums, classPyEnums, bad = checkEnumConsistency(raw,
                                                             parser=parser)
        if bad:
            badByCache[cachePath] = bad
        enumsByCache[cachePath] = {'api': classEnums, 'py': classPyEnums}
    # Interactive gate: only proceed past reported problems on a 'y'.
    if badByCache:
        pprint.pprint(badByCache)
        print "Do you want to continue converting pymel enums? (y/n)"
        print "(Pymel values will be altered to match the api values)"
        answer = raw_input().lower().strip()
        if not answer or answer[0] != 'y':
            print "aborting cache update"
            return
    fixedKeys = []
    deletedEnums = []
    # Pass 2: mutate each loaded cache in memory.
    for cachePath, raw in rawCaches.iteritems():
        print '=' * 60
        print "Fixing: %s" % cachePath
        apiClassInfo = raw[-1]
        apiEnums = enumsByCache[cachePath]['api']
        pyEnums = enumsByCache[cachePath]['py']
        assert (set(apiEnums.keys()) == set(pyEnums.keys()))
        for className, apiEnumsForClass in apiEnums.iteritems():
            pyEnumsForClass = pyEnums[className]
            assert (set(apiEnumsForClass.keys()) == set(
                pyEnumsForClass.keys()))
            for enumName, apiEnum in apiEnumsForClass.iteritems():
                fullEnumName = '%s.%s' % (className, enumName)
                print fullEnumName
                # first, find any "bad" values - ie, values whose index is None
                # - and delete them
                badKeys = [
                    key for key, index in apiEnum._keys.iteritems()
                    if index is None
                ]
                if badKeys:
                    print "!!!!!!!!"
                    print "fixing bad keys in %s - %s" % (fullEnumName,
                                                          badKeys)
                    print "!!!!!!!!"
                    assert (None in apiEnum._values)
                    valueDocs = apiClassInfo[className]['enums'][enumName][
                        'valueDocs']
                    # Drop the bad keys from the docs and the key map, then
                    # remove the shared None entry from the value map.
                    for badKey in badKeys:
                        valueDocs.pop(badKey, None)
                        del apiEnum._keys[badKey]
                    del apiEnum._values[None]
                    if not apiEnum._keys:
                        print "enum empty after removing bad keys - deleting..."
                        del apiClassInfo[className]['enums'][enumName]
                        del apiClassInfo[className]['pymelEnums'][enumName]
                        deletedEnums.append(fullEnumName)
                        continue
                    else:
                        fixedKeys.append(fullEnumName)
                else:
                    assert (None not in apiEnum._values)
                try:
                    # NOTE(review): this assigns the regenerated enum to
                    # pyEnums[className], replacing the whole per-class dict
                    # each iteration -- pyEnumsForClass[enumName] looks like
                    # the intended target; confirm before changing.
                    pyEnums[className] = parser._apiEnumToPymelEnum(apiEnum)
                except Exception:
                    # Stash state into globals for post-mortem debugging.
                    globals()['rawCaches'] = rawCaches
                    globals()['apiEnum'] = apiEnum
                    raise
    # After making ALL changes, if there were NO errors, write them all out...
    for cachePath, raw in rawCaches.iteritems():
        pm.util.picklezip.dump(raw, unicode(cachePath))
import os

import pymel.core as pmc

# setpieces_path = pmc.Path(__file__).splitdrive()[0] / "04_workflow" / "scenes" / "assets" / "setpieces" / ""
# setpieces_path = pmc.Path("M:/04_workflow/scenes/assets/setpieces")
# Root folder holding one sub-folder per setpiece asset, resolved from the
# current Maya workspace.
setpieces_path = pmc.Path(pmc.workspace.getPath() +
                          "/scenes/assets/setpieces")
_norm = os.path.normpath


def parse():
    """Scan the setpieces folder and flag, per asset, which department
    REF files (MDL/RIG/SHD/LGT) exist on disk.

    NOTE(review): as visible here, asset_status_dict is never appended to
    asset_list and nothing is returned -- the body looks truncated;
    confirm against the full file before relying on this function.
    """
    asset_list = []
    asset_folders = os.listdir(setpieces_path)
    for asset_dir in asset_folders:
        # Default every department (plus DEV) to missing.
        asset_status_dict = {
            "DEV": False,
            "MDL": False,
            "RIG": False,
            "SHD": False,
            "LGT": False
        }
        asset_maya_dir = setpieces_path / asset_dir / "Maya"
        if os.path.isdir(asset_maya_dir):
            for department in ["MDL", "RIG", "SHD", "LGT"]:
                # A department counts as done when its *_<DEP>_REF.ma exists.
                asset_status_dict[department] = os.path.isfile(
                    asset_maya_dir / "{asset_name}_{dep}_REF.ma".format(
                        asset_name=asset_dir, dep=department))
def __init__(self, assetName, publishPath):
    """Create an XGen publish helper for *assetName*, publishing into
    *publishPath*."""
    super(B1XGenPublish, self).__init__(assetName)
    # Normalise the publish location to a pymel Path up front.
    self.publishPath = pm.Path(publishPath)
def __init__(self, assetName, sourceDirPath):
    """Create an XGen rebuild helper for *assetName*, reading from
    *sourceDirPath*."""
    super(B1XGenRebuild, self).__init__(assetName)
    # Normalise the source location to a pymel Path up front.
    self.sourceDirPath = pm.Path(sourceDirPath)
def process(self, context):
    """Store the base names of all loaded plug-ins on the publish
    context under 'LoadedPlugins'."""
    plugin_paths = pm.pluginInfo(q=True, lsp=True)
    loaded = [pm.Path(p).basename().name for p in plugin_paths]
    context.data['LoadedPlugins'] = loaded