def setupCGMScriptPaths(): thisFile = cgmPath.Path(__file__) #thisPath = os.sep.join(__file__.split(os.sep)[:-1]) thisPath = thisFile.up().osPath() mayaScriptPaths = map( cgmPath.Path, maya.mel.eval('getenv MAYA_SCRIPT_PATH').split(os.pathsep)) mayaScriptPathsSet = set(mayaScriptPaths) _paths = [ os.path.join('cgm', 'mel', 'zooPy'), os.path.join('cgm', 'mel'), os.path.join('cgm', 'images'), os.path.join('cgm', 'lib', 'zoo'), os.path.join('cgm', 'lib', 'zoo', 'zooMel'), os.path.join('cgm', 'lib', 'zoo', 'zooPy'), os.path.join('cgm', 'core', 'mel'), 'Red9' ] for path in _paths: fullPath = cgmPath.Path(os.path.join(thisPath, path)) if fullPath not in mayaScriptPathsSet: log.info( "setupCGMScriptPaths>> Path not found. Appending: {0}".format( fullPath)) mayaScriptPaths.append(cgmPath.Path(fullPath.asFriendly())) mayaScriptPaths.extend(fullPath.dirs(recursive=True)) mayaScriptPaths = mUI.removeDupes(mayaScriptPaths) newScriptPath = os.pathsep.join([p for p in mayaScriptPaths]) #for p in mayaScriptPaths: #print ("{0} >> {1}".format(p,p.unresolved())) maya.mel.eval('putenv MAYA_SCRIPT_PATH "%s"' % newScriptPath)
def setupContributorPaths():
    """Append cgm contributor script folders to sys.path when absent."""
    try:
        _root = cgmPath.Path(__file__).up().osPath()
        #thisPath = os.sep.join(__file__.split(os.sep)[:-1])
        #'lib/zoo/zooMel','lib/zoo/zooPy','lib/zoo/zooPyMaya','lib/bo','lib/ml':
        _folders = ['mel',
                    'images',
                    os.path.join('lib', 'zoo'),
                    os.path.join('core', 'mel'),
                    os.path.join('lib', 'zoo', 'zooPy'),
                    os.path.join('lib', 'zoo', 'zooPyMaya'),
                    os.path.join('lib', 'zoo', 'zooMel'),
                    os.path.join('lib', 'bo'),
                    os.path.join('lib', 'ml')]
        for _folder in _folders:
            _full = os.path.join(_root, _folder)
            if _full in sys.path:
                continue
            try:
                sys.path.append("%s" % _full)
            except:
                print('%s Failed to append' % _full)
    except Exception as err:
        raise Exception("setupContributorPaths FAILURE || {0}".format(err))
def main(**kwargs): v = kwargs.get('verbosity', 2) #cgm.core._reload() sceneSetup() suite = unittest.TestSuite() loader = unittest.TestLoader() tests = loader.discover(PATH.Path(__file__).up()) testRunner = unittest.runner.TextTestRunner(verbosity=v) for t in tests: for t2 in t: for t3 in t2: print t3 print t3._testMethodName for k in t3.__dict__.keys(): print k #exec("reload({0})".format(t3)) return #try: testRunner.run(tests) """
def test_queries(self):
    """Sanity-check the cgm package dir and cgmToolbox.py via Path queries."""
    import cgm
    _dir = PATH.Path(cgm.__path__[0])
    # BUGFIX: _file was the plain str returned by os.path.join, but
    # .isFile() and .asFile() below are Path methods, so the original
    # raised AttributeError - wrap the joined path in PATH.Path.
    _file = PATH.Path(os.path.join(_dir.up(), 'cgmToolbox.py'))
    self.assertEquals(os.path.exists(_dir), True)
    self.assertEquals(os.path.exists(_file), True)
    self.assertEquals(_file.isFile(), True)
    self.assertEquals(_file.asFile().endswith('cgmToolbox.py'), True)
def setupCGMPlugins():
    """Ensure cgm's plugin folder is on MAYA_PLUG_IN_PATH."""
    _root = cgmPath.Path(__file__).up().osPath()

    _rawEnv = maya.mel.eval('getenv MAYA_PLUG_IN_PATH;')
    _plugPaths = map(cgmPath.Path, _rawEnv.split(os.pathsep))
    _known = set(_plugPaths)

    #cgmPyPath = thisPath / 'cgm/plugins'
    _pluginDir = cgmPath.Path(os.path.join(_root, 'cgm', 'plugins'))
    if _pluginDir not in _known:
        log.info(
            "setupCGMPlugins>> cgmPyPath not found. Appending: {0}".format(
                _pluginDir))
        _plugPaths.append(_pluginDir)

    _plugPaths = mUI.removeDupes(_plugPaths)
    _newEnv = os.pathsep.join([p for p in _plugPaths])
    for p in _plugPaths:
        print(p)
    maya.mel.eval('putenv MAYA_PLUG_IN_PATH "%s";' % _newEnv)
def getUserSetupFile(self):
    """Locate userSetup files on the Maya script path.

    Returns a (pyUserSetup, melUserSetup) tuple. The python lookup is
    currently disabled, so the first item is always None.
    """
    pyUserSetup = None
    melUserSetup = None
    # Disabled py lookup (kept for reference):
    #try:
    #    pyUserSetup = cgmPath.Path(cgmPath.findInPyPath('userSetup.py'))#findInPyPath)
    #    log.info("Py user file is '%s'"%pyUserSetup)
    #except:
    #    log.info('No py user setup')
    try:
        _found = cgmPath.findFirstInEnv('userSetup.mel', 'MAYA_SCRIPT_PATH')
        melUserSetup = cgmPath.Path(_found)
        log.info("Mel user file is '%s'" % melUserSetup)
    except:
        log.info('No mel user setup')
    return pyUserSetup, melUserSetup
def returnPyFilesFromFolder():
    """Return module basenames for .py files beside this file.

    Skips any filename containing '__'; returns False when nothing found.
    """
    import os
    _here = cgmPath.Path(__file__).up()
    _names = [f.split('.')[0]
              for f in find_files(_here, '*.py')
              if '__' not in f]
    if _names:
        return _names
    return False
def mainRunner(**kwargs):
    """Reload cgm, set up the scene, then discover and run every test
    found below this file's directory."""
    v = kwargs.get('verbosity', 2)  # read but not passed to the runner
    cgm.core._reload()
    sceneSetup()
    suite = unittest.TestSuite()
    #maya.standalone.initialize()
    loader = unittest.TestLoader()
    discovered = loader.discover(PATH.Path(__file__).up())
    runner = unittest.runner.TextTestRunner()
    try:
        runner.run(discovered)
    except Exception as err:
        for arg in err.args:
            log.error(arg)
        raise Exception(err)
def process_blocks_rig(f=None, blocks=None, postProcesses=1, **kws):
    """Open a rig-block file, rig every master block below it, then run the
    kws-gated post-process cleanup passes, timing each one.

    :parameters:
        f(str) - maya file to open and process
        blocks - when falsy, all master cgmRigBlocks in the opened file
                 are processed (no other handling is implemented)
        postProcesses(bool/int) - run the cleanup passes below
        **kws - per-pass toggles: mirrorVerify, gatherSpaceDrivers, qss,
                deleteUnusedShaders, deleteCGMLightGroup, proxyMesh,
                puppetMesh, hideVisSub, hideJointAxis, removeRefs, ihi,
                connectRig, controllerVerify, blocksGather, blocksParent,
                worldGather, deleteUnusedLayers

    :raises: ValueError if the file is invalid or any module fails to rig
    """
    _str_func = 'process_blocks_rig'
    #cgmGEN.log_start(_str_func)

    mFile = PATHS.Path(f)
    if not mFile.exists():
        raise ValueError, "Invalid file: {0}".format(f)

    _path = mFile.asFriendly()
    log.info("Good Path: {0}".format(_path))
    """
    if 'template' in _path:
        _newPath = _path.replace('template','build')
    else:"""

    # Build a <name>_BUILD.<ext> path beside the source file
    _name = mFile.name()
    _d = mFile.up().asFriendly()
    log.debug(cgmGEN.logString_msg(_str_func, _name))
    _newPath = os.path.join(_d,
                            _name + '_BUILD.{0}'.format(mFile.getExtension()))
    log.info("New Path: {0}".format(_newPath))

    #cgmGEN.logString_msg(_str_func,'File Open...')
    mc.file(_path, open=1, f=1)

    #cgmGEN.logString_msg(_str_func,'Process...')
    T1 = time.time()
    get_time = cgmGEN.get_timeString
    try:
        if not blocks:
            #cgmGEN.logString_sub(_str_func,'No blocks arg')
            ml_masters = r9Meta.getMetaNodes(mTypes='cgmRigBlock',
                                             nTypes=['transform', 'network'],
                                             mAttrs='blockType=master')
            for mMaster in ml_masters:
                #cgmGEN.logString_sub(_str_func,mMaster)
                # Push the whole hierarchy below this master to rig state
                RIGBLOCKS.contextual_rigBlock_method_call(
                    mMaster, 'below', 'atUtils', 'changeState', 'rig',
                    forceNew=False)

                ml_context = BLOCKGEN.get_rigBlock_heirarchy_context(
                    mMaster, 'below', True, False)

                # Anything not at state 4 after the call failed to rig
                l_fails = []
                for mSubBlock in ml_context:
                    _state = mSubBlock.getState(False)
                    if _state != 4:
                        l_fails.append(mSubBlock)

                if l_fails:
                    log.info('The following failed...')
                    pprint.pprint(l_fails)
                    raise ValueError, "Modules failed to rig: {0}".format(
                        l_fails)

                log.info("Begin Rig Prep cleanup...")
                '''
                Begin Rig Prep process
                '''
                mPuppet = mMaster.moduleTarget  #...when mBlock is your masterBlock
                #str(datetime.timedelta(seconds=v))
                if postProcesses:
                    # Each pass is kws-gated and timed into l_timeReports
                    l_timeReports = []
                    if kws.get('mirrorVerify', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('mirror_verify...')
                        t1 = time.clock()
                        mPuppet.atUtils('mirror_verify', 1)
                        t2 = time.clock()
                        l_timeReports.append(
                            ['mirrorVerify', get_time(t2 - t1)])

                    if kws.get('gatherSpaceDrivers', 1):
                        log.info('collect worldSpace...')
                        t1 = time.clock()
                        mPuppet.atUtils('collect_worldSpaceObjects')
                        t2 = time.clock()
                        l_timeReports.append(
                            ['gatherSpaceDrivers', get_time(t2 - t1)])

                    if kws.get('qss', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('qss...')
                        t1 = time.clock()
                        mPuppet.atUtils('qss_verify',
                                        puppetSet=1,
                                        bakeSet=1,
                                        deleteSet=1,
                                        exportSet=1)
                        t2 = time.clock()
                        l_timeReports.append(['qss', get_time(t2 - t1)])

                    if kws.get('deleteUnusedShaders'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Delete unused shaders...')
                        t1 = time.clock()
                        MRSPOST.shaders_getUnused(delete=True)
                        t2 = time.clock()
                        l_timeReports.append(
                            ['deleteUnusedShaders', get_time(t2 - t1)])

                    if kws.get('deleteCGMLightGroup'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Delete cgm shaders...')
                        t1 = time.clock()
                        try:
                            mc.delete('cgmLightGroup')
                        except:
                            pass
                        t2 = time.clock()
                        # NOTE(review): report label duplicates the
                        # 'deleteUnusedShaders' entry above - probably
                        # meant 'deleteCGMLightGroup'
                        l_timeReports.append(
                            ['deleteUnusedShaders', get_time(t2 - t1)])

                    if kws.get('proxyMesh', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('proxyMesh...')
                        t1 = time.clock()
                        mPuppet.atUtils('proxyMesh_verify', 1)
                        t2 = time.clock()
                        l_timeReports.append(['proxyMesh', get_time(t2 - t1)])

                    if kws.get('puppetMesh', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('puppetMesh...')
                        t1 = time.clock()
                        mPuppet.atUtils('puppetMesh_create', **{
                            'unified': True,
                            'skin': True
                        })
                        t2 = time.clock()
                        l_timeReports.append(['puppetMesh', get_time(t2 - t1)])

                    if kws.get('hideVisSub', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('hideVisSub...')
                        t1 = time.clock()
                        # Skip index 0 (the master block itself)
                        for i, mSubBlock in enumerate(ml_context):
                            if not i:
                                continue
                            try:
                                mSubBlock.moduleTarget.rigNull.settings.visSub = 0
                            except Exception, err:
                                log.error(mSubBlock)
                                log.error(err)
                        t2 = time.clock()
                        l_timeReports.append(['hideVisSub', get_time(t2 - t1)])

                    if kws.get('hideJointAxis'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Hide axis on all joints...')
                        t1 = time.clock()
                        for mObj in cgmMeta.asMeta(mc.ls(type='joint')):
                            mObj.displayLocalAxis = 0
                        t2 = time.clock()
                        l_timeReports.append(
                            ['hideJointAxis', get_time(t2 - t1)])

                    if kws.get('removeRefs'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Remove Refs...')
                        t1 = time.clock()
                        MRSPOST.refs_remove()
                        t2 = time.clock()
                        l_timeReports.append(['removeRefs', get_time(t2 - t1)])

                    if kws.get('ihi', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('ihi...')
                        t1 = time.clock()
                        # Hide rig nodes from the channel box history
                        mPuppet.atUtils('rigNodes_setAttr', 'ihi', 0)
                        t2 = time.clock()
                        l_timeReports.append(['ihi', get_time(t2 - t1)])

                    if kws.get('connectRig', 1):
                        print(cgmGEN._str_hardBreak)
                        log.info('rig connect...')
                        t1 = time.clock()
                        mPuppet.atUtils('rig_connectAll')
                        t2 = time.clock()
                        l_timeReports.append(['connectRig', get_time(t2 - t1)])
                        log.info('...')

                    if kws.get('controllerVerify', 1):
                        print(cgmGEN._str_hardBreak)
                        # controller tagging only exists in Maya 2018+
                        if cgmGEN.__mayaVersion__ >= 2018:
                            log.info('controller_verify...')
                            t1 = time.clock()
                            mPuppet.atUtils('controller_verify')
                            log.info('...')
                            t2 = time.clock()
                            l_timeReports.append(
                                ['controllerVerify', get_time(t2 - t1)])

                    if kws.get('blocksGather', 1):
                        print(cgmGEN._str_hardBreak)
                        t1 = time.clock()
                        mGrp = BUILDERUTILS.gather_rigBlocks()
                        if kws.get('blocksParent', 1):
                            mGrp.p_parent = mPuppet
                            mGrp.v = False
                        t2 = time.clock()
                        l_timeReports.append(
                            ['blocksGather', get_time(t2 - t1)])

                    if kws.get('worldGather'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Gathering world dags...')
                        t1 = time.clock()
                        MRSPOST.gather_worldStuff()
                        t2 = time.clock()
                        l_timeReports.append(
                            ['worldGather', get_time(t2 - t1)])

                    if kws.get('deleteUnusedLayers'):
                        print(cgmGEN._str_hardBreak)
                        log.info('Deleting Unused Layers...')
                        t1 = time.clock()
                        MRSPOST.layers_getUnused(delete=True)
                        t2 = time.clock()
                        l_timeReports.append(
                            ['deleteUnusedLayers', get_time(t2 - t1)])

                    # Timing summary for this master block
                    print(cgmGEN._str_hardBreak)
                    print(cgmGEN.logString_sub("Batch", 'Times'))
                    for i, pair_time in enumerate(l_timeReports):
                        print(" {0} | ['{1}'] | {2} ".format(
                            i, pair_time[0], pair_time[1]))
    except Exception, err:
        log.error(err)
def create_MRS_batchFile(f=None,
                         blocks=[None],
                         process=False,
                         postProcesses=True,
                         deleteAfterProcess=False,
                         gatherOptionVars=True):
    """Write a <scene>_MRSbatch.py script per scene file and optionally run
    each through mayapy in a new console.

    The generated script initializes maya standalone and calls
    MRSBATCH.process_blocks_rig on the scene.

    :parameters:
        f(str/list) - scene file(s); falls back to the current scene
        blocks - NOTE(review): unused in this body
        process(bool) - launch each written batch file via mayapy
        postProcesses(bool) - forwarded into the generated script
        deleteAfterProcess(bool) - remove the batch file after launching
        gatherOptionVars - NOTE(review): unused in this body
    """
    _str_func = 'create_MRS_batchFile'
    cgmGEN.log_start(_str_func)

    # Lines written before the process call in the generated script
    l_pre = [
        'import maya', 'from maya import standalone',
        'standalone.initialize()', 'from maya.api import OpenMaya as om2',
        'om2.MGlobal.displayInfo("Begin")', 'import maya.cmds as mc',
        'mc.loadPlugin("matrixNodes")',
        'import cgm.core.mrs.lib.batch_utils as MRSBATCH'
    ]
    # Trailing lines: closes the generated try with a keypress pause
    l_post = [
        'except:', '    import msvcrt#...waits for key',
        '    om2.MGlobal.displayInfo("Hit a key to continue")',
        '    msvcrt.getch()', 'om2.MGlobal.displayInfo("End")',
        'standalone.uninitialize()'
    ]

    log.debug(cgmGEN.logString_sub(_str_func, "Checks ..."))
    l_paths = []
    l_dirs = []
    l_check = VALID.listArg(f)
    l_mFiles = []
    l_batch = []
    if not l_check:
        log.debug(
            cgmGEN.logString_msg(_str_func, "No file passed. Using current"))
        l_check = [mc.file(q=True, sn=True)]

    for f in l_check:
        mFile = PATHS.Path(f)
        if not mFile.exists():
            log.error("Invalid file: {0}".format(f))
            continue
        log.debug(cgmGEN.logString_sub(_str_func))
        _path = mFile.asFriendly()
        l_paths.append(_path)
        _name = mFile.name()
        _d = mFile.up().asFriendly()
        log.debug(cgmGEN.logString_msg(_str_func, _name))

        # Batch script lands beside the scene file
        _batchPath = os.path.join(_d, _name + '_MRSbatch.py')
        log.debug(cgmGEN.logString_msg(_str_func, "batchPath: " + _batchPath))
        log.debug(cgmGEN.logString_msg(_str_func, "template: " + _path))
        mTar = PATHS.Path(_batchPath)

        # The actual work line of the generated script
        _l = "try:MRSBATCH.process_blocks_rig('{0}',postProcesses = {1})".format(
            mFile.asString(), postProcesses)

        if mTar.getWritable():
            if mTar.exists():
                # Remove the stale file; we open in append mode below
                os.remove(mTar)
            log.warning("Writing file: {0}".format(_batchPath))
            with open(_batchPath, 'a') as TMP:
                for l in l_pre + [_l] + l_post:
                    TMP.write('{0}\n'.format(l))
            l_batch.append(mTar)
        else:
            log.warning("Not writable: {0}".format(_batchPath))

    if process:
        log.debug(cgmGEN.logString_sub(_str_func, "Processing ..."))
        for f in l_batch:
            log.warning("Processing file: {0}".format(f.asFriendly()))
            #subprocess.call([sys.argv[0].replace("maya.exe","mayapy.exe"),f.asFriendly()])
            # Launch mayapy interactively (-i) in its own console;
            # assumes sys.argv[0] is maya.exe (Windows) - TODO confirm
            subprocess.Popen(
                [
                    sys.argv[0].replace("maya.exe", "mayapy.exe"), '-i',
                    f.asFriendly()
                ],
                creationflags=subprocess.CREATE_NEW_CONSOLE)  # env=my_env
            if deleteAfterProcess:
                os.remove(f)
_res = [] for c in _l: _res.append(mc.parent(c, world=True)[0]) mc.delete(_name) return _res def verify_dir_fromDict(root=None, d={}, case=None): l_keys = [] d_toDo = {} _str_func = 'verify_dir_fromDict' log.info("|{0}| >>...".format(_str_func)) mRoot = cgmPath.Path(root) if not mRoot.exists(): log.error("Invalid root: {0}".format(root)) return False _pathRoot = mRoot.asFriendly() log.info("|{0}| >> root: {1}".format(_str_func, _pathRoot)) for k, l in d.iteritems(): #log.info("|{0}| >> k: {1}".format(_str_func,k)) if case == 'lower': k = k.lower() mDir = cgmPath.Path(os.path.join(mRoot, k)) if not mDir.exists():
def process_blocks_rig(f = None, blocks = None, postProcesses = False):
    """Open a rig-block file and rig every master block below it.

    Simpler duplicate of the **kws variant of process_blocks_rig elsewhere
    in this file: post processes here are a fixed, untimed sequence.

    :parameters:
        f(str) - maya file to open and process
        blocks - when falsy, all master cgmRigBlocks are processed
        postProcesses(bool) - run the fixed cleanup sequence

    :raises: ValueError if the file is invalid or any module fails to rig
    """
    _str_func = 'process_blocks_rig'
    #cgmGEN.log_start(_str_func)

    mFile = PATHS.Path(f)
    if not mFile.exists():
        raise ValueError,"Invalid file: {0}".format(f)

    _path = mFile.asFriendly()
    log.info("Good Path: {0}".format(_path))
    """
    if 'template' in _path:
        _newPath = _path.replace('template','build')
    else:"""

    # Build a <name>_BUILD.<ext> path beside the source file
    _name = mFile.name()
    _d = mFile.up().asFriendly()
    log.debug(cgmGEN.logString_msg(_str_func,_name))
    _newPath = os.path.join(_d,_name+'_BUILD.{0}'.format(mFile.getExtension()))
    log.info("New Path: {0}".format(_newPath))

    #cgmGEN.logString_msg(_str_func,'File Open...')
    mc.file(_path, open = 1, f = 1)

    #cgmGEN.logString_msg(_str_func,'Process...')
    t1 = time.time()
    try:
        if not blocks:
            #cgmGEN.logString_sub(_str_func,'No blocks arg')
            ml_masters = r9Meta.getMetaNodes(mTypes = 'cgmRigBlock',
                                             nTypes=['transform','network'],
                                             mAttrs='blockType=master')
            for mMaster in ml_masters:
                #cgmGEN.logString_sub(_str_func,mMaster)
                # Push the whole hierarchy below this master to rig state
                RIGBLOCKS.contextual_rigBlock_method_call(mMaster, 'below',
                                                          'atUtils','changeState','rig',forceNew=False)

                ml_context = BLOCKGEN.get_rigBlock_heirarchy_context(mMaster,'below',True,False)

                # Anything not at state 4 after the call failed to rig
                l_fails = []
                for mSubBlock in ml_context:
                    _state = mSubBlock.getState(False)
                    if _state != 4:
                        l_fails.append(mSubBlock)

                if l_fails:
                    log.info('The following failed...')
                    pprint.pprint(l_fails)
                    raise ValueError,"Modules failed to rig: {0}".format(l_fails)

                log.info("Begin Rig Prep cleanup...")
                '''
                Begin Rig Prep process
                '''
                mPuppet = mMaster.moduleTarget#...when mBlock is your masterBlock
                if postProcesses:
                    # Fixed post-rig cleanup sequence
                    log.info('mirror_verify...')
                    mPuppet.atUtils('mirror_verify')
                    log.info('collect worldSpace...')
                    mPuppet.atUtils('collect_worldSpaceObjects')
                    log.info('qss...')
                    mPuppet.atUtils('qss_verify',puppetSet=1,bakeSet=1,deleteSet=1,exportSet=1)
                    log.info('proxyMesh...')
                    mPuppet.atUtils('proxyMesh_verify')
                    log.info('ihi...')
                    # Hide rig nodes from the channel box history
                    mPuppet.atUtils('rigNodes_setAttr','ihi',0)
                    log.info('rig connect...')
                    mPuppet.atUtils('rig_connectAll')
                    log.info('...')
                    # controller tagging only exists in Maya 2018+
                    if cgmGEN.__mayaVersion__ >= 2018:
                        log.info('controller_verify...')
                        mPuppet.atUtils('controller_verify')
                        log.info('...')
    except Exception,err:
        log.error(err)
def test_walk(self):
    """Smoke-test that os.walk can traverse the cgm.core package dir."""
    import cgm.core as CORE
    _root = PATH.Path(CORE.__path__[0])
    for _entry in os.walk(_root, True, True):
        pass
def walk_below_dir(arg=None,
                   tests=None,
                   uiStrings=True,
                   fileTest=None,
                   fileCount=True,
                   hardCap=20,
                   skipRoot=True,
                   l_mask=['.svn', 'pristine']):
    """
    Walk directory for pertinent info

    :parameters:
        arg(str) - root directory to walk
        tests - NOTE(review): unused in this body
        uiStrings(bool) - also build indented display strings per dir
        fileTest(dict) - optional {'endsWith': ext} filter for file counts
        fileCount(bool) - append file counts to ui strings
        hardCap(int) - stop walking after this many directories
        skipRoot(bool) - do not record the root dir itself
        l_mask(list) - dir names to skip entirely

    :returns
        _d_dir(dict) - per-dir data keyed by '|||'-joined relative path
        _d_levels(dict) - depth -> list of keys
        _l_keys(list) - keys in walk order
        (False when arg doesn't exist)
    """
    _str_func = 'walk_below'
    _b_debug = log.isEnabledFor(logging.DEBUG)
    _path = PATHS.Path(arg)
    if not _path.exists():
        log.debug(
            cgmGEN.logString_msg(_str_func,
                                 "Path doesn't exists: {0}".format(arg)))
        return False

    _l_duplicates = []
    _l_unbuildable = []
    _base = _path.split()[-1]
    #_d_files = {}
    #_d_modules = {}
    #_d_import = {}
    #_d_categories = {}
    _d_levels = {}
    _d_dir = {}
    _l_keys = []

    if uiStrings:
        log.debug("|{0}| >> uiStrings on".format(_str_func))
        _d_uiStrings = {}
        _l_uiStrings = []

    log.debug("|{0}| >> Checking base: {1} | path: {2}".format(
        _str_func, _base, _path))

    _i = 0
    _rootKey = None
    for root, dirs, files in os.walk(_path, True, None):
        if hardCap and _i > hardCap:
            log.warning(
                cgmGEN.logString_msg(_str_func,
                                     "hit cap...{0}".format(hardCap)))
            break
        _rootPath = PATHS.Path(root)
        _split = _rootPath.split()
        _subRoot = _split[-1]
        # Path tokens from the walk base downward; depth is relative to it
        _splitUp = _split[_split.index(_base):]
        _depth = len(_splitUp) - 1

        if _path == root:
            _rootKey = _split[-1]
            if skipRoot:
                log.debug(cgmGEN.logString_msg(_str_func, "Skipping root"))
                continue

        log.debug(cgmGEN.logString_sub(_str_func, _subRoot))
        # Hidden dirs and masked names are skipped outright
        if _subRoot[0] in ['.']:
            log.debug(
                cgmGEN.logString_msg(_str_func,
                                     "Skipping...{0}".format(_subRoot)))
            continue
        elif _subRoot in l_mask:
            log.debug(
                cgmGEN.logString_msg(_str_func,
                                     "Masked...{0}".format(_subRoot)))
            continue
        if l_mask:
            # Also skip dirs that live anywhere below a masked name
            _break = False
            for v in l_mask:
                if v in _splitUp:
                    log.debug(
                        cgmGEN.logString_msg(_str_func,
                                             "Masked...{0}".format(_rootPath)))
                    _break = True
                    continue
            if _break:
                continue

        log.debug("|{0}| >> On subroot: {1} | path: {2}".format(
            _str_func, _subRoot, root))
        #log.debug("|{0}| >> On split up: {1}".format(_str_func,_splitUp))
        #log.debug("|{0}| >> On split: {1}".format(_str_func,_split))

        _splitRoot = _split[_split.index(_rootKey) + 1:]
        _key = '|||'.join(_splitRoot)  #_rootPath.asString()
        _l_keys.append(_key)
        _d_dir[_key] = {
            'depth': _depth,
            'split': _split,
            'splitRoot': _splitRoot,
            'token': _subRoot,
            'pyString': _rootPath.asFriendly(),
            'raw': root,
            'mPath': _rootPath,
            'dir': dirs,
            'index': _i,
            'key': _key,
            'files': files
        }

        if uiStrings:
            # Indented display string for ui lists
            if _depth > 1:
                _Root = _splitRoot[:-1]
                _Root.reverse()
                _uiString = ' ' * (_depth) + " {0} ".format(
                    _subRoot) + ' \\\\' + '.'.join(_Root)
                #_reverseRoot = _splitRoot[:-1]
                #_reverseRoot.reverse()
                #_uiString = ' '*(_depth) + '>' + '--' + '{0}'.format(_subRoot) + " {0}".format('.'.join(_reverseRoot))
            else:
                _uiString = " || " + _subRoot
            if files:
                # NOTE(review): _cnt can be unbound here when fileTest has
                # no 'endsWith' and fileCount is False - TODO confirm
                if fileTest and fileTest.get('endsWith'):
                    _cnt = 0
                    for f in files:
                        if f.endswith(fileTest.get('endsWith')):
                            _cnt += 1
                elif fileCount:
                    _cnt = len(files)
                _uiString = _uiString + ' ({0})'.format(_cnt)
            #if files:
            #    _uiString = _uiString + ' cnt: {0}'.format(len(files))
            #if _uiString in _l_uiStrings:
            #    _uiString = _uiString+ "[dup | {0}]".format(_i)
            _l_uiStrings.append(_uiString)
            _d_uiStrings[_i] = _uiString
            _d_dir[_key]['uiString'] = _uiString

        if not _d_levels.get(_depth):
            _d_levels[_depth] = []
        _d_levels[_depth].append(_key)
        _i += 1

    # Map each dir's child-dir names back to their own entry keys
    for k, d in _d_dir.iteritems():
        if d.get('dir'):
            d['tokensSub'] = {}
            for subD in d.get('dir'):
                for k, d2 in _d_dir.iteritems():
                    if d2.get('token') == subD:
                        d['tokensSub'][k] = subD

    if _b_debug:
        print(cgmGEN.logString_sub(_str_func, "Levels"))
        pprint.pprint(_d_levels)
        print(cgmGEN.logString_sub(_str_func, "Dat"))
        pprint.pprint(_d_dir)
        if uiStrings:
            print(cgmGEN.logString_sub(_str_func, 'Ui Strings'))
            #pprint.pprint(_d_uiStrings)
            for s in _l_uiStrings:
                print s

    if _l_duplicates and _b_debug:
        log.debug(cgmGEN._str_subLine)
        log.debug("|{0}| >> DUPLICATE ....".format(_str_func))
        for m in _l_duplicates:
            print(m)
        raise Exception, "Must resolve"
    #log.debug("|{0}| >> Found {1} modules under: {2}".format(_str_func,len(_d_files.keys()),_path))
    return _d_dir, _d_levels, _l_keys
def get_lsFromPath(str_path=None,
                   matchArg=None,
                   calledFrom=None,
                   removeInit=True,
                   **kwargs):
    """
    Return files or folders of a specific type from a given path

    :parameters:
        str_path | str
            The base file path; a file path is resolved to its parent dir
        matchArg | str or list
            Type of file or folder to be returned:
            'folder'/'dir' - child directories
            'maya files'/'maya' - .ma/.mb files
            list - patterns aggregated through find_files
            other strings - passed to find_files as a pattern
        calledFrom | str
            Optional caller name, used only for log formatting
        removeInit | bool
            NOTE(review): currently unused - prepReturn is never called

    :returns:
        result

    :raises:
        TypeError | if 'str_path' is not a string
        ValueError | if 'str_path' is a recognized dir path
        TypeError | if 'matchArg' is not a string
    """
    def prepReturn(result, removeInit):
        # Strip '__init__' entries from a listing.
        # FIX: the original removed items from `result` while iterating
        # that same list, which skips the element following each removal;
        # build a filtered copy instead. (Helper is currently unused.)
        return [r for r in result if '__init__' not in r]

    log.debug("get_lsFromPath str_path = {1} | matchArg={0}".format(
        matchArg, str_path))
    _str_funcRoot = 'get_lsFromPath'
    if calledFrom:
        _str_funcName = "{0}.{1}({2})".format(calledFrom, _str_funcRoot,
                                              matchArg)
    else:
        _str_funcName = "{0}({1})".format(_str_funcRoot, matchArg)
    result = None

    #>> Check the str_path
    if not isinstance(str_path, basestring):
        raise TypeError(
            'path must be string | str_path = {0}'.format(str_path))
    if os.path.isfile(str_path):
        # Passed a file - work from its parent directory instead
        str_path = cgmPath.Path(str_path).up()
        log.info("{0} >> passed file. using dir: {1}".format(
            _str_funcName, str_path))
    if not os.path.isdir(str_path):
        raise ValueError(
            'path must validate as os.path.isdir | str_path = {0}'.format(
                str_path))

    #try:#>> Check matchArg
    if matchArg is not None:
        if issubclass(type(matchArg), list):
            # A list of patterns - aggregate find_files results
            _res = []
            for a in matchArg:
                _res.extend(find_files(str_path, a))
            return _res
        elif not isinstance(matchArg, basestring):
            raise TypeError(
                'matchArg must be string | matchArg: {0}'.format(matchArg))

    if matchArg is None or matchArg in ['']:
        # No filter - raw directory listing
        return [name for name in os.listdir(str_path)]
    #if '*.' in matchArg:
    #l_buffer = matchArg.split('*')
    #return [ name for name in os.listdir(str_path) if name[-3:] == matchArg.split('*')[-1]]
    if matchArg.lower() in ['folder', 'dir']:
        return [
            name for name in os.listdir(str_path)
            if os.path.isdir(os.path.join(str_path, name))
        ]
    elif matchArg.lower() in ['maya files', 'maya']:
        return [
            name for name in os.listdir(str_path)
            if name[-3:] in ['.ma', '.mb']
        ]
    else:
        return find_files(str_path, matchArg)
    #raise NotImplementedError,'matchArg handler not in | matchArg: {0}'.format(matchArg)
    return result
def create_Scene_batchFile(dat=[],
                           batchFile=None,
                           process=True,
                           postProcesses=True,
                           deleteAfterProcess=False):
    """Write an mrsScene_batch.py script that feeds `dat` into
    Scene.BatchExport under maya standalone, optionally launching it
    through mayapy in a new console.

    :parameters:
        dat(list of dict) - export entries serialized into the script;
            'objs' and '*Path*' keys are written as lists, others as strings
        batchFile - NOTE(review): only `None` works; a non-None value skips
            the block that defines _batchPath/mPath_content, causing a
            NameError later - TODO confirm intent
        process(bool) - launch the written batch file via mayapy
        postProcesses - unused in the live path (only in dead code below)
        deleteAfterProcess(bool) - see NOTE at the os.remove call
    """
    _str_func = 'create_Scene_batchFile'
    cgmGEN.log_start(_str_func)

    if batchFile is None:
        # Resolve the current project's root/content dirs for the script
        var_project = cgmMeta.cgmOptionVar('cgmVar_projectCurrent',
                                           defaultValue='')
        mProject = PROJECT.data(filepath=var_project.value)
        d_paths = mProject.userPaths_get()

        mPath_root = PATHS.Path(d_paths['root'])
        if mPath_root.exists():
            log.debug('Root | : {0}'.format(mPath_root.asFriendly()))
        else:
            log.debug('Root | Invalid Path: {0}'.format(mPath_root))

        mPath_content = PATHS.Path(d_paths['content'])
        if os.path.exists(mPath_content):
            log.debug('Root | : {0}'.format(mPath_content))
        else:
            log.debug('Root | Invalid Path: {0}'.format(mPath_content))

        _batchPath = os.path.join(mPath_root.asFriendly(),
                                  'mrsScene_batch.py')
        log.debug("batchFile : {0}".format(_batchPath))

    # Lines written before the export call in the generated script
    l_pre = [
        'import maya', 'from maya import standalone',
        'standalone.initialize()', 'from cgm.core.mrs import Scene',
        'import maya.mel as mel', 'from maya.api import OpenMaya as om2',
        'om2.MGlobal.displayInfo("Begin")', 'import maya.cmds as mc',
        'mc.loadPlugin("fbxmaya")',
        'mc.workspace("{0}",openWorkspace=1)'.format(mPath_content),
        'import cgm.core.mrs.lib.batch_utils as MRSBATCH', ''
    ]
    # Trailing lines: closes the generated try with a keypress pause
    l_post = [
        'except Exception,err:', '    print err',
        '    import msvcrt#...waits for key',
        '    om2.MGlobal.displayInfo("Hit a key to continue")',
        '    msvcrt.getch()', '', 'om2.MGlobal.displayInfo("End")',
        'standalone.uninitialize()'
    ]

    log.debug(cgmGEN.logString_sub(_str_func, "Checks ..."))
    l_paths = []
    l_dirs = []
    #l_check = VALID.listArg(f)
    l_mFiles = []
    l_batch = []
    #if not l_check:
    #log.debug(cgmGEN.logString_msg(_str_func,"No file passed. Using current"))
    #l_check = [mc.file(q=True, sn=True)]

    # Serialize `dat` into python source lines for the generated script
    _dat = ['dat = [']
    for d2 in dat:
        _dat.append('{')
        for k, d in d2.iteritems():
            if k == 'objs':
                if d:
                    _l_tmp = ','.join("'{0}'".format(o) for o in d)
                    _dat.append('"{0}" : [{1}],'.format(k, _l_tmp))
                else:
                    _dat.append("'objs' : [ ],")
            elif 'Path' in k:
                # Path values are stored as lists of tokens
                _l_tmp = ','.join("'{0}'".format(o) for o in d)
                _dat.append('"{0}" : [{1}],'.format(k, _l_tmp))
            else:
                _dat.append('"{0}" : "{1}",'.format(k, d))
        _dat.append('},')
    _dat.append(']')

    mTar = PATHS.Path(_batchPath)
    # The actual work line of the generated script
    _l = "try:Scene.BatchExport(dat)"
    #_l = "try:MRSBATCH.process_blocks_rig('{0}',postProcesses = {1})".format(mFile.asString(),postProcesses)

    if mTar.getWritable():
        if mTar.exists():
            # Remove the stale file; we open in append mode below
            os.remove(mTar)
        log.warning("Writing file: {0}".format(_batchPath))
        with open(_batchPath, 'a') as TMP:
            for l in l_pre + _dat + [_l] + l_post:
                TMP.write('{0}\n'.format(l))
        l_batch.append(mTar)
    else:
        log.warning("Not writable: {0}".format(_batchPath))

    if process:
        log.debug(cgmGEN.logString_sub(_str_func, "Processing ..."))
        log.warning("Processing file: {0}".format(mTar.asFriendly()))
        #subprocess.call([sys.argv[0].replace("maya.exe","mayapy.exe"),f.asFriendly()])
        # Launch mayapy interactively (-i) in its own console;
        # assumes sys.argv[0] is maya.exe (Windows) - TODO confirm
        subprocess.Popen(
            [
                sys.argv[0].replace("maya.exe", "mayapy.exe"), '-i',
                mTar.asFriendly()
            ],
            creationflags=subprocess.CREATE_NEW_CONSOLE)  # env=my_env
        if deleteAfterProcess:
            # NOTE(review): `f` is never defined on this code path ->
            # NameError; probably meant mTar - TODO confirm
            os.remove(f)
    return
    # NOTE(review): everything below is unreachable (after `return`) and
    # references l_check, which is commented out above - dead code kept
    # for reference.
    for f in l_check:
        mFile = PATHS.Path(f)
        if not mFile.exists():
            log.error("Invalid file: {0}".format(f))
            continue
        log.debug(cgmGEN.logString_sub(_str_func))
        _path = mFile.asFriendly()
        l_paths.append(_path)
        _name = mFile.name()
        _d = mFile.up().asFriendly()
        log.debug(cgmGEN.logString_msg(_str_func, _name))
        _batchPath = os.path.join(_d, _name + '_batch.py')
        log.debug(cgmGEN.logString_msg(_str_func, "batchPath: " + _batchPath))
        log.debug(cgmGEN.logString_msg(_str_func, "template: " + _path))
        mTar = PATHS.Path(_batchPath)
        _l = "try:MRSBATCH.process_blocks_rig('{0}',postProcesses = {1})".format(
            mFile.asString(), postProcesses)
        if mTar.getWritable():
            if mTar.exists():
                os.remove(mTar)
            log.warning("Writing file: {0}".format(_batchPath))
            with open(_batchPath, 'a') as TMP:
                for l in l_pre + [_l] + l_post:
                    TMP.write('{0}\n'.format(l))
            l_batch.append(mTar)
        else:
            log.warning("Not writable: {0}".format(_batchPath))
    if process:
        log.debug(cgmGEN.logString_sub(_str_func, "Processing ..."))
        for f in l_batch:
            log.warning("Processing file: {0}".format(f.asFriendly()))
            #subprocess.call([sys.argv[0].replace("maya.exe","mayapy.exe"),f.asFriendly()])
            subprocess.Popen(
                [
                    sys.argv[0].replace("maya.exe", "mayapy.exe"), '-i',
                    f.asFriendly()
                ],
                creationflags=subprocess.CREATE_NEW_CONSOLE)  # env=my_env
            if deleteAfterProcess:
                os.remove(f)
def find_tmpFiles(path=None,
                  level=None,
                  cleanFiles=False,
                  endMatch=['_batch.py', '_MRSbatch.py'],
                  l_mask=['max', 'mab', 'markdown', 'mapping']):
    """
    Walk below a directory collecting temp/batch files (generated batch
    scripts and maya files with malformed extensions), optionally deleting
    them.

    :parameters
        path(str) - root to walk
        level(int) - Depth to search. None means everything
        cleanFiles(bool) - delete every collected file after the walk
        endMatch(list) - filename suffixes that mark a temp file
        l_mask(list) - extensions to ignore entirely

    :returns
        None (collection dicts are local; only the cleanup side effect
        is externally visible)
    """
    _str_func = 'find_tmpFiles'
    _b_debug = log.isEnabledFor(logging.DEBUG)

    _path = PATH.Path(path)
    _l_subs = []
    _d_files = {}
    _d_names = {}
    _l_duplicates = []
    _l_errors = []
    _base = _path.split()[-1]
    _l_ordered_list = []
    _l_weirdFiles = []
    _d_weirdFiles = {}

    log.debug("|{0}| >> Checking base: {1} | path: {2}".format(
        _str_func, _base, path))

    _i = 0
    for root, dirs, files in os.walk(path, True, None):
        # Parse all the files of given path and reload python modules
        _mRoot = PATH.Path(root)
        _split = _mRoot.split()
        _subRoot = _split[-1]
        _splitUp = _split[_split.index(_base):]

        log.debug("|{0}| >> On subroot: {1} | path: {2}".format(
            _str_func, _subRoot, root))
        log.debug("|{0}| >> On split: {1}".format(_str_func, _splitUp))

        _mod = False
        _l_sub = []
        for f in files:
            key = False
            _pycd = False
            _long = os.path.join(root, f)
            if '.' not in f:
                continue
            _dot_split = f.split('.')
            _extension = _dot_split[-1]
            _pre = _dot_split[0]
            if _extension in l_mask:
                continue
            if len(_extension) > 3:
                # Over-long extension starting ma/mb: a mangled maya file
                if _extension.startswith('ma') or _extension.startswith('mb'):
                    _l_weirdFiles.append(f)
                    _d_weirdFiles[f] = os.path.join(root, f)
                    continue
            for s in endMatch:
                if f.endswith(s):
                    _l_weirdFiles.append(f)
                    _d_weirdFiles[f] = os.path.join(root, f)
                    # NOTE(review): this continue only advances the inner
                    # endMatch loop, not the file loop - TODO confirm intent
                    continue

        if level is not None and _i >= level:
            break
        _i += 1

    if cleanFiles:
        # NOTE(review): _path here shadows the PATH.Path defined above
        for f, _path in _d_weirdFiles.iteritems():
            try:
                log.warning("Remove: {0}".format(_path))
                os.remove(_path)
            except WindowsError, e:
                # Windows read-only file: clear the flag and retry once
                try:
                    log.info(
                        "|{0}| >> Initial delete fail. attempting chmod... ".
                        format(_str_func))
                    os.chmod(_path, stat.S_IWRITE)
                    os.remove(_path)
                except Exception, e:
                    for arg in e.args:
                        log.error(arg)
                    raise RuntimeError, "Stop"
def get_data(path=None, level=None, mode=0, cleanPyc=False):
    """
    Function for walking below a given directory looking for modules to
    reload. It finds modules that have pyc's as well for help in reloading.
    There is a cleaner on it as well to clear all pycs found.

    :parameters
        path(str)
        level(int) - Depth to search. None means everything
        mode(int)
            0 - normal (NOTE(review): mode is not referenced in this body)
            1 - pycs only
        cleanPyc: Delete pycs after check

    :returns
        _d_files - dict of import key to file
        _l_ordered - ordered list of module keys as found
        _l_pycd - list of modules that were pycd
        (NOTE(review): no return statement is present in this body)
    """
    _str_func = 'get_data'
    _b_debug = log.isEnabledFor(logging.DEBUG)

    _path = PATH.Path(path)
    _l_subs = []
    _d_files = {}
    _d_names = {}
    _d_pycd = {}
    _d_pycs = {}
    _l_duplicates = []
    _l_errors = []
    _l_pyc = []
    _l_pycd = []
    _base = _path.split()[-1]
    _l_ordered_list = []

    log.debug("|{0}| >> Checking base: {1} | path: {2}".format(
        _str_func, _base, path))

    _i = 0
    for root, dirs, files in os.walk(path, True, None):
        # Parse all the files of given path and reload python modules
        _mRoot = PATH.Path(root)
        _split = _mRoot.split()
        _subRoot = _split[-1]
        _splitUp = _split[_split.index(_base):]

        log.debug("|{0}| >> On subroot: {1} | path: {2}".format(
            _str_func, _subRoot, root))
        log.debug("|{0}| >> On split: {1}".format(_str_func, _splitUp))

        _mod = False
        _l_sub = []
        for f in files:
            key = False
            _pycd = False
            _long = os.path.join(root, f)
            if f.endswith('.pyc'):
                #name = f[:-4]
                #key = f
                _l_pyc.append(os.path.join(root, f))
            if f.endswith('.py'):
                # Does a compiled twin exist beside the source?
                _str_pycCheck = _long.replace('.py', '.pyc')
                if os.path.exists(_str_pycCheck):
                    _pycd = True
                if f == '__init__.py':
                    # Package marker: the dir itself is the module
                    if _i == 0:
                        key = _base
                        name = _base
                    else:
                        key = '.'.join(_splitUp)
                        name = _subRoot
                    _mod = key
                else:
                    name = f[:-3]
                    if _i == 0:
                        key = '.'.join([_base, name])
                    else:
                        key = '.'.join(_splitUp + [name])
                #log.debug("|{0}| >> found: {1}".format(_str_func,name))
            if key:
                if key not in _d_files.keys():
                    if key != _mod:
                        _l_sub.append(key)
                    _d_files[key] = os.path.join(root, f)
                    _d_names[key] = name
                    _d_pycd[key] = _pycd
                    if _pycd:
                        _l_pycd.append(key)
                        _d_pycs[key] = _str_pycCheck
                else:
                    _l_duplicates.append("{0} >> {1} ".format(
                        key, os.path.join(root, f)))
        """
        try:
            module = __import__(name, globals(), locals(), ['*'], -1)
            reload(module)
        except ImportError, e:
            for arg in e.args:
                logger.debug(arg)
        except Exception, e:
            for arg in e.args:
                logger.debug(arg)
        # Now reload sub modules as well
        for dir_name in dirs:
            __reloadRecursive(
                os.path.join(path, dir_name), parent_name+'.'+dir_name
                )"""
        # Package key first, then its child modules, to keep import order
        if _mod:
            _l_ordered_list.append(_mod)
        if _l_sub:
            _l_ordered_list.extend(_l_sub)

        if level is not None and _i >= level:
            break
        _i += 1

    if cleanPyc:
        _l_failed = []
        log.debug("|{0}| >> Found {1} pyc files under: {2}".format(
            _str_func, len(_l_pyc), path))
        for _file in _l_pyc:
            #for k in _l_ordered_list:
            #if k in _l_pycd:
            log.debug("|{0}| >> Attempting to clean pyc for: {1} ".format(
                _str_func, _file))
            if not _file.endswith('.pyc'):
                raise ValueError, "Should NOT be here"
            try:
                os.remove(_file)
            except WindowsError, e:
                # Windows read-only file: clear the flag and retry once
                try:
                    log.info(
                        "|{0}| >> Initial delete fail. attempting chmod... ".
                        format(_str_func))
                    os.chmod(_file, stat.S_IWRITE)
                    os.remove(_file)
                except Exception, e:
                    for arg in e.args:
                        log.error(arg)
                    raise RuntimeError, "Stop"