def __init__(self, sphinx=True, sphinx_autogen=True, coverage=True, epydoc=True, base_dir='.', *args):
    """Initialize the instance

    :param sphinx: If True, sphinx documentation will be produced
    :param sphinx_autogen: If True, sphinx autogen stubs are produced as well
        (presumably consumed by the sphinx run - confirm against ``generate``)
    :param coverage: If True, the coverage report will be generated
    :param epydoc: If True, epydoc documentation will be generated
    :param base_dir: directory this instance operates in; must be the
        project's 'doc' directory
    :param args: additional positional arguments are accepted but ignored
    :raise EnvironmentError: if base_dir is not a directory named 'doc'"""
    # project info is cached on the class - only compute it once
    if self.rootmodule is None:
        self._retrieve_project_info(base_dir)
    # END asssure project info is set
    self._sphinx = sphinx
    self._sphinx_autogen = sphinx_autogen
    self._coverage = coverage
    self._epydoc = epydoc
    self._base_dir = make_path(base_dir)
    # We assume to be in the project's doc directory, otherwise we cannot
    # automatically handle the project information
    if self._base_dir.abspath().basename() != 'doc':
        raise EnvironmentError(
            "Basedirectory needs to be the 'doc' directory, not %s" % self._base_dir)
    # project root is the parent of the doc directory
    self._project_dir = make_path(self._base_dir / "..")
def __init__(self, sphinx=True, sphinx_autogen=True, coverage=True, epydoc=True, base_dir='.', *args):
    """Initialize the instance

    :param sphinx: If True, sphinx documentation will be produced
    :param sphinx_autogen: If True, sphinx autogen stubs are produced as well
        (presumably consumed by the sphinx run - confirm against ``generate``)
    :param coverage: If True, the coverage report will be generated
    :param epydoc: If True, epydoc documentation will be generated
    :param base_dir: directory this instance operates in; must be the
        project's 'doc' directory
    :param args: additional positional arguments are accepted but ignored
    :raise EnvironmentError: if base_dir is not a directory named 'doc'"""
    # project info is cached on the class - only compute it once
    if self.rootmodule is None:
        self._retrieve_project_info(base_dir)
    # END asssure project info is set
    self._sphinx = sphinx
    self._sphinx_autogen = sphinx_autogen
    self._coverage = coverage
    self._epydoc = epydoc
    self._base_dir = make_path(base_dir)
    # We assume to be in the project's doc directory, otherwise we cannot
    # automatically handle the project information
    if self._base_dir.abspath().basename() != 'doc':
        raise EnvironmentError("Basedirectory needs to be the 'doc' directory, not %s" % self._base_dir)
    # project root is the parent of the doc directory
    self._project_dir = make_path(self._base_dir / "..")
def init_loadWorkflows():
    """Register all workflows defined by the dot files living next to this module."""
    this_module = __import__("mrv.test.automation.workflows", globals(), locals(), ['workflows'])
    dot_dir = make_path(__file__).parent()
    # plain workflows first, then QA workflows with their dedicated class
    wflbase.addWorkflowsFromDotFiles(this_module, dot_dir.glob("*.dot"))
    wflbase.addWorkflowsFromDotFiles(this_module, dot_dir.glob("*.dotQA"), workflowcls=QAWorkflow)
def init_classhierarchy():
    """Read a simple hierarchy file and initialize the module-level indexed tree from it."""
    global _typetree
    hierarchy_file = make_path(__file__).parent().parent() / "cache/UICommandsHierachy.hf"
    # STORE THE TYPE TREE
    _typetree = mrvmaya.dag_tree_from_tuple_list(mrvmaya.tuple_list_from_file(hierarchy_file))
def test_saveAs_export( self ):
    """Exercise Scene.save (save-as, unknown-node deletion, untitled files)
    and Scene.export (export-all and export-selected) against a temp directory."""
    tmpdir = make_path( tempfile.gettempdir() ) / "maya_save_test"
    try:
        shutil.rmtree( tmpdir )	# cleanup
    except OSError:
        pass
    files = [ "mafile.ma" , "mb.mb", "ma.ma" ]
    for filename in files:
        mayafile = tmpdir / filename
        assert not mayafile.exists()
        # save must create missing parent directories
        Scene.save( mayafile , force=1 )
        assert mayafile.exists()
    # END for each file to save
    # test remove unknown nodes
    assert Scene.name().ext() == ".ma"
    target_path = tmpdir / 'withoutunknown.mb'
    unode = cmds.createNode("unknown")
    # this doesnt work unless we have real unknown data - an unknown node
    # itself is not enough
    # self.failUnlessRaises(RuntimeError, Scene.save, target_path)
    # switching .ma -> .mb with autodeleteUnknown must drop the unknown node
    Scene.save(target_path, autodeleteUnknown=True)
    assert not cmds.objExists(unode)
    # must work for untitled files as well
    Scene.new( force = 1 )
    Scene.save( tmpdir / files[-1], force = 1 )
    # TEST EXPORT
    #############
    # as long as we have the test dir
    # export all
    eafile = tmpdir / "export_all.ma"
    assert not eafile.exists()
    assert Scene.export(eafile) == eafile
    assert eafile.exists()
    # export selected
    nodes = cmds.polySphere()
    cmds.select(cl=1)   # selects newly created ...
    esfile = tmpdir / "export_selected.ma"
    assert not esfile.exists()
    assert not cmds.ls(sl=1)
    # export of explicit nodes must not disturb the (empty) selection
    assert Scene.export(esfile, nodes) == esfile
    assert not cmds.ls(sl=1)	# selection unaltered
    assert esfile.isfile()
    # it truly exported our sphere: referencing the file yields one mesh
    Scene.new(force=1)
    esref = ref.createReference(esfile)
    assert len(list(esref.iterNodes(api.MFn.kMesh))) == 1
    shutil.rmtree( tmpdir )	# cleanup
def find_mrv_script(name):
    """Find an mrv script of the given name. This method should be used if you
    want to figure out where the mrv executable with the given name is located.
    The returned path is either relative or absolute.

    :return: Path to script
    :raise EnvironmentError: if the executable could not be found
    :note: Currently it only looks for executables, but handles projects
        which use mrv as a subproject"""
    import mrv
    mrvroot = os.path.dirname(mrv.__file__)
    tried_paths = list()
    # search relative to cwd, in an 'ext' subproject, and in mrv itself
    for base in ('', 'ext', mrvroot):
        for subdir in ('bin', 'doc', os.path.join('test', 'bin')):
            if base:
                candidate = os.path.join(base, subdir, name)
            else:
                candidate = os.path.join(subdir, name)
            if os.path.isfile(candidate):
                return make_path(candidate)
            tried_paths.append(candidate)
        # END for each subdir
    # END for each base
    raise EnvironmentError("Script named %s not found, looked at %s" % (name, ', '.join(tried_paths)))
def test_saveAs_export(self):
    """Exercise Scene.save (save-as, unknown-node deletion, untitled files)
    and Scene.export (export-all and export-selected) against a temp directory."""
    tmpdir = make_path(tempfile.gettempdir()) / "maya_save_test"
    try:
        shutil.rmtree(tmpdir)  # cleanup
    except OSError:
        pass
    files = ["mafile.ma", "mb.mb", "ma.ma"]
    for filename in files:
        mayafile = tmpdir / filename
        assert not mayafile.exists()
        # save must create missing parent directories
        Scene.save(mayafile, force=1)
        assert mayafile.exists()
    # END for each file to save
    # test remove unknown nodes
    assert Scene.name().ext() == ".ma"
    target_path = tmpdir / 'withoutunknown.mb'
    unode = cmds.createNode("unknown")
    # this doesnt work unless we have real unknown data - an unknown node
    # itself is not enough
    # self.failUnlessRaises(RuntimeError, Scene.save, target_path)
    # switching .ma -> .mb with autodeleteUnknown must drop the unknown node
    Scene.save(target_path, autodeleteUnknown=True)
    assert not cmds.objExists(unode)
    # must work for untitled files as well
    Scene.new(force=1)
    Scene.save(tmpdir / files[-1], force=1)
    # TEST EXPORT
    #############
    # as long as we have the test dir
    # export all
    eafile = tmpdir / "export_all.ma"
    assert not eafile.exists()
    assert Scene.export(eafile) == eafile
    assert eafile.exists()
    # export selected
    nodes = cmds.polySphere()
    cmds.select(cl=1)  # selects newly created ...
    esfile = tmpdir / "export_selected.ma"
    assert not esfile.exists()
    assert not cmds.ls(sl=1)
    # export of explicit nodes must not disturb the (empty) selection
    assert Scene.export(esfile, nodes) == esfile
    assert not cmds.ls(sl=1)  # selection unaltered
    assert esfile.isfile()
    # it truly exported our sphere: referencing the file yields one mesh
    Scene.new(force=1)
    esref = ref.createReference(esfile)
    assert len(list(esref.iterNodes(api.MFn.kMesh))) == 1
    shutil.rmtree(tmpdir)  # cleanup
def open(cls, scenepath=None, force=False, **kwargs):
    """Open the scene at the given scenepath

    :param scenepath: The path to the file to be opened
        If None, the currently loaded file will reopened
    :param force: if True, the new scene will be loaded although currently
        loaded contains unsaved changes
    :param kwargs: passed to *cmds.file*
    :return: a Path to the loaded scene"""
    if not scenepath:
        scenepath = cls.name()
    # NOTE: it will return the last loaded reference instead of the loaded file - lets fix this !
    sourcePath = make_path(scenepath)
    # strip flags we set ourselves so callers cannot pass them twice
    for flag in ("open", "o", "force", "f"):
        kwargs.pop(flag, None)
    lastReference = cmds.file(sourcePath.abspath(), open=1, force=force, **kwargs)
    return make_path(sourcePath)
def cacheFilePath(filename, ext, use_version=False):
    """Return path to cache file from which you would initialize data structures

    :param use_version: if true, the maya version will be appended to the filename"""
    cache_root = make_path(__file__).parent()
    # first token of the 'about' version string, e.g. '2011'
    version = cmds.about(version=1).split(" ")[0] if use_version else ""
    return cache_root / ("cache/%s%s.%s" % (filename, version, ext))
def new(cls, force=False, **kwargs):
    """Create a new scene

    :param force: if True, the new scene will be created even though there
        are unsaved modifications
    :param kwargs: passed to *cmds.file*
    :return: Path with name of the new file"""
    # strip flags we set ourselves so callers cannot pass them twice
    for flag in ("new", "n", "force", "f"):
        kwargs.pop(flag, None)
    return make_path(cmds.file(new=True, force=force, **kwargs))
def cacheFilePath(filename, ext, use_version=False):
    """Return path to cache file from which you would initialize data structures

    :param use_version: if true, the maya version will be appended to the filename"""
    cache_root = make_path(__file__).parent()
    # first token of the 'about' version string, e.g. '2011'
    version = cmds.about(version=1).split(" ")[0] if use_version else ""
    return cache_root / ("cache/%s%s.%s" % (filename, version, ext))
def new(cls, force=False, **kwargs):
    """Create a new scene

    :param force: if True, the new scene will be created even though there
        are unsaved modifications
    :param kwargs: passed to *cmds.file*
    :return: Path with name of the new file"""
    # strip flags we set ourselves so callers cannot pass them twice
    for flag in ('new', 'n', 'force', 'f'):
        kwargs.pop(flag, None)
    return make_path(cmds.file(new=True, force=force, **kwargs))
def setup_maya_app():
    """Prepare the maya app dir to come up with a neutral environment"""
    source_config = fixture_path("maya_config")
    target_path = make_path(os.path.join(tempfile.gettempdir(), "test_mrv_maya_config"))
    # always start from a pristine copy of the fixture configuration
    if target_path.isdir():
        shutil.rmtree(target_path)
    shutil.copytree(source_config, target_path)
    # adjust the environment to assure maya picks up the prepared app dir;
    # set both the process env and os.environ so child processes see it too
    os.putenv(env_app_dir, target_path)
    os.environ[env_app_dir] = target_path
def headerPath(apiname):
    """
    :return: Path to file containing the c++ header of the given apiclass' name.
        The file will not be verified, hence it may be inaccessible
    :param apiname: string name, like 'MFnBase'
    :raise ValueError: if MAYA_LOCATION is not set"""
    location = make_path("$MAYA_LOCATION").expand_or_raise().realpath()
    # on OSX the devkit lives beside, not inside, the application bundle
    if sys.platform == 'darwin':
        location = location.parent().parent() / "devkit"
    # END handle platform dependency
    return location / ("include/maya/%s.h" % apiname)
def headerPath(apiname):
    """
    :return: Path to file containing the c++ header of the given apiclass' name.
        The file will not be verified, hence it may be inaccessible
    :param apiname: string name, like 'MFnBase'
    :raise ValueError: if MAYA_LOCATION is not set"""
    location = make_path("$MAYA_LOCATION").expand_or_raise().realpath()
    # on OSX the devkit lives beside, not inside, the application bundle
    if sys.platform == 'darwin':
        location = location.parent().parent() / "devkit"
    # END handle platform dependency
    return location / ("include/maya/%s.h" % apiname)
def tuple_list_from_file(filepath):
    """Create a tuple hierarchy list from the file at the given path

    :param filepath: path to a tab-indented hierarchy file; a line's tab count
        encodes its depth in the hierarchy
    :return: list of (depth, name) tuples suitable for dag_tree_from_tuple_list"""
    lines = make_path(filepath).lines(retain=False)
    # depth is the tab count, the name is the line with leading tabs stripped.
    # The original loop enumerated lines without using the index and appended
    # manually - a comprehension expresses the same transform directly.
    return [(line.count('\t'), line.lstrip('\t')) for line in lines]
def open(cls, scenepath=None, force=False, **kwargs):
    """Open the scene at the given scenepath

    :param scenepath: The path to the file to be opened
        If None, the currently loaded file will reopened
    :param force: if True, the new scene will be loaded although currently
        loaded contains unsaved changes
    :param kwargs: passed to *cmds.file*
    :return: a Path to the loaded scene"""
    if not scenepath:
        scenepath = cls.name()
    # NOTE: it will return the last loaded reference instead of the loaded file - lets fix this !
    sourcePath = make_path(scenepath)
    # strip flags we set ourselves so callers cannot pass them twice
    for flag in ('open', 'o', 'force', 'f'):
        kwargs.pop(flag, None)
    lastReference = cmds.file(sourcePath.abspath(), open=1, force=force, **kwargs)
    return make_path(sourcePath)
def path(self, copynumber=False, unresolved=False):
    """:return: Path object with the path containing the reference's data

    :param copynumber: If True, the returned path will include the copy number.
        As it will be a path object, it might not be fully usable in that state
    :param unresolved: see `ls`
    :note: we always query it from maya as our numbers change if some other
        reference is being removed and cannot be trusted"""
    path_str = cmds.referenceQuery(self._refnode, f=1, un=unresolved)
    if copynumber:
        return make_path(path_str)
    # strip the trailing copy number before handing the path out
    return make_path(self._splitCopyNumber(path_str)[0])
def path(self, copynumber=False, unresolved = False):
    """:return: Path object with the path containing the reference's data

    :param copynumber: If True, the returned path will include the copy number.
        As it will be a path object, it might not be fully usable in that state
    :param unresolved: see `ls`
    :note: we always query it from maya as our numbers change if some other
        reference is being removed and cannot be trusted"""
    # NOTE(review): a sibling implementation of this method queries
    # ``self._refnode`` instead of ``self._refnodename`` - confirm which
    # attribute this class actually sets before unifying them
    path_str = cmds.referenceQuery(self._refnodename, f=1, un=unresolved)
    if not copynumber:
        path_str = self._splitCopyNumber(path_str)[0]
    # END handle copy number
    return make_path(path_str)
def tuple_list_from_file(filepath):
    """Create a tuple hierarchy list from the file at the given path

    :param filepath: path to a tab-indented hierarchy file; a line's tab count
        encodes its depth in the hierarchy
    :return: list of (depth, name) tuples suitable for dag_tree_from_tuple_list"""
    lines = make_path(filepath).lines(retain=False)
    # depth is the tab count, the name is the line with leading tabs stripped.
    # The original loop enumerated lines without using the index and appended
    # manually - a comprehension expresses the same transform directly.
    return [(line.count('\t'), line.lstrip('\t')) for line in lines]
def setup_maya_app():
    """Prepare the maya app dir to come up with a neutral environment"""
    source_config = fixture_path("maya_config")
    target_path = make_path(os.path.join(tempfile.gettempdir(), "test_mrv_maya_config"))
    # always start from a pristine copy of the fixture configuration
    if target_path.isdir():
        shutil.rmtree(target_path)
    shutil.copytree(source_config, target_path)
    # adjust the environment to assure maya picks up the prepared app dir;
    # set both the process env and os.environ so child processes see it too
    os.putenv(env_app_dir, target_path)
    os.environ[env_app_dir] = target_path
def clean(self):
    """Clean the generated files by removing them

    :note: Must respect the options the same way as done by the ``generate`` method"""
    if self._coverage:
        self.remove_version_info('coverage')
        bdd = self.build_downloads_dir()
        csdd = self.source_downloads_coverage_dir()
        coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)
        # delete all files we copied from the coverage dir
        if coverage_dir.isdir():
            for fpath in coverage_dir.files():
                tfpath = bdd / fpath.basename()
                if tfpath.isfile():
                    tfpath.remove()
                # END remove file
            # END for each coverage file to remove
        # END if coverage directory exists
        try:
            shutil.rmtree(csdd)
        except OSError:
            # directory may not exist - nothing to clean then
            pass
        # END exceptionhandlint
    # END clean coverage
    if self._epydoc:
        self.remove_version_info('epydoc')
        try:
            shutil.rmtree(self.epydoc_target_dir())
        except OSError:
            pass
        # END ignore errors if directory doesnt exist
    # END clean epydoc
    if self._sphinx:
        self.remove_version_info('sphinx')
        ip = self.index_rst_path()
        iph = ip+'.header'
        # only remove index.rst if it appears we are generating it using
        # header and footer
        if iph.isfile() and ip.isfile():
            ip.remove()
        # END remove generated index
        # remove all sphinx output trees that exist
        out_dir = self.html_output_dir()
        dt_dir = self.doctrees_dir()
        agp = self.autogen_output_dir()
        for dir in (agp, out_dir, dt_dir):
            if dir.isdir():
                shutil.rmtree(dir)
def save(cls, scenepath=None, autodeleteUnknown=False, **kwargs):
    """Save the currently opened scene under scenepath in the respective format

    :param scenepath: if None, the currently opened scene will be saved, otherwise
        the name will be changed. Paths leading to the file will automatically be created.
    :param autodeleteUnknown: if true, unknown nodes will automatically be deleted
        before an attempt is made to change the maya file's type
    :param kwargs: passed to cmds.file
    :raise RuntimeError: if the target or current scene extension is unsupported
    :return: Path at which the scene has been saved."""
    if scenepath is None or scenepath == "":
        scenepath = cls.name()
    scenepath = make_path(scenepath)
    curscene = cls.name()
    # map extensions to maya file types; unknown extensions are an error
    try:
        filetype = cls.kFileTypeMap[scenepath.ext()]
        curscenetype = cls.kFileTypeMap[curscene.ext()]
    except KeyError:
        raise RuntimeError("Unsupported filetype of: " + scenepath)
    # is it a save as ?
    if curscene != scenepath:
        cls.rename(scenepath)
    # assure path exists
    parentdir = scenepath.dirname()
    if not parentdir.exists():
        parentdir.makedirs()
    # END assure parent path exists
    # delete unknown before changing types ( would result in an error otherwise )
    if autodeleteUnknown and curscenetype != filetype:
        cls.deleteUnknownNodes()
    # END handle unkonwn nodes
    # safe the file
    # strip flags we set ourselves so callers cannot pass them twice
    kwargs.pop("save", kwargs.pop("s", None))
    kwargs.pop("type", kwargs.pop("typ", None))
    return make_path(cmds.file(save=True, type=filetype, **kwargs))
def save(cls, scenepath=None, autodeleteUnknown=False, **kwargs):
    """Save the currently opened scene under scenepath in the respective format

    :param scenepath: if None, the currently opened scene will be saved, otherwise
        the name will be changed. Paths leading to the file will automatically be created.
    :param autodeleteUnknown: if true, unknown nodes will automatically be deleted
        before an attempt is made to change the maya file's type
    :param kwargs: passed to cmds.file
    :raise RuntimeError: if the target or current scene extension is unsupported
    :return: Path at which the scene has been saved."""
    if scenepath is None or scenepath == "":
        scenepath = cls.name()
    scenepath = make_path(scenepath)
    curscene = cls.name()
    # map extensions to maya file types; unknown extensions are an error
    try:
        filetype = cls.kFileTypeMap[scenepath.ext()]
        curscenetype = cls.kFileTypeMap[curscene.ext()]
    except KeyError:
        raise RuntimeError("Unsupported filetype of: " + scenepath)
    # is it a save as ?
    if curscene != scenepath:
        cls.rename(scenepath)
    # assure path exists
    parentdir = scenepath.dirname()
    if not parentdir.exists():
        parentdir.makedirs()
    # END assure parent path exists
    # delete unknown before changing types ( would result in an error otherwise )
    if autodeleteUnknown and curscenetype != filetype:
        cls.deleteUnknownNodes()
    # END handle unkonwn nodes
    # safe the file
    # strip flags we set ourselves so callers cannot pass them twice
    kwargs.pop('save', kwargs.pop('s', None))
    kwargs.pop('type', kwargs.pop('typ', None))
    return make_path(cmds.file(save=True, type=filetype, **kwargs))
def clean(self):
    """Clean the generated files by removing them

    :note: Must respect the options the same way as done by the ``generate`` method"""
    if self._coverage:
        self.remove_version_info('coverage')
        bdd = self.build_downloads_dir()
        csdd = self.source_downloads_coverage_dir()
        coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)
        # delete all files we copied from the coverage dir
        if coverage_dir.isdir():
            for fpath in coverage_dir.files():
                tfpath = bdd / fpath.basename()
                if tfpath.isfile():
                    tfpath.remove()
                # END remove file
            # END for each coverage file to remove
        # END if coverage directory exists
        try:
            shutil.rmtree(csdd)
        except OSError:
            # directory may not exist - nothing to clean then
            pass
        # END exceptionhandlint
    # END clean coverage
    if self._epydoc:
        self.remove_version_info('epydoc')
        try:
            shutil.rmtree(self.epydoc_target_dir())
        except OSError:
            pass
        # END ignore errors if directory doesnt exist
    # END clean epydoc
    if self._sphinx:
        self.remove_version_info('sphinx')
        ip = self.index_rst_path()
        if ip.isfile():
            ip.remove()
        # END remove generated index
        # remove all sphinx output trees that exist
        out_dir = self.html_output_dir()
        dt_dir = self.doctrees_dir()
        agp = self.autogen_output_dir()
        for dir in (agp, out_dir, dt_dir):
            if dir.isdir():
                shutil.rmtree(dir)
class WorkflowWrapTestProcess(process.WorkflowProcessBase):
    """Process which wraps a workflow loaded from a dot file, for testing."""
    # directory containing the .dot workflow definitions used by the base class
    workflow_directory = make_path(__file__).parent().parent() / "workflows"

    def __init__(self, id, wflname, **kwargs):
        """Wrap the workflow with the given name

        :param id: process id, passed on to the base class
        :param wflname: name of the workflow's dot file, without extension"""
        # must be set before the base initializer runs, as it presumably
        # loads the workflow from this file - confirm in WorkflowProcessBase
        self.workflow_file = wflname + ".dot"
        return super(WorkflowWrapTestProcess, self).__init__(id, **kwargs)

    #{ iDuplicatable Interface
    def createInstance(self, *args, **kwargs):
        """Create a copy of self and return it"""
        # workflowName and wgraph are provided by the base class
        return self.__class__(self.id(), self.workflowName, wflInstance=self.wgraph)
def maya_location(maya_version):
    """:return: string path to the existing maya installation directory for the
        given maya version
    :raise EnvironmentError: if it was not found"""
    install_root = None
    suffix = ''
    if sys.platform.startswith('linux'):
        install_root = "/usr/autodesk/maya"
        # 64 bit installations carry an extra suffix on linux
        if os.path.isdir('/lib64'):
            suffix = "-x64"
        # END handle 64 bit systems
    elif sys.platform == 'darwin':
        install_root = "/Applications/Autodesk/maya"
    elif sys.platform.startswith('win'):
        # try to find it in all kinds of program files, prefer 64 bit versions
        tried_paths = list()
        for envvar in ('PROGRAMW6432', 'PROGRAMFILES', 'PROGRAMFILES(X86)'):
            if envvar not in os.environ:
                continue
            candidate = make_path(os.environ[envvar]) / "Autodesk"
            if candidate.isdir():
                install_root = candidate / 'Maya'
                break
            # END if we have found Autodesk installations
            tried_paths.append(candidate)
        # END for each envvar
        if install_root is None:
            raise EnvironmentError("Could not find any maya installation, searched %s" % (', '.join(tried_paths)))
    # END os specific adjustments
    if install_root is None:
        raise EnvironmentError("Current platform %r is unsupported" % sys.platform)
    # END assure existance of maya root
    # %g drops a trailing .0, e.g. 2011.0 -> '2011'
    location = "%s%g%s" % (install_root, maya_version, suffix)
    # OSX special handling: the usable root lives inside the app bundle
    if sys.platform == 'darwin':
        location = os.path.join(location, 'Maya.app', 'Contents')
    if not os.path.isdir(location):
        raise EnvironmentError("Could not find maya installation at %r" % location)
    # END verfy maya location
    return location
def rename(cls, scenepath):
    """Rename the currently loaded file to be the file at scenepath

    :param scenepath: string or Path pointing describing the new location of the scene.
    :return: Path to scenepath
    :note: as opposed to the normal file -rename it will also adjust the extension
    :raise RuntimeError: if the scene's extension is not supported."""
    scenepath = make_path(scenepath)
    # Resolve the file type BEFORE renaming: previously an unsupported
    # extension raised only after the rename had already happened, leaving
    # the scene renamed with a mismatched type.
    try:
        filetype = cls.kFileTypeMap[scenepath.ext()]
    except KeyError:
        raise RuntimeError("Unsupported filetype of: " + scenepath)
    # END exception handling
    cmds.file(rename=scenepath.expandvars())
    cmds.file(type=filetype)
    return scenepath
def maya_location(maya_version):
    """:return: string path to the existing maya installation directory for the
        given maya version
    :raise EnvironmentError: if it was not found"""
    install_root = None
    suffix = ''
    if sys.platform.startswith('linux'):
        install_root = "/usr/autodesk/maya"
        # 64 bit installations carry an extra suffix on linux
        if os.path.isdir('/lib64'):
            suffix = "-x64"
        # END handle 64 bit systems
    elif sys.platform == 'darwin':
        install_root = "/Applications/Autodesk/maya"
    elif sys.platform.startswith('win'):
        # try to find it in all kinds of program files, prefer 64 bit versions
        tried_paths = list()
        for envvar in ('PROGRAMW6432', 'PROGRAMFILES', 'PROGRAMFILES(X86)'):
            if envvar not in os.environ:
                continue
            candidate = make_path(os.environ[envvar]) / "Autodesk"
            if candidate.isdir():
                install_root = candidate / 'Maya'
                break
            # END if we have found Autodesk installations
            tried_paths.append(candidate)
        # END for each envvar
        if install_root is None:
            raise EnvironmentError("Could not find any maya installation, searched %s" % (', '.join(tried_paths)))
    # END os specific adjustments
    if install_root is None:
        raise EnvironmentError("Current platform %r is unsupported" % sys.platform)
    # END assure existance of maya root
    # %g drops a trailing .0, e.g. 2011.0 -> '2011'
    location = "%s%g%s" % (install_root, maya_version, suffix)
    # OSX special handling: the usable root lives inside the app bundle
    if sys.platform == 'darwin':
        location = os.path.join(location, 'Maya.app', 'Contents')
    if not os.path.isdir(location):
        raise EnvironmentError("Could not find maya installation at %r" % location)
    # END verfy maya location
    return location
def _make_coverage(self):
    """Generate a coverage report and make it available as download

    :raise SystemError: if the tmrv test run reports failure"""
    tmrvpath = self.tmrv_bin_path()
    # for some reason, the html output can only be generated if the current
    # working dir is in the project root. Its something within nose's coverage
    # module apparently
    prevcwd = os.getcwd()
    os.chdir(self._project_dir)
    try:
        rval = self._call_python_script([
            tmrvpath,
            str(self._mrv_maya_version()),
            "%s=%s" % (cmd.tmrv_coverage_flag, self.pinfo.root_package)
        ])
    finally:
        os.chdir(prevcwd)
    # END handle cwd
    if rval:
        raise SystemError("tmrv reported failure")
    # END handle return value
    bdd = self.build_downloads_dir()
    csdd = self.source_downloads_coverage_dir()
    for dir in (bdd, csdd):
        if not dir.isdir():
            dir.makedirs()
        # END if dir doesnt exist, create it
    # END for each directory
    # coverage was generated into the current working dir
    # index goes to downloads in the source directory as it is referenced
    # by the docs
    coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)
    cindex = coverage_dir / 'index.html'
    shutil.copy(cindex, csdd)
    # all coverage html files go to the downlods directory
    for html in coverage_dir.files():
        shutil.copy(html, bdd)
    # END for each html
    # record the version this report was generated for
    self.write_version('coverage')
def fix_ascii_file(filepath):
    """Unfortunately, on windows and osx and maya2011, ascii's tend to corrupt
    themselves by writing ',' into floats which should be a '.'. Could be
    something related to the locale too. Nonetheless, we have to fix it and
    do a stupid find a replace

    :param filepath: Path to a maya file; only '.ma' files are processed,
        anything else is returned unchanged"""
    if filepath.ext() != '.ma':
        return
    # NOTE(review): tempfile.mktemp is race-prone; kept for interface
    # compatibility with the Path-based move below
    tmpfile = make_path(tempfile.mktemp())
    # use context managers so both handles are closed even on error -
    # previously the input handle was never closed and the output handle
    # leaked on failure, which can make the remove/move below fail on windows
    with open(filepath) as ifh:
        with open(tmpfile, 'wb') as ofh:
            for line in ifh:
                ofh.write(line.replace(',', '.'))
            # END for each line
    # replace the original with the fixed copy
    filepath.remove()
    tmpfile.move(filepath)
def create(cls, filepath, namespace=None, load = True, **kwargs):
    """Create a reference with the given namespace

    :param filepath: path describing the reference file location
    :param namespace: if None, a unique namespace will be generated for you
        The namespace will contain all referenced objects.
    :param load: if True, the reference will be created in loaded state, other
        wise its loading is deferred
    :param kwargs: passed to file command
    :raise ValueError: if the namespace does already exist
    :raise RuntimeError: if the reference could not be created"""
    # strip a possible copy number ( like {1} ) from the path
    filepath = make_path(cls._splitCopyNumber(filepath)[0])

    # increment function for findUnique: base, base1, base2, ...
    def nsfunc(base, i):
        if not i:
            return base
        return "%s%i" % (base, i)

    ns = namespace
    if not ns:
        # assure unique namespace derived from the file's base name
        nsbasename = filepath.stripext().basename()
        ns = Namespace.findUnique(nsbasename, incrementFunc=nsfunc)
    else:
        ns = Namespace(ns)  # assure we have a namespace object

    ns = ns.relativeTo(Namespace(Namespace.rootpath))
    if ns.exists():
        raise ValueError("Namespace %s for %s does already exist" % (ns, filepath))

    # assure we keep the current namespace - the file command may change it
    prevns = Namespace.current()
    # removing duplicate **kwargs so our own flags are authoritative
    kwargs.pop('ns', None)
    kwargs.pop('reference', kwargs.pop('r', None))
    kwargs.pop('deferReference', kwargs.pop('dr', None))
    try:
        createdRefpath = cmds.file(filepath, ns=str(ns), r=1, dr=not load, **kwargs)
    finally:
        prevns.setCurrent()
    # END assure we keep the namespace
    return FileReference(createdRefpath)
def create( cls, filepath, namespace=None, load = True, **kwargs ):
    """Create a reference with the given namespace

    :param filepath: path describing the reference file location
    :param namespace: if None, a unique namespace will be generated for you
        The namespace will contain all referenced objects.
    :param load: if True, the reference will be created in loaded state, other
        wise its loading is deferred
    :param kwargs: passed to file command
    :raise ValueError: if the namespace does already exist
    :raise RuntimeError: if the reference could not be created"""
    # strip a possible copy number ( like {1} ) from the path
    filepath = make_path( cls._splitCopyNumber( filepath )[0] )

    # increment function for findUnique: base, base1, base2, ...
    def nsfunc( base, i ):
        if not i:
            return base
        return "%s%i" % ( base,i )

    ns = namespace
    if not ns:
        # assure unique namespace derived from the file's base name
        nsbasename = filepath.stripext().basename()
        ns = Namespace.findUnique( nsbasename, incrementFunc = nsfunc )
    else:
        ns = Namespace( ns )        # assure we have a namespace object

    ns = ns.relativeTo( Namespace( Namespace.rootpath ) )
    if ns.exists():
        raise ValueError( "Namespace %s for %s does already exist" % (ns,filepath) )

    # assure we keep the current namespace - the file command may change it
    prevns = Namespace.current()
    # removing duplicate **kwargs so our own flags are authoritative
    kwargs.pop('ns', None)
    kwargs.pop('reference', kwargs.pop('r', None))
    kwargs.pop('deferReference', kwargs.pop('dr', None))
    try:
        createdRefpath = cmds.file( filepath, ns=str(ns),r=1,dr=not load, **kwargs )
    finally:
        prevns.setCurrent( )
    # END assure we keep the namespace
    return FileReference( createdRefpath )
def _make_coverage(self):
    """Generate a coverage report and make it available as download

    :raise SystemError: if the tmrv test run reports failure"""
    tmrvpath = self.tmrv_bin_path()
    # for some reason, the html output can only be generated if the current
    # working dir is in the project root. Its something within nose's coverage
    # module apparently
    prevcwd = os.getcwd()
    os.chdir(self._project_dir)
    try:
        rval = self._call_python_script([tmrvpath, str(self._mrv_maya_version()),
                                         "%s=%s" % (cmd.tmrv_coverage_flag, self.pinfo.root_package)])
    finally:
        os.chdir(prevcwd)
    # END handle cwd
    if rval:
        raise SystemError("tmrv reported failure")
    # END handle return value
    bdd = self.build_downloads_dir()
    csdd = self.source_downloads_coverage_dir()
    for dir in (bdd, csdd):
        if not dir.isdir():
            dir.makedirs()
        # END if dir doesnt exist, create it
    # END for each directory
    # coverage was generated into the current working dir
    # index goes to downloads in the source directory as it is referenced
    # by the docs
    coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)
    cindex = coverage_dir / 'index.html'
    shutil.copy(cindex, csdd)
    # all coverage html files go to the downlods directory
    for html in coverage_dir.files():
        shutil.copy(html, bdd)
    # END for each html
    # record the version this report was generated for
    self.write_version('coverage')
def mayapy_maya_version():
    """:return: float representing the maya version of the currently running
        mayapy interpreter.
    :raise EnvironmentError: If called from a 'normal' python interpreter"""
    if 'maya' not in sys.executable.lower():
        raise EnvironmentError("Not running mayapy")
    # END quick first check
    # Maya is capitalized on windows, hence the lower() comparisons
    exec_path = make_path(os.path.realpath(sys.executable))
    candidates = [t[4:] for t in exec_path.splitall() if t.lower().startswith('maya')]
    if not candidates:
        raise EnvironmentError("Not running mayapy or invalid path mayapy path: %s" % exec_path)
    # END handle errors
    version_token = candidates[0]
    # strip the 64 bit marker used on some platforms
    if version_token.endswith('-x64'):
        version_token = version_token[:-4]
    # END handle 64 bit paths
    return float(version_token)
def export(cls, outputFile, nodeListOrIterable=None, **kwargs):
    """Export the given nodes or everything into the file at path

    :param outputFile: Path object or path string to which the data should
        be written to. Parent directories will be created as needed
    :param nodeListOrIterable: if None, everything will be exported.
        Otherwise it may be an MSelectionList ( recommended ), or a list of
        Nodes, MObjects or MDagPaths
    :param kwargs: passed to cmds.file, see the mel docs for modifying flags
    :raise RuntimeError: if the output file's extension maps to no known file type
    :return: Path to which the data was exported"""
    outputFile = make_path(outputFile)
    if not outputFile.dirname().isdir():
        outputFile.dirname().makedirs()
    # END create parent dirs

    prev_selection = None
    if nodeListOrIterable is None:
        kwargs['exportAll'] = True
    else:
        # export selected mode - we must select the requested nodes, so
        # remember the current selection in order to restore it afterwards
        kwargs['exportSelected'] = True
        prev_selection = api.MSelectionList()
        api.MGlobal.getActiveSelectionList(prev_selection)
        import nt
        nt.select(nt.toSelectionList(nodeListOrIterable))
    # END handle nodes

    # explicit type wins, otherwise derive it from the output extension
    typ = kwargs.pop(
        'type',
        kwargs.pop('typ', cls.kFileTypeMap.get(outputFile.ext(), None)))
    if typ is None:
        raise RuntimeError("Invalid type in %s" % outputFile)
    # END handle type

    try:
        cmds.file(outputFile, type=typ, **kwargs)
        return outputFile
    finally:
        # restore the caller's selection even if the export failed
        if prev_selection is not None:
            api.MGlobal.setActiveSelectionList(prev_selection)
def export(cls, outputFile, nodeListOrIterable=None, **kwargs):
    """Export the given nodes or everything into the file at path

    :param outputFile: Path object or path string to which the data should
        be written to. Parent directories will be created as needed
    :param nodeListOrIterable: if None, everything will be exported.
        Otherwise it may be an MSelectionList ( recommended ), or a list of
        Nodes, MObjects or MDagPaths
    :param kwargs: passed to cmds.file, see the mel docs for modifying flags
    :raise RuntimeError: if the output file's extension maps to no known file type
    :return: Path to which the data was exported"""
    outputFile = make_path(outputFile)
    if not outputFile.dirname().isdir():
        outputFile.dirname().makedirs()
    # END create parent dirs

    prev_selection = None
    if nodeListOrIterable is None:
        kwargs["exportAll"] = True
    else:
        # export selected mode - we must select the requested nodes, so
        # remember the current selection in order to restore it afterwards
        kwargs["exportSelected"] = True
        prev_selection = api.MSelectionList()
        api.MGlobal.getActiveSelectionList(prev_selection)
        import nt
        nt.select(nt.toSelectionList(nodeListOrIterable))
    # END handle nodes

    # explicit type wins, otherwise derive it from the output extension
    typ = kwargs.pop("type", kwargs.pop("typ", cls.kFileTypeMap.get(outputFile.ext(), None)))
    if typ is None:
        raise RuntimeError("Invalid type in %s" % outputFile)
    # END handle type

    try:
        cmds.file(outputFile, type=typ, **kwargs)
        return outputFile
    finally:
        # restore the caller's selection even if the export failed
        if prev_selection is not None:
            api.MGlobal.setActiveSelectionList(prev_selection)
def mayapy_maya_version():
    """:return: float representing the maya version of the currently running
        mayapy interpreter.
    :raise EnvironmentError: If called from a 'normal' python interpreter"""
    if 'maya' not in sys.executable.lower():
        raise EnvironmentError("Not running mayapy")
    # END quick first check
    # Maya is capitalized on windows, hence the lower() comparisons
    exec_path = make_path(os.path.realpath(sys.executable))
    candidates = [t[4:] for t in exec_path.splitall() if t.lower().startswith('maya')]
    if not candidates:
        raise EnvironmentError("Not running mayapy or invalid path mayapy path: %s" % exec_path)
    # END handle errors
    version_token = candidates[0]
    # strip the 64 bit marker used on some platforms
    if version_token.endswith('-x64'):
        version_token = version_token[:-4]
    # END handle 64 bit paths
    return float(version_token)
def fromPaths(cls, paths, **kwargs):
    """Find the reference for each path in paths. If you provide the path X
    2 times, but you only have one reference to X, the return value will be
    [FileReference(X), None] as there are less references than provided paths.

    :param paths: a list of paths or references whose references in the scene
        should be returned. In case a reference is found, its plain path will be
        used instead.
    :param kwargs: all supported by `ls` to yield the base set of references
        we will use to match the paths with. Additionally, you may specify:

        * ignore_extension: if True, default False, the extension will be ignored
          during the search, only the actual base name will matter.
          This way, an MA file will be matched with an MB file.
          The references returned will still have their extension original extension.

    :return: list(FileReference|None, ...) if a filereference was found for given
        occurrence of Path, it will be returned at index of the current path in the
        input paths, otherwise it is None.
    :note: zip(paths, result) to get a corresponding tuple list associating each input
        path with the located reference"""
    if not isinstance(paths, (list,tuple)) or hasattr(paths, 'next'):
        raise TypeError("paths must be tuple, was %s" % type(paths))

    ignore_ext = kwargs.pop("ignore_extension", False)
    refs = cls.ls(**kwargs)

    # build dict for fast lookup
    # It will keep each reference
    lut = dict()
    # references are kept as-is, plain paths are wrapped into Path objects
    pathscp = [(isinstance(p, cls) and p.path()) or make_path(p) for p in paths]

    conv = lambda f: f
    if ignore_ext:
        conv = lambda f: f.expandvars().splitext()[0]
    # END ignore extension converter

    # returns (filepath, occurrence-index) so duplicate paths become distinct
    # dictionary keys; 'lut' here is the per-call occurrence counter
    def countTuple(filepath, lut):
        count = lut.get(filepath, 0)
        lut[filepath] = count + 1
        return (filepath , count)
    # END utility

    clut = dict()
    for ref in refs:
        lut[countTuple(conv(ref.path()), clut)] = ref # keys have no ext
    # END for each ref to put into lut

    # reuse the counter dict so the Nth occurrence of a path in 'paths'
    # matches the Nth reference with that path
    clut.clear()
    for i,path in enumerate(pathscp):
        pathscp[i] = countTuple(conv(path), clut)
    # END for each path to prepare

    outlist = list()
    for path in pathscp:
        ref_or_none = lut.get(path, None)
        outlist.append(ref_or_none)
        # no need to delete the keys as they have to be unique anyway
    # END for each path to find
    return outlist
log.warn("Invalid value for MRV configuration variable: %s" % str(e).split(':', 1)[-1]) # END safe access to variables def source_file_safely(script): try: maya.mel.eval('source "%s"' % script) except RuntimeError, e: log.error(str(e) + "- ignored") # END exception handling # END utility if not (init_mel|run_user_setup|autoload_plugins): return import maya.cmds as cmds prefsdir = make_path(cmds.internalVar(userPrefDir=1)) if not prefsdir.isdir(): log.warn("User Preferences directory did not exist: %s" % prefsdir) return # END check for existence # source actual MEL scripts sources = list() if init_mel: sources.append("createPreferencesOptVars.mel") sources.append("createGlobalOptVars.mel") sources.append(prefsdir + "/userPrefs.mel") # END option vars if autoload_plugins:
def test_storagePickleData( self ):
    """Round-trip pickled python data on a storageNode through save/open,
    undo, referencing and duplication, for both ascii and binary scenes."""
    tmpdir = make_path( tempfile.gettempdir() )

    # populate the wrapped dict with representative value types
    def setTestValue( mydict ):
        mydict['string'] = "hello world"
        mydict[1] = 3.0
        mydict["list"] = ["this", 2, 45.0]

    # verify the values written by setTestValue survived
    def checkTestValue( self, mydict ):
        sval = mydict.get( "string" )
        assert sval == "hello world"
        fval = mydict.get( 1 )
        assert fval == 3.0
        lval = mydict.get( "list" )
        assert len( lval ) == 3

    def fix_ascii_file(filepath):
        """Unfortunately, on windows and osx and maya2011, ascii's tend to corrupt
        themselves by writing ',' into floats which should be a '.'. Could
        be something related to the locale too. Nonetheless, we have to fix
        it and do a stupid find a replace"""
        if filepath.ext() != '.ma':
            return
        tmpfile = make_path(tempfile.mktemp())
        ofh = open(tmpfile, 'wb')
        for line in open(filepath):
            ofh.write(line.replace(',', '.'))
        # END for each line
        ofh.close()
        filepath.remove()
        # replace the corrupted original with the fixed copy
        tmpfile.move(filepath)

    did = "test"
    for filetype in [ ".ma", ".mb" ]:
        mrvmaya.Scene.new( force = True )

        # BASIC DATA CREATION AND EDITING
        ####################################
        storagenode = nt.createNode( "storage", "storageNode" )
        refcomparator = nt.createNode( "trans", "transform" )

        pyval = storagenode.pythonData( did, autoCreate = True )

        # adjust the value - will be changed in place
        setTestValue( pyval )

        # SAVE AND LOAD !
        #################
        # ascii and binary ( including reference test )
        filepath = tmpdir / ( "storagetest" + filetype )
        mrvmaya.Scene.save( filepath )
        fix_ascii_file(filepath)

        # reload
        mrvmaya.Scene.open( filepath, force=True )

        # get and test data
        storagenode = nt.Node( "storage" )
        pyvalloaded = storagenode.pythonData( did, autoCreate = False )
        checkTestValue( self, pyvalloaded )

        # CLEAR NON-EMPTY DATA WITH UNDO
        ##################################
        storagenode.clearData( did )
        pydatacleared = storagenode.pythonData( did, autoCreate =False )
        assert not pydatacleared.has_key( "string" )

        # clearing must be undoable
        cmds.undo()
        pydataundone = storagenode.pythonData( did, autoCreate =False )
        assert pydataundone.has_key( "string" )

        # CREATE REFERENCE
        ##################
        mrvmaya.Scene.new( force = True )
        mrvmaya.ref.createReference( filepath, namespace="referenced" )

        refstoragenode = nt.Node( "referenced:storage" )
        refcomparator = nt.Node( "referenced:trans" )
        pyval = refstoragenode.pythonData( did )

        # adjust values
        pyval[ "refchange" ] = "changed in reference"
        refcomparator.tx.msetFloat( 5.5 )

        # save reference
        filewithrefpath = tmpdir / ( "refstoragetest" + filetype )
        mrvmaya.Scene.save( filewithrefpath )
        fix_ascii_file(filewithrefpath)
        mrvmaya.Scene.open( filewithrefpath, force = True )

        # check test value and the newly written one
        refstoragenode = nt.Node( "referenced:storage" )
        pyval = refstoragenode.pythonData( did )
        checkTestValue( self, pyval )

        sval = pyval[ 'refchange' ]
        assert sval == "changed in reference"

        # DUPLICATION
        ###############
        for is_shallow in range( 2 ):
            duplicate = refstoragenode.duplicate( shallow = is_shallow )
            ddata = duplicate.pythonData( did )
            data = refstoragenode.pythonData( did )
            checkTestValue( self, ddata )

            # assure that its a real copy , not just something shallow
            if not is_shallow:
                data[ 'other' ] = 2
                assert not ddata.has_key( 'other' )
def mfnDBPath( mfnclsname ):
    """Generate a path to a database file containing mfn wrapping information"""
    # cacheFilePath leaves a trailing '.' (empty extension) - strip it off
    raw = cacheFilePath("mfndb/" + mfnclsname, '', use_version=False)
    return make_path(raw[:-1])
class DocGenerator(object):
    """Encapsulates all functionality required to create sphinx/epydoc documentation"""

    #{ Configuration
    # directory names never descended into when building the module index
    forbidden_dirs = ['test', 'ext', 'doc', '.']

    # PATHS
    source_dir = make_path('source')
    source_dl_dir = source_dir / 'download'

    build_dir = make_path('build')
    html_dir = build_dir / 'html'
    downloads_dir = html_dir / '_downloads'

    # EPYDOC
    epydoc_show_source = 'yes'
    epydoc_modules = """modules: unittest
modules: pydot,pyparsing
modules: ../,../ext/networkx/networkx"""

    epydoc_exclude = "mrv.test,mrv.doc,mrv.cmd.ipythonstartup"

    # DYNAMICALLY ADJUSTED MEMBERS
    # These members will be adjusted after reading the current project's
    # information
    rootmodule = None
    pinfo = None
    # template filled in by _retrieve_project_info with the project's values
    epydoc_cfg = """[epydoc]
name: %s
url: %s

sourcecode: %s
%s

exclude: %s
output: html"""

    #} END configuration

    def __init__(self, sphinx=True, sphinx_autogen=True, coverage=True, epydoc=True, base_dir='.', *args):
        """Initialize the instance

        :param sphinx: If True, sphinx documentation will be produced
        :param sphinx_autogen: If True, sphinx API documentation is generated as well
        :param coverage: If True, the coverage report will be generated
        :param epydoc: If True, epydoc documentation will be generated
        :param base_dir: directory assumed to be the project's 'doc' directory
        :raise EnvironmentError: if base_dir is not named 'doc'"""
        if self.rootmodule is None:
            self._retrieve_project_info(base_dir)
        # END assure project info is set

        self._sphinx = sphinx
        self._sphinx_autogen = sphinx_autogen
        self._coverage = coverage
        self._epydoc = epydoc
        self._base_dir = make_path(base_dir)

        # We assume to be in the project's doc directory, otherwise we cannot
        # automatically handle the project information
        if self._base_dir.abspath().basename() != 'doc':
            raise EnvironmentError("Basedirectory needs to be the 'doc' directory, not %s" % self._base_dir)
        self._project_dir = make_path(self._base_dir / "..")

    #{ Public Interface
    @classmethod
    def remove_version_info(cls, idstring, basedir='.'):
        """Remove the version info file if it exists"""
        try:
            os.remove(cls.version_file_name(idstring, basedir))
        except OSError:
            # file didn't exist - nothing to remove
            pass
        # END exception handling

    @classmethod
    def version_file_name(cls, idstring, basedir='.'):
        """:return: filename at which to write the version file with the given id"""
        return make_path(os.path.join(basedir, "%s.version_info" % idstring))

    @classmethod
    def write_version(cls, idstring, basedir='.'):
        """Writes a version file containing the rootmodule's version info.
        This allows to verify that the version of the individual parts, like
        epydoc and sphinx are still matching"""
        version_string = "version_info = (%i, %i, %i, '%s', %i)" % cls.pinfo.version
        open(cls.version_file_name(idstring, basedir), 'wb').write(version_string)

    @classmethod
    def check_version(cls, opid, idstring, basedir='.'):
        """Checks whether the current version info matches with the stored version info
        as retrieved from idstring.
        If there is no such info or if the version matches exactly, do nothing.
        Otherwise raise an environment error to tell the user to rebuild the
        respective part of the documentation"""
        vlocals = dict()
        vfile = cls.version_file_name(idstring, basedir)
        if not os.path.isfile(vfile):
            return

        # the version file is a python snippet assigning 'version_info'
        execfile(vfile, vlocals)
        vinfo = vlocals['version_info']
        if vinfo != cls.pinfo.version:
            msg = "Documentation target named '%s' at version %s requires '%s' ( last built at %s ) to be rebuild" % (opid, str(cls.pinfo.version), idstring, str(vinfo))
            raise EnvironmentError(msg)
        # END raise exception

    @classmethod
    def parser(cls):
        """:return: OptionParser instance suitable to parse commandline arguments
        with which to initialize our instance"""
        usage = """%prog [options] Make documentation or remove the generated files."""
        parser = optparse.OptionParser(usage=usage)

        hlp = """Specifies to build sphinx documentation"""
        parser.add_option('-s', '--sphinx', dest='sphinx', type='int', default=1, help=hlp, metavar='STATE')

        hlp = """If specified, sphinx API docuementation will be generated"""
        parser.add_option('-a', '--sphinx-autogen', dest='sphinx_autogen', type='int', default=1, help=hlp, metavar='STATE')

        hlp = """Specifies epydoc documentation"""
        parser.add_option('-e', '--epydoc', dest='epydoc', type='int', default=1, help=hlp, metavar='STATE')

        hlp = """Specifies a coverage report. It will be referenced from within the sphinx documentation"""
        parser.add_option('-c', '--coverage', dest='coverage', type='int', default=1, help=hlp, metavar='STATE')

        return parser

    @classmethod
    def package_info(cls, basedir='.'):
        """:return: tuple(root_path, package_root_path, root_package_name ) tuple of the
        path containing all modules, path containing the root package, as well as the
        name of our root package as deduced from the package_root_path

        :param basedir: we expect to be in the root/doc path of the project - if this is
            not the case, the basedir can be adjusted accordingly to 'virtually' chdir
            into the doc directory"""
        rootpath = ospd(os.path.realpath(os.path.abspath(basedir)))
        packageroot = ospd(rootpath)
        packagename = os.path.basename(rootpath)
        return (rootpath, packageroot, packagename)

    @classmethod
    def makedoc(cls, args):
        """Produce the actual docs using this type"""
        p = cls.parser()

        hlp = """If specified, previously generated files will be removed. Works in conjunction with the other flags, which default to True, hence %prog --clean will remove all generated files by default"""
        p.add_option('--clean', dest='clean', action='store_true', default=False, help=hlp)

        options, args = p.parse_args(args)
        clean = options.clean
        # 'clean' is not a constructor argument - strip it before forwarding
        del (options.clean)

        dgen = cls(*args, **options.__dict__)
        if clean:
            dgen.clean()
        else:
            dgen.generate()
        # END handle mode

    def generate(self):
        """Geneate the documentation according to our configuration

        :note: respects the options given during construction"""
        if self._coverage:
            self._make_coverage()

        if self._epydoc:
            self._make_epydoc()

        if self._sphinx:
            self._make_sphinx_index()
            if self._sphinx_autogen:
                self._make_sphinx_autogen()
            # END generate autogen
            self._make_sphinx()
        # END make sphinx

    def clean(self):
        """Clean the generated files by removing them

        :note: Must respect the options the same way as done by the
            ``generate`` method"""
        if self._coverage:
            self.remove_version_info('coverage')
            bdd = self.build_downloads_dir()
            csdd = self.source_downloads_coverage_dir()
            coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)

            # delete all files we copied from the coverage dir
            if coverage_dir.isdir():
                for fpath in coverage_dir.files():
                    tfpath = bdd / fpath.basename()
                    if tfpath.isfile():
                        tfpath.remove()
                    # END remove file
                # END for each coverage file to remove
            # END if coverage directory exists

            try:
                shutil.rmtree(csdd)
            except OSError:
                pass
            # END exceptionhandlint
        # END clean coverage

        if self._epydoc:
            self.remove_version_info('epydoc')
            try:
                shutil.rmtree(self.epydoc_target_dir())
            except OSError:
                pass
            # END ignore errors if directory doesnt exist
        # END clean epydoc

        if self._sphinx:
            self.remove_version_info('sphinx')
            ip = self.index_rst_path()
            if ip.isfile():
                ip.remove()
            # END remove generated index

            out_dir = self.html_output_dir()
            dt_dir = self.doctrees_dir()
            agp = self.autogen_output_dir()
            for dir in (agp, out_dir, dt_dir):
                if dir.isdir():
                    shutil.rmtree(dir)
                # END remove html dir
            # END for each directory
        # END clean sphinx

    #} END public interface

    #{ Paths
    def base_dir(self):
        """:return: Path containing all documentation sources and output files"""
        return self._base_dir

    def set_base_dir(self, base_dir):
        """Set the base directory to the given value

        :return: self"""
        self._base_dir = Path(base_dir)
        return self

    def index_rst_path(self):
        """:return: Path to index rst file"""
        return self._base_dir / self.source_dir / "index.rst"

    def build_downloads_dir(self):
        """:return: Path to the build downloads directory"""
        return self._base_dir / self.downloads_dir

    def source_downloads_dir(self):
        """:return: Path to the source downloads directory"""
        return self._base_dir / self.source_dl_dir

    def source_downloads_coverage_dir(self):
        """:return: Path to coverage related downloads"""
        return self.source_downloads_dir() / 'coverage'

    def epydoc_target_dir(self):
        """:return: Path to directory to which epydoc will write its output"""
        return self.html_output_dir() / 'generated' / 'api'

    def html_output_dir(self):
        """:return: html directory to receive all output"""
        return self._base_dir / self.html_dir

    def autogen_output_dir(self):
        """:return: directory to which sphinx-autogen will write its output to"""
        return self._base_dir / self.source_dir / 'generated'

    def doctrees_dir(self):
        """:return: Path to doctrees directory to which sphinx writes some files"""
        return self._base_dir / self.build_dir / 'doctrees'

    def mrv_bin_path(self):
        """:return: Path to mrv binary"""
        import mrv.cmd.base
        return mrv.cmd.base.find_mrv_script('mrv')

    def tmrv_bin_path(self):
        """:return: Path to tmrv binary"""
        import mrv.cmd.base
        return mrv.cmd.base.find_mrv_script('tmrv')
    #} END paths

    #{ Utilities
    def _mrv_maya_version(self):
        """:return: maya version with which mrv subcommands should be started with"""
        import mrv.cmd.base
        return mrv.cmd.base.available_maya_versions()[-1]

    def _call_python_script(self, *args, **kwargs):
        """Wrapper of subprocess.call which assumes that we call a python script.
        On windows, the python interpreter needs to be called directly

        :raise EnvironmentError: if the called had a non-0 return value"""
        if sys.platform.startswith('win'):
            # windows won't honor the shebang line - prepend the interpreter
            args[0].insert(0, "python")
        # END handle windows
        cmd = ' '.join(str(i) for i in args[0])
        print cmd
        rval = subprocess.call(*args, **kwargs)
        if rval:
            raise EnvironmentError("Call to %s failed with status %i" % (args[0][0], rval))
        # END handle call error
    #} END utilities

    #{ Protected Interface
    @classmethod
    def _retrieve_project_info(cls, base_dir='.'):
        """Store the project information of the actual project in our class
        members for later use

        :note: must be called exactly once"""
        rootpath, packageroot, packagename = cls.package_info(base_dir)

        # for now, we assume our root package is already in the path
        try:
            cls.rootmodule = __import__(packagename)
        except ImportError:
            raise EnvironmentError("Root package %s could not be imported" % packagename)
        # END handle import

        pinfo_package = "%s.info" % packagename
        try:
            cls.pinfo = __import__(pinfo_package, fromlist=[''])
        except ImportError:
            raise EnvironmentError("Project information module %r could not be imported:" % pinfo_package)
        # END handle import

        # APPLY DOC-CONFIG
        ###################
        # the project may override our epydoc_* class defaults
        dcon = getattr(cls.pinfo, 'doc_config', dict())
        for k, v in dcon.items():
            if k.startswith('epydoc'):
                setattr(cls, k, v)
        # END apply project info

        cls.epydoc_cfg = cls.epydoc_cfg % (cls.pinfo.project_name, cls.pinfo.url, cls.epydoc_show_source, cls.epydoc_modules, cls.epydoc_exclude)

    def _make_sphinx_index(self):
        """Generate the index.rst file according to the modules and packages we
        actually have"""
        import mrv

        indexpath = self.index_rst_path()
        ifp = open(indexpath, 'wb')
        # write header - a static '<index>.header' file is expected next to it
        ifp.write((indexpath + '.header').bytes())

        # write api index
        if self._sphinx_autogen:
            basepath = self._base_dir / ".."
            rootmodule = basepath.abspath().basename()
            for root, dirs, files in os.walk(basepath):
                remove_dirs = list()
                for dirname in dirs:
                    if dirname in self.forbidden_dirs:
                        remove_dirs.append(dirname)
                    # END for each forbidden dir
                # END for each directory
                # prune forbidden dirs in place so os.walk skips them
                for dirname in remove_dirs:
                    del (dirs[dirs.index(dirname)])
                # END for each dirname to remove

                for fname in files:
                    if not fname.endswith('.py') or fname.startswith('_'):
                        continue
                    filepath = os.path.join(root, fname)
                    # + 1 as there is a trailing path separator
                    modulepath = "%s.%s" % (rootmodule, filepath[len(basepath) + 1:-3].replace(os.path.sep, '.'))
                    ifp.write("\t%s\n" % modulepath)
                # END for each file
            # END for each file
        # END generate api index

        # finalize it, write the footer
        ifp.write((indexpath + '.footer').bytes())
        ifp.close()

    def _make_coverage(self):
        """Generate a coverage report and make it available as download"""
        tmrvpath = self.tmrv_bin_path()

        # for some reason, the html output can only be generated if the current
        # working dir is in the project root. Its something within nose's coverage
        # module apparently
        prevcwd = os.getcwd()
        os.chdir(self._project_dir)

        try:
            rval = self._call_python_script([tmrvpath, str(self._mrv_maya_version()), "%s=%s" % (cmd.tmrv_coverage_flag, self.pinfo.root_package)])
        finally:
            os.chdir(prevcwd)
        # END handle cwd

        if rval:
            raise SystemError("tmrv reported failure")
        # END handle return value

        bdd = self.build_downloads_dir()
        csdd = self.source_downloads_coverage_dir()
        for dir in (bdd, csdd):
            if not dir.isdir():
                dir.makedirs()
            # END if dir doesnt exist, create it
        # END for each directory

        # coverage was generated into the current working dir
        # index goes to downloads in the source directory as it is referenced
        # by the docs
        coverage_dir = make_path(self._project_dir / cmd.tmrv_coverage_dir)
        cindex = coverage_dir / 'index.html'
        shutil.copy(cindex, csdd)

        # all coverage html files go to the downlods directory
        for html in coverage_dir.files():
            shutil.copy(html, bdd)
        # END for each html

        self.write_version('coverage')

    def _make_sphinx_autogen(self):
        """Instruct sphinx to generate the autogen rst files"""
        # will have to run it in a separate process for maya support
        mrvpath = self.mrv_bin_path()

        # note: the mrv import resolves the site-packages for us which does not
        # happen on osx for some reason
        code = "import mrv; import sphinx.ext.autosummary.generate as sas; sas.main()"
        agp = self.autogen_output_dir()

        # make sure its clean, otherwise we will reprocess the same files
        if agp.isdir():
            shutil.rmtree(agp)
        agp.makedirs()
        # END handle existing directory

        args = [mrvpath, str(self._mrv_maya_version()), '-c', code, '-o', agp, self.index_rst_path()]

        self._call_python_script(args)

        # POST PROCESS
        ##############
        # Add :api:module.name which gets picked up by extapi, inserting a
        # epydoc link to the respective file.
        for rstfile in agp.files("*.rst"):
            # insert module link
            lines = rstfile.lines()
            modulename = lines[0][6:-2]  # skip `\n
            lines.insert(2, ":api:`%s`\n" % modulename)

            # insert :api: links to the autoclasses
            i = 0
            l = len(lines)
            while i < l:
                line = lines[i]
                if line.startswith('.. autoclass'):
                    classname = line[line.rfind(' ') + 1:-1]  # skip newline
                    # an insertion grows the list - keep the bound in sync
                    l += 1
                    lines.insert(i, ':api:`%s.%s`\n\n' % (modulename, classname))
                    i += 1
                # END if we have a class
                i += 1
            # END for each line
            rstfile.write_lines(lines)
        # END for each rst to process

    def _sphinx_args(self):
        """:return: list of arguments to be used when calling sphinx from the
        commandline

        :note: directories of all kinds will be handled by the caller"""
        # we don't need "" around the values as we don't use a shell
        return ['-c', 'import sys, mrv, sphinx.cmdline; sphinx.cmdline.main(sys.argv)',
                '-b', 'html',
                '-D', 'latex_paper_size=a4',
                '-D', 'latex_paper_size=letter',
                '-D', 'project=%s' % self.pinfo.project_name,
                '-D', 'copyright=%s' % self.pinfo.author,
                '-D', 'version=%s' % "%i.%i" % self.pinfo.version[:2],
                '-D', 'release=%s' % "%i.%i.%i-%s" % self.pinfo.version[:4]]

    def _make_sphinx(self):
        """Generate the sphinx documentation"""
        # epydoc and coverage output is linked from the sphinx docs - they
        # must have been built against the same project version
        self.check_version('sphinx', 'epydoc')
        self.check_version('sphinx', 'coverage')

        mrvpath = self.mrv_bin_path()
        out_dir = self.html_output_dir()
        for dir in (self.source_dir, out_dir):
            if not dir.isdir():
                dir.makedirs()
            # END assure directory exists
        # END for each directory

        pathargs = ['-d', self.doctrees_dir(), self.source_dir, out_dir]
        args = [mrvpath, str(self._mrv_maya_version())] + self._sphinx_args() + pathargs

        self._call_python_script(args)
        self.write_version('sphinx')

    def _make_epydoc(self):
        """Generate epydoc documentation"""
        # start epydocs in a separate process
        # as maya support is required
        epytarget = self.epydoc_target_dir()
        if not epytarget.isdir():
            epytarget.makedirs()
        # END assure directory exists

        # write epydoc.cfg file temporarily
        epydoc_cfg_file = "epydoc.cfg"
        open(epydoc_cfg_file, 'wb').write(self.epydoc_cfg)

        args = ['epydoc', '-q', '-q', '--config', epydoc_cfg_file, '-o', str(epytarget)]

        # epydoc.cli reads sys.argv directly - swap it temporarily
        origargs = sys.argv[:]
        del (sys.argv[:])
        sys.argv.extend(args)
        try:
            import epydoc.cli
            epydoc.cli.cli()
        finally:
            os.remove(epydoc_cfg_file)
            del (sys.argv[:])
            sys.argv.extend(origargs)
        # END handle epydoc config file

        self.write_version('epydoc')
# -*- coding: utf-8 -*- """Keeps all workflows specific to maya :note: ``createWorkflow`` method must be supported in a module keeping workflows :todo: it would be better to have the createWorkflow method in some sort of workflowManager, for now that appears like overkill though """ __docformat__ = "restructuredtext" from mrv.path import make_path _this_module = __import__( "mrv.automation.workflows", globals(), locals(), ['workflows'] ) import pydot import mrv.automation.processes #{ Initialization import mrv.automation.base as common # load all workflows at once common.addWorkflowsFromDotFiles( _this_module, make_path( __file__ ).parent().glob( "*.dot" ) ) #} END initialization
def _createWrappedWfl( self, wfldir, wflname ):
    """Load a workflow from a dot file located in the given directory.

    :return: our wrapped workflow instance as created by a method loading a
        workflow from a file"""
    dot_file = make_path( wfldir ) / wflname
    return wflbase.loadWorkflowFromDotFile( dot_file )
def fixture_path(name):
    """:return: path to fixture file with ``name``, you can use a relative path
    as well, like subfolder/file.ext"""
    # fixtures live one level above this module's directory
    relative = os.path.join(os.path.dirname(__file__), "../fixtures/%s" % name)
    return make_path(os.path.abspath(relative))
def fixture_path( name ):
    """:return: path to fixture file with ``name``, you can use a relative path
    as well, like subfolder/file.ext"""
    # build the fixture location relative to this module, then normalize it
    module_dir = os.path.dirname( __file__ )
    fixture = os.path.join( module_dir, "../fixtures/%s" % name )
    return make_path( os.path.abspath( fixture ) )
def name(cls):
    """:return: Path of the current scene as queried via ``cmds.file(q=1, exn=1)``,
    wrapped into a Path object"""
    return make_path(cmds.file(q=1, exn=1))
# -*- coding: utf-8 -*- """Keeps all workflows specific to maya :note: ``createWorkflow`` method must be supported in a module keeping workflows :todo: it would be better to have the createWorkflow method in some sort of workflowManager, for now that appears like overkill though """ __docformat__ = "restructuredtext" from mrv.path import make_path _this_module = __import__("mrv.automation.workflows", globals(), locals(), ['workflows']) import pydot import mrv.automation.processes #{ Initialization import mrv.automation.base as common # load all workflows at once common.addWorkflowsFromDotFiles(_this_module, make_path(__file__).parent().glob("*.dot")) #} END initialization
def version_file_name(cls, idstring, basedir='.'):
    """:return: filename at which to write the version file with the given id"""
    filename = "%s.version_info" % idstring
    return make_path(os.path.join(basedir, filename))
def fromPaths( cls, paths, **kwargs ):
    """Find the reference for each path in paths. If you provide the path X
    2 times, but you only have one reference to X, the return value will be
    [ FileReference(X), None ] as there are less references than provided paths.

    :param paths: a list of paths or references whose references in the scene
        should be returned. In case a reference is found, its plain path will be
        used instead.
    :param kwargs: all supported by `ls` to yield the base set of references
        we will use to match the paths with. Additionally, you may specify:

        * ignore_extension: if True, default False, the extension will be ignored
          during the search, only the actual base name will matter.
          This way, an MA file will be matched with an MB file.
          The references returned will still have their extension original extension.

    :return: list( FileReference|None, ... ) if a filereference was found for given
        occurrence of Path, it will be returned at index of the current path in the
        input paths, otherwise it is None.
    :note: zip( paths, result ) to get a corresponding tuple list associating each input
        path with the located reference"""
    if not isinstance( paths, (list,tuple) ) or hasattr( paths, 'next' ):
        raise TypeError( "paths must be tuple, was %s" % type( paths ) )

    ignore_ext = kwargs.pop( "ignore_extension", False )
    refs = cls.ls( **kwargs )

    # build dict for fast lookup
    # It will keep each reference
    lut = dict()
    # references are kept as-is, plain paths are wrapped into Path objects
    pathscp = [ (isinstance(p, cls) and p.path()) or make_path(p) for p in paths ]

    conv = lambda f: f
    if ignore_ext:
        conv = lambda f: f.expandvars().splitext()[0]
    # END ignore extension converter

    # returns (filepath, occurrence-index) so duplicate paths become distinct
    # dictionary keys; 'lut' here is the per-call occurrence counter
    def countTuple( filepath, lut ):
        count = lut.get( filepath, 0 )
        lut[ filepath ] = count + 1
        return ( filepath , count )
    # END utility

    clut = dict()
    for ref in refs:
        lut[ countTuple(conv(ref.path()), clut) ] = ref # keys have no ext
    # END for each ref to put into lut

    # reuse the counter dict so the Nth occurrence of a path in 'paths'
    # matches the Nth reference with that path
    clut.clear()
    for i,path in enumerate( pathscp ):
        pathscp[i] = countTuple(conv(path), clut)
    # END for each path to prepare

    outlist = list()
    for path in pathscp:
        ref_or_none = lut.get( path, None )
        outlist.append( ref_or_none )
        # no need to delete the keys as they have to be unique anyway
    # END for each path to find
    return outlist
def init_loadWorkflows( ):
    """Register all test workflows found as dot files beside this module."""
    workflows_module = __import__( "mrv.test.automation.workflows", globals(), locals(), ['workflows'] )
    this_dir = make_path( __file__ ).parent()
    # plain workflows first, then the QA variants with their dedicated class
    wflbase.addWorkflowsFromDotFiles( workflows_module, this_dir.glob( "*.dot" ) )
    wflbase.addWorkflowsFromDotFiles( workflows_module, this_dir.glob( "*.dotQA" ), workflowcls = QAWorkflow )
def createWorkflow( workflowName ):
    """Create the workflow matching the given name """
    # workflow dot files live right next to this module
    dot_file = ( make_path( __file__ ).parent() / workflowName ) + ".dot"
    return wflbase.loadWorkflowFromDotFile( dot_file )
class TestStartup( tutil.StandaloneTestBase ):
    """For the sake of brevity, we turn all options on and test for all of them,
    although in fact each of these are independent. The test will not be able to
    detect if the configuration we test for always applies. Here we rely on the
    implementors capabilities"""
    # scratch MAYA_APP_DIR used for the duration of the test
    temp_app_dir = make_path(tempfile.gettempdir()) / "testmaya"

    def _rm_test_dir(self):
        # best-effort removal of the temporary maya app dir
        if self.temp_app_dir.isdir():
            try:
                self.temp_app_dir.rmtree()
            except OSError:
                # on windows, maya usually keps the maya.log open, so the
                # full removal fails. Ignore that ... .
                pass
        # END delete test dir

    def setup_environment(self):
        # enable all standalone startup options under test
        os.environ['MRV_STANDALONE_INIT_OPTIONVARS'] = "1"
        os.environ['MRV_STANDALONE_RUN_USER_SETUP'] = "1"
        os.environ['MRV_STANDALONE_AUTOLOAD_PLUGINS'] = "1"

        self._rm_test_dir()
        if not self.temp_app_dir.isdir():
            self.temp_app_dir.mkdir()
        # END assure we have a target directory

        # copy default user preferences for 32 and 64 bit versions - on darwin
        # they is no difference, as they only support one version on it
        prefs_base = tutil.fixture_path('maya_user_prefs')
        mayaversion = os.environ['MRV_MAYA_VERSION']

        # setup maya app dir to be in a temporary directory
        suffixes = ['']
        if sys.platform != 'darwin':
            suffixes.append('-x64')
        for suffix in suffixes:
            prefs_base.copytree(self.temp_app_dir / (mayaversion + suffix))
        # copy prefs data

        # setup maya to use our prefs directory
        os.environ['MAYA_APP_DIR'] = str(self.temp_app_dir)

    def undo_setup_environment(self):
        # switch all tested options back off
        os.environ['MRV_STANDALONE_INIT_OPTIONVARS'] = "0"
        os.environ['MRV_STANDALONE_RUN_USER_SETUP'] = "0"
        os.environ['MRV_STANDALONE_AUTOLOAD_PLUGINS'] = "0"

        # remove our temporary data
        self._rm_test_dir()

    def post_standalone_initialized(self):
        # verify every startup option actually took effect in the
        # freshly initialized standalone maya session
        from mrv.maya.util import OptionVarDict
        import maya.cmds as cmds
        import maya.mel

        ovars = OptionVarDict()
        ovar = "TEST_OVAR"

        # check runtime comamnds
        assert 'MyTestRuntimeCommand' in (cmds.runTimeCommand(q=1, uca=1) or list())

        plugins = ("ge2Export", "decomposeMatrix")
        for plugname in plugins:
            assert cmds.pluginInfo(plugname, q=1, loaded=1)
        # check auto load plugins

        # check option vars
        assert ovar in ovars
        assert ovars[ovar] == 3

        # check user setup
        tscript = """global int $gTestVar; if( $gTestVar != 5 ){ error("AssertionError, global variable was not set"); }"""
        maya.mel.eval(tscript)  # shouldn't raise
        assert hasattr(sys, 'userSetup') and sys.userSetup == True