def _get_setup_options(distdir, metadata):
    """Translate a package *metadata* mapping into a dict of keyword
    options suitable for a setuptools ``setup()`` call.

    distdir: root of the distribution directory; must contain a
        ``src/<name>`` package tree.
    metadata: maps new-style metadata field names (e.g. 'home-page',
        'requires-dist') to values; string values of list-type fields
        are assumed to be newline-separated.

    Returns the populated setup-options dict.
    """
    # a set of names of variables that are supposed to be lists
    lists = set(["keywords", "install_requires", "packages", "classifiers"])
    # mapping of new metadata names to old ones
    mapping = {
        "name": "name",
        "version": "version",
        "keywords": "keywords",
        "summary": "description",
        "description": "long_description",
        "home-page": "url",
        "download-url": "download_url",
        "author": "author",
        "author-email": "author_email",
        "maintainer": "maintainer",
        "maintainer-email": "maintainer_email",
        "license": "license",
        "classifier": "classifiers",
        "requires-dist": "install_requires",
        "entry_points": "entry_points",
        #'py_modules': 'py_modules',
        "packages": "packages",
    }

    # populate the package data with sphinx docs
    # we have to list all of the files because setuptools doesn't
    # handle nested directories very well
    pkgdir = os.path.join(distdir, "src", metadata["name"])
    plen = len(pkgdir) + 1  # length of the "<pkgdir>/" prefix to strip below
    sphinxdir = os.path.join(pkgdir, "sphinx_build", "html")
    testdir = os.path.join(pkgdir, "test")
    pkglist = list(find_files(sphinxdir))
    # test files are included too, minus compiled bytecode
    pkglist.extend(list(find_files(testdir, exclude="*.py[co]")))
    # make every entry relative to the package directory
    pkglist = [p[plen:] for p in pkglist]

    setup_options = {
        #'packages': [metadata['name']],
        "package_data": {
            metadata["name"]: pkglist
            # [
            #'sphinx_build/html/*.*',
            #'sphinx_build/html/_modules/*',
            #'sphinx_build/html/_sources/*',
            #'sphinx_build/html/_static/*',
            # ]
        },
        "package_dir": {"": "src"},
        "zip_safe": False,
        "include_package_data": True,
    }

    for key, val in metadata.items():
        if key in mapping:
            # basestring => Python 2 code; split newline-separated strings
            # into real lists for list-type options, otherwise just strip
            if isinstance(val, basestring):
                if mapping[key] in lists:
                    val = [p.strip() for p in val.split("\n") if p.strip()]
                else:
                    val = val.strip()
            setup_options[mapping[key]] = val
    return setup_options
def get_full_libpath():
    """Find all of the shared libraries in the current virtual environment
    and print the required LD_LIBRARY_PATH string (or equivalent) necessary
    to find them.

    Prints nothing when the current platform has no library-path variable
    registered in ``_lpdict``.
    """
    libpathvname = _lpdict.get(sys.platform)
    if libpathvname:
        # start from the existing library-path contents, dropping empties
        lpcontents = os.environ.get(libpathvname) or ''
        libpaths = [lib for lib in lpcontents.split(os.pathsep) if lib.strip()]
        topdir = os.path.dirname(os.path.dirname(sys.executable))
        if sys.platform.startswith('win'):
            pkgdir = os.path.join(topdir, 'Lib', 'site-packages')
            libfiles = [os.path.abspath(x) for x in find_files(pkgdir, '*.dll')]
        else:
            pkgdir = os.path.join(topdir, 'lib',
                                  'python%s.%s' % sys.version_info[:2],
                                  'site-packages')
            libfiles = [os.path.abspath(x) for x in find_files(pkgdir, '*.so')]
            if sys.platform == 'darwin':
                libfiles.extend([os.path.abspath(x)
                                 for x in find_files(pkgdir, '*.dylib')])

        # if the same library appears multiple times under the same subdir parent, remove
        # it from the libpath.
        # Better to fail due to missing lib than to use one with the wrong bitsize...
        # TODO: add some smarts to figure out desired bitsize and keep the correct lib
        # in the libpath
        bases = {}
        for fname in libfiles:
            bases.setdefault(os.path.basename(fname), []).append(fname)
        if len(bases) != len(libfiles):
            for base, paths in bases.items():
                if len(paths) > 1:
                    # grandparent dirs; duplicates there mean ambiguous copies
                    pardirs = [os.path.dirname(os.path.dirname(p))
                               for p in paths]
                    for d, p in zip(pardirs, paths):
                        if pardirs.count(d) > 1:
                            libfiles.remove(p)

        # keep only libs that have no matching .py "bootstrapper" alongside
        added = []
        exts = ['.py', '.pyc', '.pyo']
        for fname in libfiles:
            for ext in exts:
                if os.path.exists(os.path.splitext(fname)[0]+ext):
                    break
            else:
                added.append(os.path.dirname(fname))

        # de-duplicate while preserving order: new dirs first, then existing
        final = []
        seen = set()
        for p in added + libpaths:
            if p not in seen:
                seen.add(p)
                final.append(p)
        print os.pathsep.join(final)
def update_libpath():
    """Find all of the shared libraries in the current virtual environment
    and modify the activate script to put their directories in
    LD_LIBRARY_PATH (or DYLD_LIBRARY_PATH on OS X).

    Does nothing on platforms not listed in ``ldict``.  Any previously
    inserted modification block is removed before the new one is added, so
    repeated runs do not accumulate entries.
    """
    ldict = {
        'linux2': 'LD_LIBRARY_PATH',
        'linux': 'LD_LIBRARY_PATH',
        'darwin': 'DYLD_LIBRARY_PATH',
    }
    libpathvname = ldict.get(sys.platform, None)
    if libpathvname:
        topdir = os.path.dirname(os.path.dirname(sys.executable))
        bindir = os.path.join(topdir, 'bin')
        pkgdir = os.path.join(topdir, 'lib',
                              'python%s.%s' % sys.version_info[:2],
                              'site-packages')
        sofiles = [os.path.abspath(x) for x in find_files(pkgdir, '*.so')]

        # only dirs holding .so files with no .py "bootstrapper" next to them
        final = set()
        for f in sofiles:
            pyf = os.path.splitext(f)[0]+'.py'
            if not os.path.exists(pyf):
                final.add(os.path.dirname(f))

        subdict = {
            'libpath': libpathvname,
            'add_on': os.pathsep.join(final)
        }

        if len(final) > 0:
            # template block inserted into 'activate'; %(...)s fields are
            # filled in from subdict when the file is written back
            activate_lines = [
                '# BEGIN MODIFICATION\n',
                'if [ -z "$%(libpath)s" ] ; then\n',
                '   %(libpath)s=""\n',
                'fi\n',
                '\n',
                '%(libpath)s=$%(libpath)s:%(add_on)s\n',
                'export %(libpath)s\n',
                '# END MODIFICATION\n',
                '\n',
            ]
            absbin = os.path.abspath(bindir)
            activate_fname = os.path.join(absbin, 'activate')
            with open(activate_fname, 'r') as f:
                lines = f.readlines()
            # strip any block left behind by a previous run
            try:
                idx = lines.index(activate_lines[0])
                del lines[idx:idx+len(activate_lines)]
            except ValueError:
                pass
            # insert just after the 'export PATH' stanza
            idx = lines.index('export PATH\n')
            lines[idx+2:idx+2] = activate_lines
            content = ''.join(lines)
            with open(activate_fname, 'w') as f:
                f.write(content % subdict)
            print "\nThe 'activate' file has been updated with new values added to %s" % libpathvname
            print "You must deactivate and reactivate your virtual environment for the"
            print "changes to take effect\n"
def test_quickstart(self):
    """plugin_quickstart with the full option set must create the expected
    file tree, then fail cleanly on an existing directory and on extra
    positional args."""
    # All options.
    argv = ['quickstart', 'foobar', '-c', 'FooBar', '-g', 'component',
            '-v', '1.1', '-d', self.tdir]
    parser = _get_plugin_parser()
    options, args = parser.parse_known_args(argv)
    retval = plugin_quickstart(parser, options, args)
    self.assertEqual(retval, 0)
    # verify the generated tree (dirs included via showdirs=True)
    fandd = find_files(self.tdir, showdirs=True)
    self.assertEqual(set([os.path.basename(f) for f in fandd]),
                     set(['foobar', 'src', 'docs', 'setup.cfg', 'setup.py',
                          'MANIFEST.in', '__init__.py', 'conf.py',
                          'usage.rst', 'index.rst', 'srcdocs.rst',
                          'pkgdocs.rst', 'foobar.py', 'README.txt',
                          'test', 'test_foobar.py']))

    # Errors.
    # re-running against the same target dir must raise OSError
    code = 'plugin_quickstart(parser, options, args)'
    assert_raises(self, code, globals(), locals(), OSError,
                  "Can't create directory '%s' because it already exists."
                  % os.path.join(self.tdir, 'foobar'))

    # extra positional argument => error return code
    argv = ['quickstart', 'foobar', 'stuff']
    parser = _get_plugin_parser()
    options, args = parser.parse_known_args(argv)
    retval = plugin_quickstart(parser, options, args)
    self.assertEqual(retval, -1)
def main():
    """Command-line entry point: analyze the given Python files/directories
    and dump discovered class information to stdout.

    Options control whether classes, their bases, and their interfaces are
    shown, and whether the analysis cache is used.  Positional args are
    files or directories to scan (directories are scanned recursively,
    skipping any path containing a 'test' component).
    """
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument("-c", "--classes", action='store_true',
                        dest='showclasses', help="show classes found")
    parser.add_argument("-b", "--bases", action="store_true",
                        dest="showbases",
                        help="show base classes (only works if --classes is active)")
    parser.add_argument("-i", "--interfaces", action="store_true",
                        dest="showifaces",
                        help="show interfaces of classes (only works if --classes is active)")
    parser.add_argument("-u", "--use-cache", action='store_true',
                        dest='use_cache', help="use analysis cache")
    parser.add_argument('files', metavar='fname', type=str, nargs='+',
                        help='a file or directory to be scanned')

    options = parser.parse_args()
    # FIX: removed a leftover debug statement that printed options.use_cache
    # to stdout on every run.

    stime = time.time()
    psta = PythonSourceTreeAnalyser()
    for f in options.files:
        f = os.path.abspath(os.path.expanduser(f))
        if os.path.isdir(f):
            # skip anything under a 'test' path component
            for pyfile in find_files(f, "*.py",
                                     exclude=lambda n: 'test' in n.split(os.sep)):
                psta.analyze_file(pyfile, use_cache=options.use_cache)
        else:
            psta.analyze_file(f, use_cache=options.use_cache)
    psta.dump(sys.stdout, options)
    sys.stdout.write("elapsed time: %s seconds\n\n" % (time.time() - stime))
    if options.use_cache:
        _FileInfoCache.save()
def _get_py_files(distdir):
    """Return the list of Python source files under *distdir*, skipping
    packaging boilerplate and anything inside a test path."""
    skipped = ('setup.py', '__init__.py')

    def _pred(fname):
        # reject boilerplate names and any path containing a 'test' part
        pieces = fname.split(os.sep)
        if pieces[-1] in skipped or 'test' in pieces:
            return False
        return fname.endswith('.py')

    return list(find_files(distdir, _pred))
def __init__(self, startdir=None, exclude=None, mod_excludes=None): self.files_count = 0 # number of files analyzed # inheritance graph. It's a directed graph with base classes # pointing to the classes that inherit from them. Also includes interfaces # pointing to classes that implement them. self.graph = nx.DiGraph() if isinstance(startdir, basestring): self.startdirs = [startdir] elif startdir is None: self.startdirs = [] else: self.startdirs = startdir self.startdirs = [ os.path.expandvars(os.path.expanduser(d)) for d in self.startdirs ] if mod_excludes is None: self.mod_excludes = set(['enthought', 'zope', 'ast']) else: self.mod_excludes = mod_excludes self.modinfo = {} # maps module pathnames to PythonSourceFileAnalyzers self.fileinfo = { } # maps filenames to (PythonSourceFileAnalyzer, modtime) self.class_map = {} # map of classname to ClassInfo for the class for pyfile in find_files(self.startdirs, "*.py", exclude): self.analyze_file(pyfile)
def __init__(self, startdir=None, exclude=None, mod_excludes=None, direxclude=None): self.files_count = 0 # number of files analyzed # inheritance graph. It's a directed graph with base classes pointing # to the classes that inherit from them. Also includes interfaces # pointing to classes that implement them. self.graph = nx.DiGraph() if isinstance(startdir, basestring): self.startdirs = [startdir] elif startdir is None: self.startdirs = [] else: self.startdirs = startdir self.startdirs = [os.path.expandvars(os.path.expanduser(d)) for d in self.startdirs] if mod_excludes is None: self.mod_excludes = set(["traits", "zope", "ast"]) else: self.mod_excludes = mod_excludes self.modinfo = {} # maps module pathnames to PythonSourceFileAnalyzers self.fileinfo = {} # maps filenames to (PythonSourceFileAnalyzer, modtime) self.class_map = {} # map of classname to ClassInfo for the class for pyfile in find_files(self.startdirs, "*.py", exclude=exclude, direxclude=direxclude): self.analyze_file(pyfile)
def test_quickstart(self):
    """plugin_quickstart with all options must create the expected tree,
    then fail on an existing directory and on extra positional args."""
    # All options.
    argv = [
        'quickstart', 'foobar', '-c', 'FooBar', '-g', 'component', '-v',
        '1.1', '-d', self.tdir
    ]
    parser = _get_plugin_parser()
    options, args = parser.parse_known_args(argv)
    retval = plugin_quickstart(parser, options, args)
    self.assertEqual(retval, 0)
    # verify generated files and directories (showdirs=True includes dirs)
    fandd = find_files(self.tdir, showdirs=True)
    self.assertEqual(
        set([os.path.basename(f) for f in fandd]),
        set([
            'foobar', 'src', 'docs', 'setup.cfg', 'setup.py', 'MANIFEST.in',
            '__init__.py', 'conf.py', 'usage.rst', 'index.rst',
            'srcdocs.rst', 'pkgdocs.rst', 'foobar.py', 'README.txt', 'test',
            'test_foobar.py'
        ]))

    # Errors.
    # running again against the same directory must raise OSError
    code = 'plugin_quickstart(parser, options, args)'
    assert_raises(
        self, code, globals(), locals(), OSError,
        "Can't create directory '%s' because it already exists." %
        os.path.join(self.tdir, 'foobar'))

    # extra positional argument => error return code
    argv = ['quickstart', 'foobar', 'stuff']
    parser = _get_plugin_parser()
    options, args = parser.parse_known_args(argv)
    retval = plugin_quickstart(parser, options, args)
    self.assertEqual(retval, -1)
def _get_py_files(distdir, pred=None, dirpred=None):
    """Return Python source files under *distdir*.

    pred: optional file predicate; when None, a default is used that
        rejects 'setup.py', '__init__.py', and anything in a test path.
    dirpred: optional directory predicate forwarded to find_files.
    """
    if pred is None:
        def pred(fname):
            pieces = fname.split(os.sep)
            if pieces[-1] == 'setup.py' or pieces[-1] == '__init__.py':
                return False
            if 'test' in pieces:
                return False
            return fname.endswith('.py')
    return list(find_files(distdir, match=pred, dirmatch=dirpred))
def _get_py_files(distdir):
    """Return the non-test, non-boilerplate .py files found under *distdir*."""
    def _pred(fname):
        segments = fname.split(os.sep)
        blocked = segments[-1] in ("setup.py", "__init__.py") or "test" in segments
        return (not blocked) and fname.endswith(".py")

    return list(find_files(distdir, _pred))
def get_env_libpath():
    """Find all of the shared libraries in the current virtual environment
    and return the string that must be added to LD_LIBRARY_PATH (or
    equivalent) in order to locate them.

    Returns '' on platforms with no library-path variable in ``_lpdict``.
    """
    libpathvname = _lpdict.get(sys.platform)
    if libpathvname:
        print "\nScanning virtualenv for shared libraries..."
        topdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.executable)))
        libfiles = []
        if sys.platform.startswith('win'):
            pkgdir = os.path.join(topdir, 'Lib', 'site-packages')
            checker = '*.dll'
        else:
            pkgdir = os.path.join(topdir, 'lib',
                                  'python%s.%s' % sys.version_info[:2],
                                  'site-packages')
            if sys.platform == 'darwin':
                # need both .so and .dylib on OS X, so use a callable matcher
                checker = lambda n: n.endswith('.so') or n.endswith('.dylib')
            else:
                checker = "*.so"

        for d in os.listdir(pkgdir):
            d = os.path.join(pkgdir, d)
            if os.path.isdir(d):
                # find any shared libs that don't have a matching .py bootstrapper
                newlibs = []
                for f in find_files(d, checker):
                    if not os.path.isfile(os.path.splitext(f)[0] + '.py'):
                        newlibs.append(f)
                libfiles.extend(newlibs)

        # if the same library appears multiple times under the same subdir parent, remove
        # it from the libpath.
        # Better to fail due to missing lib than to use one with the wrong bitsize...
        # TODO: add some smarts to figure out desired bitsize and keep the correct lib
        # in the libpath
        bases = {}
        for fname in libfiles:
            bases.setdefault(os.path.basename(fname), []).append(fname)
        if len(bases) < len(libfiles):
            # NOTE(review): every copy of a duplicated name is removed here,
            # unlike get_full_libpath which only drops same-grandparent dups
            for base, paths in bases.items():
                if len(paths) > 1:
                    for p in paths:
                        libfiles.remove(p)

        added = set([os.path.dirname(n) for n in libfiles])
        if added:
            print "adding the following dirs to %s" % libpathvname
            for name in added:
                print name
        return os.pathsep.join(added)
    return ''
def get_env_libpath():
    """Find all of the shared libraries in the current virtual environment
    and return the string that must be added to LD_LIBRARY_PATH (or
    equivalent) in order to locate them.

    Returns '' on platforms with no library-path variable in ``_lpdict``.
    """
    libpathvname = _lpdict.get(sys.platform)
    if libpathvname:
        print "\nScanning virtualenv for shared libraries..."
        topdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.executable)))
        libfiles = []
        if sys.platform.startswith('win'):
            pkgdir = os.path.join(topdir, 'Lib', 'site-packages')
            checker = '*.dll'
        else:
            pkgdir = os.path.join(topdir, 'lib',
                                  'python%s.%s' % sys.version_info[:2],
                                  'site-packages')
            if sys.platform == 'darwin':
                # match both .so and .dylib on OS X via a callable matcher
                checker = lambda n: n.endswith('.so') or n.endswith('.dylib')
            else:
                checker = "*.so"

        for d in os.listdir(pkgdir):
            d = os.path.join(pkgdir, d)
            if os.path.isdir(d):
                # find any shared libs that don't have a matching .py bootstrapper
                newlibs = []
                for f in find_files(d, checker):
                    if not os.path.isfile(os.path.splitext(f)[0]+'.py'):
                        newlibs.append(f)
                libfiles.extend(newlibs)

        # if the same library appears multiple times under the same subdir parent, remove
        # it from the libpath.
        # Better to fail due to missing lib than to use one with the wrong bitsize...
        # TODO: add some smarts to figure out desired bitsize and keep the correct lib
        # in the libpath
        bases = {}
        for fname in libfiles:
            bases.setdefault(os.path.basename(fname), []).append(fname)
        if len(bases) < len(libfiles):
            # NOTE(review): drops every copy of a duplicated basename
            for base, paths in bases.items():
                if len(paths) > 1:
                    for p in paths:
                        libfiles.remove(p)

        added = set([os.path.dirname(n) for n in libfiles])
        if added:
            print "adding the following dirs to %s" % libpathvname
            for name in added:
                print name
        return os.pathsep.join(added)
    return ''
def run_wing(): """Runs the Wing IDE using our template project file.""" parser = OptionParser() parser.add_option( "-w", "--wingpath", action="store", type="string", dest="wingpath", help="location of WingIDE executable" ) parser.add_option( "-p", "--projpath", action="store", type="string", dest="projpath", default="", help="location of WingIDE project file", ) parser.add_option( "-v", "--version", action="store", type="string", dest="version", default="4.0", help="version of WingIDE" ) (options, args) = parser.parse_args(sys.argv[1:]) wingpath = options.wingpath projpath = options.projpath version = options.version if len(version) == 1: version = version + ".0" if not os.path.isfile(projpath): venvdir = os.path.dirname(os.path.dirname(sys.executable)) proj_template = os.path.join(os.path.dirname(venvdir), "config", "wing_proj_template.wpr") projpath = os.path.join(venvdir, "etc", "wingproj.wpr") _modify_wpr_file(proj_template, projpath, version) # in order to find all of our shared libraries, # put their directories in LD_LIBRARY_PATH env = os.environ if sys.platform != "win32": libs = env.get("LD_LIBRARY_PATH", "").split(os.pathsep) rtop = find_up(".git") if not rtop: rtop = find_up(".git") if rtop: rtop = os.path.dirname(rtop) sodirs = set([os.path.dirname(x) for x in find_files(rtop, "*.so")]) libs.extend(sodirs) env["LD_LIBRARY_PATH"] = os.pathsep.join(libs) if sys.platform == "darwin": cmd = ["open", projpath] else: if not wingpath: wingpath = _find_wing() cmd = [wingpath, projpath] try: Popen(cmd, env=env) except Exception as err: print "Failed to run command '%s'." % " ".join(cmd)
def _get_py_files(distdir, pred=None, dirpred=None):
    """Return Python source files under *distdir*.

    pred: optional file predicate; defaults to one rejecting packaging
        boilerplate and test paths.
    dirpred: optional directory predicate forwarded to find_files.
    """
    if pred is None:
        def pred(fname):
            chunks = fname.split(os.sep)
            if "test" in chunks or chunks[-1] in ("setup.py", "__init__.py"):
                return False
            return fname.endswith(".py")
    return list(find_files(distdir, match=pred, dirmatch=dirpred))
def test_quickstart(self):
    """plugin_quickstart must create the full expected project tree."""
    plugin_quickstart(['foobar', '-v', '1.1', '-d', self.tdir])
    created = find_files(self.tdir, nodirs=False)
    basenames = set(os.path.basename(name) for name in created)
    expected = set(['foobar', 'src', 'docs', 'setup.cfg', 'setup.py',
                    'MANIFEST.in', '__init__.py', 'conf.py', 'usage.rst',
                    'index.rst', 'srcdocs.rst', 'pkgdocs.rst', 'foobar.py',
                    'README.txt', 'test', 'test_foobar.py'])
    self.assertEqual(basenames, expected)
def test_quickstart(self):
    """plugin_quickstart (argparse options form) must create the full tree."""
    opts = _get_plugin_parser().parse_args(
        ['quickstart', 'foobar', '-v', '1.1', '-d', self.tdir])
    plugin_quickstart(opts)
    created = find_files(self.tdir, nodirs=False)
    basenames = set(os.path.basename(name) for name in created)
    expected = set(['foobar', 'src', 'docs', 'setup.cfg', 'setup.py',
                    'MANIFEST.in', '__init__.py', 'conf.py', 'usage.rst',
                    'index.rst', 'srcdocs.rst', 'pkgdocs.rst', 'foobar.py',
                    'README.txt', 'test', 'test_foobar.py'])
    self.assertEqual(basenames, expected)
def run_wing(): """Runs the Wing IDE using our template project file.""" wingpath = None projpath = "" for arg in sys.argv[1:]: if arg.startswith("--wingpath="): wingpath = arg.split("=")[1] elif arg.startswith("--proj="): projpath = arg.split("=")[1] if not wingpath: if sys.platform == "win32": wname = "wing.exe" locs = [r"C:\Program Files (x86)\WingIDE 3.2"] elif sys.platform == "darwin": wname = "wing" locs = ["/Applications/WingIDE.app/Contents/MacOS", "/Applications/Wing/WingIDE.app/Contents/MacOS"] else: wname = "wing3.2" locs = ["/usr/bin", "/usr/sbin", "/usr/local/bin"] wingpath = find_in_path(wname) # searches PATH if not wingpath: wingpath = find_in_dir_list(wname, locs) # look in common places if not wingpath: raise OSError("%s was not found in PATH or in any of the common places." % wname) if not os.path.isfile(projpath): venvdir = os.path.dirname(os.path.dirname(sys.executable)) projpath = os.path.join(venvdir, "etc", "wingproj.wpr") if sys.platform == "darwin": _modify_wpr_file(projpath) # have to put virtualenv sys path info in wing project file on Mac # in order to find all of our shared libraries, # put their directories in LD_LIBRARY_PATH env = os.environ if sys.platform != "win32": libs = env.get("LD_LIBRARY_PATH", "").split(os.pathsep) rtop = find_up(".git") if not rtop: rtop = find_up(".git") if rtop: rtop = os.path.dirname(rtop) sodirs = set([os.path.dirname(x) for x in find_files(rtop, "*.so")]) libs.extend(sodirs) env["LD_LIBRARY_PATH"] = os.pathsep.join(libs) try: Popen([wingpath, projpath], env=env) except Exception as err: print "Failed to run wing executable (%s) using project (%s)." % (wingpath, projpath)
def _analyze(self):
    """Gather import and class inheritance information from the source trees
    under the specified set of starting directories.

    Populates ``self.graph`` (bases/interfaces -> deriving/implementing
    classes) and ``self.ifaces``.
    """
    fileinfo = {}

    # gather python files from the specified starting directories
    # and parse them, extracting class and import information
    for pyfile in find_files(self.startdirs, "*.py", self.exclude):
        myvisitor = PythonSourceFileAnalyser(pyfile)
        # in order to get this to work with the 'ast' lib, I have
        # to read using universal newlines and append a newline
        # to the string I read for some files. The 'compiler' lib
        # didn't have this problem. :(
        f = open(pyfile, 'Ur')
        try:
            for node in ast.walk(ast.parse(f.read()+'\n', pyfile)):
                myvisitor.visit(node)
        finally:
            f.close()
        fileinfo[get_module_path(pyfile)] = myvisitor

    # now translate any indirect imports into absolute module pathnames
    # NOTE: only indirect imports within the set of specified source
    #       trees will be fully resolved, i.e., if a file in your set
    #       of source trees imports
    #       openmdao.main.api but you don't include openmdao.main in your
    #       list of source trees, any imports within openmdao.main.api
    #       won't be included in the translation. This means that
    #       openmdao.main.api.Component will not be translated to
    #       openmdao.main.component.Component like it should.
    for visitor in fileinfo.values():
        visitor.translate(fileinfo)

    # build the inheritance/interface graph
    for visitor in fileinfo.values():
        for classname, classinfo in visitor.classes.items():
            for base in classinfo.bases:
                self.graph.add_edge(classname, base)
            for impl in classinfo.impls:
                self.ifaces.add(impl)
                self.graph.add_edge(classname, impl)

    # flip orientation of inheritance graph so we can find all classes
    # that inherit from a particular base more easily
    self.graph = self.graph.reverse(copy=False)
def _analyze(self):
    """Gather import and class inheritance information from the source trees
    under the specified set of starting directories.

    Populates ``self.graph`` (bases/interfaces -> deriving/implementing
    classes) and ``self.ifaces``.
    """
    fileinfo = {}

    # gather python files from the specified starting directories
    # and parse them, extracting class and import information
    for pyfile in find_files(self.startdirs, "*.py", self.exclude):
        myvisitor = PythonSourceFileAnalyser(pyfile)
        # in order to get this to work with the 'ast' lib, I have
        # to read using universal newlines and append a newline
        # to the string I read for some files. The 'compiler' lib
        # didn't have this problem. :(
        f = open(pyfile, 'Ur')
        try:
            for node in ast.walk(ast.parse(f.read() + '\n', pyfile)):
                myvisitor.visit(node)
        finally:
            f.close()
        fileinfo[get_module_path(pyfile)] = myvisitor

    # now translate any indirect imports into absolute module pathnames
    # NOTE: only indirect imports within the set of specified source
    #       trees will be fully resolved, i.e., if a file in your set
    #       of source trees imports
    #       openmdao.main.api but you don't include openmdao.main in your
    #       list of source trees, any imports within openmdao.main.api
    #       won't be included in the translation. This means that
    #       openmdao.main.api.Component will not be translated to
    #       openmdao.main.component.Component like it should.
    for visitor in fileinfo.values():
        visitor.translate(fileinfo)

    # build the inheritance/interface graph
    for visitor in fileinfo.values():
        for classname, classinfo in visitor.classes.items():
            for base in classinfo.bases:
                self.graph.add_edge(classname, base)
            for impl in classinfo.impls:
                self.ifaces.add(impl)
                self.graph.add_edge(classname, impl)

    # flip orientation of inheritance graph so we can find all classes
    # that inherit from a particular base more easily
    self.graph = self.graph.reverse(copy=False)
def test_find_files(self):
    """Exercise find_files over a prepared temp tree: default match,
    glob patterns, callable matchers, excludes, and direxclude."""
    # find all files
    flist = find_files(self.tempdir)
    self.assertEqual(
        set([os.path.basename(f) for f in flist]),
        set([
            'd1d1f1.exe', 'd1d2f2', '_d2d1f1.foo', '_d2d2f1.txt',
            'd2d4d1f1.blah'
        ]))

    # find all .exe files
    flist = find_files(self.tempdir, '*.exe')
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe']))

    # find exe files or files starting with an underscore
    matcher = lambda name: fnmatch(name, '*.exe') or name.startswith('_')
    flist = find_files(self.tempdir, matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set([
                         'd1d1f1.exe',
                         '_d2d1f1.foo',
                         '_d2d2f1.txt',
                     ]))

    # find all files except .exe files
    flist = find_files(self.tempdir, exclude='*.exe')
    self.assertEqual(
        set([os.path.basename(f) for f in flist]),
        set(['d1d2f2', '_d2d1f1.foo', '_d2d2f1.txt', 'd2d4d1f1.blah']))

    # find all files except .exe files and files starting with '_'
    flist = find_files(self.tempdir, exclude=matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d2f2', 'd2d4d1f1.blah']))

    # only match .exe but exclude .exe and starting with '_', which results in no matches
    flist = find_files(self.tempdir, match='*.exe', exclude=matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]), set([]))

    # find all files except those under directories staring with '_'
    flist = find_files(self.tempdir, direxclude='_*')
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe', 'd1d2f2']))
def filedict(path):
    '''
    Create a nested dictionary for a file structure with
    names relative to the starting directory.
    '''
    rootlen = len(path)
    nodes = {path: {}}
    for filename in find_files(path, showdirs=True):
        dirname = split(filename)[0]
        relname = filename[rootlen:]
        if isdir(filename):
            nodes[filename] = {}
            nodes[dirname][relname] = nodes[filename]
            continue
        try:
            nodes[dirname][relname] = getsize(filename)
        except OSError:
            # during a mercurial commit we got an error during
            # getsize() of a lock file that was no longer there,
            # so check file existence here and only raise an exception
            # if the file still exists.
            if exists(filename):
                raise
    return nodes[path]
def filedict(path):
    '''
    Create a nested dictionary for a file structure with names relative to
    the starting directory.  Directories map to nested dicts; files map to
    their size in bytes.
    '''
    rootlen = len(path)  # prefix length stripped from each entry name
    dirs = { path: {} }
    for filename in find_files(path, showdirs=True):
        dirname, basename = split(filename)
        if isdir(filename):
            # link the new subtree dict into its parent
            dirs[filename] = {}
            dirs[dirname][filename[rootlen:]] = dirs[filename]
        else:
            try:
                dirs[dirname][filename[rootlen:]] = getsize(filename)
            except OSError as err:
                # during a mercurial commit we got an error during
                # getsize() of a lock file that was no longer there,
                # so check file existence here and only raise an exception
                # if the file still exists.
                if exists(filename):
                    raise
    return dirs[path]
def test_find_files(self):
    """find_files: default, glob, callable matcher, exclude, and combined."""
    def names(files):
        return set(os.path.basename(f) for f in files)

    # no filter -> everything
    self.assertEqual(names(find_files(self.tempdir)),
                     set(['bar.exe', 'somefile']))
    # glob filter
    self.assertEqual(names(find_files(self.tempdir, '*.exe')),
                     set(['bar.exe']))
    # callable filter matching both files
    matcher = lambda name: fnmatch(name, '*.exe') or fnmatch(
        name, '*some*')
    self.assertEqual(names(find_files(self.tempdir, matcher)),
                     set(['bar.exe', 'somefile']))
    # glob exclude
    self.assertEqual(names(find_files(self.tempdir, exclude='*.exe')),
                     set(['somefile']))
    # callable exclude removes everything
    self.assertEqual(names(find_files(self.tempdir, exclude=matcher)),
                     set([]))
    # match and exclude combined: exclude wins
    self.assertEqual(
        names(find_files(self.tempdir, match='*.exe', exclude=matcher)),
        set([]))
def test_find_files(self):
    """Exercise find_files over a prepared temp tree: default match,
    glob patterns, callable matchers, excludes, and direxclude."""
    # find all files
    flist = find_files(self.tempdir)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe', 'd1d2f2', '_d2d1f1.foo',
                          '_d2d2f1.txt', 'd2d4d1f1.blah']))

    # find all .exe files
    flist = find_files(self.tempdir, '*.exe')
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe']))

    # find exe files or files starting with an underscore
    matcher = lambda name: fnmatch(name, '*.exe') or name.startswith('_')
    flist = find_files(self.tempdir, matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe', '_d2d1f1.foo', '_d2d2f1.txt', ]))

    # find all files except .exe files
    flist = find_files(self.tempdir, exclude='*.exe')
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d2f2', '_d2d1f1.foo', '_d2d2f1.txt',
                          'd2d4d1f1.blah']))

    # find all files except .exe files and files starting with '_'
    flist = find_files(self.tempdir, exclude=matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d2f2', 'd2d4d1f1.blah']))

    # only match .exe but exclude .exe and starting with '_', which results in no matches
    flist = find_files(self.tempdir, match='*.exe', exclude=matcher)
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set([]))

    # find all files except those under directories staring with '_'
    flist = find_files(self.tempdir, direxclude='_*')
    self.assertEqual(set([os.path.basename(f) for f in flist]),
                     set(['d1d1f1.exe', 'd1d2f2']))
def test_find_files(self):
    """find_files filtering behavior: patterns, callables, and excludes."""
    def basenames(found):
        return set(os.path.basename(f) for f in found)

    # unfiltered scan returns both files
    self.assertEqual(basenames(find_files(self.tempdir)),
                     set(['bar.exe', 'somefile']))
    # glob pattern keeps only the .exe
    self.assertEqual(basenames(find_files(self.tempdir, '*.exe')),
                     set(['bar.exe']))
    # callable matcher covering both names
    matcher = lambda name: fnmatch(name, '*.exe') or fnmatch(name, '*some*')
    self.assertEqual(basenames(find_files(self.tempdir, matcher)),
                     set(['bar.exe', 'somefile']))
    # glob exclusion
    self.assertEqual(basenames(find_files(self.tempdir, exclude='*.exe')),
                     set(['somefile']))
    # callable exclusion removes everything
    self.assertEqual(basenames(find_files(self.tempdir, exclude=matcher)),
                     set([]))
    # match plus exclude: exclusion wins
    self.assertEqual(
        basenames(find_files(self.tempdir, match='*.exe', exclude=matcher)),
        set([]))
def run_wing(): """Runs the Wing IDE using our template project file.""" parser = OptionParser() parser.add_option("-w", "--wingpath", action="store", type="string", dest="wingpath", help="location of WingIDE executable") parser.add_option("-p", "--projpath", action="store", type="string", dest="projpath", default='', help="location of WingIDE project file") parser.add_option("-v", "--version", action="store", type="string", dest="version", default='4.0', help="version of WingIDE") (options, args) = parser.parse_args(sys.argv[1:]) wingpath = options.wingpath projpath = options.projpath version = options.version if len(version) == 1: version = version + '.0' if not os.path.isfile(projpath): venvdir = os.path.dirname(os.path.dirname(sys.executable)) proj_template = os.path.join(os.path.dirname(venvdir), 'config', 'wing_proj_template.wpr') projpath = os.path.join(venvdir, 'etc', 'wingproj.wpr') _modify_wpr_file(proj_template, projpath, version) # in order to find all of our shared libraries, # put their directories in LD_LIBRARY_PATH env = os.environ if sys.platform != 'win32': libs = env.get('LD_LIBRARY_PATH', '').split(os.pathsep) rtop = find_up('.git') if not rtop: rtop = find_up('.git') if rtop: rtop = os.path.dirname(rtop) sodirs = set( [os.path.dirname(x) for x in find_files(rtop, '*.so')]) libs.extend(sodirs) env['LD_LIBRARY_PATH'] = os.pathsep.join(libs) if sys.platform == 'darwin': cmd = ['open', projpath] else: if not wingpath: wingpath = _find_wing() cmd = [wingpath, projpath] try: Popen(cmd, env=env) except Exception as err: print "Failed to run command '%s'." % ' '.join(cmd)
def run_wing(): """Runs the Wing IDE using our template project file.""" parser = OptionParser() parser.add_option("-w", "--wingpath", action="store", type="string", dest="wingpath", help="location of WingIDE executable") parser.add_option("-p", "--projpath", action="store", type="string", dest="projpath", default='', help="location of WingIDE project file") parser.add_option("-v", "--version", action="store", type="string", dest="version", default='5.0', help="version of WingIDE") (options, args) = parser.parse_args(sys.argv[1:]) wingpath = options.wingpath projpath = options.projpath version = options.version if len(version) == 1: version = version + '.0' if not os.path.isfile(projpath): # Support different versions of Wing major_rev = int(version.split('.')[0]) if major_rev > 4: wingproj_file = 'wing_proj_template5.wpr' else: wingproj_file = 'wing_proj_template.wpr' venvdir = os.path.dirname(os.path.dirname(sys.executable)) proj_template = os.path.join(os.path.dirname(venvdir), 'config', wingproj_file) projpath = os.path.join(venvdir, 'etc', 'wingproj.wpr') _modify_wpr_file(proj_template, projpath, version) # in order to find all of our shared libraries, # put their directories in LD_LIBRARY_PATH env = {} env.update(os.environ) if sys.platform == 'darwin': libpname = 'DYLD_LIBRARY_PATH' libext = '*.dyld' elif not sys.platform.startswith('win'): libpname = 'LD_LIBRARY_PATH' libext = '*.so' else: libpname = None if libpname: libs = env.get(libpname, '').split(os.pathsep) rtop = find_up('.git') if not rtop: rtop = find_up('.git') if rtop: rtop = os.path.dirname(rtop) sodirs = set( [os.path.dirname(x) for x in find_files(rtop, libext)]) libs.extend(sodirs) env[libpname] = os.pathsep.join(libs) if sys.platform == 'darwin': cmd = ['open', projpath] else: if not wingpath: wingpath = _find_wing() cmd = [wingpath, projpath] try: print "wing command: ", ' '.join(cmd) Popen(cmd, env=env) except Exception as err: print "Failed to run command '%s'." % ' '.join(cmd)
def tearDown(self):
    """Drop any modules imported from the temp dir, then delete the dir."""
    # Un-import every .py file under the temp tree so later tests don't
    # see stale modules in sys.modules.
    for source_file in find_files(self.tdir, "*.py"):
        module_name = get_module_path(source_file)
        sys.modules.pop(module_name, None)
    shutil.rmtree(self.tdir, onerror=onerror)
def test_basic(self):
    """Exercise a full plugin lifecycle: quickstart, makedist, install,
    list, docs lookup, uninstall, and verification of removal."""
    logging.debug('')
    logging.debug('test_basic')

    # Just run through a complete cycle.
    orig_dir = os.getcwd()
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    # Quickstart.
    logging.debug('')
    logging.debug('quickstart')
    os.chdir(self.tdir)
    try:
        argv = ['quickstart', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_quickstart(parser, options, args)
        self.assertEqual(retval, 0)
        fandd = find_files(self.tdir, showdirs=True)
        self.assertEqual(set([os.path.basename(f) for f in fandd]),
                         set(['foobar', 'src', 'docs', 'setup.cfg',
                              'setup.py', 'MANIFEST.in', '__init__.py',
                              'conf.py', 'usage.rst', 'index.rst',
                              'srcdocs.rst', 'pkgdocs.rst', 'foobar.py',
                              'README.txt', 'test', 'test_foobar.py']))
    finally:
        os.chdir(orig_dir)

    # Makedist.
    logging.debug('')
    logging.debug('makedist')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
        if sys.platform == 'win32':
            self.assertTrue(os.path.exists('foobar-0.1.zip'))
        else:
            self.assertTrue(os.path.exists('foobar-0.1.tar.gz'))
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    # Existing distribution error.
    logging.debug('')
    logging.debug('makedist error')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, -1)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)
    self.assertTrue('already exists' in captured_stderr)

    # Install
    logging.debug('')
    logging.debug('install')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(self.tdir)
    try:
        argv = ['install', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_install(parser, options, args,
                                capture='install.out')
        with open('install.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    try:
        # List in subprocess to grab updated package environment.
        logging.debug('')
        logging.debug('list')
        os.chdir(self.tdir)
        stdout = open('list.out', 'w')
        try:
            check_call(('plugin', 'list', '-g', 'driver',
                        '-g', 'component', '--external'),
                       stdout=stdout, stderr=STDOUT)
        finally:
            stdout.close()
        with open('list.out', 'r') as inp:
            captured_stdout = inp.read()
        os.remove('list.out')
        os.chdir(orig_dir)
        logging.debug('captured subprocess output:')
        logging.debug(captured_stdout)
        self.assertTrue('foobar.foobar.Foobar' in captured_stdout)

        # Docs.
        logging.debug('')
        logging.debug('docs')
        argv = ['docs', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        url = find_docs_url(options.plugin_dist_name)
        # BUG FIX: find_docs_url() returns a 'file://' URL, so strip the
        # protocol header before comparing filesystem paths (the sibling
        # variants of this test already do this).
        url = url.replace('file://', '')
        expected = os.path.join(self.tdir, 'foobar', 'src', 'foobar',
                                'sphinx_build', 'html', 'index.html')
        self.assertEqual(os.path.realpath(url), os.path.realpath(expected))
    finally:
        # Uninstall
        logging.debug('')
        logging.debug('uninstall')
        with open('pip.in', 'w') as out:
            out.write('y\n')
        stdin = open('pip.in', 'r')
        stdout = open('pip.out', 'w')
        # On EC2 Windows, 'pip' generates an absurdly long temp directory
        # name, apparently to allow backing-out of the uninstall.
        # The name is so long Windows can't handle it. So we try to
        # avoid that by indirectly influencing mkdtemp().
        env = os.environ.copy()
        env['TMP'] = os.path.expanduser('~')
        try:
            check_call(('pip', 'uninstall', 'foobar'), env=env,
                       stdin=stdin, stdout=stdout, stderr=STDOUT)
        finally:
            stdin.close()
            stdout.close()
        with open('pip.out', 'r') as inp:
            captured_stdout = inp.read()
        os.remove('pip.in')
        os.remove('pip.out')
        logging.debug('captured stdout:')
        logging.debug(captured_stdout)

    # Show removed.
    logging.debug('')
    logging.debug('list removed')
    os.chdir(self.tdir)
    stdout = open('list.out', 'w')
    try:
        check_call(('plugin', 'list', '--external'),
                   stdout=stdout, stderr=STDOUT)
    finally:
        stdout.close()
    with open('list.out', 'r') as inp:
        captured_stdout = inp.read()
    os.remove('list.out')
    os.chdir(orig_dir)
    logging.debug('captured subprocess output:')
    logging.debug(captured_stdout)
    self.assertFalse('foobar.foobar.Foobar' in captured_stdout)
def run_wing(): """Runs the Wing IDE using our template project file.""" parser = OptionParser() parser.add_option("-w", "--wingpath", action="store", type="string", dest="wingpath", help="location of WingIDE executable") parser.add_option("-p", "--projpath", action="store", type="string", dest="projpath", default='', help="location of WingIDE project file") parser.add_option("-v", "--version", action="store", type="string", dest="version", default='5.0', help="version of WingIDE") (options, args) = parser.parse_args(sys.argv[1:]) wingpath = options.wingpath projpath = options.projpath version = options.version if len(version)==1: version = version + '.0' if not os.path.isfile(projpath): # Support different versions of Wing major_rev = int(version.split('.')[0]) if major_rev > 4: wingproj_file = 'wing_proj_template5.wpr' else: wingproj_file = 'wing_proj_template.wpr' venvdir = os.path.dirname(os.path.dirname(sys.executable)) proj_template = os.path.join(os.path.dirname(venvdir), 'config',wingproj_file) projpath = os.path.join(venvdir, 'etc', 'wingproj.wpr') _modify_wpr_file(proj_template, projpath, version) # in order to find all of our shared libraries, # put their directories in LD_LIBRARY_PATH env = {} env.update(os.environ) if sys.platform == 'darwin': libpname = 'DYLD_LIBRARY_PATH' libext = '*.dyld' elif not sys.platform.startswith('win'): libpname = 'LD_LIBRARY_PATH' libext = '*.so' else: libpname = None if libpname: libs = env.get(libpname,'').split(os.pathsep) rtop = find_up('.git') if not rtop: rtop = find_up('.git') if rtop: rtop = os.path.dirname(rtop) sodirs = set([os.path.dirname(x) for x in find_files(rtop, libext)]) libs.extend(sodirs) env[libpname] = os.pathsep.join(libs) if sys.platform == 'darwin': cmd = ['open', projpath] else: if not wingpath: wingpath = _find_wing() cmd = [wingpath, projpath] try: print "wing command: ",' '.join(cmd) Popen(cmd, env=env) except Exception as err: print "Failed to run command '%s'." % ' '.join(cmd)
def test_basic(self):
    """Exercise a full plugin lifecycle: quickstart, makedist, install,
    list, docs lookup, uninstall, and verification of removal.

    Runs real subprocesses ('plugin', 'pip'), so this test needs the
    virtualenv's scripts on PATH.
    """
    logging.debug('')
    logging.debug('test_basic')

    # Just run through a complete cycle.
    # Saved so the stdout/stderr swaps and chdirs below can be undone
    # in each finally block.
    orig_dir = os.getcwd()
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    # Quickstart.
    logging.debug('')
    logging.debug('quickstart')
    os.chdir(self.tdir)
    try:
        argv = ['quickstart', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_quickstart(parser, options, args)
        self.assertEqual(retval, 0)
        # Verify the expected skeleton files/dirs were generated.
        fandd = find_files(self.tdir, showdirs=True)
        self.assertEqual(
            set([os.path.basename(f) for f in fandd]),
            set([
                'foobar', 'src', 'docs', 'setup.cfg', 'setup.py',
                'MANIFEST.in', '__init__.py', 'conf.py', 'usage.rst',
                'index.rst', 'srcdocs.rst', 'pkgdocs.rst', 'foobar.py',
                'README.txt', 'test', 'test_foobar.py'
            ]))
    finally:
        os.chdir(orig_dir)

    # Makedist.
    logging.debug('')
    logging.debug('makedist')
    # Capture in-process output; subprocess output goes to makedist.out.
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
        # sdist format differs by platform.
        if sys.platform == 'win32':
            self.assertTrue(os.path.exists('foobar-0.1.zip'))
        else:
            self.assertTrue(os.path.exists('foobar-0.1.tar.gz'))
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    # Existing distribution error.
    # A second makedist over the same dist must fail with retval -1.
    logging.debug('')
    logging.debug('makedist error')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, -1)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)
    self.assertTrue('already exists' in captured_stderr)

    # Install
    logging.debug('')
    logging.debug('install')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(self.tdir)
    try:
        argv = ['install', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_install(parser, options, args,
                                capture='install.out')
        with open('install.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    try:
        # List in subprocess to grab updated package environment.
        logging.debug('')
        logging.debug('list')
        os.chdir(self.tdir)
        stdout = open('list.out', 'w')
        try:
            check_call(('plugin', 'list', '-g', 'driver',
                        '-g', 'component', '--external'),
                       stdout=stdout, stderr=STDOUT)
        finally:
            stdout.close()
        with open('list.out', 'r') as inp:
            captured_stdout = inp.read()
        os.remove('list.out')
        os.chdir(orig_dir)
        logging.debug('captured subprocess output:')
        logging.debug(captured_stdout)
        self.assertTrue('foobar.foobar.Foobar' in captured_stdout)

        # Docs.
        logging.debug('')
        logging.debug('docs')
        argv = ['docs', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        url = find_docs_url(options.plugin_dist_name)
        # Strip off the file protocol header
        url = url.replace('file://', '')
        expected = os.path.join(self.tdir, 'foobar', 'src', 'foobar',
                                'sphinx_build', 'html', 'index.html')
        self.assertEqual(os.path.realpath(url), os.path.realpath(expected))
    finally:
        # Uninstall
        # Always runs so the installed 'foobar' never leaks into other
        # tests, even if the list/docs checks above failed.
        logging.debug('')
        logging.debug('uninstall')
        pip_in = os.path.join(self.tdir, 'pip.in')
        pip_out = os.path.join(self.tdir, 'pip.out')
        # pip prompts for confirmation; feed it 'y' via stdin.
        with open(pip_in, 'w') as out:
            out.write('y\n')
        stdin = open(pip_in, 'r')
        stdout = open(pip_out, 'w')
        # On EC2 Windows, 'pip' generates an absurdly long temp directory
        # name, apparently to allow backing-out of the uninstall.
        # The name is so long Windows can't handle it. So we try to
        # avoid that by indirectly influencing mkdtemp().
        env = os.environ.copy()
        env['TMP'] = os.path.expanduser('~')
        try:
            # the following few lines are to prevent the system level pip
            # from being used instead of the local virtualenv version.
            pipexe = 'pip-%d.%d' % (sys.version_info[0],
                                    sys.version_info[1])
            pipexe = find_in_path(pipexe)
            if pipexe is None:
                pipexe = 'pip'
            check_call((pipexe, 'uninstall', 'foobar'), env=env,
                       stdin=stdin, stdout=stdout, stderr=STDOUT)
        finally:
            stdin.close()
            stdout.close()
        with open(pip_out, 'r') as inp:
            captured_stdout = inp.read()
        logging.debug('captured stdout:')
        logging.debug(captured_stdout)

    # Show removed.
    logging.debug('')
    logging.debug('list removed')
    os.chdir(self.tdir)
    stdout = open('list.out', 'w')
    try:
        check_call(('plugin', 'list', '--external'),
                   stdout=stdout, stderr=STDOUT)
    finally:
        stdout.close()
    with open('list.out', 'r') as inp:
        captured_stdout = inp.read()
    os.remove('list.out')
    os.chdir(orig_dir)
    logging.debug('captured subprocess output:')
    logging.debug(captured_stdout)
    self.assertFalse('foobar.foobar.Foobar' in captured_stdout)
def get_full_libpath():
    """Find all of the shared libraries in the current virtual environment and print
    the required LD_LIBRARY_PATH string (or equivalent) necessary to find them.
    """
    # _lpdict maps sys.platform -> library-path env var name; platforms not
    # in the dict produce no output at all.
    libpathvname = _lpdict.get(sys.platform)
    if libpathvname:
        # Existing entries of the env var are kept and appended after the
        # newly discovered directories.
        lpcontents = os.environ.get(libpathvname) or ''
        libpaths = [lib for lib in lpcontents.split(os.pathsep)
                    if lib.strip()]
        # topdir is the virtualenv root (two levels above the interpreter).
        topdir = os.path.dirname(
            os.path.dirname(os.path.abspath(sys.executable)))
        # Per-directory scan results are cached (pickled) so repeated calls
        # avoid re-walking unchanged site-packages subdirs.
        cachefile = os.path.join(topdir, '_libpath_cache')
        libfiles = []
        if os.path.isfile(cachefile):
            with open(cachefile, "rb") as f:
                cache = pickle.load(f)
        else:
            cache = {}
        if sys.platform.startswith('win'):
            pkgdir = os.path.join(topdir, 'Lib', 'site-packages')
            checker = '*.dll'
        else:
            pkgdir = os.path.join(topdir, 'lib',
                                  'python%s.%s' % sys.version_info[:2],
                                  'site-packages')
            if sys.platform == 'darwin':
                # find_files accepts either a glob or a predicate; on OS X
                # both .so and .dylib count as shared libraries.
                checker = lambda n: n.endswith('.so') or n.endswith('.dylib')
            else:
                checker = "*.so"
        for d in os.listdir(pkgdir):
            d = os.path.join(pkgdir, d)
            if os.path.isdir(d):
                if d in cache:
                    libfiles.extend(cache[d])
                else:
                    # find any shared libs that don't have a matching .py bootstrapper
                    newlibs = [
                        x for x in find_files(d, checker)
                        if not os.path.isfile(os.path.splitext(x)[0] + '.py')
                    ]
                    cache[d] = newlibs
                    libfiles.extend(newlibs)
        # Persist the (possibly updated) cache for the next invocation.
        with open(cachefile, "wb") as f:
            pickle.dump(cache, f, pickle.HIGHEST_PROTOCOL)

        # if the same library appears multiple times under the same subdir parent, remove
        # it from the libpath.
        # Better to fail due to missing lib than to use one with the wrong bitsize...
        # TODO: add some smarts to figure out desired bitsize and keep the correct lib
        # in the libpath
        bases = {}
        for fname in libfiles:
            bases.setdefault(os.path.basename(fname), []).append(fname)
        if len(bases) != len(libfiles):
            for base, paths in bases.items():
                if len(paths) > 1:
                    # NOTE(review): this drops EVERY copy of a duplicated
                    # basename whose ancestor dir is a cache key, not just
                    # the extras — confirm that is intended.
                    for p in paths:
                        d = os.path.dirname(p)
                        while d:
                            if d in cache:
                                libfiles.remove(p)
                                break
                            d = os.path.dirname(d)

        added = [os.path.dirname(n) for n in libfiles]
        # De-duplicate while preserving order: new dirs first, then any
        # pre-existing env-var entries.
        final = []
        seen = set()
        for p in added + libpaths:
            if p not in seen:
                seen.add(p)
                final.append(p)
        print os.pathsep.join(final)
def update_libpath(options=None):
    """Find all of the shared libraries in the current virtual environment and
    modify the activate script to put their directories in LD_LIBRARY_PATH
    (or its equivalent)
    """
    # Map platform -> env var name; raises KeyError on unsupported
    # platforms (e.g. win32), which has no such variable.
    ldict = {"linux2": "LD_LIBRARY_PATH", "linux": "LD_LIBRARY_PATH",
             "darwin": "DYLD_LIBRARY_PATH"}
    libpathvname = ldict[sys.platform]
    if options is None:
        # Parse argv only when called as a console entry point.
        parser = ArgumentParser(
            description="adds any shared library paths found in the current python environment to %s" % libpathvname
        )
        parser.usage = "update_libpath [options]"
        options = parser.parse_args()
    if libpathvname:
        # topdir is the virtualenv root (two levels above the interpreter).
        topdir = os.path.dirname(os.path.dirname(sys.executable))
        bindir = os.path.join(topdir, "bin")
        pkgdir = os.path.join(topdir, "lib",
                              "python%s.%s" % sys.version_info[:2],
                              "site-packages")
        sofiles = [os.path.abspath(x) for x in find_files(pkgdir, "*.so")]
        # Keep only dirs of .so files that lack a matching .py bootstrapper.
        final = set()
        for f in sofiles:
            pyf = os.path.splitext(f)[0] + ".py"
            if not os.path.exists(pyf):
                final.add(os.path.dirname(f))
        subdict = {"libpath": libpathvname,
                   "add_on": os.pathsep.join(final)}
        if len(final) > 0:
            # Shell snippet spliced into 'activate'; the BEGIN/END marker
            # lines let a later run find and replace a previous insertion.
            activate_lines = [
                "# BEGIN MODIFICATION\n",
                'if [ -z "$%(libpath)s" ] ; then\n',
                ' %(libpath)s=""\n',
                "fi\n",
                "\n",
                "%(libpath)s=$%(libpath)s:%(add_on)s\n",
                "export %(libpath)s\n",
                "# END MODIFICATION\n",
                "\n",
            ]
            absbin = os.path.abspath(bindir)
            activate_fname = os.path.join(absbin, "activate")
            with open(activate_fname, "r") as f:
                lines = f.readlines()
            # Remove any previous insertion (idempotent re-run).
            try:
                idx = lines.index(activate_lines[0])
                del lines[idx : idx + len(activate_lines)]
            except ValueError:
                pass
            # Insert two lines after the 'export PATH' line of 'activate';
            # raises ValueError if that line is missing.
            idx = lines.index("export PATH\n")
            lines[idx + 2 : idx + 2] = activate_lines
            content = "".join(lines)
            with open(activate_fname, "w") as f:
                f.write(content % subdict)
            print "\nThe 'activate' file has been updated with new values added to %s" % libpathvname
            print "You must deactivate and reactivate your virtual environment for the"
            print "changes to take effect\n"
def _get_setup_options(distdir, metadata):
    """ Return dictionary of setup options. """
    # metadata fields whose setup-option value must be a list
    list_valued = set(['keywords', 'install_requires',
                       'packages', 'classifiers'])

    # mapping of new metadata names to old ones
    meta_to_setup = {
        'name': 'name',
        'version': 'version',
        'keywords': 'keywords',
        'summary': 'description',
        'description': 'long_description',
        'home-page': 'url',
        'download-url': 'download_url',
        'author': 'author',
        'author-email': 'author_email',
        'maintainer': 'maintainer',
        'maintainer-email': 'maintainer_email',
        'license': 'license',
        'classifier': 'classifiers',
        'requires-dist': 'install_requires',
        'entry_points': 'entry_points',
        #'py_modules': 'py_modules',
        'packages': 'packages',
    }

    # populate the package data with sphinx docs
    # we have to list all of the files because setuptools doesn't
    # handle nested directories very well
    pkgdir = os.path.join(distdir, 'src', metadata['name'])
    strip = len(pkgdir) + 1
    data_files = list(find_files(os.path.join(pkgdir, 'sphinx_build',
                                              'html')))
    data_files += list(find_files(os.path.join(pkgdir, 'test'),
                                  exclude="*.py[co]"))
    # store paths relative to the package directory
    data_files = [name[strip:] for name in data_files]

    setup_options = {
        #'packages': [metadata['name']],
        'package_data': {metadata['name']: data_files},
        'package_dir': {'': 'src'},
        'zip_safe': False,
        'include_package_data': True,
    }

    # copy recognized metadata entries over, normalizing string values
    for meta_key, value in metadata.items():
        setup_key = meta_to_setup.get(meta_key)
        if setup_key is None:
            continue
        if isinstance(value, basestring):
            if setup_key in list_valued:
                # newline-separated string -> list of stripped entries
                value = [item.strip() for item in value.split('\n')
                         if item.strip()]
            else:
                value = value.strip()
        setup_options[setup_key] = value
    return setup_options
def _get_setup_options(distdir, metadata):
    """ Return dictionary of setup options. """

    def _convert(setup_name, raw):
        # Normalize a metadata string for its setup-option slot:
        # list-valued options get a newline-split list, others are
        # stripped; non-string values pass through untouched.
        if not isinstance(raw, basestring):
            return raw
        if setup_name in ('keywords', 'install_requires',
                          'packages', 'classifiers'):
            return [piece.strip() for piece in raw.split('\n')
                    if piece.strip()]
        return raw.strip()

    # mapping of new metadata names to old ones
    name_map = {
        'name': 'name',
        'version': 'version',
        'keywords': 'keywords',
        'summary': 'description',
        'description': 'long_description',
        'home-page': 'url',
        'download-url': 'download_url',
        'author': 'author',
        'author-email': 'author_email',
        'maintainer': 'maintainer',
        'maintainer-email': 'maintainer_email',
        'license': 'license',
        'classifier': 'classifiers',
        'requires-dist': 'install_requires',
        'entry_points': 'entry_points',
        #'py_modules': 'py_modules',
        'packages': 'packages',
    }

    # populate the package data with sphinx docs
    # we have to list all of the files because setuptools doesn't
    # handle nested directories very well
    pkg_root = os.path.join(distdir, 'src', metadata['name'])
    prefix_len = len(pkg_root) + 1
    collected = list(find_files(os.path.join(pkg_root, 'sphinx_build',
                                             'html')))
    collected.extend(find_files(os.path.join(pkg_root, 'test'),
                                exclude="*.py[co]"))
    relative = [path[prefix_len:] for path in collected]

    result = {
        #'packages': [metadata['name']],
        'package_data': {metadata['name']: relative},
        'package_dir': {'': 'src'},
        'zip_safe': False,
        'include_package_data': True,
    }

    for meta_name, raw_value in metadata.items():
        if meta_name in name_map:
            setup_name = name_map[meta_name]
            result[setup_name] = _convert(setup_name, raw_value)
    return result
def get_full_libpath():
    """Find all of the shared libraries in the current virtual environment and print
    the required LD_LIBRARY_PATH string (or equivalent) necessary to find them.
    """
    # _lpdict maps sys.platform -> library-path env var name; platforms not
    # in the dict produce no output at all.
    libpathvname = _lpdict.get(sys.platform)
    if libpathvname:
        # Existing entries of the env var are kept and appended after the
        # newly discovered directories.
        lpcontents = os.environ.get(libpathvname) or ''
        libpaths = [lib for lib in lpcontents.split(os.pathsep)
                    if lib.strip()]
        # topdir is the virtualenv root (two levels above the interpreter).
        topdir = os.path.dirname(os.path.dirname(
            os.path.abspath(sys.executable)))
        # Per-directory scan results are cached (pickled) so repeated calls
        # avoid re-walking unchanged site-packages subdirs.
        cachefile = os.path.join(topdir, '_libpath_cache')
        libfiles = []
        if os.path.isfile(cachefile):
            with open(cachefile, "rb") as f:
                cache = pickle.load(f)
        else:
            cache = {}
        if sys.platform.startswith('win'):
            pkgdir = os.path.join(topdir, 'Lib', 'site-packages')
            checker = '*.dll'
        else:
            pkgdir = os.path.join(topdir, 'lib',
                                  'python%s.%s' % sys.version_info[:2],
                                  'site-packages')
            if sys.platform == 'darwin':
                # find_files accepts either a glob or a predicate; on OS X
                # both .so and .dylib count as shared libraries.
                checker = lambda n: n.endswith('.so') or n.endswith('.dylib')
            else:
                checker = "*.so"
        for d in os.listdir(pkgdir):
            d = os.path.join(pkgdir, d)
            if os.path.isdir(d):
                if d in cache:
                    libfiles.extend(cache[d])
                else:
                    # find any shared libs that don't have a matching .py bootstrapper
                    newlibs = [x for x in find_files(d, checker)
                               if not os.path.isfile(
                                   os.path.splitext(x)[0]+'.py')]
                    cache[d] = newlibs
                    libfiles.extend(newlibs)
        # Persist the (possibly updated) cache for the next invocation.
        with open(cachefile, "wb") as f:
            pickle.dump(cache, f, pickle.HIGHEST_PROTOCOL)

        # if the same library appears multiple times under the same subdir parent, remove
        # it from the libpath.
        # Better to fail due to missing lib than to use one with the wrong bitsize...
        # TODO: add some smarts to figure out desired bitsize and keep the correct lib
        # in the libpath
        bases = {}
        for fname in libfiles:
            bases.setdefault(os.path.basename(fname), []).append(fname)
        if len(bases) != len(libfiles):
            for base, paths in bases.items():
                if len(paths) > 1:
                    # NOTE(review): this drops EVERY copy of a duplicated
                    # basename whose ancestor dir is a cache key, not just
                    # the extras — confirm that is intended.
                    for p in paths:
                        d = os.path.dirname(p)
                        while d:
                            if d in cache:
                                libfiles.remove(p)
                                break
                            d = os.path.dirname(d)

        added = [os.path.dirname(n) for n in libfiles]
        # De-duplicate while preserving order: new dirs first, then any
        # pre-existing env-var entries.
        final = []
        seen = set()
        for p in added + libpaths:
            if p not in seen:
                seen.add(p)
                final.append(p)
        print os.pathsep.join(final)
def update_libpath(options=None):
    """Find all of the shared libraries in the current virtual environment and
    modify the activate script to put their directories in LD_LIBRARY_PATH
    (or its equivalent).
    """
    # Map platform -> env var name; raises KeyError on unsupported
    # platforms (e.g. win32), which has no such variable.
    ldict = {
        'linux2': 'LD_LIBRARY_PATH',
        'linux': 'LD_LIBRARY_PATH',
        'darwin': 'DYLD_LIBRARY_PATH',
    }
    libpathvname = ldict[sys.platform]

    if options is None:
        # Parse argv only when called as a console entry point.
        parser = ArgumentParser(description="adds any shared library paths"
                                " found in the current python environment to"
                                " %s" % libpathvname)
        parser.usage = "update_libpath [options]"
        options = parser.parse_args()

    if libpathvname:
        # topdir is the virtualenv root (two levels above the interpreter).
        topdir = os.path.dirname(os.path.dirname(sys.executable))
        bindir = os.path.join(topdir, 'bin')
        pkgdir = os.path.join(topdir, 'lib',
                              'python%s.%s' % sys.version_info[:2],
                              'site-packages')
        sofiles = [os.path.abspath(x) for x in find_files(pkgdir, '*.so')]

        # Keep only dirs of .so files that lack a matching .py bootstrapper.
        final = set()
        for fname in sofiles:
            pyf = os.path.splitext(fname)[0] + '.py'
            if not os.path.exists(pyf):
                final.add(os.path.dirname(fname))

        subdict = {'libpath': libpathvname,
                   'add_on': os.pathsep.join(final)}

        if len(final) > 0:
            # Shell snippet spliced into 'activate'; the BEGIN/END marker
            # lines let a later run find and replace a previous insertion.
            activate_lines = [
                '# BEGIN MODIFICATION\n',
                'if [ -z "$%(libpath)s" ] ; then\n',
                ' %(libpath)s=""\n',
                'fi\n',
                '\n',
                '%(libpath)s=$%(libpath)s:%(add_on)s\n',
                'export %(libpath)s\n',
                '# END MODIFICATION\n',
                '\n',
            ]
            absbin = os.path.abspath(bindir)
            activate_fname = os.path.join(absbin, 'activate')
            with open(activate_fname, 'r') as inp:
                lines = inp.readlines()
            # Remove any previous insertion (idempotent re-run).
            try:
                idx = lines.index(activate_lines[0])
                del lines[idx:idx + len(activate_lines)]
            except ValueError:
                pass
            # Insert two lines after the 'export PATH' line of 'activate';
            # raises ValueError if that line is missing.
            idx = lines.index('export PATH\n')
            lines[idx + 2:idx + 2] = activate_lines
            content = ''.join(lines)
            with open(activate_fname, 'w') as out:
                out.write(content % subdict)
            print "\nThe 'activate' file has been updated with new values" \
                  " added to %s" % libpathvname
            print "You must deactivate and reactivate your virtual environment"
            print "for the changes to take effect\n"
def test_basic(self):
    """Exercise a full plugin lifecycle: quickstart, makedist (twice,
    overwriting), install, list, docs lookup, uninstall, and verification
    of removal. Runs real subprocesses ('plugin', pip)."""
    # Testing in pythonxy fails due to the pip version
    if sys.platform == 'win32':
        pipvers = pip.__version__
        if pipvers.__contains__("xy"):
            raise nose.SkipTest("PythonXY's pip name is non-standard 'pip-2.7xy' and causes test failure when 'pip-2.7' is searched for and not found.")

    logging.debug('')
    logging.debug('test_basic')

    # Just run through a complete cycle.
    # Saved so the stdout/stderr swaps and chdirs below can be undone
    # in each finally block.
    orig_dir = os.getcwd()
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    # Quickstart.
    logging.debug('')
    logging.debug('quickstart')
    os.chdir(self.tdir)
    try:
        argv = ['quickstart', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_quickstart(parser, options, args)
        self.assertEqual(retval, 0)
        # Verify the expected skeleton files/dirs were generated.
        fandd = find_files(self.tdir, showdirs=True)
        self.assertEqual(set([os.path.basename(f) for f in fandd]),
                         set(['foobar', 'src', 'docs', 'setup.cfg',
                              'setup.py', 'MANIFEST.in', '__init__.py',
                              'conf.py', 'usage.rst', 'index.rst',
                              'srcdocs.rst', 'pkgdocs.rst', 'foobar.py',
                              'README.txt', '_static', 'test',
                              'test_foobar.py']))
    finally:
        os.chdir(orig_dir)

    # Makedist.
    logging.debug('')
    logging.debug('makedist')
    # Capture in-process output; subprocess output goes to makedist.out.
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
        # sdist format differs by platform.
        if sys.platform == 'win32':
            self.assertTrue(os.path.exists('foobar-0.1.zip'))
        else:
            self.assertTrue(os.path.exists('foobar-0.1.tar.gz'))
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    # Overwrite existing distribution.
    # Unlike older revisions of this test, a second makedist is expected
    # to succeed (retval 0) by overwriting the previous dist.
    logging.debug('')
    logging.debug('makedist overwrite')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    os.chdir(os.path.join(self.tdir, 'foobar'))
    try:
        argv = ['makedist']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_makedist(parser, options, args,
                                 capture='makedist.out')
        with open('makedist.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    # Install
    logging.debug('')
    logging.debug('install')
    sys.stdout = cStringIO.StringIO()
    sys.stderr = cStringIO.StringIO()
    logdata = ''
    os.chdir(self.tdir)
    try:
        argv = ['install', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        retval = plugin_install(parser, options, args,
                                capture='install.out')
        with open('install.out', 'r') as inp:
            logdata = inp.read()
        self.assertEqual(retval, 0)
    finally:
        captured_stdout = sys.stdout.getvalue()
        captured_stderr = sys.stderr.getvalue()
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr
        os.chdir(orig_dir)
    logging.debug('captured stdout:')
    logging.debug(captured_stdout)
    logging.debug('captured stderr:')
    logging.debug(captured_stderr)
    logging.debug('captured subprocess output:')
    logging.debug(logdata)

    try:
        # List in subprocess to grab updated package environment.
        logging.debug('')
        logging.debug('list')
        os.chdir(self.tdir)
        stdout = open('list.out', 'w')
        try:
            check_call(('plugin', 'list', '-g', 'driver',
                        '-g', 'component', '--external'),
                       stdout=stdout, stderr=STDOUT)
        finally:
            stdout.close()
        with open('list.out', 'r') as inp:
            captured_stdout = inp.read()
        os.remove('list.out')
        os.chdir(orig_dir)
        logging.debug('captured subprocess output:')
        logging.debug(captured_stdout)
        self.assertTrue('foobar.foobar.Foobar' in captured_stdout)

        # Docs.
        logging.debug('')
        logging.debug('docs')
        argv = ['docs', 'foobar']
        parser = _get_plugin_parser()
        options, args = parser.parse_known_args(argv)
        url = find_docs_url(options.plugin_dist_name)
        # Strip off the file protocol header
        url = url.replace('file://', '')
        expected = os.path.join(self.tdir, 'foobar', 'src', 'foobar',
                                'sphinx_build', 'html', 'index.html')
        self.assertEqual(os.path.realpath(url), os.path.realpath(expected))
    finally:
        # Uninstall
        # Always runs so the installed 'foobar' never leaks into other
        # tests, even if the list/docs checks above failed.
        logging.debug('')
        logging.debug('uninstall')
        pip_in = os.path.join(self.tdir, 'pip.in')
        pip_out = os.path.join(self.tdir, 'pip.out')
        # pip prompts for confirmation; feed it 'y' via stdin.
        with open(pip_in, 'w') as out:
            out.write('y\n')
        stdin = open(pip_in, 'r')
        stdout = open(pip_out, 'w')
        # On EC2 Windows, 'pip' generates an absurdly long temp directory
        # name, apparently to allow backing-out of the uninstall.
        # The name is so long Windows can't handle it. So we try to
        # avoid that by indirectly influencing mkdtemp().
        env = os.environ.copy()
        env['TMP'] = os.path.expanduser('~')
        try:
            # the following few lines are to prevent the system level pip
            # from being used instead of the local virtualenv version.
            pipexe = 'pip'
            pipexe = find_in_path(pipexe)
            if pipexe is None:
                pipexe = 'pip'
            check_call((pipexe, 'uninstall', 'foobar'), env=env,
                       stdin=stdin, stdout=stdout, stderr=STDOUT)
        finally:
            stdin.close()
            stdout.close()
        with open(pip_out, 'r') as inp:
            captured_stdout = inp.read()
        logging.debug('captured stdout:')
        logging.debug(captured_stdout)

    # Show removed.
    logging.debug('')
    logging.debug('list removed')
    os.chdir(self.tdir)
    stdout = open('list.out', 'w')
    try:
        check_call(('plugin', 'list', '--external'),
                   stdout=stdout, stderr=STDOUT)
    finally:
        stdout.close()
    with open('list.out', 'r') as inp:
        captured_stdout = inp.read()
    os.remove('list.out')
    os.chdir(orig_dir)
    logging.debug('captured subprocess output:')
    logging.debug(captured_stdout)
    self.assertFalse('foobar.foobar.Foobar' in captured_stdout)