def get_source(self, fullname):
    """Get the source code for the given module."""
    file, pathname, description = self._get_module_info(fullname)
    if file is not None:
        file.close()
    if description[2] == imp.PKG_DIRECTORY:
        for (suffix, _, typ) in imp.get_suffixes():
            if typ != imp.PY_SOURCE:
                continue
            initfile = os.path.join(pathname, "__init__" + suffix)
            if os.path.exists(initfile):
                f = open(initfile, "rU")
                try:
                    return f.read()
                finally:
                    f.close()
        return self.get_source(fullname + ".__init__")
    else:
        pathbase = pathname[:-1 * len(description[0])]
        for (suffix, _, typ) in imp.get_suffixes():
            if typ != imp.PY_SOURCE:
                continue
            sourcefile = pathbase + suffix
            if os.path.exists(sourcefile):
                f = open(sourcefile, "rU")
                try:
                    return f.read()
                finally:
                    f.close()
        return None
def find_module(self, qname, rootpath=_Void):  # find_module_qname
    """Find python module file information for the given qname.

    This uses sys.path for the root location, and the search order follows it.
    Modules (files) take priority over packages (directories) in the search.

    - qname@str            Module qualified name specification.
    - rootpath@list(@str)  Optional module root path.
    + root@str|None        Root where module is found.
    + path@str|None        Path (filename) to the module from the root.
    + type@str|None        Type of module, see imp.get_suffixes().
    = safe
    = readonly
    ? None
    """
    if rootpath == _Void:
        # set the rootpath to sys.path as default; not a default argument
        # because it would get fixed at compile time
        rootpath = sys.path
    parts = qname.split(".")
    for root in rootpath:
        path = os.path.join(*parts)
        ## test for module file
        for ext in imp.get_suffixes():
            path_ext = path + ext[0]
            if os.path.exists(os.path.join(root, path_ext)):
                return root, path_ext, ext[2]
        ## test for package directory
        if (os.path.exists(os.path.join(root, path))
                and os.path.isdir(os.path.join(root, path))):
            path2 = os.path.join(path, "__init__")
            for ext in imp.get_suffixes():
                path2_ext = path2 + ext[0]
                if os.path.exists(os.path.join(root, path2_ext)):
                    return root, path2_ext, ext[2]
    return None, None, None
def get_code(self, fullname):
    """Get the code object for the given module."""
    file, pathname, description = self._get_module_info(fullname)
    if file is not None:
        file.close()
    if description[2] == imp.PKG_DIRECTORY:
        for (suffix, _, typ) in imp.get_suffixes():
            if typ != imp.PY_COMPILED:
                continue
            initfile = os.path.join(pathname, "__init__" + suffix)
            if os.path.exists(initfile):
                f = open(initfile, "rb")
                try:
                    f.seek(8)
                    return marshal.load(f)
                finally:
                    f.close()
        return self.get_code(fullname + ".__init__")
    else:
        pathbase = pathname[:-1 * len(description[0])]
        for (suffix, _, typ) in imp.get_suffixes():
            if typ != imp.PY_COMPILED:
                continue
            codefile = pathbase + suffix
            if os.path.exists(codefile):
                f = open(codefile, "rb")
                try:
                    f.seek(8)
                    return marshal.load(f)
                finally:
                    f.close()
        source = self.get_source(fullname)
        if source is not None:
            return compile(source, pathname, "exec")
        return None
def pythonLinkData():
    # @TODO Fix to work with static builds of Python
    libdir = sysconfig.get_config_var('LIBDIR')
    if libdir is None:
        libdir = os.path.abspath(os.path.join(
            sysconfig.get_config_var('LIBDEST'), "..", "libs"))
    version = pythonVersion()
    version_no_dots = version.replace('.', '')

    flags = {}
    flags['libdir'] = libdir
    if sys.platform == 'win32':
        suffix = '_d' if any([tup[0].endswith('_d.pyd') for tup in imp.get_suffixes()]) else ''
        flags['lib'] = 'python{}{}'.format(version_no_dots, suffix)
    elif sys.platform == 'darwin':
        flags['lib'] = 'python{}'.format(version)
    # Linux and anything else
    else:
        if sys.version_info[0] < 3:
            suffix = '_d' if any([tup[0].endswith('_d.so') for tup in imp.get_suffixes()]) else ''
            flags['lib'] = 'python{}{}'.format(version, suffix)
        else:
            flags['lib'] = 'python{}{}'.format(version, sys.abiflags)
    return flags
def main():
    print imp.get_suffixes()
    fmt = '%20s %10s %10s'
    print fmt % ('Extension', 'Mode', 'Type')
    print '-' * 42
    for extension, mode, module_type in imp.get_suffixes():
        print fmt % (extension, mode, module_types[module_type])
def find_module(name, path=None):
    """
    A version of imp.find_module that works with zipped packages.
    """
    if path is None:
        path = sys.path

    for entry in path:
        importer = pkg_resources.get_importer(entry)
        loader = importer.find_module(name)
        if loader is None:
            continue

        if isinstance(importer, pkg_resources.ImpWrapper):
            filename = loader.filename
            if filename.endswith('.pyc') or filename.endswith('.pyo'):
                fp = open(filename, 'rb')
                description = ('.pyc', 'rb', imp.PY_COMPILED)
                return (fp, filename, description)
            elif filename.endswith('.py'):
                fp = file(filename, READ_MODE)
                description = ('.py', READ_MODE, imp.PY_SOURCE)
                return (fp, filename, description)
            else:
                for _sfx, _mode, _type in imp.get_suffixes():
                    if _type == imp.C_EXTENSION and filename.endswith(_sfx):
                        description = (_sfx, 'rb', imp.C_EXTENSION)
                        break
                else:
                    description = ('', '', imp.PKG_DIRECTORY)
                return (None, filename, description)

        elif hasattr(loader, 'get_code'):
            co = loader.get_code(name)
            fp = _code_to_file(co)

            pathname = os.path.join(entry, *name.split('.'))

            if isinstance(loader, zipimport.zipimporter):
                # Check if this happens to be a wrapper module introduced by
                # setuptools; if it is, we return the actual extension.
                zn = '/'.join(name.split('.'))
                for _sfx, _mode, _type in imp.get_suffixes():
                    if _type == imp.C_EXTENSION:
                        p = loader.prefix + zn + _sfx
                        if p in loader._files:
                            description = (_sfx, 'rb', imp.C_EXTENSION)
                            return (None, pathname + _sfx, description)

            if hasattr(loader, 'is_package') and loader.is_package(name):
                return (None, pathname, ('', '', imp.PKG_DIRECTORY))

            pathname = pathname + '.pyc'
            description = ('.pyc', 'rb', imp.PY_COMPILED)
            return (fp, pathname, description)

    raise ImportError(name)
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, -1, -1):
        versions.append(''.join([major, str(minor)]))

    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')

    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:   # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for abi in abis:
        for arch in arches:
            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))

    return set(result)
def _get_open_file_list():
    fsencoding = sys.getfilesystemencoding()
    sproc = subprocess.Popen(
        ['lsof -F0 -n -p {0}'.format(os.getpid())],
        shell=True, stdout=subprocess.PIPE)
    output = sproc.communicate()[0].strip()
    files = []
    for line in output.split(b'\n'):
        columns = line.split(b'\0')
        mapping = {}
        for column in columns:
            if len(column) >= 2:
                mapping[column[0:1]] = column[1:]
        if (mapping.get(b'f') and mapping.get(b'a', b' ') != b' ' and
                mapping.get(b't') == b'REG'):
            # Ignore extension modules -- they may be imported by a
            # test but are never again closed by the runtime.  That's
            # ok.
            for suffix, mode, filetype in imp.get_suffixes():
                if mapping[b'n'].decode(fsencoding).endswith(suffix):
                    break
            else:
                files.append(mapping[b'n'])
    return set(files)
def __init__(self, srcName):
    self._srcName = srcName
    self.__magic = imp.get_magic()
    self.__sfx = {}
    for sfx, mode, typ in imp.get_suffixes():
        self.__sfx[typ] = (sfx, len(sfx), mode)
    debug.logger & debug.flagBld and debug.logger('trying %s' % self)
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, -1, -1):
        versions.append(''.join([major, str(minor)]))

    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')

    result = []

    # Most specific - our Python version, ABI and arch
    for abi in abis:
        result.append((''.join((IMP_PREFIX, versions[0])), abi, ARCH))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))

    return result
def find_all_submodules(self, m):
    if not m.__path__:
        return
    modules = {}
    # 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"].
    # But we must also collect Python extension modules - although
    # we cannot separate normal dlls from Python extensions.
    suffixes = []
    for triple in imp.get_suffixes():
        suffixes.append(triple[0])
    for dir in m.__path__:
        try:
            names = os.listdir(dir)
        except os.error:
            self.msg(2, "can't list directory", dir)
            continue
        for name in names:
            mod = None
            for suff in suffixes:
                n = len(suff)
                if name[-n:] == suff:
                    mod = name[:-n]
                    break
            if mod and mod != "__init__":
                modules[mod] = mod
    return modules.keys()
def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    g = {}
    g['CC'] = "gcc -pthread"
    g['CXX'] = "g++ -pthread"
    g['OPT'] = "-DNDEBUG -O2"
    g['CFLAGS'] = "-DNDEBUG -O2"
    g['CCSHARED'] = "-fPIC"
    g['LDSHARED'] = "gcc -pthread -shared"
    g['SO'] = [s[0] for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION][0]
    g['AR'] = "ar"
    g['ARFLAGS'] = "rc"
    g['EXE'] = ""
    g['LIBDIR'] = os.path.join(sys.prefix, 'lib')
    g['VERSION'] = get_python_version()

    if sys.platform[:6] == "darwin":
        import platform
        if platform.machine() == 'i386':
            if platform.architecture()[0] == '32bit':
                arch = 'i386'
            else:
                arch = 'x86_64'
        else:
            # just a guess
            arch = platform.machine()
        g['LDSHARED'] += ' -undefined dynamic_lookup'
        g['CC'] += ' -arch %s' % (arch,)

    global _config_vars
    _config_vars = g
def getsourcefile(object):
    """Return the filename that can be used to locate an object's source.

    Return None if no way can be identified to get the source.
    """
    filename = getfile(object)
    if filename == "<stdin>":
        return filename
    if string.lower(filename[-4:]) in ('.pyc', '.pyo'):
        filename = filename[:-4] + '.py'
    for suffix, mode, kind in imp.get_suffixes():
        if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
            # Looks like a binary file.  We want to only return a text file.
            return None
    if filename not in pathExistsOnDiskCache_:
        pathExistsOnDiskCache_[filename] = os.path.exists(filename)
    if pathExistsOnDiskCache_[filename]:
        return filename
    # only return a non-existent filename if the module has a PEP 302 loader
    if hasattr(getmodule(object, filename), '__loader__'):
        return filename
    # or it is in the linecache
    if filename in linecache.cache:
        return filename
def get_libc_name():
    if sys.platform == 'win32':
        # Parses sys.version and deduces the version of the compiler
        import distutils.msvccompiler
        version = distutils.msvccompiler.get_build_version()
        if version is None:
            # This logic works with official builds of Python.
            if sys.version_info < (2, 4):
                clibname = 'msvcrt'
            else:
                clibname = 'msvcr71'
        else:
            if version <= 6:
                clibname = 'msvcrt'
            else:
                clibname = 'msvcr%d' % (version * 10)

        # If python was built in debug mode
        import imp
        if imp.get_suffixes()[0][0] == '_d.pyd':
            clibname += 'd'
        return clibname + '.dll'
    else:
        return ctypes.util.find_library('c')
def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION):
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix
def find_library(libname):
    """
    Search for 'libname.so'.

    Returns library loaded with ctypes.CDLL
    Raises OSError if library is not found
    """
    base_path = os.path.dirname(__file__)
    lib, search_paths = None, []
    so_extensions = [
        ext for ext, _, typ in imp.get_suffixes()
        if typ == imp.C_EXTENSION
    ]
    for extension in so_extensions:
        search_paths += [
            os.path.abspath(os.path.normpath(
                os.path.join(base_path, '..', libname + extension))),
            os.path.abspath(os.path.normpath(
                os.path.join(base_path, '../..', libname + extension))),
        ]

    for path in search_paths:
        if os.path.exists(path):
            lib = path
            break

    if not lib:
        raise OSError("Can't find %s.so. Searched at:\n %s" % (
            libname, '\n'.join(search_paths)))

    return ctypes.CDLL(lib)
def find_all_submodules(self, m):
    if not m.__path__:
        return
    else:
        modules = {}
        suffixes = []
        for triple in imp.get_suffixes():
            suffixes.append(triple[0])
        for dir in m.__path__:
            try:
                names = os.listdir(dir)
            except os.error:
                self.msg(2, "can't list directory", dir)
                continue
            for name in names:
                mod = None
                for suff in suffixes:
                    n = len(suff)
                    if name[-n:] == suff:
                        mod = name[:-n]
                        break
                if mod and mod != '__init__':
                    modules[mod] = mod
        return modules.keys()
def run(self):
    # Recursive deletion of build/ directory
    path = localpath("build")
    try:
        shutil.rmtree(path)
    except Exception:
        print("Error: Failed to remove directory %s" % path)
    else:
        print("Success: Cleaned up %s" % path)

    # Now, the extension and other files
    try:
        import imp
    except ImportError:
        if os.name == 'posix':
            paths = [localpath("zorro/numexprz/interpreter.so")]
        else:
            paths = [localpath("zorro/numexprz/interpreter.pyd")]
    else:
        paths = []
        for suffix, _, _ in imp.get_suffixes():
            if suffix == '.py':
                continue
            paths.append(localpath("zorro/numexprz", "interpreter" + suffix))

    paths.append(localpath("zorro/numexprz/__config__.py"))
    paths.append(localpath("zorro/numexprz/__config__.pyc"))

    for path in paths:
        try:
            os.remove(path)
        except Exception:
            print("Error: Failed to clean up file %s" % path)
        else:
            print("Cleaning up %s" % path)

    clean.run(self)
def add_to_path():
    """
    Prepends the build directory to the path so that newly built pypyodbc
    libraries are used, allowing it to be tested without installing it.
    """
    # Put the build directory into the Python path so we pick up the
    # version we just built.
    #
    # To make this cross platform, we'll search the directories until
    # we find the .pyd file.
    import imp

    library_exts = [t[0] for t in imp.get_suffixes() if t[-1] == imp.C_EXTENSION]
    library_names = ["pypyodbc%s" % ext for ext in library_exts]

    # Only go into directories that match our version number.
    dir_suffix = "-%s.%s" % (sys.version_info[0], sys.version_info[1])

    build = join(dirname(dirname(abspath(__file__))), "build")

    for root, dirs, files in os.walk(build):
        for d in dirs[:]:
            if not d.endswith(dir_suffix):
                dirs.remove(d)

        for name in library_names:
            if name in files:
                sys.path.insert(0, root)
                return

    print >> sys.stderr, "Did not find the pypyodbc library in the build directory. Will use an installed version."
def srcname(dottedname):
    """get the source filename from importable name or module"""
    if hasattr(dottedname, '__file__'):
        module = dottedname
    else:
        names = dottedname.split('.')
        module = __import__(names.pop(0))
        for name in names:
            module = getattr(module, name)
    filename = module.__file__
    name, ext = os.path.splitext(filename)
    ext = ext.lower()
    _py_src_suffixes = [i[0] for i in imp.get_suffixes() if i[2] == imp.PY_SOURCE]
    if ext not in build_exe._py_suffixes:
        raise ValueError('not python script')
    if ext in _py_src_suffixes:
        return filename
    for i in _py_src_suffixes:
        if os.path.isfile(name + i):
            return name + i
    raise ValueError('not found')
def scan_source_files(self, srcdir, tpl_dir_name, extensions):
    """Scan the source files for the provided `srcdir`."""
    exclude_dirs = ('.appledouble', '.svn', 'cvs', '_darcs', 'i18ntoolbox')
    source_files = []
    templates = []
    for root, dirs, files in os.walk(srcdir):
        if os.path.basename(root).lower() in exclude_dirs:
            continue
        for fname in files:
            name, ext = os.path.splitext(fname)
            del(name)
            srcfile = os.path.join(root, fname)
            _py_ext = [triple[0] for triple in imp.get_suffixes()
                       if triple[2] == imp.PY_SOURCE][0]
            if ext == _py_ext:
                # Python Source Files
                source_files.append(srcfile)
            elif ext in extensions:
                # Templates
                templates.append(srcfile)
            else:
                # Everything Else, Do Nothing
                pass
    # Only include templates if path contains `tpl_dir_name`
    templates = [
        fname for fname in templates if fname.find(tpl_dir_name) != -1
    ]
    return source_files, templates
def _visit_pyfiles(list, dirname, names):
    """Helper for getFilesForName()."""
    # get extension for python source files
    if not globals().has_key('_py_ext'):
        global _py_ext
        # _py_ext = [triple[0] for triple in imp.get_suffixes()
        #            if triple[2] == imp.PY_SOURCE][0]
        _py_ext = [triple[0] for triple in imp.get_suffixes()
                   if triple[2] == imp.PY_SOURCE]

    # don't recurse into CVS directories
    if 'CVS' in names:
        names.remove('CVS')
    if '.svn' in names:
        names.remove('.svn')
    if '.git' in names:
        names.remove('.git')

    # add all *.py files to list
    list.extend(
        [os.path.join(dirname, file) for file in names
         if os.path.splitext(file)[1] in _py_ext]
    )
def import_local_file(modname, modfile=None):
    """Import a local file as a module.

    Opens a file in the current directory named `modname`.py, imports it
    as `modname`, and returns the module object.  `modfile` is the file to
    import if it isn't in the current directory.
    """
    try:
        from importlib.machinery import SourceFileLoader
    except ImportError:
        SourceFileLoader = None

    if modfile is None:
        modfile = modname + '.py'
    if SourceFileLoader:
        mod = SourceFileLoader(modname, modfile).load_module()
    else:
        for suff in imp.get_suffixes():                 # pragma: part covered
            if suff[0] == '.py':
                break

        with open(modfile, 'r') as f:
            # pylint: disable=undefined-loop-variable
            mod = imp.load_module(modname, f, modfile, suff)

    return mod
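# A minimal usage sketch for the helper above. The module name "helper" and
# the file helper.py are hypothetical; it assumes such a file exists in the
# current working directory.
helper = import_local_file("helper")
print(helper.__name__)  # -> "helper"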
def _get_init_directory(self):
    for suffix, _, _ in imp.get_suffixes():
        ending = '__init__' + suffix
        if self.py__file__().endswith(ending):
            # Remove the ending, including the separator.
            return self.py__file__()[:-len(ending) - 1]
    return None
def Import(pkg, name, path, arch):
    """
    Import helper for PETSc-based extension modules.
    """
    import sys, os, imp
    # full dotted module name
    fullname = '%s.%s' % (pkg, name)
    # test if extension module was already imported
    module = sys.modules.get(fullname)
    fname = getattr(module, '__file__', '')
    shext = imp.get_suffixes()[0][0]
    if os.path.splitext(fname)[-1] == shext:
        # if 'arch' is None, do nothing; otherwise this
        # call may be invalid if extension module for
        # other 'arch' has been already imported.
        if arch is not None and arch != module.__arch__:
            raise ImportError("%s already imported" % module)
        return module
    # import extension module from 'path/arch' directory
    pathlist = [os.path.join(path, arch)]
    fo, fn, stuff = imp.find_module(name, pathlist)
    module = imp.load_module(fullname, fo, fn, stuff)
    module.__arch__ = arch  # save arch value
    setattr(sys.modules[pkg], name, module)
    return module
def load(filename=None, pkgname=None, conf_dir=None):
    """Load module at *filename* with the proper module loader.

    The module loader is chosen based on *filename*'s extension.

    If *pkgname* is given, sets the default for *filename* based on the
    constant global `pyconf.default_conf_dir`:

        filename = default_conf_dir '/' pkgname '.conf.py'
    """
    if filename is None and pkgname is not None:
        env_var = pkgname.replace("-", "_").upper() + "_CONF"
        if env_var in os.environ:
            filename = os.environ[env_var]
        else:
            conf_dir = conf_dir or default_conf_dir
            filename = os.path.join(conf_dir, pkgname + ".conf.py")
    for (suffix, mode, type_) in imp.get_suffixes():
        if filename.endswith(suffix):
            fp = open(filename, mode)
            desc = (suffix, mode, type_)
            try:
                mod = imp.load_module("<config>", fp, filename, desc)
                return mod
            finally:
                fp.close()
    else:
        raise ConfigurationError(
            "no suitable loader for config module %r" % (filename,))
def get_suffixes(self):
    """List of all suffixes, prepared for use with glob()."""
    suffixes = ["*" + i[0] for i in imp.get_suffixes()]
    suffixes = "[%s]" % "|".join(suffixes)
    return suffixes
def iter_zipimport_modules(importer, prefix):
    # make the path components regex safe
    sep = os.sep.replace("\\", "\\\\")
    path = prefix.replace(os.sep, sep)
    # using "non-greedy" matching in case a suffix is not just an
    # extension (like module.so for dlopen imports)
    modname = "[a-zA-Z_][a-zA-Z0-9_]*?"
    pkginit = sep + "__init__"
    suffix = "|".join([desc[0] for desc in imp.get_suffixes()])
    suffix = suffix.replace(".", "\\.")
    pattern = "^%s(%s)(%s)?(%s)$" % (path, modname, pkginit, suffix)
    submodule_match = re.compile(pattern).match

    yielded = {}
    dirlist = list(importer._files)
    dirlist.sort()
    for fn in dirlist:
        match = submodule_match(fn)
        if match is not None:
            modname, pkginit, suffix = match.groups()
            if pkginit:
                ispkg = True
            elif modname == "__init__":
                continue
            else:
                ispkg = False
            if modname not in yielded:
                yielded[modname] = True
                yield modname, ispkg
    return
def _verify(self, path, data, canonicalise=True):
    """Verify data for the given path.

    This performs verification against the local database and the main
    manager database.  If a valid hash for the item is not found in either
    location, IntegrityCheckMissing error is raised.
    """
    try:
        try:
            self.hashdb.verify(path, data)
        except IntegrityCheckMissing:
            self.manager.hashdb.verify(path, data)
        else:
            try:
                self.manager.hashdb.verify(path, data)
            except IntegrityCheckMissing:
                pass
    except IntegrityCheckMissing:
        if not canonicalise:
            raise
        for (suffix, _, typ) in imp.get_suffixes():
            if path.endswith(suffix):
                modname = path[:-1 * len(suffix)]
                modname = modname.replace("/", ".").replace("\\", ".")
                cmodname = self.manager.get_canonical_modname(modname)
                if cmodname != modname:
                    cpath = cmodname.replace(".", "/") + suffix
                    self._verify(cpath, data, False)
                    break
        else:
            raise
def __init__(self, skip_checks=True, build=True):
    """Build a finder object.

    Arguments:
      - skip_checks: Don't test whether modules are readable while building
        the cache.  This improves performance, but can cause an unreadable
        file that looks like a Python module to shadow a readable module
        with the same name later in sys.path.
      - build: if set, build the cache now.  This is used in the
        mpi4py_finder and pympi_finder extensions.
    """
    # Store some suffix and module description information
    t = imp.get_suffixes()
    self.skip_checks = skip_checks
    self._suffixes = [x[0] for x in t]        # in order of precedence
    self._rsuffixes = self._suffixes[::-1]    # and in reverse order
    self._suffix_tuples = dict((x[0], tuple(x)) for x in t)

    # We store the value of sys.path in _syspath so we can keep track
    # of changes.  _cache is a dictionary mapping module names to tuples
    # containing the information needed to load the module (path and
    # module description).
    if build:
        self._syspath = list(sys.path)
        self._build_cache()
    else:
        # For some subclasses
        self._syspath = []
        self._cache = {}
if is_py34:
    import importlib.util
    BYTECODE_MAGIC = importlib.util.MAGIC_NUMBER
else:
    # This fallback should work with Python 2.7 and 3.3.
    import imp
    BYTECODE_MAGIC = imp.get_magic()

# List of suffixes for Python C extension modules.
try:
    # In Python 3.3+ there is a list
    from importlib.machinery import EXTENSION_SUFFIXES
except ImportError:
    import imp
    EXTENSION_SUFFIXES = [f[0] for f in imp.get_suffixes()
                          if f[2] == imp.C_EXTENSION]

# In Python 3 'Tkinter' has been made lowercase - 'tkinter'. Keep Python 2
# compatibility.
if is_py2:
    modname_tkinter = 'Tkinter'
else:
    modname_tkinter = 'tkinter'

# On Windows we require pypiwin32 or pywin32-ctypes
# -> all pyinstaller modules should use win32api from PyInstaller.compat to
#    ensure that it can work on MSYS2 (which requires pywin32-ctypes)
if is_win:
# Try to import the dynamic-loading _psyco and report errors
# Sorry, the following does not work under Windows
##import platform
##if platform.processor() != 'i386':
##    raise ImportError, "Only i386 is supported."
try:
    import _psyco
except ImportError, e:
    extramsg = ''
    import sys, imp
    try:
        file, filename, (suffix, mode, type) = imp.find_module('_psyco', __path__)
    except ImportError:
        ext = [suffix for suffix, mode, type in imp.get_suffixes()
               if type == imp.C_EXTENSION]
        if ext:
            extramsg = (" (cannot locate the compiled extension '_psyco%s' "
                        "in the package path '%s')" % (ext[0], '; '.join(__path__)))
    else:
        extramsg = (" (check that the compiled extension '%s' is for "
                    "the correct Python version; this is Python %s)"
                    % (filename, sys.version.split()[0]))
    raise ImportError, str(e) + extramsg

# Publish important data by importing them in the package
from support import __version__, error, warning, _getrealframe, _getemulframe
from support import version_info, __version__ as hexversion
def get_supported(versions=None, noarch=False):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = get_abbr_impl()

    abis = []

    abi = get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = get_platform()
        if sys.platform == 'darwin':
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
# the internal name (ie, no '.py').
# See carchive.py for a more general archive (contains anything)
# that can be understood by a C program.

_verbose = 0
_listdir = None
_environ = None

### **NOTE** This module is used during bootstrap.
### Import *ONLY* builtin modules.

import marshal
import struct
import imp
import sys

_c_suffixes = filter(lambda x: x[2] == imp.C_EXTENSION, imp.get_suffixes())

for nm in ('nt', 'posix'):
    if nm in sys.builtin_module_names:
        mod = __import__(nm)
        _listdir = mod.listdir
        _environ = mod.environ
        break

versuffix = '%d%d' % sys.version_info[:2]  # :todo: is this still used?

if "-vi" in sys.argv[1:]:
    _verbose = 1


class ArchiveReadError(RuntimeError):
def _getsuffixes(self):
    return imp.get_suffixes()
def _follow_sys_path(self, sys_path):
    """
    Find a module with a path (of the module, like usb.backend.libusb10).
    """
    def follow_str(ns_path, string):
        debug.dbg('follow_module %s in %s', string, ns_path)
        path = None
        if ns_path:
            path = ns_path
        elif self.level > 0:  # is a relative import
            path = self.get_relative_path()

        if path is not None:
            importing = find_module(string, [path])
        else:
            debug.dbg('search_module %s in %s', string, self.file_path)
            # Override the sys.path. It works only good that way.
            # Injecting the path directly into `find_module` did not work.
            sys.path, temp = sys_path, sys.path
            try:
                importing = find_module(string)
            finally:
                sys.path = temp

        return importing

    current_namespace = (None, None, None)
    # now execute those paths
    rest = []
    for i, s in enumerate(self.import_path):
        try:
            current_namespace = follow_str(current_namespace[1], unicode(s))
        except ImportError:
            _continue = False
            if self.level >= 1 and len(self.import_path) == 1:
                # follow `from . import some_variable`
                rel_path = self.get_relative_path()
                with common.ignored(ImportError):
                    current_namespace = follow_str(rel_path, '__init__')
            elif current_namespace[2]:  # is a package
                path = self.str_import_path[:i]
                for n in self.namespace_packages(current_namespace[1], path):
                    try:
                        current_namespace = follow_str(n, unicode(s))
                        if current_namespace[1]:
                            _continue = True
                            break
                    except ImportError:
                        pass

            if not _continue:
                if current_namespace[1]:
                    rest = self.str_import_path[i:]
                    break
                else:
                    raise ModuleNotFound(s)

    path = current_namespace[1]
    is_package_directory = current_namespace[2]

    f = None
    if is_package_directory or current_namespace[0]:
        # is a directory module
        if is_package_directory:
            for suffix, _, _ in imp.get_suffixes():
                p = os.path.join(path, '__init__' + suffix)
                if os.path.exists(p):
                    if suffix == '.py':
                        with open(p, 'rb') as f:
                            source = f.read()
                        path = p
                    else:
                        # It's a binary!
                        source = None
                    break
        else:
            source = current_namespace[0].read()
            current_namespace[0].close()
        return _load_module(self._evaluator, path, source, sys_path=sys_path), rest
    else:
        return _load_module(self._evaluator, name=path, sys_path=sys_path), rest
import sys

from distutils import log
from distutils.ccompiler import new_compiler
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.errors import DistutilsError
from distutils.file_util import copy_file
from distutils.sysconfig import customize_compiler, get_config_var

from setuptools.extension import Library
from setuptools.extern import six

if six.PY2:
    import imp

    EXTENSION_SUFFIXES = [
        s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
else:
    from importlib.machinery import EXTENSION_SUFFIXES

try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext
    # Additionally, assert that the compiler module will load
    # also. Ref #1229.
    __import__('Cython.Compiler.Main')
except ImportError:
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS  # noqa
import apache
import util
import sys
import os
from os.path import exists, isabs, normpath, split, isfile, join, dirname
import imp
import re
import base64
import new
import types
from types import *

imp_suffixes = " ".join([x[0][1:] for x in imp.get_suffixes()])

####################### The published page cache ##############################

from cache import ModuleCache, NOT_INITIALIZED

class PageCache(ModuleCache):
    """ This is the cache for page objects. Handles the automatic reloading
        of pages. """

    def key(self, req):
        """ Extracts the normalized filename from the request """
        return req.filename

    def check(self, key, req, entry):
        config = req.get_config()
        autoreload = int(config.get("PythonAutoReload", 1))
class PyPackageSearcher(AbstractSearcher):
    """Figures out if given Python module (source or bytecode) exists
       in given Python package.

       Python package must be importable.
    """
    suffixes = {}
    for sfx, mode, typ in imp.get_suffixes():
        if typ not in suffixes:
            suffixes[typ] = []
        suffixes[typ].append((sfx, mode))

    def __init__(self, package):
        """Create an instance of *PyPackageSearcher* bound to specific
           Python package.

           Args:
               package (str): name of the Python package to look up
                              Python modules at.
        """
        self._package = package
        self.__loader = None

    def __str__(self):
        return '%s{"%s"}' % (self.__class__.__name__, self._package)

    @staticmethod
    def _parseDosTime(dosdate, dostime):
        t = (((dosdate >> 9) & 0x7f) + 1980,  # year
             ((dosdate >> 5) & 0x0f),         # month
             dosdate & 0x1f,                  # mday
             (dostime >> 11) & 0x1f,          # hour
             (dostime >> 5) & 0x3f,           # min
             (dostime & 0x1f) * 2,            # sec
             -1,                              # wday
             -1,                              # yday
             -1)                              # dst
        return time.mktime(t)

    def fileExists(self, mibname, mtime, rebuild=False):
        if rebuild:
            debug.logger & debug.flagSearcher and debug.logger(
                'pretend %s is very old' % mibname)
            return

        mibname = decode(mibname)

        try:
            p = __import__(self._package, globals(), locals(), ['__init__'])

            if hasattr(p, '__loader__') and hasattr(p.__loader__, '_files'):
                self.__loader = p.__loader__
                self._package = self._package.replace('.', os.sep)
                debug.logger & debug.flagSearcher and debug.logger(
                    '%s is an importable egg at %s' % (
                        self._package, os.path.split(p.__file__)[0]))

            elif hasattr(p, '__file__'):
                debug.logger & debug.flagSearcher and debug.logger(
                    '%s is not an egg, trying it as a package directory' % self._package)
                return PyFileSearcher(os.path.split(p.__file__)[0]).fileExists(
                    mibname, mtime, rebuild=rebuild)

            else:
                raise error.PySmiFileNotFoundError(
                    '%s is neither importable nor a file' % self._package,
                    searcher=self)

        except ImportError:
            raise error.PySmiFileNotFoundError(
                '%s is not importable, trying as a path' % self._package,
                searcher=self)

        for fmt in imp.PY_COMPILED, imp.PY_SOURCE:
            for pySfx, pyMode in self.suffixes[fmt]:
                f = os.path.join(self._package, mibname.upper()) + pySfx

                if f not in self.__loader._files:
                    debug.logger & debug.flagSearcher and debug.logger(
                        '%s is not in %s' % (f, self._package))
                    continue

                if fmt == imp.PY_COMPILED:
                    pyData = self.__loader.get_data(f)
                    if pyData[:4] == imp.get_magic():
                        pyData = pyData[4:]
                        pyTime = struct.unpack('<L', pyData[:4])[0]
                        debug.logger & debug.flagSearcher and debug.logger(
                            'found %s, mtime %s' % (
                                f, time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                                 time.gmtime(pyTime))))
                        if pyTime >= mtime:
                            raise error.PySmiFileNotModifiedError()
                        else:
                            raise error.PySmiFileNotFoundError(
                                'older file %s exists' % mibname, searcher=self)
                    else:
                        debug.logger & debug.flagSearcher and debug.logger(
                            'bad magic in %s' % f)
                        continue
                else:
                    pyTime = self._parseDosTime(self.__loader._files[f][6],
                                                self.__loader._files[f][5])
                    debug.logger & debug.flagSearcher and debug.logger(
                        'found %s, mtime %s' % (
                            f, time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                             time.gmtime(pyTime))))
                    if pyTime >= mtime:
                        raise error.PySmiFileNotModifiedError()
                    else:
                        raise error.PySmiFileNotFoundError(
                            'older file %s exists' % mibname, searcher=self)

        raise error.PySmiFileNotFoundError('no file %s found' % mibname,
                                           searcher=self)
def get_suffix(file):
    for suffix in imp.get_suffixes():
        if file[-len(suffix[0]):] == suffix[0]:
            return suffix
    return None
def _get_compiled_ext():
    for ext, mode, typ in imp.get_suffixes():
        if typ == imp.PY_COMPILED:
            return ext
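# A hedged sketch of an importlib-based counterpart to the helper above: on
# Python 3, importlib.machinery.BYTECODE_SUFFIXES already lists the compiled
# module suffixes (normally just ['.pyc']). The helper name is hypothetical.
from importlib.machinery import BYTECODE_SUFFIXES

def _get_compiled_ext_py3():
    # Mirrors the imp-based helper: return the first bytecode suffix.
    return BYTECODE_SUFFIXES[0]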
def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append("".join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith(".abi"):
            abi3s.add(suffix[0].split(".", 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append("none")

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith("macosx"):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = "{}_{}_%i_%s".format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace("linux", "manylinux1"), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(("%s%s" % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {"31", "30"}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(("py%s" % (versions[0][0]), "none", arch))

    # No abi / arch, but requires our implementation:
    supported.append(("%s%s" % (impl, versions[0]), "none", "any"))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(("%s%s" % (impl, versions[0][0]), "none", "any"))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(("py%s" % (version,), "none", "any"))
        if i == 0:
            supported.append(("py%s" % (version[0]), "none", "any"))

    return supported
def get_supported(versions=None, noarch=False):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        major = sys.version_info[0]
        # Support all previous minor Python versions.
        for minor in range(sys.version_info[1], -1, -1):
            versions.append(''.join(map(str, (major, minor))))

    impl = get_abbr_impl()

    abis = []

    try:
        soabi = sysconfig.get_config_var('SOABI')
    except IOError as e:
        # Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        soabi = None

    if soabi and soabi.startswith('cpython-'):
        abis[0:0] = ['cp' + soabi.split('-', 1)[-1]]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = get_platform()
        if sys.platform == 'darwin':
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                actual_arches = [actual_arch]
                if actual_arch in ('i386', 'ppc'):
                    actual_arches.append('fat')
                if actual_arch in ('i386', 'x86_64'):
                    actual_arches.append('intel')
                if actual_arch in ('i386', 'ppc', 'x86_64'):
                    actual_arches.append('fat3')
                if actual_arch in ('ppc64', 'x86_64'):
                    actual_arches.append('fat64')
                if actual_arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                    actual_arches.append('universal')
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                for m in range(int(minor) + 1):
                    for a in actual_arches:
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
        # stick the CompiledCode object here to keep it alive
        module.__nativecompile_compiled_code__ = ccode

        pyinternals.cep_exec(ccode.entry_points[0], module.__dict__)

        return module
    return inner

SourceLoader = xloader(importlib._bootstrap._SourceFileLoader)
SourcelessLoader = xloader(importlib._bootstrap._SourcelessFileLoader)
ExtensionLoader = importlib._bootstrap._ExtensionFileLoader

PYTHON_FILE_SUFFIXES = (
    [(suffix, SourceLoader) for suffix, mode, type in imp.get_suffixes()
     if type == imp.PY_SOURCE] +
    [(suffix, SourcelessLoader) for suffix, mode, type in imp.get_suffixes()
     if type == imp.PY_COMPILED])

FILE_SUFFIXES = (
    PYTHON_FILE_SUFFIXES +
    [(suffix, ExtensionLoader) for suffix, mode, type in imp.get_suffixes()
     if type == imp.C_EXTENSION])


def path_hook(path):
    if os.path.isdir(path):
        return Finder(path)
    else:
        raise ImportError("only directories are supported")
def _extension_suffixes():
    return [suffix for suffix, _, type in imp.get_suffixes()
            if type == imp.C_EXTENSION]
            raise NotLoadedError(repr(self))
        for name in dict(sys.modules):
            if name.startswith(self.package + ".") or name == self.package:
                del sys.modules[name]
        self.plugins = []
        self.loaded = False
    '''

    def __repr__(self):
        return "<plugin.Tracker(%r)>" % self.modulename

    def __str__(self):
        return "Plugin Tracker %r: %s" % (self.description, self.modulename)


suffixes = set([info[0] for info in imp.get_suffixes()])


def getmodulename(parent, filename):
    """Get the name of a module based on its filename."""
    for suffix in suffixes:
        if filename.endswith(suffix):
            return filename[:-len(suffix)]  # remove ending
    else:
        fullpath = os.path.join(parent, filename)
        if os.path.isdir(fullpath):
            for suffix in suffixes:
                if os.path.exists(os.path.join(fullpath, "__init__" + suffix)):
                    return filename


def listpackage(path):
class PyFileBorrower(AbstractBorrower):
    """Create PySNMP MIB file borrowing object"""
    for sfx, mode, typ in imp.get_suffixes():
        if typ == imp.PY_SOURCE:
            exts = [sfx]
            break
def _get_c_extension_suffix():
    for ext, mod, typ in imp.get_suffixes():
        if typ == imp.C_EXTENSION:
            return ext
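# Hedged sketch of the Python 3.3+ equivalent of the helper above:
# importlib.machinery.EXTENSION_SUFFIXES already holds the C-extension
# suffixes, so no scan of imp.get_suffixes() is needed. The helper name
# below is hypothetical.
from importlib.machinery import EXTENSION_SUFFIXES

def _get_c_extension_suffix_py3():
    # EXTENSION_SUFFIXES is ordered with the most specific, tagged suffix
    # first (e.g. '.cpython-38-x86_64-linux-gnu.so' before '.so').
    return EXTENSION_SUFFIXES[0] if EXTENSION_SUFFIXES else None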
def _is_debug_build():
    import imp
    for ext, _, _ in imp.get_suffixes():
        if ext == "_d.pyd":
            return True
    return False
# Globals and constants
#-----------------------------------------------------------------------------

# Path to the modules database
MODULES_PATH = get_conf_path('db')

# Time in seconds after which we give up
if os.name == 'nt':
    TIMEOUT_GIVEUP = 30
else:
    TIMEOUT_GIVEUP = 20

# Py2app only uses .pyc files for the stdlib when optimize=0,
# so we need to add it as another suffix here
if running_in_mac_app():
    suffixes = imp.get_suffixes() + [('.pyc', 'rb', '2')]
else:
    suffixes = imp.get_suffixes()

# Regular expression for the python import statement
import_re = re.compile(r'(?P<name>[a-zA-Z_][a-zA-Z0-9_]*?)'
                       r'(?P<package>[/\\]__init__)?'
                       r'(?P<suffix>%s)$' % r'|'.join(re.escape(s[0]) for s in suffixes))

# Modules database
modules_db = PickleShareDB(MODULES_PATH)

#-----------------------------------------------------------------------------
# Utility functions
#-----------------------------------------------------------------------------
import imp, sys, os, re, time

identifier = "python-%s-%s" % (sys.version[:3], sys.platform)
timestamp = time.strftime("%Y%m%dT%H%M%SZ", time.gmtime(time.time()))

# known test packages
TEST_PACKAGES = "test.", "bsddb.test.", "distutils.tests."

try:
    import platform
    platform = platform.platform()
except:
    platform = None  # unknown

suffixes = imp.get_suffixes()

def get_suffix(file):
    for suffix in suffixes:
        if file[-len(suffix[0]):] == suffix[0]:
            return suffix
    return None

def main():
    path = getpath()

    modules = {}
    for m in sys.builtin_module_names:
                package = package[:-1]
            m = mf.import_hook(package, None, ["*"])
        else:
            # Exclude subtrees that aren't packages
            dirnames[:] = []

    return mf

#
# resource constants
#

PY_SUFFIXES = ['.py', '.pyw', '.pyo', '.pyc']
C_SUFFIXES = [
    _triple[0] for _triple in imp.get_suffixes()
    if _triple[2] == imp.C_EXTENSION
]

#
# side-effects
#

def _replacePackages():
    REPLACEPACKAGES = {
        '_xmlplus': 'xml',
    }
    for k, v in REPLACEPACKAGES.items():
        modulegraph.replacePackage(k, v)
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import

import distutils.util

try:
    from importlib.machinery import EXTENSION_SUFFIXES
except ImportError:  # pragma: no cover
    import imp

    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
    del imp

import logging
import os
import platform
import re
import struct
import sys
import sysconfig
import warnings

from ._typing import TYPE_CHECKING, cast

if TYPE_CHECKING:  # pragma: no cover
    from typing import (
        Dict,
        FrozenSet,
        IO,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

from PyQt4 import QtCore
import imp
import os.path
import shutil
import picard.plugins
import traceback

_suffixes = [s[0] for s in imp.get_suffixes()]
_package_entries = ["__init__.py", "__init__.pyc", "__init__.pyo"]
_extension_points = []


def _plugin_name_from_path(path):
    path = os.path.normpath(path)
    file = os.path.basename(path)
    if os.path.isdir(path):
        for entry in _package_entries:
            if os.path.isfile(os.path.join(path, entry)):
                return file
    else:
        if file in _package_entries:
            return None
        name, ext = os.path.splitext(file)
def _findModuleInPath2(module_name, search_path):
    """ This is our own module finding low level implementation.

        Just the full module name and search path are given. This is then
        tasked to raise "ImportError" or return a path if it finds it, or
        None, if it is a built-in.
    """
    # We have many branches here, because there are a lot of cases to try.
    # pylint: disable=R0912

    # We may have to decide between package and module, therefore build
    # a list of candidates.
    candidates = oset.OrderedSet()

    considered = set()

    for entry in search_path:
        # Don't try again, just with an entry of different casing or complete
        # duplicate.
        if Utils.normcase(entry) in considered:
            continue
        considered.add(Utils.normcase(entry))

        package_directory = os.path.join(entry, module_name)

        # First, check for a package with an init file, that would be the
        # first choice.
        if Utils.isDir(package_directory):
            for suffix in (".py", ".pyc"):
                package_file_name = "__init__" + suffix

                file_path = os.path.join(package_directory, package_file_name)

                if Utils.isFile(file_path):
                    candidates.add((entry, 1, package_directory))
                    break
            else:
                if python_version >= 330:
                    candidates.add((entry, 2, package_directory))

        # Then, check out suffixes of all kinds.
        for suffix, _mode, _type in imp.get_suffixes():
            file_path = Utils.joinpath(entry, module_name + suffix)

            if Utils.isFile(file_path):
                candidates.add((entry, 1, file_path))
                break

    if _debug_module_finding:
        print("Candidates", candidates)

    if candidates:
        # Ignore lower priority matches from package directories without
        # "__init__.py" file.
        min_prio = min(candidate[1] for candidate in candidates)
        candidates = [
            candidate for candidate in candidates if candidate[1] == min_prio
        ]

        # On case sensitive systems, no resolution needed.
        if case_sensitive:
            return candidates[0][2]
        else:
            for candidate in candidates:
                dir_listing = os.listdir(candidate[0])

                for filename in dir_listing:
                    if Utils.joinpath(candidate[0], filename) == candidate[2]:
                        return candidate[2]

            # Only exact case matches matter, all candidates were ignored,
            # lets just fall through to raising the import error.

    # Nothing found.
    raise ImportError
def getExtensionModuleSuffixes():
    for suffix, _mode, kind in imp.get_suffixes():
        if kind == imp.C_EXTENSION:
            yield suffix
def get_extension_suffixes():
    return [suffix[0] for suffix in get_suffixes()]
def all_suffixes():
    # Is deprecated and raises a warning in Python 3.6.
    import imp
    return [suffix for suffix, _, _ in imp.get_suffixes()]
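# For comparison, a sketch of the importlib replacement the comment above
# alludes to: importlib.machinery.all_suffixes() returns the same combined
# list (source + bytecode + extension suffixes) without the DeprecationWarning.
# The wrapper name is hypothetical, not part of the original module.
def all_suffixes_importlib():
    from importlib import machinery
    return machinery.all_suffixes()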
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================

import abc
import sys

from Crypto.Util.py3compat import byte_string
from Crypto.Util._file_system import pycryptodome_filename

#
# List of file suffixes for Python extensions
#
if sys.version_info[0] < 3:

    import imp
    extension_suffixes = []
    for ext, mod, typ in imp.get_suffixes():
        if typ == imp.C_EXTENSION:
            extension_suffixes.append(ext)

else:

    from importlib import machinery
    extension_suffixes = machinery.EXTENSION_SUFFIXES

# Which types with buffer interface we support (apart from byte strings)
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
    _buffer_type = (bytearray)
else:
    _buffer_type = (bytearray, memoryview)
import imp
import os
import sys
import unittest
import tempfile
import zipfile

from distutils.command.build_ext import build_ext
from distutils.core import Distribution
from distutils.extension import Extension

import pydzipimport

# The filename extension for compiled extension modules.
SO_EXT = [
    suffix[0] for suffix in imp.get_suffixes()
    if suffix[2] == imp.C_EXTENSION
][0]


def prepare_sample_zip(file):
    """Create a zipfile which contains the `sample` package.

    On completion, the contents of `file` will be::

        sample/
        sample/one.py
        sample/__init__.<SO_EXT>
        sample/two.<SO_EXT>

    The extension modules are compiled in a temporary directory.
    """