def GetImports(cls, directory, out_filters=[]):
    """
    Lists imports made by python files found in the given directory.
    The directory will be scanned recursively.

    :param unicode directory: Path to a directory
    :param list(unicode) out_filters: List of filename filters .. see:: FindFiles
    :rtype: list(unicode)
    :returns: List of modules imported by the python files
    """
    from zerotk.easyfs import FindFiles
    from modulefinder import ModuleFinder as Finder

    finder = Finder(bytes(directory))
    for py_filename in FindFiles(directory, in_filters='*.py', out_filters=out_filters):
        finder.run_script(py_filename)
    return map(unicode, sorted(finder.modules.keys() + finder.badmodules.keys()))
def spider(self):
    print '-' * 40
    while len(self.full_path) != 0:
        full_path = self.full_path.pop()
        self.full_path_discard.append(full_path)
        finder = ModuleFinder()
        finder.run_script(full_path)
        for name, mod in finder.modules.iteritems():
            full_path = str(mod.__file__).replace('.', '/')
            if full_path not in self.full_path_discard:
                if full_path.startswith('/home/alfred/pipline/code/'):
                    self.result.append(name.replace('.', '/'))
                    if not full_path.startswith('/home/alfred/pipline/code/U'):
                        self.full_path.append(full_path)
                else:
                    self.full_path_discard.append(full_path)
        names = finder.any_missing()
        for name in names:
            for folder in self.all_path:
                try:
                    full_path = folder + name + '.py'
                    if os.path.isfile(full_path):
                        if full_path not in self.full_path_discard:
                            self.full_path.append(full_path)
                            self.result.append(full_path)
                except:
                    pass
    for item in sorted(set(self.result)):
        print item
    print '=' * 100
def run(self):
    p = self.cmdLineParams
    finder = ModuleFinder()
    finder.run_script(p.infile.name)

    modDict = {}
    for name, mod in finder.modules.items():
        if mod.__file__ is None:
            key = str(mod.__file__)
        else:
            key = os.path.abspath(mod.__file__)
        if key not in modDict:
            modDict[key] = []
        modDict[key].append(name)

    # sort
    for key in sorted(modDict):
        if p.restrictToProject and not key.startswith(__PROJECT_ROOT__):
            continue
        modules = modDict[key]
        sys.stdout.write(key)
        for moduleName in modules:
            sys.stdout.write('\t')
            sys.stdout.write(moduleName)
        sys.stdout.write('\n')
def find_failed_imports(module_path):
    """Returns all modules that failed to import when loading a particular module."""
    finder = ModuleFinder()
    finder.run_script(module_path)
    bad_modules = dict(finder.badmodules)
    return return_missing_base_modules(bad_modules)
def find_and_register_qabbage_tasks(outer_scope, exclude_tests=True):
    """
    Walks the directory and finds all of the scripts that import qabbage tasks.

    :return: file paths with qabbage tasks
    """
    all_files = []
    qabbage_files = []
    for root, dirs, files in os.walk("."):
        path = root.split('/')
        for file in files:
            _, file_extension = os.path.splitext(file)
            if file_extension == '.py':
                all_files.append(os.path.join(*path, file))

    finder = ModuleFinder()
    for file in filter(lambda x: 'test_' not in x, all_files):
        finder.run_script(file)
        for name, mod in finder.modules.items():
            if name == 'qabbage':
                qabbage_files.append(file)

    promises = load_qabbage_modules(qabbage_files)
    for promise in promises:
        outer_scope[promise[0]] = promise[1]
    return promises
def _reload(obj, name=None):
    obj_type = type(obj)
    module = inspect.getmodule(obj)
    sys_modules_frozen = sys.modules.copy()
    sys_modules_sorted = sorted(sys_modules_frozen.items(),
                                key=lambda t: len(t[0]), reverse=True)
    if not module:
        for modname, module in sys_modules_sorted:
            if modname.split('.')[0] in STDLIB_NAMES:
                continue
            candidate = getattr(module, name, Sentinel)
            if (candidate is not Sentinel and isinstance(candidate, obj_type)):
                break
        else:
            module = None
    if not module:
        raise TypeError()

    finder = ModuleFinder()
    finder.run_script(module.__file__)
    for m in finder.modules:
        if m.split('.')[0] in STDLIB_NAMES:
            continue
        try:
            reload(sys_modules_frozen[m])
        except KeyError:
            pass
    return reload(module)
def _modules_from_buffer(self, buffer):
    finder = ModuleFinder(excludes=self.excludes)
    bufdir = os.path.dirname(buffer.name)
    sys.path.insert(0, bufdir)
    try:
        finder.load_module('__main__', buffer, '__main__.py', ('', 'r', 1))
    finally:
        sys.path.remove(bufdir)
    self.badmodules.update(finder.badmodules.keys())

    stdlib = sysconfig.get_python_lib(standard_lib=True)
    stdlib_local = sysconfig.get_python_lib(standard_lib=True, prefix='/usr/local')
    dist_packages = sysconfig.get_python_lib()
    stdlib_user_local = sysconfig.get_python_lib(
        standard_lib=True, prefix=f'{os.path.expanduser("~")}/.local')

    for module in finder.modules.values():
        if module.__file__ is None:
            continue
        elif module.__file__.startswith(stdlib):
            continue
        elif module.__file__.startswith(stdlib_local):
            continue
        elif module.__file__.startswith(dist_packages):
            continue
        elif module.__file__.startswith(stdlib_user_local):
            continue
        elif module.__file__ == buffer.name:
            continue
        elif module.__file__ == '__main__.py':
            continue
        elif module.__file__ in self.modules:
            continue
        self.modules.add(module.__file__)
        yield module
def get_files_with_modules(directory_path, search_recursively=False):
    files_with_modules = []
    if search_recursively:
        original_file_paths = gfp.get_paths(directory_path)
        unique_file_paths = []
        for file_path_index, file_path in enumerate(original_file_paths):
            files_with_modules.append([
                file_path,
                os.path.dirname(file_path).rsplit(os.path.sep, 1)[1]
            ])
        finder = ModuleFinder()
        for file_path in original_file_paths:
            try:
                finder.run_script(file_path)
            except ImportError:
                print("Could not load", file_path)
        for key in finder.modules.items():
            item_path = key[1].__file__ if key[1].__file__ else key[1].__name__ + " (built-in)"
            if '.' not in item_path or item_path.endswith('.py'):
                item_data = [item_path, key[1].__name__]
                if item_data[0] not in unique_file_paths and item_data[0] not in original_file_paths:
                    unique_file_paths.append(item_data[0])
                    files_with_modules.append(item_data)
        files_with_modules.sort(key=lambda item: item[0])
    else:
        files_with_modules = gfp.get_paths(directory_path)
        for file_path_index, file_path in enumerate(files_with_modules):
            files_with_modules[file_path_index] = [
                file_path,
                os.path.dirname(file_path).rsplit(os.path.sep, 1)[1]
            ]
        files_with_modules.sort(key=lambda item: item[0])
    return files_with_modules
def _get_dependencies(entry_point):
    # Dynamically find all imported modules
    from modulefinder import ModuleFinder
    finder = ModuleFinder()
    finder.run_script(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), entry_point))
    imports = list(
        set([
            v.__file__.split('site-packages/', 1)[-1].split('/')[0]
            for (k, v) in finder.modules.items()
            if v.__file__ is not None and "site-packages" in v.__file__
        ]))
    # Get just the modules, ignore the so and py now (maybe useful for calls to add_file)
    modules = [i.split('.py')[0] for i in imports if ".so" not in i]
    so_files = list(
        set([
            v.__file__ for (k, v) in finder.modules.items()
            if v.__file__ is not None and "site-packages" in v.__file__
            and ".so" in v.__file__
        ]))
    return set(modules), so_files
def GetImports(cls, directory, out_filters=[]):
    '''
    Lists imports made by python files found in the given directory.
    The directory will be scanned recursively.

    :param str directory: Path to a directory
    :param list(str) out_filters: List of filename filters .. see:: FindFiles
    :rtype: list(str)
    :returns: List of modules imported by the python files
    '''
    from ben10.filesystem import FindFiles
    from modulefinder import ModuleFinder as Finder

    finder = Finder(directory)
    for py_filename in FindFiles(directory, in_filters='*.py', out_filters=out_filters):
        finder.run_script(py_filename)
    return sorted(finder.modules.keys() + finder.badmodules.keys())
def add_mocks(filename):
    gp = mock.patch('ansible.module_utils.basic.get_platform').start()
    gp.return_value = 'linux'

    module_mock = mock.MagicMock()
    mocks = []
    for module_class in MODULE_CLASSES:
        mocks.append(
            mock.patch('ansible.module_utils.basic.AnsibleModule', new=module_mock)
        )
    for m in mocks:
        p = m.start()
        p.side_effect = AnsibleModuleCallError()

    finder = ModuleFinder()
    try:
        finder.run_script(filename)
    except:
        pass

    sys_mock = mock.MagicMock()
    sys_mock.__version__ = '0.0.0'
    sys_mocks = []
    for module, sources in finder.badmodules.items():
        if module in sys.modules:
            continue
        if [s for s in sources if s[:7] in ['ansible', '__main_']]:
            parts = module.split('.')
            for i in range(len(parts)):
                dotted = '.'.join(parts[:i+1])
                sys.modules[dotted] = sys_mock
                sys_mocks.append(dotted)

    return module_mock, mocks, sys_mocks
def get_imports(self, file):
    finder = ModuleFinder()
    finder.run_script(file)
    im = []
    for name, mod in finder.modules.items():
        im.append(name)
    return im
def print_type_dir():
    finder = ModuleFinder()
    finder.run_script('from_import.py')

    print('dir ModuleFinder: {}'.format(dir(finder)))
    """
    dir ModuleFinder: ['__class__', '__delattr__', '__dict__', '__dir__', '__doc__',
    '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__',
    '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__',
    '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__',
    '__str__', '__subclasshook__', '__weakref__', '_add_badmodule', '_safe_import_hook',
    'add_module', 'any_missing', 'any_missing_maybe', 'badmodules', 'debug',
    'determine_parent', 'ensure_fromlist', 'excludes', 'find_all_submodules',
    'find_head_package', 'find_module', 'import_hook', 'import_module', 'indent',
    'load_file', 'load_module', 'load_package', 'load_tail', 'modules', 'msg',
    'msgin', 'msgout', 'path', 'processed_paths', 'replace_paths',
    'replace_paths_in_code', 'report', 'run_script', 'scan_code', 'scan_opcodes']
    """

    for name, mod in finder.modules.items():
        print('type:{}'.format(type(mod)))
        #=> type:<class 'modulefinder.Module'>
        print('dir Module object:{}'.format(dir(mod)))
        """
        dir:['__class__', '__code__', '__delattr__', '__dict__', '__dir__', '__doc__',
        '__eq__', '__file__', '__format__', '__ge__', '__getattribute__', '__gt__',
        '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__',
        '__name__', '__ne__', '__new__', '__path__', '__reduce__', '__reduce_ex__',
        '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__',
        '__weakref__', 'globalnames', 'starimports']
        """
    return
def _get_source_of_module(module):
    # print("module", module)
    try:
        module_source = inspect.getsource(module)
    except OSError:
        return {}

    module_finder = ModuleFinder()
    module_finder.run_script(module.__file__)
    # ret = ast.parse(base_source)
    # print(dir(ret))

    accum = {
        module.__name__: module_source
    }
    for child_module_name, child_module_tuple in module_finder.modules.items():
        if child_module_name == "__main__":
            continue
        if child_module_name in accum:
            continue
        # found_module = imp.find_module(child_module_tuple.__name__)
        # child_module = imp.load_module(child_module_tuple.__name__, *found_module)
        child_module = importlib.import_module(child_module_tuple.__name__)
        print("processing module", child_module_tuple.__name__, child_module)
        accum.update(_get_source_of_module(child_module))
    return accum
def __init__(self, path=None, debug=0, excludes=[], replace_paths=[], **kwargs):
    ModuleFinder.__init__(self, path, debug, excludes, replace_paths)
    self.loadedModules = []
    self.moduleTypes = kwargs.pop("types", (imp.PY_SOURCE, imp.PY_COMPILED))
def get_files_data(general_path, file_paths):
    module_finder = ModuleFinder()
    files = []
    for file_path in file_paths:
        spec = importlib.util.spec_from_file_location(file_path, file_path)
        module_from_spec = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module_from_spec)
        functions_list = [
            function for function in getmembers(module_from_spec)
            if isfunction(function[1])
        ]
        module = file.Module(
            file_path.rsplit(os.path.sep, 1)[1].rsplit('.')[0], file_path)
        module.size = os.path.getsize(file_path)
        module_finder.run_script(module.path)
        module.fill_imports_and_functions(general_path, module_finder)
        files.append(module)
    for element in files:
        element.fill_dependecies(files)
    return files
def make_bom():
    """Generate a bill of materials and return the tt object."""
    finder = ModuleFinder()
    finder.run_script(sys.argv[0])
    tt = tytable.ttable()
    tt.add_option(tt.OPTION_LONGTABLE)
    tt.add_head(['name', 'ver', 'path', 'bytes', 'lines', 'sha-1'])
    tt.set_col_alignmnets("lllrrl")
    for inc in [False, True]:
        for name, mod in sorted(finder.modules.items()):
            if system_module(mod) != inc:
                continue  # don't use this one
            stats = file_stats(mod.__file__)
            ver = mod.globalnames.get('__version__', '---')
            if ver == 1 and name in sys.modules:
                # It has a version number; get it.
                try:
                    ver = sys.modules[name].__version__
                except AttributeError:
                    ver = '---'
            fname = mod.__file__
            if type(name) != str:
                name = ""
            if type(fname) != str:
                fname = ""
            tt.add_data([
                latex_tools.latex_escape(name), ver,
                latex_tools.latex_escape(fname),
                stats[0], stats[1], stats[2]
            ])
        tt.add_data(tt.HR)
    return tt
def basic_module_find(files):
    '''Do a basic module find on all files'''
    mf = ModuleFinder()
    for fname in files:
        mf.run_script(fname)
    return mf
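# Usage sketch for basic_module_find() above -- a minimal example assuming the
# helper is in scope; the two script names are hypothetical placeholders. It shows
# the ModuleFinder attributes callers usually inspect afterwards: `modules`
# (imports that were resolved) and `badmodules` (imports that could not be),
# plus the built-in report() summary.
if __name__ == '__main__':
    mf = basic_module_find(['script_a.py', 'script_b.py'])  # hypothetical paths
    print(sorted(mf.modules))      # every module the scripts pull in
    print(sorted(mf.badmodules))   # imports ModuleFinder could not resolve
    mf.report()                    # tabular summary on stdout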
def add_mocks(filename):
    gp = mock.patch('ansible.module_utils.basic.get_platform').start()
    gp.return_value = 'linux'

    module_mock = mock.MagicMock()
    mocks = []
    for module_class in MODULE_CLASSES:
        mocks.append(
            mock.patch('ansible.module_utils.basic.AnsibleModule', new=module_mock))
    for m in mocks:
        p = m.start()
        p.side_effect = AnsibleModuleCallError()

    finder = ModuleFinder()
    try:
        finder.run_script(filename)
    except:
        pass

    sys_mock = mock.MagicMock()
    sys_mock.__version__ = '0.0.0'
    sys_mocks = []
    for module, sources in finder.badmodules.items():
        if module in sys.modules:
            continue
        if [s for s in sources if s[:7] in ['ansible', '__main_']]:
            parts = module.split('.')
            for i in range(len(parts)):
                dotted = '.'.join(parts[:i + 1])
                sys.modules[dotted] = sys_mock
                sys_mocks.append(dotted)

    return module_mock, mocks, sys_mocks
def get_python_module_partials(
        pathToModuleFile,
        log):
    """
    *Get the names of the _partials imported into dryx python modules.*

    **Key Arguments:**
        - ``pathToModuleFile`` -- the path to the python module we wish to extract the _partial names for
        - ``log`` -- logger

    **Return:**
        - ``partialsDictionary`` -- a dictionary of the _partial names imported into the dryx python module, and a list of their functions

    .. todo::

        - [ ] when complete, clean get_python_module_partials function & add logging
    """
    ################ > IMPORTS ################
    ## STANDARD LIB ##
    from modulefinder import ModuleFinder
    import re
    import os
    import sys
    ## THIRD PARTY ##
    ## LOCAL APPLICATION ##

    log.debug('starting the ``get_python_module_partials`` function')

    ## VARIABLES ##
    partialsDictionary = {}
    finder = ModuleFinder()
    finder.run_script(pathToModuleFile)

    baseName = os.path.basename(pathToModuleFile).replace(".py", "")
    if baseName == "__init__":
        pathToModuleFile = pathToModuleFile.replace("__init__.py", "")
        baseName = os.path.basename(pathToModuleFile)
    reBaseName = re.compile(r"%s" % (baseName,))

    for name, mod in finder.modules.iteritems():
        if reBaseName.search(name):
            importList = []
            for key in mod.globalnames.keys():
                # if ("/Library/Frameworks/Python.framework/" in mod.__file__ or
                #         "macports" in mod.__file__) and "site-packages" not in
                #         mod.__file__:
                if "/Users/" not in mod.__file__:
                    # print "skipping %s" % (mod.__file__,)
                    continue
                importList.append(key)
            if len(importList):
                # print mod.__file__, importList
                partialsDictionary[name] = importList

    log.debug('completed the ``get_python_module_partials`` function')
    return partialsDictionary
def get_module_import_dict(object, scope='project', remove_packages=True):
    """
    Given some code object (or the full name of a module), find all the modules
    that must be imported for this object to run.

    :param object:
    :param scope: One of:
        'package': Only collect modules from the same root package (eg the root package of foo.bar.baz is foo).
        'project': Only collect modules in the same project (ie, modules whose root package is in the same directory as object's root package).
        'all': Collect all dependent modules (not recommended, as this will, for example, include a bunch of numpy code).
    :return: A dict<module_name: module_path>
    """
    assert scope in ('package', 'project', 'all')
    if isinstance(object, (list, tuple)):
        dicts, names = zip(*[get_module_import_dict(ob, scope=scope) for ob in object])
        return {k: v for d in dicts for k, v in d.iteritems()}, names
    elif isinstance(object, basestring):
        module = import_module(object)
    else:
        module = inspect.getmodule(object)
    module_file = get_src_file(module)

    finder = ModuleFinder()
    this_package = module.__name__.split('.')[0]
    LOGGER.info('Scanning Dependent Modules in {}. This may take some time...'.format(this_package))
    finder.run_script(module_file)
    module_files = {
        name: get_src_file(module)
        for name, module in finder.modules.iteritems()
        if module.__file__ is not None
    }
    module_files[module.__name__] = get_src_file(module)  # Don't forget yourself!

    if scope == 'package':
        module_files = {
            name: mod for name, mod in module_files.iteritems()
            if name.split('.')[0] == this_package
        }
    elif scope == 'project':
        base_dir = os.path.dirname(os.path.dirname(module.__file__))
        module_files = {
            name: mod for name, mod in module_files.iteritems()
            if mod.startswith(base_dir)
        }
    LOGGER.info('Scan Complete. {} dependent modules found.'.format(len(module_files)))
    # module_name_to_module_path = {name: get_src_file(m) for name, m in modules.iteritems()}

    if remove_packages:
        module_files = {
            name: path for name, path in module_files.iteritems()
            if not (path.endswith('__init__.py') or path.endswith('__init__.pyc'))
        }
    return module_files, module.__name__
def zip_std_lib(src_module, dst_file):
    """Compiles the Python standard library modules used by the source module and outputs to zip file."""
    finder = ModuleFinder()
    finder.run_script(src_module)
    modules = set()

    print('Writing dependencies to "%s"...' % DEP_OUT)
    with open(DEP_OUT, 'w') as f:
        for name, mod in finder.modules.items():
            print('%s: ' % name, end='', file=f)
            print(mod.__file__, file=f)
            if mod.__file__ is None:
                continue
            path = os.path.realpath(mod.__file__)
            if not path.startswith(os.path.normpath(STD_LIB)):
                continue
            while os.path.dirname(path) != os.path.normpath(STD_LIB):
                path = os.path.dirname(path)
            if os.path.isfile(path):
                modules.add(path)
            elif os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for i in files:
                        modules.add(os.path.join(root, i))
        print('-' * 50, file=f)
        print('### Modules NOT imported ###', file=f)
        print('\n'.join(finder.badmodules.keys()), file=f)

    modules = sorted([
        i for i in modules
        if i.endswith(('.py', '.pyc')) and not os.path.dirname(i).endswith('__pycache__')
    ])

    print('Writing standard library to "%s"...' % dst_file)
    with zipfile.ZipFile(dst_file, 'w', compression=zipfile.ZIP_DEFLATED) as z:
        for i in modules:
            root, ext = os.path.splitext(i)
            if ext == '.py':
                arcname = os.path.relpath(root, STD_LIB) + '.pyc'
                pyc = create_pyc(i)
            else:
                arcname = os.path.relpath(i, STD_LIB)
                with open(i, 'rb') as f:
                    pyc = f.read()
            z.writestr(arcname, pyc)
def _find_modules(module_name):
    """
    Ideally we want to find all the modules imported by the provided top-level module.
    The modules that are contained in the top level are returned in submodules.
    Other modules that are dependencies should be returned in others.
    """
    _others = set()
    _submodules = set()
    mod = importlib.import_module(module_name)

    if False:
        from modulefinder import ModuleFinder
        finder = ModuleFinder()
        # finder.load_file(mod.__file__)
        pathname = Path(mod.__file__)
        if pathname.name == '__init__.py':
            pathname = pathname.parent
        finder.load_package(module_name, str(pathname))
        modules = set(finder.modules) | set(finder.badmodules)
        modules ^= {'__main__'}
        _others = {name if module_name not in name else name.split('.')[0]
                   for name in finder.modules.keys()}
        _submodules = {name for name in finder.modules.keys() if module_name in name}

    if True:
        def get_imports(_module_name):
            import dis
            mod = importlib.import_module(_module_name)
            instructions = dis.get_instructions(inspect.getsource(mod))
            imports = [i.argval for i in instructions if 'IMPORT' in i.opname]
            subs = [getattr(mod, i, None) or importlib.import_module(i, _module_name) or i
                    for i in imports]
            return subs

        # Just get all submodules from sys.modules
        for key, val in sys.modules.items():
            if module_name == key or key.startswith(module_name + "."):
                _submodules |= {val.__name__}

        for attr, val in mod.__dict__.items():
            # This misses recursive submodules, and ones that are imported like
            # from <module> import <function>
            if inspect.ismodule(val):
                if module_name in val.__name__:
                    _submodules |= {val.__name__}
                else:
                    _others |= {val.__name__}

    _submodules ^= {module_name}
    # Iterate over a copy: the recursion below grows _submodules while we loop.
    for m in list(_submodules):
        s, o = _find_modules(m)
        _submodules |= s
        _others |= o
    return _submodules, _others
def zip_std_lib(src_module, dst_file):
    """Compiles the Python standard library modules used by the source module and outputs to zip file."""
    finder = ModuleFinder()
    finder.run_script(src_module)
    modules = set()

    print('Writing dependencies to "%s"...' % DEP_OUT)
    with open(DEP_OUT, "w") as f:
        for name, mod in finder.modules.items():
            print("%s: " % name, end="", file=f)
            print(mod.__file__, file=f)
            if mod.__file__ is None:
                continue
            path = os.path.realpath(mod.__file__)
            if not path.startswith(os.path.normpath(STD_LIB)):
                continue
            while os.path.dirname(path) != os.path.normpath(STD_LIB):
                path = os.path.dirname(path)
            if os.path.isfile(path):
                modules.add(path)
            elif os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for i in files:
                        modules.add(os.path.join(root, i))
        print("-" * 50, file=f)
        print("### Modules NOT imported ###", file=f)
        print("\n".join(finder.badmodules.keys()), file=f)

    modules = sorted(
        [i for i in modules
         if i.endswith((".py", ".pyc")) and not os.path.dirname(i).endswith("__pycache__")]
    )

    print('Writing standard library to "%s"...' % dst_file)
    with zipfile.ZipFile(dst_file, "w", compression=zipfile.ZIP_DEFLATED) as z:
        for i in modules:
            root, ext = os.path.splitext(i)
            if ext == ".py":
                arcname = os.path.relpath(root, STD_LIB) + ".pyc"
                pyc = create_pyc(i)
            else:
                arcname = os.path.relpath(i, STD_LIB)
                with open(i, "rb") as f:
                    pyc = f.read()
            z.writestr(arcname, pyc)
def find_failed_imports_by_directory(directory):
    """Returns all modules that failed to import when loading all modules below a directory."""
    finder = ModuleFinder()
    py_files = _find_all_python_modules(directory)
    for f in py_files:
        finder.run_script(f)
    bad_modules = dict(finder.badmodules)
    return return_missing_base_modules(bad_modules)
def get_imported_module_paths(module_path):
    finder = ModuleFinder([os.path.dirname(module_path)])
    finder.run_script(module_path)

    imported_modules = []
    for module_name, module in finder.modules.items():
        if module_name != '__main__':
            path = getattr(module, '__file__', None)
            if path is not None and os.path.isfile(path):
                imported_modules.append(path)
    return imported_modules
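# Usage sketch for get_imported_module_paths() above, under the assumption that the
# argument is the path of an existing script; '/path/to/some_script.py' is only a
# placeholder. Because the finder is constructed with just the script's own
# directory as its search path, the returned paths are mostly sibling modules
# rather than stdlib or site-packages files.
for imported_path in get_imported_module_paths('/path/to/some_script.py'):
    print(imported_path)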
def get_modules(filename):
    finder = ModuleFinder()
    finder.run_script(filename)
    list_modules = {}
    for name, mod in finder.modules.iteritems():
        print name, mod
    return list_modules
def __init__(self, build_dir: Union[str, Path]):
    if isinstance(build_dir, str):
        self.build_dir = Path(build_dir)
    else:
        self.build_dir = build_dir
    self.zip_build_dir = self.build_dir / "libs" / "lib"
    self.dylib_dir = self.build_dir / "dist" / "lib" / "lib-dynload"
    self.finder = ModuleFinder()
def on_modified(self, event):
    if not event.src_path.endswith('.py'):
        return
    print(f'modules using {event.src_path}')
    finder = ModuleFinder()
    finder.load_file(event.src_path)
    for name, mod in finder.modules.items():
        print(name)
        print(mod)
def check_module(self, module_name):
    from modulefinder import ModuleFinder
    finder = ModuleFinder(debug=2)
    finder.run_script(module_name)
    for name, mod in finder.modules.items():
        try:
            __import__(name, fromlist=mod.globalnames.keys(), level=1)
            sys.stdout.write('.')
        except ImportError as e:
            print("ERROR IMPORTING %s: " % name + " -- " + e.message)
def patched(self, project_dir, spec, sack):
    finder = ModuleFinder()
    for item in list(project_dir.glob('**/*.py')):
        try:
            finder.run_script(str(item))
            # Record every imported module that lives under /usr/lib.
            # (The loop variable must not reuse the finder's name, or the
            # finder would be clobbered before the next file is processed.)
            for _, module in finder.modules.items():
                if module.__file__ and module.__file__.startswith("/usr/lib"):
                    spec.required_files.add(module.__file__)
        except ImportError as ie:
            logging.warn("Exception was raised by ModuleFinder:\n" + str(ie) +
                         "\nOn file: " + str(item))
def imports_metadata(path: Union[str, Path]) -> bool:
    mf = ModuleFinder()
    if isinstance(path, Path):
        path = str(path)
    mf.run_script(path)
    modules = list(mod for name, mod in mf.modules.items() if "metadata" in name)
    if modules:
        return True
    else:
        return False
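# Usage sketch for imports_metadata() above: it reports whether the analysed script
# pulls in any module whose name contains "metadata" (importlib.metadata, for
# instance). 'some_script.py' is a hypothetical file name; both str and
# pathlib.Path arguments are accepted by the helper.
from pathlib import Path

print(imports_metadata(Path('some_script.py')))  # hypothetical script path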
def patched(self, project_dir, spec, sack):
    files = list(project_dir.glob('*.py'))
    files.extend(list(project_dir.glob('*/*.py')))
    finder = ModuleFinder()
    for item in files:
        finder.run_script(str(item))
        # Record every imported module that lives under /usr/lib; a distinct
        # loop variable keeps the finder itself from being overwritten.
        for _, module in finder.modules.items():
            if module.__file__ and module.__file__.startswith("/usr/lib"):
                spec.required_files.add(module.__file__)
def test_dependencies(self):
    path = os.path.join('lingcorpora', 'corpora', '%s_corpus.py' % self.lang)
    legit_deps = pickle.load(open('dependencies.pickle', 'rb'))
    mf = ModuleFinder()
    mf.run_script(path)
    deps = set(mf.modules.keys())
    self.assertEqual(len(deps.difference(legit_deps)), 0)
    del deps, legit_deps
def main(argv):
    path = sys.path[:]
    path[0] = os.path.dirname(argv[0])
    mf = ModuleFinder(path)
    for f in argv:
        mf.run_script(f)
    paths = sorted(list(set([os.path.abspath(x.__file__)
                             for x in mf.modules.values() if x.__file__])))
    cwd = os.getcwd()
    paths = [x for x in paths if x.startswith(cwd)]
    m = len(cwd) + 1
    paths = argv + [x[m:] for x in paths]
    print ' '.join(paths)
def make_module_list(input_file, output_file):
    module_list = []
    finder = ModuleFinder()
    finder.run_script(input_file)
    keys = sorted(finder.modules.keys())
    for key in keys:
        if finder.modules[key].__path__ and key.find(".") == -1:
            module_list.append(key)
    with open(output_file, "w") as f_o:
        f_o.write('pip install ' + ' '.join(module_list))
    print('pip install ' + ' '.join(module_list))
def diff_import_fromimport():
    files = ['from_import.py', 'import.py']
    for f in files:
        print('=' * 10)
        print('filename:{}'.format(f))

        # A ModuleFinder object is created for each file
        finder = ModuleFinder()
        finder.run_script(f)
        for name, mod in finder.modules.items():
            print('-' * 10)
            print('name:{}'.format(name))
            print('modules:{}'.format(','.join(list(mod.globalnames.keys()))))
def get_dependencies(module):
    from modulefinder import ModuleFinder
    import os

    finder = ModuleFinder()
    finder.run_script(module)
    all_module_paths = [os.path.abspath(m.__file__)
                        for m in finder.modules.values()
                        if m.__file__ is not None]

    def is_in_same_path(p):
        return p and os.path.dirname(p).startswith(os.path.dirname(module))

    return [x for x in all_module_paths if is_in_same_path(x) and x != module]
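# Usage sketch for get_dependencies() above: it expects the path of a .py file and
# returns absolute paths of imported modules living under the same directory tree.
# Passing __file__ here is just a convenient self-referential example.
if __name__ == '__main__':
    for dep in get_dependencies(__file__):
        print(dep)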
def myMethod(self):
    finder = ModuleFinder()
    myDrive = "g:\\"
    mySrcPath = "git\\6700Spring16\\CA05\\submissions\\danieljames_1246453_74826857_jhd0008\\softwareProcess\\SoftwareProcess\\Assignment\\prod"
    myPath = "git\\6700Spring16\\CA05\\submissions\\danieljames_1246453_74826857_jhd0008\\softwareProcess\\SoftwareProcess\\Assignment\\test"
    #myPath = "git\\6700Spring16\\CA05\\submissions"
    scriptname = myDrive + myPath + '\\FixTest.py'
    with io.open(scriptname) as scriptfile:
        code = compile(scriptfile.read(), scriptname, 'exec')
    mod = importlib.import_module('FixTest', package="Assignment")
    print 'Inspect output:'
    for i in inspect.getmembers(mod, inspect.ismodule):
        print i[0]
    for root, myDir, files in os.walk(myDrive + myPath):
        for myFile in files:
            print myFile
    sys.path.insert(0, os.path.join(myDrive + myPath))
    print sys.path
    finder.run_script(os.path.join(myDrive + mySrcPath + '\\Fix.py'))
    print 'Loaded modules:'
    for name, mod in finder.modules.iteritems():
        print '%s: ' % name,
        print ','.join(mod.globalnames.keys()[:3])
    runpy.run_path(os.path.join(myDrive, myPath, 'FixTest.py'))
    __all__ = []
    module_name = None
    for loader, module_name, is_pkg in pkgutil.walk_packages("g:\\"):
        print 'in for loop'
        #__all__.append(module_name)
        #module = loader.find_module(module_name).load_module(module_name)
        print 'Found submodule %s ' % module_name
        print module_name
    '''
    src_data = pkgutil.get_data('Assignment.prod', "Fix.py")
    if src_data == None:
        print "No source data"
    else:
        print src_data
    #mod = loader.load_module(myDrive+myPath+"Fix.py")
    myString = "diff --git a/rtb0006/CA01/prod/__init__.pyc b/rtb0006/CA01/prod/__init__.pyc"
    testSuite = TestLoader().discover("g:\\git\\6700Spring16\\CA05\\submissions\\danieljames_1246453_74826857_jhd0008\\softwareProcess\\SoftwareProcess\\Assignment\\test", pattern="*.py")
    ImpImporter("g:\\git\\6700Spring16\\CA05\\submissions\\danieljames_1246453_74826857_jhd00085\\softwareProcess\\SoftwareProcess\\Assignment\\prod")
    print testSuite.countTestCases()
    for p in sys.path:
        print p
    result = TextTestRunner().run(testSuite)
    print result.testsRun
    '''
def read_config(self):
    """
    We have to split modules by apps. We use ModuleFinder to get all imports
    from each module. Then we select those not starting with "/usr".
    """
    self.apps = {}
    self.apps_rev = {}
    if (self.config is None or not isinstance(self.config, basestring)):
        return
    finder = ModuleFinder()

    # map: app -> modules
    apps = None
    with open(self.config, 'r') as f:
        apps = json.load(f)
        if apps is None:
            log.debug("Apps are empty. Try setting modules in config: %s" % (
                self.config))
        f.close()

    i = 0
    if not isinstance(apps, list):
        return
    for t in apps:
        if not isinstance(t, list):
            continue
        self.apps[i] = set()
        for module in t:
            if not isinstance(module, basestring):
                continue
            self.apps[i].add(module)
            try:
                finder.run_script(module)
                modules = [m.__file__.replace('.pyo', '.py')
                           for m in finder.modules.values()
                           if (hasattr(m, "__file__")
                               and isinstance(m.__file__, basestring)
                               and not m.__file__.startswith('/usr'))]
                self.apps[i].update(modules)
            except Exception as e:
                log.debug(str(e))
        i += 1

    # Reverse map: module -> apps
    for i, modules in self.apps.items():
        for module in modules:
            if module not in self.apps_rev:
                self.apps_rev[module] = set()
            self.apps_rev[module].add(i)
def _analyze_file_deps(build_zip_descriptor, file_to_analyze):
    """
    Returns a path normalized list of files a file given by file_to_analyze depends upon.
    """
    mf = ModuleFinder(excludes=build_zip_descriptor.exclude_modules)
    mf.run_script(file_to_analyze)

    file_list = []
    for module_name in mf.modules.keys():
        module_file = mf.modules[module_name].__file__
        if not module_file:
            continue
        file_list.append(os.path.abspath(module_file))
    return file_list
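# Sketch of how _analyze_file_deps() above might be driven. The descriptor type is
# not defined in this snippet, so a SimpleNamespace stands in for it, and
# 'entry_point.py' is a hypothetical script; only the `exclude_modules` attribute
# (forwarded to ModuleFinder(excludes=...)) is assumed.
import types

descriptor = types.SimpleNamespace(exclude_modules=['tkinter'])
for dep_path in _analyze_file_deps(descriptor, 'entry_point.py'):
    print(dep_path)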
def get_submodules(package):
    """Find names of all modules in `package`

    Parameters
    ----------
    package : imported Python package

    Returns
    -------
    list
        Sorted list of fully-qualified module names
    """
    mf = ModuleFinder()
    modules = sorted(["%s.%s" % (package.__name__, X)
                      for X in mf.find_all_submodules(package)
                      if X != "__init__"])
    return modules
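# Usage sketch for get_submodules() above. ModuleFinder.find_all_submodules() only
# lists modules sitting directly in the package's directory, so the result is one
# level deep (it does not recurse into subpackages). The stdlib `json` package is
# used purely as an example argument.
import json

print(get_submodules(json))  # e.g. ['json.decoder', 'json.encoder', 'json.scanner', 'json.tool']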
def import_module(self, partnam, fqname, parent):
    depth = self._depth
    if depth is None or len(self._callerStack) <= depth:
        r = ModuleFinder.import_module(self, partnam, fqname, parent)
        if r is not None:
            self._depNode.addDepedency(self._callerStack + [r.__file__])
        return r
def main():
    description = "Convenience script to generate HTML documentation using pydoc"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--out', required=True, metavar="PATH",
                        help="Directory in which to write HTML output files.")
    parser.add_argument('--recursive', action='store_true', default=False,
                        help="Recursively import documentation for dependencies. "
                             "If not recursive, zcall documents will contain broken "
                             "links to standard modules. Recursive mode generates "
                             "approximately 180 HTML files comprising 6 MB of data.")
    parser.add_argument('--verbose', action='store_true', default=False,
                        help="Write pydoc information to stdout.")
    args = vars(parser.parse_args())
    recursive = args['recursive']
    verbose = args['verbose']
    if not verbose:
        # suppress stdout chatter from pydoc.writedoc
        sys.stdout = open('/dev/null', 'w')

    localDir = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.abspath(localDir + "/.."))  # import from zcall dir
    zcallDir = os.path.abspath(localDir + "/../zcall")
    outDir = os.path.abspath(args['out'])
    if not (os.access(outDir, os.W_OK) and os.path.isdir(outDir)):
        msg = "ERROR: Output path " + outDir + " is not a writable directory.\n"
        sys.stderr.write(msg)
        sys.exit(1)
    os.chdir(outDir)

    import zcall
    pydoc.writedoc(zcall)

    modules = set()
    zcall = set()
    scripts = []
    mf = ModuleFinder()
    for script in os.listdir(zcallDir):
        if re.search("\.py$", script) and script != "__init__.py":
            words = re.split("\.", script)
            words.pop()
            scriptName = ".".join(words)  # name without .py suffix
            modules.add("zcall." + scriptName)
            zcall.add(scriptName)
            scripts.append(script)
    if recursive:
        for script in scripts:
            mf.run_script(os.path.join(zcallDir, script))
        for name, mod in mf.modules.iteritems():
            if name not in zcall:
                modules.add(name)
    for module in modules:
        pydoc.writedoc(import_module(module))
def main():
    finder = ModuleFinder()
    finder.run_script('sample.py')

    print('dir ModuleFinder: {}'.format(dir(finder)))

    for name, mod in finder.modules.items():
        print("type:{}".format(type(mod)))
        print("dir module object:{}".format(dir(mod)))

    for name, mod in finder.modules.items():
        print('-' * 20)
        print("name:%s" % name)
        print("globalnames:%s" % mod.globalnames)
        print("modules:%s" % ','.join(list(mod.globalnames.keys())))
        print("starimports:{}".format(mod.starimports))

    print("bad modules:{}".format(','.join(finder.badmodules.keys())))
def _get_dependencies(entry_point):
    # Dynamically find all imported modules
    from modulefinder import ModuleFinder
    finder = ModuleFinder()
    finder.run_script(entry_point)
    imports = list(set([v.__file__.split('site-packages/', 1)[-1].split('/')[0]
                        for (k, v) in finder.modules.items()
                        if v.__file__ is not None and "site-packages" in v.__file__]))
    # Get just the modules, ignore the so and py now (maybe useful for calls to add_file)
    modules = [i.split('.py')[0] for i in imports if ".so" not in i]
    so_files = list(set([v.__file__ for (k, v) in finder.modules.items()
                         if v.__file__ is not None and "site-packages" in v.__file__
                         and ".so" in v.__file__]))
    return set(modules), so_files
def find_modules(self, scriptname, excludes=[], verbose=False):
    """
    Find the modules we'd want to include in the distribution.
    """
    path = sys.path[:]
    path.insert(0, os.path.dirname(scriptname))
    mf = ModuleFinder(path=path, excludes=excludes)
    mf.run_script(scriptname)
    modulenames = mf.modules.keys()
    modulenames.sort()

    self.log("Looking for modules used by '%s'...\n" % scriptname)
    log_format = "%-2s %-30s %s"
    self.log(log_format % ('', 'Module name', 'Module location'))
    self.log(log_format % ('--', '-' * 30, '-' * 30))

    modules = []
    for name in modulenames:
        m = mf.modules[name]

        # builtin
        #
        if not m.__file__:
            continue

        mpath = os.path.normcase(m.__file__)

        # Modules in Python distribution.
        # Pass on only those that live in site-packages
        #
        if mpath.startswith(self.site_packages):
            pass
        elif mpath.startswith(self.sys_prefix):
            continue

        type = "P" if m.__path__ else "m"
        modules.append((name, type, m.__file__))
        self.log(log_format % (type, name, m.__file__))

    self.log("")
    return modules
def import_hook(self, name, caller=None, fromlist=None, level=None):
    if caller is None:
        return None
    try:
        self._callerStack.append(caller.__file__)
        return ModuleFinder.import_hook(self, name, caller, fromlist)
    finally:
        self._callerStack.pop()
def load_module(self, fqname, fp, pathname, file_info):
    if pathname:
        if not isinstance(pathname, Path):
            pathname = Path(pathname)
        if pathname.hasExtension('cmd'):
            line = fp.readline().strip()
            # pretend the cmd script is a py file
            suffix, mode, type = file_info[0], file_info[1], imp.PY_SOURCE
            assert '@setlocal' in line and '& python' in line, "Doesn't seem to be a python cmd script!"
    return ModuleFinder.load_module(self, fqname, fp, pathname, file_info)
def _getImported_all(cls, moduleObj, isRecursive, allSubs=[]):
    if cls.isBuiltin(moduleObj) or cls.isStock(moduleObj):
        return
    if moduleObj.__name__ == "__main__":
        return
    for i in allSubs:
        if i.__name__ == moduleObj.__name__ and i.__file__ == moduleObj.__file__:
            return
    # print("adding " + str(moduleObj))
    allSubs.append(moduleObj)
    if isRecursive or allSubs == []:
        # add path relative to current module
        newPaths = [os.path.split(cls.convert_objectToFullPath(moduleObj))[0]] + sys.path
        finder = ModuleFinder(path=newPaths)
        finder.run_script(cls.convert_objectToFullPath(moduleObj))
        for subModuleName, subModuleObj in finder.modules.iteritems():
            # print("discovered " + str(subModuleObj))
            cls._getImported_all(subModuleObj, True, allSubs)
    return allSubs
def __init__(self, scriptPath, additionalPaths=(), depth=_DEPTH, *a, **kw):
    self._depth = depth
    self._depNode = DependencyNode()
    self._callerStack = []
    ModuleFinder.__init__(self, *a, **kw)

    # add in the additional paths
    preSysPath = sys.path[:]
    for p in reversed(additionalPaths):
        sys.path.insert(0, str(p))

    # insert the path of the script
    sys.path.insert(0, str(Path(scriptPath).up()))
    try:
        self.run_script(scriptPath)
    except SyntaxError:
        pass
    finally:
        # restore the original sys.path
        sys.path = preSysPath
def load_module(self, fqname, fp, pathname, fileInfo):
    _, _, iFileType = fileInfo
    m = ModuleFinder.load_module(self, fqname, fp, pathname, fileInfo)
    if iFileType in self.moduleTypes:
        p = os.path.normpath(m.__file__).replace("\\", "/")
        if not osp.normcase(p).startswith(osp.normcase(SYS_EXEC_PREFIX)):
            m.__file__ = p
            self.loadedModules.append(m)
    return m
def describe_python_module(type_, location, entities, pkgdb=None):
    from modulefinder import ModuleFinder
    spec = dict(location=location)
    if location.endswith('.so'):
        # treat binary extension as a library
        return describe_binary(type_, location, entities, pkgdb=pkgdb)
    fpath = os.path.realpath(location)
    spec['realpath'] = fpath
    fhash = sha1sum(fpath)
    spec['sha1sum'] = fhash
    if fhash in entities:
        # do not process twice
        return fhash
    else:
        entities[fhash] = spec
    lgr.debug("describe %s at '%s' (%s)" % (type_, fpath, fhash))
    spec['type'] = type_

    # find all related modules
    modfind = ModuleFinder()
    try:
        modfind.run_script(location)
    except ImportError as e:
        lgr.warning("cannot determine Python module dependencies of %s (%s)"
                    % (fpath, e))
    if len(modfind.modules):
        spec['depmods'] = []
        for modname, mod in iteritems(modfind.modules):
            if not mod.__file__:
                # probably builtin
                continue
            # XXX STOP HERE FOR NOW UNTIL THERE IS A WAY TO CONTROL THE RECURSION DEPTH
            continue
            #spec['depmods'].append(
            #    describe_python_module('python_module', mod.__file__, entities,
            #                           pkgdb=pkgdb))
    return fhash
def __init__(self, directory, version=None, language='english'):
    self.directory = directory
    self.version = version or determineVersion(self.directory)
    self.language = language.lower()
    self.dcfiles = [os.path.join(directory, 'config/otp.dc'),
                    os.path.join(directory, 'config/toon.dc')]
    self.modules = {}
    self.path_overrides = {}
    self.config_file = os.path.join(self.directory, 'config/public_client.prc')
    self.mf = ModuleFinder(sys.path + [self.directory])

    from panda3d.direct import DCFile
    self.dcf = DCFile()