def get_modulefinder(self):
    """Build and return the module graph for the configured scripts.

    Uses modulegraph's ``find_modules`` with the console/GUI script lists
    and this instance's include/package/exclude settings. Graph debugging
    verbosity is raised when ``self.debug_modulegraph`` is set.
    """
    return find_modules(
        scripts=scripts['console'] + scripts['gui'],
        includes=list(self.includes) + main_modules['console'],
        packages=self.packages,
        excludes=self.excludes,
        # modulegraph's most verbose debug level when requested, silent otherwise
        debug=4 if self.debug_modulegraph else 0,
    )
def get_modulefinder(self):
    """Construct the dependency graph for all bundled scripts.

    Combines console and GUI scripts, adds the configured includes plus the
    console main modules, and honours the package/exclude lists.
    """
    if self.debug_modulegraph:
        graph_verbosity = 4  # maximum modulegraph tracing
    else:
        graph_verbosity = 0
    return find_modules(
        scripts=scripts['console'] + scripts['gui'],
        includes=list(self.includes) + main_modules['console'],
        packages=self.packages,
        excludes=self.excludes,
        debug=graph_verbosity,
    )
def analyze_script(self, filenames):
    """Analyze the given scripts and collect their Python dependencies.

    Runs modulegraph over *filenames* (honouring ``self.excludes``) and
    returns two lists: plain Python modules and compiled extensions.
    """
    # Imported lazily so modulegraph is only required when analysis runs.
    from modulegraph.find_modules import find_modules, parse_mf_results

    graph = find_modules(filenames, excludes=self.excludes)
    # parse_mf_results yields the (py_files, extensions) pair directly.
    return parse_mf_results(graph)
def get_modulefinder(self):
    """Return the modulegraph for the scripts gathered by ``collect_scripts``.

    Passes through this instance's include/package/exclude configuration;
    graph tracing is enabled when ``self.debug_modulegraph`` is set.
    """
    return find_modules(
        scripts=self.collect_scripts(),
        includes=self.includes,
        packages=self.packages,
        excludes=self.excludes,
        debug=4 if self.debug_modulegraph else 0,
    )
def testIncludePackageWithExclude(self):
    """An excluded submodule of an included package becomes an ExcludedModule node."""
    pkg_root = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'testpkg-packages')
    graph = find_modules.find_modules(
        path=[pkg_root] + sys.path,
        scripts=[os.path.join(pkg_root, "main_script.py")],
        packages=['pkg'],
        excludes=['pkg.sub3'])
    # The package itself is included as a Package node ...
    self.assertIsInstance(graph.findNode('pkg'), modulegraph.Package)
    # ... while the excluded submodule is recorded as explicitly excluded.
    self.assertIsInstance(graph.findNode('pkg.sub3'), modulegraph.ExcludedModule)
def testIncludePackageWithExclude(self):
    """Verify excludes inside an included package are honoured by find_modules."""
    here = os.path.dirname(os.path.abspath(__file__))
    root = os.path.join(here, 'testpkg-packages')
    main_script = os.path.join(root, "main_script.py")
    mf = find_modules.find_modules(
        path=[root] + sys.path,
        scripts=[main_script],
        packages=['pkg'],
        excludes=['pkg.sub3'])
    pkg_node = mf.findNode('pkg')
    self.assertIsInstance(pkg_node, modulegraph.Package)
    excluded_node = mf.findNode('pkg.sub3')
    self.assertIsInstance(excluded_node, modulegraph.ExcludedModule)
def collect_all(self):
    """Run modulegraph analysis and gather everything that must be bundled.

    Returns a ``(py_files, pkgdirs, extensions, loader_files)`` tuple after
    applying recipes and dependency filters to the module graph.
    """
    graph = find_modules(
        scripts=self.collect_scripts(),
        includes=self.opts.includes,
        packages=self.opts.packages,
        excludes=self.opts.excludes,
        debug=4 if self.opts.debug_modulegraph else 0,
    )
    node_filters = [has_filename_filter]
    flatpackages = {}
    loader_files = []
    self.process_recipes(graph, node_filters, flatpackages, loader_files)
    # Drop into the debugger so the graph can be inspected interactively.
    if self.opts.debug_modulegraph:
        import pdb
        pdb.Pdb().set_trace()
    self.filter_dependencies(graph, node_filters)
    py_files, extensions = self.finalize_modulefinder(graph)
    pkgdirs = self.collect_packagedirs()
    return py_files, pkgdirs, extensions, loader_files
def collect_all(self):
    """Build the module graph and collect modules, package dirs and loaders.

    Returns ``(py_files, pkgdirs, extensions, loader_files)``.
    """
    if self.opts.debug_modulegraph:
        trace_level = 4
    else:
        trace_level = 0
    mf = find_modules(
        scripts=self.collect_scripts(),
        includes=self.opts.includes,
        packages=self.opts.packages,
        excludes=self.opts.excludes,
        debug=trace_level,
    )
    filters = [has_filename_filter]
    flatpackages = {}
    loader_files = []
    self.process_recipes(mf, filters, flatpackages, loader_files)
    if self.opts.debug_modulegraph:
        # Interactive inspection point for graph debugging.
        import pdb
        pdb.Pdb().set_trace()
    self.filter_dependencies(mf, filters)
    py_files, extensions = self.finalize_modulefinder(mf)
    return py_files, self.collect_packagedirs(), extensions, loader_files
def get_modules(script_path):
    """Return the flattened module graph reachable from *script_path*.

    The script's own directory is temporarily prepended to ``sys.path`` so
    that sibling modules resolve during analysis.

    :param script_path: path to the Python script to analyze
    :return: the iterable of graph nodes from ``mf.flatten()``
    """
    sys.path.insert(0, os.path.dirname(script_path))
    try:
        # Original code removed the path entry only on success, leaking the
        # inserted directory into sys.path when find_modules raised.
        mf = find_modules((script_path,))
    finally:
        del sys.path[0]
    return mf.flatten()
def package(scripts, name=None, pathex=None, datas=None, binaries=None,
            includes=None, excludes=None):
    '''
    usage: package(__file__, datas=[(src,dst), (src, dst), ])

    Analyze the given scripts with modulegraph, classify each dependency as
    an installed distribution (recorded as a requirement), a site-packages
    module, or a project-local module, and zip the latter two groups
    together with any extra data/binary files and a requirements manifest.

    :param scripts: scripts file to parse from (str, or tuple/list of str)
    :param name: zip file name (defaults to the first script's name)
    :param pathex: extra search paths for module resolution
    :param datas: (src, dst) pairs of data files to include
    :param binaries: (src, dst) pairs of binary files to include
    :param includes: extra module names to force-include
    :param excludes: module names to exclude from analysis
    :return: the zipfile path
    '''
    excludes, includes, inputs = excludes or [], includes or [], []
    scripts = [scripts] if isinstance(scripts, str) else scripts
    if not isinstance(scripts, (tuple, list)):
        raise ValueError("scripts parameters must be str or tuple or list ")
    for script in scripts:
        # Normalize script path.
        script = os.path.normpath(script)
        if not os.path.exists(script):
            raise ValueError("script '%s' not found" % script)
        inputs.append(helper.get_toplevel_modules_path(script))
    # First script's top-level package directory anchors archive paths.
    toplevelpath = inputs[0]
    inputs.extend(sys.path + (pathex or []))
    basepath = os.path.abspath(os.path.dirname(scripts[0]))
    installed_pkgs = _get_installed_modules()
    logger.info(installed_pkgs)
    logger.info("trying to analysis dependents , please wait ...")
    mf = find_modules.find_modules(
        scripts=scripts,
        includes=includes,
        excludes=excludes + _DefaultExcludes,
        path=inputs)
    requires, depends_thirds, depends_selfmod = {}, [], []
    # Node types that carry no real source file and can be skipped outright.
    skip_types = ('MissingModule', 'AliasNode', 'BuiltinModule',
                  'ExcludedModule', 'NamespacePackage')
    for m in mf.flatten():
        ty, nm, fn = type(m).__name__, m.identifier, m.filename
        if ty in skip_types:  # was `if not ty in [...]` wrapping the body
            continue
        logger.info("check type:%s, module:%s, path:%s", ty, nm, fn)
        # Prefer recording a pip requirement over bundling the files:
        # try the full dotted name first, then its top-level package.
        pkgname, ver = installed_pkgs.get(nm, (None, None))
        if ver:
            requires[pkgname] = ver
            continue
        nmbase = nm.split('.', 1)[0]
        pkgname, ver = installed_pkgs.get(nmbase, (None, None))
        if ver:
            requires[pkgname] = ver
            continue
        sourcetype = helper.source_type(fn)
        if sourcetype == 'PythonLib':
            # Standard-library modules are assumed present on the target.
            logger.debug(
                '###########> ignore python lib type:%s, module:<%s>, %s',
                ty, nm, fn)
        elif sourcetype == 'SitePackages':
            depends_thirds.append((ty, nm, fn))
            logger.debug(
                '----------> add site-packages type:%s, module:<%s>, %s',
                ty, nm, fn)
        elif sourcetype == 'UNKOWN':
            # Anything helper cannot classify is treated as project code.
            depends_selfmod.append((ty, nm, fn))
            logger.debug(
                '----------> add self modules type:%s, module:<%s>, %s',
                ty, nm, fn)
    import zipfile
    zfilepath = name if name else os.path.join(
        toplevelpath, os.path.basename(scripts[0]))
    zfilepath = os.path.splitext(zfilepath)[0] + '.zip'
    logger.info("trying to zip dependents to %s", zfilepath)
    with zipfile.ZipFile(zfilepath, 'w') as zfile:
        _write_modules(zfile, depends_thirds + depends_selfmod, toplevelpath)
        _write_files(zfile, (datas or []) + (binaries or []), basepath)
        _write_requires(zfile, requires)
    logger.info("dependents have been packaged to %s", zfilepath)
    return zfilepath
def process_module(target_module_name, target_packages, dest_dir,
                   requirements_file='requirements.txt', add_init_py=False,
                   add_setup_py=False, package_data=(), source_paths=(),
                   readme=None, functions=(), fire_components=(),
                   post_build_commands=(), verbose=False):
    """Vendor *target_module_name* and its in-house dependencies into *dest_dir*.

    Walks the module import graph starting at *target_module_name*, copies
    every module whose identifier matches one of *target_packages* into
    *dest_dir* (rewriting cross-imports to relative names), writes a compiled
    ``requirements.txt`` for the external dependencies found, and optionally
    emits ``__init__.py``, ``setup.py`` and a ``README.md``.

    :param target_module_name: module to use as the graph root
    :param target_packages: identifiers treated as "our" code to vendor
    :param dest_dir: output directory; its basename becomes the package name
    :param requirements_file: root requirements file to parse for externals
    :param add_init_py: write an (empty) ``__init__.py`` in dest_dir
    :param add_setup_py: lay out as an installable package with setup.py
    :param package_data: "package/relative/path" strings to copy alongside
    :param source_paths: local project dirs to build sdists for pip-compile
    :param readme: path to a readme file whose content is embedded
    :param functions: function names documented under "Python API"
    :param fire_components: function names documented as CLI tools
    :param post_build_commands: shell commands run in dest_dir afterwards
    :param verbose: print external modules and keep pip-compile chatty
    :raises ImportError: if the target module is missing from the graph
    """
    # determine package name
    pkg_name = os.path.split(dest_dir)[1]
    # parse root requirements.txt
    header_lines, all_reqs = load_requirements_txt(fname=requirements_file)
    print('parsed %i requirements from requirements.txt' % len(all_reqs))
    # run modulegraph to get modules; known requirements (except our own
    # target packages) are excluded so the graph stays small
    print('constructing module import graph')
    mg = find_modules(includes=(target_module_name, ),
                      excludes=set(r.name for r in all_reqs) -
                      set(target_packages))
    print('found %i nodes in the module import graph' %
          len(list(mg.flatten())))
    # analyze the graph
    target_node = mg.findNode(target_module_name)
    if target_node is None or isinstance(target_node, MissingModule):
        raise ImportError('could not import target module %s' %
                          target_module_name)
    # DFS over the reference graph: partition reachable modules into "ours"
    # (to be copied) and "external" (to become requirements)
    visited = set()
    our_mods = {target_node}
    external_mods = set()
    external_reqs = set()
    stack = [target_node]
    while stack:
        current_node = stack.pop()
        refs = list(mg.getReferences(current_node))
        for ref in refs:
            if ref.identifier in visited:
                continue
            visited.add(ref.identifier)
            # substring match against target_packages decides ownership
            if any(x in ref.identifier for x in target_packages):
                if module_is_nonempty(ref):
                    our_mods.add(ref)
                    stack.append(ref)
                continue
            # otherwise map the module back to the requirement providing it
            for req in all_reqs:
                if convert_from_pypi(req.name) in ref.identifier:
                    external_mods.add(ref)
                    external_reqs.add(req)
                    break
    print('found %i modules imported from target packages' % len(our_mods))
    print('found %i modules imported from %i external requirements' %
          (len(external_mods), len(external_reqs)))
    if verbose:
        for m in sorted(external_mods):
            print(m.identifier)
    # fill in new names and new paths (flattened: one .py file per module)
    old_names = [x.identifier for x in our_mods]
    new_names = resolve_names(old_names)
    old_name_to_new_name = {}
    old_name_to_new_path = {}
    for old_name, new_name in zip(old_names, new_names):
        old_name_to_new_name[old_name] = new_name
        # with setup.py the modules live inside a package subfolder
        old_name_to_new_path[old_name] = \
            os.path.join(dest_dir, pkg_name, new_name + '.py') \
            if add_setup_py else os.path.join(dest_dir, new_name + '.py')
    # make dest_dir
    if not os.path.exists(dest_dir):
        os.mkdir(dest_dir)
    if add_setup_py and not os.path.exists(os.path.join(dest_dir, pkg_name)):
        os.mkdir(os.path.join(dest_dir, pkg_name))
    # touch __init__.py
    if add_init_py:
        if add_setup_py:
            print('both add_init_py and add_setup_py are set to True')
            print('__init__.py will be written, but only once '
                  '(inside the package folder)')
        else:
            open(os.path.join(dest_dir, '__init__.py'), 'w').close()
    if add_setup_py:
        open(os.path.join(dest_dir, pkg_name, '__init__.py'), 'w').close()
    # copy modules, rewriting imports; longest identifiers first so that
    # e.g. "pkg.sub" is rewritten before its prefix "pkg"
    for m in our_mods:
        with open(m.filename, 'r') as handle:
            data = handle.read()
        for other in sorted(our_mods, key=lambda x: len(x.identifier),
                            reverse=True):
            # NOTE(review): plain text replace — presumably assumes module
            # identifiers only appear as imports; verify against inputs
            data = data.replace(other.identifier,
                                '.' + old_name_to_new_name[other.identifier])
        with open(old_name_to_new_path[m.identifier], 'w') as handle:
            handle.write(data)
    # handle package_data ("package/relative/path" → copied into dest_dir)
    for f in package_data:
        package_name, f_path = f.split('/', 1)
        package_path = mg.findNode(package_name).packagepath[0]
        complete_path = os.path.join(package_path, f_path)
        shutil.copy(complete_path, dest_dir)
    # write requirements.in (header comments + sorted requirement lines)
    req_in_fname = os.path.join(dest_dir, 'requirements.in')
    with open(req_in_fname, 'w') as handle:
        handle.write('\n'.join(header_lines +
                               list(sorted([e.line
                                            for e in external_reqs]))))
        handle.write('\n')
    # handle source_paths: build local sdists so pip-compile can resolve
    # requirements that are not on PyPI
    find_links = []
    for source_path in source_paths:
        print('building sdist for %s' % source_path)
        cmd = 'python setup.py --quiet sdist'
        subprocess.Popen(shlex.split(cmd), cwd=source_path).wait()
        # backslash pathsep breaks pip_comple() in py2
        find_link = os.path.join(source_path, 'dist').replace('\\', '/')
        assert os.path.exists(find_link)
        find_links.append(find_link)
    # compile requirements.txt from requirements.in, then drop the .in file
    print('writing requirements.txt')
    req_txt_fname = os.path.join(dest_dir, 'requirements.txt')
    pip_compile_args = [
        req_in_fname, '--output-file', req_txt_fname, '--no-header',
        '--no-annotate'
    ]
    for f in find_links:
        pip_compile_args.extend(['--find-links', f])
    if not verbose:
        pip_compile_args.append('--quiet')
    print('compiling requirements: %s %s' %
          ('pip-compile', ' '.join(pip_compile_args)))
    pip_compile(args=pip_compile_args, standalone_mode=False)
    os.remove(req_in_fname)
    # load readme content
    readme_content = None
    if readme and os.path.exists(readme):
        with open(readme, 'r') as handle:
            readme_content = handle.read()
    # write README
    if readme_content or fire_components or functions:
        # NOTE(review): new_name is assigned but not used below — appears
        # vestigial; resolve_function_name takes the mapping dict directly
        new_name = old_name_to_new_name[target_module_name]
        with open(os.path.join(dest_dir, 'README.md'), 'w') as handle:
            handle.write('%s\n' % dest_dir)
            handle.write(('=' * len(dest_dir)) + '\n\n')
            if readme_content:
                handle.write(readme_content + '\n')
            if fire_components:
                handle.write('Command line tools\n')
                handle.write('------------------\n\n')
                for f in fire_components:
                    document_component(
                        handle,
                        *resolve_function_name(f, target_module_name,
                                               old_name_to_new_name))
            if fire_components and functions:
                handle.write('\n')
            if functions:
                handle.write('Python API\n')
                handle.write('----------\n\n')
                for f in functions:
                    document_function(handle, *resolve_function_name(
                        f, target_module_name, old_name_to_new_name),
                        pkg_name=pkg_name if add_setup_py else None)
    # write setup.py
    if add_setup_py:
        print('writing setup.py')
        write_setup_py(dest_dir, pkg_name, external_reqs)
    # post build commands
    for cmd in post_build_commands:
        print('running post_build_command: %s' % cmd)
        subprocess.Popen(shlex.split(cmd), cwd=dest_dir, shell=True).wait()