def is_package_root(dn, ignores):
    """Return true if this is a package root. A package root is a directory
    that could be used as a PYTHONPATH entry."""

    if not exists(dn) or exists(join(dn, '__init__.py')):
        return False

    else:
        dirfiles = (join(dn, x) for x in listdir(dn))
        subdirs, files = filter_separate(isdir, dirfiles)

        # Check if the directory contains Python files.
        pyfiles = []
        for x in files:
            bx = basename(x)
            if bx in ignores:
                continue
            if bx.endswith('.so') or is_python(x):
                pyfiles.append(bx)

        # Note: we need to check for a 'site-packages' subdirectory because some
        # distributions package arch-specific files in a different place and
        # have no .py files in /usr/lib/pythonVER, but a single 'config' or
        # 'site-packages' directory instead. These aren't packages either.
        if join(dn, 'site-packages') in subdirs:
            return True

        # Check if the directory contains Python packages.
        for sub in subdirs:
            bsub = basename(sub)
            # Note: Make use of the fact that dotted directory names cannot be
            # imported as packages for culling away branches by removing those
            # subdirectories that have dots in them.
            if '.' in bsub or bsub in ignores:
                continue
            if exists(join(sub, '__init__.py')):
                return True

    return False
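
# A small usage sketch of is_package_root() (the paths and the empty ignore
# list below are hypothetical, for illustration only). Given a tree such as
#
#     /home/user/project/
#         mypkg/__init__.py
#         mypkg/mod.py
#
# is_package_root('/home/user/project', []) would return True, because the
# directory contains the importable package 'mypkg', whereas
# is_package_root('/home/user/project/mypkg', []) would return False, because
# that directory itself contains an __init__.py and is thus a package, not a
# package root.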
def gendeps():
    import optparse

    parser = optparse.OptionParser(__doc__.strip())
    parser.add_option(
        "-i", "--internal", "--internal-only", default=0, action="count",
        help="Filter out dependencies that are outside of the "
        "roots of the input files. If --internal is used twice, we "
        "further filter the dependencies down to the set of files "
        "that were actually processed, not just to the files that "
        "live in the same roots.",
    )
    parser.add_option(
        "-e", "--external", "--external-only", action="store_true",
        help="Filter out dependencies to modules within the "
        "roots of the input files. This can be used to find out "
        "what external modules a package depends on, for example. "
        "Note that it does not make sense to use --internal and "
        "--external at the same time, as --internal will reject "
        "all the dependencies that --external would output.",
    )
    parser.add_option(
        "-I", "--ignore", dest="ignores", action="append", default=def_ignores,
        help="Add the given directory name to the list to be ignored.",
    )
    parser.add_option("-v", "--verbose", action="count", default=0,
                      help="Output more debugging information")
    parser.add_option("-q", "--quiet", action="count", default=0,
                      help="Output less debugging information")
    parser.add_option(
        "-f", "--follow", "-r", "--recursive", action="store_true",
        help="Follow the modules depended upon and trace their dependencies. "
        "WARNING: This can be slow. Use --internal to limit the scope.",
    )
    parser.add_option(
        "--print-roots", action="store_true",
        help="Only print the package roots corresponding to the input files. "
        "This is mostly used for testing and troubleshooting.",
    )
    parser.add_option(
        "-d", "--disable-pragmas", action="store_false", dest="do_pragmas", default=True,
        help="Disable processing of pragma directives as strings after imports.",
    )
    parser.add_option(
        "-u", "--ignore-unused", action="store_true",
        help="Automatically ignore unused imports. (See sfood-checker.)",
    )
    opts, args = parser.parse_args()

    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ["."]

    info = logging.info

    if opts.internal and opts.external:
        parser.error("Using --internal and --external at the same time does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print dn
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info(" %s" % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error(
            "No package roots found from the given files or directories. "
            "Using --internal with these roots will generate no dependencies."
        )
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info(" %s" % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info(" %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.
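
            # Trace the dependencies of this file below. When --follow is
            # given, the enclosing while-loop re-seeds 'fiter' with any newly
            # discovered dependency files so that they get traced on a later
            # pass; 'processed_files' prevents revisiting a file, so the
            # traversal terminates even if the dependency graph has cycles.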
info(" %s" % fn) processed_files.add(fn) if is_python(fn): files, errors = find_dependencies(fn, opts.verbose, opts.do_pragmas, opts.ignore_unused) allerrors.extend(errors) else: # If the file is not a source file, we don't know how to get the # dependencies of that (without importing, which we want to # avoid). files = [] # When packages are the source of dependencies, remove the __init__ # file. This is important because the targets also do not include the # __init__ (i.e. when "from <package> import <subpackage>" is seen). if basename(fn) == "__init__.py": fn = dirname(fn) # Make sure all the files at least appear in the output, even if it has # no dependency. from_ = relfile(fn, opts.ignores) if from_ is None: continue infrom = from_[0] in inroots if opts.internal and not infrom: continue if not opts.external: allfiles[from_].add((None, None)) # Add the dependencies. for dfn in files: xfn = dfn if basename(xfn) == "__init__.py": xfn = dirname(xfn) to_ = relfile(xfn, opts.ignores) into = to_[0] in inroots if (opts.internal and not into) or (opts.external and into): continue allfiles[from_].add(to_) newfiles.add(dfn) if not (opts.follow and newfiles): break else: fiter = iter(sorted(newfiles)) # If internal is used twice, we filter down further the dependencies to the # set of files that were processed only, not just to the files that live in # the same roots. if opts.internal >= 2: filtfiles = type(allfiles)() for from_, tolist in allfiles.iteritems(): filtfiles[from_] = set(x for x in tolist if x in allfiles or x == (None, None)) allfiles = filtfiles info("") info("SUMMARY") info("=======") # Output a list of the symbols that could not be imported as modules. reports = [ ("Modules that were ignored because not used:", ERROR_UNUSED, logging.info), ("Modules that could not be imported:", ERROR_IMPORT, logging.warning), ] if opts.verbose >= 2: reports.append(("Symbols that could not be imported as modules:", ERROR_SYMBOL, logging.debug)) for msg, errtype, efun in reports: names = set(name for (err, name) in allerrors if err is errtype) if names: efun("") efun(msg) for name in sorted(names): efun(" %s" % name) # Output the list of roots found. info("") info("Found roots:") found_roots = set() for key, files in allfiles.iteritems(): found_roots.add(key[0]) found_roots.update(map(itemgetter(0), files)) if None in found_roots: found_roots.remove(None) for root in sorted(found_roots): info(" %s" % root) # Output the dependencies. info("") output_depends(allfiles)