def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-o', '--overwrite', action='store_true',
                      help="Overwrite the destination files. "
                      "If this is not set, an error is generated if "
                      "the destination file exists.")

    parser.add_option('-i', '--insert-package-inits', action='store_true',
                      help="Automatically create missing __init__.py in "
                      "intervening directories.")

    opts, args = parser.parse_args()

    if len(args) != 1:
        parser.error("You must specify the destination root.")
    dest, = args

    if not opts.overwrite and exists(dest):
        logging.error("Cannot overwrite '%s'." % dest)
        sys.exit(1)

    depends = list(read_depends(sys.stdin))

    for droot, drel in flatten_depends(depends):
        srcfn = join(droot, drel)
        if isdir(srcfn):
            drel = join(drel, '__init__.py')
            srcfn = join(droot, drel)

        dstfn = join(dest, drel)
        if not opts.overwrite and exists(dstfn):
            logging.error("Cannot overwrite '%s'." % dstfn)
            sys.exit(1)

        destdir = dirname(dstfn)
        if not exists(destdir):
            os.makedirs(destdir)

        print_('Copying: %s' % srcfn)
        if not exists(srcfn):
            logging.error("Could not copy file '%s'." % srcfn)
            continue
        shutil.copyfile(srcfn, dstfn)

    if opts.insert_package_inits:
        for root, dirs, files in os.walk(dest):
            if root == dest:
                continue  # Not needed at the very root.
            initfn = join(root, '__init__.py')
            if not exists(initfn):
                print_('Creating: %s' % initfn)
                f = open(initfn, 'w')
                f.close()
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    opts, args = parser.parse_args()

    depends = read_depends(sys.stdin)
    for droot, drel in flatten_depends(depends):
        print_(join(droot, drel))
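# Illustrative sketch (not part of the original source): the lines read by
# read_depends() above are assumed to be repr()'d dependency pairs of the form
# ((source_root, source_relpath), (target_root, target_relpath)), with
# (None, None) marking a file that has no dependencies. Under that assumption,
# a minimal flattener equivalent to the loop above could look like this:
#
#   seen = set()
#   for line in sys.stdin:
#       (froot, frel), (troot, trel) = eval(line)
#       for root, rel in ((froot, frel), (troot, trel)):
#           if root is not None and (root, rel) not in seen:
#               seen.add((root, rel))
#               print_(join(root, rel))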
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    opts, args = parser.parse_args()

    if not args:
        parser.error("You need to specify the name of Python files to print out.")

    import compiler, traceback
    for fn in args:
        print_('\n\n%s:\n' % fn)
        try:
            printAst(compiler.parseFile(fn), initlevel=1)
        except SyntaxError:
            _, e, _ = sys.exc_info()
            traceback.print_exc()
def list_imports():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")
    parser.add_option('-u', '--unified', action='store_true',
                      help="Just output the unique set of dependencies found, "
                      "in no particular order, without the filenames. The default "
                      "is to output all imports, in order of appearance, along with "
                      "the filename and line number.")
    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output input lines as well.")
    opts, args = parser.parse_args()

    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from root directory.")
        args = ['.']

    info = logging.info

    if opts.unified:
        all_symnames = set()
        for fn in iter_pyfiles(args, opts.ignores):
            all_symnames.update(x[0] for x in
                                find_imports(fn, opts.verbose, opts.ignores))
        for symname in sorted(all_symnames):
            print_(symname)
    else:
        for fn in iter_pyfiles(args, opts.ignores):
            if opts.verbose:
                lines = list(open(fn, 'rU'))
            for symname, lineno, islocal in find_imports(fn, opts.verbose,
                                                         opts.ignores):
                print_('%s:%d: %s' % (fn, lineno, symname))
                if opts.verbose:
                    for no in xrange(lineno - 1, len(lines)):
                        l = lines[no].rstrip()
                        print_(' %s' % l)
                        if l[-1] != '\\':
                            break
                    print_()
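# Illustrative output (not part of the original source; file names and line
# numbers below are made up): without -u, each import found is printed as
# 'filename:lineno: symbol', e.g.
#
#   mypkg/util.py:3: os.path
#   mypkg/util.py:4: logging
#
# whereas -u prints only the sorted set of unique symbol names, one per line.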
def do_filter(populate_parser=None):
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    opts, args = parser.parse_args()

    if not args:
        args = ['-']

    for fn in args:
        if fn == '-':
            f = sys.stdin
        else:
            f = open(fn)
        for line in f.xreadlines():
            try:
                yield eval(line)
            except Exception, e:
                print_(e, sys.stderr)
                raise SystemExit
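# Illustrative usage (an assumption, not taken from the original source):
# do_filter() yields one parsed dependency pair per input line, so a simple
# downstream filter could be written around it like this, re-printing only the
# pairs it keeps:
#
#   def filter_main():
#       for (froot, frel), (troot, trel) in do_filter():
#           if troot is not None:  # drop the "no dependency" markers
#               print_(repr(((froot, frel), (troot, trel))))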
def gendeps():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-i', '--internal', '--internal-only',
                      default=0, action='count',
                      help="Filter out dependencies that are outside of the "
                      "roots of the input files. If internal is used twice, we "
                      "filter down further the dependencies to the set of "
                      "files that were processed only, not just to the files "
                      "that live in the same roots.")

    parser.add_option('-e', '--external', '--external-only',
                      action='store_true',
                      help="Filter out dependencies to modules within the "
                      "roots of the input files. This can be used to find out "
                      "what external modules a package depends on, for example. "
                      "Note that it does not make sense to use --internal and "
                      "--external at the same time, as --internal will reject "
                      "all the dependencies that --external would output.")

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output more debugging information")
    parser.add_option('-q', '--quiet', action='count', default=0,
                      help="Output less debugging information")

    parser.add_option('-f', '--follow', '-r', '--recursive', action='store_true',
                      help="Follow the modules depended upon and trace their dependencies. "
                      "WARNING: This can be slow. Use --internal to limit the scope.")

    parser.add_option('--print-roots', action='store_true',
                      help="Only print the package roots corresponding to the input files. "
                      "This is mostly used for testing and troubleshooting.")

    parser.add_option('-d', '--disable-pragmas', action='store_false',
                      dest='do_pragmas', default=True,
                      help="Disable processing of pragma directives as strings after imports.")

    parser.add_option('-u', '--ignore-unused', action='store_true',
                      help="Automatically ignore unused imports. (See sfood-checker.)")

    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ['.']

    info = logging.info

    if opts.internal and opts.external:
        parser.error("Using --internal and --external at the same time "
                     "does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print_(dn)
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info('  %s' % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error("No package roots found from the given files or directories. "
                     "Using --internal with these roots will generate no dependencies.")
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  %s' % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(
                    fn, opts.verbose, opts.do_pragmas, opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get its
                # dependencies (without importing it, which we want to avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file. This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if they
            # have no dependencies.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the dependencies to the
    # set of files that were processed only, not just to the files that live in
    # the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.iteritems():
            filtfiles[from_] = set(x for x in tolist
                                   if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED, logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT, logging.warning),
    ]
    if opts.verbose >= 2:
        reports.append(
            ("Symbols that could not be imported as modules:",
             ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")
    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0), files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends(allfiles)
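# Typical invocation (a hedged sketch; it assumes the usual snakefood console
# scripts map onto the functions in this file, i.e. 'sfood' -> gendeps() and
# 'sfood-flatten' / 'sfood-copy' -> the main() functions above). The repr()'d
# dependency lines written by output_depends() are consumed on stdin by the
# other tools, e.g.:
#
#   sfood -i /path/to/package | sfood-flatten
#   sfood -i /path/to/package | sfood-copy /tmp/newroot   # paths are hypothetical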
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    parser.add_option('--debug', action='store_true',
                      help="Debugging output.")
    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")
    parser.add_option('-d', '--disable-pragmas', action='store_false',
                      dest='do_pragmas', default=True,
                      help="Disable processing of pragma directives as strings after imports.")
    parser.add_option('-D', '--duplicates', '--enable-duplicates', dest='do_dups',
                      action='store_true',
                      help="Enable experimental heuristic for finding duplicate imports.")
    parser.add_option('-M', '--missing', '--enable-missing', dest='do_missing',
                      action='store_true',
                      help="Enable experimental heuristic for finding missing imports.")
    opts, args = parser.parse_args()

    write = sys.stderr.write
    for fn in iter_pyfiles(args or ['.'], opts.ignores, False):

        # Parse the file.
        ast, lines = parse_python_source(fn)
        if ast is None:
            continue

        found_imports = get_ast_imports(ast)

        # Check for duplicate remote names imported.
        if opts.do_dups:
            found_imports, dups = check_duplicate_imports(found_imports)
            for modname, rname, lname, lineno, level, pragma in dups:
                write("%s:%d: Duplicate import '%s'\n" % (fn, lineno, lname))

        # Filter out the unused imports.
        used_imports, unused_imports = filter_unused_imports(ast, found_imports)

        # Output warnings for the unused imports.
        for x in unused_imports:
            _, _, lname, lineno, _, pragma = x
            if opts.do_pragmas and pragma:
                continue

            # Search for the column in the relevant line.
            mo = re.search(r'\b%s\b' % lname, lines[lineno - 1])
            colno = 0
            if mo:
                colno = mo.start() + 1
            write("%s:%d:%d: Unused import '%s'\n" % (fn, lineno, colno, lname))

        # (Optionally) Compute the list of names that are being assigned to.
        if opts.do_missing or opts.debug:
            vis = AssignVisitor()
            compiler.walk(ast, vis)
            assign_names = vis.finalize()

        # (Optionally) Check for potentially missing imports (this cannot be
        # precise, we are only providing a heuristic here).
        if opts.do_missing:
            defined = set(modname for modname, _, _, _, _, _ in used_imports)
            defined.update(x[0] for x in assign_names)
            _, simple_names = get_names_from_ast(ast)
            for name, lineno in simple_names:
                if name not in defined and name not in __builtin__.__dict__:
                    write("%s:%d: Missing import for '%s'\n" % (fn, lineno, name))

        # Print out all the schmoo for debugging.
        if opts.debug:
            print_()
            print_()
            print_('------ Imported names:')
            for modname, rname, lname, lineno, level, pragma in found_imports:
                print_('%s:%d: %s' % (fn, lineno, lname))

            ## print_()
            ## print_()
            ## print_('------ Exported names:')
            ## for name, lineno in exported:
            ##     print_('%s:%d: %s' % (fn, lineno, name))

            ## print_()
            ## print_()
            ## print_('------ Used names:')
            ## for name, lineno in dotted_names:
            ##     print_('%s:%d: %s' % (fn, lineno, name))
            ## print_()

            print_()
            print_()
            print_('------ Assigned names:')
            for name, lineno in assign_names:
                print_('%s:%d: %s' % (fn, lineno, name))

            print_()
            print_()
            print_('------ AST:')
            printAst(ast, indent=' ', stream=sys.stdout, initlevel=1)
            print_()
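# Illustrative diagnostics, grounded in the write() format strings above (the
# file names and positions are made up for the example):
#
#   mypkg/views.py:12: Duplicate import 'os'
#   mypkg/views.py:15:8: Unused import 'sys'
#   mypkg/views.py:40: Missing import for 'defaultdict'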