def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    parser.add_option('-f', '--from-file', action='store',
                      help="Read cluster list from the given filename.")
    opts, clusters = parser.parse_args()

    if opts.from_file:
        clusters.extend(read_clusters(opts.from_file))

    depends = read_depends(sys.stdin)

    clusfiles = defaultdict(set)
    for (froot, f), (troot, t) in depends:
        cfrom = apply_cluster(clusters, froot, f)
        cto = apply_cluster(clusters, troot, t)

        # Skip self-dependencies that may occur.
        if cfrom == cto:
            cto = (None, None)

        clusfiles[cfrom].add(cto)

    output_depends(clusfiles)

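# The helpers used above (read_clusters, read_depends, apply_cluster,
# output_depends) live elsewhere in the package and are not shown in this
# excerpt. As a rough, hedged sketch of what apply_cluster() is expected to do
# -- collapse a (root, filename) pair onto the first matching cluster prefix --
# something along these lines would fit; illustrative only, not the project's
# actual implementation.
def apply_cluster(clusters, root, fn):
    if fn is not None:
        for cluster in clusters:
            # Treat each cluster as a path prefix of the relative filename.
            if fn == cluster or fn.startswith(cluster + '/'):
                return (root, cluster)
    return (root, fn)
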
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    opts, renames = parser.parse_args()

    if len(renames) % 2:
        parser.error("An odd number of renames was specified.")
    renames = [(re.compile(regexp), target)
               for regexp, target in iterpairs(renames)]

    depends = read_depends(sys.stdin)

    clusfiles = defaultdict(set)
    for (froot, f), (troot, t) in depends:
        for rename, target in renames:
            if rename.match(f):
                f = target
                break
        cfrom = (froot, f)

        for rename, target in renames:
            if t and rename.match(t):
                t = target
                break
        cto = (troot, t)

        # Skip self-dependencies that may occur.
        if cfrom == cto:
            cto = (None, None)

        clusfiles[cfrom].add(cto)

    output_depends(clusfiles)

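# iterpairs() is another helper that is not shown in this excerpt. A minimal
# sketch, assuming it simply groups the flat positional-argument list into
# (regexp, target) pairs, e.g. ['a.*', 'A', 'b.*', 'B'] -> ('a.*', 'A'),
# ('b.*', 'B'); the real helper may differ.
def iterpairs(seq):
    it = iter(seq)
    return zip(it, it)
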
def __init__(self, input_package, files, ignore_list=None, source_path=None):
    self.input_package = input_package
    self.files = files
    self.ignores = ignore_list or []
    self.all_files = defaultdict(set)
    self.all_errors = []
    self.processed_files = set()
    self.source_paths = source_path or []

def fetch_dependencies(self):
    """
    Fetch all dependencies and follow the target file.

    This was inspired by the snakefood library
    snakefood-1.4-py2.7.egg/snakefood/gendeps.py
    """
    # No need to run this twice.
    if self.dependency_list:
        return self.dependency_list

    log.info("Fetching internal dependencies: %s" % self.filename)
    depends = find_imports(self.filename, 1, 0)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots([self.filename], [])
    self.file_roots = inroots
    if not inroots:
        raise NoRoot
    for file in inroots:
        log.debug("Root found: %s" % file)
    sys.path = inroots + sys.path
    #log.debug("Using the following import path to search for modules:")
    #for dn in sys.path:
    #    log.debug(" -- %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    log.debug("Processing file:")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()
    ignorefiles = []
    alldependencies = []

    fiter = iter_pyfiles([self.filename], ignorefiles, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            log.debug(" post-filter: %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(fn, 0, 0)
                log.debug("dependency file count: %d" % len(files))
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file. This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure the file at least appears in the output, even if it has
            # no dependency.
            from_ = relfile(fn, ignorefiles)
            if from_ is None:
                log.debug("from_ empty. Move on")
                continue
            infrom = from_[0] in inroots
            log.debug("  from: %s" % from_[0])
            log.debug("  file: %s" % from_[1])
            allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, ignorefiles)
                into = to_[0] in inroots
                log.debug("  from: %s" % from_[1])
                log.debug("  to:   %s" % to_[1])
                if dfn in alldependencies:
                    log.debug("Already added %s to dependency list" % dfn)
                else:
                    log.debug("Add %s to dependency list" % dfn)
                    allfiles[from_].add(to_)
                    newfiles.add(dfn)
                    alldependencies.append(dfn)

        if not newfiles:
            log.debug("No more new files. All done.")
            break
        else:
            fiter = iter(sorted(newfiles))

    # Output the list of roots found.
    log.debug("Found roots:")
    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0), files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        log.debug("  %s" % root)

    self.dependency_list = allfiles
    return self.dependency_list

def scan(self):
    """ Returns an ImportGraph """
    self.optsVerbose -= self.optsQuiet
    setup_logging(self.optsVerbose)
    info = logging.info
    warning = logging.warning
    debug = logging.debug

    if self.optsInternal and self.optsExternal:
        message = "Using --internal and --external at the same time " \
                  "does not make sense."
        raise SnakefoodScannerException(message)

    if self.optsPrintRoots:
        inroots = find_roots(self.args, self.optsIgnores)
        for dn in sorted(inroots):
            print(dn)
        return

    info("")
    info("Input paths:")
    for arg in self.args:
        fn = os.path.realpath(arg)
        info('  {}'.format(fn))
        if not os.path.exists(fn):
            message = "Filename '{}' does not exist.".format(fn)
            raise SnakefoodScannerException(message)

    # Get the list of package roots for our input files and prepend
    # them to the module search path to ensure localized imports.
    inroots = find_roots(self.args, self.optsIgnores)
    if (self.optsInternal or self.optsExternal) and not inroots:
        message = "No package roots found from the given files or " \
                  "directories. Using --internal with these roots will " \
                  "generate no dependencies."
        raise SnakefoodScannerException(message)
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  {}'.format(root))

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  {}".format(dn))
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(self.args, self.optsIgnores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  {}".format(fn))
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(
                    fn, self.optsVerbose, self.optsDoPragmas, self.optsVerbose)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how
                # to get the dependencies of that (without importing,
                # which we want to avoid).
                files = []

            # When packages are the source of dependencies, remove the
            # __init__ file. This is important because the targets
            # also do not include the __init__ (i.e. when "from
            # <package> import <subpackage>" is seen).
            if os.path.basename(fn) == '__init__.py':
                fn = os.path.dirname(fn)

            # Make sure all the files at least appear in the output,
            # even if they have no dependency.
            from_ = relfile(fn, self.optsIgnores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if self.optsInternal and not infrom:
                continue
            if not self.optsExternal:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if os.path.basename(xfn) == '__init__.py':
                    xfn = os.path.dirname(xfn)

                to_ = relfile(xfn, self.optsIgnores)
                into = to_[0] in inroots
                if (self.optsInternal and not into) or \
                   (self.optsExternal and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (self.optsFollow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the
    # dependencies to the set of files that were processed only,
    # not just to the files that live in the same roots.
    if self.optsInternal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.iteritems():
            filtfiles[from_] = set(
                x for x in tolist if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED, info),
        ("Modules that could not be imported:", ERROR_IMPORT, warning),
    ]
    if self.optsVerbose >= 2:
        reports.append(
            ("Symbols that could not be imported as modules:",
             ERROR_SYMBOL, debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  {}".format(name))

    # Output the list of roots found.
    info("")
    info("Found roots:")
    foundRoots = set()
    for key, files in allfiles.iteritems():
        foundRoots.add(key[0])
        foundRoots.update(map(operator.itemgetter(0), files))
    if None in foundRoots:
        foundRoots.remove(None)
    for root in sorted(foundRoots):
        info("  {}".format(root))

    # Output the dependencies.
    entries = SnakefoodEntries()
    info("")
    for (from_root, from_), targets in sorted(
            allfiles.iteritems(), key=operator.itemgetter(0)):
        for to_root, to_ in sorted(targets):
            entry = SnakefoodEntry(from_root, from_, to_root, to_)
            entries.append(entry)

    graph = ImportGraph()
    for entry in entries.iterEntries():
        graph.addEntry(entry)
    return graph

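# SnakefoodEntry, SnakefoodEntries and ImportGraph are defined elsewhere in
# this project; scan() above only relies on append()/iterEntries() and
# addEntry(). A minimal, hypothetical set of stand-ins consistent with that
# usage (not the actual classes) could look like this:
from collections import namedtuple

SnakefoodEntry = namedtuple('SnakefoodEntry', 'from_root from_ to_root to_')

class SnakefoodEntries:
    def __init__(self):
        self._entries = []

    def append(self, entry):
        self._entries.append(entry)

    def iterEntries(self):
        return iter(self._entries)

class ImportGraph:
    def __init__(self):
        self.edges = []

    def addEntry(self, entry):
        # Record one dependency edge as ((from_root, from_), (to_root, to_)).
        self.edges.append(((entry.from_root, entry.from_),
                           (entry.to_root, entry.to_)))
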
def gendeps():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-i', '--internal', '--internal-only',
                      default=0, action='count',
                      help="Filter out dependencies that are outside of the "
                      "roots of the input files. If internal is used twice, we "
                      "filter down further the dependencies to the set of "
                      "files that were processed only, not just to the files "
                      "that live in the same roots.")

    parser.add_option('-e', '--external', '--external-only',
                      action='store_true',
                      help="Filter out dependencies to modules within the "
                      "roots of the input files. This can be used to find out "
                      "what external modules a package depends on, for example. "
                      "Note that it does not make sense to use --internal and "
                      "--external at the same time, as --internal will reject "
                      "all the dependencies --external allows would output.")

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output more debugging information")
    parser.add_option('-q', '--quiet', action='count', default=0,
                      help="Output less debugging information")

    parser.add_option('-f', '--follow', '-r', '--recursive', action='store_true',
                      help="Follow the modules depended upon and trace their dependencies. "
                      "WARNING: This can be slow. Use --internal to limit the scope.")

    parser.add_option('--print-roots', action='store_true',
                      help="Only print the package roots corresponding to the input files. "
                      "This is mostly used for testing and troubleshooting.")

    parser.add_option('-d', '--disable-pragmas', action='store_false',
                      dest='do_pragmas', default=True,
                      help="Disable processing of pragma directives as strings after imports.")

    parser.add_option('-u', '--ignore-unused', action='store_true',
                      help="Automatically ignore unused imports. (See sfood-checker.)")

    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ['.']

    info = logging.info

    if opts.internal and opts.external:
        parser.error("Using --internal and --external at the same time does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print_(dn)
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info('  %s' % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error("No package roots found from the given files or directories. "
                     "Using --internal with these roots will generate no dependencies.")
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  %s' % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(
                    fn, opts.verbose, opts.do_pragmas, opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file. This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if it has
            # no dependency.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the dependencies to the
    # set of files that were processed only, not just to the files that live in
    # the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.iteritems():
            filtfiles[from_] = set(x for x in tolist
                                   if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED, logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT, logging.warning),
    ]
    if opts.verbose >= 2:
        reports.append(
            ("Symbols that could not be imported as modules:",
             ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")
    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0), files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends(allfiles)

def gendeps():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())
    parser.add_option(
        '-i', '--internal', '--internal-only', default=0,
        action='count',
        help="Filter out dependencies that are outside of the "
        "roots of the input files. If internal is used twice, we "
        "filter down further the dependencies to the set of "
        "files that were processed only, not just to the files "
        "that live in the same roots.")
    parser.add_option(
        '-e', '--external', '--external-only',
        action='store_true',
        help="Filter out dependencies to modules within the "
        "roots of the input files. This can be used to find out "
        "what external modules a package depends on, for example. "
        "Note that it does not make sense to use --internal and "
        "--external at the same time, as --internal will reject "
        "all the dependencies --external allows would output.")
    parser.add_option(
        '-I', '--ignore', dest='ignores', action='append',
        default=def_ignores,
        help="Add the given directory name to the list to be ignored.")
    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output more debugging information")
    parser.add_option('-q', '--quiet', action='count', default=0,
                      help="Output less debugging information")
    parser.add_option(
        '-f', '--follow', '-r', '--recursive', action='store_true',
        help="Follow the modules depended upon and trace their dependencies. "
        "WARNING: This can be slow. Use --internal to limit the scope.")
    parser.add_option(
        '--print-roots', action='store_true',
        help="Only print the package roots corresponding to the input files. "
        "This is mostly used for testing and troubleshooting.")
    parser.add_option(
        '-d', '--disable-pragmas', action='store_false', dest='do_pragmas',
        default=True,
        help="Disable processing of pragma directives as strings after imports.")
    parser.add_option(
        '-u', '--ignore-unused', action='store_true',
        help="Automatically ignore unused imports. (See sfood-checker.)")
    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ['.']

    info = logging.info

    if opts.internal and opts.external:
        parser.error(
            "Using --internal and --external at the same time does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print_(dn)
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info('  %s' % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error(
            "No package roots found from the given files or directories. "
            "Using --internal with these roots will generate no dependencies.")
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  %s' % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(fn, opts.verbose,
                                                  opts.do_pragmas,
                                                  opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file. This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if it has
            # no dependency.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the dependencies to the
    # set of files that were processed only, not just to the files that live in
    # the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.items():
            filtfiles[from_] = set(x for x in tolist
                                   if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED,
         logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT,
         logging.warning),
    ]
    if opts.verbose >= 2:
        reports.append(("Symbols that could not be imported as modules:",
                        ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")
    found_roots = set()
    for key, files in allfiles.items():
        found_roots.add(key[0])
        found_roots.update(list(map(itemgetter(0), files)))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends_inverted(allfiles, is_json=True)

def fetch_dependencies(self):
    """
    Fetch all dependencies and follow the target file.

    This was inspired by the snakefood library
    snakefood-1.4-py2.7.egg/snakefood/gendeps.py
    """
    # No need to run this twice.
    if self.dependency_list:
        return self.dependency_list

    log.info("Fetching internal dependencies: %s" % self.filename)
    depends = find_imports(self.filename, 1, 0)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to ensure localized imports.
    inroots = find_roots([self.filename], [])
    self.file_roots = inroots
    if not inroots:
        raise NoRoot
    for file in inroots:
        log.debug("Root found: %s" % file)
    sys.path = inroots + sys.path
    #log.debug("Using the following import path to search for modules:")
    #for dn in sys.path:
    #    log.debug(" -- %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    log.debug("Processing file:")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()
    ignorefiles = []
    alldependencies = []

    fiter = iter_pyfiles([self.filename], ignorefiles, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            log.debug(" post-filter: %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(fn, 0, 0)
                log.debug("dependency file count: %d" % len(files))
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file. This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure the file at least appears in the output, even if it has
            # no dependency.
            from_ = relfile(fn, ignorefiles)
            if from_ is None:
                log.debug("from_ empty. Move on")
                continue
            infrom = from_[0] in inroots
            log.debug("  from: %s" % from_[0])
            log.debug("  file: %s" % from_[1])
            allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, ignorefiles)
                into = to_[0] in inroots
                log.debug("  from: %s" % from_[1])
                log.debug("  to:   %s" % to_[1])
                if dfn in alldependencies:
                    log.debug("Already added %s to dependency list" % dfn)
                else:
                    log.debug("Add %s to dependency list" % dfn)
                    allfiles[from_].add(to_)
                    newfiles.add(dfn)
                    alldependencies.append(dfn)

        if not newfiles:
            log.debug("No more new files. All done.")
            break
        else:
            fiter = iter(sorted(newfiles))

    # Output the list of roots found.
    log.debug("Found roots:")
    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0), files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        log.debug("  %s" % root)

    self.dependency_list = allfiles
    return self.dependency_list

    if modname not in finder.module_cache:
        fn = finder.find_dotted(names)
        if fn:
            finder.module_cache[modname].append(fn)

    file_names = finder.module_cache[modname]
    if not file_names:
        file_names = [os.path.join(UNKNOWN_PACKAGE, modname)]
        fn = file_names[0]
    else:
        file_names = [fn]

    # If this is a from-form, try the target symbol as a module.
    if rname:
        fn2 = None
        for name in file_names:
            fn2 = finder.find_dotted([rname], os.path.dirname(name))
            if fn2:
                break
        if fn2:
            fn = fn2
        else:
            pass  # Pass-thru and return the filename of the parent, which was found.

    return fn, errors


# Monkey-patch find so that it works with namespace packages.
finder.module_cache = defaultdict(list)
finder.find_dotted_module = find_dotted_module

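# Hedged sketch of how the patch above would typically be wired in (an
# assumption about the surrounding module, which is not shown in this excerpt):
# `finder` is the imported snakefood find module whose lookup is replaced, and
# UNKNOWN_PACKAGE is a placeholder directory name for modules that cannot be
# located. The names and values here are illustrative only.
from collections import defaultdict
from snakefood import find as finder  # assumed alias; the real import may differ

UNKNOWN_PACKAGE = '__unknown__'  # hypothetical placeholder value
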