Example #1
def list_imports():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option(
        '-I',
        '--ignore',
        dest='ignores',
        action='append',
        default=def_ignores,
        help="Add the given directory name to the list to be ignored.")

    parser.add_option(
        '-u',
        '--unified',
        action='store_true',
        help="Just output the unique set of dependencies found, "
        "in no particular order, without the filenames.  The default "
        "is to output all imports, in order of appearance, along with "
        "the filename and line number.")

    parser.add_option('-v',
                      '--verbose',
                      action='count',
                      default=0,
                      help="Output input lines as well.")

    opts, args = parser.parse_args()
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from root directory.")
        args = ['.']

    info = logging.info

    if opts.unified:
        all_symnames = set()
        for fn in iter_pyfiles(args, opts.ignores):
            all_symnames.update(
                x[0] for x in find_imports(fn, opts.verbose, opts.ignores))
        for symname in sorted(all_symnames):
            print(symname)
    else:
        for fn in iter_pyfiles(args, opts.ignores):
            if opts.verbose:
                lines = list(open(fn, 'rU'))
            for symname, lineno, islocal in find_imports(
                    fn, opts.verbose, opts.ignores):
                print('%s:%d: %s' % (fn, lineno, symname))
                if opts.verbose:
                    for no in range(lineno - 1, len(lines)):
                        l = lines[no].rstrip()
                        print('   %s' % l)
                        if l[-1] != '\\':
                            break
                    print()
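
Both variants above consume the (symname, lineno, islocal) tuples yielded by find_imports. A minimal sketch of the same idea outside the option parser; the snakefood.util and snakefood.find import paths below are assumptions and may differ between releases:

# Sketch only: collect the unique names imported anywhere under some roots.
# iter_pyfiles/find_imports are called with the signatures seen above; the
# module paths in these imports are assumptions.
from snakefood.util import iter_pyfiles, def_ignores   # assumed location
from snakefood.find import find_imports                # assumed location

def unique_imports(roots, ignores=def_ignores, verbose=0):
    names = set()
    for fn in iter_pyfiles(roots, ignores):
        # find_imports() yields (symname, lineno, islocal) tuples.
        names.update(sym for sym, _lineno, _islocal in find_imports(fn, verbose, ignores))
    return sorted(names)

if __name__ == '__main__':
    for name in unique_imports(['.']):
        print(name)
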
Example #2
def list_imports():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-u', '--unified', action='store_true',
                      help="Just output the unique set of dependencies found, "
                      "in no particular order, without the filenames.  The default "
                      "is to output all imports, in order of appearance, along with "
                      "the filename and line number.")

    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output input lines as well.")

    opts, args = parser.parse_args()
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from root directory.")
        args = ['.']

    info = logging.info

    if opts.unified:
        all_symnames = set()
        for fn in iter_pyfiles(args, opts.ignores):
            all_symnames.update(x[0] for x in
                                find_imports(fn, opts.verbose, opts.ignores))
        for symname in sorted(all_symnames):
            print_(symname)
    else:
        for fn in iter_pyfiles(args, opts.ignores):
            if opts.verbose:
                lines = list(open(fn, 'rU'))
            for symname, lineno, islocal in find_imports(fn,
                                                         opts.verbose,
                                                         opts.ignores):
                print_('%s:%d: %s' % (fn, lineno, symname))
                if opts.verbose:
                    for no in xrange(lineno-1, len(lines)):
                        l = lines[no].rstrip()
                        print_('   %s' % l)
                        if l[-1] != '\\':
                            break
                    print_()
Example #3
def preload_packages(source_paths, ignores=None):
    all_package_roots = find_package_paths(source_paths, ignores)
    cache = {}
    for package_path in all_package_roots:
        pyfiles = iter_pyfiles([package_path], [], False)
        for fn in pyfiles:
            cache_package(fn, package_path)
            cache[fn] = package_path
    return cache
Example #4
 def files(self):
     """
     :return: files under input package
     """
     return [
         found_file for found_file in iter_pyfiles(
             [self.package_path], None, abspaths=False)
         if not self._is_ignored(found_file)
     ]
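
Note that iter_pyfiles is called here with abspaths=False, so the filenames it yields are relative to the given package root. A hedged sketch of the same listing, with a plain fnmatch filter standing in for the project-specific _is_ignored():

# Sketch: list the .py files under a package, relative to its root, skipping
# basenames that match a simple glob.  fnmatch stands in for _is_ignored();
# the snakefood import path is an assumption.
import fnmatch
import os
from snakefood.util import iter_pyfiles   # assumed location

def package_files(package_path, ignore_glob='test_*'):
    return [
        rel for rel in iter_pyfiles([package_path], None, abspaths=False)
        if not fnmatch.fnmatch(os.path.basename(rel), ignore_glob)
    ]
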
Example #5
    def load_dependencies(self):
        """
        Find all the dependencies.
        Taken from snakefood/gendeps.py
        """
        fiter = iter_pyfiles(self.files, self.ignores, False)
        in_roots = find_roots(self.files, self.ignores)

        for fn in fiter:
            if fn in self.processed_files:
                continue  # Make sure we process each file only once.
            self.processed_files.add(fn)
            if is_python(fn):
                files, errors = finder.find_dependencies(
                    fn, verbose=False, process_pragmas=True, ignore_unused=True)

                self.all_errors.extend(errors)
            else:
                files = []
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            self._add_dependencies(fn, in_roots, files)
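
finder.find_dependencies() returns a (files, errors) pair: the filenames the module depends on, plus (errtype, name) tuples for anything that could not be resolved. A small sketch of consuming that pair for one file, using the same keyword arguments as above (the import locations are assumptions):

# Sketch: dependencies of a single Python source file.
# find_dependencies() is assumed to return (files, errors) as in the example
# above; the module paths in these imports are assumptions.
from snakefood.find import find_dependencies   # assumed location
from snakefood.util import is_python           # assumed location

def deps_of(fn):
    if not is_python(fn):
        return [], []        # not a Python source file: nothing to scan
    return find_dependencies(fn, verbose=False, process_pragmas=True,
                             ignore_unused=True)

files, errors = deps_of('somefile.py')          # hypothetical input path
for dep in files:
    print(dep)
for errtype, name in errors:
    print('could not resolve:', name)
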
Example #6
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('--debug', action='store_true', help="Debugging output.")

    parser.add_option(
        '-I',
        '--ignore',
        dest='ignores',
        action='append',
        default=def_ignores,
        help="Add the given directory name to the list to be ignored.")

    parser.add_option(
        '-d',
        '--disable-pragmas',
        action='store_false',
        dest='do_pragmas',
        default=True,
        help="Disable processing of pragma directives as strings after imports."
    )

    parser.add_option(
        '-D',
        '--duplicates',
        '--enable-duplicates',
        dest='do_dups',
        action='store_true',
        help="Enable experimental heuristic for finding duplicate imports.")

    parser.add_option(
        '-M',
        '--missing',
        '--enable-missing',
        dest='do_missing',
        action='store_true',
        help="Enable experimental heuristic for finding missing imports.")

    opts, args = parser.parse_args()

    write = sys.stderr.write
    for fn in iter_pyfiles(args or ['.'], opts.ignores, False):

        # Parse the file.
        ast, lines = parse_python_source(fn)
        if ast is None:
            continue
        found_imports = get_ast_imports(ast)

        # Check for duplicate remote names imported.
        if opts.do_dups:
            found_imports, dups = check_duplicate_imports(found_imports)
            for modname, rname, lname, lineno, level, pragma in dups:
                write("%s:%d:  Duplicate import '%s'\n" % (fn, lineno, lname))

        # Filter out the unused imports.
        used_imports, unused_imports = filter_unused_imports(
            ast, found_imports)

        # Output warnings for the unused imports.
        for x in unused_imports:
            _, _, lname, lineno, _, pragma = x

            if opts.do_pragmas and pragma:
                continue

            # Search for the column in the relevant line.
            mo = re.search(r'\b%s\b' % lname, lines[lineno - 1])
            colno = 0
            if mo:
                colno = mo.start() + 1
            write("%s:%d:%d:  Unused import '%s'\n" %
                  (fn, lineno, colno, lname))

        # (Optionally) Compute the list of names that are being assigned to.
        if opts.do_missing or opts.debug:
            vis = AssignVisitor()
            ast.walk(ast, vis)
            assign_names = vis.finalize()

        # (Optionally) Check for potentially missing imports (this cannot be
        # precise, we are only providing a heuristic here).
        if opts.do_missing:
            defined = set(modname for modname, _, _, _, _, _ in used_imports)
            defined.update(x[0] for x in assign_names)
            _, simple_names = get_names_from_ast(ast)
            for name, lineno in simple_names:
                if name not in defined and name not in builtins.__dict__:
                    write("%s:%d:  Missing import for '%s'\n" %
                          (fn, lineno, name))

        # Print out all the schmoo for debugging.
        if opts.debug:
            print_()
            print_()
            print_('------ Imported names:')
            for modname, rname, lname, lineno, level, pragma in found_imports:
                print_('%s:%d:  %s' % (fn, lineno, lname))

            ## print_()
            ## print_()
            ## print_('------ Exported names:')
            ## for name, lineno in exported:
            ##     print_('%s:%d:  %s' % (fn, lineno, name))

            ## print_()
            ## print_()
            ## print_('------ Used names:')
            ## for name, lineno in dotted_names:
            ##     print_('%s:%d:  %s' % (fn, lineno, name))
            ## print_()

            print_()
            print_()
            print_('------ Assigned names:')
            for name, lineno in assign_names:
                print_('%s:%d:  %s' % (fn, lineno, name))

            print_()
            print_()
            print_('------ AST:')
            printAst(ast, indent='    ', stream=sys.stdout, initlevel=1)
            print_()
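
This checker builds on parse_python_source, get_ast_imports, and filter_unused_imports, where each import record is a (modname, rname, lname, lineno, level, pragma) tuple. A reduced sketch that only reports unused imports for a single file, under the same tuple layout (import locations are assumptions):

# Sketch: warn about unused imports in one file, in the style shown above.
# The 6-tuple layout and the import locations are assumptions taken from the
# surrounding example.
import sys
from snakefood.find import parse_python_source                         # assumed
from snakefood.checker import get_ast_imports, filter_unused_imports   # assumed

def report_unused(fn, out=sys.stderr):
    tree, lines = parse_python_source(fn)
    if tree is None:
        return                     # file could not be parsed
    found = get_ast_imports(tree)
    used, unused = filter_unused_imports(tree, found)
    for _modname, _rname, lname, lineno, _level, pragma in unused:
        if pragma:                 # a pragma after the import suppresses it
            continue
        out.write("%s:%d:  Unused import '%s'\n" % (fn, lineno, lname))
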
Example #7
    def fetch_dependencies(self):
        """
        Fetch all dependencies and follow the target file.
        This was inspired by the snakefood library
        snakefood-1.4-py2.7.egg/snakefood/gendeps.py
        """
        # No need to run this twice
        if self.dependency_list: return self.dependency_list

        log.info("Fetching internal dependecies: %s" % self.filename)

        depends = find_imports(self.filename, 1, 0)

        # Get the list of package roots for our input files and prepend them to the
        # module search path to insure localized imports.
        inroots = find_roots([self.filename], [])
        self.file_roots = inroots

        if not inroots:
            raise NoRoot

        for file in inroots:
            log.debug("Root found: %s" % file)
        sys.path = inroots + sys.path

        #log.debug("Using the following import path to search for modules:")
        #for dn in sys.path:
        #    log.debug(" --  %s" % dn)
        inroots = frozenset(inroots)

        # Find all the dependencies.
        log.debug("Processing file:")
        allfiles = defaultdict(set)
        allerrors = []
        processed_files = set()
        ignorefiles = []
        alldependencies = []

        fiter = iter_pyfiles([self.filename], ignorefiles, False)
        while 1:
            newfiles = set()

            for fn in fiter:
                log.debug("  post-filter: %s" % fn)
                processed_files.add(fn)

                if is_python(fn):
                    files, errors = find_dependencies(fn, 0, 0)
                    log.debug("dependency file count: %d" % len(files))
                    allerrors.extend(errors)
                else:
                    # If the file is not a source file, we don't know how to get the
                    # dependencies of that (without importing, which we want to
                    # avoid).
                    files = []

                # When packages are the source of dependencies, remove the __init__
                # file.  This is important because the targets also do not include the
                # __init__ (i.e. when "from <package> import <subpackage>" is seen).
                if basename(fn) == '__init__.py':
                    fn = dirname(fn)

                # no dependency.
                from_ = relfile(fn, ignorefiles)
                if from_ is None:
                    log.debug("from_ empty.  Move on")
                    continue
                infrom = from_[0] in inroots
                log.debug("  from: %s" % from_[0])
                log.debug("  file: %s" % from_[1])
                allfiles[from_].add((None, None))

                # Add the dependencies.
                for dfn in files:
                    xfn = dfn
                    if basename(xfn) == '__init__.py':
                        xfn = dirname(xfn)

                    to_ = relfile(xfn, ignorefiles)
                    into = to_[0] in inroots
                    log.debug("  from: %s" % from_[1])
                    log.debug("  to: %s" % to_[1])

                    if dfn in alldependencies:
                        log.debug("Already added %s to dependency list" % dfn)
                    else:
                        log.debug("Add %s to dependency list" % dfn)
                        allfiles[from_].add(to_)
                        newfiles.add(dfn)
                        alldependencies.append(dfn)

            if not newfiles:
                log.debug("No more new files.  all done")
                break
            else:
                fiter = iter(sorted(newfiles))

        # Output the list of roots found.
        log.debug("Found roots:")

        found_roots = set()
        for key, files in allfiles.iteritems():
            found_roots.add(key[0])
            found_roots.update(map(itemgetter(0), files))
        if None in found_roots:
            found_roots.remove(None)
        for root in sorted(found_roots):
            log.debug("  %s" % root)

        self.dependency_list = allfiles
        return self.dependency_list
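
relfile(fn, ignores) splits a filename into a (package_root, relative_path) pair, or returns None when the file should be skipped, and the (None, None) entry added to allfiles[from_] is a placeholder so that a file with no dependencies still appears in the result. A small sketch of that convention (the snakefood.roots import path is an assumption):

# Sketch: normalize a file into snakefood's (root, relative_path) key and
# seed the dependency map with the (None, None) "no dependency" placeholder.
from collections import defaultdict
from os.path import basename, dirname
from snakefood.roots import relfile   # assumed location

def seed(allfiles, fn, ignores=()):
    if basename(fn) == '__init__.py':
        fn = dirname(fn)                 # packages are keyed by their directory
    from_ = relfile(fn, list(ignores))
    if from_ is None:
        return None                      # outside any known root: skip it
    allfiles[from_].add((None, None))    # placeholder so the file shows up
    return from_

allfiles = defaultdict(set)
seed(allfiles, 'mypkg/__init__.py')      # hypothetical path, for illustration
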
Example #8
 def scan(self):
     """
     Returns an ImportGraph
     """
     self.optsVerbose -= self.optsQuiet
     setup_logging(self.optsVerbose)
     info = logging.info
     warning = logging.warning
     debug = logging.debug
     if self.optsInternal and self.optsExternal:
         message = "Using --internal and --external at the same time " \
             "does not make sense."
         raise SnakefoodScannerException(message)
     if self.optsPrintRoots:
         inroots = find_roots(self.args, self.optsIgnores)
         for dn in sorted(inroots):
             print(dn)
         return
     info("")
     info("Input paths:")
     for arg in self.args:
         fn = os.path.realpath(arg)
         info('  {}'.format(fn))
         if not os.path.exists(fn):
             message = "Filename '{}' does not exist.".format(fn)
             raise SnakefoodScannerException(message)
     # Get the list of package roots for our input files and prepend
     # them to the module search path to insure localized imports.
     inroots = find_roots(self.args, self.optsIgnores)
     if (self.optsInternal or self.optsExternal) and not inroots:
         message = "No package roots found from the given files or " \
             "directories. Using --internal with these roots will  " \
             "generate no dependencies."
         raise SnakefoodScannerException(message)
     info("")
     info("Roots of the input files:")
     for root in inroots:
         info('  {}'.format(root))
     info("")
     info("Using the following import path to search for modules:")
     sys.path = inroots + sys.path
     for dn in sys.path:
         info("  {}".format(dn))
     inroots = frozenset(inroots)
     # Find all the dependencies.
     info("")
     info("Processing files:")
     info("")
     allfiles = defaultdict(set)
     allerrors = []
     processed_files = set()
     fiter = iter_pyfiles(self.args, self.optsIgnores, False)
     while 1:
         newfiles = set()
         for fn in fiter:
             if fn in processed_files:
                 continue  # Make sure we process each file only once.
             info("  {}".format(fn))
             processed_files.add(fn)
             if is_python(fn):
                 files, errors = find_dependencies(
                     fn, self.optsVerbose,
                     self.optsDoPragmas, self.optsVerbose)
                 allerrors.extend(errors)
             else:
                 # If the file is not a source file, we don't know how
                 # to get the dependencies of that (without importing,
                 # which we want to avoid).
                 files = []
             # When packages are the source of dependencies, remove the
             # __init__ file.  This is important because the targets
             # also do not include the __init__ (i.e. when "from
             # <package> import <subpackage>" is seen).
             if os.path.basename(fn) == '__init__.py':
                 fn = os.path.dirname(fn)
             # Make sure all the files at least appear in the output,
             # even if it has no dependency.
             from_ = relfile(fn, self.optsIgnores)
             if from_ is None:
                 continue
             infrom = from_[0] in inroots
             if self.optsInternal and not infrom:
                 continue
             if not self.optsExternal:
                 allfiles[from_].add((None, None))
             # Add the dependencies.
             for dfn in files:
                 xfn = dfn
                 if os.path.basename(xfn) == '__init__.py':
                     xfn = os.path.dirname(xfn)
                 to_ = relfile(xfn, self.optsIgnores)
                 into = to_[0] in inroots
                 if (self.optsInternal and not into) or \
                         (self.optsExternal and into):
                     continue
                 allfiles[from_].add(to_)
                 newfiles.add(dfn)
         if not (self.optsFollow and newfiles):
             break
         else:
             fiter = iter(sorted(newfiles))
     # If internal is used twice, we filter down  further the
     # dependencies to the set of files that were processed only,
     # not just to the files that live in the same roots.
     if self.optsInternal >= 2:
         filtfiles = type(allfiles)()
         for from_, tolist in allfiles.iteritems():
             filtfiles[from_] = set(
                 x for x in tolist if x in allfiles or x == (None, None))
         allfiles = filtfiles
     info("")
     info("SUMMARY")
     info("=======")
     # Output a list of the symbols that could not
     # be imported as modules.
     reports = [
         ("Modules that were ignored because not used:",
             ERROR_UNUSED, info),
         ("Modules that could not be imported:",
             ERROR_IMPORT, warning),
         ]
     if self.optsVerbose >= 2:
         reports.append(
             ("Symbols that could not be imported as modules:",
                 ERROR_SYMBOL, debug))
     for msg, errtype, efun in reports:
         names = set(name for (err, name) in allerrors if err is errtype)
         if names:
             efun("")
             efun(msg)
             for name in sorted(names):
                 efun("  {}".format(name))
     # Output the list of roots found.
     info("")
     info("Found roots:")
     foundRoots = set()
     for key, files in allfiles.iteritems():
         foundRoots.add(key[0])
         foundRoots.update(map(operator.itemgetter(0), files))
     if None in foundRoots:
         foundRoots.remove(None)
     for root in sorted(foundRoots):
         info("  {}".format(root))
     # Output the dependencies.
     entries = SnakefoodEntries()
     info("")
     for (from_root, from_), targets in sorted(
             allfiles.iteritems(), key=operator.itemgetter(0)):
         for to_root, to_ in sorted(targets):
             entry = SnakefoodEntry(from_root, from_, to_root, to_)
             entries.append(entry)
     graph = ImportGraph()
     for entry in entries.iterEntries():
         graph.addEntry(entry)
     return graph
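
The --internal/--external handling above comes down to two membership tests against the set of input roots: under internal mode a source outside the roots is dropped, and under external mode a target inside the roots is dropped. A condensed sketch of that filter on its own:

# Sketch: the internal/external edge filter used by scan() and gendeps().
# from_ and to_ are (root, relative_path) pairs as produced by relfile().
def keep_edge(from_, to_, inroots, internal=False, external=False):
    infrom = from_[0] in inroots
    into = to_[0] in inroots
    if internal and not (infrom and into):
        return False     # --internal: both ends must live in the input roots
    if external and into:
        return False     # --external: drop targets inside the input roots
    return True
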
Example #9
def main():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('--debug', action='store_true',
                      help="Debugging output.")

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-d', '--disable-pragmas', action='store_false',
                      dest='do_pragmas', default=True,
                      help="Disable processing of pragma directives as strings after imports.")

    parser.add_option('-D', '--duplicates', '--enable-duplicates',
                      dest='do_dups', action='store_true',
                      help="Enable experimental heuristic for finding duplicate imports.")

    parser.add_option('-M', '--missing', '--enable-missing',
                      dest='do_missing', action='store_true',
                      help="Enable experimental heuristic for finding missing imports.")

    opts, args = parser.parse_args()

    write = sys.stderr.write
    for fn in iter_pyfiles(args or ['.'], opts.ignores, False):

        # Parse the file.
        ast, lines = parse_python_source(fn)
        if ast is None:
            continue
        found_imports = get_ast_imports(ast)

        # Check for duplicate remote names imported.
        if opts.do_dups:
            found_imports, dups = check_duplicate_imports(found_imports)
            for modname, rname, lname, lineno, level, pragma in dups:
                write("%s:%d:  Duplicate import '%s'\n" % (fn, lineno, lname))

        # Filter out the unused imports.
        used_imports, unused_imports = filter_unused_imports(ast, found_imports)

        # Output warnings for the unused imports.
        for x in unused_imports:
            _, _, lname, lineno, _, pragma = x

            if opts.do_pragmas and pragma:
                continue

            # Search for the column in the relevant line.
            mo = re.search(r'\b%s\b' % lname, lines[lineno-1])
            colno = 0
            if mo:
                colno = mo.start()+1
            write("%s:%d:%d:  Unused import '%s'\n" % (fn, lineno, colno, lname))

        # (Optionally) Compute the list of names that are being assigned to.
        if opts.do_missing or opts.debug:
            vis = AssignVisitor()
            compiler.walk(ast, vis)
            assign_names = vis.finalize()

        # (Optionally) Check for potentially missing imports (this cannot be
        # precise, we are only providing a heuristic here).
        if opts.do_missing:
            defined = set(modname for modname, _, _, _, _, _ in used_imports)
            defined.update(x[0] for x in assign_names)
            _, simple_names = get_names_from_ast(ast)
            for name, lineno in simple_names:
                if name not in defined and name not in __builtin__.__dict__:
                    write("%s:%d:  Missing import for '%s'\n" % (fn, lineno, name))

        # Print out all the schmoo for debugging.
        if opts.debug:
            print
            print
            print '------ Imported names:'
            for modname, rname, lname, lineno, level, pragma in found_imports:
                print '%s:%d:  %s' % (fn, lineno, lname)

            ## print
            ## print
            ## print '------ Exported names:'
            ## for name, lineno in exported:
            ##     print '%s:%d:  %s' % (fn, lineno, name)

            ## print
            ## print
            ## print '------ Used names:'
            ## for name, lineno in dotted_names:
            ##     print '%s:%d:  %s' % (fn, lineno, name)
            ## print

            print
            print
            print '------ Assigned names:'
            for name, lineno in assign_names:
                print '%s:%d:  %s' % (fn, lineno, name)

            print
            print
            print '------ AST:'
            printAst(ast, indent='    ', stream=sys.stdout, initlevel=1)
            print
Example #10
def gendeps():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-i', '--internal', '--internal-only',
                      default=0, action='count',
                      help="Filter out dependencies that are outside of the "
                      "roots of the input files. If internal is used twice, we "
                      "filter down further the dependencies to the set of "
                      "files that were processed only, not just to the files "
                      "that live in the same roots.")

    parser.add_option('-e', '--external', '--external-only',
                      action='store_true',
                      help="Filter out dependencies to modules within the "
                      "roots of the input files. This can be used to find out "
                      "what external modules a package depends on, for example. "
                      "Note that it does not make sense to use --internal and "
                      "--external at the same time, as --internal will reject "
                      "all the dependencies --external allows would output.")

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output more debugging information")
    parser.add_option('-q', '--quiet', action='count', default=0,
                      help="Output less debugging information")

    parser.add_option('-f', '--follow', '-r', '--recursive', action='store_true',
                      help="Follow the modules depended upon and trace their dependencies. "
                      "WARNING: This can be slow.  Use --internal to limit the scope.")

    parser.add_option('--print-roots', action='store_true',
                      help="Only print the package roots corresponding to the input files."
                      "This is mostly used for testing and troubleshooting.")

    parser.add_option('-d', '--disable-pragmas', action='store_false',
                      dest='do_pragmas', default=True,
                      help="Disable processing of pragma directives as strings after imports.")

    parser.add_option('-u', '--ignore-unused', action='store_true',
                      help="Automatically ignore unused imports. (See sfood-checker.)")

    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ['.']

    info = logging.info

    if opts.internal and opts.external:
        parser.error("Using --internal and --external at the same time does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print_(dn)
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info('  %s' % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to insure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error("No package roots found from the given files or directories. "
                     "Using --internal with these roots will generate no dependencies.")
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  %s' % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(
                    fn, opts.verbose, opts.do_pragmas, opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file.  This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if it has
            # no dependency.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the dependencies to the
    # set of files that were processed only, not just to the files that live in
    # the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.iteritems():
            filtfiles[from_] = set(x for x in tolist if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED, logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT, logging.warning),
        ]
    if opts.verbose >= 2:
        reports.append(
            ("Symbols that could not be imported as modules:", ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")

    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0),files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends(allfiles)
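
With --follow, gendeps keeps feeding the newly discovered dependency files back into the scan until a pass finds nothing new. Isolated from the option handling, the loop looks roughly like this (a sketch; discover() is a hypothetical stand-in for find_dependencies plus the filtering above):

# Sketch: the breadth-first "follow" loop from gendeps(), in isolation.
# discover(fn) is a hypothetical callback returning fn's dependency filenames.
def follow_all(initial_files, discover):
    processed = set()
    frontier = list(initial_files)
    while frontier:
        newfiles = set()
        for fn in frontier:
            if fn in processed:
                continue                 # process each file only once
            processed.add(fn)
            newfiles.update(discover(fn))
        frontier = sorted(newfiles - processed)
    return processed
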
Example #11
 def scan(self):
     """
     Returns an ImportGraph
     """
     self.optsVerbose -= self.optsQuiet
     setup_logging(self.optsVerbose)
     info = logging.info
     warning = logging.warning
     debug = logging.debug
     if self.optsInternal and self.optsExternal:
         message = "Using --internal and --external at the same time " \
             "does not make sense."
         raise SnakefoodScannerException(message)
     if self.optsPrintRoots:
         inroots = find_roots(self.args, self.optsIgnores)
         for dn in sorted(inroots):
             print(dn)
         return
     info("")
     info("Input paths:")
     for arg in self.args:
         fn = os.path.realpath(arg)
         info('  {}'.format(fn))
         if not os.path.exists(fn):
             message = "Filename '{}' does not exist.".format(fn)
             raise SnakefoodScannerException(message)
     # Get the list of package roots for our input files and prepend
     # them to the module search path to insure localized imports.
     inroots = find_roots(self.args, self.optsIgnores)
     if (self.optsInternal or self.optsExternal) and not inroots:
         message = "No package roots found from the given files or " \
             "directories. Using --internal with these roots will  " \
             "generate no dependencies."
         raise SnakefoodScannerException(message)
     info("")
     info("Roots of the input files:")
     for root in inroots:
         info('  {}'.format(root))
     info("")
     info("Using the following import path to search for modules:")
     sys.path = inroots + sys.path
     for dn in sys.path:
         info("  {}".format(dn))
     inroots = frozenset(inroots)
     # Find all the dependencies.
     info("")
     info("Processing files:")
     info("")
     allfiles = defaultdict(set)
     allerrors = []
     processed_files = set()
     fiter = iter_pyfiles(self.args, self.optsIgnores, False)
     while 1:
         newfiles = set()
         for fn in fiter:
             if fn in processed_files:
                 continue  # Make sure we process each file only once.
             info("  {}".format(fn))
             processed_files.add(fn)
             if is_python(fn):
                 files, errors = find_dependencies(
                     fn, self.optsVerbose,
                     self.optsDoPragmas, self.optsVerbose)
                 allerrors.extend(errors)
             else:
                 # If the file is not a source file, we don't know how
                 # to get the dependencies of that (without importing,
                 # which we want to avoid).
                 files = []
             # When packages are the source of dependencies, remove the
             # __init__ file.  This is important because the targets
             # also do not include the __init__ (i.e. when "from
             # <package> import <subpackage>" is seen).
             if os.path.basename(fn) == '__init__.py':
                 fn = os.path.dirname(fn)
             # Make sure all the files at least appear in the output,
             # even if it has no dependency.
             from_ = relfile(fn, self.optsIgnores)
             if from_ is None:
                 continue
             infrom = from_[0] in inroots
             if self.optsInternal and not infrom:
                 continue
             if not self.optsExternal:
                 allfiles[from_].add((None, None))
             # Add the dependencies.
             for dfn in files:
                 xfn = dfn
                 if os.path.basename(xfn) == '__init__.py':
                     xfn = os.path.dirname(xfn)
                 to_ = relfile(xfn, self.optsIgnores)
                 into = to_[0] in inroots
                 if (self.optsInternal and not into) or \
                         (self.optsExternal and into):
                     continue
                 allfiles[from_].add(to_)
                 newfiles.add(dfn)
         if not (self.optsFollow and newfiles):
             break
         else:
             fiter = iter(sorted(newfiles))
     # If internal is used twice, we filter down  further the
     # dependencies to the set of files that were processed only,
     # not just to the files that live in the same roots.
     if self.optsInternal >= 2:
         filtfiles = type(allfiles)()
         for from_, tolist in allfiles.iteritems():
             filtfiles[from_] = set(
                 x for x in tolist if x in allfiles or x == (None, None))
         allfiles = filtfiles
     info("")
     info("SUMMARY")
     info("=======")
     # Output a list of the symbols that could not
     # be imported as modules.
     reports = [
         ("Modules that were ignored because not used:",
             ERROR_UNUSED, info),
         ("Modules that could not be imported:",
             ERROR_IMPORT, warning),
         ]
     if self.optsVerbose >= 2:
         reports.append(
             ("Symbols that could not be imported as modules:",
                 ERROR_SYMBOL, debug))
     for msg, errtype, efun in reports:
         names = set(name for (err, name) in allerrors if err is errtype)
         if names:
             efun("")
             efun(msg)
             for name in sorted(names):
                 efun("  {}".format(name))
     # Output the list of roots found.
     info("")
     info("Found roots:")
     foundRoots = set()
     for key, files in allfiles.iteritems():
         foundRoots.add(key[0])
         foundRoots.update(map(operator.itemgetter(0), files))
     if None in foundRoots:
         foundRoots.remove(None)
     for root in sorted(foundRoots):
         info("  {}".format(root))
     # Output the dependencies.
     entries = SnakefoodEntries()
     info("")
     for (from_root, from_), targets in sorted(
             allfiles.iteritems(), key=operator.itemgetter(0)):
         for to_root, to_ in sorted(targets):
             entry = SnakefoodEntry(from_root, from_, to_root, to_)
             entries.append(entry)
     graph = ImportGraph()
     for entry in entries.iterEntries():
         graph.addEntry(entry)
     return graph
Example #12
def gendeps():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option(
        '-i',
        '--internal',
        '--internal-only',
        default=0,
        action='count',
        help="Filter out dependencies that are outside of the "
        "roots of the input files. If internal is used twice, we "
        "filter down further the dependencies to the set of "
        "files that were processed only, not just to the files "
        "that live in the same roots.")

    parser.add_option(
        '-e',
        '--external',
        '--external-only',
        action='store_true',
        help="Filter out dependencies to modules within the "
        "roots of the input files. This can be used to find out "
        "what external modules a package depends on, for example. "
        "Note that it does not make sense to use --internal and "
        "--external at the same time, as --internal will reject "
        "all the dependencies --external allows would output.")

    parser.add_option(
        '-I',
        '--ignore',
        dest='ignores',
        action='append',
        default=def_ignores,
        help="Add the given directory name to the list to be ignored.")

    parser.add_option('-v',
                      '--verbose',
                      action='count',
                      default=0,
                      help="Output more debugging information")
    parser.add_option('-q',
                      '--quiet',
                      action='count',
                      default=0,
                      help="Output less debugging information")

    parser.add_option(
        '-f',
        '--follow',
        '-r',
        '--recursive',
        action='store_true',
        help="Follow the modules depended upon and trace their dependencies. "
        "WARNING: This can be slow.  Use --internal to limit the scope.")

    parser.add_option(
        '--print-roots',
        action='store_true',
        help="Only print the package roots corresponding to the input files."
        "This is mostly used for testing and troubleshooting.")

    parser.add_option(
        '-d',
        '--disable-pragmas',
        action='store_false',
        dest='do_pragmas',
        default=True,
        help="Disable processing of pragma directives as strings after imports."
    )

    parser.add_option(
        '-u',
        '--ignore-unused',
        action='store_true',
        help="Automatically ignore unused imports. (See sfood-checker.)")

    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ['.']

    info = logging.info

    if opts.internal and opts.external:
        parser.error(
            "Using --internal and --external at the same time does not make sense."
        )

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print_(dn)
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info('  %s' % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to insure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error(
            "No package roots found from the given files or directories. "
            "Using --internal with these roots will generate no dependencies.")
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info('  %s' % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(fn, opts.verbose,
                                                  opts.do_pragmas,
                                                  opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file.  This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == '__init__.py':
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if it has
            # no dependency.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == '__init__.py':
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If internal is used twice, we filter down further the dependencies to the
    # set of files that were processed only, not just to the files that live in
    # the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.items():
            filtfiles[from_] = set(x for x in tolist
                                   if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED,
         logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT, logging.warning),
    ]
    if opts.verbose >= 2:
        reports.append(("Symbols that could not be imported as modules:",
                        ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")

    found_roots = set()
    for key, files in allfiles.items():
        found_roots.add(key[0])
        found_roots.update(list(map(itemgetter(0), files)))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends_inverted(allfiles, is_json=True)
Example #13
    def fetch_dependencies(self):
        """
        Fetch all dependencies and follow the target file.
        This was inspired by the snakefood library
        snakefood-1.4-py2.7.egg/snakefood/gendeps.py
        """
        # No need to run this twice
        if self.dependency_list: return self.dependency_list
        
        log.info("Fetching internal dependecies: %s" % self.filename)
        
        depends = find_imports(self.filename, 1, 0)
        
        # Get the list of package roots for our input files and prepend them to the
        # module search path to insure localized imports.
        inroots = find_roots([self.filename], [])
        self.file_roots = inroots
        
        if not inroots:
            raise NoRoot
        
        for file in inroots:
            log.debug("Root found: %s" % file)
        sys.path = inroots + sys.path
            
        #log.debug("Using the following import path to search for modules:")
        #for dn in sys.path:
        #    log.debug(" --  %s" % dn)
        inroots = frozenset(inroots)

        # Find all the dependencies.
        log.debug("Processing file:")
        allfiles = defaultdict(set)
        allerrors = []
        processed_files = set()
        ignorefiles = []
        alldependencies = []

        fiter = iter_pyfiles([self.filename], ignorefiles, False)
        while 1:
            newfiles = set()

            for fn in fiter:
                log.debug("  post-filter: %s" % fn)
                processed_files.add(fn)
    
                if is_python(fn):
                    files, errors = find_dependencies(fn, 0, 0)
                    log.debug("dependency file count: %d" % len(files))
                    allerrors.extend(errors)
                else:
                    # If the file is not a source file, we don't know how to get the
                    # dependencies of that (without importing, which we want to
                    # avoid).
                    files = []
            
                # When packages are the source of dependencies, remove the __init__
                # file.  This is important because the targets also do not include the
                # __init__ (i.e. when "from <package> import <subpackage>" is seen).
                if basename(fn) == '__init__.py':
                    fn = dirname(fn)

                # no dependency.
                from_ = relfile(fn, ignorefiles)
                if from_ is None:
                    log.debug("from_ empty.  Move on")
                    continue
                infrom = from_[0] in inroots
                log.debug( "  from: %s" % from_[0])
                log.debug( "  file: %s" % from_[1])
                allfiles[from_].add((None, None))
    
                # Add the dependencies.
                for dfn in files:
                    xfn = dfn
                    if basename(xfn) == '__init__.py':
                        xfn = dirname(xfn)
        
                    to_ = relfile(xfn, ignorefiles)
                    into = to_[0] in inroots
                    log.debug( "  from: %s" % from_[1])
                    log.debug( "  to: %s" % to_[1])

                    if dfn in alldependencies:
                        log.debug("Already added %s to dependency list" % dfn)
                    else:
                        log.debug("Add %s to dependency list" % dfn)
                        allfiles[from_].add(to_)
                        newfiles.add(dfn)
                        alldependencies.append(dfn)

                    
            if not newfiles:
                log.debug("No more new files.  all done")
                break
            else:
                fiter = iter(sorted(newfiles))
                
        # Output the list of roots found.
        log.debug("Found roots:")

        found_roots = set()
        for key, files in allfiles.iteritems():
            found_roots.add(key[0])
            found_roots.update(map(itemgetter(0),files))
        if None in found_roots:
            found_roots.remove(None)
        for root in sorted(found_roots):
            log.debug("  %s" % root)

        
        self.dependency_list = allfiles
        return self.dependency_list
Example #14
def find_package_paths(source_roots, ignores=None):
    return {
        os.path.dirname(path)
        for path in iter_pyfiles(source_roots, ignores)
        if os.path.basename(path) == "setup.py"
    }
 def files(self):
     """
     :return: files under input package
     """
     return [found_file for found_file in iter_pyfiles([self.package_path], None, abspaths=False)
             if not self._is_ignored(found_file)]
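
A short usage sketch for the find_package_paths() defined just above: it keys packages by the directory that contains a setup.py, so printing its result lists the package roots found under the given source trees.

# Sketch: print the package roots (directories holding a setup.py) under '.',
# reusing the find_package_paths() defined in the example above.
for package_root in sorted(find_package_paths(['.'])):
    print(package_root)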