def find_module_paths_using_search(modules: List[str], packages: List[str],
                                   search_path: List[str],
                                   pyversion: Tuple[int, int]) -> List[StubSource]:
    """Find sources for modules and packages requested.

    This only inspects the file system (used when --no-import is passed),
    so C extension modules cannot be located this way.  Exits if any of
    the requested modules or packages can't be found.
    """
    typeshed_path = default_lib_path(mypy.build.default_data_dir(), pyversion, None)
    paths = SearchPaths(('.',) + tuple(search_path), (), (), tuple(typeshed_path))
    module_cache = FindModuleCache(paths)

    found = []  # type: List[StubSource]
    for module in modules:
        path = module_cache.find_module(module)
        if not path:
            fail_missing(module)
        found.append(StubSource(module, path))
    for package in packages:
        matches = module_cache.find_modules_recursive(package)
        if not matches:
            fail_missing(package)
        found.extend(StubSource(m.module, m.path) for m in matches)
    return found
def setUp(self) -> None:
    """Build two finder caches over the site-packages fixture: one with
    namespace packages enabled, one with them disabled."""
    self.package_dir = os.path.relpath(
        os.path.join(package_path, "modulefinder-site-packages"))
    egg_dirs, site_packages = expand_site_packages([self.package_dir])
    self.search_paths = SearchPaths(
        python_path=(),
        mypy_path=(os.path.join(data_path, "pkg1"),),
        package_path=tuple(egg_dirs + site_packages),
        typeshed_path=(),
    )
    ns_options = Options()
    ns_options.namespace_packages = True
    self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=ns_options)
    non_ns_options = Options()
    non_ns_options.namespace_packages = False
    self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=non_ns_options)
def find_added_suppressed(self,
                          graph: mypy.build.Graph,
                          seen: Set[str],
                          search_paths: SearchPaths) -> List[Tuple[str, str]]:
    """Find suppressed modules that have been added (and not included in seen).

    Args:
        seen: reachable modules we've seen before (mutated here!!)

    Return suppressed, added modules.
    """
    all_suppressed = set()
    # Fix: the dict key was never used, so iterate values() directly
    # (matches the style used elsewhere in this file).
    for state in graph.values():
        all_suppressed |= state.suppressed_set

    # Filter out things that shouldn't actually be considered suppressed.
    # TODO: Figure out why these are treated as suppressed
    all_suppressed = {module for module in all_suppressed if module not in graph}

    # TODO: Namespace packages
    finder = FindModuleCache(search_paths, self.fscache, self.options)

    found = []
    for module in all_suppressed:
        result = finder.find_module(module)
        if isinstance(result, str) and module not in seen:
            found.append((module, result))
            seen.add(module)
    return found
def setUp(self) -> None:
    """Build two finder caches over the namespace-package fixtures: one
    with namespace packages enabled, one with them disabled."""
    fixture_dirs = (
        "nsx-pkg1", "nsx-pkg2", "nsx-pkg3",
        "nsy-pkg1", "nsy-pkg2",
        "pkg1", "pkg2",
    )
    self.search_paths = SearchPaths(
        python_path=(),
        mypy_path=tuple(os.path.join(data_path, d) for d in fixture_dirs),
        package_path=(),
        typeshed_path=(),
    )
    ns_options = Options()
    ns_options.namespace_packages = True
    self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=ns_options)
    non_ns_options = Options()
    non_ns_options.namespace_packages = False
    self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=non_ns_options)
def find_added_suppressed(self,
                          graph: mypy.build.Graph,
                          seen: Set[str],
                          search_paths: SearchPaths) -> List[Tuple[str, str]]:
    """Find suppressed modules that have been added (and not included in seen).

    Args:
        seen: reachable modules we've seen before (mutated here!!)

    Return suppressed, added modules.
    """
    suppressed = set()
    for state in graph.values():
        suppressed |= state.suppressed_set

    # Filter out things that shouldn't actually be considered suppressed.
    #
    # TODO: Figure out why these are treated as suppressed
    suppressed = {
        module
        for module in suppressed
        if module not in graph and not ignore_suppressed_imports(module)
    }

    # Optimization: skip top-level packages that are obviously not
    # there, to avoid calling the relatively slow find_module()
    # below too many times.
    candidate_pkgs = {module.split('.', 1)[0] for module in suppressed}
    candidate_pkgs = filter_out_missing_top_level_packages(
        candidate_pkgs, search_paths, self.fscache)

    # TODO: Namespace packages
    finder = FindModuleCache(search_paths, self.fscache, self.options)

    added = []
    for module in suppressed:
        if module.split('.', 1)[0] not in candidate_pkgs:
            # Fast path: non-existent top-level package
            continue
        result = finder.find_module(module, fast_path=True)
        if not isinstance(result, str) or module in seen:
            continue
        # When not following imports, we only follow imports to .pyi files.
        if not self.following_imports() and not result.endswith('.pyi'):
            continue
        added.append((module, result))
        seen.add(module)
    return added
def parse_module(self,
                 program_text: str,
                 incremental_step: int = 0) -> List[Tuple[str, str, str]]:
    """Return the module and program names for a test case.

    Normally, the unit tests will parse the default ('__main__')
    module and follow all the imports listed there. You can override
    this behavior and instruct the tests to check multiple modules
    by using a comment like this in the test case input:

      # cmd: mypy -m foo.bar foo.baz

    You can also use `# cmdN:` to have a different cmd for incremental
    step N (2, 3, ...).

    Return a list of tuples (module name, file name, program text).
    """
    match = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text,
                      flags=re.MULTILINE)
    if incremental_step > 1:
        alt_regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step)
        alt_match = re.search(alt_regex, program_text, flags=re.MULTILINE)
        if alt_match is not None:
            # Optionally return a different command if in a later step
            # of incremental mode, otherwise default to reusing the
            # original cmd.
            match = alt_match

    if not match:
        # Default: type check the text itself as the __main__ module.
        return [('__main__', 'main', program_text)]

    # The test case wants to use a non-default main module.  Look up
    # each listed module and give it as a thing to analyze.
    search_paths = SearchPaths((test_temp_dir,), (), (), ())
    cache = FindModuleCache(search_paths)
    out = []
    for module_name in match.group(1).split(' '):
        path = cache.find_module(module_name)
        assert isinstance(path, str), "Can't find ad hoc case file: %s" % module_name
        with open(path, encoding='utf8') as f:
            out.append((module_name, path, f.read()))
    return out
def find_module_path_and_all(module: str,
                             pyversion: Tuple[int, int],
                             no_import: bool,
                             search_path: List[str],
                             interpreter: str) -> Optional[Tuple[str, Optional[List[str]]]]:
    """Find module and determine __all__.

    Return None if the module is a C module.
    Return (module_path, __all__) if Python module.
    Raise an exception or exit if failed.
    """
    module_path = None  # type: Optional[str]
    if no_import:
        # Find module by going through search path only.
        paths = SearchPaths(('.',) + tuple(search_path), (), (), ())
        module_path = FindModuleCache(paths).find_module(module)
        if not module_path:
            raise SystemExit(
                "Can't find module '{}' (consider using --search-path)".format(module))
        module_all = None
    elif pyversion[0] == 2:
        # Python 2 modules are inspected via a subprocess helper.
        module_path, module_all = load_python_module_info(module, interpreter)
    else:
        # TODO: Support custom interpreters.
        try:
            mod = importlib.import_module(module)
        except Exception:
            raise CantImport(module)
        if is_c_module(mod):
            return None
        module_path = mod.__file__
        module_all = getattr(mod, '__all__', None)
    return module_path, module_all
def parse_module(self,
                 program_text: str,
                 incremental_step: int = 0) -> List[Tuple[str, str, str]]:
    """Return the module and program names for a test case.

    Normally, the unit tests will parse the default ('__main__')
    module and follow all the imports listed there. You can override
    this behavior and instruct the tests to check multiple modules
    by using a comment like this in the test case input:

      # cmd: mypy -m foo.bar foo.baz

    You can also use `# cmdN:` to have a different cmd for incremental
    step N (2, 3, ...).

    Return a list of tuples (module name, file name, program text).
    """
    m = re.search('# cmd: mypy -m ([a-zA-Z0-9_. ]+)$', program_text, flags=re.MULTILINE)
    if incremental_step > 1:
        alt_regex = '# cmd{}: mypy -m ([a-zA-Z0-9_. ]+)$'.format(incremental_step)
        alt_m = re.search(alt_regex, program_text, flags=re.MULTILINE)
        if alt_m is not None:
            # Optionally return a different command if in a later step
            # of incremental mode, otherwise default to reusing the
            # original cmd.
            m = alt_m

    if m:
        # The test case wants to use a non-default main
        # module. Look up the module and give it as the thing to
        # analyze.
        module_names = m.group(1)
        out = []
        search_paths = SearchPaths((test_temp_dir,), (), (), ())
        cache = FindModuleCache(search_paths)
        for module_name in module_names.split(' '):
            path = cache.find_module(module_name)
            # Fix: include the offending module name in the message (as the
            # sibling implementation of this method already does) so a
            # missing ad hoc case file is easy to diagnose.
            assert path is not None, "Can't find ad hoc case file: %s" % module_name
            with open(path, encoding='utf8') as f:
                program_text = f.read()
            out.append((module_name, path, program_text))
        return out
    else:
        return [('__main__', 'main', program_text)]
def setUp(self) -> None:
    """Prepare module finder caches over the site-packages fixture, one
    with and one without namespace-package support."""
    self.package_dir = os.path.relpath(
        os.path.join(package_path, "modulefinder-site-packages"))
    self.search_paths = SearchPaths(
        python_path=(),
        mypy_path=(os.path.join(data_path, "pkg1"),),
        package_path=(self.package_dir,),
        typeshed_path=(),
    )
    # fmc_ns: namespace packages on; fmc_nons: namespace packages off.
    for enabled, attr in ((True, 'fmc_ns'), (False, 'fmc_nons')):
        opts = Options()
        opts.namespace_packages = enabled
        setattr(self, attr, FindModuleCache(self.search_paths, options=opts))
def build_stubs(options: Options,
                find_module_cache: FindModuleCache,
                mod: str) -> Dict[str, nodes.MypyFile]:
    """Run a mypy build over `mod` and its submodules and return the files.

    Prints every error message and exits with status 1 if the build
    produced errors or raised CompileError.
    """
    sources = find_module_cache.find_modules_recursive(mod)
    try:
        build_result = build.build(sources=sources, options=options)
        error_messages = build_result.errors
    except CompileError as exc:
        error_messages = exc.messages
    if error_messages:
        for message in error_messages:
            print(message)
        sys.exit(1)
    # NOTE(review): assumes a CompileError always carries messages;
    # otherwise `build_result` would be unbound here — confirm upstream.
    return build_result.files
def main(args: List[str]) -> Iterator[Error]:
    """Yield stub-test errors for each module named on the command line.

    args: argv-style list; args[0] is the program name and the remaining
    entries are module names.  Exits with status 1 (on first iteration,
    since this is a generator) if no modules were given.
    """
    if len(args) < 2:
        # Fix: also reject an empty argv — previously only `len(args) == 1`
        # was handled, so an empty list silently tested nothing.
        print('must provide at least one module to test')
        sys.exit(1)
    else:
        modules = args[1:]

    options = Options()
    options.incremental = False
    data_dir = default_data_dir()
    search_path = compute_search_paths([], options, data_dir)
    find_module_cache = FindModuleCache(search_path)

    for module in modules:
        for error in test_stub(options, find_module_cache, module):
            yield error
def process_options(args: List[str],
                    stdout: Optional[TextIO] = None,
                    stderr: Optional[TextIO] = None,
                    require_targets: bool = True,
                    server_options: bool = False,
                    fscache: Optional[FileSystemCache] = None,
                    program: str = 'mypy',
                    header: str = HEADER,
                    ) -> Tuple[List[BuildSource], Options]:
    """Parse command line arguments.

    If a FileSystemCache is passed in, and package_root options are given,
    call fscache.set_package_root() to set the cache's package root.
    """
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    parser = argparse.ArgumentParser(prog=program,
                                     usage=header,
                                     description=DESCRIPTION,
                                     epilog=FOOTER,
                                     fromfile_prefix_chars='@',
                                     formatter_class=AugmentedHelpFormatter,
                                     add_help=False)

    # Flags enabled by --strict, collected by add_invertible_flag() below.
    strict_flag_names = []  # type: List[str]
    strict_flag_assignments = []  # type: List[Tuple[str, bool]]

    def add_invertible_flag(flag: str,
                            *,
                            inverse: Optional[str] = None,
                            default: bool,
                            dest: Optional[str] = None,
                            help: str,
                            strict_flag: bool = False,
                            group: Optional[argparse._ActionsContainer] = None) -> None:
        # Register both a flag and its inverse (e.g. --foo / --no-foo);
        # the inverse is hidden from --help output.
        if inverse is None:
            inverse = invert_flag_name(flag)
        if group is None:
            group = parser

        if help is not argparse.SUPPRESS:
            help += " (inverse: {})".format(inverse)

        arg = group.add_argument(flag,
                                 action='store_false' if default else 'store_true',
                                 dest=dest,
                                 help=help)
        dest = arg.dest
        arg = group.add_argument(inverse,
                                 action='store_true' if default else 'store_false',
                                 dest=dest,
                                 help=argparse.SUPPRESS)
        if strict_flag:
            assert dest is not None
            strict_flag_names.append(flag)
            strict_flag_assignments.append((dest, not default))

    # Unless otherwise specified, arguments will be parsed directly onto an
    # Options object.  Options that require further processing should have
    # their `dest` prefixed with `special-opts:`, which will cause them to be
    # parsed into the separate special_opts namespace object.

    # Note: we have a style guide for formatting the mypy --help text. See
    # https://github.com/python/mypy/wiki/Documentation-Conventions

    general_group = parser.add_argument_group(title='Optional arguments')
    general_group.add_argument('-h', '--help', action='help',
                               help="Show this help message and exit")
    general_group.add_argument('-v', '--verbose', action='count', dest='verbosity',
                               help="More verbose messages")
    general_group.add_argument('-V', '--version', action='version',
                               version='%(prog)s ' + __version__,
                               help="Show program's version number and exit")

    config_group = parser.add_argument_group(
        title='Config file',
        description="Use a config file instead of command line arguments. "
                    "This is useful if you are using many flags or want "
                    "to set different options per each module.")
    config_group.add_argument(
        '--config-file',
        help="Configuration file, must have a [mypy] section "
             "(defaults to {})".format(', '.join(defaults.CONFIG_FILES)))
    add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True,
                        help="Warn about unused '[mypy-<pattern>]' config sections",
                        group=config_group)

    imports_group = parser.add_argument_group(
        title='Import discovery',
        description="Configure how imports are discovered and followed.")
    imports_group.add_argument(
        '--ignore-missing-imports', action='store_true',
        help="Silently ignore imports of missing modules")
    imports_group.add_argument(
        '--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
        default='normal', help="How to treat imports (default normal)")
    imports_group.add_argument(
        '--python-executable', action='store', metavar='EXECUTABLE',
        help="Python executable used for finding PEP 561 compliant installed"
             " packages and stubs",
        dest='special-opts:python_executable')
    imports_group.add_argument(
        '--no-site-packages', action='store_true',
        dest='special-opts:no_executable',
        help="Do not search for installed PEP 561 compliant packages")
    imports_group.add_argument(
        '--no-silence-site-packages', action='store_true',
        help="Do not silence errors in PEP 561 compliant installed packages")
    add_invertible_flag(
        '--namespace-packages', default=False,
        help="Support namespace packages (PEP 420, __init__.py-less)",
        group=imports_group)

    platform_group = parser.add_argument_group(
        title='Platform configuration',
        description="Type check code assuming it will be run under certain "
                    "runtime conditions. By default, mypy assumes your code "
                    "will be run using the same operating system and Python "
                    "version you are using to run mypy itself.")
    platform_group.add_argument(
        '--python-version', type=parse_version, metavar='x.y',
        help='Type check code assuming it will be running on Python x.y',
        dest='special-opts:python_version')
    platform_group.add_argument(
        '-2', '--py2', dest='special-opts:python_version', action='store_const',
        const=defaults.PYTHON2_VERSION,
        help="Use Python 2 mode (same as --python-version 2.7)")
    platform_group.add_argument(
        '--platform', action='store', metavar='PLATFORM',
        help="Type check special-cased code for the given OS platform "
             "(defaults to sys.platform)")
    platform_group.add_argument(
        '--always-true', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered True (may be repeated)")
    platform_group.add_argument(
        '--always-false', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered False (may be repeated)")

    disallow_any_group = parser.add_argument_group(
        title='Dynamic typing',
        description="Disallow the use of the dynamic 'Any' type under certain conditions.")
    disallow_any_group.add_argument(
        '--disallow-any-unimported', default=False, action='store_true',
        help="Disallow Any types resulting from unfollowed imports")
    add_invertible_flag(
        '--disallow-subclassing-any', default=False, strict_flag=True,
        help="Disallow subclassing values of type 'Any' when defining classes",
        group=disallow_any_group)
    disallow_any_group.add_argument(
        '--disallow-any-expr', default=False, action='store_true',
        help='Disallow all expressions that have type Any')
    disallow_any_group.add_argument(
        '--disallow-any-decorated', default=False, action='store_true',
        help='Disallow functions that have Any in their signature '
             'after decorator transformation')
    disallow_any_group.add_argument(
        '--disallow-any-explicit', default=False, action='store_true',
        help='Disallow explicit Any in type positions')
    add_invertible_flag(
        '--disallow-any-generics', default=False, strict_flag=True,
        help='Disallow usage of generic types that do not specify explicit type '
             'parameters',
        group=disallow_any_group)

    untyped_group = parser.add_argument_group(
        title='Untyped definitions and calls',
        description="Configure how untyped definitions and calls are handled. "
                    "Note: by default, mypy ignores any untyped function definitions "
                    "and assumes any calls to such functions have a return "
                    "type of 'Any'.")
    add_invertible_flag(
        '--disallow-untyped-calls', default=False, strict_flag=True,
        help="Disallow calling functions without type annotations"
             " from functions with type annotations",
        group=untyped_group)
    add_invertible_flag(
        '--disallow-untyped-defs', default=False, strict_flag=True,
        help="Disallow defining functions without type annotations"
             " or with incomplete type annotations",
        group=untyped_group)
    add_invertible_flag(
        '--disallow-incomplete-defs', default=False, strict_flag=True,
        help="Disallow defining functions with incomplete type annotations",
        group=untyped_group)
    add_invertible_flag(
        '--check-untyped-defs', default=False, strict_flag=True,
        help="Type check the interior of functions without type annotations",
        group=untyped_group)
    add_invertible_flag(
        '--disallow-untyped-decorators', default=False, strict_flag=True,
        help="Disallow decorating typed functions with untyped decorators",
        group=untyped_group)

    none_group = parser.add_argument_group(
        title='None and Optional handling',
        description="Adjust how values of type 'None' are handled. For more context on "
                    "how mypy handles values of type 'None', see: "
                    "mypy.readthedocs.io/en/latest/kinds_of_types.html#no-strict-optional")
    add_invertible_flag(
        '--no-implicit-optional', default=False, strict_flag=True,
        help="Don't assume arguments with default values of None are Optional",
        group=none_group)
    none_group.add_argument(
        '--strict-optional', action='store_true',
        help=argparse.SUPPRESS)
    none_group.add_argument(
        '--no-strict-optional', action='store_false', dest='strict_optional',
        help="Disable strict Optional checks (inverse: --strict-optional)")
    none_group.add_argument(
        '--strict-optional-whitelist', metavar='GLOB', nargs='*',
        help="Suppress strict Optional errors in all but the provided files; "
             "implies --strict-optional (may suppress certain other errors "
             "in non-whitelisted files)")

    lint_group = parser.add_argument_group(
        title='Warnings',
        description="Detect code that is sound but redundant or problematic.")
    add_invertible_flag(
        '--warn-redundant-casts', default=False, strict_flag=True,
        help="Warn about casting an expression to its inferred type",
        group=lint_group)
    add_invertible_flag(
        '--warn-unused-ignores', default=False, strict_flag=True,
        help="Warn about unneeded '# type: ignore' comments",
        group=lint_group)
    add_invertible_flag(
        '--no-warn-no-return', dest='warn_no_return', default=True,
        help="Do not warn about functions that end without returning",
        group=lint_group)
    add_invertible_flag(
        '--warn-return-any', default=False, strict_flag=True,
        help="Warn about returning values of type Any"
             " from non-Any typed functions",
        group=lint_group)

    # Note: this group is intentionally added here even though we don't add
    # --strict to this group near the end.
    #
    # That way, this group will appear after the various strictness groups
    # but before the remaining flags.
    # We add `--strict` near the end so we don't accidentally miss any strictness
    # flags that are added after this group.
    strictness_group = parser.add_argument_group(
        title='Other strictness checks')
    add_invertible_flag(
        '--allow-untyped-globals', default=False, strict_flag=False,
        help="Suppress toplevel errors caused by missing annotations",
        group=strictness_group)
    add_invertible_flag(
        '--allow-redefinition', default=False, strict_flag=False,
        help="Allow unconditional variable redefinition with a new type",
        group=strictness_group)
    add_invertible_flag(
        '--strict-equality', default=False, strict_flag=False,
        help="Prohibit equality, identity, and container checks for"
             " non-overlapping types",
        group=strictness_group)

    incremental_group = parser.add_argument_group(
        title='Incremental mode',
        description="Adjust how mypy incrementally type checks and caches modules. "
                    "Mypy caches type information about modules into a cache to "
                    "let you speed up future invocations of mypy. Also see "
                    "mypy's daemon mode: "
                    "mypy.readthedocs.io/en/latest/mypy_daemon.html#mypy-daemon")
    incremental_group.add_argument(
        '-i', '--incremental', action='store_true',
        help=argparse.SUPPRESS)
    incremental_group.add_argument(
        '--no-incremental', action='store_false', dest='incremental',
        help="Disable module cache (inverse: --incremental)")
    incremental_group.add_argument(
        '--cache-dir', action='store', metavar='DIR',
        help="Store module cache info in the given folder in incremental mode "
             "(defaults to '{}')".format(defaults.CACHE_DIR))
    add_invertible_flag(
        '--sqlite-cache', default=False,
        help="Use a sqlite database to store the cache",
        group=incremental_group)
    incremental_group.add_argument(
        '--cache-fine-grained', action='store_true',
        help="Include fine-grained dependency information in the cache for the mypy daemon")
    incremental_group.add_argument(
        '--skip-version-check', action='store_true',
        help="Allow using cache written by older mypy version")
    incremental_group.add_argument(
        '--skip-cache-mtime-checks', action='store_true',
        help="Skip cache internal consistency checks based on mtime")

    internals_group = parser.add_argument_group(
        title='Mypy internals',
        description="Debug and customize mypy internals.")
    internals_group.add_argument(
        '--pdb', action='store_true', help="Invoke pdb on fatal error")
    internals_group.add_argument(
        '--show-traceback', '--tb', action='store_true',
        help="Show traceback on fatal error")
    internals_group.add_argument(
        '--raise-exceptions', action='store_true',
        help="Raise exception on fatal error")
    internals_group.add_argument(
        '--custom-typing', metavar='MODULE', dest='custom_typing_module',
        help="Use a custom typing module")
    internals_group.add_argument(
        '--custom-typeshed-dir', metavar='DIR',
        help="Use the custom typeshed in DIR")
    add_invertible_flag(
        '--warn-incomplete-stub', default=False,
        help="Warn if missing type annotation in typeshed, only relevant with"
             " --disallow-untyped-defs or --disallow-incomplete-defs enabled",
        group=internals_group)
    internals_group.add_argument(
        '--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'),
        dest='shadow_file', action='append',
        help="When encountering SOURCE_FILE, read and type check "
             "the contents of SHADOW_FILE instead.")
    add_invertible_flag('--fast-exit', default=False, help=argparse.SUPPRESS,
                        group=internals_group)
    add_invertible_flag('--new-semantic-analyzer', default=False,
                        help=argparse.SUPPRESS, group=internals_group)

    error_group = parser.add_argument_group(
        title='Error reporting',
        description="Adjust the amount of detail shown in error messages.")
    add_invertible_flag(
        '--show-error-context', default=False, dest='show_error_context',
        help='Precede errors with "note:" messages explaining context',
        group=error_group)
    add_invertible_flag(
        '--show-column-numbers', default=False,
        help="Show column numbers in error messages",
        group=error_group)

    # --strict is added here (not with the strictness group above) so that it
    # reflects every strict flag registered so far.
    strict_help = "Strict mode; enables the following flags: {}".format(
        ", ".join(strict_flag_names))
    strictness_group.add_argument(
        '--strict', action='store_true', dest='special-opts:strict',
        help=strict_help)

    report_group = parser.add_argument_group(
        title='Report generation',
        description='Generate a report in the specified format.')
    for report_type in sorted(defaults.REPORTER_NAMES):
        report_group.add_argument(
            '--%s-report' % report_type.replace('_', '-'),
            metavar='DIR',
            dest='special-opts:%s_report' % report_type)

    other_group = parser.add_argument_group(title='Miscellaneous')
    other_group.add_argument('--quickstart-file', help=argparse.SUPPRESS)
    other_group.add_argument('--junit-xml', help="Write junit.xml to the given file")
    other_group.add_argument(
        '--scripts-are-modules', action='store_true',
        help="Script x becomes module x instead of __main__")
    other_group.add_argument(
        '--find-occurrences', metavar='CLASS.MEMBER',
        dest='special-opts:find_occurrences',
        help="Print out all usages of a class member (experimental)")

    if server_options:
        # TODO: This flag is superfluous; remove after a short transition (2018-03-16)
        other_group.add_argument(
            '--experimental', action='store_true', dest='fine_grained_incremental',
            help="Enable fine-grained incremental mode")
        other_group.add_argument(
            '--use-fine-grained-cache', action='store_true',
            help="Use the cache in fine-grained incremental mode")

    # hidden options
    parser.add_argument('--stats', action='store_true', dest='dump_type_stats',
                        help=argparse.SUPPRESS)
    parser.add_argument('--inferstats', action='store_true',
                        dest='dump_inference_stats', help=argparse.SUPPRESS)
    # --debug-cache will disable any cache-related compressions/optimizations,
    # which will make the cache writing process output pretty-printed JSON (which
    # is easier to debug).
    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
    # --dump-deps will dump all fine-grained dependencies to stdout
    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
    # --dump-graph will dump the contents of the graph of SCCs and exit.
    parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
    # --semantic-analysis-only does exactly that.
    parser.add_argument('--semantic-analysis-only', action='store_true',
                        help=argparse.SUPPRESS)
    # --local-partial-types disallows partial types spanning module top level and a function
    # (implicitly defined in fine-grained incremental mode)
    parser.add_argument('--local-partial-types', action='store_true',
                        help=argparse.SUPPRESS)
    # --logical-deps adds some more dependencies that are not semantically needed, but
    # may be helpful to determine relative importance of classes and functions for overall
    # type precision in a code base. It also _removes_ some deps, so this flag should be never
    # used except for generating code stats. This also automatically enables --cache-fine-grained.
    # NOTE: This is an experimental option that may be modified or removed at any time.
    parser.add_argument('--logical-deps', action='store_true', help=argparse.SUPPRESS)
    # --bazel changes some behaviors for use with Bazel (https://bazel.build).
    parser.add_argument('--bazel', action='store_true', help=argparse.SUPPRESS)
    # --package-root adds a directory below which directories are considered
    # packages even without __init__.py.  May be repeated.
    parser.add_argument('--package-root', metavar='ROOT', action='append', default=[],
                        help=argparse.SUPPRESS)
    # --cache-map FILE ... gives a mapping from source files to cache files.
    # Each triple of arguments is a source file, a cache meta file, and a cache data file.
    # Modules not mentioned in the file will go through cache_dir.
    # Must be followed by another flag or by '--' (and then only file args may follow).
    parser.add_argument('--cache-map', nargs='+', dest='special-opts:cache_map',
                        help=argparse.SUPPRESS)

    # options specifying code to check
    code_group = parser.add_argument_group(
        title="Running code",
        description="Specify the code you want to type check. For more details, see "
                    "mypy.readthedocs.io/en/latest/running_mypy.html#running-mypy")
    code_group.add_argument(
        '-m', '--module', action='append', metavar='MODULE',
        default=[],
        dest='special-opts:modules',
        help="Type-check module; can repeat for more modules")
    code_group.add_argument(
        '-p', '--package', action='append', metavar='PACKAGE',
        default=[],
        dest='special-opts:packages',
        help="Type-check package recursively; can be repeated")
    code_group.add_argument(
        '-c', '--command', action='append', metavar='PROGRAM_TEXT',
        dest='special-opts:command',
        help="Type-check program passed in as string")
    code_group.add_argument(
        metavar='files', nargs='*', dest='special-opts:files',
        help="Type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    # Don't explicitly test if "config_file is not None" for this check.
    # This lets `--config-file=` (an empty string) be used to disable all config files.
    if config_file and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file, stdout, stderr)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):  # noqa
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # The python_version is either the default, which can be overridden via a config file,
    # or stored in special_opts and is passed via the command line.
    options.python_version = special_opts.python_version or options.python_version
    try:
        infer_python_executable(options, special_opts)
    except PythonExecutableInferenceError as e:
        parser.error(str(e))

    if special_opts.no_executable:
        options.python_executable = None

    # Paths listed in the config file will be ignored if any paths are passed on
    # the command line.
    if options.files and not special_opts.files:
        special_opts.files = options.files

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules + special_opts.packages,
                                             special_opts.command,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module/package, files, or command.")

    # Check for overlapping `--always-true` and `--always-false` flags.
    overlap = set(options.always_true) & set(options.always_false)
    if overlap:
        parser.error("You can't make a variable always true and always false (%s)" %
                     ', '.join(sorted(overlap)))

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True

    if special_opts.find_occurrences:
        state.find_occurrences = special_opts.find_occurrences.split('.')
        assert state.find_occurrences is not None
        if len(state.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(state.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Process --package-root.
    if options.package_root:
        process_package_roots(fscache, parser, options)

    # Process --cache-map.
    if special_opts.cache_map:
        if options.sqlite_cache:
            parser.error("--cache-map is incompatible with --sqlite-cache")
        process_cache_map(parser, special_opts, options)

    # Let logical_deps imply cache_fine_grained (otherwise the former is useless).
    if options.logical_deps:
        options.cache_fine_grained = True

    # Set target.
    if special_opts.modules + special_opts.packages:
        options.build_type = BuildType.MODULE
        search_paths = SearchPaths((os.getcwd(),), tuple(mypy_path()), (), ())
        targets = []
        # TODO: use the same cache that the BuildManager will
        cache = FindModuleCache(search_paths, fscache)
        for p in special_opts.packages:
            if os.sep in p or os.altsep and os.altsep in p:
                fail("Package name '{}' cannot have a slash in it.".format(p), stderr)
            p_targets = cache.find_modules_recursive(p)
            if not p_targets:
                fail("Can't find package '{}'".format(p), stderr)
            targets.extend(p_targets)
        for m in special_opts.modules:
            targets.append(BuildSource(None, m, None))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        try:
            targets = create_source_list(special_opts.files, options, fscache)
        # Variable named e2 instead of e to work around mypyc bug #620
        # which causes issues when using the same variable to catch
        # exceptions of different types.
        except InvalidSourceList as e2:
            fail(str(e2), stderr)
        return targets, options
def process_options(args: List[str],
                    require_targets: bool = True,
                    server_options: bool = False,
                    fscache: Optional[FileSystemCache] = None,
                    program: str = 'mypy',
                    header: str = HEADER,
                    ) -> Tuple[List[BuildSource], Options]:
    """Parse command line arguments.

    If a FileSystemCache is passed in, and package_root options
    are given, call fscache.set_package_root() to set the cache's package root.

    Args:
        args: raw command-line arguments (excluding the program name).
        require_targets: if true, error out unless exactly one of
            module/package, files, or command targets is given.
        server_options: if true, also register daemon-only flags.
        fscache: optional file system cache, used for module lookup and
            (via process_package_roots) for --package-root handling.
        program: program name shown in help/usage output.
        header: usage string for the argument parser.

    Returns:
        A (build sources, options) tuple.  Calls parser.error() (which
        exits) on invalid flag combinations.
    """
    parser = argparse.ArgumentParser(prog=program,
                                     usage=header,
                                     description=DESCRIPTION,
                                     epilog=FOOTER,
                                     fromfile_prefix_chars='@',
                                     formatter_class=AugmentedHelpFormatter,
                                     add_help=False)

    # Collected by add_invertible_flag() for flags marked strict_flag=True;
    # used later to build the --strict help text and apply strict defaults.
    strict_flag_names = []  # type: List[str]
    strict_flag_assignments = []  # type: List[Tuple[str, bool]]

    def add_invertible_flag(flag: str,
                            *,
                            inverse: Optional[str] = None,
                            default: bool,
                            dest: Optional[str] = None,
                            help: str,
                            strict_flag: bool = False,
                            group: Optional[argparse._ActionsContainer] = None
                            ) -> None:
        # Register a boolean flag together with its hidden inverse
        # (e.g. --foo / --no-foo); both write to the same dest.
        if inverse is None:
            inverse = invert_flag_name(flag)
        if group is None:
            group = parser

        if help is not argparse.SUPPRESS:
            help += " (inverse: {})".format(inverse)

        arg = group.add_argument(flag,
                                 action='store_false' if default else 'store_true',
                                 dest=dest,
                                 help=help)
        dest = arg.dest
        arg = group.add_argument(inverse,
                                 action='store_true' if default else 'store_false',
                                 dest=dest,
                                 help=argparse.SUPPRESS)
        if strict_flag:
            assert dest is not None
            strict_flag_names.append(flag)
            strict_flag_assignments.append((dest, not default))

    # Unless otherwise specified, arguments will be parsed directly onto an
    # Options object.  Options that require further processing should have
    # their `dest` prefixed with `special-opts:`, which will cause them to be
    # parsed into the separate special_opts namespace object.

    # Note: we have a style guide for formatting the mypy --help text. See
    # https://github.com/python/mypy/wiki/Documentation-Conventions

    general_group = parser.add_argument_group(
        title='Optional arguments')
    general_group.add_argument(
        '-h', '--help', action='help',
        help="Show this help message and exit")
    general_group.add_argument(
        '-v', '--verbose', action='count', dest='verbosity',
        help="More verbose messages")
    general_group.add_argument(
        '-V', '--version', action='version',
        version='%(prog)s ' + __version__,
        help="Show program's version number and exit")

    config_group = parser.add_argument_group(
        title='Config file',
        description="Use a config file instead of command line arguments. "
                    "This is useful if you are using many flags or want "
                    "to set different options per each module.")
    config_group.add_argument(
        '--config-file',
        help="Configuration file, must have a [mypy] section "
             "(defaults to {})".format(', '.join(defaults.CONFIG_FILES)))
    add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True,
                        help="Warn about unused '[mypy-<pattern>]' config sections",
                        group=config_group)

    imports_group = parser.add_argument_group(
        title='Import discovery',
        description="Configure how imports are discovered and followed.")
    imports_group.add_argument(
        '--ignore-missing-imports', action='store_true',
        help="Silently ignore imports of missing modules")
    imports_group.add_argument(
        '--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
        default='normal', help="How to treat imports (default normal)")
    imports_group.add_argument(
        '--python-executable', action='store', metavar='EXECUTABLE',
        help="Python executable used for finding PEP 561 compliant installed"
             " packages and stubs",
        dest='special-opts:python_executable')
    imports_group.add_argument(
        '--no-site-packages', action='store_true',
        dest='special-opts:no_executable',
        help="Do not search for installed PEP 561 compliant packages")
    imports_group.add_argument(
        '--no-silence-site-packages', action='store_true',
        help="Do not silence errors in PEP 561 compliant installed packages")
    add_invertible_flag(
        '--namespace-packages', default=False,
        help="Support namespace packages (PEP 420, __init__.py-less)",
        group=imports_group)

    platform_group = parser.add_argument_group(
        title='Platform configuration',
        description="Type check code assuming it will be run under certain "
                    "runtime conditions. By default, mypy assumes your code "
                    "will be run using the same operating system and Python "
                    "version you are using to run mypy itself.")
    platform_group.add_argument(
        '--python-version', type=parse_version, metavar='x.y',
        help='Type check code assuming it will be running on Python x.y',
        dest='special-opts:python_version')
    platform_group.add_argument(
        '-2', '--py2', dest='special-opts:python_version', action='store_const',
        const=defaults.PYTHON2_VERSION,
        help="Use Python 2 mode (same as --python-version 2.7)")
    platform_group.add_argument(
        '--platform', action='store', metavar='PLATFORM',
        help="Type check special-cased code for the given OS platform "
             "(defaults to sys.platform)")
    platform_group.add_argument(
        '--always-true', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered True (may be repeated)")
    platform_group.add_argument(
        '--always-false', metavar='NAME', action='append', default=[],
        help="Additional variable to be considered False (may be repeated)")

    disallow_any_group = parser.add_argument_group(
        title='Dynamic typing',
        description="Disallow the use of the dynamic 'Any' type under certain conditions.")
    disallow_any_group.add_argument(
        '--disallow-any-unimported', default=False, action='store_true',
        help="Disallow Any types resulting from unfollowed imports")
    add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True,
                        help="Disallow subclassing values of type 'Any' when defining classes",
                        group=disallow_any_group)
    disallow_any_group.add_argument(
        '--disallow-any-expr', default=False, action='store_true',
        help='Disallow all expressions that have type Any')
    disallow_any_group.add_argument(
        '--disallow-any-decorated', default=False, action='store_true',
        help='Disallow functions that have Any in their signature '
             'after decorator transformation')
    disallow_any_group.add_argument(
        '--disallow-any-explicit', default=False, action='store_true',
        help='Disallow explicit Any in type positions')
    add_invertible_flag('--disallow-any-generics', default=False, strict_flag=True,
                        help='Disallow usage of generic types that do not specify explicit type '
                             'parameters',
                        group=disallow_any_group)

    untyped_group = parser.add_argument_group(
        title='Untyped definitions and calls',
        description="Configure how untyped definitions and calls are handled. "
                    "Note: by default, mypy ignores any untyped function definitions "
                    "and assumes any calls to such functions have a return "
                    "type of 'Any'.")
    add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True,
                        help="Disallow calling functions without type annotations"
                             " from functions with type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True,
                        help="Disallow defining functions without type annotations"
                             " or with incomplete type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True,
                        help="Disallow defining functions with incomplete type annotations",
                        group=untyped_group)
    add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True,
                        help="Type check the interior of functions without type annotations",
                        group=untyped_group)
    add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True,
                        help="Disallow decorating typed functions with untyped decorators",
                        group=untyped_group)

    none_group = parser.add_argument_group(
        title='None and Optional handling',
        description="Adjust how values of type 'None' are handled. For more context on "
                    "how mypy handles values of type 'None', see: "
                    "mypy.readthedocs.io/en/latest/kinds_of_types.html#no-strict-optional")
    add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True,
                        help="Don't assume arguments with default values of None are Optional",
                        group=none_group)
    none_group.add_argument(
        '--strict-optional', action='store_true',
        help=argparse.SUPPRESS)
    none_group.add_argument(
        '--no-strict-optional', action='store_false', dest='strict_optional',
        help="Disable strict Optional checks (inverse: --strict-optional)")
    none_group.add_argument(
        '--strict-optional-whitelist', metavar='GLOB', nargs='*',
        help="Suppress strict Optional errors in all but the provided files; "
             "implies --strict-optional (may suppress certain other errors "
             "in non-whitelisted files)")

    lint_group = parser.add_argument_group(
        title='Warnings',
        description="Detect code that is sound but redundant or problematic.")
    add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
                        help="Warn about casting an expression to its inferred type",
                        group=lint_group)
    add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                        help="Warn about unneeded '# type: ignore' comments",
                        group=lint_group)
    add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True,
                        help="Do not warn about functions that end without returning",
                        group=lint_group)
    add_invertible_flag('--warn-return-any', default=False, strict_flag=True,
                        help="Warn about returning values of type Any"
                             " from non-Any typed functions",
                        group=lint_group)

    # Note: this group is intentionally added here even though we don't add
    # --strict to this group near the end.
    #
    # That way, this group will appear after the various strictness groups
    # but before the remaining flags.
    # We add `--strict` near the end so we don't accidentally miss any strictness
    # flags that are added after this group.
    strictness_group = parser.add_argument_group(
        title='Other strictness checks')
    add_invertible_flag('--allow-untyped-globals', default=False, strict_flag=False,
                        help="Suppress toplevel errors caused by missing annotations",
                        group=strictness_group)
    add_invertible_flag('--allow-redefinition', default=False, strict_flag=False,
                        help="Allow unconditional variable redefinition with a new type",
                        group=strictness_group)
    add_invertible_flag('--strict-equality', default=False, strict_flag=False,
                        help="Prohibit equality, identity, and container checks for"
                             " non-overlapping types",
                        group=strictness_group)

    incremental_group = parser.add_argument_group(
        title='Incremental mode',
        description="Adjust how mypy incrementally type checks and caches modules. "
                    "Mypy caches type information about modules into a cache to "
                    "let you speed up future invocations of mypy. Also see "
                    "mypy's daemon mode: "
                    "mypy.readthedocs.io/en/latest/mypy_daemon.html#mypy-daemon")
    incremental_group.add_argument(
        '-i', '--incremental', action='store_true',
        help=argparse.SUPPRESS)
    incremental_group.add_argument(
        '--no-incremental', action='store_false', dest='incremental',
        help="Disable module cache (inverse: --incremental)")
    incremental_group.add_argument(
        '--cache-dir', action='store', metavar='DIR',
        help="Store module cache info in the given folder in incremental mode "
             "(defaults to '{}')".format(defaults.CACHE_DIR))
    add_invertible_flag('--sqlite-cache', default=False,
                        help="Use a sqlite database to store the cache",
                        group=incremental_group)
    incremental_group.add_argument(
        '--cache-fine-grained', action='store_true',
        help="Include fine-grained dependency information in the cache for the mypy daemon")
    incremental_group.add_argument(
        '--skip-version-check', action='store_true',
        help="Allow using cache written by older mypy version")
    incremental_group.add_argument(
        '--skip-cache-mtime-checks', action='store_true',
        help="Skip cache internal consistency checks based on mtime")

    internals_group = parser.add_argument_group(
        title='Mypy internals',
        description="Debug and customize mypy internals.")
    internals_group.add_argument(
        '--pdb', action='store_true', help="Invoke pdb on fatal error")
    internals_group.add_argument(
        '--show-traceback', '--tb', action='store_true',
        help="Show traceback on fatal error")
    internals_group.add_argument(
        '--raise-exceptions', action='store_true', help="Raise exception on fatal error"
    )
    internals_group.add_argument(
        '--custom-typing', metavar='MODULE', dest='custom_typing_module',
        help="Use a custom typing module")
    internals_group.add_argument(
        '--custom-typeshed-dir', metavar='DIR',
        help="Use the custom typeshed in DIR")
    add_invertible_flag('--warn-incomplete-stub', default=False,
                        help="Warn if missing type annotation in typeshed, only relevant with"
                             " --disallow-untyped-defs or --disallow-incomplete-defs enabled",
                        group=internals_group)
    internals_group.add_argument(
        '--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'),
        dest='shadow_file', action='append',
        help="When encountering SOURCE_FILE, read and type check "
             "the contents of SHADOW_FILE instead.")
    add_invertible_flag('--fast-exit', default=False, help=argparse.SUPPRESS,
                        group=internals_group)
    add_invertible_flag('--new-semantic-analyzer', default=False, help=argparse.SUPPRESS,
                        group=internals_group)

    error_group = parser.add_argument_group(
        title='Error reporting',
        description="Adjust the amount of detail shown in error messages.")
    add_invertible_flag('--show-error-context', default=False,
                        dest='show_error_context',
                        help='Precede errors with "note:" messages explaining context',
                        group=error_group)
    add_invertible_flag('--show-column-numbers', default=False,
                        help="Show column numbers in error messages",
                        group=error_group)

    # --strict goes into the strictness group declared earlier; by this point
    # every strict_flag=True flag has been registered, so the help text is
    # complete.
    strict_help = "Strict mode; enables the following flags: {}".format(
        ", ".join(strict_flag_names))
    strictness_group.add_argument(
        '--strict', action='store_true', dest='special-opts:strict',
        help=strict_help)

    report_group = parser.add_argument_group(
        title='Report generation',
        description='Generate a report in the specified format.')
    for report_type in sorted(defaults.REPORTER_NAMES):
        report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
                                  metavar='DIR',
                                  dest='special-opts:%s_report' % report_type)

    other_group = parser.add_argument_group(
        title='Miscellaneous')
    other_group.add_argument(
        '--quickstart-file', help=argparse.SUPPRESS)
    other_group.add_argument(
        '--junit-xml', help="Write junit.xml to the given file")
    other_group.add_argument(
        '--scripts-are-modules', action='store_true',
        help="Script x becomes module x instead of __main__")
    other_group.add_argument(
        '--find-occurrences', metavar='CLASS.MEMBER',
        dest='special-opts:find_occurrences',
        help="Print out all usages of a class member (experimental)")

    if server_options:
        # TODO: This flag is superfluous; remove after a short transition (2018-03-16)
        other_group.add_argument(
            '--experimental', action='store_true', dest='fine_grained_incremental',
            help="Enable fine-grained incremental mode")
        other_group.add_argument(
            '--use-fine-grained-cache', action='store_true',
            help="Use the cache in fine-grained incremental mode")

    # hidden options
    parser.add_argument(
        '--stats', action='store_true', dest='dump_type_stats', help=argparse.SUPPRESS)
    parser.add_argument(
        '--inferstats', action='store_true', dest='dump_inference_stats',
        help=argparse.SUPPRESS)
    # --debug-cache will disable any cache-related compressions/optimizations,
    # which will make the cache writing process output pretty-printed JSON (which
    # is easier to debug).
    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
    # --dump-deps will dump all fine-grained dependencies to stdout
    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
    # --dump-graph will dump the contents of the graph of SCCs and exit.
    parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
    # --semantic-analysis-only does exactly that.
    parser.add_argument('--semantic-analysis-only', action='store_true', help=argparse.SUPPRESS)
    # --local-partial-types disallows partial types spanning module top level and a function
    # (implicitly defined in fine-grained incremental mode)
    parser.add_argument('--local-partial-types', action='store_true', help=argparse.SUPPRESS)
    # --logical-deps adds some more dependencies that are not semantically needed, but
    # may be helpful to determine relative importance of classes and functions for overall
    # type precision in a code base. It also _removes_ some deps, so this flag should be never
    # used except for generating code stats. This also automatically enables --cache-fine-grained.
    # NOTE: This is an experimental option that may be modified or removed at any time.
    parser.add_argument('--logical-deps', action='store_true', help=argparse.SUPPRESS)
    # --bazel changes some behaviors for use with Bazel (https://bazel.build).
    parser.add_argument('--bazel', action='store_true', help=argparse.SUPPRESS)
    # --package-root adds a directory below which directories are considered
    # packages even without __init__.py.  May be repeated.
    parser.add_argument('--package-root', metavar='ROOT', action='append', default=[],
                        help=argparse.SUPPRESS)
    # --cache-map FILE ... gives a mapping from source files to cache files.
    # Each triple of arguments is a source file, a cache meta file, and a cache data file.
    # Modules not mentioned in the file will go through cache_dir.
    # Must be followed by another flag or by '--' (and then only file args may follow).
    parser.add_argument('--cache-map', nargs='+', dest='special-opts:cache_map',
                        help=argparse.SUPPRESS)

    # options specifying code to check
    code_group = parser.add_argument_group(
        title="Running code",
        description="Specify the code you want to type check. For more details, see "
                    "mypy.readthedocs.io/en/latest/running_mypy.html#running-mypy")
    code_group.add_argument(
        '-m', '--module', action='append', metavar='MODULE',
        default=[],
        dest='special-opts:modules',
        help="Type-check module; can repeat for more modules")
    code_group.add_argument(
        '-p', '--package', action='append', metavar='PACKAGE',
        default=[],
        dest='special-opts:packages',
        help="Type-check package recursively; can be repeated")
    code_group.add_argument(
        '-c', '--command', action='append', metavar='PROGRAM_TEXT',
        dest='special-opts:command',
        help="Type-check program passed in as string")
    code_group.add_argument(
        metavar='files', nargs='*', dest='special-opts:files',
        help="Type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    # Don't explicitly test if "config_file is not None" for this check.
    # This lets `--config-file=` (an empty string) be used to disable all config files.
    if config_file and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # The python_version is either the default, which can be overridden via a config file,
    # or stored in special_opts and is passed via the command line.
    options.python_version = special_opts.python_version or options.python_version
    try:
        infer_python_executable(options, special_opts)
    except PythonExecutableInferenceError as e:
        parser.error(str(e))

    if special_opts.no_executable:
        options.python_executable = None

    # Paths listed in the config file will be ignored if any paths are passed on
    # the command line.
    if options.files and not special_opts.files:
        special_opts.files = options.files

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules + special_opts.packages,
                                             special_opts.command,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module/package, files, or command.")

    # Check for overlapping `--always-true` and `--always-false` flags.
    overlap = set(options.always_true) & set(options.always_false)
    if overlap:
        parser.error("You can't make a variable always true and always false (%s)" %
                     ', '.join(sorted(overlap)))

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True

    if special_opts.find_occurrences:
        # Expect exactly CLASS.MEMBER (two dotted components); parser.error()
        # exits, so the order of these checks selects which message is shown.
        state.find_occurrences = special_opts.find_occurrences.split('.')
        assert state.find_occurrences is not None
        if len(state.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(state.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.  Report flags all end in '_report'; strip that suffix
    # (7 chars) to recover the report type name.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Process --package-root.
    if options.package_root:
        process_package_roots(fscache, parser, options)

    # Process --cache-map.
    if special_opts.cache_map:
        if options.sqlite_cache:
            parser.error("--cache-map is incompatible with --sqlite-cache")

        process_cache_map(parser, special_opts, options)

    # Let logical_deps imply cache_fine_grained (otherwise the former is useless).
    if options.logical_deps:
        options.cache_fine_grained = True

    # Set target.
    if special_opts.modules + special_opts.packages:
        options.build_type = BuildType.MODULE
        search_paths = SearchPaths((os.getcwd(),), tuple(mypy_path()), (), ())
        targets = []
        # TODO: use the same cache that the BuildManager will
        cache = FindModuleCache(search_paths, fscache)
        for p in special_opts.packages:
            if os.sep in p or os.altsep and os.altsep in p:
                fail("Package name '{}' cannot have a slash in it.".format(p))
            p_targets = cache.find_modules_recursive(p)
            if not p_targets:
                fail("Can't find package '{}'".format(p))
            targets.extend(p_targets)
        for m in special_opts.modules:
            targets.append(BuildSource(None, m, None))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        try:
            targets = create_source_list(special_opts.files, options, fscache)
        except InvalidSourceList as e:
            fail(str(e))
        return targets, options
class ModuleFinderSitePackagesSuite(Suite):
    """Tests for FindModuleCache lookups inside a simulated site-packages tree.

    Uses the test-data directory 'modulefinder-site-packages' as the package
    path and exercises both namespace-package and non-namespace-package
    lookup behavior.
    """

    def setUp(self) -> None:
        # Relative path keeps expected paths in the case tables short and
        # platform-portable.
        self.package_dir = os.path.relpath(
            os.path.join(
                package_path,
                "modulefinder-site-packages",
            ))

        egg_dirs, site_packages = expand_site_packages([self.package_dir])

        self.search_paths = SearchPaths(
            python_path=(),
            mypy_path=(os.path.join(data_path, "pkg1"),),
            package_path=tuple(egg_dirs + site_packages),
            typeshed_path=(),
        )

        # Finder with namespace packages (PEP 420) enabled.
        options = Options()
        options.namespace_packages = True
        self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options)

        # Finder with namespace packages disabled.
        options = Options()
        options.namespace_packages = False
        self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options)

    def path(self, *parts: str) -> str:
        # Helper: join parts onto the site-packages directory.
        return os.path.join(self.package_dir, *parts)

    def test__packages_with_ns(self) -> None:
        # Table of (module name, expected find_module result) with
        # namespace packages enabled.
        cases = [
            # Namespace package with py.typed
            ("ns_pkg_typed", self.path("ns_pkg_typed")),
            ("ns_pkg_typed.a", self.path("ns_pkg_typed", "a.py")),
            ("ns_pkg_typed.b", self.path("ns_pkg_typed", "b")),
            ("ns_pkg_typed.b.c", self.path("ns_pkg_typed", "b", "c.py")),
            ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND),

            # Namespace package without py.typed
            ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Namespace package without stub package
            ("ns_pkg_w_stubs", self.path("ns_pkg_w_stubs")),
            ("ns_pkg_w_stubs.typed", self.path("ns_pkg_w_stubs-stubs", "typed", "__init__.pyi")),
            ("ns_pkg_w_stubs.typed_inline",
             self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py")),
            ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Regular package with py.typed
            ("pkg_typed", self.path("pkg_typed", "__init__.py")),
            ("pkg_typed.a", self.path("pkg_typed", "a.py")),
            ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")),
            ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")),
            ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND),

            # Regular package without py.typed
            ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Top-level Python file in site-packages
            ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Packages found by following .pth files
            ("baz_pkg", self.path("baz", "baz_pkg", "__init__.py")),
            ("ns_baz_pkg.a", self.path("baz", "ns_baz_pkg", "a.py")),
            ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg",
                                       "__init__.py")),
            ("ns_neighbor_pkg.a", self.path("..", "modulefinder-src", "ns_neighbor_pkg",
                                            "a.py")),

            # Something that doesn't exist
            ("does_not_exist", ModuleNotFoundReason.NOT_FOUND),

            # A regular package with an installed set of stubs
            ("foo.bar", self.path("foo-stubs", "bar.pyi")),

            # A regular, non-site-packages module
            ("a", os.path.join(data_path, "pkg1", "a.py")),
        ]
        for module, expected in cases:
            template = "Find(" + module + ") got {}; expected {}"

            actual = self.fmc_ns.find_module(module)
            assert_equal(actual, expected, template)

    def test__packages_without_ns(self) -> None:
        # Same modules as above, but with namespace packages disabled:
        # PEP 420 (implicit) packages are expected to be NOT_FOUND.
        cases = [
            # Namespace package with py.typed
            ("ns_pkg_typed", ModuleNotFoundReason.NOT_FOUND),
            ("ns_pkg_typed.a", ModuleNotFoundReason.NOT_FOUND),
            ("ns_pkg_typed.b", ModuleNotFoundReason.NOT_FOUND),
            ("ns_pkg_typed.b.c", ModuleNotFoundReason.NOT_FOUND),
            ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND),

            # Namespace package without py.typed
            ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Namespace package without stub package
            ("ns_pkg_w_stubs", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_w_stubs.typed", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("ns_pkg_w_stubs.typed_inline",
             self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py")),
            ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Regular package with py.typed
            ("pkg_typed", self.path("pkg_typed", "__init__.py")),
            ("pkg_typed.a", self.path("pkg_typed", "a.py")),
            ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")),
            ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")),
            ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND),

            # Regular package without py.typed
            ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Top-level Python file in site-packages
            ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),
            ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS),

            # Packages found by following .pth files
            ("baz_pkg", self.path("baz", "baz_pkg", "__init__.py")),
            ("ns_baz_pkg.a", ModuleNotFoundReason.NOT_FOUND),
            ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg",
                                       "__init__.py")),
            ("ns_neighbor_pkg.a", ModuleNotFoundReason.NOT_FOUND),

            # Something that doesn't exist
            ("does_not_exist", ModuleNotFoundReason.NOT_FOUND),

            # A regular package with an installed set of stubs
            ("foo.bar", self.path("foo-stubs", "bar.pyi")),

            # A regular, non-site-packages module
            ("a", os.path.join(data_path, "pkg1", "a.py")),
        ]
        for module, expected in cases:
            template = "Find(" + module + ") got {}; expected {}"

            actual = self.fmc_nons.find_module(module)
            assert_equal(actual, expected, template)
class ModuleFinderSuite(Suite):
    """Tests for FindModuleCache, with and without namespace-package support.

    Two caches are built over the same search paths:
      * fmc_ns   -- Options().namespace_packages = True
      * fmc_nons -- Options().namespace_packages = False
    """

    def setUp(self) -> None:
        """Build the shared search paths and both module-finder caches."""
        self.search_paths = SearchPaths(
            python_path=(),
            mypy_path=(
                os.path.join(data_path, "nsx-pkg1"),
                os.path.join(data_path, "nsx-pkg2"),
                os.path.join(data_path, "nsx-pkg3"),
                os.path.join(data_path, "nsy-pkg1"),
                os.path.join(data_path, "nsy-pkg2"),
                os.path.join(data_path, "pkg1"),
                os.path.join(data_path, "pkg2"),
            ),
            package_path=(),
            typeshed_path=(),
        )
        options = Options()
        options.namespace_packages = True
        self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options)
        options = Options()
        options.namespace_packages = False
        self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options)

    def test__no_namespace_packages__nsx(self) -> None:
        """
        If namespace_packages is False, we shouldn't find nsx
        """
        found_module = self.fmc_nons.find_module("nsx")
        assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module)

    def test__no_namespace_packages__nsx_a(self) -> None:
        """
        If namespace_packages is False, we shouldn't find nsx.a.
        """
        found_module = self.fmc_nons.find_module("nsx.a")
        assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module)

    def test__no_namespace_packages__find_a_in_pkg1(self) -> None:
        """
        Find pkg1/a.py for "a" with namespace_packages False.
        """
        found_module = self.fmc_nons.find_module("a")
        expected = os.path.join(data_path, "pkg1", "a.py")
        assert_equal(expected, found_module)

    def test__no_namespace_packages__find_b_in_pkg2(self) -> None:
        """
        Find pkg2/b/__init__.py for "b" with namespace_packages False.
        """
        # Fixed: this test previously used self.fmc_ns, the namespace-aware
        # cache, contradicting its name; it must exercise the non-namespace one.
        found_module = self.fmc_nons.find_module("b")
        expected = os.path.join(data_path, "pkg2", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_as_namespace_pkg_in_pkg1(self) -> None:
        """
        There's no __init__.py in any of the nsx dirs, return
        the path to the first one found in mypypath.
        """
        found_module = self.fmc_ns.find_module("nsx")
        expected = os.path.join(data_path, "nsx-pkg1", "nsx")
        assert_equal(expected, found_module)

    def test__find_nsx_a_init_in_pkg1(self) -> None:
        """
        Find nsx-pkg1/nsx/a/__init__.py for "nsx.a" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.a")
        expected = os.path.join(data_path, "nsx-pkg1", "nsx", "a", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_b_init_in_pkg2(self) -> None:
        """
        Find nsx-pkg2/nsx/b/__init__.py for "nsx.b" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.b")
        expected = os.path.join(data_path, "nsx-pkg2", "nsx", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_c_c_in_pkg3(self) -> None:
        """
        Find nsx-pkg3/nsx/c/c.py for "nsx.c.c" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.c.c")
        expected = os.path.join(data_path, "nsx-pkg3", "nsx", "c", "c.py")
        assert_equal(expected, found_module)

    def test__find_nsy_a__init_pyi(self) -> None:
        """
        Prefer nsy-pkg1/a/__init__.pyi file over __init__.py.
        """
        found_module = self.fmc_ns.find_module("nsy.a")
        expected = os.path.join(data_path, "nsy-pkg1", "nsy", "a", "__init__.pyi")
        assert_equal(expected, found_module)

    def test__find_nsy_b__init_py(self) -> None:
        """
        There is a nsy-pkg2/nsy/b.pyi, but also a nsy-pkg2/nsy/b/__init__.py.
        We expect to find the latter when looking up "nsy.b" as
        a package is preferred over a module.
        """
        found_module = self.fmc_ns.find_module("nsy.b")
        expected = os.path.join(data_path, "nsy-pkg2", "nsy", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsy_c_pyi(self) -> None:
        """
        There is a nsy-pkg2/nsy/c.pyi and nsy-pkg2/nsy/c.py
        We expect to find the former when looking up "nsy.c" as
        .pyi is preferred over .py.
        """
        found_module = self.fmc_ns.find_module("nsy.c")
        expected = os.path.join(data_path, "nsy-pkg2", "nsy", "c.pyi")
        assert_equal(expected, found_module)

    def test__find_a_in_pkg1(self) -> None:
        """Find pkg1/a.py for "a" in namespace mode."""
        found_module = self.fmc_ns.find_module("a")
        expected = os.path.join(data_path, "pkg1", "a.py")
        assert_equal(expected, found_module)

    def test__find_b_init_in_pkg2(self) -> None:
        """Find pkg2/b/__init__.py for "b" in namespace mode."""
        found_module = self.fmc_ns.find_module("b")
        expected = os.path.join(data_path, "pkg2", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_d_nowhere(self) -> None:
        """"d" exists in no search path; expect NOT_FOUND."""
        found_module = self.fmc_ns.find_module("d")
        assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module)