def __init__(self, options: Options, status_file: str,
             timeout: Optional[int] = None) -> None:
    """Initialize the server with the desired mypy flags."""
    self.options = options
    # Snapshot the options info before we muck with it, to detect changes
    self.options_snapshot = options.snapshot()
    self.timeout = timeout
    self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

    if os.path.isfile(status_file):
        os.unlink(status_file)

    self.fscache = FileSystemCache()

    options.incremental = True
    options.fine_grained_incremental = True
    options.show_traceback = True
    if options.use_fine_grained_cache:
        # Using fine_grained_cache implies generating and caring
        # about the fine grained cache
        options.cache_fine_grained = True
    else:
        options.cache_dir = os.devnull
    # Fine-grained incremental doesn't support general partial types
    # (details in https://github.com/python/mypy/issues/4492)
    options.local_partial_types = True
    self.status_file = status_file
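# Hedged sketch of why the snapshot above is taken: a later request can compare a fresh
# Options.snapshot() against the one recorded at startup to notice that the daemon was
# started with different flags. The helper name below is hypothetical; only
# Options.snapshot() itself appears in the code above.
def options_changed(server: 'Server', current: Options) -> bool:
    # True if the flags the client wants differ from what the daemon was started with.
    return current.snapshot() != server.options_snapshot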
def parse_options(self, program_text: str, testcase: DataDrivenTestCase,
                  incremental_step: int) -> Options:
    options = Options()
    flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
    if incremental_step > 1:
        flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text,
                           flags=re.MULTILINE)
        if flags2:
            flags = flags2

    flag_list = None
    if flags:
        flag_list = flags.group(1).split()
        targets, options = process_options(flag_list, require_targets=False)
        if targets:
            raise RuntimeError('Specifying targets via the flags pragma is not supported.')
    else:
        options = Options()

    # Allow custom python version to override testcase_pyversion
    if (not flag_list or
            all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
        options.python_version = testcase_pyversion(testcase.file, testcase.name)

    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.incremental = True

    return options
def parse_options(self, program_text: str, testcase: DataDrivenTestCase,
                  incremental_step: int) -> Options:
    options = Options()
    flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
    if incremental_step > 1:
        flags2 = re.search('# flags{}: (.*)$'.format(incremental_step), program_text,
                           flags=re.MULTILINE)
        if flags2:
            flags = flags2

    flag_list = None
    if flags:
        flag_list = flags.group(1).split()
        targets, options = process_options(flag_list, require_targets=False)
        if targets:
            raise RuntimeError(
                'Specifying targets via the flags pragma is not supported.'
            )
    else:
        options = Options()

    # Allow custom python version to override testcase_pyversion
    if (not flag_list or
            all(flag not in flag_list for flag in ['--python-version', '-2', '--py2'])):
        options.python_version = testcase_pyversion(
            testcase.file, testcase.name)

    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.incremental = True

    return options
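# Hedged sketch of the "# flags:" pragma handled above: the first line matching
# '# flags: ...' supplies extra command-line flags for the test case, and
# '# flagsN: ...' overrides it for incremental step N. Standalone, stdlib-only
# illustration; the example flags are ordinary mypy flags, not test data from the source.
import re

program_text = """\
# flags: --strict-optional
# flags2: --strict-optional --warn-unused-ignores
x = 1
"""

flags = re.search('# flags: (.*)$', program_text, flags=re.MULTILINE)
flags2 = re.search('# flags{}: (.*)$'.format(2), program_text, flags=re.MULTILINE)
assert flags and flags.group(1).split() == ['--strict-optional']
assert flags2 and flags2.group(1).split() == ['--strict-optional', '--warn-unused-ignores']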
def __init__(self, options: Options,
             timeout: Optional[int] = None) -> None:
    """Initialize the server with the desired mypy flags."""
    self.options = options
    # Snapshot the options info before we muck with it, to detect changes
    self.options_snapshot = options.snapshot()
    self.timeout = timeout
    self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

    if os.path.isfile(STATUS_FILE):
        os.unlink(STATUS_FILE)

    self.fscache = FileSystemCache()

    options.incremental = True
    options.fine_grained_incremental = True
    options.show_traceback = True
    if options.use_fine_grained_cache:
        # Using fine_grained_cache implies generating and caring
        # about the fine grained cache
        options.cache_fine_grained = True
    else:
        options.cache_dir = os.devnull
    # Fine-grained incremental doesn't support general partial types
    # (details in https://github.com/python/mypy/issues/4492)
    options.local_partial_types = True
def __init__(self, options: Options, status_file: str,
             timeout: Optional[int] = None) -> None:
    """Initialize the server with the desired mypy flags."""
    self.options = options
    # Snapshot the options info before we muck with it, to detect changes
    self.options_snapshot = options.snapshot()
    self.timeout = timeout
    self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

    if os.path.isfile(status_file):
        os.unlink(status_file)

    self.fscache = FileSystemCache()

    options.raise_exceptions = True
    options.incremental = True
    options.fine_grained_incremental = True
    options.show_traceback = True
    if options.use_fine_grained_cache:
        # Using fine_grained_cache implies generating and caring
        # about the fine grained cache
        options.cache_fine_grained = True
    else:
        options.cache_dir = os.devnull
    # Fine-grained incremental doesn't support general partial types
    # (details in https://github.com/python/mypy/issues/4492)
    options.local_partial_types = True
    self.status_file = status_file

    # Since the object is created in the parent process we can check
    # the output terminal options here.
    self.formatter = FancyFormatter(sys.stdout, sys.stderr, options.show_error_codes)
def __init__(self, options: Options,
             timeout: Optional[int] = None,
             alt_lib_path: Optional[str] = None) -> None:
    """Initialize the server with the desired mypy flags."""
    self.options = options
    self.timeout = timeout
    self.alt_lib_path = alt_lib_path
    self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

    if os.path.isfile(STATUS_FILE):
        os.unlink(STATUS_FILE)

    self.fscache = FileSystemCache(self.options.python_version)

    options.incremental = True
    options.fine_grained_incremental = True
    options.show_traceback = True
    if options.use_fine_grained_cache:
        options.cache_fine_grained = True  # set this so that cache options match
    else:
        options.cache_dir = os.devnull
    # Fine-grained incremental doesn't support general partial types
    # (details in https://github.com/python/mypy/issues/4492)
    options.local_partial_types = True
def mypy_options(stubgen_options: Options) -> MypyOptions:
    """Generate mypy options using the flags passed by the user."""
    options = MypyOptions()
    options.follow_imports = 'skip'
    options.incremental = False
    options.ignore_errors = True
    options.semantic_analysis_only = True
    options.python_version = stubgen_options.pyversion
    return options
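# Hedged usage sketch for mypy_options() above: stubgen only needs mypy to parse and
# semantically analyze, so errors are ignored and full type checking is skipped. The
# SimpleNamespace stands in for stubgen's own Options object (which carries a
# `pyversion` attribute, as used above); this is an illustration, not stubgen's
# actual call site.
from types import SimpleNamespace

stubgen_opts = SimpleNamespace(pyversion=(3, 7))  # stand-in for stubgen's Options
opts = mypy_options(stubgen_opts)
assert opts.semantic_analysis_only and opts.ignore_errors and not opts.incremental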
def build_dir(target_dir: str) -> Tuple[List[str], BuildManager, Graph]:
    sources = expand_dir(target_dir)
    options = Options()
    options.incremental = True
    options.show_traceback = True
    options.cache_dir = os.devnull
    try:
        result = build.build(sources=sources,
                             options=options)
    except CompileError as e:
        # TODO: We need a manager and a graph in this case as well
        assert False, str('\n'.join(e.messages))
        return e.messages, None, None
    return result.errors, result.manager, result.graph
def build(self, source: str) -> Tuple[List[str], Optional[BuildManager], Dict[str, State]]:
    options = Options()
    options.incremental = True
    options.use_builtins_fixtures = True
    options.show_traceback = True
    main_path = os.path.join(test_temp_dir, 'main')
    with open(main_path, 'w') as f:
        f.write(source)
    try:
        result = build.build(sources=[BuildSource(main_path, None, None)],
                             options=options,
                             alt_lib_path=test_temp_dir)
    except CompileError as e:
        # TODO: Is it okay to return None?
        return e.messages, None, {}
    return result.errors, result.manager, result.graph
def main(args: List[str]) -> Iterator[Error]:
    if len(args) == 1:
        print('must provide at least one module to test')
        sys.exit(1)
    else:
        modules = args[1:]

    options = Options()
    options.incremental = False
    data_dir = default_data_dir()
    search_path = compute_search_paths([], options, data_dir)
    find_module_cache = FindModuleCache(search_path)

    for module in modules:
        for error in test_stub(options, find_module_cache, module):
            yield error
def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]:
    options = Options()
    options.incremental = True
    options.use_builtins_fixtures = True
    options.show_traceback = True
    main_path = os.path.join(test_temp_dir, 'main')
    with open(main_path, 'w') as f:
        f.write(source)
    try:
        result = build.build(sources=[BuildSource(main_path, None, None)],
                             options=options,
                             alt_lib_path=test_temp_dir)
    except CompileError as e:
        # TODO: We need a manager and a graph in this case as well
        assert False, str('\n'.join(e.messages))
        return e.messages, None, None
    return result.errors, result.manager, result.graph
def build(self, source: str) -> Optional[BuildResult]:
    options = Options()
    options.incremental = True
    options.fine_grained_incremental = True
    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.python_version = PYTHON3_VERSION
    main_path = os.path.join(test_temp_dir, 'main')
    with open(main_path, 'w', encoding='utf8') as f:
        f.write(source)
    try:
        result = build.build(sources=[BuildSource(main_path, None, None)],
                             options=options,
                             alt_lib_path=test_temp_dir)
    except CompileError:
        # TODO: Is it okay to return None?
        return None
    return result
def build(self, source: str) -> Optional[BuildResult]:
    options = Options()
    options.incremental = True
    options.fine_grained_incremental = True
    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.python_version = PYTHON3_VERSION
    main_path = os.path.join(test_temp_dir, 'main')
    with open(main_path, 'w') as f:
        f.write(source)
    try:
        result = build.build(sources=[BuildSource(main_path, None, None)],
                             options=options,
                             alt_lib_path=test_temp_dir)
    except CompileError as e:
        # TODO: Is it okay to return None?
        return None
    return result
def __init__(self, options: Options,
             timeout: Optional[int] = None,
             alt_lib_path: Optional[str] = None) -> None:
    """Initialize the server with the desired mypy flags."""
    self.options = options
    self.timeout = timeout
    self.alt_lib_path = alt_lib_path
    self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

    if os.path.isfile(STATUS_FILE):
        os.unlink(STATUS_FILE)

    options.incremental = True
    options.fine_grained_incremental = True
    options.show_traceback = True
    if options.use_fine_grained_cache:
        options.cache_fine_grained = True  # set this so that cache options match
    else:
        options.cache_dir = os.devnull
    # Fine-grained incremental doesn't support general partial types
    # (details in https://github.com/python/mypy/issues/4492)
    options.local_partial_types = True
def process_options(args: List[str],
                    require_targets: bool = True,
                    server_options: bool = False,
                    ) -> Tuple[List[BuildSource], Options]:
    """Parse command line arguments."""

    parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
                                     fromfile_prefix_chars='@',
                                     formatter_class=AugmentedHelpFormatter)

    strict_flag_names = []  # type: List[str]
    strict_flag_assignments = []  # type: List[Tuple[str, bool]]

    def add_invertible_flag(flag: str, *,
                            inverse: Optional[str] = None,
                            default: bool,
                            dest: Optional[str] = None,
                            help: str,
                            strict_flag: bool = False) -> None:
        if inverse is None:
            inverse = invert_flag_name(flag)
        if help is not argparse.SUPPRESS:
            help += " (inverse: {})".format(inverse)
        arg = parser.add_argument(
            flag,  # type: ignore  # incorrect stub for add_argument
            action='store_false' if default else 'store_true',
            dest=dest,
            help=help)
        dest = arg.dest
        arg = parser.add_argument(
            inverse,  # type: ignore  # incorrect stub for add_argument
            action='store_true' if default else 'store_false',
            dest=dest,
            help=argparse.SUPPRESS)
        if strict_flag:
            assert dest is not None
            strict_flag_names.append(flag)
            strict_flag_assignments.append((dest, not default))

    # Unless otherwise specified, arguments will be parsed directly onto an
    # Options object. Options that require further processing should have
    # their `dest` prefixed with `special-opts:`, which will cause them to be
    # parsed into the separate special_opts namespace object.
    parser.add_argument('-v', '--verbose', action='count', dest='verbosity',
                        help="more verbose messages")
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('--python-version', type=parse_version, metavar='x.y',
                        help='use Python x.y')
    parser.add_argument(
        '--platform', action='store', metavar='PLATFORM',
        help="typecheck special-cased code for the given OS platform "
             "(defaults to sys.platform).")
    parser.add_argument('-2', '--py2', dest='python_version', action='store_const',
                        const=defaults.PYTHON2_VERSION, help="use Python 2 mode")
    parser.add_argument('--ignore-missing-imports', action='store_true',
                        help="silently ignore imports of missing modules")
    parser.add_argument('--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
                        default='normal', help="how to treat imports (default normal)")
    parser.add_argument(
        '--disallow-any-unimported', default=False, action='store_true',
        help="disallow Any types resulting from unfollowed imports")
    parser.add_argument('--disallow-any-expr', default=False, action='store_true',
                        help='disallow all expressions that have type Any')
    parser.add_argument(
        '--disallow-any-decorated', default=False, action='store_true',
        help='disallow functions that have Any in their signature '
             'after decorator transformation')
    parser.add_argument('--disallow-any-explicit', default=False, action='store_true',
                        help='disallow explicit Any in type positions')
    parser.add_argument(
        '--disallow-any-generics', default=False, action='store_true',
        help='disallow usage of generic types that do not specify explicit '
             'type parameters')
    add_invertible_flag(
        '--disallow-untyped-calls', default=False, strict_flag=True,
        help="disallow calling functions without type annotations"
             " from functions with type annotations")
    add_invertible_flag(
        '--disallow-untyped-defs', default=False, strict_flag=True,
        help="disallow defining functions without type annotations"
             " or with incomplete type annotations")
    add_invertible_flag(
        '--disallow-incomplete-defs', default=False, strict_flag=True,
        help="disallow defining functions with incomplete type annotations")
    add_invertible_flag(
        '--check-untyped-defs', default=False, strict_flag=True,
        help="type check the interior of functions without type annotations")
    add_invertible_flag(
        '--disallow-subclassing-any', default=False, strict_flag=True,
        help="disallow subclassing values of type 'Any' when defining classes")
    add_invertible_flag(
        '--warn-incomplete-stub', default=False,
        help="warn if missing type annotation in typeshed, only relevant with"
             " --check-untyped-defs enabled")
    add_invertible_flag(
        '--disallow-untyped-decorators', default=False, strict_flag=True,
        help="disallow decorating typed functions with untyped decorators")
    add_invertible_flag(
        '--warn-redundant-casts', default=False, strict_flag=True,
        help="warn about casting an expression to its inferred type")
    add_invertible_flag(
        '--no-warn-no-return', dest='warn_no_return', default=True,
        help="do not warn about functions that end without returning")
    add_invertible_flag('--warn-return-any', default=False, strict_flag=True,
                        help="warn about returning values of type Any"
                             " from non-Any typed functions")
    add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                        help="warn about unneeded '# type: ignore' comments")
    add_invertible_flag(
        '--warn-unused-configs', default=False, strict_flag=True,
        help="warn about unused '[mypy-<pattern>]' config sections")
    add_invertible_flag(
        '--show-error-context', default=False, dest='show_error_context',
        help='Precede errors with "note:" messages explaining context')
    add_invertible_flag(
        '--no-implicit-optional', default=False, strict_flag=True,
        help="don't assume arguments with default values of None are Optional")
    parser.add_argument(
        '-i', '--incremental', action='store_true',
        help="enable module cache, (inverse: --no-incremental)")
    parser.add_argument('--no-incremental', action='store_false', dest='incremental',
                        help=argparse.SUPPRESS)
    parser.add_argument('--quick-and-dirty', action='store_true',
                        help="use cache even if dependencies out of date "
                             "(implies --incremental)")
    parser.add_argument(
        '--cache-dir', action='store', metavar='DIR',
        help="store module cache info in the given folder in incremental mode "
             "(defaults to '{}')".format(defaults.CACHE_DIR))
    parser.add_argument(
        '--cache-fine-grained', action='store_true',
        help="include fine-grained dependency information in the cache")
    parser.add_argument('--skip-version-check', action='store_true',
                        help="allow using cache written by older mypy version")
    add_invertible_flag('--strict-optional', default=False, strict_flag=True,
                        help="enable experimental strict Optional checks")
    parser.add_argument(
        '--strict-optional-whitelist', metavar='GLOB', nargs='*',
        help="suppress strict Optional errors in all but the provided files "
             "(experimental -- read documentation before using!). "
             "Implies --strict-optional. Has the undesirable side-effect of "
             "suppressing other errors in non-whitelisted files.")
    parser.add_argument('--junit-xml', help="write junit.xml to the given file")
    parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
    parser.add_argument('--show-traceback', '--tb', action='store_true',
                        help="show traceback on fatal error")
    parser.add_argument('--stats', action='store_true', dest='dump_type_stats',
                        help="dump stats")
    parser.add_argument('--inferstats', action='store_true', dest='dump_inference_stats',
                        help="dump type inference stats")
    parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module',
                        help="use a custom typing module")
    parser.add_argument('--custom-typeshed-dir', metavar='DIR',
                        help="use the custom typeshed in DIR")
    parser.add_argument('--scripts-are-modules', action='store_true',
                        help="Script x becomes module x instead of __main__")
    parser.add_argument('--config-file',
                        help="Configuration file, must have a [mypy] section "
                             "(defaults to {})".format(defaults.CONFIG_FILE))
    add_invertible_flag('--show-column-numbers', default=False,
                        help="Show column numbers in error messages")
    parser.add_argument(
        '--find-occurrences', metavar='CLASS.MEMBER',
        dest='special-opts:find_occurrences',
        help="print out all usages of a class member (experimental)")
    strict_help = "Strict mode. Enables the following flags: {}".format(
        ", ".join(strict_flag_names))
    parser.add_argument('--strict', action='store_true', dest='special-opts:strict',
                        help=strict_help)
    parser.add_argument('--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'),
                        dest='shadow_file',
                        help='Typecheck SHADOW_FILE in place of SOURCE_FILE.')
    # hidden options
    # --debug-cache will disable any cache-related compressions/optimizations,
    # which will make the cache writing process output pretty-printed JSON (which
    # is easier to debug).
    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
    # --dump-deps will dump all fine-grained dependencies to stdout
    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
    # --dump-graph will dump the contents of the graph of SCCs and exit.
    parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
    # --semantic-analysis-only does exactly that.
    parser.add_argument('--semantic-analysis-only', action='store_true',
                        help=argparse.SUPPRESS)
    # --local-partial-types disallows partial types spanning module top level and a function
    # (implicitly defined in fine-grained incremental mode)
    parser.add_argument('--local-partial-types', action='store_true',
                        help=argparse.SUPPRESS)
    # deprecated options
    parser.add_argument('--disallow-any', dest='special-opts:disallow_any',
                        help=argparse.SUPPRESS)
    add_invertible_flag('--strict-boolean', default=False, help=argparse.SUPPRESS)
    parser.add_argument('-f', '--dirty-stubs', action='store_true',
                        dest='special-opts:dirty_stubs', help=argparse.SUPPRESS)
    parser.add_argument('--use-python-path', action='store_true',
                        dest='special-opts:use_python_path', help=argparse.SUPPRESS)
    parser.add_argument('-s', '--silent-imports', action='store_true',
                        dest='special-opts:silent_imports', help=argparse.SUPPRESS)
    parser.add_argument('--almost-silent', action='store_true',
                        dest='special-opts:almost_silent', help=argparse.SUPPRESS)
    parser.add_argument('--fast-parser', action='store_true',
                        dest='special-opts:fast_parser', help=argparse.SUPPRESS)
    parser.add_argument('--no-fast-parser', action='store_true',
                        dest='special-opts:no_fast_parser', help=argparse.SUPPRESS)

    if server_options:
        parser.add_argument('--experimental', action='store_true',
                            dest='fine_grained_incremental',
                            help="enable fine-grained incremental mode")
        parser.add_argument(
            '--use-fine-grained-cache', action='store_true',
            help="use the cache in fine-grained incremental mode")

    report_group = parser.add_argument_group(
        title='report generation',
        description='Generate a report in the specified format.')
    for report_type in sorted(reporter_classes):
        report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
                                  metavar='DIR',
                                  dest='special-opts:%s_report' % report_type)

    code_group = parser.add_argument_group(
        title='How to specify the code to type check')
    code_group.add_argument(
        '-m', '--module', action='append', metavar='MODULE',
        dest='special-opts:modules',
        help="type-check module; can repeat for more modules")
    # TODO: `mypy -p A -p B` currently silently ignores A
    # (last option wins). Perhaps -c, -m and -p could just be
    # command-line flags that modify how we interpret self.files?
    code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT',
                            dest='special-opts:command',
                            help="type-check program passed in as string")
    code_group.add_argument('-p', '--package', metavar='PACKAGE',
                            dest='special-opts:package',
                            help="type-check all files in a directory")
    code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files',
                            help="type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    if config_file is not None and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # --use-python-path is no longer supported; explain why.
    if special_opts.use_python_path:
        parser.error("Sorry, --use-python-path is no longer supported.\n"
                     "If you are trying this because your code depends on a library module,\n"
                     "you should really investigate how to obtain stubs for that module.\n"
                     "See https://github.com/python/mypy/issues/1411 for more discussion."
                     )

    # Process deprecated options
    if special_opts.disallow_any:
        print("--disallow-any option was split up into multiple flags. "
              "See http://mypy.readthedocs.io/en/latest/command_line.html#disallow-any-flags")
    if options.strict_boolean:
        print("Warning: --strict-boolean is deprecated; "
              "see https://github.com/python/mypy/issues/3195", file=sys.stderr)
    if special_opts.almost_silent:
        print("Warning: --almost-silent has been replaced by "
              "--follow-imports=errors", file=sys.stderr)
        if options.follow_imports == 'normal':
            options.follow_imports = 'errors'
    elif special_opts.silent_imports:
        print("Warning: --silent-imports has been replaced by "
              "--ignore-missing-imports --follow-imports=skip", file=sys.stderr)
        options.ignore_missing_imports = True
        if options.follow_imports == 'normal':
            options.follow_imports = 'skip'
    if special_opts.dirty_stubs:
        print("Warning: -f/--dirty-stubs is deprecated and no longer necessary. Mypy no longer "
              "checks the git status of stubs.",
              file=sys.stderr)
    if special_opts.fast_parser:
        print("Warning: --fast-parser is now the default (and only) parser.")
    if special_opts.no_fast_parser:
        print("Warning: --no-fast-parser no longer has any effect. The fast parser "
              "is now mypy's default and only parser.")

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules,
                                             special_opts.command,
                                             special_opts.package,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module, package, files, or command.")

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True
    if special_opts.find_occurrences:
        experiments.find_occurrences = special_opts.find_occurrences.split('.')
        assert experiments.find_occurrences is not None
        if len(experiments.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(experiments.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Let quick_and_dirty imply incremental.
    if options.quick_and_dirty:
        options.incremental = True

    # Set target.
    if special_opts.modules:
        options.build_type = BuildType.MODULE
        targets = [BuildSource(None, m, None) for m in special_opts.modules]
        return targets, options
    elif special_opts.package:
        if os.sep in special_opts.package or os.altsep and os.altsep in special_opts.package:
            fail("Package name '{}' cannot have a slash in it.".format(
                special_opts.package))
        options.build_type = BuildType.MODULE
        lib_path = [os.getcwd()] + build.mypy_path()
        # TODO: use the same cache as the BuildManager will
        targets = build.FindModuleCache().find_modules_recursive(
            special_opts.package, lib_path)
        if not targets:
            fail("Can't find package '{}'".format(special_opts.package))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        targets = create_source_list(special_opts.files, options)
        return targets, options
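# Hedged illustration of the add_invertible_flag() pattern used above: every invertible
# flag gets a hidden inverse (e.g. --no-strict-optional for --strict-optional) that
# writes to the same dest, so the last flag on the command line wins. Standalone sketch
# using only argparse; invert_flag_name() is replaced by a simple string substitution,
# and the demo helper name is an assumption, not part of the original code.
import argparse


def demo_invertible(flag: str, default: bool) -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser()
    inverse = '--' + flag[5:] if flag.startswith('--no-') else '--no-' + flag[2:]
    arg = parser.add_argument(flag,
                              action='store_false' if default else 'store_true',
                              help="example invertible flag")
    parser.add_argument(inverse,
                        action='store_true' if default else 'store_false',
                        dest=arg.dest,
                        help=argparse.SUPPRESS)
    return parser


parser = demo_invertible('--strict-optional', default=False)
assert parser.parse_args(['--strict-optional']).strict_optional is True
assert parser.parse_args(
    ['--strict-optional', '--no-strict-optional']).strict_optional is False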
def main() -> int:
    assert sys.version_info >= (3, 5), "This script requires at least Python 3.5"

    parser = argparse.ArgumentParser(
        description="Compares stubs to objects introspected from the runtime."
    )
    parser.add_argument("modules", nargs="*", help="Modules to test")
    parser.add_argument("--concise", action="store_true", help="Make output concise")
    parser.add_argument(
        "--ignore-missing-stub",
        action="store_true",
        help="Ignore errors for stub missing things that are present at runtime",
    )
    parser.add_argument(
        "--ignore-positional-only",
        action="store_true",
        help="Ignore errors for whether an argument should or shouldn't be positional-only",
    )
    parser.add_argument(
        "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR"
    )
    parser.add_argument(
        "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed"
    )
    parser.add_argument(
        "--whitelist",
        action="append",
        metavar="FILE",
        default=[],
        help=(
            "Use file as a whitelist. Can be passed multiple times to combine multiple "
            "whitelists. Whitelist can be created with --generate-whitelist"
        ),
    )
    parser.add_argument(
        "--generate-whitelist",
        action="store_true",
        help="Print a whitelist (to stdout) to be used with --whitelist",
    )
    args = parser.parse_args()

    # Load the whitelist. This is a series of strings corresponding to Error.object_desc
    # Values in the dict will store whether we used the whitelist entry or not.
    whitelist = {
        entry: False
        for whitelist_file in args.whitelist
        for entry in get_whitelist_entries(whitelist_file)
    }

    # If we need to generate a whitelist, we store Error.object_desc for each error here.
    generated_whitelist = set()

    modules = args.modules
    if args.check_typeshed:
        assert not args.modules, "Cannot pass both --check-typeshed and a list of modules"
        modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir)
        modules.remove("antigravity")  # it's super annoying

    assert modules, "No modules to check"

    options = Options()
    options.incremental = False
    options.custom_typeshed_dir = args.custom_typeshed_dir

    modules = build_stubs(modules, options, find_submodules=not args.check_typeshed)

    exit_code = 0
    for module in modules:
        for error in test_module(module):
            # Filter errors
            if args.ignore_missing_stub and error.is_missing_stub():
                continue
            if args.ignore_positional_only and error.is_positional_only_related():
                continue
            if error.object_desc in whitelist:
                whitelist[error.object_desc] = True
                continue

            # We have errors, so change exit code, and output whatever necessary
            exit_code = 1
            if args.generate_whitelist:
                generated_whitelist.add(error.object_desc)
                continue
            print(error.get_description(concise=args.concise))

    # Print unused whitelist entries
    for w in whitelist:
        if not whitelist[w]:
            exit_code = 1
            print("note: unused whitelist entry {}".format(w))

    # Print the generated whitelist
    if args.generate_whitelist:
        for e in sorted(generated_whitelist):
            print(e)
        exit_code = 0

    return exit_code
def process_options(args: List[str],
                    require_targets: bool = True,
                    server_options: bool = False,
                    ) -> Tuple[List[BuildSource], Options]:
    """Parse command line arguments."""

    parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER,
                                     fromfile_prefix_chars='@',
                                     formatter_class=AugmentedHelpFormatter)

    strict_flag_names = []  # type: List[str]
    strict_flag_assignments = []  # type: List[Tuple[str, bool]]

    def add_invertible_flag(flag: str, *,
                            inverse: Optional[str] = None,
                            default: bool,
                            dest: Optional[str] = None,
                            help: str,
                            strict_flag: bool = False
                            ) -> None:
        if inverse is None:
            inverse = invert_flag_name(flag)
        if help is not argparse.SUPPRESS:
            help += " (inverse: {})".format(inverse)
        arg = parser.add_argument(flag,  # type: ignore  # incorrect stub for add_argument
                                  action='store_false' if default else 'store_true',
                                  dest=dest,
                                  help=help)
        dest = arg.dest
        arg = parser.add_argument(inverse,  # type: ignore  # incorrect stub for add_argument
                                  action='store_true' if default else 'store_false',
                                  dest=dest,
                                  help=argparse.SUPPRESS)
        if strict_flag:
            assert dest is not None
            strict_flag_names.append(flag)
            strict_flag_assignments.append((dest, not default))

    # Unless otherwise specified, arguments will be parsed directly onto an
    # Options object. Options that require further processing should have
    # their `dest` prefixed with `special-opts:`, which will cause them to be
    # parsed into the separate special_opts namespace object.
    parser.add_argument('-v', '--verbose', action='count', dest='verbosity',
                        help="more verbose messages")
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s ' + __version__)
    parser.add_argument('--python-version', type=parse_version, metavar='x.y',
                        help='use Python x.y')
    parser.add_argument('--platform', action='store', metavar='PLATFORM',
                        help="typecheck special-cased code for the given OS platform "
                             "(defaults to sys.platform).")
    parser.add_argument('-2', '--py2', dest='python_version', action='store_const',
                        const=defaults.PYTHON2_VERSION, help="use Python 2 mode")
    parser.add_argument('--ignore-missing-imports', action='store_true',
                        help="silently ignore imports of missing modules")
    parser.add_argument('--follow-imports', choices=['normal', 'silent', 'skip', 'error'],
                        default='normal', help="how to treat imports (default normal)")
    parser.add_argument('--disallow-any-unimported', default=False, action='store_true',
                        help="disallow Any types resulting from unfollowed imports")
    parser.add_argument('--disallow-any-expr', default=False, action='store_true',
                        help='disallow all expressions that have type Any')
    parser.add_argument('--disallow-any-decorated', default=False, action='store_true',
                        help='disallow functions that have Any in their signature '
                             'after decorator transformation')
    parser.add_argument('--disallow-any-explicit', default=False, action='store_true',
                        help='disallow explicit Any in type positions')
    parser.add_argument('--disallow-any-generics', default=False, action='store_true',
                        help='disallow usage of generic types that do not specify explicit '
                             'type parameters')
    add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True,
                        help="disallow calling functions without type annotations"
                             " from functions with type annotations")
    add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True,
                        help="disallow defining functions without type annotations"
                             " or with incomplete type annotations")
    add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True,
                        help="disallow defining functions with incomplete type annotations")
    add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True,
                        help="type check the interior of functions without type annotations")
    add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True,
                        help="disallow subclassing values of type 'Any' when defining classes")
    add_invertible_flag('--warn-incomplete-stub', default=False,
                        help="warn if missing type annotation in typeshed, only relevant with"
                             " --check-untyped-defs enabled")
    add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True,
                        help="disallow decorating typed functions with untyped decorators")
    add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True,
                        help="warn about casting an expression to its inferred type")
    add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True,
                        help="do not warn about functions that end without returning")
    add_invertible_flag('--warn-return-any', default=False, strict_flag=True,
                        help="warn about returning values of type Any"
                             " from non-Any typed functions")
    add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True,
                        help="warn about unneeded '# type: ignore' comments")
    add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True,
                        help="warn about unused '[mypy-<pattern>]' config sections")
    add_invertible_flag('--show-error-context', default=False,
                        dest='show_error_context',
                        help='Precede errors with "note:" messages explaining context')
    add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True,
                        help="don't assume arguments with default values of None are Optional")
    parser.add_argument('-i', '--incremental', action='store_true',
                        help="enable module cache, (inverse: --no-incremental)")
    parser.add_argument('--no-incremental', action='store_false', dest='incremental',
                        help=argparse.SUPPRESS)
    parser.add_argument('--quick-and-dirty', action='store_true',
                        help="use cache even if dependencies out of date "
                             "(implies --incremental)")
    parser.add_argument('--cache-dir', action='store', metavar='DIR',
                        help="store module cache info in the given folder in incremental mode "
                             "(defaults to '{}')".format(defaults.CACHE_DIR))
    parser.add_argument('--cache-fine-grained', action='store_true',
                        help="include fine-grained dependency information in the cache")
    parser.add_argument('--skip-version-check', action='store_true',
                        help="allow using cache written by older mypy version")
    add_invertible_flag('--strict-optional', default=False, strict_flag=True,
                        help="enable experimental strict Optional checks")
    parser.add_argument('--strict-optional-whitelist', metavar='GLOB', nargs='*',
                        help="suppress strict Optional errors in all but the provided files "
                             "(experimental -- read documentation before using!). "
                             "Implies --strict-optional. Has the undesirable side-effect of "
                             "suppressing other errors in non-whitelisted files.")
    parser.add_argument('--junit-xml', help="write junit.xml to the given file")
    parser.add_argument('--pdb', action='store_true', help="invoke pdb on fatal error")
    parser.add_argument('--show-traceback', '--tb', action='store_true',
                        help="show traceback on fatal error")
    parser.add_argument('--stats', action='store_true', dest='dump_type_stats',
                        help="dump stats")
    parser.add_argument('--inferstats', action='store_true', dest='dump_inference_stats',
                        help="dump type inference stats")
    parser.add_argument('--custom-typing', metavar='MODULE', dest='custom_typing_module',
                        help="use a custom typing module")
    parser.add_argument('--custom-typeshed-dir', metavar='DIR',
                        help="use the custom typeshed in DIR")
    parser.add_argument('--scripts-are-modules', action='store_true',
                        help="Script x becomes module x instead of __main__")
    parser.add_argument('--config-file',
                        help="Configuration file, must have a [mypy] section "
                             "(defaults to {})".format(defaults.CONFIG_FILE))
    add_invertible_flag('--show-column-numbers', default=False,
                        help="Show column numbers in error messages")
    parser.add_argument('--find-occurrences', metavar='CLASS.MEMBER',
                        dest='special-opts:find_occurrences',
                        help="print out all usages of a class member (experimental)")
    strict_help = "Strict mode. Enables the following flags: {}".format(
        ", ".join(strict_flag_names))
    parser.add_argument('--strict', action='store_true', dest='special-opts:strict',
                        help=strict_help)
    parser.add_argument('--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'),
                        dest='shadow_file',
                        help='Typecheck SHADOW_FILE in place of SOURCE_FILE.')
    # hidden options
    # --debug-cache will disable any cache-related compressions/optimizations,
    # which will make the cache writing process output pretty-printed JSON (which
    # is easier to debug).
    parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS)
    # --dump-deps will dump all fine-grained dependencies to stdout
    parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS)
    # --dump-graph will dump the contents of the graph of SCCs and exit.
    parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS)
    # --semantic-analysis-only does exactly that.
    parser.add_argument('--semantic-analysis-only', action='store_true',
                        help=argparse.SUPPRESS)
    # --local-partial-types disallows partial types spanning module top level and a function
    # (implicitly defined in fine-grained incremental mode)
    parser.add_argument('--local-partial-types', action='store_true',
                        help=argparse.SUPPRESS)
    # deprecated options
    parser.add_argument('--disallow-any', dest='special-opts:disallow_any',
                        help=argparse.SUPPRESS)
    add_invertible_flag('--strict-boolean', default=False, help=argparse.SUPPRESS)
    parser.add_argument('-f', '--dirty-stubs', action='store_true',
                        dest='special-opts:dirty_stubs', help=argparse.SUPPRESS)
    parser.add_argument('--use-python-path', action='store_true',
                        dest='special-opts:use_python_path', help=argparse.SUPPRESS)
    parser.add_argument('-s', '--silent-imports', action='store_true',
                        dest='special-opts:silent_imports', help=argparse.SUPPRESS)
    parser.add_argument('--almost-silent', action='store_true',
                        dest='special-opts:almost_silent', help=argparse.SUPPRESS)
    parser.add_argument('--fast-parser', action='store_true',
                        dest='special-opts:fast_parser', help=argparse.SUPPRESS)
    parser.add_argument('--no-fast-parser', action='store_true',
                        dest='special-opts:no_fast_parser', help=argparse.SUPPRESS)

    if server_options:
        # TODO: This flag is superfluous; remove after a short transition (2018-03-16)
        parser.add_argument('--experimental', action='store_true',
                            dest='fine_grained_incremental',
                            help="enable fine-grained incremental mode")
        parser.add_argument('--use-fine-grained-cache', action='store_true',
                            help="use the cache in fine-grained incremental mode")

    report_group = parser.add_argument_group(
        title='report generation',
        description='Generate a report in the specified format.')
    for report_type in sorted(reporter_classes):
        report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
                                  metavar='DIR',
                                  dest='special-opts:%s_report' % report_type)

    code_group = parser.add_argument_group(title='How to specify the code to type check')
    code_group.add_argument('-m', '--module', action='append', metavar='MODULE',
                            default=[], dest='special-opts:modules',
                            help="type-check module; can repeat for more modules")
    code_group.add_argument('-p', '--package', action='append', metavar='PACKAGE',
                            default=[], dest='special-opts:packages',
                            help="type-check package recursively; can be repeated")
    code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT',
                            dest='special-opts:command',
                            help="type-check program passed in as string")
    code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files',
                            help="type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    if config_file is not None and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # --use-python-path is no longer supported; explain why.
    if special_opts.use_python_path:
        parser.error("Sorry, --use-python-path is no longer supported.\n"
                     "If you are trying this because your code depends on a library module,\n"
                     "you should really investigate how to obtain stubs for that module.\n"
                     "See https://github.com/python/mypy/issues/1411 for more discussion."
                     )

    # Process deprecated options
    if special_opts.disallow_any:
        print("--disallow-any option was split up into multiple flags. "
              "See http://mypy.readthedocs.io/en/latest/command_line.html#disallow-any-flags")
    if options.strict_boolean:
        print("Warning: --strict-boolean is deprecated; "
              "see https://github.com/python/mypy/issues/3195", file=sys.stderr)
    if special_opts.almost_silent:
        print("Warning: --almost-silent has been replaced by "
              "--follow-imports=errors", file=sys.stderr)
        if options.follow_imports == 'normal':
            options.follow_imports = 'errors'
    elif special_opts.silent_imports:
        print("Warning: --silent-imports has been replaced by "
              "--ignore-missing-imports --follow-imports=skip", file=sys.stderr)
        options.ignore_missing_imports = True
        if options.follow_imports == 'normal':
            options.follow_imports = 'skip'
    if special_opts.dirty_stubs:
        print("Warning: -f/--dirty-stubs is deprecated and no longer necessary. Mypy no longer "
              "checks the git status of stubs.",
              file=sys.stderr)
    if special_opts.fast_parser:
        print("Warning: --fast-parser is now the default (and only) parser.")
    if special_opts.no_fast_parser:
        print("Warning: --no-fast-parser no longer has any effect. The fast parser "
              "is now mypy's default and only parser.")

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules + special_opts.packages,
                                             special_opts.command,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module/package, files, or command.")

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True
    if special_opts.find_occurrences:
        experiments.find_occurrences = special_opts.find_occurrences.split('.')
        assert experiments.find_occurrences is not None
        if len(experiments.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(experiments.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Let quick_and_dirty imply incremental.
    if options.quick_and_dirty:
        options.incremental = True

    # Set target.
    if special_opts.modules + special_opts.packages:
        options.build_type = BuildType.MODULE
        lib_path = [os.getcwd()] + build.mypy_path()
        targets = []
        # TODO: use the same cache as the BuildManager will
        cache = build.FindModuleCache()
        for p in special_opts.packages:
            if os.sep in p or os.altsep and os.altsep in p:
                fail("Package name '{}' cannot have a slash in it.".format(p))
            p_targets = cache.find_modules_recursive(p, lib_path)
            if not p_targets:
                fail("Can't find package '{}'".format(p))
            targets.extend(p_targets)
        for m in special_opts.modules:
            targets.append(BuildSource(None, m, None))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        try:
            targets = create_source_list(special_opts.files, options)
        except InvalidSourceList as e:
            fail(str(e))
        return targets, options
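# Hedged sketch of the "split namespace" idea behind parse_args(args, SplitNamespace(...))
# above: dests without a prefix land on the Options object, while dests prefixed with
# 'special-opts:' land on a separate namespace. The class below is an illustrative
# stand-in (name and details are assumptions), not mypy's actual SplitNamespace.
import argparse
from typing import Any, Tuple


class PrefixSplitNamespace:
    def __init__(self, standard: Any, special: Any, prefix: str) -> None:
        object.__setattr__(self, '_standard', standard)
        object.__setattr__(self, '_special', special)
        object.__setattr__(self, '_prefix', prefix)

    def _split(self, name: str) -> Tuple[Any, str]:
        # Route prefixed attribute names to the "special" object, others to "standard".
        if name.startswith(self._prefix):
            return self._special, name[len(self._prefix):]
        return self._standard, name

    def __setattr__(self, name: str, value: Any) -> None:
        obj, attr = self._split(name)
        setattr(obj, attr, value)

    def __getattr__(self, name: str) -> Any:
        obj, attr = self._split(name)
        return getattr(obj, attr)


standard = argparse.Namespace()
special = argparse.Namespace()
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', action='store_true')         # lands on `standard`
parser.add_argument('--module', dest='special-opts:module')   # lands on `special`
parser.parse_args(['--verbose', '--module', 'foo'],
                  PrefixSplitNamespace(standard, special, 'special-opts:'))
assert standard.verbose is True and special.module == 'foo'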
def process_options(args: List[str], require_targets: bool = True, server_options: bool = False, fscache: Optional[FileSystemCache] = None, ) -> Tuple[List[BuildSource], Options]: """Parse command line arguments. If a FileSystemCache is passed in, and package_root options are given, call fscache.set_package_root() to set the cache's package root. """ parser = argparse.ArgumentParser(prog='mypy', epilog=FOOTER, fromfile_prefix_chars='@', formatter_class=AugmentedHelpFormatter) strict_flag_names = [] # type: List[str] strict_flag_assignments = [] # type: List[Tuple[str, bool]] def add_invertible_flag(flag: str, *, inverse: Optional[str] = None, default: bool, dest: Optional[str] = None, help: str, strict_flag: bool = False, group: Optional[argparse._ActionsContainer] = None ) -> None: if inverse is None: inverse = invert_flag_name(flag) if group is None: group = parser if help is not argparse.SUPPRESS: help += " (inverse: {})".format(inverse) arg = group.add_argument(flag, action='store_false' if default else 'store_true', dest=dest, help=help) dest = arg.dest arg = group.add_argument(inverse, action='store_true' if default else 'store_false', dest=dest, help=argparse.SUPPRESS) if strict_flag: assert dest is not None strict_flag_names.append(flag) strict_flag_assignments.append((dest, not default)) # Unless otherwise specified, arguments will be parsed directly onto an # Options object. Options that require further processing should have # their `dest` prefixed with `special-opts:`, which will cause them to be # parsed into the separate special_opts namespace object. parser.add_argument('-v', '--verbose', action='count', dest='verbosity', help="more verbose messages") parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__, help="show program's version number and exit") config_group = parser.add_argument_group( title='config file', description="Use a config file instead of command line arguments.") config_group.add_argument( '--config-file', help="configuration file, must have a [mypy] section " "(defaults to {})".format(', '.join(defaults.CONFIG_FILES))) add_invertible_flag('--warn-unused-configs', default=False, strict_flag=True, help="warn about unused '[mypy-<pattern>]' config sections", group=config_group) imports_group = parser.add_argument_group( title='import discovery', description="Configure how imports are discovered and followed.") imports_group.add_argument( '--ignore-missing-imports', action='store_true', help="silently ignore imports of missing modules") imports_group.add_argument( '--follow-imports', choices=['normal', 'silent', 'skip', 'error'], default='normal', help="how to treat imports (default normal)") imports_group.add_argument( '--python-executable', action='store', metavar='EXECUTABLE', help="Python executable used for finding PEP 561 compliant installed" " packages and stubs", dest='special-opts:python_executable') imports_group.add_argument( '--no-site-packages', action='store_true', dest='special-opts:no_executable', help="do not search for installed PEP 561 compliant packages") platform_group = parser.add_argument_group( title='platform configuration', description="Type check code assuming certain runtime conditions.") platform_group.add_argument( '--python-version', type=parse_version, metavar='x.y', help='type check code assuming it will be running on Python x.y', dest='special-opts:python_version') platform_group.add_argument( '-2', '--py2', dest='python_version', action='store_const', const=defaults.PYTHON2_VERSION, 
help="use Python 2 mode (same as --python-version 2.7)") platform_group.add_argument( '--platform', action='store', metavar='PLATFORM', help="type check special-cased code for the given OS platform " "(defaults to sys.platform)") platform_group.add_argument( '--always-true', metavar='NAME', action='append', default=[], help="additional variable to be considered True (may be repeated)") platform_group.add_argument( '--always-false', metavar='NAME', action='append', default=[], help="additional variable to be considered False (may be repeated)") disallow_any_group = parser.add_argument_group( title='Any type restrictions', description="Disallow the use of the 'Any' type under certain conditions.") disallow_any_group.add_argument( '--disallow-any-unimported', default=False, action='store_true', help="disallow Any types resulting from unfollowed imports") add_invertible_flag('--disallow-subclassing-any', default=False, strict_flag=True, help="disallow subclassing values of type 'Any' when defining classes", group=disallow_any_group) disallow_any_group.add_argument( '--disallow-any-expr', default=False, action='store_true', help='disallow all expressions that have type Any') disallow_any_group.add_argument( '--disallow-any-decorated', default=False, action='store_true', help='disallow functions that have Any in their signature ' 'after decorator transformation') disallow_any_group.add_argument( '--disallow-any-explicit', default=False, action='store_true', help='disallow explicit Any in type positions') disallow_any_group.add_argument( '--disallow-any-generics', default=False, action='store_true', help='disallow usage of generic types that do not specify explicit ' 'type parameters') untyped_group = parser.add_argument_group( title='untyped definitions and calls', description="Configure how untyped definitions and calls are handled.") add_invertible_flag('--disallow-untyped-calls', default=False, strict_flag=True, help="disallow calling functions without type annotations" " from functions with type annotations", group=untyped_group) add_invertible_flag('--disallow-untyped-defs', default=False, strict_flag=True, help="disallow defining functions without type annotations" " or with incomplete type annotations", group=untyped_group) add_invertible_flag('--disallow-incomplete-defs', default=False, strict_flag=True, help="disallow defining functions with incomplete type annotations", group=untyped_group) add_invertible_flag('--check-untyped-defs', default=False, strict_flag=True, help="type check the interior of functions without type annotations", group=untyped_group) add_invertible_flag('--warn-incomplete-stub', default=False, help="warn if missing type annotation in typeshed, only relevant with" " --check-untyped-defs enabled", group=untyped_group) none_group = parser.add_argument_group( title='None and Optional handling', description="Adjust how values of type 'None' are handled.") add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True, help="don't assume arguments with default values of None are Optional", group=none_group) none_group.add_argument( '--strict-optional', action='store_true', help=argparse.SUPPRESS) none_group.add_argument( '--no-strict-optional', action='store_false', dest='strict_optional', help="disable strict Optional checks (inverse: --strict-optional)") none_group.add_argument( '--strict-optional-whitelist', metavar='GLOB', nargs='*', help="suppress strict Optional errors in all but the provided files; " "implies --strict-optional (may suppress certain 
other errors " "in non-whitelisted files)") lint_group = parser.add_argument_group( title='warnings', description="Detect code that is sound but redundant or problematic.") add_invertible_flag('--warn-redundant-casts', default=False, strict_flag=True, help="warn about casting an expression to its inferred type", group=lint_group) add_invertible_flag('--no-warn-no-return', dest='warn_no_return', default=True, help="do not warn about functions that end without returning", group=lint_group) add_invertible_flag('--warn-return-any', default=False, strict_flag=True, help="warn about returning values of type Any" " from non-Any typed functions", group=lint_group) add_invertible_flag('--warn-unused-ignores', default=False, strict_flag=True, help="warn about unneeded '# type: ignore' comments", group=lint_group) strictness_group = parser.add_argument_group( title='other strictness checks', description="Other miscellaneous strictness checks.") add_invertible_flag('--disallow-untyped-decorators', default=False, strict_flag=True, help="disallow decorating typed functions with untyped decorators", group=strictness_group) incremental_group = parser.add_argument_group( title='incremental mode', description="Adjust how mypy incrementally type checks and caches modules.") incremental_group.add_argument( '-i', '--incremental', action='store_true', help=argparse.SUPPRESS) incremental_group.add_argument( '--no-incremental', action='store_false', dest='incremental', help="disable module cache (inverse: --incremental)") incremental_group.add_argument( '--cache-dir', action='store', metavar='DIR', help="store module cache info in the given folder in incremental mode " "(defaults to '{}')".format(defaults.CACHE_DIR)) incremental_group.add_argument( '--cache-fine-grained', action='store_true', help="include fine-grained dependency information in the cache for the mypy daemon") incremental_group.add_argument( '--quick-and-dirty', action='store_true', help="use cache even if dependencies out of date (implies --incremental)") incremental_group.add_argument( '--skip-version-check', action='store_true', help="allow using cache written by older mypy version") internals_group = parser.add_argument_group( title='mypy internals', description="Debug and customize mypy internals.") internals_group.add_argument( '--pdb', action='store_true', help="invoke pdb on fatal error") internals_group.add_argument( '--show-traceback', '--tb', action='store_true', help="show traceback on fatal error") internals_group.add_argument( '--custom-typing', metavar='MODULE', dest='custom_typing_module', help="use a custom typing module") internals_group.add_argument( '--custom-typeshed-dir', metavar='DIR', help="use the custom typeshed in DIR") internals_group.add_argument( '--shadow-file', nargs=2, metavar=('SOURCE_FILE', 'SHADOW_FILE'), dest='shadow_file', action='append', help="when encountering SOURCE_FILE, read and type check " "the contents of SHADOW_FILE instead.") error_group = parser.add_argument_group( title='error reporting', description="Adjust the amount of detail shown in error messages.") add_invertible_flag('--show-error-context', default=False, dest='show_error_context', help='precede errors with "note:" messages explaining context', group=error_group) add_invertible_flag('--show-column-numbers', default=False, help="show column numbers in error messages", group=error_group) analysis_group = parser.add_argument_group( title='extra analysis', description="Extract additional information and analysis.") analysis_group.add_argument( 
'--stats', action='store_true', dest='dump_type_stats', help=argparse.SUPPRESS) analysis_group.add_argument( '--inferstats', action='store_true', dest='dump_inference_stats', help=argparse.SUPPRESS) analysis_group.add_argument( '--find-occurrences', metavar='CLASS.MEMBER', dest='special-opts:find_occurrences', help="print out all usages of a class member (experimental)") strict_help = "strict mode; enables the following flags: {}".format( ", ".join(strict_flag_names)) strictness_group.add_argument( '--strict', action='store_true', dest='special-opts:strict', help=strict_help) report_group = parser.add_argument_group( title='report generation', description='Generate a report in the specified format.') for report_type in sorted(reporter_classes): report_group.add_argument('--%s-report' % report_type.replace('_', '-'), metavar='DIR', dest='special-opts:%s_report' % report_type) other_group = parser.add_argument_group( title='miscellaneous', description="Other miscellaneous flags.") other_group.add_argument( '--junit-xml', help="write junit.xml to the given file") other_group.add_argument( '--scripts-are-modules', action='store_true', help="script x becomes module x instead of __main__") if server_options: # TODO: This flag is superfluous; remove after a short transition (2018-03-16) other_group.add_argument( '--experimental', action='store_true', dest='fine_grained_incremental', help="enable fine-grained incremental mode") other_group.add_argument( '--use-fine-grained-cache', action='store_true', help="use the cache in fine-grained incremental mode") # hidden options # --debug-cache will disable any cache-related compressions/optimizations, # which will make the cache writing process output pretty-printed JSON (which # is easier to debug). parser.add_argument('--debug-cache', action='store_true', help=argparse.SUPPRESS) # --dump-deps will dump all fine-grained dependencies to stdout parser.add_argument('--dump-deps', action='store_true', help=argparse.SUPPRESS) # --dump-graph will dump the contents of the graph of SCCs and exit. parser.add_argument('--dump-graph', action='store_true', help=argparse.SUPPRESS) # --semantic-analysis-only does exactly that. parser.add_argument('--semantic-analysis-only', action='store_true', help=argparse.SUPPRESS) # --local-partial-types disallows partial types spanning module top level and a function # (implicitly defined in fine-grained incremental mode) parser.add_argument('--local-partial-types', action='store_true', help=argparse.SUPPRESS) # --bazel changes some behaviors for use with Bazel (https://bazel.build). parser.add_argument('--bazel', action='store_true', help=argparse.SUPPRESS) # --package-root adds a directory below which directories are considered # packages even without __init__.py. May be repeated. parser.add_argument('--package-root', metavar='ROOT', action='append', default=[], help=argparse.SUPPRESS) # --cache-map FILE ... gives a mapping from source files to cache files. # Each triple of arguments is a source file, a cache meta file, and a cache data file. # Modules not mentioned in the file will go through cache_dir. # Must be followed by another flag or by '--' (and then only file args may follow). 
    parser.add_argument('--cache-map', nargs='+', dest='special-opts:cache_map',
                        help=argparse.SUPPRESS)

    # deprecated options
    parser.add_argument('--disallow-any', dest='special-opts:disallow_any',
                        help=argparse.SUPPRESS)
    add_invertible_flag('--strict-boolean', default=False,
                        help=argparse.SUPPRESS)
    parser.add_argument('-f', '--dirty-stubs', action='store_true',
                        dest='special-opts:dirty_stubs',
                        help=argparse.SUPPRESS)
    parser.add_argument('--use-python-path', action='store_true',
                        dest='special-opts:use_python_path',
                        help=argparse.SUPPRESS)
    parser.add_argument('-s', '--silent-imports', action='store_true',
                        dest='special-opts:silent_imports',
                        help=argparse.SUPPRESS)
    parser.add_argument('--almost-silent', action='store_true',
                        dest='special-opts:almost_silent',
                        help=argparse.SUPPRESS)
    parser.add_argument('--fast-parser', action='store_true',
                        dest='special-opts:fast_parser',
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-fast-parser', action='store_true',
                        dest='special-opts:no_fast_parser',
                        help=argparse.SUPPRESS)

    code_group = parser.add_argument_group(title='specifying which code to type check')
    code_group.add_argument('-m', '--module', action='append', metavar='MODULE',
                            default=[], dest='special-opts:modules',
                            help="type-check module; can repeat for more modules")
    code_group.add_argument('-p', '--package', action='append', metavar='PACKAGE',
                            default=[], dest='special-opts:packages',
                            help="type-check package recursively; can be repeated")
    code_group.add_argument('-c', '--command', action='append', metavar='PROGRAM_TEXT',
                            dest='special-opts:command',
                            help="type-check program passed in as string")
    code_group.add_argument(metavar='files', nargs='*', dest='special-opts:files',
                            help="type-check given files or directories")

    # Parse arguments once into a dummy namespace so we can get the
    # filename for the config file and know if the user requested all strict options.
    dummy = argparse.Namespace()
    parser.parse_args(args, dummy)
    config_file = dummy.config_file
    if config_file is not None and not os.path.exists(config_file):
        parser.error("Cannot find config file '%s'" % config_file)

    # Parse config file first, so command line can override.
    options = Options()
    parse_config_file(options, config_file)

    # Set strict flags before parsing (if strict mode enabled), so other command
    # line options can override.
    if getattr(dummy, 'special-opts:strict'):
        for dest, value in strict_flag_assignments:
            setattr(options, dest, value)

    # Parse command line for real, using a split namespace.
    special_opts = argparse.Namespace()
    parser.parse_args(args, SplitNamespace(options, special_opts, 'special-opts:'))

    # --use-python-path is no longer supported; explain why.
    if special_opts.use_python_path:
        parser.error("Sorry, --use-python-path is no longer supported.\n"
                     "If you are trying this because your code depends on a library module,\n"
                     "you should really investigate how to obtain stubs for that module.\n"
                     "See https://github.com/python/mypy/issues/1411 for more discussion."
                     )

    # Process deprecated options
    if special_opts.disallow_any:
        print("--disallow-any option was split up into multiple flags. "
              "See http://mypy.readthedocs.io/en/latest/command_line.html#disallow-any-flags")
    if options.strict_boolean:
        print("Warning: --strict-boolean is deprecated; "
              "see https://github.com/python/mypy/issues/3195", file=sys.stderr)
    if special_opts.almost_silent:
        print("Warning: --almost-silent has been replaced by "
              "--follow-imports=errors", file=sys.stderr)
        if options.follow_imports == 'normal':
            options.follow_imports = 'errors'
    elif special_opts.silent_imports:
        print("Warning: --silent-imports has been replaced by "
              "--ignore-missing-imports --follow-imports=skip", file=sys.stderr)
        options.ignore_missing_imports = True
        if options.follow_imports == 'normal':
            options.follow_imports = 'skip'
    if special_opts.dirty_stubs:
        print("Warning: -f/--dirty-stubs is deprecated and no longer necessary. Mypy no longer "
              "checks the git status of stubs.", file=sys.stderr)
    if special_opts.fast_parser:
        print("Warning: --fast-parser is now the default (and only) parser.")
    if special_opts.no_fast_parser:
        print("Warning: --no-fast-parser no longer has any effect.  The fast parser "
              "is now mypy's default and only parser.")

    try:
        infer_python_version_and_executable(options, special_opts)
    except PythonExecutableInferenceError as e:
        parser.error(str(e))

    if special_opts.no_executable:
        options.python_executable = None

    # Check for invalid argument combinations.
    if require_targets:
        code_methods = sum(bool(c) for c in [special_opts.modules + special_opts.packages,
                                             special_opts.command,
                                             special_opts.files])
        if code_methods == 0:
            parser.error("Missing target module, package, files, or command.")
        elif code_methods > 1:
            parser.error("May only specify one of: module/package, files, or command.")

    # Check for overlapping `--always-true` and `--always-false` flags.
    overlap = set(options.always_true) & set(options.always_false)
    if overlap:
        parser.error("You can't make a variable always true and always false (%s)" %
                     ', '.join(sorted(overlap)))

    # Set build flags.
    if options.strict_optional_whitelist is not None:
        # TODO: Deprecate, then kill this flag
        options.strict_optional = True

    if special_opts.find_occurrences:
        experiments.find_occurrences = special_opts.find_occurrences.split('.')
        assert experiments.find_occurrences is not None
        if len(experiments.find_occurrences) < 2:
            parser.error("Can only find occurrences of class members.")
        if len(experiments.find_occurrences) != 2:
            parser.error("Can only find occurrences of non-nested class members.")

    # Set reports.
    for flag, val in vars(special_opts).items():
        if flag.endswith('_report') and val is not None:
            report_type = flag[:-7].replace('_', '-')
            report_dir = val
            options.report_dirs[report_type] = report_dir

    # Process --package-root.
    if options.package_root:
        process_package_roots(fscache, parser, options)

    # Process --cache-map.
    if special_opts.cache_map:
        process_cache_map(parser, special_opts, options)

    # Let quick_and_dirty imply incremental.
    if options.quick_and_dirty:
        options.incremental = True

    # Set target.
    if special_opts.modules + special_opts.packages:
        options.build_type = BuildType.MODULE
        lib_path = [os.getcwd()] + build.mypy_path()
        targets = []
        # TODO: use the same cache that the BuildManager will
        cache = build.FindModuleCache(fscache)
        for p in special_opts.packages:
            if os.sep in p or os.altsep and os.altsep in p:
                fail("Package name '{}' cannot have a slash in it.".format(p))
            p_targets = cache.find_modules_recursive(p, tuple(lib_path),
                                                     options.python_executable)
            if not p_targets:
                fail("Can't find package '{}'".format(p))
            targets.extend(p_targets)
        for m in special_opts.modules:
            targets.append(BuildSource(None, m, None))
        return targets, options
    elif special_opts.command:
        options.build_type = BuildType.PROGRAM_TEXT
        targets = [BuildSource(None, None, '\n'.join(special_opts.command))]
        return targets, options
    else:
        try:
            targets = create_source_list(special_opts.files, options, fscache)
        except InvalidSourceList as e:
            fail(str(e))
        return targets, options
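
For orientation, a minimal usage sketch (not from the source) of the entry point above: the flag list and the file name are invented for the example; process_options(), BuildSource and the Options attributes are the names already used in the code.

# Illustrative only: parse a made-up command line and inspect the result.
# 'prog.py' and the chosen flags are hypothetical.
targets, options = process_options(['--strict', '--show-column-numbers', 'prog.py'])
assert options.warn_return_any          # turned on by --strict via strict_flag_assignments
for source in targets:
    print(source.path, source.module)   # each target is a BuildSource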
def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> None:
    bench = testcase.config.getoption('--bench', False) and 'Benchmark' in testcase.name

    options = Options()
    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.strict_optional = True
    # N.B: We try to (and ought to!) run with the current
    # version of python, since we are going to link and run
    # against the current version of python.
    # But a lot of the tests use type annotations so we can't say it is 3.5.
    options.python_version = max(sys.version_info[:2], (3, 6))
    options.export_types = True
    options.preserve_asts = True
    options.incremental = False

    # Avoid checking modules/packages named 'unchecked', to provide a way
    # to test interacting with code we don't have types for.
    options.per_module_options['unchecked.*'] = {'follow_imports': 'error'}

    source = build.BuildSource('native.py', 'native', None)
    sources = [source]
    module_names = ['native']
    module_paths = ['native.py']

    # Hard code another module name to compile in the same compilation unit.
    to_delete = []
    for fn, text in testcase.files:
        fn = os.path.relpath(fn, test_temp_dir)

        if os.path.basename(fn).startswith('other') and fn.endswith('.py'):
            name = os.path.basename(fn).split('.')[0]
            module_names.append(name)
            sources.append(build.BuildSource(fn, name, None))
            to_delete.append(fn)
            module_paths.append(fn)

            shutil.copyfile(fn,
                            os.path.join(os.path.dirname(fn), name + '_interpreted.py'))

    for source in sources:
        options.per_module_options.setdefault(source.module, {})['mypyc'] = True

    separate = (self.get_separate('\n'.join(testcase.input), incremental_step)
                if self.separate else False)

    groups = construct_groups(sources, separate, len(module_names) > 1)

    try:
        result = emitmodule.parse_and_typecheck(sources=sources, options=options,
                                                alt_lib_path='.')
        errors = Errors()
        compiler_options = CompilerOptions(multi_file=self.multi_file, separate=self.separate)
        ir, cfiles = emitmodule.compile_modules_to_c(
            result,
            compiler_options=compiler_options,
            errors=errors,
            groups=groups,
        )
        if errors.num_errors:
            errors.flush_errors()
            assert False, "Compile error"
    except CompileError as e:
        for line in e.messages:
            print(line)
        assert False, 'Compile error'

    # Check that serialization works on this IR
    check_serialization_roundtrip(ir)

    setup_file = os.path.abspath(os.path.join(WORKDIR, 'setup.py'))
    # We pass the C file information to the build script via setup.py unfortunately
    with open(setup_file, 'w', encoding='utf-8') as f:
        f.write(setup_format.format(module_paths, separate, cfiles, self.multi_file))

    if not run_setup(setup_file, ['build_ext', '--inplace']):
        if testcase.config.getoption('--mypyc-showc'):
            show_c(cfiles)
        assert False, "Compilation failed"

    # Assert that an output file got created
    suffix = 'pyd' if sys.platform == 'win32' else 'so'
    assert glob.glob('native.*.{}'.format(suffix))

    driver_path = 'driver.py'
    env = os.environ.copy()
    env['MYPYC_RUN_BENCH'] = '1' if bench else '0'

    # XXX: This is an ugly hack.
    if 'MYPYC_RUN_GDB' in os.environ:
        if platform.system() == 'Darwin':
            subprocess.check_call(['lldb', '--', sys.executable, driver_path], env=env)
            assert False, ("Test can't pass in lldb mode. (And remember to pass -s to "
                           "pytest)")
        elif platform.system() == 'Linux':
            subprocess.check_call(['gdb', '--args', sys.executable, driver_path], env=env)
            assert False, ("Test can't pass in gdb mode. (And remember to pass -s to "
                           "pytest)")
        else:
            assert False, 'Unsupported OS'

    proc = subprocess.Popen([sys.executable, driver_path], stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env)
    output = proc.communicate()[0].decode('utf8')
    outlines = output.splitlines()

    if testcase.config.getoption('--mypyc-showc'):
        show_c(cfiles)
    if proc.returncode != 0:
        print()
        print('*** Exit status: %d' % proc.returncode)

    # Verify output.
    if bench:
        print('Test output:')
        print(output)
    else:
        if incremental_step == 1:
            msg = 'Invalid output'
            expected = testcase.output
        else:
            msg = 'Invalid output (step {})'.format(incremental_step)
            expected = testcase.output2.get(incremental_step, [])

        assert_test_output(testcase, outlines, msg, expected)

    if incremental_step > 1 and options.incremental:
        suffix = '' if incremental_step == 2 else str(incremental_step - 1)
        expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1)
        if expected_rechecked is not None:
            assert_module_equivalence('rechecked' + suffix,
                                      expected_rechecked,
                                      result.manager.rechecked_modules)
        expected_stale = testcase.expected_stale_modules.get(incremental_step - 1)
        if expected_stale is not None:
            assert_module_equivalence('stale' + suffix,
                                      expected_stale,
                                      result.manager.stale_modules)

    assert proc.returncode == 0
def test_stubs(args: argparse.Namespace, use_builtins_fixtures: bool = False) -> int:
    """This is stubtest! It's time to test the stubs!"""
    # Load the allowlist. This is a series of strings corresponding to Error.object_desc
    # Values in the dict will store whether we used the allowlist entry or not.
    allowlist = {
        entry: False
        for allowlist_file in args.allowlist
        for entry in get_allowlist_entries(allowlist_file)
    }
    allowlist_regexes = {entry: re.compile(entry) for entry in allowlist}

    # If we need to generate an allowlist, we store Error.object_desc for each error here.
    generated_allowlist = set()

    modules = args.modules
    if args.check_typeshed:
        assert not args.modules, "Cannot pass both --check-typeshed and a list of modules"
        modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir)
        annoying_modules = {"antigravity", "this"}
        modules = [m for m in modules if m not in annoying_modules]

    assert modules, "No modules to check"

    options = Options()
    options.incremental = False
    options.custom_typeshed_dir = args.custom_typeshed_dir
    options.config_file = args.mypy_config_file
    options.use_builtins_fixtures = use_builtins_fixtures

    if options.config_file:
        def set_strict_flags() -> None:  # not needed yet
            return
        parse_config_file(options, set_strict_flags, options.config_file, sys.stdout, sys.stderr)

    try:
        modules = build_stubs(modules, options, find_submodules=not args.check_typeshed)
    except RuntimeError:
        return 1

    exit_code = 0
    for module in modules:
        for error in test_module(module):
            # Filter errors
            if args.ignore_missing_stub and error.is_missing_stub():
                continue
            if args.ignore_positional_only and error.is_positional_only_related():
                continue
            if error.object_desc in allowlist:
                allowlist[error.object_desc] = True
                continue
            is_allowlisted = False
            for w in allowlist:
                if allowlist_regexes[w].fullmatch(error.object_desc):
                    allowlist[w] = True
                    is_allowlisted = True
                    break
            if is_allowlisted:
                continue

            # We have errors, so change exit code, and output whatever necessary
            exit_code = 1
            if args.generate_allowlist:
                generated_allowlist.add(error.object_desc)
                continue
            print(error.get_description(concise=args.concise))

    # Print unused allowlist entries
    if not args.ignore_unused_allowlist:
        for w in allowlist:
            # Don't consider an entry unused if it regex-matches the empty string
            # This lets us allowlist errors that don't manifest at all on some systems
            if not allowlist[w] and not allowlist_regexes[w].fullmatch(""):
                exit_code = 1
                print("note: unused allowlist entry {}".format(w))

    # Print the generated allowlist
    if args.generate_allowlist:
        for e in sorted(generated_allowlist):
            print(e)
        exit_code = 0

    return exit_code
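
The args namespace consumed by this newer test_stubs comes from stubtest's own command-line parser, which is not part of this excerpt. A minimal sketch covering just the attributes read above follows; the option spellings and defaults are assumptions, not the real parser.

import argparse
import sys

def parse_stubtest_args_sketch(argv):
    # Hypothetical parser: only the attributes test_stubs() accesses above.
    parser = argparse.ArgumentParser(description="Compare stubs against the runtime.")
    parser.add_argument("modules", nargs="*", help="modules to check")
    parser.add_argument("--check-typeshed", action="store_true")
    parser.add_argument("--custom-typeshed-dir", metavar="DIR", default=None)
    parser.add_argument("--allowlist", action="append", default=[], metavar="FILE")
    parser.add_argument("--generate-allowlist", action="store_true")
    parser.add_argument("--ignore-missing-stub", action="store_true")
    parser.add_argument("--ignore-positional-only", action="store_true")
    parser.add_argument("--ignore-unused-allowlist", action="store_true")
    parser.add_argument("--concise", action="store_true")
    parser.add_argument("--mypy-config-file", metavar="FILE", default=None)
    return parser.parse_args(argv)

# e.g.: sys.exit(test_stubs(parse_stubtest_args_sketch(sys.argv[1:])))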
def test_stubs(args: argparse.Namespace) -> int:
    """This is stubtest! It's time to test the stubs!"""
    # Load the whitelist. This is a series of strings corresponding to Error.object_desc
    # Values in the dict will store whether we used the whitelist entry or not.
    whitelist = {
        entry: False
        for whitelist_file in args.whitelist
        for entry in get_whitelist_entries(whitelist_file)
    }

    # If we need to generate a whitelist, we store Error.object_desc for each error here.
    generated_whitelist = set()

    modules = args.modules
    if args.check_typeshed:
        assert not args.modules, "Cannot pass both --check-typeshed and a list of modules"
        modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir)
        modules.remove("antigravity")  # it's super annoying

    assert modules, "No modules to check"

    options = Options()
    options.incremental = False
    options.custom_typeshed_dir = args.custom_typeshed_dir

    try:
        modules = build_stubs(modules, options, find_submodules=not args.check_typeshed)
    except RuntimeError:
        return 1

    exit_code = 0
    for module in modules:
        for error in test_module(module):
            # Filter errors
            if args.ignore_missing_stub and error.is_missing_stub():
                continue
            if args.ignore_positional_only and error.is_positional_only_related():
                continue
            if error.object_desc in whitelist:
                whitelist[error.object_desc] = True
                continue

            # We have errors, so change exit code, and output whatever necessary
            exit_code = 1
            if args.generate_whitelist:
                generated_whitelist.add(error.object_desc)
                continue
            print(error.get_description(concise=args.concise))

    # Print unused whitelist entries
    for w in whitelist:
        if not whitelist[w]:
            exit_code = 1
            print("note: unused whitelist entry {}".format(w))

    # Print the generated whitelist
    if args.generate_whitelist:
        for e in sorted(generated_whitelist):
            print(e)
        exit_code = 0

    return exit_code
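
get_whitelist_entries (and its get_allowlist_entries counterpart in the newer version above) is defined elsewhere. Given how the entries are used, a plausible minimal reading is sketched below; the file format (one Error.object_desc per line, '#' comments, blank lines ignored) is an assumption.

from typing import Iterator

def get_whitelist_entries_sketch(path: str) -> Iterator[str]:
    # Assumed format: one object description per line; '#' starts a comment
    # and blank lines are skipped.
    with open(path) as f:
        for line in f:
            entry = line.split("#")[0].strip()
            if entry:
                yield entry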
def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> None:
    bench = testcase.config.getoption('--bench', False) and 'Benchmark' in testcase.name

    options = Options()
    options.use_builtins_fixtures = True
    options.show_traceback = True
    options.strict_optional = True
    options.python_version = sys.version_info[:2]
    options.export_types = True
    options.preserve_asts = True
    options.incremental = self.separate

    # Avoid checking modules/packages named 'unchecked', to provide a way
    # to test interacting with code we don't have types for.
    options.per_module_options['unchecked.*'] = {'follow_imports': 'error'}

    source = build.BuildSource('native.py', 'native', None)
    sources = [source]
    module_names = ['native']
    module_paths = ['native.py']

    # Hard code another module name to compile in the same compilation unit.
    to_delete = []
    for fn, text in testcase.files:
        fn = os.path.relpath(fn, test_temp_dir)

        if os.path.basename(fn).startswith('other') and fn.endswith('.py'):
            name = fn.split('.')[0].replace(os.sep, '.')
            module_names.append(name)
            sources.append(build.BuildSource(fn, name, None))
            to_delete.append(fn)
            module_paths.append(fn)
            shutil.copyfile(fn,
                            os.path.join(os.path.dirname(fn), name + '_interpreted.py'))

    for source in sources:
        options.per_module_options.setdefault(source.module, {})['mypyc'] = True

    separate = (self.get_separate('\n'.join(testcase.input), incremental_step)
                if self.separate else False)

    groups = construct_groups(sources, separate, len(module_names) > 1)

    try:
        compiler_options = CompilerOptions(multi_file=self.multi_file, separate=self.separate)
        result = emitmodule.parse_and_typecheck(
            sources=sources,
            options=options,
            compiler_options=compiler_options,
            groups=groups,
            alt_lib_path='.')
        errors = Errors()
        ir, cfiles = emitmodule.compile_modules_to_c(
            result,
            compiler_options=compiler_options,
            errors=errors,
            groups=groups,
        )
        if errors.num_errors:
            errors.flush_errors()
            assert False, "Compile error"
    except CompileError as e:
        for line in e.messages:
            print(fix_native_line_number(line, testcase.file, testcase.line))
        assert False, 'Compile error'

    # Check that serialization works on this IR. (Only on the first
    # step because the returned ir only includes updated code.)
    if incremental_step == 1:
        check_serialization_roundtrip(ir)

    opt_level = int(os.environ.get('MYPYC_OPT_LEVEL', 0))
    debug_level = int(os.environ.get('MYPYC_DEBUG_LEVEL', 0))

    setup_file = os.path.abspath(os.path.join(WORKDIR, 'setup.py'))
    # We pass the C file information to the build script via setup.py unfortunately
    with open(setup_file, 'w', encoding='utf-8') as f:
        f.write(setup_format.format(module_paths,
                                    separate,
                                    cfiles,
                                    self.multi_file,
                                    opt_level,
                                    debug_level))

    if not run_setup(setup_file, ['build_ext', '--inplace']):
        if testcase.config.getoption('--mypyc-showc'):
            show_c(cfiles)
        assert False, "Compilation failed"

    # Assert that an output file got created
    suffix = 'pyd' if sys.platform == 'win32' else 'so'
    assert glob.glob(f'native.*.{suffix}') or glob.glob(f'native.{suffix}')

    driver_path = 'driver.py'
    if not os.path.isfile(driver_path):
        # No driver.py provided by test case. Use the default one
        # (mypyc/test-data/driver/driver.py) that calls each
        # function named test_*.
        default_driver = os.path.join(
            os.path.dirname(__file__), '..', 'test-data', 'driver', 'driver.py')
        shutil.copy(default_driver, driver_path)
    env = os.environ.copy()
    env['MYPYC_RUN_BENCH'] = '1' if bench else '0'

    # XXX: This is an ugly hack.
    if 'MYPYC_RUN_GDB' in os.environ:
        if platform.system() == 'Darwin':
            subprocess.check_call(['lldb', '--', sys.executable, driver_path], env=env)
            assert False, ("Test can't pass in lldb mode. (And remember to pass -s to "
                           "pytest)")
        elif platform.system() == 'Linux':
            subprocess.check_call(['gdb', '--args', sys.executable, driver_path], env=env)
            assert False, ("Test can't pass in gdb mode. (And remember to pass -s to "
                           "pytest)")
        else:
            assert False, 'Unsupported OS'

    proc = subprocess.Popen([sys.executable, driver_path], stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, env=env)
    output = proc.communicate()[0].decode('utf8')
    outlines = output.splitlines()

    if testcase.config.getoption('--mypyc-showc'):
        show_c(cfiles)
    if proc.returncode != 0:
        print()
        print('*** Exit status: %d' % proc.returncode)

    # Verify output.
    if bench:
        print('Test output:')
        print(output)
    else:
        if incremental_step == 1:
            msg = 'Invalid output'
            expected = testcase.output
        else:
            msg = f'Invalid output (step {incremental_step})'
            expected = testcase.output2.get(incremental_step, [])
        if not expected:
            # Tweak some line numbers, but only if the expected output is empty,
            # as tweaked output might not match expected output.
            outlines = [fix_native_line_number(line, testcase.file, testcase.line)
                        for line in outlines]
        assert_test_output(testcase, outlines, msg, expected)

    if incremental_step > 1 and options.incremental:
        suffix = '' if incremental_step == 2 else str(incremental_step - 1)
        expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1)
        if expected_rechecked is not None:
            assert_module_equivalence(
                'rechecked' + suffix,
                expected_rechecked, result.manager.rechecked_modules)
        expected_stale = testcase.expected_stale_modules.get(incremental_step - 1)
        if expected_stale is not None:
            assert_module_equivalence(
                'stale' + suffix,
                expected_stale, result.manager.stale_modules)

    assert proc.returncode == 0
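
The default driver mentioned in the newer run_case_step (mypyc/test-data/driver/driver.py) is not included in this excerpt. Going by the comment above, it amounts to importing the compiled module and calling every test_* function; the sketch below is only an illustration of that idea, not the actual file.

# Rough sketch of the default driver's behaviour as described above:
# import the freshly built 'native' extension and run each test_* function.
import native

for name in dir(native):
    if name.startswith('test_'):
        getattr(native, name)()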