Example #1
    def fine_grained_increment(self,
                               sources: List[BuildSource]) -> Dict[str, Any]:
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        t0 = time.time()
        self.update_sources(sources)
        changed, removed = self.find_changed(sources)
        # TODO: Why create a new FileSystemCache rather than using self.fscache?
        manager.search_paths = compute_search_paths(sources, manager.options,
                                                    manager.data_dir,
                                                    FileSystemCache())
        t1 = time.time()
        messages = self.fine_grained_manager.update(changed, removed)
        t2 = time.time()
        manager.log(
            "fine-grained increment: find_changed: {:.3f}s, update: {:.3f}s".format(
                t1 - t0, t2 - t1))
        status = 1 if messages else 0
        self.previous_sources = sources
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }
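
The method above returns a plain response dictionary ('out', 'err', 'status'). A minimal sketch of how a caller could consume it; the server and sources objects are placeholders, not part of the snippet:

    response = server.fine_grained_increment(sources)
    if response['status'] != 0:
        # 'out' already terminates every message with a newline.
        print(response['out'], end='')
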
Example #2
def create_source_list(files: Sequence[str],
                       options: Options,
                       fscache: Optional[FileSystemCache] = None,
                       allow_empty_dir: bool = False) -> List[BuildSource]:
    """From a list of source files/directories, makes a list of BuildSources.

    Raises InvalidSourceList on errors.
    """
    fscache = fscache or FileSystemCache()
    finder = SourceFinder(fscache)

    targets = []
    for f in files:
        if f.endswith(PY_EXTENSIONS):
            # Can raise InvalidSourceList if a directory doesn't have a valid module name.
            name, base_dir = finder.crawl_up(os.path.normpath(f))
            targets.append(BuildSource(f, name, None, base_dir))
        elif fscache.isdir(f):
            sub_targets = finder.expand_dir(os.path.normpath(f))
            if not sub_targets and not allow_empty_dir:
                raise InvalidSourceList(
                    "There are no .py[i] files in directory '{}'".format(f))
            targets.extend(sub_targets)
        else:
            mod = os.path.basename(f) if options.scripts_are_modules else None
            targets.append(BuildSource(f, mod, None))
    return targets
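
A minimal usage sketch for this helper, sharing one FileSystemCache between source discovery and a later build; the paths are placeholders and the import locations are assumptions based on the mypy code base:

    from mypy.fscache import FileSystemCache
    from mypy.options import Options

    fscache = FileSystemCache()
    options = Options()
    # Each BuildSource carries the file path, the inferred module name and its base dir.
    for src in create_source_list(['pkg', 'script.py'], options, fscache=fscache):
        print(src.module, src.path)
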
Example #3
def run_mypy_typechecking(cmd_options: List[str]) -> int:
    fscache = FileSystemCache()
    sources, options = process_options(cmd_options, fscache=fscache)

    error_messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        error_messages.extend(new_messages)
        f = sys.stderr if serious else sys.stdout
        try:
            for msg in new_messages:
                f.write(msg + "\n")
            f.flush()
        except BrokenPipeError:
            sys.exit(ReturnCodes.FATAL_ERROR)

    try:
        build.build(sources,
                    options,
                    flush_errors=flush_errors,
                    fscache=fscache)

    except SystemExit as sysexit:
        return sysexit.code
    finally:
        fscache.flush()

    if error_messages:
        return ReturnCodes.FAIL

    return ReturnCodes.SUCCESS
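
A sketch of driving the wrapper above from a script or test; the file name is a placeholder and ReturnCodes is the same enum the function already references:

    rc = run_mypy_typechecking(['--strict', 'example.py'])
    assert rc in (ReturnCodes.SUCCESS, ReturnCodes.FAIL)
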
Example #4
 def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
     self.fscache = FileSystemCache(self.options.python_version)
     self.fswatcher = FileSystemWatcher(self.fscache)
     self.update_sources(sources)
     # Stores the initial state of sources as a side effect.
     self.fswatcher.find_changed()
     try:
         # TODO: alt_lib_path
         result = mypy.build.build(sources=sources,
                                   options=self.options)
     except mypy.errors.CompileError as e:
         output = ''.join(s + '\n' for s in e.messages)
         if e.use_stdout:
             out, err = output, ''
         else:
             out, err = '', output
         return {'out': out, 'err': err, 'status': 2}
     messages = result.errors
     manager = result.manager
     graph = result.graph
     self.fine_grained_manager = mypy.server.update.FineGrainedBuildManager(manager, graph)
     status = 1 if messages else 0
     self.previous_messages = messages[:]
     self.fine_grained_initialized = True
     self.previous_sources = sources
     self.fscache.flush()
     return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}
Example #5
    def __init__(self,
                 options: Options,
                 timeout: Optional[int] = None,
                 alt_lib_path: Optional[str] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        self.timeout = timeout
        self.alt_lib_path = alt_lib_path
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        if os.path.isfile(STATUS_FILE):
            os.unlink(STATUS_FILE)

        self.fscache = FileSystemCache(self.options.python_version)

        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            options.cache_fine_grained = True  # set this so that cache options match
        else:
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
Example #6
    def __init__(self,
                 options: Options,
                 status_file: str,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        if os.path.isfile(status_file):
            os.unlink(status_file)

        self.fscache = FileSystemCache()

        options.raise_exceptions = True
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
        self.status_file = status_file

        # Since the object is created in the parent process we can check
        # the output terminal options here.
        self.formatter = FancyFormatter(sys.stdout, sys.stderr,
                                        options.show_error_codes)
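
A hedged instantiation sketch for the constructor above; the enclosing class is assumed here to be the daemon's Server class and the status-file name is a placeholder, neither appears in the snippet itself:

    from mypy.options import Options

    options = Options()
    options.use_fine_grained_cache = False   # takes the cache_dir = os.devnull branch above
    server = Server(options, status_file='.dmypy.json', timeout=None)
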
Example #7
    def test_filter_out_missing_top_level_packages(self) -> None:
        with tempfile.TemporaryDirectory() as td:
            self.make_file(td, 'base/a/')
            self.make_file(td, 'base/b.py')
            self.make_file(td, 'base/c.pyi')
            self.make_file(td, 'base/missing.txt')
            self.make_file(td, 'typeshed/d.pyi')
            self.make_file(td, 'typeshed/@python2/e')
            self.make_file(td, 'pkg1/f-stubs')
            self.make_file(td, 'pkg2/g-python2-stubs')
            self.make_file(td, 'mpath/sub/long_name/')

            def makepath(p: str) -> str:
                return os.path.join(td, p)

            search = SearchPaths(python_path=(makepath('base'),),
                                 mypy_path=(makepath('mpath/sub'),),
                                 package_path=(makepath('pkg1'), makepath('pkg2')),
                                 typeshed_path=(makepath('typeshed'),))
            fscache = FileSystemCache()
            res = filter_out_missing_top_level_packages(
                {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'long_name', 'ff', 'missing'},
                search,
                fscache)
            assert res == {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'long_name'}
Example #8
    def __init__(self,
                 options: Options,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        if os.path.isfile(STATUS_FILE):
            os.unlink(STATUS_FILE)

        self.fscache = FileSystemCache()

        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
Example #9
def create_source_list(paths: Sequence[str],
                       options: Options,
                       fscache: Optional[FileSystemCache] = None,
                       allow_empty_dir: bool = False) -> List[BuildSource]:
    """From a list of source files/directories, makes a list of BuildSources.

    Raises InvalidSourceList on errors.
    """
    fscache = fscache or FileSystemCache()
    finder = SourceFinder(fscache, options)

    sources = []
    for path in paths:
        path = os.path.normpath(path)
        if path.endswith(PY_EXTENSIONS):
            # Can raise InvalidSourceList if a directory doesn't have a valid module name.
            name, base_dir = finder.crawl_up(path)
            sources.append(BuildSource(path, name, None, base_dir))
        elif fscache.isdir(path):
            sub_sources = finder.find_sources_in_dir(path)
            if not sub_sources and not allow_empty_dir:
                raise InvalidSourceList(
                    f"There are no .py[i] files in directory '{path}'")
            sources.extend(sub_sources)
        else:
            mod = os.path.basename(path) if options.scripts_are_modules else None
            sources.append(BuildSource(path, mod, None))
    return sources
Example #10
    def initialize_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        self.fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(self.fscache)
        self.update_sources(sources)
        if not self.options.use_fine_grained_cache:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()
        try:
            # TODO: alt_lib_path
            result = mypy.build.build(sources=sources, options=self.options)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        manager = result.manager
        graph = result.graph
        self.fine_grained_manager = mypy.server.update.FineGrainedBuildManager(
            manager, graph)
        self.fine_grained_initialized = True
        self.previous_sources = sources
        self.fscache.flush()

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if self.options.use_fine_grained_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            # Run an update
            changed = self.find_changed(sources)
            if changed:
                messages = self.fine_grained_manager.update(changed)
            self.fscache.flush()

        status = 1 if messages else 0
        self.previous_messages = messages[:]
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }
Example #11
def mypyc_build(
    paths: List[str],
    compiler_options: CompilerOptions,
    *,
    separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False,
    only_compile_paths: Optional[Iterable[str]] = None,
    skip_cgen_input: Optional[Any] = None,
    always_use_shared_lib: bool = False
) -> Tuple[emitmodule.Groups, List[Tuple[List[str], List[str]]]]:
    """Do the front and middle end of mypyc building, producing and writing out C source."""
    fscache = FileSystemCache()
    mypyc_sources, all_sources, options = get_mypy_config(
        paths, only_compile_paths, compiler_options, fscache)

    # We generate a shared lib if there are multiple modules or if any
    # of the modules are in a package. (Because I didn't want to fuss
    # around with making the single module code handle packages.)
    use_shared_lib = (len(mypyc_sources) > 1
                      or any('.' in x.module for x in mypyc_sources)
                      or always_use_shared_lib)

    groups = construct_groups(mypyc_sources, separate, use_shared_lib)

    # We let the test harness just pass in the c file contents instead
    # so that it can do a corner-cutting version without full stubs.
    if not skip_cgen_input:
        group_cfiles, ops_text = generate_c(all_sources,
                                            options,
                                            groups,
                                            fscache,
                                            compiler_options=compiler_options)
        # TODO: unique names?
        write_file(os.path.join(compiler_options.target_dir, 'ops.txt'),
                   ops_text)
    else:
        group_cfiles = skip_cgen_input

    # Write out the generated C and collect the files for each group
    # Should this be here??
    group_cfilenames = []  # type: List[Tuple[List[str], List[str]]]
    for cfiles in group_cfiles:
        cfilenames = []
        for cfile, ctext in cfiles:
            cfile = os.path.join(compiler_options.target_dir, cfile)
            write_file(cfile, ctext)
            if os.path.splitext(cfile)[1] == '.c':
                cfilenames.append(cfile)

        deps = [
            os.path.join(compiler_options.target_dir, dep)
            for dep in get_header_deps(cfiles)
        ]
        group_cfilenames.append((cfilenames, deps))

    return groups, group_cfilenames
Example #12
    def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        # The file system cache we create gets passed off to
        # BuildManager, and thence to FineGrainedBuildManager, which
        # assumes responsibility for clearing it after updates.
        fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(fscache)
        self.update_sources(sources)
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=fscache,
                                      alt_lib_path=self.alt_lib_path)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        fscache.flush()
        status = 1 if messages else 0
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}
Example #13
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache] = None,
              options: Optional[Options] = None) -> None:
     self.search_paths = search_paths
     self.fscache = fscache or FileSystemCache()
     # Cache find_lib_path_dirs: (dir_chain, search_paths) -> list(package_dirs, should_verify)
     self.dirs = {}  # type: Dict[Tuple[str, Tuple[str, ...]], PackageDirs]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, Optional[str]]
     self.options = options
Example #14
 def __init__(self, search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options]) -> None:
     self.search_paths = search_paths
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components = {}  # type: Dict[Tuple[str, ...], Dict[str, List[str]]]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, ModuleSearchResult]
     self.ns_ancestors = {}  # type: Dict[str, str]
     self.options = options
Example #15
 def _make_manager(self) -> BuildManager:
     errors = Errors()
     options = Options()
     fscache = FileSystemCache()
     manager = BuildManager(
         data_dir='',
         lib_path=[],
         ignore_prefix='',
         source_set=BuildSourceSet([]),
         reports=Reports('', {}),
         options=options,
         version_id=__version__,
         plugin=Plugin(options),
         errors=errors,
         flush_errors=lambda msgs, serious: None,
         fscache=fscache,
     )
     return manager
Example #16
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options]) -> None:
     self.search_paths = search_paths
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components = {}  # type: Dict[Tuple[str, ...], Dict[str, List[str]]]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, ModuleSearchResult]
     self.ns_ancestors = {}  # type: Dict[str, str]
     self.options = options
     custom_typeshed_dir = None
     if options:
         custom_typeshed_dir = options.custom_typeshed_dir
     self.stdlib_py_versions = load_stdlib_py_versions(custom_typeshed_dir)
     self.python2 = options and options.python_version[0] == 2
Example #17
 def _make_manager(self) -> BuildManager:
     errors = Errors()
     options = Options()
     fscache = FileSystemCache()
     search_paths = SearchPaths((), (), (), ())
     manager = BuildManager(
         data_dir='',
         search_paths=search_paths,
         ignore_prefix='',
         source_set=BuildSourceSet([]),
         reports=Reports('', {}),
         options=options,
         version_id=__version__,
         plugin=Plugin(options),
         plugins_snapshot={},
         errors=errors,
         flush_errors=lambda msgs, serious: None,
         fscache=fscache,
         stdout=sys.stdout,
         stderr=sys.stderr,
     )
     return manager
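
A sketch of how a test case might lean on the helper above; the suite object is hypothetical and stands in for the test class:

    manager = suite._make_manager()
    manager.log('manager constructed')   # BuildManager.log, as also used in Example #1
    manager.fscache.flush()              # assumes BuildManager keeps the cache as manager.fscache
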
Example #18
    def mypy_test(self):
        """
        Make sure that the generated python typechecks successfully
        """
        messages = []

        def flush_errors(new_messages, serious):
            messages.extend(new_messages)

        options = mypy.main.Options()
        options.allow_untyped_globals = True
        mypy.main.build.build(
            [mypy.main.BuildSource(path=self.test_file, module="")],
            options,
            None,
            flush_errors,
            FileSystemCache(),
            sys.stdout,
            sys.stderr,
        )
        for m in messages:
            print(m)
        self.assertFalse(messages)
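
Where low-level control over the FileSystemCache is not needed, a test like this can also be sketched against mypy's public API (an alternative, not what the example above does):

    from mypy import api

    stdout, stderr, exit_status = api.run([self.test_file])
    self.assertEqual(exit_status, 0, stdout)
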
Example #19
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options],
              stdlib_py_versions: Optional[StdlibVersions] = None,
              source_set: Optional[BuildSourceSet] = None) -> None:
     self.search_paths = search_paths
     self.source_set = source_set
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components: Dict[Tuple[str, ...], Dict[str, List[str]]] = {}
     # Cache find_module: id -> result
     self.results: Dict[str, ModuleSearchResult] = {}
     self.ns_ancestors: Dict[str, str] = {}
     self.options = options
     custom_typeshed_dir = None
     if options:
         custom_typeshed_dir = options.custom_typeshed_dir
     self.stdlib_py_versions = (
         stdlib_py_versions or load_stdlib_py_versions(custom_typeshed_dir)
     )
     self.python_major_ver = 3 if options is None else options.python_version[0]
Example #20
def main(
    script_path: Optional[str],
    stdout: TextIO,
    stderr: TextIO,
    args: Optional[List[str]] = None,
) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args,
                                       stdout=stdout,
                                       stderr=stderr,
                                       fscache=fscache)

    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        messages.extend(new_messages)
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache,
                          stdout, stderr)
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=stderr)
    if options.junit_xml:
        t1 = time.time()
        py_version = '{}_{}'.format(options.python_version[0],
                                    options.python_version[1])
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml,
                             py_version, options.platform)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()
    del res  # Now it's safe to delete

    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)
Example #21
def main(script_path: Optional[str],
         stdout: TextIO,
         stderr: TextIO,
         args: Optional[List[str]] = None,
         ) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    sys.setrecursionlimit(2 ** 14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, stdout=stdout, stderr=stderr,
                                       fscache=fscache)

    messages = []
    formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes)

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        if options.pretty:
            new_messages = formatter.fit_in_terminal(new_messages)
        messages.extend(new_messages)
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                if options.color_output:
                    msg = formatter.colorize(msg)
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache, stdout, stderr)
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file,
               ", ".join("[mypy-%s]" % glob for glob in options.per_module_options.keys()
                         if glob in options.unused_configs)),
              file=stderr)
    maybe_write_junit_xml(time.time() - t0, serious, messages, options)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.error_summary:
        if messages:
            n_errors, n_files = util.count_stats(messages)
            if n_errors:
                stdout.write(formatter.format_error(n_errors, n_files, len(sources),
                                                    options.color_output) + '\n')
        else:
            stdout.write(formatter.format_success(len(sources),
                                                  options.color_output) + '\n')
        stdout.flush()
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)

    # HACK: keep res alive so that mypyc won't free it before the hard_exit
    list([res])
Example #22
def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    # Check for known bad Python versions.
    if sys.version_info[:2] < (3, 4):
        sys.exit("Running mypy with Python 3.3 or lower is not supported; "
                 "please upgrade to 3.4 or newer")
    if sys.version_info[:3] == (3, 5, 0):
        sys.exit("Running mypy with Python 3.5.0 is not supported; "
                 "please upgrade to 3.5.1 or newer")

    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    if script_path:
        bin_dir = find_bin_directory(script_path)  # type: Optional[str]
    else:
        bin_dir = None
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, fscache=fscache)

    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        messages.extend(new_messages)
        f = sys.stderr if serious else sys.stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = type_check_only(sources, bin_dir, options, flush_errors,
                              fscache)  # noqa
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=sys.stderr)
    if options.junit_xml:
        t1 = time.time()
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    if messages:
        code = 2 if blockers else 1
        sys.exit(code)
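
Taken together, the FileSystemCache surface these examples actually exercise is small; a condensed sketch (the path is a placeholder):

    from mypy.fscache import FileSystemCache

    fscache = FileSystemCache()   # newer mypy; some older snippets above pass python_version
    if fscache.isdir('pkg'):      # stat()-backed check, cached for the current run (Examples #2, #9)
        pass
    fscache.flush()               # drop the cached state between builds (Examples #3, #4, #10)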