    def initialize_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        self.fswatcher = FileSystemWatcher(self.fscache)
        self.update_sources(sources)
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache,
                                      alt_lib_path=self.alt_lib_path)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }
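
(Illustrative sketch, not part of the original listing: a hypothetical caller could turn the {'out', 'err', 'status'} dict returned above into process output and an exit code. The status values mirror the logic above: 2 for a blocking CompileError, 1 when error messages were produced, 0 for a clean run.)

import sys
from typing import Any, Dict

def report(response: Dict[str, Any]) -> int:
    # Forward the captured daemon output to the real streams and
    # surface its status (0 = clean, 1 = errors, 2 = blocking error).
    sys.stdout.write(response['out'])
    sys.stderr.write(response['err'])
    return response['status']
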
Example #2
def main(
    script_path: Optional[str],
    stdout: TextIO,
    stderr: TextIO,
    args: Optional[List[str]] = None,
) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args,
                                       stdout=stdout,
                                       stderr=stderr,
                                       fscache=fscache)

    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        messages.extend(new_messages)
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache,
                          stdout, stderr)
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=stderr)
    if options.junit_xml:
        t1 = time.time()
        py_version = '{}_{}'.format(options.python_version[0],
                                    options.python_version[1])
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml,
                             py_version, options.platform)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()
    del res  # Now it's safe to delete

    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)
Example #3
def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    sys.setrecursionlimit(2 ** 14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, fscache=fscache)

    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        messages.extend(new_messages)
        f = sys.stderr if serious else sys.stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache)
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file,
               ", ".join("[mypy-%s]" % glob for glob in options.per_module_options.keys()
                         if glob in options.unused_configs)),
              file=sys.stderr)
    if options.junit_xml:
        t1 = time.time()
        py_version = '{}_{}'.format(options.python_version[0], options.python_version[1])
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml,
                             py_version, options.platform)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()
    del res  # Now it's safe to delete

    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)
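
(Rough sketch under the same mypy version assumed by the example above; run_streaming is a hypothetical helper, not part of mypy. It wires the flush_errors callback pattern shown above directly into build.build(), so diagnostics are printed as soon as each module is checked while still being collected for later use.)

import sys
from typing import List

from mypy import build
from mypy.errors import CompileError
from mypy.fscache import FileSystemCache
from mypy.main import process_options

def run_streaming(args: List[str]) -> List[str]:
    collected: List[str] = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        # Same routing as above: blocker/usage errors go to stderr,
        # ordinary type errors to stdout, and everything is kept.
        collected.extend(new_messages)
        stream = sys.stderr if serious else sys.stdout
        for msg in new_messages:
            stream.write(msg + '\n')
        stream.flush()

    fscache = FileSystemCache()
    sources, options = process_options(args, fscache=fscache)
    try:
        build.build(sources, options, None, flush_errors, fscache)
    except CompileError:
        pass  # blocking errors were already flushed through the callback
    return collected
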
Example #4
def main(script_path: Optional[str],
         stdout: TextIO,
         stderr: TextIO,
         args: Optional[List[str]] = None,
         ) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    sys.setrecursionlimit(2 ** 14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, stdout=stdout, stderr=stderr,
                                       fscache=fscache)

    messages = []
    formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes)

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        if options.pretty:
            new_messages = formatter.fit_in_terminal(new_messages)
        messages.extend(new_messages)
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                if options.color_output:
                    msg = formatter.colorize(msg)
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache, stdout, stderr)
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file,
               ", ".join("[mypy-%s]" % glob for glob in options.per_module_options.keys()
                         if glob in options.unused_configs)),
              file=stderr)
    maybe_write_junit_xml(time.time() - t0, serious, messages, options)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.error_summary:
        if messages:
            n_errors, n_files = util.count_stats(messages)
            if n_errors:
                stdout.write(formatter.format_error(n_errors, n_files, len(sources),
                                                    options.color_output) + '\n')
        else:
            stdout.write(formatter.format_success(len(sources),
                                                  options.color_output) + '\n')
        stdout.flush()
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)

    # HACK: keep res alive so that mypyc won't free it before the hard_exit
    list([res])
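
(Hypothetical CI wrapper, not from the original: the exit codes assigned above, 0 for a clean run, 1 for type errors, 2 for blocking errors or a broken pipe, are what external tooling keys on. The src/ path is a placeholder.)

import subprocess
import sys

# Run mypy as a subprocess and branch on the exit codes assigned above.
proc = subprocess.run([sys.executable, '-m', 'mypy', 'src/'])
if proc.returncode == 0:
    print('type check passed')
elif proc.returncode == 1:
    print('type errors were reported')
else:
    print('blocking error (e.g. syntax error or bad invocation)')
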
Example #5
def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    # Check for known bad Python versions.
    if sys.version_info[:2] < (3, 4):
        sys.exit("Running mypy with Python 3.3 or lower is not supported; "
                 "please upgrade to 3.4 or newer")
    if sys.version_info[:3] == (3, 5, 0):
        sys.exit("Running mypy with Python 3.5.0 is not supported; "
                 "please upgrade to 3.5.1 or newer")

    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    if script_path:
        bin_dir = find_bin_directory(script_path)  # type: Optional[str]
    else:
        bin_dir = None
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, fscache=fscache)

    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        messages.extend(new_messages)
        f = sys.stderr if serious else sys.stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            sys.exit(2)

    serious = False
    blockers = False
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = type_check_only(sources, bin_dir, options, flush_errors,
                              fscache)  # noqa
    except CompileError as e:
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=sys.stderr)
    if options.junit_xml:
        t1 = time.time()
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    if messages:
        code = 2 if blockers else 1
        sys.exit(code)
Example #6
    def initialize_fine_grained(self, sources: List[BuildSource], is_tty: bool,
                                terminal_width: int) -> Dict[str, Any]:
        self.fswatcher = FileSystemWatcher(self.fscache)
        t0 = time.time()
        self.update_sources(sources)
        t1 = time.time()
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)

        if self.following_imports():
            sources = find_all_sources_in_build(
                self.fine_grained_manager.graph, sources)
            self.update_sources(sources)

        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            t2 = time.time()
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             hash=meta.hash))

            changed, removed = self.find_changed(sources)
            changed += self.find_added_suppressed(
                self.fine_grained_manager.graph, set(),
                self.fine_grained_manager.manager.search_paths)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            t3 = time.time()
            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)

            if self.following_imports():
                # We need to do another update to any new files found by following imports.
                messages = self.fine_grained_increment_follow_imports(sources)

            t4 = time.time()
            self.fine_grained_manager.manager.add_stats(
                update_sources_time=t1 - t0,
                build_time=t2 - t1,
                find_changes_time=t3 - t2,
                fg_update_time=t4 - t3,
                files_changed=len(removed) + len(changed))

        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        messages = self.pretty_messages(messages, len(sources), is_tty,
                                        terminal_width)
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }
Example #7
    def initialize_fine_grained(self, sources: List[BuildSource]) -> Dict[str, Any]:
        self.fswatcher = FileSystemWatcher(self.fscache)
        t0 = time.time()
        self.update_sources(sources)
        t1 = time.time()
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            t2 = time.time()
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            t3 = time.time()
            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
            t4 = time.time()
            self.fine_grained_manager.manager.add_stats(
                update_sources_time=t1 - t0,
                build_time=t2 - t1,
                find_changes_time=t3 - t2,
                fg_update_time=t4 - t3,
                files_changed=len(removed) + len(changed))
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}