Example #1
0
    def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Do an initial full build and set up the fine-grained incremental machinery.

        Returns a response dict with 'out', 'err' and 'status' keys:
        status 2 on a blocking compile error, otherwise 1 if there were
        error messages and 0 if not.
        """
        # The file system cache we create gets passed off to
        # BuildManager, and thence to FineGrainedBuildManager, which
        # assumes responsibility for clearing it after updates.
        fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(fscache)
        self.update_sources(sources)
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=fscache,
                                      alt_lib_path=self.alt_lib_path)
        except mypy.errors.CompileError as e:
            # Blocking error: report it on the channel the error requests.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        fscache.flush()
        status = 1 if messages else 0
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}
Example #2
0
def create_source_list(paths: Sequence[str],
                       options: Options,
                       fscache: Optional[FileSystemCache] = None,
                       allow_empty_dir: bool = False) -> List[BuildSource]:
    """Turn a list of source file/directory paths into BuildSources.

    Raises InvalidSourceList on errors.
    """
    fscache = fscache or FileSystemCache()
    finder = SourceFinder(fscache, options)

    result = []
    for raw_path in paths:
        normalized = os.path.normpath(raw_path)
        if normalized.endswith(PY_EXTENSIONS):
            # crawl_up may raise InvalidSourceList if a directory doesn't
            # have a valid module name.
            module, base_dir = finder.crawl_up(normalized)
            result.append(BuildSource(normalized, module, None, base_dir))
            continue
        if fscache.isdir(normalized):
            found = finder.find_sources_in_dir(normalized)
            if not found and not allow_empty_dir:
                raise InvalidSourceList(
                    "There are no .py[i] files in directory '{}'".format(normalized))
            result.extend(found)
            continue
        module = os.path.basename(normalized) if options.scripts_are_modules else None
        result.append(BuildSource(normalized, module, None))
    return result
Example #3
0
def create_source_list(files: Sequence[str], options: Options,
                       fscache: Optional[FileSystemCache] = None,
                       allow_empty_dir: bool = False) -> List[BuildSource]:
    """Turn a list of source files/directories into BuildSources.

    Raises InvalidSourceList on errors.
    """
    fscache = fscache or FileSystemCache()
    finder = SourceFinder(fscache)

    result = []
    for fnam in files:
        if fnam.endswith(PY_EXTENSIONS):
            # crawl_up can raise InvalidSourceList if a directory doesn't
            # have a valid module name.
            result.append(BuildSource(fnam, finder.crawl_up(fnam), None))
        elif fscache.isdir(fnam):
            expanded = finder.expand_dir(fnam)
            if not expanded and not allow_empty_dir:
                raise InvalidSourceList("There are no .py[i] files in directory '{}'"
                                        .format(fnam))
            result.extend(expanded)
        else:
            result.append(BuildSource(
                fnam,
                os.path.basename(fnam) if options.scripts_are_modules else None,
                None))
    return result
Example #4
0
    def fine_grained_increment(self,
                               sources: List[BuildSource]) -> Dict[str, Any]:
        """Run a single fine-grained incremental update and return the response dict."""
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        find_start = time.time()
        self.update_sources(sources)
        changed, removed = self.find_changed(sources)
        # TODO: Why create a new FileSystemCache rather than using self.fscache?
        manager.search_paths = compute_search_paths(
            sources, manager.options, manager.data_dir, FileSystemCache())
        update_start = time.time()
        messages = self.fine_grained_manager.update(changed, removed)
        done = time.time()
        manager.log(
            "fine-grained increment: find_changed: {:.3f}s, update: {:.3f}s".
            format(update_start - find_start, done - update_start))
        self.previous_sources = sources
        return {
            'out': ''.join(msg + '\n' for msg in messages),
            'err': '',
            'status': 1 if messages else 0,
        }
Example #5
0
    def __init__(self, options: Options,
                 status_file: str,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        # Remove any stale status file left over from a previous run.
        if os.path.isfile(status_file):
            os.unlink(status_file)

        self.fscache = FileSystemCache()

        # Force the settings required for fine-grained incremental mode.
        # Note this mutates the caller's options object.
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            # Disable on-disk caching entirely when not using the cache.
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
        self.status_file = status_file
Example #6
0
    def test_filter_out_missing_top_level_packages(self) -> None:
        """Found entries survive filtering; 'ff' and 'missing' are dropped."""
        with tempfile.TemporaryDirectory() as tmpdir:
            # Create one representative entry for each recognized form
            # (package dir, .py, .pyi, -stubs, -python2-stubs, @python2).
            for rel in ('base/a/',
                        'base/b.py',
                        'base/c.pyi',
                        'base/missing.txt',
                        'typeshed/d.pyi',
                        'typeshed/@python2/e',
                        'pkg1/f-stubs',
                        'pkg2/g-python2-stubs',
                        'mpath/sub/long_name/'):
                self.make_file(tmpdir, rel)

            def makepath(p: str) -> str:
                return os.path.join(tmpdir, p)

            search = SearchPaths(python_path=(makepath('base'),),
                                 mypy_path=(makepath('mpath/sub'),),
                                 package_path=(makepath('pkg1'),
                                               makepath('pkg2')),
                                 typeshed_path=(makepath('typeshed'),))
            candidates = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'long_name', 'ff',
                          'missing'}
            res = filter_out_missing_top_level_packages(
                candidates, search, FileSystemCache())
            assert res == {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'long_name'}
Example #7
0
def get_sources(fscache: FileSystemCache, modules: Dict[str, str],
                changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
    """Return BuildSources for the changed modules whose files still exist."""
    return [BuildSource(path, module_id, None)
            for module_id, path in changed_modules
            if fscache.isfile(path)]
Example #8
0
def get_site_packages_dirs(
        python_executable: Optional[str],
        fscache: FileSystemCache) -> Tuple[List[str], List[str]]:
    """Find package directories for given python.

    This runs a subprocess call, which generates a list of the egg directories, and the site
    package directories. To avoid repeatedly calling a subprocess (which can be slow!) we
    lru_cache the results.

    Returns a tuple (egg directories, site-package directories).
    """
    def make_abspath(path: str, root: str) -> str:
        """Take a path and make it absolute relative to root if not already absolute."""
        if os.path.isabs(path):
            return os.path.normpath(path)
        else:
            return os.path.join(root, os.path.normpath(path))

    if python_executable is None:
        # No interpreter available: nothing we can discover.
        return [], []
    if python_executable == sys.executable:
        # Use running Python's package dirs
        site_packages = sitepkgs.getsitepackages()
    else:
        # Use subprocess to get the package directory of given Python
        # executable
        site_packages = ast.literal_eval(
            subprocess.check_output([python_executable, sitepkgs.__file__],
                                    stderr=subprocess.PIPE).decode())
    egg_dirs = []
    # 'site_dir' rather than 'dir' to avoid shadowing the builtin.
    for site_dir in site_packages:
        pth = os.path.join(site_dir, 'easy-install.pth')
        if fscache.isfile(pth):
            with open(pth) as f:
                # Iterate the file directly instead of materializing
                # readlines() into a throwaway list.
                egg_dirs.extend(make_abspath(line.rstrip(), site_dir)
                                for line in f)
    return egg_dirs, site_packages
Example #9
0
    def initialize_fine_grained(self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Run the initial build and set up fine-grained incremental state.

        Returns a response dict with 'out', 'err' and 'status' keys
        (status 2 on a compile error, else 1 if there were messages, 0 if not).
        """
        self.fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(self.fscache)
        self.update_sources(sources)
        if not self.options.use_fine_grained_cache:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()
        try:
            # TODO: alt_lib_path
            result = mypy.build.build(sources=sources,
                                      options=self.options)
        except mypy.errors.CompileError as e:
            # Blocking error: report on stdout or stderr as the error requests.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        manager = result.manager
        graph = result.graph
        self.fine_grained_manager = mypy.server.update.FineGrainedBuildManager(manager, graph)
        self.fine_grained_initialized = True
        self.previous_sources = sources
        self.fscache.flush()

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if self.options.use_fine_grained_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for meta, mypyfile, type_map in manager.saved_cache.values():
                if meta.mtime is None: continue
                self.fswatcher.set_file_data(
                    mypyfile.path,
                    FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

            # Run an update
            changed = self.find_changed(sources)
            if changed:
                messages = self.fine_grained_manager.update(changed)
            self.fscache.flush()

        status = 1 if messages else 0
        # Keep a copy of the messages we reported.
        self.previous_messages = messages[:]
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}
Example #10
0
def get_sources(fscache: FileSystemCache,
                modules: Dict[str, str],
                changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
    """Build a BuildSource list for changed modules that still exist on disk."""
    result = []
    for module_id, module_path in changed_modules:
        if not fscache.isfile(module_path):
            continue
        result.append(BuildSource(module_path, module_id, None))
    return result
Example #11
0
def verify_module(fscache: FileSystemCache, id: str, path: str) -> bool:
    """Check that all packages containing id have a __init__ file."""
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    # Walk up one directory per package component of the module id.
    for _ in range(id.count('.')):
        path = os.path.dirname(path)
        has_init = any(
            fscache.isfile_case(os.path.join(path, '__init__{}'.format(ext)))
            for ext in PYTHON_EXTENSIONS)
        if not has_init:
            return False
    return True
Example #12
0
def verify_module(fscache: FileSystemCache, id: str, path: str) -> bool:
    """Check that all packages containing id have a __init__ file."""
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    # One ancestor directory to verify per '.' in the module id.
    remaining = id.count('.')
    while remaining > 0:
        remaining -= 1
        path = os.path.dirname(path)
        if not any(fscache.isfile_case(os.path.join(path, '__init__{}'.format(extension)))
                   for extension in PYTHON_EXTENSIONS):
            return False
    return True
Example #13
0
def mypyc_build(
    paths: List[str],
    compiler_options: CompilerOptions,
    *,
    separate: Union[bool, List[Tuple[List[str], Optional[str]]]] = False,
    only_compile_paths: Optional[Iterable[str]] = None,
    skip_cgen_input: Optional[Any] = None,
    always_use_shared_lib: bool = False
) -> Tuple[emitmodule.Groups, List[Tuple[List[str], List[str]]]]:
    """Do the front and middle end of mypyc building, producing and writing out C source.

    Returns the module groups and, for each group, a pair of
    (.c file paths, header dependency paths).
    """
    fscache = FileSystemCache()
    mypyc_sources, all_sources, options = get_mypy_config(
        paths, only_compile_paths, compiler_options, fscache)

    # We generate a shared lib if there are multiple modules or if any
    # of the modules are in package. (Because I didn't want to fuss
    # around with making the single module code handle packages.)
    use_shared_lib = (len(mypyc_sources) > 1
                      or any('.' in x.module for x in mypyc_sources)
                      or always_use_shared_lib)

    groups = construct_groups(mypyc_sources, separate, use_shared_lib)

    # We let the test harness just pass in the c file contents instead
    # so that it can do a corner-cutting version without full stubs.
    if not skip_cgen_input:
        group_cfiles, ops_text = generate_c(all_sources,
                                            options,
                                            groups,
                                            fscache,
                                            compiler_options=compiler_options)
        # TODO: unique names?
        write_file(os.path.join(compiler_options.target_dir, 'ops.txt'),
                   ops_text)
    else:
        group_cfiles = skip_cgen_input

    # Write out the generated C and collect the files for each group
    # Should this be here??
    group_cfilenames = []  # type: List[Tuple[List[str], List[str]]]
    for cfiles in group_cfiles:
        cfilenames = []
        for cfile, ctext in cfiles:
            cfile = os.path.join(compiler_options.target_dir, cfile)
            write_file(cfile, ctext)
            # Only .c files are handed to the C compiler; headers are
            # tracked separately as dependencies below.
            if os.path.splitext(cfile)[1] == '.c':
                cfilenames.append(cfile)

        deps = [
            os.path.join(compiler_options.target_dir, dep)
            for dep in get_header_deps(cfiles)
        ]
        group_cfilenames.append((cfilenames, deps))

    return groups, group_cfilenames
Example #14
0
def highest_init_level(fscache: FileSystemCache, id: str, path: str) -> int:
    """Compute the highest level where an __init__ file is found."""
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    best = 0
    for depth in range(id.count('.')):
        path = os.path.dirname(path)
        found = any(fscache.isfile_case(os.path.join(path, '__init__{}'.format(ext)))
                    for ext in PYTHON_EXTENSIONS)
        if found:
            best = depth + 1
    return best
Example #15
0
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache] = None,
              options: Optional[Options] = None) -> None:
     """Initialize module-finding state with search paths and lookup caches."""
     self.search_paths = search_paths
     # Share the caller's file system cache if given, else create a private one.
     self.fscache = fscache or FileSystemCache()
     # Cache find_lib_path_dirs: (dir_chain, search_paths) -> list(package_dirs, should_verify)
     self.dirs = {}  # type: Dict[Tuple[str, Tuple[str, ...]], PackageDirs]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, Optional[str]]
     self.options = options
Example #16
0
def highest_init_level(fscache: FileSystemCache, id: str, path: str) -> int:
    """Compute the highest level where an __init__ file is found."""
    if path.endswith(('__init__.py', '__init__.pyi')):
        path = os.path.dirname(path)
    result = 0
    num_parents = id.count('.')
    for step in range(1, num_parents + 1):
        path = os.path.dirname(path)
        if any(fscache.isfile_case(os.path.join(path, '__init__{}'.format(suffix)))
               for suffix in PYTHON_EXTENSIONS):
            result = step
    return result
Example #17
0
def matches_exclude(subpath: str, exclude: str, fscache: FileSystemCache, verbose: bool) -> bool:
    """Return whether subpath matches the exclude regex (dirs get a trailing '/')."""
    if not exclude:
        return False
    normalized = os.path.relpath(subpath).replace(os.sep, "/")
    if fscache.isdir(subpath):
        normalized = normalized + "/"
    if not re.search(exclude, normalized):
        return False
    if verbose:
        print("TRACE: Excluding {}".format(normalized), file=sys.stderr)
    return True
Example #18
0
 def __init__(self, search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options]) -> None:
     """Initialize module search state and lookup caches."""
     self.search_paths = search_paths
     # Share the caller's file system cache if given, else create one.
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components = {
     }  # type: Dict[Tuple[str, ...], Dict[str, List[str]]]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, ModuleSearchResult]
     # NOTE(review): appears to map module id -> namespace ancestor path — confirm.
     self.ns_ancestors = {}  # type: Dict[str, str]
     self.options = options
Example #19
0
    def __init__(self, options: Options,
                 timeout: Optional[int] = None,
                 alt_lib_path: Optional[str] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        self.timeout = timeout
        self.alt_lib_path = alt_lib_path
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        # Remove any stale status file left over from a previous run.
        if os.path.isfile(STATUS_FILE):
            os.unlink(STATUS_FILE)

        self.fscache = FileSystemCache(self.options.python_version)

        # Force the settings required for fine-grained incremental mode.
        # Note this mutates the caller's options object.
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            options.cache_fine_grained = True  # set this so that cache options match
        else:
            # Disable on-disk caching entirely when not using the cache.
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
Example #20
0
def matches_exclude(subpath: str,
                    excludes: List[str],
                    fscache: FileSystemCache,
                    verbose: bool) -> bool:
    """Report whether subpath is matched by any regex pattern in excludes."""
    if not excludes:
        return False
    subpath_str = os.path.relpath(subpath).replace(os.sep, "/")
    if fscache.isdir(subpath):
        subpath_str += "/"
    # First matching pattern wins, exactly as a sequential scan would.
    exclude = next((pat for pat in excludes if re.search(pat, subpath_str)), None)
    if exclude is None:
        return False
    if verbose:
        print(f"TRACE: Excluding {subpath_str} (matches pattern {exclude})",
              file=sys.stderr)
    return True
Example #21
0
def refresh_suppressed_submodules(module: str, path: Optional[str],
                                  deps: Dict[str, Set[str]], graph: Graph,
                                  fscache: FileSystemCache) -> None:
    """Look for submodules that are now suppressed in target package.

    If a submodule a.b gets added, we need to mark it as suppressed
    in modules that contain "from a import b". Previously we assumed
    that 'a.b' is not a module but a regular name.

    This is only relevant when following imports normally.

    Args:
        module: target package in which to look for submodules
        path: path of the module
        deps: trigger -> set of dependent targets
        graph: build graph, keyed by module id
        fscache: file system cache used to list the package directory
    """
    if path is None or not path.endswith(INIT_SUFFIXES):
        # Only packages have submodules.
        return
    # Find any submodules present in the directory.
    pkgdir = os.path.dirname(path)
    for fnam in fscache.listdir(pkgdir):
        # Skip non-Python files, __init__ files, and names with extra
        # dots (e.g. 'a.b.py'), which can't be plain submodules.
        if (not fnam.endswith(('.py', '.pyi')) or fnam.startswith("__init__.")
                or fnam.count('.') != 1):
            continue
        shortname = fnam.split('.')[0]
        submodule = module + '.' + shortname
        trigger = make_trigger(submodule)
        if trigger in deps:
            for dep in deps[trigger]:
                # TODO: <...> deps, etc.
                state = graph.get(dep)
                if not state:
                    # Maybe it's a non-top-level target. We only care about the module.
                    dep_module = module_prefix(graph, dep)
                    if dep_module is not None:
                        state = graph.get(dep_module)
                if state:
                    tree = state.tree
                    assert tree  # TODO: What if doesn't exist?
                    for imp in tree.imports:
                        # Only "from <module> import <shortname>" imports are affected.
                        if isinstance(imp, ImportFrom):
                            if (imp.id == module
                                    and any(name == shortname
                                            for name, _ in imp.names)):
                                # TODO: Only if does not exist already
                                state.suppressed.append(submodule)
                                state.suppressed_set.add(submodule)
Example #22
0
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options]) -> None:
     """Initialize module search caches, options and stdlib version info."""
     self.search_paths = search_paths
     # Share the caller's file system cache if given, else create one.
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components = {}  # type: Dict[Tuple[str, ...], Dict[str, List[str]]]
     # Cache find_module: id -> result
     self.results = {}  # type: Dict[str, ModuleSearchResult]
     # NOTE(review): appears to map module id -> namespace ancestor path — confirm.
     self.ns_ancestors = {}  # type: Dict[str, str]
     self.options = options
     custom_typeshed_dir = None
     if options:
         custom_typeshed_dir = options.custom_typeshed_dir
     self.stdlib_py_versions = load_stdlib_py_versions(custom_typeshed_dir)
     # Falsy when options is None; True when targeting Python 2.
     self.python2 = options and options.python_version[0] == 2
Example #23
0
 def _make_manager(self) -> BuildManager:
     """Create a BuildManager with empty/default configuration for tests."""
     errors = Errors()
     options = Options()
     fscache = FileSystemCache()
     return BuildManager(
         data_dir='',
         lib_path=[],
         ignore_prefix='',
         source_set=BuildSourceSet([]),
         reports=Reports('', {}),
         options=options,
         version_id=__version__,
         plugin=Plugin(options),
         errors=errors,
         flush_errors=lambda msgs, serious: None,
         fscache=fscache,
     )
Example #24
0
 def _make_manager(self) -> BuildManager:
     """Create a BuildManager wired to stdout/stderr with empty configuration."""
     errors = Errors()
     options = Options()
     fscache = FileSystemCache()
     search_paths = SearchPaths((), (), (), ())
     return BuildManager(
         data_dir='',
         search_paths=search_paths,
         ignore_prefix='',
         source_set=BuildSourceSet([]),
         reports=Reports('', {}),
         options=options,
         version_id=__version__,
         plugin=Plugin(options),
         plugins_snapshot={},
         errors=errors,
         flush_errors=lambda msgs, serious: None,
         fscache=fscache,
         stdout=sys.stdout,
         stderr=sys.stderr,
     )
Example #25
0
    def mypy_test(self):
        """
        Make sure that the generated python typechecks successfully
        """
        collected = []

        def flush_errors(new_messages, serious):
            collected.extend(new_messages)

        options = mypy.main.Options()
        options.allow_untyped_globals = True
        sources = [mypy.main.BuildSource(path=self.test_file, module="")]
        mypy.main.build.build(
            sources,
            options,
            None,
            flush_errors,
            FileSystemCache(),
            sys.stdout,
            sys.stderr,
        )
        for message in collected:
            print(message)
        self.assertFalse(collected)
Example #26
0
 def __init__(self,
              search_paths: SearchPaths,
              fscache: Optional[FileSystemCache],
              options: Optional[Options],
              stdlib_py_versions: Optional[StdlibVersions] = None,
              source_set: Optional[BuildSourceSet] = None) -> None:
     """Initialize module search caches, options and stdlib version info."""
     self.search_paths = search_paths
     self.source_set = source_set
     # Share the caller's file system cache if given, else create one.
     self.fscache = fscache or FileSystemCache()
     # Cache for get_toplevel_possibilities:
     # search_paths -> (toplevel_id -> list(package_dirs))
     self.initial_components: Dict[Tuple[str, ...], Dict[str, List[str]]] = {}
     # Cache find_module: id -> result
     self.results: Dict[str, ModuleSearchResult] = {}
     # NOTE(review): appears to map module id -> namespace ancestor path — confirm.
     self.ns_ancestors: Dict[str, str] = {}
     self.options = options
     custom_typeshed_dir = None
     if options:
         custom_typeshed_dir = options.custom_typeshed_dir
     # Use injected stdlib versions if provided, else load from typeshed.
     self.stdlib_py_versions = (
         stdlib_py_versions or load_stdlib_py_versions(custom_typeshed_dir)
     )
     # Default to Python 3 when no options are provided.
     self.python_major_ver = 3 if options is None else options.python_version[0]
Example #27
0
def filter_out_missing_top_level_packages(packages: Set[str],
                                          search_paths: SearchPaths,
                                          fscache: FileSystemCache) -> Set[str]:
    """Quickly filter out obviously missing top-level packages.

    Return packages with entries that can't be found removed.

    This is approximate: some packages that aren't actually valid may be
    included. However, all potentially valid packages must be returned.
    """
    # Start with a empty set and add all potential top-level packages.
    found = set()
    paths = (
        search_paths.python_path + search_paths.mypy_path + search_paths.package_path +
        search_paths.typeshed_path
    )
    # Also look inside '@python2' subdirectories of the typeshed paths.
    paths += tuple(os.path.join(p, '@python2') for p in search_paths.typeshed_path)
    for p in paths:
        try:
            entries = fscache.listdir(p)
        except Exception:
            # Unreadable/missing directories simply contribute no entries.
            entries = []
        for entry in entries:
            # The code is hand-optimized for mypyc since this may be somewhat
            # performance-critical.
            if entry.endswith('.py'):
                entry = entry[:-3]
            elif entry.endswith('.pyi'):
                entry = entry[:-4]
            elif entry.endswith('-stubs'):
                # Possible PEP 561 stub package
                entry = entry[:-6]
                if entry.endswith('-python2'):
                    entry = entry[:-8]
            if entry in packages:
                found.add(entry)
    return found
Example #28
0
    def initialize_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Do an initial full build and set up the fine-grained incremental machinery.

        Returns a response dict with 'out', 'err' and 'status' keys:
        status 2 on a blocking compile error, otherwise 1 if there were
        error messages and 0 if not.
        """
        # The file system cache we create gets passed off to
        # BuildManager, and thence to FineGrainedBuildManager, which
        # assumes responsibility for clearing it after updates.
        fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(fscache)
        self.update_sources(sources)
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=fscache,
                                      alt_lib_path=self.alt_lib_path)
        except mypy.errors.CompileError as e:
            # Blocking error: report it on the channel the error requests.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        fscache.flush()
        status = 1 if messages else 0
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }
Example #29
0
class Server:

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).

    def __init__(self,
                 options: Options,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags.

        Args:
            options: mypy options; mutated below to force the settings that
                fine-grained incremental mode requires.
            timeout: optional socket inactivity timeout in seconds.
        """
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        # Remove a stale status file left behind by a previous daemon, if any.
        if os.path.isfile(STATUS_FILE):
            os.unlink(STATUS_FILE)

        self.fscache = FileSystemCache()

        # The daemon always runs in fine-grained incremental mode.
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True

    def serve(self) -> None:
        """Serve requests, synchronously (no thread or fork)."""
        # Initialize before the loop: if the very first request carries no
        # 'command' key, the "command == 'stop'" check below would otherwise
        # raise UnboundLocalError and kill the daemon.
        command = None
        try:
            sock = self.create_listening_socket()
            if self.timeout is not None:
                sock.settimeout(self.timeout)
            try:
                # Advertise our pid and socket name so clients can find us.
                with open(STATUS_FILE, 'w') as f:
                    json.dump(
                        {
                            'pid': os.getpid(),
                            'sockname': sock.getsockname()
                        }, f)
                    f.write('\n')  # I like my JSON with trailing newline
                while True:
                    try:
                        conn, addr = sock.accept()
                    except socket.timeout:
                        print("Exiting due to inactivity.")
                        reset_global_state()
                        sys.exit(0)
                    try:
                        data = receive(conn)
                    except OSError:
                        conn.close()  # Maybe the client hung up
                        continue
                    resp = {}  # type: Dict[str, Any]
                    if 'command' not in data:
                        resp = {'error': "No command found in request"}
                    else:
                        command = data['command']
                        if not isinstance(command, str):
                            resp = {'error': "Command is not a string"}
                        else:
                            command = data.pop('command')
                            try:
                                resp = self.run_command(command, data)
                            except Exception:
                                # If we are crashing, report the crash to the client
                                tb = traceback.format_exception(
                                    *sys.exc_info())
                                resp = {
                                    'error': "Daemon crashed!\n" + "".join(tb)
                                }
                                conn.sendall(json.dumps(resp).encode('utf8'))
                                raise
                    try:
                        conn.sendall(json.dumps(resp).encode('utf8'))
                    except OSError:
                        pass  # Maybe the client hung up
                    conn.close()
                    if command == 'stop':
                        sock.close()
                        reset_global_state()
                        sys.exit(0)
            finally:
                os.unlink(STATUS_FILE)
        finally:
            shutil.rmtree(self.sock_directory)
            # Print any pending non-SystemExit exception before the daemon dies.
            exc_info = sys.exc_info()
            if exc_info[0] and exc_info[0] is not SystemExit:
                traceback.print_exception(*exc_info)

    def create_listening_socket(self) -> socket.socket:
        """Create the socket and set it up for listening."""
        self.sock_directory = tempfile.mkdtemp()
        sockname = os.path.join(self.sock_directory, SOCKET_NAME)
        sock = socket.socket(socket.AF_UNIX)
        sock.bind(sockname)
        sock.listen(1)
        return sock

    def run_command(self, command: str,
                    data: Mapping[str, object]) -> Dict[str, object]:
        """Run a specific command from the registry.

        The command name is mapped to a cmd_* method on this class;
        *data* supplies its keyword arguments.
        """
        key = 'cmd_' + command
        method = getattr(self.__class__, key, None)
        if method is None:
            return {'error': "Unrecognized command '%s'" % command}
        else:
            return method(self, **data)

    # Command functions (run in the server via RPC).

    def cmd_status(self) -> Dict[str, object]:
        """Return daemon status."""
        res = {}  # type: Dict[str, object]
        res.update(get_meminfo())
        return res

    def cmd_stop(self) -> Dict[str, object]:
        """Stop daemon."""
        return {}

    # Most recent source list; set by cmd_run/cmd_check, reused by cmd_recheck.
    last_sources = None  # type: List[mypy.build.BuildSource]

    def cmd_run(self, version: str, args: Sequence[str]) -> Dict[str, object]:
        """Check a list of files, triggering a restart if needed."""
        try:
            self.last_sources, options = mypy.main.process_options(
                ['-i'] + list(args),
                require_targets=True,
                server_options=True,
                fscache=self.fscache)
            # Signal that we need to restart if the options have changed
            if self.options_snapshot != options.snapshot():
                return {'restart': 'configuration changed'}
            if __version__ != version:
                return {'restart': 'mypy version changed'}
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        return self.check(self.last_sources)

    def cmd_check(self, files: Sequence[str]) -> Dict[str, object]:
        """Check a list of files."""
        try:
            self.last_sources = create_source_list(files, self.options,
                                                   self.fscache)
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        return self.check(self.last_sources)

    def cmd_recheck(self) -> Dict[str, object]:
        """Check the same list of files we checked most recently."""
        if not self.last_sources:
            return {
                'error':
                "Command 'recheck' is only valid after a 'check' command"
            }
        return self.check(self.last_sources)

    def check(self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Check using fine-grained incremental mode."""
        if not self.fine_grained_manager:
            # First check: full build plus fine-grained state setup.
            res = self.initialize_fine_grained(sources)
        else:
            res = self.fine_grained_increment(sources)
        self.fscache.flush()
        return res

    def initialize_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Perform the initial build and set up the fine-grained manager.

        Returns a response dict with 'out', 'err' and 'status' keys
        (status 2 on a build error, 1 if there were messages, else 0).
        """
        self.fswatcher = FileSystemWatcher(self.fscache)
        self.update_sources(sources)
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def fine_grained_increment(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Run an incremental fine-grained update and return the result dict."""
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        t0 = time.time()
        self.update_sources(sources)
        changed, removed = self.find_changed(sources)
        manager.lib_path = tuple(
            mypy.build.compute_lib_path(sources, manager.options,
                                        manager.data_dir))
        t1 = time.time()
        messages = self.fine_grained_manager.update(changed, removed)
        t2 = time.time()
        manager.log(
            "fine-grained increment: find_changed: {:.3f}s, update: {:.3f}s".
            format(t1 - t0, t2 - t1))
        status = 1 if messages else 0
        self.previous_sources = sources
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def update_sources(self, sources: List[mypy.build.BuildSource]) -> None:
        """Register the sources' paths with the file system watcher."""
        paths = [source.path for source in sources if source.path is not None]
        self.fswatcher.add_watched_paths(paths)

    def find_changed(
        self, sources: List[mypy.build.BuildSource]
    ) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
        """Return (changed, removed) lists of (module, path) pairs."""
        changed_paths = self.fswatcher.find_changed()
        # Find anything that has been added or modified
        changed = [(source.module, source.path) for source in sources
                   if source.path in changed_paths]

        # Now find anything that has been removed from the build
        modules = {source.module for source in sources}
        omitted = [
            source for source in self.previous_sources
            if source.module not in modules
        ]
        removed = []
        for source in omitted:
            path = source.path
            assert path
            removed.append((source.module, path))

        # Find anything that has had its module path change because of added or removed __init__s
        last = {s.path: s.module for s in self.previous_sources}
        for s in sources:
            assert s.path
            if s.path in last and last[s.path] != s.module:
                # Mark it as removed from its old name and changed at its new name
                removed.append((last[s.path], s.path))
                changed.append((s.module, s.path))

        return changed, removed

    def cmd_hang(self) -> Dict[str, object]:
        """Hang for 100 seconds, as a debug hack."""
        time.sleep(100)
        return {}
Example #30
0
class Server:

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).

    def __init__(self,
                 options: Options,
                 status_file: str,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags.

        Args:
            options: mypy options; mutated below to force the settings
                fine-grained incremental mode requires.
            status_file: path of the JSON file advertising this daemon.
            timeout: optional inactivity timeout (seconds) for the server.
        """
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        # Remove a stale status file left behind by a previous daemon, if any.
        if os.path.isfile(status_file):
            os.unlink(status_file)

        self.fscache = FileSystemCache()

        # Force the settings the daemon's fine-grained incremental mode needs.
        options.raise_exceptions = True
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
        self.status_file = status_file

        # Since the object is created in the parent process we can check
        # the output terminal options here.
        self.formatter = FancyFormatter(sys.stdout, sys.stderr,
                                        options.show_error_codes)

    def _response_metadata(self) -> Dict[str, str]:
        """Return the metadata attached to every response sent to the client."""
        version_info = self.options.python_version
        py_version = '{}_{}'.format(version_info[0], version_info[1])
        return {
            'platform': self.options.platform,
            'python_version': py_version,
        }

    def serve(self) -> None:
        """Serve requests, synchronously (no thread or fork)."""
        # Remember the last command so the cleanup below can tell a clean
        # 'stop' from any other way of leaving the loop.
        command = None
        try:
            server = IPCServer(CONNECTION_NAME, self.timeout)
            # Advertise our pid and connection name so clients can find us.
            with open(self.status_file, 'w') as f:
                json.dump(
                    {
                        'pid': os.getpid(),
                        'connection_name': server.connection_name
                    }, f)
                f.write('\n')  # I like my JSON with a trailing newline
            while True:
                with server:
                    data = receive(server)
                    resp = {}  # type: Dict[str, Any]
                    if 'command' not in data:
                        resp = {'error': "No command found in request"}
                    else:
                        command = data['command']
                        if not isinstance(command, str):
                            resp = {'error': "Command is not a string"}
                        else:
                            command = data.pop('command')
                            try:
                                resp = self.run_command(command, data)
                            except Exception:
                                # If we are crashing, report the crash to the client
                                tb = traceback.format_exception(
                                    *sys.exc_info())
                                resp = {
                                    'error': "Daemon crashed!\n" + "".join(tb)
                                }
                                resp.update(self._response_metadata())
                                server.write(json.dumps(resp).encode('utf8'))
                                raise
                    try:
                        resp.update(self._response_metadata())
                        server.write(json.dumps(resp).encode('utf8'))
                    except OSError:
                        pass  # Maybe the client hung up
                    if command == 'stop':
                        reset_global_state()
                        sys.exit(0)
        finally:
            # If the final command is something other than a clean
            # stop, remove the status file. (We can't just
            # simplify the logic and always remove the file, since
            # that could cause us to remove a future server's
            # status file.)
            if command != 'stop':
                os.unlink(self.status_file)
            try:
                server.cleanup()  # try to remove the socket dir on Linux
            except OSError:
                pass
            # Print any pending non-SystemExit exception before the daemon dies.
            exc_info = sys.exc_info()
            if exc_info[0] and exc_info[0] is not SystemExit:
                traceback.print_exception(*exc_info)

    def run_command(self, command: str,
                    data: Dict[str, object]) -> Dict[str, object]:
        """Dispatch *command* to the matching cmd_* method.

        The entries of *data* are passed as keyword arguments.
        """
        handler = getattr(self.__class__, 'cmd_' + command, None)
        if handler is None:
            return {'error': "Unrecognized command '%s'" % command}
        if command not in {'check', 'recheck', 'run'}:
            # Only the above commands use some error formatting.
            del data['is_tty']
            del data['terminal_width']
        return handler(self, **data)

    # Command functions (run in the server via RPC).

    def cmd_status(
            self,
            fswatcher_dump_file: Optional[str] = None) -> Dict[str, object]:
        """Return daemon status (memory info).

        If *fswatcher_dump_file* is given, also write the file watcher's
        state to that path as JSON.
        """
        res = dict(get_meminfo())  # type: Dict[str, object]
        if fswatcher_dump_file:
            if hasattr(self, 'fswatcher'):
                data = self.fswatcher.dump_file_data()
            else:
                data = {}
            # Using .dumps and then writing was noticeably faster than using dump
            dumped = json.dumps(data)
            with open(fswatcher_dump_file, 'w') as f:
                f.write(dumped)
        return res

    def cmd_stop(self) -> Dict[str, object]:
        """Stop daemon.

        serve() exits once this command's response has been sent.
        """
        # We need to remove the status file *before* we complete the
        # RPC. Otherwise a race condition exists where a subsequent
        # command can see a status file from a dying server and think
        # it is a live one.
        os.unlink(self.status_file)
        return {}

    def cmd_run(self, version: str, args: Sequence[str], is_tty: bool,
                terminal_width: int) -> Dict[str, object]:
        """Check a list of files, triggering a restart if needed.

        A restart is requested when the effective options, the mypy
        version, or the set of active plugins changed since startup.
        """
        try:
            # Process options can exit on improper arguments, so we need to catch that and
            # capture stderr so the client can report it
            stderr = io.StringIO()
            stdout = io.StringIO()
            with redirect_stderr(stderr):
                with redirect_stdout(stdout):
                    sources, options = mypy.main.process_options(
                        ['-i'] + list(args),
                        require_targets=True,
                        server_options=True,
                        fscache=self.fscache,
                        program='mypy-daemon',
                        header=argparse.SUPPRESS)
            # Signal that we need to restart if the options have changed
            if self.options_snapshot != options.snapshot():
                return {'restart': 'configuration changed'}
            if __version__ != version:
                return {'restart': 'mypy version changed'}
            if self.fine_grained_manager:
                manager = self.fine_grained_manager.manager
                start_plugins_snapshot = manager.plugins_snapshot
                _, current_plugins_snapshot = mypy.build.load_plugins(
                    options, manager.errors, sys.stdout, extra_plugins=())
                if current_plugins_snapshot != start_plugins_snapshot:
                    return {'restart': 'plugins changed'}
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        except SystemExit as e:
            # process_options() exited: relay its captured output and exit code.
            return {
                'out': stdout.getvalue(),
                'err': stderr.getvalue(),
                'status': e.code
            }
        return self.check(sources, is_tty, terminal_width)

    def cmd_check(self, files: Sequence[str], is_tty: bool,
                  terminal_width: int) -> Dict[str, object]:
        """Check a list of files."""
        try:
            source_list = create_source_list(files, self.options, self.fscache)
        except InvalidSourceList as err:
            # A bad file list is a client error, reported via the err channel.
            return {'out': '', 'err': str(err), 'status': 2}
        else:
            return self.check(source_list, is_tty, terminal_width)

    def cmd_recheck(self,
                    is_tty: bool,
                    terminal_width: int,
                    remove: Optional[List[str]] = None,
                    update: Optional[List[str]] = None) -> Dict[str, object]:
        """Check the same list of files we checked most recently.

        If remove/update is given, they modify the previous list;
        if all are None, stat() is called for each file in the previous list.
        """
        t0 = time.time()
        if not self.fine_grained_manager:
            return {
                'error':
                "Command 'recheck' is only valid after a 'check' command"
            }
        sources = self.previous_sources
        if remove:
            # Drop explicitly removed paths from the source list.
            removals = set(remove)
            sources = [s for s in sources if s.path and s.path not in removals]
        if update:
            # Updated paths not already in the list are new files to add.
            known = {s.path for s in sources if s.path}
            added = [p for p in update if p not in known]
            try:
                added_sources = create_source_list(added, self.options,
                                                   self.fscache)
            except InvalidSourceList as err:
                return {'out': '', 'err': str(err), 'status': 2}
            sources = sources + added_sources  # Make a copy!
        t1 = time.time()
        manager = self.fine_grained_manager.manager
        manager.log("fine-grained increment: cmd_recheck: {:.3f}s".format(t1 -
                                                                          t0))
        if not self.following_imports():
            messages = self.fine_grained_increment(sources, remove, update)
        else:
            # Follow-imports mode does not support the remove/update lists.
            assert remove is None and update is None
            messages = self.fine_grained_increment_follow_imports(sources)
        res = self.increment_output(messages, sources, is_tty, terminal_width)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def check(self, sources: List[BuildSource], is_tty: bool,
              terminal_width: int) -> Dict[str, Any]:
        """Check using fine-grained incremental mode.

        If is_tty is True format the output nicely with colors and summary line
        (unless disabled in self.options). Also pass the terminal_width to formatter.
        """
        if self.fine_grained_manager:
            # Incremental update of previously built fine-grained state.
            if self.following_imports():
                messages = self.fine_grained_increment_follow_imports(sources)
            else:
                messages = self.fine_grained_increment(sources)
            res = self.increment_output(messages, sources, is_tty,
                                        terminal_width)
        else:
            # First check: full build plus fine-grained state setup.
            res = self.initialize_fine_grained(sources, is_tty, terminal_width)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def update_stats(self, res: Dict[str, Any]) -> None:
        """Move the build manager's accumulated stats into *res* and reset them."""
        if not self.fine_grained_manager:
            return
        manager = self.fine_grained_manager.manager
        manager.dump_stats()
        res['stats'] = manager.stats
        manager.stats = {}

    def following_imports(self) -> bool:
        """Report whether the daemon is following imports (follow_imports=normal)."""
        # TODO: What about silent?
        mode = self.options.follow_imports
        return mode == 'normal'

    def initialize_fine_grained(self, sources: List[BuildSource], is_tty: bool,
                                terminal_width: int) -> Dict[str, Any]:
        """Perform the initial full build and set up fine-grained state.

        Returns a response dict with 'out', 'err' and 'status' keys
        (status 2 on a build error, 1 if there were messages, else 0).
        """
        self.fswatcher = FileSystemWatcher(self.fscache)
        t0 = time.time()
        self.update_sources(sources)
        t1 = time.time()
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            # Blocking error: report on stdout or stderr as the error requests.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)

        if self.following_imports():
            # Include every module reached by the build, not just the roots.
            sources = find_all_sources_in_build(
                self.fine_grained_manager.graph, sources)
            self.update_sources(sources)

        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            t2 = time.time()
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             hash=meta.hash))

            changed, removed = self.find_changed(sources)
            # NOTE(review): presumably also picks up modules whose previously
            # suppressed imports can now be resolved -- confirm against
            # find_added_suppressed's definition.
            changed += self.find_added_suppressed(
                self.fine_grained_manager.graph, set(),
                self.fine_grained_manager.manager.search_paths)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            t3 = time.time()
            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)

            if self.following_imports():
                # We need to do another update to any new files found by following imports.
                messages = self.fine_grained_increment_follow_imports(sources)

            t4 = time.time()
            self.fine_grained_manager.manager.add_stats(
                update_sources_time=t1 - t0,
                build_time=t2 - t1,
                find_changes_time=t3 - t2,
                fg_update_time=t4 - t3,
                files_changed=len(removed) + len(changed))

        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        messages = self.pretty_messages(messages, len(sources), is_tty,
                                        terminal_width)
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def fine_grained_increment(
        self,
        sources: List[BuildSource],
        remove: Optional[List[str]] = None,
        update: Optional[List[str]] = None,
    ) -> List[str]:
        """Perform a fine-grained type checking increment.

        If remove and update are None, determine changed paths by using
        fswatcher. Otherwise, assume that only these files have changes.

        Args:
            sources: sources passed on the command line
            remove: paths of files that have been removed
            update: paths of files that have been changed or created

        Returns:
            The messages produced by the update.
        """
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        t0 = time.time()
        if remove is None and update is None:
            # Use the fswatcher to determine which files were changed
            # (updated or added) or removed.
            self.update_sources(sources)
            changed, removed = self.find_changed(sources)
        else:
            # Use the remove/update lists to update fswatcher.
            # This avoids calling stat() for unchanged files.
            changed, removed = self.update_changed(sources, remove or [],
                                                   update or [])
        # NOTE(review): presumably adds modules whose previously suppressed
        # imports can now be resolved -- confirm against find_added_suppressed.
        changed += self.find_added_suppressed(self.fine_grained_manager.graph,
                                              set(), manager.search_paths)
        manager.search_paths = compute_search_paths(sources, manager.options,
                                                    manager.data_dir)
        t1 = time.time()
        manager.log("fine-grained increment: find_changed: {:.3f}s".format(t1 -
                                                                           t0))
        messages = self.fine_grained_manager.update(changed, removed)
        t2 = time.time()
        manager.log("fine-grained increment: update: {:.3f}s".format(t2 - t1))
        manager.add_stats(find_changes_time=t1 - t0,
                          fg_update_time=t2 - t1,
                          files_changed=len(removed) + len(changed))

        self.previous_sources = sources
        return messages

    def fine_grained_increment_follow_imports(
            self, sources: List[BuildSource]) -> List[str]:
        """Like fine_grained_increment, but follow imports.

        Incrementally update the build while also discovering modules that
        become reachable through imports (checking them too) and dropping
        modules that are no longer reachable from the given roots.

        Args:
            sources: root sources to start from (mutated here: newly
                discovered files are appended!)

        Returns the error messages from the final fine-grained update pass.
        """
        t0 = time.time()

        # TODO: Support file events

        assert self.fine_grained_manager is not None
        fine_grained_manager = self.fine_grained_manager
        graph = fine_grained_manager.graph
        manager = fine_grained_manager.manager

        # Snapshot the module ids currently in the build so we can detect,
        # at the end, which ones were never reached (they get deleted).
        orig_modules = list(graph.keys())

        self.update_sources(sources)
        changed_paths = self.fswatcher.find_changed()
        # The set of sources can affect the search paths (e.g. new package
        # directories), so recompute them.
        manager.search_paths = compute_search_paths(sources, manager.options,
                                                    manager.data_dir)

        t1 = time.time()
        manager.log("fine-grained increment: find_changed: {:.3f}s".format(t1 -
                                                                           t0))

        seen = {source.module for source in sources}

        # Find changed modules reachable from roots (or in roots) already in graph.
        changed, new_files = self.find_reachable_changed_modules(
            sources, graph, seen, changed_paths)
        sources.extend(new_files)

        # Process changes directly reachable from roots.
        messages = fine_grained_manager.update(changed, [])

        # Follow deps from changed modules (still within graph).
        worklist = changed[:]
        while worklist:
            module = worklist.pop()
            if module[0] not in graph:
                continue
            sources2 = self.direct_imports(module, graph)
            # Filter anything already seen before. This prevents
            # infinite looping if there are any self edges. (Self
            # edges are maybe a bug, but...)
            sources2 = [
                source for source in sources2 if source.module not in seen
            ]
            changed, new_files = self.find_reachable_changed_modules(
                sources2, graph, seen, changed_paths)
            self.update_sources(new_files)
            # Each update pass returns the full current error list, so it is
            # correct to simply overwrite 'messages' each time.
            messages = fine_grained_manager.update(changed, [])
            worklist.extend(changed)

        t2 = time.time()

        def refresh_file(module: str, path: str) -> List[str]:
            # Callback for refresh_suppressed_submodules: re-process a single
            # file through the fine-grained manager.
            return fine_grained_manager.update([(module, path)], [])

        # Refresh any submodules of modules in the graph that were previously
        # suppressed; this may produce a fresh error list.
        for module_id, state in list(graph.items()):
            new_messages = refresh_suppressed_submodules(
                module_id, state.path, fine_grained_manager.deps, graph,
                self.fscache, refresh_file)
            if new_messages is not None:
                messages = new_messages

        t3 = time.time()

        # There may be new files that became available, currently treated as
        # suppressed imports. Process them.
        while True:
            new_unsuppressed = self.find_added_suppressed(
                graph, seen, manager.search_paths)
            if not new_unsuppressed:
                break
            new_files = [
                BuildSource(mod[1], mod[0]) for mod in new_unsuppressed
            ]
            sources.extend(new_files)
            self.update_sources(new_files)
            messages = fine_grained_manager.update(new_unsuppressed, [])

            for module_id, path in new_unsuppressed:
                new_messages = refresh_suppressed_submodules(
                    module_id, path, fine_grained_manager.deps, graph,
                    self.fscache, refresh_file)
                if new_messages is not None:
                    messages = new_messages

        t4 = time.time()

        # Find all original modules in graph that were not reached -- they are deleted.
        to_delete = []
        for module_id in orig_modules:
            if module_id not in graph:
                continue
            if module_id not in seen:
                module_path = graph[module_id].path
                assert module_path is not None
                to_delete.append((module_id, module_path))
        if to_delete:
            messages = fine_grained_manager.update([], to_delete)

        fix_module_deps(graph)

        # The graph is now the definitive record of the build; refresh our
        # bookkeeping (previous sources and watched paths) from it.
        self.previous_sources = find_all_sources_in_build(graph)
        self.update_sources(self.previous_sources)

        # Store current file state as side effect
        self.fswatcher.find_changed()

        t5 = time.time()

        manager.log("fine-grained increment: update: {:.3f}s".format(t5 - t1))
        # NOTE(review): 'supressed' (sic) is the stats key as recorded;
        # renaming it would change the emitted stats dict.
        manager.add_stats(find_changes_time=t1 - t0,
                          fg_update_time=t2 - t1,
                          refresh_suppressed_time=t3 - t2,
                          find_added_supressed_time=t4 - t3,
                          cleanup_time=t5 - t4)

        return messages

    def find_reachable_changed_modules(
        self, roots: List[BuildSource], graph: mypy.build.Graph,
        seen: Set[str], changed_paths: AbstractSet[str]
    ) -> Tuple[List[Tuple[str, str]], List[BuildSource]]:
        """Follow imports within graph from given sources until hitting changed modules.

        If we find a changed module, we can't continue following imports as the imports
        may have changed.

        Args:
            roots: modules where to start search from
            graph: module graph to use for the search
            seen: modules we've seen before that won't be visited (mutated here!!)
            changed_paths: which paths have changed (stop search here and return any found)

        Return (encountered reachable changed modules,
                unchanged files not in sources_set traversed).
        """
        changed = []
        new_files = []
        worklist = roots[:]
        # Pre-seed 'seen' with the roots so they are never reported as
        # newly discovered files below.
        seen.update(source.module for source in worklist)
        while worklist:
            nxt = worklist.pop()
            if nxt.module not in seen:
                # First visit of a module discovered via imports: record it.
                seen.add(nxt.module)
                new_files.append(nxt)
            if nxt.path in changed_paths:
                assert nxt.path is not None  # TODO
                changed.append((nxt.module, nxt.path))
            elif nxt.module in graph:
                state = graph[nxt.module]
                for dep in state.dependencies:
                    if dep not in seen:
                        # Bug fix: previously 'seen.add(dep)' happened here,
                        # which made the 'nxt.module not in seen' check above
                        # always false -- new_files was always empty, so the
                        # caller never learned about discovered files. 'seen'
                        # is now updated when the item is popped instead.
                        worklist.append(
                            BuildSource(graph[dep].path, graph[dep].id))
        return changed, new_files

    def direct_imports(self, module: Tuple[str, str],
                       graph: mypy.build.Graph) -> List[BuildSource]:
        """Return build sources for the direct dependencies of a module.

        The module is given as a (module id, path) pair; only the id is used.
        """
        module_id, _ = module
        result = []  # type: List[BuildSource]
        for dep in graph[module_id].dependencies:
            result.append(BuildSource(graph[dep].path, dep))
        return result

    def find_added_suppressed(
            self, graph: mypy.build.Graph, seen: Set[str],
            search_paths: SearchPaths) -> List[Tuple[str, str]]:
        """Find suppressed modules that have been added (and not included in seen).

        Args:
            seen: reachable modules we've seen before (mutated here!!)

        Return suppressed, added modules as (module id, path) pairs.
        """
        # Union of every module any state in the graph suppresses.
        all_suppressed = set()
        for state in graph.values():
            all_suppressed |= state.suppressed_set

        # Filter out things that shouldn't actually be considered suppressed.
        #
        # TODO: Figure out why these are treated as suppressed
        all_suppressed = {
            module
            for module in all_suppressed
            if module not in graph and not ignore_suppressed_imports(module)
        }

        # Optimization: skip top-level packages that are obviously not
        # there, to avoid calling the relatively slow find_module()
        # below too many times.
        packages = {module.split('.', 1)[0] for module in all_suppressed}
        packages = filter_out_missing_top_level_packages(
            packages, search_paths, self.fscache)

        # TODO: Namespace packages

        finder = FindModuleCache(search_paths, self.fscache, self.options)

        found = []

        for module in all_suppressed:
            top_level_pkg = module.split('.', 1)[0]
            if top_level_pkg not in packages:
                # Fast path: non-existent top-level package
                continue
            result = finder.find_module(module, fast_path=True)
            # A str result is the found path; any other result means the
            # module still could not be located.
            if isinstance(result, str) and module not in seen:
                # When not following imports, we only follow imports to .pyi files.
                if not self.following_imports() and not result.endswith(
                        '.pyi'):
                    continue
                found.append((module, result))
                seen.add(module)

        return found

    def increment_output(self, messages: List[str], sources: List[BuildSource],
                         is_tty: bool, terminal_width: int) -> Dict[str, Any]:
        """Package messages into a response dict; status is 1 iff any errors."""
        had_errors = bool(messages)
        formatted = self.pretty_messages(messages, len(sources), is_tty,
                                         terminal_width)
        return {
            'out': ''.join(line + '\n' for line in formatted),
            'err': '',
            'status': 1 if had_errors else 0
        }

    def pretty_messages(self,
                        messages: List[str],
                        n_sources: int,
                        is_tty: bool = False,
                        terminal_width: Optional[int] = None) -> List[str]:
        """Apply optional line wrapping, a summary line and colors.

        Returns a new list; the input list is never mutated.
        """
        use_color = self.options.color_output and is_tty
        if self.options.pretty and is_tty:
            # Wrap long messages to the terminal width (possibly fixed).
            messages = self.formatter.fit_in_terminal(
                messages, fixed_terminal_width=terminal_width)
        if self.options.error_summary:
            summary = None  # type: Optional[str]
            if not messages:
                summary = self.formatter.format_success(n_sources, use_color)
            else:
                n_errors, n_files = count_stats(messages)
                if n_errors:
                    summary = self.formatter.format_error(n_errors,
                                                          n_files,
                                                          n_sources,
                                                          use_color=use_color)
            if summary:
                # Fresh list so repeated runs don't accumulate summaries.
                messages = messages + [summary]
        if use_color:
            messages = [self.formatter.colorize(m) for m in messages]
        return messages

    def update_sources(self, sources: List[BuildSource]) -> None:
        """Register the file paths of the given sources with the fswatcher."""
        follow = self.following_imports()
        paths = []
        for source in sources:
            path = source.path
            if path is None:
                continue
            # When following imports, directories can appear as paths
            # (namespace packages); only regular files are watched.
            if follow and not self.fscache.isfile(path):
                continue
            paths.append(path)
        self.fswatcher.add_watched_paths(paths)

    def update_changed(
        self,
        sources: List[BuildSource],
        remove: List[str],
        update: List[str],
    ) -> ChangesAndRemovals:
        """Like find_changed, but driven by explicit remove/update lists."""
        return self._find_changed(
            sources, self.fswatcher.update_changed(remove, update))

    def find_changed(self, sources: List[BuildSource]) -> ChangesAndRemovals:
        """Determine changed and removed modules by polling the fswatcher."""
        return self._find_changed(sources, self.fswatcher.find_changed())

    def _find_changed(self, sources: List[BuildSource],
                      changed_paths: AbstractSet[str]) -> ChangesAndRemovals:
        """Classify sources as changed or removed given the changed paths."""
        # Anything added or modified.
        changed = []
        for source in sources:
            if source.path and source.path in changed_paths:
                changed.append((source.module, source.path))

        # Anything dropped from the build since last time.
        current_modules = {source.module for source in sources}
        removed = []
        for source in self.previous_sources:
            if source.module in current_modules:
                continue
            path = source.path
            assert path
            removed.append((source.module, path))

        # A path whose module id changed (e.g. an added or removed
        # __init__.py) counts as removed under the old id and changed
        # under the new one.
        prev_module_of = {s.path: s.module for s in self.previous_sources}
        for s in sources:
            assert s.path
            old_module = prev_module_of.get(s.path)
            if old_module is not None and old_module != s.module:
                removed.append((old_module, s.path))
                changed.append((s.module, s.path))

        return changed, removed

    def cmd_suggest(self, function: str, callsites: bool,
                    **kwargs: Any) -> Dict[str, object]:
        """Suggest a signature for a function.

        Args:
            function: target function to analyze
            callsites: if True, list call sites instead of suggesting
            **kwargs: passed through to SuggestionEngine
        """
        if not self.fine_grained_manager:
            # Suggestions need the state built by a successful check first.
            return {
                'error':
                "Command 'suggest' is only valid after a 'check' command"
                " (that produces no parse errors)"
            }
        engine = SuggestionEngine(self.fine_grained_manager, **kwargs)
        try:
            if callsites:
                out = engine.suggest_callsites(function)
            else:
                out = engine.suggest(function)
        except SuggestionFailure as err:
            return {'error': str(err)}
        else:
            # Normalize output so it always ends with a newline.
            if not out:
                out = "No suggestions\n"
            elif not out.endswith("\n"):
                out += "\n"
            return {'out': out, 'err': "", 'status': 0}
        finally:
            # Runs before either return above completes.
            self.fscache.flush()

    def cmd_hang(self) -> Dict[str, object]:
        """Hang for 100 seconds, as a debug hack."""
        # Blocks the synchronous request loop for the duration.
        time.sleep(100)
        return {}
# ---- chunk separator: "Example #31" (extraction artifact; commented out so the file stays valid Python) ----
class Server:
    """The mypy daemon server: answers check/recheck/etc. commands over IPC.

    Requests are served synchronously by serve(); command handlers are the
    cmd_* methods, dispatched via run_command().
    """

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).

    def __init__(self,
                 options: Options,
                 status_file: str,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags.

        Args:
            options: mypy options (mutated below to force the settings that
                fine-grained incremental mode requires)
            status_file: path of the daemon status file (a stale one is
                removed here)
            timeout: optional IPC timeout, passed to IPCServer
        """
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        if os.path.isfile(status_file):
            os.unlink(status_file)

        self.fscache = FileSystemCache()

        options.raise_exceptions = True
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            # No cache directory is needed otherwise; point it at the bit bucket.
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
        self.status_file = status_file

        # Since the object is created in the parent process we can check
        # the output terminal options here.
        self.formatter = FancyFormatter(sys.stdout, sys.stderr,
                                        options.show_error_codes)

    def _response_metadata(self) -> Dict[str, str]:
        """Metadata attached to every response: platform and Python version."""
        version = self.options.python_version
        return {
            'platform': self.options.platform,
            'python_version': '{}_{}'.format(version[0], version[1]),
        }

    def serve(self) -> None:
        """Serve requests, synchronously (no thread or fork)."""
        command = None
        try:
            server = IPCServer(CONNECTION_NAME, self.timeout)
            # Publish our pid and connection name so clients can reach us.
            with open(self.status_file, 'w') as f:
                json.dump(
                    {
                        'pid': os.getpid(),
                        'connection_name': server.connection_name
                    }, f)
                f.write('\n')  # I like my JSON with a trailing newline
            while True:
                with server:
                    data = receive(server)
                    resp = {}  # type: Dict[str, Any]
                    if 'command' not in data:
                        resp = {'error': "No command found in request"}
                    else:
                        command = data['command']
                        if not isinstance(command, str):
                            resp = {'error': "Command is not a string"}
                        else:
                            # Pop 'command' so the remaining entries of 'data'
                            # can be passed as keyword args to the handler.
                            command = data.pop('command')
                            try:
                                resp = self.run_command(command, data)
                            except Exception:
                                # If we are crashing, report the crash to the client
                                tb = traceback.format_exception(
                                    *sys.exc_info())
                                resp = {
                                    'error': "Daemon crashed!\n" + "".join(tb)
                                }
                                resp.update(self._response_metadata())
                                server.write(json.dumps(resp).encode('utf8'))
                                raise
                    try:
                        resp.update(self._response_metadata())
                        server.write(json.dumps(resp).encode('utf8'))
                    except OSError:
                        pass  # Maybe the client hung up
                    if command == 'stop':
                        reset_global_state()
                        sys.exit(0)
        finally:
            # If the final command is something other than a clean
            # stop, remove the status file. (We can't just
            # simplify the logic and always remove the file, since
            # that could cause us to remove a future server's
            # status file.)
            if command != 'stop':
                os.unlink(self.status_file)
            try:
                server.cleanup()  # try to remove the socket dir on Linux
            except OSError:
                pass
            # Print any pending non-SystemExit exception before dying, since
            # the client may never have received it.
            exc_info = sys.exc_info()
            if exc_info[0] and exc_info[0] is not SystemExit:
                traceback.print_exception(*exc_info)

    def run_command(self, command: str,
                    data: Dict[str, object]) -> Dict[str, object]:
        """Dispatch a command to the matching cmd_* handler method.

        The remaining request data is passed to the handler as kwargs.
        """
        method = getattr(self.__class__, 'cmd_' + command, None)
        if method is None:
            return {'error': "Unrecognized command '%s'" % command}
        if command not in {'check', 'recheck', 'run'}:
            # Only the above commands use some error formatting.
            del data['is_tty']
            del data['terminal_width']
        return method(self, **data)

    # Command functions (run in the server via RPC).

    def cmd_status(
            self,
            fswatcher_dump_file: Optional[str] = None) -> Dict[str, object]:
        """Return daemon status."""
        res = dict(get_meminfo())  # type: Dict[str, object]
        if fswatcher_dump_file:
            # The fswatcher attribute only exists after the first check.
            if hasattr(self, 'fswatcher'):
                data = self.fswatcher.dump_file_data()
            else:
                data = {}
            # Using .dumps and then writing was noticably faster than using dump
            with open(fswatcher_dump_file, 'w') as f:
                f.write(json.dumps(data))
        return res

    def cmd_stop(self) -> Dict[str, object]:
        """Stop daemon."""
        # We need to remove the status file *before* we complete the
        # RPC. Otherwise a race condition exists where a subsequent
        # command can see a status file from a dying server and think
        # it is a live one.
        os.unlink(self.status_file)
        return {}

    def cmd_run(self, version: str, args: Sequence[str], is_tty: bool,
                terminal_width: int) -> Dict[str, object]:
        """Check a list of files, triggering a restart if needed.

        A restart is requested (via a {'restart': reason} response) when
        the effective options, the mypy version, or the plugins changed.
        """
        try:
            # Process options can exit on improper arguments, so we need to catch that and
            # capture stderr so the client can report it
            stderr = io.StringIO()
            stdout = io.StringIO()
            with redirect_stderr(stderr):
                with redirect_stdout(stdout):
                    sources, options = mypy.main.process_options(
                        ['-i'] + list(args),
                        require_targets=True,
                        server_options=True,
                        fscache=self.fscache,
                        program='mypy-daemon',
                        header=argparse.SUPPRESS)
            # Signal that we need to restart if the options have changed
            if self.options_snapshot != options.snapshot():
                return {'restart': 'configuration changed'}
            if __version__ != version:
                # Client and server binaries are from different mypy versions.
                return {'restart': 'mypy version changed'}
            if self.fine_grained_manager:
                # Restart if any plugin (or its snapshot) changed.
                manager = self.fine_grained_manager.manager
                start_plugins_snapshot = manager.plugins_snapshot
                _, current_plugins_snapshot = mypy.build.load_plugins(
                    options, manager.errors, sys.stdout, extra_plugins=())
                if current_plugins_snapshot != start_plugins_snapshot:
                    return {'restart': 'plugins changed'}
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        except SystemExit as e:
            # process_options() exited: relay its captured output and code.
            # NOTE(review): e.code may be None or a str, so 'status' is not
            # always an int here -- presumably clients tolerate this; verify.
            return {
                'out': stdout.getvalue(),
                'err': stderr.getvalue(),
                'status': e.code
            }
        return self.check(sources, is_tty, terminal_width)

    def cmd_check(self, files: Sequence[str], is_tty: bool,
                  terminal_width: int) -> Dict[str, object]:
        """Check a list of files."""
        try:
            sources = create_source_list(files, self.options, self.fscache)
        except InvalidSourceList as err:
            # Report bad paths/patterns back to the client.
            return {'out': '', 'err': str(err), 'status': 2}
        else:
            return self.check(sources, is_tty, terminal_width)

    def cmd_recheck(self,
                    is_tty: bool,
                    terminal_width: int,
                    remove: Optional[List[str]] = None,
                    update: Optional[List[str]] = None) -> Dict[str, object]:
        """Check the same list of files we checked most recently.

        If remove/update is given, they modify the previous list;
        if all are None, stat() is called for each file in the previous list.
        """
        t0 = time.time()
        if not self.fine_grained_manager:
            return {
                'error':
                "Command 'recheck' is only valid after a 'check' command"
            }
        sources = self.previous_sources
        if remove:
            removals = set(remove)
            # List comprehension also makes 'sources' a fresh list here.
            sources = [s for s in sources if s.path and s.path not in removals]
        if update:
            known = {s.path for s in sources if s.path}
            # Only paths we aren't already tracking need new source entries.
            added = [p for p in update if p not in known]
            try:
                added_sources = create_source_list(added, self.options,
                                                   self.fscache)
            except InvalidSourceList as err:
                return {'out': '', 'err': str(err), 'status': 2}
            sources = sources + added_sources  # Make a copy!
        t1 = time.time()
        manager = self.fine_grained_manager.manager
        manager.log("fine-grained increment: cmd_recheck: {:.3f}s".format(t1 -
                                                                          t0))
        res = self.fine_grained_increment(sources, is_tty, terminal_width,
                                          remove, update)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def check(self, sources: List[BuildSource], is_tty: bool,
              terminal_width: int) -> Dict[str, Any]:
        """Check using fine-grained incremental mode.

        If is_tty is True format the output nicely with colors and summary line
        (unless disabled in self.options). Also pass the terminal_width to formatter.
        """
        if self.fine_grained_manager:
            res = self.fine_grained_increment(sources, is_tty, terminal_width)
        else:
            # First check: run a full build and set up fine-grained state.
            res = self.initialize_fine_grained(sources, is_tty, terminal_width)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def update_stats(self, res: Dict[str, Any]) -> None:
        """Move accumulated manager stats into the response and reset them."""
        if not self.fine_grained_manager:
            return
        manager = self.fine_grained_manager.manager
        manager.dump_stats()
        res['stats'] = manager.stats
        manager.stats = {}

    def initialize_fine_grained(self, sources: List[BuildSource], is_tty: bool,
                                terminal_width: int) -> Dict[str, Any]:
        """Perform the initial full build and set up fine-grained state.

        Returns a response dict with 'out', 'err' and 'status' keys.
        """
        self.fswatcher = FileSystemWatcher(self.fscache)
        t0 = time.time()
        self.update_sources(sources)
        t1 = time.time()
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            # Blocking errors: report them and don't set up fine-grained state.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            t2 = time.time()
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            t3 = time.time()
            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
            t4 = time.time()
            self.fine_grained_manager.manager.add_stats(
                update_sources_time=t1 - t0,
                build_time=t2 - t1,
                find_changes_time=t3 - t2,
                fg_update_time=t4 - t3,
                files_changed=len(removed) + len(changed))
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        messages = self.pretty_messages(messages, len(sources), is_tty,
                                        terminal_width)
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def fine_grained_increment(
        self,
        sources: List[BuildSource],
        is_tty: bool,
        terminal_width: int,
        remove: Optional[List[str]] = None,
        update: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """Perform a fine-grained incremental check.

        If remove and update are both None, changes are detected by
        stat()ing the watched files; otherwise the explicit lists drive
        the fswatcher update. Returns a response dict with 'out', 'err'
        and 'status' keys.
        """
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        t0 = time.time()
        if remove is None and update is None:
            # Use the fswatcher to determine which files were changed
            # (updated or added) or removed.
            self.update_sources(sources)
            changed, removed = self.find_changed(sources)
        else:
            # Use the remove/update lists to update fswatcher.
            # This avoids calling stat() for unchanged files.
            changed, removed = self.update_changed(sources, remove or [],
                                                   update or [])
        # Sources may have changed search paths (e.g. new directories).
        manager.search_paths = compute_search_paths(sources, manager.options,
                                                    manager.data_dir)
        t1 = time.time()
        manager.log("fine-grained increment: find_changed: {:.3f}s".format(t1 -
                                                                           t0))
        messages = self.fine_grained_manager.update(changed, removed)
        t2 = time.time()
        manager.log("fine-grained increment: update: {:.3f}s".format(t2 - t1))
        manager.add_stats(find_changes_time=t1 - t0,
                          fg_update_time=t2 - t1,
                          files_changed=len(removed) + len(changed))

        status = 1 if messages else 0
        self.previous_sources = sources
        messages = self.pretty_messages(messages, len(sources), is_tty,
                                        terminal_width)
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def pretty_messages(self,
                        messages: List[str],
                        n_sources: int,
                        is_tty: bool = False,
                        terminal_width: Optional[int] = None) -> List[str]:
        """Apply optional line wrapping, a summary line and colors.

        Returns a new list; the input list is never mutated.
        """
        use_color = self.options.color_output and is_tty
        if self.options.pretty and is_tty:
            # Wrap long messages to the terminal width (possibly fixed).
            messages = self.formatter.fit_in_terminal(
                messages, fixed_terminal_width=terminal_width)
        if self.options.error_summary:
            summary = None  # type: Optional[str]
            if not messages:
                summary = self.formatter.format_success(n_sources, use_color)
            else:
                n_errors, n_files = count_stats(messages)
                if n_errors:
                    summary = self.formatter.format_error(
                        n_errors, n_files, n_sources, use_color)
            if summary:
                # Fresh list so repeated runs don't accumulate summaries.
                messages = messages + [summary]
        if use_color:
            messages = [self.formatter.colorize(m) for m in messages]
        return messages

    def update_sources(self, sources: List[BuildSource]) -> None:
        """Start watching the file paths of the given sources."""
        self.fswatcher.add_watched_paths(
            [src.path for src in sources if src.path is not None])

    def update_changed(
        self,
        sources: List[BuildSource],
        remove: List[str],
        update: List[str],
    ) -> ChangesAndRemovals:
        """Like find_changed, but driven by explicit remove/update lists."""
        paths = self.fswatcher.update_changed(remove, update)
        return self._find_changed(sources, paths)

    def find_changed(self, sources: List[BuildSource]) -> ChangesAndRemovals:
        """Determine changed and removed modules by polling the fswatcher."""
        paths = self.fswatcher.find_changed()
        return self._find_changed(sources, paths)

    def _find_changed(self, sources: List[BuildSource],
                      changed_paths: AbstractSet[str]) -> ChangesAndRemovals:
        """Classify sources as changed or removed given the changed paths."""
        # Anything added or modified.
        changed = []
        for source in sources:
            if source.path and source.path in changed_paths:
                changed.append((source.module, source.path))

        # Anything dropped from the build since last time.
        current_modules = {source.module for source in sources}
        removed = []
        for source in self.previous_sources:
            if source.module in current_modules:
                continue
            path = source.path
            assert path
            removed.append((source.module, path))

        # A path whose module id changed (e.g. an added or removed
        # __init__.py) counts as removed under the old id and changed
        # under the new one.
        prev_module_of = {s.path: s.module for s in self.previous_sources}
        for s in sources:
            assert s.path
            old_module = prev_module_of.get(s.path)
            if old_module is not None and old_module != s.module:
                removed.append((old_module, s.path))
                changed.append((s.module, s.path))

        return changed, removed

    def cmd_suggest(self, function: str, callsites: bool,
                    **kwargs: Any) -> Dict[str, object]:
        """Suggest a signature for a function.

        Args:
            function: target function to analyze
            callsites: if True, list call sites instead of suggesting
            **kwargs: passed through to SuggestionEngine
        """
        if not self.fine_grained_manager:
            # Suggestions need the state built by a successful check first.
            return {
                'error':
                "Command 'suggest' is only valid after a 'check' command"
                " (that produces no parse errors)"
            }
        engine = SuggestionEngine(self.fine_grained_manager, **kwargs)
        try:
            if callsites:
                out = engine.suggest_callsites(function)
            else:
                out = engine.suggest(function)
        except SuggestionFailure as err:
            return {'error': str(err)}
        else:
            # Normalize output so it always ends with a newline.
            if not out:
                out = "No suggestions\n"
            elif not out.endswith("\n"):
                out += "\n"
            return {'out': out, 'err': "", 'status': 0}
        finally:
            # Runs before either return above completes.
            self.fscache.flush()

    def cmd_hang(self) -> Dict[str, object]:
        """Hang for 100 seconds, as a debug hack."""
        # Blocks the synchronous request loop for the duration.
        time.sleep(100)
        return {}
# ---- chunk separator: "Example #32" (extraction artifact; commented out so the file stays valid Python) ----
def main(
    script_path: Optional[str],
    stdout: TextIO,
    stderr: TextIO,
    args: Optional[List[str]] = None,
) -> None:
    """Main entry point to the type checker.

    Runs a single (possibly incremental) build, prints errors to the given
    streams, optionally writes a JUnit XML report, and exits with status 0
    (clean), 1 (errors) or 2 (blocking errors / broken output pipe).

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        stdout: Stream for normal (non-serious) error messages.
        stderr: Stream for serious (blocking) error messages and warnings.
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    # Deeply nested code can exceed the default recursion limit during
    # analysis, so raise it substantially.
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args,
                                       stdout=stdout,
                                       stderr=stderr,
                                       fscache=fscache)

    # Accumulates every message flushed during the build; used for the JUnit
    # report and the final exit-code decision.
    messages = []  # type: List[str]

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        # Callback passed to build(): record messages and echo them to the
        # appropriate stream as they are produced.
        messages.extend(new_messages)
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            # The consumer (e.g. `mypy | head`) went away; exit with the
            # "blocking error" status.
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache,
                          stdout, stderr)
    except CompileError as e:
        # Blocking errors abort the build; messages were already flushed.
        blockers = True
        if not e.use_stdout:
            serious = True
    # Unused-config warning is suppressed in incremental mode because a cached
    # run may not visit every module, so "unused" would be unreliable.
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=stderr)
    if options.junit_xml:
        t1 = time.time()
        py_version = '{}_{}'.format(options.python_version[0],
                                    options.python_version[1])
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml,
                             py_version, options.platform)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()
    del res  # Now it's safe to delete

    # Exit status: 2 for blocking errors, 1 for ordinary errors, 0 for clean.
    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)
Example #33
0
class Server:
    """Mypy daemon server: runs builds on behalf of dmypy clients over RPC."""

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).

    def __init__(self, flags: List[str]) -> None:
        """Initialize the server with the desired mypy flags."""
        self.saved_cache = {}  # type: mypy.build.SavedCache
        self.fine_grained_initialized = False
        # '-i' forces incremental mode; the daemon never accepts source
        # targets at start/restart time.
        sources, options = mypy.main.process_options(['-i'] + flags,
                                                     require_targets=False,
                                                     server_options=True)
        self.fine_grained = options.fine_grained_incremental
        if sources:
            sys.exit("dmypy: start/restart does not accept sources")
        if options.report_dirs:
            sys.exit("dmypy: start/restart cannot generate reports")
        if not options.incremental:
            sys.exit(
                "dmypy: start/restart should not disable incremental mode")
        if options.quick_and_dirty:
            sys.exit(
                "dmypy: start/restart should not specify quick_and_dirty mode")
        if options.use_fine_grained_cache and not options.fine_grained_incremental:
            sys.exit(
                "dmypy: fine-grained cache can only be used in experimental mode"
            )
        self.options = options
        # Remove any status file left behind by a previous server.
        if os.path.isfile(STATUS_FILE):
            os.unlink(STATUS_FILE)
        if self.fine_grained:
            options.incremental = True
            options.show_traceback = True
            if options.use_fine_grained_cache:
                options.cache_fine_grained = True  # set this so that cache options match
            else:
                # Without the fine-grained cache, write no cache at all.
                options.cache_dir = os.devnull
            # Fine-grained incremental doesn't support general partial types
            # (details in https://github.com/python/mypy/issues/4492)
            options.local_partial_types = True

    def serve(self) -> None:
        """Serve requests, synchronously (no thread or fork)."""
        try:
            sock = self.create_listening_socket()
            try:
                with open(STATUS_FILE, 'w') as f:
                    json.dump(
                        {
                            'pid': os.getpid(),
                            'sockname': sock.getsockname()
                        }, f)
                    f.write('\n')  # I like my JSON with trailing newline
                while True:
                    conn, addr = sock.accept()
                    data = receive(conn)
                    resp = {}  # type: Dict[str, Any]
                    # Initialize 'command' so the 'stop' check below is safe
                    # even when the request carried no usable command (it was
                    # previously unbound in that case, raising NameError).
                    command = None
                    if 'command' not in data:
                        resp = {'error': "No command found in request"}
                    else:
                        command = data['command']
                        if not isinstance(command, str):
                            resp = {'error': "Command is not a string"}
                        else:
                            command = data.pop('command')
                            # Dispatch only for a valid string command.
                            # Previously this call ran unconditionally,
                            # clobbering the error response above and passing
                            # a non-string command to run_command().
                            resp = self.run_command(command, data)
                    try:
                        conn.sendall(json.dumps(resp).encode('utf8'))
                    except OSError:
                        pass  # Maybe the client hung up
                    conn.close()
                    if command == 'stop':
                        sock.close()
                        sys.exit(0)
            finally:
                os.unlink(STATUS_FILE)
        finally:
            os.unlink(self.sockname)
            # With no controlling terminal, print any pending exception so it
            # can be found in the daemon's output for debugging.
            exc_info = sys.exc_info()
            if exc_info[0]:
                traceback.print_exception(*exc_info)  # type: ignore

    def create_listening_socket(self) -> socket.socket:
        """Create the socket and set it up for listening."""
        self.sockname = os.path.abspath(SOCKET_NAME)
        # Remove a stale socket file from a previous run, if present.
        if os.path.exists(self.sockname):
            os.unlink(self.sockname)
        sock = socket.socket(socket.AF_UNIX)
        sock.bind(self.sockname)
        sock.listen(1)
        return sock

    def run_command(self, command: str,
                    data: Mapping[str, object]) -> Dict[str, object]:
        """Run a specific command from the registry.

        Commands are methods named 'cmd_<command>'; the request payload is
        passed through as keyword arguments.
        """
        key = 'cmd_' + command
        method = getattr(self.__class__, key, None)
        if method is None:
            return {'error': "Unrecognized command '%s'" % command}
        else:
            return method(self, **data)

    # Command functions (run in the server via RPC).

    def cmd_status(self) -> Dict[str, object]:
        """Return daemon status."""
        res = {}  # type: Dict[str, object]
        res.update(get_meminfo())
        return res

    def cmd_stop(self) -> Dict[str, object]:
        """Stop daemon."""
        return {}

    # Sources from the most recent 'check' command; reused by 'recheck'.
    last_sources = None  # type: List[mypy.build.BuildSource]

    def cmd_check(self, files: Sequence[str]) -> Dict[str, object]:
        """Check a list of files."""
        # TODO: Move this into check(), in case one of the args is a directory.
        # Capture stdout/stderr and catch SystemExit while processing the source list.
        save_stdout = sys.stdout
        save_stderr = sys.stderr
        try:
            sys.stdout = stdout = io.StringIO()
            sys.stderr = stderr = io.StringIO()
            self.last_sources = mypy.main.create_source_list(
                files, self.options)
        except SystemExit as err:
            # Report whatever was printed while building the source list.
            return {
                'out': stdout.getvalue(),
                'err': stderr.getvalue(),
                'status': err.code
            }
        finally:
            sys.stdout = save_stdout
            sys.stderr = save_stderr
        return self.check(self.last_sources)

    def cmd_recheck(self) -> Dict[str, object]:
        """Check the same list of files we checked most recently."""
        if not self.last_sources:
            return {
                'error':
                "Command 'recheck' is only valid after a 'check' command"
            }
        return self.check(self.last_sources)

    # Needed by tests.
    last_manager = None  # type: Optional[mypy.build.BuildManager]

    def check(self,
              sources: List[mypy.build.BuildSource],
              alt_lib_path: Optional[str] = None) -> Dict[str, Any]:
        """Dispatch a check to fine-grained or default incremental mode."""
        if self.fine_grained:
            return self.check_fine_grained(sources)
        else:
            return self.check_default(sources, alt_lib_path)

    def check_default(self,
                      sources: List[mypy.build.BuildSource],
                      alt_lib_path: Optional[str] = None) -> Dict[str, Any]:
        """Check using the default (per-file) incremental mode."""
        self.last_manager = None
        with GcLogger() as gc_result:
            try:
                # saved_cache is mutated in place.
                res = mypy.build.build(sources,
                                       self.options,
                                       saved_cache=self.saved_cache,
                                       alt_lib_path=alt_lib_path)
                msgs = res.errors
                self.last_manager = res.manager  # type: Optional[mypy.build.BuildManager]
            except mypy.errors.CompileError as err:
                msgs = err.messages
        if msgs:
            msgs.append("")
            response = {'out': "\n".join(msgs), 'err': "", 'status': 1}
        else:
            response = {'out': "", 'err': "", 'status': 0}
        # Attach GC / memory / build statistics to the response.
        response.update(gc_result.get_stats())
        response.update(get_meminfo())
        if self.last_manager is not None:
            response.update(self.last_manager.stats_summary())
        return response

    def check_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Check using fine-grained incremental mode."""
        if not self.fine_grained_initialized:
            return self.initialize_fine_grained(sources)
        else:
            return self.fine_grained_increment(sources)

    def initialize_fine_grained(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Run the initial full build and set up fine-grained state."""
        self.fscache = FileSystemCache(self.options.python_version)
        self.fswatcher = FileSystemWatcher(self.fscache)
        self.update_sources(sources)
        if not self.options.use_fine_grained_cache:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()
        try:
            # TODO: alt_lib_path
            result = mypy.build.build(sources=sources, options=self.options)
        except mypy.errors.CompileError as e:
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        manager = result.manager
        graph = result.graph
        self.fine_grained_manager = mypy.server.update.FineGrainedBuildManager(
            manager, graph)
        self.fine_grained_initialized = True
        self.previous_sources = sources
        self.fscache.flush()

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if self.options.use_fine_grained_cache:
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for meta, mypyfile, type_map in manager.saved_cache.values():
                if meta.mtime is None:
                    continue
                self.fswatcher.set_file_data(
                    mypyfile.path,
                    FileData(st_mtime=float(meta.mtime),
                             st_size=meta.size,
                             md5=meta.hash))

            # Run an update
            changed = self.find_changed(sources)
            if changed:
                messages = self.fine_grained_manager.update(changed)
            self.fscache.flush()

        status = 1 if messages else 0
        self.previous_messages = messages[:]
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def fine_grained_increment(
            self, sources: List[mypy.build.BuildSource]) -> Dict[str, Any]:
        """Run an incremental fine-grained update of the changed files."""
        t0 = time.time()
        self.update_sources(sources)
        changed = self.find_changed(sources)
        t1 = time.time()
        if not changed:
            # Nothing changed -- just produce the same result as before.
            messages = self.previous_messages
        else:
            messages = self.fine_grained_manager.update(changed)
        t2 = time.time()
        self.fine_grained_manager.manager.log(
            "fine-grained increment: find_changed: {:.3f}s, update: {:.3f}s".
            format(t1 - t0, t2 - t1))
        status = 1 if messages else 0
        self.previous_messages = messages[:]
        self.previous_sources = sources
        self.fscache.flush()
        return {
            'out': ''.join(s + '\n' for s in messages),
            'err': '',
            'status': status
        }

    def update_sources(self, sources: List[mypy.build.BuildSource]) -> None:
        """Register all source paths with the file system watcher."""
        paths = [source.path for source in sources if source.path is not None]
        self.fswatcher.add_watched_paths(paths)

    def find_changed(
            self,
            sources: List[mypy.build.BuildSource]) -> List[Tuple[str, str]]:
        """Return (module, path) pairs for sources changed since last check."""
        changed_paths = self.fswatcher.find_changed()
        changed = [(source.module, source.path) for source in sources
                   if source.path in changed_paths]
        # Also pick up modules dropped from the source list whose files
        # themselves changed.
        modules = {source.module for source in sources}
        omitted = [
            source for source in self.previous_sources
            if source.module not in modules
        ]
        for source in omitted:
            path = source.path
            assert path
            # Note that a file could be removed from the list of root sources but have no changes.
            if path in changed_paths:
                changed.append((source.module, path))
        return changed

    def cmd_hang(self) -> Dict[str, object]:
        """Hang for 100 seconds, as a debug hack."""
        time.sleep(100)
        return {}
Example #34
0
class Server:
    """Mypy daemon server: answers dmypy client requests over an IPC channel."""

    # NOTE: the instance is constructed in the parent process but
    # serve() is called in the grandchild (by daemonize()).

    def __init__(self, options: Options,
                 status_file: str,
                 timeout: Optional[int] = None) -> None:
        """Initialize the server with the desired mypy flags."""
        self.options = options
        # Snapshot the options info before we muck with it, to detect changes
        self.options_snapshot = options.snapshot()
        self.timeout = timeout
        # Set by the first successful check; None means no build has run yet.
        self.fine_grained_manager = None  # type: Optional[FineGrainedBuildManager]

        # Remove a stale status file from a previous server, if any.
        if os.path.isfile(status_file):
            os.unlink(status_file)

        self.fscache = FileSystemCache()

        # The daemon always runs in fine-grained incremental mode.
        options.incremental = True
        options.fine_grained_incremental = True
        options.show_traceback = True
        if options.use_fine_grained_cache:
            # Using fine_grained_cache implies generating and caring
            # about the fine grained cache
            options.cache_fine_grained = True
        else:
            # Without the fine-grained cache, write no cache at all.
            options.cache_dir = os.devnull
        # Fine-grained incremental doesn't support general partial types
        # (details in https://github.com/python/mypy/issues/4492)
        options.local_partial_types = True
        self.status_file = status_file

    def _response_metadata(self) -> Dict[str, str]:
        """Return platform/Python-version info attached to every response."""
        py_version = '{}_{}'.format(self.options.python_version[0], self.options.python_version[1])
        return {
            'platform': self.options.platform,
            'python_version': py_version,
        }

    def serve(self) -> None:
        """Serve requests, synchronously (no thread or fork)."""
        command = None
        try:
            server = IPCServer(CONNECTION_NAME, self.timeout)
            # Publish pid and connection name so clients can find us.
            with open(self.status_file, 'w') as f:
                json.dump({'pid': os.getpid(), 'connection_name': server.connection_name}, f)
                f.write('\n')  # I like my JSON with a trailing newline
            while True:
                with server:
                    data = receive(server)
                    resp = {}  # type: Dict[str, Any]
                    if 'command' not in data:
                        resp = {'error': "No command found in request"}
                    else:
                        command = data['command']
                        if not isinstance(command, str):
                            resp = {'error': "Command is not a string"}
                        else:
                            command = data.pop('command')
                            try:
                                resp = self.run_command(command, data)
                            except Exception:
                                # If we are crashing, report the crash to the client
                                tb = traceback.format_exception(*sys.exc_info())
                                resp = {'error': "Daemon crashed!\n" + "".join(tb)}
                                resp.update(self._response_metadata())
                                server.write(json.dumps(resp).encode('utf8'))
                                raise
                    try:
                        resp.update(self._response_metadata())
                        server.write(json.dumps(resp).encode('utf8'))
                    except OSError:
                        pass  # Maybe the client hung up
                    if command == 'stop':
                        reset_global_state()
                        sys.exit(0)
        finally:
            # If the final command is something other than a clean
            # stop, remove the status file. (We can't just
            # simplify the logic and always remove the file, since
            # that could cause us to remove a future server's
            # status file.)
            if command != 'stop':
                os.unlink(self.status_file)
            try:
                server.cleanup()  # try to remove the socket dir on Linux
            except OSError:
                pass
            # Print any pending non-SystemExit exception for post-mortem
            # debugging (the daemon has no controlling terminal).
            exc_info = sys.exc_info()
            if exc_info[0] and exc_info[0] is not SystemExit:
                traceback.print_exception(*exc_info)

    def run_command(self, command: str, data: Mapping[str, object]) -> Dict[str, object]:
        """Run a specific command from the registry.

        Commands are methods named 'cmd_<command>'; the request payload is
        passed through as keyword arguments.
        """
        key = 'cmd_' + command
        method = getattr(self.__class__, key, None)
        if method is None:
            return {'error': "Unrecognized command '%s'" % command}
        else:
            return method(self, **data)

    # Command functions (run in the server via RPC).

    def cmd_status(self, fswatcher_dump_file: Optional[str] = None) -> Dict[str, object]:
        """Return daemon status."""
        res = {}  # type: Dict[str, object]
        res.update(get_meminfo())
        # Optionally dump the watcher's file data for debugging/inspection.
        if fswatcher_dump_file:
            data = self.fswatcher.dump_file_data() if hasattr(self, 'fswatcher') else {}
            # Using .dumps and then writing was noticably faster than using dump
            s = json.dumps(data)
            with open(fswatcher_dump_file, 'w') as f:
                f.write(s)
        return res

    def cmd_stop(self) -> Dict[str, object]:
        """Stop daemon."""
        # We need to remove the status file *before* we complete the
        # RPC. Otherwise a race condition exists where a subsequent
        # command can see a status file from a dying server and think
        # it is a live one.
        os.unlink(self.status_file)
        return {}

    def cmd_run(self, version: str, args: Sequence[str]) -> Dict[str, object]:
        """Check a list of files, triggering a restart if needed."""
        try:
            # Process options can exit on improper arguments, so we need to catch that and
            # capture stderr so the client can report it
            stderr = io.StringIO()
            stdout = io.StringIO()
            with redirect_stderr(stderr):
                with redirect_stdout(stdout):
                    sources, options = mypy.main.process_options(
                        ['-i'] + list(args),
                        require_targets=True,
                        server_options=True,
                        fscache=self.fscache,
                        program='mypy-daemon',
                        header=argparse.SUPPRESS)
            # Signal that we need to restart if the options have changed
            if self.options_snapshot != options.snapshot():
                return {'restart': 'configuration changed'}
            if __version__ != version:
                return {'restart': 'mypy version changed'}
            if self.fine_grained_manager:
                # A change in the active plugins also requires a restart.
                manager = self.fine_grained_manager.manager
                start_plugins_snapshot = manager.plugins_snapshot
                _, current_plugins_snapshot = mypy.build.load_plugins(options, manager.errors)
                if current_plugins_snapshot != start_plugins_snapshot:
                    return {'restart': 'plugins changed'}
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        except SystemExit as e:
            return {'out': stdout.getvalue(), 'err': stderr.getvalue(), 'status': e.code}
        return self.check(sources)

    def cmd_check(self, files: Sequence[str]) -> Dict[str, object]:
        """Check a list of files."""
        try:
            sources = create_source_list(files, self.options, self.fscache)
        except InvalidSourceList as err:
            return {'out': '', 'err': str(err), 'status': 2}
        return self.check(sources)

    def cmd_recheck(self,
                    remove: Optional[List[str]] = None,
                    update: Optional[List[str]] = None) -> Dict[str, object]:
        """Check the same list of files we checked most recently.

        If remove/update is given, they modify the previous list;
        if all are None, stat() is called for each file in the previous list.
        """
        t0 = time.time()
        if not self.fine_grained_manager:
            return {'error': "Command 'recheck' is only valid after a 'check' command"}
        sources = self.previous_sources
        if remove:
            removals = set(remove)
            sources = [s for s in sources if s.path and s.path not in removals]
        if update:
            # Sources that are new to this run need to be created and added.
            known = {s.path for s in sources if s.path}
            added = [p for p in update if p not in known]
            try:
                added_sources = create_source_list(added, self.options, self.fscache)
            except InvalidSourceList as err:
                return {'out': '', 'err': str(err), 'status': 2}
            sources = sources + added_sources  # Make a copy!
        t1 = time.time()
        manager = self.fine_grained_manager.manager
        manager.log("fine-grained increment: cmd_recheck: {:.3f}s".format(t1 - t0))
        res = self.fine_grained_increment(sources, remove, update)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def check(self, sources: List[BuildSource]) -> Dict[str, Any]:
        """Check using fine-grained incremental mode."""
        # The first check does a full build; later checks are incremental.
        if not self.fine_grained_manager:
            res = self.initialize_fine_grained(sources)
        else:
            res = self.fine_grained_increment(sources)
        self.fscache.flush()
        self.update_stats(res)
        return res

    def update_stats(self, res: Dict[str, Any]) -> None:
        """Attach (and reset) the build manager's stats to a response."""
        if self.fine_grained_manager:
            manager = self.fine_grained_manager.manager
            manager.dump_stats()
            res['stats'] = manager.stats
            manager.stats = {}

    def initialize_fine_grained(self, sources: List[BuildSource]) -> Dict[str, Any]:
        """Run the initial build and set up fine-grained incremental state."""
        self.fswatcher = FileSystemWatcher(self.fscache)
        t0 = time.time()
        self.update_sources(sources)
        t1 = time.time()
        try:
            result = mypy.build.build(sources=sources,
                                      options=self.options,
                                      fscache=self.fscache)
        except mypy.errors.CompileError as e:
            # Blocking errors: report them on the stream build() would use.
            output = ''.join(s + '\n' for s in e.messages)
            if e.use_stdout:
                out, err = output, ''
            else:
                out, err = '', output
            return {'out': out, 'err': err, 'status': 2}
        messages = result.errors
        self.fine_grained_manager = FineGrainedBuildManager(result)
        self.previous_sources = sources

        # If we are using the fine-grained cache, build hasn't actually done
        # the typechecking on the updated files yet.
        # Run a fine-grained update starting from the cached data
        if result.used_cache:
            t2 = time.time()
            # Pull times and hashes out of the saved_cache and stick them into
            # the fswatcher, so we pick up the changes.
            for state in self.fine_grained_manager.graph.values():
                meta = state.meta
                if meta is None: continue
                assert state.path is not None
                self.fswatcher.set_file_data(
                    state.path,
                    FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash))

            changed, removed = self.find_changed(sources)

            # Find anything that has had its dependency list change
            for state in self.fine_grained_manager.graph.values():
                if not state.is_fresh():
                    assert state.path is not None
                    changed.append((state.id, state.path))

            t3 = time.time()
            # Run an update
            messages = self.fine_grained_manager.update(changed, removed)
            t4 = time.time()
            # Record phase timings for the stats attached to the response.
            self.fine_grained_manager.manager.add_stats(
                update_sources_time=t1 - t0,
                build_time=t2 - t1,
                find_changes_time=t3 - t2,
                fg_update_time=t4 - t3,
                files_changed=len(removed) + len(changed))
        else:
            # Stores the initial state of sources as a side effect.
            self.fswatcher.find_changed()

        if MEM_PROFILE:
            from mypy.memprofile import print_memory_profile
            print_memory_profile(run_gc=False)

        status = 1 if messages else 0
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}

    def fine_grained_increment(self,
                               sources: List[BuildSource],
                               remove: Optional[List[str]] = None,
                               update: Optional[List[str]] = None,
                               ) -> Dict[str, Any]:
        """Run an incremental fine-grained update.

        remove/update, when given, name files known to have been removed or
        changed, which avoids stat()-ing every watched file.
        """
        assert self.fine_grained_manager is not None
        manager = self.fine_grained_manager.manager

        t0 = time.time()
        if remove is None and update is None:
            # Use the fswatcher to determine which files were changed
            # (updated or added) or removed.
            self.update_sources(sources)
            changed, removed = self.find_changed(sources)
        else:
            # Use the remove/update lists to update fswatcher.
            # This avoids calling stat() for unchanged files.
            changed, removed = self.update_changed(sources, remove or [], update or [])
        # Refresh the search paths, since the set of sources may have changed.
        manager.search_paths = compute_search_paths(sources, manager.options, manager.data_dir)
        t1 = time.time()
        manager.log("fine-grained increment: find_changed: {:.3f}s".format(t1 - t0))
        messages = self.fine_grained_manager.update(changed, removed)
        t2 = time.time()
        manager.log("fine-grained increment: update: {:.3f}s".format(t2 - t1))
        manager.add_stats(
            find_changes_time=t1 - t0,
            fg_update_time=t2 - t1,
            files_changed=len(removed) + len(changed))

        status = 1 if messages else 0
        self.previous_sources = sources
        return {'out': ''.join(s + '\n' for s in messages), 'err': '', 'status': status}

    def update_sources(self, sources: List[BuildSource]) -> None:
        """Register all source paths with the file system watcher."""
        paths = [source.path for source in sources if source.path is not None]
        self.fswatcher.add_watched_paths(paths)

    def update_changed(self,
                       sources: List[BuildSource],
                       remove: List[str],
                       update: List[str],
                       ) -> ChangesAndRemovals:
        """Classify changes using explicit remove/update path lists."""
        changed_paths = self.fswatcher.update_changed(remove, update)
        return self._find_changed(sources, changed_paths)

    def find_changed(self, sources: List[BuildSource]) -> ChangesAndRemovals:
        """Classify changes by stat()-ing every watched file."""
        changed_paths = self.fswatcher.find_changed()
        return self._find_changed(sources, changed_paths)

    def _find_changed(self, sources: List[BuildSource],
                      changed_paths: AbstractSet[str]) -> ChangesAndRemovals:
        """Split changed_paths into changed and removed (module, path) lists."""
        # Find anything that has been added or modified
        changed = [(source.module, source.path)
                   for source in sources
                   if source.path and source.path in changed_paths]

        # Now find anything that has been removed from the build
        modules = {source.module for source in sources}
        omitted = [source for source in self.previous_sources if source.module not in modules]
        removed = []
        for source in omitted:
            path = source.path
            assert path
            removed.append((source.module, path))

        # Find anything that has had its module path change because of added or removed __init__s
        last = {s.path: s.module for s in self.previous_sources}
        for s in sources:
            assert s.path
            if s.path in last and last[s.path] != s.module:
                # Mark it as removed from its old name and changed at its new name
                removed.append((last[s.path], s.path))
                changed.append((s.module, s.path))

        return changed, removed

    def cmd_suggest(self,
                    function: str,
                    callsites: bool,
                    # We'd like to just use **kwargs here and save some duplication but
                    # mypyc doesn't support it yet...
                    json: bool,
                    no_errors: bool,
                    no_any: bool) -> Dict[str, object]:
        """Suggest a signature for a function."""
        if not self.fine_grained_manager:
            return {'error': "Command 'suggest' is only valid after a 'check' command"}
        engine = SuggestionEngine(self.fine_grained_manager, json, no_errors, no_any)
        try:
            if callsites:
                out = engine.suggest_callsites(function)
            else:
                out = engine.suggest(function)
        except SuggestionFailure as err:
            return {'error': str(err)}
        else:
            # Ensure the output is non-empty and newline-terminated.
            if not out:
                out = "No suggestions\n"
            elif not out.endswith("\n"):
                out += "\n"
            return {'out': out, 'err': "", 'status': 0}
        finally:
            self.fscache.flush()

    def cmd_hang(self) -> Dict[str, object]:
        """Hang for 100 seconds, as a debug hack."""
        time.sleep(100)
        return {}
Example #35
0
def main(script_path: Optional[str],
         stdout: TextIO,
         stderr: TextIO,
         args: Optional[List[str]] = None,
         ) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        stdout: Stream for regular (non-serious) error output.
        stderr: Stream for serious errors and warnings.
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    util.check_python_version('mypy')
    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    # Raise the recursion limit well above the default; presumably needed for
    # deeply nested code during analysis -- confirm against mypy history.
    sys.setrecursionlimit(2 ** 14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, stdout=stdout, stderr=stderr,
                                       fscache=fscache)

    # Accumulates every message printed, for the junit report and the
    # error-summary/exit-code logic below.
    messages = []
    formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes)

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        """Record new messages and print them to the appropriate stream."""
        if options.pretty:
            new_messages = formatter.fit_in_terminal(new_messages)
        messages.extend(new_messages)
        # Serious (blocking/internal) errors go to stderr, the rest to stdout.
        f = stderr if serious else stdout
        try:
            for msg in new_messages:
                if options.color_output:
                    msg = formatter.colorize(msg)
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            # Our consumer went away; exit with the same code used for
            # blocking errors below.
            sys.exit(2)

    serious = False
    blockers = False
    res = None
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = build.build(sources, options, None, flush_errors, fscache, stdout, stderr)
    except CompileError as e:
        # Blocking errors were already reported via flush_errors; here we only
        # record how they should affect the exit code and junit output.
        blockers = True
        if not e.use_stdout:
            serious = True
    # NOTE(review): the warning is suppressed under incremental mode,
    # presumably because unused_configs is unreliable there -- confirm.
    if options.warn_unused_configs and options.unused_configs and not options.incremental:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file,
               ", ".join("[mypy-%s]" % glob for glob in options.per_module_options.keys()
                         if glob in options.unused_configs)),
              file=stderr)
    maybe_write_junit_xml(time.time() - t0, serious, messages, options)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    # Exit code: 2 for blocking errors, 1 for ordinary errors, 0 for success.
    code = 0
    if messages:
        code = 2 if blockers else 1
    if options.error_summary:
        if messages:
            n_errors, n_files = util.count_stats(messages)
            if n_errors:
                stdout.write(formatter.format_error(n_errors, n_files, len(sources),
                                                    options.color_output) + '\n')
        else:
            stdout.write(formatter.format_success(len(sources),
                                                  options.color_output) + '\n')
        stdout.flush()
    if options.fast_exit:
        # Exit without freeing objects -- it's faster.
        #
        # NOTE: We don't flush all open files on exit (or run other destructors)!
        util.hard_exit(code)
    elif code:
        sys.exit(code)

    # HACK: keep res alive so that mypyc won't free it before the hard_exit
    list([res])
Example #36
0
def main(script_path: Optional[str], args: Optional[List[str]] = None) -> None:
    """Main entry point to the type checker.

    Args:
        script_path: Path to the 'mypy' script (used for finding data files).
        args: Custom command-line arguments.  If not given, sys.argv[1:] will
        be used.
    """
    # Check for known bad Python versions.
    if sys.version_info[:2] < (3, 4):
        sys.exit("Running mypy with Python 3.3 or lower is not supported; "
                 "please upgrade to 3.4 or newer")
    if sys.version_info[:3] == (3, 5, 0):
        sys.exit("Running mypy with Python 3.5.0 is not supported; "
                 "please upgrade to 3.5.1 or newer")

    t0 = time.time()
    # To log stat() calls: os.stat = stat_proxy
    if script_path:
        bin_dir = find_bin_directory(script_path)  # type: Optional[str]
    else:
        bin_dir = None
    # Raise the recursion limit above the default; presumably needed for
    # deeply nested code during analysis -- confirm against mypy history.
    sys.setrecursionlimit(2**14)
    if args is None:
        args = sys.argv[1:]

    fscache = FileSystemCache()
    sources, options = process_options(args, fscache=fscache)

    # Accumulates every message printed, for the junit report and the
    # exit-code logic below.
    messages = []

    def flush_errors(new_messages: List[str], serious: bool) -> None:
        """Record new messages and print them to the appropriate stream."""
        messages.extend(new_messages)
        # Serious (blocking/internal) errors go to stderr, the rest to stdout.
        f = sys.stderr if serious else sys.stdout
        try:
            for msg in new_messages:
                f.write(msg + '\n')
            f.flush()
        except BrokenPipeError:
            # Our consumer went away; exit with the same code used for
            # blocking errors below.
            sys.exit(2)

    serious = False
    blockers = False
    try:
        # Keep a dummy reference (res) for memory profiling below, as otherwise
        # the result could be freed.
        res = type_check_only(sources, bin_dir, options, flush_errors,
                              fscache)  # noqa
    except CompileError as e:
        # Blocking errors were already reported via flush_errors; here we only
        # record how they should affect the exit code and junit output.
        blockers = True
        if not e.use_stdout:
            serious = True
    if options.warn_unused_configs and options.unused_configs:
        print("Warning: unused section(s) in %s: %s" %
              (options.config_file, ", ".join(
                  "[mypy-%s]" % glob
                  for glob in options.per_module_options.keys()
                  if glob in options.unused_configs)),
              file=sys.stderr)
    if options.junit_xml:
        t1 = time.time()
        util.write_junit_xml(t1 - t0, serious, messages, options.junit_xml)

    if MEM_PROFILE:
        from mypy.memprofile import print_memory_profile
        print_memory_profile()

    # Exit code: 2 for blocking errors, 1 for ordinary errors.
    if messages:
        code = 2 if blockers else 1
        sys.exit(code)