Example 1
File: update.py  Project: rkday/mypy
def update_single_isolated(module: str, path: str, manager: BuildManager,
                           previous_modules: Dict[str, str],
                           graph: Graph) -> UpdateResult:
    """Build a new version of one changed module only.

    Don't propagate changes to elsewhere in the program. Raise CompileError on
    encountering a blocking error.

    Args:
        module: Changed module (modified, created or deleted)
        path: Path of the changed module
        manager: Build manager
        graph: Build graph

    Returns a named tuple describing the result (see above for details).
    """
    if module in manager.modules:
        assert_equivalent_paths(path, manager.modules[module].path)
    else:
        manager.log_fine_grained('new module %r' % module)

    old_modules = dict(manager.modules)
    sources = get_sources(previous_modules, [(module, path)])

    if module in manager.missing_modules:
        manager.missing_modules.remove(module)

    try:
        if module in graph:
            del graph[module]
        load_graph(sources, manager, graph)
    except CompileError as err:
        # Parse error somewhere in the program -- a blocker
        assert err.module_with_blocker
        if err.module_with_blocker != module:
            # Blocker is in a fresh module. Delete the state of the original target module
            # since it will be stale.
            #
            # TODO: It would be more efficient to store the original target module
            path = manager.modules[module].path
            del manager.modules[module]
            remaining_modules = [(module, path)]
        else:
            remaining_modules = []
        return BlockedUpdate(err.module_with_blocker, path, remaining_modules,
                             err.messages)

    if not os.path.isfile(path):
        delete_module(module, graph, manager)
        return NormalUpdate(module, path, [], None)

    # Find any other modules brought in by imports.
    changed_modules = get_all_changed_modules(module, path, previous_modules,
                                              graph)
    # If there are multiple modules to process, only process one of them and return
    # the remaining ones to the caller.
    if len(changed_modules) > 1:
        # As an optimization, look for a module that imports no other changed modules.
        module, path = find_relative_leaf_module(changed_modules, graph)
        changed_modules.remove((module, path))
        remaining_modules = changed_modules
        # The remaining modules haven't been processed yet so drop them.
        for id, _ in remaining_modules:
            if id in old_modules:
                manager.modules[id] = old_modules[id]
            else:
                del manager.modules[id]
            del graph[id]
        manager.log_fine_grained('--> %r (newly imported)' % module)
    else:
        remaining_modules = []

    state = graph[module]

    # Process the changed file.
    state.parse_file()
    # TODO: state.fix_suppressed_dependencies()?
    try:
        state.semantic_analysis()
    except CompileError as err:
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        manager.modules.clear()
        manager.modules.update(old_modules)
        del graph[module]
        return BlockedUpdate(module, path, remaining_modules, err.messages)
    state.semantic_analysis_pass_three()
    state.semantic_analysis_apply_patches()

    # Merge old and new ASTs.
    assert state.tree is not None, "file must be at least parsed"
    new_modules = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, old_modules, new_modules)

    # Perform type checking.
    state.type_checker().reset()
    state.type_check_first_pass()
    state.type_check_second_pass()
    state.compute_fine_grained_deps()
    state.finish_passes()
    # TODO: state.write_cache()?
    # TODO: state.mark_as_rechecked()?

    graph[module] = state

    return NormalUpdate(module, path, remaining_modules, state.tree)
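
The NormalUpdate and BlockedUpdate values returned above are named tuples defined elsewhere in update.py (the docstring's "see above for details"). Their definitions are not shown on this page; the sketch below is inferred only from the call sites in this example (module id, path, remaining modules, and either the new tree or the blocker messages), so the real field names may differ. MypyFile is stubbed out purely to keep the sketch self-contained.

from typing import List, NamedTuple, Optional, Tuple, Union

MypyFile = object  # stand-in for mypy.nodes.MypyFile, for this sketch only


class NormalUpdate(NamedTuple):
    module: str                       # id of the module that was processed
    path: str                         # file path of that module
    remaining: List[Tuple[str, str]]  # (id, path) pairs left for the caller
    tree: Optional[MypyFile]          # new AST, or None if the module was deleted


class BlockedUpdate(NamedTuple):
    module: str                       # module containing the blocking error
    path: str
    remaining: List[Tuple[str, str]]
    messages: List[str]               # error messages from the blocker


UpdateResult = Union[NormalUpdate, BlockedUpdate]
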
Example 2
def update_module_isolated(module: str,
                           path: str,
                           manager: BuildManager,
                           previous_modules: Dict[str, str],
                           graph: Graph,
                           force_removed: bool) -> UpdateResult:
    """Build a new version of one changed module only.

    Don't propagate changes to elsewhere in the program. Raise CompileError on
    encountering a blocking error.

    Args:
        module: Changed module (modified, created or deleted)
        path: Path of the changed module
        manager: Build manager
        graph: Build graph
        force_removed: If True, consider the module removed from the build even if the
            file exists

    Returns a named tuple describing the result (see above for details).
    """
    if module not in graph:
        manager.log_fine_grained('new module %r' % module)

    if not manager.fscache.isfile(path) or force_removed:
        delete_module(module, path, graph, manager)
        return NormalUpdate(module, path, [], None)

    sources = get_sources(manager.fscache, previous_modules, [(module, path)])

    if module in manager.missing_modules:
        manager.missing_modules.remove(module)

    orig_module = module
    orig_state = graph.get(module)
    orig_tree = manager.modules.get(module)

    def restore(ids: List[str]) -> None:
        # For each of the modules in ids, restore that id's old
        # manager.modules and graph entries. (Except for the original
        # module, this means deleting them.)
        for id in ids:
            if id == orig_module and orig_tree:
                manager.modules[id] = orig_tree
            elif id in manager.modules:
                del manager.modules[id]
            if id == orig_module and orig_state:
                graph[id] = orig_state
            elif id in graph:
                del graph[id]

    new_modules = []  # type: List[State]
    try:
        if module in graph:
            del graph[module]
        load_graph(sources, manager, graph, new_modules)
    except CompileError as err:
        # Parse error somewhere in the program -- a blocker
        assert err.module_with_blocker
        restore([module] + [st.id for st in new_modules])
        return BlockedUpdate(err.module_with_blocker, path, [], err.messages)

    # Reparsing the file may have brought in dependencies that we
    # didn't have before. Make sure that they are loaded to restore
    # the invariant that a module having a loaded tree implies that
    # its dependencies do as well.
    ensure_trees_loaded(manager, graph, graph[module].dependencies)

    # Find any other modules brought in by imports.
    changed_modules = [(st.id, st.xpath) for st in new_modules]

    # If there are multiple modules to process, only process one of them and return
    # the remaining ones to the caller.
    if len(changed_modules) > 1:
        # As an optimization, look for a module that imports no other changed modules.
        module, path = find_relative_leaf_module(changed_modules, graph)
        changed_modules.remove((module, path))
        remaining_modules = changed_modules
        # The remaining modules haven't been processed yet so drop them.
        restore([id for id, _ in remaining_modules])
        manager.log_fine_grained('--> %r (newly imported)' % module)
    else:
        remaining_modules = []

    state = graph[module]

    # Process the changed file.
    state.parse_file()
    assert state.tree is not None, "file must be at least parsed"
    t0 = time.time()
    # TODO: state.fix_suppressed_dependencies()?
    try:
        semantic_analysis_for_scc(graph, [state.id], manager.errors)
    except CompileError as err:
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        restore([module])
        return BlockedUpdate(module, path, remaining_modules, err.messages)

    # Merge old and new ASTs.
    new_modules_dict = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, {orig_module: orig_tree}, new_modules_dict)

    t1 = time.time()
    # Perform type checking.
    state.type_checker().reset()
    state.type_check_first_pass()
    state.type_check_second_pass()
    t2 = time.time()
    state.finish_passes()
    t3 = time.time()
    manager.add_stats(
        semanal_time=t1 - t0,
        typecheck_time=t2 - t1,
        finish_passes_time=t3 - t2)

    graph[module] = state

    return NormalUpdate(module, path, remaining_modules, state.tree)
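
Because update_module_isolated processes at most one changed module per call and hands the rest back in the remaining field, its caller has to loop until nothing is left. The driver below is a simplified, hypothetical sketch (not mypy's actual FineGrainedBuildManager code); it assumes the result-type field names sketched after Example 1 and simply prints blocker messages instead of reporting them properly.

def process_changed_modules(manager, changed, previous_modules, graph):
    # type: (BuildManager, List[Tuple[str, str]], Dict[str, str], Graph) -> None
    """Hypothetical driver: keep calling update_module_isolated until done."""
    work = list(changed)
    while work:
        module, path = work.pop()
        result = update_module_isolated(module, path, manager,
                                        previous_modules, graph,
                                        force_removed=False)
        if isinstance(result, BlockedUpdate):
            # A blocking error aborts the whole update; the remaining
            # modules stay unprocessed.
            print('\n'.join(result.messages))
            return
        # Freshly imported modules that were skipped get queued for later.
        work.extend(result.remaining)
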
Example 3
def update_module_isolated(module: str,
                           path: str,
                           manager: BuildManager,
                           previous_modules: Dict[str, str],
                           graph: Graph,
                           force_removed: bool) -> UpdateResult:
    """Build a new version of one changed module only.

    Don't propagate changes to elsewhere in the program. Raise CompileError on
    encountering a blocking error.

    Args:
        module: Changed module (modified, created or deleted)
        path: Path of the changed module
        manager: Build manager
        graph: Build graph
        force_removed: If True, consider the module removed from the build even if the
            file exists

    Returns a named tuple describing the result (see above for details).
    """
    if module in manager.modules:
        assert_equivalent_paths(path, manager.modules[module].path)
    else:
        manager.log_fine_grained('new module %r' % module)

    if not manager.fscache.isfile(path) or force_removed:
        delete_module(module, graph, manager)
        return NormalUpdate(module, path, [], None)

    old_modules = dict(manager.modules)
    sources = get_sources(manager.fscache, previous_modules, [(module, path)])

    if module in manager.missing_modules:
        manager.missing_modules.remove(module)

    try:
        if module in graph:
            del graph[module]
        load_graph(sources, manager, graph)
    except CompileError as err:
        # Parse error somewhere in the program -- a blocker
        assert err.module_with_blocker
        if err.module_with_blocker != module:
            # Blocker is in a fresh module. Delete the state of the original target module
            # since it will be stale.
            #
            # TODO: It would be more efficient to store the original target module
            path = manager.modules[module].path
            del manager.modules[module]
            remaining_modules = [(module, path)]
        else:
            remaining_modules = []
        return BlockedUpdate(err.module_with_blocker, path, remaining_modules, err.messages)

    # Find any other modules brought in by imports.
    changed_modules = get_all_changed_modules(module, path, previous_modules, graph)
    # If there are multiple modules to process, only process one of them and return
    # the remaining ones to the caller.
    if len(changed_modules) > 1:
        # As an optimization, look for a module that imports no other changed modules.
        module, path = find_relative_leaf_module(changed_modules, graph)
        changed_modules.remove((module, path))
        remaining_modules = changed_modules
        # The remaining modules haven't been processed yet so drop them.
        for id, _ in remaining_modules:
            if id in old_modules:
                manager.modules[id] = old_modules[id]
            else:
                del manager.modules[id]
            del graph[id]
        manager.log_fine_grained('--> %r (newly imported)' % module)
    else:
        remaining_modules = []

    state = graph[module]

    # Process the changed file.
    state.parse_file()
    # TODO: state.fix_suppressed_dependencies()?
    try:
        state.semantic_analysis()
    except CompileError as err:
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        manager.modules.clear()
        manager.modules.update(old_modules)
        del graph[module]
        return BlockedUpdate(module, path, remaining_modules, err.messages)
    state.semantic_analysis_pass_three()
    state.semantic_analysis_apply_patches()

    # Merge old and new ASTs.
    assert state.tree is not None, "file must be at least parsed"
    new_modules = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, old_modules, new_modules)

    # Perform type checking.
    state.type_checker().reset()
    state.type_check_first_pass()
    state.type_check_second_pass()
    state.compute_fine_grained_deps()
    state.finish_passes()

    graph[module] = state

    return NormalUpdate(module, path, remaining_modules, state.tree)
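
Examples 1, 2, 3 and 5 all call find_relative_leaf_module "as an optimization" to pick a changed module that imports none of the other changed modules. Its body is not shown on this page; the sketch below is only a guess based on that comment and on the graph[module].dependencies attribute used in Example 2, not the actual implementation.

def find_relative_leaf_module(modules, graph):
    # type: (List[Tuple[str, str]], Graph) -> Tuple[str, str]
    """Sketch: return a (module, path) pair that imports no other entry.

    Falls back to the first entry if every candidate imports another
    changed module (e.g. an import cycle among the changed modules).
    """
    ids = {id for id, _ in modules}
    for id, path in modules:
        # graph[id].dependencies is assumed to list the ids this module imports.
        if not ids & set(graph[id].dependencies):
            return id, path
    return modules[0]
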
Example 4
def build_incremental_step(
        manager: BuildManager,
        changed_modules: List[Tuple[str, str]],
        graph: Dict[str, State]) -> Tuple[Dict[str, Optional[MypyFile]], Graph]:
    """Build new versions of changed modules only.

    Raise CompileError on encountering a blocking error.

    Return the new ASTs for the changed modules and the entire build graph.
    """
    # TODO: Handle multiple changed modules per step
    assert len(changed_modules) == 1
    id, path = changed_modules[0]
    if id in manager.modules:
        path1 = os.path.normpath(path)
        path2 = os.path.normpath(manager.modules[id].path)
        assert path1 == path2, '%s != %s' % (path1, path2)

    old_modules = dict(manager.modules)

    sources = get_sources(graph, changed_modules)
    changed_set = {id for id, _ in changed_modules}

    invalidate_stale_cache_entries(manager.saved_cache, changed_modules)

    if not os.path.isfile(path):
        graph = delete_module(id, graph, manager)
        return {id: None}, graph

    old_graph = graph
    manager.missing_modules = set()
    graph = load_graph(sources, manager)

    # Find any other modules brought in by imports.
    for st in graph.values():
        if st.id not in old_graph and st.id not in changed_set:
            changed_set.add(st.id)
            assert st.path
            changed_modules.append((st.id, st.path))
    # TODO: Handle multiple changed modules per step
    assert len(changed_modules) == 1, changed_modules

    state = graph[id]

    # Parse file and run first pass of semantic analysis.
    state.parse_file()

    # TODO: state.fix_suppressed_dependencies()?

    # Run remaining passes of semantic analysis.
    try:
        state.semantic_analysis()
    except CompileError as err:
        # TODO: What if there are multiple changed modules?
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        manager.modules.clear()
        manager.modules.update(old_modules)
        raise err
    state.semantic_analysis_pass_three()
    state.semantic_analysis_apply_patches()

    # Merge old and new ASTs.
    assert state.tree is not None, "file must be at least parsed"
    new_modules = {id: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, old_modules, new_modules)

    # Perform type checking.
    state.type_check_first_pass()
    # TODO: state.type_check_second_pass()?
    state.finish_passes()
    # TODO: state.write_cache()?
    # TODO: state.mark_as_rechecked()?
    # TODO: Store new State in graph, as it has updated dependencies etc.

    graph[id] = state

    return new_modules, graph
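
Example 4 checks path equivalence inline with os.path.normpath; this is the same check that Examples 1 and 3 delegate to assert_equivalent_paths. A minimal sketch of that helper, mirroring Example 4's inline code (the real helper may do more, e.g. case or symlink handling), could be:

import os


def assert_equivalent_paths(path1, path2):
    # type: (str, str) -> None
    """Sketch: fail loudly if two paths don't match after normalization."""
    p1 = os.path.normpath(path1)
    p2 = os.path.normpath(path2)
    assert p1 == p2, '%s != %s' % (p1, p2)
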
Example 5
def update_module_isolated(module: str,
                           path: str,
                           manager: BuildManager,
                           previous_modules: Dict[str, str],
                           graph: Graph,
                           force_removed: bool) -> UpdateResult:
    """Build a new version of one changed module only.

    Don't propagate changes to elsewhere in the program. Raise CompileError on
    encountering a blocking error.

    Args:
        module: Changed module (modified, created or deleted)
        path: Path of the changed module
        manager: Build manager
        graph: Build graph
        force_removed: If True, consider the module removed from the build even if the
            file exists

    Returns a named tuple describing the result (see above for details).
    """
    if module not in graph:
        manager.log_fine_grained('new module %r' % module)

    if not manager.fscache.isfile(path) or force_removed:
        delete_module(module, path, graph, manager)
        return NormalUpdate(module, path, [], None)

    sources = get_sources(manager.fscache, previous_modules, [(module, path)])

    if module in manager.missing_modules:
        manager.missing_modules.remove(module)

    orig_module = module
    orig_state = graph.get(module)
    orig_tree = manager.modules.get(module)

    def restore(ids: List[str]) -> None:
        # For each of the modules in ids, restore that id's old
        # manager.modules and graph entries. (Except for the original
        # module, this means deleting them.)
        for id in ids:
            if id == orig_module and orig_tree:
                manager.modules[id] = orig_tree
            elif id in manager.modules:
                del manager.modules[id]
            if id == orig_module and orig_state:
                graph[id] = orig_state
            elif id in graph:
                del graph[id]

    new_modules = []  # type: List[State]
    try:
        if module in graph:
            del graph[module]
        load_graph(sources, manager, graph, new_modules)
    except CompileError as err:
        # Parse error somewhere in the program -- a blocker
        assert err.module_with_blocker
        restore([module] + [st.id for st in new_modules])
        return BlockedUpdate(err.module_with_blocker, path, [], err.messages)

    # Reparsing the file may have brought in dependencies that we
    # didn't have before. Make sure that they are loaded to restore
    # the invariant that a module having a loaded tree implies that
    # its dependencies do as well.
    ensure_trees_loaded(manager, graph, graph[module].dependencies)

    # Find any other modules brought in by imports.
    changed_modules = [(st.id, st.xpath) for st in new_modules]

    # If there are multiple modules to process, only process one of them and return
    # the remaining ones to the caller.
    if len(changed_modules) > 1:
        # As an optimization, look for a module that imports no other changed modules.
        module, path = find_relative_leaf_module(changed_modules, graph)
        changed_modules.remove((module, path))
        remaining_modules = changed_modules
        # The remaining modules haven't been processed yet so drop them.
        restore([id for id, _ in remaining_modules])
        manager.log_fine_grained('--> %r (newly imported)' % module)
    else:
        remaining_modules = []

    state = graph[module]

    # Process the changed file.
    state.parse_file()
    assert state.tree is not None, "file must be at least parsed"
    t0 = time.time()
    # TODO: state.fix_suppressed_dependencies()?
    if module == 'typing':
        # We need to manually add typing aliases to builtins, like we
        # do in process_stale_scc. Because this can't be done until
        # builtins is also loaded, there isn't an obvious way to
        # refactor this.
        manager.semantic_analyzer.add_builtin_aliases(state.tree)
    try:
        state.semantic_analysis()
    except CompileError as err:
        # There was a blocking error, so module AST is incomplete. Restore old modules.
        restore([module])
        return BlockedUpdate(module, path, remaining_modules, err.messages)
    state.semantic_analysis_pass_three()
    state.semantic_analysis_apply_patches()

    # Merge old and new ASTs.
    new_modules_dict = {module: state.tree}  # type: Dict[str, Optional[MypyFile]]
    replace_modules_with_new_variants(manager, graph, {orig_module: orig_tree}, new_modules_dict)

    t1 = time.time()
    # Perform type checking.
    state.type_checker().reset()
    state.type_check_first_pass()
    state.type_check_second_pass()
    t2 = time.time()
    state.compute_fine_grained_deps()
    t3 = time.time()
    state.finish_passes()
    t4 = time.time()
    manager.add_stats(
        semanal_time=t1 - t0,
        typecheck_time=t2 - t1,
        deps_time=t3 - t2,
        finish_passes_time=t4 - t3)

    graph[module] = state

    return NormalUpdate(module, path, remaining_modules, state.tree)
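
Every example builds its source list with get_sources before calling load_graph, but that helper is not shown either, and its signature changes between the older (Examples 1 and 4) and newer (Examples 2, 3 and 5) versions. Assuming mypy's BuildSource(path, module, text) constructor and a FileSystemCache object with an isfile method are in scope, a sketch of the newer, fscache-aware variant might look like this; previous_modules is accepted only to match the call sites and is unused here.

def get_sources(fscache, previous_modules, changed_modules):
    # type: (FileSystemCache, Dict[str, str], List[Tuple[str, str]]) -> List[BuildSource]
    """Sketch: turn (id, path) pairs into BuildSource items, skipping
    files that no longer exist on disk. Not the actual mypy implementation."""
    sources = []
    for id, path in changed_modules:
        if fscache.isfile(path):
            sources.append(BuildSource(path, id, None))
    return sources
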