Example #1
    def run_case(self, testcase: DataDrivenTestCase) -> None:
        first_src = '\n'.join(testcase.input)
        files_dict = dict(testcase.files)
        second_src = files_dict['tmp/next.py']

        messages1, files1 = self.build(first_src)
        messages2, files2 = self.build(second_src)

        a = []
        if messages1:
            a.extend(messages1)
        if messages2:
            a.append('== next ==')
            a.extend(messages2)

        assert files1 is not None and files2 is not None, (
            'cases where CompileError'
            ' occurred should not be run')
        prefix = '__main__'
        snapshot1 = snapshot_symbol_table(prefix, files1['__main__'].names)
        snapshot2 = snapshot_symbol_table(prefix, files2['__main__'].names)
        diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2)
        for trigger in sorted(diff):
            a.append(trigger)

        assert_string_arrays_equal(
            testcase.output, a,
            'Invalid output ({}, line {})'.format(testcase.file,
                                                  testcase.line))
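
This harness builds the program twice and diffs snapshots of the '__main__' symbol table, so a case's expected output is the first build's messages, then the second build's messages after an '== next ==' separator, then the sorted names of changed definitions. A plausible sketch of such a data-driven case, assuming mypy's usual [case]/[file]/[out] test-data markers (the case name and contents are hypothetical):

[case testChangeModuleAttributeType]
x = 1
[file next.py]
x = ''
[out]
__main__.x
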
Example #2
    def run_case(self, testcase: DataDrivenTestCase) -> None:
        first_src = '\n'.join(testcase.input)
        files_dict = dict(testcase.files)
        second_src = files_dict['tmp/next.py']
        options = parse_options(first_src, testcase, 1)

        messages1, files1 = self.build(first_src, options)
        messages2, files2 = self.build(second_src, options)

        a = []
        if messages1:
            a.extend(messages1)
        if messages2:
            a.append('== next ==')
            a.extend(messages2)

        assert files1 is not None and files2 is not None, (
            'cases where CompileError'
            ' occurred should not be run')
        prefix = '__main__'
        snapshot1 = snapshot_symbol_table(prefix, files1['__main__'].names)
        snapshot2 = snapshot_symbol_table(prefix, files2['__main__'].names)
        diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2)
        for trigger in sorted(diff):
            a.append(trigger)

        assert_string_arrays_equal(
            testcase.output, a,
            'Invalid output ({}, line {})'.format(testcase.file,
                                                  testcase.line))
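
The only difference from the previous example is that per-case options are parsed from the first source and passed to both builds. Assuming parse_options honors a "# flags:" comment, as mypy's test framework does, a case could opt into extra strictness (hypothetical case):

[case testDiffUnderStrictOptional]
# flags: --strict-optional
x = 1
[file next.py]
x = None
[out]
__main__.x
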
Example #3
def calculate_active_triggers(manager: BuildManager,
                              old_snapshots: Dict[str, Dict[str, SnapshotItem]],
                              new_modules: Dict[str, MypyFile]) -> Set[str]:
    """Determine activated triggers by comparing old and new symbol tables.

    For example, if only the signature of function m.f is different in the new
    symbol table, return {'<m.f>'}.
    """
    names = set()  # type: Set[str]
    for id in new_modules:
        snapshot1 = old_snapshots[id]
        snapshot2 = snapshot_symbol_table(id, new_modules[id].names)
        names |= compare_symbol_table_snapshots(id, snapshot1, snapshot2)
    return {make_trigger(name) for name in names}
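
A minimal illustration of the naming convention in the docstring, assuming only that compare_symbol_table_snapshots returns fully qualified names and make_trigger wraps each in angle brackets (the module and function names are hypothetical):

changed = {'m.f'}  # e.g. only the signature of m.f differs between snapshots
triggers = {make_trigger(name) for name in changed}
assert triggers == {'<m.f>'}  # matches the docstring's example
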
Example #4
def calculate_active_triggers(manager: BuildManager,
                              old_snapshots: Dict[str, Dict[str, SnapshotItem]],
                              new_modules: Dict[str, Optional[MypyFile]]) -> Set[str]:
    """Determine activated triggers by comparing old and new symbol tables.

    For example, if only the signature of function m.f is different in the new
    symbol table, return {'<m.f>'}.
    """
    names = set()  # type: Set[str]
    for id in new_modules:
        snapshot1 = old_snapshots.get(id)
        if snapshot1 is None:
            names.add(id)
            snapshot1 = {}
        new = new_modules[id]
        if new is None:
            snapshot2 = snapshot_symbol_table(id, SymbolTable())
            names.add(id)
        else:
            snapshot2 = snapshot_symbol_table(id, new.names)
        diff = compare_symbol_table_snapshots(id, snapshot1, snapshot2)
        package_nesting_level = id.count('.')
        for item in diff.copy():
            if (item.count('.') <= package_nesting_level + 1
                    and item.split('.')[-1] not in ('__builtins__',
                                                    '__file__',
                                                    '__name__',
                                                    '__package__',
                                                    '__doc__')):
                # Activate catch-all wildcard trigger for top-level module changes (used for
                # "from m import *"). This also gets triggered by changes to module-private
                # entries, but as these unneeded dependencies only result in extra processing,
                # it's a minor problem.
                #
                # TODO: Some __* names cause mistriggers. Fix the underlying issue instead of
                #     special casing them here.
                diff.add(id + WILDCARD_TAG)
            if item.count('.') > package_nesting_level + 1:
                # These are for changes within classes, used by protocols.
                diff.add(item.rsplit('.', 1)[0] + WILDCARD_TAG)

        names |= diff
    return {make_trigger(name) for name in names}
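
A worked trace of the wildcard logic for a hypothetical submodule id 'p.m', where package_nesting_level == 1:

# 'p.m.f' has two dots, 2 <= 1 + 1, and 'f' is not an excluded dunder name,
# so 'p.m' + WILDCARD_TAG joins the diff; this fires dependencies recorded
# for "from p.m import *".
# 'p.m.C.x' has three dots, 3 > 1 + 1, i.e. a change inside class C, so
# 'p.m.C' + WILDCARD_TAG joins the diff; protocol dependencies rely on this.
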
Example #5
File: update.py  Project: rkday/mypy
def reprocess_nodes(manager: BuildManager, graph: Dict[str, State],
                    module_id: str, nodeset: Set[DeferredNode],
                    deps: Dict[str, Set[str]]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained(
            '%s not in graph (blocking errors or deleted?)' % module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname(),
                                               file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname(),
                                                 file_node.names)

    def key(node: DeferredNode) -> int:
        # Unlike modules, which are sorted by name within an SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    # TODO: ignore_all argument to set_file_ignored_lines
    manager.errors.set_file_ignored_lines(file_node.path,
                                          file_node.ignored_lines)

    # Strip semantic analysis information.
    for deferred in nodes:
        strip_target(deferred.node)
    semantic_analyzer = manager.semantic_analyzer

    patches = []  # type: List[Tuple[int, Callable[[], None]]]

    # Second pass of semantic analysis. We don't redo the first pass, because it only
    # does local things that won't go stale.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo):
            manager.semantic_analyzer.refresh_partial(deferred.node, patches)

    # Third pass of semantic analysis.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo):
            manager.semantic_analyzer_pass3.refresh_partial(
                deferred.node, patches)

    apply_semantic_analyzer_patches(patches)

    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname(),
                                               file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node,
                       new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname(),
                                                 file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname(),
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, manager.options)

    # Report missing imports.
    verify_dependencies(graph[module_id], manager)

    return new_triggered
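
Schematically, the function is a five-stage pipeline; the outline below restates it using the names from the code above (a simplification, not runnable against real build state):

# 1. Snapshot: find_symbol_tables_recursive + snapshot_symbol_table record
#    the module's symbol tables before any changes.
# 2. Re-analyze: strip_target wipes semantic-analysis state, then semantic
#    analysis passes 2 and 3 rebuild the deferred nodes.
# 3. Merge: merge_asts maps freshly created nodes back onto their old
#    identities, so references from elsewhere in the program stay valid.
# 4. Type check to a fixpoint: check_second_pass(nodes) re-checks the
#    deferred nodes, and the no-argument calls in the loop pick up any
#    work those passes deferred in turn.
# 5. Diff: comparing the old and new snapshots yields the changed names,
#    and make_trigger turns each one into a trigger to propagate.
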
Example #6
def reprocess_nodes(manager: BuildManager,
                    graph: Dict[str, State],
                    module_id: str,
                    nodeset: Set[FineGrainedDeferredNode],
                    deps: Dict[str, Set[str]],
                    processed_targets: List[str]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                                 module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)

    def key(node: FineGrainedDeferredNode) -> int:
        # Unlike modules, which are sorted by name within an SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    options = graph[module_id].options
    manager.errors.set_file_ignored_lines(
        file_node.path, file_node.ignored_lines, options.ignore_errors)

    targets = set()
    for node in nodes:
        target = target_from_node(module_id, node.node)
        if target is not None:
            targets.add(target)
    manager.errors.clear_errors_in_targets(file_node.path, targets)

    # If one of the nodes is the module itself, emit any errors that
    # happened before semantic analysis.
    for target in targets:
        if target == module_id:
            for info in graph[module_id].early_errors:
                manager.errors.add_error_info(info)

    # Strip semantic analysis information.
    saved_attrs = {}  # type: SavedAttributes
    for deferred in nodes:
        processed_targets.append(deferred.node.fullname)
        strip_target(deferred.node, saved_attrs)
    semantic_analysis_for_targets(graph[module_id], nodes, graph, saved_attrs)
    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname, file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    if manager.options.export_types:
        manager.all_types.update(graph[module_id].type_map())

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname, file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname,
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, options)

    # Report missing imports.
    graph[module_id].verify_dependencies()

    graph[module_id].free_state()

    return new_triggered
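
The target-clearing step, absent from the previous example, maps each deferred node to a fully qualified target so stale errors can be dropped before re-checking. For a hypothetical module 'm', target_from_node would be expected to produce targets along these lines:

# module top level       -> 'm'
# top-level function f   -> 'm.f'
# method g of class C    -> 'm.C.g'
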
Example #7
def reprocess_nodes(manager: BuildManager,
                    graph: Dict[str, State],
                    module_id: str,
                    nodeset: Set[DeferredNode],
                    deps: Dict[str, Set[str]]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                                 module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname(), file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname(), file_node.names)

    def key(node: DeferredNode) -> int:
        # Unlike modules, which are sorted by name within an SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    # TODO: ignore_all argument to set_file_ignored_lines
    manager.errors.set_file_ignored_lines(file_node.path, file_node.ignored_lines)

    targets = set()
    for node in nodes:
        target = target_from_node(module_id, node.node)
        if target is not None:
            targets.add(target)
    manager.errors.clear_errors_in_targets(file_node.path, targets)

    # Strip semantic analysis information.
    for deferred in nodes:
        strip_target(deferred.node)
    semantic_analyzer = manager.semantic_analyzer

    patches = []  # type: List[Tuple[int, Callable[[], None]]]

    # Second pass of semantic analysis. We don't redo the first pass, because it only
    # does local things that won't go stale.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo):
            manager.semantic_analyzer.refresh_partial(deferred.node, patches)

    # Third pass of semantic analysis.
    for deferred in nodes:
        with semantic_analyzer.file_context(
                file_node=file_node,
                fnam=file_node.path,
                options=manager.options,
                active_type=deferred.active_typeinfo,
                scope=manager.semantic_analyzer_pass3.scope):
            manager.semantic_analyzer_pass3.refresh_partial(deferred.node, patches)

    with semantic_analyzer.file_context(
            file_node=file_node,
            fnam=file_node.path,
            options=manager.options,
            active_type=None):
        apply_semantic_analyzer_patches(patches)

    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname(), file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname(), file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname(),
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, manager.options)

    # Report missing imports.
    verify_dependencies(graph[module_id], manager)

    return new_triggered
Example #8
def reprocess_nodes(manager: BuildManager,
                    graph: Dict[str, State],
                    module_id: str,
                    nodeset: Set[FineGrainedDeferredNode],
                    deps: Dict[str, Set[str]],
                    processed_targets: List[str]) -> Set[str]:
    """Reprocess a set of nodes within a single module.

    Return fired triggers.
    """
    if module_id not in graph:
        manager.log_fine_grained('%s not in graph (blocking errors or deleted?)' %
                                 module_id)
        return set()

    file_node = manager.modules[module_id]
    old_symbols = find_symbol_tables_recursive(file_node.fullname(), file_node.names)
    old_symbols = {name: names.copy() for name, names in old_symbols.items()}
    old_symbols_snapshot = snapshot_symbol_table(file_node.fullname(), file_node.names)

    def key(node: FineGrainedDeferredNode) -> int:
        # Unlike modules, which are sorted by name within an SCC,
        # nodes within the same module are sorted by line number, because
        # this is how they are processed in normal mode.
        return node.node.line

    nodes = sorted(nodeset, key=key)

    options = graph[module_id].options
    manager.errors.set_file_ignored_lines(
        file_node.path, file_node.ignored_lines, options.ignore_errors)

    targets = set()
    for node in nodes:
        target = target_from_node(module_id, node.node)
        if target is not None:
            targets.add(target)
    manager.errors.clear_errors_in_targets(file_node.path, targets)

    # Strip semantic analysis information.
    patches = []  # type: List[Callable[[], None]]
    for deferred in nodes:
        processed_targets.append(deferred.node.fullname())
        if not manager.options.new_semantic_analyzer:
            strip_target(deferred.node)
        else:
            patches = strip_target_new(deferred.node)
    if not options.new_semantic_analyzer:
        re_analyze_nodes(file_node, nodes, manager, options)
    else:
        process_selected_targets(graph[module_id], nodes, graph, patches)
    # Merge symbol tables to preserve identities of AST nodes. The file node will remain
    # the same, but other nodes may have been recreated with different identities, such as
    # NamedTuples defined using assignment statements.
    new_symbols = find_symbol_tables_recursive(file_node.fullname(), file_node.names)
    for name in old_symbols:
        if name in new_symbols:
            merge_asts(file_node, old_symbols[name], file_node, new_symbols[name])

    # Type check.
    checker = graph[module_id].type_checker()
    checker.reset()
    # We seem to need additional passes in fine-grained incremental mode.
    checker.pass_num = 0
    checker.last_pass = 3
    more = checker.check_second_pass(nodes)
    while more:
        more = False
        if graph[module_id].type_checker().check_second_pass():
            more = True

    if manager.options.export_types:
        manager.all_types.update(graph[module_id].type_map())

    new_symbols_snapshot = snapshot_symbol_table(file_node.fullname(), file_node.names)
    # Check if any attribute types were changed and need to be propagated further.
    changed = compare_symbol_table_snapshots(file_node.fullname(),
                                             old_symbols_snapshot,
                                             new_symbols_snapshot)
    new_triggered = {make_trigger(name) for name in changed}

    # Dependencies may have changed.
    update_deps(module_id, nodes, graph, deps, options)

    # Report missing imports.
    graph[module_id].verify_dependencies()

    return new_triggered
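
This transitional revision keeps both semantic analyzers available and dispatches on options.new_semantic_analyzer; in outline, using the helper names from the code above:

# old analyzer: strip_target(node) per node, then re_analyze_nodes(...)
# new analyzer: patches = strip_target_new(node), then
#               process_selected_targets(graph[module_id], nodes, graph, patches)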