Example #1
    def test_multiline_suppression(self) -> None:
        source = """
        # lint-ignore: SomeCode: some reason
        # lint: and some reason continued
        # lint: onto multiple lines.
        x = "Some ignored violation"
        """
        tokens = tuple(
            tokenize.tokenize(BytesIO(source.encode("utf-8")).readline))
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
        )

        self.assertEqual(
            len(ignore_info.local_ignore_info.local_suppression_comments), 1)
        local_supp_comment = next(
            lsc for lsc in
            ignore_info.local_ignore_info.local_suppression_comments)
        self.assertEqual(
            local_supp_comment.reason,
            "some reason and some reason continued onto multiple lines.",
        )
        # Verify that all local suppression comment lines map to the same
        # SuppressionComment instance.
        comments_by_line = (
            ignore_info.local_ignore_info.local_suppression_comments_by_line)
        for line, supp_comments in comments_by_line.items():
            self.assertEqual(len(supp_comments), 1)
            supp_comment = supp_comments[0]
            self.assertIs(supp_comment, local_supp_comment)
Example #2
    def test_ignored_lines(self, *, source: str, ignored_code: str,
                           ignored_lines: Container[int]) -> None:
        tokens = tuple(
            tokenize.tokenize(BytesIO(source.encode("utf-8")).readline))
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
        )
        lines = range(1, tokens[-1].end[0] + 1)
        actual_ignored_lines = []
        for line in lines:
            ignored = ignore_info.should_ignore_report(
                CstLintRuleReport(
                    file_path=Path("fake/path.py"),
                    node=cst.EmptyLine(),
                    code=ignored_code,
                    message="message",
                    line=line,
                    column=0,
                    module=cst.MetadataWrapper(cst.parse_module(source)),
                    module_bytes=source.encode("utf-8"),
                ))
            if ignored:
                actual_ignored_lines.append(line)
        # pyre-fixme[6]: Expected `Iterable[Variable[_T]]` for 1st param but got
        #  `Container[int]`.
        self.assertEqual(actual_ignored_lines, list(ignored_lines))
Example #3
    def test(
        self,
        *,
        source: bytes,
        rules_in_lint_run: Collection[Type[CstLintRule]],
        rules_without_report: Collection[Type[CstLintRule]],
        suppressed_line: int,
        expected_unused_suppressions_report_messages: Collection[str],
        expected_replacements: Optional[List[str]] = None,
    ) -> None:
        reports = [
            CstLintRuleReport(
                file_path=FILE_PATH,
                node=cst.EmptyLine(),
                code=rule.__name__,
                message="message",
                line=suppressed_line,
                column=0,
                module=cst.MetadataWrapper(cst.parse_module(source)),
                module_bytes=source,
            ) for rule in rules_in_lint_run if rule not in rules_without_report
        ]
        tokens = _get_tokens(source)
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
        )
        cst_wrapper = MetadataWrapper(cst.parse_module(source),
                                      unsafe_skip_copy=True)
        config = LintConfig(
            rule_config={
                RemoveUnusedSuppressionsRule.__name__: {
                    "ignore_info": ignore_info,
                    "rules": rules_in_lint_run,
                }
            })
        unused_suppressions_context = CstContext(cst_wrapper, source,
                                                 FILE_PATH, config)
        for report in reports:
            ignore_info.should_ignore_report(report)
        _visit_cst_rules_with_context(cst_wrapper,
                                      [RemoveUnusedSuppressionsRule],
                                      unused_suppressions_context)

        messages = []
        patches = []
        for report in unused_suppressions_context.reports:
            messages.append(report.message)
            patches.append(report.patch)

        self.assertEqual(messages,
                         expected_unused_suppressions_report_messages)
        if expected_replacements is None:
            self.assertEqual(len(patches), 0)
        else:
            self.assertEqual(len(patches), len(expected_replacements))

            for idx, patch in enumerate(patches):
                replacement = patch.apply(source.decode())
                self.assertEqual(replacement, expected_replacements[idx])
Example #4
    def test_compose_new_comment_multiline(self) -> None:
        source = dedent_with_lstrip("""
            # lint-fixme: UsedRule, UsedRule2: reason...
            # lint: reason continued
            """)
        tokens = _get_tokens(source.encode())
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
            use_noqa=False,
        )
        local_suppression_comments = (ignore_info.local_ignore_info.
                                      local_suppression_comments_by_line[1])
        self.assertEqual(len(local_suppression_comments), 1)
        local_suppression_comment = local_suppression_comments[0]

        # First code unneeded.
        unneeded_codes = ["UsedRule"]
        new_comment_lines = _compose_new_comment(local_suppression_comment,
                                                 unneeded_codes, 1)
        expected_new_lines = [
            "# lint-fixme: UsedRule2: reason... reason",
            "# lint: continued",
        ]
        self.assertEqual(new_comment_lines, expected_new_lines)

        # Second code unneeded.
        unneeded_codes = ["UsedRule2"]
        new_comment_lines = _compose_new_comment(local_suppression_comment,
                                                 unneeded_codes, 1)
        expected_new_lines = [
            "# lint-fixme: UsedRule: reason... reason",
            "# lint: continued",
        ]
        self.assertEqual(new_comment_lines, expected_new_lines)

        # Both codes unneeded.
        unneeded_codes = ["UsedRule", "UsedRule2"]
        new_comment_lines = _compose_new_comment(local_suppression_comment,
                                                 unneeded_codes, 1)
        expected_new_lines = []
        self.assertEqual(new_comment_lines, expected_new_lines)

        # Both codes needed.
        unneeded_codes = []
        new_comment_lines = _compose_new_comment(local_suppression_comment,
                                                 unneeded_codes, 1)
        expected_new_lines = [
            "# lint-fixme: UsedRule, UsedRule2: reason...",
            "# lint: reason continued",
        ]
        self.assertEqual(new_comment_lines, expected_new_lines)
Example #5
    def test_unused_comments(
        self,
        *,
        source: str,
        reports_on_lines: Iterable[Tuple[int, str]],
        unused_comments: Iterable[int],
    ) -> None:
        """
        Verify that we can correctly track which lint comments were used and which were
        unused.

        TODO: We don't track usage of global ignore comments, so we can't know if
        they're unused.
        """
        tokens = tuple(
            tokenize.tokenize(BytesIO(source.encode("utf-8")).readline))
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
            use_noqa=True,
        )

        for line, code in reports_on_lines:
            ignore_info.should_ignore_report(
                CstLintRuleReport(
                    file_path=Path("fake/path.py"),
                    node=cst.EmptyLine(),
                    code=code,
                    message="message",
                    line=line,
                    column=0,
                    module=cst.MetadataWrapper(cst.parse_module(source)),
                    module_bytes=source.encode("utf-8"),
                ))

        self.assertEqual(
            sorted([
                min(tok.start[0] for tok in c.tokens)
                for c in ignore_info.suppression_comments if not c.used_by
            ]),
            sorted(unused_comments),
        )
Example #6
def lint_file(
    file_path: Path,
    source: bytes,
    *,
    use_ignore_byte_markers: bool = True,
    use_ignore_comments: bool = True,
    config: Optional[LintConfig] = None,
    rules: LintRuleCollectionT,
    cst_wrapper: Optional[MetadataWrapper] = None,
    find_unused_suppressions: bool = False,
) -> Collection[BaseLintRuleReport]:
    """
    May raise a SyntaxError, which should be handled by the
    caller.
    """
    # Get settings from the nearest `.fixit.config.yaml` file if necessary.
    config: LintConfig = config if config is not None else get_lint_config()

    if use_ignore_byte_markers and any(
        pattern.encode() in source for pattern in config.block_list_patterns
    ):
        return []

    tokens = None
    if use_ignore_comments:
        # Don't compute tokens unless we have to; it slows down
        # `fixit.cli.run_rules`.
        #
        # `tokenize` is actually much more expensive than generating the whole AST,
        # since AST parsing is heavily optimized C and tokenize is pure Python.
        tokens = _get_tokens(source)
        ignore_info = IgnoreInfo.compute(
            comment_info=CommentInfo.compute(tokens=tokens),
            line_mapping_info=LineMappingInfo.compute(tokens=tokens),
        )
    else:
        ignore_info = None

    # Don't waste time evaluating rules that are globally ignored.
    evaluated_rules = [
        r for r in rules if ignore_info is None or ignore_info.should_evaluate_rule(r)
    ]
    # Categorize lint rules.
    cst_rules: List[Type[CstLintRule]] = []
    pseudo_rules: List[Type[PseudoLintRule]] = []
    for r in evaluated_rules:
        if issubclass(r, CstLintRule):
            cst_rules.append(cast(Type[CstLintRule], r))
        elif issubclass(r, PseudoLintRule):
            pseudo_rules.append(cast(Type[PseudoLintRule], r))

    # `self.context.report()` accumulates reports into the context object;
    # we'll copy those into our local `reports` list.
    ast_tree = None
    reports = []

    if cst_wrapper is None:
        cst_wrapper = MetadataWrapper(cst.parse_module(source), unsafe_skip_copy=True)
    if cst_rules:
        cst_context = CstContext(cst_wrapper, source, file_path, config)
        _visit_cst_rules_with_context(cst_wrapper, cst_rules, cst_context)
        reports.extend(cst_context.reports)

    if pseudo_rules:
        pseudo_context = PseudoContext(file_path, source, tokens, ast_tree)
        for pr_cls in pseudo_rules:
            reports.extend(pr_cls(pseudo_context).lint_file())

    if ignore_info is not None:
        # Filter out reports that should be suppressed by ignore comments, and
        # optionally report unused suppression comments.
        reports = [r for r in reports if not ignore_info.should_ignore_report(r)]
        if find_unused_suppressions and cst_rules:
            # `ignore_info.should_ignore_report` must be called on every report
            # before running RemoveUnusedSuppressionsRule, because that rule
            # needs `ignore_info` to be up to date. We construct a new context
            # so it collects a fresh set of reports to append to `reports`.
            config.rule_config[RemoveUnusedSuppressionsRule.__name__] = {
                "ignore_info": ignore_info,
                "rules": cst_rules,
            }
            unused_suppressions_context = CstContext(
                cst_wrapper, source, file_path, config
            )
            _visit_cst_rules_with_context(
                cst_wrapper, [RemoveUnusedSuppressionsRule], unused_suppressions_context
            )
            reports.extend(unused_suppressions_context.reports)

    return reports
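
A minimal caller sketch for the lint_file() API shown above (not part of the
original examples). It reuses the names already in scope in the snippets
(Path, lint_file, LintRuleCollectionT); lint_one_file and some_rules are
hypothetical, and it assumes each returned report exposes the line, column,
code, and message fields seen in the CstLintRuleReport constructions above.
Since lint_file() may raise a SyntaxError that the caller must handle, the
sketch catches it explicitly.

def lint_one_file(path: Path, some_rules: LintRuleCollectionT) -> None:
    # `some_rules` is a hypothetical rule collection assembled elsewhere.
    source = path.read_bytes()
    try:
        reports = lint_file(
            path,
            source,
            rules=some_rules,
            find_unused_suppressions=True,
        )
    except SyntaxError as e:
        # Per lint_file's docstring, syntax errors are the caller's problem.
        print(f"{path}: could not parse: {e}")
        return
    for report in reports:
        print(f"{path}:{report.line}:{report.column} "
              f"{report.code}: {report.message}")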