Example #1
def _main(args: argparse.Namespace) -> int:
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    # Find files if directory was provided.
    file_paths = tuple(find_files((str(p) for p in args.paths)))

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print()
    start_time = time.time()

    if args.no_message:
        message = MessageKind.NO_MESSAGE
    elif args.message is not None:
        message = args.message
    else:
        message = MessageKind.USE_LINT_REPORT

    metadata_caches: Optional[Mapping[str, Mapping["ProviderT", object]]] = None
    if rules_require_metadata_cache({args.rule}):
        metadata_caches = get_metadata_caches(args.cache_timeout, file_paths)

    # opts is a more type-safe version of args that we pass around
    opts = InsertSuppressionsOpts(
        rule=args.rule,
        skip_autoformatter=args.skip_autoformatter,
        kind=SuppressionCommentKind[args.kind.upper()],
        message=message,
        max_lines=args.max_lines,
        formatter=SuppressedLintRuleReportFormatter(width, args.compact),
    )

    formatted_reports_iter = itertools.chain.from_iterable(
        map_paths(
            get_formatted_reports_for_path,
            file_paths,
            opts,
            workers=args.workers,
            metadata_caches=metadata_caches,
        )
    )

    formatted_reports = []
    for formatted_report in formatted_reports_iter:
        # Reports are yielded as soon as they're available. Stream the output to the
        # terminal.
        print(formatted_report)
        # save the report from the iterator for later use
        formatted_reports.append(formatted_report)

    if not args.compact:
        print()
        print(
            f"Found {len(formatted_reports)} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds."
        )

    return 0
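
The loop above streams each report to the terminal as soon as the worker pool yields it, while also keeping a copy so the summary line can count them. The same flatten-stream-and-collect pattern can be reproduced with the standard library alone; the sketch below uses a hypothetical produce_batches() generator in place of map_paths.

import itertools
from typing import Iterator, List


def produce_batches() -> Iterator[List[str]]:
    # Hypothetical stand-in for map_paths(): yields a batch of formatted
    # reports each time a worker finishes a file.
    yield ["report for a.py"]
    yield ["report for b.py", "report for c.py"]


# Flatten the per-file batches into a single stream of reports.
reports_iter = itertools.chain.from_iterable(produce_batches())

collected = []
for report in reports_iter:
    # Print immediately so the user sees progress as results arrive...
    print(report)
    # ...but also keep the report so it can be counted at the end.
    collected.append(report)

print(f"Found {len(collected)} reports.")
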
Example #2
def _main(args: argparse.Namespace) -> int:
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    # expand path if it's a directory
    file_paths = tuple(find_files(args.paths))
    all_rules = args.rules

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print(f"Testing {len(all_rules)} rules")
        print()
    start_time = time.time()

    metadata_caches: Optional[Mapping[str, Mapping["ProviderT", object]]] = None
    if rules_require_metadata_cache(all_rules):
        metadata_caches = get_metadata_caches(args.cache_timeout, file_paths)

    # opts is a more type-safe version of args that we pass around
    opts = LintOpts(
        rules=all_rules,
        use_ignore_byte_markers=args.use_ignore_byte_markers,
        use_ignore_comments=args.use_ignore_comments,
        formatter=LintRuleReportFormatter(width, args.compact),
    )

    formatted_reports_iter = itertools.chain.from_iterable(
        map_paths(
            get_formatted_reports_for_path,
            file_paths,
            opts,
            workers=args.workers,
            metadata_caches=metadata_caches,
        )
    )

    formatted_reports = []
    for formatted_report in formatted_reports_iter:
        # Reports are yielded as soon as they're available. Stream the output to the
        # terminal.
        print(formatted_report)
        # save the report from the iterator for later use
        formatted_reports.append(formatted_report)

    if not args.compact:
        print()
        print(
            f"Found {len(formatted_reports)} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds."
        )

    # Return with an exit code of 1 if there are any violations found.
    return int(bool(formatted_reports))
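
The final line turns the presence of any report into the process exit status: bool() of a non-empty list is True and int(True) is 1, so the command exits non-zero exactly when violations were found. A typical entry point (assumed here, not shown in the example) would hand that value to sys.exit:

import sys


def main() -> int:
    # Hypothetical stand-in for the lint run above.
    formatted_reports = ["one violation"]
    # Empty list      -> int(bool([]))   == 0 (success)
    # Non-empty list  -> int(bool([x]))  == 1 (failure)
    return int(bool(formatted_reports))


if __name__ == "__main__":
    sys.exit(main())
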
Example #3
def main(raw_args: Sequence[str]) -> int:
    parser = argparse.ArgumentParser(
        description=(
            "Inserts `# lint-fixme` comments into a file where lint violations are "
            "found.\n"
            "\n"
            "You should only use this tool if it's not feasible to fix the existing "
            "violations."
        ),
        parents=[
            get_rule_parser(),
            get_paths_parser(),
            get_skip_autoformatter_parser(),
            get_compact_parser(),
            get_metadata_cache_parser(),
            get_multiprocessing_parser(),
        ],
    )
    parser.add_argument(
        "--kind",
        default="fixme",
        choices=[kind.name.lower() for kind in SuppressionCommentKind],
        help="Should we use `# lint-fixme` or `# lint-ignore`? Defaults to 'fixme'.",
    )
    message_group = parser.add_mutually_exclusive_group()
    message_group.add_argument(
        "--message",
        default=None,
        help="Overrides the lint message used in the fixme comment.",
    )
    message_group.add_argument(
        "--no-message",
        action="store_true",
        help=(
            "Don't include a message with the suppression comment. Only include the "
            "lint code."
        ),
    )
    parser.add_argument(
        "--max-lines",
        default=3,
        type=int,
        help="The maximum number of lines a comment can span before getting truncated",
    )

    args = parser.parse_args(raw_args)
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    # Find files if directory was provided.
    file_paths = tuple(find_files((str(p) for p in args.paths)))

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print()
    start_time = time.time()

    if args.no_message:
        message = MessageKind.NO_MESSAGE
    elif args.message is not None:
        message = args.message
    else:
        message = MessageKind.USE_LINT_REPORT

    metadata_caches: Optional[Mapping[str, Mapping["ProviderT", object]]] = None
    if rules_require_metadata_cache({args.rule}):
        metadata_caches = get_metadata_caches(args.cache_timeout, file_paths)

    # opts is a more type-safe version of args that we pass around
    opts = InsertSuppressionsOpts(
        rule=args.rule,
        skip_autoformatter=args.skip_autoformatter,
        kind=SuppressionCommentKind[args.kind.upper()],
        message=message,
        max_lines=args.max_lines,
        formatter=SuppressedLintRuleReportFormatter(width, args.compact),
    )

    formatted_reports_iter = itertools.chain.from_iterable(
        map_paths(
            get_formatted_reports_for_path,
            file_paths,
            opts,
            workers=args.workers,
            metadata_caches=metadata_caches,
        ))

    formatted_reports = []
    for formatted_report in formatted_reports_iter:
        # Reports are yielded as soon as they're available. Stream the output to the
        # terminal.
        print(formatted_report)
        # save the report from the iterator for later use
        formatted_reports.append(formatted_report)

    if not args.compact:
        print()
        print(
            f"Found {len(formatted_reports)} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds.")
    return 0
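
Example #3 builds its parser out of shared sub-parsers via parents= and puts --message and --no-message in a mutually exclusive group so they cannot be combined. A self-contained sketch of those two argparse features, with made-up option names, looks like this:

import argparse

# A reusable parent parser, analogous to get_compact_parser() and friends.
# add_help=False is required on parents, otherwise -h would be defined twice.
common = argparse.ArgumentParser(add_help=False)
common.add_argument("--compact", action="store_true", help="Terse output.")

parser = argparse.ArgumentParser(
    description="Demo of parents= and mutually exclusive groups.",
    parents=[common],
)
group = parser.add_mutually_exclusive_group()
group.add_argument("--message", default=None, help="Custom message.")
group.add_argument("--no-message", action="store_true", help="Omit the message.")

# Fine: only one of the two exclusive options is given.
args = parser.parse_args(["--compact", "--message", "hello"])
print(args.compact, args.message, args.no_message)

# This would exit with an error, because the options exclude each other:
# parser.parse_args(["--message", "hi", "--no-message"])
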
Example #4
def _main(args: argparse.Namespace) -> int:
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    rules = args.rules
    use_ignore_byte_markers = args.use_ignore_byte_markers
    use_ignore_comments = args.use_ignore_comments
    skip_autoformatter = args.skip_autoformatter
    formatter = AutofixingLintRuleReportFormatter(width, args.compact)
    workers = args.workers

    # Find files if directory was provided.
    file_paths = tuple(find_files(args.paths))

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print("\n".join(file_paths))
        print()
    start_time = time.time()

    total_reports_count = 0

    if rules_require_metadata_cache(rules):
        touched_files = set()
        next_files = file_paths
        with Manager() as manager:
            # Avoid getting stuck in an infinite loop.
            for _ in range(MAX_ITER):
                if not next_files:
                    break

                patched_files = manager.list()
                metadata_caches = get_metadata_caches(args.cache_timeout,
                                                      next_files)

                next_files = []
                # opts is a more type-safe version of args that we pass around
                opts = LintOpts(
                    rules=rules,
                    use_ignore_byte_markers=use_ignore_byte_markers,
                    use_ignore_comments=use_ignore_comments,
                    skip_autoformatter=skip_autoformatter,
                    formatter=formatter,
                    patched_files_list=patched_files,
                )
                total_reports_count += call_map_paths_and_print_reports(
                    next_files, opts, workers, metadata_caches)
                next_files = list(patched_files)
                touched_files.update(patched_files)

        # Finally, format all the touched files.
        if not skip_autoformatter:
            for path in touched_files:
                with open(path, "rb") as f:
                    source = f.read()
                # Format the code using the config file's formatter.
                formatted_source = invoke_formatter(
                    get_lint_config().formatter, source)
                with open(path, "wb") as f:
                    f.write(formatted_source)

    else:
        # opts is a more type-safe version of args that we pass around
        opts = LintOpts(
            rules=rules,
            use_ignore_byte_markers=use_ignore_byte_markers,
            use_ignore_comments=use_ignore_comments,
            skip_autoformatter=skip_autoformatter,
            formatter=formatter,
        )

        total_reports_count = call_map_paths_and_print_reports(
            file_paths, opts, workers, None)

    if not args.compact:
        print()
        print(
            f"Found {total_reports_count} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds.")

    return 0
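
Examples #4 and #5 hand a manager.list() proxy to the worker processes (via LintOpts' patched_files_list) so each worker can record which files it rewrote; a proxy list from multiprocessing.Manager can be appended to safely from several processes. A minimal stand-alone sketch of that mechanism, using a hypothetical patch_file worker:

from multiprocessing import Manager, Pool


def patch_file(args):
    # Hypothetical worker: pretend only files ending in "2.py" were modified.
    path, patched_files = args
    if path.endswith("2.py"):
        patched_files.append(path)  # the proxy list is safe to mutate here


if __name__ == "__main__":
    paths = ["mod1.py", "mod2.py", "mod3.py"]
    with Manager() as manager:
        patched_files = manager.list()
        with Pool(2) as pool:
            pool.map(patch_file, [(p, patched_files) for p in paths])
        # Copy out of the proxy before the manager shuts down.
        touched = list(patched_files)
    print(touched)  # ['mod2.py']
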
Example #5
File: apply_fix.py  Project: thatch/Fixit
def main(raw_args: Sequence[str]) -> int:
    parser = argparse.ArgumentParser(
        description=(
            "Runs a lint rule's autofixer over a set of files or directories.\n"
            "\n"
            "This is similar to the functionality provided by LibCST codemods "
            "(https://libcst.readthedocs.io/en/latest/codemods_tutorial.html), "
            "but limited to the small subset of APIs provided by Fixit."
        ),
        parents=[
            get_rules_parser(),
            get_metadata_cache_parser(),
            get_paths_parser(),
            get_skip_ignore_comments_parser(),
            get_skip_ignore_byte_marker_parser(),
            get_skip_autoformatter_parser(),
            get_compact_parser(),
            get_multiprocessing_parser(),
        ],
    )

    args = parser.parse_args(raw_args)
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    rules = args.rules
    use_ignore_byte_markers = args.use_ignore_byte_markers
    use_ignore_comments = args.use_ignore_comments
    skip_autoformatter = args.skip_autoformatter
    formatter = AutofixingLintRuleReportFormatter(width, args.compact)
    workers = args.workers

    # Find files if directory was provided.
    file_paths = tuple(find_files(args.paths))

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print("\n".join(file_paths))
        print()
    start_time = time.time()

    total_reports_count = 0

    if rules_require_metadata_cache(rules):
        touched_files = set()
        next_files = file_paths
        with Manager() as manager:
            # Avoid getting stuck in an infinite loop.
            for _ in range(MAX_ITER):
                if not next_files:
                    break

                patched_files = manager.list()
                metadata_caches = get_metadata_caches(args.cache_timeout,
                                                      next_files)

                next_files = []
                # opts is a more type-safe version of args that we pass around
                opts = LintOpts(
                    rules=rules,
                    use_ignore_byte_markers=use_ignore_byte_markers,
                    use_ignore_comments=use_ignore_comments,
                    skip_autoformatter=skip_autoformatter,
                    formatter=formatter,
                    patched_files_list=patched_files,
                )
                total_reports_count += call_map_paths_and_print_reports(
                    next_files, opts, workers, metadata_caches)
                next_files = list(patched_files)
                touched_files.update(patched_files)

        # Finally, format all the touched files.
        if not skip_autoformatter:
            for path in touched_files:
                with open(path, "rb") as f:
                    source = f.read()
                # Format the code using the config file's formatter.
                formatted_source = invoke_formatter(
                    get_lint_config().formatter, source)
                with open(path, "wb") as f:
                    f.write(formatted_source)

    else:
        # opts is a more type-safe version of args that we pass around
        opts = LintOpts(
            rules=rules,
            use_ignore_byte_markers=use_ignore_byte_markers,
            use_ignore_comments=use_ignore_comments,
            skip_autoformatter=skip_autoformatter,
            formatter=formatter,
        )

        total_reports_count = call_map_paths_and_print_reports(
            file_paths, opts, workers, None)

    if not args.compact:
        print()
        print(
            f"Found {total_reports_count} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds.")

    return 0
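
The outer for _ in range(MAX_ITER) loop in Examples #4 and #5 re-lints only the files that the previous pass modified, and the fixed iteration cap guarantees termination even if two autofixes keep undoing each other. The shape of that loop, stripped of the Fixit-specific pieces and using a hypothetical apply_fixes step:

MAX_ITER = 100  # hard cap so mutually conflicting fixes cannot loop forever


def apply_fixes(paths):
    # Hypothetical: fix the given files and return only the ones that changed.
    return []


def fix_until_stable(file_paths):
    touched_files = set()
    next_files = list(file_paths)
    for _ in range(MAX_ITER):
        if not next_files:
            break  # nothing changed last pass: a fixed point was reached
        patched = apply_fixes(next_files)
        touched_files.update(patched)
        # Only the files that were just rewritten need another pass.
        next_files = list(patched)
    return touched_files
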
Example #6
def main(raw_args: Sequence[str]) -> int:
    parser = argparse.ArgumentParser(
        description=(
            "Validates your lint rules by running them against the specified, "
            + "directory or file(s). This is not a substitute for unit tests, "
            + "but it can provide additional confidence in your lint rules.\n"
            + "If no lint rules or packages are specified, runs all lint rules "
            + "found in the packages specified in `fixit.config.yaml`."
        ),
        parents=[
            get_paths_parser(),
            get_rules_parser(),
            get_use_ignore_comments_parser(),
            get_skip_ignore_byte_marker_parser(),
            get_compact_parser(),
            get_multiprocessing_parser(),
        ],
    )

    parser.add_argument(
        "--cache-timeout",
        type=int,
        help="Timeout (seconds) for metadata cache fetching. Default is 2 seconds.",
        default=2,
    )

    args = parser.parse_args(raw_args)
    width = shutil.get_terminal_size(fallback=(80, 24)).columns

    # expand path if it's a directory
    file_paths = tuple(find_files(args.paths))
    all_rules = args.rules

    if not args.compact:
        print(f"Scanning {len(file_paths)} files")
        print(f"Testing {len(all_rules)} rules")
        print()
    start_time = time.time()

    metadata_caches: Optional[Mapping[str, Mapping["ProviderT", object]]] = None
    if rules_require_metadata_cache(all_rules):
        metadata_caches = get_metadata_caches(args.cache_timeout, file_paths)

    # opts is a more type-safe version of args that we pass around
    opts = LintOpts(
        rules=all_rules,
        use_ignore_byte_markers=args.use_ignore_byte_markers,
        use_ignore_comments=args.use_ignore_comments,
        formatter=LintRuleReportFormatter(width, args.compact),
    )

    formatted_reports_iter = itertools.chain.from_iterable(
        map_paths(
            get_formatted_reports_for_path,
            file_paths,
            opts,
            workers=args.workers,
            metadata_caches=metadata_caches,
        )
    )

    formatted_reports = []
    for formatted_report in formatted_reports_iter:
        # Reports are yielded as soon as they're available. Stream the output to the
        # terminal.
        print(formatted_report)
        # save the report from the iterator for later use
        formatted_reports.append(formatted_report)

    if not args.compact:
        print()
        print(
            f"Found {len(formatted_reports)} reports in {len(file_paths)} files in "
            + f"{time.time() - start_time :.2f} seconds."
        )

    # Return with an exit code of 1 if there are any violations found.
    return int(bool(formatted_reports))
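
All six examples share the same bookkeeping around the lint run: size the report formatter to the current terminal and print a timed summary at the end. Both pieces come straight from the standard library; a minimal sketch:

import shutil
import time

# Fall back to 80x24 when stdout is not a terminal (e.g. output piped to a file).
width = shutil.get_terminal_size(fallback=(80, 24)).columns

start_time = time.time()
reports = ["dummy report"]  # stand-in for the formatted lint reports
print(f"(reports would be wrapped to {width} columns)")
print(
    f"Found {len(reports)} reports in "
    f"{time.time() - start_time:.2f} seconds."
)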