Example #1
async def legacy_prepare_python_sources(
    hydrated_targets: HydratedTargets,
) -> ImportablePythonSources:
    stripped_sources = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        (ht.adaptor for ht in hydrated_targets), strip_source_roots=True))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        stripped_sources.snapshot))
    return ImportablePythonSources(init_injected.snapshot)
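
The net effect of this rule (strip source roots, then inject missing __init__.py files) can be pictured without the engine. A minimal, stdlib-only sketch; the source root "src/python" is an assumed example, not something taken from the rule:

from pathlib import PurePath

def importable_layout(files, source_root="src/python"):
    # Strip the source root prefix, mirroring strip_source_roots=True.
    stripped = [str(PurePath(f).relative_to(source_root)) for f in files]
    # Add any missing __init__.py files so the stripped tree is importable,
    # mirroring InjectInitRequest.
    inits = set()
    for f in stripped:
        parent = PurePath(f).parent
        while str(parent) != ".":
            inits.add(str(parent / "__init__.py"))
            parent = parent.parent
    return sorted(set(stripped) | inits)

print(importable_layout(["src/python/pants/util/strutil.py"]))
# ['pants/__init__.py', 'pants/util/__init__.py', 'pants/util/strutil.py']
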
Example #2
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    adaptors_with_origins = request.formatter.adaptors_with_origins

    requirements_pex = await Get[Pex](PexRequest(
        output_filename="isort.pex",
        requirements=PexRequirements(isort.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            isort.default_interpreter_constraints),
        entry_point=isort.get_entry_point(),
    ))

    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=config_path or (),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        conjunction=GlobExpansionConjunction.all_match,
        description_of_origin="the option `--isort-config`",
    ))

    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
            adaptor_with_origin.adaptor
            for adaptor_with_origin in adaptors_with_origins))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )))

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run isort for {address_references}",
    )
    return Setup(process_request)
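
For context, generate_args assembles the isort command line from the specified files and the fetched config. A rough, hypothetical sketch of that argv; the exact flags are an assumption rather than something shown above:

def isort_argv(files, *, config_path=None, check_only=False):
    args = []
    if check_only:
        args.append("--check-only")  # report instead of rewriting files in place
    if config_path:
        args.append(f"--settings-path={config_path}")  # point isort at the fetched config
    args.extend(files)
    return args

print(isort_argv(["src/app.py", "src/util.py"], config_path=".isort.cfg", check_only=True))
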
Example #3
async def merge_coverage_data(
    data_batch: PytestCoverageDataBatch,
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all python test results and merges their coverage data into a single sql file."""
    # We start with a bunch of test results, each of which has a coverage data file called `.coverage`.
    # We prefix each of these with its address so the files can all be merged into a single input
    # directory without colliding.
    coverage_directory_digests = await MultiGet(
        Get[Digest](
            DirectoryWithPrefixToAdd(
                directory_digest=result.test_result.coverage_data.digest,  # type: ignore[attr-defined]
                prefix=result.address.path_safe_spec,
            ))
        for result in data_batch.addresses_and_test_results
        if result.test_result.coverage_data is not None)
    sources = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    coveragerc = await Get[Coveragerc](
        CoveragercRequest(HydratedTargets(transitive_targets.closure), test_time=True))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            *coverage_directory_digests,
            sources_with_inits_snapshot.snapshot.directory_digest,
            coveragerc.digest,
            coverage_setup.requirements_pex.directory_digest,
        )),
    )

    prefixes = [
        f"{result.address.path_safe_spec}/.coverage"
        for result in data_batch.addresses_and_test_results
    ]
    coverage_args = ["combine", *prefixes]
    request = coverage_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage", ),
        description=f"Merge coverage reports.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
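
Outside the execution engine, the prefix-then-combine step boils down to handing `coverage combine` one data file per test target. A minimal sketch using the coverage CLI directly; the specs are stand-in values for result.address.path_safe_spec:

import subprocess

path_safe_specs = ["src.python.app.tests", "src.python.lib.tests"]  # stand-in values
prefixed_data_files = [f"{spec}/.coverage" for spec in path_safe_specs]

# `coverage combine` merges the listed data files into a single ./.coverage file,
# which is what the rule captures via output_files=(".coverage",).
subprocess.run(["coverage", "combine", *prefixed_data_files], check=True)
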
Example #4
async def setup(
    request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    adaptors_with_origins = request.formatter.adaptors_with_origins

    requirements_pex = await Get[Pex](PexRequest(
        output_filename="docformatter.pex",
        requirements=PexRequirements(docformatter.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            docformatter.default_interpreter_constraints),
        entry_point=docformatter.get_entry_point(),
    ))

    if request.formatter.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
            adaptor_with_origin.adaptor
            for adaptor_with_origin in adaptors_with_origins))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.formatter.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
    )))

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=f"Run docformatter for {address_references}",
    )
    return Setup(process_request)
Example #5
def get_all_source_files(
    self,
    adaptors_with_origins: Iterable[TargetAdaptorWithOrigin],
    *,
    strip_source_roots: bool = False,
) -> List[str]:
    request = LegacyAllSourceFilesRequest(
        (adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins),
        strip_source_roots=strip_source_roots,
    )
    result = self.request_single_product(
        SourceFiles, Params(request, create_options_bootstrapper())
    )
    return sorted(result.snapshot.files)
Example #6
async def construct_coverage_config(
        source_root_config: SourceRootConfig,
        coverage_config_request: CoveragercRequest) -> Coveragerc:
    sources = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        (ht.adaptor for ht in coverage_config_request.hydrated_targets),
        strip_source_roots=False,
    ))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        sources.snapshot))
    source_roots = source_root_config.get_source_roots()

    # Generate a map from source-root-stripped source file to its source root, e.g.:
    #   {'pants/testutil/subsystem/util.py': 'src/python'}
    # This lets coverage reports that reference /chroot/path/pants/testutil/subsystem/util.py be
    # mapped back to the actual sources they refer to.
    def source_root_stripped_source_and_source_root(
            file_name: str) -> Tuple[str, str]:
        source_root = source_roots.find_by_path(file_name)
        source_root_path = source_root.path if source_root is not None else ""
        source_root_stripped_path = file_name[len(source_root_path) + 1:]
        return (source_root_stripped_path, source_root_path)

    source_to_target_base = dict(
        source_root_stripped_source_and_source_root(filename)
        for filename in sorted(init_injected.snapshot.files))
    config_parser = configparser.ConfigParser()
    config_parser.read_file(StringIO(DEFAULT_COVERAGE_CONFIG))
    ensure_section(config_parser, "run")
    config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "source_to_target_base",
                      json.dumps(source_to_target_base))
    config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "test_time",
                      json.dumps(coverage_config_request.test_time))
    config = StringIO()
    config_parser.write(config)
    coveragerc_digest = await Get[Digest](
        InputFilesContent, get_coveragerc_input(config.getvalue()))
    return Coveragerc(coveragerc_digest)
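
The interesting part of this rule is the generated config itself. Here is a stdlib-only sketch of the same ConfigParser round trip, runnable outside the rule graph; the default config and plugin module name are stand-ins for the constants referenced above:

import configparser
import json
from io import StringIO

DEFAULT_COVERAGE_CONFIG = "[run]\nbranch = True\n"  # stand-in value
COVERAGE_PLUGIN_MODULE_NAME = "coverage_plugin"     # stand-in value

def build_coveragerc(source_to_target_base, test_time):
    parser = configparser.ConfigParser()
    parser.read_file(StringIO(DEFAULT_COVERAGE_CONFIG))
    if not parser.has_section("run"):
        parser.add_section("run")
    parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    parser.set(COVERAGE_PLUGIN_MODULE_NAME, "source_to_target_base",
               json.dumps(source_to_target_base))
    parser.set(COVERAGE_PLUGIN_MODULE_NAME, "test_time", json.dumps(test_time))
    out = StringIO()
    parser.write(out)
    return out.getvalue()

print(build_coveragerc({"pants/testutil/subsystem/util.py": "src/python"}, True))
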
Example #7
async def lint(
    linter: Flake8Linter,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if flake8.options.skip:
        return LintResult.noop()

    adaptors_with_origins = linter.adaptors_with_origins

    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor
         for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="flake8.pex",
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=flake8.get_entry_point(),
    ))

    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))

    all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )))

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_files=merged_input_files,
        description=f"Run Flake8 for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
Example #8
async def generate_coverage_report(
    transitive_targets: TransitiveHydratedTargets,
    python_setup: PythonSetup,
    coverage_setup: CoverageSetup,
    merged_coverage_data: MergedCoverageData,
    coverage_toolbase: PytestCoverage,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CoverageReport:
    """Takes all python test results and generates a single coverage report."""
    requirements_pex = coverage_setup.requirements_pex
    # TODO(#4535) We need a better way to do this kind of check that covers synthetic targets and rules extensibility.
    python_targets = [
        target for target in transitive_targets.closure
        if target.adaptor.type_alias in ("python_library", "python_tests")
    ]

    coveragerc = await Get[Coveragerc](CoveragercRequest(
        HydratedTargets(python_targets)))
    sources = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        (ht.adaptor for ht in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            merged_coverage_data.coverage_data,
            coveragerc.digest,
            requirements_pex.directory_digest,
            sources_with_inits_snapshot.snapshot.directory_digest,
        )),
    )
    report_type = coverage_toolbase.options.report
    coverage_args = [report_type.report_name]
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_directories=("htmlcov", ),
        output_files=("coverage.xml", ),
        description=f"Generate coverage report.",
    )

    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, request)
    if report_type == ReportType.CONSOLE:
        return ConsoleCoverageReport(result.stdout.decode())

    report_dir = PurePath(coverage_toolbase.options.report_output_path)

    report_file: Optional[PurePath] = None
    if coverage_toolbase.options.report == ReportType.HTML:
        report_file = report_dir / "htmlcov" / "index.html"
    elif coverage_toolbase.options.report == ReportType.XML:
        report_file = report_dir / "coverage.xml"

    return FilesystemCoverageReport(
        result_digest=result.output_directory_digest,
        directory_to_materialize_to=report_dir,
        report_file=report_file,
    )
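
The report dispatch above maps directly onto the coverage CLI: `coverage report` prints to the console, `coverage html` writes htmlcov/index.html, and `coverage xml` writes coverage.xml, which is why exactly those paths are captured as outputs. A rough local equivalent:

import subprocess
from pathlib import PurePath

def run_coverage_report(report_type: str):
    # report_type is one of "report", "html", "xml" (a stand-in for ReportType.report_name).
    completed = subprocess.run(["coverage", report_type], check=True, capture_output=True)
    if report_type == "report":
        return completed.stdout.decode()  # the console report goes to stdout
    return PurePath("htmlcov/index.html") if report_type == "html" else PurePath("coverage.xml")
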
Example #9
async def bandit_lint(
    linter: BanditLinter,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if bandit.options.skip:
        return LintResult.noop()

    adaptors_with_origins = linter.adaptors_with_origins

    # NB: Bandit output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor
         for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="bandit.pex",
        requirements=PexRequirements(bandit.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=bandit.get_entry_point(),
    ))

    config_path: Optional[str] = bandit.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--bandit-config`",
    ))

    all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )))

    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))

    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./bandit.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               bandit=bandit),
        input_files=merged_input_files,
        description=f"Run Bandit for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)