Example #1
 def run_docformatter(
     self,
     targets: List[TargetWithOrigin],
     *,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResult, FmtResult]:
     args = ["--backend-packages2=pants.backend.python.lint.docformatter"]
     if passthrough_args:
         args.append(f"--docformatter-args='{passthrough_args}'")
     if skip:
         args.append("--docformatter-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     configs = [DocformatterConfiguration.create(tgt) for tgt in targets]
     lint_result = self.request_single_product(
         LintResult,
         Params(DocformatterConfigurations(configs), options_bootstrapper))
     input_snapshot = self.request_single_product(
         SourceFiles,
         Params(AllSourceFilesRequest(config.sources for config in configs),
                options_bootstrapper),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             DocformatterConfigurations(
                 configs, prior_formatter_result=input_snapshot),
             options_bootstrapper,
         ),
     )
     return lint_result, fmt_result
Example #2
async def format_python_target(
        python_fmt_targets: PythonFmtTargets,
        union_membership: UnionMembership) -> LanguageFmtResults:
    targets_with_origins = python_fmt_targets.targets_with_origins
    original_sources = await Get[SourceFiles](AllSourceFilesRequest(
        target_with_origin.target[PythonSources]
        for target_with_origin in python_fmt_targets.targets_with_origins))
    prior_formatter_result = original_sources.snapshot

    results: List[FmtResult] = []
    fmt_request_types: Iterable[
        Type[FmtRequest]] = union_membership.union_rules[PythonFmtRequest]
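    # Run each registered formatter in sequence, threading the previous
    # formatter's output snapshot into the next request so edits accumulate.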
    for fmt_request_type in fmt_request_types:
        result = await Get[FmtResult](
            PythonFmtRequest,
            fmt_request_type(
                (fmt_request_type.field_set_type.create(target_with_origin)
                 for target_with_origin in targets_with_origins),
                prior_formatter_result=prior_formatter_result,
            ),
        )
        if result != FmtResult.noop():
            results.append(result)
        if result.did_change:
            prior_formatter_result = await Get[Snapshot](Digest, result.output)
    return LanguageFmtResults(
        tuple(results),
        input=original_sources.snapshot.digest,
        output=prior_formatter_result.digest,
    )
Example #3
 def run_isort(
     self,
     targets: List[TargetWithOrigin],
     *,
     config: Optional[str] = None,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResult, FmtResult]:
     args = ["--backend-packages2=pants.backend.python.lint.isort"]
     if config is not None:
         self.create_file(relpath=".isort.cfg", contents=config)
         args.append("--isort-config=.isort.cfg")
     if passthrough_args:
         args.append(f"--isort-args='{passthrough_args}'")
     if skip:
         args.append("--isort-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     configs = [IsortConfiguration.create(tgt) for tgt in targets]
     lint_result = self.request_single_product(
         LintResult,
         Params(IsortConfigurations(configs), options_bootstrapper))
     input_snapshot = self.request_single_product(
         SourceFiles,
         Params(AllSourceFilesRequest(config.sources for config in configs),
                options_bootstrapper),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             IsortConfigurations(configs,
                                 prior_formatter_result=input_snapshot),
             options_bootstrapper,
         ),
     )
     return lint_result, fmt_result
Example #4
async def create_python_binary(
        config: PythonBinaryConfiguration) -> CreatedBinary:
    entry_point: Optional[str]
    if config.entry_point.value is not None:
        entry_point = config.entry_point.value
    else:
        source_files = await Get[SourceFiles](AllSourceFilesRequest(
            [config.sources], strip_source_roots=True))
        # NB: `PythonBinarySources` enforces that we have 0-1 sources.
        if len(source_files.files) == 1:
            module_name = source_files.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                module_name)
        else:
            entry_point = None

    output_filename = f"{config.address.target_name}.pex"
    two_step_pex = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([config.address]),
            entry_point=entry_point,
            platforms=PexPlatforms.create_from_platforms_field(
                config.platforms),
            output_filename=output_filename,
            additional_args=config.generate_additional_args(),
        )))
    pex = two_step_pex.pex
    return CreatedBinary(digest=pex.directory_digest,
                         binary_name=pex.output_filename)
Example #5
async def format_python_target(
        python_fmt_targets: PythonFmtTargets,
        union_membership: UnionMembership) -> LanguageFmtResults:
    targets_with_origins = python_fmt_targets.targets_with_origins
    original_sources = await Get[SourceFiles](AllSourceFilesRequest(
        target_with_origin.target[PythonSources]
        for target_with_origin in python_fmt_targets.targets_with_origins))
    prior_formatter_result = original_sources.snapshot

    results: List[FmtResult] = []
    config_collection_types: Iterable[
        Type[FmtConfigurations]] = union_membership.union_rules[
            PythonFmtConfigurations]
    for config_collection_type in config_collection_types:
        result = await Get[FmtResult](
            PythonFmtConfigurations,
            config_collection_type(
                (config_collection_type.config_type.create(target_with_origin)
                 for target_with_origin in targets_with_origins),
                prior_formatter_result=prior_formatter_result,
            ),
        )
        if result != FmtResult.noop():
            results.append(result)
            prior_formatter_result = await Get[Snapshot](Digest, result.digest)
    return LanguageFmtResults(
        tuple(results),
        combined_digest=prior_formatter_result.directory_digest)
Example #6
async def flake8_lint_partition(
    partition: Flake8Partition,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    requirements_pex_request = Get[Pex](PexRequest(
        output_filename="flake8.pex",
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=(partition.interpreter_constraints
                                 or PexInterpreterConstraints(
                                     flake8.default_interpreter_constraints)),
        entry_point=flake8.get_entry_point(),
    ))

    config_path: Optional[str] = flake8.options.config
    config_snapshot_request = Get[Snapshot](PathGlobs(
        globs=[config_path] if config_path else [],
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))

    all_source_files_request = Get[SourceFiles](AllSourceFilesRequest(
        field_set.sources for field_set in partition.field_sets))
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest((field_set.sources, field_set.origin)
                                    for field_set in partition.field_sets))

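    # MultiGet resolves the four requests concurrently; the cast only restores
    # the precise tuple type for the type checker.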
    requirements_pex, config_snapshot, all_source_files, specified_source_files = cast(
        Tuple[Pex, Snapshot, SourceFiles, SourceFiles],
        await MultiGet([
            requirements_pex_request,
            config_snapshot_request,
            all_source_files_request,
            specified_source_files_request,
        ]),
    )

    input_digest = await Get[Digest](MergeDigests(
        (all_source_files.snapshot.digest, requirements_pex.digest,
         config_snapshot.digest)))

    address_references = ", ".join(
        sorted(field_set.address.reference()
               for field_set in partition.field_sets))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_digest=input_digest,
        description=
        (f"Run Flake8 on {pluralize(len(partition.field_sets), 'target')}: {address_references}."
         ),
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result,
                                                   linter_name="Flake8")
Example #7
async def setup(
    setup_request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex_request = Get[Pex](PexRequest(
        output_filename="docformatter.pex",
        requirements=PexRequirements(docformatter.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            docformatter.default_interpreter_constraints),
        entry_point=docformatter.get_entry_point(),
    ))

    all_source_files_request = Get[SourceFiles](AllSourceFilesRequest(
        field_set.sources for field_set in setup_request.request.field_sets))
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin)
            for field_set in setup_request.request.field_sets))

    requests: List[Get] = [
        requirements_pex_request, specified_source_files_request
    ]
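    # The all-sources request is only needed when no prior formatter produced a
    # snapshot; when one did, its result is reused below instead.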
    if setup_request.request.prior_formatter_result is None:
        requests.append(all_source_files_request)
    requirements_pex, specified_source_files, *rest = cast(
        Union[Tuple[Pex, SourceFiles], Tuple[Pex, SourceFiles, SourceFiles]],
        await MultiGet(requests),
    )

    all_source_files_snapshot = (
        setup_request.request.prior_formatter_result
        if setup_request.request.prior_formatter_result else rest[0].snapshot)

    input_digest = await Get[Digest](MergeDigests(
        (all_source_files_snapshot.digest, requirements_pex.digest)))

    address_references = ", ".join(
        sorted(field_set.address.reference()
               for field_set in setup_request.request.field_sets))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=setup_request.check_only,
        ),
        input_digest=input_digest,
        output_files=all_source_files_snapshot.files,
        description=
        (f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'target')}: "
         f"{address_references}."),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
Example #8
async def setup(
    request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="isort.pex",
        requirements=PexRequirements(isort.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            isort.default_interpreter_constraints),
        entry_point=isort.get_entry_point(),
    ))

    config_path: Optional[List[str]] = isort.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=config_path or (),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        conjunction=GlobExpansionConjunction.all_match,
        description_of_origin="the option `--isort-config`",
    ))

    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
            config.sources for config in request.configs))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=
        (f"Run isort on {pluralize(len(request.configs), 'target')}: {address_references}."
         ),
    )
    return Setup(process)
Example #9
async def bandit_lint(
    configs: BanditConfigurations,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if bandit.options.skip:
        return LintResult.noop()

    # NB: Bandit output depends upon which Python interpreter version it's run with. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup=python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="bandit.pex",
            requirements=PexRequirements(bandit.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=bandit.get_entry_point(),
        )
    )

    config_path: Optional[str] = bandit.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--bandit-config`",
        )
    )

    all_source_files = await Get[SourceFiles](
        AllSourceFilesRequest(config.sources for config in configs)
    )
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest((config.sources, config.origin) for config in configs)
    )

    merged_input_files = await Get[Digest](
        MergeDigests(
            (all_source_files.snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        ),
    )

    address_references = ", ".join(sorted(config.address.reference() for config in configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./bandit.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, bandit=bandit),
        input_files=merged_input_files,
        description=f"Run Bandit on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
Example #10
async def create_coverage_config(
    coverage_config_request: CoverageConfigRequest, source_root_config: SourceRootConfig
) -> CoverageConfig:
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in coverage_config_request.targets), strip_source_roots=False,
        )
    )
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(sources.snapshot))
    source_roots = source_root_config.get_source_roots()

    # Generate a map from source root stripped source to its source root. eg:
    #  {'pants/testutil/subsystem/util.py': 'src/python'}. This is so that coverage reports
    #  referencing /chroot/path/pants/testutil/subsystem/util.py can be mapped back to the actual
    #  sources they reference when generating coverage reports.
    def stripped_file_with_source_root(file_name: str) -> Tuple[str, str]:
        source_root_object = source_roots.find_by_path(file_name)
        source_root = source_root_object.path if source_root_object is not None else ""
        stripped_path = file_name[len(source_root) + 1 :]
        return stripped_path, source_root

    stripped_files_to_source_roots = dict(
        stripped_file_with_source_root(filename)
        for filename in sorted(init_injected.snapshot.files)
    )

    default_config = dedent(
        """
        [run]
        branch = True
        timid = False
        relative_files = True
        """
    )

    config_parser = configparser.ConfigParser()
    config_parser.read_file(StringIO(default_config))
    config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.set(
        COVERAGE_PLUGIN_MODULE_NAME,
        "source_to_target_base",
        json.dumps(stripped_files_to_source_roots),
    )
    config_parser.set(
        COVERAGE_PLUGIN_MODULE_NAME, "test_time", json.dumps(coverage_config_request.is_test_time)
    )

    config_io_stream = StringIO()
    config_parser.write(config_io_stream)
    digest = await Get[Digest](
        InputFilesContent(
            [FileContent(".coveragerc", content=config_io_stream.getvalue().encode())]
        )
    )
    return CoverageConfig(digest)
Example #11
async def prepare_python_sources(targets: Targets) -> ImportablePythonSources:
    stripped_sources = await Get[SourceFiles](AllSourceFilesRequest(
        (tgt.get(Sources) for tgt in targets),
        for_sources_types=(PythonSources, ResourcesSources, FilesSources),
        enable_codegen=True,
        strip_source_roots=True,
    ))
    init_injected = await Get[InitInjectedSnapshot](InjectInitRequest(
        stripped_sources.snapshot))
    return ImportablePythonSources(init_injected.snapshot)
Example #12
async def flake8_lint(
    configs: Flake8Configurations,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    if flake8.options.skip:
        return LintResult.noop()

    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup)
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="flake8.pex",
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=flake8.get_entry_point(),
    ))

    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))

    all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
        config.sources for config in configs))
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./flake8.pex",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_files=merged_input_files,
        description=
        f"Run Flake8 on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result)
Example #13
 def get_all_source_files(
     self,
     sources_fields_with_origins: Iterable[Tuple[SourcesField, OriginSpec]],
     *,
     strip_source_roots: bool = False,
 ) -> List[str]:
     request = AllSourceFilesRequest(
         (sources_field_with_origin[0]
          for sources_field_with_origin in sources_fields_with_origins),
         strip_source_roots=strip_source_roots,
     )
     result = self.request_single_product(
         SourceFiles, Params(request, create_options_bootstrapper()))
     return sorted(result.snapshot.files)
Example #14
async def merge_coverage_data(
    data_collection: PytestCoverageDataCollection,
    coverage_setup: CoverageSetup,
    transitive_targets: TransitiveTargets,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> MergedCoverageData:
    """Takes all Python test results and merges their coverage data into a single SQL file."""
    # We start with a bunch of test results, each of which has a coverage data file called
    # `.coverage`. We prefix each of these with their address so that we can write them all into a
    # single PEX.
    coverage_directory_digests = await MultiGet(
        Get[Digest](DirectoryWithPrefixToAdd(data.digest, prefix=data.address.path_safe_spec))
        for data in data_collection
    )
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure), strip_source_roots=False
        )
    )
    sources_with_inits = await Get[InitInjectedSnapshot](InjectInitRequest(sources.snapshot))
    coverage_config = await Get[CoverageConfig](
        CoverageConfigRequest(Targets(transitive_targets.closure), is_test_time=True)
    )
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(
            directories=(
                *coverage_directory_digests,
                sources_with_inits.snapshot.directory_digest,
                coverage_config.digest,
                coverage_setup.requirements_pex.directory_digest,
            )
        ),
    )

    prefixes = [f"{data.address.path_safe_spec}/.coverage" for data in data_collection]
    coverage_args = ("combine", *prefixes)
    process = coverage_setup.requirements_pex.create_process(
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_files=(".coverage",),
        description=f"Merge {len(prefixes)} Pytest coverage reports.",
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
    )

    result = await Get[ProcessResult](Process, process)
    return MergedCoverageData(coverage_data=result.output_directory_digest)
Example #15
async def setup(
    request: SetupRequest,
    docformatter: Docformatter,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex = await Get[Pex](PexRequest(
        output_filename="docformatter.pex",
        requirements=PexRequirements(docformatter.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            docformatter.default_interpreter_constraints),
        entry_point=docformatter.get_entry_point(),
    ))

    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
            config.sources for config in request.configs))
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (config.sources, config.origin) for config in request.configs))

    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files_snapshot.directory_digest,
        requirements_pex.directory_digest,
    )), )

    address_references = ", ".join(
        sorted(config.address.reference() for config in request.configs))

    process = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./docformatter.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            docformatter=docformatter,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Docformatter on {pluralize(len(request.configs), 'target')}: "
            f"{address_references}."),
    )
    return Setup(process)
Example #16
async def create_coverage_config(
        coverage_config_request: CoverageConfigRequest) -> CoverageConfig:
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (tgt.get(PythonSources) for tgt in coverage_config_request.targets
         if tgt.has_field(PythonSources))))

    source_root_stripped_sources = await Get[SourceRootStrippedSources](
        StripSnapshotRequest(snapshot=sources.snapshot))

    # Generate a map from source root stripped source to its source root. eg:
    #  {'pants/testutil/subsystem/util.py': 'src/python'}. This is so that coverage reports
    #  referencing /chroot/path/pants/testutil/subsystem/util.py can be mapped back to the actual
    #  sources they reference when generating coverage reports.
    # Note that this map doesn't contain injected __init__.py files, but we tell coverage
    # to ignore those using the -i flag (see below).
    stripped_files_to_source_roots = {}
    for source_root, files in source_root_stripped_sources.root_to_relfiles.items(
    ):
        for file in files:
            stripped_files_to_source_roots[file] = source_root

    default_config = dedent("""
        [run]
        branch = True
        timid = False
        relative_files = True
        """)

    config_parser = configparser.ConfigParser()
    config_parser.read_file(StringIO(default_config))
    config_parser.set("run", "plugins", COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.add_section(COVERAGE_PLUGIN_MODULE_NAME)
    config_parser.set(
        COVERAGE_PLUGIN_MODULE_NAME,
        "source_to_target_base",
        json.dumps(stripped_files_to_source_roots),
    )
    config_parser.set(COVERAGE_PLUGIN_MODULE_NAME, "test_time",
                      json.dumps(coverage_config_request.is_test_time))

    config_io_stream = StringIO()
    config_parser.write(config_io_stream)
    digest = await Get[Digest](InputFilesContent([
        FileContent(".coveragerc",
                    content=config_io_stream.getvalue().encode())
    ]))
    return CoverageConfig(digest)
Example #17
async def get_ancestor_init_py(
    targets: Targets, source_root_config: SourceRootConfig
) -> AncestorInitPyFiles:
    """Find any ancestor __init__.py files for the given targets.

    Includes sibling __init__.py files. Returns the files stripped of their source roots.
    """
    source_roots = source_root_config.get_source_roots()
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(tgt[PythonSources] for tgt in targets if tgt.has_field(PythonSources))
    )
    # Find the ancestors of all dirs containing .py files, including those dirs themselves.
    source_dir_ancestors: Set[Tuple[str, str]] = set()  # Items are (src_root, path incl. src_root).
    for fp in sources.snapshot.files:
        source_dir_ancestor = os.path.dirname(fp)
        source_root = source_root_or_raise(source_roots, fp)
        # Do not allow the repository root to leak (i.e., '.' should not be a package in setup.py).
        while source_dir_ancestor != source_root:
            source_dir_ancestors.add((source_root, source_dir_ancestor))
            source_dir_ancestor = os.path.dirname(source_dir_ancestor)

    source_dir_ancestors_list = list(source_dir_ancestors)  # To force a consistent order.

    # Note that we must MultiGet single globs instead of a single Get for all the globs, because
    # we match each result to its originating glob (see use of zip below).
    ancestor_init_py_snapshots = await MultiGet[Snapshot](
        Get[Snapshot](PathGlobs, PathGlobs([os.path.join(source_dir_ancestor[1], "__init__.py")]))
        for source_dir_ancestor in source_dir_ancestors_list
    )

    source_root_stripped_ancestor_init_pys = await MultiGet[Digest](
        Get[Digest](
            DirectoryWithPrefixToStrip(
                directory_digest=snapshot.directory_digest, prefix=source_dir_ancestor[0]
            )
        )
        for snapshot, source_dir_ancestor in zip(
            ancestor_init_py_snapshots, source_dir_ancestors_list
        )
    )

    return AncestorInitPyFiles(source_root_stripped_ancestor_init_pys)
Example #18
async def create_python_binary(
        field_set: PythonBinaryFieldSet) -> CreatedBinary:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        source_files = await Get[SourceFiles](AllSourceFilesRequest(
            [field_set.sources], strip_source_roots=True))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            source_files.files)

    output_filename = f"{field_set.address.target_name}.pex"
    two_step_pex = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            entry_point=entry_point,
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            output_filename=output_filename,
            additional_args=field_set.generate_additional_args(),
        )))
    pex = two_step_pex.pex
    return CreatedBinary(digest=pex.digest, binary_name=pex.output_filename)
Example #19
async def get_ancestor_init_py(targets: Targets) -> AncestorInitPyFiles:
    """Find any ancestor __init__.py files for the given targets.

    Includes sibling __init__.py files. Returns the files stripped of their source roots.
    """
    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (tgt.get(Sources) for tgt in targets),
        for_sources_types=(PythonSources, ),
        enable_codegen=True,
    ))
    # Find the ancestors of all dirs containing .py files, including those dirs themselves.
    source_dir_ancestors: Set[Tuple[
        str, str]] = set()  # Items are (src_root, path incl. src_root).
    source_roots = await MultiGet(
        Get[SourceRoot](SourceRootRequest, SourceRootRequest.for_file(path))
        for path in sources.files)
    for path, source_root in zip(sources.files, source_roots):
        source_dir_ancestor = os.path.dirname(path)
        # Do not allow the repository root to leak (i.e., '.' should not be a package in setup.py).
        while source_dir_ancestor != source_root.path:
            source_dir_ancestors.add((source_root.path, source_dir_ancestor))
            source_dir_ancestor = os.path.dirname(source_dir_ancestor)

    source_dir_ancestors_list = list(
        source_dir_ancestors)  # To force a consistent order.

    # Note that we must MultiGet single globs instead of a single Get for all the globs, because
    # we match each result to its originating glob (see use of zip below).
    ancestor_init_py_snapshots = await MultiGet(
        Get[Snapshot](
            PathGlobs,
            PathGlobs([os.path.join(source_dir_ancestor[1], "__init__.py")]))
        for source_dir_ancestor in source_dir_ancestors_list)

    source_root_stripped_ancestor_init_pys = await MultiGet(
        Get[Digest](RemovePrefix(snapshot.digest, source_dir_ancestor[0]))
        for snapshot, source_dir_ancestor in zip(ancestor_init_py_snapshots,
                                                 source_dir_ancestors_list))

    return AncestorInitPyFiles(source_root_stripped_ancestor_init_pys)
Example #20
 def run_black(
     self,
     targets: List[TargetWithOrigin],
     *,
     config: Optional[str] = None,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResults, FmtResult]:
     args = ["--backend-packages2=pants.backend.python.lint.black"]
     if config is not None:
         self.create_file(relpath="pyproject.toml", contents=config)
         args.append("--black-config=pyproject.toml")
     if passthrough_args:
         args.append(f"--black-args='{passthrough_args}'")
     if skip:
         args.append("--black-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     field_sets = [BlackFieldSet.create(tgt) for tgt in targets]
     lint_results = self.request_single_product(
         LintResults, Params(BlackRequest(field_sets),
                             options_bootstrapper))
     input_sources = self.request_single_product(
         SourceFiles,
         Params(
             AllSourceFilesRequest(field_set.sources
                                   for field_set in field_sets),
             options_bootstrapper,
         ),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             BlackRequest(field_sets,
                          prior_formatter_result=input_sources.snapshot),
             options_bootstrapper,
         ),
     )
     return lint_results, fmt_result
Example #21
async def setup(
    setup_request: SetupRequest,
    isort: Isort,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex_request = Get[Pex](PexRequest(
        output_filename="isort.pex",
        requirements=PexRequirements(isort.get_requirement_specs()),
        interpreter_constraints=PexInterpreterConstraints(
            isort.default_interpreter_constraints),
        entry_point=isort.get_entry_point(),
    ))

    config_path: Optional[List[str]] = isort.options.config
    config_snapshot_request = Get[Snapshot](PathGlobs(
        globs=config_path or (),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        conjunction=GlobExpansionConjunction.all_match,
        description_of_origin="the option `--isort-config`",
    ))

    all_source_files_request = Get[SourceFiles](AllSourceFilesRequest(
        field_set.sources for field_set in setup_request.request.field_sets))
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            (field_set.sources, field_set.origin)
            for field_set in setup_request.request.field_sets))

    requests: List[Get] = [
        requirements_pex_request,
        config_snapshot_request,
        specified_source_files_request,
    ]
    if setup_request.request.prior_formatter_result is None:
        requests.append(all_source_files_request)
    requirements_pex, config_snapshot, specified_source_files, *rest = cast(
        Union[Tuple[Pex, Snapshot, SourceFiles],
              Tuple[Pex, Snapshot, SourceFiles, SourceFiles]],
        await MultiGet(requests),
    )

    all_source_files_snapshot = (
        setup_request.request.prior_formatter_result
        if setup_request.request.prior_formatter_result else rest[0].snapshot)

    input_digest = await Get[Digest](MergeDigests(
        (all_source_files_snapshot.digest, requirements_pex.digest,
         config_snapshot.digest)))

    address_references = ", ".join(
        sorted(field_set.address.reference()
               for field_set in setup_request.request.field_sets))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./isort.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            isort=isort,
            check_only=setup_request.check_only,
        ),
        input_digest=input_digest,
        output_files=all_source_files_snapshot.files,
        description=
        (f"Run isort on {pluralize(len(setup_request.request.field_sets), 'target')}: {address_references}."
         ),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
Example #22
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    download_protoc_request = Get[DownloadedExternalTool](
        ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    create_output_dir_request = Get[ProcessResult](
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            output_directories=(output_dir,),
        )
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get[TransitiveTargets](Addresses([request.protocol_target.address]))
    all_sources_request = Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
            # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
            # for Protobuf imports to be discoverable.
            strip_source_roots=True,
        )
    )
    stripped_target_sources_request = Get[SourceFiles](
        AllSourceFilesRequest([request.protocol_target[ProtobufSources]], strip_source_roots=True)
    )

    (
        downloaded_protoc_binary,
        create_output_dir_result,
        all_sources,
        stripped_target_sources,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_sources_request,
        stripped_target_sources_request,
    )

    input_digest = await Get[Digest](
        MergeDigests(
            (
                all_sources.snapshot.digest,
                downloaded_protoc_binary.digest,
                create_output_dir_result.output_digest,
            )
        )
    )

    result = await Get[ProcessResult](
        Process(
            (
                downloaded_protoc_binary.exe,
                "--python_out",
                output_dir,
                *stripped_target_sources.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            output_directories=(output_dir,),
        )
    )
    normalized_snapshot = await Get[Snapshot](RemovePrefix(result.output_digest, output_dir))
    return GeneratedSources(normalized_snapshot)
Example #23
async def generate_coverage_report(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_subsystem: PytestCoverage,
    transitive_targets: TransitiveTargets,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CoverageReports:
    """Takes all Python test results and generates a single coverage report."""
    requirements_pex = coverage_setup.requirements_pex

    coverage_config = await Get[CoverageConfig](CoverageConfigRequest(
        Targets(transitive_targets.closure), is_test_time=False))

    sources = await Get[SourceFiles](AllSourceFilesRequest(
        (tgt.get(Sources) for tgt in transitive_targets.closure),
        strip_source_roots=False))
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot))
    input_digest: Digest = await Get(
        Digest,
        MergeDigests((
            merged_coverage_data.coverage_data,
            coverage_config.digest,
            requirements_pex.digest,
            sources_with_inits_snapshot.snapshot.digest,
        )),
    )

    report_type = coverage_subsystem.options.report
    # The -i flag causes coverage to ignore files it doesn't have sources for.
    # Specifically, it will ignore the injected __init__.py files, which is what we want since
    # those are empty and don't correspond to a real source file the user is aware of.
    coverage_args = [report_type.report_name, "-i"]

    process = requirements_pex.create_process(
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_digest=input_digest,
        output_directories=("htmlcov", ),
        output_files=("coverage.xml", ),
        description="Generate Pytest coverage report.",
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
    )
    result = await Get[ProcessResult](Process, process)

    if report_type == CoverageReportType.CONSOLE:
        return CoverageReports(
            reports=(ConsoleCoverageReport(result.stdout.decode()), ))

    report_dir = PurePath(coverage_subsystem.options.report_output_path)

    report_file: Optional[PurePath] = None
    if report_type == CoverageReportType.HTML:
        report_file = report_dir / "htmlcov" / "index.html"
    elif report_type == CoverageReportType.XML:
        report_file = report_dir / "coverage.xml"
    fs_report = FilesystemCoverageReport(
        result_digest=result.output_digest,
        directory_to_materialize_to=report_dir,
        report_file=report_file,
    )
    return CoverageReports(reports=(fs_report, ))
Example #24
async def setup(
    request: SetupRequest,
    black: Black,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="black.pex",
            requirements=PexRequirements(black.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                black.default_interpreter_constraints
            ),
            entry_point=black.get_entry_point(),
        )
    )

    config_path: Optional[str] = black.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        )
    )

    if request.configs.prior_formatter_result is None:
        all_source_files = await Get[SourceFiles](
            AllSourceFilesRequest(config.sources for config in request.configs)
        )
        all_source_files_snapshot = all_source_files.snapshot
    else:
        all_source_files_snapshot = request.configs.prior_formatter_result

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest((config.sources, config.origin) for config in request.configs)
    )

    merged_input_files = await Get[Digest](
        MergeDigests(
            (all_source_files_snapshot.digest, requirements_pex.digest, config_snapshot.digest)
        ),
    )

    address_references = ", ".join(sorted(config.address.reference() for config in request.configs))

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./black.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            black=black,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        output_files=all_source_files_snapshot.files,
        description=(
            f"Run Black on {pluralize(len(request.configs), 'target')}: {address_references}."
        ),
    )
    return Setup(process, original_digest=all_source_files_snapshot.digest)
Example #25
async def generate_coverage_report(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_subsystem: PytestCoverage,
    transitive_targets: TransitiveTargets,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CoverageReport:
    """Takes all Python test results and generates a single coverage report."""
    requirements_pex = coverage_setup.requirements_pex

    python_targets = [tgt for tgt in transitive_targets.closure if tgt.has_field(PythonSources)]
    coverage_config = await Get[CoverageConfig](
        CoverageConfigRequest(Targets(python_targets), is_test_time=False)
    )

    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure), strip_source_roots=False
        )
    )
    sources_with_inits_snapshot = await Get[InitInjectedSnapshot](
        InjectInitRequest(sources.snapshot)
    )
    merged_input_files: Digest = await Get(
        Digest,
        DirectoriesToMerge(
            directories=(
                merged_coverage_data.coverage_data,
                coverage_config.digest,
                requirements_pex.directory_digest,
                sources_with_inits_snapshot.snapshot.directory_digest,
            )
        ),
    )

    report_type = coverage_subsystem.options.report
    coverage_args = [report_type.report_name]

    process = requirements_pex.create_process(
        pex_path=f"./{coverage_setup.requirements_pex.output_filename}",
        pex_args=coverage_args,
        input_files=merged_input_files,
        output_directories=("htmlcov",),
        output_files=("coverage.xml",),
        description="Generate Pytest coverage report.",
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
    )
    result = await Get[ProcessResult](Process, process)

    if report_type == ReportType.CONSOLE:
        return ConsoleCoverageReport(result.stdout.decode())

    report_dir = PurePath(coverage_subsystem.options.report_output_path)

    report_file: Optional[PurePath] = None
    if coverage_subsystem.options.report == ReportType.HTML:
        report_file = report_dir / "htmlcov" / "index.html"
    elif coverage_subsystem.options.report == ReportType.XML:
        report_file = report_dir / "coverage.xml"

    return FilesystemCoverageReport(
        result_digest=result.output_directory_digest,
        directory_to_materialize_to=report_dir,
        report_file=report_file,
    )