async def run_bash_binary(field_set: BashRunFieldSet,
                          bash_program: BashProgram,
                          bash_setup: BashSetup) -> RunRequest:
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([field_set.address]))

    # We need to include all relevant transitive dependencies in the environment. We also get the
    # binary's sources so that we know the script name. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-and-target-api.
    binary_sources_request = Get(SourceFiles,
                                 SourceFilesRequest([field_set.sources]))
    all_sources_request = Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(BashSources, FilesSources, ResourcesSources),
        ),
    )
    binary_sources, all_sources = await MultiGet(binary_sources_request,
                                                 all_sources_request)

    # We join the relative path to our program with the template string "{chroot}", which will get
    # substituted with the path to the temporary directory where our program runs. This ensures
    # that we run the correct file.
    # Note that `BashBinarySources` will have already validated that there is exactly one file in
    # the sources field.
    script_name = os.path.join("{chroot}", binary_sources.files[0])

    return RunRequest(
        digest=all_sources.snapshot.digest,
        args=[bash_program.exe, script_name],
        extra_env=bash_setup.env_dict,
    )
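
A quick illustration of the "{chroot}" comment above, using made-up values (the source path, bash location, and sandbox directory below are hypothetical, not from the original):

import os

# Hypothetical single source of the bash_binary target.
script_name = os.path.join("{chroot}", "src/sh/deploy.sh")
print(script_name)  # -> {chroot}/src/sh/deploy.sh

# The resulting RunRequest args would look roughly like
#   ["/usr/bin/bash", "{chroot}/src/sh/deploy.sh"]
# and Pants replaces the literal "{chroot}" marker with the sandbox directory when the
# `run` goal executes, so the command actually run is something like
#   /usr/bin/bash /tmp/pants-sandbox-<id>/src/sh/deploy.sh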
Example #2
async def run_buf(request: BufRequest, buf: BufSubsystem) -> LintResults:
    if buf.skip:
        return LintResults([], linter_name=request.name)

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest(
            (field_set.address for field_set in request.field_sets)),
    )

    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(tgt[ProtobufSourceField]
                           for tgt in transitive_targets.closure
                           if tgt.has_field(ProtobufSourceField)),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (field_set.sources for field_set in request.field_sets),
            for_sources_types=(ProtobufSourceField, ),
            enable_codegen=True,
        ),
    )

    download_buf_get = Get(DownloadedExternalTool, ExternalToolRequest,
                           buf.get_request(Platform.current))

    target_sources_stripped, all_sources_stripped, downloaded_buf = await MultiGet(
        target_stripped_sources_request, all_stripped_sources_request,
        download_buf_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            target_sources_stripped.snapshot.digest,
            all_sources_stripped.snapshot.digest,
            downloaded_buf.digest,
        )),
    )

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                downloaded_buf.exe,
                "lint",
                *buf.args,
                "--path",
                ",".join(target_sources_stripped.snapshot.files),
            ],
            input_digest=input_digest,
            description=
            f"Run Buf on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    result = LintResult.from_fallible_process_result(process_result)

    return LintResults([result], linter_name=request.name)
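
A rule like run_buf only takes effect once its request type is registered against the lint union. The following is a minimal sketch of that plumbing, not the original code: the field-set definition, the `name` attribute, and the union base are assumptions (older Pants releases used LintRequest, later ones LintTargetsRequest).

from dataclasses import dataclass

from pants.backend.codegen.protobuf.target_types import ProtobufSourceField
from pants.core.goals.lint import LintTargetsRequest  # LintRequest on older Pants versions
from pants.engine.rules import collect_rules
from pants.engine.target import FieldSet
from pants.engine.unions import UnionRule


@dataclass(frozen=True)
class BufFieldSet(FieldSet):
    required_fields = (ProtobufSourceField,)

    sources: ProtobufSourceField


class BufRequest(LintTargetsRequest):
    field_set_type = BufFieldSet
    name = "buf"  # surfaces as request.name / linter_name in run_buf above


def rules():
    # Registering the request type is what makes the `lint` goal route Protobuf targets to run_buf.
    return [*collect_rules(), UnionRule(LintTargetsRequest, BufRequest)]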
Example #3
async def get_helm_source_files(request: HelmChartSourceFilesRequest) -> HelmChartSourceFiles:
    dependencies = await Get(Targets, DependenciesRequest(request.field_set.dependencies))
    source_files, original_sources = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                sources_fields=[
                    *request.sources_fields,
                    *(
                        tgt.get(SourcesField)
                        for tgt in dependencies
                        if not HelmChartFieldSet.is_applicable(tgt)
                    ),
                ],
                for_sources_types=request.valid_sources_types,
                enable_codegen=True,
            ),
        ),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest([request.field_set.sources], enable_codegen=False),
        ),
    )
    all_files_snapshot = await Get(
        Snapshot, MergeDigests([source_files.snapshot.digest, original_sources.snapshot.digest])
    )
    return HelmChartSourceFiles(snapshot=all_files_snapshot)
Example #4
async def map_first_party_scala_targets_to_symbols(
    _: FirstPartyScalaTargetsMappingRequest,
    scala_targets: AllScalaTargets,
    jvm: JvmSubsystem,
) -> SymbolMap:
    source_analysis = await MultiGet(
        Get(ScalaSourceDependencyAnalysis,
            SourceFilesRequest([target[ScalaSourceField]]))
        for target in scala_targets)
    address_and_analysis = zip(
        [(tgt.address, tgt[JvmResolveField].normalized_value(jvm))
         for tgt in scala_targets],
        source_analysis,
    )

    mapping: Mapping[str, MutableTrieNode] = defaultdict(MutableTrieNode)
    for (address, resolve), analysis in address_and_analysis:
        namespace = _symbol_namespace(address)
        for symbol in analysis.provided_symbols:
            mapping[resolve].insert(symbol, [address],
                                    first_party=True,
                                    namespace=namespace)
        for symbol in analysis.provided_symbols_encoded:
            mapping[resolve].insert(symbol, [address],
                                    first_party=True,
                                    namespace=namespace)

    return SymbolMap(
        (resolve, node.frozen()) for resolve, node in mapping.items())
Example #5
async def format_terraform_targets(
    terraform_fmt_targets: TerraformFmtTargets, union_membership: UnionMembership
) -> LanguageFmtResults:
    original_sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            target[TerraformModuleSourcesField] for target in terraform_fmt_targets.targets
        ),
    )
    prior_formatter_result = original_sources.snapshot

    results = []
    fmt_request_types: Iterable[type[StyleRequest]] = union_membership[TerraformFmtRequest]
    for fmt_request_type in fmt_request_types:
        request = fmt_request_type(
            (
                fmt_request_type.field_set_type.create(target)
                for target in terraform_fmt_targets.targets
                if fmt_request_type.field_set_type.is_applicable(target)
            ),
            prior_formatter_result=prior_formatter_result,
        )
        if not request.field_sets:
            continue
        result = await Get(FmtResult, TerraformFmtRequest, request)
        results.append(result)
        if result.did_change:
            prior_formatter_result = await Get(Snapshot, Digest, result.output)
    return LanguageFmtResults(
        tuple(results),
        input=original_sources.snapshot.digest,
        output=prior_formatter_result.digest,
    )
Example #6
def run_gofmt(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    args = ["--backend-packages=pants.backend.go"]
    if skip:
        args.append("--gofmt-skip")
    rule_runner.set_options(args)
    field_sets = [GofmtFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [GofmtRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            GofmtRequest(field_sets,
                         prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
Example #7
async def format_python_target(
        python_fmt_targets: PythonFmtTargets,
        union_membership: UnionMembership) -> LanguageFmtResults:
    original_sources = await Get(
        SourceFiles,
        SourceFilesRequest(target[PythonSources]
                           for target in python_fmt_targets.targets),
    )
    prior_formatter_result = original_sources.snapshot

    results: List[FmtResult] = []
    fmt_request_types: Iterable[Type[
        PythonFmtRequest]] = union_membership.union_rules[PythonFmtRequest]
    for fmt_request_type in fmt_request_types:
        result = await Get(
            FmtResult,
            PythonFmtRequest,
            fmt_request_type(
                (fmt_request_type.field_set_type.create(target)
                 for target in python_fmt_targets.targets),
                prior_formatter_result=prior_formatter_result,
            ),
        )
        results.append(result)
        if result.did_change:
            prior_formatter_result = await Get(Snapshot, Digest, result.output)
    return LanguageFmtResults(
        tuple(results),
        input=original_sources.snapshot.digest,
        output=prior_formatter_result.digest,
    )
Example #8
def assert_dockerfile(
    rule_runner: RuleRunner,
    addr: Address = Address("test"),
    *,
    filename: str = "test/Dockerfile",
    content: str = DOCKERFILE,
) -> None:
    tgt = rule_runner.get_target(addr)
    result = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(
                sources_fields=[tgt.get(SourcesField)],
                for_sources_types=(DockerImageSourceField,),
                enable_codegen=True,
            )
        ],
    )

    if filename:
        assert result.snapshot.files == (filename,)

    if content:
        digest_contents = rule_runner.request(DigestContents, [result.snapshot.digest])
        assert len(digest_contents) == 1
        assert isinstance(digest_contents[0], FileContent)
        assert digest_contents[0].content.decode() == content
Example #9
async def parse_java_imports(
        request: ParseJavaImportsRequest) -> ParsedJavaImports:
    source_files = await Get(SourceFiles,
                             SourceFilesRequest([request.sources]))
    analysis = await Get(JavaSourceDependencyAnalysis, SourceFiles,
                         source_files)
    return ParsedJavaImports(imp.name for imp in analysis.imports)
Example #10
async def setup_google_java_format(
    setup_request: SetupRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> Setup:

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 GoogleJavaFormatToolLockfileSentinel())
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(
                field_set.source
                for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    maybe_java11_or_higher_options = []
    if jdk.jre_major_version >= 11:
        maybe_java11_or_higher_options = [
            "--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
        ]

    args = [
        *maybe_java11_or_higher_options,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--dry-run" if setup_request.check_only else "--replace",
        *source_files.files,
    ]

    process = JvmProcess(
        jdk=jdk,
        argv=args,
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=source_files_snapshot.digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        extra_nailgun_keys=extra_immutable_input_digests,
        output_files=source_files_snapshot.files,
        description=
        f"Run Google Java Format on {pluralize(len(setup_request.request.field_sets), 'file')}.",
        level=LogLevel.DEBUG,
    )

    return Setup(process, original_snapshot=source_files_snapshot)
Example #11
async def get_resources(field_set: DockerResourcesFS) -> DockerComponent:
    return DockerComponent(
        commands=(),
        sources=(await
                 Get(StrippedSourceFiles,
                     SourceFilesRequest([field_set.sources]))).snapshot.digest,
    )
Example #12
async def run_pyupgrade(request: PyUpgradeRequest,
                        pyupgrade: PyUpgrade) -> PyUpgradeResult:
    pyupgrade_pex_get = Get(VenvPex, PexRequest, pyupgrade.to_pex_request())
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in request.field_sets),
    )
    source_files, pyupgrade_pex = await MultiGet(source_files_get,
                                                 pyupgrade_pex_get)

    source_files_snapshot = (source_files.snapshot
                             if request.prior_formatter_result is None else
                             request.prior_formatter_result)

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pyupgrade_pex,
            argv=(*pyupgrade.args, *source_files.files),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=
            f"Run pyupgrade on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return PyUpgradeResult(result, original_snapshot=source_files_snapshot)
Example #13
async def setup_black(setup_request: SetupRequest, black: Black,
                      python_setup: PythonSetup) -> Setup:
    # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7.
    # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+
    # when relevant. We only do this if if <3.8 can't be used, as we don't want a loose requirement
    # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on
    # the machine.
    all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
        (field_set.interpreter_constraints
         for field_set in setup_request.request.field_sets),
        python_setup,
    )
    tool_interpreter_constraints = (all_interpreter_constraints if (
        black.options.is_default("interpreter_constraints")
        and all_interpreter_constraints.requires_python38_or_newer(
            python_setup.interpreter_universe)) else
                                    black.interpreter_constraints)

    black_pex_get = Get(
        VenvPex,
        PexRequest,
        black.to_pex_request(
            interpreter_constraints=tool_interpreter_constraints),
    )

    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in setup_request.request.field_sets),
    )

    source_files, black_pex = await MultiGet(source_files_get, black_pex_get)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    config_files = await Get(ConfigFiles, ConfigFilesRequest,
                             black.config_request(source_files_snapshot.dirs))
    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files_snapshot.digest, config_files.snapshot.digest)))

    process = await Get(
        Process,
        VenvPexProcess(
            black_pex,
            argv=generate_argv(source_files,
                               black,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            concurrency_available=len(setup_request.request.field_sets),
            description=
            f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_snapshot=source_files_snapshot)
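
The interpreter-constraints comment at the top of this rule is easier to follow with concrete values. A small illustration, where the constraint strings, the interpreter universe, and the import path are assumptions for illustration (the original snippet omits imports):

from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints

universe = ["2.7", "3.6", "3.7", "3.8", "3.9"]

# ">=3.6" still permits 3.6/3.7, so the 3.8+ check fails and Black keeps its own default
# interpreter constraints rather than inheriting the code's.
assert not InterpreterConstraints(["CPython>=3.6"]).requires_python38_or_newer(universe)

# "==3.9.*" only permits 3.9, so the check passes and Black runs with the code's constraints.
assert InterpreterConstraints(["CPython==3.9.*"]).requires_python38_or_newer(universe)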
Example #14
def run_isort(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    args = ["--backend-packages=pants.backend.python.lint.isort"]
    if config is not None:
        rule_runner.create_file(relpath=".isort.cfg", contents=config)
        args.append("--isort-config=.isort.cfg")
    if passthrough_args:
        args.append(f"--isort-args='{passthrough_args}'")
    if skip:
        args.append("--isort-skip")
    rule_runner.set_options(args, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    field_sets = [IsortFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [IsortRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            IsortRequest(field_sets,
                         prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
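
Test helpers like run_isort presuppose a RuleRunner with the relevant rules and QueryRules registered. Below is a minimal fixture and usage sketch; the module paths, BUILD file contents, and assertions are assumptions for illustration, not part of the original.

import pytest

from pants.backend.python.lint.isort import rules as isort_rules  # assumed module path
from pants.backend.python.lint.isort.rules import IsortRequest
from pants.backend.python.target_types import PythonLibrary
from pants.core.goals.fmt import FmtResult
from pants.core.goals.lint import LintResults
from pants.core.util_rules import source_files
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.addresses import Address
from pants.testutil.rule_runner import QueryRule, RuleRunner


@pytest.fixture
def rule_runner() -> RuleRunner:
    return RuleRunner(
        rules=[
            *isort_rules.rules(),
            *source_files.rules(),
            QueryRule(LintResults, (IsortRequest,)),
            QueryRule(FmtResult, (IsortRequest,)),
            QueryRule(SourceFiles, (SourceFilesRequest,)),
        ],
        target_types=[PythonLibrary],
    )


def test_isort_noop_on_sorted_imports(rule_runner: RuleRunner) -> None:
    # Already-sorted imports: isort should neither flag nor change the file.
    rule_runner.create_file("f.py", "import a\nimport b\n")
    rule_runner.add_to_build_file("", "python_library(name='t')")
    tgt = rule_runner.get_target(Address("", target_name="t"))
    lint_results, fmt_result = run_isort(rule_runner, [tgt])
    assert lint_results[0].exit_code == 0
    assert fmt_result.did_change is False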
Example #15
def run_buf(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> FmtResult:
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.codegen.protobuf.lint.buf",
            *(extra_args or ()),
        ],
        env_inherit={"PATH"},
    )
    field_sets = [BufFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            BufFormatRequest(field_sets, snapshot=input_sources.snapshot),
        ],
    )

    return fmt_result
Example #16
def run_tffmt(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    skip: bool = False,
) -> FmtResult:
    args = [
        "--backend-packages=pants.backend.experimental.terraform",
        "--backend-packages=pants.backend.experimental.terraform.lint.tffmt",
    ]
    if skip:
        args.append("--terraform-fmt-skip")
    rule_runner.set_options(args)
    field_sets = [TerraformFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            TffmtRequest(field_sets, snapshot=input_sources.snapshot),
        ],
    )
    return fmt_result
Example #17
async def setup_gofmt(setup_request: SetupRequest, goroot: GoRoot) -> Setup:
    source_files = await Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    argv = (
        os.path.join(goroot.path, "bin/gofmt"),
        "-l" if setup_request.check_only else "-w",
        *source_files_snapshot.files,
    )
    process = Process(
        argv=argv,
        input_digest=source_files_snapshot.digest,
        output_files=source_files_snapshot.files,
        description=
        f"Run gofmt on {pluralize(len(source_files_snapshot.files), 'file')}.",
        level=LogLevel.DEBUG,
    )
    return Setup(process=process, original_snapshot=source_files_snapshot)
Example #18
async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports:
    script = _SCRIPT_FORMAT.format(min_dots=request.string_imports_min_dots).encode()
    python_interpreter, script_digest, stripped_sources = await MultiGet(
        Get(PythonExecutable, InterpreterConstraints, request.interpreter_constraints),
        Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", script)])),
        Get(StrippedSourceFiles, SourceFilesRequest([request.source])),
    )

    # We operate on PythonSourceField, which should be one file.
    assert len(stripped_sources.snapshot.files) == 1
    file = stripped_sources.snapshot.files[0]

    input_digest = await Get(
        Digest, MergeDigests([script_digest, stripped_sources.snapshot.digest])
    )
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                python_interpreter.path,
                "./__parse_python_imports.py",
                file,
            ],
            input_digest=input_digest,
            description=f"Determine Python imports for {request.source.address}",
            env={"STRING_IMPORTS": "y" if request.string_imports else "n"},
            level=LogLevel.DEBUG,
        ),
    )
    # See above for where we explicitly encoded as utf8. Even though utf8 is the
    # default for decode(), we make that explicit here for emphasis.
    return ParsedPythonImports(process_result.stdout.decode("utf8").strip().splitlines())
Example #19
async def prepare_python_sources(
    request: PythonSourceFilesRequest, union_membership: UnionMembership
) -> PythonSourceFiles:
    sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in request.targets),
            for_sources_types=request.valid_sources_types,
            enable_codegen=True,
        ),
    )

    missing_init_files = await Get(
        AncestorFiles, AncestorFilesRequest("__init__.py", sources.snapshot),
    )

    init_injected = await Get(
        Snapshot, MergeDigests((sources.snapshot.digest, missing_init_files.snapshot.digest)),
    )

    source_root_objs = await MultiGet(
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(tgt))
        for tgt in request.targets
        if (
            tgt.has_field(PythonSources)
            or tgt.has_field(ResourcesSources)
            or tgt.get(Sources).can_generate(PythonSources, union_membership)
            or tgt.get(Sources).can_generate(ResourcesSources, union_membership)
        )
    )
    source_root_paths = {source_root_obj.path for source_root_obj in source_root_objs}
    return PythonSourceFiles(
        SourceFiles(init_injected, sources.unrooted_files), tuple(sorted(source_root_paths))
    )
Example #20
async def _jvm_bsp_resources(
    request: BSPResourcesRequest,
    build_root: BuildRoot,
) -> BSPResourcesResult:
    """Generically handles a BSPResourcesRequest (subclass).

    This is a `@rule_helper` rather than a `@rule` for the same reason as `_jvm_bsp_compile`.
    """
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses([fs.address for fs in request.field_sets]))

    source_files = await Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            [tgt.get(SourcesField) for tgt in coarsened_targets.closure()],
            for_sources_types=(ResourceSourceField, ),
            enable_codegen=True,
        ),
    )

    rel_resources_dir = _jvm_resources_directory(
        request.bsp_target.bsp_target_id)
    output_digest = await Get(
        Digest,
        AddPrefix(source_files.snapshot.digest, rel_resources_dir),
    )

    return BSPResourcesResult(
        resources=(
            # NB: IntelliJ requires that directory URIs end in slashes.
            build_root.pathlib_path.joinpath(
                ".pants.d/bsp", rel_resources_dir).as_uri() + "/", ),
        output_digest=output_digest,
    )
Example #21
def run_black(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    config: Optional[str] = None,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    args = ["--backend-packages=pants.backend.python.lint.black"]
    if config is not None:
        rule_runner.create_file(relpath="pyproject.toml", contents=config)
        args.append("--black-config=pyproject.toml")
    if passthrough_args:
        args.append(f"--black-args='{passthrough_args}'")
    if skip:
        args.append("--black-skip")
    rule_runner.set_options(args)
    field_sets = [BlackFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [BlackRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            BlackRequest(field_sets,
                         prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
Example #22
def run_clangformat(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> FmtResult:
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.cc.lint.clangformat",
            *(extra_args or ())
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    field_sets = [ClangFormatFmtFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            ClangFormatRequest(field_sets, snapshot=input_sources.snapshot),
        ],
    )
    return fmt_result
Example #23
async def setup_autoflake(setup_request: SetupRequest,
                          autoflake: Autoflake) -> Setup:
    autoflake_pex_get = Get(VenvPex, PexRequest, autoflake.to_pex_request())

    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in setup_request.request.field_sets),
    )

    source_files, autoflake_pex = await MultiGet(source_files_get,
                                                 autoflake_pex_get)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    process = await Get(
        Process,
        VenvPexProcess(
            autoflake_pex,
            argv=generate_argv(source_files,
                               autoflake,
                               check_only=setup_request.check_only),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=
            f"Run Autoflake on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_snapshot=source_files_snapshot)
Example #24
async def format_shell_targets(
    shell_fmt_targets: ShellFmtTargets, union_membership: UnionMembership
) -> LanguageFmtResults:
    original_sources = await Get(
        SourceFiles,
        SourceFilesRequest(target[ShellSources] for target in shell_fmt_targets.targets),
    )
    prior_formatter_result = original_sources.snapshot

    results = []
    fmt_request_types = union_membership.union_rules[ShellFmtRequest]
    for fmt_request_type in fmt_request_types:
        result = await Get(
            EnrichedFmtResult,
            ShellFmtRequest,
            fmt_request_type(
                (
                    fmt_request_type.field_set_type.create(target)
                    for target in shell_fmt_targets.targets
                ),
                prior_formatter_result=prior_formatter_result,
            ),
        )
        results.append(result)
        if result.did_change:
            prior_formatter_result = await Get(Snapshot, Digest, result.output)
    return LanguageFmtResults(
        tuple(results),
        input=original_sources.snapshot.digest,
        output=prior_formatter_result.digest,
    )
Example #25
def run_docformatter(
    rule_runner: RuleRunner,
    targets: List[Target],
    *,
    passthrough_args: Optional[str] = None,
    skip: bool = False,
) -> Tuple[Sequence[LintResult], FmtResult]:
    args = ["--backend-packages=pants.backend.python.lint.docformatter"]
    if passthrough_args:
        args.append(f"--docformatter-args='{passthrough_args}'")
    if skip:
        args.append("--docformatter-skip")
    rule_runner.set_options(args)
    field_sets = [DocformatterFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults,
                                       [DocformatterRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.sources for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            DocformatterRequest(field_sets,
                                prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
Example #26
def run_black(
    rule_runner: RuleRunner, targets: list[Target], *, extra_args: list[str] | None = None
) -> FmtResult:
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python.lint.black", *(extra_args or ())],
        # We propagate LANG and LC_ALL to satisfy click, which black depends upon. Without this we
        # see something like the following in CI:
        #
        # RuntimeError: Click will abort further execution because Python was configured to use
        # ASCII as encoding for the environment. Consult
        # https://click.palletsprojects.com/unicode-support/ for mitigation steps.
        #
        # This system supports the C.UTF-8 locale which is recommended. You might be able to
        # resolve your issue by exporting the following environment variables:
        #
        #     export LC_ALL=C.UTF-8
        #     export LANG=C.UTF-8
        #
        env_inherit={"PATH", "PYENV_ROOT", "HOME", "LANG", "LC_ALL"},
    )
    field_sets = [BlackFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.source for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            BlackRequest(field_sets, snapshot=input_sources.snapshot),
        ],
    )
    return fmt_result
Example #27
async def infer_python_dependencies(
        request: InferPythonDependencies,
        python_inference: PythonInference) -> InferredDependencies:
    if not python_inference.imports:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    stripped_sources = await Get(StrippedSourceFiles,
                                 SourceFilesRequest([request.sources_field]))
    modules = tuple(
        PythonModule.create_from_stripped_path(PurePath(fp))
        for fp in stripped_sources.snapshot.files)
    digest_contents = await Get(DigestContents, Digest,
                                stripped_sources.snapshot.digest)

    owner_requests: List[Get[PythonModuleOwner, PythonModule]] = []
    for file_content, module in zip(digest_contents, modules):
        file_imports_obj = find_python_imports(file_content.content.decode(),
                                               module_name=module.module)
        detected_imports = (file_imports_obj.all_imports
                            if python_inference.string_imports else
                            file_imports_obj.explicit_imports)
        owner_requests.extend(
            Get(PythonModuleOwner, PythonModule(imported_module))
            for imported_module in detected_imports
            if imported_module not in combined_stdlib)

    owner_per_import = await MultiGet(owner_requests)
    result = (
        owner.address for owner in owner_per_import
        if owner.address and owner.address != request.sources_field.address)
    return InferredDependencies(result, sibling_dependencies_inferrable=True)
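
The InferPythonDependencies request type this rule receives is what ties it into Pants dependency inference. A minimal sketch of its likely definition and registration, assuming the standard pattern for inference plugins of this era (not taken from the original):

from pants.backend.python.target_types import PythonSources
from pants.engine.rules import collect_rules
from pants.engine.target import InferDependenciesRequest
from pants.engine.unions import UnionRule


class InferPythonDependencies(InferDependenciesRequest):
    # The sources field whose files the rule above analyzes (exposed as request.sources_field).
    infer_from = PythonSources


def rules():
    # Registering against the union makes Pants call infer_python_dependencies whenever it
    # computes dependencies for targets that have PythonSources.
    return [*collect_rules(), UnionRule(InferDependenciesRequest, InferPythonDependencies)]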
Example #28
async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports:
    python_interpreter, script_digest, stripped_sources = await MultiGet(
        Get(PythonExecutable, PexInterpreterConstraints, request.interpreter_constraints),
        Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", _SCRIPT.encode())])),
        Get(StrippedSourceFiles, SourceFilesRequest([request.sources])),
    )
    input_digest = await Get(
        Digest, MergeDigests([script_digest, stripped_sources.snapshot.digest])
    )
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                python_interpreter.path,
                "./__parse_python_imports.py",
                *stripped_sources.snapshot.files,
            ],
            input_digest=input_digest,
            description=f"Determine Python imports for {request.sources.address}",
            level=LogLevel.DEBUG,
        ),
    )
    explicit_imports, _, string_imports = process_result.stdout.decode().partition("--")
    return ParsedPythonImports(
        explicit_imports=FrozenOrderedSet(explicit_imports.strip().splitlines()),
        string_imports=FrozenOrderedSet(string_imports.strip().splitlines()),
    )
Example #29
def run_yapf(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
) -> tuple[tuple[LintResult, ...], FmtResult]:
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python.lint.yapf", *(extra_args or ())],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    field_sets = [YapfFieldSet.create(tgt) for tgt in targets]
    lint_results = rule_runner.request(LintResults, [YapfRequest(field_sets)])
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.source for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            YapfRequest(field_sets, prior_formatter_result=input_sources.snapshot),
        ],
    )
    return lint_results.results, fmt_result
Example #30
def run_pyupgrade(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    extra_args: list[str] | None = None,
    pyupgrade_arg: str = "--py36-plus",
) -> FmtResult:
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python.lint.pyupgrade",
            f'--pyupgrade-args="{pyupgrade_arg}"',
            *(extra_args or ()),
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    field_sets = [PyUpgradeFieldSet.create(tgt) for tgt in targets]
    input_sources = rule_runner.request(
        SourceFiles,
        [
            SourceFilesRequest(field_set.source for field_set in field_sets),
        ],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [
            PyUpgradeRequest(field_sets, snapshot=input_sources.snapshot),
        ],
    )
    return fmt_result