Example #1
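A test showing that PathGlobs resolve to FileEntry and Directory entries via DigestEntries: a symlinked file shares its target's digest, a path containing ".." is preserved as written, and directories come back as Directory entries. (The examples in this listing appear to be drawn from the Pants build system.)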
def test_path_globs_to_digest_entries(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)

    def get_entries(globs: Iterable[str]) -> Set[Union[FileEntry, Directory]]:
        return set(rule_runner.request(DigestEntries, [PathGlobs(globs)]))

    assert get_entries(["4.txt", "a/4.txt.ln"]) == {
        FileEntry(
            "4.txt",
            FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
        ),
        FileEntry(
            "a/4.txt.ln",
            FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
        ),
    }
    assert get_entries(["c.ln/../3.txt"]) == {
        FileEntry(
            "c.ln/../3.txt",
            FileDigest("f6936912184481f5edd4c304ce27c5a1a827804fc7f329f43d273b8621870776", 6),
        )
    }

    # Directories are empty.
    assert get_entries(["a/b"]) == {Directory("a/b")}
    assert get_entries(["c.ln"]) == {Directory("c.ln")}
Example #2
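A test showing that CreateDigest materializes empty directories: the resulting Snapshot lists every implied parent directory (x/y/z yields x, x/y, x/y/z) and its digest is not EMPTY_DIGEST.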
def test_create_empty_directory(rule_runner: RuleRunner) -> None:
    res = rule_runner.request(Snapshot, [CreateDigest([Directory("a/")])])
    assert res.dirs == ("a",)
    assert not res.files
    assert res.digest != EMPTY_DIGEST

    res = rule_runner.request(
        Snapshot, [CreateDigest([Directory("x/y/z"), Directory("m"), Directory("m/n")])]
    )
    assert res.dirs == ("m", "m/n", "x", "x/y", "x/y/z")
    assert not res.files
    assert res.digest != EMPTY_DIGEST
Example #3
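A test showing that DigestEntries reports an explicitly created empty directory alongside the file entries in the same digest.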
def test_digest_entries_handles_empty_directory(rule_runner: RuleRunner) -> None:
    digest = rule_runner.request(
        Digest, [CreateDigest([Directory("a/b"), FileContent("a/foo.txt", b"four\n")])]
    )
    entries = rule_runner.request(DigestEntries, [digest])
    assert entries == DigestEntries(
        [
            Directory("a/b"),
            FileEntry(
                "a/foo.txt",
                FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
            ),
        ]
    )
Example #4
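A parametrized test showing that a Process's output_files are resolved relative to its working_directory, which, when set, is pre-created with CreateDigest([Directory(...)]).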
def test_output_digest(rule_runner: RuleRunner, working_directory) -> None:
    # Test that the output files are relative to the working directory, both in how
    # they're specified, and their paths in the output_digest.
    input_digest = (rule_runner.request(
        Digest,
        [CreateDigest([Directory(working_directory)])],
    ) if working_directory else EMPTY_DIGEST)
    process = Process(
        input_digest=input_digest,
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland", ),
        working_directory=working_directory,
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.output_digest == Digest(
        fingerprint=
        "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        serialized_bytes_length=80,
    )

    digest_contents = rule_runner.request(DigestContents,
                                          [result.output_digest])
    assert digest_contents == DigestContents(
        [FileContent("roland", b"European Burmese", False)])
Example #5
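A rule that creates a ZIP or TAR archive from a snapshot. For tar, the output filename's parent directory must already exist, so it is pre-created with CreateDigest and merged into the input digest.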
async def create_archive(request: CreateArchive) -> Digest:
    if request.format == ArchiveFormat.ZIP:
        zip_binary = await Get(ZipBinary, _ZipBinaryRequest())
        argv = zip_binary.create_archive_argv(request)
        env = {}
        input_digest = request.snapshot.digest
    else:
        tar_binary = await Get(TarBinary, _TarBinaryRequest())
        argv = tar_binary.create_archive_argv(request)
        # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}
        # `tar` requires that the output filename's parent directory exists.
        output_dir_digest = await Get(
            Digest,
            CreateDigest([Directory(os.path.dirname(request.output_filename))
                          ]))
        input_digest = await Get(
            Digest, MergeDigests([output_dir_digest, request.snapshot.digest]))

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Create {request.output_filename}",
            level=LogLevel.DEBUG,
            output_files=(request.output_filename, ),
        ),
    )
    return result.output_digest
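
The create_archive rule above shows a pattern that recurs throughout this listing: pre-create an empty directory with CreateDigest([Directory(...)]), merge it into the process's input digest, capture it with output_files or output_directories, and strip the prefix afterwards. Below is a minimal illustrative sketch of that pattern, assuming the same pants.engine imports as the surrounding examples; the rule name and shell command are hypothetical, not taken from Pants.

async def run_in_scratch_dir(digest: Digest) -> Digest:
    # Hypothetical rule distilling the Directory pattern used throughout
    # these examples; not copied from the Pants codebase.
    out_dir = "__out"  # scratch prefix, stripped from the final digest
    out_dir_digest = await Get(Digest, CreateDigest([Directory(out_dir)]))
    input_digest = await Get(Digest, MergeDigests((digest, out_dir_digest)))
    result = await Get(
        ProcessResult,
        Process(
            argv=("/bin/bash", "-c", f"echo hi > {out_dir}/greeting.txt"),
            input_digest=input_digest,
            description="Write into the pre-created scratch directory",
            output_directories=(out_dir,),
        ),
    )
    # Drop the scratch prefix so callers see only the produced files.
    return await Get(Digest, RemovePrefix(result.output_digest, out_dir))

The final RemovePrefix re-roots the produced files, dropping the __out/ scratch prefix from their paths.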
Example #6
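A rule that fetches Helm chart artifacts: helm pull runs against a pre-created __downloads directory, whose prefix is then stripped, and a DigestSubset drops the downloaded tarball so only the untarred chart remains.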
async def fetch_helm_artifacts(
        request: FetchHelmArfifactsRequest) -> FetchedHelmArtifacts:
    download_prefix = "__downloads"
    empty_download_digest = await Get(
        Digest, CreateDigest([Directory(download_prefix)]))

    artifacts = await MultiGet(
        Get(ResolvedHelmArtifact, HelmArtifact,
            HelmArtifact.from_field_set(field_set))
        for field_set in request.field_sets)

    def create_fetch_process(artifact: ResolvedHelmArtifact) -> HelmProcess:
        return HelmProcess(
            argv=[
                "pull",
                artifact.name,
                "--repo",
                artifact.location_url,
                "--version",
                artifact.version,
                "--destination",
                download_prefix,
                "--untar",
            ],
            input_digest=empty_download_digest,
            description=
            f"Pulling Helm Chart '{artifact.name}' with version {artifact.version}",
            output_directories=(download_prefix, ),
        )

    download_results = await MultiGet(
        Get(
            ProcessResult,
            HelmProcess,
            create_fetch_process(artifact),
        ) for artifact in artifacts)

    stripped_artifact_digests = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, download_prefix))
        for result in download_results)

    # Avoid capturing the tarball that has been downloaded by Helm during the pull.
    artifact_snapshots = await MultiGet(
        Get(
            Snapshot,
            DigestSubset(digest, PathGlobs([os.path.join(artifact.name, "**")
                                            ])))
        for artifact, digest in zip(artifacts, stripped_artifact_digests))

    fetched_artifacts = [
        FetchedHelmArtifact(artifact=artifact, snapshot=snapshot)
        for artifact, snapshot in zip(artifacts, artifact_snapshots)
    ]
    logger.debug(
        f"Fetched {pluralize(len(fetched_artifacts), 'Helm artifact')} corresponding with:\n"
        f"{bullet_list([artifact.address.spec for artifact in fetched_artifacts], max_elements=10)}"
    )
    return FetchedHelmArtifacts(fetched_artifacts)
Example #7
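A rule that extracts a digest containing a single zip, tar, or gzip archive into a pre-created scratch directory, returning the extracted files with the scratch prefix removed.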
async def maybe_extract_archive(digest: Digest) -> ExtractedArchive:
    """If digest contains a single archive file, extract it, otherwise return the input digest."""
    extract_archive_dir = "__extract_archive_dir"
    snapshot, output_dir_digest = await MultiGet(
        Get(Snapshot, Digest, digest),
        Get(Digest, CreateDigest([Directory(extract_archive_dir)])),
    )
    if len(snapshot.files) != 1:
        return ExtractedArchive(digest)

    archive_path = snapshot.files[0]
    is_zip = archive_path.endswith(".zip")
    is_tar = archive_path.endswith(
        (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tbz2", ".tar.xz", ".txz"))
    is_gz = not is_tar and archive_path.endswith(".gz")
    if not is_zip and not is_tar and not is_gz:
        return ExtractedArchive(digest)

    merge_digest_get = Get(Digest, MergeDigests((digest, output_dir_digest)))
    if is_zip:
        input_digest, unzip_binary = await MultiGet(
            merge_digest_get,
            Get(UnzipBinary, _UnzipBinaryRequest()),
        )
        argv = unzip_binary.extract_archive_argv(archive_path,
                                                 extract_archive_dir)
        env = {}
    elif is_tar:
        input_digest, tar_binary = await MultiGet(
            merge_digest_get,
            Get(TarBinary, _TarBinaryRequest()),
        )
        argv = tar_binary.extract_archive_argv(archive_path,
                                               extract_archive_dir)
        # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}
    else:
        input_digest, gunzip = await MultiGet(
            merge_digest_get,
            Get(Gunzip, _GunzipRequest()),
        )
        argv = gunzip.extract_archive_argv(archive_path, extract_archive_dir)
        env = {}

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Extract {archive_path}",
            level=LogLevel.DEBUG,
            output_directories=(extract_archive_dir, ),
        ),
    )
    digest = await Get(Digest,
                       RemovePrefix(result.output_digest, extract_archive_dir))
    return ExtractedArchive(digest)
Example #8
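A rule that compiles a JavaParser launcher with javac. A single CreateDigest materializes both the launcher source file and the empty classfiles destination directory.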
async def build_processors(
        bash: BashBinary, jdk_setup: JdkSetup) -> JavaParserCompiledClassfiles:
    dest_dir = "classfiles"

    materialized_classpath, source_digest = await MultiGet(
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=(java_parser_artifact_requirements(), ),
            ),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    path=_LAUNCHER_BASENAME,
                    content=_load_javaparser_launcher_source(),
                ),
                Directory(dest_dir),
            ]),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests((
            materialized_classpath.digest,
            jdk_setup.digest,
            source_digest,
        )),
    )

    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash,
                                [f"{jdk_setup.java_home}/lib/tools.jar"]),
                "com.sun.tools.javac.Main",
                "-cp",
                materialized_classpath.classpath_arg(),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            use_nailgun=jdk_setup.digest,
            output_directories=(dest_dir, ),
            description=
            f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #9
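A later variant of the previous rule, ported to InternalJdk and JvmProcess with a lockfile-resolved tool classpath; the Directory usage is unchanged.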
async def build_processors(jdk: InternalJdk) -> JavaParserCompiledClassfiles:
    dest_dir = "classfiles"
    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                        JavaParserToolLockfileSentinel())
    materialized_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(prefix="__toolcp",
                                 lockfile=parser_lockfile_request),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    path=_LAUNCHER_BASENAME,
                    content=_load_javaparser_launcher_source(),
                ),
                Directory(dest_dir),
            ]),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests((
            materialized_classpath.digest,
            source_digest,
        )),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                "-cp",
                ":".join(materialized_classpath.classpath_entries()),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir, ),
            description=
            f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #10
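A Flake8 lint rule. The empty REPORT_DIR is created up front, merged into the input digest, captured via output_directories, and stripped from the result to produce the report digest.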
async def flake8_lint_partition(
        partition: Flake8Partition, flake8: Flake8,
        first_party_plugins: Flake8FirstPartyPlugins) -> LintResult:
    flake8_pex_get = Get(
        VenvPex,
        PexRequest,
        flake8.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest,
                           flake8.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in partition.field_sets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))
    flake8_pex, config_files, report_directory, source_files = await MultiGet(
        flake8_pex_get, config_files_get, report_directory_digest_get,
        source_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            source_files.snapshot.digest,
            first_party_plugins.sources_digest,
            config_files.snapshot.digest,
            report_directory,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8),
            input_digest=input_digest,
            output_directories=(REPORT_DIR, ),
            extra_env={"PEX_EXTRA_SYS_PATH": first_party_plugins.PREFIX},
            concurrency_available=len(partition.field_sets),
            description=
            f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #11
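A rule that sets up the Helm binary. Empty cache, config, and data directories are created as digests and passed to the process as immutable input digests.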
async def setup_helm(helm_subsytem: HelmSubsystem) -> HelmBinary:
    cache_dir = "__cache"
    config_dir = "__config"
    data_dir = "__data"

    downloaded_binary, cache_digest, config_digest, data_digest = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            helm_subsytem.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(cache_dir)])),
        Get(Digest, CreateDigest([Directory(config_dir)])),
        Get(Digest, CreateDigest([Directory(data_dir)])),
    )

    tool_relpath = "__helm"
    immutable_input_digests = {tool_relpath: downloaded_binary.digest}
    helm_path = os.path.join(tool_relpath, downloaded_binary.exe)
    helm_env = _build_helm_env(cache_dir, config_dir, data_dir)

    # TODO Install Global Helm plugins
    # TODO Configure Helm classic repositories

    cache_subset_digest, config_subset_digest, data_subset_digest = await MultiGet(
        Get(Digest, RemovePrefix(cache_digest, cache_dir)),
        Get(Digest, RemovePrefix(config_digest, config_dir)),
        Get(Digest, RemovePrefix(data_digest, data_dir)),
    )

    setup_immutable_digests = {
        **immutable_input_digests,
        cache_dir: cache_subset_digest,
        config_dir: config_subset_digest,
        data_dir: data_subset_digest,
    }

    local_env = await Get(Environment, EnvironmentRequest(["HOME", "PATH"]))
    return HelmBinary(
        path=helm_path,
        helm_env=helm_env,
        local_env=local_env,
        immutable_input_digests=setup_immutable_digests,
    )
Example #12
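A Bandit lint rule using the same empty-report-directory pattern as the Flake8 rule above.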
async def bandit_lint_partition(partition: BanditPartition,
                                bandit: Bandit) -> LintResult:

    bandit_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="bandit.pex",
            internal_only=True,
            requirements=bandit.pex_requirements(),
            interpreter_constraints=partition.interpreter_constraints,
            main=bandit.main,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest,
                           bandit.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in partition.field_sets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))

    bandit_pex, config_files, report_directory, source_files = await MultiGet(
        bandit_pex_get, config_files_get, report_directory_digest_get,
        source_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest,
                      config_files.snapshot.digest, report_directory)),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            bandit_pex,
            argv=generate_argv(source_files, bandit),
            input_digest=input_digest,
            description=
            f"Run Bandit on {pluralize(len(partition.field_sets), 'file')}.",
            output_directories=(REPORT_DIR, ),
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #13
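An earlier archive-extraction rule that dispatches on the file extension, extracts into a pre-created __output directory, and strips that prefix from the result.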
async def maybe_extract_archive(digest: Digest, tar_binary: TarBinary,
                                unzip_binary: UnzipBinary) -> ExtractedArchive:
    """If digest contains a single archive file, extract it, otherwise return the input digest."""
    output_dir = "__output"
    snapshot, output_dir_digest = await MultiGet(
        Get(Snapshot, Digest, digest),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )
    if len(snapshot.files) != 1:
        return ExtractedArchive(digest)

    input_digest = await Get(Digest, MergeDigests((digest, output_dir_digest)))
    fp = snapshot.files[0]
    if fp.endswith(".zip"):
        argv = unzip_binary.extract_archive_argv(archive_path=fp,
                                                 output_dir=output_dir)
        env = {}
    elif fp.endswith(
        (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tbz2", ".tar.xz", ".txz")):
        argv = tar_binary.extract_archive_argv(archive_path=fp,
                                               output_dir=output_dir)
        # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}
    else:
        return ExtractedArchive(digest)

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Extract {fp}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )
    strip_output_dir = await Get(
        Digest, RemovePrefix(result.output_digest, output_dir))
    return ExtractedArchive(strip_output_dir)
Example #14
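A helm package rule: the chart is packaged into a pre-created __out directory, the prefix is stripped, and the artifact is re-prefixed under the field set's output path.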
async def run_helm_package(field_set: HelmPackageFieldSet) -> BuiltPackage:
    result_dir = "__out"

    chart, result_digest = await MultiGet(
        Get(HelmChart, HelmChartRequest(field_set)),
        Get(Digest, CreateDigest([Directory(result_dir)])),
    )

    input_digest = await Get(
        Digest, MergeDigests([chart.snapshot.digest, result_digest]))
    process_output_file = os.path.join(result_dir,
                                       _helm_artifact_filename(chart.metadata))

    process_result = await Get(
        ProcessResult,
        HelmProcess(
            argv=["package", chart.path, "-d", result_dir],
            input_digest=input_digest,
            output_files=(process_output_file, ),
            description=f"Packaging Helm chart: {field_set.address.spec_path}",
        ),
    )

    stripped_output_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, result_dir))

    final_snapshot = await Get(
        Snapshot,
        AddPrefix(stripped_output_digest,
                  field_set.output_path.value_or_default(file_ending=None)),
    )
    return BuiltPackage(
        final_snapshot.digest,
        artifacts=tuple(
            BuiltPackageArtifact(
                file, extra_log_lines=(f"Built Helm chart artifact: {file}", ))
            for file in final_snapshot.files),
    )
Example #15
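A rule that assembles the Pex CLI Process. A sandbox-local .tmp directory is pre-created and passed via --tmpdir so Pex writes its temporary files inside the execution sandbox.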
async def setup_pex_cli_process(
    request: PexCliProcess,
    pex_pex: PexPEX,
    pex_env: PexEnvironment,
    python_native_code: PythonNativeCode,
    global_options: GlobalOptions,
    pex_runtime_env: PexRuntimeEnvironment,
) -> Process:
    tmpdir = ".tmp"
    gets: List[Get] = [Get(Digest, CreateDigest([Directory(tmpdir)]))]

    # The certs file will typically not be in the repo, so we can't digest it via a PathGlobs.
    # Instead we manually create a FileContent for it.
    cert_args = []
    if global_options.ca_certs_path:
        ca_certs_content = Path(global_options.ca_certs_path).read_bytes()
        chrooted_ca_certs_path = os.path.basename(global_options.ca_certs_path)

        gets.append(
            Get(
                Digest,
                CreateDigest((FileContent(chrooted_ca_certs_path, ca_certs_content),)),
            )
        )
        cert_args = ["--cert", chrooted_ca_certs_path]

    digests_to_merge = [pex_pex.digest]
    digests_to_merge.extend(await MultiGet(gets))
    if request.additional_input_digest:
        digests_to_merge.append(request.additional_input_digest)
    input_digest = await Get(Digest, MergeDigests(digests_to_merge))

    global_args = [
        # Ensure Pex and its subprocesses create temporary files in the process execution
        # sandbox. It may make sense to do this generally for Processes, but in the short term we
        # have known use cases where /tmp is too small to hold large wheel downloads Pex is asked to
        # perform. Making the TMPDIR local to the sandbox allows control via
        # --local-execution-root-dir for the local case and should work well with remote cases where
        # a remoting implementation has to allow for processes producing large binaries in a
        # sandbox to support reasonable workloads. Communicating TMPDIR via --tmpdir instead of via
        # environment variable allows Pex to absolutize the path ensuring subprocesses that change
        # CWD can find the TMPDIR.
        "--tmpdir",
        tmpdir,
    ]

    if request.concurrency_available > 0:
        global_args.extend(["--jobs", "{pants_concurrency}"])

    if pex_runtime_env.verbosity > 0:
        global_args.append(f"-{'v' * pex_runtime_env.verbosity}")

    resolve_args = (
        [*cert_args, "--python-path", create_path_env_var(pex_env.interpreter_search_paths)]
        if request.set_resolve_args
        else []
    )
    args = [
        *global_args,
        *request.subcommand,
        *resolve_args,
        # NB: This comes at the end because it may use `--` passthrough args,
        # which must come at the end.
        *request.extra_args,
    ]

    complete_pex_env = pex_env.in_sandbox(working_directory=None)
    normalized_argv = complete_pex_env.create_argv(pex_pex.exe, *args, python=request.python)
    env = {
        **complete_pex_env.environment_dict(python_configured=request.python is not None),
        **python_native_code.environment_dict,
        **(request.extra_env or {}),
        # If a subcommand is used, we need to use the `pex3` console script.
        **({"PEX_SCRIPT": "pex3"} if request.subcommand else {}),
    }

    return Process(
        normalized_argv,
        description=request.description,
        input_digest=input_digest,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches=complete_pex_env.append_only_caches,
        level=request.level,
        concurrency_available=request.concurrency_available,
        cache_scope=request.cache_scope,
    )
Example #16
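A codegen rule running protoc for Python, optionally with the mypy-protobuf and gRPC plugins. The _generated_files output directory is pre-created, then the prefix is replaced with the appropriate source root.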
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
    python_protobuf_mypy_plugin: PythonProtobufMypyPlugin,
    pex_environment: PexEnvironment,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(tgt[ProtobufSourceField]
                           for tgt in transitive_targets.closure
                           if tgt.has_field(ProtobufSourceField)),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSourceField]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    protoc_gen_mypy_grpc_script = "protoc-gen-mypy_grpc"
    mypy_pex = None
    mypy_request = PexRequest(
        output_filename="mypy_protobuf.pex",
        internal_only=True,
        requirements=python_protobuf_mypy_plugin.pex_requirements(),
        interpreter_constraints=python_protobuf_mypy_plugin.interpreter_constraints,
    )

    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(bin_names=[protoc_gen_mypy_script],
                           pex_request=mypy_request),
        )

        if request.protocol_target.get(ProtobufGrpcToggleField).value:
            mypy_info = await Get(PexResolveInfo, VenvPex, mypy_pex)

            # In order to generate stubs for gRPC code, we need mypy-protobuf 2.0 or above.
            if any(dist_info.project_name == "mypy-protobuf"
                   and dist_info.version.major >= 2
                   for dist_info in mypy_info):
                # TODO: Use `pex_path` once VenvPex stores a Pex field.
                mypy_pex = await Get(
                    VenvPex,
                    VenvPexRequest(
                        bin_names=[
                            protoc_gen_mypy_script, protoc_gen_mypy_grpc_script
                        ],
                        pex_request=mypy_request,
                    ),
                )

    downloaded_grpc_plugin = (await Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        grpc_python_plugin.get_request(Platform.current),
    ) if request.protocol_target.get(ProtobufGrpcToggleField).value else None)

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])

        if mypy_pex and protoc_gen_mypy_grpc_script in mypy_pex.bin:
            argv.extend([
                f"--plugin=protoc-gen-mypy_grpc={mypy_pex.bin[protoc_gen_mypy_grpc_script].argv0}",
                "--mypy_grpc_out",
                output_dir,
            ])

    argv.extend(target_sources_stripped.snapshot.files)
    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=
            f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
            append_only_caches=pex_environment.in_sandbox(
                working_directory=None).append_only_caches,
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_source's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
Example #17
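A rule that turns a shell_command target into a Process. The working directory is materialized with CreateDigest when it does not already exist in the sources, and the requested tools are symlinked onto PATH.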
async def prepare_shell_command_process(request: ShellCommandProcessRequest,
                                        shell_setup: ShellSetup,
                                        bash: BashBinary) -> Process:
    shell_command = request.target
    interactive = shell_command.has_field(ShellCommandRunWorkdirField)
    if interactive:
        working_directory = shell_command[
            ShellCommandRunWorkdirField].value or ""
    else:
        working_directory = shell_command.address.spec_path
    command = shell_command[ShellCommandCommandField].value
    timeout = shell_command.get(ShellCommandTimeoutField).value
    tools = shell_command.get(ShellCommandToolsField,
                              default_raw_value=()).value
    outputs = shell_command.get(ShellCommandOutputsField).value or ()
    extra_env_vars = shell_command.get(
        ShellCommandExtraEnvVarsField).value or ()

    if not command:
        raise ValueError(
            f"Missing `command` line in `{shell_command.alias}` target {shell_command.address}."
        )

    if interactive:
        command_env = {
            "CHROOT": "{chroot}",
        }
    else:
        if not tools:
            raise ValueError(
                f"Must provide any `tools` used by the `{shell_command.alias}` {shell_command.address}."
            )

        env = await Get(Environment, EnvironmentRequest(["PATH"]))
        search_path = shell_setup.executable_search_path(env)
        tool_requests = [
            BinaryPathRequest(
                binary_name=tool,
                search_path=search_path,
            ) for tool in {*tools, *["mkdir", "ln"]}
            if tool not in BASH_BUILTIN_COMMANDS
        ]
        tool_paths = await MultiGet(
            Get(BinaryPaths, BinaryPathRequest, request)
            for request in tool_requests)

        command_env = {
            "TOOLS":
            " ".join(
                _shell_tool_safe_env_name(tool.binary_name)
                for tool in tool_requests),
        }

        for binary, tool_request in zip(tool_paths, tool_requests):
            if binary.first_path:
                command_env[_shell_tool_safe_env_name(
                    tool_request.binary_name)] = binary.first_path.path
            else:
                raise BinaryNotFoundError.from_request(
                    tool_request,
                    rationale=
                    f"execute `{shell_command.alias}` {shell_command.address}",
                )

    extra_env = await Get(Environment, EnvironmentRequest(extra_env_vars))
    command_env.update(extra_env)

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([shell_command.address]),
    )

    sources, pkgs_per_target = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(
                sources_fields=[
                    tgt.get(SourcesField)
                    for tgt in transitive_targets.dependencies
                ],
                for_sources_types=(SourcesField, FileSourceField),
                enable_codegen=True,
            ),
        ),
        Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet,
                                      transitive_targets.dependencies),
        ),
    )

    packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in pkgs_per_target.field_sets)

    if interactive or not working_directory or working_directory in sources.snapshot.dirs:
        work_dir = EMPTY_DIGEST
    else:
        work_dir = await Get(Digest,
                             CreateDigest([Directory(working_directory)]))

    input_digest = await Get(
        Digest,
        MergeDigests([
            sources.snapshot.digest, work_dir,
            *(pkg.digest for pkg in packages)
        ]))

    output_files = [f for f in outputs if not f.endswith("/")]
    output_directories = [d for d in outputs if d.endswith("/")]

    if interactive:
        relpath = os.path.relpath(
            working_directory or ".",
            start="/" if os.path.isabs(working_directory) else ".")
        boot_script = f"cd {shlex.quote(relpath)}; " if relpath != "." else ""
    else:
        # Set up the bin_relpath dir with symlinks to all requested tools so that we can use PATH.
        # Force the symlinks to avoid issues with repeat runs using the __run.sh script in the sandbox.
        bin_relpath = ".bin"
        boot_script = ";".join(
            dedent(f"""\
                $mkdir -p {bin_relpath}
                for tool in $TOOLS; do $ln -sf ${{!tool}} {bin_relpath}; done
                export PATH="$PWD/{bin_relpath}"
                """).split("\n"))

    return Process(
        argv=(bash.path, "-c", boot_script + command),
        description=f"Running {shell_command.alias} {shell_command.address}",
        env=command_env,
        input_digest=input_digest,
        output_directories=output_directories,
        output_files=output_files,
        timeout_seconds=timeout,
        working_directory=working_directory,
    )
Example #18
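An older variant of the Pex CLI process setup (compare Example #15): the Pex binary is downloaded as an external tool, and the sandbox-local .tmp directory is created the same way.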
async def setup_pex_cli_process(
    request: PexCliProcess,
    pex_binary: PexBinary,
    pex_env: PexEnvironment,
    python_native_code: PythonNativeCode,
    global_options: GlobalOptions,
    pex_runtime_env: PexRuntimeEnvironment,
) -> Process:
    tmpdir = ".tmp"
    gets: List[Get] = [
        Get(DownloadedExternalTool, ExternalToolRequest, pex_binary.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(tmpdir)])),
    ]
    cert_args = []

    # The certs file will typically not be in the repo, so we can't digest it via a PathGlobs.
    # Instead we manually create a FileContent for it.
    if global_options.options.ca_certs_path:
        ca_certs_content = Path(global_options.options.ca_certs_path).read_bytes()
        chrooted_ca_certs_path = os.path.basename(global_options.options.ca_certs_path)

        gets.append(
            Get(
                Digest,
                CreateDigest((FileContent(chrooted_ca_certs_path, ca_certs_content),)),
            )
        )
        cert_args = ["--cert", chrooted_ca_certs_path]

    downloaded_pex_bin, *digests_to_merge = await MultiGet(gets)
    digests_to_merge.append(downloaded_pex_bin.digest)
    if request.additional_input_digest:
        digests_to_merge.append(request.additional_input_digest)
    input_digest = await Get(Digest, MergeDigests(digests_to_merge))

    pex_root_path = ".cache/pex_root"
    argv = [
        downloaded_pex_bin.exe,
        *cert_args,
        "--python-path",
        create_path_env_var(pex_env.interpreter_search_paths),
        "--pex-root",
        pex_root_path,
        # Ensure Pex and its subprocesses create temporary files in the process execution
        # sandbox. It may make sense to do this generally for Processes, but in the short term we
        # have known use cases where /tmp is too small to hold large wheel downloads Pex is asked to
        # perform. Making the TMPDIR local to the sandbox allows control via
        # --local-execution-root-dir for the local case and should work well with remote cases where
        # a remoting implementation has to allow for processes producing large binaries in a
        # sandbox to support reasonable workloads. Communicating TMPDIR via --tmpdir instead of via
        # environment variable allows Pex to absolutize the path ensuring subprocesses that change
        # CWD can find the TMPDIR.
        "--tmpdir",
        tmpdir,
    ]
    if pex_runtime_env.verbosity > 0:
        argv.append(f"-{'v' * pex_runtime_env.verbosity}")

    # NB: This comes at the end of the argv because the request may use `--` passthrough args,
    # which must come at the end.
    argv.extend(request.argv)
    normalized_argv = pex_env.create_argv(*argv, python=request.python)
    env = {
        **pex_env.environment_dict(python_configured=request.python is not None),
        **python_native_code.environment_dict,
        **(request.extra_env or {}),
    }

    return Process(
        normalized_argv,
        description=request.description,
        input_digest=input_digest,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches={"pex_root": pex_root_path},
        level=request.level,
        cache_scope=request.cache_scope,
    )
Example #19
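A Pytest setup rule. The empty _EXTRA_OUTPUT_DIR is pre-created and merged into the input digest so tests can write extra outputs that get captured from the sandbox.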
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
Example #20
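A Go Protobuf codegen rule: protoc and its Go plugins are mounted as immutable input digests, generation targets a pre-created output directory, and the source root is restored afterwards.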
async def generate_go_from_protobuf(
    request: GenerateGoFromProtobufRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
) -> GeneratedSources:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    downloaded_protoc_binary, empty_output_dir, transitive_targets = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.protocol_target.address])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(tgt[ProtobufSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ProtobufSourceField)),
        ),
        Get(StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]
                                ])),
    )

    input_digest = await Get(
        Digest,
        MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir]))

    maybe_grpc_plugin_args = []
    if request.protocol_target.get(ProtobufGrpcToggleField).value:
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    result = await Get(
        ProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *maybe_grpc_plugin_args,
                *target_sources_stripped.snapshot.files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=
            f"Generating Go sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest,
            SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
Example #21
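A Pylint lint rule with the familiar empty-report-directory pattern; the runner PEX copies site-packages to work around a Pylint symlink issue.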
async def pylint_lint_partition(
        partition: PylintPartition, pylint: Pylint,
        first_party_plugins: PylintFirstPartyPlugins) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (t.address for t in partition.root_targets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )

    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(t[PythonSourceField]
                           for t in partition.root_targets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))

    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                #  upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                #  resolved.
                site_packages_copies=True,
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            first_party_plugins.sources_digest,
            prepared_python_sources.source_files.snapshot.digest,
            report_directory,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR, ),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_targets),
            description=
            f"Run Pylint on {pluralize(len(partition.root_targets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Example #22
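A rule that compiles the Scala dependency-inference parser with scalac into a pre-created classfiles directory, which is then stripped from the output digest.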
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #23
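An older variant of the Pytest setup rule in Example #19, using the same _EXTRA_OUTPUT_DIR pattern; the listing breaks off mid-function here.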
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    build_package_dependencies_get = Get(
        BuiltPackageDependencies,
        BuildPackageDependenciesRequest(
            request.field_set.runtime_package_dependencies),
    )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        built_package_dependencies,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        build_package_dependencies_get,
        extra_output_directory_digest_get,
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=ConsoleScript("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
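
As a side note, the `--cov` flag interleaving above is easy to misread; this standalone snippet reproduces just that list construction (the paths are hypothetical coverage filter values):

import itertools

cov_paths = ("src/python", "tests/python")  # hypothetical coverage filter paths
coverage_args = [
    "--cov-report=",  # Turn off pytest-cov's direct report output.
    *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
]
assert coverage_args == ["--cov-report=", "--cov", "src/python", "--cov", "tests/python"]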
Example No. 24
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    # TODO(#10917): Use TransitiveTargets instead of TransitiveTargetsLite.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequestLite([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources, ),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    # To run the MyPy Protobuf plugin, we first install it with Pex, then extract the wheels and
    # point Protoc to the extracted wheels with its `--plugin` argument.
    extracted_mypy_wheels = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            Pex,
            PexRequest(
                output_filename="mypy_protobuf.pex",
                internal_only=True,
                requirements=PexRequirements(
                    [python_protobuf_subsystem.mypy_plugin_version]),
                # This is solely to ensure that we use an appropriate interpreter when resolving
                # the distribution. We don't actually run the distribution directly with Python,
                # as we extract out its binary.
                interpreter_constraints=PexInterpreterConstraints(
                    ["CPython>=3.5"]),
            ),
        )
        extracted_mypy_wheels = await Get(ExtractedPexDistributions, Pex,
                                          mypy_pex)

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggle).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if extracted_mypy_wheels:
        unmerged_digests.append(extracted_mypy_wheels.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if extracted_mypy_wheels:
        mypy_plugin_path = next(
            p for p in extracted_mypy_wheels.wheel_directory_paths
            if p.startswith(".deps/mypy_protobuf-"))
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_plugin_path}/bin/protoc-gen-mypy",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])
    argv.extend(target_sources_stripped.snapshot.files)

    env = {}
    if extracted_mypy_wheels:
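        # protoc runs the plugin as a plain script, so its extracted wheel
        # dependencies must be importable via PYTHONPATH.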
        env["PYTHONPATH"] = ":".join(
            extracted_mypy_wheels.wheel_directory_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            env=env,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    if source_root.path == ".":
        source_root_restored = await Get(Snapshot, Digest, normalized_digest)
    else:
        source_root_restored = await Get(
            Snapshot, AddPrefix(normalized_digest, source_root.path)
        )
    return GeneratedSources(source_root_restored)
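
For illustration, with both the MyPy and gRPC plugins enabled, the argv assembled above takes roughly this shape (every path here is a hypothetical stand-in for the resolved binaries, wheels, and stripped sources):

output_dir = "_generated_files"
argv = [
    "protoc",  # stand-in for downloaded_protoc_binary.exe
    "--python_out", output_dir,
    "--plugin=protoc-gen-mypy=.deps/mypy_protobuf-2.4/bin/protoc-gen-mypy",
    "--mypy_out", output_dir,
    "--plugin=protoc-gen-grpc=./grpc_python_plugin",
    "--grpc_out", output_dir,
    "src/protos/example.proto",  # target_sources_stripped files
]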
Example No. 25
async def run_shell_command(
    request: GenerateFilesFromShellCommandRequest,
    shell_setup: ShellSetup,
    bash: BashBinary,
) -> GeneratedSources:
    shell_command = request.protocol_target
    working_directory = shell_command.address.spec_path
    command = shell_command[ShellCommandCommandField].value
    tools = shell_command[ShellCommandToolsField].value
    outputs = shell_command[ShellCommandOutputsField].value or ()

    if not command:
        raise ValueError(
            f"Missing `command` line in `shell_command` target {shell_command.address}."
        )

    if not tools:
        raise ValueError(
            f"Must provide any `tools` used by the `shell_command` {shell_command.address}."
        )

    env = await Get(Environment, EnvironmentRequest(["PATH"]))
    search_path = shell_setup.executable_search_path(env)
    tool_requests = [
        BinaryPathRequest(
            binary_name=tool,
            search_path=search_path,
        ) for tool in {*tools, *["mkdir", "ln"]}
        if tool not in BASH_BUILTIN_COMMANDS
    ]
    tool_paths = await MultiGet(
        Get(BinaryPaths, BinaryPathRequest, tool_request)
        for tool_request in tool_requests
    )

    command_env = {
        "TOOLS": " ".join(shlex.quote(tool.binary_name) for tool in tool_requests),
    }

    for binary, tool_request in zip(tool_paths, tool_requests):
        if binary.first_path:
            command_env[tool_request.binary_name] = binary.first_path.path
        else:
            raise BinaryNotFoundError.from_request(
                tool_request,
                rationale=f"execute experimental_shell_command {shell_command.address}",
            )

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([shell_command.address]),
    )

    sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=[
                tgt.get(Sources) for tgt in transitive_targets.dependencies
            ],
            for_sources_types=(
                Sources,
                FilesSources,
            ),
            enable_codegen=True,
        ),
    )

    output_files = [f for f in outputs if not f.endswith("/")]
    output_directories = [d for d in outputs if d.endswith("/")]

    if working_directory in sources.snapshot.dirs:
        input_digest = sources.snapshot.digest
    else:
        work_dir = await Get(Digest,
                             CreateDigest([Directory(working_directory)]))
        input_digest = await Get(
            Digest, MergeDigests([sources.snapshot.digest, work_dir]))

    # Setup bin_relpath dir with symlinks to all requested tools, so that we can use PATH.
    bin_relpath = ".bin"
    setup_tool_symlinks_script = ";".join(
        dedent(f"""\
            $mkdir -p {bin_relpath}
            for tool in $TOOLS; do $ln -s ${{!tool}} {bin_relpath}/; done
            export PATH="$PWD/{bin_relpath}"
            """).split("\n"))

    result = await Get(
        ProcessResult,
        Process(
            argv=(bash.path, "-c", setup_tool_symlinks_script + command),
            description=f"Running experimental_shell_command {shell_command.address}",
            env=command_env,
            input_digest=input_digest,
            output_directories=output_directories,
            output_files=output_files,
            working_directory=working_directory,
        ),
    )

    if shell_command[ShellCommandLogOutputField].value:
        if result.stdout:
            logger.info(result.stdout.decode())
        if result.stderr:
            logger.warning(result.stderr.decode())

    output = await Get(Snapshot,
                       AddPrefix(result.output_digest, working_directory))
    return GeneratedSources(output)
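
The symlink preamble above collapses into a single `;`-separated line; run standalone, the construction yields:

from textwrap import dedent

bin_relpath = ".bin"
setup_tool_symlinks_script = ";".join(
    dedent(f"""\
        $mkdir -p {bin_relpath}
        for tool in $TOOLS; do $ln -s ${{!tool}} {bin_relpath}/; done
        export PATH="$PWD/{bin_relpath}"
        """).split("\n")
)
print(setup_tool_symlinks_script)
# $mkdir -p .bin;for tool in $TOOLS; do $ln -s ${!tool} .bin/; done;export PATH="$PWD/.bin";

Note the trailing `;` (from the final empty split element): it is what lets the script be concatenated directly with `command`.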
Example No. 26
async def compile_java_source(
    bash: BashBinary,
    javac: JavacSubsystem,
    zip_binary: ZipBinary,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath, and additionally any prerequisite.
    optional_prereq_request = [request.prerequisite] if request.prerequisite else []
    fallibles = await MultiGet(
        Get(FallibleClasspathEntries,
            ClasspathEntryRequests(optional_prereq_request)),
        Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request)),
    )

    direct_dependency_classpath_entries = FallibleClasspathEntries(
        itertools.chain(*fallibles)).if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Capture just the `ClasspathEntry` objects that are listed as `export` types by source analysis
    deps_to_classpath_entries = dict(
        zip(request.component.dependencies, direct_dependency_classpath_entries
            or ()))
    # Re-request inferred dependencies to get a list of export dependency addresses
    inferred_dependencies = await MultiGet(
        Get(
            JavaInferredDependencies,
            JavaInferredDependenciesAndExportsRequest(tgt[JavaSourceField]),
        ) for tgt in request.component.members
        if JavaFieldSet.is_applicable(tgt))
    flat_exports = {
        export for deps in inferred_dependencies for export in deps.exports
    }

    export_classpath_entries = [
        classpath_entry
        for coarsened_target, classpath_entry in deps_to_classpath_entries.items()
        if any(m.address in flat_exports for m in coarsened_target.members)
    ]

    # Then collect the component's sources.
    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(JavaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    dest_dir = "classfiles"
    dest_dir_digest, jdk = await MultiGet(
        Get(
            Digest,
            CreateDigest([Directory(dest_dir)]),
        ),
        Get(JdkEnvironment, JdkRequest,
            JdkRequest.from_target(request.component)),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((
            dest_dir_digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_java_source_files),
        )),
    )

    usercp = "__cp"
    user_classpath = Classpath(direct_dependency_classpath_entries,
                               request.resolve)
    classpath_arg = ":".join(
        user_classpath.root_immutable_inputs_args(prefix=usercp))
    immutable_input_digests = dict(
        user_classpath.root_immutable_inputs(prefix=usercp))

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
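            # com.sun.tools.javac.Main ships in tools.jar on pre-JDK-9 layouts.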
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                *javac.args,
                "-d",
                dest_dir,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_java_source_files)),
            ],
            input_digest=merged_digest,
            extra_immutable_input_digests=immutable_input_digests,
            output_directories=(dest_dir, ),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = compute_output_jar_filename(request.component)
    output_files: tuple[str, ...] = (output_file, )
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    " ".join([
                        "cd", dest_dir, ";", zip_binary.path, "-r",
                        f"../{output_file}", "."
                    ]),
                ],
                input_digest=compile_result.output_digest,
                output_files=output_files,
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        output_files = ()
        jar_output_digest = EMPTY_DIGEST

    output_classpath = ClasspathEntry(jar_output_digest, output_files,
                                      direct_dependency_classpath_entries)

    if export_classpath_entries:
        merged_export_digest = await Get(
            Digest,
            MergeDigests((output_classpath.digest,
                          *(i.digest for i in export_classpath_entries))),
        )
        merged_classpath = ClasspathEntry.merge(
            merged_export_digest,
            (output_classpath, *export_classpath_entries))
        output_classpath = merged_classpath

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        output_classpath,
    )
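
Spelled out with hypothetical paths, the jar step's shell command joins to a single string:

dest_dir = "classfiles"
output_file = "src.example.jar"  # hypothetical compute_output_jar_filename() result
zip_path = "/usr/bin/zip"        # hypothetical resolved ZipBinary path
command = " ".join(["cd", dest_dir, ";", zip_path, "-r", f"../{output_file}", "."])
assert command == "cd classfiles ; /usr/bin/zip -r ../src.example.jar ."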
Example No. 27
async def setup_full_package_build_request(
    request: _SetupGoProtobufPackageBuildRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
    package_mapping: ImportPathToPackages,
    go_protobuf_mapping: GoProtobufImportPathMapping,
    analyzer: PackageAnalyzerSetup,
) -> FallibleBuildGoPackageRequest:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    transitive_targets, downloaded_protoc_binary, empty_output_dir = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses)),
        Get(DownloadedExternalTool, ExternalToolRequest,
            protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    all_sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=(tgt[ProtobufSourceField]
                            for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSourceField, ),
            enable_codegen=True,
        ),
    )
    source_roots, input_digest = await MultiGet(
        Get(SourceRootsResult, SourceRootsRequest,
            SourceRootsRequest.for_files(all_sources.files)),
        Get(Digest,
            MergeDigests([all_sources.snapshot.digest, empty_output_dir])),
    )

    source_root_paths = sorted({sr.path for sr in source_roots.path_to_root.values()})

    pkg_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in transitive_targets.roots)
    pkg_files = sorted({f for ps in pkg_sources for f in ps.files})

    maybe_grpc_plugin_args = []
    if any(
            tgt.get(ProtobufGrpcToggleField).value
            for tgt in transitive_targets.roots):
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    gen_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *(f"--proto_path={source_root}"
                  for source_root in source_root_paths),
                *maybe_grpc_plugin_args,
                *pkg_files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.import_path}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )
    if gen_result.exit_code != 0:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=gen_result.exit_code,
            stderr=gen_result.stderr.decode(),
        )

    # Ensure that the generated files are in a single package directory.
    gen_sources = await Get(Snapshot, Digest, gen_result.output_digest)
    files_by_dir = group_by_dir(gen_sources.files)
    if len(files_by_dir) != 1:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=1,
            stderr=(
                "Expected Go files generated from Protobuf sources to be output to a single directory.\n"
                f"- import path: {request.import_path}\n"
                f"- protobuf files: {', '.join(pkg_files)}"
            ),
        )
    gen_dir = next(iter(files_by_dir))  # The sole directory, per the check above.

    # Analyze the generated sources.
    input_digest = await Get(
        Digest, MergeDigests([gen_sources.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, gen_dir),
            input_digest=input_digest,
            description=f"Determine metadata for generated Go package for {request.import_path}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )

    # Parse the metadata from the analysis.
    fallible_analysis = FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=gen_dir,
        import_path=request.import_path,
        minimum_go_version="",
        description_of_source=(
            "Go package generated from protobuf targets "
            f"`{', '.join(str(addr) for addr in request.addresses)}`"
        ),
    )
    if not fallible_analysis.analysis:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=fallible_analysis.exit_code,
            stderr=fallible_analysis.stderr,
        )
    analysis = fallible_analysis.analysis

    # Obtain build requests for third-party dependencies.
    # TODO: Consider how to merge this code with existing dependency inference code.
    dep_build_request_addrs: list[Address] = []
    for dep_import_path in (*analysis.imports, *analysis.test_imports,
                            *analysis.xtest_imports):
        # Infer dependencies on other Go packages.
        candidate_addresses = package_mapping.mapping.get(dep_import_path)
        if candidate_addresses:
            # TODO: Use explicit dependencies to disambiguate? This should never happen with Go backend though.
            if len(candidate_addresses) > 1:
                return FallibleBuildGoPackageRequest(
                    request=None,
                    import_path=request.import_path,
                    exit_code=result.exit_code,
                    stderr=(
                        f"Multiple addresses match import of `{dep_import_path}`.\n"
                        f"addresses: {', '.join(str(a) for a in candidate_addresses)}"
                    ),
                )
            dep_build_request_addrs.extend(candidate_addresses)

        # Infer dependencies on other generated Go sources.
        go_protobuf_candidate_addresses = go_protobuf_mapping.mapping.get(
            dep_import_path)
        if go_protobuf_candidate_addresses:
            dep_build_request_addrs.extend(go_protobuf_candidate_addresses)

    dep_build_requests = await MultiGet(
        Get(BuildGoPackageRequest, BuildGoPackageTargetRequest(addr))
        for addr in dep_build_request_addrs)

    return FallibleBuildGoPackageRequest(
        request=BuildGoPackageRequest(
            import_path=request.import_path,
            digest=gen_sources.digest,
            dir_path=analysis.dir_path,
            go_file_names=analysis.go_files,
            s_file_names=analysis.s_files,
            direct_dependencies=dep_build_requests,
            minimum_go_version=analysis.minimum_go_version,
        ),
        import_path=request.import_path,
    )
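
The single-directory check above hinges on grouping the generated files by parent directory. A self-contained equivalent of that helper (the real `group_by_dir` lives in Pants' utility code; this re-implementation is for illustration):

from __future__ import annotations

import os
from collections import defaultdict
from typing import Iterable


def group_by_dir(paths: Iterable[str]) -> dict[str, set[str]]:
    # Map each parent directory to the set of file names directly within it.
    ret: dict[str, set[str]] = defaultdict(set)
    for path in paths:
        dirname, filename = os.path.split(path)
        ret[dirname].add(filename)
    return ret


files_by_dir = group_by_dir(["gen/pb/a.pb.go", "gen/pb/b.pb.go"])
assert list(files_by_dir) == ["gen/pb"]  # exactly one generated package directory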
Example No. 28
def test_pex_working_directory(rule_runner: RuleRunner,
                               pex_type: type[Pex | VenvPex]) -> None:
    named_caches_dir = rule_runner.request(GlobalOptions, []).named_caches_dir
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest((FileContent(
                path="main.py",
                content=textwrap.dedent("""
                            import os
                            cwd = os.getcwd()
                            print(f"CWD: {cwd}")
                            for path, dirs, _ in os.walk(cwd):
                                for name in dirs:
                                    print(f"DIR: {os.path.relpath(os.path.join(path, name), cwd)}")
                            """).encode(),
            ), )),
        ],
    )

    pex_data = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        main=EntryPoint("main"),
        sources=sources,
        interpreter_constraints=InterpreterConstraints(["CPython>=3.6"]),
    )

    pex_process_type = PexProcess if isinstance(pex_data.pex, Pex) else VenvPexProcess

    dirpath = "foo/bar/baz"
    runtime_files = rule_runner.request(
        Digest, [CreateDigest([Directory(path=dirpath)])])

    dirpath_parts = dirpath.split("/")
    # Cover every split of the path: no working dir, each prefix, and the full path.
    for i in range(len(dirpath_parts) + 1):
        working_dir = os.path.join(*dirpath_parts[:i]) if i > 0 else None
        expected_subdir = os.path.join(*dirpath_parts[i:]) if i < len(dirpath_parts) else None
        process = rule_runner.request(
            Process,
            [
                pex_process_type(
                    pex_data.pex,
                    description="Run the pex and check its cwd",
                    working_directory=working_dir,
                    input_digest=runtime_files,
                    # We skip the process cache for this PEX to ensure that it re-runs.
                    cache_scope=ProcessCacheScope.PER_SESSION,
                )
            ],
        )

        # For VenvPexes, run the PEX twice while clearing the venv dir in between. This emulates
        # situations where a PEX creation hits the process cache, while venv seeding misses the PEX
        # cache.
        if isinstance(pex_data.pex, VenvPex):
            # Request once to ensure that the directory is seeded, and then start a new session so
            # that the second run happens as well.
            _ = rule_runner.request(ProcessResult, [process])
            rule_runner.new_session("re-run-for-venv-pex")
            rule_runner.set_options(
                ["--backend-packages=pants.backend.python"],
                env_inherit={"PATH", "PYENV_ROOT", "HOME"},
            )

            # Clear the cache.
            venv_dir = os.path.join(named_caches_dir, "pex_root",
                                    pex_data.pex.venv_rel_dir)
            assert os.path.isdir(venv_dir)
            safe_rmtree(venv_dir)

        result = rule_runner.request(ProcessResult, [process])
        output_str = result.stdout.decode()
        mo = re.search(r"CWD: (.*)\n", output_str)
        assert mo is not None
        reported_cwd = mo.group(1)
        if working_dir:
            assert reported_cwd.endswith(working_dir)
        if expected_subdir:
            assert f"DIR: {expected_subdir}" in output_str
Example No. 29
async def compile_java_source(
    bash: BashBinary,
    jdk_setup: JdkSetup,
    zip_binary: ZipBinary,
    union_membership: UnionMembership,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath.
    direct_dependency_classpath_entries = FallibleClasspathEntry.if_all_succeeded(
        await MultiGet(
            Get(
                FallibleClasspathEntry,
                ClasspathEntryRequest,
                ClasspathEntryRequest.for_targets(union_membership,
                                                  component=coarsened_dep,
                                                  resolve=request.resolve),
            ) for coarsened_dep in request.component.dependencies))
    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Then collect the component's sources.
    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(JavaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    dest_dir = "classfiles"
    merged_direct_dependency_classpath_digest, dest_dir_digest = await MultiGet(
        Get(
            Digest,
            MergeDigests(
                classfiles.digest for classfiles in direct_dependency_classpath_entries
            ),
        ),
        Get(Digest, CreateDigest([Directory(dest_dir)])),
    )

    usercp = "__cp"
    prefixed_direct_dependency_classpath_digest = await Get(
        Digest, AddPrefix(merged_direct_dependency_classpath_digest, usercp))
    classpath_arg = ClasspathEntry.arg(direct_dependency_classpath_entries,
                                       prefix=usercp)

    merged_digest = await Get(
        Digest,
        MergeDigests((
            prefixed_direct_dependency_classpath_digest,
            dest_dir_digest,
            jdk_setup.digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_java_source_files),
        )),
    )

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash,
                                [f"{jdk_setup.java_home}/lib/tools.jar"]),
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                "-d",
                dest_dir,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_java_source_files)),
            ],
            input_digest=merged_digest,
            use_nailgun=jdk_setup.digest,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            output_directories=(dest_dir, ),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = f"{request.component.representative.address.path_safe_spec}.jar"
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    " ".join([
                        "cd", dest_dir, ";", zip_binary.path, "-r",
                        f"../{output_file}", "."
                    ]),
                ],
                input_digest=compile_result.output_digest,
                output_files=(output_file, ),
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        jar_output_digest = EMPTY_DIGEST

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        ClasspathEntry(jar_output_digest, (output_file, ),
                       direct_dependency_classpath_entries),
    )
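
The `__cp` prefix added via `AddPrefix` above namespaces user jars away from the JDK files in the sandbox; the resulting `-cp` argument looks like this (jar names hypothetical):

usercp = "__cp"
jar_filenames = ["dep-a.jar", "dep-b.jar"]  # hypothetical ClasspathEntry file names
classpath_arg = ":".join(f"{usercp}/{name}" for name in jar_filenames)
assert classpath_arg == "__cp/dep-a.jar:__cp/dep-b.jar"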
Example No. 30
async def compile_avro_source(
    request: CompileAvroSourceRequest,
    jdk: InternalJdk,
    avro_tools: AvroSubsystem,
) -> CompiledAvroSource:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"

    lockfile_request = await Get(GenerateJvmLockfileFromTool, AvroToolLockfileSentinel())
    tool_classpath, subsetted_input_digest, empty_output_dir = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            Digest,
            DigestSubset(
                request.digest,
                PathGlobs(
                    [request.path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    conjunction=GlobExpansionConjunction.all_match,
                    description_of_origin="the Avro source file name",
                ),
            ),
        ),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            [
                subsetted_input_digest,
                empty_output_dir,
            ]
        ),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    def make_avro_process(
        args: Iterable[str],
        *,
        overridden_input_digest: Digest | None = None,
        overridden_output_dir: str | None = None,
    ) -> JvmProcess:

        return JvmProcess(
            jdk=jdk,
            argv=(
                "org.apache.avro.tool.Main",
                *args,
            ),
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=(
                overridden_input_digest if overridden_input_digest is not None else input_digest
            ),
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description="Generating Java sources from Avro source.",
            level=LogLevel.DEBUG,
            output_directories=(overridden_output_dir if overridden_output_dir else output_dir,),
        )

    path = PurePath(request.path)
    if path.suffix == ".avsc":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "schema", request.path, output_dir]),
        )
    elif path.suffix == ".avpr":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "protocol", request.path, output_dir]),
        )
    elif path.suffix == ".avdl":
        idl_output_dir = "__idl"
        avpr_path = os.path.join(idl_output_dir, str(path.with_suffix(".avpr")))
        idl_output_dir_digest = await Get(
            Digest, CreateDigest([Directory(os.path.dirname(avpr_path))])
        )
        idl_input_digest = await Get(Digest, MergeDigests([input_digest, idl_output_dir_digest]))
        idl_result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["idl", request.path, avpr_path],
                overridden_input_digest=idl_input_digest,
                overridden_output_dir=idl_output_dir,
            ),
        )
        generated_files_dir = await Get(Digest, CreateDigest([Directory(output_dir)]))
        protocol_input_digest = await Get(
            Digest, MergeDigests([idl_result.output_digest, generated_files_dir])
        )
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["compile", "protocol", avpr_path, output_dir],
                overridden_input_digest=protocol_input_digest,
            ),
        )
    else:
        raise AssertionError(
            f"Avro backend does not support files with extension `{path.suffix}`: {path}"
        )

    normalized_digest = await Get(Digest, RemovePrefix(result.output_digest, output_dir))
    return CompiledAvroSource(normalized_digest)
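
The suffix dispatch above reduces to a small argument table for the direct cases; a standalone sketch (`avro_args` is a hypothetical name, and the `.avdl` branch is omitted since it requires the two-step idl-then-compile pipeline shown above):

from __future__ import annotations

from pathlib import PurePath


def avro_args(path: str, output_dir: str) -> list[str]:
    suffix = PurePath(path).suffix
    if suffix == ".avsc":
        return ["compile", "schema", path, output_dir]
    if suffix == ".avpr":
        return ["compile", "protocol", path, output_dir]
    raise AssertionError(f"Unsupported Avro extension `{suffix}`: {path}")


assert avro_args("models/user.avsc", "_generated_files") == [
    "compile", "schema", "models/user.avsc", "_generated_files",
]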