Example #1
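A Pants rule that runs the PantsJavaParserLauncher dependency-analysis tool as a JvmProcess over a single Java source file and wraps the fallible result.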
async def analyze_java_source_dependencies(
    processor_classfiles: JavaParserCompiledClassfiles,
    jdk: InternalJdk,
    request: JavaSourceDependencyAnalysisRequest,
) -> FallibleJavaSourceDependencyAnalysisResult:
    source_files = request.source_files
    if len(source_files.files) > 1:
        raise ValueError(
            f"analyze_java_source_dependencies expects sources with exactly 1 source file, but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_java_source_dependencies expects sources with exactly 1 source file, but found none."
        )
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                        JavaParserToolLockfileSentinel())
    tool_classpath, prefixed_source_files_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(lockfile=parser_lockfile_request),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.javaparser.PantsJavaParserLauncher",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path, ),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )

    return FallibleJavaSourceDependencyAnalysisResult(
        process_result=process_result)
Example #2
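Builds the Setup (a JvmProcess plus the original snapshot) for running Google Java Format over the requested sources, adding the --add-exports options required on JDK 11 and newer.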
async def setup_google_java_format(
    setup_request: SetupRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> Setup:

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 GoogleJavaFormatToolLockfileSentinel())
    source_files, tool_classpath = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(
                field_set.source
                for field_set in setup_request.request.field_sets),
        ),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
    )

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    maybe_java11_or_higher_options = []
    if jdk.jre_major_version >= 11:
        maybe_java11_or_higher_options = [
            "--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
        ]

    args = [
        *maybe_java11_or_higher_options,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--dry-run" if setup_request.check_only else "--replace",
        *source_files.files,
    ]

    process = JvmProcess(
        jdk=jdk,
        argv=args,
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=source_files_snapshot.digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        extra_nailgun_keys=extra_immutable_input_digests,
        output_files=source_files_snapshot.files,
        description=f"Run Google Java Format on {pluralize(len(setup_request.request.field_sets), 'file')}.",
        level=LogLevel.DEBUG,
    )

    return Setup(process, original_snapshot=source_files_snapshot)
Example #3
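Runs javac --version with the coursier append-only caches disabled so that the downloaded JDK is captured in the process output digest for later use by pants run.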
async def ensure_jdk_for_pants_run(jdk: JdkEnvironment) -> __RuntimeJvm:
    # `tools.jar` is distributed with the JDK, so we can rely on it existing.
    ensure_jvm_process = await Get(
        Process,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=["com.sun.tools.javac.Main", "--version"],
            input_digest=EMPTY_DIGEST,
            description="Ensure download of JDK for `pants run` use",
        ),
    )

    # Do not treat the coursier JDK locations as an append-only cache, so that we can capture the
    # downloaded JDK in a `Digest`

    ensure_jvm_process = dataclasses.replace(
        ensure_jvm_process,
        append_only_caches=FrozenDict(),
        output_directories=(".cache/jdk", ".cache/arc"),
        use_nailgun=(),
    )

    ensure_jvm = await Get(ProcessResult, Process, ensure_jvm_process)

    return __RuntimeJvm(ensure_jvm.output_digest)
Example #4
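A test that launches a single-file Java program (relying on JEP 330 source launching) with both global and per-process JVM options and asserts that the corresponding system properties are set.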
def test_pass_jvm_options_to_java_program(rule_runner: RuleRunner) -> None:
    global_jvm_options = ["-Dpants.jvm.global=true"]

    # Rely on JEP-330 to run a Java file from source so we don't need a compile step.
    rule_runner.set_options(
        ["--jvm-tool-jdk=adopt:1.11", f"--jvm-global-options={repr(global_jvm_options)}"],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )

    classname = "EchoSystemProperties"
    filename = f"{classname}.java"
    file_content = textwrap.dedent(
        f"""\
        public class {classname} {{
            public static void main(String[] args) {{
                System.getProperties().list(System.out);
            }}
        }}
        """
    )

    input_digest = rule_runner.request(
        Digest,
        [
            CreateDigest(
                [
                    FileContent(
                        filename,
                        file_content.encode("utf-8"),
                    )
                ]
            )
        ],
    )

    jdk = rule_runner.request(InternalJdk, [])
    process_result = rule_runner.request(
        ProcessResult,
        [
            JvmProcess(
                jdk=jdk,
                argv=[filename],
                classpath_entries=(),
                extra_jvm_options=["-Dpants.jvm.extra=true"],
                input_digest=input_digest,
                description="Echo JVM System properties",
                use_nailgun=False,
            )
        ],
    )

    jvm_properties = [
        prop for prop in process_result.stdout.decode("utf-8").splitlines() if "=" in prop
    ]
    assert "java.specification.version=11" in jvm_properties
    assert "pants.jvm.global=true" in jvm_properties
    assert "pants.jvm.extra=true" in jvm_properties
Example #5
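The Scala counterpart of Example #1: analyzes a single Scala source file with the ScalaParser tool classpath and returns the fallible analysis result.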
async def analyze_scala_source_dependencies(
    jdk: InternalJdk,
    processor_classfiles: ScalaParserCompiledClassfiles,
    source_files: SourceFiles,
) -> FallibleScalaSourceDependencyAnalysisResult:
    if len(source_files.files) > 1:
        raise ValueError(
            f"analyze_scala_source_dependencies expects sources with exactly 1 source file, but found {len(source_files.snapshot.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, but found none."
        )
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    (tool_classpath, prefixed_source_files_digest,) = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS),
        ),
        Get(Digest, AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                processorcp_relpath,
            ],
            argv=[
                "org.pantsbuild.backend.scala.dependency_inference.ScalaParser",
                analysis_output_path,
                source_path,
            ],
            input_digest=prefixed_source_files_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            output_files=(analysis_output_path,),
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Analyzing {source_files.files[0]}",
            level=LogLevel.DEBUG,
        ),
    )

    return FallibleScalaSourceDependencyAnalysisResult(process_result=process_result)
Example #6
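A formatter rule that runs Google Java Format in --replace mode over the request snapshot and builds an FmtResult from the output.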
async def google_java_format_fmt(
    request: GoogleJavaFormatRequest,
    tool: GoogleJavaFormatSubsystem,
    jdk: InternalJdk,
) -> FmtResult:
    if tool.skip:
        return FmtResult.skip(formatter_name=request.name)
    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 GoogleJavaFormatToolLockfileSentinel())
    tool_classpath = await Get(ToolClasspath,
                               ToolClasspathRequest(lockfile=lockfile_request))

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    maybe_java11_or_higher_options = []
    if jdk.jre_major_version >= 11:
        maybe_java11_or_higher_options = [
            "--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
            "--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
        ]

    args = [
        *maybe_java11_or_higher_options,
        "com.google.googlejavaformat.java.Main",
        *(["--aosp"] if tool.aosp else []),
        "--replace",
        *request.snapshot.files,
    ]

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            argv=args,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=request.snapshot.digest,
            extra_jvm_options=tool.jvm_options,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            output_files=request.snapshot.files,
            description=f"Run Google Java Format on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request,
                            result,
                            output_snapshot,
                            strip_chroot_path=True)
Example #7
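Compiles the Java parser launcher source with javac (via the JDK's tools.jar) and strips the output prefix to produce the processor classfiles used for dependency inference.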
async def build_processors(jdk: InternalJdk) -> JavaParserCompiledClassfiles:
    dest_dir = "classfiles"
    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                        JavaParserToolLockfileSentinel())
    materialized_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(prefix="__toolcp",
                                 lockfile=parser_lockfile_request),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    path=_LAUNCHER_BASENAME,
                    content=_load_javaparser_launcher_source(),
                ),
                Directory(dest_dir),
            ]),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests((
            materialized_classpath.digest,
            source_digest,
        )),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                "-cp",
                ":".join(materialized_classpath.classpath_entries()),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir, ),
            description=f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #8
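A test helper that builds the underlying Process for a bare java -version invocation with an empty classpath.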
def javac_version_proc(rule_runner: RuleRunner) -> Process:
    jdk = rule_runner.request(InternalJdk, [])
    return rule_runner.request(
        Process,
        [
            JvmProcess(
                jdk=jdk,
                classpath_entries=(),
                argv=[
                    "-version",
                ],
                input_digest=EMPTY_DIGEST,
                description="",
                use_nailgun=False,
            )
        ],
    )
Example #9
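A formatter rule that runs Ktlint (com.pinterest.ktlint.Main with -F) over the request snapshot.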
async def ktlint_fmt(request: KtlintRequest, tool: KtlintSubsystem,
                     jdk: InternalJdk) -> FmtResult:
    if tool.skip:
        return FmtResult.skip(formatter_name=request.name)

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 KtlintToolLockfileSentinel())
    tool_classpath = await Get(ToolClasspath,
                               ToolClasspathRequest(lockfile=lockfile_request))

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    args = [
        "com.pinterest.ktlint.Main",
        "-F",
        *request.snapshot.files,
    ]

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            argv=args,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=request.snapshot.digest,
            extra_jvm_options=tool.jvm_options,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            output_files=request.snapshot.files,
            description=f"Run Ktlint on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )

    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request,
                            result,
                            output_snapshot,
                            strip_chroot_path=True)
Example #10
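Prepares one scalafmt partition: subsets the merged sources to the partition's config file and files, then builds the JvmProcess that runs the scalafmt CLI over them.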
async def setup_scalafmt_partition(
    request: SetupScalafmtPartition,
    jdk: InternalJdk,
) -> Partition:
    sources_digest = await Get(
        Digest,
        DigestSubset(
            request.merged_sources_digest,
            PathGlobs(
                [request.config_file, *request.files],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin=f"the files in scalafmt partition for config file {request.config_file}",
            ),
        ),
    )

    args = [
        "org.scalafmt.cli.Cli",
        f"--config={request.config_file}",
        "--non-interactive",
    ]
    if request.check_only:
        args.append("--list")
    else:
        args.append("--quiet")
    args.extend(request.files)

    process = JvmProcess(
        jdk=jdk,
        argv=args,
        classpath_entries=request.classpath_entries,
        input_digest=sources_digest,
        output_files=request.files,
        extra_immutable_input_digests=request.extra_immutable_input_digests,
        # extra_nailgun_keys=request.extra_immutable_input_digests,
        use_nailgun=False,
        description=f"Run `scalafmt` on {pluralize(len(request.files), 'file')}.",
        level=LogLevel.DEBUG,
    )

    return Partition(process, f"{pluralize(len(request.files), 'file')} ({request.config_file})")
Example #11
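A test helper that runs java -version and returns the combined stderr and stdout.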
def run_javac_version(rule_runner: RuleRunner) -> str:
    jdk = rule_runner.request(InternalJdk, [])
    process_result = rule_runner.request(
        ProcessResult,
        [
            JvmProcess(
                jdk=jdk,
                classpath_entries=(),
                argv=[
                    "-version",
                ],
                input_digest=EMPTY_DIGEST,
                description="",
                use_nailgun=False,
            )
        ],
    )
    return "\n".join([
        process_result.stderr.decode("utf-8"),
        process_result.stdout.decode("utf-8"),
    ])
Example #12
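A nested helper that builds the JvmProcess for the Avro code-generation tool (org.apache.avro.tool.Main), with optional overrides for the input digest and output directory. Note that jdk, tool_classpath, toolcp_relpath, input_digest, extra_immutable_input_digests, and output_dir are closed over from the enclosing rule, which is not shown here.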
    def make_avro_process(
        args: Iterable[str],
        *,
        overridden_input_digest: Digest | None = None,
        overridden_output_dir: str | None = None,
    ) -> JvmProcess:

        return JvmProcess(
            jdk=jdk,
            argv=(
                "org.apache.avro.tool.Main",
                *args,
            ),
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=(
                overridden_input_digest if overridden_input_digest is not None else input_digest
            ),
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description="Generating Java sources from Avro source.",
            level=LogLevel.DEBUG,
            output_directories=(overridden_output_dir if overridden_output_dir else output_dir,),
        )
Example #13
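A test helper that packages a deploy jar for the given target and runs it with java -jar, asserting the expected output.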
def _deploy_jar_test(rule_runner: RuleRunner, target_name: str) -> None:
    tgt = rule_runner.get_target(Address("", target_name=target_name))
    jdk = rule_runner.request(InternalJdk, [])
    fat_jar = rule_runner.request(
        BuiltPackage,
        [DeployJarFieldSet.create(tgt)],
    )

    process_result = rule_runner.request(
        ProcessResult,
        [
            JvmProcess(
                jdk=jdk,
                argv=("-jar", "dave.jar"),
                classpath_entries=[],
                description="Run that test jar",
                input_digest=fat_jar.digest,
                use_nailgun=False,
            )
        ],
    )

    assert process_result.stdout.decode("utf-8").strip() == "Hello, World!"
Example #14
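Generates Java sources from a WSDL file by running the JAX-WS wsimport tool (com.sun.tools.ws.WsImport) and stripping the output directory prefix from the result.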
async def compile_wsdl_source(
    request: CompileWsdlSourceRequest,
    jdk: InternalJdk,
    jaxws: JaxWsTools,
) -> CompiledWsdlSource:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 JaxWsToolsLockfileSentinel())
    tool_classpath, subsetted_input_digest, empty_output_dir = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(lockfile=lockfile_request),
        ),
        Get(
            Digest,
            DigestSubset(
                request.digest,
                PathGlobs(
                    [request.path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    conjunction=GlobExpansionConjunction.all_match,
                    description_of_origin="the WSDL file name",
                ),
            ),
        ),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    input_digest = await Get(
        Digest, MergeDigests([subsetted_input_digest, empty_output_dir]))

    immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    jaxws_args = [
        "-d",
        output_dir,
        "-encoding",
        "utf8",
        "-keep",
        "-Xnocompile",
        "-B-XautoNameResolution",
    ]
    if request.module:
        jaxws_args.extend(["-m", request.module])
    if request.package:
        jaxws_args.extend(["-p", request.package])

    jaxws_process = JvmProcess(
        jdk=jdk,
        argv=[
            "com.sun.tools.ws.WsImport",
            *jaxws_args,
            request.path,
        ],
        classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
        input_digest=input_digest,
        extra_immutable_input_digests=immutable_input_digests,
        extra_nailgun_keys=immutable_input_digests,
        description="Generating Java sources from WSDL source",
        level=LogLevel.DEBUG,
        output_directories=(output_dir, ),
    )
    jaxws_result = await Get(ProcessResult, JvmProcess, jaxws_process)

    normalized_digest = await Get(
        Digest, RemovePrefix(jaxws_result.output_digest, output_dir))
    return CompiledWsdlSource(normalized_digest)
Example #15
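Builds a RunRequest for a deploy jar: packages the jar, creates the JvmProcess for its main class, rewrites JDK-related paths and coursier cache variables to be {chroot}-relative, and merges the supporting digests.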
async def create_deploy_jar_run_request(field_set: DeployJarFieldSet) -> RunRequest:

    jdk = await Get(JdkEnvironment, JdkRequest,
                    JdkRequest.from_field(field_set.jdk_version))

    main_class = field_set.main_class.value
    assert main_class is not None

    package = await Get(BuiltPackage, DeployJarFieldSet, field_set)
    assert len(package.artifacts) == 1
    jar_path = package.artifacts[0].relpath
    assert jar_path is not None

    proc = await Get(
        Process,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{{chroot}}/{jar_path}"],
            argv=(main_class, ),
            input_digest=package.digest,
            description=f"Run {main_class}.main(String[])",
            use_nailgun=False,
        ),
    )

    support_digests = await MultiGet(
        Get(Digest, AddPrefix(digest, prefix))
        for prefix, digest in proc.immutable_input_digests.items())

    runtime_jvm = await Get(__RuntimeJvm, JdkEnvironment, jdk)
    support_digests += (runtime_jvm.digest, )

    # TODO(#14386) This argument re-writing code should be done in a more standardised way.
    # See also `jdk_rules.py` for other argument re-writing code.
    def prefixed(arg: str, prefixes: Iterable[str]) -> str:
        if any(arg.startswith(prefix) for prefix in prefixes):
            return f"{{chroot}}/{arg}"
        else:
            return arg

    prefixes = (jdk.bin_dir, jdk.jdk_preparation_script, jdk.java_home)
    args = [prefixed(arg, prefixes) for arg in proc.argv]

    env = {
        **proc.env,
        "PANTS_INTERNAL_ABSOLUTE_PREFIX": "{chroot}/",
    }

    # absolutify coursier cache envvars
    for key in env:
        if key.startswith("COURSIER"):
            env[key] = prefixed(env[key], (jdk.coursier.cache_dir, ))

    request_digest = await Get(
        Digest,
        MergeDigests([
            proc.input_digest,
            *support_digests,
        ]),
    )

    return RunRequest(
        digest=request_digest,
        args=args,
        extra_env=env,
    )
Example #16
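Compiles a Java compilation unit with javac, jars the resulting classfiles in a separate bash/zip process (the nailgun runner cannot invoke the bash wrapper), and merges any exported classpath entries into the result.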
async def compile_java_source(
    bash: BashBinary,
    javac: JavacSubsystem,
    zip_binary: ZipBinary,
    request: CompileJavaSourceRequest,
) -> FallibleClasspathEntry:
    # Request the component's direct dependency classpath, and additionally any prerequisite.
    optional_prereq_request = [
        *((request.prerequisite, ) if request.prerequisite else ())
    ]
    fallibles = await MultiGet(
        Get(FallibleClasspathEntries,
            ClasspathEntryRequests(optional_prereq_request)),
        Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request)),
    )

    direct_dependency_classpath_entries = FallibleClasspathEntries(
        itertools.chain(*fallibles)).if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    # Capture just the `ClasspathEntry` objects that are listed as `export` types by source analysis
    deps_to_classpath_entries = dict(
        zip(request.component.dependencies, direct_dependency_classpath_entries
            or ()))
    # Re-request inferred dependencies to get a list of export dependency addresses
    inferred_dependencies = await MultiGet(
        Get(
            JavaInferredDependencies,
            JavaInferredDependenciesAndExportsRequest(tgt[JavaSourceField]),
        ) for tgt in request.component.members
        if JavaFieldSet.is_applicable(tgt))
    flat_exports = {
        export
        for i in inferred_dependencies for export in i.exports
    }

    export_classpath_entries = [
        classpath_entry for coarsened_target, classpath_entry in
        deps_to_classpath_entries.items()
        if any(m.address in flat_exports for m in coarsened_target.members)
    ]

    # Then collect the component's sources.
    component_members_with_sources = tuple(t for t in request.component.members
                                           if t.has_field(SourcesField))
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField), ),
                    for_sources_types=(JavaSourceField, ),
                    enable_codegen=True,
                ),
            ) for t in component_members_with_sources),
    )
    component_members_and_java_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]
    if not component_members_and_java_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest,
            MergeDigests(cpe.digest
                         for cpe in direct_dependency_classpath_entries))
        classpath_entry = ClasspathEntry.merge(
            exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    dest_dir = "classfiles"
    dest_dir_digest, jdk = await MultiGet(
        Get(
            Digest,
            CreateDigest([Directory(dest_dir)]),
        ),
        Get(JdkEnvironment, JdkRequest,
            JdkRequest.from_target(request.component)),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((
            dest_dir_digest,
            *(sources.snapshot.digest
              for _, sources in component_members_and_java_source_files),
        )),
    )

    usercp = "__cp"
    user_classpath = Classpath(direct_dependency_classpath_entries,
                               request.resolve)
    classpath_arg = ":".join(
        user_classpath.root_immutable_inputs_args(prefix=usercp))
    immutable_input_digests = dict(
        user_classpath.root_immutable_inputs(prefix=usercp))

    # Compile.
    compile_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                *(("-cp", classpath_arg) if classpath_arg else ()),
                *javac.args,
                "-d",
                dest_dir,
                *sorted(
                    chain.from_iterable(
                        sources.snapshot.files for _, sources in
                        component_members_and_java_source_files)),
            ],
            input_digest=merged_digest,
            extra_immutable_input_digests=immutable_input_digests,
            output_directories=(dest_dir, ),
            description=f"Compile {request.component} with javac",
            level=LogLevel.DEBUG,
        ),
    )
    if compile_result.exit_code != 0:
        return FallibleClasspathEntry.from_fallible_process_result(
            str(request.component),
            compile_result,
            None,
        )

    # Jar.
    # NB: We jar up the outputs in a separate process because the nailgun runner cannot support
    # invoking via a `bash` wrapper (since the trailing portion of the command is executed by
    # the nailgun server). We might be able to resolve this in the future via a Javac wrapper shim.
    output_snapshot = await Get(Snapshot, Digest, compile_result.output_digest)
    output_file = compute_output_jar_filename(request.component)
    output_files: tuple[str, ...] = (output_file, )
    if output_snapshot.files:
        jar_result = await Get(
            ProcessResult,
            Process(
                argv=[
                    bash.path,
                    "-c",
                    " ".join([
                        "cd", dest_dir, ";", zip_binary.path, "-r",
                        f"../{output_file}", "."
                    ]),
                ],
                input_digest=compile_result.output_digest,
                output_files=output_files,
                description=f"Capture outputs of {request.component} for javac",
                level=LogLevel.TRACE,
            ),
        )
        jar_output_digest = jar_result.output_digest
    else:
        # If there was no output, then do not create a jar file. This may occur, for example, when compiling
        # a `package-info.java` in a single partition.
        output_files = ()
        jar_output_digest = EMPTY_DIGEST

    output_classpath = ClasspathEntry(jar_output_digest, output_files,
                                      direct_dependency_classpath_entries)

    if export_classpath_entries:
        merged_export_digest = await Get(
            Digest,
            MergeDigests((output_classpath.digest,
                          *(i.digest for i in export_classpath_entries))),
        )
        merged_classpath = ClasspathEntry.merge(
            merged_export_digest,
            (output_classpath, *export_classpath_entries))
        output_classpath = merged_classpath

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        compile_result,
        output_classpath,
    )
Example #17
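Compiles ScalaParser.scala with scalac into the classfiles used for Scala dependency inference.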
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Example #18
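Sets up the Scalatest runner process for a test target, wiring in the user and tool classpaths, a per-target reports directory, and the appropriate cache scope.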
async def setup_scalatest_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    scalatest: Scalatest,
    test_subsystem: TestSubsystem,
) -> TestSetup:

    jdk, dependencies = await MultiGet(
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_field(request.field_set.jdk_version)),
        Get(Targets, DependenciesRequest(request.field_set.dependencies)),
    )

    lockfile_request = await Get(GenerateJvmLockfileFromTool, ScalatestToolLockfileSentinel())
    classpath, scalatest_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField) for dep in dependencies),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
        ),
    )

    input_digest = await Get(Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: scalatest_classpath.digest,
    }

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"

    # Classfiles produced by the root `scalatest_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    extra_jvm_args: list[str] = []
    if request.is_debug:
        extra_jvm_args.extend(jvm.debug_args)

    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *scalatest_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.scalatest.tools.Runner",
            # TODO: We currently give the entire user classpath to the JVM for startup (which
            # mixes it with the user classpath), and then only specify the roots to run here.
            #   see https://github.com/pantsbuild/pants/issues/13871
            *(("-R", user_classpath_arg) if user_classpath_arg else ()),
            "-o",
            "-u",
            reports_dir,
            *scalatest.args,
        ],
        input_digest=input_digest,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir,),
        description=f"Run Scalatest runner for {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)
Example #19
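Compiles Kotlin sources with the embeddable kotlinc compiler, supplying any configured kotlinc plugins and the user classpath as immutable inputs.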
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
Example #20
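Generates sources from Thrift with Scrooge, passing -i include paths derived from the source roots and --finagle when the target enables it.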
async def generate_scrooge_thrift_sources(
    request: GenerateScroogeThriftSourcesRequest,
    jdk: InternalJdk,
    scrooge: ScroogeSubsystem,
) -> GeneratedScroogeThriftSources:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScroogeToolLockfileSentinel())
    tool_classpath, transitive_targets, empty_output_dir_digest, wrapped_target = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.thrift_source_field.address])),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(
            WrappedTarget,
            WrappedTargetRequest(request.thrift_source_field.address,
                                 description_of_origin="<infallible>"),
        ),
    )

    transitive_sources, target_sources = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(tgt[ThriftSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ThriftSourceField)),
        ),
        Get(SourceFiles, SourceFilesRequest([request.thrift_source_field])),
    )

    sources_roots = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(transitive_sources.snapshot.files),
    )
    deduped_source_root_paths = sorted(
        {sr.path
         for sr in sources_roots.path_to_root.values()})

    input_digest = await Get(
        Digest,
        MergeDigests([
            transitive_sources.snapshot.digest,
            target_sources.snapshot.digest,
            empty_output_dir_digest,
        ]),
    )

    maybe_include_paths = []
    for path in deduped_source_root_paths:
        maybe_include_paths.extend(["-i", path])

    maybe_finagle_option = []
    if wrapped_target.target[ScroogeFinagleBoolField].value:
        maybe_finagle_option = ["--finagle"]

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "com.twitter.scrooge.Main",
                *maybe_include_paths,
                "--dest",
                output_dir,
                "--language",
                request.lang_id,
                *maybe_finagle_option,
                *target_sources.snapshot.files,
            ],
            input_digest=input_digest,
            extra_jvm_options=scrooge.jvm_options,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating {request.lang_name} sources from {request.thrift_source_field.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    output_snapshot = await Get(Snapshot,
                                RemovePrefix(result.output_digest, output_dir))
    return GeneratedScroogeThriftSources(output_snapshot)
Example #21
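Generates Scala sources from Protobuf by running the ScalaPB shim with the downloaded protoc binary and any configured ScalaPB JVM plugins, then restores the source root prefix on the output.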
async def generate_scala_from_protobuf(
    request: GenerateScalaFromProtobufRequest,
    protoc: Protoc,
    scalapb: ScalaPBSubsystem,
    shim_classfiles: ScalaPBShimCompiledClassfiles,
    jdk: InternalJdk,
) -> GeneratedSources:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"
    shimcp_relpath = "__shimcp"
    plugins_relpath = "__plugins"
    protoc_relpath = "__protoc"

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScalapbcToolLockfileSentinel())
    (
        downloaded_protoc_binary,
        tool_classpath,
        empty_output_dir,
        transitive_targets,
        inherit_env,
    ) = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            protoc.get_request(Platform.current)),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.protocol_target.address])),
        # Need PATH so that ScalaPB can invoke `mkfifo`.
        Get(Environment, EnvironmentRequest(requested=["PATH"])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(tgt[ProtobufSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ProtobufSourceField)),
        ),
        Get(StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]
                                ])),
    )

    merged_jvm_plugins_digest = EMPTY_DIGEST
    maybe_jvm_plugins_setup_args: tuple[str, ...] = ()
    maybe_jvm_plugins_output_args: tuple[str, ...] = ()
    jvm_plugins = scalapb.jvm_plugins
    if jvm_plugins:
        materialized_jvm_plugins = await Get(
            MaterializedJvmPlugins, MaterializeJvmPluginsRequest(jvm_plugins))
        merged_jvm_plugins_digest = materialized_jvm_plugins.digest
        maybe_jvm_plugins_setup_args = materialized_jvm_plugins.setup_args(
            plugins_relpath)
        maybe_jvm_plugins_output_args = tuple(
            f"--{plugin.name}_out={output_dir}"
            for plugin in materialized_jvm_plugins.plugins)

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        shimcp_relpath: shim_classfiles.digest,
        plugins_relpath: merged_jvm_plugins_digest,
        protoc_relpath: downloaded_protoc_binary.digest,
    }

    input_digest = await Get(
        Digest,
        MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir]))

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[
                *tool_classpath.classpath_entries(toolcp_relpath),
                shimcp_relpath
            ],
            argv=[
                "org.pantsbuild.backend.scala.scalapb.ScalaPBShim",
                f"--protoc={os.path.join(protoc_relpath, downloaded_protoc_binary.exe)}",
                *maybe_jvm_plugins_setup_args,
                f"--scala_out={output_dir}",
                *maybe_jvm_plugins_output_args,
                *target_sources_stripped.snapshot.files,
            ],
            input_digest=input_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=f"Generating Scala sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
            extra_env=inherit_env,
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest,
            SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
Example #22
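Sets up the JUnit 5 ConsoleLauncher process for a test target, including the reports directory and the appropriate cache scope.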
async def setup_junit_for_target(
    request: TestSetupRequest,
    jvm: JvmSubsystem,
    junit: JUnit,
    test_subsystem: TestSubsystem,
) -> TestSetup:

    jdk, transitive_tgts = await MultiGet(
        Get(JdkEnvironment, JdkRequest,
            JdkRequest.from_field(request.field_set.jdk_version)),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
    )

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 JunitToolLockfileSentinel())
    classpath, junit_classpath, files = await MultiGet(
        Get(Classpath, Addresses([request.field_set.address])),
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            SourceFiles,
            SourceFilesRequest(
                (dep.get(SourcesField)
                 for dep in transitive_tgts.dependencies),
                for_sources_types=(FileSourceField, ),
                enable_codegen=True,
            ),
        ),
    )

    input_digest = await Get(
        Digest, MergeDigests((*classpath.digests(), files.snapshot.digest)))

    toolcp_relpath = "__toolcp"
    extra_immutable_input_digests = {
        toolcp_relpath: junit_classpath.digest,
    }

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{request.field_set.address.path_safe_spec}"

    # Classfiles produced by the root `junit_test` targets are the only ones which should run.
    user_classpath_arg = ":".join(classpath.root_args())

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)

    extra_jvm_args: list[str] = []
    if request.is_debug:
        extra_jvm_args.extend(jvm.debug_args)

    process = JvmProcess(
        jdk=jdk,
        classpath_entries=[
            *classpath.args(),
            *junit_classpath.classpath_entries(toolcp_relpath),
        ],
        argv=[
            *extra_jvm_args,
            "org.junit.platform.console.ConsoleLauncher",
            *(("--classpath", user_classpath_arg) if user_classpath_arg else
              ()),
            *(("--scan-class-path",
               user_classpath_arg) if user_classpath_arg else ()),
            "--reports-dir",
            reports_dir,
            *junit.args,
        ],
        input_digest=input_digest,
        extra_jvm_options=junit.jvm_options,
        extra_immutable_input_digests=extra_immutable_input_digests,
        output_directories=(reports_dir, ),
        description=f"Run JUnit 5 ConsoleLauncher against {request.field_set.address}",
        level=LogLevel.DEBUG,
        cache_scope=cache_scope,
        use_nailgun=False,
    )
    return TestSetup(process=process, reports_dir_prefix=reports_dir_prefix)
Example #23
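Compiles the ScalaPBShim.scala resource with scalac into the shim classfiles used by Example #21.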
async def setup_scalapb_shim_classfiles(
    scalapb: ScalaPBSubsystem,
    jdk: InternalJdk,
) -> ScalaPBShimCompiledClassfiles:
    dest_dir = "classfiles"

    scalapb_shim_content = pkgutil.get_data(
        "pants.backend.codegen.protobuf.scala", "ScalaPBShim.scala")
    if not scalapb_shim_content:
        raise AssertionError("Unable to find ScalaPBShim.scala resource.")

    scalapb_shim_source = FileContent("ScalaPBShim.scala",
                                      scalapb_shim_content)

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScalapbcToolLockfileSentinel())
    tool_classpath, shim_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates([
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-compiler",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-library",
                        version=SHIM_SCALA_VERSION,
                    ),
                    Coordinate(
                        group="org.scala-lang",
                        artifact="scala-reflect",
                        version=SHIM_SCALA_VERSION,
                    ),
                ]),
            ),
        ),
        Get(ToolClasspath,
            ToolClasspathRequest(prefix="__shimcp",
                                 lockfile=lockfile_request)),
        Get(Digest, CreateDigest([scalapb_shim_source,
                                  Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (tool_classpath.digest, shim_classpath.digest, source_digest)))

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(shim_classpath.classpath_entries()),
                "-d",
                dest_dir,
                scalapb_shim_source.path,
            ],
            input_digest=merged_digest,
            extra_jvm_options=scalapb.jvm_options,
            output_directories=(dest_dir, ),
            description="Compile ScalaPB shim with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return ScalaPBShimCompiledClassfiles(digest=stripped_classfiles_digest)