Example #1
async def bsp_resources_request(
    request: ResourcesParams,
    workspace: Workspace,
) -> ResourcesResult:
    bsp_targets = await MultiGet(
        Get(BSPBuildTargetInternal, BuildTargetIdentifier, bsp_target_id)
        for bsp_target_id in request.targets
    )

    resources_results = await MultiGet(
        Get(
            BSPResourcesResult,
            ResourcesForOneBSPTargetRequest(
                bsp_target=bsp_target,
            ),
        )
        for bsp_target in bsp_targets
    )

    # TODO: Need to determine how resources are expected to be exposed. Directories? Individual files?
    # Initially, it looks like loose directories.
    output_digest = await Get(Digest, MergeDigests([r.output_digest for r in resources_results]))
    if output_digest != EMPTY_DIGEST:
        workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")

    return ResourcesResult(
        tuple(
            ResourcesItem(
                target,
                rr.resources,
            )
            for target, rr in zip(request.targets, resources_results)
        )
    )
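
A note on the pattern above: the final zip() relies on MultiGet returning its results in the same order as the requests it was given, so the i-th ResourcesItem pairs with request.targets[i]. A toy illustration of that ordering invariant, in plain Python:

targets = ["//a:a", "//b:b"]
results = ["res-a", "res-b"]  # MultiGet yields results in request order
assert list(zip(targets, results)) == [("//a:a", "res-a"), ("//b:b", "res-b")]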
Example #2
async def fetch_kotlinc_plugins(
        request: KotlincPluginsRequest) -> KotlincPlugins:
    # Fetch all the artifacts
    coarsened_targets = await Get(
        CoarsenedTargets,
        Addresses(target.address for target in request.artifacts))
    fallible_artifacts = await MultiGet(
        Get(
            FallibleClasspathEntry,
            CoursierFetchRequest(ct, resolve=request.resolve),
        ) for ct in coarsened_targets)

    artifacts = FallibleClasspathEntry.if_all_succeeded(fallible_artifacts)
    if artifacts is None:
        failed = [i for i in fallible_artifacts if i.exit_code != 0]
        raise Exception(f"Fetching local kotlinc plugins failed: {failed}")

    entries = list(ClasspathEntry.closure(artifacts))
    merged_classpath_digest = await Get(
        Digest, MergeDigests(entry.digest for entry in entries))
    merged = ClasspathEntry.merge(merged_classpath_digest, entries)

    ids = tuple(_plugin_id(target) for target in request.plugins)

    plugin_args = FrozenDict({
        _plugin_id(plugin): tuple(plugin[KotlincPluginArgsField].value or [])
        for plugin in request.plugins
    })

    return KotlincPlugins(ids=ids, classpath=merged, plugin_args=plugin_args)
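
The return value packs plugin_args into a FrozenDict (and ids into a tuple) because Pants rule products must be immutable and hashable so the engine can memoize them. A minimal sketch of that property, assuming pants is importable:

from pants.util.frozendict import FrozenDict

args = FrozenDict({"my-plugin": ("-P", "plugin:opt=1")})
assert isinstance(hash(args), int)  # usable as an engine cache key
# args["my-plugin"] = ()  # would fail: FrozenDict does not support item assignment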
Example #3
async def resources_bsp_target(
    request: ResourcesForOneBSPTargetRequest,
    union_membership: UnionMembership,
) -> BSPResourcesResult:
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)
    resources_request_types: FrozenOrderedSet[Type[BSPResourcesRequest]] = union_membership.get(
        BSPResourcesRequest
    )
    field_sets_by_request_type: dict[Type[BSPResourcesRequest], set[FieldSet]] = defaultdict(set)
    for target in targets:
        for resources_request_type in resources_request_types:
            field_set_type = resources_request_type.field_set_type
            if field_set_type.is_applicable(target):
                field_set = field_set_type.create(target)
                field_sets_by_request_type[resources_request_type].add(field_set)

    resources_results = await MultiGet(
        Get(
            BSPResourcesResult,
            BSPResourcesRequest,
            resources_request_type(bsp_target=request.bsp_target, field_sets=tuple(field_sets)),
        )
        for resources_request_type, field_sets in field_sets_by_request_type.items()
    )

    resources = tuple(sorted({resource for rr in resources_results for resource in rr.resources}))

    output_digest = await Get(Digest, MergeDigests([rr.output_digest for rr in resources_results]))

    return BSPResourcesResult(
        resources=resources,
        output_digest=output_digest,
    )
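
The implementations returned by union_membership.get(BSPResourcesRequest) are whatever BSPResourcesRequest subclasses the enabled backends have registered. A hedged sketch of such a registration, assuming the standard union API; ScalaBSPResourcesRequest is a hypothetical member type:

from pants.engine.unions import UnionRule

def rules():
    return [
        # Make the (hypothetical) Scala implementation visible to the rule above.
        UnionRule(BSPResourcesRequest, ScalaBSPResourcesRequest),
    ]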
Example #4
async def bsp_compile_request(
    request: CompileParams,
    workspace: Workspace,
) -> CompileResult:
    bsp_targets = await MultiGet(
        Get(BSPBuildTargetInternal, BuildTargetIdentifier, bsp_target_id)
        for bsp_target_id in request.targets
    )

    compile_results = await MultiGet(
        Get(
            BSPCompileResult,
            CompileOneBSPTargetRequest(
                bsp_target=bsp_target,
                origin_id=request.origin_id,
                arguments=request.arguments,
            ),
        )
        for bsp_target in bsp_targets
    )

    output_digest = await Get(Digest, MergeDigests([r.output_digest for r in compile_results]))
    if output_digest != EMPTY_DIGEST:
        workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")

    status_code = StatusCode.OK
    if any(r.status != StatusCode.OK for r in compile_results):
        status_code = StatusCode.ERROR

    return CompileResult(
        origin_id=request.origin_id,
        status_code=status_code.value,
    )
Example #5
async def build_processors(jdk: InternalJdk) -> JavaParserCompiledClassfiles:
    dest_dir = "classfiles"
    parser_lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                        JavaParserToolLockfileSentinel())
    materialized_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(prefix="__toolcp",
                                 lockfile=parser_lockfile_request),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    path=_LAUNCHER_BASENAME,
                    content=_load_javaparser_launcher_source(),
                ),
                Directory(dest_dir),
            ]),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests((
            materialized_classpath.digest,
            source_digest,
        )),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=[f"{jdk.java_home}/lib/tools.jar"],
            argv=[
                "com.sun.tools.javac.Main",
                "-cp",
                ":".join(materialized_classpath.classpath_entries()),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir, ),
            description=f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir))
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
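
The final RemovePrefix re-roots the captured output: javac writes its classfiles under dest_dir ("classfiles"), and stripping that prefix leaves them at the top level of the returned digest. A toy stand-in for the path transformation (file name hypothetical):

paths = ["classfiles/JavaParserLauncher.class"]
stripped = [p.removeprefix("classfiles/") for p in paths]  # Python 3.9+
assert stripped == ["JavaParserLauncher.class"]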
Example #6
async def bsp_dependency_modules(
        request: DependencyModulesParams,
        workspace: Workspace) -> DependencyModulesResult:
    responses = await MultiGet(
        Get(ResolveOneDependencyModuleResult,
            ResolveOneDependencyModuleRequest(btgt))
        for btgt in request.targets)
    output_digest = await Get(Digest,
                              MergeDigests([r.digest for r in responses]))
    workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")
    return DependencyModulesResult(
        tuple(
            DependencyModulesItem(target=r.bsp_target_id, modules=r.modules)
            for r in responses))
Example #7
async def bsp_workspace_build_targets(
    _: WorkspaceBuildTargetsParams,
    bsp_build_targets: BSPBuildTargets,
    workspace: Workspace,
) -> WorkspaceBuildTargetsResult:
    bsp_target_results = await MultiGet(
        Get(GenerateOneBSPBuildTargetResult,
            GenerateOneBSPBuildTargetRequest(target_internal))
        for target_internal in bsp_build_targets.targets_mapping.values())
    digest = await Get(Digest,
                       MergeDigests([r.digest for r in bsp_target_results]))
    if digest != EMPTY_DIGEST:
        workspace.write_digest(digest, path_prefix=".pants.d/bsp")

    return WorkspaceBuildTargetsResult(targets=tuple(
        r.build_target for r in bsp_target_results), )
Example #8
async def export_tool(request: ExportPythonTool,
                      pex_pex: PexPEX) -> ExportResult:
    assert request.pex_request is not None

    # TODO: Unify export_virtualenv() and export_tool(), since their implementations mostly overlap.
    dest = os.path.join("python", "virtualenvs", "tools")
    pex = await Get(Pex, PexRequest, request.pex_request)
    if not request.pex_request.internal_only:
        raise ExportError(
            f"The PexRequest for {request.resolve_name} must be internal_only."
        )

    # Note that an internal-only pex will always have the `python` field set.
    # See the build_pex() rule in pex.py.
    interpreter = cast(PythonExecutable, pex.python)

    # NOTE: We add a unique-per-tool prefix to the pex_pex path to avoid conflicts when
    # multiple tools are concurrently exporting. Without this prefix all the `export_tool`
    # invocations write the pex_pex to `python/virtualenvs/tools/pex`, and the `rm -f` of
    # the pex_pex path in one export will delete the binary out from under the others.
    pex_pex_dir = f".{request.resolve_name}.tmp"
    pex_pex_dest = os.path.join("{digest_root}", pex_pex_dir)
    pex_pex_digest = await Get(Digest, AddPrefix(pex_pex.digest, pex_pex_dir))

    merged_digest = await Get(Digest,
                              MergeDigests([pex_pex_digest, pex.digest]))
    return ExportResult(
        f"virtualenv for the tool '{request.resolve_name}'",
        dest,
        digest=merged_digest,
        post_processing_cmds=[
            PostProcessingCommand(
                [
                    interpreter.path,
                    os.path.join(pex_pex_dest, pex_pex.exe),
                    os.path.join("{digest_root}", pex.name),
                    "venv",
                    "--collisions-ok",
                    "--remove=all",
                    f"{{digest_root}}/{request.resolve_name}",
                ],
                {"PEX_MODULE": "pex.tools"},
            ),
            PostProcessingCommand(["rm", "-rf", pex_pex_dest]),
        ],
    )
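
The "{digest_root}" placeholder in the post-processing commands is replaced with the actual export directory once the digest has been written out. A hypothetical sketch of that substitution (the expand() helper is made up purely to illustrate it):

def expand(argv: list[str], digest_root: str) -> list[str]:
    return [arg.format(digest_root=digest_root) for arg in argv]

cmd = expand(["{digest_root}/.mytool.tmp/pex", "{digest_root}/mytool.pex", "venv"],
             "dist/export/python/virtualenvs/tools")
print(cmd)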
Example #9
async def resolve_one_dependency_module(
    request: ResolveOneDependencyModuleRequest,
    union_membership: UnionMembership,
) -> ResolveOneDependencyModuleResult:
    bsp_target = await Get(BSPBuildTargetInternal, BuildTargetIdentifier,
                           request.bsp_target_id)
    targets = await Get(
        Targets,
        AddressSpecs,
        bsp_target.specs.address_specs,
    )

    field_sets_by_request_type: dict[Type[BSPDependencyModulesRequest],
                                     list[FieldSet]] = defaultdict(list)
    dep_module_request_types: FrozenOrderedSet[
        Type[BSPDependencyModulesRequest]] = union_membership.get(
            BSPDependencyModulesRequest)
    for tgt in targets:
        for dep_module_request_type in dep_module_request_types:
            field_set_type = dep_module_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_set = field_set_type.create(tgt)
                field_sets_by_request_type[dep_module_request_type].append(
                    field_set)

    if not field_sets_by_request_type:
        return ResolveOneDependencyModuleResult(
            bsp_target_id=request.bsp_target_id)

    responses = await MultiGet(
        Get(
            BSPDependencyModulesResult,
            BSPDependencyModulesRequest,
            dep_module_request_type(field_sets=tuple(field_sets)),
        ) for dep_module_request_type, field_sets in
        field_sets_by_request_type.items())

    modules = set(itertools.chain.from_iterable([r.modules
                                                 for r in responses]))
    digest = await Get(Digest, MergeDigests([r.digest for r in responses]))

    return ResolveOneDependencyModuleResult(
        bsp_target_id=request.bsp_target_id,
        modules=tuple(modules),
        digest=digest,
    )
Example #10
async def fetch_plugins(request: ScalaPluginsRequest) -> ScalaPlugins:
    # Fetch all the artifacts
    coarsened_targets = await Get(
        CoarsenedTargets,
        Addresses(target.address for target in request.artifacts))
    fallible_artifacts = await MultiGet(
        Get(
            FallibleClasspathEntry,
            CoursierFetchRequest(ct, resolve=request.resolve),
        ) for ct in coarsened_targets)

    artifacts = FallibleClasspathEntry.if_all_succeeded(fallible_artifacts)
    if artifacts is None:
        failed = [i for i in fallible_artifacts if i.exit_code != 0]
        raise Exception(f"Fetching local scala plugins failed: {failed}")

    merged_classpath_digest = await Get(
        Digest, MergeDigests(i.digest for i in artifacts))
    merged = ClasspathEntry.merge(merged_classpath_digest, artifacts)

    names = tuple(_plugin_name(target) for target in request.plugins)

    return ScalaPlugins(names=names, classpath=merged)
Example #11
async def compile_kotlin_source(
    kotlin: KotlinSubsystem,
    kotlinc: KotlincSubsystem,
    request: CompileKotlinSourceRequest,
) -> FallibleClasspathEntry:
    # Request classpath entries for our direct dependencies.
    dependency_cpers = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    direct_dependency_classpath_entries = dependency_cpers.if_all_succeeded()

    if direct_dependency_classpath_entries is None:
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )

    kotlin_version = kotlin.version_for_resolve(request.resolve.name)

    component_members_with_sources = tuple(
        t for t in request.component.members if t.has_field(SourcesField)
    )
    component_members_and_source_files = zip(
        component_members_with_sources,
        await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    (t.get(SourcesField),),
                    for_sources_types=(KotlinSourceField,),
                    enable_codegen=True,
                ),
            )
            for t in component_members_with_sources
        ),
    )

    plugins_ = await MultiGet(
        Get(
            KotlincPluginTargetsForTarget,
            KotlincPluginsForTargetRequest(target, request.resolve.name),
        )
        for target in request.component.members
    )
    plugins_request = KotlincPluginsRequest.from_target_plugins(plugins_, request.resolve)
    local_plugins = await Get(KotlincPlugins, KotlincPluginsRequest, plugins_request)

    component_members_and_kotlin_source_files = [
        (target, sources)
        for target, sources in component_members_and_source_files
        if sources.snapshot.digest != EMPTY_DIGEST
    ]

    if not component_members_and_kotlin_source_files:
        # Is a generator, and so exports all of its direct deps.
        exported_digest = await Get(
            Digest, MergeDigests(cpe.digest for cpe in direct_dependency_classpath_entries)
        )
        classpath_entry = ClasspathEntry.merge(exported_digest, direct_dependency_classpath_entries)
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.SUCCEEDED,
            output=classpath_entry,
            exit_code=0,
        )

    toolcp_relpath = "__toolcp"
    local_kotlinc_plugins_relpath = "__localplugincp"
    usercp = "__cp"

    user_classpath = Classpath(direct_dependency_classpath_entries, request.resolve)

    tool_classpath, sources_digest, jdk = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=kotlin_version,
                        ),
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-scripting-compiler-embeddable",
                            version=kotlin_version,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            Digest,
            MergeDigests(
                (
                    sources.snapshot.digest
                    for _, sources in component_members_and_kotlin_source_files
                )
            ),
        ),
        Get(JdkEnvironment, JdkRequest, JdkRequest.from_target(request.component)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        local_kotlinc_plugins_relpath: local_plugins.classpath.digest,
    }
    extra_nailgun_keys = tuple(extra_immutable_input_digests)
    extra_immutable_input_digests.update(user_classpath.immutable_inputs(prefix=usercp))

    classpath_arg = ":".join(user_classpath.immutable_inputs_args(prefix=usercp))

    output_file = compute_output_jar_filename(request.component)
    process_result = await Get(
        FallibleProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                *(("-classpath", classpath_arg) if classpath_arg else ()),
                "-d",
                output_file,
                *(local_plugins.args(local_kotlinc_plugins_relpath)),
                *kotlinc.args,
                *sorted(
                    itertools.chain.from_iterable(
                        sources.snapshot.files
                        for _, sources in component_members_and_kotlin_source_files
                    )
                ),
            ],
            input_digest=sources_digest,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_nailgun_keys,
            output_files=(output_file,),
            description=f"Compile {request.component} with kotlinc",
            level=LogLevel.DEBUG,
        ),
    )
    output: ClasspathEntry | None = None
    if process_result.exit_code == 0:
        output = ClasspathEntry(
            process_result.output_digest, (output_file,), direct_dependency_classpath_entries
        )

    return FallibleClasspathEntry.from_fallible_process_result(
        str(request.component),
        process_result,
        output,
    )
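
The error handling above follows the fallible-result convention used throughout these rules: compute a FallibleClasspathEntry per component, return early with DEPENDENCY_FAILED when a dependency failed, and only convert to a definite result at the end. A generic stand-in for the if_all_succeeded() helper, with made-up types:

from dataclasses import dataclass
from typing import Optional, Sequence, Tuple

@dataclass(frozen=True)
class Fallible:
    output: Optional[str]
    exit_code: int

def if_all_succeeded(results: Sequence[Fallible]) -> Optional[Tuple[str, ...]]:
    # Mirrors FallibleClasspathEntry.if_all_succeeded: None if anything failed.
    if any(r.exit_code != 0 for r in results):
        return None
    return tuple(r.output for r in results if r.output is not None)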
Example #12
async def export_virtualenv(request: _ExportVenvRequest,
                            python_setup: PythonSetup,
                            pex_pex: PexPEX) -> ExportResult:
    if request.resolve:
        interpreter_constraints = InterpreterConstraints(
            python_setup.resolves_to_interpreter_constraints.get(
                request.resolve, python_setup.interpreter_constraints))
    else:
        interpreter_constraints = InterpreterConstraints.create_from_targets(
            request.root_python_targets,
            python_setup) or InterpreterConstraints(
                python_setup.interpreter_constraints)

    min_interpreter = interpreter_constraints.snap_to_minimum(
        python_setup.interpreter_universe)
    if not min_interpreter:
        err_msg = ((
            f"The resolve '{request.resolve}' (from `[python].resolves`) has invalid interpreter "
            f"constraints, which are set via `[python].resolves_to_interpreter_constraints`: "
            f"{interpreter_constraints}. Could not determine the minimum compatible interpreter."
        ) if request.resolve else (
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter."))
        raise ExportError(err_msg)

    requirements_pex = await Get(
        Pex,
        RequirementsPexRequest(
            (tgt.address for tgt in request.root_python_targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )

    # Get the full python version (including patch #), so we can use it as the venv name.
    res = await Get(
        ProcessResult,
        PexProcess(
            pex=requirements_pex,
            description="Get interpreter version",
            argv=[
                "-c",
                "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"
            ],
        ),
    )
    py_version = res.stdout.strip().decode()

    dest = (os.path.join("python", "virtualenvs", path_safe(request.resolve))
            if request.resolve else os.path.join("python", "virtualenv"))

    merged_digest = await Get(
        Digest, MergeDigests([pex_pex.digest, requirements_pex.digest]))
    pex_pex_path = os.path.join("{digest_root}", pex_pex.exe)
    return ExportResult(
        f"virtualenv for the resolve '{request.resolve}' (using {min_interpreter})",
        dest,
        digest=merged_digest,
        post_processing_cmds=[
            PostProcessingCommand(
                [
                    pex_pex_path,
                    os.path.join("{digest_root}", requirements_pex.name),
                    "venv",
                    "--pip",
                    "--collisions-ok",
                    "--remove=all",
                    f"{{digest_root}}/{py_version}",
                ],
                {"PEX_MODULE": "pex.tools"},
            ),
            PostProcessingCommand(["rm", "-f", pex_pex_path]),
        ],
    )
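
The interpreter-version probe above is just a one-liner executed inside the requirements PEX, and its output (major.minor.patch) becomes the venv directory name under dest. Run directly it behaves like:

import sys
print(".".join(str(x) for x in sys.version_info[0:3]))  # e.g. "3.9.13"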
Example #13
async def generate_one_bsp_build_target_request(
    request: GenerateOneBSPBuildTargetRequest,
    union_membership: UnionMembership,
    build_root: BuildRoot,
) -> GenerateOneBSPBuildTargetResult:
    # Find all Pants targets that are part of this BSP build target.
    targets = await Get(Targets, AddressSpecs,
                        request.bsp_target.specs.address_specs)

    # Classify the targets by the language backends that claim to provide metadata for them.
    field_sets_by_lang_id: dict[str,
                                OrderedSet[FieldSet]] = defaultdict(OrderedSet)
    # lang_ids_by_field_set: dict[Type[FieldSet], set[str]] = defaultdict(set)
    metadata_request_types: FrozenOrderedSet[
        Type[BSPBuildTargetsMetadataRequest]] = union_membership.get(
            BSPBuildTargetsMetadataRequest)
    metadata_request_types_by_lang_id = {
        metadata_request_type.language_id: metadata_request_type
        for metadata_request_type in metadata_request_types
    }
    for tgt in targets:
        for metadata_request_type in metadata_request_types:
            field_set_type: Type[
                FieldSet] = metadata_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_sets_by_lang_id[metadata_request_type.language_id].add(
                    field_set_type.create(tgt))
                # lang_ids_by_field_set[field_set_type].add(metadata_request_type.language_id)

    # TODO: Consider how to check whether the provided languages are compatible or whether
    # compatible resolves are selected.

    # Request each language backend to provide metadata for the BuildTarget.
    metadata_results = await MultiGet(
        Get(
            BSPBuildTargetsMetadataResult,
            BSPBuildTargetsMetadataRequest,
            metadata_request_types_by_lang_id[lang_id](
                field_sets=tuple(field_sets)),
        ) for lang_id, field_sets in field_sets_by_lang_id.items())
    metadata_results_by_lang_id = {
        lang_id: metadata_result
        for lang_id, metadata_result in zip(field_sets_by_lang_id.keys(),
                                            metadata_results)
    }

    # Pretend to merge the metadata into a single piece of metadata, but really just choose the metadata
    # from the last provider.
    metadata_merge_order = find_metadata_merge_order([
        metadata_request_types_by_lang_id[lang_id]
        for lang_id in field_sets_by_lang_id.keys()
    ])
    # TODO: None if no metadata obtained.
    metadata = metadata_results_by_lang_id[
        metadata_merge_order[-1].language_id].metadata
    digest = await Get(Digest,
                       MergeDigests([r.digest for r in metadata_results]))

    # Determine "base directory" for this build target using source roots.
    # TODO: This actually has nothing to do with source roots. It should probably be computed as an ancestor
    # directory or else be configurable by the user. It is used as a hint in IntelliJ for where to place the
    # corresponding IntelliJ module.
    source_info = await Get(BSPBuildTargetSourcesInfo, BSPBuildTargetInternal,
                            request.bsp_target)
    if source_info.source_roots:
        roots = [
            build_root.pathlib_path.joinpath(p)
            for p in source_info.source_roots
        ]
    else:
        roots = [build_root.pathlib_path]

    return GenerateOneBSPBuildTargetResult(
        build_target=BuildTarget(
            id=BuildTargetIdentifier(f"pants:{request.bsp_target.name}"),
            display_name=request.bsp_target.name,
            base_directory=roots[0].as_uri(),
            tags=(),
            capabilities=BuildTargetCapabilities(
                can_compile=any(r.can_compile for r in metadata_results),
                can_test=any(r.can_test for r in metadata_results),
                can_run=any(r.can_run for r in metadata_results),
                can_debug=any(r.can_debug for r in metadata_results),
            ),
            language_ids=tuple(sorted(field_sets_by_lang_id.keys())),
            dependencies=(),
            data=metadata,
        ),
        digest=digest,
    )
Example #14
async def bsp_compile_request(
    request: CompileParams,
    bsp_context: BSPContext,
    union_membership: UnionMembership,
    workspace: Workspace,
) -> CompileResult:
    compile_field_sets = union_membership.get(BSPCompileFieldSet)
    compile_results = []
    for bsp_target_id in request.targets:
        # TODO: Use MultiGet to request all of these concurrently.

        wrapped_tgt = await Get(WrappedTarget, AddressInput,
                                bsp_target_id.address_input)
        tgt = wrapped_tgt.target
        _logger.info(f"tgt = {tgt}")
        applicable_field_set_impls = []
        for impl in compile_field_sets:
            if impl.is_applicable(tgt):
                applicable_field_set_impls.append(impl)
        _logger.info(f"applicable_field_sets = {applicable_field_set_impls}")
        if len(applicable_field_set_impls) == 0:
            raise ValueError(f"no applicable field set for: {tgt.address}")
        elif len(applicable_field_set_impls) > 1:
            raise ValueError(
                f"ambiguous field set mapping, >1 for: {tgt.address}")

        field_set = applicable_field_set_impls[0].create(tgt)

        task_id = TaskId(id=request.origin_id or uuid.uuid4().hex)

        bsp_context.notify_client(
            TaskStartParams(
                task_id=task_id,
                event_time=int(time.time() * 1000),
                data=CompileTask(target=bsp_target_id),
            ))

        compile_result = await Get(BSPCompileResult, BSPCompileFieldSet,
                                   field_set)
        compile_results.append(compile_result)

        bsp_context.notify_client(
            TaskFinishParams(
                task_id=task_id,
                event_time=int(time.time() * 1000),
                status=compile_result.status,
                data=CompileReport(target=bsp_target_id,
                                   origin_id=request.origin_id,
                                   errors=0,
                                   warnings=0),
            ))

    output_digest = await Get(
        Digest, MergeDigests([r.output_digest for r in compile_results]))
    if output_digest != EMPTY_DIGEST:
        workspace.write_digest(output_digest, path_prefix=".pants.d/bsp")

    status_code = StatusCode.OK
    if any(r.status != StatusCode.OK for r in compile_results):
        status_code = StatusCode.ERROR

    return CompileResult(
        origin_id=request.origin_id,
        status_code=status_code.value,
    )
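
The TODO at the top of the loop is about batching: awaiting each Get inside the loop serializes the compiles, while a single MultiGet lets the engine run them concurrently (as the later version of this rule in Example #4 does). The same trade-off expressed in plain asyncio:

import asyncio

async def compile_one(target: str) -> str:
    await asyncio.sleep(0)  # stand-in for an engine round trip
    return f"compiled {target}"

async def main() -> None:
    targets = ["//a:a", "//b:b", "//c:c"]
    sequential = [await compile_one(t) for t in targets]  # one at a time
    batched = await asyncio.gather(*(compile_one(t) for t in targets))  # fan-out
    assert sequential == list(batched)

asyncio.run(main())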
Example #15
async def collect_fixture_configs(
    _request: CollectFixtureConfigsRequest,
    pytest: PyTest,
    python_setup: PythonSetup,
    test_extra_env: TestExtraEnv,
    targets: Targets,
) -> CollectedJVMLockfileFixtureConfigs:
    addresses = [tgt.address for tgt in targets]
    transitive_targets = await Get(TransitiveTargets,
                                   TransitiveTargetsRequest(addresses))
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    pytest_pex, requirements_pex, prepared_sources, root_sources = await MultiGet(
        Get(
            Pex,
            PexRequest(
                output_filename="pytest.pex",
                requirements=pytest.pex_requirements(),
                interpreter_constraints=interpreter_constraints,
                internal_only=True,
            ),
        ),
        Get(Pex, RequirementsPexRequest(addresses)),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(all_targets,
                                     include_files=True,
                                     include_resources=True),
        ),
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(targets),
        ),
    )

    script_content = FileContent(path="collect-fixtures.py",
                                 content=COLLECTION_SCRIPT.encode(),
                                 is_executable=True)
    script_digest = await Get(Digest, CreateDigest([script_content]))

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=EntryPoint(PurePath(script_content.path).stem),
            sources=script_digest,
            internal_only=True,
            pex_path=[
                pytest_pex,
                requirements_pex,
            ],
        ),
    )
    config_file_dirs = list(
        group_by_dir(prepared_sources.source_files.files).keys())
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(config_file_dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    pytest_config_digest = config_files.snapshot.digest

    input_digest = await Get(
        Digest,
        MergeDigests((
            prepared_sources.source_files.snapshot.digest,
            pytest_config_digest,
        )),
    )

    extra_env = {
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=[
                name for name in root_sources.source_files.files
                if name.endswith(".py")
            ],
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=("tests.json", ),
            description="Collect test lockfile requirements from all tests.",
            level=LogLevel.DEBUG,
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )

    result = await Get(ProcessResult, Process, process)
    digest_contents = await Get(DigestContents, Digest, result.output_digest)
    assert len(digest_contents) == 1
    assert digest_contents[0].path == "tests.json"
    raw_config_data = json.loads(digest_contents[0].content)

    configs = []
    for item in raw_config_data:
        config = JVMLockfileFixtureConfig(
            definition=JVMLockfileFixtureDefinition.from_kwargs(
                item["kwargs"]),
            test_file_path=item["test_file_path"],
        )
        configs.append(config)

    return CollectedJVMLockfileFixtureConfigs(configs)
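
group_by_dir is used above to derive the candidate directories for pytest config-file discovery from the prepared sources. A minimal stand-in with the same shape as the helper:

import os
from collections import defaultdict

def group_by_dir(paths):
    # Map each parent directory to the file names directly inside it.
    grouped: dict[str, set[str]] = defaultdict(set)
    for path in paths:
        dirname, filename = os.path.split(path)
        grouped[dirname].add(filename)
    return grouped

print(list(group_by_dir(["a/b/test_x.py", "a/test_y.py"])))  # ['a/b', 'a']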
Example #16
async def generate_one_bsp_build_target_request(
    request: GenerateOneBSPBuildTargetRequest,
    union_membership: UnionMembership,
    build_root: BuildRoot,
) -> GenerateOneBSPBuildTargetResult:
    # Find all Pants targets that are part of this BSP build target.
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)

    # Determine whether the targets are compilable.
    can_compile = any(
        req_type.field_set_type.is_applicable(t)  # type: ignore[misc]
        for req_type in union_membership[BSPCompileRequest] for t in targets)

    # Classify the targets by the language backends that claim to provide metadata for them.
    field_sets_by_request_type: dict[type[BSPBuildTargetsMetadataRequest],
                                     OrderedSet[FieldSet]] = defaultdict(
                                         OrderedSet)
    metadata_request_types: FrozenOrderedSet[
        Type[BSPBuildTargetsMetadataRequest]] = union_membership.get(
            BSPBuildTargetsMetadataRequest)
    metadata_request_types_by_lang_id: dict[
        str, type[BSPBuildTargetsMetadataRequest]] = {}
    for metadata_request_type in metadata_request_types:
        previous = metadata_request_types_by_lang_id.get(
            metadata_request_type.language_id)
        if previous:
            raise ValueError(
                f"Multiple implementations claim to support `{metadata_request_type.language_id}`:"
                f"{bullet_list([previous.__name__, metadata_request_type.__name__])}"
                "\n"
                "Do you have conflicting language support backends enabled?")
        metadata_request_types_by_lang_id[
            metadata_request_type.language_id] = metadata_request_type

    for tgt in targets:
        for metadata_request_type in metadata_request_types:
            field_set_type: Type[
                FieldSet] = metadata_request_type.field_set_type
            if field_set_type.is_applicable(tgt):
                field_sets_by_request_type[metadata_request_type].add(
                    field_set_type.create(tgt))

    # Request each language backend to provide metadata for the BuildTarget, and then merge it.
    metadata_results = await MultiGet(
        Get(
            BSPBuildTargetsMetadataResult,
            BSPBuildTargetsMetadataRequest,
            request_type(field_sets=tuple(field_sets)),
        ) for request_type, field_sets in field_sets_by_request_type.items())
    metadata = merge_metadata(
        list(zip(field_sets_by_request_type.keys(), metadata_results)))

    digest = await Get(Digest,
                       MergeDigests([r.digest for r in metadata_results]))

    # Determine "base directory" for this build target using source roots.
    # TODO: This actually has nothing to do with source roots. It should probably be computed as an ancestor
    # directory or else be configurable by the user. It is used as a hint in IntelliJ for where to place the
    # corresponding IntelliJ module.
    source_info = await Get(BSPBuildTargetSourcesInfo, BSPBuildTargetInternal,
                            request.bsp_target)
    if source_info.source_roots:
        roots = [
            build_root.pathlib_path.joinpath(p)
            for p in source_info.source_roots
        ]
    else:
        roots = []

    base_directory: Path | None = None
    if request.bsp_target.definition.base_directory:
        base_directory = build_root.pathlib_path.joinpath(
            request.bsp_target.definition.base_directory)
    elif roots:
        base_directory = roots[0]

    return GenerateOneBSPBuildTargetResult(
        build_target=BuildTarget(
            id=BuildTargetIdentifier(f"pants:{request.bsp_target.name}"),
            display_name=request.bsp_target.name,
            base_directory=base_directory.as_uri() if base_directory else None,
            tags=(),
            capabilities=BuildTargetCapabilities(
                can_compile=can_compile,
                can_debug=False,
                # TODO: See https://github.com/pantsbuild/pants/issues/15050.
                can_run=False,
                can_test=False,
            ),
            language_ids=tuple(
                sorted(req.language_id for req in field_sets_by_request_type)),
            dependencies=(),
            data=metadata,
        ),
        digest=digest,
    )
Example #17
async def compile_bsp_target(
    request: CompileOneBSPTargetRequest,
    bsp_context: BSPContext,
    union_membership: UnionMembership,
) -> BSPCompileResult:
    targets = await Get(Targets, BSPBuildTargetInternal, request.bsp_target)
    compile_request_types: FrozenOrderedSet[Type[BSPCompileRequest]] = union_membership.get(
        BSPCompileRequest
    )
    field_sets_by_request_type: dict[Type[BSPCompileRequest], set[FieldSet]] = defaultdict(set)
    for target in targets:
        for compile_request_type in compile_request_types:
            field_set_type = compile_request_type.field_set_type
            if field_set_type.is_applicable(target):
                field_set = field_set_type.create(target)
                field_sets_by_request_type[compile_request_type].add(field_set)

    task_id = TaskId(
        id=uuid.uuid4().hex, parents=((request.origin_id,) if request.origin_id else None)
    )
    message = f"Compilation of {request.bsp_target.bsp_target_id.uri}"

    bsp_context.notify_client(
        TaskStartParams(
            task_id=task_id,
            event_time=int(time.time() * 1000),
            message=message,
            data=CompileTask(target=request.bsp_target.bsp_target_id),
        )
    )

    compile_results = await MultiGet(
        Get(
            BSPCompileResult,
            BSPCompileRequest,
            compile_request_type(
                bsp_target=request.bsp_target, field_sets=tuple(field_sets), task_id=task_id
            ),
        )
        for compile_request_type, field_sets in field_sets_by_request_type.items()
    )

    status = StatusCode.OK
    if any(r.status != StatusCode.OK for r in compile_results):
        status = StatusCode.ERROR

    bsp_context.notify_client(
        TaskFinishParams(
            task_id=task_id,
            event_time=int(time.time() * 1000),
            message=message,
            status=status,
            data=CompileReport(
                target=request.bsp_target.bsp_target_id,
                origin_id=request.origin_id,
                errors=0,
                warnings=0,
            ),
        )
    )

    output_digest = await Get(Digest, MergeDigests([r.output_digest for r in compile_results]))

    return BSPCompileResult(
        status=status,
        output_digest=output_digest,
    )
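
The notify_client calls bracket the compile with the Build Server Protocol task lifecycle: a taskStart notification carrying a CompileTask, then a taskFinish carrying a CompileReport plus the aggregate status. Sketched as the JSON-RPC notifications the client would observe (method names per the BSP spec; values illustrative):

import time
import uuid

task_id = {"id": uuid.uuid4().hex}
start = {"method": "build/taskStart",
         "params": {"taskId": task_id,
                    "eventTime": int(time.time() * 1000),
                    "message": "Compilation of <target uri>"}}
finish = {"method": "build/taskFinish",
          "params": {"taskId": task_id, "status": 1}}  # BSP StatusCode: 1 = OK, 2 = ERROR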
Example #18
async def export_virtualenv(request: _ExportVenvRequest,
                            python_setup: PythonSetup,
                            pex_pex: PexPEX) -> ExportResult:
    if request.resolve:
        interpreter_constraints = InterpreterConstraints(
            python_setup.resolves_to_interpreter_constraints.get(
                request.resolve, python_setup.interpreter_constraints))
    else:
        interpreter_constraints = InterpreterConstraints.create_from_targets(
            request.root_python_targets,
            python_setup) or InterpreterConstraints(
                python_setup.interpreter_constraints)

    requirements_pex = await Get(
        Pex,
        RequirementsPexRequest(
            (tgt.address for tgt in request.root_python_targets),
            hardcoded_interpreter_constraints=interpreter_constraints,
        ),
    )

    # Note that an internal-only pex will always have the `python` field set.
    # See the build_pex() rule in pex.py.
    interpreter = cast(PythonExecutable, requirements_pex.python)

    # Get the full python version (including patch #), so we can use it as the venv name.
    res = await Get(
        ProcessResult,
        Process(
            description="Get interpreter version",
            argv=[
                interpreter.path,
                "-c",
                "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))",
            ],
        ),
    )
    py_version = res.stdout.strip().decode()

    dest = (os.path.join("python", "virtualenvs", path_safe(request.resolve))
            if request.resolve else os.path.join("python", "virtualenv"))

    merged_digest = await Get(
        Digest, MergeDigests([pex_pex.digest, requirements_pex.digest]))
    pex_pex_path = os.path.join("{digest_root}", pex_pex.exe)
    maybe_resolve_str = f"for the resolve '{request.resolve}' " if request.resolve else ""
    return ExportResult(
        f"virtualenv {maybe_resolve_str}(using Python {py_version})",
        dest,
        digest=merged_digest,
        post_processing_cmds=[
            PostProcessingCommand(
                [
                    interpreter.path,
                    pex_pex_path,
                    os.path.join("{digest_root}", requirements_pex.name),
                    "venv",
                    "--pip",
                    "--collisions-ok",
                    "--remove=all",
                    f"{{digest_root}}/{py_version}",
                ],
                {"PEX_MODULE": "pex.tools"},
            ),
            PostProcessingCommand(["rm", "-f", pex_pex_path]),
        ],
    )
Example #19
0
async def setup_helm(helm_subsystem: HelmSubsystem,
                     global_plugins: HelmPlugins) -> HelmBinary:
    downloaded_binary, empty_dirs_digest = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            helm_subsystem.get_request(Platform.current)),
        Get(
            Digest,
            CreateDigest([
                Directory(_HELM_CONFIG_DIR),
                Directory(_HELM_DATA_DIR),
            ]),
        ),
    )

    tool_relpath = "__helm"
    immutable_input_digests = {tool_relpath: downloaded_binary.digest}

    helm_path = os.path.join(tool_relpath, downloaded_binary.exe)
    helm_env = {
        "HELM_CACHE_HOME": _HELM_CACHE_DIR,
        "HELM_CONFIG_HOME": _HELM_CONFIG_DIR,
        "HELM_DATA_HOME": _HELM_DATA_DIR,
    }

    # Seed an input digest that will be extended (re-merged) during the setup process
    mutable_input_digest = empty_dirs_digest

    # Install all global Helm plugins
    if global_plugins:
        prefixed_plugins_digests = await MultiGet(
            Get(
                Digest,
                AddPrefix(plugin.digest,
                          os.path.join(_HELM_DATA_DIR, "plugins",
                                       plugin.name)),
            ) for plugin in global_plugins)
        mutable_input_digest = await Get(
            Digest,
            MergeDigests([mutable_input_digest, *prefixed_plugins_digests]))

    updated_config_digest, updated_data_digest = await MultiGet(
        Get(
            Digest,
            DigestSubset(mutable_input_digest,
                         PathGlobs([os.path.join(_HELM_CONFIG_DIR, "**")])),
        ),
        Get(
            Digest,
            DigestSubset(mutable_input_digest,
                         PathGlobs([os.path.join(_HELM_DATA_DIR, "**")])),
        ),
    )
    config_subset_digest, data_subset_digest = await MultiGet(
        Get(Digest, RemovePrefix(updated_config_digest, _HELM_CONFIG_DIR)),
        Get(Digest, RemovePrefix(updated_data_digest, _HELM_DATA_DIR)),
    )

    setup_immutable_digests = {
        **immutable_input_digests,
        _HELM_CONFIG_DIR: config_subset_digest,
        _HELM_DATA_DIR: data_subset_digest,
    }

    local_env = await Get(Environment, EnvironmentRequest(["HOME", "PATH"]))
    return HelmBinary(
        path=helm_path,
        helm_env=helm_env,
        local_env=local_env,
        immutable_input_digests=setup_immutable_digests,
    )
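
A downstream rule would consume the returned HelmBinary by handing its pieces straight to a Process: the executable and its config/data trees arrive as immutable inputs, and the env vars point Helm at those trees. A hypothetical usage sketch built only from the fields constructed above:

# `helm` is the HelmBinary returned by setup_helm(); parameters assumed to
# mirror the JvmProcess/Process usage elsewhere in this section.
process = Process(
    argv=[helm.path, "version"],
    env={**helm.helm_env, **helm.local_env},
    immutable_input_digests=helm.immutable_input_digests,
    description="Show the Helm version",
)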