Example 1
async def strip_source_roots(source_files: SourceFiles) -> StrippedSourceFiles:
    """Removes source roots from a snapshot.

    E.g. `src/python/pants/util/strutil.py` -> `pants/util/strutil.py`.
    """
    if not source_files.snapshot.files:
        return StrippedSourceFiles(source_files.snapshot)

    if source_files.unrooted_files:
        rooted_files = set(source_files.snapshot.files) - set(
            source_files.unrooted_files)
        rooted_files_snapshot = await Get(
            Snapshot,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(rooted_files)))
    else:
        rooted_files_snapshot = source_files.snapshot

    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(rooted_files_snapshot.files),
    )

    source_roots_to_files = defaultdict(set)
    for f, root in source_roots_result.path_to_root.items():
        source_roots_to_files[root.path].add(str(f))

    if len(source_roots_to_files) == 1:
        source_root = next(iter(source_roots_to_files.keys()))
        if source_root == ".":
            resulting_snapshot = rooted_files_snapshot
        else:
            resulting_snapshot = await Get(
                Snapshot,
                RemovePrefix(rooted_files_snapshot.digest, source_root))
    else:
        digest_subsets = await MultiGet(
            Get(Digest,
                DigestSubset(rooted_files_snapshot.digest, PathGlobs(files)))
            for files in source_roots_to_files.values())
        resulting_digests = await MultiGet(
            Get(Digest, RemovePrefix(digest, source_root))
            for digest, source_root in zip(digest_subsets,
                                           source_roots_to_files.keys()))
        resulting_snapshot = await Get(Snapshot,
                                       MergeDigests(resulting_digests))

    # Add the unrooted files back in.
    if source_files.unrooted_files:
        unrooted_files_digest = await Get(
            Digest,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(source_files.unrooted_files)),
        )
        resulting_snapshot = await Get(
            Snapshot,
            MergeDigests((resulting_snapshot.digest, unrooted_files_digest)))

    return StrippedSourceFiles(resulting_snapshot)
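The heavy lifting above happens on digests, but the grouping step is plain Python. Here is a minimal, engine-free sketch of the same semantics, assuming string paths and already-resolved roots (the real rule resolves roots via SourceRootsRequest and strips prefixes with RemovePrefix on digests; the helper name below is hypothetical):

from collections import defaultdict

def strip_roots(path_to_root: dict[str, str]) -> dict[str, list[str]]:
    # Mirror the defaultdict grouping in the rule above, then drop each
    # file's root prefix ("." is the repo root, so nothing to strip).
    roots_to_stripped: dict[str, list[str]] = defaultdict(list)
    for path, root in path_to_root.items():
        stripped = path if root == "." else path[len(root) + 1:]
        roots_to_stripped[root].append(stripped)
    return dict(roots_to_stripped)

assert strip_roots({"src/python/pants/util/strutil.py": "src/python"}) == {
    "src/python": ["pants/util/strutil.py"]
}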
Example 2
def test_source_roots_request() -> None:
    rule_runner = RuleRunner(rules=[
        *source_root_rules(),
        QueryRule(SourceRootsResult, (SourceRootsRequest,
                                      OptionsBootstrapper)),
    ])
    req = SourceRootsRequest(
        files=(PurePath("src/python/foo/bar.py"),
               PurePath("tests/python/foo/bar_test.py")),
        dirs=(PurePath("src/python/foo"), PurePath("src/python/baz/qux")),
    )
    res = rule_runner.request_product(
        SourceRootsResult,
        [
            req,
            create_options_bootstrapper(
                args=["--source-root-patterns=['src/python','tests/python']"]),
        ],
    )
    assert {
        PurePath("src/python/foo/bar.py"): SourceRoot("src/python"),
        PurePath("tests/python/foo/bar_test.py"): SourceRoot("tests/python"),
        PurePath("src/python/foo"): SourceRoot("src/python"),
        PurePath("src/python/baz/qux"): SourceRoot("src/python"),
    } == dict(res.path_to_root)
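The assertions above hinge on how a path is matched to a configured root. A simplified analogue of that matching, assuming plain prefix semantics (real Pants patterns also support globs such as src/* and marker-file detection; the helper below is illustrative):

from pathlib import PurePath

def find_root(path: PurePath, patterns: list[str]) -> str | None:
    # A pattern is this path's root if the path equals it or lives under it.
    for pattern in patterns:
        if path == PurePath(pattern) or PurePath(pattern) in path.parents:
            return pattern
    return None

assert find_root(PurePath("src/python/foo/bar.py"),
                 ["src/python", "tests/python"]) == "src/python"
assert find_root(PurePath("src/python/baz/qux"),
                 ["src/python", "tests/python"]) == "src/python"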
Example 3
def test_source_roots_request(self) -> None:
    req = SourceRootsRequest(
        files=(PurePath("src/python/foo/bar.py"),
               PurePath("tests/python/foo/bar_test.py")),
        dirs=(PurePath("src/python/foo"), PurePath("src/python/baz/qux")),
    )
    res = self.request_single_product(
        SourceRootsResult,
        Params(
            req,
            create_options_bootstrapper(args=[
                "--source-root-patterns=['src/python','tests/python']"
            ]),
        ),
    )
    assert {
        PurePath("src/python/foo/bar.py"): SourceRoot("src/python"),
        PurePath("tests/python/foo/bar_test.py"): SourceRoot("tests/python"),
        PurePath("src/python/foo"): SourceRoot("src/python"),
        PurePath("src/python/baz/qux"): SourceRoot("src/python"),
    } == dict(res.path_to_root)
Example 4
async def resolve_bsp_build_target_source_roots(
    bsp_target: BSPBuildTargetInternal, ) -> BSPBuildTargetSourcesInfo:
    targets = await Get(Targets, AddressSpecs, bsp_target.specs.address_specs)
    targets_with_sources = [
        tgt for tgt in targets if tgt.has_field(SourcesField)
    ]
    sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
        for tgt in targets_with_sources)
    merged_source_files: set[str] = set()
    for sp in sources_paths:
        merged_source_files.update(sp.files)
    source_roots_result = await Get(
        SourceRootsResult, SourceRootsRequest,
        SourceRootsRequest.for_files(merged_source_files))
    source_root_paths = {
        x.path
        for x in source_roots_result.path_to_root.values()
    }
    return BSPBuildTargetSourcesInfo(
        source_files=frozenset(merged_source_files),
        source_roots=frozenset(source_root_paths),
    )
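MultiGet awaits a batch of Gets concurrently, which is why the rule gathers SourcesPaths for every target in one call instead of looping. A rough plain-asyncio analogue of that batching (illustrative only; the Pants engine schedules Gets itself, and the names here are stand-ins):

import asyncio

async def fetch_paths(target: str) -> list[str]:
    await asyncio.sleep(0)  # stand-in for an engine round-trip
    return [f"{target}/lib.py"]

async def main() -> None:
    targets = ["src/a", "src/b"]
    # Like `await MultiGet(Get(...) for tgt in ...)`: issue all requests
    # together and await them as one batch.
    results = await asyncio.gather(*(fetch_paths(t) for t in targets))
    merged = {f for paths in results for f in paths}
    assert merged == {"src/a/lib.py", "src/b/lib.py"}

asyncio.run(main())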
Example 5
async def find_putative_targets(
    req: PutativePythonTargetsRequest,
    all_owned_sources: AllOwnedSources,
    python_setup: PythonSetup,
) -> PutativeTargets:
    # Find library/test/test_util targets.

    all_py_files_globs: PathGlobs = req.search_paths.path_globs("*.py")
    all_py_files = await Get(Paths, PathGlobs, all_py_files_globs)
    unowned_py_files = set(all_py_files.files) - set(all_owned_sources)
    classified_unowned_py_files = classify_source_files(unowned_py_files)
    pts = []
    for tgt_type, paths in classified_unowned_py_files.items():
        for dirname, filenames in group_by_dir(paths).items():
            if issubclass(tgt_type, PythonTestsGeneratorTarget):
                name = "tests"
                kwargs = {"name": name}
            elif issubclass(tgt_type, PythonTestUtilsGeneratorTarget):
                name = "test_utils"
                kwargs = {"name": name}
            else:
                name = os.path.basename(dirname)
                kwargs = {}
            if (
                python_setup.tailor_ignore_solitary_init_files
                and tgt_type == PythonSourcesGeneratorTarget
                and filenames == {"__init__.py"}
            ):
                continue
            pts.append(
                PutativeTarget.for_target_type(
                    tgt_type, dirname, name, sorted(filenames), kwargs=kwargs
                )
            )

    if python_setup.tailor_requirements_targets:
        # Find requirements files.
        all_requirements_files_globs: PathGlobs = req.search_paths.path_globs("*requirements*.txt")
        all_requirements_files = await Get(Paths, PathGlobs, all_requirements_files_globs)
        unowned_requirements_files = set(all_requirements_files.files) - set(all_owned_sources)
        for req_file in unowned_requirements_files:
            path, name = os.path.split(req_file)
            pts.append(
                PutativeTarget(
                    path=path,
                    # python_requirements is a macro and doesn't take a name argument, but the
                    # PutativeTarget still needs a name for display purposes.
                    name=name,
                    type_alias="python_requirements",
                    triggering_sources=[req_file],
                    owned_sources=[req_file],
                    addressable=False,
                    kwargs={} if name == "requirements.txt" else {"requirements_relpath": name},
                )
            )

    if python_setup.tailor_pex_binary_targets:
        # Find binary targets.

        # Get all files whose content indicates that they are entry points.
        digest_contents = await Get(DigestContents, PathGlobs, all_py_files_globs)
        entry_points = [
            file_content.path
            for file_content in digest_contents
            if is_entry_point(file_content.content)
        ]

        # Get the modules for these entry points.
        src_roots = await Get(
            SourceRootsResult, SourceRootsRequest, SourceRootsRequest.for_files(entry_points)
        )
        module_to_entry_point = {}
        for entry_point in entry_points:
            entry_point_path = PurePath(entry_point)
            src_root = src_roots.path_to_root[entry_point_path]
            stripped_entry_point = entry_point_path.relative_to(src_root.path)
            module = PythonModule.create_from_stripped_path(stripped_entry_point)
            module_to_entry_point[module.module] = entry_point

        # Get existing binary targets for these entry points.
        entry_point_dirs = {os.path.dirname(entry_point) for entry_point in entry_points}
        possible_existing_binary_targets = await Get(
            UnexpandedTargets, AddressSpecs(AscendantAddresses(d) for d in entry_point_dirs)
        )
        possible_existing_binary_entry_points = await MultiGet(
            Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest(t[PexEntryPointField]))
            for t in possible_existing_binary_targets
            if t.has_field(PexEntryPointField)
        )
        possible_existing_entry_point_modules = {
            rep.val.module for rep in possible_existing_binary_entry_points if rep.val
        }
        unowned_entry_point_modules = (
            module_to_entry_point.keys() - possible_existing_entry_point_modules
        )

        # Generate new targets for entry points that don't already have one.
        for entry_point_module in unowned_entry_point_modules:
            entry_point = module_to_entry_point[entry_point_module]
            path, fname = os.path.split(entry_point)
            name = os.path.splitext(fname)[0]
            pts.append(
                PutativeTarget.for_target_type(
                    target_type=PexBinary,
                    path=path,
                    name=name,
                    triggering_sources=tuple(),
                    kwargs={"name": name, "entry_point": fname},
                )
            )

    return PutativeTargets(pts)
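group_by_dir is what turns the classified files into one putative target per directory. A hedged re-implementation showing the shape of its output (the real helper lives in pants.util.dirutil and may differ in detail):

import os
from collections import defaultdict

def group_by_dir(paths: set[str]) -> dict[str, set[str]]:
    # Map each directory to the set of file names directly inside it.
    ret: dict[str, set[str]] = defaultdict(set)
    for path in paths:
        dirname, filename = os.path.split(path)
        ret[dirname].add(filename)
    return dict(ret)

assert group_by_dir({"src/a/x.py", "src/a/y.py", "src/b/z.py"}) == {
    "src/a": {"x.py", "y.py"},
    "src/b": {"z.py"},
}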
Example 6
async def find_putative_targets(
    req: PutativePythonTargetsRequest,
    all_owned_sources: AllOwnedSources,
    python_setup: PythonSetup,
) -> PutativeTargets:
    pts = []

    if python_setup.tailor_source_targets:
        # Find library/test/test_util targets.
        all_py_files_globs: PathGlobs = req.path_globs("*.py", "*.pyi")
        all_py_files = await Get(Paths, PathGlobs, all_py_files_globs)
        unowned_py_files = set(all_py_files.files) - set(all_owned_sources)
        classified_unowned_py_files = classify_source_files(unowned_py_files)
        for tgt_type, paths in classified_unowned_py_files.items():
            for dirname, filenames in group_by_dir(paths).items():
                name: str | None
                if issubclass(tgt_type, PythonTestsGeneratorTarget):
                    name = "tests"
                elif issubclass(tgt_type, PythonTestUtilsGeneratorTarget):
                    name = "test_utils"
                else:
                    name = None
                if (python_setup.tailor_ignore_solitary_init_files
                        and tgt_type == PythonSourcesGeneratorTarget
                        and filenames == {"__init__.py"}):
                    continue
                pts.append(
                    PutativeTarget.for_target_type(
                        tgt_type,
                        path=dirname,
                        name=name,
                        triggering_sources=sorted(filenames)))

    if python_setup.tailor_requirements_targets:
        # Find requirements files.
        (
            all_requirements_files,
            all_pipenv_lockfile_files,
            all_pyproject_toml_contents,
        ) = await MultiGet(
            Get(DigestContents, PathGlobs,
                req.path_globs("*requirements*.txt")),
            Get(DigestContents, PathGlobs, req.path_globs("Pipfile.lock")),
            Get(DigestContents, PathGlobs, req.path_globs("pyproject.toml")),
        )

        def add_req_targets(files: Iterable[FileContent], alias: str,
                            target_name: str) -> None:
            contents = {i.path: i.content for i in files}
            unowned_files = set(contents) - set(all_owned_sources)
            for fp in unowned_files:
                path, name = os.path.split(fp)

                try:
                    validate(fp, contents[fp], alias)
                except Exception as e:
                    logger.warning(
                        f"An error occurred when validating `{fp}`: {e}.\n\n"
                        "You'll need to create targets for its contents manually.\n"
                        "To silence this error in future, see "
                        "https://www.pantsbuild.org/docs/reference-tailor#section-ignore-paths \n"
                    )
                    continue

                pts.append(
                    PutativeTarget(
                        path=path,
                        name=target_name,
                        type_alias=alias,
                        triggering_sources=[fp],
                        owned_sources=[name],
                        kwargs=({} if alias != "python_requirements"
                                or name == "requirements.txt" else {
                                    "source": name
                                }),
                    ))

        def validate(path: str, contents: bytes, alias: str) -> None:
            if alias == "python_requirements":
                return validate_python_requirements(path, contents)
            elif alias == "pipenv_requirements":
                return validate_pipenv_requirements(contents)
            elif alias == "poetry_requirements":
                return validate_poetry_requirements(contents)

        def validate_python_requirements(path: str, contents: bytes) -> None:
            for _ in parse_requirements_file(contents.decode(), rel_path=path):
                pass

        def validate_pipenv_requirements(contents: bytes) -> None:
            parse_pipenv_requirements(contents)

        def validate_poetry_requirements(contents: bytes) -> None:
            p = PyProjectToml(PurePath(), PurePath(), contents.decode())
            parse_pyproject_toml(p)

        add_req_targets(all_requirements_files, "python_requirements", "reqs")
        add_req_targets(all_pipenv_lockfile_files, "pipenv_requirements",
                        "pipenv")
        add_req_targets(
            {
                fc
                for fc in all_pyproject_toml_contents
                if b"[tool.poetry" in fc.content
            },
            "poetry_requirements",
            "poetry",
        )

    if python_setup.tailor_pex_binary_targets:
        # Find binary targets.

        # Get all files whose content indicates that they are entry points or are __main__.py files.
        digest_contents = await Get(DigestContents, PathGlobs,
                                    all_py_files_globs)
        all_main_py = await Get(Paths, PathGlobs,
                                req.path_globs("__main__.py"))
        entry_points = [
            file_content.path for file_content in digest_contents
            if is_entry_point(file_content.content)
        ] + list(all_main_py.files)

        # Get the modules for these entry points.
        src_roots = await Get(SourceRootsResult, SourceRootsRequest,
                              SourceRootsRequest.for_files(entry_points))
        module_to_entry_point = {}
        for entry_point in entry_points:
            entry_point_path = PurePath(entry_point)
            src_root = src_roots.path_to_root[entry_point_path]
            stripped_entry_point = entry_point_path.relative_to(src_root.path)
            module = module_from_stripped_path(stripped_entry_point)
            module_to_entry_point[module] = entry_point

        # Get existing binary targets for these entry points.
        entry_point_dirs = {
            os.path.dirname(entry_point)
            for entry_point in entry_points
        }
        possible_existing_binary_targets = await Get(
            UnexpandedTargets,
            RawSpecs(
                ancestor_globs=tuple(
                    AncestorGlobSpec(d) for d in entry_point_dirs),
                description_of_origin="the `pex_binary` tailor rule",
            ),
        )
        possible_existing_binary_entry_points = await MultiGet(
            Get(ResolvedPexEntryPoint,
                ResolvePexEntryPointRequest(t[PexEntryPointField]))
            for t in possible_existing_binary_targets
            if t.has_field(PexEntryPointField))
        possible_existing_entry_point_modules = {
            rep.val.module
            for rep in possible_existing_binary_entry_points if rep.val
        }
        unowned_entry_point_modules = (module_to_entry_point.keys() -
                                       possible_existing_entry_point_modules)

        # Generate new targets for entry points that don't already have one.
        for entry_point_module in unowned_entry_point_modules:
            entry_point = module_to_entry_point[entry_point_module]
            path, fname = os.path.split(entry_point)
            name = os.path.splitext(fname)[0]
            pts.append(
                PutativeTarget.for_target_type(
                    target_type=PexBinary,
                    path=path,
                    name=name,
                    triggering_sources=tuple(),
                    kwargs={"entry_point": fname},
                ))

    return PutativeTargets(pts)
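is_entry_point decides which files get a pex_binary. A hedged approximation of that check, assuming it scans the file's bytes for a __main__ guard (the exact regex and edge cases in Pants may differ):

import re

_MAIN_GUARD = re.compile(rb"""^if\s+__name__\s*==\s*['"]__main__['"]""", re.MULTILINE)

def looks_like_entry_point(content: bytes) -> bool:
    # True if any line starts an `if __name__ == "__main__"` block.
    return _MAIN_GUARD.search(content) is not None

assert looks_like_entry_point(b'if __name__ == "__main__":\n    main()\n')
assert not looks_like_entry_point(b"x = 1\n")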
Example 7
async def setup_full_package_build_request(
    request: _SetupGoProtobufPackageBuildRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
    package_mapping: ImportPathToPackages,
    go_protobuf_mapping: GoProtobufImportPathMapping,
    analyzer: PackageAnalyzerSetup,
) -> FallibleBuildGoPackageRequest:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    transitive_targets, downloaded_protoc_binary, empty_output_dir = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses)),
        Get(DownloadedExternalTool, ExternalToolRequest,
            protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    all_sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=(tgt[ProtobufSourceField]
                            for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSourceField, ),
            enable_codegen=True,
        ),
    )
    source_roots, input_digest = await MultiGet(
        Get(SourceRootsResult, SourceRootsRequest,
            SourceRootsRequest.for_files(all_sources.files)),
        Get(Digest,
            MergeDigests([all_sources.snapshot.digest, empty_output_dir])),
    )

    source_root_paths = sorted(
        {sr.path
         for sr in source_roots.path_to_root.values()})

    pkg_sources = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[ProtobufSourceField]))
        for tgt in transitive_targets.roots)
    pkg_files = sorted({f for ps in pkg_sources for f in ps.files})

    maybe_grpc_plugin_args = []
    if any(
            tgt.get(ProtobufGrpcToggleField).value
            for tgt in transitive_targets.roots):
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    gen_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *(f"--proto_path={source_root}"
                  for source_root in source_root_paths),
                *maybe_grpc_plugin_args,
                *pkg_files,
            ],
            # Note: Necessary or else --plugin option needs absolute path.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=f"Generating Go sources from {request.import_path}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )
    if gen_result.exit_code != 0:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=gen_result.exit_code,
            stderr=gen_result.stderr.decode(),
        )

    # Ensure that the generated files are in a single package directory.
    gen_sources = await Get(Snapshot, Digest, gen_result.output_digest)
    files_by_dir = group_by_dir(gen_sources.files)
    if len(files_by_dir) != 1:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=1,
            stderr=
            ("Expected Go files generated from Protobuf sources to be output to a single directory.\n"
             f"- import path: {request.import_path}\n"
             f"- protobuf files: {', '.join(pkg_files)}"),
        )
    gen_dir = list(files_by_dir.keys())[0]

    # Analyze the generated sources.
    input_digest = await Get(
        Digest, MergeDigests([gen_sources.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, gen_dir),
            input_digest=input_digest,
            description=
            f"Determine metadata for generated Go package for {request.import_path}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )

    # Parse the metadata from the analysis.
    fallible_analysis = FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=gen_dir,
        import_path=request.import_path,
        minimum_go_version="",
        description_of_source=
        f"Go package generated from protobuf targets `{', '.join(str(addr) for addr in request.addresses)}`",
    )
    if not fallible_analysis.analysis:
        return FallibleBuildGoPackageRequest(
            request=None,
            import_path=request.import_path,
            exit_code=fallible_analysis.exit_code,
            stderr=fallible_analysis.stderr,
        )
    analysis = fallible_analysis.analysis

    # Obtain build requests for third-party dependencies.
    # TODO: Consider how to merge this code with existing dependency inference code.
    dep_build_request_addrs: list[Address] = []
    for dep_import_path in (*analysis.imports, *analysis.test_imports,
                            *analysis.xtest_imports):
        # Infer dependencies on other Go packages.
        candidate_addresses = package_mapping.mapping.get(dep_import_path)
        if candidate_addresses:
            # TODO: Use explicit dependencies to disambiguate? This should never happen with Go backend though.
            if len(candidate_addresses) > 1:
                return FallibleBuildGoPackageRequest(
                    request=None,
                    import_path=request.import_path,
                    exit_code=result.exit_code,
                    stderr=
                    (f"Multiple addresses match import of `{dep_import_path}`.\n"
                     f"addresses: {', '.join(str(a) for a in candidate_addresses)}"
                     ),
                )
            dep_build_request_addrs.extend(candidate_addresses)

        # Infer dependencies on other generated Go sources.
        go_protobuf_candidate_addresses = go_protobuf_mapping.mapping.get(
            dep_import_path)
        if go_protobuf_candidate_addresses:
            dep_build_request_addrs.extend(go_protobuf_candidate_addresses)

    dep_build_requests = await MultiGet(
        Get(BuildGoPackageRequest, BuildGoPackageTargetRequest(addr))
        for addr in dep_build_request_addrs)

    return FallibleBuildGoPackageRequest(
        request=BuildGoPackageRequest(
            import_path=request.import_path,
            digest=gen_sources.digest,
            dir_path=analysis.dir_path,
            go_file_names=analysis.go_files,
            s_file_names=analysis.s_files,
            direct_dependencies=dep_build_requests,
            minimum_go_version=analysis.minimum_go_version,
        ),
        import_path=request.import_path,
    )
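The key detail of the protoc invocation is one --proto_path flag per discovered source root, so that import statements in .proto files resolve relative to each root. A pure-function sketch of the argv assembly above (the helper name and paths are illustrative):

def protoc_argv(protoc_exe: str, out_dir: str, source_roots: list[str],
                proto_files: list[str], grpc: bool = False) -> list[str]:
    argv = [protoc_exe, f"--go_out={out_dir}", "--go_opt=paths=source_relative"]
    if grpc:
        argv += [f"--go-grpc_out={out_dir}", "--go-grpc_opt=paths=source_relative"]
    # One --proto_path per source root, mirroring the rule above.
    argv += [f"--proto_path={root}" for root in source_roots]
    return argv + proto_files

assert protoc_argv("protoc", "_generated_files", ["src/protos"],
                   ["src/protos/foo/foo.proto"]) == [
    "protoc", "--go_out=_generated_files", "--go_opt=paths=source_relative",
    "--proto_path=src/protos", "src/protos/foo/foo.proto",
]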
Example 8
async def strip_source_roots(source_files: SourceFiles) -> StrippedSourceFiles:
    """Removes source roots from a snapshot.

    E.g. `src/python/pants/util/strutil.py` -> `pants/util/strutil.py`.
    """
    if not source_files.snapshot.files:
        return StrippedSourceFiles(source_files.snapshot)

    if source_files.unrooted_files:
        rooted_files = set(source_files.snapshot.files) - set(
            source_files.unrooted_files)
        rooted_files_snapshot = await Get(
            Snapshot,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(rooted_files)))
    else:
        rooted_files_snapshot = source_files.snapshot

    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(rooted_files_snapshot.files),
    )

    file_to_source_root = {
        str(file): root
        for file, root in source_roots_result.path_to_root.items()
    }
    files_grouped_by_source_root = {
        source_root.path: tuple(str(f) for f in files)
        for source_root, files in itertools.groupby(
            # groupby only merges *consecutive* equal keys, so sort the
            # files by source root first; otherwise same-root groups can
            # split and the dict comprehension silently drops files.
            sorted(file_to_source_root.keys(),
                   key=lambda f: file_to_source_root[f].path),
            key=file_to_source_root.__getitem__)
    }

    if len(files_grouped_by_source_root) == 1:
        source_root = next(iter(files_grouped_by_source_root.keys()))
        if source_root == ".":
            resulting_snapshot = rooted_files_snapshot
        else:
            resulting_snapshot = await Get(
                Snapshot,
                RemovePrefix(rooted_files_snapshot.digest, source_root))
    else:
        digest_subsets = await MultiGet(
            Get(Digest,
                DigestSubset(rooted_files_snapshot.digest, PathGlobs(files)))
            for files in files_grouped_by_source_root.values())
        resulting_digests = await MultiGet(
            Get(Digest, RemovePrefix(digest, source_root))
            for digest, source_root in zip(
                digest_subsets, files_grouped_by_source_root.keys()))
        resulting_snapshot = await Get(Snapshot,
                                       MergeDigests(resulting_digests))

    # Add the unrooted files back in.
    if source_files.unrooted_files:
        unrooted_files_digest = await Get(
            Digest,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(source_files.unrooted_files)),
        )
        resulting_snapshot = await Get(
            Snapshot,
            MergeDigests((resulting_snapshot.digest, unrooted_files_digest)))

    return StrippedSourceFiles(resulting_snapshot)
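This variant groups files with itertools.groupby, which only merges consecutive equal keys; hence the sort by source root before grouping. A quick demonstration of the pitfall in plain Python:

import itertools

data = ["src/a.py", "tests/b.py", "src/c.py"]
root_of = {"src/a.py": "src", "tests/b.py": "tests", "src/c.py": "src"}

# Unsorted: groupby yields "src" twice, so a dict built from the groups
# keeps only the last "src" group and loses "src/a.py".
assert [k for k, _ in itertools.groupby(data, key=root_of.__getitem__)] == [
    "src", "tests", "src"
]

# Sorted by root first: one group per root, nothing lost.
ordered = sorted(data, key=root_of.__getitem__)
assert {k: list(g) for k, g in itertools.groupby(ordered, key=root_of.__getitem__)} == {
    "src": ["src/a.py", "src/c.py"],
    "tests": ["tests/b.py"],
}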
Example 9
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
    union_membership: UnionMembership,
) -> BuiltPackage:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])

    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            softwrap(
                f"""
                In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or
                {SDistField.alias!r} must be `True`.
                """
            )
        )

    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    backend_env_vars = dist_tgt.get(BuildBackendEnvVarsField).value
    if backend_env_vars:
        extra_build_time_env = await Get(Environment, EnvironmentRequest(sorted(backend_env_vars)))
    else:
        extra_build_time_env = Environment()

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            interpreter_constraints=interpreter_constraints,
        ),
    )

    # Find the source roots for the build-time 1stparty deps (e.g., deps of setup.py).
    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest(
            files=[], dirs={PurePath(tgt.address.spec_path) for tgt in transitive_targets.closure}
        ),
    )
    source_roots = tuple(sorted({sr.path for sr in source_roots_result.path_to_root.values()}))

    # Get any extra build-time environment (e.g., native extension requirements).
    build_env_requests = []
    build_env_request_types = union_membership.get(DistBuildEnvironmentRequest)
    for build_env_request_type in build_env_request_types:
        if build_env_request_type.is_applicable(dist_tgt):
            build_env_requests.append(
                build_env_request_type(
                    tuple(tt.address for tt in transitive_targets.closure), interpreter_constraints
                )
            )

    build_envs = await MultiGet(
        [
            Get(DistBuildEnvironment, DistBuildEnvironmentRequest, build_env_request)
            for build_env_request in build_env_requests
        ]
    )
    extra_build_time_requirements = tuple(
        itertools.chain.from_iterable(
            build_env.extra_build_time_requirements for build_env in build_envs
        )
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            [chroot.digest, *(build_env.extra_build_time_inputs for build_env in build_envs)]
        ),
    )

    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_input = await Get(Digest, AddPrefix(input_digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_input, working_directory))

    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_input,
            working_directory=working_directory,
            build_time_source_roots=source_roots,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
            extra_build_time_requirements=extra_build_time_requirements,
            extra_build_time_env=extra_build_time_env,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
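The union_membership.get(DistBuildEnvironmentRequest) loop is Pants' plugin-hook pattern: any backend may register a request type, and this rule fans out to whichever ones declare themselves applicable. A stripped-down sketch of that dispatch (the class names are hypothetical stand-ins):

class CythonBuildEnvRequest:
    @staticmethod
    def is_applicable(dist_target_name: str) -> bool:
        return "cython" in dist_target_name

class DefaultBuildEnvRequest:
    @staticmethod
    def is_applicable(dist_target_name: str) -> bool:
        return True

# Stand-in for union_membership.get(DistBuildEnvironmentRequest).
registered = [CythonBuildEnvRequest, DefaultBuildEnvRequest]
applicable = [r for r in registered if r.is_applicable("my-dist")]
assert applicable == [DefaultBuildEnvRequest]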
Example 10
async def generate_scrooge_thrift_sources(
    request: GenerateScroogeThriftSourcesRequest,
    jdk: InternalJdk,
    scrooge: ScroogeSubsystem,
) -> GeneratedScroogeThriftSources:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"

    lockfile_request = await Get(GenerateJvmLockfileFromTool,
                                 ScroogeToolLockfileSentinel())
    tool_classpath, transitive_targets, empty_output_dir_digest, wrapped_target = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.thrift_source_field.address])),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(
            WrappedTarget,
            WrappedTargetRequest(request.thrift_source_field.address,
                                 description_of_origin="<infallible>"),
        ),
    )

    transitive_sources, target_sources = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(tgt[ThriftSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ThriftSourceField)),
        ),
        Get(SourceFiles, SourceFilesRequest([request.thrift_source_field])),
    )

    sources_roots = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(transitive_sources.snapshot.files),
    )
    deduped_source_root_paths = sorted(
        {sr.path
         for sr in sources_roots.path_to_root.values()})

    input_digest = await Get(
        Digest,
        MergeDigests([
            transitive_sources.snapshot.digest,
            target_sources.snapshot.digest,
            empty_output_dir_digest,
        ]),
    )

    maybe_include_paths = []
    for path in deduped_source_root_paths:
        maybe_include_paths.extend(["-i", path])

    maybe_finagle_option = []
    if wrapped_target.target[ScroogeFinagleBoolField].value:
        maybe_finagle_option = ["--finagle"]

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            argv=[
                "com.twitter.scrooge.Main",
                *maybe_include_paths,
                "--dest",
                output_dir,
                "--language",
                request.lang_id,
                *maybe_finagle_option,
                *target_sources.snapshot.files,
            ],
            input_digest=input_digest,
            extra_jvm_options=scrooge.jvm_options,
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description=
            f"Generating {request.lang_name} sources from {request.thrift_source_field.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    output_snapshot = await Get(Snapshot,
                                RemovePrefix(result.output_digest, output_dir))
    return GeneratedScroogeThriftSources(output_snapshot)
Example 11
async def generate_apache_thrift_sources(
    request: GenerateThriftSourcesRequest,
    thrift: ApacheThriftSetup,
) -> GeneratedThriftSources:
    output_dir = "_generated_files"

    transitive_targets, empty_output_dir_digest = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.thrift_source_field.address])),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    transitive_sources, target_sources = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(tgt[ThriftSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ThriftSourceField)),
        ),
        Get(SourceFiles, SourceFilesRequest([request.thrift_source_field])),
    )

    sources_roots = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(transitive_sources.snapshot.files),
    )
    deduped_source_root_paths = sorted(
        {sr.path
         for sr in sources_roots.path_to_root.values()})

    input_digest = await Get(
        Digest,
        MergeDigests([
            transitive_sources.snapshot.digest,
            target_sources.snapshot.digest,
            empty_output_dir_digest,
        ]),
    )

    options_str = ""
    if request.lang_options:
        options_str = f":{','.join(opt for opt in request.lang_options)}"

    maybe_include_paths = []
    for path in deduped_source_root_paths:
        maybe_include_paths.extend(["-I", path])

    args = [
        thrift.path,
        "-out",
        output_dir,
        *maybe_include_paths,
        "--gen",
        f"{request.lang_id}{options_str}",
        *target_sources.snapshot.files,
    ]

    result = await Get(
        ProcessResult,
        Process(
            args,
            input_digest=input_digest,
            output_directories=(output_dir, ),
            description=
            f"Generating {request.lang_name} sources from {request.thrift_source_field.address}.",
            level=LogLevel.DEBUG,
        ),
    )

    output_snapshot = await Get(Snapshot,
                                RemovePrefix(result.output_digest, output_dir))
    return GeneratedThriftSources(output_snapshot)
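As with protoc above, the interesting part is the flag assembly: one -I include per deduped source root, and language options joined onto --gen after a colon. A pure-function sketch (the helper name and values are illustrative):

def thrift_argv(thrift_exe: str, out_dir: str, include_roots: list[str],
                lang: str, lang_options: list[str],
                sources: list[str]) -> list[str]:
    gen = lang + (f":{','.join(lang_options)}" if lang_options else "")
    argv = [thrift_exe, "-out", out_dir]
    # One "-I <root>" pair per source root, mirroring the loop above.
    for root in include_roots:
        argv += ["-I", root]
    return argv + ["--gen", gen] + sources

assert thrift_argv("thrift", "_generated_files", ["src/thrift"], "java",
                   ["private-members"], ["src/thrift/foo.thrift"]) == [
    "thrift", "-out", "_generated_files", "-I", "src/thrift",
    "--gen", "java:private-members", "src/thrift/foo.thrift",
]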