Example #1
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )

    # If commands were provided, run setup.py with them; otherwise just dump chroots.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
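
A rule like Example #1 only takes effect once it is registered against the `package` goal. Below is a minimal sketch of that wiring, assuming the Pants 2.x plugin API; the field-set and rule names are hypothetical, and exact signatures (for instance `OutputPathField.value_or_default`) vary between the Pants versions represented in these examples.

from dataclasses import dataclass

from pants.core.goals.package import (
    BuiltPackage,
    BuiltPackageArtifact,
    OutputPathField,
    PackageFieldSet,
)
from pants.engine.fs import EMPTY_DIGEST
from pants.engine.rules import collect_rules, rule
from pants.engine.unions import UnionRule


@dataclass(frozen=True)
class ExamplePackageFieldSet(PackageFieldSet):
    # Targets with all of `required_fields` become eligible for this packaging rule.
    required_fields = (OutputPathField,)

    output_path: OutputPathField


@rule
async def package_example(field_set: ExamplePackageFieldSet) -> BuiltPackage:
    # A real rule would assemble a digest as in the examples; this placeholder returns an empty one.
    output_filename = field_set.output_path.value_or_default(file_ending=None)
    return BuiltPackage(EMPTY_DIGEST, (BuiltPackageArtifact(output_filename),))


def rules():
    return [
        *collect_rules(),
        # Registering against the PackageFieldSet union is what makes `pants package` invoke the rule.
        UnionRule(PackageFieldSet, ExamplePackageFieldSet),
    ]
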
Example #2
def packages():
    return (BuiltPackage(
        EMPTY_DIGEST,
        (
            BuiltPackageArtifact("my-package-0.1.0.tar.gz"),
            BuiltPackageArtifact("my_package-0.1.0-py3-none-any.whl"),
        ),
    ), )
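
Example #2 is a test-style fixture: it fabricates a BuiltPackage with an empty digest and two artifact paths rather than building anything. A sketch of how such a fixture might be asserted against, assuming the `packages()` helper above is importable (`BuiltPackageArtifact.relpath` is the path the artifact occupies under `dist/`):

from pants.engine.fs import EMPTY_DIGEST


def test_packages_fixture() -> None:
    built_package = packages()[0]
    assert built_package.digest == EMPTY_DIGEST
    # The first positional argument of BuiltPackageArtifact is its relpath.
    assert [artifact.relpath for artifact in built_package.artifacts] == [
        "my-package-0.1.0.tar.gz",
        "my_package-0.1.0-py3-none-any.whl",
    ]
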
Example #3
async def package_go_binary(field_set: GoBinaryFieldSet) -> BuiltPackage:
    main_pkg = await Get(GoBinaryMainPackage, GoBinaryMainPackageRequest(field_set.main))
    built_package = await Get(
        BuiltGoPackage, BuildGoPackageTargetRequest(main_pkg.address, is_main=True)
    )
    main_pkg_a_file_path = built_package.import_paths_to_pkg_a_files["main"]
    import_config = await Get(
        ImportConfig, ImportConfigRequest(built_package.import_paths_to_pkg_a_files)
    )
    input_digest = await Get(Digest, MergeDigests([built_package.digest, import_config.digest]))

    output_filename = PurePath(field_set.output_path.value_or_default(file_ending=None))
    binary = await Get(
        LinkedGoBinary,
        LinkGoBinaryRequest(
            input_digest=input_digest,
            archives=(main_pkg_a_file_path,),
            import_config_path=import_config.CONFIG_PATH,
            output_filename=f"./{output_filename.name}",
            description=f"Link Go binary for {field_set.address}",
        ),
    )

    renamed_output_digest = await Get(Digest, AddPrefix(binary.digest, str(output_filename.parent)))

    artifact = BuiltPackageArtifact(relpath=str(output_filename))
    return BuiltPackage(renamed_output_digest, (artifact,))
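
Note the output-path handling here, which recurs in Examples #19 and #20: the link process writes the binary at the sandbox root as `./{output_filename.name}`, and the resulting digest is then re-rooted with `AddPrefix(..., str(output_filename.parent))` so that the artifact's relpath matches the full `output_path` value.
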
Example #4
async def package_debian_package(field_set: DebianPackageFieldSet) -> BuiltPackage:
    dpkg_deb_path = await Get(
        BinaryPaths,
        BinaryPathRequest(
            binary_name="touch",
            search_path=["/usr/bin"],
        ),
    )
    if not dpkg_deb_path.first_path:
        raise EnvironmentError("Could not find the `touch` program on search paths.")

    output_filename = field_set.output_path.value_or_default(file_ending="deb")

    # TODO(alexey): add Debian packaging logic
    result = await Get(
        ProcessResult,
        Process(
            argv=(
                "touch",
                output_filename,
            ),
            description="Create a Debian package from the produced packages.",
            output_files=(output_filename,),
        ),
    )
    return BuiltPackage(result.output_digest, artifacts=(BuiltPackageArtifact(output_filename),))
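
This example is a stub: the TODO placeholder only locates `touch` and creates an empty file named after the output path. Example #10 below shows a working implementation that runs `dpkg-deb --build` against the hydrated sources.
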
Example #5
async def package_pex_binary(
        field_set: PexBinaryFieldSet,
        pex_binary_defaults: PexBinaryDefaults) -> BuiltPackage:
    resolved_entry_point = await Get(
        ResolvedPexEntryPoint,
        ResolvePexEntryPointRequest(field_set.entry_point))
    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
                #  https://github.com/pantsbuild/pants/issues/11619
                main=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #6
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])

    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            f"In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or "
            f"{SDistField.alias!r} must be `True`."
        )

    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            py2=interpreter_constraints.includes_python2(),
        ),
    )

    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_chroot = await Get(Digest, AddPrefix(chroot.digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_chroot, working_directory))
    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_chroot,
            working_directory=working_directory,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
Example #7
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    union_membership: UnionMembership,
) -> BuiltPackage:
    resolved_entry_point, transitive_targets = await MultiGet(
        Get(ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX; this is a
    # common gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            softwrap(f"""
                The `pex_binary` target {field_set.address} transitively depends on the below `files`
                targets, but Pants will not include them in the PEX. Filesystem APIs like `open()`
                are not able to load files within the binary itself; instead, they read from the
                current working directory.

                Instead, use `resources` targets or wrap this `pex_binary` in an `archive`.
                See {doc_url('resources')}.

                Files targets dependencies: {files_addresses}
                """))

    output_filename = field_set.output_path.value_or_default(file_ending="pex")

    complete_platforms = await Get(CompletePlatforms,
                                   PexCompletePlatformsField,
                                   field_set.complete_platforms)

    pex = await Get(
        Pex,
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            main=resolved_entry_point.val or field_set.script.value,
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            complete_platforms=complete_platforms,
            output_filename=output_filename,
            layout=PexLayout(field_set.layout.value),
            additional_args=field_set.generate_additional_args(
                pex_binary_defaults),
            include_requirements=field_set.include_requirements.value,
            include_local_dists=True,
        ),
    )
    return BuiltPackage(pex.digest, (BuiltPackageArtifact(output_filename), ))
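
Examples #5, #13, and #16 show earlier revisions of this rule built on `TwoStepPex`; this version requests `Pex` directly and adds `complete_platforms`, `layout`, and `include_requirements`.
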
Example #8
async def package_python_awslambda(field_set: PythonAwsLambdaFieldSet) -> BuiltPackage:
    awslambda = await Get(CreatedAWSLambda, AWSLambdaFieldSet, field_set)
    return BuiltPackage(
        awslambda.digest,
        (
            BuiltPackageArtifact(
                awslambda.zip_file_relpath,
                (f"    Runtime: {awslambda.runtime}", f"    Handler: {awslambda.handler}"),
            ),
        ),
    )
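
Note the three-argument form `Get(CreatedAWSLambda, AWSLambdaFieldSet, field_set)`: it passes an existing instance while naming its declared type explicitly, instead of constructing a request inline. The same form appears in Example #14 (`Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_file(...))`) and Example #21 (`Get(VenvPex, PexRequest, lambdex_request)`).
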
Example #9
async def package_bash_binary(field_set: BashBinaryFieldSet,
                              bash_setup: BashSetup) -> BuiltPackage:
    # We first locate the `zip` program using `BinaryPaths`. We use the option
    # `--bash-executable-search-paths` to determine which paths to search, such as `/bin` and
    # `/usr/bin`. See https://www.pantsbuild.org/v2.0/docs/rules-api-installing-tools.
    zip_program_paths = await Get(
        BinaryPaths,
        BinaryPathRequest(
            binary_name="zip",
            search_path=bash_setup.executable_search_path,
            # This will run `zip --version` to ensure it's a valid binary and to allow
            # invalidating the cache if the version changes.
            test=BinaryPathTest(args=["-v"]),
        ),
    )
    if not zip_program_paths.first_path:
        raise EnvironmentError(
            f"Could not find the `zip` program on search paths "
            f"{list(bash_setup.executable_search_path)}, so cannot create a binary for "
            f"{field_set.address}. Please check that `zip` is installed and possibly modify the "
            "option `executable_search_paths` in the `[bash-setup]` options scope."
        )

    # We need to include all relevant transitive dependencies in the zip. See
    # https://www.pantsbuild.org/v2.0/docs/rules-api-and-target-api.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(BashSources, FilesSources, ResourcesSources),
        ),
    )

    output_filename = field_set.output_path.value_or_default(
        field_set.address, file_ending="zip", use_legacy_format=False)
    result = await Get(
        ProcessResult,
        Process(
            argv=(
                zip_program_paths.first_path.path,
                output_filename,
                *sources.snapshot.files,
            ),
            input_digest=sources.snapshot.digest,
            description=f"Zip {field_set.address} and its dependencies.",
            output_files=(output_filename, ),
        ),
    )
    return BuiltPackage(result.output_digest,
                        artifacts=(BuiltPackageArtifact(output_filename), ))
Example #10
async def package_debian_package(field_set: DebianPackageFieldSet,
                                 tar_binary_path: TarBinary) -> BuiltPackage:
    dpkg_deb_path = await Get(
        BinaryPaths,
        BinaryPathRequest(
            binary_name="dpkg-deb",
            search_path=["/usr/bin"],
        ),
    )
    if not dpkg_deb_path.first_path:
        raise OSError(
            f"Could not find the `{dpkg_deb_path.binary_name}` program in `/usr/bin`."
        )

    hydrated_sources = await Get(HydratedSources,
                                 HydrateSourcesRequest(field_set.sources_dir))

    # Since all the sources are coming only from a single directory, it is
    # safe to pick an arbitrary file and get its root directory name.
    # Validation of the resolved files has been called on the target, so it is known that
    # snapshot.files isn't empty.
    sources_directory_name = PurePath(
        hydrated_sources.snapshot.files[0]).parts[0]

    result = await Get(
        ProcessResult,
        Process(
            argv=(
                dpkg_deb_path.first_path.path,
                "--build",
                sources_directory_name,
            ),
            description="Create a Debian package from the produced packages.",
            input_digest=hydrated_sources.snapshot.digest,
            # dpkg-deb produces a file with the same name as the input directory
            output_files=(f"{sources_directory_name}.deb", ),
            env={"PATH": str(PurePath(tar_binary_path.path).parent)},
        ),
    )
    # The output Debian package file needs to be renamed to match the output_path field.
    output_filename = field_set.output_path.value_or_default(
        file_ending="deb", )
    digest_entries = await Get(DigestEntries, Digest, result.output_digest)
    assert len(digest_entries) == 1
    result_file_entry = digest_entries[0]
    assert isinstance(result_file_entry, FileEntry)
    new_file = FileEntry(output_filename, result_file_entry.file_digest)

    final_result = await Get(Digest, CreateDigest([new_file]))
    return BuiltPackage(final_result,
                        artifacts=(BuiltPackageArtifact(output_filename), ))
Example #11
async def package_archive_target(
    field_set: ArchiveFieldSet, global_options: GlobalOptions
) -> BuiltPackage:
    package_targets, files_targets = await MultiGet(
        Get(Targets, UnparsedAddressInputs, field_set.packages.to_unparsed_address_inputs()),
        Get(Targets, UnparsedAddressInputs, field_set.files.to_unparsed_address_inputs()),
    )

    package_field_sets_per_target = await Get(
        FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, package_targets)
    )
    packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in package_field_sets_per_target.field_sets
    )

    files_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                tgt.get(Sources), for_sources_types=(FilesSources,), enable_codegen=True
            ),
        )
        for tgt in files_targets
    )

    input_snapshot = await Get(
        Snapshot,
        MergeDigests(
            (
                *(package.digest for package in packages),
                *(sources.snapshot.digest for sources in files_sources),
            )
        ),
    )

    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        file_ending=field_set.format_field.value,
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )
    archive = await Get(
        Digest,
        CreateArchive(
            input_snapshot,
            output_filename=output_filename,
            format=ArchiveFormat(field_set.format_field.value),
        ),
    )
    return BuiltPackage(archive, (BuiltPackageArtifact(output_filename),))
Example #12
async def package_archive_target(field_set: ArchiveFieldSet) -> BuiltPackage:
    # TODO(#13086): Because we're using `Targets` instead of `UnexpandedTargets`, the
    #  `files` target generator gets replaced by its generated `file` targets. That replacement is
    #  necessary because we only hydrate sources for `FileSourcesField`, which is only for the
    #  `file` target.  That's really subtle!
    package_targets, file_targets = await MultiGet(
        Get(Targets, UnparsedAddressInputs,
            field_set.packages.to_unparsed_address_inputs()),
        Get(Targets, UnparsedAddressInputs,
            field_set.files.to_unparsed_address_inputs()),
    )

    package_field_sets_per_target = await Get(
        FieldSetsPerTarget,
        FieldSetsPerTargetRequest(PackageFieldSet, package_targets))
    packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in package_field_sets_per_target.field_sets)

    file_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                tgt.get(SourcesField),
                for_sources_types=(FileSourceField, ),
                enable_codegen=True,
            ),
        ) for tgt in file_targets)

    input_snapshot = await Get(
        Snapshot,
        MergeDigests((
            *(package.digest for package in packages),
            *(sources.snapshot.digest for sources in file_sources),
        )),
    )

    output_filename = field_set.output_path.value_or_default(
        file_ending=field_set.format_field.value)
    archive = await Get(
        Digest,
        CreateArchive(
            input_snapshot,
            output_filename=output_filename,
            format=ArchiveFormat(field_set.format_field.value),
        ),
    )
    return BuiltPackage(archive, (BuiltPackageArtifact(output_filename), ))
Example #13
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    union_membership: UnionMembership,
) -> BuiltPackage:
    resolved_entry_point, transitive_targets = await MultiGet(
        Get(ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(field_set.entry_point)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX; this is a
    # common gotcha.
    files_tgts = targets_with_sources_types([FilesSources],
                                            transitive_targets.dependencies,
                                            union_membership)
    if files_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in files_tgts)
        logger.warning(
            f"The pex_binary target {field_set.address} transitively depends on the below files "
            "targets, but Pants will not include them in the PEX. Filesystem APIs like `open()` "
            "are not able to load files within the binary itself; instead, they read from the "
            "current working directory."
            "\n\nInstead, use `resources` targets or wrap this `pex_binary` in an `archive`. See "
            f"{bracketed_docs_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                # TODO(John Sirois): Support ConsoleScript in PexBinary targets:
                #  https://github.com/pantsbuild/pants/issues/11619
                main=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #14
async def package_pex_binary(
    field_set: PexBinaryFieldSet,
    pex_binary_defaults: PexBinaryDefaults,
    global_options: GlobalOptions,
) -> BuiltPackage:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        entry_point = PexBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))

    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        file_ending="pex",
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                entry_point=entry_point,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #15
async def run_helm_package(field_set: HelmPackageFieldSet) -> BuiltPackage:
    result_dir = "__out"

    chart, result_digest = await MultiGet(
        Get(HelmChart, HelmChartRequest(field_set)),
        Get(Digest, CreateDigest([Directory(result_dir)])),
    )

    input_digest = await Get(
        Digest, MergeDigests([chart.snapshot.digest, result_digest]))
    process_output_file = os.path.join(result_dir,
                                       _helm_artifact_filename(chart.metadata))

    process_result = await Get(
        ProcessResult,
        HelmProcess(
            argv=["package", chart.path, "-d", result_dir],
            input_digest=input_digest,
            output_files=(process_output_file, ),
            description=f"Packaging Helm chart: {field_set.address.spec_path}",
        ),
    )

    stripped_output_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, result_dir))

    final_snapshot = await Get(
        Snapshot,
        AddPrefix(stripped_output_digest,
                  field_set.output_path.value_or_default(file_ending=None)),
    )
    return BuiltPackage(
        final_snapshot.digest,
        artifacts=tuple(
            BuiltPackageArtifact(
                file, extra_log_lines=(f"Built Helm chart artifact: {file}", ))
            for file in final_snapshot.files),
    )
Example #16
async def package_pex_binary(
        field_set: PexBinaryFieldSet,
        pex_binary_defaults: PexBinaryDefaults) -> BuiltPackage:
    resolved_entry_point = await Get(
        ResolvedPexEntryPoint,
        ResolvePexEntryPointRequest(field_set.entry_point))
    output_filename = field_set.output_path.value_or_default(field_set.address,
                                                             file_ending="pex")
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                internal_only=False,
                entry_point=resolved_entry_point.val,
                platforms=PexPlatforms.create_from_platforms_field(
                    field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(
                    pex_binary_defaults),
            )),
    )
    return BuiltPackage(two_step_pex.pex.digest,
                        (BuiltPackageArtifact(output_filename), ))
Example #17
async def package_war(
    field_set: PackageWarFileFieldSet,
    bash: BashBinary,
    zip: ZipBinary,
) -> BuiltPackage:
    classpath = await Get(Classpath,
                          DependenciesRequest(field_set.dependencies))
    all_jar_files_digest = await Get(Digest, MergeDigests(classpath.digests()))

    prefixed_jars_digest, content, descriptor, input_setup_digest = await MultiGet(
        Get(Digest, AddPrefix(all_jar_files_digest, "__war__/WEB-INF/lib")),
        Get(RenderedWarContent, RenderWarContentRequest(field_set.content)),
        Get(
            RenderedWarDeploymentDescriptor,
            RenderWarDeploymentDescriptorRequest(field_set.descriptor,
                                                 field_set.address),
        ),
        Get(
            Digest,
            CreateDigest([
                FileContent(
                    "make_war.sh",
                    textwrap.dedent(f"""\
                    cd __war__
                    {zip.path} ../output.war -r .
                    """).encode(),
                    is_executable=True,
                ),
                Directory("__war__/WEB-INF/classes"),
                Directory("__war__/WEB-INF/lib"),
            ]),
        ),
    )

    input_digest = await Get(
        Digest,
        MergeDigests([
            prefixed_jars_digest,
            descriptor.digest,
            content.digest,
            input_setup_digest,
        ]),
    )

    result = await Get(
        ProcessResult,
        Process(
            [bash.path, "make_war.sh"],
            input_digest=input_digest,
            output_files=("output.war", ),
            description=f"Assemble WAR file for {field_set.address}",
        ),
    )

    output_entries = await Get(DigestEntries, Digest, result.output_digest)
    if len(output_entries) != 1:
        raise AssertionError("No output from war assembly step.")
    output_entry = output_entries[0]
    if not isinstance(output_entry, FileEntry):
        raise AssertionError("Unexpected digest entry")
    output_filename = PurePath(
        field_set.output_path.value_or_default(file_ending="war"))
    package_digest = await Get(
        Digest,
        CreateDigest(
            [FileEntry(str(output_filename), output_entry.file_digest)]))
    artifact = BuiltPackageArtifact(relpath=str(output_filename))
    return BuiltPackage(digest=package_digest, artifacts=(artifact, ))
Example #18
async def package_python_awslambda(
    field_set: PythonAwsLambdaFieldSet,
    lambdex: Lambdex,
    platform: Platform,
    union_membership: UnionMembership,
) -> BuiltPackage:
    if platform.is_macos:
        logger.warning(
            "AWS Lambdas built on macOS may fail to build. If your lambda uses any third-party"
            " dependencies without binary wheels (bdist) for Linux available, it will fail to"
            " build. If this happens, you will either need to update your dependencies to only use"
            f" dependencies with pre-built wheels, or find a Linux environment to run {bin_name()}"
            " package. (See https://realpython.com/python-wheels/ for more about wheels.)\n\n(If"
            " the build does not raise an exception, it's safe to use macOS.)")

    output_filename = field_set.output_path.value_or_default(
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip", )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    pex_platforms = []
    interpreter_version = field_set.runtime.to_interpreter_version()
    if interpreter_version:
        py_major, py_minor = interpreter_version
        platform_str = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
        # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
        if py_major <= 3 and py_minor < 8:
            platform_str += "m"
        if (py_major, py_minor) == (2, 7):
            platform_str += "u"
        pex_platforms.append(platform_str)

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )

    complete_platforms = await Get(CompletePlatforms,
                                   PexCompletePlatformsField,
                                   field_set.complete_platforms)

    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        include_requirements=field_set.include_requirements.value,
        output_filename=output_filename,
        platforms=PexPlatforms(pex_platforms),
        complete_platforms=complete_platforms,
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex.to_pex_request()),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler,
            ResolvePythonAwsHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX; this is a
    # common gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            softwrap(f"""
                The `python_awslambda` target {field_set.address} transitively depends on the below
                `files` targets, but Pants will not include them in the built Lambda. Filesystem APIs
                like `open()` are not able to load files within the binary itself; instead, they
                read from the current working directory.

                Instead, use `resources` targets. See {doc_url('resources')}.

                Files targets dependencies: {files_addresses}
                """))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )

    extra_log_data: list[tuple[str, str]] = []
    if field_set.runtime.value:
        extra_log_data.append(("Runtime", field_set.runtime.value))
    extra_log_data.extend(
        ("Complete platform", path) for path in complete_platforms)
    # The AWS-facing handler function is always lambdex_handler.handler, which is the
    # wrapper injected by lambdex that manages invocation of the actual handler.
    extra_log_data.append(("Handler", "lambdex_handler.handler"))
    first_column_width = 4 + max(len(header) for header, _ in extra_log_data)

    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=tuple(
            f"{header.rjust(first_column_width, ' ')}: {data}"
            for header, data in extra_log_data),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
Example #19
async def package_deploy_jar(
    bash: BashBinary,
    zip: ZipBinary,
    field_set: DeployJarFieldSet,
) -> BuiltPackage:
    """
    Constructs a deploy ("fat") JAR file (currently from Java sources only) by
    1. Resolving/compiling a classpath for the `root_address` target,
    2. Producing a ZIP file containing _only_ the JAR manifest file for the `main_class`
    3. Creating a deploy jar with a broken ZIP index by concatenating all dependency JARs together,
       followed by the thin JAR we created
    4. Using the unix `zip` utility's repair function to fix the broken fat jar
    """

    if field_set.main_class.value is None:
        raise Exception("Needs a `main` argument")

    #
    # 1. Produce a thin JAR containing our first-party sources and other runtime dependencies
    #

    dependencies = await Get(Addresses,
                             DependenciesRequest(field_set.dependencies))
    classpath = await Get(Classpath, Addresses, dependencies)

    #
    # 2. Produce JAR manifest, and output to a ZIP file that can be included with the JARs
    #

    main_class = field_set.main_class.value

    manifest_content = FileContent(
        _JAVA_MANIFEST_FILENAME,
        # NB: we're joining strings with newlines, because the JAR manifest format
        # needs precise indentation, and _cannot_ start with a blank line. `dedent` seriously
        # messes up those requirements.
        "\n".join([
            "Manifest-Version: 1.0",
            f"Main-Class: {main_class}",
            "",  # THIS BLANK LINE WILL BREAK EVERYTHING IF DELETED. DON'T DELETE IT.
        ]).encode("utf-8"),
    )

    manifest_jar_input_digest = await Get(Digest,
                                          CreateDigest([manifest_content]))
    manifest_jar_result = await Get(
        ProcessResult,
        Process(
            argv=[
                zip.path,
                _PANTS_MANIFEST_PARTIAL_JAR_FILENAME,
                _JAVA_MANIFEST_FILENAME,
            ],
            description="Build partial JAR containing manifest file",
            input_digest=manifest_jar_input_digest,
            output_files=[_PANTS_MANIFEST_PARTIAL_JAR_FILENAME],
        ),
    )

    manifest_jar = manifest_jar_result.output_digest

    #
    # 3/4. Create broken deploy JAR, then repair it with `zip -FF`
    #

    # NB. Concatenating multiple ZIP files produces a zip file that is _mostly_ safe to
    # be distributed (it can be fixed with `-FF`), so that's how we construct our fat JAR
    # without exploding the files to disk.
    #
    # `ZIP` files are extracted top-to-bottom and archives can have duplicate names
    # (e.g. `META-INF/MANIFEST.MF`). In the case of a `JAR` file, the JVM will understand the
    # last file with that file name to be the actual one. Therefore, our thin JAR needs to
    # appear at the end of the file for (in particular) our manifest to take precedence.
    # If there are duplicate class names among the JARs merged into the fat JAR, then
    # behaviour will be non-deterministic. Sorry!  --chrisjrn

    output_filename = PurePath(
        field_set.output_path.value_or_default(file_ending="jar"))
    input_filenames = " ".join(
        shlex.quote(i) for i in classpath.classpath_entries())
    _PANTS_BROKEN_DEPLOY_JAR = "pants_broken_deploy_jar.notajar"
    cat_and_repair_script = FileContent(
        _PANTS_CAT_AND_REPAIR_ZIP_FILENAME,
        # Using POSIX location/arg format for `cat`. If this gets more complicated, refactor.
        textwrap.dedent(f"""
            set -e
            /bin/cat {input_filenames} {_PANTS_MANIFEST_PARTIAL_JAR_FILENAME} > {_PANTS_BROKEN_DEPLOY_JAR}
            {zip.path} -FF {_PANTS_BROKEN_DEPLOY_JAR} --out {output_filename.name}
            """).encode("utf-8"),
    )

    cat_and_repair_script_digest = await Get(
        Digest, CreateDigest([cat_and_repair_script]))
    broken_deploy_jar_inputs_digest = await Get(
        Digest,
        MergeDigests([
            classpath.content.digest, cat_and_repair_script_digest,
            manifest_jar
        ]),
    )

    cat_and_repair = await Get(
        ProcessResult,
        Process(
            argv=[bash.path, _PANTS_CAT_AND_REPAIR_ZIP_FILENAME],
            input_digest=broken_deploy_jar_inputs_digest,
            output_files=[output_filename.name],
            description="Assemble combined JAR file",
        ),
    )

    renamed_output_digest = await Get(
        Digest,
        AddPrefix(cat_and_repair.output_digest, str(output_filename.parent)))

    artifact = BuiltPackageArtifact(relpath=str(output_filename))

    return BuiltPackage(digest=renamed_output_digest, artifacts=(artifact, ))
Example #20
async def package_go_binary(
    field_set: GoBinaryFieldSet,
    goroot: GoLangDistribution,
) -> BuiltPackage:
    main_address = field_set.main_address.value or ""
    main_go_package_address = await Get(
        Address,
        AddressInput,
        AddressInput.parse(main_address,
                           relative_to=field_set.address.spec_path),
    )
    _logger.info(f"main_go_package_address={main_go_package_address}")
    main_go_package_target = await Get(WrappedTarget, Address,
                                       main_go_package_address)
    main_go_package_field_set = BuildGoPackageFieldSet.create(
        main_go_package_target.target)
    built_main_go_package = await Get(
        BuiltGoPackage,
        BuildGoPackageRequest(field_sets=main_go_package_field_set,
                              is_main=True))

    downloaded_goroot = await Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        goroot.get_request(Platform.current),
    )

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([main_go_package_target.target.address]))
    transitive_go_deps = [
        dep for dep in transitive_targets.dependencies
        if BuildGoPackageFieldSet.is_applicable(dep)
    ]
    built_transitive_go_deps_requests = [
        Get(BuiltGoPackage,
            BuildGoPackageRequest(BuildGoPackageFieldSet.create(tgt)))
        for tgt in transitive_go_deps
    ]
    built_transitive_go_deps = await MultiGet(built_transitive_go_deps_requests)

    import_config_digests: Dict[str, Tuple[str, Digest]] = {}
    for built_transitive_go_dep in built_transitive_go_deps:
        # TODO: Should we normalize the input path or use a random string instead of the digest's fingerprint?
        # The concern is that different packages with identical code would produce identical archive bytes.
        fp = built_transitive_go_dep.object_digest.fingerprint
        prefixed_digest = await Get(
            Digest,
            AddPrefix(built_transitive_go_dep.object_digest, f"__pkgs__/{fp}"))
        import_config_digests[built_transitive_go_dep.import_path] = (
            fp, prefixed_digest)

    merged_packages_digest = await Get(
        Digest, MergeDigests([d for _, d in import_config_digests.values()]))

    enriched_goroot = await Get(
        EnrichedGoLangDistribution,
        EnrichGoLangDistributionRequest(downloaded_goroot))

    import_config: List[str] = ["# import config"]
    for import_path, (fp, _) in import_config_digests.items():
        import_config.append(
            f"packagefile {import_path}=__pkgs__/{fp}/__pkg__.a")
    for pkg, path in enriched_goroot.stdlib_packages.items():
        import_config.append(f"packagefile {pkg}={os.path.normpath(path)}")
    import_config_content = "\n".join(import_config).encode("utf-8")
    import_config_digest = await Get(
        Digest,
        CreateDigest(
            [FileContent(path="./importcfg", content=import_config_content)]))

    input_digest = await Get(
        Digest,
        MergeDigests((
            built_main_go_package.object_digest,
            downloaded_goroot.digest,
            merged_packages_digest,
            import_config_digest,
        )),
    )
    input_snapshot = await Get(Snapshot, Digest, input_digest)
    _logger.info(f"input_snapshot={input_snapshot.files}")

    output_filename_str = field_set.output_path.value
    if output_filename_str:
        output_filename = PurePath(output_filename_str)
    else:
        # TODO: Figure out default for binary_name. Had to do `or "name-not-set"` to satisfy mypy.
        binary_name = field_set.binary_name.value or "name-not-set"
        output_filename = PurePath(
            field_set.address.spec_path.replace(os.sep, ".")) / binary_name

    _logger.info(f"parent={output_filename.parent}")
    _logger.info(f"name={output_filename.name}")

    argv = [
        "./go/bin/go",
        "tool",
        "link",
        "-importcfg",
        "./importcfg",
        "-o",
        f"./{output_filename.name}",
        "./__pkg__.a",
    ]

    process = Process(
        argv=argv,
        input_digest=input_digest,
        output_files=[f"./{output_filename.name}"],
        description="Link Go binary.",
        level=LogLevel.DEBUG,
    )

    result = await Get(ProcessResult, Process, process)

    renamed_output_digest = await Get(
        Digest,
        AddPrefix(result.output_digest, output_filename.parent.as_posix()))
    ss = await Get(Snapshot, Digest, renamed_output_digest)
    _logger.info(f"ss={ss}")

    artifact = BuiltPackageArtifact(relpath=output_filename.as_posix())
    return BuiltPackage(digest=renamed_output_digest, artifacts=(artifact, ))
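
Compare this exploratory version with Example #3, which packages the same kind of Go binary target through dedicated `BuildGoPackageTargetRequest`, `ImportConfigRequest`, and `LinkGoBinaryRequest` rules rather than assembling the import config and invoking `go tool link` by hand.
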
Example #21
async def package_python_awslambda(
    field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex
) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            main=None,
            output_filename=output_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        )
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=PexRequirements(lambdex.all_requirements),
        interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(TwoStepPex, TwoStepPexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler, ResolvePythonAwsHandlerRequest(field_set.handler)),
    )

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.pex.digest,
            output_files=(output_filename,),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact,))
Example #22
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
    union_membership: UnionMembership,
) -> BuiltPackage:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])

    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            softwrap(
                f"""
                In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or
                {SDistField.alias!r} must be `True`.
                """
            )
        )

    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    backend_env_vars = dist_tgt.get(BuildBackendEnvVarsField).value
    if backend_env_vars:
        extra_build_time_env = await Get(Environment, EnvironmentRequest(sorted(backend_env_vars)))
    else:
        extra_build_time_env = Environment()

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            interpreter_constraints=interpreter_constraints,
        ),
    )

    # Find the source roots for the build-time 1stparty deps (e.g., deps of setup.py).
    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest(
            files=[], dirs={PurePath(tgt.address.spec_path) for tgt in transitive_targets.closure}
        ),
    )
    source_roots = tuple(sorted({sr.path for sr in source_roots_result.path_to_root.values()}))

    # Get any extra build-time environment (e.g., native extension requirements).
    build_env_requests = []
    build_env_request_types = union_membership.get(DistBuildEnvironmentRequest)
    for build_env_request_type in build_env_request_types:
        if build_env_request_type.is_applicable(dist_tgt):
            build_env_requests.append(
                build_env_request_type(
                    tuple(tt.address for tt in transitive_targets.closure), interpreter_constraints
                )
            )

    build_envs = await MultiGet(
        [
            Get(DistBuildEnvironment, DistBuildEnvironmentRequest, build_env_request)
            for build_env_request in build_env_requests
        ]
    )
    extra_build_time_requirements = tuple(
        itertools.chain.from_iterable(
            build_env.extra_build_time_requirements for build_env in build_envs
        )
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            [chroot.digest, *(build_env.extra_build_time_inputs for build_env in build_envs)]
        ),
    )

    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_input = await Get(Digest, AddPrefix(input_digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_input, working_directory))

    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_input,
            working_directory=working_directory,
            build_time_source_roots=source_roots,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
            extra_build_time_requirements=extra_build_time_requirements,
            extra_build_time_env=extra_build_time_env,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
Example #23
async def package_pyoxidizer_binary(
    pyoxidizer: PyOxidizer,
    field_set: PyOxidizerFieldSet,
    runner_script: PyoxidizerRunnerScript,
    bash: BashBinary,
) -> BuiltPackage:
    direct_deps = await Get(Targets,
                            DependenciesRequest(field_set.dependencies))
    deps_field_sets = await Get(
        FieldSetsPerTarget,
        FieldSetsPerTargetRequest(PackageFieldSet, direct_deps))
    built_packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in deps_field_sets.field_sets)
    wheel_paths = [
        artifact.relpath for built_pkg in built_packages
        for artifact in built_pkg.artifacts
        if artifact.relpath is not None and artifact.relpath.endswith(".whl")
    ]
    if not wheel_paths:
        raise InvalidTargetException(
            softwrap(f"""
                The `{PyOxidizerTarget.alias}` target {field_set.address} must include
                in its `dependencies` field at least one `python_distribution` target that produces a
                `.whl` file. For example, if using `{GenerateSetupField.alias}=True`, then make sure
                `{WheelField.alias}=True`. See {doc_url('python-distributions')}.
                """))

    config_template = None
    if field_set.template.value is not None:
        config_template_source = await Get(
            HydratedSources, HydrateSourcesRequest(field_set.template))
        digest_contents = await Get(DigestContents, Digest,
                                    config_template_source.snapshot.digest)
        config_template = digest_contents[0].content.decode("utf-8")

    config = PyOxidizerConfig(
        executable_name=field_set.address.target_name,
        entry_point=field_set.entry_point.value,
        wheels=wheel_paths,
        template=config_template,
        unclassified_resources=(None if
                                not field_set.unclassified_resources.value else
                                list(field_set.unclassified_resources.value)),
    )
    rendered_config = config.render()
    logger.debug(
        f"Configuration used for {field_set.address}: {rendered_config}")

    pyoxidizer_pex, config_digest = await MultiGet(
        Get(Pex, PexRequest, pyoxidizer.to_pex_request()),
        Get(
            Digest,
            CreateDigest([
                FileContent("pyoxidizer.bzl", rendered_config.encode("utf-8"))
            ])),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            config_digest,
            runner_script.digest,
            *(built_package.digest for built_package in built_packages),
        )),
    )
    pex_process = await Get(
        Process,
        PexProcess(
            pyoxidizer_pex,
            argv=("build", *pyoxidizer.args),
            description=f"Building {field_set.address} with PyOxidizer",
            input_digest=input_digest,
            level=LogLevel.INFO,
            output_directories=("build", ),
        ),
    )
    process_with_caching = dataclasses.replace(
        pex_process,
        argv=(bash.path, runner_script.path, *pex_process.argv),
        append_only_caches={
            **pex_process.append_only_caches,
            "pyoxidizer":
            runner_script.CACHE_PATH,
        },
    )

    result = await Get(ProcessResult, Process, process_with_caching)

    stripped_digest = await Get(Digest,
                                RemovePrefix(result.output_digest, "build"))
    final_snapshot = await Get(
        Snapshot,
        AddPrefix(stripped_digest,
                  field_set.output_path.value_or_default(file_ending=None)),
    )
    return BuiltPackage(
        final_snapshot.digest,
        artifacts=tuple(
            BuiltPackageArtifact(file) for file in final_snapshot.files),
    )
Example #24
async def package_into_image(
    field_set: DockerPackageFieldSet,
    union_membership: UnionMembership,
) -> BuiltPackage:
    """Build a docker image from a 'docker' build target.

    Creates a build context & dockerfile from the build target & its
    dependencies. Then builds & tags that image. (see the module
    docstring for more information)
    """
    target_name = field_set.address.target_name
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([field_set.address])
    )
    component_list = []
    logger.debug("Building Target %s", target_name)
    for field_set_type in union_membership[DockerComponentFieldSet]:
        for target in transitive_targets.dependencies:
            if field_set_type.is_applicable(target):
                logger.debug(
                    "Dependent Target %s applies to as component %s",
                    target.address,
                    field_set_type.__name__,
                )
                component_list.append(field_set_type.create(target))

    components = await MultiGet(
        Get(DockerComponent, DockerComponentFieldSet, fs) for fs in component_list
    )

    source_digests = []
    run_commands = []
    components = sorted(components, key=lambda c: c.order)
    for component in components:
        if component.sources:
            source_digests.append(component.sources)
        run_commands.extend(component.commands)
    source_digest = await Get(Digest, MergeDigests(source_digests))
    application_snapshot = await Get(Snapshot, AddPrefix(source_digest, "application"))

    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Files to be copied into the docker container")
        for file in application_snapshot.files:
            logger.debug("* %s", file)

    dockerfile_contents = _create_dockerfile(
        field_set.base_image.value,
        field_set.workdir.value,
        field_set.image_setup.value,
        run_commands,
        field_set.command.value,
    )
    logger.debug(dockerfile_contents)
    dockerfile = await Get(
        Digest,
        CreateDigest([FileContent("Dockerfile", dockerfile_contents.encode("utf-8"))]),
    )
    # Create the docker build context from all merged files, fetch the docker
    # connection environment variables, and locate the docker executable.
    search_path = ["/bin", "/usr/bin", "/usr/local/bin", "$HOME/"]
    docker_context, docker_env, docker_paths = await MultiGet(
        Get(Digest, MergeDigests([dockerfile, application_snapshot.digest])),
        Get(Environment, EnvironmentRequest(utils.DOCKER_ENV_VARS)),
        Get(
            BinaryPaths,
            BinaryPathRequest(
                binary_name="docker",
                search_path=search_path,
            ),
        ),
    )
    if not docker_paths.first_path:
        raise ValueError(f"Unable to locate Docker binary on paths: {search_path}")
    process_path = docker_paths.first_path.path
    # Build a list of arguments of the form ["-t", "registry/name:tag"]
    # to pass to the docker executable.
    tag_arguments = _build_tag_argument_list(
        target_name, field_set.tags.value or [], field_set.registry.value
    )
    # create the image
    process_args = [process_path, "build"]
    if not logger.isEnabledFor(logging.DEBUG):
        process_args.append("-q")  # only output the hash of the image
    process_args.extend(tag_arguments)
    process_args.append(".")  # use current (sealed) directory as build context
    process_result = await Get(
        ProcessResult,
        Process(
            env=docker_env,
            argv=process_args,
            input_digest=docker_context,
            description=f"Creating Docker Image from {target_name}",
        ),
    )
    logger.info(process_result.stdout.decode())
    output_snapshot = await Get(Snapshot, Digest, process_result.output_digest)
    return BuiltPackage(
        digest=process_result.output_digest,
        artifacts=tuple(BuiltPackageArtifact(f, ()) for f in output_snapshot.files),
    )
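
The `_create_dockerfile` helper used above is not shown in this example. Below is a rough sketch of what a helper with that call signature could look like; it is purely illustrative, and the parameter types and the COPY/RUN ordering are assumptions rather than the original implementation.

import json
from typing import Iterable, Optional


def _create_dockerfile(
    base_image: str,
    workdir: str,
    image_setup: Iterable[str],  # assumed: shell commands run before sources are copied
    run_commands: Iterable[str],  # per-component build commands collected above
    command: Optional[Iterable[str]],  # assumed: exec-form CMD, may be absent
) -> str:
    lines = [f"FROM {base_image}", f"WORKDIR {workdir}"]
    lines.extend(f"RUN {cmd}" for cmd in image_setup or ())
    # The merged source digest was prefixed with "application" above.
    lines.append("COPY application .")
    lines.extend(f"RUN {cmd}" for cmd in run_commands)
    if command:
        lines.append("CMD " + json.dumps(list(command)))
    return "\n".join(lines) + "\n"
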
Example #25
0
async def package_python_awslambda(
        field_set: PythonAwsLambdaFieldSet, lambdex: Lambdex,
        union_membership: UnionMembership) -> BuiltPackage:
    output_filename = field_set.output_path.value_or_default(
        # Lambdas typically use the .zip suffix, so we use that instead of .pex.
        file_ending="zip",
    )

    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # Set the pymalloc ABI flag; it was removed in Python 3.8 (https://bugs.python.org/issue36707).
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    if (py_major, py_minor) == (2, 7):
        platform += "u"

    additional_pex_args = (
        # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
        "--manylinux=manylinux2014",
        # When we're executing Pex on Linux, allow a local interpreter to be resolved if
        # available and matching the AMI platform.
        "--resolve-local-platforms",
    )
    pex_request = PexFromTargetsRequest(
        addresses=[field_set.address],
        internal_only=False,
        output_filename=output_filename,
        platforms=PexPlatforms([platform]),
        additional_args=additional_pex_args,
        additional_lockfile_args=additional_pex_args,
    )

    lambdex_request = PexRequest(
        output_filename="lambdex.pex",
        internal_only=True,
        requirements=lambdex.pex_requirements(),
        interpreter_constraints=lambdex.interpreter_constraints,
        main=lambdex.main,
    )

    lambdex_pex, pex_result, handler, transitive_targets = await MultiGet(
        Get(VenvPex, PexRequest, lambdex_request),
        Get(Pex, PexFromTargetsRequest, pex_request),
        Get(ResolvedPythonAwsHandler,
            ResolvePythonAwsHandlerRequest(field_set.handler)),
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])),
    )

    # Warn if users depend on `files` targets, which won't be included in the PEX;
    # this is a common gotcha.
    file_tgts = targets_with_sources_types([FileSourceField],
                                           transitive_targets.dependencies,
                                           union_membership)
    if file_tgts:
        files_addresses = sorted(tgt.address.spec for tgt in file_tgts)
        logger.warning(
            f"The `python_awslambda` target {field_set.address} transitively depends on the below "
            "`files` targets, but Pants will not include them in the built Lambda. Filesystem APIs "
            "like `open()` are not able to load files within the binary itself; instead, they "
            "read from the current working directory."
            f"\n\nInstead, use `resources` targets. See {doc_url('resources')}."
            f"\n\nFiles targets dependencies: {files_addresses}")

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            lambdex_pex,
            argv=("build", "-e", handler.val, output_filename),
            input_digest=pex_result.digest,
            output_files=(output_filename, ),
            description=f"Setting up handler in {output_filename}",
        ),
    )
    artifact = BuiltPackageArtifact(
        output_filename,
        extra_log_lines=(
            f"    Runtime: {field_set.runtime.value}",
            # The AWS-facing handler function is always lambdex_handler.handler, which is the
            # wrapper injected by lambdex that manages invocation of the actual handler.
            "    Handler: lambdex_handler.handler",
        ),
    )
    return BuiltPackage(digest=result.output_digest, artifacts=(artifact, ))
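
For reference, the platform string assembled above can be reproduced in isolation. The sketch below simply restates the logic of the example; the function name is illustrative.

def lambda_pex_platform(py_major: int, py_minor: int) -> str:
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # The pymalloc "m" ABI flag was dropped in CPython 3.8 (https://bugs.python.org/issue36707).
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    # CPython 2.7 builds additionally carried the wide-unicode "u" flag.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    return platform


# lambda_pex_platform(3, 7) -> "linux_x86_64-cp-37-cp37m"
# lambda_pex_platform(3, 8) -> "linux_x86_64-cp-38-cp38"
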
Example #26
0
async def package_go_binary(field_set: GoBinaryFieldSet) -> BuiltPackage:
    main_address = field_set.main_address.value or ""
    main_go_package_address = await Get(
        Address,
        AddressInput,
        AddressInput.parse(main_address,
                           relative_to=field_set.address.spec_path),
    )
    wrapped_main_go_package_target = await Get(WrappedTarget, Address,
                                               main_go_package_address)
    main_go_package_target = wrapped_main_go_package_target.target
    built_main_go_package = await Get(
        BuiltGoPackage,
        BuildGoPackageRequest(address=main_go_package_target.address,
                              is_main=True))

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest(roots=[main_go_package_target.address]))
    buildable_deps = [
        tgt for tgt in transitive_targets.dependencies
        if is_first_party_package_target(tgt)
        or is_third_party_package_target(tgt)
    ]

    built_transitive_go_deps_requests = [
        Get(BuiltGoPackage, BuildGoPackageRequest(address=tgt.address))
        for tgt in buildable_deps
    ]
    built_transitive_go_deps = await MultiGet(built_transitive_go_deps_requests)

    gathered_imports = await Get(
        GatheredImports,
        GatherImportsRequest(
            packages=FrozenOrderedSet(built_transitive_go_deps),
            include_stdlib=True,
        ),
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            [gathered_imports.digest, built_main_go_package.object_digest]))

    output_filename = PurePath(
        field_set.output_path.value_or_default(file_ending=None))
    result = await Get(
        ProcessResult,
        GoSdkProcess(
            input_digest=input_digest,
            command=(
                "tool",
                "link",
                "-importcfg",
                "./importcfg",
                "-o",
                f"./{output_filename.name}",
                "-buildmode=exe",  # seen in `go build -x` output
                "./__pkg__.a",
            ),
            description="Link Go binary.",
            output_files=(f"./{output_filename.name}", ),
        ),
    )

    renamed_output_digest = await Get(
        Digest, AddPrefix(result.output_digest, str(output_filename.parent)))

    artifact = BuiltPackageArtifact(relpath=str(output_filename))
    return BuiltPackage(digest=renamed_output_digest, artifacts=(artifact, ))
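
The -importcfg file consumed by `go tool link` above maps import paths to compiled package archives, one `packagefile <import-path>=<archive>` line per package. The following is a small sketch of how such a file could be rendered from a mapping like the one GatheredImports represents; the function and argument names are illustrative, not the original rule.

from typing import Mapping


def render_importcfg(import_paths_to_archives: Mapping[str, str]) -> str:
    lines = [
        f"packagefile {import_path}={archive}"
        for import_path, archive in sorted(import_paths_to_archives.items())
    ]
    return "\n".join(lines) + "\n"


# render_importcfg({"fmt": "__pkgs__/fmt/__pkg__.a"})
# -> "packagefile fmt=__pkgs__/fmt/__pkg__.a\n"
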