def test_parsed_injectables(files: list[tuple[str, str]], rule_runner: RuleRunner) -> None:
    """The parser reports FROM build args and all COPY source paths.

    COPY instructions using `--from=<stage>` are skipped, as their sources come
    from another build stage rather than the build context.
    """
    dockerfile = dedent(
        """\
        ARG BASE_IMAGE=:base
        FROM $BASE_IMAGE
        COPY some.target/binary.pex some.target/tool.pex /bin
        COPY --from=scratch this.is/ignored.pex /opt
        COPY binary another/cli.pex tool /bin
        """
    )
    files_to_write = {}
    for filename, template in files:
        files_to_write[filename] = template.format(dockerfile=dockerfile)
    rule_runner.write_files(files_to_write)

    info = rule_runner.request(DockerfileInfo, [DockerfileInfoRequest(Address("test"))])
    assert info.from_image_build_args.to_dict() == {"BASE_IMAGE": ":base"}
    assert info.copy_source_paths == (
        "some.target/binary.pex",
        "some.target/tool.pex",
        "binary",
        "another/cli.pex",
        "tool",
    )
async def inject_docker_dependencies(request: InjectDockerDependencies) -> InjectedDependencies:
    """Inspects COPY instructions in the Dockerfile for references to known packagable
    targets, and injects dependencies on those that exist and can be packaged."""
    dockerfile_info = await Get(
        DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)
    )

    # Resolve every address-like COPY source into a putative target address.
    putative_addresses = await Get(
        Addresses,
        UnparsedAddressInputs(
            dockerfile_info.putative_target_addresses,
            owning_address=dockerfile_info.address,
        ),
    )

    # Narrow down to those putative addresses that actually exist: query all sibling
    # addresses in the involved directories, then intersect.
    spec_paths = {addr.spec_path for addr in putative_addresses}
    known_addresses = await Get(
        Addresses, AddressSpecs(MaybeEmptySiblingAddresses(path) for path in spec_paths)
    )
    known = set(known_addresses)
    targets = await Get(
        Targets, Addresses(addr for addr in putative_addresses if addr in known)
    )

    # Of the existing targets, only keep those we can "package".
    field_sets_per_target = await Get(
        FieldSetsPerTarget, FieldSetsPerTargetRequest(PackageFieldSet, targets)
    )
    packageable_addresses = [
        field_sets[0].address for field_sets in field_sets_per_target.collection if field_sets
    ]
    return InjectedDependencies(Addresses(packageable_addresses))
async def run_hadolint(request: HadolintRequest, hadolint: Hadolint) -> LintResults:
    """Run hadolint on the Dockerfiles of all field sets in the request.

    Returns an empty result set when the tool is configured to be skipped.
    """
    if hadolint.skip:
        return LintResults([], linter_name=request.name)

    downloaded_hadolint, config_files = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest, hadolint.get_request(Platform.current)),
        Get(ConfigFiles, ConfigFilesRequest, hadolint.config_request()),
    )
    dockerfile_infos = await MultiGet(
        Get(DockerfileInfo, DockerfileInfoRequest(field_set.address))
        for field_set in request.field_sets
    )

    # The sandbox needs the tool, any config file, and every Dockerfile to lint.
    digests = [downloaded_hadolint.digest, config_files.snapshot.digest]
    digests.extend(info.digest for info in dockerfile_infos)
    input_digest = await Get(Digest, MergeDigests(digests))

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[downloaded_hadolint.exe, *generate_argv(dockerfile_infos, hadolint)],
            # Hadolint tries to read a configuration file from a few locations on the system:
            # https://github.com/hadolint/hadolint/blob/43d2bfe9f71dea9ddd203d5bdbd2cc1fb512e4dd/src/Hadolint/Config/Configfile.hs#L75-L101
            #
            # Setting HOME to the empty string prevents any system configuration from being
            # picked up (keeping results reproducible machine-to-machine), and also works
            # around a hadolint SIGSEGV when HOME is unset (as opposed to empty):
            # https://github.com/hadolint/hadolint/issues/741
            #
            # See https://github.com/pantsbuild/pants/issues/13735 for more details.
            env={"HOME": ""},
            input_digest=input_digest,
            description=f"Run `hadolint` on {pluralize(len(dockerfile_infos), 'Dockerfile')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResults(
        [LintResult.from_fallible_process_result(process_result)], linter_name=request.name
    )
def test_from_image_build_arg_names(rule_runner: RuleRunner) -> None:
    """A build ARG consumed by a FROM instruction is reported by name."""
    rule_runner.write_files(
        {
            "test/upstream/BUILD": "docker_image(name='image')",
            "test/upstream/Dockerfile": "FROM upstream",
            "test/downstream/BUILD": "docker_image(name='image')",
            "test/downstream/Dockerfile": dedent(
                """\
                ARG BASE_IMAGE=test/upstream:image
                FROM ${BASE_IMAGE} AS base
                """
            ),
        }
    )
    address = Address("test/downstream", target_name="image")
    dockerfile_info = rule_runner.request(DockerfileInfo, [DockerfileInfoRequest(address)])
    assert dockerfile_info.from_image_build_arg_names == ("BASE_IMAGE",)
def test_copy_source_references(rule_runner: RuleRunner) -> None:
    """COPY source paths are collected in order; ADD and bare COPY lines are ignored."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image()",
            "test/Dockerfile": dedent(
                """\
                FROM base
                COPY a b /
                COPY --option c/d e/f/g /h
                ADD ignored
                COPY j k /
                COPY
                """
            ),
        }
    )
    dockerfile_info = rule_runner.request(
        DockerfileInfo, [DockerfileInfoRequest(Address("test"))]
    )
    assert dockerfile_info.copy_source_paths == ("a", "b", "c/d", "e/f/g", "j", "k")
async def inject_docker_dependencies(
    request: InjectDockerDependencies, all_packageable_targets: AllPackageableTargets
) -> InjectedDependencies:
    """Inspects the Dockerfile for references to known packagable targets.

    Injects a dependency on every packageable target that is either referenced as a
    FROM base image via a build arg, or whose default output path matches one of the
    Dockerfile's COPY sources.
    """
    dockerfile_info = await Get(
        DockerfileInfo, DockerfileInfoRequest(request.dependencies_field.address)
    )

    # Targets referenced as base images through ARG-provided values (e.g. FROM $BASE_IMAGE).
    resolved_from_images = await Get(
        Addresses,
        UnparsedAddressInputs(
            (value for value in dockerfile_info.from_image_build_args.to_dict().values() if value),
            owning_address=dockerfile_info.address,
            description_of_origin="TODO(#14468)",
        ),
    )
    putative_image_addresses = set(resolved_from_images)

    copy_paths = set(dockerfile_info.copy_source_paths)

    # NB: There's no easy way of knowing a package's output file ending up front, as it
    # could be absent or dynamic. Instead of forcing clients to tell us, we consider every
    # suffix that appears among the COPY sources. In rare cases we over-infer, but that is
    # relatively harmless.
    # NB: `PurePath.suffix` includes the leading "." while `OutputPathField` doesn't expect
    # it (given "" it would leave a trailing "."), hence the `[1:] or None`.
    candidate_file_endings = {PurePath(path).suffix[1:] or None for path in copy_paths}

    inferred = []
    for target in all_packageable_targets:
        if target.address in putative_image_addresses:
            inferred.append(target.address)
            continue
        output_path_field = target.get(OutputPathField)
        if any(
            output_path_field.value_or_default(file_ending=file_ending) in copy_paths
            for file_ending in candidate_file_endings
        ):
            inferred.append(target.address)
    return InjectedDependencies(Addresses(inferred))
def test_inconsistent_build_args(rule_runner: RuleRunner) -> None:
    """Re-declaring an ARG with a different default value is a parse error."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image(name='image')",
            "test/Dockerfile": dedent(
                """\
                FROM image1:latest
                ARG OPT_A=default_1
                FROM image2:latest
                ARG OPT_A=default_2
                """
            ),
        }
    )
    err_msg = (
        r"Error while parsing test/Dockerfile for the test:image target: DockerBuildArgs: "
        r"duplicated 'OPT_A' with different values: 'default_1' != 'default_2'\."
    )
    with pytest.raises(ExecutionError, match=err_msg):
        rule_runner.request(
            DockerfileInfo, [DockerfileInfoRequest(Address("test", target_name="image"))]
        )
def test_build_args(rule_runner: RuleRunner) -> None:
    """All ARG declarations are collected, with their default values when present."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image()",
            "test/Dockerfile": dedent(
                """\
                ARG registry
                FROM ${registry}/image:latest
                ARG OPT_A
                ARG OPT_B=default_b_value
                ENV A=${OPT_A:-A_value}
                ENV B=${OPT_B}
                """
            ),
        }
    )
    dockerfile_info = rule_runner.request(
        DockerfileInfo, [DockerfileInfoRequest(Address("test"))]
    )
    expected = DockerBuildArgs.from_strings("registry", "OPT_A", "OPT_B=default_b_value")
    assert dockerfile_info.build_args == expected
def test_baseimage_tags(rule_runner: RuleRunner) -> None:
    """Each FROM stage reports its image tag; digest-only pins yield no tag."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image()",
            "test/Dockerfile": dedent(
                """\
                FROM untagged
                FROM tagged:v1.2
                FROM digest@sha256:d1f0463b35135852308ea815c2ae54c1734b876d90288ce35828aeeff9899f9d
                FROM gcr.io/tekton-releases/github.com/tektoncd/operator/cmd/kubernetes/operator:v0.54.0@sha256:d1f0463b35135852308ea815c2ae54c1734b876d90288ce35828aeeff9899f9d
                """
            ),
        }
    )
    dockerfile_info = rule_runner.request(
        DockerfileInfo, [DockerfileInfoRequest(Address("test"))]
    )
    assert dockerfile_info.version_tags == (
        "stage0 latest",
        "stage1 v1.2",
        # Stage 2 is not pinned with a tag.
        "stage3 v0.54.0",
    )
def test_putative_target_addresses(files: list[tuple[str, str]], rule_runner: RuleRunner) -> None:
    """COPY sources that look like packaged outputs are translated into target addresses.

    Sources copied `--from` another stage are skipped.
    """
    dockerfile = dedent(
        """\
        FROM base
        COPY some.target/binary.pex some.target/tool.pex /bin
        COPY --from=scratch this.is/ignored.pex /opt
        COPY binary another/cli.pex tool /bin
        """
    )
    files_to_write = {}
    for filename, template in files:
        files_to_write[filename] = template.format(dockerfile=dockerfile)
    rule_runner.write_files(files_to_write)

    info = rule_runner.request(DockerfileInfo, [DockerfileInfoRequest(Address("test"))])
    assert info.putative_target_addresses == (
        "some/target:binary",
        "some/target:tool",
        "another:cli",
    )
async def create_docker_build_context(
    request: DockerBuildContextRequest, docker_options: DockerOptions
) -> DockerBuildContext:
    """Assemble everything needed to run `docker build` for the requested image target.

    Collects the Dockerfile, context files from the image's direct dependencies, and the
    built packages of all transitive dependencies into a single snapshot, along with the
    build args and build environment for the image.

    NOTE(review): `docker_options` is unused in this body — presumably kept for rule-graph
    wiring; confirm before removing.
    """
    # Get all targets to include in context.
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([request.address]))
    # The requested docker image target itself is the (single) root of the transitive closure.
    docker_image = transitive_targets.roots[0]

    # Get all dependencies for the root target.
    root_dependencies = await Get(Targets, DependenciesRequest(docker_image.get(Dependencies)))

    # Get all file sources from the root dependencies. That includes any non-file sources that can
    # be "codegen"ed into a file source.
    sources_request = Get(
        SourceFiles,
        SourceFilesRequest(
            sources_fields=[tgt.get(SourcesField) for tgt in root_dependencies],
            for_sources_types=(
                DockerContextFilesSourcesField,
                FileSourceField,
            ),
            enable_codegen=True,
        ),
    )

    # Field sets for everything in the transitive closure that can be packaged.
    embedded_pkgs_per_target_request = Get(
        FieldSetsPerTarget,
        FieldSetsPerTargetRequest(PackageFieldSet, transitive_targets.dependencies),
    )

    sources, embedded_pkgs_per_target, dockerfile_info = await MultiGet(
        sources_request,
        embedded_pkgs_per_target_request,
        Get(DockerfileInfo, DockerfileInfoRequest(docker_image.address)),
    )

    # Package binary dependencies for build context.
    embedded_pkgs = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in embedded_pkgs_per_target.field_sets
        # Exclude docker images, unless build_upstream_images is true.
        if request.build_upstream_images
        or not isinstance(getattr(field_set, "source", None), DockerImageSourceField)
    )

    if request.build_upstream_images:
        images_str = ", ".join(
            a.tags[0] for p in embedded_pkgs for a in p.artifacts if isinstance(a, BuiltDockerImage)
        )
        if images_str:
            logger.debug(f"Built upstream Docker images: {images_str}")
        else:
            logger.debug("Did not build any upstream Docker images")

    packages_str = ", ".join(a.relpath for p in embedded_pkgs for a in p.artifacts if a.relpath)
    if packages_str:
        logger.debug(f"Built packages for Docker image: {packages_str}")
    else:
        logger.debug("Did not build any packages for Docker image")

    embedded_pkgs_digest = [built_package.digest for built_package in embedded_pkgs]
    all_digests = (dockerfile_info.digest, sources.snapshot.digest, *embedded_pkgs_digest)

    # Merge all digests to get the final docker build context digest.
    context_request = Get(Snapshot, MergeDigests(d for d in all_digests if d))

    # Requests for build args and env.
    build_args_request = Get(DockerBuildArgs, DockerBuildArgsRequest(docker_image))
    build_env_request = Get(DockerBuildEnvironment, DockerBuildEnvironmentRequest(docker_image))
    context, build_args, build_env = await MultiGet(
        context_request, build_args_request, build_env_request
    )

    if request.build_upstream_images:
        # Update build arg values for FROM image build args.

        # Get the FROM image build args with defined values in the Dockerfile.
        dockerfile_build_args = {
            arg_name: arg_value
            for arg_name, arg_value in dockerfile_info.build_args.to_dict().items()
            if arg_value and arg_name in dockerfile_info.from_image_build_arg_names
        }
        # Parse the build args values into Address instances.
        from_image_addresses = await Get(
            Addresses,
            UnparsedAddressInputs(
                dockerfile_build_args.values(),
                owning_address=dockerfile_info.address,
            ),
        )
        # Map those addresses to the corresponding built image ref (tag).
        # NB: zipping `field_sets` with `embedded_pkgs` relies on them being parallel
        # sequences; that holds here because when `build_upstream_images` is true, the
        # packaging comprehension above excluded nothing.
        address_to_built_image_tag = {
            field_set.address: image.tags[0]
            for field_set, built in zip(embedded_pkgs_per_target.field_sets, embedded_pkgs)
            for image in built.artifacts
            if isinstance(image, BuiltDockerImage)
        }
        # Create the FROM image build args.
        # NB: relies on `dockerfile_build_args` preserving insertion order so its keys
        # line up with the resolved `from_image_addresses`.
        from_image_build_args = [
            f"{arg_name}={address_to_built_image_tag[addr]}"
            for arg_name, addr in zip(dockerfile_build_args.keys(), from_image_addresses)
        ]
        # Merge all build args.
        build_args = DockerBuildArgs.from_strings(*build_args, *from_image_build_args)

    return DockerBuildContext.create(
        build_args=build_args,
        snapshot=context,
        dockerfile_info=dockerfile_info,
        build_env=build_env,
    )