Example #1
    def test_generate_sources(self) -> None:
        protocol_sources = AvroSources(["*.avro"], address=self.address)
        assert protocol_sources.can_generate(FortranSources, self.union_membership) is True

        # First, get the original protocol sources.
        hydrated_protocol_sources = self.request_single_product(
            HydratedSources, HydrateSourcesRequest(protocol_sources)
        )
        assert hydrated_protocol_sources.snapshot.files == ("src/avro/f.avro",)

        # Test directly feeding the protocol sources into the codegen rule.
        wrapped_tgt = self.request_single_product(WrappedTarget, self.address)
        generated_sources = self.request_single_product(
            GeneratedSources,
            GenerateFortranFromAvroRequest(hydrated_protocol_sources.snapshot, wrapped_tgt.target),
        )
        assert generated_sources.snapshot.files == ("src/fortran/f.f95",)

        # Test that HydrateSourcesRequest can also be used.
        generated_via_hydrate_sources = self.request_single_product(
            HydratedSources,
            HydrateSourcesRequest(
                protocol_sources, for_sources_types=[FortranSources], enable_codegen=True
            ),
        )
        assert generated_via_hydrate_sources.snapshot.files == ("src/fortran/f.f95",)
        assert generated_via_hydrate_sources.sources_type == FortranSources
Example #2
async def generate_java_from_wsdl(
        request: GenerateJavaFromWsdlRequest) -> GeneratedSources:
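    # Hydrate the target's WSDL protocol sources.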
    sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(request.protocol_target[WsdlSourceField]))

    target_package = request.protocol_target[JavaPackageField].value
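    # Compile each WSDL source file to Java in parallel.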
    compile_results = await MultiGet(
        Get(
            CompiledWsdlSource,
            CompileWsdlSourceRequest(
                sources.snapshot.digest,
                path=path,
                module=request.protocol_target[JavaModuleField].value,
                package=target_package,
            ),
        ) for path in sources.snapshot.files)

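    # Merge the per-file outputs and look up the target's source root concurrently.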
    merged_output_digests, source_root = await MultiGet(
        Get(Digest, MergeDigests([r.output_digest for r in compile_results])),
        Get(SourceRoot, SourceRootRequest,
            SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(merged_output_digests, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, merged_output_digests)
    )
    return GeneratedSources(source_root_restored)
Example #3
async def determine_go_mod_info(request: GoModInfoRequest) -> GoModInfo:
    if isinstance(request.source, Address):
        wrapped_target = await Get(
            WrappedTarget,
            WrappedTargetRequest(request.source,
                                 description_of_origin="<go mod info rule>"),
        )
        sources_field = wrapped_target.target[GoModSourcesField]
    else:
        sources_field = request.source
    go_mod_path = sources_field.go_mod_path
    go_mod_dir = os.path.dirname(go_mod_path)

    # Get the `go.mod` (and `go.sum`) sources.
    hydrated_sources = await Get(HydratedSources,
                                 HydrateSourcesRequest(sources_field))
    sources_digest = hydrated_sources.snapshot.digest

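    # Parse the module metadata with `go mod edit -json`.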
    mod_json = await Get(
        ProcessResult,
        GoSdkProcess(
            command=("mod", "edit", "-json"),
            input_digest=sources_digest,
            working_dir=go_mod_dir,
            description=f"Parse {go_mod_path}",
        ),
    )
    module_metadata = json.loads(mod_json.stdout)
    return GoModInfo(
        import_path=module_metadata["Module"]["Path"],
        digest=sources_digest,
        mod_path=go_mod_path,
        minimum_go_version=module_metadata.get("Go"),
    )
Example #4
async def determine_go_mod_info(request: GoModInfoRequest) -> GoModInfo:
    wrapped_target = await Get(WrappedTarget, Address, request.address)
    sources_field = wrapped_target.target[GoModSourcesField]
    go_mod_path = sources_field.go_mod_path
    go_mod_dir = os.path.dirname(go_mod_path)

    # Get the `go.mod` (and `go.sum`) and strip so the file has no directory prefix.
    hydrated_sources = await Get(HydratedSources,
                                 HydrateSourcesRequest(sources_field))
    sources_digest = hydrated_sources.snapshot.digest
    stripped_source_get = Get(Digest, RemovePrefix(sources_digest, go_mod_dir))

    mod_json_get = Get(
        ProcessResult,
        GoSdkProcess(
            command=("mod", "edit", "-json"),
            input_digest=sources_digest,
            working_dir=go_mod_dir,
            description=f"Parse {go_mod_path}",
        ),
    )

    mod_json, stripped_sources = await MultiGet(mod_json_get,
                                                stripped_source_get)
    module_metadata = json.loads(mod_json.stdout)
    return GoModInfo(
        import_path=module_metadata["Module"]["Path"],
        digest=sources_digest,
        stripped_digest=stripped_sources,
        minimum_go_version=module_metadata.get("Go"),
    )
Example #5
async def render_war_deployment_descriptor(
    request: RenderWarDeploymentDescriptorRequest,
) -> RenderedWarDeploymentDescriptor:
    descriptor_sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(request.descriptor),
    )

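    # The `descriptor` field must hydrate to exactly one regular file.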
    descriptor_sources_entries = await Get(DigestEntries, Digest,
                                           descriptor_sources.snapshot.digest)
    if len(descriptor_sources_entries) != 1:
        raise AssertionError(
            f"Expected `descriptor` field for {request.descriptor.address} to only refer to one file."
        )
    descriptor_entry = descriptor_sources_entries[0]
    if not isinstance(descriptor_entry, FileEntry):
        raise AssertionError(
            f"Expected `descriptor` field for {request.descriptor.address} to produce a file."
        )

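    # Re-home the descriptor at the conventional __war__/WEB-INF/web.xml path.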
    descriptor_digest = await Get(
        Digest,
        CreateDigest([
            FileEntry("__war__/WEB-INF/web.xml", descriptor_entry.file_digest)
        ]),
    )

    return RenderedWarDeploymentDescriptor(descriptor_digest)
Example #6
async def relocate_files(
        request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    # Unlike normal codegen, we operate on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#13086): Because we're using `Targets` instead of `UnexpandedTargets`, the
    #  `files` target generator gets replaced by its generated `file` targets. That replacement is
    #  necessary because we only hydrate sources for `FileSourcesField`, which is only for the
    #  `file` target.  That's really subtle!
    original_file_targets = await Get(
        Targets,
        UnparsedAddressInputs,
        request.protocol_target.get(
            RelocatedFilesOriginalTargetsField).to_unparsed_address_inputs(),
    )
    original_files_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(tgt.get(SourcesField),
                                  for_sources_types=(FileSourceField, )),
        ) for tgt in original_file_targets)
    snapshot = await Get(
        Snapshot,
        MergeDigests(sources.snapshot.digest
                     for sources in original_files_sources))

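    # Optionally relocate: strip the `src` prefix, then add the `dest` prefix.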
    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await Get(Snapshot, RemovePrefix(snapshot.digest, src_val))
    if dest_val:
        snapshot = await Get(Snapshot, AddPrefix(snapshot.digest, dest_val))
    return GeneratedSources(snapshot)
Example #7
async def resolve_specs_snapshot(
        specs: Specs, global_options: GlobalOptions) -> SpecsSnapshot:
    """Resolve all files matching the given specs.

    Address specs will use their `Sources` field, and Filesystem specs will use whatever args were
    given. Filesystem specs may safely refer to files with no owning target.
    """
    targets = await Get(Targets, AddressSpecs, specs.address_specs)
    all_hydrated_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(tgt[Sources]))
        for tgt in targets if tgt.has_field(Sources))

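    # Resolve filesystem specs straight from the filesystem; they may match files with no owning target.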
    filesystem_specs_digest = (
        await Get(
            Digest,
            PathGlobs,
            specs.filesystem_specs.to_path_globs(
                global_options.options.owners_not_found_behavior.to_glob_match_error_behavior()
            ),
        )
        if specs.filesystem_specs
        else None
    )

    # NB: We merge into a single snapshot to avoid the same files being duplicated if they were
    # covered both by address specs and filesystem specs.
    digests = [
        hydrated_sources.snapshot.digest
        for hydrated_sources in all_hydrated_sources
    ]
    if filesystem_specs_digest:
        digests.append(filesystem_specs_digest)
    result = await Get(Snapshot, MergeDigests(digests))
    return SpecsSnapshot(result)
Example #8
async def relocate_files(request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    # Unlike normal codegen, we operate on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#10915): using `await Get(Addresses, UnparsedAddressInputs)` causes a graph failure.
    original_files_targets = await MultiGet(
        Get(
            WrappedTarget,
            AddressInput,
            AddressInput.parse(v, relative_to=request.protocol_target.address.spec_path),
        )
        for v in (
            request.protocol_target.get(RelocatedFilesOriginalTargets)
            .to_unparsed_address_inputs()
            .values
        )
    )
    original_files_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(wrapped_tgt.target.get(Sources)))
        for wrapped_tgt in original_files_targets
    )
    snapshot = await Get(
        Snapshot, MergeDigests(sources.snapshot.digest for sources in original_files_sources)
    )

    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await Get(Snapshot, RemovePrefix(snapshot.digest, src_val))
    if dest_val:
        snapshot = await Get(Snapshot, AddPrefix(snapshot.digest, dest_val))
    return GeneratedSources(snapshot)
Example #9
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    hydrated_sources = await Get(HydratedSources,
                                 HydrateSourcesRequest(request.sources_field))

    paths = OrderedSet(filename for filename in hydrated_sources.snapshot.files
                       if filename.endswith(".tf"))
    result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=hydrated_sources.snapshot.digest,
            paths=tuple(paths),
        ),
    )
    candidate_spec_paths = [
        line for line in result.stdout.decode("utf-8").split("\n") if line
    ]

    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidate_targets = await Get(
        Targets,
        AddressSpecs([
            MaybeEmptySiblingAddresses(path) for path in candidate_spec_paths
        ]))
    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1 terraform_module
    # per directory.
    terraform_module_addresses = [
        tgt.address for tgt in candidate_targets
        if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(terraform_module_addresses)
Example #10
async def infer_protobuf_dependencies(
    request: InferProtobufDependencies, protobuf_mapping: ProtobufMapping, protoc: Protoc
) -> InferredDependencies:
    if not protoc.dependency_inference:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    address = request.sources_field.address
    wrapped_tgt = await Get(WrappedTarget, Address, address)
    explicitly_provided_deps, hydrated_sources = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(HydratedSources, HydrateSourcesRequest(request.sources_field)),
    )
    digest_contents = await Get(DigestContents, Digest, hydrated_sources.snapshot.digest)

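    # Map each imported path back to the target that owns it, warning on ambiguity.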
    result: OrderedSet[Address] = OrderedSet()
    for file_content in digest_contents:
        for import_path in parse_proto_imports(file_content.content.decode()):
            unambiguous = protobuf_mapping.mapping.get(import_path)
            ambiguous = protobuf_mapping.ambiguous_modules.get(import_path)
            if unambiguous:
                result.add(unambiguous)
            elif ambiguous:
                explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                    ambiguous,
                    address,
                    import_reference="file",
                    context=(
                        f"The target {address} imports `{import_path}` in the file "
                        f"{file_content.path}"
                    ),
                )
                maybe_disambiguated = explicitly_provided_deps.disambiguated_via_ignores(ambiguous)
                if maybe_disambiguated:
                    result.add(maybe_disambiguated)
    return InferredDependencies(sorted(result), sibling_dependencies_inferrable=True)
Example #11
async def map_import_paths_of_all_go_protobuf_targets(
    targets: AllProtobufTargets,
) -> GoProtobufImportPathMapping:
    sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                tgt[ProtobufSourceField],
                for_sources_types=(ProtobufSourceField, ),
                enable_codegen=True,
            ),
        ) for tgt in targets)

    all_contents = await MultiGet(
        Get(DigestContents, Digest, source.snapshot.digest)
        for source in sources)

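    # Map each file's declared Go import path to the protobuf targets that declare it.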
    go_protobuf_targets: dict[str, set[Address]] = defaultdict(set)
    for tgt, contents in zip(targets, all_contents):
        if not contents:
            continue
        if len(contents) > 1:
            raise AssertionError(
                f"Protobuf target `{tgt.address}` mapped to more than one source file."
            )
        import_path = parse_go_package_option(contents[0].content)
        if not import_path:
            continue
        go_protobuf_targets[import_path].add(tgt.address)

    return GoProtobufImportPathMapping(
        FrozenDict(
            {ip: tuple(addrs)
             for ip, addrs in go_protobuf_targets.items()}))
Example #12
def assert_files_generated(
    rule_runner: RuleRunner,
    address: Address,
    *,
    expected_files: list[str],
    source_roots: list[str],
    mypy: bool = False,
    extra_args: list[str] | None = None,
) -> None:
    args = [
        f"--source-root-patterns={repr(source_roots)}",
        "--no-python-protobuf-infer-runtime-dependency",
        *(extra_args or ()),
    ]
    if mypy:
        args.append("--python-protobuf-mypy-plugin")
    rule_runner.set_options(args, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    tgt = rule_runner.get_target(address)
    protocol_sources = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSourceField])])
    generated_sources = rule_runner.request(
        GeneratedSources,
        [GeneratePythonFromProtobufRequest(protocol_sources.snapshot, tgt)],
    )
    assert set(generated_sources.snapshot.files) == set(expected_files)
Example #13
async def analyze_first_party_package(
    request: FirstPartyPkgAnalysisRequest,
    analyzer: PackageAnalyzerSetup,
    golang_subsystem: GolangSubsystem,
) -> FallibleFirstPartyPkgAnalysis:
    wrapped_target, import_path_info, owning_go_mod = await MultiGet(
        Get(WrappedTarget, Address, request.address),
        Get(FirstPartyPkgImportPath, FirstPartyPkgImportPathRequest(request.address)),
        Get(OwningGoMod, OwningGoModRequest(request.address)),
    )
    go_mod_info = await Get(GoModInfo, GoModInfoRequest(owning_go_mod.address))

    pkg_sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(wrapped_target.target[GoPackageSourcesField]),
    )

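    # Combine the package's sources with the analyzer binary into a single input digest.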
    input_digest = await Get(Digest, MergeDigests([pkg_sources.snapshot.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, request.address.spec_path or "."),
            input_digest=input_digest,
            description=f"Determine metadata for {request.address}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )
    return FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=request.address.spec_path,
        import_path=import_path_info.import_path,
        minimum_go_version=go_mod_info.minimum_go_version or "",
        description_of_source=f"first-party Go package `{request.address}`",
    )
Example #14
async def create_python_binary(
    field_set: PythonBinaryFieldSet, python_binary_defaults: PythonBinaryDefaults
) -> CreatedBinary:
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        #  we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ())
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files
        )
    output_filename = f"{field_set.address.target_name}.pex"
    two_step_pex = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                entry_point=entry_point,
                platforms=PexPlatforms.create_from_platforms_field(field_set.platforms),
                output_filename=output_filename,
                additional_args=field_set.generate_additional_args(python_binary_defaults),
            )
        ),
    )
    pex = two_step_pex.pex
    return CreatedBinary(digest=pex.digest, binary_name=pex.output_filename)
Example #15
async def infer_terraform_module_dependencies(
    request: InferTerraformModuleDependenciesRequest,
) -> InferredDependencies:
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))

    paths = OrderedSet(
        filename for filename in hydrated_sources.snapshot.files if filename.endswith(".tf")
    )
    result = await Get(
        ProcessResult,
        ParseTerraformModuleSources(
            sources_digest=hydrated_sources.snapshot.digest,
            paths=tuple(paths),
        ),
    )
    candidate_spec_paths = [line for line in result.stdout.decode("utf-8").split("\n") if line]

    # For each path, see if there is a `terraform_module` target at the specified spec_path.
    candidate_targets = await Get(
        Targets,
        RawSpecs(
            dir_globs=tuple(DirGlobSpec(path) for path in candidate_spec_paths),
            unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
            description_of_origin="the `terraform_module` dependency inference rule",
        ),
    )
    # TODO: Need to either implement the standard ambiguous dependency logic or ban >1 terraform_module
    # per directory.
    terraform_module_addresses = [
        tgt.address for tgt in candidate_targets if tgt.has_field(TerraformModuleSourcesField)
    ]
    return InferredDependencies(terraform_module_addresses)
Example #16
def hydrate(sources_cls: Type[Sources],
            sources: Iterable[str]) -> HydratedSources:
    return self.request_single_product(
        HydratedSources,
        HydrateSourcesRequest(
            sources_cls(sources, address=Address.parse(":example"))),
    )
Example #17
def assert_files_generated(
    rule_runner: RuleRunner,
    spec: str,
    *,
    expected_files: List[str],
    source_roots: List[str],
    mypy: bool = False,
) -> None:
    options = [
        "--backend-packages=pants.backend.codegen.protobuf.python",
        f"--source-root-patterns={repr(source_roots)}",
    ]
    if mypy:
        options.append("--python-protobuf-mypy-plugin")
    rule_runner.set_options(
        options,
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    tgt = rule_runner.get_target(Address(spec))
    protocol_sources = rule_runner.request(
        HydratedSources, [HydrateSourcesRequest(tgt[ProtobufSources])])
    generated_sources = rule_runner.request(
        GeneratedSources,
        [GeneratePythonFromProtobufRequest(protocol_sources.snapshot, tgt)],
    )
    assert set(generated_sources.snapshot.files) == set(expected_files)
Example #18
async def strip_source_roots_from_sources_field(
    request: StripSourcesFieldRequest,
) -> SourceRootStrippedSources:
    """Remove source roots from a target, e.g. `src/python/pants/util/strutil.py` ->
    `pants/util/strutil.py`."""
    if request.specified_files_snapshot is not None:
        sources_snapshot = request.specified_files_snapshot
    else:
        hydrated_sources = await Get[HydratedSources](HydrateSourcesRequest(
            request.sources_field,
            for_sources_types=request.for_sources_types,
            enable_codegen=request.enable_codegen,
        ))
        sources_snapshot = hydrated_sources.snapshot

    if not sources_snapshot.files:
        return SourceRootStrippedSources(EMPTY_SNAPSHOT)

    # Unlike all other `Sources` subclasses, `FilesSources` (and its subclasses) do not remove
    # their source root. This is so that filesystem APIs (e.g. Python's `open()`) may still access
    # the files as they normally would, with the full path relative to the build root.
    if isinstance(request.sources_field, FilesSources):
        return SourceRootStrippedSources(sources_snapshot)

    return await Get[SourceRootStrippedSources](StripSnapshotRequest(
        sources_snapshot,
        representative_path=representative_path_from_address(
            request.sources_field.address),
    ))
Example #19
    def test_unmatched_globs(self) -> None:
        self.create_files("", files=["f1.f95"])
        sources = Sources(["non_existent.f95"], address=Address.parse(":lib"))
        with pytest.raises(ExecutionError) as exc:
            self.request_single_product(HydratedSources, HydrateSourcesRequest(sources))
        assert "Unmatched glob" in str(exc.value)
        assert "//:lib" in str(exc.value)
        assert "non_existent.f95" in str(exc.value)
Example #20
async def determine_field_sets_with_sources(
    request: FieldSetsWithSourcesRequest,
) -> FieldSetsWithSources:
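    # Keep only the field sets whose sources hydrate to at least one file.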
    all_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        for field_set in request)
    return FieldSetsWithSources(
        field_set for field_set, sources in zip(request, all_sources)
        if sources.snapshot.files)
Example #21
async def determine_targets_with_sources(
        request: TargetsWithSourcesRequest) -> TargetsWithSources:
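    # Keep only the targets whose sources hydrate to at least one file.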
    all_sources = await MultiGet(
        Get(HydratedSources, HydrateSourcesRequest(tgt.get(SourcesField)))
        for tgt in request)
    return TargetsWithSources(tgt
                              for tgt, sources in zip(request, all_sources)
                              if sources.snapshot.files)
Example #22
    def test_expected_file_extensions(self) -> None:
        class ExpectedExtensionsSources(Sources):
            expected_file_extensions = (".f95", ".f03")

        addr = Address.parse("src/fortran:lib")
        self.create_files("src/fortran", files=["s.f95", "s.f03", "s.f08"])
        sources = ExpectedExtensionsSources(["s.f*"], address=addr)
        with pytest.raises(ExecutionError) as exc:
            self.request_single_product(HydratedSources, HydrateSourcesRequest(sources))
        assert "s.f08" in str(exc.value)
        assert str(addr) in str(exc.value)

        # Also check that we support valid sources
        valid_sources = ExpectedExtensionsSources(["s.f95"], address=addr)
        assert self.request_single_product(
            HydratedSources, HydrateSourcesRequest(valid_sources)
        ).snapshot.files == ("src/fortran/s.f95",)
Example #23
async def export_codegen(
    targets: Targets,
    union_membership: UnionMembership,
    workspace: Workspace,
    dist_dir: DistDir,
    registered_target_types: RegisteredTargetTypes,
) -> ExportCodegen:
    # We run all possible code generators. Running codegen requires specifying the expected
    # output_type, so we must inspect what is possible to generate.
    all_generate_request_types = union_membership.get(GenerateSourcesRequest)
    inputs_to_outputs = {
        req.input: req.output
        for req in all_generate_request_types
    }
    codegen_sources_fields_with_output = []
    for tgt in targets:
        if not tgt.has_field(SourcesField):
            continue
        sources = tgt[SourcesField]
        for input_type in inputs_to_outputs:
            if isinstance(sources, input_type):
                output_type = inputs_to_outputs[input_type]
                codegen_sources_fields_with_output.append(
                    (sources, output_type))

    if not codegen_sources_fields_with_output:
        codegen_targets = sorted({
            tgt_type.alias
            for tgt_type in registered_target_types.types
            for input_sources in inputs_to_outputs.keys()
            if tgt_type.class_has_field(input_sources,
                                        union_membership=union_membership)
        })
        logger.warning(
            "No codegen files/targets matched. All codegen target types: "
            f"{', '.join(codegen_targets)}")
        return ExportCodegen(exit_code=0)

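    # Hydrate every matched sources field, asking codegen to produce the corresponding output type.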
    all_hydrated_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(
                sources_and_output_type[0],
                for_sources_types=(sources_and_output_type[1], ),
                enable_codegen=True,
            ),
        ) for sources_and_output_type in codegen_sources_fields_with_output)

    merged_digest = await Get(
        Digest,
        MergeDigests(hydrated_sources.snapshot.digest
                     for hydrated_sources in all_hydrated_sources),
    )

    dest = str(dist_dir.relpath / "codegen")
    logger.info(f"Writing generated files to {dest}")
    workspace.write_digest(merged_digest, path_prefix=dest)
    return ExportCodegen(exit_code=0)
Example #24
def hydrate(sources_cls: Type[DebianSources],
            sources: Iterable[str]) -> HydratedSources:
    return sources_rule_runner.request(
        HydratedSources,
        [
            HydrateSourcesRequest(
                sources_cls(sources, Address("", target_name="example"))),
        ],
    )
Example #25
async def infer_smalltalk_dependencies(request: InferSmalltalkDependencies) -> InferredDependencies:
    # To demo an inference rule, we simply treat each `sources` file to contain a list of
    # addresses, one per line.
    hydrated_sources = await Get[HydratedSources](HydrateSourcesRequest(request.sources_field))
    file_contents = await Get[FilesContent](Digest, hydrated_sources.snapshot.digest)
    all_lines = itertools.chain.from_iterable(
        fc.content.decode().splitlines() for fc in file_contents
    )
    return InferredDependencies(Address.parse(line) for line in all_lines)
Example #26
async def parse_thrift_file(request: ParsedThriftRequest) -> ParsedThrift:
    sources = await Get(HydratedSources,
                        HydrateSourcesRequest(request.sources_field))
    digest_contents = await Get(DigestContents, Digest,
                                sources.snapshot.digest)
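    # A single Thrift source file is expected per target.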
    assert len(digest_contents) == 1
    file_content = digest_contents[0].content.decode()
    return ParsedThrift(
        imports=FrozenOrderedSet(_IMPORT_REGEX.findall(file_content)),
        namespaces=FrozenDict(_NAMESPACE_REGEX.findall(file_content)),
    )
Example #27
async def parse_dockerfile(request: DockerfileInfoRequest) -> DockerfileInfo:
    wrapped_target = await Get(
        WrappedTarget,
        WrappedTargetRequest(request.address,
                             description_of_origin="<infallible>"))
    target = wrapped_target.target
    sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(
            target.get(SourcesField),
            for_sources_types=(DockerImageSourceField, ),
            enable_codegen=True,
        ),
    )

    dockerfiles = sources.snapshot.files
    assert len(dockerfiles) == 1, (
        f"Internal error: Expected a single source file to Dockerfile parse request {request}, "
        f"got: {dockerfiles}.")

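    # Parse the Dockerfile with the external parser process.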
    result = await Get(
        ProcessResult,
        DockerfileParseRequest(
            sources.snapshot.digest,
            dockerfiles,
        ),
    )

    try:
        raw_output = result.stdout.decode("utf-8")
        outputs = json.loads(raw_output)
        assert len(outputs) == len(dockerfiles)
    except Exception as e:
        raise DockerfileInfoError(
            f"Unexpected failure to parse Dockerfiles: {', '.join(dockerfiles)}, "
            f"for the {request.address} target: {e}") from e

    info = outputs[0]
    try:
        return DockerfileInfo(
            address=request.address,
            digest=sources.snapshot.digest,
            source=info["source"],
            build_args=DockerBuildArgs.from_strings(
                *info["build_args"], duplicates_must_match=True),
            copy_source_paths=tuple(info["copy_source_paths"]),
            from_image_build_args=DockerBuildArgs.from_strings(
                *info["from_image_build_args"], duplicates_must_match=True),
            version_tags=tuple(info["version_tags"]),
        )
    except ValueError as e:
        raise DockerfileInfoError(
            f"Error while parsing {info['source']} for the {request.address} target: {e}"
        ) from e
Example #28
async def package_archive_target(
        field_set: ArchiveFieldSet,
        global_options: GlobalOptions) -> BuiltPackage:
    package_targets, files_targets = await MultiGet(
        Get(
            Targets,
            UnparsedAddressInputs(field_set.packages.value or (),
                                  owning_address=field_set.address),
        ),
        Get(
            Targets,
            UnparsedAddressInputs(field_set.files.value or (),
                                  owning_address=field_set.address),
        ),
    )

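    # Determine each target's package field sets and build them all.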
    package_field_sets_per_target = await Get(
        FieldSetsPerTarget,
        FieldSetsPerTargetRequest(PackageFieldSet, package_targets))
    packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in package_field_sets_per_target.field_sets)

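    # Hydrate loose files from the `files` field, running codegen to produce `FilesSources` if needed.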
    files_sources = await MultiGet(
        Get(
            HydratedSources,
            HydrateSourcesRequest(tgt.get(Sources),
                                  for_sources_types=(FilesSources, ),
                                  enable_codegen=True),
        ) for tgt in files_targets)

    input_snapshot = await Get(
        Snapshot,
        MergeDigests((
            *(package.digest for package in packages),
            *(sources.snapshot.digest for sources in files_sources),
        )),
    )

    output_filename = field_set.output_path.value_or_default(
        field_set.address,
        file_ending=field_set.format_field.value,
        use_legacy_format=global_options.options.pants_distdir_legacy_paths,
    )
    archive = await Get(
        Digest,
        CreateArchive(
            input_snapshot,
            output_filename=output_filename,
            format=ArchiveFormat(field_set.format_field.value),
        ),
    )
    return BuiltPackage(archive, relpath=output_filename)
Example #29
async def infer_smalltalk_dependencies(request: InferSmalltalkDependencies) -> InferredDependencies:
    # To demo an inference rule, we simply treat each `sources` file to contain a list of
    # addresses, one per line.
    hydrated_sources = await Get(HydratedSources, HydrateSourcesRequest(request.sources_field))
    digest_contents = await Get(DigestContents, Digest, hydrated_sources.snapshot.digest)
    all_lines = itertools.chain.from_iterable(
        file_content.content.decode().splitlines() for file_content in digest_contents
    )
    resolved = await MultiGet(
        Get(Address, AddressInput, AddressInput.parse(line)) for line in all_lines
    )
    return InferredDependencies(resolved)
Example #30
    def test_works_with_subclass_fields(self) -> None:
        class CustomAvroSources(AvroSources):
            pass

        protocol_sources = CustomAvroSources(["*.avro"], address=self.address)
        assert protocol_sources.can_generate(FortranSources, self.union_membership) is True
        generated = self.request_single_product(
            HydratedSources,
            HydrateSourcesRequest(
                protocol_sources, for_sources_types=[FortranSources], enable_codegen=True
            ),
        )
        assert generated.snapshot.files == ("src/fortran/f.f95",)