Code example #1
File: rules.py Project: hephex/pants
async def bandit_lint_partition(partition: BanditPartition,
                                bandit: Bandit) -> LintResult:

    bandit_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="bandit.pex",
            internal_only=True,
            requirements=bandit.pex_requirements(),
            interpreter_constraints=partition.interpreter_constraints,
            main=bandit.main,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest,
                           bandit.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in partition.field_sets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))

    bandit_pex, config_files, report_directory, source_files = await MultiGet(
        bandit_pex_get, config_files_get, report_directory_digest_get,
        source_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest,
                      config_files.snapshot.digest, report_directory)),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            bandit_pex,
            argv=generate_argv(source_files, bandit),
            input_digest=input_digest,
            description=f"Run Bandit on {pluralize(len(partition.field_sets), 'file')}.",
            output_directories=(REPORT_DIR, ),
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
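
Most examples on this page follow the same basic shape: materialize an empty scratch directory, run a process that writes its outputs there, then strip that directory with RemovePrefix so the captured files sit at the root of the resulting digest. Below is a minimal sketch of that pattern, assuming the standard pants.engine APIs; the rule name `run_some_tool`, the `some-tool` argv, and the OUTPUT_DIR value are hypothetical, not taken from any of the examples.

from pants.engine.fs import CreateDigest, Digest, Directory, RemovePrefix
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import Get, rule
from pants.util.logging import LogLevel

OUTPUT_DIR = "__out"  # hypothetical scratch directory name


@rule
async def run_some_tool() -> Digest:
    # 1. Materialize an empty scratch directory for the tool to write into.
    scratch_dir = await Get(Digest, CreateDigest([Directory(OUTPUT_DIR)]))
    # 2. Run the tool, capturing only what it writes under that directory.
    result = await Get(
        ProcessResult,
        Process(
            argv=("some-tool", "--out", OUTPUT_DIR),  # hypothetical tool invocation
            input_digest=scratch_dir,
            description=f"Run some-tool into {OUTPUT_DIR}",
            level=LogLevel.DEBUG,
            output_directories=(OUTPUT_DIR,),
        ),
    )
    # 3. Strip the scratch prefix so the outputs sit at the digest root.
    return await Get(Digest, RemovePrefix(result.output_digest, OUTPUT_DIR))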
Code example #2
async def run_python_test(field_set: PythonTestFieldSet,
                          test_subsystem: TestSubsystem,
                          pytest: PyTest) -> TestResult:
    if field_set.is_conftest_or_type_stub():
        return TestResult.skip(field_set.address)

    setup = await Get(TestSetup, TestSetupRequest(field_set, is_debug=False))
    result = await Get(FallibleProcessResult, Process, setup.process)

    coverage_data = None
    if test_subsystem.use_coverage:
        coverage_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([".coverage"])))
        if coverage_snapshot.files == (".coverage", ):
            coverage_data = PytestCoverageData(field_set.address,
                                               coverage_snapshot.digest)
        else:
            logger.warning(
                f"Failed to generate coverage data for {field_set.address}.")

    xml_results_snapshot = None
    if setup.results_file_name:
        xml_results_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest,
                         PathGlobs([setup.results_file_name])))
        if xml_results_snapshot.files == (setup.results_file_name, ):
            xml_results_snapshot = await Get(
                Snapshot,
                AddPrefix(xml_results_snapshot.digest,
                          pytest.options.junit_xml_dir),
            )
        else:
            logger.warning(
                f"Failed to generate JUnit XML data for {field_set.address}.")
    extra_output_snapshot = await Get(
        Snapshot,
        DigestSubset(result.output_digest,
                     PathGlobs([f"{_EXTRA_OUTPUT_DIR}/**"])))
    extra_output_snapshot = await Get(
        Snapshot, RemovePrefix(extra_output_snapshot.digest,
                               _EXTRA_OUTPUT_DIR))

    return TestResult.from_fallible_process_result(
        result,
        address=field_set.address,
        coverage_data=coverage_data,
        xml_results=xml_results_snapshot,
        extra_output=extra_output_snapshot,
    )
Code example #3
async def resolve_go_module(request: ResolveGoModuleRequest) -> ResolvedGoModule:
    wrapped_target = await Get(WrappedTarget, Address, request.address)
    target = wrapped_target.target

    sources = await Get(SourceFiles,
                        SourceFilesRequest([target.get(GoModuleSources)]))
    flattened_sources_snapshot = await Get(
        Snapshot,
        RemovePrefix(sources.snapshot.digest, request.address.spec_path))

    # Parse the go.mod for the module path and minimum Go version.
    parse_result = await Get(
        ProcessResult,
        GoSdkProcess(
            input_digest=flattened_sources_snapshot.digest,
            command=("mod", "edit", "-json"),
            description=f"Parse go.mod for {request.address}.",
        ),
    )
    module_metadata = json.loads(parse_result.stdout)
    module_path = module_metadata["Module"]["Path"]
    # TODO: Figure out a better default if missing; use the SDK's version instead of this hard-coded one.
    minimum_go_version = module_metadata.get("Go", "1.16")

    # Resolve the dependencies in the go.mod.
    list_modules_result = await Get(
        ProcessResult,
        GoSdkProcess(
            input_digest=flattened_sources_snapshot.digest,
            command=("list", "-m", "-json", "all"),
            description=f"List modules in build of {request.address}.",
        ),
    )
    modules = parse_module_descriptors(list_modules_result.stdout)

    return ResolvedGoModule(
        target=target,
        import_path=module_path,
        minimum_go_version=minimum_go_version,
        modules=FrozenOrderedSet(modules),
        # TODO: Is this a resolved version? Need to update for the go-resolve goal?
        digest=flattened_sources_snapshot.digest,
    )
Code example #4
async def run_scalatest_test(
    test_subsystem: TestSubsystem,
    field_set: ScalatestTestFieldSet,
) -> TestResult:
    test_setup = await Get(TestSetup, TestSetupRequest(field_set, is_debug=False))
    process_result = await Get(FallibleProcessResult, JvmProcess, test_setup.process)
    reports_dir_prefix = test_setup.reports_dir_prefix

    xml_result_subset = await Get(
        Digest, DigestSubset(process_result.output_digest, PathGlobs([f"{reports_dir_prefix}/**"]))
    )
    xml_results = await Get(Snapshot, RemovePrefix(xml_result_subset, reports_dir_prefix))

    return TestResult.from_fallible_process_result(
        process_result,
        address=field_set.address,
        output_setting=test_subsystem.output,
        xml_results=xml_results,
    )
Code example #5
async def get_ancestor_init_py(
    targets: Targets, source_root_config: SourceRootConfig
) -> AncestorInitPyFiles:
    """Find any ancestor __init__.py files for the given targets.

    Includes sibling __init__.py files. Returns the files stripped of their source roots.
    """
    source_roots = source_root_config.get_source_roots()
    sources = await Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in targets),
            for_sources_types=(PythonSources,),
            enable_codegen=True,
        )
    )
    # Find the ancestors of all dirs containing .py files, including those dirs themselves.
    source_dir_ancestors: Set[Tuple[str, str]] = set()  # Items are (src_root, path incl. src_root).
    for fp in sources.snapshot.files:
        source_dir_ancestor = os.path.dirname(fp)
        source_root = source_roots.strict_find_by_path(fp).path
        # Do not allow the repository root to leak (i.e., '.' should not be a package in setup.py).
        while source_dir_ancestor != source_root:
            source_dir_ancestors.add((source_root, source_dir_ancestor))
            source_dir_ancestor = os.path.dirname(source_dir_ancestor)

    source_dir_ancestors_list = list(source_dir_ancestors)  # To force a consistent order.

    # Note that we must MultiGet single globs instead of a single Get for all the globs, because
    # we match each result to its originating glob (see use of zip below).
    ancestor_init_py_snapshots = await MultiGet[Snapshot](
        Get[Snapshot](PathGlobs, PathGlobs([os.path.join(source_dir_ancestor[1], "__init__.py")]))
        for source_dir_ancestor in source_dir_ancestors_list
    )

    source_root_stripped_ancestor_init_pys = await MultiGet[Digest](
        Get[Digest](RemovePrefix(snapshot.digest, source_dir_ancestor[0]))
        for snapshot, source_dir_ancestor in zip(
            ancestor_init_py_snapshots, source_dir_ancestors_list
        )
    )

    return AncestorInitPyFiles(source_root_stripped_ancestor_init_pys)
Code example #6
async def maybe_extract(extractable: MaybeExtractable) -> ExtractedDigest:
    """If digest contains a single archive file, extract it, otherwise return the input digest."""
    digest = extractable.digest
    snapshot = await Get[Snapshot](Digest, digest)
    if len(snapshot.files) == 1:
        output_dir = "out/"
        extraction_cmd = get_extraction_cmd(snapshot.files[0], output_dir)
        if extraction_cmd:
            extraction_cmd_str = " ".join(extraction_cmd)
            proc = Process(
                argv=("/bin/bash", "-c", f"{extraction_cmd_str}"),
                input_digest=digest,
                description=f"Extract {snapshot.files[0]}",
                env={"PATH": "/usr/bin:/bin:/usr/local/bin"},
                output_directories=(output_dir, ),
            )
            result = await Get[ProcessResult](Process, proc)
            strip_output_dir = await Get[Digest](RemovePrefix(
                result.output_digest, output_dir))
            return ExtractedDigest(strip_output_dir)
    return ExtractedDigest(digest)
Code example #7
async def maybe_extract_archive(digest: Digest, tar_binary: TarBinary,
                                unzip_binary: UnzipBinary) -> ExtractedArchive:
    """If digest contains a single archive file, extract it, otherwise return the input digest."""
    output_dir = "__output"
    snapshot, output_dir_digest = await MultiGet(
        Get(Snapshot, Digest, digest),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )
    if len(snapshot.files) != 1:
        return ExtractedArchive(digest)

    input_digest = await Get(Digest, MergeDigests((digest, output_dir_digest)))
    fp = snapshot.files[0]
    if fp.endswith(".zip"):
        argv = unzip_binary.extract_archive_argv(archive_path=fp,
                                                 output_dir=output_dir)
        env = {}
    elif fp.endswith(
        (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tbz2", ".tar.xz", ".txz")):
        argv = tar_binary.extract_archive_argv(archive_path=fp,
                                               output_dir=output_dir)
        # `tar` expects to find a couple of binaries, such as `gzip` and `xz`, on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}
    else:
        return ExtractedArchive(digest)

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Extract {fp}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )
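    # NB: RemovePrefix requires every path in the digest to live under the prefix; capturing only output_dir above guarantees that here.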
    strip_output_dir = await Get(
        Digest, RemovePrefix(result.output_digest, output_dir))
    return ExtractedArchive(strip_output_dir)
Code example #8
File: setup_py.py Project: cristianmatache/pants
async def run_setup_py(req: RunSetupPyRequest,
                       setuptools: Setuptools) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    setuptools_pex = await Get(
        Pex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=(
                req.interpreter_constraints
                if setuptools.options.is_default("interpreter_constraints")
                else PexInterpreterConstraints(setuptools.interpreter_constraints)
            ),
        ),
    )
    input_digest = await Get(
        Digest, MergeDigests((req.chroot.digest, setuptools_pex.digest)))
    # The setuptools dist dir, which setuptools creates under the chroot (not to be
    # confused with Pants's own dist dir at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        PexProcess(
            setuptools_pex,
            argv=("setup.py", *req.args),
            input_digest=input_digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir, ),
            description=f"Run setuptools for {req.exported_target.target.address}",
            level=LogLevel.DEBUG,
        ),
    )
    output_digest = await Get(Digest,
                              RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
Code example #9
async def run_helm_package(field_set: HelmPackageFieldSet) -> BuiltPackage:
    result_dir = "__out"

    chart, result_digest = await MultiGet(
        Get(HelmChart, HelmChartRequest(field_set)),
        Get(Digest, CreateDigest([Directory(result_dir)])),
    )

    input_digest = await Get(
        Digest, MergeDigests([chart.snapshot.digest, result_digest]))
    process_output_file = os.path.join(result_dir,
                                       _helm_artifact_filename(chart.metadata))

    process_result = await Get(
        ProcessResult,
        HelmProcess(
            argv=["package", chart.path, "-d", result_dir],
            input_digest=input_digest,
            output_files=(process_output_file, ),
            description=f"Packaging Helm chart: {field_set.address.spec_path}",
        ),
    )

    stripped_output_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, result_dir))

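    # AddPrefix is the inverse of RemovePrefix: re-root the packaged chart under the target's configured output path.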
    final_snapshot = await Get(
        Snapshot,
        AddPrefix(stripped_output_digest,
                  field_set.output_path.value_or_default(file_ending=None)),
    )
    return BuiltPackage(
        final_snapshot.digest,
        artifacts=tuple(
            BuiltPackageArtifact(
                file, extra_log_lines=(f"Built Helm chart artifact: {file}", ))
            for file in final_snapshot.files),
    )
Code example #10
async def run_setup_py(
    req: RunSetupPyRequest, setuptools_setup: SetuptoolsSetup
) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    input_digest = await Get(
        Digest, MergeDigests((req.chroot.digest, setuptools_setup.requirements_pex.digest))
    )
    # The setuptools dist dir, which setuptools creates under the chroot (not to be
    # confused with Pants's own dist dir at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        PexProcess(
            setuptools_setup.requirements_pex,
            argv=("setup.py", *req.args),
            input_digest=input_digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir,),
            description=f"Run setuptools for {req.exported_target.target.address}",
        ),
    )
    output_digest = await Get(Digest, RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
Code example #11
File: rules.py Project: hephex/pants
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
    python_protobuf_mypy_plugin: PythonProtobufMypyPlugin,
    pex_environment: PexEnvironment,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(tgt[ProtobufSourceField]
                           for tgt in transitive_targets.closure
                           if tgt.has_field(ProtobufSourceField)),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSourceField]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    protoc_gen_mypy_script = "protoc-gen-mypy"
    protoc_gen_mypy_grpc_script = "protoc-gen-mypy_grpc"
    mypy_pex = None
    mypy_request = PexRequest(
        output_filename="mypy_protobuf.pex",
        internal_only=True,
        requirements=python_protobuf_mypy_plugin.pex_requirements(),
        interpreter_constraints=python_protobuf_mypy_plugin.interpreter_constraints,
    )

    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            VenvPex,
            VenvPexRequest(bin_names=[protoc_gen_mypy_script],
                           pex_request=mypy_request),
        )

        if request.protocol_target.get(ProtobufGrpcToggleField).value:
            mypy_info = await Get(PexResolveInfo, VenvPex, mypy_pex)

            # In order to generate stubs for gRPC code, we need mypy-protobuf 2.0 or above.
            if any(dist_info.project_name == "mypy-protobuf"
                   and dist_info.version.major >= 2
                   for dist_info in mypy_info):
                # TODO: Use `pex_path` once VenvPex stores a Pex field.
                mypy_pex = await Get(
                    VenvPex,
                    VenvPexRequest(
                        bin_names=[
                            protoc_gen_mypy_script, protoc_gen_mypy_grpc_script
                        ],
                        pex_request=mypy_request,
                    ),
                )

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrpcToggleField).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if mypy_pex:
        unmerged_digests.append(mypy_pex.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if mypy_pex:
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_pex.bin[protoc_gen_mypy_script].argv0}",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])

        if mypy_pex and protoc_gen_mypy_grpc_script in mypy_pex.bin:
            argv.extend([
                f"--plugin=protoc-gen-mypy_grpc={mypy_pex.bin[protoc_gen_mypy_grpc_script].argv0}",
                "--mypy_grpc_out",
                output_dir,
            ])

    argv.extend(target_sources_stripped.snapshot.files)
    result = await Get(
        ProcessResult,
        Process(
            argv,
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
            append_only_caches=pex_environment.in_sandbox(
                working_directory=None).append_only_caches,
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_source's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
Code example #12
async def pylint_lint_partition(
        partition: PylintPartition, pylint: Pylint,
        first_party_plugins: PylintFirstPartyPlugins) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (t.address for t in partition.root_targets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )

    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(t[PythonSourceField]
                           for t in partition.root_targets))
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest,
                                      CreateDigest([Directory(REPORT_DIR)]))

    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                #  upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                #  resolved.
                site_packages_copies=True,
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            first_party_plugins.sources_digest,
            prepared_python_sources.source_files.snapshot.digest,
            report_directory,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR, ),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_targets),
            description=f"Run Pylint on {pluralize(len(partition.root_targets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Code example #13
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    create_output_dir_request = Get(
        ProcessResult,
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))
    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]]),
    )

    (
        downloaded_protoc_binary,
        create_output_dir_result,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                all_sources_stripped.snapshot.digest,
                downloaded_protoc_binary.digest,
                create_output_dir_result.output_digest,
            )
        ),
    )

    result = await Get(
        ProcessResult,
        Process(
            (
                downloaded_protoc_binary.exe,
                "--python_out",
                output_dir,
                *target_sources_stripped.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
Code example #14
async def package_pyoxidizer_binary(
    pyoxidizer: PyOxidizer,
    field_set: PyOxidizerFieldSet,
    runner_script: PyoxidizerRunnerScript,
    bash: BashBinary,
) -> BuiltPackage:
    direct_deps = await Get(Targets,
                            DependenciesRequest(field_set.dependencies))
    deps_field_sets = await Get(
        FieldSetsPerTarget,
        FieldSetsPerTargetRequest(PackageFieldSet, direct_deps))
    built_packages = await MultiGet(
        Get(BuiltPackage, PackageFieldSet, field_set)
        for field_set in deps_field_sets.field_sets)
    wheel_paths = [
        artifact.relpath for built_pkg in built_packages
        for artifact in built_pkg.artifacts
        if artifact.relpath is not None and artifact.relpath.endswith(".whl")
    ]
    if not wheel_paths:
        raise InvalidTargetException(
            softwrap(f"""
                The `{PyOxidizerTarget.alias}` target {field_set.address} must include
                in its `dependencies` field at least one `python_distribution` target that produces a
                `.whl` file. For example, if using `{GenerateSetupField.alias}=True`, then make sure
                `{WheelField.alias}=True`. See {doc_url('python-distributions')}.
                """))

    config_template = None
    if field_set.template.value is not None:
        config_template_source = await Get(
            HydratedSources, HydrateSourcesRequest(field_set.template))
        digest_contents = await Get(DigestContents, Digest,
                                    config_template_source.snapshot.digest)
        config_template = digest_contents[0].content.decode("utf-8")

    config = PyOxidizerConfig(
        executable_name=field_set.address.target_name,
        entry_point=field_set.entry_point.value,
        wheels=wheel_paths,
        template=config_template,
        unclassified_resources=(
            None
            if not field_set.unclassified_resources.value
            else list(field_set.unclassified_resources.value)
        ),
    )
    rendered_config = config.render()
    logger.debug(
        f"Configuration used for {field_set.address}: {rendered_config}")

    pyoxidizer_pex, config_digest = await MultiGet(
        Get(Pex, PexRequest, pyoxidizer.to_pex_request()),
        Get(
            Digest,
            CreateDigest([
                FileContent("pyoxidizer.bzl", rendered_config.encode("utf-8"))
            ])),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            config_digest,
            runner_script.digest,
            *(built_package.digest for built_package in built_packages),
        )),
    )
    pex_process = await Get(
        Process,
        PexProcess(
            pyoxidizer_pex,
            argv=("build", *pyoxidizer.args),
            description=f"Building {field_set.address} with PyOxidizer",
            input_digest=input_digest,
            level=LogLevel.INFO,
            output_directories=("build", ),
        ),
    )
    process_with_caching = dataclasses.replace(
        pex_process,
        argv=(bash.path, runner_script.path, *pex_process.argv),
        append_only_caches={
            **pex_process.append_only_caches,
            "pyoxidizer":
            runner_script.CACHE_PATH,
        },
    )

    result = await Get(ProcessResult, Process, process_with_caching)

    stripped_digest = await Get(Digest,
                                RemovePrefix(result.output_digest, "build"))
    final_snapshot = await Get(
        Snapshot,
        AddPrefix(stripped_digest,
                  field_set.output_path.value_or_default(file_ending=None)),
    )
    return BuiltPackage(
        final_snapshot.digest,
        artifacts=tuple(
            BuiltPackageArtifact(file) for file in final_snapshot.files),
    )
Code example #15
async def run_junit_test(
    bash: BashBinary,
    jdk_setup: JdkSetup,
    junit: JUnit,
    test_subsystem: TestSubsystem,
    field_set: JavaTestFieldSet,
) -> TestResult:
    classpath = await Get(Classpath, Addresses([field_set.address]))
    junit_classpath = await Get(
        MaterializedClasspath,
        MaterializedClasspathRequest(
            prefix="__thirdpartycp",
            artifact_requirements=(ArtifactRequirements([
                Coordinate(
                    group="org.junit.platform",
                    artifact="junit-platform-console",
                    version="1.7.2",
                ),
                Coordinate(
                    group="org.junit.jupiter",
                    artifact="junit-jupiter-engine",
                    version="5.7.2",
                ),
                Coordinate(
                    group="org.junit.vintage",
                    artifact="junit-vintage-engine",
                    version="5.7.2",
                ),
            ]), ),
        ),
    )
    merged_digest = await Get(
        Digest,
        MergeDigests((classpath.content.digest, jdk_setup.digest,
                      junit_classpath.digest)),
    )

    reports_dir_prefix = "__reports_dir"
    reports_dir = f"{reports_dir_prefix}/{field_set.address.path_safe_spec}"

    user_classpath_arg = ":".join(classpath.user_classpath_entries())

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, [
                    *classpath.classpath_entries(),
                    *junit_classpath.classpath_entries()
                ]),
                "org.junit.platform.console.ConsoleLauncher",
                *(("--classpath",
                   user_classpath_arg) if user_classpath_arg else ()),
                *(("--scan-class-path",
                   user_classpath_arg) if user_classpath_arg else ()),
                "--reports-dir",
                reports_dir,
                *junit.options.args,
            ],
            input_digest=merged_digest,
            output_directories=(reports_dir, ),
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            description=f"Run JUnit 5 ConsoleLauncher against {field_set.address}",
            level=LogLevel.DEBUG,
        ),
    )

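    # Keep only the files under the reports dir, then strip its prefix.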
    xml_result_subset = await Get(
        Digest,
        DigestSubset(process_result.output_digest,
                     PathGlobs([f"{reports_dir_prefix}/**"])))
    xml_results = await Get(
        Snapshot, RemovePrefix(xml_result_subset, reports_dir_prefix))

    return TestResult.from_fallible_process_result(
        process_result,
        address=field_set.address,
        output_setting=test_subsystem.output,
        xml_results=xml_results,
    )
Code example #16
async def setup_first_party_pkg_digest(
    request: FirstPartyPkgDigestRequest,
) -> FallibleFirstPartyPkgDigest:
    embedder, wrapped_target, maybe_analysis = await MultiGet(
        Get(LoadedGoBinary,
            LoadedGoBinaryRequest("embedcfg", ("main.go", ), "./embedder")),
        Get(
            WrappedTarget,
            WrappedTargetRequest(
                request.address,
                description_of_origin="<first party digest setup>"),
        ),
        Get(FallibleFirstPartyPkgAnalysis,
            FirstPartyPkgAnalysisRequest(request.address)),
    )
    if maybe_analysis.analysis is None:
        return FallibleFirstPartyPkgDigest(pkg_digest=None,
                                           exit_code=maybe_analysis.exit_code,
                                           stderr=maybe_analysis.stderr)
    analysis = maybe_analysis.analysis

    tgt = wrapped_target.target
    pkg_sources = await Get(HydratedSources,
                            HydrateSourcesRequest(tgt[GoPackageSourcesField]))
    sources_digest = pkg_sources.snapshot.digest

    embed_config = None
    test_embed_config = None
    xtest_embed_config = None

    # TODO(#13795): Error if you depend on resources without corresponding embed patterns?
    if analysis.embed_patterns or analysis.test_embed_patterns or analysis.xtest_embed_patterns:
        dependencies = await Get(Targets,
                                 DependenciesRequest(tgt[Dependencies]))
        resources_sources = await Get(
            SourceFiles,
            SourceFilesRequest(
                (
                    t.get(SourcesField) for t in dependencies
                    # You can only embed resources located at or below the directory of the
                    # `go_package`. This is a restriction from Go.
                    # TODO(#13795): Error if you depend on resources above the go_package?
                    if t.address.spec_path.startswith(
                        request.address.spec_path)),
                for_sources_types=(ResourceSourceField, ),
                # TODO: Switch to True. We need to be confident though that the generated files
                #  are located below the go_package.
                enable_codegen=False,
            ),
        )
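        # Re-root the resources: strip the package's spec path, then nest the files under "__resources__".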
        resources_digest = await Get(
            Digest,
            RemovePrefix(resources_sources.snapshot.digest,
                         request.address.spec_path))
        resources_digest = await Get(
            Digest, AddPrefix(resources_digest, "__resources__"))
        sources_digest = await Get(
            Digest, MergeDigests((sources_digest, resources_digest)))

        patterns_json = {
            "EmbedPatterns": analysis.embed_patterns,
            "TestEmbedPatterns": analysis.test_embed_patterns,
            "XTestEmbedPatterns": analysis.xtest_embed_patterns,
        }
        patterns_json_digest = await Get(
            Digest,
            CreateDigest([
                FileContent("patterns.json",
                            json.dumps(patterns_json).encode("utf-8"))
            ]),
        )
        input_digest = await Get(
            Digest,
            MergeDigests(
                (sources_digest, patterns_json_digest, embedder.digest)))
        embed_result = await Get(
            FallibleProcessResult,
            Process(
                ("./embedder", "patterns.json"),
                input_digest=input_digest,
                description=f"Create embed mapping for {request.address}",
                level=LogLevel.DEBUG,
            ),
        )
        if embed_result.exit_code != 0:
            return FallibleFirstPartyPkgDigest(
                pkg_digest=None,
                exit_code=embed_result.exit_code,
                stderr=embed_result.stdout.decode("utf-8"),
            )
        metadata = json.loads(embed_result.stdout)
        embed_config = EmbedConfig.from_json_dict(
            metadata.get("EmbedConfig", {}))
        test_embed_config = EmbedConfig.from_json_dict(
            metadata.get("TestEmbedConfig", {}))
        xtest_embed_config = EmbedConfig.from_json_dict(
            metadata.get("XTestEmbedConfig", {}))

    return FallibleFirstPartyPkgDigest(
        FirstPartyPkgDigest(
            sources_digest,
            embed_config=embed_config,
            test_embed_config=test_embed_config,
            xtest_embed_config=xtest_embed_config,
        ))
Code example #17
File: rules.py Project: codealchemy/pants
async def compile_avro_source(
    request: CompileAvroSourceRequest,
    jdk: InternalJdk,
    avro_tools: AvroSubsystem,
) -> CompiledAvroSource:
    output_dir = "_generated_files"
    toolcp_relpath = "__toolcp"

    lockfile_request = await Get(GenerateJvmLockfileFromTool, AvroToolLockfileSentinel())
    tool_classpath, subsetted_input_digest, empty_output_dir = await MultiGet(
        Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request)),
        Get(
            Digest,
            DigestSubset(
                request.digest,
                PathGlobs(
                    [request.path],
                    glob_match_error_behavior=GlobMatchErrorBehavior.error,
                    conjunction=GlobExpansionConjunction.all_match,
                    description_of_origin="the Avro source file name",
                ),
            ),
        ),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            [
                subsetted_input_digest,
                empty_output_dir,
            ]
        ),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
    }

    def make_avro_process(
        args: Iterable[str],
        *,
        overridden_input_digest: Digest | None = None,
        overridden_output_dir: str | None = None,
    ) -> JvmProcess:

        return JvmProcess(
            jdk=jdk,
            argv=(
                "org.apache.avro.tool.Main",
                *args,
            ),
            classpath_entries=tool_classpath.classpath_entries(toolcp_relpath),
            input_digest=(
                overridden_input_digest if overridden_input_digest is not None else input_digest
            ),
            extra_immutable_input_digests=extra_immutable_input_digests,
            extra_nailgun_keys=extra_immutable_input_digests,
            description="Generating Java sources from Avro source.",
            level=LogLevel.DEBUG,
            output_directories=(overridden_output_dir if overridden_output_dir else output_dir,),
        )

    path = PurePath(request.path)
    if path.suffix == ".avsc":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "schema", request.path, output_dir]),
        )
    elif path.suffix == ".avpr":
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(["compile", "protocol", request.path, output_dir]),
        )
    elif path.suffix == ".avdl":
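        # Compiling .avdl takes two steps: `idl` first generates an .avpr protocol file, which is then compiled like a .avpr source.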
        idl_output_dir = "__idl"
        avpr_path = os.path.join(idl_output_dir, str(path.with_suffix(".avpr")))
        idl_output_dir_digest = await Get(
            Digest, CreateDigest([Directory(os.path.dirname(avpr_path))])
        )
        idl_input_digest = await Get(Digest, MergeDigests([input_digest, idl_output_dir_digest]))
        idl_result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["idl", request.path, avpr_path],
                overridden_input_digest=idl_input_digest,
                overridden_output_dir=idl_output_dir,
            ),
        )
        generated_files_dir = await Get(Digest, CreateDigest([Directory(output_dir)]))
        protocol_input_digest = await Get(
            Digest, MergeDigests([idl_result.output_digest, generated_files_dir])
        )
        result = await Get(
            ProcessResult,
            JvmProcess,
            make_avro_process(
                ["compile", "protocol", avpr_path, output_dir],
                overridden_input_digest=protocol_input_digest,
            ),
        )
    else:
        raise AssertionError(
            f"Avro backend does not support files with extension `{path.suffix}`: {path}"
        )

    normalized_digest = await Get(Digest, RemovePrefix(result.output_digest, output_dir))
    return CompiledAvroSource(normalized_digest)
Code example #18
async def run_helm_unittest(
    field_set: HelmUnitTestFieldSet,
    test_subsystem: TestSubsystem,
    unittest_subsystem: HelmUnitTestSubsystem,
) -> TestResult:
    direct_dep_targets, transitive_targets = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies)),
        Get(
            TransitiveTargets,
            TransitiveTargetsRequest([field_set.address]),
        ),
    )
    chart_targets = [tgt for tgt in direct_dep_targets if HelmChartFieldSet.is_applicable(tgt)]
    if len(chart_targets) == 0:
        raise MissingUnitTestChartDependency(field_set.address)

    chart, source_files = await MultiGet(
        Get(HelmChart, HelmChartRequest, HelmChartRequest.from_target(chart_targets[0])),
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(
                sources_fields=[
                    field_set.source,
                    *(
                        tgt.get(SourcesField)
                        for tgt in transitive_targets.dependencies
                        if not HelmChartFieldSet.is_applicable(tgt)
                    ),
                ],
                for_sources_types=(HelmUnitTestSourceField, ResourceSourceField),
                enable_codegen=True,
            ),
        ),
    )
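    # Place the test sources inside the chart directory, where the helm-unittest plugin expects to find them.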
    prefixed_test_files_digest = await Get(
        Digest, AddPrefix(source_files.snapshot.digest, chart.path)
    )

    reports_dir = "__reports_dir"
    reports_file = os.path.join(reports_dir, f"{field_set.address.path_safe_spec}.xml")

    input_digest = await Get(
        Digest, MergeDigests([chart.snapshot.digest, prefixed_test_files_digest])
    )

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )

    process_result = await Get(
        FallibleProcessResult,
        HelmProcess(
            argv=[
                unittest_subsystem.plugin_name,
                "--helm3",
                "--output-type",
                unittest_subsystem.output_type.value,
                "--output-file",
                reports_file,
                chart.path,
            ],
            description=f"Running Helm unittest on: {field_set.address}",
            input_digest=input_digest,
            cache_scope=cache_scope,
            output_directories=(reports_dir,),
        ),
    )
    xml_results = await Get(Snapshot, RemovePrefix(process_result.output_digest, reports_dir))

    return TestResult.from_fallible_process_result(
        process_result,
        address=field_set.address,
        output_setting=test_subsystem.output,
        xml_results=xml_results,
    )
Code example #19
async def setup_scala_parser_classfiles(jdk: InternalJdk) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = pkgutil.get_data(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await MultiGet(
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-compiler",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-library",
                            version=PARSER_SCALA_VERSION,
                        ),
                        Coordinate(
                            group="org.scala-lang",
                            artifact="scala-reflect",
                            version=PARSER_SCALA_VERSION,
                        ),
                    ]
                ),
            ),
        ),
        Get(
            ToolClasspath,
            ToolClasspathRequest(
                prefix="__parsercp", artifact_requirements=SCALA_PARSER_ARTIFACT_REQUIREMENTS
            ),
        ),
        Get(Digest, CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        ),
    )

    process_result = await Get(
        ProcessResult,
        JvmProcess(
            jdk=jdk,
            classpath_entries=tool_classpath.classpath_entries(),
            argv=[
                "scala.tools.nsc.Main",
                "-bootclasspath",
                ":".join(tool_classpath.classpath_entries()),
                "-classpath",
                ":".join(parser_classpath.classpath_entries()),
                "-d",
                dest_dir,
                parser_source.path,
            ],
            input_digest=merged_digest,
            output_directories=(dest_dir,),
            description="Compile Scala parser for dependency inference with scalac",
            level=LogLevel.DEBUG,
            # NB: We do not use nailgun for this process, since it is launched exactly once.
            use_nailgun=False,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)
Code example #20
async def strip_source_roots(source_files: SourceFiles) -> StrippedSourceFiles:
    """Removes source roots from a snapshot.

    E.g. `src/python/pants/util/strutil.py` -> `pants/util/strutil.py`.
    """
    if not source_files.snapshot.files:
        return StrippedSourceFiles(source_files.snapshot)

    if source_files.unrooted_files:
        rooted_files = set(source_files.snapshot.files) - set(
            source_files.unrooted_files)
        rooted_files_snapshot = await Get(
            Snapshot,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(rooted_files)))
    else:
        rooted_files_snapshot = source_files.snapshot

    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(rooted_files_snapshot.files),
    )

    file_to_source_root = {
        str(file): root
        for file, root in source_roots_result.path_to_root.items()
    }
    files_grouped_by_source_root = {
        source_root.path: tuple(str(f) for f in files)
        for source_root, files in itertools.groupby(
            file_to_source_root.keys(), key=file_to_source_root.__getitem__)
    }

    if len(files_grouped_by_source_root) == 1:
        source_root = next(iter(files_grouped_by_source_root.keys()))
        if source_root == ".":
            resulting_snapshot = rooted_files_snapshot
        else:
            resulting_snapshot = await Get(
                Snapshot,
                RemovePrefix(rooted_files_snapshot.digest, source_root))
    else:
        digest_subsets = await MultiGet(
            Get(Digest,
                DigestSubset(rooted_files_snapshot.digest, PathGlobs(files)))
            for files in files_grouped_by_source_root.values())
        resulting_digests = await MultiGet(
            Get(Digest, RemovePrefix(digest, source_root))
            for digest, source_root in zip(
                digest_subsets, files_grouped_by_source_root.keys()))
        resulting_snapshot = await Get(Snapshot,
                                       MergeDigests(resulting_digests))

    # Add the unrooted files back in.
    if source_files.unrooted_files:
        unrooted_files_digest = await Get(
            Digest,
            DigestSubset(source_files.snapshot.digest,
                         PathGlobs(source_files.unrooted_files)),
        )
        resulting_snapshot = await Get(
            Snapshot,
            MergeDigests((resulting_snapshot.digest, unrooted_files_digest)))

    return StrippedSourceFiles(resulting_snapshot)
Code example #21
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest,
    protoc: Protoc,
    grpc_python_plugin: GrpcPythonPlugin,
    python_protobuf_subsystem: PythonProtobufSubsystem,
) -> GeneratedSources:
    download_protoc_request = Get(DownloadedExternalTool, ExternalToolRequest,
                                  protoc.get_request(Platform.current))

    output_dir = "_generated_files"
    create_output_dir_request = Get(Digest,
                                    CreateDigest([Directory(output_dir)]))

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    # TODO(#10917): Use TransitiveTargets instead of TransitiveTargetsLite.
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequestLite([request.protocol_target.address]))

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources, ),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest([request.protocol_target[ProtobufSources]]))

    (
        downloaded_protoc_binary,
        empty_output_dir,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    # To run the MyPy Protobuf plugin, we first install it with Pex, then extract the wheels and
    # point Protoc to the extracted wheels with its `--plugin` argument.
    extracted_mypy_wheels = None
    if python_protobuf_subsystem.mypy_plugin:
        mypy_pex = await Get(
            Pex,
            PexRequest(
                output_filename="mypy_protobuf.pex",
                internal_only=True,
                requirements=PexRequirements(
                    [python_protobuf_subsystem.mypy_plugin_version]),
                # This is solely to ensure that we use an appropriate interpreter when resolving
                # the distribution. We don't actually run the distribution directly with Python,
                # as we extract out its binary.
                interpreter_constraints=PexInterpreterConstraints(
                    ["CPython>=3.5"]),
            ),
        )
        extracted_mypy_wheels = await Get(ExtractedPexDistributions, Pex,
                                          mypy_pex)

    downloaded_grpc_plugin = (
        await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            grpc_python_plugin.get_request(Platform.current),
        )
        if request.protocol_target.get(ProtobufGrcpToggle).value
        else None
    )

    unmerged_digests = [
        all_sources_stripped.snapshot.digest,
        downloaded_protoc_binary.digest,
        empty_output_dir,
    ]
    if extracted_mypy_wheels:
        unmerged_digests.append(extracted_mypy_wheels.digest)
    if downloaded_grpc_plugin:
        unmerged_digests.append(downloaded_grpc_plugin.digest)
    input_digest = await Get(Digest, MergeDigests(unmerged_digests))

    argv = [downloaded_protoc_binary.exe, "--python_out", output_dir]
    if extracted_mypy_wheels:
        mypy_plugin_path = next(
            p for p in extracted_mypy_wheels.wheel_directory_paths
            if p.startswith(".deps/mypy_protobuf-"))
        argv.extend([
            f"--plugin=protoc-gen-mypy={mypy_plugin_path}/bin/protoc-gen-mypy",
            "--mypy_out",
            output_dir,
        ])
    if downloaded_grpc_plugin:
        argv.extend([
            f"--plugin=protoc-gen-grpc={downloaded_grpc_plugin.exe}",
            "--grpc_out", output_dir
        ])
    argv.extend(target_sources_stripped.snapshot.files)

    env = {}
    if extracted_mypy_wheels:
        env["PYTHONPATH"] = ":".join(
            extracted_mypy_wheels.wheel_directory_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv,
            env=env,
            input_digest=input_digest,
            description=
            f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(
            request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
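
A rule like this only takes effect once it is registered with the engine. The registration lives elsewhere in the plugin; the following is a minimal sketch of what it typically looks like, assuming the request type GeneratePythonFromProtobufRequest (which appears in another example in this listing) and the standard GenerateSourcesRequest union are in scope:

from pants.engine.rules import collect_rules
from pants.engine.unions import UnionRule


def rules():
    # Sketch only: expose the @rule coroutines in this module and register the
    # codegen request with the GenerateSourcesRequest union so the engine can
    # route protobuf targets here.
    return [
        *collect_rules(),
        UnionRule(GenerateSourcesRequest, GeneratePythonFromProtobufRequest),
    ]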
Code example #22
File: rules.py Project: codealchemy/pants
async def generate_go_from_protobuf(
    request: GenerateGoFromProtobufRequest,
    protoc: Protoc,
    go_protoc_plugin: _SetupGoProtocPlugin,
) -> GeneratedSources:
    output_dir = "_generated_files"
    protoc_relpath = "__protoc"
    protoc_go_plugin_relpath = "__protoc_gen_go"

    downloaded_protoc_binary, empty_output_dir, transitive_targets = await MultiGet(
        Get(DownloadedExternalTool, ExternalToolRequest,
            protoc.get_request(Platform.current)),
        Get(Digest, CreateDigest([Directory(output_dir)])),
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.protocol_target.address])),
    )

    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
    # for Protobuf imports to be discoverable.
    all_sources_stripped, target_sources_stripped = await MultiGet(
        Get(
            StrippedSourceFiles,
            SourceFilesRequest(tgt[ProtobufSourceField]
                               for tgt in transitive_targets.closure
                               if tgt.has_field(ProtobufSourceField)),
        ),
        Get(StrippedSourceFiles,
            SourceFilesRequest([request.protocol_target[ProtobufSourceField]
                                ])),
    )

    input_digest = await Get(
        Digest,
        MergeDigests([all_sources_stripped.snapshot.digest, empty_output_dir]))

    maybe_grpc_plugin_args = []
    if request.protocol_target.get(ProtobufGrpcToggleField).value:
        maybe_grpc_plugin_args = [
            f"--go-grpc_out={output_dir}",
            "--go-grpc_opt=paths=source_relative",
        ]

    result = await Get(
        ProcessResult,
        Process(
            argv=[
                os.path.join(protoc_relpath, downloaded_protoc_binary.exe),
                f"--plugin=go={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go')}",
                f"--plugin=go-grpc={os.path.join('.', protoc_go_plugin_relpath, 'protoc-gen-go-grpc')}",
                f"--go_out={output_dir}",
                "--go_opt=paths=source_relative",
                *maybe_grpc_plugin_args,
                *target_sources_stripped.snapshot.files,
            ],
            # NB: Setting PATH is necessary; without it, the --plugin options would require absolute paths.
            env={"PATH": protoc_go_plugin_relpath},
            input_digest=input_digest,
            immutable_input_digests={
                protoc_relpath: downloaded_protoc_binary.digest,
                protoc_go_plugin_relpath: go_protoc_plugin.digest,
            },
            description=
            f"Generating Go sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir, ),
        ),
    )

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest,
            SourceRootRequest.for_target(request.protocol_target)),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
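
Because the protoc binary and both Go plugins are mounted read-only via immutable_input_digests, the rule can address them with stable relative paths. As a hedged illustration only, the assembled command line for a hypothetical stripped source foo/bar.proto with gRPC enabled would look roughly like this (the protoc executable name and the .proto path are assumptions, not values from the rule):

example_argv = [
    "__protoc/protoc",  # os.path.join(protoc_relpath, downloaded_protoc_binary.exe)
    "--plugin=go=./__protoc_gen_go/protoc-gen-go",
    "--plugin=go-grpc=./__protoc_gen_go/protoc-gen-go-grpc",
    "--go_out=_generated_files",
    "--go_opt=paths=source_relative",
    "--go-grpc_out=_generated_files",
    "--go-grpc_opt=paths=source_relative",
    "foo/bar.proto",  # hypothetical entry from target_sources_stripped
]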
Code example #23
async def build_processors(bash: BashBinary, jdk_setup: JdkSetup) -> JavaParserCompiledClassfiles:
    dest_dir = "classfiles"

    materialized_classpath, source_digest = await MultiGet(
        Get(
            MaterializedClasspath,
            MaterializedClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=(java_parser_artifact_requirements(),),
            ),
        ),
        Get(
            Digest,
            CreateDigest(
                [
                    FileContent(
                        path=_LAUNCHER_BASENAME,
                        content=_load_javaparser_launcher_source(),
                    ),
                    Directory(dest_dir),
                ]
            ),
        ),
    )

    merged_digest = await Get(
        Digest,
        MergeDigests(
            (
                materialized_classpath.digest,
                jdk_setup.digest,
                source_digest,
            )
        ),
    )

    # NB: We do not use nailgun for this process, since it is launched exactly once.
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                *jdk_setup.args(bash, [f"{jdk_setup.java_home}/lib/tools.jar"]),
                "com.sun.tools.javac.Main",
                "-cp",
                ":".join(materialized_classpath.classpath_entries()),
                "-d",
                dest_dir,
                _LAUNCHER_BASENAME,
            ],
            input_digest=merged_digest,
            append_only_caches=jdk_setup.append_only_caches,
            env=jdk_setup.env,
            output_directories=(dest_dir,),
            description=f"Compile {_LAUNCHER_BASENAME} import processors with javac",
            level=LogLevel.DEBUG,
        ),
    )
    stripped_classfiles_digest = await Get(
        Digest, RemovePrefix(process_result.output_digest, dest_dir)
    )
    return JavaParserCompiledClassfiles(digest=stripped_classfiles_digest)
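
The javac command above is assembled from JdkSetup helpers whose expansion isn't shown in this snippet. Purely as an illustration, with every concrete value below a placeholder rather than real Pants output, the process argv has roughly this shape:

illustrative_argv = [
    # jdk_setup.args(...) expands to a bash-based java launcher prefix here.
    "com.sun.tools.javac.Main",
    "-cp", "__toolcp/javaparser-core.jar",  # hypothetical resolved classpath
    "-d", "classfiles",
    "JavaParserLauncher.java",  # hypothetical value of _LAUNCHER_BASENAME
]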
Code example #24
File: rules.py Project: patricklaw/pants
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else mypy.interpreter_constraints
    )

    closure_sources_get = Get(PythonSourceFiles,
                              PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get,
        requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
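
The interpreter-constraint selection at the top of this rule is dense. Restated as a standalone helper, this is a sketch rather than Pants API; the parameter names simply mirror the rule's locals:

def choose_tool_constraints(partition_ics, mypy_ics, is_default, requires_py38):
    # Run MyPy under the code's own interpreter constraints only when the user
    # has not overridden the tool's constraints AND the code requires Python
    # 3.8+, which typed-ast-based MyPy setups cannot parse; otherwise keep the
    # tool's configured constraints.
    if is_default and requires_py38:
        return partition_ics
    return mypy_ics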
Code example #25
File: fs_test.py Project: pyranja/pants
    def test_remove_prefix(self):
        # Set up files:

        relevant_files = (
            "characters/dark_tower/roland",
            "characters/dark_tower/susannah",
        )
        all_files = ((
            "books/dark_tower/gunslinger",
            "characters/altered_carbon/kovacs",
        ) + relevant_files + ("index", ))

        with temporary_dir() as temp_dir:
            safe_file_dump(os.path.join(temp_dir, "index"),
                           "books\ncharacters\n")
            safe_file_dump(
                os.path.join(temp_dir, "characters", "altered_carbon",
                             "kovacs"),
                "Envoy",
                makedirs=True,
            )

            tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
            safe_file_dump(os.path.join(tower_dir, "roland"),
                           "European Burmese",
                           makedirs=True)
            safe_file_dump(os.path.join(tower_dir, "susannah"),
                           "Not sure actually",
                           makedirs=True)

            safe_file_dump(
                os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
                "1982",
                makedirs=True,
            )

            snapshot, snapshot_with_extra_files = self.scheduler.capture_snapshots(
                (
                    PathGlobsAndRoot(PathGlobs(["characters/dark_tower/*"]),
                                     temp_dir),
                    PathGlobsAndRoot(PathGlobs(["**"]), temp_dir),
                ))
            # Check that we got the full snapshots that we expect
            self.assertEqual(snapshot.files, relevant_files)
            self.assertEqual(snapshot_with_extra_files.files, all_files)

            # Strip empty prefix:
            zero_prefix_stripped_digest = self.request_single_product(
                Digest,
                RemovePrefix(snapshot.digest, ""),
            )
            self.assertEqual(snapshot.digest, zero_prefix_stripped_digest)

            # Strip a non-empty prefix shared by all files:
            stripped_digest = self.request_single_product(
                Digest,
                RemovePrefix(snapshot.digest, "characters/dark_tower"),
            )
            self.assertEqual(
                stripped_digest,
                Digest(
                    fingerprint=
                    "71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c",
                    serialized_bytes_length=162,
                ),
            )
            expected_snapshot = assert_single_element(
                self.scheduler.capture_snapshots(
                    (PathGlobsAndRoot(PathGlobs(["*"]), tower_dir), )))
            self.assertEqual(expected_snapshot.files, ("roland", "susannah"))
            self.assertEqual(stripped_digest, expected_snapshot.digest)

            # Try to strip a prefix which isn't shared by all files:
            with self.assertRaisesWithMessageContaining(
                    Exception,
                    "Cannot strip prefix characters/dark_tower from root directory Digest(Fingerprint<28c47f77"
                    "867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>, 243) - root directory "
                    "contained non-matching directory named: books and file named: index",
            ):
                self.request_single_product(
                    Digest,
                    RemovePrefix(snapshot_with_extra_files.digest,
                                 "characters/dark_tower"),
                )
Code example #26
File: coursier_fetch.py Project: patricklaw/pants
async def coursier_resolve_lockfile(
    bash: BashBinary,
    coursier: Coursier,
    artifact_requirements: ArtifactRequirements,
) -> CoursierResolvedLockfile:
    """Run `coursier fetch ...` against a list of Maven coordinates and capture the result.

    This rule does two things in a single Process invocation:

        * Runs `coursier fetch` to let Coursier do the heavy lifting of resolving
          dependencies and downloading resolved artifacts (jars, etc).
        * Copies the resolved artifacts into the Process output directory, capturing
          the artifacts as content-addressed `Digest`s.

    It's important that this happens in the same process, since the process isn't
    guaranteed to run on the same machine as the rule, nor is a subsequent process
    invocation.  This guarantees that whatever Coursier resolved, it was fully
    captured into Pants' content addressed artifact storage.

    Note however that we still get the benefit of Coursier's "global" cache if it
    had already been run on the machine where the `coursier fetch` runs, so rerunning
    `coursier fetch` tends to be fast in practice.

    Finally, this rule bundles up the result into a `CoursierResolvedLockfile`.  This
    data structure encapsulates everything necessary to either materialize the
    resolved dependencies to a classpath for Java invocations, or to write the
    lockfile out to the workspace to hermetically freeze the result of the resolve.
    """

    if len(artifact_requirements) == 0:
        return CoursierResolvedLockfile(entries=())

    coursier_report_file_name = "coursier_report.json"
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                bash.path,
                coursier.wrapper_script,
                coursier.coursier.exe,
                coursier_report_file_name,
                *(req.to_coord_str() for req in artifact_requirements),
            ],
            input_digest=coursier.digest,
            output_directories=("classpath",),
            output_files=(coursier_report_file_name,),
            description=(
                "Running `coursier fetch` against "
                f"{pluralize(len(artifact_requirements), 'requirement')}: "
                f"{', '.join(req.to_coord_str() for req in artifact_requirements)}"
            ),
            level=LogLevel.DEBUG,
        ),
    )
    report_digest = await Get(
        Digest, DigestSubset(process_result.output_digest, PathGlobs([coursier_report_file_name]))
    )
    report_contents = await Get(DigestContents, Digest, report_digest)
    report = json.loads(report_contents[0].content)

    artifact_file_names = tuple(PurePath(dep["file"]).name for dep in report["dependencies"])
    artifact_output_paths = tuple(f"classpath/{file_name}" for file_name in artifact_file_names)
    artifact_digests = await MultiGet(
        Get(Digest, DigestSubset(process_result.output_digest, PathGlobs([output_path])))
        for output_path in artifact_output_paths
    )
    stripped_artifact_digests = await MultiGet(
        Get(Digest, RemovePrefix(artifact_digest, "classpath"))
        for artifact_digest in artifact_digests
    )
    artifact_file_digests = await MultiGet(
        Get(FileDigest, ExtractFileDigest(stripped_artifact_digest, file_name))
        for stripped_artifact_digest, file_name in zip(
            stripped_artifact_digests, artifact_file_names
        )
    )
    return CoursierResolvedLockfile(
        entries=tuple(
            CoursierLockfileEntry(
                coord=Coordinate.from_coord_str(dep["coord"]),
                direct_dependencies=Coordinates(
                    Coordinate.from_coord_str(dd) for dd in dep["directDependencies"]
                ),
                dependencies=Coordinates(Coordinate.from_coord_str(d) for d in dep["dependencies"]),
                file_name=file_name,
                file_digest=artifact_file_digest,
            )
            for dep, file_name, artifact_file_digest in zip(
                report["dependencies"], artifact_file_names, artifact_file_digests
            )
        )
    )
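
This rule depends on the JSON report written by the Coursier wrapper script. A hedged sketch of the minimal shape that the parsing code above relies on, with invented coordinates and paths:

example_report = {
    "dependencies": [
        {
            "coord": "com.example:widget:1.0.0",  # hypothetical coordinate
            "file": "/cache/coursier/widget-1.0.0.jar",  # hypothetical path
            "directDependencies": ["com.example:gadget:2.1.0"],
            "dependencies": ["com.example:gadget:2.1.0"],
        },
    ],
}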
Code example #27
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    download_protoc_request = Get[DownloadedExternalTool](
        ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    create_output_dir_request = Get[ProcessResult](
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            output_directories=(output_dir,),
        )
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get[TransitiveTargets](Addresses([request.protocol_target.address]))
    all_sources_request = Get[SourceFiles](
        AllSourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
            # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
            # for Protobuf imports to be discoverable.
            strip_source_roots=True,
        )
    )
    stripped_target_sources_request = Get[SourceFiles](
        AllSourceFilesRequest([request.protocol_target[ProtobufSources]], strip_source_roots=True)
    )

    (
        downloaded_protoc_binary,
        create_output_dir_result,
        all_sources,
        stripped_target_sources,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_sources_request,
        stripped_target_sources_request,
    )

    input_digest = await Get[Digest](
        MergeDigests(
            (
                all_sources.snapshot.digest,
                downloaded_protoc_binary.digest,
                create_output_dir_result.output_digest,
            )
        )
    )

    result = await Get[ProcessResult](
        Process(
            (
                downloaded_protoc_binary.exe,
                "--python_out",
                output_dir,
                *stripped_target_sources.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            output_directories=(output_dir,),
        )
    )
    normalized_snapshot = await Get[Snapshot](RemovePrefix(result.output_digest, output_dir))
    return GeneratedSources(normalized_snapshot)
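
Note that this example uses the older indexed Get[Output](input) engine syntax, which later Pants versions replaced with the positional form seen in the other examples in this section. Side by side, with `address` standing in for a target address:

# Older indexed syntax (this example):
transitive_targets = await Get[TransitiveTargets](Addresses([address]))

# Newer positional syntax; later versions also request via
# TransitiveTargetsRequest rather than Addresses:
transitive_targets = await Get(
    TransitiveTargets, TransitiveTargetsRequest([address]))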
Code example #28
File: coursier_fetch.py Project: patricklaw/pants
async def coursier_fetch_one_coord(
    bash: BashBinary,
    coursier: Coursier,
    request: CoursierLockfileEntry,
) -> ResolvedClasspathEntry:
    """Run `coursier fetch --intrasitive` to fetch a single artifact.

    This rule exists to permit efficient subsetting of a "global" classpath
    in the form of a lockfile.  Callers can determine what subset of dependencies
    from the lockfile are needed for a given target, then request those
    lockfile entries individually.

    By fetching only one entry at a time, we maximize our cache efficiency.  If instead
    we fetched the entire subset that the caller wanted, there would be a different cache
    key for every possible subset.

    This rule also guarantees exact reproducibility.  If all caches have been
    removed, `coursier fetch` will re-download the artifact, and this rule will
    confirm that what was downloaded matches exactly (by content digest) what
    was specified in the lockfile (what Coursier originally downloaded).
    """
    coursier_report_file_name = "coursier_report.json"
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                bash.path,
                coursier.wrapper_script,
                coursier.coursier.exe,
                coursier_report_file_name,
                "--intransitive",
                request.coord.to_coord_str(),
            ],
            input_digest=coursier.digest,
            output_directories=("classpath",),
            output_files=(coursier_report_file_name,),
            description="Run coursier resolve",
            level=LogLevel.DEBUG,
        ),
    )
    report_digest = await Get(
        Digest, DigestSubset(process_result.output_digest, PathGlobs([coursier_report_file_name]))
    )
    report_contents = await Get(DigestContents, Digest, report_digest)
    report = json.loads(report_contents[0].content)

    report_deps = report["dependencies"]
    if len(report_deps) == 0:
        raise CoursierError("Coursier fetch report has no dependencies (i.e. nothing was fetched).")
    elif len(report_deps) > 1:
        raise CoursierError(
            "Coursier fetch report has multiple dependencies, but exactly 1 was expected."
        )

    dep = report_deps[0]

    resolved_coord = Coordinate.from_coord_str(dep["coord"])
    if resolved_coord != request.coord:
        raise CoursierError(
            f'Coursier resolved coord "{resolved_coord.to_coord_str()}" does not match requested coord "{request.coord.to_coord_str()}".'
        )

    file_path = PurePath(dep["file"])
    classpath_dest = f"classpath/{file_path.name}"

    resolved_file_digest = await Get(
        Digest, DigestSubset(process_result.output_digest, PathGlobs([classpath_dest]))
    )
    stripped_digest = await Get(Digest, RemovePrefix(resolved_file_digest, "classpath"))
    file_digest = await Get(
        FileDigest,
        ExtractFileDigest(stripped_digest, file_path.name),
    )
    if file_digest != request.file_digest:
        raise CoursierError(
            f"Coursier fetch for '{resolved_coord}' succeeded, but fetched artifact {file_digest} did not match the expected artifact: {request.file_digest}."
        )
    return ResolvedClasspathEntry(
        coord=request.coord, file_name=file_path.name, digest=stripped_digest
    )
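
Following the docstring's point about cache efficiency, a caller that needs a subset of a lockfile would typically fan out with MultiGet so that each entry is fetched and cached independently. A hedged sketch, assuming a `lockfile` variable with an `entries` tuple:

classpath_entries = await MultiGet(
    Get(ResolvedClasspathEntry, CoursierLockfileEntry, entry)
    for entry in lockfile.entries  # hypothetical subset chosen by the caller
)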
Code example #29
File: fs_test.py Project: hephex/pants
def test_remove_prefix(rule_runner: RuleRunner) -> None:
    relevant_files = (
        "characters/dark_tower/roland",
        "characters/dark_tower/susannah",
    )
    all_files = (
        "books/dark_tower/gunslinger",
        "characters/altered_carbon/kovacs",
        *relevant_files,
        "index",
    )

    with temporary_dir() as temp_dir:
        safe_file_dump(os.path.join(temp_dir, "index"), "books\ncharacters\n")
        safe_file_dump(
            os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
            "Envoy",
            makedirs=True,
        )

        tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
        safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
        safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

        safe_file_dump(
            os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
            "1982",
            makedirs=True,
        )

        snapshot, snapshot_with_extra_files = rule_runner.scheduler.capture_snapshots(
            [
                PathGlobsAndRoot(PathGlobs(["characters/dark_tower/*"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["**"]), temp_dir),
            ]
        )

        # Check that we got the full snapshots that we expect
        assert snapshot.files == relevant_files
        assert snapshot_with_extra_files.files == all_files

        # Strip empty prefix:
        zero_prefix_stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "")]
        )
        assert snapshot.digest == zero_prefix_stripped_digest

        # Strip a non-empty prefix shared by all files:
        stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "characters/dark_tower")]
        )
        assert stripped_digest == Digest(
            fingerprint="71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c",
            serialized_bytes_length=162,
        )

        expected_snapshot = assert_single_element(
            rule_runner.scheduler.capture_snapshots([PathGlobsAndRoot(PathGlobs(["*"]), tower_dir)])
        )
        assert expected_snapshot.files == ("roland", "susannah")
        assert stripped_digest == expected_snapshot.digest

        # Try to strip a prefix which isn't shared by all files:
        with pytest.raises(Exception) as exc:
            rule_runner.request(
                Digest,
                [RemovePrefix(snapshot_with_extra_files.digest, "characters/dark_tower")],
            )
        assert (
            "Cannot strip prefix characters/dark_tower from root directory (Digest "
            "with hash Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>)"
            " - root directory contained non-matching directory named: books and file named: index"
        ) in str(exc.value)
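
The behavior under test can be summarized independently of the engine: RemovePrefix succeeds only when every path in the digest lives under the prefix, and fails loudly otherwise. A minimal pure-Python model of those semantics, offered as an illustration rather than the engine implementation:

from pathlib import PurePath


def remove_prefix(paths, prefix):
    stripped = []
    for path in paths:
        try:
            stripped.append(str(PurePath(path).relative_to(prefix)))
        except ValueError:
            # Mirrors the engine's error for paths outside the prefix.
            raise Exception(f"Cannot strip prefix {prefix} from {path}")
    return stripped


assert remove_prefix(
    ["characters/dark_tower/roland"], "characters/dark_tower") == ["roland"]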
Code example #30
File: rules.py Project: codealchemy/pants
async def setup_go_protoc_plugin(platform: Platform) -> _SetupGoProtocPlugin:
    go_mod_digest = await Get(
        Digest,
        CreateDigest([
            FileContent("go.mod", GO_PROTOBUF_GO_MOD.encode()),
            FileContent("go.sum", GO_PROTOBUF_GO_SUM.encode()),
        ]),
    )

    download_sources_result = await Get(
        ProcessResult,
        GoSdkProcess(
            ["mod", "download", "all"],
            input_digest=go_mod_digest,
            output_directories=("gopath", ),
            description="Download Go `protoc` plugin sources.",
            allow_downloads=True,
        ),
    )

    go_plugin_build_result, go_grpc_plugin_build_result = await MultiGet(
        Get(
            ProcessResult,
            GoSdkProcess(
                [
                    "install",
                    # NB: Version must match the pin in GO_PROTOBUF_GO_MOD.
                    "google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.0",
                ],
                input_digest=download_sources_result.output_digest,
                output_files=["gopath/bin/protoc-gen-go"],
                description="Build Go protobuf plugin for `protoc`.",
                platform=platform,
            ),
        ),
        Get(
            ProcessResult,
            GoSdkProcess(
                [
                    "install",
                    # NB: Version must match the pin in GO_PROTOBUF_GO_MOD.
                    "google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.2.0",
                ],
                input_digest=download_sources_result.output_digest,
                output_files=["gopath/bin/protoc-gen-go-grpc"],
                description="Build Go gRPC protobuf plugin for `protoc`.",
                platform=platform,
            ),
        ),
    )
    if go_plugin_build_result.output_digest == EMPTY_DIGEST:
        raise AssertionError(
            f"Failed to build protoc-gen-go:\n"
            f"stdout:\n{go_plugin_build_result.stdout.decode()}\n\n"
            f"stderr:\n{go_plugin_build_result.stderr.decode()}")
    if go_grpc_plugin_build_result.output_digest == EMPTY_DIGEST:
        raise AssertionError(
            f"Failed to build protoc-gen-go-grpc:\n"
            f"stdout:\n{go_grpc_plugin_build_result.stdout.decode()}\n\n"
            f"stderr:\n{go_grpc_plugin_build_result.stderr.decode()}")

    merged_output_digests = await Get(
        Digest,
        MergeDigests([
            go_plugin_build_result.output_digest,
            go_grpc_plugin_build_result.output_digest
        ]),
    )
    plugin_digest = await Get(
        Digest, RemovePrefix(merged_output_digests, "gopath/bin"))
    return _SetupGoProtocPlugin(plugin_digest)
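
The digest returned here is the one that generate_go_from_protobuf above mounts read-only at __protoc_gen_go and prepends to PATH. As a hedged sanity-check sketch that would live inside the rule just before the return (it is not present in the original):

plugin_snapshot = await Get(Snapshot, Digest, plugin_digest)
assert set(plugin_snapshot.files) == {"protoc-gen-go", "protoc-gen-go-grpc"}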