Code example #1
File: first_party_pkg.py Project: hephex/pants
async def setup_analyzer() -> PackageAnalyzerSetup:
    def get_file(filename: str) -> bytes:
        content = pkgutil.get_data("pants.backend.go.util_rules", filename)
        if not content:
            raise AssertionError(f"Unable to find resource for `{filename}`.")
        return content

    analyzer_sources_content = [
        FileContent(filename, get_file(filename))
        for filename in ("analyze_package.go", "read.go")
    ]

    source_digest, import_config = await MultiGet(
        Get(Digest, CreateDigest(analyzer_sources_content)),
        Get(ImportConfig, ImportConfigRequest,
            ImportConfigRequest.stdlib_only()),
    )

    built_analyzer_pkg = await Get(
        BuiltGoPackage,
        BuildGoPackageRequest(
            import_path="main",
            subpath="",
            digest=source_digest,
            go_file_names=tuple(fc.path for fc in analyzer_sources_content),
            s_file_names=(),
            direct_dependencies=(),
            minimum_go_version=None,
        ),
    )
    main_pkg_a_file_path = built_analyzer_pkg.import_paths_to_pkg_a_files[
        "main"]
    input_digest = await Get(
        Digest, MergeDigests([built_analyzer_pkg.digest,
                              import_config.digest]))

    analyzer = await Get(
        LinkedGoBinary,
        LinkGoBinaryRequest(
            input_digest=input_digest,
            archives=(main_pkg_a_file_path, ),
            import_config_path=import_config.CONFIG_PATH,
            output_filename=PackageAnalyzerSetup.PATH,
            description="Link Go package analyzer",
        ),
    )

    return PackageAnalyzerSetup(analyzer.digest)
Code example #2
File: archive.py Project: patricklaw/pants
async def maybe_extract_archive(digest: Digest) -> ExtractedArchive:
    """If digest contains a single archive file, extract it, otherwise return the input digest."""
    extract_archive_dir = "__extract_archive_dir"
    snapshot, output_dir_digest = await MultiGet(
        Get(Snapshot, Digest, digest),
        Get(Digest, CreateDigest([Directory(extract_archive_dir)])),
    )
    if len(snapshot.files) != 1:
        return ExtractedArchive(digest)

    fp = snapshot.files[0]
    is_zip = fp.endswith(".zip")
    is_tar = fp.endswith(
        (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tbz2", ".tar.xz", ".txz"))
    if not is_zip and not is_tar:
        return ExtractedArchive(digest)

    merge_digest_get = Get(Digest, MergeDigests((digest, output_dir_digest)))
    archive_path = f"../{fp}"
    if is_zip:
        input_digest, unzip_binary = await MultiGet(
            merge_digest_get,
            Get(UnzipBinary, _UnzipBinaryRequest()),
        )
        argv = unzip_binary.extract_archive_argv(archive_path)
        env = {}
    else:
        input_digest, tar_binary = await MultiGet(
            merge_digest_get,
            Get(TarBinary, _TarBinaryRequest()),
        )
        argv = tar_binary.extract_archive_argv(archive_path)
        # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Extract {fp}",
            level=LogLevel.DEBUG,
            output_directories=(".", ),
            working_directory=extract_archive_dir,
        ),
    )
    return ExtractedArchive(result.output_digest)
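Because this rule is keyed on Digest, any other rule can opt into extraction with a single Get. Below is a minimal sketch of a hypothetical consumer, assuming the engine API used throughout these examples and that ExtractedArchive exposes the wrapped digest as .digest:

    # Inside some other @rule (hypothetical consumer):
    extracted = await Get(ExtractedArchive, Digest, downloaded_digest)
    contents = await Get(DigestContents, Digest, extracted.digest)
    # If the input digest held exactly one .zip/.tar* file, `contents` now
    # holds the unpacked files; otherwise it is just the original contents.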
Code example #3
async def format_build_file_with_yapf(request: FormatWithYapfRequest,
                                      yapf: Yapf) -> RewrittenBuildFile:
    yapf_ics = await _find_python_interpreter_constraints_from_lockfile(yapf)
    yapf_pex_get = Get(VenvPex, PexRequest,
                       yapf.to_pex_request(interpreter_constraints=yapf_ics))
    build_file_digest_get = Get(Digest,
                                CreateDigest([request.to_file_content()]))
    config_files_get = Get(
        ConfigFiles, ConfigFilesRequest,
        yapf.config_request(recursive_dirname(request.path)))
    yapf_pex, build_file_digest, config_files = await MultiGet(
        yapf_pex_get, build_file_digest_get, config_files_get)

    input_digest = await Get(
        Digest, MergeDigests(
            (build_file_digest, config_files.snapshot.digest)))

    argv = ["--in-place"]
    if yapf.config:
        argv.extend(["--config", yapf.config])
    argv.extend(yapf.args)
    argv.append(request.path)

    yapf_result = await Get(
        ProcessResult,
        VenvPexProcess(
            yapf_pex,
            argv=argv,
            input_digest=input_digest,
            output_files=(request.path, ),
            description=f"Run Yapf on {request.path}.",
            level=LogLevel.DEBUG,
        ),
    )

    if yapf_result.output_digest == build_file_digest:
        return RewrittenBuildFile(request.path,
                                  request.lines,
                                  change_descriptions=())

    result_contents = await Get(DigestContents, Digest,
                                yapf_result.output_digest)
    assert len(result_contents) == 1
    result_lines = tuple(
        result_contents[0].content.decode("utf-8").splitlines())
    return RewrittenBuildFile(request.path,
                              result_lines,
                              change_descriptions=("Format with Yapf", ))
Code example #4
File: pex_test.py Project: rhysyngsun/pants
def test_additional_inputs(rule_runner: RuleRunner) -> None:
    # We use pex's --preamble-file option to set a custom preamble from a file.
    # This verifies that the file was indeed provided as additional input to the pex call.
    preamble_file = "custom_preamble.txt"
    preamble = "#!CUSTOM PREAMBLE\n"
    additional_inputs = rule_runner.request(
        Digest, [CreateDigest([FileContent(path=preamble_file, content=preamble.encode())])]
    )
    additional_pex_args = (f"--preamble-file={preamble_file}",)
    pex_output = create_pex_and_get_all_data(
        rule_runner, additional_inputs=additional_inputs, additional_pex_args=additional_pex_args
    )
    with zipfile.ZipFile(pex_output["local_path"], "r") as zipfp:
        with zipfp.open("__main__.py", "r") as main:
            main_content = main.read().decode()
    assert main_content[: len(preamble)] == preamble
Code example #5
def test_digest_entries_handles_empty_directory(
        rule_runner: RuleRunner) -> None:
    digest = rule_runner.request(Digest, [
        CreateDigest([Directory("a/b"),
                      FileContent("a/foo.txt", b"four\n")])
    ])
    entries = rule_runner.request(DigestEntries, [digest])
    assert entries == DigestEntries([
        Directory("a/b"),
        FileEntry(
            "a/foo.txt",
            FileDigest(
                "ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e",
                5),
        ),
    ])
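A FileDigest pairs a content fingerprint with the file's length in bytes; the 64 hex characters of the fingerprint suggest SHA-256. A stdlib-only sketch to reproduce the pair asserted above (assuming SHA-256 fingerprints):

    import hashlib

    content = b"four\n"
    # Expected to print the fingerprint and length used in the FileDigest above.
    print(hashlib.sha256(content).hexdigest(), len(content))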
Code example #6
    def test_not_executable(self):
        file_name = "echo.sh"
        file_contents = b'#!/bin/bash -eu\necho "Hello"\n'

        digest = self.request(Digest, [
            CreateDigest([FileContent(path=file_name, content=file_contents)])
        ])
        req = Process(
            argv=("./echo.sh", ),
            input_digest=digest,
            description="cat the contents of this file",
        )

        with pytest.raises(ExecutionError) as exc:
            self.request(ProcessResult, [req])
        assert "Permission" in str(exc.value)
Code example #7
File: graph_test.py Project: Spacerat/pants
async def generate_smalltalk_from_avro(
    request: GenerateSmalltalkFromAvroRequest,
) -> GeneratedSources:
    protocol_files = request.protocol_sources.files

    # Many codegen implementations will need to look up a protocol target's dependencies in their
    # rule. We add this here to ensure that this does not result in rule graph issues.
    _ = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))

    def generate_smalltalk(fp: str) -> FileContent:
        parent = str(PurePath(fp).parent).replace("src/avro", "src/smalltalk")
        file_name = f"{PurePath(fp).stem}.st"
        return FileContent(str(PurePath(parent, file_name)), b"Generated")

    result = await Get(Snapshot, CreateDigest([generate_smalltalk(fp) for fp in protocol_files]))
    return GeneratedSources(result)
Code example #8
File: process_test.py Project: jperkelens/pants
    def test_not_executable(self):
        file_name = "echo.sh"
        file_contents = b'#!/bin/bash -eu\necho "Hello"\n'

        digest = self.request_single_product(
            Digest,
            CreateDigest([FileContent(path=file_name, content=file_contents)]))
        req = Process(
            argv=("./echo.sh", ),
            input_digest=digest,
            description="cat the contents of this file",
        )

        with self.assertRaisesWithMessageContaining(ExecutionError,
                                                    "Permission"):
            self.request_single_product(ProcessResult, req)
Code example #9
async def generate_import_config(
        request: ImportConfigRequest,
        stdlib_imports: GoStdLibImports) -> ImportConfig:
    lines = [
        "# import config",
        *(f"packagefile {import_path}={pkg_a_path}" for import_path, pkg_a_path
          in request.import_paths_to_pkg_a_files.items()),
    ]
    if request.include_stdlib:
        lines.extend(
            f"packagefile {import_path}={static_file_path}"
            for import_path, static_file_path in stdlib_imports.items())
    content = "\n".join(lines).encode("utf-8")
    result = await Get(
        Digest, CreateDigest([FileContent(ImportConfig.CONFIG_PATH, content)]))
    return ImportConfig(result)
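For reference, the file this rule writes is a plain-text import config mapping import paths to compiled package archives, which the Go toolchain's compile and link steps consume. A hypothetical rendering (the paths here are made up):

    # import config
    packagefile main=__pkgs__/main/__pkg__.a
    packagefile fmt=gocache/fmt.a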
Code example #10
def generate_original_digest(rule_runner: RuleRunner) -> Digest:
    files = [
        FileContent(path, b"dummy content") for path in [
            "a.txt",
            "b.txt",
            "c.txt",
            "subdir/a.txt",
            "subdir/b.txt",
            "subdir2/a.txt",
            "subdir2/nested_subdir/x.txt",
        ]
    ]
    return rule_runner.request(
        Digest,
        [CreateDigest(files)],
    )
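This fixture pairs with DigestSubset tests such as code example #28 below (which uses the older self.* form of the same fixture, code example #11). A sketch of an equivalent subset request in the rule_runner style, with a hypothetical glob:

    subset_snapshot = rule_runner.request(
        Snapshot,
        [DigestSubset(
            generate_original_digest(rule_runner),
            PathGlobs(("subdir/*.txt",)),
        )],
    )
    # Only the two files directly under subdir/ match the glob.
    assert set(subset_snapshot.files) == {"subdir/a.txt", "subdir/b.txt"}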
Code example #11
File: fs_test.py Project: jperkelens/pants
 def generate_original_digest(self) -> Digest:
     content = b"dummy content"
     return self.request_single_product(
         Digest,
         CreateDigest(
             (
                 FileContent(path="a.txt", content=content),
                 FileContent(path="b.txt", content=content),
                 FileContent(path="c.txt", content=content),
                 FileContent(path="subdir/a.txt", content=content),
                 FileContent(path="subdir/b.txt", content=content),
                 FileContent(path="subdir2/a.txt", content=content),
                 FileContent(path="subdir2/nested_subdir/x.txt", content=content),
             )
         ),
     )
Code example #12
async def format_build_file_with_black(request: FormatWithBlackRequest,
                                       black: Black) -> RewrittenBuildFile:
    black_pex_get = Get(VenvPex, PexRequest, black.to_pex_request())
    build_file_digest_get = Get(Digest,
                                CreateDigest([request.to_file_content()]))
    config_files_get = Get(
        ConfigFiles, ConfigFilesRequest,
        black.config_request(recursive_dirname(request.path)))
    black_pex, build_file_digest, config_files = await MultiGet(
        black_pex_get, build_file_digest_get, config_files_get)

    input_digest = await Get(
        Digest, MergeDigests(
            (build_file_digest, config_files.snapshot.digest)))

    argv = []
    if black.config:
        argv.extend(["--config", black.config])
    argv.extend(black.args)
    argv.append(request.path)

    black_result = await Get(
        ProcessResult,
        VenvPexProcess(
            black_pex,
            argv=argv,
            input_digest=input_digest,
            output_files=(request.path, ),
            description=f"Run Black on {request.path}.",
            level=LogLevel.DEBUG,
        ),
    )

    if black_result.output_digest == build_file_digest:
        return RewrittenBuildFile(request.path,
                                  request.lines,
                                  change_descriptions=())

    result_contents = await Get(DigestContents, Digest,
                                black_result.output_digest)
    assert len(result_contents) == 1
    result_lines = tuple(
        result_contents[0].content.decode("utf-8").splitlines())
    return RewrittenBuildFile(request.path,
                              result_lines,
                              change_descriptions=("Format with Black", ))
Code example #13
async def internal_render_test_lockfile_fixtures(
    rendered_fixtures: RenderedJVMLockfileFixtures,
    workspace: Workspace,
    console: Console,
) -> InternalGenerateTestLockfileFixturesGoal:
    if not rendered_fixtures:
        console.write_stdout("No test lockfile fixtures found.\n")
        return InternalGenerateTestLockfileFixturesGoal(exit_code=0)

    digest_contents = [
        FileContent(rendered_fixture.path, rendered_fixture.content)
        for rendered_fixture in rendered_fixtures
    ]
    snapshot = await Get(Snapshot, CreateDigest(digest_contents))
    console.write_stdout(f"Writing test lockfile fixtures: {snapshot.files}\n")
    workspace.write_digest(snapshot.digest)
    return InternalGenerateTestLockfileFixturesGoal(exit_code=0)
Code example #14
def test_create_files(rule_runner: RuleRunner) -> None:
    files = [
        FileContent("a.txt", b"hello"),
        FileContent("somedir/b.txt", b"goodbye")
    ]
    digest = rule_runner.request(
        Digest,
        [CreateDigest(files)],
    )

    process = Process(
        argv=("/bin/cat", "a.txt", "somedir/b.txt"),
        input_digest=digest,
        description="",
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.stdout == b"hellogoodbye"
Code example #15
File: process_test.py Project: jperkelens/pants
    def test_multiple_file_creation(self):
        digest = self.request_single_product(
            Digest,
            CreateDigest((
                FileContent(path="a.txt", content=b"hello"),
                FileContent(path="b.txt", content=b"goodbye"),
            )),
        )

        req = Process(
            argv=("/bin/cat", "a.txt", "b.txt"),
            input_digest=digest,
            description="cat the contents of this file",
        )

        result = self.request_single_product(ProcessResult, req)
        self.assertEqual(result.stdout, b"hellogoodbye")
Code example #16
def test_pex_environment(rule_runner: RuleRunner) -> None:
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest(
                (
                    FileContent(
                        path="main.py",
                        content=textwrap.dedent(
                            """
                        from os import environ
                        print(f"LANG={environ.get('LANG')}")
                        print(f"ftp_proxy={environ.get('ftp_proxy')}")
                        """
                        ).encode(),
                    ),
                )
            ),
        ],
    )
    pex_output = create_pex_and_get_all_data(
        rule_runner,
        entry_point="main",
        sources=sources,
        additional_pants_args=(
            "--subprocess-environment-env-vars=LANG",  # Value should come from environment.
            "--subprocess-environment-env-vars=ftp_proxy=dummyproxy",
        ),
        env={"LANG": "es_PY.UTF-8"},
    )

    process = rule_runner.request(
        Process,
        [
            PexProcess(
                pex_output["pex"],
                argv=["python", "test.pex"],
                input_digest=pex_output["pex"].digest,
                description="Run the pex and check its reported environment",
            ),
        ],
    )

    result = rule_runner.request(ProcessResult, [process])
    assert b"LANG=es_PY.UTF-8" in result.stdout
    assert b"ftp_proxy=dummyproxy" in result.stdout
Code example #17
async def handle_bsp_scalac_options_request(
    request: HandleScalacOptionsRequest,
    build_root: BuildRoot,
    workspace: Workspace,
) -> HandleScalacOptionsResult:
    bsp_target = await Get(BSPBuildTargetInternal, BuildTargetIdentifier,
                           request.bsp_target_id)
    targets = await Get(
        Targets,
        AddressSpecs,
        bsp_target.specs.address_specs,
    )
    coarsened_targets = await Get(CoarsenedTargets,
                                  Addresses(tgt.address for tgt in targets))
    resolve = await Get(CoursierResolveKey, CoarsenedTargets,
                        coarsened_targets)
    lockfile = await Get(CoursierResolvedLockfile, CoursierResolveKey, resolve)

    resolve_digest = await Get(
        Digest,
        CreateDigest([
            FileEntry(entry.file_name, entry.file_digest)
            for entry in lockfile.entries
        ]),
    )

    resolve_digest = await Get(
        Digest, AddPrefix(resolve_digest, f"jvm/resolves/{resolve.name}/lib"))

    workspace.write_digest(resolve_digest, path_prefix=".pants.d/bsp")

    classpath = [
        build_root.pathlib_path.joinpath(
            f".pants.d/bsp/jvm/resolves/{resolve.name}/lib/{entry.file_name}").
        as_uri() for entry in lockfile.entries
    ]

    return HandleScalacOptionsResult(
        ScalacOptionsItem(
            target=request.bsp_target_id,
            options=(),
            classpath=tuple(classpath),
            class_directory=build_root.pathlib_path.joinpath(
                f".pants.d/bsp/jvm/resolves/{resolve.name}/classes").as_uri(),
        ))
Code example #18
File: rules.py Project: patricklaw/pants
async def flake8_lint_partition(partition: Flake8Partition, flake8: Flake8) -> LintResult:
    flake8_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=flake8.pex_requirements(),
            interpreter_constraints=partition.interpreter_constraints,
            main=flake8.main,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    source_files_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    flake8_pex, config_files, report_directory, source_files = await MultiGet(
        flake8_pex_get, config_files_get, report_directory_digest_get, source_files_get
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files.snapshot.digest, config_files.snapshot.digest, report_directory)
        ),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8),
            input_digest=input_digest,
            output_directories=(REPORT_DIR,),
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
Code example #19
async def create_coverage_config(coverage: CoverageSubsystem) -> CoverageConfig:
    coverage_config = configparser.ConfigParser()
    if coverage.config:
        config_contents = await Get(
            DigestContents,
            PathGlobs(
                globs=(coverage.config,),
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                description_of_origin=f"the option `--{coverage.options_scope}-config`",
            ),
        )
        coverage_config.read_string(config_contents[0].content.decode())
    _validate_and_update_config(coverage_config, coverage.config)
    config_stream = StringIO()
    coverage_config.write(config_stream)
    config_content = config_stream.getvalue()
    digest = await Get(Digest, CreateDigest([FileContent(".coveragerc", config_content.encode())]))
    return CoverageConfig(digest)
Code example #20
async def create_or_update_coverage_config(
        coverage: CoverageSubsystem) -> CoverageConfig:
    config_files = await Get(ConfigFiles, ConfigFilesRequest,
                             coverage.config_request)
    if config_files.snapshot.files:
        digest_contents = await Get(DigestContents, Digest,
                                    config_files.snapshot.digest)
        file_content = _update_config(digest_contents[0])
    else:
        cp = configparser.ConfigParser()
        cp.add_section("run")
        cp.set("run", "relative_files", "True")
        cp.set("run", "omit", "\npytest.pex/*")
        stream = StringIO()
        cp.write(stream)
        file_content = FileContent(".coveragerc", stream.getvalue().encode())
    digest = await Get(Digest, CreateDigest([file_content]))
    return CoverageConfig(digest, file_content.path)
Code example #21
async def setup_parser(hcl2_parser: TerraformHcl2Parser) -> ParserSetup:
    parser_script_content = pkgutil.get_data("pants.backend.terraform", "hcl2_parser.py")
    if not parser_script_content:
        raise ValueError("Unable to find source to hcl2_parser.py wrapper script.")

    parser_content = FileContent(
        path="__pants_tf_parser.py", content=parser_script_content, is_executable=True
    )
    parser_digest = await Get(Digest, CreateDigest([parser_content]))

    parser_pex = await Get(
        VenvPex,
        PexRequest,
        hcl2_parser.to_pex_request(
            main=EntryPoint(PurePath(parser_content.path).stem), sources=parser_digest
        ),
    )
    return ParserSetup(parser_pex)
Code example #22
async def edit_build_files(
        req: EditBuildFilesRequest,
        tailor_subsystem: TailorSubsystem) -> EditedBuildFiles:
    ptgts_by_build_file = group_by_build_file(tailor_subsystem.build_file_name,
                                              req.putative_targets)
    # There may be an existing *directory* whose name collides with that of a BUILD file
    # we want to create. This is more likely on a system with case-insensitive paths,
    # such as macOS. We detect such cases and use an alternate BUILD file name instead.
    existing_paths = await Get(Paths, PathGlobs(ptgts_by_build_file.keys()))
    existing_dirs = set(existing_paths.dirs)
    # Technically there could be a dir named "BUILD.pants" as well, but that's pretty unlikely.
    ptgts_by_build_file = {(f"{bf}.pants" if bf in existing_dirs else bf): pts
                           for bf, pts in ptgts_by_build_file.items()}
    existing_build_files_contents = await Get(
        DigestContents, PathGlobs(ptgts_by_build_file.keys()))
    existing_build_files_contents_by_path = {
        ebfc.path: ebfc.content
        for ebfc in existing_build_files_contents
    }

    def make_content(bf_path: str,
                     pts: Iterable[PutativeTarget]) -> FileContent:
        existing_content_bytes = existing_build_files_contents_by_path.get(
            bf_path)
        existing_content = (tailor_subsystem.build_file_header
                            if existing_content_bytes is None else
                            existing_content_bytes.decode())
        new_content_bytes = make_content_str(
            existing_content, tailor_subsystem.build_file_indent,
            pts).encode()
        return FileContent(bf_path, new_content_bytes)

    new_digest = await Get(
        Digest,
        CreateDigest([
            make_content(path, ptgts)
            for path, ptgts in ptgts_by_build_file.items()
        ]),
    )

    updated = set(existing_build_files_contents_by_path.keys())
    created = set(ptgts_by_build_file.keys()) - updated
    return EditedBuildFiles(new_digest, tuple(sorted(created)),
                            tuple(sorted(updated)))
Code example #23
def _run_setup_py(
    rule_runner: RuleRunner,
    plugin: str,
    interpreter_constraints: PexInterpreterConstraints,
    version: Optional[str],
    setup_py_args: Iterable[str],
    install_dir: str,
) -> None:
    pex_obj = _create_pex(rule_runner, interpreter_constraints)
    setup_py_file = FileContent(
        "setup.py",
        dedent(f"""
                from setuptools import setup

                setup(name="{plugin}", version="{version or DEFAULT_VERSION}")
            """).encode(),
    )
    source_digest = rule_runner.request(
        Digest,
        [CreateDigest([setup_py_file])],
    )
    merged_digest = rule_runner.request(
        Digest, [MergeDigests([pex_obj.digest, source_digest])])

    # This should run the Pex using the same interpreter used to create it. We must set the `PATH` so that the shebang
    # works.
    process = Process(
        argv=("./setup-py-runner.pex", "setup.py", *setup_py_args),
        env={
            k: os.environ[k]
            for k in ["PATH", "HOME", "PYENV_ROOT"] if k in os.environ
        },
        input_digest=merged_digest,
        description="Run setup.py",
        output_directories=("dist/", ),
    )
    result = rule_runner.request(ProcessResult, [process])
    result_snapshot = rule_runner.request(Snapshot, [result.output_digest])
    rule_runner.scheduler.write_digest(result.output_digest,
                                       path_prefix="output")
    safe_mkdir(install_dir)
    for path in result_snapshot.files:
        shutil.copy(PurePath(rule_runner.build_root, "output", path),
                    install_dir)
Code example #24
File: pex_test.py Project: adam-singer/pants
def test_pex_environment(rule_runner: RuleRunner,
                         pex_type: type[Pex | VenvPex]) -> None:
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest((FileContent(
                path="main.py",
                content=textwrap.dedent("""
                            from os import environ
                            print(f"LANG={environ.get('LANG')}")
                            print(f"ftp_proxy={environ.get('ftp_proxy')}")
                            """).encode(),
            ), )),
        ],
    )
    pex_output = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        main=EntryPoint("main"),
        sources=sources,
        additional_pants_args=(
            "--subprocess-environment-env-vars=LANG",  # Value should come from environment.
            "--subprocess-environment-env-vars=ftp_proxy=dummyproxy",
        ),
        interpreter_constraints=PexInterpreterConstraints(["CPython>=3.6"]),
        env={"LANG": "es_PY.UTF-8"},
    )

    pex = pex_output["pex"]
    pex_process_type = PexProcess if isinstance(pex, Pex) else VenvPexProcess
    process = rule_runner.request(
        Process,
        [
            pex_process_type(
                pex,
                description="Run the pex and check its reported environment",
            ),
        ],
    )

    result = rule_runner.request(ProcessResult, [process])
    assert b"LANG=es_PY.UTF-8" in result.stdout
    assert b"ftp_proxy=dummyproxy" in result.stdout
Code example #25
File: coverage_py.py Project: leigh-johnson/pants
async def create_coverage_config(
        coverage: CoverageSubsystem) -> CoverageConfig:
    coverage_config = configparser.ConfigParser()

    config_files = await Get(ConfigFiles, ConfigFilesRequest,
                             coverage.config_request)
    if config_files.snapshot.files:
        config_contents = await Get(DigestContents, Digest,
                                    config_files.snapshot.digest)
        coverage_config.read_string(config_contents[0].content.decode())

    _validate_and_update_config(coverage_config, coverage.config)
    config_stream = StringIO()
    coverage_config.write(config_stream)
    config_content = config_stream.getvalue()
    digest = await Get(
        Digest,
        CreateDigest([FileContent(".coveragerc", config_content.encode())]))
    return CoverageConfig(digest)
Code example #26
File: process.py Project: matze999/pants
async def find_binary(request: BinaryPathRequest) -> BinaryPaths:
    # TODO(John Sirois): Replace this script with a statically linked native binary so we don't
    #  depend on /bin/bash being available on the Process host.
    # TODO(#10507): Running the script directly from a shebang sometimes results in a "Text file
    #  busy" error.
    script_path = "./script.sh"
    script_content = dedent("""
        #!/usr/bin/env bash

        set -euo pipefail

        if command -v which > /dev/null; then
            command which -a $1
        else
            command -v $1
        fi
        """)
    script_digest = await Get(
        Digest,
        CreateDigest([
            FileContent(script_path,
                        script_content.encode(),
                        is_executable=True)
        ]),
    )

    paths = []
    search_path = create_path_env_var(request.search_path)
    result = await Get(
        FallibleProcessResult,
        Process(
            description=f"Searching for `{request.binary_name}` on PATH={search_path}",
            level=LogLevel.DEBUG,
            input_digest=script_digest,
            argv=[script_path, request.binary_name],
            env={"PATH": search_path},
        ),
    )
    if result.exit_code == 0:
        paths.extend(result.stdout.decode().splitlines())

    return BinaryPaths(binary_name=request.binary_name, paths=paths)
Code example #27
 def run_process(*, is_executable: bool) -> ProcessResult:
     digest = rule_runner.request(
         Digest,
         [
             CreateDigest([
                 FileContent(
                     "echo.sh",
                     b'#!/bin/bash -eu\necho "Hello"\n',
                     is_executable=is_executable,
                 )
             ])
         ],
     )
     process = Process(
         argv=("./echo.sh", ),
         input_digest=digest,
         description="cat the contents of this file",
     )
     return rule_runner.request(ProcessResult, [process])
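A usage sketch for this helper, mirroring the assertions of code example #6 (hypothetical test body):

    result = run_process(is_executable=True)
    assert result.stdout == b"Hello\n"  # the script echoes "Hello"

    with pytest.raises(ExecutionError) as exc:
        run_process(is_executable=False)  # without the bit, exec fails
    assert "Permission" in str(exc.value)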
Code example #28
    def test_nonexistent_filename_globs(self) -> None:
        # We expect to ignore, rather than error, on files that don't exist in the original snapshot.
        subset_snapshot = self.request_product(
            Snapshot,
            [
                DigestSubset(
                    self.generate_original_digest(),
                    PathGlobs(("some_file_not_in_snapshot.txt", "a.txt")),
                )
            ],
        )
        assert set(subset_snapshot.files) == {"a.txt"}

        content = b"dummy content"
        subset_input = CreateDigest((FileContent(path="a.txt",
                                                 content=content), ))

        subset_digest = self.request_product(Digest, [subset_input])
        assert subset_snapshot.digest == subset_digest
Code example #29
File: export_test.py Project: hephex/pants
def run_export_rule(rule_runner: RuleRunner,
                    targets: List[Target]) -> Tuple[int, str]:
    union_membership = UnionMembership(
        {ExportableDataRequest: [MockExportableDataRequest]})
    with open(os.path.join(rule_runner.build_root, "somefile"), "wb") as fp:
        fp.write(b"SOMEFILE")
    with mock_console(create_options_bootstrapper()) as (console,
                                                         stdio_reader):
        digest = rule_runner.request(
            Digest, [CreateDigest([FileContent("foo/bar", b"BAR")])])
        result: Export = run_rule_with_mocks(
            export,
            rule_args=[
                console,
                Targets(targets),
                create_goal_subsystem(ExportSubsystem),
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                union_membership,
                BuildRoot(),
                DistDir(relpath=Path("dist")),
            ],
            mock_gets=[
                MockGet(
                    output_type=ExportableData,
                    input_type=ExportableDataRequest,
                    mock=lambda edr: mock_export(edr, digest, (Symlink(
                        "somefile", "link_to_somefile"), )),
                ),
                MockGet(
                    output_type=Digest,
                    input_type=MergeDigests,
                    mock=lambda md: rule_runner.request(Digest, [md]),
                ),
                MockGet(
                    output_type=Digest,
                    input_type=AddPrefix,
                    mock=lambda ap: rule_runner.request(Digest, [ap]),
                ),
            ],
            union_membership=union_membership,
        )
        return result.exit_code, stdio_reader.get_stdout()
Code example #30
async def generate_jvm_lockfile(
    request: GenerateJvmLockfile, ) -> GenerateLockfileResult:
    resolved_lockfile = await Get(CoursierResolvedLockfile,
                                  ArtifactRequirements, request.artifacts)

    resolved_lockfile_contents = resolved_lockfile.to_serialized()
    metadata = JVMLockfileMetadata.new(request.artifacts)
    resolved_lockfile_contents = metadata.add_header_to_lockfile(
        resolved_lockfile_contents,
        regenerate_command=f"{bin_name()} generate-lockfiles",
        delimeter="#",
    )

    lockfile_digest = await Get(
        Digest,
        CreateDigest(
            [FileContent(request.lockfile_dest, resolved_lockfile_contents)]),
    )
    return GenerateLockfileResult(lockfile_digest, request.resolve_name,
                                  request.lockfile_dest)