Example #1
def test_cache_scope_per_restart() -> None:
    success_argv = ("/bin/bash", "-c", "echo $RANDOM")
    failure_argv = ("/bin/bash", "-c", "echo $RANDOM; exit 1")

    always_cache_success = Process(
        success_argv,
        cache_scope=ProcessCacheScope.PER_RESTART_ALWAYS,
        description="foo")
    always_cache_failure = Process(
        failure_argv,
        cache_scope=ProcessCacheScope.PER_RESTART_ALWAYS,
        description="foo")
    success_cache_success = Process(
        success_argv,
        cache_scope=ProcessCacheScope.PER_RESTART_SUCCESSFUL,
        description="foo")
    success_cache_failure = Process(
        failure_argv,
        cache_scope=ProcessCacheScope.PER_RESTART_SUCCESSFUL,
        description="foo")

    runner_one = new_rule_runner()

    def run1(process: Process) -> FallibleProcessResult:
        return runner_one.request(FallibleProcessResult, [process])

    always_cache_success_res1 = run1(always_cache_success)
    always_cache_failure_res1 = run1(always_cache_failure)
    success_cache_success_res1 = run1(success_cache_success)
    success_cache_failure_res1 = run1(success_cache_failure)

    runner_one.new_session("new session")
    always_cache_success_res2 = run1(always_cache_success)
    always_cache_failure_res2 = run1(always_cache_failure)
    success_cache_success_res2 = run1(success_cache_success)
    success_cache_failure_res2 = run1(success_cache_failure)

    # Even with a new session, most results should be memoized.
    assert always_cache_success_res1 is always_cache_success_res2
    assert always_cache_failure_res1 is always_cache_failure_res2
    assert success_cache_success_res1 is success_cache_success_res2
    assert success_cache_failure_res1 != success_cache_failure_res2

    # But a new scheduler removes all memoization. We do not cache to disk.
    runner_two = new_rule_runner()

    def run2(process: Process) -> FallibleProcessResult:
        return runner_two.request(FallibleProcessResult, [process])

    assert run2(always_cache_success) != always_cache_success_res1
    assert run2(always_cache_failure) != always_cache_failure_res1
    assert run2(success_cache_success) != success_cache_success_res1
    assert run2(success_cache_failure) != success_cache_failure_res1
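
A minimal contrast sketch, reusing the hypothetical helpers above: the default scope, ProcessCacheScope.SUCCESSFUL (see the fallback in Example #2), also persists successful results to the local disk cache, so they would survive even the new scheduler that invalidates everything here.

def test_default_cache_scope_sketch() -> None:
    # cache_scope is omitted, so it defaults to ProcessCacheScope.SUCCESSFUL:
    # successful runs are cached to disk, not merely memoized in-process.
    process = Process(
        ("/bin/bash", "-c", "echo $RANDOM"),
        description="foo",
    )
    result = new_rule_runner().request(FallibleProcessResult, [process])
    assert result.exit_code == 0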
Example #2
async def jvm_process(bash: BashBinary, request: JvmProcess) -> Process:
    jdk = request.jdk

    immutable_input_digests = {
        **jdk.immutable_input_digests,
        **request.extra_immutable_input_digests,
    }
    env = {
        "PANTS_INTERNAL_ABSOLUTE_PREFIX": "",
        **jdk.env,
        **request.extra_env,
    }

    use_nailgun = []
    if request.use_nailgun:
        use_nailgun = [
            *jdk.immutable_input_digests, *request.extra_nailgun_keys
        ]

    return Process(
        [*jdk.args(bash, request.classpath_entries), *request.argv],
        input_digest=request.input_digest,
        immutable_input_digests=immutable_input_digests,
        use_nailgun=use_nailgun,
        description=request.description,
        level=request.level,
        output_directories=request.output_directories,
        env=env,
        platform=request.platform,
        timeout_seconds=request.timeout_seconds,
        append_only_caches=jdk.append_only_caches,
        output_files=request.output_files,
        cache_scope=request.cache_scope or ProcessCacheScope.SUCCESSFUL,
    )
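
A sketch of a call site for this conversion; the JvmProcess constructor fields shown are assumptions inferred from the attributes the rule reads above, and com.example.Main is a hypothetical entry point.

# Inside some @rule body, given `jdk` (the object exposing args/env above):
result = await Get(
    ProcessResult,
    JvmProcess(
        jdk=jdk,
        argv=("com.example.Main",),  # hypothetical main class
        classpath_entries=(),
        input_digest=EMPTY_DIGEST,
        description="Run com.example.Main",
    ),
)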
Example #3
def test_pex_execution(rule_runner: RuleRunner) -> None:
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest((
                FileContent("main.py", b'print("from main")'),
                FileContent("subdir/sub.py", b'print("from sub")'),
            )),
        ],
    )
    pex_output = create_pex_and_get_all_data(rule_runner,
                                             main=EntryPoint("main"),
                                             sources=sources)

    pex_files = pex_output["files"]
    assert "pex" not in pex_files
    assert "main.py" in pex_files
    assert "subdir/sub.py" in pex_files

    # This should run the Pex using the same interpreter used to create it. We must set the `PATH` so that the shebang
    # works.
    process = Process(
        argv=("./test.pex", ),
        env={"PATH": os.getenv("PATH", "")},
        input_digest=pex_output["pex"].digest,
        description="Run the pex and make sure it works",
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.stdout == b"from main\n"
Example #4
    def test_fallible_failing_command_returns_exited_result(self):
        request = Process(argv=("/bin/bash", "-c", "exit 1"),
                          description="one-cat")

        result = self.request(FallibleProcessResult, [request])

        self.assertEqual(result.exit_code, 1)
Example #5
async def a_rule() -> TrueResult:
    proc = Process(
        ["/bin/sh", "-c", "true"],
        description="always true",
    )
    _ = await Get(ProcessResult, MultiPlatformProcess({None: proc}))
    return TrueResult()
Example #6
    def test_context_object(self):
        scheduler = self.scheduler

        def callback(**kwargs) -> None:
            context = kwargs["context"]
            assert isinstance(context, StreamingWorkunitContext)

            completed_workunits = kwargs["completed_workunits"]
            for workunit in completed_workunits:
                if "artifacts" in workunit and "stdout_digest" in workunit["artifacts"]:
                    digest = workunit["artifacts"]["stdout_digest"]
                    output = context.single_file_digests_to_bytes([digest])
                    assert output == (b"stdout output\n",)

        handler = StreamingWorkunitHandler(
            scheduler,
            callbacks=[callback],
            report_interval_seconds=0.01,
            max_workunit_verbosity=LogLevel.INFO,
        )

        stdout_process = Process(
            argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
        )

        with handler.session():
            self.request(ProcessResult, [stdout_process])
Example #7
async def setup_pex_process(request: PexProcess,
                            pex_environment: PexEnvironment) -> Process:
    pex = request.pex
    complete_pex_env = pex_environment.in_sandbox(
        working_directory=request.working_directory)
    argv = complete_pex_env.create_argv(pex.name,
                                        *request.argv,
                                        python=pex.python)
    env = {
        **complete_pex_env.environment_dict(python_configured=pex.python is not None),
        **(request.extra_env or {}),
    }
    input_digest = (
        await Get(Digest, MergeDigests((pex.digest, request.input_digest)))
        if request.input_digest
        else pex.digest
    )
    return Process(
        argv,
        description=request.description,
        level=request.level,
        input_digest=input_digest,
        working_directory=request.working_directory,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches=complete_pex_env.append_only_caches,
        timeout_seconds=request.timeout_seconds,
        execution_slot_variable=request.execution_slot_variable,
        cache_scope=request.cache_scope,
    )
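
Typical call-site usage, sketched; the keyword arguments mirror the request attributes read above, but treat the exact constructor signature as an assumption.

# Inside some @rule body, after building `pex: Pex`:
result = await Get(
    ProcessResult,
    PexProcess(
        pex,
        argv=("--version",),  # hypothetical tool arguments
        description="Check the tool version",
        level=LogLevel.DEBUG,
    ),
)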
Example #8
def test_output_digest(rule_runner: RuleRunner, working_directory: str | None) -> None:
    # Test that the output files are relative to the working directory, both in how
    # they're specified, and their paths in the output_digest.
    input_digest = (rule_runner.request(
        Digest,
        [CreateDigest([Directory(working_directory)])],
    ) if working_directory else EMPTY_DIGEST)
    process = Process(
        input_digest=input_digest,
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland", ),
        working_directory=working_directory,
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.output_digest == Digest(
        fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        serialized_bytes_length=80,
    )

    digest_contents = rule_runner.request(DigestContents,
                                          [result.output_digest])
    assert digest_contents == DigestContents(
        [FileContent("roland", b"European Burmese", False)])
Example #9
def test_pex_execution(rule_runner: RuleRunner) -> None:
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest((
                FileContent("main.py", b'print("from main")'),
                FileContent("subdir/sub.py", b'print("from sub")'),
            )),
        ],
    )
    pex_output = create_pex_and_get_all_data(rule_runner,
                                             entry_point="main",
                                             sources=sources)

    pex_files = pex_output["files"]
    assert "pex" not in pex_files
    assert "main.py" in pex_files
    assert "subdir/sub.py" in pex_files

    # We reasonably expect there to be a python interpreter on the test-running process's path.
    env = {"PATH": os.getenv("PATH", "")}

    process = Process(
        argv=("python", "test.pex"),
        env=env,
        input_digest=pex_output["pex"].digest,
        description="Run the pex and make sure it works",
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.stdout == b"from main\n"
Example #10
async def render_classpath_entry(
        classpath_entry: ClasspathEntry,
        unzip_binary: UnzipBinary) -> RenderedClasspath:
    dest_dir = "dest"
    process_results = await MultiGet(
        Get(
            ProcessResult,
            Process(
                argv=[
                    unzip_binary.path,
                    "-d",
                    dest_dir,
                    filename,
                ],
                input_digest=classpath_entry.digest,
                output_directories=(dest_dir, ),
                description=f"Extract {filename}",
            ),
        ) for filename in classpath_entry.filenames)

    listing_snapshots = await MultiGet(
        Get(Snapshot, RemovePrefix(pr.output_digest, dest_dir))
        for pr in process_results)

    return RenderedClasspath({
        path: set(listing.files)
        for path, listing in zip(classpath_entry.filenames, listing_snapshots)
    })
Example #11
async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports:
    script = _SCRIPT_FORMAT.format(min_dots=request.string_imports_min_dots).encode()
    python_interpreter, script_digest, stripped_sources = await MultiGet(
        Get(PythonExecutable, InterpreterConstraints, request.interpreter_constraints),
        Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", script)])),
        Get(StrippedSourceFiles, SourceFilesRequest([request.source])),
    )

    # We operate on PythonSourceField, which should be one file.
    assert len(stripped_sources.snapshot.files) == 1
    file = stripped_sources.snapshot.files[0]

    input_digest = await Get(
        Digest, MergeDigests([script_digest, stripped_sources.snapshot.digest])
    )
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                python_interpreter.path,
                "./__parse_python_imports.py",
                file,
            ],
            input_digest=input_digest,
            description=f"Determine Python imports for {request.source.address}",
            env={"STRING_IMPORTS": "y" if request.string_imports else "n"},
            level=LogLevel.DEBUG,
        ),
    )
    # See above for where we explicitly encoded as utf8. Even though utf8 is the
    # default for decode(), we make that explicit here for emphasis.
    return ParsedPythonImports(process_result.stdout.decode("utf8").strip().splitlines())
Example #12
async def setup_pex_process(request: PexProcess,
                            pex_environment: PexEnvironment) -> Process:
    argv = pex_environment.create_argv(
        f"./{request.pex.name}",
        *request.argv,
        python=request.pex.python,
    )
    env = {
        **pex_environment.environment_dict(python_configured=request.pex.python is not None),
        **(request.extra_env or {}),
    }
    process = Process(
        argv,
        description=request.description,
        level=request.level,
        input_digest=request.input_digest,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        timeout_seconds=request.timeout_seconds,
        execution_slot_variable=request.execution_slot_variable,
    )
    if not request.uncacheable:
        return process
    return await Get(Process, UncacheableProcess(process))
Example #13
async def analyze_first_party_package(
    request: FirstPartyPkgAnalysisRequest,
    analyzer: PackageAnalyzerSetup,
    golang_subsystem: GolangSubsystem,
) -> FallibleFirstPartyPkgAnalysis:
    wrapped_target, import_path_info, owning_go_mod = await MultiGet(
        Get(WrappedTarget, Address, request.address),
        Get(FirstPartyPkgImportPath, FirstPartyPkgImportPathRequest(request.address)),
        Get(OwningGoMod, OwningGoModRequest(request.address)),
    )
    go_mod_info = await Get(GoModInfo, GoModInfoRequest(owning_go_mod.address))

    pkg_sources = await Get(
        HydratedSources,
        HydrateSourcesRequest(wrapped_target.target[GoPackageSourcesField]),
    )

    input_digest = await Get(Digest, MergeDigests([pkg_sources.snapshot.digest, analyzer.digest]))
    result = await Get(
        FallibleProcessResult,
        Process(
            (analyzer.path, request.address.spec_path or "."),
            input_digest=input_digest,
            description=f"Determine metadata for {request.address}",
            level=LogLevel.DEBUG,
            env={"CGO_ENABLED": "0"},
        ),
    )
    return FallibleFirstPartyPkgAnalysis.from_process_result(
        result,
        dir_path=request.address.spec_path,
        import_path=import_path_info.import_path,
        minimum_go_version=go_mod_info.minimum_go_version or "",
        description_of_source=f"first-party Go package `{request.address}`",
    )
Example #14
async def setup_gofmt(setup_request: SetupRequest, goroot: GoRoot) -> Setup:
    source_files = await Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    argv = (
        os.path.join(goroot.path, "bin/gofmt"),
        "-l" if setup_request.check_only else "-w",
        *source_files_snapshot.files,
    )
    process = Process(
        argv=argv,
        input_digest=source_files_snapshot.digest,
        output_files=source_files_snapshot.files,
        description=f"Run gofmt on {pluralize(len(source_files_snapshot.files), 'file')}.",
        level=LogLevel.DEBUG,
    )
    return Setup(process=process, original_snapshot=source_files_snapshot)
Example #15
async def run_buf(request: BufRequest, buf: BufSubsystem) -> LintResults:
    if buf.skip:
        return LintResults([], linter_name=request.name)

    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest(
            (field_set.address for field_set in request.field_sets)),
    )

    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(tgt[ProtobufSourceField]
                           for tgt in transitive_targets.closure
                           if tgt.has_field(ProtobufSourceField)),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (field_set.sources for field_set in request.field_sets),
            for_sources_types=(ProtobufSourceField, ),
            enable_codegen=True,
        ),
    )

    download_buf_get = Get(DownloadedExternalTool, ExternalToolRequest,
                           buf.get_request(Platform.current))

    target_sources_stripped, all_sources_stripped, downloaded_buf = await MultiGet(
        target_stripped_sources_request, all_stripped_sources_request,
        download_buf_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            target_sources_stripped.snapshot.digest,
            all_sources_stripped.snapshot.digest,
            downloaded_buf.digest,
        )),
    )

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=[
                downloaded_buf.exe,
                "lint",
                *buf.args,
                "--path",
                ",".join(target_sources_stripped.snapshot.files),
            ],
            input_digest=input_digest,
            description=f"Run Buf on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    result = LintResult.from_fallible_process_result(process_result)

    return LintResults([result], linter_name=request.name)
Example #16
    def build_image(
        self,
        tags: tuple[str, ...],
        digest: Digest,
        dockerfile: str | None = None,
        build_args: DockerBuildArgs | None = None,
        env: Mapping[str, str] | None = None,
    ) -> Process:
        args = [self.path, "build"]

        for tag in tags:
            args.extend(["-t", tag])

        if build_args:
            for build_arg in build_args:
                args.extend(["--build-arg", build_arg])

        if dockerfile:
            args.extend(["-f", dockerfile])

        # Add build context root.
        args.append(".")

        return Process(
            argv=tuple(args),
            description=(f"Building docker image {tags[0]}" +
                         (f" +{pluralize(len(tags)-1, 'additional tag')}."
                          if len(tags) > 1 else ".")),
            env=env,
            input_digest=digest,
            cache_scope=ProcessCacheScope.PER_SESSION,
        )
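
A usage sketch; `docker` names whichever binary wrapper exposes build_image (hypothetical here), and context_digest is assumed to hold the merged build context.

# Inside some @rule body:
process = docker.build_image(
    tags=("example:latest",),
    digest=context_digest,
)
result = await Get(ProcessResult, Process, process)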
Example #17
    def test_create_from_snapshot_with_env(self):
        req = Process(
            argv=("foo",),
            description="Some process",
            env={"VAR": "VAL"},
        )
        self.assertEqual(req.env, ("VAR", "VAL"))
Example #18
    def test_write_file(self):
        request = Process(
            argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
            description="echo roland",
            output_files=("roland", ),
        )

        process_result = self.request_single_product(ProcessResult, request)

        self.assertEqual(
            process_result.output_digest,
            Digest(
                fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
                serialized_bytes_length=80,
            ),
        )

        files_content_result = self.request_single_product(
            FilesContent,
            process_result.output_digest,
        )

        self.assertEqual(files_content_result.dependencies,
                         (FileContent("roland", b"European Burmese", False), ))
Example #19
def test_pex_execution(rule_runner: RuleRunner, pex_type: type[Pex | VenvPex],
                       internal_only: bool) -> None:
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest((
                FileContent("main.py", b'print("from main")'),
                FileContent("subdir/sub.py", b'print("from sub")'),
            )),
        ],
    )
    pex_data = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        internal_only=internal_only,
        main=EntryPoint("main"),
        sources=sources,
    )

    assert "pex" not in pex_data.files
    assert "main.py" in pex_data.files
    assert "subdir/sub.py" in pex_data.files

    # This should run the Pex using the same interpreter used to create it. We must set the `PATH`
    # so that the shebang works.
    pex_exe = (f"./{pex_data.sandbox_path}" if pex_data.is_zipapp else
               os.path.join(pex_data.sandbox_path, "__main__.py"))
    process = Process(
        argv=(pex_exe, ),
        env={"PATH": os.getenv("PATH", "")},
        input_digest=pex_data.pex.digest,
        description="Run the pex and make sure it works",
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.stdout == b"from main\n"
Example #20
async def setup_venv_pex_process(
    request: VenvPexProcess, pex_environment: PexEnvironment
) -> Process:
    venv_pex = request.venv_pex
    pex_bin = (
        os.path.relpath(venv_pex.pex.argv0, request.working_directory)
        if request.working_directory
        else venv_pex.pex.argv0
    )
    argv = (pex_bin, *request.argv)
    input_digest = (
        await Get(Digest, MergeDigests((venv_pex.digest, request.input_digest)))
        if request.input_digest
        else venv_pex.digest
    )
    return Process(
        argv=argv,
        description=request.description,
        level=request.level,
        input_digest=input_digest,
        working_directory=request.working_directory,
        env=request.extra_env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches=pex_environment.in_sandbox(
            working_directory=request.working_directory
        ).append_only_caches,
        timeout_seconds=request.timeout_seconds,
        execution_slot_variable=request.execution_slot_variable,
        cache_scope=request.cache_scope,
    )
Example #21
    def build_image(
            self,
            tags: tuple[str, ...],
            digest: Digest,
            dockerfile: str,
            build_args: DockerBuildArgs,
            context_root: str,
            env: Mapping[str, str],
            extra_args: tuple[str, ...] = (),
    ) -> Process:
        args = [self.path, "build", *extra_args]

        for tag in tags:
            args.extend(["--tag", tag])

        for build_arg in build_args:
            args.extend(["--build-arg", build_arg])

        args.extend(["--file", dockerfile])

        # Docker context root.
        args.append(context_root)

        return Process(
            argv=tuple(args),
            description=(f"Building docker image {tags[0]}" +
                         (f" +{pluralize(len(tags)-1, 'additional tag')}."
                          if len(tags) > 1 else "")),
            env=self._get_process_environment(env),
            input_digest=digest,
            immutable_input_digests=self.extra_input_digests,
            cache_scope=ProcessCacheScope.PER_SESSION,
        )
Example #22
async def parse_python_imports(request: ParsePythonImportsRequest) -> ParsedPythonImports:
    python_interpreter, script_digest, stripped_sources = await MultiGet(
        Get(PythonExecutable, PexInterpreterConstraints, request.interpreter_constraints),
        Get(Digest, CreateDigest([FileContent("__parse_python_imports.py", _SCRIPT.encode())])),
        Get(StrippedSourceFiles, SourceFilesRequest([request.sources])),
    )
    input_digest = await Get(
        Digest, MergeDigests([script_digest, stripped_sources.snapshot.digest])
    )
    process_result = await Get(
        ProcessResult,
        Process(
            argv=[
                python_interpreter.path,
                "./__parse_python_imports.py",
                *stripped_sources.snapshot.files,
            ],
            input_digest=input_digest,
            description=f"Determine Python imports for {request.sources.address}",
            level=LogLevel.DEBUG,
        ),
    )
    explicit_imports, _, string_imports = process_result.stdout.decode().partition("--")
    return ParsedPythonImports(
        explicit_imports=FrozenOrderedSet(explicit_imports.strip().splitlines()),
        string_imports=FrozenOrderedSet(string_imports.strip().splitlines()),
    )
Example #23
def test_streaming_workunits_expanded_specs(run_tracker: RunTracker) -> None:
    rule_runner = RuleRunner(
        target_types=[PythonLibrary],
        rules=[
            QueryRule(ProcessResult, (Process,)),
        ],
    )

    rule_runner.set_options(["--backend-packages=pants.backend.python"])

    rule_runner.create_file("src/python/somefiles/BUILD", "python_library()")
    rule_runner.create_file("src/python/somefiles/a.py", "print('')")
    rule_runner.create_file("src/python/somefiles/b.py", "print('')")

    rule_runner.create_file("src/python/others/BUILD", "python_library()")
    rule_runner.create_file("src/python/others/a.py", "print('')")
    rule_runner.create_file("src/python/others/b.py", "print('')")

    specs = SpecsParser(get_buildroot()).parse_specs(
        ["src/python/somefiles::", "src/python/others/b.py"]
    )

    class Callback(WorkunitsCallback):
        @property
        def can_finish_async(self) -> bool:
            return False

        def __call__(self, **kwargs) -> None:
            context = kwargs["context"]
            assert isinstance(context, StreamingWorkunitContext)

            expanded = context.get_expanded_specs()
            targets = expanded.targets

            assert len(targets.keys()) == 2
            assert targets["src/python/others/b.py"] == [
                TargetInfo(filename="src/python/others/b.py")
            ]
            assert set(targets["src/python/somefiles"]) == {
                TargetInfo(filename="src/python/somefiles/a.py"),
                TargetInfo(filename="src/python/somefiles/b.py"),
            }

    handler = StreamingWorkunitHandler(
        scheduler=rule_runner.scheduler,
        run_tracker=run_tracker,
        callbacks=[Callback()],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
        specs=specs,
        options_bootstrapper=create_options_bootstrapper(
            ["--backend-packages=pants.backend.python"]
        ),
        pantsd=False,
    )
    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"), description="Stdout process"
    )
    with handler:
        rule_runner.request(ProcessResult, [stdout_process])
Example #24
    def test_pex_execution(self) -> None:
        sources_content = InputFilesContent((
            FileContent(path="main.py", content=b'print("from main")'),
            FileContent(path="subdir/sub.py", content=b'print("from sub")'),
        ))

        sources = self.request_single_product(Digest, sources_content)
        pex_output = self.create_pex_and_get_all_data(entry_point="main",
                                                      sources=sources)

        pex_files = pex_output["files"]
        assert "pex" not in pex_files
        assert "main.py" in pex_files
        assert "subdir/sub.py" in pex_files

        init_subsystem(PythonSetup)
        python_setup = PythonSetup.global_instance()
        env = {
            "PATH": create_path_env_var(python_setup.interpreter_search_paths)
        }

        process = Process(
            argv=("python", "test.pex"),
            env=env,
            input_files=pex_output["pex"].digest,
            description="Run the pex and make sure it works",
        )
        result = self.request_single_product(ProcessResult, process)
        assert result.stdout == b"from main\n"
Example #25
async def loose_classfiles(classpath_entry: ClasspathEntry,
                           unzip_binary: UnzipBinary) -> LooseClassfiles:
    dest_dir = "dest"
    process_results = await MultiGet(
        Get(
            ProcessResult,
            Process(
                argv=[
                    unzip_binary.path,
                    "-d",
                    dest_dir,
                    filename,
                ],
                output_directories=(dest_dir, ),
                description=f"Extract {filename}",
                immutable_input_digests=dict(
                    ClasspathEntry.immutable_inputs([classpath_entry])),
                level=LogLevel.TRACE,
            ),
        )
        for filename in ClasspathEntry.immutable_inputs_args([classpath_entry])
    )

    merged_digest = await Get(
        Digest, MergeDigests(pr.output_digest for pr in process_results))

    return LooseClassfiles(await Get(Digest,
                                     RemovePrefix(merged_digest, dest_dir)))
Example #26
def _deploy_jar_test(rule_runner: RuleRunner, target_name: str) -> None:
    tgt = rule_runner.get_target(Address("", target_name=target_name))
    fat_jar = rule_runner.request(
        BuiltPackage,
        [DeployJarFieldSet.create(tgt)],
    )

    jdk_setup = rule_runner.request(JdkSetup, [])
    bash = rule_runner.request(BashBinary, [])

    input_digests = rule_runner.request(
        Digest, [MergeDigests([jdk_setup.digest, fat_jar.digest])])
    process_result = rule_runner.request(
        ProcessResult,
        [
            Process(
                argv=jdk_setup.args(bash, []) + ("-jar", "dave.jar"),
                description="Run that test jar",
                input_digest=input_digests,
                append_only_caches=jdk_setup.append_only_caches,
                env=jdk_setup.env,
            )
        ],
    )

    assert process_result.stdout.decode("utf-8").strip() == "Hello, World!"
Example #27
async def count_loc(
    console: Console,
    succinct_code_counter: SuccinctCodeCounter,
    specs_snapshot: SpecsSnapshot,
) -> CountLinesOfCode:
    if not specs_snapshot.snapshot.files:
        return CountLinesOfCode(exit_code=0)

    scc_program = await Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        succinct_code_counter.get_request(Platform.current),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((scc_program.digest, specs_snapshot.snapshot.digest)))
    result = await Get(
        ProcessResult,
        Process(
            argv=(scc_program.exe, *succinct_code_counter.args),
            input_digest=input_digest,
            description=f"Count lines of code for {pluralize(len(specs_snapshot.snapshot.files), 'file')}",
            level=LogLevel.DEBUG,
        ),
    )
    console.print_stdout(result.stdout.decode())
    return CountLinesOfCode(exit_code=0)
Example #28
async def generate_testmain(
    request: GenerateTestMainRequest, analyzer: AnalyzerSetup
) -> GeneratedTestMain:
    input_digest = await Get(Digest, MergeDigests([request.digest, analyzer.digest]))

    test_paths = tuple(f"{GeneratedTestMain.TEST_PKG}:{path}" for path in request.test_paths)
    xtest_paths = tuple(f"{GeneratedTestMain.XTEST_PKG}:{path}" for path in request.xtest_paths)

    result = await Get(
        ProcessResult,
        Process(
            argv=(analyzer.PATH, request.import_path, *test_paths, *xtest_paths),
            input_digest=input_digest,
            description=f"Analyze Go test sources for {request.import_path}",
            level=LogLevel.DEBUG,
            output_files=("_testmain.go",),
        ),
    )

    metadata = json.loads(result.stdout.decode("utf-8"))
    return GeneratedTestMain(
        digest=result.output_digest,
        has_tests=metadata["has_tests"],
        has_xtests=metadata["has_xtests"],
    )
Example #29
async def create_archive(request: CreateArchive) -> Digest:
    if request.format == ArchiveFormat.ZIP:
        zip_binary = await Get(ZipBinary, _ZipBinaryRequest())
        argv = zip_binary.create_archive_argv(request)
        env = {}
        input_digest = request.snapshot.digest
    else:
        tar_binary = await Get(TarBinary, _TarBinaryRequest())
        argv = tar_binary.create_archive_argv(request)
        # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH.
        env = {"PATH": os.pathsep.join(SEARCH_PATHS)}
        # `tar` requires that the output filename's parent directory exists.
        output_dir_digest = await Get(
            Digest,
            CreateDigest([Directory(os.path.dirname(request.output_filename))
                          ]))
        input_digest = await Get(
            Digest, MergeDigests([output_dir_digest, request.snapshot.digest]))

    result = await Get(
        ProcessResult,
        Process(
            argv=argv,
            env=env,
            input_digest=input_digest,
            description=f"Create {request.output_filename}",
            level=LogLevel.DEBUG,
            output_files=(request.output_filename, ),
        ),
    )
    return result.output_digest
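
Usage sketch; the CreateArchive fields follow the attributes the rule reads above, and ArchiveFormat.TGZ is assumed to exist alongside ArchiveFormat.ZIP.

# Inside some @rule body, given `snapshot: Snapshot` of the files to archive:
archive_digest = await Get(
    Digest,
    CreateArchive(
        snapshot,
        output_filename="dist/out.tar.gz",
        format=ArchiveFormat.TGZ,
    ),
)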
Example #30
async def setup_pex_cli_process(
    request: PexCliProcess,
    pex_binary: PexBinary,
    pex_environment: PexEnvironment,
    python_native_code: PythonNativeCode,
) -> Process:
    downloaded_pex_bin = await Get(DownloadedExternalTool, ExternalToolRequest,
                                   pex_binary.get_request(Platform.current))

    input_digest = (
        await Get(
            Digest,
            MergeDigests([request.additional_input_digest, downloaded_pex_bin.digest]),
        )
        if request.additional_input_digest
        else downloaded_pex_bin.digest
    )

    pex_root_path = ".cache/pex_root"
    argv = pex_environment.create_argv(downloaded_pex_bin.exe, *request.argv,
                                       "--pex-root", pex_root_path)
    env = {
        **pex_environment.environment_dict,
        **python_native_code.environment_dict,
        **(request.extra_env or {}),
    }

    return Process(
        argv,
        description=request.description,
        input_digest=input_digest,
        env=env,
        output_files=request.output_files,
        output_directories=request.output_directories,
        append_only_caches={"pex_root": pex_root_path},
    )