Example 1
def create_dynamic_remote_options(
    *,
    initial_headers: dict[str, str] | None = None,
    address: str | None = "grpc://fake.url:10",
    token_path: str | None = None,
    plugin: str | None = None,
) -> DynamicRemoteOptions:
    if initial_headers is None:
        initial_headers = {}
    args = [
        "--remote-cache-read",
        f"--remote-execution-address={address}",
        f"--remote-store-address={address}",
        f"--remote-store-headers={initial_headers}",
        f"--remote-execution-headers={initial_headers}",
        "--remote-instance-name=main",
    ]
    if token_path:
        args.append(f"--remote-oauth-bearer-token-path={token_path}")
    if plugin:
        args.append(f"--remote-auth-plugin={plugin}")
    ob = create_options_bootstrapper(args)
    env = CompleteEnvironment({})
    _build_config, options = OptionsInitializer(ob).build_config_and_options(ob, env, raise_=False)
    return DynamicRemoteOptions.from_options(options, env)[0]
Example 2
    def set_options(
        self,
        args: Iterable[str],
        *,
        env: Mapping[str, str] | None = None,
        env_inherit: set[str] | None = None,
    ) -> None:
        """Update the engine session with new options and/or environment variables.

        The environment variables will be used to set the `CompleteEnvironment`, which is the
        environment variables captured by the parent Pants process. Some rules use this to be able
        to read arbitrary env vars. Any options that start with `PANTS_` will also be used to set
        options.

        Environment variables listed in `env_inherit` and not in `env` will be inherited from the
        test runner's environment (os.environ).

        This will override any previously configured values.
        """
        env = {
            **{
                k: os.environ[k]
                for k in (env_inherit or set()) if k in os.environ
            },
            **(env or {}),
        }
        self.options_bootstrapper = create_options_bootstrapper(args=args,
                                                                env=env)
        self.environment = CompleteEnvironment(env)
        self._set_new_session(self.scheduler.scheduler)
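A minimal usage sketch for the method above; the `rule_runner` receiver and the specific flags and variables are illustrative assumptions, not taken from the snippet:

# Hypothetical caller; any object exposing set_options would do.
rule_runner.set_options(
    ["--backend-packages=['pants.backend.python']"],
    env={"PANTS_DYNAMIC_UI": "false"},   # PANTS_-prefixed vars also set options
    env_inherit={"PATH", "PYENV_ROOT"},  # copied from os.environ when present
)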
Example 3
def create_execution_options(
    *,
    initial_headers: dict[str, str],
    token_path: str | None = None,
    plugin: str | None = None,
    remote_store_address: str = "grpc://fake.url:10",
    remote_execution_address: str = "grpc://fake.url:10",
    local_only: bool = False,
) -> ExecutionOptions:
    args = [
        "--remote-cache-read",
        f"--remote-execution-address={remote_execution_address}",
        f"--remote-store-address={remote_store_address}",
        f"--remote-store-headers={initial_headers}",
        f"--remote-execution-headers={initial_headers}",
        "--remote-instance-name=main",
    ]
    if token_path:
        args.append(f"--remote-oauth-bearer-token-path={token_path}")
    if plugin:
        args.append(f"--remote-auth-plugin={plugin}")
    ob = create_options_bootstrapper(args)
    env = CompleteEnvironment({})
    _build_config, options = OptionsInitializer(
        ob, env).build_config_and_options(ob, env, raise_=False)
    return ExecutionOptions.from_options(options, env, local_only=local_only)
Example 4
def test_invalid_variable() -> None:
    pants_env = CompleteEnvironment()

    with pytest.raises(ValueError) as exc:
        pants_env.get_subset(["3INVALID=doesn't matter"])
    assert (
        "An invalid variable was requested via the --test-extra-env-var mechanism: 3INVALID"
        in str(exc))
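For contrast, a sketch of the valid forms the same API appears to accept, inferred from the error message above and from Example 14; the exact "NAME=value" override semantics are an assumption:

# Assumed behavior: a bare name is copied through, while "NAME=value" supplies an explicit value.
pants_env = CompleteEnvironment({"PATH": "/usr/bin"})
subset = pants_env.get_subset(["PATH", "EXTRA_VAR=1"])
assert dict(subset) == {"PATH": "/usr/bin", "EXTRA_VAR": "1"}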
Example 5
def launch_new_pantsd_instance():
    """An external entrypoint that spawns a new pantsd instance."""

    options_bootstrapper = OptionsBootstrapper.create(env=os.environ,
                                                      args=sys.argv,
                                                      allow_pantsrc=True)
    env = CompleteEnvironment(os.environ)
    daemon = PantsDaemon.create(options_bootstrapper, env)
    daemon.run_sync()
Example 6
    def test_global_options_validation(self):
        # Specify an invalid combination of options.
        ob = OptionsBootstrapper.create(
            env={}, args=["--backend-packages=[]", "--remote-execution"], allow_pantsrc=False
        )
        env = CompleteEnvironment({})
        with self.assertRaises(OptionsError) as exc:
            OptionsInitializer(ob, env).build_config_and_options(ob, env, raise_=True)
        self.assertIn("The `--remote-execution` option requires", str(exc.exception))
Example 7
    def test_invalid_version(self) -> None:
        options_bootstrapper = OptionsBootstrapper.create(
            env={},
            args=["--backend-packages=[]", "--pants-version=99.99.9999"],
            allow_pantsrc=False,
        )

        env = CompleteEnvironment({})
        with self.assertRaises(ExecutionError):
            OptionsInitializer(options_bootstrapper).build_config_and_options(
                options_bootstrapper, env, raise_=True)
Example 8
    def single_daemonized_run(
        self,
        args: Tuple[str, ...],
        env: Dict[str, str],
        cancellation_latch: PySessionCancellationLatch,
    ) -> ExitCode:
        """Run a single daemonized run of Pants.

        All aspects of the `sys` global should already have been replaced in `__call__`, so this
        method should not need any special handling for the fact that it's running in a proxied
        environment.
        """

        try:
            logger.debug("Connected to pantsd")
            # Capture the client's start time, which we propagate here in order to get an accurate
            # view of total time.
            env_start_time = env.get("PANTSD_RUNTRACKER_CLIENT_START_TIME", None)
            if not env_start_time:
                # NB: We warn rather than erroring here because it eases use of non-Pants nailgun
                # clients for testing.
                logger.warning(
                    "No start time was reported by the client! Metrics may be inaccurate."
                )
            start_time = float(env_start_time) if env_start_time else time.time()

            options_bootstrapper = OptionsBootstrapper.create(
                env=env, args=args, allow_pantsrc=True
            )

            # Run using the pre-warmed Session.
            complete_env = CompleteEnvironment(env)
            scheduler, options_initializer = self._core.prepare(options_bootstrapper, complete_env)
            runner = LocalPantsRunner.create(
                complete_env,
                options_bootstrapper,
                scheduler=scheduler,
                options_initializer=options_initializer,
                cancellation_latch=cancellation_latch,
            )
            return runner.run(start_time)
        except Exception as e:
            logger.exception(e)
            return PANTS_FAILED_EXIT_CODE
        except KeyboardInterrupt:
            print("Interrupted by user.\n", file=sys.stderr)
            return PANTS_FAILED_EXIT_CODE
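An illustrative client-side counterpart to the start-time comment above; the variable name comes from the code, everything else is an assumption:

# Hypothetical nailgun-style client: export its own start time before connecting to pantsd,
# so the daemon's run tracker can report total wall-clock time accurately.
import os
import time

os.environ["PANTSD_RUNTRACKER_CLIENT_START_TIME"] = str(time.time())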
Example 9
    def run(self, start_time: float) -> ExitCode:
        self.scrub_pythonpath()

        options_bootstrapper = OptionsBootstrapper.create(env=self.env,
                                                          args=self.args,
                                                          allow_pantsrc=True)
        with warnings.catch_warnings(record=True):
            bootstrap_options = options_bootstrapper.bootstrap_options
            global_bootstrap_options = bootstrap_options.for_global_scope()

        # We enable logging here, and everything before it will be routed through regular
        # Python logging.
        stdin_fileno = sys.stdin.fileno()
        stdout_fileno = sys.stdout.fileno()
        stderr_fileno = sys.stderr.fileno()
        with initialize_stdio(global_bootstrap_options), stdio_destination(
                stdin_fileno=stdin_fileno,
                stdout_fileno=stdout_fileno,
                stderr_fileno=stderr_fileno,
        ):
            # N.B. We inline imports to speed up the python thin client run, and to avoid importing
            # engine types until after the runner has had a chance to set PANTS_BIN_NAME.

            if self._should_run_with_pantsd(global_bootstrap_options):
                from pants.bin.remote_pants_runner import RemotePantsRunner

                try:
                    remote_runner = RemotePantsRunner(self.args, self.env,
                                                      options_bootstrapper)
                    return remote_runner.run(start_time)
                except RemotePantsRunner.Fallback as e:
                    logger.warning(
                        f"Client exception: {e!r}, falling back to non-daemon mode"
                    )

            from pants.bin.local_pants_runner import LocalPantsRunner

            # We only install signal handling via ExceptionSink if the run will execute in this process.
            ExceptionSink.install(
                log_location=init_workdir(global_bootstrap_options),
                pantsd_instance=False)
            runner = LocalPantsRunner.create(
                env=CompleteEnvironment(self.env),
                options_bootstrapper=options_bootstrapper)
            return runner.run(start_time)
Example 10
def test_prepare_scheduler():
    # A core with no services.
    def create_services(bootstrap_options, legacy_graph_scheduler):
        return PantsServices()

    env = CompleteEnvironment({})
    core = PantsDaemonCore(create_options_bootstrapper([]), env,
                           PyExecutor(2, 4), create_services)

    first_scheduler, first_options_initializer = core.prepare(
        create_options_bootstrapper(["-ldebug"]),
        env,
    )
    second_scheduler, second_options_initializer = core.prepare(
        create_options_bootstrapper(["-lwarn"]),
        env,
    )
    assert first_scheduler is not second_scheduler
    assert first_options_initializer is second_options_initializer
Example 11
    def __init__(
            self,
            *,
            rules: Iterable | None = None,
            target_types: Iterable[type[Target]] | None = None,
            objects: dict[str, Any] | None = None,
            context_aware_object_factories: dict[str, Any] | None = None,
            isolated_local_store: bool = False,
            preserve_tmpdirs: bool = False,
            ca_certs_path: str | None = None,
            bootstrap_args: Iterable[str] = (),
    ) -> None:

        bootstrap_args = [*bootstrap_args]

        root_dir: Path | None = None
        if preserve_tmpdirs:
            root_dir = Path(mkdtemp(prefix="RuleRunner."))
            print(
                f"Preserving rule runner temporary directories at {root_dir}.",
                file=sys.stderr)
            bootstrap_args.extend([
                "--no-process-execution-local-cleanup",
                f"--local-execution-root-dir={root_dir}",
            ])
            build_root = (root_dir / "BUILD_ROOT").resolve()
            build_root.mkdir()
            self.build_root = str(build_root)
        else:
            self.build_root = os.path.realpath(
                safe_mkdtemp(prefix="_BUILD_ROOT"))

        safe_mkdir(self.pants_workdir)
        BuildRoot().path = self.build_root

        # TODO: Redesign rule registration for tests to be more ergonomic and to make this less
        #  special-cased.
        all_rules = (
            *(rules or ()),
            *source_root.rules(),
            QueryRule(WrappedTarget, [Address]),
            QueryRule(UnionMembership, []),
        )
        build_config_builder = BuildConfiguration.Builder()
        build_config_builder.register_aliases(
            BuildFileAliases(
                objects=objects,
                context_aware_object_factories=context_aware_object_factories))
        build_config_builder.register_rules("_dummy_for_test_", all_rules)
        build_config_builder.register_target_types("_dummy_for_test_",
                                                   target_types or ())
        self.build_config = build_config_builder.create()

        self.environment = CompleteEnvironment({})
        self.options_bootstrapper = create_options_bootstrapper(
            args=bootstrap_args)
        options = self.options_bootstrapper.full_options(self.build_config)
        global_options = self.options_bootstrapper.bootstrap_options.for_global_scope(
        )

        dynamic_remote_options, _ = DynamicRemoteOptions.from_options(
            options, self.environment)
        local_store_options = LocalStoreOptions.from_options(global_options)
        if isolated_local_store:
            if root_dir:
                lmdb_store_dir = root_dir / "lmdb_store"
                lmdb_store_dir.mkdir()
                store_dir = str(lmdb_store_dir)
            else:
                store_dir = safe_mkdtemp(prefix="lmdb_store.")
            local_store_options = dataclasses.replace(local_store_options,
                                                      store_dir=store_dir)

        local_execution_root_dir = global_options.local_execution_root_dir
        named_caches_dir = global_options.named_caches_dir

        graph_session = EngineInitializer.setup_graph_extended(
            pants_ignore_patterns=GlobalOptions.compute_pants_ignore(
                self.build_root, global_options),
            use_gitignore=False,
            local_store_options=local_store_options,
            local_execution_root_dir=local_execution_root_dir,
            named_caches_dir=named_caches_dir,
            build_root=self.build_root,
            build_configuration=self.build_config,
            executor=_EXECUTOR,
            execution_options=ExecutionOptions.from_options(
                global_options, dynamic_remote_options),
            ca_certs_path=ca_certs_path,
            engine_visualize_to=None,
        ).new_session(
            build_id="buildid_for_test",
            session_values=SessionValues({
                OptionsBootstrapper: self.options_bootstrapper,
                CompleteEnvironment: self.environment,
            }),
        )
        self.scheduler = graph_session.scheduler_session
Example 12
def plugin_resolution(rule_runner: RuleRunner,
                      *,
                      interpreter=None,
                      chroot=None,
                      plugins=None,
                      sdist=True):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    interpreter_constraints = (PexInterpreterConstraints([
        f"=={interpreter.identity.version_str}"
    ]) if interpreter else PexInterpreterConstraints([">=3.7"]))

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env: Dict[str, str] = {}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, "repo")
            env.update(
                PANTS_PYTHON_REPOS_REPOS=f"['file://{repo_dir}']",
                PANTS_PYTHON_REPOS_INDEXES="[]",
                PANTS_PYTHON_SETUP_RESOLVER_CACHE_TTL="1",
            )
            plugin_list = []
            for plugin in plugins:
                version = None
                if isinstance(plugin, tuple):
                    plugin, version = plugin
                plugin_list.append(
                    f"{plugin}=={version}" if version else plugin)
                if create_artifacts:
                    setup_py_args = [
                        "sdist" if sdist else "bdist_wheel", "--dist-dir",
                        "dist/"
                    ]
                    _run_setup_py(
                        rule_runner,
                        plugin,
                        interpreter_constraints,
                        version,
                        setup_py_args,
                        repo_dir,
                    )
            env["PANTS_PLUGINS"] = f"[{','.join(map(repr, plugin_list))}]"

        configpath = os.path.join(root_dir, "pants.toml")
        if create_artifacts:
            touch(configpath)
        args = [f"--pants-config-files=['{configpath}']"]

        options_bootstrapper = OptionsBootstrapper.create(env=env,
                                                          args=args,
                                                          allow_pantsrc=False)
        complete_env = CompleteEnvironment({
            **{
                k: os.environ[k]
                for k in ["PATH", "HOME", "PYENV_ROOT"] if k in os.environ
            },
            **env
        })
        bootstrap_scheduler = create_bootstrap_scheduler(
            options_bootstrapper, complete_env)
        plugin_resolver = PluginResolver(
            bootstrap_scheduler,
            interpreter_constraints=interpreter_constraints)
        cache_dir = options_bootstrapper.bootstrap_options.for_global_scope(
        ).named_caches_dir

        working_set = plugin_resolver.resolve(options_bootstrapper,
                                              complete_env,
                                              WorkingSet(entries=[]))
        for dist in working_set:
            assert (Path(os.path.realpath(cache_dir))
                    in Path(os.path.realpath(dist.location)).parents)

        yield working_set, root_dir, repo_dir
Example 13
def plugin_resolution(
    rule_runner: RuleRunner,
    *,
    interpreter: PythonInterpreter | None = None,
    chroot: str | None = None,
    plugins: Sequence[Plugin] = (),
    sdist: bool = True,
    working_set_entries: Sequence[Distribution] = (),
    use_pypi: bool = False,
):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    # Default to resolving with whatever we're currently running with.
    interpreter_constraints = (InterpreterConstraints(
        [f"=={interpreter.identity.version_str}"]) if interpreter else None)
    artifact_interpreter_constraints = interpreter_constraints or InterpreterConstraints(
        [f"=={'.'.join(map(str, sys.version_info[:3]))}"])

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env: Dict[str, str] = {}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, "repo")
            env.update(
                PANTS_PYTHON_REPOS_REPOS=f"['file://{repo_dir}']",
                PANTS_PYTHON_RESOLVER_CACHE_TTL="1",
            )
            if not use_pypi:
                env.update(PANTS_PYTHON_REPOS_INDEXES="[]")
            plugin_list = []
            for plugin in plugins:
                version = plugin.version
                plugin_list.append(
                    f"{plugin.name}=={version}" if version else plugin.name)
                if create_artifacts:
                    setup_py_args = [
                        "sdist" if sdist else "bdist_wheel", "--dist-dir",
                        "dist/"
                    ]
                    _run_setup_py(
                        rule_runner,
                        plugin.name,
                        artifact_interpreter_constraints,
                        version,
                        plugin.install_requires,
                        setup_py_args,
                        repo_dir,
                    )
            env["PANTS_PLUGINS"] = f"[{','.join(map(repr, plugin_list))}]"

        configpath = os.path.join(root_dir, "pants.toml")
        if create_artifacts:
            touch(configpath)
        args = [f"--pants-config-files=['{configpath}']"]

        options_bootstrapper = OptionsBootstrapper.create(env=env,
                                                          args=args,
                                                          allow_pantsrc=False)
        complete_env = CompleteEnvironment({
            **{
                k: os.environ[k]
                for k in ["PATH", "HOME", "PYENV_ROOT"] if k in os.environ
            },
            **env
        })
        bootstrap_scheduler = create_bootstrap_scheduler(options_bootstrapper)
        cache_dir = options_bootstrapper.bootstrap_options.for_global_scope(
        ).named_caches_dir

        input_working_set = WorkingSet(entries=[])
        for dist in working_set_entries:
            input_working_set.add(dist)
        plugin_resolver = PluginResolver(bootstrap_scheduler,
                                         interpreter_constraints,
                                         input_working_set)
        working_set = plugin_resolver.resolve(
            options_bootstrapper,
            complete_env,
        )
        for dist in working_set:
            assert (Path(os.path.realpath(cache_dir))
                    in Path(os.path.realpath(dist.location)).parents)

        yield working_set, root_dir, repo_dir
Example 14
def test_complete_environment(input_strs: List[str],
                              expected: Dict[str, str]) -> None:
    pants_env = CompleteEnvironment({"A": "a", "B": "b", "C": "c"})

    subset = pants_env.get_subset(input_strs)
    assert dict(subset) == expected
Example 15
    def __init__(
        self,
        *,
        rules: Iterable | None = None,
        target_types: Iterable[type[Target]] | None = None,
        objects: dict[str, Any] | None = None,
        context_aware_object_factories: dict[str, Any] | None = None,
        isolated_local_store: bool = False,
        ca_certs_path: str | None = None,
    ) -> None:
        self.build_root = os.path.realpath(mkdtemp(suffix="_BUILD_ROOT"))
        safe_mkdir(self.build_root, clean=True)
        safe_mkdir(self.pants_workdir)
        BuildRoot().path = self.build_root

        # TODO: Redesign rule registration for tests to be more ergonomic and to make this less
        #  special-cased.
        all_rules = (
            *(rules or ()),
            *source_root.rules(),
            QueryRule(WrappedTarget, [Address]),
            QueryRule(UnionMembership, []),
        )
        build_config_builder = BuildConfiguration.Builder()
        build_config_builder.register_aliases(
            BuildFileAliases(
                objects=objects,
                context_aware_object_factories=context_aware_object_factories))
        build_config_builder.register_rules(all_rules)
        build_config_builder.register_target_types(target_types or ())
        self.build_config = build_config_builder.create()

        self.environment = CompleteEnvironment({})
        self.options_bootstrapper = create_options_bootstrapper()
        options = self.options_bootstrapper.full_options(self.build_config)
        global_options = self.options_bootstrapper.bootstrap_options.for_global_scope(
        )
        local_store_dir = (os.path.realpath(safe_mkdtemp())
                           if isolated_local_store else
                           global_options.local_store_dir)
        local_execution_root_dir = global_options.local_execution_root_dir
        named_caches_dir = global_options.named_caches_dir

        graph_session = EngineInitializer.setup_graph_extended(
            pants_ignore_patterns=GlobalOptions.compute_pants_ignore(
                self.build_root, global_options),
            use_gitignore=False,
            local_store_dir=local_store_dir,
            local_execution_root_dir=local_execution_root_dir,
            named_caches_dir=named_caches_dir,
            native=Native(),
            build_root=self.build_root,
            build_configuration=self.build_config,
            executor=_EXECUTOR,
            execution_options=ExecutionOptions.from_options(
                options, self.environment),
            ca_certs_path=ca_certs_path,
            native_engine_visualize_to=None,
        ).new_session(
            build_id="buildid_for_test",
            session_values=SessionValues({
                OptionsBootstrapper: self.options_bootstrapper,
                CompleteEnvironment: self.environment,
            }),
        )
        self.scheduler = graph_session.scheduler_session