Example 1
def setup_graph(
    options_bootstrapper: OptionsBootstrapper,
    build_configuration: BuildConfiguration,
    executor: Optional[PyExecutor] = None,
) -> GraphScheduler:
    native = Native()
    build_root = get_buildroot()
    bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()
    executor = executor or PyExecutor(
        *OptionsInitializer.compute_executor_arguments(bootstrap_options))
    return EngineInitializer.setup_graph_extended(
        options_bootstrapper,
        build_configuration,
        ExecutionOptions.from_bootstrap_options(bootstrap_options),
        native=native,
        executor=executor,
        pants_ignore_patterns=OptionsInitializer.compute_pants_ignore(
            build_root, bootstrap_options),
        use_gitignore=bootstrap_options.pants_ignore_use_gitignore,
        local_store_dir=bootstrap_options.local_store_dir,
        local_execution_root_dir=bootstrap_options.local_execution_root_dir,
        named_caches_dir=bootstrap_options.named_caches_dir,
        ca_certs_path=bootstrap_options.ca_certs_path,
        build_root=build_root,
        include_trace_on_error=bootstrap_options.print_stacktrace,
    )
Example 2
def test_warns_on_remote_cache_errors():
    executor = PyExecutor(2, 4)
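    # Stand up a stub CAS that errors on every request, so both cache reads and writes fail.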
    builder = PyStubCAS.builder()
    builder.always_errors()
    cas = builder.build(executor)
    address = cas.address()

    pants_run = run_pants(
        [
            "--backend-packages=['pants.backend.python']",
            "--no-dynamic-ui",
            "--level=info",
            "package",
            "testprojects/src/python/hello/main:main",
        ],
        use_pantsd=False,
        config={
            GLOBAL_SCOPE_CONFIG_SECTION: {
                "remote_cache_read": True,
                "remote_cache_write": True,
                "remote_store_server": address,
            }
        },
    )

    pants_run.assert_success()
    assert "Failed to read from remote cache: Unimplemented" in pants_run.stderr
    assert (re.search(
        "Failed to write to remote cache:.*StubCAS is configured to always fail",
        pants_run.stderr,
        re.MULTILINE,
    ) is not None)
Example 3
def test_close_stdio(mock_close):
    mock_options = unittest.mock.Mock()
    mock_options_values = unittest.mock.Mock()
    mock_options.for_global_scope.return_value = mock_options_values
    mock_options_values.pants_subprocessdir = "non_existent_dir"
    mock_server = unittest.mock.Mock()

    def create_services(bootstrap_options, legacy_graph_scheduler):
        return PantsServices()

    pantsd = PantsDaemon(
        native=Native(),
        work_dir="test_work_dir",
        log_level=logging.INFO,
        server=mock_server,
        core=PantsDaemonCore(PyExecutor(2, 4), create_services),
        metadata_base_dir="/tmp/pants_test_metadata_dir",
        bootstrap_options=mock_options,
    )

    with stdio_as(-1, -1, -1):
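        # _close_stdio() should close each replaced stdio handle and pass its file descriptor to
        # the patched close function (`mock_close`).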
        handles = (sys.stdin, sys.stdout, sys.stderr)
        fds = [h.fileno() for h in handles]
        pantsd._close_stdio()
        mock_close.assert_has_calls(unittest.mock.call(x) for x in fds)
        for handle in handles:
            assert handle.closed is True
Example 4
    def _connect_and_execute(
            self, pantsd_handle: PantsDaemonClient.Handle) -> ExitCode:
        native = Native()

        global_options = self._bootstrap_options.for_global_scope()
        executor = PyExecutor(
            *OptionsInitializer.compute_executor_arguments(global_options))

        # Merge the nailgun TTY capability environment variables with the passed environment dict.
        ng_env = NailgunProtocol.ttynames_to_env(sys.stdin, sys.stdout.buffer,
                                                 sys.stderr.buffer)
        modified_env = {
            **self._env,
            **ng_env,
            "PANTSD_RUNTRACKER_CLIENT_START_TIME": str(self._start_time),
            "PANTSD_REQUEST_TIMEOUT_LIMIT": str(
                global_options.pantsd_timeout_when_multiple_invocations),
        }

        command = self._args[0]
        args = self._args[1:]

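        # Try to connect up to `retries` times, restarting pantsd if it remains unresponsive
        # after the first failed attempt.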
        retries = 3
        attempt = 1
        while True:
            port = pantsd_handle.port
            logger.debug(
                f"Connecting to pantsd on port {port} attempt {attempt}/{retries}"
            )

            # We preserve TTY settings since the server might write directly to the TTY, and we'd like
            # to clean up any side effects before exiting.
            #
            # We ignore keyboard interrupts because the nailgun client will handle them.
            with STTYSettings.preserved(), interrupts_ignored():
                try:
                    return native.new_nailgun_client(executor=executor, port=port).execute(
                        command, args, modified_env
                    )

                # NailgunConnectionException represents a failure connecting to pantsd, so we retry
                # up to the retry limit.
                except native.lib.NailgunConnectionException as e:
                    if attempt > retries:
                        raise self.Fallback(e)

                    # Wait one second before retrying
                    logger.warning(
                        f"Pantsd was unresponsive on port {port}, retrying.")
                    time.sleep(1)

                    # One possible cause of the daemon being unresponsive during an attempt is that
                    # another lifecycle operation is happening concurrently (including teardown).
                    # To account for this, we won't begin attempting restarts until at least 1
                    # attempt has passed.
                    if attempt > 1:
                        pantsd_handle = self._client.restart()

                    attempt += 1
Example 5
def setup_graph(
    options_bootstrapper: OptionsBootstrapper,
    build_configuration: BuildConfiguration,
    env: CompleteEnvironment,
    executor: Optional[PyExecutor] = None,
    local_only: bool = False,
) -> GraphScheduler:
    native = Native()
    build_root = get_buildroot()
    bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()
    options = options_bootstrapper.full_options(build_configuration)
    assert bootstrap_options is not None
    executor = executor or PyExecutor(
        *GlobalOptions.compute_executor_arguments(bootstrap_options))
    execution_options = ExecutionOptions.from_options(
        options, env, local_only=local_only)
    return EngineInitializer.setup_graph_extended(
        build_configuration,
        execution_options,
        native=native,
        executor=executor,
        pants_ignore_patterns=GlobalOptions.compute_pants_ignore(
            build_root, bootstrap_options),
        use_gitignore=bootstrap_options.pants_ignore_use_gitignore,
        local_store_dir=bootstrap_options.local_store_dir,
        local_execution_root_dir=bootstrap_options.local_execution_root_dir,
        named_caches_dir=bootstrap_options.named_caches_dir,
        ca_certs_path=bootstrap_options.ca_certs_path,
        build_root=build_root,
        include_trace_on_error=bootstrap_options.print_stacktrace,
        native_engine_visualize_to=bootstrap_options.native_engine_visualize_to,
    )
Example 6
    def create(cls, options_bootstrapper: OptionsBootstrapper) -> PantsDaemon:
        # Any warnings that would be triggered here are re-triggered later per-run of Pants, so we
        # silence them.
        with warnings.catch_warnings(record=True):
            bootstrap_options = options_bootstrapper.bootstrap_options
            bootstrap_options_values = bootstrap_options.for_global_scope()

        native = Native()

        executor = PyExecutor(*GlobalOptions.compute_executor_arguments(bootstrap_options_values))
        core = PantsDaemonCore(options_bootstrapper, executor, cls._setup_services)

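        # Create the nailgun server that client runs will connect to on the configured pailgun port.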
        server = native.new_nailgun_server(
            executor,
            bootstrap_options_values.pantsd_pailgun_port,
            DaemonPantsRunner(core),
        )

        return PantsDaemon(
            native=native,
            work_dir=bootstrap_options_values.pants_workdir,
            log_level=bootstrap_options_values.level,
            server=server,
            core=core,
            metadata_base_dir=bootstrap_options_values.pants_subprocessdir,
            bootstrap_options=bootstrap_options,
        )
Example 7
def test_prepare_scheduler():
    # A core with no services.
    def create_services(bootstrap_options, legacy_graph_scheduler):
        return PantsServices()

    core = PantsDaemonCore(PyExecutor(2, 4), create_services)

    first_scheduler = core.prepare_scheduler(
        create_options_bootstrapper(["-ldebug"]))
    second_scheduler = core.prepare_scheduler(
        create_options_bootstrapper(["-lwarn"]))
    assert first_scheduler is not second_scheduler
Example 8
def test_prepare_scheduler():
    # A core with no services.
    def create_services(bootstrap_options, legacy_graph_scheduler):
        return PantsServices()

    env = CompleteEnvironment({})
    core = PantsDaemonCore(create_options_bootstrapper([]), env,
                           PyExecutor(2, 4), create_services)

    first_scheduler, first_options_initializer = core.prepare(
        create_options_bootstrapper(["-ldebug"]),
        env,
    )
    second_scheduler, second_options_initializer = core.prepare(
        create_options_bootstrapper(["-lwarn"]),
        env,
    )
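    # Differing bootstrap options should produce a new scheduler, while the OptionsInitializer is
    # reused across calls.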
    assert first_scheduler is not second_scheduler
    assert first_options_initializer is second_options_initializer
Example 9
def test_warns_on_remote_cache_errors():
    executor = PyExecutor(2, 4)
    builder = PyStubCAS.builder()
    builder.always_errors()
    cas = builder.build(executor)

    pants_run = run_pants(
        [
            "--backend-packages=['pants.backend.python']",
            "--no-dynamic-ui",
            "--level=info",
            "package",
            "testprojects/src/python/hello/main:main",
        ],
        use_pantsd=False,
        config={
            GLOBAL_SCOPE_CONFIG_SECTION: {
                "remote_cache_read": True,
                "remote_cache_write": True,
                # NB: Our options code expects `grpc://`, which it will then convert back to
                # `http://` before sending over FFI.
                "remote_store_address": cas.address().replace("http://", "grpc://"),
            }
        },
    )

    pants_run.assert_success()
    assert "Failed to read from remote cache: Unimplemented" in pants_run.stderr
    assert (re.search(
        "Failed to write to remote cache:.*StubCAS is configured to always fail",
        pants_run.stderr,
        re.MULTILINE,
    ) is not None)
Example 10
class SchedulerTestBase:
    """A mixin for classes (tests, presumably) which need to create temporary schedulers.

    TODO: In the medium term, this should be part of pants_test.test_base.TestBase.
    """

    _native = Native()
    _executor = PyExecutor(2, 4)

    def _create_work_dir(self):
        work_dir = safe_mkdtemp()
        self.addCleanup(safe_rmtree, work_dir)
        return work_dir

    def mk_scheduler(
        self,
        rules,
        include_trace_on_error: bool = True,
    ) -> SchedulerSession:
        """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
        work_dir = self._create_work_dir()

        build_root = os.path.join(work_dir, "build_root")
        os.makedirs(build_root)

        local_store_dir = os.path.realpath(safe_mkdtemp())
        local_execution_root_dir = os.path.realpath(safe_mkdtemp())
        named_caches_dir = os.path.realpath(safe_mkdtemp())
        scheduler = Scheduler(
            native=self._native,
            ignore_patterns=[],
            use_gitignore=False,
            build_root=build_root,
            local_store_dir=local_store_dir,
            local_execution_root_dir=local_execution_root_dir,
            named_caches_dir=named_caches_dir,
            ca_certs_path=None,
            rules=rules,
            union_membership=UnionMembership({}),
            executor=self._executor,
            execution_options=DEFAULT_EXECUTION_OPTIONS,
            include_trace_on_error=include_trace_on_error,
        )
        return scheduler.new_session(build_id="buildid_for_test")

    def execute(self, scheduler, product, *subjects):
        """Runs an ExecutionRequest for the given product and subjects, and returns the result
        value."""
        request = scheduler.execution_request([product], subjects)
        returns, throws = scheduler.execute(request)
        if throws:
            with temporary_file_path(cleanup=False, suffix=".dot") as dot_file:
                scheduler.visualize_graph_to_file(dot_file)
                raise ValueError(
                    f"At least one root failed: {throws}. Visualized as {dot_file}"
                )
        return list(state.value for _, state in returns)

    def execute_expecting_one_result(self, scheduler, product, subject):
        request = scheduler.execution_request([product], [subject])
        returns, throws = scheduler.execute(request)

        if throws:
            _, state = throws[0]
            raise state.exc

        self.assertEqual(len(returns), 1)

        _, state = returns[0]
        return state
Example 11
def test_warns_on_remote_cache_errors():
    executor = PyExecutor(core_threads=2, max_threads=4)
    cas = PyStubCAS.builder().always_errors().build(executor)

    def run(behavior: RemoteCacheWarningsBehavior) -> str:
        pants_run = run_pants(
            [
                "--backend-packages=['pants.backend.python']",
                "--no-dynamic-ui",
                "package",
                "testprojects/src/python/hello/main:main",
            ],
            use_pantsd=False,
            config={
                GLOBAL_SCOPE_CONFIG_SECTION: {
                    "remote_cache_read": True,
                    "remote_cache_write": True,
                    "remote_cache_warnings": behavior.value,
                    # NB: Our options code expects `grpc://`, which it will then convert back to
                    # `http://` before sending over FFI.
                    "remote_store_address": cas.address.replace("http://", "grpc://"),
                }
            },
        )
        pants_run.assert_success()
        return pants_run.stderr

    def read_err(i: int) -> str:
        return f"Failed to read from remote cache ({i} occurrences so far): Unimplemented"

    def write_err(i: int) -> str:
        return (
            f'Failed to write to remote cache ({i} occurrences so far): InvalidArgument: "StubCAS is '
            f'configured to always fail"')

    first_read_err = read_err(1)
    first_write_err = write_err(1)
    third_read_err = read_err(3)
    third_write_err = write_err(3)
    fourth_read_err = read_err(4)
    fourth_write_err = write_err(4)

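    # Each warnings behavior surfaces a different subset of these messages: `ignore` logs none,
    # `first_only` logs only the first occurrence, and `backoff` logs the 1st and 4th occurrences
    # but not the 3rd.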
    ignore_result = run(RemoteCacheWarningsBehavior.ignore)
    for err in [
            first_read_err,
            first_write_err,
            third_read_err,
            third_write_err,
            fourth_read_err,
            fourth_write_err,
    ]:
        assert err not in ignore_result

    first_only_result = run(RemoteCacheWarningsBehavior.first_only)
    for err in [first_read_err, first_write_err]:
        assert err in first_only_result, f"Not found in:\n{first_only_result}"
    for err in [
            third_read_err, third_write_err, fourth_read_err, fourth_write_err
    ]:
        assert err not in first_only_result

    backoff_result = run(RemoteCacheWarningsBehavior.backoff)
    for err in [
            first_read_err, first_write_err, fourth_read_err, fourth_write_err
    ]:
        assert err in backoff_result
    for err in [third_read_err, third_write_err]:
        assert err not in backoff_result
Example 12
class SchedulerTestBase:
    """A mixin for classes (tests, presumably) which need to create temporary schedulers.

    TODO: In the medium term, this should be part of pants_test.test_base.TestBase.
    """

    _native = Native()
    _executor = PyExecutor(2, 4)

    def _create_work_dir(self):
        work_dir = safe_mkdtemp()
        self.addCleanup(safe_rmtree, work_dir)
        return work_dir

    def mk_fs_tree(self,
                   build_root_src=None,
                   ignore_patterns=None,
                   work_dir=None):
        """Create a temporary FilesystemProjectTree.

        :param build_root_src: Optional directory to pre-populate from; otherwise, empty.
        :returns: A FilesystemProjectTree.
        """
        work_dir = work_dir or self._create_work_dir()
        build_root = os.path.join(work_dir, "build_root")
        if build_root_src is not None:
            shutil.copytree(build_root_src, build_root, symlinks=True)
        else:
            os.makedirs(build_root)
        return FileSystemProjectTree(build_root,
                                     ignore_patterns=ignore_patterns)

    def mk_scheduler(
        self,
        rules=None,
        project_tree=None,
        work_dir=None,
        include_trace_on_error=True,
        execution_options=None,
        ca_certs_path=None,
    ) -> SchedulerSession:
        """Creates a SchedulerSession for a Scheduler with the given Rules installed."""
        rules = rules or []
        work_dir = work_dir or self._create_work_dir()
        project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir)
        local_store_dir = os.path.realpath(safe_mkdtemp())
        local_execution_root_dir = os.path.realpath(safe_mkdtemp())
        named_caches_dir = os.path.realpath(safe_mkdtemp())
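        # Overlay any caller-provided execution options on top of the defaults.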
        if execution_options is not None:
            eo = asdict(DEFAULT_EXECUTION_OPTIONS)
            eo.update(execution_options)
            execution_options = ExecutionOptions(**eo)
        scheduler = Scheduler(
            native=self._native,
            ignore_patterns=project_tree.ignore_patterns,
            use_gitignore=False,
            build_root=project_tree.build_root,
            local_store_dir=local_store_dir,
            local_execution_root_dir=local_execution_root_dir,
            named_caches_dir=named_caches_dir,
            ca_certs_path=ca_certs_path,
            rules=rules,
            union_membership=UnionMembership({}),
            executor=self._executor,
            execution_options=execution_options or DEFAULT_EXECUTION_OPTIONS,
            include_trace_on_error=include_trace_on_error,
        )
        return scheduler.new_session(build_id="buildid_for_test")

    def execute(self, scheduler, product, *subjects):
        """Runs an ExecutionRequest for the given product and subjects, and returns the result
        value."""
        request = scheduler.execution_request([product], subjects)
        returns, throws = scheduler.execute(request)
        if throws:
            with temporary_file_path(cleanup=False, suffix=".dot") as dot_file:
                scheduler.visualize_graph_to_file(dot_file)
                raise ValueError(
                    f"At least one root failed: {throws}. Visualized as {dot_file}"
                )
        return list(state.value for _, state in returns)

    def execute_expecting_one_result(self, scheduler, product, subject):
        request = scheduler.execution_request([product], [subject])
        returns, throws = scheduler.execute(request)

        if throws:
            _, state = throws[0]
            raise state.exc

        self.assertEqual(len(returns), 1)

        _, state = returns[0]
        return state
Example 13
def __init__(self):
    self.externs = Externs(self.lib)
    self.lib.externs_set(self.externs)
    self._executor = PyExecutor()
Example 14
    MissingParameterTypeAnnotation,
    MissingReturnTypeAnnotation,
    QueryRule,
    RuleIndex,
    UnrecognizedRuleArgument,
    _RuleVisitor,
    goal_rule,
    rule,
)
from pants.engine.unions import UnionMembership
from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS, DEFAULT_LOCAL_STORE_OPTIONS
from pants.testutil.rule_runner import MockGet, run_rule_with_mocks
from pants.util.enums import match
from pants.util.logging import LogLevel

_EXECUTOR = PyExecutor(core_threads=2, max_threads=4)


def create_scheduler(rules, validate=True):
    """Create a Scheduler."""
    return Scheduler(
        ignore_patterns=[],
        use_gitignore=False,
        build_root=str(Path.cwd()),
        local_execution_root_dir=".",
        named_caches_dir="./.pants.d/named_caches",
        ca_certs_path=None,
        rules=rules,
        union_membership=UnionMembership({}),
        executor=_EXECUTOR,
        execution_options=DEFAULT_EXECUTION_OPTIONS,
Example 15
class SchedulerTestBase:
    """A mixin for classes which need to create temporary schedulers.

    TODO: In the medium term, this should be removed in favor of RuleRunner.
    """

    _executor = PyExecutor(core_threads=2, max_threads=4)

    def mk_scheduler(
        self,
        tmp_path: Path,
        rules,
        include_trace_on_error: bool = True,
        max_workunit_verbosity: LogLevel = LogLevel.DEBUG,
    ) -> SchedulerSession:
        """Creates a SchedulerSession for a Scheduler with the given Rules installed."""

        build_root = tmp_path / "build_root"
        build_root.mkdir(parents=True, exist_ok=True)

        local_execution_root_dir = os.path.realpath(safe_mkdtemp())
        named_caches_dir = os.path.realpath(safe_mkdtemp())
        scheduler = Scheduler(
            ignore_patterns=[],
            use_gitignore=False,
            build_root=build_root.as_posix(),
            local_execution_root_dir=local_execution_root_dir,
            named_caches_dir=named_caches_dir,
            ca_certs_path=None,
            rules=rules,
            union_membership=UnionMembership({}),
            executor=self._executor,
            execution_options=DEFAULT_EXECUTION_OPTIONS,
            local_store_options=DEFAULT_LOCAL_STORE_OPTIONS,
            include_trace_on_error=include_trace_on_error,
        )
        return scheduler.new_session(
            build_id="buildid_for_test",
            max_workunit_level=max_workunit_verbosity,
        )

    def execute(self, scheduler, product, *subjects):
        """Runs an ExecutionRequest for the given product and subjects, and returns the result
        value."""
        request = scheduler.execution_request([product], subjects)
        returns, throws = scheduler.execute(request)
        if throws:
            with temporary_file_path(cleanup=False, suffix=".dot") as dot_file:
                scheduler.visualize_graph_to_file(dot_file)
                raise ValueError(
                    f"At least one root failed: {throws}. Visualized as {dot_file}"
                )
        return [state.value for _, state in returns]

    def execute_expecting_one_result(self, scheduler, product, subject):
        request = scheduler.execution_request([product], [subject])
        returns, throws = scheduler.execute(request)

        if throws:
            _, state = throws[0]
            raise state.exc

        assert len(returns) == 1

        _, state = returns[0]
        return state
Example 16
from pants.util.dirutil import (
    recursive_dirname,
    safe_file_dump,
    safe_mkdir,
    safe_mkdtemp,
    safe_open,
)
from pants.util.ordered_set import FrozenOrderedSet

# -----------------------------------------------------------------------------------------------
# `RuleRunner`
# -----------------------------------------------------------------------------------------------

_O = TypeVar("_O")

_EXECUTOR = PyExecutor(multiprocessing.cpu_count(),
                       multiprocessing.cpu_count() * 4)


@dataclass(frozen=True)
class GoalRuleResult:
    exit_code: int
    stdout: str
    stderr: str

    @staticmethod
    def noop() -> GoalRuleResult:
        return GoalRuleResult(0, stdout="", stderr="")


# This is not frozen because we need to update the `scheduler` when setting options.
@dataclass
Example 17
from pants.util.dirutil import (
    recursive_dirname,
    safe_file_dump,
    safe_mkdir,
    safe_mkdtemp,
    safe_open,
)
from pants.util.ordered_set import FrozenOrderedSet

# -----------------------------------------------------------------------------------------------
# `RuleRunner`
# -----------------------------------------------------------------------------------------------

_O = TypeVar("_O")

_EXECUTOR = PyExecutor(core_threads=multiprocessing.cpu_count(),
                       max_threads=multiprocessing.cpu_count() * 4)


@dataclass(frozen=True)
class GoalRuleResult:
    exit_code: int
    stdout: str
    stderr: str

    @staticmethod
    def noop() -> GoalRuleResult:
        return GoalRuleResult(0, stdout="", stderr="")


# This is not frozen because we need to update the `scheduler` when setting options.
@dataclass
Example 18
    safe_mkdir,
    safe_mkdtemp,
    safe_open,
)
from pants.util.ordered_set import FrozenOrderedSet

# -----------------------------------------------------------------------------------------------
# `RuleRunner`
# -----------------------------------------------------------------------------------------------

_O = TypeVar("_O")

# Use the ~minimum possible parallelism, since integration tests using RuleRunner will already be
# run by Pants with an appropriate level of parallelism. max_threads must be greater than
# core_threads, so 2 is the minimum; but, via trial and error, 3 minimizes test times on average.
_EXECUTOR = PyExecutor(core_threads=1, max_threads=3)


@dataclass(frozen=True)
class GoalRuleResult:
    exit_code: int
    stdout: str
    stderr: str

    @staticmethod
    def noop() -> GoalRuleResult:
        return GoalRuleResult(0, stdout="", stderr="")


# This is not frozen because we need to update the `scheduler` when setting options.
@dataclass
Example 19
    MissingParameterTypeAnnotation,
    MissingReturnTypeAnnotation,
    QueryRule,
    RuleIndex,
    UnrecognizedRuleArgument,
    _RuleVisitor,
    goal_rule,
    rule,
)
from pants.engine.unions import UnionMembership
from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS
from pants.testutil.rule_runner import MockGet, run_rule_with_mocks
from pants.util.enums import match
from pants.util.logging import LogLevel

_EXECUTOR = PyExecutor(2, 4)


def create_scheduler(rules, validate=True, native=None):
    """Create a Scheduler."""
    native = native or Native()
    return Scheduler(
        native=native,
        ignore_patterns=[],
        use_gitignore=False,
        build_root=str(Path.cwd()),
        local_store_dir="./.pants.d/lmdb_store",
        local_execution_root_dir="./.pants.d",
        named_caches_dir="./.pants.d/named_caches",
        ca_certs_path=None,
        rules=rules,