def _expect_responses(
    self, expected, Local, getpass, config=None, kwargs=None, getpass_reply=None
):
    """
    Execute a mocked sudo() and verify the watchers= kwarg given to run().

    * expected: list of 2-tuples of FailingResponder prompt/response
    * config: Config object, if an overridden one is needed
    * kwargs: sudo() kwargs, if needed
    * getpass_reply: return value of getpass.getpass, if needed

    (Local and getpass are just mock injections.)
    """
    kwargs = kwargs or {}
    getpass.getpass.return_value = getpass_reply
    runner = Local.return_value
    context = Context(config=config) if config else Context()
    context.sudo('whoami', **kwargs)
    # Pull out only the interesting bits - pattern/response - ignoring the
    # sentinel, etc for now.
    observed = [
        (watcher.pattern, watcher.response)
        for watcher in runner.run.call_args[1]['watchers']
    ]
    eq_(observed, expected)
def can_be_pickled(self):
    # Round-trip a Context through pickle; the copy must compare equal yet
    # share no nested mutable state with the original.
    original = Context()
    original.foo = {'bar': {'biz': ['baz', 'buzz']}}
    restored = pickle.loads(pickle.dumps(original))
    eq_(original, restored)
    ok_(original is not restored)
    ok_(original.foo.bar.biz is not restored.foo.bar.biz)
def can_be_pickled(self):
    # Pickling must deep-copy: equal value, distinct objects all the way down.
    source = Context()
    source.foo = {'bar': {'biz': ['baz', 'buzz']}}
    clone = pickle.loads(pickle.dumps(source))
    assert source == clone
    assert source is not clone
    assert source.foo.bar.biz is not clone.foo.bar.biz
def can_be_pickled(self):
    # A pickle round-trip yields an equal-but-independent Context.
    ctx = Context()
    ctx.foo = {"bar": {"biz": ["baz", "buzz"]}}
    copied = pickle.loads(pickle.dumps(ctx))
    assert ctx == copied
    assert ctx is not copied
    assert ctx.foo.bar.biz is not copied.foo.bar.biz
def honors_runner_config_setting(self):
    # run() must instantiate whatever class the config names, not Local.
    klass = Mock()
    ctx = Context(Config({'runners': {'local': klass}}))
    ctx.run('foo')
    assert klass.mock_calls == [
        call(ctx),
        call().run('foo'),
    ]
def cd_should_apply_to_sudo(self, Local):
    runner = Local.return_value
    ctx = Context()
    with ctx.cd('foo'):
        ctx.sudo('whoami')
    # The cd wrapper lands inside the sudo invocation.
    expected = "sudo -S -p '[sudo] password: ' cd foo && whoami"
    assert runner.run.called, "sudo() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def prefixes_should_apply_to_run(self, Local):
    runner = Local.return_value
    ctx = Context()
    with ctx.prefix("cd foo"):
        ctx.run("whoami")
    expected = "cd foo && whoami"
    assert runner.run.called, "run() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def echo_hides_extra_sudo_flags(self, getpass):
    skip()  # see TODO in sudo() re: clean output display
    config = Config(overrides={'runner': _Dummy})
    Context(config=config).sudo('nope', echo=True)
    captured = sys.stdout.getvalue()
    sys.__stderr__.write(repr(captured) + "\n")
    # Echoed output should show a clean 'sudo nope', not the -S/prompt plumbing.
    ok_("-S" not in captured)
    ok_(Context().sudo.prompt not in captured)
    ok_("sudo nope" in captured)
def _run_build_steps(self, ctx: Context) -> None:
    """Run the platform-appropriate build: nmake on Windows, parent logic elsewhere."""
    windows_platforms = (
        SupportedPlatformEnum.WINDOWS_32,
        SupportedPlatformEnum.WINDOWS_64,
    )
    if self.platform not in windows_platforms:
        return super()._run_build_steps(ctx)
    # warn=True: a failing clean (e.g. nothing to clean yet) is not fatal.
    ctx.run('nmake clean', warn=True)
    ctx.run('nmake')
def check_existing_core(c: Context, hide: bool) -> None:
    """Raise SystemError if remnants of a previous core install are found."""
    probes = (
        ('python -c "import core"',
         "existing python2 core installation detected, please remove"),
        ('python3 -c "import core"',
         "existing python3 core installation detected, please remove"),
        ("which core-daemon",
         "core scripts found, please remove old installation"),
    )
    # A truthy (successful) probe means the old install is present.
    for probe_command, error_message in probes:
        if c.run(probe_command, warn=True, hide=hide):
            raise SystemError(error_message)
def echo_hides_extra_sudo_flags(self):
    skip()  # see TODO in sudo() re: clean output display
    config = Config(overrides={"runner": _Dummy})
    Context(config=config).sudo("nope", echo=True)
    captured = sys.stdout.getvalue()
    sys.__stderr__.write(repr(captured) + "\n")
    # Users should see 'sudo nope', never the -S flag or the password prompt.
    assert "-S" not in captured
    assert Context().sudo.prompt not in captured
    assert "sudo nope" in captured
def prefixes_should_apply_to_sudo(self, Local):
    runner = Local.return_value
    ctx = Context()
    with ctx.prefix("cd foo"):
        ctx.sudo("whoami")
    # The prefix must end up inside the sudo wrapper.
    expected = "sudo -S -p '[sudo] password: ' cd foo && whoami"
    assert runner.run.called, "sudo() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def prefixes_should_apply_to_run(self, Local):
    runner = Local.return_value
    context = Context()
    with context.prefix('cd foo'):
        context.run('whoami')
    expected = "cd foo && whoami"
    ok_(runner.run.called, "run() never called runner.run()!")
    eq_(runner.run.call_args[0][0], expected)
def cd_should_apply_to_run(self, Local):
    runner = Local.return_value
    ctx = Context()
    with ctx.cd('foo'):
        ctx.run('whoami')
    expected = "cd foo && whoami"
    assert runner.run.called, "run() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def prefixes_should_apply_to_sudo(self, Local):
    runner = Local.return_value
    context = Context()
    with context.prefix('cd foo'):
        context.sudo('whoami')
    # Prefix ends up wrapped by the sudo invocation.
    expected = "sudo -S -p '[sudo] password: ' cd foo && whoami"
    ok_(runner.run.called, "sudo() never called runner.run()!")
    eq_(runner.run.call_args[0][0], expected)
def cd_should_occur_before_prefixes(self, Local):
    runner = Local.return_value
    ctx = Context()
    # prefix entered first, cd second; cd must still come first in the command.
    with ctx.prefix("source venv"), ctx.cd("foo"):
        ctx.run("whoami")
    expected = "cd foo && source venv && whoami"
    assert runner.run.called, "run() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def release_exists(c: Context) -> bool:
    """Check if the current Sentry release exists."""
    info_command = f"sentry-cli releases --org {sentry_org} info {sentry_release}"
    try:
        c.run(info_command, hide="both")
    except UnexpectedExit:
        # sentry-cli exits nonzero when the release is unknown.
        return False
    return True
def cd_should_occur_before_prefixes(self, Local):
    runner = Local.return_value
    context = Context()
    # Even though the prefix context is entered first, cd wins the race.
    with context.prefix('source venv'), context.cd('foo'):
        context.run('whoami')
    expected = "cd foo && source venv && whoami"
    ok_(runner.run.called, "run() never called runner.run()!")
    eq_(runner.run.call_args[0][0], expected)
def install_system(c: Context, os_info: OsInfo, hide: bool) -> None:
    # Install the system-level packages core needs, per distro family.
    # :param c: invoke context used to run the shell commands
    # :param os_info: detected distro family/name/version
    # :param hide: passed through to run() to suppress command output
    if os_info.like == OsLike.DEBIAN:
        c.run(
            "sudo apt install -y automake pkg-config gcc libev-dev ebtables "
            "iproute2 ethtool tk python3-tk bash", hide=hide)
    elif os_info.like == OsLike.REDHAT:
        c.run(
            "sudo yum install -y automake pkgconf-pkg-config gcc gcc-c++ "
            "libev-devel iptables-ebtables iproute python3-devel python3-tkinter "
            "tk ethtool make bash", hide=hide)
        # centos 8+ does not support netem by default
        if os_info.name == OsName.CENTOS and os_info.version >= 8:
            c.run("sudo yum install -y kernel-modules-extra", hide=hide)
            # If sch_netem cannot be loaded, the running kernel lacks the
            # module; only a kernel update (and reboot) fixes that.
            if not c.run("sudo modprobe sch_netem", warn=True, hide=hide):
                print("\nERROR: you need to install the latest kernel")
                print("run the following, restart, and try again")
                print("sudo yum update")
                sys.exit(1)
    # attempt to setup legacy ebtables when an nftables based version is found
    r = c.run("ebtables -V", hide=hide)
    if "nf_tables" in r.stdout:
        # warn=True: failure here degrades WLAN support but is not fatal.
        if not c.run(
                "sudo update-alternatives --set ebtables /usr/sbin/ebtables-legacy",
                warn=True, hide=hide):
            print(
                "\nWARNING: unable to setup ebtables-legacy, WLAN will not work"
            )
def precommit(ctx: Context):
    """Run the "pre-commit" hooks on the codebase.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box('Running "pre-commit" hooks on the codebase...')
    hook_command = "pre-commit run --all-files"
    ctx.run(hook_command)
def install_poetry(c: Context, dev: bool, local: bool, hide: bool) -> None:
    """Install the daemon via poetry: a local wheel build, or a poetry env."""
    if local:
        # Build a wheel and install it system-wide.
        with c.cd(DAEMON_DIR):
            c.run("poetry build -f wheel", hide=hide)
            c.run("sudo python3 -m pip install dist/*")
        return
    install_args = "" if dev else "--no-dev"
    with c.cd(DAEMON_DIR):
        c.run(f"poetry install {install_args}", hide=hide)
        if dev:
            c.run("poetry run pre-commit install", hide=hide)
def transpile(context: Context, watch: bool, mode="development"):
    """Bundle the JS assets via webpack, optionally in watch mode."""
    parts = ['npx', 'webpack']
    if watch:
        parts.append('--watch')
    parts.append('--output-path ./buck/static/js')
    parts.append(f'--mode {mode}')
    context.run(' '.join(parts))
def conn(c=None, one=False, invoke=False):
    """
    Return a usable execution context.

    When ``globing.invoke`` or ``invoke`` is set, return a fresh local
    invoke Context (tagged with ``host='local'``); otherwise ensure ``c``
    is a remote connection, picking one from ``hosts`` if needed.
    """
    if globing.invoke or invoke:
        from invoke import Context
        c = Context(Config())
        c.host = 'local'
        return c
    # Fixed: this note was a no-op bare string literal (and in Chinese);
    # make it a real comment: ensure we were handed a Connection, not a
    # local Context.
    if not hasattr(c, 'host'):
        c = hosts.one() if one else hosts.conn(0)
    print("connection [{}]".format(c.host))
    return c
def sha256(ctx: Context):
    """Compute the project *Pypi* package *sha256* with *OpenSSL*.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box('Computing "sha256"...')
    digest_command = f"openssl sha256 {PYPI_PACKAGE_NAME}-*.tar.gz"
    with ctx.cd("dist"):
        ctx.run(digest_command)
def _run_task(
    ctx: Context,
    connector_string: str,
    task_name: str,
    multi_envs: bool = True,
    module_path: Optional[str] = None,
    task_commands: Dict = TASK_COMMANDS,
    **kwargs: Any,
) -> int:
    """
    Run task in its own environment.

    Creates a throwaway virtualenv, runs the commands registered for
    ``task_name`` inside it (activated via shell prefix), and returns 0 on
    success or 1 after the first failing command.
    """
    cur_dir = os.getcwd()
    if multi_envs:
        if module_path:
            os.chdir(module_path)
            source_path = connector_string
        else:
            # presumably connector_string is a dash-separated connector name
            # whose package dir uses underscores -- TODO confirm with callers
            os.chdir(os.path.join(CONNECTORS_DIR, f"source-{connector_string}"))
            source_path = f"source_{connector_string.replace('-', '_')}"
    else:
        source_path = connector_string
    # Venv is created inside the (possibly just-changed) working directory.
    venv_name = tempfile.mkdtemp(dir=os.curdir)
    virtualenv.cli_run([venv_name])
    activator = os.path.join(os.path.abspath(venv_name), "bin", "activate")
    commands = []
    commands.extend([
        cmd.format(source_path=source_path, venv=venv_name, **kwargs)
        for cmd in task_commands[task_name]
    ])
    exit_code: int = 0
    try:
        # Every command runs with the venv activated via shell prefix.
        with ctx.prefix(f"source {activator}"):
            for command in commands:
                result = ctx.run(command, echo=True, warn=True)
                if result.return_code:
                    # Stop at the first failing command.
                    exit_code = 1
                    break
    finally:
        shutil.rmtree(venv_name, ignore_errors=True)
        # NOTE(review): cwd is restored only when module_path was given, but
        # the multi_envs branch above may chdir without one -- confirm whether
        # leaving the cwd changed in that case is intentional.
        if module_path:
            os.chdir(cur_dir)
    return exit_code
def release(ctx: Context):
    """Release the project to *Pypi* with *Twine*.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box("Releasing...")
    with ctx.cd("dist"):
        # Upload the sdist first, then the wheel.
        for artefact_glob in ("*.tar.gz", "*.whl"):
            ctx.run(f"twine upload {artefact_glob}")
def todo(ctx: Context):
    """Export the TODO items.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box('Exporting "TODO" items...')
    # The export script expects to be run from within utilities/.
    with ctx.cd("utilities"):
        ctx.run("./export_todo.py")
def requirements(ctx: Context):
    """Export the *requirements.txt* file.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box('Exporting "requirements.txt" file...')
    # Freeze the poetry environment, excluding this package itself.
    export_command = (
        "poetry run pip list --format=freeze | "
        'egrep -v "colour-checker-detection=" '
        "> requirements.txt"
    )
    ctx.run(export_command)
def _build():
    """
    Build local support docs tree and return the build target dir for cleanup.
    """
    ctx = Context()
    support = join(dirname(__file__), "_support")
    docs_dir = join(support, "docs")
    build_dir = join(support, "_build")
    sphinx_command = "sphinx-build -c {} -W {} {}".format(
        support, docs_dir, build_dir
    )
    with ctx.cd(support):
        # Turn off stdin mirroring to avoid irritating pytest.
        ctx.run(sphinx_command, in_stream=False)
    return build_dir
def deploy_mpi_operator():
    """Function to deploy mpi operator in the EKS cluster.

    This will support v1alpha2 crd for mpijobs.
    """
    ctx = Context()
    home_dir = ctx.run("echo $HOME").stdout.strip("\n")
    mpi_operator_dir = os.path.join(home_dir, "mpi-operator")
    # Start from a clean checkout.
    if os.path.isdir(mpi_operator_dir):
        ctx.run(f"rm -rf {mpi_operator_dir}")
    # Fixed: the local was misleadingly named clone_mxnet_command (it clones
    # mpi-operator), and the function mixed the module-level run() with
    # ctx.run(); use ctx.run() consistently.
    clone_command = (
        f"git clone https://github.com/kubeflow/mpi-operator {mpi_operator_dir}"
    )
    ctx.run(clone_command, echo=True)
    ctx.run(
        f"kubectl create -f {mpi_operator_dir}/deploy/v1alpha2/mpi-operator.yaml",
        echo=True,
    )
def test_full_integration(folder, cmd, expected_output, py2_only, tmpdir):
    """Run an example command and assert on its output, xfailing on py2."""
    # --durations=10, you will see each one gets run twice, maybe fix?
    ctx = Context()
    assert ctx.pformat()
    with ctx.cd("sites/magic_docs/examples/{}".format(folder)):
        result = ctx.run(
            "TMPDIR={} {}".format(tmpdir, cmd), hide=True, warn=py2_only
        ).stdout
    try:
        assert expected_output in result
    # Fixed: was a bare `except:` which also swallowed KeyboardInterrupt /
    # SystemExit; only the assertion above can fail here.
    except AssertionError:
        if py2_only and six.PY2:
            pytest.xfail("We knew that.")
        raise
def watch_docs(c):
    """
    Watch both doc trees & rebuild them if files change.

    This includes e.g. rebuilding the API docs if the source code changes;
    rebuilding the WWW docs if the README changes; etc.
    """
    # TODO: break back down into generic single-site version, then create split
    # tasks as with docs/www above. Probably wants invoke#63.
    # NOTE: 'www'/'docs' refer to the module level sub-collections. meh.
    # Fixed: regex patterns are now raw strings; '\.' inside a plain string
    # literal is an invalid escape sequence (SyntaxWarning on modern Pythons).
    # Readme & WWW triggers WWW
    www_c = Context(config=c.config.clone())
    www_c.update(**www.configuration())
    www_handler = make_handler(
        ctx=www_c,
        task_=www['build'],
        regexes=[r'\./README.rst', r'\./sites/www'],
        ignore_regexes=[r'.*/\..*\.swp', r'\./sites/www/_build'],
    )
    # Code and docs trigger API
    docs_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    api_handler = make_handler(
        ctx=docs_c,
        task_=docs['build'],
        regexes=[r'\./invoke/', r'\./sites/docs'],
        ignore_regexes=[r'.*/\..*\.swp', r'\./sites/docs/_build'],
    )
    observe(www_handler, api_handler)
def sites(c):
    """
    Build both doc sites w/ maxed nitpicking.
    """
    # TODO: This is super lolzy but we haven't actually tackled nontrivial
    # in-Python task calling yet, so we do this to get a copy of 'our' context,
    # updated with the per-collection config of the docs/www subcollections.
    docs_c = Context(config=c.config.clone())
    www_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    www_c.update(**www.configuration())
    contexts = (docs_c, www_c)
    # Both sites must be built normally first so good intersphinx inventory
    # files exist (circular dependency!). Do it quietly to avoid polluting
    # output; only super-serious errors will bubble up.
    # TODO: wants a 'temporarily tweak context settings' contextmanager
    # TODO: also a spinner, since the silent prebuild looks like a hang when
    # the docs aren't already prebuilt
    for quiet_ctx in contexts:
        quiet_ctx['run'].hide = True
    docs['build'](docs_c)
    www['build'](www_c)
    for quiet_ctx in contexts:
        quiet_ctx['run'].hide = False
    # Run the actual builds, with nitpick=True (nitpicks + tracebacks)
    docs['build'](docs_c, nitpick=True)
    www['build'](www_c, nitpick=True)
def sites(c):
    """
    Build both doc sites w/ maxed nitpicking.
    """
    # TODO: This is super lolzy but we haven't actually tackled nontrivial
    # in-Python task calling yet, so we do this to get a copy of 'our' context,
    # updated with the per-collection config of the docs/www subcollections.
    docs_c = Context(config=c.config.clone())
    www_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    www_c.update(**www.configuration())
    # Both sites must be built normally first so good intersphinx inventory
    # files exist (circular dependency!). Do it quietly to avoid polluting
    # output; only super-serious errors will bubble up.
    # TODO: wants a 'temporarily tweak context settings' contextmanager
    for quiet_ctx in (docs_c, www_c):
        quiet_ctx['run'].hide = True
    docs['build'](docs_c)
    www['build'](www_c)
    for quiet_ctx in (docs_c, www_c):
        quiet_ctx['run'].hide = False
    # Then build with special nitpicking options: warnings become errors and
    # missing references are reported; tracebacks enabled for easier debugging.
    opts = "-W -n -T"
    docs['build'](docs_c, opts=opts)
    www['build'](www_c, opts=opts)
def kwarg_only_adds_to_kwarg(self, Local):
    runner = Local.return_value
    context = Context()
    user_watcher = self.watcher_klass()
    context.sudo("whoami", watchers=[user_watcher])
    # sudo() must append its own responder to the user-specified watcher list.
    watchers = runner.run.call_args[1]["watchers"]
    # Will raise ValueError if ours was dropped from the list.
    watchers.remove(user_watcher)
    # What remains should be exactly one item: sudo's own responder.
    assert len(watchers) == 1
    sudo_watcher = watchers[0]
    assert isinstance(sudo_watcher, FailingResponder)
    assert sudo_watcher.pattern == self.escaped_prompt
def examples(ctx: Context):
    """Run the examples.

    Parameters
    ----------
    ctx
        Context.
    """
    message_box("Running examples...")
    examples_root = os.path.join(PYTHON_PACKAGE_NAME, "examples")
    for root, _dirnames, filenames in os.walk(examples_root):
        for filename in fnmatch.filter(filenames, "*.py"):
            ctx.run(f"python {os.path.join(root, filename)}")
def local(command):
    """
    _local_

    fabric.operations local like command

    Runs ``command`` locally and returns the invoke run() result; logs and
    re-raises UnexpectedExit on failure.
    """
    c = Context()
    LOGGER.info("local({})".format(command))
    try:
        result = c.run(command)
    except UnexpectedExit as ex:
        msg = "Error running command:\n{}".format(ex)
        LOGGER.error(msg)
        raise
    # Fixed: the run() result was computed but silently dropped; return it so
    # callers can inspect stdout / exit status as fabric's local() allows.
    return result
def sites(c):
    """
    Builds both doc sites w/ maxed nitpicking.
    """
    # -W: warnings become errors; -n: warn on missing references; -T:
    # tracebacks enabled for easier debugging. Maximally noisy docs build.
    opts = "-W -n -T"
    # This is super lolzy but we haven't actually tackled nontrivial in-Python
    # task calling yet, so...
    docs_c = Context(config=c.config.clone())
    www_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    www_c.update(**www.configuration())
    docs['build'](docs_c, opts=opts)
    www['build'](www_c, opts=opts)
def exec_in_dir(base_command, directory, options='', tee=None, exit_on_failure=True):
    """Run base_command from within ``directory``, inheriting shell env vars."""
    if tee:
        tee = sum_paths(directory, tee)
    # NOTE: Path doesn't seem to be compatible with cd here,
    # see PR https://j.mp/3dq3nvf
    directory = str(directory)
    config = Config()
    config.load_shell_env()
    context = Context(config=config)
    with context.cd(directory):
        exec(base_command, options, exit_on_failure, tee=tee, context=context)
def prefixes_command_with_sudo(self, Local):
    runner = Local.return_value
    Context().sudo("whoami")
    # NOTE: implicitly tests default sudo.prompt conf value
    expected = "sudo -S -p '[sudo] password: ' whoami"
    assert runner.run.called, "sudo() never called runner.run()!"
    assert runner.run.call_args[0][0] == expected
def _run(self, pty):
    """Drive the interrupting runner under a fresh Context, swallowing ^C."""
    ctx = Context(config=Config())
    runner = _KeyboardInterruptingFastLocal(ctx)
    try:
        runner.run(_, pty=pty)
    except KeyboardInterrupt:
        # The interrupt is the point of the exercise; the caller inspects
        # the runner afterwards.
        pass
    return runner
def sites(c):
    """
    Build both doc sites w/ maxed nitpicking.
    """
    # TODO: This is super lolzy but we haven't actually tackled nontrivial
    # in-Python task calling yet, so we do this to get a copy of 'our' context,
    # updated with the per-collection config of the docs/www subcollections.
    docs_c = Context(config=c.config.clone())
    www_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    www_c.update(**www.configuration())

    def set_hidden(hidden):
        # TODO: wants a 'temporarily tweak context settings' contextmanager
        docs_c["run"].hide = hidden
        www_c["run"].hide = hidden

    # Both sites must be built normally first so good intersphinx inventory
    # files exist (circular dependency!). Build quietly to avoid polluting
    # output; only super-serious errors will bubble up.
    # TODO: also a spinner, since the silent prebuild looks like a hang when
    # the docs aren't already prebuilt
    set_hidden(True)
    docs["build"](docs_c)
    www["build"](www_c)
    set_hidden(False)
    # Run the actual builds, with nitpick=True (nitpicks + tracebacks)
    docs["build"](docs_c, nitpick=True)
    www["build"](www_c, nitpick=True)
def _run_configure_command(
    self,
    ctx: Context,
    openssl_target: str,
    zlib_lib_path: Path,
    zlib_include_path: Path,
) -> None:
    """Run OpenSSL's Configure with platform-appropriate zlib arguments."""
    is_windows = self.platform in [
        SupportedPlatformEnum.WINDOWS_32,
        SupportedPlatformEnum.WINDOWS_64,
    ]
    if is_windows:
        extra_args = '-no-asm -DZLIB_WINAPI'  # *hate* zlib
        # On Windows OpenSSL wants the full path to the lib file
        final_zlib_path = zlib_lib_path
    else:
        extra_args = ' -fPIC'
        # On Unix OpenSSL wants the path to the folder where the lib is
        final_zlib_path = zlib_lib_path.parent
    configure_command = self._OPENSSL_CONF_CMD.format(
        target=openssl_target,
        zlib_lib_path=final_zlib_path,
        zlib_include_path=zlib_include_path,
        extra_args=extra_args,
    )
    ctx.run(configure_command)
def _expect_responses(self, expected, config=None, kwargs=None):
    """
    Execute mocked sudo(), expecting watchers= kwarg in its run().

    * expected: list of 2-tuples of FailingResponder prompt/response
    * config: Config object, if an overridden one is needed
    * kwargs: sudo() kwargs, if needed
    """
    kwargs = kwargs or {}
    Local = Mock()
    runner = Local.return_value
    context = Context(config=config) if config else Context()
    # Route the context's runner class at our mock.
    context.config.runners.local = Local
    context.sudo("whoami", **kwargs)
    # Pull out only the interesting bits - pattern/response - ignoring the
    # sentinel, etc for now.
    observed = [
        (watcher.pattern, watcher.response)
        for watcher in runner.run.call_args[1]["watchers"]
    ]
    assert observed == expected
def nesting_should_retain_order(self, Local):
    runner = Local.return_value
    ctx = Context()

    def check(expected):
        assert runner.run.called, "run() never called runner.run()!"
        assert runner.run.call_args[0][0] == expected

    with ctx.prefix('cd foo'):
        with ctx.prefix('cd bar'):
            ctx.run('whoami')
            check("cd foo && cd bar && whoami")
        ctx.run('whoami')
        check("cd foo && whoami")
    # also test that prefixes do not persist
    ctx.run('whoami')
    check("whoami")
def nesting_should_retain_order(self, Local):
    runner = Local.return_value
    context = Context()

    def check(expected):
        ok_(runner.run.called, "run() never called runner.run()!")
        eq_(runner.run.call_args[0][0], expected)

    with context.prefix('cd foo'):
        with context.prefix('cd bar'):
            context.run('whoami')
            check("cd foo && cd bar && whoami")
        context.run('whoami')
        check("cd foo && whoami")
    # also test that prefixes do not persist
    context.run('whoami')
    check("whoami")
def build(
    self,
    ctx: Context,
    zlib_lib_path: Optional[Path] = None,
    zlib_include_path: Optional[Path] = None,
    should_build_for_debug: bool = False,
) -> None:
    """Configure and build OpenSSL, linking it against the supplied zlib."""
    # Both zlib paths are required despite being keyword-optional.
    if not zlib_lib_path or not zlib_include_path:
        raise ValueError('Missing argument')
    # Build OpenSSL
    openssl_target = self._get_build_target(should_build_for_debug)
    with ctx.cd(str(self.src_path)):
        self._run_configure_command(
            ctx, openssl_target, zlib_lib_path, zlib_include_path
        )
        self._run_build_steps(ctx)
class configuration_proxy:
    "Dict-like proxy for self.config"

    def setup(self):
        # Fresh single-key config before each test.
        self.c = Context(config=Config({'foo': 'bar'}))

    def direct_access_allowed(self):
        eq_(self.c.config.__class__, Config)
        eq_(self.c.config['foo'], 'bar')
        eq_(self.c.config.foo, 'bar')

    def getitem(self):
        "___getitem__"
        eq_(self.c['foo'], 'bar')

    def getattr(self):
        "__getattr__"
        eq_(self.c.foo, 'bar')

    def get(self):
        eq_(self.c.get('foo'), 'bar')
        eq_(self.c.get('biz', 'baz'), 'baz')

    def keys(self):
        skip()

    def values(self):
        skip()

    def iter(self):
        "__iter__"
        skip()

    def update(self):
        self.c.update({'newkey': 'newval'})
        eq_(self.c['newkey'], 'newval')
def watch_docs(c):
    """
    Watch both doc trees & rebuild them if files change.

    This includes e.g. rebuilding the API docs if the source code changes;
    rebuilding the WWW docs if the README changes; etc.

    Reuses the configuration values ``packaging.package`` or ``tests.package``
    (the former winning over the latter if both defined) when determining
    which source directory to scan for API doc updates.
    """
    # TODO: break back down into generic single-site version, then create split
    # tasks as with docs/www above. Probably wants invoke#63.
    # NOTE: 'www'/'docs' refer to the module level sub-collections. meh.
    # Readme & WWW triggers WWW
    www_c = Context(config=c.config.clone())
    www_c.update(**www.configuration())
    www_handler = make_handler(
        ctx=www_c,
        task_=www["build"],
        regexes=[r"\./README.rst", r"\./sites/www"],
        ignore_regexes=[r".*/\..*\.swp", r"\./sites/www/_build"],
    )
    # Code and docs trigger API
    docs_c = Context(config=c.config.clone())
    docs_c.update(**docs.configuration())
    api_regexes = [r"\./sites/docs"]
    # packaging.package wins over tests.package when both are defined.
    package = c.get("packaging", {}).get("package", None)
    if package is None:
        package = c.get("tests", {}).get("package", None)
    if package:
        api_regexes.append(r"\./{}/".format(package))
    api_handler = make_handler(
        ctx=docs_c,
        task_=docs["build"],
        regexes=api_regexes,
        ignore_regexes=[r".*/\..*\.swp", r"\./sites/docs/_build"],
    )
    observe(www_handler, api_handler)
def setup(self):
    # Two-level config: a flat key plus a nested mapping.
    self.c = Context(
        config=Config(defaults={"foo": "bar", "biz": {"baz": "boz"}})
    )
class configuration_proxy:
    "Dict-like proxy for self.config"

    def setup(self):
        # Fresh two-level config before each test: a flat 'foo' key plus a
        # nested 'biz' mapping, covering both flat and nested proxying.
        config = Config(defaults={"foo": "bar", "biz": {"baz": "boz"}})
        self.c = Context(config=config)

    def direct_access_allowed(self):
        assert self.c.config.__class__ == Config
        assert self.c.config["foo"] == "bar"
        assert self.c.config.foo == "bar"

    def config_attr_may_be_overwritten_at_runtime(self):
        new_config = Config(defaults={"foo": "notbar"})
        self.c.config = new_config
        assert self.c.foo == "notbar"

    def getitem(self):
        "___getitem__"
        assert self.c["foo"] == "bar"
        assert self.c["biz"]["baz"] == "boz"

    def getattr(self):
        "__getattr__"
        assert self.c.foo == "bar"
        assert self.c.biz.baz == "boz"

    def get(self):
        assert self.c.get("foo") == "bar"
        assert self.c.get("nope", "wut") == "wut"
        assert self.c.biz.get("nope", "hrm") == "hrm"

    def pop(self):
        # First pop removes the key, so the second falls back to the default.
        assert self.c.pop("foo") == "bar"
        assert self.c.pop("foo", "notbar") == "notbar"
        assert self.c.biz.pop("baz") == "boz"

    def popitem(self):
        # Drain the nested dict first, then the top level; order matters.
        assert self.c.biz.popitem() == ("baz", "boz")
        del self.c["biz"]
        assert self.c.popitem() == ("foo", "bar")
        assert self.c.config == {}

    def del_(self):
        "del"
        del self.c["foo"]
        del self.c["biz"]["baz"]
        assert self.c.biz == {}
        del self.c["biz"]
        assert self.c.config == {}

    def clear(self):
        self.c.biz.clear()
        assert self.c.biz == {}
        self.c.clear()
        assert self.c.config == {}

    def setdefault(self):
        # Existing keys return current values; missing keys get installed.
        assert self.c.setdefault("foo") == "bar"
        assert self.c.biz.setdefault("baz") == "boz"
        assert self.c.setdefault("notfoo", "notbar") == "notbar"
        assert self.c.notfoo == "notbar"
        assert self.c.biz.setdefault("otherbaz", "otherboz") == "otherboz"
        assert self.c.biz.otherbaz == "otherboz"

    def update(self):
        self.c.update({"newkey": "newval"})
        assert self.c["newkey"] == "newval"
        assert self.c.foo == "bar"
        self.c.biz.update(otherbaz="otherboz")
        assert self.c.biz.otherbaz == "otherboz"
def honors_runner_config_setting(self):
    # run() should instantiate whatever runner class the config names.
    runner_cls = Mock()
    ctx = Context(Config({"runners": {"local": runner_cls}}))
    ctx.run("foo")
    assert runner_cls.mock_calls == [call(ctx), call().run("foo")]
def defaults_to_Local(self, Local):
    # Without any runner config override, run() falls back to Local.
    ctx = Context()
    ctx.run("foo")
    assert Local.mock_calls == [call(ctx), call().run("foo")]
def build(self, ctx: Context) -> None:
    """Compile zlib for the current platform (MASM/msbuild or configure/make)."""
    windows_platforms = (
        SupportedPlatformEnum.WINDOWS_32,
        SupportedPlatformEnum.WINDOWS_64,
    )
    if self.platform not in windows_platforms:
        # Linux/macOS build
        with ctx.cd(str(self.src_path)):
            ctx.run('CFLAGS="-fPIC" ./configure -static')
            ctx.run('make clean')
            ctx.run('make')
        return
    # Windows build: pick the arch-specific MASM helper and msbuild platform.
    if self.platform == SupportedPlatformEnum.WINDOWS_32:
        arch, build_script, build_platform = 'x86', 'bld_ml32.bat', 'Win32'
    else:
        arch, build_script, build_platform = 'x64', 'bld_ml64.bat', 'x64'
    masm_path = self.src_path / 'contrib' / f'masm{arch}'
    with ctx.cd(str(masm_path)):
        ctx.run(build_script)
        ctx.run(
            f'msbuild ..\\vstudio\\vc14\\zlibvc.sln /P:Configuration=Release /P:Platform={build_platform}'
        )