def _call_objs(self, contextualized):
    """Run t3 and verify its pre/post call objects receive their arguments."""
    pre_body, post_body = Mock(), Mock()
    t1 = Task(pre_body, contextualized=contextualized)
    t2 = Task(post_body, contextualized=contextualized)
    t3 = Task(
        Mock(),
        pre=[call(t1, 5, foo='bar')],
        post=[call(t2, 7, biz='baz')],
    )
    coll = Collection(t1=t1, t2=t2, t3=t3)
    Executor(collection=coll).execute('t3')
    # Check each wrapped body in turn: kwargs, then the positional argument
    # (preceded by a Context when tasks are contextualized).
    expected = (
        (pre_body, 5, {'foo': 'bar'}),
        (post_body, 7, {'biz': 'baz'}),
    )
    for body, positional, keyword in expected:
        args, kwargs = body.call_args
        eq_(kwargs, keyword)
        if contextualized:
            assert isinstance(args[0], Context)
            eq_(args[1], positional)
        else:
            eq_(args, (positional,))
def deduping_treats_different_calls_to_same_task_differently(self):
    # Distinct call() argument sets for one task must each execute once,
    # while an exact duplicate call is deduplicated.
    body = Mock()
    t1 = Task(body)
    pre = [call(t1, 5), call(t1, 7), call(t1, 5)]
    t2 = Task(Mock(), pre=pre)
    c = Collection(t1=t1, t2=t2)
    e = Executor(collection=c)
    e.execute('t2')
    # Does not call the second t1(5)
    body.assert_has_calls([mock_call(5), mock_call(7)])
    # assert_has_calls only checks for a matching subsequence, so by itself
    # it would still pass if the duplicate t1(5) ran; pin the total count
    # to actually verify the dedup.
    assert body.call_count == 2
def deduping_treats_different_calls_to_same_task_differently(self):
    # Distinct call() argument sets for one task must each execute once,
    # while an exact duplicate call is deduplicated.
    body = Mock()
    t1 = Task(body)
    pre = [call(t1, 5), call(t1, 7), call(t1, 5)]
    t2 = Task(Mock(), pre=pre)
    c = Collection(t1=t1, t2=t2)
    e = Executor(collection=c)
    e.execute("t2")
    # Does not call the second t1(5)
    param_list = []
    for body_call in body.call_args_list:
        assert isinstance(body_call[0][0], Context)
        param_list.append(body_call[0][1])
    # Compare as a sorted list, not a set: a set comparison would collapse
    # the duplicate t1(5) and pass even if deduplication were broken.
    assert sorted(param_list) == [5, 7]
def _call_objs(self):
    """Run t3; check that pre/post bodies each got (Context, arg) + kwargs."""
    pre_body, post_body = Mock(), Mock()
    t1 = Task(pre_body)
    t2 = Task(post_body)
    t3 = Task(
        Mock(),
        pre=[call(t1, 5, foo='bar')],
        post=[call(t2, 7, biz='baz')],
    )
    coll = Collection(t1=t1, t2=t2, t3=t3)
    Executor(collection=coll).execute('t3')
    # Same assertions for the pre-task and post-task bodies.
    for body, positional, expected_kwargs in (
        (pre_body, 5, {'foo': 'bar'}),
        (post_body, 7, {'biz': 'baz'}),
    ):
        args, kwargs = body.call_args
        assert kwargs == expected_kwargs
        assert isinstance(args[0], Context)
        assert args[1] == positional
def _call_objs(self):
    """Execute t3 and verify the argument plumbing of its pre/post calls."""
    pre_body, post_body = Mock(), Mock()
    t1 = Task(pre_body)
    t2 = Task(post_body)
    t3 = Task(
        Mock(),
        pre=[call(t1, 5, foo="bar")],
        post=[call(t2, 7, biz="baz")],
    )
    collection = Collection(t1=t1, t2=t2, t3=t3)
    executor = Executor(collection=collection)
    executor.execute("t3")
    checks = [
        (pre_body, 5, {"foo": "bar"}),
        (post_body, 7, {"biz": "baz"}),
    ]
    # Each mocked body: kwargs match, first positional is a Context,
    # second positional is the value given to call().
    for mock_body, expected_arg, expected_kwargs in checks:
        args, kwargs = mock_body.call_args
        assert kwargs == expected_kwargs
        assert isinstance(args[0], Context)
        assert args[1] == expected_arg
def flags_to_arg_string(flags):
    # Render each flag name as "--<flag>", space separated.
    rendered = ['--{}'.format(flag) for flag in flags]
    return ' '.join(rendered)


# Default task: docker-compose up, optionally detached.
@task(default=True)
def up(ctx, d=False):
    detach = ' '.join(['-d'] if d else [])
    ctx.run('docker-compose {} {}'.format('up', detach))


# Convenience wrapper: always bring the stack up detached.
@task(pre=[call(up, d=True)])
def launch(ctx):
    pass


# Tear down with the project defaults plus any caller-supplied flags.
@task
def down(ctx, flags=None):
    combined = DOCKER_COMPOSE_DEFAULTS['down'] + (flags or [])
    ctx.run('docker-compose {} {}'.format('down', flags_to_arg_string(combined)))


# Remove stopped containers (and their volumes) after `down`.
@task(pre=[down])
def rmf(ctx):
    ctx.run('docker-compose {} {}'.format('rm', '-v'))
@task(i18nc, assets_build)
def dist(ctx, buildno=None):
    '''Package for distribution'''
    header(dist.__doc__)
    perform_dist(ctx, buildno)


@task(i18nc)
def pydist(ctx, buildno=None):
    '''Perform python packaging (without compiling assets)'''
    header(pydist.__doc__)
    perform_dist(ctx, buildno)


def perform_dist(ctx, buildno=None):
    """Run setup.py bdist_wheel, optionally tagging egg_info with buildno."""
    parts = ['python setup.py']
    if buildno:
        parts.append('egg_info -b {0}'.format(buildno))
    parts.append('bdist_wheel')
    command = ' '.join(parts)
    # Build from the repository root; pty keeps setup.py output nicely colored.
    with ctx.cd(ROOT):
        ctx.run(command, pty=True)
    success('Distribution is available in dist directory')


@task(clean, qa, call(cover, report=True, html=True), dist, default=True)
def all(ctx):
    '''Run tests, reports and packaging'''
    pass
# NOTE(review): this excerpt starts mid-function — `zipdata`, `version`, and
# `installer_path` are defined earlier in the enclosing function, outside
# this view.

# Split the encoded payload into 79-character lines so it embeds readably
# in the generated wrapper script.
chunked = []
for i in range(0, len(zipdata), 79):
    chunked.append(zipdata[i:i + 79])

# Make sure the target directory exists before writing the installer.
os.makedirs(os.path.dirname(installer_path), exist_ok=True)

with open(installer_path, "w") as fp:
    fp.write(
        WRAPPER_TEMPLATE.format(
            version="" if version is None else version,
            zipfile="\n".join(chunked),
        ),
    )

# Ensure the permissions on the newly created file
# (preserve existing bits, but force read+execute for user/group/other).
oldmode = os.stat(installer_path).st_mode & 0o7777
newmode = (oldmode | 0o555) & 0o7777
os.chmod(installer_path, newmode)

print("[generate.installer] Generated installer")


# Default task: build the standard installer plus a second variant with
# version="<8" written to the "3.2" path — presumably an older-interpreter
# build; confirm against installer()'s definition.
@invoke.task(
    default=True,
    pre=[
        invoke.call(installer),
        invoke.call(installer, version="<8", installer_path=_path("3.2")),
    ],
)
def all():
    pass
from invoke import Collection, task, call

from package import module


# Plain task used only as a prerequisite of `toplevel`.
@task
def top_pre(ctx):
    pass


# Depends on top_pre via an argument-less call() wrapper rather than
# passing the task object directly.
@task(call(top_pre))
def toplevel(ctx):
    pass


# Root namespace: the `module` sub-collection plus the top-level task.
ns = Collection(module, toplevel)
# NOTE(review): excerpt begins mid-function — `ctx`, `flake8_results`, and the
# info/error/success/header helpers are defined outside this view.
info('Ensure PyPI can render README and CHANGELOG')
# warn=True keeps a failing check from aborting so we can report it ourselves.
readme_results = ctx.run('python setup.py check -m -s', pty=True, warn=True, hide=True)
if readme_results.failed:
    print(readme_results.stdout)
    error('README and/or CHANGELOG is not renderable by PyPI')
else:
    success('README and CHANGELOG are renderable by PyPI')
if flake8_results.failed or readme_results.failed:
    error('Quality check failed')
    # Exit with the first non-zero return code of the two checks.
    exit(flake8_results.return_code or readme_results.return_code)
success('Quality check OK')


@task
def dist(ctx, buildno=None):
    '''Package for distribution'''
    header('Building a distribuable package')
    # Optionally tag egg_info with a build number before building the wheel.
    cmd = ['python setup.py']
    if buildno:
        cmd.append('egg_info -b {0}'.format(buildno))
    cmd.append('bdist_wheel')
    with ctx.cd(ROOT):
        ctx.run(' '.join(cmd), pty=True)
    success('Distribution is available in dist directory')


# Default pipeline: clean, QA, tests with report, then packaging.
@task(clean, qa, call(test, report=True), dist, default=True)
def default(ctx):
    '''Perform quality report, tests and packaging'''
    pass
# NOTE(review): excerpt begins mid-definition — `allow_reuse_address` looks
# like the tail of a TCPServer subclass declared above this view, and the
# server lines below appear to be the body of a serve task; the flat layout
# here is a reconstruction — confirm against the full file.
allow_reuse_address = True

server = AddressReuseTCPServer(('', SERVE_PORT), ComplexHTTPRequestHandler)
sys.stderr.write('Serving on port {} ...\n'.format(SERVE_PORT))
server.serve_forever()


@invoke.task(pre=[clean, build])
def rebuild():
    """Perform `clean` and `build`"""
    pass


@invoke.task(pre=[build, serve])
def reserve():
    """Perform `build` and `serve`"""
    pass


@invoke.task(pre=[clean, invoke.call(build, production=True)])
def publish():
    """Publish to production via SSH and RSync"""
    # Push the generated site: -c checksums, --delete removes stale files.
    invoke.run(('rsync -e "ssh -p {port}" -P -rvzc --delete {outputdir}/ '
                '{username}@{hostname}:{wwwdir} --cvs-exclude').format(
                    username=SSH_USERNAME, hostname=SSH_HOSTNAME, port=SSH_PORT,
                    outputdir=OUTPUTDIR, wwwdir=WWWDIR))
    # Fix permissions.
    _remotecommand([
        'find {wwwdir} -type d -exec chmod {dirperms} {{}} +'.format(
            wwwdir=WWWDIR, dirperms=WWWDIRPERMS),
        'find {wwwdir} -type f -exec chmod {fileperms} {{}} +'.format(
            wwwdir=WWWDIR, fileperms=WWWFILEPERMS)])

# ------------------------------------------------------------------------------
# use readme file for rendering index page index_path = os.path.join("docs", "index.md") index_backup_path = "index.md_original" with backed_up_file(index_path, index_backup_path): shutil.copy2("README.md", index_path) run("mkdocs build" + (" --clean" if clean else ""), echo=True) @task def api(): """ Compiles API reference into site folder. """ lein("codox") @task(post=[call(mkdocs, clean=True), call(api)]) def site(): """ Builds project site (including API docs). """ pass ################################################### Helpers @contextlib.contextmanager def backed_up_file(filepath, backup_path): """ File will be returned to its initial state on context exit. """ with temp_file(backup_path): try: print("copy " + filepath + " to backup " + backup_path) shutil.copy2(filepath, backup_path) yield finally:
# NOTE(review): excerpt begins mid-function — the failure message and the
# unzip block below belong to a task (presumably `logo`, referenced by
# `poster`) defined above this view; `c`, `zippath`, and `logodir` come from
# that enclosing scope. Indentation reconstructed.
print("You failed to download the zip file")
exit(1)

with tempfile.TemporaryDirectory() as d:
    # Extract the print logos and keep only the EPS files.
    c.run(f"unzip -q {zippath} -d {d}")
    c.run(f"mv {d}/MIT-logos-print/*.eps {logodir}")
    c.run(f"rm {zippath}")


@task(dirs)
def compile(c, name):
    # LOCALPAPERBUILD presumably toggles local-build behavior inside the TeX
    # sources — confirm against the .tex files.
    c.run(f"latexmk {name}.tex", env={"LOCALPAPERBUILD": "1"})


@task(logo, call(compile, "poster"))
def poster(c):
    """Compile poster.pdf"""
    pass


@task(call(compile, "main"))
def main(c):
    """Compile main.pdf"""
    pass


@task(main)
def arxiv(c):
    """Prepare arXiv submission tarball"""
    c.run("rm -rf arxiv")
def apply_plan(context, to=None, parallelism=64):
    """Apply a previously generated terraform plan for the given target."""
    plan_path = os.path.join(*tfplan_file(context, to))
    # Flags are built up front (matching the original evaluation order of
    # tfstate_file) before the plan file is validated.
    state_flag = '-state-out={}'.format(os.path.join(*tfstate_file(context)))
    parallelism_flag = '-parallelism={}'.format(parallelism)
    if not os.path.isfile(plan_path):
        # Refuse to apply without an explicit, pre-generated plan file.
        message = ('{} does not exist or is not a regular file. '
                   'You need to willingly create a plan for that.')
        print(message.format(plan_path))
        sys.exit(1)
    run_terraform(
        context,
        'apply {} {} {}'.format(parallelism_flag, state_flag, plan_path))


# `up` generates the 'create' plan via its pre-task, then applies it.
@invoke.task(pre=[invoke.call(plan, to='create')])
def up(context, parallelism=64):
    apply_plan(context, 'create', parallelism)


# Since both destruction and update are meant to modify an infrastructure, we
# won't run them automatically at this stage.
@invoke.task
def down(context, parallelism=64):
    apply_plan(context, 'destroy', parallelism)


@invoke.task
def update(context, parallelism=64):
    apply_plan(context, 'update', parallelism)
# NOTE(review): excerpt begins mid-statement — the leading `"OS"]` is the tail
# of an assignment truncated above this view — and the `serve` task body is
# also cut off at the end of the excerpt.
        "OS"]
if verbose >= 1:
    msg = "[detect-os] Detected: {}".format(
        ctx.config["run"]["env"]["DETECTED_OS"])
    click.secho(msg, fg=COLOR_SUCCESS)

if ctx.config["run"]["env"]["DETECTED_OS"] == "Darwin":
    # macOS build environment: force x86_64 and point the compiler at
    # Homebrew's libffi and openssl installs.
    ctx.config["run"]["env"]["ARCHFLAGS"] = "-arch x86_64"
    ctx.config["run"]["env"][
        "PKG_CONFIG_PATH"] = "/usr/local/opt/libffi/lib/pkgconfig"
    ctx.config["run"]["env"]["LDFLAGS"] = "-L/usr/local/opt/openssl/lib"
    ctx.config["run"]["env"]["CFLAGS"] = "-I/usr/local/opt/openssl/include"


# Detect the OS first so the env overrides above are in place.
@task(pre=[call(detect_os, loc="local")], incrementable=["verbose"])
def serve(ctx, loc="local", verbose=0, cleanup=False):
    """
    start up fastapi application
    Usage: inv local.serve
    """
    env = get_compose_env(ctx, loc=loc)

    # Override run commands' env variables one key at a time
    for k, v in env.items():
        ctx.config["run"]["env"][k] = v

    if verbose >= 1:
        msg = "[serve] override env vars 'SERVER_NAME' and 'SERVER_HOST' - We don't want to mess w/ '.env.dist' for this situation"
        click.secho(msg, fg=COLOR_SUCCESS)
# NOTE(review): excerpt begins mid-function — `artifacts_to_delete` and `ctx`
# come from an enclosing clean task defined above this view.
for artifact in artifacts_to_delete:
    command = f"find . -name {artifact} | xargs rm -rfv"
    ctx.run(command)


@task
def resolve(ctx, rich_output=True):
    # -vvv: maximally verbose dependency resolution output.
    ctx.run("poetry install -vvv", pty=rich_output)


@task
def init(ctx, include_lock=False, rich_output=True):
    # Fresh start: clean (optionally removing the lock file), then resolve.
    clean(ctx, include_lock)
    resolve(ctx, rich_output=rich_output)


@task
def format(ctx):
    ctx.run("black .")


@task
def test(ctx, rich_output=True):
    ctx.run("pytest -s --show-capture=no", pty=rich_output)


# Build runs the tests first (no pty, so captured output stays plain).
@task(pre=[call(test, rich_output=False)])
def build(ctx, rich_output=True):
    ctx.run("poetry build", pty=rich_output)
from invoke import task, call

from . import dc


# Default task: launch the compose stack, run the test suite, tear down,
# and exit with the suite's status.
@task(default=True, pre=[call(dc.launch)])
def docker(ctx):
    # Give the freshly launched stack a moment to come up.
    ctx.run('sleep 10')
    test_run = ctx.run('tests/run', pty=True)
    dc.down(ctx)
    # Propagate the test suite's exit code as our own.
    exit(test_run.exited)
# NOTE(review): excerpt begins mid-function — the examples loop below uses
# `full_site_path`, `chdir`, and `lein` defined outside this view — and is
# truncated inside backed_up_file's `finally:` clause at the end.
for example_name in os.listdir("examples"):
    example_path = os.path.join("examples", example_name)
    # Only build directories that are actually leiningen projects.
    if os.path.isfile(os.path.join(example_path, "project.clj")):
        with chdir(example_path):
            lein("clean")
            # todomvc uses a dedicated minified-build alias.
            if example_name == "todomvc":
                lein("cljsbuild-min")
            else:
                lein("cljsbuild once min")
        shutil.copytree(os.path.join("resources", "public"),
                        os.path.join(full_site_path, "examples", example_name))


# After this task's own body: clean mkdocs build, then graphs, API docs,
# and examples, in that order.
@task(post=[call(mkdocs, clean=True), call(graphs), call(api), call(examples)])
def site():
    """
    Cleans site folder, builds project site, compiles graphs and examples into site folder.
    """
    pass


################################################### Helpers

@contextlib.contextmanager
def backed_up_file(filepath, backup_path):
    """
    File will be returned to its initial state on context exit.
    """
    with temp_file(backup_path):
        try:
            print("copy " + filepath + " to backup " + backup_path)
            shutil.copy2(filepath, backup_path)
            yield
        finally:
            # (restore logic continues beyond this excerpt)
# The reason we need to do this instead of just directly executing the # zip script is that while Python will happily execute a zip script if # passed it on the file system, it will not however allow this to work if # passed it via stdin. This means that this wrapper script is required to # make ``curl https://...../get-pip.py | python`` continue to work. print("[generate.installer] Write the wrapper script with the bundled zip " "file") zipdata = base64.b85encode(data).decode("utf8") chunked = [] for i in range(0, len(zipdata), 79): chunked.append(zipdata[i : i + 79]) os.makedirs(os.path.dirname(installer_path), exist_ok=True) with open(installer_path, "w") as fp: fp.write(WRAPPER_TEMPLATE.format(version="" if version is None else version, zipfile="\n".join(chunked))) # Ensure the permissions on the newly created file oldmode = os.stat(installer_path).st_mode & 0o7777 newmode = (oldmode | 0o555) & 0o7777 os.chmod(installer_path, newmode) print("[generate.installer] Generated installer") @invoke.task( default=True, pre=[invoke.call(installer), invoke.call(installer, version="<8", installer_path=_path("3.2"))] ) def all(): pass
# NOTE(review): excerpt begins mid-function — `chunked`, `zipdata`, `version`,
# and `installer_path` are defined earlier in the enclosing function, outside
# this view.
for i in range(0, len(zipdata), 79):
    chunked.append(zipdata[i:i + 79])

# Make sure the target directory exists before writing the installer.
os.makedirs(os.path.dirname(installer_path), exist_ok=True)

with open(installer_path, "w") as fp:
    fp.write(
        WRAPPER_TEMPLATE.format(
            version="" if version is None else version,
            zipfile="\n".join(chunked),
        ),
    )

# Ensure the permissions on the newly created file
# (preserve existing bits, force read+execute for everyone).
oldmode = os.stat(installer_path).st_mode & 0o7777
newmode = (oldmode | 0o555) & 0o7777
os.chmod(installer_path, newmode)

print("[generate.installer] Generated installer")


# Default task: build the standard installer plus two pinned variants —
# version="<8" to the "3.2" path and version="<10" to the "2.6" path —
# presumably builds for older interpreters; confirm against installer().
@invoke.task(
    default=True,
    pre=[
        invoke.call(installer),
        invoke.call(installer, version="<8", installer_path=_path("3.2")),
        invoke.call(installer, version="<10", installer_path=_path("2.6")),
    ],
)
def all(ctx):
    pass
# NOTE(review): excerpt begins mid-function — the copy below is the tail of a
# task defined above this view; `copy`, `func_test_dir`, and `unit_test_dir`
# come from outside this excerpt.
print('Copying ' + src + ' to ' + dst)
copy(src, dst)
print('Done')


@task(set_settings)
def setup(ctx):
    """Copy the configuration and alembic sample files from their template."""
    src = 'alembic.ini.sample'
    dst = 'alembic.ini'
    print('Copying ' + src + ' to ' + dst)
    copy(src, dst)
    print('Done')


# Both test tasks force the 'test' settings environment first.
@task(pre=[call(set_settings, environment='test')])
def test_func(ctx):
    """Run the functional tests."""
    ctx.run('nosetests -w ' + func_test_dir)


@task(pre=[call(set_settings, environment='test')])
def test_unit(ctx):
    """Run the unit tests."""
    ctx.run('nosetests -w ' + unit_test_dir)


@task(test_func, test_unit)
def test(ctx):
    """Run the tests (functional and unit)."""
    pass
# Only display result ctx.config["run"]["echo"] = True # Override run commands env variables one key at a time for k, v in env.items(): ctx.config["run"]["env"][k] = v _cmd = r"./script/open-browser.py http://localhost:11267/docs" ctx.run(_cmd) @task( incrementable=["verbose"], pre=[call(view_api_docs, loc="local"), call(view_coverage, loc="local")], ) def browser(ctx, loc="local"): """ Open api swagger docs inside of browser Usage: inv ci.view-api-docs """ env = get_compose_env(ctx, loc=loc) # Only display result ctx.config["run"]["echo"] = True # Override run commands env variables one key at a time for k, v in env.items(): ctx.config["run"]["env"][k] = v