def collection():
    """Return an invoke collection for bun"""  # pylint: disable=invalid-name
    tasks = invoke.Collection(
        bun.backup.backup,
        bun.backup.check,
        bun.backup.ignore,
        bun.backup.pretend,
        bun.backup.restore,
        bun.backup.verify,
        bun.backup.watch,
        bun.config,
        bun.defaults,
    )
    tasks.configure(settings())
    return tasks
except KeyboardInterrupt: print('\nAborted!') return arg_display = ' '.join(f'"{n}"' for n in artifacts) ctx.run(f'twine upload --repository="{repo}" {arg_display}') version = _prebump(version, prebump) _write_version(version) ctx.run(f'git commit -am "Prebump to {version}"') @invoke.task def build_docs(ctx): _current_version = find_version() minor = _current_version.split(".")[:2] docs_folder = (_get_git_root(ctx) / 'docs').as_posix() if not docs_folder.endswith('/'): docs_folder = '{0}/'.format(docs_folder) args = ["--ext-autodoc", "--ext-viewcode", "-o", docs_folder] args.extend(["-A", "'Dan Ryan <*****@*****.**>'"]) args.extend(["-R", _current_version]) args.extend(["-V", ".".join(minor)]) args.extend(["-e", "-M", "-F", f"src/{PACKAGE_NAME}"]) print("Building docs...") ctx.run("sphinx-apidoc {0}".format(" ".join(args))) ns = invoke.Collection(build_docs, vendoring, news, release)
[int(item["status"]["capacity"]["pods"]) for item in nodes["items"]]) pod_total = c.run( "kubectl get pods --all-namespaces | grep Running | wc -l", hide="out").stdout.strip() print(f"Running pods: {pod_total}") print(f"Maximum pods: {pod_capacity}") print(f"Total nodes: {len(nodes['items'])}") @invoke.task def ansible_playbook(c, name, extra="", verbosity=1): with c.cd("deploy/"): c.run(f"ansible-playbook {name} {extra} -{'v'*verbosity}") ns = invoke.Collection() ns.add_collection(kubesae.image) ns.add_collection(kubesae.aws) ns.add_collection(kubesae.deploy) ns.add_collection(kubesae.pod) ns.add_collection(kubesae.info) ns.add_task(staging) ns.add_task(production) ns.add_task(pod_stats) ns.add_task(ansible_playbook, "playbook") ns.configure({ "app": "trafficstops_app", "aws": { "region": "us-east-2",
filepath = pathlib.Path(filepath) if not filepath.is_file(): log("profile", f"no such script {filepath!s}", LogLevel.ERROR) else: if calltree: log("profile", f"profiling script {filepath!s} calltree") ctx.run((f"python -m cProfile -o .profile.cprof {filepath!s}" " && pyprof2calltree -k -i .profile.cprof" " && rm -rf .profile.cprof")) else: log("profile", f"profiling script {filepath!s}") ctx.run(f"vprof -c cmhp {filepath!s}") ns = invoke.Collection( build_docs, vendoring, news, release, clean_mdchangelog, profile, typecheck, build, get_next_version, bump_version, generate_news, get_changelog, tag_release, )
import importlib

import invoke

# Dotted module paths that each contribute a set of invoke tasks.
task_module_names = [
    'base.invoke.tasks.compile',
    'base.invoke.tasks.docker',
    'base.invoke.tasks.python',
    'base.invoke.tasks.update',
]

# Create our task collection
tasks = invoke.Collection()

# Populate it with the tasks in each module
for module_name_current in task_module_names:
    module_loaded = importlib.import_module(module_name_current)

    # Add each task from that module.  Collection.task_names maps each
    # task name to its aliases; iterating the mapping directly yields the
    # names, so the explicit .keys() call was redundant.
    module_collection = invoke.Collection.from_module(module_loaded)
    for task_name_current in module_collection.task_names:
        tasks.add_task(module_collection[task_name_current], task_name_current)

# Invoke expects the default collection to be named 'ns'
ns = tasks
import invoke

from . import admin, package


def add_tasks(module, prefix=None):
    """Merge every task defined in *module* into the shared namespace.

    Raises ValueError if a task name is already registered.
    """
    if prefix is None:
        prefix = module.__name__.rsplit('.', 1)[-1]
    sub_collection = invoke.Collection.from_module(module)
    for task_name in sub_collection.task_names:
        if task_name in namespace.task_names:
            raise ValueError('duplicate task {}'.format(task_name))
        namespace.add_task(sub_collection[task_name], name=task_name)


namespace = invoke.Collection()
add_tasks(admin)
add_tasks(package)
"ContextKeyType": "boolean", "ContextKeyValues": ["true"], }, { "ContextKeyName": "aws:requestedregion", "ContextKeyType": "string", "ContextKeyValues": [region], }, ], ) results = {} ev_results = perms["EvaluationResults"] for res in ev_results: arn = res["EvalResourceName"] project = arn_to_project[arn] if res["EvalDecision"] == "allowed": results[project] = True else: for rsr in res["ResourceSpecificResults"]: if rsr["EvalResourceName"] == arn: results[project] = rsr["EvalResourceDecision"] == "allowed" break return results projects = invoke.Collection("projects", ls)
utc=utc, env=env, releases_only=releases_only, profile=profile, ) format_projects(projects, author=author, contains=True, sort=sort, permissions=permissions) def get_merged_commits_from_ticket(ticket_id: str) -> Mapping[str, List[str]]: # Returns a mapping of repo_name: [commits] for PRs linked to this ticket tracker = get_tracker() prs = tracker.get_linked_prs(ticket_id) if not prs: utils.warning("No PRs linked to this ticket\n") merged_pr_commits = {} for pr in prs: if pr.state is PullRequestState.OPEN: utils.warning(f"{pr.id} is still open\n") elif pr.state is PullRequestState.MERGED: merged_pr_commits.setdefault(pr.repo, []).append(pr.merge_commit) return merged_pr_commits tickets = invoke.Collection("tickets", find)
# -*- coding: utf-8 -*-
"""
Invoke commands for common tasks.
"""
import dotenv
import invoke

from . import docs, generate, release

# Load environment variables from a .env file before any task runs.
dotenv.load_dotenv()

namespace = invoke.Collection(docs, generate, release)
def namespace():
    """ Create invoke task namespace """

    class adict(object):
        """ attribute dict """
        # pylint: disable = invalid-name, missing-docstring

        def __init__(self, *args, **kwargs):
            # Backing dict; all attribute access is delegated to it.
            self.__x__ = dict(*args, **kwargs)

        def __getitem__(self, name):
            return self.__x__[name]

        def __getattr__(self, name):
            # Refuse __setstate__ explicitly so pickle/copy protocols do not
            # get a spurious truthy attribute from the fallback lookup.
            if name == '__setstate__':
                raise AttributeError(name)
            try:
                return self.__x__[name]
            except KeyError:
                raise AttributeError(name)

        def items(self):
            return self.__x__.items()

    import os as _os
    import sys as _sys

    from . import _shell

    def fail(msg):
        """ Exit with message """
        # NOTE: _invoke is imported further down in this function; it is
        # resolved lazily here, at call time, so this is safe.
        _sys.stderr.write('Error: %s\n' % (msg,))
        raise _invoke.Exit(1)

    # Configuration tree handed to invoke via Collection.configure().
    env = adict(
        package='gensaschema',
        test=adict(ignore=[]),
        doc=adict(
            userdoc="docs/userdoc",
            website=adict(
                source="docs/website",
                target="dist/website",
            ),
            sphinx=adict(
                build='docs/_userdoc/_build',
                source='docs/_userdoc',
            ),
        ),
        pypi=adict(
            # repository='https://test.pypi.org/legacy/',
            repository='https://upload.pypi.org/legacy/',
            username='******',
        ),
        # Export every public helper from _shell into the config.
        shell=adict((key, value) for key, value in vars(_shell).items()
                    if not key.startswith('_')),
        c=_shell.command,
        q=lambda x: _shell.command('%s', x),
        fail=fail,
    )

    # Make the package's parent directory importable for the task modules.
    _sys.path.insert(0, _os.path.dirname(
        _os.path.dirname(_os.path.abspath(__file__))
    ))

    class Vars(object):
        """ Submodules container """
        # The imports are class attributes, so vars(Vars) collects the task
        # submodules below without polluting the function namespace.
        from . import (  # noqa
            build,
            check,
            clean,
            compile,
            doc,
            test,
            upload,
        )

    import invoke as _invoke
    # Every non-dunder attribute of Vars is a task submodule.
    result = _invoke.Collection(*[value for key, value in vars(Vars).items()
                                  if not key.startswith('__')])
    result.configure(env)
    return result
print("Building docs...") ctx.run("sphinx-apidoc {0}".format(" ".join(args))) @invoke.task def clean_mdchangelog(ctx): changelog = ROOT / "CHANGELOG.md" content = changelog.read_text() content = re.sub( r"([^\n]+)\n?\s+\[\\(#\d+)\]\(https://github\.com/sarugaku/[\w\-]+/issues/\d+\)", r"\1 \2", content, flags=re.MULTILINE, ) changelog.write_text(content) ns = invoke.Collection( typecheck, build_docs, release, clean_mdchangelog, build, get_next_version, bump_version, generate_news, get_changelog, tag_release, test, )
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function

import invoke

from . import check

# Root invoke namespace exposing the `check` task module.
ns = invoke.Collection(check)
if isinstance(items, str): items = [items] for item in items: if verbose: print("Removing {}".format(item)) shutil.rmtree(item, ignore_errors=True) # rmtree doesn't remove bare files try: os.remove(item) except FileNotFoundError: pass # create namespaces namespace = invoke.Collection(plugin=plugin_tasks) namespace_clean = invoke.Collection('clean') namespace.add_collection(namespace_clean, 'clean') ##### # # pytest, nox, pylint, and codecov # ##### @invoke.task() def pytest(context, junit=False, pty=True, base=False, isolated=False): """Run tests and code coverage using pytest""" with context.cd(TASK_ROOT_STR): command_str = 'pytest '
def debian(c):
    """An ephemeral container with which to run sysadmin tasks on the cluster

    Usage: inv pod.debian
    """
    # Static command string: the f-prefix here was extraneous (no
    # placeholders), which linters flag as F541.
    c.run(
        "kubectl run -it debian --image=debian:bullseye-slim --restart=Never -- bash"
    )


@invoke.task
def clean_migrations(c):
    """Removes all migration jobs

    Usage: inv pod.clean-migrations
    """
    c.run(f"kubectl delete pods -n {c.config.namespace} -ljob-name=migrate")


# TODO: Implement database related tasks
@invoke.task
def get_current_database(c):
    """Stub: report the currently configured database (not yet implemented)."""
    pass


pod = invoke.Collection("pod")
pod.add_task(shell, "shell")
pod.add_task(clean_debian, "clean_debian")
pod.add_task(debian, "debian")
pod.add_task(clean_migrations, "clean_migrations")
Make sure you satisfy the following Python module requirements if you are trying to publish a release to PyPI: - twine >= 1.11.0 - wheel >= 0.31.0 - setuptools >= 39.1.0 """ import invoke from plugins.ext_test import ( tasks as ext_test_tasks, ) from plugins.template import ( tasks as template_tasks, ) # create namespaces namespace = invoke.Collection( ext_test=ext_test_tasks, template=template_tasks, ) namespace_clean = invoke.Collection('clean') namespace.add_collection(namespace_clean, 'clean') ##### # # pytest, pylint, and codecov # ##### @invoke.task(pre=[ext_test_tasks.pytest]) @invoke.task() def pytest(_): """Run tests and code coverage using pytest"""
) continue break git_push_command = "git push --tags" report.info(ctx, "publish", f"pushing git tags") if not draft: ctx.run(git_push_command) except KeyboardInterrupt: print() report.error(ctx, "publish", "aborting publish!") git_remove_tag_command = f"git tag -d {current_version!s}" report.warn(ctx, "publish", "removing git tags") if not draft: ctx.run(git_remove_tag_command) git_reset_command = f"git reset --soft HEAD^" report.warn(ctx, "publish", "softly reseting commit") if not draft: ctx.run(git_reset_command) namespace = invoke.Collection(build, clean, publish, docs, package, profile) namespace.configure( { "metadata": metadata, "directory": BASE_DIR, "package": {"directory": BASE_DIR / "src" / metadata["package_name"]}, "docs": {"directory": BASE_DIR / "docs"}, } )
@invoke.task
def build_docs(ctx):
    """Generate API documentation with sphinx-apidoc for the current version."""
    version = _read_version()
    minor_parts = [str(part) for part in version.release[:2]]
    docs_dir = (ROOT / 'docs').as_posix()
    if not docs_dir.endswith('/'):
        docs_dir = '{0}/'.format(docs_dir)
    # Assemble the sphinx-apidoc option list in one literal.
    opts = [
        "--ext-autodoc", "--ext-viewcode", "-o", docs_dir,
        "-A", "'Dan Ryan <*****@*****.**>'",
        "-R", str(version),
        "-V", ".".join(minor_parts),
        "-e", "-M", "-F", f"src/{PACKAGE_NAME}",
    ]
    print("Building docs...")
    ctx.run("sphinx-apidoc {0}".format(" ".join(opts)))


@invoke.task
def clean_mdchangelog(ctx):
    """Collapse markdown issue links in CHANGELOG.md into plain references."""
    changelog_path = ROOT / "CHANGELOG.md"
    text = changelog_path.read_text()
    text = re.sub(
        r"([^\n]+)\n?\s+\[\\(#\d+)\]\(https://github\.com/sarugaku/[\w\-]+/issues/\d+\)",
        r"\1 \2",
        text,
        flags=re.MULTILINE)
    changelog_path.write_text(text)


ns = invoke.Collection(vendoring, release, build_docs, clean_mdchangelog)
''' Adds a new remote to your git repo and pushes to Github ''' if GITUSER: ctx.run("git remote add origin https://github.com:{0}/{1}.git".format( GITUSER, REPONAME)) try: print('Pushing to github ..') ctx.run("git push -u origin master") except Exception as e: print( 'Could not push to github. ERROR: Repository not found. Make sure to add the repo to your github account. ' ) else: print('No GitHub username specified during setup') col = invoke.Collection(install, addgit, addremote) ex = invoke.executor.Executor(col) # setup intial git repo creategit = '{{ cookiecutter.create_git_repo }}' if creategit in ['yes', 'y']: ex.execute('addgit') # exists on Github? exists_github = '{{ cookiecutter.exists_on_github }}' if exists_github in ['yes', 'y']: ex.execute('addremote') print('Please add {0} into your PYTHONPATH!'.format(PYTHONDIR))
if isinstance(items, str): items = [items] for item in items: if verbose: print("Removing {}".format(item)) shutil.rmtree(item, ignore_errors=True) # rmtree doesn't remove bare files try: os.remove(item) except FileNotFoundError: pass # create namespaces namespace = invoke.Collection() namespace_clean = invoke.Collection('clean') namespace.add_collection(namespace_clean, 'clean') ##### # # pytest, tox, pylint, and codecov # ##### @invoke.task def pytest(context, junit=False, pty=True, append_cov=False): """Run tests and code coverage using pytest""" ROOT_PATH = TASK_ROOT.parent.parent
# -*- coding=utf-8 -*-
# Copied from pip's vendoring process
# see https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/tasks/__init__.py
import re

import invoke

from pathlib import Path

from . import vendoring, release
from .vendoring import vendor_passa

# NOTE(review): Path(".").parent is still ".", so this resolves to the
# current working directory -- confirm that is the intended root.
ROOT = Path(".").parent.parent.absolute()

ns = invoke.Collection(vendoring, release, release.clean_mdchangelog,
                       vendor_passa.vendor_passa)
def queue_command(endpoint, message=None, event=None, user_info=None):
    """Serialize a command payload and push it onto the rosetta command queue."""
    command = {
        "endpoint": endpoint,
        "message": message,
        "event": event,
        "user_info": user_info
    }
    redis_client.lpush("rosetta:commands:queue", json.dumps(command))


def process_queued_responses(slack_client):
    """Drain queued responses, posting each one to Slack.

    The queue length is snapshotted up front, so responses enqueued while
    draining are left for the next call.  Each response is best-effort: a
    malformed or unpostable entry is skipped rather than aborting the drain.
    (The original implementation used a manual counter incremented in both
    the try and except branches; range() expresses the same loop directly.)
    """
    responses_to_process = redis_client.llen("rosetta:responses:queue")
    for _ in range(responses_to_process):
        try:
            response = json.loads(
                redis_client.rpop("rosetta:responses:queue").decode("utf-8"))
            slack_client.api_call("chat.postMessage", as_user=True, **response)
        except Exception:
            # Best-effort drain: drop this response and keep going.
            continue


namespace = invoke.Collection("rosetta")
namespace.add_task(bot, name="bot")
""" import logging import os import sys import boto3 import invoke from catapult.deploy import deploy from catapult.projects import projects from catapult.release import release from catapult.tickets import tickets __version__ = "0.1" root = invoke.Collection() class _Executor(invoke.Executor): def expand_calls(self, calls, args=None, kwargs=None): """ Expand a list of `.Call` objects into a near-final list of same. The default implementation of this method simply adds a task's pre/post-task list before/after the task itself, as necessary. Subclasses may wish to do other things in addition (or instead of) the above, such as multiplying the `calls <.Call>` by argument vectors or similar. """ ret = [] for call in calls: # Normalize to Call (this method is sometimes called with pre/post
start = resolve_range(lx) end = resolve_range(rx) if resolve: text = f"{start.hex}...{end.hex}" else: changelog = utils.changelog(repo, end, start) text = changelog.text if verbose else changelog.short_text print(text) def release_contains(repo: git.Repository, release: Release, commit_oid: git.Oid, name: str): if not release.commit: utils.warning(f"{name} has a null commit ref\n") return "?" release_oid = git.Oid(hex=release.commit) try: in_release = utils.commit_contains(repo, release_oid, commit_oid) except utils.CommitNotFound as e: utils.warning(f"Error: [{repr(e)}], Project: [{name}]\n") in_release = "?" return in_release release = invoke.Collection("release", current, ls, new, find, get, log)
# -*- coding=utf-8 -*-
# Copied from pip's vendoring process
# see https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/tasks/__init__.py
import invoke

from . import vendoring, release
from .vendoring import vendor_passa

# Root namespace: vendoring + release task modules plus the passa vendor task.
ns = invoke.Collection(vendoring, release, vendor_passa.vendor_passa)
# -*- coding=utf-8 -*-
# Copied from pip's vendoring process
# see https://github.com/pypa/pip/blob/95bcf8c5f6394298035a7332c441868f3b0169f4/tasks/__init__.py
import invoke

from . import vendoring

# Root namespace exposing only the vendoring task module.
ns = invoke.Collection(vendoring)
""" Invoke tasks to help with pytest development and release process. """ import invoke from . import generate, vendoring ns = invoke.Collection( generate, vendoring )
@invoke.task(
    help={
        "name": "project's name",
        "env": "name of the environment where the app will be deployed",
        "bucket": "name of the bucket used to store the deploys",
        "last": "return only the last n deploys",
        "contains": "commit hash or revision of a commit, eg `bcc31bc`, `HEAD`, `some_branch`",
        "utc": "list timestamps in UTC instead of local timezone",
        "profile": "name of AWS profile to use",
    })
@utils.require_2fa
def ls(_, name, env, bucket=None, last=None, contains=None, utc=False, profile=None):
    """
    Show all the project's deploys.
    """
    # Fall back to the bucket configured for this environment.
    target_bucket = (bucket if bucket is not None
                     else utils.get_config()["deploy"][env]["s3_bucket"])
    list_releases(name, last, contains, target_bucket, utc=utc, profile=profile)


deploy = invoke.Collection("deploy", start, current, ls)
"deploy/requirements.yml") else "requirements.yaml" with c.cd("deploy/"): c.run(f"ansible-galaxy install -f -r '{req_file}' -p roles/") @invoke.task(pre=[install_requirements], default=True) def ansible_deploy(c, env=None, tag=None): """Deploy K8s application. Params: env: The target ansible host ("staging", "production", etc ...) tag: The image tag in the registry to deploy Usage: inv deploy --env=<ENVIRONMENT> --tag=<TAG> """ if env is None: env = c.config.env if tag is None: tag = c.config.tag playbook = "deploy.yaml" if os.path.exists( "deploy/deploy.yaml") else "deploy.yml" with c.cd("deploy/"): c.run( f"ansible-playbook {playbook} -l {env} -e k8s_container_image_tag={tag} -vv" ) deploy = invoke.Collection("deploy") deploy.add_task(install_requirements, "install") deploy.add_task(ansible_deploy, "deploy")
print('\nAborted!') return arg_display = ' '.join(f'"{n}"' for n in artifacts) ctx.run(f'twine upload --repository="{repo}" {arg_display}') version = _prebump(version, prebump) _write_version(version) ctx.run(f'git commit -am "Prebump to {version}"') @invoke.task def build_docs(ctx): _current_version = _read_version() minor = [str(i) for i in _current_version[:2]] docs_folder = (ROOT / 'docs').as_posix() if not docs_folder.endswith('/'): docs_folder = '{0}/'.format(docs_folder) args = ["--ext-autodoc", "--ext-viewcode", "-o", docs_folder] args.extend( ["-A", "'{{ cookiecutter.author }} <{{ cookiecutter.email }}>'"]) args.extend(["-R", _current_version]) args.extend(["-V", ".".join(minor)]) args.extend(["-e", "-M", "-F", f"src/{PACKAGE_NAME}"]) print("Building docs...") ctx.run("sphinx-apidoc {0}".format(" ".join(args))) ns = invoke.Collection(vendoring, release)
@invoke.task
def develop_docs(c):
    """
    Build Sphinx HTML docs and open them in the browser with hot reloading.

    The browser opens after 2 seconds.
    """
    c.run(
        'sphinx-autobuild -b html --open-browser --delay 2 docs/source/ docs/build/html/'
    )


################################
# Organise Invoke's namespaces #
################################

# The main namespace MUST be named `namespace` or `ns`.
# See: http://docs.pyinvoke.org/en/1.2/concepts/namespaces.html
namespace = invoke.Collection()
for top_level_task in (coverage_report, flake8_report, linters,
                       set_precommit, tests):
    namespace.add_task(top_level_task)

docs_namespace = invoke.Collection('docs')
docs_namespace.add_task(build_docs, 'build')
docs_namespace.add_task(develop_docs, 'develop')
namespace.add_collection(docs_namespace)