def _get_image_id(ctx, name, commit):
    """Pull the project's Docker image for *commit* and return its digest.

    The image reference is built from the configured Docker repository and
    image prefix plus *name*, tagged ``ref-<commit>``. Returns the value
    after ``Digest:`` in the ``docker pull`` output, or ``None`` when no
    digest line is present.
    """
    repository = utils.get_config()["release"]["docker_repository"]
    prefix = utils.get_config()["release"]["docker_image_prefix"]
    image = f"{repository}/{prefix}{name}:ref-{commit}"
    pulled = ctx.run(f"docker pull {image}", hide="out")
    for output_line in pulled.stdout.split("\n"):
        if not output_line.startswith("Digest:"):
            continue
        # "Digest: sha256:..." -> keep everything after the first colon.
        return output_line.partition(":")[2].strip()
    return None
def get_releases(client, key, since=None, bucket=None):
    """
    Yield every release in the project's history, newest first.

    Arguments:
        client (botocore.client.S3): client for AWS S3.
        key (str): object's key.
        since (int or None): exclude versions older than this version.
        bucket (str or None): bucket's name; defaults to the configured
            release bucket.

    Yield:
        Release: a release in the project's history.
    """
    if bucket is None:
        bucket = utils.get_config()["release"]["s3_bucket"]

    # Put the "latest" marker first, then order by modification time,
    # most recent first.
    def newest_first(obj_version):
        return _DATETIME_MAX if obj_version["IsLatest"] else obj_version["LastModified"]

    ordered = sorted(_get_versions(client, bucket, key), key=newest_first, reverse=True)
    for obj_version in ordered:
        try:
            release = _get_release(client, bucket, key, obj_version["VersionId"])
        except InvalidRelease as exc:
            # skip invalid releases in object history
            LOG.warning(f"invalid release object: {exc}")
            continue
        if since and release.version < since:
            continue
        yield release
def ls(_, name, env, bucket=None, last=None, contains=None):
    """
    Show all the project's deploys.
    """
    # Fall back to the environment's configured deploy bucket.
    target_bucket = (
        bucket
        if bucket is not None
        else utils.get_config()["deploy"][env]["s3_bucket"]
    )
    list_releases(name, last, contains, target_bucket)
def _get_image_id(ctx, commit: git.Oid, *, name: str, image_name: Optional[str]):
    """Pull the Docker image built for *commit* and return its digest.

    When *image_name* is None it is derived from the configured image
    prefix plus *name*. Returns the value after ``Digest:`` in the
    ``docker pull`` output, or ``None`` if no digest line appears.
    """
    repository = utils.get_config()["release"]["docker_repository"]
    if image_name is None:
        prefix = utils.get_config()["release"]["docker_image_prefix"]
        image_name = f"{prefix}{name}"
    image = f"{repository}/{image_name}:ref-{commit.hex}"
    LOG.info(f"Pulling {image}")
    pulled = ctx.run(f"docker pull {image}", hide="out")
    for output_line in pulled.stdout.split("\n"):
        if not output_line.startswith("Digest:"):
            continue
        # "Digest: sha256:..." -> keep everything after the first colon.
        return output_line.partition(":")[2].strip()
    return None
def current(_, name, env, bucket=None, profile=None):
    """
    Show current running version.
    """
    s3 = utils.s3_client(profile)
    deploy_bucket = (
        bucket
        if bucket is not None
        else utils.get_config()["deploy"][env]["s3_bucket"]
    )
    # Releases are yielded newest-first, so the first one is the current deploy.
    deployed = next(get_releases(s3, name, bucket=deploy_bucket), None)
    if not deployed:
        utils.fatal("Release does not exist")
    else:
        utils.printfmt(deployed)
def get_git() -> BaseGit:
    """Return the configured git-host client, creating it on first use.

    The instance is memoized in the module-level GIT_HOST so repeated
    calls share the same client.
    """
    global GIT_HOST
    if not GIT_HOST:
        git_type = utils.get_config().get("git", {}).get("provider")
        if git_type not in GIT_CLASSES:
            utils.fatal(
                f"Unsupported git provider type {git_type!r} - try one of {sorted(GIT_CLASSES)!r}"
            )
        GIT_HOST = GIT_CLASSES[git_type]()
    return GIT_HOST
def ls(_):
    """
    List all the projects managed with catapult.
    """
    client = utils.s3_client()
    config = utils.get_config()
    bucket = config["release"]["s3_bucket"]
    deploys = config["deploy"]

    listing = client.list_objects_v2(Bucket=bucket)
    # Object keys in the release bucket are the project names.
    names = sorted(item["Key"] for item in listing.get("Contents", []))

    rows = []
    for name in names:
        try:
            release = get_release(client, bucket, name)
        except InvalidRelease:
            continue
        row = {
            "Name": name,
            "Latest Release": f"v{release.version} {release.timestamp} ({release.commit})",
        }
        # One extra column per configured deploy environment.
        for env_name, cfg in deploys.items():
            env_version, env_commit, env_timestamp = get_deployed_version(
                client, cfg["s3_bucket"], name
            )
            row[env_name.title()] = f"v{env_version} {env_timestamp} ({env_commit})"
        rows.append(row)

    utils.printfmt(rows)
def get_tracker() -> BaseTracker:
    """Return the configured issue-tracker client, creating it on first use.

    The instance is memoized in the module-level TRACKER so repeated calls
    share the same client. Exits via ``utils.fatal`` when the configured
    provider is not one of TRACKER_CLASSES.
    """
    global TRACKER
    if TRACKER:
        return TRACKER
    tracker_type = utils.get_config().get("issue_tracker", {}).get("provider")
    if tracker_type not in TRACKER_CLASSES:
        # !r matches get_git()'s message style and keeps a None/empty
        # provider value visible in the error output.
        utils.fatal(
            f"Unsupported tracker type {tracker_type!r} - try one of {sorted(TRACKER_CLASSES)!r}"
        )
    tracker_cls = TRACKER_CLASSES[tracker_type]
    TRACKER = tracker_cls()
    return TRACKER
def get_release(client, key, version=None, bucket=None):
    """
    Fetch a specific release.

    Arguments:
        client (botocore.client.S3): client for AWS S3.
        key (str): object's key.
        version (int or None): version number; ``None`` selects the
            newest release.
        bucket (str or None): bucket's name; defaults to the configured
            release bucket.

    Returns:
        Release or None: the release identified by the given version.
        `None` if the version does not exist.
    """
    if bucket is None:
        bucket = utils.get_config()["release"]["s3_bucket"]
    # Releases arrive newest-first, so with version=None the first match wins.
    matches = (
        rel
        for rel in get_releases(client, key, bucket=bucket)
        if version is None or rel.version == version
    )
    return next(matches, None)
def start(
    _,
    name,
    env,
    version=None,
    bucket=None,
    dry=False,
    yes=False,
    rollback=False,
    profile=None,
):
    """
    Deploy a release on an environment.

    Arguments:
        name: project name (release object key).
        env: target deploy environment name from the config.
        version: release version to deploy; latest release when None.
        bucket: deploy bucket; defaults to the environment's configured bucket.
        dry: print the release that would be deployed and stop.
        yes: skip the interactive confirmation prompts.
        rollback: required flag to allow deploying an older version.
        profile: AWS profile used for the S3 client.
    """
    client = utils.s3_client(profile)
    repo = utils.git_repo()
    # Resolve the release to deploy: newest one, or the requested version.
    if version is None:
        release = next(get_releases(client, name), None)
    else:
        release = get_release(client, name, int(version))
    if release is None:
        utils.fatal("Release not found")
    if bucket is None:
        bucket = utils.get_config()["deploy"][env]["s3_bucket"]
    # Most recent deploy in this environment (None on the first ever deploy).
    last_deploy = next(get_releases(client, name, bucket=bucket), None)
    last_deployed_version = int(last_deploy.version) if last_deploy else 0
    # Collect every release between the deployed version and the target,
    # so their commit lists can be merged for changelog filtering.
    if version is not None:
        since = min(int(version), last_deployed_version)
    else:
        since = last_deployed_version
    releases = list(get_releases(client, name, since=since))
    # the field `commits` is not present in all documents as it was introduced
    # in a later version. if any of the releases doesn't track them, we'll
    # skip the commit filtering to avoid not showing commits in the changelog.
    if any(rel.commits is None for rel in releases):
        commits = None
    else:
        commits = [
            commit for rel in releases if rel.commits for commit in rel.commits
        ]
    if last_deploy is None:
        # first deploy is always None
        changelog = utils.changelog(
            repo, release.commit, None, keep_only_commits=commits
        )
        changelog_text = changelog.short_text
        is_rollback = release.rollback
    else:
        # create a changelog from the latest deploy commit
        changelog = utils.changelog(
            repo,
            git.Oid(hex=release.commit),
            git.Oid(hex=last_deploy.commit),
            keep_only_commits=commits,
        )
        changelog_text = changelog.short_text
        is_rollback = changelog.rollback
    # Record whether the deploy was triggered by CI or a human.
    action_type = ActionType.automated if config.IS_CONCOURSE else ActionType.manual
    # Stamp the release with deploy-time metadata before uploading it.
    release = dataclasses.replace(
        release,
        changelog=changelog_text,
        timestamp=datetime.now(),
        author=utils.get_author(repo, git.Oid(hex=release.commit)),
        rollback=is_rollback,
        action_type=action_type,
        commits=commits,
    )
    utils.printfmt(release)
    if dry:
        return
    if release.rollback:
        # Warn loudly and require the explicit --rollback flag.
        commit_count = len(changelog.logs)
        utils.warning(":warning: This is a rollback! :warning:\n")
        utils.warning(
            f":warning: You are rolling back from {name} v{last_deployed_version} to v{version} :warning:\n"
        )
        utils.warning(
            f":warning: This will remove the above {commit_count} commits from {env} :warning:\n"
        )
        if not rollback:
            utils.error("Missing flag --rollback\n")
            utils.fatal("Aborted!")
    if not yes:
        # Rollbacks get an extra confirmation before the standard one.
        if release.rollback:
            ok = utils.confirm(
                "Are you sure you want to start a rollback deployment?",
                style=utils.TextStyle.yellow,
            )
            if not ok:
                utils.fatal("Aborted!")
        ok = utils.confirm("Are you sure you want to start this deployment?")
        if not ok:
            utils.fatal("Aborted!")
    put_release(client, bucket, name, release)
    utils.success("Started new deployment :rocket:\n")
def _get_bucket():
    """Return the release bucket name.

    Prefers the loaded config; falls back to the CATAPULT_BUCKET_RELEASES
    environment variable when no config is available.
    """
    config = utils.get_config()
    if not config:
        return os.environ["CATAPULT_BUCKET_RELEASES"]
    return config["release"]["s3_bucket"]
def list_projects(
    contains, only, permissions, utc, env, releases_only, profile
) -> List[Project]:
    """
    Collect Project rows for every project in the release bucket.

    Arguments:
        contains: commit-ish; when set, each row is marked with whether it
            contains that commit.
        only: regex string used to filter project names.
        permissions: when truthy, check S3 permissions per project/bucket.
        utc: keep timestamps in UTC instead of the local timezone.
        env: comma-separated environment names to include; None means all.
        releases_only: emit only release rows, skipping deploy rows.
        profile: AWS profile used for the S3/IAM clients.

    Returns:
        List[Project]: one row per release and (optionally) per deploy.
    """
    contains_oid = None
    repo = None
    if contains:
        repo = utils.git_repo()
        contains_oid = utils.revparse(repo, contains)
        if contains_oid not in repo:
            raise Exception(f"Commit {contains_oid} does not exist in repo")
    if only is not None:
        only = re.compile(only)
    if env is not None:
        env = set(env.split(","))
    client = utils.s3_client(profile)
    config = utils.get_config()
    release_bucket = config["release"]["s3_bucket"]
    deploys = config["deploy"]
    resp = client.list_objects_v2(Bucket=release_bucket)
    # Object keys in the release bucket are the project names.
    project_names = sorted(data["Key"] for data in resp.get("Contents", []))
    can_release = {}
    can_deploy = {}
    if permissions:
        # Per-project write permissions for the release bucket and each
        # environment's deploy bucket.
        iam_client = utils.iam_client(profile)
        can_release = check_perms(iam_client, release_bucket, project_names, profile)
        can_deploy = {
            env_name: check_perms(iam_client, cfg["s3_bucket"], project_names, profile)
            for env_name, cfg in deploys.items()
        }
    _projects = []
    now = datetime.now(tz=timezone.utc)
    localzone = get_localzone()
    for name in project_names:
        if only and only.search(name) is None:
            continue
        try:
            release = fetch_release(client, release_bucket, name)
        except InvalidRelease:
            continue
        timestamp_utc = release.timestamp
        # Age is always computed in UTC; display timestamp may be localized.
        timestamp = timestamp_utc if utc else timestamp_utc.astimezone(localzone)
        if releases_only or env is None:
            _projects.append(
                Project(
                    name=name,
                    version=release.version,
                    behind=0,
                    commit=release.commit,
                    timestamp=timestamp,
                    age=now - timestamp_utc,
                    type=ProjectType.release,
                    contains=(
                        release_contains(repo, release, contains_oid, name)
                        if contains
                        else None
                    ),
                    env_name="",
                    permission=can_release.get(name),
                    action_type=release.action_type,
                    author=release.author,
                )
            )
        if releases_only:
            continue
        # One deploy row per environment the project is deployed in.
        for env_name, cfg in deploys.items():
            try:
                deploy = fetch_release(client, cfg["s3_bucket"], name)
            except InvalidRelease:
                continue
            timestamp_utc = deploy.timestamp
            timestamp = timestamp_utc if utc else timestamp_utc.astimezone(localzone)
            if not env or env_name in env:
                _projects.append(
                    Project(
                        name=name,
                        version=deploy.version,
                        # How many versions this environment lags the release.
                        behind=release.version - deploy.version,
                        commit=deploy.commit,
                        timestamp=timestamp,
                        age=now - timestamp_utc,
                        type=ProjectType.deploy,
                        env_name=env_name,
                        contains=(
                            release_contains(repo, deploy, contains_oid, name)
                            if contains
                            else None
                        ),
                        permission=can_deploy.get(env_name, {}).get(name),
                        action_type=deploy.action_type,
                        author=deploy.author,
                    )
                )
    return _projects
def new(
    ctx,
    name,
    commit=None,
    version=None,
    dry=False,
    yes=False,
    image_name=None,
    rollback=False,
):
    """
    Create a new release.

    Arguments:
        name: project name (release object key).
        commit: commit to release; defaults to the repo's last commit.
        version: release version; defaults to latest version + 1.
        dry: print the release that would be created and stop.
        yes: skip the interactive confirmation prompts.
        image_name: Docker image name override; defaults to *name*.
        rollback: required flag to allow releasing an older commit.
    """
    repo = utils.git_repo()
    client = utils.s3_client()
    latest = next(get_releases(client, name), None)
    if commit is None:
        # get last commit
        commit = next(utils.git_log(repo), None)
        commit = commit and commit.hex
    if version is None:
        # create next version
        version = 1 if latest is None else latest.version + 1
    else:
        version = int(version)
    # The release must reference a pullable Docker image for this commit.
    image_id = _get_image_id(ctx, image_name or name, commit)
    if image_id is None:
        LOG.critical("image ID not found")
        sys.exit(1)
    changelog = utils.changelog(repo, commit, latest and latest.commit)
    release = Release(
        version=version,
        commit=commit,
        changelog=changelog.text,
        version_id="",
        image=image_id,
        timestamp=datetime.now(),
        author=utils.get_author(repo),
        rollback=changelog.rollback,
    )
    utils.printfmt(release)
    if dry:
        return
    if release.rollback:
        # Releasing an older commit requires the explicit --rollback flag.
        utils.warning("this is a rollback! :warning:\n")
        if not rollback:
            utils.warning("missing flag --rollback\n")
            utils.error("aborted!\n")
            sys.exit(1)
    if not yes:
        # Rollbacks get an extra confirmation before the standard one.
        if release.rollback:
            ok = utils.confirm(
                "sure you want to start a rollback?", style=utils.TextStyle.warning
            )
            if not ok:
                utils.error("aborted!\n")
                sys.exit(1)
        ok = utils.confirm("sure you want to create this release?")
        if not ok:
            sys.exit(1)
    put_release(client, utils.get_config()["release"]["s3_bucket"], name, release)
    utils.success("created new release :tada:\n")
def start(_, name, env, version=None, bucket=None, dry=False, yes=False, rollback=False):
    """
    Deploy a release on an environment.

    Arguments:
        name: project name (release object key).
        env: target deploy environment name from the config.
        version: release version to deploy; latest release when None.
        bucket: deploy bucket; defaults to the environment's configured bucket.
        dry: print the release that would be deployed and stop.
        yes: skip the interactive confirmation prompts.
        rollback: required flag to allow deploying an older version.
    """
    client = utils.s3_client()
    repo = utils.git_repo()
    # Resolve the release to deploy: newest one, or the requested version.
    if version is None:
        release = next(get_releases(client, name), None)
    else:
        release = get_release(client, name, int(version))
    if release is None:
        LOG.critical("Release not found")
        sys.exit(1)
    if bucket is None:
        bucket = utils.get_config()["deploy"][env]["s3_bucket"]
    # Most recent deploy in this environment (None on the first ever deploy).
    last_deploy = next(get_releases(client, name, bucket=bucket), None)
    if last_deploy is None:
        # first deploy is always None
        changelog_text = release.changelog
        is_rollback = release.rollback
    else:
        # create a changelog from the latest deploy commit
        changelog = utils.changelog(repo, release.commit, last_deploy.commit)
        changelog_text = changelog.text
        is_rollback = changelog.rollback
    # Stamp the release with deploy-time metadata before uploading it.
    release = dataclasses.replace(
        release,
        changelog=changelog_text,
        timestamp=datetime.now(),
        author=utils.get_author(repo),
        rollback=is_rollback,
    )
    utils.printfmt(release)
    if dry:
        return
    if release.rollback:
        # Rollbacks require the explicit --rollback flag.
        utils.warning("This is a rollback! :warning:\n")
        if not rollback:
            utils.warning("Missing flag --rollback\n")
            utils.error("Aborted!\n")
            sys.exit(1)
    if not yes:
        # Rollbacks get an extra confirmation before the standard one.
        if release.rollback:
            ok = utils.confirm(
                "Are you sure you want to start a rollback deployment?",
                style=utils.TextStyle.warning,
            )
            if not ok:
                utils.error("Aborted!\n")
                sys.exit(1)
        ok = utils.confirm("Are you sure you want to start this deployment?")
        if not ok:
            utils.error("Aborted!\n")
            sys.exit(1)
    put_release(client, bucket, name, release)
    utils.success("Started new deployment :rocket:\n")
def ls(_, contains=None, sort=None, reverse=False, only=None):
    """
    List all the projects managed with catapult.

    Optionally pass a full SHA-1 hash of a commit in the current repo, and
    each release/deploy will be marked with 'Y' if it contains that commit,
    'N' if it doesn't, or '?' if it can't be determined (eg perhaps the App
    belongs to another repo).
    """
    contains_oid = None
    repo = None
    if contains:
        contains_oid = git.Oid(hex=contains)
        repo = utils.git_repo()
        if contains_oid not in repo:
            raise Exception(f"Commit {contains_oid} does not exist in repo")
    # The "contains" column only exists when a commit was supplied, so it is
    # only a valid sort key in that case.
    valid_sort_keys = list(Project._fields)
    if not contains:
        valid_sort_keys.remove("contains")
    sort_keys = [] if sort is None else sort.split(",")
    if any(sort_key not in valid_sort_keys for sort_key in sort_keys):
        raise Exception(
            f"Invalid sort key in {sort!r}. Valid sort keys: {valid_sort_keys}"
        )
    if only is not None:
        only = only.split(",")
    client = utils.s3_client()
    config = utils.get_config()
    bucket = config["release"]["s3_bucket"]
    deploys = config["deploy"]
    resp = client.list_objects_v2(Bucket=bucket)
    # Object keys in the release bucket are the project names.
    project_names = sorted(data["Key"] for data in resp.get("Contents", []))
    _projects = []
    now = datetime.now(tz=timezone.utc)
    for name in project_names:
        if only and name not in only:
            continue
        try:
            release = get_release(client, bucket, name)
        except InvalidRelease:
            continue
        _projects.append(
            Project(
                name=name,
                version=release.version,
                commit=release.commit,
                timestamp=release.timestamp,
                age=now - release.timestamp,
                type=ProjectType.release,
                contains=release_contains(repo, release, contains_oid, name)
                if contains
                else None,
                env_name="",
            ))
        # One deploy row per environment the project is deployed in.
        for env_name, cfg in deploys.items():
            try:
                deploy = get_release(client, cfg["s3_bucket"], name)
            except InvalidRelease:
                continue
            _projects.append(
                Project(
                    name=name,
                    version=deploy.version,
                    commit=deploy.commit,
                    timestamp=deploy.timestamp,
                    age=now - deploy.timestamp,
                    type=ProjectType.deploy,
                    env_name=env_name,
                    contains=release_contains(repo, deploy, contains_oid, name)
                    if contains
                    else None,
                ))
    # Convert rows to plain dicts for sorting and tabular printing.
    project_dicts = []
    for project in _projects:
        project_dict = project._asdict()
        if not contains:
            project_dict.pop("contains")
        project_dict["type"] = project_dict["type"].name
        project_dicts.append(project_dict)
    if sort_keys:
        project_dicts.sort(key=itemgetter(*sort_keys), reverse=reverse)
    utils.printfmt(project_dicts, tabular=True)
def ls(
    _,
    author=False,
    contains=None,
    sort=None,
    reverse=False,
    only=None,
    permissions=False,
):
    """
    List all the projects managed with catapult.

    Optionally pass a full SHA-1 hash of a commit in the current repo, and
    each release/deploy will be marked with 'Y' if it contains that commit,
    'N' if it doesn't, or '?' if it can't be determined (eg perhaps the App
    belongs to another repo).
    """
    contains_oid = None
    repo = None
    # Columns shown (and valid as sort keys) only when requested.
    optional_columns = {
        "author": bool(author),
        "contains": bool(contains),
        "permission": bool(permissions),
    }
    if contains:
        repo = utils.git_repo()
        contains_oid = utils.revparse(repo, contains)
        if contains_oid not in repo:
            raise Exception(f"Commit {contains_oid} does not exist in repo")
    valid_sort_keys = list(Project._fields)
    for column_name, show_column in optional_columns.items():
        if not show_column:
            valid_sort_keys.remove(column_name)
    sort_keys = [] if sort is None else sort.split(",")
    if any(sort_key not in valid_sort_keys for sort_key in sort_keys):
        raise Exception(
            f"Invalid sort key in {sort!r}. Valid sort keys: {valid_sort_keys}"
        )
    if only is not None:
        only = only.split(",")
    client = utils.s3_client()
    config = utils.get_config()
    release_bucket = config["release"]["s3_bucket"]
    deploys = config["deploy"]
    resp = client.list_objects_v2(Bucket=release_bucket)
    # Object keys in the release bucket are the project names.
    project_names = sorted(data["Key"] for data in resp.get("Contents", []))
    can_release = {}
    can_deploy = {}
    if permissions:
        # Per-project write permissions for the release bucket and each
        # environment's deploy bucket.
        iam_client = utils.iam_client()
        can_release = check_perms(iam_client, release_bucket, project_names)
        can_deploy = {
            env_name: check_perms(iam_client, cfg["s3_bucket"], project_names)
            for env_name, cfg in deploys.items()
        }
    _projects = []
    now = datetime.now(tz=timezone.utc)
    for name in project_names:
        if only and name not in only:
            continue
        try:
            release = get_release(client, release_bucket, name)
        except InvalidRelease:
            continue
        _projects.append(
            Project(
                name=name,
                version=release.version,
                behind=0,
                commit=release.commit,
                timestamp=release.timestamp,
                age=now - release.timestamp,
                type=ProjectType.release,
                contains=(release_contains(repo, release, contains_oid, name)
                          if contains else None),
                env_name="",
                permission=can_release.get(name),
                action_type=release.action_type,
                author=release.author,
            ))
        # One deploy row per environment the project is deployed in.
        for env_name, cfg in deploys.items():
            try:
                deploy = get_release(client, cfg["s3_bucket"], name)
            except InvalidRelease:
                continue
            _projects.append(
                Project(
                    name=name,
                    version=deploy.version,
                    # How many versions this environment lags the release.
                    behind=release.version - deploy.version,
                    commit=deploy.commit,
                    timestamp=deploy.timestamp,
                    age=now - deploy.timestamp,
                    type=ProjectType.deploy,
                    env_name=env_name,
                    contains=(release_contains(repo, deploy, contains_oid, name)
                              if contains else None),
                    permission=can_deploy.get(env_name, {}).get(name),
                    action_type=deploy.action_type,
                    author=deploy.author,
                ))
    # Convert rows to plain dicts, drop hidden columns, and color rows by type.
    project_dicts = []
    for project in _projects:
        project_dict = project._asdict()
        for column_name, show_column in optional_columns.items():
            if not show_column:
                project_dict.pop(column_name)
        style = (utils.TextStyle.yellow
                 if project_dict["type"] is ProjectType.release
                 else utils.TextStyle.blue)
        project_dict["name"] = utils.Formatted(project_dict["name"], style)
        project_dict["type"] = utils.Formatted(project_dict["type"].name, style)
        project_dicts.append(project_dict)
    if sort_keys:
        project_dicts.sort(key=itemgetter(*sort_keys), reverse=reverse)
    utils.printfmt(project_dicts, tabular=True)