def ensure(self) -> None:
    """Ensure the image exists on Docker Hub if it is publishable.

    The image is pushed using its spec as its tag.
    """
    # A publishable image that is already on Docker Hub needs no work.
    already_pushed = self.publish and is_docker_image_pushed(self.spec())
    if already_pushed:
        ui.say(f"{self.spec()} already exists")
    else:
        self.build()
        spawn.runv(["docker", "push", self.spec()])
def run(self, args: argparse.Namespace) -> None:
    """Execute this mzcompose subcommand by delegating to `docker-compose`."""
    if args.help:
        # Re-use docker-compose's own help text, rebranded so users invoke
        # `./mzcompose` rather than docker-compose directly.
        output = self.capture(
            ["docker-compose", self.name, "--help"], stderr=subprocess.STDOUT
        )
        output = output.replace("docker-compose", "./mzcompose")
        output += "\nThis command is a wrapper around Docker Compose."
        if self.help_epilog:
            output += "\n"
            output += self.help_epilog
        print(output, file=sys.stderr)
        return

    # Make sure Docker Compose is new enough.
    # Strip the leading "v" some docker-compose versions print (e.g. "v1.29.2").
    output = (
        self.capture(
            ["docker-compose", "version", "--short"], stderr=subprocess.STDOUT
        )
        .strip()
        .strip("v")
    )
    version = tuple(int(i) for i in output.split("."))
    if version < MIN_COMPOSE_VERSION:
        raise UIError(
            f"unsupported docker-compose version v{output}",
            hint=f"minimum version allowed: v{'.'.join(str(p) for p in MIN_COMPOSE_VERSION)}",
        )

    composition = load_composition(args)
    ui.header("Collecting mzbuild images")
    for d in composition.dependencies:
        ui.say(d.spec())

    if self.runs_containers:
        if args.coverage:
            # If the user has requested coverage information, create the
            # coverage directory as the current user, so Docker doesn't create
            # it as root.
            (composition.path / "coverage").mkdir(exist_ok=True)
        self.check_docker_resource_limits()
        # Build/acquire every mzbuild image the composition depends on.
        composition.dependencies.acquire()

        if "services" in composition.compose:
            composition.pull_if_variable(composition.compose["services"].keys())

    self.handle_composition(args, composition)
def update_upgrade_tests_inner(released_version: Version, force: bool = False) -> None:
    """Rename `current_source` upgrade tests to the just-released version.

    Skips prereleases entirely; commits the renames via `git mv`.
    """
    if released_version.prerelease is not None:
        ui.say("Not updating upgrade tests for prerelease")
        return

    tag = f"v{released_version}"
    # Collect every current_source test file, skipping example templates.
    renames = []
    for candidate in Path("./test/upgrade").glob("*current_source*"):
        name = str(candidate)
        if "example" not in name:
            renames.append(name)

    if not renames and not force:
        return

    for old_name in renames:
        spawn.runv(["git", "mv", old_name, old_name.replace("current_source", tag)])
    git.commit_all_changed(
        f"Rename {len(renames)} current_source upgrade tests to {tag}"
    )
def confirm_version_is_next(this_tag: Version, affect_remote: bool) -> None:
    """Check if the passed-in tag is the logical next tag.

    Within the same minor version the allowed transitions are:
    an rc bump (same patch, both prereleases), the first rc of the next
    patch (patch+1, new prerelease, latest was final), finalizing an rc
    (same patch, new is final, latest was a prerelease), or preparing the
    next development iteration (patch+1 with a ``dev`` prerelease).
    Anything else in the same minor exits the process with status 1.
    A minor bump or any other jump asks for interactive confirmation.
    """
    latest_tag = get_latest_tag(affect_remote)
    if this_tag.minor == latest_tag.minor:
        if (
            this_tag.patch == latest_tag.patch
            and this_tag.prerelease is not None
            and latest_tag.prerelease is not None
        ):
            # rc bump (e.g. v0.1.2-rc1 -> v0.1.2-rc2)
            pass
        elif (
            this_tag.patch == latest_tag.patch + 1
            and this_tag.prerelease is not None
            and latest_tag.prerelease is None
        ):
            # first rc of the next patch release
            pass
        elif (
            this_tag.patch == latest_tag.patch
            and this_tag.prerelease is None
            and latest_tag.prerelease is not None
        ):
            # finalizing an rc into the actual release
            ui.say("Congratulations on the successful release!")
        elif (
            # This branch is already inside the same-minor case, so the
            # previous redundant `this_tag.minor == latest_tag.minor`
            # check has been dropped.
            this_tag.patch == latest_tag.patch + 1
            and this_tag.prerelease == "dev"
        ):
            # prepare next development iteration
            pass
        else:
            ui.say(f"ERROR: {this_tag} is not the next release after {latest_tag}")
            sys.exit(1)
    elif this_tag.minor == latest_tag.minor + 1 and this_tag.patch == 0:
        click.confirm("Are you sure you want to bump the minor version?", abort=True)
    else:
        click.confirm(
            f"The bump {latest_tag} -> {this_tag} is suspicious, are you sure?",
            abort=True,
        )
def list_prs(recent_ref: Optional[str], ancestor_ref: Optional[str]) -> None:
    """
    List PRs between a range of refs

    If no refs are specified, then this will find the refs between the most
    recent tag and the previous semver tag (i.e. excluding RCs)
    """
    git.fetch()
    if recent_ref is None or ancestor_ref is None:
        tags = git.get_version_tags(fetch=False)
        if recent_ref is None:
            recent = tags[0]
            recent_ref = str(tags[0])
        else:
            recent = Version.parse(recent_ref)
        if ancestor_ref is None:
            # Walk the remaining tags until one is older than `recent` in some
            # component. NOTE(review): this per-field `or` comparison is not
            # strict semver ordering, but with `tags` sorted newest-first it
            # selects the first non-RC predecessor in practice — confirm.
            for ref in tags[1:]:
                ancestor = ref
                if (
                    ancestor.major < recent.major
                    or ancestor.minor < recent.minor
                    or ancestor.patch < recent.patch
                ):
                    ancestor_ref = str(ref)
                    break

        ui.say(
            f"Using recent_ref={recent_ref} ancestor_ref={ancestor_ref}",
        )

    commit_range = f"v{ancestor_ref}..v{recent_ref}"
    commits = spawn.capture(
        [
            "git",
            "log",
            "--pretty=format:%d %s",
            "--abbrev-commit",
            "--date=iso",
            commit_range,
            "--",
        ],
        unicode=True,
    )
    # Match a pullreqs ref decoration or a "(#1234)" squash-merge suffix.
    pattern = re.compile(r"^\s*\(refs/pullreqs/(\d+)|\(#(\d+)")
    prs = []
    found_ref = False
    for commit in commits.splitlines():
        # Skip dependabot noise.
        if "build(deps)" in commit:
            continue

        match = pattern.search(commit)
        if match is not None:
            pr = match.group(1)
            if pr:
                found_ref = True
            else:
                pr = match.group(2)
            prs.append(pr)

    if not found_ref:
        ui.say(
            "WARNING: you probably don't have pullreqs configured for your repo",
        )
        ui.say(
            "Add the following line to the MaterializeInc/materialize remote section in your .git/config",
        )
        ui.say(" fetch = +refs/pull/*/head:refs/pullreqs/*")

    # FIX: the source had the prompt and the token path fused into one garbled
    # statement, leaving `creds_path` (used below) unassigned. Reconstructed
    # as a username prompt plus the access-token path under the user's home.
    username = input("Enter your github username: ")
    creds_path = Path("~/.config/materialize/dev-tools-access-token").expanduser()
    try:
        with open(creds_path) as fh:
            token = fh.read().strip()
    except FileNotFoundError:
        raise UIError(
            f"""No developer tool api token at {creds_path!r}
    please create an access token at https://github.com/settings/tokens"""
        )

    def get(pr: str) -> Any:
        # Fetch one PR's metadata from the GitHub REST API.
        return requests.get(
            f"https://{username}:{token}@api.github.com/repos/MaterializeInc/materialize/pulls/{pr}",
            headers={
                "Accept": "application/vnd.github.v3+json",
            },
        ).json()

    collected = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        futures = {pool.submit(get, pr): pr for pr in prs}
        for future in concurrent.futures.as_completed(futures):
            pr = futures[future]
            contents = future.result()
            try:
                url = contents["html_url"]
                title = contents["title"]
                collected.append((url, title))
            except KeyError:
                # An unexpected payload (e.g. an API error object) — surface it.
                raise UIError(contents)
    for url, title in sorted(collected):
        print(url, title)
def release(
    version: Version,
    checkout: Optional[str],
    create_branch: Optional[str],
    tag: bool,
    affect_remote: bool,
) -> None:
    """Update documents for a release and create tags

    If both `-b` and `-c` are specified, the checkout happens before the branch
    creation, meaning that the new branch is created on the target of `-c`.

    For example make release::

        mkrelease -b prepare-v0.1.2 -c v0.1.1-rc1 v0.1.2-dev

    Has the same git semantics as::

        git checkout -b prepare-v0.1.2 v0.1.1-rc1

    \b
    Arguments:
        version: The version to release. The `v` prefix is optional
    """
    # Refuse to run on a dirty working tree: the function commits everything.
    if git.is_dirty():
        raise UIError("working directory is not clean, stash or commit your changes")

    the_tag = f"v{version}"
    confirm_version_is_next(version, affect_remote)

    if checkout is not None:
        git.checkout(checkout)
    if create_branch is not None:
        git.create_branch(create_branch)

    confirm_on_latest_rc(affect_remote)

    # Stamp the version into Cargo.toml and the BSL license text.
    change_line(BIN_CARGO_TOML, "version", f'version = "{version}"')
    change_line(
        LICENSE,
        "Licensed Work:",
        f"Licensed Work: Materialize Version {version}",
    )
    # Don't update the change date unless some code has changed
    if version.prerelease:
        future = four_years_hence()
        change_line(LICENSE, "Change Date", f"Change Date: {future}")

    ui.say("Updating Cargo.lock")
    # NOTE(review): `cargo check` is run twice before the --locked check —
    # presumably so the lockfile settles before being verified; confirm
    # whether the duplicate invocation is intentional.
    spawn.runv(["cargo", "check", "-p", "materialized"])
    spawn.runv(["cargo", "check", "-p", "materialized"])
    spawn.runv(["cargo", "check", "-p", "materialized", "--locked"])

    if tag:
        git.commit_all_changed(f"release: {the_tag}")
        git.tag_annotated(the_tag)
    else:
        git.commit_all_changed(f"Prepare next phase of development: {the_tag}")
        latest_tag = get_latest_tag(fetch=False)
        # we have made an actual release
        if latest_tag.prerelease is None and click.confirm(
            f"Update doc/user/config.toml marking v{latest_tag} as released"
        ):
            update_versions_list(latest_tag)
            git.commit_all_changed(f"Update released versions to include v{latest_tag}")

    matching = git.first_remote_matching("MaterializeInc/materialize")
    if tag:
        if matching is not None:
            # Show what is about to be pushed before asking for confirmation.
            spawn.runv(["git", "show", "HEAD"])
            if affect_remote and ui.confirm(
                f"\nWould you like to push the above changes as: git push {matching} {the_tag}"
            ):
                spawn.runv(["git", "push", matching, the_tag])
        else:
            ui.say("")
            ui.say(
                f"Next step is to push {the_tag} to the MaterializeInc/materialize repo"
            )
    else:
        branch = git.rev_parse("HEAD", abbrev=True)
        ui.say("")
        ui.say(f"Create a PR with your branch: '{branch}'")