def build(self, latest_branch_version):
    """Builds the debian package for latest version

    Clones the upstream source at the version tag, overlays the downstream
    debian/ packaging checkout on top of it, bumps the changelog revision,
    builds the source package, and pushes the changelog commit back.

    :param latest_branch_version: version to build (parseable by semver).
    """
    self.upstream_model.clone()
    self.upstream_model.checkout(
        ref=f"tags/v{str(latest_branch_version)}",
        force=True,
        cwd=self.upstream_model.name,
    )
    with tempfile.TemporaryDirectory() as tmpdir:
        self.log(f"Building {self.deb_model.name} debian package")
        # Check out the matching downstream packaging branch in a temp dir.
        self.deb_model.base.clone(cwd=tmpdir)
        self.deb_model.base.checkout(
            ref=f"v{str(latest_branch_version)}",
            force=True,
            cwd=f"{tmpdir}/{self.deb_model.name}",
        )
        # New changelog entry for this automated build.
        self.bump_revision(cwd=f"{tmpdir}/{self.deb_model.name}")
        self.write_debversion(
            semver.VersionInfo.parse(latest_branch_version),
            src_path=Path(tmpdir) / self.deb_model.name,
        )
        # Overlay the packaging files (debian/, etc.) onto the upstream tree.
        cmd_ok(
            f"cp -a {tmpdir}/{self.deb_model.name}/* {self.upstream_model.name}/.",
            shell=True,
        )
        # Build the source package from the combined tree.
        self.source(cwd=self.upstream_model.name)
        # Persist the changelog bump back to the packaging branch.
        self.deb_model.base.add(["debian/changelog"], cwd=f"{tmpdir}/{self.deb_model.name}")
        self.deb_model.base.commit("Automated Build", cwd=f"{tmpdir}/{self.deb_model.name}")
        self.deb_model.base.push(
            ref=f"v{str(latest_branch_version)}",
            cwd=f"{tmpdir}/{self.deb_model.name}",
        )
def bundle_build(self, to_channel):
    """Build (or copy) the bundle into dst_path; pack it when targeting charmhub.

    :param to_channel: channel argument forwarded to the repo's bundle script.
    """
    if not self.opts.get("skip-build"):
        # Run the repo's own ./bundle script to generate the bundle output.
        cmd = f"{self.src_path}/bundle -n {self.name} -o {self.dst_path} -c {to_channel} {self.opts['fragments']}"
        self.echo(f"Running {cmd}")
        cmd_ok(cmd, echo=self.echo)
    else:
        # If we're not building the bundle from the repo, we have
        # to copy it to the expected output location instead.
        shutil.copytree(
            Path(self.src_path) / self.opts.get("subdir", ""), self.dst_path)
    if self.store == "ch":
        # If we're building for charmhub, it needs to be packed
        dst_path = Path(self.dst_path)
        charmcraft_yaml = dst_path / "charmcraft.yaml"
        if not charmcraft_yaml.exists():
            # Synthesize a minimal charmcraft.yaml priming every file in the
            # bundle directory so `charmcraft pack` accepts it as-is.
            contents = {
                "type": "bundle",
                "parts": {
                    "bundle": {
                        "prime": [
                            str(_.relative_to(dst_path))
                            for _ in dst_path.glob("**/*")
                            if _.is_file()
                        ]
                    }
                },
            }
            with charmcraft_yaml.open("w") as fp:
                yaml.safe_dump(contents, fp)
        # pack() returns the produced artifact path; dst_path now points at it.
        self.dst_path = str(CharmcraftCmd(self).pack(_cwd=dst_path))
def charm_build(self):
    """Perform charm build against charm/bundle"""
    if "override-build" in self.opts:
        # Charm supplies its own build script; run it instead of charm build.
        self.echo("Override build found, running in place of charm build.")
        ret = script(
            self.opts["override-build"],
            cwd=self.src_path,
            charm=self.name,
            echo=self.echo,
        )
    else:
        # Legacy (reactive) charms use `charm build`; modern ones use charmcraft.
        if self.legacy_charm:
            cmd, build_cwd = "charm build -r --force -i https://localhost", self.src_path
        else:
            cmd, build_cwd = f"charmcraft build -f {self.src_path}", self.build.build_dir
        self.echo(f"Building with: {cmd}")
        ret = cmd_ok(cmd, cwd=build_cwd, echo=self.echo)
    if not ret.ok:
        self.echo("Failed to build, aborting")
        raise SystemExit(f"Failed to build {self.name}")
def sync_branches_list(snap):
    """
    Syncs the downstream snap branches to a yaml file for parsing in jobs

    :param snap: snap name; repo is resolved as launchpad.net/snap-<snap>.
    """
    click.echo(f"Checking: git+ssh://[email protected]/snap-{snap}")
    git_repo = f"git+ssh://[email protected]/snap-{snap}"
    snap_releases = remote_branches(git_repo)
    snap_releases.reverse()
    env = os.environ.copy()
    # Push over https using the bot credentials from the environment.
    repo = f"https://{env['CDKBOT_GH_USR']}:{env['CDKBOT_GH_PSW']}@github.com/charmed-kubernetes/jenkins"
    with tempfile.TemporaryDirectory() as tmpdir:
        git.clone(repo, tmpdir)
        git.config("user.email", "*****@*****.**", _env=env, _cwd=tmpdir)
        git.config("user.name", "cdkbot", _env=env, _cwd=tmpdir)
        git.config("--global", "push.default", "simple", _cwd=tmpdir)
        # Branch list consumed by the jenkins job includes.
        output = Path(f"{tmpdir}/jobs/includes/k8s-snap-branches-list.inc")
        click.echo(f"Saving to {str(output)}")
        output.write_text(yaml.dump(snap_releases, default_flow_style=False, indent=2))
        cmd_ok(f"git add {str(output)}", cwd=tmpdir)
        ret = cmd_ok(
            ["git", "commit", "-m", "Updating k8s snap branches list"], cwd=tmpdir
        )
        if not ret.ok:
            # Commit failed — presumably nothing changed; bail quietly.
            return
        click.echo(f"Committing to {repo}.")
        ret = cmd_ok(["git", "push", repo, "master"], cwd=tmpdir)
        if not ret.ok:
            raise SystemExit("Failed to commit latest snap branches.")
def promote(self, from_channel="unpublished", to_channel="edge"):
    """Release a charm revision (with its resources) into another channel.

    :param from_channel: channel currently holding the revision to promote.
    :param to_channel: channel to release that revision into.
    """
    self.echo(
        f"Promoting :: {self.entity:^35} :: from:{from_channel} to: {to_channel}"
    )
    # Resolve the concrete revision id in the source channel.
    charm_id = sh.charm.show(self.entity, "--channel", from_channel, "id")
    charm_id = yaml.safe_load(charm_id.stdout.decode())
    resources_args = []
    try:
        resources = sh.charm(
            "list-resources",
            charm_id["id"]["Id"],
            channel=from_channel,
            format="yaml",
        )
        resources = yaml.safe_load(resources.stdout.decode())
        if resources:
            # BUG FIX: build a flat ["--resource", "name-rev", ...] list.
            # The previous version collected 2-tuples and splatted them into
            # sh.charm.release, where `sh` stringifies each positional arg —
            # a tuple would not expand into two separate CLI arguments.
            resources_args = [
                arg
                for resource in resources
                for arg in (
                    "--resource",
                    "{}-{}".format(resource["name"], resource["revision"]),
                )
            ]
    except sh.ErrorReturnCode:
        # Charms without resources are fine; release without --resource args.
        self.echo("No resources for {}".format(charm_id))
    sh.charm.release(charm_id["id"]["Id"], "--channel", to_channel, *resources_args)
    self.echo(
        f"Setting {charm_id['id']['Id']} permissions for read everyone")
    cmd_ok(
        ["charm", "grant", charm_id["id"]["Id"], "--acl=read", "everyone"],
        echo=self.echo,
    )
def push(self):
    """Pushes a built charm to Charmstore

    Sets self.new_entity to the revisioned url returned by `charm push`
    and records the source commit in the charm's store metadata.
    """
    if "override-push" in self.opts:
        # Charm supplies its own push script; run it instead of `charm push`.
        self.echo("Override push found, running in place of charm push.")
        script(
            self.opts["override-push"],
            cwd=self.src_path,
            charm=self.name,
            namespace=self.namespace,
            echo=self.echo,
        )
        return
    self.echo(f"Pushing built {self.dst_path} to {self.entity}")
    # The store push can flake; retry with exponential backoff.
    out = retry_call(
        capture,
        fargs=[["charm", "push", self.dst_path, self.entity]],
        fkwargs={"check": True},
        delay=2,
        backoff=2,
        exceptions=CalledProcessError,
    )
    self.echo(f"Charm push returned: {out}")
    # Output includes lots of ansi escape sequences from the docker push,
    # and we only care about the first line, which contains the url as yaml.
    out = yaml.safe_load(out.stdout.decode().strip().splitlines()[0])
    self.new_entity = out["url"]
    self.echo(f"Setting {self.new_entity} metadata: {self.commit}")
    cmd_ok(["charm", "set", self.new_entity, f"commit={self.commit}"], echo=self.echo)
def build(
    charm_list,
    layer_list,
    layer_index,
    charm_branch,
    layer_branch,
    resource_spec,
    filter_by_tag,
    to_channel,
    force,
):
    """Build every charm whose tags match filter_by_tag and publish to to_channel."""
    # Sanity-echo the locations of the build tools we depend on.
    cmd_ok("which charm", echo=lambda m: click.echo(f"charm -> {m}"))
    cmd_ok("which charmcraft", echo=lambda m: click.echo(f"charmcraft -> {m}"))
    build_env = BuildEnv(build_type=BuildType.CHARM)
    # Persist the build arguments so later steps can inspect/replay them.
    build_env.db["build_args"] = {
        "artifact_list": charm_list,
        "layer_list": layer_list,
        "layer_index": layer_index,
        "charm_branch": charm_branch,
        "layer_branch": layer_branch,
        "resource_spec": resource_spec,
        "filter_by_tag": list(filter_by_tag),
        "to_channel": to_channel,
        "force": force,
    }
    build_env.pull_layers()
    entities = []
    # Queue every charm whose tags intersect the requested filter.
    for charm_map in build_env.artifacts:
        for charm_name, charm_opts in charm_map.items():
            if not any(match in filter_by_tag for match in charm_opts["tags"]):
                continue
            charm_entity = f"cs:~{charm_opts['namespace']}/{charm_name}"
            entities.append(
                BuildEntity(build_env, charm_name, charm_opts, charm_entity))
            click.echo(f"Queued {charm_entity} for building")
    for entity in entities:
        entity.echo("Starting")
        try:
            entity.setup()
            entity.echo(f"Details: {entity}")
            # Skip unchanged charms unless a rebuild is forced.
            if not entity.has_changed and not build_env.force:
                continue
            entity.charm_build()
            entity.push()
            entity.attach_resources()
            entity.promote(to_channel=to_channel)
        finally:
            entity.echo("Stopping")
    # pool = ThreadPool()
    # pool.map(_run_build, entities)
    build_env.save()
def upload(self, ppa, **subprocess_kwargs):
    """Upload every generated *.changes source package to a PPA via dput."""
    for changes_file in Path(".").glob("*changes"):
        dput_cmd = f"dput {ppa} {str(changes_file)}"
        self.log(dput_cmd)
        cmd_ok(dput_cmd, **subprocess_kwargs)
    # Remove build artifacts now that everything is uploaded.
    self.cleanup_source()
    self.cleanup_debian(cwd=self.upstream_model.name)
def build_summaries(snap_list, snap_versions, owner):
    """Return snap build summaries

    Renders recent Launchpad build results plus snapstore publishing state
    into snap_summary.html and uploads the report to S3.

    :param snap_list: path to a yaml file listing snap names.
    :param snap_versions: path to a yaml file listing versions.
    :param owner: Launchpad owner of the snap recipes.
    """
    _client = lp.Client(stage="production")
    _client.login()
    snap_list_p = Path(snap_list)
    snap_versions_p = Path(snap_versions)
    snap_iter = yaml.safe_load(snap_list_p.read_text())
    snap_versions_iter = yaml.safe_load(snap_versions_p.read_text())
    # Cross product: one "<name>-<version>" recipe per (name, version) pair.
    snaps_to_process = [
        f"{name}-{ver}"
        for name, ver in list(itertools.product(*[snap_iter, snap_versions_iter]))
    ]
    owner_link = _client.owner(owner)
    summaries = []
    for item in snaps_to_process:
        # Only the four most recent Launchpad builds per recipe.
        builds = _client.snaps.getByName(name=item, owner=owner_link).builds[:4]
        for build in builds:
            arch = build.distro_arch_series.architecture_tag
            click.echo(f"Summarizing {item} - {arch}")
            summaries.append(
                {
                    "name": f"{item}-{arch}",
                    "created": build.datecreated.strftime("%Y-%m-%d %H:%M:%S"),
                    # Builds still queued/running have no start/finish time.
                    "started": build.date_started.strftime("%Y-%m-%d %H:%M:%S")
                    if build.date_started
                    else "n/a",
                    "finished": build.datebuilt.strftime("%Y-%m-%d %H:%M:%S")
                    if build.datebuilt
                    else "n/a",
                    "buildstate": build.buildstate,
                    "build_log_url": build.build_log_url,
                    "store_upload_status": build.store_upload_status,
                    "store_upload_errors": build.store_upload_error_messages,
                    "upload_log_url": build.upload_log_url,
                    "channels": build.snap.store_channels,
                }
            )
    # Generate published snaps from snapstore
    click.echo("Retrieving snapstore revisions and publishing information")
    # Add cdk-addons here since we need to check that snap as well from snapstore
    snap_iter.append("cdk-addons")
    published_snaps = [(snap, snapapi.all_published(snap)) for snap in snap_iter]
    tmpl = html.template("snap_summary.html")
    rendered = tmpl.render({"rows": summaries, "published_snaps": published_snaps})
    summary_html_p = Path("snap_summary.html")
    summary_html_p.write_text(rendered)
    # Publish the rendered report where jenkins serves it from.
    cmd_ok("aws s3 cp snap_summary.html s3://jenkaas/snap_summary.html", shell=True)
def build_bundles(bundle_list, bundle_branch, filter_by_tag, bundle_repo, track, to_channel, store):
    """Build list of bundles from a specific branch according to filters."""
    build_env = BuildEnv(build_type=BuildType.BUNDLE)
    # Persist the build arguments so later steps can inspect/replay them.
    build_env.db["build_args"] = {
        "artifact_list": bundle_list,
        "branch": bundle_branch,
        "filter_by_tag": list(filter_by_tag),
        "track": track,
        "to_channel": to_channel,
    }
    build_env.clean()
    default_repo_dir = build_env.default_repo_dir
    cmd_ok(
        f"git clone --branch {bundle_branch} {bundle_repo} {default_repo_dir}")
    entities = []
    for bundle_map in build_env.artifacts:
        for bundle_name, bundle_opts in bundle_map.items():
            # Only bundles whose tags intersect the requested filter.
            if not any(match in filter_by_tag for match in bundle_opts["tags"]):
                continue
            if "downstream" in bundle_opts:
                # Bundle lives in its own repo; cloned later via entity.setup().
                bundle_opts["sub-repo"] = bundle_name
                bundle_opts["src_path"] = build_env.repos_dir / bundle_name
            else:
                bundle_opts["src_path"] = build_env.default_repo_dir
            bundle_opts["dst_path"] = build_env.bundles_dir / bundle_name
            build_entity = BundleBuildEntity(build_env, bundle_name, bundle_opts, store)
            entities.append(build_entity)
    for entity in entities:
        entity.echo("Starting")
        try:
            if "downstream" in entity.opts:
                # clone bundle repo override
                entity.setup()
            entity.echo(f"Details: {entity}")
            # Build/push/promote once per requested channel.
            for channel in build_env.to_channels:
                entity.bundle_build(channel)
                entity.push()
                entity.promote(to_channels=[channel])
                # Restore dst_path between channels — presumably pack()
                # rewrote it to the artifact path; confirm against the entity.
                entity.reset_dst_path()
        finally:
            entity.echo("Stopping")
    build_env.save()
def proof_build(self):
    """Run a legacy `charm build` pass against the charm source as a proof step."""
    build_cmd = "charm build -r --force -i https://localhost"
    result = cmd_ok(build_cmd, cwd=self.src_path)
    if not result.ok:
        # Until https://github.com/juju/charm-tools/pull/554 is fixed.
        click.echo("Ignoring proof warning")
def push(self):
    """
    Pushes a built charm to Charmstore

    Pulls each declared oci-image resource locally, then pushes the charm
    together with `--resource name=image` arguments and records the commit.
    """
    click.echo(f"Pushing built {self.dst_path} to {self.entity}")
    resource_args = []
    # Build a list of `oci-image` resources that have `upstream-source` defined,
    # which is required for this logic to work.
    resources = yaml.safe_load(
        Path(self.dst_path).joinpath("metadata.yaml").read_text()).get(
            "resources", {})
    images = {
        name: details["upstream-source"]
        for name, details in resources.items()
        if details["type"] == "oci-image" and details.get("upstream-source")
    }
    click.echo(
        f"Found {len(images)} oci-image resources:\n{pformat(images)}\n")
    # Images must exist locally before `charm push` can attach them.
    for image in images.values():
        click.echo(f"Pulling {image}...")
        sh.docker.pull(image)
    # Convert the image names and tags to `--resource foo=bar` format
    # for passing to `charm push`.
    resource_args = [
        arg
        for name, image in images.items()
        for arg in ("--resource", f"{name}={image}")
    ]
    # The store push can flake; retry with exponential backoff.
    out = retry_call(
        capture,
        fargs=[[
            "charm", "push", self.dst_path, self.entity, *resource_args
        ]],
        fkwargs={"check": True},
        delay=2,
        backoff=2,
        exceptions=CalledProcessError,
    )
    click.echo(f"Charm push returned: {out}")
    # Output includes lots of ansi escape sequences from the docker push,
    # and we only care about the first line, which contains the url as yaml.
    out = yaml.safe_load(out.stdout.decode().strip().splitlines()[0])
    click.echo(f"Setting {out['url']} metadata: {self.commit}")
    cmd_ok(["charm", "set", out["url"], f"commit={self.commit}"])
def _release(self, max_track_rev, track):
    """Runs snapcraft release

    :param max_track_rev: snap revision to release.
    :param track: target track/channel to release into.
    :raises Exception: when the snapcraft release command fails.
    """
    ret = cmd_ok(
        f"snapcraft release {self.snap_model.name} {max_track_rev} {track}",
        echo=self.log,
    )
    if not ret.ok:
        # BUG FIX: the message referenced an undefined name `_track`, which
        # raised NameError here and masked the real release failure.
        raise Exception(
            f"Failed to promote {self.snap_model.name} (rev: {max_track_rev}) to track {track}"
        )
def setup(self):
    """Set up directory for charm build."""
    repo_url = f"https://github.com/{self.downstream}"
    self.echo(f"Cloning repo from {repo_url} branch {self.branch}")
    os.makedirs(self.checkout_path)
    clone_cmd = f"git clone --branch {self.branch} {repo_url} {self.checkout_path}"
    result = cmd_ok(clone_cmd, echo=self.echo)
    if not result.ok:
        raise BuildException("Clone failed")
    # A layer file in the checkout marks a reactive-framework charm.
    self.reactive = self.layer_path.exists()
def setup(self):
    """Clone the charm's downstream repo and detect whether it is a legacy charm."""
    repo_url = f"https://github.com/{self.opts['downstream']}"
    self.echo(f"Cloning repo from {repo_url}")
    os.makedirs(self.checkout_path)
    clone_cmd = f"git clone --branch {self.charm_branch} {repo_url} {self.checkout_path}"
    result = cmd_ok(clone_cmd, echo=self.echo)
    if not result.ok:
        raise SystemExit("Clone failed")
    # A layer file marks a legacy (reactive) charm; charmcraft builds
    # produce a *.charm artifact, so adjust the destination accordingly.
    self.legacy_charm = self.layer_path.exists()
    if not self.legacy_charm:
        self.dst_path += ".charm"
def download(self, layer_name):
    """Fetch a charm layer, refreshing the local cache when it already exists.

    :param layer_name: name of the layer to download or refresh.
    :return: True always (kept for caller compatibility).
    """
    if Path(self.build_path(layer_name)).exists():
        click.echo(f"- Refreshing {layer_name} cache.")
        cmd_ok(f"git checkout {self.layer_branch}", cwd=self.build_path(layer_name))
        # BUG FIX: was "git.pull", which is not a valid git invocation, so the
        # cached layer was never actually updated from origin.
        cmd_ok(
            f"git pull origin {self.layer_branch}",
            cwd=self.build_path(layer_name),
        )
    else:
        click.echo(f"- Downloading {layer_name}")
        cmd_ok(f"charm pull-source -i {self.layer_index} {layer_name}")
    return True
def build(
    charm_list,
    layer_list,
    layer_index,
    charm_branch,
    layer_branch,
    resource_spec,
    filter_by_tag,
    track,
    to_channel,
    store,
    force,
):
    """Build a set of charms and publish with their resources."""
    # Sanity-echo the locations of the build tools we depend on.
    cmd_ok("which charm", echo=lambda m: click.echo(f"charm -> {m}"))
    cmd_ok("which charmcraft", echo=lambda m: click.echo(f"charmcraft -> {m}"))
    build_env = BuildEnv(build_type=BuildType.CHARM)
    # Persist the build arguments so later steps can inspect/replay them.
    build_env.db["build_args"] = {
        "artifact_list": charm_list,
        "layer_list": layer_list,
        "layer_index": layer_index,
        "branch": charm_branch,
        "layer_branch": layer_branch,
        "resource_spec": resource_spec,
        "filter_by_tag": list(filter_by_tag),
        "track": track,
        "to_channel": to_channel,
        "force": force,
    }
    build_env.clean()
    build_env.pull_layers()
    entities = []
    for charm_map in build_env.artifacts:
        for charm_name, charm_opts in charm_map.items():
            # Only charms whose tags intersect the requested filter.
            if not any(match in filter_by_tag for match in charm_opts["tags"]):
                continue
            charm_entity = BuildEntity(build_env, charm_name, charm_opts, store)
            entities.append(charm_entity)
            click.echo(f"Queued {charm_entity.entity} for building")
    failed_entities = []
    for entity in entities:
        entity.echo("Starting")
        try:
            entity.setup()
            entity.echo(f"Details: {entity}")
            if not build_env.force:
                # Skip charms whose source hasn't changed since the last build.
                if not entity.has_changed:
                    continue
            else:
                entity.echo("Build forced.")
            entity.charm_build()
            entity.push()
            entity.attach_resources()
            entity.promote(to_channels=build_env.to_channels)
        except Exception:
            # Keep building the remaining charms; report all failures at the end.
            entity.echo(traceback.format_exc())
            failed_entities.append(entity)
        finally:
            entity.echo("Stopping")
    if any(failed_entities):
        count = len(failed_entities)
        plural = "s" if count > 1 else ""
        raise SystemExit(
            f"Encountered {count} Charm Build Failure{plural}:\n\t"
            + ", ".join(ch.name for ch in failed_entities))
    build_env.save()
def build_bundles(bundle_list, bundle_branch, filter_by_tag, bundle_repo, to_channel):
    """Build bundles matching filter_by_tag, then push and promote each one."""
    build_env = BuildEnv(build_type=BuildType.BUNDLE)
    # Persist the build arguments so later steps can inspect/replay them.
    build_env.db["build_args"] = {
        "artifact_list": bundle_list,
        "bundle_branch": bundle_branch,
        "filter_by_tag": list(filter_by_tag),
        "to_channel": to_channel,
    }
    # Start from clean scratch directories for repos and built bundles.
    repos_dir = build_env.tmp_dir / "repos"
    if repos_dir.exists():
        shutil.rmtree(repos_dir)
    repos_dir.mkdir()
    bundles_dir = build_env.tmp_dir / "bundles"
    if bundles_dir.exists():
        shutil.rmtree(bundles_dir)
    bundles_dir.mkdir()
    default_repo_dir = repos_dir / "bundles-kubernetes"
    cmd_ok(
        f"git clone --branch {bundle_branch} {bundle_repo} {default_repo_dir}")
    for bundle_map in build_env.artifacts:
        for bundle_name, bundle_opts in bundle_map.items():
            # Only bundles whose tags intersect the requested filter.
            if not any(match in filter_by_tag for match in bundle_opts["tags"]):
                click.echo(f"Skipping {bundle_name}")
                continue
            click.echo(f"Processing {bundle_name}")
            if "repo" in bundle_opts:
                # Bundle lives in its own repo, cloned below.
                src_path = bundle_opts["src_path"] = repos_dir / bundle_name
            else:
                src_path = bundle_opts["src_path"] = default_repo_dir
            dst_path = bundle_opts["dst_path"] = bundles_dir / bundle_name
            bundle_entity = f"cs:~{bundle_opts['namespace']}/{bundle_name}"
            build_entity = BundleBuildEntity(build_env, bundle_name, bundle_opts, bundle_entity)
            if "repo" in bundle_opts:
                # clone bundle repo override
                # NOTE(review): this rebinds the `bundle_repo` parameter; it is
                # only used here after the initial default clone, so behavior
                # appears unaffected — confirm before refactoring.
                bundle_repo = bundle_opts["repo"]
                build_entity.echo(f"Cloning {bundle_repo}")
                cmd_ok(
                    f"git clone --branch {bundle_branch} {bundle_repo} {src_path}",
                    echo=build_entity.echo,
                )
            if not bundle_opts.get("skip-build", False):
                # Run the repo's ./bundle script to generate the bundle output.
                cmd = f"{src_path}/bundle -o {dst_path} -c {to_channel} {bundle_opts['fragments']}"
                build_entity.echo(f"Running {cmd}")
                cmd_ok(cmd, echo=build_entity.echo)
            else:
                # If we're not building the bundle from the repo, we have
                # to copy it to the expected output location instead.
                shutil.copytree(src_path / bundle_opts.get("subdir", ""), dst_path)
            build_entity.push()
            build_entity.promote(to_channel=to_channel)
    build_env.save()
def attach_resource(self, from_channel):
    """Build charm resources via the charm's resource_build_sh and attach them.

    No-op when the charm declares no `resource_build_sh` in its opts.

    :param from_channel: store channel whose charm revision gets the resources.
    """
    resource_builder = self.opts.get("resource_build_sh", None)
    if not resource_builder:
        return
    builder = Path(self.src_path) / resource_builder
    out_path = Path(self.src_path) / "tmp"
    resource_spec = yaml.safe_load(
        Path(self.build.resource_spec).read_text())
    # Per-entity mapping; presumably resource file name -> charm resource key.
    resource_spec_fragment = resource_spec.get(self.entity, None)
    click.echo(resource_spec_fragment)
    if not resource_spec_fragment:
        raise SystemExit("Unable to determine resource spec for entity")
    os.makedirs(str(out_path), exist_ok=True)
    # Resolve the concrete revision id in the source channel.
    charm_id = capture(
        ["charm", "show", self.entity, "--channel", from_channel, "id"])
    charm_id = yaml.safe_load(charm_id.stdout.decode())
    resources = capture([
        "charm",
        "list-resources",
        charm_id["id"]["Id"],
        "--channel",
        from_channel,
        "--format",
        "yaml",
    ])
    if not resources.ok:
        click.echo("No resources found for {}".format(charm_id))
        return
    resources = yaml.safe_load(resources.stdout.decode())
    builder_sh = builder.absolute()
    click.echo(f"Running {builder_sh} from {self.dst_path}")
    # Grab a list of all file extensions to lookout for
    known_resource_extensions = list(
        set("".join(Path(k).suffixes) for k in resource_spec_fragment.keys()))
    click.echo(
        f" attaching resources with known extensions: {', '.join(known_resource_extensions)}"
    )
    # Run the build script inside the tmp output dir so artifacts land there.
    ret = cmd_ok(["bash", str(builder_sh)], cwd=out_path)
    if not ret.ok:
        raise SystemExit("Unable to build resources")
    for line in glob("{}/*".format(out_path)):
        click.echo(f" verifying {line}")
        resource_path = Path(line)
        resource_fn = resource_path.parts[-1]
        # Only attach artifacts the spec explicitly maps to a resource key.
        resource_key = resource_spec_fragment.get(resource_fn, None)
        if resource_key:
            # Attaching can flake against the store; retry with backoff.
            retry_call(
                cmd_ok,
                fargs=[[
                    "charm",
                    "attach",
                    self.entity,
                    "--channel",
                    from_channel,
                    f"{resource_key}={resource_path}",
                ]],
                fkwargs={"check": True},
                delay=2,
                backoff=2,
                tries=15,
                exceptions=CalledProcessError,
            )
def bump_revision(self, **subprocess_kwargs):
    """Append an 'Automated Build' changelog entry (targeting focal) via dch."""
    dch_cmd = "dch -U 'Automated Build' -D focal"
    cmd_ok(dch_cmd, **subprocess_kwargs)
def source(self, **subprocess_kwargs):
    """Build the Debian source package, signed with the configured key."""
    build_cmd = ["dpkg-buildpackage", "-S", f"--sign-key={self.sign_key}"]
    self.log(f"Building package: {build_cmd}")
    cmd_ok(build_cmd, **subprocess_kwargs)
def _create_branch(repo, from_branch, to_branch, dry_run, force, patches):
    """
    Creates a git branch based on the upstream snap repo and a version to branch as. This will also update the
    snapcraft.yaml with the correct version to build the snap from in that particular branch.

    These branches must already exist in https://github.com/kubernetes/kubernetes.

    Usage:

    snap.py branch --repo git+ssh://[email protected]/snap-kubectl \
      --from-branch master \
      --to-branch 1.13.2
    """
    env = os.environ.copy()
    if branch_exists(repo, to_branch, env) and not force:
        click.echo(f"{to_branch} already exists, skipping...")
        sys.exit(0)
    # Clone into a temp dir. Keep the TemporaryDirectory object bound so the
    # checkout survives until this function returns.
    # NOTE: the previous version also derived a basename from the repo URL
    # (with a buggy rstrip(".git")) only to immediately overwrite it with
    # tmpdir.name — that dead code has been removed.
    tmpdir = tempfile.TemporaryDirectory()
    snap_basename = tmpdir.name
    capture(["git", "clone", repo, snap_basename])
    capture(["git", "remote", "prune", "origin"], cwd=snap_basename)
    # BUG FIX: a missing comma fused "config" and "user.email" into a single
    # argv token ("configuser.email"), so the email was never configured.
    capture(["git", "config", "user.email", "*****@*****.**"], cwd=snap_basename)
    capture(["git", "config", "user.name", "cdkbot"], cwd=snap_basename)
    capture(["git", "checkout", "-b", to_branch], cwd=snap_basename)
    snapcraft_fn = Path(snap_basename) / "snapcraft.yaml"
    snapcraft_fn_tpl = Path(snap_basename) / "snapcraft.yaml.in"
    if not snapcraft_fn_tpl.exists():
        click.echo(f"{snapcraft_fn_tpl} not found")
        sys.exit(1)
    # Apply patches: "all" applies to every branch, plus any patches keyed by
    # this branch's version. Copy each into shared/ and stage it.
    patches_list = []
    if patches:
        patches_path = Path(patches)
        if patches_path.exists():
            click.echo("Patches found, applying.")
            patches_map = yaml.safe_load(patches_path.read_text(encoding="utf8"))
            for patch_key in ("all", to_branch.lstrip("v")):
                for patch_fn in patches_map.get(patch_key, []):
                    patch_fn = Path(patch_fn).absolute()
                    shared_path = str(Path("shared") / patch_fn.parts[-1])
                    sh.cp(str(patch_fn), str(shared_path), _cwd=snap_basename)
                    patches_list.append(shared_path)
                    git.add(shared_path, _cwd=snap_basename)
    # Map major.minor to a go channel for the template.
    k8s_major_minor = semver.parse(to_branch.lstrip("v"))
    k8s_major_minor = f"{k8s_major_minor['major']}.{k8s_major_minor['minor']}"
    # NOTE: the previous version read the template into snapcraft_yml and then
    # immediately overwrote it with _render's output — dead read removed.
    snapcraft_yml = _render(
        snapcraft_fn_tpl,
        {
            "snap_version": to_branch.lstrip("v"),
            "patches": patches_list,
            "go_version": K8S_GO_MAP.get(k8s_major_minor, "go/1.12/stable"),
        },
    )
    snapcraft_fn.write_text(snapcraft_yml)
    if not dry_run:
        cmd_ok("git add .", cwd=snap_basename)
        cmd_ok(f"git commit -m 'Creating branch {to_branch}'", cwd=snap_basename)
        cmd_ok(f"git push --force {repo} {to_branch}", cwd=snap_basename)
def cleanup_source(self, **subprocess_kwargs):
    """Remove any generated *.changes files from the working directory."""
    # shell=True so the glob expands.
    cmd_ok("rm -rf *.changes", shell=True, **subprocess_kwargs)
def cleanup_debian(self, **subprocess_kwargs):
    """Remove the debian/ packaging directory left over from the build."""
    rm_cmd = ["rm", "-rf", "debian"]
    cmd_ok(rm_cmd, **subprocess_kwargs)
def __run_git(args):
    """Sync one layer repo: merge upstream/master into the downstream fork.

    :param args: tuple of (username, password, layer_name, upstream, downstream).
    """
    username, password, layer_name, upstream, downstream = args
    log.info(f"Syncing {layer_name} :: {upstream} -> {downstream}")
    downstream = f"https://{username}:{password}@github.com/{downstream}"
    # Unique scratch dir per sync so parallel runs don't collide.
    identifier = str(uuid.uuid4())
    os.makedirs(identifier)
    ret = capture(f"git clone {downstream} {identifier}")
    if not ret.ok:
        log.info(f"Failed to clone repo: {ret.stderr.decode()}")
        sys.exit(1)
    cmd_ok("git config user.email '*****@*****.**'", cwd=identifier)
    cmd_ok("git config user.name cdkbot", cwd=identifier)
    cmd_ok("git config push.default simple", cwd=identifier)
    cmd_ok(f"git remote add upstream {upstream}", cwd=identifier)
    cmd_ok("git fetch upstream", cwd=identifier)
    cmd_ok("git checkout master", cwd=identifier)
    cmd_ok("git merge upstream/master", cwd=identifier)
    cmd_ok("git push origin", cwd=identifier)
    # BUG FIX: the original string lacked the f-prefix, so it tried to delete
    # a literal "{identifier}" path and leaked the clone directory every run.
    cmd_ok(f"rm -rf {identifier}")