def build_postsubmit(args):
  """Build the artifacts from a postsubmit."""
  go_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
  os.environ["GOPATH"] = go_dir
  logging.info("Temporary go_dir: %s", go_dir)

  src_dir = os.path.join(go_dir, "src", "github.com", REPO_ORG, REPO_NAME)

  util.clone_repo(src_dir, util.MASTER_REPO_OWNER, util.MASTER_REPO_NAME,
                  args.commit)

  build_and_push(go_dir, src_dir, args)

def build_lastgreen(args):  # pylint: disable=too-many-locals
  """Find the latest green postsubmit and build the artifacts."""
  gcs_client = storage.Client()
  sha = get_latest_green_presubmit(gcs_client)

  bucket_name, _ = util.split_gcs_uri(args.releases_path)
  bucket = gcs_client.get_bucket(bucket_name)

  logging.info("Latest passing postsubmit is %s", sha)

  last_release_sha = get_last_release(bucket)
  logging.info("Most recent release was for %s", last_release_sha)

  if sha == last_release_sha:
    logging.info("Already cut release for %s", sha)
    return

  go_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
  logging.info("Temporary go_dir: %s", go_dir)

  src_dir = os.path.join(go_dir, "src", "github.com", REPO_ORG, REPO_NAME)

  _, sha = util.clone_repo(src_dir, util.MASTER_REPO_OWNER,
                           util.MASTER_REPO_NAME, sha)

  build_and_push(go_dir, src_dir, args)

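# get_last_release() used above is defined elsewhere in this module. The
# sketch below is purely illustrative and is not the real implementation: it
# assumes the release pipeline records the last released commit in a small
# YAML marker object in the releases bucket. The object name and the "commit"
# key are assumptions, and the helper name is prefixed with _example_ so it
# does not shadow the real function.
def _example_get_last_release(bucket):
  """Illustrative sketch: return the sha recorded by the last release."""
  blob = bucket.blob("latest/build_info.yaml")  # hypothetical object name
  if not blob.exists():
    return ""
  info = yaml.load(blob.download_as_string())
  return info.get("commit", "")  # hypothetical key
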
def build_commit(args, branches):
  """Build the artifacts from a specific commit, cloning into a go path."""
  top_dir = args.src_dir or tempfile.mkdtemp(prefix="tmpTFJobSrc")
  logging.info("Top level directory for source: %s", top_dir)

  go_dir = os.path.join(top_dir, "go")
  os.environ["GOPATH"] = go_dir
  logging.info("Temporary go_dir: %s", go_dir)

  clone_dir = os.path.join(top_dir, REPO_DIR)
  src_dir = os.path.join(go_dir, "src", "github.com", REPO_ORG, REPO_NAME)

  util.clone_repo(clone_dir, REPO_ORG, REPO_NAME, args.commit, branches)

  # Create a symbolic link in the go path.
  os.makedirs(os.path.dirname(src_dir))
  logging.info("Creating symbolic link %s pointing to %s", src_dir, clone_dir)
  os.symlink(clone_dir, src_dir)

  util.install_go_deps(clone_dir)

  build_and_push(go_dir, src_dir, args)

def clone_lastgreen(args):
  """Clone the source at the latest green commit."""
  gcs_client = storage.Client()
  sha = get_latest_green_presubmit(gcs_client)

  util.clone_repo(args.src_dir, util.MASTER_REPO_OWNER, util.MASTER_REPO_NAME,
                  sha)

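# get_latest_green_presubmit() is likewise defined elsewhere in this module.
# As a rough, hypothetical sketch of the idea only: the CI could publish the
# sha of the latest passing run as a small text object in GCS. The bucket and
# object names below are assumptions, not the actual locations this project
# uses.
def _example_get_latest_green_presubmit(gcs_client):
  """Illustrative sketch: read a sha marker published by CI."""
  bucket = gcs_client.get_bucket("tf-on-k8s-dogfood-releases")  # hypothetical
  blob = bucket.blob("latest_green.txt")  # hypothetical object name
  return blob.download_as_string().decode("utf-8").strip()
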
def clone_postsubmit(args):
  """Clone the source at a postsubmit commit."""
  util.clone_repo(args.src_dir, REPO_ORG, REPO_NAME, args.commit)

def clone_pr(args):
  """Clone the source for a pull request."""
  branches = ["pull/{0}/head:pr".format(args.pr)]
  util.clone_repo(args.src_dir, REPO_ORG, REPO_NAME, args.commit, branches)

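# The refspec above ("pull/<pr>/head:pr") fetches the head commit of a pull
# request into a local branch named "pr"; util.clone_repo performs the actual
# fetch/checkout. As a minimal sketch of the equivalent git call (the helper
# name is hypothetical and it assumes the remote is named "origin"):
def _example_fetch_pr(repo_dir, pr_number):
  """Illustrative sketch: fetch a PR head into a local branch named "pr"."""
  util.run(["git", "fetch", "origin",
            "pull/{0}/head:pr".format(pr_number)], cwd=repo_dir)
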
def main():  # pylint: disable=too-many-locals
  """Cut a release from the latest green commit and upload the helm chart."""
  logging.getLogger().setLevel(logging.INFO)

  parser = argparse.ArgumentParser(
    description="Release artifacts for TfJob.")

  parser.add_argument(
    "--releases_bucket",
    default="tf-on-k8s-dogfood-releases",
    type=str,
    help="The bucket to publish releases to.")

  # TODO(jlewi): Should pass along unknown arguments to build and push.
  args, _ = parser.parse_known_args()

  gcs_client = storage.Client()
  sha = get_latest_green_presubmit(gcs_client)

  src_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
  logging.info("src_dir: %s", src_dir)

  sha = util.clone_repo(src_dir, util.MASTER_REPO_OWNER, util.MASTER_REPO_NAME,
                        sha)

  # TODO(jlewi): We should check if we've already done a push. We could
  # check if the .tar.gz for the helm package exists.
  build_info_file = os.path.join(src_dir, "build_info.yaml")
  util.run([
    os.path.join(src_dir, "images", "tf_operator", "build_and_push.py"),
    "--output=" + build_info_file
  ], cwd=src_dir)

  with open(build_info_file) as hf:
    build_info = yaml.load(hf)

  version = build_info["image"].split(":")[-1]
  values_file = os.path.join(src_dir, "tf-job-operator-chart", "values.yaml")
  update_values(values_file, build_info["image"])

  chart_file = os.path.join(src_dir, "tf-job-operator-chart", "Chart.yaml")
  update_chart(chart_file, version)

  # Package the chart and locate the resulting archive.
  util.run(["helm", "package", "./tf-job-operator-chart"], cwd=src_dir)

  matches = glob.glob(os.path.join(src_dir, "tf-job-operator-chart*.tgz"))

  if len(matches) != 1:
    raise ValueError(
      "Expected 1 chart archive to match but found {0}".format(matches))

  chart_archive = matches[0]

  release_path = version

  bucket = gcs_client.get_bucket(args.releases_bucket)

  targets = [
    os.path.join(release_path, os.path.basename(chart_archive)),
    "latest/tf-job-operator-chart-latest.tgz",
  ]

  for t in targets:
    blob = bucket.blob(t)
    gcs_path = util.to_gcs_uri(args.releases_bucket, t)
    if blob.exists() and not t.startswith("latest"):
      logging.warn("%s already exists", gcs_path)
      continue
    logging.info("Uploading %s to %s.", chart_archive, gcs_path)
    blob.upload_from_filename(chart_archive)

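# update_values() and update_chart() used above are defined elsewhere in this
# module. The sketches below only illustrate the intent, assuming values.yaml
# carries the operator image under an "image" key and Chart.yaml carries the
# chart version under a "version" key; the real helpers may edit the files
# differently (for example with regex substitution). The names are prefixed
# with _example_ so they do not shadow the real functions.
def _example_update_values(values_file, image):
  """Illustrative sketch: point the chart's values.yaml at a new image."""
  with open(values_file) as hf:
    values = yaml.load(hf)
  values["image"] = image  # assumed key
  with open(values_file, "w") as hf:
    yaml.dump(values, hf)


def _example_update_chart(chart_file, version):
  """Illustrative sketch: bump the chart version in Chart.yaml."""
  with open(chart_file) as hf:
    chart = yaml.load(hf)
  chart["version"] = version  # assumed key
  with open(chart_file, "w") as hf:
    yaml.dump(chart, hf)
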
def build_once(bucket_name):  # pylint: disable=too-many-locals
  """Build and release the latest green commit, unless it was already released."""
  gcs_client = storage.Client()
  sha = get_latest_green_presubmit(gcs_client)

  bucket = gcs_client.get_bucket(bucket_name)

  logging.info("Latest passing postsubmit is %s", sha)

  last_release_sha = get_last_release(bucket)
  logging.info("Most recent release was for %s", last_release_sha)

  if sha == last_release_sha:
    logging.info("Already cut release for %s", sha)
    return

  go_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
  logging.info("Temporary go_dir: %s", go_dir)

  src_dir = os.path.join(go_dir, "src", "github.com", REPO_ORG, REPO_NAME)

  _, sha = util.clone_repo(src_dir, util.MASTER_REPO_OWNER,
                           util.MASTER_REPO_NAME, sha)

  # Update the GOPATH to the temporary directory.
  env = os.environ.copy()
  env["GOPATH"] = go_dir

  build_info_file = os.path.join(src_dir, "build_info.yaml")
  util.run([
    os.path.join(src_dir, "images", "tf_operator", "build_and_push.py"),
    "--gcb", "--project=" + GCB_PROJECT,
    "--output=" + build_info_file
  ], cwd=src_dir, env=env)

  with open(build_info_file) as hf:
    build_info = yaml.load(hf)

  version = build_info["image"].split(":")[-1]
  values_file = os.path.join(src_dir, "tf-job-operator-chart", "values.yaml")
  update_values(values_file, build_info["image"])

  chart_file = os.path.join(src_dir, "tf-job-operator-chart", "Chart.yaml")
  update_chart(chart_file, version)

  # Package the chart and locate the resulting archive.
  util.run(["helm", "package", "./tf-job-operator-chart"], cwd=src_dir)

  matches = glob.glob(os.path.join(src_dir, "tf-job-operator-chart*.tgz"))

  if len(matches) != 1:
    raise ValueError(
      "Expected 1 chart archive to match but found {0}".format(matches))

  chart_archive = matches[0]

  release_path = version

  targets = [
    os.path.join(release_path, os.path.basename(chart_archive)),
    "latest/tf-job-operator-chart-latest.tgz",
  ]

  for t in targets:
    blob = bucket.blob(t)
    gcs_path = util.to_gcs_uri(bucket_name, t)
    if blob.exists() and not t.startswith("latest"):
      logging.warn("%s already exists", gcs_path)
      continue
    logging.info("Uploading %s to %s.", chart_archive, gcs_path)
    blob.upload_from_filename(chart_archive)

  create_latest(bucket, sha, util.to_gcs_uri(bucket_name, targets[0]))

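# create_latest() called above is defined elsewhere in this module. A sketch
# of the idea only: record which commit and chart archive the "latest" release
# points at by writing a small YAML blob into the bucket. The object name and
# keys are assumptions for illustration, not the real format.
def _example_create_latest(bucket, sha, target):
  """Illustrative sketch: record the commit and chart uri of the latest release."""
  latest = {
    "commit": sha,
    "chart": target,
  }
  blob = bucket.blob("latest/release.yaml")  # hypothetical object name
  blob.upload_from_string(yaml.dump(latest))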