# Example 1
def create_latest(bucket, sha, target):
    """Create a file in GCS with information about the latest release.

    Writes a small JSON document ({"sha": ..., "target": ...}) to
    latest_release.json in the given bucket; get_last_release reads it back.

    Args:
      bucket: A google cloud storage bucket object.
      sha: SHA of the release we just created; stored stripped of whitespace.
      target: The GCS path of the release we just produced.
    """
    # os.path.join with a single argument was a no-op; use the literal path,
    # matching get_last_release which reads the same object.
    path = "latest_release.json"

    logging.info("Creating GCS output: %s", util.to_gcs_uri(bucket.name, path))

    data = {
        "sha": sha.strip(),
        "target": target,
    }
    blob = bucket.blob(path)
    blob.upload_from_string(json.dumps(data))
# Example 2
def get_last_release(bucket):
  """Return the sha of the last release.

  Args:
    bucket: A google cloud storage bucket object

  Returns:
    sha: The sha of the latest release; empty string when no release has
      been recorded yet.
  """
  blob = bucket.blob("latest_release.json")

  if blob.exists():
    # The file is a small JSON document written by create_latest.
    info = json.loads(blob.download_as_string())
    return info.get("sha", "").strip()

  logging.info("File %s doesn't exist.",
               util.to_gcs_uri(bucket.name, "latest_release.json"))
  return ""
# Example 3
def create_pr_symlink(args):
    """Create a 'symlink' in GCS pointing at the results for a PR.

    This is a null op if PROW environment variables indicate this is not a PR
    job. The "symlink" is a text file at the gubernator directory path whose
    contents are the GCS URI of the actual job output directory.

    Args:
      args: Parsed command line arguments; args.bucket is the GCS bucket the
        symlink file is written to.
    """
    gcs_client = storage.Client()
    # GCS layout is defined here:
    # https://github.com/kubernetes/test-infra/tree/master/gubernator#job-artifact-gcs-layout
    pull_number = os.getenv("PULL_NUMBER")
    if not pull_number:
        # Symlinks are only created for pull requests.
        return ""

    path = "pr-logs/directory/{job}/{build}.txt".format(
        job=os.getenv("JOB_NAME"), build=os.getenv("BUILD_NUMBER"))

    # The original code re-read PULL_NUMBER and built an unused build_dir
    # string from REPO_OWNER/REPO_NAME; that dead code has been removed.
    source = util.to_gcs_uri(args.bucket, path)
    target = get_gcs_dir(args.bucket)
    logging.info("Creating symlink %s pointing to %s", source, target)
    bucket = gcs_client.get_bucket(args.bucket)
    blob = bucket.blob(path)
    blob.upload_from_string(target)
# Example 4
def build_and_push_artifacts(go_dir,
                             src_dir,
                             registry,
                             publish_path=None,
                             gcb_project=None,
                             build_info_path=None):
    """Build and push the artifacts.

    Builds the operator image, packages the helm chart from a temporary copy
    of the chart directory, and optionally publishes the chart archive and a
    latest_release.json pointer to GCS.

    Args:
      go_dir: The GOPATH directory.
      src_dir: The root directory where we checked out the repo.
      registry: Docker registry to use.
      publish_path: (Optional) The GCS path where artifacts should be
        published. Set to none to only build locally.
      gcb_project: The project to use with GCB to build docker images.
        If set to none uses docker to build.
      build_info_path: (Optional): GCS location to write YAML file containing
        information about the build.
    """
    # Update the GOPATH to the temporary directory.
    env = os.environ.copy()
    if go_dir:
        env["GOPATH"] = go_dir

    bin_dir = os.path.join(src_dir, "bin")
    if not os.path.exists(bin_dir):
        os.makedirs(bin_dir)

    build_info = build_operator_image(src_dir, registry, project=gcb_project)

    # Copy the chart to a temporary directory because we will modify some
    # of its YAML files.
    chart_build_dir = tempfile.mkdtemp(prefix="tmpTFJobChartBuild")
    shutil.copytree(os.path.join(src_dir, "tf-job-operator-chart"),
                    os.path.join(chart_build_dir, "tf-job-operator-chart"))
    # The image tag doubles as the chart/release version.
    version = build_info["image"].split(":")[-1]
    values_file = os.path.join(chart_build_dir, "tf-job-operator-chart",
                               "values.yaml")
    update_values(values_file, build_info["image"])

    chart_file = os.path.join(chart_build_dir, "tf-job-operator-chart",
                              "Chart.yaml")
    update_chart(chart_file, version)

    # Delete any existing matches because we assume there is only 1 below.
    matches = glob.glob(os.path.join(bin_dir, "tf-job-operator-chart*.tgz"))
    for m in matches:
        logging.info("Delete previous build: %s", m)
        os.unlink(m)

    util.run([
        "helm", "package", "--save=false", "--destination=" + bin_dir,
        "./tf-job-operator-chart"
    ],
             cwd=chart_build_dir)

    matches = glob.glob(os.path.join(bin_dir, "tf-job-operator-chart*.tgz"))

    if len(matches) != 1:
        raise ValueError(
            "Expected 1 chart archive to match but found {0}".format(matches))

    chart_archive = matches[0]

    release_path = version

    targets = [
        os.path.join(release_path, os.path.basename(chart_archive)),
        "latest/tf-job-operator-chart-latest.tgz",
    ]

    if publish_path:
        gcs_client = storage.Client(project=gcb_project)
        bucket_name, base_path = util.split_gcs_uri(publish_path)
        bucket = gcs_client.get_bucket(bucket_name)
        for t in targets:
            blob = bucket.blob(os.path.join(base_path, t))
            gcs_path = util.to_gcs_uri(bucket_name, blob.name)
            if not t.startswith("latest"):
                build_info["helm_chart"] = gcs_path
            # Versioned releases are immutable; only the "latest" alias is
            # ever overwritten.
            if blob.exists() and not t.startswith("latest"):
                # logging.warn is deprecated (removed in Python 3.13).
                logging.warning("%s already exists", gcs_path)
                continue
            logging.info("Uploading %s to %s.", chart_archive, gcs_path)
            blob.upload_from_filename(chart_archive)

        # NOTE(review): this URI omits base_path while the blob above was
        # uploaded to os.path.join(base_path, targets[0]) — verify intended.
        create_latest(bucket, build_info["commit"],
                      util.to_gcs_uri(bucket_name, targets[0]))

    # Always write to the bin dir.
    paths = [os.path.join(bin_dir, "build_info.yaml")]

    if build_info_path:
        paths.append(build_info_path)

    write_build_info(build_info, paths, project=gcb_project)
# Example 5
def main():  # pylint: disable=too-many-locals
    """Cut a TfJob release from the latest green presubmit and publish it."""
    logging.getLogger().setLevel(logging.INFO)
    parser = argparse.ArgumentParser(
        description="Release artifacts for TfJob.")

    parser.add_argument("--releases_bucket",
                        default="tf-on-k8s-dogfood-releases",
                        type=str,
                        help="The bucket to publish releases to.")

    # TODO(jlewi): Should pass along unknown arguments to build and push.
    args, _ = parser.parse_known_args()

    gcs_client = storage.Client()
    sha = get_latest_green_presubmit(gcs_client)

    src_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
    logging.info("src_dir: %s", src_dir)

    # util.clone_repo returns a tuple (see build_once); unpack it so sha is
    # the resolved commit string, not the whole tuple.
    _, sha = util.clone_repo(src_dir, util.MASTER_REPO_OWNER,
                             util.MASTER_REPO_NAME, sha)

    # TODO(jlewi): We should check if we've already done a push. We could
    # check if the .tar.gz for the helm package exists.
    build_info_file = os.path.join(src_dir, "build_info.yaml")
    util.run([
        os.path.join(src_dir, "images", "tf_operator", "build_and_push.py"),
        "--output=" + build_info_file
    ],
             cwd=src_dir)

    with open(build_info_file) as hf:
        # safe_load: yaml.load without an explicit Loader is unsafe and
        # requires a Loader argument under PyYAML >= 6.
        build_info = yaml.safe_load(hf)

    version = build_info["image"].split(":")[-1]
    values_file = os.path.join(src_dir, "tf-job-operator-chart", "values.yaml")
    update_values(values_file, build_info["image"])

    chart_file = os.path.join(src_dir, "tf-job-operator-chart", "Chart.yaml")
    update_chart(chart_file, version)

    util.run(["helm", "package", "./tf-job-operator-chart"], cwd=src_dir)

    matches = glob.glob(os.path.join(src_dir, "tf-job-operator-chart*.tgz"))

    if len(matches) != 1:
        raise ValueError(
            "Expected 1 chart archive to match but found {0}".format(matches))

    chart_archive = matches[0]

    release_path = version

    bucket = gcs_client.get_bucket(args.releases_bucket)

    targets = [
        os.path.join(release_path, os.path.basename(chart_archive)),
        "latest/tf-job-operator-chart-latest.tgz",
    ]

    for t in targets:
        blob = bucket.blob(t)
        gcs_path = util.to_gcs_uri(args.releases_bucket, t)
        # Never overwrite a versioned release; only "latest" is refreshed.
        if blob.exists() and not t.startswith("latest"):
            # logging.warn is deprecated (removed in Python 3.13).
            logging.warning("%s already exists", gcs_path)
            continue
        logging.info("Uploading %s to %s.", chart_archive, gcs_path)
        blob.upload_from_filename(chart_archive)
# Example 6
def build_once(bucket_name):  # pylint: disable=too-many-locals
    """Cut a release for the latest green presubmit, if not already released.

    Compares the latest passing sha against the last recorded release in
    latest_release.json and returns early if they match; otherwise builds the
    image and chart, uploads the archives, and updates latest_release.json.

    Args:
      bucket_name: Name of the GCS bucket releases are published to.
    """
    gcs_client = storage.Client()
    sha = get_latest_green_presubmit(gcs_client)

    bucket = gcs_client.get_bucket(bucket_name)

    logging.info("Latest passing postsubmit is %s", sha)

    last_release_sha = get_last_release(bucket)
    logging.info("Most recent release was for %s", last_release_sha)

    if sha == last_release_sha:
        logging.info("Already cut release for %s", sha)
        return

    go_dir = tempfile.mkdtemp(prefix="tmpTfJobSrc")
    logging.info("Temporary go_dir: %s", go_dir)

    src_dir = os.path.join(go_dir, "src", "github.com", REPO_ORG, REPO_NAME)

    _, sha = util.clone_repo(src_dir, util.MASTER_REPO_OWNER,
                             util.MASTER_REPO_NAME, sha)

    # Update the GOPATH to the temporary directory.
    env = os.environ.copy()
    env["GOPATH"] = go_dir
    build_info_file = os.path.join(src_dir, "build_info.yaml")
    util.run([
        os.path.join(src_dir, "images", "tf_operator", "build_and_push.py"),
        "--gcb", "--project=" + GCB_PROJECT, "--output=" + build_info_file
    ],
             cwd=src_dir,
             env=env)

    with open(build_info_file) as hf:
        # safe_load: yaml.load without an explicit Loader is unsafe and
        # requires a Loader argument under PyYAML >= 6.
        build_info = yaml.safe_load(hf)

    version = build_info["image"].split(":")[-1]
    values_file = os.path.join(src_dir, "tf-job-operator-chart", "values.yaml")
    update_values(values_file, build_info["image"])

    chart_file = os.path.join(src_dir, "tf-job-operator-chart", "Chart.yaml")
    update_chart(chart_file, version)

    util.run(["helm", "package", "./tf-job-operator-chart"], cwd=src_dir)

    matches = glob.glob(os.path.join(src_dir, "tf-job-operator-chart*.tgz"))

    if len(matches) != 1:
        raise ValueError(
            "Expected 1 chart archive to match but found {0}".format(matches))

    chart_archive = matches[0]

    release_path = version

    targets = [
        os.path.join(release_path, os.path.basename(chart_archive)),
        "latest/tf-job-operator-chart-latest.tgz",
    ]

    for t in targets:
        blob = bucket.blob(t)
        gcs_path = util.to_gcs_uri(bucket_name, t)
        # Never overwrite a versioned release; only "latest" is refreshed.
        if blob.exists() and not t.startswith("latest"):
            # logging.warn is deprecated (removed in Python 3.13).
            logging.warning("%s already exists", gcs_path)
            continue
        logging.info("Uploading %s to %s.", chart_archive, gcs_path)
        blob.upload_from_filename(chart_archive)

    create_latest(bucket, sha, util.to_gcs_uri(bucket_name, targets[0]))